Diffstat (limited to 'src/mongo/gotools/vendor'); each entry lists the file mode, path, and trailing line count (binary files show a byte-size delta instead)
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/LICENSE27
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/README59
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/golint/golint.go67
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/lint.go1057
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/lint_test.go197
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/misc/emacs/golint.el51
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/misc/vim/ftplugin/go/lint.vim31
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/4.go34
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/5_test.go17
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-lib.go33
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-lib_test.go20
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-main.go12
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/common-methods.go16
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/const-block.go36
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/else-multi.go18
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/else.go23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/errorf.go22
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/errors.go35
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/import-dot.go6
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/inc.go14
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/make.go10
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/names.go54
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc1.go3
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc2.go5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc3.go7
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc4.go7
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-main.go5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/range.go27
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/receiver-names.go38
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/sort.go20
-rw-r--r--src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/var-decl.go48
-rw-r--r--src/mongo/gotools/vendor/src/github.com/howeyc/gopass/LICENSE.txt13
-rw-r--r--src/mongo/gotools/vendor/src/github.com/howeyc/gopass/README.md21
-rw-r--r--src/mongo/gotools/vendor/src/github.com/howeyc/gopass/nix.go29
-rw-r--r--src/mongo/gotools/vendor/src/github.com/howeyc/gopass/pass.go56
-rw-r--r--src/mongo/gotools/vendor/src/github.com/howeyc/gopass/win.go44
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/.gitignore5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/LICENSE202
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/README.md58
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/all_of.go70
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/all_of_test.go110
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any.go32
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_of.go94
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_of_test.go139
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_test.go53
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/contains.go61
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/contains_test.go233
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/deep_equals.go88
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/deep_equals_test.go343
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/elements_are.go91
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/elements_are_test.go208
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/equals.go557
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/equals_test.go3843
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/error.go51
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/error_test.go92
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_or_equal.go39
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_or_equal_test.go1059
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_than.go39
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_than_test.go1079
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_same_type_as.go37
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_same_type_as_test.go181
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_substr.go46
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_substr_test.go93
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/identical_to.go134
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/identical_to_test.go849
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_or_equal.go41
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_or_equal_test.go1079
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_than.go152
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_than_test.go1059
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matcher.go86
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matches_regexp.go69
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matches_regexp_test.go92
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/new_matcher.go43
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/not.go53
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/not_test.go108
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/panics.go74
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/panics_test.go141
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/pointee.go65
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/pointee_test.go152
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/transform_description.go36
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/.travis.yml35
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/LICENSE26
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/README.md135
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/arg.go24
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/arg_test.go133
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/assert_test.go177
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/check_crosscompile.sh16
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/closest.go59
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/command.go441
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/command_test.go544
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/completion.go300
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/completion_test.go294
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/convert.go341
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/convert_test.go159
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/error.go129
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/example_test.go110
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/add.go23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/bash-completion9
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/main.go75
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/rm.go23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/flags.go256
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/group.go379
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/group_test.go255
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/help.go466
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/help_test.go460
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/ini.go593
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/ini_test.go950
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/long_test.go85
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/man.go194
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/marshal_test.go97
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/multitag.go140
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/option.go414
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/options_test.go45
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/optstyle_other.go67
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/optstyle_windows.go106
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/parser.go652
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/parser_test.go487
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/pointer_test.go81
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/short_test.go194
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/tag_test.go38
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize.go28
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_linux.go7
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_nosysioctl.go7
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_other.go7
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_unix.go7
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/unknown_test.go66
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jtolds/gls/LICENSE18
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jtolds/gls/README.md89
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jtolds/gls/context.go144
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jtolds/gls/context_test.go139
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jtolds/gls/gen_sym.go13
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jtolds/gls/id_pool.go34
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags.go43
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags_js.go101
-rw-r--r--src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags_main.go61
-rw-r--r--src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/.travis.yml8
-rw-r--r--src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/README.mkd26
-rw-r--r--src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth.go464
-rw-r--r--src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_js.go8
-rw-r--r--src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_posix.go69
-rw-r--r--src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_test.go229
-rw-r--r--src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_windows.go24
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/AUTHORS4
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/LICENSE19
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/README.md28
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/editbox.go300
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/interrupt.go69
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/keyboard.go722
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/output.go228
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/paint.go105
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/random_output.go46
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/raw_input.go109
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api.go458
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api_common.go187
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api_windows.go239
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/nsf/termbox-go/collect_terminfo.py110
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls.go39
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_darwin.go41
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_darwin_amd64.go40
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_freebsd.go39
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_linux.go33
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_netbsd.go39
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_openbsd.go39
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_windows.go61
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox.go514
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox_common.go59
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox_windows.go856
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/terminfo.go221
-rw-r--r--src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/terminfo_builtin.go64
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/.gitignore3
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/.travis.yml14
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/LICENSE.md23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/README.md575
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/assertions.goconvey3
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/collections.go244
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/collections_test.go157
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/doc.go105
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/doc_test.go57
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/equality.go280
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/equality_test.go269
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/filter.go23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/Makefile23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/.travis.yml21
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/LICENSE27
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/PRESUBMIT.py109
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/README.md78
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/WATCHLISTS26
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/pre-commit-go.yml78
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/render/render.go327
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/render/render_test.go170
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/.gitignore5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/.travis.yml4
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/LICENSE202
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/README.md58
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/all_of.go70
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/all_of_test.go110
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any.go32
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_of.go94
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_of_test.go139
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_test.go53
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/contains.go61
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/contains_test.go233
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go88
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/deep_equals_test.go343
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/elements_are.go91
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/elements_are_test.go208
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/equals.go541
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/equals_test.go3864
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/error.go51
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/error_test.go92
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go39
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal_test.go1101
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go39
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_than_test.go1077
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_same_type_as.go37
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_same_type_as_test.go181
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_substr.go46
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_substr_test.go93
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/identical_to.go134
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/identical_to_test.go849
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go41
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal_test.go1077
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_than.go152
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_than_test.go1057
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matcher.go86
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matches_regexp.go69
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matches_regexp_test.go92
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/new_matcher.go43
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/not.go53
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/not_test.go108
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/panics.go74
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/panics_test.go141
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/pointee.go65
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/pointee_test.go152
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go36
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/.gitignore5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/.travis.yml4
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/LICENSE202
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/README.md103
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/action.go36
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/controller.go480
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/controller_test.go1249
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/createmock.go245
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/createmock_test.go233
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/gcs/bucket.go23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.gcs_bucket125
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.gcs_bucket_same_package124
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.no_interfaces1
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.no_package1
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.unknown_interface1
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.unknown_package1
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/do_all.go53
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/do_all_test.go90
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/doc.go28
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/error_reporter.go29
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/expectation.go59
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/generate.go369
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/generate_test.go168
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/complicated_pkg/complicated_pkg.go41
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.complicated_pkg.go311
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.image.go238
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.io_reader_writer.go127
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.io_reader_writer_same_package.go126
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.renamed_pkg.go66
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/renamed_pkg/renamed_pkg.go24
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/type_string.go147
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/type_string_test.go220
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/integration_test.go129
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/internal_expectation.go180
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/internal_expectation_test.go265
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/invoke.go73
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/invoke_test.go110
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/mock_object.go30
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/return.go251
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/return_test.go978
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/sample/README.markdown6
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/sample/mock_io/mock_io.go71
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/save_arg.go83
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/save_arg_test.go132
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/.gitignore5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/.travis.yml4
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/LICENSE202
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/README.md151
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/assert_aliases.go70
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/assert_that.go46
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/doc.go51
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_aliases.go64
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_call.go59
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_that.go100
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_that_test.go168
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/failure.go90
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/integration_test.go265
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/register.go86
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/register_test_suite.go193
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/run_tests.go354
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/docs.go5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/methods.go65
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/methods_test.go107
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/failing.test.go252
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/filtered.test.go79
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.failing_test278
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.filtered_test24
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.mock_test25
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.no_cases_test6
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.panicking_test90
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.passing_test22
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.run_twice_test14
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.stop_test13
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.unexported_test12
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/mock.test.go82
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/mock_image/mock_image.go115
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/no_cases.test.go41
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/panicking.test.go99
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/passing.test.go120
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/run_twice.test.go47
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/stop.test.go61
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/unexported.test.go43
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_info.go91
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/.gitignore24
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/LICENSE202
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/README.md53
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/reqtrace.go132
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/trace_state.go175
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/messages.go94
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/panic.go115
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/panic_test.go53
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/quantity.go141
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/quantity_test.go145
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/serializer.go69
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/serializer_test.go36
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/should/should.go73
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/strings.go227
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/strings_test.go118
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/time.go202
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/time_test.go159
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/type.go112
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/type_test.go76
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/utilities_for_test.go75
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/.gitignore5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/.travis.yml14
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/CONTRIBUTING.md22
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/LICENSE.md23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/README.md126
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/assertions.go68
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/context.go272
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/convey.goconvey4
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/discovery.go103
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/doc.go218
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/focused_execution_test.go72
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/gotest/doc_test.go1
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/gotest/utils.go28
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/init.go81
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/isolated_execution_test.go774
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/nilReporter.go15
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/console.go16
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/doc.go5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/dot.go40
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/dot_test.go40
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/gotest.go33
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/gotest_test.go66
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/init.go94
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/json.go88
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/printer.go57
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/printer_test.go181
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/problems.go80
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/problems_test.go51
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporter.go39
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporter_test.go94
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporting.goconvey2
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reports.go179
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/statistics.go89
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/story.go73
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting_hooks_test.go317
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/story_conventions_test.go175
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/dependencies.go4
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/doc_test.go1
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/assertion_examples_test.go125
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/bowling_game.go75
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/bowling_game_test.go80
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/doc.go5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/examples.goconvey12
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/simple_example_test.go36
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/goconvey.go280
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/composer.html35
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/favicon.icobin0 -> 15086 bytes
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/index.html487
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/common.css962
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/composer.css65
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/font-awesome.min.css5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/dark-bigtext.css400
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/dark.css380
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/light.css328
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/tipsy.css97
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/FontAwesome.otfbin0 -> 75188 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.eotbin0 -> 72449 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.svg504
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.ttfbin0 -> 141564 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.woffbin0 -> 83760 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/LICENSE.txt202
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Bold.ttfbin0 -> 224592 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Italic.ttfbin0 -> 212896 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Light.ttfbin0 -> 222412 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-LightItalic.ttfbin0 -> 213128 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Regular.ttfbin0 -> 217360 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Orbitron/OFL.txt93
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Orbitron/Orbitron-Regular.ttfbin0 -> 39484 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Oswald/OFL.txt92
-rwxr-xr-xsrc/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Oswald/Oswald-Regular.ttfbin0 -> 50944 bytes
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-buildfail.icobin0 -> 15086 bytes
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-fail.icobin0 -> 15086 bytes
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-ok.icobin0 -> 15086 bytes
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-panic.icobin0 -> 15086 bytes
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/composer.js171
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/config.js15
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/convey.js46
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/goconvey.js1322
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/ansispan.js67
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/diff-match-patch.min.js49
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery-2_1_0.min.js4
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery-ui-1_10_3-custom.min.js5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery.pretty-text-diff.min.js5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery.tipsy.min.js5
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/markup.min.js6
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/moment.min.js6
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/taboverride.min.js3
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/poller.js130
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/api.goconvey2
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/server.go164
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/server_test.go462
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/contracts.go27
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/doc_test.go1
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/result.go120
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/contract.go12
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/coordinator.go71
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor.go84
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor.goconvey2
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor_test.go160
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/tester.go56
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/tester_test.go254
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/messaging/doc_test.go1
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/messaging/messages.go56
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/packageParser.go174
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/package_parser_test.go792
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser.go32
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser.goconvey2
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser_test.go47
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/rules.go43
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/testParser.go174
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/util.go45
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell.go174
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell_integration_test.go33
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell_test.go217
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/system.goconvey3
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/functional_core.go171
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/functional_core_test.go419
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/imperative_shell.go77
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration.go185
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_test.go200
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/doc_test.go1
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/main.go10
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/.gitignore2
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/stuff.go4
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/stuff_test.go17
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/sub.goconvey7
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/util_test.go92
-rw-r--r--src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/watch.goconvey3
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/LICENSE191
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/README.md26
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/bio.go355
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/build.go23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/cert.go407
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/cert_test.go139
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ciphers.go355
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ciphers_test.go307
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/conn.go625
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ctx.go831
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ctx_test.go48
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/dhparam.go65
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/digest.go53
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/engine.go52
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/fips.go22
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/hostname.c367
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/hostname.go127
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/http.go61
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init.go155
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init_posix.go64
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init_windows.go60
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/key.go374
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/key_test.go149
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/net.go134
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/nid.go199
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/oracle_stubs.go162
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/password.c10
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/pem.go32
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha1.go99
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha1_test.go111
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha256.go92
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha256_test.go109
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sni.c23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sni_test.go23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ssl.go167
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ssl_test.go633
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/utils/errors.go50
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/utils/future.go79
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/verify.c31
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/version.go22
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/.travis.yml6
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/LICENSE191
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/README.md19
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture.go67
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture_other.go35
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture_windows.go23
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/collection.go229
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/convenience.go266
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/doc.go39
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/event.go75
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/handler.go53
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/level.go126
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/logger.go61
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output.go178
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output_other.go19
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output_windows.go17
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/setup.go183
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/setup/setup.go80
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/syslog.go63
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/syslog_windows.go26
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates.go69
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates_others.go22
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates_windows.go20
-rw-r--r--src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/text.go80
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/.gitattributes10
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/.gitignore2
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/AUTHORS3
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/CONTRIBUTING.md31
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/CONTRIBUTORS3
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/LICENSE27
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/PATENTS22
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/README3
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/base64.go35
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/bcrypt.go294
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/bcrypt_test.go226
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/block.go159
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/blowfish_test.go274
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/cipher.go91
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/const.go199
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/bn256.go404
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/bn256_test.go304
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/constants.go44
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/curve.go278
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/example_test.go43
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp12.go200
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp2.go219
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp6.go296
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/optate.go395
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/twist.go249
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/cast5/cast5.go526
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/cast5/cast5_test.go106
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/codereview.cfg1
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/const_amd64.s20
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/cswap_amd64.s88
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/curve25519.go841
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/curve25519_test.go29
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/doc.go23
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/freeze_amd64.s94
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/ladderstep_amd64.s1398
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/mont25519_amd64.go240
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/mul_amd64.s191
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/square_amd64.s153
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/example_test.go61
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/hkdf.go75
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/hkdf_test.go370
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4.go118
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4_test.go71
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4block.go89
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/box/box.go85
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/box/box_test.go78
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/secretbox/secretbox.go149
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/secretbox/secretbox_test.go91
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ocsp/ocsp.go673
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ocsp/ocsp_test.go584
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/armor.go219
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/armor_test.go95
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/encode.go160
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/canonical_text.go59
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/canonical_text_test.go52
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/clearsign/clearsign.go372
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/clearsign/clearsign_test.go197
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/elgamal/elgamal.go122
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/elgamal/elgamal_test.go49
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/errors/errors.go72
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/keys.go633
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/keys_test.go370
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/compressed.go123
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/compressed_test.go41
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/config.go91
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/encrypted_key.go199
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/encrypted_key_test.go146
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/literal.go89
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/ocfb.go143
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/ocfb_test.go46
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/one_pass_signature.go73
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/opaque.go162
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/opaque_test.go67
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/packet.go539
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/packet_test.go255
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/private_key.go326
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/private_key_test.go69
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key.go724
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_test.go202
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_v3.go280
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_v3_test.go82
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/reader.go76
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature.go699
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_test.go42
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_v3.go146
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_v3_test.go92
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted.go155
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted_test.go103
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted.go290
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted_test.go123
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userattribute.go91
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userattribute_test.go109
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userid.go160
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userid_test.go87
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/read.go439
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/read_test.go512
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/s2k/s2k.go273
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/s2k/s2k_test.go137
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/write.go378
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/write_test.go259
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/libotr_test_helper.c197
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/otr.go1408
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/otr_test.go470
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/smp.go572
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pbkdf2/pbkdf2.go77
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pbkdf2/pbkdf2_test.go157
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/bmp-string.go50
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/bmp-string_test.go63
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/crypto.go131
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/crypto_test.go125
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/errors.go23
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/bench_test.go27
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/rc2.go274
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/rc2_test.go93
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/mac.go45
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/mac_test.go42
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pbkdf.go170
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pbkdf_test.go34
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pkcs12.go342
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pkcs12_test.go138
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/safebags.go57
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/const_amd64.s45
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305.go32
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_amd64.s497
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_arm.s379
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_test.go86
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_amd64.go24
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_arm.go24
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_ref.go1531
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160.go120
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160_test.go64
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160block.go161
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/hsalsa20.go144
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa2020_amd64.s902
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa208.go199
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.go23
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa20_ref.go234
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa_test.go35
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa20.go54
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa20_test.go139
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/scrypt/scrypt.go243
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/scrypt/scrypt_test.go160
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/doc.go66
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/hashes.go65
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/keccakf.go410
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/register.go18
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/sha3.go193
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/sha3_test.go306
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/shake.go60
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/testdata/keccakKats.json.deflatebin0 -> 521342 bytes
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor.go16
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor_generic.go28
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor_unaligned.go58
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/client.go615
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/client_test.go287
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/forward.go103
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/keyring.go184
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/keyring_test.go78
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/server.go209
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/server_test.go77
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/testdata_test.go64
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/benchmark_test.go122
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/buffer.go98
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/buffer_test.go87
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/certs.go501
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/certs_test.go216
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/channel.go631
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/cipher.go552
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/cipher_test.go127
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client.go213
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_auth.go441
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_auth_test.go393
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_test.go39
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/common.go354
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/connection.go144
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/doc.go18
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/example_test.go211
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/handshake.go412
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/handshake_test.go415
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/kex.go526
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/kex_test.go50
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/keys.go720
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/keys_test.go437
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mac.go57
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mempipe_test.go110
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/messages.go725
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/messages_test.go254
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mux.go356
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mux_test.go525
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/server.go495
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/session.go605
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/session_test.go774
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/tcpip.go407
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/tcpip_test.go20
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/terminal.go892
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/terminal_test.go269
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util.go128
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_bsd.go12
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_linux.go11
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_windows.go174
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/agent_unix_test.go59
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/cert_test.go47
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/doc.go7
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/forward_unix_test.go160
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/session_test.go340
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/tcpip_test.go46
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/test_unix_test.go261
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/testdata_test.go64
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata/doc.go8
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata/keys.go43
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata_test.go63
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/transport.go332
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/transport_test.go109
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/tea/cipher.go109
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/tea/tea_test.go93
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/twofish/twofish.go342
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/twofish/twofish_test.go129
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/block.go66
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/cipher.go82
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/xtea_test.go229
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/xts/xts.go138
-rw-r--r--src/mongo/gotools/vendor/src/golang.org/x/crypto/xts/xts_test.go85
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/.travis.yml45
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/LICENSE25
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/Makefile5
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/README.md4
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/auth.go467
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/auth_test.go1180
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/LICENSE25
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/bson.go738
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/bson_test.go1832
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decimal.go310
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decimal_test.go4109
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decode.go849
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/encode.go514
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/json.go380
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/json_test.go184
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/specdata/update.sh27
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/specdata_test.go241
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bulk.go351
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bulk_test.go504
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/cluster.go682
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/cluster_test.go2090
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/dbserver.go196
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/dbserver_test.go108
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/export_test.go12
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/doc.go31
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/export_test.go33
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/gridfs.go761
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/gridfs_test.go708
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.crt20
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.key27
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.pem57
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.req17
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.crt22
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.key28
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.pem50
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/.env57
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/.empty0
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/journal/tempLatencyTestbin0 -> 204800 bytes
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/mongod.lock0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/run8
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/run8
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/run9
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/run15
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/run8
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/run12
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/run8
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/run8
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/run8
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/run8
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/run8
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/run8
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/run9
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/run9
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/run9
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/db/.empty0
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/run8
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s1/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s1/run7
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s2/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s2/run7
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s3/log/run3
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s3/run8
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/dropall.js66
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/init.js132
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/wait.js67
-rwxr-xr-xsrc/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/setup.sh96
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/LICENSE27
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/bench_test.go223
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/decode.go1685
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/decode_test.go1512
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/encode.go1256
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/encode_test.go613
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/example_test.go252
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/extension.go95
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/extension_test.go218
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/fold.go143
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/fold_test.go116
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/indent.go141
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/number_test.go133
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/scanner.go697
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/scanner_test.go316
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/stream.go510
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/stream_test.go418
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tagkey_test.go115
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tags.go44
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tags_test.go28
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/testdata/code.json.gzbin0 -> 120432 bytes
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl.c77
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl.go138
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.c122
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.go142
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.h7
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sspi_windows.c96
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sspi_windows.h70
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/scram/scram.go266
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/scram/scram_test.go67
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/log.go133
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/queue.go91
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/queue_test.go101
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/raceoff.go5
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/raceon.go5
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/saslimpl.go11
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/saslstub.go11
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/server.go463
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/session.go4826
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/session_test.go4216
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/socket.go707
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/stats.go147
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/suite_test.go262
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/syscall_test.go15
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/syscall_windows_test.go11
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/chaos.go68
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/debug.go109
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/dockey_test.go205
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/flusher.go985
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/output.txt0
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/sim_test.go388
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/tarjan.go94
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/tarjan_test.go44
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/txn.go611
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/txn_test.go778
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/LICENSE29
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/README.md4
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/tomb.go223
-rw-r--r--src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/tomb_test.go183
907 files changed, 171781 insertions, 0 deletions
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/LICENSE b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/LICENSE
new file mode 100644
index 00000000000..65d761bc9f2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2013 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/README b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/README
new file mode 100644
index 00000000000..c763bdd2e49
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/README
@@ -0,0 +1,59 @@
+Golint is a linter for Go source code.
+
+To install, run
+ go get github.com/golang/lint/golint
+
+Invoke golint with one or more filenames or directories.
+The output of this tool is a list of suggestions in Vim quickfix format,
+which is accepted by lots of different editors.
+
+Golint differs from gofmt. Gofmt reformats Go source code, whereas
+golint prints out style mistakes.
+
+Golint differs from govet. Govet is concerned with correctness, whereas
+golint is concerned with coding style. Golint is in use at Google, and it
+seeks to match the accepted style of the open source Go project.
+
+The suggestions made by golint are exactly that: suggestions.
+Golint is not perfect, and has both false positives and false negatives.
+Do not treat its output as a gold standard. We will not be adding pragmas
+or other knobs to suppress specific warnings, so do not expect or require
+code to be completely "lint-free".
+In short, this tool is not, and will never be, trustworthy enough for its
+suggestions to be enforced automatically, for example as part of a build process.
+
+If you find an established style that is frequently violated, and which
+you think golint could statically check, file an issue at
+ https://github.com/golang/lint/issues
+
+
+Contributions
+-------------
+Contributions to this project are welcome, though please send mail before
+starting work on anything major. Contributors retain their copyright, so we
+need you to fill out a short form before we can accept your contribution:
+ https://developers.google.com/open-source/cla/individual
+
+
+Vim
+---
+Add this to your ~/.vimrc:
+ set rtp+=$GOPATH/src/github.com/golang/lint/misc/vim
+If you have multiple entries in your GOPATH, replace $GOPATH with the right value.
+
+Running :Lint will run golint on the current file and populate the quickfix list.
+
+Optionally, add this to your ~/.vimrc to automatically run golint on :w
+ autocmd BufWritePost,FileWritePost *.go execute 'Lint' | cwindow
+
+
+Emacs
+-----
+Add this to your .emacs file:
+ (add-to-list 'load-path (concat (getenv "GOPATH") "/src/github.com/golang/lint/misc/emacs"))
+ (require 'golint)
+If you have multiple entries in your GOPATH, replace $GOPATH with the right value.
+
+Running M-x golint will run golint on the current file.
+For more usage, see Compilation-Mode:
+ http://www.gnu.org/software/emacs/manual/html_node/emacs/Compilation-Mode.html
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/golint/golint.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/golint/golint.go
new file mode 100644
index 00000000000..d1b07baee59
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/golint/golint.go
@@ -0,0 +1,67 @@
+// Copyright (c) 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// golint lints the Go source files named on its command line.
+package main
+
+import (
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/3rf/mongo-lint"
+)
+
+var minConfidence = flag.Float64("min_confidence", 0.8, "minimum confidence of a problem to print it")
+
+func main() {
+ flag.Parse()
+
+ for _, filename := range flag.Args() {
+ if isDir(filename) {
+ lintDir(filename)
+ } else {
+ lintFile(filename)
+ }
+ }
+}
+
+func isDir(filename string) bool {
+ fi, err := os.Stat(filename)
+ return err == nil && fi.IsDir()
+}
+
+func lintFile(filename string) {
+ src, err := ioutil.ReadFile(filename)
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err)
+ return
+ }
+
+ l := new(lint.Linter)
+ ps, err := l.Lint(filename, src)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "%v:%v\n", filename, err)
+ return
+ }
+ for _, p := range ps {
+ if p.Confidence >= *minConfidence {
+ fmt.Printf("%s:%v: %s\n", filename, p.Position, p.Text)
+ }
+ }
+}
+
+func lintDir(dirname string) {
+ filepath.Walk(dirname, func(path string, info os.FileInfo, err error) error {
+ if err == nil && !info.IsDir() && strings.HasSuffix(path, ".go") {
+ lintFile(path)
+ }
+ return err
+ })
+}
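The golint command added above is a thin wrapper around the vendored lint package that follows: it reads each Go file, hands the bytes to (*lint.Linter).Lint, and prints every Problem whose Confidence clears the -min_confidence threshold (0.8 by default). Below is a minimal sketch of driving the package directly under the same vendored import path; the file name example.go is only a placeholder for illustration.

    package main

    import (
        "fmt"
        "io/ioutil"
        "os"

        "github.com/3rf/mongo-lint"
    )

    func main() {
        // Read the file to lint; example.go is a placeholder name.
        src, err := ioutil.ReadFile("example.go")
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        // Lint returns one Problem per suggestion, each carrying a
        // Confidence value in (0,1].
        problems, err := new(lint.Linter).Lint("example.go", src)
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        for _, p := range problems {
            if p.Confidence >= 0.8 { // mirror the CLI's default -min_confidence
                fmt.Printf("example.go:%v: %s\n", p.Position, p.Text)
            }
        }
    }

Filtering on Confidence in the caller, rather than inside the package, matches how golint.go itself decides what to print.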
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/lint.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/lint.go
new file mode 100644
index 00000000000..e8a7b36bc3b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/lint.go
@@ -0,0 +1,1057 @@
+// Copyright (c) 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// Package lint contains a linter for Go source code.
+package lint
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/printer"
+ "go/token"
+ "regexp"
+ "strconv"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+const styleGuideBase = "http://golang.org/s/comments"
+
+// A Linter lints Go source code.
+type Linter struct {
+}
+
+// Problem represents a problem in some source code.
+type Problem struct {
+ Position token.Position // position in source file
+ Text string // the prose that describes the problem
+ Link string // (optional) the link to the style guide for the problem
+ Confidence float64 // a value in (0,1] estimating the confidence in this problem's correctness
+ LineText string // the source line
+}
+
+func (p *Problem) String() string {
+ if p.Link != "" {
+ return p.Text + "\n\n" + p.Link
+ }
+ return p.Text
+}
+
+// Lint lints src.
+func (l *Linter) Lint(filename string, src []byte) ([]Problem, error) {
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
+ if err != nil {
+ return nil, err
+ }
+ return (&file{fset: fset, f: f, src: src, filename: filename}).lint(), nil
+}
+
+// file represents a file being linted.
+type file struct {
+ fset *token.FileSet
+ f *ast.File
+ src []byte
+ filename string
+
+ // sortable is the set of types in the file that implement sort.Interface.
+ sortable map[string]bool
+ // main is whether this file is in a "main" package.
+ main bool
+
+ problems []Problem
+}
+
+func (f *file) isTest() bool { return strings.HasSuffix(f.filename, "_test.go") }
+
+func (f *file) lint() []Problem {
+ f.scanSortable()
+ f.main = f.isMain()
+
+ f.lintPackageComment()
+ f.lintImports()
+ f.lintBlankImports()
+ //f.lintExported()
+ //f.lintNames()
+ f.lintVarDecls()
+ f.lintElses()
+ f.lintRanges()
+ f.lintErrorf()
+ f.lintErrors()
+ f.lintErrorStrings()
+ //f.lintReceiverNames()
+ f.lintIncDec()
+ f.lintMake()
+
+ return f.problems
+}
+
+func (f *file) errorf(n ast.Node, confidence float64, link, format string, a ...interface{}) {
+ p := f.fset.Position(n.Pos())
+ f.problems = append(f.problems, Problem{
+ Position: p,
+ Text: fmt.Sprintf(format, a...),
+ Link: link,
+ Confidence: confidence,
+ LineText: srcLine(f.src, p),
+ })
+}
+
+func (f *file) scanSortable() {
+ f.sortable = make(map[string]bool)
+
+ // bitfield for which methods exist on each type.
+ const (
+ Len = 1 << iota
+ Less
+ Swap
+ )
+ nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap}
+ has := make(map[string]int)
+ f.walk(func(n ast.Node) bool {
+ fn, ok := n.(*ast.FuncDecl)
+ if !ok || fn.Recv == nil {
+ return true
+ }
+ // TODO(dsymonds): We could check the signature to be more precise.
+ recv := receiverType(fn)
+ if i, ok := nmap[fn.Name.Name]; ok {
+ has[recv] |= i
+ }
+ return false
+ })
+ for typ, ms := range has {
+ if ms == Len|Less|Swap {
+ f.sortable[typ] = true
+ }
+ }
+}
+
+func (f *file) isMain() bool {
+ if f.f.Name.Name == "main" {
+ return true
+ }
+ return false
+}
+
+// lintPackageComment checks package comments. It complains if
+// there is no package comment, or if it is not of the right form.
+// This has a notable false positive in that a package comment
+// could rightfully appear in a different file of the same package,
+// but that's not easy to fix since this linter is file-oriented.
+func (f *file) lintPackageComment() {
+ if f.isTest() {
+ return
+ }
+
+ const link = styleGuideBase + "#Package_Comments"
+ if f.f.Doc == nil {
+ f.errorf(f.f, 0.2, link, "should have a package comment, unless it's in another file for this package")
+ return
+ }
+ s := f.f.Doc.Text()
+ prefix := "Package " + f.f.Name.Name + " "
+ if ts := strings.TrimLeft(s, " \t"); ts != s {
+ f.errorf(f.f.Doc, 1, link, "package comment should not have leading space")
+ s = ts
+ }
+ // Only non-main packages need to keep to this form.
+ if f.f.Name.Name != "main" && !strings.HasPrefix(s, prefix) {
+ f.errorf(f.f.Doc, 1, link, `package comment should be of the form "%s..."`, prefix)
+ }
+}
+
+// lintBlankImports complains if a non-main package has blank imports that are
+// not documented.
+func (f *file) lintBlankImports() {
+ // In package main and in tests, we don't complain about blank imports.
+ if f.main || f.isTest() {
+ return
+ }
+
+ // The first element of each contiguous group of blank imports should have
+ // an explanatory comment of some kind.
+ for i, imp := range f.f.Imports {
+ pos := f.fset.Position(imp.Pos())
+
+ if !isBlank(imp.Name) {
+ continue // Ignore non-blank imports.
+ }
+ if i > 0 {
+ prev := f.f.Imports[i-1]
+ prevPos := f.fset.Position(prev.Pos())
+ if isBlank(prev.Name) && prevPos.Line+1 == pos.Line {
+ continue // A subsequent blank in a group.
+ }
+ }
+
+ // This is the first blank import of a group.
+ if imp.Doc == nil && imp.Comment == nil {
+ link := ""
+ f.errorf(imp, 1, link, "a blank import should be only in a main or test package, or have a comment justifying it")
+ }
+ }
+}
+
+// lintImports examines import blocks.
+func (f *file) lintImports() {
+
+ for i, is := range f.f.Imports {
+ _ = i
+ if is.Name != nil && is.Name.Name == "." && !f.isTest() {
+ f.errorf(is, 1, styleGuideBase+"#Import_Dot", "should not use dot imports")
+ }
+
+ }
+
+}
+
+const docCommentsLink = styleGuideBase + "#Doc_Comments"
+
+// lintExported examines the doc comments of exported names.
+// It complains if any required doc comments are missing,
+// or if they are not of the right form. The exact rules are in
+// lintFuncDoc, lintTypeDoc and lintValueSpecDoc; this function
+// also tracks the GenDecl structure being traversed to permit
+// doc comments for constants to be on top of the const block.
+func (f *file) lintExported() {
+ if f.isTest() {
+ return
+ }
+
+ var lastGen *ast.GenDecl // last GenDecl entered.
+
+ // Set of GenDecls that have already had missing comments flagged.
+ genDeclMissingComments := make(map[*ast.GenDecl]bool)
+
+ f.walk(func(node ast.Node) bool {
+ switch v := node.(type) {
+ case *ast.GenDecl:
+ if v.Tok == token.IMPORT {
+ return false
+ }
+ // token.CONST, token.TYPE or token.VAR
+ lastGen = v
+ return true
+ case *ast.FuncDecl:
+ f.lintFuncDoc(v)
+ // Don't proceed inside funcs.
+ return false
+ case *ast.TypeSpec:
+ // inside a GenDecl, which usually has the doc
+ doc := v.Doc
+ if doc == nil {
+ doc = lastGen.Doc
+ }
+ f.lintTypeDoc(v, doc)
+ // Don't proceed inside types.
+ return false
+ case *ast.ValueSpec:
+ f.lintValueSpecDoc(v, lastGen, genDeclMissingComments)
+ return false
+ }
+ return true
+ })
+}
+
+var allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`)
+
+// lintNames examines all names in the file.
+// It complains if any use underscores or incorrect known initialisms.
+func (f *file) lintNames() {
+ // Package names need slightly different handling than other names.
+ if strings.Contains(f.f.Name.Name, "_") && !strings.HasSuffix(f.f.Name.Name, "_test") {
+ f.errorf(f.f, 1, "http://golang.org/doc/effective_go.html#package-names", "don't use an underscore in package name")
+ }
+
+ check := func(id *ast.Ident, thing string) {
+ if id.Name == "_" {
+ return
+ }
+
+ // Handle two common styles from other languages that don't belong in Go.
+ if len(id.Name) >= 5 && allCapsRE.MatchString(id.Name) && strings.Contains(id.Name, "_") {
+ f.errorf(id, 0.8, styleGuideBase+"#Mixed_Caps", "don't use ALL_CAPS in Go names; use CamelCase")
+ return
+ }
+ if len(id.Name) > 2 && id.Name[0] == 'k' && id.Name[1] >= 'A' && id.Name[1] <= 'Z' {
+ should := string(id.Name[1]+'a'-'A') + id.Name[2:]
+ f.errorf(id, 0.8, "", "don't use leading k in Go names; %s %s should be %s", thing, id.Name, should)
+ }
+
+ should := lintName(id.Name)
+ if id.Name == should {
+ return
+ }
+ if len(id.Name) > 2 && strings.Contains(id.Name[1:], "_") {
+ f.errorf(id, 0.9, "http://golang.org/doc/effective_go.html#mixed-caps", "don't use underscores in Go names; %s %s should be %s", thing, id.Name, should)
+ return
+ }
+ f.errorf(id, 0.8, styleGuideBase+"#Initialisms", "%s %s should be %s", thing, id.Name, should)
+ }
+ checkList := func(fl *ast.FieldList, thing string) {
+ if fl == nil {
+ return
+ }
+ for _, f := range fl.List {
+ for _, id := range f.Names {
+ check(id, thing)
+ }
+ }
+ }
+ f.walk(func(node ast.Node) bool {
+ switch v := node.(type) {
+ case *ast.AssignStmt:
+ if v.Tok == token.ASSIGN {
+ return true
+ }
+ for _, exp := range v.Lhs {
+ if id, ok := exp.(*ast.Ident); ok {
+ check(id, "var")
+ }
+ }
+ case *ast.FuncDecl:
+ if f.isTest() && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test")) {
+ return true
+ }
+ check(v.Name, "func")
+
+ thing := "func"
+ if v.Recv != nil {
+ thing = "method"
+ }
+ checkList(v.Type.Params, thing+" parameter")
+ checkList(v.Type.Results, thing+" result")
+ case *ast.GenDecl:
+ if v.Tok == token.IMPORT {
+ return true
+ }
+ var thing string
+ switch v.Tok {
+ case token.CONST:
+ thing = "const"
+ case token.TYPE:
+ thing = "type"
+ case token.VAR:
+ thing = "var"
+ }
+ for _, spec := range v.Specs {
+ switch s := spec.(type) {
+ case *ast.TypeSpec:
+ check(s.Name, thing)
+ case *ast.ValueSpec:
+ for _, id := range s.Names {
+ check(id, thing)
+ }
+ }
+ }
+ case *ast.InterfaceType:
+ // Do not check interface method names.
+			// They are often constrained by the method names of concrete types.
+ for _, x := range v.Methods.List {
+ ft, ok := x.Type.(*ast.FuncType)
+ if !ok { // might be an embedded interface name
+ continue
+ }
+ checkList(ft.Params, "interface method parameter")
+ checkList(ft.Results, "interface method result")
+ }
+ case *ast.RangeStmt:
+ if v.Tok == token.ASSIGN {
+ return true
+ }
+ if id, ok := v.Key.(*ast.Ident); ok {
+ check(id, "range var")
+ }
+ if id, ok := v.Value.(*ast.Ident); ok {
+ check(id, "range var")
+ }
+ case *ast.StructType:
+ for _, f := range v.Fields.List {
+ for _, id := range f.Names {
+ check(id, "struct field")
+ }
+ }
+ }
+ return true
+ })
+}
+
+// lintName returns a different name if it should be different.
+func lintName(name string) (should string) {
+ // Fast path for simple cases: "_" and all lowercase.
+ if name == "_" {
+ return name
+ }
+ allLower := true
+ for _, r := range name {
+ if !unicode.IsLower(r) {
+ allLower = false
+ break
+ }
+ }
+ if allLower {
+ return name
+ }
+
+ // Split camelCase at any lower->upper transition, and split on underscores.
+ // Check each word for common initialisms.
+ runes := []rune(name)
+ w, i := 0, 0 // index of start of word, scan
+ for i+1 <= len(runes) {
+ eow := false // whether we hit the end of a word
+ if i+1 == len(runes) {
+ eow = true
+ } else if runes[i+1] == '_' {
+ // underscore; shift the remainder forward over any run of underscores
+ eow = true
+ n := 1
+ for i+n+1 < len(runes) && runes[i+n+1] == '_' {
+ n++
+ }
+ copy(runes[i+1:], runes[i+n+1:])
+ runes = runes[:len(runes)-n]
+ } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) {
+ // lower->non-lower
+ eow = true
+ }
+ i++
+ if !eow {
+ continue
+ }
+
+ // [w,i) is a word.
+ word := string(runes[w:i])
+ if u := strings.ToUpper(word); commonInitialisms[u] {
+ // Keep consistent case, which is lowercase only at the start.
+ if w == 0 && unicode.IsLower(runes[w]) {
+ u = strings.ToLower(u)
+ }
+ // All the common initialisms are ASCII,
+ // so we can replace the bytes exactly.
+ copy(runes[w:], []rune(u))
+ } else if w > 0 && strings.ToLower(word) == word {
+ // already all lowercase, and not the first word, so uppercase the first character.
+ runes[w] = unicode.ToUpper(runes[w])
+ }
+ w = i
+ }
+ return string(runes)
+}
+
+// commonInitialisms is a set of common initialisms.
+// Only add entries that are highly unlikely to be non-initialisms.
+// For instance, "ID" is fine (Freudian code is rare), but "AND" is not.
+var commonInitialisms = map[string]bool{
+ "API": true,
+ "ASCII": true,
+ "CPU": true,
+ "CSS": true,
+ "DNS": true,
+ "EOF": true,
+ "HTML": true,
+ "HTTP": true,
+ "HTTPS": true,
+ "ID": true,
+ "IP": true,
+ "JSON": true,
+ "LHS": true,
+ "QPS": true,
+ "RAM": true,
+ "RHS": true,
+ "RPC": true,
+ "SLA": true,
+ "SSH": true,
+ "TLS": true,
+ "TTL": true,
+ "UI": true,
+ "UID": true,
+ "URL": true,
+ "UTF8": true,
+ "VM": true,
+ "XML": true,
+}
+
+// lintTypeDoc examines the doc comment on a type.
+// It complains if they are missing from an exported type,
+// or if they are not of the standard form.
+func (f *file) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) {
+ if !ast.IsExported(t.Name.Name) {
+ return
+ }
+ if doc == nil {
+ f.errorf(t, 1, docCommentsLink, "exported type %v should have comment or be unexported", t.Name)
+ return
+ }
+
+ s := doc.Text()
+ articles := [...]string{"A", "An", "The"}
+ for _, a := range articles {
+ if strings.HasPrefix(s, a+" ") {
+ s = s[len(a)+1:]
+ break
+ }
+ }
+ if !strings.HasPrefix(s, t.Name.Name+" ") {
+ f.errorf(doc, 1, docCommentsLink, `comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name)
+ }
+}
+
+var commonMethods = map[string]bool{
+ "Error": true,
+ "Read": true,
+ "ServeHTTP": true,
+ "String": true,
+ "Write": true,
+}
+
+// lintFuncDoc examines doc comments on functions and methods.
+// It complains if they are missing, or not of the right form.
+// It has specific exclusions for well-known methods (see commonMethods above).
+func (f *file) lintFuncDoc(fn *ast.FuncDecl) {
+ if !ast.IsExported(fn.Name.Name) {
+ // func is unexported
+ return
+ }
+ kind := "function"
+ name := fn.Name.Name
+ if fn.Recv != nil {
+ // method
+ kind = "method"
+ recv := receiverType(fn)
+ if !ast.IsExported(recv) {
+ // receiver is unexported
+ return
+ }
+ if commonMethods[name] {
+ return
+ }
+ switch name {
+ case "Len", "Less", "Swap":
+ if f.sortable[recv] {
+ return
+ }
+ }
+ name = recv + "." + name
+ }
+ if fn.Doc == nil {
+ f.errorf(fn, 1, docCommentsLink, "exported %s %s should have comment or be unexported", kind, name)
+ return
+ }
+ s := fn.Doc.Text()
+ prefix := fn.Name.Name + " "
+ if !strings.HasPrefix(s, prefix) {
+ f.errorf(fn.Doc, 1, docCommentsLink, `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix)
+ }
+}
+
+// lintValueSpecDoc examines package-global variables and constants.
+// It complains if they are not individually declared,
+// or if they are not suitably documented in the right form (unless they are in a block that is commented).
+func (f *file) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) {
+ kind := "var"
+ if gd.Tok == token.CONST {
+ kind = "const"
+ }
+
+ if len(vs.Names) > 1 {
+ // Check that none are exported except for the first.
+ for _, n := range vs.Names[1:] {
+ if ast.IsExported(n.Name) {
+ f.errorf(vs, 1, "", "exported %s %s should have its own declaration", kind, n.Name)
+ return
+ }
+ }
+ }
+
+ // Only one name.
+ name := vs.Names[0].Name
+ if !ast.IsExported(name) {
+ return
+ }
+
+ if vs.Doc == nil {
+ if gd.Doc == nil && !genDeclMissingComments[gd] {
+ block := ""
+ if kind == "const" && gd.Lparen.IsValid() {
+ block = " (or a comment on this block)"
+ }
+ f.errorf(vs, 1, docCommentsLink, "exported %s %s should have comment%s or be unexported", kind, name, block)
+ genDeclMissingComments[gd] = true
+ }
+ return
+ }
+ prefix := name + " "
+ if !strings.HasPrefix(vs.Doc.Text(), prefix) {
+ f.errorf(vs.Doc, 1, docCommentsLink, `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix)
+ }
+}
+
+// zeroLiteral is a set of ast.BasicLit values that are zero values.
+// It is not exhaustive.
+var zeroLiteral = map[string]bool{
+ "false": true, // bool
+ // runes
+ `'\x00'`: true,
+ `'\000'`: true,
+ // strings
+ `""`: true,
+ "``": true,
+ // numerics
+ "0": true,
+ "0.": true,
+ "0.0": true,
+ "0i": true,
+}
+
+// lintVarDecls examines variable declarations. It complains about declarations with
+// redundant LHS types that can be inferred from the RHS.
+func (f *file) lintVarDecls() {
+ var lastGen *ast.GenDecl // last GenDecl entered.
+
+ f.walk(func(node ast.Node) bool {
+ switch v := node.(type) {
+ case *ast.GenDecl:
+ if v.Tok != token.CONST && v.Tok != token.VAR {
+ return false
+ }
+ lastGen = v
+ return true
+ case *ast.ValueSpec:
+ if lastGen.Tok == token.CONST {
+ return false
+ }
+ if len(v.Names) > 1 || v.Type == nil || len(v.Values) == 0 {
+ return false
+ }
+ rhs := v.Values[0]
+ // An underscore var appears in a common idiom for compile-time interface satisfaction,
+ // as in "var _ Interface = (*Concrete)(nil)".
+ if isIdent(v.Names[0], "_") {
+ return false
+ }
+ // If the RHS is a zero value, suggest dropping it.
+ zero := false
+ if lit, ok := rhs.(*ast.BasicLit); ok {
+ zero = zeroLiteral[lit.Value]
+ } else if isIdent(rhs, "nil") {
+ zero = true
+ }
+ if zero {
+ f.errorf(rhs, 0.9, "", "should drop = %s from declaration of var %s; it is the zero value", f.render(rhs), v.Names[0])
+ return false
+ }
+ // If the LHS type is an interface, don't warn, since it is probably a
+ // concrete type on the RHS. Note that our feeble lexical check here
+ // will only pick up interface{} and other literal interface types;
+ // that covers most of the cases we care to exclude right now.
+ // TODO(dsymonds): Use typechecker to make this heuristic more accurate.
+ if _, ok := v.Type.(*ast.InterfaceType); ok {
+ return false
+ }
+ // If the RHS is an untyped const, only warn if the LHS type is its default type.
+ if defType, ok := isUntypedConst(rhs); ok && !isIdent(v.Type, defType) {
+ return false
+ }
+ f.errorf(v.Type, 0.8, "", "should omit type %s from declaration of var %s; it will be inferred from the right-hand side", f.render(v.Type), v.Names[0])
+ return false
+ }
+ return true
+ })
+}
+
+// lintElses examines else blocks. It complains about any else block whose if block ends in a return.
+func (f *file) lintElses() {
+ // We don't want to flag if { } else if { } else { } constructions.
+ // They will appear as an IfStmt whose Else field is also an IfStmt.
+ // Record such a node so we ignore it when we visit it.
+ ignore := make(map[*ast.IfStmt]bool)
+
+ f.walk(func(node ast.Node) bool {
+ ifStmt, ok := node.(*ast.IfStmt)
+ if !ok || ifStmt.Else == nil {
+ return true
+ }
+ if ignore[ifStmt] {
+ return true
+ }
+ if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok {
+ ignore[elseif] = true
+ return true
+ }
+ if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok {
+ // only care about elses without conditions
+ return true
+ }
+ if len(ifStmt.Body.List) == 0 {
+ return true
+ }
+ shortDecl := false // does the if statement have a ":=" initialization statement?
+ if ifStmt.Init != nil {
+ if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE {
+ shortDecl = true
+ }
+ }
+ lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1]
+ if _, ok := lastStmt.(*ast.ReturnStmt); ok {
+ extra := ""
+ if shortDecl {
+ extra = " (move short variable declaration to its own line if necessary)"
+ }
+ f.errorf(ifStmt.Else, 1, styleGuideBase+"#Indent_Error_Flow", "if block ends with a return statement, so drop this else and outdent its block"+extra)
+ }
+ return true
+ })
+}
+
+// lintRanges examines range clauses. It complains about redundant constructions.
+func (f *file) lintRanges() {
+ f.walk(func(node ast.Node) bool {
+ rs, ok := node.(*ast.RangeStmt)
+ if !ok {
+ return true
+ }
+ if rs.Value == nil {
+ // for x = range m { ... }
+ return true // single var form
+ }
+ if !isIdent(rs.Value, "_") {
+ // for ?, y = range m { ... }
+ return true
+ }
+
+ f.errorf(rs.Value, 1, "", "should omit 2nd value from range; this loop is equivalent to `for %s %s range ...`", f.render(rs.Key), rs.Tok)
+ return true
+ })
+}
+
+// lintErrorf examines errors.New calls. It complains if its only argument is an fmt.Sprintf invocation.
+func (f *file) lintErrorf() {
+ f.walk(func(node ast.Node) bool {
+ ce, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ if !isPkgDot(ce.Fun, "errors", "New") || len(ce.Args) != 1 {
+ return true
+ }
+ arg := ce.Args[0]
+ ce, ok = arg.(*ast.CallExpr)
+ if !ok || !isPkgDot(ce.Fun, "fmt", "Sprintf") {
+ return true
+ }
+ f.errorf(node, 1, "", "should replace errors.New(fmt.Sprintf(...)) with fmt.Errorf(...)")
+ return true
+ })
+}
+
+// lintErrors examines global error vars. It complains if they aren't named in the standard way.
+func (f *file) lintErrors() {
+ for _, decl := range f.f.Decls {
+ gd, ok := decl.(*ast.GenDecl)
+ if !ok || gd.Tok != token.VAR {
+ continue
+ }
+ for _, spec := range gd.Specs {
+ spec := spec.(*ast.ValueSpec)
+ if len(spec.Names) != 1 || len(spec.Values) != 1 {
+ continue
+ }
+ ce, ok := spec.Values[0].(*ast.CallExpr)
+ if !ok {
+ continue
+ }
+ if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") {
+ continue
+ }
+
+ id := spec.Names[0]
+ prefix := "err"
+ if id.IsExported() {
+ prefix = "Err"
+ }
+ if !strings.HasPrefix(id.Name, prefix) {
+ f.errorf(id, 0.9, "", "error var %s should have name of the form %sFoo", id.Name, prefix)
+ }
+ }
+ }
+}
+
+func lintCapAndPunct(s string) (isCap, isPunct bool) {
+ first, firstN := utf8.DecodeRuneInString(s)
+ last, _ := utf8.DecodeLastRuneInString(s)
+ isPunct = last == '.' || last == ':' || last == '!'
+ isCap = unicode.IsUpper(first)
+ if isCap && len(s) > firstN {
+ // Don't flag strings starting with something that looks like an initialism.
+ if second, _ := utf8.DecodeRuneInString(s[firstN:]); unicode.IsUpper(second) {
+ isCap = false
+ }
+ }
+ return
+}
+
+// lintErrorStrings examines error strings. It complains if they are capitalized or end in punctuation.
+func (f *file) lintErrorStrings() {
+ f.walk(func(node ast.Node) bool {
+ ce, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") {
+ return true
+ }
+ if len(ce.Args) < 1 {
+ return true
+ }
+ str, ok := ce.Args[0].(*ast.BasicLit)
+ if !ok || str.Kind != token.STRING {
+ return true
+ }
+ s, _ := strconv.Unquote(str.Value) // can assume well-formed Go
+ if s == "" {
+ return true
+ }
+ isCap, isPunct := lintCapAndPunct(s)
+ var msg string
+ switch {
+ case isCap && isPunct:
+ msg = "error strings should not be capitalized and should not end with punctuation"
+ case isCap:
+ msg = "error strings should not be capitalized"
+ case isPunct:
+ msg = "error strings should not end with punctuation"
+ default:
+ return true
+ }
+ // People use proper nouns and exported Go identifiers in error strings,
+ // so decrease the confidence of warnings for capitalization.
+ conf := 0.8
+ if isCap {
+ conf = 0.6
+ }
+ f.errorf(str, conf, styleGuideBase+"#Error_Strings", msg)
+ return true
+ })
+}
+
+var badReceiverNames = map[string]bool{
+ "me": true,
+ "this": true,
+ "self": true,
+}
+
+// lintReceiverNames examines receiver names. It complains about inconsistent
+// names used for the same type and names such as "this".
+func (f *file) lintReceiverNames() {
+ typeReceiver := map[string]string{}
+ f.walk(func(n ast.Node) bool {
+ fn, ok := n.(*ast.FuncDecl)
+ if !ok || fn.Recv == nil {
+ return true
+ }
+ names := fn.Recv.List[0].Names
+ if len(names) < 1 {
+ return true
+ }
+ name := names[0].Name
+ const link = styleGuideBase + "#Receiver_Names"
+ if badReceiverNames[name] {
+ f.errorf(n, 1, link, `receiver name should be a reflection of its identity; don't use generic names such as "me", "this", or "self"`)
+ return true
+ }
+ recv := receiverType(fn)
+ if prev, ok := typeReceiver[recv]; ok && prev != name {
+ f.errorf(n, 1, link, "receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv)
+ return true
+ }
+ typeReceiver[recv] = name
+ return true
+ })
+}
+
+// lintIncDec examines statements that increment or decrement a variable.
+// It complains if they don't use x++ or x--.
+func (f *file) lintIncDec() {
+ f.walk(func(n ast.Node) bool {
+ as, ok := n.(*ast.AssignStmt)
+ if !ok {
+ return true
+ }
+ if len(as.Lhs) != 1 {
+ return true
+ }
+ if !isOne(as.Rhs[0]) {
+ return true
+ }
+ var suffix string
+ switch as.Tok {
+ case token.ADD_ASSIGN:
+ suffix = "++"
+ case token.SUB_ASSIGN:
+ suffix = "--"
+ default:
+ return true
+ }
+ f.errorf(as, 0.8, "", "should replace %s with %s%s", f.render(as), f.render(as.Lhs[0]), suffix)
+ return true
+ })
+}
+
+// lintMake examines statements that declare and initialize a variable with make.
+// It complains if they are constructing a zero-element slice.
+func (f *file) lintMake() {
+ f.walk(func(n ast.Node) bool {
+ as, ok := n.(*ast.AssignStmt)
+ if !ok {
+ return true
+ }
+ // Only want single var := assignment statements.
+ if len(as.Lhs) != 1 || as.Tok != token.DEFINE {
+ return true
+ }
+ ce, ok := as.Rhs[0].(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ // Check if ce is make([]T, 0).
+ if !isIdent(ce.Fun, "make") || len(ce.Args) != 2 || !isZero(ce.Args[1]) {
+ return true
+ }
+ at, ok := ce.Args[0].(*ast.ArrayType)
+ if !ok || at.Len != nil {
+ return true
+ }
+ f.errorf(as, 0.8, "", `can probably use "var %s %s" instead`, f.render(as.Lhs[0]), f.render(at))
+ return true
+ })
+}
+
+func receiverType(fn *ast.FuncDecl) string {
+ switch e := fn.Recv.List[0].Type.(type) {
+ case *ast.Ident:
+ return e.Name
+ case *ast.StarExpr:
+ return e.X.(*ast.Ident).Name
+ }
+ panic(fmt.Sprintf("unknown method receiver AST node type %T", fn.Recv.List[0].Type))
+}
+
+func (f *file) walk(fn func(ast.Node) bool) {
+ ast.Walk(walker(fn), f.f)
+}
+
+func (f *file) render(x interface{}) string {
+ var buf bytes.Buffer
+ if err := printer.Fprint(&buf, f.fset, x); err != nil {
+ panic(err)
+ }
+ return buf.String()
+}
+
+func (f *file) debugRender(x interface{}) string {
+ var buf bytes.Buffer
+ if err := ast.Fprint(&buf, f.fset, x, nil); err != nil {
+ panic(err)
+ }
+ return buf.String()
+}
+
+// walker adapts a function to satisfy the ast.Visitor interface.
+// The function returns whether the walk should proceed into the node's children.
+type walker func(ast.Node) bool
+
+func (w walker) Visit(node ast.Node) ast.Visitor {
+ if w(node) {
+ return w
+ }
+ return nil
+}
+
+func isIdent(expr ast.Expr, ident string) bool {
+ id, ok := expr.(*ast.Ident)
+ return ok && id.Name == ident
+}
+
+// isBlank returns whether id is the blank identifier "_".
+// If id == nil, the answer is false.
+func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" }
+
+func isPkgDot(expr ast.Expr, pkg, name string) bool {
+ sel, ok := expr.(*ast.SelectorExpr)
+ return ok && isIdent(sel.X, pkg) && isIdent(sel.Sel, name)
+}
+
+func isZero(expr ast.Expr) bool {
+ lit, ok := expr.(*ast.BasicLit)
+ return ok && lit.Kind == token.INT && lit.Value == "0"
+}
+
+func isOne(expr ast.Expr) bool {
+ lit, ok := expr.(*ast.BasicLit)
+ return ok && lit.Kind == token.INT && lit.Value == "1"
+}
+
+var basicLitKindTypes = map[token.Token]string{
+ token.FLOAT: "float64",
+ token.IMAG: "complex128",
+ token.CHAR: "rune",
+ token.STRING: "string",
+}
+
+// isUntypedConst reports whether expr is an untyped constant,
+// and indicates what its default type is.
+func isUntypedConst(expr ast.Expr) (defType string, ok bool) {
+ if isIntLiteral(expr) {
+ return "int", true
+ }
+ if bl, ok := expr.(*ast.BasicLit); ok {
+ if dt, ok := basicLitKindTypes[bl.Kind]; ok {
+ return dt, true
+ }
+ }
+ return "", false
+}
+
+func isIntLiteral(expr ast.Expr) bool {
+ // Either a BasicLit with Kind token.INT,
+ // or some combination of a UnaryExpr with Op token.SUB (for "-<lit>")
+ // or a ParenExpr (for "(<lit>)").
+Loop:
+ for {
+ switch v := expr.(type) {
+ case *ast.UnaryExpr:
+ if v.Op == token.SUB {
+ expr = v.X
+ continue Loop
+ }
+ case *ast.ParenExpr:
+ expr = v.X
+ continue Loop
+ case *ast.BasicLit:
+ if v.Kind == token.INT {
+ return true
+ }
+ }
+ return false
+ }
+}
+
+// srcLine returns the complete line at p, including the terminating newline.
+func srcLine(src []byte, p token.Position) string {
+ // Run to end of line in both directions if not at line start/end.
+ lo, hi := p.Offset, p.Offset+1
+ for lo > 0 && src[lo-1] != '\n' {
+ lo--
+ }
+ for hi < len(src) && src[hi-1] != '\n' {
+ hi++
+ }
+ return string(src[lo:hi])
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/lint_test.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/lint_test.go
new file mode 100644
index 00000000000..12e3afb8138
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/lint_test.go
@@ -0,0 +1,197 @@
+// Copyright (c) 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+package lint
+
+import (
+ "bytes"
+ "flag"
+ "go/parser"
+ "go/printer"
+ "go/token"
+ "io/ioutil"
+ "path"
+ "regexp"
+ "strings"
+ "testing"
+)
+
+var lintMatch = flag.String("lint.match", "", "restrict testdata matches to this pattern")
+
+func TestAll(t *testing.T) {
+ l := new(Linter)
+ rx, err := regexp.Compile(*lintMatch)
+ if err != nil {
+ t.Fatalf("Bad -lint.match value %q: %v", *lintMatch, err)
+ }
+
+ baseDir := "testdata"
+ fis, err := ioutil.ReadDir(baseDir)
+ if err != nil {
+ t.Fatalf("ioutil.ReadDir: %v", err)
+ }
+ if len(fis) == 0 {
+ t.Fatalf("no files in %v", baseDir)
+ }
+ for _, fi := range fis {
+ if !rx.MatchString(fi.Name()) {
+ continue
+ }
+ //t.Logf("Testing %s", fi.Name())
+ src, err := ioutil.ReadFile(path.Join(baseDir, fi.Name()))
+ if err != nil {
+ t.Fatalf("Failed reading %s: %v", fi.Name(), err)
+ }
+
+ ins := parseInstructions(t, fi.Name(), src)
+ if ins == nil {
+ t.Errorf("Test file %v does not have instructions", fi.Name())
+ continue
+ }
+
+ ps, err := l.Lint(fi.Name(), src)
+ if err != nil {
+ t.Errorf("Linting %s: %v", fi.Name(), err)
+ continue
+ }
+
+ for _, in := range ins {
+ ok := false
+ for i, p := range ps {
+ if p.Position.Line != in.Line {
+ continue
+ }
+ if in.Match.MatchString(p.Text) {
+ // remove this problem from ps
+ copy(ps[i:], ps[i+1:])
+ ps = ps[:len(ps)-1]
+
+ //t.Logf("/%v/ matched at %s:%d", in.Match, fi.Name(), in.Line)
+ ok = true
+ break
+ }
+ }
+ if !ok {
+ t.Errorf("Lint failed at %s:%d; /%v/ did not match", fi.Name(), in.Line, in.Match)
+ }
+ }
+ for _, p := range ps {
+ t.Errorf("Unexpected problem at %s:%d: %v", fi.Name(), p.Position.Line, p.Text)
+ }
+ }
+}
+
+type instruction struct {
+ Line int // the line number this applies to
+ Match *regexp.Regexp // what pattern to match
+}
+
+// parseInstructions parses instructions from the comments in a Go source file.
+// It returns nil if none were parsed.
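+// An instruction is either a comment of the form "MATCH /pattern/", which
+// expects a problem whose text matches pattern to be reported on that comment's
+// line, or a lone "OK" comment marking a file expected to produce no problems.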
+func parseInstructions(t *testing.T, filename string, src []byte) []instruction {
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, filename, src, parser.ParseComments)
+ if err != nil {
+ t.Fatalf("Test file %v does not parse: %v", filename, err)
+ }
+ var ins []instruction
+ for _, cg := range f.Comments {
+ ln := fset.Position(cg.Pos()).Line
+ raw := cg.Text()
+ for _, line := range strings.Split(raw, "\n") {
+ if line == "" || strings.HasPrefix(line, "#") {
+ continue
+ }
+ if line == "OK" && ins == nil {
+ // so our return value will be non-nil
+ ins = make([]instruction, 0)
+ continue
+ }
+ if strings.Contains(line, "MATCH") {
+ a, b := strings.Index(line, "/"), strings.LastIndex(line, "/")
+ if a == -1 || a == b {
+ t.Fatalf("Malformed match instruction %q at %v:%d", line, filename, ln)
+ }
+ pat := line[a+1 : b]
+ rx, err := regexp.Compile(pat)
+ if err != nil {
+ t.Fatalf("Bad match pattern %q at %v:%d: %v", pat, filename, ln, err)
+ }
+ ins = append(ins, instruction{
+ Line: ln,
+ Match: rx,
+ })
+ }
+ }
+ }
+ return ins
+}
+
+func render(fset *token.FileSet, x interface{}) string {
+ var buf bytes.Buffer
+ if err := printer.Fprint(&buf, fset, x); err != nil {
+ panic(err)
+ }
+ return buf.String()
+}
+
+func TestLine(t *testing.T) {
+ tests := []struct {
+ src string
+ offset int
+ want string
+ }{
+ {"single line file", 5, "single line file"},
+ {"single line file with newline\n", 5, "single line file with newline\n"},
+ {"first\nsecond\nthird\n", 2, "first\n"},
+ {"first\nsecond\nthird\n", 9, "second\n"},
+ {"first\nsecond\nthird\n", 14, "third\n"},
+ {"first\nsecond\nthird with no newline", 16, "third with no newline"},
+ {"first byte\n", 0, "first byte\n"},
+ }
+ for _, test := range tests {
+ got := srcLine([]byte(test.src), token.Position{Offset: test.offset})
+ if got != test.want {
+ t.Errorf("srcLine(%q, offset=%d) = %q, want %q", test.src, test.offset, got, test.want)
+ }
+ }
+}
+
+func TestLintName(t *testing.T) {
+ tests := []struct {
+ name, want string
+ }{
+ {"foo_bar", "fooBar"},
+ {"foo_bar_baz", "fooBarBaz"},
+ {"Foo_bar", "FooBar"},
+ {"foo_WiFi", "fooWiFi"},
+ {"id", "id"},
+ {"Id", "ID"},
+ {"foo_id", "fooID"},
+ {"fooId", "fooID"},
+ {"fooUid", "fooUID"},
+ {"idFoo", "idFoo"},
+ {"uidFoo", "uidFoo"},
+ {"midIdDle", "midIDDle"},
+ {"APIProxy", "APIProxy"},
+ {"ApiProxy", "APIProxy"},
+ {"apiProxy", "apiProxy"},
+ {"_Leading", "_Leading"},
+ {"___Leading", "_Leading"},
+ {"trailing_", "trailing"},
+ {"trailing___", "trailing"},
+ {"a_b", "aB"},
+ {"a__b", "aB"},
+ {"a___b", "aB"},
+ {"Rpc1150", "RPC1150"},
+ }
+ for _, test := range tests {
+ got := lintName(test.name)
+ if got != test.want {
+ t.Errorf("lintName(%q) = %q, want %q", test.name, got, test.want)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/misc/emacs/golint.el b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/misc/emacs/golint.el
new file mode 100644
index 00000000000..de729df68dd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/misc/emacs/golint.el
@@ -0,0 +1,51 @@
+;;; golint.el --- lint for the Go source code
+
+;; Copyright 2013 The Go Authors. All rights reserved.
+;; Use of this source code is governed by a BSD-style
+;; license that can be found in the LICENSE file.
+
+;; URL: https://github.com/golang/lint
+
+;;; Commentary:
+
+;; To install golint, add the following lines to your .emacs file:
+;; (add-to-list 'load-path "PATH CONTAINING golint.el" t)
+;; (require 'golint)
+;;
+;; After this, type M-x golint on Go source code.
+;;
+;; Usage:
+;; C-x `
+;; Jump directly to the line in your code which caused the first message.
+;;
+;; For more usage, see Compilation-Mode:
+;; http://www.gnu.org/software/emacs/manual/html_node/emacs/Compilation-Mode.html
+
+;;; Code:
+(require 'compile)
+
+(defun go-lint-buffer-name (mode)
+ "*Golint*")
+
+(defun golint-process-setup ()
+ "Setup compilation variables and buffer for `golint'."
+ (run-hooks 'golint-setup-hook))
+
+(define-compilation-mode golint-mode "golint"
+ "Golint is a linter for Go source code."
+ (set (make-local-variable 'compilation-scroll-output) nil)
+ (set (make-local-variable 'compilation-disable-input) t)
+ (set (make-local-variable 'compilation-process-setup-function)
+ 'golint-process-setup)
+)
+
+;;;###autoload
+(defun golint ()
+ "Run golint on the current file and populate the fix list. Pressing C-x ` will jump directly to the line in your code which caused the first message."
+ (interactive)
+ (compilation-start (concat "golint " buffer-file-name)
+ 'golint-mode))
+
+(provide 'golint)
+
+;;; golint.el ends here
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/misc/vim/ftplugin/go/lint.vim b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/misc/vim/ftplugin/go/lint.vim
new file mode 100644
index 00000000000..7dffd181767
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/misc/vim/ftplugin/go/lint.vim
@@ -0,0 +1,31 @@
+" Copyright 2013 The Go Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style
+" license that can be found in the LICENSE file.
+"
+" lint.vim: Vim command to lint Go files with golint.
+"
+" https://github.com/golang/lint
+"
+" This filetype plugin add a new commands for go buffers:
+"
+" :Lint
+"
+" Run golint for the current Go file.
+"
+if exists("b:did_ftplugin_go_lint")
+ finish
+endif
+
+if !executable("golint")
+ finish
+endif
+
+command! -buffer Lint call s:GoLint()
+
+function! s:GoLint() abort
+ cexpr system('golint ' . shellescape(expand('%')))
+endfunction
+
+let b:did_ftplugin_go_lint = 1
+
+" vim:ts=4:sw=4:et
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/4.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/4.go
new file mode 100644
index 00000000000..2303a9a3170
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/4.go
@@ -0,0 +1,34 @@
+// Test that exported names have correct comments.
+
+// Package pkg does something.
+package pkg
+
+import "time"
+
+type T int // MATCH /exported type T.*should.*comment.*or.*unexport/
+
+func (T) F() {} // MATCH /exported method T\.F.*should.*comment.*or.*unexport/
+
+// this is a nice type.
+// MATCH /comment.*exported type U.*should.*form.*"U ..."/
+type U string
+
+// this is a neat function.
+// MATCH /comment.*exported method U\.G.*should.*form.*"G ..."/
+func (U) G() {}
+
+// A V is a string.
+type V string
+
+// V.H has a pointer receiver
+
+func (*V) H() {} // MATCH /exported method V\.H.*should.*comment.*or.*unexport/
+
+var W = "foo" // MATCH /exported var W.*should.*comment.*or.*unexport/
+
+const X = "bar" // MATCH /exported const X.*should.*comment.*or.*unexport/
+
+var Y, Z int // MATCH /exported var Z.*own declaration/
+
+// Location should be okay, since the other var name is an underscore.
+var Location, _ = time.LoadLocation("Europe/Istanbul") // not Constantinople
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/5_test.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/5_test.go
new file mode 100644
index 00000000000..af174587c0a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/5_test.go
@@ -0,0 +1,17 @@
+// This file ends in _test.go, so we should not warn about doc comments.
+// OK
+
+package pkg
+
+import "testing"
+
+type H int
+
+func TestSomething(t *testing.T) {
+}
+
+func TestSomething_suffix(t *testing.T) {
+}
+
+func ExampleBuffer_reader() {
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-lib.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-lib.go
new file mode 100644
index 00000000000..edac0d75c8b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-lib.go
@@ -0,0 +1,33 @@
+// Test that blank imports in library packages are flagged.
+
+// Package foo ...
+package foo
+
+// The instructions need to go before the imports below so they will not be
+// mistaken for documentation.
+
+/* MATCH /blank import/ */ import _ "encoding/json"
+
+import (
+ "fmt"
+ /* MATCH /blank import/ */ _ "os"
+
+ /* MATCH /blank import/ */ _ "net/http"
+ _ "path"
+)
+
+import _ "encoding/base64" // Don't gripe about this
+
+import (
+ // Don't gripe about these next two lines.
+ _ "compress/zlib"
+ _ "syscall"
+
+ /* MATCH /blank import/ */ _ "path/filepath"
+)
+
+import (
+ "go/ast"
+ _ "go/scanner" // Don't gripe about this or the following line.
+ _ "go/token"
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-lib_test.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-lib_test.go
new file mode 100644
index 00000000000..0307985f86c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-lib_test.go
@@ -0,0 +1,20 @@
+// Test that blank imports in test packages are not flagged.
+// OK
+
+// Package foo ...
+package foo
+
+// These are essentially the same imports as in the "library" package, but
+// these should not trigger the warning because this is a test.
+
+import _ "encoding/json"
+
+import (
+ "fmt"
+ "testing"
+
+ _ "os"
+
+ _ "net/http"
+ _ "path"
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-main.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-main.go
new file mode 100644
index 00000000000..9b72b1cb02b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/blank-import-main.go
@@ -0,0 +1,12 @@
+// Test that blank imports in package main are not flagged.
+// OK
+
+// Binary foo ...
+package main
+
+import _ "fmt"
+
+import (
+ "os"
+ _ "path"
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/common-methods.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/common-methods.go
new file mode 100644
index 00000000000..c0bb1363a57
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/common-methods.go
@@ -0,0 +1,16 @@
+// Test that we don't nag for comments on common methods.
+// OK
+
+// Package pkg ...
+package pkg
+
+import "net/http"
+
+// T is ...
+type T int
+
+func (T) Error() string { return "" }
+func (T) String() string { return "" }
+func (T) ServeHTTP(w http.ResponseWriter, r *http.Request) {}
+func (T) Read(p []byte) (n int, err error) { return 0, nil }
+func (T) Write(p []byte) (n int, err error) { return 0, nil }
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/const-block.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/const-block.go
new file mode 100644
index 00000000000..4b89d6f60e7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/const-block.go
@@ -0,0 +1,36 @@
+// Test for docs in const blocks
+
+// Package foo ...
+package foo
+
+const (
+ // Prefix for something.
+ // MATCH /InlineWhatever.*form/
+ InlineWhatever = "blah"
+
+ Whatsit = "missing_comment" // MATCH /Whatsit.*should have comment.*block/
+
+ // We should only warn once per block for missing comments,
+ // but always complain about malformed comments.
+
+ WhosYourDaddy = "another_missing_one"
+
+ // Something
+ // MATCH /WhatDoesHeDo.*form/
+ WhatDoesHeDo = "it's not a tumor!"
+)
+
+// These shouldn't need doc comments.
+const (
+ Alpha = "a"
+ Beta = "b"
+ Gamma = "g"
+)
+
+// The comment on the previous const block shouldn't flow through to here.
+
+const UndocAgain = 6 // MATCH /UndocAgain.*should have comment/
+
+const (
+ SomeUndocumented = 7 // MATCH /SomeUndocumented.*should have comment.*block/
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/else-multi.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/else-multi.go
new file mode 100644
index 00000000000..98f39a3eb5b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/else-multi.go
@@ -0,0 +1,18 @@
+// Test of return+else warning; should not trigger on multi-branch if/else.
+// OK
+
+// Package pkg ...
+package pkg
+
+import "log"
+
+func f(x int) bool {
+ if x == 0 {
+ log.Print("x is zero")
+ } else if x > 0 {
+ return true
+ } else {
+ log.Printf("non-positive x: %d", x)
+ }
+ return false
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/else.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/else.go
new file mode 100644
index 00000000000..515c043d306
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/else.go
@@ -0,0 +1,23 @@
+// Test of return+else warning.
+
+// Package pkg ...
+package pkg
+
+import "log"
+
+func f(x int) bool {
+ if x > 0 {
+ return true
+ } else { // MATCH /if.*return.*else.*outdent/
+ log.Printf("non-positive x: %d", x)
+ }
+ return false
+}
+
+func g(f func() bool) string {
+ if ok := f(); ok {
+ return "it's okay"
+ } else { // MATCH /if.*return.*else.*outdent.*short.*var.*declaration/
+ return "it's NOT okay!"
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/errorf.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/errorf.go
new file mode 100644
index 00000000000..768fb8ce60f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/errorf.go
@@ -0,0 +1,22 @@
+// Test for not using fmt.Errorf.
+
+// Package foo ...
+package foo
+
+import (
+ "errors"
+ "fmt"
+)
+
+func f(x int) error {
+ if x > 10 {
+ return errors.New(fmt.Sprintf("something %d", x)) // MATCH /should replace.*errors\.New\(fmt\.Sprintf\(\.\.\.\)\).*fmt\.Errorf\(\.\.\.\)/
+ }
+ if x > 5 {
+ return errors.New(g("blah")) // ok
+ }
+ if x > 4 {
+ return errors.New("something else") // ok
+ }
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/errors.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/errors.go
new file mode 100644
index 00000000000..2882738e01a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/errors.go
@@ -0,0 +1,35 @@
+// Test for naming errors.
+
+// Package foo ...
+package foo
+
+import (
+ "errors"
+ "fmt"
+)
+
+var unexp = errors.New("some unexported error") // MATCH /error var.*unexp.*errFoo/
+
+// Exp ...
+var Exp = errors.New("some exported error") // MATCH /error var.*Exp.*ErrFoo/
+
+var (
+ e1 = fmt.Errorf("blah %d", 4) // MATCH /error var.*e1.*errFoo/
+ // E2 ...
+ E2 = fmt.Errorf("blah %d", 5) // MATCH /error var.*E2.*ErrFoo/
+)
+
+func f() {
+ var whatever = errors.New("ok") // ok
+}
+
+// Check for the error strings themselves.
+
+func g(x int) error {
+ if x < 1 {
+ return fmt.Errorf("This %d is too low", x) // MATCH /error strings.*not be capitalized/
+ } else if x == 0 {
+ return fmt.Errorf("XML time") // ok
+ }
+ return errors.New(`too much stuff.`) // MATCH /error strings.*not end with punctuation/
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/import-dot.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/import-dot.go
new file mode 100644
index 00000000000..bb4c2675927
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/import-dot.go
@@ -0,0 +1,6 @@
+// Test that dot imports are flagged.
+
+// Package pkg ...
+package pkg
+
+import . "fmt" // MATCH /dot import/
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/inc.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/inc.go
new file mode 100644
index 00000000000..3868beea116
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/inc.go
@@ -0,0 +1,14 @@
+// Test for use of x++ and x--.
+
+// Package pkg ...
+package pkg
+
+func addOne(x int) int {
+ x += 1 // MATCH /x\+\+/
+ return x
+}
+
+func subOneInLoop(y int) {
+ for ; y > 0; y -= 1 { // MATCH /y--/
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/make.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/make.go
new file mode 100644
index 00000000000..5211375fe69
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/make.go
@@ -0,0 +1,10 @@
+// Test for pointless make() calls.
+
+// Package pkg ...
+package pkg
+
+func f() {
+ x := make([]T, 0) // MATCH /var x \[\]T/
+ y := make([]somepkg.Foo_Bar, 0) // MATCH /var y \[\]somepkg.Foo_Bar/
+ z = make([]T, 0) // ok, because we don't know where z is declared
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/names.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/names.go
new file mode 100644
index 00000000000..ca7ffde6e10
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/names.go
@@ -0,0 +1,54 @@
+// Test for name linting.
+
+// Package pkg_with_underscores ...
+package pkg_with_underscores // MATCH /underscore.*package name/
+
+var var_name int // MATCH /underscore.*var.*var_name/
+
+type t_wow struct { // MATCH /underscore.*type.*t_wow/
+ x_damn int // MATCH /underscore.*field.*x_damn/
+ Url *url.URL // MATCH /struct field.*Url.*URL/
+}
+
+const fooId = "blah" // MATCH /fooId.*fooID/
+
+func f_it() { // MATCH /underscore.*func.*f_it/
+ more_underscore := 4 // MATCH /underscore.*var.*more_underscore/
+ if isEof := (err == io.EOF); isEof { // MATCH /var.*isEof.*isEOF/
+ more_underscore = 7 // should be okay
+ }
+
+ x := foo_proto.Blah{} // should be okay
+
+ for _, theIp := range ips { // MATCH /range var.*theIp.*theIP/
+ }
+
+ switch myJson := g(); { // MATCH /var.*myJson.*myJSON/
+ }
+ switch tApi := x.(type) { // MATCH /var.*tApi.*tAPI/
+ }
+
+ select {
+ case qId := <-c: // MATCH /var.*qId.*qID/
+ }
+}
+
+// Common styles in other languages that don't belong in Go.
+const (
+ CPP_CONST = 1 // MATCH /ALL_CAPS.*CamelCase/
+ kLeadingKay = 2 // MATCH /k.*leadingKay/
+
+ HTML = 3 // okay; no underscore
+ X509B = 4 // ditto
+)
+
+func f(bad_name int) {} // MATCH /underscore.*func parameter.*bad_name/
+func g() (no_way int) {} // MATCH /underscore.*func result.*no_way/
+func (t *t_wow) f(more_under string) {} // MATCH /underscore.*method parameter.*more_under/
+func (t *t_wow) g() (still_more string) {} // MATCH /underscore.*method result.*still_more/
+
+type i interface {
+ CheckHtml() string // okay; interface method names are often constrained by the concrete types' method names
+
+ F(foo_bar int) // MATCH /foo_bar.*fooBar/
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc1.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc1.go
new file mode 100644
index 00000000000..8197a8ee0f4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc1.go
@@ -0,0 +1,3 @@
+// Test of missing package comment.
+
+package foo // MATCH /should.*package comment.*unless/
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc2.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc2.go
new file mode 100644
index 00000000000..c61febd0e9b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc2.go
@@ -0,0 +1,5 @@
+// Test of package comment in an incorrect form.
+
+// Some random package doc that isn't in the right form.
+// MATCH /package comment should.*form.*"Package testdata .*"/
+package testdata
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc3.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc3.go
new file mode 100644
index 00000000000..95e814e0a46
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc3.go
@@ -0,0 +1,7 @@
+// Test of block package comment.
+// OK
+
+/*
+Package foo is pretty sweet.
+*/
+package foo
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc4.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc4.go
new file mode 100644
index 00000000000..23448dec31e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-doc4.go
@@ -0,0 +1,7 @@
+// Test of block package comment with leading space.
+
+/*
+ Package foo is pretty sweet.
+MATCH /package comment.*leading space/
+*/
+package foo
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-main.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-main.go
new file mode 100644
index 00000000000..c261945d69a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/pkg-main.go
@@ -0,0 +1,5 @@
+// Test of package comment for package main.
+// OK
+
+// This binary does something awesome.
+package main
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/range.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/range.go
new file mode 100644
index 00000000000..e8629edc342
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/range.go
@@ -0,0 +1,27 @@
+// Test for range construction.
+
+// Package foo ...
+package foo
+
+func f() {
+ // with :=
+ for x, _ := range m { // MATCH /should omit 2nd value.*range.*equivalent.*for x := range/
+ }
+ // with =
+ for y, _ = range m { // MATCH /should omit 2nd value.*range.*equivalent.*for y = range/
+ }
+
+ // all OK:
+ for x := range m {
+ }
+ for x, y := range m {
+ }
+ for _, y := range m {
+ }
+ for x = range m {
+ }
+ for x, y = range m {
+ }
+ for _, y = range m {
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/receiver-names.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/receiver-names.go
new file mode 100644
index 00000000000..58f567dae43
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/receiver-names.go
@@ -0,0 +1,38 @@
+// Test for bad receiver names.
+
+// Package foo ...
+package foo
+
+type foo struct{}
+
+func (this foo) f1() { // MATCH /should be a reflection of its identity/
+}
+
+func (self foo) f2() { // MATCH /should be a reflection of its identity/
+}
+
+func (f foo) f3() {
+}
+
+func (foo) f4() {
+}
+
+type bar struct{}
+
+func (b bar) f1() {
+}
+
+func (b bar) f2() {
+}
+
+func (a bar) f3() { // MATCH /receiver name a should be consistent with previous receiver name b for bar/
+}
+
+func (a *bar) f4() { // MATCH /receiver name a should be consistent with previous receiver name b for bar/
+}
+
+func (b *bar) f5() {
+}
+
+func (bar) f6() {
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/sort.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/sort.go
new file mode 100644
index 00000000000..c0990494285
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/sort.go
@@ -0,0 +1,20 @@
+// Test that we don't ask for comments on sort.Interface methods.
+
+// Package pkg ...
+package pkg
+
+// T is ...
+type T []int
+
+// Len by itself should get documented.
+
+func (t T) Len() int { return len(t) } // MATCH /exported method T\.Len.*should.*comment/
+
+// U is ...
+type U []int
+
+func (u U) Len() int { return len(u) }
+func (u U) Less(i, j int) bool { return u[i] < u[j] }
+func (u U) Swap(i, j int) { u[i], u[j] = u[j], u[i] }
+
+func (u U) Other() {} // MATCH /exported method U\.Other.*should.*comment/
diff --git a/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/var-decl.go b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/var-decl.go
new file mode 100644
index 00000000000..bbc687c333b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/3rf/mongo-lint/testdata/var-decl.go
@@ -0,0 +1,48 @@
+// Test for redundant type declaration.
+
+// Package foo ...
+package foo
+
+import "fmt"
+import "net/http"
+
+var mux *http.ServeMux = http.NewServeMux() // MATCH /should.*\*http\.ServeMux.*inferred/
+var myInt int = 7 // MATCH /should.*int.*myInt.*inferred/
+
+var myZeroInt int = 0 // MATCH /should.*= 0.*myZeroInt.*zero value/
+var myZeroFlt float32 = 0. // MATCH /should.*= 0\..*myZeroFlt.*zero value/
+var myZeroF64 float64 = 0.0 // MATCH /should.*= 0\..*myZeroF64.*zero value/
+var myZeroImg complex = 0i // MATCH /should.*= 0i.*myZeroImg.*zero value/
+var myZeroStr string = "" // MATCH /should.*= "".*myZeroStr.*zero value/
+var myZeroRaw string = `` // MATCH /should.*= ``.*myZeroRaw.*zero value/
+var myZeroPtr *Q = nil // MATCH /should.*= nil.*myZeroPtr.*zero value/
+var myZeroRune rune = '\x00' // MATCH /should.*= '\\x00'.*myZeroRune.*zero value/
+var myZeroRune2 rune = '\000' // MATCH /should.*= '\\000'.*myZeroRune2.*zero value/
+
+// No warning because there's no type on the LHS
+var x = 0
+
+// This shouldn't get a warning because there are no initial values.
+var str fmt.Stringer
+
+// No warning because this is a const.
+const x uint64 = 7
+
+// No warnings because the RHS is an ideal int, and the LHS is a different int type.
+var userID int64 = 1235
+var negID int64 = -1
+var parenID int64 = (17)
+var crazyID int64 = -(-(-(-9)))
+
+// Same, but for strings and floats.
+type stringT string
+type floatT float64
+
+var stringV stringT = "abc"
+var floatV floatT = 123.45
+
+// No warning because the LHS names an interface type.
+var data interface{} = googleIPs
+
+// No warning because it's a common idiom for interface satisfaction.
+var _ Server = (*serverImpl)(nil)
diff --git a/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/LICENSE.txt b/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/LICENSE.txt
new file mode 100644
index 00000000000..65e7260a6ae
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/LICENSE.txt
@@ -0,0 +1,13 @@
+Copyright (c) 2012 Chris Howey
+
+Permission to use, copy, modify, and distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/README.md b/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/README.md
new file mode 100644
index 00000000000..c7a00435112
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/README.md
@@ -0,0 +1,21 @@
+# getpasswd in Go [![GoDoc](https://godoc.org/github.com/howeyc/gopass?status.svg)](https://godoc.org/github.com/howeyc/gopass)
+
+Retrieve password from user terminal input without echo
+
+Verified on BSD, Linux, and Windows.
+
+Example:
+```go
+package main
+
+import "fmt"
+import "github.com/howeyc/gopass"
+
+func main() {
+ fmt.Printf("Password: ")
+	pass, err := gopass.GetPasswd() // Silent; for asterisks use gopass.GetPasswdMasked()
+	if err != nil {
+		// err is gopass.ErrInterrupted if the user pressed Ctrl-C.
+		return
+	}
+	_ = pass // Do something with pass
+}
+```
+
+Caution: Multi-byte characters not supported!
diff --git a/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/nix.go b/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/nix.go
new file mode 100644
index 00000000000..76c9da42093
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/nix.go
@@ -0,0 +1,29 @@
+// +build linux darwin freebsd netbsd openbsd
+
+package gopass
+
+import (
+ "io"
+ "syscall"
+
+ "golang.org/x/crypto/ssh/terminal"
+)
+
+const lineEnding = "\n"
+
+func getch() (byte, error) {
+ if oldState, err := terminal.MakeRaw(0); err != nil {
+ return 0, err
+ } else {
+ defer terminal.Restore(0, oldState)
+ }
+
+ var buf [1]byte
+ if n, err := syscall.Read(0, buf[:]); n == 0 || err != nil {
+ if err != nil {
+ return 0, err
+ }
+ return 0, io.EOF
+ }
+ return buf[0], nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/pass.go b/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/pass.go
new file mode 100644
index 00000000000..d6f1c7ef8e3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/pass.go
@@ -0,0 +1,56 @@
+package gopass
+
+import (
+ "errors"
+ "os"
+)
+
+var (
+ ErrInterrupted = errors.New("Interrupted")
+)
+
+// getPasswd returns the input read from terminal.
+// If masked is true, typing will be echoed as asterisks on the screen.
+// Otherwise, typing will echo nothing.
+func getPasswd(masked bool) ([]byte, error) {
+ var err error
+ var pass, bs, mask []byte
+ if masked {
+ bs = []byte("\b \b")
+ mask = []byte("*")
+ }
+
+ for {
+		if v, e := getch(); v == 127 || v == 8 { // backspace or delete: drop the last byte
+			if l := len(pass); l > 0 {
+				pass = pass[:l-1]
+				os.Stdout.Write(bs)
+			}
+		} else if v == 13 || v == 10 { // carriage return or newline: end of input
+			break
+		} else if v == 3 { // Ctrl-C: abort
+ err = ErrInterrupted
+ break
+ } else if v != 0 {
+ pass = append(pass, v)
+ os.Stdout.Write(mask)
+ } else if e != nil {
+ err = e
+ break
+ }
+ }
+ os.Stdout.WriteString(lineEnding)
+ return pass, err
+}
+
+// GetPasswd returns the password read from the terminal without echoing input.
+// The returned byte array does not include end-of-line characters.
+func GetPasswd() ([]byte, error) {
+ return getPasswd(false)
+}
+
+// GetPasswdMasked returns the password read from the terminal, echoing asterisks.
+// The returned byte array does not include end-of-line characters.
+func GetPasswdMasked() ([]byte, error) {
+ return getPasswd(true)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/win.go b/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/win.go
new file mode 100644
index 00000000000..499dcbdb0ca
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/howeyc/gopass/win.go
@@ -0,0 +1,44 @@
+// +build windows
+
+package gopass
+
+import "errors"
+import "syscall"
+import "unsafe"
+import "unicode/utf16"
+
+const lineEnding = "\r\n"
+
+func getch() (byte, error) {
+ modkernel32 := syscall.NewLazyDLL("kernel32.dll")
+ procReadConsole := modkernel32.NewProc("ReadConsoleW")
+ procGetConsoleMode := modkernel32.NewProc("GetConsoleMode")
+ procSetConsoleMode := modkernel32.NewProc("SetConsoleMode")
+
+ var mode uint32
+ pMode := &mode
+ procGetConsoleMode.Call(uintptr(syscall.Stdin), uintptr(unsafe.Pointer(pMode)))
+
+ var echoMode, lineMode uint32
+	echoMode = 4 // ENABLE_ECHO_INPUT
+	lineMode = 2 // ENABLE_LINE_INPUT
+ var newMode uint32
+ newMode = mode &^ (echoMode | lineMode)
+
+ procSetConsoleMode.Call(uintptr(syscall.Stdin), uintptr(newMode))
+ defer procSetConsoleMode.Call(uintptr(syscall.Stdin), uintptr(mode))
+
+ line := make([]uint16, 1)
+ pLine := &line[0]
+ var n uint16
+ procReadConsole.Call(uintptr(syscall.Stdin), uintptr(unsafe.Pointer(pLine)), uintptr(len(line)), uintptr(unsafe.Pointer(&n)))
+
+ b := []byte(string(utf16.Decode(line)))
+
+ // Not sure how this could happen, but it did for someone
+ if len(b) > 0 {
+ return b[0], nil
+ } else {
+ return 13, errors.New("Read error")
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/.gitignore b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/.gitignore
new file mode 100644
index 00000000000..dd8fc7468f4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/.gitignore
@@ -0,0 +1,5 @@
+*.6
+6.out
+_obj/
+_test/
+_testmain.go
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/LICENSE b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/LICENSE
new file mode 100644
index 00000000000..d6456956733
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/README.md b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/README.md
new file mode 100644
index 00000000000..8f36612218f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/README.md
@@ -0,0 +1,58 @@
+[![GoDoc](https://godoc.org/github.com/jacobsa/oglematchers?status.svg)](https://godoc.org/github.com/jacobsa/oglematchers)
+
+`oglematchers` is a package for the Go programming language containing a set of
+matchers, useful in a testing or mocking framework, inspired by and mostly
+compatible with [Google Test][googletest] for C++ and
+[Google JS Test][google-js-test]. The package is used by the
+[ogletest][ogletest] testing framework and [oglemock][oglemock] mocking
+framework, which may be more directly useful to you, but can be generically used
+elsewhere as well.
+
+A "matcher" is simply an object with a `Matches` method defining a set of golang
+values matched by the matcher, and a `Description` method describing that set.
+For example, here are some matchers:
+
+```go
+// Numbers
+Equals(17.13)
+LessThan(19)
+
+// Strings
+Equals("taco")
+HasSubstr("burrito")
+MatchesRegex("t.*o")
+
+// Combining matchers
+AnyOf(LessThan(17), GreaterThan(19))
+```
+
+There are lots more; see [here][reference] for a reference. You can also add
+your own simply by implementing the `oglematchers.Matcher` interface.
+
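+To make this concrete, here is a minimal sketch of a custom matcher; the
+`hasLength` type and the `example` package name are invented for illustration:
+
+```go
+package example
+
+import (
+	"fmt"
+
+	. "github.com/jacobsa/oglematchers"
+)
+
+// hasLength matches strings whose length is exactly n.
+type hasLength struct {
+	n int
+}
+
+// Verify at compile time that hasLength satisfies the Matcher interface.
+var _ Matcher = (*hasLength)(nil)
+
+func (m *hasLength) Description() string {
+	return fmt.Sprintf("has length %d", m.n)
+}
+
+// Matches returns nil on a match, a fatal error for candidates of the wrong
+// type, and an ordinary error describing the mismatch otherwise.
+func (m *hasLength) Matches(c interface{}) error {
+	s, ok := c.(string)
+	if !ok {
+		return NewFatalError("which is not a string")
+	}
+	if len(s) != m.n {
+		return fmt.Errorf("which has length %d", len(s))
+	}
+	return nil
+}
+```
+
+Returning a `FatalError` from the type check lets combining matchers such as
+`AllOf` stop immediately instead of reporting a misleading mismatch.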
+
+Installation
+------------
+
+First, make sure you have installed Go 1.0.2 or newer. See
+[here][golang-install] for instructions.
+
+Use the following command to install `oglematchers` and keep it up to date:
+
+ go get -u github.com/jacobsa/oglematchers
+
+
+Documentation
+-------------
+
+See [here][reference] for documentation. Alternatively, you can install the
+package and then use `godoc`:
+
+ godoc github.com/jacobsa/oglematchers
+
+
+[reference]: http://godoc.org/github.com/jacobsa/oglematchers
+[golang-install]: http://golang.org/doc/install.html
+[googletest]: http://code.google.com/p/googletest/
+[google-js-test]: http://code.google.com/p/google-js-test/
+[ogletest]: http://github.com/jacobsa/ogletest
+[oglemock]: http://github.com/jacobsa/oglemock
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/all_of.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/all_of.go
new file mode 100644
index 00000000000..d93a9740443
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/all_of.go
@@ -0,0 +1,70 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "strings"
+)
+
+// AllOf accepts a set of matchers S and returns a matcher that follows the
+// algorithm below when considering a candidate c:
+//
+// 1. Return true if for every Matcher m in S, m matches c.
+//
+// 2. Otherwise, if there is a matcher m in S such that m returns a fatal
+// error for c, return that matcher's error message.
+//
+// 3. Otherwise, return false with the error from some wrapped matcher.
+//
+// This is akin to a logical AND operation for matchers.
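+//
+// For example, AllOf(GreaterThan(17), LessThan(19)) matches 18 but not 16 or 20.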
+func AllOf(matchers ...Matcher) Matcher {
+ return &allOfMatcher{matchers}
+}
+
+type allOfMatcher struct {
+ wrappedMatchers []Matcher
+}
+
+func (m *allOfMatcher) Description() string {
+ // Special case: the empty set.
+ if len(m.wrappedMatchers) == 0 {
+ return "is anything"
+ }
+
+ // Join the descriptions for the wrapped matchers.
+ wrappedDescs := make([]string, len(m.wrappedMatchers))
+ for i, wrappedMatcher := range m.wrappedMatchers {
+ wrappedDescs[i] = wrappedMatcher.Description()
+ }
+
+ return strings.Join(wrappedDescs, ", and ")
+}
+
+func (m *allOfMatcher) Matches(c interface{}) (err error) {
+ for _, wrappedMatcher := range m.wrappedMatchers {
+ if wrappedErr := wrappedMatcher.Matches(c); wrappedErr != nil {
+ err = wrappedErr
+
+ // If the error is fatal, return immediately with this error.
+ _, ok := wrappedErr.(*FatalError)
+ if ok {
+ return
+ }
+ }
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/all_of_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/all_of_test.go
new file mode 100644
index 00000000000..f07306f10ad
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/all_of_test.go
@@ -0,0 +1,110 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+ "errors"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type allOfFakeMatcher struct {
+ desc string
+ err error
+}
+
+func (m *allOfFakeMatcher) Matches(c interface{}) error {
+ return m.err
+}
+
+func (m *allOfFakeMatcher) Description() string {
+ return m.desc
+}
+
+type AllOfTest struct {
+}
+
+func init() { RegisterTestSuite(&AllOfTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *AllOfTest) DescriptionWithEmptySet() {
+ m := AllOf()
+ ExpectEq("is anything", m.Description())
+}
+
+func (t *AllOfTest) DescriptionWithOneMatcher() {
+ m := AllOf(&allOfFakeMatcher{"taco", errors.New("")})
+ ExpectEq("taco", m.Description())
+}
+
+func (t *AllOfTest) DescriptionWithMultipleMatchers() {
+ m := AllOf(
+ &allOfFakeMatcher{"taco", errors.New("")},
+ &allOfFakeMatcher{"burrito", errors.New("")},
+ &allOfFakeMatcher{"enchilada", errors.New("")})
+
+ ExpectEq("taco, and burrito, and enchilada", m.Description())
+}
+
+func (t *AllOfTest) EmptySet() {
+ m := AllOf()
+ err := m.Matches(17)
+
+ ExpectEq(nil, err)
+}
+
+func (t *AllOfTest) OneMatcherReturnsFatalErrorAndSomeOthersFail() {
+ m := AllOf(
+ &allOfFakeMatcher{"", errors.New("")},
+ &allOfFakeMatcher{"", NewFatalError("taco")},
+ &allOfFakeMatcher{"", errors.New("")},
+ &allOfFakeMatcher{"", nil})
+
+ err := m.Matches(17)
+
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *AllOfTest) OneMatcherReturnsNonFatalAndOthersSayTrue() {
+ m := AllOf(
+ &allOfFakeMatcher{"", nil},
+ &allOfFakeMatcher{"", errors.New("taco")},
+ &allOfFakeMatcher{"", nil})
+
+ err := m.Matches(17)
+
+ ExpectFalse(isFatal(err))
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *AllOfTest) AllMatchersSayTrue() {
+ m := AllOf(
+ &allOfFakeMatcher{"", nil},
+ &allOfFakeMatcher{"", nil},
+ &allOfFakeMatcher{"", nil})
+
+ err := m.Matches(17)
+
+ ExpectEq(nil, err)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any.go
new file mode 100644
index 00000000000..f6991ec1020
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any.go
@@ -0,0 +1,32 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+// Any returns a matcher that matches any value.
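+//
+// For example (illustrative only):
+//
+//	Any().Matches(17)     // nil
+//	Any().Matches(nil)    // nil
+//	Any().Matches("taco") // nil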
+func Any() Matcher {
+ return &anyMatcher{}
+}
+
+type anyMatcher struct {
+}
+
+func (m *anyMatcher) Description() string {
+ return "is anything"
+}
+
+func (m *anyMatcher) Matches(c interface{}) error {
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_of.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_of.go
new file mode 100644
index 00000000000..2918b51f21a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_of.go
@@ -0,0 +1,94 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// AnyOf accepts a set of values S and returns a matcher that follows the
+// algorithm below when considering a candidate c:
+//
+// 1. If there exists a value m in S such that m implements the Matcher
+// interface and m matches c, return true.
+//
+// 2. Otherwise, if there exists a value v in S such that v does not implement
+// the Matcher interface and the matcher Equals(v) matches c, return true.
+//
+// 3. Otherwise, if there is a value m in S such that m implements the Matcher
+// interface and m returns a fatal error for c, return that fatal error.
+//
+// 4. Otherwise, return false.
+//
+// This is akin to a logical OR operation for matchers, with non-matchers x
+// being treated as Equals(x).
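+//
+// A brief illustrative sketch, using the HasSubstr matcher from this package:
+//
+//	m := AnyOf(HasSubstr("ac"), "burrito")
+//	m.Matches("taco")    // nil: HasSubstr("ac") matches.
+//	m.Matches("burrito") // nil: "burrito" is treated as Equals("burrito").
+//	m.Matches("tamale")  // Non-nil error: nothing matches.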
+func AnyOf(vals ...interface{}) Matcher {
+	// Get hold of the reflect.Type for the Matcher interface.
+ var dummy *Matcher
+ matcherType := reflect.TypeOf(dummy).Elem()
+
+ // Create a matcher for each value, or use the value itself if it's already a
+ // matcher.
+ wrapped := make([]Matcher, len(vals))
+ for i, v := range vals {
+ t := reflect.TypeOf(v)
+ if t != nil && t.Implements(matcherType) {
+ wrapped[i] = v.(Matcher)
+ } else {
+ wrapped[i] = Equals(v)
+ }
+ }
+
+ return &anyOfMatcher{wrapped}
+}
+
+type anyOfMatcher struct {
+ wrapped []Matcher
+}
+
+func (m *anyOfMatcher) Description() string {
+ wrappedDescs := make([]string, len(m.wrapped))
+ for i, matcher := range m.wrapped {
+ wrappedDescs[i] = matcher.Description()
+ }
+
+ return fmt.Sprintf("or(%s)", strings.Join(wrappedDescs, ", "))
+}
+
+func (m *anyOfMatcher) Matches(c interface{}) (err error) {
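+	// Start with a non-fatal "no match" result carrying an empty message; this
+	// is what we return if no wrapped matcher matches and none is fatal.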
+ err = errors.New("")
+
+ // Try each matcher in turn.
+ for _, matcher := range m.wrapped {
+ wrappedErr := matcher.Matches(c)
+
+ // Return immediately if there's a match.
+ if wrappedErr == nil {
+ err = nil
+ return
+ }
+
+ // Note the fatal error, if any.
+ if _, isFatal := wrappedErr.(*FatalError); isFatal {
+ err = wrappedErr
+ }
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_of_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_of_test.go
new file mode 100644
index 00000000000..b0f85efc4a3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_of_test.go
@@ -0,0 +1,139 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "errors"
+
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type fakeAnyOfMatcher struct {
+ desc string
+ err error
+}
+
+func (m *fakeAnyOfMatcher) Matches(c interface{}) error {
+ return m.err
+}
+
+func (m *fakeAnyOfMatcher) Description() string {
+ return m.desc
+}
+
+type AnyOfTest struct {
+}
+
+func init() { RegisterTestSuite(&AnyOfTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *AnyOfTest) EmptySet() {
+ matcher := AnyOf()
+
+ err := matcher.Matches(0)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *AnyOfTest) OneTrue() {
+ matcher := AnyOf(
+ &fakeAnyOfMatcher{"", NewFatalError("foo")},
+ 17,
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ &fakeAnyOfMatcher{"", nil},
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ )
+
+ err := matcher.Matches(0)
+ ExpectEq(nil, err)
+}
+
+func (t *AnyOfTest) OneEqual() {
+ matcher := AnyOf(
+ &fakeAnyOfMatcher{"", NewFatalError("foo")},
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ 13,
+ "taco",
+ 19,
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ )
+
+ err := matcher.Matches("taco")
+ ExpectEq(nil, err)
+}
+
+func (t *AnyOfTest) OneFatal() {
+ matcher := AnyOf(
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ 17,
+ &fakeAnyOfMatcher{"", NewFatalError("taco")},
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ )
+
+ err := matcher.Matches(0)
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *AnyOfTest) OneNil() {
+ var err error
+ matcher := AnyOf(
+ 13,
+ nil,
+ 19,
+ )
+
+ // No match
+ err = matcher.Matches(14)
+ ExpectNe(nil, err)
+
+ // Match
+ err = matcher.Matches(nil)
+ ExpectEq(nil, err)
+}
+
+func (t *AnyOfTest) AllFalseAndNotEqual() {
+ matcher := AnyOf(
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ 17,
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ 19,
+ )
+
+ err := matcher.Matches(0)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *AnyOfTest) DescriptionForEmptySet() {
+ matcher := AnyOf()
+ ExpectEq("or()", matcher.Description())
+}
+
+func (t *AnyOfTest) DescriptionForNonEmptySet() {
+ matcher := AnyOf(
+ &fakeAnyOfMatcher{"taco", nil},
+ "burrito",
+ &fakeAnyOfMatcher{"enchilada", nil},
+ )
+
+ ExpectEq("or(taco, burrito, enchilada)", matcher.Description())
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_test.go
new file mode 100644
index 00000000000..7b6f6075868
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/any_test.go
@@ -0,0 +1,53 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type AnyTest struct {
+}
+
+func init() { RegisterTestSuite(&AnyTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *AnyTest) Description() {
+ m := Any()
+ ExpectEq("is anything", m.Description())
+}
+
+func (t *AnyTest) Matches() {
+ var err error
+ m := Any()
+
+ err = m.Matches(nil)
+ ExpectEq(nil, err)
+
+ err = m.Matches(17)
+ ExpectEq(nil, err)
+
+ err = m.Matches("taco")
+ ExpectEq(nil, err)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/contains.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/contains.go
new file mode 100644
index 00000000000..2f326dbc5d6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/contains.go
@@ -0,0 +1,61 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// Contains returns a matcher that matches arrays and slices with at least one
+// element that matches the supplied argument. If the argument x is not itself
+// a Matcher, this is equivalent to Contains(Equals(x)).
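+//
+// For illustration (a sketch reusing matchers from this package):
+//
+//	Contains("taco").Matches([]string{"burrito", "taco"}) // nil (match)
+//	Contains("taco").Matches([]string{"burrito"})         // non-nil error
+//	Contains(HasSubstr("ac")).Matches([]string{"taco"})   // nil (match)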
+func Contains(x interface{}) Matcher {
+ var result containsMatcher
+ var ok bool
+
+ if result.elementMatcher, ok = x.(Matcher); !ok {
+ result.elementMatcher = Equals(x)
+ }
+
+ return &result
+}
+
+type containsMatcher struct {
+ elementMatcher Matcher
+}
+
+func (m *containsMatcher) Description() string {
+ return fmt.Sprintf("contains: %s", m.elementMatcher.Description())
+}
+
+func (m *containsMatcher) Matches(candidate interface{}) error {
+ // The candidate must be a slice or an array.
+ v := reflect.ValueOf(candidate)
+ if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
+ return NewFatalError("which is not a slice or array")
+ }
+
+ // Check each element.
+ for i := 0; i < v.Len(); i++ {
+ elem := v.Index(i)
+ if matchErr := m.elementMatcher.Matches(elem.Interface()); matchErr == nil {
+ return nil
+ }
+ }
+
+ return fmt.Errorf("")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/contains_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/contains_test.go
new file mode 100644
index 00000000000..34625fcaa5f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/contains_test.go
@@ -0,0 +1,233 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type ContainsTest struct {}
+func init() { RegisterTestSuite(&ContainsTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *ContainsTest) WrongTypeCandidates() {
+ m := Contains("")
+ ExpectEq("contains: ", m.Description())
+
+ var err error
+
+ // Nil candidate
+ err = m.Matches(nil)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+
+ // String candidate
+ err = m.Matches("")
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+
+ // Map candidate
+ err = m.Matches(make(map[string]string))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+}
+
+func (t *ContainsTest) NilArgument() {
+ m := Contains(nil)
+ ExpectEq("contains: is nil", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Empty array of pointers
+ c = [...]*int{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Empty slice of pointers
+ c = []*int{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-empty array of integers
+ c = [...]int{17, 0, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-empty slice of integers
+ c = []int{17, 0, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching array of pointers
+ c = [...]*int{new(int), new(int)}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching slice of pointers
+ c = []*int{new(int), new(int)}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Matching array of pointers
+ c = [...]*int{new(int), nil, new(int)}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching slice of pointers
+ c = []*int{new(int), nil, new(int)}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching slice of pointers from matching array
+ someArray := [...]*int{new(int), nil, new(int)}
+ c = someArray[0:1]
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *ContainsTest) StringArgument() {
+ m := Contains("taco")
+ ExpectEq("contains: taco", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Non-matching array of strings
+ c = [...]string{"burrito", "enchilada"}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching slice of strings
+ c = []string{"burrito", "enchilada"}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Matching array of strings
+ c = [...]string{"burrito", "taco", "enchilada"}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching slice of strings
+ c = []string{"burrito", "taco", "enchilada"}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching slice of strings from matching array
+ someArray := [...]string{"burrito", "taco", "enchilada"}
+ c = someArray[0:1]
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *ContainsTest) IntegerArgument() {
+ m := Contains(int(17))
+ ExpectEq("contains: 17", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Non-matching array of integers
+ c = [...]int{13, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching slice of integers
+ c = []int{13, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Matching array of integers
+ c = [...]int{13, 17, 19}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching slice of integers
+ c = []int{13, 17, 19}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching slice of integers from matching array
+ someArray := [...]int{13, 17, 19}
+ c = someArray[0:1]
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching array of floats
+ c = [...]float32{13, 17.5, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching slice of floats
+ c = []float32{13, 17.5, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Matching array of floats
+ c = [...]float32{13, 17, 19}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching slice of floats
+ c = []float32{13, 17, 19}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+}
+
+func (t *ContainsTest) MatcherArgument() {
+ m := Contains(HasSubstr("ac"))
+ ExpectEq("contains: has substring \"ac\"", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Non-matching array of strings
+ c = [...]string{"burrito", "enchilada"}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching slice of strings
+ c = []string{"burrito", "enchilada"}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Matching array of strings
+ c = [...]string{"burrito", "taco", "enchilada"}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching slice of strings
+ c = []string{"burrito", "taco", "enchilada"}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching slice of strings from matching array
+ someArray := [...]string{"burrito", "taco", "enchilada"}
+ c = someArray[0:1]
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/deep_equals.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/deep_equals.go
new file mode 100644
index 00000000000..1d91baef32e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/deep_equals.go
@@ -0,0 +1,88 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+var byteSliceType reflect.Type = reflect.TypeOf([]byte{})
+
+// DeepEquals returns a matcher that matches based on 'deep equality', as
+// defined by the reflect package. This matcher requires that values have
+// identical types to x.
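+//
+// A short illustrative sketch:
+//
+//	m := DeepEquals([]int{17, 19})
+//	m.Matches([]int{17, 19})   // nil (match)
+//	m.Matches([]int{17})       // non-nil error (different contents)
+//	m.Matches([]int32{17, 19}) // fatal error (different type)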
+func DeepEquals(x interface{}) Matcher {
+ return &deepEqualsMatcher{x}
+}
+
+type deepEqualsMatcher struct {
+ x interface{}
+}
+
+func (m *deepEqualsMatcher) Description() string {
+ xDesc := fmt.Sprintf("%v", m.x)
+ xValue := reflect.ValueOf(m.x)
+
+ // Special case: fmt.Sprintf presents nil slices as "[]", but
+ // reflect.DeepEqual makes a distinction between nil and empty slices. Make
+ // this less confusing.
+ if xValue.Kind() == reflect.Slice && xValue.IsNil() {
+ xDesc = "<nil slice>"
+ }
+
+ return fmt.Sprintf("deep equals: %s", xDesc)
+}
+
+func (m *deepEqualsMatcher) Matches(c interface{}) error {
+ // Make sure the types match.
+ ct := reflect.TypeOf(c)
+ xt := reflect.TypeOf(m.x)
+
+ if ct != xt {
+ return NewFatalError(fmt.Sprintf("which is of type %v", ct))
+ }
+
+ // Special case: handle byte slices more efficiently.
+ cValue := reflect.ValueOf(c)
+ xValue := reflect.ValueOf(m.x)
+
+ if ct == byteSliceType && !cValue.IsNil() && !xValue.IsNil() {
+ xBytes := m.x.([]byte)
+ cBytes := c.([]byte)
+
+ if bytes.Equal(cBytes, xBytes) {
+ return nil
+ }
+
+ return errors.New("")
+ }
+
+ // Defer to the reflect package.
+ if reflect.DeepEqual(m.x, c) {
+ return nil
+ }
+
+	// Special case: if the comparison failed because c is the nil slice, give
+	// an indication of this (since its value is printed as "[]").
+ if cValue.Kind() == reflect.Slice && cValue.IsNil() {
+ return errors.New("which is nil")
+ }
+
+ return errors.New("")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/deep_equals_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/deep_equals_test.go
new file mode 100644
index 00000000000..9fedfd77017
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/deep_equals_test.go
@@ -0,0 +1,343 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+	"bytes"
+	"testing"
+
+	. "github.com/jacobsa/oglematchers"
+	. "github.com/jacobsa/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type DeepEqualsTest struct {}
+func init() { RegisterTestSuite(&DeepEqualsTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *DeepEqualsTest) WrongTypeCandidateWithScalarValue() {
+ var x int = 17
+ m := DeepEquals(x)
+
+ var err error
+
+ // Nil candidate.
+ err = m.Matches(nil)
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("<nil>")))
+
+ // Int alias candidate.
+ type intAlias int
+ err = m.Matches(intAlias(x))
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("intAlias")))
+
+ // String candidate.
+ err = m.Matches("taco")
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("string")))
+
+ // Byte slice candidate.
+ err = m.Matches([]byte{})
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint8")))
+
+ // Other slice candidate.
+ err = m.Matches([]uint16{})
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint16")))
+
+ // Unsigned int candidate.
+ err = m.Matches(uint(17))
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("uint")))
+}
+
+func (t *DeepEqualsTest) WrongTypeCandidateWithByteSliceValue() {
+ x := []byte{}
+ m := DeepEquals(x)
+
+ var err error
+
+ // Nil candidate.
+ err = m.Matches(nil)
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("<nil>")))
+
+ // String candidate.
+ err = m.Matches("taco")
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("string")))
+
+ // Slice candidate with wrong value type.
+ err = m.Matches([]uint16{})
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint16")))
+}
+
+func (t *DeepEqualsTest) WrongTypeCandidateWithOtherSliceValue() {
+ x := []uint16{}
+ m := DeepEquals(x)
+
+ var err error
+
+ // Nil candidate.
+ err = m.Matches(nil)
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("<nil>")))
+
+ // String candidate.
+ err = m.Matches("taco")
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("string")))
+
+ // Byte slice candidate with wrong value type.
+ err = m.Matches([]byte{})
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint8")))
+
+ // Other slice candidate with wrong value type.
+ err = m.Matches([]uint32{})
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint32")))
+}
+
+func (t *DeepEqualsTest) WrongTypeCandidateWithNilLiteralValue() {
+ m := DeepEquals(nil)
+
+ var err error
+
+ // String candidate.
+ err = m.Matches("taco")
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("string")))
+
+ // Nil byte slice candidate.
+ err = m.Matches([]byte(nil))
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint8")))
+
+ // Nil other slice candidate.
+ err = m.Matches([]uint16(nil))
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint16")))
+}
+
+func (t *DeepEqualsTest) NilLiteralValue() {
+ m := DeepEquals(nil)
+ ExpectEq("deep equals: <nil>", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Nil literal candidate.
+ c = nil
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+}
+
+func (t *DeepEqualsTest) IntValue() {
+ m := DeepEquals(int(17))
+ ExpectEq("deep equals: 17", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Matching int.
+ c = int(17)
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching int.
+ c = int(18)
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *DeepEqualsTest) ByteSliceValue() {
+ x := []byte{17, 19}
+ m := DeepEquals(x)
+ ExpectEq("deep equals: [17 19]", m.Description())
+
+ var c []byte
+ var err error
+
+ // Matching.
+ c = make([]byte, len(x))
+ AssertEq(len(x), copy(c, x))
+
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Nil slice.
+ c = []byte(nil)
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("which is nil")))
+
+ // Prefix.
+ AssertGt(len(x), 1)
+ c = make([]byte, len(x)-1)
+ AssertEq(len(x)-1, copy(c, x))
+
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Suffix.
+ c = make([]byte, len(x)+1)
+ AssertEq(len(x), copy(c, x))
+
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *DeepEqualsTest) OtherSliceValue() {
+ x := []uint16{17, 19}
+ m := DeepEquals(x)
+ ExpectEq("deep equals: [17 19]", m.Description())
+
+ var c []uint16
+ var err error
+
+ // Matching.
+ c = make([]uint16, len(x))
+ AssertEq(len(x), copy(c, x))
+
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Nil slice.
+ c = []uint16(nil)
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("which is nil")))
+
+ // Prefix.
+ AssertGt(len(x), 1)
+ c = make([]uint16, len(x)-1)
+ AssertEq(len(x)-1, copy(c, x))
+
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Suffix.
+ c = make([]uint16, len(x)+1)
+ AssertEq(len(x), copy(c, x))
+
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *DeepEqualsTest) NilByteSliceValue() {
+ x := []byte(nil)
+ m := DeepEquals(x)
+ ExpectEq("deep equals: <nil slice>", m.Description())
+
+ var c []byte
+ var err error
+
+ // Nil slice.
+ c = []byte(nil)
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-nil slice.
+ c = []byte{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *DeepEqualsTest) NilOtherSliceValue() {
+ x := []uint16(nil)
+ m := DeepEquals(x)
+ ExpectEq("deep equals: <nil slice>", m.Description())
+
+ var c []uint16
+ var err error
+
+ // Nil slice.
+ c = []uint16(nil)
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-nil slice.
+ c = []uint16{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Benchmarks
+////////////////////////////////////////////////////////////////////////
+
+func benchmarkWithSize(b *testing.B, size int) {
+ b.StopTimer()
+ buf := bytes.Repeat([]byte{0x01}, size)
+ bufCopy := make([]byte, size)
+ copy(bufCopy, buf)
+
+ matcher := DeepEquals(buf)
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ matcher.Matches(bufCopy)
+ }
+
+ b.SetBytes(int64(size))
+}
+
+func BenchmarkShortByteSlice(b *testing.B) {
+ benchmarkWithSize(b, 256)
+}
+
+func BenchmarkLongByteSlice(b *testing.B) {
+ benchmarkWithSize(b, 1<<24)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/elements_are.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/elements_are.go
new file mode 100644
index 00000000000..2941847c705
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/elements_are.go
@@ -0,0 +1,91 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// Given a list of arguments M, ElementsAre returns a matcher that matches
+// arrays and slices A where all of the following hold:
+//
+// * A is the same length as M.
+//
+// * For each i < len(A) where M[i] is a matcher, A[i] matches M[i].
+//
+// * For each i < len(A) where M[i] is not a matcher, A[i] matches
+// Equals(M[i]).
+//
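+// For illustration (a sketch using the LessThan matcher from this package):
+//
+//	m := ElementsAre("taco", LessThan(17))
+//	m.Matches([]interface{}{"taco", 16}) // nil (match)
+//	m.Matches([]interface{}{"taco", 23}) // error: element 1 doesn't match
+//	m.Matches([]interface{}{"taco"})     // error: wrong length
+//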
+func ElementsAre(M ...interface{}) Matcher {
+ // Copy over matchers, or convert to Equals(x) for non-matcher x.
+ subMatchers := make([]Matcher, len(M))
+ for i, x := range M {
+ if matcher, ok := x.(Matcher); ok {
+ subMatchers[i] = matcher
+ continue
+ }
+
+ subMatchers[i] = Equals(x)
+ }
+
+ return &elementsAreMatcher{subMatchers}
+}
+
+type elementsAreMatcher struct {
+ subMatchers []Matcher
+}
+
+func (m *elementsAreMatcher) Description() string {
+ subDescs := make([]string, len(m.subMatchers))
+ for i, sm := range m.subMatchers {
+ subDescs[i] = sm.Description()
+ }
+
+ return fmt.Sprintf("elements are: [%s]", strings.Join(subDescs, ", "))
+}
+
+func (m *elementsAreMatcher) Matches(candidates interface{}) error {
+ // The candidate must be a slice or an array.
+ v := reflect.ValueOf(candidates)
+ if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
+ return NewFatalError("which is not a slice or array")
+ }
+
+ // The length must be correct.
+ if v.Len() != len(m.subMatchers) {
+ return errors.New(fmt.Sprintf("which is of length %d", v.Len()))
+ }
+
+ // Check each element.
+ for i, subMatcher := range m.subMatchers {
+ c := v.Index(i)
+ if matchErr := subMatcher.Matches(c.Interface()); matchErr != nil {
+			// Return an error indicating which element doesn't match. If the
+			// matcher error was fatal, make this one fatal too.
+ err := errors.New(fmt.Sprintf("whose element %d doesn't match", i))
+ if _, isFatal := matchErr.(*FatalError); isFatal {
+ err = NewFatalError(err.Error())
+ }
+
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/elements_are_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/elements_are_test.go
new file mode 100644
index 00000000000..56e0f375d85
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/elements_are_test.go
@@ -0,0 +1,208 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type ElementsAreTest struct {
+}
+
+func init() { RegisterTestSuite(&ElementsAreTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *ElementsAreTest) EmptySet() {
+ m := ElementsAre()
+ ExpectEq("elements are: []", m.Description())
+
+ var c []interface{}
+ var err error
+
+ // No candidates.
+ c = []interface{}{}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // One candidate.
+ c = []interface{}{17}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 1")))
+}
+
+func (t *ElementsAreTest) OneMatcher() {
+ m := ElementsAre(LessThan(17))
+ ExpectEq("elements are: [less than 17]", m.Description())
+
+ var c []interface{}
+ var err error
+
+ // No candidates.
+ c = []interface{}{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 0")))
+
+ // Matching candidate.
+ c = []interface{}{16}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching candidate.
+ c = []interface{}{19}
+ err = m.Matches(c)
+ ExpectNe(nil, err)
+
+ // Two candidates.
+ c = []interface{}{17, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 2")))
+}
+
+func (t *ElementsAreTest) OneValue() {
+ m := ElementsAre(17)
+ ExpectEq("elements are: [17]", m.Description())
+
+ var c []interface{}
+ var err error
+
+ // No candidates.
+ c = []interface{}{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 0")))
+
+ // Matching int.
+ c = []interface{}{int(17)}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching float.
+ c = []interface{}{float32(17)}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching candidate.
+ c = []interface{}{19}
+ err = m.Matches(c)
+ ExpectNe(nil, err)
+
+ // Two candidates.
+ c = []interface{}{17, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 2")))
+}
+
+func (t *ElementsAreTest) MultipleElements() {
+ m := ElementsAre("taco", LessThan(17))
+ ExpectEq("elements are: [taco, less than 17]", m.Description())
+
+ var c []interface{}
+ var err error
+
+ // One candidate.
+ c = []interface{}{17}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 1")))
+
+ // Both matching.
+ c = []interface{}{"taco", 16}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // First non-matching.
+ c = []interface{}{"burrito", 16}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("whose element 0 doesn't match")))
+
+ // Second non-matching.
+ c = []interface{}{"taco", 17}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("whose element 1 doesn't match")))
+
+ // Three candidates.
+ c = []interface{}{"taco", 17, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 3")))
+}
+
+func (t *ElementsAreTest) ArrayCandidates() {
+ m := ElementsAre("taco", LessThan(17))
+
+ var err error
+
+ // One candidate.
+ err = m.Matches([1]interface{}{"taco"})
+ ExpectThat(err, Error(HasSubstr("length 1")))
+
+ // Both matching.
+ err = m.Matches([2]interface{}{"taco", 16})
+ ExpectEq(nil, err)
+
+ // First non-matching.
+ err = m.Matches([2]interface{}{"burrito", 16})
+ ExpectThat(err, Error(Equals("whose element 0 doesn't match")))
+}
+
+func (t *ElementsAreTest) WrongTypeCandidate() {
+ m := ElementsAre("taco")
+
+ var err error
+
+ // String candidate.
+ err = m.Matches("taco")
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+
+ // Map candidate.
+ err = m.Matches(map[string]string{})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+
+ // Nil candidate.
+ err = m.Matches(nil)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+}
+
+func (t *ElementsAreTest) PropagatesFatality() {
+ m := ElementsAre(LessThan(17))
+ ExpectEq("elements are: [less than 17]", m.Description())
+
+ var c []interface{}
+ var err error
+
+ // Non-fatal error.
+ c = []interface{}{19}
+ err = m.Matches(c)
+ AssertNe(nil, err)
+ ExpectFalse(isFatal(err))
+
+ // Fatal error.
+ c = []interface{}{"taco"}
+ err = m.Matches(c)
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/equals.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/equals.go
new file mode 100644
index 00000000000..26280c5a6eb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/equals.go
@@ -0,0 +1,557 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "reflect"
+)
+
+// Equals(x) returns a matcher that matches values v such that v and x are
+// equivalent. This includes the case when the comparison v == x using Go's
+// built-in comparison operator is legal (except for structs, which this
+// matcher does not support), but for convenience the following rules also
+// apply:
+//
+// * Type checking is done based on underlying types rather than actual
+// types, so that e.g. two aliases for string can be compared:
+//
+// type stringAlias1 string
+// type stringAlias2 string
+//
+// a := "taco"
+// b := stringAlias1("taco")
+// c := stringAlias2("taco")
+//
+// ExpectTrue(a == b) // Legal, passes
+// ExpectTrue(b == c) // Illegal, doesn't compile
+//
+// ExpectThat(a, Equals(b)) // Passes
+// ExpectThat(b, Equals(c)) // Passes
+//
+// * Values of numeric type are treated as if they were abstract numbers, and
+// compared accordingly. Therefore Equals(17) will match int(17),
+// int16(17), uint(17), float32(17), complex64(17), and so on.
+//
+// If you want a stricter matcher that contains no such cleverness, see
+// IdenticalTo instead.
+//
+// Arrays are supported by this matcher, but do not participate in the
+// exceptions above. Two arrays compared with this matcher must have identical
+// types, and their element type must itself be comparable according to Go's ==
+// operator.
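+//
+// For example (illustrative of the numeric rules above):
+//
+//	ExpectThat(int16(17), Equals(17))   // Passes
+//	ExpectThat(uint(17), Equals(17))    // Passes
+//	ExpectThat(float32(17), Equals(17)) // Passes
+//	ExpectThat(17.5, Equals(17))        // Fails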
+func Equals(x interface{}) Matcher {
+ v := reflect.ValueOf(x)
+
+ // This matcher doesn't support structs.
+ if v.Kind() == reflect.Struct {
+ panic(fmt.Sprintf("oglematchers.Equals: unsupported kind %v", v.Kind()))
+ }
+
+ // The == operator is not defined for non-nil slices.
+ if v.Kind() == reflect.Slice && v.Pointer() != uintptr(0) {
+ panic(fmt.Sprintf("oglematchers.Equals: non-nil slice"))
+ }
+
+ return &equalsMatcher{v}
+}
+
+type equalsMatcher struct {
+ expectedValue reflect.Value
+}
+
+////////////////////////////////////////////////////////////////////////
+// Numeric types
+////////////////////////////////////////////////////////////////////////
+
+func isSignedInteger(v reflect.Value) bool {
+ k := v.Kind()
+ return k >= reflect.Int && k <= reflect.Int64
+}
+
+func isUnsignedInteger(v reflect.Value) bool {
+ k := v.Kind()
+ return k >= reflect.Uint && k <= reflect.Uint64
+}
+
+func isInteger(v reflect.Value) bool {
+ return isSignedInteger(v) || isUnsignedInteger(v)
+}
+
+func isFloat(v reflect.Value) bool {
+ k := v.Kind()
+ return k == reflect.Float32 || k == reflect.Float64
+}
+
+func isComplex(v reflect.Value) bool {
+ k := v.Kind()
+ return k == reflect.Complex64 || k == reflect.Complex128
+}
+
+func checkAgainstInt64(e int64, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(c):
+ if c.Int() == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ u := c.Uint()
+ if u <= math.MaxInt64 && int64(u) == e {
+ err = nil
+ }
+
+ // Turn around the various floating point types so that the checkAgainst*
+ // functions for them can deal with precision issues.
+ case isFloat(c), isComplex(c):
+ return Equals(c.Interface()).Matches(e)
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstUint64(e uint64, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(c):
+ i := c.Int()
+ if i >= 0 && uint64(i) == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ if c.Uint() == e {
+ err = nil
+ }
+
+ // Turn around the various floating point types so that the checkAgainst*
+ // functions for them can deal with precision issues.
+ case isFloat(c), isComplex(c):
+ return Equals(c.Interface()).Matches(e)
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstFloat32(e float32, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(c):
+ if float32(c.Int()) == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ if float32(c.Uint()) == e {
+ err = nil
+ }
+
+ case isFloat(c):
+		// Compare using float32 to avoid a false sense of precision; otherwise
+		// e.g. Equals(float32(0.1)) won't match the float64 value 0.1.
+ if float32(c.Float()) == e {
+ err = nil
+ }
+
+ case isComplex(c):
+ comp := c.Complex()
+ rl := real(comp)
+ im := imag(comp)
+
+ // Compare using float32 to avoid a false sense of precision; otherwise
+ // e.g. Equals(float32(0.1)) won't match (0.1 + 0i).
+ if im == 0 && float32(rl) == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstFloat64(e float64, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ ck := c.Kind()
+
+ switch {
+ case isSignedInteger(c):
+ if float64(c.Int()) == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ if float64(c.Uint()) == e {
+ err = nil
+ }
+
+ // If the actual value is lower precision, turn the comparison around so we
+ // apply the low-precision rules. Otherwise, e.g. Equals(0.1) may not match
+ // float32(0.1).
+ case ck == reflect.Float32 || ck == reflect.Complex64:
+ return Equals(c.Interface()).Matches(e)
+
+ // Otherwise, compare with double precision.
+ case isFloat(c):
+ if c.Float() == e {
+ err = nil
+ }
+
+ case isComplex(c):
+ comp := c.Complex()
+ rl := real(comp)
+ im := imag(comp)
+
+ if im == 0 && rl == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstComplex64(e complex64, c reflect.Value) (err error) {
+ err = errors.New("")
+ realPart := real(e)
+ imaginaryPart := imag(e)
+
+ switch {
+ case isInteger(c) || isFloat(c):
+ // If we have no imaginary part, then we should just compare against the
+ // real part. Otherwise, we can't be equal.
+ if imaginaryPart != 0 {
+ return
+ }
+
+ return checkAgainstFloat32(realPart, c)
+
+ case isComplex(c):
+		// Compare using complex64 to avoid a false sense of precision; otherwise
+		// e.g. Equals(complex64(0.1)) won't match 0.1 + 0i.
+ if complex64(c.Complex()) == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstComplex128(e complex128, c reflect.Value) (err error) {
+ err = errors.New("")
+ realPart := real(e)
+ imaginaryPart := imag(e)
+
+ switch {
+ case isInteger(c) || isFloat(c):
+ // If we have no imaginary part, then we should just compare against the
+ // real part. Otherwise, we can't be equal.
+ if imaginaryPart != 0 {
+ return
+ }
+
+ return checkAgainstFloat64(realPart, c)
+
+ case isComplex(c):
+ if c.Complex() == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+////////////////////////////////////////////////////////////////////////
+// Other types
+////////////////////////////////////////////////////////////////////////
+
+func checkAgainstBool(e bool, c reflect.Value) (err error) {
+ if c.Kind() != reflect.Bool {
+ err = NewFatalError("which is not a bool")
+ return
+ }
+
+ err = errors.New("")
+ if c.Bool() == e {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstUintptr(e uintptr, c reflect.Value) (err error) {
+ if c.Kind() != reflect.Uintptr {
+ err = NewFatalError("which is not a uintptr")
+ return
+ }
+
+ err = errors.New("")
+ if uintptr(c.Uint()) == e {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstChan(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "chan int".
+ typeStr := fmt.Sprintf("%s %s", e.Type().ChanDir(), e.Type().Elem())
+
+ // Make sure c is a chan of the correct type.
+ if c.Kind() != reflect.Chan ||
+ c.Type().ChanDir() != e.Type().ChanDir() ||
+ c.Type().Elem() != e.Type().Elem() {
+ err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr))
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstFunc(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a function.
+ if c.Kind() != reflect.Func {
+ err = NewFatalError("which is not a function")
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstMap(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a map.
+ if c.Kind() != reflect.Map {
+ err = NewFatalError("which is not a map")
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstPtr(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "*int".
+ typeStr := fmt.Sprintf("*%v", e.Type().Elem())
+
+ // Make sure c is a pointer of the correct type.
+ if c.Kind() != reflect.Ptr ||
+ c.Type().Elem() != e.Type().Elem() {
+ err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr))
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstSlice(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "[]int".
+ typeStr := fmt.Sprintf("[]%v", e.Type().Elem())
+
+ // Make sure c is a slice of the correct type.
+ if c.Kind() != reflect.Slice ||
+ c.Type().Elem() != e.Type().Elem() {
+ err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr))
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstString(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a string.
+ if c.Kind() != reflect.String {
+ err = NewFatalError("which is not a string")
+ return
+ }
+
+ err = errors.New("")
+ if c.String() == e.String() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstArray(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "[2]int".
+ typeStr := fmt.Sprintf("%v", e.Type())
+
+ // Make sure c is the correct type.
+ if c.Type() != e.Type() {
+ err = NewFatalError(fmt.Sprintf("which is not %s", typeStr))
+ return
+ }
+
+ // Check for equality.
+ if e.Interface() != c.Interface() {
+ err = errors.New("")
+ return
+ }
+
+ return
+}
+
+func checkAgainstUnsafePointer(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a pointer.
+ if c.Kind() != reflect.UnsafePointer {
+ err = NewFatalError("which is not a unsafe.Pointer")
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkForNil(c reflect.Value) (err error) {
+ err = errors.New("")
+
+ // Make sure it is legal to call IsNil.
+ switch c.Kind() {
+ case reflect.Invalid:
+ case reflect.Chan:
+ case reflect.Func:
+ case reflect.Interface:
+ case reflect.Map:
+ case reflect.Ptr:
+ case reflect.Slice:
+
+ default:
+ err = NewFatalError("which cannot be compared to nil")
+ return
+ }
+
+ // Ask whether the value is nil. Handle a nil literal (kind Invalid)
+ // specially, since it's not legal to call IsNil there.
+ if c.Kind() == reflect.Invalid || c.IsNil() {
+ err = nil
+ }
+ return
+}
+
+////////////////////////////////////////////////////////////////////////
+// Public implementation
+////////////////////////////////////////////////////////////////////////
+
+func (m *equalsMatcher) Matches(candidate interface{}) error {
+ e := m.expectedValue
+ c := reflect.ValueOf(candidate)
+ ek := e.Kind()
+
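+	// Dispatch on the expected value's kind. Each helper returns nil on a
+	// match, a non-fatal error on a mismatch, and a *FatalError when the
+	// candidate cannot meaningfully be compared.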
+ switch {
+ case ek == reflect.Bool:
+ return checkAgainstBool(e.Bool(), c)
+
+ case isSignedInteger(e):
+ return checkAgainstInt64(e.Int(), c)
+
+ case isUnsignedInteger(e):
+ return checkAgainstUint64(e.Uint(), c)
+
+ case ek == reflect.Uintptr:
+ return checkAgainstUintptr(uintptr(e.Uint()), c)
+
+ case ek == reflect.Float32:
+ return checkAgainstFloat32(float32(e.Float()), c)
+
+ case ek == reflect.Float64:
+ return checkAgainstFloat64(e.Float(), c)
+
+ case ek == reflect.Complex64:
+ return checkAgainstComplex64(complex64(e.Complex()), c)
+
+ case ek == reflect.Complex128:
+ return checkAgainstComplex128(complex128(e.Complex()), c)
+
+ case ek == reflect.Chan:
+ return checkAgainstChan(e, c)
+
+ case ek == reflect.Func:
+ return checkAgainstFunc(e, c)
+
+ case ek == reflect.Map:
+ return checkAgainstMap(e, c)
+
+ case ek == reflect.Ptr:
+ return checkAgainstPtr(e, c)
+
+ case ek == reflect.Slice:
+ return checkAgainstSlice(e, c)
+
+ case ek == reflect.String:
+ return checkAgainstString(e, c)
+
+ case ek == reflect.Array:
+ return checkAgainstArray(e, c)
+
+ case ek == reflect.UnsafePointer:
+ return checkAgainstUnsafePointer(e, c)
+
+ case ek == reflect.Invalid:
+ return checkForNil(c)
+ }
+
+ panic(fmt.Sprintf("equalsMatcher.Matches: unexpected kind: %v", ek))
+}
+
+func (m *equalsMatcher) Description() string {
+ // Special case: handle nil.
+ if !m.expectedValue.IsValid() {
+ return "is nil"
+ }
+
+ return fmt.Sprintf("%v", m.expectedValue.Interface())
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/equals_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/equals_test.go
new file mode 100644
index 00000000000..6fdbd9b6cb7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/equals_test.go
@@ -0,0 +1,3843 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "fmt"
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+ "math"
+ "unsafe"
+)
+
+var someInt int = -17
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type EqualsTest struct {
+}
+
+func init() { RegisterTestSuite(&EqualsTest{}) }
+
+type equalsTestCase struct {
+ candidate interface{}
+ expectedResult bool
+ shouldBeFatal bool
+ expectedError string
+}
+
+func (t *EqualsTest) checkTestCases(matcher Matcher, cases []equalsTestCase) {
+ for i, c := range cases {
+ err := matcher.Matches(c.candidate)
+ ExpectEq(
+ c.expectedResult,
+ (err == nil),
+ "Result for case %d: %v (Error: %v)", i, c, err)
+
+ if err == nil {
+ continue
+ }
+
+ _, isFatal := err.(*FatalError)
+ ExpectEq(c.shouldBeFatal, isFatal, "Fatality for case %d: %v", i, c)
+
+ ExpectThat(err, Error(Equals(c.expectedError)), "Case %d: %v", i, c)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// nil
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) EqualsNil() {
+ matcher := Equals(nil)
+ ExpectEq("is nil", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Legal types
+ equalsTestCase{nil, true, false, ""},
+ equalsTestCase{chan int(nil), true, false, ""},
+ equalsTestCase{(func())(nil), true, false, ""},
+ equalsTestCase{interface{}(nil), true, false, ""},
+ equalsTestCase{map[int]int(nil), true, false, ""},
+ equalsTestCase{(*int)(nil), true, false, ""},
+ equalsTestCase{[]int(nil), true, false, ""},
+
+ equalsTestCase{make(chan int), false, false, ""},
+ equalsTestCase{func() {}, false, false, ""},
+ equalsTestCase{map[int]int{}, false, false, ""},
+ equalsTestCase{&someInt, false, false, ""},
+ equalsTestCase{[]int{}, false, false, ""},
+
+ // Illegal types
+ equalsTestCase{17, false, true, "which cannot be compared to nil"},
+ equalsTestCase{int8(17), false, true, "which cannot be compared to nil"},
+ equalsTestCase{uintptr(17), false, true, "which cannot be compared to nil"},
+ equalsTestCase{[...]int{}, false, true, "which cannot be compared to nil"},
+ equalsTestCase{"taco", false, true, "which cannot be compared to nil"},
+ equalsTestCase{equalsTestCase{}, false, true, "which cannot be compared to nil"},
+ equalsTestCase{unsafe.Pointer(&someInt), false, true, "which cannot be compared to nil"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegerLiteral() {
+ // -2^30
+ matcher := Equals(-1073741824)
+ ExpectEq("-1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -1073741824.
+ equalsTestCase{-1073741824, true, false, ""},
+ equalsTestCase{-1073741824.0, true, false, ""},
+ equalsTestCase{-1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(-1073741824), true, false, ""},
+ equalsTestCase{int32(-1073741824), true, false, ""},
+ equalsTestCase{int64(-1073741824), true, false, ""},
+ equalsTestCase{float32(-1073741824), true, false, ""},
+ equalsTestCase{float64(-1073741824), true, false, ""},
+ equalsTestCase{complex64(-1073741824), true, false, ""},
+ equalsTestCase{complex128(-1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(-1073741824)), true, false, ""},
+
+ // Values that would be -1073741824 in two's complement.
+ equalsTestCase{uint((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 1073741824), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-1073741823), false, false, ""},
+ equalsTestCase{int32(-1073741823), false, false, ""},
+ equalsTestCase{int64(-1073741823), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float64(-1073741824.1), false, false, ""},
+ equalsTestCase{float64(-1073741823.9), false, false, ""},
+ equalsTestCase{complex128(-1073741823), false, false, ""},
+ equalsTestCase{complex128(-1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegerLiteral() {
+ // 2^30
+ matcher := Equals(1073741824)
+ ExpectEq("1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1073741824.
+ equalsTestCase{1073741824, true, false, ""},
+ equalsTestCase{1073741824.0, true, false, ""},
+ equalsTestCase{1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(1073741824), true, false, ""},
+ equalsTestCase{uint(1073741824), true, false, ""},
+ equalsTestCase{int32(1073741824), true, false, ""},
+ equalsTestCase{int64(1073741824), true, false, ""},
+ equalsTestCase{uint32(1073741824), true, false, ""},
+ equalsTestCase{uint64(1073741824), true, false, ""},
+ equalsTestCase{float32(1073741824), true, false, ""},
+ equalsTestCase{float64(1073741824), true, false, ""},
+ equalsTestCase{complex64(1073741824), true, false, ""},
+ equalsTestCase{complex128(1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(1073741824)), true, false, ""},
+ equalsTestCase{interface{}(uint(1073741824)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1073741823), false, false, ""},
+ equalsTestCase{int32(1073741823), false, false, ""},
+ equalsTestCase{int64(1073741823), false, false, ""},
+ equalsTestCase{float64(1073741824.1), false, false, ""},
+ equalsTestCase{float64(1073741823.9), false, false, ""},
+ equalsTestCase{complex128(1073741823), false, false, ""},
+ equalsTestCase{complex128(1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Floating point literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegralFloatingPointLiteral() {
+ // -2^30
+ matcher := Equals(-1073741824.0)
+ ExpectEq("-1.073741824e+09", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -1073741824.
+ equalsTestCase{-1073741824, true, false, ""},
+ equalsTestCase{-1073741824.0, true, false, ""},
+ equalsTestCase{-1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(-1073741824), true, false, ""},
+ equalsTestCase{int32(-1073741824), true, false, ""},
+ equalsTestCase{int64(-1073741824), true, false, ""},
+ equalsTestCase{float32(-1073741824), true, false, ""},
+ equalsTestCase{float64(-1073741824), true, false, ""},
+ equalsTestCase{complex64(-1073741824), true, false, ""},
+ equalsTestCase{complex128(-1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(-1073741824)), true, false, ""},
+ equalsTestCase{interface{}(float64(-1073741824)), true, false, ""},
+
+ // Values that would be -1073741824 in two's complement.
+ equalsTestCase{uint((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 1073741824), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-1073741823), false, false, ""},
+ equalsTestCase{int32(-1073741823), false, false, ""},
+ equalsTestCase{int64(-1073741823), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float64(-1073741824.1), false, false, ""},
+ equalsTestCase{float64(-1073741823.9), false, false, ""},
+ equalsTestCase{complex128(-1073741823), false, false, ""},
+ equalsTestCase{complex128(-1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegralFloatingPointLiteral() {
+ // 2^30
+ matcher := Equals(1073741824.0)
+ ExpectEq("1.073741824e+09", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1073741824.
+ equalsTestCase{1073741824, true, false, ""},
+ equalsTestCase{1073741824.0, true, false, ""},
+ equalsTestCase{1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(1073741824), true, false, ""},
+ equalsTestCase{int32(1073741824), true, false, ""},
+ equalsTestCase{int64(1073741824), true, false, ""},
+ equalsTestCase{uint(1073741824), true, false, ""},
+ equalsTestCase{uint32(1073741824), true, false, ""},
+ equalsTestCase{uint64(1073741824), true, false, ""},
+ equalsTestCase{float32(1073741824), true, false, ""},
+ equalsTestCase{float64(1073741824), true, false, ""},
+ equalsTestCase{complex64(1073741824), true, false, ""},
+ equalsTestCase{complex128(1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(1073741824)), true, false, ""},
+ equalsTestCase{interface{}(float64(1073741824)), true, false, ""},
+
+ // Values that would be -1073741824 in two's complement.
+ equalsTestCase{uint((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 1073741824), false, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1073741823), false, false, ""},
+ equalsTestCase{int32(1073741823), false, false, ""},
+ equalsTestCase{int64(1073741823), false, false, ""},
+ equalsTestCase{uint(1073741823), false, false, ""},
+ equalsTestCase{uint32(1073741823), false, false, ""},
+ equalsTestCase{uint64(1073741823), false, false, ""},
+ equalsTestCase{float64(1073741824.1), false, false, ""},
+ equalsTestCase{float64(1073741823.9), false, false, ""},
+ equalsTestCase{complex128(1073741823), false, false, ""},
+ equalsTestCase{complex128(1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonIntegralFloatingPointLiteral() {
+ matcher := Equals(17.1)
+ ExpectEq("17.1", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 17.1.
+ equalsTestCase{17.1, true, false, ""},
+ equalsTestCase{17.1 + 0i, true, false, ""},
+ equalsTestCase{float32(17.1), true, false, ""},
+ equalsTestCase{float64(17.1), true, false, ""},
+ equalsTestCase{complex64(17.1), true, false, ""},
+ equalsTestCase{complex128(17.1), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{17, false, false, ""},
+ equalsTestCase{17.2, false, false, ""},
+ equalsTestCase{18, false, false, ""},
+ equalsTestCase{int(17), false, false, ""},
+ equalsTestCase{int(18), false, false, ""},
+ equalsTestCase{int32(17), false, false, ""},
+ equalsTestCase{int64(17), false, false, ""},
+ equalsTestCase{uint(17), false, false, ""},
+ equalsTestCase{uint32(17), false, false, ""},
+ equalsTestCase{uint64(17), false, false, ""},
+ equalsTestCase{complex128(17.1 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// bool
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) False() {
+ matcher := Equals(false)
+ ExpectEq("false", matcher.Description())
+
+ cases := []equalsTestCase{
+ // bools
+ equalsTestCase{false, true, false, ""},
+ equalsTestCase{bool(false), true, false, ""},
+
+ equalsTestCase{true, false, false, ""},
+ equalsTestCase{bool(true), false, false, ""},
+
+ // Other types.
+ equalsTestCase{int(0), false, true, "which is not a bool"},
+ equalsTestCase{int8(0), false, true, "which is not a bool"},
+ equalsTestCase{int16(0), false, true, "which is not a bool"},
+ equalsTestCase{int32(0), false, true, "which is not a bool"},
+ equalsTestCase{int64(0), false, true, "which is not a bool"},
+ equalsTestCase{uint(0), false, true, "which is not a bool"},
+ equalsTestCase{uint8(0), false, true, "which is not a bool"},
+ equalsTestCase{uint16(0), false, true, "which is not a bool"},
+ equalsTestCase{uint32(0), false, true, "which is not a bool"},
+ equalsTestCase{uint64(0), false, true, "which is not a bool"},
+ equalsTestCase{uintptr(0), false, true, "which is not a bool"},
+ equalsTestCase{[...]int{}, false, true, "which is not a bool"},
+ equalsTestCase{make(chan int), false, true, "which is not a bool"},
+ equalsTestCase{func() {}, false, true, "which is not a bool"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a bool"},
+ equalsTestCase{&someInt, false, true, "which is not a bool"},
+ equalsTestCase{[]int{}, false, true, "which is not a bool"},
+ equalsTestCase{"taco", false, true, "which is not a bool"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a bool"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) True() {
+ matcher := Equals(true)
+ ExpectEq("true", matcher.Description())
+
+ cases := []equalsTestCase{
+ // bools
+ equalsTestCase{true, true, false, ""},
+ equalsTestCase{bool(true), true, false, ""},
+
+ equalsTestCase{false, false, false, ""},
+ equalsTestCase{bool(false), false, false, ""},
+
+ // Other types.
+ equalsTestCase{int(1), false, true, "which is not a bool"},
+ equalsTestCase{int8(1), false, true, "which is not a bool"},
+ equalsTestCase{int16(1), false, true, "which is not a bool"},
+ equalsTestCase{int32(1), false, true, "which is not a bool"},
+ equalsTestCase{int64(1), false, true, "which is not a bool"},
+ equalsTestCase{uint(1), false, true, "which is not a bool"},
+ equalsTestCase{uint8(1), false, true, "which is not a bool"},
+ equalsTestCase{uint16(1), false, true, "which is not a bool"},
+ equalsTestCase{uint32(1), false, true, "which is not a bool"},
+ equalsTestCase{uint64(1), false, true, "which is not a bool"},
+ equalsTestCase{uintptr(1), false, true, "which is not a bool"},
+ equalsTestCase{[...]int{}, false, true, "which is not a bool"},
+ equalsTestCase{make(chan int), false, true, "which is not a bool"},
+ equalsTestCase{func() {}, false, true, "which is not a bool"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a bool"},
+ equalsTestCase{&someInt, false, true, "which is not a bool"},
+ equalsTestCase{[]int{}, false, true, "which is not a bool"},
+ equalsTestCase{"taco", false, true, "which is not a bool"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a bool"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// int
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeInt() {
+ // -2^30
+ matcher := Equals(int(-1073741824))
+ ExpectEq("-1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -1073741824.
+ equalsTestCase{-1073741824, true, false, ""},
+ equalsTestCase{-1073741824.0, true, false, ""},
+ equalsTestCase{-1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(-1073741824), true, false, ""},
+ equalsTestCase{int32(-1073741824), true, false, ""},
+ equalsTestCase{int64(-1073741824), true, false, ""},
+ equalsTestCase{float32(-1073741824), true, false, ""},
+ equalsTestCase{float64(-1073741824), true, false, ""},
+ equalsTestCase{complex64(-1073741824), true, false, ""},
+ equalsTestCase{complex128(-1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(-1073741824)), true, false, ""},
+
+ // Values that would be -1073741824 in two's complement.
+ equalsTestCase{uint((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 1073741824), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-1073741823), false, false, ""},
+ equalsTestCase{int32(-1073741823), false, false, ""},
+ equalsTestCase{int64(-1073741823), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float64(-1073741824.1), false, false, ""},
+ equalsTestCase{float64(-1073741823.9), false, false, ""},
+ equalsTestCase{complex128(-1073741823), false, false, ""},
+ equalsTestCase{complex128(-1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveInt() {
+ // 2^30
+ matcher := Equals(int(1073741824))
+ ExpectEq("1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1073741824.
+ equalsTestCase{1073741824, true, false, ""},
+ equalsTestCase{1073741824.0, true, false, ""},
+ equalsTestCase{1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(1073741824), true, false, ""},
+ equalsTestCase{uint(1073741824), true, false, ""},
+ equalsTestCase{int32(1073741824), true, false, ""},
+ equalsTestCase{int64(1073741824), true, false, ""},
+ equalsTestCase{uint32(1073741824), true, false, ""},
+ equalsTestCase{uint64(1073741824), true, false, ""},
+ equalsTestCase{float32(1073741824), true, false, ""},
+ equalsTestCase{float64(1073741824), true, false, ""},
+ equalsTestCase{complex64(1073741824), true, false, ""},
+ equalsTestCase{complex128(1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(1073741824)), true, false, ""},
+ equalsTestCase{interface{}(uint(1073741824)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1073741823), false, false, ""},
+ equalsTestCase{int32(1073741823), false, false, ""},
+ equalsTestCase{int64(1073741823), false, false, ""},
+ equalsTestCase{float64(1073741824.1), false, false, ""},
+ equalsTestCase{float64(1073741823.9), false, false, ""},
+ equalsTestCase{complex128(1073741823), false, false, ""},
+ equalsTestCase{complex128(1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// int8
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeInt8() {
+ matcher := Equals(int8(-17))
+ ExpectEq("-17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -17.
+ equalsTestCase{-17, true, false, ""},
+ equalsTestCase{-17.0, true, false, ""},
+ equalsTestCase{-17 + 0i, true, false, ""},
+ equalsTestCase{int(-17), true, false, ""},
+ equalsTestCase{int8(-17), true, false, ""},
+ equalsTestCase{int16(-17), true, false, ""},
+ equalsTestCase{int32(-17), true, false, ""},
+ equalsTestCase{int64(-17), true, false, ""},
+ equalsTestCase{float32(-17), true, false, ""},
+ equalsTestCase{float64(-17), true, false, ""},
+ equalsTestCase{complex64(-17), true, false, ""},
+ equalsTestCase{complex128(-17), true, false, ""},
+ equalsTestCase{interface{}(int(-17)), true, false, ""},
+
+ // Values that would be -17 in two's complement.
+ equalsTestCase{uint((1 << 32) - 17), false, false, ""},
+ equalsTestCase{uint8((1 << 8) - 17), false, false, ""},
+ equalsTestCase{uint16((1 << 16) - 17), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 17), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 17), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-16), false, false, ""},
+ equalsTestCase{int8(-16), false, false, ""},
+ equalsTestCase{int16(-16), false, false, ""},
+ equalsTestCase{int32(-16), false, false, ""},
+ equalsTestCase{int64(-16), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float32(-17.1), false, false, ""},
+ equalsTestCase{float32(-16.9), false, false, ""},
+ equalsTestCase{complex64(-16), false, false, ""},
+ equalsTestCase{complex64(-17 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr((1 << 32) - 17), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{-17}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{-17}, false, true, "which is not numeric"},
+ equalsTestCase{"-17", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroInt8() {
+ matcher := Equals(int8(0))
+ ExpectEq("0", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 0.
+ equalsTestCase{0, true, false, ""},
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(int(0)), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1), false, false, ""},
+ equalsTestCase{int8(1), false, false, ""},
+ equalsTestCase{int16(1), false, false, ""},
+ equalsTestCase{int32(1), false, false, ""},
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{float32(-0.1), false, false, ""},
+ equalsTestCase{float32(0.1), false, false, ""},
+ equalsTestCase{complex64(1), false, false, ""},
+ equalsTestCase{complex64(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{0}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{0}, false, true, "which is not numeric"},
+ equalsTestCase{"0", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveInt8() {
+ matcher := Equals(int8(17))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 17.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(17), true, false, ""},
+ equalsTestCase{int8(17), true, false, ""},
+ equalsTestCase{int16(17), true, false, ""},
+ equalsTestCase{int32(17), true, false, ""},
+ equalsTestCase{int64(17), true, false, ""},
+ equalsTestCase{float32(17), true, false, ""},
+ equalsTestCase{float64(17), true, false, ""},
+ equalsTestCase{complex64(17), true, false, ""},
+ equalsTestCase{complex128(17), true, false, ""},
+ equalsTestCase{interface{}(int(17)), true, false, ""},
+ equalsTestCase{uint(17), true, false, ""},
+ equalsTestCase{uint8(17), true, false, ""},
+ equalsTestCase{uint16(17), true, false, ""},
+ equalsTestCase{uint32(17), true, false, ""},
+ equalsTestCase{uint64(17), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(16), false, false, ""},
+ equalsTestCase{int8(16), false, false, ""},
+ equalsTestCase{int16(16), false, false, ""},
+ equalsTestCase{int32(16), false, false, ""},
+ equalsTestCase{int64(16), false, false, ""},
+ equalsTestCase{float32(16.9), false, false, ""},
+ equalsTestCase{float32(17.1), false, false, ""},
+ equalsTestCase{complex64(16), false, false, ""},
+ equalsTestCase{complex64(17 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(17), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{17}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{17}, false, true, "which is not numeric"},
+ equalsTestCase{"17", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// int16
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeInt16() {
+ matcher := Equals(int16(-32766))
+ ExpectEq("-32766", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -32766.
+ equalsTestCase{-32766, true, false, ""},
+ equalsTestCase{-32766.0, true, false, ""},
+ equalsTestCase{-32766 + 0i, true, false, ""},
+ equalsTestCase{int(-32766), true, false, ""},
+ equalsTestCase{int16(-32766), true, false, ""},
+ equalsTestCase{int32(-32766), true, false, ""},
+ equalsTestCase{int64(-32766), true, false, ""},
+ equalsTestCase{float32(-32766), true, false, ""},
+ equalsTestCase{float64(-32766), true, false, ""},
+ equalsTestCase{complex64(-32766), true, false, ""},
+ equalsTestCase{complex128(-32766), true, false, ""},
+ equalsTestCase{interface{}(int(-32766)), true, false, ""},
+
+ // Values that would be -32766 in two's complement.
+ equalsTestCase{uint((1 << 32) - 32766), false, false, ""},
+ equalsTestCase{uint16((1 << 16) - 32766), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 32766), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 32766), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-16), false, false, ""},
+ equalsTestCase{int8(-16), false, false, ""},
+ equalsTestCase{int16(-16), false, false, ""},
+ equalsTestCase{int32(-16), false, false, ""},
+ equalsTestCase{int64(-16), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float32(-32766.1), false, false, ""},
+ equalsTestCase{float32(-32765.9), false, false, ""},
+ equalsTestCase{complex64(-32766.1), false, false, ""},
+ equalsTestCase{complex64(-32766 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr((1 << 32) - 32766), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{-32766}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{-32766}, false, true, "which is not numeric"},
+ equalsTestCase{"-32766", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroInt16() {
+ matcher := Equals(int16(0))
+ ExpectEq("0", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 0.
+ equalsTestCase{0, true, false, ""},
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(int(0)), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1), false, false, ""},
+ equalsTestCase{int8(1), false, false, ""},
+ equalsTestCase{int16(1), false, false, ""},
+ equalsTestCase{int32(1), false, false, ""},
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{float32(-0.1), false, false, ""},
+ equalsTestCase{float32(0.1), false, false, ""},
+ equalsTestCase{complex64(1), false, false, ""},
+ equalsTestCase{complex64(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{0}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{0}, false, true, "which is not numeric"},
+ equalsTestCase{"0", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveInt16() {
+ matcher := Equals(int16(32765))
+ ExpectEq("32765", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 32765.
+ equalsTestCase{32765, true, false, ""},
+ equalsTestCase{32765.0, true, false, ""},
+ equalsTestCase{32765 + 0i, true, false, ""},
+ equalsTestCase{int(32765), true, false, ""},
+ equalsTestCase{int16(32765), true, false, ""},
+ equalsTestCase{int32(32765), true, false, ""},
+ equalsTestCase{int64(32765), true, false, ""},
+ equalsTestCase{float32(32765), true, false, ""},
+ equalsTestCase{float64(32765), true, false, ""},
+ equalsTestCase{complex64(32765), true, false, ""},
+ equalsTestCase{complex128(32765), true, false, ""},
+ equalsTestCase{interface{}(int(32765)), true, false, ""},
+ equalsTestCase{uint(32765), true, false, ""},
+ equalsTestCase{uint16(32765), true, false, ""},
+ equalsTestCase{uint32(32765), true, false, ""},
+ equalsTestCase{uint64(32765), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(32764), false, false, ""},
+ equalsTestCase{int16(32764), false, false, ""},
+ equalsTestCase{int32(32764), false, false, ""},
+ equalsTestCase{int64(32764), false, false, ""},
+ equalsTestCase{float32(32764.9), false, false, ""},
+ equalsTestCase{float32(32765.1), false, false, ""},
+ equalsTestCase{complex64(32765.9), false, false, ""},
+ equalsTestCase{complex64(32765 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(32765), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{32765}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{32765}, false, true, "which is not numeric"},
+ equalsTestCase{"32765", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// int32
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeInt32() {
+ // -2^30
+ matcher := Equals(int32(-1073741824))
+ ExpectEq("-1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -1073741824.
+ equalsTestCase{-1073741824, true, false, ""},
+ equalsTestCase{-1073741824.0, true, false, ""},
+ equalsTestCase{-1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(-1073741824), true, false, ""},
+ equalsTestCase{int32(-1073741824), true, false, ""},
+ equalsTestCase{int64(-1073741824), true, false, ""},
+ equalsTestCase{float32(-1073741824), true, false, ""},
+ equalsTestCase{float64(-1073741824), true, false, ""},
+ equalsTestCase{complex64(-1073741824), true, false, ""},
+ equalsTestCase{complex128(-1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(-1073741824)), true, false, ""},
+
+ // Values that would be -1073741824 in two's complement.
+ equalsTestCase{uint((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 1073741824), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-1073741823), false, false, ""},
+ equalsTestCase{int32(-1073741823), false, false, ""},
+ equalsTestCase{int64(-1073741823), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float64(-1073741824.1), false, false, ""},
+ equalsTestCase{float64(-1073741823.9), false, false, ""},
+ equalsTestCase{complex128(-1073741823), false, false, ""},
+ equalsTestCase{complex128(-1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveInt32() {
+ // 2^30
+ matcher := Equals(int32(1073741824))
+ ExpectEq("1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1073741824.
+ equalsTestCase{1073741824, true, false, ""},
+ equalsTestCase{1073741824.0, true, false, ""},
+ equalsTestCase{1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(1073741824), true, false, ""},
+ equalsTestCase{uint(1073741824), true, false, ""},
+ equalsTestCase{int32(1073741824), true, false, ""},
+ equalsTestCase{int64(1073741824), true, false, ""},
+ equalsTestCase{uint32(1073741824), true, false, ""},
+ equalsTestCase{uint64(1073741824), true, false, ""},
+ equalsTestCase{float32(1073741824), true, false, ""},
+ equalsTestCase{float64(1073741824), true, false, ""},
+ equalsTestCase{complex64(1073741824), true, false, ""},
+ equalsTestCase{complex128(1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(1073741824)), true, false, ""},
+ equalsTestCase{interface{}(uint(1073741824)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1073741823), false, false, ""},
+ equalsTestCase{int32(1073741823), false, false, ""},
+ equalsTestCase{int64(1073741823), false, false, ""},
+ equalsTestCase{float64(1073741824.1), false, false, ""},
+ equalsTestCase{float64(1073741823.9), false, false, ""},
+ equalsTestCase{complex128(1073741823), false, false, ""},
+ equalsTestCase{complex128(1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// int64
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeInt64() {
+ // -2^40
+ matcher := Equals(int64(-1099511627776))
+ ExpectEq("-1099511627776", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -1099511627776.
+ equalsTestCase{-1099511627776.0, true, false, ""},
+ equalsTestCase{-1099511627776 + 0i, true, false, ""},
+ equalsTestCase{int64(-1099511627776), true, false, ""},
+ equalsTestCase{float32(-1099511627776), true, false, ""},
+ equalsTestCase{float64(-1099511627776), true, false, ""},
+ equalsTestCase{complex64(-1099511627776), true, false, ""},
+ equalsTestCase{complex128(-1099511627776), true, false, ""},
+ equalsTestCase{interface{}(int64(-1099511627776)), true, false, ""},
+
+ // Values that would be -1099511627776 in two's complement.
+ equalsTestCase{uint64((1 << 64) - 1099511627776), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int64(-1099511627775), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float64(-1099511627776.1), false, false, ""},
+ equalsTestCase{float64(-1099511627775.9), false, false, ""},
+ equalsTestCase{complex128(-1099511627775), false, false, ""},
+ equalsTestCase{complex128(-1099511627776 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveInt64() {
+ // 2^40
+ matcher := Equals(int64(1099511627776))
+ ExpectEq("1099511627776", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1099511627776.
+ equalsTestCase{1099511627776.0, true, false, ""},
+ equalsTestCase{1099511627776 + 0i, true, false, ""},
+ equalsTestCase{int64(1099511627776), true, false, ""},
+ equalsTestCase{uint64(1099511627776), true, false, ""},
+ equalsTestCase{float32(1099511627776), true, false, ""},
+ equalsTestCase{float64(1099511627776), true, false, ""},
+ equalsTestCase{complex64(1099511627776), true, false, ""},
+ equalsTestCase{complex128(1099511627776), true, false, ""},
+ equalsTestCase{interface{}(int64(1099511627776)), true, false, ""},
+ equalsTestCase{interface{}(uint64(1099511627776)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(1099511627775), false, false, ""},
+ equalsTestCase{uint64(1099511627775), false, false, ""},
+ equalsTestCase{float64(1099511627776.1), false, false, ""},
+ equalsTestCase{float64(1099511627775.9), false, false, ""},
+ equalsTestCase{complex128(1099511627775), false, false, ""},
+ equalsTestCase{complex128(1099511627776 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Int64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(int64(kTwoTo25 + 1))
+ ExpectEq("33554433", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
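+
+// float32RoundingNearTwoTo25 is an illustrative sketch added editorially; it
+// is not part of the upstream oglematchers test suite and is not referenced
+// by the tests above. It shows the rounding the previous test relies on:
+// float32 carries a 24-bit significand, so values in [2^25, 2^26) are spaced
+// 4 apart and every integer in [2^25-1, 2^25+2] converts to the same float32
+// (33554432) that the expected value 2^25+1 does.
+func float32RoundingNearTwoTo25() {
+ const kTwoTo25 = int64(1) << 25
+ for _, n := range []int64{kTwoTo25 - 2, kTwoTo25 - 1, kTwoTo25 + 1, kTwoTo25 + 3} {
+ // Prints 33554430, 33554432, 33554432 and 33554436 respectively: only
+ // 2^25-2 and 2^25+3 convert to float32 values distinct from float32(2^25+1).
+ fmt.Printf("%d -> %.0f\n", n, float32(n))
+ }
+}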
+
+func (t *EqualsTest) Int64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := Equals(int64(kTwoTo54 + 1))
+ ExpectEq("18014398509481985", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
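+
+// float64RoundingNearTwoTo54 mirrors the sketch above for double precision
+// and is likewise an editorial addition, not part of the upstream file.
+// float64 carries a 53-bit significand, so values in [2^54, 2^55) are spaced
+// 4 apart and every integer in [2^54-1, 2^54+2] converts to the same float64
+// (2^54) that the expected value 2^54+1 does.
+func float64RoundingNearTwoTo54() {
+ const kTwoTo54 = int64(1) << 54
+ for _, n := range []int64{kTwoTo54 - 2, kTwoTo54 - 1, kTwoTo54 + 1, kTwoTo54 + 3} {
+ // Only 2^54-2 and 2^54+3 convert to float64 values distinct from
+ // float64(2^54 + 1).
+ fmt.Printf("%d -> %.0f\n", n, float64(n))
+ }
+}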
+
+////////////////////////////////////////////////////////////////////////
+// uint
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUint() {
+ const kExpected = 17
+ matcher := Equals(uint(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeUint() {
+ const kExpected = (1 << 16) + 17
+ matcher := Equals(uint(kExpected))
+ ExpectEq("65553", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{65553, true, false, ""},
+ equalsTestCase{65553.0, true, false, ""},
+ equalsTestCase{65553 + 0i, true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{int16(17), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(17), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) UintNotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(uint(kTwoTo25 + 1))
+ ExpectEq("33554433", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// uint8
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUint8() {
+ const kExpected = 17
+ matcher := Equals(uint8(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// uint16
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUint16() {
+ const kExpected = 17
+ matcher := Equals(uint16(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeUint16() {
+ const kExpected = (1 << 8) + 17
+ matcher := Equals(uint16(kExpected))
+ ExpectEq("273", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{273, true, false, ""},
+ equalsTestCase{273.0, true, false, ""},
+ equalsTestCase{273 + 0i, true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{int8(17), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(17), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// uint32
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUint32() {
+ const kExpected = 17
+ matcher := Equals(uint32(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeUint32() {
+ const kExpected = (1 << 16) + 17
+ matcher := Equals(uint32(kExpected))
+ ExpectEq("65553", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{65553, true, false, ""},
+ equalsTestCase{65553.0, true, false, ""},
+ equalsTestCase{65553 + 0i, true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{int16(17), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(17), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Uint32NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
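+ //
+ // A minimal sketch of the rounding involved, assuming the IEEE-754 binary32
+ // semantics Go uses for float32: the 24-bit significand means that in
+ // [2^25, 2^26) only every fourth integer is representable, so rounding to
+ // nearest (ties to even) gives:
+ //
+ //   float32(2^25 - 2) == 33554430   (exact; the spacing is still 2 below 2^25)
+ //   float32(2^25 - 1) == 33554432
+ //   float32(2^25 + 0) == 33554432   (exact)
+ //   float32(2^25 + 1) == 33554432
+ //   float32(2^25 + 2) == 33554432   (a tie, rounded to the even significand)
+ //   float32(2^25 + 3) == 33554436
+ //
+ // which is exactly the pattern the float32 and complex64 cases below expect.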
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(uint32(kTwoTo25 + 1))
+ ExpectEq("33554433", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// uint64
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUint64() {
+ const kExpected = 17
+ matcher := Equals(uint64(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeUint64() {
+ const kExpected = (1 << 32) + 17
+ matcher := Equals(uint64(kExpected))
+ ExpectEq("4294967313", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{4294967313.0, true, false, ""},
+ equalsTestCase{4294967313 + 0i, true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{int(17), false, false, ""},
+ equalsTestCase{int32(17), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(17), false, false, ""},
+ equalsTestCase{uint32(17), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Uint64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
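+ //
+ // (This is the same float32 collapse sketched in
+ // Uint32NotExactlyRepresentableBySinglePrecision above; only the Go type of
+ // the expected value differs.)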
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(uint64(kTwoTo25 + 1))
+ ExpectEq("33554433", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Uint64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
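+ //
+ // A minimal sketch, assuming the IEEE-754 binary64 semantics Go uses for
+ // float64: the 53-bit significand means that in [2^54, 2^55) only every
+ // fourth integer is representable, so rounding to nearest (ties to even)
+ // gives:
+ //
+ //   float64(2^54 - 2) == 18014398509481982   (exact; spacing is still 2 below 2^54)
+ //   float64(2^54 - 1) == 18014398509481984
+ //   float64(2^54 + 0) == 18014398509481984   (exact)
+ //   float64(2^54 + 1) == 18014398509481984
+ //   float64(2^54 + 2) == 18014398509481984   (a tie, rounded to the even significand)
+ //   float64(2^54 + 3) == 18014398509481988
+ //
+ // which is the pattern the float64 and complex128 cases below expect.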
+ const kTwoTo54 = 1 << 54
+ matcher := Equals(uint64(kTwoTo54 + 1))
+ ExpectEq("18014398509481985", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// uintptr
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilUintptr() {
+ var ptr1 uintptr
+ var ptr2 uintptr
+
+ matcher := Equals(ptr1)
+ ExpectEq("0", matcher.Description())
+
+ cases := []equalsTestCase{
+ // uintptrs
+ equalsTestCase{ptr1, true, false, ""},
+ equalsTestCase{ptr2, true, false, ""},
+ equalsTestCase{uintptr(0), true, false, ""},
+ equalsTestCase{uintptr(17), false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a uintptr"},
+ equalsTestCase{bool(false), false, true, "which is not a uintptr"},
+ equalsTestCase{int(0), false, true, "which is not a uintptr"},
+ equalsTestCase{int8(0), false, true, "which is not a uintptr"},
+ equalsTestCase{int16(0), false, true, "which is not a uintptr"},
+ equalsTestCase{int32(0), false, true, "which is not a uintptr"},
+ equalsTestCase{int64(0), false, true, "which is not a uintptr"},
+ equalsTestCase{uint(0), false, true, "which is not a uintptr"},
+ equalsTestCase{uint8(0), false, true, "which is not a uintptr"},
+ equalsTestCase{uint16(0), false, true, "which is not a uintptr"},
+ equalsTestCase{uint32(0), false, true, "which is not a uintptr"},
+ equalsTestCase{uint64(0), false, true, "which is not a uintptr"},
+ equalsTestCase{true, false, true, "which is not a uintptr"},
+ equalsTestCase{[...]int{}, false, true, "which is not a uintptr"},
+ equalsTestCase{make(chan int), false, true, "which is not a uintptr"},
+ equalsTestCase{func() {}, false, true, "which is not a uintptr"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a uintptr"},
+ equalsTestCase{&someInt, false, true, "which is not a uintptr"},
+ equalsTestCase{[]int{}, false, true, "which is not a uintptr"},
+ equalsTestCase{"taco", false, true, "which is not a uintptr"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a uintptr"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilUintptr() {
+ matcher := Equals(uintptr(17))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // uintptrs
+ equalsTestCase{uintptr(17), true, false, ""},
+ equalsTestCase{uintptr(16), false, false, ""},
+ equalsTestCase{uintptr(0), false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a uintptr"},
+ equalsTestCase{bool(false), false, true, "which is not a uintptr"},
+ equalsTestCase{int(0), false, true, "which is not a uintptr"},
+ equalsTestCase{int8(0), false, true, "which is not a uintptr"},
+ equalsTestCase{int16(0), false, true, "which is not a uintptr"},
+ equalsTestCase{int32(0), false, true, "which is not a uintptr"},
+ equalsTestCase{int64(0), false, true, "which is not a uintptr"},
+ equalsTestCase{uint(0), false, true, "which is not a uintptr"},
+ equalsTestCase{uint8(0), false, true, "which is not a uintptr"},
+ equalsTestCase{uint16(0), false, true, "which is not a uintptr"},
+ equalsTestCase{uint32(0), false, true, "which is not a uintptr"},
+ equalsTestCase{uint64(0), false, true, "which is not a uintptr"},
+ equalsTestCase{true, false, true, "which is not a uintptr"},
+ equalsTestCase{[...]int{}, false, true, "which is not a uintptr"},
+ equalsTestCase{make(chan int), false, true, "which is not a uintptr"},
+ equalsTestCase{func() {}, false, true, "which is not a uintptr"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a uintptr"},
+ equalsTestCase{&someInt, false, true, "which is not a uintptr"},
+ equalsTestCase{[]int{}, false, true, "which is not a uintptr"},
+ equalsTestCase{"taco", false, true, "which is not a uintptr"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a uintptr"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// float32
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegralFloat32() {
+ matcher := Equals(float32(-32769))
+ ExpectEq("-32769", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -32769.
+ equalsTestCase{-32769.0, true, false, ""},
+ equalsTestCase{-32769 + 0i, true, false, ""},
+ equalsTestCase{int32(-32769), true, false, ""},
+ equalsTestCase{int64(-32769), true, false, ""},
+ equalsTestCase{float32(-32769), true, false, ""},
+ equalsTestCase{float64(-32769), true, false, ""},
+ equalsTestCase{complex64(-32769), true, false, ""},
+ equalsTestCase{complex128(-32769), true, false, ""},
+ equalsTestCase{interface{}(float32(-32769)), true, false, ""},
+ equalsTestCase{interface{}(int64(-32769)), true, false, ""},
+
+ // Values that would be -32769 in two's complement.
+ equalsTestCase{uint64((1 << 64) - 32769), false, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(-32770), false, false, ""},
+ equalsTestCase{float32(-32769.1), false, false, ""},
+ equalsTestCase{float32(-32768.9), false, false, ""},
+ equalsTestCase{float64(-32769.1), false, false, ""},
+ equalsTestCase{float64(-32768.9), false, false, ""},
+ equalsTestCase{complex128(-32768), false, false, ""},
+ equalsTestCase{complex128(-32769 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NegativeNonIntegralFloat32() {
+ matcher := Equals(float32(-32769.1))
+ ExpectEq("-32769.1", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -32769.1.
+ equalsTestCase{-32769.1, true, false, ""},
+ equalsTestCase{-32769.1 + 0i, true, false, ""},
+ equalsTestCase{float32(-32769.1), true, false, ""},
+ equalsTestCase{float64(-32769.1), true, false, ""},
+ equalsTestCase{complex64(-32769.1), true, false, ""},
+ equalsTestCase{complex128(-32769.1), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int32(-32769), false, false, ""},
+ equalsTestCase{int32(-32770), false, false, ""},
+ equalsTestCase{int64(-32769), false, false, ""},
+ equalsTestCase{int64(-32770), false, false, ""},
+ equalsTestCase{float32(-32769.2), false, false, ""},
+ equalsTestCase{float32(-32769.0), false, false, ""},
+ equalsTestCase{float64(-32769.2), false, false, ""},
+ equalsTestCase{complex128(-32769.1 + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeNegativeFloat32() {
+ const kExpected = -1 * (1 << 65)
+ matcher := Equals(float32(kExpected))
+ ExpectEq("-3.689349e+19", matcher.Description())
+
+ floatExpected := float32(kExpected)
+ castedInt := int64(floatExpected)
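+
+ // Note: floatExpected (roughly -3.69e19) lies outside the int64 range, so per
+ // the Go spec the converted value is implementation-dependent. Whatever it
+ // is, it cannot equal kExpected, which is all the castedInt case below relies
+ // on. The later Large{Negative,Positive} float and complex tests rely on the
+ // same property for their castedInt cases.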
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroFloat32() {
+ matcher := Equals(float32(0))
+ ExpectEq("0", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of zero.
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(float32(0)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{int64(-1), false, false, ""},
+ equalsTestCase{float32(1), false, false, ""},
+ equalsTestCase{float32(-1), false, false, ""},
+ equalsTestCase{complex128(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegralFloat32() {
+ matcher := Equals(float32(32769))
+ ExpectEq("32769", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 32769.
+ equalsTestCase{32769.0, true, false, ""},
+ equalsTestCase{32769 + 0i, true, false, ""},
+ equalsTestCase{int(32769), true, false, ""},
+ equalsTestCase{int32(32769), true, false, ""},
+ equalsTestCase{int64(32769), true, false, ""},
+ equalsTestCase{uint(32769), true, false, ""},
+ equalsTestCase{uint32(32769), true, false, ""},
+ equalsTestCase{uint64(32769), true, false, ""},
+ equalsTestCase{float32(32769), true, false, ""},
+ equalsTestCase{float64(32769), true, false, ""},
+ equalsTestCase{complex64(32769), true, false, ""},
+ equalsTestCase{complex128(32769), true, false, ""},
+ equalsTestCase{interface{}(float32(32769)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(32770), false, false, ""},
+ equalsTestCase{uint64(32770), false, false, ""},
+ equalsTestCase{float32(32769.1), false, false, ""},
+ equalsTestCase{float32(32768.9), false, false, ""},
+ equalsTestCase{float64(32769.1), false, false, ""},
+ equalsTestCase{float64(32768.9), false, false, ""},
+ equalsTestCase{complex128(32768), false, false, ""},
+ equalsTestCase{complex128(32769 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveNonIntegralFloat32() {
+ matcher := Equals(float32(32769.1))
+ ExpectEq("32769.1", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 32769.1.
+ equalsTestCase{32769.1, true, false, ""},
+ equalsTestCase{32769.1 + 0i, true, false, ""},
+ equalsTestCase{float32(32769.1), true, false, ""},
+ equalsTestCase{float64(32769.1), true, false, ""},
+ equalsTestCase{complex64(32769.1), true, false, ""},
+ equalsTestCase{complex128(32769.1), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int32(32769), false, false, ""},
+ equalsTestCase{int32(32770), false, false, ""},
+ equalsTestCase{uint64(32769), false, false, ""},
+ equalsTestCase{uint64(32770), false, false, ""},
+ equalsTestCase{float32(32769.2), false, false, ""},
+ equalsTestCase{float32(32769.0), false, false, ""},
+ equalsTestCase{float64(32769.2), false, false, ""},
+ equalsTestCase{complex128(32769.1 + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargePositiveFloat32() {
+ const kExpected = 1 << 65
+ matcher := Equals(float32(kExpected))
+ ExpectEq("3.689349e+19", matcher.Description())
+
+ floatExpected := float32(kExpected)
+ castedInt := uint64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{uint64(0), false, false, ""},
+ equalsTestCase{uint64(math.MaxUint64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Float32AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
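+ //
+ // (See the rounding sketch in Uint32NotExactlyRepresentableBySinglePrecision
+ // above for why exactly this window of integers collapses to 33554432.)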
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(float32(kTwoTo25 + 1))
+ ExpectEq("3.3554432e+07", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 3), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// float64
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegralFloat64() {
+ const kExpected = -(1 << 50)
+ matcher := Equals(float64(kExpected))
+ ExpectEq("-1.125899906842624e+15", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{-1125899906842624.0, true, false, ""},
+ equalsTestCase{-1125899906842624.0 + 0i, true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Values that would be kExpected in two's complement.
+ equalsTestCase{uint64((1 << 64) + kExpected), false, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NegativeNonIntegralFloat64() {
+ const kTwoTo50 = 1 << 50
+ const kExpected = -kTwoTo50 - 0.25
+
+ matcher := Equals(float64(kExpected))
+ ExpectEq("-1.1258999068426242e+15", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(-kTwoTo50), false, false, ""},
+ equalsTestCase{int64(-kTwoTo50 - 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeNegativeFloat64() {
+ const kExpected = -1 * (1 << 65)
+ matcher := Equals(float64(kExpected))
+ ExpectEq("-3.6893488147419103e+19", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := int64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroFloat64() {
+ matcher := Equals(float64(0))
+ ExpectEq("0", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of zero.
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(float32(0)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{int64(-1), false, false, ""},
+ equalsTestCase{float32(1), false, false, ""},
+ equalsTestCase{float32(-1), false, false, ""},
+ equalsTestCase{complex128(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegralFloat64() {
+ const kExpected = 1 << 50
+ matcher := Equals(float64(kExpected))
+ ExpectEq("1.125899906842624e+15", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{1125899906842624.0, true, false, ""},
+ equalsTestCase{1125899906842624.0 + 0i, true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveNonIntegralFloat64() {
+ const kTwoTo50 = 1 << 50
+ const kExpected = kTwoTo50 + 0.25
+ matcher := Equals(float64(kExpected))
+ ExpectEq("1.1258999068426242e+15", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kTwoTo50), false, false, ""},
+ equalsTestCase{int64(kTwoTo50 - 1), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargePositiveFloat64() {
+ const kExpected = 1 << 65
+ matcher := Equals(float64(kExpected))
+ ExpectEq("3.6893488147419103e+19", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := uint64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{uint64(0), false, false, ""},
+ equalsTestCase{uint64(math.MaxUint64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Float64AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
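+ //
+ // (See the rounding sketch in Uint64NotExactlyRepresentableByDoublePrecision
+ // above for why this window of integers collapses to 2^54.)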
+ const kTwoTo54 = 1 << 54
+ matcher := Equals(float64(kTwoTo54 + 1))
+ ExpectEq("1.8014398509481984e+16", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// complex64
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegralComplex64() {
+ const kExpected = -32769
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(-32769+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{-32769.0, true, false, ""},
+ equalsTestCase{-32769.0 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Values that would be kExpected in two's complement.
+ equalsTestCase{uint32((1 << 32) + kExpected), false, false, ""},
+ equalsTestCase{uint64((1 << 64) + kExpected), false, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NegativeNonIntegralComplex64() {
+ const kTwoTo20 = 1 << 20
+ const kExpected = -kTwoTo20 - 0.25
+
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(-1.0485762e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(-kTwoTo20), false, false, ""},
+ equalsTestCase{int(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{int32(-kTwoTo20), false, false, ""},
+ equalsTestCase{int32(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{int64(-kTwoTo20), false, false, ""},
+ equalsTestCase{int64(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex64(kExpected - 0.75), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 0.75), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeNegativeComplex64() {
+ const kExpected = -1 * (1 << 65)
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(-3.689349e+19+0i)", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := int64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroComplex64() {
+ matcher := Equals(complex64(0))
+ ExpectEq("(0+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of zero.
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(float32(0)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{int64(-1), false, false, ""},
+ equalsTestCase{float32(1), false, false, ""},
+ equalsTestCase{float32(-1), false, false, ""},
+ equalsTestCase{float64(1), false, false, ""},
+ equalsTestCase{float64(-1), false, false, ""},
+ equalsTestCase{complex64(0 + 2i), false, false, ""},
+ equalsTestCase{complex128(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegralComplex64() {
+ const kExpected = 1 << 20
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(1.048576e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{1048576.0, true, false, ""},
+ equalsTestCase{1048576.0 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveNonIntegralComplex64() {
+ const kTwoTo20 = 1 << 20
+ const kExpected = kTwoTo20 + 0.25
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(1.0485762e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kTwoTo20), false, false, ""},
+ equalsTestCase{int64(kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{uint64(kTwoTo20), false, false, ""},
+ equalsTestCase{uint64(kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{float32(kExpected - 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargePositiveComplex64() {
+ const kExpected = 1 << 65
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(3.689349e+19+0i)", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := uint64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{uint64(0), false, false, ""},
+ equalsTestCase{uint64(math.MaxUint64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Complex64AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
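+	// (Concretely: float32 carries a 24-bit significand, so integers this large
+	// are not all representable; 2^25-1, 2^25+1, and 2^25+2 all round to
+	// exactly 2^25, while 2^25-2 and 2^25+3 map to distinct neighboring values.)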
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(complex64(kTwoTo25 + 1))
+ ExpectEq("(3.3554432e+07+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 3), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Complex64WithNonZeroImaginaryPart() {
+ const kRealPart = 17
+ const kImagPart = 0.25i
+ const kExpected = kRealPart + kImagPart
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(17+0.25i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kRealPart + kImagPart, true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(kRealPart), false, false, ""},
+ equalsTestCase{int8(kRealPart), false, false, ""},
+ equalsTestCase{int16(kRealPart), false, false, ""},
+ equalsTestCase{int32(kRealPart), false, false, ""},
+ equalsTestCase{int64(kRealPart), false, false, ""},
+ equalsTestCase{uint(kRealPart), false, false, ""},
+ equalsTestCase{uint8(kRealPart), false, false, ""},
+ equalsTestCase{uint16(kRealPart), false, false, ""},
+ equalsTestCase{uint32(kRealPart), false, false, ""},
+ equalsTestCase{uint64(kRealPart), false, false, ""},
+ equalsTestCase{float32(kRealPart), false, false, ""},
+ equalsTestCase{float64(kRealPart), false, false, ""},
+ equalsTestCase{complex64(kRealPart), false, false, ""},
+ equalsTestCase{complex64(kRealPart + kImagPart + 0.5), false, false, ""},
+ equalsTestCase{complex64(kRealPart + kImagPart + 0.5i), false, false, ""},
+ equalsTestCase{complex128(kRealPart), false, false, ""},
+ equalsTestCase{complex128(kRealPart + kImagPart + 0.5), false, false, ""},
+ equalsTestCase{complex128(kRealPart + kImagPart + 0.5i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// complex128
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegralComplex128() {
+ const kExpected = -32769
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(-32769+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{-32769.0, true, false, ""},
+ equalsTestCase{-32769.0 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Values that would be kExpected in two's complement.
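+		// Concretely, uint32(1<<32 - 32769) is 4294934527 and
+		// uint64(1<<64 - 32769) is 18446744073709518847, the unsigned bit
+		// patterns of -32769; neither should compare equal to it.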
+ equalsTestCase{uint32((1 << 32) + kExpected), false, false, ""},
+ equalsTestCase{uint64((1 << 64) + kExpected), false, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NegativeNonIntegralComplex128() {
+ const kTwoTo20 = 1 << 20
+ const kExpected = -kTwoTo20 - 0.25
+
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(-1.04857625e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(-kTwoTo20), false, false, ""},
+ equalsTestCase{int(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{int32(-kTwoTo20), false, false, ""},
+ equalsTestCase{int32(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{int64(-kTwoTo20), false, false, ""},
+ equalsTestCase{int64(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex64(kExpected - 0.75), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 0.75), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeNegativeComplex128() {
+ const kExpected = -1 * (1 << 65)
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(-3.6893488147419103e+19+0i)", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := int64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroComplex128() {
+ matcher := Equals(complex128(0))
+ ExpectEq("(0+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of zero.
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(float32(0)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{int64(-1), false, false, ""},
+ equalsTestCase{float32(1), false, false, ""},
+ equalsTestCase{float32(-1), false, false, ""},
+ equalsTestCase{float64(1), false, false, ""},
+ equalsTestCase{float64(-1), false, false, ""},
+ equalsTestCase{complex64(0 + 2i), false, false, ""},
+ equalsTestCase{complex128(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegralComplex128() {
+ const kExpected = 1 << 20
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(1.048576e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+		// Various types of 1048576.
+ equalsTestCase{1048576.0, true, false, ""},
+ equalsTestCase{1048576.0 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{uintptr(0), false, true, "which is not numeric"},
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveNonIntegralComplex128() {
+ const kTwoTo20 = 1 << 20
+ const kExpected = kTwoTo20 + 0.25
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(1.04857625e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kTwoTo20), false, false, ""},
+ equalsTestCase{int64(kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{uint64(kTwoTo20), false, false, ""},
+ equalsTestCase{uint64(kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{float32(kExpected - 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargePositiveComplex128() {
+ const kExpected = 1 << 65
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(3.6893488147419103e+19+0i)", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := uint64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{uint64(0), false, false, ""},
+ equalsTestCase{uint64(math.MaxUint64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Complex128AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
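+	// (Concretely: float64 carries a 53-bit significand, so 2^54-1, 2^54+1, and
+	// 2^54+2 all round to exactly 2^54 when converted, while 2^54-2 and 2^54+3
+	// map to distinct neighboring values.)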
+ const kTwoTo54 = 1 << 54
+ matcher := Equals(complex128(kTwoTo54 + 1))
+ ExpectEq("(1.8014398509481984e+16+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Complex128WithNonZeroImaginaryPart() {
+ const kRealPart = 17
+ const kImagPart = 0.25i
+ const kExpected = kRealPart + kImagPart
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(17+0.25i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kRealPart + kImagPart, true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(kRealPart), false, false, ""},
+ equalsTestCase{int8(kRealPart), false, false, ""},
+ equalsTestCase{int16(kRealPart), false, false, ""},
+ equalsTestCase{int32(kRealPart), false, false, ""},
+ equalsTestCase{int64(kRealPart), false, false, ""},
+ equalsTestCase{uint(kRealPart), false, false, ""},
+ equalsTestCase{uint8(kRealPart), false, false, ""},
+ equalsTestCase{uint16(kRealPart), false, false, ""},
+ equalsTestCase{uint32(kRealPart), false, false, ""},
+ equalsTestCase{uint64(kRealPart), false, false, ""},
+ equalsTestCase{float32(kRealPart), false, false, ""},
+ equalsTestCase{float64(kRealPart), false, false, ""},
+ equalsTestCase{complex64(kRealPart), false, false, ""},
+ equalsTestCase{complex64(kRealPart + kImagPart + 0.5), false, false, ""},
+ equalsTestCase{complex64(kRealPart + kImagPart + 0.5i), false, false, ""},
+ equalsTestCase{complex128(kRealPart), false, false, ""},
+ equalsTestCase{complex128(kRealPart + kImagPart + 0.5), false, false, ""},
+ equalsTestCase{complex128(kRealPart + kImagPart + 0.5i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Arrays
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) ArrayOfComparableType() {
+ expected := [3]uint{17, 19, 23}
+
+ matcher := Equals(expected)
+ ExpectEq("[17 19 23]", matcher.Description())
+
+ // To defeat constant de-duping by the compiler.
+	makeArray := func(i, j, k uint) [3]uint { return [3]uint{i, j, k} }
+
+ type arrayAlias [3]uint
+ type uintAlias uint
+
+ cases := []equalsTestCase{
+ // Correct types, equal.
+ equalsTestCase{expected, true, false, ""},
+ equalsTestCase{[3]uint{17, 19, 23}, true, false, ""},
+ equalsTestCase{makeArray(17, 19, 23), true, false, ""},
+
+ // Correct types, not equal.
+ equalsTestCase{[3]uint{0, 0, 0}, false, false, ""},
+ equalsTestCase{[3]uint{18, 19, 23}, false, false, ""},
+ equalsTestCase{[3]uint{17, 20, 23}, false, false, ""},
+ equalsTestCase{[3]uint{17, 19, 22}, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not [3]uint"},
+ equalsTestCase{bool(false), false, true, "which is not [3]uint"},
+ equalsTestCase{int(0), false, true, "which is not [3]uint"},
+ equalsTestCase{int8(0), false, true, "which is not [3]uint"},
+ equalsTestCase{int16(0), false, true, "which is not [3]uint"},
+ equalsTestCase{int32(0), false, true, "which is not [3]uint"},
+ equalsTestCase{int64(0), false, true, "which is not [3]uint"},
+ equalsTestCase{uint(0), false, true, "which is not [3]uint"},
+ equalsTestCase{uint8(0), false, true, "which is not [3]uint"},
+ equalsTestCase{uint16(0), false, true, "which is not [3]uint"},
+ equalsTestCase{uint32(0), false, true, "which is not [3]uint"},
+ equalsTestCase{uint64(0), false, true, "which is not [3]uint"},
+ equalsTestCase{true, false, true, "which is not [3]uint"},
+ equalsTestCase{[...]int{}, false, true, "which is not [3]uint"},
+ equalsTestCase{func() {}, false, true, "which is not [3]uint"},
+ equalsTestCase{map[int]int{}, false, true, "which is not [3]uint"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not [3]uint"},
+ equalsTestCase{[2]uint{17, 19}, false, true, "which is not [3]uint"},
+ equalsTestCase{[4]uint{17, 19, 23, 0}, false, true, "which is not [3]uint"},
+ equalsTestCase{arrayAlias{17, 19, 23}, false, true, "which is not [3]uint"},
+ equalsTestCase{[3]uintAlias{17, 19, 23}, false, true, "which is not [3]uint"},
+ equalsTestCase{[3]int32{17, 19, 23}, false, true, "which is not [3]uint"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ArrayOfNonComparableType() {
+ type nonComparableArray [2]map[string]string
+ f := func() {
+ ExpectEq(nonComparableArray{}, nonComparableArray{})
+ }
+
+ ExpectThat(f, Panics(MatchesRegexp("uncomparable.*nonComparableArray")))
+}
+
+////////////////////////////////////////////////////////////////////////
+// chan
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilChan() {
+ var nilChan1 chan int
+ var nilChan2 chan int
+ var nilChan3 chan uint
+ var nonNilChan1 chan int = make(chan int)
+ var nonNilChan2 chan uint = make(chan uint)
+
+ matcher := Equals(nilChan1)
+ ExpectEq("<nil>", matcher.Description())
+
+ cases := []equalsTestCase{
+ // int channels
+ equalsTestCase{nilChan1, true, false, ""},
+ equalsTestCase{nilChan2, true, false, ""},
+ equalsTestCase{nonNilChan1, false, false, ""},
+
+ // uint channels
+ equalsTestCase{nilChan3, false, true, "which is not a chan int"},
+ equalsTestCase{nonNilChan2, false, true, "which is not a chan int"},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a chan int"},
+ equalsTestCase{bool(false), false, true, "which is not a chan int"},
+ equalsTestCase{int(0), false, true, "which is not a chan int"},
+ equalsTestCase{int8(0), false, true, "which is not a chan int"},
+ equalsTestCase{int16(0), false, true, "which is not a chan int"},
+ equalsTestCase{int32(0), false, true, "which is not a chan int"},
+ equalsTestCase{int64(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint8(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint16(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint32(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint64(0), false, true, "which is not a chan int"},
+ equalsTestCase{true, false, true, "which is not a chan int"},
+ equalsTestCase{[...]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{func() {}, false, true, "which is not a chan int"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{&someInt, false, true, "which is not a chan int"},
+ equalsTestCase{[]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{"taco", false, true, "which is not a chan int"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a chan int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilChan() {
+ var nilChan1 chan int
+ var nilChan2 chan uint
+ var nonNilChan1 chan int = make(chan int)
+ var nonNilChan2 chan int = make(chan int)
+ var nonNilChan3 chan uint = make(chan uint)
+
+ matcher := Equals(nonNilChan1)
+ ExpectEq(fmt.Sprintf("%v", nonNilChan1), matcher.Description())
+
+ cases := []equalsTestCase{
+ // int channels
+ equalsTestCase{nonNilChan1, true, false, ""},
+ equalsTestCase{nonNilChan2, false, false, ""},
+ equalsTestCase{nilChan1, false, false, ""},
+
+ // uint channels
+ equalsTestCase{nilChan2, false, true, "which is not a chan int"},
+ equalsTestCase{nonNilChan3, false, true, "which is not a chan int"},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a chan int"},
+ equalsTestCase{bool(false), false, true, "which is not a chan int"},
+ equalsTestCase{int(0), false, true, "which is not a chan int"},
+ equalsTestCase{int8(0), false, true, "which is not a chan int"},
+ equalsTestCase{int16(0), false, true, "which is not a chan int"},
+ equalsTestCase{int32(0), false, true, "which is not a chan int"},
+ equalsTestCase{int64(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint8(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint16(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint32(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint64(0), false, true, "which is not a chan int"},
+ equalsTestCase{true, false, true, "which is not a chan int"},
+ equalsTestCase{[...]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{func() {}, false, true, "which is not a chan int"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{&someInt, false, true, "which is not a chan int"},
+ equalsTestCase{[]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{"taco", false, true, "which is not a chan int"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a chan int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ChanDirection() {
+ var chan1 chan<- int
+ var chan2 <-chan int
+ var chan3 chan int
+
+ matcher := Equals(chan1)
+ ExpectEq(fmt.Sprintf("%v", chan1), matcher.Description())
+
+ cases := []equalsTestCase{
+ equalsTestCase{chan1, true, false, ""},
+ equalsTestCase{chan2, false, true, "which is not a chan<- int"},
+ equalsTestCase{chan3, false, true, "which is not a chan<- int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// func
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) Functions() {
+ func1 := func() {}
+ func2 := func() {}
+ func3 := func(x int) {}
+
+ matcher := Equals(func1)
+ ExpectEq(fmt.Sprintf("%v", func1), matcher.Description())
+
+ cases := []equalsTestCase{
+ // Functions.
+ equalsTestCase{func1, true, false, ""},
+ equalsTestCase{func2, false, false, ""},
+ equalsTestCase{func3, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a function"},
+ equalsTestCase{bool(false), false, true, "which is not a function"},
+ equalsTestCase{int(0), false, true, "which is not a function"},
+ equalsTestCase{int8(0), false, true, "which is not a function"},
+ equalsTestCase{int16(0), false, true, "which is not a function"},
+ equalsTestCase{int32(0), false, true, "which is not a function"},
+ equalsTestCase{int64(0), false, true, "which is not a function"},
+ equalsTestCase{uint(0), false, true, "which is not a function"},
+ equalsTestCase{uint8(0), false, true, "which is not a function"},
+ equalsTestCase{uint16(0), false, true, "which is not a function"},
+ equalsTestCase{uint32(0), false, true, "which is not a function"},
+ equalsTestCase{uint64(0), false, true, "which is not a function"},
+ equalsTestCase{true, false, true, "which is not a function"},
+ equalsTestCase{[...]int{}, false, true, "which is not a function"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a function"},
+ equalsTestCase{&someInt, false, true, "which is not a function"},
+ equalsTestCase{[]int{}, false, true, "which is not a function"},
+ equalsTestCase{"taco", false, true, "which is not a function"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a function"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// map
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilMap() {
+ var nilMap1 map[int]int
+ var nilMap2 map[int]int
+ var nilMap3 map[int]uint
+ var nonNilMap1 map[int]int = make(map[int]int)
+ var nonNilMap2 map[int]uint = make(map[int]uint)
+
+ matcher := Equals(nilMap1)
+ ExpectEq("map[]", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nilMap1, true, false, ""},
+ equalsTestCase{nilMap2, true, false, ""},
+ equalsTestCase{nilMap3, true, false, ""},
+ equalsTestCase{nonNilMap1, false, false, ""},
+ equalsTestCase{nonNilMap2, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a map"},
+ equalsTestCase{bool(false), false, true, "which is not a map"},
+ equalsTestCase{int(0), false, true, "which is not a map"},
+ equalsTestCase{int8(0), false, true, "which is not a map"},
+ equalsTestCase{int16(0), false, true, "which is not a map"},
+ equalsTestCase{int32(0), false, true, "which is not a map"},
+ equalsTestCase{int64(0), false, true, "which is not a map"},
+ equalsTestCase{uint(0), false, true, "which is not a map"},
+ equalsTestCase{uint8(0), false, true, "which is not a map"},
+ equalsTestCase{uint16(0), false, true, "which is not a map"},
+ equalsTestCase{uint32(0), false, true, "which is not a map"},
+ equalsTestCase{uint64(0), false, true, "which is not a map"},
+ equalsTestCase{true, false, true, "which is not a map"},
+ equalsTestCase{[...]int{}, false, true, "which is not a map"},
+ equalsTestCase{func() {}, false, true, "which is not a map"},
+ equalsTestCase{&someInt, false, true, "which is not a map"},
+ equalsTestCase{[]int{}, false, true, "which is not a map"},
+ equalsTestCase{"taco", false, true, "which is not a map"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a map"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilMap() {
+ var nilMap1 map[int]int
+ var nilMap2 map[int]uint
+ var nonNilMap1 map[int]int = make(map[int]int)
+ var nonNilMap2 map[int]int = make(map[int]int)
+ var nonNilMap3 map[int]uint = make(map[int]uint)
+
+ matcher := Equals(nonNilMap1)
+ ExpectEq("map[]", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nonNilMap1, true, false, ""},
+ equalsTestCase{nonNilMap2, false, false, ""},
+ equalsTestCase{nonNilMap3, false, false, ""},
+ equalsTestCase{nilMap1, false, false, ""},
+ equalsTestCase{nilMap2, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a map"},
+ equalsTestCase{bool(false), false, true, "which is not a map"},
+ equalsTestCase{int(0), false, true, "which is not a map"},
+ equalsTestCase{int8(0), false, true, "which is not a map"},
+ equalsTestCase{int16(0), false, true, "which is not a map"},
+ equalsTestCase{int32(0), false, true, "which is not a map"},
+ equalsTestCase{int64(0), false, true, "which is not a map"},
+ equalsTestCase{uint(0), false, true, "which is not a map"},
+ equalsTestCase{uint8(0), false, true, "which is not a map"},
+ equalsTestCase{uint16(0), false, true, "which is not a map"},
+ equalsTestCase{uint32(0), false, true, "which is not a map"},
+ equalsTestCase{uint64(0), false, true, "which is not a map"},
+ equalsTestCase{true, false, true, "which is not a map"},
+ equalsTestCase{[...]int{}, false, true, "which is not a map"},
+ equalsTestCase{func() {}, false, true, "which is not a map"},
+ equalsTestCase{&someInt, false, true, "which is not a map"},
+ equalsTestCase{[]int{}, false, true, "which is not a map"},
+ equalsTestCase{"taco", false, true, "which is not a map"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a map"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Pointers
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilPointer() {
+ var someInt int = 17
+ var someUint uint = 17
+
+ var nilInt1 *int
+ var nilInt2 *int
+ var nilUint *uint
+ var nonNilInt *int = &someInt
+ var nonNilUint *uint = &someUint
+
+ matcher := Equals(nilInt1)
+ ExpectEq("<nil>", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nilInt1, true, false, ""},
+ equalsTestCase{nilInt2, true, false, ""},
+ equalsTestCase{nonNilInt, false, false, ""},
+
+ // Incorrect type.
+ equalsTestCase{nilUint, false, true, "which is not a *int"},
+ equalsTestCase{nonNilUint, false, true, "which is not a *int"},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a *int"},
+ equalsTestCase{bool(false), false, true, "which is not a *int"},
+ equalsTestCase{int(0), false, true, "which is not a *int"},
+ equalsTestCase{int8(0), false, true, "which is not a *int"},
+ equalsTestCase{int16(0), false, true, "which is not a *int"},
+ equalsTestCase{int32(0), false, true, "which is not a *int"},
+ equalsTestCase{int64(0), false, true, "which is not a *int"},
+ equalsTestCase{uint(0), false, true, "which is not a *int"},
+ equalsTestCase{uint8(0), false, true, "which is not a *int"},
+ equalsTestCase{uint16(0), false, true, "which is not a *int"},
+ equalsTestCase{uint32(0), false, true, "which is not a *int"},
+ equalsTestCase{uint64(0), false, true, "which is not a *int"},
+ equalsTestCase{true, false, true, "which is not a *int"},
+ equalsTestCase{[...]int{}, false, true, "which is not a *int"},
+ equalsTestCase{func() {}, false, true, "which is not a *int"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a *int"},
+ equalsTestCase{[]int{}, false, true, "which is not a *int"},
+ equalsTestCase{"taco", false, true, "which is not a *int"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a *int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilPointer() {
+ var someInt int = 17
+ var someOtherInt int = 17
+ var someUint uint = 17
+
+ var nilInt *int
+ var nilUint *uint
+ var nonNilInt1 *int = &someInt
+ var nonNilInt2 *int = &someOtherInt
+ var nonNilUint *uint = &someUint
+
+ matcher := Equals(nonNilInt1)
+ ExpectEq(fmt.Sprintf("%v", nonNilInt1), matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nonNilInt1, true, false, ""},
+ equalsTestCase{nonNilInt2, false, false, ""},
+ equalsTestCase{nilInt, false, false, ""},
+
+ // Incorrect type.
+ equalsTestCase{nilUint, false, true, "which is not a *int"},
+ equalsTestCase{nonNilUint, false, true, "which is not a *int"},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a *int"},
+ equalsTestCase{bool(false), false, true, "which is not a *int"},
+ equalsTestCase{int(0), false, true, "which is not a *int"},
+ equalsTestCase{int8(0), false, true, "which is not a *int"},
+ equalsTestCase{int16(0), false, true, "which is not a *int"},
+ equalsTestCase{int32(0), false, true, "which is not a *int"},
+ equalsTestCase{int64(0), false, true, "which is not a *int"},
+ equalsTestCase{uint(0), false, true, "which is not a *int"},
+ equalsTestCase{uint8(0), false, true, "which is not a *int"},
+ equalsTestCase{uint16(0), false, true, "which is not a *int"},
+ equalsTestCase{uint32(0), false, true, "which is not a *int"},
+ equalsTestCase{uint64(0), false, true, "which is not a *int"},
+ equalsTestCase{true, false, true, "which is not a *int"},
+ equalsTestCase{[...]int{}, false, true, "which is not a *int"},
+ equalsTestCase{func() {}, false, true, "which is not a *int"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a *int"},
+ equalsTestCase{[]int{}, false, true, "which is not a *int"},
+ equalsTestCase{"taco", false, true, "which is not a *int"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a *int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Slices
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilSlice() {
+ var nilInt1 []int
+ var nilInt2 []int
+ var nilUint []uint
+
+ var nonNilInt []int = make([]int, 0)
+ var nonNilUint []uint = make([]uint, 0)
+
+ matcher := Equals(nilInt1)
+ ExpectEq("[]", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nilInt1, true, false, ""},
+ equalsTestCase{nilInt2, true, false, ""},
+ equalsTestCase{nonNilInt, false, false, ""},
+
+ // Incorrect type.
+ equalsTestCase{nilUint, false, true, "which is not a []int"},
+ equalsTestCase{nonNilUint, false, true, "which is not a []int"},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a []int"},
+ equalsTestCase{bool(false), false, true, "which is not a []int"},
+ equalsTestCase{int(0), false, true, "which is not a []int"},
+ equalsTestCase{int8(0), false, true, "which is not a []int"},
+ equalsTestCase{int16(0), false, true, "which is not a []int"},
+ equalsTestCase{int32(0), false, true, "which is not a []int"},
+ equalsTestCase{int64(0), false, true, "which is not a []int"},
+ equalsTestCase{uint(0), false, true, "which is not a []int"},
+ equalsTestCase{uint8(0), false, true, "which is not a []int"},
+ equalsTestCase{uint16(0), false, true, "which is not a []int"},
+ equalsTestCase{uint32(0), false, true, "which is not a []int"},
+ equalsTestCase{uint64(0), false, true, "which is not a []int"},
+ equalsTestCase{true, false, true, "which is not a []int"},
+ equalsTestCase{[...]int{}, false, true, "which is not a []int"},
+ equalsTestCase{func() {}, false, true, "which is not a []int"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a []int"},
+ equalsTestCase{"taco", false, true, "which is not a []int"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a []int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilSlice() {
+ nonNil := make([]int, 0)
+ f := func() { Equals(nonNil) }
+ ExpectThat(f, Panics(HasSubstr("non-nil slice")))
+}
+
+////////////////////////////////////////////////////////////////////////
+// string
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) String() {
+ partial := "taco"
+ expected := fmt.Sprintf("%s%d", partial, 1)
+
+ matcher := Equals(expected)
+ ExpectEq("taco1", matcher.Description())
+
+ type stringAlias string
+
+ cases := []equalsTestCase{
+ // Correct types.
+ equalsTestCase{"taco1", true, false, ""},
+ equalsTestCase{"taco" + "1", true, false, ""},
+ equalsTestCase{expected, true, false, ""},
+ equalsTestCase{stringAlias("taco1"), true, false, ""},
+
+ equalsTestCase{"", false, false, ""},
+ equalsTestCase{"taco", false, false, ""},
+ equalsTestCase{"taco1\x00", false, false, ""},
+ equalsTestCase{"taco2", false, false, ""},
+ equalsTestCase{stringAlias("taco2"), false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a string"},
+ equalsTestCase{bool(false), false, true, "which is not a string"},
+ equalsTestCase{int(0), false, true, "which is not a string"},
+ equalsTestCase{int8(0), false, true, "which is not a string"},
+ equalsTestCase{int16(0), false, true, "which is not a string"},
+ equalsTestCase{int32(0), false, true, "which is not a string"},
+ equalsTestCase{int64(0), false, true, "which is not a string"},
+ equalsTestCase{uint(0), false, true, "which is not a string"},
+ equalsTestCase{uint8(0), false, true, "which is not a string"},
+ equalsTestCase{uint16(0), false, true, "which is not a string"},
+ equalsTestCase{uint32(0), false, true, "which is not a string"},
+ equalsTestCase{uint64(0), false, true, "which is not a string"},
+ equalsTestCase{true, false, true, "which is not a string"},
+ equalsTestCase{[...]int{}, false, true, "which is not a string"},
+ equalsTestCase{func() {}, false, true, "which is not a string"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a string"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a string"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) StringAlias() {
+ type stringAlias string
+
+ matcher := Equals(stringAlias("taco"))
+ ExpectEq("taco", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct types.
+ equalsTestCase{stringAlias("taco"), true, false, ""},
+ equalsTestCase{"taco", true, false, ""},
+
+ equalsTestCase{"burrito", false, false, ""},
+ equalsTestCase{stringAlias("burrito"), false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a string"},
+ equalsTestCase{bool(false), false, true, "which is not a string"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// struct
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) Struct() {
+ type someStruct struct{ foo uint }
+ f := func() { Equals(someStruct{17}) }
+ ExpectThat(f, Panics(HasSubstr("unsupported kind struct")))
+}
+
+////////////////////////////////////////////////////////////////////////
+// unsafe.Pointer
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilUnsafePointer() {
+ someInt := int(17)
+
+ var nilPtr1 unsafe.Pointer
+ var nilPtr2 unsafe.Pointer
+ var nonNilPtr unsafe.Pointer = unsafe.Pointer(&someInt)
+
+ matcher := Equals(nilPtr1)
+ ExpectEq("<nil>", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nilPtr1, true, false, ""},
+ equalsTestCase{nilPtr2, true, false, ""},
+ equalsTestCase{nonNilPtr, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{bool(false), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int8(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int16(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int32(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int64(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint8(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint16(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint32(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint64(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uintptr(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{true, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{[...]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{make(chan int), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{func() {}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{&someInt, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{[]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{"taco", false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a unsafe.Pointer"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilUnsafePointer() {
+ someInt := int(17)
+ someOtherInt := int(17)
+
+ var nilPtr unsafe.Pointer
+ var nonNilPtr1 unsafe.Pointer = unsafe.Pointer(&someInt)
+ var nonNilPtr2 unsafe.Pointer = unsafe.Pointer(&someOtherInt)
+
+ matcher := Equals(nonNilPtr1)
+ ExpectEq(fmt.Sprintf("%v", nonNilPtr1), matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nonNilPtr1, true, false, ""},
+ equalsTestCase{nonNilPtr2, false, false, ""},
+ equalsTestCase{nilPtr, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{bool(false), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int8(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int16(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int32(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int64(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint8(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint16(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint32(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint64(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uintptr(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{true, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{[...]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{make(chan int), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{func() {}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{&someInt, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{[]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{"taco", false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a unsafe.Pointer"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/error.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/error.go
new file mode 100644
index 00000000000..8a078e36d86
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/error.go
@@ -0,0 +1,51 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+// Error returns a matcher that matches non-nil values implementing the
+// built-in error interface for which the return value of Error() matches the
+// supplied matcher.
+//
+// For example:
+//
+// err := errors.New("taco burrito")
+//
+// Error(Equals("taco burrito")) // matches err
+// Error(HasSubstr("taco")) // matches err
+// Error(HasSubstr("enchilada")) // doesn't match err
+//
+func Error(m Matcher) Matcher {
+ return &errorMatcher{m}
+}
+
+type errorMatcher struct {
+ wrappedMatcher Matcher
+}
+
+func (m *errorMatcher) Description() string {
+ return "error " + m.wrappedMatcher.Description()
+}
+
+func (m *errorMatcher) Matches(c interface{}) error {
+ // Make sure that c is an error.
+ e, ok := c.(error)
+ if !ok {
+ return NewFatalError("which is not an error")
+ }
+
+ // Pass on the error text to the wrapped matcher.
+ return m.wrappedMatcher.Matches(e.Error())
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/error_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/error_test.go
new file mode 100644
index 00000000000..42f226d95ed
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/error_test.go
@@ -0,0 +1,92 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "errors"
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type ErrorTest struct {
+ matcherCalled bool
+ suppliedCandidate interface{}
+ wrappedError error
+
+ matcher Matcher
+}
+
+func init() { RegisterTestSuite(&ErrorTest{}) }
+
+func (t *ErrorTest) SetUp(i *TestInfo) {
+ wrapped := &fakeMatcher{
+ func(c interface{}) error {
+ t.matcherCalled = true
+ t.suppliedCandidate = c
+ return t.wrappedError
+ },
+ "is foo",
+ }
+
+ t.matcher = Error(wrapped)
+}
+
+func isFatal(err error) bool {
+ _, isFatal := err.(*FatalError)
+ return isFatal
+}
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *ErrorTest) Description() {
+ ExpectThat(t.matcher.Description(), Equals("error is foo"))
+}
+
+func (t *ErrorTest) CandidateIsNil() {
+ err := t.matcher.Matches(nil)
+
+ ExpectThat(t.matcherCalled, Equals(false))
+ ExpectThat(err.Error(), Equals("which is not an error"))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *ErrorTest) CandidateIsString() {
+ err := t.matcher.Matches("taco")
+
+ ExpectThat(t.matcherCalled, Equals(false))
+ ExpectThat(err.Error(), Equals("which is not an error"))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *ErrorTest) CallsWrappedMatcher() {
+ candidate := errors.New("taco")
+ t.matcher.Matches(candidate)
+
+ ExpectThat(t.matcherCalled, Equals(true))
+ ExpectThat(t.suppliedCandidate, Equals("taco"))
+}
+
+func (t *ErrorTest) ReturnsWrappedMatcherResult() {
+ t.wrappedError = errors.New("burrito")
+ err := t.matcher.Matches(errors.New(""))
+ ExpectThat(err, Equals(t.wrappedError))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_or_equal.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_or_equal.go
new file mode 100644
index 00000000000..4b9d103a381
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_or_equal.go
@@ -0,0 +1,39 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// GreaterOrEqual returns a matcher that matches integer, floating point, or
+// string values v such that v >= x. Comparison is not defined between numeric
+// and string types, but is defined between all integer and floating point
+// types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// GreaterOrEqual will panic.
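+//
+// For illustration (example values, not exhaustive):
+//
+//     GreaterOrEqual(17)    // matches 17, 17.5, and uint8(100)
+//     GreaterOrEqual("foo") // matches "foo" and "qux", but not "bar"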
+func GreaterOrEqual(x interface{}) Matcher {
+ desc := fmt.Sprintf("greater than or equal to %v", x)
+
+ // Special case: make it clear that strings are strings.
+ if reflect.TypeOf(x).Kind() == reflect.String {
+ desc = fmt.Sprintf("greater than or equal to \"%s\"", x)
+ }
+
+ return transformDescription(Not(LessThan(x)), desc)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_or_equal_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_or_equal_test.go
new file mode 100644
index 00000000000..639f0e0acf9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_or_equal_test.go
@@ -0,0 +1,1059 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+ "math"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type GreaterOrEqualTest struct {
+}
+
+func init() { RegisterTestSuite(&GreaterOrEqualTest{}) }
+
+type geTestCase struct {
+ candidate interface{}
+ expectedResult bool
+ shouldBeFatal bool
+ expectedError string
+}
+
+func (t *GreaterOrEqualTest) checkTestCases(matcher Matcher, cases []geTestCase) {
+ for i, c := range cases {
+ err := matcher.Matches(c.candidate)
+
+ ExpectThat(
+ (err == nil),
+ Equals(c.expectedResult),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ if err == nil {
+ continue
+ }
+
+ _, isFatal := err.(*FatalError)
+ ExpectEq(
+ c.shouldBeFatal,
+ isFatal,
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ ExpectThat(
+ err,
+ Error(Equals(c.expectedError)),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// Bad types and arguments
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterOrEqualTest) IntegerCandidateBadTypes() {
+ matcher := GreaterOrEqual(int(-150))
+
+ cases := []geTestCase{
+ geTestCase{true, false, true, "which is not comparable"},
+ geTestCase{uintptr(17), false, true, "which is not comparable"},
+ geTestCase{complex64(-151), false, true, "which is not comparable"},
+ geTestCase{complex128(-151), false, true, "which is not comparable"},
+ geTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ geTestCase{make(chan int), false, true, "which is not comparable"},
+ geTestCase{func() {}, false, true, "which is not comparable"},
+ geTestCase{map[int]int{}, false, true, "which is not comparable"},
+ geTestCase{&geTestCase{}, false, true, "which is not comparable"},
+ geTestCase{make([]int, 0), false, true, "which is not comparable"},
+ geTestCase{"-151", false, true, "which is not comparable"},
+ geTestCase{geTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) FloatCandidateBadTypes() {
+ matcher := GreaterOrEqual(float32(-150))
+
+ cases := []geTestCase{
+ geTestCase{true, false, true, "which is not comparable"},
+ geTestCase{uintptr(17), false, true, "which is not comparable"},
+ geTestCase{complex64(-151), false, true, "which is not comparable"},
+ geTestCase{complex128(-151), false, true, "which is not comparable"},
+ geTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ geTestCase{make(chan int), false, true, "which is not comparable"},
+ geTestCase{func() {}, false, true, "which is not comparable"},
+ geTestCase{map[int]int{}, false, true, "which is not comparable"},
+ geTestCase{&geTestCase{}, false, true, "which is not comparable"},
+ geTestCase{make([]int, 0), false, true, "which is not comparable"},
+ geTestCase{"-151", false, true, "which is not comparable"},
+ geTestCase{geTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) StringCandidateBadTypes() {
+ matcher := GreaterOrEqual("17")
+
+ cases := []geTestCase{
+ geTestCase{true, false, true, "which is not comparable"},
+ geTestCase{int(0), false, true, "which is not comparable"},
+ geTestCase{int8(0), false, true, "which is not comparable"},
+ geTestCase{int16(0), false, true, "which is not comparable"},
+ geTestCase{int32(0), false, true, "which is not comparable"},
+ geTestCase{int64(0), false, true, "which is not comparable"},
+ geTestCase{uint(0), false, true, "which is not comparable"},
+ geTestCase{uint8(0), false, true, "which is not comparable"},
+ geTestCase{uint16(0), false, true, "which is not comparable"},
+ geTestCase{uint32(0), false, true, "which is not comparable"},
+ geTestCase{uint64(0), false, true, "which is not comparable"},
+ geTestCase{uintptr(17), false, true, "which is not comparable"},
+ geTestCase{float32(0), false, true, "which is not comparable"},
+ geTestCase{float64(0), false, true, "which is not comparable"},
+ geTestCase{complex64(-151), false, true, "which is not comparable"},
+ geTestCase{complex128(-151), false, true, "which is not comparable"},
+ geTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ geTestCase{make(chan int), false, true, "which is not comparable"},
+ geTestCase{func() {}, false, true, "which is not comparable"},
+ geTestCase{map[int]int{}, false, true, "which is not comparable"},
+ geTestCase{&geTestCase{}, false, true, "which is not comparable"},
+ geTestCase{make([]int, 0), false, true, "which is not comparable"},
+ geTestCase{geTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) BadArgument() {
+ panicked := false
+
+ defer func() {
+ ExpectThat(panicked, Equals(true))
+ }()
+
+ defer func() {
+ if r := recover(); r != nil {
+ panicked = true
+ }
+ }()
+
+ GreaterOrEqual(complex128(0))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterOrEqualTest) NegativeIntegerLiteral() {
+ matcher := GreaterOrEqual(-150)
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to -150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-(1 << 30), false, false, ""},
+ geTestCase{-151, false, false, ""},
+ geTestCase{-150, true, false, ""},
+ geTestCase{0, true, false, ""},
+ geTestCase{17, true, false, ""},
+
+ geTestCase{int(-(1 << 30)), false, false, ""},
+ geTestCase{int(-151), false, false, ""},
+ geTestCase{int(-150), true, false, ""},
+ geTestCase{int(0), true, false, ""},
+ geTestCase{int(17), true, false, ""},
+
+ geTestCase{int8(-127), true, false, ""},
+ geTestCase{int8(0), true, false, ""},
+ geTestCase{int8(17), true, false, ""},
+
+ geTestCase{int16(-(1 << 14)), false, false, ""},
+ geTestCase{int16(-151), false, false, ""},
+ geTestCase{int16(-150), true, false, ""},
+ geTestCase{int16(0), true, false, ""},
+ geTestCase{int16(17), true, false, ""},
+
+ geTestCase{int32(-(1 << 30)), false, false, ""},
+ geTestCase{int32(-151), false, false, ""},
+ geTestCase{int32(-150), true, false, ""},
+ geTestCase{int32(0), true, false, ""},
+ geTestCase{int32(17), true, false, ""},
+
+ geTestCase{int64(-(1 << 30)), false, false, ""},
+ geTestCase{int64(-151), false, false, ""},
+ geTestCase{int64(-150), true, false, ""},
+ geTestCase{int64(0), true, false, ""},
+ geTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint((1 << 32) - 151), true, false, ""},
+ geTestCase{uint(0), true, false, ""},
+ geTestCase{uint(17), true, false, ""},
+
+ geTestCase{uint8(0), true, false, ""},
+ geTestCase{uint8(17), true, false, ""},
+ geTestCase{uint8(253), true, false, ""},
+
+ geTestCase{uint16((1 << 16) - 151), true, false, ""},
+ geTestCase{uint16(0), true, false, ""},
+ geTestCase{uint16(17), true, false, ""},
+
+ geTestCase{uint32((1 << 32) - 151), true, false, ""},
+ geTestCase{uint32(0), true, false, ""},
+ geTestCase{uint32(17), true, false, ""},
+
+ geTestCase{uint64((1 << 64) - 151), true, false, ""},
+ geTestCase{uint64(0), true, false, ""},
+ geTestCase{uint64(17), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-(1 << 30)), false, false, ""},
+ geTestCase{float32(-151), false, false, ""},
+ geTestCase{float32(-150.1), false, false, ""},
+ geTestCase{float32(-150), true, false, ""},
+ geTestCase{float32(-149.9), true, false, ""},
+ geTestCase{float32(0), true, false, ""},
+ geTestCase{float32(17), true, false, ""},
+ geTestCase{float32(160), true, false, ""},
+
+ geTestCase{float64(-(1 << 30)), false, false, ""},
+ geTestCase{float64(-151), false, false, ""},
+ geTestCase{float64(-150.1), false, false, ""},
+ geTestCase{float64(-150), true, false, ""},
+ geTestCase{float64(-149.9), true, false, ""},
+ geTestCase{float64(0), true, false, ""},
+ geTestCase{float64(17), true, false, ""},
+ geTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) ZeroIntegerLiteral() {
+ matcher := GreaterOrEqual(0)
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 0"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-(1 << 30), false, false, ""},
+ geTestCase{-1, false, false, ""},
+ geTestCase{0, true, false, ""},
+ geTestCase{1, true, false, ""},
+ geTestCase{17, true, false, ""},
+ geTestCase{(1 << 30), true, false, ""},
+
+ geTestCase{int(-(1 << 30)), false, false, ""},
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(0), true, false, ""},
+ geTestCase{int(1), true, false, ""},
+ geTestCase{int(17), true, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(0), true, false, ""},
+ geTestCase{int8(1), true, false, ""},
+
+ geTestCase{int16(-(1 << 14)), false, false, ""},
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(0), true, false, ""},
+ geTestCase{int16(1), true, false, ""},
+ geTestCase{int16(17), true, false, ""},
+
+ geTestCase{int32(-(1 << 30)), false, false, ""},
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(0), true, false, ""},
+ geTestCase{int32(1), true, false, ""},
+ geTestCase{int32(17), true, false, ""},
+
+ geTestCase{int64(-(1 << 30)), false, false, ""},
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(0), true, false, ""},
+ geTestCase{int64(1), true, false, ""},
+ geTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint((1 << 32) - 1), true, false, ""},
+ geTestCase{uint(0), true, false, ""},
+ geTestCase{uint(17), true, false, ""},
+
+ geTestCase{uint8(0), true, false, ""},
+ geTestCase{uint8(17), true, false, ""},
+ geTestCase{uint8(253), true, false, ""},
+
+ geTestCase{uint16((1 << 16) - 1), true, false, ""},
+ geTestCase{uint16(0), true, false, ""},
+ geTestCase{uint16(17), true, false, ""},
+
+ geTestCase{uint32((1 << 32) - 1), true, false, ""},
+ geTestCase{uint32(0), true, false, ""},
+ geTestCase{uint32(17), true, false, ""},
+
+ geTestCase{uint64((1 << 64) - 1), true, false, ""},
+ geTestCase{uint64(0), true, false, ""},
+ geTestCase{uint64(17), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-(1 << 30)), false, false, ""},
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(-0.1), false, false, ""},
+ geTestCase{float32(-0.0), true, false, ""},
+ geTestCase{float32(0), true, false, ""},
+ geTestCase{float32(0.1), true, false, ""},
+ geTestCase{float32(17), true, false, ""},
+ geTestCase{float32(160), true, false, ""},
+
+ geTestCase{float64(-(1 << 30)), false, false, ""},
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(-0.1), false, false, ""},
+ geTestCase{float64(-0), true, false, ""},
+ geTestCase{float64(0), true, false, ""},
+ geTestCase{float64(17), true, false, ""},
+ geTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) PositiveIntegerLiteral() {
+ matcher := GreaterOrEqual(150)
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{149, false, false, ""},
+ geTestCase{150, true, false, ""},
+ geTestCase{151, true, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(149), false, false, ""},
+ geTestCase{int(150), true, false, ""},
+ geTestCase{int(151), true, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(0), false, false, ""},
+ geTestCase{int8(17), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(149), false, false, ""},
+ geTestCase{int16(150), true, false, ""},
+ geTestCase{int16(151), true, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(149), false, false, ""},
+ geTestCase{int32(150), true, false, ""},
+ geTestCase{int32(151), true, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(149), false, false, ""},
+ geTestCase{int64(150), true, false, ""},
+ geTestCase{int64(151), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(149), false, false, ""},
+ geTestCase{uint(150), true, false, ""},
+ geTestCase{uint(151), true, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(127), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(149), false, false, ""},
+ geTestCase{uint16(150), true, false, ""},
+ geTestCase{uint16(151), true, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(149), false, false, ""},
+ geTestCase{uint32(150), true, false, ""},
+ geTestCase{uint32(151), true, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(149), false, false, ""},
+ geTestCase{uint64(150), true, false, ""},
+ geTestCase{uint64(151), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(149), false, false, ""},
+ geTestCase{float32(149.9), false, false, ""},
+ geTestCase{float32(150), true, false, ""},
+ geTestCase{float32(150.1), true, false, ""},
+ geTestCase{float32(151), true, false, ""},
+
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(149), false, false, ""},
+ geTestCase{float64(149.9), false, false, ""},
+ geTestCase{float64(150), true, false, ""},
+ geTestCase{float64(150.1), true, false, ""},
+ geTestCase{float64(151), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Float literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterOrEqualTest) NegativeFloatLiteral() {
+ matcher := GreaterOrEqual(-150.1)
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to -150.1"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-(1 << 30), false, false, ""},
+ geTestCase{-151, false, false, ""},
+ geTestCase{-150, true, false, ""},
+ geTestCase{0, true, false, ""},
+ geTestCase{17, true, false, ""},
+
+ geTestCase{int(-(1 << 30)), false, false, ""},
+ geTestCase{int(-151), false, false, ""},
+ geTestCase{int(-150), true, false, ""},
+ geTestCase{int(0), true, false, ""},
+ geTestCase{int(17), true, false, ""},
+
+ geTestCase{int8(-127), true, false, ""},
+ geTestCase{int8(0), true, false, ""},
+ geTestCase{int8(17), true, false, ""},
+
+ geTestCase{int16(-(1 << 14)), false, false, ""},
+ geTestCase{int16(-151), false, false, ""},
+ geTestCase{int16(-150), true, false, ""},
+ geTestCase{int16(0), true, false, ""},
+ geTestCase{int16(17), true, false, ""},
+
+ geTestCase{int32(-(1 << 30)), false, false, ""},
+ geTestCase{int32(-151), false, false, ""},
+ geTestCase{int32(-150), true, false, ""},
+ geTestCase{int32(0), true, false, ""},
+ geTestCase{int32(17), true, false, ""},
+
+ geTestCase{int64(-(1 << 30)), false, false, ""},
+ geTestCase{int64(-151), false, false, ""},
+ geTestCase{int64(-150), true, false, ""},
+ geTestCase{int64(0), true, false, ""},
+ geTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint((1 << 32) - 151), true, false, ""},
+ geTestCase{uint(0), true, false, ""},
+ geTestCase{uint(17), true, false, ""},
+
+ geTestCase{uint8(0), true, false, ""},
+ geTestCase{uint8(17), true, false, ""},
+ geTestCase{uint8(253), true, false, ""},
+
+ geTestCase{uint16((1 << 16) - 151), true, false, ""},
+ geTestCase{uint16(0), true, false, ""},
+ geTestCase{uint16(17), true, false, ""},
+
+ geTestCase{uint32((1 << 32) - 151), true, false, ""},
+ geTestCase{uint32(0), true, false, ""},
+ geTestCase{uint32(17), true, false, ""},
+
+ geTestCase{uint64((1 << 64) - 151), true, false, ""},
+ geTestCase{uint64(0), true, false, ""},
+ geTestCase{uint64(17), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-(1 << 30)), false, false, ""},
+ geTestCase{float32(-151), false, false, ""},
+ geTestCase{float32(-150.2), false, false, ""},
+ geTestCase{float32(-150.1), true, false, ""},
+ geTestCase{float32(-150), true, false, ""},
+ geTestCase{float32(0), true, false, ""},
+ geTestCase{float32(17), true, false, ""},
+ geTestCase{float32(160), true, false, ""},
+
+ geTestCase{float64(-(1 << 30)), false, false, ""},
+ geTestCase{float64(-151), false, false, ""},
+ geTestCase{float64(-150.2), false, false, ""},
+ geTestCase{float64(-150.1), true, false, ""},
+ geTestCase{float64(-150), true, false, ""},
+ geTestCase{float64(0), true, false, ""},
+ geTestCase{float64(17), true, false, ""},
+ geTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) PositiveFloatLiteral() {
+ matcher := GreaterOrEqual(149.9)
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 149.9"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{149, false, false, ""},
+ geTestCase{150, true, false, ""},
+ geTestCase{151, true, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(149), false, false, ""},
+ geTestCase{int(150), true, false, ""},
+ geTestCase{int(151), true, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(0), false, false, ""},
+ geTestCase{int8(17), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(149), false, false, ""},
+ geTestCase{int16(150), true, false, ""},
+ geTestCase{int16(151), true, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(149), false, false, ""},
+ geTestCase{int32(150), true, false, ""},
+ geTestCase{int32(151), true, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(149), false, false, ""},
+ geTestCase{int64(150), true, false, ""},
+ geTestCase{int64(151), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(149), false, false, ""},
+ geTestCase{uint(150), true, false, ""},
+ geTestCase{uint(151), true, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(127), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(149), false, false, ""},
+ geTestCase{uint16(150), true, false, ""},
+ geTestCase{uint16(151), true, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(149), false, false, ""},
+ geTestCase{uint32(150), true, false, ""},
+ geTestCase{uint32(151), true, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(149), false, false, ""},
+ geTestCase{uint64(150), true, false, ""},
+ geTestCase{uint64(151), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(149), false, false, ""},
+ geTestCase{float32(149.8), false, false, ""},
+ geTestCase{float32(149.9), true, false, ""},
+ geTestCase{float32(150), true, false, ""},
+ geTestCase{float32(151), true, false, ""},
+
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(149), false, false, ""},
+ geTestCase{float64(149.8), false, false, ""},
+ geTestCase{float64(149.9), true, false, ""},
+ geTestCase{float64(150), true, false, ""},
+ geTestCase{float64(151), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Subtle cases
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterOrEqualTest) Int64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterOrEqual(int64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{kTwoTo25 + 0, false, false, ""},
+ geTestCase{kTwoTo25 + 1, true, false, ""},
+ geTestCase{kTwoTo25 + 2, true, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(0), false, false, ""},
+ geTestCase{int16(32767), false, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int32(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 2), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(255), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(65535), false, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint32(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ geTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) Int64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterOrEqual(int64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{1 << 30, false, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(math.MaxInt32), false, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(0), false, false, ""},
+ geTestCase{int16(32767), false, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(math.MaxInt32), false, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ geTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ geTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 2), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(math.MaxUint32), false, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(255), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(65535), false, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(math.MaxUint32), false, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ geTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ geTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+
+ // Floating point.
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) Uint64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterOrEqual(uint64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{kTwoTo25 + 0, false, false, ""},
+ geTestCase{kTwoTo25 + 1, true, false, ""},
+ geTestCase{kTwoTo25 + 2, true, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(0), false, false, ""},
+ geTestCase{int16(32767), false, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int32(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 2), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(255), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(65535), false, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint32(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ geTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) Uint64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterOrEqual(uint64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{1 << 30, false, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(math.MaxInt32), false, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(0), false, false, ""},
+ geTestCase{int16(32767), false, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(math.MaxInt32), false, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ geTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ geTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 2), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(math.MaxUint32), false, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(255), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(65535), false, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(math.MaxUint32), false, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ geTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ geTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+
+ // Floating point.
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) Float32AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterOrEqual(float32(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 3.3554432e+07"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo25 - 2), false, false, ""},
+ geTestCase{int64(kTwoTo25 - 1), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 2), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 3), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo25 - 2), false, false, ""},
+ geTestCase{uint64(kTwoTo25 - 1), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 3), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) Float64AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterOrEqual(float64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 1.8014398509481984e+16"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo54 - 2), false, false, ""},
+ geTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 2), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 3), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo54 - 2), false, false, ""},
+ geTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 3), true, false, ""},
+
+ // Floating point.
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// String literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterOrEqualTest) EmptyString() {
+ matcher := GreaterOrEqual("")
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to \"\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ geTestCase{"", true, false, ""},
+ geTestCase{"\x00", true, false, ""},
+ geTestCase{"a", true, false, ""},
+ geTestCase{"foo", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) SingleNullByte() {
+ matcher := GreaterOrEqual("\x00")
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to \"\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ geTestCase{"", false, false, ""},
+ geTestCase{"\x00", true, false, ""},
+ geTestCase{"a", true, false, ""},
+ geTestCase{"foo", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) LongerString() {
+ matcher := GreaterOrEqual("foo\x00")
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to \"foo\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ geTestCase{"", false, false, ""},
+ geTestCase{"\x00", false, false, ""},
+ geTestCase{"bar", false, false, ""},
+ geTestCase{"foo", false, false, ""},
+ geTestCase{"foo\x00", true, false, ""},
+ geTestCase{"fooa", true, false, ""},
+ geTestCase{"qux", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
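
As a sanity check of the representability claim in the "Subtle cases" comments above (a standalone sketch, not part of the vendored package): float32 carries a 24-bit significand, so every integer in [2^25-1, 2^25+2] converts to exactly 2^25, which is why those candidates are treated as equivalent once a float32 is in the mix.

package main

import "fmt"

func main() {
	const kTwoTo25 = 1 << 25 // 33554432

	// Each of these prints the same float32 value, 3.3554432e+07.
	for _, n := range []int64{kTwoTo25 - 1, kTwoTo25, kTwoTo25 + 1, kTwoTo25 + 2} {
		fmt.Println(n, float32(n))
	}
}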
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_than.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_than.go
new file mode 100644
index 00000000000..3eef32178f8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_than.go
@@ -0,0 +1,39 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// GreaterThan returns a matcher that matches integer, floating point, or
+// string values v such that v > x. Comparison is not defined between numeric
+// and string types, but is defined between all integer and floating point
+// types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// GreaterThan will panic.
+func GreaterThan(x interface{}) Matcher {
+ desc := fmt.Sprintf("greater than %v", x)
+
+ // Special case: make it clear that strings are strings.
+ if reflect.TypeOf(x).Kind() == reflect.String {
+ desc = fmt.Sprintf("greater than \"%s\"", x)
+ }
+
+ return transformDescription(Not(LessOrEqual(x)), desc)
+}
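
GreaterThan is built the same way, as Not(LessOrEqual(x)) with a rewritten description. A small hedged sketch of the quoted-string special case noted in the code above, with outcomes matching the comparison behavior exercised in the surrounding tests:

package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

func main() {
	m := oglematchers.GreaterThan("foo")

	// Strings are quoted in the description: greater than "foo"
	fmt.Println(m.Description())

	fmt.Println(m.Matches("qux")) // <nil>: "qux" > "foo"
	fmt.Println(m.Matches("bar")) // non-nil error: "bar" <= "foo"
	fmt.Println(m.Matches(17))    // fatal error: an int is not comparable to a string
}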
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_than_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_than_test.go
new file mode 100644
index 00000000000..784692525cb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/greater_than_test.go
@@ -0,0 +1,1079 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+ "math"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type GreaterThanTest struct {
+}
+
+func init() { RegisterTestSuite(&GreaterThanTest{}) }
+
+type gtTestCase struct {
+ candidate interface{}
+ expectedResult bool
+ shouldBeFatal bool
+ expectedError string
+}
+
+func (t *GreaterThanTest) checkTestCases(matcher Matcher, cases []gtTestCase) {
+ for i, c := range cases {
+ err := matcher.Matches(c.candidate)
+
+ ExpectThat(
+ (err == nil),
+ Equals(c.expectedResult),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ if err == nil {
+ continue
+ }
+
+ _, isFatal := err.(*FatalError)
+ ExpectEq(
+ c.shouldBeFatal,
+ isFatal,
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ ExpectThat(
+ err,
+ Error(Equals(c.expectedError)),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// Bad types and arguments
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterThanTest) IntegerCandidateBadTypes() {
+ matcher := GreaterThan(int(-150))
+
+ cases := []gtTestCase{
+ gtTestCase{true, false, true, "which is not comparable"},
+ gtTestCase{uintptr(17), false, true, "which is not comparable"},
+ gtTestCase{complex64(-151), false, true, "which is not comparable"},
+ gtTestCase{complex128(-151), false, true, "which is not comparable"},
+ gtTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ gtTestCase{make(chan int), false, true, "which is not comparable"},
+ gtTestCase{func() {}, false, true, "which is not comparable"},
+ gtTestCase{map[int]int{}, false, true, "which is not comparable"},
+ gtTestCase{&gtTestCase{}, false, true, "which is not comparable"},
+ gtTestCase{make([]int, 0), false, true, "which is not comparable"},
+ gtTestCase{"-151", false, true, "which is not comparable"},
+ gtTestCase{gtTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) FloatCandidateBadTypes() {
+ matcher := GreaterThan(float32(-150))
+
+ cases := []gtTestCase{
+ gtTestCase{true, false, true, "which is not comparable"},
+ gtTestCase{uintptr(17), false, true, "which is not comparable"},
+ gtTestCase{complex64(-151), false, true, "which is not comparable"},
+ gtTestCase{complex128(-151), false, true, "which is not comparable"},
+ gtTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ gtTestCase{make(chan int), false, true, "which is not comparable"},
+ gtTestCase{func() {}, false, true, "which is not comparable"},
+ gtTestCase{map[int]int{}, false, true, "which is not comparable"},
+ gtTestCase{&gtTestCase{}, false, true, "which is not comparable"},
+ gtTestCase{make([]int, 0), false, true, "which is not comparable"},
+ gtTestCase{"-151", false, true, "which is not comparable"},
+ gtTestCase{gtTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) StringCandidateBadTypes() {
+ matcher := GreaterThan("17")
+
+ cases := []gtTestCase{
+ gtTestCase{true, false, true, "which is not comparable"},
+ gtTestCase{int(0), false, true, "which is not comparable"},
+ gtTestCase{int8(0), false, true, "which is not comparable"},
+ gtTestCase{int16(0), false, true, "which is not comparable"},
+ gtTestCase{int32(0), false, true, "which is not comparable"},
+ gtTestCase{int64(0), false, true, "which is not comparable"},
+ gtTestCase{uint(0), false, true, "which is not comparable"},
+ gtTestCase{uint8(0), false, true, "which is not comparable"},
+ gtTestCase{uint16(0), false, true, "which is not comparable"},
+ gtTestCase{uint32(0), false, true, "which is not comparable"},
+ gtTestCase{uint64(0), false, true, "which is not comparable"},
+ gtTestCase{uintptr(17), false, true, "which is not comparable"},
+ gtTestCase{float32(0), false, true, "which is not comparable"},
+ gtTestCase{float64(0), false, true, "which is not comparable"},
+ gtTestCase{complex64(-151), false, true, "which is not comparable"},
+ gtTestCase{complex128(-151), false, true, "which is not comparable"},
+ gtTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ gtTestCase{make(chan int), false, true, "which is not comparable"},
+ gtTestCase{func() {}, false, true, "which is not comparable"},
+ gtTestCase{map[int]int{}, false, true, "which is not comparable"},
+ gtTestCase{&gtTestCase{}, false, true, "which is not comparable"},
+ gtTestCase{make([]int, 0), false, true, "which is not comparable"},
+ gtTestCase{gtTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) BadArgument() {
+ panicked := false
+
+ defer func() {
+ ExpectThat(panicked, Equals(true))
+ }()
+
+ defer func() {
+ if r := recover(); r != nil {
+ panicked = true
+ }
+ }()
+
+ GreaterThan(complex128(0))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterThanTest) NegativeIntegerLiteral() {
+ matcher := GreaterThan(-150)
+ desc := matcher.Description()
+ expectedDesc := "greater than -150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-(1 << 30), false, false, ""},
+ gtTestCase{-151, false, false, ""},
+ gtTestCase{-150, false, false, ""},
+ gtTestCase{-149, true, false, ""},
+ gtTestCase{0, true, false, ""},
+ gtTestCase{17, true, false, ""},
+
+ gtTestCase{int(-(1 << 30)), false, false, ""},
+ gtTestCase{int(-151), false, false, ""},
+ gtTestCase{int(-150), false, false, ""},
+ gtTestCase{int(-149), true, false, ""},
+ gtTestCase{int(0), true, false, ""},
+ gtTestCase{int(17), true, false, ""},
+
+ gtTestCase{int8(-127), true, false, ""},
+ gtTestCase{int8(0), true, false, ""},
+ gtTestCase{int8(17), true, false, ""},
+
+ gtTestCase{int16(-(1 << 14)), false, false, ""},
+ gtTestCase{int16(-151), false, false, ""},
+ gtTestCase{int16(-150), false, false, ""},
+ gtTestCase{int16(-149), true, false, ""},
+ gtTestCase{int16(0), true, false, ""},
+ gtTestCase{int16(17), true, false, ""},
+
+ gtTestCase{int32(-(1 << 30)), false, false, ""},
+ gtTestCase{int32(-151), false, false, ""},
+ gtTestCase{int32(-150), false, false, ""},
+ gtTestCase{int32(-149), true, false, ""},
+ gtTestCase{int32(0), true, false, ""},
+ gtTestCase{int32(17), true, false, ""},
+
+ gtTestCase{int64(-(1 << 30)), false, false, ""},
+ gtTestCase{int64(-151), false, false, ""},
+ gtTestCase{int64(-150), false, false, ""},
+ gtTestCase{int64(-149), true, false, ""},
+ gtTestCase{int64(0), true, false, ""},
+ gtTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint((1 << 32) - 151), true, false, ""},
+ gtTestCase{uint(0), true, false, ""},
+ gtTestCase{uint(17), true, false, ""},
+
+ gtTestCase{uint8(0), true, false, ""},
+ gtTestCase{uint8(17), true, false, ""},
+ gtTestCase{uint8(253), true, false, ""},
+
+ gtTestCase{uint16((1 << 16) - 151), true, false, ""},
+ gtTestCase{uint16(0), true, false, ""},
+ gtTestCase{uint16(17), true, false, ""},
+
+ gtTestCase{uint32((1 << 32) - 151), true, false, ""},
+ gtTestCase{uint32(0), true, false, ""},
+ gtTestCase{uint32(17), true, false, ""},
+
+ gtTestCase{uint64((1 << 64) - 151), true, false, ""},
+ gtTestCase{uint64(0), true, false, ""},
+ gtTestCase{uint64(17), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-(1 << 30)), false, false, ""},
+ gtTestCase{float32(-151), false, false, ""},
+ gtTestCase{float32(-150.1), false, false, ""},
+ gtTestCase{float32(-150), false, false, ""},
+ gtTestCase{float32(-149.9), true, false, ""},
+ gtTestCase{float32(0), true, false, ""},
+ gtTestCase{float32(17), true, false, ""},
+ gtTestCase{float32(160), true, false, ""},
+
+ gtTestCase{float64(-(1 << 30)), false, false, ""},
+ gtTestCase{float64(-151), false, false, ""},
+ gtTestCase{float64(-150.1), false, false, ""},
+ gtTestCase{float64(-150), false, false, ""},
+ gtTestCase{float64(-149.9), true, false, ""},
+ gtTestCase{float64(0), true, false, ""},
+ gtTestCase{float64(17), true, false, ""},
+ gtTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) ZeroIntegerLiteral() {
+ matcher := GreaterThan(0)
+ desc := matcher.Description()
+ expectedDesc := "greater than 0"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-(1 << 30), false, false, ""},
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{0, false, false, ""},
+ gtTestCase{1, true, false, ""},
+ gtTestCase{17, true, false, ""},
+ gtTestCase{(1 << 30), true, false, ""},
+
+ gtTestCase{int(-(1 << 30)), false, false, ""},
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(0), false, false, ""},
+ gtTestCase{int(1), true, false, ""},
+ gtTestCase{int(17), true, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(0), false, false, ""},
+ gtTestCase{int8(1), true, false, ""},
+
+ gtTestCase{int16(-(1 << 14)), false, false, ""},
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(0), false, false, ""},
+ gtTestCase{int16(1), true, false, ""},
+ gtTestCase{int16(17), true, false, ""},
+
+ gtTestCase{int32(-(1 << 30)), false, false, ""},
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(0), false, false, ""},
+ gtTestCase{int32(1), true, false, ""},
+ gtTestCase{int32(17), true, false, ""},
+
+ gtTestCase{int64(-(1 << 30)), false, false, ""},
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(0), false, false, ""},
+ gtTestCase{int64(1), true, false, ""},
+ gtTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint((1 << 32) - 1), true, false, ""},
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(1), true, false, ""},
+ gtTestCase{uint(17), true, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(1), true, false, ""},
+ gtTestCase{uint8(17), true, false, ""},
+ gtTestCase{uint8(253), true, false, ""},
+
+ gtTestCase{uint16((1 << 16) - 1), true, false, ""},
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(1), true, false, ""},
+ gtTestCase{uint16(17), true, false, ""},
+
+ gtTestCase{uint32((1 << 32) - 1), true, false, ""},
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(1), true, false, ""},
+ gtTestCase{uint32(17), true, false, ""},
+
+ gtTestCase{uint64((1 << 64) - 1), true, false, ""},
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(1), true, false, ""},
+ gtTestCase{uint64(17), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-(1 << 30)), false, false, ""},
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(-0.1), false, false, ""},
+ gtTestCase{float32(-0.0), false, false, ""},
+ gtTestCase{float32(0), false, false, ""},
+ gtTestCase{float32(0.1), true, false, ""},
+ gtTestCase{float32(17), true, false, ""},
+ gtTestCase{float32(160), true, false, ""},
+
+ gtTestCase{float64(-(1 << 30)), false, false, ""},
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(-0.1), false, false, ""},
+ gtTestCase{float64(-0), false, false, ""},
+ gtTestCase{float64(0), false, false, ""},
+ gtTestCase{float64(0.1), true, false, ""},
+ gtTestCase{float64(17), true, false, ""},
+ gtTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) PositiveIntegerLiteral() {
+ matcher := GreaterThan(150)
+ desc := matcher.Description()
+ expectedDesc := "greater than 150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{149, false, false, ""},
+ gtTestCase{150, false, false, ""},
+ gtTestCase{151, true, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(149), false, false, ""},
+ gtTestCase{int(150), false, false, ""},
+ gtTestCase{int(151), true, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(0), false, false, ""},
+ gtTestCase{int8(17), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(149), false, false, ""},
+ gtTestCase{int16(150), false, false, ""},
+ gtTestCase{int16(151), true, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(149), false, false, ""},
+ gtTestCase{int32(150), false, false, ""},
+ gtTestCase{int32(151), true, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(149), false, false, ""},
+ gtTestCase{int64(150), false, false, ""},
+ gtTestCase{int64(151), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(149), false, false, ""},
+ gtTestCase{uint(150), false, false, ""},
+ gtTestCase{uint(151), true, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(127), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(149), false, false, ""},
+ gtTestCase{uint16(150), false, false, ""},
+ gtTestCase{uint16(151), true, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(149), false, false, ""},
+ gtTestCase{uint32(150), false, false, ""},
+ gtTestCase{uint32(151), true, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(149), false, false, ""},
+ gtTestCase{uint64(150), false, false, ""},
+ gtTestCase{uint64(151), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(149), false, false, ""},
+ gtTestCase{float32(149.9), false, false, ""},
+ gtTestCase{float32(150), false, false, ""},
+ gtTestCase{float32(150.1), true, false, ""},
+ gtTestCase{float32(151), true, false, ""},
+
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(149), false, false, ""},
+ gtTestCase{float64(149.9), false, false, ""},
+ gtTestCase{float64(150), false, false, ""},
+ gtTestCase{float64(150.1), true, false, ""},
+ gtTestCase{float64(151), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Float literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterThanTest) NegativeFloatLiteral() {
+ matcher := GreaterThan(-150.1)
+ desc := matcher.Description()
+ expectedDesc := "greater than -150.1"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-(1 << 30), false, false, ""},
+ gtTestCase{-151, false, false, ""},
+ gtTestCase{-150.1, false, false, ""},
+ gtTestCase{-150, true, false, ""},
+ gtTestCase{-149, true, false, ""},
+ gtTestCase{0, true, false, ""},
+ gtTestCase{17, true, false, ""},
+
+ gtTestCase{int(-(1 << 30)), false, false, ""},
+ gtTestCase{int(-151), false, false, ""},
+ gtTestCase{int(-150), true, false, ""},
+ gtTestCase{int(-149), true, false, ""},
+ gtTestCase{int(0), true, false, ""},
+ gtTestCase{int(17), true, false, ""},
+
+ gtTestCase{int8(-127), true, false, ""},
+ gtTestCase{int8(0), true, false, ""},
+ gtTestCase{int8(17), true, false, ""},
+
+ gtTestCase{int16(-(1 << 14)), false, false, ""},
+ gtTestCase{int16(-151), false, false, ""},
+ gtTestCase{int16(-150), true, false, ""},
+ gtTestCase{int16(-149), true, false, ""},
+ gtTestCase{int16(0), true, false, ""},
+ gtTestCase{int16(17), true, false, ""},
+
+ gtTestCase{int32(-(1 << 30)), false, false, ""},
+ gtTestCase{int32(-151), false, false, ""},
+ gtTestCase{int32(-150), true, false, ""},
+ gtTestCase{int32(-149), true, false, ""},
+ gtTestCase{int32(0), true, false, ""},
+ gtTestCase{int32(17), true, false, ""},
+
+ gtTestCase{int64(-(1 << 30)), false, false, ""},
+ gtTestCase{int64(-151), false, false, ""},
+ gtTestCase{int64(-150), true, false, ""},
+ gtTestCase{int64(-149), true, false, ""},
+ gtTestCase{int64(0), true, false, ""},
+ gtTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint((1 << 32) - 151), true, false, ""},
+ gtTestCase{uint(0), true, false, ""},
+ gtTestCase{uint(17), true, false, ""},
+
+ gtTestCase{uint8(0), true, false, ""},
+ gtTestCase{uint8(17), true, false, ""},
+ gtTestCase{uint8(253), true, false, ""},
+
+ gtTestCase{uint16((1 << 16) - 151), true, false, ""},
+ gtTestCase{uint16(0), true, false, ""},
+ gtTestCase{uint16(17), true, false, ""},
+
+ gtTestCase{uint32((1 << 32) - 151), true, false, ""},
+ gtTestCase{uint32(0), true, false, ""},
+ gtTestCase{uint32(17), true, false, ""},
+
+ gtTestCase{uint64((1 << 64) - 151), true, false, ""},
+ gtTestCase{uint64(0), true, false, ""},
+ gtTestCase{uint64(17), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-(1 << 30)), false, false, ""},
+ gtTestCase{float32(-151), false, false, ""},
+ gtTestCase{float32(-150.2), false, false, ""},
+ gtTestCase{float32(-150.1), false, false, ""},
+ gtTestCase{float32(-150), true, false, ""},
+ gtTestCase{float32(0), true, false, ""},
+ gtTestCase{float32(17), true, false, ""},
+ gtTestCase{float32(160), true, false, ""},
+
+ gtTestCase{float64(-(1 << 30)), false, false, ""},
+ gtTestCase{float64(-151), false, false, ""},
+ gtTestCase{float64(-150.2), false, false, ""},
+ gtTestCase{float64(-150.1), false, false, ""},
+ gtTestCase{float64(-150), true, false, ""},
+ gtTestCase{float64(0), true, false, ""},
+ gtTestCase{float64(17), true, false, ""},
+ gtTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) PositiveFloatLiteral() {
+ matcher := GreaterThan(149.9)
+ desc := matcher.Description()
+ expectedDesc := "greater than 149.9"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{149, false, false, ""},
+ gtTestCase{149.9, false, false, ""},
+ gtTestCase{150, true, false, ""},
+ gtTestCase{151, true, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(149), false, false, ""},
+ gtTestCase{int(150), true, false, ""},
+ gtTestCase{int(151), true, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(0), false, false, ""},
+ gtTestCase{int8(17), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(149), false, false, ""},
+ gtTestCase{int16(150), true, false, ""},
+ gtTestCase{int16(151), true, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(149), false, false, ""},
+ gtTestCase{int32(150), true, false, ""},
+ gtTestCase{int32(151), true, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(149), false, false, ""},
+ gtTestCase{int64(150), true, false, ""},
+ gtTestCase{int64(151), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(149), false, false, ""},
+ gtTestCase{uint(150), true, false, ""},
+ gtTestCase{uint(151), true, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(127), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(149), false, false, ""},
+ gtTestCase{uint16(150), true, false, ""},
+ gtTestCase{uint16(151), true, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(149), false, false, ""},
+ gtTestCase{uint32(150), true, false, ""},
+ gtTestCase{uint32(151), true, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(149), false, false, ""},
+ gtTestCase{uint64(150), true, false, ""},
+ gtTestCase{uint64(151), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(149), false, false, ""},
+ gtTestCase{float32(149.8), false, false, ""},
+ gtTestCase{float32(149.9), false, false, ""},
+ gtTestCase{float32(150), true, false, ""},
+ gtTestCase{float32(151), true, false, ""},
+
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(149), false, false, ""},
+ gtTestCase{float64(149.8), false, false, ""},
+ gtTestCase{float64(149.9), false, false, ""},
+ gtTestCase{float64(150), true, false, ""},
+ gtTestCase{float64(151), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Subtle cases
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterThanTest) Int64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterThan(int64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{kTwoTo25 + 0, false, false, ""},
+ gtTestCase{kTwoTo25 + 1, false, false, ""},
+ gtTestCase{kTwoTo25 + 2, true, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(0), false, false, ""},
+ gtTestCase{int16(32767), false, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 2), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(255), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(65535), false, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ gtTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
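
The expectation tables in this test and the five subtle-case tests that follow all rest on the IEEE-754 fact spelled out in the comments: a float32 significand holds 24 bits and a float64 significand holds 53, so conversion collapses the integers immediately around 2^25 and 2^54 onto a single representable value. A minimal standalone sketch (illustrative only, not part of the vendored patch) that prints the collapse:

package main

import "fmt"

func main() {
	const twoTo25 = 1 << 25 // 33554432, mirrors kTwoTo25 in the tests
	const twoTo54 = 1 << 54 // 18014398509481984, mirrors kTwoTo54

	// float32 keeps 24 significand bits, so 2^25-1 through 2^25+2 all round
	// to exactly 2^25; only 2^25+3 lands on the next representable value.
	for _, n := range []int64{twoTo25 - 1, twoTo25, twoTo25 + 1, twoTo25 + 2, twoTo25 + 3} {
		fmt.Printf("float32(%d) = %.0f\n", n, float32(n))
	}

	// float64 keeps 53 significand bits, so the same collapse happens around 2^54.
	for _, n := range []int64{twoTo54 - 1, twoTo54, twoTo54 + 1, twoTo54 + 2, twoTo54 + 3} {
		fmt.Printf("float64(%d) = %.0f\n", n, float64(n))
	}
}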
+
+func (t *GreaterThanTest) Int64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterThan(int64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{1 << 30, false, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(math.MaxInt32), false, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(0), false, false, ""},
+ gtTestCase{int16(32767), false, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(math.MaxInt32), false, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 2), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(math.MaxUint32), false, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(255), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(65535), false, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(math.MaxUint32), false, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) Uint64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterThan(uint64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{kTwoTo25 + 0, false, false, ""},
+ gtTestCase{kTwoTo25 + 1, false, false, ""},
+ gtTestCase{kTwoTo25 + 2, true, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(0), false, false, ""},
+ gtTestCase{int16(32767), false, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 2), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(255), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(65535), false, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ gtTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) Uint64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterThan(uint64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{1 << 30, false, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(math.MaxInt32), false, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(0), false, false, ""},
+ gtTestCase{int16(32767), false, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(math.MaxInt32), false, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 2), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(math.MaxUint32), false, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(255), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(65535), false, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(math.MaxUint32), false, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) Float32AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterThan(float32(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 3.3554432e+07"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{int64(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 3), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 3), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) Float64AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterThan(float64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 1.8014398509481984e+16"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 - 2), false, false, ""},
+ gtTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 2), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 3), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 - 2), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 3), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// String literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterThanTest) EmptyString() {
+ matcher := GreaterThan("")
+ desc := matcher.Description()
+ expectedDesc := "greater than \"\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ gtTestCase{"", false, false, ""},
+ gtTestCase{"\x00", true, false, ""},
+ gtTestCase{"a", true, false, ""},
+ gtTestCase{"foo", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) SingleNullByte() {
+ matcher := GreaterThan("\x00")
+ desc := matcher.Description()
+ expectedDesc := "greater than \"\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ gtTestCase{"", false, false, ""},
+ gtTestCase{"\x00", false, false, ""},
+ gtTestCase{"\x00\x00", true, false, ""},
+ gtTestCase{"a", true, false, ""},
+ gtTestCase{"foo", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) LongerString() {
+ matcher := GreaterThan("foo\x00")
+ desc := matcher.Description()
+ expectedDesc := "greater than \"foo\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ gtTestCase{"", false, false, ""},
+ gtTestCase{"\x00", false, false, ""},
+ gtTestCase{"bar", false, false, ""},
+ gtTestCase{"foo", false, false, ""},
+ gtTestCase{"foo\x00", false, false, ""},
+ gtTestCase{"foo\x00\x00", true, false, ""},
+ gtTestCase{"fooa", true, false, ""},
+ gtTestCase{"qux", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_same_type_as.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_same_type_as.go
new file mode 100644
index 00000000000..3b286f73218
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_same_type_as.go
@@ -0,0 +1,37 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// HasSameTypeAs returns a matcher that matches values with exactly the same
+// type as the supplied prototype.
+func HasSameTypeAs(p interface{}) Matcher {
+ expected := reflect.TypeOf(p)
+ pred := func(c interface{}) error {
+ actual := reflect.TypeOf(c)
+ if actual != expected {
+ return fmt.Errorf("which has type %v", actual)
+ }
+
+ return nil
+ }
+
+ return NewMatcher(pred, fmt.Sprintf("has type %v", expected))
+}
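
A quick illustration of how the matcher above behaves (a standalone sketch, not part of the vendored patch; the celsius type is hypothetical and exists only for the example). Because HasSameTypeAs compares reflect.Type values for exact equality, a named type does not match its underlying type:

package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

// celsius is a hypothetical named type used only to show that a named type
// and its underlying type are treated as different types.
type celsius float64

func main() {
	m := oglematchers.HasSameTypeAs(celsius(0))

	fmt.Println(m.Description())        // has type main.celsius
	fmt.Println(m.Matches(celsius(21))) // <nil> (types identical)
	fmt.Println(m.Matches(float64(21))) // which has type float64
}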
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_same_type_as_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_same_type_as_test.go
new file mode 100644
index 00000000000..2030d5f9b1a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_same_type_as_test.go
@@ -0,0 +1,181 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "io"
+ "testing"
+
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+)
+
+func TestHasSameTypeAs(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Boilerplate
+////////////////////////////////////////////////////////////////////////
+
+type HasSameTypeAsTest struct {
+}
+
+func init() { RegisterTestSuite(&HasSameTypeAsTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *HasSameTypeAsTest) CandidateIsLiteralNil() {
+ matcher := HasSameTypeAs(nil)
+ var err error
+
+ // Description
+ ExpectEq("has type <nil>", matcher.Description())
+
+ // Literal nil
+ err = matcher.Matches(nil)
+ ExpectEq(nil, err)
+
+ // nil in interface variable
+ var r io.Reader
+ err = matcher.Matches(r)
+ ExpectEq(nil, err)
+
+ // int
+ err = matcher.Matches(17)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type int")))
+
+ // string
+ err = matcher.Matches("")
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type string")))
+
+ // nil map
+ var m map[string]string
+ err = matcher.Matches(m)
+
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type map[string]string")))
+
+ // Non-nil map
+ m = make(map[string]string)
+ err = matcher.Matches(m)
+
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type map[string]string")))
+}
+
+func (t *HasSameTypeAsTest) CandidateIsNilMap() {
+ var m map[string]string
+ matcher := HasSameTypeAs(m)
+ var err error
+
+ // Description
+ ExpectEq("has type map[string]string", matcher.Description())
+
+ // nil map
+ m = nil
+ err = matcher.Matches(m)
+ ExpectEq(nil, err)
+
+ // Non-nil map
+ m = make(map[string]string)
+ err = matcher.Matches(m)
+ ExpectEq(nil, err)
+
+ // Literal nil
+ err = matcher.Matches(nil)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type <nil>")))
+
+ // int
+ err = matcher.Matches(17)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type int")))
+
+ // string
+ err = matcher.Matches("")
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type string")))
+}
+
+func (t *HasSameTypeAsTest) CandidateIsNilInInterfaceVariable() {
+ var r io.Reader
+ matcher := HasSameTypeAs(r)
+ var err error
+
+ // Description
+ ExpectEq("has type <nil>", matcher.Description())
+
+ // nil in interface variable
+ r = nil
+ err = matcher.Matches(r)
+ ExpectEq(nil, err)
+
+ // Literal nil
+ err = matcher.Matches(nil)
+ ExpectEq(nil, err)
+
+ // int
+ err = matcher.Matches(17)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type int")))
+}
+
+func (t *HasSameTypeAsTest) CandidateIsString() {
+ matcher := HasSameTypeAs("")
+ var err error
+
+ // Description
+ ExpectEq("has type string", matcher.Description())
+
+ // string
+ err = matcher.Matches("taco")
+ ExpectEq(nil, err)
+
+ // string alias
+ type Foo string
+ err = matcher.Matches(Foo("taco"))
+ ExpectThat(err, Error(MatchesRegexp("which has type .*Foo")))
+
+ // Literal nil
+ err = matcher.Matches(nil)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type <nil>")))
+
+ // int
+ err = matcher.Matches(17)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type int")))
+}
+
+func (t *HasSameTypeAsTest) CandidateIsStringAlias() {
+ type Foo string
+ matcher := HasSameTypeAs(Foo(""))
+ var err error
+
+ // Description
+ ExpectThat(matcher.Description(), MatchesRegexp("has type .*Foo"))
+
+ // string alias
+ err = matcher.Matches(Foo("taco"))
+ ExpectEq(nil, err)
+
+ // string
+ err = matcher.Matches("taco")
+ ExpectThat(err, Error(Equals("which has type string")))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_substr.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_substr.go
new file mode 100644
index 00000000000..bf5bd6ae6d3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_substr.go
@@ -0,0 +1,46 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// HasSubstr returns a matcher that matches strings containing s as a
+// substring.
+func HasSubstr(s string) Matcher {
+ return NewMatcher(
+ func(c interface{}) error { return hasSubstr(s, c) },
+ fmt.Sprintf("has substring \"%s\"", s))
+}
+
+func hasSubstr(needle string, c interface{}) error {
+ v := reflect.ValueOf(c)
+ if v.Kind() != reflect.String {
+ return NewFatalError("which is not a string")
+ }
+
+ // Perform the substring search.
+ haystack := v.String()
+ if strings.Contains(haystack, needle) {
+ return nil
+ }
+
+ return errors.New("")
+}
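
As a usage sketch for the matcher above (illustrative only, not part of the vendored patch): a string that lacks the substring yields an empty error message, while a non-string candidate yields a fatal error, mirroring the test expectations that follow.

package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

func main() {
	m := oglematchers.HasSubstr("taco")

	fmt.Println(m.Matches("burritos and tacos")) // <nil> (match)
	fmt.Println(m.Matches("burrito"))            // prints an empty line: non-match, empty error
	fmt.Println(m.Matches(17))                   // which is not a string (fatal error)
}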
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_substr_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_substr_test.go
new file mode 100644
index 00000000000..e36dcd8f03f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/has_substr_test.go
@@ -0,0 +1,93 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type HasSubstrTest struct {
+}

+
+func init() { RegisterTestSuite(&HasSubstrTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *HasSubstrTest) Description() {
+ matcher := HasSubstr("taco")
+ ExpectThat(matcher.Description(), Equals("has substring \"taco\""))
+}
+
+func (t *HasSubstrTest) CandidateIsNil() {
+ matcher := HasSubstr("")
+ err := matcher.Matches(nil)
+
+ ExpectThat(err, Error(Equals("which is not a string")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *HasSubstrTest) CandidateIsInteger() {
+ matcher := HasSubstr("")
+ err := matcher.Matches(17)
+
+ ExpectThat(err, Error(Equals("which is not a string")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *HasSubstrTest) CandidateIsByteSlice() {
+ matcher := HasSubstr("")
+ err := matcher.Matches([]byte{17})
+
+ ExpectThat(err, Error(Equals("which is not a string")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *HasSubstrTest) CandidateDoesntHaveSubstring() {
+ matcher := HasSubstr("taco")
+ err := matcher.Matches("tac")
+
+ ExpectThat(err, Error(Equals("")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *HasSubstrTest) CandidateEqualsArg() {
+ matcher := HasSubstr("taco")
+ err := matcher.Matches("taco")
+
+ ExpectThat(err, Equals(nil))
+}
+
+func (t *HasSubstrTest) CandidateHasProperSubstring() {
+ matcher := HasSubstr("taco")
+ err := matcher.Matches("burritos and tacos")
+
+ ExpectThat(err, Equals(nil))
+}
+
+func (t *HasSubstrTest) EmptyStringIsAlwaysSubString() {
+ matcher := HasSubstr("")
+ err := matcher.Matches("asdf")
+
+ ExpectThat(err, Equals(nil))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/identical_to.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/identical_to.go
new file mode 100644
index 00000000000..ae6460ed966
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/identical_to.go
@@ -0,0 +1,134 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+// Is the type comparable according to the definition here?
+//
+// http://weekly.golang.org/doc/go_spec.html#Comparison_operators
+//
+func isComparable(t reflect.Type) bool {
+ switch t.Kind() {
+ case reflect.Array:
+ return isComparable(t.Elem())
+
+ case reflect.Struct:
+ for i := 0; i < t.NumField(); i++ {
+ if !isComparable(t.Field(i).Type) {
+ return false
+ }
+ }
+
+ return true
+
+ case reflect.Slice, reflect.Map, reflect.Func:
+ return false
+ }
+
+ return true
+}
+
+// Should the supplied type be allowed as an argument to IdenticalTo?
+func isLegalForIdenticalTo(t reflect.Type) (bool, error) {
+ // Allow the zero type.
+ if t == nil {
+ return true, nil
+ }
+
+ // Reference types are always okay; we compare pointers.
+ switch t.Kind() {
+ case reflect.Slice, reflect.Map, reflect.Func, reflect.Chan:
+ return true, nil
+ }
+
+ // Reject other non-comparable types.
+ if !isComparable(t) {
+ return false, errors.New(fmt.Sprintf("%v is not comparable", t))
+ }
+
+ return true, nil
+}
+
+// IdenticalTo(x) returns a matcher that matches values v with type identical
+// to x such that:
+//
+// 1. If v and x are of a reference type (slice, map, function, channel), then
+// they are either both nil or are references to the same object.
+//
+// 2. Otherwise, if v and x are not of a reference type but have a valid type,
+// then v == x.
+//
+// If v and x are both the invalid type (which results from the predeclared nil
+// value, or from nil interface variables), then the matcher is satisfied.
+//
+// This function will panic if x is of a value type that is not comparable. For
+// example, x cannot be an array of functions.
+func IdenticalTo(x interface{}) Matcher {
+ t := reflect.TypeOf(x)
+
+ // Reject illegal arguments.
+ if ok, err := isLegalForIdenticalTo(t); !ok {
+ panic("IdenticalTo: " + err.Error())
+ }
+
+ return &identicalToMatcher{x}
+}
+
+type identicalToMatcher struct {
+ x interface{}
+}
+
+func (m *identicalToMatcher) Description() string {
+ t := reflect.TypeOf(m.x)
+ return fmt.Sprintf("identical to <%v> %v", t, m.x)
+}
+
+func (m *identicalToMatcher) Matches(c interface{}) error {
+ // Make sure the candidate's type is correct.
+ t := reflect.TypeOf(m.x)
+ if ct := reflect.TypeOf(c); t != ct {
+ return NewFatalError(fmt.Sprintf("which is of type %v", ct))
+ }
+
+ // Special case: two values of the invalid type are always identical.
+ if t == nil {
+ return nil
+ }
+
+ // Handle reference types.
+ switch t.Kind() {
+ case reflect.Slice, reflect.Map, reflect.Func, reflect.Chan:
+ xv := reflect.ValueOf(m.x)
+ cv := reflect.ValueOf(c)
+ if xv.Pointer() == cv.Pointer() {
+ return nil
+ }
+
+ return errors.New("which is not an identical reference")
+ }
+
+ // Are the values equal?
+ if m.x == c {
+ return nil
+ }
+
+ return errors.New("")
+}
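
To make the reference-type rule above concrete, here is a small standalone sketch (not part of the vendored patch). Two slices with equal contents are not identical because their headers point at different backing arrays; only the very same slice value matches, and a candidate of the wrong type fails fatally:

package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

func main() {
	s1 := make([]int, 1)
	s2 := make([]int, 1)

	m := oglematchers.IdenticalTo(s1)

	fmt.Println(m.Description()) // identical to <[]int> [0]
	fmt.Println(m.Matches(s1))   // <nil>: same slice header
	fmt.Println(m.Matches(s2))   // which is not an identical reference
	fmt.Println(m.Matches(17))   // which is of type int (fatal error)
}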
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/identical_to_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/identical_to_test.go
new file mode 100644
index 00000000000..3e68652b641
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/identical_to_test.go
@@ -0,0 +1,849 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+ "fmt"
+ "io"
+ "unsafe"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type IdenticalToTest struct {
+}
+
+func init() { RegisterTestSuite(&IdenticalToTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *IdenticalToTest) TypesNotIdentical() {
+ var m Matcher
+ var err error
+
+ type intAlias int
+
+ // Type alias expected value
+ m = IdenticalTo(intAlias(17))
+ err = m.Matches(int(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int")))
+
+ // Type alias candidate
+ m = IdenticalTo(int(17))
+ err = m.Matches(intAlias(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.intAlias")))
+
+ // int and uint
+ m = IdenticalTo(int(17))
+ err = m.Matches(uint(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type uint")))
+}
+
+func (t *IdenticalToTest) PredeclaredNilIdentifier() {
+ var m Matcher
+ var err error
+
+ // Nil literal
+ m = IdenticalTo(nil)
+ err = m.Matches(nil)
+ ExpectEq(nil, err)
+
+ // Zero interface var (which is the same as above since IdenticalTo takes an
+ // interface{} as an arg)
+ var nilReader io.Reader
+ var nilWriter io.Writer
+
+ m = IdenticalTo(nilReader)
+ err = m.Matches(nilWriter)
+ ExpectEq(nil, err)
+
+ // Typed nil value.
+ m = IdenticalTo(nil)
+ err = m.Matches((chan int)(nil))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type chan int")))
+
+ // Non-nil value.
+ m = IdenticalTo(nil)
+ err = m.Matches("taco")
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type string")))
+}
+
+func (t *IdenticalToTest) Slices() {
+ var m Matcher
+ var err error
+
+ // Nil expected value
+ m = IdenticalTo(([]int)(nil))
+ ExpectEq("identical to <[]int> []", m.Description())
+
+ err = m.Matches(([]int)(nil))
+ ExpectEq(nil, err)
+
+ err = m.Matches([]int{})
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+
+ // Non-nil expected value
+ o1 := make([]int, 1)
+ o2 := make([]int, 1)
+ m = IdenticalTo(o1)
+ ExpectEq(fmt.Sprintf("identical to <[]int> %v", o1), m.Description())
+
+ err = m.Matches(o1)
+ ExpectEq(nil, err)
+
+ err = m.Matches(o2)
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+}
+
+func (t *IdenticalToTest) Maps() {
+ var m Matcher
+ var err error
+
+ // Nil expected value
+ m = IdenticalTo((map[int]int)(nil))
+ ExpectEq("identical to <map[int]int> map[]", m.Description())
+
+ err = m.Matches((map[int]int)(nil))
+ ExpectEq(nil, err)
+
+ err = m.Matches(map[int]int{})
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+
+ // Non-nil expected value
+ o1 := map[int]int{}
+ o2 := map[int]int{}
+ m = IdenticalTo(o1)
+ ExpectEq(fmt.Sprintf("identical to <map[int]int> %v", o1), m.Description())
+
+ err = m.Matches(o1)
+ ExpectEq(nil, err)
+
+ err = m.Matches(o2)
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+}
+
+func (t *IdenticalToTest) Functions() {
+ var m Matcher
+ var err error
+
+ // Nil expected value
+ m = IdenticalTo((func())(nil))
+ ExpectEq("identical to <func()> <nil>", m.Description())
+
+ err = m.Matches((func())(nil))
+ ExpectEq(nil, err)
+
+ err = m.Matches(func(){})
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+
+ // Non-nil expected value
+ o1 := func() {}
+ o2 := func() {}
+ m = IdenticalTo(o1)
+ ExpectEq(fmt.Sprintf("identical to <func()> %v", o1), m.Description())
+
+ err = m.Matches(o1)
+ ExpectEq(nil, err)
+
+ err = m.Matches(o2)
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+}
+
+func (t *IdenticalToTest) Channels() {
+ var m Matcher
+ var err error
+
+ // Nil expected value
+ m = IdenticalTo((chan int)(nil))
+ ExpectEq("identical to <chan int> <nil>", m.Description())
+
+ err = m.Matches((chan int)(nil))
+ ExpectEq(nil, err)
+
+ err = m.Matches(make(chan int))
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+
+ // Non-nil expected value
+ o1 := make(chan int)
+ o2 := make(chan int)
+ m = IdenticalTo(o1)
+ ExpectEq(fmt.Sprintf("identical to <chan int> %v", o1), m.Description())
+
+ err = m.Matches(o1)
+ ExpectEq(nil, err)
+
+ err = m.Matches(o2)
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+}
+
+func (t *IdenticalToTest) Bools() {
+ var m Matcher
+ var err error
+
+ // false
+ m = IdenticalTo(false)
+ ExpectEq("identical to <bool> false", m.Description())
+
+ err = m.Matches(false)
+ ExpectEq(nil, err)
+
+ err = m.Matches(true)
+ ExpectThat(err, Error(Equals("")))
+
+ // true
+ m = IdenticalTo(true)
+ ExpectEq("identical to <bool> true", m.Description())
+
+ err = m.Matches(false)
+ ExpectThat(err, Error(Equals("")))
+
+ err = m.Matches(true)
+ ExpectEq(nil, err)
+}
+
+func (t *IdenticalToTest) Ints() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(int(17))
+ ExpectEq("identical to <int> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(int(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType int
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Int8s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(int8(17))
+ ExpectEq("identical to <int8> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(int8(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType int8
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Int16s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(int16(17))
+ ExpectEq("identical to <int16> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(int16(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType int16
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Int32s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(int32(17))
+ ExpectEq("identical to <int32> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(int32(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType int32
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int16(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int16")))
+}
+
+func (t *IdenticalToTest) Int64s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(int64(17))
+ ExpectEq("identical to <int64> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(int64(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType int64
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uints() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uint(17))
+ ExpectEq("identical to <uint> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uint(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uint
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uint8s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uint8(17))
+ ExpectEq("identical to <uint8> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uint8(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uint8
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uint16s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uint16(17))
+ ExpectEq("identical to <uint16> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uint16(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uint16
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uint32s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uint32(17))
+ ExpectEq("identical to <uint32> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uint32(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uint32
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uint64s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uint64(17))
+ ExpectEq("identical to <uint64> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uint64(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uint64
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uintptrs() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uintptr(17))
+ ExpectEq("identical to <uintptr> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uintptr(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uintptr
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Float32s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(float32(17))
+ ExpectEq("identical to <float32> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(float32(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType float32
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Float64s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(float64(17))
+ ExpectEq("identical to <float64> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(float64(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType float64
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Complex64s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(complex64(17))
+ ExpectEq("identical to <complex64> (17+0i)", m.Description())
+
+ // Identical value
+ err = m.Matches(complex64(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType complex64
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Complex128s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(complex128(17))
+ ExpectEq("identical to <complex128> (17+0i)", m.Description())
+
+ // Identical value
+ err = m.Matches(complex128(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType complex128
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) EmptyComparableArrays() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo([0]int{})
+ ExpectEq("identical to <[0]int> []", m.Description())
+
+ // Identical value
+ err = m.Matches([0]int{})
+ ExpectEq(nil, err)
+
+ // Length too long
+ err = m.Matches([1]int{17})
+ ExpectThat(err, Error(Equals("which is of type [1]int")))
+
+ // Element type alias
+ type myType int
+ err = m.Matches([0]myType{})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type [0]oglematchers_test.myType")))
+
+ // Completely wrong element type
+ err = m.Matches([0]int32{})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type [0]int32")))
+}
+
+func (t *IdenticalToTest) NonEmptyComparableArrays() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo([2]int{17, 19})
+ ExpectEq("identical to <[2]int> [17 19]", m.Description())
+
+ // Identical value
+ err = m.Matches([2]int{17, 19})
+ ExpectEq(nil, err)
+
+ // Length too short
+ err = m.Matches([1]int{17})
+ ExpectThat(err, Error(Equals("which is of type [1]int")))
+
+ // Length too long
+ err = m.Matches([3]int{17, 19, 23})
+ ExpectThat(err, Error(Equals("which is of type [3]int")))
+
+ // First element different
+ err = m.Matches([2]int{13, 19})
+ ExpectThat(err, Error(Equals("")))
+
+ // Second element different
+ err = m.Matches([2]int{17, 23})
+ ExpectThat(err, Error(Equals("")))
+
+ // Element type alias
+ type myType int
+ err = m.Matches([2]myType{17, 19})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type [2]oglematchers_test.myType")))
+
+ // Completely wrong element type
+ err = m.Matches([2]int32{17, 19})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type [2]int32")))
+}
+
+func (t *IdenticalToTest) NonEmptyArraysOfComparableArrays() {
+ var m Matcher
+ var err error
+
+ x := [2][2]int{
+ [2]int{17, 19},
+ [2]int{23, 29},
+ }
+ m = IdenticalTo(x)
+ ExpectEq("identical to <[2][2]int> [[17 19] [23 29]]", m.Description())
+
+ // Identical value
+ err = m.Matches([2][2]int{[2]int{17, 19}, [2]int{23, 29}})
+ ExpectEq(nil, err)
+
+ // Outer length too short
+ err = m.Matches([1][2]int{[2]int{17, 19}})
+ ExpectThat(err, Error(Equals("which is of type [1][2]int")))
+
+ // Inner length too short
+ err = m.Matches([2][1]int{[1]int{17}, [1]int{23}})
+ ExpectThat(err, Error(Equals("which is of type [2][1]int")))
+
+ // First element different
+ err = m.Matches([2][2]int{[2]int{13, 19}, [2]int{23, 29}})
+ ExpectThat(err, Error(Equals("")))
+
+ // Element type alias
+ type myType int
+ err = m.Matches([2][2]myType{[2]myType{17, 19}, [2]myType{23, 29}})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type [2][2]oglematchers_test.myType")))
+}
+
+func (t *IdenticalToTest) NonComparableArrays() {
+ x := [0]func(){}
+ f := func() { IdenticalTo(x) }
+ ExpectThat(f, Panics(HasSubstr("is not comparable")))
+}
+
+func (t *IdenticalToTest) ArraysOfNonComparableArrays() {
+ x := [0][0]func(){}
+ f := func() { IdenticalTo(x) }
+ ExpectThat(f, Panics(HasSubstr("is not comparable")))
+}
+
+func (t *IdenticalToTest) Strings() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo("taco")
+ ExpectEq("identical to <string> taco", m.Description())
+
+ // Identical value
+ err = m.Matches("ta" + "co")
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType string
+ err = m.Matches(myType("taco"))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) ComparableStructs() {
+ var m Matcher
+ var err error
+
+ type subStruct struct {
+ i int
+ }
+
+ type myStruct struct {
+ u uint
+ s subStruct
+ }
+
+ x := myStruct{17, subStruct{19}}
+ m = IdenticalTo(x)
+ ExpectEq("identical to <oglematchers_test.myStruct> {17 {19}}", m.Description())
+
+ // Identical value
+ err = m.Matches(myStruct{17, subStruct{19}})
+ ExpectEq(nil, err)
+
+ // Wrong outer field
+ err = m.Matches(myStruct{13, subStruct{19}})
+ ExpectThat(err, Error(Equals("")))
+
+ // Wrong inner field
+ err = m.Matches(myStruct{17, subStruct{23}})
+ ExpectThat(err, Error(Equals("")))
+
+ // Type alias
+ type myType myStruct
+ err = m.Matches(myType{17, subStruct{19}})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) NonComparableStructs() {
+ type subStruct struct {
+ s []int
+ }
+
+ type myStruct struct {
+ u uint
+ s subStruct
+ }
+
+ x := myStruct{17, subStruct{[]int{19}}}
+ f := func() { IdenticalTo(x) }
+ ExpectThat(f, Panics(AllOf(HasSubstr("IdenticalTo"), HasSubstr("comparable"))))
+}
+
+func (t *IdenticalToTest) NilUnsafePointer() {
+ var m Matcher
+ var err error
+
+ x := unsafe.Pointer(nil)
+ m = IdenticalTo(x)
+ ExpectEq(fmt.Sprintf("identical to <unsafe.Pointer> %v", x), m.Description())
+
+ // Identical value
+ err = m.Matches(unsafe.Pointer(nil))
+ ExpectEq(nil, err)
+
+ // Wrong value
+ j := 17
+ err = m.Matches(unsafe.Pointer(&j))
+ ExpectThat(err, Error(Equals("")))
+
+ // Type alias
+ type myType unsafe.Pointer
+ err = m.Matches(myType(unsafe.Pointer(nil)))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) NonNilUnsafePointer() {
+ var m Matcher
+ var err error
+
+ i := 17
+ x := unsafe.Pointer(&i)
+ m = IdenticalTo(x)
+ ExpectEq(fmt.Sprintf("identical to <unsafe.Pointer> %v", x), m.Description())
+
+ // Identical value
+ err = m.Matches(unsafe.Pointer(&i))
+ ExpectEq(nil, err)
+
+ // Nil value
+ err = m.Matches(unsafe.Pointer(nil))
+ ExpectThat(err, Error(Equals("")))
+
+ // Wrong value
+ j := 17
+ err = m.Matches(unsafe.Pointer(&j))
+ ExpectThat(err, Error(Equals("")))
+
+ // Type alias
+ type myType unsafe.Pointer
+ err = m.Matches(myType(unsafe.Pointer(&i)))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) IntAlias() {
+ var m Matcher
+ var err error
+
+ type intAlias int
+
+ m = IdenticalTo(intAlias(17))
+ ExpectEq("identical to <oglematchers_test.intAlias> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(intAlias(17))
+ ExpectEq(nil, err)
+
+ // Int
+ err = m.Matches(int(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_or_equal.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_or_equal.go
new file mode 100644
index 00000000000..8402cdeaf09
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_or_equal.go
@@ -0,0 +1,41 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// LessOrEqual returns a matcher that matches integer, floating point, or
+// string values v such that v <= x. Comparison is not defined between numeric
+// and string types, but is defined between all integer and floating point
+// types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// LessOrEqual will panic.
+func LessOrEqual(x interface{}) Matcher {
+ desc := fmt.Sprintf("less than or equal to %v", x)
+
+ // Special case: make it clear that strings are strings.
+ if reflect.TypeOf(x).Kind() == reflect.String {
+ desc = fmt.Sprintf("less than or equal to \"%s\"", x)
+ }
+
+ // Put LessThan last so that its error messages will be used in the event of
+ // failure.
+ return transformDescription(AnyOf(Equals(x), LessThan(x)), desc)
+}
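
LessOrEqual is built by composing AnyOf(Equals(x), LessThan(x)) and overriding the description, so a candidate matches when it is either equal to or strictly less than the limit. A short usage sketch of that API, not part of the vendored diff (expected outputs in the comments are taken from the test expectations below; the library is imported by its canonical path rather than the vendored one):

	package main

	import (
		"fmt"

		"github.com/jacobsa/oglematchers"
	)

	func main() {
		m := oglematchers.LessOrEqual(17)

		// The description names the limit.
		fmt.Println(m.Description()) // less than or equal to 17

		// A nil error means the candidate matched.
		fmt.Println(m.Matches(16) == nil) // true
		fmt.Println(m.Matches(17) == nil) // true
		fmt.Println(m.Matches(18) == nil) // false

		// Candidates of a non-numeric type are rejected with a fatal error.
		fmt.Println(m.Matches("17")) // which is not comparable
	}
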
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_or_equal_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_or_equal_test.go
new file mode 100644
index 00000000000..bdb4a8866fb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_or_equal_test.go
@@ -0,0 +1,1079 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+ "math"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type LessOrEqualTest struct {
+}
+
+func init() { RegisterTestSuite(&LessOrEqualTest{}) }
+
+type leTestCase struct {
+ candidate interface{}
+ expectedResult bool
+ shouldBeFatal bool
+ expectedError string
+}
+
+func (t *LessOrEqualTest) checkTestCases(matcher Matcher, cases []leTestCase) {
+ for i, c := range cases {
+ err := matcher.Matches(c.candidate)
+
+ ExpectThat(
+ (err == nil),
+ Equals(c.expectedResult),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ if err == nil {
+ continue
+ }
+
+ _, isFatal := err.(*FatalError)
+ ExpectEq(
+ c.shouldBeFatal,
+ isFatal,
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ ExpectThat(
+ err,
+ Error(Equals(c.expectedError)),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// Bad types
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessOrEqualTest) IntegerCandidateBadTypes() {
+ matcher := LessOrEqual(int(-150))
+
+ cases := []leTestCase{
+ leTestCase{true, false, true, "which is not comparable"},
+ leTestCase{uintptr(17), false, true, "which is not comparable"},
+ leTestCase{complex64(-151), false, true, "which is not comparable"},
+ leTestCase{complex128(-151), false, true, "which is not comparable"},
+ leTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ leTestCase{make(chan int), false, true, "which is not comparable"},
+ leTestCase{func() {}, false, true, "which is not comparable"},
+ leTestCase{map[int]int{}, false, true, "which is not comparable"},
+ leTestCase{&leTestCase{}, false, true, "which is not comparable"},
+ leTestCase{make([]int, 0), false, true, "which is not comparable"},
+ leTestCase{"-151", false, true, "which is not comparable"},
+ leTestCase{leTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) FloatCandidateBadTypes() {
+ matcher := LessOrEqual(float32(-150))
+
+ cases := []leTestCase{
+ leTestCase{true, false, true, "which is not comparable"},
+ leTestCase{uintptr(17), false, true, "which is not comparable"},
+ leTestCase{complex64(-151), false, true, "which is not comparable"},
+ leTestCase{complex128(-151), false, true, "which is not comparable"},
+ leTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ leTestCase{make(chan int), false, true, "which is not comparable"},
+ leTestCase{func() {}, false, true, "which is not comparable"},
+ leTestCase{map[int]int{}, false, true, "which is not comparable"},
+ leTestCase{&leTestCase{}, false, true, "which is not comparable"},
+ leTestCase{make([]int, 0), false, true, "which is not comparable"},
+ leTestCase{"-151", false, true, "which is not comparable"},
+ leTestCase{leTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) StringCandidateBadTypes() {
+ matcher := LessOrEqual("17")
+
+ cases := []leTestCase{
+ leTestCase{true, false, true, "which is not comparable"},
+ leTestCase{int(0), false, true, "which is not comparable"},
+ leTestCase{int8(0), false, true, "which is not comparable"},
+ leTestCase{int16(0), false, true, "which is not comparable"},
+ leTestCase{int32(0), false, true, "which is not comparable"},
+ leTestCase{int64(0), false, true, "which is not comparable"},
+ leTestCase{uint(0), false, true, "which is not comparable"},
+ leTestCase{uint8(0), false, true, "which is not comparable"},
+ leTestCase{uint16(0), false, true, "which is not comparable"},
+ leTestCase{uint32(0), false, true, "which is not comparable"},
+ leTestCase{uint64(0), false, true, "which is not comparable"},
+ leTestCase{uintptr(17), false, true, "which is not comparable"},
+ leTestCase{float32(0), false, true, "which is not comparable"},
+ leTestCase{float64(0), false, true, "which is not comparable"},
+ leTestCase{complex64(-151), false, true, "which is not comparable"},
+ leTestCase{complex128(-151), false, true, "which is not comparable"},
+ leTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ leTestCase{make(chan int), false, true, "which is not comparable"},
+ leTestCase{func() {}, false, true, "which is not comparable"},
+ leTestCase{map[int]int{}, false, true, "which is not comparable"},
+ leTestCase{&leTestCase{}, false, true, "which is not comparable"},
+ leTestCase{make([]int, 0), false, true, "which is not comparable"},
+ leTestCase{leTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) BadArgument() {
+ panicked := false
+
+ defer func() {
+ ExpectThat(panicked, Equals(true))
+ }()
+
+ defer func() {
+ if r := recover(); r != nil {
+ panicked = true
+ }
+ }()
+
+ LessOrEqual(complex128(0))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessOrEqualTest) NegativeIntegerLiteral() {
+ matcher := LessOrEqual(-150)
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to -150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-(1 << 30), true, false, ""},
+ leTestCase{-151, true, false, ""},
+ leTestCase{-150, true, false, ""},
+ leTestCase{-149, false, false, ""},
+ leTestCase{0, false, false, ""},
+ leTestCase{17, false, false, ""},
+
+ leTestCase{int(-(1 << 30)), true, false, ""},
+ leTestCase{int(-151), true, false, ""},
+ leTestCase{int(-150), true, false, ""},
+ leTestCase{int(-149), false, false, ""},
+ leTestCase{int(0), false, false, ""},
+ leTestCase{int(17), false, false, ""},
+
+ leTestCase{int8(-127), false, false, ""},
+ leTestCase{int8(0), false, false, ""},
+ leTestCase{int8(17), false, false, ""},
+
+ leTestCase{int16(-(1 << 14)), true, false, ""},
+ leTestCase{int16(-151), true, false, ""},
+ leTestCase{int16(-150), true, false, ""},
+ leTestCase{int16(-149), false, false, ""},
+ leTestCase{int16(0), false, false, ""},
+ leTestCase{int16(17), false, false, ""},
+
+ leTestCase{int32(-(1 << 30)), true, false, ""},
+ leTestCase{int32(-151), true, false, ""},
+ leTestCase{int32(-150), true, false, ""},
+ leTestCase{int32(-149), false, false, ""},
+ leTestCase{int32(0), false, false, ""},
+ leTestCase{int32(17), false, false, ""},
+
+ leTestCase{int64(-(1 << 30)), true, false, ""},
+ leTestCase{int64(-151), true, false, ""},
+ leTestCase{int64(-150), true, false, ""},
+ leTestCase{int64(-149), false, false, ""},
+ leTestCase{int64(0), false, false, ""},
+ leTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint((1 << 32) - 151), false, false, ""},
+ leTestCase{uint(0), false, false, ""},
+ leTestCase{uint(17), false, false, ""},
+
+ leTestCase{uint8(0), false, false, ""},
+ leTestCase{uint8(17), false, false, ""},
+ leTestCase{uint8(253), false, false, ""},
+
+ leTestCase{uint16((1 << 16) - 151), false, false, ""},
+ leTestCase{uint16(0), false, false, ""},
+ leTestCase{uint16(17), false, false, ""},
+
+ leTestCase{uint32((1 << 32) - 151), false, false, ""},
+ leTestCase{uint32(0), false, false, ""},
+ leTestCase{uint32(17), false, false, ""},
+
+ leTestCase{uint64((1 << 64) - 151), false, false, ""},
+ leTestCase{uint64(0), false, false, ""},
+ leTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-(1 << 30)), true, false, ""},
+ leTestCase{float32(-151), true, false, ""},
+ leTestCase{float32(-150.1), true, false, ""},
+ leTestCase{float32(-150), true, false, ""},
+ leTestCase{float32(-149.9), false, false, ""},
+ leTestCase{float32(0), false, false, ""},
+ leTestCase{float32(17), false, false, ""},
+ leTestCase{float32(160), false, false, ""},
+
+ leTestCase{float64(-(1 << 30)), true, false, ""},
+ leTestCase{float64(-151), true, false, ""},
+ leTestCase{float64(-150.1), true, false, ""},
+ leTestCase{float64(-150), true, false, ""},
+ leTestCase{float64(-149.9), false, false, ""},
+ leTestCase{float64(0), false, false, ""},
+ leTestCase{float64(17), false, false, ""},
+ leTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) ZeroIntegerLiteral() {
+ matcher := LessOrEqual(0)
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 0"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-(1 << 30), true, false, ""},
+ leTestCase{-1, true, false, ""},
+ leTestCase{0, true, false, ""},
+ leTestCase{1, false, false, ""},
+ leTestCase{17, false, false, ""},
+ leTestCase{(1 << 30), false, false, ""},
+
+ leTestCase{int(-(1 << 30)), true, false, ""},
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(0), true, false, ""},
+ leTestCase{int(1), false, false, ""},
+ leTestCase{int(17), false, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(0), true, false, ""},
+ leTestCase{int8(1), false, false, ""},
+
+ leTestCase{int16(-(1 << 14)), true, false, ""},
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(0), true, false, ""},
+ leTestCase{int16(1), false, false, ""},
+ leTestCase{int16(17), false, false, ""},
+
+ leTestCase{int32(-(1 << 30)), true, false, ""},
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(0), true, false, ""},
+ leTestCase{int32(1), false, false, ""},
+ leTestCase{int32(17), false, false, ""},
+
+ leTestCase{int64(-(1 << 30)), true, false, ""},
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(0), true, false, ""},
+ leTestCase{int64(1), false, false, ""},
+ leTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint((1 << 32) - 1), false, false, ""},
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(1), false, false, ""},
+ leTestCase{uint(17), false, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(1), false, false, ""},
+ leTestCase{uint8(17), false, false, ""},
+ leTestCase{uint8(253), false, false, ""},
+
+ leTestCase{uint16((1 << 16) - 1), false, false, ""},
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(1), false, false, ""},
+ leTestCase{uint16(17), false, false, ""},
+
+ leTestCase{uint32((1 << 32) - 1), false, false, ""},
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(1), false, false, ""},
+ leTestCase{uint32(17), false, false, ""},
+
+ leTestCase{uint64((1 << 64) - 1), false, false, ""},
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(1), false, false, ""},
+ leTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-(1 << 30)), true, false, ""},
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(-0.1), true, false, ""},
+ leTestCase{float32(-0.0), true, false, ""},
+ leTestCase{float32(0), true, false, ""},
+ leTestCase{float32(0.1), false, false, ""},
+ leTestCase{float32(17), false, false, ""},
+ leTestCase{float32(160), false, false, ""},
+
+ leTestCase{float64(-(1 << 30)), true, false, ""},
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(-0.1), true, false, ""},
+ leTestCase{float64(-0), true, false, ""},
+ leTestCase{float64(0), true, false, ""},
+ leTestCase{float64(0.1), false, false, ""},
+ leTestCase{float64(17), false, false, ""},
+ leTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) PositiveIntegerLiteral() {
+ matcher := LessOrEqual(150)
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{149, true, false, ""},
+ leTestCase{150, true, false, ""},
+ leTestCase{151, false, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(149), true, false, ""},
+ leTestCase{int(150), true, false, ""},
+ leTestCase{int(151), false, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(0), true, false, ""},
+ leTestCase{int8(17), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(149), true, false, ""},
+ leTestCase{int16(150), true, false, ""},
+ leTestCase{int16(151), false, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(149), true, false, ""},
+ leTestCase{int32(150), true, false, ""},
+ leTestCase{int32(151), false, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(149), true, false, ""},
+ leTestCase{int64(150), true, false, ""},
+ leTestCase{int64(151), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(149), true, false, ""},
+ leTestCase{uint(150), true, false, ""},
+ leTestCase{uint(151), false, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(127), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(149), true, false, ""},
+ leTestCase{uint16(150), true, false, ""},
+ leTestCase{uint16(151), false, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(149), true, false, ""},
+ leTestCase{uint32(150), true, false, ""},
+ leTestCase{uint32(151), false, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(149), true, false, ""},
+ leTestCase{uint64(150), true, false, ""},
+ leTestCase{uint64(151), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(149), true, false, ""},
+ leTestCase{float32(149.9), true, false, ""},
+ leTestCase{float32(150), true, false, ""},
+ leTestCase{float32(150.1), false, false, ""},
+ leTestCase{float32(151), false, false, ""},
+
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(149), true, false, ""},
+ leTestCase{float64(149.9), true, false, ""},
+ leTestCase{float64(150), true, false, ""},
+ leTestCase{float64(150.1), false, false, ""},
+ leTestCase{float64(151), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Float literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessOrEqualTest) NegativeFloatLiteral() {
+ matcher := LessOrEqual(-150.1)
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to -150.1"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-(1 << 30), true, false, ""},
+ leTestCase{-151, true, false, ""},
+ leTestCase{-150.1, true, false, ""},
+ leTestCase{-150, false, false, ""},
+ leTestCase{-149, false, false, ""},
+ leTestCase{0, false, false, ""},
+ leTestCase{17, false, false, ""},
+
+ leTestCase{int(-(1 << 30)), true, false, ""},
+ leTestCase{int(-151), true, false, ""},
+ leTestCase{int(-150), false, false, ""},
+ leTestCase{int(-149), false, false, ""},
+ leTestCase{int(0), false, false, ""},
+ leTestCase{int(17), false, false, ""},
+
+ leTestCase{int8(-127), false, false, ""},
+ leTestCase{int8(0), false, false, ""},
+ leTestCase{int8(17), false, false, ""},
+
+ leTestCase{int16(-(1 << 14)), true, false, ""},
+ leTestCase{int16(-151), true, false, ""},
+ leTestCase{int16(-150), false, false, ""},
+ leTestCase{int16(-149), false, false, ""},
+ leTestCase{int16(0), false, false, ""},
+ leTestCase{int16(17), false, false, ""},
+
+ leTestCase{int32(-(1 << 30)), true, false, ""},
+ leTestCase{int32(-151), true, false, ""},
+ leTestCase{int32(-150), false, false, ""},
+ leTestCase{int32(-149), false, false, ""},
+ leTestCase{int32(0), false, false, ""},
+ leTestCase{int32(17), false, false, ""},
+
+ leTestCase{int64(-(1 << 30)), true, false, ""},
+ leTestCase{int64(-151), true, false, ""},
+ leTestCase{int64(-150), false, false, ""},
+ leTestCase{int64(-149), false, false, ""},
+ leTestCase{int64(0), false, false, ""},
+ leTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint((1 << 32) - 151), false, false, ""},
+ leTestCase{uint(0), false, false, ""},
+ leTestCase{uint(17), false, false, ""},
+
+ leTestCase{uint8(0), false, false, ""},
+ leTestCase{uint8(17), false, false, ""},
+ leTestCase{uint8(253), false, false, ""},
+
+ leTestCase{uint16((1 << 16) - 151), false, false, ""},
+ leTestCase{uint16(0), false, false, ""},
+ leTestCase{uint16(17), false, false, ""},
+
+ leTestCase{uint32((1 << 32) - 151), false, false, ""},
+ leTestCase{uint32(0), false, false, ""},
+ leTestCase{uint32(17), false, false, ""},
+
+ leTestCase{uint64((1 << 64) - 151), false, false, ""},
+ leTestCase{uint64(0), false, false, ""},
+ leTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-(1 << 30)), true, false, ""},
+ leTestCase{float32(-151), true, false, ""},
+ leTestCase{float32(-150.2), true, false, ""},
+ leTestCase{float32(-150.1), true, false, ""},
+ leTestCase{float32(-150), false, false, ""},
+ leTestCase{float32(0), false, false, ""},
+ leTestCase{float32(17), false, false, ""},
+ leTestCase{float32(160), false, false, ""},
+
+ leTestCase{float64(-(1 << 30)), true, false, ""},
+ leTestCase{float64(-151), true, false, ""},
+ leTestCase{float64(-150.2), true, false, ""},
+ leTestCase{float64(-150.1), true, false, ""},
+ leTestCase{float64(-150), false, false, ""},
+ leTestCase{float64(0), false, false, ""},
+ leTestCase{float64(17), false, false, ""},
+ leTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) PositiveFloatLiteral() {
+ matcher := LessOrEqual(149.9)
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 149.9"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{149, true, false, ""},
+ leTestCase{149.9, true, false, ""},
+ leTestCase{150, false, false, ""},
+ leTestCase{151, false, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(149), true, false, ""},
+ leTestCase{int(150), false, false, ""},
+ leTestCase{int(151), false, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(0), true, false, ""},
+ leTestCase{int8(17), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(149), true, false, ""},
+ leTestCase{int16(150), false, false, ""},
+ leTestCase{int16(151), false, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(149), true, false, ""},
+ leTestCase{int32(150), false, false, ""},
+ leTestCase{int32(151), false, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(149), true, false, ""},
+ leTestCase{int64(150), false, false, ""},
+ leTestCase{int64(151), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(149), true, false, ""},
+ leTestCase{uint(150), false, false, ""},
+ leTestCase{uint(151), false, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(127), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(149), true, false, ""},
+ leTestCase{uint16(150), false, false, ""},
+ leTestCase{uint16(151), false, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(149), true, false, ""},
+ leTestCase{uint32(150), false, false, ""},
+ leTestCase{uint32(151), false, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(149), true, false, ""},
+ leTestCase{uint64(150), false, false, ""},
+ leTestCase{uint64(151), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(149), true, false, ""},
+ leTestCase{float32(149.8), true, false, ""},
+ leTestCase{float32(149.9), true, false, ""},
+ leTestCase{float32(150), false, false, ""},
+ leTestCase{float32(151), false, false, ""},
+
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(149), true, false, ""},
+ leTestCase{float64(149.8), true, false, ""},
+ leTestCase{float64(149.9), true, false, ""},
+ leTestCase{float64(150), false, false, ""},
+ leTestCase{float64(151), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Subtle cases
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessOrEqualTest) Int64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessOrEqual(int64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{kTwoTo25 + 0, true, false, ""},
+ leTestCase{kTwoTo25 + 1, true, false, ""},
+ leTestCase{kTwoTo25 + 2, false, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(0), true, false, ""},
+ leTestCase{int16(32767), true, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(255), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(65535), true, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ leTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) Int64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessOrEqual(int64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{1 << 30, true, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(math.MaxInt32), true, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(0), true, false, ""},
+ leTestCase{int16(32767), true, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(math.MaxInt32), true, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(math.MaxUint32), true, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(255), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(65535), true, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(math.MaxUint32), true, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Floating point.
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) Uint64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessOrEqual(uint64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{kTwoTo25 + 0, true, false, ""},
+ leTestCase{kTwoTo25 + 1, true, false, ""},
+ leTestCase{kTwoTo25 + 2, false, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(0), true, false, ""},
+ leTestCase{int16(32767), true, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(255), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(65535), true, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ leTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) Uint64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessOrEqual(uint64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{1 << 30, true, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(math.MaxInt32), true, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(0), true, false, ""},
+ leTestCase{int16(32767), true, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(math.MaxInt32), true, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(math.MaxUint32), true, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(255), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(65535), true, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(math.MaxUint32), true, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Floating point.
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) Float32AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessOrEqual(float32(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 3.3554432e+07"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo25 - 2), true, false, ""},
+ leTestCase{int64(kTwoTo25 - 1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 2), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 3), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 - 2), true, false, ""},
+ leTestCase{uint64(kTwoTo25 - 1), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 3), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) Float64AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessOrEqual(float64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 1.8014398509481984e+16"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo54 - 2), true, false, ""},
+ leTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 2), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 3), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 - 2), true, false, ""},
+ leTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 3), false, false, ""},
+
+ // Floating point.
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// String literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessOrEqualTest) EmptyString() {
+ matcher := LessOrEqual("")
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to \"\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ leTestCase{"", true, false, ""},
+ leTestCase{"\x00", false, false, ""},
+ leTestCase{"a", false, false, ""},
+ leTestCase{"foo", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) SingleNullByte() {
+ matcher := LessOrEqual("\x00")
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to \"\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ leTestCase{"", true, false, ""},
+ leTestCase{"\x00", true, false, ""},
+ leTestCase{"\x00\x00", false, false, ""},
+ leTestCase{"a", false, false, ""},
+ leTestCase{"foo", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) LongerString() {
+ matcher := LessOrEqual("foo\x00")
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to \"foo\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ leTestCase{"", true, false, ""},
+ leTestCase{"\x00", true, false, ""},
+ leTestCase{"bar", true, false, ""},
+ leTestCase{"foo", true, false, ""},
+ leTestCase{"foo\x00", true, false, ""},
+ leTestCase{"foo\x00\x00", false, false, ""},
+ leTestCase{"fooa", false, false, ""},
+ leTestCase{"qux", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
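
The "not exactly representable" and "above exact integer range" cases above all hinge on one IEEE-754 fact: float32 carries a 24-bit significand and float64 a 53-bit one, so integers just past 2^25 (respectively 2^54) collapse onto the same floating point value. A plain Go sketch of that collapse, illustrative only and not part of the vendored diff:

	package main

	import "fmt"

	func main() {
		const twoTo25 = 1 << 25
		const twoTo54 = 1 << 54

		// 2^25+1 and 2^25+2 both round to 2^25 as float32; 2^25+3 does not.
		fmt.Println(float32(twoTo25+1) == float32(twoTo25)) // true
		fmt.Println(float32(twoTo25+2) == float32(twoTo25)) // true
		fmt.Println(float32(twoTo25+3) == float32(twoTo25)) // false

		// Likewise 2^54+1 and 2^54+2 both round to 2^54 as float64.
		fmt.Println(float64(twoTo54+1) == float64(twoTo54)) // true
		fmt.Println(float64(twoTo54+2) == float64(twoTo54)) // true
		fmt.Println(float64(twoTo54+3) == float64(twoTo54)) // false
	}

This is why, in the tests above, float32(kTwoTo25 + 2) is accepted by LessOrEqual(int64(kTwoTo25 + 1)) while int64(kTwoTo25 + 2) is not.
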
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_than.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_than.go
new file mode 100644
index 00000000000..8258e45d99d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_than.go
@@ -0,0 +1,152 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "reflect"
+)
+
+// LessThan returns a matcher that matches integer, floating point, or string
+// values v such that v < x. Comparison is not defined between numeric and
+// string types, but is defined between all integer and floating point types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// LessThan will panic.
+func LessThan(x interface{}) Matcher {
+ v := reflect.ValueOf(x)
+ kind := v.Kind()
+
+ switch {
+ case isInteger(v):
+ case isFloat(v):
+ case kind == reflect.String:
+
+ default:
+ panic(fmt.Sprintf("LessThan: unexpected kind %v", kind))
+ }
+
+ return &lessThanMatcher{v}
+}
+
+type lessThanMatcher struct {
+ limit reflect.Value
+}
+
+func (m *lessThanMatcher) Description() string {
+ // Special case: make it clear that strings are strings.
+ if m.limit.Kind() == reflect.String {
+ return fmt.Sprintf("less than \"%s\"", m.limit.String())
+ }
+
+ return fmt.Sprintf("less than %v", m.limit.Interface())
+}
+
+func compareIntegers(v1, v2 reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(v1) && isSignedInteger(v2):
+ if v1.Int() < v2.Int() {
+ err = nil
+ }
+ return
+
+ case isSignedInteger(v1) && isUnsignedInteger(v2):
+ if v1.Int() < 0 || uint64(v1.Int()) < v2.Uint() {
+ err = nil
+ }
+ return
+
+ case isUnsignedInteger(v1) && isSignedInteger(v2):
+ if v1.Uint() <= math.MaxInt64 && int64(v1.Uint()) < v2.Int() {
+ err = nil
+ }
+ return
+
+ case isUnsignedInteger(v1) && isUnsignedInteger(v2):
+ if v1.Uint() < v2.Uint() {
+ err = nil
+ }
+ return
+ }
+
+ panic(fmt.Sprintf("compareIntegers: %v %v", v1, v2))
+}
+
+func getFloat(v reflect.Value) float64 {
+ switch {
+ case isSignedInteger(v):
+ return float64(v.Int())
+
+ case isUnsignedInteger(v):
+ return float64(v.Uint())
+
+ case isFloat(v):
+ return v.Float()
+ }
+
+ panic(fmt.Sprintf("getFloat: %v", v))
+}
+
+func (m *lessThanMatcher) Matches(c interface{}) (err error) {
+ v1 := reflect.ValueOf(c)
+ v2 := m.limit
+
+ err = errors.New("")
+
+ // Handle strings as a special case.
+ if v1.Kind() == reflect.String && v2.Kind() == reflect.String {
+ if v1.String() < v2.String() {
+ err = nil
+ }
+ return
+ }
+
+ // If we get here, we require that we are dealing with integers or floats.
+ v1Legal := isInteger(v1) || isFloat(v1)
+ v2Legal := isInteger(v2) || isFloat(v2)
+ if !v1Legal || !v2Legal {
+ err = NewFatalError("which is not comparable")
+ return
+ }
+
+ // Handle the various comparison cases.
+ switch {
+ // Both integers
+ case isInteger(v1) && isInteger(v2):
+ return compareIntegers(v1, v2)
+
+ // At least one float32
+ case v1.Kind() == reflect.Float32 || v2.Kind() == reflect.Float32:
+ if float32(getFloat(v1)) < float32(getFloat(v2)) {
+ err = nil
+ }
+ return
+
+ // At least one float64
+ case v1.Kind() == reflect.Float64 || v2.Kind() == reflect.Float64:
+ if getFloat(v1) < getFloat(v2) {
+ err = nil
+ }
+ return
+ }
+
+ // We shouldn't get here.
+ panic(fmt.Sprintf("lessThanMatcher.Matches: Shouldn't get here: %v %v", v1, v2))
+}
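
The Matches implementation above handles strings as a special case, rejects any other non-numeric candidate as "not comparable", and otherwise dispatches either to compareIntegers (which handles mixed signed/unsigned operands, guarding against overflow with math.MaxInt64) or to a float comparison at the precision of the narrowest float involved. A short usage sketch of the resulting behavior, not part of the vendored diff (expected results in the comments follow the test expectations below; the library is imported by its canonical path rather than the vendored one):

	package main

	import (
		"fmt"

		"github.com/jacobsa/oglematchers"
	)

	func main() {
		m := oglematchers.LessThan(int64(150))

		// Comparison is defined across all integer and floating point types.
		fmt.Println(m.Matches(uint8(17)) == nil)      // true: 17 < 150
		fmt.Println(m.Matches(float32(149.9)) == nil) // true
		fmt.Println(m.Matches(float64(150)) == nil)   // false: not strictly less

		// Strings and numbers are never comparable to each other.
		fmt.Println(m.Matches("17")) // which is not comparable
	}
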
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_than_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_than_test.go
new file mode 100644
index 00000000000..6ee6e9f2e63
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/less_than_test.go
@@ -0,0 +1,1059 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+ "math"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type LessThanTest struct {
+}
+
+func init() { RegisterTestSuite(&LessThanTest{}) }
+
+type ltTestCase struct {
+ candidate interface{}
+ expectedResult bool
+ shouldBeFatal bool
+ expectedError string
+}
+
+func (t *LessThanTest) checkTestCases(matcher Matcher, cases []ltTestCase) {
+ for i, c := range cases {
+ err := matcher.Matches(c.candidate)
+
+ ExpectThat(
+ (err == nil),
+ Equals(c.expectedResult),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ if err == nil {
+ continue
+ }
+
+ _, isFatal := err.(*FatalError)
+ ExpectEq(
+ c.shouldBeFatal,
+ isFatal,
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ ExpectThat(
+ err,
+ Error(Equals(c.expectedError)),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// Bad types
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessThanTest) IntegerCandidateBadTypes() {
+ matcher := LessThan(int(-150))
+
+ cases := []ltTestCase{
+ ltTestCase{true, false, true, "which is not comparable"},
+ ltTestCase{uintptr(17), false, true, "which is not comparable"},
+ ltTestCase{complex64(-151), false, true, "which is not comparable"},
+ ltTestCase{complex128(-151), false, true, "which is not comparable"},
+ ltTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ ltTestCase{make(chan int), false, true, "which is not comparable"},
+ ltTestCase{func() {}, false, true, "which is not comparable"},
+ ltTestCase{map[int]int{}, false, true, "which is not comparable"},
+ ltTestCase{&ltTestCase{}, false, true, "which is not comparable"},
+ ltTestCase{make([]int, 0), false, true, "which is not comparable"},
+ ltTestCase{"-151", false, true, "which is not comparable"},
+ ltTestCase{ltTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) FloatCandidateBadTypes() {
+ matcher := LessThan(float32(-150))
+
+ cases := []ltTestCase{
+ ltTestCase{true, false, true, "which is not comparable"},
+ ltTestCase{uintptr(17), false, true, "which is not comparable"},
+ ltTestCase{complex64(-151), false, true, "which is not comparable"},
+ ltTestCase{complex128(-151), false, true, "which is not comparable"},
+ ltTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ ltTestCase{make(chan int), false, true, "which is not comparable"},
+ ltTestCase{func() {}, false, true, "which is not comparable"},
+ ltTestCase{map[int]int{}, false, true, "which is not comparable"},
+ ltTestCase{&ltTestCase{}, false, true, "which is not comparable"},
+ ltTestCase{make([]int, 0), false, true, "which is not comparable"},
+ ltTestCase{"-151", false, true, "which is not comparable"},
+ ltTestCase{ltTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) StringCandidateBadTypes() {
+ matcher := LessThan("17")
+
+ cases := []ltTestCase{
+ ltTestCase{true, false, true, "which is not comparable"},
+ ltTestCase{int(0), false, true, "which is not comparable"},
+ ltTestCase{int8(0), false, true, "which is not comparable"},
+ ltTestCase{int16(0), false, true, "which is not comparable"},
+ ltTestCase{int32(0), false, true, "which is not comparable"},
+ ltTestCase{int64(0), false, true, "which is not comparable"},
+ ltTestCase{uint(0), false, true, "which is not comparable"},
+ ltTestCase{uint8(0), false, true, "which is not comparable"},
+ ltTestCase{uint16(0), false, true, "which is not comparable"},
+ ltTestCase{uint32(0), false, true, "which is not comparable"},
+ ltTestCase{uint64(0), false, true, "which is not comparable"},
+ ltTestCase{uintptr(17), false, true, "which is not comparable"},
+ ltTestCase{float32(0), false, true, "which is not comparable"},
+ ltTestCase{float64(0), false, true, "which is not comparable"},
+ ltTestCase{complex64(-151), false, true, "which is not comparable"},
+ ltTestCase{complex128(-151), false, true, "which is not comparable"},
+ ltTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ ltTestCase{make(chan int), false, true, "which is not comparable"},
+ ltTestCase{func() {}, false, true, "which is not comparable"},
+ ltTestCase{map[int]int{}, false, true, "which is not comparable"},
+ ltTestCase{&ltTestCase{}, false, true, "which is not comparable"},
+ ltTestCase{make([]int, 0), false, true, "which is not comparable"},
+ ltTestCase{ltTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) BadArgument() {
+ panicked := false
+
+ defer func() {
+ ExpectThat(panicked, Equals(true))
+ }()
+
+ defer func() {
+ if r := recover(); r != nil {
+ panicked = true
+ }
+ }()
+
+ LessThan(complex128(0))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessThanTest) NegativeIntegerLiteral() {
+ matcher := LessThan(-150)
+ desc := matcher.Description()
+ expectedDesc := "less than -150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-(1 << 30), true, false, ""},
+ ltTestCase{-151, true, false, ""},
+ ltTestCase{-150, false, false, ""},
+ ltTestCase{0, false, false, ""},
+ ltTestCase{17, false, false, ""},
+
+ ltTestCase{int(-(1 << 30)), true, false, ""},
+ ltTestCase{int(-151), true, false, ""},
+ ltTestCase{int(-150), false, false, ""},
+ ltTestCase{int(0), false, false, ""},
+ ltTestCase{int(17), false, false, ""},
+
+ ltTestCase{int8(-127), false, false, ""},
+ ltTestCase{int8(0), false, false, ""},
+ ltTestCase{int8(17), false, false, ""},
+
+ ltTestCase{int16(-(1 << 14)), true, false, ""},
+ ltTestCase{int16(-151), true, false, ""},
+ ltTestCase{int16(-150), false, false, ""},
+ ltTestCase{int16(0), false, false, ""},
+ ltTestCase{int16(17), false, false, ""},
+
+ ltTestCase{int32(-(1 << 30)), true, false, ""},
+ ltTestCase{int32(-151), true, false, ""},
+ ltTestCase{int32(-150), false, false, ""},
+ ltTestCase{int32(0), false, false, ""},
+ ltTestCase{int32(17), false, false, ""},
+
+ ltTestCase{int64(-(1 << 30)), true, false, ""},
+ ltTestCase{int64(-151), true, false, ""},
+ ltTestCase{int64(-150), false, false, ""},
+ ltTestCase{int64(0), false, false, ""},
+ ltTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint((1 << 32) - 151), false, false, ""},
+ ltTestCase{uint(0), false, false, ""},
+ ltTestCase{uint(17), false, false, ""},
+
+ ltTestCase{uint8(0), false, false, ""},
+ ltTestCase{uint8(17), false, false, ""},
+ ltTestCase{uint8(253), false, false, ""},
+
+ ltTestCase{uint16((1 << 16) - 151), false, false, ""},
+ ltTestCase{uint16(0), false, false, ""},
+ ltTestCase{uint16(17), false, false, ""},
+
+ ltTestCase{uint32((1 << 32) - 151), false, false, ""},
+ ltTestCase{uint32(0), false, false, ""},
+ ltTestCase{uint32(17), false, false, ""},
+
+ ltTestCase{uint64((1 << 64) - 151), false, false, ""},
+ ltTestCase{uint64(0), false, false, ""},
+ ltTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-(1 << 30)), true, false, ""},
+ ltTestCase{float32(-151), true, false, ""},
+ ltTestCase{float32(-150.1), true, false, ""},
+ ltTestCase{float32(-150), false, false, ""},
+ ltTestCase{float32(-149.9), false, false, ""},
+ ltTestCase{float32(0), false, false, ""},
+ ltTestCase{float32(17), false, false, ""},
+ ltTestCase{float32(160), false, false, ""},
+
+ ltTestCase{float64(-(1 << 30)), true, false, ""},
+ ltTestCase{float64(-151), true, false, ""},
+ ltTestCase{float64(-150.1), true, false, ""},
+ ltTestCase{float64(-150), false, false, ""},
+ ltTestCase{float64(-149.9), false, false, ""},
+ ltTestCase{float64(0), false, false, ""},
+ ltTestCase{float64(17), false, false, ""},
+ ltTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) ZeroIntegerLiteral() {
+ matcher := LessThan(0)
+ desc := matcher.Description()
+ expectedDesc := "less than 0"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-(1 << 30), true, false, ""},
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{0, false, false, ""},
+ ltTestCase{1, false, false, ""},
+ ltTestCase{17, false, false, ""},
+ ltTestCase{(1 << 30), false, false, ""},
+
+ ltTestCase{int(-(1 << 30)), true, false, ""},
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(0), false, false, ""},
+ ltTestCase{int(1), false, false, ""},
+ ltTestCase{int(17), false, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(0), false, false, ""},
+ ltTestCase{int8(1), false, false, ""},
+
+ ltTestCase{int16(-(1 << 14)), true, false, ""},
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(0), false, false, ""},
+ ltTestCase{int16(1), false, false, ""},
+ ltTestCase{int16(17), false, false, ""},
+
+ ltTestCase{int32(-(1 << 30)), true, false, ""},
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(0), false, false, ""},
+ ltTestCase{int32(1), false, false, ""},
+ ltTestCase{int32(17), false, false, ""},
+
+ ltTestCase{int64(-(1 << 30)), true, false, ""},
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(0), false, false, ""},
+ ltTestCase{int64(1), false, false, ""},
+ ltTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint((1 << 32) - 1), false, false, ""},
+ ltTestCase{uint(0), false, false, ""},
+ ltTestCase{uint(17), false, false, ""},
+
+ ltTestCase{uint8(0), false, false, ""},
+ ltTestCase{uint8(17), false, false, ""},
+ ltTestCase{uint8(253), false, false, ""},
+
+ ltTestCase{uint16((1 << 16) - 1), false, false, ""},
+ ltTestCase{uint16(0), false, false, ""},
+ ltTestCase{uint16(17), false, false, ""},
+
+ ltTestCase{uint32((1 << 32) - 1), false, false, ""},
+ ltTestCase{uint32(0), false, false, ""},
+ ltTestCase{uint32(17), false, false, ""},
+
+ ltTestCase{uint64((1 << 64) - 1), false, false, ""},
+ ltTestCase{uint64(0), false, false, ""},
+ ltTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-(1 << 30)), true, false, ""},
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(-0.1), true, false, ""},
+ ltTestCase{float32(-0.0), false, false, ""},
+ ltTestCase{float32(0), false, false, ""},
+ ltTestCase{float32(0.1), false, false, ""},
+ ltTestCase{float32(17), false, false, ""},
+ ltTestCase{float32(160), false, false, ""},
+
+ ltTestCase{float64(-(1 << 30)), true, false, ""},
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(-0.1), true, false, ""},
+ ltTestCase{float64(-0), false, false, ""},
+ ltTestCase{float64(0), false, false, ""},
+ ltTestCase{float64(17), false, false, ""},
+ ltTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) PositiveIntegerLiteral() {
+ matcher := LessThan(150)
+ desc := matcher.Description()
+ expectedDesc := "less than 150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{149, true, false, ""},
+ ltTestCase{150, false, false, ""},
+ ltTestCase{151, false, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(149), true, false, ""},
+ ltTestCase{int(150), false, false, ""},
+ ltTestCase{int(151), false, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(0), true, false, ""},
+ ltTestCase{int8(17), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(149), true, false, ""},
+ ltTestCase{int16(150), false, false, ""},
+ ltTestCase{int16(151), false, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(149), true, false, ""},
+ ltTestCase{int32(150), false, false, ""},
+ ltTestCase{int32(151), false, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(149), true, false, ""},
+ ltTestCase{int64(150), false, false, ""},
+ ltTestCase{int64(151), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(149), true, false, ""},
+ ltTestCase{uint(150), false, false, ""},
+ ltTestCase{uint(151), false, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(127), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(149), true, false, ""},
+ ltTestCase{uint16(150), false, false, ""},
+ ltTestCase{uint16(151), false, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(149), true, false, ""},
+ ltTestCase{uint32(150), false, false, ""},
+ ltTestCase{uint32(151), false, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(149), true, false, ""},
+ ltTestCase{uint64(150), false, false, ""},
+ ltTestCase{uint64(151), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(149), true, false, ""},
+ ltTestCase{float32(149.9), true, false, ""},
+ ltTestCase{float32(150), false, false, ""},
+ ltTestCase{float32(150.1), false, false, ""},
+ ltTestCase{float32(151), false, false, ""},
+
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(149), true, false, ""},
+ ltTestCase{float64(149.9), true, false, ""},
+ ltTestCase{float64(150), false, false, ""},
+ ltTestCase{float64(150.1), false, false, ""},
+ ltTestCase{float64(151), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Float literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessThanTest) NegativeFloatLiteral() {
+ matcher := LessThan(-150.1)
+ desc := matcher.Description()
+ expectedDesc := "less than -150.1"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-(1 << 30), true, false, ""},
+ ltTestCase{-151, true, false, ""},
+ ltTestCase{-150, false, false, ""},
+ ltTestCase{0, false, false, ""},
+ ltTestCase{17, false, false, ""},
+
+ ltTestCase{int(-(1 << 30)), true, false, ""},
+ ltTestCase{int(-151), true, false, ""},
+ ltTestCase{int(-150), false, false, ""},
+ ltTestCase{int(0), false, false, ""},
+ ltTestCase{int(17), false, false, ""},
+
+ ltTestCase{int8(-127), false, false, ""},
+ ltTestCase{int8(0), false, false, ""},
+ ltTestCase{int8(17), false, false, ""},
+
+ ltTestCase{int16(-(1 << 14)), true, false, ""},
+ ltTestCase{int16(-151), true, false, ""},
+ ltTestCase{int16(-150), false, false, ""},
+ ltTestCase{int16(0), false, false, ""},
+ ltTestCase{int16(17), false, false, ""},
+
+ ltTestCase{int32(-(1 << 30)), true, false, ""},
+ ltTestCase{int32(-151), true, false, ""},
+ ltTestCase{int32(-150), false, false, ""},
+ ltTestCase{int32(0), false, false, ""},
+ ltTestCase{int32(17), false, false, ""},
+
+ ltTestCase{int64(-(1 << 30)), true, false, ""},
+ ltTestCase{int64(-151), true, false, ""},
+ ltTestCase{int64(-150), false, false, ""},
+ ltTestCase{int64(0), false, false, ""},
+ ltTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint((1 << 32) - 151), false, false, ""},
+ ltTestCase{uint(0), false, false, ""},
+ ltTestCase{uint(17), false, false, ""},
+
+ ltTestCase{uint8(0), false, false, ""},
+ ltTestCase{uint8(17), false, false, ""},
+ ltTestCase{uint8(253), false, false, ""},
+
+ ltTestCase{uint16((1 << 16) - 151), false, false, ""},
+ ltTestCase{uint16(0), false, false, ""},
+ ltTestCase{uint16(17), false, false, ""},
+
+ ltTestCase{uint32((1 << 32) - 151), false, false, ""},
+ ltTestCase{uint32(0), false, false, ""},
+ ltTestCase{uint32(17), false, false, ""},
+
+ ltTestCase{uint64((1 << 64) - 151), false, false, ""},
+ ltTestCase{uint64(0), false, false, ""},
+ ltTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-(1 << 30)), true, false, ""},
+ ltTestCase{float32(-151), true, false, ""},
+ ltTestCase{float32(-150.2), true, false, ""},
+ ltTestCase{float32(-150.1), false, false, ""},
+ ltTestCase{float32(-150), false, false, ""},
+ ltTestCase{float32(0), false, false, ""},
+ ltTestCase{float32(17), false, false, ""},
+ ltTestCase{float32(160), false, false, ""},
+
+ ltTestCase{float64(-(1 << 30)), true, false, ""},
+ ltTestCase{float64(-151), true, false, ""},
+ ltTestCase{float64(-150.2), true, false, ""},
+ ltTestCase{float64(-150.1), false, false, ""},
+ ltTestCase{float64(-150), false, false, ""},
+ ltTestCase{float64(0), false, false, ""},
+ ltTestCase{float64(17), false, false, ""},
+ ltTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) PositiveFloatLiteral() {
+ matcher := LessThan(149.9)
+ desc := matcher.Description()
+ expectedDesc := "less than 149.9"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{149, true, false, ""},
+ ltTestCase{150, false, false, ""},
+ ltTestCase{151, false, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(149), true, false, ""},
+ ltTestCase{int(150), false, false, ""},
+ ltTestCase{int(151), false, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(0), true, false, ""},
+ ltTestCase{int8(17), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(149), true, false, ""},
+ ltTestCase{int16(150), false, false, ""},
+ ltTestCase{int16(151), false, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(149), true, false, ""},
+ ltTestCase{int32(150), false, false, ""},
+ ltTestCase{int32(151), false, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(149), true, false, ""},
+ ltTestCase{int64(150), false, false, ""},
+ ltTestCase{int64(151), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(149), true, false, ""},
+ ltTestCase{uint(150), false, false, ""},
+ ltTestCase{uint(151), false, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(127), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(149), true, false, ""},
+ ltTestCase{uint16(150), false, false, ""},
+ ltTestCase{uint16(151), false, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(149), true, false, ""},
+ ltTestCase{uint32(150), false, false, ""},
+ ltTestCase{uint32(151), false, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(149), true, false, ""},
+ ltTestCase{uint64(150), false, false, ""},
+ ltTestCase{uint64(151), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(149), true, false, ""},
+ ltTestCase{float32(149.8), true, false, ""},
+ ltTestCase{float32(149.9), false, false, ""},
+ ltTestCase{float32(150), false, false, ""},
+ ltTestCase{float32(151), false, false, ""},
+
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(149), true, false, ""},
+ ltTestCase{float64(149.8), true, false, ""},
+ ltTestCase{float64(149.9), false, false, ""},
+ ltTestCase{float64(150), false, false, ""},
+ ltTestCase{float64(151), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Subtle cases
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessThanTest) Int64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessThan(int64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{kTwoTo25 + 0, true, false, ""},
+ ltTestCase{kTwoTo25 + 1, false, false, ""},
+ ltTestCase{kTwoTo25 + 2, false, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(0), true, false, ""},
+ ltTestCase{int16(32767), true, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int32(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(255), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(65535), true, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ ltTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) Int64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessThan(int64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{1 << 30, true, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(math.MaxInt32), true, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(0), true, false, ""},
+ ltTestCase{int16(32767), true, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(math.MaxInt32), true, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ ltTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ ltTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(math.MaxUint32), true, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(255), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(65535), true, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(math.MaxUint32), true, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) Uint64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessThan(uint64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{kTwoTo25 + 0, true, false, ""},
+ ltTestCase{kTwoTo25 + 1, false, false, ""},
+ ltTestCase{kTwoTo25 + 2, false, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(0), true, false, ""},
+ ltTestCase{int16(32767), true, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int32(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(255), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(65535), true, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ ltTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) Uint64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessThan(uint64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{1 << 30, true, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(math.MaxInt32), true, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(0), true, false, ""},
+ ltTestCase{int16(32767), true, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(math.MaxInt32), true, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ ltTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ ltTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(math.MaxUint32), true, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(255), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(65535), true, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(math.MaxUint32), true, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) Float32AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessThan(float32(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 3.3554432e+07"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{int64(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 3), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 3), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) Float64AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessThan(float64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 1.8014398509481984e+16"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo54 - 2), true, false, ""},
+ ltTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 2), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 3), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 - 2), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 3), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// String literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessThanTest) EmptyString() {
+ matcher := LessThan("")
+ desc := matcher.Description()
+ expectedDesc := "less than \"\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ ltTestCase{"", false, false, ""},
+ ltTestCase{"\x00", false, false, ""},
+ ltTestCase{"a", false, false, ""},
+ ltTestCase{"foo", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) SingleNullByte() {
+ matcher := LessThan("\x00")
+ desc := matcher.Description()
+ expectedDesc := "less than \"\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ ltTestCase{"", true, false, ""},
+ ltTestCase{"\x00", false, false, ""},
+ ltTestCase{"a", false, false, ""},
+ ltTestCase{"foo", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) LongerString() {
+ matcher := LessThan("foo\x00")
+ desc := matcher.Description()
+ expectedDesc := "less than \"foo\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ ltTestCase{"", true, false, ""},
+ ltTestCase{"\x00", true, false, ""},
+ ltTestCase{"bar", true, false, ""},
+ ltTestCase{"foo", true, false, ""},
+ ltTestCase{"foo\x00", false, false, ""},
+ ltTestCase{"fooa", false, false, ""},
+ ltTestCase{"qux", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
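
The "not exactly representable" test methods above hinge on one floating-point fact: float32 has a 24-bit significand and float64 a 53-bit one, so the integers bracketing 2^25 (for float32) and 2^54 (for float64) collapse onto a single representable value after conversion. A minimal standalone sketch (editorial illustration only, not part of the vendored test file; it relies on nothing beyond the standard library) demonstrating the collapse the comments describe:

    package main

    import "fmt"

    func main() {
        const kTwoTo25 = 1 << 25
        // Every integer in [2^25-1, 2^25+2] rounds to the same float32 value (2^25).
        fmt.Println(float32(kTwoTo25-1) == float32(kTwoTo25)) // true
        fmt.Println(float32(kTwoTo25+1) == float32(kTwoTo25)) // true
        fmt.Println(float32(kTwoTo25+2) == float32(kTwoTo25)) // true

        const kTwoTo54 = 1 << 54
        // The analogous collapse for float64 happens around 2^54.
        fmt.Println(float64(kTwoTo54-1) == float64(kTwoTo54)) // true
        fmt.Println(float64(kTwoTo54+1) == float64(kTwoTo54)) // true
        fmt.Println(float64(kTwoTo54+2) == float64(kTwoTo54)) // true
    }

This is why, for example, LessThan(int64(kTwoTo25+1)) is expected to reject float32(kTwoTo25-1): once a float32 is in the mix, both sides land on the same value.
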
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matcher.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matcher.go
new file mode 100644
index 00000000000..8cf1cbbc963
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matcher.go
@@ -0,0 +1,86 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package oglematchers provides a set of matchers useful in a testing or
+// mocking framework. These matchers are inspired by and mostly compatible with
+// Google Test for C++ and Google JS Test.
+//
+// This package is used by github.com/jacobsa/ogletest and
+// github.com/jacobsa/oglemock, which may be more directly useful if you're not
+// writing your own testing package or defining your own matchers.
+package oglematchers
+
+// A Matcher is some predicate implicitly defining a set of values that it
+// matches. For example, GreaterThan(17) matches all numeric values greater
+// than 17, and HasSubstr("taco") matches all strings with the substring
+// "taco".
+//
+// Matchers are typically exposed to tests via constructor functions like
+// HasSubstr. In order to implement such a function you can either define your
+// own matcher type or use NewMatcher.
+type Matcher interface {
+	// Check whether the supplied value belongs to the set defined by the
+ // matcher. Return a non-nil error if and only if it does not.
+ //
+ // The error describes why the value doesn't match. The error text is a
+ // relative clause that is suitable for being placed after the value. For
+ // example, a predicate that matches strings with a particular substring may,
+ // when presented with a numerical value, return the following error text:
+ //
+ // "which is not a string"
+ //
+ // Then the failure message may look like:
+ //
+ // Expected: has substring "taco"
+ // Actual: 17, which is not a string
+ //
+ // If the error is self-apparent based on the description of the matcher, the
+	// error text may be empty (but the error must still be non-nil). For example:
+ //
+ // Expected: 17
+ // Actual: 19
+ //
+ // If you are implementing a new matcher, see also the documentation on
+ // FatalError.
+ Matches(candidate interface{}) error
+
+ // Description returns a string describing the property that values matching
+ // this matcher have, as a verb phrase where the subject is the value. For
+	// example, "is greater than 17" or "has substring "taco"".
+ Description() string
+}
+
+// FatalError is an implementation of the error interface that may be returned
+// from matchers, indicating the error should be propagated. Returning a
+// *FatalError indicates that the matcher doesn't process values of the
+// supplied type, or otherwise doesn't know how to handle the value.
+//
+// For example, if GreaterThan(17) returned false for the value "taco" without
+// a fatal error, then Not(GreaterThan(17)) would return true. This is
+// technically correct, but is surprising and may mask failures where the wrong
+// sort of matcher is accidentally used. Instead, GreaterThan(17) can return a
+// fatal error, which will be propagated by Not().
+type FatalError struct {
+ errorText string
+}
+
+// NewFatalError creates a FatalError struct with the supplied error text.
+func NewFatalError(s string) *FatalError {
+ return &FatalError{s}
+}
+
+func (e *FatalError) Error() string {
+ return e.errorText
+}
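
As a concrete illustration of the Matcher contract documented above, here is a hypothetical custom matcher (hasPrefixMatcher is an editorial example, not part of the package): it returns nil on a match, a non-fatal error phrased as a relative clause on an ordinary mismatch, and a *FatalError when the candidate has a type it cannot handle.

    package main

    import (
        "fmt"
        "strings"

        "github.com/jacobsa/oglematchers"
    )

    // hasPrefixMatcher matches strings that begin with a fixed prefix.
    type hasPrefixMatcher struct {
        prefix string
    }

    func (m *hasPrefixMatcher) Matches(c interface{}) error {
        s, ok := c.(string)
        if !ok {
            // Fatal: this matcher does not process non-string values at all.
            return oglematchers.NewFatalError("which is not a string")
        }
        if !strings.HasPrefix(s, m.prefix) {
            // Non-fatal: an ordinary mismatch, worded as a relative clause.
            return fmt.Errorf("which does not begin with %q", m.prefix)
        }
        return nil
    }

    func (m *hasPrefixMatcher) Description() string {
        return fmt.Sprintf("has prefix %q", m.prefix)
    }

    func main() {
        var m oglematchers.Matcher = &hasPrefixMatcher{prefix: "taco"}
        fmt.Println(m.Matches("taco stand")) // <nil>
        fmt.Println(m.Matches("burrito"))    // which does not begin with "taco"
        fmt.Println(m.Matches(17))           // which is not a string (fatal)
    }
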
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matches_regexp.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matches_regexp.go
new file mode 100644
index 00000000000..1ed63f30c4e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matches_regexp.go
@@ -0,0 +1,69 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "regexp"
+)
+
+// MatchesRegexp returns a matcher that matches strings and byte slices whose
+// contents match the supplied regular expression. The semantics are those of
+// regexp.Match. In particular, that means the match is not implicitly anchored
+// to the ends of the string: MatchesRegexp("bar") will match "foo bar baz".
+func MatchesRegexp(pattern string) Matcher {
+ re, err := regexp.Compile(pattern)
+ if err != nil {
+ panic("MatchesRegexp: " + err.Error())
+ }
+
+ return &matchesRegexpMatcher{re}
+}
+
+type matchesRegexpMatcher struct {
+ re *regexp.Regexp
+}
+
+func (m *matchesRegexpMatcher) Description() string {
+ return fmt.Sprintf("matches regexp \"%s\"", m.re.String())
+}
+
+func (m *matchesRegexpMatcher) Matches(c interface{}) (err error) {
+ v := reflect.ValueOf(c)
+ isString := v.Kind() == reflect.String
+	isByteSlice := v.Kind() == reflect.Slice && v.Type().Elem().Kind() == reflect.Uint8
+
+ err = errors.New("")
+
+ switch {
+ case isString:
+ if m.re.MatchString(v.String()) {
+ err = nil
+ }
+
+ case isByteSlice:
+ if m.re.Match(v.Bytes()) {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not a string or []byte")
+ }
+
+ return
+}
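
A short usage sketch for MatchesRegexp (editorial illustration; the pattern mirrors the one used in the tests that follow):

    package main

    import (
        "fmt"

        "github.com/jacobsa/oglematchers"
    )

    func main() {
        m := oglematchers.MatchesRegexp("fo[op]\\s+x")

        // nil means the candidate matched; the match is unanchored, so
        // surrounding text is fine.
        fmt.Println(m.Matches("blah blah foo x blah")) // <nil>

        // An ordinary mismatch yields a non-fatal error with empty text.
        fmt.Println(m.Matches("fopx") == nil) // false

        // Anything that is neither a string nor a []byte is a fatal error.
        fmt.Println(m.Matches(17)) // which is not a string or []byte
    }
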
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matches_regexp_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matches_regexp_test.go
new file mode 100644
index 00000000000..7b69ce80105
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/matches_regexp_test.go
@@ -0,0 +1,92 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type MatchesRegexpTest struct {
+}
+
+func init() { RegisterTestSuite(&MatchesRegexpTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *MatchesRegexpTest) Description() {
+ m := MatchesRegexp("foo.*bar")
+ ExpectEq("matches regexp \"foo.*bar\"", m.Description())
+}
+
+func (t *MatchesRegexpTest) InvalidRegexp() {
+ ExpectThat(
+ func() { MatchesRegexp("(foo") },
+ Panics(HasSubstr("missing closing )")))
+}
+
+func (t *MatchesRegexpTest) CandidateIsNil() {
+ m := MatchesRegexp("")
+ err := m.Matches(nil)
+
+ ExpectThat(err, Error(Equals("which is not a string or []byte")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *MatchesRegexpTest) CandidateIsInteger() {
+ m := MatchesRegexp("")
+ err := m.Matches(17)
+
+ ExpectThat(err, Error(Equals("which is not a string or []byte")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *MatchesRegexpTest) NonMatchingCandidates() {
+ m := MatchesRegexp("fo[op]\\s+x")
+ var err error
+
+ err = m.Matches("fon x")
+ ExpectThat(err, Error(Equals("")))
+ ExpectFalse(isFatal(err))
+
+ err = m.Matches("fopx")
+ ExpectThat(err, Error(Equals("")))
+ ExpectFalse(isFatal(err))
+
+ err = m.Matches("fop ")
+ ExpectThat(err, Error(Equals("")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *MatchesRegexpTest) MatchingCandidates() {
+ m := MatchesRegexp("fo[op]\\s+x")
+ var err error
+
+ err = m.Matches("foo x")
+ ExpectEq(nil, err)
+
+ err = m.Matches("fop x")
+ ExpectEq(nil, err)
+
+ err = m.Matches("blah blah foo x blah blah")
+ ExpectEq(nil, err)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/new_matcher.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/new_matcher.go
new file mode 100644
index 00000000000..c9d8398ee63
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/new_matcher.go
@@ -0,0 +1,43 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+// NewMatcher creates a matcher with the given description and predicate
+// function, which will be invoked to handle calls to Matches.
+//
+// Using this constructor may be a convenience over defining your own type that
+// implements Matcher if you do not need any logic in your Description method.
+func NewMatcher(
+ predicate func(interface{}) error,
+ description string) Matcher {
+ return &predicateMatcher{
+ predicate: predicate,
+ description: description,
+ }
+}
+
+type predicateMatcher struct {
+ predicate func(interface{}) error
+ description string
+}
+
+func (pm *predicateMatcher) Matches(c interface{}) error {
+ return pm.predicate(c)
+}
+
+func (pm *predicateMatcher) Description() string {
+ return pm.description
+}
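
A usage sketch for NewMatcher (isEven is a hypothetical matcher name, not part of the package):

    package main

    import (
        "errors"
        "fmt"

        "github.com/jacobsa/oglematchers"
    )

    func main() {
        isEven := oglematchers.NewMatcher(
            func(c interface{}) error {
                n, ok := c.(int)
                if !ok {
                    return oglematchers.NewFatalError("which is not an int")
                }
                if n%2 != 0 {
                    return errors.New("which is odd")
                }
                return nil
            },
            "is an even int")

        fmt.Println(isEven.Description()) // is an even int
        fmt.Println(isEven.Matches(4))    // <nil>
        fmt.Println(isEven.Matches(5))    // which is odd
        fmt.Println(isEven.Matches("x"))  // which is not an int (fatal)
    }
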
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/not.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/not.go
new file mode 100644
index 00000000000..623789fe28a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/not.go
@@ -0,0 +1,53 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+)
+
+// Not returns a matcher that inverts the set of values matched by the wrapped
+// matcher. It does not transform the result for values for which the wrapped
+// matcher returns a fatal error.
+func Not(m Matcher) Matcher {
+ return &notMatcher{m}
+}
+
+type notMatcher struct {
+ wrapped Matcher
+}
+
+func (m *notMatcher) Matches(c interface{}) (err error) {
+ err = m.wrapped.Matches(c)
+
+ // Did the wrapped matcher say yes?
+ if err == nil {
+ return errors.New("")
+ }
+
+ // Did the wrapped matcher return a fatal error?
+ if _, isFatal := err.(*FatalError); isFatal {
+ return err
+ }
+
+ // The wrapped matcher returned a non-fatal error.
+ return nil
+}
+
+func (m *notMatcher) Description() string {
+ return fmt.Sprintf("not(%s)", m.wrapped.Description())
+}
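
A usage sketch for Not, combined with LessThan from the same package (as exercised in the tests earlier in this diff):

    package main

    import (
        "fmt"

        "github.com/jacobsa/oglematchers"
    )

    func main() {
        m := oglematchers.Not(oglematchers.LessThan(10))

        fmt.Println(m.Description())     // not(less than 10)
        fmt.Println(m.Matches(17))       // <nil>: 17 is not less than 10
        fmt.Println(m.Matches(3) == nil) // false: 3 is less than 10, so Not rejects it

        // A fatal error from the wrapped matcher (e.g. for an incomparable
        // candidate) would be passed through unchanged rather than inverted.
    }
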
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/not_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/not_test.go
new file mode 100644
index 00000000000..d5a12967524
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/not_test.go
@@ -0,0 +1,108 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "errors"
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+ "testing"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type fakeMatcher struct {
+ matchFunc func(interface{}) error
+ description string
+}
+
+func (m *fakeMatcher) Matches(c interface{}) error {
+ return m.matchFunc(c)
+}
+
+func (m *fakeMatcher) Description() string {
+ return m.description
+}
+
+type NotTest struct {
+}
+
+func init() { RegisterTestSuite(&NotTest{}) }
+func TestOgletest(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *NotTest) CallsWrapped() {
+ var suppliedCandidate interface{}
+ matchFunc := func(c interface{}) error {
+ suppliedCandidate = c
+ return nil
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Not(wrapped)
+
+ matcher.Matches(17)
+ ExpectThat(suppliedCandidate, Equals(17))
+}
+
+func (t *NotTest) WrappedReturnsTrue() {
+ matchFunc := func(c interface{}) error {
+ return nil
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Not(wrapped)
+
+ err := matcher.Matches(0)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *NotTest) WrappedReturnsNonFatalError() {
+ matchFunc := func(c interface{}) error {
+ return errors.New("taco")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Not(wrapped)
+
+ err := matcher.Matches(0)
+ ExpectEq(nil, err)
+}
+
+func (t *NotTest) WrappedReturnsFatalError() {
+ matchFunc := func(c interface{}) error {
+ return NewFatalError("taco")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Not(wrapped)
+
+ err := matcher.Matches(0)
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *NotTest) Description() {
+ wrapped := &fakeMatcher{nil, "taco"}
+ matcher := Not(wrapped)
+
+ ExpectEq("not(taco)", matcher.Description())
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/panics.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/panics.go
new file mode 100644
index 00000000000..d2cfc97869b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/panics.go
@@ -0,0 +1,74 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+// Panics matches zero-arg functions which, when invoked, panic with an error
+// that matches the supplied matcher.
+//
+// NOTE(jacobsa): This matcher cannot detect the case where the function panics
+// using panic(nil), by design of the language. See here for more info:
+//
+// http://goo.gl/9aIQL
+//
+func Panics(m Matcher) Matcher {
+ return &panicsMatcher{m}
+}
+
+type panicsMatcher struct {
+ wrappedMatcher Matcher
+}
+
+func (m *panicsMatcher) Description() string {
+ return "panics with: " + m.wrappedMatcher.Description()
+}
+
+func (m *panicsMatcher) Matches(c interface{}) (err error) {
+ // Make sure c is a zero-arg function.
+ v := reflect.ValueOf(c)
+ if v.Kind() != reflect.Func || v.Type().NumIn() != 0 {
+ err = NewFatalError("which is not a zero-arg function")
+ return
+ }
+
+ // Call the function and check its panic error.
+ defer func() {
+ if e := recover(); e != nil {
+ err = m.wrappedMatcher.Matches(e)
+
+ // Set a clearer error message if the matcher said no.
+ if err != nil {
+ wrappedClause := ""
+ if err.Error() != "" {
+ wrappedClause = ", " + err.Error()
+ }
+
+				err = fmt.Errorf("which panicked with: %v%s", e, wrappedClause)
+ }
+ }
+ }()
+
+ v.Call([]reflect.Value{})
+
+ // If we get here, the function didn't panic.
+ err = errors.New("which didn't panic")
+ return
+}
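
A usage sketch for Panics, combined with HasSubstr from the same package (used elsewhere in this diff):

    package main

    import (
        "fmt"

        "github.com/jacobsa/oglematchers"
    )

    func main() {
        m := oglematchers.Panics(oglematchers.HasSubstr("boom"))

        fmt.Println(m.Matches(func() { panic("boom today") })) // <nil>
        fmt.Println(m.Matches(func() {}))                      // which didn't panic
        fmt.Println(m.Matches("not a function"))               // which is not a zero-arg function (fatal)
    }
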
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/panics_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/panics_test.go
new file mode 100644
index 00000000000..a2b494f3232
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/panics_test.go
@@ -0,0 +1,141 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "errors"
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type PanicsTest struct {
+ matcherCalled bool
+ suppliedCandidate interface{}
+ wrappedError error
+
+ matcher Matcher
+}
+
+func init() { RegisterTestSuite(&PanicsTest{}) }
+
+func (t *PanicsTest) SetUp(i *TestInfo) {
+ wrapped := &fakeMatcher{
+ func(c interface{}) error {
+ t.matcherCalled = true
+ t.suppliedCandidate = c
+ return t.wrappedError
+ },
+ "foo",
+ }
+
+ t.matcher = Panics(wrapped)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *PanicsTest) Description() {
+ ExpectThat(t.matcher.Description(), Equals("panics with: foo"))
+}
+
+func (t *PanicsTest) CandidateIsNil() {
+ err := t.matcher.Matches(nil)
+
+ ExpectThat(err, Error(Equals("which is not a zero-arg function")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PanicsTest) CandidateIsString() {
+ err := t.matcher.Matches("taco")
+
+ ExpectThat(err, Error(Equals("which is not a zero-arg function")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PanicsTest) CandidateTakesArgs() {
+ err := t.matcher.Matches(func(i int) string { return "" })
+
+ ExpectThat(err, Error(Equals("which is not a zero-arg function")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PanicsTest) CallsFunction() {
+ callCount := 0
+ t.matcher.Matches(func() string {
+ callCount++
+ return ""
+ })
+
+ ExpectThat(callCount, Equals(1))
+}
+
+func (t *PanicsTest) FunctionDoesntPanic() {
+ err := t.matcher.Matches(func() {})
+
+ ExpectThat(err, Error(Equals("which didn't panic")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *PanicsTest) CallsWrappedMatcher() {
+ expectedErr := 17
+ t.wrappedError = errors.New("")
+ t.matcher.Matches(func() { panic(expectedErr) })
+
+ ExpectThat(t.suppliedCandidate, Equals(expectedErr))
+}
+
+func (t *PanicsTest) WrappedReturnsTrue() {
+ err := t.matcher.Matches(func() { panic("") })
+
+ ExpectEq(nil, err)
+}
+
+func (t *PanicsTest) WrappedReturnsFatalErrorWithoutText() {
+ t.wrappedError = NewFatalError("")
+ err := t.matcher.Matches(func() { panic(17) })
+
+ ExpectThat(err, Error(Equals("which panicked with: 17")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *PanicsTest) WrappedReturnsFatalErrorWithText() {
+ t.wrappedError = NewFatalError("which blah")
+ err := t.matcher.Matches(func() { panic(17) })
+
+ ExpectThat(err, Error(Equals("which panicked with: 17, which blah")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *PanicsTest) WrappedReturnsNonFatalErrorWithoutText() {
+ t.wrappedError = errors.New("")
+ err := t.matcher.Matches(func() { panic(17) })
+
+ ExpectThat(err, Error(Equals("which panicked with: 17")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *PanicsTest) WrappedReturnsNonFatalErrorWithText() {
+ t.wrappedError = errors.New("which blah")
+ err := t.matcher.Matches(func() { panic(17) })
+
+ ExpectThat(err, Error(Equals("which panicked with: 17, which blah")))
+ ExpectFalse(isFatal(err))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/pointee.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/pointee.go
new file mode 100644
index 00000000000..c5383f2402f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/pointee.go
@@ -0,0 +1,65 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+// Pointee returns a matcher that matches non-nil pointers whose pointee
+// matches the wrapped matcher.
+func Pointee(m Matcher) Matcher {
+ return &pointeeMatcher{m}
+}
+
+type pointeeMatcher struct {
+ wrapped Matcher
+}
+
+func (m *pointeeMatcher) Matches(c interface{}) (err error) {
+ // Make sure the candidate is of the appropriate type.
+ cv := reflect.ValueOf(c)
+ if !cv.IsValid() || cv.Kind() != reflect.Ptr {
+ return NewFatalError("which is not a pointer")
+ }
+
+ // Make sure the candidate is non-nil.
+ if cv.IsNil() {
+ return NewFatalError("")
+ }
+
+ // Defer to the wrapped matcher. Fix up empty errors so that failure messages
+ // are more helpful than just printing a pointer for "Actual".
+ pointee := cv.Elem().Interface()
+ err = m.wrapped.Matches(pointee)
+ if err != nil && err.Error() == "" {
+ s := fmt.Sprintf("whose pointee is %v", pointee)
+
+ if _, ok := err.(*FatalError); ok {
+ err = NewFatalError(s)
+ } else {
+ err = errors.New(s)
+ }
+ }
+
+ return err
+}
+
+func (m *pointeeMatcher) Description() string {
+ return fmt.Sprintf("pointee(%s)", m.wrapped.Description())
+}
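
A usage sketch for Pointee, combined with Equals from the same package (used throughout the tests in this diff):

    package main

    import (
        "fmt"

        "github.com/jacobsa/oglematchers"
    )

    func main() {
        m := oglematchers.Pointee(oglematchers.Equals(17))

        x, y := 17, 19

        fmt.Println(m.Matches(&x))                 // <nil>
        fmt.Println(m.Matches(&y) == nil)          // false: the pointee is 19, not 17
        fmt.Println(m.Matches(17))                 // which is not a pointer (fatal)
        fmt.Println(m.Matches((*int)(nil)) == nil) // false: nil pointers never match (fatal)
    }
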
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/pointee_test.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/pointee_test.go
new file mode 100644
index 00000000000..58a8381c640
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/pointee_test.go
@@ -0,0 +1,152 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "errors"
+ . "github.com/jacobsa/oglematchers"
+ . "github.com/jacobsa/ogletest"
+ "testing"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type PointeeTest struct{}
+
+func init() { RegisterTestSuite(&PointeeTest{}) }
+
+func TestPointee(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *PointeeTest) Description() {
+ wrapped := &fakeMatcher{nil, "taco"}
+ matcher := Pointee(wrapped)
+
+ ExpectEq("pointee(taco)", matcher.Description())
+}
+
+func (t *PointeeTest) CandidateIsNotAPointer() {
+ matcher := Pointee(HasSubstr(""))
+ err := matcher.Matches([]byte{})
+
+ ExpectThat(err, Error(Equals("which is not a pointer")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PointeeTest) CandidateIsANilLiteral() {
+ matcher := Pointee(HasSubstr(""))
+ err := matcher.Matches(nil)
+
+ ExpectThat(err, Error(Equals("which is not a pointer")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PointeeTest) CandidateIsANilPointer() {
+ matcher := Pointee(HasSubstr(""))
+ err := matcher.Matches((*int)(nil))
+
+ ExpectThat(err, Error(Equals("")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PointeeTest) CallsWrapped() {
+ var suppliedCandidate interface{}
+ matchFunc := func(c interface{}) error {
+ suppliedCandidate = c
+ return nil
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ someSlice := []byte{}
+ matcher.Matches(&someSlice)
+ ExpectThat(suppliedCandidate, IdenticalTo(someSlice))
+}
+
+func (t *PointeeTest) WrappedReturnsOkay() {
+ matchFunc := func(c interface{}) error {
+ return nil
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ err := matcher.Matches(new(int))
+ ExpectEq(nil, err)
+}
+
+func (t *PointeeTest) WrappedReturnsNonFatalNonEmptyError() {
+ matchFunc := func(c interface{}) error {
+ return errors.New("taco")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ i := 17
+ err := matcher.Matches(&i)
+ ExpectFalse(isFatal(err))
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *PointeeTest) WrappedReturnsNonFatalEmptyError() {
+ matchFunc := func(c interface{}) error {
+ return errors.New("")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ i := 17
+ err := matcher.Matches(&i)
+ ExpectFalse(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("whose pointee")))
+ ExpectThat(err, Error(HasSubstr("17")))
+}
+
+func (t *PointeeTest) WrappedReturnsFatalNonEmptyError() {
+ matchFunc := func(c interface{}) error {
+ return NewFatalError("taco")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ i := 17
+ err := matcher.Matches(&i)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *PointeeTest) WrappedReturnsFatalEmptyError() {
+ matchFunc := func(c interface{}) error {
+ return NewFatalError("")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ i := 17
+ err := matcher.Matches(&i)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("whose pointee")))
+ ExpectThat(err, Error(HasSubstr("17")))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/transform_description.go b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/transform_description.go
new file mode 100644
index 00000000000..f79d0c03db1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jacobsa/oglematchers/transform_description.go
@@ -0,0 +1,36 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+// transformDescription returns a matcher that is equivalent to the supplied
+// one, except that it has the supplied description instead of the one attached
+// to the existing matcher.
+func transformDescription(m Matcher, newDesc string) Matcher {
+ return &transformDescriptionMatcher{newDesc, m}
+}
+
+type transformDescriptionMatcher struct {
+ desc string
+ wrappedMatcher Matcher
+}
+
+func (m *transformDescriptionMatcher) Description() string {
+ return m.desc
+}
+
+func (m *transformDescriptionMatcher) Matches(c interface{}) error {
+ return m.wrappedMatcher.Matches(c)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/.travis.yml b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/.travis.yml
new file mode 100644
index 00000000000..3165f004261
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/.travis.yml
@@ -0,0 +1,35 @@
+language: go
+
+install:
+ # go-flags
+ - go get -d -v ./...
+ - go build -v ./...
+
+ # linting
+ - go get golang.org/x/tools/cmd/vet
+ - go get github.com/golang/lint
+ - go install github.com/golang/lint/golint
+
+ # code coverage
+ - go get golang.org/x/tools/cmd/cover
+ - go get github.com/onsi/ginkgo/ginkgo
+ - go get github.com/modocache/gover
+ - if [ "$TRAVIS_SECURE_ENV_VARS" = "true" ]; then go get github.com/mattn/goveralls; fi
+
+script:
+ # go-flags
+ - $(exit $(gofmt -l . | wc -l))
+ - go test -v ./...
+
+ # linting
+ - go tool vet -all=true -v=true . || true
+ - $(go env GOPATH | awk 'BEGIN{FS=":"} {print $1}')/bin/golint ./...
+
+ # code coverage
+ - $(go env GOPATH | awk 'BEGIN{FS=":"} {print $1}')/bin/ginkgo -r -cover
+ - $(go env GOPATH | awk 'BEGIN{FS=":"} {print $1}')/bin/gover
+ - if [ "$TRAVIS_SECURE_ENV_VARS" = "true" ]; then $(go env GOPATH | awk 'BEGIN{FS=":"} {print $1}')/bin/goveralls -coverprofile=gover.coverprofile -service=travis-ci -repotoken $COVERALLS_TOKEN; fi
+
+env:
+ # coveralls.io
+ secure: "RCYbiB4P0RjQRIoUx/vG/AjP3mmYCbzOmr86DCww1Z88yNcy3hYr3Cq8rpPtYU5v0g7wTpu4adaKIcqRE9xknYGbqj3YWZiCoBP1/n4Z+9sHW3Dsd9D/GRGeHUus0laJUGARjWoCTvoEtOgTdGQDoX7mH+pUUY0FBltNYUdOiiU="
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/LICENSE b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/LICENSE
new file mode 100644
index 00000000000..bcca0d521be
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/LICENSE
@@ -0,0 +1,26 @@
+Copyright (c) 2012 Jesse van den Kieboom. All rights reserved.
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following disclaimer
+ in the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Google Inc. nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/README.md b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/README.md
new file mode 100644
index 00000000000..9378b760bda
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/README.md
@@ -0,0 +1,135 @@
+go-flags: a go library for parsing command line arguments
+=========================================================
+
+[![GoDoc](https://godoc.org/github.com/jessevdk/go-flags?status.png)](https://godoc.org/github.com/jessevdk/go-flags) [![Build Status](https://travis-ci.org/jessevdk/go-flags.svg?branch=master)](https://travis-ci.org/jessevdk/go-flags) [![Coverage Status](https://img.shields.io/coveralls/jessevdk/go-flags.svg)](https://coveralls.io/r/jessevdk/go-flags?branch=master)
+
+This library provides functionality similar to the builtin flag package of
+Go, but with many more features and nicer help formatting. From the
+documentation:
+
+Package flags provides an extensive command line option parser.
+The flags package is similar in functionality to the go builtin flag package
+but provides more options and uses reflection to provide a convenient and
+succinct way of specifying command line options.
+
+Supported features:
+* Options with short names (-v)
+* Options with long names (--verbose)
+* Options with and without arguments (bool vs. other types)
+* Options with optional arguments and default values
+* Multiple option groups each containing a set of options
+* Generate and print well-formatted help message
+* Passing remaining command line arguments after -- (optional)
+* Ignoring unknown command line options (optional)
+* Supports -I/usr/include -I=/usr/include -I /usr/include option argument specification
+* Supports multiple short options -aux
+* Supports all primitive Go types (string, int{8..64}, uint{8..64}, float)
+* Supports passing the same option multiple times (stored in a slice, or the last occurrence wins)
+* Supports maps
+* Supports function callbacks
+* Supports namespaces for (nested) option groups
+
+The flags package uses structs, reflection and struct field tags
+to allow users to specify command line options. This results in very simple
+and concise specification of your application options. For example:
+
+```go
+type Options struct {
+ Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
+}
+```
+
+This specifies one option with a short name -v and a long name --verbose.
+When either -v or --verbose is found on the command line, a 'true' value
+will be appended to the Verbose field. For example, specifying -vvv results
+in a Verbose value of {[true, true, true]}.
+
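+As a small illustrative sketch (assuming an `opts` variable of the `Options`
+type shown above), the number of times `-v` was passed can then be used
+directly as a verbosity level:
+
+```go
+verbosity := len(opts.Verbose) // 3 when -vvv was given
+```
+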
+Example:
+--------
+```go
+var opts struct {
+ // Slice of bool will append 'true' each time the option
+ // is encountered (can be set multiple times, like -vvv)
+ Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
+
+ // Example of automatic marshalling to desired type (uint)
+ Offset uint `long:"offset" description:"Offset"`
+
+ // Example of a callback, called each time the option is found.
+ Call func(string) `short:"c" description:"Call phone number"`
+
+ // Example of a required flag
+ Name string `short:"n" long:"name" description:"A name" required:"true"`
+
+ // Example of a value name
+ File string `short:"f" long:"file" description:"A file" value-name:"FILE"`
+
+ // Example of a pointer
+ Ptr *int `short:"p" description:"A pointer to an integer"`
+
+ // Example of a slice of strings
+ StringSlice []string `short:"s" description:"A slice of strings"`
+
+ // Example of a slice of pointers
+ PtrSlice []*string `long:"ptrslice" description:"A slice of pointers to string"`
+
+ // Example of a map
+ IntMap map[string]int `long:"intmap" description:"A map from string to int"`
+}
+
+// Callback which will invoke callto:<argument> to call a number.
+// Note that this works only on OS X (and probably only with
+// Skype) but it shows the idea.
+opts.Call = func(num string) {
+ cmd := exec.Command("open", "callto:"+num)
+ cmd.Start()
+ cmd.Process.Release()
+}
+
+// Make some fake arguments to parse.
+args := []string{
+ "-vv",
+ "--offset=5",
+ "-n", "Me",
+ "-p", "3",
+ "-s", "hello",
+ "-s", "world",
+ "--ptrslice", "hello",
+ "--ptrslice", "world",
+ "--intmap", "a:1",
+ "--intmap", "b:5",
+ "arg1",
+ "arg2",
+ "arg3",
+}
+
+// Parse flags from `args'. Note that here we use flags.ParseArgs for
+// the sake of making a working example. Normally, you would simply use
+// flags.Parse(&opts) which uses os.Args
+args, err := flags.ParseArgs(&opts, args)
+
+if err != nil {
+	panic(err)
+}
+
+fmt.Printf("Verbosity: %v\n", opts.Verbose)
+fmt.Printf("Offset: %d\n", opts.Offset)
+fmt.Printf("Name: %s\n", opts.Name)
+fmt.Printf("Ptr: %d\n", *opts.Ptr)
+fmt.Printf("StringSlice: %v\n", opts.StringSlice)
+fmt.Printf("PtrSlice: [%v %v]\n", *opts.PtrSlice[0], *opts.PtrSlice[1])
+fmt.Printf("IntMap: [a:%v b:%v]\n", opts.IntMap["a"], opts.IntMap["b"])
+fmt.Printf("Remaining args: %s\n", strings.Join(args, " "))
+
+// Output: Verbosity: [true true]
+// Offset: 5
+// Name: Me
+// Ptr: 3
+// StringSlice: [hello world]
+// PtrSlice: [hello world]
+// IntMap: [a:1 b:5]
+// Remaining args: arg1 arg2 arg3
+```
+
+More information can be found in the godocs: <http://godoc.org/github.com/jessevdk/go-flags>
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/arg.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/arg.go
new file mode 100644
index 00000000000..d1606440715
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/arg.go
@@ -0,0 +1,24 @@
+package flags
+
+import (
+ "reflect"
+)
+
+// Arg represents a positional argument on the command line.
+type Arg struct {
+ // The name of the positional argument (used in the help)
+ Name string
+
+ // A description of the positional argument (used in the help)
+ Description string
+
+	// Whether a positional argument is required; for a remaining (slice)
+	// argument this is the minimum number of values that must be provided
+ Required int
+
+ value reflect.Value
+ tag multiTag
+}
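+
+// Positional arguments are declared through a struct field tagged with
+// `positional-args:"yes"`, for example (an illustrative sketch):
+//
+//	var opts struct {
+//		Args struct {
+//			Input string
+//			Rest  []string
+//		} `positional-args:"yes" required:"yes"`
+//	}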
+
+func (a *Arg) isRemaining() bool {
+ return a.value.Type().Kind() == reflect.Slice
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/arg_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/arg_test.go
new file mode 100644
index 00000000000..117e90ec698
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/arg_test.go
@@ -0,0 +1,133 @@
+package flags
+
+import (
+ "testing"
+)
+
+func TestPositional(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Positional struct {
+ Command int
+ Filename string
+ Rest []string
+ } `positional-args:"yes" required:"yes"`
+ }{}
+
+ p := NewParser(&opts, Default)
+ ret, err := p.ParseArgs([]string{"10", "arg_test.go", "a", "b"})
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ return
+ }
+
+ if opts.Positional.Command != 10 {
+ t.Fatalf("Expected opts.Positional.Command to be 10, but got %v", opts.Positional.Command)
+ }
+
+ if opts.Positional.Filename != "arg_test.go" {
+ t.Fatalf("Expected opts.Positional.Filename to be \"arg_test.go\", but got %v", opts.Positional.Filename)
+ }
+
+ assertStringArray(t, opts.Positional.Rest, []string{"a", "b"})
+ assertStringArray(t, ret, []string{})
+}
+
+func TestPositionalRequired(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Positional struct {
+ Command int
+ Filename string
+ Rest []string
+ } `positional-args:"yes" required:"yes"`
+ }{}
+
+ p := NewParser(&opts, None)
+ _, err := p.ParseArgs([]string{"10"})
+
+ assertError(t, err, ErrRequired, "the required argument `Filename` was not provided")
+}
+
+func TestPositionalRequiredRest1Fail(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Positional struct {
+ Rest []string `required:"yes"`
+ } `positional-args:"yes"`
+ }{}
+
+ p := NewParser(&opts, None)
+ _, err := p.ParseArgs([]string{})
+
+ assertError(t, err, ErrRequired, "the required argument `Rest (at least 1 argument)` was not provided")
+}
+
+func TestPositionalRequiredRest1Pass(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Positional struct {
+ Rest []string `required:"yes"`
+ } `positional-args:"yes"`
+ }{}
+
+ p := NewParser(&opts, None)
+ _, err := p.ParseArgs([]string{"rest1"})
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ return
+ }
+
+ if len(opts.Positional.Rest) != 1 {
+ t.Fatalf("Expected 1 positional rest argument")
+ }
+
+ assertString(t, opts.Positional.Rest[0], "rest1")
+}
+
+func TestPositionalRequiredRest2Fail(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Positional struct {
+ Rest []string `required:"2"`
+ } `positional-args:"yes"`
+ }{}
+
+ p := NewParser(&opts, None)
+ _, err := p.ParseArgs([]string{"rest1"})
+
+ assertError(t, err, ErrRequired, "the required argument `Rest (at least 2 arguments, but got only 1)` was not provided")
+}
+
+func TestPositionalRequiredRest2Pass(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Positional struct {
+ Rest []string `required:"2"`
+ } `positional-args:"yes"`
+ }{}
+
+ p := NewParser(&opts, None)
+ _, err := p.ParseArgs([]string{"rest1", "rest2", "rest3"})
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ return
+ }
+
+ if len(opts.Positional.Rest) != 3 {
+		t.Fatalf("Expected 3 positional rest arguments")
+ }
+
+ assertString(t, opts.Positional.Rest[0], "rest1")
+ assertString(t, opts.Positional.Rest[1], "rest2")
+ assertString(t, opts.Positional.Rest[2], "rest3")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/assert_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/assert_test.go
new file mode 100644
index 00000000000..8e06636b66d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/assert_test.go
@@ -0,0 +1,177 @@
+package flags
+
+import (
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path"
+ "runtime"
+ "testing"
+)
+
+func assertCallerInfo() (string, int) {
+ ptr := make([]uintptr, 15)
+ n := runtime.Callers(1, ptr)
+
+ if n == 0 {
+ return "", 0
+ }
+
+ mef := runtime.FuncForPC(ptr[0])
+ mefile, meline := mef.FileLine(ptr[0])
+
+ for i := 2; i < n; i++ {
+ f := runtime.FuncForPC(ptr[i])
+ file, line := f.FileLine(ptr[i])
+
+ if file != mefile {
+ return file, line
+ }
+ }
+
+ return mefile, meline
+}
+
+func assertErrorf(t *testing.T, format string, args ...interface{}) {
+ msg := fmt.Sprintf(format, args...)
+
+ file, line := assertCallerInfo()
+
+ t.Errorf("%s:%d: %s", path.Base(file), line, msg)
+}
+
+func assertFatalf(t *testing.T, format string, args ...interface{}) {
+ msg := fmt.Sprintf(format, args...)
+
+ file, line := assertCallerInfo()
+
+ t.Fatalf("%s:%d: %s", path.Base(file), line, msg)
+}
+
+func assertString(t *testing.T, a string, b string) {
+ if a != b {
+ assertErrorf(t, "Expected %#v, but got %#v", b, a)
+ }
+}
+
+func assertStringArray(t *testing.T, a []string, b []string) {
+ if len(a) != len(b) {
+ assertErrorf(t, "Expected %#v, but got %#v", b, a)
+ return
+ }
+
+ for i, v := range a {
+ if b[i] != v {
+ assertErrorf(t, "Expected %#v, but got %#v", b, a)
+ return
+ }
+ }
+}
+
+func assertBoolArray(t *testing.T, a []bool, b []bool) {
+ if len(a) != len(b) {
+ assertErrorf(t, "Expected %#v, but got %#v", b, a)
+ return
+ }
+
+ for i, v := range a {
+ if b[i] != v {
+ assertErrorf(t, "Expected %#v, but got %#v", b, a)
+ return
+ }
+ }
+}
+
+func assertParserSuccess(t *testing.T, data interface{}, args ...string) (*Parser, []string) {
+ parser := NewParser(data, Default&^PrintErrors)
+ ret, err := parser.ParseArgs(args)
+
+ if err != nil {
+ t.Fatalf("Unexpected parse error: %s", err)
+ return nil, nil
+ }
+
+ return parser, ret
+}
+
+func assertParseSuccess(t *testing.T, data interface{}, args ...string) []string {
+ _, ret := assertParserSuccess(t, data, args...)
+ return ret
+}
+
+func assertError(t *testing.T, err error, typ ErrorType, msg string) {
+ if err == nil {
+ assertFatalf(t, "Expected error: %s", msg)
+ return
+ }
+
+ if e, ok := err.(*Error); !ok {
+ assertFatalf(t, "Expected Error type, but got %#v", err)
+ } else {
+ if e.Type != typ {
+ assertErrorf(t, "Expected error type {%s}, but got {%s}", typ, e.Type)
+ }
+
+ if e.Message != msg {
+ assertErrorf(t, "Expected error message %#v, but got %#v", msg, e.Message)
+ }
+ }
+}
+
+func assertParseFail(t *testing.T, typ ErrorType, msg string, data interface{}, args ...string) []string {
+ parser := NewParser(data, Default&^PrintErrors)
+ ret, err := parser.ParseArgs(args)
+
+ assertError(t, err, typ, msg)
+ return ret
+}
+
+func diff(a, b string) (string, error) {
+ atmp, err := ioutil.TempFile("", "help-diff")
+
+ if err != nil {
+ return "", err
+ }
+
+ btmp, err := ioutil.TempFile("", "help-diff")
+
+ if err != nil {
+ return "", err
+ }
+
+ if _, err := io.WriteString(atmp, a); err != nil {
+ return "", err
+ }
+
+ if _, err := io.WriteString(btmp, b); err != nil {
+ return "", err
+ }
+
+ ret, err := exec.Command("diff", "-u", "-d", "--label", "got", atmp.Name(), "--label", "expected", btmp.Name()).Output()
+
+ os.Remove(atmp.Name())
+ os.Remove(btmp.Name())
+
+ if err.Error() == "exit status 1" {
+ return string(ret), nil
+ }
+
+ return string(ret), err
+}
+
+func assertDiff(t *testing.T, actual, expected, msg string) {
+ if actual == expected {
+ return
+ }
+
+ ret, err := diff(actual, expected)
+
+ if err != nil {
+ assertErrorf(t, "Unexpected diff error: %s", err)
+ assertErrorf(t, "Unexpected %s, expected:\n\n%s\n\nbut got\n\n%s", msg, expected, actual)
+ } else {
+ assertErrorf(t, "Unexpected %s:\n\n%s", msg, ret)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/check_crosscompile.sh b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/check_crosscompile.sh
new file mode 100755
index 00000000000..c494f6119d3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/check_crosscompile.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+set -e
+
+echo '# linux arm7'
+GOARM=7 GOARCH=arm GOOS=linux go build
+echo '# linux arm5'
+GOARM=5 GOARCH=arm GOOS=linux go build
+echo '# windows 386'
+GOARCH=386 GOOS=windows go build
+echo '# windows amd64'
+GOARCH=amd64 GOOS=windows go build
+echo '# darwin'
+GOARCH=amd64 GOOS=darwin go build
+echo '# freebsd'
+GOARCH=amd64 GOOS=freebsd go build
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/closest.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/closest.go
new file mode 100644
index 00000000000..3b518757c43
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/closest.go
@@ -0,0 +1,59 @@
+package flags
+
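+// levenshtein computes an edit distance between s and t using a dynamic
+// programming table over their characters (insertions, deletions and
+// substitutions).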
+func levenshtein(s string, t string) int {
+ if len(s) == 0 {
+ return len(t)
+ }
+
+ if len(t) == 0 {
+ return len(s)
+ }
+
+ dists := make([][]int, len(s)+1)
+ for i := range dists {
+ dists[i] = make([]int, len(t)+1)
+ dists[i][0] = i
+ }
+
+ for j := range t {
+ dists[0][j] = j
+ }
+
+ for i, sc := range s {
+ for j, tc := range t {
+ if sc == tc {
+ dists[i+1][j+1] = dists[i][j]
+ } else {
+ dists[i+1][j+1] = dists[i][j] + 1
+ if dists[i+1][j] < dists[i+1][j+1] {
+ dists[i+1][j+1] = dists[i+1][j] + 1
+ }
+ if dists[i][j+1] < dists[i+1][j+1] {
+ dists[i+1][j+1] = dists[i][j+1] + 1
+ }
+ }
+ }
+ }
+
+ return dists[len(s)][len(t)]
+}
+
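+// closestChoice returns the choice with the smallest edit distance to cmd,
+// along with that distance; it backs the "did you mean ...?" suggestions
+// shown for unknown commands.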
+func closestChoice(cmd string, choices []string) (string, int) {
+ if len(choices) == 0 {
+ return "", 0
+ }
+
+ mincmd := -1
+ mindist := -1
+
+ for i, c := range choices {
+ l := levenshtein(cmd, c)
+
+ if mincmd < 0 || l < mindist {
+ mindist = l
+ mincmd = i
+ }
+ }
+
+ return choices[mincmd], mindist
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/command.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/command.go
new file mode 100644
index 00000000000..a30f5609099
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/command.go
@@ -0,0 +1,441 @@
+package flags
+
+import (
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "unsafe"
+)
+
+// Command represents an application command. Commands can be added to the
+// parser (which itself is a command) and are selected/executed when its name
+// is specified on the command line. The Command type embeds a Group and
+// therefore also carries a set of command specific options.
+type Command struct {
+ // Embedded, see Group for more information
+ *Group
+
+ // The name by which the command can be invoked
+ Name string
+
+ // The active sub command (set by parsing) or nil
+ Active *Command
+
+ // Whether subcommands are optional
+ SubcommandsOptional bool
+
+ // Aliases for the command
+ Aliases []string
+
+ // Whether positional arguments are required
+ ArgsRequired bool
+
+ commands []*Command
+ hasBuiltinHelpGroup bool
+ args []*Arg
+}
+
+// Commander is an interface which can be implemented by any command added in
+// the options. When implemented, the Execute method will be called for the last
+// specified (sub)command providing the remaining command line arguments.
+type Commander interface {
+ // Execute will be called for the last active (sub)command. The
+ // args argument contains the remaining command line arguments. The
+ // error that Execute returns will be eventually passed out of the
+ // Parse method of the Parser.
+ Execute(args []string) error
+}
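+
+// A Commander implementation might look like the following (an illustrative
+// sketch, not part of this package):
+//
+//	type cleanCommand struct {
+//		Force bool `short:"f" description:"Remove files without prompting"`
+//	}
+//
+//	func (c *cleanCommand) Execute(args []string) error {
+//		// args holds the arguments remaining after flag parsing.
+//		return nil
+//	}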
+
+// Usage is an interface which can be implemented to show a custom usage string
+// in the help message shown for a command.
+type Usage interface {
+ // Usage is called for commands to allow customized printing of command
+ // usage in the generated help message.
+ Usage() string
+}
+
+type lookup struct {
+ shortNames map[string]*Option
+ longNames map[string]*Option
+
+ commands map[string]*Command
+}
+
+// AddCommand adds a new command to the parser with the given name and data. The
+// data needs to be a pointer to a struct from which the fields indicate which
+// options are in the command. The provided data can implement the Commander
+// and Usage interfaces.
+func (c *Command) AddCommand(command string, shortDescription string, longDescription string, data interface{}) (*Command, error) {
+ cmd := newCommand(command, shortDescription, longDescription, data)
+
+ cmd.parent = c
+
+ if err := cmd.scan(); err != nil {
+ return nil, err
+ }
+
+ c.commands = append(c.commands, cmd)
+ return cmd, nil
+}
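+
+// For example (an illustrative sketch), attaching a subcommand to a parser:
+//
+//	var cleanOpts struct {
+//		Force bool `short:"f"`
+//	}
+//
+//	parser := NewParser(&globalOpts, Default)
+//	parser.AddCommand("clean", "Clean", "Remove generated files", &cleanOpts)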
+
+// AddGroup adds a new group to the command with the given name and data. The
+// data needs to be a pointer to a struct from which the fields indicate which
+// options are in the group.
+func (c *Command) AddGroup(shortDescription string, longDescription string, data interface{}) (*Group, error) {
+ group := newGroup(shortDescription, longDescription, data)
+
+ group.parent = c
+
+ if err := group.scanType(c.scanSubcommandHandler(group)); err != nil {
+ return nil, err
+ }
+
+ c.groups = append(c.groups, group)
+ return group, nil
+}
+
+// Commands returns a list of subcommands of this command.
+func (c *Command) Commands() []*Command {
+ return c.commands
+}
+
+// Find locates the subcommand with the given name and returns it. If no such
+// command can be found Find will return nil.
+func (c *Command) Find(name string) *Command {
+ for _, cc := range c.commands {
+ if cc.match(name) {
+ return cc
+ }
+ }
+
+ return nil
+}
+
+// Find an option that is part of the command, or any of its
+// parent commands, by matching its long name
+// (including the option namespace).
+func (c *Command) FindOptionByLongName(longName string) (option *Option) {
+ for option == nil && c != nil {
+ option = c.Group.FindOptionByLongName(longName)
+
+ c, _ = c.parent.(*Command)
+ }
+
+ return option
+}
+
+// Find an option that is part of the command, or any of its
+// parent commands, by matching its short name.
+func (c *Command) FindOptionByShortName(shortName rune) (option *Option) {
+ for option == nil && c != nil {
+ option = c.Group.FindOptionByShortName(shortName)
+
+ c, _ = c.parent.(*Command)
+ }
+
+ return option
+}
+
+// Args returns a list of positional arguments associated with this command.
+func (c *Command) Args() []*Arg {
+ ret := make([]*Arg, len(c.args))
+ copy(ret, c.args)
+
+ return ret
+}
+
+func newCommand(name string, shortDescription string, longDescription string, data interface{}) *Command {
+ return &Command{
+ Group: newGroup(shortDescription, longDescription, data),
+ Name: name,
+ }
+}
+
+func (c *Command) scanSubcommandHandler(parentg *Group) scanHandler {
+ f := func(realval reflect.Value, sfield *reflect.StructField) (bool, error) {
+ mtag := newMultiTag(string(sfield.Tag))
+
+ if err := mtag.Parse(); err != nil {
+ return true, err
+ }
+
+ positional := mtag.Get("positional-args")
+
+ if len(positional) != 0 {
+ stype := realval.Type()
+
+ for i := 0; i < stype.NumField(); i++ {
+ field := stype.Field(i)
+
+ m := newMultiTag((string(field.Tag)))
+
+ if err := m.Parse(); err != nil {
+ return true, err
+ }
+
+ name := m.Get("positional-arg-name")
+
+ if len(name) == 0 {
+ name = field.Name
+ }
+
+ var required int
+
+ sreq := m.Get("required")
+
+ if sreq != "" {
+ required = 1
+
+ if preq, err := strconv.ParseInt(sreq, 10, 32); err == nil {
+ required = int(preq)
+ }
+ }
+
+ arg := &Arg{
+ Name: name,
+ Description: m.Get("description"),
+ Required: required,
+
+ value: realval.Field(i),
+ tag: m,
+ }
+
+ c.args = append(c.args, arg)
+
+ if len(mtag.Get("required")) != 0 {
+ c.ArgsRequired = true
+ }
+ }
+
+ return true, nil
+ }
+
+ subcommand := mtag.Get("command")
+
+ if len(subcommand) != 0 {
+ ptrval := reflect.NewAt(realval.Type(), unsafe.Pointer(realval.UnsafeAddr()))
+
+ shortDescription := mtag.Get("description")
+ longDescription := mtag.Get("long-description")
+ subcommandsOptional := mtag.Get("subcommands-optional")
+ aliases := mtag.GetMany("alias")
+
+ subc, err := c.AddCommand(subcommand, shortDescription, longDescription, ptrval.Interface())
+ if err != nil {
+ return true, err
+ }
+
+ subc.Hidden = mtag.Get("hidden") != ""
+
+ if len(subcommandsOptional) > 0 {
+ subc.SubcommandsOptional = true
+ }
+
+ if len(aliases) > 0 {
+ subc.Aliases = aliases
+ }
+
+ return true, nil
+ }
+
+ return parentg.scanSubGroupHandler(realval, sfield)
+ }
+
+ return f
+}
+
+func (c *Command) scan() error {
+ return c.scanType(c.scanSubcommandHandler(c.Group))
+}
+
+func (c *Command) eachOption(f func(*Command, *Group, *Option)) {
+ c.eachCommand(func(c *Command) {
+ c.eachGroup(func(g *Group) {
+ for _, option := range g.options {
+ f(c, g, option)
+ }
+ })
+ }, true)
+}
+
+func (c *Command) eachCommand(f func(*Command), recurse bool) {
+ f(c)
+
+ for _, cc := range c.commands {
+ if recurse {
+ cc.eachCommand(f, true)
+ } else {
+ f(cc)
+ }
+ }
+}
+
+func (c *Command) eachActiveGroup(f func(cc *Command, g *Group)) {
+ c.eachGroup(func(g *Group) {
+ f(c, g)
+ })
+
+ if c.Active != nil {
+ c.Active.eachActiveGroup(f)
+ }
+}
+
+func (c *Command) addHelpGroups(showHelp func() error) {
+ if !c.hasBuiltinHelpGroup {
+ c.addHelpGroup(showHelp)
+ c.hasBuiltinHelpGroup = true
+ }
+
+ for _, cc := range c.commands {
+ cc.addHelpGroups(showHelp)
+ }
+}
+
+func (c *Command) makeLookup() lookup {
+ ret := lookup{
+ shortNames: make(map[string]*Option),
+ longNames: make(map[string]*Option),
+ commands: make(map[string]*Command),
+ }
+
+ parent := c.parent
+
+ var parents []*Command
+
+ for parent != nil {
+ if cmd, ok := parent.(*Command); ok {
+ parents = append(parents, cmd)
+ parent = cmd.parent
+ } else {
+ parent = nil
+ }
+ }
+
+ for i := len(parents) - 1; i >= 0; i-- {
+ parents[i].fillLookup(&ret, true)
+ }
+
+ c.fillLookup(&ret, false)
+ return ret
+}
+
+func (c *Command) fillLookup(ret *lookup, onlyOptions bool) {
+ c.eachGroup(func(g *Group) {
+ for _, option := range g.options {
+ if option.ShortName != 0 {
+ ret.shortNames[string(option.ShortName)] = option
+ }
+
+ if len(option.LongName) > 0 {
+ ret.longNames[option.LongNameWithNamespace()] = option
+ }
+ }
+ })
+
+ if onlyOptions {
+ return
+ }
+
+ for _, subcommand := range c.commands {
+ ret.commands[subcommand.Name] = subcommand
+
+ for _, a := range subcommand.Aliases {
+ ret.commands[a] = subcommand
+ }
+ }
+}
+
+func (c *Command) groupByName(name string) *Group {
+ if grp := c.Group.groupByName(name); grp != nil {
+ return grp
+ }
+
+ for _, subc := range c.commands {
+ prefix := subc.Name + "."
+
+ if strings.HasPrefix(name, prefix) {
+ if grp := subc.groupByName(name[len(prefix):]); grp != nil {
+ return grp
+ }
+ } else if name == subc.Name {
+ return subc.Group
+ }
+ }
+
+ return nil
+}
+
+type commandList []*Command
+
+func (c commandList) Less(i, j int) bool {
+ return c[i].Name < c[j].Name
+}
+
+func (c commandList) Len() int {
+ return len(c)
+}
+
+func (c commandList) Swap(i, j int) {
+ c[i], c[j] = c[j], c[i]
+}
+
+func (c *Command) sortedVisibleCommands() []*Command {
+ ret := commandList(c.visibleCommands())
+ sort.Sort(ret)
+
+ return []*Command(ret)
+}
+
+func (c *Command) visibleCommands() []*Command {
+ ret := make([]*Command, 0, len(c.commands))
+
+ for _, cmd := range c.commands {
+ if !cmd.Hidden {
+ ret = append(ret, cmd)
+ }
+ }
+
+ return ret
+}
+
+func (c *Command) match(name string) bool {
+ if c.Name == name {
+ return true
+ }
+
+ for _, v := range c.Aliases {
+ if v == name {
+ return true
+ }
+ }
+
+ return false
+}
+
+func (c *Command) hasCliOptions() bool {
+ ret := false
+
+ c.eachGroup(func(g *Group) {
+ if g.isBuiltinHelp {
+ return
+ }
+
+ for _, opt := range g.options {
+ if opt.canCli() {
+ ret = true
+ }
+ }
+ })
+
+ return ret
+}
+
+func (c *Command) fillParseState(s *parseState) {
+ s.positional = make([]*Arg, len(c.args))
+ copy(s.positional, c.args)
+
+ s.lookup = c.makeLookup()
+ s.command = c
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/command_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/command_test.go
new file mode 100644
index 00000000000..72d397d2c7c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/command_test.go
@@ -0,0 +1,544 @@
+package flags
+
+import (
+ "fmt"
+ "testing"
+)
+
+func TestCommandInline(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Command struct {
+ G bool `short:"g"`
+ } `command:"cmd"`
+ }{}
+
+ p, ret := assertParserSuccess(t, &opts, "-v", "cmd", "-g")
+
+ assertStringArray(t, ret, []string{})
+
+ if p.Active == nil {
+ t.Errorf("Expected active command")
+ }
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if !opts.Command.G {
+ t.Errorf("Expected Command.G to be true")
+ }
+
+ if p.Command.Find("cmd") != p.Active {
+ t.Errorf("Expected to find command `cmd' to be active")
+ }
+}
+
+func TestCommandInlineMulti(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ C1 struct {
+ } `command:"c1"`
+
+ C2 struct {
+ G bool `short:"g"`
+ } `command:"c2"`
+ }{}
+
+ p, ret := assertParserSuccess(t, &opts, "-v", "c2", "-g")
+
+ assertStringArray(t, ret, []string{})
+
+ if p.Active == nil {
+ t.Errorf("Expected active command")
+ }
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if !opts.C2.G {
+ t.Errorf("Expected C2.G to be true")
+ }
+
+ if p.Command.Find("c1") == nil {
+ t.Errorf("Expected to find command `c1'")
+ }
+
+ if c2 := p.Command.Find("c2"); c2 == nil {
+ t.Errorf("Expected to find command `c2'")
+ } else if c2 != p.Active {
+ t.Errorf("Expected to find command `c2' to be active")
+ }
+}
+
+func TestCommandFlagOrder1(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Command struct {
+ G bool `short:"g"`
+ } `command:"cmd"`
+ }{}
+
+ assertParseFail(t, ErrUnknownFlag, "unknown flag `g'", &opts, "-v", "-g", "cmd")
+}
+
+func TestCommandFlagOrder2(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Command struct {
+ G bool `short:"g"`
+ } `command:"cmd"`
+ }{}
+
+ assertParseSuccess(t, &opts, "cmd", "-v", "-g")
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if !opts.Command.G {
+ t.Errorf("Expected Command.G to be true")
+ }
+}
+
+func TestCommandFlagOrderSub(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Command struct {
+ G bool `short:"g"`
+
+ SubCommand struct {
+ B bool `short:"b"`
+ } `command:"sub"`
+ } `command:"cmd"`
+ }{}
+
+ assertParseSuccess(t, &opts, "cmd", "sub", "-v", "-g", "-b")
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if !opts.Command.G {
+ t.Errorf("Expected Command.G to be true")
+ }
+
+ if !opts.Command.SubCommand.B {
+ t.Errorf("Expected Command.SubCommand.B to be true")
+ }
+}
+
+func TestCommandFlagOverride1(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Command struct {
+ Value bool `short:"v"`
+ } `command:"cmd"`
+ }{}
+
+ assertParseSuccess(t, &opts, "-v", "cmd")
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if opts.Command.Value {
+ t.Errorf("Expected Command.Value to be false")
+ }
+}
+
+func TestCommandFlagOverride2(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Command struct {
+ Value bool `short:"v"`
+ } `command:"cmd"`
+ }{}
+
+ assertParseSuccess(t, &opts, "cmd", "-v")
+
+ if opts.Value {
+ t.Errorf("Expected Value to be false")
+ }
+
+ if !opts.Command.Value {
+ t.Errorf("Expected Command.Value to be true")
+ }
+}
+
+func TestCommandFlagOverrideSub(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Command struct {
+ Value bool `short:"v"`
+
+ SubCommand struct {
+ Value bool `short:"v"`
+ } `command:"sub"`
+ } `command:"cmd"`
+ }{}
+
+ assertParseSuccess(t, &opts, "cmd", "sub", "-v")
+
+ if opts.Value {
+ t.Errorf("Expected Value to be false")
+ }
+
+ if opts.Command.Value {
+ t.Errorf("Expected Command.Value to be false")
+ }
+
+ if !opts.Command.SubCommand.Value {
+		t.Errorf("Expected Command.SubCommand.Value to be true")
+ }
+}
+
+func TestCommandFlagOverrideSub2(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Command struct {
+ Value bool `short:"v"`
+
+ SubCommand struct {
+ G bool `short:"g"`
+ } `command:"sub"`
+ } `command:"cmd"`
+ }{}
+
+ assertParseSuccess(t, &opts, "cmd", "sub", "-v")
+
+ if opts.Value {
+ t.Errorf("Expected Value to be false")
+ }
+
+ if !opts.Command.Value {
+ t.Errorf("Expected Command.Value to be true")
+ }
+}
+
+func TestCommandEstimate(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Cmd1 struct {
+ } `command:"remove"`
+
+ Cmd2 struct {
+ } `command:"add"`
+ }{}
+
+ p := NewParser(&opts, None)
+ _, err := p.ParseArgs([]string{})
+
+ assertError(t, err, ErrCommandRequired, "Please specify one command of: add or remove")
+}
+
+func TestCommandEstimate2(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Cmd1 struct {
+ } `command:"remove"`
+
+ Cmd2 struct {
+ } `command:"add"`
+ }{}
+
+ p := NewParser(&opts, None)
+ _, err := p.ParseArgs([]string{"rmive"})
+
+ assertError(t, err, ErrUnknownCommand, "Unknown command `rmive', did you mean `remove'?")
+}
+
+type testCommand struct {
+ G bool `short:"g"`
+ Executed bool
+ EArgs []string
+}
+
+func (c *testCommand) Execute(args []string) error {
+ c.Executed = true
+ c.EArgs = args
+
+ return nil
+}
+
+func TestCommandExecute(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Command testCommand `command:"cmd"`
+ }{}
+
+ assertParseSuccess(t, &opts, "-v", "cmd", "-g", "a", "b")
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if !opts.Command.Executed {
+ t.Errorf("Did not execute command")
+ }
+
+ if !opts.Command.G {
+		t.Errorf("Expected Command.G to be true")
+ }
+
+ assertStringArray(t, opts.Command.EArgs, []string{"a", "b"})
+}
+
+func TestCommandClosest(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Cmd1 struct {
+ } `command:"remove"`
+
+ Cmd2 struct {
+ } `command:"add"`
+ }{}
+
+ args := assertParseFail(t, ErrUnknownCommand, "Unknown command `addd', did you mean `add'?", &opts, "-v", "addd")
+
+ assertStringArray(t, args, []string{"addd"})
+}
+
+func TestCommandAdd(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+ }{}
+
+ var cmd = struct {
+ G bool `short:"g"`
+ }{}
+
+ p := NewParser(&opts, Default)
+ c, err := p.AddCommand("cmd", "", "", &cmd)
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ return
+ }
+
+ ret, err := p.ParseArgs([]string{"-v", "cmd", "-g", "rest"})
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ return
+ }
+
+ assertStringArray(t, ret, []string{"rest"})
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if !cmd.G {
+ t.Errorf("Expected Command.G to be true")
+ }
+
+ if p.Command.Find("cmd") != c {
+ t.Errorf("Expected to find command `cmd'")
+ }
+
+ if p.Commands()[0] != c {
+ t.Errorf("Expected command %#v, but got %#v", c, p.Commands()[0])
+ }
+
+ if c.Options()[0].ShortName != 'g' {
+ t.Errorf("Expected short name `g' but got %v", c.Options()[0].ShortName)
+ }
+}
+
+func TestCommandNestedInline(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Command struct {
+ G bool `short:"g"`
+
+ Nested struct {
+ N string `long:"n"`
+ } `command:"nested"`
+ } `command:"cmd"`
+ }{}
+
+ p, ret := assertParserSuccess(t, &opts, "-v", "cmd", "-g", "nested", "--n", "n", "rest")
+
+ assertStringArray(t, ret, []string{"rest"})
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if !opts.Command.G {
+ t.Errorf("Expected Command.G to be true")
+ }
+
+ assertString(t, opts.Command.Nested.N, "n")
+
+ if c := p.Command.Find("cmd"); c == nil {
+ t.Errorf("Expected to find command `cmd'")
+ } else {
+ if c != p.Active {
+ t.Errorf("Expected `cmd' to be the active parser command")
+ }
+
+ if nested := c.Find("nested"); nested == nil {
+ t.Errorf("Expected to find command `nested'")
+ } else if nested != c.Active {
+ t.Errorf("Expected to find command `nested' to be the active `cmd' command")
+ }
+ }
+}
+
+func TestRequiredOnCommand(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v" required:"true"`
+
+ Command struct {
+ G bool `short:"g"`
+ } `command:"cmd"`
+ }{}
+
+ assertParseFail(t, ErrRequired, fmt.Sprintf("the required flag `%cv' was not specified", defaultShortOptDelimiter), &opts, "cmd")
+}
+
+func TestRequiredAllOnCommand(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v" required:"true"`
+ Missing bool `long:"missing" required:"true"`
+
+ Command struct {
+ G bool `short:"g"`
+ } `command:"cmd"`
+ }{}
+
+ assertParseFail(t, ErrRequired, fmt.Sprintf("the required flags `%smissing' and `%cv' were not specified", defaultLongOptDelimiter, defaultShortOptDelimiter), &opts, "cmd")
+}
+
+func TestDefaultOnCommand(t *testing.T) {
+ var opts = struct {
+ Command struct {
+ G bool `short:"g" default:"true"`
+ } `command:"cmd"`
+ }{}
+
+ assertParseSuccess(t, &opts, "cmd")
+
+ if !opts.Command.G {
+ t.Errorf("Expected G to be true")
+ }
+}
+
+func TestSubcommandsOptional(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Cmd1 struct {
+ } `command:"remove"`
+
+ Cmd2 struct {
+ } `command:"add"`
+ }{}
+
+ p := NewParser(&opts, None)
+ p.SubcommandsOptional = true
+
+ _, err := p.ParseArgs([]string{"-v"})
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ return
+ }
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+}
+
+func TestCommandAlias(t *testing.T) {
+ var opts = struct {
+ Command struct {
+ G bool `short:"g" default:"true"`
+ } `command:"cmd" alias:"cm"`
+ }{}
+
+ assertParseSuccess(t, &opts, "cm")
+
+ if !opts.Command.G {
+ t.Errorf("Expected G to be true")
+ }
+}
+
+func TestSubCommandFindOptionByLongFlag(t *testing.T) {
+ var opts struct {
+ Testing bool `long:"testing" description:"Testing"`
+ }
+
+ var cmd struct {
+ Other bool `long:"other" description:"Other"`
+ }
+
+ p := NewParser(&opts, Default)
+ c, _ := p.AddCommand("command", "Short", "Long", &cmd)
+
+ opt := c.FindOptionByLongName("other")
+
+ if opt == nil {
+ t.Errorf("Expected option, but found none")
+ }
+
+ assertString(t, opt.LongName, "other")
+
+ opt = c.FindOptionByLongName("testing")
+
+ if opt == nil {
+ t.Errorf("Expected option, but found none")
+ }
+
+ assertString(t, opt.LongName, "testing")
+}
+
+func TestSubCommandFindOptionByShortFlag(t *testing.T) {
+ var opts struct {
+ Testing bool `short:"t" description:"Testing"`
+ }
+
+ var cmd struct {
+ Other bool `short:"o" description:"Other"`
+ }
+
+ p := NewParser(&opts, Default)
+ c, _ := p.AddCommand("command", "Short", "Long", &cmd)
+
+ opt := c.FindOptionByShortName('o')
+
+ if opt == nil {
+ t.Errorf("Expected option, but found none")
+ }
+
+ if opt.ShortName != 'o' {
+ t.Errorf("Expected 'o', but got %v", opt.ShortName)
+ }
+
+ opt = c.FindOptionByShortName('t')
+
+ if opt == nil {
+ t.Errorf("Expected option, but found none")
+ }
+
+ if opt.ShortName != 't' {
+		t.Errorf("Expected 't', but got %v", opt.ShortName)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/completion.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/completion.go
new file mode 100644
index 00000000000..894f1d6aeef
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/completion.go
@@ -0,0 +1,300 @@
+package flags
+
+import (
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strings"
+ "unicode/utf8"
+)
+
+// Completion is a type containing information of a completion.
+type Completion struct {
+ // The completed item
+ Item string
+
+ // A description of the completed item (optional)
+ Description string
+}
+
+type completions []Completion
+
+func (c completions) Len() int {
+ return len(c)
+}
+
+func (c completions) Less(i, j int) bool {
+ return c[i].Item < c[j].Item
+}
+
+func (c completions) Swap(i, j int) {
+ c[i], c[j] = c[j], c[i]
+}
+
+// Completer is an interface which can be implemented by types
+// to provide custom command line argument completion.
+type Completer interface {
+ // Complete receives a prefix representing a (partial) value
+ // for its type and should provide a list of possible valid
+ // completions.
+ Complete(match string) []Completion
+}
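+
+// A custom Completer might look like the following (an illustrative sketch,
+// not part of this package), completing from a fixed set of values:
+//
+//	type color string
+//
+//	func (c *color) Complete(match string) []Completion {
+//		var ret []Completion
+//		for _, v := range []string{"red", "green", "blue"} {
+//			if strings.HasPrefix(v, match) {
+//				ret = append(ret, Completion{Item: v})
+//			}
+//		}
+//		return ret
+//	}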
+
+type completion struct {
+ parser *Parser
+}
+
+// Filename is a string alias which provides filename completion.
+type Filename string
+
+func completionsWithoutDescriptions(items []string) []Completion {
+ ret := make([]Completion, len(items))
+
+ for i, v := range items {
+ ret[i].Item = v
+ }
+
+ return ret
+}
+
+// Complete returns a list of existing files with the given
+// prefix.
+func (f *Filename) Complete(match string) []Completion {
+ ret, _ := filepath.Glob(match + "*")
+ return completionsWithoutDescriptions(ret)
+}
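+
+// Filename is typically used as a field type so that its values complete to
+// existing paths, for example (illustrative):
+//
+//	type opts struct {
+//		Output Filename `short:"o" long:"output"`
+//	}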
+
+func (c *completion) skipPositional(s *parseState, n int) {
+ if n >= len(s.positional) {
+ s.positional = nil
+ } else {
+ s.positional = s.positional[n:]
+ }
+}
+
+func (c *completion) completeOptionNames(names map[string]*Option, prefix string, match string) []Completion {
+ n := make([]Completion, 0, len(names))
+
+ for k, opt := range names {
+ if strings.HasPrefix(k, match) {
+ n = append(n, Completion{
+ Item: prefix + k,
+ Description: opt.Description,
+ })
+ }
+ }
+
+ return n
+}
+
+func (c *completion) completeLongNames(s *parseState, prefix string, match string) []Completion {
+ return c.completeOptionNames(s.lookup.longNames, prefix, match)
+}
+
+func (c *completion) completeShortNames(s *parseState, prefix string, match string) []Completion {
+ if len(match) != 0 {
+ return []Completion{
+ Completion{
+ Item: prefix + match,
+ },
+ }
+ }
+
+ return c.completeOptionNames(s.lookup.shortNames, prefix, match)
+}
+
+func (c *completion) completeCommands(s *parseState, match string) []Completion {
+ n := make([]Completion, 0, len(s.command.commands))
+
+ for _, cmd := range s.command.commands {
+ if cmd.data != c && strings.HasPrefix(cmd.Name, match) {
+ n = append(n, Completion{
+ Item: cmd.Name,
+ Description: cmd.ShortDescription,
+ })
+ }
+ }
+
+ return n
+}
+
+func (c *completion) completeValue(value reflect.Value, prefix string, match string) []Completion {
+ i := value.Interface()
+
+ var ret []Completion
+
+ if cmp, ok := i.(Completer); ok {
+ ret = cmp.Complete(match)
+ } else if value.CanAddr() {
+ if cmp, ok = value.Addr().Interface().(Completer); ok {
+ ret = cmp.Complete(match)
+ }
+ }
+
+ for i, v := range ret {
+ ret[i].Item = prefix + v.Item
+ }
+
+ return ret
+}
+
+func (c *completion) completeArg(arg *Arg, prefix string, match string) []Completion {
+ if arg.isRemaining() {
+ // For remaining positional args (that are parsed into a slice), complete
+ // based on the element type.
+ return c.completeValue(reflect.New(arg.value.Type().Elem()), prefix, match)
+ }
+
+ return c.completeValue(arg.value, prefix, match)
+}
+
+func (c *completion) complete(args []string) []Completion {
+ if len(args) == 0 {
+ args = []string{""}
+ }
+
+ s := &parseState{
+ args: args,
+ }
+
+ c.parser.fillParseState(s)
+
+ var opt *Option
+
+ for len(s.args) > 1 {
+ arg := s.pop()
+
+ if (c.parser.Options&PassDoubleDash) != None && arg == "--" {
+ opt = nil
+ c.skipPositional(s, len(s.args)-1)
+
+ break
+ }
+
+ if argumentIsOption(arg) {
+ prefix, optname, islong := stripOptionPrefix(arg)
+ optname, _, argument := splitOption(prefix, optname, islong)
+
+ if argument == nil {
+ var o *Option
+ canarg := true
+
+ if islong {
+ o = s.lookup.longNames[optname]
+ } else {
+ for i, r := range optname {
+ sname := string(r)
+ o = s.lookup.shortNames[sname]
+
+ if o == nil {
+ break
+ }
+
+ if i == 0 && o.canArgument() && len(optname) != len(sname) {
+ canarg = false
+ break
+ }
+ }
+ }
+
+ if o == nil && (c.parser.Options&PassAfterNonOption) != None {
+ opt = nil
+ c.skipPositional(s, len(s.args)-1)
+
+ break
+ } else if o != nil && o.canArgument() && !o.OptionalArgument && canarg {
+ if len(s.args) > 1 {
+ s.pop()
+ } else {
+ opt = o
+ }
+ }
+ }
+ } else {
+ if len(s.positional) > 0 {
+ if !s.positional[0].isRemaining() {
+ // Don't advance beyond a remaining positional arg (because
+ // it consumes all subsequent args).
+ s.positional = s.positional[1:]
+ }
+ } else if cmd, ok := s.lookup.commands[arg]; ok {
+ cmd.fillParseState(s)
+ }
+
+ opt = nil
+ }
+ }
+
+ lastarg := s.args[len(s.args)-1]
+ var ret []Completion
+
+ if opt != nil {
+ // Completion for the argument of 'opt'
+ ret = c.completeValue(opt.value, "", lastarg)
+ } else if argumentStartsOption(lastarg) {
+ // Complete the option
+ prefix, optname, islong := stripOptionPrefix(lastarg)
+ optname, split, argument := splitOption(prefix, optname, islong)
+
+ if argument == nil && !islong {
+ rname, n := utf8.DecodeRuneInString(optname)
+ sname := string(rname)
+
+ if opt := s.lookup.shortNames[sname]; opt != nil && opt.canArgument() {
+ ret = c.completeValue(opt.value, prefix+sname, optname[n:])
+ } else {
+ ret = c.completeShortNames(s, prefix, optname)
+ }
+ } else if argument != nil {
+ if islong {
+ opt = s.lookup.longNames[optname]
+ } else {
+ opt = s.lookup.shortNames[optname]
+ }
+
+ if opt != nil {
+ ret = c.completeValue(opt.value, prefix+optname+split, *argument)
+ }
+ } else if islong {
+ ret = c.completeLongNames(s, prefix, optname)
+ } else {
+ ret = c.completeShortNames(s, prefix, optname)
+ }
+ } else if len(s.positional) > 0 {
+ // Complete for positional argument
+ ret = c.completeArg(s.positional[0], "", lastarg)
+ } else if len(s.command.commands) > 0 {
+ // Complete for command
+ ret = c.completeCommands(s, lastarg)
+ }
+
+ sort.Sort(completions(ret))
+ return ret
+}
+
+func (c *completion) print(items []Completion, showDescriptions bool) {
+ if showDescriptions && len(items) > 1 {
+ maxl := 0
+
+ for _, v := range items {
+ if len(v.Item) > maxl {
+ maxl = len(v.Item)
+ }
+ }
+
+ for _, v := range items {
+ fmt.Printf("%s", v.Item)
+
+ if len(v.Description) > 0 {
+ fmt.Printf("%s # %s", strings.Repeat(" ", maxl-len(v.Item)), v.Description)
+ }
+
+ fmt.Printf("\n")
+ }
+ } else {
+ for _, v := range items {
+ fmt.Println(v.Item)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/completion_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/completion_test.go
new file mode 100644
index 00000000000..f440fd70fe5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/completion_test.go
@@ -0,0 +1,294 @@
+package flags
+
+import (
+ "bytes"
+ "io"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "strings"
+ "testing"
+)
+
+type TestComplete struct {
+}
+
+func (t *TestComplete) Complete(match string) []Completion {
+ options := []string{
+ "hello world",
+ "hello universe",
+ "hello multiverse",
+ }
+
+ ret := make([]Completion, 0, len(options))
+
+ for _, o := range options {
+ if strings.HasPrefix(o, match) {
+ ret = append(ret, Completion{
+ Item: o,
+ })
+ }
+ }
+
+ return ret
+}
+
+var completionTestOptions struct {
+ Verbose bool `short:"v" long:"verbose" description:"Verbose messages"`
+ Debug bool `short:"d" long:"debug" description:"Enable debug"`
+ Version bool `long:"version" description:"Show version"`
+ Required bool `long:"required" required:"true" description:"This is required"`
+
+ AddCommand struct {
+ Positional struct {
+ Filename Filename
+ } `positional-args:"yes"`
+ } `command:"add" description:"add an item"`
+
+ AddMultiCommand struct {
+ Positional struct {
+ Filename []Filename
+ } `positional-args:"yes"`
+ } `command:"add-multi" description:"add multiple items"`
+
+ RemoveCommand struct {
+ Other bool `short:"o"`
+ File Filename `short:"f" long:"filename"`
+ } `command:"rm" description:"remove an item"`
+
+ RenameCommand struct {
+ Completed TestComplete `short:"c" long:"completed"`
+ } `command:"rename" description:"rename an item"`
+}
+
+type completionTest struct {
+ Args []string
+ Completed []string
+ ShowDescriptions bool
+}
+
+var completionTests []completionTest
+
+func init() {
+ _, sourcefile, _, _ := runtime.Caller(0)
+ completionTestSourcedir := filepath.Join(filepath.SplitList(path.Dir(sourcefile))...)
+
+ completionTestFilename := []string{filepath.Join(completionTestSourcedir, "completion.go"), filepath.Join(completionTestSourcedir, "completion_test.go")}
+
+ completionTests = []completionTest{
+ {
+ // Short names
+ []string{"-"},
+ []string{"-d", "-v"},
+ false,
+ },
+
+ {
+ // Short names concatenated
+ []string{"-dv"},
+ []string{"-dv"},
+ false,
+ },
+
+ {
+ // Long names
+ []string{"--"},
+ []string{"--debug", "--required", "--verbose", "--version"},
+ false,
+ },
+
+ {
+ // Long names with descriptions
+ []string{"--"},
+ []string{
+ "--debug # Enable debug",
+ "--required # This is required",
+ "--verbose # Verbose messages",
+ "--version # Show version",
+ },
+ true,
+ },
+
+ {
+ // Long names partial
+ []string{"--ver"},
+ []string{"--verbose", "--version"},
+ false,
+ },
+
+ {
+ // Commands
+ []string{""},
+ []string{"add", "add-multi", "rename", "rm"},
+ false,
+ },
+
+ {
+ // Commands with descriptions
+ []string{""},
+ []string{
+ "add # add an item",
+ "add-multi # add multiple items",
+ "rename # rename an item",
+ "rm # remove an item",
+ },
+ true,
+ },
+
+ {
+ // Commands partial
+ []string{"r"},
+ []string{"rename", "rm"},
+ false,
+ },
+
+ {
+ // Positional filename
+ []string{"add", filepath.Join(completionTestSourcedir, "completion")},
+ completionTestFilename,
+ false,
+ },
+
+ {
+ // Multiple positional filename (1 arg)
+ []string{"add-multi", filepath.Join(completionTestSourcedir, "completion")},
+ completionTestFilename,
+ false,
+ },
+ {
+ // Multiple positional filename (2 args)
+ []string{"add-multi", filepath.Join(completionTestSourcedir, "completion.go"), filepath.Join(completionTestSourcedir, "completion")},
+ completionTestFilename,
+ false,
+ },
+ {
+ // Multiple positional filename (3 args)
+ []string{"add-multi", filepath.Join(completionTestSourcedir, "completion.go"), filepath.Join(completionTestSourcedir, "completion.go"), filepath.Join(completionTestSourcedir, "completion")},
+ completionTestFilename,
+ false,
+ },
+
+ {
+ // Flag filename
+ []string{"rm", "-f", path.Join(completionTestSourcedir, "completion")},
+ completionTestFilename,
+ false,
+ },
+
+ {
+ // Flag short concat last filename
+ []string{"rm", "-of", path.Join(completionTestSourcedir, "completion")},
+ completionTestFilename,
+ false,
+ },
+
+ {
+ // Flag concat filename
+ []string{"rm", "-f" + path.Join(completionTestSourcedir, "completion")},
+ []string{"-f" + completionTestFilename[0], "-f" + completionTestFilename[1]},
+ false,
+ },
+
+ {
+ // Flag equal concat filename
+ []string{"rm", "-f=" + path.Join(completionTestSourcedir, "completion")},
+ []string{"-f=" + completionTestFilename[0], "-f=" + completionTestFilename[1]},
+ false,
+ },
+
+ {
+ // Flag concat long filename
+ []string{"rm", "--filename=" + path.Join(completionTestSourcedir, "completion")},
+ []string{"--filename=" + completionTestFilename[0], "--filename=" + completionTestFilename[1]},
+ false,
+ },
+
+ {
+ // Flag long filename
+ []string{"rm", "--filename", path.Join(completionTestSourcedir, "completion")},
+ completionTestFilename,
+ false,
+ },
+
+ {
+ // Custom completed
+ []string{"rename", "-c", "hello un"},
+ []string{"hello universe"},
+ false,
+ },
+ }
+}
+
+func TestCompletion(t *testing.T) {
+ p := NewParser(&completionTestOptions, Default)
+ c := &completion{parser: p}
+
+ for _, test := range completionTests {
+ if test.ShowDescriptions {
+ continue
+ }
+
+ ret := c.complete(test.Args)
+ items := make([]string, len(ret))
+
+ for i, v := range ret {
+ items[i] = v.Item
+ }
+
+ if !reflect.DeepEqual(items, test.Completed) {
+ t.Errorf("Args: %#v, %#v\n Expected: %#v\n Got: %#v", test.Args, test.ShowDescriptions, test.Completed, items)
+ }
+ }
+}
+
+func TestParserCompletion(t *testing.T) {
+ for _, test := range completionTests {
+ if test.ShowDescriptions {
+ os.Setenv("GO_FLAGS_COMPLETION", "verbose")
+ } else {
+ os.Setenv("GO_FLAGS_COMPLETION", "1")
+ }
+
+ tmp := os.Stdout
+
+ r, w, _ := os.Pipe()
+ os.Stdout = w
+
+ out := make(chan string)
+
+ go func() {
+ var buf bytes.Buffer
+
+ io.Copy(&buf, r)
+
+ out <- buf.String()
+ }()
+
+ p := NewParser(&completionTestOptions, None)
+
+ p.CompletionHandler = func(items []Completion) {
+ comp := &completion{parser: p}
+ comp.print(items, test.ShowDescriptions)
+ }
+
+ _, err := p.ParseArgs(test.Args)
+
+ w.Close()
+
+ os.Stdout = tmp
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %s", err)
+ }
+
+ got := strings.Split(strings.Trim(<-out, "\n"), "\n")
+
+ if !reflect.DeepEqual(got, test.Completed) {
+ t.Errorf("Expected: %#v\nGot: %#v", test.Completed, got)
+ }
+ }
+
+ os.Setenv("GO_FLAGS_COMPLETION", "")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/convert.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/convert.go
new file mode 100644
index 00000000000..938c3ac1c2c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/convert.go
@@ -0,0 +1,341 @@
+// Copyright 2012 Jesse van den Kieboom. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flags
+
+import (
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// Marshaler is the interface implemented by types that can marshal themselves
+// to a string representation of the flag.
+type Marshaler interface {
+ // MarshalFlag marshals a flag value to its string representation.
+ MarshalFlag() (string, error)
+}
+
+// Unmarshaler is the interface implemented by types that can unmarshal a flag
+// argument to themselves. The provided value is directly passed from the
+// command line.
+type Unmarshaler interface {
+ // UnmarshalFlag unmarshals a string value representation to the flag
+ // value (which therefore needs to be a pointer receiver).
+ UnmarshalFlag(value string) error
+}
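+
+// As an illustrative sketch (not part of this package), a type accepting
+// values of the form NAME=SIZE could implement both interfaces like this:
+//
+//	type dimension struct {
+//		Name string
+//		Size int
+//	}
+//
+//	func (d dimension) MarshalFlag() (string, error) {
+//		return fmt.Sprintf("%s=%d", d.Name, d.Size), nil
+//	}
+//
+//	func (d *dimension) UnmarshalFlag(value string) error {
+//		parts := strings.SplitN(value, "=", 2)
+//		if len(parts) != 2 {
+//			return fmt.Errorf("expected NAME=SIZE, got %q", value)
+//		}
+//		size, err := strconv.Atoi(parts[1])
+//		if err != nil {
+//			return err
+//		}
+//		d.Name, d.Size = parts[0], size
+//		return nil
+//	}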
+
+func getBase(options multiTag, base int) (int, error) {
+ sbase := options.Get("base")
+
+ var err error
+ var ivbase int64
+
+ if sbase != "" {
+ ivbase, err = strconv.ParseInt(sbase, 10, 32)
+ base = int(ivbase)
+ }
+
+ return base, err
+}
+
+func convertMarshal(val reflect.Value) (bool, string, error) {
+ // Check first for the Marshaler interface
+ if val.Type().NumMethod() > 0 && val.CanInterface() {
+ if marshaler, ok := val.Interface().(Marshaler); ok {
+ ret, err := marshaler.MarshalFlag()
+ return true, ret, err
+ }
+ }
+
+ return false, "", nil
+}
+
+func convertToString(val reflect.Value, options multiTag) (string, error) {
+ if ok, ret, err := convertMarshal(val); ok {
+ return ret, err
+ }
+
+ tp := val.Type()
+
+ // Support for time.Duration
+ if tp == reflect.TypeOf((*time.Duration)(nil)).Elem() {
+ stringer := val.Interface().(fmt.Stringer)
+ return stringer.String(), nil
+ }
+
+ switch tp.Kind() {
+ case reflect.String:
+ return val.String(), nil
+ case reflect.Bool:
+ if val.Bool() {
+ return "true", nil
+ }
+
+ return "false", nil
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ base, err := getBase(options, 10)
+
+ if err != nil {
+ return "", err
+ }
+
+ return strconv.FormatInt(val.Int(), base), nil
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ base, err := getBase(options, 10)
+
+ if err != nil {
+ return "", err
+ }
+
+ return strconv.FormatUint(val.Uint(), base), nil
+ case reflect.Float32, reflect.Float64:
+ return strconv.FormatFloat(val.Float(), 'g', -1, tp.Bits()), nil
+ case reflect.Slice:
+ if val.Len() == 0 {
+ return "", nil
+ }
+
+ ret := "["
+
+ for i := 0; i < val.Len(); i++ {
+ if i != 0 {
+ ret += ", "
+ }
+
+ item, err := convertToString(val.Index(i), options)
+
+ if err != nil {
+ return "", err
+ }
+
+ ret += item
+ }
+
+ return ret + "]", nil
+ case reflect.Map:
+ ret := "{"
+
+ for i, key := range val.MapKeys() {
+ if i != 0 {
+ ret += ", "
+ }
+
+ keyitem, err := convertToString(key, options)
+
+ if err != nil {
+ return "", err
+ }
+
+ item, err := convertToString(val.MapIndex(key), options)
+
+ if err != nil {
+ return "", err
+ }
+
+ ret += keyitem + ":" + item
+ }
+
+ return ret + "}", nil
+ case reflect.Ptr:
+ return convertToString(reflect.Indirect(val), options)
+ case reflect.Interface:
+ if !val.IsNil() {
+ return convertToString(val.Elem(), options)
+ }
+ }
+
+ return "", nil
+}
+
+func convertUnmarshal(val string, retval reflect.Value) (bool, error) {
+ if retval.Type().NumMethod() > 0 && retval.CanInterface() {
+ if unmarshaler, ok := retval.Interface().(Unmarshaler); ok {
+ return true, unmarshaler.UnmarshalFlag(val)
+ }
+ }
+
+ if retval.Type().Kind() != reflect.Ptr && retval.CanAddr() {
+ return convertUnmarshal(val, retval.Addr())
+ }
+
+ if retval.Type().Kind() == reflect.Interface && !retval.IsNil() {
+ return convertUnmarshal(val, retval.Elem())
+ }
+
+ return false, nil
+}
+
+func convert(val string, retval reflect.Value, options multiTag) error {
+ if ok, err := convertUnmarshal(val, retval); ok {
+ return err
+ }
+
+ tp := retval.Type()
+
+ // Support for time.Duration
+ if tp == reflect.TypeOf((*time.Duration)(nil)).Elem() {
+ parsed, err := time.ParseDuration(val)
+
+ if err != nil {
+ return err
+ }
+
+ retval.SetInt(int64(parsed))
+ return nil
+ }
+
+ switch tp.Kind() {
+ case reflect.String:
+ retval.SetString(val)
+ case reflect.Bool:
+ if val == "" {
+ retval.SetBool(true)
+ } else {
+ b, err := strconv.ParseBool(val)
+
+ if err != nil {
+ return err
+ }
+
+ retval.SetBool(b)
+ }
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ base, err := getBase(options, 10)
+
+ if err != nil {
+ return err
+ }
+
+ parsed, err := strconv.ParseInt(val, base, tp.Bits())
+
+ if err != nil {
+ return err
+ }
+
+ retval.SetInt(parsed)
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ base, err := getBase(options, 10)
+
+ if err != nil {
+ return err
+ }
+
+ parsed, err := strconv.ParseUint(val, base, tp.Bits())
+
+ if err != nil {
+ return err
+ }
+
+ retval.SetUint(parsed)
+ case reflect.Float32, reflect.Float64:
+ parsed, err := strconv.ParseFloat(val, tp.Bits())
+
+ if err != nil {
+ return err
+ }
+
+ retval.SetFloat(parsed)
+ case reflect.Slice:
+ elemtp := tp.Elem()
+
+ elemvalptr := reflect.New(elemtp)
+ elemval := reflect.Indirect(elemvalptr)
+
+ if err := convert(val, elemval, options); err != nil {
+ return err
+ }
+
+ retval.Set(reflect.Append(retval, elemval))
+ case reflect.Map:
+ parts := strings.SplitN(val, ":", 2)
+
+ key := parts[0]
+ var value string
+
+ if len(parts) == 2 {
+ value = parts[1]
+ }
+
+ keytp := tp.Key()
+ keyval := reflect.New(keytp)
+
+ if err := convert(key, keyval, options); err != nil {
+ return err
+ }
+
+ valuetp := tp.Elem()
+ valueval := reflect.New(valuetp)
+
+ if err := convert(value, valueval, options); err != nil {
+ return err
+ }
+
+ if retval.IsNil() {
+ retval.Set(reflect.MakeMap(tp))
+ }
+
+ retval.SetMapIndex(reflect.Indirect(keyval), reflect.Indirect(valueval))
+ case reflect.Ptr:
+ if retval.IsNil() {
+ retval.Set(reflect.New(retval.Type().Elem()))
+ }
+
+ return convert(val, reflect.Indirect(retval), options)
+ case reflect.Interface:
+ if !retval.IsNil() {
+ return convert(val, retval.Elem(), options)
+ }
+ }
+
+ return nil
+}
+
+func isPrint(s string) bool {
+ for _, c := range s {
+ if !strconv.IsPrint(c) {
+ return false
+ }
+ }
+
+ return true
+}
+
+func quoteIfNeeded(s string) string {
+ if !isPrint(s) {
+ return strconv.Quote(s)
+ }
+
+ return s
+}
+
+func quoteIfNeededV(s []string) []string {
+ ret := make([]string, len(s))
+
+ for i, v := range s {
+ ret[i] = quoteIfNeeded(v)
+ }
+
+ return ret
+}
+
+func quoteV(s []string) []string {
+ ret := make([]string, len(s))
+
+ for i, v := range s {
+ ret[i] = strconv.Quote(v)
+ }
+
+ return ret
+}
+
+func unquoteIfPossible(s string) (string, error) {
+ if len(s) == 0 || s[0] != '"' {
+ return s, nil
+ }
+
+ return strconv.Unquote(s)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/convert_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/convert_test.go
new file mode 100644
index 00000000000..ef131dc8d1e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/convert_test.go
@@ -0,0 +1,159 @@
+package flags
+
+import (
+ "testing"
+ "time"
+)
+
+func expectConvert(t *testing.T, o *Option, expected string) {
+ s, err := convertToString(o.value, o.tag)
+
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ return
+ }
+
+ assertString(t, s, expected)
+}
+
+func TestConvertToString(t *testing.T) {
+ d, _ := time.ParseDuration("1h2m4s")
+
+ var opts = struct {
+ String string `long:"string"`
+
+ Int int `long:"int"`
+ Int8 int8 `long:"int8"`
+ Int16 int16 `long:"int16"`
+ Int32 int32 `long:"int32"`
+ Int64 int64 `long:"int64"`
+
+ Uint uint `long:"uint"`
+ Uint8 uint8 `long:"uint8"`
+ Uint16 uint16 `long:"uint16"`
+ Uint32 uint32 `long:"uint32"`
+ Uint64 uint64 `long:"uint64"`
+
+ Float32 float32 `long:"float32"`
+ Float64 float64 `long:"float64"`
+
+ Duration time.Duration `long:"duration"`
+
+ Bool bool `long:"bool"`
+
+ IntSlice []int `long:"int-slice"`
+ IntFloatMap map[int]float64 `long:"int-float-map"`
+
+ PtrBool *bool `long:"ptr-bool"`
+ Interface interface{} `long:"interface"`
+
+ Int32Base int32 `long:"int32-base" base:"16"`
+ Uint32Base uint32 `long:"uint32-base" base:"16"`
+ }{
+ "string",
+
+ -2,
+ -1,
+ 0,
+ 1,
+ 2,
+
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+
+ 1.2,
+ -3.4,
+
+ d,
+ true,
+
+ []int{-3, 4, -2},
+ map[int]float64{-2: 4.5},
+
+ new(bool),
+ float32(5.2),
+
+ -5823,
+ 4232,
+ }
+
+ p := NewNamedParser("test", Default)
+ grp, _ := p.AddGroup("test group", "", &opts)
+
+ expects := []string{
+ "string",
+ "-2",
+ "-1",
+ "0",
+ "1",
+ "2",
+
+ "1",
+ "2",
+ "3",
+ "4",
+ "5",
+
+ "1.2",
+ "-3.4",
+
+ "1h2m4s",
+ "true",
+
+ "[-3, 4, -2]",
+ "{-2:4.5}",
+
+ "false",
+ "5.2",
+
+ "-16bf",
+ "1088",
+ }
+
+ for i, v := range grp.Options() {
+ expectConvert(t, v, expects[i])
+ }
+}
+
+func TestConvertToStringInvalidIntBase(t *testing.T) {
+ var opts = struct {
+ Int int `long:"int" base:"no"`
+ }{
+ 2,
+ }
+
+ p := NewNamedParser("test", Default)
+ grp, _ := p.AddGroup("test group", "", &opts)
+ o := grp.Options()[0]
+
+ _, err := convertToString(o.value, o.tag)
+
+ if err != nil {
+ err = newErrorf(ErrMarshal, "%v", err)
+ }
+
+ assertError(t, err, ErrMarshal, "strconv.ParseInt: parsing \"no\": invalid syntax")
+}
+
+func TestConvertToStringInvalidUintBase(t *testing.T) {
+ var opts = struct {
+ Uint uint `long:"uint" base:"no"`
+ }{
+ 2,
+ }
+
+ p := NewNamedParser("test", Default)
+ grp, _ := p.AddGroup("test group", "", &opts)
+ o := grp.Options()[0]
+
+ _, err := convertToString(o.value, o.tag)
+
+ if err != nil {
+ err = newErrorf(ErrMarshal, "%v", err)
+ }
+
+ assertError(t, err, ErrMarshal, "strconv.ParseInt: parsing \"no\": invalid syntax")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/error.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/error.go
new file mode 100644
index 00000000000..2f27aeeee2f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/error.go
@@ -0,0 +1,129 @@
+package flags
+
+import (
+ "fmt"
+)
+
+// ErrorType represents the type of error.
+type ErrorType uint
+
+const (
+ // ErrUnknown indicates a generic error.
+ ErrUnknown ErrorType = iota
+
+ // ErrExpectedArgument indicates that an argument was expected.
+ ErrExpectedArgument
+
+ // ErrUnknownFlag indicates an unknown flag.
+ ErrUnknownFlag
+
+ // ErrUnknownGroup indicates an unknown group.
+ ErrUnknownGroup
+
+ // ErrMarshal indicates a marshalling error while converting values.
+ ErrMarshal
+
+ // ErrHelp indicates that the built-in help was shown (the error
+ // contains the help message).
+ ErrHelp
+
+ // ErrNoArgumentForBool indicates that an argument was given for a
+	// boolean flag (boolean flags do not take any arguments).
+ ErrNoArgumentForBool
+
+ // ErrRequired indicates that a required flag was not provided.
+ ErrRequired
+
+ // ErrShortNameTooLong indicates that a short flag name was specified,
+	// ErrShortNameTooLong indicates that a short flag name longer than one
+	// character was specified.
+
+ // ErrDuplicatedFlag indicates that a short or long flag has been
+ // defined more than once
+	// defined more than once.
+
+ // ErrTag indicates an error while parsing flag tags.
+ ErrTag
+
+ // ErrCommandRequired indicates that a command was required but not
+ // specified
+	// specified.
+
+ // ErrUnknownCommand indicates that an unknown command was specified.
+ ErrUnknownCommand
+
+	// ErrInvalidChoice indicates that an invalid value was given for an
+	// option that only accepts a fixed set of choices.
+ ErrInvalidChoice
+)
+
+func (e ErrorType) String() string {
+ switch e {
+ case ErrUnknown:
+ return "unknown"
+ case ErrExpectedArgument:
+ return "expected argument"
+ case ErrUnknownFlag:
+ return "unknown flag"
+ case ErrUnknownGroup:
+ return "unknown group"
+ case ErrMarshal:
+ return "marshal"
+ case ErrHelp:
+ return "help"
+ case ErrNoArgumentForBool:
+ return "no argument for bool"
+ case ErrRequired:
+ return "required"
+ case ErrShortNameTooLong:
+ return "short name too long"
+ case ErrDuplicatedFlag:
+ return "duplicated flag"
+ case ErrTag:
+ return "tag"
+ case ErrCommandRequired:
+ return "command required"
+ case ErrUnknownCommand:
+ return "unknown command"
+ case ErrInvalidChoice:
+ return "invalid choice"
+ }
+
+ return "unrecognized error type"
+}
+
+// Error represents a parser error. The error returned from Parse is of this
+// type. The error contains both a Type and Message.
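+//
+// A minimal sketch of inspecting the error after parsing (the options struct
+// and arguments are illustrative):
+//
+//     if _, err := ParseArgs(&opts, args); err != nil {
+//         if flagsErr, ok := err.(*Error); ok && flagsErr.Type == ErrHelp {
+//             // the built-in help was printed; not a real failure
+//         }
+//     }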
+type Error struct {
+ // The type of error
+ Type ErrorType
+
+ // The error message
+ Message string
+}
+
+// Error returns the error's message
+func (e *Error) Error() string {
+ return e.Message
+}
+
+func newError(tp ErrorType, message string) *Error {
+ return &Error{
+ Type: tp,
+ Message: message,
+ }
+}
+
+func newErrorf(tp ErrorType, format string, args ...interface{}) *Error {
+ return newError(tp, fmt.Sprintf(format, args...))
+}
+
+func wrapError(err error) *Error {
+ ret, ok := err.(*Error)
+
+ if !ok {
+ return newError(ErrUnknown, err.Error())
+ }
+
+ return ret
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/example_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/example_test.go
new file mode 100644
index 00000000000..f7be2bb14f2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/example_test.go
@@ -0,0 +1,110 @@
+// Example of use of the flags package.
+package flags
+
+import (
+ "fmt"
+ "os/exec"
+)
+
+func Example() {
+ var opts struct {
+ // Slice of bool will append 'true' each time the option
+ // is encountered (can be set multiple times, like -vvv)
+ Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
+
+ // Example of automatic marshalling to desired type (uint)
+ Offset uint `long:"offset" description:"Offset"`
+
+ // Example of a callback, called each time the option is found.
+ Call func(string) `short:"c" description:"Call phone number"`
+
+ // Example of a required flag
+ Name string `short:"n" long:"name" description:"A name" required:"true"`
+
+ // Example of a value name
+ File string `short:"f" long:"file" description:"A file" value-name:"FILE"`
+
+ // Example of a pointer
+ Ptr *int `short:"p" description:"A pointer to an integer"`
+
+ // Example of a slice of strings
+ StringSlice []string `short:"s" description:"A slice of strings"`
+
+ // Example of a slice of pointers
+ PtrSlice []*string `long:"ptrslice" description:"A slice of pointers to string"`
+
+ // Example of a map
+ IntMap map[string]int `long:"intmap" description:"A map from string to int"`
+
+ // Example of a filename (useful for completion)
+ Filename Filename `long:"filename" description:"A filename"`
+
+ // Example of positional arguments
+ Args struct {
+ Id string
+ Num int
+ Rest []string
+ } `positional-args:"yes" required:"yes"`
+ }
+
+ // Callback which will invoke callto:<argument> to call a number.
+ // Note that this works just on OS X (and probably only with
+ // Skype) but it shows the idea.
+ opts.Call = func(num string) {
+ cmd := exec.Command("open", "callto:"+num)
+ cmd.Start()
+ cmd.Process.Release()
+ }
+
+ // Make some fake arguments to parse.
+ args := []string{
+ "-vv",
+ "--offset=5",
+ "-n", "Me",
+ "-p", "3",
+ "-s", "hello",
+ "-s", "world",
+ "--ptrslice", "hello",
+ "--ptrslice", "world",
+ "--intmap", "a:1",
+ "--intmap", "b:5",
+ "--filename", "hello.go",
+ "id",
+ "10",
+ "remaining1",
+ "remaining2",
+ }
+
+ // Parse flags from `args'. Note that here we use flags.ParseArgs for
+ // the sake of making a working example. Normally, you would simply use
+ // flags.Parse(&opts) which uses os.Args
+ _, err := ParseArgs(&opts, args)
+
+ if err != nil {
+ panic(err)
+ }
+
+ fmt.Printf("Verbosity: %v\n", opts.Verbose)
+ fmt.Printf("Offset: %d\n", opts.Offset)
+ fmt.Printf("Name: %s\n", opts.Name)
+ fmt.Printf("Ptr: %d\n", *opts.Ptr)
+ fmt.Printf("StringSlice: %v\n", opts.StringSlice)
+ fmt.Printf("PtrSlice: [%v %v]\n", *opts.PtrSlice[0], *opts.PtrSlice[1])
+ fmt.Printf("IntMap: [a:%v b:%v]\n", opts.IntMap["a"], opts.IntMap["b"])
+ fmt.Printf("Filename: %v\n", opts.Filename)
+ fmt.Printf("Args.Id: %s\n", opts.Args.Id)
+ fmt.Printf("Args.Num: %d\n", opts.Args.Num)
+ fmt.Printf("Args.Rest: %v\n", opts.Args.Rest)
+
+ // Output: Verbosity: [true true]
+ // Offset: 5
+ // Name: Me
+ // Ptr: 3
+ // StringSlice: [hello world]
+ // PtrSlice: [hello world]
+ // IntMap: [a:1 b:5]
+ // Filename: hello.go
+ // Args.Id: id
+ // Args.Num: 10
+ // Args.Rest: [remaining1 remaining2]
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/add.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/add.go
new file mode 100644
index 00000000000..57d8f232b21
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/add.go
@@ -0,0 +1,23 @@
+package main
+
+import (
+ "fmt"
+)
+
+type AddCommand struct {
+ All bool `short:"a" long:"all" description:"Add all files"`
+}
+
+var addCommand AddCommand
+
+func (x *AddCommand) Execute(args []string) error {
+ fmt.Printf("Adding (all=%v): %#v\n", x.All, args)
+ return nil
+}
+
+func init() {
+ parser.AddCommand("add",
+ "Add a file",
+ "The add command adds a file to the repository. Use -a to add all files.",
+ &addCommand)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/bash-completion b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/bash-completion
new file mode 100644
index 00000000000..974f52ad43f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/bash-completion
@@ -0,0 +1,9 @@
+_examples() {
+ args=("${COMP_WORDS[@]:1:$COMP_CWORD}")
+
+ local IFS=$'\n'
+ COMPREPLY=($(GO_FLAGS_COMPLETION=1 ${COMP_WORDS[0]} "${args[@]}"))
+ return 1
+}
+
+complete -F _examples examples
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/main.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/main.go
new file mode 100644
index 00000000000..4a22be6e86d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/main.go
@@ -0,0 +1,75 @@
+package main
+
+import (
+ "errors"
+ "fmt"
+ "github.com/jessevdk/go-flags"
+ "os"
+ "strconv"
+ "strings"
+)
+
+type EditorOptions struct {
+ Input flags.Filename `short:"i" long:"input" description:"Input file" default:"-"`
+ Output flags.Filename `short:"o" long:"output" description:"Output file" default:"-"`
+}
+
+type Point struct {
+ X, Y int
+}
+
+func (p *Point) UnmarshalFlag(value string) error {
+ parts := strings.Split(value, ",")
+
+ if len(parts) != 2 {
+ return errors.New("expected two numbers separated by a ,")
+ }
+
+ x, err := strconv.ParseInt(parts[0], 10, 32)
+
+ if err != nil {
+ return err
+ }
+
+ y, err := strconv.ParseInt(parts[1], 10, 32)
+
+ if err != nil {
+ return err
+ }
+
+ p.X = int(x)
+ p.Y = int(y)
+
+ return nil
+}
+
+func (p Point) MarshalFlag() (string, error) {
+ return fmt.Sprintf("%d,%d", p.X, p.Y), nil
+}
+
+type Options struct {
+ // Example of verbosity with level
+ Verbose []bool `short:"v" long:"verbose" description:"Verbose output"`
+
+ // Example of optional value
+ User string `short:"u" long:"user" description:"User name" optional:"yes" optional-value:"pancake"`
+
+ // Example of map with multiple default values
+ Users map[string]string `long:"users" description:"User e-mail map" default:"system:system@example.org" default:"admin:admin@example.org"`
+
+ // Example of option group
+ Editor EditorOptions `group:"Editor Options"`
+
+ // Example of custom type Marshal/Unmarshal
+ Point Point `long:"point" description:"A x,y point" default:"1,2"`
+}
+
+var options Options
+
+var parser = flags.NewParser(&options, flags.Default)
+
+func main() {
+ if _, err := parser.Parse(); err != nil {
+ os.Exit(1)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/rm.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/rm.go
new file mode 100644
index 00000000000..c9c1dd03a02
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/examples/rm.go
@@ -0,0 +1,23 @@
+package main
+
+import (
+ "fmt"
+)
+
+type RmCommand struct {
+ Force bool `short:"f" long:"force" description:"Force removal of files"`
+}
+
+var rmCommand RmCommand
+
+func (x *RmCommand) Execute(args []string) error {
+ fmt.Printf("Removing (force=%v): %#v\n", x.Force, args)
+ return nil
+}
+
+func init() {
+ parser.AddCommand("rm",
+ "Remove a file",
+		"The rm command removes a file from the repository. Use -f to force removal of files.",
+ &rmCommand)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/flags.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/flags.go
new file mode 100644
index 00000000000..757d42a56ea
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/flags.go
@@ -0,0 +1,256 @@
+// Copyright 2012 Jesse van den Kieboom. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package flags provides an extensive command line option parser.
+The flags package is similar in functionality to the Go built-in flag package
+but offers more options and uses reflection to provide a convenient and
+succinct way of specifying command line options.
+
+
+Supported features
+
+The following features are supported in go-flags:
+
+ Options with short names (-v)
+ Options with long names (--verbose)
+    Options with and without arguments (bool vs. other types)
+ Options with optional arguments and default values
+ Option default values from ENVIRONMENT_VARIABLES, including slice and map values
+ Multiple option groups each containing a set of options
+ Generate and print well-formatted help message
+ Passing remaining command line arguments after -- (optional)
+ Ignoring unknown command line options (optional)
+ Supports -I/usr/include -I=/usr/include -I /usr/include option argument specification
+ Supports multiple short options -aux
+ Supports all primitive go types (string, int{8..64}, uint{8..64}, float)
+ Supports same option multiple times (can store in slice or last option counts)
+ Supports maps
+ Supports function callbacks
+ Supports namespaces for (nested) option groups
+
+Additional features specific to Windows:
+ Options with short names (/v)
+ Options with long names (/verbose)
+ Windows-style options with arguments use a colon as the delimiter
+ Modify generated help message with Windows-style / options
+
+
+Basic usage
+
+The flags package uses structs, reflection and struct field tags
+to allow users to specify command line options. This results in very simple
+and concise specification of your application options. For example:
+
+ type Options struct {
+ Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
+ }
+
+This specifies one option with a short name -v and a long name --verbose.
+When either -v or --verbose is found on the command line, a 'true' value
+will be appended to the Verbose field. e.g. when specifying -vvv, the
+resulting value of Verbose will be {[true, true, true]}.
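+
+A minimal sketch of parsing such an option (the argument list shown here is
+only illustrative):
+
+    var opts Options
+
+    remaining, err := flags.ParseArgs(&opts, []string{"-vv", "rest"})
+    // on success, opts.Verbose is [true true] and remaining is ["rest"];
+    // err reports any parse failure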
+
+Slice options work exactly the same as primitive type options, except that
+whenever the option is encountered, a value is appended to the slice.
+
+Map options from string to primitive type are also supported. On the command
+line, you specify the value for such an option as key:value. For example
+
+ type Options struct {
+        AuthorInfo map[string]string `short:"a"`
+ }
+
+Then, the AuthorInfo map can be filled with something like
+-a name:Jesse -a "surname:van den Kieboom".
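+
+A short sketch of parsing such a map option (assuming the Options type just
+shown; the values are only illustrative):
+
+    var opts Options
+
+    _, err := flags.ParseArgs(&opts,
+        []string{"-a", "name:Jesse", "-a", "surname:van den Kieboom"})
+    // on success, opts.AuthorInfo maps "name" to "Jesse" and "surname" to
+    // "van den Kieboom"; err reports any conversion failure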
+
+Finally, for full control over the conversion between command line argument
+values and options, user defined types can choose to implement the Marshaler
+and Unmarshaler interfaces.
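+
+A small sketch of such a type (the Level type here is purely illustrative;
+examples/main.go ships a comparable Point example):
+
+    type Level uint
+
+    func (l Level) MarshalFlag() (string, error) {
+        return strconv.FormatUint(uint64(l), 10), nil
+    }
+
+    func (l *Level) UnmarshalFlag(value string) error {
+        v, err := strconv.ParseUint(value, 10, 32)
+
+        if err != nil {
+            return err
+        }
+
+        *l = Level(v)
+        return nil
+    }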
+
+
+Available field tags
+
+The following is a list of tags for struct fields supported by go-flags:
+
+ short: the short name of the option (single character)
+ long: the long name of the option
+ required: whether an option is required to appear on the command
+ line. If a required option is not present, the parser will
+ return ErrRequired (optional)
+ description: the description of the option (optional)
+ long-description: the long description of the option. Currently only
+ displayed in generated man pages (optional)
+ no-flag: if non-empty this field is ignored as an option (optional)
+
+ optional: whether an argument of the option is optional. When an
+ argument is optional it can only be specified using
+ --option=argument (optional)
+ optional-value: the value of an optional option when the option occurs
+ without an argument. This tag can be specified multiple
+ times in the case of maps or slices (optional)
+ default: the default value of an option. This tag can be specified
+ multiple times in the case of slices or maps (optional)
+ default-mask: when specified, this value will be displayed in the help
+ instead of the actual default value. This is useful
+ mostly for hiding otherwise sensitive information from
+ showing up in the help. If default-mask takes the special
+ value "-", then no default value will be shown at all
+ (optional)
+ env: the default value of the option is overridden from the
+ specified environment variable, if one has been defined.
+ (optional)
+ env-delim: the 'env' default value from environment is split into
+ multiple values with the given delimiter string, use with
+ slices and maps (optional)
+ value-name: the name of the argument value (to be shown in the help)
+ (optional)
+ choice: limits the values for an option to a set of values.
+                    This tag can be specified multiple times (optional)
+ hidden: the option is not visible in the help or man page.
+
+ base: a base (radix) used to convert strings to integer values, the
+ default base is 10 (i.e. decimal) (optional)
+
+ ini-name: the explicit ini option name (optional)
+ no-ini: if non-empty this field is ignored as an ini option
+ (optional)
+
+ group: when specified on a struct field, makes the struct
+ field a separate group with the given name (optional)
+ namespace: when specified on a group struct field, the namespace
+ gets prepended to every option's long name and
+ subgroup's namespace of this group, separated by
+ the parser's namespace delimiter (optional)
+ command: when specified on a struct field, makes the struct
+ field a (sub)command with the given name (optional)
+ subcommands-optional: when specified on a command struct field, makes
+ any subcommands of that command optional (optional)
+ alias: when specified on a command struct field, adds the
+ specified name as an alias for the command. Can be
+                        specified multiple times to add more than one
+ alias (optional)
+ positional-args: when specified on a field with a struct type,
+ uses the fields of that struct to parse remaining
+                        uses the fields of that struct to parse the remaining
+                        positional command line arguments (in the order of
+                        the fields). If a field has a slice type,
+ Positional arguments are optional by default,
+ unless the "required" tag is specified together
+ with the "positional-args" tag. The "required" tag
+ can also be set on the individual rest argument
+ fields, to require only the first N positional
+ arguments. If the "required" tag is set on the
+ rest arguments slice, then its value determines
+                        the minimum number of rest arguments that need to
+ be provided (e.g. `required:"2"`) (optional)
+ positional-arg-name: used on a field in a positional argument struct; name
+ of the positional argument placeholder to be shown in
+ the help (optional)
+
+Either the `short:` or the `long:` tag must be specified to make the field
+eligible as an option.
+
+
+Option groups
+
+Option groups are a simple way to semantically separate your options. All
+options in a particular group are shown together in the help under the name
+of the group. Namespaces can be used to specify option long names more
+precisely and emphasize the options' affiliation with their group.
+
+There are currently three ways to specify option groups (a short sketch of
+the second approach is shown after the list).
+
+ 1. Use NewNamedParser specifying the various option groups.
+ 2. Use AddGroup to add a group to an existing parser.
+ 3. Add a struct field to the top-level options annotated with the
+ group:"group-name" tag.
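+
+A short sketch of the second approach (the group name, description and
+options are illustrative):
+
+    var opts struct {
+        Verbose bool `short:"v"`
+    }
+
+    var extra struct {
+        Value bool `short:"g"`
+    }
+
+    p := flags.NewParser(&opts, flags.Default)
+
+    _, err := p.AddGroup("Extra Options", "Options that do extra things", &extra)
+    // err reports problems scanning the struct, for example a short name
+    // that is longer than one character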
+
+
+
+Commands
+
+The flags package also has basic support for commands. Commands are often
+used in monolithic applications that support various commands or actions.
+Take git for example: add, commit, checkout, etc. are all commands. Using
+commands, you can easily separate multiple functions of your application.
+
+There are currently two ways to specify a command.
+
+ 1. Use AddCommand on an existing parser.
+ 2. Add a struct field to your options struct annotated with the
+ command:"command-name" tag.
+
+The most common, idiomatic way to implement commands is to define a global
+parser instance and implement each command in a separate file. These
+command files should define a Go init function which calls AddCommand on
+the global parser.
+
+When parsing ends and there is an active command and that command implements
+the Commander interface, then its Execute method will be run with the
+remaining command line arguments.
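+
+A compact sketch of that pattern, mirroring the add command shipped in the
+examples directory (it assumes a package level `parser' variable as
+described above):
+
+    type AddCommand struct {
+        All bool `short:"a" long:"all" description:"Add all files"`
+    }
+
+    func (c *AddCommand) Execute(args []string) error {
+        fmt.Printf("adding (all=%v): %v\n", c.All, args)
+        return nil
+    }
+
+    func init() {
+        parser.AddCommand("add",
+            "Add a file",
+            "The add command adds a file to the repository.",
+            &AddCommand{})
+    }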
+
+Command structs can have options which become valid to parse after the
+command has been specified on the command line, in addition to the options
+of all the parent commands. For example, given a -v flag on the parser and an
+add command, the following are equivalent:
+
+ ./app -v add
+ ./app add -v
+
+However, if the -v flag is defined on the add command, then the first of
+the two examples above would fail since the -v flag is not defined before
+the add command.
+
+
+Completion
+
+go-flags has builtin support to provide bash completion of flags, commands
+and argument values. To use completion, the binary which uses go-flags
+can be invoked in a special environment to list completion of the current
+command line argument. It should be noted that this `executes` your application,
+and it is up to the user to make sure there are no negative side effects (for
+example from init functions).
+
+Setting the environment variable `GO_FLAGS_COMPLETION=1` enables completion
+by replacing the argument parsing routine with the completion routine which
+outputs completions for the passed arguments. The basic invocation to
+complete a set of arguments is therefore:
+
+ GO_FLAGS_COMPLETION=1 ./completion-example arg1 arg2 arg3
+
+where `completion-example` is the binary, `arg1` and `arg2` are
+the current arguments, and `arg3` (the last argument) is the argument
+to be completed. If GO_FLAGS_COMPLETION is set to "verbose", descriptions
+of the possible completion items are also shown when there is more than
+one completion item.
+
+To use this with bash completion, a simple file can be written which
+calls the binary which supports go-flags completion:
+
+ _completion_example() {
+ # All arguments except the first one
+ args=("${COMP_WORDS[@]:1:$COMP_CWORD}")
+
+ # Only split on newlines
+ local IFS=$'\n'
+
+ # Call completion (note that the first element of COMP_WORDS is
+ # the executable itself)
+ COMPREPLY=($(GO_FLAGS_COMPLETION=1 ${COMP_WORDS[0]} "${args[@]}"))
+ return 0
+ }
+
+ complete -F _completion_example completion-example
+
+Completion requires the parser option PassDoubleDash; that option is
+therefore enforced whenever the environment variable GO_FLAGS_COMPLETION
+is set.
+
+Customized completion for argument values is supported by implementing
+the flags.Completer interface for the argument value type. An example
+of a type which does so is the flags.Filename type, an alias of string
+allowing simple filename completion. A slice or array argument value
+whose element type implements flags.Completer will also be completed.
+*/
+package flags
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/group.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/group.go
new file mode 100644
index 00000000000..debb8de482f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/group.go
@@ -0,0 +1,379 @@
+// Copyright 2012 Jesse van den Kieboom. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flags
+
+import (
+ "errors"
+ "reflect"
+ "strings"
+ "unicode/utf8"
+ "unsafe"
+)
+
+// ErrNotPointerToStruct indicates that a provided data container is not
+// a pointer to a struct. Only pointers to structs are valid data containers
+// for options.
+var ErrNotPointerToStruct = errors.New("provided data is not a pointer to struct")
+
+// Group represents an option group. Option groups can be used to logically
+// group options together under a description. Groups are only used to provide
+// more structure to options both for the user (as displayed in the help message)
+// and for you, since groups can be nested.
+type Group struct {
+ // A short description of the group. The
+ // short description is primarily used in the built-in generated help
+ // message
+ ShortDescription string
+
+ // A long description of the group. The long
+ // description is primarily used to present information on commands
+ // (Command embeds Group) in the built-in generated help and man pages.
+ LongDescription string
+
+ // The namespace of the group
+ Namespace string
+
+ // If true, the group is not displayed in the help or man page
+ Hidden bool
+
+ // The parent of the group or nil if it has no parent
+ parent interface{}
+
+ // All the options in the group
+ options []*Option
+
+ // All the subgroups
+ groups []*Group
+
+ // Whether the group represents the built-in help group
+ isBuiltinHelp bool
+
+ data interface{}
+}
+
+type scanHandler func(reflect.Value, *reflect.StructField) (bool, error)
+
+// AddGroup adds a new group to the command with the given name and data. The
+// data must be a pointer to a struct whose fields indicate which options
+// are in the group.
+func (g *Group) AddGroup(shortDescription string, longDescription string, data interface{}) (*Group, error) {
+ group := newGroup(shortDescription, longDescription, data)
+
+ group.parent = g
+
+ if err := group.scan(); err != nil {
+ return nil, err
+ }
+
+ g.groups = append(g.groups, group)
+ return group, nil
+}
+
+// Groups returns the list of groups embedded in this group.
+func (g *Group) Groups() []*Group {
+ return g.groups
+}
+
+// Options returns the list of options in this group.
+func (g *Group) Options() []*Option {
+ return g.options
+}
+
+// Find locates the subgroup with the given short description and returns it.
+// If no such group can be found, Find returns nil. Note that the description
+// is matched case insensitively.
+func (g *Group) Find(shortDescription string) *Group {
+ lshortDescription := strings.ToLower(shortDescription)
+
+ var ret *Group
+
+ g.eachGroup(func(gg *Group) {
+ if gg != g && strings.ToLower(gg.ShortDescription) == lshortDescription {
+ ret = gg
+ }
+ })
+
+ return ret
+}
+
+func (g *Group) findOption(matcher func(*Option) bool) (option *Option) {
+ g.eachGroup(func(g *Group) {
+ for _, opt := range g.options {
+ if option == nil && matcher(opt) {
+ option = opt
+ }
+ }
+ })
+
+ return option
+}
+
+// FindOptionByLongName finds an option that is part of the group, or any of
+// its subgroups, by matching its long name (including the option namespace).
+func (g *Group) FindOptionByLongName(longName string) *Option {
+ return g.findOption(func(option *Option) bool {
+ return option.LongNameWithNamespace() == longName
+ })
+}
+
+// FindOptionByShortName finds an option that is part of the group, or any of
+// its subgroups, by matching its short name.
+func (g *Group) FindOptionByShortName(shortName rune) *Option {
+ return g.findOption(func(option *Option) bool {
+ return option.ShortName == shortName
+ })
+}
+
+func newGroup(shortDescription string, longDescription string, data interface{}) *Group {
+ return &Group{
+ ShortDescription: shortDescription,
+ LongDescription: longDescription,
+
+ data: data,
+ }
+}
+
+func (g *Group) optionByName(name string, namematch func(*Option, string) bool) *Option {
+ prio := 0
+ var retopt *Option
+
+ g.eachGroup(func(g *Group) {
+ for _, opt := range g.options {
+ if namematch != nil && namematch(opt, name) && prio < 4 {
+ retopt = opt
+ prio = 4
+ }
+
+ if name == opt.field.Name && prio < 3 {
+ retopt = opt
+ prio = 3
+ }
+
+ if name == opt.LongNameWithNamespace() && prio < 2 {
+ retopt = opt
+ prio = 2
+ }
+
+ if opt.ShortName != 0 && name == string(opt.ShortName) && prio < 1 {
+ retopt = opt
+ prio = 1
+ }
+ }
+ })
+
+ return retopt
+}
+
+func (g *Group) eachGroup(f func(*Group)) {
+ f(g)
+
+ for _, gg := range g.groups {
+ gg.eachGroup(f)
+ }
+}
+
+func (g *Group) scanStruct(realval reflect.Value, sfield *reflect.StructField, handler scanHandler) error {
+ stype := realval.Type()
+
+ if sfield != nil {
+ if ok, err := handler(realval, sfield); err != nil {
+ return err
+ } else if ok {
+ return nil
+ }
+ }
+
+ for i := 0; i < stype.NumField(); i++ {
+ field := stype.Field(i)
+
+		// PkgPath is set only for non-exported fields, which we ignore
+ if field.PkgPath != "" && !field.Anonymous {
+ continue
+ }
+
+ mtag := newMultiTag(string(field.Tag))
+
+ if err := mtag.Parse(); err != nil {
+ return err
+ }
+
+ // Skip fields with the no-flag tag
+ if mtag.Get("no-flag") != "" {
+ continue
+ }
+
+ // Dive deep into structs or pointers to structs
+ kind := field.Type.Kind()
+ fld := realval.Field(i)
+
+ if kind == reflect.Struct {
+ if err := g.scanStruct(fld, &field, handler); err != nil {
+ return err
+ }
+ } else if kind == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct {
+ if fld.IsNil() {
+ fld.Set(reflect.New(fld.Type().Elem()))
+ }
+
+ if err := g.scanStruct(reflect.Indirect(fld), &field, handler); err != nil {
+ return err
+ }
+ }
+
+ longname := mtag.Get("long")
+ shortname := mtag.Get("short")
+
+ // Need at least either a short or long name
+ if longname == "" && shortname == "" && mtag.Get("ini-name") == "" {
+ continue
+ }
+
+ short := rune(0)
+ rc := utf8.RuneCountInString(shortname)
+
+ if rc > 1 {
+ return newErrorf(ErrShortNameTooLong,
+ "short names can only be 1 character long, not `%s'",
+ shortname)
+
+ } else if rc == 1 {
+ short, _ = utf8.DecodeRuneInString(shortname)
+ }
+
+ description := mtag.Get("description")
+ def := mtag.GetMany("default")
+
+ optionalValue := mtag.GetMany("optional-value")
+ valueName := mtag.Get("value-name")
+ defaultMask := mtag.Get("default-mask")
+
+ optional := (mtag.Get("optional") != "")
+ required := (mtag.Get("required") != "")
+ choices := mtag.GetMany("choice")
+ hidden := (mtag.Get("hidden") != "")
+
+ option := &Option{
+ Description: description,
+ ShortName: short,
+ LongName: longname,
+ Default: def,
+ EnvDefaultKey: mtag.Get("env"),
+ EnvDefaultDelim: mtag.Get("env-delim"),
+ OptionalArgument: optional,
+ OptionalValue: optionalValue,
+ Required: required,
+ ValueName: valueName,
+ DefaultMask: defaultMask,
+ Choices: choices,
+ Hidden: hidden,
+
+ group: g,
+
+ field: field,
+ value: realval.Field(i),
+ tag: mtag,
+ }
+
+ g.options = append(g.options, option)
+ }
+
+ return nil
+}
+
+func (g *Group) checkForDuplicateFlags() *Error {
+ shortNames := make(map[rune]*Option)
+ longNames := make(map[string]*Option)
+
+ var duplicateError *Error
+
+ g.eachGroup(func(g *Group) {
+ for _, option := range g.options {
+ if option.LongName != "" {
+ longName := option.LongNameWithNamespace()
+
+ if otherOption, ok := longNames[longName]; ok {
+ duplicateError = newErrorf(ErrDuplicatedFlag, "option `%s' uses the same long name as option `%s'", option, otherOption)
+ return
+ }
+ longNames[longName] = option
+ }
+ if option.ShortName != 0 {
+ if otherOption, ok := shortNames[option.ShortName]; ok {
+ duplicateError = newErrorf(ErrDuplicatedFlag, "option `%s' uses the same short name as option `%s'", option, otherOption)
+ return
+ }
+ shortNames[option.ShortName] = option
+ }
+ }
+ })
+
+ return duplicateError
+}
+
+func (g *Group) scanSubGroupHandler(realval reflect.Value, sfield *reflect.StructField) (bool, error) {
+ mtag := newMultiTag(string(sfield.Tag))
+
+ if err := mtag.Parse(); err != nil {
+ return true, err
+ }
+
+ subgroup := mtag.Get("group")
+
+ if len(subgroup) != 0 {
+ ptrval := reflect.NewAt(realval.Type(), unsafe.Pointer(realval.UnsafeAddr()))
+ description := mtag.Get("description")
+
+ group, err := g.AddGroup(subgroup, description, ptrval.Interface())
+ if err != nil {
+ return true, err
+ }
+
+ group.Namespace = mtag.Get("namespace")
+ group.Hidden = mtag.Get("hidden") != ""
+
+ return true, nil
+ }
+
+ return false, nil
+}
+
+func (g *Group) scanType(handler scanHandler) error {
+ // Get all the public fields in the data struct
+ ptrval := reflect.ValueOf(g.data)
+
+ if ptrval.Type().Kind() != reflect.Ptr {
+ panic(ErrNotPointerToStruct)
+ }
+
+ stype := ptrval.Type().Elem()
+
+ if stype.Kind() != reflect.Struct {
+ panic(ErrNotPointerToStruct)
+ }
+
+ realval := reflect.Indirect(ptrval)
+
+ if err := g.scanStruct(realval, nil, handler); err != nil {
+ return err
+ }
+
+ if err := g.checkForDuplicateFlags(); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (g *Group) scan() error {
+ return g.scanType(g.scanSubGroupHandler)
+}
+
+func (g *Group) groupByName(name string) *Group {
+ if len(name) == 0 {
+ return g
+ }
+
+ return g.Find(name)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/group_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/group_test.go
new file mode 100644
index 00000000000..18cd6c17394
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/group_test.go
@@ -0,0 +1,255 @@
+package flags
+
+import (
+ "testing"
+)
+
+func TestGroupInline(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Group struct {
+ G bool `short:"g"`
+ } `group:"Grouped Options"`
+ }{}
+
+ p, ret := assertParserSuccess(t, &opts, "-v", "-g")
+
+ assertStringArray(t, ret, []string{})
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if !opts.Group.G {
+ t.Errorf("Expected Group.G to be true")
+ }
+
+ if p.Command.Group.Find("Grouped Options") == nil {
+ t.Errorf("Expected to find group `Grouped Options'")
+ }
+}
+
+func TestGroupAdd(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+ }{}
+
+ var grp = struct {
+ G bool `short:"g"`
+ }{}
+
+ p := NewParser(&opts, Default)
+ g, err := p.AddGroup("Grouped Options", "", &grp)
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ return
+ }
+
+ ret, err := p.ParseArgs([]string{"-v", "-g", "rest"})
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ return
+ }
+
+ assertStringArray(t, ret, []string{"rest"})
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if !grp.G {
+ t.Errorf("Expected Group.G to be true")
+ }
+
+ if p.Command.Group.Find("Grouped Options") != g {
+ t.Errorf("Expected to find group `Grouped Options'")
+ }
+
+ if p.Groups()[1] != g {
+		t.Errorf("Expected group %#v, but got %#v", g, p.Groups()[1])
+ }
+
+ if g.Options()[0].ShortName != 'g' {
+ t.Errorf("Expected short name `g' but got %v", g.Options()[0].ShortName)
+ }
+}
+
+func TestGroupNestedInline(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+
+ Group struct {
+ G bool `short:"g"`
+
+ Nested struct {
+ N string `long:"n"`
+ } `group:"Nested Options"`
+ } `group:"Grouped Options"`
+ }{}
+
+ p, ret := assertParserSuccess(t, &opts, "-v", "-g", "--n", "n", "rest")
+
+ assertStringArray(t, ret, []string{"rest"})
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ if !opts.Group.G {
+ t.Errorf("Expected Group.G to be true")
+ }
+
+ assertString(t, opts.Group.Nested.N, "n")
+
+ if p.Command.Group.Find("Grouped Options") == nil {
+ t.Errorf("Expected to find group `Grouped Options'")
+ }
+
+ if p.Command.Group.Find("Nested Options") == nil {
+ t.Errorf("Expected to find group `Nested Options'")
+ }
+}
+
+func TestGroupNestedInlineNamespace(t *testing.T) {
+ var opts = struct {
+ Opt string `long:"opt"`
+
+ Group struct {
+ Opt string `long:"opt"`
+ Group struct {
+ Opt string `long:"opt"`
+ } `group:"Subsubgroup" namespace:"sap"`
+ } `group:"Subgroup" namespace:"sip"`
+ }{}
+
+ p, ret := assertParserSuccess(t, &opts, "--opt", "a", "--sip.opt", "b", "--sip.sap.opt", "c", "rest")
+
+ assertStringArray(t, ret, []string{"rest"})
+
+ assertString(t, opts.Opt, "a")
+ assertString(t, opts.Group.Opt, "b")
+ assertString(t, opts.Group.Group.Opt, "c")
+
+ for _, name := range []string{"Subgroup", "Subsubgroup"} {
+ if p.Command.Group.Find(name) == nil {
+ t.Errorf("Expected to find group '%s'", name)
+ }
+ }
+}
+
+func TestDuplicateShortFlags(t *testing.T) {
+ var opts struct {
+ Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
+ Variables []string `short:"v" long:"variable" description:"Set a variable value."`
+ }
+
+ args := []string{
+ "--verbose",
+ "-v", "123",
+ "-v", "456",
+ }
+
+ _, err := ParseArgs(&opts, args)
+
+ if err == nil {
+ t.Errorf("Expected an error with type ErrDuplicatedFlag")
+ } else {
+ err2 := err.(*Error)
+ if err2.Type != ErrDuplicatedFlag {
+ t.Errorf("Expected an error with type ErrDuplicatedFlag")
+ }
+ }
+}
+
+func TestDuplicateLongFlags(t *testing.T) {
+ var opts struct {
+ Test1 []bool `short:"a" long:"testing" description:"Test 1"`
+ Test2 []string `short:"b" long:"testing" description:"Test 2."`
+ }
+
+ args := []string{
+ "--testing",
+ }
+
+ _, err := ParseArgs(&opts, args)
+
+ if err == nil {
+ t.Errorf("Expected an error with type ErrDuplicatedFlag")
+ } else {
+ err2 := err.(*Error)
+ if err2.Type != ErrDuplicatedFlag {
+ t.Errorf("Expected an error with type ErrDuplicatedFlag")
+ }
+ }
+}
+
+func TestFindOptionByLongFlag(t *testing.T) {
+ var opts struct {
+ Testing bool `long:"testing" description:"Testing"`
+ }
+
+ p := NewParser(&opts, Default)
+ opt := p.FindOptionByLongName("testing")
+
+ if opt == nil {
+ t.Errorf("Expected option, but found none")
+ }
+
+ assertString(t, opt.LongName, "testing")
+}
+
+func TestFindOptionByShortFlag(t *testing.T) {
+ var opts struct {
+ Testing bool `short:"t" description:"Testing"`
+ }
+
+ p := NewParser(&opts, Default)
+ opt := p.FindOptionByShortName('t')
+
+ if opt == nil {
+ t.Errorf("Expected option, but found none")
+ }
+
+ if opt.ShortName != 't' {
+ t.Errorf("Expected 't', but got %v", opt.ShortName)
+ }
+}
+
+func TestFindOptionByLongFlagInSubGroup(t *testing.T) {
+ var opts struct {
+ Group struct {
+ Testing bool `long:"testing" description:"Testing"`
+ } `group:"sub-group"`
+ }
+
+ p := NewParser(&opts, Default)
+ opt := p.FindOptionByLongName("testing")
+
+ if opt == nil {
+ t.Errorf("Expected option, but found none")
+ }
+
+ assertString(t, opt.LongName, "testing")
+}
+
+func TestFindOptionByShortFlagInSubGroup(t *testing.T) {
+ var opts struct {
+ Group struct {
+ Testing bool `short:"t" description:"Testing"`
+ } `group:"sub-group"`
+ }
+
+ p := NewParser(&opts, Default)
+ opt := p.FindOptionByShortName('t')
+
+ if opt == nil {
+ t.Errorf("Expected option, but found none")
+ }
+
+ if opt.ShortName != 't' {
+ t.Errorf("Expected 't', but got %v", opt.ShortName)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/help.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/help.go
new file mode 100644
index 00000000000..c0b808d84fb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/help.go
@@ -0,0 +1,466 @@
+// Copyright 2012 Jesse van den Kieboom. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flags
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io"
+ "runtime"
+ "strings"
+ "unicode/utf8"
+)
+
+type alignmentInfo struct {
+ maxLongLen int
+ hasShort bool
+ hasValueName bool
+ terminalColumns int
+ indent bool
+}
+
+const (
+ paddingBeforeOption = 2
+ distanceBetweenOptionAndDescription = 2
+)
+
+func (a *alignmentInfo) descriptionStart() int {
+ ret := a.maxLongLen + distanceBetweenOptionAndDescription
+
+ if a.hasShort {
+ ret += 2
+ }
+
+ if a.maxLongLen > 0 {
+ ret += 4
+ }
+
+ if a.hasValueName {
+ ret += 3
+ }
+
+ return ret
+}
+
+func (a *alignmentInfo) updateLen(name string, indent bool) {
+ l := utf8.RuneCountInString(name)
+
+ if indent {
+ l = l + 4
+ }
+
+ if l > a.maxLongLen {
+ a.maxLongLen = l
+ }
+}
+
+func (p *Parser) getAlignmentInfo() alignmentInfo {
+ ret := alignmentInfo{
+ maxLongLen: 0,
+ hasShort: false,
+ hasValueName: false,
+ terminalColumns: getTerminalColumns(),
+ }
+
+ if ret.terminalColumns <= 0 {
+ ret.terminalColumns = 80
+ }
+
+ var prevcmd *Command
+
+ p.eachActiveGroup(func(c *Command, grp *Group) {
+ if c != prevcmd {
+ for _, arg := range c.args {
+ ret.updateLen(arg.Name, c != p.Command)
+ }
+ }
+
+ for _, info := range grp.options {
+ if !info.canCli() {
+ continue
+ }
+
+ if info.ShortName != 0 {
+ ret.hasShort = true
+ }
+
+ if len(info.ValueName) > 0 {
+ ret.hasValueName = true
+ }
+
+ l := info.LongNameWithNamespace() + info.ValueName
+
+ if len(info.Choices) != 0 {
+ l += "[" + strings.Join(info.Choices, "|") + "]"
+ }
+
+ ret.updateLen(l, c != p.Command)
+ }
+ })
+
+ return ret
+}
+
+func wrapText(s string, l int, prefix string) string {
+ var ret string
+
+ // Basic text wrapping of s at spaces to fit in l
+ lines := strings.Split(s, "\n")
+
+ for _, line := range lines {
+ var retline string
+
+ line = strings.TrimSpace(line)
+
+ for len(line) > l {
+ // Try to split on space
+ suffix := ""
+
+ pos := strings.LastIndex(line[:l], " ")
+
+ if pos < 0 {
+ pos = l - 1
+ suffix = "-\n"
+ }
+
+ if len(retline) != 0 {
+ retline += "\n" + prefix
+ }
+
+ retline += strings.TrimSpace(line[:pos]) + suffix
+ line = strings.TrimSpace(line[pos:])
+ }
+
+ if len(line) > 0 {
+ if len(retline) != 0 {
+ retline += "\n" + prefix
+ }
+
+ retline += line
+ }
+
+ if len(ret) > 0 {
+ ret += "\n"
+
+ if len(retline) > 0 {
+ ret += prefix
+ }
+ }
+
+ ret += retline
+ }
+
+ return ret
+}
+
+func (p *Parser) writeHelpOption(writer *bufio.Writer, option *Option, info alignmentInfo) {
+ line := &bytes.Buffer{}
+
+ prefix := paddingBeforeOption
+
+ if info.indent {
+ prefix += 4
+ }
+
+ if option.Hidden {
+ return
+ }
+
+ line.WriteString(strings.Repeat(" ", prefix))
+
+ if option.ShortName != 0 {
+ line.WriteRune(defaultShortOptDelimiter)
+ line.WriteRune(option.ShortName)
+ } else if info.hasShort {
+ line.WriteString(" ")
+ }
+
+ descstart := info.descriptionStart() + paddingBeforeOption
+
+ if len(option.LongName) > 0 {
+ if option.ShortName != 0 {
+ line.WriteString(", ")
+ } else if info.hasShort {
+ line.WriteString(" ")
+ }
+
+ line.WriteString(defaultLongOptDelimiter)
+ line.WriteString(option.LongNameWithNamespace())
+ }
+
+ if option.canArgument() {
+ line.WriteRune(defaultNameArgDelimiter)
+
+ if len(option.ValueName) > 0 {
+ line.WriteString(option.ValueName)
+ }
+
+ if len(option.Choices) > 0 {
+ line.WriteString("[" + strings.Join(option.Choices, "|") + "]")
+ }
+ }
+
+ written := line.Len()
+ line.WriteTo(writer)
+
+ if option.Description != "" {
+ dw := descstart - written
+ writer.WriteString(strings.Repeat(" ", dw))
+
+ var def string
+
+ if len(option.DefaultMask) != 0 && option.DefaultMask != "-" {
+ def = option.DefaultMask
+ } else {
+ def = option.defaultLiteral
+ }
+
+ var envDef string
+ if option.EnvDefaultKey != "" {
+ var envPrintable string
+ if runtime.GOOS == "windows" {
+ envPrintable = "%" + option.EnvDefaultKey + "%"
+ } else {
+ envPrintable = "$" + option.EnvDefaultKey
+ }
+ envDef = fmt.Sprintf(" [%s]", envPrintable)
+ }
+
+ var desc string
+
+ if def != "" {
+ desc = fmt.Sprintf("%s (default: %v)%s", option.Description, def, envDef)
+ } else {
+ desc = option.Description + envDef
+ }
+
+ writer.WriteString(wrapText(desc,
+ info.terminalColumns-descstart,
+ strings.Repeat(" ", descstart)))
+ }
+
+ writer.WriteString("\n")
+}
+
+func maxCommandLength(s []*Command) int {
+ if len(s) == 0 {
+ return 0
+ }
+
+ ret := len(s[0].Name)
+
+ for _, v := range s[1:] {
+ l := len(v.Name)
+
+ if l > ret {
+ ret = l
+ }
+ }
+
+ return ret
+}
+
+// WriteHelp writes a help message containing all the possible options and
+// their descriptions to the provided writer. Note that the HelpFlag parser
+// option provides a convenient way to add a -h/--help option group to the
+// command line parser which will automatically show the help messages using
+// this method.
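+//
+// A minimal sketch of calling it directly (the parser name "app" is only
+// illustrative):
+//
+//     p := NewNamedParser("app", HelpFlag)
+//     p.WriteHelp(os.Stdout)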
+func (p *Parser) WriteHelp(writer io.Writer) {
+ if writer == nil {
+ return
+ }
+
+ wr := bufio.NewWriter(writer)
+ aligninfo := p.getAlignmentInfo()
+
+ cmd := p.Command
+
+ for cmd.Active != nil {
+ cmd = cmd.Active
+ }
+
+ if p.Name != "" {
+ wr.WriteString("Usage:\n")
+ wr.WriteString(" ")
+
+ allcmd := p.Command
+
+ for allcmd != nil {
+ var usage string
+
+ if allcmd == p.Command {
+ if len(p.Usage) != 0 {
+ usage = p.Usage
+ } else if p.Options&HelpFlag != 0 {
+ usage = "[OPTIONS]"
+ }
+ } else if us, ok := allcmd.data.(Usage); ok {
+ usage = us.Usage()
+ } else if allcmd.hasCliOptions() {
+ usage = fmt.Sprintf("[%s-OPTIONS]", allcmd.Name)
+ }
+
+ if len(usage) != 0 {
+ fmt.Fprintf(wr, " %s %s", allcmd.Name, usage)
+ } else {
+ fmt.Fprintf(wr, " %s", allcmd.Name)
+ }
+
+ if len(allcmd.args) > 0 {
+ fmt.Fprintf(wr, " ")
+ }
+
+ for i, arg := range allcmd.args {
+ if i != 0 {
+ fmt.Fprintf(wr, " ")
+ }
+
+ name := arg.Name
+
+ if arg.isRemaining() {
+ name = name + "..."
+ }
+
+ if !allcmd.ArgsRequired {
+ fmt.Fprintf(wr, "[%s]", name)
+ } else {
+ fmt.Fprintf(wr, "%s", name)
+ }
+ }
+
+ if allcmd.Active == nil && len(allcmd.commands) > 0 {
+ var co, cc string
+
+ if allcmd.SubcommandsOptional {
+ co, cc = "[", "]"
+ } else {
+ co, cc = "<", ">"
+ }
+
+ visibleCommands := allcmd.visibleCommands()
+
+ if len(visibleCommands) > 3 {
+ fmt.Fprintf(wr, " %scommand%s", co, cc)
+ } else {
+ subcommands := allcmd.sortedVisibleCommands()
+ names := make([]string, len(subcommands))
+
+ for i, subc := range subcommands {
+ names[i] = subc.Name
+ }
+
+ fmt.Fprintf(wr, " %s%s%s", co, strings.Join(names, " | "), cc)
+ }
+ }
+
+ allcmd = allcmd.Active
+ }
+
+ fmt.Fprintln(wr)
+
+ if len(cmd.LongDescription) != 0 {
+ fmt.Fprintln(wr)
+
+ t := wrapText(cmd.LongDescription,
+ aligninfo.terminalColumns,
+ "")
+
+ fmt.Fprintln(wr, t)
+ }
+ }
+
+ c := p.Command
+
+ for c != nil {
+ printcmd := c != p.Command
+
+ c.eachGroup(func(grp *Group) {
+ first := true
+
+ // Skip built-in help group for all commands except the top-level
+ // parser
+ if grp.Hidden || (grp.isBuiltinHelp && c != p.Command) {
+ return
+ }
+
+ for _, info := range grp.options {
+ if !info.canCli() || info.Hidden {
+ continue
+ }
+
+ if printcmd {
+ fmt.Fprintf(wr, "\n[%s command options]\n", c.Name)
+ aligninfo.indent = true
+ printcmd = false
+ }
+
+ if first && cmd.Group != grp {
+ fmt.Fprintln(wr)
+
+ if aligninfo.indent {
+ wr.WriteString(" ")
+ }
+
+ fmt.Fprintf(wr, "%s:\n", grp.ShortDescription)
+ first = false
+ }
+
+ p.writeHelpOption(wr, info, aligninfo)
+ }
+ })
+
+ if len(c.args) > 0 {
+ if c == p.Command {
+ fmt.Fprintf(wr, "\nArguments:\n")
+ } else {
+ fmt.Fprintf(wr, "\n[%s command arguments]\n", c.Name)
+ }
+
+ maxlen := aligninfo.descriptionStart()
+
+ for _, arg := range c.args {
+ prefix := strings.Repeat(" ", paddingBeforeOption)
+ fmt.Fprintf(wr, "%s%s", prefix, arg.Name)
+
+ if len(arg.Description) > 0 {
+ align := strings.Repeat(" ", maxlen-len(arg.Name)-1)
+ fmt.Fprintf(wr, ":%s%s", align, arg.Description)
+ }
+
+ fmt.Fprintln(wr)
+ }
+ }
+
+ c = c.Active
+ }
+
+ scommands := cmd.sortedVisibleCommands()
+
+ if len(scommands) > 0 {
+ maxnamelen := maxCommandLength(scommands)
+
+ fmt.Fprintln(wr)
+ fmt.Fprintln(wr, "Available commands:")
+
+ for _, c := range scommands {
+ fmt.Fprintf(wr, " %s", c.Name)
+
+ if len(c.ShortDescription) > 0 {
+ pad := strings.Repeat(" ", maxnamelen-len(c.Name))
+ fmt.Fprintf(wr, "%s %s", pad, c.ShortDescription)
+
+ if len(c.Aliases) > 0 {
+ fmt.Fprintf(wr, " (aliases: %s)", strings.Join(c.Aliases, ", "))
+ }
+
+ }
+
+ fmt.Fprintln(wr)
+ }
+ }
+
+ wr.Flush()
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/help_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/help_test.go
new file mode 100644
index 00000000000..33d21bf86df
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/help_test.go
@@ -0,0 +1,460 @@
+package flags
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "runtime"
+ "testing"
+ "time"
+)
+
+type helpOptions struct {
+ Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information" ini-name:"verbose"`
+ Call func(string) `short:"c" description:"Call phone number" ini-name:"call"`
+ PtrSlice []*string `long:"ptrslice" description:"A slice of pointers to string"`
+ EmptyDescription bool `long:"empty-description"`
+
+ Default string `long:"default" default:"Some\nvalue" description:"Test default value"`
+ DefaultArray []string `long:"default-array" default:"Some value" default:"Other\tvalue" description:"Test default array value"`
+ DefaultMap map[string]string `long:"default-map" default:"some:value" default:"another:value" description:"Testdefault map value"`
+ EnvDefault1 string `long:"env-default1" default:"Some value" env:"ENV_DEFAULT" description:"Test env-default1 value"`
+ EnvDefault2 string `long:"env-default2" env:"ENV_DEFAULT" description:"Test env-default2 value"`
+ OptionWithArgName string `long:"opt-with-arg-name" value-name:"something" description:"Option with named argument"`
+ OptionWithChoices string `long:"opt-with-choices" value-name:"choice" choice:"dog" choice:"cat" description:"Option with choices"`
+ Hidden string `long:"hidden" description:"Hidden option" hidden:"yes"`
+
+ OnlyIni string `ini-name:"only-ini" description:"Option only available in ini"`
+
+ Other struct {
+ StringSlice []string `short:"s" default:"some" default:"value" description:"A slice of strings"`
+ IntMap map[string]int `long:"intmap" default:"a:1" description:"A map from string to int" ini-name:"int-map"`
+ } `group:"Other Options"`
+
+ HiddenGroup struct {
+ InsideHiddenGroup string `long:"inside-hidden-group" description:"Inside hidden group"`
+ } `group:"Hidden group" hidden:"yes"`
+
+ Group struct {
+ Opt string `long:"opt" description:"This is a subgroup option"`
+ HiddenInsideGroup string `long:"hidden-inside-group" description:"Hidden inside group" hidden:"yes"`
+
+ Group struct {
+ Opt string `long:"opt" description:"This is a subsubgroup option"`
+ } `group:"Subsubgroup" namespace:"sap"`
+ } `group:"Subgroup" namespace:"sip"`
+
+ Command struct {
+ ExtraVerbose []bool `long:"extra-verbose" description:"Use for extra verbosity"`
+ } `command:"command" alias:"cm" alias:"cmd" description:"A command"`
+
+ HiddenCommand struct {
+ ExtraVerbose []bool `long:"extra-verbose" description:"Use for extra verbosity"`
+ } `command:"hidden-command" description:"A hidden command" hidden:"yes"`
+
+ Args struct {
+ Filename string `positional-arg-name:"filename" description:"A filename"`
+ Number int `positional-arg-name:"num" description:"A number"`
+ } `positional-args:"yes"`
+}
+
+func TestHelp(t *testing.T) {
+ oldEnv := EnvSnapshot()
+ defer oldEnv.Restore()
+ os.Setenv("ENV_DEFAULT", "env-def")
+
+ var opts helpOptions
+ p := NewNamedParser("TestHelp", HelpFlag)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ _, err := p.ParseArgs([]string{"--help"})
+
+ if err == nil {
+ t.Fatalf("Expected help error")
+ }
+
+ if e, ok := err.(*Error); !ok {
+ t.Fatalf("Expected flags.Error, but got %T", err)
+ } else {
+ if e.Type != ErrHelp {
+ t.Errorf("Expected flags.ErrHelp type, but got %s", e.Type)
+ }
+
+ var expected string
+
+ if runtime.GOOS == "windows" {
+ expected = `Usage:
+ TestHelp [OPTIONS] [filename] [num] <command>
+
+Application Options:
+ /v, /verbose Show verbose debug information
+ /c: Call phone number
+ /ptrslice: A slice of pointers to string
+ /empty-description
+ /default: Test default value (default:
+ "Some\nvalue")
+ /default-array: Test default array value (default:
+ Some value, "Other\tvalue")
+ /default-map: Testdefault map value (default:
+ some:value, another:value)
+ /env-default1: Test env-default1 value (default:
+ Some value) [%ENV_DEFAULT%]
+ /env-default2: Test env-default2 value
+ [%ENV_DEFAULT%]
+ /opt-with-arg-name:something Option with named argument
+ /opt-with-choices:choice[dog|cat] Option with choices
+
+Other Options:
+ /s: A slice of strings (default: some,
+ value)
+ /intmap: A map from string to int (default:
+ a:1)
+
+Subgroup:
+ /sip.opt: This is a subgroup option
+
+Subsubgroup:
+ /sip.sap.opt: This is a subsubgroup option
+
+Help Options:
+ /? Show this help message
+ /h, /help Show this help message
+
+Arguments:
+ filename: A filename
+ num: A number
+
+Available commands:
+ command A command (aliases: cm, cmd)
+`
+ } else {
+ expected = `Usage:
+ TestHelp [OPTIONS] [filename] [num] <command>
+
+Application Options:
+ -v, --verbose Show verbose debug information
+ -c= Call phone number
+ --ptrslice= A slice of pointers to string
+ --empty-description
+ --default= Test default value (default:
+ "Some\nvalue")
+ --default-array= Test default array value (default:
+ Some value, "Other\tvalue")
+ --default-map= Testdefault map value (default:
+ some:value, another:value)
+ --env-default1= Test env-default1 value (default:
+ Some value) [$ENV_DEFAULT]
+ --env-default2= Test env-default2 value
+ [$ENV_DEFAULT]
+ --opt-with-arg-name=something Option with named argument
+ --opt-with-choices=choice[dog|cat] Option with choices
+
+Other Options:
+ -s= A slice of strings (default: some,
+ value)
+ --intmap= A map from string to int (default:
+ a:1)
+
+Subgroup:
+ --sip.opt= This is a subgroup option
+
+Subsubgroup:
+ --sip.sap.opt= This is a subsubgroup option
+
+Help Options:
+ -h, --help Show this help message
+
+Arguments:
+ filename: A filename
+ num: A number
+
+Available commands:
+ command A command (aliases: cm, cmd)
+`
+ }
+
+ assertDiff(t, e.Message, expected, "help message")
+ }
+}
+
+func TestMan(t *testing.T) {
+ oldEnv := EnvSnapshot()
+ defer oldEnv.Restore()
+ os.Setenv("ENV_DEFAULT", "env-def")
+
+ var opts helpOptions
+ p := NewNamedParser("TestMan", HelpFlag)
+ p.ShortDescription = "Test manpage generation"
+ p.LongDescription = "This is a somewhat `longer' description of what this does"
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ p.Commands()[0].LongDescription = "Longer `command' description"
+
+ var buf bytes.Buffer
+ p.WriteManPage(&buf)
+
+ got := buf.String()
+
+ tt := time.Now()
+
+ var envDefaultName string
+
+ if runtime.GOOS == "windows" {
+ envDefaultName = "%ENV_DEFAULT%"
+ } else {
+ envDefaultName = "$ENV_DEFAULT"
+ }
+
+ expected := fmt.Sprintf(`.TH TestMan 1 "%s"
+.SH NAME
+TestMan \- Test manpage generation
+.SH SYNOPSIS
+\fBTestMan\fP [OPTIONS]
+.SH DESCRIPTION
+This is a somewhat \fBlonger\fP description of what this does
+.SH OPTIONS
+.TP
+\fB\fB\-v\fR, \fB\-\-verbose\fR\fP
+Show verbose debug information
+.TP
+\fB\fB\-c\fR\fP
+Call phone number
+.TP
+\fB\fB\-\-ptrslice\fR\fP
+A slice of pointers to string
+.TP
+\fB\fB\-\-empty-description\fR\fP
+.TP
+\fB\fB\-\-default\fR <default: \fI"Some\\nvalue"\fR>\fP
+Test default value
+.TP
+\fB\fB\-\-default-array\fR <default: \fI"Some value", "Other\\tvalue"\fR>\fP
+Test default array value
+.TP
+\fB\fB\-\-default-map\fR <default: \fI"some:value", "another:value"\fR>\fP
+Testdefault map value
+.TP
+\fB\fB\-\-env-default1\fR <default: \fI"Some value"\fR>\fP
+Test env-default1 value
+.TP
+\fB\fB\-\-env-default2\fR <default: \fI%s\fR>\fP
+Test env-default2 value
+.TP
+\fB\fB\-\-opt-with-arg-name\fR \fIsomething\fR\fP
+Option with named argument
+.TP
+\fB\fB\-\-opt-with-choices\fR \fIchoice\fR\fP
+Option with choices
+.TP
+\fB\fB\-s\fR <default: \fI"some", "value"\fR>\fP
+A slice of strings
+.TP
+\fB\fB\-\-intmap\fR <default: \fI"a:1"\fR>\fP
+A map from string to int
+.TP
+\fB\fB\-\-sip.opt\fR\fP
+This is a subgroup option
+.TP
+\fB\fB\-\-sip.sap.opt\fR\fP
+This is a subsubgroup option
+.SH COMMANDS
+.SS command
+A command
+
+Longer \fBcommand\fP description
+
+\fBUsage\fP: TestMan [OPTIONS] command [command-OPTIONS]
+.TP
+
+\fBAliases\fP: cm, cmd
+
+.TP
+\fB\fB\-\-extra-verbose\fR\fP
+Use for extra verbosity
+`, tt.Format("2 January 2006"), envDefaultName)
+
+ assertDiff(t, got, expected, "man page")
+}
+
+type helpCommandNoOptions struct {
+ Command struct {
+ } `command:"command" description:"A command"`
+}
+
+func TestHelpCommand(t *testing.T) {
+ oldEnv := EnvSnapshot()
+ defer oldEnv.Restore()
+ os.Setenv("ENV_DEFAULT", "env-def")
+
+ var opts helpCommandNoOptions
+ p := NewNamedParser("TestHelpCommand", HelpFlag)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ _, err := p.ParseArgs([]string{"command", "--help"})
+
+ if err == nil {
+ t.Fatalf("Expected help error")
+ }
+
+ if e, ok := err.(*Error); !ok {
+ t.Fatalf("Expected flags.Error, but got %T", err)
+ } else {
+ if e.Type != ErrHelp {
+ t.Errorf("Expected flags.ErrHelp type, but got %s", e.Type)
+ }
+
+ var expected string
+
+ if runtime.GOOS == "windows" {
+ expected = `Usage:
+ TestHelpCommand [OPTIONS] command
+
+Help Options:
+ /? Show this help message
+ /h, /help Show this help message
+`
+ } else {
+ expected = `Usage:
+ TestHelpCommand [OPTIONS] command
+
+Help Options:
+ -h, --help Show this help message
+`
+ }
+
+ assertDiff(t, e.Message, expected, "help message")
+ }
+}
+
+func TestHelpDefaults(t *testing.T) {
+ var expected string
+
+ if runtime.GOOS == "windows" {
+ expected = `Usage:
+ TestHelpDefaults [OPTIONS]
+
+Application Options:
+ /with-default: With default (default: default-value)
+ /without-default: Without default
+ /with-programmatic-default: With programmatic default (default:
+ default-value)
+
+Help Options:
+ /? Show this help message
+ /h, /help Show this help message
+`
+ } else {
+ expected = `Usage:
+ TestHelpDefaults [OPTIONS]
+
+Application Options:
+ --with-default= With default (default: default-value)
+ --without-default= Without default
+ --with-programmatic-default= With programmatic default (default:
+ default-value)
+
+Help Options:
+ -h, --help Show this help message
+`
+ }
+
+ tests := []struct {
+ Args []string
+ Output string
+ }{
+ {
+ Args: []string{"-h"},
+ Output: expected,
+ },
+ {
+ Args: []string{"--with-default", "other-value", "--with-programmatic-default", "other-value", "-h"},
+ Output: expected,
+ },
+ }
+
+ for _, test := range tests {
+ var opts struct {
+ WithDefault string `long:"with-default" default:"default-value" description:"With default"`
+ WithoutDefault string `long:"without-default" description:"Without default"`
+ WithProgrammaticDefault string `long:"with-programmatic-default" description:"With programmatic default"`
+ }
+
+ opts.WithProgrammaticDefault = "default-value"
+
+ p := NewNamedParser("TestHelpDefaults", HelpFlag)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ _, err := p.ParseArgs(test.Args)
+
+ if err == nil {
+ t.Fatalf("Expected help error")
+ }
+
+ if e, ok := err.(*Error); !ok {
+ t.Fatalf("Expected flags.Error, but got %T", err)
+ } else {
+ if e.Type != ErrHelp {
+ t.Errorf("Expected flags.ErrHelp type, but got %s", e.Type)
+ }
+
+ assertDiff(t, e.Message, test.Output, "help message")
+ }
+ }
+}
+
+func TestHelpRestArgs(t *testing.T) {
+ opts := struct {
+ Verbose bool `short:"v"`
+ }{}
+
+ p := NewNamedParser("TestHelpDefaults", HelpFlag)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ retargs, err := p.ParseArgs([]string{"-h", "-v", "rest"})
+
+ if err == nil {
+ t.Fatalf("Expected help error")
+ }
+
+ assertStringArray(t, retargs, []string{"-v", "rest"})
+}
+
+func TestWrapText(t *testing.T) {
+ s := "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
+
+ got := wrapText(s, 60, " ")
+ expected := `Lorem ipsum dolor sit amet, consectetur adipisicing elit,
+ sed do eiusmod tempor incididunt ut labore et dolore magna
+ aliqua. Ut enim ad minim veniam, quis nostrud exercitation
+ ullamco laboris nisi ut aliquip ex ea commodo consequat.
+ Duis aute irure dolor in reprehenderit in voluptate velit
+ esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
+ occaecat cupidatat non proident, sunt in culpa qui officia
+ deserunt mollit anim id est laborum.`
+
+ assertDiff(t, got, expected, "wrapped text")
+}
+
+func TestWrapParagraph(t *testing.T) {
+ s := "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n\n"
+ s += "Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.\n\n"
+ s += "Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.\n\n"
+ s += "Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\n"
+
+ got := wrapText(s, 60, " ")
+ expected := `Lorem ipsum dolor sit amet, consectetur adipisicing elit,
+ sed do eiusmod tempor incididunt ut labore et dolore magna
+ aliqua.
+
+ Ut enim ad minim veniam, quis nostrud exercitation ullamco
+ laboris nisi ut aliquip ex ea commodo consequat.
+
+ Duis aute irure dolor in reprehenderit in voluptate velit
+ esse cillum dolore eu fugiat nulla pariatur.
+
+ Excepteur sint occaecat cupidatat non proident, sunt in
+ culpa qui officia deserunt mollit anim id est laborum.
+`
+
+ assertDiff(t, got, expected, "wrapped paragraph")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/ini.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/ini.go
new file mode 100644
index 00000000000..cfdf57cc2e0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/ini.go
@@ -0,0 +1,593 @@
+package flags
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+// IniError contains location information on where an error occurred.
+type IniError struct {
+ // The error message.
+ Message string
+
+ // The filename of the file in which the error occurred.
+ File string
+
+ // The line number at which the error occurred.
+ LineNumber uint
+}
+
+// Error provides a "file:line: message" formatted message of the ini error.
+func (x *IniError) Error() string {
+ return fmt.Sprintf(
+ "%s:%d: %s",
+ x.File,
+ x.LineNumber,
+ x.Message,
+ )
+}
+
+// IniOptions is a set of bit flags controlling how ini contents are written.
+type IniOptions uint
+
+const (
+ // IniNone indicates no options.
+ IniNone IniOptions = 0
+
+ // IniIncludeDefaults indicates that default values should be written.
+ IniIncludeDefaults = 1 << iota
+
+	// IniCommentDefaults indicates that, when IniIncludeDefaults is used,
+	// options with default values are written but commented out.
+ IniCommentDefaults
+
+ // IniIncludeComments indicates that comments containing the description
+ // of an option should be written.
+ IniIncludeComments
+
+ // IniDefault provides a default set of options.
+ IniDefault = IniIncludeComments
+)
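+
+// Illustrative sketch (not part of the upstream source): IniOptions values are
+// bit flags and can be combined with bitwise OR, as the tests in ini_test.go
+// do. Here p is assumed to be an existing *Parser.
+//
+//	// Write every option, commenting out those still at their default value.
+//	opts := IniIncludeDefaults | IniCommentDefaults | IniIncludeComments
+//	NewIniParser(p).Write(os.Stdout, opts)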
+
+// IniParser is a utility to read and write flags options from and to ini
+// formatted strings.
+type IniParser struct {
+ parser *Parser
+}
+
+type iniValue struct {
+ Name string
+ Value string
+ Quoted bool
+ LineNumber uint
+}
+
+type iniSection []iniValue
+
+type ini struct {
+ File string
+ Sections map[string]iniSection
+}
+
+// NewIniParser creates a new ini parser for a given Parser.
+func NewIniParser(p *Parser) *IniParser {
+ return &IniParser{
+ parser: p,
+ }
+}
+
+// IniParse is a convenience function to parse command line options with default
+// settings from an ini formatted file. The provided data is a pointer to a struct
+// representing the default option group (named "Application Options"). For
+// more control, use flags.NewParser.
+func IniParse(filename string, data interface{}) error {
+ p := NewParser(data, Default)
+
+ return NewIniParser(p).ParseFile(filename)
+}
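+
+// Illustrative sketch (not part of the upstream source): IniParse fills a
+// plain options struct straight from a file. The struct, tag and file name
+// below are hypothetical.
+//
+//	var opts struct {
+//		Verbose bool `long:"verbose" description:"Show verbose debug information"`
+//	}
+//
+//	if err := IniParse("app.ini", &opts); err != nil {
+//		log.Fatal(err)
+//	}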
+
+// ParseFile parses flags from an ini formatted file. See Parse for more
+// information on the ini file format. The returned errors can be of the type
+// flags.Error or flags.IniError.
+func (i *IniParser) ParseFile(filename string) error {
+ i.parser.clearIsSet()
+
+ ini, err := readIniFromFile(filename)
+
+ if err != nil {
+ return err
+ }
+
+ return i.parse(ini)
+}
+
+// Parse parses flags from an ini format. You can use ParseFile as a
+// convenience function to parse from a filename instead of a general
+// io.Reader.
+//
+// The format of the ini file is as follows:
+//
+// [Option group name]
+// option = value
+//
+// Each section in the ini file represents an option group or command in the
+// flags parser. The default flags parser option group (i.e. when using
+// flags.Parse) is named 'Application Options'. The ini option name is matched
+// in the following order:
+//
+// 1. Compared to the ini-name tag on the option struct field (if present)
+// 2. Compared to the struct field name
+// 3. Compared to the option long name (if present)
+// 4. Compared to the option short name (if present)
+//
+// Sections for nested groups and commands can be addressed using a dot `.'
+// namespacing notation (e.g. [subcommand.Options]). Group section names are
+// matched case insensitively.
+//
+// The returned errors can be of the type flags.Error or flags.IniError.
+func (i *IniParser) Parse(reader io.Reader) error {
+ i.parser.clearIsSet()
+
+ ini, err := readIni(reader, "")
+
+ if err != nil {
+ return err
+ }
+
+ return i.parse(ini)
+}
+
+// WriteFile writes the flags as ini format into a file. See WriteIni
+// for more information. The returned error occurs when the specified file
+// could not be opened for writing.
+func (i *IniParser) WriteFile(filename string, options IniOptions) error {
+ return writeIniToFile(i, filename, options)
+}
+
+// Write writes the current values of all the flags to an ini format.
+// See Parse for more information on the ini file format. You typically
+// call this only after the flags have been parsed, since the default values
+// of each option are captured just before parsing (this is only relevant
+// when IniIncludeDefaults is _not_ set in options).
+func (i *IniParser) Write(writer io.Writer, options IniOptions) {
+ writeIni(i, writer, options)
+}
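+
+// Illustrative sketch (not part of the upstream source): a typical round trip
+// parses the command line first and then persists the resulting values. The
+// parser p and the file name are hypothetical.
+//
+//	if _, err := p.Parse(); err != nil {
+//		log.Fatal(err)
+//	}
+//
+//	if err := NewIniParser(p).WriteFile("app.ini", IniIncludeDefaults); err != nil {
+//		log.Fatal(err)
+//	}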
+
+func readFullLine(reader *bufio.Reader) (string, error) {
+ var line []byte
+
+ for {
+ l, more, err := reader.ReadLine()
+
+ if err != nil {
+ return "", err
+ }
+
+ if line == nil && !more {
+ return string(l), nil
+ }
+
+ line = append(line, l...)
+
+ if !more {
+ break
+ }
+ }
+
+ return string(line), nil
+}
+
+func optionIniName(option *Option) string {
+ name := option.tag.Get("_read-ini-name")
+
+ if len(name) != 0 {
+ return name
+ }
+
+ name = option.tag.Get("ini-name")
+
+ if len(name) != 0 {
+ return name
+ }
+
+ return option.field.Name
+}
+
+func writeGroupIni(cmd *Command, group *Group, namespace string, writer io.Writer, options IniOptions) {
+ var sname string
+
+ if len(namespace) != 0 {
+ sname = namespace
+ }
+
+ if cmd.Group != group && len(group.ShortDescription) != 0 {
+ if len(sname) != 0 {
+ sname += "."
+ }
+
+ sname += group.ShortDescription
+ }
+
+ sectionwritten := false
+ comments := (options & IniIncludeComments) != IniNone
+
+ for _, option := range group.options {
+ if option.isFunc() || option.Hidden {
+ continue
+ }
+
+ if len(option.tag.Get("no-ini")) != 0 {
+ continue
+ }
+
+ val := option.value
+
+ if (options&IniIncludeDefaults) == IniNone && option.valueIsDefault() {
+ continue
+ }
+
+ if !sectionwritten {
+ fmt.Fprintf(writer, "[%s]\n", sname)
+ sectionwritten = true
+ }
+
+ if comments && len(option.Description) != 0 {
+ fmt.Fprintf(writer, "; %s\n", option.Description)
+ }
+
+ oname := optionIniName(option)
+
+ commentOption := (options&(IniIncludeDefaults|IniCommentDefaults)) == IniIncludeDefaults|IniCommentDefaults && option.valueIsDefault()
+
+ kind := val.Type().Kind()
+ switch kind {
+ case reflect.Slice:
+ kind = val.Type().Elem().Kind()
+
+ if val.Len() == 0 {
+ writeOption(writer, oname, kind, "", "", true, option.iniQuote)
+ } else {
+ for idx := 0; idx < val.Len(); idx++ {
+ v, _ := convertToString(val.Index(idx), option.tag)
+
+ writeOption(writer, oname, kind, "", v, commentOption, option.iniQuote)
+ }
+ }
+ case reflect.Map:
+ kind = val.Type().Elem().Kind()
+
+ if val.Len() == 0 {
+ writeOption(writer, oname, kind, "", "", true, option.iniQuote)
+ } else {
+ mkeys := val.MapKeys()
+				keys := make([]string, len(mkeys))
+ kkmap := make(map[string]reflect.Value)
+
+ for i, k := range mkeys {
+ keys[i], _ = convertToString(k, option.tag)
+ kkmap[keys[i]] = k
+ }
+
+ sort.Strings(keys)
+
+ for _, k := range keys {
+ v, _ := convertToString(val.MapIndex(kkmap[k]), option.tag)
+
+ writeOption(writer, oname, kind, k, v, commentOption, option.iniQuote)
+ }
+ }
+ default:
+ v, _ := convertToString(val, option.tag)
+
+ writeOption(writer, oname, kind, "", v, commentOption, option.iniQuote)
+ }
+
+ if comments {
+ fmt.Fprintln(writer)
+ }
+ }
+
+ if sectionwritten && !comments {
+ fmt.Fprintln(writer)
+ }
+}
+
+func writeOption(writer io.Writer, optionName string, optionType reflect.Kind, optionKey string, optionValue string, commentOption bool, forceQuote bool) {
+ if forceQuote || (optionType == reflect.String && !isPrint(optionValue)) {
+ optionValue = strconv.Quote(optionValue)
+ }
+
+ comment := ""
+ if commentOption {
+ comment = "; "
+ }
+
+ fmt.Fprintf(writer, "%s%s =", comment, optionName)
+
+ if optionKey != "" {
+ fmt.Fprintf(writer, " %s:%s", optionKey, optionValue)
+ } else if optionValue != "" {
+ fmt.Fprintf(writer, " %s", optionValue)
+ }
+
+ fmt.Fprintln(writer)
+}
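+
+// For reference (not part of the upstream source), writeOption emits a single
+// "name = value" (or "name = key:value") line per call, for example:
+//
+//	writeOption(w, "int-map", reflect.Int, "a", "2", false, false)           // int-map = a:2
+//	writeOption(w, "Default", reflect.String, "", "Some\nvalue", true, true)  // ; Default = "Some\nvalue"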
+
+func writeCommandIni(command *Command, namespace string, writer io.Writer, options IniOptions) {
+ command.eachGroup(func(group *Group) {
+ if !group.Hidden {
+ writeGroupIni(command, group, namespace, writer, options)
+ }
+ })
+
+ for _, c := range command.commands {
+ var nns string
+
+ if c.Hidden {
+ continue
+ }
+
+		if len(namespace) != 0 {
+			nns = namespace + "." + c.Name
+		} else {
+			nns = c.Name
+		}
+
+ writeCommandIni(c, nns, writer, options)
+ }
+}
+
+func writeIni(parser *IniParser, writer io.Writer, options IniOptions) {
+ writeCommandIni(parser.parser.Command, "", writer, options)
+}
+
+func writeIniToFile(parser *IniParser, filename string, options IniOptions) error {
+ file, err := os.Create(filename)
+
+ if err != nil {
+ return err
+ }
+
+ defer file.Close()
+
+ writeIni(parser, file, options)
+
+ return nil
+}
+
+func readIniFromFile(filename string) (*ini, error) {
+ file, err := os.Open(filename)
+
+ if err != nil {
+ return nil, err
+ }
+
+ defer file.Close()
+
+ return readIni(file, filename)
+}
+
+func readIni(contents io.Reader, filename string) (*ini, error) {
+ ret := &ini{
+ File: filename,
+ Sections: make(map[string]iniSection),
+ }
+
+ reader := bufio.NewReader(contents)
+
+ // Empty global section
+ section := make(iniSection, 0, 10)
+ sectionname := ""
+
+ ret.Sections[sectionname] = section
+
+ var lineno uint
+
+ for {
+ line, err := readFullLine(reader)
+
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ return nil, err
+ }
+
+ lineno++
+ line = strings.TrimSpace(line)
+
+		// Skip empty lines and comment lines starting with ; or #
+ if len(line) == 0 || line[0] == ';' || line[0] == '#' {
+ continue
+ }
+
+ if line[0] == '[' {
+ if line[0] != '[' || line[len(line)-1] != ']' {
+ return nil, &IniError{
+ Message: "malformed section header",
+ File: filename,
+ LineNumber: lineno,
+ }
+ }
+
+ name := strings.TrimSpace(line[1 : len(line)-1])
+
+ if len(name) == 0 {
+ return nil, &IniError{
+ Message: "empty section name",
+ File: filename,
+ LineNumber: lineno,
+ }
+ }
+
+ sectionname = name
+ section = ret.Sections[name]
+
+ if section == nil {
+ section = make(iniSection, 0, 10)
+ ret.Sections[name] = section
+ }
+
+ continue
+ }
+
+ // Parse option here
+ keyval := strings.SplitN(line, "=", 2)
+
+ if len(keyval) != 2 {
+ return nil, &IniError{
+ Message: fmt.Sprintf("malformed key=value (%s)", line),
+ File: filename,
+ LineNumber: lineno,
+ }
+ }
+
+ name := strings.TrimSpace(keyval[0])
+ value := strings.TrimSpace(keyval[1])
+ quoted := false
+
+ if len(value) != 0 && value[0] == '"' {
+ if v, err := strconv.Unquote(value); err == nil {
+ value = v
+
+ quoted = true
+ } else {
+ return nil, &IniError{
+ Message: err.Error(),
+ File: filename,
+ LineNumber: lineno,
+ }
+ }
+ }
+
+ section = append(section, iniValue{
+ Name: name,
+ Value: value,
+ Quoted: quoted,
+ LineNumber: lineno,
+ })
+
+ ret.Sections[sectionname] = section
+ }
+
+ return ret, nil
+}
+
+func (i *IniParser) matchingGroups(name string) []*Group {
+ if len(name) == 0 {
+ var ret []*Group
+
+ i.parser.eachGroup(func(g *Group) {
+ ret = append(ret, g)
+ })
+
+ return ret
+ }
+
+ g := i.parser.groupByName(name)
+
+ if g != nil {
+ return []*Group{g}
+ }
+
+ return nil
+}
+
+func (i *IniParser) parse(ini *ini) error {
+ p := i.parser
+
+ var quotesLookup = make(map[*Option]bool)
+
+ for name, section := range ini.Sections {
+ groups := i.matchingGroups(name)
+
+ if len(groups) == 0 {
+ return newErrorf(ErrUnknownGroup, "could not find option group `%s'", name)
+ }
+
+ for _, inival := range section {
+ var opt *Option
+
+ for _, group := range groups {
+ opt = group.optionByName(inival.Name, func(o *Option, n string) bool {
+ return strings.ToLower(o.tag.Get("ini-name")) == strings.ToLower(n)
+ })
+
+ if opt != nil && len(opt.tag.Get("no-ini")) != 0 {
+ opt = nil
+ }
+
+ if opt != nil {
+ break
+ }
+ }
+
+ if opt == nil {
+ if (p.Options & IgnoreUnknown) == None {
+ return &IniError{
+ Message: fmt.Sprintf("unknown option: %s", inival.Name),
+ File: ini.File,
+ LineNumber: inival.LineNumber,
+ }
+ }
+
+ continue
+ }
+
+ pval := &inival.Value
+
+ if !opt.canArgument() && len(inival.Value) == 0 {
+ pval = nil
+ } else {
+ if opt.value.Type().Kind() == reflect.Map {
+ parts := strings.SplitN(inival.Value, ":", 2)
+
+ // only handle unquoting
+				if len(parts) == 2 && len(parts[1]) > 0 && parts[1][0] == '"' {
+ if v, err := strconv.Unquote(parts[1]); err == nil {
+ parts[1] = v
+
+ inival.Quoted = true
+ } else {
+ return &IniError{
+ Message: err.Error(),
+ File: ini.File,
+ LineNumber: inival.LineNumber,
+ }
+ }
+
+ s := parts[0] + ":" + parts[1]
+
+ pval = &s
+ }
+ }
+ }
+
+ if err := opt.set(pval); err != nil {
+ return &IniError{
+ Message: err.Error(),
+ File: ini.File,
+ LineNumber: inival.LineNumber,
+ }
+ }
+
+			// either all INI values are quoted or only values that need quoting
+ if _, ok := quotesLookup[opt]; !inival.Quoted || !ok {
+ quotesLookup[opt] = inival.Quoted
+ }
+
+ opt.tag.Set("_read-ini-name", inival.Name)
+ }
+ }
+
+ for opt, quoted := range quotesLookup {
+ opt.iniQuote = quoted
+ }
+
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/ini_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/ini_test.go
new file mode 100644
index 00000000000..dd7fe331209
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/ini_test.go
@@ -0,0 +1,950 @@
+package flags
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "reflect"
+ "strings"
+ "testing"
+)
+
+func TestWriteIni(t *testing.T) {
+ oldEnv := EnvSnapshot()
+ defer oldEnv.Restore()
+ os.Setenv("ENV_DEFAULT", "env-def")
+
+ var tests = []struct {
+ args []string
+ options IniOptions
+ expected string
+ }{
+ {
+ []string{"-vv", "--intmap=a:2", "--intmap", "b:3", "filename", "0", "command"},
+ IniDefault,
+ `[Application Options]
+; Show verbose debug information
+verbose = true
+verbose = true
+
+; Test env-default1 value
+EnvDefault1 = env-def
+
+; Test env-default2 value
+EnvDefault2 = env-def
+
+[Other Options]
+; A map from string to int
+int-map = a:2
+int-map = b:3
+
+`,
+ },
+ {
+ []string{"-vv", "--intmap=a:2", "--intmap", "b:3", "filename", "0", "command"},
+ IniDefault | IniIncludeDefaults,
+ `[Application Options]
+; Show verbose debug information
+verbose = true
+verbose = true
+
+; A slice of pointers to string
+; PtrSlice =
+
+EmptyDescription = false
+
+; Test default value
+Default = "Some\nvalue"
+
+; Test default array value
+DefaultArray = Some value
+DefaultArray = "Other\tvalue"
+
+; Testdefault map value
+DefaultMap = another:value
+DefaultMap = some:value
+
+; Test env-default1 value
+EnvDefault1 = env-def
+
+; Test env-default2 value
+EnvDefault2 = env-def
+
+; Option with named argument
+OptionWithArgName =
+
+; Option with choices
+OptionWithChoices =
+
+; Option only available in ini
+only-ini =
+
+[Other Options]
+; A slice of strings
+StringSlice = some
+StringSlice = value
+
+; A map from string to int
+int-map = a:2
+int-map = b:3
+
+[Subgroup]
+; This is a subgroup option
+Opt =
+
+[Subsubgroup]
+; This is a subsubgroup option
+Opt =
+
+[command]
+; Use for extra verbosity
+; ExtraVerbose =
+
+`,
+ },
+ {
+ []string{"filename", "0", "command"},
+ IniDefault | IniIncludeDefaults | IniCommentDefaults,
+ `[Application Options]
+; Show verbose debug information
+; verbose =
+
+; A slice of pointers to string
+; PtrSlice =
+
+; EmptyDescription = false
+
+; Test default value
+; Default = "Some\nvalue"
+
+; Test default array value
+; DefaultArray = Some value
+; DefaultArray = "Other\tvalue"
+
+; Testdefault map value
+; DefaultMap = another:value
+; DefaultMap = some:value
+
+; Test env-default1 value
+EnvDefault1 = env-def
+
+; Test env-default2 value
+EnvDefault2 = env-def
+
+; Option with named argument
+; OptionWithArgName =
+
+; Option with choices
+; OptionWithChoices =
+
+; Option only available in ini
+; only-ini =
+
+[Other Options]
+; A slice of strings
+; StringSlice = some
+; StringSlice = value
+
+; A map from string to int
+; int-map = a:1
+
+[Subgroup]
+; This is a subgroup option
+; Opt =
+
+[Subsubgroup]
+; This is a subsubgroup option
+; Opt =
+
+[command]
+; Use for extra verbosity
+; ExtraVerbose =
+
+`,
+ },
+ {
+ []string{"--default=New value", "--default-array=New value", "--default-map=new:value", "filename", "0", "command"},
+ IniDefault | IniIncludeDefaults | IniCommentDefaults,
+ `[Application Options]
+; Show verbose debug information
+; verbose =
+
+; A slice of pointers to string
+; PtrSlice =
+
+; EmptyDescription = false
+
+; Test default value
+Default = New value
+
+; Test default array value
+DefaultArray = New value
+
+; Testdefault map value
+DefaultMap = new:value
+
+; Test env-default1 value
+EnvDefault1 = env-def
+
+; Test env-default2 value
+EnvDefault2 = env-def
+
+; Option with named argument
+; OptionWithArgName =
+
+; Option with choices
+; OptionWithChoices =
+
+; Option only available in ini
+; only-ini =
+
+[Other Options]
+; A slice of strings
+; StringSlice = some
+; StringSlice = value
+
+; A map from string to int
+; int-map = a:1
+
+[Subgroup]
+; This is a subgroup option
+; Opt =
+
+[Subsubgroup]
+; This is a subsubgroup option
+; Opt =
+
+[command]
+; Use for extra verbosity
+; ExtraVerbose =
+
+`,
+ },
+ }
+
+ for _, test := range tests {
+ var opts helpOptions
+
+ p := NewNamedParser("TestIni", Default)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ _, err := p.ParseArgs(test.args)
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+
+ inip := NewIniParser(p)
+
+ var b bytes.Buffer
+ inip.Write(&b, test.options)
+
+ got := b.String()
+ expected := test.expected
+
+ msg := fmt.Sprintf("with arguments %+v and ini options %b", test.args, test.options)
+ assertDiff(t, got, expected, msg)
+ }
+}
+
+func TestReadIni_flagEquivalent(t *testing.T) {
+ type options struct {
+ Opt1 bool `long:"opt1"`
+
+ Group1 struct {
+ Opt2 bool `long:"opt2"`
+ } `group:"group1"`
+
+ Group2 struct {
+ Opt3 bool `long:"opt3"`
+ } `group:"group2" namespace:"ns1"`
+
+ Cmd1 struct {
+ Opt4 bool `long:"opt4"`
+ Opt5 bool `long:"foo.opt5"`
+
+ Group1 struct {
+ Opt6 bool `long:"opt6"`
+ Opt7 bool `long:"foo.opt7"`
+ } `group:"group1"`
+
+ Group2 struct {
+ Opt8 bool `long:"opt8"`
+ } `group:"group2" namespace:"ns1"`
+ } `command:"cmd1"`
+ }
+
+ a := `
+opt1=true
+
+[group1]
+opt2=true
+
+[group2]
+ns1.opt3=true
+
+[cmd1]
+opt4=true
+foo.opt5=true
+
+[cmd1.group1]
+opt6=true
+foo.opt7=true
+
+[cmd1.group2]
+ns1.opt8=true
+`
+ b := `
+opt1=true
+opt2=true
+ns1.opt3=true
+
+[cmd1]
+opt4=true
+foo.opt5=true
+opt6=true
+foo.opt7=true
+ns1.opt8=true
+`
+
+ parse := func(readIni string) (opts options, writeIni string) {
+ p := NewNamedParser("TestIni", Default)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ inip := NewIniParser(p)
+ err := inip.Parse(strings.NewReader(readIni))
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %s\n\nFile:\n%s", err, readIni)
+ }
+
+ var b bytes.Buffer
+ inip.Write(&b, Default)
+
+ return opts, b.String()
+ }
+
+ aOpt, aIni := parse(a)
+ bOpt, bIni := parse(b)
+
+ assertDiff(t, aIni, bIni, "")
+ if !reflect.DeepEqual(aOpt, bOpt) {
+		t.Errorf("expected parsed options to be equal, but got %+v and %+v", aOpt, bOpt)
+ }
+}
+
+func TestReadIni(t *testing.T) {
+ var opts helpOptions
+
+ p := NewNamedParser("TestIni", Default)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ inip := NewIniParser(p)
+
+ inic := `
+; Show verbose debug information
+verbose = true
+verbose = true
+
+DefaultMap = another:"value\n1"
+DefaultMap = some:value 2
+
+[Application Options]
+; A slice of pointers to string
+; PtrSlice =
+
+; Test default value
+Default = "New\nvalue"
+
+; Test env-default1 value
+EnvDefault1 = New value
+
+[Other Options]
+# A slice of strings
+StringSlice = "some\nvalue"
+StringSlice = another value
+
+; A map from string to int
+int-map = a:2
+int-map = b:3
+
+`
+
+ b := strings.NewReader(inic)
+ err := inip.Parse(b)
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %s", err)
+ }
+
+ assertBoolArray(t, opts.Verbose, []bool{true, true})
+
+ if v := map[string]string{"another": "value\n1", "some": "value 2"}; !reflect.DeepEqual(opts.DefaultMap, v) {
+ t.Fatalf("Expected %#v for DefaultMap but got %#v", v, opts.DefaultMap)
+ }
+
+ assertString(t, opts.Default, "New\nvalue")
+
+ assertString(t, opts.EnvDefault1, "New value")
+
+ assertStringArray(t, opts.Other.StringSlice, []string{"some\nvalue", "another value"})
+
+ if v, ok := opts.Other.IntMap["a"]; !ok {
+ t.Errorf("Expected \"a\" in Other.IntMap")
+ } else if v != 2 {
+ t.Errorf("Expected Other.IntMap[\"a\"] = 2, but got %v", v)
+ }
+
+ if v, ok := opts.Other.IntMap["b"]; !ok {
+ t.Errorf("Expected \"b\" in Other.IntMap")
+ } else if v != 3 {
+ t.Errorf("Expected Other.IntMap[\"b\"] = 3, but got %v", v)
+ }
+}
+
+func TestReadAndWriteIni(t *testing.T) {
+ var tests = []struct {
+ options IniOptions
+ read string
+ write string
+ }{
+ {
+ IniIncludeComments,
+ `[Application Options]
+; Show verbose debug information
+verbose = true
+verbose = true
+
+; Test default value
+Default = "quote me"
+
+; Test default array value
+DefaultArray = 1
+DefaultArray = "2"
+DefaultArray = 3
+
+; Testdefault map value
+; DefaultMap =
+
+; Test env-default1 value
+EnvDefault1 = env-def
+
+; Test env-default2 value
+EnvDefault2 = env-def
+
+[Other Options]
+; A slice of strings
+; StringSlice =
+
+; A map from string to int
+int-map = a:2
+int-map = b:"3"
+
+`,
+ `[Application Options]
+; Show verbose debug information
+verbose = true
+verbose = true
+
+; Test default value
+Default = "quote me"
+
+; Test default array value
+DefaultArray = 1
+DefaultArray = 2
+DefaultArray = 3
+
+; Testdefault map value
+; DefaultMap =
+
+; Test env-default1 value
+EnvDefault1 = env-def
+
+; Test env-default2 value
+EnvDefault2 = env-def
+
+[Other Options]
+; A slice of strings
+; StringSlice =
+
+; A map from string to int
+int-map = a:2
+int-map = b:3
+
+`,
+ },
+ {
+ IniIncludeComments,
+ `[Application Options]
+; Show verbose debug information
+verbose = true
+verbose = true
+
+; Test default value
+Default = "quote me"
+
+; Test default array value
+DefaultArray = "1"
+DefaultArray = "2"
+DefaultArray = "3"
+
+; Testdefault map value
+; DefaultMap =
+
+; Test env-default1 value
+EnvDefault1 = env-def
+
+; Test env-default2 value
+EnvDefault2 = env-def
+
+[Other Options]
+; A slice of strings
+; StringSlice =
+
+; A map from string to int
+int-map = a:"2"
+int-map = b:"3"
+
+`,
+ `[Application Options]
+; Show verbose debug information
+verbose = true
+verbose = true
+
+; Test default value
+Default = "quote me"
+
+; Test default array value
+DefaultArray = "1"
+DefaultArray = "2"
+DefaultArray = "3"
+
+; Testdefault map value
+; DefaultMap =
+
+; Test env-default1 value
+EnvDefault1 = env-def
+
+; Test env-default2 value
+EnvDefault2 = env-def
+
+[Other Options]
+; A slice of strings
+; StringSlice =
+
+; A map from string to int
+int-map = a:"2"
+int-map = b:"3"
+
+`,
+ },
+ }
+
+ for _, test := range tests {
+ var opts helpOptions
+
+ p := NewNamedParser("TestIni", Default)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ inip := NewIniParser(p)
+
+ read := strings.NewReader(test.read)
+ err := inip.Parse(read)
+ if err != nil {
+ t.Fatalf("Unexpected error: %s", err)
+ }
+
+ var write bytes.Buffer
+ inip.Write(&write, test.options)
+
+ got := write.String()
+
+ msg := fmt.Sprintf("with ini options %b", test.options)
+ assertDiff(t, got, test.write, msg)
+ }
+}
+
+func TestReadIniWrongQuoting(t *testing.T) {
+ var tests = []struct {
+ iniFile string
+ lineNumber uint
+ }{
+ {
+ iniFile: `Default = "New\nvalue`,
+ lineNumber: 1,
+ },
+ {
+ iniFile: `StringSlice = "New\nvalue`,
+ lineNumber: 1,
+ },
+ {
+ iniFile: `StringSlice = "New\nvalue"
+ StringSlice = "Second\nvalue`,
+ lineNumber: 2,
+ },
+ {
+ iniFile: `DefaultMap = some:"value`,
+ lineNumber: 1,
+ },
+ {
+ iniFile: `DefaultMap = some:value
+ DefaultMap = another:"value`,
+ lineNumber: 2,
+ },
+ }
+
+ for _, test := range tests {
+ var opts helpOptions
+
+ p := NewNamedParser("TestIni", Default)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ inip := NewIniParser(p)
+
+ inic := test.iniFile
+
+ b := strings.NewReader(inic)
+ err := inip.Parse(b)
+
+ if err == nil {
+			t.Fatalf("Expected an error")
+ }
+
+ iniError := err.(*IniError)
+
+ if iniError.LineNumber != test.lineNumber {
+			t.Fatalf("Expected an error on line %d, but got line %d", test.lineNumber, iniError.LineNumber)
+ }
+ }
+}
+
+func TestIniCommands(t *testing.T) {
+ var opts struct {
+ Value string `short:"v" long:"value"`
+
+ Add struct {
+ Name int `short:"n" long:"name" ini-name:"AliasName"`
+
+ Other struct {
+ O string `short:"o" long:"other"`
+ } `group:"Other Options"`
+ } `command:"add"`
+ }
+
+ p := NewNamedParser("TestIni", Default)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ inip := NewIniParser(p)
+
+ inic := `[Application Options]
+value = some value
+
+[add]
+AliasName = 5
+
+[add.Other Options]
+other = subgroup
+
+`
+
+ b := strings.NewReader(inic)
+ err := inip.Parse(b)
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %s", err)
+ }
+
+ assertString(t, opts.Value, "some value")
+
+ if opts.Add.Name != 5 {
+ t.Errorf("Expected opts.Add.Name to be 5, but got %v", opts.Add.Name)
+ }
+
+ assertString(t, opts.Add.Other.O, "subgroup")
+
+ // Test writing it back
+ buf := &bytes.Buffer{}
+
+ inip.Write(buf, IniDefault)
+
+ assertDiff(t, buf.String(), inic, "ini contents")
+}
+
+func TestIniNoIni(t *testing.T) {
+ var opts struct {
+ NoValue string `short:"n" long:"novalue" no-ini:"yes"`
+ Value string `short:"v" long:"value"`
+ }
+
+ p := NewNamedParser("TestIni", Default)
+ p.AddGroup("Application Options", "The application options", &opts)
+
+ inip := NewIniParser(p)
+
+ // read INI
+ inic := `[Application Options]
+novalue = some value
+value = some other value
+`
+
+ b := strings.NewReader(inic)
+ err := inip.Parse(b)
+
+ if err == nil {
+ t.Fatalf("Expected error")
+ }
+
+ iniError := err.(*IniError)
+
+ if v := uint(2); iniError.LineNumber != v {
+		t.Errorf("Expected error to be on line %d, but got line %d", v, iniError.LineNumber)
+ }
+
+ if v := "unknown option: novalue"; iniError.Message != v {
+		t.Errorf("Expected error message to be %q, but got %q", v, iniError.Message)
+ }
+
+ // write INI
+ opts.NoValue = "some value"
+ opts.Value = "some other value"
+
+ file, err := ioutil.TempFile("", "")
+ if err != nil {
+ t.Fatalf("Cannot create temporary file: %s", err)
+ }
+ defer os.Remove(file.Name())
+
+ err = inip.WriteFile(file.Name(), IniIncludeDefaults)
+ if err != nil {
+ t.Fatalf("Could not write ini file: %s", err)
+ }
+
+ found, err := ioutil.ReadFile(file.Name())
+ if err != nil {
+ t.Fatalf("Could not read written ini file: %s", err)
+ }
+
+ expected := "[Application Options]\nValue = some other value\n\n"
+
+ assertDiff(t, string(found), expected, "ini content")
+}
+
+func TestIniParse(t *testing.T) {
+ file, err := ioutil.TempFile("", "")
+ if err != nil {
+ t.Fatalf("Cannot create temporary file: %s", err)
+ }
+ defer os.Remove(file.Name())
+
+ _, err = file.WriteString("value = 123")
+ if err != nil {
+ t.Fatalf("Cannot write to temporary file: %s", err)
+ }
+
+ file.Close()
+
+ var opts struct {
+ Value int `long:"value"`
+ }
+
+ err = IniParse(file.Name(), &opts)
+ if err != nil {
+ t.Fatalf("Could not parse ini: %s", err)
+ }
+
+ if opts.Value != 123 {
+ t.Fatalf("Expected Value to be \"123\" but was \"%d\"", opts.Value)
+ }
+}
+
+func TestIniCliOverrides(t *testing.T) {
+ file, err := ioutil.TempFile("", "")
+
+ if err != nil {
+ t.Fatalf("Cannot create temporary file: %s", err)
+ }
+
+ defer os.Remove(file.Name())
+
+	_, err = file.WriteString("values = 123\n")
+	if err == nil {
+		_, err = file.WriteString("values = 456\n")
+	}
+
+ if err != nil {
+ t.Fatalf("Cannot write to temporary file: %s", err)
+ }
+
+ file.Close()
+
+ var opts struct {
+ Values []int `long:"values"`
+ }
+
+ p := NewParser(&opts, Default)
+ err = NewIniParser(p).ParseFile(file.Name())
+
+ if err != nil {
+ t.Fatalf("Could not parse ini: %s", err)
+ }
+
+ _, err = p.ParseArgs([]string{"--values", "111", "--values", "222"})
+
+ if err != nil {
+ t.Fatalf("Failed to parse arguments: %s", err)
+ }
+
+ if len(opts.Values) != 2 {
+ t.Fatalf("Expected Values to contain two elements, but got %d", len(opts.Values))
+ }
+
+ if opts.Values[0] != 111 {
+ t.Fatalf("Expected Values[0] to be 111, but got '%d'", opts.Values[0])
+ }
+
+ if opts.Values[1] != 222 {
+		t.Fatalf("Expected Values[1] to be 222, but got '%d'", opts.Values[1])
+ }
+}
+
+func TestIniOverrides(t *testing.T) {
+ file, err := ioutil.TempFile("", "")
+
+ if err != nil {
+ t.Fatalf("Cannot create temporary file: %s", err)
+ }
+
+ defer os.Remove(file.Name())
+
+	_, err = file.WriteString("value-with-default = \"ini-value\"\n")
+	if err == nil {
+		_, err = file.WriteString("value-with-default-override-cli = \"ini-value\"\n")
+	}
+
+ if err != nil {
+ t.Fatalf("Cannot write to temporary file: %s", err)
+ }
+
+ file.Close()
+
+ var opts struct {
+ ValueWithDefault string `long:"value-with-default" default:"value"`
+ ValueWithDefaultOverrideCli string `long:"value-with-default-override-cli" default:"value"`
+ }
+
+ p := NewParser(&opts, Default)
+ err = NewIniParser(p).ParseFile(file.Name())
+
+ if err != nil {
+ t.Fatalf("Could not parse ini: %s", err)
+ }
+
+ _, err = p.ParseArgs([]string{"--value-with-default-override-cli", "cli-value"})
+
+ if err != nil {
+ t.Fatalf("Failed to parse arguments: %s", err)
+ }
+
+ assertString(t, opts.ValueWithDefault, "ini-value")
+ assertString(t, opts.ValueWithDefaultOverrideCli, "cli-value")
+}
+
+func TestWriteFile(t *testing.T) {
+ file, err := ioutil.TempFile("", "")
+ if err != nil {
+ t.Fatalf("Cannot create temporary file: %s", err)
+ }
+ defer os.Remove(file.Name())
+
+ var opts struct {
+ Value int `long:"value"`
+ }
+
+ opts.Value = 123
+
+ p := NewParser(&opts, Default)
+ ini := NewIniParser(p)
+
+ err = ini.WriteFile(file.Name(), IniIncludeDefaults)
+ if err != nil {
+ t.Fatalf("Could not write ini file: %s", err)
+ }
+
+ found, err := ioutil.ReadFile(file.Name())
+ if err != nil {
+ t.Fatalf("Could not read written ini file: %s", err)
+ }
+
+ expected := "[Application Options]\nValue = 123\n\n"
+
+ assertDiff(t, string(found), expected, "ini content")
+}
+
+func TestOverwriteRequiredOptions(t *testing.T) {
+ var tests = []struct {
+ args []string
+ expected []string
+ }{
+ {
+ args: []string{"--value", "from CLI"},
+ expected: []string{
+ "from CLI",
+ "from default",
+ },
+ },
+ {
+ args: []string{"--value", "from CLI", "--default", "from CLI"},
+ expected: []string{
+ "from CLI",
+ "from CLI",
+ },
+ },
+ {
+ args: []string{"--config", "no file name"},
+ expected: []string{
+ "from INI",
+ "from INI",
+ },
+ },
+ {
+ args: []string{"--value", "from CLI before", "--default", "from CLI before", "--config", "no file name"},
+ expected: []string{
+ "from INI",
+ "from INI",
+ },
+ },
+ {
+ args: []string{"--value", "from CLI before", "--default", "from CLI before", "--config", "no file name", "--value", "from CLI after", "--default", "from CLI after"},
+ expected: []string{
+ "from CLI after",
+ "from CLI after",
+ },
+ },
+ }
+
+ for _, test := range tests {
+ var opts struct {
+ Config func(s string) error `long:"config" no-ini:"true"`
+ Value string `long:"value" required:"true"`
+ Default string `long:"default" required:"true" default:"from default"`
+ }
+
+ p := NewParser(&opts, Default)
+
+ opts.Config = func(s string) error {
+ ini := NewIniParser(p)
+
+ return ini.Parse(bytes.NewBufferString("value = from INI\ndefault = from INI"))
+ }
+
+ _, err := p.ParseArgs(test.args)
+ if err != nil {
+ t.Fatalf("Unexpected error %s with args %+v", err, test.args)
+ }
+
+ if opts.Value != test.expected[0] {
+ t.Fatalf("Expected Value to be \"%s\" but was \"%s\" with args %+v", test.expected[0], opts.Value, test.args)
+ }
+
+ if opts.Default != test.expected[1] {
+ t.Fatalf("Expected Default to be \"%s\" but was \"%s\" with args %+v", test.expected[1], opts.Default, test.args)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/long_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/long_test.go
new file mode 100644
index 00000000000..02fc8c70129
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/long_test.go
@@ -0,0 +1,85 @@
+package flags
+
+import (
+ "testing"
+)
+
+func TestLong(t *testing.T) {
+ var opts = struct {
+ Value bool `long:"value"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "--value")
+
+ assertStringArray(t, ret, []string{})
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+}
+
+func TestLongArg(t *testing.T) {
+ var opts = struct {
+ Value string `long:"value"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "--value", "value")
+
+ assertStringArray(t, ret, []string{})
+ assertString(t, opts.Value, "value")
+}
+
+func TestLongArgEqual(t *testing.T) {
+ var opts = struct {
+ Value string `long:"value"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "--value=value")
+
+ assertStringArray(t, ret, []string{})
+ assertString(t, opts.Value, "value")
+}
+
+func TestLongDefault(t *testing.T) {
+ var opts = struct {
+ Value string `long:"value" default:"value"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts)
+
+ assertStringArray(t, ret, []string{})
+ assertString(t, opts.Value, "value")
+}
+
+func TestLongOptional(t *testing.T) {
+ var opts = struct {
+ Value string `long:"value" optional:"yes" optional-value:"value"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "--value")
+
+ assertStringArray(t, ret, []string{})
+ assertString(t, opts.Value, "value")
+}
+
+func TestLongOptionalArg(t *testing.T) {
+ var opts = struct {
+ Value string `long:"value" optional:"yes" optional-value:"value"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "--value", "no")
+
+ assertStringArray(t, ret, []string{"no"})
+ assertString(t, opts.Value, "value")
+}
+
+func TestLongOptionalArgEqual(t *testing.T) {
+ var opts = struct {
+ Value string `long:"value" optional:"yes" optional-value:"value"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "--value=value", "no")
+
+ assertStringArray(t, ret, []string{"no"})
+ assertString(t, opts.Value, "value")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/man.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/man.go
new file mode 100644
index 00000000000..8e4a8b72671
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/man.go
@@ -0,0 +1,194 @@
+package flags
+
+import (
+ "fmt"
+ "io"
+ "runtime"
+ "strings"
+ "time"
+)
+
+func manQuote(s string) string {
+ return strings.Replace(s, "\\", "\\\\", -1)
+}
+
+func formatForMan(wr io.Writer, s string) {
+ for {
+ idx := strings.IndexRune(s, '`')
+
+ if idx < 0 {
+ fmt.Fprintf(wr, "%s", manQuote(s))
+ break
+ }
+
+ fmt.Fprintf(wr, "%s", manQuote(s[:idx]))
+
+ s = s[idx+1:]
+ idx = strings.IndexRune(s, '\'')
+
+ if idx < 0 {
+ fmt.Fprintf(wr, "%s", manQuote(s))
+ break
+ }
+
+ fmt.Fprintf(wr, "\\fB%s\\fP", manQuote(s[:idx]))
+ s = s[idx+1:]
+ }
+}
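+
+// For reference (not part of the upstream source): formatForMan renders the
+// `quoted' spans used in descriptions as bold groff text, for example
+//
+//	formatForMan(w, "This is a somewhat `longer' description")
+//	// writes: This is a somewhat \fBlonger\fP description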
+
+func writeManPageOptions(wr io.Writer, grp *Group) {
+ grp.eachGroup(func(group *Group) {
+ if group.Hidden {
+ return
+ }
+
+ for _, opt := range group.options {
+ if !opt.canCli() || opt.Hidden {
+ continue
+ }
+
+ fmt.Fprintln(wr, ".TP")
+ fmt.Fprintf(wr, "\\fB")
+
+ if opt.ShortName != 0 {
+ fmt.Fprintf(wr, "\\fB\\-%c\\fR", opt.ShortName)
+ }
+
+ if len(opt.LongName) != 0 {
+ if opt.ShortName != 0 {
+ fmt.Fprintf(wr, ", ")
+ }
+
+ fmt.Fprintf(wr, "\\fB\\-\\-%s\\fR", manQuote(opt.LongNameWithNamespace()))
+ }
+
+ if len(opt.ValueName) != 0 || opt.OptionalArgument {
+ if opt.OptionalArgument {
+ fmt.Fprintf(wr, " [\\fI%s=%s\\fR]", manQuote(opt.ValueName), manQuote(strings.Join(quoteV(opt.OptionalValue), ", ")))
+ } else {
+ fmt.Fprintf(wr, " \\fI%s\\fR", manQuote(opt.ValueName))
+ }
+ }
+
+ if len(opt.Default) != 0 {
+ fmt.Fprintf(wr, " <default: \\fI%s\\fR>", manQuote(strings.Join(quoteV(opt.Default), ", ")))
+ } else if len(opt.EnvDefaultKey) != 0 {
+ if runtime.GOOS == "windows" {
+ fmt.Fprintf(wr, " <default: \\fI%%%s%%\\fR>", manQuote(opt.EnvDefaultKey))
+ } else {
+ fmt.Fprintf(wr, " <default: \\fI$%s\\fR>", manQuote(opt.EnvDefaultKey))
+ }
+ }
+
+ if opt.Required {
+ fmt.Fprintf(wr, " (\\fIrequired\\fR)")
+ }
+
+ fmt.Fprintln(wr, "\\fP")
+
+ if len(opt.Description) != 0 {
+ formatForMan(wr, opt.Description)
+ fmt.Fprintln(wr, "")
+ }
+ }
+ })
+}
+
+func writeManPageSubcommands(wr io.Writer, name string, root *Command) {
+ commands := root.sortedVisibleCommands()
+
+ for _, c := range commands {
+ var nn string
+
+ if c.Hidden {
+ continue
+ }
+
+ if len(name) != 0 {
+ nn = name + " " + c.Name
+ } else {
+ nn = c.Name
+ }
+
+ writeManPageCommand(wr, nn, root, c)
+ }
+}
+
+func writeManPageCommand(wr io.Writer, name string, root *Command, command *Command) {
+ fmt.Fprintf(wr, ".SS %s\n", name)
+ fmt.Fprintln(wr, command.ShortDescription)
+
+ if len(command.LongDescription) > 0 {
+ fmt.Fprintln(wr, "")
+
+ cmdstart := fmt.Sprintf("The %s command", manQuote(command.Name))
+
+ if strings.HasPrefix(command.LongDescription, cmdstart) {
+ fmt.Fprintf(wr, "The \\fI%s\\fP command", manQuote(command.Name))
+
+ formatForMan(wr, command.LongDescription[len(cmdstart):])
+ fmt.Fprintln(wr, "")
+ } else {
+ formatForMan(wr, command.LongDescription)
+ fmt.Fprintln(wr, "")
+ }
+ }
+
+ var usage string
+ if us, ok := command.data.(Usage); ok {
+ usage = us.Usage()
+ } else if command.hasCliOptions() {
+ usage = fmt.Sprintf("[%s-OPTIONS]", command.Name)
+ }
+
+ var pre string
+ if root.hasCliOptions() {
+ pre = fmt.Sprintf("%s [OPTIONS] %s", root.Name, command.Name)
+ } else {
+ pre = fmt.Sprintf("%s %s", root.Name, command.Name)
+ }
+
+ if len(usage) > 0 {
+ fmt.Fprintf(wr, "\n\\fBUsage\\fP: %s %s\n.TP\n", manQuote(pre), manQuote(usage))
+ }
+
+ if len(command.Aliases) > 0 {
+ fmt.Fprintf(wr, "\n\\fBAliases\\fP: %s\n\n", manQuote(strings.Join(command.Aliases, ", ")))
+ }
+
+ writeManPageOptions(wr, command.Group)
+ writeManPageSubcommands(wr, name, command)
+}
+
+// WriteManPage writes a basic man page in groff format to the specified
+// writer.
+func (p *Parser) WriteManPage(wr io.Writer) {
+ t := time.Now()
+
+ fmt.Fprintf(wr, ".TH %s 1 \"%s\"\n", manQuote(p.Name), t.Format("2 January 2006"))
+ fmt.Fprintln(wr, ".SH NAME")
+ fmt.Fprintf(wr, "%s \\- %s\n", manQuote(p.Name), manQuote(p.ShortDescription))
+ fmt.Fprintln(wr, ".SH SYNOPSIS")
+
+ usage := p.Usage
+
+ if len(usage) == 0 {
+ usage = "[OPTIONS]"
+ }
+
+ fmt.Fprintf(wr, "\\fB%s\\fP %s\n", manQuote(p.Name), manQuote(usage))
+ fmt.Fprintln(wr, ".SH DESCRIPTION")
+
+ formatForMan(wr, p.LongDescription)
+ fmt.Fprintln(wr, "")
+
+ fmt.Fprintln(wr, ".SH OPTIONS")
+
+ writeManPageOptions(wr, p.Command.Group)
+
+ if len(p.visibleCommands()) > 0 {
+ fmt.Fprintln(wr, ".SH COMMANDS")
+
+ writeManPageSubcommands(wr, "", p.Command)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/marshal_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/marshal_test.go
new file mode 100644
index 00000000000..59c9ccefb96
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/marshal_test.go
@@ -0,0 +1,97 @@
+package flags
+
+import (
+ "fmt"
+ "testing"
+)
+
+type marshalled bool
+
+func (m *marshalled) UnmarshalFlag(value string) error {
+ if value == "yes" {
+ *m = true
+ } else if value == "no" {
+ *m = false
+ } else {
+ return fmt.Errorf("`%s' is not a valid value, please specify `yes' or `no'", value)
+ }
+
+ return nil
+}
+
+func (m marshalled) MarshalFlag() (string, error) {
+ if m {
+ return "yes", nil
+ }
+
+ return "no", nil
+}
+
+type marshalledError bool
+
+func (m marshalledError) MarshalFlag() (string, error) {
+ return "", newErrorf(ErrMarshal, "Failed to marshal")
+}
+
+func TestUnmarshal(t *testing.T) {
+ var opts = struct {
+ Value marshalled `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v=yes")
+
+ assertStringArray(t, ret, []string{})
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+}
+
+func TestUnmarshalDefault(t *testing.T) {
+ var opts = struct {
+ Value marshalled `short:"v" default:"yes"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts)
+
+ assertStringArray(t, ret, []string{})
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+}
+
+func TestUnmarshalOptional(t *testing.T) {
+ var opts = struct {
+ Value marshalled `short:"v" optional:"yes" optional-value:"yes"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v")
+
+ assertStringArray(t, ret, []string{})
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+}
+
+func TestUnmarshalError(t *testing.T) {
+ var opts = struct {
+ Value marshalled `short:"v"`
+ }{}
+
+ assertParseFail(t, ErrMarshal, fmt.Sprintf("invalid argument for flag `%cv' (expected flags.marshalled): `invalid' is not a valid value, please specify `yes' or `no'", defaultShortOptDelimiter), &opts, "-vinvalid")
+}
+
+func TestMarshalError(t *testing.T) {
+ var opts = struct {
+ Value marshalledError `short:"v"`
+ }{}
+
+ p := NewParser(&opts, Default)
+ o := p.Command.Groups()[0].Options()[0]
+
+ _, err := convertToString(o.value, o.tag)
+
+ assertError(t, err, ErrMarshal, "Failed to marshal")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/multitag.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/multitag.go
new file mode 100644
index 00000000000..96bb1a31dee
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/multitag.go
@@ -0,0 +1,140 @@
+package flags
+
+import (
+ "strconv"
+)
+
+type multiTag struct {
+ value string
+ cache map[string][]string
+}
+
+func newMultiTag(v string) multiTag {
+ return multiTag{
+ value: v,
+ }
+}
+
+func (x *multiTag) scan() (map[string][]string, error) {
+ v := x.value
+
+ ret := make(map[string][]string)
+
+ // This is mostly copied from reflect.StructTag.Get
+ for v != "" {
+ i := 0
+
+ // Skip whitespace
+ for i < len(v) && v[i] == ' ' {
+ i++
+ }
+
+ v = v[i:]
+
+ if v == "" {
+ break
+ }
+
+ // Scan to colon to find key
+ i = 0
+
+ for i < len(v) && v[i] != ' ' && v[i] != ':' && v[i] != '"' {
+ i++
+ }
+
+ if i >= len(v) {
+ return nil, newErrorf(ErrTag, "expected `:' after key name, but got end of tag (in `%v`)", x.value)
+ }
+
+ if v[i] != ':' {
+ return nil, newErrorf(ErrTag, "expected `:' after key name, but got `%v' (in `%v`)", v[i], x.value)
+ }
+
+ if i+1 >= len(v) {
+ return nil, newErrorf(ErrTag, "expected `\"' to start tag value at end of tag (in `%v`)", x.value)
+ }
+
+ if v[i+1] != '"' {
+ return nil, newErrorf(ErrTag, "expected `\"' to start tag value, but got `%v' (in `%v`)", v[i+1], x.value)
+ }
+
+ name := v[:i]
+ v = v[i+1:]
+
+ // Scan quoted string to find value
+ i = 1
+
+ for i < len(v) && v[i] != '"' {
+ if v[i] == '\n' {
+ return nil, newErrorf(ErrTag, "unexpected newline in tag value `%v' (in `%v`)", name, x.value)
+ }
+
+ if v[i] == '\\' {
+ i++
+ }
+ i++
+ }
+
+ if i >= len(v) {
+ return nil, newErrorf(ErrTag, "expected end of tag value `\"' at end of tag (in `%v`)", x.value)
+ }
+
+ val, err := strconv.Unquote(v[:i+1])
+
+ if err != nil {
+ return nil, newErrorf(ErrTag, "Malformed value of tag `%v:%v` => %v (in `%v`)", name, v[:i+1], err, x.value)
+ }
+
+ v = v[i+1:]
+
+ ret[name] = append(ret[name], val)
+ }
+
+ return ret, nil
+}
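+
+// For reference (not part of the upstream source): scan splits a struct tag
+// string into a key-to-values map, keeping repeated keys in order, e.g.
+//
+//	t := newMultiTag(`short:"s" default:"some" default:"value"`)
+//	vals, _ := t.scan()
+//	// vals == map[string][]string{"short": {"s"}, "default": {"some", "value"}}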
+
+func (x *multiTag) Parse() error {
+ vals, err := x.scan()
+ x.cache = vals
+
+ return err
+}
+
+func (x *multiTag) cached() map[string][]string {
+ if x.cache == nil {
+ cache, _ := x.scan()
+
+ if cache == nil {
+ cache = make(map[string][]string)
+ }
+
+ x.cache = cache
+ }
+
+ return x.cache
+}
+
+func (x *multiTag) Get(key string) string {
+ c := x.cached()
+
+ if v, ok := c[key]; ok {
+ return v[len(v)-1]
+ }
+
+ return ""
+}
+
+func (x *multiTag) GetMany(key string) []string {
+ c := x.cached()
+ return c[key]
+}
+
+func (x *multiTag) Set(key string, value string) {
+ c := x.cached()
+ c[key] = []string{value}
+}
+
+func (x *multiTag) SetMany(key string, value []string) {
+ c := x.cached()
+ c[key] = value
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/option.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/option.go
new file mode 100644
index 00000000000..a7f4f9a9adc
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/option.go
@@ -0,0 +1,414 @@
+package flags
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+ "syscall"
+ "unicode/utf8"
+)
+
+// Option flag information. Contains a description of the option, short and
+// long name as well as a default value and whether an argument for this
+// flag is optional.
+type Option struct {
+ // The description of the option flag. This description is shown
+ // automatically in the built-in help.
+ Description string
+
+ // The short name of the option (a single character). If not 0, the
+ // option flag can be 'activated' using -<ShortName>. Either ShortName
+ // or LongName needs to be non-empty.
+ ShortName rune
+
+ // The long name of the option. If not "", the option flag can be
+ // activated using --<LongName>. Either ShortName or LongName needs
+ // to be non-empty.
+ LongName string
+
+ // The default value of the option.
+ Default []string
+
+ // The optional environment default value key name.
+ EnvDefaultKey string
+
+ // The optional delimiter string for EnvDefaultKey values.
+ EnvDefaultDelim string
+
+ // If true, specifies that the argument to an option flag is optional.
+ // When no argument to the flag is specified on the command line, the
+ // value of OptionalValue will be set in the field this option represents.
+ // This is only valid for non-boolean options.
+ OptionalArgument bool
+
+ // The optional value of the option. The optional value is used when
+ // the option flag is marked as having an OptionalArgument. This means
+ // that when the flag is specified, but no option argument is given,
+ // the value of the field this option represents will be set to
+ // OptionalValue. This is only valid for non-boolean options.
+ OptionalValue []string
+
+ // If true, the option _must_ be specified on the command line. If the
+ // option is not specified, the parser will generate an ErrRequired type
+ // error.
+ Required bool
+
+ // A name for the value of an option shown in the Help as --flag [ValueName]
+ ValueName string
+
+ // A mask value to show in the help instead of the default value. This
+ // is useful for hiding sensitive information in the help, such as
+ // passwords.
+ DefaultMask string
+
+	// If non-empty, the option value must be one of these choices.
+ Choices []string
+
+ // If true, the option is not displayed in the help or man page
+ Hidden bool
+
+ // The group which the option belongs to
+ group *Group
+
+ // The struct field which the option represents.
+ field reflect.StructField
+
+ // The struct field value which the option represents.
+ value reflect.Value
+
+ // Determines if the option will be always quoted in the INI output
+ iniQuote bool
+
+ tag multiTag
+ isSet bool
+ preventDefault bool
+
+ defaultLiteral string
+}
+
+// LongNameWithNamespace returns the option's long name with the group namespaces
+// prepended by walking up the option's group tree. Namespaces and the long name
+// itself are separated by the parser's namespace delimiter. If the long name is
+// empty an empty string is returned.
+func (option *Option) LongNameWithNamespace() string {
+ if len(option.LongName) == 0 {
+ return ""
+ }
+
+ // fetch the namespace delimiter from the parser which is always at the
+ // end of the group hierarchy
+ namespaceDelimiter := ""
+ g := option.group
+
+ for {
+ if p, ok := g.parent.(*Parser); ok {
+ namespaceDelimiter = p.NamespaceDelimiter
+
+ break
+ }
+
+ switch i := g.parent.(type) {
+ case *Command:
+ g = i.Group
+ case *Group:
+ g = i
+ }
+ }
+
+ // concatenate long name with namespace
+ longName := option.LongName
+ g = option.group
+
+ for g != nil {
+ if g.Namespace != "" {
+ longName = g.Namespace + namespaceDelimiter + longName
+ }
+
+ switch i := g.parent.(type) {
+ case *Command:
+ g = i.Group
+ case *Group:
+ g = i
+ case *Parser:
+ g = nil
+ }
+ }
+
+ return longName
+}
+
+// String converts an option to a human-readable string describing the
+// option.
+func (option *Option) String() string {
+ var s string
+ var short string
+
+ if option.ShortName != 0 {
+ data := make([]byte, utf8.RuneLen(option.ShortName))
+ utf8.EncodeRune(data, option.ShortName)
+ short = string(data)
+
+ if len(option.LongName) != 0 {
+ s = fmt.Sprintf("%s%s, %s%s",
+ string(defaultShortOptDelimiter), short,
+ defaultLongOptDelimiter, option.LongNameWithNamespace())
+ } else {
+ s = fmt.Sprintf("%s%s", string(defaultShortOptDelimiter), short)
+ }
+ } else if len(option.LongName) != 0 {
+ s = fmt.Sprintf("%s%s", defaultLongOptDelimiter, option.LongNameWithNamespace())
+ }
+
+ return s
+}
+
+// Value returns the option value as an interface{}.
+func (option *Option) Value() interface{} {
+ return option.value.Interface()
+}
+
+// IsSet returns true if the option has been set.
+func (option *Option) IsSet() bool {
+ return option.isSet
+}
+
+// Set the value of an option to the specified value. An error will be returned
+// if the specified value could not be converted to the corresponding option
+// value type.
+func (option *Option) set(value *string) error {
+ kind := option.value.Type().Kind()
+
+ if (kind == reflect.Map || kind == reflect.Slice) && !option.isSet {
+ option.empty()
+ }
+
+ option.isSet = true
+ option.preventDefault = true
+
+ if len(option.Choices) != 0 {
+ found := false
+
+ for _, choice := range option.Choices {
+ if choice == *value {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ allowed := strings.Join(option.Choices[0:len(option.Choices)-1], ", ")
+
+ if len(option.Choices) > 1 {
+ allowed += " or " + option.Choices[len(option.Choices)-1]
+ }
+
+ return newErrorf(ErrInvalidChoice,
+ "Invalid value `%s' for option `%s'. Allowed values are: %s",
+ *value, option, allowed)
+ }
+ }
+
+ if option.isFunc() {
+ return option.call(value)
+ } else if value != nil {
+ return convert(*value, option.value, option.tag)
+ }
+
+ return convert("", option.value, option.tag)
+}
+
+func (option *Option) canCli() bool {
+ return option.ShortName != 0 || len(option.LongName) != 0
+}
+
+func (option *Option) canArgument() bool {
+ if u := option.isUnmarshaler(); u != nil {
+ return true
+ }
+
+ return !option.isBool()
+}
+
+func (option *Option) emptyValue() reflect.Value {
+ tp := option.value.Type()
+
+ if tp.Kind() == reflect.Map {
+ return reflect.MakeMap(tp)
+ }
+
+ return reflect.Zero(tp)
+}
+
+func (option *Option) empty() {
+ if !option.isFunc() {
+ option.value.Set(option.emptyValue())
+ }
+}
+
+func (option *Option) clearDefault() {
+ usedDefault := option.Default
+
+ if envKey := option.EnvDefaultKey; envKey != "" {
+ // os.Getenv() makes no distinction between undefined and
+ // empty values, so we use syscall.Getenv()
+ if value, ok := syscall.Getenv(envKey); ok {
+ if option.EnvDefaultDelim != "" {
+ usedDefault = strings.Split(value,
+ option.EnvDefaultDelim)
+ } else {
+ usedDefault = []string{value}
+ }
+ }
+ }
+
+ if len(usedDefault) > 0 {
+ option.empty()
+
+ for _, d := range usedDefault {
+ option.set(&d)
+ }
+ } else {
+ tp := option.value.Type()
+
+ switch tp.Kind() {
+ case reflect.Map:
+ if option.value.IsNil() {
+ option.empty()
+ }
+ case reflect.Slice:
+ if option.value.IsNil() {
+ option.empty()
+ }
+ }
+ }
+}
+
+func (option *Option) valueIsDefault() bool {
+ // Check if the value of the option corresponds to its
+ // default value
+ emptyval := option.emptyValue()
+
+ checkvalptr := reflect.New(emptyval.Type())
+ checkval := reflect.Indirect(checkvalptr)
+
+ checkval.Set(emptyval)
+
+ if len(option.Default) != 0 {
+ for _, v := range option.Default {
+ convert(v, checkval, option.tag)
+ }
+ }
+
+ return reflect.DeepEqual(option.value.Interface(), checkval.Interface())
+}
+
+func (option *Option) isUnmarshaler() Unmarshaler {
+ v := option.value
+
+ for {
+ if !v.CanInterface() {
+ break
+ }
+
+ i := v.Interface()
+
+ if u, ok := i.(Unmarshaler); ok {
+ return u
+ }
+
+ if !v.CanAddr() {
+ break
+ }
+
+ v = v.Addr()
+ }
+
+ return nil
+}
+
+func (option *Option) isBool() bool {
+ tp := option.value.Type()
+
+ for {
+ switch tp.Kind() {
+ case reflect.Bool:
+ return true
+ case reflect.Slice:
+ return (tp.Elem().Kind() == reflect.Bool)
+ case reflect.Func:
+ return tp.NumIn() == 0
+ case reflect.Ptr:
+ tp = tp.Elem()
+ default:
+ return false
+ }
+ }
+}
+
+func (option *Option) isFunc() bool {
+ return option.value.Type().Kind() == reflect.Func
+}
+
+func (option *Option) call(value *string) error {
+ var retval []reflect.Value
+
+ if value == nil {
+ retval = option.value.Call(nil)
+ } else {
+ tp := option.value.Type().In(0)
+
+ val := reflect.New(tp)
+ val = reflect.Indirect(val)
+
+ if err := convert(*value, val, option.tag); err != nil {
+ return err
+ }
+
+ retval = option.value.Call([]reflect.Value{val})
+ }
+
+ if len(retval) == 1 && retval[0].Type() == reflect.TypeOf((*error)(nil)).Elem() {
+ if retval[0].Interface() == nil {
+ return nil
+ }
+
+ return retval[0].Interface().(error)
+ }
+
+ return nil
+}
+
+func (option *Option) updateDefaultLiteral() {
+ defs := option.Default
+ def := ""
+
+ if len(defs) == 0 && option.canArgument() {
+ var showdef bool
+
+ switch option.field.Type.Kind() {
+ case reflect.Func, reflect.Ptr:
+ showdef = !option.value.IsNil()
+ case reflect.Slice, reflect.String, reflect.Array:
+ showdef = option.value.Len() > 0
+ case reflect.Map:
+ showdef = !option.value.IsNil() && option.value.Len() > 0
+ default:
+ zeroval := reflect.Zero(option.field.Type)
+ showdef = !reflect.DeepEqual(zeroval.Interface(), option.value.Interface())
+ }
+
+ if showdef {
+ def, _ = convertToString(option.value, option.tag)
+ }
+ } else if len(defs) != 0 {
+ l := len(defs) - 1
+
+ for i := 0; i < l; i++ {
+ def += quoteIfNeeded(defs[i]) + ", "
+ }
+
+ def += quoteIfNeeded(defs[l])
+ }
+
+ option.defaultLiteral = def
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/options_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/options_test.go
new file mode 100644
index 00000000000..b0fe9f4565c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/options_test.go
@@ -0,0 +1,45 @@
+package flags
+
+import (
+ "testing"
+)
+
+func TestPassDoubleDash(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+ }{}
+
+ p := NewParser(&opts, PassDoubleDash)
+ ret, err := p.ParseArgs([]string{"-v", "--", "-v", "-g"})
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ return
+ }
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ assertStringArray(t, ret, []string{"-v", "-g"})
+}
+
+func TestPassAfterNonOption(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+ }{}
+
+ p := NewParser(&opts, PassAfterNonOption)
+ ret, err := p.ParseArgs([]string{"-v", "arg", "-v", "-g"})
+
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ return
+ }
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+
+ assertStringArray(t, ret, []string{"arg", "-v", "-g"})
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/optstyle_other.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/optstyle_other.go
new file mode 100644
index 00000000000..29ca4b6063b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/optstyle_other.go
@@ -0,0 +1,67 @@
+// +build !windows
+
+package flags
+
+import (
+ "strings"
+)
+
+const (
+ defaultShortOptDelimiter = '-'
+ defaultLongOptDelimiter = "--"
+ defaultNameArgDelimiter = '='
+)
+
+func argumentStartsOption(arg string) bool {
+ return len(arg) > 0 && arg[0] == '-'
+}
+
+func argumentIsOption(arg string) bool {
+ if len(arg) > 1 && arg[0] == '-' && arg[1] != '-' {
+ return true
+ }
+
+ if len(arg) > 2 && arg[0] == '-' && arg[1] == '-' && arg[2] != '-' {
+ return true
+ }
+
+ return false
+}
+
+// stripOptionPrefix returns the option without its prefix and whether or
+// not the option is a long option.
+func stripOptionPrefix(optname string) (prefix string, name string, islong bool) {
+ if strings.HasPrefix(optname, "--") {
+ return "--", optname[2:], true
+ } else if strings.HasPrefix(optname, "-") {
+ return "-", optname[1:], false
+ }
+
+ return "", optname, false
+}
+
+// splitOption attempts to split the passed option into a name and an argument.
+// When there is no argument specified, nil will be returned for it.
+func splitOption(prefix string, option string, islong bool) (string, string, *string) {
+ pos := strings.Index(option, "=")
+
+ if (islong && pos >= 0) || (!islong && pos == 1) {
+ rest := option[pos+1:]
+ return option[:pos], "=", &rest
+ }
+
+ return option, "", nil
+}
+
+// addHelpGroup adds a new group that contains default help parameters.
+func (c *Command) addHelpGroup(showHelp func() error) *Group {
+ var help struct {
+ ShowHelp func() error `short:"h" long:"help" description:"Show this help message"`
+ }
+
+ help.ShowHelp = showHelp
+ ret, _ := c.AddGroup("Help Options", "", &help)
+ ret.isBuiltinHelp = true
+
+ return ret
+}
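
A hedged in-package sketch (stripOptionPrefix and splitOption are unexported, so this would have to live inside package flags, e.g. in a test file) of how the non-Windows helpers above decompose a POSIX-style token:

package flags

// Illustrative only: decompose "--mode=fast" using the helpers defined above.
func examplePosixSplit() {
	prefix, name, islong := stripOptionPrefix("--mode=fast")
	// prefix == "--", name == "mode=fast", islong == true

	optname, sep, arg := splitOption(prefix, name, islong)
	// optname == "mode", sep == "=", *arg == "fast"
	_, _, _ = optname, sep, arg
}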
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/optstyle_windows.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/optstyle_windows.go
new file mode 100644
index 00000000000..a51de9cb29c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/optstyle_windows.go
@@ -0,0 +1,106 @@
+package flags
+
+import (
+ "strings"
+)
+
+// Windows uses a forward slash for both short and long options, and a colon
+// as the name/argument delimiter.
+const (
+ defaultShortOptDelimiter = '/'
+ defaultLongOptDelimiter = "/"
+ defaultNameArgDelimiter = ':'
+)
+
+func argumentStartsOption(arg string) bool {
+ return len(arg) > 0 && (arg[0] == '-' || arg[0] == '/')
+}
+
+func argumentIsOption(arg string) bool {
+ // Windows-style options allow a forward slash as the option
+ // delimiter.
+ if len(arg) > 1 && arg[0] == '/' {
+ return true
+ }
+
+ if len(arg) > 1 && arg[0] == '-' && arg[1] != '-' {
+ return true
+ }
+
+ if len(arg) > 2 && arg[0] == '-' && arg[1] == '-' && arg[2] != '-' {
+ return true
+ }
+
+ return false
+}
+
+// stripOptionPrefix returns the option without its prefix and whether or
+// not the option is a long option.
+func stripOptionPrefix(optname string) (prefix string, name string, islong bool) {
+ // Determine if the argument is a long option or not. Windows
+ // typically supports both long and short options with a single
+ // forward slash as the option delimiter, so handle this situation
+ // nicely.
+ possplit := 0
+
+ if strings.HasPrefix(optname, "--") {
+ possplit = 2
+ islong = true
+ } else if strings.HasPrefix(optname, "-") {
+ possplit = 1
+ islong = false
+ } else if strings.HasPrefix(optname, "/") {
+ possplit = 1
+ islong = len(optname) > 2
+ }
+
+ return optname[:possplit], optname[possplit:], islong
+}
+
+// splitOption attempts to split the passed option into a name and an argument.
+// When there is no argument specified, nil will be returned for it.
+func splitOption(prefix string, option string, islong bool) (string, string, *string) {
+ if len(option) == 0 {
+ return option, "", nil
+ }
+
+ // Windows typically uses a colon for the option name and argument
+ // delimiter while POSIX typically uses an equals. Support both styles,
+ // but don't allow the two to be mixed. That is to say /foo:bar and
+ // --foo=bar are acceptable, but /foo=bar and --foo:bar are not.
+ var pos int
+ var sp string
+
+ if prefix == "/" {
+ sp = ":"
+ pos = strings.Index(option, sp)
+ } else if len(prefix) > 0 {
+ sp = "="
+ pos = strings.Index(option, sp)
+ }
+
+ if (islong && pos >= 0) || (!islong && pos == 1) {
+ rest := option[pos+1:]
+ return option[:pos], sp, &rest
+ }
+
+ return option, "", nil
+}
+
+// addHelpGroup adds a new group that contains default help parameters.
+func (c *Command) addHelpGroup(showHelp func() error) *Group {
+ // Windows CLI applications typically use /? for help, so make that
+ // available as well as the POSIX-style -h and --help.
+ var help struct {
+ ShowHelpWindows func() error `short:"?" description:"Show this help message"`
+ ShowHelpPosix func() error `short:"h" long:"help" description:"Show this help message"`
+ }
+
+ help.ShowHelpWindows = showHelp
+ help.ShowHelpPosix = showHelp
+
+ ret, _ := c.AddGroup("Help Options", "", &help)
+ ret.isBuiltinHelp = true
+
+ return ret
+}
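
The same decomposition under the Windows rules above, again as a hedged in-package sketch; on a windows build a forward-slash token longer than two characters is treated as a long option and a colon separates the argument:

package flags

// Illustrative only, assuming the windows build of this file.
func exampleWindowsSplit() {
	prefix, name, islong := stripOptionPrefix("/mode:fast")
	// prefix == "/", name == "mode:fast", islong == true (token longer than 2 chars)

	optname, sep, arg := splitOption(prefix, name, islong)
	// optname == "mode", sep == ":", *arg == "fast"
	_, _, _ = optname, sep, arg
}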
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/parser.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/parser.go
new file mode 100644
index 00000000000..f9e07ee10e3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/parser.go
@@ -0,0 +1,652 @@
+// Copyright 2012 Jesse van den Kieboom. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flags
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "path"
+ "sort"
+ "strings"
+ "unicode/utf8"
+)
+
+// A Parser provides command line option parsing. It can contain several
+// option groups each with their own set of options.
+type Parser struct {
+ // Embedded, see Command for more information
+ *Command
+
+ // A usage string to be displayed in the help message.
+ Usage string
+
+ // Option flags changing the behavior of the parser.
+ Options Options
+
+ // NamespaceDelimiter separates group namespaces and option long names
+ NamespaceDelimiter string
+
+ // UnknownOptionHandler is a function which gets called when the parser
+ // encounters an unknown option. The function receives the unknown option
+ // name, a SplitArgument which specifies its value if set with an argument
+ // separator, and the remaining command line arguments.
+ // It should return a new list of remaining arguments to continue parsing,
+ // or an error to indicate a parse failure.
+ UnknownOptionHandler func(option string, arg SplitArgument, args []string) ([]string, error)
+
+ // CompletionHandler is a function that gets called to handle the completion
+ // of items. By default, the items are printed and the application exits.
+ // You can override this default behavior by specifying a custom CompletionHandler.
+ CompletionHandler func(items []Completion)
+
+ internalError error
+}
+
+// SplitArgument represents the argument value of an option that was passed using
+// an argument separator.
+type SplitArgument interface {
+ // Value returns the option's value as a string, and a boolean indicating
+ // whether the option was present.
+ Value() (string, bool)
+}
+
+type strArgument struct {
+ value *string
+}
+
+func (s strArgument) Value() (string, bool) {
+ if s.value == nil {
+ return "", false
+ }
+
+ return *s.value, true
+}
+
+// Options provides parser options that change the behavior of the option
+// parser.
+type Options uint
+
+const (
+ // None indicates no options.
+ None Options = 0
+
+ // HelpFlag adds a default Help Options group to the parser containing
+ // -h and --help options. When either -h or --help is specified on the
+ // command line, the parser will return the special error of type
+ // ErrHelp. When PrintErrors is also specified, then the help message
+ // will also be automatically printed to os.Stderr.
+ HelpFlag = 1 << iota
+
+ // PassDoubleDash passes all arguments after a double dash, --, as
+ // remaining command line arguments (i.e. they will not be parsed for
+ // flags).
+ PassDoubleDash
+
+ // IgnoreUnknown ignores any unknown options and passes them as
+ // remaining command line arguments instead of generating an error.
+ IgnoreUnknown
+
+ // PrintErrors prints any errors which occurred during parsing to
+ // os.Stderr.
+ PrintErrors
+
+ // PassAfterNonOption passes all arguments after the first non option
+ // as remaining command line arguments. This is equivalent to strict
+ // POSIX processing.
+ PassAfterNonOption
+
+ // Default is a convenient default set of options which should cover
+ // most of the uses of the flags package.
+ Default = HelpFlag | PrintErrors | PassDoubleDash
+)
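
A hedged sketch of combining these values in practice (the opts struct is made up for illustration): Options form a bitmask, and with IgnoreUnknown unrecognized flags are handed back with the remaining arguments instead of producing an ErrUnknownFlag error.

package main

import (
	"fmt"
	"os"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		Verbose []bool `short:"v" long:"verbose"`
	}

	// OR the desired behaviors together.
	p := flags.NewParser(&opts, flags.HelpFlag|flags.PassDoubleDash|flags.IgnoreUnknown)

	rest, err := p.ParseArgs(os.Args[1:])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	// Flags the struct does not define end up in rest rather than erroring.
	fmt.Println("remaining:", rest)
}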
+
+type parseState struct {
+ arg string
+ args []string
+ retargs []string
+ positional []*Arg
+ err error
+
+ command *Command
+ lookup lookup
+}
+
+// Parse is a convenience function to parse command line options with default
+// settings. The provided data is a pointer to a struct representing the
+// default option group (named "Application Options"). For more control, use
+// flags.NewParser.
+func Parse(data interface{}) ([]string, error) {
+ return NewParser(data, Default).Parse()
+}
+
+// ParseArgs is a convenience function to parse command line options with default
+// settings. The provided data is a pointer to a struct representing the
+// default option group (named "Application Options"). The args argument is
+// the list of command line arguments to parse. If you just want to parse the
+// default program command line arguments (i.e. os.Args), then use flags.Parse
+// instead. For more control, use flags.NewParser.
+func ParseArgs(data interface{}, args []string) ([]string, error) {
+ return NewParser(data, Default).ParseArgs(args)
+}
+
+// NewParser creates a new parser. It uses os.Args[0] as the application
+// name and then calls NewNamedParser (see NewNamedParser for
+// more details). The provided data is a pointer to a struct representing the
+// default option group (named "Application Options"), or nil if the default
+// group should not be added. The options parameter specifies a set of options
+// for the parser.
+func NewParser(data interface{}, options Options) *Parser {
+ p := NewNamedParser(path.Base(os.Args[0]), options)
+
+ if data != nil {
+ g, err := p.AddGroup("Application Options", "", data)
+
+ if err == nil {
+ g.parent = p
+ }
+
+ p.internalError = err
+ }
+
+ return p
+}
+
+// NewNamedParser creates a new parser. The appname is used to display the
+// executable name in the built-in help message. Option groups and commands can
+// be added to this parser by using AddGroup and AddCommand.
+func NewNamedParser(appname string, options Options) *Parser {
+ p := &Parser{
+ Command: newCommand(appname, "", "", nil),
+ Options: options,
+ NamespaceDelimiter: ".",
+ }
+
+ p.Command.parent = p
+
+ return p
+}
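
A hedged sketch of the hand-built variant (the group title and option names are made up): a named parser plus an explicitly added group, as an alternative to the default "Application Options" group that NewParser creates.

package main

import (
	"log"

	flags "github.com/jessevdk/go-flags"
)

var netOpts struct {
	Host string `long:"host" default:"localhost"`
	Port int    `long:"port" default:"8080"`
}

func main() {
	parser := flags.NewNamedParser("mytool", flags.Default)

	if _, err := parser.AddGroup("Network Options", "Listening address", &netOpts); err != nil {
		log.Fatal(err)
	}

	if _, err := parser.ParseArgs([]string{"--port", "9090"}); err != nil {
		log.Fatal(err)
	}

	log.Printf("host=%s port=%d", netOpts.Host, netOpts.Port) // host=localhost port=9090
}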
+
+// Parse parses the command line arguments from os.Args using Parser.ParseArgs.
+// For more detailed information see ParseArgs.
+func (p *Parser) Parse() ([]string, error) {
+ return p.ParseArgs(os.Args[1:])
+}
+
+// ParseArgs parses the command line arguments according to the option groups that
+// were added to the parser. On successful parsing of the arguments, the
+// remaining, non-option, arguments (if any) are returned. The returned error
+// indicates a parsing error and can be used with PrintError to display
+// contextual information on where the error occurred exactly.
+//
+// When the common help group has been added (AddHelp) and either -h or --help
+// was specified in the command line arguments, a help message will be
+// automatically printed if the PrintErrors option is enabled.
+// Furthermore, the special error type ErrHelp is returned.
+// It is up to the caller to exit the program if so desired.
+func (p *Parser) ParseArgs(args []string) ([]string, error) {
+ if p.internalError != nil {
+ return nil, p.internalError
+ }
+
+ p.eachOption(func(c *Command, g *Group, option *Option) {
+ option.isSet = false
+ option.updateDefaultLiteral()
+ })
+
+ // Add built-in help group to all commands if necessary
+ if (p.Options & HelpFlag) != None {
+ p.addHelpGroups(p.showBuiltinHelp)
+ }
+
+ compval := os.Getenv("GO_FLAGS_COMPLETION")
+
+ if len(compval) != 0 {
+ comp := &completion{parser: p}
+ items := comp.complete(args)
+
+ if p.CompletionHandler != nil {
+ p.CompletionHandler(items)
+ } else {
+ comp.print(items, compval == "verbose")
+ os.Exit(0)
+ }
+
+ return nil, nil
+ }
+
+ s := &parseState{
+ args: args,
+ retargs: make([]string, 0, len(args)),
+ }
+
+ p.fillParseState(s)
+
+ for !s.eof() {
+ arg := s.pop()
+
+ // When PassDoubleDash is set and we encounter a --, then
+ // simply append all the rest as arguments and break out
+ if (p.Options&PassDoubleDash) != None && arg == "--" {
+ s.addArgs(s.args...)
+ break
+ }
+
+ if !argumentIsOption(arg) {
+ // Note: this also sets s.err, so we can just check for
+ // nil here and use s.err later
+ if p.parseNonOption(s) != nil {
+ break
+ }
+
+ continue
+ }
+
+ var err error
+
+ prefix, optname, islong := stripOptionPrefix(arg)
+ optname, _, argument := splitOption(prefix, optname, islong)
+
+ if islong {
+ err = p.parseLong(s, optname, argument)
+ } else {
+ err = p.parseShort(s, optname, argument)
+ }
+
+ if err != nil {
+ ignoreUnknown := (p.Options & IgnoreUnknown) != None
+ parseErr := wrapError(err)
+
+ if parseErr.Type != ErrUnknownFlag || (!ignoreUnknown && p.UnknownOptionHandler == nil) {
+ s.err = parseErr
+ break
+ }
+
+ if ignoreUnknown {
+ s.addArgs(arg)
+ } else if p.UnknownOptionHandler != nil {
+ modifiedArgs, err := p.UnknownOptionHandler(optname, strArgument{argument}, s.args)
+
+ if err != nil {
+ s.err = err
+ break
+ }
+
+ s.args = modifiedArgs
+ }
+ }
+ }
+
+ if s.err == nil {
+ p.eachOption(func(c *Command, g *Group, option *Option) {
+ if option.preventDefault {
+ return
+ }
+
+ option.clearDefault()
+ })
+
+ s.checkRequired(p)
+ }
+
+ var reterr error
+
+ if s.err != nil {
+ reterr = s.err
+ } else if len(s.command.commands) != 0 && !s.command.SubcommandsOptional {
+ reterr = s.estimateCommand()
+ } else if cmd, ok := s.command.data.(Commander); ok {
+ reterr = cmd.Execute(s.retargs)
+ }
+
+ if reterr != nil {
+ var retargs []string
+
+ if ourErr, ok := reterr.(*Error); !ok || ourErr.Type != ErrHelp {
+ retargs = append([]string{s.arg}, s.args...)
+ } else {
+ retargs = s.args
+ }
+
+ return retargs, p.printError(reterr)
+ }
+
+ return s.retargs, nil
+}
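
A hedged sketch of the calling convention described in the comment above: with the Default options the error (or the help text) is already printed, so the caller only decides the exit status and special-cases ErrHelp (the opts struct is illustrative).

package main

import (
	"os"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		Verbose []bool `short:"v" long:"verbose"`
	}

	p := flags.NewParser(&opts, flags.Default)

	if _, err := p.ParseArgs(os.Args[1:]); err != nil {
		// Default includes PrintErrors, so the message or help text is
		// already on stderr; only the exit status remains to be chosen.
		if flagsErr, ok := err.(*flags.Error); ok && flagsErr.Type == flags.ErrHelp {
			os.Exit(0)
		}

		os.Exit(1)
	}
}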
+
+func (p *parseState) eof() bool {
+ return len(p.args) == 0
+}
+
+func (p *parseState) pop() string {
+ if p.eof() {
+ return ""
+ }
+
+ p.arg = p.args[0]
+ p.args = p.args[1:]
+
+ return p.arg
+}
+
+func (p *parseState) peek() string {
+ if p.eof() {
+ return ""
+ }
+
+ return p.args[0]
+}
+
+func (p *parseState) checkRequired(parser *Parser) error {
+ c := parser.Command
+
+ var required []*Option
+
+ for c != nil {
+ c.eachGroup(func(g *Group) {
+ for _, option := range g.options {
+ if !option.isSet && option.Required {
+ required = append(required, option)
+ }
+ }
+ })
+
+ c = c.Active
+ }
+
+ if len(required) == 0 {
+ if len(p.positional) > 0 {
+ var reqnames []string
+
+ for _, arg := range p.positional {
+ argRequired := (!arg.isRemaining() && p.command.ArgsRequired) || arg.Required != 0
+
+ if !argRequired {
+ continue
+ }
+
+ if arg.isRemaining() {
+ if arg.value.Len() < arg.Required {
+ var arguments string
+
+ if arg.Required > 1 {
+ arguments = "arguments, but got only " + fmt.Sprintf("%d", arg.value.Len())
+ } else {
+ arguments = "argument"
+ }
+
+ reqnames = append(reqnames, "`"+arg.Name+" (at least "+fmt.Sprintf("%d", arg.Required)+" "+arguments+")`")
+ }
+ } else {
+ reqnames = append(reqnames, "`"+arg.Name+"`")
+ }
+ }
+
+ if len(reqnames) == 0 {
+ return nil
+ }
+
+ var msg string
+
+ if len(reqnames) == 1 {
+ msg = fmt.Sprintf("the required argument %s was not provided", reqnames[0])
+ } else {
+ msg = fmt.Sprintf("the required arguments %s and %s were not provided",
+ strings.Join(reqnames[:len(reqnames)-1], ", "), reqnames[len(reqnames)-1])
+ }
+
+ p.err = newError(ErrRequired, msg)
+ return p.err
+ }
+
+ return nil
+ }
+
+ names := make([]string, 0, len(required))
+
+ for _, k := range required {
+ names = append(names, "`"+k.String()+"'")
+ }
+
+ sort.Strings(names)
+
+ var msg string
+
+ if len(names) == 1 {
+ msg = fmt.Sprintf("the required flag %s was not specified", names[0])
+ } else {
+ msg = fmt.Sprintf("the required flags %s and %s were not specified",
+ strings.Join(names[:len(names)-1], ", "), names[len(names)-1])
+ }
+
+ p.err = newError(ErrRequired, msg)
+ return p.err
+}
+
+func (p *parseState) estimateCommand() error {
+ commands := p.command.sortedVisibleCommands()
+ cmdnames := make([]string, len(commands))
+
+ for i, v := range commands {
+ cmdnames[i] = v.Name
+ }
+
+ var msg string
+ var errtype ErrorType
+
+ if len(p.retargs) != 0 {
+ c, l := closestChoice(p.retargs[0], cmdnames)
+ msg = fmt.Sprintf("Unknown command `%s'", p.retargs[0])
+ errtype = ErrUnknownCommand
+
+ if float32(l)/float32(len(c)) < 0.5 {
+ msg = fmt.Sprintf("%s, did you mean `%s'?", msg, c)
+ } else if len(cmdnames) == 1 {
+ msg = fmt.Sprintf("%s. You should use the %s command",
+ msg,
+ cmdnames[0])
+ } else {
+ msg = fmt.Sprintf("%s. Please specify one command of: %s or %s",
+ msg,
+ strings.Join(cmdnames[:len(cmdnames)-1], ", "),
+ cmdnames[len(cmdnames)-1])
+ }
+ } else {
+ errtype = ErrCommandRequired
+
+ if len(cmdnames) == 1 {
+ msg = fmt.Sprintf("Please specify the %s command", cmdnames[0])
+ } else {
+ msg = fmt.Sprintf("Please specify one command of: %s or %s",
+ strings.Join(cmdnames[:len(cmdnames)-1], ", "),
+ cmdnames[len(cmdnames)-1])
+ }
+ }
+
+ return newError(errtype, msg)
+}
+
+func (p *Parser) parseOption(s *parseState, name string, option *Option, canarg bool, argument *string) (err error) {
+ if !option.canArgument() {
+ if argument != nil {
+ return newErrorf(ErrNoArgumentForBool, "bool flag `%s' cannot have an argument", option)
+ }
+
+ err = option.set(nil)
+ } else if argument != nil || (canarg && !s.eof()) {
+ var arg string
+
+ if argument != nil {
+ arg = *argument
+ } else {
+ arg = s.pop()
+
+ if argumentIsOption(arg) {
+ return newErrorf(ErrExpectedArgument, "expected argument for flag `%s', but got option `%s'", option, arg)
+ } else if p.Options&PassDoubleDash != 0 && arg == "--" {
+ return newErrorf(ErrExpectedArgument, "expected argument for flag `%s', but got double dash `--'", option)
+ }
+ }
+
+ if option.tag.Get("unquote") != "false" {
+ arg, err = unquoteIfPossible(arg)
+ }
+
+ if err == nil {
+ err = option.set(&arg)
+ }
+ } else if option.OptionalArgument {
+ option.empty()
+
+ for _, v := range option.OptionalValue {
+ err = option.set(&v)
+
+ if err != nil {
+ break
+ }
+ }
+ } else {
+ err = newErrorf(ErrExpectedArgument, "expected argument for flag `%s'", option)
+ }
+
+ if err != nil {
+ if _, ok := err.(*Error); !ok {
+ err = newErrorf(ErrMarshal, "invalid argument for flag `%s' (expected %s): %s",
+ option,
+ option.value.Type(),
+ err.Error())
+ }
+ }
+
+ return err
+}
+
+func (p *Parser) parseLong(s *parseState, name string, argument *string) error {
+ if option := s.lookup.longNames[name]; option != nil {
+ // Only long options whose argument is not optional can consume an
+ // argument from the argument list
+ canarg := !option.OptionalArgument
+
+ return p.parseOption(s, name, option, canarg, argument)
+ }
+
+ return newErrorf(ErrUnknownFlag, "unknown flag `%s'", name)
+}
+
+func (p *Parser) splitShortConcatArg(s *parseState, optname string) (string, *string) {
+ c, n := utf8.DecodeRuneInString(optname)
+
+ if n == len(optname) {
+ return optname, nil
+ }
+
+ first := string(c)
+
+ if option := s.lookup.shortNames[first]; option != nil && option.canArgument() {
+ arg := optname[n:]
+ return first, &arg
+ }
+
+ return optname, nil
+}
+
+func (p *Parser) parseShort(s *parseState, optname string, argument *string) error {
+ if argument == nil {
+ optname, argument = p.splitShortConcatArg(s, optname)
+ }
+
+ for i, c := range optname {
+ shortname := string(c)
+
+ if option := s.lookup.shortNames[shortname]; option != nil {
+ // Only the last short option can consume an argument from
+ // the argument list, and only if its argument is not optional
+ canarg := (i+utf8.RuneLen(c) == len(optname)) && !option.OptionalArgument
+
+ if err := p.parseOption(s, shortname, option, canarg, argument); err != nil {
+ return err
+ }
+ } else {
+ return newErrorf(ErrUnknownFlag, "unknown flag `%s'", shortname)
+ }
+
+ // Only the first option can have a concatenated argument, so just
+ // clear argument here
+ argument = nil
+ }
+
+ return nil
+}
+
+func (p *parseState) addArgs(args ...string) error {
+ for len(p.positional) > 0 && len(args) > 0 {
+ arg := p.positional[0]
+
+ if err := convert(args[0], arg.value, arg.tag); err != nil {
+ return err
+ }
+
+ if !arg.isRemaining() {
+ p.positional = p.positional[1:]
+ }
+
+ args = args[1:]
+ }
+
+ p.retargs = append(p.retargs, args...)
+ return nil
+}
+
+func (p *Parser) parseNonOption(s *parseState) error {
+ if len(s.positional) > 0 {
+ return s.addArgs(s.arg)
+ }
+
+ if cmd := s.lookup.commands[s.arg]; cmd != nil {
+ s.command.Active = cmd
+ cmd.fillParseState(s)
+ } else if (p.Options & PassAfterNonOption) != None {
+ // If PassAfterNonOption is set then all remaining arguments
+ // are considered positional
+ if err := s.addArgs(s.arg); err != nil {
+ return err
+ }
+
+ if err := s.addArgs(s.args...); err != nil {
+ return err
+ }
+
+ s.args = []string{}
+ } else {
+ return s.addArgs(s.arg)
+ }
+
+ return nil
+}
+
+func (p *Parser) showBuiltinHelp() error {
+ var b bytes.Buffer
+
+ p.WriteHelp(&b)
+ return newError(ErrHelp, b.String())
+}
+
+func (p *Parser) printError(err error) error {
+ if err != nil && (p.Options&PrintErrors) != None {
+ fmt.Fprintln(os.Stderr, err)
+ }
+
+ return err
+}
+
+func (p *Parser) clearIsSet() {
+ p.eachCommand(func(c *Command) {
+ c.eachGroup(func(g *Group) {
+ for _, option := range g.options {
+ option.isSet = false
+ }
+ })
+ }, true)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/parser_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/parser_test.go
new file mode 100644
index 00000000000..b57dbee9a25
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/parser_test.go
@@ -0,0 +1,487 @@
+package flags
+
+import (
+ "fmt"
+ "os"
+ "reflect"
+ "strconv"
+ "strings"
+ "testing"
+ "time"
+)
+
+type defaultOptions struct {
+ Int int `long:"i"`
+ IntDefault int `long:"id" default:"1"`
+
+ Float64 float64 `long:"f"`
+ Float64Default float64 `long:"fd" default:"-3.14"`
+
+ NumericFlag bool `short:"3" default:"false"`
+
+ String string `long:"str"`
+ StringDefault string `long:"strd" default:"abc"`
+ StringNotUnquoted string `long:"strnot" unquote:"false"`
+
+ Time time.Duration `long:"t"`
+ TimeDefault time.Duration `long:"td" default:"1m"`
+
+ Map map[string]int `long:"m"`
+ MapDefault map[string]int `long:"md" default:"a:1"`
+
+ Slice []int `long:"s"`
+ SliceDefault []int `long:"sd" default:"1" default:"2"`
+}
+
+func TestDefaults(t *testing.T) {
+ var tests = []struct {
+ msg string
+ args []string
+ expected defaultOptions
+ }{
+ {
+ msg: "no arguments, expecting default values",
+ args: []string{},
+ expected: defaultOptions{
+ Int: 0,
+ IntDefault: 1,
+
+ Float64: 0.0,
+ Float64Default: -3.14,
+
+ NumericFlag: false,
+
+ String: "",
+ StringDefault: "abc",
+
+ Time: 0,
+ TimeDefault: time.Minute,
+
+ Map: map[string]int{},
+ MapDefault: map[string]int{"a": 1},
+
+ Slice: []int{},
+ SliceDefault: []int{1, 2},
+ },
+ },
+ {
+ msg: "non-zero value arguments, expecting overwritten arguments",
+ args: []string{"--i=3", "--id=3", "--f=-2.71", "--fd=2.71", "-3", "--str=def", "--strd=def", "--t=3ms", "--td=3ms", "--m=c:3", "--md=c:3", "--s=3", "--sd=3"},
+ expected: defaultOptions{
+ Int: 3,
+ IntDefault: 3,
+
+ Float64: -2.71,
+ Float64Default: 2.71,
+
+ NumericFlag: true,
+
+ String: "def",
+ StringDefault: "def",
+
+ Time: 3 * time.Millisecond,
+ TimeDefault: 3 * time.Millisecond,
+
+ Map: map[string]int{"c": 3},
+ MapDefault: map[string]int{"c": 3},
+
+ Slice: []int{3},
+ SliceDefault: []int{3},
+ },
+ },
+ {
+ msg: "zero value arguments, expecting overwritten arguments",
+ args: []string{"--i=0", "--id=0", "--f=0", "--fd=0", "--str", "", "--strd=\"\"", "--t=0ms", "--td=0s", "--m=:0", "--md=:0", "--s=0", "--sd=0"},
+ expected: defaultOptions{
+ Int: 0,
+ IntDefault: 0,
+
+ Float64: 0,
+ Float64Default: 0,
+
+ String: "",
+ StringDefault: "",
+
+ Time: 0,
+ TimeDefault: 0,
+
+ Map: map[string]int{"": 0},
+ MapDefault: map[string]int{"": 0},
+
+ Slice: []int{0},
+ SliceDefault: []int{0},
+ },
+ },
+ }
+
+ for _, test := range tests {
+ var opts defaultOptions
+
+ _, err := ParseArgs(&opts, test.args)
+ if err != nil {
+ t.Fatalf("%s:\nUnexpected error: %v", test.msg, err)
+ }
+
+ if opts.Slice == nil {
+ opts.Slice = []int{}
+ }
+
+ if !reflect.DeepEqual(opts, test.expected) {
+ t.Errorf("%s:\nUnexpected options with arguments %+v\nexpected\n%+v\nbut got\n%+v\n", test.msg, test.args, test.expected, opts)
+ }
+ }
+}
+
+func TestUnquoting(t *testing.T) {
+ var tests = []struct {
+ arg string
+ err error
+ value string
+ }{
+ {
+ arg: "\"abc",
+ err: strconv.ErrSyntax,
+ value: "",
+ },
+ {
+ arg: "\"\"abc\"",
+ err: strconv.ErrSyntax,
+ value: "",
+ },
+ {
+ arg: "\"abc\"",
+ err: nil,
+ value: "abc",
+ },
+ {
+ arg: "\"\\\"abc\\\"\"",
+ err: nil,
+ value: "\"abc\"",
+ },
+ {
+ arg: "\"\\\"abc\"",
+ err: nil,
+ value: "\"abc",
+ },
+ }
+
+ for _, test := range tests {
+ var opts defaultOptions
+
+ for _, delimiter := range []bool{false, true} {
+ p := NewParser(&opts, None)
+
+ var err error
+ if delimiter {
+ _, err = p.ParseArgs([]string{"--str=" + test.arg, "--strnot=" + test.arg})
+ } else {
+ _, err = p.ParseArgs([]string{"--str", test.arg, "--strnot", test.arg})
+ }
+
+ if test.err == nil {
+ if err != nil {
+ t.Fatalf("Expected no error but got: %v", err)
+ }
+
+ if test.value != opts.String {
+ t.Fatalf("Expected String to be %q but got %q", test.value, opts.String)
+ }
+ if q := strconv.Quote(test.value); q != opts.StringNotUnquoted {
+ t.Fatalf("Expected StringDefault to be %q but got %q", q, opts.StringNotUnquoted)
+ }
+ } else {
+ if err == nil {
+ t.Fatalf("Expected error")
+ } else if e, ok := err.(*Error); ok {
+ if !strings.HasSuffix(e.Message, test.err.Error()) {
+ t.Fatalf("Expected error message to end with %q but got %v", test.err.Error(), e.Message)
+ }
+ }
+ }
+ }
+ }
+}
+
+// envRestorer keeps a copy of a set of env variables and can restore the env from them
+type envRestorer struct {
+ env map[string]string
+}
+
+func (r *envRestorer) Restore() {
+ os.Clearenv()
+ for k, v := range r.env {
+ os.Setenv(k, v)
+ }
+}
+
+// EnvSnapshot returns a snapshot of the currently set env variables
+func EnvSnapshot() *envRestorer {
+ r := envRestorer{make(map[string]string)}
+ for _, kv := range os.Environ() {
+ parts := strings.SplitN(kv, "=", 2)
+ if len(parts) != 2 {
+ panic("got a weird env variable: " + kv)
+ }
+ r.env[parts[0]] = parts[1]
+ }
+ return &r
+}
+
+type envDefaultOptions struct {
+ Int int `long:"i" default:"1" env:"TEST_I"`
+ Time time.Duration `long:"t" default:"1m" env:"TEST_T"`
+ Map map[string]int `long:"m" default:"a:1" env:"TEST_M" env-delim:";"`
+ Slice []int `long:"s" default:"1" default:"2" env:"TEST_S" env-delim:","`
+}
+
+func TestEnvDefaults(t *testing.T) {
+ var tests = []struct {
+ msg string
+ args []string
+ expected envDefaultOptions
+ env map[string]string
+ }{
+ {
+ msg: "no arguments, no env, expecting default values",
+ args: []string{},
+ expected: envDefaultOptions{
+ Int: 1,
+ Time: time.Minute,
+ Map: map[string]int{"a": 1},
+ Slice: []int{1, 2},
+ },
+ },
+ {
+ msg: "no arguments, env defaults, expecting env default values",
+ args: []string{},
+ expected: envDefaultOptions{
+ Int: 2,
+ Time: 2 * time.Minute,
+ Map: map[string]int{"a": 2, "b": 3},
+ Slice: []int{4, 5, 6},
+ },
+ env: map[string]string{
+ "TEST_I": "2",
+ "TEST_T": "2m",
+ "TEST_M": "a:2;b:3",
+ "TEST_S": "4,5,6",
+ },
+ },
+ {
+ msg: "non-zero value arguments, expecting overwritten arguments",
+ args: []string{"--i=3", "--t=3ms", "--m=c:3", "--s=3"},
+ expected: envDefaultOptions{
+ Int: 3,
+ Time: 3 * time.Millisecond,
+ Map: map[string]int{"c": 3},
+ Slice: []int{3},
+ },
+ env: map[string]string{
+ "TEST_I": "2",
+ "TEST_T": "2m",
+ "TEST_M": "a:2;b:3",
+ "TEST_S": "4,5,6",
+ },
+ },
+ {
+ msg: "zero value arguments, expecting overwritten arguments",
+ args: []string{"--i=0", "--t=0ms", "--m=:0", "--s=0"},
+ expected: envDefaultOptions{
+ Int: 0,
+ Time: 0,
+ Map: map[string]int{"": 0},
+ Slice: []int{0},
+ },
+ env: map[string]string{
+ "TEST_I": "2",
+ "TEST_T": "2m",
+ "TEST_M": "a:2;b:3",
+ "TEST_S": "4,5,6",
+ },
+ },
+ }
+
+ oldEnv := EnvSnapshot()
+ defer oldEnv.Restore()
+
+ for _, test := range tests {
+ var opts envDefaultOptions
+ oldEnv.Restore()
+ for envKey, envValue := range test.env {
+ os.Setenv(envKey, envValue)
+ }
+ _, err := ParseArgs(&opts, test.args)
+ if err != nil {
+ t.Fatalf("%s:\nUnexpected error: %v", test.msg, err)
+ }
+
+ if opts.Slice == nil {
+ opts.Slice = []int{}
+ }
+
+ if !reflect.DeepEqual(opts, test.expected) {
+ t.Errorf("%s:\nUnexpected options with arguments %+v\nexpected\n%+v\nbut got\n%+v\n", test.msg, test.args, test.expected, opts)
+ }
+ }
+}
+
+func TestOptionAsArgument(t *testing.T) {
+ var tests = []struct {
+ args []string
+ expectError bool
+ errType ErrorType
+ errMsg string
+ rest []string
+ }{
+ {
+ // short option must not be accepted as argument
+ args: []string{"--string-slice", "foobar", "--string-slice", "-o"},
+ expectError: true,
+ errType: ErrExpectedArgument,
+ errMsg: "expected argument for flag `" + defaultLongOptDelimiter + "string-slice', but got option `-o'",
+ },
+ {
+ // long option must not be accepted as argument
+ args: []string{"--string-slice", "foobar", "--string-slice", "--other-option"},
+ expectError: true,
+ errType: ErrExpectedArgument,
+ errMsg: "expected argument for flag `" + defaultLongOptDelimiter + "string-slice', but got option `--other-option'",
+ },
+ {
+ // long option must not be accepted as argument
+ args: []string{"--string-slice", "--"},
+ expectError: true,
+ errType: ErrExpectedArgument,
+ errMsg: "expected argument for flag `" + defaultLongOptDelimiter + "string-slice', but got double dash `--'",
+ },
+ {
+ // quoted and appended option should be accepted as argument (even if it looks like an option)
+ args: []string{"--string-slice", "foobar", "--string-slice=\"--other-option\""},
+ },
+ {
+ // Accept any single character arguments including '-'
+ args: []string{"--string-slice", "-"},
+ },
+ {
+ // Do not accept arguments which start with '-' even if the next character is a digit
+ args: []string{"--string-slice", "-3.14"},
+ expectError: true,
+ errType: ErrExpectedArgument,
+ errMsg: "expected argument for flag `" + defaultLongOptDelimiter + "string-slice', but got option `-3.14'",
+ },
+ {
+ // Do not accept arguments which start with '-' if the next character is not a digit
+ args: []string{"--string-slice", "-character"},
+ expectError: true,
+ errType: ErrExpectedArgument,
+ errMsg: "expected argument for flag `" + defaultLongOptDelimiter + "string-slice', but got option `-character'",
+ },
+ {
+ args: []string{"-o", "-", "-"},
+ rest: []string{"-", "-"},
+ },
+ }
+ var opts struct {
+ StringSlice []string `long:"string-slice"`
+ OtherOption bool `long:"other-option" short:"o"`
+ }
+
+ for _, test := range tests {
+ if test.expectError {
+ assertParseFail(t, test.errType, test.errMsg, &opts, test.args...)
+ } else {
+ args := assertParseSuccess(t, &opts, test.args...)
+
+ assertStringArray(t, args, test.rest)
+ }
+ }
+}
+
+func TestUnknownFlagHandler(t *testing.T) {
+
+ var opts struct {
+ Flag1 string `long:"flag1"`
+ Flag2 string `long:"flag2"`
+ }
+
+ p := NewParser(&opts, None)
+
+ var unknownFlag1 string
+ var unknownFlag2 bool
+ var unknownFlag3 string
+
+ // Set up a callback to intercept unknown options during parsing
+ p.UnknownOptionHandler = func(option string, arg SplitArgument, args []string) ([]string, error) {
+ if option == "unknownFlag1" {
+ if argValue, ok := arg.Value(); ok {
+ unknownFlag1 = argValue
+ return args, nil
+ }
+ // consume a value from remaining args list
+ unknownFlag1 = args[0]
+ return args[1:], nil
+ } else if option == "unknownFlag2" {
+ // treat this one as a bool switch, don't consume any args
+ unknownFlag2 = true
+ return args, nil
+ } else if option == "unknownFlag3" {
+ if argValue, ok := arg.Value(); ok {
+ unknownFlag3 = argValue
+ return args, nil
+ }
+ // consume a value from remaining args list
+ unknownFlag3 = args[0]
+ return args[1:], nil
+ }
+
+ return args, fmt.Errorf("Unknown flag: %v", option)
+ }
+
+ // Parse args containing some unknown flags, verify that
+ // our callback can handle all of them
+ _, err := p.ParseArgs([]string{"--flag1=stuff", "--unknownFlag1", "blah", "--unknownFlag2", "--unknownFlag3=baz", "--flag2=foo"})
+
+ if err != nil {
+ assertErrorf(t, "Parser returned unexpected error %v", err)
+ }
+
+ assertString(t, opts.Flag1, "stuff")
+ assertString(t, opts.Flag2, "foo")
+ assertString(t, unknownFlag1, "blah")
+ assertString(t, unknownFlag3, "baz")
+
+ if !unknownFlag2 {
+ assertErrorf(t, "Flag should have been set by unknown handler, but had value: %v", unknownFlag2)
+ }
+
+ // Parse args with unknown flags that callback doesn't handle, verify it returns error
+ _, err = p.ParseArgs([]string{"--flag1=stuff", "--unknownFlagX", "blah", "--flag2=foo"})
+
+ if err == nil {
+ assertErrorf(t, "Parser should have returned error, but returned nil")
+ }
+}
+
+func TestChoices(t *testing.T) {
+ var opts struct {
+ Choice string `long:"choose" choice:"v1" choice:"v2"`
+ }
+
+ assertParseFail(t, ErrInvalidChoice, "Invalid value `invalid' for option `"+defaultLongOptDelimiter+"choose'. Allowed values are: v1 or v2", &opts, "--choose", "invalid")
+ assertParseSuccess(t, &opts, "--choose", "v2")
+ assertString(t, opts.Choice, "v2")
+}
+
+func TestEmbedded(t *testing.T) {
+ type embedded struct {
+ V bool `short:"v"`
+ }
+ var opts struct {
+ embedded
+ }
+
+ assertParseSuccess(t, &opts, "-v")
+ if !opts.V {
+ t.Errorf("Expected V to be true")
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/pointer_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/pointer_test.go
new file mode 100644
index 00000000000..e17445f696c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/pointer_test.go
@@ -0,0 +1,81 @@
+package flags
+
+import (
+ "testing"
+)
+
+func TestPointerBool(t *testing.T) {
+ var opts = struct {
+ Value *bool `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v")
+
+ assertStringArray(t, ret, []string{})
+
+ if !*opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+}
+
+func TestPointerString(t *testing.T) {
+ var opts = struct {
+ Value *string `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v", "value")
+
+ assertStringArray(t, ret, []string{})
+ assertString(t, *opts.Value, "value")
+}
+
+func TestPointerSlice(t *testing.T) {
+ var opts = struct {
+ Value *[]string `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v", "value1", "-v", "value2")
+
+ assertStringArray(t, ret, []string{})
+ assertStringArray(t, *opts.Value, []string{"value1", "value2"})
+}
+
+func TestPointerMap(t *testing.T) {
+ var opts = struct {
+ Value *map[string]int `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v", "k1:2", "-v", "k2:-5")
+
+ assertStringArray(t, ret, []string{})
+
+ if v, ok := (*opts.Value)["k1"]; !ok {
+ t.Errorf("Expected key \"k1\" to exist")
+ } else if v != 2 {
+ t.Errorf("Expected \"k1\" to be 2, but got %#v", v)
+ }
+
+ if v, ok := (*opts.Value)["k2"]; !ok {
+ t.Errorf("Expected key \"k2\" to exist")
+ } else if v != -5 {
+ t.Errorf("Expected \"k2\" to be -5, but got %#v", v)
+ }
+}
+
+type PointerGroup struct {
+ Value bool `short:"v"`
+}
+
+func TestPointerGroup(t *testing.T) {
+ var opts = struct {
+ Group *PointerGroup `group:"Group Options"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v")
+
+ assertStringArray(t, ret, []string{})
+
+ if !opts.Group.Value {
+ t.Errorf("Expected Group.Value to be true")
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/short_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/short_test.go
new file mode 100644
index 00000000000..95712c16238
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/short_test.go
@@ -0,0 +1,194 @@
+package flags
+
+import (
+ "fmt"
+ "testing"
+)
+
+func TestShort(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v")
+
+ assertStringArray(t, ret, []string{})
+
+ if !opts.Value {
+ t.Errorf("Expected Value to be true")
+ }
+}
+
+func TestShortTooLong(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"vv"`
+ }{}
+
+ assertParseFail(t, ErrShortNameTooLong, "short names can only be 1 character long, not `vv'", &opts)
+}
+
+func TestShortRequired(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v" required:"true"`
+ }{}
+
+ assertParseFail(t, ErrRequired, fmt.Sprintf("the required flag `%cv' was not specified", defaultShortOptDelimiter), &opts)
+}
+
+func TestShortMultiConcat(t *testing.T) {
+ var opts = struct {
+ V bool `short:"v"`
+ O bool `short:"o"`
+ F bool `short:"f"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-vo", "-f")
+
+ assertStringArray(t, ret, []string{})
+
+ if !opts.V {
+ t.Errorf("Expected V to be true")
+ }
+
+ if !opts.O {
+ t.Errorf("Expected O to be true")
+ }
+
+ if !opts.F {
+ t.Errorf("Expected F to be true")
+ }
+}
+
+func TestShortMultiRequiredConcat(t *testing.T) {
+ var opts = struct {
+ V bool `short:"v" required:"true"`
+ O bool `short:"o" required:"true"`
+ F bool `short:"f" required:"true"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-vo", "-f")
+
+ assertStringArray(t, ret, []string{})
+
+ if !opts.V {
+ t.Errorf("Expected V to be true")
+ }
+
+ if !opts.O {
+ t.Errorf("Expected O to be true")
+ }
+
+ if !opts.F {
+ t.Errorf("Expected F to be true")
+ }
+}
+
+func TestShortMultiSlice(t *testing.T) {
+ var opts = struct {
+ Values []bool `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v", "-v")
+
+ assertStringArray(t, ret, []string{})
+ assertBoolArray(t, opts.Values, []bool{true, true})
+}
+
+func TestShortMultiSliceConcat(t *testing.T) {
+ var opts = struct {
+ Values []bool `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-vvv")
+
+ assertStringArray(t, ret, []string{})
+ assertBoolArray(t, opts.Values, []bool{true, true, true})
+}
+
+func TestShortWithEqualArg(t *testing.T) {
+ var opts = struct {
+ Value string `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v=value")
+
+ assertStringArray(t, ret, []string{})
+ assertString(t, opts.Value, "value")
+}
+
+func TestShortWithArg(t *testing.T) {
+ var opts = struct {
+ Value string `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-vvalue")
+
+ assertStringArray(t, ret, []string{})
+ assertString(t, opts.Value, "value")
+}
+
+func TestShortArg(t *testing.T) {
+ var opts = struct {
+ Value string `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-v", "value")
+
+ assertStringArray(t, ret, []string{})
+ assertString(t, opts.Value, "value")
+}
+
+func TestShortMultiWithEqualArg(t *testing.T) {
+ var opts = struct {
+ F []bool `short:"f"`
+ Value string `short:"v"`
+ }{}
+
+ assertParseFail(t, ErrExpectedArgument, fmt.Sprintf("expected argument for flag `%cv'", defaultShortOptDelimiter), &opts, "-ffv=value")
+}
+
+func TestShortMultiArg(t *testing.T) {
+ var opts = struct {
+ F []bool `short:"f"`
+ Value string `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-ffv", "value")
+
+ assertStringArray(t, ret, []string{})
+ assertBoolArray(t, opts.F, []bool{true, true})
+ assertString(t, opts.Value, "value")
+}
+
+func TestShortMultiArgConcatFail(t *testing.T) {
+ var opts = struct {
+ F []bool `short:"f"`
+ Value string `short:"v"`
+ }{}
+
+ assertParseFail(t, ErrExpectedArgument, fmt.Sprintf("expected argument for flag `%cv'", defaultShortOptDelimiter), &opts, "-ffvvalue")
+}
+
+func TestShortMultiArgConcat(t *testing.T) {
+ var opts = struct {
+ F []bool `short:"f"`
+ Value string `short:"v"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-vff")
+
+ assertStringArray(t, ret, []string{})
+ assertString(t, opts.Value, "ff")
+}
+
+func TestShortOptional(t *testing.T) {
+ var opts = struct {
+ F []bool `short:"f"`
+ Value string `short:"v" optional:"yes" optional-value:"value"`
+ }{}
+
+ ret := assertParseSuccess(t, &opts, "-fv", "f")
+
+ assertStringArray(t, ret, []string{"f"})
+ assertString(t, opts.Value, "value")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/tag_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/tag_test.go
new file mode 100644
index 00000000000..9daa7401b91
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/tag_test.go
@@ -0,0 +1,38 @@
+package flags
+
+import (
+ "testing"
+)
+
+func TestTagMissingColon(t *testing.T) {
+ var opts = struct {
+ Value bool `short`
+ }{}
+
+ assertParseFail(t, ErrTag, "expected `:' after key name, but got end of tag (in `short`)", &opts, "")
+}
+
+func TestTagMissingValue(t *testing.T) {
+ var opts = struct {
+ Value bool `short:`
+ }{}
+
+ assertParseFail(t, ErrTag, "expected `\"' to start tag value at end of tag (in `short:`)", &opts, "")
+}
+
+func TestTagMissingQuote(t *testing.T) {
+ var opts = struct {
+ Value bool `short:"v`
+ }{}
+
+ assertParseFail(t, ErrTag, "expected end of tag value `\"' at end of tag (in `short:\"v`)", &opts, "")
+}
+
+func TestTagNewline(t *testing.T) {
+ var opts = struct {
+ Value bool `long:"verbose" description:"verbose
+something"`
+ }{}
+
+ assertParseFail(t, ErrTag, "unexpected newline in tag value `description' (in `long:\"verbose\" description:\"verbose\nsomething\"`)", &opts, "")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize.go
new file mode 100644
index 00000000000..df97e7e821d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize.go
@@ -0,0 +1,28 @@
+// +build !windows,!plan9,!solaris
+
+package flags
+
+import (
+ "syscall"
+ "unsafe"
+)
+
+type winsize struct {
+ row, col uint16
+ xpixel, ypixel uint16
+}
+
+func getTerminalColumns() int {
+ ws := winsize{}
+
+ if tIOCGWINSZ != 0 {
+ syscall.Syscall(syscall.SYS_IOCTL,
+ uintptr(0),
+ uintptr(tIOCGWINSZ),
+ uintptr(unsafe.Pointer(&ws)))
+
+ return int(ws.col)
+ }
+
+ return 80
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_linux.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_linux.go
new file mode 100644
index 00000000000..e3975e2835f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_linux.go
@@ -0,0 +1,7 @@
+// +build linux
+
+package flags
+
+const (
+ tIOCGWINSZ = 0x5413
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_nosysioctl.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_nosysioctl.go
new file mode 100644
index 00000000000..2a9bbe005cb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_nosysioctl.go
@@ -0,0 +1,7 @@
+// +build windows plan9 solaris
+
+package flags
+
+func getTerminalColumns() int {
+ return 80
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_other.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_other.go
new file mode 100644
index 00000000000..308215155ea
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_other.go
@@ -0,0 +1,7 @@
+// +build !darwin,!freebsd,!netbsd,!openbsd,!linux
+
+package flags
+
+const (
+ tIOCGWINSZ = 0
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_unix.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_unix.go
new file mode 100644
index 00000000000..fcc11860101
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/termsize_unix.go
@@ -0,0 +1,7 @@
+// +build darwin freebsd netbsd openbsd
+
+package flags
+
+const (
+ tIOCGWINSZ = 0x40087468
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/unknown_test.go b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/unknown_test.go
new file mode 100644
index 00000000000..858be45885e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jessevdk/go-flags/unknown_test.go
@@ -0,0 +1,66 @@
+package flags
+
+import (
+ "testing"
+)
+
+func TestUnknownFlags(t *testing.T) {
+ var opts = struct {
+ Verbose []bool `short:"v" long:"verbose" description:"Verbose output"`
+ }{}
+
+ args := []string{
+ "-f",
+ }
+
+ p := NewParser(&opts, 0)
+ args, err := p.ParseArgs(args)
+
+ if err == nil {
+ t.Fatal("Expected error for unknown argument")
+ }
+}
+
+func TestIgnoreUnknownFlags(t *testing.T) {
+ var opts = struct {
+ Verbose []bool `short:"v" long:"verbose" description:"Verbose output"`
+ }{}
+
+ args := []string{
+ "hello",
+ "world",
+ "-v",
+ "--foo=bar",
+ "--verbose",
+ "-f",
+ }
+
+ p := NewParser(&opts, IgnoreUnknown)
+ args, err := p.ParseArgs(args)
+
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ exargs := []string{
+ "hello",
+ "world",
+ "--foo=bar",
+ "-f",
+ }
+
+ issame := (len(args) == len(exargs))
+
+ if issame {
+ for i := 0; i < len(args); i++ {
+ if args[i] != exargs[i] {
+ issame = false
+ break
+ }
+ }
+ }
+
+ if !issame {
+ t.Fatalf("Expected %v but got %v", exargs, args)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jtolds/gls/LICENSE b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/LICENSE
new file mode 100644
index 00000000000..9b4a822d92c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) 2013, Space Monkey, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/src/mongo/gotools/vendor/src/github.com/jtolds/gls/README.md b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/README.md
new file mode 100644
index 00000000000..4ebb692fb18
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/README.md
@@ -0,0 +1,89 @@
+gls
+===
+
+Goroutine local storage
+
+### IMPORTANT NOTE ###
+
+It is my duty to point you to https://blog.golang.org/context, which is how
+Google solves all of the problems you'd perhaps consider using this package
+for at scale.
+
+One downside to Google's approach is that *all* of your functions must have
+a new first argument, but after clearing that hurdle everything else is much
+better.
+
+If you aren't interested in this warning, read on.
+
+### Huhwaht? Why? ###
+
+Every so often, a thread shows up on the
+[golang-nuts](https://groups.google.com/d/forum/golang-nuts) mailing list asking for some
+form of goroutine-local-storage, or some kind of goroutine id, or some kind of
+context. There are a few valid use cases for goroutine-local-storage, one of
+the most prominent being log line context. One poster was interested in being
+able to log an HTTP request context id in every log line in the same goroutine
+as the incoming HTTP request, without having to change every library and
+function call he was interested in logging.
+
+This would be pretty useful. Provided that you could get some kind of
+goroutine-local-storage, you could call
+[log.SetOutput](http://golang.org/pkg/log/#SetOutput) with your own logging
+writer that checks goroutine-local-storage for some context information and
+adds that context to your log lines.
+
+But alas, Andrew Gerrand's typically diplomatic answer to the question of
+goroutine-local variables was:
+
+> We wouldn't even be having this discussion if thread local storage wasn't
+> useful. But every feature comes at a cost, and in my opinion the cost of
+> threadlocals far outweighs their benefits. They're just not a good fit for
+> Go.
+
+So, yeah, that makes sense. That's a pretty good reason for why the language
+won't support a specific and (relatively) unuseful feature that requires some
+runtime changes, just for the sake of a little bit of log improvement.
+
+But does Go require runtime changes?
+
+### How it works ###
+
+Go has pretty fantastic introspective and reflective features, but one thing Go
+doesn't give you is any kind of access to the stack pointer, or frame pointer,
+or goroutine id, or anything contextual about your current stack. It gives you
+access to your list of callers, but only along with program counters, which are
+fixed at compile time.
+
+But it does give you the stack.
+
+So, we define 16 special functions and embed base-16 tags into the stack using
+the call order of those 16 functions. Then, we can read our tags back out of
+the stack by looking at the callers list.
+
+We then use these tags as an index into a traditional map for implementing
+this library.
+
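+A stripped-down sketch of that mechanism, using a base-2 alphabet of two mark
+functions instead of the real sixteen (every name below is illustrative, not
+the actual implementation):
+
+```go
+package main
+
+import (
+	"fmt"
+	"reflect"
+	"runtime"
+)
+
+// Two mark functions stand in for the library's sixteen digit functions.
+//go:noinline
+func mark0(rest uint, cb func()) { encode(rest, cb) }
+
+//go:noinline
+func mark1(rest uint, cb func()) { encode(rest, cb) }
+
+// digits maps each mark function's entry point back to the bit it encodes.
+var digits = map[uintptr]uint{
+	reflect.ValueOf(mark0).Pointer(): 0,
+	reflect.ValueOf(mark1).Pointer(): 1,
+}
+
+// encode embeds the bits of tag into the call stack (lowest bit outermost)
+// and finally invokes cb at the innermost frame.
+//go:noinline
+func encode(tag uint, cb func()) {
+	if tag == 0 {
+		cb()
+		return
+	}
+	if tag&1 == 0 {
+		mark0(tag>>1, cb)
+	} else {
+		mark1(tag>>1, cb)
+	}
+}
+
+// decode walks the callers list from the innermost frame outward and
+// reassembles whatever bits it recognizes.
+func decode() (tag uint) {
+	pcs := make([]uintptr, 64)
+	n := runtime.Callers(0, pcs)
+	for _, pc := range pcs[:n] {
+		if d, ok := digits[runtime.FuncForPC(pc).Entry()]; ok {
+			tag = tag<<1 | d
+		}
+	}
+	return tag
+}
+
+func main() {
+	encode(6, func() { fmt.Println("decoded tag:", decode()) }) // prints "decoded tag: 6"
+}
+```
+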
+### What are people saying? ###
+
+"Wow, that's horrifying."
+
+"This is the most terrible thing I have seen in a very long time."
+
+"Where is it getting a context from? Is this serializing all the requests?
+What the heck is the client being bound to? What are these tags? Why does he
+need callers? Oh god no. No no no."
+
+### Docs ###
+
+Please see the docs at http://godoc.org/github.com/jtolds/gls
+
+### Related ###
+
+If you're okay relying on the string format of the current runtime stacktrace
+including a unique goroutine id (not guaranteed by the spec or anything, but
+very unlikely to change within a Go release), you might be able to squeeze
+out a bit more performance by using this similar library, inspired by some
+code Brad Fitzpatrick wrote for debugging his HTTP/2 library:
+https://github.com/tylerb/gls (in contrast, jtolds/gls doesn't rely on the
+string format of the runtime stacktrace at all, though that robustness
+probably costs some extra overhead).
diff --git a/src/mongo/gotools/vendor/src/github.com/jtolds/gls/context.go b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/context.go
new file mode 100644
index 00000000000..90cfcf7db17
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/context.go
@@ -0,0 +1,144 @@
+// Package gls implements goroutine-local storage.
+package gls
+
+import (
+ "sync"
+)
+
+const (
+ maxCallers = 64
+)
+
+var (
+ stackTagPool = &idPool{}
+ mgrRegistry = make(map[*ContextManager]bool)
+ mgrRegistryMtx sync.RWMutex
+)
+
+// Values is simply a map of key types to value types. Used by SetValues to
+// set multiple values at once.
+type Values map[interface{}]interface{}
+
+// ContextManager is the main entrypoint for interacting with
+// Goroutine-local-storage. You can have multiple independent ContextManagers
+// at any given time. ContextManagers are usually declared globally for a given
+// class of context variables. You should use NewContextManager for
+// construction.
+type ContextManager struct {
+ mtx sync.RWMutex
+ values map[uint]Values
+}
+
+// NewContextManager returns a brand new ContextManager. It also registers the
+// new ContextManager in the ContextManager registry which is used by the Go
+// method. ContextManagers are typically defined globally at package scope.
+func NewContextManager() *ContextManager {
+ mgr := &ContextManager{values: make(map[uint]Values)}
+ mgrRegistryMtx.Lock()
+ defer mgrRegistryMtx.Unlock()
+ mgrRegistry[mgr] = true
+ return mgr
+}
+
+// Unregister removes a ContextManager from the global registry, used by the
+// Go method. Only intended for use when you're completely done with a
+// ContextManager. Use of Unregister at all is rare.
+func (m *ContextManager) Unregister() {
+ mgrRegistryMtx.Lock()
+ defer mgrRegistryMtx.Unlock()
+ delete(mgrRegistry, m)
+}
+
+// SetValues takes a collection of values and a function to call for those
+// values to be set in. Anything further down the stack will have the set
+// values available through GetValue. SetValues will add new values or replace
+// existing values of the same key and will not mutate or change values for
+// previous stack frames.
+// SetValues is slow (makes a copy of all current and new values for the new
+// gls-context) in order to reduce the amount of lookups GetValue requires.
+func (m *ContextManager) SetValues(new_values Values, context_call func()) {
+ if len(new_values) == 0 {
+ context_call()
+ return
+ }
+
+ tags := readStackTags(1)
+
+ m.mtx.Lock()
+ values := new_values
+ for _, tag := range tags {
+ if existing_values, ok := m.values[tag]; ok {
+ // oh, we found existing values, let's make a copy
+ values = make(Values, len(existing_values)+len(new_values))
+ for key, val := range existing_values {
+ values[key] = val
+ }
+ for key, val := range new_values {
+ values[key] = val
+ }
+ break
+ }
+ }
+ new_tag := stackTagPool.Acquire()
+ m.values[new_tag] = values
+ m.mtx.Unlock()
+ defer func() {
+ m.mtx.Lock()
+ delete(m.values, new_tag)
+ m.mtx.Unlock()
+ stackTagPool.Release(new_tag)
+ }()
+
+ addStackTag(new_tag, context_call)
+}
+
+// GetValue will return a previously set value, provided that the value was set
+// by SetValues somewhere higher up the stack. If the value is not found, ok
+// will be false.
+func (m *ContextManager) GetValue(key interface{}) (value interface{}, ok bool) {
+
+ tags := readStackTags(1)
+ m.mtx.RLock()
+ defer m.mtx.RUnlock()
+ for _, tag := range tags {
+ if values, ok := m.values[tag]; ok {
+ value, ok := values[key]
+ return value, ok
+ }
+ }
+ return "", false
+}
+
+func (m *ContextManager) getValues() Values {
+ tags := readStackTags(2)
+ m.mtx.RLock()
+ defer m.mtx.RUnlock()
+ for _, tag := range tags {
+ if values, ok := m.values[tag]; ok {
+ return values
+ }
+ }
+ return nil
+}
+
+// Go preserves ContextManager values and Goroutine-local-storage across new
+// goroutine invocations. The Go method makes a copy of all existing values on
+// all registered context managers and makes sure they are still set after
+// kicking off the provided function in a new goroutine. If you don't use this
+// Go method instead of the standard 'go' keyword, you will lose values in
+// ContextManagers, as goroutines have brand new stacks.
+func Go(cb func()) {
+ mgrRegistryMtx.RLock()
+ defer mgrRegistryMtx.RUnlock()
+
+ for mgr, _ := range mgrRegistry {
+ values := mgr.getValues()
+ if len(values) > 0 {
+ mgr_copy := mgr
+ cb_copy := cb
+ cb = func() { mgr_copy.SetValues(values, cb_copy) }
+ }
+ }
+
+ go cb()
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jtolds/gls/context_test.go b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/context_test.go
new file mode 100644
index 00000000000..ae5bde4aede
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/context_test.go
@@ -0,0 +1,139 @@
+package gls
+
+import (
+ "fmt"
+ "sync"
+ "testing"
+)
+
+func TestContexts(t *testing.T) {
+ mgr1 := NewContextManager()
+ mgr2 := NewContextManager()
+
+ CheckVal := func(mgr *ContextManager, key, exp_val string) {
+ val, ok := mgr.GetValue(key)
+ if len(exp_val) == 0 {
+ if ok {
+ t.Fatalf("expected no value for key %s, got %s", key, val)
+ }
+ return
+ }
+ if !ok {
+ t.Fatalf("expected value %s for key %s, got no value",
+ exp_val, key)
+ }
+ if exp_val != val {
+ t.Fatalf("expected value %s for key %s, got %s", exp_val, key,
+ val)
+ }
+
+ }
+
+ Check := func(exp_m1v1, exp_m1v2, exp_m2v1, exp_m2v2 string) {
+ CheckVal(mgr1, "key1", exp_m1v1)
+ CheckVal(mgr1, "key2", exp_m1v2)
+ CheckVal(mgr2, "key1", exp_m2v1)
+ CheckVal(mgr2, "key2", exp_m2v2)
+ }
+
+ Check("", "", "", "")
+ mgr2.SetValues(Values{"key1": "val1c"}, func() {
+ Check("", "", "val1c", "")
+ mgr1.SetValues(Values{"key1": "val1a"}, func() {
+ Check("val1a", "", "val1c", "")
+ mgr1.SetValues(Values{"key2": "val1b"}, func() {
+ Check("val1a", "val1b", "val1c", "")
+ var wg sync.WaitGroup
+ wg.Add(2)
+ go func() {
+ defer wg.Done()
+ Check("", "", "", "")
+ }()
+ Go(func() {
+ defer wg.Done()
+ Check("val1a", "val1b", "val1c", "")
+ })
+ wg.Wait()
+ })
+ })
+ })
+}
+
+func ExampleContextManager_SetValues() {
+ var (
+ mgr = NewContextManager()
+ request_id_key = GenSym()
+ )
+
+ MyLog := func() {
+ if request_id, ok := mgr.GetValue(request_id_key); ok {
+ fmt.Println("My request id is:", request_id)
+ } else {
+ fmt.Println("No request id found")
+ }
+ }
+
+ mgr.SetValues(Values{request_id_key: "12345"}, func() {
+ MyLog()
+ })
+ MyLog()
+
+ // Output: My request id is: 12345
+ // No request id found
+}
+
+func ExampleGo() {
+ var (
+ mgr = NewContextManager()
+ request_id_key = GenSym()
+ )
+
+ MyLog := func() {
+ if request_id, ok := mgr.GetValue(request_id_key); ok {
+ fmt.Println("My request id is:", request_id)
+ } else {
+ fmt.Println("No request id found")
+ }
+ }
+
+ mgr.SetValues(Values{request_id_key: "12345"}, func() {
+ var wg sync.WaitGroup
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ MyLog()
+ }()
+ wg.Wait()
+ wg.Add(1)
+ Go(func() {
+ defer wg.Done()
+ MyLog()
+ })
+ wg.Wait()
+ })
+
+ // Output: No request id found
+ // My request id is: 12345
+}
+
+func BenchmarkGetValue(b *testing.B) {
+ mgr := NewContextManager()
+ mgr.SetValues(Values{"test_key": "test_val"}, func() {
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ val, ok := mgr.GetValue("test_key")
+ if !ok || val != "test_val" {
+ b.FailNow()
+ }
+ }
+ })
+}
+
+func BenchmarkSetValues(b *testing.B) {
+ mgr := NewContextManager()
+ for i := 0; i < b.N/2; i++ {
+ mgr.SetValues(Values{"test_key": "test_val"}, func() {
+ mgr.SetValues(Values{"test_key2": "test_val2"}, func() {})
+ })
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jtolds/gls/gen_sym.go b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/gen_sym.go
new file mode 100644
index 00000000000..8d5fc24d4a4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/gen_sym.go
@@ -0,0 +1,13 @@
+package gls
+
+var (
+ symPool = &idPool{}
+)
+
+// ContextKey is a throwaway value you can use as a key to a ContextManager
+type ContextKey struct{ id uint }
+
+// GenSym will return a brand new, never-before-used ContextKey
+func GenSym() ContextKey {
+ return ContextKey{id: symPool.Acquire()}
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jtolds/gls/id_pool.go b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/id_pool.go
new file mode 100644
index 00000000000..b7974ae0026
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/id_pool.go
@@ -0,0 +1,34 @@
+package gls
+
+// though this could probably be better at keeping ids smaller, the goal of
+// this class is to keep a registry of the smallest unique integer ids
+// per-process possible
+
+import (
+ "sync"
+)
+
+type idPool struct {
+ mtx sync.Mutex
+ released []uint
+ max_id uint
+}
+
+func (p *idPool) Acquire() (id uint) {
+ p.mtx.Lock()
+ defer p.mtx.Unlock()
+ if len(p.released) > 0 {
+ id = p.released[len(p.released)-1]
+ p.released = p.released[:len(p.released)-1]
+ return id
+ }
+ id = p.max_id
+ p.max_id++
+ return id
+}
+
+func (p *idPool) Release(id uint) {
+ p.mtx.Lock()
+ defer p.mtx.Unlock()
+ p.released = append(p.released, id)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags.go b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags.go
new file mode 100644
index 00000000000..9b8e39ba7c2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags.go
@@ -0,0 +1,43 @@
+package gls
+
+// so, basically, we're going to encode integer tags in base-16 on the stack
+
+const (
+ bitWidth = 4
+)
+
+func addStackTag(tag uint, context_call func()) {
+ if context_call == nil {
+ return
+ }
+ markS(tag, context_call)
+}
+
+func markS(tag uint, cb func()) { _m(tag, cb) }
+func mark0(tag uint, cb func()) { _m(tag, cb) }
+func mark1(tag uint, cb func()) { _m(tag, cb) }
+func mark2(tag uint, cb func()) { _m(tag, cb) }
+func mark3(tag uint, cb func()) { _m(tag, cb) }
+func mark4(tag uint, cb func()) { _m(tag, cb) }
+func mark5(tag uint, cb func()) { _m(tag, cb) }
+func mark6(tag uint, cb func()) { _m(tag, cb) }
+func mark7(tag uint, cb func()) { _m(tag, cb) }
+func mark8(tag uint, cb func()) { _m(tag, cb) }
+func mark9(tag uint, cb func()) { _m(tag, cb) }
+func markA(tag uint, cb func()) { _m(tag, cb) }
+func markB(tag uint, cb func()) { _m(tag, cb) }
+func markC(tag uint, cb func()) { _m(tag, cb) }
+func markD(tag uint, cb func()) { _m(tag, cb) }
+func markE(tag uint, cb func()) { _m(tag, cb) }
+func markF(tag uint, cb func()) { _m(tag, cb) }
+
+var pc_lookup = make(map[uintptr]int8, 17)
+var mark_lookup [16]func(uint, func())
+
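+// _m peels off the lowest base-16 digit of the remaining tag, dispatches to the
+// matching mark function (leaving a recognizable frame on the stack), and runs
+// cb once no digits remain.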
+func _m(tag_remainder uint, cb func()) {
+ if tag_remainder == 0 {
+ cb()
+ } else {
+ mark_lookup[tag_remainder&0xf](tag_remainder>>bitWidth, cb)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags_js.go b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags_js.go
new file mode 100644
index 00000000000..21d5595926b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags_js.go
@@ -0,0 +1,101 @@
+// +build js
+
+package gls
+
+// This file is used for GopherJS builds, which don't have normal runtime support
+
+import (
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/gopherjs/gopherjs/js"
+)
+
+var stackRE = regexp.MustCompile("\\s+at (\\S*) \\([^:]+:(\\d+):(\\d+)")
+
+func findPtr() uintptr {
+ jsStack := js.Global.Get("Error").New().Get("stack").Call("split", "\n")
+ for i := 1; i < jsStack.Get("length").Int(); i++ {
+ item := jsStack.Index(i).String()
+ matches := stackRE.FindAllStringSubmatch(item, -1)
+ if matches == nil {
+ return 0
+ }
+ pkgPath := matches[0][1]
+ if strings.HasPrefix(pkgPath, "$packages.github.com/jtolds/gls.mark") {
+ line, _ := strconv.Atoi(matches[0][2])
+ char, _ := strconv.Atoi(matches[0][3])
+ x := (uintptr(line) << 16) | uintptr(char)
+ return x
+ }
+ }
+
+ return 0
+}
+
+func init() {
+ setEntries := func(f func(uint, func()), v int8) {
+ var ptr uintptr
+ f(0, func() {
+ ptr = findPtr()
+ })
+ pc_lookup[ptr] = v
+ if v >= 0 {
+ mark_lookup[v] = f
+ }
+ }
+ setEntries(markS, -0x1)
+ setEntries(mark0, 0x0)
+ setEntries(mark1, 0x1)
+ setEntries(mark2, 0x2)
+ setEntries(mark3, 0x3)
+ setEntries(mark4, 0x4)
+ setEntries(mark5, 0x5)
+ setEntries(mark6, 0x6)
+ setEntries(mark7, 0x7)
+ setEntries(mark8, 0x8)
+ setEntries(mark9, 0x9)
+ setEntries(markA, 0xa)
+ setEntries(markB, 0xb)
+ setEntries(markC, 0xc)
+ setEntries(markD, 0xd)
+ setEntries(markE, 0xe)
+ setEntries(markF, 0xf)
+}
+
+func currentStack(skip int) (stack []uintptr) {
+ jsStack := js.Global.Get("Error").New().Get("stack").Call("split", "\n")
+ for i := skip + 2; i < jsStack.Get("length").Int(); i++ {
+ item := jsStack.Index(i).String()
+ matches := stackRE.FindAllStringSubmatch(item, -1)
+ if matches == nil {
+ return stack
+ }
+ line, _ := strconv.Atoi(matches[0][2])
+ char, _ := strconv.Atoi(matches[0][3])
+ x := (uintptr(line) << 16) | uintptr(char)&0xffff
+ stack = append(stack, x)
+ }
+
+ return stack
+}
+
+func readStackTags(skip int) (tags []uint) {
+ stack := currentStack(skip)
+ var current_tag uint
+ for _, pc := range stack {
+ val, ok := pc_lookup[pc]
+ if !ok {
+ continue
+ }
+ if val < 0 {
+ tags = append(tags, current_tag)
+ current_tag = 0
+ continue
+ }
+ current_tag <<= bitWidth
+ current_tag += uint(val)
+ }
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags_main.go b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags_main.go
new file mode 100644
index 00000000000..cb302b9ef63
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/jtolds/gls/stack_tags_main.go
@@ -0,0 +1,61 @@
+// +build !js
+
+package gls
+
+// This file is used for standard Go builds, which have the expected runtime support
+
+import (
+ "reflect"
+ "runtime"
+)
+
+func init() {
+ setEntries := func(f func(uint, func()), v int8) {
+ pc_lookup[reflect.ValueOf(f).Pointer()] = v
+ if v >= 0 {
+ mark_lookup[v] = f
+ }
+ }
+ setEntries(markS, -0x1)
+ setEntries(mark0, 0x0)
+ setEntries(mark1, 0x1)
+ setEntries(mark2, 0x2)
+ setEntries(mark3, 0x3)
+ setEntries(mark4, 0x4)
+ setEntries(mark5, 0x5)
+ setEntries(mark6, 0x6)
+ setEntries(mark7, 0x7)
+ setEntries(mark8, 0x8)
+ setEntries(mark9, 0x9)
+ setEntries(markA, 0xa)
+ setEntries(markB, 0xb)
+ setEntries(markC, 0xc)
+ setEntries(markD, 0xd)
+ setEntries(markE, 0xe)
+ setEntries(markF, 0xf)
+}
+
+func currentStack(skip int) []uintptr {
+ stack := make([]uintptr, maxCallers)
+ return stack[:runtime.Callers(3+skip, stack)]
+}
+
+func readStackTags(skip int) (tags []uint) {
+ stack := currentStack(skip)
+ var current_tag uint
+ for _, pc := range stack {
+ pc = runtime.FuncForPC(pc).Entry()
+ val, ok := pc_lookup[pc]
+ if !ok {
+ continue
+ }
+ if val < 0 {
+ tags = append(tags, current_tag)
+ current_tag = 0
+ continue
+ }
+ current_tag <<= bitWidth
+ current_tag += uint(val)
+ }
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/.travis.yml b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/.travis.yml
new file mode 100644
index 00000000000..5c9c2a30f07
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/.travis.yml
@@ -0,0 +1,8 @@
+language: go
+go:
+ - tip
+before_install:
+ - go get github.com/mattn/goveralls
+ - go get golang.org/x/tools/cmd/cover
+script:
+ - $HOME/gopath/bin/goveralls -repotoken lAKAWPzcGsD3A8yBX3BGGtRUdJ6CaGERL
diff --git a/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/README.mkd b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/README.mkd
new file mode 100644
index 00000000000..ffb0edd2c4e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/README.mkd
@@ -0,0 +1,26 @@
+go-runewidth
+============
+
+[![Build Status](https://travis-ci.org/mattn/go-runewidth.png?branch=master)](https://travis-ci.org/mattn/go-runewidth)
+[![Coverage Status](https://coveralls.io/repos/mattn/go-runewidth/badge.png?branch=HEAD)](https://coveralls.io/r/mattn/go-runewidth?branch=HEAD)
+[![GoDoc](https://godoc.org/github.com/mattn/go-runewidth?status.svg)](http://godoc.org/github.com/mattn/go-runewidth)
+
+Provides functions to get the fixed width of a character or string.
+
+Usage
+-----
+
+```go
+runewidth.StringWidth("つのだ☆HIRO") == 12
+```
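+
+If you'd rather pin the width rules explicitly than rely on the locale, a small
+illustrative sketch using the `Condition` type might look like this:
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/mattn/go-runewidth"
+)
+
+func main() {
+	c := runewidth.NewCondition()
+	c.EastAsianWidth = false // be explicit rather than relying on LC_CTYPE/LANG
+
+	fmt.Println(c.StringWidth("つのだ☆HIRO"))        // 11: ☆ is ambiguous width and counts as 1 here
+	fmt.Println(c.Truncate("つのだ☆HIRO", 10, "...")) // "つのだ☆..."
+	fmt.Println(c.FillRight("abc", 5) + "|")          // "abc  |"
+}
+```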
+
+
+Author
+------
+
+Yasuhiro Matsumoto
+
+License
+-------
+
+Released under the MIT License: http://mattn.mit-license.org/2013
diff --git a/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth.go b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth.go
new file mode 100644
index 00000000000..3fbf33d595e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth.go
@@ -0,0 +1,464 @@
+package runewidth
+
+var EastAsianWidth = IsEastAsian()
+var DefaultCondition = &Condition{EastAsianWidth}
+
+type interval struct {
+ first rune
+ last rune
+}
+
+var combining = []interval{
+ {0x0300, 0x036F}, {0x0483, 0x0486}, {0x0488, 0x0489},
+ {0x0591, 0x05BD}, {0x05BF, 0x05BF}, {0x05C1, 0x05C2},
+ {0x05C4, 0x05C5}, {0x05C7, 0x05C7}, {0x0600, 0x0603},
+ {0x0610, 0x0615}, {0x064B, 0x065E}, {0x0670, 0x0670},
+ {0x06D6, 0x06E4}, {0x06E7, 0x06E8}, {0x06EA, 0x06ED},
+ {0x070F, 0x070F}, {0x0711, 0x0711}, {0x0730, 0x074A},
+ {0x07A6, 0x07B0}, {0x07EB, 0x07F3}, {0x0901, 0x0902},
+ {0x093C, 0x093C}, {0x0941, 0x0948}, {0x094D, 0x094D},
+ {0x0951, 0x0954}, {0x0962, 0x0963}, {0x0981, 0x0981},
+ {0x09BC, 0x09BC}, {0x09C1, 0x09C4}, {0x09CD, 0x09CD},
+ {0x09E2, 0x09E3}, {0x0A01, 0x0A02}, {0x0A3C, 0x0A3C},
+ {0x0A41, 0x0A42}, {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D},
+ {0x0A70, 0x0A71}, {0x0A81, 0x0A82}, {0x0ABC, 0x0ABC},
+ {0x0AC1, 0x0AC5}, {0x0AC7, 0x0AC8}, {0x0ACD, 0x0ACD},
+ {0x0AE2, 0x0AE3}, {0x0B01, 0x0B01}, {0x0B3C, 0x0B3C},
+ {0x0B3F, 0x0B3F}, {0x0B41, 0x0B43}, {0x0B4D, 0x0B4D},
+ {0x0B56, 0x0B56}, {0x0B82, 0x0B82}, {0x0BC0, 0x0BC0},
+ {0x0BCD, 0x0BCD}, {0x0C3E, 0x0C40}, {0x0C46, 0x0C48},
+ {0x0C4A, 0x0C4D}, {0x0C55, 0x0C56}, {0x0CBC, 0x0CBC},
+ {0x0CBF, 0x0CBF}, {0x0CC6, 0x0CC6}, {0x0CCC, 0x0CCD},
+ {0x0CE2, 0x0CE3}, {0x0D41, 0x0D43}, {0x0D4D, 0x0D4D},
+ {0x0DCA, 0x0DCA}, {0x0DD2, 0x0DD4}, {0x0DD6, 0x0DD6},
+ {0x0E31, 0x0E31}, {0x0E34, 0x0E3A}, {0x0E47, 0x0E4E},
+ {0x0EB1, 0x0EB1}, {0x0EB4, 0x0EB9}, {0x0EBB, 0x0EBC},
+ {0x0EC8, 0x0ECD}, {0x0F18, 0x0F19}, {0x0F35, 0x0F35},
+ {0x0F37, 0x0F37}, {0x0F39, 0x0F39}, {0x0F71, 0x0F7E},
+ {0x0F80, 0x0F84}, {0x0F86, 0x0F87}, {0x0F90, 0x0F97},
+ {0x0F99, 0x0FBC}, {0x0FC6, 0x0FC6}, {0x102D, 0x1030},
+ {0x1032, 0x1032}, {0x1036, 0x1037}, {0x1039, 0x1039},
+ {0x1058, 0x1059}, {0x1160, 0x11FF}, {0x135F, 0x135F},
+ {0x1712, 0x1714}, {0x1732, 0x1734}, {0x1752, 0x1753},
+ {0x1772, 0x1773}, {0x17B4, 0x17B5}, {0x17B7, 0x17BD},
+ {0x17C6, 0x17C6}, {0x17C9, 0x17D3}, {0x17DD, 0x17DD},
+ {0x180B, 0x180D}, {0x18A9, 0x18A9}, {0x1920, 0x1922},
+ {0x1927, 0x1928}, {0x1932, 0x1932}, {0x1939, 0x193B},
+ {0x1A17, 0x1A18}, {0x1B00, 0x1B03}, {0x1B34, 0x1B34},
+ {0x1B36, 0x1B3A}, {0x1B3C, 0x1B3C}, {0x1B42, 0x1B42},
+ {0x1B6B, 0x1B73}, {0x1DC0, 0x1DCA}, {0x1DFE, 0x1DFF},
+ {0x200B, 0x200F}, {0x202A, 0x202E}, {0x2060, 0x2063},
+ {0x206A, 0x206F}, {0x20D0, 0x20EF}, {0x302A, 0x302F},
+ {0x3099, 0x309A}, {0xA806, 0xA806}, {0xA80B, 0xA80B},
+ {0xA825, 0xA826}, {0xFB1E, 0xFB1E}, {0xFE00, 0xFE0F},
+ {0xFE20, 0xFE23}, {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFB},
+ {0x10A01, 0x10A03}, {0x10A05, 0x10A06}, {0x10A0C, 0x10A0F},
+ {0x10A38, 0x10A3A}, {0x10A3F, 0x10A3F}, {0x1D167, 0x1D169},
+ {0x1D173, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD},
+ {0x1D242, 0x1D244}, {0xE0001, 0xE0001}, {0xE0020, 0xE007F},
+ {0xE0100, 0xE01EF},
+}
+
+type ctype int
+
+const (
+ narrow ctype = iota
+ ambiguous
+ wide
+ halfwidth
+ fullwidth
+ neutral
+)
+
+type intervalType struct {
+ first rune
+ last rune
+ ctype ctype
+}
+
+var ctypes = []intervalType{
+ {0x0020, 0x007E, narrow},
+ {0x00A1, 0x00A1, ambiguous},
+ {0x00A2, 0x00A3, narrow},
+ {0x00A4, 0x00A4, ambiguous},
+ {0x00A5, 0x00A6, narrow},
+ {0x00A7, 0x00A8, ambiguous},
+ {0x00AA, 0x00AA, ambiguous},
+ {0x00AC, 0x00AC, narrow},
+ {0x00AD, 0x00AE, ambiguous},
+ {0x00AF, 0x00AF, narrow},
+ {0x00B0, 0x00B4, ambiguous},
+ {0x00B6, 0x00BA, ambiguous},
+ {0x00BC, 0x00BF, ambiguous},
+ {0x00C6, 0x00C6, ambiguous},
+ {0x00D0, 0x00D0, ambiguous},
+ {0x00D7, 0x00D8, ambiguous},
+ {0x00DE, 0x00E1, ambiguous},
+ {0x00E6, 0x00E6, ambiguous},
+ {0x00E8, 0x00EA, ambiguous},
+ {0x00EC, 0x00ED, ambiguous},
+ {0x00F0, 0x00F0, ambiguous},
+ {0x00F2, 0x00F3, ambiguous},
+ {0x00F7, 0x00FA, ambiguous},
+ {0x00FC, 0x00FC, ambiguous},
+ {0x00FE, 0x00FE, ambiguous},
+ {0x0101, 0x0101, ambiguous},
+ {0x0111, 0x0111, ambiguous},
+ {0x0113, 0x0113, ambiguous},
+ {0x011B, 0x011B, ambiguous},
+ {0x0126, 0x0127, ambiguous},
+ {0x012B, 0x012B, ambiguous},
+ {0x0131, 0x0133, ambiguous},
+ {0x0138, 0x0138, ambiguous},
+ {0x013F, 0x0142, ambiguous},
+ {0x0144, 0x0144, ambiguous},
+ {0x0148, 0x014B, ambiguous},
+ {0x014D, 0x014D, ambiguous},
+ {0x0152, 0x0153, ambiguous},
+ {0x0166, 0x0167, ambiguous},
+ {0x016B, 0x016B, ambiguous},
+ {0x01CE, 0x01CE, ambiguous},
+ {0x01D0, 0x01D0, ambiguous},
+ {0x01D2, 0x01D2, ambiguous},
+ {0x01D4, 0x01D4, ambiguous},
+ {0x01D6, 0x01D6, ambiguous},
+ {0x01D8, 0x01D8, ambiguous},
+ {0x01DA, 0x01DA, ambiguous},
+ {0x01DC, 0x01DC, ambiguous},
+ {0x0251, 0x0251, ambiguous},
+ {0x0261, 0x0261, ambiguous},
+ {0x02C4, 0x02C4, ambiguous},
+ {0x02C7, 0x02C7, ambiguous},
+ {0x02C9, 0x02CB, ambiguous},
+ {0x02CD, 0x02CD, ambiguous},
+ {0x02D0, 0x02D0, ambiguous},
+ {0x02D8, 0x02DB, ambiguous},
+ {0x02DD, 0x02DD, ambiguous},
+ {0x02DF, 0x02DF, ambiguous},
+ {0x0300, 0x036F, ambiguous},
+ {0x0391, 0x03A2, ambiguous},
+ {0x03A3, 0x03A9, ambiguous},
+ {0x03B1, 0x03C1, ambiguous},
+ {0x03C3, 0x03C9, ambiguous},
+ {0x0401, 0x0401, ambiguous},
+ {0x0410, 0x044F, ambiguous},
+ {0x0451, 0x0451, ambiguous},
+ {0x1100, 0x115F, wide},
+ {0x2010, 0x2010, ambiguous},
+ {0x2013, 0x2016, ambiguous},
+ {0x2018, 0x2019, ambiguous},
+ {0x201C, 0x201D, ambiguous},
+ {0x2020, 0x2022, ambiguous},
+ {0x2024, 0x2027, ambiguous},
+ {0x2030, 0x2030, ambiguous},
+ {0x2032, 0x2033, ambiguous},
+ {0x2035, 0x2035, ambiguous},
+ {0x203B, 0x203B, ambiguous},
+ {0x203E, 0x203E, ambiguous},
+ {0x2074, 0x2074, ambiguous},
+ {0x207F, 0x207F, ambiguous},
+ {0x2081, 0x2084, ambiguous},
+ {0x20A9, 0x20A9, halfwidth},
+ {0x20AC, 0x20AC, ambiguous},
+ {0x2103, 0x2103, ambiguous},
+ {0x2105, 0x2105, ambiguous},
+ {0x2109, 0x2109, ambiguous},
+ {0x2113, 0x2113, ambiguous},
+ {0x2116, 0x2116, ambiguous},
+ {0x2121, 0x2122, ambiguous},
+ {0x2126, 0x2126, ambiguous},
+ {0x212B, 0x212B, ambiguous},
+ {0x2153, 0x2154, ambiguous},
+ {0x215B, 0x215E, ambiguous},
+ {0x2160, 0x216B, ambiguous},
+ {0x2170, 0x2179, ambiguous},
+ {0x2189, 0x218A, ambiguous},
+ {0x2190, 0x2199, ambiguous},
+ {0x21B8, 0x21B9, ambiguous},
+ {0x21D2, 0x21D2, ambiguous},
+ {0x21D4, 0x21D4, ambiguous},
+ {0x21E7, 0x21E7, ambiguous},
+ {0x2200, 0x2200, ambiguous},
+ {0x2202, 0x2203, ambiguous},
+ {0x2207, 0x2208, ambiguous},
+ {0x220B, 0x220B, ambiguous},
+ {0x220F, 0x220F, ambiguous},
+ {0x2211, 0x2211, ambiguous},
+ {0x2215, 0x2215, ambiguous},
+ {0x221A, 0x221A, ambiguous},
+ {0x221D, 0x2220, ambiguous},
+ {0x2223, 0x2223, ambiguous},
+ {0x2225, 0x2225, ambiguous},
+ {0x2227, 0x222C, ambiguous},
+ {0x222E, 0x222E, ambiguous},
+ {0x2234, 0x2237, ambiguous},
+ {0x223C, 0x223D, ambiguous},
+ {0x2248, 0x2248, ambiguous},
+ {0x224C, 0x224C, ambiguous},
+ {0x2252, 0x2252, ambiguous},
+ {0x2260, 0x2261, ambiguous},
+ {0x2264, 0x2267, ambiguous},
+ {0x226A, 0x226B, ambiguous},
+ {0x226E, 0x226F, ambiguous},
+ {0x2282, 0x2283, ambiguous},
+ {0x2286, 0x2287, ambiguous},
+ {0x2295, 0x2295, ambiguous},
+ {0x2299, 0x2299, ambiguous},
+ {0x22A5, 0x22A5, ambiguous},
+ {0x22BF, 0x22BF, ambiguous},
+ {0x2312, 0x2312, ambiguous},
+ {0x2329, 0x232A, wide},
+ {0x2460, 0x24E9, ambiguous},
+ {0x24EB, 0x254B, ambiguous},
+ {0x2550, 0x2573, ambiguous},
+ {0x2580, 0x258F, ambiguous},
+ {0x2592, 0x2595, ambiguous},
+ {0x25A0, 0x25A1, ambiguous},
+ {0x25A3, 0x25A9, ambiguous},
+ {0x25B2, 0x25B3, ambiguous},
+ {0x25B6, 0x25B7, ambiguous},
+ {0x25BC, 0x25BD, ambiguous},
+ {0x25C0, 0x25C1, ambiguous},
+ {0x25C6, 0x25C8, ambiguous},
+ {0x25CB, 0x25CB, ambiguous},
+ {0x25CE, 0x25D1, ambiguous},
+ {0x25E2, 0x25E5, ambiguous},
+ {0x25EF, 0x25EF, ambiguous},
+ {0x2605, 0x2606, ambiguous},
+ {0x2609, 0x2609, ambiguous},
+ {0x260E, 0x260F, ambiguous},
+ {0x2614, 0x2615, ambiguous},
+ {0x261C, 0x261C, ambiguous},
+ {0x261E, 0x261E, ambiguous},
+ {0x2640, 0x2640, ambiguous},
+ {0x2642, 0x2642, ambiguous},
+ {0x2660, 0x2661, ambiguous},
+ {0x2663, 0x2665, ambiguous},
+ {0x2667, 0x266A, ambiguous},
+ {0x266C, 0x266D, ambiguous},
+ {0x266F, 0x266F, ambiguous},
+ {0x269E, 0x269F, ambiguous},
+ {0x26BE, 0x26BF, ambiguous},
+ {0x26C4, 0x26CD, ambiguous},
+ {0x26CF, 0x26E1, ambiguous},
+ {0x26E3, 0x26E3, ambiguous},
+ {0x26E8, 0x26FF, ambiguous},
+ {0x273D, 0x273D, ambiguous},
+ {0x2757, 0x2757, ambiguous},
+ {0x2776, 0x277F, ambiguous},
+ {0x27E6, 0x27ED, narrow},
+ {0x2985, 0x2986, narrow},
+ {0x2B55, 0x2B59, ambiguous},
+ {0x2E80, 0x2E9A, wide},
+ {0x2E9B, 0x2EF4, wide},
+ {0x2F00, 0x2FD6, wide},
+ {0x2FF0, 0x2FFC, wide},
+ {0x3000, 0x3000, fullwidth},
+ {0x3001, 0x303E, wide},
+ {0x3041, 0x3097, wide},
+ {0x3099, 0x3100, wide},
+ {0x3105, 0x312E, wide},
+ {0x3131, 0x318F, wide},
+ {0x3190, 0x31BB, wide},
+ {0x31C0, 0x31E4, wide},
+ {0x31F0, 0x321F, wide},
+ {0x3220, 0x3247, wide},
+ {0x3248, 0x324F, ambiguous},
+ {0x3250, 0x32FF, wide},
+ {0x3300, 0x4DBF, wide},
+ {0x4E00, 0xA48D, wide},
+ {0xA490, 0xA4C7, wide},
+ {0xA960, 0xA97D, wide},
+ {0xAC00, 0xD7A4, wide},
+ {0xE000, 0xF8FF, ambiguous},
+ {0xF900, 0xFAFF, wide},
+ {0xFE00, 0xFE0F, ambiguous},
+ {0xFE10, 0xFE1A, wide},
+ {0xFE30, 0xFE53, wide},
+ {0xFE54, 0xFE67, wide},
+ {0xFE68, 0xFE6C, wide},
+ {0xFF01, 0xFF60, fullwidth},
+ {0xFF61, 0xFFBF, halfwidth},
+ {0xFFC2, 0xFFC8, halfwidth},
+ {0xFFCA, 0xFFD0, halfwidth},
+ {0xFFD2, 0xFFD8, halfwidth},
+ {0xFFDA, 0xFFDD, halfwidth},
+ {0xFFE0, 0xFFE7, fullwidth},
+ {0xFFE8, 0xFFEF, halfwidth},
+ {0xFFFD, 0xFFFE, ambiguous},
+ {0x1B000, 0x1B002, wide},
+ {0x1F100, 0x1F10A, ambiguous},
+ {0x1F110, 0x1F12D, ambiguous},
+ {0x1F130, 0x1F169, ambiguous},
+ {0x1F170, 0x1F19B, ambiguous},
+ {0x1F200, 0x1F203, wide},
+ {0x1F210, 0x1F23B, wide},
+ {0x1F240, 0x1F249, wide},
+ {0x1F250, 0x1F252, wide},
+ {0x20000, 0x2FFFE, wide},
+ {0x30000, 0x3FFFE, wide},
+ {0xE0100, 0xE01F0, ambiguous},
+ {0xF0000, 0xFFFFD, ambiguous},
+ {0x100000, 0x10FFFE, ambiguous},
+}
+
+type Condition struct {
+ EastAsianWidth bool
+}
+
+func NewCondition() *Condition {
+ return &Condition{EastAsianWidth}
+}
+
+// RuneWidth returns the number of cells in r.
+// See http://www.unicode.org/reports/tr11/
+func (c *Condition) RuneWidth(r rune) int {
+ if r == 0 {
+ return 0
+ }
+ if r < 32 || (r >= 0x7f && r < 0xa0) {
+ return 1
+ }
+ for _, iv := range combining {
+ if iv.first <= r && r <= iv.last {
+ return 0
+ }
+ }
+
+ if c.EastAsianWidth && IsAmbiguousWidth(r) {
+ return 2
+ }
+
+ if r >= 0x1100 &&
+ (r <= 0x115f || r == 0x2329 || r == 0x232a ||
+ (r >= 0x2e80 && r <= 0xa4cf && r != 0x303f) ||
+ (r >= 0xac00 && r <= 0xd7a3) ||
+ (r >= 0xf900 && r <= 0xfaff) ||
+ (r >= 0xfe30 && r <= 0xfe6f) ||
+ (r >= 0xff00 && r <= 0xff60) ||
+ (r >= 0xffe0 && r <= 0xffe6) ||
+ (r >= 0x20000 && r <= 0x2fffd) ||
+ (r >= 0x30000 && r <= 0x3fffd)) {
+ return 2
+ }
+ return 1
+}
+
+func (c *Condition) StringWidth(s string) (width int) {
+ for _, r := range []rune(s) {
+ width += c.RuneWidth(r)
+ }
+ return width
+}
+
+func (c *Condition) Truncate(s string, w int, tail string) string {
+ if c.StringWidth(s) <= w {
+ return s
+ }
+ r := []rune(s)
+ tw := c.StringWidth(tail)
+ w -= tw
+ width := 0
+ i := 0
+ for ; i < len(r); i++ {
+ cw := c.RuneWidth(r[i])
+ if width+cw > w {
+ break
+ }
+ width += cw
+ }
+ return string(r[0:i]) + tail
+}
+
+func (c *Condition) Wrap(s string, w int) string {
+ width := 0
+ out := ""
+ for _, r := range []rune(s) {
+ cw := RuneWidth(r)
+ if r == '\n' {
+ out += string(r)
+ width = 0
+ continue
+ } else if width+cw > w {
+ out += "\n"
+ width = 0
+ out += string(r)
+ width += cw
+ continue
+ }
+ out += string(r)
+ width += cw
+ }
+ return out
+}
+
+func (c *Condition) FillLeft(s string, w int) string {
+ width := c.StringWidth(s)
+ count := w - width
+ if count > 0 {
+ b := make([]byte, count)
+ for i := range b {
+ b[i] = ' '
+ }
+ return string(b) + s
+ }
+ return s
+}
+
+func (c *Condition) FillRight(s string, w int) string {
+ width := c.StringWidth(s)
+ count := w - width
+ if count > 0 {
+ b := make([]byte, count)
+ for i := range b {
+ b[i] = ' '
+ }
+ return s + string(b)
+ }
+ return s
+}
+
+// RuneWidth returns the number of cells in r.
+// See http://www.unicode.org/reports/tr11/
+func RuneWidth(r rune) int {
+ return DefaultCondition.RuneWidth(r)
+}
+
+func ct(r rune) ctype {
+ for _, iv := range ctypes {
+ if iv.first <= r && r <= iv.last {
+ return iv.ctype
+ }
+ }
+ return neutral
+}
+
+// IsAmbiguousWidth returns whether r is of ambiguous width.
+func IsAmbiguousWidth(r rune) bool {
+ return ct(r) == ambiguous
+}
+
+// IsNeutralWidth returns whether r is of neutral width.
+func IsNeutralWidth(r rune) bool {
+ return ct(r) == neutral
+}
+
+func StringWidth(s string) (width int) {
+ return DefaultCondition.StringWidth(s)
+}
+
+func Truncate(s string, w int, tail string) string {
+ return DefaultCondition.Truncate(s, w, tail)
+}
+
+func Wrap(s string, w int) string {
+ return DefaultCondition.Wrap(s, w)
+}
+
+func FillLeft(s string, w int) string {
+ return DefaultCondition.FillLeft(s, w)
+}
+
+func FillRight(s string, w int) string {
+ return DefaultCondition.FillRight(s, w)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_js.go b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_js.go
new file mode 100644
index 00000000000..0ce32c5e7b7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_js.go
@@ -0,0 +1,8 @@
+// +build js
+
+package runewidth
+
+func IsEastAsian() bool {
+ // TODO: Implement this for the web. Detect east asian in a compatible way, and return true.
+ return false
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_posix.go b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_posix.go
new file mode 100644
index 00000000000..a4495909d88
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_posix.go
@@ -0,0 +1,69 @@
+// +build !windows,!js
+
+package runewidth
+
+import (
+ "os"
+ "regexp"
+ "strings"
+)
+
+var reLoc = regexp.MustCompile(`^[a-z][a-z][a-z]?(?:_[A-Z][A-Z])?\.(.+)`)
+
+func IsEastAsian() bool {
+ locale := os.Getenv("LC_CTYPE")
+ if locale == "" {
+ locale = os.Getenv("LANG")
+ }
+
+ // ignore C locale
+ if locale == "POSIX" || locale == "C" {
+ return false
+ }
+ if len(locale) > 1 && locale[0] == 'C' && (locale[1] == '.' || locale[1] == '-') {
+ return false
+ }
+
+ charset := strings.ToLower(locale)
+ r := reLoc.FindStringSubmatch(locale)
+ if len(r) == 2 {
+ charset = strings.ToLower(r[1])
+ }
+
+ if strings.HasSuffix(charset, "@cjk_narrow") {
+ return false
+ }
+
+ for pos, b := range []byte(charset) {
+ if b == '@' {
+ charset = charset[:pos]
+ break
+ }
+ }
+
+ mbc_max := 1
+ switch charset {
+ case "utf-8", "utf8":
+ mbc_max = 6
+ case "jis":
+ mbc_max = 8
+ case "eucjp":
+ mbc_max = 3
+ case "euckr", "euccn":
+ mbc_max = 2
+ case "sjis", "cp932", "cp51932", "cp936", "cp949", "cp950":
+ mbc_max = 2
+ case "big5":
+ mbc_max = 2
+ case "gbk", "gb2312":
+ mbc_max = 2
+ }
+
+ if mbc_max > 1 && (charset[0] != 'u' ||
+ strings.HasPrefix(locale, "ja") ||
+ strings.HasPrefix(locale, "ko") ||
+ strings.HasPrefix(locale, "zh")) {
+ return true
+ }
+ return false
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_test.go b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_test.go
new file mode 100644
index 00000000000..f9431282c76
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_test.go
@@ -0,0 +1,229 @@
+package runewidth
+
+import (
+ "testing"
+)
+
+var runewidthtests = []struct {
+ in rune
+ out int
+}{
+ {'世', 2},
+ {'界', 2},
+ {'セ', 1},
+ {'カ', 1},
+ {'イ', 1},
+ {'☆', 2}, // double width in ambiguous
+ {'\x00', 0},
+ {'\x01', 1},
+ {'\u0300', 0},
+}
+
+func TestRuneWidth(t *testing.T) {
+ c := NewCondition()
+ c.EastAsianWidth = true
+ for _, tt := range runewidthtests {
+ if out := c.RuneWidth(tt.in); out != tt.out {
+ t.Errorf("Width(%q) = %q, want %q", tt.in, out, tt.out)
+ }
+ }
+}
+
+var isambiguouswidthtests = []struct {
+ in rune
+ out bool
+}{
+ {'世', false},
+ {'■', true},
+ {'界', false},
+ {'○', true},
+ {'㈱', false},
+ {'①', true},
+ {'②', true},
+ {'③', true},
+ {'④', true},
+ {'⑤', true},
+ {'⑥', true},
+ {'⑦', true},
+ {'⑧', true},
+ {'⑨', true},
+ {'⑩', true},
+ {'⑪', true},
+ {'⑫', true},
+ {'⑬', true},
+ {'⑭', true},
+ {'⑮', true},
+ {'⑯', true},
+ {'⑰', true},
+ {'⑱', true},
+ {'⑲', true},
+ {'⑳', true},
+ {'☆', true},
+}
+
+func TestIsAmbiguousWidth(t *testing.T) {
+ for _, tt := range isambiguouswidthtests {
+ if out := IsAmbiguousWidth(tt.in); out != tt.out {
+ t.Errorf("IsAmbiguousWidth(%q) = %q, want %q", tt.in, out, tt.out)
+ }
+ }
+}
+
+var stringwidthtests = []struct {
+ in string
+ out int
+}{
+ {"■㈱の世界①", 12},
+ {"スター☆", 8},
+}
+
+func TestStringWidth(t *testing.T) {
+ c := NewCondition()
+ c.EastAsianWidth = true
+ for _, tt := range stringwidthtests {
+ if out := c.StringWidth(tt.in); out != tt.out {
+ t.Errorf("StringWidth(%q) = %q, want %q", tt.in, out, tt.out)
+ }
+ }
+}
+
+func TestStringWidthInvalid(t *testing.T) {
+ s := "こんにちわ\x00世界"
+ if out := StringWidth(s); out != 14 {
+ t.Errorf("StringWidth(%q) = %q, want %q", s, out, 14)
+ }
+}
+
+func TestTruncateSmaller(t *testing.T) {
+ s := "あいうえお"
+ expected := "あいうえお"
+
+ if out := Truncate(s, 10, "..."); out != expected {
+ t.Errorf("Truncate(%q) = %q, want %q", s, out, expected)
+ }
+}
+
+func TestTruncate(t *testing.T) {
+ s := "あいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおおおおお"
+ expected := "あいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおお..."
+
+ out := Truncate(s, 80, "...")
+ if out != expected {
+ t.Errorf("Truncate(%q) = %q, want %q", s, out, expected)
+ }
+ width := StringWidth(out)
+ if width != 79 {
+ t.Errorf("width of Truncate(%q) should be %d, but %d", s, 79, width)
+ }
+}
+
+func TestTruncateFit(t *testing.T) {
+ s := "aあいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおおおおお"
+ expected := "aあいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおお..."
+
+ out := Truncate(s, 80, "...")
+ if out != expected {
+ t.Errorf("Truncate(%q) = %q, want %q", s, out, expected)
+ }
+ width := StringWidth(out)
+ if width != 80 {
+ t.Errorf("width of Truncate(%q) should be %d, but %d", s, 80, width)
+ }
+}
+
+func TestTruncateJustFit(t *testing.T) {
+ s := "あいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおおおお"
+ expected := "あいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおおおお"
+
+ out := Truncate(s, 80, "...")
+ if out != expected {
+ t.Errorf("Truncate(%q) = %q, want %q", s, out, expected)
+ }
+ width := StringWidth(out)
+ if width != 80 {
+ t.Errorf("width of Truncate(%q) should be %d, but %d", s, 80, width)
+ }
+}
+
+func TestWrap(t *testing.T) {
+ s := `東京特許許可局局長はよく柿喰う客だ/東京特許許可局局長はよく柿喰う客だ
+123456789012345678901234567890
+
+END`
+ expected := `東京特許許可局局長はよく柿喰う
+客だ/東京特許許可局局長はよく
+柿喰う客だ
+123456789012345678901234567890
+
+END`
+
+ if out := Wrap(s, 30); out != expected {
+ t.Errorf("Wrap(%q) = %q, want %q", s, out, expected)
+ }
+}
+
+func TestTruncateNoNeeded(t *testing.T) {
+ s := "あいうえおあい"
+ expected := "あいうえおあい"
+
+ if out := Truncate(s, 80, "..."); out != expected {
+ t.Errorf("Truncate(%q) = %q, want %q", s, out, expected)
+ }
+}
+
+var isneutralwidthtests = []struct {
+ in rune
+ out bool
+}{
+ {'→', false},
+ {'┊', false},
+ {'┈', false},
+ {'~', false},
+ {'└', false},
+ {'⣀', true},
+ {'⣀', true},
+}
+
+func TestIsNeutralWidth(t *testing.T) {
+ for _, tt := range isneutralwidthtests {
+ if out := IsNeutralWidth(tt.in); out != tt.out {
+ t.Errorf("IsNeutralWidth(%q) = %q, want %q", tt.in, out, tt.out)
+ }
+ }
+}
+
+func TestFillLeft(t *testing.T) {
+ s := "あxいうえお"
+ expected := " あxいうえお"
+
+ if out := FillLeft(s, 15); out != expected {
+ t.Errorf("FillLeft(%q) = %q, want %q", s, out, expected)
+ }
+}
+
+func TestFillLeftFit(t *testing.T) {
+ s := "あいうえお"
+ expected := "あいうえお"
+
+ if out := FillLeft(s, 10); out != expected {
+ t.Errorf("FillLeft(%q) = %q, want %q", s, out, expected)
+ }
+}
+
+func TestFillRight(t *testing.T) {
+ s := "あxいうえお"
+ expected := "あxいうえお "
+
+ if out := FillRight(s, 15); out != expected {
+ t.Errorf("FillRight(%q) = %q, want %q", s, out, expected)
+ }
+}
+
+func TestFillRightFit(t *testing.T) {
+ s := "あいうえお"
+ expected := "あいうえお"
+
+ if out := FillRight(s, 10); out != expected {
+ t.Errorf("FillRight(%q) = %q, want %q", s, out, expected)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_windows.go b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_windows.go
new file mode 100644
index 00000000000..bdd84454bec
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/mattn/go-runewidth/runewidth_windows.go
@@ -0,0 +1,24 @@
+package runewidth
+
+import (
+ "syscall"
+)
+
+var (
+ kernel32 = syscall.NewLazyDLL("kernel32")
+ procGetConsoleOutputCP = kernel32.NewProc("GetConsoleOutputCP")
+)
+
+func IsEastAsian() bool {
+ r1, _, _ := procGetConsoleOutputCP.Call()
+ if r1 == 0 {
+ return false
+ }
+
+ switch int(r1) {
+ case 932, 51932, 936, 949, 950:
+ return true
+ }
+
+ return false
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/AUTHORS b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/AUTHORS
new file mode 100644
index 00000000000..fe26fb0fb05
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/AUTHORS
@@ -0,0 +1,4 @@
+# Please keep this file sorted.
+
+Georg Reinke <guelfey@googlemail.com>
+nsf <no.smile.face@gmail.com>
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/LICENSE b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/LICENSE
new file mode 100644
index 00000000000..d9bc068ce74
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/LICENSE
@@ -0,0 +1,19 @@
+Copyright (C) 2012 termbox-go authors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/README.md b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/README.md
new file mode 100644
index 00000000000..9a7b35602ed
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/README.md
@@ -0,0 +1,28 @@
+## Termbox
+Termbox is a library that provides a minimalistic API which allows the programmer to write text-based user interfaces. The library is cross-platform: it has terminal-based implementations on *nix operating systems and a WinAPI console-based implementation for Windows. The basic idea is an abstraction of the greatest common subset of features available on all major terminals and other terminal-like APIs in a minimalistic fashion. A small API means it is easy to implement, test, maintain and learn, and that's what makes termbox a distinct library in its area.
+
+### Installation
+Install and update this go package with `go get -u github.com/nsf/termbox-go`
+
+### Examples
+For examples of what can be done, take a look at the demos in the _demos directory. You can try them with go run: `go run _demos/keyboard.go`
+
+There are also some interesting projects using termbox-go:
+ - [godit](https://github.com/nsf/godit) is an emacsish lightweight text editor written using termbox.
+ - [gomatrix](https://github.com/GeertJohan/gomatrix) connects to The Matrix and displays its data streams in your terminal.
+ - [gotetris](https://github.com/jjinux/gotetris) is an implementation of Tetris.
+ - [sokoban-go](https://github.com/rn2dy/sokoban-go) is an implementation of the Sokoban game.
+ - [hecate](https://github.com/evanmiller/hecate) is a hex editor designed by Satan.
+ - [httopd](https://github.com/verdverm/httopd) is top for httpd logs.
+ - [mop](https://github.com/michaeldv/mop) is a stock market tracker for hackers.
+ - [termui](https://github.com/gizak/termui) is a terminal dashboard.
+ - [termloop](https://github.com/JoelOtter/termloop) is a terminal game engine.
+ - [xterm-color-chart](https://github.com/kutuluk/xterm-color-chart) is an XTerm 256 color chart.
+ - [gocui](https://github.com/jroimartin/gocui) is a minimalist Go library aimed at creating console user interfaces.
+ - [dry](https://github.com/moncho/dry) is an interactive cli to manage Docker containers.
+ - [pxl](https://github.com/ichinaski/pxl) displays images in the terminal.
+ - [snake-game](https://github.com/DyegoCosta/snake-game) is an implementation of the Snake game.
+ - [gone](https://github.com/guillaumebreton/gone) is a CLI pomodoro® timer.
+
+### API reference
+[godoc.org/github.com/nsf/termbox-go](http://godoc.org/github.com/nsf/termbox-go)
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/editbox.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/editbox.go
new file mode 100644
index 00000000000..e429080d313
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/editbox.go
@@ -0,0 +1,300 @@
+package main
+
+import (
+ "github.com/mattn/go-runewidth"
+ "github.com/nsf/termbox-go"
+ "unicode/utf8"
+)
+
+func tbprint(x, y int, fg, bg termbox.Attribute, msg string) {
+ for _, c := range msg {
+ termbox.SetCell(x, y, c, fg, bg)
+ x += runewidth.RuneWidth(c)
+ }
+}
+
+func fill(x, y, w, h int, cell termbox.Cell) {
+ for ly := 0; ly < h; ly++ {
+ for lx := 0; lx < w; lx++ {
+ termbox.SetCell(x+lx, y+ly, cell.Ch, cell.Fg, cell.Bg)
+ }
+ }
+}
+
+func rune_advance_len(r rune, pos int) int {
+ if r == '\t' {
+ return tabstop_length - pos%tabstop_length
+ }
+ return runewidth.RuneWidth(r)
+}
+
+func voffset_coffset(text []byte, boffset int) (voffset, coffset int) {
+ text = text[:boffset]
+ for len(text) > 0 {
+ r, size := utf8.DecodeRune(text)
+ text = text[size:]
+ coffset += 1
+ voffset += rune_advance_len(r, voffset)
+ }
+ return
+}
+
+func byte_slice_grow(s []byte, desired_cap int) []byte {
+ if cap(s) < desired_cap {
+ ns := make([]byte, len(s), desired_cap)
+ copy(ns, s)
+ return ns
+ }
+ return s
+}
+
+func byte_slice_remove(text []byte, from, to int) []byte {
+ size := to - from
+ copy(text[from:], text[to:])
+ text = text[:len(text)-size]
+ return text
+}
+
+func byte_slice_insert(text []byte, offset int, what []byte) []byte {
+ n := len(text) + len(what)
+ text = byte_slice_grow(text, n)
+ text = text[:n]
+ copy(text[offset+len(what):], text[offset:])
+ copy(text[offset:], what)
+ return text
+}
+
+const preferred_horizontal_threshold = 5
+const tabstop_length = 8
+
+type EditBox struct {
+ text []byte
+ line_voffset int
+ cursor_boffset int // cursor offset in bytes
+ cursor_voffset int // visual cursor offset in termbox cells
+ cursor_coffset int // cursor offset in unicode code points
+}
+
+// Draws the EditBox in the given location, 'h' is not used at the moment
+func (eb *EditBox) Draw(x, y, w, h int) {
+ eb.AdjustVOffset(w)
+
+ const coldef = termbox.ColorDefault
+ fill(x, y, w, h, termbox.Cell{Ch: ' '})
+
+ t := eb.text
+ lx := 0
+ tabstop := 0
+ for {
+ rx := lx - eb.line_voffset
+ if len(t) == 0 {
+ break
+ }
+
+ if lx == tabstop {
+ tabstop += tabstop_length
+ }
+
+ if rx >= w {
+ termbox.SetCell(x+w-1, y, '→',
+ coldef, coldef)
+ break
+ }
+
+ r, size := utf8.DecodeRune(t)
+ if r == '\t' {
+ for ; lx < tabstop; lx++ {
+ rx = lx - eb.line_voffset
+ if rx >= w {
+ goto next
+ }
+
+ if rx >= 0 {
+ termbox.SetCell(x+rx, y, ' ', coldef, coldef)
+ }
+ }
+ } else {
+ if rx >= 0 {
+ termbox.SetCell(x+rx, y, r, coldef, coldef)
+ }
+ lx += runewidth.RuneWidth(r)
+ }
+ next:
+ t = t[size:]
+ }
+
+ if eb.line_voffset != 0 {
+ termbox.SetCell(x, y, '←', coldef, coldef)
+ }
+}
+
+// Adjusts line visual offset to a proper value depending on width
+func (eb *EditBox) AdjustVOffset(width int) {
+ ht := preferred_horizontal_threshold
+ max_h_threshold := (width - 1) / 2
+ if ht > max_h_threshold {
+ ht = max_h_threshold
+ }
+
+ threshold := width - 1
+ if eb.line_voffset != 0 {
+ threshold = width - ht
+ }
+ if eb.cursor_voffset-eb.line_voffset >= threshold {
+ eb.line_voffset = eb.cursor_voffset + (ht - width + 1)
+ }
+
+ if eb.line_voffset != 0 && eb.cursor_voffset-eb.line_voffset < ht {
+ eb.line_voffset = eb.cursor_voffset - ht
+ if eb.line_voffset < 0 {
+ eb.line_voffset = 0
+ }
+ }
+}
+
+func (eb *EditBox) MoveCursorTo(boffset int) {
+ eb.cursor_boffset = boffset
+ eb.cursor_voffset, eb.cursor_coffset = voffset_coffset(eb.text, boffset)
+}
+
+func (eb *EditBox) RuneUnderCursor() (rune, int) {
+ return utf8.DecodeRune(eb.text[eb.cursor_boffset:])
+}
+
+func (eb *EditBox) RuneBeforeCursor() (rune, int) {
+ return utf8.DecodeLastRune(eb.text[:eb.cursor_boffset])
+}
+
+func (eb *EditBox) MoveCursorOneRuneBackward() {
+ if eb.cursor_boffset == 0 {
+ return
+ }
+ _, size := eb.RuneBeforeCursor()
+ eb.MoveCursorTo(eb.cursor_boffset - size)
+}
+
+func (eb *EditBox) MoveCursorOneRuneForward() {
+ if eb.cursor_boffset == len(eb.text) {
+ return
+ }
+ _, size := eb.RuneUnderCursor()
+ eb.MoveCursorTo(eb.cursor_boffset + size)
+}
+
+func (eb *EditBox) MoveCursorToBeginningOfTheLine() {
+ eb.MoveCursorTo(0)
+}
+
+func (eb *EditBox) MoveCursorToEndOfTheLine() {
+ eb.MoveCursorTo(len(eb.text))
+}
+
+func (eb *EditBox) DeleteRuneBackward() {
+ if eb.cursor_boffset == 0 {
+ return
+ }
+
+ eb.MoveCursorOneRuneBackward()
+ _, size := eb.RuneUnderCursor()
+ eb.text = byte_slice_remove(eb.text, eb.cursor_boffset, eb.cursor_boffset+size)
+}
+
+func (eb *EditBox) DeleteRuneForward() {
+ if eb.cursor_boffset == len(eb.text) {
+ return
+ }
+ _, size := eb.RuneUnderCursor()
+ eb.text = byte_slice_remove(eb.text, eb.cursor_boffset, eb.cursor_boffset+size)
+}
+
+func (eb *EditBox) DeleteTheRestOfTheLine() {
+ eb.text = eb.text[:eb.cursor_boffset]
+}
+
+func (eb *EditBox) InsertRune(r rune) {
+ var buf [utf8.UTFMax]byte
+ n := utf8.EncodeRune(buf[:], r)
+ eb.text = byte_slice_insert(eb.text, eb.cursor_boffset, buf[:n])
+ eb.MoveCursorOneRuneForward()
+}
+
+// Please, keep in mind that cursor depends on the value of line_voffset, which
+// is being set on Draw() call, so.. call this method after Draw() one.
+func (eb *EditBox) CursorX() int {
+ return eb.cursor_voffset - eb.line_voffset
+}
+
+var edit_box EditBox
+
+const edit_box_width = 30
+
+func redraw_all() {
+ const coldef = termbox.ColorDefault
+ termbox.Clear(coldef, coldef)
+ w, h := termbox.Size()
+
+ midy := h / 2
+ midx := (w - edit_box_width) / 2
+
+ // unicode box drawing chars around the edit box
+ termbox.SetCell(midx-1, midy, '│', coldef, coldef)
+ termbox.SetCell(midx+edit_box_width, midy, '│', coldef, coldef)
+ termbox.SetCell(midx-1, midy-1, '┌', coldef, coldef)
+ termbox.SetCell(midx-1, midy+1, '└', coldef, coldef)
+ termbox.SetCell(midx+edit_box_width, midy-1, '┐', coldef, coldef)
+ termbox.SetCell(midx+edit_box_width, midy+1, '┘', coldef, coldef)
+ fill(midx, midy-1, edit_box_width, 1, termbox.Cell{Ch: '─'})
+ fill(midx, midy+1, edit_box_width, 1, termbox.Cell{Ch: '─'})
+
+ edit_box.Draw(midx, midy, edit_box_width, 1)
+ termbox.SetCursor(midx+edit_box.CursorX(), midy)
+
+ tbprint(midx+6, midy+3, coldef, coldef, "Press ESC to quit")
+ termbox.Flush()
+}
+
+func main() {
+ err := termbox.Init()
+ if err != nil {
+ panic(err)
+ }
+ defer termbox.Close()
+ termbox.SetInputMode(termbox.InputEsc)
+
+ redraw_all()
+mainloop:
+ for {
+ switch ev := termbox.PollEvent(); ev.Type {
+ case termbox.EventKey:
+ switch ev.Key {
+ case termbox.KeyEsc:
+ break mainloop
+ case termbox.KeyArrowLeft, termbox.KeyCtrlB:
+ edit_box.MoveCursorOneRuneBackward()
+ case termbox.KeyArrowRight, termbox.KeyCtrlF:
+ edit_box.MoveCursorOneRuneForward()
+ case termbox.KeyBackspace, termbox.KeyBackspace2:
+ edit_box.DeleteRuneBackward()
+ case termbox.KeyDelete, termbox.KeyCtrlD:
+ edit_box.DeleteRuneForward()
+ case termbox.KeyTab:
+ edit_box.InsertRune('\t')
+ case termbox.KeySpace:
+ edit_box.InsertRune(' ')
+ case termbox.KeyCtrlK:
+ edit_box.DeleteTheRestOfTheLine()
+ case termbox.KeyHome, termbox.KeyCtrlA:
+ edit_box.MoveCursorToBeginningOfTheLine()
+ case termbox.KeyEnd, termbox.KeyCtrlE:
+ edit_box.MoveCursorToEndOfTheLine()
+ default:
+ if ev.Ch != 0 {
+ edit_box.InsertRune(ev.Ch)
+ }
+ }
+ case termbox.EventError:
+ panic(ev.Err)
+ }
+ redraw_all()
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/interrupt.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/interrupt.go
new file mode 100644
index 00000000000..55345219fd0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/interrupt.go
@@ -0,0 +1,69 @@
+package main
+
+import (
+ "fmt"
+ "github.com/nsf/termbox-go"
+ "time"
+)
+
+func tbPrint(x, y int, fg, bg termbox.Attribute, msg string) {
+ for _, c := range msg {
+ termbox.SetCell(x, y, c, fg, bg)
+ x++
+ }
+}
+
+func draw(i int) {
+ termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
+ defer termbox.Flush()
+
+ w, h := termbox.Size()
+ s := fmt.Sprintf("count = %d", i)
+
+ tbPrint((w/2)-(len(s)/2), h/2, termbox.ColorRed, termbox.ColorDefault, s)
+}
+
+func main() {
+ err := termbox.Init()
+ if err != nil {
+ panic(err)
+ }
+ termbox.SetInputMode(termbox.InputEsc)
+
+ go func() {
+ time.Sleep(5 * time.Second)
+ termbox.Interrupt()
+
+ // This should never run - the Interrupt(), above, should cause the event
+ // loop below to exit, which then exits the process. If something goes
+ // wrong, this panic will trigger and show what happened.
+ time.Sleep(1 * time.Second)
+ panic("this should never run")
+ }()
+
+ var count int
+
+ draw(count)
+mainloop:
+ for {
+ switch ev := termbox.PollEvent(); ev.Type {
+ case termbox.EventKey:
+ if ev.Ch == '+' {
+ count++
+ } else if ev.Ch == '-' {
+ count--
+ }
+
+ case termbox.EventError:
+ panic(ev.Err)
+
+ case termbox.EventInterrupt:
+ break mainloop
+ }
+
+ draw(count)
+ }
+ termbox.Close()
+
+ fmt.Println("Finished")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/keyboard.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/keyboard.go
new file mode 100644
index 00000000000..b6a258e4ca2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/keyboard.go
@@ -0,0 +1,722 @@
+package main
+
+import "github.com/nsf/termbox-go"
+import "fmt"
+
+type key struct {
+ x int
+ y int
+ ch rune
+}
+
+var K_ESC = []key{{1, 1, 'E'}, {2, 1, 'S'}, {3, 1, 'C'}}
+var K_F1 = []key{{6, 1, 'F'}, {7, 1, '1'}}
+var K_F2 = []key{{9, 1, 'F'}, {10, 1, '2'}}
+var K_F3 = []key{{12, 1, 'F'}, {13, 1, '3'}}
+var K_F4 = []key{{15, 1, 'F'}, {16, 1, '4'}}
+var K_F5 = []key{{19, 1, 'F'}, {20, 1, '5'}}
+var K_F6 = []key{{22, 1, 'F'}, {23, 1, '6'}}
+var K_F7 = []key{{25, 1, 'F'}, {26, 1, '7'}}
+var K_F8 = []key{{28, 1, 'F'}, {29, 1, '8'}}
+var K_F9 = []key{{33, 1, 'F'}, {34, 1, '9'}}
+var K_F10 = []key{{36, 1, 'F'}, {37, 1, '1'}, {38, 1, '0'}}
+var K_F11 = []key{{40, 1, 'F'}, {41, 1, '1'}, {42, 1, '1'}}
+var K_F12 = []key{{44, 1, 'F'}, {45, 1, '1'}, {46, 1, '2'}}
+var K_PRN = []key{{50, 1, 'P'}, {51, 1, 'R'}, {52, 1, 'N'}}
+var K_SCR = []key{{54, 1, 'S'}, {55, 1, 'C'}, {56, 1, 'R'}}
+var K_BRK = []key{{58, 1, 'B'}, {59, 1, 'R'}, {60, 1, 'K'}}
+var K_LED1 = []key{{66, 1, '-'}}
+var K_LED2 = []key{{70, 1, '-'}}
+var K_LED3 = []key{{74, 1, '-'}}
+var K_TILDE = []key{{1, 4, '`'}}
+var K_TILDE_SHIFT = []key{{1, 4, '~'}}
+var K_1 = []key{{4, 4, '1'}}
+var K_1_SHIFT = []key{{4, 4, '!'}}
+var K_2 = []key{{7, 4, '2'}}
+var K_2_SHIFT = []key{{7, 4, '@'}}
+var K_3 = []key{{10, 4, '3'}}
+var K_3_SHIFT = []key{{10, 4, '#'}}
+var K_4 = []key{{13, 4, '4'}}
+var K_4_SHIFT = []key{{13, 4, '$'}}
+var K_5 = []key{{16, 4, '5'}}
+var K_5_SHIFT = []key{{16, 4, '%'}}
+var K_6 = []key{{19, 4, '6'}}
+var K_6_SHIFT = []key{{19, 4, '^'}}
+var K_7 = []key{{22, 4, '7'}}
+var K_7_SHIFT = []key{{22, 4, '&'}}
+var K_8 = []key{{25, 4, '8'}}
+var K_8_SHIFT = []key{{25, 4, '*'}}
+var K_9 = []key{{28, 4, '9'}}
+var K_9_SHIFT = []key{{28, 4, '('}}
+var K_0 = []key{{31, 4, '0'}}
+var K_0_SHIFT = []key{{31, 4, ')'}}
+var K_MINUS = []key{{34, 4, '-'}}
+var K_MINUS_SHIFT = []key{{34, 4, '_'}}
+var K_EQUALS = []key{{37, 4, '='}}
+var K_EQUALS_SHIFT = []key{{37, 4, '+'}}
+var K_BACKSLASH = []key{{40, 4, '\\'}}
+var K_BACKSLASH_SHIFT = []key{{40, 4, '|'}}
+var K_BACKSPACE = []key{{44, 4, 0x2190}, {45, 4, 0x2500}, {46, 4, 0x2500}}
+var K_INS = []key{{50, 4, 'I'}, {51, 4, 'N'}, {52, 4, 'S'}}
+var K_HOM = []key{{54, 4, 'H'}, {55, 4, 'O'}, {56, 4, 'M'}}
+var K_PGU = []key{{58, 4, 'P'}, {59, 4, 'G'}, {60, 4, 'U'}}
+var K_K_NUMLOCK = []key{{65, 4, 'N'}}
+var K_K_SLASH = []key{{68, 4, '/'}}
+var K_K_STAR = []key{{71, 4, '*'}}
+var K_K_MINUS = []key{{74, 4, '-'}}
+var K_TAB = []key{{1, 6, 'T'}, {2, 6, 'A'}, {3, 6, 'B'}}
+var K_q = []key{{6, 6, 'q'}}
+var K_Q = []key{{6, 6, 'Q'}}
+var K_w = []key{{9, 6, 'w'}}
+var K_W = []key{{9, 6, 'W'}}
+var K_e = []key{{12, 6, 'e'}}
+var K_E = []key{{12, 6, 'E'}}
+var K_r = []key{{15, 6, 'r'}}
+var K_R = []key{{15, 6, 'R'}}
+var K_t = []key{{18, 6, 't'}}
+var K_T = []key{{18, 6, 'T'}}
+var K_y = []key{{21, 6, 'y'}}
+var K_Y = []key{{21, 6, 'Y'}}
+var K_u = []key{{24, 6, 'u'}}
+var K_U = []key{{24, 6, 'U'}}
+var K_i = []key{{27, 6, 'i'}}
+var K_I = []key{{27, 6, 'I'}}
+var K_o = []key{{30, 6, 'o'}}
+var K_O = []key{{30, 6, 'O'}}
+var K_p = []key{{33, 6, 'p'}}
+var K_P = []key{{33, 6, 'P'}}
+var K_LSQB = []key{{36, 6, '['}}
+var K_LCUB = []key{{36, 6, '{'}}
+var K_RSQB = []key{{39, 6, ']'}}
+var K_RCUB = []key{{39, 6, '}'}}
+var K_ENTER = []key{
+ {43, 6, 0x2591}, {44, 6, 0x2591}, {45, 6, 0x2591}, {46, 6, 0x2591},
+ {43, 7, 0x2591}, {44, 7, 0x2591}, {45, 7, 0x21B5}, {46, 7, 0x2591},
+ {41, 8, 0x2591}, {42, 8, 0x2591}, {43, 8, 0x2591}, {44, 8, 0x2591},
+ {45, 8, 0x2591}, {46, 8, 0x2591},
+}
+var K_DEL = []key{{50, 6, 'D'}, {51, 6, 'E'}, {52, 6, 'L'}}
+var K_END = []key{{54, 6, 'E'}, {55, 6, 'N'}, {56, 6, 'D'}}
+var K_PGD = []key{{58, 6, 'P'}, {59, 6, 'G'}, {60, 6, 'D'}}
+var K_K_7 = []key{{65, 6, '7'}}
+var K_K_8 = []key{{68, 6, '8'}}
+var K_K_9 = []key{{71, 6, '9'}}
+var K_K_PLUS = []key{{74, 6, ' '}, {74, 7, '+'}, {74, 8, ' '}}
+var K_CAPS = []key{{1, 8, 'C'}, {2, 8, 'A'}, {3, 8, 'P'}, {4, 8, 'S'}}
+var K_a = []key{{7, 8, 'a'}}
+var K_A = []key{{7, 8, 'A'}}
+var K_s = []key{{10, 8, 's'}}
+var K_S = []key{{10, 8, 'S'}}
+var K_d = []key{{13, 8, 'd'}}
+var K_D = []key{{13, 8, 'D'}}
+var K_f = []key{{16, 8, 'f'}}
+var K_F = []key{{16, 8, 'F'}}
+var K_g = []key{{19, 8, 'g'}}
+var K_G = []key{{19, 8, 'G'}}
+var K_h = []key{{22, 8, 'h'}}
+var K_H = []key{{22, 8, 'H'}}
+var K_j = []key{{25, 8, 'j'}}
+var K_J = []key{{25, 8, 'J'}}
+var K_k = []key{{28, 8, 'k'}}
+var K_K = []key{{28, 8, 'K'}}
+var K_l = []key{{31, 8, 'l'}}
+var K_L = []key{{31, 8, 'L'}}
+var K_SEMICOLON = []key{{34, 8, ';'}}
+var K_PARENTHESIS = []key{{34, 8, ':'}}
+var K_QUOTE = []key{{37, 8, '\''}}
+var K_DOUBLEQUOTE = []key{{37, 8, '"'}}
+var K_K_4 = []key{{65, 8, '4'}}
+var K_K_5 = []key{{68, 8, '5'}}
+var K_K_6 = []key{{71, 8, '6'}}
+var K_LSHIFT = []key{{1, 10, 'S'}, {2, 10, 'H'}, {3, 10, 'I'}, {4, 10, 'F'}, {5, 10, 'T'}}
+var K_z = []key{{9, 10, 'z'}}
+var K_Z = []key{{9, 10, 'Z'}}
+var K_x = []key{{12, 10, 'x'}}
+var K_X = []key{{12, 10, 'X'}}
+var K_c = []key{{15, 10, 'c'}}
+var K_C = []key{{15, 10, 'C'}}
+var K_v = []key{{18, 10, 'v'}}
+var K_V = []key{{18, 10, 'V'}}
+var K_b = []key{{21, 10, 'b'}}
+var K_B = []key{{21, 10, 'B'}}
+var K_n = []key{{24, 10, 'n'}}
+var K_N = []key{{24, 10, 'N'}}
+var K_m = []key{{27, 10, 'm'}}
+var K_M = []key{{27, 10, 'M'}}
+var K_COMMA = []key{{30, 10, ','}}
+var K_LANB = []key{{30, 10, '<'}}
+var K_PERIOD = []key{{33, 10, '.'}}
+var K_RANB = []key{{33, 10, '>'}}
+var K_SLASH = []key{{36, 10, '/'}}
+var K_QUESTION = []key{{36, 10, '?'}}
+var K_RSHIFT = []key{{42, 10, 'S'}, {43, 10, 'H'}, {44, 10, 'I'}, {45, 10, 'F'}, {46, 10, 'T'}}
+var K_ARROW_UP = []key{{54, 10, '('}, {55, 10, 0x2191}, {56, 10, ')'}}
+var K_K_1 = []key{{65, 10, '1'}}
+var K_K_2 = []key{{68, 10, '2'}}
+var K_K_3 = []key{{71, 10, '3'}}
+var K_K_ENTER = []key{{74, 10, 0x2591}, {74, 11, 0x2591}, {74, 12, 0x2591}}
+var K_LCTRL = []key{{1, 12, 'C'}, {2, 12, 'T'}, {3, 12, 'R'}, {4, 12, 'L'}}
+var K_LWIN = []key{{6, 12, 'W'}, {7, 12, 'I'}, {8, 12, 'N'}}
+var K_LALT = []key{{10, 12, 'A'}, {11, 12, 'L'}, {12, 12, 'T'}}
+var K_SPACE = []key{
+ {14, 12, ' '}, {15, 12, ' '}, {16, 12, ' '}, {17, 12, ' '}, {18, 12, ' '},
+ {19, 12, 'S'}, {20, 12, 'P'}, {21, 12, 'A'}, {22, 12, 'C'}, {23, 12, 'E'},
+ {24, 12, ' '}, {25, 12, ' '}, {26, 12, ' '}, {27, 12, ' '}, {28, 12, ' '},
+}
+var K_RALT = []key{{30, 12, 'A'}, {31, 12, 'L'}, {32, 12, 'T'}}
+var K_RWIN = []key{{34, 12, 'W'}, {35, 12, 'I'}, {36, 12, 'N'}}
+var K_RPROP = []key{{38, 12, 'P'}, {39, 12, 'R'}, {40, 12, 'O'}, {41, 12, 'P'}}
+var K_RCTRL = []key{{43, 12, 'C'}, {44, 12, 'T'}, {45, 12, 'R'}, {46, 12, 'L'}}
+var K_ARROW_LEFT = []key{{50, 12, '('}, {51, 12, 0x2190}, {52, 12, ')'}}
+var K_ARROW_DOWN = []key{{54, 12, '('}, {55, 12, 0x2193}, {56, 12, ')'}}
+var K_ARROW_RIGHT = []key{{58, 12, '('}, {59, 12, 0x2192}, {60, 12, ')'}}
+var K_K_0 = []key{{65, 12, ' '}, {66, 12, '0'}, {67, 12, ' '}, {68, 12, ' '}}
+var K_K_PERIOD = []key{{71, 12, '.'}}
+
+type combo struct {
+ keys [][]key
+}
+
+var combos = []combo{
+ {[][]key{K_TILDE, K_2, K_SPACE, K_LCTRL, K_RCTRL}},
+ {[][]key{K_A, K_LCTRL, K_RCTRL}},
+ {[][]key{K_B, K_LCTRL, K_RCTRL}},
+ {[][]key{K_C, K_LCTRL, K_RCTRL}},
+ {[][]key{K_D, K_LCTRL, K_RCTRL}},
+ {[][]key{K_E, K_LCTRL, K_RCTRL}},
+ {[][]key{K_F, K_LCTRL, K_RCTRL}},
+ {[][]key{K_G, K_LCTRL, K_RCTRL}},
+ {[][]key{K_H, K_BACKSPACE, K_LCTRL, K_RCTRL}},
+ {[][]key{K_I, K_TAB, K_LCTRL, K_RCTRL}},
+ {[][]key{K_J, K_LCTRL, K_RCTRL}},
+ {[][]key{K_K, K_LCTRL, K_RCTRL}},
+ {[][]key{K_L, K_LCTRL, K_RCTRL}},
+ {[][]key{K_M, K_ENTER, K_K_ENTER, K_LCTRL, K_RCTRL}},
+ {[][]key{K_N, K_LCTRL, K_RCTRL}},
+ {[][]key{K_O, K_LCTRL, K_RCTRL}},
+ {[][]key{K_P, K_LCTRL, K_RCTRL}},
+ {[][]key{K_Q, K_LCTRL, K_RCTRL}},
+ {[][]key{K_R, K_LCTRL, K_RCTRL}},
+ {[][]key{K_S, K_LCTRL, K_RCTRL}},
+ {[][]key{K_T, K_LCTRL, K_RCTRL}},
+ {[][]key{K_U, K_LCTRL, K_RCTRL}},
+ {[][]key{K_V, K_LCTRL, K_RCTRL}},
+ {[][]key{K_W, K_LCTRL, K_RCTRL}},
+ {[][]key{K_X, K_LCTRL, K_RCTRL}},
+ {[][]key{K_Y, K_LCTRL, K_RCTRL}},
+ {[][]key{K_Z, K_LCTRL, K_RCTRL}},
+ {[][]key{K_LSQB, K_ESC, K_3, K_LCTRL, K_RCTRL}},
+ {[][]key{K_4, K_BACKSLASH, K_LCTRL, K_RCTRL}},
+ {[][]key{K_RSQB, K_5, K_LCTRL, K_RCTRL}},
+ {[][]key{K_6, K_LCTRL, K_RCTRL}},
+ {[][]key{K_7, K_SLASH, K_MINUS_SHIFT, K_LCTRL, K_RCTRL}},
+ {[][]key{K_SPACE}},
+ {[][]key{K_1_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_DOUBLEQUOTE, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_3_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_4_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_5_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_7_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_QUOTE}},
+ {[][]key{K_9_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_0_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_8_SHIFT, K_K_STAR, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_EQUALS_SHIFT, K_K_PLUS, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_COMMA}},
+ {[][]key{K_MINUS, K_K_MINUS}},
+ {[][]key{K_PERIOD, K_K_PERIOD}},
+ {[][]key{K_SLASH, K_K_SLASH}},
+ {[][]key{K_0, K_K_0}},
+ {[][]key{K_1, K_K_1}},
+ {[][]key{K_2, K_K_2}},
+ {[][]key{K_3, K_K_3}},
+ {[][]key{K_4, K_K_4}},
+ {[][]key{K_5, K_K_5}},
+ {[][]key{K_6, K_K_6}},
+ {[][]key{K_7, K_K_7}},
+ {[][]key{K_8, K_K_8}},
+ {[][]key{K_9, K_K_9}},
+ {[][]key{K_PARENTHESIS, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_SEMICOLON}},
+ {[][]key{K_LANB, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_EQUALS}},
+ {[][]key{K_RANB, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_QUESTION, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_2_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_A, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_B, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_C, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_D, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_E, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_F, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_G, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_H, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_I, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_J, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_K, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_L, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_M, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_N, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_O, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_P, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_Q, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_R, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_S, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_T, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_U, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_V, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_W, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_X, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_Y, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_Z, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_LSQB}},
+ {[][]key{K_BACKSLASH}},
+ {[][]key{K_RSQB}},
+ {[][]key{K_6_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_MINUS_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_TILDE}},
+ {[][]key{K_a}},
+ {[][]key{K_b}},
+ {[][]key{K_c}},
+ {[][]key{K_d}},
+ {[][]key{K_e}},
+ {[][]key{K_f}},
+ {[][]key{K_g}},
+ {[][]key{K_h}},
+ {[][]key{K_i}},
+ {[][]key{K_j}},
+ {[][]key{K_k}},
+ {[][]key{K_l}},
+ {[][]key{K_m}},
+ {[][]key{K_n}},
+ {[][]key{K_o}},
+ {[][]key{K_p}},
+ {[][]key{K_q}},
+ {[][]key{K_r}},
+ {[][]key{K_s}},
+ {[][]key{K_t}},
+ {[][]key{K_u}},
+ {[][]key{K_v}},
+ {[][]key{K_w}},
+ {[][]key{K_x}},
+ {[][]key{K_y}},
+ {[][]key{K_z}},
+ {[][]key{K_LCUB, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_BACKSLASH_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_RCUB, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_TILDE_SHIFT, K_LSHIFT, K_RSHIFT}},
+ {[][]key{K_8, K_BACKSPACE, K_LCTRL, K_RCTRL}},
+}
+
+var func_combos = []combo{
+ {[][]key{K_F1}},
+ {[][]key{K_F2}},
+ {[][]key{K_F3}},
+ {[][]key{K_F4}},
+ {[][]key{K_F5}},
+ {[][]key{K_F6}},
+ {[][]key{K_F7}},
+ {[][]key{K_F8}},
+ {[][]key{K_F9}},
+ {[][]key{K_F10}},
+ {[][]key{K_F11}},
+ {[][]key{K_F12}},
+ {[][]key{K_INS}},
+ {[][]key{K_DEL}},
+ {[][]key{K_HOM}},
+ {[][]key{K_END}},
+ {[][]key{K_PGU}},
+ {[][]key{K_PGD}},
+ {[][]key{K_ARROW_UP}},
+ {[][]key{K_ARROW_DOWN}},
+ {[][]key{K_ARROW_LEFT}},
+ {[][]key{K_ARROW_RIGHT}},
+}
+
+func print_tb(x, y int, fg, bg termbox.Attribute, msg string) {
+ for _, c := range msg {
+ termbox.SetCell(x, y, c, fg, bg)
+ x++
+ }
+}
+
+func printf_tb(x, y int, fg, bg termbox.Attribute, format string, args ...interface{}) {
+ s := fmt.Sprintf(format, args...)
+ print_tb(x, y, fg, bg, s)
+}
+
+func draw_key(k []key, fg, bg termbox.Attribute) {
+ for _, k := range k {
+ termbox.SetCell(k.x+2, k.y+4, k.ch, fg, bg)
+ }
+}
+
+func draw_keyboard() {
+ termbox.SetCell(0, 0, 0x250C, termbox.ColorWhite, termbox.ColorBlack)
+ termbox.SetCell(79, 0, 0x2510, termbox.ColorWhite, termbox.ColorBlack)
+ termbox.SetCell(0, 23, 0x2514, termbox.ColorWhite, termbox.ColorBlack)
+ termbox.SetCell(79, 23, 0x2518, termbox.ColorWhite, termbox.ColorBlack)
+
+ for i := 1; i < 79; i++ {
+ termbox.SetCell(i, 0, 0x2500, termbox.ColorWhite, termbox.ColorBlack)
+ termbox.SetCell(i, 23, 0x2500, termbox.ColorWhite, termbox.ColorBlack)
+ termbox.SetCell(i, 17, 0x2500, termbox.ColorWhite, termbox.ColorBlack)
+ termbox.SetCell(i, 4, 0x2500, termbox.ColorWhite, termbox.ColorBlack)
+ }
+ for i := 1; i < 23; i++ {
+ termbox.SetCell(0, i, 0x2502, termbox.ColorWhite, termbox.ColorBlack)
+ termbox.SetCell(79, i, 0x2502, termbox.ColorWhite, termbox.ColorBlack)
+ }
+ termbox.SetCell(0, 17, 0x251C, termbox.ColorWhite, termbox.ColorBlack)
+ termbox.SetCell(79, 17, 0x2524, termbox.ColorWhite, termbox.ColorBlack)
+ termbox.SetCell(0, 4, 0x251C, termbox.ColorWhite, termbox.ColorBlack)
+ termbox.SetCell(79, 4, 0x2524, termbox.ColorWhite, termbox.ColorBlack)
+ for i := 5; i < 17; i++ {
+ termbox.SetCell(1, i, 0x2588, termbox.ColorYellow, termbox.ColorYellow)
+ termbox.SetCell(78, i, 0x2588, termbox.ColorYellow, termbox.ColorYellow)
+ }
+
+ draw_key(K_ESC, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F1, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F2, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F3, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F4, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F5, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F6, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F7, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F8, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F9, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F10, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F11, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_F12, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_PRN, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_SCR, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_BRK, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_LED1, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_LED2, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_LED3, termbox.ColorWhite, termbox.ColorBlue)
+
+ draw_key(K_TILDE, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_1, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_2, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_3, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_4, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_5, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_6, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_7, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_8, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_9, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_0, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_MINUS, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_EQUALS, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_BACKSLASH, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_BACKSPACE, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_INS, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_HOM, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_PGU, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_NUMLOCK, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_SLASH, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_STAR, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_MINUS, termbox.ColorWhite, termbox.ColorBlue)
+
+ draw_key(K_TAB, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_q, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_w, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_e, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_r, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_t, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_y, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_u, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_i, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_o, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_p, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_LSQB, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_RSQB, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_ENTER, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_DEL, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_END, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_PGD, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_7, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_8, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_9, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_PLUS, termbox.ColorWhite, termbox.ColorBlue)
+
+ draw_key(K_CAPS, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_a, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_s, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_d, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_f, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_g, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_h, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_j, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_k, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_l, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_SEMICOLON, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_QUOTE, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_4, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_5, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_6, termbox.ColorWhite, termbox.ColorBlue)
+
+ draw_key(K_LSHIFT, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_z, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_x, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_c, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_v, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_b, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_n, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_m, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_COMMA, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_PERIOD, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_SLASH, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_RSHIFT, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_ARROW_UP, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_1, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_2, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_3, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_ENTER, termbox.ColorWhite, termbox.ColorBlue)
+
+ draw_key(K_LCTRL, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_LWIN, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_LALT, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_SPACE, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_RCTRL, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_RPROP, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_RWIN, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_RALT, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_ARROW_LEFT, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_ARROW_DOWN, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_ARROW_RIGHT, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_0, termbox.ColorWhite, termbox.ColorBlue)
+ draw_key(K_K_PERIOD, termbox.ColorWhite, termbox.ColorBlue)
+
+ printf_tb(33, 1, termbox.ColorMagenta|termbox.AttrBold, termbox.ColorBlack, "Keyboard demo!")
+ printf_tb(21, 2, termbox.ColorMagenta, termbox.ColorBlack, "(press CTRL+X and then CTRL+Q to exit)")
+ printf_tb(15, 3, termbox.ColorMagenta, termbox.ColorBlack, "(press CTRL+X and then CTRL+C to change input mode)")
+
+ inputmode := termbox.SetInputMode(termbox.InputCurrent)
+ inputmode_str := ""
+ switch {
+ case inputmode&termbox.InputEsc != 0:
+ inputmode_str = "termbox.InputEsc"
+ case inputmode&termbox.InputAlt != 0:
+ inputmode_str = "termbox.InputAlt"
+ }
+
+ if inputmode&termbox.InputMouse != 0 {
+ inputmode_str += " | termbox.InputMouse"
+ }
+ printf_tb(3, 18, termbox.ColorWhite, termbox.ColorBlack, "Input mode: %s", inputmode_str)
+}
+
+var fcmap = []string{
+ "CTRL+2, CTRL+~",
+ "CTRL+A",
+ "CTRL+B",
+ "CTRL+C",
+ "CTRL+D",
+ "CTRL+E",
+ "CTRL+F",
+ "CTRL+G",
+ "CTRL+H, BACKSPACE",
+ "CTRL+I, TAB",
+ "CTRL+J",
+ "CTRL+K",
+ "CTRL+L",
+ "CTRL+M, ENTER",
+ "CTRL+N",
+ "CTRL+O",
+ "CTRL+P",
+ "CTRL+Q",
+ "CTRL+R",
+ "CTRL+S",
+ "CTRL+T",
+ "CTRL+U",
+ "CTRL+V",
+ "CTRL+W",
+ "CTRL+X",
+ "CTRL+Y",
+ "CTRL+Z",
+ "CTRL+3, ESC, CTRL+[",
+ "CTRL+4, CTRL+\\",
+ "CTRL+5, CTRL+]",
+ "CTRL+6",
+ "CTRL+7, CTRL+/, CTRL+_",
+ "SPACE",
+}
+
+var fkmap = []string{
+ "F1",
+ "F2",
+ "F3",
+ "F4",
+ "F5",
+ "F6",
+ "F7",
+ "F8",
+ "F9",
+ "F10",
+ "F11",
+ "F12",
+ "INSERT",
+ "DELETE",
+ "HOME",
+ "END",
+ "PGUP",
+ "PGDN",
+ "ARROW UP",
+ "ARROW DOWN",
+ "ARROW LEFT",
+ "ARROW RIGHT",
+}
+
+func funckeymap(k termbox.Key) string {
+ if k == termbox.KeyCtrl8 {
+ return "CTRL+8, BACKSPACE 2" /* 0x7F */
+ } else if k >= termbox.KeyArrowRight && k <= 0xFFFF {
+ return fkmap[0xFFFF-k]
+ } else if k <= termbox.KeySpace {
+ return fcmap[k]
+ }
+ return "UNKNOWN"
+}
+
+func pretty_print_press(ev *termbox.Event) {
+ printf_tb(3, 19, termbox.ColorWhite, termbox.ColorBlack, "Key: ")
+ printf_tb(8, 19, termbox.ColorYellow, termbox.ColorBlack, "decimal: %d", ev.Key)
+ printf_tb(8, 20, termbox.ColorGreen, termbox.ColorBlack, "hex: 0x%X", ev.Key)
+ printf_tb(8, 21, termbox.ColorCyan, termbox.ColorBlack, "octal: 0%o", ev.Key)
+ printf_tb(8, 22, termbox.ColorRed, termbox.ColorBlack, "string: %s", funckeymap(ev.Key))
+
+ printf_tb(54, 19, termbox.ColorWhite, termbox.ColorBlack, "Char: ")
+ printf_tb(60, 19, termbox.ColorYellow, termbox.ColorBlack, "decimal: %d", ev.Ch)
+ printf_tb(60, 20, termbox.ColorGreen, termbox.ColorBlack, "hex: 0x%X", ev.Ch)
+ printf_tb(60, 21, termbox.ColorCyan, termbox.ColorBlack, "octal: 0%o", ev.Ch)
+ printf_tb(60, 22, termbox.ColorRed, termbox.ColorBlack, "string: %s", string(ev.Ch))
+
+ modifier := "none"
+ if ev.Mod != 0 {
+ modifier = "termbox.ModAlt"
+ }
+ printf_tb(54, 18, termbox.ColorWhite, termbox.ColorBlack, "Modifier: %s", modifier)
+}
+
+func pretty_print_resize(ev *termbox.Event) {
+ printf_tb(3, 19, termbox.ColorWhite, termbox.ColorBlack, "Resize event: %d x %d", ev.Width, ev.Height)
+}
+
+var counter = 0
+
+func pretty_print_mouse(ev *termbox.Event) {
+ printf_tb(3, 19, termbox.ColorWhite, termbox.ColorBlack, "Mouse event: %d x %d", ev.MouseX, ev.MouseY)
+ button := ""
+ switch ev.Key {
+ case termbox.MouseLeft:
+ button = "MouseLeft: %d"
+ case termbox.MouseMiddle:
+ button = "MouseMiddle: %d"
+ case termbox.MouseRight:
+ button = "MouseRight: %d"
+ case termbox.MouseWheelUp:
+ button = "MouseWheelUp: %d"
+ case termbox.MouseWheelDown:
+ button = "MouseWheelDown: %d"
+ case termbox.MouseRelease:
+ button = "MouseRelease: %d"
+ }
+ if ev.Mod&termbox.ModMotion != 0 {
+ button += "*"
+ }
+ counter++
+ printf_tb(43, 19, termbox.ColorWhite, termbox.ColorBlack, "Key: ")
+ printf_tb(48, 19, termbox.ColorYellow, termbox.ColorBlack, button, counter)
+}
+
+func dispatch_press(ev *termbox.Event) {
+ if ev.Mod&termbox.ModAlt != 0 {
+ draw_key(K_LALT, termbox.ColorWhite, termbox.ColorRed)
+ draw_key(K_RALT, termbox.ColorWhite, termbox.ColorRed)
+ }
+
+ var k *combo
+ if ev.Key >= termbox.KeyArrowRight {
+ k = &func_combos[0xFFFF-ev.Key]
+ } else if ev.Ch < 128 {
+ if ev.Ch == 0 && ev.Key < 128 {
+ k = &combos[ev.Key]
+ } else {
+ k = &combos[ev.Ch]
+ }
+ }
+ if k == nil {
+ return
+ }
+
+ keys := k.keys
+ for _, k := range keys {
+ draw_key(k, termbox.ColorWhite, termbox.ColorRed)
+ }
+}
+
+func main() {
+ err := termbox.Init()
+ if err != nil {
+ panic(err)
+ }
+ defer termbox.Close()
+
+ termbox.SetInputMode(termbox.InputEsc | termbox.InputMouse)
+
+ termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
+ draw_keyboard()
+ termbox.Flush()
+ inputmode := 0
+ ctrlxpressed := false
+loop:
+ for {
+ switch ev := termbox.PollEvent(); ev.Type {
+ case termbox.EventKey:
+ if ev.Key == termbox.KeyCtrlS && ctrlxpressed {
+ termbox.Sync()
+ }
+ if ev.Key == termbox.KeyCtrlQ && ctrlxpressed {
+ break loop
+ }
+ if ev.Key == termbox.KeyCtrlC && ctrlxpressed {
+ chmap := []termbox.InputMode{
+ termbox.InputEsc | termbox.InputMouse,
+ termbox.InputAlt | termbox.InputMouse,
+ termbox.InputEsc,
+ termbox.InputAlt,
+ }
+ inputmode++
+ if inputmode >= len(chmap) {
+ inputmode = 0
+ }
+ termbox.SetInputMode(chmap[inputmode])
+ }
+ if ev.Key == termbox.KeyCtrlX {
+ ctrlxpressed = true
+ } else {
+ ctrlxpressed = false
+ }
+
+ termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
+ draw_keyboard()
+ dispatch_press(&ev)
+ pretty_print_press(&ev)
+ termbox.Flush()
+ case termbox.EventResize:
+ termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
+ draw_keyboard()
+ pretty_print_resize(&ev)
+ termbox.Flush()
+ case termbox.EventMouse:
+ termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
+ draw_keyboard()
+ pretty_print_mouse(&ev)
+ termbox.Flush()
+ case termbox.EventError:
+ panic(ev.Err)
+ }
+ }
+}
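
The demo above turns termbox key constants into table indices with the expression 0xFFFF-ev.Key, relying on the fact that the named function keys count down from 0xFFFF (KeyF1 is the highest, KeyArrowRight the lowest of them; see api_common.go further down in this diff). A minimal sketch of that mapping, kept outside the vendored file and using only identifiers that appear in this diff:

package main

import (
	"fmt"

	"github.com/nsf/termbox-go"
)

// funcKeyIndex converts a function-key constant into a zero-based index
// (F1 -> 0, F2 -> 1, ...), the same arithmetic keyboard.go uses to pick
// entries out of fkmap and func_combos.
func funcKeyIndex(k termbox.Key) (int, bool) {
	if k >= termbox.KeyArrowRight && k <= termbox.KeyF1 {
		return int(termbox.KeyF1 - k), true
	}
	return 0, false
}

func main() {
	if i, ok := funcKeyIndex(termbox.KeyF10); ok {
		fmt.Println("F10 maps to index", i) // prints 9
	}
}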
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/output.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/output.go
new file mode 100644
index 00000000000..2b9479b078d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/output.go
@@ -0,0 +1,228 @@
+package main
+
+import "github.com/mattn/go-runewidth"
+import "github.com/nsf/termbox-go"
+
+const chars = "nnnnnnnnnbbbbbbbbbuuuuuuuuuBBBBBBBBB"
+
+var output_mode = termbox.OutputNormal
+
+func next_char(current int) int {
+ current++
+ if current >= len(chars) {
+ return 0
+ }
+ return current
+}
+
+func print_combinations_table(sx, sy int, attrs []termbox.Attribute) {
+ var bg termbox.Attribute
+ current_char := 0
+ y := sy
+
+ all_attrs := []termbox.Attribute{
+ 0,
+ termbox.AttrBold,
+ termbox.AttrUnderline,
+ termbox.AttrBold | termbox.AttrUnderline,
+ }
+
+ draw_line := func() {
+ x := sx
+ for _, a := range all_attrs {
+ for c := termbox.ColorDefault; c <= termbox.ColorWhite; c++ {
+ fg := a | c
+ termbox.SetCell(x, y, rune(chars[current_char]), fg, bg)
+ current_char = next_char(current_char)
+ x++
+ }
+ }
+ }
+
+ for _, a := range attrs {
+ for c := termbox.ColorDefault; c <= termbox.ColorWhite; c++ {
+ bg = a | c
+ draw_line()
+ y++
+ }
+ }
+}
+
+func print_wide(x, y int, s string) {
+ red := false
+ for _, r := range s {
+ c := termbox.ColorDefault
+ if red {
+ c = termbox.ColorRed
+ }
+ termbox.SetCell(x, y, r, termbox.ColorDefault, c)
+ w := runewidth.RuneWidth(r)
+ if w == 0 || (w == 2 && runewidth.IsAmbiguousWidth(r)) {
+ w = 1
+ }
+ x += w
+
+ red = !red
+ }
+}
+
+const hello_world = "こんにちは世界"
+
+func draw_all() {
+ termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
+
+ switch output_mode {
+
+ case termbox.OutputNormal:
+ print_combinations_table(1, 1, []termbox.Attribute{
+ 0,
+ termbox.AttrBold,
+ })
+ print_combinations_table(2+len(chars), 1, []termbox.Attribute{
+ termbox.AttrReverse,
+ })
+ print_wide(2+len(chars), 11, hello_world)
+
+ case termbox.OutputGrayscale:
+ for y := 0; y < 26; y++ {
+ for x := 0; x < 26; x++ {
+ termbox.SetCell(x, y, 'n',
+ termbox.Attribute(x+1),
+ termbox.Attribute(y+1))
+ termbox.SetCell(x+27, y, 'b',
+ termbox.Attribute(x+1)|termbox.AttrBold,
+ termbox.Attribute(26-y))
+ termbox.SetCell(x+54, y, 'u',
+ termbox.Attribute(x+1)|termbox.AttrUnderline,
+ termbox.Attribute(y+1))
+ }
+ termbox.SetCell(82, y, 'd',
+ termbox.Attribute(y+1),
+ termbox.ColorDefault)
+ termbox.SetCell(83, y, 'd',
+ termbox.ColorDefault,
+ termbox.Attribute(26-y))
+ }
+
+ case termbox.Output216:
+ for r := 0; r < 6; r++ {
+ for g := 0; g < 6; g++ {
+ for b := 0; b < 6; b++ {
+ y := r
+ x := g + 6*b
+ c1 := termbox.Attribute(1 + r*36 + g*6 + b)
+ bg := termbox.Attribute(1 + g*36 + b*6 + r)
+ c2 := termbox.Attribute(1 + b*36 + r*6 + g)
+ bc1 := c1 | termbox.AttrBold
+ uc1 := c1 | termbox.AttrUnderline
+ bc2 := c2 | termbox.AttrBold
+ uc2 := c2 | termbox.AttrUnderline
+ termbox.SetCell(x, y, 'n', c1, bg)
+ termbox.SetCell(x, y+6, 'b', bc1, bg)
+ termbox.SetCell(x, y+12, 'u', uc1, bg)
+ termbox.SetCell(x, y+18, 'B', bc1|uc1, bg)
+ termbox.SetCell(x+37, y, 'n', c2, bg)
+ termbox.SetCell(x+37, y+6, 'b', bc2, bg)
+ termbox.SetCell(x+37, y+12, 'u', uc2, bg)
+ termbox.SetCell(x+37, y+18, 'B', bc2|uc2, bg)
+ }
+ c1 := termbox.Attribute(1 + g*6 + r*36)
+ c2 := termbox.Attribute(6 + g*6 + r*36)
+ termbox.SetCell(74+g, r, 'd', c1, termbox.ColorDefault)
+ termbox.SetCell(74+g, r+6, 'd', c2, termbox.ColorDefault)
+ termbox.SetCell(74+g, r+12, 'd', termbox.ColorDefault, c1)
+ termbox.SetCell(74+g, r+18, 'd', termbox.ColorDefault, c2)
+ }
+ }
+
+ case termbox.Output256:
+ for y := 0; y < 4; y++ {
+ for x := 0; x < 8; x++ {
+ for z := 0; z < 8; z++ {
+ bg := termbox.Attribute(1 + y*64 + x*8 + z)
+ c1 := termbox.Attribute(256 - y*64 - x*8 - z)
+ c2 := termbox.Attribute(1 + y*64 + z*8 + x)
+ c3 := termbox.Attribute(256 - y*64 - z*8 - x)
+ c4 := termbox.Attribute(1 + y*64 + x*4 + z*4)
+ bold := c2 | termbox.AttrBold
+ under := c3 | termbox.AttrUnderline
+ both := c1 | termbox.AttrBold | termbox.AttrUnderline
+ termbox.SetCell(z+8*x, y, ' ', 0, bg)
+ termbox.SetCell(z+8*x, y+5, 'n', c4, bg)
+ termbox.SetCell(z+8*x, y+10, 'b', bold, bg)
+ termbox.SetCell(z+8*x, y+15, 'u', under, bg)
+ termbox.SetCell(z+8*x, y+20, 'B', both, bg)
+ }
+ }
+ }
+ for x := 0; x < 12; x++ {
+ for y := 0; y < 2; y++ {
+ c1 := termbox.Attribute(233 + y*12 + x)
+ termbox.SetCell(66+x, y, 'd', c1, termbox.ColorDefault)
+ termbox.SetCell(66+x, 2+y, 'd', termbox.ColorDefault, c1)
+ }
+ }
+ for x := 0; x < 6; x++ {
+ for y := 0; y < 6; y++ {
+ c1 := termbox.Attribute(17 + x*6 + y*36)
+ c2 := termbox.Attribute(17 + 5 + x*6 + y*36)
+ termbox.SetCell(66+x, 6+y, 'd', c1, termbox.ColorDefault)
+ termbox.SetCell(66+x, 12+y, 'd', c2, termbox.ColorDefault)
+ termbox.SetCell(72+x, 6+y, 'd', termbox.ColorDefault, c1)
+ termbox.SetCell(72+x, 12+y, 'd', termbox.ColorDefault, c2)
+ }
+ }
+
+ }
+
+ termbox.Flush()
+}
+
+var available_modes = []termbox.OutputMode{
+ termbox.OutputNormal,
+ termbox.OutputGrayscale,
+ termbox.Output216,
+ termbox.Output256,
+}
+
+var output_mode_index = 0
+
+func switch_output_mode(direction int) {
+ output_mode_index += direction
+ if output_mode_index < 0 {
+ output_mode_index = len(available_modes) - 1
+ } else if output_mode_index >= len(available_modes) {
+ output_mode_index = 0
+ }
+ output_mode = termbox.SetOutputMode(available_modes[output_mode_index])
+ termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
+ termbox.Sync()
+}
+
+func main() {
+ err := termbox.Init()
+ if err != nil {
+ panic(err)
+ }
+ defer termbox.Close()
+
+ draw_all()
+loop:
+ for {
+ switch ev := termbox.PollEvent(); ev.Type {
+ case termbox.EventKey:
+ switch ev.Key {
+ case termbox.KeyEsc:
+ break loop
+ case termbox.KeyArrowUp, termbox.KeyArrowRight:
+ switch_output_mode(1)
+ draw_all()
+ case termbox.KeyArrowDown, termbox.KeyArrowLeft:
+ switch_output_mode(-1)
+ draw_all()
+ }
+ case termbox.EventResize:
+ draw_all()
+ }
+ }
+}
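
output.go cycles through the four output modes and redraws on every arrow key; note that SetOutputMode returns the mode that actually took effect, which the demo stores back into output_mode. A small, hypothetical sketch of requesting Output256 directly and falling back to the basic colors when only OutputNormal is granted (as the Windows backend later in this diff always does):

package main

import "github.com/nsf/termbox-go"

func main() {
	if err := termbox.Init(); err != nil {
		panic(err)
	}
	defer termbox.Close()

	// The returned mode may differ from the requested one.
	mode := termbox.SetOutputMode(termbox.Output256)

	termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
	for i := 0; i < 8; i++ {
		fg := termbox.Attribute(i + 1) // 0x01..0x08: the basic eight colors
		if mode == termbox.Output256 {
			fg = termbox.Attribute(17 + i*6) // a strip from the 216-color cube
		}
		termbox.SetCell(i, 0, '#', fg, termbox.ColorDefault)
	}
	termbox.Flush()
	termbox.PollEvent() // wait for one event before exiting
}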
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/paint.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/paint.go
new file mode 100644
index 00000000000..fbafd18ae99
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/paint.go
@@ -0,0 +1,105 @@
+package main
+
+import (
+ "github.com/nsf/termbox-go"
+)
+
+var curCol = 0
+var curRune = 0
+var backbuf []termbox.Cell
+var bbw, bbh int
+
+var runes = []rune{' ', '░', '▒', '▓', '█'}
+var colors = []termbox.Attribute{
+ termbox.ColorBlack,
+ termbox.ColorRed,
+ termbox.ColorGreen,
+ termbox.ColorYellow,
+ termbox.ColorBlue,
+ termbox.ColorMagenta,
+ termbox.ColorCyan,
+ termbox.ColorWhite,
+}
+
+type attrFunc func(int) (rune, termbox.Attribute, termbox.Attribute)
+
+func updateAndDrawButtons(current *int, x, y int, mx, my int, n int, attrf attrFunc) {
+ lx, ly := x, y
+ for i := 0; i < n; i++ {
+ if lx <= mx && mx <= lx+3 && ly <= my && my <= ly+1 {
+ *current = i
+ }
+ r, fg, bg := attrf(i)
+ termbox.SetCell(lx+0, ly+0, r, fg, bg)
+ termbox.SetCell(lx+1, ly+0, r, fg, bg)
+ termbox.SetCell(lx+2, ly+0, r, fg, bg)
+ termbox.SetCell(lx+3, ly+0, r, fg, bg)
+ termbox.SetCell(lx+0, ly+1, r, fg, bg)
+ termbox.SetCell(lx+1, ly+1, r, fg, bg)
+ termbox.SetCell(lx+2, ly+1, r, fg, bg)
+ termbox.SetCell(lx+3, ly+1, r, fg, bg)
+ lx += 4
+ }
+ lx, ly = x, y
+ for i := 0; i < n; i++ {
+ if *current == i {
+ fg := termbox.ColorRed | termbox.AttrBold
+ bg := termbox.ColorDefault
+ termbox.SetCell(lx+0, ly+2, '^', fg, bg)
+ termbox.SetCell(lx+1, ly+2, '^', fg, bg)
+ termbox.SetCell(lx+2, ly+2, '^', fg, bg)
+ termbox.SetCell(lx+3, ly+2, '^', fg, bg)
+ }
+ lx += 4
+ }
+}
+
+func update_and_redraw_all(mx, my int) {
+ termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
+ if mx != -1 && my != -1 {
+ backbuf[bbw*my+mx] = termbox.Cell{Ch: runes[curRune], Fg: colors[curCol]}
+ }
+ copy(termbox.CellBuffer(), backbuf)
+ _, h := termbox.Size()
+ updateAndDrawButtons(&curRune, 0, 0, mx, my, len(runes), func(i int) (rune, termbox.Attribute, termbox.Attribute) {
+ return runes[i], termbox.ColorDefault, termbox.ColorDefault
+ })
+ updateAndDrawButtons(&curCol, 0, h-3, mx, my, len(colors), func(i int) (rune, termbox.Attribute, termbox.Attribute) {
+ return ' ', termbox.ColorDefault, colors[i]
+ })
+ termbox.Flush()
+}
+
+func reallocBackBuffer(w, h int) {
+ bbw, bbh = w, h
+ backbuf = make([]termbox.Cell, w*h)
+}
+
+func main() {
+ err := termbox.Init()
+ if err != nil {
+ panic(err)
+ }
+ defer termbox.Close()
+ termbox.SetInputMode(termbox.InputEsc | termbox.InputMouse)
+ reallocBackBuffer(termbox.Size())
+ update_and_redraw_all(-1, -1)
+
+mainloop:
+ for {
+ mx, my := -1, -1
+ switch ev := termbox.PollEvent(); ev.Type {
+ case termbox.EventKey:
+ if ev.Key == termbox.KeyEsc {
+ break mainloop
+ }
+ case termbox.EventMouse:
+ if ev.Key == termbox.MouseLeft {
+ mx, my = ev.MouseX, ev.MouseY
+ }
+ case termbox.EventResize:
+ reallocBackBuffer(ev.Width, ev.Height)
+ }
+ update_and_redraw_all(mx, my)
+ }
+}
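
paint.go keeps its own []termbox.Cell back buffer and blits it with copy(termbox.CellBuffer(), backbuf) before drawing the button rows on top; CellBuffer returns the library's internal back buffer, so writing into it is equivalent to calling SetCell, and the slice is only safe to use until the next Clear or Flush. A short sketch of that pattern with a hypothetical fillRow helper (not part of the vendored code):

package main

import "github.com/nsf/termbox-go"

// fillRow writes one full row of cells straight into termbox's back buffer.
// It assumes termbox.Init has already succeeded.
func fillRow(y int, ch rune, fg, bg termbox.Attribute) {
	w, h := termbox.Size()
	if y < 0 || y >= h {
		return
	}
	cells := termbox.CellBuffer() // valid until the next Clear or Flush
	for x := 0; x < w; x++ {
		cells[y*w+x] = termbox.Cell{Ch: ch, Fg: fg, Bg: bg}
	}
}

func main() {
	if err := termbox.Init(); err != nil {
		panic(err)
	}
	defer termbox.Close()

	termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
	fillRow(0, '░', termbox.ColorWhite, termbox.ColorBlue)
	termbox.Flush()
	termbox.PollEvent()
}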
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/random_output.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/random_output.go
new file mode 100644
index 00000000000..efcf0b7c9de
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/random_output.go
@@ -0,0 +1,46 @@
+package main
+
+import "github.com/nsf/termbox-go"
+import "math/rand"
+import "time"
+
+func draw() {
+ w, h := termbox.Size()
+ termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
+ for y := 0; y < h; y++ {
+ for x := 0; x < w; x++ {
+ termbox.SetCell(x, y, ' ', termbox.ColorDefault,
+ termbox.Attribute(rand.Int()%8)+1)
+ }
+ }
+ termbox.Flush()
+}
+
+func main() {
+ err := termbox.Init()
+ if err != nil {
+ panic(err)
+ }
+ defer termbox.Close()
+
+ event_queue := make(chan termbox.Event)
+ go func() {
+ for {
+ event_queue <- termbox.PollEvent()
+ }
+ }()
+
+ draw()
+loop:
+ for {
+ select {
+ case ev := <-event_queue:
+ if ev.Type == termbox.EventKey && ev.Key == termbox.KeyEsc {
+ break loop
+ }
+ default:
+ draw()
+ time.Sleep(10 * time.Millisecond)
+ }
+ }
+}
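
random_output.go makes input non-blocking by running PollEvent in its own goroutine and selecting on the resulting channel with a default branch plus a short sleep. An alternative sketch (not taken from the vendored demo) drives the redraw from a ticker instead, which avoids the busy default case:

package main

import (
	"time"

	"github.com/nsf/termbox-go"
)

func main() {
	if err := termbox.Init(); err != nil {
		panic(err)
	}
	defer termbox.Close()

	events := make(chan termbox.Event)
	go func() {
		for {
			events <- termbox.PollEvent()
		}
	}()

	tick := time.NewTicker(50 * time.Millisecond)
	defer tick.Stop()
	for {
		select {
		case ev := <-events:
			if ev.Type == termbox.EventKey && ev.Key == termbox.KeyEsc {
				return
			}
		case <-tick.C:
			// redraw here, e.g. the draw() from the demo above
			termbox.Flush()
		}
	}
}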
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/raw_input.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/raw_input.go
new file mode 100644
index 00000000000..97a489758f4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/_demos/raw_input.go
@@ -0,0 +1,109 @@
+package main
+
+import (
+ "fmt"
+ "github.com/nsf/termbox-go"
+ "strings"
+)
+
+func tbprint(x, y int, fg, bg termbox.Attribute, msg string) {
+ for _, c := range msg {
+ termbox.SetCell(x, y, c, fg, bg)
+ x++
+ }
+}
+
+var current string
+var curev termbox.Event
+
+func mouse_button_str(k termbox.Key) string {
+ switch k {
+ case termbox.MouseLeft:
+ return "MouseLeft"
+ case termbox.MouseMiddle:
+ return "MouseMiddle"
+ case termbox.MouseRight:
+ return "MouseRight"
+ case termbox.MouseRelease:
+ return "MouseRelease"
+ case termbox.MouseWheelUp:
+ return "MouseWheelUp"
+ case termbox.MouseWheelDown:
+ return "MouseWheelDown"
+ }
+ return "Key"
+}
+
+func mod_str(m termbox.Modifier) string {
+ var out []string
+ if m&termbox.ModAlt != 0 {
+ out = append(out, "ModAlt")
+ }
+ if m&termbox.ModMotion != 0 {
+ out = append(out, "ModMotion")
+ }
+ return strings.Join(out, " | ")
+}
+
+func redraw_all() {
+ const coldef = termbox.ColorDefault
+ termbox.Clear(coldef, coldef)
+ tbprint(0, 0, termbox.ColorMagenta, coldef, "Press 'q' to quit")
+ tbprint(0, 1, coldef, coldef, current)
+ switch curev.Type {
+ case termbox.EventKey:
+ tbprint(0, 2, coldef, coldef,
+ fmt.Sprintf("EventKey: k: %d, c: %c, mod: %s", curev.Key, curev.Ch, mod_str(curev.Mod)))
+ case termbox.EventMouse:
+ tbprint(0, 2, coldef, coldef,
+ fmt.Sprintf("EventMouse: x: %d, y: %d, b: %s, mod: %s",
+ curev.MouseX, curev.MouseY, mouse_button_str(curev.Key), mod_str(curev.Mod)))
+ case termbox.EventNone:
+ tbprint(0, 2, coldef, coldef, "EventNone")
+ }
+ tbprint(0, 3, coldef, coldef, fmt.Sprintf("%d", curev.N))
+ termbox.Flush()
+}
+
+func main() {
+ err := termbox.Init()
+ if err != nil {
+ panic(err)
+ }
+ defer termbox.Close()
+ termbox.SetInputMode(termbox.InputAlt | termbox.InputMouse)
+ redraw_all()
+
+ data := make([]byte, 0, 64)
+mainloop:
+ for {
+ if cap(data)-len(data) < 32 {
+ newdata := make([]byte, len(data), len(data)+32)
+ copy(newdata, data)
+ data = newdata
+ }
+ beg := len(data)
+ d := data[beg : beg+32]
+ switch ev := termbox.PollRawEvent(d); ev.Type {
+ case termbox.EventRaw:
+ data = data[:beg+ev.N]
+ current = fmt.Sprintf("%q", data)
+ if current == `"q"` {
+ break mainloop
+ }
+
+ for {
+ ev := termbox.ParseEvent(data)
+ if ev.N == 0 {
+ break
+ }
+ curev = ev
+ copy(data, data[curev.N:])
+ data = data[:len(data)-curev.N]
+ }
+ case termbox.EventError:
+ panic(ev.Err)
+ }
+ redraw_all()
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api.go
new file mode 100644
index 00000000000..b339e532f8e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api.go
@@ -0,0 +1,458 @@
+// +build !windows
+
+package termbox
+
+import "github.com/mattn/go-runewidth"
+import "fmt"
+import "os"
+import "os/signal"
+import "syscall"
+import "runtime"
+
+// public API
+
+// Initializes termbox library. This function should be called before any other functions.
+// After successful initialization, the library must be finalized using 'Close' function.
+//
+// Example usage:
+// err := termbox.Init()
+// if err != nil {
+// panic(err)
+// }
+// defer termbox.Close()
+func Init() error {
+ var err error
+
+ out, err = os.OpenFile("/dev/tty", syscall.O_WRONLY, 0)
+ if err != nil {
+ return err
+ }
+ in, err = syscall.Open("/dev/tty", syscall.O_RDONLY, 0)
+ if err != nil {
+ return err
+ }
+
+ err = setup_term()
+ if err != nil {
+ return fmt.Errorf("termbox: error while reading terminfo data: %v", err)
+ }
+
+ signal.Notify(sigwinch, syscall.SIGWINCH)
+ signal.Notify(sigio, syscall.SIGIO)
+
+ _, err = fcntl(in, syscall.F_SETFL, syscall.O_ASYNC|syscall.O_NONBLOCK)
+ if err != nil {
+ return err
+ }
+ _, err = fcntl(in, syscall.F_SETOWN, syscall.Getpid())
+ if runtime.GOOS != "darwin" && err != nil {
+ return err
+ }
+ err = tcgetattr(out.Fd(), &orig_tios)
+ if err != nil {
+ return err
+ }
+
+ tios := orig_tios
+ tios.Iflag &^= syscall_IGNBRK | syscall_BRKINT | syscall_PARMRK |
+ syscall_ISTRIP | syscall_INLCR | syscall_IGNCR |
+ syscall_ICRNL | syscall_IXON
+ tios.Lflag &^= syscall_ECHO | syscall_ECHONL | syscall_ICANON |
+ syscall_ISIG | syscall_IEXTEN
+ tios.Cflag &^= syscall_CSIZE | syscall_PARENB
+ tios.Cflag |= syscall_CS8
+ tios.Cc[syscall_VMIN] = 1
+ tios.Cc[syscall_VTIME] = 0
+
+ err = tcsetattr(out.Fd(), &tios)
+ if err != nil {
+ return err
+ }
+
+ out.WriteString(funcs[t_enter_ca])
+ out.WriteString(funcs[t_enter_keypad])
+ out.WriteString(funcs[t_hide_cursor])
+ out.WriteString(funcs[t_clear_screen])
+
+ termw, termh = get_term_size(out.Fd())
+ back_buffer.init(termw, termh)
+ front_buffer.init(termw, termh)
+ back_buffer.clear()
+ front_buffer.clear()
+
+ go func() {
+ buf := make([]byte, 128)
+ for {
+ select {
+ case <-sigio:
+ for {
+ n, err := syscall.Read(in, buf)
+ if err == syscall.EAGAIN || err == syscall.EWOULDBLOCK {
+ break
+ }
+ select {
+ case input_comm <- input_event{buf[:n], err}:
+ ie := <-input_comm
+ buf = ie.data[:128]
+ case <-quit:
+ return
+ }
+ }
+ case <-quit:
+ return
+ }
+ }
+ }()
+
+ IsInit = true
+ return nil
+}
+
+// Interrupt an in-progress call to PollEvent by causing it to return
+// EventInterrupt. Note that this function will block until the PollEvent
+// function has successfully been interrupted.
+func Interrupt() {
+ interrupt_comm <- struct{}{}
+}
+
+// Finalizes termbox library, should be called after successful initialization
+// when termbox's functionality isn't required anymore.
+func Close() {
+ quit <- 1
+ out.WriteString(funcs[t_show_cursor])
+ out.WriteString(funcs[t_sgr0])
+ out.WriteString(funcs[t_clear_screen])
+ out.WriteString(funcs[t_exit_ca])
+ out.WriteString(funcs[t_exit_keypad])
+ out.WriteString(funcs[t_exit_mouse])
+ tcsetattr(out.Fd(), &orig_tios)
+
+ out.Close()
+ syscall.Close(in)
+
+ // reset the state, so that on next Init() it will work again
+ termw = 0
+ termh = 0
+ input_mode = InputEsc
+ out = nil
+ in = 0
+ lastfg = attr_invalid
+ lastbg = attr_invalid
+ lastx = coord_invalid
+ lasty = coord_invalid
+ cursor_x = cursor_hidden
+ cursor_y = cursor_hidden
+ foreground = ColorDefault
+ background = ColorDefault
+ IsInit = false
+}
+
+// Synchronizes the internal back buffer with the terminal.
+func Flush() error {
+ // invalidate cursor position
+ lastx = coord_invalid
+ lasty = coord_invalid
+
+ update_size_maybe()
+
+ for y := 0; y < front_buffer.height; y++ {
+ line_offset := y * front_buffer.width
+ for x := 0; x < front_buffer.width; {
+ cell_offset := line_offset + x
+ back := &back_buffer.cells[cell_offset]
+ front := &front_buffer.cells[cell_offset]
+ if back.Ch < ' ' {
+ back.Ch = ' '
+ }
+ w := runewidth.RuneWidth(back.Ch)
+ if w == 0 || w == 2 && runewidth.IsAmbiguousWidth(back.Ch) {
+ w = 1
+ }
+ if *back == *front {
+ x += w
+ continue
+ }
+ *front = *back
+ send_attr(back.Fg, back.Bg)
+
+ if w == 2 && x == front_buffer.width-1 {
+ // there's not enough space for 2-cells rune,
+ // let's just put a space in there
+ send_char(x, y, ' ')
+ } else {
+ send_char(x, y, back.Ch)
+ if w == 2 {
+ next := cell_offset + 1
+ front_buffer.cells[next] = Cell{
+ Ch: 0,
+ Fg: back.Fg,
+ Bg: back.Bg,
+ }
+ }
+ }
+ x += w
+ }
+ }
+ if !is_cursor_hidden(cursor_x, cursor_y) {
+ write_cursor(cursor_x, cursor_y)
+ }
+ return flush()
+}
+
+// Sets the position of the cursor. See also HideCursor().
+func SetCursor(x, y int) {
+ if is_cursor_hidden(cursor_x, cursor_y) && !is_cursor_hidden(x, y) {
+ outbuf.WriteString(funcs[t_show_cursor])
+ }
+
+ if !is_cursor_hidden(cursor_x, cursor_y) && is_cursor_hidden(x, y) {
+ outbuf.WriteString(funcs[t_hide_cursor])
+ }
+
+ cursor_x, cursor_y = x, y
+ if !is_cursor_hidden(cursor_x, cursor_y) {
+ write_cursor(cursor_x, cursor_y)
+ }
+}
+
+// The shortcut for SetCursor(-1, -1).
+func HideCursor() {
+ SetCursor(cursor_hidden, cursor_hidden)
+}
+
+// Changes cell's parameters in the internal back buffer at the specified
+// position.
+func SetCell(x, y int, ch rune, fg, bg Attribute) {
+ if x < 0 || x >= back_buffer.width {
+ return
+ }
+ if y < 0 || y >= back_buffer.height {
+ return
+ }
+
+ back_buffer.cells[y*back_buffer.width+x] = Cell{ch, fg, bg}
+}
+
+// Returns a slice into the termbox's back buffer. You can get its dimensions
+// using the 'Size' function. The slice remains valid as long as no 'Clear' or
+// 'Flush' function calls were made after the call to this function.
+func CellBuffer() []Cell {
+ return back_buffer.cells
+}
+
+// After getting a raw event from a PollRawEvent call, you can parse it again
+// into an ordinary one using termbox logic; that is, parse the event exactly
+// as termbox would. In addition to the usual Event struct fields, the returned
+// event has its N field set to the number of bytes consumed from the 'data'
+// slice. If the length of the 'data' slice is zero or the event cannot be
+// parsed for some other reason, the function returns a special EventNone event.
+//
+// IMPORTANT: EventNone may contain a non-zero N, which means you should skip
+// these bytes, because termbox cannot recognize them.
+//
+// NOTE: This API is experimental and may change in future.
+func ParseEvent(data []byte) Event {
+ event := Event{Type: EventKey}
+ ok := extract_event(data, &event)
+ if !ok {
+ return Event{Type: EventNone, N: event.N}
+ }
+ return event
+}
+
+// Wait for an event and return it. This is a blocking function call. Instead
+// of EventKey and EventMouse, it returns EventRaw events. The raw event is
+// written into the 'data' slice and the Event's N field is set to the number
+// of bytes written. The minimum required length of the 'data' slice is 1; this
+// requirement may vary across platforms.
+//
+// NOTE: This API is experimental and may change in future.
+func PollRawEvent(data []byte) Event {
+ if len(data) == 0 {
+ panic("len(data) >= 1 is a requirement")
+ }
+
+ var event Event
+ if extract_raw_event(data, &event) {
+ return event
+ }
+
+ for {
+ select {
+ case ev := <-input_comm:
+ if ev.err != nil {
+ return Event{Type: EventError, Err: ev.err}
+ }
+
+ inbuf = append(inbuf, ev.data...)
+ input_comm <- ev
+ if extract_raw_event(data, &event) {
+ return event
+ }
+ case <-interrupt_comm:
+ event.Type = EventInterrupt
+ return event
+
+ case <-sigwinch:
+ event.Type = EventResize
+ event.Width, event.Height = get_term_size(out.Fd())
+ return event
+ }
+ }
+}
+
+// Wait for an event and return it. This is a blocking function call.
+func PollEvent() Event {
+ var event Event
+
+ // try to extract event from input buffer, return on success
+ event.Type = EventKey
+ ok := extract_event(inbuf, &event)
+ if event.N != 0 {
+ copy(inbuf, inbuf[event.N:])
+ inbuf = inbuf[:len(inbuf)-event.N]
+ }
+ if ok {
+ return event
+ }
+
+ for {
+ select {
+ case ev := <-input_comm:
+ if ev.err != nil {
+ return Event{Type: EventError, Err: ev.err}
+ }
+
+ inbuf = append(inbuf, ev.data...)
+ input_comm <- ev
+ ok := extract_event(inbuf, &event)
+ if event.N != 0 {
+ copy(inbuf, inbuf[event.N:])
+ inbuf = inbuf[:len(inbuf)-event.N]
+ }
+ if ok {
+ return event
+ }
+ case <-interrupt_comm:
+ event.Type = EventInterrupt
+ return event
+
+ case <-sigwinch:
+ event.Type = EventResize
+ event.Width, event.Height = get_term_size(out.Fd())
+ return event
+ }
+ }
+ panic("unreachable")
+}
+
+// Returns the size of the internal back buffer (which is mostly the same as
+// the terminal's window size in characters). It doesn't always match the size
+// of the terminal window: after the terminal size has changed, the internal
+// back buffer gets back in sync only after a Clear or Flush function call.
+func Size() (width int, height int) {
+ return termw, termh
+}
+
+// Clears the internal back buffer.
+func Clear(fg, bg Attribute) error {
+ foreground, background = fg, bg
+ err := update_size_maybe()
+ back_buffer.clear()
+ return err
+}
+
+// Sets termbox input mode. Termbox has two input modes:
+//
+// 1. Esc input mode. When an ESC sequence is in the buffer and it doesn't
+// match any known sequence, ESC means KeyEsc. This is the default input mode.
+//
+// 2. Alt input mode. When an ESC sequence is in the buffer and it doesn't
+// match any known sequence, ESC enables the ModAlt modifier for the next keyboard event.
+//
+// Both input modes can be OR'ed with Mouse mode. Setting the Mouse mode bit
+// enables mouse button press/release and drag events.
+//
+// If 'mode' is InputCurrent, returns the current input mode. See also Input*
+// constants.
+func SetInputMode(mode InputMode) InputMode {
+ if mode == InputCurrent {
+ return input_mode
+ }
+ if mode&(InputEsc|InputAlt) == 0 {
+ mode |= InputEsc
+ }
+ if mode&(InputEsc|InputAlt) == InputEsc|InputAlt {
+ mode &^= InputAlt
+ }
+ if mode&InputMouse != 0 {
+ out.WriteString(funcs[t_enter_mouse])
+ } else {
+ out.WriteString(funcs[t_exit_mouse])
+ }
+
+ input_mode = mode
+ return input_mode
+}
+
+// Sets the termbox output mode. Termbox has four output options:
+//
+// 1. OutputNormal => [1..8]
+// This mode provides 8 different colors:
+// black, red, green, yellow, blue, magenta, cyan, white
+// Shortcut: ColorBlack, ColorRed, ...
+// Attributes: AttrBold, AttrUnderline, AttrReverse
+//
+// Example usage:
+// SetCell(x, y, '@', ColorBlack | AttrBold, ColorRed);
+//
+// 2. Output256 => [1..256]
+// In this mode you can use the full 256-color terminal palette:
+// 0x01 - 0x08: the 8 colors as in OutputNormal
+// 0x09 - 0x10: Color* | AttrBold
+// 0x11 - 0xe8: 216 different colors
+// 0xe9 - 0x100: 24 different shades of grey
+//
+// Example usage:
+// SetCell(x, y, '@', 184, 240);
+// SetCell(x, y, '@', 0xb8, 0xf0);
+//
+// 3. Output216 => [1..216]
+// This mode supports the 3rd range of the 256 mode only,
+// but you don't need to provide an offset.
+//
+// 4. OutputGrayscale => [1..26]
+// This mode supports the 4th range of the 256 mode
+// and the black and white colors from the 3rd range of the 256 mode,
+// but you don't need to provide an offset.
+//
+// In all modes, 0x00 represents the default color.
+//
+// `go run _demos/output.go` to see its impact on your terminal.
+//
+// If 'mode' is OutputCurrent, it returns the current output mode.
+//
+// Note that this may return a different OutputMode than the one requested,
+// as the requested mode may not be available on the target platform.
+func SetOutputMode(mode OutputMode) OutputMode {
+ if mode == OutputCurrent {
+ return output_mode
+ }
+
+ output_mode = mode
+ return output_mode
+}
+
+// Sync comes in handy when something, such as a third party process, causes a
+// desync between termbox's understanding of the terminal buffer and reality.
+// Sync forces a complete resync between termbox and the terminal; it may not
+// be visually pretty though.
+func Sync() error {
+ front_buffer.clear()
+ err := send_clear()
+ if err != nil {
+ return err
+ }
+
+ return Flush()
+}
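
api.go's doc comments above describe the input modes and the Interrupt call. A short, hypothetical sketch that enables mouse reporting and uses Interrupt from another goroutine to force PollEvent to return after a deadline:

package main

import (
	"time"

	"github.com/nsf/termbox-go"
)

func main() {
	if err := termbox.Init(); err != nil {
		panic(err)
	}
	defer termbox.Close()

	// Esc input mode OR'ed with mouse reporting, as documented above.
	termbox.SetInputMode(termbox.InputEsc | termbox.InputMouse)

	// Unblock PollEvent after five seconds; it returns EventInterrupt.
	go func() {
		time.Sleep(5 * time.Second)
		termbox.Interrupt()
	}()

	for {
		switch ev := termbox.PollEvent(); ev.Type {
		case termbox.EventKey, termbox.EventMouse:
			// handle input here
		case termbox.EventInterrupt:
			return // deadline reached
		case termbox.EventError:
			panic(ev.Err)
		}
	}
}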
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api_common.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api_common.go
new file mode 100644
index 00000000000..9f23661f561
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api_common.go
@@ -0,0 +1,187 @@
+// Package termbox is a library for creating cross-platform text-based interfaces.
+package termbox
+
+// public API, common OS agnostic part
+
+type (
+ InputMode int
+ OutputMode int
+ EventType uint8
+ Modifier uint8
+ Key uint16
+ Attribute uint16
+)
+
+// This type represents a termbox event. The 'Mod', 'Key' and 'Ch' fields are
+// valid if 'Type' is EventKey. The 'Width' and 'Height' fields are valid if
+// 'Type' is EventResize. The 'Err' field is valid if 'Type' is EventError.
+type Event struct {
+ Type EventType // one of Event* constants
+ Mod Modifier // one of Mod* constants or 0
+ Key Key // one of Key* constants, invalid if 'Ch' is not 0
+ Ch rune // a unicode character
+ Width int // width of the screen
+ Height int // height of the screen
+ Err error // error in case if input failed
+ MouseX int // x coord of mouse
+ MouseY int // y coord of mouse
+ N int // number of bytes written when getting a raw event
+}
+
+// A cell is a single conceptual entity on the screen. The screen is basically a 2d
+// array of cells. 'Ch' is a unicode character, 'Fg' and 'Bg' are foreground
+// and background attributes respectively.
+type Cell struct {
+ Ch rune
+ Fg Attribute
+ Bg Attribute
+}
+
+// To know if termbox has been initialized or not
+var (
+ IsInit bool = false
+)
+
+// Key constants, see Event.Key field.
+const (
+ KeyF1 Key = 0xFFFF - iota
+ KeyF2
+ KeyF3
+ KeyF4
+ KeyF5
+ KeyF6
+ KeyF7
+ KeyF8
+ KeyF9
+ KeyF10
+ KeyF11
+ KeyF12
+ KeyInsert
+ KeyDelete
+ KeyHome
+ KeyEnd
+ KeyPgup
+ KeyPgdn
+ KeyArrowUp
+ KeyArrowDown
+ KeyArrowLeft
+ KeyArrowRight
+ key_min // see terminfo
+ MouseLeft
+ MouseMiddle
+ MouseRight
+ MouseRelease
+ MouseWheelUp
+ MouseWheelDown
+)
+
+const (
+ KeyCtrlTilde Key = 0x00
+ KeyCtrl2 Key = 0x00
+ KeyCtrlSpace Key = 0x00
+ KeyCtrlA Key = 0x01
+ KeyCtrlB Key = 0x02
+ KeyCtrlC Key = 0x03
+ KeyCtrlD Key = 0x04
+ KeyCtrlE Key = 0x05
+ KeyCtrlF Key = 0x06
+ KeyCtrlG Key = 0x07
+ KeyBackspace Key = 0x08
+ KeyCtrlH Key = 0x08
+ KeyTab Key = 0x09
+ KeyCtrlI Key = 0x09
+ KeyCtrlJ Key = 0x0A
+ KeyCtrlK Key = 0x0B
+ KeyCtrlL Key = 0x0C
+ KeyEnter Key = 0x0D
+ KeyCtrlM Key = 0x0D
+ KeyCtrlN Key = 0x0E
+ KeyCtrlO Key = 0x0F
+ KeyCtrlP Key = 0x10
+ KeyCtrlQ Key = 0x11
+ KeyCtrlR Key = 0x12
+ KeyCtrlS Key = 0x13
+ KeyCtrlT Key = 0x14
+ KeyCtrlU Key = 0x15
+ KeyCtrlV Key = 0x16
+ KeyCtrlW Key = 0x17
+ KeyCtrlX Key = 0x18
+ KeyCtrlY Key = 0x19
+ KeyCtrlZ Key = 0x1A
+ KeyEsc Key = 0x1B
+ KeyCtrlLsqBracket Key = 0x1B
+ KeyCtrl3 Key = 0x1B
+ KeyCtrl4 Key = 0x1C
+ KeyCtrlBackslash Key = 0x1C
+ KeyCtrl5 Key = 0x1D
+ KeyCtrlRsqBracket Key = 0x1D
+ KeyCtrl6 Key = 0x1E
+ KeyCtrl7 Key = 0x1F
+ KeyCtrlSlash Key = 0x1F
+ KeyCtrlUnderscore Key = 0x1F
+ KeySpace Key = 0x20
+ KeyBackspace2 Key = 0x7F
+ KeyCtrl8 Key = 0x7F
+)
+
+// Alt modifier constant, see Event.Mod field and SetInputMode function.
+const (
+ ModAlt Modifier = 1 << iota
+ ModMotion
+)
+
+// Cell colors. You can combine a color with multiple attributes using bitwise
+// OR ('|').
+const (
+ ColorDefault Attribute = iota
+ ColorBlack
+ ColorRed
+ ColorGreen
+ ColorYellow
+ ColorBlue
+ ColorMagenta
+ ColorCyan
+ ColorWhite
+)
+
+// Cell attributes. It is possible to use multiple attributes by combining them
+// using bitwise OR ('|'). Colors cannot be combined with each other, but you
+// can combine attributes with a single color.
+//
+// It's worth mentioning that some platforms don't support certain attributes.
+// For example, the Windows console doesn't support AttrUnderline, and on some
+// terminals applying AttrBold to the background may result in blinking text.
+// Use them with caution and test your code on various terminals.
+const (
+ AttrBold Attribute = 1 << (iota + 9)
+ AttrUnderline
+ AttrReverse
+)
+
+// Input mode. See SetInputMode function.
+const (
+ InputEsc InputMode = 1 << iota
+ InputAlt
+ InputMouse
+ InputCurrent InputMode = 0
+)
+
+// Output mode. See SetOutputMode function.
+const (
+ OutputCurrent OutputMode = iota
+ OutputNormal
+ Output256
+ Output216
+ OutputGrayscale
+)
+
+// Event type. See Event.Type field.
+const (
+ EventKey EventType = iota
+ EventResize
+ EventMouse
+ EventError
+ EventInterrupt
+ EventRaw
+ EventNone
+)
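
api_common.go defines the color and attribute constants: colors occupy the low bits of an Attribute while AttrBold, AttrUnderline and AttrReverse start at bit 9, so one color can be OR'ed with any combination of attributes, but two colors cannot be combined. A minimal sketch:

package main

import "github.com/nsf/termbox-go"

func main() {
	if err := termbox.Init(); err != nil {
		panic(err)
	}
	defer termbox.Close()

	termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
	// One color plus multiple attributes is fine; OR'ing two colors is not.
	fg := termbox.ColorGreen | termbox.AttrBold | termbox.AttrUnderline
	termbox.SetCell(0, 0, '@', fg, termbox.ColorBlack)
	termbox.Flush()
	termbox.PollEvent()
}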
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api_windows.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api_windows.go
new file mode 100644
index 00000000000..7def30a67df
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/api_windows.go
@@ -0,0 +1,239 @@
+package termbox
+
+import (
+ "syscall"
+)
+
+// public API
+
+// Initializes termbox library. This function should be called before any other functions.
+// After successful initialization, the library must be finalized using 'Close' function.
+//
+// Example usage:
+// err := termbox.Init()
+// if err != nil {
+// panic(err)
+// }
+// defer termbox.Close()
+func Init() error {
+ var err error
+
+ interrupt, err = create_event()
+ if err != nil {
+ return err
+ }
+
+ in, err = syscall.Open("CONIN$", syscall.O_RDWR, 0)
+ if err != nil {
+ return err
+ }
+ out, err = syscall.Open("CONOUT$", syscall.O_RDWR, 0)
+ if err != nil {
+ return err
+ }
+
+ err = get_console_mode(in, &orig_mode)
+ if err != nil {
+ return err
+ }
+
+ err = set_console_mode(in, enable_window_input)
+ if err != nil {
+ return err
+ }
+
+ orig_size = get_term_size(out)
+ win_size := get_win_size(out)
+
+ err = set_console_screen_buffer_size(out, win_size)
+ if err != nil {
+ return err
+ }
+
+ err = get_console_cursor_info(out, &orig_cursor_info)
+ if err != nil {
+ return err
+ }
+
+ show_cursor(false)
+ term_size = get_term_size(out)
+ back_buffer.init(int(term_size.x), int(term_size.y))
+ front_buffer.init(int(term_size.x), int(term_size.y))
+ back_buffer.clear()
+ front_buffer.clear()
+ clear()
+
+ diffbuf = make([]diff_msg, 0, 32)
+
+ go input_event_producer()
+ IsInit = true
+ return nil
+}
+
+// Finalizes termbox library, should be called after successful initialization
+// when termbox's functionality isn't required anymore.
+func Close() {
+ // we ignore errors here, because we can't really do anything about them
+ Clear(0, 0)
+ Flush()
+
+ // stop event producer
+ cancel_comm <- true
+ set_event(interrupt)
+ select {
+ case <-input_comm:
+ default:
+ }
+ <-cancel_done_comm
+
+ set_console_cursor_info(out, &orig_cursor_info)
+ set_console_cursor_position(out, coord{})
+ set_console_screen_buffer_size(out, orig_size)
+ set_console_mode(in, orig_mode)
+ syscall.Close(in)
+ syscall.Close(out)
+ syscall.Close(interrupt)
+ IsInit = false
+}
+
+// Interrupt an in-progress call to PollEvent by causing it to return
+// EventInterrupt. Note that this function will block until the PollEvent
+// function has successfully been interrupted.
+func Interrupt() {
+ interrupt_comm <- struct{}{}
+}
+
+// Synchronizes the internal back buffer with the terminal.
+func Flush() error {
+ update_size_maybe()
+ prepare_diff_messages()
+ for _, diff := range diffbuf {
+ r := small_rect{
+ left: 0,
+ top: diff.pos,
+ right: term_size.x - 1,
+ bottom: diff.pos + diff.lines - 1,
+ }
+ write_console_output(out, diff.chars, r)
+ }
+ if !is_cursor_hidden(cursor_x, cursor_y) {
+ move_cursor(cursor_x, cursor_y)
+ }
+ return nil
+}
+
+// Sets the position of the cursor. See also HideCursor().
+func SetCursor(x, y int) {
+ if is_cursor_hidden(cursor_x, cursor_y) && !is_cursor_hidden(x, y) {
+ show_cursor(true)
+ }
+
+ if !is_cursor_hidden(cursor_x, cursor_y) && is_cursor_hidden(x, y) {
+ show_cursor(false)
+ }
+
+ cursor_x, cursor_y = x, y
+ if !is_cursor_hidden(cursor_x, cursor_y) {
+ move_cursor(cursor_x, cursor_y)
+ }
+}
+
+// The shortcut for SetCursor(-1, -1).
+func HideCursor() {
+ SetCursor(cursor_hidden, cursor_hidden)
+}
+
+// Changes cell's parameters in the internal back buffer at the specified
+// position.
+func SetCell(x, y int, ch rune, fg, bg Attribute) {
+ if x < 0 || x >= back_buffer.width {
+ return
+ }
+ if y < 0 || y >= back_buffer.height {
+ return
+ }
+
+ back_buffer.cells[y*back_buffer.width+x] = Cell{ch, fg, bg}
+}
+
+// Returns a slice into the termbox's back buffer. You can get its dimensions
+// using the 'Size' function. The slice remains valid as long as no 'Clear' or
+// 'Flush' function calls were made after the call to this function.
+func CellBuffer() []Cell {
+ return back_buffer.cells
+}
+
+// Wait for an event and return it. This is a blocking function call.
+func PollEvent() Event {
+ select {
+ case ev := <-input_comm:
+ return ev
+ case <-interrupt_comm:
+ return Event{Type: EventInterrupt}
+ }
+}
+
+// Returns the size of the internal back buffer (which is mostly the same as
+// the console's window size in characters). It doesn't always match the size
+// of the console window: after the console size has changed, the internal back
+// buffer gets back in sync only after a Clear or Flush function call.
+func Size() (int, int) {
+ return int(term_size.x), int(term_size.y)
+}
+
+// Clears the internal back buffer.
+func Clear(fg, bg Attribute) error {
+ foreground, background = fg, bg
+ update_size_maybe()
+ back_buffer.clear()
+ return nil
+}
+
+// Sets termbox input mode. Termbox has two input modes:
+//
+// 1. Esc input mode. When an ESC sequence is in the buffer and it doesn't match
+// any known sequence, ESC means KeyEsc. This is the default input mode.
+//
+// 2. Alt input mode. When an ESC sequence is in the buffer and it doesn't match
+// any known sequence, ESC enables the ModAlt modifier for the next keyboard event.
+//
+// Both input modes can be OR'ed with Mouse mode. Setting the Mouse mode bit
+// enables mouse button press/release and drag events.
+//
+// If 'mode' is InputCurrent, returns the current input mode. See also Input*
+// constants.
+func SetInputMode(mode InputMode) InputMode {
+ if mode == InputCurrent {
+ return input_mode
+ }
+ if mode&InputMouse != 0 {
+ err := set_console_mode(in, enable_window_input|enable_mouse_input|enable_extended_flags)
+ if err != nil {
+ panic(err)
+ }
+ } else {
+ err := set_console_mode(in, enable_window_input)
+ if err != nil {
+ panic(err)
+ }
+ }
+
+ input_mode = mode
+ return input_mode
+}
+
+// Sets the termbox output mode.
+//
+// Windows console does not support extra colour modes,
+// so this will always set and return OutputNormal.
+func SetOutputMode(mode OutputMode) OutputMode {
+ return OutputNormal
+}
+
+// Sync comes in handy when something, such as a third-party process, causes a
+// desync between termbox's understanding of the terminal buffer and reality.
+// Sync forces a complete resync between termbox and the terminal; it may not be
+// visually pretty though. At the moment it does nothing on Windows.
+func Sync() error {
+ return nil
+}
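
The exported functions above (Init, Close, Flush, SetCell, PollEvent, Clear, SetInputMode, ...) make up termbox's public API. A minimal usage sketch, illustrative only and not part of the vendored files, assuming the conventional import path github.com/nsf/termbox-go:

```go
package main

import "github.com/nsf/termbox-go"

func main() {
	// Initialize the terminal and make sure it is restored on exit.
	if err := termbox.Init(); err != nil {
		panic(err)
	}
	defer termbox.Close()

	// Draw into the back buffer, then synchronize it with the terminal.
	termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
	for i, ch := range "hello" {
		termbox.SetCell(i, 0, ch, termbox.ColorDefault, termbox.ColorDefault)
	}
	termbox.Flush()

	// Block on events until Esc is pressed.
	for {
		ev := termbox.PollEvent()
		if ev.Type == termbox.EventKey && ev.Key == termbox.KeyEsc {
			return
		}
	}
}
```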
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/collect_terminfo.py b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/collect_terminfo.py
new file mode 100755
index 00000000000..5e50975e63a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/collect_terminfo.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+
+import sys, os, subprocess
+
+def escaped(s):
+ return repr(s)[1:-1]
+
+def tput(term, name):
+ try:
+ return subprocess.check_output(['tput', '-T%s' % term, name]).decode()
+ except subprocess.CalledProcessError as e:
+ return e.output.decode()
+
+
+def w(s):
+ if s == None:
+ return
+ sys.stdout.write(s)
+
+terminals = {
+ 'xterm' : 'xterm',
+ 'rxvt-256color' : 'rxvt_256color',
+ 'rxvt-unicode' : 'rxvt_unicode',
+ 'linux' : 'linux',
+ 'Eterm' : 'eterm',
+ 'screen' : 'screen'
+}
+
+keys = [
+ "F1", "kf1",
+ "F2", "kf2",
+ "F3", "kf3",
+ "F4", "kf4",
+ "F5", "kf5",
+ "F6", "kf6",
+ "F7", "kf7",
+ "F8", "kf8",
+ "F9", "kf9",
+ "F10", "kf10",
+ "F11", "kf11",
+ "F12", "kf12",
+ "INSERT", "kich1",
+ "DELETE", "kdch1",
+ "HOME", "khome",
+ "END", "kend",
+ "PGUP", "kpp",
+ "PGDN", "knp",
+ "KEY_UP", "kcuu1",
+ "KEY_DOWN", "kcud1",
+ "KEY_LEFT", "kcub1",
+ "KEY_RIGHT", "kcuf1"
+]
+
+funcs = [
+ "T_ENTER_CA", "smcup",
+ "T_EXIT_CA", "rmcup",
+ "T_SHOW_CURSOR", "cnorm",
+ "T_HIDE_CURSOR", "civis",
+ "T_CLEAR_SCREEN", "clear",
+ "T_SGR0", "sgr0",
+ "T_UNDERLINE", "smul",
+ "T_BOLD", "bold",
+ "T_BLINK", "blink",
+ "T_REVERSE", "rev",
+ "T_ENTER_KEYPAD", "smkx",
+ "T_EXIT_KEYPAD", "rmkx"
+]
+
+def iter_pairs(iterable):
+ iterable = iter(iterable)
+ while True:
+ yield (next(iterable), next(iterable))
+
+def do_term(term, nick):
+ w("// %s\n" % term)
+ w("var %s_keys = []string{\n\t" % nick)
+ for k, v in iter_pairs(keys):
+ w('"')
+ w(escaped(tput(term, v)))
+ w('",')
+ w("\n}\n")
+ w("var %s_funcs = []string{\n\t" % nick)
+ for k,v in iter_pairs(funcs):
+ w('"')
+ if v == "sgr":
+ w("\\033[3%d;4%dm")
+ elif v == "cup":
+ w("\\033[%d;%dH")
+ else:
+ w(escaped(tput(term, v)))
+ w('", ')
+ w("\n}\n\n")
+
+def do_terms(d):
+ w("var terms = []struct {\n")
+ w("\tname string\n")
+ w("\tkeys []string\n")
+ w("\tfuncs []string\n")
+ w("}{\n")
+ for k, v in d.items():
+ w('\t{"%s", %s_keys, %s_funcs},\n' % (k, v, v))
+ w("}\n\n")
+
+w("// +build !windows\n\npackage termbox\n\n")
+
+for k,v in terminals.items():
+ do_term(k, v)
+
+do_terms(terminals)
+
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls.go
new file mode 100644
index 00000000000..4f52bb9af9a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls.go
@@ -0,0 +1,39 @@
+// +build ignore
+
+package termbox
+
+/*
+#include <termios.h>
+#include <sys/ioctl.h>
+*/
+import "C"
+
+type syscall_Termios C.struct_termios
+
+const (
+ syscall_IGNBRK = C.IGNBRK
+ syscall_BRKINT = C.BRKINT
+ syscall_PARMRK = C.PARMRK
+ syscall_ISTRIP = C.ISTRIP
+ syscall_INLCR = C.INLCR
+ syscall_IGNCR = C.IGNCR
+ syscall_ICRNL = C.ICRNL
+ syscall_IXON = C.IXON
+ syscall_OPOST = C.OPOST
+ syscall_ECHO = C.ECHO
+ syscall_ECHONL = C.ECHONL
+ syscall_ICANON = C.ICANON
+ syscall_ISIG = C.ISIG
+ syscall_IEXTEN = C.IEXTEN
+ syscall_CSIZE = C.CSIZE
+ syscall_PARENB = C.PARENB
+ syscall_CS8 = C.CS8
+ syscall_VMIN = C.VMIN
+ syscall_VTIME = C.VTIME
+
+ // on darwin change these to (on *bsd too?):
+ // C.TIOCGETA
+ // C.TIOCSETA
+ syscall_TCGETS = C.TCGETS
+ syscall_TCSETS = C.TCSETS
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_darwin.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_darwin.go
new file mode 100644
index 00000000000..25b78f7ab70
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_darwin.go
@@ -0,0 +1,41 @@
+// Created by cgo -godefs - DO NOT EDIT
+// cgo -godefs syscalls.go
+
+// +build !amd64
+
+package termbox
+
+type syscall_Termios struct {
+ Iflag uint32
+ Oflag uint32
+ Cflag uint32
+ Lflag uint32
+ Cc [20]uint8
+ Ispeed uint32
+ Ospeed uint32
+}
+
+const (
+ syscall_IGNBRK = 0x1
+ syscall_BRKINT = 0x2
+ syscall_PARMRK = 0x8
+ syscall_ISTRIP = 0x20
+ syscall_INLCR = 0x40
+ syscall_IGNCR = 0x80
+ syscall_ICRNL = 0x100
+ syscall_IXON = 0x200
+ syscall_OPOST = 0x1
+ syscall_ECHO = 0x8
+ syscall_ECHONL = 0x10
+ syscall_ICANON = 0x100
+ syscall_ISIG = 0x80
+ syscall_IEXTEN = 0x400
+ syscall_CSIZE = 0x300
+ syscall_PARENB = 0x1000
+ syscall_CS8 = 0x300
+ syscall_VMIN = 0x10
+ syscall_VTIME = 0x11
+
+ syscall_TCGETS = 0x402c7413
+ syscall_TCSETS = 0x802c7414
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_darwin_amd64.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_darwin_amd64.go
new file mode 100644
index 00000000000..11f25be79a4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_darwin_amd64.go
@@ -0,0 +1,40 @@
+// Created by cgo -godefs - DO NOT EDIT
+// cgo -godefs syscalls.go
+
+package termbox
+
+type syscall_Termios struct {
+ Iflag uint64
+ Oflag uint64
+ Cflag uint64
+ Lflag uint64
+ Cc [20]uint8
+ Pad_cgo_0 [4]byte
+ Ispeed uint64
+ Ospeed uint64
+}
+
+const (
+ syscall_IGNBRK = 0x1
+ syscall_BRKINT = 0x2
+ syscall_PARMRK = 0x8
+ syscall_ISTRIP = 0x20
+ syscall_INLCR = 0x40
+ syscall_IGNCR = 0x80
+ syscall_ICRNL = 0x100
+ syscall_IXON = 0x200
+ syscall_OPOST = 0x1
+ syscall_ECHO = 0x8
+ syscall_ECHONL = 0x10
+ syscall_ICANON = 0x100
+ syscall_ISIG = 0x80
+ syscall_IEXTEN = 0x400
+ syscall_CSIZE = 0x300
+ syscall_PARENB = 0x1000
+ syscall_CS8 = 0x300
+ syscall_VMIN = 0x10
+ syscall_VTIME = 0x11
+
+ syscall_TCGETS = 0x40487413
+ syscall_TCSETS = 0x80487414
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_freebsd.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_freebsd.go
new file mode 100644
index 00000000000..e03624ebc71
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_freebsd.go
@@ -0,0 +1,39 @@
+// Created by cgo -godefs - DO NOT EDIT
+// cgo -godefs syscalls.go
+
+package termbox
+
+type syscall_Termios struct {
+ Iflag uint32
+ Oflag uint32
+ Cflag uint32
+ Lflag uint32
+ Cc [20]uint8
+ Ispeed uint32
+ Ospeed uint32
+}
+
+const (
+ syscall_IGNBRK = 0x1
+ syscall_BRKINT = 0x2
+ syscall_PARMRK = 0x8
+ syscall_ISTRIP = 0x20
+ syscall_INLCR = 0x40
+ syscall_IGNCR = 0x80
+ syscall_ICRNL = 0x100
+ syscall_IXON = 0x200
+ syscall_OPOST = 0x1
+ syscall_ECHO = 0x8
+ syscall_ECHONL = 0x10
+ syscall_ICANON = 0x100
+ syscall_ISIG = 0x80
+ syscall_IEXTEN = 0x400
+ syscall_CSIZE = 0x300
+ syscall_PARENB = 0x1000
+ syscall_CS8 = 0x300
+ syscall_VMIN = 0x10
+ syscall_VTIME = 0x11
+
+ syscall_TCGETS = 0x402c7413
+ syscall_TCSETS = 0x802c7414
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_linux.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_linux.go
new file mode 100644
index 00000000000..b88960de617
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_linux.go
@@ -0,0 +1,33 @@
+// Created by cgo -godefs - DO NOT EDIT
+// cgo -godefs syscalls.go
+
+package termbox
+
+import "syscall"
+
+type syscall_Termios syscall.Termios
+
+const (
+ syscall_IGNBRK = syscall.IGNBRK
+ syscall_BRKINT = syscall.BRKINT
+ syscall_PARMRK = syscall.PARMRK
+ syscall_ISTRIP = syscall.ISTRIP
+ syscall_INLCR = syscall.INLCR
+ syscall_IGNCR = syscall.IGNCR
+ syscall_ICRNL = syscall.ICRNL
+ syscall_IXON = syscall.IXON
+ syscall_OPOST = syscall.OPOST
+ syscall_ECHO = syscall.ECHO
+ syscall_ECHONL = syscall.ECHONL
+ syscall_ICANON = syscall.ICANON
+ syscall_ISIG = syscall.ISIG
+ syscall_IEXTEN = syscall.IEXTEN
+ syscall_CSIZE = syscall.CSIZE
+ syscall_PARENB = syscall.PARENB
+ syscall_CS8 = syscall.CS8
+ syscall_VMIN = syscall.VMIN
+ syscall_VTIME = syscall.VTIME
+
+ syscall_TCGETS = syscall.TCGETS
+ syscall_TCSETS = syscall.TCSETS
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_netbsd.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_netbsd.go
new file mode 100644
index 00000000000..49a3355b9a2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_netbsd.go
@@ -0,0 +1,39 @@
+// Created by cgo -godefs - DO NOT EDIT
+// cgo -godefs syscalls.go
+
+package termbox
+
+type syscall_Termios struct {
+ Iflag uint32
+ Oflag uint32
+ Cflag uint32
+ Lflag uint32
+ Cc [20]uint8
+ Ispeed int32
+ Ospeed int32
+}
+
+const (
+ syscall_IGNBRK = 0x1
+ syscall_BRKINT = 0x2
+ syscall_PARMRK = 0x8
+ syscall_ISTRIP = 0x20
+ syscall_INLCR = 0x40
+ syscall_IGNCR = 0x80
+ syscall_ICRNL = 0x100
+ syscall_IXON = 0x200
+ syscall_OPOST = 0x1
+ syscall_ECHO = 0x8
+ syscall_ECHONL = 0x10
+ syscall_ICANON = 0x100
+ syscall_ISIG = 0x80
+ syscall_IEXTEN = 0x400
+ syscall_CSIZE = 0x300
+ syscall_PARENB = 0x1000
+ syscall_CS8 = 0x300
+ syscall_VMIN = 0x10
+ syscall_VTIME = 0x11
+
+ syscall_TCGETS = 0x402c7413
+ syscall_TCSETS = 0x802c7414
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_openbsd.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_openbsd.go
new file mode 100644
index 00000000000..49a3355b9a2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_openbsd.go
@@ -0,0 +1,39 @@
+// Created by cgo -godefs - DO NOT EDIT
+// cgo -godefs syscalls.go
+
+package termbox
+
+type syscall_Termios struct {
+ Iflag uint32
+ Oflag uint32
+ Cflag uint32
+ Lflag uint32
+ Cc [20]uint8
+ Ispeed int32
+ Ospeed int32
+}
+
+const (
+ syscall_IGNBRK = 0x1
+ syscall_BRKINT = 0x2
+ syscall_PARMRK = 0x8
+ syscall_ISTRIP = 0x20
+ syscall_INLCR = 0x40
+ syscall_IGNCR = 0x80
+ syscall_ICRNL = 0x100
+ syscall_IXON = 0x200
+ syscall_OPOST = 0x1
+ syscall_ECHO = 0x8
+ syscall_ECHONL = 0x10
+ syscall_ICANON = 0x100
+ syscall_ISIG = 0x80
+ syscall_IEXTEN = 0x400
+ syscall_CSIZE = 0x300
+ syscall_PARENB = 0x1000
+ syscall_CS8 = 0x300
+ syscall_VMIN = 0x10
+ syscall_VTIME = 0x11
+
+ syscall_TCGETS = 0x402c7413
+ syscall_TCSETS = 0x802c7414
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_windows.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_windows.go
new file mode 100644
index 00000000000..472d002a56a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/syscalls_windows.go
@@ -0,0 +1,61 @@
+// Created by cgo -godefs - DO NOT EDIT
+// cgo -godefs -- -DUNICODE syscalls.go
+
+package termbox
+
+const (
+ foreground_blue = 0x1
+ foreground_green = 0x2
+ foreground_red = 0x4
+ foreground_intensity = 0x8
+ background_blue = 0x10
+ background_green = 0x20
+ background_red = 0x40
+ background_intensity = 0x80
+ std_input_handle = -0xa
+ std_output_handle = -0xb
+ key_event = 0x1
+ mouse_event = 0x2
+ window_buffer_size_event = 0x4
+ enable_window_input = 0x8
+ enable_mouse_input = 0x10
+ enable_extended_flags = 0x80
+
+ vk_f1 = 0x70
+ vk_f2 = 0x71
+ vk_f3 = 0x72
+ vk_f4 = 0x73
+ vk_f5 = 0x74
+ vk_f6 = 0x75
+ vk_f7 = 0x76
+ vk_f8 = 0x77
+ vk_f9 = 0x78
+ vk_f10 = 0x79
+ vk_f11 = 0x7a
+ vk_f12 = 0x7b
+ vk_insert = 0x2d
+ vk_delete = 0x2e
+ vk_home = 0x24
+ vk_end = 0x23
+ vk_pgup = 0x21
+ vk_pgdn = 0x22
+ vk_arrow_up = 0x26
+ vk_arrow_down = 0x28
+ vk_arrow_left = 0x25
+ vk_arrow_right = 0x27
+ vk_backspace = 0x8
+ vk_tab = 0x9
+ vk_enter = 0xd
+ vk_esc = 0x1b
+ vk_space = 0x20
+
+ left_alt_pressed = 0x2
+ left_ctrl_pressed = 0x8
+ right_alt_pressed = 0x1
+ right_ctrl_pressed = 0x4
+ shift_pressed = 0x10
+
+ generic_read = 0x80000000
+ generic_write = 0x40000000
+ console_textmode_buffer = 0x1
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox.go
new file mode 100644
index 00000000000..6e5ba6c8fa5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox.go
@@ -0,0 +1,514 @@
+// +build !windows
+
+package termbox
+
+import "unicode/utf8"
+import "bytes"
+import "syscall"
+import "unsafe"
+import "strings"
+import "strconv"
+import "os"
+import "io"
+
+// private API
+
+const (
+ t_enter_ca = iota
+ t_exit_ca
+ t_show_cursor
+ t_hide_cursor
+ t_clear_screen
+ t_sgr0
+ t_underline
+ t_bold
+ t_blink
+ t_reverse
+ t_enter_keypad
+ t_exit_keypad
+ t_enter_mouse
+ t_exit_mouse
+ t_max_funcs
+)
+
+const (
+ coord_invalid = -2
+ attr_invalid = Attribute(0xFFFF)
+)
+
+type input_event struct {
+ data []byte
+ err error
+}
+
+var (
+ // term specific sequences
+ keys []string
+ funcs []string
+
+ // termbox inner state
+ orig_tios syscall_Termios
+ back_buffer cellbuf
+ front_buffer cellbuf
+ termw int
+ termh int
+ input_mode = InputEsc
+ output_mode = OutputNormal
+ out *os.File
+ in int
+ lastfg = attr_invalid
+ lastbg = attr_invalid
+ lastx = coord_invalid
+ lasty = coord_invalid
+ cursor_x = cursor_hidden
+ cursor_y = cursor_hidden
+ foreground = ColorDefault
+ background = ColorDefault
+ inbuf = make([]byte, 0, 64)
+ outbuf bytes.Buffer
+ sigwinch = make(chan os.Signal, 1)
+ sigio = make(chan os.Signal, 1)
+ quit = make(chan int)
+ input_comm = make(chan input_event)
+ interrupt_comm = make(chan struct{})
+ intbuf = make([]byte, 0, 16)
+
+ // grayscale indexes
+ grayscale = []Attribute{
+ 0, 17, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
+ 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 232,
+ }
+)
+
+func write_cursor(x, y int) {
+ outbuf.WriteString("\033[")
+ outbuf.Write(strconv.AppendUint(intbuf, uint64(y+1), 10))
+ outbuf.WriteString(";")
+ outbuf.Write(strconv.AppendUint(intbuf, uint64(x+1), 10))
+ outbuf.WriteString("H")
+}
+
+func write_sgr_fg(a Attribute) {
+ switch output_mode {
+ case Output256, Output216, OutputGrayscale:
+ outbuf.WriteString("\033[38;5;")
+ outbuf.Write(strconv.AppendUint(intbuf, uint64(a-1), 10))
+ outbuf.WriteString("m")
+ default:
+ outbuf.WriteString("\033[3")
+ outbuf.Write(strconv.AppendUint(intbuf, uint64(a-1), 10))
+ outbuf.WriteString("m")
+ }
+}
+
+func write_sgr_bg(a Attribute) {
+ switch output_mode {
+ case Output256, Output216, OutputGrayscale:
+ outbuf.WriteString("\033[48;5;")
+ outbuf.Write(strconv.AppendUint(intbuf, uint64(a-1), 10))
+ outbuf.WriteString("m")
+ default:
+ outbuf.WriteString("\033[4")
+ outbuf.Write(strconv.AppendUint(intbuf, uint64(a-1), 10))
+ outbuf.WriteString("m")
+ }
+}
+
+func write_sgr(fg, bg Attribute) {
+ switch output_mode {
+ case Output256, Output216, OutputGrayscale:
+ outbuf.WriteString("\033[38;5;")
+ outbuf.Write(strconv.AppendUint(intbuf, uint64(fg-1), 10))
+ outbuf.WriteString("m")
+ outbuf.WriteString("\033[48;5;")
+ outbuf.Write(strconv.AppendUint(intbuf, uint64(bg-1), 10))
+ outbuf.WriteString("m")
+ default:
+ outbuf.WriteString("\033[3")
+ outbuf.Write(strconv.AppendUint(intbuf, uint64(fg-1), 10))
+ outbuf.WriteString(";4")
+ outbuf.Write(strconv.AppendUint(intbuf, uint64(bg-1), 10))
+ outbuf.WriteString("m")
+ }
+}
+
+type winsize struct {
+ rows uint16
+ cols uint16
+ xpixels uint16
+ ypixels uint16
+}
+
+func get_term_size(fd uintptr) (int, int) {
+ var sz winsize
+ _, _, _ = syscall.Syscall(syscall.SYS_IOCTL,
+ fd, uintptr(syscall.TIOCGWINSZ), uintptr(unsafe.Pointer(&sz)))
+ return int(sz.cols), int(sz.rows)
+}
+
+func send_attr(fg, bg Attribute) {
+ if fg == lastfg && bg == lastbg {
+ return
+ }
+
+ outbuf.WriteString(funcs[t_sgr0])
+
+ var fgcol, bgcol Attribute
+
+ switch output_mode {
+ case Output256:
+ fgcol = fg & 0x1FF
+ bgcol = bg & 0x1FF
+ case Output216:
+ fgcol = fg & 0xFF
+ bgcol = bg & 0xFF
+ if fgcol > 216 {
+ fgcol = ColorDefault
+ }
+ if bgcol > 216 {
+ bgcol = ColorDefault
+ }
+ if fgcol != ColorDefault {
+ fgcol += 0x10
+ }
+ if bgcol != ColorDefault {
+ bgcol += 0x10
+ }
+ case OutputGrayscale:
+ fgcol = fg & 0x1F
+ bgcol = bg & 0x1F
+ if fgcol > 26 {
+ fgcol = ColorDefault
+ }
+ if bgcol > 26 {
+ bgcol = ColorDefault
+ }
+ if fgcol != ColorDefault {
+ fgcol = grayscale[fgcol]
+ }
+ if bgcol != ColorDefault {
+ bgcol = grayscale[bgcol]
+ }
+ default:
+ fgcol = fg & 0x0F
+ bgcol = bg & 0x0F
+ }
+
+ if fgcol != ColorDefault {
+ if bgcol != ColorDefault {
+ write_sgr(fgcol, bgcol)
+ } else {
+ write_sgr_fg(fgcol)
+ }
+ } else if bgcol != ColorDefault {
+ write_sgr_bg(bgcol)
+ }
+
+ if fg&AttrBold != 0 {
+ outbuf.WriteString(funcs[t_bold])
+ }
+ if bg&AttrBold != 0 {
+ outbuf.WriteString(funcs[t_blink])
+ }
+ if fg&AttrUnderline != 0 {
+ outbuf.WriteString(funcs[t_underline])
+ }
+ if fg&AttrReverse|bg&AttrReverse != 0 {
+ outbuf.WriteString(funcs[t_reverse])
+ }
+
+ lastfg, lastbg = fg, bg
+}
+
+func send_char(x, y int, ch rune) {
+ var buf [8]byte
+ n := utf8.EncodeRune(buf[:], ch)
+ if x-1 != lastx || y != lasty {
+ write_cursor(x, y)
+ }
+ lastx, lasty = x, y
+ outbuf.Write(buf[:n])
+}
+
+func flush() error {
+ _, err := io.Copy(out, &outbuf)
+ outbuf.Reset()
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func send_clear() error {
+ send_attr(foreground, background)
+ outbuf.WriteString(funcs[t_clear_screen])
+ if !is_cursor_hidden(cursor_x, cursor_y) {
+ write_cursor(cursor_x, cursor_y)
+ }
+
+	// We need to invalidate the cursor position too. These two vars are
+	// used only for a simple cursor-positioning optimization; the cursor
+	// may actually already be in the correct place, but discarding the
+	// optimization once gives us a simple solution for the case when the
+	// cursor has moved.
+ lastx = coord_invalid
+ lasty = coord_invalid
+
+ return flush()
+}
+
+func update_size_maybe() error {
+ w, h := get_term_size(out.Fd())
+ if w != termw || h != termh {
+ termw, termh = w, h
+ back_buffer.resize(termw, termh)
+ front_buffer.resize(termw, termh)
+ front_buffer.clear()
+ return send_clear()
+ }
+ return nil
+}
+
+func tcsetattr(fd uintptr, termios *syscall_Termios) error {
+ r, _, e := syscall.Syscall(syscall.SYS_IOCTL,
+ fd, uintptr(syscall_TCSETS), uintptr(unsafe.Pointer(termios)))
+ if r != 0 {
+ return os.NewSyscallError("SYS_IOCTL", e)
+ }
+ return nil
+}
+
+func tcgetattr(fd uintptr, termios *syscall_Termios) error {
+ r, _, e := syscall.Syscall(syscall.SYS_IOCTL,
+ fd, uintptr(syscall_TCGETS), uintptr(unsafe.Pointer(termios)))
+ if r != 0 {
+ return os.NewSyscallError("SYS_IOCTL", e)
+ }
+ return nil
+}
+
+func parse_mouse_event(event *Event, buf string) (int, bool) {
+ if strings.HasPrefix(buf, "\033[M") && len(buf) >= 6 {
+ // X10 mouse encoding, the simplest one
+ // \033 [ M Cb Cx Cy
+ b := buf[3] - 32
+ switch b & 3 {
+ case 0:
+ if b&64 != 0 {
+ event.Key = MouseWheelUp
+ } else {
+ event.Key = MouseLeft
+ }
+ case 1:
+ if b&64 != 0 {
+ event.Key = MouseWheelDown
+ } else {
+ event.Key = MouseMiddle
+ }
+ case 2:
+ event.Key = MouseRight
+ case 3:
+ event.Key = MouseRelease
+ default:
+ return 6, false
+ }
+ event.Type = EventMouse // KeyEvent by default
+ if b&32 != 0 {
+ event.Mod |= ModMotion
+ }
+
+ // the coord is 1,1 for upper left
+ event.MouseX = int(buf[4]) - 1 - 32
+ event.MouseY = int(buf[5]) - 1 - 32
+ return 6, true
+ } else if strings.HasPrefix(buf, "\033[<") || strings.HasPrefix(buf, "\033[") {
+ // xterm 1006 extended mode or urxvt 1015 extended mode
+ // xterm: \033 [ < Cb ; Cx ; Cy (M or m)
+ // urxvt: \033 [ Cb ; Cx ; Cy M
+
+ // find the first M or m, that's where we stop
+ mi := strings.IndexAny(buf, "Mm")
+ if mi == -1 {
+ return 0, false
+ }
+
+ // whether it's a capital M or not
+ isM := buf[mi] == 'M'
+
+ // whether it's urxvt or not
+ isU := false
+
+ // buf[2] is safe here, because having M or m found means we have at
+ // least 3 bytes in a string
+ if buf[2] == '<' {
+ buf = buf[3:mi]
+ } else {
+ isU = true
+ buf = buf[2:mi]
+ }
+
+ s1 := strings.Index(buf, ";")
+ s2 := strings.LastIndex(buf, ";")
+ // not found or only one ';'
+ if s1 == -1 || s2 == -1 || s1 == s2 {
+ return 0, false
+ }
+
+ n1, err := strconv.ParseInt(buf[0:s1], 10, 64)
+ if err != nil {
+ return 0, false
+ }
+ n2, err := strconv.ParseInt(buf[s1+1:s2], 10, 64)
+ if err != nil {
+ return 0, false
+ }
+ n3, err := strconv.ParseInt(buf[s2+1:], 10, 64)
+ if err != nil {
+ return 0, false
+ }
+
+	// on urxvt, the first number is encoded exactly as in X10, but we need to
+	// make it zero-based; on xterm it is zero-based already
+ if isU {
+ n1 -= 32
+ }
+ switch n1 & 3 {
+ case 0:
+ if n1&64 != 0 {
+ event.Key = MouseWheelUp
+ } else {
+ event.Key = MouseLeft
+ }
+ case 1:
+ if n1&64 != 0 {
+ event.Key = MouseWheelDown
+ } else {
+ event.Key = MouseMiddle
+ }
+ case 2:
+ event.Key = MouseRight
+ case 3:
+ event.Key = MouseRelease
+ default:
+ return mi + 1, false
+ }
+ if !isM {
+ // on xterm mouse release is signaled by lowercase m
+ event.Key = MouseRelease
+ }
+
+ event.Type = EventMouse // KeyEvent by default
+ if n1&32 != 0 {
+ event.Mod |= ModMotion
+ }
+
+ event.MouseX = int(n2) - 1
+ event.MouseY = int(n3) - 1
+ return mi + 1, true
+ }
+
+ return 0, false
+}
+
+func parse_escape_sequence(event *Event, buf []byte) (int, bool) {
+ bufstr := string(buf)
+ for i, key := range keys {
+ if strings.HasPrefix(bufstr, key) {
+ event.Ch = 0
+ event.Key = Key(0xFFFF - i)
+ return len(key), true
+ }
+ }
+
+	// if none of the keys match, let's try mouse sequences
+ return parse_mouse_event(event, bufstr)
+}
+
+func extract_raw_event(data []byte, event *Event) bool {
+ if len(inbuf) == 0 {
+ return false
+ }
+
+ n := len(data)
+ if n == 0 {
+ return false
+ }
+
+ n = copy(data, inbuf)
+ copy(inbuf, inbuf[n:])
+ inbuf = inbuf[:len(inbuf)-n]
+
+ event.N = n
+ event.Type = EventRaw
+ return true
+}
+
+func extract_event(inbuf []byte, event *Event) bool {
+ if len(inbuf) == 0 {
+ event.N = 0
+ return false
+ }
+
+ if inbuf[0] == '\033' {
+ // possible escape sequence
+ if n, ok := parse_escape_sequence(event, inbuf); n != 0 {
+ event.N = n
+ return ok
+ }
+
+ // it's not escape sequence, then it's Alt or Esc, check input_mode
+ switch {
+ case input_mode&InputEsc != 0:
+ // if we're in escape mode, fill Esc event, pop buffer, return success
+ event.Ch = 0
+ event.Key = KeyEsc
+ event.Mod = 0
+ event.N = 1
+ return true
+ case input_mode&InputAlt != 0:
+ // if we're in alt mode, set Alt modifier to event and redo parsing
+ event.Mod = ModAlt
+ ok := extract_event(inbuf[1:], event)
+ if ok {
+ event.N++
+ } else {
+ event.N = 0
+ }
+ return ok
+ default:
+ panic("unreachable")
+ }
+ }
+
+ // if we're here, this is not an escape sequence and not an alt sequence
+ // so, it's a FUNCTIONAL KEY or a UNICODE character
+
+ // first of all check if it's a functional key
+ if Key(inbuf[0]) <= KeySpace || Key(inbuf[0]) == KeyBackspace2 {
+ // fill event, pop buffer, return success
+ event.Ch = 0
+ event.Key = Key(inbuf[0])
+ event.N = 1
+ return true
+ }
+
+ // the only possible option is utf8 rune
+ if r, n := utf8.DecodeRune(inbuf); r != utf8.RuneError {
+ event.Ch = r
+ event.Key = 0
+ event.N = n
+ return true
+ }
+
+ return false
+}
+
+func fcntl(fd int, cmd int, arg int) (val int, err error) {
+ r, _, e := syscall.Syscall(syscall.SYS_FCNTL, uintptr(fd), uintptr(cmd),
+ uintptr(arg))
+ val = int(r)
+ if e != 0 {
+ err = e
+ }
+ return
+}
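
The X10 branch of parse_mouse_event above decodes `\033[M Cb Cx Cy`: the button bits come from Cb-32 and the 1-based coordinates from Cx-32 and Cy-32. A standalone sketch of that arithmetic (the decodeX10 helper is hypothetical, for illustration only):

```go
package main

import "fmt"

// decodeX10 mirrors the arithmetic of parse_mouse_event's "\033[M" branch:
// byte 3 carries the button bits, bytes 4 and 5 carry 1-based coordinates,
// all offset by 32.
func decodeX10(seq []byte) (button byte, x, y int, ok bool) {
	if len(seq) < 6 || seq[0] != 0x1b || seq[1] != '[' || seq[2] != 'M' {
		return 0, 0, 0, false
	}
	b := seq[3] - 32
	x = int(seq[4]) - 32 - 1 // convert to zero-based
	y = int(seq[5]) - 32 - 1
	return b & 3, x, y, true
}

func main() {
	// A left-button press at column 10, row 5 (zero-based) arrives as:
	seq := []byte{0x1b, '[', 'M', 32 + 0, 32 + 1 + 10, 32 + 1 + 5}
	fmt.Println(decodeX10(seq)) // 0 10 5 true
}
```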
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox_common.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox_common.go
new file mode 100644
index 00000000000..c3355cc25e3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox_common.go
@@ -0,0 +1,59 @@
+package termbox
+
+// private API, common OS agnostic part
+
+type cellbuf struct {
+ width int
+ height int
+ cells []Cell
+}
+
+func (this *cellbuf) init(width, height int) {
+ this.width = width
+ this.height = height
+ this.cells = make([]Cell, width*height)
+}
+
+func (this *cellbuf) resize(width, height int) {
+ if this.width == width && this.height == height {
+ return
+ }
+
+ oldw := this.width
+ oldh := this.height
+ oldcells := this.cells
+
+ this.init(width, height)
+ this.clear()
+
+ minw, minh := oldw, oldh
+
+ if width < minw {
+ minw = width
+ }
+ if height < minh {
+ minh = height
+ }
+
+ for i := 0; i < minh; i++ {
+ srco, dsto := i*oldw, i*width
+ src := oldcells[srco : srco+minw]
+ dst := this.cells[dsto : dsto+minw]
+ copy(dst, src)
+ }
+}
+
+func (this *cellbuf) clear() {
+ for i := range this.cells {
+ c := &this.cells[i]
+ c.Ch = ' '
+ c.Fg = foreground
+ c.Bg = background
+ }
+}
+
+const cursor_hidden = -1
+
+func is_cursor_hidden(x, y int) bool {
+ return x == cursor_hidden || y == cursor_hidden
+}
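
cellbuf.resize above keeps the overlapping top-left region when the terminal changes size. A stripped-down standalone restatement of that copy loop (runes instead of Cells; illustrative only):

```go
package main

import "fmt"

// resize mimics cellbuf.resize's row-by-row copy: the overlapping top-left
// region is preserved, everything else is reset to blanks.
func resize(cells []rune, oldw, oldh, w, h int) []rune {
	out := make([]rune, w*h)
	for i := range out {
		out[i] = ' '
	}
	minw, minh := oldw, oldh
	if w < minw {
		minw = w
	}
	if h < minh {
		minh = h
	}
	for y := 0; y < minh; y++ {
		copy(out[y*w:y*w+minw], cells[y*oldw:y*oldw+minw])
	}
	return out
}

func main() {
	old := []rune("abcdef") // a 3x2 buffer: "abc" over "def"
	fmt.Printf("%q\n", string(resize(old, 3, 2, 2, 3))) // "abde  "
}
```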
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox_windows.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox_windows.go
new file mode 100644
index 00000000000..f7dad7b8a5f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/termbox_windows.go
@@ -0,0 +1,856 @@
+package termbox
+
+import "syscall"
+import "unsafe"
+import "unicode/utf16"
+import "github.com/mattn/go-runewidth"
+
+type (
+ wchar uint16
+ short int16
+ dword uint32
+ word uint16
+ char_info struct {
+ char wchar
+ attr word
+ }
+ coord struct {
+ x short
+ y short
+ }
+ small_rect struct {
+ left short
+ top short
+ right short
+ bottom short
+ }
+ console_screen_buffer_info struct {
+ size coord
+ cursor_position coord
+ attributes word
+ window small_rect
+ maximum_window_size coord
+ }
+ console_cursor_info struct {
+ size dword
+ visible int32
+ }
+ input_record struct {
+ event_type word
+ _ [2]byte
+ event [16]byte
+ }
+ key_event_record struct {
+ key_down int32
+ repeat_count word
+ virtual_key_code word
+ virtual_scan_code word
+ unicode_char wchar
+ control_key_state dword
+ }
+ window_buffer_size_record struct {
+ size coord
+ }
+ mouse_event_record struct {
+ mouse_pos coord
+ button_state dword
+ control_key_state dword
+ event_flags dword
+ }
+)
+
+const (
+ mouse_lmb = 0x1
+ mouse_rmb = 0x2
+ mouse_mmb = 0x4 | 0x8 | 0x10
+)
+
+func (this coord) uintptr() uintptr {
+ return uintptr(*(*int32)(unsafe.Pointer(&this)))
+}
+
+var kernel32 = syscall.NewLazyDLL("kernel32.dll")
+var is_cjk = runewidth.IsEastAsian()
+
+var (
+ proc_set_console_active_screen_buffer = kernel32.NewProc("SetConsoleActiveScreenBuffer")
+ proc_set_console_screen_buffer_size = kernel32.NewProc("SetConsoleScreenBufferSize")
+ proc_create_console_screen_buffer = kernel32.NewProc("CreateConsoleScreenBuffer")
+ proc_get_console_screen_buffer_info = kernel32.NewProc("GetConsoleScreenBufferInfo")
+ proc_write_console_output = kernel32.NewProc("WriteConsoleOutputW")
+ proc_write_console_output_character = kernel32.NewProc("WriteConsoleOutputCharacterW")
+ proc_write_console_output_attribute = kernel32.NewProc("WriteConsoleOutputAttribute")
+ proc_set_console_cursor_info = kernel32.NewProc("SetConsoleCursorInfo")
+ proc_set_console_cursor_position = kernel32.NewProc("SetConsoleCursorPosition")
+ proc_get_console_cursor_info = kernel32.NewProc("GetConsoleCursorInfo")
+ proc_read_console_input = kernel32.NewProc("ReadConsoleInputW")
+ proc_get_console_mode = kernel32.NewProc("GetConsoleMode")
+ proc_set_console_mode = kernel32.NewProc("SetConsoleMode")
+ proc_fill_console_output_character = kernel32.NewProc("FillConsoleOutputCharacterW")
+ proc_fill_console_output_attribute = kernel32.NewProc("FillConsoleOutputAttribute")
+ proc_create_event = kernel32.NewProc("CreateEventW")
+ proc_wait_for_multiple_objects = kernel32.NewProc("WaitForMultipleObjects")
+ proc_set_event = kernel32.NewProc("SetEvent")
+)
+
+func set_console_active_screen_buffer(h syscall.Handle) (err error) {
+ r0, _, e1 := syscall.Syscall(proc_set_console_active_screen_buffer.Addr(),
+ 1, uintptr(h), 0, 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func set_console_screen_buffer_size(h syscall.Handle, size coord) (err error) {
+ r0, _, e1 := syscall.Syscall(proc_set_console_screen_buffer_size.Addr(),
+ 2, uintptr(h), size.uintptr(), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func create_console_screen_buffer() (h syscall.Handle, err error) {
+ r0, _, e1 := syscall.Syscall6(proc_create_console_screen_buffer.Addr(),
+ 5, uintptr(generic_read|generic_write), 0, 0, console_textmode_buffer, 0, 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return syscall.Handle(r0), err
+}
+
+func get_console_screen_buffer_info(h syscall.Handle, info *console_screen_buffer_info) (err error) {
+ r0, _, e1 := syscall.Syscall(proc_get_console_screen_buffer_info.Addr(),
+ 2, uintptr(h), uintptr(unsafe.Pointer(info)), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func write_console_output(h syscall.Handle, chars []char_info, dst small_rect) (err error) {
+ tmp_coord = coord{dst.right - dst.left + 1, dst.bottom - dst.top + 1}
+ tmp_rect = dst
+ r0, _, e1 := syscall.Syscall6(proc_write_console_output.Addr(),
+ 5, uintptr(h), uintptr(unsafe.Pointer(&chars[0])), tmp_coord.uintptr(),
+ tmp_coord0.uintptr(), uintptr(unsafe.Pointer(&tmp_rect)), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func write_console_output_character(h syscall.Handle, chars []wchar, pos coord) (err error) {
+ r0, _, e1 := syscall.Syscall6(proc_write_console_output_character.Addr(),
+ 5, uintptr(h), uintptr(unsafe.Pointer(&chars[0])), uintptr(len(chars)),
+ pos.uintptr(), uintptr(unsafe.Pointer(&tmp_arg)), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func write_console_output_attribute(h syscall.Handle, attrs []word, pos coord) (err error) {
+ r0, _, e1 := syscall.Syscall6(proc_write_console_output_attribute.Addr(),
+ 5, uintptr(h), uintptr(unsafe.Pointer(&attrs[0])), uintptr(len(attrs)),
+ pos.uintptr(), uintptr(unsafe.Pointer(&tmp_arg)), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func set_console_cursor_info(h syscall.Handle, info *console_cursor_info) (err error) {
+ r0, _, e1 := syscall.Syscall(proc_set_console_cursor_info.Addr(),
+ 2, uintptr(h), uintptr(unsafe.Pointer(info)), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func get_console_cursor_info(h syscall.Handle, info *console_cursor_info) (err error) {
+ r0, _, e1 := syscall.Syscall(proc_get_console_cursor_info.Addr(),
+ 2, uintptr(h), uintptr(unsafe.Pointer(info)), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func set_console_cursor_position(h syscall.Handle, pos coord) (err error) {
+ r0, _, e1 := syscall.Syscall(proc_set_console_cursor_position.Addr(),
+ 2, uintptr(h), pos.uintptr(), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func read_console_input(h syscall.Handle, record *input_record) (err error) {
+ r0, _, e1 := syscall.Syscall6(proc_read_console_input.Addr(),
+ 4, uintptr(h), uintptr(unsafe.Pointer(record)), 1, uintptr(unsafe.Pointer(&tmp_arg)), 0, 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func get_console_mode(h syscall.Handle, mode *dword) (err error) {
+ r0, _, e1 := syscall.Syscall(proc_get_console_mode.Addr(),
+ 2, uintptr(h), uintptr(unsafe.Pointer(mode)), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func set_console_mode(h syscall.Handle, mode dword) (err error) {
+ r0, _, e1 := syscall.Syscall(proc_set_console_mode.Addr(),
+ 2, uintptr(h), uintptr(mode), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func fill_console_output_character(h syscall.Handle, char wchar, n int) (err error) {
+ r0, _, e1 := syscall.Syscall6(proc_fill_console_output_character.Addr(),
+ 5, uintptr(h), uintptr(char), uintptr(n), tmp_coord.uintptr(),
+ uintptr(unsafe.Pointer(&tmp_arg)), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func fill_console_output_attribute(h syscall.Handle, attr word, n int) (err error) {
+ r0, _, e1 := syscall.Syscall6(proc_fill_console_output_attribute.Addr(),
+ 5, uintptr(h), uintptr(attr), uintptr(n), tmp_coord.uintptr(),
+ uintptr(unsafe.Pointer(&tmp_arg)), 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func create_event() (out syscall.Handle, err error) {
+ r0, _, e1 := syscall.Syscall6(proc_create_event.Addr(),
+ 4, 0, 0, 0, 0, 0, 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return syscall.Handle(r0), err
+}
+
+func wait_for_multiple_objects(objects []syscall.Handle) (err error) {
+ r0, _, e1 := syscall.Syscall6(proc_wait_for_multiple_objects.Addr(),
+ 4, uintptr(len(objects)), uintptr(unsafe.Pointer(&objects[0])),
+ 0, 0xFFFFFFFF, 0, 0)
+ if uint32(r0) == 0xFFFFFFFF {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func set_event(ev syscall.Handle) (err error) {
+ r0, _, e1 := syscall.Syscall(proc_set_event.Addr(),
+ 1, uintptr(ev), 0, 0)
+ if int(r0) == 0 {
+ if e1 != 0 {
+ err = error(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+type diff_msg struct {
+ pos short
+ lines short
+ chars []char_info
+}
+
+type input_event struct {
+ event Event
+ err error
+}
+
+var (
+ orig_cursor_info console_cursor_info
+ orig_size coord
+ orig_mode dword
+ orig_screen syscall.Handle
+ back_buffer cellbuf
+ front_buffer cellbuf
+ term_size coord
+ input_mode = InputEsc
+ cursor_x = cursor_hidden
+ cursor_y = cursor_hidden
+ foreground = ColorDefault
+ background = ColorDefault
+ in syscall.Handle
+ out syscall.Handle
+ interrupt syscall.Handle
+ charbuf []char_info
+ diffbuf []diff_msg
+ beg_x = -1
+ beg_y = -1
+ beg_i = -1
+ input_comm = make(chan Event)
+ interrupt_comm = make(chan struct{})
+ cancel_comm = make(chan bool, 1)
+ cancel_done_comm = make(chan bool)
+ alt_mode_esc = false
+
+ // these ones just to prevent heap allocs at all costs
+ tmp_info console_screen_buffer_info
+ tmp_arg dword
+ tmp_coord0 = coord{0, 0}
+ tmp_coord = coord{0, 0}
+ tmp_rect = small_rect{0, 0, 0, 0}
+)
+
+func get_cursor_position(out syscall.Handle) coord {
+ err := get_console_screen_buffer_info(out, &tmp_info)
+ if err != nil {
+ panic(err)
+ }
+ return tmp_info.cursor_position
+}
+
+func get_term_size(out syscall.Handle) coord {
+ err := get_console_screen_buffer_info(out, &tmp_info)
+ if err != nil {
+ panic(err)
+ }
+ return tmp_info.size
+}
+
+func get_win_size(out syscall.Handle) coord {
+ err := get_console_screen_buffer_info(out, &tmp_info)
+ if err != nil {
+ panic(err)
+ }
+ return coord{
+ x: tmp_info.window.right - tmp_info.window.left + 1,
+ y: tmp_info.window.bottom - tmp_info.window.top + 1,
+ }
+}
+
+func update_size_maybe() {
+ size := get_term_size(out)
+ if size.x != term_size.x || size.y != term_size.y {
+ term_size = size
+ back_buffer.resize(int(size.x), int(size.y))
+ front_buffer.resize(int(size.x), int(size.y))
+ front_buffer.clear()
+ clear()
+
+ area := int(size.x) * int(size.y)
+ if cap(charbuf) < area {
+ charbuf = make([]char_info, 0, area)
+ }
+ }
+}
+
+var color_table_bg = []word{
+ 0, // default (black)
+ 0, // black
+ background_red,
+ background_green,
+ background_red | background_green, // yellow
+ background_blue,
+ background_red | background_blue, // magenta
+ background_green | background_blue, // cyan
+ background_red | background_blue | background_green, // white
+}
+
+var color_table_fg = []word{
+ foreground_red | foreground_blue | foreground_green, // default (white)
+ 0,
+ foreground_red,
+ foreground_green,
+ foreground_red | foreground_green, // yellow
+ foreground_blue,
+ foreground_red | foreground_blue, // magenta
+ foreground_green | foreground_blue, // cyan
+ foreground_red | foreground_blue | foreground_green, // white
+}
+
+const (
+ replacement_char = '\uFFFD'
+ max_rune = '\U0010FFFF'
+ surr1 = 0xd800
+ surr2 = 0xdc00
+ surr3 = 0xe000
+ surr_self = 0x10000
+)
+
+func append_diff_line(y int) int {
+ n := 0
+ for x := 0; x < front_buffer.width; {
+ cell_offset := y*front_buffer.width + x
+ back := &back_buffer.cells[cell_offset]
+ front := &front_buffer.cells[cell_offset]
+ attr, char := cell_to_char_info(*back)
+ charbuf = append(charbuf, char_info{attr: attr, char: char[0]})
+ *front = *back
+ n++
+ w := runewidth.RuneWidth(back.Ch)
+ if w == 0 || w == 2 && runewidth.IsAmbiguousWidth(back.Ch) {
+ w = 1
+ }
+ x += w
+ // If not CJK, fill trailing space with whitespace
+ if !is_cjk && w == 2 {
+ charbuf = append(charbuf, char_info{attr: attr, char: ' '})
+ }
+ }
+ return n
+}
+
+// compares 'back_buffer' with 'front_buffer' and prepares all changes in the
+// form of 'diff_msg's in 'diffbuf'
+func prepare_diff_messages() {
+ // clear buffers
+ diffbuf = diffbuf[:0]
+ charbuf = charbuf[:0]
+
+ var diff diff_msg
+ gbeg := 0
+ for y := 0; y < front_buffer.height; y++ {
+ same := true
+ line_offset := y * front_buffer.width
+ for x := 0; x < front_buffer.width; x++ {
+ cell_offset := line_offset + x
+ back := &back_buffer.cells[cell_offset]
+ front := &front_buffer.cells[cell_offset]
+ if *back != *front {
+ same = false
+ break
+ }
+ }
+ if same && diff.lines > 0 {
+ diffbuf = append(diffbuf, diff)
+ diff = diff_msg{}
+ }
+ if !same {
+ beg := len(charbuf)
+ end := beg + append_diff_line(y)
+ if diff.lines == 0 {
+ diff.pos = short(y)
+ gbeg = beg
+ }
+ diff.lines++
+ diff.chars = charbuf[gbeg:end]
+ }
+ }
+ if diff.lines > 0 {
+ diffbuf = append(diffbuf, diff)
+ diff = diff_msg{}
+ }
+}
+
+func get_ct(table []word, idx int) word {
+ idx = idx & 0x0F
+ if idx >= len(table) {
+ idx = len(table) - 1
+ }
+ return table[idx]
+}
+
+func cell_to_char_info(c Cell) (attr word, wc [2]wchar) {
+ attr = get_ct(color_table_fg, int(c.Fg)) | get_ct(color_table_bg, int(c.Bg))
+ if c.Fg&AttrReverse|c.Bg&AttrReverse != 0 {
+ attr = (attr&0xF0)>>4 | (attr&0x0F)<<4
+ }
+ if c.Fg&AttrBold != 0 {
+ attr |= foreground_intensity
+ }
+ if c.Bg&AttrBold != 0 {
+ attr |= background_intensity
+ }
+
+ r0, r1 := utf16.EncodeRune(c.Ch)
+ if r0 == 0xFFFD {
+ wc[0] = wchar(c.Ch)
+ wc[1] = ' '
+ } else {
+ wc[0] = wchar(r0)
+ wc[1] = wchar(r1)
+ }
+ return
+}
+
+func move_cursor(x, y int) {
+ err := set_console_cursor_position(out, coord{short(x), short(y)})
+ if err != nil {
+ panic(err)
+ }
+}
+
+func show_cursor(visible bool) {
+ var v int32
+ if visible {
+ v = 1
+ }
+
+ var info console_cursor_info
+ info.size = 100
+ info.visible = v
+ err := set_console_cursor_info(out, &info)
+ if err != nil {
+ panic(err)
+ }
+}
+
+func clear() {
+ var err error
+ attr, char := cell_to_char_info(Cell{
+ ' ',
+ foreground,
+ background,
+ })
+
+ area := int(term_size.x) * int(term_size.y)
+ err = fill_console_output_attribute(out, attr, area)
+ if err != nil {
+ panic(err)
+ }
+ err = fill_console_output_character(out, char[0], area)
+ if err != nil {
+ panic(err)
+ }
+ if !is_cursor_hidden(cursor_x, cursor_y) {
+ move_cursor(cursor_x, cursor_y)
+ }
+}
+
+func key_event_record_to_event(r *key_event_record) (Event, bool) {
+ if r.key_down == 0 {
+ return Event{}, false
+ }
+
+ e := Event{Type: EventKey}
+ if input_mode&InputAlt != 0 {
+ if alt_mode_esc {
+ e.Mod = ModAlt
+ alt_mode_esc = false
+ }
+ if r.control_key_state&(left_alt_pressed|right_alt_pressed) != 0 {
+ e.Mod = ModAlt
+ }
+ }
+
+ ctrlpressed := r.control_key_state&(left_ctrl_pressed|right_ctrl_pressed) != 0
+
+ if r.virtual_key_code >= vk_f1 && r.virtual_key_code <= vk_f12 {
+ switch r.virtual_key_code {
+ case vk_f1:
+ e.Key = KeyF1
+ case vk_f2:
+ e.Key = KeyF2
+ case vk_f3:
+ e.Key = KeyF3
+ case vk_f4:
+ e.Key = KeyF4
+ case vk_f5:
+ e.Key = KeyF5
+ case vk_f6:
+ e.Key = KeyF6
+ case vk_f7:
+ e.Key = KeyF7
+ case vk_f8:
+ e.Key = KeyF8
+ case vk_f9:
+ e.Key = KeyF9
+ case vk_f10:
+ e.Key = KeyF10
+ case vk_f11:
+ e.Key = KeyF11
+ case vk_f12:
+ e.Key = KeyF12
+ default:
+ panic("unreachable")
+ }
+
+ return e, true
+ }
+
+ if r.virtual_key_code <= vk_delete {
+ switch r.virtual_key_code {
+ case vk_insert:
+ e.Key = KeyInsert
+ case vk_delete:
+ e.Key = KeyDelete
+ case vk_home:
+ e.Key = KeyHome
+ case vk_end:
+ e.Key = KeyEnd
+ case vk_pgup:
+ e.Key = KeyPgup
+ case vk_pgdn:
+ e.Key = KeyPgdn
+ case vk_arrow_up:
+ e.Key = KeyArrowUp
+ case vk_arrow_down:
+ e.Key = KeyArrowDown
+ case vk_arrow_left:
+ e.Key = KeyArrowLeft
+ case vk_arrow_right:
+ e.Key = KeyArrowRight
+ case vk_backspace:
+ if ctrlpressed {
+ e.Key = KeyBackspace2
+ } else {
+ e.Key = KeyBackspace
+ }
+ case vk_tab:
+ e.Key = KeyTab
+ case vk_enter:
+ e.Key = KeyEnter
+ case vk_esc:
+ switch {
+ case input_mode&InputEsc != 0:
+ e.Key = KeyEsc
+ case input_mode&InputAlt != 0:
+ alt_mode_esc = true
+ return Event{}, false
+ }
+ case vk_space:
+ if ctrlpressed {
+ // manual return here, because KeyCtrlSpace is zero
+ e.Key = KeyCtrlSpace
+ return e, true
+ } else {
+ e.Key = KeySpace
+ }
+ }
+
+ if e.Key != 0 {
+ return e, true
+ }
+ }
+
+ if ctrlpressed {
+ if Key(r.unicode_char) >= KeyCtrlA && Key(r.unicode_char) <= KeyCtrlRsqBracket {
+ e.Key = Key(r.unicode_char)
+ if input_mode&InputAlt != 0 && e.Key == KeyEsc {
+ alt_mode_esc = true
+ return Event{}, false
+ }
+ return e, true
+ }
+ switch r.virtual_key_code {
+ case 192, 50:
+ // manual return here, because KeyCtrl2 is zero
+ e.Key = KeyCtrl2
+ return e, true
+ case 51:
+ if input_mode&InputAlt != 0 {
+ alt_mode_esc = true
+ return Event{}, false
+ }
+ e.Key = KeyCtrl3
+ case 52:
+ e.Key = KeyCtrl4
+ case 53:
+ e.Key = KeyCtrl5
+ case 54:
+ e.Key = KeyCtrl6
+ case 189, 191, 55:
+ e.Key = KeyCtrl7
+ case 8, 56:
+ e.Key = KeyCtrl8
+ }
+
+ if e.Key != 0 {
+ return e, true
+ }
+ }
+
+ if r.unicode_char != 0 {
+ e.Ch = rune(r.unicode_char)
+ return e, true
+ }
+
+ return Event{}, false
+}
+
+func input_event_producer() {
+ var r input_record
+ var err error
+ var last_button Key
+ var last_button_pressed Key
+ var last_state = dword(0)
+ var last_x, last_y = -1, -1
+ handles := []syscall.Handle{in, interrupt}
+ for {
+ err = wait_for_multiple_objects(handles)
+ if err != nil {
+ input_comm <- Event{Type: EventError, Err: err}
+ }
+
+ select {
+ case <-cancel_comm:
+ cancel_done_comm <- true
+ return
+ default:
+ }
+
+ err = read_console_input(in, &r)
+ if err != nil {
+ input_comm <- Event{Type: EventError, Err: err}
+ }
+
+ switch r.event_type {
+ case key_event:
+ kr := (*key_event_record)(unsafe.Pointer(&r.event))
+ ev, ok := key_event_record_to_event(kr)
+ if ok {
+ for i := 0; i < int(kr.repeat_count); i++ {
+ input_comm <- ev
+ }
+ }
+ case window_buffer_size_event:
+ sr := *(*window_buffer_size_record)(unsafe.Pointer(&r.event))
+ input_comm <- Event{
+ Type: EventResize,
+ Width: int(sr.size.x),
+ Height: int(sr.size.y),
+ }
+ case mouse_event:
+ mr := *(*mouse_event_record)(unsafe.Pointer(&r.event))
+ ev := Event{Type: EventMouse}
+ switch mr.event_flags {
+ case 0, 2:
+ // single or double click
+ cur_state := mr.button_state
+ switch {
+ case last_state&mouse_lmb == 0 && cur_state&mouse_lmb != 0:
+ last_button = MouseLeft
+ last_button_pressed = last_button
+ case last_state&mouse_rmb == 0 && cur_state&mouse_rmb != 0:
+ last_button = MouseRight
+ last_button_pressed = last_button
+ case last_state&mouse_mmb == 0 && cur_state&mouse_mmb != 0:
+ last_button = MouseMiddle
+ last_button_pressed = last_button
+ case last_state&mouse_lmb != 0 && cur_state&mouse_lmb == 0:
+ last_button = MouseRelease
+ case last_state&mouse_rmb != 0 && cur_state&mouse_rmb == 0:
+ last_button = MouseRelease
+ case last_state&mouse_mmb != 0 && cur_state&mouse_mmb == 0:
+ last_button = MouseRelease
+ default:
+ last_state = cur_state
+ continue
+ }
+ last_state = cur_state
+ ev.Key = last_button
+ last_x, last_y = int(mr.mouse_pos.x), int(mr.mouse_pos.y)
+ ev.MouseX = last_x
+ ev.MouseY = last_y
+ case 1:
+ // mouse motion
+ x, y := int(mr.mouse_pos.x), int(mr.mouse_pos.y)
+ if last_state != 0 && (last_x != x || last_y != y) {
+ ev.Key = last_button_pressed
+ ev.Mod = ModMotion
+ ev.MouseX = x
+ ev.MouseY = y
+ last_x, last_y = x, y
+ } else {
+ ev.Type = EventNone
+ }
+ case 4:
+ // mouse wheel
+ n := int16(mr.button_state >> 16)
+ if n > 0 {
+ ev.Key = MouseWheelUp
+ } else {
+ ev.Key = MouseWheelDown
+ }
+ last_x, last_y = int(mr.mouse_pos.x), int(mr.mouse_pos.y)
+ ev.MouseX = last_x
+ ev.MouseY = last_y
+ default:
+ ev.Type = EventNone
+ }
+ if ev.Type != EventNone {
+ input_comm <- ev
+ }
+ }
+ }
+}
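
cell_to_char_info above packs a Cell's colours into a Windows console attribute word: the low nibble holds the foreground bits, the high nibble the background, and AttrReverse is handled by swapping the two nibbles. A small sketch of that swap (the names below are chosen for illustration only):

```go
package main

import "fmt"

// reverseAttr is the nibble swap used for AttrReverse in cell_to_char_info.
func reverseAttr(attr uint16) uint16 {
	return (attr&0xF0)>>4 | (attr&0x0F)<<4
}

func main() {
	const fgRed, bgBlue = 0x4, 0x10 // foreground_red, background_blue above
	attr := uint16(fgRed | bgBlue)  // red text on a blue background
	fmt.Printf("%#x -> %#x\n", attr, reverseAttr(attr)) // 0x14 -> 0x41
}
```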
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/terminfo.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/terminfo.go
new file mode 100644
index 00000000000..35dbd70b894
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/terminfo.go
@@ -0,0 +1,221 @@
+// +build !windows
+// This file contains a simple and incomplete implementation of the terminfo
+// database. Information was taken from the ncurses manpages term(5) and
+// terminfo(5). Currently, only the string capabilities for special keys and for
+// functions without parameters are actually used. Colors are still done with
+// ANSI escape sequences. Other special features that are not (yet?) supported
+// are reading from ~/.terminfo, the TERMINFO_DIRS variable, Berkeley database
+// format and extended capabilities.
+
+package termbox
+
+import (
+ "bytes"
+ "encoding/binary"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "strings"
+)
+
+const (
+ ti_magic = 0432
+ ti_header_length = 12
+ ti_mouse_enter = "\x1b[?1000h\x1b[?1002h\x1b[?1015h\x1b[?1006h"
+ ti_mouse_leave = "\x1b[?1006l\x1b[?1015l\x1b[?1002l\x1b[?1000l"
+)
+
+func load_terminfo() ([]byte, error) {
+ var data []byte
+ var err error
+
+ term := os.Getenv("TERM")
+ if term == "" {
+ return nil, fmt.Errorf("termbox: TERM not set")
+ }
+
+ // The following behaviour follows the one described in terminfo(5) as
+ // distributed by ncurses.
+
+ terminfo := os.Getenv("TERMINFO")
+ if terminfo != "" {
+ // if TERMINFO is set, no other directory should be searched
+ return ti_try_path(terminfo)
+ }
+
+ // next, consider ~/.terminfo
+ home := os.Getenv("HOME")
+ if home != "" {
+ data, err = ti_try_path(home + "/.terminfo")
+ if err == nil {
+ return data, nil
+ }
+ }
+
+ // next, TERMINFO_DIRS
+ dirs := os.Getenv("TERMINFO_DIRS")
+ if dirs != "" {
+ for _, dir := range strings.Split(dirs, ":") {
+ if dir == "" {
+ // "" -> "/usr/share/terminfo"
+ dir = "/usr/share/terminfo"
+ }
+ data, err = ti_try_path(dir)
+ if err == nil {
+ return data, nil
+ }
+ }
+ }
+
+ // fall back to /usr/share/terminfo
+ return ti_try_path("/usr/share/terminfo")
+}
+
+func ti_try_path(path string) (data []byte, err error) {
+ // load_terminfo already made sure it is set
+ term := os.Getenv("TERM")
+
+ // first try, the typical *nix path
+ terminfo := path + "/" + term[0:1] + "/" + term
+ data, err = ioutil.ReadFile(terminfo)
+ if err == nil {
+ return
+ }
+
+ // fallback to darwin specific dirs structure
+ terminfo = path + "/" + hex.EncodeToString([]byte(term[:1])) + "/" + term
+ data, err = ioutil.ReadFile(terminfo)
+ return
+}
+
+func setup_term_builtin() error {
+ name := os.Getenv("TERM")
+ if name == "" {
+ return errors.New("termbox: TERM environment variable not set")
+ }
+
+ for _, t := range terms {
+ if t.name == name {
+ keys = t.keys
+ funcs = t.funcs
+ return nil
+ }
+ }
+
+ compat_table := []struct {
+ partial string
+ keys []string
+ funcs []string
+ }{
+ {"xterm", xterm_keys, xterm_funcs},
+ {"rxvt", rxvt_unicode_keys, rxvt_unicode_funcs},
+ {"linux", linux_keys, linux_funcs},
+ {"Eterm", eterm_keys, eterm_funcs},
+ {"screen", screen_keys, screen_funcs},
+ // let's assume that 'cygwin' is xterm compatible
+ {"cygwin", xterm_keys, xterm_funcs},
+ {"st", xterm_keys, xterm_funcs},
+ }
+
+ // try compatibility variants
+ for _, it := range compat_table {
+ if strings.Contains(name, it.partial) {
+ keys = it.keys
+ funcs = it.funcs
+ return nil
+ }
+ }
+
+ return errors.New("termbox: unsupported terminal")
+}
+
+func setup_term() (err error) {
+ var data []byte
+ var header [6]int16
+ var str_offset, table_offset int16
+
+ data, err = load_terminfo()
+ if err != nil {
+ return setup_term_builtin()
+ }
+
+ rd := bytes.NewReader(data)
+ // 0: magic number, 1: size of names section, 2: size of boolean section, 3:
+ // size of numbers section (in integers), 4: size of the strings section (in
+ // integers), 5: size of the string table
+
+ err = binary.Read(rd, binary.LittleEndian, header[:])
+ if err != nil {
+ return
+ }
+
+ if (header[1]+header[2])%2 != 0 {
+ // old quirk to align everything on word boundaries
+ header[2] += 1
+ }
+ str_offset = ti_header_length + header[1] + header[2] + 2*header[3]
+ table_offset = str_offset + 2*header[4]
+
+ keys = make([]string, 0xFFFF-key_min)
+ for i, _ := range keys {
+ keys[i], err = ti_read_string(rd, str_offset+2*ti_keys[i], table_offset)
+ if err != nil {
+ return
+ }
+ }
+ funcs = make([]string, t_max_funcs)
+	// the last two entries are reserved for mouse. Because the table offset is
+	// not there, the two entries have to be filled in manually
+ for i, _ := range funcs[:len(funcs)-2] {
+ funcs[i], err = ti_read_string(rd, str_offset+2*ti_funcs[i], table_offset)
+ if err != nil {
+ return
+ }
+ }
+ funcs[t_max_funcs-2] = ti_mouse_enter
+ funcs[t_max_funcs-1] = ti_mouse_leave
+ return nil
+}
+
+func ti_read_string(rd *bytes.Reader, str_off, table int16) (string, error) {
+ var off int16
+
+ _, err := rd.Seek(int64(str_off), 0)
+ if err != nil {
+ return "", err
+ }
+ err = binary.Read(rd, binary.LittleEndian, &off)
+ if err != nil {
+ return "", err
+ }
+ _, err = rd.Seek(int64(table+off), 0)
+ if err != nil {
+ return "", err
+ }
+ var bs []byte
+ for {
+ b, err := rd.ReadByte()
+ if err != nil {
+ return "", err
+ }
+ if b == byte(0x00) {
+ break
+ }
+ bs = append(bs, b)
+ }
+ return string(bs), nil
+}
+
+// "Maps" the function constants from termbox.go to the number of the respective
+// string capability in the terminfo file. Taken from (ncurses) term.h.
+var ti_funcs = []int16{
+ 28, 40, 16, 13, 5, 39, 36, 27, 26, 34, 89, 88,
+}
+
+// Same as above for the special keys.
+var ti_keys = []int16{
+ 66, 68 /* apparently not a typo; 67 is F10 for whatever reason */, 69, 70,
+ 71, 72, 73, 74, 75, 67, 216, 217, 77, 59, 76, 164, 82, 81, 87, 61, 79, 83,
+}
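
load_terminfo and ti_try_path above search in a fixed order: $TERMINFO alone if set, otherwise ~/.terminfo, then each entry of $TERMINFO_DIRS (an empty entry meaning /usr/share/terminfo), and finally /usr/share/terminfo; within each directory the file is looked up under a one-letter subdirectory and then under the hex-encoded variant used on Darwin. A sketch that just lists the candidate paths (the candidates helper is hypothetical; $TERMINFO_DIRS handling is omitted for brevity):

```go
package main

import (
	"encoding/hex"
	"fmt"
	"os"
)

// candidates lists terminfo file paths in roughly the order the code above
// tries them.
func candidates(term string) []string {
	var dirs []string
	if d := os.Getenv("TERMINFO"); d != "" {
		dirs = []string{d} // TERMINFO overrides everything else
	} else {
		if home := os.Getenv("HOME"); home != "" {
			dirs = append(dirs, home+"/.terminfo")
		}
		dirs = append(dirs, "/usr/share/terminfo")
	}
	var out []string
	for _, d := range dirs {
		out = append(out, d+"/"+term[:1]+"/"+term)                             // typical *nix layout
		out = append(out, d+"/"+hex.EncodeToString([]byte(term[:1]))+"/"+term) // darwin layout
	}
	return out
}

func main() {
	for _, p := range candidates("xterm-256color") {
		fmt.Println(p)
	}
}
```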
diff --git a/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/terminfo_builtin.go b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/terminfo_builtin.go
new file mode 100644
index 00000000000..a94866067ba
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/nsf/termbox-go/terminfo_builtin.go
@@ -0,0 +1,64 @@
+// +build !windows
+
+package termbox
+
+// Eterm
+var eterm_keys = []string{
+ "\x1b[11~", "\x1b[12~", "\x1b[13~", "\x1b[14~", "\x1b[15~", "\x1b[17~", "\x1b[18~", "\x1b[19~", "\x1b[20~", "\x1b[21~", "\x1b[23~", "\x1b[24~", "\x1b[2~", "\x1b[3~", "\x1b[7~", "\x1b[8~", "\x1b[5~", "\x1b[6~", "\x1b[A", "\x1b[B", "\x1b[D", "\x1b[C",
+}
+var eterm_funcs = []string{
+ "\x1b7\x1b[?47h", "\x1b[2J\x1b[?47l\x1b8", "\x1b[?25h", "\x1b[?25l", "\x1b[H\x1b[2J", "\x1b[m\x0f", "\x1b[4m", "\x1b[1m", "\x1b[5m", "\x1b[7m", "", "", "", "",
+}
+
+// screen
+var screen_keys = []string{
+ "\x1bOP", "\x1bOQ", "\x1bOR", "\x1bOS", "\x1b[15~", "\x1b[17~", "\x1b[18~", "\x1b[19~", "\x1b[20~", "\x1b[21~", "\x1b[23~", "\x1b[24~", "\x1b[2~", "\x1b[3~", "\x1b[1~", "\x1b[4~", "\x1b[5~", "\x1b[6~", "\x1bOA", "\x1bOB", "\x1bOD", "\x1bOC",
+}
+var screen_funcs = []string{
+ "\x1b[?1049h", "\x1b[?1049l", "\x1b[34h\x1b[?25h", "\x1b[?25l", "\x1b[H\x1b[J", "\x1b[m\x0f", "\x1b[4m", "\x1b[1m", "\x1b[5m", "\x1b[7m", "\x1b[?1h\x1b=", "\x1b[?1l\x1b>", ti_mouse_enter, ti_mouse_leave,
+}
+
+// xterm
+var xterm_keys = []string{
+ "\x1bOP", "\x1bOQ", "\x1bOR", "\x1bOS", "\x1b[15~", "\x1b[17~", "\x1b[18~", "\x1b[19~", "\x1b[20~", "\x1b[21~", "\x1b[23~", "\x1b[24~", "\x1b[2~", "\x1b[3~", "\x1bOH", "\x1bOF", "\x1b[5~", "\x1b[6~", "\x1bOA", "\x1bOB", "\x1bOD", "\x1bOC",
+}
+var xterm_funcs = []string{
+ "\x1b[?1049h", "\x1b[?1049l", "\x1b[?12l\x1b[?25h", "\x1b[?25l", "\x1b[H\x1b[2J", "\x1b(B\x1b[m", "\x1b[4m", "\x1b[1m", "\x1b[5m", "\x1b[7m", "\x1b[?1h\x1b=", "\x1b[?1l\x1b>", ti_mouse_enter, ti_mouse_leave,
+}
+
+// rxvt-unicode
+var rxvt_unicode_keys = []string{
+ "\x1b[11~", "\x1b[12~", "\x1b[13~", "\x1b[14~", "\x1b[15~", "\x1b[17~", "\x1b[18~", "\x1b[19~", "\x1b[20~", "\x1b[21~", "\x1b[23~", "\x1b[24~", "\x1b[2~", "\x1b[3~", "\x1b[7~", "\x1b[8~", "\x1b[5~", "\x1b[6~", "\x1b[A", "\x1b[B", "\x1b[D", "\x1b[C",
+}
+var rxvt_unicode_funcs = []string{
+ "\x1b[?1049h", "\x1b[r\x1b[?1049l", "\x1b[?25h", "\x1b[?25l", "\x1b[H\x1b[2J", "\x1b[m\x1b(B", "\x1b[4m", "\x1b[1m", "\x1b[5m", "\x1b[7m", "\x1b=", "\x1b>", ti_mouse_enter, ti_mouse_leave,
+}
+
+// linux
+var linux_keys = []string{
+ "\x1b[[A", "\x1b[[B", "\x1b[[C", "\x1b[[D", "\x1b[[E", "\x1b[17~", "\x1b[18~", "\x1b[19~", "\x1b[20~", "\x1b[21~", "\x1b[23~", "\x1b[24~", "\x1b[2~", "\x1b[3~", "\x1b[1~", "\x1b[4~", "\x1b[5~", "\x1b[6~", "\x1b[A", "\x1b[B", "\x1b[D", "\x1b[C",
+}
+var linux_funcs = []string{
+ "", "", "\x1b[?25h\x1b[?0c", "\x1b[?25l\x1b[?1c", "\x1b[H\x1b[J", "\x1b[0;10m", "\x1b[4m", "\x1b[1m", "\x1b[5m", "\x1b[7m", "", "", "", "",
+}
+
+// rxvt-256color
+var rxvt_256color_keys = []string{
+ "\x1b[11~", "\x1b[12~", "\x1b[13~", "\x1b[14~", "\x1b[15~", "\x1b[17~", "\x1b[18~", "\x1b[19~", "\x1b[20~", "\x1b[21~", "\x1b[23~", "\x1b[24~", "\x1b[2~", "\x1b[3~", "\x1b[7~", "\x1b[8~", "\x1b[5~", "\x1b[6~", "\x1b[A", "\x1b[B", "\x1b[D", "\x1b[C",
+}
+var rxvt_256color_funcs = []string{
+ "\x1b7\x1b[?47h", "\x1b[2J\x1b[?47l\x1b8", "\x1b[?25h", "\x1b[?25l", "\x1b[H\x1b[2J", "\x1b[m\x0f", "\x1b[4m", "\x1b[1m", "\x1b[5m", "\x1b[7m", "\x1b=", "\x1b>", ti_mouse_enter, ti_mouse_leave,
+}
+
+var terms = []struct {
+ name string
+ keys []string
+ funcs []string
+}{
+ {"Eterm", eterm_keys, eterm_funcs},
+ {"screen", screen_keys, screen_funcs},
+ {"xterm", xterm_keys, xterm_funcs},
+ {"rxvt-unicode", rxvt_unicode_keys, rxvt_unicode_funcs},
+ {"linux", linux_keys, linux_funcs},
+ {"rxvt-256color", rxvt_256color_keys, rxvt_256color_funcs},
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/.gitignore b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/.gitignore
new file mode 100644
index 00000000000..6ad551742d3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/.gitignore
@@ -0,0 +1,3 @@
+.DS_Store
+Thumbs.db
+/.idea
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/.travis.yml b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/.travis.yml
new file mode 100644
index 00000000000..44217c97335
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/.travis.yml
@@ -0,0 +1,14 @@
+language: go
+
+go:
+ - 1.2
+ - 1.3
+ - 1.4
+ - 1.5
+
+install:
+ - go get -t ./...
+
+script: go test -v
+
+sudo: false
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/LICENSE.md b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/LICENSE.md
new file mode 100644
index 00000000000..48a3731c01a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/LICENSE.md
@@ -0,0 +1,23 @@
+Copyright (c) 2015 SmartyStreets, LLC
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+NOTE: Various optional and subordinate components carry their own licensing
+requirements and restrictions. Use of those components is subject to the terms
+and conditions outlined the respective license of each component.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/README.md b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/README.md
new file mode 100644
index 00000000000..58383bb00af
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/README.md
@@ -0,0 +1,575 @@
+# assertions
+--
+ import "github.com/smartystreets/assertions"
+
+Package assertions contains the implementations for all assertions which are
+referenced in goconvey's `convey` package
+(github.com/smartystreets/goconvey/convey) and gunit
+(github.com/smartystreets/gunit) for use with the So(...) method. They can also
+be used in traditional Go test functions and even in applications.
+
+Many of the assertions lean heavily on work done by Aaron Jacobs in his
+excellent oglematchers library. (https://github.com/jacobsa/oglematchers) The
+ShouldResemble assertion leans heavily on work done by Daniel Jacques in his
+very helpful go-render library. (https://github.com/luci/go-render)
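+
+For example, a minimal sketch of a traditional Go test using this package
+directly (the test body is illustrative; the import path and assertion names
+are the ones documented below):
+
+```go
+package demo
+
+import (
+	"testing"
+
+	"github.com/smartystreets/assertions"
+)
+
+func TestSketch(t *testing.T) {
+	// New wraps *testing.T; any failed assertion is reported through t.Error.
+	assert := assertions.New(t)
+	assert.So(1+1, assertions.ShouldEqual, 2)
+	assert.So([]int{1, 2, 3}, assertions.ShouldContain, 2)
+}
+```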
+
+## Usage
+
+#### func GoConveyMode
+
+```go
+func GoConveyMode(yes bool)
+```
+GoConveyMode provides control over JSON serialization of failures. When using
+the assertions in this package from the convey package JSON results are very
+helpful and can be rendered in a DIFF view. In that case, this function will be
+called with a true value to enable the JSON serialization. By default, the
+assertions in this package will not serialize a JSON result, making standalone
+usage more convenient.
+
+#### func ShouldAlmostEqual
+
+```go
+func ShouldAlmostEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldAlmostEqual makes sure that two parameters are close enough to being
+equal. The acceptable delta may be specified with a third argument, or a very
+small default delta will be used.
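+
+For example, continuing the test sketch above (the numeric values are
+illustrative):
+
+```go
+assert.So(1.0, assertions.ShouldAlmostEqual, 0.99999999999999) // small default delta
+assert.So(100.0, assertions.ShouldAlmostEqual, 110.0, 10.0)    // explicit delta of 10.0
+```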
+
+#### func ShouldBeBetween
+
+```go
+func ShouldBeBetween(actual interface{}, expected ...interface{}) string
+```
+ShouldBeBetween receives exactly three parameters: an actual value, a lower
+bound, and an upper bound. It ensures that the actual value is between both
+bounds (but not equal to either of them).
+
+#### func ShouldBeBetweenOrEqual
+
+```go
+func ShouldBeBetweenOrEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldBeBetweenOrEqual receives exactly three parameters: an actual value, a
+lower bound, and an upper bound. It ensures that the actual value is between
+both bounds or equal to one of them.
+
+#### func ShouldBeBlank
+
+```go
+func ShouldBeBlank(actual interface{}, expected ...interface{}) string
+```
+ShouldBeBlank receives exactly 1 string parameter and ensures that it is equal
+to "".
+
+#### func ShouldBeChronological
+
+```go
+func ShouldBeChronological(actual interface{}, expected ...interface{}) string
+```
+ShouldBeChronological receives a []time.Time slice and asserts that they are in
+chronological order, starting with the first time.Time as the earliest.
+
+#### func ShouldBeEmpty
+
+```go
+func ShouldBeEmpty(actual interface{}, expected ...interface{}) string
+```
+ShouldBeEmpty receives a single parameter (actual) and determines whether or not
+calling len(actual) would return `0`. It obeys the rules specified by the len
+function for determining length: http://golang.org/pkg/builtin/#len
+
+#### func ShouldBeFalse
+
+```go
+func ShouldBeFalse(actual interface{}, expected ...interface{}) string
+```
+ShouldBeFalse receives a single parameter and ensures that it is false.
+
+#### func ShouldBeGreaterThan
+
+```go
+func ShouldBeGreaterThan(actual interface{}, expected ...interface{}) string
+```
+ShouldBeGreaterThan receives exactly two parameters and ensures that the first
+is greater than the second.
+
+#### func ShouldBeGreaterThanOrEqualTo
+
+```go
+func ShouldBeGreaterThanOrEqualTo(actual interface{}, expected ...interface{}) string
+```
+ShouldBeGreaterThanOrEqualTo receives exactly two parameters and ensures that
+the first is greater than or equal to the second.
+
+#### func ShouldBeIn
+
+```go
+func ShouldBeIn(actual interface{}, expected ...interface{}) string
+```
+ShouldBeIn receives at least 2 parameters. The first is a proposed member of the
+collection that is passed in either as the second parameter, or of the
+collection that is comprised of all the remaining parameters. This assertion
+ensures that the proposed member is in the collection (using ShouldEqual).
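+
+For example, both calling forms pass when the proposed member is present
+(continuing the sketch above):
+
+```go
+assert.So(4, assertions.ShouldBeIn, []int{1, 2, 3, 4}) // collection as a single argument
+assert.So(4, assertions.ShouldBeIn, 1, 2, 3, 4)        // collection as the remaining arguments
+```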
+
+#### func ShouldBeLessThan
+
+```go
+func ShouldBeLessThan(actual interface{}, expected ...interface{}) string
+```
+ShouldBeLessThan receives exactly two parameters and ensures that the first is
+less than the second.
+
+#### func ShouldBeLessThanOrEqualTo
+
+```go
+func ShouldBeLessThanOrEqualTo(actual interface{}, expected ...interface{}) string
+```
+ShouldBeLessThanOrEqualTo receives exactly two parameters and ensures that the
+first is less than or equal to the second.
+
+#### func ShouldBeNil
+
+```go
+func ShouldBeNil(actual interface{}, expected ...interface{}) string
+```
+ShouldBeNil receives a single parameter and ensures that it is nil.
+
+#### func ShouldBeTrue
+
+```go
+func ShouldBeTrue(actual interface{}, expected ...interface{}) string
+```
+ShouldBeTrue receives a single parameter and ensures that it is true.
+
+#### func ShouldBeZeroValue
+
+```go
+func ShouldBeZeroValue(actual interface{}, expected ...interface{}) string
+```
+ShouldBeZeroValue receives a single parameter and ensures that it is the Go
+equivalent of the default value, or "zero" value.
+
+#### func ShouldContain
+
+```go
+func ShouldContain(actual interface{}, expected ...interface{}) string
+```
+ShouldContain receives exactly two parameters. The first is a slice and the
+second is a proposed member. Membership is determined using ShouldEqual.
+
+#### func ShouldContainKey
+
+```go
+func ShouldContainKey(actual interface{}, expected ...interface{}) string
+```
+ShouldContainKey receives exactly two parameters. The first is a map and the
+second is a proposed key. Keys are compared with a simple '=='.
+
+#### func ShouldContainSubstring
+
+```go
+func ShouldContainSubstring(actual interface{}, expected ...interface{}) string
+```
+ShouldContainSubstring receives exactly 2 string parameters and ensures that the
+first contains the second as a substring.
+
+#### func ShouldEndWith
+
+```go
+func ShouldEndWith(actual interface{}, expected ...interface{}) string
+```
+ShouldEndWith receives exactly 2 string parameters and ensures that the first
+ends with the second.
+
+#### func ShouldEqual
+
+```go
+func ShouldEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldEqual receives exactly two parameters and does an equality check.
+
+#### func ShouldEqualTrimSpace
+
+```go
+func ShouldEqualTrimSpace(actual interface{}, expected ...interface{}) string
+```
+ShouldEqualTrimSpace receives exactly 2 string parameters and ensures that the
+first is equal to the second after removing all leading and trailing whitespace
+using strings.TrimSpace(first).
+
+#### func ShouldEqualWithout
+
+```go
+func ShouldEqualWithout(actual interface{}, expected ...interface{}) string
+```
+ShouldEqualWithout receives exactly 3 string parameters and ensures that the
+first is equal to the second after removing all instances of the third from the
+first using strings.Replace(first, third, "", -1).
+
+#### func ShouldHappenAfter
+
+```go
+func ShouldHappenAfter(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenAfter receives exactly 2 time.Time arguments and asserts that the
+first happens after the second.
+
+#### func ShouldHappenBefore
+
+```go
+func ShouldHappenBefore(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenBefore receives exactly 2 time.Time arguments and asserts that the
+first happens before the second.
+
+#### func ShouldHappenBetween
+
+```go
+func ShouldHappenBetween(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenBetween receives exactly 3 time.Time arguments and asserts that the
+first happens between (not on) the second and third.
+
+#### func ShouldHappenOnOrAfter
+
+```go
+func ShouldHappenOnOrAfter(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenOnOrAfter receives exactly 2 time.Time arguments and asserts that
+the first happens on or after the second.
+
+#### func ShouldHappenOnOrBefore
+
+```go
+func ShouldHappenOnOrBefore(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenOnOrBefore receives exactly 2 time.Time arguments and asserts that
+the first happens on or before the second.
+
+#### func ShouldHappenOnOrBetween
+
+```go
+func ShouldHappenOnOrBetween(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenOnOrBetween receives exactly 3 time.Time arguments and asserts that
+the first happens between or on the second and third.
+
+#### func ShouldHappenWithin
+
+```go
+func ShouldHappenWithin(actual interface{}, expected ...interface{}) string
+```
+ShouldHappenWithin receives a time.Time, a time.Duration, and a time.Time (3
+arguments) and asserts that the first time.Time happens within or on the
+duration specified relative to the other time.Time.
+
+#### func ShouldHaveLength
+
+```go
+func ShouldHaveLength(actual interface{}, expected ...interface{}) string
+```
+ShouldHaveLength receives 2 parameters. The first is a collection to check the
+length of, the second being the expected length. It obeys the rules specified by
+the len function for determining length: http://golang.org/pkg/builtin/#len
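+
+For example (continuing the sketch above):
+
+```go
+assert.So([]string{"a", "b"}, assertions.ShouldHaveLength, 2)
+assert.So(map[string]int{"answer": 42}, assertions.ShouldHaveLength, 1)
+assert.So("hi", assertions.ShouldHaveLength, 2)
+```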
+
+#### func ShouldHaveSameTypeAs
+
+```go
+func ShouldHaveSameTypeAs(actual interface{}, expected ...interface{}) string
+```
+ShouldHaveSameTypeAs receives exactly two parameters and compares their
+underlying types for equality.
+
+#### func ShouldImplement
+
+```go
+func ShouldImplement(actual interface{}, expectedList ...interface{}) string
+```
+ShouldImplement receives exactly two parameters and ensures that the first
+implements the interface type of the second.
+
+#### func ShouldNotAlmostEqual
+
+```go
+func ShouldNotAlmostEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldNotAlmostEqual is the inverse of ShouldAlmostEqual
+
+#### func ShouldNotBeBetween
+
+```go
+func ShouldNotBeBetween(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeBetween receives exactly three parameters: an actual value, a lower
+bound, and an upper bound. It ensures that the actual value is NOT between both
+bounds.
+
+#### func ShouldNotBeBetweenOrEqual
+
+```go
+func ShouldNotBeBetweenOrEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeBetweenOrEqual receives exactly three parameters: an actual value, a
+lower bound, and an upper bound. It ensures that the actual value is NOT
+between the bounds nor equal to either of them.
+
+#### func ShouldNotBeBlank
+
+```go
+func ShouldNotBeBlank(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeBlank receives exactly 1 string parameter and ensures that it is NOT
+equal to "".
+
+#### func ShouldNotBeEmpty
+
+```go
+func ShouldNotBeEmpty(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeEmpty receives a single parameter (actual) and determines whether or
+not calling len(actual) would return a value greater than zero. It obeys the
+rules specified by the `len` function for determining length:
+http://golang.org/pkg/builtin/#len
+
+#### func ShouldNotBeIn
+
+```go
+func ShouldNotBeIn(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeIn receives at least 2 parameters. The first is a proposed member of
+the collection that is passed in either as the second parameter, or of the
+collection that is comprised of all the remaining parameters. This assertion
+ensures that the proposed member is NOT in the collection (using ShouldEqual).
+
+#### func ShouldNotBeNil
+
+```go
+func ShouldNotBeNil(actual interface{}, expected ...interface{}) string
+```
+ShouldNotBeNil receives a single parameter and ensures that it is not nil.
+
+#### func ShouldNotContain
+
+```go
+func ShouldNotContain(actual interface{}, expected ...interface{}) string
+```
+ShouldNotContain receives exactly two parameters. The first is a slice and the
+second is a proposed member. Membership is determined using ShouldEqual.
+
+#### func ShouldNotContainKey
+
+```go
+func ShouldNotContainKey(actual interface{}, expected ...interface{}) string
+```
+ShouldNotContainKey receives exactly two parameters. The first is a map and the
+second is a proposed absent key. Keys are compared with a simple '=='.
+
+#### func ShouldNotContainSubstring
+
+```go
+func ShouldNotContainSubstring(actual interface{}, expected ...interface{}) string
+```
+ShouldNotContainSubstring receives exactly 2 string parameters and ensures that
+the first does NOT contain the second as a substring.
+
+#### func ShouldNotEndWith
+
+```go
+func ShouldNotEndWith(actual interface{}, expected ...interface{}) string
+```
+ShouldNotEndWith receives exactly 2 string parameters and ensures that the
+first does not end with the second.
+
+#### func ShouldNotEqual
+
+```go
+func ShouldNotEqual(actual interface{}, expected ...interface{}) string
+```
+ShouldNotEqual receives exactly two parameters and does an inequality check.
+
+#### func ShouldNotHappenOnOrBetween
+
+```go
+func ShouldNotHappenOnOrBetween(actual interface{}, expected ...interface{}) string
+```
+ShouldNotHappenOnOrBetween receives exactly 3 time.Time arguments and asserts
+that the first does NOT happen between or on the second and third.
+
+#### func ShouldNotHappenWithin
+
+```go
+func ShouldNotHappenWithin(actual interface{}, expected ...interface{}) string
+```
+ShouldNotHappenWithin receives a time.Time, a time.Duration, and a time.Time (3
+arguments) and asserts that the first time.Time does NOT happen within or on the
+duration specified relative to the other time.Time.
+
+#### func ShouldNotHaveSameTypeAs
+
+```go
+func ShouldNotHaveSameTypeAs(actual interface{}, expected ...interface{}) string
+```
+ShouldNotHaveSameTypeAs receives exactly two parameters and compares their
+underlying types for inequality.
+
+#### func ShouldNotImplement
+
+```go
+func ShouldNotImplement(actual interface{}, expectedList ...interface{}) string
+```
+ShouldNotImplement receives exactly two parameters and ensures that the first
+does NOT implement the interface type of the second.
+
+#### func ShouldNotPanic
+
+```go
+func ShouldNotPanic(actual interface{}, expected ...interface{}) (message string)
+```
+ShouldNotPanic receives a void, niladic function and expects to execute the
+function without any panic.
+
+#### func ShouldNotPanicWith
+
+```go
+func ShouldNotPanicWith(actual interface{}, expected ...interface{}) (message string)
+```
+ShouldNotPanicWith receives a void, niladic function and expects to recover a
+panic whose content differs from the second argument.
+
+#### func ShouldNotPointTo
+
+```go
+func ShouldNotPointTo(actual interface{}, expected ...interface{}) string
+```
+ShouldNotPointTo receives exactly two parameters and checks to see that they
+point to different addresses.
+
+#### func ShouldNotResemble
+
+```go
+func ShouldNotResemble(actual interface{}, expected ...interface{}) string
+```
+ShouldNotResemble receives exactly two parameters and does an inverse deep equal
+check (see reflect.DeepEqual)
+
+#### func ShouldNotStartWith
+
+```go
+func ShouldNotStartWith(actual interface{}, expected ...interface{}) string
+```
+ShouldNotStartWith receives exactly 2 string parameters and ensures that the
+first does not start with the second.
+
+#### func ShouldPanic
+
+```go
+func ShouldPanic(actual interface{}, expected ...interface{}) (message string)
+```
+ShouldPanic receives a void, niladic function and expects to recover a panic.
+
+#### func ShouldPanicWith
+
+```go
+func ShouldPanicWith(actual interface{}, expected ...interface{}) (message string)
+```
+ShouldPanicWith receives a void, niladic function and expects to recover a panic
+with the second argument as the content.
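+
+For example, pairing the panic assertions with simple niladic functions
+(illustrative only, continuing the sketch above):
+
+```go
+assert.So(func() { panic("boom") }, assertions.ShouldPanic)
+assert.So(func() { panic("boom") }, assertions.ShouldPanicWith, "boom")
+assert.So(func() {}, assertions.ShouldNotPanic)
+```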
+
+#### func ShouldPointTo
+
+```go
+func ShouldPointTo(actual interface{}, expected ...interface{}) string
+```
+ShouldPointTo receives exactly two parameters and checks to see that they point
+to the same address.
+
+#### func ShouldResemble
+
+```go
+func ShouldResemble(actual interface{}, expected ...interface{}) string
+```
+ShouldResemble receives exactly two parameters and does a deep equal check (see
+reflect.DeepEqual)
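+
+For example, two separately constructed values with identical fields resemble
+each other (the `point` type is hypothetical, for illustration only):
+
+```go
+type point struct{ X, Y int }
+
+assert.So(point{1, 2}, assertions.ShouldResemble, point{1, 2})
+assert.So(point{1, 2}, assertions.ShouldNotResemble, point{1, 3})
+```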
+
+#### func ShouldStartWith
+
+```go
+func ShouldStartWith(actual interface{}, expected ...interface{}) string
+```
+ShouldStartWith receives exactly 2 string parameters and ensures that the first
+starts with the second.
+
+#### func So
+
+```go
+func So(actual interface{}, assert assertion, expected ...interface{}) (bool, string)
+```
+So is a convenience function (as opposed to an inconvenience function?) for
+running assertions on arbitrary arguments in any context, be it for testing or
+even application logging. It allows you to perform assertion-like behavior (and
+get nicely formatted messages detailing discrepancies) but without the program
+blowing up or panicking. All that is required is to import this package and call
+`So` with one of the assertions exported by this package as the second
+parameter. The first return parameter is a boolean indicating if the assertion
+was true. The second return parameter is the well-formatted message showing why
+an assertion was incorrect, or blank if the assertion was correct.
+
+Example:
+
+ if ok, message := So(x, ShouldBeGreaterThan, y); !ok {
+ log.Println(message)
+ }
+
+#### type Assertion
+
+```go
+type Assertion struct {
+}
+```
+
+
+#### func New
+
+```go
+func New(t testingT) *Assertion
+```
+New swallows the *testing.T struct and prints failed assertions using t.Error.
+Example: assertions.New(t).So(1, should.Equal, 1)
+
+#### func (*Assertion) Failed
+
+```go
+func (this *Assertion) Failed() bool
+```
+Failed reports whether any calls to So (on this Assertion instance) have failed.
+
+#### func (*Assertion) So
+
+```go
+func (this *Assertion) So(actual interface{}, assert assertion, expected ...interface{}) bool
+```
+So calls the standalone So function and, additionally, calls t.Error in failure
+scenarios.
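+
+For example, inside a test function with `t *testing.T` (as in the earlier
+sketch), Failed can be checked after a group of So calls:
+
+```go
+assert := assertions.New(t)
+assert.So("hello", assertions.ShouldStartWith, "he")
+assert.So("hello", assertions.ShouldEndWith, "lo")
+if assert.Failed() {
+	t.Log("at least one assertion in this group failed")
+}
+```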
+
+#### type FailureView
+
+```go
+type FailureView struct {
+ Message string `json:"Message"`
+ Expected string `json:"Expected"`
+ Actual string `json:"Actual"`
+}
+```
+
+This struct is also declared in
+github.com/smartystreets/goconvey/convey/reporting. The json struct tags should
+be equal in both declarations.
+
+#### type Serializer
+
+```go
+type Serializer interface {
+ // contains filtered or unexported methods
+}
+```
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/assertions.goconvey b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/assertions.goconvey
new file mode 100644
index 00000000000..e76cf275d47
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/assertions.goconvey
@@ -0,0 +1,3 @@
+#ignore
+-timeout=1s
+-coverpkg=github.com/smartystreets/assertions,github.com/smartystreets/assertions/internal/oglematchers
\ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/collections.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/collections.go
new file mode 100644
index 00000000000..d7f407e913f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/collections.go
@@ -0,0 +1,244 @@
+package assertions
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/smartystreets/assertions/internal/oglematchers"
+)
+
+// ShouldContain receives exactly two parameters. The first is a slice and the
+// second is a proposed member. Membership is determined using ShouldEqual.
+func ShouldContain(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ if matchError := oglematchers.Contains(expected[0]).Matches(actual); matchError != nil {
+ typeName := reflect.TypeOf(actual)
+
+ if fmt.Sprintf("%v", matchError) == "which is not a slice or array" {
+ return fmt.Sprintf(shouldHaveBeenAValidCollection, typeName)
+ }
+ return fmt.Sprintf(shouldHaveContained, typeName, expected[0])
+ }
+ return success
+}
+
+// ShouldNotContain receives exactly two parameters. The first is a slice and the
+// second is a proposed member. Membership is determined using ShouldEqual.
+func ShouldNotContain(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+ typeName := reflect.TypeOf(actual)
+
+ if matchError := oglematchers.Contains(expected[0]).Matches(actual); matchError != nil {
+ if fmt.Sprintf("%v", matchError) == "which is not a slice or array" {
+ return fmt.Sprintf(shouldHaveBeenAValidCollection, typeName)
+ }
+ return success
+ }
+ return fmt.Sprintf(shouldNotHaveContained, typeName, expected[0])
+}
+
+// ShouldContainKey receives exactly two parameters. The first is a map and the
+// second is a proposed key. Keys are compared with a simple '=='.
+func ShouldContainKey(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ keys, isMap := mapKeys(actual)
+ if !isMap {
+ return fmt.Sprintf(shouldHaveBeenAValidMap, reflect.TypeOf(actual))
+ }
+
+ if !keyFound(keys, expected[0]) {
+ return fmt.Sprintf(shouldHaveContainedKey, reflect.TypeOf(actual), expected)
+ }
+
+ return ""
+}
+
+// ShouldNotContainKey receives exactly two parameters. The first is a map and the
+// second is a proposed absent key. Keys are compared with a simple '=='.
+func ShouldNotContainKey(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ keys, isMap := mapKeys(actual)
+ if !isMap {
+ return fmt.Sprintf(shouldHaveBeenAValidMap, reflect.TypeOf(actual))
+ }
+
+ if keyFound(keys, expected[0]) {
+ return fmt.Sprintf(shouldNotHaveContainedKey, reflect.TypeOf(actual), expected)
+ }
+
+ return ""
+}
+
+func mapKeys(m interface{}) ([]reflect.Value, bool) {
+ value := reflect.ValueOf(m)
+ if value.Kind() != reflect.Map {
+ return nil, false
+ }
+ return value.MapKeys(), true
+}
+func keyFound(keys []reflect.Value, expectedKey interface{}) bool {
+ found := false
+ for _, key := range keys {
+ if key.Interface() == expectedKey {
+ found = true
+ }
+ }
+ return found
+}
+
+// ShouldBeIn receives at least 2 parameters. The first is a proposed member of the collection
+// that is passed in either as the second parameter, or of the collection that is comprised
+// of all the remaining parameters. This assertion ensures that the proposed member is in
+// the collection (using ShouldEqual).
+func ShouldBeIn(actual interface{}, expected ...interface{}) string {
+ if fail := atLeast(1, expected); fail != success {
+ return fail
+ }
+
+ if len(expected) == 1 {
+ return shouldBeIn(actual, expected[0])
+ }
+ return shouldBeIn(actual, expected)
+}
+func shouldBeIn(actual interface{}, expected interface{}) string {
+ if matchError := oglematchers.Contains(actual).Matches(expected); matchError != nil {
+ return fmt.Sprintf(shouldHaveBeenIn, actual, reflect.TypeOf(expected))
+ }
+ return success
+}
+
+// ShouldNotBeIn receives at least 2 parameters. The first is a proposed member of the collection
+// that is passed in either as the second parameter, or of the collection that is comprised
+// of all the remaining parameters. This assertion ensures that the proposed member is NOT in
+// the collection (using ShouldEqual).
+func ShouldNotBeIn(actual interface{}, expected ...interface{}) string {
+ if fail := atLeast(1, expected); fail != success {
+ return fail
+ }
+
+ if len(expected) == 1 {
+ return shouldNotBeIn(actual, expected[0])
+ }
+ return shouldNotBeIn(actual, expected)
+}
+func shouldNotBeIn(actual interface{}, expected interface{}) string {
+ if matchError := oglematchers.Contains(actual).Matches(expected); matchError == nil {
+ return fmt.Sprintf(shouldNotHaveBeenIn, actual, reflect.TypeOf(expected))
+ }
+ return success
+}
+
+// ShouldBeEmpty receives a single parameter (actual) and determines whether or not
+// calling len(actual) would return `0`. It obeys the rules specified by the len
+// function for determining length: http://golang.org/pkg/builtin/#len
+func ShouldBeEmpty(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+
+ if actual == nil {
+ return success
+ }
+
+ value := reflect.ValueOf(actual)
+ switch value.Kind() {
+ case reflect.Slice:
+ if value.Len() == 0 {
+ return success
+ }
+ case reflect.Chan:
+ if value.Len() == 0 {
+ return success
+ }
+ case reflect.Map:
+ if value.Len() == 0 {
+ return success
+ }
+ case reflect.String:
+ if value.Len() == 0 {
+ return success
+ }
+ case reflect.Ptr:
+ elem := value.Elem()
+ kind := elem.Kind()
+ if (kind == reflect.Slice || kind == reflect.Array) && elem.Len() == 0 {
+ return success
+ }
+ }
+
+ return fmt.Sprintf(shouldHaveBeenEmpty, actual)
+}
+
+// ShouldNotBeEmpty receives a single parameter (actual) and determines whether or not
+// calling len(actual) would return a value greater than zero. It obeys the rules
+// specified by the `len` function for determining length: http://golang.org/pkg/builtin/#len
+func ShouldNotBeEmpty(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+
+ if empty := ShouldBeEmpty(actual, expected...); empty != success {
+ return success
+ }
+ return fmt.Sprintf(shouldNotHaveBeenEmpty, actual)
+}
+
+// ShouldHaveLength receives 2 parameters. The first is a collection to check
+// the length of, the second being the expected length. It obeys the rules
+// specified by the len function for determining length:
+// http://golang.org/pkg/builtin/#len
+func ShouldHaveLength(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ var expectedLen int64
+ lenValue := reflect.ValueOf(expected[0])
+ switch lenValue.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ expectedLen = lenValue.Int()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ expectedLen = int64(lenValue.Uint())
+ default:
+ return fmt.Sprintf(shouldHaveBeenAValidInteger, reflect.TypeOf(expected[0]))
+ }
+
+ if expectedLen < 0 {
+ return fmt.Sprintf(shouldHaveBeenAValidLength, expected[0])
+ }
+
+ value := reflect.ValueOf(actual)
+ switch value.Kind() {
+ case reflect.Slice,
+ reflect.Chan,
+ reflect.Map,
+ reflect.String:
+ if int64(value.Len()) == expectedLen {
+ return success
+ } else {
+ return fmt.Sprintf(shouldHaveHadLength, actual, value.Len(), expectedLen)
+ }
+ case reflect.Ptr:
+ elem := value.Elem()
+ kind := elem.Kind()
+ if kind == reflect.Slice || kind == reflect.Array {
+ if int64(elem.Len()) == expectedLen {
+ return success
+ } else {
+ return fmt.Sprintf(shouldHaveHadLength, actual, elem.Len(), expectedLen)
+ }
+ }
+ }
+ return fmt.Sprintf(shouldHaveBeenAValidCollection, reflect.TypeOf(actual))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/collections_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/collections_test.go
new file mode 100644
index 00000000000..6c7948f3c7a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/collections_test.go
@@ -0,0 +1,157 @@
+package assertions
+
+import (
+ "fmt"
+ "testing"
+ "time"
+)
+
+func TestShouldContainKey(t *testing.T) {
+ fail(t, so(map[int]int{}, ShouldContainKey), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(map[int]int{}, ShouldContainKey, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(Thing1{}, ShouldContainKey, 1), "You must provide a valid map type (was assertions.Thing1)!")
+ fail(t, so(nil, ShouldContainKey, 1), "You must provide a valid map type (was <nil>)!")
+ fail(t, so(map[int]int{1: 41}, ShouldContainKey, 2), "Expected the map[int]int to contain the key: [2] (but it didn't)!")
+
+ pass(t, so(map[int]int{1: 41}, ShouldContainKey, 1))
+ pass(t, so(map[int]int{1: 41, 2: 42, 3: 43}, ShouldContainKey, 2))
+}
+
+func TestShouldNotContainKey(t *testing.T) {
+ fail(t, so(map[int]int{}, ShouldNotContainKey), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(map[int]int{}, ShouldNotContainKey, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(Thing1{}, ShouldNotContainKey, 1), "You must provide a valid map type (was assertions.Thing1)!")
+ fail(t, so(nil, ShouldNotContainKey, 1), "You must provide a valid map type (was <nil>)!")
+ fail(t, so(map[int]int{1: 41}, ShouldNotContainKey, 1), "Expected the map[int]int NOT to contain the key: [1] (but it did)!")
+ pass(t, so(map[int]int{1: 41}, ShouldNotContainKey, 2))
+}
+
+func TestShouldContain(t *testing.T) {
+ fail(t, so([]int{}, ShouldContain), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so([]int{}, ShouldContain, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(Thing1{}, ShouldContain, 1), "You must provide a valid container (was assertions.Thing1)!")
+ fail(t, so(nil, ShouldContain, 1), "You must provide a valid container (was <nil>)!")
+ fail(t, so([]int{1}, ShouldContain, 2), "Expected the container ([]int) to contain: '2' (but it didn't)!")
+
+ pass(t, so([]int{1}, ShouldContain, 1))
+ pass(t, so([]int{1, 2, 3}, ShouldContain, 2))
+}
+
+func TestShouldNotContain(t *testing.T) {
+ fail(t, so([]int{}, ShouldNotContain), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so([]int{}, ShouldNotContain, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(Thing1{}, ShouldNotContain, 1), "You must provide a valid container (was assertions.Thing1)!")
+ fail(t, so(nil, ShouldNotContain, 1), "You must provide a valid container (was <nil>)!")
+
+ fail(t, so([]int{1}, ShouldNotContain, 1), "Expected the container ([]int) NOT to contain: '1' (but it did)!")
+ fail(t, so([]int{1, 2, 3}, ShouldNotContain, 2), "Expected the container ([]int) NOT to contain: '2' (but it did)!")
+
+ pass(t, so([]int{1}, ShouldNotContain, 2))
+}
+
+func TestShouldBeIn(t *testing.T) {
+ fail(t, so(4, ShouldBeIn), needNonEmptyCollection)
+
+ container := []int{1, 2, 3, 4}
+ pass(t, so(4, ShouldBeIn, container))
+ pass(t, so(4, ShouldBeIn, 1, 2, 3, 4))
+
+ fail(t, so(4, ShouldBeIn, 1, 2, 3), "Expected '4' to be in the container ([]interface {}), but it wasn't!")
+ fail(t, so(4, ShouldBeIn, []int{1, 2, 3}), "Expected '4' to be in the container ([]int), but it wasn't!")
+}
+
+func TestShouldNotBeIn(t *testing.T) {
+ fail(t, so(4, ShouldNotBeIn), needNonEmptyCollection)
+
+ container := []int{1, 2, 3, 4}
+ pass(t, so(42, ShouldNotBeIn, container))
+ pass(t, so(42, ShouldNotBeIn, 1, 2, 3, 4))
+
+ fail(t, so(2, ShouldNotBeIn, 1, 2, 3), "Expected '2' NOT to be in the container ([]interface {}), but it was!")
+ fail(t, so(2, ShouldNotBeIn, []int{1, 2, 3}), "Expected '2' NOT to be in the container ([]int), but it was!")
+}
+
+func TestShouldBeEmpty(t *testing.T) {
+ fail(t, so(1, ShouldBeEmpty, 2, 3), "This assertion requires exactly 0 comparison values (you provided 2).")
+
+ pass(t, so([]int{}, ShouldBeEmpty)) // empty slice
+ pass(t, so([]interface{}{}, ShouldBeEmpty)) // empty slice
+ pass(t, so(map[string]int{}, ShouldBeEmpty)) // empty map
+ pass(t, so("", ShouldBeEmpty)) // empty string
+ pass(t, so(&[]int{}, ShouldBeEmpty)) // pointer to empty slice
+ pass(t, so(&[0]int{}, ShouldBeEmpty)) // pointer to empty array
+ pass(t, so(nil, ShouldBeEmpty)) // nil
+ pass(t, so(make(chan string), ShouldBeEmpty)) // empty channel
+
+ fail(t, so([]int{1}, ShouldBeEmpty), "Expected [1] to be empty (but it wasn't)!") // non-empty slice
+ fail(t, so([]interface{}{1}, ShouldBeEmpty), "Expected [1] to be empty (but it wasn't)!") // non-empty slice
+ fail(t, so(map[string]int{"hi": 0}, ShouldBeEmpty), "Expected map[hi:0] to be empty (but it wasn't)!") // non-empty map
+ fail(t, so("hi", ShouldBeEmpty), "Expected hi to be empty (but it wasn't)!") // non-empty string
+ fail(t, so(&[]int{1}, ShouldBeEmpty), "Expected &[1] to be empty (but it wasn't)!") // pointer to non-empty slice
+ fail(t, so(&[1]int{1}, ShouldBeEmpty), "Expected &[1] to be empty (but it wasn't)!") // pointer to non-empty array
+ c := make(chan int, 1) // non-empty channel
+ go func() { c <- 1 }()
+ time.Sleep(time.Millisecond)
+ fail(t, so(c, ShouldBeEmpty), fmt.Sprintf("Expected %+v to be empty (but it wasn't)!", c))
+}
+
+func TestShouldNotBeEmpty(t *testing.T) {
+ fail(t, so(1, ShouldNotBeEmpty, 2, 3), "This assertion requires exactly 0 comparison values (you provided 2).")
+
+ fail(t, so([]int{}, ShouldNotBeEmpty), "Expected [] to NOT be empty (but it was)!") // empty slice
+ fail(t, so([]interface{}{}, ShouldNotBeEmpty), "Expected [] to NOT be empty (but it was)!") // empty slice
+ fail(t, so(map[string]int{}, ShouldNotBeEmpty), "Expected map[] to NOT be empty (but it was)!") // empty map
+ fail(t, so("", ShouldNotBeEmpty), "Expected to NOT be empty (but it was)!") // empty string
+ fail(t, so(&[]int{}, ShouldNotBeEmpty), "Expected &[] to NOT be empty (but it was)!") // pointer to empty slice
+ fail(t, so(&[0]int{}, ShouldNotBeEmpty), "Expected &[] to NOT be empty (but it was)!") // pointer to empty array
+ fail(t, so(nil, ShouldNotBeEmpty), "Expected <nil> to NOT be empty (but it was)!") // nil
+ c := make(chan int, 0) // non-empty channel
+ fail(t, so(c, ShouldNotBeEmpty), fmt.Sprintf("Expected %+v to NOT be empty (but it was)!", c)) // empty channel
+
+ pass(t, so([]int{1}, ShouldNotBeEmpty)) // non-empty slice
+ pass(t, so([]interface{}{1}, ShouldNotBeEmpty)) // non-empty slice
+ pass(t, so(map[string]int{"hi": 0}, ShouldNotBeEmpty)) // non-empty map
+ pass(t, so("hi", ShouldNotBeEmpty)) // non-empty string
+ pass(t, so(&[]int{1}, ShouldNotBeEmpty)) // pointer to non-empty slice
+ pass(t, so(&[1]int{1}, ShouldNotBeEmpty)) // pointer to non-empty array
+ c = make(chan int, 1)
+ go func() { c <- 1 }()
+ time.Sleep(time.Millisecond)
+ pass(t, so(c, ShouldNotBeEmpty))
+}
+
+func TestShouldHaveLength(t *testing.T) {
+ fail(t, so(1, ShouldHaveLength, 2), "You must provide a valid container (was int)!")
+ fail(t, so(nil, ShouldHaveLength, 1), "You must provide a valid container (was <nil>)!")
+ fail(t, so("hi", ShouldHaveLength, float64(1.0)), "You must provide a valid integer (was float64)!")
+ fail(t, so([]string{}, ShouldHaveLength), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so([]string{}, ShouldHaveLength, 1, 2), "This assertion requires exactly 1 comparison values (you provided 2).")
+ fail(t, so([]string{}, ShouldHaveLength, -10), "You must provide a valid positive integer (was -10)!")
+
+ fail(t, so([]int{}, ShouldHaveLength, 1), "Expected [] (length: 0) to have length equal to '1', but it wasn't!") // empty slice
+ fail(t, so([]interface{}{}, ShouldHaveLength, 1), "Expected [] (length: 0) to have length equal to '1', but it wasn't!") // empty slice
+ fail(t, so(map[string]int{}, ShouldHaveLength, 1), "Expected map[] (length: 0) to have length equal to '1', but it wasn't!") // empty map
+ fail(t, so("", ShouldHaveLength, 1), "Expected (length: 0) to have length equal to '1', but it wasn't!") // empty string
+ fail(t, so(&[]int{}, ShouldHaveLength, 1), "Expected &[] (length: 0) to have length equal to '1', but it wasn't!") // pointer to empty slice
+ fail(t, so(&[0]int{}, ShouldHaveLength, 1), "Expected &[] (length: 0) to have length equal to '1', but it wasn't!") // pointer to empty array
+ c := make(chan int, 0) // non-empty channel
+ fail(t, so(c, ShouldHaveLength, 1), fmt.Sprintf("Expected %+v (length: 0) to have length equal to '1', but it wasn't!", c))
+ c = make(chan int) // empty channel
+ fail(t, so(c, ShouldHaveLength, 1), fmt.Sprintf("Expected %+v (length: 0) to have length equal to '1', but it wasn't!", c))
+
+ pass(t, so([]int{1}, ShouldHaveLength, 1)) // non-empty slice
+ pass(t, so([]interface{}{1}, ShouldHaveLength, 1)) // non-empty slice
+ pass(t, so(map[string]int{"hi": 0}, ShouldHaveLength, 1)) // non-empty map
+ pass(t, so("hi", ShouldHaveLength, 2)) // non-empty string
+ pass(t, so(&[]int{1}, ShouldHaveLength, 1)) // pointer to non-empty slice
+ pass(t, so(&[1]int{1}, ShouldHaveLength, 1)) // pointer to non-empty array
+ c = make(chan int, 1)
+ go func() { c <- 1 }()
+ time.Sleep(time.Millisecond)
+ pass(t, so(c, ShouldHaveLength, 1))
+
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/doc.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/doc.go
new file mode 100644
index 00000000000..5720fc298c6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/doc.go
@@ -0,0 +1,105 @@
+// Package assertions contains the implementations for all assertions which
+// are referenced in goconvey's `convey` package
+// (github.com/smartystreets/goconvey/convey) and gunit (github.com/smartystreets/gunit)
+// for use with the So(...) method.
+// They can also be used in traditional Go test functions and even in
+// applications.
+//
+// Many of the assertions lean heavily on work done by Aaron Jacobs in his excellent oglematchers library.
+// (https://github.com/jacobsa/oglematchers)
+// The ShouldResemble assertion leans heavily on work done by Daniel Jacques in his very helpful go-render library.
+// (https://github.com/luci/go-render)
+package assertions
+
+import (
+ "fmt"
+ "runtime"
+)
+
+// By default we use a no-op serializer. The actual Serializer provides a JSON
+// representation of failure results on selected assertions so the goconvey
+// web UI can display a convenient diff.
+var serializer Serializer = new(noopSerializer)
+
+// GoConveyMode provides control over JSON serialization of failures. When
+// using the assertions in this package from the convey package JSON results
+// are very helpful and can be rendered in a DIFF view. In that case, this function
+// will be called with a true value to enable the JSON serialization. By default,
+// the assertions in this package will not serialize a JSON result, making
+// standalone usage more convenient.
+func GoConveyMode(yes bool) {
+ if yes {
+ serializer = newSerializer()
+ } else {
+ serializer = new(noopSerializer)
+ }
+}
+
+type testingT interface {
+ Error(args ...interface{})
+}
+
+type Assertion struct {
+ t testingT
+ failed bool
+}
+
+// New swallows the *testing.T struct and prints failed assertions using t.Error.
+// Example: assertions.New(t).So(1, should.Equal, 1)
+func New(t testingT) *Assertion {
+ return &Assertion{t: t}
+}
+
+// Failed reports whether any calls to So (on this Assertion instance) have failed.
+func (this *Assertion) Failed() bool {
+ return this.failed
+}
+
+// So calls the standalone So function and, additionally, calls t.Error in failure scenarios.
+func (this *Assertion) So(actual interface{}, assert assertion, expected ...interface{}) bool {
+ ok, result := So(actual, assert, expected...)
+ if !ok {
+ this.failed = true
+ _, file, line, _ := runtime.Caller(1)
+ this.t.Error(fmt.Sprintf("\n%s:%d\n%s", file, line, result))
+ }
+ return ok
+}
+
+// So is a convenience function (as opposed to an inconvenience function?)
+// for running assertions on arbitrary arguments in any context, be it for testing or even
+// application logging. It allows you to perform assertion-like behavior (and get nicely
+// formatted messages detailing discrepancies) but without the program blowing up or panicking.
+// All that is required is to import this package and call `So` with one of the assertions
+// exported by this package as the second parameter.
+// The first return parameter is a boolean indicating if the assertion was true. The second
+// return parameter is the well-formatted message showing why an assertion was incorrect, or
+// blank if the assertion was correct.
+//
+// Example:
+//
+// if ok, message := So(x, ShouldBeGreaterThan, y); !ok {
+// log.Println(message)
+// }
+//
+func So(actual interface{}, assert assertion, expected ...interface{}) (bool, string) {
+ if result := so(actual, assert, expected...); len(result) == 0 {
+ return true, result
+ } else {
+ return false, result
+ }
+}
+
+// so is like So, except that it only returns the string message, which is blank if the
+// assertion passed. Used to facilitate testing.
+func so(actual interface{}, assert func(interface{}, ...interface{}) string, expected ...interface{}) string {
+ return assert(actual, expected...)
+}
+
+// assertion is an alias for a function with a signature that the So()
+// function can handle. Any future or custom assertions should conform to this
+// method signature. The return value should be an empty string if the assertion
+// passes and a well-formed failure message if not.
+type assertion func(actual interface{}, expected ...interface{}) string
+
+////////////////////////////////////////////////////////////////////////////
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/doc_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/doc_test.go
new file mode 100644
index 00000000000..041faaffcb2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/doc_test.go
@@ -0,0 +1,57 @@
+package assertions
+
+import (
+ "bytes"
+ "fmt"
+ "testing"
+)
+
+func TestPassingAssertion(t *testing.T) {
+ fake := &FakeT{buffer: new(bytes.Buffer)}
+ assertion := New(fake)
+ passed := assertion.So(1, ShouldEqual, 1)
+
+ if !passed {
+ t.Error("Assertion failed when it should have passed.")
+ }
+ if fake.buffer.Len() > 0 {
+ t.Error("Unexpected error message was printed.")
+ }
+}
+
+func TestFailingAssertion(t *testing.T) {
+ fake := &FakeT{buffer: new(bytes.Buffer)}
+ assertion := New(fake)
+ passed := assertion.So(1, ShouldEqual, 2)
+
+ if passed {
+ t.Error("Assertion passed when it should have failed.")
+ }
+ if fake.buffer.Len() == 0 {
+ t.Error("Expected error message not printed.")
+ }
+}
+
+func TestFailingGroupsOfAssertions(t *testing.T) {
+ fake := &FakeT{buffer: new(bytes.Buffer)}
+ assertion1 := New(fake)
+ assertion2 := New(fake)
+
+ assertion1.So(1, ShouldEqual, 2) // fail
+ assertion2.So(1, ShouldEqual, 1) // pass
+
+ if !assertion1.Failed() {
+ t.Error("Expected the first assertion to have been marked as failed.")
+ }
+ if assertion2.Failed() {
+ t.Error("Expected the second assertion to NOT have been marked as failed.")
+ }
+}
+
+type FakeT struct {
+ buffer *bytes.Buffer
+}
+
+func (this *FakeT) Error(args ...interface{}) {
+ fmt.Fprint(this.buffer, args...)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/equality.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/equality.go
new file mode 100644
index 00000000000..2b6049c37d9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/equality.go
@@ -0,0 +1,280 @@
+package assertions
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "reflect"
+ "strings"
+
+ "github.com/smartystreets/assertions/internal/oglematchers"
+ "github.com/smartystreets/assertions/internal/go-render/render"
+)
+
+// default acceptable delta for ShouldAlmostEqual
+const defaultDelta = 0.0000000001
+
+// ShouldEqual receives exactly two parameters and does an equality check.
+func ShouldEqual(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ }
+ return shouldEqual(actual, expected[0])
+}
+func shouldEqual(actual, expected interface{}) (message string) {
+ defer func() {
+ if r := recover(); r != nil {
+ message = serializer.serialize(expected, actual, fmt.Sprintf(shouldHaveBeenEqual, expected, actual))
+ return
+ }
+ }()
+
+ if matchError := oglematchers.Equals(expected).Matches(actual); matchError != nil {
+ expectedSyntax := fmt.Sprintf("%v", expected)
+ actualSyntax := fmt.Sprintf("%v", actual)
+ if expectedSyntax == actualSyntax && reflect.TypeOf(expected) != reflect.TypeOf(actual) {
+ message = fmt.Sprintf(shouldHaveBeenEqualTypeMismatch, expected, expected, actual, actual)
+ } else {
+ message = fmt.Sprintf(shouldHaveBeenEqual, expected, actual)
+ }
+ message = serializer.serialize(expected, actual, message)
+ return
+ }
+
+ return success
+}
+
+// ShouldNotEqual receives exactly two parameters and does an inequality check.
+func ShouldNotEqual(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ } else if ShouldEqual(actual, expected[0]) == success {
+ return fmt.Sprintf(shouldNotHaveBeenEqual, actual, expected[0])
+ }
+ return success
+}
+
+// ShouldAlmostEqual makes sure that two parameters are close enough to being equal.
+// The acceptable delta may be specified with a third argument,
+// or a very small default delta will be used.
+func ShouldAlmostEqual(actual interface{}, expected ...interface{}) string {
+ actualFloat, expectedFloat, deltaFloat, err := cleanAlmostEqualInput(actual, expected...)
+
+ if err != "" {
+ return err
+ }
+
+ if math.Abs(actualFloat-expectedFloat) <= deltaFloat {
+ return success
+ } else {
+ return fmt.Sprintf(shouldHaveBeenAlmostEqual, actualFloat, expectedFloat)
+ }
+}
+
+// ShouldNotAlmostEqual is the inverse of ShouldAlmostEqual
+func ShouldNotAlmostEqual(actual interface{}, expected ...interface{}) string {
+ actualFloat, expectedFloat, deltaFloat, err := cleanAlmostEqualInput(actual, expected...)
+
+ if err != "" {
+ return err
+ }
+
+ if math.Abs(actualFloat-expectedFloat) > deltaFloat {
+ return success
+ } else {
+ return fmt.Sprintf(shouldHaveNotBeenAlmostEqual, actualFloat, expectedFloat)
+ }
+}
+
+func cleanAlmostEqualInput(actual interface{}, expected ...interface{}) (float64, float64, float64, string) {
+ deltaFloat := 0.0000000001
+
+ if len(expected) == 0 {
+ return 0.0, 0.0, 0.0, "This assertion requires exactly one comparison value and an optional delta (you provided neither)"
+ } else if len(expected) == 2 {
+ delta, err := getFloat(expected[1])
+
+ if err != nil {
+ return 0.0, 0.0, 0.0, "delta must be a numerical type"
+ }
+
+ deltaFloat = delta
+ } else if len(expected) > 2 {
+ return 0.0, 0.0, 0.0, "This assertion requires exactly one comparison value and an optional delta (you provided more values)"
+ }
+
+ actualFloat, err := getFloat(actual)
+
+ if err != nil {
+ return 0.0, 0.0, 0.0, err.Error()
+ }
+
+ expectedFloat, err := getFloat(expected[0])
+
+ if err != nil {
+ return 0.0, 0.0, 0.0, err.Error()
+ }
+
+ return actualFloat, expectedFloat, deltaFloat, ""
+}
+
+// returns the float value of any real number, or error if it is not a numerical type
+func getFloat(num interface{}) (float64, error) {
+ numValue := reflect.ValueOf(num)
+ numKind := numValue.Kind()
+
+ if numKind == reflect.Int ||
+ numKind == reflect.Int8 ||
+ numKind == reflect.Int16 ||
+ numKind == reflect.Int32 ||
+ numKind == reflect.Int64 {
+ return float64(numValue.Int()), nil
+ } else if numKind == reflect.Uint ||
+ numKind == reflect.Uint8 ||
+ numKind == reflect.Uint16 ||
+ numKind == reflect.Uint32 ||
+ numKind == reflect.Uint64 {
+ return float64(numValue.Uint()), nil
+ } else if numKind == reflect.Float32 ||
+ numKind == reflect.Float64 {
+ return numValue.Float(), nil
+ } else {
+ return 0.0, errors.New("must be a numerical type, but was " + numKind.String())
+ }
+}
+
+// ShouldResemble receives exactly two parameters and does a deep equal check (see reflect.DeepEqual)
+func ShouldResemble(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ }
+
+ if matchError := oglematchers.DeepEquals(expected[0]).Matches(actual); matchError != nil {
+ return serializer.serializeDetailed(expected[0], actual,
+ fmt.Sprintf(shouldHaveResembled, render.Render(expected[0]), render.Render(actual)))
+ }
+
+ return success
+}
+
+// ShouldNotResemble receives exactly two parameters and does an inverse deep equal check (see reflect.DeepEqual)
+func ShouldNotResemble(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ } else if ShouldResemble(actual, expected[0]) == success {
+ return fmt.Sprintf(shouldNotHaveResembled, render.Render(actual), render.Render(expected[0]))
+ }
+ return success
+}
+
+// ShouldPointTo receives exactly two parameters and checks to see that they point to the same address.
+func ShouldPointTo(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ }
+ return shouldPointTo(actual, expected[0])
+
+}
+func shouldPointTo(actual, expected interface{}) string {
+ actualValue := reflect.ValueOf(actual)
+ expectedValue := reflect.ValueOf(expected)
+
+ if ShouldNotBeNil(actual) != success {
+ return fmt.Sprintf(shouldHaveBeenNonNilPointer, "first", "nil")
+ } else if ShouldNotBeNil(expected) != success {
+ return fmt.Sprintf(shouldHaveBeenNonNilPointer, "second", "nil")
+ } else if actualValue.Kind() != reflect.Ptr {
+ return fmt.Sprintf(shouldHaveBeenNonNilPointer, "first", "not")
+ } else if expectedValue.Kind() != reflect.Ptr {
+ return fmt.Sprintf(shouldHaveBeenNonNilPointer, "second", "not")
+ } else if ShouldEqual(actualValue.Pointer(), expectedValue.Pointer()) != success {
+ actualAddress := reflect.ValueOf(actual).Pointer()
+ expectedAddress := reflect.ValueOf(expected).Pointer()
+ return serializer.serialize(expectedAddress, actualAddress, fmt.Sprintf(shouldHavePointedTo,
+ actual, actualAddress,
+ expected, expectedAddress))
+ }
+ return success
+}
+
+// ShouldNotPointTo receives exactly two parameters and checks to see that they point to different addresses.
+func ShouldNotPointTo(actual interface{}, expected ...interface{}) string {
+ if message := need(1, expected); message != success {
+ return message
+ }
+ compare := ShouldPointTo(actual, expected[0])
+ if strings.HasPrefix(compare, shouldBePointers) {
+ return compare
+ } else if compare == success {
+ return fmt.Sprintf(shouldNotHavePointedTo, actual, expected[0], reflect.ValueOf(actual).Pointer())
+ }
+ return success
+}
+
+// ShouldBeNil receives a single parameter and ensures that it is nil.
+func ShouldBeNil(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ } else if actual == nil {
+ return success
+ } else if interfaceHasNilValue(actual) {
+ return success
+ }
+ return fmt.Sprintf(shouldHaveBeenNil, actual)
+}
+func interfaceHasNilValue(actual interface{}) bool {
+ value := reflect.ValueOf(actual)
+ kind := value.Kind()
+ nilable := kind == reflect.Slice ||
+ kind == reflect.Chan ||
+ kind == reflect.Func ||
+ kind == reflect.Ptr ||
+ kind == reflect.Map
+
+ // Careful: reflect.Value.IsNil() will panic unless it's an interface, chan, map, func, slice, or ptr
+ // Reference: http://golang.org/pkg/reflect/#Value.IsNil
+ return nilable && value.IsNil()
+}
+
+// ShouldNotBeNil receives a single parameter and ensures that it is not nil.
+func ShouldNotBeNil(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ } else if ShouldBeNil(actual) == success {
+ return fmt.Sprintf(shouldNotHaveBeenNil, actual)
+ }
+ return success
+}
+
+// ShouldBeTrue receives a single parameter and ensures that it is true.
+func ShouldBeTrue(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ } else if actual != true {
+ return fmt.Sprintf(shouldHaveBeenTrue, actual)
+ }
+ return success
+}
+
+// ShouldBeFalse receives a single parameter and ensures that it is false.
+func ShouldBeFalse(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ } else if actual != false {
+ return fmt.Sprintf(shouldHaveBeenFalse, actual)
+ }
+ return success
+}
+
+// ShouldBeZeroValue receives a single parameter and ensures that it is
+// the Go equivalent of the default value, or "zero" value.
+func ShouldBeZeroValue(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+ zeroVal := reflect.Zero(reflect.TypeOf(actual)).Interface()
+ if !reflect.DeepEqual(zeroVal, actual) {
+ return serializer.serialize(zeroVal, actual, fmt.Sprintf(shouldHaveBeenZeroValue, actual))
+ }
+ return success
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/equality_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/equality_test.go
new file mode 100644
index 00000000000..5050e4b1619
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/equality_test.go
@@ -0,0 +1,269 @@
+package assertions
+
+import (
+ "fmt"
+ "reflect"
+ "testing"
+)
+
+func TestShouldEqual(t *testing.T) {
+ serializer = newFakeSerializer()
+
+ fail(t, so(1, ShouldEqual), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(1, ShouldEqual, 1, 2), "This assertion requires exactly 1 comparison values (you provided 2).")
+ fail(t, so(1, ShouldEqual, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ pass(t, so(1, ShouldEqual, 1))
+ fail(t, so(1, ShouldEqual, 2), "2|1|Expected: '2' Actual: '1' (Should be equal)")
+ fail(t, so(1, ShouldEqual, "1"), "1|1|Expected: '1' (string) Actual: '1' (int) (Should be equal, type mismatch)")
+
+ pass(t, so(true, ShouldEqual, true))
+ fail(t, so(true, ShouldEqual, false), "false|true|Expected: 'false' Actual: 'true' (Should be equal)")
+
+ pass(t, so("hi", ShouldEqual, "hi"))
+ fail(t, so("hi", ShouldEqual, "bye"), "bye|hi|Expected: 'bye' Actual: 'hi' (Should be equal)")
+
+ pass(t, so(42, ShouldEqual, uint(42)))
+
+ fail(t, so(Thing1{"hi"}, ShouldEqual, Thing1{}), "{}|{hi}|Expected: '{}' Actual: '{hi}' (Should be equal)")
+ fail(t, so(Thing1{"hi"}, ShouldEqual, Thing1{"hi"}), "{hi}|{hi}|Expected: '{hi}' Actual: '{hi}' (Should be equal)")
+ fail(t, so(&Thing1{"hi"}, ShouldEqual, &Thing1{"hi"}), "&{hi}|&{hi}|Expected: '&{hi}' Actual: '&{hi}' (Should be equal)")
+
+ fail(t, so(Thing1{}, ShouldEqual, Thing2{}), "{}|{}|Expected: '{}' Actual: '{}' (Should be equal)")
+}
+
+func TestShouldNotEqual(t *testing.T) {
+ fail(t, so(1, ShouldNotEqual), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(1, ShouldNotEqual, 1, 2), "This assertion requires exactly 1 comparison values (you provided 2).")
+ fail(t, so(1, ShouldNotEqual, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ pass(t, so(1, ShouldNotEqual, 2))
+ pass(t, so(1, ShouldNotEqual, "1"))
+ fail(t, so(1, ShouldNotEqual, 1), "Expected '1' to NOT equal '1' (but it did)!")
+
+ pass(t, so(true, ShouldNotEqual, false))
+ fail(t, so(true, ShouldNotEqual, true), "Expected 'true' to NOT equal 'true' (but it did)!")
+
+ pass(t, so("hi", ShouldNotEqual, "bye"))
+ fail(t, so("hi", ShouldNotEqual, "hi"), "Expected 'hi' to NOT equal 'hi' (but it did)!")
+
+ pass(t, so(&Thing1{"hi"}, ShouldNotEqual, &Thing1{"hi"}))
+ pass(t, so(Thing1{"hi"}, ShouldNotEqual, Thing1{"hi"}))
+ pass(t, so(Thing1{}, ShouldNotEqual, Thing1{}))
+ pass(t, so(Thing1{}, ShouldNotEqual, Thing2{}))
+}
+
+func TestShouldAlmostEqual(t *testing.T) {
+ fail(t, so(1, ShouldAlmostEqual), "This assertion requires exactly one comparison value and an optional delta (you provided neither)")
+ fail(t, so(1, ShouldAlmostEqual, 1, 2, 3), "This assertion requires exactly one comparison value and an optional delta (you provided more values)")
+
+ // with the default delta
+ pass(t, so(1, ShouldAlmostEqual, .99999999999999))
+ pass(t, so(1.3612499999999996, ShouldAlmostEqual, 1.36125))
+ pass(t, so(0.7285312499999999, ShouldAlmostEqual, 0.72853125))
+ fail(t, so(1, ShouldAlmostEqual, .99), "Expected '1' to almost equal '0.99' (but it didn't)!")
+
+ // with a different delta
+ pass(t, so(100.0, ShouldAlmostEqual, 110.0, 10.0))
+ fail(t, so(100.0, ShouldAlmostEqual, 111.0, 10.5), "Expected '100' to almost equal '111' (but it didn't)!")
+
+ // ints should work
+ pass(t, so(100, ShouldAlmostEqual, 100.0))
+ fail(t, so(100, ShouldAlmostEqual, 99.0), "Expected '100' to almost equal '99' (but it didn't)!")
+
+ // float32 should work
+ pass(t, so(float64(100.0), ShouldAlmostEqual, float32(100.0)))
+ fail(t, so(float32(100.0), ShouldAlmostEqual, 99.0, float32(0.1)), "Expected '100' to almost equal '99' (but it didn't)!")
+}
+
+func TestShouldNotAlmostEqual(t *testing.T) {
+ fail(t, so(1, ShouldNotAlmostEqual), "This assertion requires exactly one comparison value and an optional delta (you provided neither)")
+ fail(t, so(1, ShouldNotAlmostEqual, 1, 2, 3), "This assertion requires exactly one comparison value and an optional delta (you provided more values)")
+
+ // with the default delta
+ fail(t, so(1, ShouldNotAlmostEqual, .99999999999999), "Expected '1' to NOT almost equal '0.99999999999999' (but it did)!")
+ fail(t, so(1.3612499999999996, ShouldNotAlmostEqual, 1.36125), "Expected '1.3612499999999996' to NOT almost equal '1.36125' (but it did)!")
+ pass(t, so(1, ShouldNotAlmostEqual, .99))
+
+ // with a different delta
+ fail(t, so(100.0, ShouldNotAlmostEqual, 110.0, 10.0), "Expected '100' to NOT almost equal '110' (but it did)!")
+ pass(t, so(100.0, ShouldNotAlmostEqual, 111.0, 10.5))
+
+ // ints should work
+ fail(t, so(100, ShouldNotAlmostEqual, 100.0), "Expected '100' to NOT almost equal '100' (but it did)!")
+ pass(t, so(100, ShouldNotAlmostEqual, 99.0))
+
+ // float32 should work
+ fail(t, so(float64(100.0), ShouldNotAlmostEqual, float32(100.0)), "Expected '100' to NOT almost equal '100' (but it did)!")
+ pass(t, so(float32(100.0), ShouldNotAlmostEqual, 99.0, float32(0.1)))
+}
+
+func TestShouldResemble(t *testing.T) {
+ serializer = newFakeSerializer()
+
+ fail(t, so(Thing1{"hi"}, ShouldResemble), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(Thing1{"hi"}, ShouldResemble, Thing1{"hi"}, Thing1{"hi"}), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ pass(t, so(Thing1{"hi"}, ShouldResemble, Thing1{"hi"}))
+ fail(t, so(Thing1{"hi"}, ShouldResemble, Thing1{"bye"}), `{bye}|{hi}|Expected: '"assertions.Thing1{a:\"bye\"}"' Actual: '"assertions.Thing1{a:\"hi\"}"' (Should resemble)!`)
+
+ var (
+ a []int
+ b []int = []int{}
+ )
+
+ fail(t, so(a, ShouldResemble, b), `[]|[]|Expected: '"[]int{}"' Actual: '"[]int(nil)"' (Should resemble)!`)
+ fail(t, so(2, ShouldResemble, 1), `1|2|Expected: '"1"' Actual: '"2"' (Should resemble)!`)
+
+ fail(t, so(StringStringMapAlias{"hi": "bye"}, ShouldResemble, map[string]string{"hi": "bye"}),
+ `map[hi:bye]|map[hi:bye]|Expected: '"map[string]string{\"hi\":\"bye\"}"' Actual: '"assertions.StringStringMapAlias{\"hi\":\"bye\"}"' (Should resemble)!`)
+ fail(t, so(StringSliceAlias{"hi", "bye"}, ShouldResemble, []string{"hi", "bye"}),
+ `[hi bye]|[hi bye]|Expected: '"[]string{\"hi\", \"bye\"}"' Actual: '"assertions.StringSliceAlias{\"hi\", \"bye\"}"' (Should resemble)!`)
+
+ // some types come out looking the same when represented with "%#v", so we show type mismatch info:
+ fail(t, so(StringAlias("hi"), ShouldResemble, "hi"), `hi|hi|Expected: '"\"hi\""' Actual: '"assertions.StringAlias(\"hi\")"' (Should resemble)!`)
+ fail(t, so(IntAlias(42), ShouldResemble, 42), `42|42|Expected: '"42"' Actual: '"assertions.IntAlias(42)"' (Should resemble)!`)
+}
+
+func TestShouldNotResemble(t *testing.T) {
+ fail(t, so(Thing1{"hi"}, ShouldNotResemble), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(Thing1{"hi"}, ShouldNotResemble, Thing1{"hi"}, Thing1{"hi"}), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ pass(t, so(Thing1{"hi"}, ShouldNotResemble, Thing1{"bye"}))
+ fail(t, so(Thing1{"hi"}, ShouldNotResemble, Thing1{"hi"}),
+ `Expected '"assertions.Thing1{a:\"hi\"}"' to NOT resemble '"assertions.Thing1{a:\"hi\"}"' (but it did)!`)
+
+ pass(t, so(map[string]string{"hi": "bye"}, ShouldResemble, map[string]string{"hi": "bye"}))
+ pass(t, so(IntAlias(42), ShouldNotResemble, 42))
+
+ pass(t, so(StringSliceAlias{"hi", "bye"}, ShouldNotResemble, []string{"hi", "bye"}))
+}
+
+func TestShouldPointTo(t *testing.T) {
+ serializer = newFakeSerializer()
+
+ t1 := &Thing1{}
+ t2 := t1
+ t3 := &Thing1{}
+
+ pointer1 := reflect.ValueOf(t1).Pointer()
+ pointer3 := reflect.ValueOf(t3).Pointer()
+
+ fail(t, so(t1, ShouldPointTo), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(t1, ShouldPointTo, t2, t3), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ pass(t, so(t1, ShouldPointTo, t2))
+ fail(t, so(t1, ShouldPointTo, t3), fmt.Sprintf(
+ "%v|%v|Expected '&{a:}' (address: '%v') and '&{a:}' (address: '%v') to be the same address (but their weren't)!",
+ pointer3, pointer1, pointer1, pointer3))
+
+ t4 := Thing1{}
+ t5 := t4
+
+ fail(t, so(t4, ShouldPointTo, t5), "Both arguments should be pointers (the first was not)!")
+ fail(t, so(&t4, ShouldPointTo, t5), "Both arguments should be pointers (the second was not)!")
+ fail(t, so(nil, ShouldPointTo, nil), "Both arguments should be pointers (the first was nil)!")
+ fail(t, so(&t4, ShouldPointTo, nil), "Both arguments should be pointers (the second was nil)!")
+}
+
+func TestShouldNotPointTo(t *testing.T) {
+ t1 := &Thing1{}
+ t2 := t1
+ t3 := &Thing1{}
+
+ pointer1 := reflect.ValueOf(t1).Pointer()
+
+ fail(t, so(t1, ShouldNotPointTo), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(t1, ShouldNotPointTo, t2, t3), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ pass(t, so(t1, ShouldNotPointTo, t3))
+ fail(t, so(t1, ShouldNotPointTo, t2), fmt.Sprintf("Expected '&{a:}' and '&{a:}' to be different references (but they matched: '%v')!", pointer1))
+
+ t4 := Thing1{}
+ t5 := t4
+
+ fail(t, so(t4, ShouldNotPointTo, t5), "Both arguments should be pointers (the first was not)!")
+ fail(t, so(&t4, ShouldNotPointTo, t5), "Both arguments should be pointers (the second was not)!")
+ fail(t, so(nil, ShouldNotPointTo, nil), "Both arguments should be pointers (the first was nil)!")
+ fail(t, so(&t4, ShouldNotPointTo, nil), "Both arguments should be pointers (the second was nil)!")
+}
+
+func TestShouldBeNil(t *testing.T) {
+ fail(t, so(nil, ShouldBeNil, nil, nil, nil), "This assertion requires exactly 0 comparison values (you provided 3).")
+ fail(t, so(nil, ShouldBeNil, nil), "This assertion requires exactly 0 comparison values (you provided 1).")
+
+ pass(t, so(nil, ShouldBeNil))
+ fail(t, so(1, ShouldBeNil), "Expected: nil Actual: '1'")
+
+ var thing Thinger
+ pass(t, so(thing, ShouldBeNil))
+ thing = &Thing{}
+ fail(t, so(thing, ShouldBeNil), "Expected: nil Actual: '&{}'")
+
+ var thingOne *Thing1
+ pass(t, so(thingOne, ShouldBeNil))
+
+ var nilSlice []int = nil
+ pass(t, so(nilSlice, ShouldBeNil))
+
+ var nilMap map[string]string = nil
+ pass(t, so(nilMap, ShouldBeNil))
+
+ var nilChannel chan int = nil
+ pass(t, so(nilChannel, ShouldBeNil))
+
+ var nilFunc func() = nil
+ pass(t, so(nilFunc, ShouldBeNil))
+
+ var nilInterface interface{} = nil
+ pass(t, so(nilInterface, ShouldBeNil))
+}
+
+func TestShouldNotBeNil(t *testing.T) {
+ fail(t, so(nil, ShouldNotBeNil, nil, nil, nil), "This assertion requires exactly 0 comparison values (you provided 3).")
+ fail(t, so(nil, ShouldNotBeNil, nil), "This assertion requires exactly 0 comparison values (you provided 1).")
+
+ fail(t, so(nil, ShouldNotBeNil), "Expected '<nil>' to NOT be nil (but it was)!")
+ pass(t, so(1, ShouldNotBeNil))
+
+ var thing Thinger
+ fail(t, so(thing, ShouldNotBeNil), "Expected '<nil>' to NOT be nil (but it was)!")
+ thing = &Thing{}
+ pass(t, so(thing, ShouldNotBeNil))
+}
+
+func TestShouldBeTrue(t *testing.T) {
+ fail(t, so(true, ShouldBeTrue, 1, 2, 3), "This assertion requires exactly 0 comparison values (you provided 3).")
+ fail(t, so(true, ShouldBeTrue, 1), "This assertion requires exactly 0 comparison values (you provided 1).")
+
+ fail(t, so(false, ShouldBeTrue), "Expected: true Actual: false")
+ fail(t, so(1, ShouldBeTrue), "Expected: true Actual: 1")
+ pass(t, so(true, ShouldBeTrue))
+}
+
+func TestShouldBeFalse(t *testing.T) {
+ fail(t, so(false, ShouldBeFalse, 1, 2, 3), "This assertion requires exactly 0 comparison values (you provided 3).")
+ fail(t, so(false, ShouldBeFalse, 1), "This assertion requires exactly 0 comparison values (you provided 1).")
+
+ fail(t, so(true, ShouldBeFalse), "Expected: false Actual: true")
+ fail(t, so(1, ShouldBeFalse), "Expected: false Actual: 1")
+ pass(t, so(false, ShouldBeFalse))
+}
+
+func TestShouldBeZeroValue(t *testing.T) {
+ serializer = newFakeSerializer()
+
+ fail(t, so(0, ShouldBeZeroValue, 1, 2, 3), "This assertion requires exactly 0 comparison values (you provided 3).")
+ fail(t, so(false, ShouldBeZeroValue, true), "This assertion requires exactly 0 comparison values (you provided 1).")
+
+ fail(t, so(1, ShouldBeZeroValue), "0|1|'1' should have been the zero value") //"Expected: (zero value) Actual: 1")
+ fail(t, so(true, ShouldBeZeroValue), "false|true|'true' should have been the zero value") //"Expected: (zero value) Actual: true")
+ fail(t, so("123", ShouldBeZeroValue), "|123|'123' should have been the zero value") //"Expected: (zero value) Actual: 123")
+ fail(t, so(" ", ShouldBeZeroValue), "| |' ' should have been the zero value") //"Expected: (zero value) Actual: ")
+ fail(t, so([]string{"Nonempty"}, ShouldBeZeroValue), "[]|[Nonempty]|'[Nonempty]' should have been the zero value") //"Expected: (zero value) Actual: [Nonempty]")
+ fail(t, so(struct{ a string }{a: "asdf"}, ShouldBeZeroValue), "{}|{asdf}|'{a:asdf}' should have been the zero value")
+ pass(t, so(0, ShouldBeZeroValue))
+ pass(t, so(false, ShouldBeZeroValue))
+ pass(t, so("", ShouldBeZeroValue))
+ pass(t, so(struct{}{}, ShouldBeZeroValue))
+} \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/filter.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/filter.go
new file mode 100644
index 00000000000..ee368a97ed7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/filter.go
@@ -0,0 +1,23 @@
+package assertions
+
+import "fmt"
+
+const (
+ success = ""
+ needExactValues = "This assertion requires exactly %d comparison values (you provided %d)."
+ needNonEmptyCollection = "This assertion requires at least 1 comparison value (you provided 0)."
+)
+
+func need(needed int, expected []interface{}) string {
+ if len(expected) != needed {
+ return fmt.Sprintf(needExactValues, needed, len(expected))
+ }
+ return success
+}
+
+func atLeast(minimum int, expected []interface{}) string {
+ if len(expected) < 1 {
+ return needNonEmptyCollection
+ }
+ return success
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/Makefile b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/Makefile
new file mode 100644
index 00000000000..0894b82bd81
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/Makefile
@@ -0,0 +1,23 @@
+# This Makefile pulls the latest oglematchers (with dependencies),
+# rewrites the imports to match this location,
+# and ensures that all the tests pass.
+
+go: clean clone rewrite
+
+clean:
+ rm -rf ogle*
+ rm -rf reqtrace
+ rm -rf go-render
+
+clone:
+ git clone https://github.com/jacobsa/ogletest.git && rm -rf ogletest/.git
+ git clone https://github.com/jacobsa/oglemock.git && rm -rf oglemock/.git
+ git clone https://github.com/jacobsa/oglematchers.git && rm -rf oglematchers/.git
+ git clone https://github.com/jacobsa/reqtrace.git && rm -rf reqtrace/.git
+ git clone https://github.com/luci/go-render.git && rm -rf go-render/.git
+
+rewrite:
+ grep -rl --exclude Makefile 'github.com/jacobsa' . | xargs sed -i '' 's#github.com/jacobsa#github.com/smartystreets/assertions/internal#g'
+
+test:
+ go test github.com/smartystreets/assertions/...
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/.travis.yml b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/.travis.yml
new file mode 100644
index 00000000000..5a19a5faf38
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/.travis.yml
@@ -0,0 +1,21 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# {sudo: required, dist: trusty} is the magic incantation to pick the trusty
+# beta environment, which is the only environment we can get that has >4GB
+# memory. Currently the `go test -race` tests that we run will peak at just
+# over 4GB, which results in everything getting OOM-killed.
+sudo: required
+dist: trusty
+
+language: go
+
+go:
+- 1.4.2
+
+before_install:
+ - go get github.com/maruel/pre-commit-go/cmd/pcg
+
+script:
+ - pcg
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/LICENSE b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/LICENSE
new file mode 100644
index 00000000000..6280ff0e06b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/LICENSE
@@ -0,0 +1,27 @@
+// Copyright (c) 2015 The Chromium Authors. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/PRESUBMIT.py b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/PRESUBMIT.py
new file mode 100644
index 00000000000..d05f0cd8734
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/PRESUBMIT.py
@@ -0,0 +1,109 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Top-level presubmit script.
+
+See https://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into depot_tools.
+"""
+
+import os
+import sys
+
+
+def PreCommitGo(input_api, output_api, pcg_mode):
+ """Run go-specific checks via pre-commit-go (pcg) if it's in PATH."""
+ if input_api.is_committing:
+ error_type = output_api.PresubmitError
+ else:
+ error_type = output_api.PresubmitPromptWarning
+
+ exe = 'pcg.exe' if sys.platform == 'win32' else 'pcg'
+ pcg = None
+ for p in os.environ['PATH'].split(os.pathsep):
+ pcg = os.path.join(p, exe)
+ if os.access(pcg, os.X_OK):
+ break
+ else:
+ return [
+ error_type(
+ 'pre-commit-go executable (pcg) could not be found in PATH. All Go '
+ 'checks are skipped. See https://github.com/maruel/pre-commit-go.')
+ ]
+
+ cmd = [pcg, 'run', '-m', ','.join(pcg_mode)]
+ if input_api.verbose:
+ cmd.append('-v')
+ # pcg can figure out what files to check on its own based on upstream ref,
+ # but on the PRESUBMIT try builder upstream isn't set, and it's just 1 commit.
+ if os.getenv('PRESUBMIT_BUILDER', ''):
+ cmd.extend(['-r', 'HEAD~1'])
+ return input_api.RunTests([
+ input_api.Command(
+ name='pre-commit-go: %s' % ', '.join(pcg_mode),
+ cmd=cmd,
+ kwargs={},
+ message=error_type),
+ ])
+
+
+def header(input_api):
+ """Returns the expected license header regexp for this project."""
+ current_year = int(input_api.time.strftime('%Y'))
+ allowed_years = (str(s) for s in reversed(xrange(2011, current_year + 1)))
+ years_re = '(' + '|'.join(allowed_years) + ')'
+ license_header = (
+ r'.*? Copyright %(year)s The Chromium Authors\. '
+ r'All rights reserved\.\n'
+ r'.*? Use of this source code is governed by a BSD-style license '
+ r'that can be\n'
+ r'.*? found in the LICENSE file\.(?: \*/)?\n'
+ ) % {
+ 'year': years_re,
+ }
+ return license_header
+
+
+def source_file_filter(input_api):
+ """Returns filter that selects source code files only."""
+ bl = list(input_api.DEFAULT_BLACK_LIST) + [
+ r'.+\.pb\.go$',
+ r'.+_string\.go$',
+ ]
+ wl = list(input_api.DEFAULT_WHITE_LIST) + [
+ r'.+\.go$',
+ ]
+ return lambda x: input_api.FilterSourceFile(x, white_list=wl, black_list=bl)
+
+
+def CommonChecks(input_api, output_api):
+ results = []
+ results.extend(
+ input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
+ input_api, output_api,
+ source_file_filter=source_file_filter(input_api)))
+ results.extend(
+ input_api.canned_checks.CheckLicense(
+ input_api, output_api, header(input_api),
+ source_file_filter=source_file_filter(input_api)))
+ return results
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ results = CommonChecks(input_api, output_api)
+ results.extend(PreCommitGo(input_api, output_api, ['lint', 'pre-commit']))
+ return results
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ results = CommonChecks(input_api, output_api)
+ results.extend(input_api.canned_checks.CheckChangeHasDescription(
+ input_api, output_api))
+ results.extend(input_api.canned_checks.CheckDoNotSubmitInDescription(
+ input_api, output_api))
+ results.extend(input_api.canned_checks.CheckDoNotSubmitInFiles(
+ input_api, output_api))
+ results.extend(PreCommitGo(
+ input_api, output_api, ['continuous-integration']))
+ return results
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/README.md b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/README.md
new file mode 100644
index 00000000000..a85380c421a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/README.md
@@ -0,0 +1,78 @@
+go-render: A verbose recursive Go type-to-string conversion library.
+====================================================================
+
+[![GoDoc](https://godoc.org/github.com/luci/go-render?status.svg)](https://godoc.org/github.com/luci/go-render)
+[![Build Status](https://travis-ci.org/luci/go-render.svg)](https://travis-ci.org/luci/go-render)
+
+This is not an official Google product.
+
+## Overview
+
+The *render* package implements a more verbose form of the standard Go string
+formatter, `fmt.Sprintf("%#v", value)`, adding:
+ - Pointer recursion. Normally, Go stops at the first pointer and prints its
+ address. The *render* package will recurse and continue to render pointer
+ values.
+ - Recursion loop detection. Recursion is nice, but if a recursion path detects
+ a loop, *render* will note this and move on.
+ - Custom type name rendering.
+ - Deterministic key sorting for `string`- and `int`-keyed maps.
+ - Testing!
+
+Call `render.Render` and pass it an `interface{}`.
+
+For example:
+
+```Go
+type customType int
+type testStruct struct {
+ S string
+ V *map[string]int
+ I interface{}
+}
+
+a := testStruct{
+ S: "hello",
+ V: &map[string]int{"foo": 0, "bar": 1},
+ I: customType(42),
+}
+
+fmt.Println("Render test:")
+fmt.Printf("fmt.Printf: %#v\n", a)
+fmt.Printf("render.Render: %s\n", Render(a))
+```
+
+Yields:
+```
+fmt.Printf: render.testStruct{S:"hello", V:(*map[string]int)(0x600dd065), I:42}
+render.Render: render.testStruct{S:"hello", V:(*map[string]int){"bar":1, "foo":0}, I:render.customType(42)}
+```
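+
+The "Recursion loop detection" bullet above shows up in the output as a
+`<REC(...)>` placeholder. A minimal sketch, mirroring the recursive-struct case
+in this package's own `render_test.go`:
+
+```Go
+type testStruct struct {
+ Name string
+ I interface{}
+}
+
+s := &testStruct{Name: "recursive"}
+s.I = s // the struct now contains a reference back to itself
+
+// Render notices the loop and emits a placeholder instead of recursing forever:
+// (*render.testStruct){Name:"recursive", I:<REC(*render.testStruct)>}
+fmt.Println(Render(s))
+```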
+
+This is not intended to be a high-performance library, but it's not terrible
+either.
+
+Contributing
+------------
+
+ * Sign the [Google CLA](https://cla.developers.google.com/clas).
+ * Make sure your `user.email` and `user.name` are configured in `git config`.
+ * Install the [pcg](https://github.com/maruel/pre-commit-go) git hook:
+ `go get -u github.com/maruel/pre-commit-go/cmd/... && pcg`
+
+Run the following to setup the code review tool and create your first review:
+
+ git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git $HOME/src/depot_tools
+ export PATH="$PATH:$HOME/src/depot_tools"
+ cd $GOPATH/src/github.com/luci/go-render
+ git checkout -b work origin/master
+
+ # hack hack
+
+ git commit -a -m "This is awesome\nR=joe@example.com"
+ # This will ask for your Google Account credentials.
+ git cl upload -s
+ # Wait for LGTM over email.
+ # Check the commit queue box in codereview website.
+ # Wait for the change to be tested and landed automatically.
+
+Use `git cl help` and `git cl help <cmd>` for more details.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/WATCHLISTS b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/WATCHLISTS
new file mode 100644
index 00000000000..e4172088dd3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/WATCHLISTS
@@ -0,0 +1,26 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Watchlist Rules
+# Refer: http://dev.chromium.org/developers/contributing-code/watchlists
+
+{
+
+ 'WATCHLIST_DEFINITIONS': {
+ 'all': {
+ 'filepath': '.+',
+ },
+ },
+
+ 'WATCHLISTS': {
+ 'all': [
+ # Add yourself here to get explicitly spammed.
+ 'maruel@chromium.org',
+ 'tandrii+luci-go@chromium.org',
+ 'todd@cloudera.com',
+ 'andrew.wang@cloudera.com',
+ ],
+ },
+
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/pre-commit-go.yml b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/pre-commit-go.yml
new file mode 100644
index 00000000000..074ee1f84df
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/pre-commit-go.yml
@@ -0,0 +1,78 @@
+# https://github.com/maruel/pre-commit-go configuration file to run checks
+# automatically on commit, on push and on continuous integration service after
+# a push or on merge of a pull request.
+#
+# See https://godoc.org/github.com/maruel/pre-commit-go/checks for more
+# information.
+
+min_version: 0.4.7
+modes:
+ continuous-integration:
+ checks:
+ build:
+ - build_all: false
+ extra_args: []
+ coverage:
+ - use_global_inference: false
+ use_coveralls: true
+ global:
+ min_coverage: 50
+ max_coverage: 100
+ per_dir_default:
+ min_coverage: 1
+ max_coverage: 100
+ per_dir: {}
+ gofmt:
+ - {}
+ goimports:
+ - {}
+ test:
+ - extra_args:
+ - -v
+ - -race
+ max_duration: 600
+ lint:
+ checks:
+ golint:
+ - blacklist: []
+ govet:
+ - blacklist:
+ - ' composite literal uses unkeyed fields'
+ max_duration: 15
+ pre-commit:
+ checks:
+ build:
+ - build_all: false
+ extra_args: []
+ gofmt:
+ - {}
+ test:
+ - extra_args:
+ - -short
+ max_duration: 35
+ pre-push:
+ checks:
+ coverage:
+ - use_global_inference: false
+ use_coveralls: false
+ global:
+ min_coverage: 50
+ max_coverage: 100
+ per_dir_default:
+ min_coverage: 1
+ max_coverage: 100
+ per_dir: {}
+ goimports:
+ - {}
+ test:
+ - extra_args:
+ - -v
+ - -race
+ max_duration: 35
+
+ignore_patterns:
+- .*
+- _*
+- '*.pb.go'
+- '*_string.go'
+- '*-gen.go'
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/render/render.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/render/render.go
new file mode 100644
index 00000000000..e070a6b3b58
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/render/render.go
@@ -0,0 +1,327 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package render
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+ "sort"
+ "strconv"
+)
+
+var implicitTypeMap = map[reflect.Kind]string{
+ reflect.Bool: "bool",
+ reflect.String: "string",
+ reflect.Int: "int",
+ reflect.Int8: "int8",
+ reflect.Int16: "int16",
+ reflect.Int32: "int32",
+ reflect.Int64: "int64",
+ reflect.Uint: "uint",
+ reflect.Uint8: "uint8",
+ reflect.Uint16: "uint16",
+ reflect.Uint32: "uint32",
+ reflect.Uint64: "uint64",
+ reflect.Float32: "float32",
+ reflect.Float64: "float64",
+ reflect.Complex64: "complex64",
+ reflect.Complex128: "complex128",
+}
+
+// Render converts a structure to a string representation. Unlike the "%#v"
+// format string, this resolves pointer types' contents in structs, maps, and
+// slices/arrays and prints their field values.
+func Render(v interface{}) string {
+ buf := bytes.Buffer{}
+ s := (*traverseState)(nil)
+ s.render(&buf, 0, reflect.ValueOf(v))
+ return buf.String()
+}
+
+// renderPointer is called to render a pointer value.
+//
+// This is overridable so that the test suite can have deterministic pointer
+// values in its expectations.
+var renderPointer = func(buf *bytes.Buffer, p uintptr) {
+ fmt.Fprintf(buf, "0x%016x", p)
+}
+
+// traverseState is used to note and avoid recursion as struct members are being
+// traversed.
+//
+// traverseState is allowed to be nil. Specifically, the root state is nil.
+type traverseState struct {
+ parent *traverseState
+ ptr uintptr
+}
+
+func (s *traverseState) forkFor(ptr uintptr) *traverseState {
+ for cur := s; cur != nil; cur = cur.parent {
+ if ptr == cur.ptr {
+ return nil
+ }
+ }
+
+ fs := &traverseState{
+ parent: s,
+ ptr: ptr,
+ }
+ return fs
+}
+
+func (s *traverseState) render(buf *bytes.Buffer, ptrs int, v reflect.Value) {
+ if v.Kind() == reflect.Invalid {
+ buf.WriteString("nil")
+ return
+ }
+ vt := v.Type()
+
+ // If the type being rendered is a potentially recursive type (a type that
+ // can contain itself as a member), we need to avoid recursion.
+ //
+ // If we've already seen this type before, mark that this is the case and
+ // write a recursion placeholder instead of actually rendering it.
+ //
+ // If we haven't seen it before, fork our `seen` tracking so any higher-up
+ // renderers will also render it at least once, then mark that we've seen it
+ // to avoid recursing on lower layers.
+ pe := uintptr(0)
+ vk := vt.Kind()
+ switch vk {
+ case reflect.Ptr:
+ // Since structs and arrays aren't pointers, they can't directly be
+ // recursed, but they can contain pointers to themselves. Record their
+ // pointer to avoid this.
+ switch v.Elem().Kind() {
+ case reflect.Struct, reflect.Array:
+ pe = v.Pointer()
+ }
+
+ case reflect.Slice, reflect.Map:
+ pe = v.Pointer()
+ }
+ if pe != 0 {
+ s = s.forkFor(pe)
+ if s == nil {
+ buf.WriteString("<REC(")
+ writeType(buf, ptrs, vt)
+ buf.WriteString(")>")
+ return
+ }
+ }
+
+ switch vk {
+ case reflect.Struct:
+ writeType(buf, ptrs, vt)
+ buf.WriteRune('{')
+ for i := 0; i < vt.NumField(); i++ {
+ if i > 0 {
+ buf.WriteString(", ")
+ }
+ buf.WriteString(vt.Field(i).Name)
+ buf.WriteRune(':')
+
+ s.render(buf, 0, v.Field(i))
+ }
+ buf.WriteRune('}')
+
+ case reflect.Slice:
+ if v.IsNil() {
+ writeType(buf, ptrs, vt)
+ buf.WriteString("(nil)")
+ return
+ }
+ fallthrough
+
+ case reflect.Array:
+ writeType(buf, ptrs, vt)
+ buf.WriteString("{")
+ for i := 0; i < v.Len(); i++ {
+ if i > 0 {
+ buf.WriteString(", ")
+ }
+
+ s.render(buf, 0, v.Index(i))
+ }
+ buf.WriteRune('}')
+
+ case reflect.Map:
+ writeType(buf, ptrs, vt)
+ if v.IsNil() {
+ buf.WriteString("(nil)")
+ } else {
+ buf.WriteString("{")
+
+ mkeys := v.MapKeys()
+ tryAndSortMapKeys(vt, mkeys)
+
+ for i, mk := range mkeys {
+ if i > 0 {
+ buf.WriteString(", ")
+ }
+
+ s.render(buf, 0, mk)
+ buf.WriteString(":")
+ s.render(buf, 0, v.MapIndex(mk))
+ }
+ buf.WriteRune('}')
+ }
+
+ case reflect.Ptr:
+ ptrs++
+ fallthrough
+ case reflect.Interface:
+ if v.IsNil() {
+ writeType(buf, ptrs, v.Type())
+ buf.WriteRune('(')
+ fmt.Fprint(buf, "nil")
+ buf.WriteRune(')')
+ } else {
+ s.render(buf, ptrs, v.Elem())
+ }
+
+ case reflect.Chan, reflect.Func, reflect.UnsafePointer:
+ writeType(buf, ptrs, vt)
+ buf.WriteRune('(')
+ renderPointer(buf, v.Pointer())
+ buf.WriteRune(')')
+
+ default:
+ tstr := vt.String()
+ implicit := ptrs == 0 && implicitTypeMap[vk] == tstr
+ if !implicit {
+ writeType(buf, ptrs, vt)
+ buf.WriteRune('(')
+ }
+
+ switch vk {
+ case reflect.String:
+ fmt.Fprintf(buf, "%q", v.String())
+ case reflect.Bool:
+ fmt.Fprintf(buf, "%v", v.Bool())
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ fmt.Fprintf(buf, "%d", v.Int())
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ fmt.Fprintf(buf, "%d", v.Uint())
+
+ case reflect.Float32, reflect.Float64:
+ fmt.Fprintf(buf, "%g", v.Float())
+
+ case reflect.Complex64, reflect.Complex128:
+ fmt.Fprintf(buf, "%g", v.Complex())
+ }
+
+ if !implicit {
+ buf.WriteRune(')')
+ }
+ }
+}
+
+func writeType(buf *bytes.Buffer, ptrs int, t reflect.Type) {
+ parens := ptrs > 0
+ switch t.Kind() {
+ case reflect.Chan, reflect.Func, reflect.UnsafePointer:
+ parens = true
+ }
+
+ if parens {
+ buf.WriteRune('(')
+ for i := 0; i < ptrs; i++ {
+ buf.WriteRune('*')
+ }
+ }
+
+ switch t.Kind() {
+ case reflect.Ptr:
+ if ptrs == 0 {
+ // This pointer was referenced from within writeType (e.g., as part of
+ // rendering a list), and so hasn't had its pointer asterisk accounted
+ // for.
+ buf.WriteRune('*')
+ }
+ writeType(buf, 0, t.Elem())
+
+ case reflect.Interface:
+ if n := t.Name(); n != "" {
+ buf.WriteString(t.String())
+ } else {
+ buf.WriteString("interface{}")
+ }
+
+ case reflect.Array:
+ buf.WriteRune('[')
+ buf.WriteString(strconv.FormatInt(int64(t.Len()), 10))
+ buf.WriteRune(']')
+ writeType(buf, 0, t.Elem())
+
+ case reflect.Slice:
+ if t == reflect.SliceOf(t.Elem()) {
+ buf.WriteString("[]")
+ writeType(buf, 0, t.Elem())
+ } else {
+ // Custom slice type, use type name.
+ buf.WriteString(t.String())
+ }
+
+ case reflect.Map:
+ if t == reflect.MapOf(t.Key(), t.Elem()) {
+ buf.WriteString("map[")
+ writeType(buf, 0, t.Key())
+ buf.WriteRune(']')
+ writeType(buf, 0, t.Elem())
+ } else {
+ // Custom map type, use type name.
+ buf.WriteString(t.String())
+ }
+
+ default:
+ buf.WriteString(t.String())
+ }
+
+ if parens {
+ buf.WriteRune(')')
+ }
+}
+
+type sortableValueSlice struct {
+ kind reflect.Kind
+ elements []reflect.Value
+}
+
+func (s *sortableValueSlice) Len() int {
+ return len(s.elements)
+}
+
+func (s *sortableValueSlice) Less(i, j int) bool {
+ switch s.kind {
+ case reflect.String:
+ return s.elements[i].String() < s.elements[j].String()
+
+ case reflect.Int:
+ return s.elements[i].Int() < s.elements[j].Int()
+
+ default:
+ panic(fmt.Errorf("unsupported sort kind: %s", s.kind))
+ }
+}
+
+func (s *sortableValueSlice) Swap(i, j int) {
+ s.elements[i], s.elements[j] = s.elements[j], s.elements[i]
+}
+
+func tryAndSortMapKeys(mt reflect.Type, k []reflect.Value) {
+ // Try our stock sortable values.
+ switch mt.Key().Kind() {
+ case reflect.String, reflect.Int:
+ vs := &sortableValueSlice{
+ kind: mt.Key().Kind(),
+ elements: k,
+ }
+ sort.Sort(vs)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/render/render_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/render/render_test.go
new file mode 100644
index 00000000000..1737cb702ae
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/go-render/render/render_test.go
@@ -0,0 +1,170 @@
+// Copyright 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package render
+
+import (
+ "bytes"
+ "fmt"
+ "regexp"
+ "runtime"
+ "testing"
+)
+
+func init() {
+ // For testing purposes, pointers will render as "PTR" so that they are
+ // deterministic.
+ renderPointer = func(buf *bytes.Buffer, p uintptr) {
+ buf.WriteString("PTR")
+ }
+}
+
+func assertRendersLike(t *testing.T, name string, v interface{}, exp string) {
+ act := Render(v)
+ if act != exp {
+ _, _, line, _ := runtime.Caller(1)
+ t.Errorf("On line #%d, [%s] did not match expectations:\nExpected: %s\nActual : %s\n", line, name, exp, act)
+ }
+}
+
+func TestRenderList(t *testing.T) {
+ t.Parallel()
+
+ // Note that we make some of the fields exported. This is to avoid a fun case
+ // where the first reflect.Value has a read-only bit set, but follow-on values
+ // do not, so recursion tests are off by one.
+ type testStruct struct {
+ Name string
+ I interface{}
+
+ m string
+ }
+
+ type myStringSlice []string
+ type myStringMap map[string]string
+ type myIntType int
+ type myStringType string
+
+ s0 := "string0"
+ s0P := &s0
+ mit := myIntType(42)
+ stringer := fmt.Stringer(nil)
+
+ for i, tc := range []struct {
+ a interface{}
+ s string
+ }{
+ {nil, `nil`},
+ {make(chan int), `(chan int)(PTR)`},
+ {&stringer, `(*fmt.Stringer)(nil)`},
+ {123, `123`},
+ {"hello", `"hello"`},
+ {(*testStruct)(nil), `(*render.testStruct)(nil)`},
+ {(**testStruct)(nil), `(**render.testStruct)(nil)`},
+ {[]***testStruct(nil), `[]***render.testStruct(nil)`},
+ {testStruct{Name: "foo", I: &testStruct{Name: "baz"}},
+ `render.testStruct{Name:"foo", I:(*render.testStruct){Name:"baz", I:interface{}(nil), m:""}, m:""}`},
+ {[]byte(nil), `[]uint8(nil)`},
+ {[]byte{}, `[]uint8{}`},
+ {map[string]string(nil), `map[string]string(nil)`},
+ {[]*testStruct{
+ {Name: "foo"},
+ {Name: "bar"},
+ }, `[]*render.testStruct{(*render.testStruct){Name:"foo", I:interface{}(nil), m:""}, ` +
+ `(*render.testStruct){Name:"bar", I:interface{}(nil), m:""}}`},
+ {myStringSlice{"foo", "bar"}, `render.myStringSlice{"foo", "bar"}`},
+ {myStringMap{"foo": "bar"}, `render.myStringMap{"foo":"bar"}`},
+ {myIntType(12), `render.myIntType(12)`},
+ {&mit, `(*render.myIntType)(42)`},
+ {myStringType("foo"), `render.myStringType("foo")`},
+ {struct {
+ a int
+ b string
+ }{123, "foo"}, `struct { a int; b string }{a:123, b:"foo"}`},
+ {[]string{"foo", "foo", "bar", "baz", "qux", "qux"},
+ `[]string{"foo", "foo", "bar", "baz", "qux", "qux"}`},
+ {[...]int{1, 2, 3}, `[3]int{1, 2, 3}`},
+ {map[string]bool{
+ "foo": true,
+ "bar": false,
+ }, `map[string]bool{"bar":false, "foo":true}`},
+ {map[int]string{1: "foo", 2: "bar"}, `map[int]string{1:"foo", 2:"bar"}`},
+ {uint32(1337), `1337`},
+ {3.14, `3.14`},
+ {complex(3, 0.14), `(3+0.14i)`},
+ {&s0, `(*string)("string0")`},
+ {&s0P, `(**string)("string0")`},
+ {[]interface{}{nil, 1, 2, nil}, `[]interface{}{interface{}(nil), 1, 2, interface{}(nil)}`},
+ } {
+ assertRendersLike(t, fmt.Sprintf("Input #%d", i), tc.a, tc.s)
+ }
+}
+
+func TestRenderRecursiveStruct(t *testing.T) {
+ type testStruct struct {
+ Name string
+ I interface{}
+ }
+
+ s := &testStruct{
+ Name: "recursive",
+ }
+ s.I = s
+
+ assertRendersLike(t, "Recursive struct", s,
+ `(*render.testStruct){Name:"recursive", I:<REC(*render.testStruct)>}`)
+}
+
+func TestRenderRecursiveArray(t *testing.T) {
+ a := [2]interface{}{}
+ a[0] = &a
+ a[1] = &a
+
+ assertRendersLike(t, "Recursive array", &a,
+ `(*[2]interface{}){<REC(*[2]interface{})>, <REC(*[2]interface{})>}`)
+}
+
+func TestRenderRecursiveMap(t *testing.T) {
+ m := map[string]interface{}{}
+ foo := "foo"
+ m["foo"] = m
+ m["bar"] = [](*string){&foo, &foo}
+ v := []map[string]interface{}{m, m}
+
+ assertRendersLike(t, "Recursive map", v,
+ `[]map[string]interface{}{map[string]interface{}{`+
+ `"bar":[]*string{(*string)("foo"), (*string)("foo")}, `+
+ `"foo":<REC(map[string]interface{})>}, `+
+ `map[string]interface{}{`+
+ `"bar":[]*string{(*string)("foo"), (*string)("foo")}, `+
+ `"foo":<REC(map[string]interface{})>}}`)
+}
+
+func ExampleInReadme() {
+ type customType int
+ type testStruct struct {
+ S string
+ V *map[string]int
+ I interface{}
+ }
+
+ a := testStruct{
+ S: "hello",
+ V: &map[string]int{"foo": 0, "bar": 1},
+ I: customType(42),
+ }
+
+ fmt.Println("Render test:")
+ fmt.Printf("fmt.Printf: %s\n", sanitizePointer(fmt.Sprintf("%#v", a)))
+ fmt.Printf("render.Render: %s\n", Render(a))
+ // Output: Render test:
+ // fmt.Printf: render.testStruct{S:"hello", V:(*map[string]int)(0x600dd065), I:42}
+ // render.Render: render.testStruct{S:"hello", V:(*map[string]int){"bar":1, "foo":0}, I:render.customType(42)}
+}
+
+var pointerRE = regexp.MustCompile(`\(0x[a-f0-9]+\)`)
+
+func sanitizePointer(s string) string {
+ return pointerRE.ReplaceAllString(s, "(0x600dd065)")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/.gitignore b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/.gitignore
new file mode 100644
index 00000000000..dd8fc7468f4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/.gitignore
@@ -0,0 +1,5 @@
+*.6
+6.out
+_obj/
+_test/
+_testmain.go
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/.travis.yml b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/.travis.yml
new file mode 100644
index 00000000000..b97211926e8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/.travis.yml
@@ -0,0 +1,4 @@
+# Cf. http://docs.travis-ci.com/user/getting-started/
+# Cf. http://docs.travis-ci.com/user/languages/go/
+
+language: go
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/LICENSE b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/LICENSE
new file mode 100644
index 00000000000..d6456956733
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/README.md b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/README.md
new file mode 100644
index 00000000000..215a2bb7a8b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/README.md
@@ -0,0 +1,58 @@
+[![GoDoc](https://godoc.org/github.com/smartystreets/assertions/internal/oglematchers?status.svg)](https://godoc.org/github.com/smartystreets/assertions/internal/oglematchers)
+
+`oglematchers` is a package for the Go programming language containing a set of
+matchers, useful in a testing or mocking framework, inspired by and mostly
+compatible with [Google Test][googletest] for C++ and
+[Google JS Test][google-js-test]. The package is used by the
+[ogletest][ogletest] testing framework and [oglemock][oglemock] mocking
+framework, which may be more directly useful to you, but can be generically used
+elsewhere as well.
+
+A "matcher" is simply an object with a `Matches` method defining a set of golang
+values matched by the matcher, and a `Description` method describing that set.
+For example, here are some matchers:
+
+```go
+// Numbers
+Equals(17.13)
+LessThan(19)
+
+// Strings
+Equals("taco")
+HasSubstr("burrito")
+MatchesRegex("t.*o")
+
+// Combining matchers
+AnyOf(LessThan(17), GreaterThan(19))
+```
+
+There are lots more; see [here][reference] for a reference. You can also add
+your own simply by implementing the `oglematchers.Matcher` interface.
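+
+As a minimal sketch of what that looks like (the `isEven` matcher below is a
+made-up example, not something shipped with this package), a matcher only needs
+the two methods:
+
+```go
+// isEven matches any int that is divisible by two.
+type isEven struct{}
+
+func (m *isEven) Description() string {
+ return "is an even integer"
+}
+
+func (m *isEven) Matches(c interface{}) error {
+ n, ok := c.(int)
+ if !ok {
+ // A fatal error tells combining matchers such as AnyOf to stop early.
+ return NewFatalError("which is not an int")
+ }
+ if n%2 != 0 {
+ return errors.New("which is odd")
+ }
+ return nil // matched
+}
+
+// With ogletest this could then be used as: ExpectThat(42, &isEven{})
+```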
+
+
+Installation
+------------
+
+First, make sure you have installed Go 1.0.2 or newer. See
+[here][golang-install] for instructions.
+
+Use the following command to install `oglematchers` and keep it up to date:
+
+ go get -u github.com/smartystreets/assertions/internal/oglematchers
+
+
+Documentation
+-------------
+
+See [here][reference] for documentation. Alternatively, you can install the
+package and then use `godoc`:
+
+ godoc github.com/smartystreets/assertions/internal/oglematchers
+
+
+[reference]: http://godoc.org/github.com/smartystreets/assertions/internal/oglematchers
+[golang-install]: http://golang.org/doc/install.html
+[googletest]: http://code.google.com/p/googletest/
+[google-js-test]: http://code.google.com/p/google-js-test/
+[ogletest]: http://github.com/smartystreets/assertions/internal/ogletest
+[oglemock]: http://github.com/smartystreets/assertions/internal/oglemock
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/all_of.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/all_of.go
new file mode 100644
index 00000000000..d93a9740443
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/all_of.go
@@ -0,0 +1,70 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "strings"
+)
+
+// AllOf accepts a set of matchers S and returns a matcher that follows the
+// algorithm below when considering a candidate c:
+//
+// 1. Return true if for every Matcher m in S, m matches c.
+//
+// 2. Otherwise, if there is a matcher m in S such that m returns a fatal
+// error for c, return that matcher's error message.
+//
+// 3. Otherwise, return false with the error from some wrapped matcher.
+//
+// This is akin to a logical AND operation for matchers.
+func AllOf(matchers ...Matcher) Matcher {
+ return &allOfMatcher{matchers}
+}
+
+type allOfMatcher struct {
+ wrappedMatchers []Matcher
+}
+
+func (m *allOfMatcher) Description() string {
+ // Special case: the empty set.
+ if len(m.wrappedMatchers) == 0 {
+ return "is anything"
+ }
+
+ // Join the descriptions for the wrapped matchers.
+ wrappedDescs := make([]string, len(m.wrappedMatchers))
+ for i, wrappedMatcher := range m.wrappedMatchers {
+ wrappedDescs[i] = wrappedMatcher.Description()
+ }
+
+ return strings.Join(wrappedDescs, ", and ")
+}
+
+func (m *allOfMatcher) Matches(c interface{}) (err error) {
+ for _, wrappedMatcher := range m.wrappedMatchers {
+ if wrappedErr := wrappedMatcher.Matches(c); wrappedErr != nil {
+ err = wrappedErr
+
+ // If the error is fatal, return immediately with this error.
+ _, ok := wrappedErr.(*FatalError)
+ if ok {
+ return
+ }
+ }
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/all_of_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/all_of_test.go
new file mode 100644
index 00000000000..0f9d198fcbe
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/all_of_test.go
@@ -0,0 +1,110 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "errors"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type allOfFakeMatcher struct {
+ desc string
+ err error
+}
+
+func (m *allOfFakeMatcher) Matches(c interface{}) error {
+ return m.err
+}
+
+func (m *allOfFakeMatcher) Description() string {
+ return m.desc
+}
+
+type AllOfTest struct {
+}
+
+func init() { RegisterTestSuite(&AllOfTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *AllOfTest) DescriptionWithEmptySet() {
+ m := AllOf()
+ ExpectEq("is anything", m.Description())
+}
+
+func (t *AllOfTest) DescriptionWithOneMatcher() {
+ m := AllOf(&allOfFakeMatcher{"taco", errors.New("")})
+ ExpectEq("taco", m.Description())
+}
+
+func (t *AllOfTest) DescriptionWithMultipleMatchers() {
+ m := AllOf(
+ &allOfFakeMatcher{"taco", errors.New("")},
+ &allOfFakeMatcher{"burrito", errors.New("")},
+ &allOfFakeMatcher{"enchilada", errors.New("")})
+
+ ExpectEq("taco, and burrito, and enchilada", m.Description())
+}
+
+func (t *AllOfTest) EmptySet() {
+ m := AllOf()
+ err := m.Matches(17)
+
+ ExpectEq(nil, err)
+}
+
+func (t *AllOfTest) OneMatcherReturnsFatalErrorAndSomeOthersFail() {
+ m := AllOf(
+ &allOfFakeMatcher{"", errors.New("")},
+ &allOfFakeMatcher{"", NewFatalError("taco")},
+ &allOfFakeMatcher{"", errors.New("")},
+ &allOfFakeMatcher{"", nil})
+
+ err := m.Matches(17)
+
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *AllOfTest) OneMatcherReturnsNonFatalAndOthersSayTrue() {
+ m := AllOf(
+ &allOfFakeMatcher{"", nil},
+ &allOfFakeMatcher{"", errors.New("taco")},
+ &allOfFakeMatcher{"", nil})
+
+ err := m.Matches(17)
+
+ ExpectFalse(isFatal(err))
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *AllOfTest) AllMatchersSayTrue() {
+ m := AllOf(
+ &allOfFakeMatcher{"", nil},
+ &allOfFakeMatcher{"", nil},
+ &allOfFakeMatcher{"", nil})
+
+ err := m.Matches(17)
+
+ ExpectEq(nil, err)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any.go
new file mode 100644
index 00000000000..f6991ec1020
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any.go
@@ -0,0 +1,32 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+// Any returns a matcher that matches any value.
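+//
+// A minimal sketch: Any().Matches(nil), Any().Matches(17), and
+// Any().Matches("taco") all return nil, as exercised in any_test.go below.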
+func Any() Matcher {
+ return &anyMatcher{}
+}
+
+type anyMatcher struct {
+}
+
+func (m *anyMatcher) Description() string {
+ return "is anything"
+}
+
+func (m *anyMatcher) Matches(c interface{}) error {
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_of.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_of.go
new file mode 100644
index 00000000000..2918b51f21a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_of.go
@@ -0,0 +1,94 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// AnyOf accepts a set of values S and returns a matcher that follows the
+// algorithm below when considering a candidate c:
+//
+// 1. If there exists a value m in S such that m implements the Matcher
+// interface and m matches c, return true.
+//
+// 2. Otherwise, if there exists a value v in S such that v does not implement
+// the Matcher interface and the matcher Equals(v) matches c, return true.
+//
+// 3. Otherwise, if there is a value m in S such that m implements the Matcher
+// interface and m returns a fatal error for c, return that fatal error.
+//
+// 4. Otherwise, return false.
+//
+// This is akin to a logical OR operation for matchers, with non-matchers x
+// being treated as Equals(x).
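+//
+// For illustration, a minimal usage sketch (using this package's HasSubstr
+// matcher; the bare string is treated as Equals("taco")):
+//
+//     m := AnyOf(HasSubstr("burrito"), "taco")
+//     m.Matches("taco")      // nil: the Equals("taco") branch matches
+//     m.Matches("enchilada") // non-nil: no wrapped matcher matches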
+func AnyOf(vals ...interface{}) Matcher {
+ // Get ahold of a type variable for the Matcher interface.
+ var dummy *Matcher
+ matcherType := reflect.TypeOf(dummy).Elem()
+
+ // Create a matcher for each value, or use the value itself if it's already a
+ // matcher.
+ wrapped := make([]Matcher, len(vals))
+ for i, v := range vals {
+ t := reflect.TypeOf(v)
+ if t != nil && t.Implements(matcherType) {
+ wrapped[i] = v.(Matcher)
+ } else {
+ wrapped[i] = Equals(v)
+ }
+ }
+
+ return &anyOfMatcher{wrapped}
+}
+
+type anyOfMatcher struct {
+ wrapped []Matcher
+}
+
+func (m *anyOfMatcher) Description() string {
+ wrappedDescs := make([]string, len(m.wrapped))
+ for i, matcher := range m.wrapped {
+ wrappedDescs[i] = matcher.Description()
+ }
+
+ return fmt.Sprintf("or(%s)", strings.Join(wrappedDescs, ", "))
+}
+
+func (m *anyOfMatcher) Matches(c interface{}) (err error) {
+ err = errors.New("")
+
+ // Try each matcher in turn.
+ for _, matcher := range m.wrapped {
+ wrappedErr := matcher.Matches(c)
+
+ // Return immediately if there's a match.
+ if wrappedErr == nil {
+ err = nil
+ return
+ }
+
+ // Note the fatal error, if any.
+ if _, isFatal := wrappedErr.(*FatalError); isFatal {
+ err = wrappedErr
+ }
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_of_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_of_test.go
new file mode 100644
index 00000000000..f0b5025406f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_of_test.go
@@ -0,0 +1,139 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "errors"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type fakeAnyOfMatcher struct {
+ desc string
+ err error
+}
+
+func (m *fakeAnyOfMatcher) Matches(c interface{}) error {
+ return m.err
+}
+
+func (m *fakeAnyOfMatcher) Description() string {
+ return m.desc
+}
+
+type AnyOfTest struct {
+}
+
+func init() { RegisterTestSuite(&AnyOfTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *AnyOfTest) EmptySet() {
+ matcher := AnyOf()
+
+ err := matcher.Matches(0)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *AnyOfTest) OneTrue() {
+ matcher := AnyOf(
+ &fakeAnyOfMatcher{"", NewFatalError("foo")},
+ 17,
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ &fakeAnyOfMatcher{"", nil},
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ )
+
+ err := matcher.Matches(0)
+ ExpectEq(nil, err)
+}
+
+func (t *AnyOfTest) OneEqual() {
+ matcher := AnyOf(
+ &fakeAnyOfMatcher{"", NewFatalError("foo")},
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ 13,
+ "taco",
+ 19,
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ )
+
+ err := matcher.Matches("taco")
+ ExpectEq(nil, err)
+}
+
+func (t *AnyOfTest) OneFatal() {
+ matcher := AnyOf(
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ 17,
+ &fakeAnyOfMatcher{"", NewFatalError("taco")},
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ )
+
+ err := matcher.Matches(0)
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *AnyOfTest) OneNil() {
+ var err error
+ matcher := AnyOf(
+ 13,
+ nil,
+ 19,
+ )
+
+ // No match
+ err = matcher.Matches(14)
+ ExpectNe(nil, err)
+
+ // Match
+ err = matcher.Matches(nil)
+ ExpectEq(nil, err)
+}
+
+func (t *AnyOfTest) AllFalseAndNotEqual() {
+ matcher := AnyOf(
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ 17,
+ &fakeAnyOfMatcher{"", errors.New("foo")},
+ 19,
+ )
+
+ err := matcher.Matches(0)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *AnyOfTest) DescriptionForEmptySet() {
+ matcher := AnyOf()
+ ExpectEq("or()", matcher.Description())
+}
+
+func (t *AnyOfTest) DescriptionForNonEmptySet() {
+ matcher := AnyOf(
+ &fakeAnyOfMatcher{"taco", nil},
+ "burrito",
+ &fakeAnyOfMatcher{"enchilada", nil},
+ )
+
+ ExpectEq("or(taco, burrito, enchilada)", matcher.Description())
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_test.go
new file mode 100644
index 00000000000..410cc12825e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/any_test.go
@@ -0,0 +1,53 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type AnyTest struct {
+}
+
+func init() { RegisterTestSuite(&AnyTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *AnyTest) Description() {
+ m := Any()
+ ExpectEq("is anything", m.Description())
+}
+
+func (t *AnyTest) Matches() {
+ var err error
+ m := Any()
+
+ err = m.Matches(nil)
+ ExpectEq(nil, err)
+
+ err = m.Matches(17)
+ ExpectEq(nil, err)
+
+ err = m.Matches("taco")
+ ExpectEq(nil, err)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/contains.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/contains.go
new file mode 100644
index 00000000000..2f326dbc5d6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/contains.go
@@ -0,0 +1,61 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// Contains returns a matcher that matches arrays and slices with at least one
+// element that matches the supplied argument. If the argument x is not itself
+// a Matcher, this is equivalent to Contains(Equals(x)).
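+//
+// A minimal usage sketch (mirroring contains_test.go below; HasSubstr is
+// another matcher from this package):
+//
+//     Contains(17).Matches([]int{13, 17, 19})                // nil
+//     Contains(HasSubstr("ac")).Matches([]string{"burrito"}) // non-nil
+//     Contains(17).Matches("not a slice or array")           // fatal error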
+func Contains(x interface{}) Matcher {
+ var result containsMatcher
+ var ok bool
+
+ if result.elementMatcher, ok = x.(Matcher); !ok {
+ result.elementMatcher = Equals(x)
+ }
+
+ return &result
+}
+
+type containsMatcher struct {
+ elementMatcher Matcher
+}
+
+func (m *containsMatcher) Description() string {
+ return fmt.Sprintf("contains: %s", m.elementMatcher.Description())
+}
+
+func (m *containsMatcher) Matches(candidate interface{}) error {
+ // The candidate must be a slice or an array.
+ v := reflect.ValueOf(candidate)
+ if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
+ return NewFatalError("which is not a slice or array")
+ }
+
+ // Check each element.
+ for i := 0; i < v.Len(); i++ {
+ elem := v.Index(i)
+ if matchErr := m.elementMatcher.Matches(elem.Interface()); matchErr == nil {
+ return nil
+ }
+ }
+
+ return fmt.Errorf("")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/contains_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/contains_test.go
new file mode 100644
index 00000000000..dfc981c1488
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/contains_test.go
@@ -0,0 +1,233 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type ContainsTest struct {
+}
+
+func init() { RegisterTestSuite(&ContainsTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *ContainsTest) WrongTypeCandidates() {
+ m := Contains("")
+ ExpectEq("contains: ", m.Description())
+
+ var err error
+
+ // Nil candidate
+ err = m.Matches(nil)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+
+ // String candidate
+ err = m.Matches("")
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+
+ // Map candidate
+ err = m.Matches(make(map[string]string))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+}
+
+func (t *ContainsTest) NilArgument() {
+ m := Contains(nil)
+ ExpectEq("contains: is nil", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Empty array of pointers
+ c = [...]*int{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Empty slice of pointers
+ c = []*int{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-empty array of integers
+ c = [...]int{17, 0, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-empty slice of integers
+ c = []int{17, 0, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching array of pointers
+ c = [...]*int{new(int), new(int)}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching slice of pointers
+ c = []*int{new(int), new(int)}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Matching array of pointers
+ c = [...]*int{new(int), nil, new(int)}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching slice of pointers
+ c = []*int{new(int), nil, new(int)}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching slice of pointers from matching array
+ someArray := [...]*int{new(int), nil, new(int)}
+ c = someArray[0:1]
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *ContainsTest) StringArgument() {
+ m := Contains("taco")
+ ExpectEq("contains: taco", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Non-matching array of strings
+ c = [...]string{"burrito", "enchilada"}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching slice of strings
+ c = []string{"burrito", "enchilada"}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Matching array of strings
+ c = [...]string{"burrito", "taco", "enchilada"}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching slice of strings
+ c = []string{"burrito", "taco", "enchilada"}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching slice of strings from matching array
+ someArray := [...]string{"burrito", "taco", "enchilada"}
+ c = someArray[0:1]
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *ContainsTest) IntegerArgument() {
+ m := Contains(int(17))
+ ExpectEq("contains: 17", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Non-matching array of integers
+ c = [...]int{13, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching slice of integers
+ c = []int{13, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Matching array of integers
+ c = [...]int{13, 17, 19}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching slice of integers
+ c = []int{13, 17, 19}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching slice of integers from matching array
+ someArray := [...]int{13, 17, 19}
+ c = someArray[0:1]
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching array of floats
+ c = [...]float32{13, 17.5, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching slice of floats
+ c = []float32{13, 17.5, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Matching array of floats
+ c = [...]float32{13, 17, 19}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching slice of floats
+ c = []float32{13, 17, 19}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+}
+
+func (t *ContainsTest) MatcherArgument() {
+ m := Contains(HasSubstr("ac"))
+ ExpectEq("contains: has substring \"ac\"", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Non-matching array of strings
+ c = [...]string{"burrito", "enchilada"}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Non-matching slice of strings
+ c = []string{"burrito", "enchilada"}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Matching array of strings
+ c = [...]string{"burrito", "taco", "enchilada"}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching slice of strings
+ c = []string{"burrito", "taco", "enchilada"}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching slice of strings from matching array
+ someArray := [...]string{"burrito", "taco", "enchilada"}
+ c = someArray[0:1]
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go
new file mode 100644
index 00000000000..1d91baef32e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go
@@ -0,0 +1,88 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+var byteSliceType reflect.Type = reflect.TypeOf([]byte{})
+
+// DeepEquals returns a matcher that matches based on 'deep equality', as
+// defined by the reflect package. This matcher requires that candidate values
+// have exactly the same type as x.
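+//
+// A minimal sketch of the type strictness and the nil/empty slice distinction:
+//
+//     DeepEquals([]byte{17, 19}).Matches([]byte{17, 19}) // nil
+//     DeepEquals([]byte{17, 19}).Matches([]int{17, 19})  // fatal: wrong type
+//     DeepEquals([]byte{}).Matches([]byte(nil))          // non-nil: "which is nil"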
+func DeepEquals(x interface{}) Matcher {
+ return &deepEqualsMatcher{x}
+}
+
+type deepEqualsMatcher struct {
+ x interface{}
+}
+
+func (m *deepEqualsMatcher) Description() string {
+ xDesc := fmt.Sprintf("%v", m.x)
+ xValue := reflect.ValueOf(m.x)
+
+ // Special case: fmt.Sprintf presents nil slices as "[]", but
+ // reflect.DeepEqual makes a distinction between nil and empty slices. Make
+ // this less confusing.
+ if xValue.Kind() == reflect.Slice && xValue.IsNil() {
+ xDesc = "<nil slice>"
+ }
+
+ return fmt.Sprintf("deep equals: %s", xDesc)
+}
+
+func (m *deepEqualsMatcher) Matches(c interface{}) error {
+ // Make sure the types match.
+ ct := reflect.TypeOf(c)
+ xt := reflect.TypeOf(m.x)
+
+ if ct != xt {
+ return NewFatalError(fmt.Sprintf("which is of type %v", ct))
+ }
+
+ // Special case: handle byte slices more efficiently.
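+ // (This path uses bytes.Equal directly rather than reflect.DeepEqual; see
+ // the benchmarks in deep_equals_test.go.)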
+ cValue := reflect.ValueOf(c)
+ xValue := reflect.ValueOf(m.x)
+
+ if ct == byteSliceType && !cValue.IsNil() && !xValue.IsNil() {
+ xBytes := m.x.([]byte)
+ cBytes := c.([]byte)
+
+ if bytes.Equal(cBytes, xBytes) {
+ return nil
+ }
+
+ return errors.New("")
+ }
+
+ // Defer to the reflect package.
+ if reflect.DeepEqual(m.x, c) {
+ return nil
+ }
+
+ // Special case: if the comparison failed because c is the nil slice, give
+ // an indication of this (since its value is printed as "[]").
+ if cValue.Kind() == reflect.Slice && cValue.IsNil() {
+ return errors.New("which is nil")
+ }
+
+ return errors.New("")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/deep_equals_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/deep_equals_test.go
new file mode 100644
index 00000000000..a28113aaa6b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/deep_equals_test.go
@@ -0,0 +1,343 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "bytes"
+ "testing"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type DeepEqualsTest struct {
+}
+
+func init() { RegisterTestSuite(&DeepEqualsTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *DeepEqualsTest) WrongTypeCandidateWithScalarValue() {
+ var x int = 17
+ m := DeepEquals(x)
+
+ var err error
+
+ // Nil candidate.
+ err = m.Matches(nil)
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("<nil>")))
+
+ // Int alias candidate.
+ type intAlias int
+ err = m.Matches(intAlias(x))
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("intAlias")))
+
+ // String candidate.
+ err = m.Matches("taco")
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("string")))
+
+ // Byte slice candidate.
+ err = m.Matches([]byte{})
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint8")))
+
+ // Other slice candidate.
+ err = m.Matches([]uint16{})
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint16")))
+
+ // Unsigned int candidate.
+ err = m.Matches(uint(17))
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("uint")))
+}
+
+func (t *DeepEqualsTest) WrongTypeCandidateWithByteSliceValue() {
+ x := []byte{}
+ m := DeepEquals(x)
+
+ var err error
+
+ // Nil candidate.
+ err = m.Matches(nil)
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("<nil>")))
+
+ // String candidate.
+ err = m.Matches("taco")
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("string")))
+
+ // Slice candidate with wrong value type.
+ err = m.Matches([]uint16{})
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint16")))
+}
+
+func (t *DeepEqualsTest) WrongTypeCandidateWithOtherSliceValue() {
+ x := []uint16{}
+ m := DeepEquals(x)
+
+ var err error
+
+ // Nil candidate.
+ err = m.Matches(nil)
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("<nil>")))
+
+ // String candidate.
+ err = m.Matches("taco")
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("string")))
+
+ // Byte slice candidate with wrong value type.
+ err = m.Matches([]byte{})
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint8")))
+
+ // Other slice candidate with wrong value type.
+ err = m.Matches([]uint32{})
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint32")))
+}
+
+func (t *DeepEqualsTest) WrongTypeCandidateWithNilLiteralValue() {
+ m := DeepEquals(nil)
+
+ var err error
+
+ // String candidate.
+ err = m.Matches("taco")
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("string")))
+
+ // Nil byte slice candidate.
+ err = m.Matches([]byte(nil))
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint8")))
+
+ // Nil other slice candidate.
+ err = m.Matches([]uint16(nil))
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("type")))
+ ExpectThat(err, Error(HasSubstr("[]uint16")))
+}
+
+func (t *DeepEqualsTest) NilLiteralValue() {
+ m := DeepEquals(nil)
+ ExpectEq("deep equals: <nil>", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Nil literal candidate.
+ c = nil
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+}
+
+func (t *DeepEqualsTest) IntValue() {
+ m := DeepEquals(int(17))
+ ExpectEq("deep equals: 17", m.Description())
+
+ var c interface{}
+ var err error
+
+ // Matching int.
+ c = int(17)
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching int.
+ c = int(18)
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *DeepEqualsTest) ByteSliceValue() {
+ x := []byte{17, 19}
+ m := DeepEquals(x)
+ ExpectEq("deep equals: [17 19]", m.Description())
+
+ var c []byte
+ var err error
+
+ // Matching.
+ c = make([]byte, len(x))
+ AssertEq(len(x), copy(c, x))
+
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Nil slice.
+ c = []byte(nil)
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("which is nil")))
+
+ // Prefix.
+ AssertGt(len(x), 1)
+ c = make([]byte, len(x)-1)
+ AssertEq(len(x)-1, copy(c, x))
+
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Suffix.
+ c = make([]byte, len(x)+1)
+ AssertEq(len(x), copy(c, x))
+
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *DeepEqualsTest) OtherSliceValue() {
+ x := []uint16{17, 19}
+ m := DeepEquals(x)
+ ExpectEq("deep equals: [17 19]", m.Description())
+
+ var c []uint16
+ var err error
+
+ // Matching.
+ c = make([]uint16, len(x))
+ AssertEq(len(x), copy(c, x))
+
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Nil slice.
+ c = []uint16(nil)
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("which is nil")))
+
+ // Prefix.
+ AssertGt(len(x), 1)
+ c = make([]uint16, len(x)-1)
+ AssertEq(len(x)-1, copy(c, x))
+
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+
+ // Suffix.
+ c = make([]uint16, len(x)+1)
+ AssertEq(len(x), copy(c, x))
+
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *DeepEqualsTest) NilByteSliceValue() {
+ x := []byte(nil)
+ m := DeepEquals(x)
+ ExpectEq("deep equals: <nil slice>", m.Description())
+
+ var c []byte
+ var err error
+
+ // Nil slice.
+ c = []byte(nil)
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-nil slice.
+ c = []byte{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *DeepEqualsTest) NilOtherSliceValue() {
+ x := []uint16(nil)
+ m := DeepEquals(x)
+ ExpectEq("deep equals: <nil slice>", m.Description())
+
+ var c []uint16
+ var err error
+
+ // Nil slice.
+ c = []uint16(nil)
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-nil slice.
+ c = []uint16{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("")))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Benchmarks
+////////////////////////////////////////////////////////////////////////
+
+func benchmarkWithSize(b *testing.B, size int) {
+ b.StopTimer()
+ buf := bytes.Repeat([]byte{0x01}, size)
+ bufCopy := make([]byte, size)
+ copy(bufCopy, buf)
+
+ matcher := DeepEquals(buf)
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ matcher.Matches(bufCopy)
+ }
+
+ b.SetBytes(int64(size))
+}
+
+func BenchmarkShortByteSlice(b *testing.B) {
+ benchmarkWithSize(b, 256)
+}
+
+func BenchmarkLongByteSlice(b *testing.B) {
+ benchmarkWithSize(b, 1<<24)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/elements_are.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/elements_are.go
new file mode 100644
index 00000000000..2941847c705
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/elements_are.go
@@ -0,0 +1,91 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// Given a list of arguments M, ElementsAre returns a matcher that matches
+// arrays and slices A where all of the following hold:
+//
+// * A is the same length as M.
+//
+// * For each i < len(A) where M[i] is a matcher, A[i] matches M[i].
+//
+// * For each i < len(A) where M[i] is not a matcher, A[i] matches
+// Equals(M[i]).
+//
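+// For illustration, a minimal sketch using this package's LessThan matcher
+// (mirroring elements_are_test.go below):
+//
+//     m := ElementsAre("taco", LessThan(17))
+//     m.Matches([]interface{}{"taco", 16}) // nil
+//     m.Matches([]interface{}{"taco", 17}) // error: element 1 doesn't match
+//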
+func ElementsAre(M ...interface{}) Matcher {
+ // Copy over matchers, or convert to Equals(x) for non-matcher x.
+ subMatchers := make([]Matcher, len(M))
+ for i, x := range M {
+ if matcher, ok := x.(Matcher); ok {
+ subMatchers[i] = matcher
+ continue
+ }
+
+ subMatchers[i] = Equals(x)
+ }
+
+ return &elementsAreMatcher{subMatchers}
+}
+
+type elementsAreMatcher struct {
+ subMatchers []Matcher
+}
+
+func (m *elementsAreMatcher) Description() string {
+ subDescs := make([]string, len(m.subMatchers))
+ for i, sm := range m.subMatchers {
+ subDescs[i] = sm.Description()
+ }
+
+ return fmt.Sprintf("elements are: [%s]", strings.Join(subDescs, ", "))
+}
+
+func (m *elementsAreMatcher) Matches(candidates interface{}) error {
+ // The candidate must be a slice or an array.
+ v := reflect.ValueOf(candidates)
+ if v.Kind() != reflect.Slice && v.Kind() != reflect.Array {
+ return NewFatalError("which is not a slice or array")
+ }
+
+ // The length must be correct.
+ if v.Len() != len(m.subMatchers) {
+ return errors.New(fmt.Sprintf("which is of length %d", v.Len()))
+ }
+
+ // Check each element.
+ for i, subMatcher := range m.subMatchers {
+ c := v.Index(i)
+ if matchErr := subMatcher.Matches(c.Interface()); matchErr != nil {
+ // Return an error indicating which element doesn't match. If the
+ // matcher error was fatal, make this one fatal too.
+ err := errors.New(fmt.Sprintf("whose element %d doesn't match", i))
+ if _, isFatal := matchErr.(*FatalError); isFatal {
+ err = NewFatalError(err.Error())
+ }
+
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/elements_are_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/elements_are_test.go
new file mode 100644
index 00000000000..172584fa140
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/elements_are_test.go
@@ -0,0 +1,208 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type ElementsAreTest struct {
+}
+
+func init() { RegisterTestSuite(&ElementsAreTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *ElementsAreTest) EmptySet() {
+ m := ElementsAre()
+ ExpectEq("elements are: []", m.Description())
+
+ var c []interface{}
+ var err error
+
+ // No candidates.
+ c = []interface{}{}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // One candidate.
+ c = []interface{}{17}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 1")))
+}
+
+func (t *ElementsAreTest) OneMatcher() {
+ m := ElementsAre(LessThan(17))
+ ExpectEq("elements are: [less than 17]", m.Description())
+
+ var c []interface{}
+ var err error
+
+ // No candidates.
+ c = []interface{}{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 0")))
+
+ // Matching candidate.
+ c = []interface{}{16}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching candidate.
+ c = []interface{}{19}
+ err = m.Matches(c)
+ ExpectNe(nil, err)
+
+ // Two candidates.
+ c = []interface{}{17, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 2")))
+}
+
+func (t *ElementsAreTest) OneValue() {
+ m := ElementsAre(17)
+ ExpectEq("elements are: [17]", m.Description())
+
+ var c []interface{}
+ var err error
+
+ // No candidates.
+ c = []interface{}{}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 0")))
+
+ // Matching int.
+ c = []interface{}{int(17)}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Matching float.
+ c = []interface{}{float32(17)}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // Non-matching candidate.
+ c = []interface{}{19}
+ err = m.Matches(c)
+ ExpectNe(nil, err)
+
+ // Two candidates.
+ c = []interface{}{17, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 2")))
+}
+
+func (t *ElementsAreTest) MultipleElements() {
+ m := ElementsAre("taco", LessThan(17))
+ ExpectEq("elements are: [taco, less than 17]", m.Description())
+
+ var c []interface{}
+ var err error
+
+ // One candidate.
+ c = []interface{}{17}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 1")))
+
+ // Both matching.
+ c = []interface{}{"taco", 16}
+ err = m.Matches(c)
+ ExpectEq(nil, err)
+
+ // First non-matching.
+ c = []interface{}{"burrito", 16}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("whose element 0 doesn't match")))
+
+ // Second non-matching.
+ c = []interface{}{"taco", 17}
+ err = m.Matches(c)
+ ExpectThat(err, Error(Equals("whose element 1 doesn't match")))
+
+ // Three candidates.
+ c = []interface{}{"taco", 17, 19}
+ err = m.Matches(c)
+ ExpectThat(err, Error(HasSubstr("length 3")))
+}
+
+func (t *ElementsAreTest) ArrayCandidates() {
+ m := ElementsAre("taco", LessThan(17))
+
+ var err error
+
+ // One candidate.
+ err = m.Matches([1]interface{}{"taco"})
+ ExpectThat(err, Error(HasSubstr("length 1")))
+
+ // Both matching.
+ err = m.Matches([2]interface{}{"taco", 16})
+ ExpectEq(nil, err)
+
+ // First non-matching.
+ err = m.Matches([2]interface{}{"burrito", 16})
+ ExpectThat(err, Error(Equals("whose element 0 doesn't match")))
+}
+
+func (t *ElementsAreTest) WrongTypeCandidate() {
+ m := ElementsAre("taco")
+
+ var err error
+
+ // String candidate.
+ err = m.Matches("taco")
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+
+ // Map candidate.
+ err = m.Matches(map[string]string{})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+
+ // Nil candidate.
+ err = m.Matches(nil)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("array")))
+ ExpectThat(err, Error(HasSubstr("slice")))
+}
+
+func (t *ElementsAreTest) PropagatesFatality() {
+ m := ElementsAre(LessThan(17))
+ ExpectEq("elements are: [less than 17]", m.Description())
+
+ var c []interface{}
+ var err error
+
+ // Non-fatal error.
+ c = []interface{}{19}
+ err = m.Matches(c)
+ AssertNe(nil, err)
+ ExpectFalse(isFatal(err))
+
+ // Fatal error.
+ c = []interface{}{"taco"}
+ err = m.Matches(c)
+ AssertNe(nil, err)
+ ExpectTrue(isFatal(err))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/equals.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/equals.go
new file mode 100644
index 00000000000..a510707b3c7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/equals.go
@@ -0,0 +1,541 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "reflect"
+)
+
+// Equals(x) returns a matcher that matches values v such that v and x are
+// equivalent. This includes the case when the comparison v == x using Go's
+// built-in comparison operator is legal (except for structs, which this
+// matcher does not support), but for convenience the following rules also
+// apply:
+//
+// * Type checking is done based on underlying types rather than actual
+// types, so that e.g. two aliases for string can be compared:
+//
+// type stringAlias1 string
+// type stringAlias2 string
+//
+// a := "taco"
+// b := stringAlias1("taco")
+// c := stringAlias2("taco")
+//
+// ExpectTrue(a == b) // Legal, passes
+// ExpectTrue(b == c) // Illegal, doesn't compile
+//
+// ExpectThat(a, Equals(b)) // Passes
+// ExpectThat(b, Equals(c)) // Passes
+//
+// * Values of numeric type are treated as if they were abstract numbers, and
+// compared accordingly. Therefore Equals(17) will match int(17),
+// int16(17), uint(17), float32(17), complex64(17), and so on.
+//
+// If you want a stricter matcher that contains no such cleverness, see
+// IdenticalTo instead.
+//
+// Arrays are supported by this matcher, but do not participate in the
+// exceptions above. Two arrays compared with this matcher must have identical
+// types, and their element type must itself be comparable according to Go's ==
+// operator.
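+//
+// For illustration, the numeric rule above works out as:
+//
+//     Equals(17).Matches(int8(17))    // nil
+//     Equals(17).Matches(float64(17)) // nil
+//     Equals(17).Matches(17.5)        // non-nil
+//     Equals(17).Matches("taco")      // fatal: "which is not numeric"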
+func Equals(x interface{}) Matcher {
+ v := reflect.ValueOf(x)
+
+ // This matcher doesn't support structs.
+ if v.Kind() == reflect.Struct {
+ panic(fmt.Sprintf("oglematchers.Equals: unsupported kind %v", v.Kind()))
+ }
+
+ // The == operator is not defined for non-nil slices.
+ if v.Kind() == reflect.Slice && v.Pointer() != uintptr(0) {
+ panic(fmt.Sprintf("oglematchers.Equals: non-nil slice"))
+ }
+
+ return &equalsMatcher{v}
+}
+
+type equalsMatcher struct {
+ expectedValue reflect.Value
+}
+
+////////////////////////////////////////////////////////////////////////
+// Numeric types
+////////////////////////////////////////////////////////////////////////
+
+func isSignedInteger(v reflect.Value) bool {
+ k := v.Kind()
+ return k >= reflect.Int && k <= reflect.Int64
+}
+
+func isUnsignedInteger(v reflect.Value) bool {
+ k := v.Kind()
+ return k >= reflect.Uint && k <= reflect.Uintptr
+}
+
+func isInteger(v reflect.Value) bool {
+ return isSignedInteger(v) || isUnsignedInteger(v)
+}
+
+func isFloat(v reflect.Value) bool {
+ k := v.Kind()
+ return k == reflect.Float32 || k == reflect.Float64
+}
+
+func isComplex(v reflect.Value) bool {
+ k := v.Kind()
+ return k == reflect.Complex64 || k == reflect.Complex128
+}
+
+func checkAgainstInt64(e int64, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(c):
+ if c.Int() == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ u := c.Uint()
+ if u <= math.MaxInt64 && int64(u) == e {
+ err = nil
+ }
+
+ // Turn around the various floating point types so that the checkAgainst*
+ // functions for them can deal with precision issues.
+ case isFloat(c), isComplex(c):
+ return Equals(c.Interface()).Matches(e)
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstUint64(e uint64, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(c):
+ i := c.Int()
+ if i >= 0 && uint64(i) == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ if c.Uint() == e {
+ err = nil
+ }
+
+ // Turn around the various floating point types so that the checkAgainst*
+ // functions for them can deal with precision issues.
+ case isFloat(c), isComplex(c):
+ return Equals(c.Interface()).Matches(e)
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstFloat32(e float32, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(c):
+ if float32(c.Int()) == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ if float32(c.Uint()) == e {
+ err = nil
+ }
+
+ case isFloat(c):
+ // Compare using float32 to avoid a false sense of precision; otherwise
+ // e.g. Equals(float32(0.1)) won't match float32(0.1).
+ if float32(c.Float()) == e {
+ err = nil
+ }
+
+ case isComplex(c):
+ comp := c.Complex()
+ rl := real(comp)
+ im := imag(comp)
+
+ // Compare using float32 to avoid a false sense of precision; otherwise
+ // e.g. Equals(float32(0.1)) won't match (0.1 + 0i).
+ if im == 0 && float32(rl) == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstFloat64(e float64, c reflect.Value) (err error) {
+ err = errors.New("")
+
+ ck := c.Kind()
+
+ switch {
+ case isSignedInteger(c):
+ if float64(c.Int()) == e {
+ err = nil
+ }
+
+ case isUnsignedInteger(c):
+ if float64(c.Uint()) == e {
+ err = nil
+ }
+
+ // If the actual value is lower precision, turn the comparison around so we
+ // apply the low-precision rules. Otherwise, e.g. Equals(0.1) may not match
+ // float32(0.1).
+ case ck == reflect.Float32 || ck == reflect.Complex64:
+ return Equals(c.Interface()).Matches(e)
+
+ // Otherwise, compare with double precision.
+ case isFloat(c):
+ if c.Float() == e {
+ err = nil
+ }
+
+ case isComplex(c):
+ comp := c.Complex()
+ rl := real(comp)
+ im := imag(comp)
+
+ if im == 0 && rl == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstComplex64(e complex64, c reflect.Value) (err error) {
+ err = errors.New("")
+ realPart := real(e)
+ imaginaryPart := imag(e)
+
+ switch {
+ case isInteger(c) || isFloat(c):
+ // If we have no imaginary part, then we should just compare against the
+ // real part. Otherwise, we can't be equal.
+ if imaginaryPart != 0 {
+ return
+ }
+
+ return checkAgainstFloat32(realPart, c)
+
+ case isComplex(c):
+ // Compare using complex64 to avoid a false sense of precision; otherwise
+ // e.g. Equals(0.1 + 0i) won't match float32(0.1).
+ if complex64(c.Complex()) == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+func checkAgainstComplex128(e complex128, c reflect.Value) (err error) {
+ err = errors.New("")
+ realPart := real(e)
+ imaginaryPart := imag(e)
+
+ switch {
+ case isInteger(c) || isFloat(c):
+ // If we have no imaginary part, then we should just compare against the
+ // real part. Otherwise, we can't be equal.
+ if imaginaryPart != 0 {
+ return
+ }
+
+ return checkAgainstFloat64(realPart, c)
+
+ case isComplex(c):
+ if c.Complex() == e {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not numeric")
+ }
+
+ return
+}
+
+////////////////////////////////////////////////////////////////////////
+// Other types
+////////////////////////////////////////////////////////////////////////
+
+func checkAgainstBool(e bool, c reflect.Value) (err error) {
+ if c.Kind() != reflect.Bool {
+ err = NewFatalError("which is not a bool")
+ return
+ }
+
+ err = errors.New("")
+ if c.Bool() == e {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstChan(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "chan int".
+ typeStr := fmt.Sprintf("%s %s", e.Type().ChanDir(), e.Type().Elem())
+
+ // Make sure c is a chan of the correct type.
+ if c.Kind() != reflect.Chan ||
+ c.Type().ChanDir() != e.Type().ChanDir() ||
+ c.Type().Elem() != e.Type().Elem() {
+ err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr))
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstFunc(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a function.
+ if c.Kind() != reflect.Func {
+ err = NewFatalError("which is not a function")
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstMap(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a map.
+ if c.Kind() != reflect.Map {
+ err = NewFatalError("which is not a map")
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstPtr(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "*int".
+ typeStr := fmt.Sprintf("*%v", e.Type().Elem())
+
+ // Make sure c is a pointer of the correct type.
+ if c.Kind() != reflect.Ptr ||
+ c.Type().Elem() != e.Type().Elem() {
+ err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr))
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstSlice(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "[]int".
+ typeStr := fmt.Sprintf("[]%v", e.Type().Elem())
+
+ // Make sure c is a slice of the correct type.
+ if c.Kind() != reflect.Slice ||
+ c.Type().Elem() != e.Type().Elem() {
+ err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr))
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstString(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a string.
+ if c.Kind() != reflect.String {
+ err = NewFatalError("which is not a string")
+ return
+ }
+
+ err = errors.New("")
+ if c.String() == e.String() {
+ err = nil
+ }
+ return
+}
+
+func checkAgainstArray(e reflect.Value, c reflect.Value) (err error) {
+ // Create a description of e's type, e.g. "[2]int".
+ typeStr := fmt.Sprintf("%v", e.Type())
+
+ // Make sure c is the correct type.
+ if c.Type() != e.Type() {
+ err = NewFatalError(fmt.Sprintf("which is not %s", typeStr))
+ return
+ }
+
+ // Check for equality.
+ if e.Interface() != c.Interface() {
+ err = errors.New("")
+ return
+ }
+
+ return
+}
+
+func checkAgainstUnsafePointer(e reflect.Value, c reflect.Value) (err error) {
+ // Make sure c is a pointer.
+ if c.Kind() != reflect.UnsafePointer {
+ err = NewFatalError("which is not a unsafe.Pointer")
+ return
+ }
+
+ err = errors.New("")
+ if c.Pointer() == e.Pointer() {
+ err = nil
+ }
+ return
+}
+
+func checkForNil(c reflect.Value) (err error) {
+ err = errors.New("")
+
+ // Make sure it is legal to call IsNil.
+ switch c.Kind() {
+ case reflect.Invalid:
+ case reflect.Chan:
+ case reflect.Func:
+ case reflect.Interface:
+ case reflect.Map:
+ case reflect.Ptr:
+ case reflect.Slice:
+
+ default:
+ err = NewFatalError("which cannot be compared to nil")
+ return
+ }
+
+ // Ask whether the value is nil. Handle a nil literal (kind Invalid)
+ // specially, since it's not legal to call IsNil there.
+ if c.Kind() == reflect.Invalid || c.IsNil() {
+ err = nil
+ }
+ return
+}
+
+////////////////////////////////////////////////////////////////////////
+// Public implementation
+////////////////////////////////////////////////////////////////////////
+
+func (m *equalsMatcher) Matches(candidate interface{}) error {
+ e := m.expectedValue
+ c := reflect.ValueOf(candidate)
+ ek := e.Kind()
+
+ switch {
+ case ek == reflect.Bool:
+ return checkAgainstBool(e.Bool(), c)
+
+ case isSignedInteger(e):
+ return checkAgainstInt64(e.Int(), c)
+
+ case isUnsignedInteger(e):
+ return checkAgainstUint64(e.Uint(), c)
+
+ case ek == reflect.Float32:
+ return checkAgainstFloat32(float32(e.Float()), c)
+
+ case ek == reflect.Float64:
+ return checkAgainstFloat64(e.Float(), c)
+
+ case ek == reflect.Complex64:
+ return checkAgainstComplex64(complex64(e.Complex()), c)
+
+ case ek == reflect.Complex128:
+ return checkAgainstComplex128(complex128(e.Complex()), c)
+
+ case ek == reflect.Chan:
+ return checkAgainstChan(e, c)
+
+ case ek == reflect.Func:
+ return checkAgainstFunc(e, c)
+
+ case ek == reflect.Map:
+ return checkAgainstMap(e, c)
+
+ case ek == reflect.Ptr:
+ return checkAgainstPtr(e, c)
+
+ case ek == reflect.Slice:
+ return checkAgainstSlice(e, c)
+
+ case ek == reflect.String:
+ return checkAgainstString(e, c)
+
+ case ek == reflect.Array:
+ return checkAgainstArray(e, c)
+
+ case ek == reflect.UnsafePointer:
+ return checkAgainstUnsafePointer(e, c)
+
+ case ek == reflect.Invalid:
+ return checkForNil(c)
+ }
+
+ panic(fmt.Sprintf("equalsMatcher.Matches: unexpected kind: %v", ek))
+}
+
+func (m *equalsMatcher) Description() string {
+ // Special case: handle nil.
+ if !m.expectedValue.IsValid() {
+ return "is nil"
+ }
+
+ return fmt.Sprintf("%v", m.expectedValue.Interface())
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/equals_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/equals_test.go
new file mode 100644
index 00000000000..6ac5df27329
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/equals_test.go
@@ -0,0 +1,3864 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "fmt"
+ "math"
+ "unsafe"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+var someInt int = -17
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type EqualsTest struct {
+}
+
+func init() { RegisterTestSuite(&EqualsTest{}) }
+
+type equalsTestCase struct {
+ candidate interface{}
+ expectedResult bool
+ shouldBeFatal bool
+ expectedError string
+}
+
+func (t *EqualsTest) checkTestCases(matcher Matcher, cases []equalsTestCase) {
+ for i, c := range cases {
+ err := matcher.Matches(c.candidate)
+ ExpectEq(
+ c.expectedResult,
+ (err == nil),
+ "Result for case %d: %v (Error: %v)", i, c, err)
+
+ if err == nil {
+ continue
+ }
+
+ _, isFatal := err.(*FatalError)
+ ExpectEq(c.shouldBeFatal, isFatal, "Fatality for case %d: %v", i, c)
+
+ ExpectThat(err, Error(Equals(c.expectedError)), "Case %d: %v", i, c)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// nil
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) EqualsNil() {
+ matcher := Equals(nil)
+ ExpectEq("is nil", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Legal types
+ equalsTestCase{nil, true, false, ""},
+ equalsTestCase{chan int(nil), true, false, ""},
+ equalsTestCase{(func())(nil), true, false, ""},
+ equalsTestCase{interface{}(nil), true, false, ""},
+ equalsTestCase{map[int]int(nil), true, false, ""},
+ equalsTestCase{(*int)(nil), true, false, ""},
+ equalsTestCase{[]int(nil), true, false, ""},
+
+ equalsTestCase{make(chan int), false, false, ""},
+ equalsTestCase{func() {}, false, false, ""},
+ equalsTestCase{map[int]int{}, false, false, ""},
+ equalsTestCase{&someInt, false, false, ""},
+ equalsTestCase{[]int{}, false, false, ""},
+
+ // Illegal types
+ equalsTestCase{17, false, true, "which cannot be compared to nil"},
+ equalsTestCase{int8(17), false, true, "which cannot be compared to nil"},
+ equalsTestCase{uintptr(17), false, true, "which cannot be compared to nil"},
+ equalsTestCase{[...]int{}, false, true, "which cannot be compared to nil"},
+ equalsTestCase{"taco", false, true, "which cannot be compared to nil"},
+ equalsTestCase{equalsTestCase{}, false, true, "which cannot be compared to nil"},
+ equalsTestCase{unsafe.Pointer(&someInt), false, true, "which cannot be compared to nil"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegerLiteral() {
+ // -2^30
+ matcher := Equals(-1073741824)
+ ExpectEq("-1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -1073741824.
+ equalsTestCase{-1073741824, true, false, ""},
+ equalsTestCase{-1073741824.0, true, false, ""},
+ equalsTestCase{-1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(-1073741824), true, false, ""},
+ equalsTestCase{int32(-1073741824), true, false, ""},
+ equalsTestCase{int64(-1073741824), true, false, ""},
+ equalsTestCase{float32(-1073741824), true, false, ""},
+ equalsTestCase{float64(-1073741824), true, false, ""},
+ equalsTestCase{complex64(-1073741824), true, false, ""},
+ equalsTestCase{complex128(-1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(-1073741824)), true, false, ""},
+
+ // Values that would be -1073741824 in two's complement.
+ equalsTestCase{uint((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 1073741824), false, false, ""},
+ equalsTestCase{uintptr((1 << 64) - 1073741824), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-1073741823), false, false, ""},
+ equalsTestCase{int32(-1073741823), false, false, ""},
+ equalsTestCase{int64(-1073741823), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float64(-1073741824.1), false, false, ""},
+ equalsTestCase{float64(-1073741823.9), false, false, ""},
+ equalsTestCase{complex128(-1073741823), false, false, ""},
+ equalsTestCase{complex128(-1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegerLiteral() {
+ // 2^30
+ matcher := Equals(1073741824)
+ ExpectEq("1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1073741824.
+ equalsTestCase{1073741824, true, false, ""},
+ equalsTestCase{1073741824.0, true, false, ""},
+ equalsTestCase{1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(1073741824), true, false, ""},
+ equalsTestCase{uint(1073741824), true, false, ""},
+ equalsTestCase{int32(1073741824), true, false, ""},
+ equalsTestCase{int64(1073741824), true, false, ""},
+ equalsTestCase{uint32(1073741824), true, false, ""},
+ equalsTestCase{uint64(1073741824), true, false, ""},
+ equalsTestCase{uintptr(1073741824), true, false, ""},
+ equalsTestCase{float32(1073741824), true, false, ""},
+ equalsTestCase{float64(1073741824), true, false, ""},
+ equalsTestCase{complex64(1073741824), true, false, ""},
+ equalsTestCase{complex128(1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(1073741824)), true, false, ""},
+ equalsTestCase{interface{}(uint(1073741824)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1073741823), false, false, ""},
+ equalsTestCase{int32(1073741823), false, false, ""},
+ equalsTestCase{int64(1073741823), false, false, ""},
+ equalsTestCase{float64(1073741824.1), false, false, ""},
+ equalsTestCase{float64(1073741823.9), false, false, ""},
+ equalsTestCase{complex128(1073741823), false, false, ""},
+ equalsTestCase{complex128(1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Floating point literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegralFloatingPointLiteral() {
+ // -2^30
+ matcher := Equals(-1073741824.0)
+ ExpectEq("-1.073741824e+09", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -1073741824.
+ equalsTestCase{-1073741824, true, false, ""},
+ equalsTestCase{-1073741824.0, true, false, ""},
+ equalsTestCase{-1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(-1073741824), true, false, ""},
+ equalsTestCase{int32(-1073741824), true, false, ""},
+ equalsTestCase{int64(-1073741824), true, false, ""},
+ equalsTestCase{float32(-1073741824), true, false, ""},
+ equalsTestCase{float64(-1073741824), true, false, ""},
+ equalsTestCase{complex64(-1073741824), true, false, ""},
+ equalsTestCase{complex128(-1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(-1073741824)), true, false, ""},
+ equalsTestCase{interface{}(float64(-1073741824)), true, false, ""},
+
+ // Values that would be -1073741824 in two's complement.
+ equalsTestCase{uint((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 1073741824), false, false, ""},
+ equalsTestCase{uintptr((1 << 64) - 1073741824), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-1073741823), false, false, ""},
+ equalsTestCase{int32(-1073741823), false, false, ""},
+ equalsTestCase{int64(-1073741823), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float64(-1073741824.1), false, false, ""},
+ equalsTestCase{float64(-1073741823.9), false, false, ""},
+ equalsTestCase{complex128(-1073741823), false, false, ""},
+ equalsTestCase{complex128(-1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegralFloatingPointLiteral() {
+ // 2^30
+ matcher := Equals(1073741824.0)
+ ExpectEq("1.073741824e+09", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1073741824.
+ equalsTestCase{1073741824, true, false, ""},
+ equalsTestCase{1073741824.0, true, false, ""},
+ equalsTestCase{1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(1073741824), true, false, ""},
+ equalsTestCase{int32(1073741824), true, false, ""},
+ equalsTestCase{int64(1073741824), true, false, ""},
+ equalsTestCase{uint(1073741824), true, false, ""},
+ equalsTestCase{uint32(1073741824), true, false, ""},
+ equalsTestCase{uint64(1073741824), true, false, ""},
+ equalsTestCase{float32(1073741824), true, false, ""},
+ equalsTestCase{float64(1073741824), true, false, ""},
+ equalsTestCase{complex64(1073741824), true, false, ""},
+ equalsTestCase{complex128(1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(1073741824)), true, false, ""},
+ equalsTestCase{interface{}(float64(1073741824)), true, false, ""},
+
+ // Values that would be 1073741824 in two's complement.
+ equalsTestCase{uint((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 1073741824), false, false, ""},
+ equalsTestCase{uintptr((1 << 64) - 1073741824), false, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1073741823), false, false, ""},
+ equalsTestCase{int32(1073741823), false, false, ""},
+ equalsTestCase{int64(1073741823), false, false, ""},
+ equalsTestCase{uint(1073741823), false, false, ""},
+ equalsTestCase{uint32(1073741823), false, false, ""},
+ equalsTestCase{uint64(1073741823), false, false, ""},
+ equalsTestCase{float64(1073741824.1), false, false, ""},
+ equalsTestCase{float64(1073741823.9), false, false, ""},
+ equalsTestCase{complex128(1073741823), false, false, ""},
+ equalsTestCase{complex128(1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonIntegralFloatingPointLiteral() {
+ matcher := Equals(17.1)
+ ExpectEq("17.1", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 17.1.
+ equalsTestCase{17.1, true, false, ""},
+ equalsTestCase{17.1, true, false, ""},
+ equalsTestCase{17.1 + 0i, true, false, ""},
+ equalsTestCase{float32(17.1), true, false, ""},
+ equalsTestCase{float64(17.1), true, false, ""},
+ equalsTestCase{complex64(17.1), true, false, ""},
+ equalsTestCase{complex128(17.1), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{17, false, false, ""},
+ equalsTestCase{17.2, false, false, ""},
+ equalsTestCase{18, false, false, ""},
+ equalsTestCase{int(17), false, false, ""},
+ equalsTestCase{int(18), false, false, ""},
+ equalsTestCase{int32(17), false, false, ""},
+ equalsTestCase{int64(17), false, false, ""},
+ equalsTestCase{uint(17), false, false, ""},
+ equalsTestCase{uint32(17), false, false, ""},
+ equalsTestCase{uint64(17), false, false, ""},
+ equalsTestCase{uintptr(17), false, false, ""},
+ equalsTestCase{complex128(17.1 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// bool
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) False() {
+ matcher := Equals(false)
+ ExpectEq("false", matcher.Description())
+
+ cases := []equalsTestCase{
+ // bools
+ equalsTestCase{false, true, false, ""},
+ equalsTestCase{bool(false), true, false, ""},
+
+ equalsTestCase{true, false, false, ""},
+ equalsTestCase{bool(true), false, false, ""},
+
+ // Other types.
+ equalsTestCase{int(0), false, true, "which is not a bool"},
+ equalsTestCase{int8(0), false, true, "which is not a bool"},
+ equalsTestCase{int16(0), false, true, "which is not a bool"},
+ equalsTestCase{int32(0), false, true, "which is not a bool"},
+ equalsTestCase{int64(0), false, true, "which is not a bool"},
+ equalsTestCase{uint(0), false, true, "which is not a bool"},
+ equalsTestCase{uint8(0), false, true, "which is not a bool"},
+ equalsTestCase{uint16(0), false, true, "which is not a bool"},
+ equalsTestCase{uint32(0), false, true, "which is not a bool"},
+ equalsTestCase{uint64(0), false, true, "which is not a bool"},
+ equalsTestCase{uintptr(0), false, true, "which is not a bool"},
+ equalsTestCase{[...]int{}, false, true, "which is not a bool"},
+ equalsTestCase{make(chan int), false, true, "which is not a bool"},
+ equalsTestCase{func() {}, false, true, "which is not a bool"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a bool"},
+ equalsTestCase{&someInt, false, true, "which is not a bool"},
+ equalsTestCase{[]int{}, false, true, "which is not a bool"},
+ equalsTestCase{"taco", false, true, "which is not a bool"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a bool"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) True() {
+ matcher := Equals(true)
+ ExpectEq("true", matcher.Description())
+
+ cases := []equalsTestCase{
+ // bools
+ equalsTestCase{true, true, false, ""},
+ equalsTestCase{bool(true), true, false, ""},
+
+ equalsTestCase{false, false, false, ""},
+ equalsTestCase{bool(false), false, false, ""},
+
+ // Other types.
+ equalsTestCase{int(1), false, true, "which is not a bool"},
+ equalsTestCase{int8(1), false, true, "which is not a bool"},
+ equalsTestCase{int16(1), false, true, "which is not a bool"},
+ equalsTestCase{int32(1), false, true, "which is not a bool"},
+ equalsTestCase{int64(1), false, true, "which is not a bool"},
+ equalsTestCase{uint(1), false, true, "which is not a bool"},
+ equalsTestCase{uint8(1), false, true, "which is not a bool"},
+ equalsTestCase{uint16(1), false, true, "which is not a bool"},
+ equalsTestCase{uint32(1), false, true, "which is not a bool"},
+ equalsTestCase{uint64(1), false, true, "which is not a bool"},
+ equalsTestCase{uintptr(1), false, true, "which is not a bool"},
+ equalsTestCase{[...]int{}, false, true, "which is not a bool"},
+ equalsTestCase{make(chan int), false, true, "which is not a bool"},
+ equalsTestCase{func() {}, false, true, "which is not a bool"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a bool"},
+ equalsTestCase{&someInt, false, true, "which is not a bool"},
+ equalsTestCase{[]int{}, false, true, "which is not a bool"},
+ equalsTestCase{"taco", false, true, "which is not a bool"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a bool"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// int
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeInt() {
+ // -2^30
+ matcher := Equals(int(-1073741824))
+ ExpectEq("-1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -1073741824.
+ equalsTestCase{-1073741824, true, false, ""},
+ equalsTestCase{-1073741824.0, true, false, ""},
+ equalsTestCase{-1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(-1073741824), true, false, ""},
+ equalsTestCase{int32(-1073741824), true, false, ""},
+ equalsTestCase{int64(-1073741824), true, false, ""},
+ equalsTestCase{float32(-1073741824), true, false, ""},
+ equalsTestCase{float64(-1073741824), true, false, ""},
+ equalsTestCase{complex64(-1073741824), true, false, ""},
+ equalsTestCase{complex128(-1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(-1073741824)), true, false, ""},
+
+ // Values that would be -1073741824 in two's complement.
+ equalsTestCase{uint((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 1073741824), false, false, ""},
+ equalsTestCase{uintptr((1 << 64) - 1073741824), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-1073741823), false, false, ""},
+ equalsTestCase{int32(-1073741823), false, false, ""},
+ equalsTestCase{int64(-1073741823), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float64(-1073741824.1), false, false, ""},
+ equalsTestCase{float64(-1073741823.9), false, false, ""},
+ equalsTestCase{complex128(-1073741823), false, false, ""},
+ equalsTestCase{complex128(-1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveInt() {
+ // 2^30
+ matcher := Equals(int(1073741824))
+ ExpectEq("1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1073741824.
+ equalsTestCase{1073741824, true, false, ""},
+ equalsTestCase{1073741824.0, true, false, ""},
+ equalsTestCase{1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(1073741824), true, false, ""},
+ equalsTestCase{uint(1073741824), true, false, ""},
+ equalsTestCase{int32(1073741824), true, false, ""},
+ equalsTestCase{int64(1073741824), true, false, ""},
+ equalsTestCase{uint32(1073741824), true, false, ""},
+ equalsTestCase{uint64(1073741824), true, false, ""},
+ equalsTestCase{uintptr(1073741824), true, false, ""},
+ equalsTestCase{float32(1073741824), true, false, ""},
+ equalsTestCase{float64(1073741824), true, false, ""},
+ equalsTestCase{complex64(1073741824), true, false, ""},
+ equalsTestCase{complex128(1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(1073741824)), true, false, ""},
+ equalsTestCase{interface{}(uint(1073741824)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1073741823), false, false, ""},
+ equalsTestCase{int32(1073741823), false, false, ""},
+ equalsTestCase{int64(1073741823), false, false, ""},
+ equalsTestCase{float64(1073741824.1), false, false, ""},
+ equalsTestCase{float64(1073741823.9), false, false, ""},
+ equalsTestCase{complex128(1073741823), false, false, ""},
+ equalsTestCase{complex128(1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// int8
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeInt8() {
+ matcher := Equals(int8(-17))
+ ExpectEq("-17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -17.
+ equalsTestCase{-17, true, false, ""},
+ equalsTestCase{-17.0, true, false, ""},
+ equalsTestCase{-17 + 0i, true, false, ""},
+ equalsTestCase{int(-17), true, false, ""},
+ equalsTestCase{int8(-17), true, false, ""},
+ equalsTestCase{int16(-17), true, false, ""},
+ equalsTestCase{int32(-17), true, false, ""},
+ equalsTestCase{int64(-17), true, false, ""},
+ equalsTestCase{float32(-17), true, false, ""},
+ equalsTestCase{float64(-17), true, false, ""},
+ equalsTestCase{complex64(-17), true, false, ""},
+ equalsTestCase{complex128(-17), true, false, ""},
+ equalsTestCase{interface{}(int(-17)), true, false, ""},
+
+ // Values that would be -17 in two's complement.
+ equalsTestCase{uint((1 << 32) - 17), false, false, ""},
+ equalsTestCase{uint8((1 << 8) - 17), false, false, ""},
+ equalsTestCase{uint16((1 << 16) - 17), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 17), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 17), false, false, ""},
+ equalsTestCase{uintptr((1 << 64) - 17), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-16), false, false, ""},
+ equalsTestCase{int8(-16), false, false, ""},
+ equalsTestCase{int16(-16), false, false, ""},
+ equalsTestCase{int32(-16), false, false, ""},
+ equalsTestCase{int64(-16), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float32(-17.1), false, false, ""},
+ equalsTestCase{float32(-16.9), false, false, ""},
+ equalsTestCase{complex64(-16), false, false, ""},
+ equalsTestCase{complex64(-17 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{-17}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{-17}, false, true, "which is not numeric"},
+ equalsTestCase{"-17", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroInt8() {
+ matcher := Equals(int8(0))
+ ExpectEq("0", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 0.
+ equalsTestCase{0, true, false, ""},
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(int(0)), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+ equalsTestCase{uintptr(0), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1), false, false, ""},
+ equalsTestCase{int8(1), false, false, ""},
+ equalsTestCase{int16(1), false, false, ""},
+ equalsTestCase{int32(1), false, false, ""},
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{float32(-0.1), false, false, ""},
+ equalsTestCase{float32(0.1), false, false, ""},
+ equalsTestCase{complex64(1), false, false, ""},
+ equalsTestCase{complex64(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{0}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{0}, false, true, "which is not numeric"},
+ equalsTestCase{"0", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveInt8() {
+ matcher := Equals(int8(17))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 17.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(17), true, false, ""},
+ equalsTestCase{int8(17), true, false, ""},
+ equalsTestCase{int16(17), true, false, ""},
+ equalsTestCase{int32(17), true, false, ""},
+ equalsTestCase{int64(17), true, false, ""},
+ equalsTestCase{float32(17), true, false, ""},
+ equalsTestCase{float64(17), true, false, ""},
+ equalsTestCase{complex64(17), true, false, ""},
+ equalsTestCase{complex128(17), true, false, ""},
+ equalsTestCase{interface{}(int(17)), true, false, ""},
+ equalsTestCase{uint(17), true, false, ""},
+ equalsTestCase{uint8(17), true, false, ""},
+ equalsTestCase{uint16(17), true, false, ""},
+ equalsTestCase{uint32(17), true, false, ""},
+ equalsTestCase{uint64(17), true, false, ""},
+ equalsTestCase{uintptr(17), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(16), false, false, ""},
+ equalsTestCase{int8(16), false, false, ""},
+ equalsTestCase{int16(16), false, false, ""},
+ equalsTestCase{int32(16), false, false, ""},
+ equalsTestCase{int64(16), false, false, ""},
+ equalsTestCase{float32(16.9), false, false, ""},
+ equalsTestCase{float32(17.1), false, false, ""},
+ equalsTestCase{complex64(16), false, false, ""},
+ equalsTestCase{complex64(17 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{17}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{17}, false, true, "which is not numeric"},
+ equalsTestCase{"17", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// int16
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeInt16() {
+ matcher := Equals(int16(-32766))
+ ExpectEq("-32766", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -32766.
+ equalsTestCase{-32766, true, false, ""},
+ equalsTestCase{-32766.0, true, false, ""},
+ equalsTestCase{-32766 + 0i, true, false, ""},
+ equalsTestCase{int(-32766), true, false, ""},
+ equalsTestCase{int16(-32766), true, false, ""},
+ equalsTestCase{int32(-32766), true, false, ""},
+ equalsTestCase{int64(-32766), true, false, ""},
+ equalsTestCase{float32(-32766), true, false, ""},
+ equalsTestCase{float64(-32766), true, false, ""},
+ equalsTestCase{complex64(-32766), true, false, ""},
+ equalsTestCase{complex128(-32766), true, false, ""},
+ equalsTestCase{interface{}(int(-32766)), true, false, ""},
+
+ // Values that would be -32766 in two's complement.
+ equalsTestCase{uint((1 << 32) - 32766), false, false, ""},
+ equalsTestCase{uint16((1 << 16) - 32766), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 32766), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 32766), false, false, ""},
+ equalsTestCase{uintptr((1 << 64) - 32766), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-16), false, false, ""},
+ equalsTestCase{int8(-16), false, false, ""},
+ equalsTestCase{int16(-16), false, false, ""},
+ equalsTestCase{int32(-16), false, false, ""},
+ equalsTestCase{int64(-16), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float32(-32766.1), false, false, ""},
+ equalsTestCase{float32(-32765.9), false, false, ""},
+ equalsTestCase{complex64(-32766.1), false, false, ""},
+ equalsTestCase{complex64(-32766 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{-32766}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{-32766}, false, true, "which is not numeric"},
+ equalsTestCase{"-32766", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroInt16() {
+ matcher := Equals(int16(0))
+ ExpectEq("0", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 0.
+ equalsTestCase{0, true, false, ""},
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(int(0)), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+ equalsTestCase{uintptr(0), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1), false, false, ""},
+ equalsTestCase{int8(1), false, false, ""},
+ equalsTestCase{int16(1), false, false, ""},
+ equalsTestCase{int32(1), false, false, ""},
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{float32(-0.1), false, false, ""},
+ equalsTestCase{float32(0.1), false, false, ""},
+ equalsTestCase{complex64(1), false, false, ""},
+ equalsTestCase{complex64(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{0}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{0}, false, true, "which is not numeric"},
+ equalsTestCase{"0", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveInt16() {
+ matcher := Equals(int16(32765))
+ ExpectEq("32765", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 32765.
+ equalsTestCase{32765, true, false, ""},
+ equalsTestCase{32765.0, true, false, ""},
+ equalsTestCase{32765 + 0i, true, false, ""},
+ equalsTestCase{int(32765), true, false, ""},
+ equalsTestCase{int16(32765), true, false, ""},
+ equalsTestCase{int32(32765), true, false, ""},
+ equalsTestCase{int64(32765), true, false, ""},
+ equalsTestCase{float32(32765), true, false, ""},
+ equalsTestCase{float64(32765), true, false, ""},
+ equalsTestCase{complex64(32765), true, false, ""},
+ equalsTestCase{complex128(32765), true, false, ""},
+ equalsTestCase{interface{}(int(32765)), true, false, ""},
+ equalsTestCase{uint(32765), true, false, ""},
+ equalsTestCase{uint16(32765), true, false, ""},
+ equalsTestCase{uint32(32765), true, false, ""},
+ equalsTestCase{uint64(32765), true, false, ""},
+ equalsTestCase{uintptr(32765), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(32764), false, false, ""},
+ equalsTestCase{int16(32764), false, false, ""},
+ equalsTestCase{int32(32764), false, false, ""},
+ equalsTestCase{int64(32764), false, false, ""},
+ equalsTestCase{float32(32764.9), false, false, ""},
+ equalsTestCase{float32(32765.1), false, false, ""},
+ equalsTestCase{complex64(32765.9), false, false, ""},
+ equalsTestCase{complex64(32765 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{32765}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{32765}, false, true, "which is not numeric"},
+ equalsTestCase{"32765", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// int32
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeInt32() {
+ // -2^30
+ matcher := Equals(int32(-1073741824))
+ ExpectEq("-1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -1073741824.
+ equalsTestCase{-1073741824, true, false, ""},
+ equalsTestCase{-1073741824.0, true, false, ""},
+ equalsTestCase{-1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(-1073741824), true, false, ""},
+ equalsTestCase{int32(-1073741824), true, false, ""},
+ equalsTestCase{int64(-1073741824), true, false, ""},
+ equalsTestCase{float32(-1073741824), true, false, ""},
+ equalsTestCase{float64(-1073741824), true, false, ""},
+ equalsTestCase{complex64(-1073741824), true, false, ""},
+ equalsTestCase{complex128(-1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(-1073741824)), true, false, ""},
+
+ // Values that would be -1073741824 in two's complement.
+ equalsTestCase{uint((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint32((1 << 32) - 1073741824), false, false, ""},
+ equalsTestCase{uint64((1 << 64) - 1073741824), false, false, ""},
+ equalsTestCase{uintptr((1 << 64) - 1073741824), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int(-1073741823), false, false, ""},
+ equalsTestCase{int32(-1073741823), false, false, ""},
+ equalsTestCase{int64(-1073741823), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float64(-1073741824.1), false, false, ""},
+ equalsTestCase{float64(-1073741823.9), false, false, ""},
+ equalsTestCase{complex128(-1073741823), false, false, ""},
+ equalsTestCase{complex128(-1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveInt32() {
+ // 2^30
+ matcher := Equals(int32(1073741824))
+ ExpectEq("1073741824", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1073741824.
+ equalsTestCase{1073741824, true, false, ""},
+ equalsTestCase{1073741824.0, true, false, ""},
+ equalsTestCase{1073741824 + 0i, true, false, ""},
+ equalsTestCase{int(1073741824), true, false, ""},
+ equalsTestCase{uint(1073741824), true, false, ""},
+ equalsTestCase{int32(1073741824), true, false, ""},
+ equalsTestCase{int64(1073741824), true, false, ""},
+ equalsTestCase{uint32(1073741824), true, false, ""},
+ equalsTestCase{uint64(1073741824), true, false, ""},
+ equalsTestCase{uintptr(1073741824), true, false, ""},
+ equalsTestCase{float32(1073741824), true, false, ""},
+ equalsTestCase{float64(1073741824), true, false, ""},
+ equalsTestCase{complex64(1073741824), true, false, ""},
+ equalsTestCase{complex128(1073741824), true, false, ""},
+ equalsTestCase{interface{}(int(1073741824)), true, false, ""},
+ equalsTestCase{interface{}(uint(1073741824)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(1073741823), false, false, ""},
+ equalsTestCase{int32(1073741823), false, false, ""},
+ equalsTestCase{int64(1073741823), false, false, ""},
+ equalsTestCase{float64(1073741824.1), false, false, ""},
+ equalsTestCase{float64(1073741823.9), false, false, ""},
+ equalsTestCase{complex128(1073741823), false, false, ""},
+ equalsTestCase{complex128(1073741824 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// int64
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeInt64() {
+ // -2^40
+ matcher := Equals(int64(-1099511627776))
+ ExpectEq("-1099511627776", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -1099511627776.
+ equalsTestCase{-1099511627776.0, true, false, ""},
+ equalsTestCase{-1099511627776 + 0i, true, false, ""},
+ equalsTestCase{int64(-1099511627776), true, false, ""},
+ equalsTestCase{float32(-1099511627776), true, false, ""},
+ equalsTestCase{float64(-1099511627776), true, false, ""},
+ equalsTestCase{complex64(-1099511627776), true, false, ""},
+ equalsTestCase{complex128(-1099511627776), true, false, ""},
+ equalsTestCase{interface{}(int64(-1099511627776)), true, false, ""},
+
+ // Values that would be -1099511627776 in two's complement.
+ equalsTestCase{uint64((1 << 64) - 1099511627776), false, false, ""},
+
+ // Non-equal values of signed integer type.
+ equalsTestCase{int64(-1099511627775), false, false, ""},
+
+ // Non-equal values of other numeric types.
+ equalsTestCase{float64(-1099511627776.1), false, false, ""},
+ equalsTestCase{float64(-1099511627775.9), false, false, ""},
+ equalsTestCase{complex128(-1099511627775), false, false, ""},
+ equalsTestCase{complex128(-1099511627776 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveInt64() {
+ // 2^40
+ matcher := Equals(int64(1099511627776))
+ ExpectEq("1099511627776", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1099511627776.
+ equalsTestCase{1099511627776.0, true, false, ""},
+ equalsTestCase{1099511627776 + 0i, true, false, ""},
+ equalsTestCase{int64(1099511627776), true, false, ""},
+ equalsTestCase{uint64(1099511627776), true, false, ""},
+ equalsTestCase{uintptr(1099511627776), true, false, ""},
+ equalsTestCase{float32(1099511627776), true, false, ""},
+ equalsTestCase{float64(1099511627776), true, false, ""},
+ equalsTestCase{complex64(1099511627776), true, false, ""},
+ equalsTestCase{complex128(1099511627776), true, false, ""},
+ equalsTestCase{interface{}(int64(1099511627776)), true, false, ""},
+ equalsTestCase{interface{}(uint64(1099511627776)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(1099511627775), false, false, ""},
+ equalsTestCase{uint64(1099511627775), false, false, ""},
+ equalsTestCase{float64(1099511627776.1), false, false, ""},
+ equalsTestCase{float64(1099511627775.9), false, false, ""},
+ equalsTestCase{complex128(1099511627775), false, false, ""},
+ equalsTestCase{complex128(1099511627776 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Int64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(int64(kTwoTo25 + 1))
+ ExpectEq("33554433", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
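(Illustrative sketch, not part of the vendored oglematchers sources.) The comment at the top of this test describes ordinary IEEE-754 rounding: float32 keeps a 24-bit significand, so integers just below 2^25 are spaced 2 apart and those at or above it 4 apart, and round-to-nearest-even sends 2^25-1, 2^25+1, and 2^25+2 all to 2^25. A tiny standalone program (constant name and layout are the editor's own) shows the collapse the float32/complex64 cases above rely on:

    package main

    import "fmt"

    func main() {
        const twoTo25 = 1 << 25 // 33554432

        // These all collapse to exactly 2^25 in float32 ...
        fmt.Println(float32(twoTo25-1) == float32(twoTo25+1)) // true
        fmt.Println(float32(twoTo25+2) == float32(twoTo25+1)) // true
        // ... but 2^25+3 rounds up to 2^25+4 and no longer matches.
        fmt.Println(float32(twoTo25+3) == float32(twoTo25+1)) // false

        // float64 represents all of these integers exactly, so nothing collapses.
        fmt.Println(float64(twoTo25) == float64(twoTo25+1)) // false
    }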
+
+func (t *EqualsTest) Int64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := Equals(int64(kTwoTo54 + 1))
+ ExpectEq("18014398509481985", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
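(Illustrative sketch, not part of the vendored oglematchers sources.) The same effect occurs one level up for float64: with a 53-bit significand, integers at and above 2^54 are spaced 4 apart (2 apart just below), so 2^54-1, 2^54+1, and 2^54+2 all round to 2^54. A matching sketch, again with the editor's own constant name:

    package main

    import "fmt"

    func main() {
        const twoTo54 = 1 << 54 // 18014398509481984

        fmt.Println(float64(twoTo54-1) == float64(twoTo54+1)) // true: both round to 2^54
        fmt.Println(float64(twoTo54+2) == float64(twoTo54+1)) // true: the tie rounds to even
        fmt.Println(float64(twoTo54+3) == float64(twoTo54+1)) // false: rounds up to 2^54+4
    }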
+
+////////////////////////////////////////////////////////////////////////
+// uint
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUint() {
+ const kExpected = 17
+ matcher := Equals(uint(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{uintptr(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{uintptr(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeUint() {
+ const kExpected = (1 << 16) + 17
+ matcher := Equals(uint(kExpected))
+ ExpectEq("65553", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{65553, true, false, ""},
+ equalsTestCase{65553.0, true, false, ""},
+ equalsTestCase{65553 + 0i, true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{int16(17), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(17), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) UintNotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(uint(kTwoTo25 + 1))
+ ExpectEq("33554433", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// uint8
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUint8() {
+ const kExpected = 17
+ matcher := Equals(uint8(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{uintptr(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{uintptr(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// uint16
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUint16() {
+ const kExpected = 17
+ matcher := Equals(uint16(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{uintptr(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{uintptr(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeUint16() {
+ const kExpected = (1 << 8) + 17
+ matcher := Equals(uint16(kExpected))
+ ExpectEq("273", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{273, true, false, ""},
+ equalsTestCase{273.0, true, false, ""},
+ equalsTestCase{273 + 0i, true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{int8(17), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(17), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// uint32
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUint32() {
+ const kExpected = 17
+ matcher := Equals(uint32(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{uintptr(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{uintptr(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeUint32() {
+ const kExpected = (1 << 16) + 17
+ matcher := Equals(uint32(kExpected))
+ ExpectEq("65553", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{65553, true, false, ""},
+ equalsTestCase{65553.0, true, false, ""},
+ equalsTestCase{65553 + 0i, true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{int16(17), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(17), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Uint32NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix (see the
+ // rounding sketch after this test).
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(uint32(kTwoTo25 + 1))
+ ExpectEq("33554433", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
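+// The [2^25-1, 2^25+2] bracket used in the test above follows from float32's
+// 24-bit significand: above 2^25 the representable integers are 4 apart, and
+// round-to-nearest-even collapses that whole bracket onto 2^25 itself. A rough
+// standalone sketch of that rounding (a hypothetical throwaway program, not
+// part of this test suite):
+//
+//	package main
+//
+//	import "fmt"
+//
+//	func main() {
+//		const kTwoTo25 = 1 << 25
+//		for delta := int64(-2); delta <= 3; delta++ {
+//			// Second column: 33554430, then 33554432 four times, then 33554436.
+//			fmt.Println(delta, int64(float32(kTwoTo25+delta)))
+//		}
+//	}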
+
+////////////////////////////////////////////////////////////////////////
+// uint64
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUint64() {
+ const kExpected = 17
+ matcher := Equals(uint64(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{uintptr(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{uintptr(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeUint64() {
+ const kExpected = (1 << 32) + 17
+ matcher := Equals(uint64(kExpected))
+ ExpectEq("4294967313", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{4294967313.0, true, false, ""},
+ equalsTestCase{4294967313 + 0i, true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{int(17), false, false, ""},
+ equalsTestCase{int32(17), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(17), false, false, ""},
+ equalsTestCase{uint32(17), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Uint64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(uint64(kTwoTo25 + 1))
+ ExpectEq("33554433", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 + 0), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Uint64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix (see the
+ // rounding sketch after this test).
+ const kTwoTo54 = 1 << 54
+ matcher := Equals(uint64(kTwoTo54 + 1))
+ ExpectEq("18014398509481985", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
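+// The analogous collapse for float64 happens near 2^54: its 53-bit significand
+// spaces representable integers 4 apart above 2^54, so [2^54-1, 2^54+2] all
+// round to 2^54. A rough standalone sketch (hypothetical throwaway program,
+// not part of this test suite):
+//
+//	package main
+//
+//	import "fmt"
+//
+//	func main() {
+//		const kTwoTo54 = int64(1) << 54
+//		for delta := int64(-2); delta <= 3; delta++ {
+//			// Second column: 18014398509481982, then 18014398509481984
+//			// four times, then 18014398509481988.
+//			fmt.Println(delta, int64(float64(kTwoTo54+delta)))
+//		}
+//	}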
+
+////////////////////////////////////////////////////////////////////////
+// uintptr
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) SmallUintptr() {
+ const kExpected = 17
+ matcher := Equals(uintptr(kExpected))
+ ExpectEq("17", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{17, true, false, ""},
+ equalsTestCase{17.0, true, false, ""},
+ equalsTestCase{17 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int8(kExpected), true, false, ""},
+ equalsTestCase{int16(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint8(kExpected), true, false, ""},
+ equalsTestCase{uint16(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{uintptr(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{kExpected + 1, false, false, ""},
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int8(kExpected + 1), false, false, ""},
+ equalsTestCase{int16(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint8(kExpected + 1), false, false, ""},
+ equalsTestCase{uint16(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{uintptr(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeUintptr() {
+ const kExpected = (1 << 32) + 17
+ matcher := Equals(uintptr(kExpected))
+ ExpectEq("4294967313", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{4294967313.0, true, false, ""},
+ equalsTestCase{4294967313 + 0i, true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{uintptr(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric types.
+ equalsTestCase{int(17), false, false, ""},
+ equalsTestCase{int32(17), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(17), false, false, ""},
+ equalsTestCase{uint32(17), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{uintptr(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected + 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 1), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// float32
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegralFloat32() {
+ matcher := Equals(float32(-32769))
+ ExpectEq("-32769", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -32769.
+ equalsTestCase{-32769.0, true, false, ""},
+ equalsTestCase{-32769 + 0i, true, false, ""},
+ equalsTestCase{int32(-32769), true, false, ""},
+ equalsTestCase{int64(-32769), true, false, ""},
+ equalsTestCase{float32(-32769), true, false, ""},
+ equalsTestCase{float64(-32769), true, false, ""},
+ equalsTestCase{complex64(-32769), true, false, ""},
+ equalsTestCase{complex128(-32769), true, false, ""},
+ equalsTestCase{interface{}(float32(-32769)), true, false, ""},
+ equalsTestCase{interface{}(int64(-32769)), true, false, ""},
+
+ // Values whose two's-complement bit pattern matches -32769 (see the note
+ // after this test); they must still compare as unequal.
+ equalsTestCase{uint64((1 << 64) - 32769), false, false, ""},
+ equalsTestCase{uintptr((1 << 64) - 32769), false, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(-32770), false, false, ""},
+ equalsTestCase{float32(-32769.1), false, false, ""},
+ equalsTestCase{float32(-32768.9), false, false, ""},
+ equalsTestCase{float64(-32769.1), false, false, ""},
+ equalsTestCase{float64(-32768.9), false, false, ""},
+ equalsTestCase{complex128(-32768), false, false, ""},
+ equalsTestCase{complex128(-32769 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
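+// Note on the "two's complement" cases above: reinterpreting -32769's bit
+// pattern as an unsigned integer yields 1<<64 - 32769, and the matcher must
+// still report such values as unequal rather than comparing raw bits. A rough
+// standalone sketch (hypothetical throwaway program, not part of this suite):
+//
+//	package main
+//
+//	import "fmt"
+//
+//	func main() {
+//		var i int64 = -32769
+//		u := uint64(i)                  // wraps modulo 2^64, keeping the bit pattern
+//		fmt.Println(u == (1<<64)-32769) // true
+//	}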
+
+func (t *EqualsTest) NegativeNonIntegralFloat32() {
+ matcher := Equals(float32(-32769.1))
+ ExpectEq("-32769.1", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of -32769.1.
+ equalsTestCase{-32769.1, true, false, ""},
+ equalsTestCase{-32769.1 + 0i, true, false, ""},
+ equalsTestCase{float32(-32769.1), true, false, ""},
+ equalsTestCase{float64(-32769.1), true, false, ""},
+ equalsTestCase{complex64(-32769.1), true, false, ""},
+ equalsTestCase{complex128(-32769.1), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int32(-32769), false, false, ""},
+ equalsTestCase{int32(-32770), false, false, ""},
+ equalsTestCase{int64(-32769), false, false, ""},
+ equalsTestCase{int64(-32770), false, false, ""},
+ equalsTestCase{float32(-32769.2), false, false, ""},
+ equalsTestCase{float32(-32769.0), false, false, ""},
+ equalsTestCase{float64(-32769.2), false, false, ""},
+ equalsTestCase{complex128(-32769.1 + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeNegativeFloat32() {
+ const kExpected = -1 * (1 << 65)
+ matcher := Equals(float32(kExpected))
+ ExpectEq("-3.689349e+19", matcher.Description())
+
+ floatExpected := float32(kExpected)
+ castedInt := int64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroFloat32() {
+ matcher := Equals(float32(0))
+ ExpectEq("0", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of zero.
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+ equalsTestCase{uintptr(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(float32(0)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{int64(-1), false, false, ""},
+ equalsTestCase{float32(1), false, false, ""},
+ equalsTestCase{float32(-1), false, false, ""},
+ equalsTestCase{complex128(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegralFloat32() {
+ matcher := Equals(float32(32769))
+ ExpectEq("32769", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 32769.
+ equalsTestCase{32769.0, true, false, ""},
+ equalsTestCase{32769 + 0i, true, false, ""},
+ equalsTestCase{int(32769), true, false, ""},
+ equalsTestCase{int32(32769), true, false, ""},
+ equalsTestCase{int64(32769), true, false, ""},
+ equalsTestCase{uint(32769), true, false, ""},
+ equalsTestCase{uint32(32769), true, false, ""},
+ equalsTestCase{uint64(32769), true, false, ""},
+ equalsTestCase{uintptr(32769), true, false, ""},
+ equalsTestCase{float32(32769), true, false, ""},
+ equalsTestCase{float64(32769), true, false, ""},
+ equalsTestCase{complex64(32769), true, false, ""},
+ equalsTestCase{complex128(32769), true, false, ""},
+ equalsTestCase{interface{}(float32(32769)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(32770), false, false, ""},
+ equalsTestCase{uint64(32770), false, false, ""},
+ equalsTestCase{float32(32769.1), false, false, ""},
+ equalsTestCase{float32(32768.9), false, false, ""},
+ equalsTestCase{float64(32769.1), false, false, ""},
+ equalsTestCase{float64(32768.9), false, false, ""},
+ equalsTestCase{complex128(32768), false, false, ""},
+ equalsTestCase{complex128(32769 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveNonIntegralFloat32() {
+ matcher := Equals(float32(32769.1))
+ ExpectEq("32769.1", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 32769.1.
+ equalsTestCase{32769.1, true, false, ""},
+ equalsTestCase{32769.1 + 0i, true, false, ""},
+ equalsTestCase{float32(32769.1), true, false, ""},
+ equalsTestCase{float64(32769.1), true, false, ""},
+ equalsTestCase{complex64(32769.1), true, false, ""},
+ equalsTestCase{complex128(32769.1), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int32(32769), false, false, ""},
+ equalsTestCase{int32(32770), false, false, ""},
+ equalsTestCase{uint64(32769), false, false, ""},
+ equalsTestCase{uint64(32770), false, false, ""},
+ equalsTestCase{float32(32769.2), false, false, ""},
+ equalsTestCase{float32(32769.0), false, false, ""},
+ equalsTestCase{float64(32769.2), false, false, ""},
+ equalsTestCase{complex128(32769.1 + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargePositiveFloat32() {
+ const kExpected = 1 << 65
+ matcher := Equals(float32(kExpected))
+ ExpectEq("3.689349e+19", matcher.Description())
+
+ floatExpected := float32(kExpected)
+ castedInt := uint64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{uint64(0), false, false, ""},
+ equalsTestCase{uint64(math.MaxUint64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Float32AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(float32(kTwoTo25 + 1))
+ ExpectEq("3.3554432e+07", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 3), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// float64
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegralFloat64() {
+ const kExpected = -(1 << 50)
+ matcher := Equals(float64(kExpected))
+ ExpectEq("-1.125899906842624e+15", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{-1125899906842624.0, true, false, ""},
+ equalsTestCase{-1125899906842624.0 + 0i, true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Values that would be kExpected in two's complement.
+ equalsTestCase{uint64((1 << 64) + kExpected), false, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NegativeNonIntegralFloat64() {
+ const kTwoTo50 = 1 << 50
+ const kExpected = -kTwoTo50 - 0.25
+
+ matcher := Equals(float64(kExpected))
+ ExpectEq("-1.1258999068426242e+15", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(-kTwoTo50), false, false, ""},
+ equalsTestCase{int64(-kTwoTo50 - 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeNegativeFloat64() {
+ const kExpected = -1 * (1 << 65)
+ matcher := Equals(float64(kExpected))
+ ExpectEq("-3.6893488147419103e+19", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := int64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroFloat64() {
+ matcher := Equals(float64(0))
+ ExpectEq("0", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of zero.
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+ equalsTestCase{uintptr(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(float32(0)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{int64(-1), false, false, ""},
+ equalsTestCase{float32(1), false, false, ""},
+ equalsTestCase{float32(-1), false, false, ""},
+ equalsTestCase{complex128(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegralFloat64() {
+ const kExpected = 1 << 50
+ matcher := Equals(float64(kExpected))
+ ExpectEq("1.125899906842624e+15", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{1125899906842624.0, true, false, ""},
+ equalsTestCase{1125899906842624.0 + 0i, true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{uintptr(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{uintptr(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveNonIntegralFloat64() {
+ const kTwoTo50 = 1 << 50
+ const kExpected = kTwoTo50 + 0.25
+ matcher := Equals(float64(kExpected))
+ ExpectEq("1.1258999068426242e+15", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kTwoTo50), false, false, ""},
+ equalsTestCase{int64(kTwoTo50 - 1), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargePositiveFloat64() {
+ const kExpected = 1 << 65
+ matcher := Equals(float64(kExpected))
+ ExpectEq("3.6893488147419103e+19", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := uint64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{uint64(0), false, false, ""},
+ equalsTestCase{uint64(math.MaxUint64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Float64AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := Equals(float64(kTwoTo54 + 1))
+ ExpectEq("1.8014398509481984e+16", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// complex64
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegralComplex64() {
+ const kExpected = -32769
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(-32769+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{-32769.0, true, false, ""},
+ equalsTestCase{-32769.0 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Values that would be kExpected in two's complement.
+ equalsTestCase{uint32((1 << 32) + kExpected), false, false, ""},
+ equalsTestCase{uint64((1 << 64) + kExpected), false, false, ""},
+ equalsTestCase{uintptr((1 << 64) + kExpected), false, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NegativeNonIntegralComplex64() {
+ const kTwoTo20 = 1 << 20
+ const kExpected = -kTwoTo20 - 0.25
+
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(-1.0485762e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(-kTwoTo20), false, false, ""},
+ equalsTestCase{int(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{int32(-kTwoTo20), false, false, ""},
+ equalsTestCase{int32(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{int64(-kTwoTo20), false, false, ""},
+ equalsTestCase{int64(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex64(kExpected - 0.75), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 0.75), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeNegativeComplex64() {
+ const kExpected = -1 * (1 << 65)
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(-3.689349e+19+0i)", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := int64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroComplex64() {
+ matcher := Equals(complex64(0))
+ ExpectEq("(0+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of zero.
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+ equalsTestCase{uintptr(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(float32(0)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{int64(-1), false, false, ""},
+ equalsTestCase{float32(1), false, false, ""},
+ equalsTestCase{float32(-1), false, false, ""},
+ equalsTestCase{float64(1), false, false, ""},
+ equalsTestCase{float64(-1), false, false, ""},
+ equalsTestCase{complex64(0 + 2i), false, false, ""},
+ equalsTestCase{complex128(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegralComplex64() {
+ const kExpected = 1 << 20
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(1.048576e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{1048576.0, true, false, ""},
+ equalsTestCase{1048576.0 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{uintptr(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{uintptr(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveNonIntegralComplex64() {
+ const kTwoTo20 = 1 << 20
+ const kExpected = kTwoTo20 + 0.25
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(1.0485762e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kTwoTo20), false, false, ""},
+ equalsTestCase{int64(kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{uint64(kTwoTo20), false, false, ""},
+ equalsTestCase{uint64(kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{float32(kExpected - 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargePositiveComplex64() {
+ const kExpected = 1 << 65
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(3.689349e+19+0i)", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := uint64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{uint64(0), false, false, ""},
+ equalsTestCase{uint64(math.MaxUint64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Complex64AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := Equals(complex64(kTwoTo25 + 1))
+ ExpectEq("(3.3554432e+07+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{int64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{int64(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{uint64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{uint64(kTwoTo25 + 3), false, false, ""},
+
+ // Single-precision floating point.
+ equalsTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex64(kTwoTo25 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo25 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo25 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo25 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Complex64WithNonZeroImaginaryPart() {
+ const kRealPart = 17
+ const kImagPart = 0.25i
+ const kExpected = kRealPart + kImagPart
+ matcher := Equals(complex64(kExpected))
+ ExpectEq("(17+0.25i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kRealPart + kImagPart, true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(kRealPart), false, false, ""},
+ equalsTestCase{int8(kRealPart), false, false, ""},
+ equalsTestCase{int16(kRealPart), false, false, ""},
+ equalsTestCase{int32(kRealPart), false, false, ""},
+ equalsTestCase{int64(kRealPart), false, false, ""},
+ equalsTestCase{uint(kRealPart), false, false, ""},
+ equalsTestCase{uint8(kRealPart), false, false, ""},
+ equalsTestCase{uint16(kRealPart), false, false, ""},
+ equalsTestCase{uint32(kRealPart), false, false, ""},
+ equalsTestCase{uint64(kRealPart), false, false, ""},
+ equalsTestCase{float32(kRealPart), false, false, ""},
+ equalsTestCase{float64(kRealPart), false, false, ""},
+ equalsTestCase{complex64(kRealPart), false, false, ""},
+ equalsTestCase{complex64(kRealPart + kImagPart + 0.5), false, false, ""},
+ equalsTestCase{complex64(kRealPart + kImagPart + 0.5i), false, false, ""},
+ equalsTestCase{complex128(kRealPart), false, false, ""},
+ equalsTestCase{complex128(kRealPart + kImagPart + 0.5), false, false, ""},
+ equalsTestCase{complex128(kRealPart + kImagPart + 0.5i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// complex128
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NegativeIntegralComplex128() {
+ const kExpected = -32769
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(-32769+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{-32769.0, true, false, ""},
+ equalsTestCase{-32769.0 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Values that would be kExpected in two's complement.
+ equalsTestCase{uint32((1 << 32) + kExpected), false, false, ""},
+ equalsTestCase{uint64((1 << 64) + kExpected), false, false, ""},
+ equalsTestCase{uintptr((1 << 64) + kExpected), false, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NegativeNonIntegralComplex128() {
+ const kTwoTo20 = 1 << 20
+ const kExpected = -kTwoTo20 - 0.25
+
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(-1.04857625e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(-kTwoTo20), false, false, ""},
+ equalsTestCase{int(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{int32(-kTwoTo20), false, false, ""},
+ equalsTestCase{int32(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{int64(-kTwoTo20), false, false, ""},
+ equalsTestCase{int64(-kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex64(kExpected - 0.75), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 0.75), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargeNegativeComplex128() {
+ const kExpected = -1 * (1 << 65)
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(-3.6893488147419103e+19+0i)", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := int64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex64(kExpected + 2i), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ZeroComplex128() {
+ matcher := Equals(complex128(0))
+ ExpectEq("(0+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of zero.
+ equalsTestCase{0.0, true, false, ""},
+ equalsTestCase{0 + 0i, true, false, ""},
+ equalsTestCase{int(0), true, false, ""},
+ equalsTestCase{int8(0), true, false, ""},
+ equalsTestCase{int16(0), true, false, ""},
+ equalsTestCase{int32(0), true, false, ""},
+ equalsTestCase{int64(0), true, false, ""},
+ equalsTestCase{uint(0), true, false, ""},
+ equalsTestCase{uint8(0), true, false, ""},
+ equalsTestCase{uint16(0), true, false, ""},
+ equalsTestCase{uint32(0), true, false, ""},
+ equalsTestCase{uint64(0), true, false, ""},
+ equalsTestCase{uintptr(0), true, false, ""},
+ equalsTestCase{float32(0), true, false, ""},
+ equalsTestCase{float64(0), true, false, ""},
+ equalsTestCase{complex64(0), true, false, ""},
+ equalsTestCase{complex128(0), true, false, ""},
+ equalsTestCase{interface{}(float32(0)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(1), false, false, ""},
+ equalsTestCase{int64(-1), false, false, ""},
+ equalsTestCase{float32(1), false, false, ""},
+ equalsTestCase{float32(-1), false, false, ""},
+ equalsTestCase{float64(1), false, false, ""},
+ equalsTestCase{float64(-1), false, false, ""},
+ equalsTestCase{complex64(0 + 2i), false, false, ""},
+ equalsTestCase{complex128(0 + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveIntegralComplex128() {
+ const kExpected = 1 << 20
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(1.048576e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of 1048576.
+ equalsTestCase{1048576.0, true, false, ""},
+ equalsTestCase{1048576.0 + 0i, true, false, ""},
+ equalsTestCase{int(kExpected), true, false, ""},
+ equalsTestCase{int32(kExpected), true, false, ""},
+ equalsTestCase{int64(kExpected), true, false, ""},
+ equalsTestCase{uint(kExpected), true, false, ""},
+ equalsTestCase{uint32(kExpected), true, false, ""},
+ equalsTestCase{uint64(kExpected), true, false, ""},
+ equalsTestCase{uintptr(kExpected), true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+ equalsTestCase{interface{}(float64(kExpected)), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(kExpected + 1), false, false, ""},
+ equalsTestCase{int32(kExpected + 1), false, false, ""},
+ equalsTestCase{int64(kExpected + 1), false, false, ""},
+ equalsTestCase{uint(kExpected + 1), false, false, ""},
+ equalsTestCase{uint32(kExpected + 1), false, false, ""},
+ equalsTestCase{uint64(kExpected + 1), false, false, ""},
+ equalsTestCase{uintptr(kExpected + 1), false, false, ""},
+ equalsTestCase{float32(kExpected - (1 << 30)), false, false, ""},
+ equalsTestCase{float32(kExpected + (1 << 30)), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.5), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.5), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+
+ // Non-numeric types.
+ equalsTestCase{true, false, true, "which is not numeric"},
+ equalsTestCase{[...]int{}, false, true, "which is not numeric"},
+ equalsTestCase{make(chan int), false, true, "which is not numeric"},
+ equalsTestCase{func() {}, false, true, "which is not numeric"},
+ equalsTestCase{map[int]int{}, false, true, "which is not numeric"},
+ equalsTestCase{&someInt, false, true, "which is not numeric"},
+ equalsTestCase{[]int{}, false, true, "which is not numeric"},
+ equalsTestCase{"taco", false, true, "which is not numeric"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not numeric"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) PositiveNonIntegralComplex128() {
+ const kTwoTo20 = 1 << 20
+ const kExpected = kTwoTo20 + 0.25
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(1.04857625e+06+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int64(kTwoTo20), false, false, ""},
+ equalsTestCase{int64(kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{uint64(kTwoTo20), false, false, ""},
+ equalsTestCase{uint64(kTwoTo20 - 1), false, false, ""},
+ equalsTestCase{float32(kExpected - 1), false, false, ""},
+ equalsTestCase{float32(kExpected + 1), false, false, ""},
+ equalsTestCase{float64(kExpected - 0.25), false, false, ""},
+ equalsTestCase{float64(kExpected + 0.25), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1), false, false, ""},
+ equalsTestCase{complex64(kExpected - 1i), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1), false, false, ""},
+ equalsTestCase{complex128(kExpected - 1i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) LargePositiveComplex128() {
+ const kExpected = 1 << 65
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(3.6893488147419103e+19+0i)", matcher.Description())
+
+ floatExpected := float64(kExpected)
+ castedInt := uint64(floatExpected)
+
+ cases := []equalsTestCase{
+ // Equal values of numeric type.
+ equalsTestCase{kExpected + 0i, true, false, ""},
+ equalsTestCase{float32(kExpected), true, false, ""},
+ equalsTestCase{float64(kExpected), true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{castedInt, false, false, ""},
+ equalsTestCase{int64(0), false, false, ""},
+ equalsTestCase{int64(math.MinInt64), false, false, ""},
+ equalsTestCase{int64(math.MaxInt64), false, false, ""},
+ equalsTestCase{uint64(0), false, false, ""},
+ equalsTestCase{uint64(math.MaxUint64), false, false, ""},
+ equalsTestCase{float32(kExpected / 2), false, false, ""},
+ equalsTestCase{float64(kExpected / 2), false, false, ""},
+ equalsTestCase{complex128(kExpected + 2i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Complex128AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := Equals(complex128(kTwoTo54 + 1))
+ ExpectEq("(1.8014398509481984e+16+0i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Integers.
+ equalsTestCase{int64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{int64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{uint64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{uint64(kTwoTo54 + 3), false, false, ""},
+
+ // Double-precision floating point.
+ equalsTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{float64(kTwoTo54 + 3), false, false, ""},
+
+ equalsTestCase{complex128(kTwoTo54 - 2), false, false, ""},
+ equalsTestCase{complex128(kTwoTo54 - 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 0), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 1), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 2), true, false, ""},
+ equalsTestCase{complex128(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) Complex128WithNonZeroImaginaryPart() {
+ const kRealPart = 17
+ const kImagPart = 0.25i
+ const kExpected = kRealPart + kImagPart
+ matcher := Equals(complex128(kExpected))
+ ExpectEq("(17+0.25i)", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Various types of the expected value.
+ equalsTestCase{kExpected, true, false, ""},
+ equalsTestCase{kRealPart + kImagPart, true, false, ""},
+ equalsTestCase{complex64(kExpected), true, false, ""},
+ equalsTestCase{complex128(kExpected), true, false, ""},
+
+ // Non-equal values of numeric type.
+ equalsTestCase{int(kRealPart), false, false, ""},
+ equalsTestCase{int8(kRealPart), false, false, ""},
+ equalsTestCase{int16(kRealPart), false, false, ""},
+ equalsTestCase{int32(kRealPart), false, false, ""},
+ equalsTestCase{int64(kRealPart), false, false, ""},
+ equalsTestCase{uint(kRealPart), false, false, ""},
+ equalsTestCase{uint8(kRealPart), false, false, ""},
+ equalsTestCase{uint16(kRealPart), false, false, ""},
+ equalsTestCase{uint32(kRealPart), false, false, ""},
+ equalsTestCase{uint64(kRealPart), false, false, ""},
+ equalsTestCase{float32(kRealPart), false, false, ""},
+ equalsTestCase{float64(kRealPart), false, false, ""},
+ equalsTestCase{complex64(kRealPart), false, false, ""},
+ equalsTestCase{complex64(kRealPart + kImagPart + 0.5), false, false, ""},
+ equalsTestCase{complex64(kRealPart + kImagPart + 0.5i), false, false, ""},
+ equalsTestCase{complex128(kRealPart), false, false, ""},
+ equalsTestCase{complex128(kRealPart + kImagPart + 0.5), false, false, ""},
+ equalsTestCase{complex128(kRealPart + kImagPart + 0.5i), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Arrays
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) ArrayOfComparableType() {
+ expected := [3]uint{17, 19, 23}
+
+ matcher := Equals(expected)
+ ExpectEq("[17 19 23]", matcher.Description())
+
+ // To defeat constant de-duping by the compiler.
+ makeArray := func(i, j, k uint) [3]uint { return [3]uint{i, j, k} }
+
+ type arrayAlias [3]uint
+ type uintAlias uint
+
+ cases := []equalsTestCase{
+ // Correct types, equal.
+ equalsTestCase{expected, true, false, ""},
+ equalsTestCase{[3]uint{17, 19, 23}, true, false, ""},
+ equalsTestCase{makeArray(17, 19, 23), true, false, ""},
+
+ // Correct types, not equal.
+ equalsTestCase{[3]uint{0, 0, 0}, false, false, ""},
+ equalsTestCase{[3]uint{18, 19, 23}, false, false, ""},
+ equalsTestCase{[3]uint{17, 20, 23}, false, false, ""},
+ equalsTestCase{[3]uint{17, 19, 22}, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not [3]uint"},
+ equalsTestCase{bool(false), false, true, "which is not [3]uint"},
+ equalsTestCase{int(0), false, true, "which is not [3]uint"},
+ equalsTestCase{int8(0), false, true, "which is not [3]uint"},
+ equalsTestCase{int16(0), false, true, "which is not [3]uint"},
+ equalsTestCase{int32(0), false, true, "which is not [3]uint"},
+ equalsTestCase{int64(0), false, true, "which is not [3]uint"},
+ equalsTestCase{uint(0), false, true, "which is not [3]uint"},
+ equalsTestCase{uint8(0), false, true, "which is not [3]uint"},
+ equalsTestCase{uint16(0), false, true, "which is not [3]uint"},
+ equalsTestCase{uint32(0), false, true, "which is not [3]uint"},
+ equalsTestCase{uint64(0), false, true, "which is not [3]uint"},
+ equalsTestCase{true, false, true, "which is not [3]uint"},
+ equalsTestCase{[...]int{}, false, true, "which is not [3]uint"},
+ equalsTestCase{func() {}, false, true, "which is not [3]uint"},
+ equalsTestCase{map[int]int{}, false, true, "which is not [3]uint"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not [3]uint"},
+ equalsTestCase{[2]uint{17, 19}, false, true, "which is not [3]uint"},
+ equalsTestCase{[4]uint{17, 19, 23, 0}, false, true, "which is not [3]uint"},
+ equalsTestCase{arrayAlias{17, 19, 23}, false, true, "which is not [3]uint"},
+ equalsTestCase{[3]uintAlias{17, 19, 23}, false, true, "which is not [3]uint"},
+ equalsTestCase{[3]int32{17, 19, 23}, false, true, "which is not [3]uint"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ArrayOfNonComparableType() {
+ type nonComparableArray [2]map[string]string
+ f := func() {
+ ExpectEq(nonComparableArray{}, nonComparableArray{})
+ }
+
+ ExpectThat(f, Panics(MatchesRegexp("uncomparable.*nonComparableArray")))
+}
+
+////////////////////////////////////////////////////////////////////////
+// chan
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilChan() {
+ var nilChan1 chan int
+ var nilChan2 chan int
+ var nilChan3 chan uint
+ var nonNilChan1 chan int = make(chan int)
+ var nonNilChan2 chan uint = make(chan uint)
+
+ matcher := Equals(nilChan1)
+ ExpectEq("<nil>", matcher.Description())
+
+ cases := []equalsTestCase{
+ // int channels
+ equalsTestCase{nilChan1, true, false, ""},
+ equalsTestCase{nilChan2, true, false, ""},
+ equalsTestCase{nonNilChan1, false, false, ""},
+
+ // uint channels
+ equalsTestCase{nilChan3, false, true, "which is not a chan int"},
+ equalsTestCase{nonNilChan2, false, true, "which is not a chan int"},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a chan int"},
+ equalsTestCase{bool(false), false, true, "which is not a chan int"},
+ equalsTestCase{int(0), false, true, "which is not a chan int"},
+ equalsTestCase{int8(0), false, true, "which is not a chan int"},
+ equalsTestCase{int16(0), false, true, "which is not a chan int"},
+ equalsTestCase{int32(0), false, true, "which is not a chan int"},
+ equalsTestCase{int64(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint8(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint16(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint32(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint64(0), false, true, "which is not a chan int"},
+ equalsTestCase{true, false, true, "which is not a chan int"},
+ equalsTestCase{[...]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{func() {}, false, true, "which is not a chan int"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{&someInt, false, true, "which is not a chan int"},
+ equalsTestCase{[]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{"taco", false, true, "which is not a chan int"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a chan int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilChan() {
+ var nilChan1 chan int
+ var nilChan2 chan uint
+ var nonNilChan1 chan int = make(chan int)
+ var nonNilChan2 chan int = make(chan int)
+ var nonNilChan3 chan uint = make(chan uint)
+
+ matcher := Equals(nonNilChan1)
+ ExpectEq(fmt.Sprintf("%v", nonNilChan1), matcher.Description())
+
+ cases := []equalsTestCase{
+ // int channels
+ equalsTestCase{nonNilChan1, true, false, ""},
+ equalsTestCase{nonNilChan2, false, false, ""},
+ equalsTestCase{nilChan1, false, false, ""},
+
+ // uint channels
+ equalsTestCase{nilChan2, false, true, "which is not a chan int"},
+ equalsTestCase{nonNilChan3, false, true, "which is not a chan int"},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a chan int"},
+ equalsTestCase{bool(false), false, true, "which is not a chan int"},
+ equalsTestCase{int(0), false, true, "which is not a chan int"},
+ equalsTestCase{int8(0), false, true, "which is not a chan int"},
+ equalsTestCase{int16(0), false, true, "which is not a chan int"},
+ equalsTestCase{int32(0), false, true, "which is not a chan int"},
+ equalsTestCase{int64(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint8(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint16(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint32(0), false, true, "which is not a chan int"},
+ equalsTestCase{uint64(0), false, true, "which is not a chan int"},
+ equalsTestCase{true, false, true, "which is not a chan int"},
+ equalsTestCase{[...]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{func() {}, false, true, "which is not a chan int"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{&someInt, false, true, "which is not a chan int"},
+ equalsTestCase{[]int{}, false, true, "which is not a chan int"},
+ equalsTestCase{"taco", false, true, "which is not a chan int"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a chan int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) ChanDirection() {
+ var chan1 chan<- int
+ var chan2 <-chan int
+ var chan3 chan int
+
+ matcher := Equals(chan1)
+ ExpectEq(fmt.Sprintf("%v", chan1), matcher.Description())
+
+ cases := []equalsTestCase{
+ equalsTestCase{chan1, true, false, ""},
+ equalsTestCase{chan2, false, true, "which is not a chan<- int"},
+ equalsTestCase{chan3, false, true, "which is not a chan<- int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// func
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) Functions() {
+ func1 := func() {}
+ func2 := func() {}
+ func3 := func(x int) {}
+
+ matcher := Equals(func1)
+ ExpectEq(fmt.Sprintf("%v", func1), matcher.Description())
+
+ cases := []equalsTestCase{
+ // Functions.
+ equalsTestCase{func1, true, false, ""},
+ equalsTestCase{func2, false, false, ""},
+ equalsTestCase{func3, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a function"},
+ equalsTestCase{bool(false), false, true, "which is not a function"},
+ equalsTestCase{int(0), false, true, "which is not a function"},
+ equalsTestCase{int8(0), false, true, "which is not a function"},
+ equalsTestCase{int16(0), false, true, "which is not a function"},
+ equalsTestCase{int32(0), false, true, "which is not a function"},
+ equalsTestCase{int64(0), false, true, "which is not a function"},
+ equalsTestCase{uint(0), false, true, "which is not a function"},
+ equalsTestCase{uint8(0), false, true, "which is not a function"},
+ equalsTestCase{uint16(0), false, true, "which is not a function"},
+ equalsTestCase{uint32(0), false, true, "which is not a function"},
+ equalsTestCase{uint64(0), false, true, "which is not a function"},
+ equalsTestCase{true, false, true, "which is not a function"},
+ equalsTestCase{[...]int{}, false, true, "which is not a function"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a function"},
+ equalsTestCase{&someInt, false, true, "which is not a function"},
+ equalsTestCase{[]int{}, false, true, "which is not a function"},
+ equalsTestCase{"taco", false, true, "which is not a function"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a function"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// map
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilMap() {
+ var nilMap1 map[int]int
+ var nilMap2 map[int]int
+ var nilMap3 map[int]uint
+ var nonNilMap1 map[int]int = make(map[int]int)
+ var nonNilMap2 map[int]uint = make(map[int]uint)
+
+ matcher := Equals(nilMap1)
+ ExpectEq("map[]", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nilMap1, true, false, ""},
+ equalsTestCase{nilMap2, true, false, ""},
+ equalsTestCase{nilMap3, true, false, ""},
+ equalsTestCase{nonNilMap1, false, false, ""},
+ equalsTestCase{nonNilMap2, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a map"},
+ equalsTestCase{bool(false), false, true, "which is not a map"},
+ equalsTestCase{int(0), false, true, "which is not a map"},
+ equalsTestCase{int8(0), false, true, "which is not a map"},
+ equalsTestCase{int16(0), false, true, "which is not a map"},
+ equalsTestCase{int32(0), false, true, "which is not a map"},
+ equalsTestCase{int64(0), false, true, "which is not a map"},
+ equalsTestCase{uint(0), false, true, "which is not a map"},
+ equalsTestCase{uint8(0), false, true, "which is not a map"},
+ equalsTestCase{uint16(0), false, true, "which is not a map"},
+ equalsTestCase{uint32(0), false, true, "which is not a map"},
+ equalsTestCase{uint64(0), false, true, "which is not a map"},
+ equalsTestCase{true, false, true, "which is not a map"},
+ equalsTestCase{[...]int{}, false, true, "which is not a map"},
+ equalsTestCase{func() {}, false, true, "which is not a map"},
+ equalsTestCase{&someInt, false, true, "which is not a map"},
+ equalsTestCase{[]int{}, false, true, "which is not a map"},
+ equalsTestCase{"taco", false, true, "which is not a map"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a map"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilMap() {
+ var nilMap1 map[int]int
+ var nilMap2 map[int]uint
+ var nonNilMap1 map[int]int = make(map[int]int)
+ var nonNilMap2 map[int]int = make(map[int]int)
+ var nonNilMap3 map[int]uint = make(map[int]uint)
+
+ matcher := Equals(nonNilMap1)
+ ExpectEq("map[]", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nonNilMap1, true, false, ""},
+ equalsTestCase{nonNilMap2, false, false, ""},
+ equalsTestCase{nonNilMap3, false, false, ""},
+ equalsTestCase{nilMap1, false, false, ""},
+ equalsTestCase{nilMap2, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a map"},
+ equalsTestCase{bool(false), false, true, "which is not a map"},
+ equalsTestCase{int(0), false, true, "which is not a map"},
+ equalsTestCase{int8(0), false, true, "which is not a map"},
+ equalsTestCase{int16(0), false, true, "which is not a map"},
+ equalsTestCase{int32(0), false, true, "which is not a map"},
+ equalsTestCase{int64(0), false, true, "which is not a map"},
+ equalsTestCase{uint(0), false, true, "which is not a map"},
+ equalsTestCase{uint8(0), false, true, "which is not a map"},
+ equalsTestCase{uint16(0), false, true, "which is not a map"},
+ equalsTestCase{uint32(0), false, true, "which is not a map"},
+ equalsTestCase{uint64(0), false, true, "which is not a map"},
+ equalsTestCase{true, false, true, "which is not a map"},
+ equalsTestCase{[...]int{}, false, true, "which is not a map"},
+ equalsTestCase{func() {}, false, true, "which is not a map"},
+ equalsTestCase{&someInt, false, true, "which is not a map"},
+ equalsTestCase{[]int{}, false, true, "which is not a map"},
+ equalsTestCase{"taco", false, true, "which is not a map"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a map"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Pointers
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilPointer() {
+ var someInt int = 17
+ var someUint uint = 17
+
+ var nilInt1 *int
+ var nilInt2 *int
+ var nilUint *uint
+ var nonNilInt *int = &someInt
+ var nonNilUint *uint = &someUint
+
+ matcher := Equals(nilInt1)
+ ExpectEq("<nil>", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nilInt1, true, false, ""},
+ equalsTestCase{nilInt2, true, false, ""},
+ equalsTestCase{nonNilInt, false, false, ""},
+
+ // Incorrect type.
+ equalsTestCase{nilUint, false, true, "which is not a *int"},
+ equalsTestCase{nonNilUint, false, true, "which is not a *int"},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a *int"},
+ equalsTestCase{bool(false), false, true, "which is not a *int"},
+ equalsTestCase{int(0), false, true, "which is not a *int"},
+ equalsTestCase{int8(0), false, true, "which is not a *int"},
+ equalsTestCase{int16(0), false, true, "which is not a *int"},
+ equalsTestCase{int32(0), false, true, "which is not a *int"},
+ equalsTestCase{int64(0), false, true, "which is not a *int"},
+ equalsTestCase{uint(0), false, true, "which is not a *int"},
+ equalsTestCase{uint8(0), false, true, "which is not a *int"},
+ equalsTestCase{uint16(0), false, true, "which is not a *int"},
+ equalsTestCase{uint32(0), false, true, "which is not a *int"},
+ equalsTestCase{uint64(0), false, true, "which is not a *int"},
+ equalsTestCase{true, false, true, "which is not a *int"},
+ equalsTestCase{[...]int{}, false, true, "which is not a *int"},
+ equalsTestCase{func() {}, false, true, "which is not a *int"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a *int"},
+ equalsTestCase{[]int{}, false, true, "which is not a *int"},
+ equalsTestCase{"taco", false, true, "which is not a *int"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a *int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilPointer() {
+ var someInt int = 17
+ var someOtherInt int = 17
+ var someUint uint = 17
+
+ var nilInt *int
+ var nilUint *uint
+ var nonNilInt1 *int = &someInt
+ var nonNilInt2 *int = &someOtherInt
+ var nonNilUint *uint = &someUint
+
+ matcher := Equals(nonNilInt1)
+ ExpectEq(fmt.Sprintf("%v", nonNilInt1), matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nonNilInt1, true, false, ""},
+ equalsTestCase{nonNilInt2, false, false, ""},
+ equalsTestCase{nilInt, false, false, ""},
+
+ // Incorrect type.
+ equalsTestCase{nilUint, false, true, "which is not a *int"},
+ equalsTestCase{nonNilUint, false, true, "which is not a *int"},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a *int"},
+ equalsTestCase{bool(false), false, true, "which is not a *int"},
+ equalsTestCase{int(0), false, true, "which is not a *int"},
+ equalsTestCase{int8(0), false, true, "which is not a *int"},
+ equalsTestCase{int16(0), false, true, "which is not a *int"},
+ equalsTestCase{int32(0), false, true, "which is not a *int"},
+ equalsTestCase{int64(0), false, true, "which is not a *int"},
+ equalsTestCase{uint(0), false, true, "which is not a *int"},
+ equalsTestCase{uint8(0), false, true, "which is not a *int"},
+ equalsTestCase{uint16(0), false, true, "which is not a *int"},
+ equalsTestCase{uint32(0), false, true, "which is not a *int"},
+ equalsTestCase{uint64(0), false, true, "which is not a *int"},
+ equalsTestCase{true, false, true, "which is not a *int"},
+ equalsTestCase{[...]int{}, false, true, "which is not a *int"},
+ equalsTestCase{func() {}, false, true, "which is not a *int"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a *int"},
+ equalsTestCase{[]int{}, false, true, "which is not a *int"},
+ equalsTestCase{"taco", false, true, "which is not a *int"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a *int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Slices
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilSlice() {
+ var nilInt1 []int
+ var nilInt2 []int
+ var nilUint []uint
+
+ var nonNilInt []int = make([]int, 0)
+ var nonNilUint []uint = make([]uint, 0)
+
+ matcher := Equals(nilInt1)
+ ExpectEq("[]", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nilInt1, true, false, ""},
+ equalsTestCase{nilInt2, true, false, ""},
+ equalsTestCase{nonNilInt, false, false, ""},
+
+ // Incorrect type.
+ equalsTestCase{nilUint, false, true, "which is not a []int"},
+ equalsTestCase{nonNilUint, false, true, "which is not a []int"},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a []int"},
+ equalsTestCase{bool(false), false, true, "which is not a []int"},
+ equalsTestCase{int(0), false, true, "which is not a []int"},
+ equalsTestCase{int8(0), false, true, "which is not a []int"},
+ equalsTestCase{int16(0), false, true, "which is not a []int"},
+ equalsTestCase{int32(0), false, true, "which is not a []int"},
+ equalsTestCase{int64(0), false, true, "which is not a []int"},
+ equalsTestCase{uint(0), false, true, "which is not a []int"},
+ equalsTestCase{uint8(0), false, true, "which is not a []int"},
+ equalsTestCase{uint16(0), false, true, "which is not a []int"},
+ equalsTestCase{uint32(0), false, true, "which is not a []int"},
+ equalsTestCase{uint64(0), false, true, "which is not a []int"},
+ equalsTestCase{true, false, true, "which is not a []int"},
+ equalsTestCase{[...]int{}, false, true, "which is not a []int"},
+ equalsTestCase{func() {}, false, true, "which is not a []int"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a []int"},
+ equalsTestCase{"taco", false, true, "which is not a []int"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a []int"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilSlice() {
+ nonNil := make([]int, 0)
+ f := func() { Equals(nonNil) }
+ ExpectThat(f, Panics(HasSubstr("non-nil slice")))
+}
+
+////////////////////////////////////////////////////////////////////////
+// string
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) String() {
+ partial := "taco"
+ expected := fmt.Sprintf("%s%d", partial, 1)
+
+ matcher := Equals(expected)
+ ExpectEq("taco1", matcher.Description())
+
+ type stringAlias string
+
+ cases := []equalsTestCase{
+ // Correct types.
+ equalsTestCase{"taco1", true, false, ""},
+ equalsTestCase{"taco" + "1", true, false, ""},
+ equalsTestCase{expected, true, false, ""},
+ equalsTestCase{stringAlias("taco1"), true, false, ""},
+
+ equalsTestCase{"", false, false, ""},
+ equalsTestCase{"taco", false, false, ""},
+ equalsTestCase{"taco1\x00", false, false, ""},
+ equalsTestCase{"taco2", false, false, ""},
+ equalsTestCase{stringAlias("taco2"), false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a string"},
+ equalsTestCase{bool(false), false, true, "which is not a string"},
+ equalsTestCase{int(0), false, true, "which is not a string"},
+ equalsTestCase{int8(0), false, true, "which is not a string"},
+ equalsTestCase{int16(0), false, true, "which is not a string"},
+ equalsTestCase{int32(0), false, true, "which is not a string"},
+ equalsTestCase{int64(0), false, true, "which is not a string"},
+ equalsTestCase{uint(0), false, true, "which is not a string"},
+ equalsTestCase{uint8(0), false, true, "which is not a string"},
+ equalsTestCase{uint16(0), false, true, "which is not a string"},
+ equalsTestCase{uint32(0), false, true, "which is not a string"},
+ equalsTestCase{uint64(0), false, true, "which is not a string"},
+ equalsTestCase{true, false, true, "which is not a string"},
+ equalsTestCase{[...]int{}, false, true, "which is not a string"},
+ equalsTestCase{func() {}, false, true, "which is not a string"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a string"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a string"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) StringAlias() {
+ type stringAlias string
+
+ matcher := Equals(stringAlias("taco"))
+ ExpectEq("taco", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct types.
+ equalsTestCase{stringAlias("taco"), true, false, ""},
+ equalsTestCase{"taco", true, false, ""},
+
+ equalsTestCase{"burrito", false, false, ""},
+ equalsTestCase{stringAlias("burrito"), false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a string"},
+ equalsTestCase{bool(false), false, true, "which is not a string"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// struct
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) Struct() {
+ type someStruct struct{ foo uint }
+ f := func() { Equals(someStruct{17}) }
+ ExpectThat(f, Panics(HasSubstr("unsupported kind struct")))
+}
+
+////////////////////////////////////////////////////////////////////////
+// unsafe.Pointer
+////////////////////////////////////////////////////////////////////////
+
+func (t *EqualsTest) NilUnsafePointer() {
+ someInt := int(17)
+
+ var nilPtr1 unsafe.Pointer
+ var nilPtr2 unsafe.Pointer
+ var nonNilPtr unsafe.Pointer = unsafe.Pointer(&someInt)
+
+ matcher := Equals(nilPtr1)
+ ExpectEq("<nil>", matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nilPtr1, true, false, ""},
+ equalsTestCase{nilPtr2, true, false, ""},
+ equalsTestCase{nonNilPtr, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{bool(false), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int8(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int16(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int32(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int64(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint8(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint16(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint32(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint64(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{true, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{[...]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{make(chan int), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{func() {}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{&someInt, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{[]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{"taco", false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a unsafe.Pointer"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *EqualsTest) NonNilUnsafePointer() {
+ someInt := int(17)
+ someOtherInt := int(17)
+
+ var nilPtr unsafe.Pointer
+ var nonNilPtr1 unsafe.Pointer = unsafe.Pointer(&someInt)
+ var nonNilPtr2 unsafe.Pointer = unsafe.Pointer(&someOtherInt)
+
+ matcher := Equals(nonNilPtr1)
+ ExpectEq(fmt.Sprintf("%v", nonNilPtr1), matcher.Description())
+
+ cases := []equalsTestCase{
+ // Correct type.
+ equalsTestCase{nonNilPtr1, true, false, ""},
+ equalsTestCase{nonNilPtr2, false, false, ""},
+ equalsTestCase{nilPtr, false, false, ""},
+
+ // Other types.
+ equalsTestCase{0, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{bool(false), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int8(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int16(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int32(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{int64(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint8(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint16(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint32(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{uint64(0), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{true, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{[...]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{make(chan int), false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{func() {}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{map[int]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{&someInt, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{[]int{}, false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{"taco", false, true, "which is not a unsafe.Pointer"},
+ equalsTestCase{equalsTestCase{}, false, true, "which is not a unsafe.Pointer"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/error.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/error.go
new file mode 100644
index 00000000000..8a078e36d86
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/error.go
@@ -0,0 +1,51 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+// Error returns a matcher that matches non-nil values implementing the
+// built-in error interface for which the return value of Error() matches the
+// supplied matcher.
+//
+// For example:
+//
+// err := errors.New("taco burrito")
+//
+// Error(Equals("taco burrito")) // matches err
+// Error(HasSubstr("taco")) // matches err
+// Error(HasSubstr("enchilada")) // doesn't match err
+//
+func Error(m Matcher) Matcher {
+ return &errorMatcher{m}
+}
+
+type errorMatcher struct {
+ wrappedMatcher Matcher
+}
+
+func (m *errorMatcher) Description() string {
+ return "error " + m.wrappedMatcher.Description()
+}
+
+func (m *errorMatcher) Matches(c interface{}) error {
+ // Make sure that c is an error.
+ e, ok := c.(error)
+ if !ok {
+ return NewFatalError("which is not an error")
+ }
+
+ // Pass on the error text to the wrapped matcher.
+ return m.wrappedMatcher.Matches(e.Error())
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/error_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/error_test.go
new file mode 100644
index 00000000000..f92167cad1d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/error_test.go
@@ -0,0 +1,92 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "errors"
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type ErrorTest struct {
+ matcherCalled bool
+ suppliedCandidate interface{}
+ wrappedError error
+
+ matcher Matcher
+}
+
+func init() { RegisterTestSuite(&ErrorTest{}) }
+
+func (t *ErrorTest) SetUp(i *TestInfo) {
+ wrapped := &fakeMatcher{
+ func(c interface{}) error {
+ t.matcherCalled = true
+ t.suppliedCandidate = c
+ return t.wrappedError
+ },
+ "is foo",
+ }
+
+ t.matcher = Error(wrapped)
+}
+
+func isFatal(err error) bool {
+ _, isFatal := err.(*FatalError)
+ return isFatal
+}
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *ErrorTest) Description() {
+ ExpectThat(t.matcher.Description(), Equals("error is foo"))
+}
+
+func (t *ErrorTest) CandidateIsNil() {
+ err := t.matcher.Matches(nil)
+
+ ExpectThat(t.matcherCalled, Equals(false))
+ ExpectThat(err.Error(), Equals("which is not an error"))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *ErrorTest) CandidateIsString() {
+ err := t.matcher.Matches("taco")
+
+ ExpectThat(t.matcherCalled, Equals(false))
+ ExpectThat(err.Error(), Equals("which is not an error"))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *ErrorTest) CallsWrappedMatcher() {
+ candidate := errors.New("taco")
+ t.matcher.Matches(candidate)
+
+ ExpectThat(t.matcherCalled, Equals(true))
+ ExpectThat(t.suppliedCandidate, Equals("taco"))
+}
+
+func (t *ErrorTest) ReturnsWrappedMatcherResult() {
+ t.wrappedError = errors.New("burrito")
+ err := t.matcher.Matches(errors.New(""))
+ ExpectThat(err, Equals(t.wrappedError))
+}
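The suite above builds its matcher around a fakeMatcher helper defined in another file of this package. A hypothetical sketch of such a stub, assuming only the two-method Matcher interface exercised above; the field order matches the positional literal used in SetUp:

// Stand-in for the fakeMatcher helper; the real definition lives elsewhere
// in this test package.
type fakeMatcher struct {
	matches     func(interface{}) error // invoked by Matches
	description string                  // returned by Description
}

func (m *fakeMatcher) Matches(c interface{}) error { return m.matches(c) }
func (m *fakeMatcher) Description() string         { return m.description }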
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go
new file mode 100644
index 00000000000..4b9d103a381
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go
@@ -0,0 +1,39 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// GreaterOrEqual returns a matcher that matches integer, floating point, or
+// string values v such that v >= x. Comparison is not defined between numeric
+// and string types, but is defined between all integer and floating point
+// types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// GreaterOrEqual will panic.
+func GreaterOrEqual(x interface{}) Matcher {
+ desc := fmt.Sprintf("greater than or equal to %v", x)
+
+ // Special case: make it clear that strings are strings.
+ if reflect.TypeOf(x).Kind() == reflect.String {
+ desc = fmt.Sprintf("greater than or equal to \"%s\"", x)
+ }
+
+ return transformDescription(Not(LessThan(x)), desc)
+}
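GreaterOrEqual is therefore just Not(LessThan(x)) wearing a friendlier description; the cross-type numeric comparison rules live in LessThan. A small usage sketch, again using the upstream github.com/jacobsa/oglematchers import path since the internal copy above is not importable directly:

package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

func main() {
	m := oglematchers.GreaterOrEqual(150)

	fmt.Println(m.Description())         // greater than or equal to 150
	fmt.Println(m.Matches(151) == nil)   // true: 151 >= 150
	fmt.Println(m.Matches(149.9) == nil) // false: the candidate is too small
	fmt.Println(m.Matches("151"))        // which is not comparable (fatal: string vs. number)
}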
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal_test.go
new file mode 100644
index 00000000000..f5e29d1ce59
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal_test.go
@@ -0,0 +1,1101 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "math"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type GreaterOrEqualTest struct {
+}
+
+func init() { RegisterTestSuite(&GreaterOrEqualTest{}) }
+
+type geTestCase struct {
+ candidate interface{}
+ expectedResult bool
+ shouldBeFatal bool
+ expectedError string
+}
+
+func (t *GreaterOrEqualTest) checkTestCases(matcher Matcher, cases []geTestCase) {
+ for i, c := range cases {
+ err := matcher.Matches(c.candidate)
+
+ ExpectThat(
+ (err == nil),
+ Equals(c.expectedResult),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ if err == nil {
+ continue
+ }
+
+ _, isFatal := err.(*FatalError)
+ ExpectEq(
+ c.shouldBeFatal,
+ isFatal,
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ ExpectThat(
+ err,
+ Error(Equals(c.expectedError)),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// Bad candidates and arguments
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterOrEqualTest) IntegerCandidateBadTypes() {
+ matcher := GreaterOrEqual(int(-150))
+
+ cases := []geTestCase{
+ geTestCase{true, false, true, "which is not comparable"},
+ geTestCase{complex64(-151), false, true, "which is not comparable"},
+ geTestCase{complex128(-151), false, true, "which is not comparable"},
+ geTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ geTestCase{make(chan int), false, true, "which is not comparable"},
+ geTestCase{func() {}, false, true, "which is not comparable"},
+ geTestCase{map[int]int{}, false, true, "which is not comparable"},
+ geTestCase{&geTestCase{}, false, true, "which is not comparable"},
+ geTestCase{make([]int, 0), false, true, "which is not comparable"},
+ geTestCase{"-151", false, true, "which is not comparable"},
+ geTestCase{geTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) FloatCandidateBadTypes() {
+ matcher := GreaterOrEqual(float32(-150))
+
+ cases := []geTestCase{
+ geTestCase{true, false, true, "which is not comparable"},
+ geTestCase{complex64(-151), false, true, "which is not comparable"},
+ geTestCase{complex128(-151), false, true, "which is not comparable"},
+ geTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ geTestCase{make(chan int), false, true, "which is not comparable"},
+ geTestCase{func() {}, false, true, "which is not comparable"},
+ geTestCase{map[int]int{}, false, true, "which is not comparable"},
+ geTestCase{&geTestCase{}, false, true, "which is not comparable"},
+ geTestCase{make([]int, 0), false, true, "which is not comparable"},
+ geTestCase{"-151", false, true, "which is not comparable"},
+ geTestCase{geTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) StringCandidateBadTypes() {
+ matcher := GreaterOrEqual("17")
+
+ cases := []geTestCase{
+ geTestCase{true, false, true, "which is not comparable"},
+ geTestCase{int(0), false, true, "which is not comparable"},
+ geTestCase{int8(0), false, true, "which is not comparable"},
+ geTestCase{int16(0), false, true, "which is not comparable"},
+ geTestCase{int32(0), false, true, "which is not comparable"},
+ geTestCase{int64(0), false, true, "which is not comparable"},
+ geTestCase{uint(0), false, true, "which is not comparable"},
+ geTestCase{uint8(0), false, true, "which is not comparable"},
+ geTestCase{uint16(0), false, true, "which is not comparable"},
+ geTestCase{uint32(0), false, true, "which is not comparable"},
+ geTestCase{uint64(0), false, true, "which is not comparable"},
+ geTestCase{float32(0), false, true, "which is not comparable"},
+ geTestCase{float64(0), false, true, "which is not comparable"},
+ geTestCase{complex64(-151), false, true, "which is not comparable"},
+ geTestCase{complex128(-151), false, true, "which is not comparable"},
+ geTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ geTestCase{make(chan int), false, true, "which is not comparable"},
+ geTestCase{func() {}, false, true, "which is not comparable"},
+ geTestCase{map[int]int{}, false, true, "which is not comparable"},
+ geTestCase{&geTestCase{}, false, true, "which is not comparable"},
+ geTestCase{make([]int, 0), false, true, "which is not comparable"},
+ geTestCase{geTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) BadArgument() {
+ panicked := false
+
+ defer func() {
+ ExpectThat(panicked, Equals(true))
+ }()
+
+ defer func() {
+ if r := recover(); r != nil {
+ panicked = true
+ }
+ }()
+
+ GreaterOrEqual(complex128(0))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterOrEqualTest) NegativeIntegerLiteral() {
+ matcher := GreaterOrEqual(-150)
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to -150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-(1 << 30), false, false, ""},
+ geTestCase{-151, false, false, ""},
+ geTestCase{-150, true, false, ""},
+ geTestCase{0, true, false, ""},
+ geTestCase{17, true, false, ""},
+
+ geTestCase{int(-(1 << 30)), false, false, ""},
+ geTestCase{int(-151), false, false, ""},
+ geTestCase{int(-150), true, false, ""},
+ geTestCase{int(0), true, false, ""},
+ geTestCase{int(17), true, false, ""},
+
+ geTestCase{int8(-127), true, false, ""},
+ geTestCase{int8(0), true, false, ""},
+ geTestCase{int8(17), true, false, ""},
+
+ geTestCase{int16(-(1 << 14)), false, false, ""},
+ geTestCase{int16(-151), false, false, ""},
+ geTestCase{int16(-150), true, false, ""},
+ geTestCase{int16(0), true, false, ""},
+ geTestCase{int16(17), true, false, ""},
+
+ geTestCase{int32(-(1 << 30)), false, false, ""},
+ geTestCase{int32(-151), false, false, ""},
+ geTestCase{int32(-150), true, false, ""},
+ geTestCase{int32(0), true, false, ""},
+ geTestCase{int32(17), true, false, ""},
+
+ geTestCase{int64(-(1 << 30)), false, false, ""},
+ geTestCase{int64(-151), false, false, ""},
+ geTestCase{int64(-150), true, false, ""},
+ geTestCase{int64(0), true, false, ""},
+ geTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint((1 << 32) - 151), true, false, ""},
+ geTestCase{uint(0), true, false, ""},
+ geTestCase{uint(17), true, false, ""},
+
+ geTestCase{uint8(0), true, false, ""},
+ geTestCase{uint8(17), true, false, ""},
+ geTestCase{uint8(253), true, false, ""},
+
+ geTestCase{uint16((1 << 16) - 151), true, false, ""},
+ geTestCase{uint16(0), true, false, ""},
+ geTestCase{uint16(17), true, false, ""},
+
+ geTestCase{uint32((1 << 32) - 151), true, false, ""},
+ geTestCase{uint32(0), true, false, ""},
+ geTestCase{uint32(17), true, false, ""},
+
+ geTestCase{uint64((1 << 64) - 151), true, false, ""},
+ geTestCase{uint64(0), true, false, ""},
+ geTestCase{uint64(17), true, false, ""},
+
+ geTestCase{uintptr((1 << 64) - 151), true, false, ""},
+ geTestCase{uintptr(0), true, false, ""},
+ geTestCase{uintptr(17), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-(1 << 30)), false, false, ""},
+ geTestCase{float32(-151), false, false, ""},
+ geTestCase{float32(-150.1), false, false, ""},
+ geTestCase{float32(-150), true, false, ""},
+ geTestCase{float32(-149.9), true, false, ""},
+ geTestCase{float32(0), true, false, ""},
+ geTestCase{float32(17), true, false, ""},
+ geTestCase{float32(160), true, false, ""},
+
+ geTestCase{float64(-(1 << 30)), false, false, ""},
+ geTestCase{float64(-151), false, false, ""},
+ geTestCase{float64(-150.1), false, false, ""},
+ geTestCase{float64(-150), true, false, ""},
+ geTestCase{float64(-149.9), true, false, ""},
+ geTestCase{float64(0), true, false, ""},
+ geTestCase{float64(17), true, false, ""},
+ geTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) ZeroIntegerLiteral() {
+ matcher := GreaterOrEqual(0)
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 0"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-(1 << 30), false, false, ""},
+ geTestCase{-1, false, false, ""},
+ geTestCase{0, true, false, ""},
+ geTestCase{1, true, false, ""},
+ geTestCase{17, true, false, ""},
+ geTestCase{(1 << 30), true, false, ""},
+
+ geTestCase{int(-(1 << 30)), false, false, ""},
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(0), true, false, ""},
+ geTestCase{int(1), true, false, ""},
+ geTestCase{int(17), true, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(0), true, false, ""},
+ geTestCase{int8(1), true, false, ""},
+
+ geTestCase{int16(-(1 << 14)), false, false, ""},
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(0), true, false, ""},
+ geTestCase{int16(1), true, false, ""},
+ geTestCase{int16(17), true, false, ""},
+
+ geTestCase{int32(-(1 << 30)), false, false, ""},
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(0), true, false, ""},
+ geTestCase{int32(1), true, false, ""},
+ geTestCase{int32(17), true, false, ""},
+
+ geTestCase{int64(-(1 << 30)), false, false, ""},
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(0), true, false, ""},
+ geTestCase{int64(1), true, false, ""},
+ geTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint((1 << 32) - 1), true, false, ""},
+ geTestCase{uint(0), true, false, ""},
+ geTestCase{uint(17), true, false, ""},
+
+ geTestCase{uint8(0), true, false, ""},
+ geTestCase{uint8(17), true, false, ""},
+ geTestCase{uint8(253), true, false, ""},
+
+ geTestCase{uint16((1 << 16) - 1), true, false, ""},
+ geTestCase{uint16(0), true, false, ""},
+ geTestCase{uint16(17), true, false, ""},
+
+ geTestCase{uint32((1 << 32) - 1), true, false, ""},
+ geTestCase{uint32(0), true, false, ""},
+ geTestCase{uint32(17), true, false, ""},
+
+ geTestCase{uint64((1 << 64) - 1), true, false, ""},
+ geTestCase{uint64(0), true, false, ""},
+ geTestCase{uint64(17), true, false, ""},
+
+ geTestCase{uintptr((1 << 64) - 1), true, false, ""},
+ geTestCase{uintptr(0), true, false, ""},
+ geTestCase{uintptr(17), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-(1 << 30)), false, false, ""},
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(-0.1), false, false, ""},
+ geTestCase{float32(-0.0), true, false, ""},
+ geTestCase{float32(0), true, false, ""},
+ geTestCase{float32(0.1), true, false, ""},
+ geTestCase{float32(17), true, false, ""},
+ geTestCase{float32(160), true, false, ""},
+
+ geTestCase{float64(-(1 << 30)), false, false, ""},
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(-0.1), false, false, ""},
+ geTestCase{float64(-0), true, false, ""},
+ geTestCase{float64(0), true, false, ""},
+ geTestCase{float64(17), true, false, ""},
+ geTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) PositiveIntegerLiteral() {
+ matcher := GreaterOrEqual(150)
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{149, false, false, ""},
+ geTestCase{150, true, false, ""},
+ geTestCase{151, true, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(149), false, false, ""},
+ geTestCase{int(150), true, false, ""},
+ geTestCase{int(151), true, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(0), false, false, ""},
+ geTestCase{int8(17), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(149), false, false, ""},
+ geTestCase{int16(150), true, false, ""},
+ geTestCase{int16(151), true, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(149), false, false, ""},
+ geTestCase{int32(150), true, false, ""},
+ geTestCase{int32(151), true, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(149), false, false, ""},
+ geTestCase{int64(150), true, false, ""},
+ geTestCase{int64(151), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(149), false, false, ""},
+ geTestCase{uint(150), true, false, ""},
+ geTestCase{uint(151), true, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(127), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(149), false, false, ""},
+ geTestCase{uint16(150), true, false, ""},
+ geTestCase{uint16(151), true, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(149), false, false, ""},
+ geTestCase{uint32(150), true, false, ""},
+ geTestCase{uint32(151), true, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(149), false, false, ""},
+ geTestCase{uint64(150), true, false, ""},
+ geTestCase{uint64(151), true, false, ""},
+
+ geTestCase{uintptr(0), false, false, ""},
+ geTestCase{uintptr(149), false, false, ""},
+ geTestCase{uintptr(150), true, false, ""},
+ geTestCase{uintptr(151), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(149), false, false, ""},
+ geTestCase{float32(149.9), false, false, ""},
+ geTestCase{float32(150), true, false, ""},
+ geTestCase{float32(150.1), true, false, ""},
+ geTestCase{float32(151), true, false, ""},
+
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(149), false, false, ""},
+ geTestCase{float64(149.9), false, false, ""},
+ geTestCase{float64(150), true, false, ""},
+ geTestCase{float64(150.1), true, false, ""},
+ geTestCase{float64(151), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Float literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterOrEqualTest) NegativeFloatLiteral() {
+ matcher := GreaterOrEqual(-150.1)
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to -150.1"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-(1 << 30), false, false, ""},
+ geTestCase{-151, false, false, ""},
+ geTestCase{-150, true, false, ""},
+ geTestCase{0, true, false, ""},
+ geTestCase{17, true, false, ""},
+
+ geTestCase{int(-(1 << 30)), false, false, ""},
+ geTestCase{int(-151), false, false, ""},
+ geTestCase{int(-150), true, false, ""},
+ geTestCase{int(0), true, false, ""},
+ geTestCase{int(17), true, false, ""},
+
+ geTestCase{int8(-127), true, false, ""},
+ geTestCase{int8(0), true, false, ""},
+ geTestCase{int8(17), true, false, ""},
+
+ geTestCase{int16(-(1 << 14)), false, false, ""},
+ geTestCase{int16(-151), false, false, ""},
+ geTestCase{int16(-150), true, false, ""},
+ geTestCase{int16(0), true, false, ""},
+ geTestCase{int16(17), true, false, ""},
+
+ geTestCase{int32(-(1 << 30)), false, false, ""},
+ geTestCase{int32(-151), false, false, ""},
+ geTestCase{int32(-150), true, false, ""},
+ geTestCase{int32(0), true, false, ""},
+ geTestCase{int32(17), true, false, ""},
+
+ geTestCase{int64(-(1 << 30)), false, false, ""},
+ geTestCase{int64(-151), false, false, ""},
+ geTestCase{int64(-150), true, false, ""},
+ geTestCase{int64(0), true, false, ""},
+ geTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint((1 << 32) - 151), true, false, ""},
+ geTestCase{uint(0), true, false, ""},
+ geTestCase{uint(17), true, false, ""},
+
+ geTestCase{uint8(0), true, false, ""},
+ geTestCase{uint8(17), true, false, ""},
+ geTestCase{uint8(253), true, false, ""},
+
+ geTestCase{uint16((1 << 16) - 151), true, false, ""},
+ geTestCase{uint16(0), true, false, ""},
+ geTestCase{uint16(17), true, false, ""},
+
+ geTestCase{uint32((1 << 32) - 151), true, false, ""},
+ geTestCase{uint32(0), true, false, ""},
+ geTestCase{uint32(17), true, false, ""},
+
+ geTestCase{uint64((1 << 64) - 151), true, false, ""},
+ geTestCase{uint64(0), true, false, ""},
+ geTestCase{uint64(17), true, false, ""},
+
+ geTestCase{uintptr((1 << 64) - 151), true, false, ""},
+ geTestCase{uintptr(0), true, false, ""},
+ geTestCase{uintptr(17), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-(1 << 30)), false, false, ""},
+ geTestCase{float32(-151), false, false, ""},
+ geTestCase{float32(-150.2), false, false, ""},
+ geTestCase{float32(-150.1), true, false, ""},
+ geTestCase{float32(-150), true, false, ""},
+ geTestCase{float32(0), true, false, ""},
+ geTestCase{float32(17), true, false, ""},
+ geTestCase{float32(160), true, false, ""},
+
+ geTestCase{float64(-(1 << 30)), false, false, ""},
+ geTestCase{float64(-151), false, false, ""},
+ geTestCase{float64(-150.2), false, false, ""},
+ geTestCase{float64(-150.1), true, false, ""},
+ geTestCase{float64(-150), true, false, ""},
+ geTestCase{float64(0), true, false, ""},
+ geTestCase{float64(17), true, false, ""},
+ geTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) PositiveFloatLiteral() {
+ matcher := GreaterOrEqual(149.9)
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 149.9"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{149, false, false, ""},
+ geTestCase{150, true, false, ""},
+ geTestCase{151, true, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(149), false, false, ""},
+ geTestCase{int(150), true, false, ""},
+ geTestCase{int(151), true, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(0), false, false, ""},
+ geTestCase{int8(17), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(149), false, false, ""},
+ geTestCase{int16(150), true, false, ""},
+ geTestCase{int16(151), true, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(149), false, false, ""},
+ geTestCase{int32(150), true, false, ""},
+ geTestCase{int32(151), true, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(149), false, false, ""},
+ geTestCase{int64(150), true, false, ""},
+ geTestCase{int64(151), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(149), false, false, ""},
+ geTestCase{uint(150), true, false, ""},
+ geTestCase{uint(151), true, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(127), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(149), false, false, ""},
+ geTestCase{uint16(150), true, false, ""},
+ geTestCase{uint16(151), true, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(149), false, false, ""},
+ geTestCase{uint32(150), true, false, ""},
+ geTestCase{uint32(151), true, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(149), false, false, ""},
+ geTestCase{uint64(150), true, false, ""},
+ geTestCase{uint64(151), true, false, ""},
+
+ geTestCase{uintptr(0), false, false, ""},
+ geTestCase{uintptr(149), false, false, ""},
+ geTestCase{uintptr(150), true, false, ""},
+ geTestCase{uintptr(151), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(149), false, false, ""},
+ geTestCase{float32(149.8), false, false, ""},
+ geTestCase{float32(149.9), true, false, ""},
+ geTestCase{float32(150), true, false, ""},
+ geTestCase{float32(151), true, false, ""},
+
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(149), false, false, ""},
+ geTestCase{float64(149.8), false, false, ""},
+ geTestCase{float64(149.9), true, false, ""},
+ geTestCase{float64(150), true, false, ""},
+ geTestCase{float64(151), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Subtle cases
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterOrEqualTest) Int64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterOrEqual(int64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{kTwoTo25 + 0, false, false, ""},
+ geTestCase{kTwoTo25 + 1, true, false, ""},
+ geTestCase{kTwoTo25 + 2, true, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(0), false, false, ""},
+ geTestCase{int16(32767), false, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int32(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 2), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(255), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(65535), false, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint32(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{uintptr(0), false, false, ""},
+ geTestCase{uintptr(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uintptr(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uintptr(kTwoTo25 + 2), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ geTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) Int64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterOrEqual(int64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{1 << 30, false, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(math.MaxInt32), false, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(0), false, false, ""},
+ geTestCase{int16(32767), false, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(math.MaxInt32), false, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ geTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ geTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 2), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(math.MaxUint32), false, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(255), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(65535), false, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(math.MaxUint32), false, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ geTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ geTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+
+ geTestCase{uintptr(0), false, false, ""},
+ geTestCase{uintptr(kTwoTo54 - 1), false, false, ""},
+ geTestCase{uintptr(kTwoTo54 + 0), false, false, ""},
+ geTestCase{uintptr(kTwoTo54 + 1), true, false, ""},
+ geTestCase{uintptr(kTwoTo54 + 2), true, false, ""},
+
+ // Floating point.
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) Uint64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterOrEqual(uint64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{kTwoTo25 + 0, false, false, ""},
+ geTestCase{kTwoTo25 + 1, true, false, ""},
+ geTestCase{kTwoTo25 + 2, true, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(0), false, false, ""},
+ geTestCase{int16(32767), false, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int32(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 2), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(255), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(65535), false, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint32(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+
+ geTestCase{uintptr(0), false, false, ""},
+ geTestCase{uintptr(kTwoTo25 + 0), false, false, ""},
+ geTestCase{uintptr(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uintptr(kTwoTo25 + 2), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ geTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ geTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) Uint64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterOrEqual(uint64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{-1, false, false, ""},
+ geTestCase{1 << 30, false, false, ""},
+
+ geTestCase{int(-1), false, false, ""},
+ geTestCase{int(math.MaxInt32), false, false, ""},
+
+ geTestCase{int8(-1), false, false, ""},
+ geTestCase{int8(127), false, false, ""},
+
+ geTestCase{int16(-1), false, false, ""},
+ geTestCase{int16(0), false, false, ""},
+ geTestCase{int16(32767), false, false, ""},
+
+ geTestCase{int32(-1), false, false, ""},
+ geTestCase{int32(math.MaxInt32), false, false, ""},
+
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ geTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ geTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 2), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint(0), false, false, ""},
+ geTestCase{uint(math.MaxUint32), false, false, ""},
+
+ geTestCase{uint8(0), false, false, ""},
+ geTestCase{uint8(255), false, false, ""},
+
+ geTestCase{uint16(0), false, false, ""},
+ geTestCase{uint16(65535), false, false, ""},
+
+ geTestCase{uint32(0), false, false, ""},
+ geTestCase{uint32(math.MaxUint32), false, false, ""},
+
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ geTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ geTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+
+ geTestCase{uintptr(0), false, false, ""},
+ geTestCase{uintptr(kTwoTo54 - 1), false, false, ""},
+ geTestCase{uintptr(kTwoTo54 + 0), false, false, ""},
+ geTestCase{uintptr(kTwoTo54 + 1), true, false, ""},
+ geTestCase{uintptr(kTwoTo54 + 2), true, false, ""},
+
+ // Floating point.
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) Float32AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterOrEqual(float32(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 3.3554432e+07"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo25 - 2), false, false, ""},
+ geTestCase{int64(kTwoTo25 - 1), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 2), true, false, ""},
+ geTestCase{int64(kTwoTo25 + 3), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo25 - 2), false, false, ""},
+ geTestCase{uint64(kTwoTo25 - 1), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+ geTestCase{uint64(kTwoTo25 + 3), true, false, ""},
+
+ // Floating point.
+ geTestCase{float32(-1), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) Float64AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterOrEqual(float64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to 1.8014398509481984e+16"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ // Signed integers.
+ geTestCase{int64(-1), false, false, ""},
+ geTestCase{int64(kTwoTo54 - 2), false, false, ""},
+ geTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 2), true, false, ""},
+ geTestCase{int64(kTwoTo54 + 3), true, false, ""},
+
+ // Unsigned integers.
+ geTestCase{uint64(0), false, false, ""},
+ geTestCase{uint64(kTwoTo54 - 2), false, false, ""},
+ geTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+ geTestCase{uint64(kTwoTo54 + 3), true, false, ""},
+
+ // Floating point.
+ geTestCase{float64(-1), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ geTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ geTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// String literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterOrEqualTest) EmptyString() {
+ matcher := GreaterOrEqual("")
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to \"\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ geTestCase{"", true, false, ""},
+ geTestCase{"\x00", true, false, ""},
+ geTestCase{"a", true, false, ""},
+ geTestCase{"foo", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) SingleNullByte() {
+ matcher := GreaterOrEqual("\x00")
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to \"\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ geTestCase{"", false, false, ""},
+ geTestCase{"\x00", true, false, ""},
+ geTestCase{"a", true, false, ""},
+ geTestCase{"foo", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterOrEqualTest) LongerString() {
+ matcher := GreaterOrEqual("foo\x00")
+ desc := matcher.Description()
+ expectedDesc := "greater than or equal to \"foo\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []geTestCase{
+ geTestCase{"", false, false, ""},
+ geTestCase{"\x00", false, false, ""},
+ geTestCase{"bar", false, false, ""},
+ geTestCase{"foo", false, false, ""},
+ geTestCase{"foo\x00", true, false, ""},
+ geTestCase{"fooa", true, false, ""},
+ geTestCase{"qux", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
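The "subtle cases" above hinge on floating-point granularity: float32 carries a 24-bit significand and float64 a 53-bit one, so integers just past 2^25 (respectively 2^54) collapse onto neighbouring representable values, which is why candidates such as 2^25-1 start matching a 2^25+1 threshold once a float is in the mix. A standalone sketch of that rounding, independent of the matcher code:

package main

import "fmt"

func main() {
	const twoTo25 = 1 << 25 // just above float32's exact-integer range (2^24)
	const twoTo54 = 1 << 54 // just above float64's exact-integer range (2^53)

	// Both conversions round to the nearest representable value, so the
	// "+1" is lost and the comparisons below report true.
	fmt.Println(float32(twoTo25+1) == float32(twoTo25)) // true
	fmt.Println(float64(twoTo54+1) == float64(twoTo54)) // true
}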
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go
new file mode 100644
index 00000000000..3eef32178f8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go
@@ -0,0 +1,39 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// GreaterThan returns a matcher that matches integer, floating point, or
+// string values v such that v > x. Comparison is not defined between numeric
+// and string types, but is defined between all integer and floating point
+// types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// GreaterThan will panic.
+func GreaterThan(x interface{}) Matcher {
+ desc := fmt.Sprintf("greater than %v", x)
+
+ // Special case: make it clear that strings are strings.
+ if reflect.TypeOf(x).Kind() == reflect.String {
+ desc = fmt.Sprintf("greater than \"%s\"", x)
+ }
+
+ return transformDescription(Not(LessOrEqual(x)), desc)
+}
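GreaterThan mirrors GreaterOrEqual exactly, except that it negates LessOrEqual rather than LessThan, so only the boundary value itself behaves differently. A short sketch of that boundary, using the upstream import path as before:

package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

func main() {
	ge := oglematchers.GreaterOrEqual(150)
	gt := oglematchers.GreaterThan(150)

	// The two matchers only disagree on the boundary value itself.
	fmt.Println(ge.Matches(150) == nil) // true: 150 >= 150
	fmt.Println(gt.Matches(150) == nil) // false: 150 is not strictly greater than 150
	fmt.Println(gt.Matches(151) == nil) // true: 151 > 150
}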
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_than_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_than_test.go
new file mode 100644
index 00000000000..bf70fe56633
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/greater_than_test.go
@@ -0,0 +1,1077 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "math"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type GreaterThanTest struct {
+}
+
+func init() { RegisterTestSuite(&GreaterThanTest{}) }
+
+type gtTestCase struct {
+ candidate interface{}
+ expectedResult bool
+ shouldBeFatal bool
+ expectedError string
+}
+
+func (t *GreaterThanTest) checkTestCases(matcher Matcher, cases []gtTestCase) {
+ for i, c := range cases {
+ err := matcher.Matches(c.candidate)
+
+ ExpectThat(
+ (err == nil),
+ Equals(c.expectedResult),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ if err == nil {
+ continue
+ }
+
+ _, isFatal := err.(*FatalError)
+ ExpectEq(
+ c.shouldBeFatal,
+ isFatal,
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ ExpectThat(
+ err,
+ Error(Equals(c.expectedError)),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// Bad candidates and arguments
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterThanTest) IntegerCandidateBadTypes() {
+ matcher := GreaterThan(int(-150))
+
+ cases := []gtTestCase{
+ gtTestCase{true, false, true, "which is not comparable"},
+ gtTestCase{complex64(-151), false, true, "which is not comparable"},
+ gtTestCase{complex128(-151), false, true, "which is not comparable"},
+ gtTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ gtTestCase{make(chan int), false, true, "which is not comparable"},
+ gtTestCase{func() {}, false, true, "which is not comparable"},
+ gtTestCase{map[int]int{}, false, true, "which is not comparable"},
+ gtTestCase{&gtTestCase{}, false, true, "which is not comparable"},
+ gtTestCase{make([]int, 0), false, true, "which is not comparable"},
+ gtTestCase{"-151", false, true, "which is not comparable"},
+ gtTestCase{gtTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) FloatCandidateBadTypes() {
+ matcher := GreaterThan(float32(-150))
+
+ cases := []gtTestCase{
+ gtTestCase{true, false, true, "which is not comparable"},
+ gtTestCase{complex64(-151), false, true, "which is not comparable"},
+ gtTestCase{complex128(-151), false, true, "which is not comparable"},
+ gtTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ gtTestCase{make(chan int), false, true, "which is not comparable"},
+ gtTestCase{func() {}, false, true, "which is not comparable"},
+ gtTestCase{map[int]int{}, false, true, "which is not comparable"},
+ gtTestCase{&gtTestCase{}, false, true, "which is not comparable"},
+ gtTestCase{make([]int, 0), false, true, "which is not comparable"},
+ gtTestCase{"-151", false, true, "which is not comparable"},
+ gtTestCase{gtTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) StringCandidateBadTypes() {
+ matcher := GreaterThan("17")
+
+ cases := []gtTestCase{
+ gtTestCase{true, false, true, "which is not comparable"},
+ gtTestCase{int(0), false, true, "which is not comparable"},
+ gtTestCase{int8(0), false, true, "which is not comparable"},
+ gtTestCase{int16(0), false, true, "which is not comparable"},
+ gtTestCase{int32(0), false, true, "which is not comparable"},
+ gtTestCase{int64(0), false, true, "which is not comparable"},
+ gtTestCase{uint(0), false, true, "which is not comparable"},
+ gtTestCase{uint8(0), false, true, "which is not comparable"},
+ gtTestCase{uint16(0), false, true, "which is not comparable"},
+ gtTestCase{uint32(0), false, true, "which is not comparable"},
+ gtTestCase{uint64(0), false, true, "which is not comparable"},
+ gtTestCase{float32(0), false, true, "which is not comparable"},
+ gtTestCase{float64(0), false, true, "which is not comparable"},
+ gtTestCase{complex64(-151), false, true, "which is not comparable"},
+ gtTestCase{complex128(-151), false, true, "which is not comparable"},
+ gtTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ gtTestCase{make(chan int), false, true, "which is not comparable"},
+ gtTestCase{func() {}, false, true, "which is not comparable"},
+ gtTestCase{map[int]int{}, false, true, "which is not comparable"},
+ gtTestCase{&gtTestCase{}, false, true, "which is not comparable"},
+ gtTestCase{make([]int, 0), false, true, "which is not comparable"},
+ gtTestCase{gtTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) BadArgument() {
+ panicked := false
+
+ defer func() {
+ ExpectThat(panicked, Equals(true))
+ }()
+
+ defer func() {
+ if r := recover(); r != nil {
+ panicked = true
+ }
+ }()
+
+ GreaterThan(complex128(0))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterThanTest) NegativeIntegerLiteral() {
+ matcher := GreaterThan(-150)
+ desc := matcher.Description()
+ expectedDesc := "greater than -150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-(1 << 30), false, false, ""},
+ gtTestCase{-151, false, false, ""},
+ gtTestCase{-150, false, false, ""},
+ gtTestCase{-149, true, false, ""},
+ gtTestCase{0, true, false, ""},
+ gtTestCase{17, true, false, ""},
+
+ gtTestCase{int(-(1 << 30)), false, false, ""},
+ gtTestCase{int(-151), false, false, ""},
+ gtTestCase{int(-150), false, false, ""},
+ gtTestCase{int(-149), true, false, ""},
+ gtTestCase{int(0), true, false, ""},
+ gtTestCase{int(17), true, false, ""},
+
+ gtTestCase{int8(-127), true, false, ""},
+ gtTestCase{int8(0), true, false, ""},
+ gtTestCase{int8(17), true, false, ""},
+
+ gtTestCase{int16(-(1 << 14)), false, false, ""},
+ gtTestCase{int16(-151), false, false, ""},
+ gtTestCase{int16(-150), false, false, ""},
+ gtTestCase{int16(-149), true, false, ""},
+ gtTestCase{int16(0), true, false, ""},
+ gtTestCase{int16(17), true, false, ""},
+
+ gtTestCase{int32(-(1 << 30)), false, false, ""},
+ gtTestCase{int32(-151), false, false, ""},
+ gtTestCase{int32(-150), false, false, ""},
+ gtTestCase{int32(-149), true, false, ""},
+ gtTestCase{int32(0), true, false, ""},
+ gtTestCase{int32(17), true, false, ""},
+
+ gtTestCase{int64(-(1 << 30)), false, false, ""},
+ gtTestCase{int64(-151), false, false, ""},
+ gtTestCase{int64(-150), false, false, ""},
+ gtTestCase{int64(-149), true, false, ""},
+ gtTestCase{int64(0), true, false, ""},
+ gtTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint((1 << 32) - 151), true, false, ""},
+ gtTestCase{uint(0), true, false, ""},
+ gtTestCase{uint(17), true, false, ""},
+
+ gtTestCase{uint8(0), true, false, ""},
+ gtTestCase{uint8(17), true, false, ""},
+ gtTestCase{uint8(253), true, false, ""},
+
+ gtTestCase{uint16((1 << 16) - 151), true, false, ""},
+ gtTestCase{uint16(0), true, false, ""},
+ gtTestCase{uint16(17), true, false, ""},
+
+ gtTestCase{uint32((1 << 32) - 151), true, false, ""},
+ gtTestCase{uint32(0), true, false, ""},
+ gtTestCase{uint32(17), true, false, ""},
+
+ gtTestCase{uint64((1 << 64) - 151), true, false, ""},
+ gtTestCase{uint64(0), true, false, ""},
+ gtTestCase{uint64(17), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-(1 << 30)), false, false, ""},
+ gtTestCase{float32(-151), false, false, ""},
+ gtTestCase{float32(-150.1), false, false, ""},
+ gtTestCase{float32(-150), false, false, ""},
+ gtTestCase{float32(-149.9), true, false, ""},
+ gtTestCase{float32(0), true, false, ""},
+ gtTestCase{float32(17), true, false, ""},
+ gtTestCase{float32(160), true, false, ""},
+
+ gtTestCase{float64(-(1 << 30)), false, false, ""},
+ gtTestCase{float64(-151), false, false, ""},
+ gtTestCase{float64(-150.1), false, false, ""},
+ gtTestCase{float64(-150), false, false, ""},
+ gtTestCase{float64(-149.9), true, false, ""},
+ gtTestCase{float64(0), true, false, ""},
+ gtTestCase{float64(17), true, false, ""},
+ gtTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) ZeroIntegerLiteral() {
+ matcher := GreaterThan(0)
+ desc := matcher.Description()
+ expectedDesc := "greater than 0"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-(1 << 30), false, false, ""},
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{0, false, false, ""},
+ gtTestCase{1, true, false, ""},
+ gtTestCase{17, true, false, ""},
+ gtTestCase{(1 << 30), true, false, ""},
+
+ gtTestCase{int(-(1 << 30)), false, false, ""},
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(0), false, false, ""},
+ gtTestCase{int(1), true, false, ""},
+ gtTestCase{int(17), true, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(0), false, false, ""},
+ gtTestCase{int8(1), true, false, ""},
+
+ gtTestCase{int16(-(1 << 14)), false, false, ""},
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(0), false, false, ""},
+ gtTestCase{int16(1), true, false, ""},
+ gtTestCase{int16(17), true, false, ""},
+
+ gtTestCase{int32(-(1 << 30)), false, false, ""},
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(0), false, false, ""},
+ gtTestCase{int32(1), true, false, ""},
+ gtTestCase{int32(17), true, false, ""},
+
+ gtTestCase{int64(-(1 << 30)), false, false, ""},
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(0), false, false, ""},
+ gtTestCase{int64(1), true, false, ""},
+ gtTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint((1 << 32) - 1), true, false, ""},
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(1), true, false, ""},
+ gtTestCase{uint(17), true, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(1), true, false, ""},
+ gtTestCase{uint8(17), true, false, ""},
+ gtTestCase{uint8(253), true, false, ""},
+
+ gtTestCase{uint16((1 << 16) - 1), true, false, ""},
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(1), true, false, ""},
+ gtTestCase{uint16(17), true, false, ""},
+
+ gtTestCase{uint32((1 << 32) - 1), true, false, ""},
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(1), true, false, ""},
+ gtTestCase{uint32(17), true, false, ""},
+
+ gtTestCase{uint64((1 << 64) - 1), true, false, ""},
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(1), true, false, ""},
+ gtTestCase{uint64(17), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-(1 << 30)), false, false, ""},
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(-0.1), false, false, ""},
+ gtTestCase{float32(-0.0), false, false, ""},
+ gtTestCase{float32(0), false, false, ""},
+ gtTestCase{float32(0.1), true, false, ""},
+ gtTestCase{float32(17), true, false, ""},
+ gtTestCase{float32(160), true, false, ""},
+
+ gtTestCase{float64(-(1 << 30)), false, false, ""},
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(-0.1), false, false, ""},
+ gtTestCase{float64(-0), false, false, ""},
+ gtTestCase{float64(0), false, false, ""},
+ gtTestCase{float64(0.1), true, false, ""},
+ gtTestCase{float64(17), true, false, ""},
+ gtTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) PositiveIntegerLiteral() {
+ matcher := GreaterThan(150)
+ desc := matcher.Description()
+ expectedDesc := "greater than 150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{149, false, false, ""},
+ gtTestCase{150, false, false, ""},
+ gtTestCase{151, true, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(149), false, false, ""},
+ gtTestCase{int(150), false, false, ""},
+ gtTestCase{int(151), true, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(0), false, false, ""},
+ gtTestCase{int8(17), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(149), false, false, ""},
+ gtTestCase{int16(150), false, false, ""},
+ gtTestCase{int16(151), true, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(149), false, false, ""},
+ gtTestCase{int32(150), false, false, ""},
+ gtTestCase{int32(151), true, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(149), false, false, ""},
+ gtTestCase{int64(150), false, false, ""},
+ gtTestCase{int64(151), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(149), false, false, ""},
+ gtTestCase{uint(150), false, false, ""},
+ gtTestCase{uint(151), true, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(127), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(149), false, false, ""},
+ gtTestCase{uint16(150), false, false, ""},
+ gtTestCase{uint16(151), true, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(149), false, false, ""},
+ gtTestCase{uint32(150), false, false, ""},
+ gtTestCase{uint32(151), true, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(149), false, false, ""},
+ gtTestCase{uint64(150), false, false, ""},
+ gtTestCase{uint64(151), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(149), false, false, ""},
+ gtTestCase{float32(149.9), false, false, ""},
+ gtTestCase{float32(150), false, false, ""},
+ gtTestCase{float32(150.1), true, false, ""},
+ gtTestCase{float32(151), true, false, ""},
+
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(149), false, false, ""},
+ gtTestCase{float64(149.9), false, false, ""},
+ gtTestCase{float64(150), false, false, ""},
+ gtTestCase{float64(150.1), true, false, ""},
+ gtTestCase{float64(151), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Float literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterThanTest) NegativeFloatLiteral() {
+ matcher := GreaterThan(-150.1)
+ desc := matcher.Description()
+ expectedDesc := "greater than -150.1"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-(1 << 30), false, false, ""},
+ gtTestCase{-151, false, false, ""},
+ gtTestCase{-150.1, false, false, ""},
+ gtTestCase{-150, true, false, ""},
+ gtTestCase{-149, true, false, ""},
+ gtTestCase{0, true, false, ""},
+ gtTestCase{17, true, false, ""},
+
+ gtTestCase{int(-(1 << 30)), false, false, ""},
+ gtTestCase{int(-151), false, false, ""},
+ gtTestCase{int(-150), true, false, ""},
+ gtTestCase{int(-149), true, false, ""},
+ gtTestCase{int(0), true, false, ""},
+ gtTestCase{int(17), true, false, ""},
+
+ gtTestCase{int8(-127), true, false, ""},
+ gtTestCase{int8(0), true, false, ""},
+ gtTestCase{int8(17), true, false, ""},
+
+ gtTestCase{int16(-(1 << 14)), false, false, ""},
+ gtTestCase{int16(-151), false, false, ""},
+ gtTestCase{int16(-150), true, false, ""},
+ gtTestCase{int16(-149), true, false, ""},
+ gtTestCase{int16(0), true, false, ""},
+ gtTestCase{int16(17), true, false, ""},
+
+ gtTestCase{int32(-(1 << 30)), false, false, ""},
+ gtTestCase{int32(-151), false, false, ""},
+ gtTestCase{int32(-150), true, false, ""},
+ gtTestCase{int32(-149), true, false, ""},
+ gtTestCase{int32(0), true, false, ""},
+ gtTestCase{int32(17), true, false, ""},
+
+ gtTestCase{int64(-(1 << 30)), false, false, ""},
+ gtTestCase{int64(-151), false, false, ""},
+ gtTestCase{int64(-150), true, false, ""},
+ gtTestCase{int64(-149), true, false, ""},
+ gtTestCase{int64(0), true, false, ""},
+ gtTestCase{int64(17), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint((1 << 32) - 151), true, false, ""},
+ gtTestCase{uint(0), true, false, ""},
+ gtTestCase{uint(17), true, false, ""},
+
+ gtTestCase{uint8(0), true, false, ""},
+ gtTestCase{uint8(17), true, false, ""},
+ gtTestCase{uint8(253), true, false, ""},
+
+ gtTestCase{uint16((1 << 16) - 151), true, false, ""},
+ gtTestCase{uint16(0), true, false, ""},
+ gtTestCase{uint16(17), true, false, ""},
+
+ gtTestCase{uint32((1 << 32) - 151), true, false, ""},
+ gtTestCase{uint32(0), true, false, ""},
+ gtTestCase{uint32(17), true, false, ""},
+
+ gtTestCase{uint64((1 << 64) - 151), true, false, ""},
+ gtTestCase{uint64(0), true, false, ""},
+ gtTestCase{uint64(17), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-(1 << 30)), false, false, ""},
+ gtTestCase{float32(-151), false, false, ""},
+ gtTestCase{float32(-150.2), false, false, ""},
+ gtTestCase{float32(-150.1), false, false, ""},
+ gtTestCase{float32(-150), true, false, ""},
+ gtTestCase{float32(0), true, false, ""},
+ gtTestCase{float32(17), true, false, ""},
+ gtTestCase{float32(160), true, false, ""},
+
+ gtTestCase{float64(-(1 << 30)), false, false, ""},
+ gtTestCase{float64(-151), false, false, ""},
+ gtTestCase{float64(-150.2), false, false, ""},
+ gtTestCase{float64(-150.1), false, false, ""},
+ gtTestCase{float64(-150), true, false, ""},
+ gtTestCase{float64(0), true, false, ""},
+ gtTestCase{float64(17), true, false, ""},
+ gtTestCase{float64(160), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) PositiveFloatLiteral() {
+ matcher := GreaterThan(149.9)
+ desc := matcher.Description()
+ expectedDesc := "greater than 149.9"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{149, false, false, ""},
+ gtTestCase{149.9, false, false, ""},
+ gtTestCase{150, true, false, ""},
+ gtTestCase{151, true, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(149), false, false, ""},
+ gtTestCase{int(150), true, false, ""},
+ gtTestCase{int(151), true, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(0), false, false, ""},
+ gtTestCase{int8(17), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(149), false, false, ""},
+ gtTestCase{int16(150), true, false, ""},
+ gtTestCase{int16(151), true, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(149), false, false, ""},
+ gtTestCase{int32(150), true, false, ""},
+ gtTestCase{int32(151), true, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(149), false, false, ""},
+ gtTestCase{int64(150), true, false, ""},
+ gtTestCase{int64(151), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(149), false, false, ""},
+ gtTestCase{uint(150), true, false, ""},
+ gtTestCase{uint(151), true, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(127), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(149), false, false, ""},
+ gtTestCase{uint16(150), true, false, ""},
+ gtTestCase{uint16(151), true, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(149), false, false, ""},
+ gtTestCase{uint32(150), true, false, ""},
+ gtTestCase{uint32(151), true, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(149), false, false, ""},
+ gtTestCase{uint64(150), true, false, ""},
+ gtTestCase{uint64(151), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(149), false, false, ""},
+ gtTestCase{float32(149.8), false, false, ""},
+ gtTestCase{float32(149.9), false, false, ""},
+ gtTestCase{float32(150), true, false, ""},
+ gtTestCase{float32(151), true, false, ""},
+
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(149), false, false, ""},
+ gtTestCase{float64(149.8), false, false, ""},
+ gtTestCase{float64(149.9), false, false, ""},
+ gtTestCase{float64(150), true, false, ""},
+ gtTestCase{float64(151), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Subtle cases
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterThanTest) Int64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterThan(int64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{kTwoTo25 + 0, false, false, ""},
+ gtTestCase{kTwoTo25 + 1, false, false, ""},
+ gtTestCase{kTwoTo25 + 2, true, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(0), false, false, ""},
+ gtTestCase{int16(32767), false, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 2), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(255), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(65535), false, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ gtTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) Int64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterThan(int64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{1 << 30, false, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(math.MaxInt32), false, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(0), false, false, ""},
+ gtTestCase{int16(32767), false, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(math.MaxInt32), false, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 2), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(math.MaxUint32), false, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(255), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(65535), false, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(math.MaxUint32), false, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) Uint64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterThan(uint64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{kTwoTo25 + 0, false, false, ""},
+ gtTestCase{kTwoTo25 + 1, false, false, ""},
+ gtTestCase{kTwoTo25 + 2, true, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(0), false, false, ""},
+ gtTestCase{int16(32767), false, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int32(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 2), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(255), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(65535), false, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint32(kTwoTo25 + 2), true, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ gtTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) Uint64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterThan(uint64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{-1, false, false, ""},
+ gtTestCase{1 << 30, false, false, ""},
+
+ gtTestCase{int(-1), false, false, ""},
+ gtTestCase{int(math.MaxInt32), false, false, ""},
+
+ gtTestCase{int8(-1), false, false, ""},
+ gtTestCase{int8(127), false, false, ""},
+
+ gtTestCase{int16(-1), false, false, ""},
+ gtTestCase{int16(0), false, false, ""},
+ gtTestCase{int16(32767), false, false, ""},
+
+ gtTestCase{int32(-1), false, false, ""},
+ gtTestCase{int32(math.MaxInt32), false, false, ""},
+
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 2), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint(0), false, false, ""},
+ gtTestCase{uint(math.MaxUint32), false, false, ""},
+
+ gtTestCase{uint8(0), false, false, ""},
+ gtTestCase{uint8(255), false, false, ""},
+
+ gtTestCase{uint16(0), false, false, ""},
+ gtTestCase{uint16(65535), false, false, ""},
+
+ gtTestCase{uint32(0), false, false, ""},
+ gtTestCase{uint32(math.MaxUint32), false, false, ""},
+
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) Float32AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := GreaterThan(float32(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 3.3554432e+07"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{int64(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{int64(kTwoTo25 + 3), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{uint64(kTwoTo25 + 3), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float32(-1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{float32(kTwoTo25 + 3), true, false, ""},
+
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ gtTestCase{float64(kTwoTo25 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) Float64AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := GreaterThan(float64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "greater than 1.8014398509481984e+16"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ // Signed integers.
+ gtTestCase{int64(-1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 - 2), false, false, ""},
+ gtTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 2), false, false, ""},
+ gtTestCase{int64(kTwoTo54 + 3), true, false, ""},
+
+ // Unsigned integers.
+ gtTestCase{uint64(0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 - 2), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+ gtTestCase{uint64(kTwoTo54 + 3), true, false, ""},
+
+ // Floating point.
+ gtTestCase{float64(-1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ gtTestCase{float64(kTwoTo54 + 3), true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// String literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *GreaterThanTest) EmptyString() {
+ matcher := GreaterThan("")
+ desc := matcher.Description()
+ expectedDesc := "greater than \"\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ gtTestCase{"", false, false, ""},
+ gtTestCase{"\x00", true, false, ""},
+ gtTestCase{"a", true, false, ""},
+ gtTestCase{"foo", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) SingleNullByte() {
+ matcher := GreaterThan("\x00")
+ desc := matcher.Description()
+ expectedDesc := "greater than \"\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ gtTestCase{"", false, false, ""},
+ gtTestCase{"\x00", false, false, ""},
+ gtTestCase{"\x00\x00", true, false, ""},
+ gtTestCase{"a", true, false, ""},
+ gtTestCase{"foo", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *GreaterThanTest) LongerString() {
+ matcher := GreaterThan("foo\x00")
+ desc := matcher.Description()
+ expectedDesc := "greater than \"foo\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []gtTestCase{
+ gtTestCase{"", false, false, ""},
+ gtTestCase{"\x00", false, false, ""},
+ gtTestCase{"bar", false, false, ""},
+ gtTestCase{"foo", false, false, ""},
+ gtTestCase{"foo\x00", false, false, ""},
+ gtTestCase{"foo\x00\x00", true, false, ""},
+ gtTestCase{"fooa", true, false, ""},
+ gtTestCase{"qux", true, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
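
Editor's note (not part of the vendored change): the tables above define GreaterThan's cross-type behaviour, including the single-/double-precision "subtle cases". The following is a minimal, hypothetical usage sketch; it assumes the internal vendored import path from the diff headers is reachable from the calling code, and every identifier other than GreaterThan, Description, and Matches is illustrative.

// Hypothetical sketch only; assumes the vendored internal import path is importable.
package main

import (
	"fmt"

	oglematchers "github.com/smartystreets/assertions/internal/oglematchers"
)

func main() {
	// GreaterThan compares across numeric types: a uint candidate can be
	// checked against an int threshold, as in the PositiveIntegerLiteral test.
	m := oglematchers.GreaterThan(150)
	fmt.Println(m.Description())             // "greater than 150"
	fmt.Println(m.Matches(uint(151)) == nil) // true: 151 > 150
	fmt.Println(m.Matches(149) == nil)       // false: not greater

	// The "subtle cases" above show why precision matters: float32 cannot
	// represent 2^25+1 exactly, so candidates up to 2^25+2 are not treated
	// as greater (see the Float32AboveExactIntegerRange table).
	const twoTo25 = 1 << 25
	f := oglematchers.GreaterThan(float32(twoTo25 + 1))
	fmt.Println(f.Matches(int64(twoTo25+2)) == nil) // false, per the table above
	fmt.Println(f.Matches(int64(twoTo25+3)) == nil) // true
}
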
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_same_type_as.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_same_type_as.go
new file mode 100644
index 00000000000..3b286f73218
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_same_type_as.go
@@ -0,0 +1,37 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// HasSameTypeAs returns a matcher that matches values with exactly the same
+// type as the supplied prototype.
+func HasSameTypeAs(p interface{}) Matcher {
+ expected := reflect.TypeOf(p)
+ pred := func(c interface{}) error {
+ actual := reflect.TypeOf(c)
+ if actual != expected {
+ return fmt.Errorf("which has type %v", actual)
+ }
+
+ return nil
+ }
+
+ return NewMatcher(pred, fmt.Sprintf("has type %v", expected))
+}
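
Editor's note (not part of the vendored change): a brief sketch of how the HasSameTypeAs matcher defined above behaves. HasSameTypeAs, Description, and Matches are the library names from this file; the surrounding program and import alias are illustrative and assume the vendored internal import path.

// Hypothetical sketch only; assumes the vendored internal import path is importable.
package main

import (
	"fmt"

	oglematchers "github.com/smartystreets/assertions/internal/oglematchers"
)

func main() {
	// The matcher records reflect.TypeOf(prototype) and requires an exact match.
	m := oglematchers.HasSameTypeAs(int64(0))
	fmt.Println(m.Description())             // "has type int64"
	fmt.Println(m.Matches(int64(17)) == nil) // true: exact type match
	fmt.Println(m.Matches(17))               // "which has type int": int is not int64
}
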
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_same_type_as_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_same_type_as_test.go
new file mode 100644
index 00000000000..a4a3e308aa3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_same_type_as_test.go
@@ -0,0 +1,181 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "io"
+ "testing"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+func TestHasSameTypeAs(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Boilerplate
+////////////////////////////////////////////////////////////////////////
+
+type HasSameTypeAsTest struct {
+}
+
+func init() { RegisterTestSuite(&HasSameTypeAsTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *HasSameTypeAsTest) CandidateIsLiteralNil() {
+ matcher := HasSameTypeAs(nil)
+ var err error
+
+ // Description
+ ExpectEq("has type <nil>", matcher.Description())
+
+ // Literal nil
+ err = matcher.Matches(nil)
+ ExpectEq(nil, err)
+
+ // nil in interface variable
+ var r io.Reader
+ err = matcher.Matches(r)
+ ExpectEq(nil, err)
+
+ // int
+ err = matcher.Matches(17)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type int")))
+
+ // string
+ err = matcher.Matches("")
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type string")))
+
+ // nil map
+ var m map[string]string
+ err = matcher.Matches(m)
+
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type map[string]string")))
+
+ // Non-nil map
+ m = make(map[string]string)
+ err = matcher.Matches(m)
+
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type map[string]string")))
+}
+
+func (t *HasSameTypeAsTest) CandidateIsNilMap() {
+ var m map[string]string
+ matcher := HasSameTypeAs(m)
+ var err error
+
+ // Description
+ ExpectEq("has type map[string]string", matcher.Description())
+
+ // nil map
+ m = nil
+ err = matcher.Matches(m)
+ ExpectEq(nil, err)
+
+ // Non-nil map
+ m = make(map[string]string)
+ err = matcher.Matches(m)
+ ExpectEq(nil, err)
+
+ // Literal nil
+ err = matcher.Matches(nil)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type <nil>")))
+
+ // int
+ err = matcher.Matches(17)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type int")))
+
+ // string
+ err = matcher.Matches("")
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type string")))
+}
+
+func (t *HasSameTypeAsTest) CandidateIsNilInInterfaceVariable() {
+ var r io.Reader
+ matcher := HasSameTypeAs(r)
+ var err error
+
+ // Description
+ ExpectEq("has type <nil>", matcher.Description())
+
+ // nil in interface variable
+ r = nil
+ err = matcher.Matches(r)
+ ExpectEq(nil, err)
+
+ // Literal nil
+ err = matcher.Matches(nil)
+ ExpectEq(nil, err)
+
+ // int
+ err = matcher.Matches(17)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type int")))
+}
+
+func (t *HasSameTypeAsTest) CandidateIsString() {
+ matcher := HasSameTypeAs("")
+ var err error
+
+ // Description
+ ExpectEq("has type string", matcher.Description())
+
+ // string
+ err = matcher.Matches("taco")
+ ExpectEq(nil, err)
+
+ // string alias
+ type Foo string
+ err = matcher.Matches(Foo("taco"))
+ ExpectThat(err, Error(MatchesRegexp("which has type .*Foo")))
+
+ // Literal nil
+ err = matcher.Matches(nil)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type <nil>")))
+
+ // int
+ err = matcher.Matches(17)
+ AssertNe(nil, err)
+ ExpectThat(err, Error(Equals("which has type int")))
+}
+
+func (t *HasSameTypeAsTest) CandidateIsStringAlias() {
+ type Foo string
+ matcher := HasSameTypeAs(Foo(""))
+ var err error
+
+ // Description
+ ExpectThat(matcher.Description(), MatchesRegexp("has type .*Foo"))
+
+ // string alias
+ err = matcher.Matches(Foo("taco"))
+ ExpectEq(nil, err)
+
+ // string
+ err = matcher.Matches("taco")
+ ExpectThat(err, Error(Equals("which has type string")))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_substr.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_substr.go
new file mode 100644
index 00000000000..bf5bd6ae6d3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_substr.go
@@ -0,0 +1,46 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// HasSubstr returns a matcher that matches strings containing s as a
+// substring.
+func HasSubstr(s string) Matcher {
+ return NewMatcher(
+ func(c interface{}) error { return hasSubstr(s, c) },
+ fmt.Sprintf("has substring \"%s\"", s))
+}
+
+func hasSubstr(needle string, c interface{}) error {
+ v := reflect.ValueOf(c)
+ if v.Kind() != reflect.String {
+ return NewFatalError("which is not a string")
+ }
+
+ // Perform the substring search.
+ haystack := v.String()
+ if strings.Contains(haystack, needle) {
+ return nil
+ }
+
+ return errors.New("")
+}
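
Editor's note (not part of the vendored change): a short usage sketch for HasSubstr as defined above. Non-string candidates produce a fatal error, and a missing substring produces an empty non-fatal error; the program itself and the import alias are illustrative, assuming the vendored internal import path.

// Hypothetical sketch only; assumes the vendored internal import path is importable.
package main

import (
	"fmt"

	oglematchers "github.com/smartystreets/assertions/internal/oglematchers"
)

func main() {
	m := oglematchers.HasSubstr("taco")
	fmt.Println(m.Description())                        // has substring "taco"
	fmt.Println(m.Matches("burritos and tacos") == nil) // true: substring present
	fmt.Println(m.Matches("tac") == nil)                // false: empty, non-fatal error
	fmt.Println(m.Matches(17))                          // fatal error: "which is not a string"
}
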
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_substr_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_substr_test.go
new file mode 100644
index 00000000000..6fc913a2490
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/has_substr_test.go
@@ -0,0 +1,93 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type HasSubstrTest struct {
+
+}
+
+func init() { RegisterTestSuite(&HasSubstrTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *HasSubstrTest) Description() {
+ matcher := HasSubstr("taco")
+ ExpectThat(matcher.Description(), Equals("has substring \"taco\""))
+}
+
+func (t *HasSubstrTest) CandidateIsNil() {
+ matcher := HasSubstr("")
+ err := matcher.Matches(nil)
+
+ ExpectThat(err, Error(Equals("which is not a string")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *HasSubstrTest) CandidateIsInteger() {
+ matcher := HasSubstr("")
+ err := matcher.Matches(17)
+
+ ExpectThat(err, Error(Equals("which is not a string")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *HasSubstrTest) CandidateIsByteSlice() {
+ matcher := HasSubstr("")
+ err := matcher.Matches([]byte{17})
+
+ ExpectThat(err, Error(Equals("which is not a string")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *HasSubstrTest) CandidateDoesntHaveSubstring() {
+ matcher := HasSubstr("taco")
+ err := matcher.Matches("tac")
+
+ ExpectThat(err, Error(Equals("")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *HasSubstrTest) CandidateEqualsArg() {
+ matcher := HasSubstr("taco")
+ err := matcher.Matches("taco")
+
+ ExpectThat(err, Equals(nil))
+}
+
+func (t *HasSubstrTest) CandidateHasProperSubstring() {
+ matcher := HasSubstr("taco")
+ err := matcher.Matches("burritos and tacos")
+
+ ExpectThat(err, Equals(nil))
+}
+
+func (t *HasSubstrTest) EmptyStringIsAlwaysSubString() {
+ matcher := HasSubstr("")
+ err := matcher.Matches("asdf")
+
+ ExpectThat(err, Equals(nil))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/identical_to.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/identical_to.go
new file mode 100644
index 00000000000..ae6460ed966
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/identical_to.go
@@ -0,0 +1,134 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+// Is the type comparable according to the definition here?
+//
+// http://weekly.golang.org/doc/go_spec.html#Comparison_operators
+//
+func isComparable(t reflect.Type) bool {
+ switch t.Kind() {
+ case reflect.Array:
+ return isComparable(t.Elem())
+
+ case reflect.Struct:
+ for i := 0; i < t.NumField(); i++ {
+ if !isComparable(t.Field(i).Type) {
+ return false
+ }
+ }
+
+ return true
+
+ case reflect.Slice, reflect.Map, reflect.Func:
+ return false
+ }
+
+ return true
+}
+
+// Should the supplied type be allowed as an argument to IdenticalTo?
+func isLegalForIdenticalTo(t reflect.Type) (bool, error) {
+ // Allow the nil type (i.e. an untyped nil or nil interface argument).
+ if t == nil {
+ return true, nil
+ }
+
+ // Reference types are always okay; we compare pointers.
+ switch t.Kind() {
+ case reflect.Slice, reflect.Map, reflect.Func, reflect.Chan:
+ return true, nil
+ }
+
+ // Reject other non-comparable types.
+ if !isComparable(t) {
+ return false, fmt.Errorf("%v is not comparable", t)
+ }
+
+ return true, nil
+}
+
+// IdenticalTo(x) returns a matcher that matches values v with type identical
+// to x such that:
+//
+// 1. If v and x are of a reference type (slice, map, function, channel), then
+// they are either both nil or are references to the same object.
+//
+// 2. Otherwise, if v and x are not of a reference type but have a valid type,
+// then v == x.
+//
+// If v and x are both the invalid type (which results from the predeclared nil
+// value, or from nil interface variables), then the matcher is satisfied.
+//
+// This function will panic if x is of a value type that is not comparable. For
+// example, x cannot be an array of functions.
+func IdenticalTo(x interface{}) Matcher {
+ t := reflect.TypeOf(x)
+
+ // Reject illegal arguments.
+ if ok, err := isLegalForIdenticalTo(t); !ok {
+ panic("IdenticalTo: " + err.Error())
+ }
+
+ return &identicalToMatcher{x}
+}
+
+type identicalToMatcher struct {
+ x interface{}
+}
+
+func (m *identicalToMatcher) Description() string {
+ t := reflect.TypeOf(m.x)
+ return fmt.Sprintf("identical to <%v> %v", t, m.x)
+}
+
+func (m *identicalToMatcher) Matches(c interface{}) error {
+ // Make sure the candidate's type is correct.
+ t := reflect.TypeOf(m.x)
+ if ct := reflect.TypeOf(c); t != ct {
+ return NewFatalError(fmt.Sprintf("which is of type %v", ct))
+ }
+
+ // Special case: two values of the invalid type are always identical.
+ if t == nil {
+ return nil
+ }
+
+ // Handle reference types.
+ switch t.Kind() {
+ case reflect.Slice, reflect.Map, reflect.Func, reflect.Chan:
+ xv := reflect.ValueOf(m.x)
+ cv := reflect.ValueOf(c)
+ if xv.Pointer() == cv.Pointer() {
+ return nil
+ }
+
+ return errors.New("which is not an identical reference")
+ }
+
+ // Are the values equal?
+ if m.x == c {
+ return nil
+ }
+
+ return errors.New("")
+}
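
Editor's note (not part of the vendored change): a minimal sketch of the reference-vs-value semantics implemented above. Reference types (slices, maps, functions, channels) are compared by pointer, while comparable value types fall through to == after a strict type check; the program and import alias are illustrative, assuming the vendored internal import path.

// Hypothetical sketch only; assumes the vendored internal import path is importable.
package main

import (
	"fmt"

	oglematchers "github.com/smartystreets/assertions/internal/oglematchers"
)

func main() {
	// Reference types are compared by pointer identity.
	s1 := make([]int, 1)
	s2 := make([]int, 1)
	m := oglematchers.IdenticalTo(s1)
	fmt.Println(m.Matches(s1) == nil) // true: same underlying array
	fmt.Println(m.Matches(s2))        // "which is not an identical reference"

	// Comparable value types use ==, after an exact type check.
	n := oglematchers.IdenticalTo(17)
	fmt.Println(n.Matches(17) == nil) // true
	fmt.Println(n.Matches(int32(17))) // fatal: "which is of type int32"
}
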
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/identical_to_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/identical_to_test.go
new file mode 100644
index 00000000000..cc03b214ad6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/identical_to_test.go
@@ -0,0 +1,849 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "fmt"
+ "io"
+ "unsafe"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type IdenticalToTest struct {
+}
+
+func init() { RegisterTestSuite(&IdenticalToTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *IdenticalToTest) TypesNotIdentical() {
+ var m Matcher
+ var err error
+
+ type intAlias int
+
+ // Type alias expected value
+ m = IdenticalTo(intAlias(17))
+ err = m.Matches(int(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int")))
+
+ // Type alias candidate
+ m = IdenticalTo(int(17))
+ err = m.Matches(intAlias(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.intAlias")))
+
+ // int and uint
+ m = IdenticalTo(int(17))
+ err = m.Matches(uint(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type uint")))
+}
+
+func (t *IdenticalToTest) PredeclaredNilIdentifier() {
+ var m Matcher
+ var err error
+
+ // Nil literal
+ m = IdenticalTo(nil)
+ err = m.Matches(nil)
+ ExpectEq(nil, err)
+
+ // Zero interface var (which is the same as above since IdenticalTo takes an
+ // interface{} as an arg)
+ var nilReader io.Reader
+ var nilWriter io.Writer
+
+ m = IdenticalTo(nilReader)
+ err = m.Matches(nilWriter)
+ ExpectEq(nil, err)
+
+ // Typed nil value.
+ m = IdenticalTo(nil)
+ err = m.Matches((chan int)(nil))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type chan int")))
+
+ // Non-nil value.
+ m = IdenticalTo(nil)
+ err = m.Matches("taco")
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type string")))
+}
+
+func (t *IdenticalToTest) Slices() {
+ var m Matcher
+ var err error
+
+ // Nil expected value
+ m = IdenticalTo(([]int)(nil))
+ ExpectEq("identical to <[]int> []", m.Description())
+
+ err = m.Matches(([]int)(nil))
+ ExpectEq(nil, err)
+
+ err = m.Matches([]int{})
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+
+ // Non-nil expected value
+ o1 := make([]int, 1)
+ o2 := make([]int, 1)
+ m = IdenticalTo(o1)
+ ExpectEq(fmt.Sprintf("identical to <[]int> %v", o1), m.Description())
+
+ err = m.Matches(o1)
+ ExpectEq(nil, err)
+
+ err = m.Matches(o2)
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+}
+
+func (t *IdenticalToTest) Maps() {
+ var m Matcher
+ var err error
+
+ // Nil expected value
+ m = IdenticalTo((map[int]int)(nil))
+ ExpectEq("identical to <map[int]int> map[]", m.Description())
+
+ err = m.Matches((map[int]int)(nil))
+ ExpectEq(nil, err)
+
+ err = m.Matches(map[int]int{})
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+
+ // Non-nil expected value
+ o1 := map[int]int{}
+ o2 := map[int]int{}
+ m = IdenticalTo(o1)
+ ExpectEq(fmt.Sprintf("identical to <map[int]int> %v", o1), m.Description())
+
+ err = m.Matches(o1)
+ ExpectEq(nil, err)
+
+ err = m.Matches(o2)
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+}
+
+func (t *IdenticalToTest) Functions() {
+ var m Matcher
+ var err error
+
+ // Nil expected value
+ m = IdenticalTo((func())(nil))
+ ExpectEq("identical to <func()> <nil>", m.Description())
+
+ err = m.Matches((func())(nil))
+ ExpectEq(nil, err)
+
+ err = m.Matches(func() {})
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+
+ // Non-nil expected value
+ o1 := func() {}
+ o2 := func() {}
+ m = IdenticalTo(o1)
+ ExpectEq(fmt.Sprintf("identical to <func()> %v", o1), m.Description())
+
+ err = m.Matches(o1)
+ ExpectEq(nil, err)
+
+ err = m.Matches(o2)
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+}
+
+func (t *IdenticalToTest) Channels() {
+ var m Matcher
+ var err error
+
+ // Nil expected value
+ m = IdenticalTo((chan int)(nil))
+ ExpectEq("identical to <chan int> <nil>", m.Description())
+
+ err = m.Matches((chan int)(nil))
+ ExpectEq(nil, err)
+
+ err = m.Matches(make(chan int))
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+
+ // Non-nil expected value
+ o1 := make(chan int)
+ o2 := make(chan int)
+ m = IdenticalTo(o1)
+ ExpectEq(fmt.Sprintf("identical to <chan int> %v", o1), m.Description())
+
+ err = m.Matches(o1)
+ ExpectEq(nil, err)
+
+ err = m.Matches(o2)
+ ExpectThat(err, Error(Equals("which is not an identical reference")))
+}
+
+func (t *IdenticalToTest) Bools() {
+ var m Matcher
+ var err error
+
+ // false
+ m = IdenticalTo(false)
+ ExpectEq("identical to <bool> false", m.Description())
+
+ err = m.Matches(false)
+ ExpectEq(nil, err)
+
+ err = m.Matches(true)
+ ExpectThat(err, Error(Equals("")))
+
+ // true
+ m = IdenticalTo(true)
+ ExpectEq("identical to <bool> true", m.Description())
+
+ err = m.Matches(false)
+ ExpectThat(err, Error(Equals("")))
+
+ err = m.Matches(true)
+ ExpectEq(nil, err)
+}
+
+func (t *IdenticalToTest) Ints() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(int(17))
+ ExpectEq("identical to <int> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(int(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType int
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Int8s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(int8(17))
+ ExpectEq("identical to <int8> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(int8(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType int8
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Int16s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(int16(17))
+ ExpectEq("identical to <int16> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(int16(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType int16
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Int32s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(int32(17))
+ ExpectEq("identical to <int32> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(int32(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType int32
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int16(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int16")))
+}
+
+func (t *IdenticalToTest) Int64s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(int64(17))
+ ExpectEq("identical to <int64> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(int64(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType int64
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uints() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uint(17))
+ ExpectEq("identical to <uint> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uint(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uint
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uint8s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uint8(17))
+ ExpectEq("identical to <uint8> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uint8(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uint8
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uint16s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uint16(17))
+ ExpectEq("identical to <uint16> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uint16(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uint16
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uint32s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uint32(17))
+ ExpectEq("identical to <uint32> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uint32(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uint32
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uint64s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uint64(17))
+ ExpectEq("identical to <uint64> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uint64(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uint64
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Uintptrs() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(uintptr(17))
+ ExpectEq("identical to <uintptr> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(uintptr(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType uintptr
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Float32s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(float32(17))
+ ExpectEq("identical to <float32> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(float32(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType float32
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Float64s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(float64(17))
+ ExpectEq("identical to <float64> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(float64(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType float64
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Complex64s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(complex64(17))
+ ExpectEq("identical to <complex64> (17+0i)", m.Description())
+
+ // Identical value
+ err = m.Matches(complex64(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType complex64
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) Complex128s() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo(complex128(17))
+ ExpectEq("identical to <complex128> (17+0i)", m.Description())
+
+ // Identical value
+ err = m.Matches(complex128(17))
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType complex128
+ err = m.Matches(myType(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) EmptyComparableArrays() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo([0]int{})
+ ExpectEq("identical to <[0]int> []", m.Description())
+
+ // Identical value
+ err = m.Matches([0]int{})
+ ExpectEq(nil, err)
+
+ // Length too long
+ err = m.Matches([1]int{17})
+ ExpectThat(err, Error(Equals("which is of type [1]int")))
+
+ // Element type alias
+ type myType int
+ err = m.Matches([0]myType{})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type [0]oglematchers_test.myType")))
+
+ // Completely wrong element type
+ err = m.Matches([0]int32{})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type [0]int32")))
+}
+
+func (t *IdenticalToTest) NonEmptyComparableArrays() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo([2]int{17, 19})
+ ExpectEq("identical to <[2]int> [17 19]", m.Description())
+
+ // Identical value
+ err = m.Matches([2]int{17, 19})
+ ExpectEq(nil, err)
+
+ // Length too short
+ err = m.Matches([1]int{17})
+ ExpectThat(err, Error(Equals("which is of type [1]int")))
+
+ // Length too long
+ err = m.Matches([3]int{17, 19, 23})
+ ExpectThat(err, Error(Equals("which is of type [3]int")))
+
+ // First element different
+ err = m.Matches([2]int{13, 19})
+ ExpectThat(err, Error(Equals("")))
+
+ // Second element different
+ err = m.Matches([2]int{17, 23})
+ ExpectThat(err, Error(Equals("")))
+
+ // Element type alias
+ type myType int
+ err = m.Matches([2]myType{17, 19})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type [2]oglematchers_test.myType")))
+
+ // Completely wrong element type
+ err = m.Matches([2]int32{17, 19})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type [2]int32")))
+}
+
+func (t *IdenticalToTest) NonEmptyArraysOfComparableArrays() {
+ var m Matcher
+ var err error
+
+ x := [2][2]int{
+ [2]int{17, 19},
+ [2]int{23, 29},
+ }
+ m = IdenticalTo(x)
+ ExpectEq("identical to <[2][2]int> [[17 19] [23 29]]", m.Description())
+
+ // Identical value
+ err = m.Matches([2][2]int{[2]int{17, 19}, [2]int{23, 29}})
+ ExpectEq(nil, err)
+
+ // Outer length too short
+ err = m.Matches([1][2]int{[2]int{17, 19}})
+ ExpectThat(err, Error(Equals("which is of type [1][2]int")))
+
+ // Inner length too short
+ err = m.Matches([2][1]int{[1]int{17}, [1]int{23}})
+ ExpectThat(err, Error(Equals("which is of type [2][1]int")))
+
+ // First element different
+ err = m.Matches([2][2]int{[2]int{13, 19}, [2]int{23, 29}})
+ ExpectThat(err, Error(Equals("")))
+
+ // Element type alias
+ type myType int
+ err = m.Matches([2][2]myType{[2]myType{17, 19}, [2]myType{23, 29}})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type [2][2]oglematchers_test.myType")))
+}
+
+func (t *IdenticalToTest) NonComparableArrays() {
+ x := [0]func(){}
+ f := func() { IdenticalTo(x) }
+ ExpectThat(f, Panics(HasSubstr("is not comparable")))
+}
+
+func (t *IdenticalToTest) ArraysOfNonComparableArrays() {
+ x := [0][0]func(){}
+ f := func() { IdenticalTo(x) }
+ ExpectThat(f, Panics(HasSubstr("is not comparable")))
+}
+
+func (t *IdenticalToTest) Strings() {
+ var m Matcher
+ var err error
+
+ m = IdenticalTo("taco")
+ ExpectEq("identical to <string> taco", m.Description())
+
+ // Identical value
+ err = m.Matches("ta" + "co")
+ ExpectEq(nil, err)
+
+ // Type alias
+ type myType string
+ err = m.Matches(myType("taco"))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) ComparableStructs() {
+ var m Matcher
+ var err error
+
+ type subStruct struct {
+ i int
+ }
+
+ type myStruct struct {
+ u uint
+ s subStruct
+ }
+
+ x := myStruct{17, subStruct{19}}
+ m = IdenticalTo(x)
+ ExpectEq("identical to <oglematchers_test.myStruct> {17 {19}}", m.Description())
+
+ // Identical value
+ err = m.Matches(myStruct{17, subStruct{19}})
+ ExpectEq(nil, err)
+
+ // Wrong outer field
+ err = m.Matches(myStruct{13, subStruct{19}})
+ ExpectThat(err, Error(Equals("")))
+
+ // Wrong inner field
+ err = m.Matches(myStruct{17, subStruct{23}})
+ ExpectThat(err, Error(Equals("")))
+
+ // Type alias
+ type myType myStruct
+ err = m.Matches(myType{17, subStruct{19}})
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) NonComparableStructs() {
+ type subStruct struct {
+ s []int
+ }
+
+ type myStruct struct {
+ u uint
+ s subStruct
+ }
+
+ x := myStruct{17, subStruct{[]int{19}}}
+ f := func() { IdenticalTo(x) }
+ ExpectThat(f, Panics(AllOf(HasSubstr("IdenticalTo"), HasSubstr("comparable"))))
+}
+
+func (t *IdenticalToTest) NilUnsafePointer() {
+ var m Matcher
+ var err error
+
+ x := unsafe.Pointer(nil)
+ m = IdenticalTo(x)
+ ExpectEq(fmt.Sprintf("identical to <unsafe.Pointer> %v", x), m.Description())
+
+ // Identical value
+ err = m.Matches(unsafe.Pointer(nil))
+ ExpectEq(nil, err)
+
+ // Wrong value
+ j := 17
+ err = m.Matches(unsafe.Pointer(&j))
+ ExpectThat(err, Error(Equals("")))
+
+ // Type alias
+ type myType unsafe.Pointer
+ err = m.Matches(myType(unsafe.Pointer(nil)))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) NonNilUnsafePointer() {
+ var m Matcher
+ var err error
+
+ i := 17
+ x := unsafe.Pointer(&i)
+ m = IdenticalTo(x)
+ ExpectEq(fmt.Sprintf("identical to <unsafe.Pointer> %v", x), m.Description())
+
+ // Identical value
+ err = m.Matches(unsafe.Pointer(&i))
+ ExpectEq(nil, err)
+
+ // Nil value
+ err = m.Matches(unsafe.Pointer(nil))
+ ExpectThat(err, Error(Equals("")))
+
+ // Wrong value
+ j := 17
+ err = m.Matches(unsafe.Pointer(&j))
+ ExpectThat(err, Error(Equals("")))
+
+ // Type alias
+ type myType unsafe.Pointer
+ err = m.Matches(myType(unsafe.Pointer(&i)))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type oglematchers_test.myType")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
+
+func (t *IdenticalToTest) IntAlias() {
+ var m Matcher
+ var err error
+
+ type intAlias int
+
+ m = IdenticalTo(intAlias(17))
+ ExpectEq("identical to <oglematchers_test.intAlias> 17", m.Description())
+
+ // Identical value
+ err = m.Matches(intAlias(17))
+ ExpectEq(nil, err)
+
+ // Int
+ err = m.Matches(int(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int")))
+
+ // Completely wrong type
+ err = m.Matches(int32(17))
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("which is of type int32")))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go
new file mode 100644
index 00000000000..8402cdeaf09
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go
@@ -0,0 +1,41 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// LessOrEqual returns a matcher that matches integer, floating point, or
+// string values v such that v <= x. Comparison is not defined between numeric
+// and string types, but is defined between all integer and floating point
+// types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// LessOrEqual will panic.
+func LessOrEqual(x interface{}) Matcher {
+ desc := fmt.Sprintf("less than or equal to %v", x)
+
+ // Special case: make it clear that strings are strings.
+ if reflect.TypeOf(x).Kind() == reflect.String {
+ desc = fmt.Sprintf("less than or equal to \"%s\"", x)
+ }
+
+ // Put LessThan last so that its error messages will be used in the event of
+ // failure.
+ return transformDescription(AnyOf(Equals(x), LessThan(x)), desc)
+}
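Editorial note (not part of the vendored patch): a minimal usage sketch of the matcher defined above. It assumes the upstream package path github.com/jacobsa/oglematchers, the original home of this code; the vendored internal path in this diff cannot be imported from outside the assertions module.

package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

func main() {
	// LessOrEqual(17) composes AnyOf(Equals(17), LessThan(17)) and rewrites
	// the description, so Matches returns nil exactly when the candidate <= 17.
	m := oglematchers.LessOrEqual(17)

	fmt.Println(m.Description())      // "less than or equal to 17"
	fmt.Println(m.Matches(16) == nil) // true
	fmt.Println(m.Matches(17) == nil) // true
	fmt.Println(m.Matches(18) == nil) // false
}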
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal_test.go
new file mode 100644
index 00000000000..a1a2ae7d60e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal_test.go
@@ -0,0 +1,1077 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "math"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type LessOrEqualTest struct {
+}
+
+func init() { RegisterTestSuite(&LessOrEqualTest{}) }
+
+type leTestCase struct {
+ candidate interface{}
+ expectedResult bool
+ shouldBeFatal bool
+ expectedError string
+}
+
+func (t *LessOrEqualTest) checkTestCases(matcher Matcher, cases []leTestCase) {
+ for i, c := range cases {
+ err := matcher.Matches(c.candidate)
+
+ ExpectThat(
+ (err == nil),
+ Equals(c.expectedResult),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ if err == nil {
+ continue
+ }
+
+ _, isFatal := err.(*FatalError)
+ ExpectEq(
+ c.shouldBeFatal,
+ isFatal,
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ ExpectThat(
+ err,
+ Error(Equals(c.expectedError)),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessOrEqualTest) IntegerCandidateBadTypes() {
+ matcher := LessOrEqual(int(-150))
+
+ cases := []leTestCase{
+ leTestCase{true, false, true, "which is not comparable"},
+ leTestCase{complex64(-151), false, true, "which is not comparable"},
+ leTestCase{complex128(-151), false, true, "which is not comparable"},
+ leTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ leTestCase{make(chan int), false, true, "which is not comparable"},
+ leTestCase{func() {}, false, true, "which is not comparable"},
+ leTestCase{map[int]int{}, false, true, "which is not comparable"},
+ leTestCase{&leTestCase{}, false, true, "which is not comparable"},
+ leTestCase{make([]int, 0), false, true, "which is not comparable"},
+ leTestCase{"-151", false, true, "which is not comparable"},
+ leTestCase{leTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) FloatCandidateBadTypes() {
+ matcher := LessOrEqual(float32(-150))
+
+ cases := []leTestCase{
+ leTestCase{true, false, true, "which is not comparable"},
+ leTestCase{complex64(-151), false, true, "which is not comparable"},
+ leTestCase{complex128(-151), false, true, "which is not comparable"},
+ leTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ leTestCase{make(chan int), false, true, "which is not comparable"},
+ leTestCase{func() {}, false, true, "which is not comparable"},
+ leTestCase{map[int]int{}, false, true, "which is not comparable"},
+ leTestCase{&leTestCase{}, false, true, "which is not comparable"},
+ leTestCase{make([]int, 0), false, true, "which is not comparable"},
+ leTestCase{"-151", false, true, "which is not comparable"},
+ leTestCase{leTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) StringCandidateBadTypes() {
+ matcher := LessOrEqual("17")
+
+ cases := []leTestCase{
+ leTestCase{true, false, true, "which is not comparable"},
+ leTestCase{int(0), false, true, "which is not comparable"},
+ leTestCase{int8(0), false, true, "which is not comparable"},
+ leTestCase{int16(0), false, true, "which is not comparable"},
+ leTestCase{int32(0), false, true, "which is not comparable"},
+ leTestCase{int64(0), false, true, "which is not comparable"},
+ leTestCase{uint(0), false, true, "which is not comparable"},
+ leTestCase{uint8(0), false, true, "which is not comparable"},
+ leTestCase{uint16(0), false, true, "which is not comparable"},
+ leTestCase{uint32(0), false, true, "which is not comparable"},
+ leTestCase{uint64(0), false, true, "which is not comparable"},
+ leTestCase{float32(0), false, true, "which is not comparable"},
+ leTestCase{float64(0), false, true, "which is not comparable"},
+ leTestCase{complex64(-151), false, true, "which is not comparable"},
+ leTestCase{complex128(-151), false, true, "which is not comparable"},
+ leTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ leTestCase{make(chan int), false, true, "which is not comparable"},
+ leTestCase{func() {}, false, true, "which is not comparable"},
+ leTestCase{map[int]int{}, false, true, "which is not comparable"},
+ leTestCase{&leTestCase{}, false, true, "which is not comparable"},
+ leTestCase{make([]int, 0), false, true, "which is not comparable"},
+ leTestCase{leTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) BadArgument() {
+ panicked := false
+
+ defer func() {
+ ExpectThat(panicked, Equals(true))
+ }()
+
+ defer func() {
+ if r := recover(); r != nil {
+ panicked = true
+ }
+ }()
+
+ LessOrEqual(complex128(0))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessOrEqualTest) NegativeIntegerLiteral() {
+ matcher := LessOrEqual(-150)
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to -150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-(1 << 30), true, false, ""},
+ leTestCase{-151, true, false, ""},
+ leTestCase{-150, true, false, ""},
+ leTestCase{-149, false, false, ""},
+ leTestCase{0, false, false, ""},
+ leTestCase{17, false, false, ""},
+
+ leTestCase{int(-(1 << 30)), true, false, ""},
+ leTestCase{int(-151), true, false, ""},
+ leTestCase{int(-150), true, false, ""},
+ leTestCase{int(-149), false, false, ""},
+ leTestCase{int(0), false, false, ""},
+ leTestCase{int(17), false, false, ""},
+
+ leTestCase{int8(-127), false, false, ""},
+ leTestCase{int8(0), false, false, ""},
+ leTestCase{int8(17), false, false, ""},
+
+ leTestCase{int16(-(1 << 14)), true, false, ""},
+ leTestCase{int16(-151), true, false, ""},
+ leTestCase{int16(-150), true, false, ""},
+ leTestCase{int16(-149), false, false, ""},
+ leTestCase{int16(0), false, false, ""},
+ leTestCase{int16(17), false, false, ""},
+
+ leTestCase{int32(-(1 << 30)), true, false, ""},
+ leTestCase{int32(-151), true, false, ""},
+ leTestCase{int32(-150), true, false, ""},
+ leTestCase{int32(-149), false, false, ""},
+ leTestCase{int32(0), false, false, ""},
+ leTestCase{int32(17), false, false, ""},
+
+ leTestCase{int64(-(1 << 30)), true, false, ""},
+ leTestCase{int64(-151), true, false, ""},
+ leTestCase{int64(-150), true, false, ""},
+ leTestCase{int64(-149), false, false, ""},
+ leTestCase{int64(0), false, false, ""},
+ leTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint((1 << 32) - 151), false, false, ""},
+ leTestCase{uint(0), false, false, ""},
+ leTestCase{uint(17), false, false, ""},
+
+ leTestCase{uint8(0), false, false, ""},
+ leTestCase{uint8(17), false, false, ""},
+ leTestCase{uint8(253), false, false, ""},
+
+ leTestCase{uint16((1 << 16) - 151), false, false, ""},
+ leTestCase{uint16(0), false, false, ""},
+ leTestCase{uint16(17), false, false, ""},
+
+ leTestCase{uint32((1 << 32) - 151), false, false, ""},
+ leTestCase{uint32(0), false, false, ""},
+ leTestCase{uint32(17), false, false, ""},
+
+ leTestCase{uint64((1 << 64) - 151), false, false, ""},
+ leTestCase{uint64(0), false, false, ""},
+ leTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-(1 << 30)), true, false, ""},
+ leTestCase{float32(-151), true, false, ""},
+ leTestCase{float32(-150.1), true, false, ""},
+ leTestCase{float32(-150), true, false, ""},
+ leTestCase{float32(-149.9), false, false, ""},
+ leTestCase{float32(0), false, false, ""},
+ leTestCase{float32(17), false, false, ""},
+ leTestCase{float32(160), false, false, ""},
+
+ leTestCase{float64(-(1 << 30)), true, false, ""},
+ leTestCase{float64(-151), true, false, ""},
+ leTestCase{float64(-150.1), true, false, ""},
+ leTestCase{float64(-150), true, false, ""},
+ leTestCase{float64(-149.9), false, false, ""},
+ leTestCase{float64(0), false, false, ""},
+ leTestCase{float64(17), false, false, ""},
+ leTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) ZeroIntegerLiteral() {
+ matcher := LessOrEqual(0)
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 0"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-(1 << 30), true, false, ""},
+ leTestCase{-1, true, false, ""},
+ leTestCase{0, true, false, ""},
+ leTestCase{1, false, false, ""},
+ leTestCase{17, false, false, ""},
+ leTestCase{(1 << 30), false, false, ""},
+
+ leTestCase{int(-(1 << 30)), true, false, ""},
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(0), true, false, ""},
+ leTestCase{int(1), false, false, ""},
+ leTestCase{int(17), false, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(0), true, false, ""},
+ leTestCase{int8(1), false, false, ""},
+
+ leTestCase{int16(-(1 << 14)), true, false, ""},
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(0), true, false, ""},
+ leTestCase{int16(1), false, false, ""},
+ leTestCase{int16(17), false, false, ""},
+
+ leTestCase{int32(-(1 << 30)), true, false, ""},
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(0), true, false, ""},
+ leTestCase{int32(1), false, false, ""},
+ leTestCase{int32(17), false, false, ""},
+
+ leTestCase{int64(-(1 << 30)), true, false, ""},
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(0), true, false, ""},
+ leTestCase{int64(1), false, false, ""},
+ leTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint((1 << 32) - 1), false, false, ""},
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(1), false, false, ""},
+ leTestCase{uint(17), false, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(1), false, false, ""},
+ leTestCase{uint8(17), false, false, ""},
+ leTestCase{uint8(253), false, false, ""},
+
+ leTestCase{uint16((1 << 16) - 1), false, false, ""},
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(1), false, false, ""},
+ leTestCase{uint16(17), false, false, ""},
+
+ leTestCase{uint32((1 << 32) - 1), false, false, ""},
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(1), false, false, ""},
+ leTestCase{uint32(17), false, false, ""},
+
+ leTestCase{uint64((1 << 64) - 1), false, false, ""},
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(1), false, false, ""},
+ leTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-(1 << 30)), true, false, ""},
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(-0.1), true, false, ""},
+ leTestCase{float32(-0.0), true, false, ""},
+ leTestCase{float32(0), true, false, ""},
+ leTestCase{float32(0.1), false, false, ""},
+ leTestCase{float32(17), false, false, ""},
+ leTestCase{float32(160), false, false, ""},
+
+ leTestCase{float64(-(1 << 30)), true, false, ""},
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(-0.1), true, false, ""},
+ leTestCase{float64(-0), true, false, ""},
+ leTestCase{float64(0), true, false, ""},
+ leTestCase{float64(0.1), false, false, ""},
+ leTestCase{float64(17), false, false, ""},
+ leTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) PositiveIntegerLiteral() {
+ matcher := LessOrEqual(150)
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{149, true, false, ""},
+ leTestCase{150, true, false, ""},
+ leTestCase{151, false, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(149), true, false, ""},
+ leTestCase{int(150), true, false, ""},
+ leTestCase{int(151), false, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(0), true, false, ""},
+ leTestCase{int8(17), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(149), true, false, ""},
+ leTestCase{int16(150), true, false, ""},
+ leTestCase{int16(151), false, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(149), true, false, ""},
+ leTestCase{int32(150), true, false, ""},
+ leTestCase{int32(151), false, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(149), true, false, ""},
+ leTestCase{int64(150), true, false, ""},
+ leTestCase{int64(151), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(149), true, false, ""},
+ leTestCase{uint(150), true, false, ""},
+ leTestCase{uint(151), false, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(127), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(149), true, false, ""},
+ leTestCase{uint16(150), true, false, ""},
+ leTestCase{uint16(151), false, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(149), true, false, ""},
+ leTestCase{uint32(150), true, false, ""},
+ leTestCase{uint32(151), false, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(149), true, false, ""},
+ leTestCase{uint64(150), true, false, ""},
+ leTestCase{uint64(151), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(149), true, false, ""},
+ leTestCase{float32(149.9), true, false, ""},
+ leTestCase{float32(150), true, false, ""},
+ leTestCase{float32(150.1), false, false, ""},
+ leTestCase{float32(151), false, false, ""},
+
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(149), true, false, ""},
+ leTestCase{float64(149.9), true, false, ""},
+ leTestCase{float64(150), true, false, ""},
+ leTestCase{float64(150.1), false, false, ""},
+ leTestCase{float64(151), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Float literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessOrEqualTest) NegativeFloatLiteral() {
+ matcher := LessOrEqual(-150.1)
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to -150.1"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-(1 << 30), true, false, ""},
+ leTestCase{-151, true, false, ""},
+ leTestCase{-150.1, true, false, ""},
+ leTestCase{-150, false, false, ""},
+ leTestCase{-149, false, false, ""},
+ leTestCase{0, false, false, ""},
+ leTestCase{17, false, false, ""},
+
+ leTestCase{int(-(1 << 30)), true, false, ""},
+ leTestCase{int(-151), true, false, ""},
+ leTestCase{int(-150), false, false, ""},
+ leTestCase{int(-149), false, false, ""},
+ leTestCase{int(0), false, false, ""},
+ leTestCase{int(17), false, false, ""},
+
+ leTestCase{int8(-127), false, false, ""},
+ leTestCase{int8(0), false, false, ""},
+ leTestCase{int8(17), false, false, ""},
+
+ leTestCase{int16(-(1 << 14)), true, false, ""},
+ leTestCase{int16(-151), true, false, ""},
+ leTestCase{int16(-150), false, false, ""},
+ leTestCase{int16(-149), false, false, ""},
+ leTestCase{int16(0), false, false, ""},
+ leTestCase{int16(17), false, false, ""},
+
+ leTestCase{int32(-(1 << 30)), true, false, ""},
+ leTestCase{int32(-151), true, false, ""},
+ leTestCase{int32(-150), false, false, ""},
+ leTestCase{int32(-149), false, false, ""},
+ leTestCase{int32(0), false, false, ""},
+ leTestCase{int32(17), false, false, ""},
+
+ leTestCase{int64(-(1 << 30)), true, false, ""},
+ leTestCase{int64(-151), true, false, ""},
+ leTestCase{int64(-150), false, false, ""},
+ leTestCase{int64(-149), false, false, ""},
+ leTestCase{int64(0), false, false, ""},
+ leTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint((1 << 32) - 151), false, false, ""},
+ leTestCase{uint(0), false, false, ""},
+ leTestCase{uint(17), false, false, ""},
+
+ leTestCase{uint8(0), false, false, ""},
+ leTestCase{uint8(17), false, false, ""},
+ leTestCase{uint8(253), false, false, ""},
+
+ leTestCase{uint16((1 << 16) - 151), false, false, ""},
+ leTestCase{uint16(0), false, false, ""},
+ leTestCase{uint16(17), false, false, ""},
+
+ leTestCase{uint32((1 << 32) - 151), false, false, ""},
+ leTestCase{uint32(0), false, false, ""},
+ leTestCase{uint32(17), false, false, ""},
+
+ leTestCase{uint64((1 << 64) - 151), false, false, ""},
+ leTestCase{uint64(0), false, false, ""},
+ leTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-(1 << 30)), true, false, ""},
+ leTestCase{float32(-151), true, false, ""},
+ leTestCase{float32(-150.2), true, false, ""},
+ leTestCase{float32(-150.1), true, false, ""},
+ leTestCase{float32(-150), false, false, ""},
+ leTestCase{float32(0), false, false, ""},
+ leTestCase{float32(17), false, false, ""},
+ leTestCase{float32(160), false, false, ""},
+
+ leTestCase{float64(-(1 << 30)), true, false, ""},
+ leTestCase{float64(-151), true, false, ""},
+ leTestCase{float64(-150.2), true, false, ""},
+ leTestCase{float64(-150.1), true, false, ""},
+ leTestCase{float64(-150), false, false, ""},
+ leTestCase{float64(0), false, false, ""},
+ leTestCase{float64(17), false, false, ""},
+ leTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) PositiveFloatLiteral() {
+ matcher := LessOrEqual(149.9)
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 149.9"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{149, true, false, ""},
+ leTestCase{149.9, true, false, ""},
+ leTestCase{150, false, false, ""},
+ leTestCase{151, false, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(149), true, false, ""},
+ leTestCase{int(150), false, false, ""},
+ leTestCase{int(151), false, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(0), true, false, ""},
+ leTestCase{int8(17), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(149), true, false, ""},
+ leTestCase{int16(150), false, false, ""},
+ leTestCase{int16(151), false, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(149), true, false, ""},
+ leTestCase{int32(150), false, false, ""},
+ leTestCase{int32(151), false, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(149), true, false, ""},
+ leTestCase{int64(150), false, false, ""},
+ leTestCase{int64(151), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(149), true, false, ""},
+ leTestCase{uint(150), false, false, ""},
+ leTestCase{uint(151), false, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(127), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(149), true, false, ""},
+ leTestCase{uint16(150), false, false, ""},
+ leTestCase{uint16(151), false, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(149), true, false, ""},
+ leTestCase{uint32(150), false, false, ""},
+ leTestCase{uint32(151), false, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(149), true, false, ""},
+ leTestCase{uint64(150), false, false, ""},
+ leTestCase{uint64(151), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(149), true, false, ""},
+ leTestCase{float32(149.8), true, false, ""},
+ leTestCase{float32(149.9), true, false, ""},
+ leTestCase{float32(150), false, false, ""},
+ leTestCase{float32(151), false, false, ""},
+
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(149), true, false, ""},
+ leTestCase{float64(149.8), true, false, ""},
+ leTestCase{float64(149.9), true, false, ""},
+ leTestCase{float64(150), false, false, ""},
+ leTestCase{float64(151), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Subtle cases
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessOrEqualTest) Int64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessOrEqual(int64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{kTwoTo25 + 0, true, false, ""},
+ leTestCase{kTwoTo25 + 1, true, false, ""},
+ leTestCase{kTwoTo25 + 2, false, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(0), true, false, ""},
+ leTestCase{int16(32767), true, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(255), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(65535), true, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ leTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) Int64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessOrEqual(int64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{1 << 30, true, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(math.MaxInt32), true, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(0), true, false, ""},
+ leTestCase{int16(32767), true, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(math.MaxInt32), true, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(math.MaxUint32), true, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(255), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(65535), true, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(math.MaxUint32), true, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Floating point.
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) Uint64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessOrEqual(uint64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{kTwoTo25 + 0, true, false, ""},
+ leTestCase{kTwoTo25 + 1, true, false, ""},
+ leTestCase{kTwoTo25 + 2, false, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(0), true, false, ""},
+ leTestCase{int16(32767), true, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int32(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(255), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(65535), true, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint32(kTwoTo25 + 2), false, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ leTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) Uint64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessOrEqual(uint64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{-1, true, false, ""},
+ leTestCase{1 << 30, true, false, ""},
+
+ leTestCase{int(-1), true, false, ""},
+ leTestCase{int(math.MaxInt32), true, false, ""},
+
+ leTestCase{int8(-1), true, false, ""},
+ leTestCase{int8(127), true, false, ""},
+
+ leTestCase{int16(-1), true, false, ""},
+ leTestCase{int16(0), true, false, ""},
+ leTestCase{int16(32767), true, false, ""},
+
+ leTestCase{int32(-1), true, false, ""},
+ leTestCase{int32(math.MaxInt32), true, false, ""},
+
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint(0), true, false, ""},
+ leTestCase{uint(math.MaxUint32), true, false, ""},
+
+ leTestCase{uint8(0), true, false, ""},
+ leTestCase{uint8(255), true, false, ""},
+
+ leTestCase{uint16(0), true, false, ""},
+ leTestCase{uint16(65535), true, false, ""},
+
+ leTestCase{uint32(0), true, false, ""},
+ leTestCase{uint32(math.MaxUint32), true, false, ""},
+
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Floating point.
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) Float32AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessOrEqual(float32(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 3.3554432e+07"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo25 - 2), true, false, ""},
+ leTestCase{int64(kTwoTo25 - 1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 2), true, false, ""},
+ leTestCase{int64(kTwoTo25 + 3), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 - 2), true, false, ""},
+ leTestCase{uint64(kTwoTo25 - 1), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 2), true, false, ""},
+ leTestCase{uint64(kTwoTo25 + 3), false, false, ""},
+
+ // Floating point.
+ leTestCase{float32(-1), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 2), true, false, ""},
+ leTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 2), true, false, ""},
+ leTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) Float64AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessOrEqual(float64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to 1.8014398509481984e+16"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ // Signed integers.
+ leTestCase{int64(-1), true, false, ""},
+ leTestCase{int64(kTwoTo54 - 2), true, false, ""},
+ leTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 2), true, false, ""},
+ leTestCase{int64(kTwoTo54 + 3), false, false, ""},
+
+ // Unsigned integers.
+ leTestCase{uint64(0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 - 2), true, false, ""},
+ leTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 2), true, false, ""},
+ leTestCase{uint64(kTwoTo54 + 3), false, false, ""},
+
+ // Floating point.
+ leTestCase{float64(-1), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 - 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 0), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 1), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 2), true, false, ""},
+ leTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// String literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessOrEqualTest) EmptyString() {
+ matcher := LessOrEqual("")
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to \"\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ leTestCase{"", true, false, ""},
+ leTestCase{"\x00", false, false, ""},
+ leTestCase{"a", false, false, ""},
+ leTestCase{"foo", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) SingleNullByte() {
+ matcher := LessOrEqual("\x00")
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to \"\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ leTestCase{"", true, false, ""},
+ leTestCase{"\x00", true, false, ""},
+ leTestCase{"\x00\x00", false, false, ""},
+ leTestCase{"a", false, false, ""},
+ leTestCase{"foo", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessOrEqualTest) LongerString() {
+ matcher := LessOrEqual("foo\x00")
+ desc := matcher.Description()
+ expectedDesc := "less than or equal to \"foo\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []leTestCase{
+ leTestCase{"", true, false, ""},
+ leTestCase{"\x00", true, false, ""},
+ leTestCase{"bar", true, false, ""},
+ leTestCase{"foo", true, false, ""},
+ leTestCase{"foo\x00", true, false, ""},
+ leTestCase{"foo\x00\x00", false, false, ""},
+ leTestCase{"fooa", false, false, ""},
+ leTestCase{"qux", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
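Editorial note (not part of the vendored patch): the "not exactly representable" cases above all hinge on the same rounding fact, shown here in a dependency-free sketch.

package main

import "fmt"

func main() {
	const twoTo25 = 1 << 25 // 33554432; a float32 significand has only 24 bits

	// 2^25-1 through 2^25+2 all round to the same float32 (3.3554432e+07),
	// so a float32 candidate cannot distinguish them; 2^25+3 rounds upward.
	for _, n := range []int64{twoTo25 - 1, twoTo25, twoTo25 + 1, twoTo25 + 2, twoTo25 + 3} {
		fmt.Println(n, float32(n))
	}
}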
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_than.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_than.go
new file mode 100644
index 00000000000..8258e45d99d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_than.go
@@ -0,0 +1,152 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "reflect"
+)
+
+// LessThan returns a matcher that matches integer, floating point, or string
+// values v such that v < x. Comparison is not defined between numeric and
+// string types, but is defined between all integer and floating point types.
+//
+// x must itself be an integer, floating point, or string type; otherwise,
+// LessThan will panic.
+func LessThan(x interface{}) Matcher {
+ v := reflect.ValueOf(x)
+ kind := v.Kind()
+
+ switch {
+ case isInteger(v):
+ case isFloat(v):
+ case kind == reflect.String:
+
+ default:
+ panic(fmt.Sprintf("LessThan: unexpected kind %v", kind))
+ }
+
+ return &lessThanMatcher{v}
+}
+
+type lessThanMatcher struct {
+ limit reflect.Value
+}
+
+func (m *lessThanMatcher) Description() string {
+ // Special case: make it clear that strings are strings.
+ if m.limit.Kind() == reflect.String {
+ return fmt.Sprintf("less than \"%s\"", m.limit.String())
+ }
+
+ return fmt.Sprintf("less than %v", m.limit.Interface())
+}
+
+func compareIntegers(v1, v2 reflect.Value) (err error) {
+ err = errors.New("")
+
+ switch {
+ case isSignedInteger(v1) && isSignedInteger(v2):
+ if v1.Int() < v2.Int() {
+ err = nil
+ }
+ return
+
+ case isSignedInteger(v1) && isUnsignedInteger(v2):
+ if v1.Int() < 0 || uint64(v1.Int()) < v2.Uint() {
+ err = nil
+ }
+ return
+
+ case isUnsignedInteger(v1) && isSignedInteger(v2):
+ if v1.Uint() <= math.MaxInt64 && int64(v1.Uint()) < v2.Int() {
+ err = nil
+ }
+ return
+
+ case isUnsignedInteger(v1) && isUnsignedInteger(v2):
+ if v1.Uint() < v2.Uint() {
+ err = nil
+ }
+ return
+ }
+
+ panic(fmt.Sprintf("compareIntegers: %v %v", v1, v2))
+}
+
+func getFloat(v reflect.Value) float64 {
+ switch {
+ case isSignedInteger(v):
+ return float64(v.Int())
+
+ case isUnsignedInteger(v):
+ return float64(v.Uint())
+
+ case isFloat(v):
+ return v.Float()
+ }
+
+ panic(fmt.Sprintf("getFloat: %v", v))
+}
+
+func (m *lessThanMatcher) Matches(c interface{}) (err error) {
+ v1 := reflect.ValueOf(c)
+ v2 := m.limit
+
+ err = errors.New("")
+
+ // Handle strings as a special case.
+ if v1.Kind() == reflect.String && v2.Kind() == reflect.String {
+ if v1.String() < v2.String() {
+ err = nil
+ }
+ return
+ }
+
+ // If we get here, we require that we are dealing with integers or floats.
+ v1Legal := isInteger(v1) || isFloat(v1)
+ v2Legal := isInteger(v2) || isFloat(v2)
+ if !v1Legal || !v2Legal {
+ err = NewFatalError("which is not comparable")
+ return
+ }
+
+ // Handle the various comparison cases.
+ switch {
+ // Both integers
+ case isInteger(v1) && isInteger(v2):
+ return compareIntegers(v1, v2)
+
+ // At least one float32
+ case v1.Kind() == reflect.Float32 || v2.Kind() == reflect.Float32:
+ if float32(getFloat(v1)) < float32(getFloat(v2)) {
+ err = nil
+ }
+ return
+
+ // At least one float64
+ case v1.Kind() == reflect.Float64 || v2.Kind() == reflect.Float64:
+ if getFloat(v1) < getFloat(v2) {
+ err = nil
+ }
+ return
+ }
+
+ // We shouldn't get here.
+ panic(fmt.Sprintf("lessThanMatcher.Matches: Shouldn't get here: %v %v", v1, v2))
+}
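Editorial note (not part of the vendored patch): a sketch of the cross-kind comparisons implemented by compareIntegers and getFloat above. It assumes the upstream package path github.com/jacobsa/oglematchers rather than the vendored internal path.

package main

import (
	"fmt"

	"github.com/jacobsa/oglematchers"
)

func main() {
	m := oglematchers.LessThan(150) // limit is an int

	fmt.Println(m.Matches(int8(17)) == nil)       // true: signed vs. signed
	fmt.Println(m.Matches(uint64(149)) == nil)    // true: unsigned candidate vs. signed limit
	fmt.Println(m.Matches(float32(149.9)) == nil) // true: both sides promoted through the float32 path
	fmt.Println(m.Matches("149"))                 // "which is not comparable": strings never compare with numbers
}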
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_than_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_than_test.go
new file mode 100644
index 00000000000..59f5b7f56bd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/less_than_test.go
@@ -0,0 +1,1057 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "math"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type LessThanTest struct {
+}
+
+func init() { RegisterTestSuite(&LessThanTest{}) }
+
+type ltTestCase struct {
+ candidate interface{}
+ expectedResult bool
+ shouldBeFatal bool
+ expectedError string
+}
+
+func (t *LessThanTest) checkTestCases(matcher Matcher, cases []ltTestCase) {
+ for i, c := range cases {
+ err := matcher.Matches(c.candidate)
+
+ ExpectThat(
+ (err == nil),
+ Equals(c.expectedResult),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ if err == nil {
+ continue
+ }
+
+ _, isFatal := err.(*FatalError)
+ ExpectEq(
+ c.shouldBeFatal,
+ isFatal,
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+
+ ExpectThat(
+ err,
+ Error(Equals(c.expectedError)),
+ "Case %d (candidate %v)",
+ i,
+ c.candidate)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessThanTest) IntegerCandidateBadTypes() {
+ matcher := LessThan(int(-150))
+
+ cases := []ltTestCase{
+ ltTestCase{true, false, true, "which is not comparable"},
+ ltTestCase{complex64(-151), false, true, "which is not comparable"},
+ ltTestCase{complex128(-151), false, true, "which is not comparable"},
+ ltTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ ltTestCase{make(chan int), false, true, "which is not comparable"},
+ ltTestCase{func() {}, false, true, "which is not comparable"},
+ ltTestCase{map[int]int{}, false, true, "which is not comparable"},
+ ltTestCase{&ltTestCase{}, false, true, "which is not comparable"},
+ ltTestCase{make([]int, 0), false, true, "which is not comparable"},
+ ltTestCase{"-151", false, true, "which is not comparable"},
+ ltTestCase{ltTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) FloatCandidateBadTypes() {
+ matcher := LessThan(float32(-150))
+
+ cases := []ltTestCase{
+ ltTestCase{true, false, true, "which is not comparable"},
+ ltTestCase{complex64(-151), false, true, "which is not comparable"},
+ ltTestCase{complex128(-151), false, true, "which is not comparable"},
+ ltTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ ltTestCase{make(chan int), false, true, "which is not comparable"},
+ ltTestCase{func() {}, false, true, "which is not comparable"},
+ ltTestCase{map[int]int{}, false, true, "which is not comparable"},
+ ltTestCase{&ltTestCase{}, false, true, "which is not comparable"},
+ ltTestCase{make([]int, 0), false, true, "which is not comparable"},
+ ltTestCase{"-151", false, true, "which is not comparable"},
+ ltTestCase{ltTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) StringCandidateBadTypes() {
+ matcher := LessThan("17")
+
+ cases := []ltTestCase{
+ ltTestCase{true, false, true, "which is not comparable"},
+ ltTestCase{int(0), false, true, "which is not comparable"},
+ ltTestCase{int8(0), false, true, "which is not comparable"},
+ ltTestCase{int16(0), false, true, "which is not comparable"},
+ ltTestCase{int32(0), false, true, "which is not comparable"},
+ ltTestCase{int64(0), false, true, "which is not comparable"},
+ ltTestCase{uint(0), false, true, "which is not comparable"},
+ ltTestCase{uint8(0), false, true, "which is not comparable"},
+ ltTestCase{uint16(0), false, true, "which is not comparable"},
+ ltTestCase{uint32(0), false, true, "which is not comparable"},
+ ltTestCase{uint64(0), false, true, "which is not comparable"},
+ ltTestCase{float32(0), false, true, "which is not comparable"},
+ ltTestCase{float64(0), false, true, "which is not comparable"},
+ ltTestCase{complex64(-151), false, true, "which is not comparable"},
+ ltTestCase{complex128(-151), false, true, "which is not comparable"},
+ ltTestCase{[...]int{-151}, false, true, "which is not comparable"},
+ ltTestCase{make(chan int), false, true, "which is not comparable"},
+ ltTestCase{func() {}, false, true, "which is not comparable"},
+ ltTestCase{map[int]int{}, false, true, "which is not comparable"},
+ ltTestCase{&ltTestCase{}, false, true, "which is not comparable"},
+ ltTestCase{make([]int, 0), false, true, "which is not comparable"},
+ ltTestCase{ltTestCase{}, false, true, "which is not comparable"},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) BadArgument() {
+ panicked := false
+
+ defer func() {
+ ExpectThat(panicked, Equals(true))
+ }()
+
+ defer func() {
+ if r := recover(); r != nil {
+ panicked = true
+ }
+ }()
+
+ LessThan(complex128(0))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Integer literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessThanTest) NegativeIntegerLiteral() {
+ matcher := LessThan(-150)
+ desc := matcher.Description()
+ expectedDesc := "less than -150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-(1 << 30), true, false, ""},
+ ltTestCase{-151, true, false, ""},
+ ltTestCase{-150, false, false, ""},
+ ltTestCase{0, false, false, ""},
+ ltTestCase{17, false, false, ""},
+
+ ltTestCase{int(-(1 << 30)), true, false, ""},
+ ltTestCase{int(-151), true, false, ""},
+ ltTestCase{int(-150), false, false, ""},
+ ltTestCase{int(0), false, false, ""},
+ ltTestCase{int(17), false, false, ""},
+
+ ltTestCase{int8(-127), false, false, ""},
+ ltTestCase{int8(0), false, false, ""},
+ ltTestCase{int8(17), false, false, ""},
+
+ ltTestCase{int16(-(1 << 14)), true, false, ""},
+ ltTestCase{int16(-151), true, false, ""},
+ ltTestCase{int16(-150), false, false, ""},
+ ltTestCase{int16(0), false, false, ""},
+ ltTestCase{int16(17), false, false, ""},
+
+ ltTestCase{int32(-(1 << 30)), true, false, ""},
+ ltTestCase{int32(-151), true, false, ""},
+ ltTestCase{int32(-150), false, false, ""},
+ ltTestCase{int32(0), false, false, ""},
+ ltTestCase{int32(17), false, false, ""},
+
+ ltTestCase{int64(-(1 << 30)), true, false, ""},
+ ltTestCase{int64(-151), true, false, ""},
+ ltTestCase{int64(-150), false, false, ""},
+ ltTestCase{int64(0), false, false, ""},
+ ltTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint((1 << 32) - 151), false, false, ""},
+ ltTestCase{uint(0), false, false, ""},
+ ltTestCase{uint(17), false, false, ""},
+
+ ltTestCase{uint8(0), false, false, ""},
+ ltTestCase{uint8(17), false, false, ""},
+ ltTestCase{uint8(253), false, false, ""},
+
+ ltTestCase{uint16((1 << 16) - 151), false, false, ""},
+ ltTestCase{uint16(0), false, false, ""},
+ ltTestCase{uint16(17), false, false, ""},
+
+ ltTestCase{uint32((1 << 32) - 151), false, false, ""},
+ ltTestCase{uint32(0), false, false, ""},
+ ltTestCase{uint32(17), false, false, ""},
+
+ ltTestCase{uint64((1 << 64) - 151), false, false, ""},
+ ltTestCase{uint64(0), false, false, ""},
+ ltTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-(1 << 30)), true, false, ""},
+ ltTestCase{float32(-151), true, false, ""},
+ ltTestCase{float32(-150.1), true, false, ""},
+ ltTestCase{float32(-150), false, false, ""},
+ ltTestCase{float32(-149.9), false, false, ""},
+ ltTestCase{float32(0), false, false, ""},
+ ltTestCase{float32(17), false, false, ""},
+ ltTestCase{float32(160), false, false, ""},
+
+ ltTestCase{float64(-(1 << 30)), true, false, ""},
+ ltTestCase{float64(-151), true, false, ""},
+ ltTestCase{float64(-150.1), true, false, ""},
+ ltTestCase{float64(-150), false, false, ""},
+ ltTestCase{float64(-149.9), false, false, ""},
+ ltTestCase{float64(0), false, false, ""},
+ ltTestCase{float64(17), false, false, ""},
+ ltTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) ZeroIntegerLiteral() {
+ matcher := LessThan(0)
+ desc := matcher.Description()
+ expectedDesc := "less than 0"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-(1 << 30), true, false, ""},
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{0, false, false, ""},
+ ltTestCase{1, false, false, ""},
+ ltTestCase{17, false, false, ""},
+ ltTestCase{(1 << 30), false, false, ""},
+
+ ltTestCase{int(-(1 << 30)), true, false, ""},
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(0), false, false, ""},
+ ltTestCase{int(1), false, false, ""},
+ ltTestCase{int(17), false, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(0), false, false, ""},
+ ltTestCase{int8(1), false, false, ""},
+
+ ltTestCase{int16(-(1 << 14)), true, false, ""},
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(0), false, false, ""},
+ ltTestCase{int16(1), false, false, ""},
+ ltTestCase{int16(17), false, false, ""},
+
+ ltTestCase{int32(-(1 << 30)), true, false, ""},
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(0), false, false, ""},
+ ltTestCase{int32(1), false, false, ""},
+ ltTestCase{int32(17), false, false, ""},
+
+ ltTestCase{int64(-(1 << 30)), true, false, ""},
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(0), false, false, ""},
+ ltTestCase{int64(1), false, false, ""},
+ ltTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint((1 << 32) - 1), false, false, ""},
+ ltTestCase{uint(0), false, false, ""},
+ ltTestCase{uint(17), false, false, ""},
+
+ ltTestCase{uint8(0), false, false, ""},
+ ltTestCase{uint8(17), false, false, ""},
+ ltTestCase{uint8(253), false, false, ""},
+
+ ltTestCase{uint16((1 << 16) - 1), false, false, ""},
+ ltTestCase{uint16(0), false, false, ""},
+ ltTestCase{uint16(17), false, false, ""},
+
+ ltTestCase{uint32((1 << 32) - 1), false, false, ""},
+ ltTestCase{uint32(0), false, false, ""},
+ ltTestCase{uint32(17), false, false, ""},
+
+ ltTestCase{uint64((1 << 64) - 1), false, false, ""},
+ ltTestCase{uint64(0), false, false, ""},
+ ltTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-(1 << 30)), true, false, ""},
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(-0.1), true, false, ""},
+ ltTestCase{float32(-0.0), false, false, ""},
+ ltTestCase{float32(0), false, false, ""},
+ ltTestCase{float32(0.1), false, false, ""},
+ ltTestCase{float32(17), false, false, ""},
+ ltTestCase{float32(160), false, false, ""},
+
+ ltTestCase{float64(-(1 << 30)), true, false, ""},
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(-0.1), true, false, ""},
+ ltTestCase{float64(-0), false, false, ""},
+ ltTestCase{float64(0), false, false, ""},
+ ltTestCase{float64(17), false, false, ""},
+ ltTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) PositiveIntegerLiteral() {
+ matcher := LessThan(150)
+ desc := matcher.Description()
+ expectedDesc := "less than 150"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{149, true, false, ""},
+ ltTestCase{150, false, false, ""},
+ ltTestCase{151, false, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(149), true, false, ""},
+ ltTestCase{int(150), false, false, ""},
+ ltTestCase{int(151), false, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(0), true, false, ""},
+ ltTestCase{int8(17), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(149), true, false, ""},
+ ltTestCase{int16(150), false, false, ""},
+ ltTestCase{int16(151), false, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(149), true, false, ""},
+ ltTestCase{int32(150), false, false, ""},
+ ltTestCase{int32(151), false, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(149), true, false, ""},
+ ltTestCase{int64(150), false, false, ""},
+ ltTestCase{int64(151), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(149), true, false, ""},
+ ltTestCase{uint(150), false, false, ""},
+ ltTestCase{uint(151), false, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(127), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(149), true, false, ""},
+ ltTestCase{uint16(150), false, false, ""},
+ ltTestCase{uint16(151), false, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(149), true, false, ""},
+ ltTestCase{uint32(150), false, false, ""},
+ ltTestCase{uint32(151), false, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(149), true, false, ""},
+ ltTestCase{uint64(150), false, false, ""},
+ ltTestCase{uint64(151), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(149), true, false, ""},
+ ltTestCase{float32(149.9), true, false, ""},
+ ltTestCase{float32(150), false, false, ""},
+ ltTestCase{float32(150.1), false, false, ""},
+ ltTestCase{float32(151), false, false, ""},
+
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(149), true, false, ""},
+ ltTestCase{float64(149.9), true, false, ""},
+ ltTestCase{float64(150), false, false, ""},
+ ltTestCase{float64(150.1), false, false, ""},
+ ltTestCase{float64(151), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Float literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessThanTest) NegativeFloatLiteral() {
+ matcher := LessThan(-150.1)
+ desc := matcher.Description()
+ expectedDesc := "less than -150.1"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-(1 << 30), true, false, ""},
+ ltTestCase{-151, true, false, ""},
+ ltTestCase{-150, false, false, ""},
+ ltTestCase{0, false, false, ""},
+ ltTestCase{17, false, false, ""},
+
+ ltTestCase{int(-(1 << 30)), true, false, ""},
+ ltTestCase{int(-151), true, false, ""},
+ ltTestCase{int(-150), false, false, ""},
+ ltTestCase{int(0), false, false, ""},
+ ltTestCase{int(17), false, false, ""},
+
+ ltTestCase{int8(-127), false, false, ""},
+ ltTestCase{int8(0), false, false, ""},
+ ltTestCase{int8(17), false, false, ""},
+
+ ltTestCase{int16(-(1 << 14)), true, false, ""},
+ ltTestCase{int16(-151), true, false, ""},
+ ltTestCase{int16(-150), false, false, ""},
+ ltTestCase{int16(0), false, false, ""},
+ ltTestCase{int16(17), false, false, ""},
+
+ ltTestCase{int32(-(1 << 30)), true, false, ""},
+ ltTestCase{int32(-151), true, false, ""},
+ ltTestCase{int32(-150), false, false, ""},
+ ltTestCase{int32(0), false, false, ""},
+ ltTestCase{int32(17), false, false, ""},
+
+ ltTestCase{int64(-(1 << 30)), true, false, ""},
+ ltTestCase{int64(-151), true, false, ""},
+ ltTestCase{int64(-150), false, false, ""},
+ ltTestCase{int64(0), false, false, ""},
+ ltTestCase{int64(17), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint((1 << 32) - 151), false, false, ""},
+ ltTestCase{uint(0), false, false, ""},
+ ltTestCase{uint(17), false, false, ""},
+
+ ltTestCase{uint8(0), false, false, ""},
+ ltTestCase{uint8(17), false, false, ""},
+ ltTestCase{uint8(253), false, false, ""},
+
+ ltTestCase{uint16((1 << 16) - 151), false, false, ""},
+ ltTestCase{uint16(0), false, false, ""},
+ ltTestCase{uint16(17), false, false, ""},
+
+ ltTestCase{uint32((1 << 32) - 151), false, false, ""},
+ ltTestCase{uint32(0), false, false, ""},
+ ltTestCase{uint32(17), false, false, ""},
+
+ ltTestCase{uint64((1 << 64) - 151), false, false, ""},
+ ltTestCase{uint64(0), false, false, ""},
+ ltTestCase{uint64(17), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-(1 << 30)), true, false, ""},
+ ltTestCase{float32(-151), true, false, ""},
+ ltTestCase{float32(-150.2), true, false, ""},
+ ltTestCase{float32(-150.1), false, false, ""},
+ ltTestCase{float32(-150), false, false, ""},
+ ltTestCase{float32(0), false, false, ""},
+ ltTestCase{float32(17), false, false, ""},
+ ltTestCase{float32(160), false, false, ""},
+
+ ltTestCase{float64(-(1 << 30)), true, false, ""},
+ ltTestCase{float64(-151), true, false, ""},
+ ltTestCase{float64(-150.2), true, false, ""},
+ ltTestCase{float64(-150.1), false, false, ""},
+ ltTestCase{float64(-150), false, false, ""},
+ ltTestCase{float64(0), false, false, ""},
+ ltTestCase{float64(17), false, false, ""},
+ ltTestCase{float64(160), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) PositiveFloatLiteral() {
+ matcher := LessThan(149.9)
+ desc := matcher.Description()
+ expectedDesc := "less than 149.9"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{149, true, false, ""},
+ ltTestCase{150, false, false, ""},
+ ltTestCase{151, false, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(149), true, false, ""},
+ ltTestCase{int(150), false, false, ""},
+ ltTestCase{int(151), false, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(0), true, false, ""},
+ ltTestCase{int8(17), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(149), true, false, ""},
+ ltTestCase{int16(150), false, false, ""},
+ ltTestCase{int16(151), false, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(149), true, false, ""},
+ ltTestCase{int32(150), false, false, ""},
+ ltTestCase{int32(151), false, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(149), true, false, ""},
+ ltTestCase{int64(150), false, false, ""},
+ ltTestCase{int64(151), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(149), true, false, ""},
+ ltTestCase{uint(150), false, false, ""},
+ ltTestCase{uint(151), false, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(127), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(149), true, false, ""},
+ ltTestCase{uint16(150), false, false, ""},
+ ltTestCase{uint16(151), false, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(149), true, false, ""},
+ ltTestCase{uint32(150), false, false, ""},
+ ltTestCase{uint32(151), false, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(149), true, false, ""},
+ ltTestCase{uint64(150), false, false, ""},
+ ltTestCase{uint64(151), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(149), true, false, ""},
+ ltTestCase{float32(149.8), true, false, ""},
+ ltTestCase{float32(149.9), false, false, ""},
+ ltTestCase{float32(150), false, false, ""},
+ ltTestCase{float32(151), false, false, ""},
+
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(149), true, false, ""},
+ ltTestCase{float64(149.8), true, false, ""},
+ ltTestCase{float64(149.9), false, false, ""},
+ ltTestCase{float64(150), false, false, ""},
+ ltTestCase{float64(151), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Subtle cases
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessThanTest) Int64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessThan(int64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{kTwoTo25 + 0, true, false, ""},
+ ltTestCase{kTwoTo25 + 1, false, false, ""},
+ ltTestCase{kTwoTo25 + 2, false, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(0), true, false, ""},
+ ltTestCase{int16(32767), true, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int32(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(255), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(65535), true, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ ltTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) Int64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessThan(int64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{1 << 30, true, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(math.MaxInt32), true, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(0), true, false, ""},
+ ltTestCase{int16(32767), true, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(math.MaxInt32), true, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ ltTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ ltTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(math.MaxUint32), true, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(255), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(65535), true, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(math.MaxUint32), true, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) Uint64NotExactlyRepresentableBySinglePrecision() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessThan(uint64(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 33554433"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{kTwoTo25 + 0, true, false, ""},
+ ltTestCase{kTwoTo25 + 1, false, false, ""},
+ ltTestCase{kTwoTo25 + 2, false, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(0), true, false, ""},
+ ltTestCase{int16(32767), true, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int32(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 2), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(255), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(65535), true, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint32(kTwoTo25 + 2), false, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 1), true, false, ""},
+ ltTestCase{float64(kTwoTo25 + 0), true, false, ""},
+ ltTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) Uint64NotExactlyRepresentableByDoublePrecision() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessThan(uint64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 18014398509481985"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{-1, true, false, ""},
+ ltTestCase{1 << 30, true, false, ""},
+
+ ltTestCase{int(-1), true, false, ""},
+ ltTestCase{int(math.MaxInt32), true, false, ""},
+
+ ltTestCase{int8(-1), true, false, ""},
+ ltTestCase{int8(127), true, false, ""},
+
+ ltTestCase{int16(-1), true, false, ""},
+ ltTestCase{int16(0), true, false, ""},
+ ltTestCase{int16(32767), true, false, ""},
+
+ ltTestCase{int32(-1), true, false, ""},
+ ltTestCase{int32(math.MaxInt32), true, false, ""},
+
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo54 - 1), true, false, ""},
+ ltTestCase{int64(kTwoTo54 + 0), true, false, ""},
+ ltTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 2), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint(0), true, false, ""},
+ ltTestCase{uint(math.MaxUint32), true, false, ""},
+
+ ltTestCase{uint8(0), true, false, ""},
+ ltTestCase{uint8(255), true, false, ""},
+
+ ltTestCase{uint16(0), true, false, ""},
+ ltTestCase{uint16(65535), true, false, ""},
+
+ ltTestCase{uint32(0), true, false, ""},
+ ltTestCase{uint32(math.MaxUint32), true, false, ""},
+
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 - 1), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 0), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) Float32AboveExactIntegerRange() {
+ // Single-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^25-1, 2^25+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo25 = 1 << 25
+ matcher := LessThan(float32(kTwoTo25 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 3.3554432e+07"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{int64(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{int64(kTwoTo25 + 3), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{uint64(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{uint64(kTwoTo25 + 3), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float32(-1), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float32(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float32(kTwoTo25 + 3), false, false, ""},
+
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo25 - 1), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 0), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo25 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) Float64AboveExactIntegerRange() {
+ // Double-precision floats don't have enough bits to represent the integers
+ // near this one distinctly, so [2^54-1, 2^54+2] all receive the same value
+ // and should be treated as equivalent when floats are in the mix.
+ const kTwoTo54 = 1 << 54
+ matcher := LessThan(float64(kTwoTo54 + 1))
+
+ desc := matcher.Description()
+ expectedDesc := "less than 1.8014398509481984e+16"
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ // Signed integers.
+ ltTestCase{int64(-1), true, false, ""},
+ ltTestCase{int64(kTwoTo54 - 2), true, false, ""},
+ ltTestCase{int64(kTwoTo54 - 1), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 0), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 2), false, false, ""},
+ ltTestCase{int64(kTwoTo54 + 3), false, false, ""},
+
+ // Unsigned integers.
+ ltTestCase{uint64(0), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 - 2), true, false, ""},
+ ltTestCase{uint64(kTwoTo54 - 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 0), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 2), false, false, ""},
+ ltTestCase{uint64(kTwoTo54 + 3), false, false, ""},
+
+ // Floating point.
+ ltTestCase{float64(-1), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 2), true, false, ""},
+ ltTestCase{float64(kTwoTo54 - 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 0), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 1), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 2), false, false, ""},
+ ltTestCase{float64(kTwoTo54 + 3), false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+////////////////////////////////////////////////////////////////////////
+// String literals
+////////////////////////////////////////////////////////////////////////
+
+func (t *LessThanTest) EmptyString() {
+ matcher := LessThan("")
+ desc := matcher.Description()
+ expectedDesc := "less than \"\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ ltTestCase{"", false, false, ""},
+ ltTestCase{"\x00", false, false, ""},
+ ltTestCase{"a", false, false, ""},
+ ltTestCase{"foo", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) SingleNullByte() {
+ matcher := LessThan("\x00")
+ desc := matcher.Description()
+ expectedDesc := "less than \"\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ ltTestCase{"", true, false, ""},
+ ltTestCase{"\x00", false, false, ""},
+ ltTestCase{"a", false, false, ""},
+ ltTestCase{"foo", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
+
+func (t *LessThanTest) LongerString() {
+ matcher := LessThan("foo\x00")
+ desc := matcher.Description()
+ expectedDesc := "less than \"foo\x00\""
+
+ ExpectThat(desc, Equals(expectedDesc))
+
+ cases := []ltTestCase{
+ ltTestCase{"", true, false, ""},
+ ltTestCase{"\x00", true, false, ""},
+ ltTestCase{"bar", true, false, ""},
+ ltTestCase{"foo", true, false, ""},
+ ltTestCase{"foo\x00", false, false, ""},
+ ltTestCase{"fooa", false, false, ""},
+ ltTestCase{"qux", false, false, ""},
+ }
+
+ t.checkTestCases(matcher, cases)
+}
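
A short orientation for the table-driven cases above: each four-field ltTestCase literal pairs a candidate value with the expected match result, whether the resulting error should be fatal, and the expected error text (the ltTestCase declaration sits earlier in the file, outside the lines quoted here, so treat that ordering as inferred from the data rather than quoted). The "not exactly representable" suites rest on a basic IEEE 754 fact; the standalone sketch below is illustrative only, not part of the vendored test file, and shows the same rounding collapse directly:

package main

import "fmt"

func main() {
	// float32 carries a 24-bit significand, so 2^25+1 cannot be represented
	// exactly and rounds to 2^25. That is why the tests above treat float32
	// candidates in [2^25-1, 2^25+2] as equivalent to the int64 limit 2^25+1.
	const twoTo25 = 1 << 25
	fmt.Println(float32(twoTo25+1) == float32(twoTo25)) // true

	// float64 carries a 53-bit significand, so the same collapse happens
	// around 2^54, matching the double-precision cases.
	const twoTo54 = 1 << 54
	fmt.Println(float64(twoTo54+1) == float64(twoTo54)) // true
}
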
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matcher.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matcher.go
new file mode 100644
index 00000000000..78159a0727c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matcher.go
@@ -0,0 +1,86 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package oglematchers provides a set of matchers useful in a testing or
+// mocking framework. These matchers are inspired by and mostly compatible with
+// Google Test for C++ and Google JS Test.
+//
+// This package is used by github.com/smartystreets/assertions/internal/ogletest and
+// github.com/smartystreets/assertions/internal/oglemock, which may be more directly useful if you're not
+// writing your own testing package or defining your own matchers.
+package oglematchers
+
+// A Matcher is some predicate implicitly defining a set of values that it
+// matches. For example, GreaterThan(17) matches all numeric values greater
+// than 17, and HasSubstr("taco") matches all strings with the substring
+// "taco".
+//
+// Matchers are typically exposed to tests via constructor functions like
+// HasSubstr. In order to implement such a function you can either define your
+// own matcher type or use NewMatcher.
+type Matcher interface {
+	// Check whether the supplied value belongs to the set defined by the
+ // matcher. Return a non-nil error if and only if it does not.
+ //
+ // The error describes why the value doesn't match. The error text is a
+ // relative clause that is suitable for being placed after the value. For
+ // example, a predicate that matches strings with a particular substring may,
+ // when presented with a numerical value, return the following error text:
+ //
+ // "which is not a string"
+ //
+ // Then the failure message may look like:
+ //
+ // Expected: has substring "taco"
+ // Actual: 17, which is not a string
+ //
+ // If the error is self-apparent based on the description of the matcher, the
+ // error text may be empty (but the error still non-nil). For example:
+ //
+ // Expected: 17
+ // Actual: 19
+ //
+ // If you are implementing a new matcher, see also the documentation on
+ // FatalError.
+ Matches(candidate interface{}) error
+
+ // Description returns a string describing the property that values matching
+ // this matcher have, as a verb phrase where the subject is the value. For
+	// example, "is greater than 17" or "has substring "taco"".
+ Description() string
+}
+
+// FatalError is an implementation of the error interface that may be returned
+// from matchers, indicating the error should be propagated. Returning a
+// *FatalError indicates that the matcher doesn't process values of the
+// supplied type, or otherwise doesn't know how to handle the value.
+//
+// For example, if GreaterThan(17) returned false for the value "taco" without
+// a fatal error, then Not(GreaterThan(17)) would return true. This is
+// technically correct, but is surprising and may mask failures where the wrong
+// sort of matcher is accidentally used. Instead, GreaterThan(17) can return a
+// fatal error, which will be propagated by Not().
+type FatalError struct {
+ errorText string
+}
+
+// NewFatalError creates a FatalError struct with the supplied error text.
+func NewFatalError(s string) *FatalError {
+ return &FatalError{s}
+}
+
+func (e *FatalError) Error() string {
+ return e.errorText
+}
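
The Matcher and FatalError contract above is easiest to see with a small concrete matcher. The sketch below is illustrative only, not part of the vendored matcher.go; the evenMatcher type, the Example function name, and the dot import that mirrors the vendored *_test.go files are assumptions of the sketch rather than anything the library defines:

package oglematchers_test

import (
	"errors"
	"fmt"

	. "github.com/smartystreets/assertions/internal/oglematchers"
)

// evenMatcher matches int values that are even. Unsupported types get a
// *FatalError so that wrappers such as Not do not invert the result.
type evenMatcher struct{}

func (evenMatcher) Description() string { return "is even" }

func (evenMatcher) Matches(c interface{}) error {
	n, ok := c.(int)
	if !ok {
		return NewFatalError("which is not an int")
	}
	if n%2 != 0 {
		// Non-fatal mismatch; empty text is fine when the failure is
		// self-apparent from the description and the value.
		return errors.New("")
	}
	return nil
}

func ExampleMatcher() {
	var m Matcher = evenMatcher{}
	fmt.Println(m.Matches(4))      // <nil>
	fmt.Println(m.Matches(5))      // non-fatal error with empty text
	fmt.Println(m.Matches("taco")) // which is not an int
}
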
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matches_regexp.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matches_regexp.go
new file mode 100644
index 00000000000..1ed63f30c4e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matches_regexp.go
@@ -0,0 +1,69 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "regexp"
+)
+
+// MatchesRegexp returns a matcher that matches strings and byte slices whose
+// contents match the supplied regular expression. The semantics are those of
+// regexp.Match. In particular, that means the match is not implicitly anchored
+// to the ends of the string: MatchesRegexp("bar") will match "foo bar baz".
+func MatchesRegexp(pattern string) Matcher {
+ re, err := regexp.Compile(pattern)
+ if err != nil {
+ panic("MatchesRegexp: " + err.Error())
+ }
+
+ return &matchesRegexpMatcher{re}
+}
+
+type matchesRegexpMatcher struct {
+ re *regexp.Regexp
+}
+
+func (m *matchesRegexpMatcher) Description() string {
+ return fmt.Sprintf("matches regexp \"%s\"", m.re.String())
+}
+
+func (m *matchesRegexpMatcher) Matches(c interface{}) (err error) {
+ v := reflect.ValueOf(c)
+ isString := v.Kind() == reflect.String
+	// Inspect the element type via Type().Elem(); calling Value.Elem on a slice panics.
+	isByteSlice := v.Kind() == reflect.Slice && v.Type().Elem().Kind() == reflect.Uint8
+
+ err = errors.New("")
+
+ switch {
+ case isString:
+ if m.re.MatchString(v.String()) {
+ err = nil
+ }
+
+ case isByteSlice:
+ if m.re.Match(v.Bytes()) {
+ err = nil
+ }
+
+ default:
+ err = NewFatalError("which is not a string or []byte")
+ }
+
+ return
+}
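
A brief usage sketch for the matcher above. It is illustrative only and not part of the vendored file; the Example function and the dot import follow the convention of the vendored *_test.go files:

package oglematchers_test

import (
	"fmt"

	. "github.com/smartystreets/assertions/internal/oglematchers"
)

func ExampleMatchesRegexp() {
	m := MatchesRegexp("fo[op]\\s+x")

	// The match is unanchored, and both strings and []byte are accepted.
	fmt.Println(m.Matches("blah foo   x blah")) // <nil>
	fmt.Println(m.Matches([]byte("fop x")))     // <nil>
	fmt.Println(m.Matches(17))                  // which is not a string or []byte
}
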
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matches_regexp_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matches_regexp_test.go
new file mode 100644
index 00000000000..031c6cb3eff
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/matches_regexp_test.go
@@ -0,0 +1,92 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type MatchesRegexpTest struct {
+}
+
+func init() { RegisterTestSuite(&MatchesRegexpTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *MatchesRegexpTest) Description() {
+ m := MatchesRegexp("foo.*bar")
+ ExpectEq("matches regexp \"foo.*bar\"", m.Description())
+}
+
+func (t *MatchesRegexpTest) InvalidRegexp() {
+ ExpectThat(
+ func() { MatchesRegexp("(foo") },
+ Panics(HasSubstr("missing closing )")))
+}
+
+func (t *MatchesRegexpTest) CandidateIsNil() {
+ m := MatchesRegexp("")
+ err := m.Matches(nil)
+
+ ExpectThat(err, Error(Equals("which is not a string or []byte")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *MatchesRegexpTest) CandidateIsInteger() {
+ m := MatchesRegexp("")
+ err := m.Matches(17)
+
+ ExpectThat(err, Error(Equals("which is not a string or []byte")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *MatchesRegexpTest) NonMatchingCandidates() {
+ m := MatchesRegexp("fo[op]\\s+x")
+ var err error
+
+ err = m.Matches("fon x")
+ ExpectThat(err, Error(Equals("")))
+ ExpectFalse(isFatal(err))
+
+ err = m.Matches("fopx")
+ ExpectThat(err, Error(Equals("")))
+ ExpectFalse(isFatal(err))
+
+ err = m.Matches("fop ")
+ ExpectThat(err, Error(Equals("")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *MatchesRegexpTest) MatchingCandidates() {
+ m := MatchesRegexp("fo[op]\\s+x")
+ var err error
+
+ err = m.Matches("foo x")
+ ExpectEq(nil, err)
+
+ err = m.Matches("fop x")
+ ExpectEq(nil, err)
+
+ err = m.Matches("blah blah foo x blah blah")
+ ExpectEq(nil, err)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/new_matcher.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/new_matcher.go
new file mode 100644
index 00000000000..c9d8398ee63
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/new_matcher.go
@@ -0,0 +1,43 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+// NewMatcher creates a matcher with the given description and predicate
+// function, which will be invoked to handle calls to Matches.
+//
+// Using this constructor may be a convenience over defining your own type that
+// implements Matcher if you do not need any logic in your Description method.
+func NewMatcher(
+ predicate func(interface{}) error,
+ description string) Matcher {
+ return &predicateMatcher{
+ predicate: predicate,
+ description: description,
+ }
+}
+
+type predicateMatcher struct {
+ predicate func(interface{}) error
+ description string
+}
+
+func (pm *predicateMatcher) Matches(c interface{}) error {
+ return pm.predicate(c)
+}
+
+func (pm *predicateMatcher) Description() string {
+ return pm.description
+}
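
The constructor above is easiest to see with a small sketch. It is illustrative only, not part of the vendored file, and the isNonEmptyString matcher and Example function are assumptions of the sketch:

package oglematchers_test

import (
	"errors"
	"fmt"

	. "github.com/smartystreets/assertions/internal/oglematchers"
)

func ExampleNewMatcher() {
	// All matching logic lives in the predicate; the description is fixed.
	isNonEmptyString := NewMatcher(
		func(c interface{}) error {
			s, ok := c.(string)
			if !ok {
				return NewFatalError("which is not a string")
			}
			if len(s) == 0 {
				return errors.New("which is empty")
			}
			return nil
		},
		"is a non-empty string")

	fmt.Println(isNonEmptyString.Description())   // is a non-empty string
	fmt.Println(isNonEmptyString.Matches("taco")) // <nil>
	fmt.Println(isNonEmptyString.Matches(""))     // which is empty
}
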
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/not.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/not.go
new file mode 100644
index 00000000000..623789fe28a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/not.go
@@ -0,0 +1,53 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+)
+
+// Not returns a matcher that inverts the set of values matched by the wrapped
+// matcher. It does not transform the result for values for which the wrapped
+// matcher returns a fatal error.
+func Not(m Matcher) Matcher {
+ return &notMatcher{m}
+}
+
+type notMatcher struct {
+ wrapped Matcher
+}
+
+func (m *notMatcher) Matches(c interface{}) (err error) {
+ err = m.wrapped.Matches(c)
+
+ // Did the wrapped matcher say yes?
+ if err == nil {
+ return errors.New("")
+ }
+
+ // Did the wrapped matcher return a fatal error?
+ if _, isFatal := err.(*FatalError); isFatal {
+ return err
+ }
+
+ // The wrapped matcher returned a non-fatal error.
+ return nil
+}
+
+func (m *notMatcher) Description() string {
+ return fmt.Sprintf("not(%s)", m.wrapped.Description())
+}
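
The fatal-error passthrough described above is the part most worth illustrating. The sketch below is illustrative only and not part of the vendored file; it leans on LessThan, whose "which is not comparable" fatal error appears in the test tables earlier in this diff:

package oglematchers_test

import (
	"fmt"

	. "github.com/smartystreets/assertions/internal/oglematchers"
)

func ExampleNot() {
	m := Not(LessThan(10))

	fmt.Println(m.Description()) // not(less than 10)
	fmt.Println(m.Matches(17))   // <nil>: 17 is not less than 10
	fmt.Println(m.Matches(3))    // non-fatal error with empty text: 3 matched the wrapped matcher

	// A *FatalError from the wrapped matcher is passed through unchanged
	// instead of being inverted.
	fmt.Println(m.Matches(make(chan int))) // which is not comparable
}
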
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/not_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/not_test.go
new file mode 100644
index 00000000000..9c65b85ef87
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/not_test.go
@@ -0,0 +1,108 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "errors"
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "testing"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type fakeMatcher struct {
+ matchFunc func(interface{}) error
+ description string
+}
+
+func (m *fakeMatcher) Matches(c interface{}) error {
+ return m.matchFunc(c)
+}
+
+func (m *fakeMatcher) Description() string {
+ return m.description
+}
+
+type NotTest struct {
+
+}
+
+func init() { RegisterTestSuite(&NotTest{}) }
+func TestOgletest(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *NotTest) CallsWrapped() {
+ var suppliedCandidate interface{}
+ matchFunc := func(c interface{}) error {
+ suppliedCandidate = c
+ return nil
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Not(wrapped)
+
+ matcher.Matches(17)
+ ExpectThat(suppliedCandidate, Equals(17))
+}
+
+func (t *NotTest) WrappedReturnsTrue() {
+ matchFunc := func(c interface{}) error {
+ return nil
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Not(wrapped)
+
+ err := matcher.Matches(0)
+ ExpectThat(err, Error(Equals("")))
+}
+
+func (t *NotTest) WrappedReturnsNonFatalError() {
+ matchFunc := func(c interface{}) error {
+ return errors.New("taco")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Not(wrapped)
+
+ err := matcher.Matches(0)
+ ExpectEq(nil, err)
+}
+
+func (t *NotTest) WrappedReturnsFatalError() {
+ matchFunc := func(c interface{}) error {
+ return NewFatalError("taco")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Not(wrapped)
+
+ err := matcher.Matches(0)
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *NotTest) Description() {
+ wrapped := &fakeMatcher{nil, "taco"}
+ matcher := Not(wrapped)
+
+ ExpectEq("not(taco)", matcher.Description())
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/panics.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/panics.go
new file mode 100644
index 00000000000..d2cfc97869b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/panics.go
@@ -0,0 +1,74 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+// Panics matches zero-arg functions which, when invoked, panic with an error
+// that matches the supplied matcher.
+//
+// NOTE(jacobsa): This matcher cannot detect the case where the function panics
+// using panic(nil), by design of the language. See here for more info:
+//
+// http://goo.gl/9aIQL
+//
+func Panics(m Matcher) Matcher {
+ return &panicsMatcher{m}
+}
+
+type panicsMatcher struct {
+ wrappedMatcher Matcher
+}
+
+func (m *panicsMatcher) Description() string {
+ return "panics with: " + m.wrappedMatcher.Description()
+}
+
+func (m *panicsMatcher) Matches(c interface{}) (err error) {
+ // Make sure c is a zero-arg function.
+ v := reflect.ValueOf(c)
+ if v.Kind() != reflect.Func || v.Type().NumIn() != 0 {
+ err = NewFatalError("which is not a zero-arg function")
+ return
+ }
+
+ // Call the function and check its panic error.
+ defer func() {
+ if e := recover(); e != nil {
+ err = m.wrappedMatcher.Matches(e)
+
+ // Set a clearer error message if the matcher said no.
+ if err != nil {
+ wrappedClause := ""
+ if err.Error() != "" {
+ wrappedClause = ", " + err.Error()
+ }
+
+ err = errors.New(fmt.Sprintf("which panicked with: %v%s", e, wrappedClause))
+ }
+ }
+ }()
+
+ v.Call([]reflect.Value{})
+
+ // If we get here, the function didn't panic.
+ err = errors.New("which didn't panic")
+ return
+}
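
A usage sketch for the matcher above, illustrative only and not part of the vendored file; HasSubstr is used here because it appears elsewhere in this package's tests:

package oglematchers_test

import (
	"fmt"

	. "github.com/smartystreets/assertions/internal/oglematchers"
)

func ExamplePanics() {
	m := Panics(HasSubstr("out of range"))

	// The candidate must be a zero-arg function; Panics invokes it and
	// feeds the recovered value to the wrapped matcher.
	fmt.Println(m.Matches(func() { panic("index out of range") })) // <nil>
	fmt.Println(m.Matches(func() {}))                              // which didn't panic
	fmt.Println(m.Matches("taco"))                                 // which is not a zero-arg function
}
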
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/panics_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/panics_test.go
new file mode 100644
index 00000000000..fbb66bf31e2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/panics_test.go
@@ -0,0 +1,141 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "errors"
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type PanicsTest struct {
+ matcherCalled bool
+ suppliedCandidate interface{}
+ wrappedError error
+
+ matcher Matcher
+}
+
+func init() { RegisterTestSuite(&PanicsTest{}) }
+
+func (t *PanicsTest) SetUp(i *TestInfo) {
+ wrapped := &fakeMatcher{
+ func(c interface{}) error {
+ t.matcherCalled = true
+ t.suppliedCandidate = c
+ return t.wrappedError
+ },
+ "foo",
+ }
+
+ t.matcher = Panics(wrapped)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *PanicsTest) Description() {
+ ExpectThat(t.matcher.Description(), Equals("panics with: foo"))
+}
+
+func (t *PanicsTest) CandidateIsNil() {
+ err := t.matcher.Matches(nil)
+
+ ExpectThat(err, Error(Equals("which is not a zero-arg function")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PanicsTest) CandidateIsString() {
+ err := t.matcher.Matches("taco")
+
+ ExpectThat(err, Error(Equals("which is not a zero-arg function")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PanicsTest) CandidateTakesArgs() {
+ err := t.matcher.Matches(func(i int) string { return "" })
+
+ ExpectThat(err, Error(Equals("which is not a zero-arg function")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PanicsTest) CallsFunction() {
+ callCount := 0
+ t.matcher.Matches(func() string {
+ callCount++
+ return ""
+ })
+
+ ExpectThat(callCount, Equals(1))
+}
+
+func (t *PanicsTest) FunctionDoesntPanic() {
+ err := t.matcher.Matches(func() {})
+
+ ExpectThat(err, Error(Equals("which didn't panic")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *PanicsTest) CallsWrappedMatcher() {
+ expectedErr := 17
+ t.wrappedError = errors.New("")
+ t.matcher.Matches(func() { panic(expectedErr) })
+
+ ExpectThat(t.suppliedCandidate, Equals(expectedErr))
+}
+
+func (t *PanicsTest) WrappedReturnsTrue() {
+ err := t.matcher.Matches(func() { panic("") })
+
+ ExpectEq(nil, err)
+}
+
+func (t *PanicsTest) WrappedReturnsFatalErrorWithoutText() {
+ t.wrappedError = NewFatalError("")
+ err := t.matcher.Matches(func() { panic(17) })
+
+ ExpectThat(err, Error(Equals("which panicked with: 17")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *PanicsTest) WrappedReturnsFatalErrorWithText() {
+ t.wrappedError = NewFatalError("which blah")
+ err := t.matcher.Matches(func() { panic(17) })
+
+ ExpectThat(err, Error(Equals("which panicked with: 17, which blah")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *PanicsTest) WrappedReturnsNonFatalErrorWithoutText() {
+ t.wrappedError = errors.New("")
+ err := t.matcher.Matches(func() { panic(17) })
+
+ ExpectThat(err, Error(Equals("which panicked with: 17")))
+ ExpectFalse(isFatal(err))
+}
+
+func (t *PanicsTest) WrappedReturnsNonFatalErrorWithText() {
+ t.wrappedError = errors.New("which blah")
+ err := t.matcher.Matches(func() { panic(17) })
+
+ ExpectThat(err, Error(Equals("which panicked with: 17, which blah")))
+ ExpectFalse(isFatal(err))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/pointee.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/pointee.go
new file mode 100644
index 00000000000..c5383f2402f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/pointee.go
@@ -0,0 +1,65 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+// Pointee returns a matcher that matches non-nil pointers whose pointee
+// matches the wrapped matcher.
+func Pointee(m Matcher) Matcher {
+ return &pointeeMatcher{m}
+}
+
+type pointeeMatcher struct {
+ wrapped Matcher
+}
+
+func (m *pointeeMatcher) Matches(c interface{}) (err error) {
+ // Make sure the candidate is of the appropriate type.
+ cv := reflect.ValueOf(c)
+ if !cv.IsValid() || cv.Kind() != reflect.Ptr {
+ return NewFatalError("which is not a pointer")
+ }
+
+ // Make sure the candidate is non-nil.
+ if cv.IsNil() {
+ return NewFatalError("")
+ }
+
+ // Defer to the wrapped matcher. Fix up empty errors so that failure messages
+ // are more helpful than just printing a pointer for "Actual".
+ pointee := cv.Elem().Interface()
+ err = m.wrapped.Matches(pointee)
+ if err != nil && err.Error() == "" {
+ s := fmt.Sprintf("whose pointee is %v", pointee)
+
+ if _, ok := err.(*FatalError); ok {
+ err = NewFatalError(s)
+ } else {
+ err = errors.New(s)
+ }
+ }
+
+ return err
+}
+
+func (m *pointeeMatcher) Description() string {
+ return fmt.Sprintf("pointee(%s)", m.wrapped.Description())
+}
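
A usage sketch for the matcher above, illustrative only and not part of the vendored file; Equals is used here because it appears throughout this package's tests:

package oglematchers_test

import (
	"fmt"

	. "github.com/smartystreets/assertions/internal/oglematchers"
)

func ExamplePointee() {
	m := Pointee(Equals(17))

	x, y := 17, 19
	fmt.Println(m.Matches(&x)) // <nil>
	fmt.Println(m.Matches(&y)) // whose pointee is 19
	fmt.Println(m.Matches(17)) // which is not a pointer (fatal)

	// A nil pointer is rejected with a fatal error whose text is empty,
	// so this prints an empty line.
	fmt.Println(m.Matches((*int)(nil)))
}
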
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/pointee_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/pointee_test.go
new file mode 100644
index 00000000000..3bb72a702be
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/pointee_test.go
@@ -0,0 +1,152 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "errors"
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "testing"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type PointeeTest struct {}
+func init() { RegisterTestSuite(&PointeeTest{}) }
+
+func TestPointee(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *PointeeTest) Description() {
+ wrapped := &fakeMatcher{nil, "taco"}
+ matcher := Pointee(wrapped)
+
+ ExpectEq("pointee(taco)", matcher.Description())
+}
+
+func (t *PointeeTest) CandidateIsNotAPointer() {
+ matcher := Pointee(HasSubstr(""))
+ err := matcher.Matches([]byte{})
+
+ ExpectThat(err, Error(Equals("which is not a pointer")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PointeeTest) CandidateIsANilLiteral() {
+ matcher := Pointee(HasSubstr(""))
+ err := matcher.Matches(nil)
+
+ ExpectThat(err, Error(Equals("which is not a pointer")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PointeeTest) CandidateIsANilPointer() {
+ matcher := Pointee(HasSubstr(""))
+ err := matcher.Matches((*int)(nil))
+
+ ExpectThat(err, Error(Equals("")))
+ ExpectTrue(isFatal(err))
+}
+
+func (t *PointeeTest) CallsWrapped() {
+ var suppliedCandidate interface{}
+ matchFunc := func(c interface{}) error {
+ suppliedCandidate = c
+ return nil
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ someSlice := []byte{}
+ matcher.Matches(&someSlice)
+ ExpectThat(suppliedCandidate, IdenticalTo(someSlice))
+}
+
+func (t *PointeeTest) WrappedReturnsOkay() {
+ matchFunc := func(c interface{}) error {
+ return nil
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ err := matcher.Matches(new(int))
+ ExpectEq(nil, err)
+}
+
+func (t *PointeeTest) WrappedReturnsNonFatalNonEmptyError() {
+ matchFunc := func(c interface{}) error {
+ return errors.New("taco")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ i := 17
+ err := matcher.Matches(&i)
+ ExpectFalse(isFatal(err))
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *PointeeTest) WrappedReturnsNonFatalEmptyError() {
+ matchFunc := func(c interface{}) error {
+ return errors.New("")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ i := 17
+ err := matcher.Matches(&i)
+ ExpectFalse(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("whose pointee")))
+ ExpectThat(err, Error(HasSubstr("17")))
+}
+
+func (t *PointeeTest) WrappedReturnsFatalNonEmptyError() {
+ matchFunc := func(c interface{}) error {
+ return NewFatalError("taco")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ i := 17
+ err := matcher.Matches(&i)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(Equals("taco")))
+}
+
+func (t *PointeeTest) WrappedReturnsFatalEmptyError() {
+ matchFunc := func(c interface{}) error {
+ return NewFatalError("")
+ }
+
+ wrapped := &fakeMatcher{matchFunc, ""}
+ matcher := Pointee(wrapped)
+
+ i := 17
+ err := matcher.Matches(&i)
+ ExpectTrue(isFatal(err))
+ ExpectThat(err, Error(HasSubstr("whose pointee")))
+ ExpectThat(err, Error(HasSubstr("17")))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go
new file mode 100644
index 00000000000..f79d0c03db1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go
@@ -0,0 +1,36 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers
+
+// transformDescription returns a matcher that is equivalent to the supplied
+// one, except that it has the supplied description instead of the one attached
+// to the existing matcher.
+func transformDescription(m Matcher, newDesc string) Matcher {
+ return &transformDescriptionMatcher{newDesc, m}
+}
+
+type transformDescriptionMatcher struct {
+ desc string
+ wrappedMatcher Matcher
+}
+
+func (m *transformDescriptionMatcher) Description() string {
+ return m.desc
+}
+
+func (m *transformDescriptionMatcher) Matches(c interface{}) error {
+ return m.wrappedMatcher.Matches(c)
+}
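Because `transformDescription` is unexported, it cannot be exercised from outside the package. The following standalone sketch re-creates the same wrap-and-rename pattern against a local copy of the two-method `Matcher` interface; the `withDescription` and `isNonNil` types are invented for illustration.

```go
// Standalone sketch of the description-rewriting pattern; every type here is
// illustrative and is not part of the vendored package.
package main

import "fmt"

// Matcher mirrors the two-method oglematchers.Matcher interface used in this tree.
type Matcher interface {
	Matches(candidate interface{}) error
	Description() string
}

// withDescription forwards Matches but reports a replacement description.
type withDescription struct {
	desc    string
	wrapped Matcher
}

func (m *withDescription) Description() string         { return m.desc }
func (m *withDescription) Matches(c interface{}) error { return m.wrapped.Matches(c) }

// isNonNil is a toy matcher that exists only to have something to wrap.
type isNonNil struct{}

func (isNonNil) Description() string { return "is non-nil" }
func (isNonNil) Matches(c interface{}) error {
	if c == nil {
		return fmt.Errorf("which is nil")
	}
	return nil
}

func main() {
	m := &withDescription{desc: "a usable value", wrapped: isNonNil{}}
	fmt.Println(m.Description()) // "a usable value"
	fmt.Println(m.Matches(17))   // <nil>
}
```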
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/.gitignore b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/.gitignore
new file mode 100644
index 00000000000..dd8fc7468f4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/.gitignore
@@ -0,0 +1,5 @@
+*.6
+6.out
+_obj/
+_test/
+_testmain.go
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/.travis.yml b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/.travis.yml
new file mode 100644
index 00000000000..b97211926e8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/.travis.yml
@@ -0,0 +1,4 @@
+# Cf. http://docs.travis-ci.com/user/getting-started/
+# Cf. http://docs.travis-ci.com/user/languages/go/
+
+language: go
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/LICENSE b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/LICENSE
new file mode 100644
index 00000000000..d6456956733
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/README.md b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/README.md
new file mode 100644
index 00000000000..c5cb5c06b33
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/README.md
@@ -0,0 +1,103 @@
+[![GoDoc](https://godoc.org/github.com/smartystreets/assertions/internal/oglemock?status.svg)](https://godoc.org/github.com/smartystreets/assertions/internal/oglemock)
+
+`oglemock` is a mocking framework for the Go programming language with the
+following features:
+
+ * An extensive and extensible set of matchers for expressing call
+ expectations (provided by the [oglematchers][] package).
+
+ * Clean, readable output that tells you exactly what you need to know.
+
+ * Style and semantics similar to [Google Mock][googlemock] and
+ [Google JS Test][google-js-test].
+
+ * Seamless integration with the [ogletest][] unit testing framework.
+
+It can be integrated into any testing framework (including Go's `testing`
+package), but out-of-the-box support is built into [ogletest][], which is the
+easiest place to use it.
+
+
+Installation
+------------
+
+First, make sure you have installed Go 1.0.2 or newer. See
+[here][golang-install] for instructions.
+
+Use the following command to install `oglemock` and its dependencies, and to
+keep them up to date:
+
+ go get -u github.com/smartystreets/assertions/internal/oglemock
+ go get -u github.com/smartystreets/assertions/internal/oglemock/createmock
+
+Those commands will install the `oglemock` package itself, along with the
+`createmock` tool that is used to auto-generate mock types.
+
+
+Generating and using mock types
+-------------------------------
+
+Automatically generating a mock implementation of an interface is easy. If you
+want to mock interfaces `Bar` and `Baz` from package `foo`, simply run the
+following:
+
+ createmock foo Bar Baz
+
+That will print source code that can be saved to a file and used in your tests.
+For example, to create a `mock_io` package containing mock implementations of
+`io.Reader` and `io.Writer`:
+
+ mkdir mock_io
+ createmock io Reader Writer > mock_io/mock_io.go
+
+The new package will be named `mock_io`, and contain types called `MockReader`
+and `MockWriter`, which implement `io.Reader` and `io.Writer` respectively.
+
+For each generated mock type, there is a corresponding function for creating an
+instance of that type given a `Controller` object (see below). For example, to
+create a mock reader:
+
+```go
+someController := [...] // See next section.
+someReader := mock_io.NewMockReader(someController, "Mock file reader")
+```
+
+The snippet above creates a mock `io.Reader` that reports failures to
+`someController`. The reader can subsequently have expectations set up and be
+passed to your code under test that uses an `io.Reader`.
+
+
+Getting ahold of a controller
+-----------------------------
+
+[oglemock.Controller][controller-ref] is used to create mock objects, and to set
+up and verify expectations for them. You can create one by calling
+`NewController` with an `ErrorReporter`, which is the basic type used to
+interface between `oglemock` and the testing framework within which it is being
+used.
+
+If you are using [ogletest][] you don't need to worry about any of this, since
+the `TestInfo` struct provided to your test's `SetUp` function already contains
+a working `Controller` that you can use to create mock objects, and you can use
+the built-in `ExpectCall` function for setting expectations. (See the
+[ogletest documentation][ogletest-docs] for more info.) Otherwise, you will need
+to implement the simple [ErrorReporter interface][reporter-ref] for your test
+environment.
+
+
+Documentation
+-------------
+
+For thorough documentation, including information on how to set up expectations,
+see [here][oglemock-docs].
+
+
+[controller-ref]: http://godoc.org/github.com/smartystreets/assertions/internal/oglemock#Controller
+[reporter-ref]: http://godoc.org/github.com/smartystreets/assertions/internal/oglemock#ErrorReporter
+[golang-install]: http://golang.org/doc/install.html
+[google-js-test]: http://code.google.com/p/google-js-test/
+[googlemock]: http://code.google.com/p/googlemock/
+[oglematchers]: https://github.com/smartystreets/assertions/internal/oglematchers
+[oglemock-docs]: http://godoc.org/github.com/smartystreets/assertions/internal/oglemock
+[ogletest]: https://github.com/smartystreets/assertions/internal/ogletest
+[ogletest-docs]: http://godoc.org/github.com/smartystreets/assertions/internal/ogletest
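To make the "Getting ahold of a controller" section above concrete outside of ogletest, here is a hedged sketch that adapts Go's `testing` package to the `ErrorReporter` interface (its two methods, `ReportError` and `ReportFatalError`, are the ones the controller calls below). The `testingReporter` type and the commented-out `mock_io` lines are illustrative assumptions, not vendored code.

```go
// Illustrative sketch -- not part of the vendored patch. One way to drive
// oglemock from the standard testing package instead of ogletest.
package mypkg_test

import (
	"testing"

	oglemock "github.com/smartystreets/assertions/internal/oglemock"
)

// testingReporter adapts *testing.T to oglemock.ErrorReporter.
type testingReporter struct{ t *testing.T }

func (r testingReporter) ReportError(fileName string, lineNumber int, err error) {
	r.t.Errorf("%s:%d: %v", fileName, lineNumber, err)
}

func (r testingReporter) ReportFatalError(fileName string, lineNumber int, err error) {
	r.t.Fatalf("%s:%d: %v", fileName, lineNumber, err)
}

func TestWithOglemock(t *testing.T) {
	controller := oglemock.NewController(testingReporter{t})
	defer controller.Finish() // verify outstanding expectations when the test ends

	// With a generated package such as mock_io (see the createmock example
	// above), a mock would be created and configured roughly like this:
	//
	//   reader := mock_io.NewMockReader(controller, "mock reader")
	//   controller.ExpectCall(reader, "Read", "mypkg_test.go", 30)(Any()).
	//       WillOnce(oglemock.Return(0, io.EOF))
}
```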
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/action.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/action.go
new file mode 100644
index 00000000000..9fd40d81fe8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/action.go
@@ -0,0 +1,36 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock
+
+import (
+ "reflect"
+)
+
+// Action represents an action to be taken in response to a call to a mock
+// method.
+type Action interface {
+ // Set the signature of the function with which this action is being used.
+ // This must be called before Invoke is called.
+ SetSignature(signature reflect.Type) error
+
+ // Invoke runs the specified action, given the arguments to the mock method.
+ // It returns zero or more values that may be treated as the return values of
+ // the method. If the action doesn't return any values, it may return the nil
+ // slice.
+ //
+ // You must call SetSignature before calling Invoke.
+ Invoke(methodArgs []interface{}) []interface{}
+}
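The built-in `Return` and `Invoke` actions cover most needs, but the interface above is small enough to implement directly. Below is a hedged sketch of a custom action that ignores its arguments and returns canned values; the `constantAction` type and the local `Action` interface copy are purely illustrative.

```go
// Illustrative sketch -- not part of the vendored patch.
package main

import (
	"fmt"
	"reflect"
)

// Action mirrors the oglemock.Action interface declared in action.go above.
type Action interface {
	SetSignature(signature reflect.Type) error
	Invoke(methodArgs []interface{}) []interface{}
}

// constantAction ignores the method arguments and always returns the same
// stored values, one per return value of the mocked method.
type constantAction struct {
	vals []interface{}
}

func (a *constantAction) SetSignature(signature reflect.Type) error {
	if signature.NumOut() != len(a.vals) {
		return fmt.Errorf("expected %d return values, have %d",
			signature.NumOut(), len(a.vals))
	}
	return nil
}

func (a *constantAction) Invoke(methodArgs []interface{}) []interface{} {
	return a.vals
}

func main() {
	// Signature of a func(string) int, like StringToInt in the tests below.
	sig := reflect.TypeOf(func(string) int { return 0 })

	var a Action = &constantAction{vals: []interface{}{17}}
	if err := a.SetSignature(sig); err != nil {
		panic(err)
	}
	fmt.Println(a.Invoke([]interface{}{"taco"})) // [17]
}
```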
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/controller.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/controller.go
new file mode 100644
index 00000000000..93a1d6239e1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/controller.go
@@ -0,0 +1,480 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock
+
+import (
+ "errors"
+ "fmt"
+ "log"
+ "math"
+ "reflect"
+ "sync"
+)
+
+// PartialExpecation is a function that should be called exactly once with
+// expected arguments or matchers in order to set up an expected method call.
+// See Controller.ExpectMethodCall below. It returns an expectation that can be
+// further modified (e.g. by calling WillOnce).
+//
+// If the arguments are of the wrong type, the function reports a fatal error
+// and returns nil.
+type PartialExpecation func(...interface{}) Expectation
+
+// Controller represents an object that implements the central logic of
+// oglemock: recording and verifying expectations, responding to mock method
+// calls, and so on.
+type Controller interface {
+ // ExpectCall expresses an expectation that the method of the given name
+ // should be called on the supplied mock object. It returns a function that
+ // should be called with the expected arguments, matchers for the arguments,
+ // or a mix of both.
+ //
+ // fileName and lineNumber should indicate the line on which the expectation
+ // was made, if known.
+ //
+ // For example:
+ //
+ // mockWriter := [...]
+ // controller.ExpectCall(mockWriter, "Write", "foo.go", 17)(ElementsAre(0x1))
+ // .WillOnce(Return(1, nil))
+ //
+ // If the mock object doesn't have a method of the supplied name, the
+ // function reports a fatal error and returns nil.
+ ExpectCall(
+ o MockObject,
+ methodName string,
+ fileName string,
+ lineNumber int) PartialExpecation
+
+ // Finish causes the controller to check for any unsatisfied expectations,
+ // and report them as errors if they exist.
+ //
+ // The controller may panic if any of its methods (including this one) are
+ // called after Finish is called.
+ Finish()
+
+ // HandleMethodCall looks for a registered expectation matching the call of
+ // the given method on mock object o, invokes the appropriate action (if
+ // any), and returns the values returned by that action (if any).
+ //
+ // If the action returns nothing, the controller returns zero values. If
+ // there is no matching expectation, the controller reports an error and
+ // returns zero values.
+ //
+ // If the mock object doesn't have a method of the supplied name, the
+ // arguments are of the wrong type, or the action returns the wrong types,
+ // the function reports a fatal error.
+ //
+ // HandleMethodCall is exported for the sake of mock implementations, and
+ // should not be used directly.
+ HandleMethodCall(
+ o MockObject,
+ methodName string,
+ fileName string,
+ lineNumber int,
+ args []interface{}) []interface{}
+}
+
+// methodMap represents a map from method name to set of expectations for that
+// method.
+type methodMap map[string][]*InternalExpectation
+
+// objectMap represents a map from mock object ID to a methodMap for that object.
+type objectMap map[uintptr]methodMap
+
+// NewController sets up a fresh controller, without any expectations set, and
+// configures the controller to use the supplied error reporter.
+func NewController(reporter ErrorReporter) Controller {
+ return &controllerImpl{reporter, sync.RWMutex{}, objectMap{}}
+}
+
+type controllerImpl struct {
+ reporter ErrorReporter
+
+ mutex sync.RWMutex
+ expectationsByObject objectMap // Protected by mutex
+}
+
+// Return the list of registered expectations for the named method of the
+// supplied object, or an empty slice if none have been registered. When this
+// method returns, it is guaranteed that c.expectationsByObject has an entry
+// for the object.
+//
+// c.mutex must be held for reading.
+func (c *controllerImpl) getExpectationsLocked(
+ o MockObject,
+ methodName string) []*InternalExpectation {
+ id := o.Oglemock_Id()
+
+ // Look up the mock object.
+ expectationsByMethod, ok := c.expectationsByObject[id]
+ if !ok {
+ expectationsByMethod = methodMap{}
+ c.expectationsByObject[id] = expectationsByMethod
+ }
+
+ result, ok := expectationsByMethod[methodName]
+ if !ok {
+ return []*InternalExpectation{}
+ }
+
+ return result
+}
+
+// Add an expectation to the list registered for the named method of the
+// supplied mock object.
+//
+// c.mutex must be held for writing.
+func (c *controllerImpl) addExpectationLocked(
+ o MockObject,
+ methodName string,
+ exp *InternalExpectation) {
+ // Get the existing list.
+ existing := c.getExpectationsLocked(o, methodName)
+
+ // Store a modified list.
+ id := o.Oglemock_Id()
+ c.expectationsByObject[id][methodName] = append(existing, exp)
+}
+
+func (c *controllerImpl) ExpectCall(
+ o MockObject,
+ methodName string,
+ fileName string,
+ lineNumber int) PartialExpecation {
+ // Find the signature for the requested method.
+ ov := reflect.ValueOf(o)
+ method := ov.MethodByName(methodName)
+ if method.Kind() == reflect.Invalid {
+ c.reporter.ReportFatalError(
+ fileName,
+ lineNumber,
+ errors.New("Unknown method: "+methodName))
+ return nil
+ }
+
+ partialAlreadyCalled := false // Protected by c.mutex
+ return func(args ...interface{}) Expectation {
+ c.mutex.Lock()
+ defer c.mutex.Unlock()
+
+ // This function should only be called once.
+ if partialAlreadyCalled {
+ c.reporter.ReportFatalError(
+ fileName,
+ lineNumber,
+ errors.New("Partial expectation called more than once."))
+ return nil
+ }
+
+ partialAlreadyCalled = true
+
+ // Make sure that the number of args is legal. Keep in mind that the
+ // method's type has an extra receiver arg.
+ if len(args) != method.Type().NumIn() {
+ c.reporter.ReportFatalError(
+ fileName,
+ lineNumber,
+ errors.New(
+ fmt.Sprintf(
+ "Expectation for %s given wrong number of arguments: "+
+ "expected %d, got %d.",
+ methodName,
+ method.Type().NumIn(),
+ len(args))))
+ return nil
+ }
+
+ // Create an expectation and insert it into the controller's map.
+ exp := InternalNewExpectation(
+ c.reporter,
+ method.Type(),
+ args,
+ fileName,
+ lineNumber)
+
+ c.addExpectationLocked(o, methodName, exp)
+
+ // Return the expectation to the user.
+ return exp
+ }
+}
+
+func (c *controllerImpl) Finish() {
+ c.mutex.Lock()
+ defer c.mutex.Unlock()
+
+ // Check whether the minimum cardinality for each registered expectation has
+ // been satisfied.
+ for _, expectationsByMethod := range c.expectationsByObject {
+ for methodName, expectations := range expectationsByMethod {
+ for _, exp := range expectations {
+ exp.mutex.Lock()
+ defer exp.mutex.Unlock()
+
+ minCardinality, _ := computeCardinalityLocked(exp)
+ if exp.NumMatches < minCardinality {
+ c.reporter.ReportError(
+ exp.FileName,
+ exp.LineNumber,
+ errors.New(
+ fmt.Sprintf(
+ "Unsatisfied expectation; expected %s to be called "+
+ "at least %d times; called %d times.",
+ methodName,
+ minCardinality,
+ exp.NumMatches)))
+ }
+ }
+ }
+ }
+}
+
+// expectationMatches checks the matchers for the expectation against the
+// supplied arguments.
+func expectationMatches(exp *InternalExpectation, args []interface{}) bool {
+ matchers := exp.ArgMatchers
+ if len(args) != len(matchers) {
+ panic("expectationMatches: len(args)")
+ }
+
+ // Check each matcher.
+ for i, matcher := range matchers {
+ if err := matcher.Matches(args[i]); err != nil {
+ return false
+ }
+ }
+
+ return true
+}
+
+// Return the expectation that matches the supplied arguments. If there is more
+// than one such expectation, the one furthest along in the list for the method
+// is returned. If there is no such expectation, nil is returned.
+//
+// c.mutex must be held for reading.
+func (c *controllerImpl) chooseExpectationLocked(
+ o MockObject,
+ methodName string,
+ args []interface{}) *InternalExpectation {
+ // Do we have any expectations for this method?
+ expectations := c.getExpectationsLocked(o, methodName)
+ if len(expectations) == 0 {
+ return nil
+ }
+
+ for i := len(expectations) - 1; i >= 0; i-- {
+ if expectationMatches(expectations[i], args) {
+ return expectations[i]
+ }
+ }
+
+ return nil
+}
+
+// makeZeroReturnValues creates a []interface{} containing appropriate zero
+// values for returning from the supplied method type.
+func makeZeroReturnValues(signature reflect.Type) []interface{} {
+ result := make([]interface{}, signature.NumOut())
+
+ for i, _ := range result {
+ outType := signature.Out(i)
+ zeroVal := reflect.Zero(outType)
+ result[i] = zeroVal.Interface()
+ }
+
+ return result
+}
+
+// computeCardinality decides on the [min, max] range of the number of expected
+// matches for the supplied expectations, according to the rules documented in
+// expectation.go.
+//
+// exp.mutex must be held for reading.
+func computeCardinalityLocked(exp *InternalExpectation) (min, max uint) {
+ // Explicit cardinality.
+ if exp.ExpectedNumMatches >= 0 {
+ min = uint(exp.ExpectedNumMatches)
+ max = min
+ return
+ }
+
+ // Implicit count based on one-time actions.
+ if len(exp.OneTimeActions) != 0 {
+ min = uint(len(exp.OneTimeActions))
+ max = min
+
+ // If there is a fallback action, this is only a lower bound.
+ if exp.FallbackAction != nil {
+ max = math.MaxUint32
+ }
+
+ return
+ }
+
+ // Implicit lack of restriction based on a fallback action being configured.
+ if exp.FallbackAction != nil {
+ min = 0
+ max = math.MaxUint32
+ return
+ }
+
+ // Implicit cardinality of one.
+ min = 1
+ max = 1
+ return
+}
+
+// chooseAction returns the action that should be invoked for the i'th match to
+// the supplied expectation (counting from zero). If the implicit "return zero
+// values" action should be used, it returns nil.
+//
+// exp.mutex must be held for reading.
+func chooseActionLocked(i uint, exp *InternalExpectation) Action {
+ // Exhaust one-time actions first.
+ if i < uint(len(exp.OneTimeActions)) {
+ return exp.OneTimeActions[i]
+ }
+
+ // Fallback action (or nil if none is configured).
+ return exp.FallbackAction
+}
+
+// Find an action for the method call, updating expectation match state in the
+// process. Return either an action that should be invoked or a set of zero
+// values to return immediately.
+//
+// This is split out from HandleMethodCall in order to more easily avoid
+// invoking the action with locks held.
+func (c *controllerImpl) chooseActionAndUpdateExpectations(
+ o MockObject,
+ methodName string,
+ fileName string,
+ lineNumber int,
+ args []interface{},
+) (action Action, zeroVals []interface{}) {
+ c.mutex.Lock()
+ defer c.mutex.Unlock()
+
+ // Find the signature for the requested method.
+ ov := reflect.ValueOf(o)
+ method := ov.MethodByName(methodName)
+ if method.Kind() == reflect.Invalid {
+ c.reporter.ReportFatalError(
+ fileName,
+ lineNumber,
+ errors.New("Unknown method: "+methodName),
+ )
+
+ // Should never get here in real code.
+ log.Println("ReportFatalError unexpectedly returned.")
+ return
+ }
+
+ // HACK(jacobsa): Make sure we got the correct number of arguments. This will
+ // need to be refined when issue #5 (variadic methods) is handled.
+ if len(args) != method.Type().NumIn() {
+ c.reporter.ReportFatalError(
+ fileName,
+ lineNumber,
+ errors.New(
+ fmt.Sprintf(
+ "Wrong number of arguments: expected %d; got %d",
+ method.Type().NumIn(),
+ len(args),
+ ),
+ ),
+ )
+
+ // Should never get here in real code.
+ log.Println("ReportFatalError unexpectedly returned.")
+ return
+ }
+
+ // Find an expectation matching this call.
+ expectation := c.chooseExpectationLocked(o, methodName, args)
+ if expectation == nil {
+ c.reporter.ReportError(
+ fileName,
+ lineNumber,
+ errors.New(
+ fmt.Sprintf("Unexpected call to %s with args: %v", methodName, args),
+ ),
+ )
+
+ zeroVals = makeZeroReturnValues(method.Type())
+ return
+ }
+
+ expectation.mutex.Lock()
+ defer expectation.mutex.Unlock()
+
+ // Increase the number of matches recorded, and check whether we're over the
+ // number expected.
+ expectation.NumMatches++
+ _, maxCardinality := computeCardinalityLocked(expectation)
+ if expectation.NumMatches > maxCardinality {
+ c.reporter.ReportError(
+ expectation.FileName,
+ expectation.LineNumber,
+ errors.New(
+ fmt.Sprintf(
+ "Unexpected call to %s: "+
+ "expected to be called at most %d times; called %d times.",
+ methodName,
+ maxCardinality,
+ expectation.NumMatches,
+ ),
+ ),
+ )
+
+ zeroVals = makeZeroReturnValues(method.Type())
+ return
+ }
+
+ // Choose an action to invoke. If there is none, just return zero values.
+ action = chooseActionLocked(expectation.NumMatches-1, expectation)
+ if action == nil {
+ zeroVals = makeZeroReturnValues(method.Type())
+ return
+ }
+
+ // Let the action take over.
+ return
+}
+
+func (c *controllerImpl) HandleMethodCall(
+ o MockObject,
+ methodName string,
+ fileName string,
+ lineNumber int,
+ args []interface{},
+) []interface{} {
+ // Figure out whether to invoke an action or return zero values.
+ action, zeroVals := c.chooseActionAndUpdateExpectations(
+ o,
+ methodName,
+ fileName,
+ lineNumber,
+ args,
+ )
+
+ if action != nil {
+ return action.Invoke(args)
+ }
+
+ return zeroVals
+}
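Before the test file that follows, a hedged end-to-end sketch of the flow `controller.go` implements: register an expectation via `ExpectCall`, route a call through `HandleMethodCall`, and verify cardinalities with `Finish`. The `printReporter` and `stubMock` helpers are illustrative stand-ins for the fake reporter and `trivialMockObject` defined in `controller_test.go` below.

```go
// Illustrative sketch -- not part of the vendored patch.
package oglemock_test

import (
	"fmt"

	. "github.com/smartystreets/assertions/internal/oglematchers"
	. "github.com/smartystreets/assertions/internal/oglemock"
)

// printReporter prints reported errors instead of failing a test.
type printReporter struct{}

func (printReporter) ReportError(file string, line int, err error)      { fmt.Println("error:", err) }
func (printReporter) ReportFatalError(file string, line int, err error) { fmt.Println("fatal:", err) }

// stubMock satisfies MockObject and exposes one mockable method.
type stubMock struct{}

func (stubMock) Oglemock_Id() uintptr         { return 1 }
func (stubMock) Oglemock_Description() string { return "stub" }
func (stubMock) StringToInt(s string) int     { return 0 }

func ExampleController() {
	controller := NewController(printReporter{})
	mock := stubMock{}

	// One one-time action and no fallback: the expectation must be matched
	// exactly once (see computeCardinalityLocked above).
	controller.ExpectCall(mock, "StringToInt", "example.go", 1)(HasSubstr("ta")).
		WillOnce(Return(17))

	res := controller.HandleMethodCall(mock, "StringToInt", "example.go", 2,
		[]interface{}{"taco"})
	fmt.Println(res) // [17]

	controller.Finish() // reports nothing: the single expected call happened
}
```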
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/controller_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/controller_test.go
new file mode 100644
index 00000000000..0ff5e5c41bb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/controller_test.go
@@ -0,0 +1,1249 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/oglemock"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "reflect"
+)
+
+////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////
+
+type errorReport struct {
+ fileName string
+ lineNumber int
+ err error
+}
+
+type fakeErrorReporter struct {
+ errors []errorReport
+ fatalErrors []errorReport
+}
+
+func (r *fakeErrorReporter) ReportError(fileName string, lineNumber int, err error) {
+ report := errorReport{fileName, lineNumber, err}
+ r.errors = append(r.errors, report)
+}
+
+func (r *fakeErrorReporter) ReportFatalError(fileName string, lineNumber int, err error) {
+ report := errorReport{fileName, lineNumber, err}
+ r.fatalErrors = append(r.fatalErrors, report)
+}
+
+type trivialMockObject struct {
+ id uintptr
+ desc string
+}
+
+func (o *trivialMockObject) Oglemock_Id() uintptr {
+ return o.id
+}
+
+func (o *trivialMockObject) Oglemock_Description() string {
+ return o.desc
+}
+
+// Method being mocked
+func (o *trivialMockObject) StringToInt(s string) int {
+ return 0
+}
+
+// Method being mocked
+func (o *trivialMockObject) TwoIntsToString(i, j int) string {
+ return ""
+}
+
+type ControllerTest struct {
+ reporter fakeErrorReporter
+ controller Controller
+
+ mock1 MockObject
+ mock2 MockObject
+}
+
+func (t *ControllerTest) SetUp(c *TestInfo) {
+ t.reporter.errors = make([]errorReport, 0)
+ t.reporter.fatalErrors = make([]errorReport, 0)
+ t.controller = NewController(&t.reporter)
+
+ t.mock1 = &trivialMockObject{17, "taco"}
+ t.mock2 = &trivialMockObject{19, "burrito"}
+}
+
+func init() { RegisterTestSuite(&ControllerTest{}) }
+
+////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////
+
+func (t *ControllerTest) FinishWithoutAnyEvents() {
+ t.controller.Finish()
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) HandleCallForUnknownObject() {
+ p := []byte{255}
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "taco.go",
+ 112,
+ []interface{}{p})
+
+ // The error should be reported immediately.
+ AssertEq(1, len(t.reporter.errors))
+ AssertEq(0, len(t.reporter.fatalErrors))
+
+ ExpectEq("taco.go", t.reporter.errors[0].fileName)
+ ExpectEq(112, t.reporter.errors[0].lineNumber)
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("Unexpected")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("StringToInt")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("[255]")))
+
+ // Finish should change nothing.
+ t.controller.Finish()
+
+ ExpectEq(1, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) ExpectCallForUnknownMethod() {
+ ExpectEq(
+ nil,
+ t.controller.ExpectCall(t.mock1, "Frobnicate", "burrito.go", 117))
+
+ // A fatal error should be reported immediately.
+ AssertEq(0, len(t.reporter.errors))
+ AssertEq(1, len(t.reporter.fatalErrors))
+
+ report := t.reporter.fatalErrors[0]
+ ExpectEq("burrito.go", report.fileName)
+ ExpectEq(117, report.lineNumber)
+ ExpectThat(report.err, Error(HasSubstr("Unknown method")))
+ ExpectThat(report.err, Error(HasSubstr("Frobnicate")))
+}
+
+func (t *ControllerTest) PartialExpectationGivenWrongNumberOfArgs() {
+ ExpectEq(
+ nil,
+ t.controller.ExpectCall(t.mock1, "TwoIntsToString", "burrito.go", 117)(
+ 17, 19, 23))
+
+ // A fatal error should be reported immediately.
+ AssertEq(0, len(t.reporter.errors))
+ AssertEq(1, len(t.reporter.fatalErrors))
+
+ report := t.reporter.fatalErrors[0]
+ ExpectEq("burrito.go", report.fileName)
+ ExpectEq(117, report.lineNumber)
+ ExpectThat(report.err, Error(HasSubstr("TwoIntsToString")))
+ ExpectThat(report.err, Error(HasSubstr("arguments")))
+ ExpectThat(report.err, Error(HasSubstr("expected 2")))
+ ExpectThat(report.err, Error(HasSubstr("got 3")))
+}
+
+func (t *ControllerTest) PartialExpectationCalledTwice() {
+ partial := t.controller.ExpectCall(t.mock1, "StringToInt", "burrito.go", 117)
+ AssertNe(nil, partial("taco"))
+ ExpectEq(nil, partial("taco"))
+
+ // A fatal error should be reported immediately.
+ AssertEq(0, len(t.reporter.errors))
+ AssertEq(1, len(t.reporter.fatalErrors))
+
+ report := t.reporter.fatalErrors[0]
+ ExpectEq("burrito.go", report.fileName)
+ ExpectEq(117, report.lineNumber)
+ ExpectThat(report.err, Error(HasSubstr("called more than once")))
+}
+
+func (t *ControllerTest) HandleMethodCallForUnknownMethod() {
+ ExpectEq(
+ nil,
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "Frobnicate",
+ "burrito.go",
+ 117,
+ []interface{}{}))
+
+ // A fatal error should be reported immediately.
+ AssertEq(0, len(t.reporter.errors))
+ AssertEq(1, len(t.reporter.fatalErrors))
+
+ report := t.reporter.fatalErrors[0]
+ ExpectEq("burrito.go", report.fileName)
+ ExpectEq(117, report.lineNumber)
+ ExpectThat(report.err, Error(HasSubstr("Unknown method")))
+ ExpectThat(report.err, Error(HasSubstr("Frobnicate")))
+}
+
+func (t *ControllerTest) HandleMethodCallGivenWrongNumberOfArgs() {
+ t.controller.ExpectCall(t.mock1, "TwoIntsToString", "", 0)(17, 19)
+
+ ExpectEq(
+ nil,
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "TwoIntsToString",
+ "burrito.go",
+ 117,
+ []interface{}{17, 19, 23}))
+
+ // A fatal error should be reported immediately.
+ AssertEq(0, len(t.reporter.errors))
+ AssertEq(1, len(t.reporter.fatalErrors))
+
+ report := t.reporter.fatalErrors[0]
+ ExpectEq("burrito.go", report.fileName)
+ ExpectEq(117, report.lineNumber)
+ ExpectThat(report.err, Error(HasSubstr("arguments")))
+ ExpectThat(report.err, Error(HasSubstr("expected 2")))
+ ExpectThat(report.err, Error(HasSubstr("got 3")))
+}
+
+func (t *ControllerTest) ExpectThenNonMatchingCall() {
+ // Expectation -- set up a fallback action to make it optional.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "TwoIntsToString",
+ "burrito.go",
+ 117)
+
+ exp := partial(LessThan(10), Equals(2))
+ exp.WillRepeatedly(Return(""))
+
+ // Call
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "TwoIntsToString",
+ "taco.go",
+ 112,
+ []interface{}{8, 1})
+
+ // The error should be reported immediately.
+ AssertEq(1, len(t.reporter.errors))
+ AssertEq(0, len(t.reporter.fatalErrors))
+
+ ExpectEq("taco.go", t.reporter.errors[0].fileName)
+ ExpectEq(112, t.reporter.errors[0].lineNumber)
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("Unexpected")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("TwoIntsToString")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("[8 1]")))
+
+ // Finish should change nothing.
+ t.controller.Finish()
+
+ ExpectEq(1, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) ExplicitCardinalityNotSatisfied() {
+ // Expectation -- set up an explicit cardinality of three.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.Times(3)
+
+ // Call twice.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // The error should not yet be reported.
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+
+ // Finish should cause the error to be reported.
+ t.controller.Finish()
+
+ AssertEq(1, len(t.reporter.errors))
+ AssertEq(0, len(t.reporter.fatalErrors))
+
+ ExpectEq("burrito.go", t.reporter.errors[0].fileName)
+ ExpectEq(117, t.reporter.errors[0].lineNumber)
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("Unsatisfied")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("StringToInt")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("at least 3 times")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("called 2 times")))
+}
+
+func (t *ControllerTest) ImplicitOneTimeActionCountNotSatisfied() {
+ // Expectation -- add three one-time actions.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillOnce(Return(0))
+ exp.WillOnce(Return(1))
+ exp.WillOnce(Return(2))
+
+ // Call twice.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // The error should not yet be reported.
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+
+ // Finish should cause the error to be reported.
+ t.controller.Finish()
+
+ AssertEq(1, len(t.reporter.errors))
+ AssertEq(0, len(t.reporter.fatalErrors))
+
+ ExpectEq("burrito.go", t.reporter.errors[0].fileName)
+ ExpectEq(117, t.reporter.errors[0].lineNumber)
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("Unsatisfied")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("StringToInt")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("at least 3 times")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("called 2 times")))
+}
+
+func (t *ControllerTest) ImplicitOneTimeActionLowerBoundNotSatisfied() {
+ // Expectation -- add three one-time actions and a fallback.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillOnce(Return(0))
+ exp.WillOnce(Return(1))
+ exp.WillOnce(Return(2))
+ exp.WillRepeatedly(Return(3))
+
+ // Call twice.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // The error should not yet be reported.
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+
+ // Finish should cause the error to be reported.
+ t.controller.Finish()
+
+ AssertEq(1, len(t.reporter.errors))
+ AssertEq(0, len(t.reporter.fatalErrors))
+
+ ExpectEq("burrito.go", t.reporter.errors[0].fileName)
+ ExpectEq(117, t.reporter.errors[0].lineNumber)
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("Unsatisfied")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("StringToInt")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("at least 3 times")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("called 2 times")))
+}
+
+func (t *ControllerTest) ImplicitCardinalityOfOneNotSatisfied() {
+ // Expectation -- add no actions.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ partial(HasSubstr(""))
+
+ // Don't call.
+
+ // The error should not yet be reported.
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+
+ // Finish should cause the error to be reported.
+ t.controller.Finish()
+
+ AssertEq(1, len(t.reporter.errors))
+ AssertEq(0, len(t.reporter.fatalErrors))
+
+ ExpectEq("burrito.go", t.reporter.errors[0].fileName)
+ ExpectEq(117, t.reporter.errors[0].lineNumber)
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("Unsatisfied")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("StringToInt")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("at least 1 time")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("called 0 times")))
+}
+
+func (t *ControllerTest) ExplicitCardinalityOverrun() {
+ // Expectation -- call times(2).
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.Times(2)
+
+ // Call three times.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // The error should be reported immediately.
+ AssertEq(1, len(t.reporter.errors))
+ AssertEq(0, len(t.reporter.fatalErrors))
+
+ ExpectEq("burrito.go", t.reporter.errors[0].fileName)
+ ExpectEq(117, t.reporter.errors[0].lineNumber)
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("Unexpected")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("StringToInt")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("at most 2 times")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("called 3 times")))
+
+ // Finish should change nothing.
+ t.controller.Finish()
+
+ ExpectEq(1, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) ImplicitOneTimeActionCountOverrun() {
+ // Expectation -- add a one-time action.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillOnce(Return(0))
+
+ // Call twice.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // The error should be reported immediately.
+ AssertEq(1, len(t.reporter.errors))
+ AssertEq(0, len(t.reporter.fatalErrors))
+
+ ExpectEq("burrito.go", t.reporter.errors[0].fileName)
+ ExpectEq(117, t.reporter.errors[0].lineNumber)
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("Unexpected")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("StringToInt")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("at most 1 time")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("called 2 times")))
+
+ // Finish should change nothing.
+ t.controller.Finish()
+
+ ExpectEq(1, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) ImplicitCardinalityOfOneOverrun() {
+ // Expectation -- don't add any actions.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ partial(HasSubstr(""))
+
+ // Call twice.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // The error should be reported immediately.
+ AssertEq(1, len(t.reporter.errors))
+ AssertEq(0, len(t.reporter.fatalErrors))
+
+ ExpectEq("burrito.go", t.reporter.errors[0].fileName)
+ ExpectEq(117, t.reporter.errors[0].lineNumber)
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("Unexpected")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("StringToInt")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("at most 1 time")))
+ ExpectThat(t.reporter.errors[0].err, Error(HasSubstr("called 2 times")))
+
+ // Finish should change nothing.
+ t.controller.Finish()
+
+ ExpectEq(1, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) ExplicitCardinalitySatisfied() {
+ // Expectation -- set up an explicit cardinality of two.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.Times(2)
+
+ // Call twice.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // There should be no errors.
+ t.controller.Finish()
+
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) ImplicitOneTimeActionCountSatisfied() {
+ // Expectation -- set up two one-time actions.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillOnce(Return(0))
+ exp.WillOnce(Return(1))
+
+ // Call twice.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // There should be no errors.
+ t.controller.Finish()
+
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) ImplicitOneTimeActionLowerBoundJustSatisfied() {
+ // Expectation -- set up two one-time actions and a fallback.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillOnce(Return(0))
+ exp.WillOnce(Return(1))
+ exp.WillRepeatedly(Return(2))
+
+ // Call twice.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // There should be no errors.
+ t.controller.Finish()
+
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) ImplicitOneTimeActionLowerBoundMoreThanSatisfied() {
+ // Expectation -- set up two one-time actions and a fallback.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillOnce(Return(0))
+ exp.WillOnce(Return(1))
+ exp.WillRepeatedly(Return(2))
+
+ // Call four times.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // There should be no errors.
+ t.controller.Finish()
+
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) FallbackActionConfiguredWithZeroCalls() {
+ // Expectation -- set up a fallback action.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillRepeatedly(Return(0))
+
+ // Don't call.
+
+ // There should be no errors.
+ t.controller.Finish()
+
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) FallbackActionConfiguredWithMultipleCalls() {
+ // Expectation -- set up a fallback action.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillRepeatedly(Return(0))
+
+ // Call twice.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // There should be no errors.
+ t.controller.Finish()
+
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) ImplicitCardinalityOfOneSatisfied() {
+ // Expectation -- don't add actions.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ partial(HasSubstr(""))
+
+ // Call once.
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ // There should be no errors.
+ t.controller.Finish()
+
+ ExpectEq(0, len(t.reporter.errors))
+ ExpectEq(0, len(t.reporter.fatalErrors))
+}
+
+func (t *ControllerTest) InvokesOneTimeActions() {
+ var res []interface{}
+
+ // Expectation -- set up two one-time actions.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ suppliedArg := ""
+ expectedReturn := 17
+
+ f := func(s string) int {
+ suppliedArg = s
+ return expectedReturn
+ }
+
+ exp := partial(HasSubstr(""))
+ exp.WillOnce(Invoke(f))
+ exp.WillOnce(Return(1))
+
+ AssertThat(t.reporter.fatalErrors, ElementsAre())
+
+ // Call 0
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{"taco"})
+
+ ExpectEq("taco", suppliedArg)
+ ExpectThat(res, ElementsAre(IdenticalTo(expectedReturn)))
+
+ // Call 1
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(1))
+}
+
+func (t *ControllerTest) InvokesFallbackActionAfterOneTimeActions() {
+ var res []interface{}
+
+ // Expectation -- set up two one-time actions and a fallback.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillOnce(Return(0))
+ exp.WillOnce(Return(1))
+ exp.WillRepeatedly(Return(2))
+
+ // Call 0
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(0))
+
+ // Call 1
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(1))
+
+ // Call 2
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(2))
+
+ // Call 3
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(2))
+}
+
+func (t *ControllerTest) InvokesFallbackActionWithoutOneTimeActions() {
+ var res []interface{}
+
+ // Expectation -- set up only a fallback action.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillRepeatedly(Return(2))
+
+ // Call 0
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(2))
+
+ // Call 1
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(2))
+
+ // Call 2
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(2))
+}
+
+func (t *ControllerTest) ImplicitActionReturnsZeroInts() {
+ var res []interface{}
+
+ // Expectation -- set up a cardinality of two.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.Times(2)
+
+ // Call 0
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(reflect.TypeOf(res[0]), Equals(reflect.TypeOf(int(0))))
+ ExpectThat(res[0], Equals(0))
+
+ // Call 1
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(reflect.TypeOf(res[0]), Equals(reflect.TypeOf(int(0))))
+ ExpectThat(res[0], Equals(0))
+}
+
+func (t *ControllerTest) ImplicitActionReturnsEmptyStrings() {
+ var res []interface{}
+
+ // Expectation -- set up a cardinality of two.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "TwoIntsToString",
+ "burrito.go",
+ 117)
+
+ exp := partial(LessThan(100), LessThan(100))
+ exp.Times(2)
+
+ // Call 0
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "TwoIntsToString",
+ "",
+ 0,
+ []interface{}{0, 0})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(""))
+
+ // Call 1
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "TwoIntsToString",
+ "",
+ 0,
+ []interface{}{0, 0})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(""))
+}
+
+func (t *ControllerTest) ExpectationsAreMatchedLastToFirst() {
+ var res []interface{}
+
+ // General expectation
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillRepeatedly(Return(17))
+
+ // More specific expectation
+ partial = t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp = partial(Equals("taco"))
+ exp.WillRepeatedly(Return(19))
+
+ // Call -- the second expectation should match.
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{"taco"})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(19))
+
+ // Call -- the first expectation should match because the second doesn't.
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{"burrito"})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(17))
+}
+
+func (t *ControllerTest) ExpectationsAreSegregatedByMockObject() {
+ var res []interface{}
+
+ // Expectation for mock1 -- return 17.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillRepeatedly(Return(17))
+
+ // Expectation for mock2 -- return 19.
+ partial = t.controller.ExpectCall(
+ t.mock2,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp = partial(HasSubstr(""))
+ exp.WillRepeatedly(Return(19))
+
+ // Call mock1.
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(17))
+
+ // Call mock2.
+ res = t.controller.HandleMethodCall(
+ t.mock2,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(19))
+}
+
+func (t *ControllerTest) ExpectationsAreSegregatedByMethodName() {
+ var res []interface{}
+
+ // Expectation for StringToInt
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillRepeatedly(Return(17))
+
+ // Expectation for TwoIntsToString
+ partial = t.controller.ExpectCall(
+ t.mock1,
+ "TwoIntsToString",
+ "burrito.go",
+ 117)
+
+ exp = partial(1, 2)
+ exp.WillRepeatedly(Return("taco"))
+
+ // Call StringToInt.
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals(17))
+
+ // Call TwoIntsToString.
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "TwoIntsToString",
+ "",
+ 0,
+ []interface{}{1, 2})
+
+ ExpectThat(len(res), Equals(1))
+ ExpectThat(res[0], Equals("taco"))
+}
+
+func (t *ControllerTest) ActionCallsAgainMatchingDifferentExpectation() {
+ var res []interface{}
+
+ // Expectation for StringToInt
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.WillOnce(Return(17))
+
+ // Expectation for TwoIntsToString -- pretend we call StringToInt.
+ partial = t.controller.ExpectCall(
+ t.mock1,
+ "TwoIntsToString",
+ "burrito.go",
+ 117)
+
+ exp = partial(1, 2)
+ exp.WillOnce(Invoke(func(int, int) string {
+ t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "taco.go",
+ 112,
+ []interface{}{""})
+
+ return "queso"
+ }))
+
+ // Call TwoIntsToString.
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "TwoIntsToString",
+ "",
+ 0,
+ []interface{}{1, 2})
+
+ AssertThat(res, ElementsAre("queso"))
+
+ // Finish. Everything should be satisfied.
+ t.controller.Finish()
+
+ ExpectThat(t.reporter.errors, ElementsAre())
+ ExpectThat(t.reporter.fatalErrors, ElementsAre())
+}
+
+func (t *ControllerTest) ActionCallsAgainMatchingSameExpectation() {
+ var res []interface{}
+
+ // Expectation for StringToInt -- should be called twice. The first time it
+ // should call itself.
+ partial := t.controller.ExpectCall(
+ t.mock1,
+ "StringToInt",
+ "burrito.go",
+ 117)
+
+ exp := partial(HasSubstr(""))
+ exp.Times(2)
+ exp.WillOnce(Invoke(func(string) int {
+ subCallRes := t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "taco.go",
+ 112,
+ []interface{}{""})
+
+ return subCallRes[0].(int) + 19
+ }))
+
+ exp.WillOnce(Return(17))
+
+ // Call.
+ res = t.controller.HandleMethodCall(
+ t.mock1,
+ "StringToInt",
+ "",
+ 0,
+ []interface{}{""})
+
+ AssertThat(res, ElementsAre(17+19))
+
+ // Finish. Everything should be satisfied.
+ t.controller.Finish()
+
+ ExpectThat(t.reporter.errors, ElementsAre())
+ ExpectThat(t.reporter.fatalErrors, ElementsAre())
+}
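
A minimal usage sketch of the Controller API these tests exercise, for orientation. It assumes a constructor of the form oglemock.NewController(reporter), which is not shown in this hunk, and treats the mock object, method name, and file/line values as placeholders; only ExpectCall, the expectation methods, and Finish are confirmed by the tests above.

// Sketch only: wiring a controller, one expectation, and Finish together.
package example

import (
	. "github.com/smartystreets/assertions/internal/oglematchers"
	"github.com/smartystreets/assertions/internal/oglemock"
)

func configureAndVerify(reporter oglemock.ErrorReporter, mock oglemock.MockObject) {
	// NewController is assumed here; it does not appear in this hunk.
	controller := oglemock.NewController(reporter)

	// Two one-time actions plus a fallback, as in the tests above.
	partial := controller.ExpectCall(mock, "StringToInt", "example.go", 10)
	exp := partial(HasSubstr("taco"))
	exp.WillOnce(oglemock.Return(0))
	exp.WillOnce(oglemock.Return(1))
	exp.WillRepeatedly(oglemock.Return(2))

	// In a real test the generated mock's StringToInt method would forward
	// its calls to controller.HandleMethodCall at this point.

	// Verify that every expectation was satisfied.
	controller.Finish()
}
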
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/createmock.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/createmock.go
new file mode 100644
index 00000000000..c5427dc8ba9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/createmock.go
@@ -0,0 +1,245 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// createmock is used to generate source code for mock versions of interfaces
+// from installed packages.
+package main
+
+import (
+ "errors"
+ "flag"
+ "fmt"
+ "go/build"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path"
+ "regexp"
+ "text/template"
+
+ // Ensure that the generate package, which is used by the generated code, is
+ // installed by goinstall.
+ _ "github.com/smartystreets/assertions/internal/oglemock/generate"
+)
+
+var fSamePackage = flag.Bool(
+ "same_package",
+ false,
+ "Generate output appropriate for including in the same package as the "+
+ "mocked interfaces.")
+
+// A template for generated code that is used to print the result.
+const tmplStr = `
+{{$interfacePkgPath := .InterfacePkgPath}}
+
+package main
+
+import (
+ {{range $identifier, $import := .Imports}}
+ {{$identifier}} "{{$import}}"
+ {{end}}
+)
+
+func getTypeForPtr(ptr interface{}) reflect.Type {
+ return reflect.TypeOf(ptr).Elem()
+}
+
+func main() {
+ // Reduce noise in logging output.
+ log.SetFlags(0)
+
+ interfaces := []reflect.Type{
+ {{range $typeName := .TypeNames}}
+ getTypeForPtr((*{{pathBase $interfacePkgPath}}.{{$typeName}})(nil)),
+ {{end}}
+ }
+
+ err := generate.GenerateMockSource(
+ os.Stdout,
+ "{{.OutputPkgPath}}",
+ interfaces)
+
+ if err != nil {
+ log.Fatalf("Error generating mock source: %v", err)
+ }
+}
+`
+
+// A map from import identifier to package to use that identifier for,
+// containing elements for each import needed by the generated code.
+type importMap map[string]string
+
+type tmplArg struct {
+ // The full path of the package from which the interfaces come.
+ InterfacePkgPath string
+
+ // The package path to assume for the generated code.
+ OutputPkgPath string
+
+ // Imports needed by the generated code.
+ Imports importMap
+
+ // Types to be mocked, relative to their package's name.
+ TypeNames []string
+}
+
+var unknownPackageRegexp = regexp.MustCompile(
+ `tool\.go:\d+:\d+: cannot find package "([^"]+)"`)
+
+var undefinedInterfaceRegexp = regexp.MustCompile(`tool\.go:\d+: undefined: [\pL_0-9]+\.([\pL_0-9]+)`)
+
+// Does the 'go build' output indicate that a package wasn't found? If so,
+// return the name of the package.
+func findUnknownPackage(output []byte) *string {
+ if match := unknownPackageRegexp.FindSubmatch(output); match != nil {
+ res := string(match[1])
+ return &res
+ }
+
+ return nil
+}
+
+// Does the 'go build' output indicate that an interface wasn't found? If so,
+// return the name of the interface.
+func findUndefinedInterface(output []byte) *string {
+ if match := undefinedInterfaceRegexp.FindSubmatch(output); match != nil {
+ res := string(match[1])
+ return &res
+ }
+
+ return nil
+}
+
+// Split out from main so that deferred calls are executed even in the event of
+// an error.
+func run() error {
+ // Reduce noise in logging output.
+ log.SetFlags(0)
+
+ // Check the command-line arguments.
+ flag.Parse()
+
+ cmdLineArgs := flag.Args()
+ if len(cmdLineArgs) < 2 {
+ return errors.New("Usage: createmock [package] [interface ...]")
+ }
+
+ // Create a temporary directory inside of $GOPATH to hold generated code.
+ buildPkg, err := build.Import("github.com/smartystreets/assertions/internal/oglemock", "", build.FindOnly)
+ if err != nil {
+ return errors.New(fmt.Sprintf("Couldn't find oglemock in $GOPATH: %v", err))
+ }
+
+ tmpDir, err := ioutil.TempDir(buildPkg.SrcRoot, "tmp-createmock-")
+ if err != nil {
+ return errors.New(fmt.Sprintf("Creating temp dir: %v", err))
+ }
+
+ defer os.RemoveAll(tmpDir)
+
+ // Create a file to hold generated code.
+ codeFile, err := os.Create(path.Join(tmpDir, "tool.go"))
+ if err != nil {
+ return errors.New(fmt.Sprintf("Couldn't create a file to hold code: %v", err))
+ }
+
+ // Create an appropriate path for the built binary.
+ binaryPath := path.Join(tmpDir, "tool")
+
+ // Create an appropriate template argument.
+ arg := tmplArg{
+ InterfacePkgPath: cmdLineArgs[0],
+ TypeNames: cmdLineArgs[1:],
+ }
+
+ if *fSamePackage {
+ arg.OutputPkgPath = arg.InterfacePkgPath
+ } else {
+ arg.OutputPkgPath = "mock_" + path.Base(arg.InterfacePkgPath)
+ }
+
+ arg.Imports = make(importMap)
+ arg.Imports[path.Base(arg.InterfacePkgPath)] = arg.InterfacePkgPath
+ arg.Imports["generate"] = "github.com/smartystreets/assertions/internal/oglemock/generate"
+ arg.Imports["log"] = "log"
+ arg.Imports["os"] = "os"
+ arg.Imports["reflect"] = "reflect"
+
+ // Execute the template to generate code that will itself generate the mock
+ // code. Write the code to the temp file.
+ tmpl := template.Must(
+ template.New("code").Funcs(
+ template.FuncMap{
+ "pathBase": path.Base,
+ }).Parse(tmplStr))
+ if err := tmpl.Execute(codeFile, arg); err != nil {
+ return errors.New(fmt.Sprintf("Error executing template: %v", err))
+ }
+
+ codeFile.Close()
+
+ // Attempt to build the code.
+ cmd := exec.Command("go", "build", "-o", binaryPath)
+ cmd.Dir = tmpDir
+ buildOutput, err := cmd.CombinedOutput()
+
+ if err != nil {
+ // Did the compilation fail due to the user-specified package not being found?
+ pkg := findUnknownPackage(buildOutput)
+ if pkg != nil && *pkg == arg.InterfacePkgPath {
+ return errors.New(fmt.Sprintf("Unknown package: %s", *pkg))
+ }
+
+ // Did the compilation fail due to an unknown interface?
+ if in := findUndefinedInterface(buildOutput); in != nil {
+ return errors.New(fmt.Sprintf("Unknown interface: %s", *in))
+ }
+
+ // Otherwise return a generic error.
+ return errors.New(fmt.Sprintf(
+ "%s\n\nError building generated code:\n\n"+
+ " %v\n\nPlease report this oglemock bug.",
+ buildOutput,
+ err))
+ }
+
+ // Run the binary.
+ cmd = exec.Command(binaryPath)
+ binaryOutput, err := cmd.CombinedOutput()
+
+ if err != nil {
+ return errors.New(fmt.Sprintf(
+ "%s\n\nError running generated code:\n\n"+
+ " %v\n\n Please report this oglemock bug.",
+ binaryOutput,
+ err))
+ }
+
+ // Copy its output.
+ _, err = os.Stdout.Write(binaryOutput)
+ if err != nil {
+ return errors.New(fmt.Sprintf("Error copying binary output: %v", err))
+ }
+
+ return nil
+}
+
+func main() {
+ if err := run(); err != nil {
+ fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+ os.Exit(1)
+ }
+}
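
For orientation, this is roughly what the intermediate program rendered from tmplStr looks like for a hypothetical `createmock io Reader` run. It is a sketch reconstructed from the template and the Imports map built in run() above; the real output's whitespace differs and is produced only inside the temporary directory.

// Hypothetical instantiation of tmplStr for `createmock io Reader`.
// The import identifiers mirror the entries placed in arg.Imports above.
package main

import (
	generate "github.com/smartystreets/assertions/internal/oglemock/generate"
	io "io"
	log "log"
	os "os"
	reflect "reflect"
)

func getTypeForPtr(ptr interface{}) reflect.Type {
	return reflect.TypeOf(ptr).Elem()
}

func main() {
	// Reduce noise in logging output.
	log.SetFlags(0)

	interfaces := []reflect.Type{
		getTypeForPtr((*io.Reader)(nil)),
	}

	err := generate.GenerateMockSource(
		os.Stdout,
		"mock_io",
		interfaces)

	if err != nil {
		log.Fatalf("Error generating mock source: %v", err)
	}
}
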
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/createmock_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/createmock_test.go
new file mode 100644
index 00000000000..ddfc07a3e8d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/createmock_test.go
@@ -0,0 +1,233 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bytes"
+ "flag"
+ "fmt"
+ "go/build"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path"
+ "syscall"
+ "testing"
+
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+var dumpNew = flag.Bool("dump_new", false, "Dump new golden files.")
+
+////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////
+
+var tempDir string
+var createmockPath string
+
+type CreateMockTest struct {
+}
+
+func TestOgletest(t *testing.T) { RunTests(t) }
+func init() { RegisterTestSuite(&CreateMockTest{}) }
+
+func (t *CreateMockTest) SetUpTestSuite() {
+	// Create a temporary directory to hold the built createmock binary.
+	// Assign to the package-level tempDir (rather than shadowing it with :=)
+	// so that TearDownTestSuite can remove it later.
+	var err error
+	tempDir, err = ioutil.TempDir("", "createmock-")
+ if err != nil {
+ panic("Creating temporary directory: " + err.Error())
+ }
+
+ createmockPath = path.Join(tempDir, "createmock")
+
+ // Build the createmock tool so that it can be used in the tests below.
+ cmd := exec.Command("go", "build", "-o", createmockPath, "github.com/smartystreets/assertions/internal/oglemock/createmock")
+ if output, err := cmd.CombinedOutput(); err != nil {
+ panic(fmt.Sprintf("Error building createmock: %v\n\n%s", err, output))
+ }
+}
+
+func (t *CreateMockTest) TearDownTestSuite() {
+ // Delete the createmock binary we built above.
+ os.RemoveAll(tempDir)
+ tempDir = ""
+ createmockPath = ""
+}
+
+func (t *CreateMockTest) runGoldenTest(
+ caseName string,
+ expectedReturnCode int,
+ createmockArgs ...string) {
+ // Run createmock.
+ cmd := exec.Command(createmockPath, createmockArgs...)
+ output, err := cmd.CombinedOutput()
+
+ // Make sure the process actually exited.
+ exitError, ok := err.(*exec.ExitError)
+ if err != nil && (!ok || !exitError.Exited()) {
+ panic("exec.Command.CombinedOutput: " + err.Error())
+ }
+
+ // Extract a return code.
+ var actualReturnCode int
+ if exitError != nil {
+ actualReturnCode = exitError.Sys().(syscall.WaitStatus).ExitStatus()
+ }
+
+ // Make sure the return code is correct.
+ ExpectEq(expectedReturnCode, actualReturnCode)
+
+ // Read the golden file.
+ goldenPath := path.Join("testdata", "golden."+caseName)
+ goldenData := readFileOrDie(goldenPath)
+
+ // Compare the two.
+ identical := (string(output) == string(goldenData))
+ ExpectTrue(identical, "Output doesn't match for case '%s'.", caseName)
+
+ // Write out a new golden file if requested.
+ if !identical && *dumpNew {
+ writeContentsToFileOrDie(output, goldenPath)
+ }
+}
+
+// Ensure that when createmock is run with the supplied args, it produces
+// output that can be compiled.
+func (t *CreateMockTest) runCompilationTest(createmockArgs ...string) {
+ // Create a temporary directory inside of $GOPATH to hold generated code.
+ buildPkg, err := build.Import("github.com/smartystreets/assertions/internal/oglemock", "", build.FindOnly)
+ AssertEq(nil, err)
+
+ tmpDir, err := ioutil.TempDir(buildPkg.SrcRoot, "tmp-createmock_test-")
+ AssertEq(nil, err)
+ defer os.RemoveAll(tmpDir)
+
+ // Create a file to hold the mock code.
+ codeFile, err := os.Create(path.Join(tmpDir, "mock.go"))
+ AssertEq(nil, err)
+
+ // Run createmock and save its output to the file created above.
+ stdErrBuf := new(bytes.Buffer)
+
+ cmd := exec.Command(createmockPath, createmockArgs...)
+ cmd.Stdout = codeFile
+ cmd.Stderr = stdErrBuf
+
+ err = cmd.Run()
+ AssertEq(nil, err, "createmock stderr output:\n\n%s", stdErrBuf.String())
+ codeFile.Close()
+
+ // Run 'go build' in the directory and make sure it exits with return code
+ // zero.
+ cmd = exec.Command("go", "build")
+ cmd.Dir = tmpDir
+ output, err := cmd.CombinedOutput()
+
+ ExpectEq(nil, err, "go build output:\n\n%s", output)
+}
+
+func writeContentsToFileOrDie(contents []byte, path string) {
+ if err := ioutil.WriteFile(path, contents, 0600); err != nil {
+ panic("ioutil.WriteFile: " + err.Error())
+ }
+}
+
+func readFileOrDie(path string) []byte {
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ panic("ioutil.ReadFile: " + err.Error())
+ }
+
+ return contents
+}
+
+////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////
+
+func (t *CreateMockTest) NoPackage() {
+ t.runGoldenTest(
+ "no_package",
+ 1)
+}
+
+func (t *CreateMockTest) NoInterfaces() {
+ t.runGoldenTest(
+ "no_interfaces",
+ 1,
+ "io")
+}
+
+func (t *CreateMockTest) UnknownPackage() {
+ t.runGoldenTest(
+ "unknown_package",
+ 1,
+ "foo/bar",
+ "Reader")
+}
+
+func (t *CreateMockTest) UnknownInterface() {
+ t.runGoldenTest(
+ "unknown_interface",
+ 1,
+ "io",
+ "Frobnicator")
+}
+
+func (t *CreateMockTest) GCSBucket() {
+ t.runGoldenTest(
+ "gcs_bucket",
+ 0,
+ "github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/gcs",
+ "Bucket")
+}
+
+func (t *CreateMockTest) GCSBucket_SamePackage() {
+ t.runGoldenTest(
+ "gcs_bucket_same_package",
+ 0,
+ "--same_package",
+ "github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/gcs",
+ "Bucket")
+}
+
+func (t *CreateMockTest) IoReaderAndWriter() {
+ t.runCompilationTest(
+ "io",
+ "Reader",
+ "Writer")
+}
+
+func (t *CreateMockTest) OsFileInfo() {
+ // Note that os is also used by the code that createmock generates; there
+ // should be no conflict.
+ t.runCompilationTest(
+ "os",
+ "FileInfo")
+}
+
+func (t *CreateMockTest) ComplicatedSamplePackage() {
+ t.runCompilationTest(
+ "github.com/smartystreets/assertions/internal/oglemock/generate/testdata/complicated_pkg",
+ "ComplicatedThing")
+}
+
+func (t *CreateMockTest) RenamedSamplePackage() {
+ t.runCompilationTest(
+ "github.com/smartystreets/assertions/internal/oglemock/generate/testdata/renamed_pkg",
+ "SomeInterface")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/gcs/bucket.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/gcs/bucket.go
new file mode 100644
index 00000000000..da714f305c5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/gcs/bucket.go
@@ -0,0 +1,23 @@
+package gcs
+
+import "golang.org/x/net/context"
+
+type Bucket interface {
+ Name() string
+ CreateObject(context.Context, *CreateObjectRequest) (*Object, error)
+ CopyObject(ctx context.Context, req *CopyObjectRequest) (o *Object, err error)
+}
+
+type Object struct {
+}
+
+type CreateObjectRequest struct {
+}
+
+type CopyObjectRequest struct {
+}
+
+type Int int
+type Array []int
+type Chan <-chan int
+type Ptr *int
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.gcs_bucket b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.gcs_bucket
new file mode 100644
index 00000000000..05a5114546c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.gcs_bucket
@@ -0,0 +1,125 @@
+// This file was auto-generated using createmock. See the following page for
+// more information:
+//
+// https://github.com/smartystreets/assertions/internal/oglemock
+//
+
+package mock_gcs
+
+import (
+ fmt "fmt"
+ oglemock "github.com/smartystreets/assertions/internal/oglemock"
+ gcs "github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/gcs"
+ context "golang.org/x/net/context"
+ runtime "runtime"
+ unsafe "unsafe"
+)
+
+type MockBucket interface {
+ gcs.Bucket
+ oglemock.MockObject
+}
+
+type mockBucket struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockBucket(
+ c oglemock.Controller,
+ desc string) MockBucket {
+ return &mockBucket{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockBucket) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockBucket) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockBucket) CopyObject(p0 context.Context, p1 *gcs.CopyObjectRequest) (o0 *gcs.Object, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "CopyObject",
+ file,
+ line,
+ []interface{}{p0, p1})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockBucket.CopyObject: invalid return values: %v", retVals))
+ }
+
+ // o0 *gcs.Object
+ if retVals[0] != nil {
+ o0 = retVals[0].(*gcs.Object)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+func (m *mockBucket) CreateObject(p0 context.Context, p1 *gcs.CreateObjectRequest) (o0 *gcs.Object, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "CreateObject",
+ file,
+ line,
+ []interface{}{p0, p1})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockBucket.CreateObject: invalid return values: %v", retVals))
+ }
+
+ // o0 *gcs.Object
+ if retVals[0] != nil {
+ o0 = retVals[0].(*gcs.Object)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+func (m *mockBucket) Name() (o0 string) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Name",
+ file,
+ line,
+ []interface{}{})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockBucket.Name: invalid return values: %v", retVals))
+ }
+
+ // o0 string
+ if retVals[0] != nil {
+ o0 = retVals[0].(string)
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.gcs_bucket_same_package b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.gcs_bucket_same_package
new file mode 100644
index 00000000000..d78819076f5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.gcs_bucket_same_package
@@ -0,0 +1,124 @@
+// This file was auto-generated using createmock. See the following page for
+// more information:
+//
+// https://github.com/smartystreets/assertions/internal/oglemock
+//
+
+package gcs
+
+import (
+ fmt "fmt"
+ oglemock "github.com/smartystreets/assertions/internal/oglemock"
+ context "golang.org/x/net/context"
+ runtime "runtime"
+ unsafe "unsafe"
+)
+
+type MockBucket interface {
+ Bucket
+ oglemock.MockObject
+}
+
+type mockBucket struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockBucket(
+ c oglemock.Controller,
+ desc string) MockBucket {
+ return &mockBucket{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockBucket) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockBucket) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockBucket) CopyObject(p0 context.Context, p1 *CopyObjectRequest) (o0 *Object, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "CopyObject",
+ file,
+ line,
+ []interface{}{p0, p1})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockBucket.CopyObject: invalid return values: %v", retVals))
+ }
+
+ // o0 *Object
+ if retVals[0] != nil {
+ o0 = retVals[0].(*Object)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+func (m *mockBucket) CreateObject(p0 context.Context, p1 *CreateObjectRequest) (o0 *Object, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "CreateObject",
+ file,
+ line,
+ []interface{}{p0, p1})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockBucket.CreateObject: invalid return values: %v", retVals))
+ }
+
+ // o0 *Object
+ if retVals[0] != nil {
+ o0 = retVals[0].(*Object)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+func (m *mockBucket) Name() (o0 string) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Name",
+ file,
+ line,
+ []interface{}{})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockBucket.Name: invalid return values: %v", retVals))
+ }
+
+ // o0 string
+ if retVals[0] != nil {
+ o0 = retVals[0].(string)
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.no_interfaces b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.no_interfaces
new file mode 100644
index 00000000000..b70535fae6b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.no_interfaces
@@ -0,0 +1 @@
+Usage: createmock [package] [interface ...]
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.no_package b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.no_package
new file mode 100644
index 00000000000..b70535fae6b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.no_package
@@ -0,0 +1 @@
+Usage: createmock [package] [interface ...]
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.unknown_interface b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.unknown_interface
new file mode 100644
index 00000000000..c32950a1790
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.unknown_interface
@@ -0,0 +1 @@
+Unknown interface: Frobnicator
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.unknown_package b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.unknown_package
new file mode 100644
index 00000000000..d07e915d2cf
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/golden.unknown_package
@@ -0,0 +1 @@
+Unknown package: foo/bar
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/do_all.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/do_all.go
new file mode 100644
index 00000000000..c0cd3ffbd69
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/do_all.go
@@ -0,0 +1,53 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// Create an Action that invokes the supplied actions one after another. The
+// return values from the final action are used; others are ignored.
+func DoAll(first Action, others ...Action) Action {
+ return &doAll{
+ wrapped: append([]Action{first}, others...),
+ }
+}
+
+type doAll struct {
+ wrapped []Action
+}
+
+func (a *doAll) SetSignature(signature reflect.Type) (err error) {
+ for i, w := range a.wrapped {
+ err = w.SetSignature(signature)
+ if err != nil {
+ err = fmt.Errorf("Action %v: %v", i, err)
+ return
+ }
+ }
+
+ return
+}
+
+func (a *doAll) Invoke(methodArgs []interface{}) (rets []interface{}) {
+ for _, w := range a.wrapped {
+ rets = w.Invoke(methodArgs)
+ }
+
+ return
+}
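
A short usage sketch of DoAll, mirroring the MultipleActions test that follows. In normal use the controller calls SetSignature and Invoke on the combined action for you; calling them directly here just keeps the sketch self-contained. The function and variable names are illustrative.

// Sketch only: record the first argument via SaveArg, then return "taco".
package example

import (
	"reflect"

	"github.com/smartystreets/assertions/internal/oglemock"
)

func exampleDoAll() (saved int, rets []interface{}) {
	// Signature of the method being mocked: func(int) string.
	f := func(a int) string { return "" }

	action := oglemock.DoAll(
		oglemock.SaveArg(0, &saved),
		oglemock.Return("taco"))

	// An Action must learn the mocked method's signature before it is
	// invoked; the controller normally does this.
	if err := action.SetSignature(reflect.TypeOf(f)); err != nil {
		panic(err)
	}

	rets = action.Invoke([]interface{}{17})
	return
}
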
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/do_all_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/do_all_test.go
new file mode 100644
index 00000000000..f835b66c7c5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/do_all_test.go
@@ -0,0 +1,90 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock_test
+
+import (
+ "reflect"
+ "testing"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ "github.com/smartystreets/assertions/internal/oglemock"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+func TestDoAll(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////
+// Boilerplate
+////////////////////////////////////////////////////////////
+
+type DoAllTest struct {
+}
+
+func init() { RegisterTestSuite(&DoAllTest{}) }
+
+////////////////////////////////////////////////////////////
+// Test functions
+////////////////////////////////////////////////////////////
+
+func (t *DoAllTest) FirstActionDoesntLikeSignature() {
+ f := func(a int, b string) {}
+
+ a0 := oglemock.Invoke(func() {})
+ a1 := oglemock.Invoke(f)
+ a2 := oglemock.Return()
+
+ err := oglemock.DoAll(a0, a1, a2).SetSignature(reflect.TypeOf(f))
+ ExpectThat(err, Error(HasSubstr("Action 0")))
+ ExpectThat(err, Error(HasSubstr("func()")))
+}
+
+func (t *DoAllTest) LastActionDoesntLikeSignature() {
+ f := func(a int, b string) {}
+
+ a0 := oglemock.Invoke(f)
+ a1 := oglemock.Invoke(f)
+ a2 := oglemock.Return(17)
+
+ err := oglemock.DoAll(a0, a1, a2).SetSignature(reflect.TypeOf(f))
+ ExpectThat(err, Error(HasSubstr("Action 2")))
+ ExpectThat(err, Error(HasSubstr("1 vals; expected 0")))
+}
+
+func (t *DoAllTest) SingleAction() {
+ f := func(a int) string { return "" }
+ a0 := oglemock.Return("taco")
+
+ action := oglemock.DoAll(a0)
+ AssertEq(nil, action.SetSignature(reflect.TypeOf(f)))
+
+ rets := action.Invoke([]interface{}{17})
+ ExpectThat(rets, ElementsAre("taco"))
+}
+
+func (t *DoAllTest) MultipleActions() {
+ f := func(a int) string { return "" }
+
+ var saved int
+ a0 := oglemock.SaveArg(0, &saved)
+ a1 := oglemock.Return("taco")
+
+ action := oglemock.DoAll(a0, a1)
+ AssertEq(nil, action.SetSignature(reflect.TypeOf(f)))
+
+ rets := action.Invoke([]interface{}{17})
+ ExpectEq(17, saved)
+ ExpectThat(rets, ElementsAre("taco"))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/doc.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/doc.go
new file mode 100644
index 00000000000..d397f652033
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/doc.go
@@ -0,0 +1,28 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package oglemock provides a mocking framework for unit tests.
+//
+// Among its features are the following:
+//
+// * An extensive and extensible set of matchers for expressing call
+// expectations (provided by the oglematchers package).
+//
+// * Style and semantics similar to Google Mock and Google JS Test.
+//
+// * Easy integration with the ogletest unit testing framework.
+//
+// See https://github.com/smartystreets/assertions/internal/oglemock for more information.
+package oglemock
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/error_reporter.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/error_reporter.go
new file mode 100644
index 00000000000..0c3a65ee187
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/error_reporter.go
@@ -0,0 +1,29 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock
+
+// ErrorReporter is an interface that wraps methods for reporting errors that
+// should cause test failures.
+type ErrorReporter interface {
+ // Report that some failure (e.g. an unsatisfied expectation) occurred. If
+ // known, fileName and lineNumber should contain information about where it
+ // occurred. The test may continue if the test framework supports it.
+ ReportError(fileName string, lineNumber int, err error)
+
+ // Like ReportError, but the test should be halted immediately. It is assumed
+ // that this method does not return.
+ ReportFatalError(fileName string, lineNumber int, err error)
+}
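
For concreteness, a sketch of a recording implementation in the spirit of the fake reporter used by the controller tests earlier in this patch (t.reporter.errors / t.reporter.fatalErrors). The type and field names are illustrative, not part of the package.

// Sketch only: an ErrorReporter that records reports instead of failing a test.
package example

import "github.com/smartystreets/assertions/internal/oglemock"

type reportedError struct {
	fileName   string
	lineNumber int
	err        error
}

type recordingReporter struct {
	errors      []reportedError
	fatalErrors []reportedError
}

var _ oglemock.ErrorReporter = (*recordingReporter)(nil)

func (r *recordingReporter) ReportError(fileName string, lineNumber int, err error) {
	r.errors = append(r.errors, reportedError{fileName, lineNumber, err})
}

// ReportFatalError only records here; a reporter wired to a real test
// framework would also halt the running test, since the interface assumes
// this method does not return.
func (r *recordingReporter) ReportFatalError(fileName string, lineNumber int, err error) {
	r.fatalErrors = append(r.fatalErrors, reportedError{fileName, lineNumber, err})
}
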
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/expectation.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/expectation.go
new file mode 100644
index 00000000000..d18bfb8bce9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/expectation.go
@@ -0,0 +1,59 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock
+
+// Expectation is an expectation for zero or more calls to a mock method with
+// particular arguments or sets of arguments.
+type Expectation interface {
+ // Times expresses that a matching method call should happen exactly N times.
+ // Times must not be called more than once, and must not be called after
+ // WillOnce or WillRepeatedly.
+ //
+ // The full rules for the cardinality of an expectation are as follows:
+ //
+ // 1. If an explicit cardinality is set with Times(N), then anything other
+ // than exactly N matching calls will cause a test failure.
+ //
+ // 2. Otherwise, if there are any one-time actions set up, then it is
+ // expected there will be at least that many matching calls. If there is
+ // not also a fallback action, then it is expected that there will be
+ // exactly that many.
+ //
+ // 3. Otherwise, if there is a fallback action configured, any number of
+ // matching calls (including zero) is allowed.
+ //
+ // 4. Otherwise, the implicit cardinality is one.
+ //
+ Times(n uint) Expectation
+
+ // WillOnce configures a "one-time action". WillOnce can be called zero or
+ // more times, but must be called after any call to Times and before any call
+ // to WillRepeatedly.
+ //
+ // When matching method calls are made on the mock object, one-time actions
+ // are invoked one per matching call in the order that they were set up until
+ // they are exhausted. Afterward the fallback action, if any, will be used.
+ WillOnce(a Action) Expectation
+
+ // WillRepeatedly configures a "fallback action". WillRepeatedly can be
+ // called zero or one times, and must not be called before Times or WillOnce.
+ //
+ // Once all one-time actions are exhausted (see above), the fallback action
+ // will be invoked for any further method calls. If WillRepeatedly is not
+ // called, the fallback action is implicitly an action that returns zero
+ // values for the method's return values.
+ WillRepeatedly(a Action) Expectation
+}
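
A sketch of the legal configuration order implied by the comments above: Times at most once and first, then any number of WillOnce calls, then WillRepeatedly at most once and last. The exact signature of the partial function returned by Controller.ExpectCall is not shown in this hunk; a variadic interface{} form is assumed, and the matcher and return values are placeholders.

// Sketch only: one legal ordering of Times, WillOnce, and WillRepeatedly.
package example

import (
	. "github.com/smartystreets/assertions/internal/oglematchers"
	"github.com/smartystreets/assertions/internal/oglemock"
)

func configure(partial func(args ...interface{}) oglemock.Expectation) {
	exp := partial(HasSubstr("taco"))

	exp.Times(3)                            // at most once, before any action
	exp.WillOnce(oglemock.Return(17))       // one-time actions, used in order
	exp.WillOnce(oglemock.Return(19))
	exp.WillRepeatedly(oglemock.Return(23)) // fallback, at most once, last
}
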
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/generate.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/generate.go
new file mode 100644
index 00000000000..aca3de5541b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/generate.go
@@ -0,0 +1,369 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package generate implements code generation for mock classes. This is an
+// implementation detail of the createmock command, which you probably want to
+// use directly instead.
+package generate
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/printer"
+ "go/token"
+ "io"
+ "path"
+ "reflect"
+ "regexp"
+ "text/template"
+)
+
+const gTmplStr = `
+// This file was auto-generated using createmock. See the following page for
+// more information:
+//
+// https://github.com/smartystreets/assertions/internal/oglemock
+//
+
+package {{pathBase .OutputPkgPath}}
+
+import (
+ {{range $identifier, $import := .Imports}}{{$identifier}} "{{$import}}"
+ {{end}}
+)
+
+{{range .Interfaces}}
+ {{$interfaceName := printf "Mock%s" .Name}}
+ {{$structName := printf "mock%s" .Name}}
+
+ type {{$interfaceName}} interface {
+ {{getTypeString .}}
+ oglemock.MockObject
+ }
+
+ type {{$structName}} struct {
+ controller oglemock.Controller
+ description string
+ }
+
+ func New{{printf "Mock%s" .Name}}(
+ c oglemock.Controller,
+ desc string) {{$interfaceName}} {
+ return &{{$structName}}{
+ controller: c,
+ description: desc,
+ }
+ }
+
+ func (m *{{$structName}}) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+ }
+
+ func (m *{{$structName}}) Oglemock_Description() string {
+ return m.description
+ }
+
+ {{range getMethods .}}
+ {{$funcType := .Type}}
+ {{$inputTypes := getInputs $funcType}}
+ {{$outputTypes := getOutputs $funcType}}
+
+ func (m *{{$structName}}) {{.Name}}({{range $i, $type := $inputTypes}}p{{$i}} {{getInputTypeString $i $funcType}}, {{end}}) ({{range $i, $type := $outputTypes}}o{{$i}} {{getTypeString $type}}, {{end}}) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "{{.Name}}",
+ file,
+ line,
+ []interface{}{ {{range $i, $type := $inputTypes}}p{{$i}}, {{end}} })
+
+ if len(retVals) != {{len $outputTypes}} {
+ panic(fmt.Sprintf("{{$structName}}.{{.Name}}: invalid return values: %v", retVals))
+ }
+
+ {{range $i, $type := $outputTypes}}
+ // o{{$i}} {{getTypeString $type}}
+ if retVals[{{$i}}] != nil {
+ o{{$i}} = retVals[{{$i}}].({{getTypeString $type}})
+ }
+ {{end}}
+
+ return
+ }
+ {{end}}
+{{end}}
+`
+
+type tmplArg struct {
+ // The set of interfaces to mock, and the full name of the package from which
+ // they all come.
+ Interfaces []reflect.Type
+ InterfacePkgPath string
+
+	// The package path for the generated code.
+ OutputPkgPath string
+
+ // Imports needed by the interfaces.
+ Imports importMap
+}
+
+func (a *tmplArg) getInputTypeString(i int, ft reflect.Type) string {
+ numInputs := ft.NumIn()
+ if i == numInputs-1 && ft.IsVariadic() {
+ return "..." + a.getTypeString(ft.In(i).Elem())
+ }
+
+ return a.getTypeString(ft.In(i))
+}
+
+func (a *tmplArg) getTypeString(t reflect.Type) string {
+ return typeString(t, a.OutputPkgPath)
+}
+
+func getMethods(it reflect.Type) []reflect.Method {
+ numMethods := it.NumMethod()
+ methods := make([]reflect.Method, numMethods)
+
+ for i := 0; i < numMethods; i++ {
+ methods[i] = it.Method(i)
+ }
+
+ return methods
+}
+
+func getInputs(ft reflect.Type) []reflect.Type {
+ numIn := ft.NumIn()
+ inputs := make([]reflect.Type, numIn)
+
+ for i := 0; i < numIn; i++ {
+ inputs[i] = ft.In(i)
+ }
+
+ return inputs
+}
+
+func getOutputs(ft reflect.Type) []reflect.Type {
+ numOut := ft.NumOut()
+ outputs := make([]reflect.Type, numOut)
+
+ for i := 0; i < numOut; i++ {
+ outputs[i] = ft.Out(i)
+ }
+
+ return outputs
+}
+
+// A map from import identifier to package to use that identifier for,
+// containing elements for each import needed by a set of mocked interfaces.
+type importMap map[string]string
+
+var typePackageIdentifierRegexp = regexp.MustCompile(`^([\pL_0-9]+)\.[\pL_0-9]+$`)
+
+// Add an import for the supplied type, without recursing.
+func addImportForType(imports importMap, t reflect.Type) {
+ // If there is no package path, this is a built-in type and we don't need an
+ // import.
+ pkgPath := t.PkgPath()
+ if pkgPath == "" {
+ return
+ }
+
+ // Work around a bug in Go:
+ //
+ // http://code.google.com/p/go/issues/detail?id=2660
+ //
+ var errorPtr *error
+ if t == reflect.TypeOf(errorPtr).Elem() {
+ return
+ }
+
+ // Use the identifier that's part of the type's string representation as the
+ // import identifier. This means that we'll do the right thing for package
+ // "foo/bar" with declaration "package baz".
+ match := typePackageIdentifierRegexp.FindStringSubmatch(t.String())
+ if match == nil {
+ return
+ }
+
+ imports[match[1]] = pkgPath
+}
+
+// Add all necessary imports for the type, recursing as appropriate.
+func addImportsForType(imports importMap, t reflect.Type) {
+ // Add any import needed for the type itself.
+ addImportForType(imports, t)
+
+ // Handle special cases where recursion is needed.
+ switch t.Kind() {
+ case reflect.Array, reflect.Chan, reflect.Ptr, reflect.Slice:
+ addImportsForType(imports, t.Elem())
+
+ case reflect.Func:
+ // Input parameters.
+ for i := 0; i < t.NumIn(); i++ {
+ addImportsForType(imports, t.In(i))
+ }
+
+ // Return values.
+ for i := 0; i < t.NumOut(); i++ {
+ addImportsForType(imports, t.Out(i))
+ }
+
+ case reflect.Map:
+ addImportsForType(imports, t.Key())
+ addImportsForType(imports, t.Elem())
+ }
+}
+
+// Add imports for each of the methods of the interface, but not the interface
+// itself.
+func addImportsForInterfaceMethods(imports importMap, it reflect.Type) {
+ // Handle each method.
+ for i := 0; i < it.NumMethod(); i++ {
+ m := it.Method(i)
+ addImportsForType(imports, m.Type)
+ }
+}
+
+// Given a set of interfaces, return a map from import identifier to package to
+// use that identifier for, containing elements for each import needed by the
+// mock versions of those interfaces in a package with the given path.
+func getImports(
+ interfaces []reflect.Type,
+ pkgPath string) importMap {
+ imports := make(importMap)
+ for _, it := range interfaces {
+ addImportForType(imports, it)
+ addImportsForInterfaceMethods(imports, it)
+ }
+
+ // Make sure there are imports for other types used by the generated code
+ // itself.
+ imports["fmt"] = "fmt"
+ imports["oglemock"] = "github.com/smartystreets/assertions/internal/oglemock"
+ imports["runtime"] = "runtime"
+ imports["unsafe"] = "unsafe"
+
+ // Remove any self-imports generated above.
+ for k, v := range imports {
+ if v == pkgPath {
+ delete(imports, k)
+ }
+ }
+
+ return imports
+}
+
+// Given a set of interfaces to mock, write out source code suitable for
+// inclusion in a package with the supplied full package path containing mock
+// implementations of those interfaces.
+func GenerateMockSource(
+ w io.Writer,
+ outputPkgPath string,
+ interfaces []reflect.Type) (err error) {
+ // Sanity-check arguments.
+ if outputPkgPath == "" {
+ return errors.New("Package path must be non-empty.")
+ }
+
+ if len(interfaces) == 0 {
+ return errors.New("List of interfaces must be non-empty.")
+ }
+
+ // Make sure each type is indeed an interface.
+ for _, it := range interfaces {
+ if it.Kind() != reflect.Interface {
+ return errors.New("Invalid type: " + it.String())
+ }
+ }
+
+ // Make sure each interface is from the same package.
+ interfacePkgPath := interfaces[0].PkgPath()
+ for _, t := range interfaces {
+ if t.PkgPath() != interfacePkgPath {
+ err = fmt.Errorf(
+ "Package path mismatch: %q vs. %q",
+ interfacePkgPath,
+ t.PkgPath())
+
+ return
+ }
+ }
+
+ // Set up an appropriate template arg.
+ arg := tmplArg{
+ Interfaces: interfaces,
+ InterfacePkgPath: interfacePkgPath,
+ OutputPkgPath: outputPkgPath,
+ Imports: getImports(interfaces, outputPkgPath),
+ }
+
+ // Configure and parse the template.
+ tmpl := template.New("code")
+ tmpl.Funcs(template.FuncMap{
+ "pathBase": path.Base,
+ "getMethods": getMethods,
+ "getInputs": getInputs,
+ "getOutputs": getOutputs,
+ "getInputTypeString": arg.getInputTypeString,
+ "getTypeString": arg.getTypeString,
+ })
+
+ _, err = tmpl.Parse(gTmplStr)
+ if err != nil {
+ err = fmt.Errorf("Parse: %v", err)
+ return
+ }
+
+ // Execute the template, collecting the raw output into a buffer.
+ buf := new(bytes.Buffer)
+ if err := tmpl.Execute(buf, arg); err != nil {
+ return err
+ }
+
+ // Parse the output.
+ fset := token.NewFileSet()
+ astFile, err := parser.ParseFile(
+ fset,
+ path.Base(outputPkgPath+".go"),
+ buf,
+ parser.ParseComments)
+
+ if err != nil {
+ err = fmt.Errorf("parser.ParseFile: %v", err)
+ return
+ }
+
+ // Sort the import lines in the AST in the same way that gofmt does.
+ ast.SortImports(fset, astFile)
+
+ // Pretty-print the AST, using the same options that gofmt does by default.
+ cfg := &printer.Config{
+ Mode: printer.UseSpaces | printer.TabIndent,
+ Tabwidth: 8,
+ }
+
+ if err = cfg.Fprint(w, fset, astFile); err != nil {
+ return errors.New("Error pretty printing: " + err.Error())
+ }
+
+ return nil
+}
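
A direct-call sketch of GenerateMockSource, mirroring the IoReaderAndWriter golden test added later in this patch; "some/pkg" is just an example output package path.

// Sketch only: generate mocks for io.Reader and io.Writer to stdout.
package example

import (
	"io"
	"log"
	"os"
	"reflect"

	"github.com/smartystreets/assertions/internal/oglemock/generate"
)

func writeReaderWriterMocks() {
	interfaces := []reflect.Type{
		reflect.TypeOf((*io.Reader)(nil)).Elem(),
		reflect.TypeOf((*io.Writer)(nil)).Elem(),
	}

	if err := generate.GenerateMockSource(os.Stdout, "some/pkg", interfaces); err != nil {
		log.Fatalf("GenerateMockSource: %v", err)
	}
}
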
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/generate_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/generate_test.go
new file mode 100644
index 00000000000..8347e4d030b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/generate_test.go
@@ -0,0 +1,168 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate_test
+
+import (
+ "bytes"
+ "flag"
+ "image"
+ "io"
+ "io/ioutil"
+ "path"
+ "reflect"
+ "testing"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ "github.com/smartystreets/assertions/internal/oglemock/generate"
+ "github.com/smartystreets/assertions/internal/oglemock/generate/testdata/complicated_pkg"
+ "github.com/smartystreets/assertions/internal/oglemock/generate/testdata/renamed_pkg"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+var dumpNew = flag.Bool("dump_new", false, "Dump new golden files.")
+
+////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////
+
+type GenerateTest struct {
+}
+
+func TestOgletest(t *testing.T) { RunTests(t) }
+func init() { RegisterTestSuite(&GenerateTest{}) }
+
+func (t *GenerateTest) runGoldenTest(
+ caseName string,
+ outputPkgPath string,
+ nilPtrs ...interface{}) {
+ // Make a slice of interface types to give to GenerateMockSource.
+ interfaces := make([]reflect.Type, len(nilPtrs))
+ for i, ptr := range nilPtrs {
+ interfaces[i] = reflect.TypeOf(ptr).Elem()
+ }
+
+ // Create the mock source.
+ buf := new(bytes.Buffer)
+ err := generate.GenerateMockSource(buf, outputPkgPath, interfaces)
+ AssertEq(nil, err, "Error from GenerateMockSource: %v", err)
+
+ // Read the golden file.
+ goldenPath := path.Join("testdata", "golden."+caseName+".go")
+ goldenData := readFileOrDie(goldenPath)
+
+ // Compare the two.
+ identical := (buf.String() == string(goldenData))
+ ExpectTrue(identical, "Output doesn't match for case '%s'.", caseName)
+
+ // Write out a new golden file if requested.
+ if !identical && *dumpNew {
+ writeContentsToFileOrDie(buf.Bytes(), goldenPath)
+ }
+}
+
+func writeContentsToFileOrDie(contents []byte, path string) {
+ if err := ioutil.WriteFile(path, contents, 0600); err != nil {
+ panic("ioutil.WriteFile: " + err.Error())
+ }
+}
+
+func readFileOrDie(path string) []byte {
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ panic("ioutil.ReadFile: " + err.Error())
+ }
+
+ return contents
+}
+
+////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////
+
+func (t *GenerateTest) EmptyOutputPackagePath() {
+ err := generate.GenerateMockSource(
+ new(bytes.Buffer),
+ "",
+ []reflect.Type{
+ reflect.TypeOf((*io.Reader)(nil)).Elem(),
+ })
+
+ ExpectThat(err, Error(HasSubstr("Package path")))
+ ExpectThat(err, Error(HasSubstr("non-empty")))
+}
+
+func (t *GenerateTest) EmptySetOfInterfaces() {
+ err := generate.GenerateMockSource(
+ new(bytes.Buffer),
+ "foo",
+ []reflect.Type{})
+
+ ExpectThat(err, Error(HasSubstr("interfaces")))
+ ExpectThat(err, Error(HasSubstr("non-empty")))
+}
+
+func (t *GenerateTest) NonInterfaceType() {
+ err := generate.GenerateMockSource(
+ new(bytes.Buffer),
+ "foo",
+ []reflect.Type{
+ reflect.TypeOf((*io.Reader)(nil)).Elem(),
+ reflect.TypeOf(17),
+ reflect.TypeOf((*io.Writer)(nil)).Elem(),
+ })
+
+ ExpectThat(err, Error(HasSubstr("Invalid type")))
+}
+
+func (t *GenerateTest) IoReaderAndWriter() {
+ // Mock io.Reader and io.Writer.
+ t.runGoldenTest(
+ "io_reader_writer",
+ "some/pkg",
+ (*io.Reader)(nil),
+ (*io.Writer)(nil))
+}
+
+func (t *GenerateTest) IoReaderAndWriter_SamePackage() {
+ // Mock io.Reader and io.Writer.
+ t.runGoldenTest(
+ "io_reader_writer_same_package",
+ "io",
+ (*io.Reader)(nil),
+ (*io.Writer)(nil))
+}
+
+func (t *GenerateTest) Image() {
+ t.runGoldenTest(
+ "image",
+ "some/pkg",
+ (*image.Image)(nil),
+ (*image.PalettedImage)(nil))
+}
+
+func (t *GenerateTest) ComplicatedPackage() {
+ t.runGoldenTest(
+ "complicated_pkg",
+ "some/pkg",
+ (*complicated_pkg.ComplicatedThing)(nil))
+}
+
+func (t *GenerateTest) RenamedPackage() {
+ t.runGoldenTest(
+ "renamed_pkg",
+ "some/pkg",
+ (*tony.SomeInterface)(nil))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/complicated_pkg/complicated_pkg.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/complicated_pkg/complicated_pkg.go
new file mode 100644
index 00000000000..acc054370d5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/complicated_pkg/complicated_pkg.go
@@ -0,0 +1,41 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package complicated_pkg contains an interface with lots of interesting
+// cases, for use in integration testing.
+package complicated_pkg
+
+import (
+ "image"
+ "io"
+ "net"
+
+ "github.com/smartystreets/assertions/internal/oglemock/generate/testdata/renamed_pkg"
+)
+
+type Byte uint8
+
+type ComplicatedThing interface {
+ Channels(a chan chan<- <-chan net.Conn) chan int
+ Pointers(a *int, b *net.Conn, c **io.Reader) (*int, error)
+ Functions(a func(int, image.Image) int) func(string, int) net.Conn
+ Maps(a map[string]*int) (map[int]*string, error)
+ Arrays(a [3]string) ([3]int, error)
+ Slices(a []string) ([]int, error)
+ NamedScalarType(a Byte) ([]Byte, error)
+ EmptyInterface(a interface{}) (interface{}, error)
+ RenamedPackage(a tony.SomeUint8Alias)
+ Variadic(a int, b ...net.Conn) int
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.complicated_pkg.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.complicated_pkg.go
new file mode 100644
index 00000000000..6bcf1979837
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.complicated_pkg.go
@@ -0,0 +1,311 @@
+// This file was auto-generated using createmock. See the following page for
+// more information:
+//
+// https://github.com/smartystreets/assertions/internal/oglemock
+//
+
+package pkg
+
+import (
+ fmt "fmt"
+ oglemock "github.com/smartystreets/assertions/internal/oglemock"
+ complicated_pkg "github.com/smartystreets/assertions/internal/oglemock/generate/testdata/complicated_pkg"
+ tony "github.com/smartystreets/assertions/internal/oglemock/generate/testdata/renamed_pkg"
+ image "image"
+ io "io"
+ net "net"
+ runtime "runtime"
+ unsafe "unsafe"
+)
+
+type MockComplicatedThing interface {
+ complicated_pkg.ComplicatedThing
+ oglemock.MockObject
+}
+
+type mockComplicatedThing struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockComplicatedThing(
+ c oglemock.Controller,
+ desc string) MockComplicatedThing {
+ return &mockComplicatedThing{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockComplicatedThing) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockComplicatedThing) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockComplicatedThing) Arrays(p0 [3]string) (o0 [3]int, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Arrays",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockComplicatedThing.Arrays: invalid return values: %v", retVals))
+ }
+
+ // o0 [3]int
+ if retVals[0] != nil {
+ o0 = retVals[0].([3]int)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+func (m *mockComplicatedThing) Channels(p0 chan chan<- <-chan net.Conn) (o0 chan int) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Channels",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockComplicatedThing.Channels: invalid return values: %v", retVals))
+ }
+
+ // o0 chan int
+ if retVals[0] != nil {
+ o0 = retVals[0].(chan int)
+ }
+
+ return
+}
+
+func (m *mockComplicatedThing) EmptyInterface(p0 interface{}) (o0 interface{}, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "EmptyInterface",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockComplicatedThing.EmptyInterface: invalid return values: %v", retVals))
+ }
+
+ // o0 interface { }
+ if retVals[0] != nil {
+ o0 = retVals[0].(interface{})
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+func (m *mockComplicatedThing) Functions(p0 func(int, image.Image) int) (o0 func(string, int) net.Conn) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Functions",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockComplicatedThing.Functions: invalid return values: %v", retVals))
+ }
+
+ // o0 func(string, int) (net.Conn)
+ if retVals[0] != nil {
+ o0 = retVals[0].(func(string, int) net.Conn)
+ }
+
+ return
+}
+
+func (m *mockComplicatedThing) Maps(p0 map[string]*int) (o0 map[int]*string, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Maps",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockComplicatedThing.Maps: invalid return values: %v", retVals))
+ }
+
+ // o0 map[int]*string
+ if retVals[0] != nil {
+ o0 = retVals[0].(map[int]*string)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+func (m *mockComplicatedThing) NamedScalarType(p0 complicated_pkg.Byte) (o0 []complicated_pkg.Byte, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "NamedScalarType",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockComplicatedThing.NamedScalarType: invalid return values: %v", retVals))
+ }
+
+ // o0 []complicated_pkg.Byte
+ if retVals[0] != nil {
+ o0 = retVals[0].([]complicated_pkg.Byte)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+func (m *mockComplicatedThing) Pointers(p0 *int, p1 *net.Conn, p2 **io.Reader) (o0 *int, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Pointers",
+ file,
+ line,
+ []interface{}{p0, p1, p2})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockComplicatedThing.Pointers: invalid return values: %v", retVals))
+ }
+
+ // o0 *int
+ if retVals[0] != nil {
+ o0 = retVals[0].(*int)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+func (m *mockComplicatedThing) RenamedPackage(p0 tony.SomeUint8Alias) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "RenamedPackage",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 0 {
+ panic(fmt.Sprintf("mockComplicatedThing.RenamedPackage: invalid return values: %v", retVals))
+ }
+
+ return
+}
+
+func (m *mockComplicatedThing) Slices(p0 []string) (o0 []int, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Slices",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockComplicatedThing.Slices: invalid return values: %v", retVals))
+ }
+
+ // o0 []int
+ if retVals[0] != nil {
+ o0 = retVals[0].([]int)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+func (m *mockComplicatedThing) Variadic(p0 int, p1 ...net.Conn) (o0 int) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Variadic",
+ file,
+ line,
+ []interface{}{p0, p1})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockComplicatedThing.Variadic: invalid return values: %v", retVals))
+ }
+
+ // o0 int
+ if retVals[0] != nil {
+ o0 = retVals[0].(int)
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.image.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.image.go
new file mode 100644
index 00000000000..dd083e2930e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.image.go
@@ -0,0 +1,238 @@
+// This file was auto-generated using createmock. See the following page for
+// more information:
+//
+// https://github.com/smartystreets/assertions/internal/oglemock
+//
+
+package pkg
+
+import (
+ fmt "fmt"
+ oglemock "github.com/smartystreets/assertions/internal/oglemock"
+ image "image"
+ color "image/color"
+ runtime "runtime"
+ unsafe "unsafe"
+)
+
+type MockImage interface {
+ image.Image
+ oglemock.MockObject
+}
+
+type mockImage struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockImage(
+ c oglemock.Controller,
+ desc string) MockImage {
+ return &mockImage{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockImage) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockImage) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockImage) At(p0 int, p1 int) (o0 color.Color) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "At",
+ file,
+ line,
+ []interface{}{p0, p1})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockImage.At: invalid return values: %v", retVals))
+ }
+
+ // o0 color.Color
+ if retVals[0] != nil {
+ o0 = retVals[0].(color.Color)
+ }
+
+ return
+}
+
+func (m *mockImage) Bounds() (o0 image.Rectangle) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Bounds",
+ file,
+ line,
+ []interface{}{})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockImage.Bounds: invalid return values: %v", retVals))
+ }
+
+ // o0 image.Rectangle
+ if retVals[0] != nil {
+ o0 = retVals[0].(image.Rectangle)
+ }
+
+ return
+}
+
+func (m *mockImage) ColorModel() (o0 color.Model) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "ColorModel",
+ file,
+ line,
+ []interface{}{})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockImage.ColorModel: invalid return values: %v", retVals))
+ }
+
+ // o0 color.Model
+ if retVals[0] != nil {
+ o0 = retVals[0].(color.Model)
+ }
+
+ return
+}
+
+type MockPalettedImage interface {
+ image.PalettedImage
+ oglemock.MockObject
+}
+
+type mockPalettedImage struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockPalettedImage(
+ c oglemock.Controller,
+ desc string) MockPalettedImage {
+ return &mockPalettedImage{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockPalettedImage) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockPalettedImage) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockPalettedImage) At(p0 int, p1 int) (o0 color.Color) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "At",
+ file,
+ line,
+ []interface{}{p0, p1})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockPalettedImage.At: invalid return values: %v", retVals))
+ }
+
+ // o0 color.Color
+ if retVals[0] != nil {
+ o0 = retVals[0].(color.Color)
+ }
+
+ return
+}
+
+func (m *mockPalettedImage) Bounds() (o0 image.Rectangle) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Bounds",
+ file,
+ line,
+ []interface{}{})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockPalettedImage.Bounds: invalid return values: %v", retVals))
+ }
+
+ // o0 image.Rectangle
+ if retVals[0] != nil {
+ o0 = retVals[0].(image.Rectangle)
+ }
+
+ return
+}
+
+func (m *mockPalettedImage) ColorIndexAt(p0 int, p1 int) (o0 uint8) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "ColorIndexAt",
+ file,
+ line,
+ []interface{}{p0, p1})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockPalettedImage.ColorIndexAt: invalid return values: %v", retVals))
+ }
+
+ // o0 uint8
+ if retVals[0] != nil {
+ o0 = retVals[0].(uint8)
+ }
+
+ return
+}
+
+func (m *mockPalettedImage) ColorModel() (o0 color.Model) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "ColorModel",
+ file,
+ line,
+ []interface{}{})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockPalettedImage.ColorModel: invalid return values: %v", retVals))
+ }
+
+ // o0 color.Model
+ if retVals[0] != nil {
+ o0 = retVals[0].(color.Model)
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.io_reader_writer.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.io_reader_writer.go
new file mode 100644
index 00000000000..2d1c7df0490
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.io_reader_writer.go
@@ -0,0 +1,127 @@
+// This file was auto-generated using createmock. See the following page for
+// more information:
+//
+// https://github.com/smartystreets/assertions/internal/oglemock
+//
+
+package pkg
+
+import (
+ fmt "fmt"
+ oglemock "github.com/smartystreets/assertions/internal/oglemock"
+ io "io"
+ runtime "runtime"
+ unsafe "unsafe"
+)
+
+type MockReader interface {
+ io.Reader
+ oglemock.MockObject
+}
+
+type mockReader struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockReader(
+ c oglemock.Controller,
+ desc string) MockReader {
+ return &mockReader{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockReader) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockReader) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockReader) Read(p0 []uint8) (o0 int, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Read",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockReader.Read: invalid return values: %v", retVals))
+ }
+
+ // o0 int
+ if retVals[0] != nil {
+ o0 = retVals[0].(int)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+type MockWriter interface {
+ io.Writer
+ oglemock.MockObject
+}
+
+type mockWriter struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockWriter(
+ c oglemock.Controller,
+ desc string) MockWriter {
+ return &mockWriter{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockWriter) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockWriter) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockWriter) Write(p0 []uint8) (o0 int, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Write",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockWriter.Write: invalid return values: %v", retVals))
+ }
+
+ // o0 int
+ if retVals[0] != nil {
+ o0 = retVals[0].(int)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.io_reader_writer_same_package.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.io_reader_writer_same_package.go
new file mode 100644
index 00000000000..86c4b0391e6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.io_reader_writer_same_package.go
@@ -0,0 +1,126 @@
+// This file was auto-generated using createmock. See the following page for
+// more information:
+//
+// https://github.com/smartystreets/assertions/internal/oglemock
+//
+
+package io
+
+import (
+ fmt "fmt"
+ oglemock "github.com/smartystreets/assertions/internal/oglemock"
+ runtime "runtime"
+ unsafe "unsafe"
+)
+
+type MockReader interface {
+ Reader
+ oglemock.MockObject
+}
+
+type mockReader struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockReader(
+ c oglemock.Controller,
+ desc string) MockReader {
+ return &mockReader{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockReader) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockReader) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockReader) Read(p0 []uint8) (o0 int, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Read",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockReader.Read: invalid return values: %v", retVals))
+ }
+
+ // o0 int
+ if retVals[0] != nil {
+ o0 = retVals[0].(int)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
+
+type MockWriter interface {
+ Writer
+ oglemock.MockObject
+}
+
+type mockWriter struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockWriter(
+ c oglemock.Controller,
+ desc string) MockWriter {
+ return &mockWriter{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockWriter) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockWriter) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockWriter) Write(p0 []uint8) (o0 int, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Write",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockWriter.Write: invalid return values: %v", retVals))
+ }
+
+ // o0 int
+ if retVals[0] != nil {
+ o0 = retVals[0].(int)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.renamed_pkg.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.renamed_pkg.go
new file mode 100644
index 00000000000..fe3d313007a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/golden.renamed_pkg.go
@@ -0,0 +1,66 @@
+// This file was auto-generated using createmock. See the following page for
+// more information:
+//
+// https://github.com/smartystreets/assertions/internal/oglemock
+//
+
+package pkg
+
+import (
+ fmt "fmt"
+ oglemock "github.com/smartystreets/assertions/internal/oglemock"
+ tony "github.com/smartystreets/assertions/internal/oglemock/generate/testdata/renamed_pkg"
+ runtime "runtime"
+ unsafe "unsafe"
+)
+
+type MockSomeInterface interface {
+ tony.SomeInterface
+ oglemock.MockObject
+}
+
+type mockSomeInterface struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockSomeInterface(
+ c oglemock.Controller,
+ desc string) MockSomeInterface {
+ return &mockSomeInterface{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockSomeInterface) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockSomeInterface) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockSomeInterface) DoFoo(p0 int) (o0 int) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "DoFoo",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockSomeInterface.DoFoo: invalid return values: %v", retVals))
+ }
+
+ // o0 int
+ if retVals[0] != nil {
+ o0 = retVals[0].(int)
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/renamed_pkg/renamed_pkg.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/renamed_pkg/renamed_pkg.go
new file mode 100644
index 00000000000..1461cd6960d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/testdata/renamed_pkg/renamed_pkg.go
@@ -0,0 +1,24 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package tony calls itself something different from what its package path
+// would have you believe.
+package tony
+
+type SomeUint8Alias uint8
+
+type SomeInterface interface {
+ DoFoo(a int) int
+}
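+
+// Editorial illustration, not part of the upstream file: because the declared
+// package name ("tony") differs from the final element of the import path
+// ("renamed_pkg"), a consumer refers to the package by its declared name, e.g.
+//
+//	import "github.com/smartystreets/assertions/internal/oglemock/generate/testdata/renamed_pkg"
+//
+//	var _ tony.SomeInterface = nil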
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/type_string.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/type_string.go
new file mode 100644
index 00000000000..c4d46e718d9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/type_string.go
@@ -0,0 +1,147 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "fmt"
+ "log"
+ "reflect"
+ "strings"
+)
+
+// Return the string that should be used to refer to the supplied type within
+// the given package. The output is not guaranteed to be pretty, and should be
+// run through a tool like gofmt afterward.
+//
+// For example, a pointer to an io.Reader may be rendered as "*Reader" or
+// "*io.Reader" depending on whether the package path is "io" or not.
+func typeString(
+ t reflect.Type,
+ pkgPath string) (s string) {
+ // Is this type named? If so, we use its name, possibly with a package prefix.
+ //
+ // Examples:
+ //
+ // int
+ // string
+ // error
+ // gcs.Bucket
+ //
+ if t.Name() != "" {
+ if t.PkgPath() == pkgPath {
+ s = t.Name()
+ } else {
+ s = t.String()
+ }
+
+ return
+ }
+
+ // This type is unnamed. Recurse.
+ switch t.Kind() {
+ case reflect.Array:
+ s = fmt.Sprintf("[%d]%s", t.Len(), typeString(t.Elem(), pkgPath))
+
+ case reflect.Chan:
+ s = fmt.Sprintf("%s %s", t.ChanDir(), typeString(t.Elem(), pkgPath))
+
+ case reflect.Func:
+ s = typeString_Func(t, pkgPath)
+
+ case reflect.Interface:
+ s = typeString_Interface(t, pkgPath)
+
+ case reflect.Map:
+ s = fmt.Sprintf(
+ "map[%s]%s",
+ typeString(t.Key(), pkgPath),
+ typeString(t.Elem(), pkgPath))
+
+ case reflect.Ptr:
+ s = fmt.Sprintf("*%s", typeString(t.Elem(), pkgPath))
+
+ case reflect.Slice:
+ s = fmt.Sprintf("[]%s", typeString(t.Elem(), pkgPath))
+
+ case reflect.Struct:
+ s = typeString_Struct(t, pkgPath)
+
+ default:
+ log.Panicf("Unhandled kind %v for type: %v", t.Kind(), t)
+ }
+
+ return
+}
+
+func typeString_FuncOrMethod(
+ name string,
+ t reflect.Type,
+ pkgPath string) (s string) {
+ // Deal with input types.
+ var in []string
+ for i := 0; i < t.NumIn(); i++ {
+ in = append(in, typeString(t.In(i), pkgPath))
+ }
+
+ // And output types.
+ var out []string
+ for i := 0; i < t.NumOut(); i++ {
+ out = append(out, typeString(t.Out(i), pkgPath))
+ }
+
+ // Put it all together.
+ s = fmt.Sprintf(
+ "%s(%s) (%s)",
+ name,
+ strings.Join(in, ", "),
+ strings.Join(out, ", "))
+
+ return
+}
+
+func typeString_Func(
+ t reflect.Type,
+ pkgPath string) (s string) {
+ return typeString_FuncOrMethod("func", t, pkgPath)
+}
+
+func typeString_Struct(
+ t reflect.Type,
+ pkgPath string) (s string) {
+ var fields []string
+ for i := 0; i < t.NumField(); i++ {
+ f := t.Field(i)
+ fString := fmt.Sprintf("%s %s", f.Name, typeString(f.Type, pkgPath))
+ fields = append(fields, fString)
+ }
+
+ s = fmt.Sprintf("struct { %s }", strings.Join(fields, "; "))
+ return
+}
+
+func typeString_Interface(
+ t reflect.Type,
+ pkgPath string) (s string) {
+ var methods []string
+ for i := 0; i < t.NumMethod(); i++ {
+ m := t.Method(i)
+ mString := typeString_FuncOrMethod(m.Name, m.Type, pkgPath)
+ methods = append(methods, mString)
+ }
+
+ s = fmt.Sprintf("interface { %s }", strings.Join(methods, "; "))
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/type_string_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/type_string_test.go
new file mode 100644
index 00000000000..7d13c4e177e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/generate/type_string_test.go
@@ -0,0 +1,220 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "io"
+ "reflect"
+ "testing"
+ "unsafe"
+
+ "github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/gcs"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+func TestTypeString(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Boilerplate
+////////////////////////////////////////////////////////////////////////
+
+type TypeStringTest struct {
+}
+
+func init() { RegisterTestSuite(&TypeStringTest{}) }
+
+////////////////////////////////////////////////////////////////////////
+// Test functions
+////////////////////////////////////////////////////////////////////////
+
+func (t *TypeStringTest) TestCases() {
+ const gcsPkgPath = "github.com/smartystreets/assertions/internal/oglemock/createmock/testdata/gcs"
+ to := reflect.TypeOf
+
+ testCases := []struct {
+ t reflect.Type
+ pkgPath string
+ expected string
+ }{
+ /////////////////////////
+ // Scalar types
+ /////////////////////////
+
+ 0: {to(true), "", "bool"},
+ 1: {to(true), "some/pkg", "bool"},
+ 2: {to(int(17)), "some/pkg", "int"},
+ 3: {to(int32(17)), "some/pkg", "int32"},
+ 4: {to(uint(17)), "some/pkg", "uint"},
+ 5: {to(uint32(17)), "some/pkg", "uint32"},
+ 6: {to(uintptr(17)), "some/pkg", "uintptr"},
+ 7: {to(float32(17)), "some/pkg", "float32"},
+ 8: {to(complex64(17)), "some/pkg", "complex64"},
+
+ /////////////////////////
+ // Structs
+ /////////////////////////
+
+ 9: {to(gcs.Object{}), "some/pkg", "gcs.Object"},
+ 10: {to(gcs.Object{}), gcsPkgPath, "Object"},
+
+ 11: {
+ to(struct {
+ a int
+ b gcs.Object
+ }{}),
+ "some/pkg",
+ "struct { a int; b gcs.Object }",
+ },
+
+ 12: {
+ to(struct {
+ a int
+ b gcs.Object
+ }{}),
+ gcsPkgPath,
+ "struct { a int; b Object }",
+ },
+
+ /////////////////////////
+ // Pointers
+ /////////////////////////
+
+ 13: {to((*int)(nil)), gcsPkgPath, "*int"},
+ 14: {to((*gcs.Object)(nil)), "some/pkg", "*gcs.Object"},
+ 15: {to((*gcs.Object)(nil)), gcsPkgPath, "*Object"},
+
+ /////////////////////////
+ // Arrays
+ /////////////////////////
+
+ 16: {to([3]int{}), "some/pkg", "[3]int"},
+ 17: {to([3]gcs.Object{}), gcsPkgPath, "[3]Object"},
+
+ /////////////////////////
+ // Channels
+ /////////////////////////
+
+ 18: {to((chan int)(nil)), "some/pkg", "chan int"},
+ 19: {to((<-chan int)(nil)), "some/pkg", "<-chan int"},
+ 20: {to((chan<- int)(nil)), "some/pkg", "chan<- int"},
+ 21: {to((<-chan gcs.Object)(nil)), gcsPkgPath, "<-chan Object"},
+
+ /////////////////////////
+ // Functions
+ /////////////////////////
+
+ 22: {
+ to(func(int, gcs.Object) {}),
+ gcsPkgPath,
+ "func(int, Object) ()",
+ },
+
+ 23: {
+ to(func() (*gcs.Object, error) { return nil, nil }),
+ gcsPkgPath,
+ "func() (*Object, error)",
+ },
+
+ 24: {
+ to(func(int, gcs.Object) (*gcs.Object, error) { return nil, nil }),
+ gcsPkgPath,
+ "func(int, Object) (*Object, error)",
+ },
+
+ /////////////////////////
+ // Interfaces
+ /////////////////////////
+
+ 25: {to((*error)(nil)).Elem(), "some/pkg", "error"},
+ 26: {to((*io.Reader)(nil)).Elem(), "some/pkg", "io.Reader"},
+ 27: {to((*io.Reader)(nil)).Elem(), "io", "Reader"},
+
+ 28: {
+ to((*interface{})(nil)).Elem(),
+ "some/pkg",
+ "interface { }",
+ },
+
+ 29: {
+ to((*interface {
+ Foo(int)
+ Bar(gcs.Object)
+ })(nil)).Elem(),
+ "some/pkg",
+ "interface { Bar(gcs.Object) (); Foo(int) () }",
+ },
+
+ 30: {
+ to((*interface {
+ Foo(int)
+ Bar(gcs.Object)
+ })(nil)).Elem(),
+ gcsPkgPath,
+ "interface { Bar(Object) (); Foo(int) () }",
+ },
+
+ /////////////////////////
+ // Maps
+ /////////////////////////
+
+ 31: {to(map[*gcs.Object]gcs.Object{}), gcsPkgPath, "map[*Object]Object"},
+
+ /////////////////////////
+ // Slices
+ /////////////////////////
+
+ 32: {to([]int{}), "some/pkg", "[]int"},
+ 33: {to([]gcs.Object{}), gcsPkgPath, "[]Object"},
+
+ /////////////////////////
+ // Strings
+ /////////////////////////
+
+ 34: {to(""), gcsPkgPath, "string"},
+
+ /////////////////////////
+ // Unsafe pointer
+ /////////////////////////
+
+ 35: {to(unsafe.Pointer(nil)), gcsPkgPath, "unsafe.Pointer"},
+
+ /////////////////////////
+ // Other named types
+ /////////////////////////
+
+ 36: {to(gcs.Int(17)), "some/pkg", "gcs.Int"},
+ 37: {to(gcs.Int(17)), gcsPkgPath, "Int"},
+
+ 38: {to(gcs.Array{}), "some/pkg", "gcs.Array"},
+ 39: {to(gcs.Array{}), gcsPkgPath, "Array"},
+
+ 40: {to(gcs.Chan(nil)), "some/pkg", "gcs.Chan"},
+ 41: {to(gcs.Chan(nil)), gcsPkgPath, "Chan"},
+
+ 42: {to(gcs.Ptr(nil)), "some/pkg", "gcs.Ptr"},
+ 43: {to(gcs.Ptr(nil)), gcsPkgPath, "Ptr"},
+
+ 44: {to((*gcs.Int)(nil)), "some/pkg", "*gcs.Int"},
+ 45: {to((*gcs.Int)(nil)), gcsPkgPath, "*Int"},
+ }
+
+ for i, tc := range testCases {
+ ExpectEq(
+ tc.expected,
+ typeString(tc.t, tc.pkgPath),
+ "Case %d: %v, %q", i, tc.t, tc.pkgPath)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/integration_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/integration_test.go
new file mode 100644
index 00000000000..e72f0cbb13b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/integration_test.go
@@ -0,0 +1,129 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock_test
+
+import (
+ "errors"
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ "github.com/smartystreets/assertions/internal/oglemock"
+ "github.com/smartystreets/assertions/internal/oglemock/sample/mock_io"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "path"
+ "runtime"
+)
+
+////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////
+
+func getLineNumber() int {
+ _, _, line, _ := runtime.Caller(1)
+ return line
+}
+
+type IntegrationTest struct {
+ reporter fakeErrorReporter
+ controller oglemock.Controller
+
+ reader mock_io.MockReader
+}
+
+func init() { RegisterTestSuite(&IntegrationTest{}) }
+
+func (t *IntegrationTest) SetUp(c *TestInfo) {
+ t.reporter.errors = make([]errorReport, 0)
+ t.reporter.fatalErrors = make([]errorReport, 0)
+ t.controller = oglemock.NewController(&t.reporter)
+
+ t.reader = mock_io.NewMockReader(t.controller, "")
+}
+
+////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////
+
+func (t *IntegrationTest) UnexpectedCall() {
+ t.reader.Read([]uint8{1, 2, 3})
+ expectedLine := getLineNumber() - 1
+
+ // An error should have been reported.
+ AssertEq(1, len(t.reporter.errors), "%v", t.reporter.errors)
+ AssertEq(0, len(t.reporter.fatalErrors), "%v", t.reporter.fatalErrors)
+
+ r := t.reporter.errors[0]
+ ExpectEq("integration_test.go", path.Base(r.fileName))
+ ExpectEq(expectedLine, r.lineNumber)
+ ExpectThat(r.err, Error(HasSubstr("Unexpected")))
+ ExpectThat(r.err, Error(HasSubstr("Read")))
+ ExpectThat(r.err, Error(HasSubstr("[1 2 3]")))
+}
+
+func (t *IntegrationTest) ZeroValues() {
+ // Make an unexpected call.
+ n, err := t.reader.Read([]uint8{})
+
+ // Check the return values.
+ ExpectEq(0, n)
+ ExpectEq(nil, err)
+}
+
+func (t *IntegrationTest) ExpectedCalls() {
+ // Expectations
+ t.controller.ExpectCall(t.reader, "Read", "", 112)(nil).
+ WillOnce(oglemock.Return(17, nil)).
+ WillOnce(oglemock.Return(19, nil))
+
+ t.controller.ExpectCall(t.reader, "Read", "", 112)(Not(Equals(nil))).
+ WillOnce(oglemock.Return(23, errors.New("taco")))
+
+ // Calls
+ var n int
+ var err error
+
+ n, err = t.reader.Read(nil)
+ ExpectEq(17, n)
+ ExpectEq(nil, err)
+
+ n, err = t.reader.Read([]byte{})
+ ExpectEq(23, n)
+ ExpectThat(err, Error(Equals("taco")))
+
+ n, err = t.reader.Read(nil)
+ ExpectEq(19, n)
+ ExpectEq(nil, err)
+
+ // Errors
+ AssertEq(0, len(t.reporter.errors), "%v", t.reporter.errors)
+ AssertEq(0, len(t.reporter.fatalErrors), "%v", t.reporter.fatalErrors)
+}
+
+func (t *IntegrationTest) WrongTypeForReturn() {
+ t.controller.ExpectCall(t.reader, "Read", "foo.go", 112)(nil).
+ WillOnce(oglemock.Return(0, errors.New(""))).
+ WillOnce(oglemock.Return("taco", errors.New("")))
+
+ // Errors
+ AssertEq(0, len(t.reporter.errors), "%v", t.reporter.errors)
+ AssertEq(1, len(t.reporter.fatalErrors), "%v", t.reporter.fatalErrors)
+
+ r := t.reporter.fatalErrors[0]
+ ExpectEq("foo.go", r.fileName)
+ ExpectEq(112, r.lineNumber)
+ ExpectThat(r.err, Error(HasSubstr("Return")))
+ ExpectThat(r.err, Error(HasSubstr("arg 0")))
+ ExpectThat(r.err, Error(HasSubstr("int")))
+ ExpectThat(r.err, Error(HasSubstr("string")))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/internal_expectation.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/internal_expectation.go
new file mode 100644
index 00000000000..8fa8aeafa42
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/internal_expectation.go
@@ -0,0 +1,180 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock
+
+import (
+ "errors"
+ "fmt"
+ "github.com/smartystreets/assertions/internal/oglematchers"
+ "reflect"
+ "sync"
+)
+
+// InternalExpectation is exported for purposes of testing only. You should not
+// touch it.
+//
+// InternalExpectation represents an expectation for zero or more calls to a
+// mock method, and a set of actions to be taken when those calls are received.
+type InternalExpectation struct {
+ // The signature of the method to which this expectation is bound, for
+ // checking action types.
+ methodSignature reflect.Type
+
+ // An error reporter to use for reporting errors in the way that
+ // expectations are set up.
+ errorReporter ErrorReporter
+
+ // A mutex protecting mutable fields of the struct.
+ mutex sync.Mutex
+
+ // Matchers that the arguments to the mock method must satisfy in order to
+ // match this expectation.
+ ArgMatchers []oglematchers.Matcher
+
+ // The name of the file in which this expectation was expressed.
+ FileName string
+
+ // The line number at which this expectation was expressed.
+ LineNumber int
+
+ // The number of times this expectation should be matched, as explicitly
+ // listed by the user. If there was no explicit number expressed, this is -1.
+ ExpectedNumMatches int
+
+ // Actions to be taken for the first N calls, one per call in order, where N
+ // is the length of this slice.
+ OneTimeActions []Action
+
+ // An action to be taken when the one-time actions have expired, or nil if
+ // there is no such action.
+ FallbackAction Action
+
+ // The number of times this expectation has been matched so far.
+ NumMatches uint
+}
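+
+// Editorial illustration: a chain such as
+//
+//	exp.Times(3).WillOnce(a0).WillOnce(a1).WillRepeatedly(a2)
+//
+// leaves ExpectedNumMatches == 3, OneTimeActions == []Action{a0, a1}, and
+// FallbackAction == a2; the ordering rules are enforced by the methods below
+// and exercised in internal_expectation_test.go.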
+
+// InternalNewExpectation is exported for purposes of testing only. You should
+// not touch it.
+func InternalNewExpectation(
+ reporter ErrorReporter,
+ methodSignature reflect.Type,
+ args []interface{},
+ fileName string,
+ lineNumber int) *InternalExpectation {
+ result := &InternalExpectation{}
+
+ // Store fields that can be stored directly.
+ result.methodSignature = methodSignature
+ result.errorReporter = reporter
+ result.FileName = fileName
+ result.LineNumber = lineNumber
+
+ // Set up defaults.
+ result.ExpectedNumMatches = -1
+ result.OneTimeActions = make([]Action, 0)
+
+ // Set up the ArgMatchers slice, using Equals(x) for each x that is not a
+ // matcher itself.
+ result.ArgMatchers = make([]oglematchers.Matcher, len(args))
+ for i, x := range args {
+ if matcher, ok := x.(oglematchers.Matcher); ok {
+ result.ArgMatchers[i] = matcher
+ } else {
+ result.ArgMatchers[i] = oglematchers.Equals(x)
+ }
+ }
+
+ return result
+}
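+
+// Editorial illustration: given args of []interface{}{17, oglematchers.HasSubstr("taco")},
+// the loop above produces ArgMatchers equivalent to
+// []oglematchers.Matcher{oglematchers.Equals(17), oglematchers.HasSubstr("taco")}.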
+
+func (e *InternalExpectation) Times(n uint) Expectation {
+ e.mutex.Lock()
+ defer e.mutex.Unlock()
+
+ // It is illegal to call this more than once.
+ if e.ExpectedNumMatches != -1 {
+ e.reportFatalError("Times called more than once.")
+ return nil
+ }
+
+ // It is illegal to call this after any actions are configured.
+ if len(e.OneTimeActions) != 0 {
+ e.reportFatalError("Times called after WillOnce.")
+ return nil
+ }
+
+ if e.FallbackAction != nil {
+ e.reportFatalError("Times called after WillRepeatedly.")
+ return nil
+ }
+
+ // Make sure the number is reasonable (and will fit in an int).
+ if n > 1000 {
+ e.reportFatalError("Expectation.Times: N must be at most 1000")
+ return nil
+ }
+
+ e.ExpectedNumMatches = int(n)
+ return e
+}
+
+func (e *InternalExpectation) WillOnce(a Action) Expectation {
+ e.mutex.Lock()
+ defer e.mutex.Unlock()
+
+ // It is illegal to call this after WillRepeatedly.
+ if e.FallbackAction != nil {
+ e.reportFatalError("WillOnce called after WillRepeatedly.")
+ return nil
+ }
+
+ // Tell the action about the method's signature.
+ if err := a.SetSignature(e.methodSignature); err != nil {
+ e.reportFatalError(fmt.Sprintf("WillOnce given invalid action: %v", err))
+ return nil
+ }
+
+ // Store the action.
+ e.OneTimeActions = append(e.OneTimeActions, a)
+
+ return e
+}
+
+func (e *InternalExpectation) WillRepeatedly(a Action) Expectation {
+ e.mutex.Lock()
+ defer e.mutex.Unlock()
+
+ // It is illegal to call this twice.
+ if e.FallbackAction != nil {
+ e.reportFatalError("WillRepeatedly called more than once.")
+ return nil
+ }
+
+ // Tell the action about the method's signature.
+ if err := a.SetSignature(e.methodSignature); err != nil {
+ e.reportFatalError(fmt.Sprintf("WillRepeatedly given invalid action: %v", err))
+ return nil
+ }
+
+ // Store the action.
+ e.FallbackAction = a
+
+ return e
+}
+
+func (e *InternalExpectation) reportFatalError(errorText string) {
+ e.errorReporter.ReportFatalError(e.FileName, e.LineNumber, errors.New(errorText))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/internal_expectation_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/internal_expectation_test.go
new file mode 100644
index 00000000000..977fe1ac3f7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/internal_expectation_test.go
@@ -0,0 +1,265 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/oglemock"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "reflect"
+)
+
+////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////
+
+var emptyReturnSig reflect.Type = reflect.TypeOf(func(i int) {})
+var float64ReturnSig reflect.Type = reflect.TypeOf(func(i int) float64 { return 17.0 })
+
+type InternalExpectationTest struct {
+ reporter fakeErrorReporter
+}
+
+func init() { RegisterTestSuite(&InternalExpectationTest{}) }
+
+func (t *InternalExpectationTest) SetUp(c *TestInfo) {
+ t.reporter.errors = make([]errorReport, 0)
+ t.reporter.fatalErrors = make([]errorReport, 0)
+}
+
+func (t *InternalExpectationTest) makeExpectation(
+ sig reflect.Type,
+ args []interface{},
+ fileName string,
+ lineNumber int) *InternalExpectation {
+ return InternalNewExpectation(&t.reporter, sig, args, fileName, lineNumber)
+}
+
+////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////
+
+func (t *InternalExpectationTest) StoresFileNameAndLineNumber() {
+ args := []interface{}{}
+ exp := t.makeExpectation(emptyReturnSig, args, "taco", 17)
+
+ ExpectThat(exp.FileName, Equals("taco"))
+ ExpectThat(exp.LineNumber, Equals(17))
+}
+
+func (t *InternalExpectationTest) NoArgs() {
+ args := []interface{}{}
+ exp := t.makeExpectation(emptyReturnSig, args, "", 0)
+
+ ExpectThat(len(exp.ArgMatchers), Equals(0))
+}
+
+func (t *InternalExpectationTest) MixOfMatchersAndNonMatchers() {
+ args := []interface{}{Equals(17), 19, Equals(23)}
+ exp := t.makeExpectation(emptyReturnSig, args, "", 0)
+
+ // Matcher args
+ ExpectThat(len(exp.ArgMatchers), Equals(3))
+ ExpectThat(exp.ArgMatchers[0], Equals(args[0]))
+ ExpectThat(exp.ArgMatchers[2], Equals(args[2]))
+
+ // Non-matcher arg
+ var err error
+ matcher1 := exp.ArgMatchers[1]
+
+ err = matcher1.Matches(17)
+ ExpectNe(nil, err)
+
+ err = matcher1.Matches(19)
+ ExpectEq(nil, err)
+
+ err = matcher1.Matches(23)
+ ExpectNe(nil, err)
+}
+
+func (t *InternalExpectationTest) NoTimes() {
+ exp := t.makeExpectation(emptyReturnSig, []interface{}{}, "", 0)
+
+ ExpectThat(exp.ExpectedNumMatches, Equals(-1))
+}
+
+func (t *InternalExpectationTest) TimesN() {
+ exp := t.makeExpectation(emptyReturnSig, []interface{}{}, "", 0)
+ exp.Times(17)
+
+ ExpectThat(exp.ExpectedNumMatches, Equals(17))
+}
+
+func (t *InternalExpectationTest) NoActions() {
+ exp := t.makeExpectation(emptyReturnSig, []interface{}{}, "", 0)
+
+ ExpectThat(len(exp.OneTimeActions), Equals(0))
+ ExpectThat(exp.FallbackAction, Equals(nil))
+}
+
+func (t *InternalExpectationTest) WillOnce() {
+ action0 := Return(17.0)
+ action1 := Return(19.0)
+
+ exp := t.makeExpectation(float64ReturnSig, []interface{}{}, "", 0)
+ exp.WillOnce(action0).WillOnce(action1)
+
+ ExpectThat(len(exp.OneTimeActions), Equals(2))
+ ExpectThat(exp.OneTimeActions[0], Equals(action0))
+ ExpectThat(exp.OneTimeActions[1], Equals(action1))
+}
+
+func (t *InternalExpectationTest) WillRepeatedly() {
+ action := Return(17.0)
+
+ exp := t.makeExpectation(float64ReturnSig, []interface{}{}, "", 0)
+ exp.WillRepeatedly(action)
+
+ ExpectThat(exp.FallbackAction, Equals(action))
+}
+
+func (t *InternalExpectationTest) BothKindsOfAction() {
+ action0 := Return(17.0)
+ action1 := Return(19.0)
+ action2 := Return(23.0)
+
+ exp := t.makeExpectation(float64ReturnSig, []interface{}{}, "", 0)
+ exp.WillOnce(action0).WillOnce(action1).WillRepeatedly(action2)
+
+ ExpectThat(len(exp.OneTimeActions), Equals(2))
+ ExpectThat(exp.OneTimeActions[0], Equals(action0))
+ ExpectThat(exp.OneTimeActions[1], Equals(action1))
+ ExpectThat(exp.FallbackAction, Equals(action2))
+}
+
+func (t *InternalExpectationTest) TimesCalledWithHugeNumber() {
+ exp := t.makeExpectation(emptyReturnSig, []interface{}{}, "taco.go", 112)
+ exp.Times(1 << 30)
+
+ AssertEq(1, len(t.reporter.fatalErrors))
+ AssertEq(0, len(t.reporter.errors))
+
+ r := t.reporter.fatalErrors[0]
+ ExpectEq("taco.go", r.fileName)
+ ExpectEq(112, r.lineNumber)
+ ExpectThat(r.err, Error(HasSubstr("Times")))
+ ExpectThat(r.err, Error(HasSubstr("N must be at most 1000")))
+}
+
+func (t *InternalExpectationTest) TimesCalledTwice() {
+ exp := t.makeExpectation(emptyReturnSig, []interface{}{}, "taco.go", 112)
+ exp.Times(17)
+ exp.Times(17)
+
+ AssertEq(1, len(t.reporter.fatalErrors))
+ AssertEq(0, len(t.reporter.errors))
+
+ r := t.reporter.fatalErrors[0]
+ ExpectEq("taco.go", r.fileName)
+ ExpectEq(112, r.lineNumber)
+ ExpectThat(r.err, Error(HasSubstr("Times")))
+ ExpectThat(r.err, Error(HasSubstr("more than once")))
+}
+
+func (t *InternalExpectationTest) TimesCalledAfterWillOnce() {
+ exp := t.makeExpectation(emptyReturnSig, []interface{}{}, "taco.go", 112)
+ exp.WillOnce(Return())
+ exp.Times(17)
+
+ AssertEq(1, len(t.reporter.fatalErrors))
+ AssertEq(0, len(t.reporter.errors))
+
+ r := t.reporter.fatalErrors[0]
+ ExpectEq("taco.go", r.fileName)
+ ExpectEq(112, r.lineNumber)
+ ExpectThat(r.err, Error(HasSubstr("Times")))
+ ExpectThat(r.err, Error(HasSubstr("after WillOnce")))
+}
+
+func (t *InternalExpectationTest) TimesCalledAfterWillRepeatedly() {
+ exp := t.makeExpectation(emptyReturnSig, []interface{}{}, "taco.go", 112)
+ exp.WillRepeatedly(Return())
+ exp.Times(17)
+
+ AssertEq(1, len(t.reporter.fatalErrors))
+ AssertEq(0, len(t.reporter.errors))
+
+ r := t.reporter.fatalErrors[0]
+ ExpectEq("taco.go", r.fileName)
+ ExpectEq(112, r.lineNumber)
+ ExpectThat(r.err, Error(HasSubstr("Times")))
+ ExpectThat(r.err, Error(HasSubstr("after WillRepeatedly")))
+}
+
+func (t *InternalExpectationTest) WillOnceCalledAfterWillRepeatedly() {
+ exp := t.makeExpectation(emptyReturnSig, []interface{}{}, "taco.go", 112)
+ exp.WillRepeatedly(Return())
+ exp.WillOnce(Return())
+
+ AssertEq(1, len(t.reporter.fatalErrors))
+ AssertEq(0, len(t.reporter.errors))
+
+ r := t.reporter.fatalErrors[0]
+ ExpectEq("taco.go", r.fileName)
+ ExpectEq(112, r.lineNumber)
+ ExpectThat(r.err, Error(HasSubstr("WillOnce")))
+ ExpectThat(r.err, Error(HasSubstr("after WillRepeatedly")))
+}
+
+func (t *InternalExpectationTest) OneTimeActionRejectsSignature() {
+ exp := t.makeExpectation(float64ReturnSig, []interface{}{}, "taco.go", 112)
+ exp.WillOnce(Return("taco"))
+
+ AssertEq(1, len(t.reporter.fatalErrors))
+ AssertEq(0, len(t.reporter.errors))
+
+ r := t.reporter.fatalErrors[0]
+ ExpectEq("taco.go", r.fileName)
+ ExpectEq(112, r.lineNumber)
+ ExpectThat(r.err, Error(HasSubstr("arg 0")))
+ ExpectThat(r.err, Error(HasSubstr("expected float64")))
+ ExpectThat(r.err, Error(HasSubstr("given string")))
+}
+
+func (t *InternalExpectationTest) WillRepeatedlyCalledTwice() {
+ exp := t.makeExpectation(emptyReturnSig, []interface{}{}, "taco.go", 112)
+ exp.WillRepeatedly(Return())
+ exp.WillRepeatedly(Return())
+
+ AssertEq(1, len(t.reporter.fatalErrors))
+ AssertEq(0, len(t.reporter.errors))
+
+ r := t.reporter.fatalErrors[0]
+ ExpectEq("taco.go", r.fileName)
+ ExpectEq(112, r.lineNumber)
+ ExpectThat(r.err, Error(HasSubstr("WillRepeatedly")))
+ ExpectThat(r.err, Error(HasSubstr("once")))
+}
+
+func (t *InternalExpectationTest) FallbackActionRejectsSignature() {
+ exp := t.makeExpectation(float64ReturnSig, []interface{}{}, "taco.go", 112)
+ exp.WillRepeatedly(Return("taco"))
+
+ AssertEq(1, len(t.reporter.fatalErrors))
+ AssertEq(0, len(t.reporter.errors))
+
+ r := t.reporter.fatalErrors[0]
+ ExpectEq("taco.go", r.fileName)
+ ExpectEq(112, r.lineNumber)
+ ExpectThat(r.err, Error(HasSubstr("arg 0")))
+ ExpectThat(r.err, Error(HasSubstr("expected float64")))
+ ExpectThat(r.err, Error(HasSubstr("given string")))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/invoke.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/invoke.go
new file mode 100644
index 00000000000..07630cbbb7e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/invoke.go
@@ -0,0 +1,73 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+// Create an Action that invokes the supplied function, returning whatever it
+// returns. The signature of the function must match that of the mocked method
+// exactly.
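+//
+// Illustrative usage (editorial sketch; the controller, mock reader, and
+// matchers mirror the pattern used in integration_test.go):
+//
+//	controller.ExpectCall(reader, "Read", "foo.go", 112)(Not(Equals(nil))).
+//		WillOnce(Invoke(func(p []uint8) (int, error) {
+//			return len(p), nil
+//		}))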
+func Invoke(f interface{}) Action {
+ // Make sure f is a function.
+ fv := reflect.ValueOf(f)
+ fk := fv.Kind()
+
+ if fk != reflect.Func {
+ desc := "<nil>"
+ if fk != reflect.Invalid {
+ desc = fv.Type().String()
+ }
+
+ panic(fmt.Sprintf("Invoke: expected function, got %s", desc))
+ }
+
+ return &invokeAction{fv}
+}
+
+type invokeAction struct {
+ f reflect.Value
+}
+
+func (a *invokeAction) SetSignature(signature reflect.Type) error {
+ // The signature must match exactly.
+ ft := a.f.Type()
+ if ft != signature {
+ return errors.New(fmt.Sprintf("Invoke: expected %v, got %v", signature, ft))
+ }
+
+ return nil
+}
+
+func (a *invokeAction) Invoke(vals []interface{}) []interface{} {
+ // Create a slice of args for the function.
+ in := make([]reflect.Value, len(vals))
+ for i, x := range vals {
+ in[i] = reflect.ValueOf(x)
+ }
+
+ // Call the function and return its return values.
+ out := a.f.Call(in)
+ result := make([]interface{}, len(out))
+ for i, v := range out {
+ result[i] = v.Interface()
+ }
+
+ return result
+}
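A minimal standalone sketch of driving this action directly, assuming only the exported oglemock.Invoke constructor and the Action methods used above (SetSignature, then Invoke); the test file below exercises the same calls:

    package main

    import (
        "fmt"
        "reflect"

        "github.com/smartystreets/assertions/internal/oglemock"
    )

    func main() {
        // The wrapped function must match the mocked method's signature exactly.
        double := func(x int) int { return 2 * x }

        a := oglemock.Invoke(double)
        if err := a.SetSignature(reflect.TypeOf(double)); err != nil {
            panic(err)
        }

        // Invoke forwards the arguments and returns the function's results.
        out := a.Invoke([]interface{}{21})
        fmt.Println(out[0]) // 42
    }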
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/invoke_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/invoke_test.go
new file mode 100644
index 00000000000..9e1478ba8cc
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/invoke_test.go
@@ -0,0 +1,110 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ "github.com/smartystreets/assertions/internal/oglemock"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "reflect"
+)
+
+////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////
+
+type InvokeTest struct {
+}
+
+func init() { RegisterTestSuite(&InvokeTest{}) }
+
+////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////
+
+func (t *InvokeTest) ArgumentIsNil() {
+ f := func() { oglemock.Invoke(nil) }
+ ExpectThat(f, Panics(MatchesRegexp("Invoke.*function.*<nil>")))
+}
+
+func (t *InvokeTest) ArgumentIsInt() {
+ f := func() { oglemock.Invoke(17) }
+ ExpectThat(f, Panics(MatchesRegexp("Invoke.*function.*int")))
+}
+
+func (t *InvokeTest) FunctionHasOneWrongInputType() {
+ f := func(a int, b int32, c string) {}
+ g := func(a int, b int, c string) {}
+
+ err := oglemock.Invoke(f).SetSignature(reflect.TypeOf(g))
+ ExpectThat(err, Error(HasSubstr("func(int, int32, string)")))
+ ExpectThat(err, Error(HasSubstr("func(int, int, string)")))
+}
+
+func (t *InvokeTest) FunctionHasOneWrongOutputType() {
+ f := func() (int32, string) { return 0, "" }
+ g := func() (int, string) { return 0, "" }
+
+ err := oglemock.Invoke(f).SetSignature(reflect.TypeOf(g))
+ ExpectThat(err, Error(HasSubstr("func() (int32, string)")))
+ ExpectThat(err, Error(HasSubstr("func() (int, string)")))
+}
+
+func (t *InvokeTest) CallsFunction() {
+ var actualArg0, actualArg1 interface{}
+
+ f := func(a uintptr, b int8) {
+ actualArg0 = a
+ actualArg1 = b
+ }
+
+ a := oglemock.Invoke(f)
+
+ // Set signature.
+ AssertEq(nil, a.SetSignature(reflect.TypeOf(f)))
+
+ // Call the action.
+ expectedArg0 := uintptr(17)
+ expectedArg1 := int8(-7)
+
+ a.Invoke([]interface{}{expectedArg0, expectedArg1})
+
+ ExpectThat(actualArg0, IdenticalTo(expectedArg0))
+ ExpectThat(actualArg1, IdenticalTo(expectedArg1))
+}
+
+func (t *InvokeTest) ReturnsFunctionResult() {
+ expectedReturn0 := int16(3)
+ expectedReturn1 := "taco"
+
+ f := func() (int16, string) {
+ return expectedReturn0, expectedReturn1
+ }
+
+ a := oglemock.Invoke(f)
+
+ // Set signature.
+ AssertEq(nil, a.SetSignature(reflect.TypeOf(f)))
+
+ // Call the action.
+ res := a.Invoke([]interface{}{})
+
+ ExpectThat(
+ res,
+ ElementsAre(
+ IdenticalTo(expectedReturn0),
+ IdenticalTo(expectedReturn1)))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/mock_object.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/mock_object.go
new file mode 100644
index 00000000000..de995efc667
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/mock_object.go
@@ -0,0 +1,30 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock
+
+// MockObject is an interface that mock object implementations must conform to
+// in order to register expectations with and hand off calls to a
+// MockController. Users should not interact with this interface directly.
+type MockObject interface {
+ // Oglemock_Id returns an identifier for the mock object that is guaranteed
+ // to be unique within the process at least until the mock object is garbage
+ // collected.
+ Oglemock_Id() uintptr
+
+ // Oglemock_Description returns a description of the mock object that may be
+ // helpful in test failure messages.
+ Oglemock_Description() string
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/return.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/return.go
new file mode 100644
index 00000000000..c66d248f44a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/return.go
@@ -0,0 +1,251 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "reflect"
+)
+
+var intType = reflect.TypeOf(int(0))
+var float64Type = reflect.TypeOf(float64(0))
+var complex128Type = reflect.TypeOf(complex128(0))
+
+// Return creates an Action that returns the values passed to Return as
+// arguments, after suitable legal type conversions. The following rules apply.
+// Given an argument x to Return and a corresponding type T in the method's
+// signature, at least one of the following must hold:
+//
+// * x is assignable to T. (See "Assignability" in the language spec.) Note
+//     that this in particular implies that x may be a type that implements an
+// interface T. It also implies that the nil literal can be used if T is a
+// pointer, function, interface, slice, channel, or map type.
+//
+// * T is any numeric type, and x is an int that is in-range for that type.
+//     This facilitates using raw integer constants: Return(17).
+//
+// * T is a floating-point or complex number type, and x is a float64. This
+//     facilitates using raw floating-point constants: Return(17.5).
+//
+// * T is a complex number type, and x is a complex128. This facilitates using
+// raw complex constants: Return(17+2i).
+//
+func Return(vals ...interface{}) Action {
+ return &returnAction{vals, nil}
+}
+
+type returnAction struct {
+ returnVals []interface{}
+ signature reflect.Type
+}
+
+func (a *returnAction) Invoke(vals []interface{}) []interface{} {
+ if a.signature == nil {
+ panic("You must first call SetSignature with a valid signature.")
+ }
+
+ res, err := a.buildInvokeResult(a.signature)
+ if err != nil {
+ panic(err)
+ }
+
+ return res
+}
+
+func (a *returnAction) SetSignature(signature reflect.Type) error {
+ if _, err := a.buildInvokeResult(signature); err != nil {
+ return err
+ }
+
+ a.signature = signature
+ return nil
+}
+
+// A version of Invoke that does error checking, used by both public methods.
+func (a *returnAction) buildInvokeResult(
+ sig reflect.Type) (res []interface{}, err error) {
+ // Check the length of the return value.
+ numOut := sig.NumOut()
+ numVals := len(a.returnVals)
+
+ if numOut != numVals {
+ err = errors.New(
+ fmt.Sprintf("Return given %d vals; expected %d.", numVals, numOut))
+ return
+ }
+
+ // Attempt to coerce each return value.
+ res = make([]interface{}, numOut)
+
+ for i, val := range a.returnVals {
+ resType := sig.Out(i)
+ res[i], err = a.coerce(val, resType)
+
+ if err != nil {
+ res = nil
+ err = errors.New(fmt.Sprintf("Return: arg %d: %v", i, err))
+ return
+ }
+ }
+
+ return
+}
+
+func (a *returnAction) coerce(x interface{}, t reflect.Type) (interface{}, error) {
+ xv := reflect.ValueOf(x)
+ rv := reflect.New(t).Elem()
+
+ // Special case: the language spec says that the predeclared identifier nil
+ // is assignable to pointers, functions, interface, slices, channels, and map
+ // types. However, reflect.ValueOf(nil) returns an invalid value that will
+ // not cooperate below. So handle invalid values here, assuming that they
+ // resulted from Return(nil).
+ if !xv.IsValid() {
+ switch t.Kind() {
+ case reflect.Ptr, reflect.Func, reflect.Interface, reflect.Chan, reflect.Slice, reflect.Map, reflect.UnsafePointer:
+ return rv.Interface(), nil
+ }
+
+ return nil, errors.New(fmt.Sprintf("expected %v, given <nil>", t))
+ }
+
+ // If x is assignable to type t, let the reflect package do the heavy
+ // lifting.
+ if reflect.TypeOf(x).AssignableTo(t) {
+ rv.Set(xv)
+ return rv.Interface(), nil
+ }
+
+ // Handle numeric types as described in the documentation on Return.
+ switch {
+ case xv.Type() == intType && a.isNumeric(t):
+ return a.coerceInt(xv.Int(), t)
+
+ case xv.Type() == float64Type && (a.isFloatingPoint(t) || a.isComplex(t)):
+ return a.coerceFloat(xv.Float(), t)
+
+ case xv.Type() == complex128Type && a.isComplex(t):
+ return a.coerceComplex(xv.Complex(), t)
+ }
+
+ // The value wasn't of a legal type.
+ return nil, errors.New(fmt.Sprintf("expected %v, given %v", t, xv.Type()))
+}
+
+func (a *returnAction) isNumeric(t reflect.Type) bool {
+ return (t.Kind() >= reflect.Int && t.Kind() <= reflect.Uint64) ||
+ a.isFloatingPoint(t) ||
+ a.isComplex(t)
+}
+
+func (a *returnAction) isFloatingPoint(t reflect.Type) bool {
+ return t.Kind() == reflect.Float32 || t.Kind() == reflect.Float64
+}
+
+func (a *returnAction) isComplex(t reflect.Type) bool {
+ return t.Kind() == reflect.Complex64 || t.Kind() == reflect.Complex128
+}
+
+func (a *returnAction) coerceInt(x int64, t reflect.Type) (interface{}, error) {
+ k := t.Kind()
+
+ // Floating point and complex numbers: promote appropriately.
+ if a.isFloatingPoint(t) || a.isComplex(t) {
+ return a.coerceFloat(float64(x), t)
+ }
+
+ // Integers: range check.
+ var min, max int64
+ unsigned := false
+
+ switch k {
+ case reflect.Int8:
+ min = math.MinInt8
+ max = math.MaxInt8
+
+ case reflect.Int16:
+ min = math.MinInt16
+ max = math.MaxInt16
+
+ case reflect.Int32:
+ min = math.MinInt32
+ max = math.MaxInt32
+
+ case reflect.Int64:
+ min = math.MinInt64
+ max = math.MaxInt64
+
+ case reflect.Uint:
+ unsigned = true
+ min = 0
+ max = math.MaxUint32
+
+ case reflect.Uint8:
+ unsigned = true
+ min = 0
+ max = math.MaxUint8
+
+ case reflect.Uint16:
+ unsigned = true
+ min = 0
+ max = math.MaxUint16
+
+ case reflect.Uint32:
+ unsigned = true
+ min = 0
+ max = math.MaxUint32
+
+ case reflect.Uint64:
+ unsigned = true
+ min = 0
+ max = math.MaxInt64
+
+ default:
+ panic(fmt.Sprintf("Unexpected type: %v", t))
+ }
+
+ if x < min || x > max {
+ return nil, errors.New("int value out of range")
+ }
+
+ rv := reflect.New(t).Elem()
+ if unsigned {
+ rv.SetUint(uint64(x))
+ } else {
+ rv.SetInt(x)
+ }
+
+ return rv.Interface(), nil
+}
+
+func (a *returnAction) coerceFloat(x float64, t reflect.Type) (interface{}, error) {
+ // Promote complex numbers.
+ if a.isComplex(t) {
+ return a.coerceComplex(complex(x, 0), t)
+ }
+
+ rv := reflect.New(t).Elem()
+ rv.SetFloat(x)
+ return rv.Interface(), nil
+}
+
+func (a *returnAction) coerceComplex(x complex128, t reflect.Type) (interface{}, error) {
+ rv := reflect.New(t).Elem()
+ rv.SetComplex(x)
+ return rv.Interface(), nil
+}
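As a rough standalone illustration of the coercion rules documented on Return above (a sketch using only the exported API added in this file, not part of the vendored sources), an in-range int constant is converted to the narrower return type, while an out-of-range one is rejected when the signature is set:

    package main

    import (
        "fmt"
        "reflect"

        "github.com/smartystreets/assertions/internal/oglemock"
    )

    func main() {
        sig := reflect.TypeOf(func() int8 { return 0 })

        // Return(17) supplies a raw int; coercion narrows it to int8.
        a := oglemock.Return(17)
        if err := a.SetSignature(sig); err != nil {
            panic(err)
        }
        res := a.Invoke([]interface{}{})
        fmt.Printf("%T %v\n", res[0], res[0]) // int8 17

        // Out-of-range ints fail the range check in coerceInt.
        bad := oglemock.Return(1000)
        fmt.Println(bad.SetSignature(sig)) // Return: arg 0: int value out of range
    }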
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/return_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/return_test.go
new file mode 100644
index 00000000000..f1794bd764a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/return_test.go
@@ -0,0 +1,978 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock_test
+
+import (
+ "bytes"
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ "github.com/smartystreets/assertions/internal/oglemock"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "io"
+ "math"
+ "reflect"
+ "testing"
+ "unsafe"
+)
+
+////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////
+
+var someInt int = 17
+
+type ReturnTest struct {
+}
+
+func init() { RegisterTestSuite(&ReturnTest{}) }
+func TestOgletest(t *testing.T) { RunTests(t) }
+
+type returnTestCase struct {
+ suppliedVal interface{}
+ expectedVal interface{}
+ expectedSetSignatureErrorSubstring string
+}
+
+func (t *ReturnTest) runTestCases(signature reflect.Type, cases []returnTestCase) {
+ for i, c := range cases {
+ a := oglemock.Return(c.suppliedVal)
+
+ // SetSignature
+ err := a.SetSignature(signature)
+ if c.expectedSetSignatureErrorSubstring == "" {
+ ExpectEq(nil, err, "Test case %d: %v", i, c)
+
+ if err != nil {
+ continue
+ }
+ } else {
+ ExpectThat(err, Error(HasSubstr(c.expectedSetSignatureErrorSubstring)),
+ "Test case %d: %v", i, c)
+ continue
+ }
+
+ // Invoke
+ res := a.Invoke([]interface{}{})
+ AssertThat(res, ElementsAre(Any()))
+ ExpectThat(res[0], IdenticalTo(c.expectedVal), "Test case %d: %v", i, c)
+ }
+}
+
+////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////
+
+func (t *ReturnTest) SetSignatureNotCalled() {
+ a := oglemock.Return()
+ f := func() { a.Invoke([]interface{}{}) }
+ ExpectThat(f, Panics(MatchesRegexp("first call SetSignature")))
+}
+
+func (t *ReturnTest) NoReturnValues() {
+ sig := reflect.TypeOf(func() {})
+ var a oglemock.Action
+ var err error
+ var vals []interface{}
+
+ // No values.
+ a = oglemock.Return()
+ err = a.SetSignature(sig)
+ AssertEq(nil, err)
+
+ vals = a.Invoke([]interface{}{})
+ ExpectThat(vals, ElementsAre())
+
+ // One value.
+ a = oglemock.Return(17)
+ err = a.SetSignature(sig)
+ ExpectThat(err, Error(HasSubstr("given 1 val")))
+ ExpectThat(err, Error(HasSubstr("expected 0")))
+
+ // Two values.
+ a = oglemock.Return(17, 19)
+ err = a.SetSignature(sig)
+ ExpectThat(err, Error(HasSubstr("given 2 vals")))
+ ExpectThat(err, Error(HasSubstr("expected 0")))
+}
+
+func (t *ReturnTest) MultipleReturnValues() {
+ sig := reflect.TypeOf(func() (int, string) { return 0, "" })
+ var a oglemock.Action
+ var err error
+ var vals []interface{}
+
+ // No values.
+ a = oglemock.Return()
+ err = a.SetSignature(sig)
+ ExpectThat(err, Error(HasSubstr("given 0 vals")))
+ ExpectThat(err, Error(HasSubstr("expected 2")))
+
+ // One value.
+ a = oglemock.Return(17)
+ err = a.SetSignature(sig)
+ ExpectThat(err, Error(HasSubstr("given 1 val")))
+ ExpectThat(err, Error(HasSubstr("expected 2")))
+
+ // Two values.
+ a = oglemock.Return(17, "taco")
+ err = a.SetSignature(sig)
+ AssertEq(nil, err)
+
+ vals = a.Invoke([]interface{}{})
+ ExpectThat(vals, ElementsAre(IdenticalTo(int(17)), "taco"))
+}
+
+func (t *ReturnTest) Bool() {
+ sig := reflect.TypeOf(func() bool { return false })
+ cases := []returnTestCase{
+ // Identical types.
+ {bool(true), bool(true), ""},
+ {bool(false), bool(false), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Int() {
+ sig := reflect.TypeOf(func() int { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {int(math.MinInt32), int(math.MinInt32), ""},
+ {int(math.MaxInt32), int(math.MaxInt32), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Int8() {
+ sig := reflect.TypeOf(func() int8 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {int8(math.MinInt8), int8(math.MinInt8), ""},
+ {int8(math.MaxInt8), int8(math.MaxInt8), ""},
+
+ // In-range ints.
+ {int(math.MinInt8), int8(math.MinInt8), ""},
+ {int(math.MaxInt8), int8(math.MaxInt8), ""},
+
+ // Out of range ints.
+ {int(math.MinInt8 - 1), nil, "out of range"},
+ {int(math.MaxInt8 + 1), nil, "out of range"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Int16() {
+ sig := reflect.TypeOf(func() int16 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {int16(math.MinInt16), int16(math.MinInt16), ""},
+ {int16(math.MaxInt16), int16(math.MaxInt16), ""},
+
+ // In-range ints.
+ {int(math.MinInt16), int16(math.MinInt16), ""},
+ {int(math.MaxInt16), int16(math.MaxInt16), ""},
+
+ // Out of range ints.
+ {int(math.MinInt16 - 1), nil, "out of range"},
+ {int(math.MaxInt16 + 1), nil, "out of range"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int8(1), nil, "given int8"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Int32() {
+ sig := reflect.TypeOf(func() int32 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {int32(math.MinInt32), int32(math.MinInt32), ""},
+ {int32(math.MaxInt32), int32(math.MaxInt32), ""},
+
+ // Aliased version of type.
+ {rune(17), int32(17), ""},
+
+ // In-range ints.
+ {int(math.MinInt32), int32(math.MinInt32), ""},
+ {int(math.MaxInt32), int32(math.MaxInt32), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Rune() {
+ sig := reflect.TypeOf(func() rune { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {rune(math.MinInt32), rune(math.MinInt32), ""},
+ {rune(math.MaxInt32), rune(math.MaxInt32), ""},
+
+ // Aliased version of type.
+ {int32(17), rune(17), ""},
+
+ // In-range ints.
+ {int(math.MinInt32), rune(math.MinInt32), ""},
+ {int(math.MaxInt32), rune(math.MaxInt32), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Int64() {
+ sig := reflect.TypeOf(func() int64 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {int64(math.MinInt64), int64(math.MinInt64), ""},
+ {int64(math.MaxInt64), int64(math.MaxInt64), ""},
+
+ // In-range ints.
+ {int(math.MinInt32), int64(math.MinInt32), ""},
+ {int(math.MaxInt32), int64(math.MaxInt32), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Uint() {
+ sig := reflect.TypeOf(func() uint { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {uint(0), uint(0), ""},
+ {uint(math.MaxUint32), uint(math.MaxUint32), ""},
+
+ // In-range ints.
+ {int(0), uint(0), ""},
+ {int(math.MaxInt32), uint(math.MaxInt32), ""},
+
+ // Out of range ints.
+ {int(-1), nil, "out of range"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Uint8() {
+ sig := reflect.TypeOf(func() uint8 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {uint8(0), uint8(0), ""},
+ {uint8(math.MaxUint8), uint8(math.MaxUint8), ""},
+
+ // Aliased version of type.
+ {byte(17), uint8(17), ""},
+
+ // In-range ints.
+ {int(0), uint8(0), ""},
+ {int(math.MaxUint8), uint8(math.MaxUint8), ""},
+
+ // Out of range ints.
+ {int(-1), nil, "out of range"},
+ {int(math.MaxUint8 + 1), nil, "out of range"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Byte() {
+ sig := reflect.TypeOf(func() byte { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {byte(0), byte(0), ""},
+ {byte(math.MaxUint8), byte(math.MaxUint8), ""},
+
+ // Aliased version of type.
+ {uint8(17), byte(17), ""},
+
+ // In-range ints.
+ {int(0), byte(0), ""},
+ {int(math.MaxUint8), byte(math.MaxUint8), ""},
+
+ // Out of range ints.
+ {int(-1), nil, "out of range"},
+ {int(math.MaxUint8 + 1), nil, "out of range"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Uint16() {
+ sig := reflect.TypeOf(func() uint16 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {uint16(0), uint16(0), ""},
+ {uint16(math.MaxUint16), uint16(math.MaxUint16), ""},
+
+ // In-range ints.
+ {int(0), uint16(0), ""},
+ {int(math.MaxUint16), uint16(math.MaxUint16), ""},
+
+ // Out of range ints.
+ {int(-1), nil, "out of range"},
+ {int(math.MaxUint16 + 1), nil, "out of range"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Uint32() {
+ sig := reflect.TypeOf(func() uint32 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {uint32(0), uint32(0), ""},
+ {uint32(math.MaxUint32), uint32(math.MaxUint32), ""},
+
+ // In-range ints.
+ {int(0), uint32(0), ""},
+ {int(math.MaxInt32), uint32(math.MaxInt32), ""},
+
+ // Out of range ints.
+ {int(-1), nil, "out of range"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Uint64() {
+ sig := reflect.TypeOf(func() uint64 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {uint64(0), uint64(0), ""},
+ {uint64(math.MaxUint64), uint64(math.MaxUint64), ""},
+
+ // In-range ints.
+ {int(0), uint64(0), ""},
+ {int(math.MaxInt32), uint64(math.MaxInt32), ""},
+
+ // Out of range ints.
+ {int(-1), nil, "out of range"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Uintptr() {
+ sig := reflect.TypeOf(func() uintptr { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {uintptr(17), uintptr(17), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Float32() {
+ sig := reflect.TypeOf(func() float32 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {float32(-17.5), float32(-17.5), ""},
+ {float32(17.5), float32(17.5), ""},
+
+ // In-range ints.
+ {int(-17), float32(-17), ""},
+ {int(17), float32(17), ""},
+
+ // Float64s
+ {float64(-17.5), float32(-17.5), ""},
+ {float64(17.5), float32(17.5), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Float64() {
+ sig := reflect.TypeOf(func() float64 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {float64(-17.5), float64(-17.5), ""},
+ {float64(17.5), float64(17.5), ""},
+
+ // In-range ints.
+ {int(-17), float64(-17), ""},
+ {int(17), float64(17), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float32(1), nil, "given float32"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Complex64() {
+ sig := reflect.TypeOf(func() complex64 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {complex64(-17.5 - 1i), complex64(-17.5 - 1i), ""},
+ {complex64(17.5 + 1i), complex64(17.5 + 1i), ""},
+
+ // In-range ints.
+ {int(-17), complex64(-17), ""},
+ {int(17), complex64(17), ""},
+
+ // Float64s
+ {float64(-17.5), complex64(-17.5), ""},
+ {float64(17.5), complex64(17.5), ""},
+
+ // Complex128s
+ {complex128(-17.5 - 1i), complex64(-17.5 - 1i), ""},
+ {complex128(17.5 + 1i), complex64(17.5 + 1i), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float32(1), nil, "given float32"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Complex128() {
+ sig := reflect.TypeOf(func() complex128 { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {complex128(-17.5 - 1i), complex128(-17.5 - 1i), ""},
+ {complex128(17.5 + 1i), complex128(17.5 + 1i), ""},
+
+ // In-range ints.
+ {int(-17), complex128(-17), ""},
+ {int(17), complex128(17), ""},
+
+ // Float64s
+ {float64(-17.5), complex128(-17.5), ""},
+ {float64(17.5), complex128(17.5), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float32(1), nil, "given float32"},
+ {complex64(1), nil, "given complex64"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) ArrayOfInt() {
+ type namedElemType int
+
+ sig := reflect.TypeOf(func() [2]int { return [2]int{0, 0} })
+ cases := []returnTestCase{
+ // Identical types.
+ {[2]int{19, 23}, [2]int{19, 23}, ""},
+
+ // Wrong length.
+ {[1]int{17}, nil, "given [1]int"},
+
+ // Wrong element types.
+ {[2]namedElemType{19, 23}, nil, "given [2]oglemock_test.namedElemType"},
+ {[2]string{"", ""}, nil, "given [2]string"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) ChanOfInt() {
+ type namedElemType int
+ someChan := make(chan int)
+
+ sig := reflect.TypeOf(func() chan int { return nil })
+ cases := []returnTestCase{
+ // Identical types.
+ {someChan, someChan, ""},
+
+ // Nil values.
+ {(interface{})(nil), (chan int)(nil), ""},
+ {(chan int)(nil), (chan int)(nil), ""},
+
+ // Wrong element types.
+ {make(chan string), nil, "given chan string"},
+ {make(chan namedElemType), nil, "given chan oglemock_test.namedElemType"},
+
+ // Wrong direction
+ {(<-chan int)(someChan), nil, "given <-chan int"},
+ {(chan<- int)(someChan), nil, "given chan<- int"},
+
+ // Wrong types.
+ {(func())(nil), nil, "given func()"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) SendChanOfInt() {
+ type namedElemType int
+
+ someChan := make(chan<- int)
+ someBidirectionalChannel := make(chan int)
+
+ sig := reflect.TypeOf(func() chan<- int { return nil })
+ cases := []returnTestCase{
+ // Identical types.
+ {someChan, someChan, ""},
+
+ // Nil values.
+ {(interface{})(nil), (chan<- int)(nil), ""},
+ {(chan int)(nil), (chan<- int)(nil), ""},
+
+ // Bidirectional channel
+ {someBidirectionalChannel, (chan<- int)(someBidirectionalChannel), ""},
+
+ // Wrong direction
+ {(<-chan int)(someBidirectionalChannel), nil, "given <-chan int"},
+
+ // Wrong element types.
+ {make(chan string), nil, "given chan string"},
+ {make(chan namedElemType), nil, "given chan oglemock_test.namedElemType"},
+
+ // Wrong types.
+ {(func())(nil), nil, "given func()"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) RecvChanOfInt() {
+ type namedElemType int
+
+ someChan := make(<-chan int)
+ someBidirectionalChannel := make(chan int)
+
+ sig := reflect.TypeOf(func() <-chan int { return nil })
+ cases := []returnTestCase{
+ // Identical types.
+ {someChan, someChan, ""},
+
+ // Nil values.
+ {(interface{})(nil), (<-chan int)(nil), ""},
+ {(chan int)(nil), (<-chan int)(nil), ""},
+
+ // Bidirectional channel
+ {someBidirectionalChannel, (<-chan int)(someBidirectionalChannel), ""},
+
+ // Wrong direction
+ {(chan<- int)(someBidirectionalChannel), nil, "given chan<- int"},
+
+ // Wrong element types.
+ {make(chan string), nil, "given chan string"},
+ {make(chan namedElemType), nil, "given chan oglemock_test.namedElemType"},
+
+ // Wrong types.
+ {(func())(nil), nil, "given func()"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Func() {
+ someFunc := func(string) int { return 0 }
+
+ sig := reflect.TypeOf(func() func(string) int { return nil })
+ cases := []returnTestCase{
+ // Identical types.
+ {someFunc, someFunc, ""},
+
+ // Nil values.
+ {(interface{})(nil), (func(string) int)(nil), ""},
+ {(func(string) int)(nil), (func(string) int)(nil), ""},
+
+ // Wrong parameter and return types.
+ {func(int) int { return 0 }, nil, "given func(int) int"},
+ {func(string) string { return "" }, nil, "given func(string) string"},
+
+ // Wrong types.
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {(chan int)(nil), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Interface() {
+ sig := reflect.TypeOf(func() io.Reader { return nil })
+
+ someBuffer := new(bytes.Buffer)
+
+ cases := []returnTestCase{
+ // Type that implements interface.
+ {someBuffer, someBuffer, ""},
+
+ // Nil value.
+ {(interface{})(nil), (interface{})(nil), ""},
+
+ // Non-implementing types.
+ {(chan int)(nil), nil, "given chan int"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) MapFromStringToInt() {
+ type namedElemType string
+
+ someMap := make(map[string]int)
+
+ sig := reflect.TypeOf(func() map[string]int { return nil })
+ cases := []returnTestCase{
+ // Identical types.
+ {someMap, someMap, ""},
+
+ // Nil values.
+ {(interface{})(nil), (map[string]int)(nil), ""},
+ {(map[string]int)(nil), (map[string]int)(nil), ""},
+
+ // Wrong element types.
+ {make(map[int]int), nil, "given map[int]int"},
+ {make(map[namedElemType]int), nil, "given map[oglemock_test.namedElemType]int"},
+ {make(map[string]string), nil, "given map[string]string"},
+
+ // Wrong types.
+ {(func())(nil), nil, "given func()"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) PointerToString() {
+ type namedElemType string
+
+ someStr := ""
+
+ sig := reflect.TypeOf(func() *string { return nil })
+ cases := []returnTestCase{
+ // Identical types.
+ {(*string)(&someStr), (*string)(&someStr), ""},
+
+ // Nil values.
+ {(interface{})(nil), (*string)(nil), ""},
+ {(*string)(nil), (*string)(nil), ""},
+
+ // Wrong element types.
+ {&someInt, nil, "given *int"},
+
+ // Wrong types.
+ {(func())(nil), nil, "given func()"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {unsafe.Pointer(&someStr), nil, "given unsafe.Pointer"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) SliceOfInts() {
+ type namedElemType int
+
+ someSlice := make([]int, 1)
+
+ sig := reflect.TypeOf(func() []int { return nil })
+ cases := []returnTestCase{
+ // Identical types.
+ {someSlice, someSlice, ""},
+
+ // Nil values.
+ {(interface{})(nil), ([]int)(nil), ""},
+ {([]int)(nil), ([]int)(nil), ""},
+
+ // Wrong element types.
+ {make([]string, 1), nil, "given []string"},
+ {make([]namedElemType, 1), nil, "given []oglemock_test.namedElemType"},
+
+ // Wrong types.
+ {(func())(nil), nil, "given func()"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) String() {
+ sig := reflect.TypeOf(func() string { return "" })
+ cases := []returnTestCase{
+ // Identical types.
+ {string(""), string(""), ""},
+ {string("taco"), string("taco"), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) Struct() {
+ type myStruct struct {
+ a int
+ }
+
+ type otherStruct struct{}
+
+ sig := reflect.TypeOf(func() myStruct { return myStruct{0} })
+ cases := []returnTestCase{
+ // Identical types.
+ {myStruct{17}, myStruct{17}, ""},
+
+ // Wrong field types.
+ {otherStruct{}, nil, "given oglemock_test.otherStruct"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) UnsafePointer() {
+ someStr := ""
+
+ sig := reflect.TypeOf(func() unsafe.Pointer { return nil })
+ cases := []returnTestCase{
+ // Identical types.
+ {unsafe.Pointer(&someStr), unsafe.Pointer(&someStr), ""},
+
+ // Nil values.
+ {(interface{})(nil), unsafe.Pointer(nil), ""},
+ {unsafe.Pointer(nil), unsafe.Pointer(nil), ""},
+
+ // Wrong types.
+ {(func())(nil), nil, "given func()"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {(*string)(&someStr), nil, "given *string"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) UserDefinedNumericType() {
+ type myType int16
+
+ sig := reflect.TypeOf(func() myType { return 0 })
+ cases := []returnTestCase{
+ // Identical types.
+ {myType(math.MinInt16), myType(math.MinInt16), ""},
+ {myType(math.MaxInt16), myType(math.MaxInt16), ""},
+
+ // In-range ints.
+ {int(math.MinInt16), myType(math.MinInt16), ""},
+ {int(math.MaxInt16), myType(math.MaxInt16), ""},
+
+ // Out of range ints.
+ {int(math.MinInt16 - 1), nil, "out of range"},
+ {int(math.MaxInt16 + 1), nil, "out of range"},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int16(1), nil, "given int16"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
+
+func (t *ReturnTest) UserDefinedNonNumericType() {
+ type myType string
+
+ sig := reflect.TypeOf(func() myType { return "" })
+ cases := []returnTestCase{
+ // Identical types.
+ {myType("taco"), myType("taco"), ""},
+
+ // Wrong types.
+ {nil, nil, "given <nil>"},
+ {int(1), nil, "given int"},
+ {float64(1), nil, "given float64"},
+ {complex128(1), nil, "given complex128"},
+ {string(""), nil, "given string"},
+ {&someInt, nil, "given *int"},
+ {make(chan int), nil, "given chan int"},
+ }
+
+ t.runTestCases(sig, cases)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/sample/README.markdown b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/sample/README.markdown
new file mode 100644
index 00000000000..60d5d2cb1ab
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/sample/README.markdown
@@ -0,0 +1,6 @@
+This directory contains sample code generated with the `createmock` command. For
+example, the file `mock_io.go` can be regenerated with:
+
+ createmock io Reader > sample/mock_io/mock_io.go
+
+The files are also used by `integration_test.go`.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/sample/mock_io/mock_io.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/sample/mock_io/mock_io.go
new file mode 100644
index 00000000000..76e8f00056d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/sample/mock_io/mock_io.go
@@ -0,0 +1,71 @@
+// This file was auto-generated using createmock. See the following page for
+// more information:
+//
+// https://github.com/smartystreets/assertions/internal/oglemock
+//
+
+package mock_io
+
+import (
+ fmt "fmt"
+ oglemock "github.com/smartystreets/assertions/internal/oglemock"
+ io "io"
+ runtime "runtime"
+ unsafe "unsafe"
+)
+
+type MockReader interface {
+ io.Reader
+ oglemock.MockObject
+}
+
+type mockReader struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockReader(
+ c oglemock.Controller,
+ desc string) MockReader {
+ return &mockReader{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockReader) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockReader) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockReader) Read(p0 []uint8) (o0 int, o1 error) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Read",
+ file,
+ line,
+ []interface{}{p0})
+
+ if len(retVals) != 2 {
+ panic(fmt.Sprintf("mockReader.Read: invalid return values: %v", retVals))
+ }
+
+ // o0 int
+ if retVals[0] != nil {
+ o0 = retVals[0].(int)
+ }
+
+ // o1 error
+ if retVals[1] != nil {
+ o1 = retVals[1].(error)
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/save_arg.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/save_arg.go
new file mode 100644
index 00000000000..27cfcf6193b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/save_arg.go
@@ -0,0 +1,83 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// Create an Action that saves the argument at the given zero-based index to
+// the supplied destination, which must be a pointer to a type that is
+// assignable from the argument type.
+func SaveArg(index int, dst interface{}) Action {
+ return &saveArg{
+ index: index,
+ dstPointer: dst,
+ }
+}
+
+type saveArg struct {
+ index int
+ dstPointer interface{}
+
+ // Set by SetSignature.
+ dstValue reflect.Value
+}
+
+func (a *saveArg) SetSignature(signature reflect.Type) (err error) {
+ // Extract the source type.
+ if a.index >= signature.NumIn() {
+ err = fmt.Errorf(
+ "Out of range argument index %v for function type %v",
+ a.index,
+ signature)
+ return
+ }
+
+ srcType := signature.In(a.index)
+
+ // The destination must be a pointer.
+ v := reflect.ValueOf(a.dstPointer)
+ if v.Kind() != reflect.Ptr {
+ err = fmt.Errorf("Destination is %v, not a pointer", v.Kind())
+ return
+ }
+
+ // Dereference the pointer.
+ if v.IsNil() {
+ err = fmt.Errorf("Destination pointer must be non-nil")
+ return
+ }
+
+ a.dstValue = v.Elem()
+
+ // The destination must be assignable from the source.
+ if !srcType.AssignableTo(a.dstValue.Type()) {
+ err = fmt.Errorf(
+ "%v is not assignable to %v",
+ srcType,
+ a.dstValue.Type())
+ return
+ }
+
+ return
+}
+
+func (a *saveArg) Invoke(methodArgs []interface{}) (rets []interface{}) {
+ a.dstValue.Set(reflect.ValueOf(methodArgs[a.index]))
+ return
+}
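A short usage sketch for SaveArg (standalone, outside the vendored tree; the test file that follows covers the error cases), capturing the argument at index 1 into a local variable:

    package main

    import (
        "fmt"
        "reflect"

        "github.com/smartystreets/assertions/internal/oglemock"
    )

    func main() {
        var captured string

        // The destination must be a non-nil pointer assignable from the argument type.
        action := oglemock.SaveArg(1, &captured)

        sig := reflect.TypeOf(func(id int, name string) {})
        if err := action.SetSignature(sig); err != nil {
            panic(err)
        }

        // Invoking the action stores the argument at index 1 into the destination.
        action.Invoke([]interface{}{17, "taco"})
        fmt.Println(captured) // taco
    }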
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/save_arg_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/save_arg_test.go
new file mode 100644
index 00000000000..4051907e0dd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/oglemock/save_arg_test.go
@@ -0,0 +1,132 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglemock_test
+
+import (
+ "io"
+ "os"
+ "reflect"
+ "testing"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ "github.com/smartystreets/assertions/internal/oglemock"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+func TestSaveArg(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////
+// Boilerplate
+////////////////////////////////////////////////////////////
+
+type SaveArgTest struct {
+}
+
+func init() { RegisterTestSuite(&SaveArgTest{}) }
+
+////////////////////////////////////////////////////////////
+// Test functions
+////////////////////////////////////////////////////////////
+
+func (t *SaveArgTest) FunctionHasNoArguments() {
+ const index = 0
+ var dst int
+ f := func() (int, string) { return 0, "" }
+
+ err := oglemock.SaveArg(index, &dst).SetSignature(reflect.TypeOf(f))
+ ExpectThat(err, Error(HasSubstr("index 0")))
+ ExpectThat(err, Error(HasSubstr("Out of range")))
+ ExpectThat(err, Error(HasSubstr("func() (int, string)")))
+}
+
+func (t *SaveArgTest) ArgumentIndexOutOfRange() {
+ const index = 2
+ var dst int
+ f := func(a int, b int) {}
+
+ err := oglemock.SaveArg(index, &dst).SetSignature(reflect.TypeOf(f))
+ ExpectThat(err, Error(HasSubstr("index 2")))
+ ExpectThat(err, Error(HasSubstr("Out of range")))
+ ExpectThat(err, Error(HasSubstr("func(int, int)")))
+}
+
+func (t *SaveArgTest) DestinationIsLiteralNil() {
+ const index = 0
+ f := func(a int, b int) {}
+
+ err := oglemock.SaveArg(index, nil).SetSignature(reflect.TypeOf(f))
+ ExpectThat(err, Error(HasSubstr("not a pointer")))
+}
+
+func (t *SaveArgTest) DestinationIsNotAPointer() {
+ const index = 0
+ f := func(a int, b int) {}
+
+ err := oglemock.SaveArg(index, uint(17)).SetSignature(reflect.TypeOf(f))
+ ExpectThat(err, Error(HasSubstr("pointer")))
+ ExpectThat(err, Error(HasSubstr("uint")))
+}
+
+func (t *SaveArgTest) DestinationIsNilPointer() {
+ const index = 1
+ var dst *int
+ f := func(a int, b int) {}
+
+ err := oglemock.SaveArg(index, dst).SetSignature(reflect.TypeOf(f))
+ ExpectThat(err, Error(HasSubstr("pointer")))
+ ExpectThat(err, Error(HasSubstr("non-nil")))
+}
+
+func (t *SaveArgTest) DestinationNotAssignableFromSource() {
+ const index = 1
+ var dst int
+ f := func(a int, b string) {}
+
+ err := oglemock.SaveArg(index, &dst).SetSignature(reflect.TypeOf(f))
+ ExpectThat(err, Error(HasSubstr("int")))
+ ExpectThat(err, Error(HasSubstr("assignable")))
+ ExpectThat(err, Error(HasSubstr("string")))
+}
+
+func (t *SaveArgTest) ExactTypeMatch() {
+ const index = 1
+ var dst int
+ f := func(a int, b int) {}
+
+ action := oglemock.SaveArg(index, &dst)
+ AssertEq(nil, action.SetSignature(reflect.TypeOf(f)))
+
+ var a int = 17
+ var b int = 19
+ _ = action.Invoke([]interface{}{a, b})
+
+ ExpectEq(19, dst)
+}
+
+func (t *SaveArgTest) AssignableTypeMatch() {
+ const index = 1
+ var dst io.Reader
+ f := func(a int, b *os.File) {}
+
+ action := oglemock.SaveArg(index, &dst)
+ AssertEq(nil, action.SetSignature(reflect.TypeOf(f)))
+
+ var a int = 17
+ var b *os.File = os.Stdout
+ _ = action.Invoke([]interface{}{a, b})
+
+ ExpectEq(os.Stdout, dst)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/.gitignore b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/.gitignore
new file mode 100644
index 00000000000..dd8fc7468f4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/.gitignore
@@ -0,0 +1,5 @@
+*.6
+6.out
+_obj/
+_test/
+_testmain.go
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/.travis.yml b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/.travis.yml
new file mode 100644
index 00000000000..b97211926e8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/.travis.yml
@@ -0,0 +1,4 @@
+# Cf. http://docs.travis-ci.com/user/getting-started/
+# Cf. http://docs.travis-ci.com/user/languages/go/
+
+language: go
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/LICENSE b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/LICENSE
new file mode 100644
index 00000000000..d6456956733
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/README.md b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/README.md
new file mode 100644
index 00000000000..8e54862082b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/README.md
@@ -0,0 +1,151 @@
+[![GoDoc](https://godoc.org/github.com/smartystreets/assertions/internal/ogletest?status.svg)](https://godoc.org/github.com/smartystreets/assertions/internal/ogletest)
+
+`ogletest` is a unit testing framework for Go with the following features:
+
+ * An extensive and extensible set of matchers for expressing expectations.
+ * Automatic failure messages; no need to say `t.Errorf("Expected %v, got
+ %v"...)`.
+ * Clean, readable output that tells you exactly what you need to know.
+ * Built-in support for mocking through the [oglemock][] package.
+ * Style and semantics similar to [Google Test][googletest] and
+ [Google JS Test][google-js-test].
+
+It integrates with Go's built-in `testing` package, so it works with the
+`go test` command, and even with other types of tests within your package. Unlike
+the `testing` package, which offers only basic capabilities for signalling
+failures, it offers ways to express expectations and get nice failure messages
+automatically.
+
+
+Installation
+------------
+
+First, make sure you have installed Go 1.0.2 or newer. See
+[here][golang-install] for instructions.
+
+Use the following command to install `ogletest` and its dependencies, and to
+keep them up to date:
+
+ go get -u github.com/smartystreets/assertions/internal/ogletest
+
+
+Documentation
+-------------
+
+See [here][reference] for package documentation containing an exhaustive list of
+exported symbols. Alternatively, you can install the package and then use
+`godoc`:
+
+ godoc github.com/smartystreets/assertions/internal/ogletest
+
+An important part of `ogletest` is its use of matchers provided by the
+[oglematchers][matcher-reference] package. See that package's documentation
+for information on the built-in matchers available, and check out the
+`oglematchers.Matcher` interface if you want to define your own.
+
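+For instance, a minimal custom matcher might look like the following sketch
+(the `DivisibleBy` name and logic are purely illustrative, and the snippet
+assumes the `errors` and `fmt` packages plus an `oglematchers` import):
+
+```go
+// DivisibleBy returns a matcher for integers divisible by d.
+func DivisibleBy(d int) oglematchers.Matcher {
+  return &divisibleByMatcher{d}
+}
+
+type divisibleByMatcher struct {
+  d int
+}
+
+// Description appears after "Expected:" in failure output.
+func (m *divisibleByMatcher) Description() string {
+  return fmt.Sprintf("divisible by %d", m.d)
+}
+
+// Matches returns nil on a match; a non-empty error is appended to the
+// "Actual:" line as a ", which ..." clause.
+func (m *divisibleByMatcher) Matches(candidate interface{}) error {
+  c, ok := candidate.(int)
+  if !ok {
+    return errors.New("which is not an int")
+  }
+  if c%m.d != 0 {
+    return errors.New("")
+  }
+  return nil
+}
+```
+
+A test could then write `ogletest.ExpectThat(n, DivisibleBy(5))`.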
+
+Example
+-------
+
+Let's say you have a function in your package `people` with the following
+signature:
+
+```go
+// GetRandomPerson returns the name and phone number of Tony, Dennis, or Scott.
+func GetRandomPerson() (name, phone string) {
+ [...]
+}
+```
+
+A silly function, but it will do for an example. You can write a couple of tests
+for it as follows:
+
+```go
+package people
+
+import (
+ "github.com/smartystreets/assertions/internal/oglematchers"
+ "github.com/smartystreets/assertions/internal/ogletest"
+ "testing"
+)
+
+// Give ogletest a chance to run your tests when invoked by 'go test'.
+func TestOgletest(t *testing.T) { ogletest.RunTests(t) }
+
+// Create a test suite, which groups together logically related test methods
+// (defined below). You can share common setup and teardown code here; see the
+// package docs for more info.
+type PeopleTest struct {}
+func init() { ogletest.RegisterTestSuite(&PeopleTest{}) }
+
+func (t *PeopleTest) ReturnsCorrectNames() {
+ // Call the function a few times, and make sure it never strays from the set
+ // of expected names.
+ for i := 0; i < 25; i++ {
+ name, _ := GetRandomPerson()
+ ogletest.ExpectThat(name, oglematchers.AnyOf("Tony", "Dennis", "Scott"))
+ }
+}
+
+func (t *PeopleTest) FormatsPhoneNumbersCorrectly() {
+ // Call the function a few times, and make sure it returns phone numbers in a
+ // standard US format.
+ for i := 0; i < 25; i++ {
+ _, phone := GetRandomPerson()
+ ogletest.ExpectThat(phone, oglematchers.MatchesRegexp(`^\(\d{3}\) \d{3}-\d{4}$`))
+  }
+}
+```
+
+Note that test control functions (`RunTests`, `ExpectThat`, and so on) are part
+of the `ogletest` package, whereas built-in matchers (`AnyOf`, `MatchesRegexp`,
+and more) are part of the [oglematchers][matcher-reference] library. You can of
+course use dot imports so that you don't need to prefix each function with its
+package name:
+
+```go
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+```
+
+If you save the test in a file whose name ends in `_test.go`, you can run your
+tests by simply invoking the following in your package directory:
+
+ go test
+
+Here's what the failure output of `ogletest` looks like if your function's
+implementation is bad:
+
+ [----------] Running tests from PeopleTest
+ [ RUN ] PeopleTest.FormatsPhoneNumbersCorrectly
+ people_test.go:32:
+ Expected: matches regexp "^\(\d{3}\) \d{3}-\d{4}$"
+ Actual: +1 800 555 5555
+
+ [ FAILED ] PeopleTest.FormatsPhoneNumbersCorrectly
+ [ RUN ] PeopleTest.ReturnsCorrectNames
+ people_test.go:23:
+ Expected: or(Tony, Dennis, Scott)
+ Actual: Bart
+
+ [ FAILED ] PeopleTest.ReturnsCorrectNames
+ [----------] Finished with tests from PeopleTest
+
+And if the tests pass:
+
+ [----------] Running tests from PeopleTest
+ [ RUN ] PeopleTest.FormatsPhoneNumbersCorrectly
+ [ OK ] PeopleTest.FormatsPhoneNumbersCorrectly
+ [ RUN ] PeopleTest.ReturnsCorrectNames
+ [ OK ] PeopleTest.ReturnsCorrectNames
+ [----------] Finished with tests from PeopleTest
+
+
+[reference]: http://godoc.org/github.com/smartystreets/assertions/internal/ogletest
+[matcher-reference]: http://godoc.org/github.com/smartystreets/assertions/internal/oglematchers
+[golang-install]: http://golang.org/doc/install.html
+[googletest]: http://code.google.com/p/googletest/
+[google-js-test]: http://code.google.com/p/google-js-test/
+[howtowrite]: http://golang.org/doc/code.html
+[oglemock]: https://github.com/smartystreets/assertions/internal/oglemock
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/assert_aliases.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/assert_aliases.go
new file mode 100644
index 00000000000..70fa25c327a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/assert_aliases.go
@@ -0,0 +1,70 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+import (
+ "github.com/smartystreets/assertions/internal/oglematchers"
+)
+
+// AssertEq(e, a) is equivalent to AssertThat(a, oglematchers.Equals(e)).
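+//
+// An illustrative call site (note that the expected value comes first, and any
+// trailing error parts are treated as a Printf-style format string and args):
+//
+//     AssertEq(3, len(parts), "unexpected part count for %q", input)
+//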
+func AssertEq(expected, actual interface{}, errorParts ...interface{}) {
+ assertThat(
+ actual,
+ oglematchers.Equals(expected),
+ 1,
+ errorParts)
+}
+
+// AssertNe(e, a) is equivalent to
+// AssertThat(a, oglematchers.Not(oglematchers.Equals(e))).
+func AssertNe(expected, actual interface{}, errorParts ...interface{}) {
+ assertThat(
+ actual,
+ oglematchers.Not(oglematchers.Equals(expected)),
+ 1,
+ errorParts)
+}
+
+// AssertLt(x, y) is equivalent to AssertThat(x, oglematchers.LessThan(y)).
+func AssertLt(x, y interface{}, errorParts ...interface{}) {
+ assertThat(x, oglematchers.LessThan(y), 1, errorParts)
+}
+
+// AssertLe(x, y) is equivalent to AssertThat(x, oglematchers.LessOrEqual(y)).
+func AssertLe(x, y interface{}, errorParts ...interface{}) {
+ assertThat(x, oglematchers.LessOrEqual(y), 1, errorParts)
+}
+
+// AssertGt(x, y) is equivalent to AssertThat(x, oglematchers.GreaterThan(y)).
+func AssertGt(x, y interface{}, errorParts ...interface{}) {
+ assertThat(x, oglematchers.GreaterThan(y), 1, errorParts)
+}
+
+// AssertGe(x, y) is equivalent to
+// AssertThat(x, oglematchers.GreaterOrEqual(y)).
+func AssertGe(x, y interface{}, errorParts ...interface{}) {
+ assertThat(x, oglematchers.GreaterOrEqual(y), 1, errorParts)
+}
+
+// AssertTrue(b) is equivalent to AssertThat(b, oglematchers.Equals(true)).
+func AssertTrue(b interface{}, errorParts ...interface{}) {
+ assertThat(b, oglematchers.Equals(true), 1, errorParts)
+}
+
+// AssertFalse(b) is equivalent to AssertThat(b, oglematchers.Equals(false)).
+func AssertFalse(b interface{}, errorParts ...interface{}) {
+ assertThat(b, oglematchers.Equals(false), 1, errorParts)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/assert_that.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/assert_that.go
new file mode 100644
index 00000000000..65c8fbce052
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/assert_that.go
@@ -0,0 +1,46 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+import (
+ "github.com/smartystreets/assertions/internal/oglematchers"
+)
+
+func assertThat(
+ x interface{},
+ m oglematchers.Matcher,
+ depth int,
+ errorParts []interface{}) {
+ passed := expectThat(x, m, depth+1, errorParts)
+ if !passed {
+ AbortTest()
+ }
+}
+
+// AssertThat is identical to ExpectThat, except that in the event of failure
+// it halts the currently running test immediately. It is thus useful for
+// things like bounds checking:
+//
+// someSlice := [...]
+// AssertEq(1, len(someSlice)) // Protects next line from panicking.
+// ExpectEq("taco", someSlice[0])
+//
+func AssertThat(
+ x interface{},
+ m oglematchers.Matcher,
+ errorParts ...interface{}) {
+ assertThat(x, m, 1, errorParts)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/doc.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/doc.go
new file mode 100644
index 00000000000..bf6507fae4d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/doc.go
@@ -0,0 +1,51 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package ogletest provides a framework for writing expressive unit tests. It
+// integrates with the built-in testing package, so it works with the go test
+// command. Unlike the testing package, which offers only basic capabilities for
+// signalling failures, it offers ways to express expectations and get nice
+// failure messages automatically.
+//
+// For example:
+//
+// ////////////////////////////////////////////////////////////////////////
+// // testing package test
+// ////////////////////////////////////////////////////////////////////////
+//
+// someStr, err := ComputeSomeString()
+// if err != nil {
+// t.Errorf("ComputeSomeString: expected nil error, got %v", err)
+// }
+//
+//     if !strings.Contains(someStr, "foo") {
+// t.Errorf("ComputeSomeString: expected substring foo, got %v", someStr)
+// }
+//
+// ////////////////////////////////////////////////////////////////////////
+// // ogletest test
+// ////////////////////////////////////////////////////////////////////////
+//
+// someStr, err := ComputeSomeString()
+// ExpectEq(nil, err)
+//     ExpectThat(someStr, HasSubstr("foo"))
+//
+// Failure messages require no work from the user, and look like the following:
+//
+// foo_test.go:103:
+// Expected: has substring "foo"
+// Actual: "bar baz"
+//
+package ogletest
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_aliases.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_aliases.go
new file mode 100644
index 00000000000..5bc1dc120a7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_aliases.go
@@ -0,0 +1,64 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+import "github.com/smartystreets/assertions/internal/oglematchers"
+
+// ExpectEq(e, a) is equivalent to ExpectThat(a, oglematchers.Equals(e)).
+func ExpectEq(expected, actual interface{}, errorParts ...interface{}) {
+ expectThat(actual, oglematchers.Equals(expected), 1, errorParts)
+}
+
+// ExpectNe(e, a) is equivalent to
+// ExpectThat(a, oglematchers.Not(oglematchers.Equals(e))).
+func ExpectNe(expected, actual interface{}, errorParts ...interface{}) {
+ expectThat(
+ actual,
+ oglematchers.Not(oglematchers.Equals(expected)),
+ 1,
+ errorParts)
+}
+
+// ExpectLt(x, y) is equivalent to ExpectThat(x, oglematchers.LessThan(y)).
+func ExpectLt(x, y interface{}, errorParts ...interface{}) {
+ expectThat(x, oglematchers.LessThan(y), 1, errorParts)
+}
+
+// ExpectLe(x, y) is equivalent to ExpectThat(x, oglematchers.LessOrEqual(y)).
+func ExpectLe(x, y interface{}, errorParts ...interface{}) {
+ expectThat(x, oglematchers.LessOrEqual(y), 1, errorParts)
+}
+
+// ExpectGt(x, y) is equivalent to ExpectThat(x, oglematchers.GreaterThan(y)).
+func ExpectGt(x, y interface{}, errorParts ...interface{}) {
+ expectThat(x, oglematchers.GreaterThan(y), 1, errorParts)
+}
+
+// ExpectGe(x, y) is equivalent to
+// ExpectThat(x, oglematchers.GreaterOrEqual(y)).
+func ExpectGe(x, y interface{}, errorParts ...interface{}) {
+ expectThat(x, oglematchers.GreaterOrEqual(y), 1, errorParts)
+}
+
+// ExpectTrue(b) is equivalent to ExpectThat(b, oglematchers.Equals(true)).
+func ExpectTrue(b interface{}, errorParts ...interface{}) {
+ expectThat(b, oglematchers.Equals(true), 1, errorParts)
+}
+
+// ExpectFalse(b) is equivalent to ExpectThat(b, oglematchers.Equals(false)).
+func ExpectFalse(b interface{}, errorParts ...interface{}) {
+ expectThat(b, oglematchers.Equals(false), 1, errorParts)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_call.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_call.go
new file mode 100644
index 00000000000..b8bf542a8bc
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_call.go
@@ -0,0 +1,59 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+import (
+ "github.com/smartystreets/assertions/internal/oglemock"
+ "runtime"
+)
+
+// ExpectCall expresses an expectation that the method of the given name
+// should be called on the supplied mock object. It returns a function that
+// should be called with the expected arguments, matchers for the arguments,
+// or a mix of both.
+//
+// For example:
+//
+// mockWriter := [...]
+// ogletest.ExpectCall(mockWriter, "Write")(oglematchers.ElementsAre(0x1))
+// .WillOnce(oglemock.Return(1, nil))
+//
+// This is a shortcut for calling i.MockController.ExpectCall, where i is the
+// TestInfo struct for the currently-running test. Unlike that direct approach,
+// this function automatically sets the correct file name and line number for
+// the expectation.
+func ExpectCall(o oglemock.MockObject, method string) oglemock.PartialExpecation {
+ // Get information about the call site.
+ _, file, lineNumber, ok := runtime.Caller(1)
+ if !ok {
+ panic("ExpectCall: runtime.Caller")
+ }
+
+ // Grab the current test info.
+ info := currentlyRunningTest
+ if info == nil {
+ panic("ExpectCall: no test info.")
+ }
+
+ // Grab the mock controller.
+ controller := currentlyRunningTest.MockController
+ if controller == nil {
+ panic("ExpectCall: no mock controller.")
+ }
+
+ // Report the expectation.
+ return controller.ExpectCall(o, method, file, lineNumber)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_that.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_that.go
new file mode 100644
index 00000000000..69fc669785d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_that.go
@@ -0,0 +1,100 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+import (
+ "fmt"
+ "path"
+ "reflect"
+ "runtime"
+
+ "github.com/smartystreets/assertions/internal/oglematchers"
+)
+
+// ExpectThat confirms that the supplied matcher matches the value x, adding a
+// failure record to the currently running test if it does not. If additional
+// parameters are supplied, the first will be used as a format string for the
+// later ones, and the user-supplied error message will be added to the test
+// output in the event of a failure.
+//
+// For example:
+//
+// ExpectThat(userName, Equals("jacobsa"))
+// ExpectThat(users[i], Equals("jacobsa"), "while processing user %d", i)
+//
+func ExpectThat(
+ x interface{},
+ m oglematchers.Matcher,
+ errorParts ...interface{}) {
+ expectThat(x, m, 1, errorParts)
+}
+
+// The generalized form of ExpectThat. depth is the distance on the stack
+// between the caller's frame and the user's frame. Returns passed iff the
+// match succeeded.
+func expectThat(
+ x interface{},
+ m oglematchers.Matcher,
+ depth int,
+ errorParts []interface{}) (passed bool) {
+ // Check whether the value matches. If it does, we are finished.
+ matcherErr := m.Matches(x)
+ if matcherErr == nil {
+ passed = true
+ return
+ }
+
+ var r FailureRecord
+
+ // Get information about the call site.
+ var ok bool
+ if _, r.FileName, r.LineNumber, ok = runtime.Caller(depth + 1); !ok {
+ panic("expectThat: runtime.Caller")
+ }
+
+ r.FileName = path.Base(r.FileName)
+
+ // Create an appropriate failure message. Make sure that the expected and
+ // actual values align properly.
+ relativeClause := ""
+ if matcherErr.Error() != "" {
+ relativeClause = fmt.Sprintf(", %s", matcherErr.Error())
+ }
+
+ r.Error = fmt.Sprintf(
+ "Expected: %s\nActual: %v%s",
+ m.Description(),
+ x,
+ relativeClause)
+
+ // Add the user error, if any.
+ if len(errorParts) != 0 {
+ v := reflect.ValueOf(errorParts[0])
+ if v.Kind() != reflect.String {
+ panic(fmt.Sprintf("ExpectThat: invalid format string type %v", v.Kind()))
+ }
+
+ r.Error = fmt.Sprintf(
+ "%s\n%s",
+ r.Error,
+ fmt.Sprintf(v.String(), errorParts[1:]...))
+ }
+
+ // Report the failure.
+ AddFailureRecord(r)
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_that_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_that_test.go
new file mode 100644
index 00000000000..e3e37236700
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/expect_that_test.go
@@ -0,0 +1,168 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+import (
+ "errors"
+ "testing"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+// Set up a new test state with empty fields.
+func setUpCurrentTest() {
+ currentlyRunningTest = newTestInfo()
+}
+
+type fakeExpectThatMatcher struct {
+ desc string
+ err error
+}
+
+func (m *fakeExpectThatMatcher) Matches(c interface{}) error {
+ return m.err
+}
+
+func (m *fakeExpectThatMatcher) Description() string {
+ return m.desc
+}
+
+func assertEqInt(t *testing.T, e, c int) {
+ if e != c {
+ t.Fatalf("Expected %d, got %d", e, c)
+ }
+}
+
+func expectEqInt(t *testing.T, e, c int) {
+ if e != c {
+ t.Errorf("Expected %v, got %v", e, c)
+ }
+}
+
+func expectEqStr(t *testing.T, e, c string) {
+ if e != c {
+ t.Errorf("Expected %s, got %s", e, c)
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func TestNoCurrentTest(t *testing.T) {
+ panicked := false
+
+ defer func() {
+ if !panicked {
+ t.Errorf("Expected panic; got none.")
+ }
+ }()
+
+ defer func() {
+ if r := recover(); r != nil {
+ panicked = true
+ }
+ }()
+
+ currentlyRunningTest = nil
+ ExpectThat(17, Equals(19))
+}
+
+func TestNoFailure(t *testing.T) {
+ setUpCurrentTest()
+ matcher := &fakeExpectThatMatcher{"", nil}
+ ExpectThat(17, matcher)
+
+ assertEqInt(t, 0, len(currentlyRunningTest.failureRecords))
+}
+
+func TestInvalidFormatString(t *testing.T) {
+ panicked := false
+
+ defer func() {
+ if !panicked {
+ t.Errorf("Expected panic; got none.")
+ }
+ }()
+
+ defer func() {
+ if r := recover(); r != nil {
+ panicked = true
+ }
+ }()
+
+ setUpCurrentTest()
+ matcher := &fakeExpectThatMatcher{"", errors.New("")}
+ ExpectThat(17, matcher, 19, "blah")
+}
+
+func TestNoMatchWithoutErrorText(t *testing.T) {
+ setUpCurrentTest()
+ matcher := &fakeExpectThatMatcher{"taco", errors.New("")}
+ ExpectThat(17, matcher)
+
+ assertEqInt(t, 1, len(currentlyRunningTest.failureRecords))
+
+ record := currentlyRunningTest.failureRecords[0]
+ expectEqStr(t, "expect_that_test.go", record.FileName)
+ expectEqInt(t, 119, record.LineNumber)
+ expectEqStr(t, "Expected: taco\nActual: 17", record.Error)
+}
+
+func TestNoMatchWithErrorText(t *testing.T) {
+ setUpCurrentTest()
+ matcher := &fakeExpectThatMatcher{"taco", errors.New("which is foo")}
+ ExpectThat(17, matcher)
+
+ assertEqInt(t, 1, len(currentlyRunningTest.failureRecords))
+ record := currentlyRunningTest.failureRecords[0]
+
+ expectEqStr(
+ t,
+ "Expected: taco\nActual: 17, which is foo",
+ record.Error)
+}
+
+func TestFailureWithUserMessage(t *testing.T) {
+ setUpCurrentTest()
+ matcher := &fakeExpectThatMatcher{"taco", errors.New("")}
+ ExpectThat(17, matcher, "Asd: %d %s", 19, "taco")
+
+ assertEqInt(t, 1, len(currentlyRunningTest.failureRecords))
+ record := currentlyRunningTest.failureRecords[0]
+
+ expectEqStr(t, "Expected: taco\nActual: 17\nAsd: 19 taco", record.Error)
+}
+
+func TestAdditionalFailure(t *testing.T) {
+ setUpCurrentTest()
+ matcher := &fakeExpectThatMatcher{"", errors.New("")}
+
+ // Fail twice.
+ ExpectThat(17, matcher, "taco")
+ ExpectThat(19, matcher, "burrito")
+
+ assertEqInt(t, 2, len(currentlyRunningTest.failureRecords))
+ record1 := currentlyRunningTest.failureRecords[0]
+ record2 := currentlyRunningTest.failureRecords[1]
+
+ expectEqStr(t, "Expected: \nActual: 17\ntaco", record1.Error)
+ expectEqStr(t, "Expected: \nActual: 19\nburrito", record2.Error)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/failure.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/failure.go
new file mode 100644
index 00000000000..95be2cf4502
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/failure.go
@@ -0,0 +1,90 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+import (
+ "fmt"
+ "path"
+ "runtime"
+)
+
+// FailureRecord represents a single failed expectation or assertion for a
+// test. Most users don't want to interact with these directly; they are
+// generated implicitly using ExpectThat, AssertThat, ExpectLt, etc.
+type FailureRecord struct {
+ // The file name within which the expectation failed, e.g. "foo_test.go".
+ FileName string
+
+ // The line number at which the expectation failed.
+ LineNumber int
+
+ // The error associated with the file:line pair above. For example, the
+ // following expectation:
+ //
+	//     ExpectEq(17, "taco")
+	//
+	// may cause this error:
+ //
+ // Expected: 17
+ // Actual: "taco", which is not numeric
+ //
+ Error string
+}
+
+// Record a failure for the currently running test (and continue running it).
+// Most users will want to use ExpectThat, ExpectEq, etc. instead of this
+// function. Those that do want to report arbitrary errors will probably be
+// satisfied with AddFailure, which is easier to use.
+func AddFailureRecord(r FailureRecord) {
+ currentlyRunningTest.mu.Lock()
+ defer currentlyRunningTest.mu.Unlock()
+
+ currentlyRunningTest.failureRecords = append(
+ currentlyRunningTest.failureRecords,
+ r)
+}
+
+// Call AddFailureRecord with a record whose file name and line number come
+// from the caller of this function, and whose error string is created by
+// calling fmt.Sprintf using the arguments to this function.
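+//
+// For example (an illustrative call site, not part of this package):
+//
+//     AddFailure("Expected %d rows, got %d", wantRows, gotRows)
+//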
+func AddFailure(format string, a ...interface{}) {
+ r := FailureRecord{
+ Error: fmt.Sprintf(format, a...),
+ }
+
+ // Get information about the call site.
+ var ok bool
+ if _, r.FileName, r.LineNumber, ok = runtime.Caller(1); !ok {
+ panic("Can't find caller")
+ }
+
+ r.FileName = path.Base(r.FileName)
+
+ AddFailureRecord(r)
+}
+
+// A sentinel type that is used in a conspiracy between AbortTest and runTests.
+// If runTests sees an abortError as the value given to a panic() call, it will
+// avoid printing the panic error.
+type abortError struct {
+}
+
+// Immediately stop executing the running test, causing it to fail with the
+// failures previously recorded. Behavior is undefined if no failures have been
+// recorded.
+func AbortTest() {
+ panic(abortError{})
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/integration_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/integration_test.go
new file mode 100644
index 00000000000..ec45184e326
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/integration_test.go
@@ -0,0 +1,265 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest_test
+
+import (
+ "errors"
+ "flag"
+ "fmt"
+ "go/build"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path"
+ "regexp"
+ "strings"
+ "syscall"
+ "testing"
+)
+
+const ogletestPkg = "github.com/smartystreets/assertions/internal/ogletest"
+
+var dumpNew = flag.Bool("dump_new", false, "Dump new golden files.")
+var objDir string
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+// Install the possibly locally-modified copy of ogletest, so that these
+// integration tests run using the package currently being worked on by the
+// programmer. Also install other dependencies needed by the test cases, so
+// that `go test` complaining about non-up-to-date packages doesn't make it
+// into the golden files.
+func installLocalPackages() error {
+ cmd := exec.Command(
+ "go",
+ "install",
+ ogletestPkg,
+ "github.com/smartystreets/assertions/internal/oglemock",
+ "github.com/smartystreets/assertions/internal/ogletest/test_cases/mock_image")
+
+ output, err := cmd.CombinedOutput()
+
+ if err != nil {
+ return errors.New(fmt.Sprintf("%v:\n%s", err, output))
+ }
+
+ return nil
+}
+
+// getCaseNames looks for integration test cases as files in the test_cases
+// directory.
+func getCaseNames() ([]string, error) {
+ // Open the test cases directory.
+ dir, err := os.Open("test_cases")
+ if err != nil {
+ return nil, errors.New(fmt.Sprintf("Opening dir: %v", err))
+ }
+
+ // Get a list of the names in the directory.
+ names, err := dir.Readdirnames(0)
+ if err != nil {
+ return nil, errors.New(fmt.Sprintf("Readdirnames: %v", err))
+ }
+
+ // Filter the names.
+ result := make([]string, len(names))
+ resultLen := 0
+ for _, name := range names {
+ // Skip golden files and hidden files.
+ if strings.HasPrefix(name, "golden.") || strings.HasPrefix(name, ".") {
+ continue
+ }
+
+ // Check for the right format.
+ if !strings.HasSuffix(name, ".test.go") {
+ continue
+ }
+
+ // Store the name minus the extension.
+ result[resultLen] = name[:len(name)-8]
+ resultLen++
+ }
+
+ return result[:resultLen], nil
+}
+
+func writeContentsToFileOrDie(contents []byte, path string) {
+ if err := ioutil.WriteFile(path, contents, 0600); err != nil {
+ panic("ioutil.WriteFile: " + err.Error())
+ }
+}
+
+func readFileOrDie(path string) []byte {
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ panic("ioutil.ReadFile: " + err.Error())
+ }
+
+ return contents
+}
+
+// cleanOutput transforms the supplied output so that it no longer contains
+// information that changes from run to run, making the golden tests less
+// flaky.
+func cleanOutput(o []byte, testPkg string) []byte {
+ // Replace references to the last component of the test package name, which
+ // contains a unique number.
+ o = []byte(strings.Replace(string(o), path.Base(testPkg), "somepkg", -1))
+
+ // Replace things that look like line numbers and process counters in stack
+ // traces.
+ stackFrameRe := regexp.MustCompile(`\t\S+\.(c|go):\d+`)
+ o = stackFrameRe.ReplaceAll(o, []byte("\tsome_file.txt:0"))
+
+ // Replace full paths in failure messages with fake paths.
+ pathRe := regexp.MustCompile(`/\S+/(\w+\.(?:go|s):\d+)`)
+ o = pathRe.ReplaceAll(o, []byte("/some/path/$1"))
+
+ // Replace unstable timings in gotest fail messages.
+ timingRe1 := regexp.MustCompile(`--- FAIL: .* \(\d\.\d{2}s\)`)
+ o = timingRe1.ReplaceAll(o, []byte("--- FAIL: TestSomething (1.23s)"))
+
+ timingRe2 := regexp.MustCompile(`FAIL.*somepkg\s*\d\.\d{2,}s`)
+ o = timingRe2.ReplaceAll(o, []byte("FAIL somepkg 1.234s"))
+
+ timingRe3 := regexp.MustCompile(`ok.*somepkg\s*\d\.\d{2,}s`)
+ o = timingRe3.ReplaceAll(o, []byte("ok somepkg 1.234s"))
+
+ timingRe4 := regexp.MustCompile(`SlowTest \([0-9.]+ms\)`)
+ o = timingRe4.ReplaceAll(o, []byte("SlowTest (1234ms)"))
+
+ return o
+}
+
+// Create a temporary package directory somewhere that 'go test' can find, and
+// return the directory and package name.
+func createTempPackageDir(caseName string) (dir, pkg string) {
+ // Figure out where the local source code for ogletest is.
+ buildPkg, err := build.Import(ogletestPkg, "", build.FindOnly)
+ if err != nil {
+ panic("Finding ogletest tree: " + err.Error())
+ }
+
+ // Create a temporary directory underneath this.
+ ogletestPkgDir := buildPkg.Dir
+ prefix := fmt.Sprintf("tmp-%s-", caseName)
+
+ dir, err = ioutil.TempDir(ogletestPkgDir, prefix)
+ if err != nil {
+ panic("ioutil.TempDir: " + err.Error())
+ }
+
+ pkg = path.Join("github.com/smartystreets/assertions/internal/ogletest", dir[len(ogletestPkgDir):])
+ return
+}
+
+// runTestCase runs the case with the supplied name (e.g. "passing"), and
+// returns its output and exit code.
+func runTestCase(name string) ([]byte, int, error) {
+ // Create a temporary directory for the test files.
+ testDir, testPkg := createTempPackageDir(name)
+ defer os.RemoveAll(testDir)
+
+ // Create the test source file.
+ sourceFile := name + ".test.go"
+ testContents := readFileOrDie(path.Join("test_cases", sourceFile))
+ writeContentsToFileOrDie(testContents, path.Join(testDir, name+"_test.go"))
+
+ // Invoke 'go test'. Use the package directory as working dir instead of
+ // giving the package name as an argument so that 'go test' prints passing
+ // test output. Special case: pass a test filter to the filtered case.
+ cmd := exec.Command("go", "test")
+ if name == "filtered" {
+ cmd.Args = append(cmd.Args, "--ogletest.run=Test(Bar|Baz)")
+ }
+
+ cmd.Dir = testDir
+ output, err := cmd.CombinedOutput()
+
+ // Clean up the process's output.
+ output = cleanOutput(output, testPkg)
+
+	// Did the process exit with a zero exit code?
+ if err == nil {
+ return output, 0, nil
+ }
+
+ // Make sure the process actually exited.
+ exitError, ok := err.(*exec.ExitError)
+ if !ok || !exitError.Exited() {
+ return nil, 0, errors.New("exec.Command.Output: " + err.Error())
+ }
+
+ return output, exitError.Sys().(syscall.WaitStatus).ExitStatus(), nil
+}
+
+// checkAgainstGoldenFile checks the supplied actual output for the named test
+// case against the golden file for that case. If requested by the user, it
+// rewrites the golden file on failure.
+func checkAgainstGoldenFile(caseName string, output []byte) bool {
+ goldenFile := path.Join("test_cases", "golden."+caseName+"_test")
+ goldenContents := readFileOrDie(goldenFile)
+
+ result := string(output) == string(goldenContents)
+ if !result && *dumpNew {
+ writeContentsToFileOrDie(output, goldenFile)
+ }
+
+ return result
+}
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func TestGoldenFiles(t *testing.T) {
+ // Ensure the local package is installed. This will prevent the test cases
+ // from using the installed version, which may be out of date.
+ err := installLocalPackages()
+ if err != nil {
+ t.Fatalf("Error installing local ogletest: %v", err)
+ }
+
+ // We expect there to be at least one case.
+ caseNames, err := getCaseNames()
+ if err != nil || len(caseNames) == 0 {
+ t.Fatalf("Error getting cases: %v", err)
+ }
+
+ // Run each test case.
+ for _, caseName := range caseNames {
+ // Run the test case.
+ output, exitCode, err := runTestCase(caseName)
+ if err != nil {
+ t.Fatalf("Running test case %s: %v", caseName, err)
+ }
+
+ // Check the status code. We assume all test cases fail except for the
+ // passing one.
+ shouldPass := (caseName == "passing" || caseName == "no_cases")
+ didPass := exitCode == 0
+ if shouldPass != didPass {
+ t.Errorf("Bad exit code for test case %s: %d", caseName, exitCode)
+ }
+
+ // Check the output against the golden file.
+ if !checkAgainstGoldenFile(caseName, output) {
+ t.Errorf("Output for test case %s doesn't match golden file.", caseName)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/register.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/register.go
new file mode 100644
index 00000000000..756f2aa9ad9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/register.go
@@ -0,0 +1,86 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+// The input to ogletest.Register. Most users will want to use
+// ogletest.RegisterTestSuite.
+//
+// A test suite is the basic unit of registration in ogletest. It consists of
+// zero or more named test functions which will be run in sequence, along with
+// optional setup and tear-down functions.
+type TestSuite struct {
+ // The name of the overall suite, e.g. "MyPackageTest".
+ Name string
+
+ // If non-nil, a function that will be run exactly once, before any of the
+ // test functions are run.
+ SetUp func()
+
+ // The test functions comprising this suite.
+ TestFunctions []TestFunction
+
+ // If non-nil, a function that will be run exactly once, after all of the
+ // test functions have run.
+ TearDown func()
+}
+
+type TestFunction struct {
+ // The name of this test function, relative to the suite in which it resides.
+ // If the name is "TweaksFrobnicator", then the function might be presented
+ // in the ogletest UI as "FooTest.TweaksFrobnicator".
+ Name string
+
+ // If non-nil, a function that is run before Run, passed a pointer to a
+ // struct containing information about the test run.
+ SetUp func(*TestInfo)
+
+ // The function to invoke for the test body. Must be non-nil. Will not be run
+ // if SetUp panics.
+ Run func()
+
+ // If non-nil, a function that is run after Run.
+ TearDown func()
+}
+
+// Register a test suite for execution by RunTests.
+//
+// This is the most general registration mechanism. Most users will want
+// RegisterTestSuite, which is a wrapper around this function that requires
+// less boilerplate.
+//
+// Panics on invalid input.
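+//
+// A minimal direct registration (an illustrative sketch; most users should
+// instead use RegisterTestSuite) might look like:
+//
+//     ogletest.Register(ogletest.TestSuite{
+//         Name: "WidgetTest",
+//         TestFunctions: []ogletest.TestFunction{
+//             {Name: "DoesNothing", Run: func() {}},
+//         },
+//     })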
+func Register(suite TestSuite) {
+ // Make sure the suite is legal.
+ if suite.Name == "" {
+ panic("Test suites must have names.")
+ }
+
+ for _, tf := range suite.TestFunctions {
+ if tf.Name == "" {
+ panic("Test functions must have names.")
+ }
+
+ if tf.Run == nil {
+ panic("Test functions must have non-nil run fields.")
+ }
+ }
+
+ // Save the suite for later.
+ registeredSuites = append(registeredSuites, suite)
+}
+
+// The list of test suites previously registered.
+var registeredSuites []TestSuite
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/register_test_suite.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/register_test_suite.go
new file mode 100644
index 00000000000..7303dfa8a6e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/register_test_suite.go
@@ -0,0 +1,193 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/smartystreets/assertions/internal/ogletest/srcutil"
+)
+
+// Test suites that implement this interface have special meaning to
+// RegisterTestSuite.
+type SetUpTestSuiteInterface interface {
+ // This method will be called exactly once, before the first test method is
+ // run. The receiver of this method will be a zero value of the test suite
+ // type, and is not shared with any other methods. Use this method to set up
+ // any necessary global state shared by all of the test methods.
+ SetUpTestSuite()
+}
+
+// Test suites that implement this interface have special meaning to
+// RegisterTestSuite.
+type TearDownTestSuiteInterface interface {
+ // This method will be called exactly once, after the last test method is
+ // run. The receiver of this method will be a zero value of the test suite
+ // type, and is not shared with any other methods. Use this method to clean
+ // up after any necessary global state shared by all of the test methods.
+ TearDownTestSuite()
+}
+
+// Test suites that implement this interface have special meaning to
+// Register.
+type SetUpInterface interface {
+ // This method is called before each test method is invoked, with the same
+ // receiver as that test method. At the time this method is invoked, the
+ // receiver is a zero value for the test suite type. Use this method for
+ // common setup code that works on data not shared across tests.
+ SetUp(*TestInfo)
+}
+
+// Test suites that implement this interface have special meaning to
+// Register.
+type TearDownInterface interface {
+ // This method is called after each test method is invoked, with the same
+ // receiver as that test method. Use this method for common cleanup code that
+ // works on data not shared across tests.
+ TearDown()
+}
+
+// RegisterTestSuite tells ogletest about a test suite containing tests that it
+// should run. Any exported method on the type pointed to by the supplied
+// prototype value will be treated as a test method, with the exception of the
+// methods defined by the following interfaces, which when present are treated
+// as described in the documentation for those interfaces:
+//
+// * SetUpTestSuiteInterface
+// * SetUpInterface
+// * TearDownInterface
+// * TearDownTestSuiteInterface
+//
+// Each test method is invoked on a different receiver, which is initially a
+// zero value of the test suite type.
+//
+// Example:
+//
+// // Some value that is needed by the tests but is expensive to compute.
+// var someExpensiveThing uint
+//
+// type FooTest struct {
+// // Path to a temporary file used by the tests. Each test gets a
+// // different temporary file.
+// tempFile string
+// }
+// func init() { ogletest.RegisterTestSuite(&FooTest{}) }
+//
+// func (t *FooTest) SetUpTestSuite() {
+// someExpensiveThing = ComputeSomeExpensiveThing()
+// }
+//
+// func (t *FooTest) SetUp(ti *ogletest.TestInfo) {
+// t.tempFile = CreateTempFile()
+// }
+//
+// func (t *FooTest) TearDown() {
+// DeleteTempFile(t.tempFile)
+// }
+//
+// func (t *FooTest) FrobinicatorIsSuccessfullyTweaked() {
+// res := DoSomethingWithExpensiveThing(someExpensiveThing, t.tempFile)
+// ExpectThat(res, Equals(true))
+// }
+//
+func RegisterTestSuite(p interface{}) {
+ if p == nil {
+ panic("RegisterTestSuite called with nil suite.")
+ }
+
+ val := reflect.ValueOf(p)
+ typ := val.Type()
+ var zeroInstance reflect.Value
+
+ // We will transform to a TestSuite struct.
+ suite := TestSuite{}
+ suite.Name = typ.Elem().Name()
+
+ zeroInstance = reflect.New(typ.Elem())
+ if i, ok := zeroInstance.Interface().(SetUpTestSuiteInterface); ok {
+ suite.SetUp = func() { i.SetUpTestSuite() }
+ }
+
+ zeroInstance = reflect.New(typ.Elem())
+ if i, ok := zeroInstance.Interface().(TearDownTestSuiteInterface); ok {
+ suite.TearDown = func() { i.TearDownTestSuite() }
+ }
+
+ // Transform a list of test methods for the suite, filtering them to just the
+ // ones that we don't need to skip.
+ for _, method := range filterMethods(suite.Name, srcutil.GetMethodsInSourceOrder(typ)) {
+ var tf TestFunction
+ tf.Name = method.Name
+
+ // Create an instance to be operated on by all of the TestFunction's
+ // internal functions.
+ instance := reflect.New(typ.Elem())
+
+ // Bind the functions to the instance.
+ if i, ok := instance.Interface().(SetUpInterface); ok {
+ tf.SetUp = func(ti *TestInfo) { i.SetUp(ti) }
+ }
+
+ methodCopy := method
+ tf.Run = func() { runTestMethod(instance, methodCopy) }
+
+ if i, ok := instance.Interface().(TearDownInterface); ok {
+ tf.TearDown = func() { i.TearDown() }
+ }
+
+ // Save the TestFunction.
+ suite.TestFunctions = append(suite.TestFunctions, tf)
+ }
+
+ // Register the suite.
+ Register(suite)
+}
+
+func runTestMethod(suite reflect.Value, method reflect.Method) {
+ if method.Func.Type().NumIn() != 1 {
+ panic(fmt.Sprintf(
+			"%s: expected 1 arg, actually %d.",
+ method.Name,
+ method.Func.Type().NumIn()))
+ }
+
+ method.Func.Call([]reflect.Value{suite})
+}
+
+func filterMethods(suiteName string, in []reflect.Method) (out []reflect.Method) {
+ for _, m := range in {
+ // Skip set up, tear down, and unexported methods.
+ if isSpecialMethod(m.Name) || !isExportedMethod(m.Name) {
+ continue
+ }
+
+ out = append(out, m)
+ }
+
+ return
+}
+
+func isSpecialMethod(name string) bool {
+ return (name == "SetUpTestSuite") ||
+ (name == "TearDownTestSuite") ||
+ (name == "SetUp") ||
+ (name == "TearDown")
+}
+
+func isExportedMethod(name string) bool {
+ return len(name) > 0 && name[0] >= 'A' && name[0] <= 'Z'
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/run_tests.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/run_tests.go
new file mode 100644
index 00000000000..003aeb019de
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/run_tests.go
@@ -0,0 +1,354 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+import (
+ "bytes"
+ "flag"
+ "fmt"
+ "os"
+ "path"
+ "regexp"
+ "runtime"
+ "sync"
+ "sync/atomic"
+ "testing"
+ "time"
+
+ "github.com/smartystreets/assertions/internal/reqtrace"
+)
+
+var fTestFilter = flag.String(
+ "ogletest.run",
+ "",
+ "Regexp for matching tests to run.")
+
+var fStopEarly = flag.Bool(
+ "ogletest.stop_early",
+ false,
+ "If true, stop after the first failure.")
+
+// runTestsOnce protects RunTests from executing multiple times.
+var runTestsOnce sync.Once
+
+func isAbortError(x interface{}) bool {
+ _, ok := x.(abortError)
+ return ok
+}
+
+// Run a single test function, returning a slice of failure records.
+func runTestFunction(tf TestFunction) (failures []FailureRecord) {
+ // Set up a clean slate for this test. Make sure to reset it after everything
+ // below is finished, so we don't accidentally use it elsewhere.
+ currentlyRunningTest = newTestInfo()
+ defer func() {
+ currentlyRunningTest = nil
+ }()
+
+ ti := currentlyRunningTest
+
+ // Start a trace.
+ var reportOutcome reqtrace.ReportFunc
+ ti.Ctx, reportOutcome = reqtrace.Trace(ti.Ctx, tf.Name)
+
+ // Run the SetUp function, if any, paying attention to whether it panics.
+ setUpPanicked := false
+ if tf.SetUp != nil {
+ setUpPanicked = runWithProtection(func() { tf.SetUp(ti) })
+ }
+
+ // Run the test function itself, but only if the SetUp function didn't panic.
+ // (This includes AssertThat errors.)
+ if !setUpPanicked {
+ runWithProtection(tf.Run)
+ }
+
+ // Run the TearDown function, if any.
+ if tf.TearDown != nil {
+ runWithProtection(tf.TearDown)
+ }
+
+ // Tell the mock controller for the tests to report any errors it's sitting
+ // on.
+ ti.MockController.Finish()
+
+ // Report the outcome to reqtrace.
+ if len(ti.failureRecords) == 0 {
+ reportOutcome(nil)
+ } else {
+ reportOutcome(fmt.Errorf("%v failure records", len(ti.failureRecords)))
+ }
+
+ return ti.failureRecords
+}
+
+// Run everything registered with Register (including via the wrapper
+// RegisterTestSuite).
+//
+// Failures are communicated to the supplied testing.T object. This is the
+// bridge between ogletest and the testing package (and `go test`); you should
+// ensure that it's called at least once by creating a test function compatible
+// with `go test` and calling it there.
+//
+// For example:
+//
+// import (
+// "github.com/smartystreets/assertions/internal/ogletest"
+// "testing"
+// )
+//
+// func TestOgletest(t *testing.T) {
+// ogletest.RunTests(t)
+// }
+//
+func RunTests(t *testing.T) {
+ runTestsOnce.Do(func() { runTestsInternal(t) })
+}
+
+// Signalling between RunTests and StopRunningTests.
+var gStopRunning uint64
+
+// Request that RunTests stop what it's doing. After the currently running test
+// is finished, including tear-down, the program will exit with an error code.
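+//
+// For example, a test binary might wire this to SIGINT (an illustrative
+// sketch; assumes the os and os/signal packages):
+//
+//     go func() {
+//         c := make(chan os.Signal, 1)
+//         signal.Notify(c, os.Interrupt)
+//         <-c
+//         ogletest.StopRunningTests()
+//     }()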
+func StopRunningTests() {
+ atomic.StoreUint64(&gStopRunning, 1)
+}
+
+// runTestsInternal does the real work of RunTests, which simply wraps it in a
+// sync.Once.
+func runTestsInternal(t *testing.T) {
+ // Process each registered suite.
+ for _, suite := range registeredSuites {
+ // Stop now if we've already seen a failure and we've been told to stop
+ // early.
+ if t.Failed() && *fStopEarly {
+ break
+ }
+
+ // Print a banner.
+ fmt.Printf("[----------] Running tests from %s\n", suite.Name)
+
+ // Run the SetUp function, if any.
+ if suite.SetUp != nil {
+ suite.SetUp()
+ }
+
+ // Run each test function that the user has not told us to skip.
+ stoppedEarly := false
+ for _, tf := range filterTestFunctions(suite) {
+ // Did the user request that we stop running tests? If so, skip the rest
+ // of this suite (and exit after tearing it down).
+ if atomic.LoadUint64(&gStopRunning) != 0 {
+ stoppedEarly = true
+ break
+ }
+
+ // Print a banner for the start of this test function.
+ fmt.Printf("[ RUN ] %s.%s\n", suite.Name, tf.Name)
+
+ // Run the test function.
+ startTime := time.Now()
+ failures := runTestFunction(tf)
+ runDuration := time.Since(startTime)
+
+ // Print any failures, and mark the test as having failed if there are any.
+ for _, record := range failures {
+ t.Fail()
+ fmt.Printf(
+ "%s:%d:\n%s\n\n",
+ record.FileName,
+ record.LineNumber,
+ record.Error)
+ }
+
+ // Print a banner for the end of the test.
+ bannerMessage := "[ OK ]"
+ if len(failures) != 0 {
+ bannerMessage = "[ FAILED ]"
+ }
+
+ // Print a summary of the time taken, if long enough.
+ var timeMessage string
+ if runDuration >= 25*time.Millisecond {
+ timeMessage = fmt.Sprintf(" (%s)", runDuration.String())
+ }
+
+ fmt.Printf(
+ "%s %s.%s%s\n",
+ bannerMessage,
+ suite.Name,
+ tf.Name,
+ timeMessage)
+
+ // Stop running tests from this suite if we've been told to stop early
+ // and this test failed.
+ if t.Failed() && *fStopEarly {
+ break
+ }
+ }
+
+ // Run the suite's TearDown function, if any.
+ if suite.TearDown != nil {
+ suite.TearDown()
+ }
+
+ // Were we told to exit early?
+ if stoppedEarly {
+ fmt.Println("Exiting early due to user request.")
+ os.Exit(1)
+ }
+
+ fmt.Printf("[----------] Finished with tests from %s\n", suite.Name)
+ }
+}
+
+// Return true iff the supplied program counter appears to lie within panic().
+func isPanic(pc uintptr) bool {
+ f := runtime.FuncForPC(pc)
+ if f == nil {
+ return false
+ }
+
+ return f.Name() == "runtime.gopanic" || f.Name() == "runtime.sigpanic"
+}
+
+// Find the deepest stack frame containing something that appears to be a
+// panic. Return the 'skip' value that a caller of this function would need
+// to supply to runtime.Caller in order to reach that frame, or a negative
+// number if no panic frame was found.
+func findPanic() int {
+ localSkip := -1
+ for i := 0; ; i++ {
+ // Stop if we've passed the base of the stack.
+ pc, _, _, ok := runtime.Caller(i)
+ if !ok {
+ break
+ }
+
+ // Is this a panic?
+ if isPanic(pc) {
+ localSkip = i
+ }
+ }
+
+ return localSkip - 1
+}
+
+// Attempt to find the file base name and line number for the ultimate source
+// of a panic, on the panicking stack. Return a human-readable sentinel if
+// unsuccessful.
+func findPanicFileLine() (string, int) {
+ panicSkip := findPanic()
+ if panicSkip < 0 {
+ return "(unknown)", 0
+ }
+
+ // Find the trigger of the panic.
+ _, file, line, ok := runtime.Caller(panicSkip + 1)
+ if !ok {
+ return "(unknown)", 0
+ }
+
+ return path.Base(file), line
+}
+
+// Run the supplied function, catching panics (including AssertThat errors) and
+// reporting them to the currently-running test as appropriate. Return true iff
+// the function panicked.
+func runWithProtection(f func()) (panicked bool) {
+ defer func() {
+		// If the function didn't panic, we're done.
+ r := recover()
+ if r == nil {
+ return
+ }
+
+ panicked = true
+
+ // We modify the currently running test below.
+ currentlyRunningTest.mu.Lock()
+ defer currentlyRunningTest.mu.Unlock()
+
+ // If the function panicked (and the panic was not due to an AssertThat
+ // failure), add a failure for the panic.
+ if !isAbortError(r) {
+ var panicRecord FailureRecord
+ panicRecord.FileName, panicRecord.LineNumber = findPanicFileLine()
+ panicRecord.Error = fmt.Sprintf(
+ "panic: %v\n\n%s", r, formatPanicStack())
+
+ currentlyRunningTest.failureRecords = append(
+ currentlyRunningTest.failureRecords,
+ panicRecord)
+ }
+ }()
+
+ f()
+ return
+}
+
+func formatPanicStack() string {
+ buf := new(bytes.Buffer)
+
+ // Find the panic. If successful, we'll skip to below it. Otherwise, we'll
+ // format everything.
+ var initialSkip int
+ if panicSkip := findPanic(); panicSkip >= 0 {
+ initialSkip = panicSkip + 1
+ }
+
+ for i := initialSkip; ; i++ {
+ pc, file, line, ok := runtime.Caller(i)
+ if !ok {
+ break
+ }
+
+ // Choose a function name to display.
+ funcName := "(unknown)"
+ if f := runtime.FuncForPC(pc); f != nil {
+ funcName = f.Name()
+ }
+
+ // Stop if we've gotten as far as the test runner code.
+ if funcName == "github.com/smartystreets/assertions/internal/ogletest.runTestMethod" ||
+ funcName == "github.com/smartystreets/assertions/internal/ogletest.runWithProtection" {
+ break
+ }
+
+ // Add an entry for this frame.
+ fmt.Fprintf(buf, "%s\n\t%s:%d\n", funcName, file, line)
+ }
+
+ return buf.String()
+}
+
+// Filter test functions according to the user-supplied filter flag.
+func filterTestFunctions(suite TestSuite) (out []TestFunction) {
+ re, err := regexp.Compile(*fTestFilter)
+ if err != nil {
+ panic("Invalid value for --ogletest.run: " + err.Error())
+ }
+
+ for _, tf := range suite.TestFunctions {
+ fullName := fmt.Sprintf("%s.%s", suite.Name, tf.Name)
+ if !re.MatchString(fullName) {
+ continue
+ }
+
+ out = append(out, tf)
+ }
+
+ return
+}
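+
+// A usage sketch for the filter flag referenced above (the flag spelling is
+// taken from the panic message; the chosen test name matches the failing-test
+// fixture elsewhere in this package): only test functions whose full
+// "Suite.Name" matches the regexp are run.
+//
+//     go test -ogletest.run='FailingTest\.Equals'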
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/docs.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/docs.go
new file mode 100644
index 00000000000..d9b9bc8e5fe
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/docs.go
@@ -0,0 +1,5 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+
+// Functions for working with source code.
+package srcutil
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/methods.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/methods.go
new file mode 100644
index 00000000000..a8c5828ea3a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/methods.go
@@ -0,0 +1,65 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package srcutil
+
+import (
+ "fmt"
+ "reflect"
+ "runtime"
+ "sort"
+)
+
+func getLine(m reflect.Method) int {
+ pc := m.Func.Pointer()
+
+ f := runtime.FuncForPC(pc)
+ if f == nil {
+ panic(fmt.Sprintf("Couldn't get runtime func for method (pc=%d): %v", pc, m))
+ }
+
+ _, line := f.FileLine(pc)
+ return line
+}
+
+type sortableMethodSet []reflect.Method
+
+func (s sortableMethodSet) Len() int {
+ return len(s)
+}
+
+func (s sortableMethodSet) Less(i, j int) bool {
+ return getLine(s[i]) < getLine(s[j])
+}
+
+func (s sortableMethodSet) Swap(i, j int) {
+ s[i], s[j] = s[j], s[i]
+}
+
+// Given a type t, return all of the methods of t sorted such that source file
+// order is preserved. Order across files is undefined. Order within lines is
+// undefined.
+func GetMethodsInSourceOrder(t reflect.Type) []reflect.Method {
+ // Build the list of methods.
+ methods := sortableMethodSet{}
+ for i := 0; i < t.NumMethod(); i++ {
+ methods = append(methods, t.Method(i))
+ }
+
+ // Sort it.
+ sort.Sort(methods)
+
+ return methods
+}
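+
+// A minimal usage sketch (hypothetical type and method names): the returned
+// slice is ordered by the line on which each method is defined, as the
+// MultipleMethodsType case in methods_test.go exercises.
+//
+//     type widget struct{}
+//
+//     func (widget) Close() {}
+//     func (widget) Open()  {}
+//
+//     ms := GetMethodsInSourceOrder(reflect.TypeOf(widget{}))
+//     // ms[0].Name == "Close", ms[1].Name == "Open"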
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/methods_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/methods_test.go
new file mode 100644
index 00000000000..95c07fd4697
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/srcutil/methods_test.go
@@ -0,0 +1,107 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package srcutil_test
+
+import (
+ "fmt"
+ "reflect"
+ "testing"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "github.com/smartystreets/assertions/internal/ogletest/srcutil"
+)
+
+func TestRegisterMethodsTest(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type MethodsTest struct {
+}
+
+func init() { RegisterTestSuite(&MethodsTest{}) }
+
+type OneMethodType int
+
+func (x OneMethodType) Foo() {}
+
+type MultipleMethodsType int
+
+func (x MultipleMethodsType) Foo() {}
+func (x MultipleMethodsType) Bar() {}
+func (x MultipleMethodsType) Baz() {}
+
+type methodNameMatcher struct {
+ expected string
+}
+
+func (m *methodNameMatcher) Description() string {
+ return fmt.Sprintf("method named %s", m.expected)
+}
+
+func (m *methodNameMatcher) Matches(x interface{}) error {
+ method, ok := x.(reflect.Method)
+ if !ok {
+ panic("Invalid argument.")
+ }
+
+ if method.Name != m.expected {
+ return fmt.Errorf("whose name is %s", method.Name)
+ }
+
+ return nil
+}
+
+func NameIs(name string) Matcher {
+ return &methodNameMatcher{name}
+}
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *MethodsTest) NoMethods() {
+ type foo int
+
+ methods := srcutil.GetMethodsInSourceOrder(reflect.TypeOf(foo(17)))
+ ExpectThat(methods, ElementsAre())
+}
+
+func (t *MethodsTest) OneMethod() {
+ methods := srcutil.GetMethodsInSourceOrder(reflect.TypeOf(OneMethodType(17)))
+ ExpectThat(
+ methods,
+ ElementsAre(
+ NameIs("Foo"),
+ ))
+}
+
+func (t *MethodsTest) MultipleMethods() {
+ methods := srcutil.GetMethodsInSourceOrder(reflect.TypeOf(MultipleMethodsType(17)))
+ ExpectThat(
+ methods,
+ ElementsAre(
+ NameIs("Foo"),
+ NameIs("Bar"),
+ NameIs("Baz"),
+ ))
+
+ ExpectEq("Foo", methods[0].Name)
+ ExpectEq("Bar", methods[1].Name)
+ ExpectEq("Baz", methods[2].Name)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/failing.test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/failing.test.go
new file mode 100644
index 00000000000..17c50e19487
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/failing.test.go
@@ -0,0 +1,252 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "fmt"
+ "testing"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+func TestFailingTest(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Usual failures
+////////////////////////////////////////////////////////////////////////
+
+type FailingTest struct {
+}
+
+var _ TearDownInterface = &FailingTest{}
+var _ TearDownTestSuiteInterface = &FailingTest{}
+
+func init() { RegisterTestSuite(&FailingTest{}) }
+
+func (t *FailingTest) TearDown() {
+ fmt.Println("TearDown running.")
+}
+
+func (t *FailingTest) TearDownTestSuite() {
+ fmt.Println("TearDownTestSuite running.")
+}
+
+func (t *FailingTest) PassingMethod() {
+}
+
+func (t *FailingTest) Equals() {
+ ExpectThat(17, Equals(17.5))
+ ExpectThat(17, Equals("taco"))
+}
+
+func (t *FailingTest) LessThan() {
+ ExpectThat(18, LessThan(17))
+ ExpectThat(18, LessThan("taco"))
+}
+
+func (t *FailingTest) HasSubstr() {
+ ExpectThat("taco", HasSubstr("ac"))
+ ExpectThat(17, HasSubstr("ac"))
+}
+
+func (t *FailingTest) ExpectWithUserErrorMessages() {
+ ExpectThat(17, Equals(19), "foo bar: %d", 112)
+ ExpectEq(17, 17.5, "foo bar: %d", 112)
+ ExpectLe(17, 16.9, "foo bar: %d", 112)
+ ExpectLt(17, 16.9, "foo bar: %d", 112)
+ ExpectGe(17, 17.1, "foo bar: %d", 112)
+ ExpectGt(17, "taco", "foo bar: %d", 112)
+ ExpectNe(17, 17.0, "foo bar: %d", 112)
+ ExpectFalse(true, "foo bar: %d", 112)
+ ExpectTrue(false, "foo bar: %d", 112)
+}
+
+func (t *FailingTest) AssertWithUserErrorMessages() {
+ AssertThat(17, Equals(19), "foo bar: %d", 112)
+}
+
+func (t *FailingTest) ExpectationAliases() {
+ ExpectEq(17, 17.5)
+ ExpectEq("taco", 17.5)
+
+ ExpectLe(17, 16.9)
+ ExpectLt(17, 16.9)
+ ExpectLt(17, "taco")
+
+ ExpectGe(17, 17.1)
+ ExpectGt(17, 17.1)
+ ExpectGt(17, "taco")
+
+ ExpectNe(17, 17.0)
+ ExpectNe(17, "taco")
+
+ ExpectFalse(true)
+ ExpectFalse("taco")
+
+ ExpectTrue(false)
+ ExpectTrue("taco")
+}
+
+func (t *FailingTest) AssertThatFailure() {
+ AssertThat(17, Equals(19))
+ panic("Shouldn't get here.")
+}
+
+func (t *FailingTest) AssertEqFailure() {
+ AssertEq(19, 17)
+ panic("Shouldn't get here.")
+}
+
+func (t *FailingTest) AssertNeFailure() {
+ AssertNe(19, 19)
+ panic("Shouldn't get here.")
+}
+
+func (t *FailingTest) AssertLeFailure() {
+ AssertLe(19, 17)
+ panic("Shouldn't get here.")
+}
+
+func (t *FailingTest) AssertLtFailure() {
+ AssertLt(19, 17)
+ panic("Shouldn't get here.")
+}
+
+func (t *FailingTest) AssertGeFailure() {
+ AssertGe(17, 19)
+ panic("Shouldn't get here.")
+}
+
+func (t *FailingTest) AssertGtFailure() {
+ AssertGt(17, 19)
+ panic("Shouldn't get here.")
+}
+
+func (t *FailingTest) AssertTrueFailure() {
+ AssertTrue("taco")
+ panic("Shouldn't get here.")
+}
+
+func (t *FailingTest) AssertFalseFailure() {
+ AssertFalse("taco")
+ panic("Shouldn't get here.")
+}
+
+func (t *FailingTest) AddFailureRecord() {
+ r := FailureRecord{
+ FileName: "foo.go",
+ LineNumber: 17,
+ Error: "taco\nburrito",
+ }
+
+ AddFailureRecord(r)
+}
+
+func (t *FailingTest) AddFailure() {
+ AddFailure("taco")
+ AddFailure("burrito: %d", 17)
+}
+
+func (t *FailingTest) AddFailureThenAbortTest() {
+ AddFailure("enchilada")
+ AbortTest()
+ fmt.Println("Shouldn't get here.")
+}
+
+////////////////////////////////////////////////////////////////////////
+// Expectation failure during SetUp
+////////////////////////////////////////////////////////////////////////
+
+type ExpectFailDuringSetUpTest struct {
+}
+
+func init() { RegisterTestSuite(&ExpectFailDuringSetUpTest{}) }
+
+func (t *ExpectFailDuringSetUpTest) SetUp(i *TestInfo) {
+ ExpectFalse(true)
+}
+
+func (t *ExpectFailDuringSetUpTest) TearDown() {
+ fmt.Println("TearDown running.")
+}
+
+func (t *ExpectFailDuringSetUpTest) PassingMethod() {
+ fmt.Println("Method running.")
+}
+
+////////////////////////////////////////////////////////////////////////
+// Assertion failure during SetUp
+////////////////////////////////////////////////////////////////////////
+
+type AssertFailDuringSetUpTest struct {
+}
+
+func init() { RegisterTestSuite(&AssertFailDuringSetUpTest{}) }
+
+func (t *AssertFailDuringSetUpTest) SetUp(i *TestInfo) {
+ AssertFalse(true)
+}
+
+func (t *AssertFailDuringSetUpTest) TearDown() {
+ fmt.Println("TearDown running.")
+}
+
+func (t *AssertFailDuringSetUpTest) PassingMethod() {
+ fmt.Println("Method running.")
+}
+
+////////////////////////////////////////////////////////////////////////
+// Expectation failure during TearDown
+////////////////////////////////////////////////////////////////////////
+
+type ExpectFailDuringTearDownTest struct {
+}
+
+func init() { RegisterTestSuite(&ExpectFailDuringTearDownTest{}) }
+
+func (t *ExpectFailDuringTearDownTest) SetUp(i *TestInfo) {
+ fmt.Println("SetUp running.")
+}
+
+func (t *ExpectFailDuringTearDownTest) TearDown() {
+ ExpectFalse(true)
+}
+
+func (t *ExpectFailDuringTearDownTest) PassingMethod() {
+ fmt.Println("Method running.")
+}
+
+////////////////////////////////////////////////////////////////////////
+// Assertion failure during TearDown
+////////////////////////////////////////////////////////////////////////
+
+type AssertFailDuringTearDownTest struct {
+}
+
+func init() { RegisterTestSuite(&AssertFailDuringTearDownTest{}) }
+
+func (t *AssertFailDuringTearDownTest) SetUp(i *TestInfo) {
+ fmt.Println("SetUp running.")
+}
+
+func (t *AssertFailDuringTearDownTest) TearDown() {
+ AssertFalse(true)
+}
+
+func (t *AssertFailDuringTearDownTest) PassingMethod() {
+ fmt.Println("Method running.")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/filtered.test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/filtered.test.go
new file mode 100644
index 00000000000..e559c5f926c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/filtered.test.go
@@ -0,0 +1,79 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "fmt"
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "testing"
+)
+
+func TestFiltered(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Partially filtered out
+////////////////////////////////////////////////////////////////////////
+
+type PartiallyFilteredTest struct {
+}
+
+func init() { RegisterTestSuite(&PartiallyFilteredTest{}) }
+
+func (t *PartiallyFilteredTest) PassingTestFoo() {
+ ExpectThat(19, Equals(19))
+}
+
+func (t *PartiallyFilteredTest) PassingTestBar() {
+ ExpectThat(17, Equals(17))
+}
+
+func (t *PartiallyFilteredTest) PartiallyFilteredTestFoo() {
+ ExpectThat(18, LessThan(17))
+}
+
+func (t *PartiallyFilteredTest) PartiallyFilteredTestBar() {
+ ExpectThat("taco", HasSubstr("blah"))
+}
+
+func (t *PartiallyFilteredTest) PartiallyFilteredTestBaz() {
+ ExpectThat(18, LessThan(17))
+}
+
+////////////////////////////////////////////////////////////////////////
+// Completely filtered out
+////////////////////////////////////////////////////////////////////////
+
+type CompletelyFilteredTest struct {
+}
+
+func init() { RegisterTestSuite(&CompletelyFilteredTest{}) }
+
+func (t *CompletelyFilteredTest) SetUpTestSuite() {
+ fmt.Println("SetUpTestSuite run!")
+}
+
+func (t *CompletelyFilteredTest) TearDownTestSuite() {
+ fmt.Println("TearDownTestSuite run!")
+}
+
+func (t *PartiallyFilteredTest) SomePassingTest() {
+ ExpectThat(19, Equals(19))
+}
+
+func (t *PartiallyFilteredTest) SomeFailingTest() {
+ ExpectThat(19, Equals(17))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.failing_test b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.failing_test
new file mode 100644
index 00000000000..f0cd76af500
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.failing_test
@@ -0,0 +1,278 @@
+[----------] Running tests from FailingTest
+[ RUN ] FailingTest.PassingMethod
+TearDown running.
+[ OK ] FailingTest.PassingMethod
+[ RUN ] FailingTest.Equals
+TearDown running.
+failing_test.go:52:
+Expected: 17.5
+Actual: 17
+
+failing_test.go:53:
+Expected: taco
+Actual: 17, which is not a string
+
+[ FAILED ] FailingTest.Equals
+[ RUN ] FailingTest.LessThan
+TearDown running.
+failing_test.go:57:
+Expected: less than 17
+Actual: 18
+
+failing_test.go:58:
+Expected: less than "taco"
+Actual: 18, which is not comparable
+
+[ FAILED ] FailingTest.LessThan
+[ RUN ] FailingTest.HasSubstr
+TearDown running.
+failing_test.go:63:
+Expected: has substring "ac"
+Actual: 17, which is not a string
+
+[ FAILED ] FailingTest.HasSubstr
+[ RUN ] FailingTest.ExpectWithUserErrorMessages
+TearDown running.
+failing_test.go:67:
+Expected: 19
+Actual: 17
+foo bar: 112
+
+failing_test.go:68:
+Expected: 17
+Actual: 17.5
+foo bar: 112
+
+failing_test.go:69:
+Expected: less than or equal to 16.9
+Actual: 17
+foo bar: 112
+
+failing_test.go:70:
+Expected: less than 16.9
+Actual: 17
+foo bar: 112
+
+failing_test.go:71:
+Expected: greater than or equal to 17.1
+Actual: 17
+foo bar: 112
+
+failing_test.go:72:
+Expected: greater than "taco"
+Actual: 17, which is not comparable
+foo bar: 112
+
+failing_test.go:73:
+Expected: not(17)
+Actual: 17
+foo bar: 112
+
+failing_test.go:74:
+Expected: false
+Actual: true
+foo bar: 112
+
+failing_test.go:75:
+Expected: true
+Actual: false
+foo bar: 112
+
+[ FAILED ] FailingTest.ExpectWithUserErrorMessages
+[ RUN ] FailingTest.AssertWithUserErrorMessages
+TearDown running.
+failing_test.go:79:
+Expected: 19
+Actual: 17
+foo bar: 112
+
+[ FAILED ] FailingTest.AssertWithUserErrorMessages
+[ RUN ] FailingTest.ExpectationAliases
+TearDown running.
+failing_test.go:83:
+Expected: 17
+Actual: 17.5
+
+failing_test.go:84:
+Expected: taco
+Actual: 17.5, which is not a string
+
+failing_test.go:86:
+Expected: less than or equal to 16.9
+Actual: 17
+
+failing_test.go:87:
+Expected: less than 16.9
+Actual: 17
+
+failing_test.go:88:
+Expected: less than "taco"
+Actual: 17, which is not comparable
+
+failing_test.go:90:
+Expected: greater than or equal to 17.1
+Actual: 17
+
+failing_test.go:91:
+Expected: greater than 17.1
+Actual: 17
+
+failing_test.go:92:
+Expected: greater than "taco"
+Actual: 17, which is not comparable
+
+failing_test.go:94:
+Expected: not(17)
+Actual: 17
+
+failing_test.go:95:
+Expected: not(17)
+Actual: taco, which is not numeric
+
+failing_test.go:97:
+Expected: false
+Actual: true
+
+failing_test.go:98:
+Expected: false
+Actual: taco, which is not a bool
+
+failing_test.go:100:
+Expected: true
+Actual: false
+
+failing_test.go:101:
+Expected: true
+Actual: taco, which is not a bool
+
+[ FAILED ] FailingTest.ExpectationAliases
+[ RUN ] FailingTest.AssertThatFailure
+TearDown running.
+failing_test.go:105:
+Expected: 19
+Actual: 17
+
+[ FAILED ] FailingTest.AssertThatFailure
+[ RUN ] FailingTest.AssertEqFailure
+TearDown running.
+failing_test.go:110:
+Expected: 19
+Actual: 17
+
+[ FAILED ] FailingTest.AssertEqFailure
+[ RUN ] FailingTest.AssertNeFailure
+TearDown running.
+failing_test.go:115:
+Expected: not(19)
+Actual: 19
+
+[ FAILED ] FailingTest.AssertNeFailure
+[ RUN ] FailingTest.AssertLeFailure
+TearDown running.
+failing_test.go:120:
+Expected: less than or equal to 17
+Actual: 19
+
+[ FAILED ] FailingTest.AssertLeFailure
+[ RUN ] FailingTest.AssertLtFailure
+TearDown running.
+failing_test.go:125:
+Expected: less than 17
+Actual: 19
+
+[ FAILED ] FailingTest.AssertLtFailure
+[ RUN ] FailingTest.AssertGeFailure
+TearDown running.
+failing_test.go:130:
+Expected: greater than or equal to 19
+Actual: 17
+
+[ FAILED ] FailingTest.AssertGeFailure
+[ RUN ] FailingTest.AssertGtFailure
+TearDown running.
+failing_test.go:135:
+Expected: greater than 19
+Actual: 17
+
+[ FAILED ] FailingTest.AssertGtFailure
+[ RUN ] FailingTest.AssertTrueFailure
+TearDown running.
+failing_test.go:140:
+Expected: true
+Actual: taco, which is not a bool
+
+[ FAILED ] FailingTest.AssertTrueFailure
+[ RUN ] FailingTest.AssertFalseFailure
+TearDown running.
+failing_test.go:145:
+Expected: false
+Actual: taco, which is not a bool
+
+[ FAILED ] FailingTest.AssertFalseFailure
+[ RUN ] FailingTest.AddFailureRecord
+TearDown running.
+foo.go:17:
+taco
+burrito
+
+[ FAILED ] FailingTest.AddFailureRecord
+[ RUN ] FailingTest.AddFailure
+TearDown running.
+failing_test.go:160:
+taco
+
+failing_test.go:161:
+burrito: 17
+
+[ FAILED ] FailingTest.AddFailure
+[ RUN ] FailingTest.AddFailureThenAbortTest
+TearDown running.
+failing_test.go:165:
+enchilada
+
+[ FAILED ] FailingTest.AddFailureThenAbortTest
+TearDownTestSuite running.
+[----------] Finished with tests from FailingTest
+[----------] Running tests from ExpectFailDuringSetUpTest
+[ RUN ] ExpectFailDuringSetUpTest.PassingMethod
+Method running.
+TearDown running.
+failing_test.go:180:
+Expected: false
+Actual: true
+
+[ FAILED ] ExpectFailDuringSetUpTest.PassingMethod
+[----------] Finished with tests from ExpectFailDuringSetUpTest
+[----------] Running tests from AssertFailDuringSetUpTest
+[ RUN ] AssertFailDuringSetUpTest.PassingMethod
+TearDown running.
+failing_test.go:201:
+Expected: false
+Actual: true
+
+[ FAILED ] AssertFailDuringSetUpTest.PassingMethod
+[----------] Finished with tests from AssertFailDuringSetUpTest
+[----------] Running tests from ExpectFailDuringTearDownTest
+[ RUN ] ExpectFailDuringTearDownTest.PassingMethod
+SetUp running.
+Method running.
+failing_test.go:226:
+Expected: false
+Actual: true
+
+[ FAILED ] ExpectFailDuringTearDownTest.PassingMethod
+[----------] Finished with tests from ExpectFailDuringTearDownTest
+[----------] Running tests from AssertFailDuringTearDownTest
+[ RUN ] AssertFailDuringTearDownTest.PassingMethod
+SetUp running.
+Method running.
+failing_test.go:247:
+Expected: false
+Actual: true
+
+[ FAILED ] AssertFailDuringTearDownTest.PassingMethod
+[----------] Finished with tests from AssertFailDuringTearDownTest
+--- FAIL: TestSomething (1.23s)
+FAIL
+exit status 1
+FAIL somepkg 1.234s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.filtered_test b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.filtered_test
new file mode 100644
index 00000000000..39fa697140c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.filtered_test
@@ -0,0 +1,24 @@
+[----------] Running tests from PartiallyFilteredTest
+[ RUN ] PartiallyFilteredTest.PassingTestBar
+[ OK ] PartiallyFilteredTest.PassingTestBar
+[ RUN ] PartiallyFilteredTest.PartiallyFilteredTestBar
+filtered_test.go:49:
+Expected: has substring "blah"
+Actual: taco
+
+[ FAILED ] PartiallyFilteredTest.PartiallyFilteredTestBar
+[ RUN ] PartiallyFilteredTest.PartiallyFilteredTestBaz
+filtered_test.go:53:
+Expected: less than 17
+Actual: 18
+
+[ FAILED ] PartiallyFilteredTest.PartiallyFilteredTestBaz
+[----------] Finished with tests from PartiallyFilteredTest
+[----------] Running tests from CompletelyFilteredTest
+SetUpTestSuite run!
+TearDownTestSuite run!
+[----------] Finished with tests from CompletelyFilteredTest
+--- FAIL: TestSomething (1.23s)
+FAIL
+exit status 1
+FAIL somepkg 1.234s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.mock_test b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.mock_test
new file mode 100644
index 00000000000..4ca29791de7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.mock_test
@@ -0,0 +1,25 @@
+[----------] Running tests from MockTest
+[ RUN ] MockTest.ExpectationSatisfied
+[ OK ] MockTest.ExpectationSatisfied
+[ RUN ] MockTest.MockExpectationNotSatisfied
+/some/path/mock_test.go:56:
+Unsatisfied expectation; expected At to be called at least 1 times; called 0 times.
+
+[ FAILED ] MockTest.MockExpectationNotSatisfied
+[ RUN ] MockTest.ExpectCallForUnknownMethod
+/some/path/mock_test.go:61:
+Unknown method: FooBar
+
+[ FAILED ] MockTest.ExpectCallForUnknownMethod
+[ RUN ] MockTest.UnexpectedCall
+/some/path/mock_test.go:65:
+Unexpected call to At with args: [11 23]
+
+[ FAILED ] MockTest.UnexpectedCall
+[ RUN ] MockTest.InvokeFunction
+[ OK ] MockTest.InvokeFunction
+[----------] Finished with tests from MockTest
+--- FAIL: TestSomething (1.23s)
+FAIL
+exit status 1
+FAIL somepkg 1.234s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.no_cases_test b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.no_cases_test
new file mode 100644
index 00000000000..8631385581d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.no_cases_test
@@ -0,0 +1,6 @@
+[----------] Running tests from NoCasesTest
+SetUpTestSuite run!
+TearDownTestSuite run!
+[----------] Finished with tests from NoCasesTest
+PASS
+ok somepkg 1.234s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.panicking_test b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.panicking_test
new file mode 100644
index 00000000000..32eac65f7ba
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.panicking_test
@@ -0,0 +1,90 @@
+[----------] Running tests from PanickingTest
+[ RUN ] PanickingTest.ExplicitPanic
+TearDown running.
+panicking_test.go:47:
+panic: Panic in ExplicitPanic
+
+github.com/smartystreets/assertions/internal/ogletest/somepkg_test.(*PanickingTest).ExplicitPanic
+ some_file.txt:0
+runtime.call16
+ /some/path/asm_amd64.s:401
+reflect.Value.call
+ some_file.txt:0
+reflect.Value.Call
+ some_file.txt:0
+
+
+[ FAILED ] PanickingTest.ExplicitPanic
+[ RUN ] PanickingTest.ExplicitPanicInHelperFunction
+TearDown running.
+panicking_test.go:34:
+panic: Panic in someFuncThatPanics
+
+github.com/smartystreets/assertions/internal/ogletest/somepkg_test.someFuncThatPanics
+ some_file.txt:0
+github.com/smartystreets/assertions/internal/ogletest/somepkg_test.(*PanickingTest).ExplicitPanicInHelperFunction
+ some_file.txt:0
+runtime.call16
+ /some/path/asm_amd64.s:401
+reflect.Value.call
+ some_file.txt:0
+reflect.Value.Call
+ some_file.txt:0
+
+
+[ FAILED ] PanickingTest.ExplicitPanicInHelperFunction
+[ RUN ] PanickingTest.NilPointerDerefence
+TearDown running.
+panicking_test.go:56:
+panic: runtime error: invalid memory address or nil pointer dereference
+
+github.com/smartystreets/assertions/internal/ogletest/somepkg_test.(*PanickingTest).NilPointerDerefence
+ some_file.txt:0
+runtime.call16
+ /some/path/asm_amd64.s:401
+reflect.Value.call
+ some_file.txt:0
+reflect.Value.Call
+ some_file.txt:0
+
+
+[ FAILED ] PanickingTest.NilPointerDerefence
+[ RUN ] PanickingTest.ZzzSomeOtherTest
+TearDown running.
+[ OK ] PanickingTest.ZzzSomeOtherTest
+[----------] Finished with tests from PanickingTest
+[----------] Running tests from SetUpPanicTest
+[ RUN ] SetUpPanicTest.SomeTestCase
+SetUp about to panic.
+TearDown running.
+panicking_test.go:74:
+panic: Panic in SetUp
+
+github.com/smartystreets/assertions/internal/ogletest/somepkg_test.(*SetUpPanicTest).SetUp
+ some_file.txt:0
+github.com/smartystreets/assertions/internal/ogletest.func·003
+ some_file.txt:0
+github.com/smartystreets/assertions/internal/ogletest.func·007
+ some_file.txt:0
+
+
+[ FAILED ] SetUpPanicTest.SomeTestCase
+[----------] Finished with tests from SetUpPanicTest
+[----------] Running tests from TearDownPanicTest
+[ RUN ] TearDownPanicTest.SomeTestCase
+TearDown about to panic.
+panicking_test.go:95:
+panic: Panic in TearDown
+
+github.com/smartystreets/assertions/internal/ogletest/somepkg_test.(*TearDownPanicTest).TearDown
+ some_file.txt:0
+github.com/smartystreets/assertions/internal/ogletest.func·005
+ some_file.txt:0
+
+
+[ FAILED ] TearDownPanicTest.SomeTestCase
+[----------] Finished with tests from TearDownPanicTest
+--- FAIL: TestSomething (1.23s)
+FAIL
+exit status 1
+FAIL somepkg 1.234s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.passing_test b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.passing_test
new file mode 100644
index 00000000000..031128842ac
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.passing_test
@@ -0,0 +1,22 @@
+[----------] Running tests from PassingTest
+[ RUN ] PassingTest.EmptyTestMethod
+[ OK ] PassingTest.EmptyTestMethod
+[ RUN ] PassingTest.SuccessfullMatches
+[ OK ] PassingTest.SuccessfullMatches
+[ RUN ] PassingTest.ExpectAliases
+[ OK ] PassingTest.ExpectAliases
+[ RUN ] PassingTest.AssertAliases
+[ OK ] PassingTest.AssertAliases
+[ RUN ] PassingTest.SlowTest
+[ OK ] PassingTest.SlowTest (1234ms)
+[----------] Finished with tests from PassingTest
+[----------] Running tests from PassingTestWithHelpers
+SetUpTestSuite ran.
+[ RUN ] PassingTestWithHelpers.EmptyTestMethod
+SetUp ran.
+TearDown ran.
+[ OK ] PassingTestWithHelpers.EmptyTestMethod
+TearDownTestSuite ran.
+[----------] Finished with tests from PassingTestWithHelpers
+PASS
+ok somepkg 1.234s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.run_twice_test b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.run_twice_test
new file mode 100644
index 00000000000..0749f916454
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.run_twice_test
@@ -0,0 +1,14 @@
+[----------] Running tests from RunTwiceTest
+[ RUN ] RunTwiceTest.PassingMethod
+[ OK ] RunTwiceTest.PassingMethod
+[ RUN ] RunTwiceTest.FailingMethod
+run_twice_test.go:46:
+Expected: 17.5
+Actual: 17
+
+[ FAILED ] RunTwiceTest.FailingMethod
+[----------] Finished with tests from RunTwiceTest
+--- FAIL: TestSomething (1.23s)
+FAIL
+exit status 1
+FAIL somepkg 1.234s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.stop_test b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.stop_test
new file mode 100644
index 00000000000..e7d42c79337
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.stop_test
@@ -0,0 +1,13 @@
+[----------] Running tests from StopTest
+[ RUN ] StopTest.First
+TearDown running.
+[ OK ] StopTest.First
+[ RUN ] StopTest.Second
+About to call StopRunningTests.
+Called StopRunningTests.
+TearDown running.
+[ OK ] StopTest.Second
+TearDownTestSuite running.
+Exiting early due to user request.
+exit status 1
+FAIL somepkg 1.234s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.unexported_test b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.unexported_test
new file mode 100644
index 00000000000..6221e65ea9a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/golden.unexported_test
@@ -0,0 +1,12 @@
+[----------] Running tests from UnexportedTest
+[ RUN ] UnexportedTest.SomeTest
+unexported_test.go:42:
+Expected: 4
+Actual: 3
+
+[ FAILED ] UnexportedTest.SomeTest
+[----------] Finished with tests from UnexportedTest
+--- FAIL: TestSomething (1.23s)
+FAIL
+exit status 1
+FAIL somepkg 1.234s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/mock.test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/mock.test.go
new file mode 100644
index 00000000000..8e0fca9cc01
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/mock.test.go
@@ -0,0 +1,82 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ "github.com/smartystreets/assertions/internal/oglemock"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "github.com/smartystreets/assertions/internal/ogletest/test_cases/mock_image"
+ "image/color"
+ "testing"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type MockTest struct {
+ controller oglemock.Controller
+ image mock_image.MockImage
+}
+
+func init() { RegisterTestSuite(&MockTest{}) }
+func TestMockTest(t *testing.T) { RunTests(t) }
+
+func (t *MockTest) SetUp(i *TestInfo) {
+ t.controller = i.MockController
+ t.image = mock_image.NewMockImage(t.controller, "some mock image")
+}
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *MockTest) ExpectationSatisfied() {
+ ExpectCall(t.image, "At")(11, GreaterThan(19)).
+ WillOnce(oglemock.Return(color.Gray{0}))
+
+ ExpectThat(t.image.At(11, 23), IdenticalTo(color.Gray{0}))
+}
+
+func (t *MockTest) MockExpectationNotSatisfied() {
+ ExpectCall(t.image, "At")(11, GreaterThan(19)).
+ WillOnce(oglemock.Return(color.Gray{0}))
+}
+
+func (t *MockTest) ExpectCallForUnknownMethod() {
+ ExpectCall(t.image, "FooBar")(11)
+}
+
+func (t *MockTest) UnexpectedCall() {
+ t.image.At(11, 23)
+}
+
+func (t *MockTest) InvokeFunction() {
+ var suppliedX, suppliedY int
+ f := func(x, y int) color.Color {
+ suppliedX = x
+ suppliedY = y
+ return color.Gray{17}
+ }
+
+ ExpectCall(t.image, "At")(Any(), Any()).
+ WillOnce(oglemock.Invoke(f))
+
+ ExpectThat(t.image.At(-1, 12), IdenticalTo(color.Gray{17}))
+ ExpectEq(-1, suppliedX)
+ ExpectEq(12, suppliedY)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/mock_image/mock_image.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/mock_image/mock_image.go
new file mode 100644
index 00000000000..a8d55bc4808
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/mock_image/mock_image.go
@@ -0,0 +1,115 @@
+// This file was auto-generated using createmock. See the following page for
+// more information:
+//
+// https://github.com/smartystreets/assertions/internal/oglemock
+//
+
+package mock_image
+
+import (
+ fmt "fmt"
+ oglemock "github.com/smartystreets/assertions/internal/oglemock"
+ image "image"
+ color "image/color"
+ runtime "runtime"
+ unsafe "unsafe"
+)
+
+type MockImage interface {
+ image.Image
+ oglemock.MockObject
+}
+
+type mockImage struct {
+ controller oglemock.Controller
+ description string
+}
+
+func NewMockImage(
+ c oglemock.Controller,
+ desc string) MockImage {
+ return &mockImage{
+ controller: c,
+ description: desc,
+ }
+}
+
+func (m *mockImage) Oglemock_Id() uintptr {
+ return uintptr(unsafe.Pointer(m))
+}
+
+func (m *mockImage) Oglemock_Description() string {
+ return m.description
+}
+
+func (m *mockImage) At(p0 int, p1 int) (o0 color.Color) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "At",
+ file,
+ line,
+ []interface{}{p0, p1})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockImage.At: invalid return values: %v", retVals))
+ }
+
+ // o0 color.Color
+ if retVals[0] != nil {
+ o0 = retVals[0].(color.Color)
+ }
+
+ return
+}
+
+func (m *mockImage) Bounds() (o0 image.Rectangle) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "Bounds",
+ file,
+ line,
+ []interface{}{})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockImage.Bounds: invalid return values: %v", retVals))
+ }
+
+ // o0 image.Rectangle
+ if retVals[0] != nil {
+ o0 = retVals[0].(image.Rectangle)
+ }
+
+ return
+}
+
+func (m *mockImage) ColorModel() (o0 color.Model) {
+ // Get a file name and line number for the caller.
+ _, file, line, _ := runtime.Caller(1)
+
+ // Hand the call off to the controller, which does most of the work.
+ retVals := m.controller.HandleMethodCall(
+ m,
+ "ColorModel",
+ file,
+ line,
+ []interface{}{})
+
+ if len(retVals) != 1 {
+ panic(fmt.Sprintf("mockImage.ColorModel: invalid return values: %v", retVals))
+ }
+
+ // o0 color.Model
+ if retVals[0] != nil {
+ o0 = retVals[0].(color.Model)
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/no_cases.test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/no_cases.test.go
new file mode 100644
index 00000000000..ad204e053e5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/no_cases.test.go
@@ -0,0 +1,41 @@
+// Copyright 2012 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "fmt"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "testing"
+)
+
+func TestNoCases(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type NoCasesTest struct {
+}
+
+func init() { RegisterTestSuite(&NoCasesTest{}) }
+
+func (t *NoCasesTest) SetUpTestSuite() {
+ fmt.Println("SetUpTestSuite run!")
+}
+
+func (t *NoCasesTest) TearDownTestSuite() {
+ fmt.Println("TearDownTestSuite run!")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/panicking.test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/panicking.test.go
new file mode 100644
index 00000000000..59d1fe3bae0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/panicking.test.go
@@ -0,0 +1,99 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "fmt"
+ "log"
+ "testing"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+func TestPanickingTest(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// PanickingTest
+////////////////////////////////////////////////////////////////////////
+
+func someFuncThatPanics() {
+ panic("Panic in someFuncThatPanics")
+}
+
+type PanickingTest struct {
+}
+
+func init() { RegisterTestSuite(&PanickingTest{}) }
+
+func (t *PanickingTest) TearDown() {
+ fmt.Println("TearDown running.")
+}
+
+func (t *PanickingTest) ExplicitPanic() {
+ panic("Panic in ExplicitPanic")
+}
+
+func (t *PanickingTest) ExplicitPanicInHelperFunction() {
+ someFuncThatPanics()
+}
+
+func (t *PanickingTest) NilPointerDerefence() {
+ var p *int
+ log.Println(*p)
+}
+
+func (t *PanickingTest) ZzzSomeOtherTest() {
+ ExpectThat(17, Equals(17.0))
+}
+
+////////////////////////////////////////////////////////////////////////
+// SetUpPanicTest
+////////////////////////////////////////////////////////////////////////
+
+type SetUpPanicTest struct {
+}
+
+func init() { RegisterTestSuite(&SetUpPanicTest{}) }
+
+func (t *SetUpPanicTest) SetUp(ti *TestInfo) {
+ fmt.Println("SetUp about to panic.")
+ panic("Panic in SetUp")
+}
+
+func (t *SetUpPanicTest) TearDown() {
+ fmt.Println("TearDown running.")
+}
+
+func (t *SetUpPanicTest) SomeTestCase() {
+}
+
+////////////////////////////////////////////////////////////////////////
+// TearDownPanicTest
+////////////////////////////////////////////////////////////////////////
+
+type TearDownPanicTest struct {
+}
+
+func init() { RegisterTestSuite(&TearDownPanicTest{}) }
+
+func (t *TearDownPanicTest) TearDown() {
+ fmt.Println("TearDown about to panic.")
+ panic("Panic in TearDown")
+}
+
+func (t *TearDownPanicTest) SomeTestCase() {
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/passing.test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/passing.test.go
new file mode 100644
index 00000000000..01d8e63446e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/passing.test.go
@@ -0,0 +1,120 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+func TestPassingTest(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// PassingTest
+////////////////////////////////////////////////////////////////////////
+
+type PassingTest struct {
+}
+
+func init() { RegisterTestSuite(&PassingTest{}) }
+
+func (t *PassingTest) EmptyTestMethod() {
+}
+
+func (t *PassingTest) SuccessfullMatches() {
+ ExpectThat(17, Equals(17.0))
+ ExpectThat(16.9, LessThan(17))
+ ExpectThat("taco", HasSubstr("ac"))
+
+ AssertThat(17, Equals(17.0))
+ AssertThat(16.9, LessThan(17))
+ AssertThat("taco", HasSubstr("ac"))
+}
+
+func (t *PassingTest) ExpectAliases() {
+ ExpectEq(17, 17.0)
+
+ ExpectLe(17, 17.0)
+ ExpectLe(17, 18.0)
+ ExpectLt(17, 18.0)
+
+ ExpectGe(17, 17.0)
+ ExpectGe(17, 16.0)
+ ExpectGt(17, 16.0)
+
+ ExpectNe(17, 18.0)
+
+ ExpectTrue(true)
+ ExpectFalse(false)
+}
+
+func (t *PassingTest) AssertAliases() {
+ AssertEq(17, 17.0)
+
+ AssertLe(17, 17.0)
+ AssertLe(17, 18.0)
+ AssertLt(17, 18.0)
+
+ AssertGe(17, 17.0)
+ AssertGe(17, 16.0)
+ AssertGt(17, 16.0)
+
+ AssertNe(17, 18.0)
+
+ AssertTrue(true)
+ AssertFalse(false)
+}
+
+func (t *PassingTest) SlowTest() {
+ time.Sleep(37 * time.Millisecond)
+}
+
+////////////////////////////////////////////////////////////////////////
+// PassingTestWithHelpers
+////////////////////////////////////////////////////////////////////////
+
+type PassingTestWithHelpers struct {
+}
+
+var _ SetUpTestSuiteInterface = &PassingTestWithHelpers{}
+var _ SetUpInterface = &PassingTestWithHelpers{}
+var _ TearDownInterface = &PassingTestWithHelpers{}
+var _ TearDownTestSuiteInterface = &PassingTestWithHelpers{}
+
+func init() { RegisterTestSuite(&PassingTestWithHelpers{}) }
+
+func (t *PassingTestWithHelpers) SetUpTestSuite() {
+ fmt.Println("SetUpTestSuite ran.")
+}
+
+func (t *PassingTestWithHelpers) SetUp(ti *TestInfo) {
+ fmt.Println("SetUp ran.")
+}
+
+func (t *PassingTestWithHelpers) TearDown() {
+ fmt.Println("TearDown ran.")
+}
+
+func (t *PassingTestWithHelpers) TearDownTestSuite() {
+ fmt.Println("TearDownTestSuite ran.")
+}
+
+func (t *PassingTestWithHelpers) EmptyTestMethod() {
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/run_twice.test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/run_twice.test.go
new file mode 100644
index 00000000000..a3a36c17525
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/run_twice.test.go
@@ -0,0 +1,47 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "testing"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type RunTwiceTest struct {
+}
+
+func init() { RegisterTestSuite(&RunTwiceTest{}) }
+
+// Set up two helpers that call RunTests. The test should still only be run
+// once.
+func TestOgletest(t *testing.T) { RunTests(t) }
+func TestOgletest2(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *RunTwiceTest) PassingMethod() {
+}
+
+func (t *RunTwiceTest) FailingMethod() {
+ ExpectThat(17, Equals(17.5))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/stop.test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/stop.test.go
new file mode 100644
index 00000000000..a008c081e91
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/stop.test.go
@@ -0,0 +1,61 @@
+// Copyright 2015 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ "fmt"
+ "testing"
+
+ . "github.com/smartystreets/assertions/internal/ogletest"
+)
+
+func TestStop(t *testing.T) { RunTests(t) }
+
+////////////////////////////////////////////////////////////////////////
+// Boilerplate
+////////////////////////////////////////////////////////////////////////
+
+type StopTest struct {
+}
+
+var _ TearDownInterface = &StopTest{}
+var _ TearDownTestSuiteInterface = &StopTest{}
+
+func init() { RegisterTestSuite(&StopTest{}) }
+
+func (t *StopTest) TearDown() {
+ fmt.Println("TearDown running.")
+}
+
+func (t *StopTest) TearDownTestSuite() {
+ fmt.Println("TearDownTestSuite running.")
+}
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *StopTest) First() {
+}
+
+func (t *StopTest) Second() {
+ fmt.Println("About to call StopRunningTests.")
+ StopRunningTests()
+ fmt.Println("Called StopRunningTests.")
+}
+
+func (t *StopTest) Third() {
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/unexported.test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/unexported.test.go
new file mode 100644
index 00000000000..a425e78d404
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_cases/unexported.test.go
@@ -0,0 +1,43 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package oglematchers_test
+
+import (
+ . "github.com/smartystreets/assertions/internal/oglematchers"
+ . "github.com/smartystreets/assertions/internal/ogletest"
+ "testing"
+)
+
+////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////
+
+type UnexportedTest struct {
+}
+
+func init() { RegisterTestSuite(&UnexportedTest{}) }
+func TestUnexportedTest(t *testing.T) { RunTests(t) }
+
+func (t *UnexportedTest) someUnexportedMethod() {
+}
+
+////////////////////////////////////////////////////////////////////////
+// Tests
+////////////////////////////////////////////////////////////////////////
+
+func (t *UnexportedTest) SomeTest() {
+ ExpectThat(3, Equals(4))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_info.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_info.go
new file mode 100644
index 00000000000..3ae12526bc2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/ogletest/test_info.go
@@ -0,0 +1,91 @@
+// Copyright 2011 Aaron Jacobs. All Rights Reserved.
+// Author: aaronjjacobs@gmail.com (Aaron Jacobs)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ogletest
+
+import (
+ "sync"
+
+ "golang.org/x/net/context"
+
+ "github.com/smartystreets/assertions/internal/oglemock"
+)
+
+// TestInfo represents information about a currently running or previously-run
+// test.
+type TestInfo struct {
+ // A mock controller that is set up to report errors to the ogletest test
+ // runner. This can be used for setting up mock expectations and handling
+ // mock calls. The Finish method should not be run by the user; ogletest will
+ // do that automatically after the test's TearDown method is run.
+ //
+ // Note that this feature is still experimental, and is subject to change.
+ MockController oglemock.Controller
+
+ // A context that can be used by tests for long-running operations. In
+ // particular, this enables conveniently tracing the execution of a test
+ // function with reqtrace.
+ Ctx context.Context
+
+ // A mutex protecting shared state.
+ mu sync.RWMutex
+
+ // A set of failure records that the test has produced.
+ //
+ // GUARDED_BY(mu)
+ failureRecords []FailureRecord
+}
+
+// currentlyRunningTest is the state for the currently running test, if any.
+var currentlyRunningTest *TestInfo
+
+// newTestInfo creates a valid but empty TestInfo struct.
+func newTestInfo() (info *TestInfo) {
+ info = &TestInfo{}
+ info.MockController = oglemock.NewController(&testInfoErrorReporter{info})
+ info.Ctx = context.Background()
+
+ return
+}
+
+// testInfoErrorReporter is an oglemock.ErrorReporter that writes failure
+// records into a test info struct.
+type testInfoErrorReporter struct {
+ testInfo *TestInfo
+}
+
+func (r *testInfoErrorReporter) ReportError(
+ fileName string,
+ lineNumber int,
+ err error) {
+ r.testInfo.mu.Lock()
+ defer r.testInfo.mu.Unlock()
+
+ record := FailureRecord{
+ FileName: fileName,
+ LineNumber: lineNumber,
+ Error: err.Error(),
+ }
+
+ r.testInfo.failureRecords = append(r.testInfo.failureRecords, record)
+}
+
+func (r *testInfoErrorReporter) ReportFatalError(
+ fileName string,
+ lineNumber int,
+ err error) {
+ r.ReportError(fileName, lineNumber, err)
+ AbortTest()
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/.gitignore b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/.gitignore
new file mode 100644
index 00000000000..daf913b1b34
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/.gitignore
@@ -0,0 +1,24 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/LICENSE b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/LICENSE
new file mode 100644
index 00000000000..8f71f43fee3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/LICENSE
@@ -0,0 +1,202 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/README.md b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/README.md
new file mode 100644
index 00000000000..4392452b5b1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/README.md
@@ -0,0 +1,53 @@
+[![GoDoc](https://godoc.org/github.com/smartystreets/assertions/internal/reqtrace?status.svg)](https://godoc.org/github.com/smartystreets/assertions/internal/reqtrace)
+
+reqtrace is a package for simple request tracing. It requires nothing of its
+user except:
+
+ * They must use [golang.org/x/net/context][context].
+ * They must add a single line to each function they want to be visible in
+ traces.
+
+[context]: http://godoc.org/golang.org/x/net/context
+
+In particular, reqtrace is console-based and doesn't require an HTTP server.
+
+**Warning**: This package is still barebones and in its early days. I reserve
+the right to make backwards-incompatible changes to its API. But if it's useful
+to you in its current form, have at it.
+
+## Use
+
+Call reqtrace.Trace anywhere you want to start a new root trace. (This is
+probably where you create your root context.) This returns a new context that
+you should pass to child operations, and a reporting function that you must use
+to inform reqtrace when the trace is complete.
+
+For example:
+
+```Go
+func HandleRequest(r *someRequest) (err error) {
+ ctx, report := reqtrace.Trace(context.Background(), "HandleRequest")
+ defer func() { report(err) }()
+
+ // Do two things for this request.
+ DoSomething(ctx, r)
+ DoSomethingElse(ctx, r)
+}
+```
+
+Within other functions that you want to show up in the trace, call
+reqtrace.StartSpan (or its more convenient sibling, reqtrace.StartSpanWithError):
+
+```Go
+func DoSomething(ctx context.Context, r *someRequest) (err error) {
+ defer reqtrace.StartSpanWithError(&ctx, &err, "DoSomething")()
+
+ // Process the request somehow using ctx. If downstream code also annotates
+ // using reqtrace, reqtrace will know that its spans are descendants of
+ // this one.
+ CallAnotherLibrary(ctx, r.Param)
+}
+```
+
+When `--reqtrace.enable` is set, the completion of a trace causes a helpful
+ASCII-art summary of its spans to be printed to stderr.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/reqtrace.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/reqtrace.go
new file mode 100644
index 00000000000..853c024244f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/reqtrace.go
@@ -0,0 +1,132 @@
+// Copyright 2015 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package reqtrace contains a very simple request tracing framework.
+package reqtrace
+
+import (
+ "flag"
+
+ "golang.org/x/net/context"
+)
+
+type contextKey int
+
+var fEnabled = flag.Bool("reqtrace.enable", false, "Collect and print traces.")
+
+// The key used to associate a *traceState with a context.
+const traceStateKey contextKey = 0
+
+// A function that must be called exactly once to report the outcome of an
+// operation represented by a span.
+type ReportFunc func(error)
+
+// Return false only if traces are disabled, i.e. Trace will never cause a
+// trace to be initiated.
+//
+// REQUIRES: flag.Parsed()
+func Enabled() (enabled bool) {
+ enabled = *fEnabled
+ return
+}
+
+// Begin a span within the current trace. Return a new context that should be
+// used for operations that logically occur within the span, and a report
+// function that must be called with the outcome of the logical operation
+// represented by the span.
+//
+// If no trace is active, no span will be created but ctx and report will still
+// be valid.
+func StartSpan(
+ parent context.Context,
+ desc string) (ctx context.Context, report ReportFunc) {
+ // Look for the trace state.
+ val := parent.Value(traceStateKey)
+ if val == nil {
+ // Nothing to do.
+ ctx = parent
+ report = func(err error) {}
+ return
+ }
+
+ ts := val.(*traceState)
+
+ // Set up the report function.
+ report = ts.CreateSpan(desc)
+
+ // For now we don't do anything interesting with the context. In the future,
+ // we may use it to record span hierarchy.
+ ctx = parent
+
+ return
+}
+
+// A wrapper around StartSpan that can be more convenient to use when the
+// lifetime of a span matches the lifetime of a function. Intended to be used
+// in a defer statement within a function using a named error return parameter.
+//
+// Equivalent to calling StartSpan with *ctx, replacing *ctx with the resulting
+// new context, then setting f to a function that will invoke the report
+// function with the contents of *error at the time that it is called.
+//
+// Example:
+//
+// func DoSomething(ctx context.Context) (err error) {
+// defer reqtrace.StartSpanWithError(&ctx, &err, "DoSomething")()
+// [...]
+// }
+//
+func StartSpanWithError(
+ ctx *context.Context,
+ err *error,
+ desc string) (f func()) {
+ var report ReportFunc
+ *ctx, report = StartSpan(*ctx, desc)
+ f = func() { report(*err) }
+ return
+}
+
+// Like StartSpan, but begins a root span for a new trace if no trace is active
+// in the supplied context and tracing is enabled for the process.
+func Trace(
+ parent context.Context,
+ desc string) (ctx context.Context, report ReportFunc) {
+ // If tracing is disabled, this is a no-op.
+ if !*fEnabled {
+ ctx = parent
+ report = func(err error) {}
+ return
+ }
+
+ // Is this context already being traced? If so, simply add a span.
+ if parent.Value(traceStateKey) != nil {
+ ctx, report = StartSpan(parent, desc)
+ return
+ }
+
+ // Set up a new trace state.
+ ts := new(traceState)
+ baseReport := ts.CreateSpan(desc)
+
+ // Log when finished.
+ report = func(err error) {
+ baseReport(err)
+ ts.Log()
+ }
+
+ // Set up the context.
+ ctx = context.WithValue(parent, traceStateKey, ts)
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/trace_state.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/trace_state.go
new file mode 100644
index 00000000000..614ef90d892
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/internal/reqtrace/trace_state.go
@@ -0,0 +1,175 @@
+// Copyright 2015 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package reqtrace
+
+import (
+ "log"
+ "math"
+ "os"
+ "strings"
+ "sync"
+ "time"
+)
+
+const logFlags = 0
+
+var gLogger = log.New(os.Stderr, "reqtrace: ", logFlags)
+
+type span struct {
+ // Fixed at creation.
+ desc string
+ start time.Time
+
+ // Updated by report functions.
+ finished bool
+ end time.Time
+ err error
+}
+
+// All of the state for a particular trace root. The zero value is usable.
+type traceState struct {
+ mu sync.Mutex
+
+ // The list of spans associated with this state. Append-only.
+ //
+ // GUARDED_BY(mu)
+ spans []*span
+}
+
+func (ts *traceState) report(spanIndex int, err error) {
+ ts.mu.Lock()
+ defer ts.mu.Unlock()
+
+ s := ts.spans[spanIndex]
+ s.finished = true
+ s.end = time.Now()
+ s.err = err
+}
+
+// Associate a new span with the trace. Return a function that will report its
+// completion.
+func (ts *traceState) CreateSpan(desc string) (report ReportFunc) {
+ ts.mu.Lock()
+ defer ts.mu.Unlock()
+
+ index := len(ts.spans)
+ ts.spans = append(ts.spans, &span{desc: desc, start: time.Now()})
+
+ report = func(err error) { ts.report(index, err) }
+ return
+}
+
+func round(x float64) float64 {
+ if x < 0 {
+ return math.Ceil(x - 0.5)
+ }
+
+ return math.Floor(x + 0.5)
+}
+
+// Log information about the spans in this trace.
+func (ts *traceState) Log() {
+ ts.mu.Lock()
+ defer ts.mu.Unlock()
+ gLogger.Println()
+
+ // Special case: we require at least one span.
+ if len(ts.spans) == 0 {
+ return
+ }
+
+ // Print a banner for this trace.
+ const bannerHalfLength = 45
+
+ gLogger.Println()
+ gLogger.Printf(
+ "%s %s %s",
+ strings.Repeat("=", bannerHalfLength),
+ ts.spans[0].desc,
+ strings.Repeat("=", bannerHalfLength))
+ gLogger.Printf("Start time: %v", ts.spans[0].start.Format(time.RFC3339Nano))
+ gLogger.Println()
+
+ // Find the minimum start time and maximum end time of all durations.
+ var minStart time.Time
+ var maxEnd time.Time
+ for _, s := range ts.spans {
+ if !s.finished {
+ continue
+ }
+
+ if minStart.IsZero() || s.start.Before(minStart) {
+ minStart = s.start
+ }
+
+ if maxEnd.Before(s.end) {
+ maxEnd = s.end
+ }
+ }
+
+ // Bail out if something weird happened.
+ //
+ // TODO(jacobsa): Be more graceful.
+ totalDuration := maxEnd.Sub(minStart)
+ if minStart.IsZero() || maxEnd.IsZero() || totalDuration <= 0 {
+ gLogger.Println("(Weird trace)")
+ return
+ }
+
+ // Calculate the number of nanoseconds elapsed, as a floating point number.
+ totalNs := float64(totalDuration / time.Nanosecond)
+
+ // Log each span with some ASCII art showing its length relative to the
+ // total.
+ const totalNumCols float64 = 120
+ for _, s := range ts.spans {
+ if !s.finished {
+ gLogger.Printf("(Unfinished: %s)", s.desc)
+ gLogger.Println()
+ continue
+ }
+
+ // Calculate the duration of the span, and its width relative to the
+ // longest span.
+ d := s.end.Sub(s.start)
+ if d <= 0 {
+ gLogger.Println("(Weird duration)")
+ gLogger.Println()
+ continue
+ }
+
+ durationRatio := float64(d/time.Nanosecond) / totalNs
+
+ // We will offset the label and banner proportional to the time since the
+ // start of the earliest span.
+ offsetRatio := float64(s.start.Sub(minStart)/time.Nanosecond) / totalNs
+ offsetChars := int(round(offsetRatio * totalNumCols))
+ offsetStr := strings.Repeat(" ", offsetChars)
+
+ // Print the description and duration.
+ gLogger.Printf("%s%v", offsetStr, s.desc)
+ gLogger.Printf("%s%v", offsetStr, d)
+
+ // Print a banner showing the duration graphically.
+ bannerChars := int(round(durationRatio * totalNumCols))
+ var dashes string
+ if bannerChars > 2 {
+ dashes = strings.Repeat("-", bannerChars-2)
+ }
+
+ gLogger.Printf("%s|%s|", offsetStr, dashes)
+ gLogger.Println()
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/messages.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/messages.go
new file mode 100644
index 00000000000..ae1a15116f5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/messages.go
@@ -0,0 +1,94 @@
+package assertions
+
+const ( // equality
+ shouldHaveBeenEqual = "Expected: '%v'\nActual: '%v'\n(Should be equal)"
+ shouldNotHaveBeenEqual = "Expected '%v'\nto NOT equal '%v'\n(but it did)!"
+ shouldHaveBeenEqualTypeMismatch = "Expected: '%v' (%T)\nActual: '%v' (%T)\n(Should be equal, type mismatch)"
+ shouldHaveBeenAlmostEqual = "Expected '%v' to almost equal '%v' (but it didn't)!"
+ shouldHaveNotBeenAlmostEqual = "Expected '%v' to NOT almost equal '%v' (but it did)!"
+ shouldHaveResembled = "Expected: '%#v'\nActual: '%#v'\n(Should resemble)!"
+ shouldHaveResembledTypeMismatch = "Expected: '%#v' (%T)\nActual: '%#v' (%T)\n(Should resemble, type mismatch)"
+ shouldNotHaveResembled = "Expected '%#v'\nto NOT resemble '%#v'\n(but it did)!"
+ shouldBePointers = "Both arguments should be pointers "
+ shouldHaveBeenNonNilPointer = shouldBePointers + "(the %s was %s)!"
+ shouldHavePointedTo = "Expected '%+v' (address: '%v') and '%+v' (address: '%v') to be the same address (but their weren't)!"
+ shouldNotHavePointedTo = "Expected '%+v' and '%+v' to be different references (but they matched: '%v')!"
+ shouldHaveBeenNil = "Expected: nil\nActual: '%v'"
+ shouldNotHaveBeenNil = "Expected '%+v' to NOT be nil (but it was)!"
+ shouldHaveBeenTrue = "Expected: true\nActual: %v"
+ shouldHaveBeenFalse = "Expected: false\nActual: %v"
+ shouldHaveBeenZeroValue = "'%+v' should have been the zero value" //"Expected: (zero value)\nActual: %v"
+)
+
+const ( // quantity comparisons
+ shouldHaveBeenGreater = "Expected '%v' to be greater than '%v' (but it wasn't)!"
+ shouldHaveBeenGreaterOrEqual = "Expected '%v' to be greater than or equal to '%v' (but it wasn't)!"
+ shouldHaveBeenLess = "Expected '%v' to be less than '%v' (but it wasn't)!"
+ shouldHaveBeenLessOrEqual = "Expected '%v' to be less than or equal to '%v' (but it wasn't)!"
+ shouldHaveBeenBetween = "Expected '%v' to be between '%v' and '%v' (but it wasn't)!"
+ shouldNotHaveBeenBetween = "Expected '%v' NOT to be between '%v' and '%v' (but it was)!"
+ shouldHaveDifferentUpperAndLower = "The lower and upper bounds must be different values (they were both '%v')."
+ shouldHaveBeenBetweenOrEqual = "Expected '%v' to be between '%v' and '%v' or equal to one of them (but it wasn't)!"
+ shouldNotHaveBeenBetweenOrEqual = "Expected '%v' NOT to be between '%v' and '%v' or equal to one of them (but it was)!"
+)
+
+const ( // collections
+ shouldHaveContained = "Expected the container (%v) to contain: '%v' (but it didn't)!"
+ shouldNotHaveContained = "Expected the container (%v) NOT to contain: '%v' (but it did)!"
+ shouldHaveContainedKey = "Expected the %v to contain the key: %v (but it didn't)!"
+ shouldNotHaveContainedKey = "Expected the %v NOT to contain the key: %v (but it did)!"
+ shouldHaveBeenIn = "Expected '%v' to be in the container (%v), but it wasn't!"
+ shouldNotHaveBeenIn = "Expected '%v' NOT to be in the container (%v), but it was!"
+ shouldHaveBeenAValidCollection = "You must provide a valid container (was %v)!"
+ shouldHaveBeenAValidMap = "You must provide a valid map type (was %v)!"
+ shouldHaveBeenEmpty = "Expected %+v to be empty (but it wasn't)!"
+ shouldNotHaveBeenEmpty = "Expected %+v to NOT be empty (but it was)!"
+ shouldHaveBeenAValidInteger = "You must provide a valid integer (was %v)!"
+ shouldHaveBeenAValidLength = "You must provide a valid positive integer (was %v)!"
+ shouldHaveHadLength = "Expected %+v (length: %v) to have length equal to '%v', but it wasn't!"
+)
+
+const ( // strings
+ shouldHaveStartedWith = "Expected '%v'\nto start with '%v'\n(but it didn't)!"
+ shouldNotHaveStartedWith = "Expected '%v'\nNOT to start with '%v'\n(but it did)!"
+ shouldHaveEndedWith = "Expected '%v'\nto end with '%v'\n(but it didn't)!"
+ shouldNotHaveEndedWith = "Expected '%v'\nNOT to end with '%v'\n(but it did)!"
+ shouldAllBeStrings = "All arguments to this assertion must be strings (you provided: %v)."
+ shouldBothBeStrings = "Both arguments to this assertion must be strings (you provided %v and %v)."
+ shouldBeString = "The argument to this assertion must be a string (you provided %v)."
+ shouldHaveContainedSubstring = "Expected '%s' to contain substring '%s' (but it didn't)!"
+ shouldNotHaveContainedSubstring = "Expected '%s' NOT to contain substring '%s' (but it did)!"
+ shouldHaveBeenBlank = "Expected '%s' to be blank (but it wasn't)!"
+ shouldNotHaveBeenBlank = "Expected value to NOT be blank (but it was)!"
+)
+
+const ( // panics
+ shouldUseVoidNiladicFunction = "You must provide a void, niladic function as the first argument!"
+ shouldHavePanickedWith = "Expected func() to panic with '%v' (but it panicked with '%v')!"
+ shouldHavePanicked = "Expected func() to panic (but it didn't)!"
+ shouldNotHavePanicked = "Expected func() NOT to panic (error: '%+v')!"
+ shouldNotHavePanickedWith = "Expected func() NOT to panic with '%v' (but it did)!"
+)
+
+const ( // type checking
+ shouldHaveBeenA = "Expected '%v' to be: '%v' (but was: '%v')!"
+ shouldNotHaveBeenA = "Expected '%v' to NOT be: '%v' (but it was)!"
+
+ shouldHaveImplemented = "Expected: '%v interface support'\nActual: '%v' does not implement the interface!"
+ shouldNotHaveImplemented = "Expected '%v'\nto NOT implement '%v'\n(but it did)!"
+ shouldCompareWithInterfacePointer = "The expected value must be a pointer to an interface type (eg. *fmt.Stringer)"
+ shouldNotBeNilActual = "The actual value was 'nil' and should be a value or a pointer to a value!"
+)
+
+const ( // time comparisons
+ shouldUseTimes = "You must provide time instances as arguments to this assertion."
+ shouldUseTimeSlice = "You must provide a slice of time instances as the first argument to this assertion."
+ shouldUseDurationAndTime = "You must provide a duration and a time as arguments to this assertion."
+ shouldHaveHappenedBefore = "Expected '%v' to happen before '%v' (it happened '%v' after)!"
+ shouldHaveHappenedAfter = "Expected '%v' to happen after '%v' (it happened '%v' before)!"
+ shouldHaveHappenedBetween = "Expected '%v' to happen between '%v' and '%v' (it happened '%v' outside threshold)!"
+ shouldNotHaveHappenedOnOrBetween = "Expected '%v' to NOT happen on or between '%v' and '%v' (but it did)!"
+
+ // format params: incorrect-index, previous-index, previous-time, incorrect-index, incorrect-time
+ shouldHaveBeenChronological = "The 'Time' at index [%d] should have happened after the previous one (but it didn't!):\n [%d]: %s\n [%d]: %s (see, it happened before!)"
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/panic.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/panic.go
new file mode 100644
index 00000000000..7e75db1784b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/panic.go
@@ -0,0 +1,115 @@
+package assertions
+
+import "fmt"
+
+// ShouldPanic receives a void, niladic function and expects to recover a panic.
+func ShouldPanic(actual interface{}, expected ...interface{}) (message string) {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+
+ action, _ := actual.(func())
+
+ if action == nil {
+ message = shouldUseVoidNiladicFunction
+ return
+ }
+
+ defer func() {
+ recovered := recover()
+ if recovered == nil {
+ message = shouldHavePanicked
+ } else {
+ message = success
+ }
+ }()
+ action()
+
+ return
+}
+
+// ShouldNotPanic receives a void, niladic function and expects to execute the function without any panic.
+func ShouldNotPanic(actual interface{}, expected ...interface{}) (message string) {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+
+ action, _ := actual.(func())
+
+ if action == nil {
+ message = shouldUseVoidNiladicFunction
+ return
+ }
+
+ defer func() {
+ recovered := recover()
+ if recovered != nil {
+ message = fmt.Sprintf(shouldNotHavePanicked, recovered)
+ } else {
+ message = success
+ }
+ }()
+ action()
+
+ return
+}
+
+// ShouldPanicWith receives a void, niladic function and expects to recover a panic with the second argument as the content.
+func ShouldPanicWith(actual interface{}, expected ...interface{}) (message string) {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ action, _ := actual.(func())
+
+ if action == nil {
+ message = shouldUseVoidNiladicFunction
+ return
+ }
+
+ defer func() {
+ recovered := recover()
+ if recovered == nil {
+ message = shouldHavePanicked
+ } else {
+ if equal := ShouldEqual(recovered, expected[0]); equal != success {
+ message = serializer.serialize(expected[0], recovered, fmt.Sprintf(shouldHavePanickedWith, expected[0], recovered))
+ } else {
+ message = success
+ }
+ }
+ }()
+ action()
+
+ return
+}
+
+// ShouldNotPanicWith receives a void, niladic function and expects to recover a panic whose content differs from the second argument.
+func ShouldNotPanicWith(actual interface{}, expected ...interface{}) (message string) {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ action, _ := actual.(func())
+
+ if action == nil {
+ message = shouldUseVoidNiladicFunction
+ return
+ }
+
+ defer func() {
+ recovered := recover()
+ if recovered == nil {
+ message = success
+ } else {
+ if equal := ShouldEqual(recovered, expected[0]); equal == success {
+ message = fmt.Sprintf(shouldNotHavePanickedWith, expected[0])
+ } else {
+ message = success
+ }
+ }
+ }()
+ action()
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/panic_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/panic_test.go
new file mode 100644
index 00000000000..15eafac4fbb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/panic_test.go
@@ -0,0 +1,53 @@
+package assertions
+
+import (
+ "fmt"
+ "testing"
+)
+
+func TestShouldPanic(t *testing.T) {
+ fail(t, so(func() {}, ShouldPanic, 1), "This assertion requires exactly 0 comparison values (you provided 1).")
+ fail(t, so(func() {}, ShouldPanic, 1, 2, 3), "This assertion requires exactly 0 comparison values (you provided 3).")
+
+ fail(t, so(1, ShouldPanic), shouldUseVoidNiladicFunction)
+ fail(t, so(func(i int) {}, ShouldPanic), shouldUseVoidNiladicFunction)
+ fail(t, so(func() int { panic("hi") }, ShouldPanic), shouldUseVoidNiladicFunction)
+
+ fail(t, so(func() {}, ShouldPanic), shouldHavePanicked)
+ pass(t, so(func() { panic("hi") }, ShouldPanic))
+}
+
+func TestShouldNotPanic(t *testing.T) {
+ fail(t, so(func() {}, ShouldNotPanic, 1), "This assertion requires exactly 0 comparison values (you provided 1).")
+ fail(t, so(func() {}, ShouldNotPanic, 1, 2, 3), "This assertion requires exactly 0 comparison values (you provided 3).")
+
+ fail(t, so(1, ShouldNotPanic), shouldUseVoidNiladicFunction)
+ fail(t, so(func(i int) {}, ShouldNotPanic), shouldUseVoidNiladicFunction)
+
+ fail(t, so(func() { panic("hi") }, ShouldNotPanic), fmt.Sprintf(shouldNotHavePanicked, "hi"))
+ pass(t, so(func() {}, ShouldNotPanic))
+}
+
+func TestShouldPanicWith(t *testing.T) {
+ fail(t, so(func() {}, ShouldPanicWith), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(func() {}, ShouldPanicWith, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(1, ShouldPanicWith, 1), shouldUseVoidNiladicFunction)
+ fail(t, so(func(i int) {}, ShouldPanicWith, "hi"), shouldUseVoidNiladicFunction)
+ fail(t, so(func() {}, ShouldPanicWith, "bye"), shouldHavePanicked)
+ fail(t, so(func() { panic("hi") }, ShouldPanicWith, "bye"), "bye|hi|Expected func() to panic with 'bye' (but it panicked with 'hi')!")
+
+ pass(t, so(func() { panic("hi") }, ShouldPanicWith, "hi"))
+}
+
+func TestShouldNotPanicWith(t *testing.T) {
+ fail(t, so(func() {}, ShouldNotPanicWith), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(func() {}, ShouldNotPanicWith, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(1, ShouldNotPanicWith, 1), shouldUseVoidNiladicFunction)
+ fail(t, so(func(i int) {}, ShouldNotPanicWith, "hi"), shouldUseVoidNiladicFunction)
+ fail(t, so(func() { panic("hi") }, ShouldNotPanicWith, "hi"), "Expected func() NOT to panic with 'hi' (but it did)!")
+
+ pass(t, so(func() {}, ShouldNotPanicWith, "bye"))
+ pass(t, so(func() { panic("hi") }, ShouldNotPanicWith, "bye"))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/quantity.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/quantity.go
new file mode 100644
index 00000000000..80789f0cc71
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/quantity.go
@@ -0,0 +1,141 @@
+package assertions
+
+import (
+ "fmt"
+
+ "github.com/smartystreets/assertions/internal/oglematchers"
+)
+
+// ShouldBeGreaterThan receives exactly two parameters and ensures that the first is greater than the second.
+func ShouldBeGreaterThan(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ if matchError := oglematchers.GreaterThan(expected[0]).Matches(actual); matchError != nil {
+ return fmt.Sprintf(shouldHaveBeenGreater, actual, expected[0])
+ }
+ return success
+}
+
+// ShouldBeGreaterThanOrEqualTo receives exactly two parameters and ensures that the first is greater than or equal to the second.
+func ShouldBeGreaterThanOrEqualTo(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ } else if matchError := oglematchers.GreaterOrEqual(expected[0]).Matches(actual); matchError != nil {
+ return fmt.Sprintf(shouldHaveBeenGreaterOrEqual, actual, expected[0])
+ }
+ return success
+}
+
+// ShouldBeLessThan receives exactly two parameters and ensures that the first is less than the second.
+func ShouldBeLessThan(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ } else if matchError := oglematchers.LessThan(expected[0]).Matches(actual); matchError != nil {
+ return fmt.Sprintf(shouldHaveBeenLess, actual, expected[0])
+ }
+ return success
+}
+
+// ShouldBeLessThanOrEqualTo receives exactly two parameters and ensures that the first is less than or equal to the second.
+func ShouldBeLessThanOrEqualTo(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ } else if matchError := oglematchers.LessOrEqual(expected[0]).Matches(actual); matchError != nil {
+ return fmt.Sprintf(shouldHaveBeenLess, actual, expected[0])
+ }
+ return success
+}
+
+// ShouldBeBetween receives exactly three parameters: an actual value, a lower bound, and an upper bound.
+// It ensures that the actual value is between both bounds (but not equal to either of them).
+func ShouldBeBetween(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ lower, upper, fail := deriveBounds(expected)
+
+ if fail != success {
+ return fail
+ } else if !isBetween(actual, lower, upper) {
+ return fmt.Sprintf(shouldHaveBeenBetween, actual, lower, upper)
+ }
+ return success
+}
+
+// ShouldNotBeBetween receives exactly three parameters: an actual value, a lower bound, and an upper bound.
+// It ensures that the actual value is NOT between both bounds.
+func ShouldNotBeBetween(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ lower, upper, fail := deriveBounds(expected)
+
+ if fail != success {
+ return fail
+ } else if isBetween(actual, lower, upper) {
+ return fmt.Sprintf(shouldNotHaveBeenBetween, actual, lower, upper)
+ }
+ return success
+}
+func deriveBounds(values []interface{}) (lower interface{}, upper interface{}, fail string) {
+ lower = values[0]
+ upper = values[1]
+
+ if ShouldNotEqual(lower, upper) != success {
+ return nil, nil, fmt.Sprintf(shouldHaveDifferentUpperAndLower, lower)
+ } else if ShouldBeLessThan(lower, upper) != success {
+ lower, upper = upper, lower
+ }
+ return lower, upper, success
+}
+func isBetween(value, lower, upper interface{}) bool {
+ if ShouldBeGreaterThan(value, lower) != success {
+ return false
+ } else if ShouldBeLessThan(value, upper) != success {
+ return false
+ }
+ return true
+}
+
+// ShouldBeBetweenOrEqual receives exactly three parameters: an actual value, a lower bound, and an upper bound.
+// It ensures that the actual value is between both bounds or equal to one of them.
+func ShouldBeBetweenOrEqual(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ lower, upper, fail := deriveBounds(expected)
+
+ if fail != success {
+ return fail
+ } else if !isBetweenOrEqual(actual, lower, upper) {
+ return fmt.Sprintf(shouldHaveBeenBetweenOrEqual, actual, lower, upper)
+ }
+ return success
+}
+
+// ShouldNotBeBetweenOrEqual receives exactly three parameters: an actual value, a lower bound, and an upper bound.
+// It ensures that the actual value is neither between the bounds nor equal to either of them.
+func ShouldNotBeBetweenOrEqual(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ lower, upper, fail := deriveBounds(expected)
+
+ if fail != success {
+ return fail
+ } else if isBetweenOrEqual(actual, lower, upper) {
+ return fmt.Sprintf(shouldNotHaveBeenBetweenOrEqual, actual, lower, upper)
+ }
+ return success
+}
+
+func isBetweenOrEqual(value, lower, upper interface{}) bool {
+ if ShouldBeGreaterThanOrEqualTo(value, lower) != success {
+ return false
+ } else if ShouldBeLessThanOrEqualTo(value, upper) != success {
+ return false
+ }
+ return true
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/quantity_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/quantity_test.go
new file mode 100644
index 00000000000..7546e7250a8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/quantity_test.go
@@ -0,0 +1,145 @@
+package assertions
+
+import "testing"
+
+func TestShouldBeGreaterThan(t *testing.T) {
+ fail(t, so(1, ShouldBeGreaterThan), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(1, ShouldBeGreaterThan, 0, 0), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ pass(t, so(1, ShouldBeGreaterThan, 0))
+ pass(t, so(1.1, ShouldBeGreaterThan, 1))
+ pass(t, so(1, ShouldBeGreaterThan, uint(0)))
+ pass(t, so("b", ShouldBeGreaterThan, "a"))
+
+ fail(t, so(0, ShouldBeGreaterThan, 1), "Expected '0' to be greater than '1' (but it wasn't)!")
+ fail(t, so(1, ShouldBeGreaterThan, 1.1), "Expected '1' to be greater than '1.1' (but it wasn't)!")
+ fail(t, so(uint(0), ShouldBeGreaterThan, 1.1), "Expected '0' to be greater than '1.1' (but it wasn't)!")
+ fail(t, so("a", ShouldBeGreaterThan, "b"), "Expected 'a' to be greater than 'b' (but it wasn't)!")
+}
+
+func TestShouldBeGreaterThanOrEqual(t *testing.T) {
+ fail(t, so(1, ShouldBeGreaterThanOrEqualTo), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(1, ShouldBeGreaterThanOrEqualTo, 0, 0), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ pass(t, so(1, ShouldBeGreaterThanOrEqualTo, 1))
+ pass(t, so(1.1, ShouldBeGreaterThanOrEqualTo, 1.1))
+ pass(t, so(1, ShouldBeGreaterThanOrEqualTo, uint(1)))
+ pass(t, so("b", ShouldBeGreaterThanOrEqualTo, "b"))
+
+ pass(t, so(1, ShouldBeGreaterThanOrEqualTo, 0))
+ pass(t, so(1.1, ShouldBeGreaterThanOrEqualTo, 1))
+ pass(t, so(1, ShouldBeGreaterThanOrEqualTo, uint(0)))
+ pass(t, so("b", ShouldBeGreaterThanOrEqualTo, "a"))
+
+ fail(t, so(0, ShouldBeGreaterThanOrEqualTo, 1), "Expected '0' to be greater than or equal to '1' (but it wasn't)!")
+ fail(t, so(1, ShouldBeGreaterThanOrEqualTo, 1.1), "Expected '1' to be greater than or equal to '1.1' (but it wasn't)!")
+ fail(t, so(uint(0), ShouldBeGreaterThanOrEqualTo, 1.1), "Expected '0' to be greater than or equal to '1.1' (but it wasn't)!")
+ fail(t, so("a", ShouldBeGreaterThanOrEqualTo, "b"), "Expected 'a' to be greater than or equal to 'b' (but it wasn't)!")
+}
+
+func TestShouldBeLessThan(t *testing.T) {
+ fail(t, so(1, ShouldBeLessThan), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(1, ShouldBeLessThan, 0, 0), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ pass(t, so(0, ShouldBeLessThan, 1))
+ pass(t, so(1, ShouldBeLessThan, 1.1))
+ pass(t, so(uint(0), ShouldBeLessThan, 1))
+ pass(t, so("a", ShouldBeLessThan, "b"))
+
+ fail(t, so(1, ShouldBeLessThan, 0), "Expected '1' to be less than '0' (but it wasn't)!")
+ fail(t, so(1.1, ShouldBeLessThan, 1), "Expected '1.1' to be less than '1' (but it wasn't)!")
+ fail(t, so(1.1, ShouldBeLessThan, uint(0)), "Expected '1.1' to be less than '0' (but it wasn't)!")
+ fail(t, so("b", ShouldBeLessThan, "a"), "Expected 'b' to be less than 'a' (but it wasn't)!")
+}
+
+func TestShouldBeLessThanOrEqualTo(t *testing.T) {
+ fail(t, so(1, ShouldBeLessThanOrEqualTo), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(1, ShouldBeLessThanOrEqualTo, 0, 0), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ pass(t, so(1, ShouldBeLessThanOrEqualTo, 1))
+ pass(t, so(1.1, ShouldBeLessThanOrEqualTo, 1.1))
+ pass(t, so(uint(1), ShouldBeLessThanOrEqualTo, 1))
+ pass(t, so("b", ShouldBeLessThanOrEqualTo, "b"))
+
+ pass(t, so(0, ShouldBeLessThanOrEqualTo, 1))
+ pass(t, so(1, ShouldBeLessThanOrEqualTo, 1.1))
+ pass(t, so(uint(0), ShouldBeLessThanOrEqualTo, 1))
+ pass(t, so("a", ShouldBeLessThanOrEqualTo, "b"))
+
+ fail(t, so(1, ShouldBeLessThanOrEqualTo, 0), "Expected '1' to be less than '0' (but it wasn't)!")
+ fail(t, so(1.1, ShouldBeLessThanOrEqualTo, 1), "Expected '1.1' to be less than '1' (but it wasn't)!")
+ fail(t, so(1.1, ShouldBeLessThanOrEqualTo, uint(0)), "Expected '1.1' to be less than '0' (but it wasn't)!")
+ fail(t, so("b", ShouldBeLessThanOrEqualTo, "a"), "Expected 'b' to be less than 'a' (but it wasn't)!")
+}
+
+func TestShouldBeBetween(t *testing.T) {
+ fail(t, so(1, ShouldBeBetween), "This assertion requires exactly 2 comparison values (you provided 0).")
+ fail(t, so(1, ShouldBeBetween, 1, 2, 3), "This assertion requires exactly 2 comparison values (you provided 3).")
+
+ fail(t, so(4, ShouldBeBetween, 1, 1), "The lower and upper bounds must be different values (they were both '1').")
+
+ fail(t, so(7, ShouldBeBetween, 8, 12), "Expected '7' to be between '8' and '12' (but it wasn't)!")
+ fail(t, so(8, ShouldBeBetween, 8, 12), "Expected '8' to be between '8' and '12' (but it wasn't)!")
+ pass(t, so(9, ShouldBeBetween, 8, 12))
+ pass(t, so(10, ShouldBeBetween, 8, 12))
+ pass(t, so(11, ShouldBeBetween, 8, 12))
+ fail(t, so(12, ShouldBeBetween, 8, 12), "Expected '12' to be between '8' and '12' (but it wasn't)!")
+ fail(t, so(13, ShouldBeBetween, 8, 12), "Expected '13' to be between '8' and '12' (but it wasn't)!")
+
+ pass(t, so(1, ShouldBeBetween, 2, 0))
+ fail(t, so(-1, ShouldBeBetween, 2, 0), "Expected '-1' to be between '0' and '2' (but it wasn't)!")
+}
+
+func TestShouldNotBeBetween(t *testing.T) {
+ fail(t, so(1, ShouldNotBeBetween), "This assertion requires exactly 2 comparison values (you provided 0).")
+ fail(t, so(1, ShouldNotBeBetween, 1, 2, 3), "This assertion requires exactly 2 comparison values (you provided 3).")
+
+ fail(t, so(4, ShouldNotBeBetween, 1, 1), "The lower and upper bounds must be different values (they were both '1').")
+
+ pass(t, so(7, ShouldNotBeBetween, 8, 12))
+ pass(t, so(8, ShouldNotBeBetween, 8, 12))
+ fail(t, so(9, ShouldNotBeBetween, 8, 12), "Expected '9' NOT to be between '8' and '12' (but it was)!")
+ fail(t, so(10, ShouldNotBeBetween, 8, 12), "Expected '10' NOT to be between '8' and '12' (but it was)!")
+ fail(t, so(11, ShouldNotBeBetween, 8, 12), "Expected '11' NOT to be between '8' and '12' (but it was)!")
+ pass(t, so(12, ShouldNotBeBetween, 8, 12))
+ pass(t, so(13, ShouldNotBeBetween, 8, 12))
+
+ pass(t, so(-1, ShouldNotBeBetween, 2, 0))
+ fail(t, so(1, ShouldNotBeBetween, 2, 0), "Expected '1' NOT to be between '0' and '2' (but it was)!")
+}
+
+func TestShouldBeBetweenOrEqual(t *testing.T) {
+ fail(t, so(1, ShouldBeBetweenOrEqual), "This assertion requires exactly 2 comparison values (you provided 0).")
+ fail(t, so(1, ShouldBeBetweenOrEqual, 1, 2, 3), "This assertion requires exactly 2 comparison values (you provided 3).")
+
+ fail(t, so(4, ShouldBeBetweenOrEqual, 1, 1), "The lower and upper bounds must be different values (they were both '1').")
+
+ fail(t, so(7, ShouldBeBetweenOrEqual, 8, 12), "Expected '7' to be between '8' and '12' or equal to one of them (but it wasn't)!")
+ pass(t, so(8, ShouldBeBetweenOrEqual, 8, 12))
+ pass(t, so(9, ShouldBeBetweenOrEqual, 8, 12))
+ pass(t, so(10, ShouldBeBetweenOrEqual, 8, 12))
+ pass(t, so(11, ShouldBeBetweenOrEqual, 8, 12))
+ pass(t, so(12, ShouldBeBetweenOrEqual, 8, 12))
+ fail(t, so(13, ShouldBeBetweenOrEqual, 8, 12), "Expected '13' to be between '8' and '12' or equal to one of them (but it wasn't)!")
+
+ pass(t, so(1, ShouldBeBetweenOrEqual, 2, 0))
+ fail(t, so(-1, ShouldBeBetweenOrEqual, 2, 0), "Expected '-1' to be between '0' and '2' or equal to one of them (but it wasn't)!")
+}
+
+func TestShouldNotBeBetweenOrEqual(t *testing.T) {
+ fail(t, so(1, ShouldNotBeBetweenOrEqual), "This assertion requires exactly 2 comparison values (you provided 0).")
+ fail(t, so(1, ShouldNotBeBetweenOrEqual, 1, 2, 3), "This assertion requires exactly 2 comparison values (you provided 3).")
+
+ fail(t, so(4, ShouldNotBeBetweenOrEqual, 1, 1), "The lower and upper bounds must be different values (they were both '1').")
+
+ pass(t, so(7, ShouldNotBeBetweenOrEqual, 8, 12))
+ fail(t, so(8, ShouldNotBeBetweenOrEqual, 8, 12), "Expected '8' NOT to be between '8' and '12' or equal to one of them (but it was)!")
+ fail(t, so(9, ShouldNotBeBetweenOrEqual, 8, 12), "Expected '9' NOT to be between '8' and '12' or equal to one of them (but it was)!")
+ fail(t, so(10, ShouldNotBeBetweenOrEqual, 8, 12), "Expected '10' NOT to be between '8' and '12' or equal to one of them (but it was)!")
+ fail(t, so(11, ShouldNotBeBetweenOrEqual, 8, 12), "Expected '11' NOT to be between '8' and '12' or equal to one of them (but it was)!")
+ fail(t, so(12, ShouldNotBeBetweenOrEqual, 8, 12), "Expected '12' NOT to be between '8' and '12' or equal to one of them (but it was)!")
+ pass(t, so(13, ShouldNotBeBetweenOrEqual, 8, 12))
+
+ pass(t, so(-1, ShouldNotBeBetweenOrEqual, 2, 0))
+ fail(t, so(1, ShouldNotBeBetweenOrEqual, 2, 0), "Expected '1' NOT to be between '0' and '2' or equal to one of them (but it was)!")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/serializer.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/serializer.go
new file mode 100644
index 00000000000..90ae3e3b692
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/serializer.go
@@ -0,0 +1,69 @@
+package assertions
+
+import (
+ "encoding/json"
+ "fmt"
+
+ "github.com/smartystreets/assertions/internal/go-render/render"
+)
+
+type Serializer interface {
+ serialize(expected, actual interface{}, message string) string
+ serializeDetailed(expected, actual interface{}, message string) string
+}
+
+type failureSerializer struct{}
+
+func (self *failureSerializer) serializeDetailed(expected, actual interface{}, message string) string {
+ view := FailureView{
+ Message: message,
+ Expected: render.Render(expected),
+ Actual: render.Render(actual),
+ }
+ serialized, err := json.Marshal(view)
+ if err != nil {
+ return message
+ }
+ return string(serialized)
+}
+
+func (self *failureSerializer) serialize(expected, actual interface{}, message string) string {
+ view := FailureView{
+ Message: message,
+ Expected: fmt.Sprintf("%+v", expected),
+ Actual: fmt.Sprintf("%+v", actual),
+ }
+ serialized, err := json.Marshal(view)
+ if err != nil {
+ return message
+ }
+ return string(serialized)
+}
+
+func newSerializer() *failureSerializer {
+ return &failureSerializer{}
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+// This struct is also declared in github.com/smartystreets/goconvey/convey/reporting.
+// The json struct tags should be equal in both declarations.
+type FailureView struct {
+ Message string `json:"Message"`
+ Expected string `json:"Expected"`
+ Actual string `json:"Actual"`
+}
+
+///////////////////////////////////////////////////////
+
+// noopSerializer just gives back the original message. This is useful when the
+// assertions are used from a context other than the web UI, which requires the
+// JSON structure provided by the failureSerializer.
+type noopSerializer struct{}
+
+func (self *noopSerializer) serialize(expected, actual interface{}, message string) string {
+ return message
+}
+func (self *noopSerializer) serializeDetailed(expected, actual interface{}, message string) string {
+ return message
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/serializer_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/serializer_test.go
new file mode 100644
index 00000000000..597b40ac183
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/serializer_test.go
@@ -0,0 +1,36 @@
+package assertions
+
+import (
+ "encoding/json"
+ "fmt"
+ "testing"
+)
+
+func TestSerializerCreatesSerializedVersionOfAssertionResult(t *testing.T) {
+ thing1 := Thing1{"Hi"}
+ thing2 := Thing2{"Bye"}
+ message := "Super-hip failure message."
+ serializer := newSerializer()
+
+ actualResult := serializer.serialize(thing1, thing2, message)
+
+ expectedResult, _ := json.Marshal(FailureView{
+ Message: message,
+ Expected: fmt.Sprintf("%+v", thing1),
+ Actual: fmt.Sprintf("%+v", thing2),
+ })
+
+ if actualResult != string(expectedResult) {
+ t.Errorf("\nExpected: %s\nActual: %s", string(expectedResult), actualResult)
+ }
+
+ actualResult = serializer.serializeDetailed(thing1, thing2, message)
+ expectedResult, _ = json.Marshal(FailureView{
+ Message: message,
+ Expected: fmt.Sprintf("%#v", thing1),
+ Actual: fmt.Sprintf("%#v", thing2),
+ })
+ if actualResult != string(expectedResult) {
+ t.Errorf("\nExpected: %s\nActual: %s", string(expectedResult), actualResult)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/should/should.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/should/should.go
new file mode 100644
index 00000000000..596e43b8f8b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/should/should.go
@@ -0,0 +1,73 @@
+// Package should is simply a rewording of the assertion
+// functions in the assertions package.
+package should
+
+import "github.com/smartystreets/assertions"
+
+var (
+ Equal = assertions.ShouldEqual
+ NotEqual = assertions.ShouldNotEqual
+ AlmostEqual = assertions.ShouldAlmostEqual
+ NotAlmostEqual = assertions.ShouldNotAlmostEqual
+ Resemble = assertions.ShouldResemble
+ NotResemble = assertions.ShouldNotResemble
+ PointTo = assertions.ShouldPointTo
+ NotPointTo = assertions.ShouldNotPointTo
+ BeNil = assertions.ShouldBeNil
+ NotBeNil = assertions.ShouldNotBeNil
+ BeTrue = assertions.ShouldBeTrue
+ BeFalse = assertions.ShouldBeFalse
+ BeZeroValue = assertions.ShouldBeZeroValue
+
+ BeGreaterThan = assertions.ShouldBeGreaterThan
+ BeGreaterThanOrEqualTo = assertions.ShouldBeGreaterThanOrEqualTo
+ BeLessThan = assertions.ShouldBeLessThan
+ BeLessThanOrEqualTo = assertions.ShouldBeLessThanOrEqualTo
+ BeBetween = assertions.ShouldBeBetween
+ NotBeBetween = assertions.ShouldNotBeBetween
+ BeBetweenOrEqual = assertions.ShouldBeBetweenOrEqual
+ NotBeBetweenOrEqual = assertions.ShouldNotBeBetweenOrEqual
+
+ Contain = assertions.ShouldContain
+ NotContain = assertions.ShouldNotContain
+ ContainKey = assertions.ShouldContainKey
+ NotContainKey = assertions.ShouldNotContainKey
+ BeIn = assertions.ShouldBeIn
+ NotBeIn = assertions.ShouldNotBeIn
+ BeEmpty = assertions.ShouldBeEmpty
+ NotBeEmpty = assertions.ShouldNotBeEmpty
+ HaveLength = assertions.ShouldHaveLength
+
+ StartWith = assertions.ShouldStartWith
+ NotStartWith = assertions.ShouldNotStartWith
+ EndWith = assertions.ShouldEndWith
+ NotEndWith = assertions.ShouldNotEndWith
+ BeBlank = assertions.ShouldBeBlank
+ NotBeBlank = assertions.ShouldNotBeBlank
+ ContainSubstring = assertions.ShouldContainSubstring
+ NotContainSubstring = assertions.ShouldNotContainSubstring
+
+ EqualWithout = assertions.ShouldEqualWithout
+ EqualTrimSpace = assertions.ShouldEqualTrimSpace
+
+ Panic = assertions.ShouldPanic
+ NotPanic = assertions.ShouldNotPanic
+ PanicWith = assertions.ShouldPanicWith
+ NotPanicWith = assertions.ShouldNotPanicWith
+
+ HaveSameTypeAs = assertions.ShouldHaveSameTypeAs
+ NotHaveSameTypeAs = assertions.ShouldNotHaveSameTypeAs
+ Implement = assertions.ShouldImplement
+ NotImplement = assertions.ShouldNotImplement
+
+ HappenBefore = assertions.ShouldHappenBefore
+ HappenOnOrBefore = assertions.ShouldHappenOnOrBefore
+ HappenAfter = assertions.ShouldHappenAfter
+ HappenOnOrAfter = assertions.ShouldHappenOnOrAfter
+ HappenBetween = assertions.ShouldHappenBetween
+ HappenOnOrBetween = assertions.ShouldHappenOnOrBetween
+ NotHappenOnOrBetween = assertions.ShouldNotHappenOnOrBetween
+ HappenWithin = assertions.ShouldHappenWithin
+ NotHappenWithin = assertions.ShouldNotHappenWithin
+ BeChronological = assertions.ShouldBeChronological
+)
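
Since each alias in the should package is just the corresponding assertions.Should* function, the aliases can be called directly; every assertion returns the empty string on success and a failure message otherwise. A hedged usage sketch, assuming the canonical import path rather than this vendored copy:

```go
package main

import (
	"fmt"

	"github.com/smartystreets/assertions/should"
)

func main() {
	// should.StartWith is assertions.ShouldStartWith under another name:
	// "" means the assertion passed, anything else is the failure message.
	if msg := should.StartWith("superman", "super"); msg == "" {
		fmt.Println("passed")
	} else {
		fmt.Println("failed:", msg)
	}
}
```
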
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/strings.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/strings.go
new file mode 100644
index 00000000000..dbc3f04790e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/strings.go
@@ -0,0 +1,227 @@
+package assertions
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// ShouldStartWith receives exactly 2 string parameters and ensures that the first starts with the second.
+func ShouldStartWith(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ value, valueIsString := actual.(string)
+ prefix, prefixIsString := expected[0].(string)
+
+ if !valueIsString || !prefixIsString {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ return shouldStartWith(value, prefix)
+}
+func shouldStartWith(value, prefix string) string {
+ if !strings.HasPrefix(value, prefix) {
+ shortval := value
+ if len(shortval) > len(prefix) {
+ shortval = shortval[:len(prefix)] + "..."
+ }
+ return serializer.serialize(prefix, shortval, fmt.Sprintf(shouldHaveStartedWith, value, prefix))
+ }
+ return success
+}
+
+// ShouldNotStartWith receives exactly 2 string parameters and ensures that the first does not start with the second.
+func ShouldNotStartWith(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ value, valueIsString := actual.(string)
+ prefix, prefixIsString := expected[0].(string)
+
+ if !valueIsString || !prefixIsString {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ return shouldNotStartWith(value, prefix)
+}
+func shouldNotStartWith(value, prefix string) string {
+ if strings.HasPrefix(value, prefix) {
+ if value == "" {
+ value = "<empty>"
+ }
+ if prefix == "" {
+ prefix = "<empty>"
+ }
+ return fmt.Sprintf(shouldNotHaveStartedWith, value, prefix)
+ }
+ return success
+}
+
+// ShouldEndWith receives exactly 2 string parameters and ensures that the first ends with the second.
+func ShouldEndWith(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ value, valueIsString := actual.(string)
+ suffix, suffixIsString := expected[0].(string)
+
+ if !valueIsString || !suffixIsString {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ return shouldEndWith(value, suffix)
+}
+func shouldEndWith(value, suffix string) string {
+ if !strings.HasSuffix(value, suffix) {
+ shortval := value
+ if len(shortval) > len(suffix) {
+ shortval = "..." + shortval[len(shortval)-len(suffix):]
+ }
+ return serializer.serialize(suffix, shortval, fmt.Sprintf(shouldHaveEndedWith, value, suffix))
+ }
+ return success
+}
+
+// ShouldNotEndWith receives exactly 2 string parameters and ensures that the first does not end with the second.
+func ShouldNotEndWith(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ value, valueIsString := actual.(string)
+ suffix, suffixIsString := expected[0].(string)
+
+ if !valueIsString || !suffixIsString {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ return shouldNotEndWith(value, suffix)
+}
+func shouldNotEndWith(value, suffix string) string {
+ if strings.HasSuffix(value, suffix) {
+ if value == "" {
+ value = "<empty>"
+ }
+ if suffix == "" {
+ suffix = "<empty>"
+ }
+ return fmt.Sprintf(shouldNotHaveEndedWith, value, suffix)
+ }
+ return success
+}
+
+// ShouldContainSubstring receives exactly 2 string parameters and ensures that the first contains the second as a substring.
+func ShouldContainSubstring(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ long, longOk := actual.(string)
+ short, shortOk := expected[0].(string)
+
+ if !longOk || !shortOk {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ if !strings.Contains(long, short) {
+ return serializer.serialize(expected[0], actual, fmt.Sprintf(shouldHaveContainedSubstring, long, short))
+ }
+ return success
+}
+
+// ShouldNotContainSubstring receives exactly 2 string parameters and ensures that the first does NOT contain the second as a substring.
+func ShouldNotContainSubstring(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ long, longOk := actual.(string)
+ short, shortOk := expected[0].(string)
+
+ if !longOk || !shortOk {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ if strings.Contains(long, short) {
+ return fmt.Sprintf(shouldNotHaveContainedSubstring, long, short)
+ }
+ return success
+}
+
+// ShouldBeBlank receives exactly 1 string parameter and ensures that it is equal to "".
+func ShouldBeBlank(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+ value, ok := actual.(string)
+ if !ok {
+ return fmt.Sprintf(shouldBeString, reflect.TypeOf(actual))
+ }
+ if value != "" {
+ return serializer.serialize("", value, fmt.Sprintf(shouldHaveBeenBlank, value))
+ }
+ return success
+}
+
+// ShouldNotBeBlank receives exactly 1 string parameter and ensures that it is NOT equal to "".
+func ShouldNotBeBlank(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+ value, ok := actual.(string)
+ if !ok {
+ return fmt.Sprintf(shouldBeString, reflect.TypeOf(actual))
+ }
+ if value == "" {
+ return shouldNotHaveBeenBlank
+ }
+ return success
+}
+
+// ShouldEqualWithout receives exactly 3 string parameters and ensures that the first is equal to the second
+// after removing all instances of the third from the first using strings.Replace(first, third, "", -1).
+func ShouldEqualWithout(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualString, ok1 := actual.(string)
+ expectedString, ok2 := expected[0].(string)
+ replace, ok3 := expected[1].(string)
+
+ if !ok1 || !ok2 || !ok3 {
+ return fmt.Sprintf(shouldAllBeStrings, []reflect.Type{
+ reflect.TypeOf(actual),
+ reflect.TypeOf(expected[0]),
+ reflect.TypeOf(expected[1]),
+ })
+ }
+
+ replaced := strings.Replace(actualString, replace, "", -1)
+ if replaced == expectedString {
+ return ""
+ }
+
+ return fmt.Sprintf("Expected '%s' to equal '%s' but without any '%s' (but it didn't).", actualString, expectedString, replace)
+}
+
+// ShouldEqualTrimSpace receives exactly 2 string parameters and ensures that the first is equal to the second
+// after removing all leading and trailing whitespace using strings.TrimSpace(first).
+func ShouldEqualTrimSpace(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ actualString, valueIsString := actual.(string)
+ _, value2IsString := expected[0].(string)
+
+ if !valueIsString || !value2IsString {
+ return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0]))
+ }
+
+ actualString = strings.TrimSpace(actualString)
+ return ShouldEqual(actualString, expected[0])
+}
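
As the doc comments above describe, ShouldEqualWithout strips every occurrence of its third argument from the actual value before comparing, and ShouldEqualTrimSpace trims leading and trailing whitespace from the actual value first. A small sketch of calling them directly (an empty string means the assertion passed), assuming the canonical import path rather than this vendored copy:

```go
package main

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func main() {
	// "a-s-d-f" with every "-" removed equals "asdf", so this passes (prints "").
	fmt.Printf("%q\n", assertions.ShouldEqualWithout("a-s-d-f", "asdf", "-"))

	// Leading/trailing whitespace is trimmed from the actual value before comparing.
	fmt.Printf("%q\n", assertions.ShouldEqualTrimSpace("  asdf\n", "asdf"))
}
```
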
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/strings_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/strings_test.go
new file mode 100644
index 00000000000..ad8d0c88585
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/strings_test.go
@@ -0,0 +1,118 @@
+package assertions
+
+import "testing"
+
+func TestShouldStartWith(t *testing.T) {
+ serializer = newFakeSerializer()
+
+ fail(t, so("", ShouldStartWith), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so("", ShouldStartWith, "asdf", "asdf"), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ pass(t, so("", ShouldStartWith, ""))
+ fail(t, so("", ShouldStartWith, "x"), "x||Expected '' to start with 'x' (but it didn't)!")
+ pass(t, so("abc", ShouldStartWith, "abc"))
+ fail(t, so("abc", ShouldStartWith, "abcd"), "abcd|abc|Expected 'abc' to start with 'abcd' (but it didn't)!")
+
+ pass(t, so("superman", ShouldStartWith, "super"))
+ fail(t, so("superman", ShouldStartWith, "bat"), "bat|sup...|Expected 'superman' to start with 'bat' (but it didn't)!")
+ fail(t, so("superman", ShouldStartWith, "man"), "man|sup...|Expected 'superman' to start with 'man' (but it didn't)!")
+
+ fail(t, so(1, ShouldStartWith, 2), "Both arguments to this assertion must be strings (you provided int and int).")
+}
+
+func TestShouldNotStartWith(t *testing.T) {
+ fail(t, so("", ShouldNotStartWith), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so("", ShouldNotStartWith, "asdf", "asdf"), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ fail(t, so("", ShouldNotStartWith, ""), "Expected '<empty>' NOT to start with '<empty>' (but it did)!")
+ fail(t, so("superman", ShouldNotStartWith, "super"), "Expected 'superman' NOT to start with 'super' (but it did)!")
+ pass(t, so("superman", ShouldNotStartWith, "bat"))
+ pass(t, so("superman", ShouldNotStartWith, "man"))
+
+ fail(t, so(1, ShouldNotStartWith, 2), "Both arguments to this assertion must be strings (you provided int and int).")
+}
+
+func TestShouldEndWith(t *testing.T) {
+ serializer = newFakeSerializer()
+
+ fail(t, so("", ShouldEndWith), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so("", ShouldEndWith, "", ""), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ pass(t, so("", ShouldEndWith, ""))
+ fail(t, so("", ShouldEndWith, "z"), "z||Expected '' to end with 'z' (but it didn't)!")
+ pass(t, so("xyz", ShouldEndWith, "xyz"))
+ fail(t, so("xyz", ShouldEndWith, "wxyz"), "wxyz|xyz|Expected 'xyz' to end with 'wxyz' (but it didn't)!")
+
+ pass(t, so("superman", ShouldEndWith, "man"))
+ fail(t, so("superman", ShouldEndWith, "super"), "super|...erman|Expected 'superman' to end with 'super' (but it didn't)!")
+ fail(t, so("superman", ShouldEndWith, "blah"), "blah|...rman|Expected 'superman' to end with 'blah' (but it didn't)!")
+
+ fail(t, so(1, ShouldEndWith, 2), "Both arguments to this assertion must be strings (you provided int and int).")
+}
+
+func TestShouldNotEndWith(t *testing.T) {
+ fail(t, so("", ShouldNotEndWith), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so("", ShouldNotEndWith, "", ""), "This assertion requires exactly 1 comparison values (you provided 2).")
+
+ fail(t, so("", ShouldNotEndWith, ""), "Expected '<empty>' NOT to end with '<empty>' (but it did)!")
+ fail(t, so("superman", ShouldNotEndWith, "man"), "Expected 'superman' NOT to end with 'man' (but it did)!")
+ pass(t, so("superman", ShouldNotEndWith, "super"))
+
+ fail(t, so(1, ShouldNotEndWith, 2), "Both arguments to this assertion must be strings (you provided int and int).")
+}
+
+func TestShouldContainSubstring(t *testing.T) {
+ serializer = newFakeSerializer()
+
+ fail(t, so("asdf", ShouldContainSubstring), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so("asdf", ShouldContainSubstring, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(123, ShouldContainSubstring, 23), "Both arguments to this assertion must be strings (you provided int and int).")
+
+ pass(t, so("asdf", ShouldContainSubstring, "sd"))
+ fail(t, so("qwer", ShouldContainSubstring, "sd"), "sd|qwer|Expected 'qwer' to contain substring 'sd' (but it didn't)!")
+}
+
+func TestShouldNotContainSubstring(t *testing.T) {
+ fail(t, so("asdf", ShouldNotContainSubstring), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so("asdf", ShouldNotContainSubstring, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(123, ShouldNotContainSubstring, 23), "Both arguments to this assertion must be strings (you provided int and int).")
+
+ pass(t, so("qwer", ShouldNotContainSubstring, "sd"))
+ fail(t, so("asdf", ShouldNotContainSubstring, "sd"), "Expected 'asdf' NOT to contain substring 'sd' (but it did)!")
+}
+
+func TestShouldBeBlank(t *testing.T) {
+ serializer = newFakeSerializer()
+
+ fail(t, so("", ShouldBeBlank, "adsf"), "This assertion requires exactly 0 comparison values (you provided 1).")
+ fail(t, so(1, ShouldBeBlank), "The argument to this assertion must be a string (you provided int).")
+
+ fail(t, so("asdf", ShouldBeBlank), "|asdf|Expected 'asdf' to be blank (but it wasn't)!")
+ pass(t, so("", ShouldBeBlank))
+}
+
+func TestShouldNotBeBlank(t *testing.T) {
+ fail(t, so("", ShouldNotBeBlank, "adsf"), "This assertion requires exactly 0 comparison values (you provided 1).")
+ fail(t, so(1, ShouldNotBeBlank), "The argument to this assertion must be a string (you provided int).")
+
+ fail(t, so("", ShouldNotBeBlank), "Expected value to NOT be blank (but it was)!")
+ pass(t, so("asdf", ShouldNotBeBlank))
+}
+
+func TestShouldEqualWithout(t *testing.T) {
+ fail(t, so("", ShouldEqualWithout, ""), "This assertion requires exactly 2 comparison values (you provided 1).")
+ fail(t, so(1, ShouldEqualWithout, 2, 3), "All arguments to this assertion must be strings (you provided: [int int int]).")
+
+ fail(t, so("asdf", ShouldEqualWithout, "qwer", "q"), "Expected 'asdf' to equal 'qwer' but without any 'q' (but it didn't).")
+ pass(t, so("asdf", ShouldEqualWithout, "df", "as"))
+}
+
+func TestShouldEqualTrimSpace(t *testing.T) {
+ fail(t, so(" asdf ", ShouldEqualTrimSpace), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(1, ShouldEqualTrimSpace, 2), "Both arguments to this assertion must be strings (you provided int and int).")
+
+ fail(t, so("asdf", ShouldEqualTrimSpace, "qwer"), "qwer|asdf|Expected: 'qwer' Actual: 'asdf' (Should be equal)")
+ pass(t, so(" asdf\t\n", ShouldEqualTrimSpace, "asdf"))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/time.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/time.go
new file mode 100644
index 00000000000..7e05026143f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/time.go
@@ -0,0 +1,202 @@
+package assertions
+
+import (
+ "fmt"
+ "time"
+)
+
+// ShouldHappenBefore receives exactly 2 time.Time arguments and asserts that the first happens before the second.
+func ShouldHappenBefore(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ expectedTime, secondOk := expected[0].(time.Time)
+
+ if !firstOk || !secondOk {
+ return shouldUseTimes
+ }
+
+ if !actualTime.Before(expectedTime) {
+ return fmt.Sprintf(shouldHaveHappenedBefore, actualTime, expectedTime, actualTime.Sub(expectedTime))
+ }
+
+ return success
+}
+
+// ShouldHappenOnOrBefore receives exactly 2 time.Time arguments and asserts that the first happens on or before the second.
+func ShouldHappenOnOrBefore(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ expectedTime, secondOk := expected[0].(time.Time)
+
+ if !firstOk || !secondOk {
+ return shouldUseTimes
+ }
+
+ if actualTime.Equal(expectedTime) {
+ return success
+ }
+ return ShouldHappenBefore(actualTime, expectedTime)
+}
+
+// ShouldHappenAfter receives exactly 2 time.Time arguments and asserts that the first happens after the second.
+func ShouldHappenAfter(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ expectedTime, secondOk := expected[0].(time.Time)
+
+ if !firstOk || !secondOk {
+ return shouldUseTimes
+ }
+ if !actualTime.After(expectedTime) {
+ return fmt.Sprintf(shouldHaveHappenedAfter, actualTime, expectedTime, expectedTime.Sub(actualTime))
+ }
+ return success
+}
+
+// ShouldHappenOnOrAfter receives exactly 2 time.Time arguments and asserts that the first happens on or after the second.
+func ShouldHappenOnOrAfter(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ expectedTime, secondOk := expected[0].(time.Time)
+
+ if !firstOk || !secondOk {
+ return shouldUseTimes
+ }
+ if actualTime.Equal(expectedTime) {
+ return success
+ }
+ return ShouldHappenAfter(actualTime, expectedTime)
+}
+
+// ShouldHappenBetween receives exactly 3 time.Time arguments and asserts that the first happens between (not on) the second and third.
+func ShouldHappenBetween(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ min, secondOk := expected[0].(time.Time)
+ max, thirdOk := expected[1].(time.Time)
+
+ if !firstOk || !secondOk || !thirdOk {
+ return shouldUseTimes
+ }
+
+ if !actualTime.After(min) {
+ return fmt.Sprintf(shouldHaveHappenedBetween, actualTime, min, max, min.Sub(actualTime))
+ }
+ if !actualTime.Before(max) {
+ return fmt.Sprintf(shouldHaveHappenedBetween, actualTime, min, max, actualTime.Sub(max))
+ }
+ return success
+}
+
+// ShouldHappenOnOrBetween receives exactly 3 time.Time arguments and asserts that the first happens between or on the second and third.
+func ShouldHappenOnOrBetween(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ min, secondOk := expected[0].(time.Time)
+ max, thirdOk := expected[1].(time.Time)
+
+ if !firstOk || !secondOk || !thirdOk {
+ return shouldUseTimes
+ }
+ if actualTime.Equal(min) || actualTime.Equal(max) {
+ return success
+ }
+ return ShouldHappenBetween(actualTime, min, max)
+}
+
+// ShouldNotHappenOnOrBetween receives exactly 3 time.Time arguments and asserts that the first
+// does NOT happen between or on the second or third.
+func ShouldNotHappenOnOrBetween(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ min, secondOk := expected[0].(time.Time)
+ max, thirdOk := expected[1].(time.Time)
+
+ if !firstOk || !secondOk || !thirdOk {
+ return shouldUseTimes
+ }
+ if actualTime.Equal(min) || actualTime.Equal(max) {
+ return fmt.Sprintf(shouldNotHaveHappenedOnOrBetween, actualTime, min, max)
+ }
+ if actualTime.After(min) && actualTime.Before(max) {
+ return fmt.Sprintf(shouldNotHaveHappenedOnOrBetween, actualTime, min, max)
+ }
+ return success
+}
+
+// ShouldHappenWithin receives a time.Time, a time.Duration, and a time.Time (3 arguments)
+// and asserts that the first time.Time happens within or on the duration specified relative to
+// the other time.Time.
+func ShouldHappenWithin(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ tolerance, secondOk := expected[0].(time.Duration)
+ threshold, thirdOk := expected[1].(time.Time)
+
+ if !firstOk || !secondOk || !thirdOk {
+ return shouldUseDurationAndTime
+ }
+
+ min := threshold.Add(-tolerance)
+ max := threshold.Add(tolerance)
+ return ShouldHappenOnOrBetween(actualTime, min, max)
+}
+
+// ShouldNotHappenWithin receives a time.Time, a time.Duration, and a time.Time (3 arguments)
+// and asserts that the first time.Time does NOT happen within or on the duration specified relative to
+// the other time.Time.
+func ShouldNotHappenWithin(actual interface{}, expected ...interface{}) string {
+ if fail := need(2, expected); fail != success {
+ return fail
+ }
+ actualTime, firstOk := actual.(time.Time)
+ tolerance, secondOk := expected[0].(time.Duration)
+ threshold, thirdOk := expected[1].(time.Time)
+
+ if !firstOk || !secondOk || !thirdOk {
+ return shouldUseDurationAndTime
+ }
+
+ min := threshold.Add(-tolerance)
+ max := threshold.Add(tolerance)
+ return ShouldNotHappenOnOrBetween(actualTime, min, max)
+}
+
+// ShouldBeChronological receives a []time.Time slice and asserts that they are
+// in chronological order starting with the first time.Time as the earliest.
+func ShouldBeChronological(actual interface{}, expected ...interface{}) string {
+ if fail := need(0, expected); fail != success {
+ return fail
+ }
+
+ times, ok := actual.([]time.Time)
+ if !ok {
+ return shouldUseTimeSlice
+ }
+
+ var previous time.Time
+ for i, current := range times {
+ if i > 0 && current.Before(previous) {
+ return fmt.Sprintf(shouldHaveBeenChronological,
+ i, i-1, previous.String(), i, current.String())
+ }
+ previous = current
+ }
+ return ""
+}
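
ShouldHappenWithin above builds the window [threshold-tolerance, threshold+tolerance] and delegates to ShouldHappenOnOrBetween. A small sketch under that reading, assuming the canonical import path rather than this vendored copy:

```go
package main

import (
	"fmt"
	"time"

	"github.com/smartystreets/assertions"
)

func main() {
	deadline := time.Date(2013, 1, 3, 0, 0, 0, 0, time.UTC)
	actual := deadline.Add(12 * time.Hour)

	// Passes (prints ""): actual falls inside [deadline-24h, deadline+24h].
	fmt.Printf("%q\n", assertions.ShouldHappenWithin(actual, 24*time.Hour, deadline))

	// Fails: 3 days after the deadline is outside the window, so a message comes back.
	fmt.Printf("%q\n", assertions.ShouldHappenWithin(deadline.Add(72*time.Hour), 24*time.Hour, deadline))
}
```
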
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/time_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/time_test.go
new file mode 100644
index 00000000000..f9dda8f8f34
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/time_test.go
@@ -0,0 +1,159 @@
+package assertions
+
+import (
+ "fmt"
+ "testing"
+ "time"
+)
+
+func TestShouldHappenBefore(t *testing.T) {
+ fail(t, so(0, ShouldHappenBefore), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(0, ShouldHappenBefore, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(0, ShouldHappenBefore, 1), shouldUseTimes)
+ fail(t, so(0, ShouldHappenBefore, time.Now()), shouldUseTimes)
+ fail(t, so(time.Now(), ShouldHappenBefore, 0), shouldUseTimes)
+
+ fail(t, so(january3, ShouldHappenBefore, january1), fmt.Sprintf("Expected '%s' to happen before '%s' (it happened '48h0m0s' after)!", pretty(january3), pretty(january1)))
+ fail(t, so(january3, ShouldHappenBefore, january3), fmt.Sprintf("Expected '%s' to happen before '%s' (it happened '0' after)!", pretty(january3), pretty(january3)))
+ pass(t, so(january1, ShouldHappenBefore, january3))
+}
+
+func TestShouldHappenOnOrBefore(t *testing.T) {
+ fail(t, so(0, ShouldHappenOnOrBefore), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(0, ShouldHappenOnOrBefore, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(0, ShouldHappenOnOrBefore, 1), shouldUseTimes)
+ fail(t, so(0, ShouldHappenOnOrBefore, time.Now()), shouldUseTimes)
+ fail(t, so(time.Now(), ShouldHappenOnOrBefore, 0), shouldUseTimes)
+
+ fail(t, so(january3, ShouldHappenOnOrBefore, january1), fmt.Sprintf("Expected '%s' to happen before '%s' (it happened '48h0m0s' after)!", pretty(january3), pretty(january1)))
+ pass(t, so(january3, ShouldHappenOnOrBefore, january3))
+ pass(t, so(january1, ShouldHappenOnOrBefore, january3))
+}
+
+func TestShouldHappenAfter(t *testing.T) {
+ fail(t, so(0, ShouldHappenAfter), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(0, ShouldHappenAfter, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(0, ShouldHappenAfter, 1), shouldUseTimes)
+ fail(t, so(0, ShouldHappenAfter, time.Now()), shouldUseTimes)
+ fail(t, so(time.Now(), ShouldHappenAfter, 0), shouldUseTimes)
+
+ fail(t, so(january1, ShouldHappenAfter, january2), fmt.Sprintf("Expected '%s' to happen after '%s' (it happened '24h0m0s' before)!", pretty(january1), pretty(january2)))
+ fail(t, so(january1, ShouldHappenAfter, january1), fmt.Sprintf("Expected '%s' to happen after '%s' (it happened '0' before)!", pretty(january1), pretty(january1)))
+ pass(t, so(january3, ShouldHappenAfter, january1))
+}
+
+func TestShouldHappenOnOrAfter(t *testing.T) {
+ fail(t, so(0, ShouldHappenOnOrAfter), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(0, ShouldHappenOnOrAfter, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(0, ShouldHappenOnOrAfter, 1), shouldUseTimes)
+ fail(t, so(0, ShouldHappenOnOrAfter, time.Now()), shouldUseTimes)
+ fail(t, so(time.Now(), ShouldHappenOnOrAfter, 0), shouldUseTimes)
+
+ fail(t, so(january1, ShouldHappenOnOrAfter, january2), fmt.Sprintf("Expected '%s' to happen after '%s' (it happened '24h0m0s' before)!", pretty(january1), pretty(january2)))
+ pass(t, so(january1, ShouldHappenOnOrAfter, january1))
+ pass(t, so(january3, ShouldHappenOnOrAfter, january1))
+}
+
+func TestShouldHappenBetween(t *testing.T) {
+ fail(t, so(0, ShouldHappenBetween), "This assertion requires exactly 2 comparison values (you provided 0).")
+ fail(t, so(0, ShouldHappenBetween, 1, 2, 3), "This assertion requires exactly 2 comparison values (you provided 3).")
+
+ fail(t, so(0, ShouldHappenBetween, 1, 2), shouldUseTimes)
+ fail(t, so(0, ShouldHappenBetween, time.Now(), time.Now()), shouldUseTimes)
+ fail(t, so(time.Now(), ShouldHappenBetween, 0, time.Now()), shouldUseTimes)
+ fail(t, so(time.Now(), ShouldHappenBetween, time.Now(), 9), shouldUseTimes)
+
+ fail(t, so(january1, ShouldHappenBetween, january2, january4), fmt.Sprintf("Expected '%s' to happen between '%s' and '%s' (it happened '24h0m0s' outside threshold)!", pretty(january1), pretty(january2), pretty(january4)))
+ fail(t, so(january2, ShouldHappenBetween, january2, january4), fmt.Sprintf("Expected '%s' to happen between '%s' and '%s' (it happened '0' outside threshold)!", pretty(january2), pretty(january2), pretty(january4)))
+ pass(t, so(january3, ShouldHappenBetween, january2, january4))
+ fail(t, so(january4, ShouldHappenBetween, january2, january4), fmt.Sprintf("Expected '%s' to happen between '%s' and '%s' (it happened '0' outside threshold)!", pretty(january4), pretty(january2), pretty(january4)))
+ fail(t, so(january5, ShouldHappenBetween, january2, january4), fmt.Sprintf("Expected '%s' to happen between '%s' and '%s' (it happened '24h0m0s' outside threshold)!", pretty(january5), pretty(january2), pretty(january4)))
+}
+
+func TestShouldHappenOnOrBetween(t *testing.T) {
+ fail(t, so(0, ShouldHappenOnOrBetween), "This assertion requires exactly 2 comparison values (you provided 0).")
+ fail(t, so(0, ShouldHappenOnOrBetween, 1, 2, 3), "This assertion requires exactly 2 comparison values (you provided 3).")
+
+ fail(t, so(0, ShouldHappenOnOrBetween, 1, time.Now()), shouldUseTimes)
+ fail(t, so(0, ShouldHappenOnOrBetween, time.Now(), 1), shouldUseTimes)
+ fail(t, so(time.Now(), ShouldHappenOnOrBetween, 0, 1), shouldUseTimes)
+
+ fail(t, so(january1, ShouldHappenOnOrBetween, january2, january4), fmt.Sprintf("Expected '%s' to happen between '%s' and '%s' (it happened '24h0m0s' outside threshold)!", pretty(january1), pretty(january2), pretty(january4)))
+ pass(t, so(january2, ShouldHappenOnOrBetween, january2, january4))
+ pass(t, so(january3, ShouldHappenOnOrBetween, january2, january4))
+ pass(t, so(january4, ShouldHappenOnOrBetween, january2, january4))
+ fail(t, so(january5, ShouldHappenOnOrBetween, january2, january4), fmt.Sprintf("Expected '%s' to happen between '%s' and '%s' (it happened '24h0m0s' outside threshold)!", pretty(january5), pretty(january2), pretty(january4)))
+}
+
+func TestShouldNotHappenOnOrBetween(t *testing.T) {
+ fail(t, so(0, ShouldNotHappenOnOrBetween), "This assertion requires exactly 2 comparison values (you provided 0).")
+ fail(t, so(0, ShouldNotHappenOnOrBetween, 1, 2, 3), "This assertion requires exactly 2 comparison values (you provided 3).")
+
+ fail(t, so(0, ShouldNotHappenOnOrBetween, 1, time.Now()), shouldUseTimes)
+ fail(t, so(0, ShouldNotHappenOnOrBetween, time.Now(), 1), shouldUseTimes)
+ fail(t, so(time.Now(), ShouldNotHappenOnOrBetween, 0, 1), shouldUseTimes)
+
+ pass(t, so(january1, ShouldNotHappenOnOrBetween, january2, january4))
+ fail(t, so(january2, ShouldNotHappenOnOrBetween, january2, january4), fmt.Sprintf("Expected '%s' to NOT happen on or between '%s' and '%s' (but it did)!", pretty(january2), pretty(january2), pretty(january4)))
+ fail(t, so(january3, ShouldNotHappenOnOrBetween, january2, january4), fmt.Sprintf("Expected '%s' to NOT happen on or between '%s' and '%s' (but it did)!", pretty(january3), pretty(january2), pretty(january4)))
+ fail(t, so(january4, ShouldNotHappenOnOrBetween, january2, january4), fmt.Sprintf("Expected '%s' to NOT happen on or between '%s' and '%s' (but it did)!", pretty(january4), pretty(january2), pretty(january4)))
+ pass(t, so(january5, ShouldNotHappenOnOrBetween, january2, january4))
+}
+
+func TestShouldHappenWithin(t *testing.T) {
+ fail(t, so(0, ShouldHappenWithin), "This assertion requires exactly 2 comparison values (you provided 0).")
+ fail(t, so(0, ShouldHappenWithin, 1, 2, 3), "This assertion requires exactly 2 comparison values (you provided 3).")
+
+ fail(t, so(0, ShouldHappenWithin, 1, 2), shouldUseDurationAndTime)
+ fail(t, so(0, ShouldHappenWithin, oneDay, time.Now()), shouldUseDurationAndTime)
+ fail(t, so(time.Now(), ShouldHappenWithin, 0, time.Now()), shouldUseDurationAndTime)
+
+ fail(t, so(january1, ShouldHappenWithin, oneDay, january3), fmt.Sprintf("Expected '%s' to happen between '%s' and '%s' (it happened '24h0m0s' outside threshold)!", pretty(january1), pretty(january2), pretty(january4)))
+ pass(t, so(january2, ShouldHappenWithin, oneDay, january3))
+ pass(t, so(january3, ShouldHappenWithin, oneDay, january3))
+ pass(t, so(january4, ShouldHappenWithin, oneDay, january3))
+ fail(t, so(january5, ShouldHappenWithin, oneDay, january3), fmt.Sprintf("Expected '%s' to happen between '%s' and '%s' (it happened '24h0m0s' outside threshold)!", pretty(january5), pretty(january2), pretty(january4)))
+}
+
+func TestShouldNotHappenWithin(t *testing.T) {
+ fail(t, so(0, ShouldNotHappenWithin), "This assertion requires exactly 2 comparison values (you provided 0).")
+ fail(t, so(0, ShouldNotHappenWithin, 1, 2, 3), "This assertion requires exactly 2 comparison values (you provided 3).")
+
+ fail(t, so(0, ShouldNotHappenWithin, 1, 2), shouldUseDurationAndTime)
+ fail(t, so(0, ShouldNotHappenWithin, oneDay, time.Now()), shouldUseDurationAndTime)
+ fail(t, so(time.Now(), ShouldNotHappenWithin, 0, time.Now()), shouldUseDurationAndTime)
+
+ pass(t, so(january1, ShouldNotHappenWithin, oneDay, january3))
+ fail(t, so(january2, ShouldNotHappenWithin, oneDay, january3), fmt.Sprintf("Expected '%s' to NOT happen on or between '%s' and '%s' (but it did)!", pretty(january2), pretty(january2), pretty(january4)))
+ fail(t, so(january3, ShouldNotHappenWithin, oneDay, january3), fmt.Sprintf("Expected '%s' to NOT happen on or between '%s' and '%s' (but it did)!", pretty(january3), pretty(january2), pretty(january4)))
+ fail(t, so(january4, ShouldNotHappenWithin, oneDay, january3), fmt.Sprintf("Expected '%s' to NOT happen on or between '%s' and '%s' (but it did)!", pretty(january4), pretty(january2), pretty(january4)))
+ pass(t, so(january5, ShouldNotHappenWithin, oneDay, january3))
+}
+
+func TestShouldBeChronological(t *testing.T) {
+ fail(t, so(0, ShouldBeChronological, 1, 2, 3), "This assertion requires exactly 0 comparison values (you provided 3).")
+ fail(t, so(0, ShouldBeChronological), shouldUseTimeSlice)
+ fail(t, so([]time.Time{january5, january1}, ShouldBeChronological),
+ "The 'Time' at index [1] should have happened after the previous one (but it didn't!):\n [0]: 2013-01-05 00:00:00 +0000 UTC\n [1]: 2013-01-01 00:00:00 +0000 UTC (see, it happened before!)")
+
+ pass(t, so([]time.Time{january1, january2, january3, january4, january5}, ShouldBeChronological))
+}
+
+const layout = "2006-01-02 15:04"
+
+var january1, _ = time.Parse(layout, "2013-01-01 00:00")
+var january2, _ = time.Parse(layout, "2013-01-02 00:00")
+var january3, _ = time.Parse(layout, "2013-01-03 00:00")
+var january4, _ = time.Parse(layout, "2013-01-04 00:00")
+var january5, _ = time.Parse(layout, "2013-01-05 00:00")
+
+var oneDay, _ = time.ParseDuration("24h0m0s")
+var twoDays, _ = time.ParseDuration("48h0m0s")
+
+func pretty(t time.Time) string {
+ return fmt.Sprintf("%v", t)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/type.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/type.go
new file mode 100644
index 00000000000..3fc00f68cd0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/type.go
@@ -0,0 +1,112 @@
+package assertions
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// ShouldHaveSameTypeAs receives exactly two parameters and compares their underlying types for equality.
+func ShouldHaveSameTypeAs(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ first := reflect.TypeOf(actual)
+ second := reflect.TypeOf(expected[0])
+
+ if equal := ShouldEqual(first, second); equal != success {
+ return serializer.serialize(second, first, fmt.Sprintf(shouldHaveBeenA, actual, second, first))
+ }
+ return success
+}
+
+// ShouldNotHaveSameTypeAs receives exactly two parameters and compares their underlying types for inequality.
+func ShouldNotHaveSameTypeAs(actual interface{}, expected ...interface{}) string {
+ if fail := need(1, expected); fail != success {
+ return fail
+ }
+
+ first := reflect.TypeOf(actual)
+ second := reflect.TypeOf(expected[0])
+
+ if equal := ShouldEqual(first, second); equal == success {
+ return fmt.Sprintf(shouldNotHaveBeenA, actual, second)
+ }
+ return success
+}
+
+// ShouldImplement receives exactly two parameters and ensures
+// that the first implements the interface type of the second.
+func ShouldImplement(actual interface{}, expectedList ...interface{}) string {
+ if fail := need(1, expectedList); fail != success {
+ return fail
+ }
+
+ expected := expectedList[0]
+ if fail := ShouldBeNil(expected); fail != success {
+ return shouldCompareWithInterfacePointer
+ }
+
+ if fail := ShouldNotBeNil(actual); fail != success {
+ return shouldNotBeNilActual
+ }
+
+ var actualType reflect.Type
+ if reflect.TypeOf(actual).Kind() != reflect.Ptr {
+ actualType = reflect.PtrTo(reflect.TypeOf(actual))
+ } else {
+ actualType = reflect.TypeOf(actual)
+ }
+
+ expectedType := reflect.TypeOf(expected)
+ if fail := ShouldNotBeNil(expectedType); fail != success {
+ return shouldCompareWithInterfacePointer
+ }
+
+ expectedInterface := expectedType.Elem()
+
+ if actualType == nil {
+ return fmt.Sprintf(shouldHaveImplemented, expectedInterface, actual)
+ }
+
+ if !actualType.Implements(expectedInterface) {
+ return fmt.Sprintf(shouldHaveImplemented, expectedInterface, actualType)
+ }
+ return success
+}
+
+// ShouldNotImplement receives exactly two parameters and ensures
+// that the first does NOT implement the interface type of the second.
+func ShouldNotImplement(actual interface{}, expectedList ...interface{}) string {
+ if fail := need(1, expectedList); fail != success {
+ return fail
+ }
+
+ expected := expectedList[0]
+ if fail := ShouldBeNil(expected); fail != success {
+ return shouldCompareWithInterfacePointer
+ }
+
+ if fail := ShouldNotBeNil(actual); fail != success {
+ return shouldNotBeNilActual
+ }
+
+ var actualType reflect.Type
+ if reflect.TypeOf(actual).Kind() != reflect.Ptr {
+ actualType = reflect.PtrTo(reflect.TypeOf(actual))
+ } else {
+ actualType = reflect.TypeOf(actual)
+ }
+
+ expectedType := reflect.TypeOf(expected)
+ if fail := ShouldNotBeNil(expectedType); fail != success {
+ return shouldCompareWithInterfacePointer
+ }
+
+ expectedInterface := expectedType.Elem()
+
+ if actualType.Implements(expectedInterface) {
+ return fmt.Sprintf(shouldNotHaveImplemented, actualType, expectedInterface)
+ }
+ return success
+}
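
The nil checks in ShouldImplement above mean the expected value has to be a typed nil pointer to the interface, e.g. (*io.Reader)(nil), as the tests that follow also show. A brief sketch, assuming the canonical import path rather than this vendored copy:

```go
package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/smartystreets/assertions"
)

func main() {
	buf := bytes.NewBufferString("hello")

	// The expected argument is a typed nil interface pointer; the assertion checks
	// whether *bytes.Buffer implements io.Reader. Passes here (prints "").
	fmt.Printf("%q\n", assertions.ShouldImplement(buf, (*io.Reader)(nil)))

	// Plain ints do not implement io.Reader, so a failure message comes back.
	fmt.Printf("%q\n", assertions.ShouldImplement(42, (*io.Reader)(nil)))
}
```
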
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/type_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/type_test.go
new file mode 100644
index 00000000000..4b8d1984670
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/type_test.go
@@ -0,0 +1,76 @@
+package assertions
+
+import (
+ "bytes"
+ "io"
+ "net/http"
+ "testing"
+)
+
+func TestShouldHaveSameTypeAs(t *testing.T) {
+ serializer = newFakeSerializer()
+
+ fail(t, so(1, ShouldHaveSameTypeAs), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(1, ShouldHaveSameTypeAs, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(nil, ShouldHaveSameTypeAs, 0), "int|<nil>|Expected '<nil>' to be: 'int' (but was: '<nil>')!")
+ fail(t, so(1, ShouldHaveSameTypeAs, "asdf"), "string|int|Expected '1' to be: 'string' (but was: 'int')!")
+
+ pass(t, so(1, ShouldHaveSameTypeAs, 0))
+ pass(t, so(nil, ShouldHaveSameTypeAs, nil))
+}
+
+func TestShouldNotHaveSameTypeAs(t *testing.T) {
+ fail(t, so(1, ShouldNotHaveSameTypeAs), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(1, ShouldNotHaveSameTypeAs, 1, 2, 3), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(1, ShouldNotHaveSameTypeAs, 0), "Expected '1' to NOT be: 'int' (but it was)!")
+ fail(t, so(nil, ShouldNotHaveSameTypeAs, nil), "Expected '<nil>' to NOT be: '<nil>' (but it was)!")
+
+ pass(t, so(nil, ShouldNotHaveSameTypeAs, 0))
+ pass(t, so(1, ShouldNotHaveSameTypeAs, "asdf"))
+}
+
+func TestShouldImplement(t *testing.T) {
+ var ioReader *io.Reader = nil
+ var response http.Response = http.Response{}
+ var responsePtr *http.Response = new(http.Response)
+ var reader = bytes.NewBufferString("")
+
+ fail(t, so(reader, ShouldImplement), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(reader, ShouldImplement, ioReader, ioReader), "This assertion requires exactly 1 comparison values (you provided 2).")
+ fail(t, so(reader, ShouldImplement, ioReader, ioReader, ioReader), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(reader, ShouldImplement, "foo"), shouldCompareWithInterfacePointer)
+ fail(t, so(reader, ShouldImplement, 1), shouldCompareWithInterfacePointer)
+ fail(t, so(reader, ShouldImplement, nil), shouldCompareWithInterfacePointer)
+
+ fail(t, so(nil, ShouldImplement, ioReader), shouldNotBeNilActual)
+ fail(t, so(1, ShouldImplement, ioReader), "Expected: 'io.Reader interface support'\nActual: '*int' does not implement the interface!")
+
+ fail(t, so(response, ShouldImplement, ioReader), "Expected: 'io.Reader interface support'\nActual: '*http.Response' does not implement the interface!")
+ fail(t, so(responsePtr, ShouldImplement, ioReader), "Expected: 'io.Reader interface support'\nActual: '*http.Response' does not implement the interface!")
+ pass(t, so(reader, ShouldImplement, ioReader))
+ pass(t, so(reader, ShouldImplement, (*io.Reader)(nil)))
+}
+
+func TestShouldNotImplement(t *testing.T) {
+ var ioReader *io.Reader = nil
+ var response http.Response = http.Response{}
+ var responsePtr *http.Response = new(http.Response)
+ var reader io.Reader = bytes.NewBufferString("")
+
+ fail(t, so(reader, ShouldNotImplement), "This assertion requires exactly 1 comparison values (you provided 0).")
+ fail(t, so(reader, ShouldNotImplement, ioReader, ioReader), "This assertion requires exactly 1 comparison values (you provided 2).")
+ fail(t, so(reader, ShouldNotImplement, ioReader, ioReader, ioReader), "This assertion requires exactly 1 comparison values (you provided 3).")
+
+ fail(t, so(reader, ShouldNotImplement, "foo"), shouldCompareWithInterfacePointer)
+ fail(t, so(reader, ShouldNotImplement, 1), shouldCompareWithInterfacePointer)
+ fail(t, so(reader, ShouldNotImplement, nil), shouldCompareWithInterfacePointer)
+
+ fail(t, so(reader, ShouldNotImplement, ioReader), "Expected '*bytes.Buffer'\nto NOT implement 'io.Reader' (but it did)!")
+ fail(t, so(nil, ShouldNotImplement, ioReader), shouldNotBeNilActual)
+ pass(t, so(1, ShouldNotImplement, ioReader))
+ pass(t, so(response, ShouldNotImplement, ioReader))
+ pass(t, so(responsePtr, ShouldNotImplement, ioReader))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/utilities_for_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/utilities_for_test.go
new file mode 100644
index 00000000000..7243ebcb937
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/assertions/utilities_for_test.go
@@ -0,0 +1,75 @@
+package assertions
+
+import (
+ "fmt"
+ "path"
+ "runtime"
+ "strings"
+ "testing"
+)
+
+func pass(t *testing.T, result string) {
+ if result != success {
+ _, file, line, _ := runtime.Caller(1)
+ base := path.Base(file)
+ t.Errorf("Expectation should have passed but failed (see %s: line %d): '%s'", base, line, result)
+ }
+}
+
+func fail(t *testing.T, actual string, expected string) {
+ actual = format(actual)
+ expected = format(expected)
+
+ if actual != expected {
+ if actual == "" {
+ actual = "(empty)"
+ }
+ _, file, line, _ := runtime.Caller(1)
+ base := path.Base(file)
+ t.Errorf("Expectation should have failed but passed (see %s: line %d). \nExpected: %s\nActual: %s\n",
+ base, line, expected, actual)
+ }
+}
+func format(message string) string {
+ message = strings.Replace(message, "\n", " ", -1)
+ for strings.Contains(message, " ") {
+ message = strings.Replace(message, " ", " ", -1)
+ }
+ return message
+}
+
+type Thing1 struct {
+ a string
+}
+type Thing2 struct {
+ a string
+}
+
+type Thinger interface {
+ Hi()
+}
+
+type Thing struct{}
+
+func (self *Thing) Hi() {}
+
+type IntAlias int
+type StringAlias string
+type StringSliceAlias []string
+type StringStringMapAlias map[string]string
+
+/******** FakeSerializer ********/
+
+type fakeSerializer struct{}
+
+func (self *fakeSerializer) serialize(expected, actual interface{}, message string) string {
+ return fmt.Sprintf("%v|%v|%s", expected, actual, message)
+}
+
+func (self *fakeSerializer) serializeDetailed(expected, actual interface{}, message string) string {
+ return fmt.Sprintf("%v|%v|%s", expected, actual, message)
+}
+
+func newFakeSerializer() *fakeSerializer {
+ return new(fakeSerializer)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/.gitignore b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/.gitignore
new file mode 100644
index 00000000000..c9205c5335a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/.gitignore
@@ -0,0 +1,5 @@
+.DS_Store
+Thumbs.db
+examples/output.json
+web/client/reports/
+/.idea \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/.travis.yml b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/.travis.yml
new file mode 100644
index 00000000000..a5124b0491b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/.travis.yml
@@ -0,0 +1,14 @@
+language: go
+
+go:
+ - 1.2
+ - 1.3
+ - 1.4
+ - 1.5
+
+install:
+ - go get -t ./...
+
+script: go test -short -v ./...
+
+sudo: false
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/CONTRIBUTING.md b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/CONTRIBUTING.md
new file mode 100644
index 00000000000..9c9053b83ad
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/CONTRIBUTING.md
@@ -0,0 +1,22 @@
+# Subject: GoConvey maintainers wanted
+
+We'd like to open the project up to additional maintainers who want to move the project forward in a meaningful way.
+
+We've spent significant time at SmartyStreets building GoConvey and it has perfectly met (and exceeded) all of our initial design specifications. We've used it to great effect. Being so well-matched to our development workflows at SmartyStreets, we haven't had a need to hack on it lately. This has been frustrating to many in the community who have ideas for the project and would like to see new features released (and some old bugs fixed). The release of Go 1.5 and the new vendoring experiment have been a source of confusion and hassle for those who have already upgraded and find that GoConvey needs to be brought up to speed.
+
+Comment below if you're interested. Preference will be given to those who have already contributed to the project. Check out the issues listing if you need some ideas for contributing.
+
+GoConvey is a popular 2-pronged, open-source github project (1,600+ stargazers, 100+ forks):
+
+- A package you import in your test code that allows you to write BDD-style tests.
+- An executable that runs a local web server which displays auto-updating test results in a web browser.
+
+----
+
+- http://goconvey.co/
+- https://github.com/smartystreets/goconvey
+- https://github.com/smartystreets/goconvey/wiki
+
+_I should mention that the [assertions package](https://github.com/smartystreets/assertions) imported by the convey package is used by other projects at SmartyStreets and so we will be continuing to maintain that project internally._
+
+We hope to hear from you soon. Thanks!
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/LICENSE.md b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/LICENSE.md
new file mode 100644
index 00000000000..5bc993c93c9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/LICENSE.md
@@ -0,0 +1,23 @@
+Copyright (c) 2014 SmartyStreets, LLC
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+NOTE: Various optional and subordinate components carry their own licensing
+requirements and restrictions. Use of those components is subject to the terms
+and conditions outlined the respective license of each component.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/README.md b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/README.md
new file mode 100644
index 00000000000..a07ce5a6eb2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/README.md
@@ -0,0 +1,126 @@
+GoConvey is awesome Go testing
+==============================
+
+[![Build Status](https://travis-ci.org/smartystreets/goconvey.png)](https://travis-ci.org/smartystreets/goconvey)
+[![GoDoc](https://godoc.org/github.com/smartystreets/goconvey?status.svg)](http://godoc.org/github.com/smartystreets/goconvey)
+
+
+Welcome to GoConvey, a yummy Go testing tool for gophers. Works with `go test`. Use it in the terminal or browser according to your viewing pleasure. **[View full feature tour.](http://goconvey.co)**
+
+**Features:**
+
+- Directly integrates with `go test`
+- Fully-automatic web UI (works with native Go tests, too)
+- Huge suite of regression tests
+- Shows test coverage (Go 1.2+)
+- Readable, colorized console output (understandable by any manager, IT or not)
+- Test code generator
+- Desktop notifications (optional)
+- Immediately open problem lines in [Sublime Text](http://www.sublimetext.com) ([some assembly required](https://github.com/asuth/subl-handler))
+
+
+You can ask questions about how to use GoConvey on [StackOverflow](http://stackoverflow.com/questions/ask?tags=goconvey,go&title=GoConvey%3A%20). Use the tags `go` and `goconvey`.
+
+**Menu:**
+
+- [Installation](#installation)
+- [Quick start](#quick-start)
+- [Documentation](#documentation)
+- [Screenshots](#screenshots)
+- [Contributors](#contributors-thanks)
+
+
+
+
+Installation
+------------
+
+ $ go get github.com/smartystreets/goconvey
+
+[Quick start](https://github.com/smartystreets/goconvey/wiki#get-going-in-25-seconds)
+-----------
+
+Make a test, for example:
+
+```go
+package package_name
+
+import (
+ "testing"
+ . "github.com/smartystreets/goconvey/convey"
+)
+
+func TestSpec(t *testing.T) {
+
+ // Only pass t into top-level Convey calls
+ Convey("Given some integer with a starting value", t, func() {
+ x := 1
+
+ Convey("When the integer is incremented", func() {
+ x++
+
+ Convey("The value should be greater by one", func() {
+ So(x, ShouldEqual, 2)
+ })
+ })
+ })
+}
+```
+
+
+#### [In the browser](https://github.com/smartystreets/goconvey/wiki/Web-UI)
+
+Start up the GoConvey web server at your project's path:
+
+ $ $GOPATH/bin/goconvey
+
+Then watch the test results display in your browser at:
+
+ http://localhost:8080
+
+
+If the browser doesn't open automatically, please click [http://localhost:8080](http://localhost:8080) to open manually.
+
+There you have it.
+![](http://d79i1fxsrar4t.cloudfront.net/goconvey.co/gc-1-dark.png)
+As long as GoConvey is running, test results will automatically update in your browser window.
+
+![](http://d79i1fxsrar4t.cloudfront.net/goconvey.co/gc-5-dark.png)
+The design is responsive, so you can squish the browser real tight if you need to put it beside your code.
+
+
+The [web UI](https://github.com/smartystreets/goconvey/wiki/Web-UI) supports traditional Go tests, so use it even if you're not using GoConvey tests.
+
+
+
+#### [In the terminal](https://github.com/smartystreets/goconvey/wiki/Execution)
+
+Just do what you do best:
+
+ $ go test
+
+Or if you want the output to include the story:
+
+ $ go test -v
+
+
+[Documentation](https://github.com/smartystreets/goconvey/wiki)
+
+-----------
+
+Check out the
+
+- [GoConvey wiki](https://github.com/smartystreets/goconvey/wiki),
+- [![GoDoc](https://godoc.org/github.com/smartystreets/goconvey?status.png)](http://godoc.org/github.com/smartystreets/goconvey)
+- and the *_test.go files scattered throughout this project.
+
+[Screenshots](http://goconvey.co)
+
+-----------
+
+For web UI and terminal screenshots, check out [the full feature tour](http://goconvey.co).
+
+
+----------------------
+
+GoConvey is brought to you by [SmartyStreets](https://github.com/smartystreets) and [several contributors](https://github.com/smartystreets/goconvey/graphs/contributors) (Thanks!).
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/assertions.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/assertions.go
new file mode 100644
index 00000000000..1e87b826dff
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/assertions.go
@@ -0,0 +1,68 @@
+package convey
+
+import "github.com/smartystreets/assertions"
+
+var (
+ ShouldEqual = assertions.ShouldEqual
+ ShouldNotEqual = assertions.ShouldNotEqual
+ ShouldAlmostEqual = assertions.ShouldAlmostEqual
+ ShouldNotAlmostEqual = assertions.ShouldNotAlmostEqual
+ ShouldResemble = assertions.ShouldResemble
+ ShouldNotResemble = assertions.ShouldNotResemble
+ ShouldPointTo = assertions.ShouldPointTo
+ ShouldNotPointTo = assertions.ShouldNotPointTo
+ ShouldBeNil = assertions.ShouldBeNil
+ ShouldNotBeNil = assertions.ShouldNotBeNil
+ ShouldBeTrue = assertions.ShouldBeTrue
+ ShouldBeFalse = assertions.ShouldBeFalse
+ ShouldBeZeroValue = assertions.ShouldBeZeroValue
+
+ ShouldBeGreaterThan = assertions.ShouldBeGreaterThan
+ ShouldBeGreaterThanOrEqualTo = assertions.ShouldBeGreaterThanOrEqualTo
+ ShouldBeLessThan = assertions.ShouldBeLessThan
+ ShouldBeLessThanOrEqualTo = assertions.ShouldBeLessThanOrEqualTo
+ ShouldBeBetween = assertions.ShouldBeBetween
+ ShouldNotBeBetween = assertions.ShouldNotBeBetween
+ ShouldBeBetweenOrEqual = assertions.ShouldBeBetweenOrEqual
+ ShouldNotBeBetweenOrEqual = assertions.ShouldNotBeBetweenOrEqual
+
+ ShouldContain = assertions.ShouldContain
+ ShouldNotContain = assertions.ShouldNotContain
+ ShouldContainKey = assertions.ShouldContainKey
+ ShouldNotContainKey = assertions.ShouldNotContainKey
+ ShouldBeIn = assertions.ShouldBeIn
+ ShouldNotBeIn = assertions.ShouldNotBeIn
+ ShouldBeEmpty = assertions.ShouldBeEmpty
+ ShouldNotBeEmpty = assertions.ShouldNotBeEmpty
+ ShouldHaveLength = assertions.ShouldHaveLength
+
+ ShouldStartWith = assertions.ShouldStartWith
+ ShouldNotStartWith = assertions.ShouldNotStartWith
+ ShouldEndWith = assertions.ShouldEndWith
+ ShouldNotEndWith = assertions.ShouldNotEndWith
+ ShouldBeBlank = assertions.ShouldBeBlank
+ ShouldNotBeBlank = assertions.ShouldNotBeBlank
+ ShouldContainSubstring = assertions.ShouldContainSubstring
+ ShouldNotContainSubstring = assertions.ShouldNotContainSubstring
+
+ ShouldPanic = assertions.ShouldPanic
+ ShouldNotPanic = assertions.ShouldNotPanic
+ ShouldPanicWith = assertions.ShouldPanicWith
+ ShouldNotPanicWith = assertions.ShouldNotPanicWith
+
+ ShouldHaveSameTypeAs = assertions.ShouldHaveSameTypeAs
+ ShouldNotHaveSameTypeAs = assertions.ShouldNotHaveSameTypeAs
+ ShouldImplement = assertions.ShouldImplement
+ ShouldNotImplement = assertions.ShouldNotImplement
+
+ ShouldHappenBefore = assertions.ShouldHappenBefore
+ ShouldHappenOnOrBefore = assertions.ShouldHappenOnOrBefore
+ ShouldHappenAfter = assertions.ShouldHappenAfter
+ ShouldHappenOnOrAfter = assertions.ShouldHappenOnOrAfter
+ ShouldHappenBetween = assertions.ShouldHappenBetween
+ ShouldHappenOnOrBetween = assertions.ShouldHappenOnOrBetween
+ ShouldNotHappenOnOrBetween = assertions.ShouldNotHappenOnOrBetween
+ ShouldHappenWithin = assertions.ShouldHappenWithin
+ ShouldNotHappenWithin = assertions.ShouldNotHappenWithin
+ ShouldBeChronological = assertions.ShouldBeChronological
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/context.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/context.go
new file mode 100644
index 00000000000..2c75c2d7b1b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/context.go
@@ -0,0 +1,272 @@
+package convey
+
+import (
+ "fmt"
+
+ "github.com/jtolds/gls"
+ "github.com/smartystreets/goconvey/convey/reporting"
+)
+
+type conveyErr struct {
+ fmt string
+ params []interface{}
+}
+
+func (e *conveyErr) Error() string {
+ return fmt.Sprintf(e.fmt, e.params...)
+}
+
+func conveyPanic(fmt string, params ...interface{}) {
+ panic(&conveyErr{fmt, params})
+}
+
+const (
+ missingGoTest = `Top-level calls to Convey(...) need a reference to the *testing.T.
+ Hint: Convey("description here", t, func() { /* notice that the second argument was the *testing.T (t)! */ }) `
+ extraGoTest = `Only the top-level call to Convey(...) needs a reference to the *testing.T.`
+ noStackContext = "Convey operation made without context on goroutine stack.\n" +
+ "Hint: Perhaps you meant to use `Convey(..., func(c C){...})` ?"
+ differentConveySituations = "Different set of Convey statements on subsequent pass!\nDid not expect %#v."
+ multipleIdenticalConvey = "Multiple convey suites with identical names: %#v"
+)
+
+const (
+ failureHalt = "___FAILURE_HALT___"
+
+ nodeKey = "node"
+)
+
+///////////////////////////////// Stack Context /////////////////////////////////
+
+func getCurrentContext() *context {
+ ctx, ok := ctxMgr.GetValue(nodeKey)
+ if ok {
+ return ctx.(*context)
+ }
+ return nil
+}
+
+func mustGetCurrentContext() *context {
+ ctx := getCurrentContext()
+ if ctx == nil {
+ conveyPanic(noStackContext)
+ }
+ return ctx
+}
+
+//////////////////////////////////// Context ////////////////////////////////////
+
+// context magically handles all coordination of Convey blocks and So assertions.
+//
+// It is tracked on the stack as goroutine-local storage with the gls package,
+// or explicitly if the user decides to call Convey like:
+//
+// Convey(..., func(c C) {
+// c.So(...)
+// })
+//
+// This implements the `C` interface.
+type context struct {
+ reporter reporting.Reporter
+
+ children map[string]*context
+
+ resets []func()
+
+ executedOnce bool
+ expectChildRun *bool
+ complete bool
+
+ focus bool
+ failureMode FailureMode
+}
+
+// rootConvey is the main entry point to a test suite. This is called when
+// there's no context in the stack already, and items must contain a `t` object,
+// or this panics.
+func rootConvey(items ...interface{}) {
+ entry := discover(items)
+
+ if entry.Test == nil {
+ conveyPanic(missingGoTest)
+ }
+
+ expectChildRun := true
+ ctx := &context{
+ reporter: buildReporter(),
+
+ children: make(map[string]*context),
+
+ expectChildRun: &expectChildRun,
+
+ focus: entry.Focus,
+ failureMode: defaultFailureMode.combine(entry.FailMode),
+ }
+ ctxMgr.SetValues(gls.Values{nodeKey: ctx}, func() {
+ ctx.reporter.BeginStory(reporting.NewStoryReport(entry.Test))
+ defer ctx.reporter.EndStory()
+
+ for ctx.shouldVisit() {
+ ctx.conveyInner(entry.Situation, entry.Func)
+ expectChildRun = true
+ }
+ })
+}
+
+//////////////////////////////////// Methods ////////////////////////////////////
+
+func (ctx *context) SkipConvey(items ...interface{}) {
+ ctx.Convey(items, skipConvey)
+}
+
+func (ctx *context) FocusConvey(items ...interface{}) {
+ ctx.Convey(items, focusConvey)
+}
+
+func (ctx *context) Convey(items ...interface{}) {
+ entry := discover(items)
+
+ // we're a branch, or leaf (on the wind)
+ if entry.Test != nil {
+ conveyPanic(extraGoTest)
+ }
+ if ctx.focus && !entry.Focus {
+ return
+ }
+
+ var inner_ctx *context
+ if ctx.executedOnce {
+ var ok bool
+ inner_ctx, ok = ctx.children[entry.Situation]
+ if !ok {
+ conveyPanic(differentConveySituations, entry.Situation)
+ }
+ } else {
+ if _, ok := ctx.children[entry.Situation]; ok {
+ conveyPanic(multipleIdenticalConvey, entry.Situation)
+ }
+ inner_ctx = &context{
+ reporter: ctx.reporter,
+
+ children: make(map[string]*context),
+
+ expectChildRun: ctx.expectChildRun,
+
+ focus: entry.Focus,
+ failureMode: ctx.failureMode.combine(entry.FailMode),
+ }
+ ctx.children[entry.Situation] = inner_ctx
+ }
+
+ if inner_ctx.shouldVisit() {
+ ctxMgr.SetValues(gls.Values{nodeKey: inner_ctx}, func() {
+ inner_ctx.conveyInner(entry.Situation, entry.Func)
+ })
+ }
+}
+
+func (ctx *context) SkipSo(stuff ...interface{}) {
+ ctx.assertionReport(reporting.NewSkipReport())
+}
+
+func (ctx *context) So(actual interface{}, assert assertion, expected ...interface{}) {
+ if result := assert(actual, expected...); result == assertionSuccess {
+ ctx.assertionReport(reporting.NewSuccessReport())
+ } else {
+ ctx.assertionReport(reporting.NewFailureReport(result))
+ }
+}
+
+func (ctx *context) Reset(action func()) {
+ /* TODO: Failure mode configuration */
+ ctx.resets = append(ctx.resets, action)
+}
+
+func (ctx *context) Print(items ...interface{}) (int, error) {
+ fmt.Fprint(ctx.reporter, items...)
+ return fmt.Print(items...)
+}
+
+func (ctx *context) Println(items ...interface{}) (int, error) {
+ fmt.Fprintln(ctx.reporter, items...)
+ return fmt.Println(items...)
+}
+
+func (ctx *context) Printf(format string, items ...interface{}) (int, error) {
+ fmt.Fprintf(ctx.reporter, format, items...)
+ return fmt.Printf(format, items...)
+}
+
+//////////////////////////////////// Private ////////////////////////////////////
+
+// shouldVisit returns true iff we should traverse down into a Convey. Note
+// that even if we don't traverse a Convey on this pass, we may still
+// traverse it on a subsequent pass.
+func (c *context) shouldVisit() bool {
+ return !c.complete && *c.expectChildRun
+}
+
+// conveyInner is the function which actually executes the user's anonymous test
+// function body. At this point, Convey or rootConvey has decided that this
+// function should actually run.
+func (ctx *context) conveyInner(situation string, f func(C)) {
+ // Record/Reset state for next time.
+ defer func() {
+ ctx.executedOnce = true
+
+ // This is only needed at the leaves, but there's no harm in also setting it
+		// when returning from branch Conveys.
+ *ctx.expectChildRun = false
+ }()
+
+ // Set up+tear down our scope for the reporter
+ ctx.reporter.Enter(reporting.NewScopeReport(situation))
+ defer ctx.reporter.Exit()
+
+ // Recover from any panics in f, and assign the `complete` status for this
+ // node of the tree.
+ defer func() {
+ ctx.complete = true
+ if problem := recover(); problem != nil {
+ if problem, ok := problem.(*conveyErr); ok {
+ panic(problem)
+ }
+ if problem != failureHalt {
+ ctx.reporter.Report(reporting.NewErrorReport(problem))
+ }
+ } else {
+ for _, child := range ctx.children {
+ if !child.complete {
+ ctx.complete = false
+ return
+ }
+ }
+ }
+ }()
+
+ // Resets are registered as the `f` function executes, so nil them here.
+ // All resets are run in registration order (FIFO).
+ ctx.resets = []func(){}
+ defer func() {
+ for _, r := range ctx.resets {
+ // panics handled by the previous defer
+ r()
+ }
+ }()
+
+ if f == nil {
+ // if f is nil, this was either a Convey(..., nil), or a SkipConvey
+ ctx.reporter.Report(reporting.NewSkipReport())
+ } else {
+ f(ctx)
+ }
+}
+
+// assertionReport is a helper for So and SkipSo which makes the report and
+// then possibly panics, depending on the current context's failureMode.
+func (ctx *context) assertionReport(r *reporting.AssertionResult) {
+ ctx.reporter.Report(r)
+ if r.Failure != "" && ctx.failureMode == FailureHalts {
+ panic(failureHalt)
+ }
+}
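
The context type above is why the explicit func(c C) form exists: when the test body hands work to another goroutine, the gls-based lookup in getCurrentContext() has no stack context to find, so assertions must go through the captured c instead of the package-level So. A hedged sketch of that situation (the handler, server, and test names are invented):

package example

import (
	"net/http"
	"net/http/httptest"
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

func TestHandlerUsesExplicitContext(t *testing.T) {
	Convey("the handler can assert via the captured context", t, func(c C) {
		handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			// This runs on the server's goroutine, so use c.So rather than So.
			c.So(r.Method, ShouldEqual, http.MethodGet)
			w.WriteHeader(http.StatusOK)
		})

		server := httptest.NewServer(handler)
		defer server.Close()

		resp, err := http.Get(server.URL)
		So(err, ShouldBeNil)
		defer resp.Body.Close()
		So(resp.StatusCode, ShouldEqual, http.StatusOK)
	})
}
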
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/convey.goconvey b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/convey.goconvey
new file mode 100644
index 00000000000..a2d9327dc91
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/convey.goconvey
@@ -0,0 +1,4 @@
+#ignore
+-timeout=1s
+#-covermode=count
+#-coverpkg=github.com/smartystreets/goconvey/convey,github.com/smartystreets/goconvey/convey/gotest,github.com/smartystreets/goconvey/convey/reporting \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/discovery.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/discovery.go
new file mode 100644
index 00000000000..eb8d4cb2cee
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/discovery.go
@@ -0,0 +1,103 @@
+package convey
+
+type actionSpecifier uint8
+
+const (
+ noSpecifier actionSpecifier = iota
+ skipConvey
+ focusConvey
+)
+
+type suite struct {
+ Situation string
+ Test t
+ Focus bool
+ Func func(C) // nil means skipped
+ FailMode FailureMode
+}
+
+func newSuite(situation string, failureMode FailureMode, f func(C), test t, specifier actionSpecifier) *suite {
+ ret := &suite{
+ Situation: situation,
+ Test: test,
+ Func: f,
+ FailMode: failureMode,
+ }
+ switch specifier {
+ case skipConvey:
+ ret.Func = nil
+ case focusConvey:
+ ret.Focus = true
+ }
+ return ret
+}
+
+func discover(items []interface{}) *suite {
+ name, items := parseName(items)
+ test, items := parseGoTest(items)
+ failure, items := parseFailureMode(items)
+ action, items := parseAction(items)
+ specifier, items := parseSpecifier(items)
+
+ if len(items) != 0 {
+ conveyPanic(parseError)
+ }
+
+ return newSuite(name, failure, action, test, specifier)
+}
+func item(items []interface{}) interface{} {
+ if len(items) == 0 {
+ conveyPanic(parseError)
+ }
+ return items[0]
+}
+func parseName(items []interface{}) (string, []interface{}) {
+ if name, parsed := item(items).(string); parsed {
+ return name, items[1:]
+ }
+ conveyPanic(parseError)
+ panic("never get here")
+}
+func parseGoTest(items []interface{}) (t, []interface{}) {
+ if test, parsed := item(items).(t); parsed {
+ return test, items[1:]
+ }
+ return nil, items
+}
+func parseFailureMode(items []interface{}) (FailureMode, []interface{}) {
+ if mode, parsed := item(items).(FailureMode); parsed {
+ return mode, items[1:]
+ }
+ return FailureInherits, items
+}
+func parseAction(items []interface{}) (func(C), []interface{}) {
+ switch x := item(items).(type) {
+ case nil:
+ return nil, items[1:]
+ case func(C):
+ return x, items[1:]
+ case func():
+ return func(C) { x() }, items[1:]
+ }
+ conveyPanic(parseError)
+ panic("never get here")
+}
+func parseSpecifier(items []interface{}) (actionSpecifier, []interface{}) {
+ if len(items) == 0 {
+ return noSpecifier, items
+ }
+ if spec, ok := items[0].(actionSpecifier); ok {
+ return spec, items[1:]
+ }
+ conveyPanic(parseError)
+ panic("never get here")
+}
+
+// This interface allows us to pass the *testing.T struct
+// throughout the internals of this package without ever
+// having to import the "testing" package.
+type t interface {
+ Fail()
+}
+
+const parseError = "You must provide a name (string), then a *testing.T (if in outermost scope), an optional FailureMode, and then an action (func())."
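
discover() above expects the Convey arguments in a fixed order: a name, an optional *testing.T (outermost scope only), an optional FailureMode, the action, and an optional internal specifier appended by SkipConvey/FocusConvey. A sketch of the argument shapes that parser accepts (test and scope names are invented):

package example

import (
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

func TestDiscoverArgumentShapes(t *testing.T) {
	// name + *testing.T + action
	Convey("top-level scope", t, func() {
		// name + action
		Convey("nested scope", func() {
			So(true, ShouldBeTrue)
		})

		// name + FailureMode + action taking the explicit context
		Convey("nested scope with a failure mode", FailureContinues, func(c C) {
			c.So(1, ShouldEqual, 1)
		})
	})

	// name + *testing.T + FailureMode + action
	Convey("top-level scope with a failure mode", t, FailureHalts, func() {
		So("ok", ShouldNotBeBlank)
	})
}
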
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/doc.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/doc.go
new file mode 100644
index 00000000000..2562ce4c284
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/doc.go
@@ -0,0 +1,218 @@
+// Package convey contains all of the public-facing entry points to this project.
+// This means that users should never need to import any other packages from
+// this project, as those serve internal purposes.
+package convey
+
+import "github.com/smartystreets/goconvey/convey/reporting"
+
+////////////////////////////////// suite //////////////////////////////////
+
+// C is the Convey context which you can optionally obtain in your action
+// by calling Convey like:
+//
+// Convey(..., func(c C) {
+// ...
+// })
+//
+// See the documentation on Convey for more details.
+//
+// All methods in this context behave identically to the global functions of the
+// same name in this package.
+type C interface {
+ Convey(items ...interface{})
+ SkipConvey(items ...interface{})
+ FocusConvey(items ...interface{})
+
+ So(actual interface{}, assert assertion, expected ...interface{})
+ SkipSo(stuff ...interface{})
+
+ Reset(action func())
+
+ Println(items ...interface{}) (int, error)
+ Print(items ...interface{}) (int, error)
+ Printf(format string, items ...interface{}) (int, error)
+}
+
+// Convey is the method intended for use when declaring the scopes of
+// a specification. Each scope has a description and a func() which may contain
+// other calls to Convey(), Reset() or Should-style assertions. Convey calls can
+// be nested as far as you see fit.
+//
+// IMPORTANT NOTE: The top-level Convey() within a Test method
+// must conform to the following signature:
+//
+// Convey(description string, t *testing.T, action func())
+//
+// All other calls should look like this (no need to pass in *testing.T):
+//
+// Convey(description string, action func())
+//
+// Don't worry, goconvey will panic if you get it wrong so you can fix it.
+//
+// Additionally, you may explicitly obtain access to the Convey context by doing:
+//
+// Convey(description string, action func(c C))
+//
+// You may need to do this if you want to pass the context through to a
+// goroutine, or to close over the context in a handler to a library which
+// calls your handler in a goroutine (httptest comes to mind).
+//
+// All Convey()-blocks also accept an optional parameter of FailureMode which sets
+// how goconvey should treat failures for So()-assertions in the block and
+// nested blocks. See the constants in this file for the available options.
+//
+// By default it will inherit from its parent block and the top-level blocks
+// default to the FailureHalts setting.
+//
+// This parameter is inserted before the block itself:
+//
+// Convey(description string, t *testing.T, mode FailureMode, action func())
+// Convey(description string, mode FailureMode, action func())
+//
+// See the examples package for, well, examples.
+func Convey(items ...interface{}) {
+ if ctx := getCurrentContext(); ctx == nil {
+ rootConvey(items...)
+ } else {
+ ctx.Convey(items...)
+ }
+}
+
+// SkipConvey is analogous to Convey except that the scope is not executed
+// (which means that child scopes defined within this scope are not run either).
+// The reporter will be notified that this step was skipped.
+func SkipConvey(items ...interface{}) {
+ Convey(append(items, skipConvey)...)
+}
+
+// FocusConvey has the inverse effect of SkipConvey. If the top-level
+// Convey is changed to `FocusConvey`, only nested scopes that are defined
+// with FocusConvey will be run; the rest will be ignored completely. This
+// is handy when debugging a large suite that exercises a misbehaving function
+// repeatedly, since you can disable everything except the scope of interest
+// without swaths of `SkipConvey` calls -- just a targeted chain of calls
+// to FocusConvey.
+func FocusConvey(items ...interface{}) {
+ Convey(append(items, focusConvey)...)
+}
+
+// Reset registers a cleanup function to be run after each Convey()
+// in the same scope. See the examples package for a simple use case.
+func Reset(action func()) {
+ mustGetCurrentContext().Reset(action)
+}
+
+/////////////////////////////////// Assertions ///////////////////////////////////
+
+// assertion is an alias for a function with a signature that the convey.So()
+// method can handle. Any future or custom assertions should conform to this
+// method signature. The return value should be an empty string if the assertion
+// passes and a well-formed failure message if not.
+type assertion func(actual interface{}, expected ...interface{}) string
+
+const assertionSuccess = ""
+
+// So is the means by which assertions are made against the system under test.
+// The majority of exported names in the assertions package begin with the word
+// 'Should' and describe how the first argument (actual) should compare with any
+// of the final (expected) arguments. How many final arguments are accepted
+// depends on the particular assertion that is passed in as the assert argument.
+// See the examples package for use cases and the assertions package for
+// documentation on specific assertion methods. A failing assertion will
+// cause t.Fail() to be invoked--you should never call this method (or other
+// failure-inducing methods) in your test code. Leave that to GoConvey.
+func So(actual interface{}, assert assertion, expected ...interface{}) {
+ mustGetCurrentContext().So(actual, assert, expected...)
+}
+
+// SkipSo is analogous to So except that the assertion that would have been passed
+// to So is not executed and the reporter is notified that the assertion was skipped.
+func SkipSo(stuff ...interface{}) {
+ mustGetCurrentContext().SkipSo()
+}
+
+// FailureMode is a type which determines how a Convey block behaves when one
+// of its So()-assertions fails. See the constants below for acceptable values.
+type FailureMode string
+
+const (
+
+ // FailureContinues is a failure mode which prevents failing
+ // So()-assertions from halting Convey-block execution, instead
+ // allowing the test to continue past failing So()-assertions.
+ FailureContinues FailureMode = "continue"
+
+ // FailureHalts is the default setting for a top-level Convey()-block
+ // and will cause all failing So()-assertions to halt further execution
+ // in that test-arm and continue on to the next arm.
+ FailureHalts FailureMode = "halt"
+
+	// FailureInherits is the default setting for failure-mode; it inherits
+	// the failure-mode of the parent block. You should never need to
+	// specify this mode in your tests.
+ FailureInherits FailureMode = "inherits"
+)
+
+func (f FailureMode) combine(other FailureMode) FailureMode {
+ if other == FailureInherits {
+ return f
+ }
+ return other
+}
+
+var defaultFailureMode FailureMode = FailureHalts
+
+// SetDefaultFailureMode allows you to specify the default failure mode
+// for all Convey blocks. It is meant to be used in an init function to
+// allow the default mode to be changed across all tests for an entire package,
+// but it can be used anywhere.
+func SetDefaultFailureMode(mode FailureMode) {
+ if mode == FailureContinues || mode == FailureHalts {
+ defaultFailureMode = mode
+ } else {
+ panic("You may only use the constants named 'FailureContinues' and 'FailureHalts' as default failure modes.")
+ }
+}
+
+//////////////////////////////////// Print functions ////////////////////////////////////
+
+// Print is analogous to fmt.Print (and it even calls fmt.Print). It ensures that
+// output is aligned with the corresponding scopes in the web UI.
+func Print(items ...interface{}) (written int, err error) {
+ return mustGetCurrentContext().Print(items...)
+}
+
+// Println is analogous to fmt.Println (and it even calls fmt.Println). It ensures that
+// output is aligned with the corresponding scopes in the web UI.
+func Println(items ...interface{}) (written int, err error) {
+ return mustGetCurrentContext().Println(items...)
+}
+
+// Printf is analogous to fmt.Printf (and it even calls fmt.Printf). It ensures that
+// output is aligned with the corresponding scopes in the web UI.
+func Printf(format string, items ...interface{}) (written int, err error) {
+ return mustGetCurrentContext().Printf(format, items...)
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+// SuppressConsoleStatistics prevents automatic printing of console statistics.
+// Calling PrintConsoleStatistics explicitly will force printing of statistics.
+func SuppressConsoleStatistics() {
+ reporting.SuppressConsoleStatistics()
+}
+
+// PrintConsoleStatistics may be called at any time to print assertion statistics.
+// Generally, the best place to do this would be in a TestMain function,
+// after all tests have been run. Something like this:
+//
+// func TestMain(m *testing.M) {
+// convey.SuppressConsoleStatistics()
+// result := m.Run()
+// convey.PrintConsoleStatistics()
+// os.Exit(result)
+// }
+//
+func PrintConsoleStatistics() {
+ reporting.PrintConsoleStatistics()
+}
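
The assertion type documented above is just a function that returns an empty string on success and a failure message otherwise, so custom assertions can be handed to So() alongside the built-in Should* names. A minimal sketch (the assertion and test names are my own), which also passes an explicit FailureMode as described above:

package example

import (
	"fmt"
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

// shouldBeEven satisfies the assertion signature: empty string on success,
// a descriptive message on failure.
func shouldBeEven(actual interface{}, _ ...interface{}) string {
	n, ok := actual.(int)
	if !ok {
		return fmt.Sprintf("expected an int, got %T", actual)
	}
	if n%2 != 0 {
		return fmt.Sprintf("expected %d to be even", n)
	}
	return ""
}

func TestCustomAssertion(t *testing.T) {
	Convey("custom assertions work like the built-in Should* helpers", t, FailureContinues, func() {
		So(4, ShouldEqual, 4)
		So(8, shouldBeEven)
	})
}
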
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/focused_execution_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/focused_execution_test.go
new file mode 100644
index 00000000000..294e32fa17e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/focused_execution_test.go
@@ -0,0 +1,72 @@
+package convey
+
+import "testing"
+
+func TestFocusOnlyAtTopLevel(t *testing.T) {
+ output := prepare()
+
+ FocusConvey("hi", t, func() {
+ output += "done"
+ })
+
+ expectEqual(t, "done", output)
+}
+
+func TestFocus(t *testing.T) {
+ output := prepare()
+
+ FocusConvey("hi", t, func() {
+ output += "1"
+
+ Convey("bye", func() {
+ output += "2"
+ })
+ })
+
+ expectEqual(t, "1", output)
+}
+
+func TestNestedFocus(t *testing.T) {
+ output := prepare()
+
+ FocusConvey("hi", t, func() {
+ output += "1"
+
+ Convey("This shouldn't run", func() {
+ output += "boink!"
+ })
+
+ FocusConvey("This should run", func() {
+ output += "2"
+
+ FocusConvey("The should run too", func() {
+ output += "3"
+
+ })
+
+ Convey("The should NOT run", func() {
+ output += "blah blah blah!"
+ })
+ })
+ })
+
+ expectEqual(t, "123", output)
+}
+
+func TestForgotTopLevelFocus(t *testing.T) {
+ output := prepare()
+
+ Convey("1", t, func() {
+ output += "1"
+
+ FocusConvey("This will be run because the top-level lacks Focus", func() {
+ output += "2"
+ })
+
+ Convey("3", func() {
+ output += "3"
+ })
+ })
+
+ expectEqual(t, "1213", output)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/gotest/doc_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/gotest/doc_test.go
new file mode 100644
index 00000000000..1b6406be99b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/gotest/doc_test.go
@@ -0,0 +1 @@
+package gotest
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/gotest/utils.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/gotest/utils.go
new file mode 100644
index 00000000000..3a5c848a445
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/gotest/utils.go
@@ -0,0 +1,28 @@
+// Package gotest contains internal functionality. Although this package
+// contains one or more exported names, it is not intended for public
+// consumption. See the examples package for how to use this project.
+package gotest
+
+import (
+ "runtime"
+ "strings"
+)
+
+func ResolveExternalCaller() (file string, line int, name string) {
+ var caller_id uintptr
+ callers := runtime.Callers(0, callStack)
+
+ for x := 0; x < callers; x++ {
+ caller_id, file, line, _ = runtime.Caller(x)
+ if strings.HasSuffix(file, "_test.go") || strings.HasSuffix(file, "_tests.go") {
+ name = runtime.FuncForPC(caller_id).Name()
+ return
+ }
+ }
+	file, line, name = "<unknown file>", -1, "<unknown name>"
+ return // panic?
+}
+
+const maxStackDepth = 100 // This had better be enough...
+
+var callStack []uintptr = make([]uintptr, maxStackDepth, maxStackDepth)
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/init.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/init.go
new file mode 100644
index 00000000000..732b72142e0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/init.go
@@ -0,0 +1,81 @@
+package convey
+
+import (
+ "flag"
+ "os"
+
+ "github.com/jtolds/gls"
+ "github.com/smartystreets/assertions"
+ "github.com/smartystreets/goconvey/convey/reporting"
+)
+
+func init() {
+ assertions.GoConveyMode(true)
+
+ declareFlags()
+
+ ctxMgr = gls.NewContextManager()
+}
+
+func declareFlags() {
+ flag.BoolVar(&json, "json", false, "When true, emits results in JSON blocks. Default: 'false'")
+ flag.BoolVar(&silent, "silent", false, "When true, all output from GoConvey is suppressed.")
+	flag.BoolVar(&story, "story", false, "When true, emits story output, otherwise emits dot output. When not provided, this flag mirrors the value of the '-test.v' flag")
+
+ if noStoryFlagProvided() {
+ story = verboseEnabled
+ }
+
+ // FYI: flag.Parse() is called from the testing package.
+}
+
+func noStoryFlagProvided() bool {
+ return !story && !storyDisabled
+}
+
+func buildReporter() reporting.Reporter {
+ selectReporter := os.Getenv("GOCONVEY_REPORTER")
+
+ switch {
+ case testReporter != nil:
+ return testReporter
+ case json || selectReporter == "json":
+ return reporting.BuildJsonReporter()
+ case silent || selectReporter == "silent":
+ return reporting.BuildSilentReporter()
+ case selectReporter == "dot":
+ // Story is turned on when verbose is set, so we need to check for dot reporter first.
+ return reporting.BuildDotReporter()
+ case story || selectReporter == "story":
+ return reporting.BuildStoryReporter()
+ default:
+ return reporting.BuildDotReporter()
+ }
+}
+
+var (
+ ctxMgr *gls.ContextManager
+
+ // only set by internal tests
+ testReporter reporting.Reporter
+)
+
+var (
+ json bool
+ silent bool
+ story bool
+
+ verboseEnabled = flagFound("-test.v=true")
+ storyDisabled = flagFound("-story=false")
+)
+
+// flagFound parses the command line args manually for flags defined in other
+// packages, like the '-v' flag from the "testing" package, for instance.
+func flagFound(flagValue string) bool {
+ for _, arg := range os.Args {
+ if arg == flagValue {
+ return true
+ }
+ }
+ return false
+}
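
buildReporter() above consults the -json/-silent/-story flags and the GOCONVEY_REPORTER environment variable each time a top-level Convey begins a story. The usual way to choose a reporter is to pass those flags to go test, but as a hedged sketch, a package can also force one programmatically (the package and test names are invented):

package example

import (
	"os"
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

func init() {
	// buildReporter() re-reads GOCONVEY_REPORTER for every top-level Convey,
	// so setting it before the tests run switches this package to the dot
	// reporter even when -test.v would otherwise enable the story reporter.
	// (The -json and -silent cases are checked first and still take priority.)
	os.Setenv("GOCONVEY_REPORTER", "dot")
}

func TestRunsUnderDotReporter(t *testing.T) {
	Convey("output is reported as dots", t, func() {
		So(42, ShouldEqual, 42)
	})
}
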
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/isolated_execution_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/isolated_execution_test.go
new file mode 100644
index 00000000000..7e22b3caa53
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/isolated_execution_test.go
@@ -0,0 +1,774 @@
+package convey
+
+import (
+ "strconv"
+ "testing"
+ "time"
+)
+
+func TestSingleScope(t *testing.T) {
+ output := prepare()
+
+ Convey("hi", t, func() {
+ output += "done"
+ })
+
+ expectEqual(t, "done", output)
+}
+
+func TestSingleScopeWithMultipleConveys(t *testing.T) {
+ output := prepare()
+
+ Convey("1", t, func() {
+ output += "1"
+ })
+
+ Convey("2", t, func() {
+ output += "2"
+ })
+
+ expectEqual(t, "12", output)
+}
+
+func TestNestedScopes(t *testing.T) {
+ output := prepare()
+
+ Convey("a", t, func() {
+ output += "a "
+
+ Convey("bb", func() {
+ output += "bb "
+
+ Convey("ccc", func() {
+ output += "ccc | "
+ })
+ })
+ })
+
+ expectEqual(t, "a bb ccc | ", output)
+}
+
+func TestNestedScopesWithIsolatedExecution(t *testing.T) {
+ output := prepare()
+
+ Convey("a", t, func() {
+ output += "a "
+
+ Convey("aa", func() {
+ output += "aa "
+
+ Convey("aaa", func() {
+ output += "aaa | "
+ })
+
+ Convey("aaa1", func() {
+ output += "aaa1 | "
+ })
+ })
+
+ Convey("ab", func() {
+ output += "ab "
+
+ Convey("abb", func() {
+ output += "abb | "
+ })
+ })
+ })
+
+ expectEqual(t, "a aa aaa | a aa aaa1 | a ab abb | ", output)
+}
+
+func TestSingleScopeWithConveyAndNestedReset(t *testing.T) {
+ output := prepare()
+
+ Convey("1", t, func() {
+ output += "1"
+
+ Reset(func() {
+ output += "a"
+ })
+ })
+
+ expectEqual(t, "1a", output)
+}
+
+func TestPanicingReset(t *testing.T) {
+ output := prepare()
+
+ Convey("1", t, func() {
+ output += "1"
+
+ Reset(func() {
+ panic("nooo")
+ })
+
+ Convey("runs since the reset hasn't yet", func() {
+ output += "a"
+ })
+
+ Convey("but this doesnt", func() {
+ output += "nope"
+ })
+ })
+
+ expectEqual(t, "1a", output)
+}
+
+func TestSingleScopeWithMultipleRegistrationsAndReset(t *testing.T) {
+ output := prepare()
+
+ Convey("reset after each nested convey", t, func() {
+ Convey("first output", func() {
+ output += "1"
+ })
+
+ Convey("second output", func() {
+ output += "2"
+ })
+
+ Reset(func() {
+ output += "a"
+ })
+ })
+
+ expectEqual(t, "1a2a", output)
+}
+
+func TestSingleScopeWithMultipleRegistrationsAndMultipleResets(t *testing.T) {
+ output := prepare()
+
+ Convey("each reset is run at end of each nested convey", t, func() {
+ Convey("1", func() {
+ output += "1"
+ })
+
+ Convey("2", func() {
+ output += "2"
+ })
+
+ Reset(func() {
+ output += "a"
+ })
+
+ Reset(func() {
+ output += "b"
+ })
+ })
+
+ expectEqual(t, "1ab2ab", output)
+}
+
+func Test_Failure_AtHigherLevelScopePreventsChildScopesFromRunning(t *testing.T) {
+ output := prepare()
+
+ Convey("This step fails", t, func() {
+ So(1, ShouldEqual, 2)
+
+ Convey("this should NOT be executed", func() {
+ output += "a"
+ })
+ })
+
+ expectEqual(t, "", output)
+}
+
+func Test_Panic_AtHigherLevelScopePreventsChildScopesFromRunning(t *testing.T) {
+ output := prepare()
+
+ Convey("This step panics", t, func() {
+ Convey("this happens, because the panic didn't happen yet", func() {
+ output += "1"
+ })
+
+ output += "a"
+
+ Convey("this should NOT be executed", func() {
+ output += "2"
+ })
+
+ output += "b"
+
+ panic("Hi")
+
+ output += "nope"
+ })
+
+ expectEqual(t, "1ab", output)
+}
+
+func Test_Panic_InChildScopeDoes_NOT_PreventExecutionOfSiblingScopes(t *testing.T) {
+ output := prepare()
+
+ Convey("This is the parent", t, func() {
+ Convey("This step panics", func() {
+ panic("Hi")
+ output += "1"
+ })
+
+ Convey("This sibling should execute", func() {
+ output += "2"
+ })
+ })
+
+ expectEqual(t, "2", output)
+}
+
+func Test_Failure_InChildScopeDoes_NOT_PreventExecutionOfSiblingScopes(t *testing.T) {
+ output := prepare()
+
+ Convey("This is the parent", t, func() {
+ Convey("This step fails", func() {
+ So(1, ShouldEqual, 2)
+ output += "1"
+ })
+
+ Convey("This sibling should execute", func() {
+ output += "2"
+ })
+ })
+
+ expectEqual(t, "2", output)
+}
+
+func TestResetsAreAlwaysExecutedAfterScope_Panics(t *testing.T) {
+ output := prepare()
+
+ Convey("This is the parent", t, func() {
+ Convey("This step panics", func() {
+ panic("Hi")
+ output += "1"
+ })
+
+ Convey("This sibling step does not panic", func() {
+ output += "a"
+
+ Reset(func() {
+ output += "b"
+ })
+ })
+
+ Reset(func() {
+ output += "2"
+ })
+ })
+
+ expectEqual(t, "2ab2", output)
+}
+
+func TestResetsAreAlwaysExecutedAfterScope_Failures(t *testing.T) {
+ output := prepare()
+
+ Convey("This is the parent", t, func() {
+ Convey("This step fails", func() {
+ So(1, ShouldEqual, 2)
+ output += "1"
+ })
+
+ Convey("This sibling step does not fail", func() {
+ output += "a"
+
+ Reset(func() {
+ output += "b"
+ })
+ })
+
+ Reset(func() {
+ output += "2"
+ })
+ })
+
+ expectEqual(t, "2ab2", output)
+}
+
+func TestSkipTopLevel(t *testing.T) {
+ output := prepare()
+
+ SkipConvey("hi", t, func() {
+ output += "This shouldn't be executed!"
+ })
+
+ expectEqual(t, "", output)
+}
+
+func TestSkipNestedLevel(t *testing.T) {
+ output := prepare()
+
+ Convey("hi", t, func() {
+ output += "yes"
+
+ SkipConvey("bye", func() {
+ output += "no"
+ })
+ })
+
+ expectEqual(t, "yes", output)
+}
+
+func TestSkipNestedLevelSkipsAllChildLevels(t *testing.T) {
+ output := prepare()
+
+ Convey("hi", t, func() {
+ output += "yes"
+
+ SkipConvey("bye", func() {
+ output += "no"
+
+ Convey("byebye", func() {
+ output += "no-no"
+ })
+ })
+ })
+
+ expectEqual(t, "yes", output)
+}
+
+func TestIterativeConveys(t *testing.T) {
+ output := prepare()
+
+ Convey("Test", t, func() {
+ for x := 0; x < 10; x++ {
+ y := strconv.Itoa(x)
+
+ Convey(y, func() {
+ output += y
+ })
+ }
+ })
+
+ expectEqual(t, "0123456789", output)
+}
+
+func TestClosureVariables(t *testing.T) {
+ output := prepare()
+
+ i := 0
+
+ Convey("A", t, func() {
+ i = i + 1
+ j := i
+
+ output += "A" + strconv.Itoa(i) + " "
+
+ Convey("B", func() {
+ k := j
+ j = j + 1
+
+ output += "B" + strconv.Itoa(k) + " "
+
+ Convey("C", func() {
+ output += "C" + strconv.Itoa(k) + strconv.Itoa(j) + " "
+ })
+
+ Convey("D", func() {
+ output += "D" + strconv.Itoa(k) + strconv.Itoa(j) + " "
+ })
+ })
+
+ Convey("C", func() {
+ output += "C" + strconv.Itoa(j) + " "
+ })
+ })
+
+ output += "D" + strconv.Itoa(i) + " "
+
+ expectEqual(t, "A1 B1 C12 A2 B2 D23 A3 C3 D3 ", output)
+}
+
+func TestClosureVariablesWithReset(t *testing.T) {
+ output := prepare()
+
+ i := 0
+
+ Convey("A", t, func() {
+ i = i + 1
+ j := i
+
+ output += "A" + strconv.Itoa(i) + " "
+
+ Reset(func() {
+ output += "R" + strconv.Itoa(i) + strconv.Itoa(j) + " "
+ })
+
+ Convey("B", func() {
+ output += "B" + strconv.Itoa(j) + " "
+ })
+
+ Convey("C", func() {
+ output += "C" + strconv.Itoa(j) + " "
+ })
+ })
+
+ output += "D" + strconv.Itoa(i) + " "
+
+ expectEqual(t, "A1 B1 R11 A2 C2 R22 D2 ", output)
+}
+
+func TestWrappedSimple(t *testing.T) {
+ prepare()
+ output := resetTestString{""}
+
+ Convey("A", t, func() {
+ func() {
+ output.output += "A "
+
+ Convey("B", func() {
+ output.output += "B "
+
+ Convey("C", func() {
+ output.output += "C "
+ })
+
+ })
+
+ Convey("D", func() {
+ output.output += "D "
+ })
+ }()
+ })
+
+ expectEqual(t, "A B C A D ", output.output)
+}
+
+type resetTestString struct {
+ output string
+}
+
+func addReset(o *resetTestString, f func()) func() {
+ return func() {
+ Reset(func() {
+ o.output += "R "
+ })
+
+ f()
+ }
+}
+
+func TestWrappedReset(t *testing.T) {
+ prepare()
+ output := resetTestString{""}
+
+ Convey("A", t, addReset(&output, func() {
+ output.output += "A "
+
+ Convey("B", func() {
+ output.output += "B "
+ })
+
+ Convey("C", func() {
+ output.output += "C "
+ })
+ }))
+
+ expectEqual(t, "A B R A C R ", output.output)
+}
+
+func TestWrappedReset2(t *testing.T) {
+ prepare()
+ output := resetTestString{""}
+
+ Convey("A", t, func() {
+ Reset(func() {
+ output.output += "R "
+ })
+
+ func() {
+ output.output += "A "
+
+ Convey("B", func() {
+ output.output += "B "
+
+ Convey("C", func() {
+ output.output += "C "
+ })
+ })
+
+ Convey("D", func() {
+ output.output += "D "
+ })
+ }()
+ })
+
+ expectEqual(t, "A B C R A D R ", output.output)
+}
+
+func TestInfiniteLoopWithTrailingFail(t *testing.T) {
+ done := make(chan int)
+
+ go func() {
+ Convey("This fails", t, func() {
+ Convey("and this is run", func() {
+ So(true, ShouldEqual, true)
+ })
+
+			/* And this prevents the whole block from being marked as run */
+ So(false, ShouldEqual, true)
+ })
+
+ done <- 1
+ }()
+
+ select {
+ case <-done:
+ return
+ case <-time.After(1 * time.Millisecond):
+ t.Fail()
+ }
+}
+
+func TestOutermostResetInvokedForGrandchildren(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, func() {
+ output += "A "
+
+ Reset(func() {
+ output += "rA "
+ })
+
+ Convey("B", func() {
+ output += "B "
+
+ Reset(func() {
+ output += "rB "
+ })
+
+ Convey("C", func() {
+ output += "C "
+
+ Reset(func() {
+ output += "rC "
+ })
+ })
+
+ Convey("D", func() {
+ output += "D "
+
+ Reset(func() {
+ output += "rD "
+ })
+ })
+ })
+ })
+
+ expectEqual(t, "A B C rC rB rA A B D rD rB rA ", output)
+}
+
+func TestFailureOption(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, FailureHalts, func() {
+ output += "A "
+ So(true, ShouldEqual, true)
+ output += "B "
+ So(false, ShouldEqual, true)
+ output += "C "
+ })
+
+ expectEqual(t, "A B ", output)
+}
+
+func TestFailureOption2(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, func() {
+ output += "A "
+ So(true, ShouldEqual, true)
+ output += "B "
+ So(false, ShouldEqual, true)
+ output += "C "
+ })
+
+ expectEqual(t, "A B ", output)
+}
+
+func TestFailureOption3(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, FailureContinues, func() {
+ output += "A "
+ So(true, ShouldEqual, true)
+ output += "B "
+ So(false, ShouldEqual, true)
+ output += "C "
+ })
+
+ expectEqual(t, "A B C ", output)
+}
+
+func TestFailureOptionInherit(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, FailureContinues, func() {
+ output += "A1 "
+ So(false, ShouldEqual, true)
+ output += "A2 "
+
+ Convey("B", func() {
+ output += "B1 "
+ So(true, ShouldEqual, true)
+ output += "B2 "
+ So(false, ShouldEqual, true)
+ output += "B3 "
+ })
+ })
+
+ expectEqual(t, "A1 A2 B1 B2 B3 ", output)
+}
+
+func TestFailureOptionInherit2(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, FailureHalts, func() {
+ output += "A1 "
+ So(false, ShouldEqual, true)
+ output += "A2 "
+
+ Convey("B", func() {
+ output += "A1 "
+ So(true, ShouldEqual, true)
+ output += "A2 "
+ So(false, ShouldEqual, true)
+ output += "A3 "
+ })
+ })
+
+ expectEqual(t, "A1 ", output)
+}
+
+func TestFailureOptionInherit3(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, FailureHalts, func() {
+ output += "A1 "
+ So(true, ShouldEqual, true)
+ output += "A2 "
+
+ Convey("B", func() {
+ output += "B1 "
+ So(true, ShouldEqual, true)
+ output += "B2 "
+ So(false, ShouldEqual, true)
+ output += "B3 "
+ })
+ })
+
+ expectEqual(t, "A1 A2 B1 B2 ", output)
+}
+
+func TestFailureOptionNestedOverride(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, FailureContinues, func() {
+ output += "A "
+ So(false, ShouldEqual, true)
+ output += "B "
+
+ Convey("C", FailureHalts, func() {
+ output += "C "
+ So(true, ShouldEqual, true)
+ output += "D "
+ So(false, ShouldEqual, true)
+ output += "E "
+ })
+ })
+
+ expectEqual(t, "A B C D ", output)
+}
+
+func TestFailureOptionNestedOverride2(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, FailureHalts, func() {
+ output += "A "
+ So(true, ShouldEqual, true)
+ output += "B "
+
+ Convey("C", FailureContinues, func() {
+ output += "C "
+ So(true, ShouldEqual, true)
+ output += "D "
+ So(false, ShouldEqual, true)
+ output += "E "
+ })
+ })
+
+ expectEqual(t, "A B C D E ", output)
+}
+
+func TestMultipleInvocationInheritance(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, FailureHalts, func() {
+ output += "A1 "
+ So(true, ShouldEqual, true)
+ output += "A2 "
+
+ Convey("B", FailureContinues, func() {
+ output += "B1 "
+ So(true, ShouldEqual, true)
+ output += "B2 "
+ So(false, ShouldEqual, true)
+ output += "B3 "
+ })
+
+ Convey("C", func() {
+ output += "C1 "
+ So(true, ShouldEqual, true)
+ output += "C2 "
+ So(false, ShouldEqual, true)
+ output += "C3 "
+ })
+ })
+
+ expectEqual(t, "A1 A2 B1 B2 B3 A1 A2 C1 C2 ", output)
+}
+
+func TestMultipleInvocationInheritance2(t *testing.T) {
+ output := prepare()
+
+ Convey("A", t, FailureContinues, func() {
+ output += "A1 "
+ So(true, ShouldEqual, true)
+ output += "A2 "
+ So(false, ShouldEqual, true)
+ output += "A3 "
+
+ Convey("B", FailureHalts, func() {
+ output += "B1 "
+ So(true, ShouldEqual, true)
+ output += "B2 "
+ So(false, ShouldEqual, true)
+ output += "B3 "
+ })
+
+ Convey("C", func() {
+ output += "C1 "
+ So(true, ShouldEqual, true)
+ output += "C2 "
+ So(false, ShouldEqual, true)
+ output += "C3 "
+ })
+ })
+
+ expectEqual(t, "A1 A2 A3 B1 B2 A1 A2 A3 C1 C2 C3 ", output)
+}
+
+func TestSetDefaultFailureMode(t *testing.T) {
+ output := prepare()
+
+ SetDefaultFailureMode(FailureContinues) // the default is normally FailureHalts
+ defer SetDefaultFailureMode(FailureHalts)
+
+ Convey("A", t, func() {
+ output += "A1 "
+ So(true, ShouldBeFalse)
+ output += "A2 "
+ })
+
+ expectEqual(t, "A1 A2 ", output)
+}
+
+func prepare() string {
+ testReporter = newNilReporter()
+ return ""
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/nilReporter.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/nilReporter.go
new file mode 100644
index 00000000000..777b2a51228
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/nilReporter.go
@@ -0,0 +1,15 @@
+package convey
+
+import (
+ "github.com/smartystreets/goconvey/convey/reporting"
+)
+
+type nilReporter struct{}
+
+func (self *nilReporter) BeginStory(story *reporting.StoryReport) {}
+func (self *nilReporter) Enter(scope *reporting.ScopeReport) {}
+func (self *nilReporter) Report(report *reporting.AssertionResult) {}
+func (self *nilReporter) Exit() {}
+func (self *nilReporter) EndStory() {}
+func (self *nilReporter) Write(p []byte) (int, error) { return len(p), nil }
+func newNilReporter() *nilReporter { return &nilReporter{} }
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/console.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/console.go
new file mode 100644
index 00000000000..7bf67dbb2b1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/console.go
@@ -0,0 +1,16 @@
+package reporting
+
+import (
+ "fmt"
+ "io"
+)
+
+type console struct{}
+
+func (self *console) Write(p []byte) (n int, err error) {
+ return fmt.Print(string(p))
+}
+
+func NewConsole() io.Writer {
+ return new(console)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/doc.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/doc.go
new file mode 100644
index 00000000000..a37d0019466
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/doc.go
@@ -0,0 +1,5 @@
+// Package reporting contains internal functionality related
+// to console reporting and output. Although this package has
+// exported names, it is not intended for public consumption. See the
+// examples package for how to use this project.
+package reporting
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/dot.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/dot.go
new file mode 100644
index 00000000000..47d57c6b0d9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/dot.go
@@ -0,0 +1,40 @@
+package reporting
+
+import "fmt"
+
+type dot struct{ out *Printer }
+
+func (self *dot) BeginStory(story *StoryReport) {}
+
+func (self *dot) Enter(scope *ScopeReport) {}
+
+func (self *dot) Report(report *AssertionResult) {
+ if report.Error != nil {
+ fmt.Print(redColor)
+ self.out.Insert(dotError)
+ } else if report.Failure != "" {
+ fmt.Print(yellowColor)
+ self.out.Insert(dotFailure)
+ } else if report.Skipped {
+ fmt.Print(yellowColor)
+ self.out.Insert(dotSkip)
+ } else {
+ fmt.Print(greenColor)
+ self.out.Insert(dotSuccess)
+ }
+ fmt.Print(resetColor)
+}
+
+func (self *dot) Exit() {}
+
+func (self *dot) EndStory() {}
+
+func (self *dot) Write(content []byte) (written int, err error) {
+ return len(content), nil // no-op
+}
+
+func NewDotReporter(out *Printer) *dot {
+ self := new(dot)
+ self.out = out
+ return self
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/dot_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/dot_test.go
new file mode 100644
index 00000000000..a8d20d46f08
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/dot_test.go
@@ -0,0 +1,40 @@
+package reporting
+
+import (
+ "errors"
+ "testing"
+)
+
+func TestDotReporterAssertionPrinting(t *testing.T) {
+ monochrome()
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ reporter := NewDotReporter(printer)
+
+ reporter.Report(NewSuccessReport())
+ reporter.Report(NewFailureReport("failed"))
+ reporter.Report(NewErrorReport(errors.New("error")))
+ reporter.Report(NewSkipReport())
+
+ expected := dotSuccess + dotFailure + dotError + dotSkip
+
+ if file.buffer != expected {
+ t.Errorf("\nExpected: '%s'\nActual: '%s'", expected, file.buffer)
+ }
+}
+
+func TestDotReporterOnlyReportsAssertions(t *testing.T) {
+ monochrome()
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ reporter := NewDotReporter(printer)
+
+ reporter.BeginStory(nil)
+ reporter.Enter(nil)
+ reporter.Exit()
+ reporter.EndStory()
+
+ if file.buffer != "" {
+ t.Errorf("\nExpected: '(blank)'\nActual: '%s'", file.buffer)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/gotest.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/gotest.go
new file mode 100644
index 00000000000..c396e16b17a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/gotest.go
@@ -0,0 +1,33 @@
+package reporting
+
+type gotestReporter struct{ test T }
+
+func (self *gotestReporter) BeginStory(story *StoryReport) {
+ self.test = story.Test
+}
+
+func (self *gotestReporter) Enter(scope *ScopeReport) {}
+
+func (self *gotestReporter) Report(r *AssertionResult) {
+ if !passed(r) {
+ self.test.Fail()
+ }
+}
+
+func (self *gotestReporter) Exit() {}
+
+func (self *gotestReporter) EndStory() {
+ self.test = nil
+}
+
+func (self *gotestReporter) Write(content []byte) (written int, err error) {
+ return len(content), nil // no-op
+}
+
+func NewGoTestReporter() *gotestReporter {
+ return new(gotestReporter)
+}
+
+func passed(r *AssertionResult) bool {
+ return r.Error == nil && r.Failure == ""
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/gotest_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/gotest_test.go
new file mode 100644
index 00000000000..fda189458e5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/gotest_test.go
@@ -0,0 +1,66 @@
+package reporting
+
+import "testing"
+
+func TestReporterReceivesSuccessfulReport(t *testing.T) {
+ reporter := NewGoTestReporter()
+ test := new(fakeTest)
+ reporter.BeginStory(NewStoryReport(test))
+ reporter.Report(NewSuccessReport())
+
+ if test.failed {
+		t.Errorf("Should NOT have marked test as failed--the report reflected success.")
+ }
+}
+
+func TestReporterReceivesFailureReport(t *testing.T) {
+ reporter := NewGoTestReporter()
+ test := new(fakeTest)
+ reporter.BeginStory(NewStoryReport(test))
+ reporter.Report(NewFailureReport("This is a failure."))
+
+ if !test.failed {
+ t.Errorf("Test should have been marked as failed (but it wasn't).")
+ }
+}
+
+func TestReporterReceivesErrorReport(t *testing.T) {
+ reporter := NewGoTestReporter()
+ test := new(fakeTest)
+ reporter.BeginStory(NewStoryReport(test))
+ reporter.Report(NewErrorReport("This is an error."))
+
+ if !test.failed {
+ t.Errorf("Test should have been marked as failed (but it wasn't).")
+ }
+}
+
+func TestReporterIsResetAtTheEndOfTheStory(t *testing.T) {
+ defer catch(t)
+ reporter := NewGoTestReporter()
+ test := new(fakeTest)
+ reporter.BeginStory(NewStoryReport(test))
+ reporter.EndStory()
+
+ reporter.Report(NewSuccessReport())
+}
+
+func TestReporterNoopMethods(t *testing.T) {
+ reporter := NewGoTestReporter()
+ reporter.Enter(NewScopeReport("title"))
+ reporter.Exit()
+}
+
+func catch(t *testing.T) {
+ if r := recover(); r != nil {
+ t.Log("Getting to this point means we've passed (because we caught a panic appropriately).")
+ }
+}
+
+type fakeTest struct {
+ failed bool
+}
+
+func (self *fakeTest) Fail() {
+ self.failed = true
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/init.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/init.go
new file mode 100644
index 00000000000..44d080e90e6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/init.go
@@ -0,0 +1,94 @@
+package reporting
+
+import (
+ "os"
+ "runtime"
+ "strings"
+)
+
+func init() {
+ if !isColorableTerminal() {
+ monochrome()
+ }
+
+ if runtime.GOOS == "windows" {
+ success, failure, error_ = dotSuccess, dotFailure, dotError
+ }
+}
+
+func BuildJsonReporter() Reporter {
+ out := NewPrinter(NewConsole())
+ return NewReporters(
+ NewGoTestReporter(),
+ NewJsonReporter(out))
+}
+func BuildDotReporter() Reporter {
+ out := NewPrinter(NewConsole())
+ return NewReporters(
+ NewGoTestReporter(),
+ NewDotReporter(out),
+ NewProblemReporter(out),
+ consoleStatistics)
+}
+func BuildStoryReporter() Reporter {
+ out := NewPrinter(NewConsole())
+ return NewReporters(
+ NewGoTestReporter(),
+ NewStoryReporter(out),
+ NewProblemReporter(out),
+ consoleStatistics)
+}
+func BuildSilentReporter() Reporter {
+ out := NewPrinter(NewConsole())
+ return NewReporters(
+ NewGoTestReporter(),
+ NewSilentProblemReporter(out))
+}
+
+var (
+ newline = "\n"
+ success = "✔"
+ failure = "✘"
+ error_ = "🔥"
+ skip = "⚠"
+ dotSuccess = "."
+ dotFailure = "x"
+ dotError = "E"
+ dotSkip = "S"
+ errorTemplate = "* %s \nLine %d: - %v \n%s\n"
+ failureTemplate = "* %s \nLine %d:\n%s\n"
+)
+
+var (
+ greenColor = "\033[32m"
+ yellowColor = "\033[33m"
+ redColor = "\033[31m"
+ resetColor = "\033[0m"
+)
+
+var consoleStatistics = NewStatisticsReporter(NewPrinter(NewConsole()))
+
+func SuppressConsoleStatistics() { consoleStatistics.Suppress() }
+func PrintConsoleStatistics() { consoleStatistics.PrintSummary() }
+
+// QuietMode disables all console output symbols. This is only meant to be used
+// for tests that are internal to goconvey where the output is distracting or
+// otherwise not needed in the test output.
+func QuietMode() {
+ success, failure, error_, skip, dotSuccess, dotFailure, dotError, dotSkip = "", "", "", "", "", "", "", ""
+}
+
+func monochrome() {
+ greenColor, yellowColor, redColor, resetColor = "", "", "", ""
+}
+
+func isColorableTerminal() bool {
+ return strings.Contains(os.Getenv("TERM"), "color")
+}
+
+// This interface allows us to pass the *testing.T struct
+// throughout the internals of this tool without ever
+// having to import the "testing" package.
+type T interface {
+ Fail()
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/json.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/json.go
new file mode 100644
index 00000000000..f8526979f85
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/json.go
@@ -0,0 +1,88 @@
+// TODO: under unit test
+
+package reporting
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "strings"
+)
+
+type JsonReporter struct {
+ out *Printer
+ currentKey []string
+ current *ScopeResult
+ index map[string]*ScopeResult
+ scopes []*ScopeResult
+}
+
+func (self *JsonReporter) depth() int { return len(self.currentKey) }
+
+func (self *JsonReporter) BeginStory(story *StoryReport) {}
+
+func (self *JsonReporter) Enter(scope *ScopeReport) {
+ self.currentKey = append(self.currentKey, scope.Title)
+ ID := strings.Join(self.currentKey, "|")
+ if _, found := self.index[ID]; !found {
+ next := newScopeResult(scope.Title, self.depth(), scope.File, scope.Line)
+ self.scopes = append(self.scopes, next)
+ self.index[ID] = next
+ }
+ self.current = self.index[ID]
+}
+
+func (self *JsonReporter) Report(report *AssertionResult) {
+ self.current.Assertions = append(self.current.Assertions, report)
+}
+
+func (self *JsonReporter) Exit() {
+ self.currentKey = self.currentKey[:len(self.currentKey)-1]
+}
+
+func (self *JsonReporter) EndStory() {
+ self.report()
+ self.reset()
+}
+func (self *JsonReporter) report() {
+ scopes := []string{}
+ for _, scope := range self.scopes {
+ serialized, err := json.Marshal(scope)
+ if err != nil {
+ self.out.Println(jsonMarshalFailure)
+ panic(err)
+ }
+ var buffer bytes.Buffer
+ json.Indent(&buffer, serialized, "", " ")
+ scopes = append(scopes, buffer.String())
+ }
+ self.out.Print(fmt.Sprintf("%s\n%s,\n%s\n", OpenJson, strings.Join(scopes, ","), CloseJson))
+}
+func (self *JsonReporter) reset() {
+ self.scopes = []*ScopeResult{}
+ self.index = map[string]*ScopeResult{}
+ self.currentKey = nil
+}
+
+func (self *JsonReporter) Write(content []byte) (written int, err error) {
+ self.current.Output += string(content)
+ return len(content), nil
+}
+
+func NewJsonReporter(out *Printer) *JsonReporter {
+ self := new(JsonReporter)
+ self.out = out
+ self.reset()
+ return self
+}
+
+const OpenJson = ">->->OPEN-JSON->->->" // "⌦"
+const CloseJson = "<-<-<-CLOSE-JSON<-<-<" // "⌫"
+const jsonMarshalFailure = `
+
+GOCONVEY_JSON_MARSHALL_FAILURE: There was an error when attempting to convert test results to JSON.
+Please file a bug report and reference the code that caused this failure if possible.
+
+Here's the panic:
+
+`
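
The JSON reporter above frames each story's scopes between the OpenJson and CloseJson sentinels so a consumer (in practice, the GoConvey web UI) can fish the JSON out of ordinary go test output. A rough sketch of such a consumer, assuming only what report() above emits (the marker strings are copied from the constants above; the function and package names are invented):

package example

import (
	"encoding/json"
	"fmt"
	"strings"
)

const (
	openJson  = ">->->OPEN-JSON->->->"
	closeJson = "<-<-<-CLOSE-JSON<-<-<"
)

// extractScopes pulls the text between the sentinels and decodes it.
// report() joins the scope objects with commas and leaves a trailing comma,
// so the body is wrapped in brackets to form a valid JSON array.
func extractScopes(output string) ([]map[string]interface{}, error) {
	start := strings.Index(output, openJson)
	end := strings.Index(output, closeJson)
	if start < 0 || end < 0 || end < start {
		return nil, fmt.Errorf("no JSON envelope found in output")
	}

	body := strings.TrimSpace(output[start+len(openJson) : end])
	body = strings.TrimSuffix(body, ",")

	var scopes []map[string]interface{}
	if err := json.Unmarshal([]byte("["+body+"]"), &scopes); err != nil {
		return nil, err
	}
	return scopes, nil
}
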
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/printer.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/printer.go
new file mode 100644
index 00000000000..6d4a879c40d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/printer.go
@@ -0,0 +1,57 @@
+package reporting
+
+import (
+ "fmt"
+ "io"
+ "strings"
+)
+
+type Printer struct {
+ out io.Writer
+ prefix string
+}
+
+func (self *Printer) Println(message string, values ...interface{}) {
+ formatted := self.format(message, values...) + newline
+ self.out.Write([]byte(formatted))
+}
+
+func (self *Printer) Print(message string, values ...interface{}) {
+ formatted := self.format(message, values...)
+ self.out.Write([]byte(formatted))
+}
+
+func (self *Printer) Insert(text string) {
+ self.out.Write([]byte(text))
+}
+
+func (self *Printer) format(message string, values ...interface{}) string {
+ var formatted string
+ if len(values) == 0 {
+ formatted = self.prefix + message
+ } else {
+ formatted = self.prefix + fmt.Sprintf(message, values...)
+ }
+ indented := strings.Replace(formatted, newline, newline+self.prefix, -1)
+ return strings.TrimRight(indented, space)
+}
+
+func (self *Printer) Indent() {
+ self.prefix += pad
+}
+
+func (self *Printer) Dedent() {
+ if len(self.prefix) >= padLength {
+ self.prefix = self.prefix[:len(self.prefix)-padLength]
+ }
+}
+
+func NewPrinter(out io.Writer) *Printer {
+ self := new(Printer)
+ self.out = out
+ return self
+}
+
+const space = " "
+const pad = space + space
+const padLength = len(pad)
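
Printer above is what gives the story reporter its nested layout: format() prepends the current prefix to every line, and Indent()/Dedent() grow and shrink that prefix two spaces at a time. A small sketch driving it directly (the reporting package is documented as internal, so this is illustration only; the bytes.Buffer destination is my choice):

package main

import (
	"bytes"
	"fmt"

	"github.com/smartystreets/goconvey/convey/reporting"
)

func main() {
	var buf bytes.Buffer
	printer := reporting.NewPrinter(&buf)

	printer.Println("Top-level scope")
	printer.Indent()
	printer.Println("nested line %d", 1)
	printer.Println("nested line %d", 2)
	printer.Dedent()
	printer.Println("back at the top")

	fmt.Print(buf.String())
	// Output:
	// Top-level scope
	//   nested line 1
	//   nested line 2
	// back at the top
}
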
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/printer_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/printer_test.go
new file mode 100644
index 00000000000..94202d5ac97
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/printer_test.go
@@ -0,0 +1,181 @@
+package reporting
+
+import "testing"
+
+func TestPrint(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ const expected = "Hello, World!"
+
+ printer.Print(expected)
+
+ if file.buffer != expected {
+ t.Errorf("Expected '%s' to equal '%s'.", expected, file.buffer)
+ }
+}
+
+func TestPrintFormat(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ template := "Hi, %s"
+ name := "Ralph"
+ expected := "Hi, Ralph"
+
+ printer.Print(template, name)
+
+ if file.buffer != expected {
+ t.Errorf("Expected '%s' to equal '%s'.", expected, file.buffer)
+ }
+}
+
+func TestPrintPreservesEncodedStrings(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ const expected = "= -> %3D"
+ printer.Print(expected)
+
+ if file.buffer != expected {
+ t.Errorf("Expected '%s' to equal '%s'.", expected, file.buffer)
+ }
+}
+
+func TestPrintln(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ const expected = "Hello, World!"
+
+ printer.Println(expected)
+
+ if file.buffer != expected+"\n" {
+ t.Errorf("Expected '%s' to equal '%s'.", expected, file.buffer)
+ }
+}
+
+func TestPrintlnFormat(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ template := "Hi, %s"
+ name := "Ralph"
+ expected := "Hi, Ralph\n"
+
+ printer.Println(template, name)
+
+ if file.buffer != expected {
+ t.Errorf("Expected '%s' to equal '%s'.", expected, file.buffer)
+ }
+}
+
+func TestPrintlnPreservesEncodedStrings(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ const expected = "= -> %3D"
+ printer.Println(expected)
+
+ if file.buffer != expected+"\n" {
+ t.Errorf("Expected '%s' to equal '%s'.", expected, file.buffer)
+ }
+}
+
+func TestPrintIndented(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ const message = "Hello, World!\nGoodbye, World!"
+ const expected = " Hello, World!\n Goodbye, World!"
+
+ printer.Indent()
+ printer.Print(message)
+
+ if file.buffer != expected {
+ t.Errorf("Expected '%s' to equal '%s'.", expected, file.buffer)
+ }
+}
+
+func TestPrintDedented(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ const expected = "Hello, World!\nGoodbye, World!"
+
+ printer.Indent()
+ printer.Dedent()
+ printer.Print(expected)
+
+ if file.buffer != expected {
+ t.Errorf("Expected '%s' to equal '%s'.", expected, file.buffer)
+ }
+}
+
+func TestPrintlnIndented(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ const message = "Hello, World!\nGoodbye, World!"
+ const expected = " Hello, World!\n Goodbye, World!\n"
+
+ printer.Indent()
+ printer.Println(message)
+
+ if file.buffer != expected {
+ t.Errorf("Expected '%s' to equal '%s'.", expected, file.buffer)
+ }
+}
+
+func TestPrintlnDedented(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+ const expected = "Hello, World!\nGoodbye, World!"
+
+ printer.Indent()
+ printer.Dedent()
+ printer.Println(expected)
+
+ if file.buffer != expected+"\n" {
+ t.Errorf("Expected '%s' to equal '%s'.", expected, file.buffer)
+ }
+}
+
+func TestDedentTooFarShouldNotPanic(t *testing.T) {
+ defer func() {
+ if r := recover(); r != nil {
+ t.Error("Should not have panicked!")
+ }
+ }()
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+
+ printer.Dedent()
+
+ t.Log("Getting to this point without panicking means we passed.")
+}
+
+func TestInsert(t *testing.T) {
+ file := newMemoryFile()
+ printer := NewPrinter(file)
+
+ printer.Indent()
+ printer.Print("Hi")
+ printer.Insert(" there")
+ printer.Dedent()
+
+ expected := " Hi there"
+ if file.buffer != expected {
+ t.Errorf("Should have written '%s' but instead wrote '%s'.", expected, file.buffer)
+ }
+}
+
+////////////////// memoryFile ////////////////////
+
+type memoryFile struct {
+ buffer string
+}
+
+func (self *memoryFile) Write(p []byte) (n int, err error) {
+ self.buffer += string(p)
+ return len(p), nil
+}
+
+func (self *memoryFile) String() string {
+ return self.buffer
+}
+
+func newMemoryFile() *memoryFile {
+ return new(memoryFile)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/problems.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/problems.go
new file mode 100644
index 00000000000..9ae493ac3b7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/problems.go
@@ -0,0 +1,80 @@
+package reporting
+
+import "fmt"
+
+type problem struct {
+ silent bool
+ out *Printer
+ errors []*AssertionResult
+ failures []*AssertionResult
+}
+
+func (self *problem) BeginStory(story *StoryReport) {}
+
+func (self *problem) Enter(scope *ScopeReport) {}
+
+func (self *problem) Report(report *AssertionResult) {
+ if report.Error != nil {
+ self.errors = append(self.errors, report)
+ } else if report.Failure != "" {
+ self.failures = append(self.failures, report)
+ }
+}
+
+func (self *problem) Exit() {}
+
+func (self *problem) EndStory() {
+ self.show(self.showErrors, redColor)
+ self.show(self.showFailures, yellowColor)
+ self.prepareForNextStory()
+}
+func (self *problem) show(display func(), color string) {
+ if !self.silent {
+ fmt.Print(color)
+ }
+ display()
+ if !self.silent {
+ fmt.Print(resetColor)
+ }
+ self.out.Dedent()
+}
+func (self *problem) showErrors() {
+ for i, e := range self.errors {
+ if i == 0 {
+ self.out.Println("\nErrors:\n")
+ self.out.Indent()
+ }
+ self.out.Println(errorTemplate, e.File, e.Line, e.Error, e.StackTrace)
+ }
+}
+func (self *problem) showFailures() {
+ for i, f := range self.failures {
+ if i == 0 {
+ self.out.Println("\nFailures:\n")
+ self.out.Indent()
+ }
+ self.out.Println(failureTemplate, f.File, f.Line, f.Failure)
+ }
+}
+
+func (self *problem) Write(content []byte) (written int, err error) {
+ return len(content), nil // no-op
+}
+
+func NewProblemReporter(out *Printer) *problem {
+ self := new(problem)
+ self.out = out
+ self.prepareForNextStory()
+ return self
+}
+
+func NewSilentProblemReporter(out *Printer) *problem {
+ self := NewProblemReporter(out)
+ self.silent = true
+ return self
+}
+
+func (self *problem) prepareForNextStory() {
+ self.errors = []*AssertionResult{}
+ self.failures = []*AssertionResult{}
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/problems_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/problems_test.go
new file mode 100644
index 00000000000..92f0ca35cca
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/problems_test.go
@@ -0,0 +1,51 @@
+package reporting
+
+import (
+ "strings"
+ "testing"
+)
+
+func TestNoopProblemReporterActions(t *testing.T) {
+ file, reporter := setup()
+ reporter.BeginStory(nil)
+ reporter.Enter(nil)
+ reporter.Exit()
+ expected := ""
+ actual := file.String()
+ if expected != actual {
+ t.Errorf("Expected: '(blank)'\nActual: '%s'", actual)
+ }
+}
+
+func TestReporterPrintsFailuresAndErrorsAtTheEndOfTheStory(t *testing.T) {
+ file, reporter := setup()
+ reporter.Report(NewFailureReport("failed"))
+ reporter.Report(NewErrorReport("error"))
+ reporter.Report(NewSuccessReport())
+ reporter.EndStory()
+
+ result := file.String()
+ if !strings.Contains(result, "Errors:\n") {
+ t.Errorf("Expected errors, found none.")
+ }
+ if !strings.Contains(result, "Failures:\n") {
+ t.Errorf("Expected failures, found none.")
+ }
+
+ // Each stack trace looks like: `* /path/to/file.go`, so look for `* `.
+ // With go 1.4+ there is a line in some stack traces that looks like this:
+ // `testing.(*M).Run(0x2082d60a0, 0x25b7c0)`
+ // So we can't just look for "*" anymore.
+ problemCount := strings.Count(result, "* ")
+ if problemCount != 2 {
+ t.Errorf("Expected one failure and one error (total of 2 '*' characters). Got %d", problemCount)
+ }
+}
+
+func setup() (file *memoryFile, reporter *problem) {
+ monochrome()
+ file = newMemoryFile()
+ printer := NewPrinter(file)
+ reporter = NewProblemReporter(printer)
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporter.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporter.go
new file mode 100644
index 00000000000..cce6c5e4388
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporter.go
@@ -0,0 +1,39 @@
+package reporting
+
+import "io"
+
+type Reporter interface {
+ BeginStory(story *StoryReport)
+ Enter(scope *ScopeReport)
+ Report(r *AssertionResult)
+ Exit()
+ EndStory()
+ io.Writer
+}
+
+type reporters struct{ collection []Reporter }
+
+func (self *reporters) BeginStory(s *StoryReport) { self.foreach(func(r Reporter) { r.BeginStory(s) }) }
+func (self *reporters) Enter(s *ScopeReport) { self.foreach(func(r Reporter) { r.Enter(s) }) }
+func (self *reporters) Report(a *AssertionResult) { self.foreach(func(r Reporter) { r.Report(a) }) }
+func (self *reporters) Exit() { self.foreach(func(r Reporter) { r.Exit() }) }
+func (self *reporters) EndStory() { self.foreach(func(r Reporter) { r.EndStory() }) }
+
+func (self *reporters) Write(contents []byte) (written int, err error) {
+ self.foreach(func(r Reporter) {
+ written, err = r.Write(contents)
+ })
+ return written, err
+}
+
+func (self *reporters) foreach(action func(Reporter)) {
+ for _, r := range self.collection {
+ action(r)
+ }
+}
+
+func NewReporters(collection ...Reporter) *reporters {
+ self := new(reporters)
+ self.collection = collection
+ return self
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporter_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporter_test.go
new file mode 100644
index 00000000000..4e5caf63b2b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporter_test.go
@@ -0,0 +1,94 @@
+package reporting
+
+import (
+ "runtime"
+ "testing"
+)
+
+func TestEachNestedReporterReceivesTheCallFromTheContainingReporter(t *testing.T) {
+ fake1 := newFakeReporter()
+ fake2 := newFakeReporter()
+ reporter := NewReporters(fake1, fake2)
+
+ reporter.BeginStory(nil)
+ assertTrue(t, fake1.begun)
+ assertTrue(t, fake2.begun)
+
+ reporter.Enter(NewScopeReport("scope"))
+ assertTrue(t, fake1.entered)
+ assertTrue(t, fake2.entered)
+
+ reporter.Report(NewSuccessReport())
+ assertTrue(t, fake1.reported)
+ assertTrue(t, fake2.reported)
+
+ reporter.Exit()
+ assertTrue(t, fake1.exited)
+ assertTrue(t, fake2.exited)
+
+ reporter.EndStory()
+ assertTrue(t, fake1.ended)
+ assertTrue(t, fake2.ended)
+
+ content := []byte("hi")
+ written, err := reporter.Write(content)
+ assertTrue(t, fake1.written)
+ assertTrue(t, fake2.written)
+ assertEqual(t, written, len(content))
+ assertNil(t, err)
+
+}
+
+func assertTrue(t *testing.T, value bool) {
+ if !value {
+ _, _, line, _ := runtime.Caller(1)
+ t.Errorf("Value should have been true (but was false). See line %d", line)
+ }
+}
+
+func assertEqual(t *testing.T, expected, actual int) {
+ if actual != expected {
+ _, _, line, _ := runtime.Caller(1)
+ t.Errorf("Value should have been %d (but was %d). See line %d", expected, actual, line)
+ }
+}
+
+func assertNil(t *testing.T, err error) {
+ if err != nil {
+ _, _, line, _ := runtime.Caller(1)
+ t.Errorf("Error should have been <nil> (but wasn't). See line %d", err, line)
+ }
+}
+
+type fakeReporter struct {
+ begun bool
+ entered bool
+ reported bool
+ exited bool
+ ended bool
+ written bool
+}
+
+func newFakeReporter() *fakeReporter {
+ return &fakeReporter{}
+}
+
+func (self *fakeReporter) BeginStory(story *StoryReport) {
+ self.begun = true
+}
+func (self *fakeReporter) Enter(scope *ScopeReport) {
+ self.entered = true
+}
+func (self *fakeReporter) Report(report *AssertionResult) {
+ self.reported = true
+}
+func (self *fakeReporter) Exit() {
+ self.exited = true
+}
+func (self *fakeReporter) EndStory() {
+ self.ended = true
+}
+func (self *fakeReporter) Write(content []byte) (int, error) {
+ self.written = true
+ return len(content), nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporting.goconvey b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporting.goconvey
new file mode 100644
index 00000000000..79982854b53
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reporting.goconvey
@@ -0,0 +1,2 @@
+#ignore
+-timeout=1s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reports.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reports.go
new file mode 100644
index 00000000000..712e6ade625
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/reports.go
@@ -0,0 +1,179 @@
+package reporting
+
+import (
+ "encoding/json"
+ "fmt"
+ "runtime"
+ "strings"
+
+ "github.com/smartystreets/goconvey/convey/gotest"
+)
+
+////////////////// ScopeReport ////////////////////
+
+type ScopeReport struct {
+ Title string
+ File string
+ Line int
+}
+
+func NewScopeReport(title string) *ScopeReport {
+ file, line, _ := gotest.ResolveExternalCaller()
+ self := new(ScopeReport)
+ self.Title = title
+ self.File = file
+ self.Line = line
+ return self
+}
+
+////////////////// ScopeResult ////////////////////
+
+type ScopeResult struct {
+ Title string
+ File string
+ Line int
+ Depth int
+ Assertions []*AssertionResult
+ Output string
+}
+
+func newScopeResult(title string, depth int, file string, line int) *ScopeResult {
+ self := new(ScopeResult)
+ self.Title = title
+ self.Depth = depth
+ self.File = file
+ self.Line = line
+ self.Assertions = []*AssertionResult{}
+ return self
+}
+
+/////////////////// StoryReport /////////////////////
+
+type StoryReport struct {
+ Test T
+ Name string
+ File string
+ Line int
+}
+
+func NewStoryReport(test T) *StoryReport {
+ file, line, name := gotest.ResolveExternalCaller()
+ name = removePackagePath(name)
+ self := new(StoryReport)
+ self.Test = test
+ self.Name = name
+ self.File = file
+ self.Line = line
+ return self
+}
+
+// name comes in looking like "github.com/smartystreets/goconvey/examples.TestName".
+// We only want the stuff after the last '.', which is the name of the test function.
+func removePackagePath(name string) string {
+ parts := strings.Split(name, ".")
+ return parts[len(parts)-1]
+}
+
+/////////////////// FailureView ////////////////////////
+
+// This struct is also declared in github.com/smartystreets/assertions.
+// The json struct tags should be equal in both declarations.
+type FailureView struct {
+ Message string `json:"Message"`
+ Expected string `json:"Expected"`
+ Actual string `json:"Actual"`
+}
+
+/////////////////// AssertionResult ////////////////////
+
+type AssertionResult struct {
+ File string
+ Line int
+ Expected string
+ Actual string
+ Failure string
+ Error interface{}
+ StackTrace string
+ Skipped bool
+}
+
+func NewFailureReport(failure string) *AssertionResult {
+ report := new(AssertionResult)
+ report.File, report.Line = caller()
+ report.StackTrace = stackTrace()
+ parseFailure(failure, report)
+ return report
+}
+func parseFailure(failure string, report *AssertionResult) {
+ view := new(FailureView)
+ err := json.Unmarshal([]byte(failure), view)
+ if err == nil {
+ report.Failure = view.Message
+ report.Expected = view.Expected
+ report.Actual = view.Actual
+ } else {
+ report.Failure = failure
+ }
+}
+func NewErrorReport(err interface{}) *AssertionResult {
+ report := new(AssertionResult)
+ report.File, report.Line = caller()
+ report.StackTrace = fullStackTrace()
+ report.Error = fmt.Sprintf("%v", err)
+ return report
+}
+func NewSuccessReport() *AssertionResult {
+ return new(AssertionResult)
+}
+func NewSkipReport() *AssertionResult {
+ report := new(AssertionResult)
+ report.File, report.Line = caller()
+ report.StackTrace = fullStackTrace()
+ report.Skipped = true
+ return report
+}
+
+func caller() (file string, line int) {
+ file, line, _ = gotest.ResolveExternalCaller()
+ return
+}
+
+func stackTrace() string {
+ buffer := make([]byte, 1024*64)
+ n := runtime.Stack(buffer, false)
+ return removeInternalEntries(string(buffer[:n]))
+}
+func fullStackTrace() string {
+ buffer := make([]byte, 1024*64)
+ n := runtime.Stack(buffer, true)
+ return removeInternalEntries(string(buffer[:n]))
+}
+func removeInternalEntries(stack string) string {
+ lines := strings.Split(stack, newline)
+ filtered := []string{}
+ for _, line := range lines {
+ if !isExternal(line) {
+ filtered = append(filtered, line)
+ }
+ }
+ return strings.Join(filtered, newline)
+}
+func isExternal(line string) bool {
+ for _, p := range internalPackages {
+ if strings.Contains(line, p) {
+ return true
+ }
+ }
+ return false
+}
+
+// NOTE: any new packages that host goconvey packages will need to be added here!
+// An alternative is to scan the goconvey directory and then exclude stuff like
+// the examples package but that's nasty too.
+var internalPackages = []string{
+ "goconvey/assertions",
+ "goconvey/convey",
+ "goconvey/execution",
+ "goconvey/gotest",
+ "goconvey/reporting",
+}
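
Worth noting in the file above: NewFailureReport first attempts to decode the failure string as a FailureView JSON document and only falls back to storing the raw string when decoding fails. A minimal in-package sketch of that behavior (the helper name and sample values are hypothetical, not part of the vendored sources):

package reporting

import "fmt"

// demoFailureParsing is a hypothetical helper illustrating parseFailure's
// JSON-or-verbatim behavior; it is not part of the vendored sources.
func demoFailureParsing() {
	// A structured failure, in the shape the assertions package serializes.
	structured := `{"Message":"expected things to match","Expected":"bye","Actual":"hi"}`
	report := NewFailureReport(structured)
	fmt.Println(report.Failure)  // expected things to match
	fmt.Println(report.Expected) // bye
	fmt.Println(report.Actual)   // hi

	// A plain string is not valid JSON, so it is stored verbatim in Failure.
	plain := NewFailureReport("something broke")
	fmt.Println(plain.Failure)  // something broke
	fmt.Println(plain.Expected) // (empty)
}
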
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/statistics.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/statistics.go
new file mode 100644
index 00000000000..28e1d2071d4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/statistics.go
@@ -0,0 +1,89 @@
+package reporting
+
+import "fmt"
+
+func (self *statistics) BeginStory(story *StoryReport) {}
+
+func (self *statistics) Enter(scope *ScopeReport) {}
+
+func (self *statistics) Report(report *AssertionResult) {
+ if !self.failing && report.Failure != "" {
+ self.failing = true
+ }
+ if !self.erroring && report.Error != nil {
+ self.erroring = true
+ }
+ if report.Skipped {
+ self.skipped++
+ } else {
+ self.total++
+ }
+}
+
+func (self *statistics) Exit() {}
+
+func (self *statistics) EndStory() {
+ if !self.suppressed {
+ self.PrintSummary()
+ }
+}
+
+func (self *statistics) Suppress() {
+ self.suppressed = true
+}
+
+func (self *statistics) PrintSummary() {
+ self.reportAssertions()
+ self.reportSkippedSections()
+ self.completeReport()
+}
+func (self *statistics) reportAssertions() {
+ self.decideColor()
+ self.out.Print("\n%d total %s", self.total, plural("assertion", self.total))
+}
+func (self *statistics) decideColor() {
+ if self.failing && !self.erroring {
+ fmt.Print(yellowColor)
+ } else if self.erroring {
+ fmt.Print(redColor)
+ } else {
+ fmt.Print(greenColor)
+ }
+}
+func (self *statistics) reportSkippedSections() {
+ if self.skipped > 0 {
+ fmt.Print(yellowColor)
+ self.out.Print(" (one or more sections skipped)")
+ }
+}
+func (self *statistics) completeReport() {
+ fmt.Print(resetColor)
+ self.out.Print("\n")
+ self.out.Print("\n")
+}
+
+func (self *statistics) Write(content []byte) (written int, err error) {
+ return len(content), nil // no-op
+}
+
+func NewStatisticsReporter(out *Printer) *statistics {
+ self := statistics{}
+ self.out = out
+ return &self
+}
+
+type statistics struct {
+ out *Printer
+ total int
+ failing bool
+ erroring bool
+ skipped int
+ suppressed bool
+}
+
+func plural(word string, count int) string {
+ if count == 1 {
+ return word
+ }
+ return word + "s"
+}
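
The counters above interact as follows: skipped assertions are tallied separately and never increase the total, while a failure or error only flips the corresponding flag used to pick the summary color. A minimal in-package sketch (the helper name and values are hypothetical, not part of the vendored sources):

package reporting

import "os"

// demoStatistics is a hypothetical helper (not part of the vendored sources)
// showing how the statistics counters interact.
func demoStatistics() {
	stats := NewStatisticsReporter(NewPrinter(os.Stdout))

	stats.Report(NewSuccessReport())       // total: 1
	stats.Report(NewFailureReport("boom")) // total: 2, failing: true
	stats.Report(NewSkipReport())          // skipped: 1, total unchanged

	// Prints, in yellow because a failure was recorded:
	//   2 total assertions (one or more sections skipped)
	stats.EndStory()
}
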
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/story.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/story.go
new file mode 100644
index 00000000000..9e73c971f8f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting/story.go
@@ -0,0 +1,73 @@
+// TODO: in order for this reporter to be completely honest
+// we need to retrofit to be more like the json reporter such that:
+// 1. it maintains ScopeResult collections, which count assertions
+// 2. it reports only after EndStory(), so that all tick marks
+// are placed near the appropriate title.
+// 3. it is covered by unit tests
+
+package reporting
+
+import (
+ "fmt"
+ "strings"
+)
+
+type story struct {
+ out *Printer
+ titlesById map[string]string
+ currentKey []string
+}
+
+func (self *story) BeginStory(story *StoryReport) {}
+
+func (self *story) Enter(scope *ScopeReport) {
+ self.out.Indent()
+
+ self.currentKey = append(self.currentKey, scope.Title)
+ ID := strings.Join(self.currentKey, "|")
+
+ if _, found := self.titlesById[ID]; !found {
+ self.out.Println("")
+ self.out.Print(scope.Title)
+ self.out.Insert(" ")
+ self.titlesById[ID] = scope.Title
+ }
+}
+
+func (self *story) Report(report *AssertionResult) {
+ if report.Error != nil {
+ fmt.Print(redColor)
+ self.out.Insert(error_)
+ } else if report.Failure != "" {
+ fmt.Print(yellowColor)
+ self.out.Insert(failure)
+ } else if report.Skipped {
+ fmt.Print(yellowColor)
+ self.out.Insert(skip)
+ } else {
+ fmt.Print(greenColor)
+ self.out.Insert(success)
+ }
+ fmt.Print(resetColor)
+}
+
+func (self *story) Exit() {
+ self.out.Dedent()
+ self.currentKey = self.currentKey[:len(self.currentKey)-1]
+}
+
+func (self *story) EndStory() {
+ self.titlesById = make(map[string]string)
+ self.out.Println("\n")
+}
+
+func (self *story) Write(content []byte) (written int, err error) {
+ return len(content), nil // no-op
+}
+
+func NewStoryReporter(out *Printer) *story {
+ self := new(story)
+ self.out = out
+ self.titlesById = make(map[string]string)
+ return self
+}
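
The story reporter avoids reprinting a title when the same chain of scope titles is entered again: each Enter joins the current title stack with '|' into an ID and prints only IDs it has not yet seen in the current story. A minimal in-package sketch (the helper name is hypothetical, not part of the vendored sources):

package reporting

import "os"

// demoStoryTitles is a hypothetical helper (not part of the vendored sources)
// tracing how scope IDs are built from the title stack.
func demoStoryTitles() {
	reporter := NewStoryReporter(NewPrinter(os.Stdout))

	reporter.Enter(NewScopeReport("A")) // ID "A" is new, so the title is printed
	reporter.Enter(NewScopeReport("B")) // ID "A|B" is new, so the title is printed
	reporter.Report(NewSuccessReport()) // inserts the success mark next to "B"
	reporter.Exit()
	reporter.Exit()

	reporter.Enter(NewScopeReport("A")) // ID "A" was already seen: title not reprinted
	reporter.Exit()

	reporter.EndStory() // clears the remembered IDs for the next story
}
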
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting_hooks_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting_hooks_test.go
new file mode 100644
index 00000000000..69125c3cf44
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/reporting_hooks_test.go
@@ -0,0 +1,317 @@
+package convey
+
+import (
+ "fmt"
+ "net/http"
+ "net/http/httptest"
+ "path"
+ "runtime"
+ "strconv"
+ "strings"
+ "testing"
+
+ "github.com/smartystreets/goconvey/convey/reporting"
+)
+
+func TestSingleScopeReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ So(1, ShouldEqual, 1)
+ })
+
+ expectEqual(t, "Begin|A|Success|Exit|End", myReporter.wholeStory())
+}
+
+func TestNestedScopeReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ Convey("B", func() {
+ So(1, ShouldEqual, 1)
+ })
+ })
+
+ expectEqual(t, "Begin|A|B|Success|Exit|Exit|End", myReporter.wholeStory())
+}
+
+func TestFailureReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ So(1, ShouldBeNil)
+ })
+
+ expectEqual(t, "Begin|A|Failure|Exit|End", myReporter.wholeStory())
+}
+
+func TestFirstFailureEndsScopeExecution(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ So(1, ShouldBeNil)
+ So(nil, ShouldBeNil)
+ })
+
+ expectEqual(t, "Begin|A|Failure|Exit|End", myReporter.wholeStory())
+}
+
+func TestComparisonFailureDeserializedAndReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ So("hi", ShouldEqual, "bye")
+ })
+
+ expectEqual(t, "Begin|A|Failure(bye/hi)|Exit|End", myReporter.wholeStory())
+}
+
+func TestNestedFailureReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ Convey("B", func() {
+ So(2, ShouldBeNil)
+ })
+ })
+
+ expectEqual(t, "Begin|A|B|Failure|Exit|Exit|End", myReporter.wholeStory())
+}
+
+func TestSuccessAndFailureReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ So(nil, ShouldBeNil)
+ So(1, ShouldBeNil)
+ })
+
+ expectEqual(t, "Begin|A|Success|Failure|Exit|End", myReporter.wholeStory())
+}
+
+func TestIncompleteActionReportedAsSkipped(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ Convey("B", nil)
+ })
+
+ expectEqual(t, "Begin|A|B|Skipped|Exit|Exit|End", myReporter.wholeStory())
+}
+
+func TestSkippedConveyReportedAsSkipped(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ SkipConvey("B", func() {
+ So(1, ShouldEqual, 1)
+ })
+ })
+
+ expectEqual(t, "Begin|A|B|Skipped|Exit|Exit|End", myReporter.wholeStory())
+}
+
+func TestMultipleSkipsAreReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ Convey("0", func() {
+ So(nil, ShouldBeNil)
+ })
+
+ SkipConvey("1", func() {})
+ SkipConvey("2", func() {})
+
+ Convey("3", nil)
+ Convey("4", nil)
+
+ Convey("5", func() {
+ So(nil, ShouldBeNil)
+ })
+ })
+
+ expected := "Begin" +
+ "|A|0|Success|Exit|Exit" +
+ "|A|1|Skipped|Exit|Exit" +
+ "|A|2|Skipped|Exit|Exit" +
+ "|A|3|Skipped|Exit|Exit" +
+ "|A|4|Skipped|Exit|Exit" +
+ "|A|5|Success|Exit|Exit" +
+ "|End"
+
+ expectEqual(t, expected, myReporter.wholeStory())
+}
+
+func TestSkippedAssertionIsNotReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ SkipSo(1, ShouldEqual, 1)
+ })
+
+ expectEqual(t, "Begin|A|Skipped|Exit|End", myReporter.wholeStory())
+}
+
+func TestMultipleSkippedAssertionsAreNotReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ SkipSo(1, ShouldEqual, 1)
+ So(1, ShouldEqual, 1)
+ SkipSo(1, ShouldEqual, 1)
+ })
+
+ expectEqual(t, "Begin|A|Skipped|Success|Skipped|Exit|End", myReporter.wholeStory())
+}
+
+func TestErrorByManualPanicReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ panic("Gopher alert!")
+ })
+
+ expectEqual(t, "Begin|A|Error|Exit|End", myReporter.wholeStory())
+}
+
+func TestIterativeConveysReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ for x := 0; x < 3; x++ {
+ Convey(strconv.Itoa(x), func() {
+ So(x, ShouldEqual, x)
+ })
+ }
+ })
+
+ expectEqual(t, "Begin|A|0|Success|Exit|Exit|A|1|Success|Exit|Exit|A|2|Success|Exit|Exit|End", myReporter.wholeStory())
+}
+
+func TestNestedIterativeConveysReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func() {
+ for x := 0; x < 3; x++ {
+ Convey(strconv.Itoa(x), func() {
+ for y := 0; y < 3; y++ {
+ Convey("< "+strconv.Itoa(y), func() {
+ So(x, ShouldBeLessThan, y)
+ })
+ }
+ })
+ }
+ })
+
+ expectEqual(t, ("Begin|" +
+ "A|0|< 0|Failure|Exit|Exit|Exit|" +
+ "A|0|< 1|Success|Exit|Exit|Exit|" +
+ "A|0|< 2|Success|Exit|Exit|Exit|" +
+ "A|1|< 0|Failure|Exit|Exit|Exit|" +
+ "A|1|< 1|Failure|Exit|Exit|Exit|" +
+ "A|1|< 2|Success|Exit|Exit|Exit|" +
+ "A|2|< 0|Failure|Exit|Exit|Exit|" +
+ "A|2|< 1|Failure|Exit|Exit|Exit|" +
+ "A|2|< 2|Failure|Exit|Exit|Exit|" +
+ "End"), myReporter.wholeStory())
+}
+
+func TestEmbeddedAssertionReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ Convey("A", test, func(c C) {
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ c.So(r.FormValue("msg"), ShouldEqual, "ping")
+ }))
+ http.DefaultClient.Get(ts.URL + "?msg=ping")
+ })
+
+ expectEqual(t, "Begin|A|Success|Exit|End", myReporter.wholeStory())
+}
+
+func TestEmbeddedContextHelperReported(t *testing.T) {
+ myReporter, test := setupFakeReporter()
+
+ helper := func(c C) http.HandlerFunc {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ c.Convey("Embedded", func() {
+ So(r.FormValue("msg"), ShouldEqual, "ping")
+ })
+ })
+ }
+
+ Convey("A", test, func(c C) {
+ ts := httptest.NewServer(helper(c))
+ http.DefaultClient.Get(ts.URL + "?msg=ping")
+ })
+
+ expectEqual(t, "Begin|A|Embedded|Success|Exit|Exit|End", myReporter.wholeStory())
+}
+
+func expectEqual(t *testing.T, expected interface{}, actual interface{}) {
+ if expected != actual {
+ _, file, line, _ := runtime.Caller(1)
+ t.Errorf("Expected '%v' to be '%v' but it wasn't. See '%s' at line %d.",
+ actual, expected, path.Base(file), line)
+ }
+}
+
+func setupFakeReporter() (*fakeReporter, *fakeGoTest) {
+ myReporter := new(fakeReporter)
+ myReporter.calls = []string{}
+ testReporter = myReporter
+ return myReporter, new(fakeGoTest)
+}
+
+type fakeReporter struct {
+ calls []string
+}
+
+func (self *fakeReporter) BeginStory(story *reporting.StoryReport) {
+ self.calls = append(self.calls, "Begin")
+}
+
+func (self *fakeReporter) Enter(scope *reporting.ScopeReport) {
+ self.calls = append(self.calls, scope.Title)
+}
+
+func (self *fakeReporter) Report(report *reporting.AssertionResult) {
+ if report.Error != nil {
+ self.calls = append(self.calls, "Error")
+ } else if report.Failure != "" {
+ message := "Failure"
+ if report.Expected != "" || report.Actual != "" {
+ message += fmt.Sprintf("(%s/%s)", report.Expected, report.Actual)
+ }
+ self.calls = append(self.calls, message)
+ } else if report.Skipped {
+ self.calls = append(self.calls, "Skipped")
+ } else {
+ self.calls = append(self.calls, "Success")
+ }
+}
+
+func (self *fakeReporter) Exit() {
+ self.calls = append(self.calls, "Exit")
+}
+
+func (self *fakeReporter) EndStory() {
+ self.calls = append(self.calls, "End")
+}
+
+func (self *fakeReporter) Write(content []byte) (int, error) {
+ return len(content), nil // no-op
+}
+
+func (self *fakeReporter) wholeStory() string {
+ return strings.Join(self.calls, "|")
+}
+
+////////////////////////////////
+
+type fakeGoTest struct{}
+
+func (self *fakeGoTest) Fail() {}
+func (self *fakeGoTest) Fatalf(format string, args ...interface{}) {}
+
+var test t = new(fakeGoTest)
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/story_conventions_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/story_conventions_test.go
new file mode 100644
index 00000000000..84832c78d5b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/convey/story_conventions_test.go
@@ -0,0 +1,175 @@
+package convey
+
+import (
+ "reflect"
+ "testing"
+)
+
+func expectPanic(t *testing.T, f string) interface{} {
+ r := recover()
+ if r != nil {
+ if cp, ok := r.(*conveyErr); ok {
+ if cp.fmt != f {
+ t.Error("Incorrect panic message.")
+ }
+ } else {
+ t.Errorf("Incorrect panic type. %s", reflect.TypeOf(r))
+ }
+ } else {
+ t.Error("Expected panic but none occured")
+ }
+ return r
+}
+
+func TestMissingTopLevelGoTestReferenceCausesPanic(t *testing.T) {
+ output := map[string]bool{}
+
+ defer expectEqual(t, false, output["good"])
+ defer expectPanic(t, missingGoTest)
+
+ Convey("Hi", func() {
+ output["bad"] = true // this shouldn't happen
+ })
+}
+
+func TestMissingTopLevelGoTestReferenceAfterGoodExample(t *testing.T) {
+ output := map[string]bool{}
+
+ defer func() {
+ expectEqual(t, true, output["good"])
+ expectEqual(t, false, output["bad"])
+ }()
+ defer expectPanic(t, missingGoTest)
+
+ Convey("Good example", t, func() {
+ output["good"] = true
+ })
+
+ Convey("Bad example", func() {
+ output["bad"] = true // shouldn't happen
+ })
+}
+
+func TestExtraReferencePanics(t *testing.T) {
+ output := map[string]bool{}
+
+ defer expectEqual(t, false, output["bad"])
+ defer expectPanic(t, extraGoTest)
+
+ Convey("Good example", t, func() {
+ Convey("Bad example - passing in *testing.T a second time!", t, func() {
+ output["bad"] = true // shouldn't happen
+ })
+ })
+}
+
+func TestParseRegistrationMissingRequiredElements(t *testing.T) {
+ defer expectPanic(t, parseError)
+
+ Convey()
+}
+
+func TestParseRegistration_MissingNameString(t *testing.T) {
+ defer expectPanic(t, parseError)
+
+ Convey(func() {})
+}
+
+func TestParseRegistration_MissingActionFunc(t *testing.T) {
+ defer expectPanic(t, parseError)
+
+ Convey("Hi there", 12345)
+}
+
+func TestFailureModeNoContext(t *testing.T) {
+ Convey("Foo", t, func() {
+ done := make(chan int, 1)
+ go func() {
+ defer func() { done <- 1 }()
+ defer expectPanic(t, noStackContext)
+ So(len("I have no context"), ShouldBeGreaterThan, 0)
+ }()
+ <-done
+ })
+}
+
+func TestFailureModeDuplicateSuite(t *testing.T) {
+ Convey("cool", t, func() {
+ defer expectPanic(t, multipleIdenticalConvey)
+
+ Convey("dup", nil)
+ Convey("dup", nil)
+ })
+}
+
+func TestFailureModeIndeterminentSuiteNames(t *testing.T) {
+ defer expectPanic(t, differentConveySituations)
+
+ name := "bob"
+ Convey("cool", t, func() {
+ for i := 0; i < 3; i++ {
+ Convey(name, func() {})
+ name += "bob"
+ }
+ })
+}
+
+func TestFailureModeNestedIndeterminentSuiteNames(t *testing.T) {
+ defer expectPanic(t, differentConveySituations)
+
+ name := "bob"
+ Convey("cool", t, func() {
+ Convey("inner", func() {
+ for i := 0; i < 3; i++ {
+ Convey(name, func() {})
+ name += "bob"
+ }
+ })
+ })
+}
+
+func TestFailureModeParameterButMissing(t *testing.T) {
+ defer expectPanic(t, parseError)
+
+ prepare()
+
+ Convey("Foobar", t, FailureHalts)
+}
+
+func TestFailureModeParameterWithAction(t *testing.T) {
+ prepare()
+
+ Convey("Foobar", t, FailureHalts, func() {})
+}
+
+func TestExtraConveyParameters(t *testing.T) {
+ defer expectPanic(t, parseError)
+
+ prepare()
+
+ Convey("Foobar", t, FailureHalts, func() {}, "This is not supposed to be here")
+}
+
+func TestExtraConveyParameters2(t *testing.T) {
+ defer expectPanic(t, parseError)
+
+ prepare()
+
+ Convey("Foobar", t, func() {}, "This is not supposed to be here")
+}
+
+func TestExtraConveyParameters3(t *testing.T) {
+ defer expectPanic(t, parseError)
+
+ output := prepare()
+
+ Convey("A", t, func() {
+ output += "A "
+
+ Convey("B", func() {
+ output += "B "
+ }, "This is not supposed to be here")
+ })
+
+ expectEqual(t, "A ", output)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/dependencies.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/dependencies.go
new file mode 100644
index 00000000000..0839e27fdf2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/dependencies.go
@@ -0,0 +1,4 @@
+package main
+
+import _ "github.com/jtolds/gls"
+import _ "github.com/smartystreets/assertions"
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/doc_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/doc_test.go
new file mode 100644
index 00000000000..06ab7d0f9a3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/doc_test.go
@@ -0,0 +1 @@
+package main
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/assertion_examples_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/assertion_examples_test.go
new file mode 100644
index 00000000000..a933292a2e7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/assertion_examples_test.go
@@ -0,0 +1,125 @@
+package examples
+
+import (
+ "bytes"
+ "io"
+ "testing"
+ "time"
+
+ . "github.com/smartystreets/goconvey/convey"
+)
+
+func TestAssertionsAreAvailableFromConveyPackage(t *testing.T) {
+ SetDefaultFailureMode(FailureContinues)
+ defer SetDefaultFailureMode(FailureHalts)
+
+ Convey("Equality assertions should be accessible", t, func() {
+ thing1a := thing{a: "asdf"}
+ thing1b := thing{a: "asdf"}
+ thing2 := thing{a: "qwer"}
+
+ So(1, ShouldEqual, 1)
+ So(1, ShouldNotEqual, 2)
+ So(1, ShouldAlmostEqual, 1.000000000000001)
+ So(1, ShouldNotAlmostEqual, 2, 0.5)
+ So(thing1a, ShouldResemble, thing1b)
+ So(thing1a, ShouldNotResemble, thing2)
+ So(&thing1a, ShouldPointTo, &thing1a)
+ So(&thing1a, ShouldNotPointTo, &thing1b)
+ So(nil, ShouldBeNil)
+ So(1, ShouldNotBeNil)
+ So(true, ShouldBeTrue)
+ So(false, ShouldBeFalse)
+ So(0, ShouldBeZeroValue)
+ })
+
+ Convey("Numeric comparison assertions should be accessible", t, func() {
+ So(1, ShouldBeGreaterThan, 0)
+ So(1, ShouldBeGreaterThanOrEqualTo, 1)
+ So(1, ShouldBeLessThan, 2)
+ So(1, ShouldBeLessThanOrEqualTo, 1)
+ So(1, ShouldBeBetween, 0, 2)
+ So(1, ShouldNotBeBetween, 2, 4)
+ So(1, ShouldBeBetweenOrEqual, 1, 2)
+ So(1, ShouldNotBeBetweenOrEqual, 2, 4)
+ })
+
+ Convey("Container assertions should be accessible", t, func() {
+ So([]int{1, 2, 3}, ShouldContain, 2)
+ So([]int{1, 2, 3}, ShouldNotContain, 4)
+ So(map[int]int{1: 1, 2: 2, 3: 3}, ShouldContainKey, 2)
+ So(map[int]int{1: 1, 2: 2, 3: 3}, ShouldNotContainKey, 4)
+ So(1, ShouldBeIn, []int{1, 2, 3})
+ So(4, ShouldNotBeIn, []int{1, 2, 3})
+ So([]int{}, ShouldBeEmpty)
+ So([]int{1}, ShouldNotBeEmpty)
+ So([]int{1, 2}, ShouldHaveLength, 2)
+ })
+
+ Convey("String assertions should be accessible", t, func() {
+ So("asdf", ShouldStartWith, "a")
+ So("asdf", ShouldNotStartWith, "z")
+ So("asdf", ShouldEndWith, "df")
+ So("asdf", ShouldNotEndWith, "as")
+ So("", ShouldBeBlank)
+ So("asdf", ShouldNotBeBlank)
+ So("asdf", ShouldContainSubstring, "sd")
+ So("asdf", ShouldNotContainSubstring, "af")
+ })
+
+ Convey("Panic recovery assertions should be accessible", t, func() {
+ So(panics, ShouldPanic)
+ So(func() {}, ShouldNotPanic)
+ So(panics, ShouldPanicWith, "Goofy Gophers!")
+ So(panics, ShouldNotPanicWith, "Guileless Gophers!")
+ })
+
+ Convey("Type-checking assertions should be accessible", t, func() {
+
+ // NOTE: Values or pointers may be checked. If a value is passed,
+ // it will be cast as a pointer to the value to avoid cases where
+ // the struct being tested takes pointer receivers. Go allows values
+ // or pointers to be passed as receivers on methods with a value
+ // receiver, but only pointers on methods with pointer receivers.
+ // See:
+ // http://golang.org/doc/effective_go.html#pointers_vs_values
+ // http://golang.org/doc/effective_go.html#blank_implements
+ // http://blog.golang.org/laws-of-reflection
+
+ So(1, ShouldHaveSameTypeAs, 0)
+ So(1, ShouldNotHaveSameTypeAs, "1")
+
+ So(bytes.NewBufferString(""), ShouldImplement, (*io.Reader)(nil))
+ So("string", ShouldNotImplement, (*io.Reader)(nil))
+ })
+
+ Convey("Time assertions should be accessible", t, func() {
+ january1, _ := time.Parse(timeLayout, "2013-01-01 00:00")
+ january2, _ := time.Parse(timeLayout, "2013-01-02 00:00")
+ january3, _ := time.Parse(timeLayout, "2013-01-03 00:00")
+ january4, _ := time.Parse(timeLayout, "2013-01-04 00:00")
+ january5, _ := time.Parse(timeLayout, "2013-01-05 00:00")
+ oneDay, _ := time.ParseDuration("24h0m0s")
+
+ So(january1, ShouldHappenBefore, january4)
+ So(january1, ShouldHappenOnOrBefore, january1)
+ So(january2, ShouldHappenAfter, january1)
+ So(january2, ShouldHappenOnOrAfter, january2)
+ So(january3, ShouldHappenBetween, january2, january5)
+ So(january3, ShouldHappenOnOrBetween, january3, january5)
+ So(january1, ShouldNotHappenOnOrBetween, january2, january5)
+ So(january2, ShouldHappenWithin, oneDay, january3)
+ So(january5, ShouldNotHappenWithin, oneDay, january1)
+ So([]time.Time{january1, january2}, ShouldBeChronological)
+ })
+}
+
+type thing struct {
+ a string
+}
+
+func panics() {
+ panic("Goofy Gophers!")
+}
+
+const timeLayout = "2006-01-02 15:04"
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/bowling_game.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/bowling_game.go
new file mode 100644
index 00000000000..547bf93d1c3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/bowling_game.go
@@ -0,0 +1,75 @@
+package examples
+
+// Game contains the state of a bowling game.
+type Game struct {
+ rolls []int
+ current int
+}
+
+// NewGame allocates and starts a new game of bowling.
+func NewGame() *Game {
+ game := new(Game)
+ game.rolls = make([]int, maxThrowsPerGame)
+ return game
+}
+
+// Roll rolls the ball and knocks down the number of pins specified by pins.
+func (self *Game) Roll(pins int) {
+ self.rolls[self.current] = pins
+ self.current++
+}
+
+// Score calculates and returns the player's current score.
+func (self *Game) Score() (sum int) {
+ for throw, frame := 0, 0; frame < framesPerGame; frame++ {
+ if self.isStrike(throw) {
+ sum += self.strikeBonusFor(throw)
+ throw += 1
+ } else if self.isSpare(throw) {
+ sum += self.spareBonusFor(throw)
+ throw += 2
+ } else {
+ sum += self.framePointsAt(throw)
+ throw += 2
+ }
+ }
+ return sum
+}
+
+// isStrike determines if a given throw is a strike or not. A strike is knocking
+// down all pins in one throw.
+func (self *Game) isStrike(throw int) bool {
+ return self.rolls[throw] == allPins
+}
+
+// strikeBonusFor calculates and returns the strike bonus for a throw.
+func (self *Game) strikeBonusFor(throw int) int {
+ return allPins + self.framePointsAt(throw+1)
+}
+
+// isSpare determines if a given frame is a spare or not. A spare is knocking
+// down all pins in one frame with two throws.
+func (self *Game) isSpare(throw int) bool {
+ return self.framePointsAt(throw) == allPins
+}
+
+// spareBonusFor calculates and returns the spare bonus for a throw.
+func (self *Game) spareBonusFor(throw int) int {
+ return allPins + self.rolls[throw+2]
+}
+
+// framePointsAt computes and returns the score in a frame specified by throw.
+func (self *Game) framePointsAt(throw int) int {
+ return self.rolls[throw] + self.rolls[throw+1]
+}
+
+const (
+ // allPins is the number of pins allocated per fresh throw.
+ allPins = 10
+
+ // framesPerGame is the number of frames per bowling game.
+ framesPerGame = 10
+
+ // maxThrowsPerGame is the maximum number of throws possible in a single game.
+ maxThrowsPerGame = 21
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/bowling_game_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/bowling_game_test.go
new file mode 100644
index 00000000000..18e997d44ae
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/bowling_game_test.go
@@ -0,0 +1,80 @@
+/*
+
+Reference: http://butunclebob.com/ArticleS.UncleBob.TheBowlingGameKata
+
+See the very first link (which happens to be the very first word of
+the first paragraph) on the page for a tutorial.
+
+*/
+
+package examples
+
+import (
+ "testing"
+
+ . "github.com/smartystreets/goconvey/convey"
+)
+
+func TestBowlingGameScoring(t *testing.T) {
+ Convey("Given a fresh score card", t, func() {
+ game := NewGame()
+
+ Convey("When all gutter balls are thrown", func() {
+ game.rollMany(20, 0)
+
+ Convey("The score should be zero", func() {
+ So(game.Score(), ShouldEqual, 0)
+ })
+ })
+
+ Convey("When all throws knock down only one pin", func() {
+ game.rollMany(20, 1)
+
+ Convey("The score should be 20", func() {
+ So(game.Score(), ShouldEqual, 20)
+ })
+ })
+
+ Convey("When a spare is thrown", func() {
+ game.rollSpare()
+ game.Roll(3)
+ game.rollMany(17, 0)
+
+ Convey("The score should include a spare bonus.", func() {
+ So(game.Score(), ShouldEqual, 16)
+ })
+ })
+
+ Convey("When a strike is thrown", func() {
+ game.rollStrike()
+ game.Roll(3)
+ game.Roll(4)
+ game.rollMany(16, 0)
+
+ Convey("The score should include a strike bonus.", func() {
+ So(game.Score(), ShouldEqual, 24)
+ })
+ })
+
+ Convey("When all strikes are thrown", func() {
+ game.rollMany(21, 10)
+
+ Convey("The score should be 300.", func() {
+ So(game.Score(), ShouldEqual, 300)
+ })
+ })
+ })
+}
+
+func (self *Game) rollMany(times, pins int) {
+ for x := 0; x < times; x++ {
+ self.Roll(pins)
+ }
+}
+func (self *Game) rollSpare() {
+ self.Roll(5)
+ self.Roll(5)
+}
+func (self *Game) rollStrike() {
+ self.Roll(10)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/doc.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/doc.go
new file mode 100644
index 00000000000..dae661e18dc
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/doc.go
@@ -0,0 +1,5 @@
+// Package examples contains, well, examples of how to use goconvey to
+// specify behavior of a system under test. It contains a well-known example
+// by Robert C. Martin called "Bowling Game Kata" as well as another very
+// trivial example that demonstrates Reset() and some of the assertions.
+package examples
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/examples.goconvey b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/examples.goconvey
new file mode 100644
index 00000000000..b5c805fbf4e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/examples.goconvey
@@ -0,0 +1,12 @@
+// Uncomment the next line to disable the package when running the GoConvey UI:
+//IGNORE
+
+// Uncomment the next line to limit testing to the specified test function name pattern:
+//-run=TestAssertionsAreAvailableFromConveyPackage
+
+// Uncomment the next line to limit testing to those tests that don't bail when testing.Short() is true:
+//-short
+
+// include any additional `go test` flags or application-specific flags below:
+
+-timeout=1s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/simple_example_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/simple_example_test.go
new file mode 100644
index 00000000000..dadfd8136a3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/examples/simple_example_test.go
@@ -0,0 +1,36 @@
+package examples
+
+import (
+ "testing"
+
+ . "github.com/smartystreets/goconvey/convey"
+)
+
+func TestIntegerManipulation(t *testing.T) {
+ t.Parallel()
+
+ Convey("Given a starting integer value", t, func() {
+ x := 42
+
+ Convey("When incremented", func() {
+ x++
+
+ Convey("The value should be greater by one", func() {
+ So(x, ShouldEqual, 43)
+ })
+ Convey("The value should NOT be what it used to be", func() {
+ So(x, ShouldNotEqual, 42)
+ })
+ })
+ Convey("When decremented", func() {
+ x--
+
+ Convey("The value should be lesser by one", func() {
+ So(x, ShouldEqual, 41)
+ })
+ Convey("The value should NOT be what it used to be", func() {
+ So(x, ShouldNotEqual, 42)
+ })
+ })
+ })
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/goconvey.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/goconvey.go
new file mode 100644
index 00000000000..4d5fc0ef6d5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/goconvey.go
@@ -0,0 +1,280 @@
+// This executable provides an HTTP server that watches for file system changes
+// to .go files within the working directory (and all nested go packages).
+// Navigating to the configured host and port in a web browser will display the
+// latest results of running `go test` in each go package.
+package main
+
+import (
+ "flag"
+ "fmt"
+ "log"
+ "net/http"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "time"
+
+ "go/build"
+
+ "github.com/smartystreets/goconvey/web/server/api"
+ "github.com/smartystreets/goconvey/web/server/contract"
+ "github.com/smartystreets/goconvey/web/server/executor"
+ "github.com/smartystreets/goconvey/web/server/messaging"
+ "github.com/smartystreets/goconvey/web/server/parser"
+ "github.com/smartystreets/goconvey/web/server/system"
+ "github.com/smartystreets/goconvey/web/server/watch"
+)
+
+func init() {
+ flags()
+ folders()
+}
+func flags() {
+ flag.IntVar(&port, "port", 8080, "The port at which to serve http.")
+ flag.StringVar(&host, "host", "127.0.0.1", "The host at which to serve http.")
+ flag.DurationVar(&nap, "poll", quarterSecond, "The interval to wait between polling the file system for changes (default: 250ms).")
+ flag.IntVar(&packages, "packages", 10, "The number of packages to test in parallel. Higher == faster but more costly in terms of computing. (default: 10)")
+ flag.StringVar(&gobin, "gobin", "go", "The path to the 'go' binary (default: search on the PATH).")
+ flag.BoolVar(&cover, "cover", true, "Enable package-level coverage statistics. Requires Go 1.2+ and the go cover tool. (default: true)")
+ flag.IntVar(&depth, "depth", -1, "The directory scanning depth. If -1, scan infinitely deep directory structures. 0: scan working directory. 1+: Scan into nested directories, limited to value. (default: -1)")
+ flag.StringVar(&timeout, "timeout", "0", "The test execution timeout if none is specified in the *.goconvey file (default is '0', which is the same as not providing this option).")
+ flag.StringVar(&watchedSuffixes, "watchedSuffixes", ".go", "A comma separated list of file suffixes to watch for modifications (default: .go).")
+ flag.StringVar(&excludedDirs, "excludedDirs", "vendor,node_modules", "A comma separated list of directories that will be excluded from being watched")
+ flag.StringVar(&workDir, "workDir", "", "set goconvey working directory (default current directory)")
+
+ log.SetOutput(os.Stdout)
+ log.SetFlags(log.LstdFlags | log.Lshortfile)
+}
+func folders() {
+ _, file, _, _ := runtime.Caller(0)
+ here := filepath.Dir(file)
+ static = filepath.Join(here, "/web/client")
+ reports = filepath.Join(static, "reports")
+}
+
+func main() {
+ flag.Parse()
+ log.Printf(initialConfiguration, host, port, nap, cover)
+
+ working := getWorkDir()
+ cover = coverageEnabled(cover, reports)
+ shell := system.NewShell(gobin, reports, cover, timeout)
+
+ watcherInput := make(chan messaging.WatcherCommand)
+ watcherOutput := make(chan messaging.Folders)
+ excludedDirItems := strings.Split(excludedDirs, `,`)
+ watcher := watch.NewWatcher(working, depth, nap, watcherInput, watcherOutput, watchedSuffixes, excludedDirItems)
+
+ parser := parser.NewParser(parser.ParsePackageResults)
+ tester := executor.NewConcurrentTester(shell)
+ tester.SetBatchSize(packages)
+
+ longpollChan := make(chan chan string)
+ executor := executor.NewExecutor(tester, parser, longpollChan)
+ server := api.NewHTTPServer(working, watcherInput, executor, longpollChan)
+ go runTestOnUpdates(watcherOutput, executor, server)
+ go watcher.Listen()
+ go launchBrowser(host, port)
+ serveHTTP(server)
+}
+
+func browserCmd() (string, bool) {
+ browser := map[string]string{
+ "darwin": "open",
+ "linux": "xdg-open",
+ "win32": "start",
+ }
+ cmd, ok := browser[runtime.GOOS]
+ return cmd, ok
+}
+
+func launchBrowser(host string, port int) {
+ browser, ok := browserCmd()
+ if !ok {
+ log.Printf("Skipped launching browser for this OS: %s", runtime.GOOS)
+ return
+ }
+
+ log.Printf("Launching browser on %s:%d", host, port)
+ url := fmt.Sprintf("http://%s:%d", host, port)
+ cmd := exec.Command(browser, url)
+
+ output, err := cmd.CombinedOutput()
+ if err != nil {
+ log.Println(err)
+ }
+ log.Println(string(output))
+}
+
+func runTestOnUpdates(queue chan messaging.Folders, executor contract.Executor, server contract.Server) {
+ for update := range queue {
+ log.Println("Received request from watcher to execute tests...")
+ packages := extractPackages(update)
+ output := executor.ExecuteTests(packages)
+ root := extractRoot(update, packages)
+ server.ReceiveUpdate(root, output)
+ }
+}
+
+func extractPackages(folderList messaging.Folders) []*contract.Package {
+ packageList := []*contract.Package{}
+ for _, folder := range folderList {
+ hasImportCycle := testFilesImportTheirOwnPackage(folder.Path)
+ packageList = append(packageList, contract.NewPackage(folder, hasImportCycle))
+ }
+ return packageList
+}
+
+func extractRoot(folderList messaging.Folders, packageList []*contract.Package) string {
+ path := packageList[0].Path
+ folder := folderList[path]
+ return folder.Root
+}
+
+// This method exists because of a bug in the go cover tool that
+// causes an infinite loop when you try to run `go test -cover`
+// on a package that has an import cycle defined in one of its
+// test files. Yuck.
+func testFilesImportTheirOwnPackage(packagePath string) bool {
+ meta, err := build.ImportDir(packagePath, build.AllowBinary)
+ if err != nil {
+ return false
+ }
+
+ for _, dependency := range meta.TestImports {
+ if dependency == meta.ImportPath {
+ return true
+ }
+ }
+ return false
+}
+
+func serveHTTP(server contract.Server) {
+ serveStaticResources()
+ serveAjaxMethods(server)
+ activateServer()
+}
+
+func serveStaticResources() {
+ http.Handle("/", http.FileServer(http.Dir(static)))
+}
+
+func serveAjaxMethods(server contract.Server) {
+ http.HandleFunc("/watch", server.Watch)
+ http.HandleFunc("/ignore", server.Ignore)
+ http.HandleFunc("/reinstate", server.Reinstate)
+ http.HandleFunc("/latest", server.Results)
+ http.HandleFunc("/execute", server.Execute)
+ http.HandleFunc("/status", server.Status)
+ http.HandleFunc("/status/poll", server.LongPollStatus)
+ http.HandleFunc("/pause", server.TogglePause)
+}
+
+func activateServer() {
+ log.Printf("Serving HTTP at: http://%s:%d\n", host, port)
+ err := http.ListenAndServe(fmt.Sprintf("%s:%d", host, port), nil)
+ if err != nil {
+ log.Println(err)
+ }
+}
+
+func coverageEnabled(cover bool, reports string) bool {
+ return (cover &&
+ goVersion_1_2_orGreater() &&
+ coverToolInstalled() &&
+ ensureReportDirectoryExists(reports))
+}
+func goVersion_1_2_orGreater() bool {
+ version := runtime.Version() // 'go1.2....'
+ major, minor := version[2], version[4]
+ version_1_2 := major >= byte('1') && minor >= byte('2')
+ if !version_1_2 {
+ log.Printf(pleaseUpgradeGoVersion, version)
+ return false
+ }
+ return true
+}
+func coverToolInstalled() bool {
+ working := getWorkDir()
+ command := system.NewCommand(working, "go", "tool", "cover").Execute()
+ installed := strings.Contains(command.Output, "Usage of 'go tool cover':")
+ if !installed {
+ log.Print(coverToolMissing)
+ return false
+ }
+ return true
+}
+func ensureReportDirectoryExists(reports string) bool {
+ result, err := exists(reports)
+ if err != nil {
+ log.Fatal(err)
+ }
+ if result {
+ return true
+ }
+
+ if err := os.Mkdir(reports, 0755); err == nil {
+ return true
+ }
+
+ log.Printf(reportDirectoryUnavailable, reports)
+ return false
+}
+func exists(path string) (bool, error) {
+ _, err := os.Stat(path)
+ if err == nil {
+ return true, nil
+ }
+ if os.IsNotExist(err) {
+ return false, nil
+ }
+ return false, err
+}
+func getWorkDir() string {
+ working := ""
+ var err error
+ if workDir != "" {
+ working = workDir
+ } else {
+ working, err = os.Getwd()
+ if err != nil {
+ log.Fatal(err)
+ }
+ }
+ result, err := exists(working)
+ if err != nil {
+ log.Fatal(err)
+ }
+ if !result {
+ log.Fatalf("Path:%s does not exists", working)
+ }
+ return working
+}
+
+var (
+ port int
+ host string
+ gobin string
+ nap time.Duration
+ packages int
+ cover bool
+ depth int
+ timeout string
+ watchedSuffixes string
+ excludedDirs string
+
+ static string
+ reports string
+
+ quarterSecond = time.Millisecond * 250
+ workDir string
+)
+
+const (
+ initialConfiguration = "Initial configuration: [host: %s] [port: %d] [poll: %v] [cover: %v]\n"
+ pleaseUpgradeGoVersion = "Go version is less than 1.2 (%s), please upgrade to the latest stable version to enable coverage reporting.\n"
+ coverToolMissing = "Go cover tool is not installed or not accessible: for Go < 1.5 run `go get golang.org/x/tools/cmd/cover`\n For >= Go 1.5 run `go install $GOROOT/src/cmd/cover`\n"
+ reportDirectoryUnavailable = "Could not find or create the coverage report directory (at: '%s'). You probably won't see any coverage statistics...\n"
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/composer.html b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/composer.html
new file mode 100644
index 00000000000..e0d32409893
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/composer.html
@@ -0,0 +1,35 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8">
+ <title>GoConvey Composer</title>
+ <link rel="stylesheet" type="text/css" href="/resources/css/composer.css">
+ <script src="/resources/js/lib/markup.min.js"></script>
+ <script src="/resources/js/lib/taboverride.min.js"></script>
+ <script src="/resources/js/lib/jquery-2_1_0.min.js"></script>
+ <script src="/resources/js/composer.js"></script>
+ </head>
+ <body>
+ <header>
+ <h1>
+ <span class="logo">GoConvey</span>
+ <span class="afterlogo">Composer</span>
+ </h1>
+ </header>
+ <main>
+ <textarea id="input" placeholder="Type test cases here, one per line, with tab indentation"></textarea>
+ <div id="output"></div>
+ </main>
+
+<script id="tpl-convey" type="text/template">{{.}}{{if .|notTestFunc}}{{depth|indent}}Convey("{{title}}", {{if showT}}t, {{/if}}{{if stories|empty}}nil{{else}}func() {
+
+{{stories|recursivelyRender}}{{depth|indent}}}{{/if}})
+{{else}}func {{title|properCase|safeFunc}}(t *testing.T) {
+
+{{stories|recursivelyRender}}}
+{{/if}}
+{{/.}}</script>
+
+
+ </body>
+</html> \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/favicon.ico b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/favicon.ico
new file mode 100644
index 00000000000..bb3df78c2ab
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/favicon.ico
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/index.html b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/index.html
new file mode 100644
index 00000000000..6895c7c4936
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/index.html
@@ -0,0 +1,487 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>GoConvey</title>
+ <meta name="apple-mobile-web-app-capable" content="yes">
+ <link rel="stylesheet" href="/resources/css/font-awesome.min.css">
+ <link rel="stylesheet" href="/resources/css/tipsy.css">
+ <link rel="stylesheet" href="/resources/css/common.css">
+ <link rel="icon" class="favicon" href="/favicon.ico" type="image/vnd.microsoft.icon">
+ <link rel="shortcut icon" class="favicon" href="/favicon.ico" type="image/vnd.microsoft.icon">
+ <script src="/resources/js/lib/markup.min.js"></script>
+ <script src="/resources/js/lib/moment.min.js"></script>
+ <script src="/resources/js/lib/ansispan.js"></script>
+ <script src="/resources/js/lib/diff-match-patch.min.js"></script>
+ <script src="/resources/js/lib/jquery-2_1_0.min.js"></script>
+ <script src="/resources/js/lib/jquery-ui-1_10_3-custom.min.js"></script>
+ <script src="/resources/js/lib/jquery.pretty-text-diff.min.js"></script>
+ <script src="/resources/js/lib/jquery.tipsy.min.js"></script>
+
+ <!-- Script ordering is important -->
+ <script src="/resources/js/poller.js"></script>
+ <script src="/resources/js/convey.js"></script>
+ <script src="/resources/js/config.js"></script>
+ <script src="/resources/js/goconvey.js"></script>
+ </head>
+ <body>
+ <header>
+ <div class="overall ok">
+ <div class="status">PASS</div>
+ </div>
+
+ <div class="toggler narrow" data-toggle="controls">
+ Controls
+ </div>
+
+ <div id="controls" class="controls hide-narrow">
+ <div class="server-not-down">
+ <ul>
+ <li id="logo" title="Powered by GoConvey"><a href="http://goconvey.co" target="_blank">GoConvey</a></li>
+ </ul>
+
+ <div class="float-left" id="path-container">
+ <input type="text" id="path" placeholder="Watched directory" title="Change watched directory">
+ </div>
+
+ <ul class="float-right" id="control-buttons">
+ <li class="fa fa-pause" id="play-pause" title="Play/pause tests"></li>
+ <li class="fa fa-refresh" id="run-tests" title="Run tests"></li>
+ <li class="fa fa-history" id="show-history" title="Test history"></li>
+ <li class="fa fa-bell-o" id="toggle-notif" title="Toggle notifications"></li>
+ <li class="fa fa-cog" id="show-settings" title="Settings"></li>
+ <li class="fa fa-pencil-square-o" id="show-gen" title="Composer"></li>
+ </ul>
+ </div>
+ <div class="server-down">
+ <span class="flash">NOTICE:</span>
+ <span class="notice-message"><!-- Populated by Javascript --></span>
+ </div>
+
+ <hr class="clear">
+
+
+ <div class="expandable settings">
+ <div class="container">
+ <div class="setting">
+ <div class="setting-meta">
+ Theme
+ </div>
+ <div class="setting-val">
+ <ol class="enum" id="theme"><!-- Populated by Javascript --></ol>
+ <script id="tpl-theme-enum" type="text/template">
+ {{.}}<li data-theme="{{id}}">{{name}}</li>{{/.}}
+ </script>
+ </div>
+ </div>
+ <div class="setting">
+ <div class="setting-meta">
+ Default
+ </div>
+ <div class="setting-val">
+ <ol class="enum" id="pkg-expand-collapse">
+ <li data-pkg-expand-collapse="expanded">Expand All</li>
+ <li data-pkg-expand-collapse="collapsed">Collapse All</li>
+ </ol>
+ </div>
+ </div>
+ <div class="setting">
+ <div class="setting-meta">
+ Debug Output
+ </div>
+ <div class="setting-val">
+ <ol class="enum" id="show-debug-output">
+ <li data-show-debug-output="show">Show</li>
+ <li data-show-debug-output="hide">Hide</li>
+ </ol>
+ </div>
+ </div>
+ <div class="setting">
+ <div class="setting-meta">
+ Effects
+ </div>
+ <div class="setting-val">
+ <ol class="enum" id="ui-effects">
+ <li data-ui-effects="true">Cinematic</li>
+ <li data-ui-effects="false">Off</li>
+ </ol>
+ </div>
+ </div>
+ </div>
+ </div>
+
+ <div class="expandable history">
+ <div class="container"><!-- Populated by Javascript --></div>
+ <script id="tpl-history" type="text/template">
+ <div class="item {{overall.status.class}} frame-{{id}}" data-frameid="{{id}}">
+ <div class="status momentjs" id="frame-{{id}}"></div>
+ <div class="summary">{{overall.status.text|upcase}}<br>{{overall.passed}}/{{overall.assertions}} pass<br>{{overall.failures}} fail, {{overall.skipped}} skip</div>
+ </div>
+ </script>
+ </div>
+
+
+ </div>
+ </header>
+
+
+
+
+
+
+ <div class="frame cf">
+
+
+ <div class="col" id="col-1">
+ <div class="toggler" data-toggle="coverage">
+ Coverage
+ </div>
+ <div class="togglable" id="coverage"><!-- Populated by Javascript --></div>
+ <script id="tpl-coverage" type="text/template">
+ <div class="templated">
+ {{.}}
+ <div class="pkg-cover">
+ <div class="pkg-cover-bar" data-pkg="{{PackageName}}" data-width="{{Coverage|coveragePct}}"></div>
+ <div class="pkg-cover-name rtl pad-right{{if Coverage|coveragePct|equals>0}} no-coverage{{/if}}" title="{{Coverage|coveragePct}}% coverage">
+ <a class="fa fa-level-down" href="#pkg-{{_id}}" style="padding: 0 5px;"></a>{{if Coverage|more>0}}<a href="/reports/{{PackageName|coverageReportName}}.html" target="_blank">{{PackageName|boldPkgName}}</a>{{else}}{{PackageName|boldPkgName}}{{/if}}
+ </div>
+ </div>
+ {{/.}}
+ </div>
+ </script>
+
+
+
+ <div class="toggler" data-toggle="ignored">
+ Ignored
+ </div>
+ <div class="togglable" id="ignored"><!-- Populated by Javascript --></div>
+ <script id="tpl-ignored" type="text/template">
+ <div class="templated">
+ <div class="rtl pkg-list">
+ {{.}}
+ <div>
+ <a class="fa fa-level-down" href="#pkg-{{_id}}" style="padding: 0 5px;"></a>{{PackageName|boldPkgName}}
+ </div>
+ {{/.}}
+ </div>
+ </div>
+ </script>
+
+
+ <div class="toggler" data-toggle="notestfn">
+ No Test Functions
+ </div>
+ <div class="togglable" id="notestfn"><!-- Populated by Javascript --></div>
+ <script id="tpl-notestfn" type="text/template">
+ <div class="templated">
+ <div class="rtl pkg-list">
+ {{.}}
+ {{PackageName|boldPkgName}}<br>
+ {{/.}}
+ </div>
+ </div>
+ </script>
+
+
+ <div class="toggler" data-toggle="notestfiles">
+ No Test Files
+ </div>
+ <div class="togglable" id="notestfiles"><!-- Populated by Javascript --></div>
+ <script id="tpl-notestfiles" type="text/template">
+ <div class="templated">
+ <div class="rtl pkg-list">
+ {{.}}
+ {{PackageName|boldPkgName}}<br>
+ {{/.}}
+ </div>
+ </div>
+ </script>
+
+
+
+ <div class="toggler" data-toggle="nogofiles">
+ No Go Files
+ </div>
+ <div class="togglable" id="nogofiles"><!-- Populated by Javascript --></div>
+ <script id="tpl-nogofiles" type="text/template">
+ <div class="templated">
+ <div class="rtl pkg-list">
+ {{.}}
+ {{PackageName|boldPkgName}}<br>
+ {{/.}}
+ </div>
+ </div>
+ </script>
+ </div>
+
+
+
+
+
+
+
+
+
+
+
+ <div class="col" id="col-2">
+
+ <div class="toggler buildfailures" data-toggle="buildfailures">
+ Build Failures
+ </div>
+ <div class="togglable buildfailures" id="buildfailures"><!-- Populated by Javascript --></div>
+ <script id="tpl-buildfailures" type="text/template">
+ <div class="templated">
+ {{.}}
+ <div class="buildfail">
+ <div class="buildfail-pkg"><i class="fa fa-wrench"></i>&nbsp; {{PackageName|boldPkgName}}</div>
+ <div class="buildfail-output">{{BuildOutput|htmlSafe|ansiColours}}</div>
+ </div>
+ {{/.}}
+ </div>
+ </script>
+
+
+
+ <div class="toggler panics" data-toggle="panics">
+ Panics
+ </div>
+ <div class="togglable panics" id="panics"><!-- Populated by Javascript --></div>
+ <script id="tpl-panics" type="text/template">
+ <div class="templated">
+ {{.}}
+ <div class="panic">
+ <div class="panic-pkg">
+ <i class="fa fa-bomb"></i>&nbsp; {{_pkg|boldPkgName}}
+ </div>
+ <div class="panic-details">
+ <div class="panic-story">
+ <div class="panic-file">
+ {{if File|notempty}}
+ <a href="goconvey://open/?url=file://{{File|url}}&line={{Line}}">{{File|relativePath}}{{if Line|more>0}}:{{Line}}{{/if}}&nbsp; <i class="fa fa-external-link"></i></a>
+ {{else}}
+ <b>{{TestName}}</b>
+ {{/if}}
+ </div>
+ {{if _path|notempty}}
+ {{_path}}
+ <div class="story-links{{if Depth|more>-1}} depth-{{Depth}}{{/if}}"><a href="#test-{{_id}}">{{Title}}</a></div>
+ {{/_path}}
+ {{/if}}
+ {{if StackTrace|notempty}}<div class="depth-{{_maxDepth}} panic-summary">{{Error}}</div>{{/if}}
+ </div>
+ <div class="panic-output">{{if StackTrace|empty}}{{Error|htmlSafe|ansiColours}}{{else}}{{StackTrace|htmlSafe|ansiColours}}{{/if}}</div>
+ </div>
+ {{/.}}
+ </div>
+ </script>
+
+
+
+
+ <div class="toggler failures" data-toggle="failures">
+ Failures
+ </div>
+ <div class="togglable failures" id="failures"><!-- Populated by Javascript --></div>
+ <script id="tpl-failures" type="text/template">
+ <div class="templated">
+ {{.}}
+ <div class="failure">
+ <div class="failure-pkg"><i class="fa fa-file-code-o"></i>&nbsp; {{_pkg|boldPkgName}}</div>
+ <div class="failure-details">
+ <div class="failure-story">
+ <div class="failure-file">
+ {{if File|notempty}}
+ <a href="goconvey://open/?url=file://{{File|url}}&line={{Line}}">{{File|relativePath}}{{if Line|more>0}}:{{Line}}{{/if}}&nbsp; <i class="fa fa-external-link"></i></a>
+ {{else}}
+ <b class="test-name-link"><a href="#test-{{_id}}">{{TestName}}</a></b>
+ {{/if}}
+ </div>
+ {{if _path|notempty}}
+ {{_path}}
+ <div class="story-links{{if Depth|more>-1}} depth-{{Depth}}{{/if}}"><a href="#test-{{_id}}">{{Title}}</a></div>
+ {{/_path}}
+ {{/if}}
+ </div>
+ <div class="failure-output">{{if Failure|notempty}}{{Failure|htmlSafe|ansiColours}}{{else}}{{if Message|notempty}}{{Message|htmlSafe|ansiColours}}{{else}}{{StackTrace|htmlSafe|ansiColours}}{{/if}}{{/if}}</div>
+ {{if .|needsDiff}}
+ <table class="diffviewer">
+ <tr>
+ <td class="exp">Expected</td>
+ <td class="original">{{Expected|htmlSafe|ansiColours}}</td>
+ </tr>
+ <tr>
+ <td class="act">Actual</td>
+ <td class="changed">{{Actual|htmlSafe|ansiColours}}</td>
+ </tr>
+ <tr>
+ <td>Diff</td>
+ <td class="diff"></td>
+ </tr>
+ </table>
+ {{/if}}
+ </div>
+ </div>
+ {{/.}}
+ </div>
+ </script>
+
+
+
+
+ <div class="toggler stories" data-toggle="stories">
+ Stories
+ </div>
+ <div class="togglable stories" id="stories"><!-- Populated by Javascript --></div>
+
+ <script id="tpl-stories" type="text/template">
+ <table>
+ {{.}}
+ <tr class="story-pkg expanded pkg-{{_id}}" data-pkg="{{_id}}" data-pkg-name="{{PackageName}}" data-pkg-state="expanded" id="pkg-{{_id}}">
+ <td colspan="2">
+ <span class="pkg-toggle-container">
+ <a href="javascript:" class="fa fa-minus-square-o pkg-toggle"></a>
+ <span class="toggle-all-pkg">ALL</span>
+ </span>
+ </td>
+ <td class="story-pkg-name">
+ {{PackageName|boldPkgName}}
+ </td>
+ <td class="story-pkg-summary">
+ {{if _panicked|more>0}}<span class="story-pkg-panic-count"><span class="statusicon panic"><i class="fa fa-bolt"></i></span> {{_panicked}}
+ &nbsp;</span>{{/if}}
+ {{if _failed|more>0}}<span class="story-pkg-failure-count"><span class="statusicon fail">&#10007;</span> {{_failed}}
+ &nbsp;</span>{{/if}}
+ {{if _passed|more>0}}<span class="story-pkg-pass-count"><span class="statusicon ok">&#10003;</span> {{_passed}}</span>{{/if}}
+ {{if _skipped|more>0}}<span class="story-pkg-skip-count">&nbsp; <span class="statusicon skip"><b>S</b></span> {{_skipped}}</span>{{/if}}
+ </td>
+ <td class="story-pkg-watch-td">
+ {{if Outcome|equals>disabled}}
+ <span class="fa fa-lg fa-eye-slash disabled" title="Disabled"></span>
+ {{else}}
+ <a class="fa fa-lg ignore {{if Outcome|notequals>ignored}}fa-eye unwatch{{else}}fa-eye-slash watch clr-red{{/if}}" href="javascript:" title="Toggle ignore" data-pkg="{{PackageName}}"></a>
+ {{/if}}
+ </td>
+ </tr>
+ {{TestResults}}
+ <tr id="test-{{_id}}" class="story-line {{if _status.class}}{{_status.class}}{{else}}skip{{/if}} test-{{_id}} pkg-{{_pkgid}}">
+ <td class="story-line-status"></td>
+ <td class="story-line-summary-container">
+
+ {{if Stories|empty}} <!-- Not apparently a GoConvey test -->
+ {{if _passed}}<span class="statusicon ok">&#10003;</span><br>{{/if}}
+ {{if _failed}}<span class="statusicon fail">&#10007;</span><br>{{/if}}
+ {{if _panicked}}<span class="statusicon panic"><i class="fa fa-bolt"></i></span><br>{{/if}}
+ {{if _skipped}}<span class="statusicon skip"><b>S</b></span><br>{{/if}}
+ {{/if}}
+
+ </td>
+ <td colspan="3" class="depth-0 story-line-desc">
+ <b>{{TestName|htmlSafe|ansiColours}}</b>
+ {{if Message}}<div class="message">{{Message|htmlSafe|ansiColours}}</div>{{/if}}
+ </td>
+ </tr>
+
+
+
+ {{Stories}}
+ <tr class="story-line {{if _status.class}}{{_status.class}}{{else}}skip{{/if}} pkg-{{_pkgid}}" id="test-{{_id}}">
+ <td class="story-line-status"></td>
+ <td class="story-line-summary-container">
+
+ {{if _passed}}<span class="statusicon ok">&#10003; {{_passed}}</span><br>{{/if}}
+ {{if _failed}}<span class="statusicon fail">&#10007; {{_failed}}</span><br>{{/if}}
+ {{if _panicked}}<span class="statusicon panic"><i class="fa fa-bolt"></i> {{_panicked}}</span><br>{{/if}}
+ {{if _skipped}}<span class="statusicon skip"><b>S</b> {{_skipped}}</span><br>{{/if}}
+
+ </td>
+ <td colspan="3" class="depth-{{Depth}} story-line-desc">
+ {{Title|htmlSafe|ansiColours}}
+ {{if Output}}<div class="message">{{Output|htmlSafe|ansiColours}}</div>{{/if}}
+ {{if _failed}}
+ {{Assertions}}
+ {{if _failed}}
+ <div class="failure">
+ <div class="failure-details">
+ <div class="failure-output">{{if Failure|notempty}}{{Failure|htmlSafe|ansiColours}}{{else}}{{if Message|notempty}}{{Message|htmlSafe|ansiColours}}{{else}}
+ {{StackTrace|htmlSafe|ansiColours}}{{/if}}{{/if}}</div>
+ </div>
+ </div>
+ {{/if}}
+ {{/Assertions}}
+ {{/if}}
+ {{if _panicked}}
+ {{Assertions}}
+ {{if _panicked}}
+ <div class="panic">
+ <div class="panic-details">
+ <div class="panic-output">{{if Panic|notempty}}{{Panic|htmlSafe|ansiColours}}{{else}}{{if Message|notempty}}{{Message|htmlSafe|ansiColours}}{{else}}{{StackTrace|htmlSafe|ansiColours}}{{/if}}{{/if}}</div>
+ </div>
+ </div>
+ {{/if}}
+ {{/Assertions}}
+ {{/if}}
+ </td>
+ </tr>
+ {{/Stories}}
+
+
+ {{/TestResults}}
+ {{/.}}
+ </table>
+ </script>
+ </div>
+
+
+
+ <div class="col" id="col-3">
+ <div class="toggler" data-toggle="log">
+ LOG
+ </div>
+ <div class="togglable log" id="log"><!-- Populated by Javascript --></div>
+ <script id="tpl-log-line" type="text/template">
+ <div class="line"><span class="timestamp">[{{time}}]</span> {{msg|clean}}</div>
+ </script>
+ </div>
+
+ </div>
+
+ <footer>
+ <section>
+ <span id="summary">
+ <span class="info" id="time"><!-- Populated by Javascript --></span>
+ <span class="info" id="last-test-container">Last test <span id="last-test"><!-- Populated by Javascript --></span></span>
+ <span class="info" id="assert-count"><!-- Populated by Javascript --></span>
+ <span class="info fail-clr" id="fail-count"><!-- Populated by Javascript --></span>
+ <span class="info panic-clr" id="panic-count"><!-- Populated by Javascript --></span>
+ <span class="info skip-clr" id="skip-count"><!-- Populated by Javascript --></span>
+ <span class="momentjs" id="duration"><!-- Populated by Javascript --></span>
+ </span>
+ <span id="narrow-summary">
+ <span id="narrow-assert-count"><!-- Populated by Javascript --></span>:
+ <span class="fail-clr" id="narrow-fail-count"><!-- Populated by Javascript --></span> /
+ <span class="panic-clr" id="narrow-panic-count"><!-- Populated by Javascript --></span> /
+ <span class="skip-clr" id="narrow-skip-count"><!-- Populated by Javascript --></span>
+ </span>
+ </section>
+ <section>
+ <span class="server-not-down">
+ <span class="recording">
+ <i class="fa fa-circle"></i> LIVE
+ </span>
+ <span class="replay" title="Click for current test results">
+ <i class="fa fa-play"></i> REPLAY
+ </span>
+ <span class="paused">
+ <i class="fa fa-pause"></i> PAUSED
+ </span>
+ </span>
+ <span class="server-down">
+ <i class="fa fa-exclamation-triangle fa-lg flash"></i>
+ <span class="notice-message"><!-- Populated by Javascript --></span>
+ </span>
+ </section>
+ </footer>
+
+ </body>
+</html>
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/common.css b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/common.css
new file mode 100644
index 00000000000..7aa56d1f668
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/common.css
@@ -0,0 +1,962 @@
+/* Eric Meyer's Reset CSS v2.0 */
+html,body,div,span,applet,object,iframe,h1,h2,h3,h4,h5,h6,p,blockquote,pre,a,abbr,acronym,address,big,cite,code,del,dfn,em,img,ins,kbd,q,s,samp,small,strike,strong,sub,sup,tt,var,b,u,i,center,dl,dt,dd,ol,ul,li,fieldset,form,label,legend,table,caption,tbody,tfoot,thead,tr,th,td,article,aside,canvas,details,embed,figure,figcaption,footer,header,hgroup,menu,nav,output,ruby,section,summary,time,mark,audio,video{border:0;font-size:100%;font:inherit;vertical-align:baseline;margin:0;padding:0}article,aside,details,figcaption,figure,footer,header,hgroup,menu,nav,section{display:block}body{line-height:1}ol,ul{list-style:none}blockquote,q{quotes:none}blockquote:before,blockquote:after,q:before,q:after{content:none}table{border-collapse:collapse;border-spacing:0}
+
+@font-face {
+ font-family: 'Open Sans';
+ src: local("Open Sans"), url("../fonts/Open_Sans/OpenSans-Regular.ttf");
+}
+@font-face {
+ font-family: 'Orbitron';
+ src: local("Orbitron"), url("../fonts/Orbitron/Orbitron-Regular.ttf");
+}
+@font-face {
+ font-family: 'Oswald';
+ src: local("Oswald"), url("../fonts/Oswald/Oswald-Regular.ttf");
+}
+
+::selection {
+ background: #87AFBC;
+ color: #FFF;
+ text-shadow: none;
+}
+
+::-moz-selection {
+ background: #87AFBC;
+ color: #FFF;
+ text-shadow: none;
+}
+
+::-webkit-input-placeholder {
+ font-style: italic;
+}
+:-moz-placeholder {
+ font-style: italic;
+}
+::-moz-placeholder {
+ font-style: italic;
+}
+:-ms-input-placeholder {
+ font-style: italic;
+}
+
+
+
+html, body {
+ height: 100%;
+ min-height: 100%;
+}
+
+body {
+ -webkit-transform: translate3d(0, 0, 0); /* attempts to fix Chrome glitching on Mac */
+ background-position: fixed;
+ background-repeat: no-repeat;
+ font-family: Menlo, Monaco, 'Courier New', monospace;
+ line-height: 1.5em;
+ font-size: 14px;
+ overflow: hidden;
+ display: none;
+}
+
+a {
+ text-decoration: none;
+}
+
+a:hover {
+ text-decoration: underline;
+}
+
+a.fa {
+ text-decoration: none;
+}
+
+b {
+ font-weight: bold;
+}
+
+i {
+ font-style: italic;
+}
+
+hr {
+ border: 0;
+ background: 0;
+ height: 0;
+ margin: 0;
+ padding: 0;
+}
+
+input[type=text] {
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+
+ background: none;
+ border: none;
+ border-bottom-width: 1px;
+ border-bottom-style: solid;
+ outline: none;
+ padding-bottom: .1em;
+ font: 300 18px/1.5em 'Open Sans', sans-serif;
+}
+
+.overall {
+ padding: 30px 0 15px;
+ position: relative;
+ z-index: 50;
+}
+
+.status {
+ line-height: 1em;
+ font-family: 'Orbitron', monospace;
+ text-align: center;
+}
+
+.overall .status {
+ font-size: 46px;
+ letter-spacing: 5px;
+ text-transform: uppercase;
+ white-space: nowrap;
+}
+
+.toggler {
+ font-size: 10px;
+ padding: 3px 5px;
+ text-decoration: none;
+ text-transform: uppercase;
+ cursor: pointer;
+ line-height: 1.5em;
+}
+
+.toggler.narrow {
+ display: none;
+}
+
+.togglable {
+ overflow-x: auto;
+}
+
+.controls {
+ font-size: 18px;
+ line-height: 1em;
+}
+
+.controls li {
+ text-decoration: none;
+ display: block;
+ float: left;
+ padding: .75em;
+ cursor: pointer;
+}
+
+.server-down {
+ display: none;
+ text-align: center;
+ padding: 10px 0;
+}
+
+footer .server-down {
+ padding: 8px 15px;
+ text-transform: uppercase;
+}
+
+#logo {
+ font-family: 'Oswald', 'Impact', 'Arial Black', sans-serif;
+}
+
+#path-container {
+ margin-top: .4em;
+}
+
+#path {
+ width: 100%;
+ text-align: center;
+ border-bottom-width: 0;
+}
+
+#path:hover,
+#path:focus {
+ border-bottom-width: 1px;
+}
+
+.expandable {
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+
+ border-top-width: 1px;
+ border-top-style: solid;
+ overflow-y: hidden;
+ overflow-x: auto;
+ text-align: center;
+ white-space: nowrap;
+ display: none;
+}
+
+.settings {
+ white-space: normal;
+ overflow-x: auto;
+ white-space: nowrap;
+}
+
+.settings .setting-meta,
+.settings .setting-val {
+ display: inline-block;
+}
+
+.settings .container {
+ padding: 15px 0;
+}
+
+.settings .setting {
+ font-size: 13px;
+ display: inline-block;
+ margin-right: 5%;
+}
+
+.settings .setting:first-child {
+ margin-left: 5%;
+}
+
+.settings .setting .setting-meta {
+ text-align: right;
+ padding-right: 1em;
+ vertical-align: middle;
+ max-width: 150px;
+}
+
+.settings .setting .setting-meta small {
+ font-size: 8px;
+ text-transform: uppercase;
+ display: block;
+ line-height: 1.25em;
+}
+
+.history .container {
+ padding: 15px 0 15px 25%;
+}
+
+.history .item {
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+
+ transition: all .1s linear;
+ -moz-transition: all .1s linear;
+ -webkit-transition: all .1s linear;
+ -o-transition: all .1s linear;
+
+ display: inline-block;
+ text-align: left;
+ margin: 0 20px;
+ padding: 20px;
+ height: 100%;
+ width: 175px;
+ opacity: .7;
+ cursor: pointer;
+}
+
+.history .item:hover {
+ opacity: 1;
+}
+
+.history .item:nth-child(odd):hover {
+ -webkit-transform: scale(1.1) rotate(5deg);
+ -moz-transform: scale(1.1) rotate(5deg);
+}
+
+.history .item:nth-child(even):hover {
+ -webkit-transform: scale(1.1) rotate(-5deg);
+ -moz-transform: scale(1.1) rotate(-5deg);
+}
+
+.history .item .summary {
+ font: 14px/1.5em 'Monaco', 'Menlo', 'Courier New', monospace;
+}
+
+.history .item.selected {
+ opacity: 1;
+}
+
+.history .status {
+ font-size: 13px;
+}
+
+
+
+
+
+
+.frame {
+ position: relative;
+ z-index: 0;
+ width: 100%;
+}
+
+.frame .col {
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+
+ border-right-width: 1px;
+ border-right-style: solid;
+ float: left;
+ height: 100%;
+ overflow-y: auto;
+}
+
+.frame .col:first-child {
+ border-left: none;
+}
+
+.frame .col:last-child {
+ border-right: none;
+}
+
+
+#col-1 {
+ width: 15%;
+}
+
+#col-2 {
+ width: 60%;
+}
+
+#col-3 {
+ width: 25%;
+}
+
+#coverage {
+ font-size: 10px;
+ white-space: nowrap;
+}
+
+#coverage-color-template {
+ display: none;
+}
+
+.rtl {
+ direction: rtl;
+}
+
+.pkg-cover {
+ position: relative;
+}
+
+.pkg-cover a {
+ color: inherit !important;
+ text-decoration: none;
+}
+
+.pkg-cover-bar {
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 100%;
+ z-index: 1;
+}
+
+.pkg-cover-name {
+ position: relative;
+ z-index: 2;
+}
+
+.pkg-cover-name,
+.pkg-list {
+ font-family: 'Menlo', monospace;
+ font-size: 10px;
+ padding-right: 2%;
+ white-space: nowrap;
+}
+
+.buildfail-pkg,
+.panic-pkg,
+.failure-pkg {
+ padding: 5px 10px;
+ font: 14px 'Open Sans', sans-serif;
+}
+
+.buildfail-output,
+.panic-output,
+.failure-output {
+ padding: 10px;
+ font-size: 12px;
+ line-height: 1.25em;
+ overflow-y: auto;
+ white-space: pre-wrap;
+ font-family: 'Menlo', monospace;
+}
+
+.panic-story,
+.failure-story {
+ font-size: 10px;
+ line-height: 1.25em;
+ font-family: 'Open Sans', sans-serif;
+}
+
+.panic-summary {
+ font-size: 14px;
+ font-weight: bold;
+ line-height: 1.5em;
+}
+
+.panic-file,
+.failure-file {
+ font-size: 13px;
+ line-height: 1.5em;
+}
+
+.diffviewer {
+ border-collapse: collapse;
+ width: 100%;
+}
+
+.diffviewer td {
+ border-bottom-width: 1px;
+ border-bottom-style: solid;
+ padding: 2px 5px;
+ font-size: 14px;
+}
+
+.diffviewer .original,
+.diffviewer .changed,
+.diffviewer .diff {
+ white-space: pre-wrap;
+}
+
+.diffviewer tr:first-child td {
+ border-top-width: 1px;
+ border-top-style: solid;
+}
+
+.diffviewer td:first-child {
+ width: 65px;
+ font-size: 10px;
+ border-right-width: 1px;
+ border-right-style: solid;
+ text-transform: uppercase;
+}
+
+.diff ins {
+ text-decoration: none;
+}
+
+
+
+#stories table {
+ width: 100%;
+}
+
+
+.story-pkg {
+ cursor: pointer;
+}
+
+.story-pkg td {
+ font: 16px 'Open Sans', sans-serif;
+ white-space: nowrap;
+ padding: 10px;
+}
+
+.story-pkg td:first-child {
+ width: 1em;
+}
+
+.story-line {
+ font: 12px 'Open Sans', sans-serif;
+ cursor: default;
+}
+
+.story-line td {
+ padding-top: 7px;
+ padding-bottom: 7px;
+}
+
+.pkg-toggle-container {
+ position: relative;
+ display: inline-block;
+}
+
+.toggle-all-pkg {
+ font-size: 10px;
+ text-transform: uppercase;
+ position: absolute;
+ padding: 5px;
+ font-family: 'Menlo', 'Open Sans', sans-serif;
+ display: none;
+}
+
+.story-line-summary-container {
+ padding: 0 10px 0 10px;
+ white-space: nowrap;
+ width: 35px;
+ text-align: center;
+}
+
+.story-line-status {
+ width: 6px;
+ min-width: 6px;
+ height: 100%;
+}
+
+.story-line-desc {
+ padding: 5px;
+}
+
+.story-line-desc .message {
+ font-family: 'Menlo', monospace;
+ white-space: pre-wrap;
+}
+
+.statusicon {
+ font: 14px 'Open Sans', sans-serif;
+}
+
+.statusicon.skip {
+ font-size: 16px;
+}
+
+
+.depth-0 { padding-left: 1.5em !important; }
+.depth-1 { padding-left: 3em !important; }
+.depth-2 { padding-left: 4.5em !important; }
+.depth-3 { padding-left: 6em !important; }
+.depth-4 { padding-left: 7.5em !important; }
+.depth-5 { padding-left: 9em !important; }
+.depth-6 { padding-left: 10.5em !important; }
+.depth-7 { padding-left: 11em !important; }
+
+
+.log {
+ font-size: 11px;
+ line-height: 1.5em;
+ padding: 5px;
+ padding-bottom: .5em;
+}
+
+.log .line {
+ white-space: pre-wrap;
+ padding-left: 2em;
+ text-indent: -2em;
+}
+
+
+
+
+
+footer {
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+
+ position: absolute;
+ bottom: 0;
+ left: 0;
+ padding: 5px 15px;
+ width: 100%;
+ border-top-width: 1px;
+ border-top-style: solid;
+ font-size: 12px;
+}
+
+footer section {
+ float: left;
+}
+
+footer section:first-child {
+ width: 80%;
+}
+
+footer section:last-child {
+ text-align: right;
+ width: 20%;
+}
+
+footer .info {
+ padding: 0 10px;
+}
+
+footer .info:first-child {
+ padding-left: 0;
+}
+
+#narrow-summary {
+ display: none;
+}
+
+footer .replay,
+footer .paused {
+ display: none;
+}
+
+footer .replay {
+ cursor: pointer;
+}
+
+footer .server-down .notice-message {
+ font-size: 10px;
+}
+
+
+
+
+.rel {
+ position: relative;
+}
+
+.text-right {
+ text-align: right;
+}
+
+.text-center {
+ text-align: center;
+}
+
+.text-left {
+ text-align: left;
+}
+
+.float-left {
+ float: left;
+}
+
+.float-right {
+ float: right;
+}
+
+.clear {
+ clear: both;
+}
+
+.nowrap {
+ white-space: nowrap;
+}
+
+.clr-blue {
+ color: #2B597F;
+}
+
+.show {
+ display: block;
+}
+
+.hide {
+ display: none;
+}
+
+.enum {
+ cursor: pointer;
+ display: inline-block;
+ font-size: 12px;
+ border-width: 1px;
+ border-style: solid;
+ border-radius: 9px;
+ vertical-align: middle;
+}
+
+.enum > li {
+ display: block;
+ float: left;
+ padding: 5px 12px;
+ border-left-width: 1px;
+ border-left-style: solid;
+}
+
+.enum > li:first-child {
+ border-left: 0px;
+ border-top-left-radius: 8px;
+ border-bottom-left-radius: 8px;
+}
+
+.enum > li:last-child {
+ border-top-right-radius: 8px;
+ border-bottom-right-radius: 8px;
+}
+
+
+
+
+
+
+
+
+.disabled {
+ cursor: default !important;
+ background: transparent !important;
+}
+
+.spin-once {
+ -webkit-animation: spin 0.5s 1 ease;
+ animation: spin 0.5s 1 ease;
+}
+
+.spin-slowly {
+ -webkit-animation: spin .75s infinite linear;
+ animation: spin .75s infinite linear;
+}
+
+.throb {
+ -webkit-animation: throb 2.5s ease-in-out infinite;
+ -moz-animation: throb 2.5s ease-in-out infinite;
+ -o-animation: throb 2.5s ease-in-out infinite;
+ animation: throb 2.5s ease-in-out infinite;
+}
+
+.flash {
+ -webkit-animation: flash 4s linear infinite;
+ -moz-animation: flash 4s linear infinite;
+ -o-animation: flash 4s linear infinite;
+ animation: flash 4s linear infinite;
+}
+
+
+
+
+
+/* Clearfix */
+.cf:before,
+.cf:after {
+ content: " ";
+ display: table;
+}
+.cf:after {
+ clear: both;
+}
+
+
+
+
+
+
+@media (max-width: 1099px) {
+ #col-1 {
+ width: 25%;
+ }
+
+ #col-2 {
+ width: 75%;
+ border-right: none;
+ }
+
+ #col-3 {
+ display: none;
+ }
+
+ footer #duration {
+ display: none;
+ }
+}
+
+@media (max-width: 900px) {
+ footer #last-test-container {
+ display: none;
+ }
+}
+
+@media (min-width: 850px) and (max-width: 1220px) {
+ #path {
+ font-size: 14px;
+ margin-top: 5px;
+ }
+}
+
+@media (min-width: 700px) and (max-width: 849px) {
+ #path {
+ font-size: 12px;
+ margin-top: 8px;
+ }
+}
+
+@media (max-width: 799px) {
+ #col-1 {
+ display: none;
+ }
+
+ #col-2 {
+ width: 100%;
+ }
+
+ #stories .story-pkg-name {
+ font-size: 14px;
+ }
+
+ #stories .story-pkg-watch-td {
+ display: none;
+ }
+}
+
+@media (max-width: 700px) {
+ #path-container {
+ display: none;
+ }
+
+ footer #time {
+ display: none;
+ }
+
+ footer .info {
+ padding: 0 5px;
+ }
+
+ footer .server-down .notice-message {
+ display: none;
+ }
+}
+
+@media (max-width: 499px) {
+ .toggler.narrow {
+ display: block;
+ }
+
+ #show-gen {
+ display: none;
+ }
+
+ .hide-narrow {
+ display: none;
+ }
+
+ .show-narrow {
+ display: block;
+ }
+
+ .overall .status {
+ font-size: 28px;
+ letter-spacing: 1px;
+ }
+
+ .toggler {
+ display: block;
+ }
+
+ .controls ul {
+ text-align: center;
+ float: none;
+ }
+
+ .controls li {
+ display: inline-block;
+ float: none;
+ }
+
+ .enum > li {
+ float: left;
+ display: block;
+ }
+
+ #logo {
+ display: none;
+ }
+
+ .history .item {
+ margin: 0 5px;
+ }
+
+ .history .item .summary {
+ display: none;
+ }
+
+ .server-down {
+ font-size: 14px;
+ }
+
+ #stories .story-pkg-name {
+ font-size: 16px;
+ }
+
+ #stories .not-pkg-name {
+ display: none;
+ }
+
+ footer #duration {
+ display: none;
+ }
+
+ footer #summary {
+ display: none;
+ }
+
+ footer #narrow-summary {
+ display: inline;
+ }
+}
+
+
+
+
+/**
+ Custom CSS Animations
+**/
+
+
+
+@-webkit-keyframes throb {
+ 0% { opacity: 1; }
+ 50% { opacity: .35; }
+ 100% { opacity: 1; }
+}
+@-moz-keyframes throb {
+ 0% { opacity: 1; }
+ 50% { opacity: .35; }
+ 100% { opacity: 1; }
+}
+@-o-keyframes throb {
+ 0% { opacity: 1; }
+ 50% { opacity: .35; }
+ 100% { opacity: 1; }
+}
+@keyframes throb {
+ 0% { opacity: 1; }
+ 50% { opacity: .35; }
+ 100% { opacity: 1; }
+}
+
+
+@-webkit-keyframes flash {
+ 70% { opacity: 1; }
+ 90% { opacity: 0; }
+ 98% { opacity: 0; }
+ 100% { opacity: 1; }
+}
+@-moz-keyframes flash {
+ 70% { opacity: 1; }
+ 90% { opacity: 0; }
+ 98% { opacity: 0; }
+ 100% { opacity: 1; }
+}
+@-o-keyframes flash {
+ 70% { opacity: 1; }
+ 90% { opacity: 0; }
+ 98% { opacity: 0; }
+ 100% { opacity: 1; }
+}
+@keyframes flash {
+ 70% { opacity: 1; }
+ 90% { opacity: 0; }
+ 98% { opacity: 0; }
+ 100% { opacity: 1; }
+}
+
+
+
+
+
+
+
+
+
+
+
+/*
+#coverage {
+ perspective: 1000;
+}
+
+#coverage .pkg-cover {
+ -webkit-transition: .7s;
+ transform-style: preserve-3d;
+ position: relative;
+}
+
+#coverage:hover .pkg-cover {
+ -webkit-transform: rotateX(180deg);
+}*/
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/composer.css b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/composer.css
new file mode 100644
index 00000000000..6dd344ba5c5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/composer.css
@@ -0,0 +1,65 @@
+/* Eric Meyer's Reset CSS v2.0 */
+html,body,div,span,applet,object,iframe,h1,h2,h3,h4,h5,h6,p,blockquote,pre,a,abbr,acronym,address,big,cite,code,del,dfn,em,img,ins,kbd,q,s,samp,small,strike,strong,sub,sup,tt,var,b,u,i,center,dl,dt,dd,ol,ul,li,fieldset,form,label,legend,table,caption,tbody,tfoot,thead,tr,th,td,article,aside,canvas,details,embed,figure,figcaption,footer,header,hgroup,menu,nav,output,ruby,section,summary,time,mark,audio,video{border:0;font-size:100%;font:inherit;vertical-align:baseline;margin:0;padding:0}article,aside,details,figcaption,figure,footer,header,hgroup,menu,nav,section{display:block}body{line-height:1}ol,ul{list-style:none}blockquote,q{quotes:none}blockquote:before,blockquote:after,q:before,q:after{content:none}table{border-collapse:collapse;border-spacing:0}
+
+@font-face {
+ font-family: 'Open Sans';
+ src: local("Open Sans"), url("../fonts/Open_Sans/OpenSans-Regular.ttf");
+}
+@font-face {
+ font-family: 'Oswald';
+ src: local("Oswald"), url("../fonts/Oswald/Oswald-Regular.ttf");
+}
+
+body {
+ font-family: 'Open Sans', 'Helvetica Neue', sans-serif;
+ font-size: 16px;
+}
+
+header {
+ background: #2C3F49;
+ padding: 10px;
+}
+
+.logo {
+ font-family: Oswald, sans-serif;
+ font-size: 24px;
+ margin-right: 5px;
+ color: #DDD;
+}
+
+.afterlogo {
+ font-size: 12px;
+ text-transform: uppercase;
+ position: relative;
+ top: -3px;
+ color: #999;
+}
+
+#input,
+#output {
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+
+ padding: 15px;
+ height: 80%;
+ float: left;
+ overflow: auto;
+}
+
+#input {
+ border: 0;
+ font: 300 18px/1.5em 'Open Sans';
+ resize: none;
+ outline: none;
+ width: 50%;
+}
+
+#output {
+ width: 50%;
+ display: inline-block;
+ background: #F0F0F0;
+ font: 14px/1.25em 'Menlo', 'Monaco', 'Courier New', monospace;
+ border-left: 1px solid #CCC;
+ white-space: pre-wrap;
+}
\ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/font-awesome.min.css b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/font-awesome.min.css
new file mode 100644
index 00000000000..40403f53189
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/font-awesome.min.css
@@ -0,0 +1,5 @@
+/*!
+ * Font Awesome 4.1.0 by @davegandy - http://fontawesome.io - @fontawesome
+ * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)
+ */
+ @font-face{font-family:'FontAwesome';src:url('../fonts/FontAwesome/fontawesome-webfont.eot?v=4.1.0');src:url('../fonts/FontAwesome/fontawesome-webfont.eot?#iefix&v=4.1.0') format('embedded-opentype'),url('../fonts/FontAwesome/fontawesome-webfont.woff?v=4.1.0') format('woff'),url('../fonts/FontAwesome/fontawesome-webfont.ttf?v=4.1.0') format('truetype'),url('../fonts/FontAwesome/fontawesome-webfont.svg?v=4.1.0#fontawesomeregular') format('svg');font-weight:normal;font-style:normal}.fa{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571429em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14285714em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14285714em;width:2.14285714em;top:.14285714em;text-align:center}.fa-li.fa-lg{left:-1.85714286em}.fa-border{padding:.2em .25em .15em;border:solid .08em #eee;border-radius:.1em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left{margin-right:.3em}.fa.pull-right{margin-left:.3em}.fa-spin{-webkit-animation:spin 2s infinite linear;-moz-animation:spin 2s infinite linear;-o-animation:spin 2s infinite linear;animation:spin 2s infinite linear}@-moz-keyframes spin{0%{-moz-transform:rotate(0deg)}100%{-moz-transform:rotate(359deg)}}@-webkit-keyframes spin{0%{-webkit-transform:rotate(0deg)}100%{-webkit-transform:rotate(359deg)}}@-o-keyframes spin{0%{-o-transform:rotate(0deg)}100%{-o-transform:rotate(359deg)}}@keyframes spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}100%{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=1);-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=2);-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=3);-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1);-webkit-transform:scale(-1, 1);-moz-transform:scale(-1, 1);-ms-transform:scale(-1, 1);-o-transform:scale(-1, 1);transform:scale(-1, 1)}.fa-flip-vertical{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1);-webkit-transform:scale(1, -1);-moz-transform:scale(1, -1);-ms-transform:scale(1, -1);-o-transform:scale(1, -1);transform:scale(1, 
-1)}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:"\f000"}.fa-music:before{content:"\f001"}.fa-search:before{content:"\f002"}.fa-envelope-o:before{content:"\f003"}.fa-heart:before{content:"\f004"}.fa-star:before{content:"\f005"}.fa-star-o:before{content:"\f006"}.fa-user:before{content:"\f007"}.fa-film:before{content:"\f008"}.fa-th-large:before{content:"\f009"}.fa-th:before{content:"\f00a"}.fa-th-list:before{content:"\f00b"}.fa-check:before{content:"\f00c"}.fa-times:before{content:"\f00d"}.fa-search-plus:before{content:"\f00e"}.fa-search-minus:before{content:"\f010"}.fa-power-off:before{content:"\f011"}.fa-signal:before{content:"\f012"}.fa-gear:before,.fa-cog:before{content:"\f013"}.fa-trash-o:before{content:"\f014"}.fa-home:before{content:"\f015"}.fa-file-o:before{content:"\f016"}.fa-clock-o:before{content:"\f017"}.fa-road:before{content:"\f018"}.fa-download:before{content:"\f019"}.fa-arrow-circle-o-down:before{content:"\f01a"}.fa-arrow-circle-o-up:before{content:"\f01b"}.fa-inbox:before{content:"\f01c"}.fa-play-circle-o:before{content:"\f01d"}.fa-rotate-right:before,.fa-repeat:before{content:"\f01e"}.fa-refresh:before{content:"\f021"}.fa-list-alt:before{content:"\f022"}.fa-lock:before{content:"\f023"}.fa-flag:before{content:"\f024"}.fa-headphones:before{content:"\f025"}.fa-volume-off:before{content:"\f026"}.fa-volume-down:before{content:"\f027"}.fa-volume-up:before{content:"\f028"}.fa-qrcode:before{content:"\f029"}.fa-barcode:before{content:"\f02a"}.fa-tag:before{content:"\f02b"}.fa-tags:before{content:"\f02c"}.fa-book:before{content:"\f02d"}.fa-bookmark:before{content:"\f02e"}.fa-print:before{content:"\f02f"}.fa-camera:before{content:"\f030"}.fa-font:before{content:"\f031"}.fa-bold:before{content:"\f032"}.fa-italic:before{content:"\f033"}.fa-text-height:before{content:"\f034"}.fa-text-width:before{content:"\f035"}.fa-align-left:before{content:"\f036"}.fa-align-center:before{content:"\f037"}.fa-align-right:before{content:"\f038"}.fa-align-justify:before{content:"\f039"}.fa-list:before{content:"\f03a"}.fa-dedent:before,.fa-outdent:before{content:"\f03b"}.fa-indent:before{content:"\f03c"}.fa-video-camera:before{content:"\f03d"}.fa-photo:before,.fa-image:before,.fa-picture-o:before{content:"\f03e"}.fa-pencil:before{content:"\f040"}.fa-map-marker:before{content:"\f041"}.fa-adjust:before{content:"\f042"}.fa-tint:before{content:"\f043"}.fa-edit:before,.fa-pencil-square-o:before{content:"\f044"}.fa-share-square-o:before{content:"\f045"}.fa-check-square-o:before{content:"\f046"}.fa-arrows:before{content:"\f047"}.fa-step-backward:before{content:"\f048"}.fa-fast-backward:before{content:"\f049"}.fa-backward:before{content:"\f04a"}.fa-play:before{content:"\f04b"}.fa-pause:before{content:"\f04c"}.fa-stop:before{content:"\f04d"}.fa-forward:before{content:"\f04e"}.fa-fast-forward:before{content:"\f050"}.fa-step-forward:before{content:"\f051"}.fa-eject:before{content:"\f052"}.fa-chevron-left:before{content:"\f053"}.fa-chevron-right:before{content:"\f054"}.fa-plus-circle:before{content:"\f055"}.fa-minus-circle:before{content:"\f056"}.fa-times-circle:before{content:"\f057"}.fa-check-circle:before{content:"\f058"}.fa-question-circle:before{content:"\f059"}.fa-info-circle:before{content:"\f05a"}.fa-crosshairs:before{content:"\f05b"}.fa-times-circle-o:before{conten
t:"\f05c"}.fa-check-circle-o:before{content:"\f05d"}.fa-ban:before{content:"\f05e"}.fa-arrow-left:before{content:"\f060"}.fa-arrow-right:before{content:"\f061"}.fa-arrow-up:before{content:"\f062"}.fa-arrow-down:before{content:"\f063"}.fa-mail-forward:before,.fa-share:before{content:"\f064"}.fa-expand:before{content:"\f065"}.fa-compress:before{content:"\f066"}.fa-plus:before{content:"\f067"}.fa-minus:before{content:"\f068"}.fa-asterisk:before{content:"\f069"}.fa-exclamation-circle:before{content:"\f06a"}.fa-gift:before{content:"\f06b"}.fa-leaf:before{content:"\f06c"}.fa-fire:before{content:"\f06d"}.fa-eye:before{content:"\f06e"}.fa-eye-slash:before{content:"\f070"}.fa-warning:before,.fa-exclamation-triangle:before{content:"\f071"}.fa-plane:before{content:"\f072"}.fa-calendar:before{content:"\f073"}.fa-random:before{content:"\f074"}.fa-comment:before{content:"\f075"}.fa-magnet:before{content:"\f076"}.fa-chevron-up:before{content:"\f077"}.fa-chevron-down:before{content:"\f078"}.fa-retweet:before{content:"\f079"}.fa-shopping-cart:before{content:"\f07a"}.fa-folder:before{content:"\f07b"}.fa-folder-open:before{content:"\f07c"}.fa-arrows-v:before{content:"\f07d"}.fa-arrows-h:before{content:"\f07e"}.fa-bar-chart-o:before{content:"\f080"}.fa-twitter-square:before{content:"\f081"}.fa-facebook-square:before{content:"\f082"}.fa-camera-retro:before{content:"\f083"}.fa-key:before{content:"\f084"}.fa-gears:before,.fa-cogs:before{content:"\f085"}.fa-comments:before{content:"\f086"}.fa-thumbs-o-up:before{content:"\f087"}.fa-thumbs-o-down:before{content:"\f088"}.fa-star-half:before{content:"\f089"}.fa-heart-o:before{content:"\f08a"}.fa-sign-out:before{content:"\f08b"}.fa-linkedin-square:before{content:"\f08c"}.fa-thumb-tack:before{content:"\f08d"}.fa-external-link:before{content:"\f08e"}.fa-sign-in:before{content:"\f090"}.fa-trophy:before{content:"\f091"}.fa-github-square:before{content:"\f092"}.fa-upload:before{content:"\f093"}.fa-lemon-o:before{content:"\f094"}.fa-phone:before{content:"\f095"}.fa-square-o:before{content:"\f096"}.fa-bookmark-o:before{content:"\f097"}.fa-phone-square:before{content:"\f098"}.fa-twitter:before{content:"\f099"}.fa-facebook:before{content:"\f09a"}.fa-github:before{content:"\f09b"}.fa-unlock:before{content:"\f09c"}.fa-credit-card:before{content:"\f09d"}.fa-rss:before{content:"\f09e"}.fa-hdd-o:before{content:"\f0a0"}.fa-bullhorn:before{content:"\f0a1"}.fa-bell:before{content:"\f0f3"}.fa-certificate:before{content:"\f0a3"}.fa-hand-o-right:before{content:"\f0a4"}.fa-hand-o-left:before{content:"\f0a5"}.fa-hand-o-up:before{content:"\f0a6"}.fa-hand-o-down:before{content:"\f0a7"}.fa-arrow-circle-left:before{content:"\f0a8"}.fa-arrow-circle-right:before{content:"\f0a9"}.fa-arrow-circle-up:before{content:"\f0aa"}.fa-arrow-circle-down:before{content:"\f0ab"}.fa-globe:before{content:"\f0ac"}.fa-wrench:before{content:"\f0ad"}.fa-tasks:before{content:"\f0ae"}.fa-filter:before{content:"\f0b0"}.fa-briefcase:before{content:"\f0b1"}.fa-arrows-alt:before{content:"\f0b2"}.fa-group:before,.fa-users:before{content:"\f0c0"}.fa-chain:before,.fa-link:before{content:"\f0c1"}.fa-cloud:before{content:"\f0c2"}.fa-flask:before{content:"\f0c3"}.fa-cut:before,.fa-scissors:before{content:"\f0c4"}.fa-copy:before,.fa-files-o:before{content:"\f0c5"}.fa-paperclip:before{content:"\f0c6"}.fa-save:before,.fa-floppy-o:before{content:"\f0c7"}.fa-square:before{content:"\f0c8"}.fa-navicon:before,.fa-reorder:before,.fa-bars:before{content:"\f0c9"}.fa-list-ul:before{content:"\f0ca"}.fa-list-ol:before{content:"\f0cb"}.fa-str
ikethrough:before{content:"\f0cc"}.fa-underline:before{content:"\f0cd"}.fa-table:before{content:"\f0ce"}.fa-magic:before{content:"\f0d0"}.fa-truck:before{content:"\f0d1"}.fa-pinterest:before{content:"\f0d2"}.fa-pinterest-square:before{content:"\f0d3"}.fa-google-plus-square:before{content:"\f0d4"}.fa-google-plus:before{content:"\f0d5"}.fa-money:before{content:"\f0d6"}.fa-caret-down:before{content:"\f0d7"}.fa-caret-up:before{content:"\f0d8"}.fa-caret-left:before{content:"\f0d9"}.fa-caret-right:before{content:"\f0da"}.fa-columns:before{content:"\f0db"}.fa-unsorted:before,.fa-sort:before{content:"\f0dc"}.fa-sort-down:before,.fa-sort-desc:before{content:"\f0dd"}.fa-sort-up:before,.fa-sort-asc:before{content:"\f0de"}.fa-envelope:before{content:"\f0e0"}.fa-linkedin:before{content:"\f0e1"}.fa-rotate-left:before,.fa-undo:before{content:"\f0e2"}.fa-legal:before,.fa-gavel:before{content:"\f0e3"}.fa-dashboard:before,.fa-tachometer:before{content:"\f0e4"}.fa-comment-o:before{content:"\f0e5"}.fa-comments-o:before{content:"\f0e6"}.fa-flash:before,.fa-bolt:before{content:"\f0e7"}.fa-sitemap:before{content:"\f0e8"}.fa-umbrella:before{content:"\f0e9"}.fa-paste:before,.fa-clipboard:before{content:"\f0ea"}.fa-lightbulb-o:before{content:"\f0eb"}.fa-exchange:before{content:"\f0ec"}.fa-cloud-download:before{content:"\f0ed"}.fa-cloud-upload:before{content:"\f0ee"}.fa-user-md:before{content:"\f0f0"}.fa-stethoscope:before{content:"\f0f1"}.fa-suitcase:before{content:"\f0f2"}.fa-bell-o:before{content:"\f0a2"}.fa-coffee:before{content:"\f0f4"}.fa-cutlery:before{content:"\f0f5"}.fa-file-text-o:before{content:"\f0f6"}.fa-building-o:before{content:"\f0f7"}.fa-hospital-o:before{content:"\f0f8"}.fa-ambulance:before{content:"\f0f9"}.fa-medkit:before{content:"\f0fa"}.fa-fighter-jet:before{content:"\f0fb"}.fa-beer:before{content:"\f0fc"}.fa-h-square:before{content:"\f0fd"}.fa-plus-square:before{content:"\f0fe"}.fa-angle-double-left:before{content:"\f100"}.fa-angle-double-right:before{content:"\f101"}.fa-angle-double-up:before{content:"\f102"}.fa-angle-double-down:before{content:"\f103"}.fa-angle-left:before{content:"\f104"}.fa-angle-right:before{content:"\f105"}.fa-angle-up:before{content:"\f106"}.fa-angle-down:before{content:"\f107"}.fa-desktop:before{content:"\f108"}.fa-laptop:before{content:"\f109"}.fa-tablet:before{content:"\f10a"}.fa-mobile-phone:before,.fa-mobile:before{content:"\f10b"}.fa-circle-o:before{content:"\f10c"}.fa-quote-left:before{content:"\f10d"}.fa-quote-right:before{content:"\f10e"}.fa-spinner:before{content:"\f110"}.fa-circle:before{content:"\f111"}.fa-mail-reply:before,.fa-reply:before{content:"\f112"}.fa-github-alt:before{content:"\f113"}.fa-folder-o:before{content:"\f114"}.fa-folder-open-o:before{content:"\f115"}.fa-smile-o:before{content:"\f118"}.fa-frown-o:before{content:"\f119"}.fa-meh-o:before{content:"\f11a"}.fa-gamepad:before{content:"\f11b"}.fa-keyboard-o:before{content:"\f11c"}.fa-flag-o:before{content:"\f11d"}.fa-flag-checkered:before{content:"\f11e"}.fa-terminal:before{content:"\f120"}.fa-code:before{content:"\f121"}.fa-mail-reply-all:before,.fa-reply-all:before{content:"\f122"}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:"\f123"}.fa-location-arrow:before{content:"\f124"}.fa-crop:before{content:"\f125"}.fa-code-fork:before{content:"\f126"}.fa-unlink:before,.fa-chain-broken:before{content:"\f127"}.fa-question:before{content:"\f128"}.fa-info:before{content:"\f129"}.fa-exclamation:before{content:"\f12a"}.fa-superscript:before{content:"\f12b"}.fa-subscript:b
efore{content:"\f12c"}.fa-eraser:before{content:"\f12d"}.fa-puzzle-piece:before{content:"\f12e"}.fa-microphone:before{content:"\f130"}.fa-microphone-slash:before{content:"\f131"}.fa-shield:before{content:"\f132"}.fa-calendar-o:before{content:"\f133"}.fa-fire-extinguisher:before{content:"\f134"}.fa-rocket:before{content:"\f135"}.fa-maxcdn:before{content:"\f136"}.fa-chevron-circle-left:before{content:"\f137"}.fa-chevron-circle-right:before{content:"\f138"}.fa-chevron-circle-up:before{content:"\f139"}.fa-chevron-circle-down:before{content:"\f13a"}.fa-html5:before{content:"\f13b"}.fa-css3:before{content:"\f13c"}.fa-anchor:before{content:"\f13d"}.fa-unlock-alt:before{content:"\f13e"}.fa-bullseye:before{content:"\f140"}.fa-ellipsis-h:before{content:"\f141"}.fa-ellipsis-v:before{content:"\f142"}.fa-rss-square:before{content:"\f143"}.fa-play-circle:before{content:"\f144"}.fa-ticket:before{content:"\f145"}.fa-minus-square:before{content:"\f146"}.fa-minus-square-o:before{content:"\f147"}.fa-level-up:before{content:"\f148"}.fa-level-down:before{content:"\f149"}.fa-check-square:before{content:"\f14a"}.fa-pencil-square:before{content:"\f14b"}.fa-external-link-square:before{content:"\f14c"}.fa-share-square:before{content:"\f14d"}.fa-compass:before{content:"\f14e"}.fa-toggle-down:before,.fa-caret-square-o-down:before{content:"\f150"}.fa-toggle-up:before,.fa-caret-square-o-up:before{content:"\f151"}.fa-toggle-right:before,.fa-caret-square-o-right:before{content:"\f152"}.fa-euro:before,.fa-eur:before{content:"\f153"}.fa-gbp:before{content:"\f154"}.fa-dollar:before,.fa-usd:before{content:"\f155"}.fa-rupee:before,.fa-inr:before{content:"\f156"}.fa-cny:before,.fa-rmb:before,.fa-yen:before,.fa-jpy:before{content:"\f157"}.fa-ruble:before,.fa-rouble:before,.fa-rub:before{content:"\f158"}.fa-won:before,.fa-krw:before{content:"\f159"}.fa-bitcoin:before,.fa-btc:before{content:"\f15a"}.fa-file:before{content:"\f15b"}.fa-file-text:before{content:"\f15c"}.fa-sort-alpha-asc:before{content:"\f15d"}.fa-sort-alpha-desc:before{content:"\f15e"}.fa-sort-amount-asc:before{content:"\f160"}.fa-sort-amount-desc:before{content:"\f161"}.fa-sort-numeric-asc:before{content:"\f162"}.fa-sort-numeric-desc:before{content:"\f163"}.fa-thumbs-up:before{content:"\f164"}.fa-thumbs-down:before{content:"\f165"}.fa-youtube-square:before{content:"\f166"}.fa-youtube:before{content:"\f167"}.fa-xing:before{content:"\f168"}.fa-xing-square:before{content:"\f169"}.fa-youtube-play:before{content:"\f16a"}.fa-dropbox:before{content:"\f16b"}.fa-stack-overflow:before{content:"\f16c"}.fa-instagram:before{content:"\f16d"}.fa-flickr:before{content:"\f16e"}.fa-adn:before{content:"\f170"}.fa-bitbucket:before{content:"\f171"}.fa-bitbucket-square:before{content:"\f172"}.fa-tumblr:before{content:"\f173"}.fa-tumblr-square:before{content:"\f174"}.fa-long-arrow-down:before{content:"\f175"}.fa-long-arrow-up:before{content:"\f176"}.fa-long-arrow-left:before{content:"\f177"}.fa-long-arrow-right:before{content:"\f178"}.fa-apple:before{content:"\f179"}.fa-windows:before{content:"\f17a"}.fa-android:before{content:"\f17b"}.fa-linux:before{content:"\f17c"}.fa-dribbble:before{content:"\f17d"}.fa-skype:before{content:"\f17e"}.fa-foursquare:before{content:"\f180"}.fa-trello:before{content:"\f181"}.fa-female:before{content:"\f182"}.fa-male:before{content:"\f183"}.fa-gittip:before{content:"\f184"}.fa-sun-o:before{content:"\f185"}.fa-moon-o:before{content:"\f186"}.fa-archive:before{content:"\f187"}.fa-bug:before{content:"\f188"}.fa-vk:before{content:"\f189"}.fa-weibo:before{content
:"\f18a"}.fa-renren:before{content:"\f18b"}.fa-pagelines:before{content:"\f18c"}.fa-stack-exchange:before{content:"\f18d"}.fa-arrow-circle-o-right:before{content:"\f18e"}.fa-arrow-circle-o-left:before{content:"\f190"}.fa-toggle-left:before,.fa-caret-square-o-left:before{content:"\f191"}.fa-dot-circle-o:before{content:"\f192"}.fa-wheelchair:before{content:"\f193"}.fa-vimeo-square:before{content:"\f194"}.fa-turkish-lira:before,.fa-try:before{content:"\f195"}.fa-plus-square-o:before{content:"\f196"}.fa-space-shuttle:before{content:"\f197"}.fa-slack:before{content:"\f198"}.fa-envelope-square:before{content:"\f199"}.fa-wordpress:before{content:"\f19a"}.fa-openid:before{content:"\f19b"}.fa-institution:before,.fa-bank:before,.fa-university:before{content:"\f19c"}.fa-mortar-board:before,.fa-graduation-cap:before{content:"\f19d"}.fa-yahoo:before{content:"\f19e"}.fa-google:before{content:"\f1a0"}.fa-reddit:before{content:"\f1a1"}.fa-reddit-square:before{content:"\f1a2"}.fa-stumbleupon-circle:before{content:"\f1a3"}.fa-stumbleupon:before{content:"\f1a4"}.fa-delicious:before{content:"\f1a5"}.fa-digg:before{content:"\f1a6"}.fa-pied-piper-square:before,.fa-pied-piper:before{content:"\f1a7"}.fa-pied-piper-alt:before{content:"\f1a8"}.fa-drupal:before{content:"\f1a9"}.fa-joomla:before{content:"\f1aa"}.fa-language:before{content:"\f1ab"}.fa-fax:before{content:"\f1ac"}.fa-building:before{content:"\f1ad"}.fa-child:before{content:"\f1ae"}.fa-paw:before{content:"\f1b0"}.fa-spoon:before{content:"\f1b1"}.fa-cube:before{content:"\f1b2"}.fa-cubes:before{content:"\f1b3"}.fa-behance:before{content:"\f1b4"}.fa-behance-square:before{content:"\f1b5"}.fa-steam:before{content:"\f1b6"}.fa-steam-square:before{content:"\f1b7"}.fa-recycle:before{content:"\f1b8"}.fa-automobile:before,.fa-car:before{content:"\f1b9"}.fa-cab:before,.fa-taxi:before{content:"\f1ba"}.fa-tree:before{content:"\f1bb"}.fa-spotify:before{content:"\f1bc"}.fa-deviantart:before{content:"\f1bd"}.fa-soundcloud:before{content:"\f1be"}.fa-database:before{content:"\f1c0"}.fa-file-pdf-o:before{content:"\f1c1"}.fa-file-word-o:before{content:"\f1c2"}.fa-file-excel-o:before{content:"\f1c3"}.fa-file-powerpoint-o:before{content:"\f1c4"}.fa-file-photo-o:before,.fa-file-picture-o:before,.fa-file-image-o:before{content:"\f1c5"}.fa-file-zip-o:before,.fa-file-archive-o:before{content:"\f1c6"}.fa-file-sound-o:before,.fa-file-audio-o:before{content:"\f1c7"}.fa-file-movie-o:before,.fa-file-video-o:before{content:"\f1c8"}.fa-file-code-o:before{content:"\f1c9"}.fa-vine:before{content:"\f1ca"}.fa-codepen:before{content:"\f1cb"}.fa-jsfiddle:before{content:"\f1cc"}.fa-life-bouy:before,.fa-life-saver:before,.fa-support:before,.fa-life-ring:before{content:"\f1cd"}.fa-circle-o-notch:before{content:"\f1ce"}.fa-ra:before,.fa-rebel:before{content:"\f1d0"}.fa-ge:before,.fa-empire:before{content:"\f1d1"}.fa-git-square:before{content:"\f1d2"}.fa-git:before{content:"\f1d3"}.fa-hacker-news:before{content:"\f1d4"}.fa-tencent-weibo:before{content:"\f1d5"}.fa-qq:before{content:"\f1d6"}.fa-wechat:before,.fa-weixin:before{content:"\f1d7"}.fa-send:before,.fa-paper-plane:before{content:"\f1d8"}.fa-send-o:before,.fa-paper-plane-o:before{content:"\f1d9"}.fa-history:before{content:"\f1da"}.fa-circle-thin:before{content:"\f1db"}.fa-header:before{content:"\f1dc"}.fa-paragraph:before{content:"\f1dd"}.fa-sliders:before{content:"\f1de"}.fa-share-alt:before{content:"\f1e0"}.fa-share-alt-square:before{content:"\f1e1"}.fa-bomb:before{content:"\f1e2"} \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/dark-bigtext.css b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/dark-bigtext.css
new file mode 100644
index 00000000000..38d71340208
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/dark-bigtext.css
@@ -0,0 +1,400 @@
+/* This is a fork of the dark.css theme. The only changes from dark.css are near the very end. */
+
+::-webkit-scrollbar {
+ width: 10px;
+ height: 10px;
+}
+
+::-webkit-scrollbar-corner {
+ background: transparent;
+}
+
+::-webkit-scrollbar-thumb {
+ background-color: rgba(255, 255, 255, .35);
+ border-radius: 10px;
+}
+
+body {
+ color: #D0D0D0;
+ background: fixed #040607;
+ background: fixed -moz-linear-gradient(top, hsl(200,27%,2%) 0%, hsl(203,29%,26%) 100%);
+ background: fixed -webkit-gradient(linear, left top, left bottom, color-stop(0%,hsl(200,27%,2%)), color-stop(100%,hsl(203,29%,26%)));
+ background: fixed -webkit-linear-gradient(top, hsl(200,27%,2%) 0%,hsl(203,29%,26%) 100%);
+ background: fixed -o-linear-gradient(top, hsl(200,27%,2%) 0%,hsl(203,29%,26%) 100%);
+ background: fixed -ms-linear-gradient(top, hsl(200,27%,2%) 0%,hsl(203,29%,26%) 100%);
+ background: fixed linear-gradient(to bottom, hsl(200,27%,2%) 0%,hsl(203,29%,26%) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#040607', endColorstr='#2f4756',GradientType=0 );
+}
+
+a,
+.toggle-all-pkg {
+ color: #247D9E;
+}
+
+a:hover,
+.toggle-all-pkg:hover {
+ color: #33B5E5;
+}
+
+input[type=text] {
+ border-bottom-color: #33B5E5;
+ color: #BBB;
+}
+
+::-webkit-input-placeholder {
+ color: #555;
+}
+:-moz-placeholder {
+ color: #555;
+}
+::-moz-placeholder {
+ color: #555;
+}
+:-ms-input-placeholder {
+ color: #555;
+}
+
+.overall {
+ /*
+ Using box-shadow here is not very performant but allows us
+ to animate the change of the background color much more easily.
+ This box-shadow is an ALTERNATIVE, not supplement, to using gradients
+ in this case.
+ */
+ box-shadow: inset 0 150px 100px -110px rgba(0, 0, 0, .5);
+}
+
+.overall.ok {
+ background: #688E00;
+}
+
+.overall.fail {
+ background: #DB8700;
+}
+
+.overall.panic {
+ background: #A80000;
+}
+
+.overall.buildfail {
+ background: #A4A8AA;
+}
+
+.overall .status {
+ color: #EEE;
+}
+
+.server-down {
+ background: rgba(255, 45, 45, 0.55);
+ color: #FFF;
+}
+
+.toggler {
+ background: #132535;
+}
+
+.toggler:hover {
+ background: #1C374F;
+}
+
+.controls {
+ border-bottom: 1px solid #33B5E5;
+}
+
+.controls li {
+ color: #2A5A84;
+}
+
+.controls li:hover {
+ background: #132535;
+ color: #33B5E5;
+}
+
+.sel {
+ background: #33B5E5 !important;
+ color: #FFF !important;
+}
+
+.pkg-cover-name {
+ text-shadow: 1px 1px 0px #000;
+}
+
+.pkg-cover-name b,
+.story-pkg-name b {
+ color: #FFF;
+ font-weight: bold;
+}
+
+.pkg-cover:hover,
+.pkg-cover:hover b {
+ color: #FFF;
+}
+
+.expandable {
+ border-top-color: #33B5E5;
+}
+
+.expandable {
+ background: rgba(0, 0, 0, .2);
+}
+
+.history .item.ok {
+ background: #3f5400;
+ background: -moz-linear-gradient(top, hsl(75,100%,16%) 0%, hsl(76,100%,28%) 100%);
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,hsl(75,100%,16%)), color-stop(100%,hsl(76,100%,28%)));
+ background: -webkit-linear-gradient(top, hsl(75,100%,16%) 0%,hsl(76,100%,28%) 100%);
+ background: -o-linear-gradient(top, hsl(75,100%,16%) 0%,hsl(76,100%,28%) 100%);
+ background: -ms-linear-gradient(top, hsl(75,100%,16%) 0%,hsl(76,100%,28%) 100%);
+ background: linear-gradient(to bottom, hsl(75,100%,16%) 0%,hsl(76,100%,28%) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#3f5400', endColorstr='#698f00',GradientType=0 );
+}
+
+.history .item.fail {
+ background: #7f4e00;
+ background: -moz-linear-gradient(top, hsl(37,100%,25%) 0%, hsl(37,100%,43%) 100%);
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,hsl(37,100%,25%)), color-stop(100%,hsl(37,100%,43%)));
+ background: -webkit-linear-gradient(top, hsl(37,100%,25%) 0%,hsl(37,100%,43%) 100%);
+ background: -o-linear-gradient(top, hsl(37,100%,25%) 0%,hsl(37,100%,43%) 100%);
+ background: -ms-linear-gradient(top, hsl(37,100%,25%) 0%,hsl(37,100%,43%) 100%);
+ background: linear-gradient(to bottom, hsl(37,100%,25%) 0%,hsl(37,100%,43%) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#7f4e00', endColorstr='#db8700',GradientType=0 );
+}
+
+.history .item.panic {
+ background: #660000;
+ background: -moz-linear-gradient(top, hsl(0,100%,20%) 0%, hsl(0,100%,33%) 100%);
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,hsl(0,100%,20%)), color-stop(100%,hsl(0,100%,33%)));
+ background: -webkit-linear-gradient(top, hsl(0,100%,20%) 0%,hsl(0,100%,33%) 100%);
+ background: -o-linear-gradient(top, hsl(0,100%,20%) 0%,hsl(0,100%,33%) 100%);
+ background: -ms-linear-gradient(top, hsl(0,100%,20%) 0%,hsl(0,100%,33%) 100%);
+ background: linear-gradient(to bottom, hsl(0,100%,20%) 0%,hsl(0,100%,33%) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#660000', endColorstr='#a80000',GradientType=0 );
+}
+
+.history .item.buildfail {
+ background: #282f33;
+ background: -moz-linear-gradient(top, hsl(202,12%,18%) 0%, hsl(208,5%,48%) 100%);
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,hsl(202,12%,18%)), color-stop(100%,hsl(208,5%,48%)));
+ background: -webkit-linear-gradient(top, hsl(202,12%,18%) 0%,hsl(208,5%,48%) 100%);
+ background: -o-linear-gradient(top, hsl(202,12%,18%) 0%,hsl(208,5%,48%) 100%);
+ background: -ms-linear-gradient(top, hsl(202,12%,18%) 0%,hsl(208,5%,48%) 100%);
+ background: linear-gradient(to bottom, hsl(202,12%,18%) 0%,hsl(208,5%,48%) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#282f33', endColorstr='#757c82',GradientType=0 );
+}
+
+.enum {
+ border-color: #2B597F;
+}
+
+.enum > li {
+ border-left-color: #2B597F;
+}
+
+.enum > li:hover {
+ background: rgba(55, 114, 163, .25);
+}
+
+.group {
+ background: -moz-linear-gradient(top, rgba(16,59,71,0) 0%, rgba(16,59,71,1) 100%);
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,rgba(16,59,71,0)), color-stop(100%,rgba(16,59,71,1)));
+ background: -webkit-linear-gradient(top, rgba(16,59,71,0) 0%,rgba(16,59,71,1) 100%);
+ background: -o-linear-gradient(top, rgba(16,59,71,0) 0%,rgba(16,59,71,1) 100%);
+ background: -ms-linear-gradient(top, rgba(16,59,71,0) 0%,rgba(16,59,71,1) 100%);
+ background: linear-gradient(to bottom, rgba(16,59,71,0) 0%,rgba(16,59,71,1) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#00103b47', endColorstr='#103b47',GradientType=0 );
+}
+
+.stats {
+ color: #FFF;
+}
+
+.error {
+ color: #F58888 !important;
+ background: rgba(255, 45, 45, 0.35) !important;
+}
+
+.spin-slowly,
+.spin-once {
+ color: #33B5E5 !important;
+}
+
+.frame .col,
+footer {
+ border-color: #33B5E5;
+}
+
+footer {
+ background: rgba(0, 0, 0, .5);
+}
+
+footer .recording .fa {
+ color: #CC0000;
+}
+
+footer .replay .fa {
+ color: #33B5E5;
+}
+
+footer .paused .fa {
+ color: #AAA;
+}
+
+footer .recording.replay .fa {
+ color: #33B5E5;
+}
+
+
+
+.buildfail-pkg {
+ background: rgba(255, 255, 255, .1);
+}
+.buildfail-output {
+ background: rgba(255, 255, 255, .2);
+}
+
+
+
+.panic-pkg {
+ background: rgba(255, 0, 0, .3);
+}
+.panic-story {
+ padding: 10px;
+ background: rgba(255, 0, 0, .1);
+}
+.panic-story a,
+.panic-summary {
+ color: #E94A4A;
+}
+.panic-output {
+ color: #FF8181;
+}
+
+
+
+.failure-pkg {
+ background: rgba(255, 153, 0, .42);
+}
+.failure-story {
+ padding: 10px;
+ background: rgba(255, 153, 0, .1);
+}
+.failure-story a {
+ color: #FFB518;
+}
+.failure-output {
+ color: #FFBD47;
+}
+.failure-file {
+ color: #FFF;
+}
+
+
+.diffviewer td {
+ border-color: rgba(0, 0, 0, .3);
+}
+
+/* prettyTextDiff expected/deleted colors */
+.diffviewer .exp,
+.diff del {
+ background: rgba(131, 252, 131, 0.22);
+}
+
+/* prettyTextDiff actual/inserted colors */
+.diffviewer .act,
+.diff ins {
+ background: rgba(255, 52, 52, 0.33);
+}
+
+
+
+.story-links a,
+.test-name-link a {
+ color: inherit;
+}
+
+
+
+.story-pkg {
+ background: rgba(0, 0, 0, .4);
+}
+
+.story-pkg:hover {
+ background: rgba(255, 255, 255, .05);
+}
+
+.story-line + .story-line {
+ border-top: 1px dashed rgba(255, 255, 255, .08);
+}
+
+.story-line-desc .message {
+ color: #999;
+}
+
+.story-line-summary-container {
+ border-right: 1px dashed #333;
+}
+
+.story-line.ok .story-line-status { background: #008000; }
+.story-line.ok:hover, .story-line.ok.story-line-sel { background: rgba(0, 128, 0, .1); }
+
+.story-line.fail .story-line-status { background: #EA9C4D; }
+.story-line.fail:hover, .story-line.fail.story-line-sel { background: rgba(234, 156, 77, .1); }
+
+.story-line.panic .story-line-status { background: #FF3232; }
+.story-line.panic:hover, .story-line.panic.story-line-sel { background: rgba(255, 50, 50, .1); }
+
+.story-line.skip .story-line-status { background: #AAA; }
+.story-line.skip:hover, .story-line.skip.story-line-sel { background: rgba(255, 255, 255, .1); }
+
+.statusicon.ok { color: #76C13C; }
+.statusicon.fail, .fail-clr { color: #EA9C4D; }
+.statusicon.panic, .statusicon.panic .fa, .panic-clr { color: #FF3232; }
+.statusicon.skip, .skip-clr { color: #888; }
+
+
+.log .timestamp {
+ color: #999;
+}
+
+
+.clr-red {
+ color: #FF2222;
+}
+
+
+.tipsy-inner {
+ background-color: #FAFAFA;
+ color: #222;
+}
+
+.tipsy-arrow {
+ border: 8px dashed #FAFAFA;
+}
+
+.tipsy-arrow-n,
+.tipsy-arrow-s,
+.tipsy-arrow-e,
+.tipsy-arrow-w
+{
+ border-color: #FAFAFA;
+}
+
+/***************************************************************/
+/*************************** Tweaks ****************************/
+/***************************************************************/
+
+
+/* More space for stories */
+div#col-3 { display: none; } /* hides the log */
+div#col-2 { width: 85%; } /* fill it in with stories */
+
+/* Bigger Text */
+.story-line { font-size: 16px; }
+.story-line b { font-size: 20px; }
+td.story-pkg-name { font-size: 24px; }
+
+/* Smaller Header */
+div.overall { padding: 10px 0 0px; }
+.overall .status { font-size: 36px; }
+
+/***************************************************************/
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/dark.css b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/dark.css
new file mode 100644
index 00000000000..132e19dbf1d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/dark.css
@@ -0,0 +1,380 @@
+::-webkit-scrollbar {
+ width: 10px;
+ height: 10px;
+}
+
+::-webkit-scrollbar-corner {
+ background: transparent;
+}
+
+::-webkit-scrollbar-thumb {
+ background-color: rgba(255, 255, 255, .35);
+ border-radius: 10px;
+}
+
+body {
+ color: #D0D0D0;
+ background: fixed #040607;
+ background: fixed -moz-linear-gradient(top, hsl(200,27%,2%) 0%, hsl(203,29%,26%) 100%);
+ background: fixed -webkit-gradient(linear, left top, left bottom, color-stop(0%,hsl(200,27%,2%)), color-stop(100%,hsl(203,29%,26%)));
+ background: fixed -webkit-linear-gradient(top, hsl(200,27%,2%) 0%,hsl(203,29%,26%) 100%);
+ background: fixed -o-linear-gradient(top, hsl(200,27%,2%) 0%,hsl(203,29%,26%) 100%);
+ background: fixed -ms-linear-gradient(top, hsl(200,27%,2%) 0%,hsl(203,29%,26%) 100%);
+ background: fixed linear-gradient(to bottom, hsl(200,27%,2%) 0%,hsl(203,29%,26%) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#040607', endColorstr='#2f4756',GradientType=0 );
+}
+
+a,
+.toggle-all-pkg {
+ color: #247D9E;
+}
+
+a:hover,
+.toggle-all-pkg:hover {
+ color: #33B5E5;
+}
+
+input[type=text] {
+ border-bottom-color: #33B5E5;
+ color: #BBB;
+}
+
+::-webkit-input-placeholder {
+ color: #555;
+}
+:-moz-placeholder {
+ color: #555;
+}
+::-moz-placeholder {
+ color: #555;
+}
+:-ms-input-placeholder {
+ color: #555;
+}
+
+.overall {
+ /*
+ Using box-shadow here is not very performant but allows us
+ to animate the change of the background color much more easily.
+ This box-shadow is an ALTERNATIVE, not a supplement, to using gradients
+ in this case.
+ */
+ box-shadow: inset 0 150px 100px -110px rgba(0, 0, 0, .5);
+}
+
+.overall.ok {
+ background: #688E00;
+}
+
+.overall.fail {
+ background: #DB8700;
+}
+
+.overall.panic {
+ background: #A80000;
+}
+
+.overall.buildfail {
+ background: #A4A8AA;
+}
+
+.overall .status {
+ color: #EEE;
+}
+
+.server-down {
+ background: rgba(255, 45, 45, 0.55);
+ color: #FFF;
+}
+
+.toggler {
+ background: #132535;
+}
+
+.toggler:hover {
+ background: #1C374F;
+}
+
+.controls {
+ border-bottom: 1px solid #33B5E5;
+}
+
+.controls li {
+ color: #2A5A84;
+}
+
+.controls li:hover {
+ background: #132535;
+ color: #33B5E5;
+}
+
+.sel {
+ background: #33B5E5 !important;
+ color: #FFF !important;
+}
+
+.pkg-cover-name {
+ text-shadow: 1px 1px 0px #000;
+}
+
+.pkg-cover-name b,
+.story-pkg-name b {
+ color: #FFF;
+ font-weight: bold;
+}
+
+.pkg-cover:hover,
+.pkg-cover:hover b {
+ color: #FFF;
+}
+
+.expandable {
+ border-top-color: #33B5E5;
+}
+
+.expandable {
+ background: rgba(0, 0, 0, .2);
+}
+
+.history .item.ok {
+ background: #3f5400;
+ background: -moz-linear-gradient(top, hsl(75,100%,16%) 0%, hsl(76,100%,28%) 100%);
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,hsl(75,100%,16%)), color-stop(100%,hsl(76,100%,28%)));
+ background: -webkit-linear-gradient(top, hsl(75,100%,16%) 0%,hsl(76,100%,28%) 100%);
+ background: -o-linear-gradient(top, hsl(75,100%,16%) 0%,hsl(76,100%,28%) 100%);
+ background: -ms-linear-gradient(top, hsl(75,100%,16%) 0%,hsl(76,100%,28%) 100%);
+ background: linear-gradient(to bottom, hsl(75,100%,16%) 0%,hsl(76,100%,28%) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#3f5400', endColorstr='#698f00',GradientType=0 );
+}
+
+.history .item.fail {
+ background: #7f4e00;
+ background: -moz-linear-gradient(top, hsl(37,100%,25%) 0%, hsl(37,100%,43%) 100%);
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,hsl(37,100%,25%)), color-stop(100%,hsl(37,100%,43%)));
+ background: -webkit-linear-gradient(top, hsl(37,100%,25%) 0%,hsl(37,100%,43%) 100%);
+ background: -o-linear-gradient(top, hsl(37,100%,25%) 0%,hsl(37,100%,43%) 100%);
+ background: -ms-linear-gradient(top, hsl(37,100%,25%) 0%,hsl(37,100%,43%) 100%);
+ background: linear-gradient(to bottom, hsl(37,100%,25%) 0%,hsl(37,100%,43%) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#7f4e00', endColorstr='#db8700',GradientType=0 );
+}
+
+.history .item.panic {
+ background: #660000;
+ background: -moz-linear-gradient(top, hsl(0,100%,20%) 0%, hsl(0,100%,33%) 100%);
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,hsl(0,100%,20%)), color-stop(100%,hsl(0,100%,33%)));
+ background: -webkit-linear-gradient(top, hsl(0,100%,20%) 0%,hsl(0,100%,33%) 100%);
+ background: -o-linear-gradient(top, hsl(0,100%,20%) 0%,hsl(0,100%,33%) 100%);
+ background: -ms-linear-gradient(top, hsl(0,100%,20%) 0%,hsl(0,100%,33%) 100%);
+ background: linear-gradient(to bottom, hsl(0,100%,20%) 0%,hsl(0,100%,33%) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#660000', endColorstr='#a80000',GradientType=0 );
+}
+
+.history .item.buildfail {
+ background: #282f33;
+ background: -moz-linear-gradient(top, hsl(202,12%,18%) 0%, hsl(208,5%,48%) 100%);
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,hsl(202,12%,18%)), color-stop(100%,hsl(208,5%,48%)));
+ background: -webkit-linear-gradient(top, hsl(202,12%,18%) 0%,hsl(208,5%,48%) 100%);
+ background: -o-linear-gradient(top, hsl(202,12%,18%) 0%,hsl(208,5%,48%) 100%);
+ background: -ms-linear-gradient(top, hsl(202,12%,18%) 0%,hsl(208,5%,48%) 100%);
+ background: linear-gradient(to bottom, hsl(202,12%,18%) 0%,hsl(208,5%,48%) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#282f33', endColorstr='#757c82',GradientType=0 );
+}
+
+.enum {
+ border-color: #2B597F;
+}
+
+.enum > li {
+ border-left-color: #2B597F;
+}
+
+.enum > li:hover {
+ background: rgba(55, 114, 163, .25);
+}
+
+.group {
+ background: -moz-linear-gradient(top, rgba(16,59,71,0) 0%, rgba(16,59,71,1) 100%);
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,rgba(16,59,71,0)), color-stop(100%,rgba(16,59,71,1)));
+ background: -webkit-linear-gradient(top, rgba(16,59,71,0) 0%,rgba(16,59,71,1) 100%);
+ background: -o-linear-gradient(top, rgba(16,59,71,0) 0%,rgba(16,59,71,1) 100%);
+ background: -ms-linear-gradient(top, rgba(16,59,71,0) 0%,rgba(16,59,71,1) 100%);
+ background: linear-gradient(to bottom, rgba(16,59,71,0) 0%,rgba(16,59,71,1) 100%);
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#00103b47', endColorstr='#103b47',GradientType=0 );
+}
+
+.stats {
+ color: #FFF;
+}
+
+.error {
+ color: #F58888 !important;
+ background: rgba(255, 45, 45, 0.35) !important;
+}
+
+.spin-slowly,
+.spin-once {
+ color: #33B5E5 !important;
+}
+
+.frame .col,
+footer {
+ border-color: #33B5E5;
+}
+
+footer {
+ background: rgba(0, 0, 0, .5);
+}
+
+footer .recording .fa {
+ color: #CC0000;
+}
+
+footer .replay .fa {
+ color: #33B5E5;
+}
+
+footer .paused .fa {
+ color: #AAA;
+}
+
+footer .recording.replay .fa {
+ color: #33B5E5;
+}
+
+
+
+.buildfail-pkg {
+ background: rgba(255, 255, 255, .1);
+}
+.buildfail-output {
+ background: rgba(255, 255, 255, .2);
+}
+
+
+
+.panic-pkg {
+ background: rgba(255, 0, 0, .3);
+}
+.panic-story {
+ padding: 10px;
+ background: rgba(255, 0, 0, .1);
+}
+.panic-story a,
+.panic-summary {
+ color: #E94A4A;
+}
+.panic-output {
+ color: #FF8181;
+}
+
+
+
+.failure-pkg {
+ background: rgba(255, 153, 0, .42);
+}
+.failure-story {
+ padding: 10px;
+ background: rgba(255, 153, 0, .1);
+}
+.failure-story a {
+ color: #FFB518;
+}
+.failure-output {
+ color: #FFBD47;
+}
+.failure-file {
+ color: #FFF;
+}
+
+
+.diffviewer td {
+ border-color: rgba(0, 0, 0, .3);
+}
+
+/* prettyTextDiff expected/deleted colors */
+.diffviewer .exp,
+.diff del {
+ background: rgba(131, 252, 131, 0.22);
+}
+
+/* prettyTextDiff actual/inserted colors */
+.diffviewer .act,
+.diff ins {
+ background: rgba(255, 52, 52, 0.33);
+}
+
+
+
+.story-links a,
+.test-name-link a {
+ color: inherit;
+}
+
+
+
+.story-pkg {
+ background: rgba(0, 0, 0, .4);
+}
+
+.story-pkg:hover {
+ background: rgba(255, 255, 255, .05);
+}
+
+.story-line + .story-line {
+ border-top: 1px dashed rgba(255, 255, 255, .08);
+}
+
+.story-line-desc .message {
+ color: #999;
+}
+
+.story-line-summary-container {
+ border-right: 1px dashed #333;
+}
+
+.story-line.ok .story-line-status { background: #008000; }
+.story-line.ok:hover, .story-line.ok.story-line-sel { background: rgba(0, 128, 0, .1); }
+
+.story-line.fail .story-line-status { background: #EA9C4D; }
+.story-line.fail:hover, .story-line.fail.story-line-sel { background: rgba(234, 156, 77, .1); }
+
+.story-line.panic .story-line-status { background: #FF3232; }
+.story-line.panic:hover, .story-line.panic.story-line-sel { background: rgba(255, 50, 50, .1); }
+
+.story-line.skip .story-line-status { background: #AAA; }
+.story-line.skip:hover, .story-line.skip.story-line-sel { background: rgba(255, 255, 255, .1); }
+
+.statusicon.ok { color: #76C13C; }
+.statusicon.fail, .fail-clr { color: #EA9C4D; }
+.statusicon.panic, .statusicon.panic .fa, .panic-clr { color: #FF3232; }
+.statusicon.skip, .skip-clr { color: #888; }
+
+.ansi-green { color: #76C13C; }
+.ansi-yellow { color: #EA9C4D; }
+
+.log .timestamp {
+ color: #999;
+}
+
+
+.clr-red {
+ color: #FF2222;
+}
+
+
+.tipsy-inner {
+ background-color: #FAFAFA;
+ color: #222;
+}
+
+.tipsy-arrow {
+ border: 8px dashed #FAFAFA;
+}
+
+.tipsy-arrow-n,
+.tipsy-arrow-s,
+.tipsy-arrow-e,
+.tipsy-arrow-w
+{
+ border-color: #FAFAFA;
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/light.css b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/light.css
new file mode 100644
index 00000000000..decfc7f4135
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/themes/light.css
@@ -0,0 +1,328 @@
+::-webkit-scrollbar-thumb {
+ background-color: rgba(0, 0, 0, .35);
+ border-radius: 10px;
+}
+
+::-webkit-input-placeholder {
+ color: #CCC;
+}
+:-moz-placeholder {
+ color: #CCC;
+}
+::-moz-placeholder {
+ color: #CCC;
+}
+:-ms-input-placeholder {
+ color: #CCC;
+}
+
+body {
+ color: #444;
+ background: #F4F4F4;
+}
+
+a {
+ color: #247D9E;
+}
+
+a:hover {
+ color: #33B5E5;
+}
+
+.overall.ok,
+.history .item.ok {
+ background: #8CB700; /* Can't decide: #5AA02C */
+}
+
+.overall.fail,
+.history .item.fail {
+ background: #E79C07;
+}
+
+.overall.panic,
+.history .item.panic {
+ background: #BB0000;
+}
+
+.overall.buildfail,
+.history .item.buildfail {
+ background: #828c95;
+}
+
+.overall .status {
+ color: #EEE;
+}
+
+.server-down {
+ background: #BB0000;
+ color: #FFF;
+}
+
+.toggler {
+ background: #6887A3;
+ color: #FFF;
+}
+
+.toggler:hover {
+ background: #465B6D;
+}
+
+.toggler .fa {
+ color: #FFF;
+}
+
+#logo {
+ color: #6887A3;
+}
+
+.controls {
+ border-bottom: 1px solid #33B5E5;
+}
+
+li.fa,
+a.fa,
+.toggle-all-pkg {
+ color: #6887A3;
+}
+
+li.fa:hover,
+a.fa:hover,
+.toggle-all-pkg:hover {
+ color: #465B6D;
+}
+
+li.fa:active,
+a.fa:active,
+.toggle-all-pkg:active {
+ color: #33B5E5;
+}
+
+.controls li,
+.enum > li {
+ border-left-color: #33B5E5;
+}
+
+.controls li:hover,
+.enum > li:hover {
+ background: #CFE6F9;
+}
+
+.enum {
+ border-color: #33B5E5;
+}
+
+.sel {
+ background: #33B5E5 !important;
+ color: #FFF !important;
+}
+
+.pkg-cover-name b,
+.story-pkg-name b {
+ color: #000;
+ font-weight: bold;
+}
+
+.expandable {
+ background: rgba(0, 0, 0, .1);
+ border-top-color: #33B5E5;
+}
+
+.history .item {
+ color: #FFF;
+}
+
+.spin-slowly,
+.spin-once {
+ color: #33B5E5 !important;
+}
+
+
+input[type=text] {
+ border-bottom-color: #33B5E5;
+ color: #333;
+}
+
+.error {
+ color: #CC0000 !important;
+ background: #FFD2D2 !important;
+}
+
+
+footer {
+ background: #F4F4F4;
+}
+
+.frame .col,
+footer {
+ border-color: #33B5E5;
+}
+
+footer .recording .fa {
+ color: #CC0000;
+}
+
+footer .replay .fa {
+ color: #33B5E5;
+}
+
+footer .paused .fa {
+ color: #333;
+}
+
+
+.buildfail-pkg {
+ background: #CCC;
+}
+.buildfail-output {
+ background: #EEE;
+}
+
+
+
+.panic-pkg {
+ background: #E94D4D;
+ color: #FFF;
+}
+.panics .panic-details {
+ border: 5px solid #E94D4D;
+ border-top: 0;
+ border-bottom: 0;
+}
+.panic-details {
+ color: #CC0000;
+}
+.panics .panic:last-child .panic-details {
+ border-bottom: 5px solid #E94D4D;
+}
+.panic-story {
+ padding: 10px;
+}
+.panics .panic-output {
+ background: #FFF;
+}
+
+
+
+
+.failure-pkg {
+ background: #FFA300;
+ color: #FFF;
+}
+.failures .failure-details {
+ border: 5px solid #FFA300;
+ border-top: 0;
+ border-bottom: 0;
+}
+.failures .failure:last-child .failure-details {
+ border-bottom: 5px solid #FFA300;
+}
+.failure-story {
+ padding: 10px;
+ color: #A87A00;
+}
+.stories .failure-output {
+ color: #EA9C4D;
+}
+.failures .failure-output {
+ background: #FFF;
+}
+.failure-file {
+ color: #000;
+}
+
+.diffviewer td {
+ border-color: #CCC;
+ background: #FFF;
+}
+
+/* prettyTextDiff expected/deleted colors */
+.diffviewer .exp,
+.diff del {
+ background: #ADFFAD;
+}
+
+/* prettyTextDiff actual/inserted colors */
+.diffviewer .act,
+.diff ins {
+ background: #FFC0C0;
+}
+
+
+
+.story-links a,
+.test-name-link a {
+ color: inherit;
+}
+
+
+
+.story-pkg {
+ background: #E8E8E8;
+}
+
+.story-pkg:hover {
+ background: #DFDFDF;
+}
+
+.story-line {
+ background: #FFF;
+}
+
+.story-line-desc .message {
+ color: #888;
+}
+
+.story-line + .story-line {
+ border-top: 1px dashed #DDD;
+}
+
+.story-line-summary-container {
+ border-right: 1px dashed #DDD;
+}
+
+.story-line.ok .story-line-status { background: #8CB700; }
+.story-line.ok:hover, .story-line.ok.story-line-sel { background: #F4FFD8; }
+
+.story-line.fail .story-line-status { background: #E79C07; }
+.story-line.fail:hover, .story-line.fail.story-line-sel { background: #FFF1DB; }
+
+.story-line.panic .story-line-status { background: #DD0606; }
+.story-line.panic:hover, .story-line.panic.story-line-sel { background: #FFE8E8; }
+
+.story-line.skip .story-line-status { background: #4E4E4E; }
+.story-line.skip:hover, .story-line.skip.story-line-sel { background: #F2F2F2; }
+
+.statusicon.ok { color: #76C13C; }
+.statusicon.fail, .fail-clr { color: #EA9C4D; }
+.statusicon.panic, .statusicon.panic .fa, .panic-clr { color: #FF3232; }
+.statusicon.skip, .skip-clr { color: #AAA; }
+
+.ansi-green { color: #76C13C; }
+.ansi-yellow { color: #EA9C4D; }
+
+.log .timestamp {
+ color: #999;
+}
+
+.clr-red,
+a.clr-red {
+ color: #CC0000;
+}
+
+
+.tipsy-inner {
+ background-color: #000;
+ color: #FFF;
+}
+
+.tipsy-arrow {
+ border: 8px dashed #000;
+}
+
+.tipsy-arrow-n,
+.tipsy-arrow-s,
+.tipsy-arrow-e,
+.tipsy-arrow-w
+{
+ border-color: #000;
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/tipsy.css b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/tipsy.css
new file mode 100644
index 00000000000..25d261a4ff5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/css/tipsy.css
@@ -0,0 +1,97 @@
+.tipsy {
+ font-size: 12px;
+ position: absolute;
+ padding: 8px;
+ z-index: 100000;
+ font-family: 'Open Sans';
+ line-height: 1.25em;
+}
+
+.tipsy-inner {
+ max-width: 200px;
+ padding: 5px 7px;
+ text-align: center;
+}
+
+/* Rounded corners */
+/*.tipsy-inner { border-radius: 3px; -moz-border-radius: 3px; -webkit-border-radius: 3px; }*/
+
+/* Shadow */
+/*.tipsy-inner { box-shadow: 0 0 5px #000000; -webkit-box-shadow: 0 0 5px #000000; -moz-box-shadow: 0 0 5px #000000; }*/
+
+.tipsy-arrow {
+ position: absolute;
+ width: 0;
+ height: 0;
+ line-height: 0;
+}
+
+.tipsy-n .tipsy-arrow,
+.tipsy-nw .tipsy-arrow,
+.tipsy-ne .tipsy-arrow {
+ border-bottom-style: solid;
+ border-top: none;
+ border-left-color: transparent;
+ border-right-color: transparent;
+}
+
+
+.tipsy-n .tipsy-arrow {
+ top: 0px;
+ left: 50%;
+ margin-left: -7px;
+}
+.tipsy-nw .tipsy-arrow {
+ top: 0;
+ left: 10px;
+}
+.tipsy-ne .tipsy-arrow {
+ top: 0;
+ right: 10px;
+}
+
+.tipsy-s .tipsy-arrow,
+.tipsy-sw .tipsy-arrow,
+.tipsy-se .tipsy-arrow {
+ border-top-style: solid;
+ border-bottom: none;
+ border-left-color: transparent;
+ border-right-color: transparent;
+}
+
+
+.tipsy-s .tipsy-arrow {
+ bottom: 0;
+ left: 50%;
+ margin-left: -7px;
+}
+
+.tipsy-sw .tipsy-arrow {
+ bottom: 0;
+ left: 10px;
+}
+
+.tipsy-se .tipsy-arrow {
+ bottom: 0;
+ right: 10px;
+}
+
+.tipsy-e .tipsy-arrow {
+ right: 0;
+ top: 50%;
+ margin-top: -7px;
+ border-left-style: solid;
+ border-right: none;
+ border-top-color: transparent;
+ border-bottom-color: transparent;
+}
+
+.tipsy-w .tipsy-arrow {
+ left: 0;
+ top: 50%;
+ margin-top: -7px;
+ border-right-style: solid;
+ border-left: none;
+ border-top-color: transparent;
+ border-bottom-color: transparent;
+}
\ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/FontAwesome.otf b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/FontAwesome.otf
new file mode 100755
index 00000000000..3461e3fce6a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/FontAwesome.otf
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.eot b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.eot
new file mode 100755
index 00000000000..6cfd5660956
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.eot
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.svg b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.svg
new file mode 100755
index 00000000000..a9f84695031
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.svg
@@ -0,0 +1,504 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
+<svg xmlns="http://www.w3.org/2000/svg">
+<metadata></metadata>
+<defs>
+<font id="fontawesomeregular" horiz-adv-x="1536" >
+<font-face units-per-em="1792" ascent="1536" descent="-256" />
+<missing-glyph horiz-adv-x="448" />
+<glyph unicode=" " horiz-adv-x="448" />
+<glyph unicode="&#x09;" horiz-adv-x="448" />
+<glyph unicode="&#xa0;" horiz-adv-x="448" />
+<glyph unicode="&#xa8;" horiz-adv-x="1792" />
+<glyph unicode="&#xa9;" horiz-adv-x="1792" />
+<glyph unicode="&#xae;" horiz-adv-x="1792" />
+<glyph unicode="&#xb4;" horiz-adv-x="1792" />
+<glyph unicode="&#xc6;" horiz-adv-x="1792" />
+<glyph unicode="&#xd8;" horiz-adv-x="1792" />
+<glyph unicode="&#x2000;" horiz-adv-x="768" />
+<glyph unicode="&#x2001;" horiz-adv-x="1537" />
+<glyph unicode="&#x2002;" horiz-adv-x="768" />
+<glyph unicode="&#x2003;" horiz-adv-x="1537" />
+<glyph unicode="&#x2004;" horiz-adv-x="512" />
+<glyph unicode="&#x2005;" horiz-adv-x="384" />
+<glyph unicode="&#x2006;" horiz-adv-x="256" />
+<glyph unicode="&#x2007;" horiz-adv-x="256" />
+<glyph unicode="&#x2008;" horiz-adv-x="192" />
+<glyph unicode="&#x2009;" horiz-adv-x="307" />
+<glyph unicode="&#x200a;" horiz-adv-x="85" />
+<glyph unicode="&#x202f;" horiz-adv-x="307" />
+<glyph unicode="&#x205f;" horiz-adv-x="384" />
+<glyph unicode="&#x2122;" horiz-adv-x="1792" />
+<glyph unicode="&#x221e;" horiz-adv-x="1792" />
+<glyph unicode="&#x2260;" horiz-adv-x="1792" />
+<glyph unicode="&#x25fc;" horiz-adv-x="500" d="M0 0z" />
+<glyph unicode="&#xf000;" horiz-adv-x="1792" d="M93 1350q0 23 18 36.5t38 17.5t43 4h1408q23 0 43 -4t38 -17.5t18 -36.5q0 -35 -43 -78l-632 -632v-768h320q26 0 45 -19t19 -45t-19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45t45 19h320v768l-632 632q-43 43 -43 78z" />
+<glyph unicode="&#xf001;" d="M0 -64q0 50 34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v967q0 31 19 56.5t49 35.5l832 256q12 4 28 4q40 0 68 -28t28 -68v-1120q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89t34 89t86 60.5t103.5 32t96.5 10.5 q105 0 192 -39v537l-768 -237v-709q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89z" />
+<glyph unicode="&#xf002;" horiz-adv-x="1664" d="M0 704q0 143 55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90q0 -52 -38 -90t-90 -38q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5z M256 704q0 -185 131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5z" />
+<glyph unicode="&#xf003;" horiz-adv-x="1792" d="M0 32v1088q0 66 47 113t113 47h1472q66 0 113 -47t47 -113v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113zM128 32q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5v768q-32 -36 -69 -66q-268 -206 -426 -338q-51 -43 -83 -67t-86.5 -48.5 t-102.5 -24.5h-1h-1q-48 0 -102.5 24.5t-86.5 48.5t-83 67q-158 132 -426 338q-37 30 -69 66v-768zM128 1120q0 -168 147 -284q193 -152 401 -317q6 -5 35 -29.5t46 -37.5t44.5 -31.5t50.5 -27.5t43 -9h1h1q20 0 43 9t50.5 27.5t44.5 31.5t46 37.5t35 29.5q208 165 401 317 q54 43 100.5 115.5t46.5 131.5v11v13.5t-0.5 13t-3 12.5t-5.5 9t-9 7.5t-14 2.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5z" />
+<glyph unicode="&#xf004;" horiz-adv-x="1792" d="M0 940q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5q224 0 351 -124t127 -344q0 -221 -229 -450l-623 -600q-18 -18 -44 -18t-44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138z " />
+<glyph unicode="&#xf005;" horiz-adv-x="1664" d="M0 889q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455l502 -73q56 -9 56 -46q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -21 -10.5 -35.5t-30.5 -14.5q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500 l-364 354q-25 27 -25 48z" />
+<glyph unicode="&#xf006;" horiz-adv-x="1664" d="M0 889q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455l502 -73q56 -9 56 -46q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -50 -41 -50q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354 q-25 27 -25 48zM221 829l306 -297l-73 -421l378 199l377 -199l-72 421l306 297l-422 62l-189 382l-189 -382z" />
+<glyph unicode="&#xf007;" horiz-adv-x="1408" d="M0 131q0 53 3.5 103.5t14 109t26.5 108.5t43 97.5t62 81t85.5 53.5t111.5 20q9 0 42 -21.5t74.5 -48t108 -48t133.5 -21.5t133.5 21.5t108 48t74.5 48t42 21.5q61 0 111.5 -20t85.5 -53.5t62 -81t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5q0 -120 -73 -189.5t-194 -69.5 h-874q-121 0 -194 69.5t-73 189.5zM320 1024q0 159 112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5t-112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5z" />
+<glyph unicode="&#xf008;" horiz-adv-x="1920" d="M0 -96v1344q0 66 47 113t113 47h1600q66 0 113 -47t47 -113v-1344q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113zM128 64v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45zM128 320q0 -26 19 -45t45 -19h128 q26 0 45 19t19 45v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128zM128 704q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128zM128 1088q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v128q0 26 -19 45t-45 19 h-128q-26 0 -45 -19t-19 -45v-128zM512 -64q0 -26 19 -45t45 -19h768q26 0 45 19t19 45v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512zM512 704q0 -26 19 -45t45 -19h768q26 0 45 19t19 45v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512zM1536 64 v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45zM1536 320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128zM1536 704q0 -26 19 -45t45 -19h128q26 0 45 19t19 45 v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128zM1536 1088q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128z" />
+<glyph unicode="&#xf009;" horiz-adv-x="1664" d="M0 128v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90zM0 896v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90zM896 128v384q0 52 38 90t90 38h512q52 0 90 -38 t38 -90v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90zM896 896v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90z" />
+<glyph unicode="&#xf00a;" horiz-adv-x="1792" d="M0 96v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM0 608v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM0 1120v192q0 40 28 68t68 28h320q40 0 68 -28 t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM640 96v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM640 608v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-320 q-40 0 -68 28t-28 68zM640 1120v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM1280 96v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM1280 608v192 q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM1280 1120v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68z" />
+<glyph unicode="&#xf00b;" horiz-adv-x="1792" d="M0 96v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM0 608v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM0 1120v192q0 40 28 68t68 28h320q40 0 68 -28 t28 -68v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM640 96v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68zM640 608v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-960 q-40 0 -68 28t-28 68zM640 1120v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68z" />
+<glyph unicode="&#xf00c;" horiz-adv-x="1792" d="M121 608q0 40 28 68l136 136q28 28 68 28t68 -28l294 -295l656 657q28 28 68 28t68 -28l136 -136q28 -28 28 -68t-28 -68l-724 -724l-136 -136q-28 -28 -68 -28t-68 28l-136 136l-362 362q-28 28 -28 68z" />
+<glyph unicode="&#xf00d;" horiz-adv-x="1408" d="M110 214q0 40 28 68l294 294l-294 294q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -294l294 294q28 28 68 28t68 -28l136 -136q28 -28 28 -68t-28 -68l-294 -294l294 -294q28 -28 28 -68t-28 -68l-136 -136q-28 -28 -68 -28t-68 28l-294 294l-294 -294 q-28 -28 -68 -28t-68 28l-136 136q-28 28 -28 68z" />
+<glyph unicode="&#xf00e;" horiz-adv-x="1664" d="M0 704q0 143 55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90t-37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5z M256 704q0 -185 131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5zM384 672v64q0 13 9.5 22.5t22.5 9.5h224v224q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-224h224q13 0 22.5 -9.5t9.5 -22.5v-64 q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-224q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v224h-224q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf010;" horiz-adv-x="1664" d="M0 704q0 143 55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90t-37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5z M256 704q0 -185 131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5zM384 672v64q0 13 9.5 22.5t22.5 9.5h576q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-576q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf011;" d="M0 640q0 182 80.5 343t226.5 270q43 32 95.5 25t83.5 -50q32 -42 24.5 -94.5t-49.5 -84.5q-98 -74 -151.5 -181t-53.5 -228q0 -104 40.5 -198.5t109.5 -163.5t163.5 -109.5t198.5 -40.5t198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5q0 121 -53.5 228t-151.5 181 q-42 32 -49.5 84.5t24.5 94.5q31 43 84 50t95 -25q146 -109 226.5 -270t80.5 -343q0 -156 -61 -298t-164 -245t-245 -164t-298 -61t-298 61t-245 164t-164 245t-61 298zM640 768v640q0 52 38 90t90 38t90 -38t38 -90v-640q0 -52 -38 -90t-90 -38t-90 38t-38 90z" />
+<glyph unicode="&#xf012;" horiz-adv-x="1792" d="M0 -96v192q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23zM384 -96v320q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-320q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23zM768 -96v576q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-576 q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23zM1152 -96v960q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-960q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23zM1536 -96v1472q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1472q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf013;" d="M0 531v222q0 12 8 23t19 13l186 28q14 46 39 92q-40 57 -107 138q-10 12 -10 24q0 10 9 23q26 36 98.5 107.5t94.5 71.5q13 0 26 -10l138 -107q44 23 91 38q16 136 29 186q7 28 36 28h222q14 0 24.5 -8.5t11.5 -21.5l28 -184q49 -16 90 -37l142 107q9 9 24 9q13 0 25 -10 q129 -119 165 -170q7 -8 7 -22q0 -12 -8 -23q-15 -21 -51 -66.5t-54 -70.5q26 -50 41 -98l183 -28q13 -2 21 -12.5t8 -23.5v-222q0 -12 -8 -23t-20 -13l-185 -28q-19 -54 -39 -91q35 -50 107 -138q10 -12 10 -25t-9 -23q-27 -37 -99 -108t-94 -71q-12 0 -26 9l-138 108 q-44 -23 -91 -38q-16 -136 -29 -186q-7 -28 -36 -28h-222q-14 0 -24.5 8.5t-11.5 21.5l-28 184q-49 16 -90 37l-141 -107q-10 -9 -25 -9q-14 0 -25 11q-126 114 -165 168q-7 10 -7 23q0 12 8 23q15 21 51 66.5t54 70.5q-27 50 -41 99l-183 27q-13 2 -21 12.5t-8 23.5z M512 640q0 -106 75 -181t181 -75t181 75t75 181t-75 181t-181 75t-181 -75t-75 -181z" />
+<glyph unicode="&#xf014;" horiz-adv-x="1408" d="M0 1056v64q0 14 9 23t23 9h309l70 167q15 37 54 63t79 26h320q40 0 79 -26t54 -63l70 -167h309q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-96v-948q0 -83 -47 -143.5t-113 -60.5h-832q-66 0 -113 58.5t-47 141.5v952h-96q-14 0 -23 9t-9 23zM256 76q0 -22 7 -40.5 t14.5 -27t10.5 -8.5h832q3 0 10.5 8.5t14.5 27t7 40.5v948h-896v-948zM384 224v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23zM480 1152h448l-48 117q-7 9 -17 11h-317q-10 -2 -17 -11zM640 224v576q0 14 9 23t23 9h64 q14 0 23 -9t9 -23v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23zM896 224v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf015;" horiz-adv-x="1664" d="M26 636.5q1 13.5 11 21.5l719 599q32 26 76 26t76 -26l244 -204v195q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-408l219 -182q10 -8 11 -21.5t-7 -23.5l-62 -74q-8 -9 -21 -11h-3q-13 0 -21 7l-692 577l-692 -577q-12 -8 -24 -7q-13 2 -21 11l-62 74q-8 10 -7 23.5zM256 64 v480q0 1 0.5 3t0.5 3l575 474l575 -474q1 -2 1 -6v-480q0 -26 -19 -45t-45 -19h-384v384h-256v-384h-384q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf016;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536zM1024 1024h376q-10 29 -22 41l-313 313q-12 12 -41 22 v-376z" />
+<glyph unicode="&#xf017;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273zM512 544v64q0 14 9 23t23 9h224v352q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf018;" horiz-adv-x="1920" d="M50 73q0 54 26 116l417 1044q8 19 26 33t38 14h339q-13 0 -23 -9.5t-11 -22.5l-15 -192q-1 -14 8 -23t22 -9h166q13 0 22 9t8 23l-15 192q-1 13 -11 22.5t-23 9.5h339q20 0 38 -14t26 -33l417 -1044q26 -62 26 -116q0 -73 -46 -73h-704q13 0 22 9.5t8 22.5l-20 256 q-1 13 -11 22.5t-23 9.5h-272q-13 0 -23 -9.5t-11 -22.5l-20 -256q-1 -13 8 -22.5t22 -9.5h-704q-46 0 -46 73zM809 540q-1 -12 8 -20t21 -8h244q12 0 21 8t8 20v4l-24 320q-1 13 -11 22.5t-23 9.5h-186q-13 0 -23 -9.5t-11 -22.5l-24 -320v-4z" />
+<glyph unicode="&#xf019;" horiz-adv-x="1664" d="M0 96v320q0 40 28 68t68 28h465l135 -136q58 -56 136 -56t136 56l136 136h464q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68zM325 985q17 39 59 39h256v448q0 26 19 45t45 19h256q26 0 45 -19t19 -45v-448h256q42 0 59 -39q17 -41 -14 -70 l-448 -448q-18 -19 -45 -19t-45 19l-448 448q-31 29 -14 70zM1152 192q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45zM1408 192q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45z" />
+<glyph unicode="&#xf01a;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273zM418 620q8 20 30 20h192v352q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-352h192q14 0 23 -9t9 -23q0 -12 -10 -24l-319 -319q-11 -9 -23 -9t-23 9l-320 320q-15 16 -7 35z" />
+<glyph unicode="&#xf01b;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273zM416 672q0 12 10 24l319 319q11 9 23 9t23 -9l320 -320q15 -16 7 -35q-8 -20 -30 -20h-192v-352q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v352h-192q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf01c;" d="M0 64v482q0 62 25 123l238 552q10 25 36.5 42t52.5 17h832q26 0 52.5 -17t36.5 -42l238 -552q25 -61 25 -123v-482q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45zM197 576h316l95 -192h320l95 192h316q-1 3 -2.5 8t-2.5 8l-212 496h-708l-212 -496q-1 -2 -2.5 -8 t-2.5 -8z" />
+<glyph unicode="&#xf01d;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273zM512 320v640q0 37 32 56q33 18 64 -1l544 -320q32 -18 32 -55t-32 -55l-544 -320q-15 -9 -32 -9q-16 0 -32 8q-32 19 -32 56z" />
+<glyph unicode="&#xf01e;" d="M0 640q0 156 61 298t164 245t245 164t298 61q147 0 284.5 -55.5t244.5 -156.5l130 129q29 31 70 14q39 -17 39 -59v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l138 138q-148 137 -349 137q-104 0 -198.5 -40.5t-163.5 -109.5t-109.5 -163.5 t-40.5 -198.5t40.5 -198.5t109.5 -163.5t163.5 -109.5t198.5 -40.5q119 0 225 52t179 147q7 10 23 12q14 0 25 -9l137 -138q9 -8 9.5 -20.5t-7.5 -22.5q-109 -132 -264 -204.5t-327 -72.5q-156 0 -298 61t-245 164t-164 245t-61 298z" />
+<glyph unicode="&#xf021;" d="M0 0v448q0 26 19 45t45 19h448q26 0 45 -19t19 -45t-19 -45l-137 -137q71 -66 161 -102t187 -36q134 0 250 65t186 179q11 17 53 117q8 23 30 23h192q13 0 22.5 -9.5t9.5 -22.5q0 -5 -1 -7q-64 -268 -268 -434.5t-478 -166.5q-146 0 -282.5 55t-243.5 157l-129 -129 q-19 -19 -45 -19t-45 19t-19 45zM18 800v7q65 268 270 434.5t480 166.5q146 0 284 -55.5t245 -156.5l130 129q19 19 45 19t45 -19t19 -45v-448q0 -26 -19 -45t-45 -19h-448q-26 0 -45 19t-19 45t19 45l138 138q-148 137 -349 137q-134 0 -250 -65t-186 -179 q-11 -17 -53 -117q-8 -23 -30 -23h-199q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf022;" horiz-adv-x="1792" d="M0 160v1088q0 66 47 113t113 47h1472q66 0 113 -47t47 -113v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113zM128 160q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5v832q0 13 -9.5 22.5t-22.5 9.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5v-832z M256 288v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM256 544v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5z M256 800v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM512 288v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5z M512 544v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5zM512 800v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5z " />
+<glyph unicode="&#xf023;" horiz-adv-x="1152" d="M0 96v576q0 40 28 68t68 28h32v192q0 184 132 316t316 132t316 -132t132 -316v-192h32q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68zM320 768h512v192q0 106 -75 181t-181 75t-181 -75t-75 -181v-192z" />
+<glyph unicode="&#xf024;" horiz-adv-x="1792" d="M64 1280q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5q0 -72 -64 -110v-1266q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v1266q-64 38 -64 110zM320 320v742q0 32 31 55q21 14 79 43q236 120 421 120q107 0 200 -29t219 -88q38 -19 88 -19 q54 0 117.5 21t110 47t88 47t54.5 21q26 0 45 -19t19 -45v-763q0 -25 -12.5 -38.5t-39.5 -27.5q-215 -116 -369 -116q-61 0 -123.5 22t-108.5 48t-115.5 48t-142.5 22q-192 0 -464 -146q-17 -9 -33 -9q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf025;" horiz-adv-x="1664" d="M0 650q0 151 67 291t179 242.5t266 163.5t320 61t320 -61t266 -163.5t179 -242.5t67 -291q0 -166 -60 -314l-20 -49l-185 -33q-22 -83 -90.5 -136.5t-156.5 -53.5v-32q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-32 q71 0 130 -35.5t93 -95.5l68 12q29 95 29 193q0 148 -88 279t-236.5 209t-315.5 78t-315.5 -78t-236.5 -209t-88 -279q0 -98 29 -193l68 -12q34 60 93 95.5t130 35.5v32q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v32 q-88 0 -156.5 53.5t-90.5 136.5l-185 33l-20 49q-60 148 -60 314z" />
+<glyph unicode="&#xf026;" horiz-adv-x="768" d="M0 448v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf027;" horiz-adv-x="1152" d="M0 448v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45zM908 464q0 21 12 35.5t29 25t34 23t29 35.5t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5 q15 0 25 -5q70 -27 112.5 -93t42.5 -142t-42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5z" />
+<glyph unicode="&#xf028;" horiz-adv-x="1664" d="M0 448v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45zM908 464q0 21 12 35.5t29 25t34 23t29 35.5t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5 q15 0 25 -5q70 -27 112.5 -93t42.5 -142t-42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5zM1008 228q0 39 39 59q56 29 76 44q74 54 115.5 135.5t41.5 173.5t-41.5 173.5t-115.5 135.5q-20 15 -76 44q-39 20 -39 59q0 26 19 45t45 19q13 0 26 -5 q140 -59 225 -188.5t85 -282.5t-85 -282.5t-225 -188.5q-13 -5 -25 -5q-27 0 -46 19t-19 45zM1109 -7q0 36 39 59q7 4 22.5 10.5t22.5 10.5q46 25 82 51q123 91 192 227t69 289t-69 289t-192 227q-36 26 -82 51q-7 4 -22.5 10.5t-22.5 10.5q-39 23 -39 59q0 26 19 45t45 19 q13 0 26 -5q211 -91 338 -283.5t127 -422.5t-127 -422.5t-338 -283.5q-13 -5 -26 -5q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf029;" horiz-adv-x="1408" d="M0 0v640h640v-640h-640zM0 768v640h640v-640h-640zM128 129h384v383h-384v-383zM128 896h384v384h-384v-384zM256 256v128h128v-128h-128zM256 1024v128h128v-128h-128zM768 0v640h384v-128h128v128h128v-384h-384v128h-128v-384h-128zM768 768v640h640v-640h-640z M896 896h384v384h-384v-384zM1024 0v128h128v-128h-128zM1024 1024v128h128v-128h-128zM1280 0v128h128v-128h-128z" />
+<glyph unicode="&#xf02a;" horiz-adv-x="1792" d="M0 0v1408h63v-1408h-63zM94 1v1407h32v-1407h-32zM189 1v1407h31v-1407h-31zM346 1v1407h31v-1407h-31zM472 1v1407h62v-1407h-62zM629 1v1407h31v-1407h-31zM692 1v1407h31v-1407h-31zM755 1v1407h31v-1407h-31zM880 1v1407h63v-1407h-63zM1037 1v1407h63v-1407h-63z M1163 1v1407h63v-1407h-63zM1289 1v1407h63v-1407h-63zM1383 1v1407h63v-1407h-63zM1541 1v1407h94v-1407h-94zM1666 1v1407h32v-1407h-32zM1729 0v1408h63v-1408h-63z" />
+<glyph unicode="&#xf02b;" d="M0 864v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5l715 -714q37 -39 37 -91q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117zM192 1088q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5 t-90.5 -37.5t-37.5 -90.5z" />
+<glyph unicode="&#xf02c;" horiz-adv-x="1920" d="M0 864v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5l715 -714q37 -39 37 -91q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117zM192 1088q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5 t-90.5 -37.5t-37.5 -90.5zM704 1408h224q53 0 117 -26.5t102 -64.5l715 -714q37 -39 37 -91q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-36 0 -59 14t-53 45l470 470q37 37 37 90q0 52 -37 91l-715 714q-38 38 -102 64.5t-117 26.5z" />
+<glyph unicode="&#xf02d;" horiz-adv-x="1664" d="M10 184q0 4 3 27t4 37q1 8 -3 21.5t-3 19.5q2 11 8 21t16.5 23.5t16.5 23.5q23 38 45 91.5t30 91.5q3 10 0.5 30t-0.5 28q3 11 17 28t17 23q21 36 42 92t25 90q1 9 -2.5 32t0.5 28q4 13 22 30.5t22 22.5q19 26 42.5 84.5t27.5 96.5q1 8 -3 25.5t-2 26.5q2 8 9 18t18 23 t17 21q8 12 16.5 30.5t15 35t16 36t19.5 32t26.5 23.5t36 11.5t47.5 -5.5l-1 -3q38 9 51 9h761q74 0 114 -56t18 -130l-274 -906q-36 -119 -71.5 -153.5t-128.5 -34.5h-869q-27 0 -38 -15q-11 -16 -1 -43q24 -70 144 -70h923q29 0 56 15.5t35 41.5l300 987q7 22 5 57 q38 -15 59 -43q40 -57 18 -129l-275 -906q-19 -64 -76.5 -107.5t-122.5 -43.5h-923q-77 0 -148.5 53.5t-99.5 131.5q-24 67 -2 127zM492 800q-4 -13 2 -22.5t20 -9.5h608q13 0 25.5 9.5t16.5 22.5l21 64q4 13 -2 22.5t-20 9.5h-608q-13 0 -25.5 -9.5t-16.5 -22.5zM575 1056 q-4 -13 2 -22.5t20 -9.5h608q13 0 25.5 9.5t16.5 22.5l21 64q4 13 -2 22.5t-20 9.5h-608q-13 0 -25.5 -9.5t-16.5 -22.5z" />
+<glyph unicode="&#xf02e;" horiz-adv-x="1280" d="M0 7v1289q0 34 19.5 62t52.5 41q21 9 44 9h1048q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62z" />
+<glyph unicode="&#xf02f;" horiz-adv-x="1664" d="M0 160v416q0 79 56.5 135.5t135.5 56.5h64v544q0 40 28 68t68 28h672q40 0 88 -20t76 -48l152 -152q28 -28 48 -76t20 -88v-256h64q79 0 135.5 -56.5t56.5 -135.5v-416q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-160q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v160h-224 q-13 0 -22.5 9.5t-9.5 22.5zM384 0h896v256h-896v-256zM384 640h896v384h-160q-40 0 -68 28t-28 68v160h-640v-640zM1408 576q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45z" />
+<glyph unicode="&#xf030;" horiz-adv-x="1920" d="M0 128v896q0 106 75 181t181 75h224l51 136q19 49 69.5 84.5t103.5 35.5h512q53 0 103.5 -35.5t69.5 -84.5l51 -136h224q106 0 181 -75t75 -181v-896q0 -106 -75 -181t-181 -75h-1408q-106 0 -181 75t-75 181zM512 576q0 -185 131.5 -316.5t316.5 -131.5t316.5 131.5 t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5zM672 576q0 119 84.5 203.5t203.5 84.5t203.5 -84.5t84.5 -203.5t-84.5 -203.5t-203.5 -84.5t-203.5 84.5t-84.5 203.5z" />
+<glyph unicode="&#xf031;" horiz-adv-x="1664" d="M0 -128l2 79q23 7 56 12.5t57 10.5t49.5 14.5t44.5 29t31 50.5l237 616l280 724h75h53q8 -14 11 -21l205 -480q33 -78 106 -257.5t114 -274.5q15 -34 58 -144.5t72 -168.5q20 -45 35 -57q19 -15 88 -29.5t84 -20.5q6 -38 6 -57q0 -4 -0.5 -13t-0.5 -13q-63 0 -190 8 t-191 8q-76 0 -215 -7t-178 -8q0 43 4 78l131 28q1 0 12.5 2.5t15.5 3.5t14.5 4.5t15 6.5t11 8t9 11t2.5 14q0 16 -31 96.5t-72 177.5t-42 100l-450 2q-26 -58 -76.5 -195.5t-50.5 -162.5q0 -22 14 -37.5t43.5 -24.5t48.5 -13.5t57 -8.5t41 -4q1 -19 1 -58q0 -9 -2 -27 q-58 0 -174.5 10t-174.5 10q-8 0 -26.5 -4t-21.5 -4q-80 -14 -188 -14zM555 527q33 0 136.5 -2t160.5 -2q19 0 57 2q-87 253 -184 452z" />
+<glyph unicode="&#xf032;" horiz-adv-x="1408" d="M0 -128l2 94q15 4 85 16t106 27q7 12 12.5 27t8.5 33.5t5.5 32.5t3 37.5t0.5 34v35.5v30q0 982 -22 1025q-4 8 -22 14.5t-44.5 11t-49.5 7t-48.5 4.5t-30.5 3l-4 83q98 2 340 11.5t373 9.5q23 0 68.5 -0.5t67.5 -0.5q70 0 136.5 -13t128.5 -42t108 -71t74 -104.5 t28 -137.5q0 -52 -16.5 -95.5t-39 -72t-64.5 -57.5t-73 -45t-84 -40q154 -35 256.5 -134t102.5 -248q0 -100 -35 -179.5t-93.5 -130.5t-138 -85.5t-163.5 -48.5t-176 -14q-44 0 -132 3t-132 3q-106 0 -307 -11t-231 -12zM533 1292q0 -50 4 -151t4 -152q0 -27 -0.5 -80 t-0.5 -79q0 -46 1 -69q42 -7 109 -7q82 0 143 13t110 44.5t74.5 89.5t25.5 142q0 70 -29 122.5t-79 82t-108 43.5t-124 14q-50 0 -130 -13zM538.5 165q0.5 -37 4.5 -83.5t12 -66.5q74 -32 140 -32q376 0 376 335q0 114 -41 180q-27 44 -61.5 74t-67.5 46.5t-80.5 25 t-84 10.5t-94.5 2q-73 0 -101 -10q0 -53 -0.5 -159t-0.5 -158q0 -8 -1 -67.5t-0.5 -96.5z" />
+<glyph unicode="&#xf033;" horiz-adv-x="1024" d="M0 -126l17 85q6 2 81.5 21.5t111.5 37.5q28 35 41 101q1 7 62 289t114 543.5t52 296.5v25q-24 13 -54.5 18.5t-69.5 8t-58 5.5l19 103q33 -2 120 -6.5t149.5 -7t120.5 -2.5q48 0 98.5 2.5t121 7t98.5 6.5q-5 -39 -19 -89q-30 -10 -101.5 -28.5t-108.5 -33.5 q-8 -19 -14 -42.5t-9 -40t-7.5 -45.5t-6.5 -42q-27 -148 -87.5 -419.5t-77.5 -355.5q-2 -9 -13 -58t-20 -90t-16 -83.5t-6 -57.5l1 -18q17 -4 185 -31q-3 -44 -16 -99q-11 0 -32.5 -1.5t-32.5 -1.5q-29 0 -87 10t-86 10q-138 2 -206 2q-51 0 -143 -9t-121 -11z" />
+<glyph unicode="&#xf034;" horiz-adv-x="1792" d="M0 1023v383l81 1l54 -27q12 -5 211 -5q44 0 132 2t132 2q36 0 107.5 -0.5t107.5 -0.5h293q6 0 21 -0.5t20.5 0t16 3t17.5 9t15 17.5l42 1q4 0 14 -0.5t14 -0.5q2 -112 2 -336q0 -80 -5 -109q-39 -14 -68 -18q-25 44 -54 128q-3 9 -11 48t-14.5 73.5t-7.5 35.5 q-6 8 -12 12.5t-15.5 6t-13 2.5t-18 0.5t-16.5 -0.5q-17 0 -66.5 0.5t-74.5 0.5t-64 -2t-71 -6q-9 -81 -8 -136q0 -94 2 -388t2 -455q0 -16 -2.5 -71.5t0 -91.5t12.5 -69q40 -21 124 -42.5t120 -37.5q5 -40 5 -50q0 -14 -3 -29l-34 -1q-76 -2 -218 8t-207 10q-50 0 -151 -9 t-152 -9q-3 51 -3 52v9q17 27 61.5 43t98.5 29t78 27q19 42 19 383q0 101 -3 303t-3 303v117q0 2 0.5 15.5t0.5 25t-1 25.5t-3 24t-5 14q-11 12 -162 12q-33 0 -93 -12t-80 -26q-19 -13 -34 -72.5t-31.5 -111t-42.5 -53.5q-42 26 -56 44zM1414 109.5q9 18.5 42 18.5h80v1024 h-80q-33 0 -42 18.5t11 44.5l126 162q20 26 49 26t49 -26l126 -162q20 -26 11 -44.5t-42 -18.5h-80v-1024h80q33 0 42 -18.5t-11 -44.5l-126 -162q-20 -26 -49 -26t-49 26l-126 162q-20 26 -11 44.5z" />
+<glyph unicode="&#xf035;" d="M0 1023v383l81 1l54 -27q12 -5 211 -5q44 0 132 2t132 2q70 0 246.5 1t304.5 0.5t247 -4.5q33 -1 56 31l42 1q4 0 14 -0.5t14 -0.5q2 -112 2 -336q0 -80 -5 -109q-39 -14 -68 -18q-25 44 -54 128q-3 9 -11 47.5t-15 73.5t-7 36q-10 13 -27 19q-5 2 -66 2q-30 0 -93 1 t-103 1t-94 -2t-96 -7q-9 -81 -8 -136l1 -152v52q0 -55 1 -154t1.5 -180t0.5 -153q0 -16 -2.5 -71.5t0 -91.5t12.5 -69q40 -21 124 -42.5t120 -37.5q5 -40 5 -50q0 -14 -3 -29l-34 -1q-76 -2 -218 8t-207 10q-50 0 -151 -9t-152 -9q-3 51 -3 52v9q17 27 61.5 43t98.5 29 t78 27q7 16 11.5 74t6 145.5t1.5 155t-0.5 153.5t-0.5 89q0 7 -2.5 21.5t-2.5 22.5q0 7 0.5 44t1 73t0 76.5t-3 67.5t-6.5 32q-11 12 -162 12q-41 0 -163 -13.5t-138 -24.5q-19 -12 -34 -71.5t-31.5 -111.5t-42.5 -54q-42 26 -56 44zM5 -64q0 28 26 49q4 3 36 30t59.5 49 t57.5 41.5t42 19.5q13 0 20.5 -10.5t10 -28.5t2.5 -33.5t-1.5 -33t-1.5 -19.5h1024q0 2 -1.5 19.5t-1.5 33t2.5 33.5t10 28.5t20.5 10.5q12 0 42 -19.5t57.5 -41.5t59.5 -49t36 -30q26 -21 26 -49t-26 -49q-4 -3 -36 -30t-59.5 -49t-57.5 -41.5t-42 -19.5q-13 0 -20.5 10.5 t-10 28.5t-2.5 33.5t1.5 33t1.5 19.5h-1024q0 -2 1.5 -19.5t1.5 -33t-2.5 -33.5t-10 -28.5t-20.5 -10.5q-12 0 -42 19.5t-57.5 41.5t-59.5 49t-36 30q-26 21 -26 49z" />
+<glyph unicode="&#xf036;" horiz-adv-x="1792" d="M0 64v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45zM0 448v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45zM0 832v128q0 26 19 45t45 19h1536 q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1536q-26 0 -45 19t-19 45zM0 1216v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf037;" horiz-adv-x="1792" d="M0 64v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45zM128 832v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45zM384 448v128q0 26 19 45t45 19h896 q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45zM512 1216v128q0 26 19 45t45 19h640q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf038;" horiz-adv-x="1792" d="M0 64v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45zM128 832v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1536q-26 0 -45 19t-19 45zM384 448v128q0 26 19 45t45 19h1280 q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45zM512 1216v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf039;" horiz-adv-x="1792" d="M0 64v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45zM0 448v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45zM0 832v128q0 26 19 45t45 19h1664 q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45zM0 1216v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf03a;" horiz-adv-x="1792" d="M0 32v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5zM0 416v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5 t-9.5 22.5zM0 800v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5zM0 1184v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192 q-13 0 -22.5 9.5t-9.5 22.5zM384 32v192q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5zM384 416v192q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5 t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5zM384 800v192q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5zM384 1184v192q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5v-192 q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf03b;" horiz-adv-x="1792" d="M0 32v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5zM0 1184v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5 t-9.5 22.5zM32 704q0 14 9 23l288 288q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-576q0 -13 -9.5 -22.5t-22.5 -9.5q-14 0 -23 9l-288 288q-9 9 -9 23zM640 416v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088 q-13 0 -22.5 9.5t-9.5 22.5zM640 800v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf03c;" horiz-adv-x="1792" d="M0 32v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5zM0 416v576q0 13 9.5 22.5t22.5 9.5q14 0 23 -9l288 -288q9 -9 9 -23t-9 -23l-288 -288q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5z M0 1184v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5zM640 416v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5 t-9.5 22.5zM640 800v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf03d;" horiz-adv-x="1792" d="M0 288v704q0 119 84.5 203.5t203.5 84.5h704q119 0 203.5 -84.5t84.5 -203.5v-165l403 402q18 19 45 19q12 0 25 -5q39 -17 39 -59v-1088q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-403 403v-166q0 -119 -84.5 -203.5t-203.5 -84.5h-704q-119 0 -203.5 84.5 t-84.5 203.5z" />
+<glyph unicode="&#xf03e;" horiz-adv-x="1920" d="M0 32v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113zM128 32q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5v1216q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-1216z M256 128v192l320 320l160 -160l512 512l416 -416v-448h-1408zM256 960q0 80 56 136t136 56t136 -56t56 -136t-56 -136t-136 -56t-136 56t-56 136z" />
+<glyph unicode="&#xf040;" d="M0 -128v416l832 832l416 -416l-832 -832h-416zM128 128h128v-128h107l91 91l-235 235l-91 -91v-107zM298 384q0 -22 22 -22q10 0 17 7l542 542q7 7 7 17q0 22 -22 22q-10 0 -17 -7l-542 -542q-7 -7 -7 -17zM896 1184l166 165q36 38 90 38q53 0 91 -38l235 -234 q37 -39 37 -91q0 -53 -37 -90l-166 -166z" />
+<glyph unicode="&#xf041;" horiz-adv-x="1024" d="M0 896q0 212 150 362t362 150t362 -150t150 -362q0 -109 -33 -179l-364 -774q-16 -33 -47.5 -52t-67.5 -19t-67.5 19t-46.5 52l-365 774q-33 70 -33 179zM256 896q0 -106 75 -181t181 -75t181 75t75 181t-75 181t-181 75t-181 -75t-75 -181z" />
+<glyph unicode="&#xf042;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73v1088q-148 0 -273 -73t-198 -198t-73 -273z" />
+<glyph unicode="&#xf043;" horiz-adv-x="1024" d="M0 512q0 145 81 275q6 9 62.5 90.5t101 151t99.5 178t83 201.5q9 30 34 47t51 17t51.5 -17t33.5 -47q28 -93 83 -201.5t99.5 -178t101 -151t62.5 -90.5q81 -127 81 -275q0 -212 -150 -362t-362 -150t-362 150t-150 362zM256 384q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5 t37.5 90.5q0 36 -20 69q-1 1 -15.5 22.5t-25.5 38t-25 44t-21 50.5q-4 16 -21 16t-21 -16q-7 -23 -21 -50.5t-25 -44t-25.5 -38t-15.5 -22.5q-20 -33 -20 -69z" />
+<glyph unicode="&#xf044;" horiz-adv-x="1792" d="M0 288v832q0 119 84.5 203.5t203.5 84.5h832q63 0 117 -25q15 -7 18 -23q3 -17 -9 -29l-49 -49q-14 -14 -32 -8q-23 6 -45 6h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v126q0 13 9 22l64 64q15 15 35 7t20 -29v-190 q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5zM640 256v288l672 672l288 -288l-672 -672h-288zM736 448h96v-96h56l116 116l-152 152l-116 -116v-56zM944 688q16 -16 33 1l350 350q17 17 1 33t-33 -1l-350 -350q-17 -17 -1 -33zM1376 1280l92 92 q28 28 68 28t68 -28l152 -152q28 -28 28 -68t-28 -68l-92 -92z" />
+<glyph unicode="&#xf045;" horiz-adv-x="1664" d="M0 288v832q0 119 84.5 203.5t203.5 84.5h255q13 0 22.5 -9.5t9.5 -22.5q0 -27 -26 -32q-77 -26 -133 -60q-10 -4 -16 -4h-112q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v214q0 19 18 29q28 13 54 37q16 16 35 8q21 -9 21 -29v-259 q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5zM256 704q0 49 3.5 91t14 90t28 88t47 81.5t68.5 74t94.5 61.5t124.5 48.5t159.5 30.5t196.5 11h160v192q0 42 39 59q13 5 25 5q26 0 45 -19l384 -384q19 -19 19 -45t-19 -45l-384 -384 q-18 -19 -45 -19q-12 0 -25 5q-39 17 -39 59v192h-160q-323 0 -438 -131q-119 -137 -74 -473q3 -23 -20 -34q-8 -2 -12 -2q-16 0 -26 13q-10 14 -21 31t-39.5 68.5t-49.5 99.5t-38.5 114t-17.5 122z" />
+<glyph unicode="&#xf046;" horiz-adv-x="1664" d="M0 288v832q0 119 84.5 203.5t203.5 84.5h832q63 0 117 -25q15 -7 18 -23q3 -17 -9 -29l-49 -49q-10 -10 -23 -10q-3 0 -9 2q-23 6 -45 6h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v254q0 13 9 22l64 64q10 10 23 10q6 0 12 -3 q20 -8 20 -29v-318q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5zM257 768q0 33 24 57l110 110q24 24 57 24t57 -24l263 -263l647 647q24 24 57 24t57 -24l110 -110q24 -24 24 -57t-24 -57l-814 -814q-24 -24 -57 -24t-57 24l-430 430 q-24 24 -24 57z" />
+<glyph unicode="&#xf047;" horiz-adv-x="1792" d="M0 640q0 26 19 45l256 256q19 19 45 19t45 -19t19 -45v-128h384v384h-128q-26 0 -45 19t-19 45t19 45l256 256q19 19 45 19t45 -19l256 -256q19 -19 19 -45t-19 -45t-45 -19h-128v-384h384v128q0 26 19 45t45 19t45 -19l256 -256q19 -19 19 -45t-19 -45l-256 -256 q-19 -19 -45 -19t-45 19t-19 45v128h-384v-384h128q26 0 45 -19t19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19l-256 256q-19 19 -19 45t19 45t45 19h128v384h-384v-128q0 -26 -19 -45t-45 -19t-45 19l-256 256q-19 19 -19 45z" />
+<glyph unicode="&#xf048;" horiz-adv-x="1024" d="M0 -64v1408q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-678q4 11 13 19l710 710q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-678q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf049;" horiz-adv-x="1792" d="M0 -64v1408q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-678q4 11 13 19l710 710q19 19 32 13t13 -32v-710q4 11 13 19l710 710q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-710q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-678q0 -26 -19 -45 t-45 -19h-128q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf04a;" horiz-adv-x="1664" d="M122 640q0 26 19 45l710 710q19 19 32 13t13 -32v-710q5 11 13 19l710 710q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-8 9 -13 19v-710q0 -26 -13 -32t-32 13l-710 710q-19 19 -19 45z" />
+<glyph unicode="&#xf04b;" horiz-adv-x="1408" d="M0 -96v1472q0 26 16.5 36t39.5 -3l1328 -738q23 -13 23 -31t-23 -31l-1328 -738q-23 -13 -39.5 -3t-16.5 36z" />
+<glyph unicode="&#xf04c;" d="M0 -64v1408q0 26 19 45t45 19h512q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45zM896 -64v1408q0 26 19 45t45 19h512q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf04d;" d="M0 -64v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf04e;" horiz-adv-x="1664" d="M0 -96v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v710q0 26 13 32t32 -13l710 -710q19 -19 19 -45t-19 -45l-710 -710q-19 -19 -32 -13t-13 32v710q-5 -10 -13 -19l-710 -710q-19 -19 -32 -13t-13 32z" />
+<glyph unicode="&#xf050;" horiz-adv-x="1792" d="M0 -96v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v710q0 26 13 32t32 -13l710 -710q8 -8 13 -19v678q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v678q-5 -10 -13 -19l-710 -710q-19 -19 -32 -13t-13 32v710 q-5 -10 -13 -19l-710 -710q-19 -19 -32 -13t-13 32z" />
+<glyph unicode="&#xf051;" horiz-adv-x="1024" d="M0 -96v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v678q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v678q-5 -10 -13 -19l-710 -710q-19 -19 -32 -13t-13 32z" />
+<glyph unicode="&#xf052;" horiz-adv-x="1538" d="M1 64v256q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-256q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45zM1 525q-6 13 13 32l710 710q19 19 45 19t45 -19l710 -710q19 -19 13 -32t-32 -13h-1472q-26 0 -32 13z" />
+<glyph unicode="&#xf053;" horiz-adv-x="1280" d="M154 704q0 26 19 45l742 742q19 19 45 19t45 -19l166 -166q19 -19 19 -45t-19 -45l-531 -531l531 -531q19 -19 19 -45t-19 -45l-166 -166q-19 -19 -45 -19t-45 19l-742 742q-19 19 -19 45z" />
+<glyph unicode="&#xf054;" horiz-adv-x="1280" d="M90 128q0 26 19 45l531 531l-531 531q-19 19 -19 45t19 45l166 166q19 19 45 19t45 -19l742 -742q19 -19 19 -45t-19 -45l-742 -742q-19 -19 -45 -19t-45 19l-166 166q-19 19 -19 45z" />
+<glyph unicode="&#xf055;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM320 576q0 -26 19 -45t45 -19h256v-256q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v256h256q26 0 45 19 t19 45v128q0 26 -19 45t-45 19h-256v256q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-256h-256q-26 0 -45 -19t-19 -45v-128z" />
+<glyph unicode="&#xf056;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM320 576q0 -26 19 -45t45 -19h768q26 0 45 19t19 45v128q0 26 -19 45t-45 19h-768q-26 0 -45 -19 t-19 -45v-128z" />
+<glyph unicode="&#xf057;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM387 414q0 -27 19 -46l90 -90q19 -19 46 -19q26 0 45 19l181 181l181 -181q19 -19 45 -19q27 0 46 19 l90 90q19 19 19 46q0 26 -19 45l-181 181l181 181q19 19 19 45q0 27 -19 46l-90 90q-19 19 -46 19q-26 0 -45 -19l-181 -181l-181 181q-19 19 -45 19q-27 0 -46 -19l-90 -90q-19 -19 -19 -46q0 -26 19 -45l181 -181l-181 -181q-19 -19 -19 -45z" />
+<glyph unicode="&#xf058;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM252 621q0 -27 18 -45l362 -362q19 -19 45 -19q27 0 46 19l543 543q18 18 18 45q0 28 -18 46l-91 90 q-19 19 -45 19t-45 -19l-408 -407l-226 226q-19 19 -45 19t-45 -19l-91 -90q-18 -18 -18 -46z" />
+<glyph unicode="&#xf059;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM417 939q-15 -24 8 -42l132 -100q7 -6 19 -6q16 0 25 12q53 68 86 92q34 24 86 24q48 0 85.5 -26 t37.5 -59q0 -38 -20 -61t-68 -45q-63 -28 -115.5 -86.5t-52.5 -125.5v-36q0 -14 9 -23t23 -9h192q14 0 23 9t9 23q0 19 21.5 49.5t54.5 49.5q32 18 49 28.5t46 35t44.5 48t28 60.5t12.5 81q0 88 -55.5 163t-138.5 116t-170 41q-243 0 -371 -213zM640 160q0 -14 9 -23t23 -9 h192q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-192z" />
+<glyph unicode="&#xf05a;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM512 160q0 -14 9 -23t23 -9h448q14 0 23 9t9 23v160q0 14 -9 23t-23 9h-96v512q0 14 -9 23t-23 9h-320 q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23t23 -9h96v-320h-96q-14 0 -23 -9t-9 -23v-160zM640 1056q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v160q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-160z" />
+<glyph unicode="&#xf05b;" d="M0 576v128q0 26 19 45t45 19h143q37 161 154.5 278.5t278.5 154.5v143q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-143q161 -37 278.5 -154.5t154.5 -278.5h143q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-143q-37 -161 -154.5 -278.5t-278.5 -154.5v-143 q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v143q-161 37 -278.5 154.5t-154.5 278.5h-143q-26 0 -45 19t-19 45zM339 512q32 -108 112.5 -188.5t188.5 -112.5v109q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-109q108 32 188.5 112.5t112.5 188.5h-109q-26 0 -45 19 t-19 45v128q0 26 19 45t45 19h109q-32 108 -112.5 188.5t-188.5 112.5v-109q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v109q-108 -32 -188.5 -112.5t-112.5 -188.5h109q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-109z" />
+<glyph unicode="&#xf05c;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273zM429 480q0 13 10 23l137 137l-137 137q-10 10 -10 23t10 23l146 146q10 10 23 10t23 -10l137 -137l137 137q10 10 23 10t23 -10l146 -146q10 -10 10 -23t-10 -23l-137 -137l137 -137q10 -10 10 -23t-10 -23l-146 -146q-10 -10 -23 -10t-23 10l-137 137 l-137 -137q-10 -10 -23 -10t-23 10l-146 146q-10 10 -10 23z" />
+<glyph unicode="&#xf05d;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273zM346 640q0 26 19 45l102 102q19 19 45 19t45 -19l147 -147l275 275q19 19 45 19t45 -19l102 -102q19 -19 19 -45t-19 -45l-422 -422q-19 -19 -45 -19t-45 19l-294 294q-19 19 -19 45z" />
+<glyph unicode="&#xf05e;" d="M0 643q0 157 61 299.5t163.5 245.5t245 164t298.5 61t298.5 -61t245 -164t163.5 -245.5t61 -299.5t-61 -300t-163.5 -246t-245 -164t-298.5 -61t-298.5 61t-245 164t-163.5 246t-61 300zM224 643q0 -162 89 -299l755 754q-135 91 -300 91q-148 0 -273 -73t-198 -199 t-73 -274zM471 185q137 -89 297 -89q111 0 211.5 43.5t173.5 116.5t116 174.5t43 212.5q0 161 -87 295z" />
+<glyph unicode="&#xf060;" d="M64 576q0 52 37 91l651 650q38 38 91 38q52 0 90 -38l75 -74q38 -38 38 -91t-38 -91l-293 -293h704q52 0 84.5 -37.5t32.5 -90.5v-128q0 -53 -32.5 -90.5t-84.5 -37.5h-704l293 -294q38 -36 38 -90t-38 -90l-75 -76q-37 -37 -90 -37q-52 0 -91 37l-651 652q-37 37 -37 90 z" />
+<glyph unicode="&#xf061;" d="M0 512v128q0 53 32.5 90.5t84.5 37.5h704l-293 294q-38 36 -38 90t38 90l75 75q38 38 90 38q53 0 91 -38l651 -651q37 -35 37 -90q0 -54 -37 -91l-651 -651q-39 -37 -91 -37q-51 0 -90 37l-75 75q-38 38 -38 91t38 91l293 293h-704q-52 0 -84.5 37.5t-32.5 90.5z" />
+<glyph unicode="&#xf062;" horiz-adv-x="1664" d="M53 565q0 53 38 91l651 651q35 37 90 37q54 0 91 -37l651 -651q37 -39 37 -91q0 -51 -37 -90l-75 -75q-38 -38 -91 -38q-54 0 -90 38l-294 293v-704q0 -52 -37.5 -84.5t-90.5 -32.5h-128q-53 0 -90.5 32.5t-37.5 84.5v704l-294 -293q-36 -38 -90 -38t-90 38l-75 75 q-38 38 -38 90z" />
+<glyph unicode="&#xf063;" horiz-adv-x="1664" d="M53 704q0 53 38 91l74 75q39 37 91 37q53 0 90 -37l294 -294v704q0 52 38 90t90 38h128q52 0 90 -38t38 -90v-704l294 294q37 37 90 37q52 0 91 -37l75 -75q37 -39 37 -91q0 -53 -37 -90l-651 -652q-39 -37 -91 -37q-53 0 -90 37l-651 652q-38 36 -38 90z" />
+<glyph unicode="&#xf064;" horiz-adv-x="1792" d="M0 416q0 199 53 333q162 403 875 403h224v256q0 26 19 45t45 19t45 -19l512 -512q19 -19 19 -45t-19 -45l-512 -512q-19 -19 -45 -19t-45 19t-19 45v256h-224q-98 0 -175.5 -6t-154 -21.5t-133 -42.5t-105.5 -69.5t-80 -101t-48.5 -138.5t-17.5 -181q0 -55 5 -123 q0 -6 2.5 -23.5t2.5 -26.5q0 -15 -8.5 -25t-23.5 -10q-16 0 -28 17q-7 9 -13 22t-13.5 30t-10.5 24q-127 285 -127 451z" />
+<glyph unicode="&#xf065;" d="M0 -64v448q0 26 19 45t45 19t45 -19l144 -144l332 332q10 10 23 10t23 -10l114 -114q10 -10 10 -23t-10 -23l-332 -332l144 -144q19 -19 19 -45t-19 -45t-45 -19h-448q-26 0 -45 19t-19 45zM781 800q0 13 10 23l332 332l-144 144q-19 19 -19 45t19 45t45 19h448 q26 0 45 -19t19 -45v-448q0 -26 -19 -45t-45 -19t-45 19l-144 144l-332 -332q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23z" />
+<glyph unicode="&#xf066;" d="M13 32q0 13 10 23l332 332l-144 144q-19 19 -19 45t19 45t45 19h448q26 0 45 -19t19 -45v-448q0 -26 -19 -45t-45 -19t-45 19l-144 144l-332 -332q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23zM768 704v448q0 26 19 45t45 19t45 -19l144 -144l332 332q10 10 23 10 t23 -10l114 -114q10 -10 10 -23t-10 -23l-332 -332l144 -144q19 -19 19 -45t-19 -45t-45 -19h-448q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf067;" horiz-adv-x="1408" d="M0 608v192q0 40 28 68t68 28h416v416q0 40 28 68t68 28h192q40 0 68 -28t28 -68v-416h416q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-416v-416q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v416h-416q-40 0 -68 28t-28 68z" />
+<glyph unicode="&#xf068;" horiz-adv-x="1408" d="M0 608v192q0 40 28 68t68 28h1216q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-1216q-40 0 -68 28t-28 68z" />
+<glyph unicode="&#xf069;" horiz-adv-x="1664" d="M122.5 408.5q13.5 51.5 59.5 77.5l266 154l-266 154q-46 26 -59.5 77.5t12.5 97.5l64 110q26 46 77.5 59.5t97.5 -12.5l266 -153v307q0 52 38 90t90 38h128q52 0 90 -38t38 -90v-307l266 153q46 26 97.5 12.5t77.5 -59.5l64 -110q26 -46 12.5 -97.5t-59.5 -77.5 l-266 -154l266 -154q46 -26 59.5 -77.5t-12.5 -97.5l-64 -110q-26 -46 -77.5 -59.5t-97.5 12.5l-266 153v-307q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v307l-266 -153q-46 -26 -97.5 -12.5t-77.5 59.5l-64 110q-26 46 -12.5 97.5z" />
+<glyph unicode="&#xf06a;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM624 1126l17 -621q0 -10 10 -17.5t24 -7.5h185q14 0 23.5 7.5t10.5 17.5l18 621q0 12 -10 18 q-10 8 -24 8h-220q-14 0 -24 -8q-10 -6 -10 -18zM640 161q0 -13 10 -23t23 -10h192q13 0 22 9.5t9 23.5v190q0 14 -9 23.5t-22 9.5h-192q-13 0 -23 -10t-10 -23v-190z" />
+<glyph unicode="&#xf06b;" d="M0 544v320q0 14 9 23t23 9h440q-93 0 -158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5q107 0 168 -77l128 -165l128 165q61 77 168 77q93 0 158.5 -65.5t65.5 -158.5t-65.5 -158.5t-158.5 -65.5h440q14 0 23 -9t9 -23v-320q0 -14 -9 -23t-23 -9h-96v-416q0 -40 -28 -68 t-68 -28h-1088q-40 0 -68 28t-28 68v416h-96q-14 0 -23 9t-9 23zM376 1120q0 -40 28 -68t68 -28h195l-126 161q-26 31 -69 31q-40 0 -68 -28t-28 -68zM608 180q0 -25 18 -38.5t46 -13.5h192q28 0 46 13.5t18 38.5v56v468v192h-320v-192v-468v-56zM870 1024h194q40 0 68 28 t28 68t-28 68t-68 28q-43 0 -69 -31z" />
+<glyph unicode="&#xf06c;" horiz-adv-x="1792" d="M0 121q0 35 31 73.5t68 65.5t68 56t31 48q0 4 -14 38t-16 44q-9 51 -9 104q0 115 43.5 220t119 184.5t170.5 139t204 95.5q55 18 145 25.5t179.5 9t178.5 6t163.5 24t113.5 56.5l29.5 29.5t29.5 28t27 20t36.5 16t43.5 4.5q39 0 70.5 -46t47.5 -112t24 -124t8 -96 q0 -95 -20 -193q-46 -224 -184.5 -383t-357.5 -268q-214 -108 -438 -108q-148 0 -286 47q-15 5 -88 42t-96 37q-16 0 -39.5 -32t-45 -70t-52.5 -70t-60 -32q-30 0 -51 11t-31 24t-27 42q-2 4 -6 11t-5.5 10t-3 9.5t-1.5 13.5zM384 448q0 -26 19 -45t45 -19q24 0 45 19 q27 24 74 71t67 66q137 124 268.5 176t313.5 52q26 0 45 19t19 45t-19 45t-45 19q-172 0 -318 -49.5t-259.5 -134t-235.5 -219.5q-19 -21 -19 -45z" />
+<glyph unicode="&#xf06d;" horiz-adv-x="1408" d="M0 -160q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v64zM256 640q0 78 24.5 144t64 112.5t87.5 88t96 77.5t87.5 72t64 81.5t24.5 96.5q0 94 -66 224l3 -1l-1 1q90 -41 160 -83t138.5 -100 t113.5 -122.5t72.5 -150.5t27.5 -184q0 -78 -24.5 -144t-64 -112.5t-87.5 -88t-96 -77.5t-87.5 -72t-64 -81.5t-24.5 -96.5q0 -96 67 -224l-4 1l1 -1q-90 41 -160 83t-138.5 100t-113.5 122.5t-72.5 150.5t-27.5 184z" />
+<glyph unicode="&#xf06e;" horiz-adv-x="1792" d="M0 576q0 34 20 69q140 229 376.5 368t499.5 139t499.5 -139t376.5 -368q20 -35 20 -69t-20 -69q-140 -230 -376.5 -368.5t-499.5 -138.5t-499.5 139t-376.5 368q-20 35 -20 69zM128 576q133 -205 333.5 -326.5t434.5 -121.5t434.5 121.5t333.5 326.5q-152 236 -381 353 q61 -104 61 -225q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 121 61 225q-229 -117 -381 -353zM592 704q0 -20 14 -34t34 -14t34 14t14 34q0 86 61 147t147 61q20 0 34 14t14 34t-14 34t-34 14q-125 0 -214.5 -89.5t-89.5 -214.5z" />
+<glyph unicode="&#xf070;" horiz-adv-x="1792" d="M0 576q0 38 20 69q153 235 380 371t496 136q89 0 180 -17l54 97q10 16 28 16q5 0 18 -6t31 -15.5t33 -18.5t31.5 -18.5t19.5 -11.5q16 -10 16 -27q0 -7 -1 -9q-105 -188 -315 -566t-316 -567l-49 -89q-10 -16 -28 -16q-12 0 -134 70q-16 10 -16 28q0 12 44 87 q-143 65 -263.5 173t-208.5 245q-20 31 -20 69zM128 576q167 -258 427 -375l78 141q-87 63 -136 159t-49 203q0 121 61 225q-229 -117 -381 -353zM592 704q0 -20 14 -34t34 -14t34 14t14 34q0 86 61 147t147 61q20 0 34 14t14 34t-14 34t-34 14q-125 0 -214.5 -89.5 t-89.5 -214.5zM896 0l74 132q212 18 392.5 137t301.5 307q-115 179 -282 294l63 112q95 -64 182.5 -153t144.5 -184q20 -34 20 -69t-20 -69q-39 -64 -109 -145q-150 -172 -347.5 -267t-419.5 -95zM1056 286l280 502q8 -45 8 -84q0 -139 -79 -253.5t-209 -164.5z" />
+<glyph unicode="&#xf071;" horiz-adv-x="1792" d="M16 61l768 1408q17 31 47 49t65 18t65 -18t47 -49l768 -1408q35 -63 -2 -126q-17 -29 -46.5 -46t-63.5 -17h-1536q-34 0 -63.5 17t-46.5 46q-37 63 -2 126zM752 992l17 -457q0 -10 10 -16.5t24 -6.5h185q14 0 23.5 6.5t10.5 16.5l18 459q0 12 -10 19q-13 11 -24 11h-220 q-11 0 -24 -11q-10 -7 -10 -21zM768 161q0 -14 9.5 -23.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 23.5v190q0 14 -9.5 23.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -23.5v-190z" />
+<glyph unicode="&#xf072;" horiz-adv-x="1408" d="M0 477q-1 13 9 25l96 97q9 9 23 9q6 0 8 -1l194 -53l259 259l-508 279q-14 8 -17 24q-2 16 9 27l128 128q14 13 30 8l665 -159l160 160q76 76 172 108t148 -12q44 -52 12 -148t-108 -172l-161 -161l160 -696q5 -19 -12 -33l-128 -96q-7 -6 -19 -6q-4 0 -7 1q-15 3 -21 16 l-279 508l-259 -259l53 -194q5 -17 -8 -31l-96 -96q-9 -9 -23 -9h-2q-15 2 -24 13l-189 252l-252 189q-11 7 -13 23z" />
+<glyph unicode="&#xf073;" horiz-adv-x="1664" d="M0 -128v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90v-1280q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90zM128 -128h288v288h-288v-288zM128 224 h288v320h-288v-320zM128 608h288v288h-288v-288zM384 1088q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5v288q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-288zM480 -128h320v288h-320v-288zM480 224h320v320h-320v-320zM480 608h320v288h-320 v-288zM864 -128h320v288h-320v-288zM864 224h320v320h-320v-320zM864 608h320v288h-320v-288zM1152 1088q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5v288q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-288zM1248 -128h288v288h-288v-288z M1248 224h288v320h-288v-320zM1248 608h288v288h-288v-288z" />
+<glyph unicode="&#xf074;" horiz-adv-x="1792" d="M0 160v192q0 14 9 23t23 9h224q48 0 87 15t69 45t51 61.5t45 77.5q32 62 78 171q29 66 49.5 111t54 105t64 100t74 83t90 68.5t106.5 42t128 16.5h256v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23t-9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v192 h-256q-48 0 -87 -15t-69 -45t-51 -61.5t-45 -77.5q-32 -62 -78 -171q-29 -66 -49.5 -111t-54 -105t-64 -100t-74 -83t-90 -68.5t-106.5 -42t-128 -16.5h-224q-14 0 -23 9t-9 23zM0 1056v192q0 14 9 23t23 9h224q250 0 410 -225q-60 -92 -137 -273q-22 45 -37 72.5 t-40.5 63.5t-51 56.5t-63 35t-81.5 14.5h-224q-14 0 -23 9t-9 23zM743 353q59 93 136 273q22 -45 37 -72.5t40.5 -63.5t51 -56.5t63 -35t81.5 -14.5h256v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23t-9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v192 q-32 0 -85 -0.5t-81 -1t-73 1t-71 5t-64 10.5t-63 18.5t-58 28.5t-59 40t-55 53.5t-56 69.5z" />
+<glyph unicode="&#xf075;" horiz-adv-x="1792" d="M0 640q0 130 71 248.5t191 204.5t286 136.5t348 50.5q244 0 450 -85.5t326 -233t120 -321.5t-120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22q-17 -2 -30.5 9t-17.5 29v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5 t34.5 38t31 39.5t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281z" />
+<glyph unicode="&#xf076;" d="M0 576v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-128q0 -52 23.5 -90t53.5 -57t71 -30t64 -13t44 -2t44 2t64 13t71 30t53.5 57t23.5 90v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-128q0 -201 -98.5 -362t-274 -251.5t-395.5 -90.5t-395.5 90.5t-274 251.5 t-98.5 362zM0 960v384q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-384q0 -26 -19 -45t-45 -19h-384q-26 0 -45 19t-19 45zM1024 960v384q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-384q0 -26 -19 -45t-45 -19h-384q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf077;" horiz-adv-x="1792" d="M90 250.5q0 26.5 19 45.5l742 741q19 19 45 19t45 -19l742 -741q19 -19 19 -45.5t-19 -45.5l-166 -165q-19 -19 -45 -19t-45 19l-531 531l-531 -531q-19 -19 -45 -19t-45 19l-166 165q-19 19 -19 45.5z" />
+<glyph unicode="&#xf078;" horiz-adv-x="1792" d="M90 773.5q0 26.5 19 45.5l166 165q19 19 45 19t45 -19l531 -531l531 531q19 19 45 19t45 -19l166 -165q19 -19 19 -45.5t-19 -45.5l-742 -741q-19 -19 -45 -19t-45 19l-742 741q-19 19 -19 45.5z" />
+<glyph unicode="&#xf079;" horiz-adv-x="1920" d="M0 704q0 24 15 41l320 384q19 22 49 22t49 -22l320 -384q15 -17 15 -41q0 -26 -19 -45t-45 -19h-192v-384h576q16 0 25 -11l160 -192q7 -11 7 -21q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-8 0 -13.5 2t-9 7t-5.5 8t-3 11.5t-1 11.5v13v11v160v416h-192q-26 0 -45 19t-19 45z M640 1120q0 13 9.5 22.5t22.5 9.5h960q8 0 13.5 -2t9 -7t5.5 -8t3 -11.5t1 -11.5v-13v-11v-160v-416h192q26 0 45 -19t19 -45q0 -24 -15 -41l-320 -384q-20 -23 -49 -23t-49 23l-320 384q-15 17 -15 41q0 26 19 45t45 19h192v384h-576q-16 0 -25 12l-160 192q-7 9 -7 20z " />
+<glyph unicode="&#xf07a;" horiz-adv-x="1664" d="M0 1216q0 26 19 45t45 19h256q16 0 28.5 -6.5t20 -15.5t13 -24.5t7.5 -26.5t5.5 -29.5t4.5 -25.5h1201q26 0 45 -19t19 -45v-512q0 -24 -16 -42.5t-41 -21.5l-1044 -122q1 -7 4.5 -21.5t6 -26.5t2.5 -22q0 -16 -24 -64h920q26 0 45 -19t19 -45t-19 -45t-45 -19h-1024 q-26 0 -45 19t-19 45q0 14 11 39.5t29.5 59.5t20.5 38l-177 823h-204q-26 0 -45 19t-19 45zM384 0q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5zM1280 0q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5t-37.5 -90.5 t-90.5 -37.5t-90.5 37.5t-37.5 90.5z" />
+<glyph unicode="&#xf07b;" horiz-adv-x="1664" d="M0 224v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158z" />
+<glyph unicode="&#xf07c;" horiz-adv-x="1920" d="M0 224v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158v-160h-832q-94 0 -197 -47.5t-164 -119.5l-337 -396l-5 -6q0 4 -0.5 12.5t-0.5 12.5zM73 56q0 31 31 66l336 396q43 51 120.5 86.5t143.5 35.5h1088q34 0 60.5 -13t26.5 -43 q0 -31 -31 -66l-336 -396q-43 -51 -120.5 -86.5t-143.5 -35.5h-1088q-34 0 -60.5 13t-26.5 43z" />
+<glyph unicode="&#xf07d;" horiz-adv-x="768" d="M64 64q0 26 19 45t45 19h128v1024h-128q-26 0 -45 19t-19 45t19 45l256 256q19 19 45 19t45 -19l256 -256q19 -19 19 -45t-19 -45t-45 -19h-128v-1024h128q26 0 45 -19t19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19l-256 256q-19 19 -19 45z" />
+<glyph unicode="&#xf07e;" horiz-adv-x="1792" d="M0 640q0 26 19 45l256 256q19 19 45 19t45 -19t19 -45v-128h1024v128q0 26 19 45t45 19t45 -19l256 -256q19 -19 19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19t-19 45v128h-1024v-128q0 -26 -19 -45t-45 -19t-45 19l-256 256q-19 19 -19 45z" />
+<glyph unicode="&#xf080;" horiz-adv-x="1920" d="M0 32v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113zM128 32q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5v1216q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-1216z M256 128v384h256v-384h-256zM640 128v896h256v-896h-256zM1024 128v640h256v-640h-256zM1408 128v1024h256v-1024h-256z" />
+<glyph unicode="&#xf081;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 286q148 -94 322 -94q112 0 210 35.5t168 95t120.5 137t75 162t24.5 168.5q0 18 -1 27q63 45 105 109 q-56 -25 -121 -34q68 40 93 117q-65 -38 -134 -51q-61 66 -153 66q-87 0 -148.5 -61.5t-61.5 -148.5q0 -29 5 -48q-129 7 -242 65t-192 155q-29 -50 -29 -106q0 -114 91 -175q-47 1 -100 26v-2q0 -75 50 -133.5t123 -72.5q-29 -8 -51 -8q-13 0 -39 4q21 -63 74.5 -104 t121.5 -42q-116 -90 -261 -90q-26 0 -50 3z" />
+<glyph unicode="&#xf082;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-192v608h203l30 224h-233v143q0 54 28 83t96 29l132 1v207q-96 9 -180 9q-136 0 -218 -80.5t-82 -225.5v-166h-224v-224h224v-608h-544 q-119 0 -203.5 84.5t-84.5 203.5z" />
+<glyph unicode="&#xf083;" horiz-adv-x="1792" d="M0 0v1280q0 53 37.5 90.5t90.5 37.5h1536q53 0 90.5 -37.5t37.5 -90.5v-1280q0 -53 -37.5 -90.5t-90.5 -37.5h-1536q-53 0 -90.5 37.5t-37.5 90.5zM128 0h1536v128h-1536v-128zM128 1024h1536v118v138h-828l-64 -128h-644v-128zM256 1216h384v128h-384v-128zM512 574 q0 -159 112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5t-112.5 271.5t-271.5 112.5t-271.5 -112.5t-112.5 -271.5zM640 574q0 106 75 181t181 75t181 -75t75 -181t-75 -181t-181 -75t-181 75t-75 181zM736 576q0 -14 9 -23t23 -9t23 9t9 23q0 40 28 68t68 28q14 0 23 9 t9 23t-9 23t-23 9q-66 0 -113 -47t-47 -113z" />
+<glyph unicode="&#xf084;" horiz-adv-x="1792" d="M0 752q0 160 95 313t248 248t313 95q163 0 265.5 -102.5t102.5 -265.5q0 -189 -131 -365l355 -355l96 96q-3 3 -26 24.5t-40 38.5t-33 36.5t-16 28.5q0 17 49 66t66 49q13 0 23 -10q6 -6 46 -44.5t82 -79.5t86.5 -86t73 -78t28.5 -41q0 -17 -49 -66t-66 -49 q-9 0 -28.5 16t-36.5 33t-38.5 40t-24.5 26l-96 -96l220 -220q28 -28 28 -68q0 -42 -39 -81t-81 -39q-40 0 -68 28l-671 671q-176 -131 -365 -131q-163 0 -265.5 102.5t-102.5 265.5zM192 768q0 -80 56 -136t136 -56t136 56t56 136q0 42 -19 83q41 -19 83 -19q80 0 136 56 t56 136t-56 136t-136 56t-136 -56t-56 -136q0 -42 19 -83q-41 19 -83 19q-80 0 -136 -56t-56 -136z" />
+<glyph unicode="&#xf085;" horiz-adv-x="1920" d="M0 549v185q0 10 7 19.5t16 10.5l155 24q11 35 32 76q-34 48 -90 115q-7 11 -7 20q0 12 7 20q22 30 82 89t79 59q11 0 21 -7l115 -90q34 18 77 32q11 108 23 154q7 24 30 24h186q11 0 20 -7.5t10 -17.5l23 -153q34 -10 75 -31l118 89q8 7 20 7q11 0 21 -8 q144 -133 144 -160q0 -9 -7 -19q-12 -16 -42 -54t-45 -60q23 -48 34 -82l152 -23q10 -2 17 -10.5t7 -19.5v-185q0 -10 -7 -19.5t-16 -10.5l-155 -24q-11 -35 -32 -76q34 -48 90 -115q7 -10 7 -20q0 -12 -7 -19q-23 -30 -82.5 -89.5t-78.5 -59.5q-11 0 -21 7l-115 90 q-37 -19 -77 -31q-11 -108 -23 -155q-7 -24 -30 -24h-186q-11 0 -20 7.5t-10 17.5l-23 153q-34 10 -75 31l-118 -89q-7 -7 -20 -7q-11 0 -21 8q-144 133 -144 160q0 9 7 19q10 14 41 53t47 61q-23 44 -35 82l-152 24q-10 1 -17 9.5t-7 19.5zM384 640q0 -106 75 -181t181 -75 t181 75t75 181t-75 181t-181 75t-181 -75t-75 -181zM1152 58v140q0 16 149 31q13 29 30 52q-51 113 -51 138q0 4 4 7q4 2 35 20t59 34t30 16q8 0 46 -46.5t52 -67.5q20 2 30 2t30 -2q51 71 92 112l6 2q4 0 124 -70q4 -3 4 -7q0 -25 -51 -138q17 -23 30 -52q149 -15 149 -31 v-140q0 -16 -149 -31q-12 -27 -30 -52q51 -113 51 -138q0 -4 -4 -7q-122 -71 -124 -71q-8 0 -46 47t-52 68q-20 -2 -30 -2t-30 2q-14 -21 -52 -68t-46 -47q-2 0 -124 71q-4 3 -4 7q0 25 51 138q-18 25 -30 52q-149 15 -149 31zM1152 1082v140q0 16 149 31q13 29 30 52 q-51 113 -51 138q0 4 4 7q4 2 35 20t59 34t30 16q8 0 46 -46.5t52 -67.5q20 2 30 2t30 -2q51 71 92 112l6 2q4 0 124 -70q4 -3 4 -7q0 -25 -51 -138q17 -23 30 -52q149 -15 149 -31v-140q0 -16 -149 -31q-12 -27 -30 -52q51 -113 51 -138q0 -4 -4 -7q-122 -71 -124 -71 q-8 0 -46 47t-52 68q-20 -2 -30 -2t-30 2q-14 -21 -52 -68t-46 -47q-2 0 -124 71q-4 3 -4 7q0 25 51 138q-18 25 -30 52q-149 15 -149 31zM1408 128q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5q0 52 -38 90t-90 38t-90 -38t-38 -90zM1408 1152q0 -53 37.5 -90.5 t90.5 -37.5t90.5 37.5t37.5 90.5q0 52 -38 90t-90 38t-90 -38t-38 -90z" />
+<glyph unicode="&#xf086;" horiz-adv-x="1792" d="M0 768q0 139 94 257t256.5 186.5t353.5 68.5t353.5 -68.5t256.5 -186.5t94 -257t-94 -257t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25 t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224zM616 132q58 -4 88 -4q161 0 309 45t264 129q125 92 192 212t67 254q0 77 -23 152q129 -71 204 -178t75 -230q0 -120 -71 -224.5t-195 -176.5q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5 t4.5 -5t4 -5t3.5 -5.5l2.5 -5t2 -6t0.5 -6.5t-1 -6.5q-3 -14 -13 -22t-22 -7q-50 7 -86 16q-154 40 -278 128q-90 -16 -176 -16q-271 0 -472 132z" />
+<glyph unicode="&#xf087;" d="M0 128v640q0 53 37.5 90.5t90.5 37.5h274q36 24 137 155q58 75 107 128q24 25 35.5 85.5t30.5 126.5t62 108q39 37 90 37q84 0 151 -32.5t102 -101.5t35 -186q0 -93 -48 -192h176q104 0 180 -76t76 -179q0 -89 -49 -163q9 -33 9 -69q0 -77 -38 -144q3 -21 3 -43 q0 -101 -60 -178q1 -139 -85 -219.5t-227 -80.5h-36h-93q-96 0 -189.5 22.5t-216.5 65.5q-116 40 -138 40h-288q-53 0 -90.5 37.5t-37.5 90.5zM128 192q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45zM384 128h32q13 0 31.5 -3t33 -6.5t38 -11t35 -11.5 t35.5 -12.5t29 -10.5q211 -73 342 -73h121q192 0 192 167q0 26 -5 56q30 16 47.5 52.5t17.5 73.5t-18 69q53 50 53 119q0 25 -10 55.5t-25 47.5q32 1 53.5 47t21.5 81q0 51 -39 89.5t-89 38.5h-352q0 58 48 159.5t48 160.5q0 98 -32 145t-128 47q-26 -26 -38 -85 t-30.5 -125.5t-59.5 -109.5q-22 -23 -77 -91q-4 -5 -23 -30t-31.5 -41t-34.5 -42.5t-40 -44t-38.5 -35.5t-40 -27t-35.5 -9h-32v-640z" />
+<glyph unicode="&#xf088;" d="M0 512v640q0 53 37.5 90.5t90.5 37.5h288q22 0 138 40q128 44 223 66t200 22h112q140 0 226.5 -79t85.5 -216v-5q60 -77 60 -178q0 -22 -3 -43q38 -67 38 -144q0 -36 -9 -69q49 -74 49 -163q0 -103 -76 -179t-180 -76h-176q48 -99 48 -192q0 -118 -35 -186 q-35 -69 -102 -101.5t-151 -32.5q-51 0 -90 37q-34 33 -54 82t-25.5 90.5t-17.5 84.5t-31 64q-48 50 -107 127q-101 131 -137 155h-274q-53 0 -90.5 37.5t-37.5 90.5zM128 1088q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45zM384 512h32q16 0 35.5 -9 t40 -27t38.5 -35.5t40 -44t34.5 -42.5t31.5 -41t23 -30q55 -68 77 -91q41 -43 59.5 -109.5t30.5 -125.5t38 -85q96 0 128 47t32 145q0 59 -48 160.5t-48 159.5h352q50 0 89 38.5t39 89.5q0 35 -21.5 81t-53.5 47q15 17 25 47.5t10 55.5q0 69 -53 119q18 32 18 69t-17.5 73.5 t-47.5 52.5q5 30 5 56q0 85 -49 126t-136 41h-128q-131 0 -342 -73q-5 -2 -29 -10.5t-35.5 -12.5t-35 -11.5t-38 -11t-33 -6.5t-31.5 -3h-32v-640z" />
+<glyph unicode="&#xf089;" horiz-adv-x="896" d="M0 889q0 37 56 46l502 73l225 455q19 41 49 41v-1339l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48z" />
+<glyph unicode="&#xf08a;" horiz-adv-x="1792" d="M0 940q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5q224 0 351 -124t127 -344q0 -221 -229 -450l-623 -600q-18 -18 -44 -18t-44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138z M128 940q0 -168 187 -355l581 -560l580 559q188 188 188 356q0 81 -21.5 143t-55 98.5t-81.5 59.5t-94 31t-98 8t-112 -25.5t-110.5 -64t-86.5 -72t-60 -61.5q-18 -22 -49 -22t-49 22q-24 28 -60 61.5t-86.5 72t-110.5 64t-112 25.5t-98 -8t-94 -31t-81.5 -59.5t-55 -98.5 t-21.5 -143z" />
+<glyph unicode="&#xf08b;" horiz-adv-x="1664" d="M0 288v704q0 119 84.5 203.5t203.5 84.5h320q13 0 22.5 -9.5t9.5 -22.5q0 -4 1 -20t0.5 -26.5t-3 -23.5t-10 -19.5t-20.5 -6.5h-320q-66 0 -113 -47t-47 -113v-704q0 -66 47 -113t113 -47h288h11h13t11.5 -1t11.5 -3t8 -5.5t7 -9t2 -13.5q0 -4 1 -20t0.5 -26.5t-3 -23.5 t-10 -19.5t-20.5 -6.5h-320q-119 0 -203.5 84.5t-84.5 203.5zM384 448v384q0 26 19 45t45 19h448v288q0 26 19 45t45 19t45 -19l544 -544q19 -19 19 -45t-19 -45l-544 -544q-19 -19 -45 -19t-45 19t-19 45v288h-448q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf08c;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM223 1030q0 -51 35.5 -85.5t92.5 -34.5h1q59 0 95 34.5t36 85.5q-1 52 -36 86t-93 34t-94.5 -34t-36.5 -86z M237 122h231v694h-231v-694zM595 122h231v388q0 38 7 56q15 35 45 59.5t74 24.5q116 0 116 -157v-371h231v398q0 154 -73 233t-193 79q-136 0 -209 -117h2v101h-231q3 -66 0 -694z" />
+<glyph unicode="&#xf08d;" horiz-adv-x="1152" d="M0 320q0 123 78.5 221.5t177.5 98.5v512q-52 0 -90 38t-38 90t38 90t90 38h640q52 0 90 -38t38 -90t-38 -90t-90 -38v-512q99 0 177.5 -98.5t78.5 -221.5q0 -26 -19 -45t-45 -19h-429l-51 -483q-2 -12 -10.5 -20.5t-20.5 -8.5h-1q-27 0 -32 27l-76 485h-404q-26 0 -45 19 t-19 45zM416 672q0 -14 9 -23t23 -9t23 9t9 23v448q0 14 -9 23t-23 9t-23 -9t-9 -23v-448z" />
+<glyph unicode="&#xf08e;" horiz-adv-x="1792" d="M0 288v832q0 119 84.5 203.5t203.5 84.5h704q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-704q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v320q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-320q0 -119 -84.5 -203.5t-203.5 -84.5h-832 q-119 0 -203.5 84.5t-84.5 203.5zM685 576q0 13 10 23l652 652l-176 176q-19 19 -19 45t19 45t45 19h512q26 0 45 -19t19 -45v-512q0 -26 -19 -45t-45 -19t-45 19l-176 176l-652 -652q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23z" />
+<glyph unicode="&#xf090;" d="M0 448v384q0 26 19 45t45 19h448v288q0 26 19 45t45 19t45 -19l544 -544q19 -19 19 -45t-19 -45l-544 -544q-19 -19 -45 -19t-45 19t-19 45v288h-448q-26 0 -45 19t-19 45zM894.5 78.5q0.5 10.5 3 23.5t10 19.5t20.5 6.5h320q66 0 113 47t47 113v704q0 66 -47 113 t-113 47h-288h-11h-13t-11.5 1t-11.5 3t-8 5.5t-7 9t-2 13.5q0 4 -1 20t-0.5 26.5t3 23.5t10 19.5t20.5 6.5h320q119 0 203.5 -84.5t84.5 -203.5v-704q0 -119 -84.5 -203.5t-203.5 -84.5h-320q-13 0 -22.5 9.5t-9.5 22.5q0 4 -1 20t-0.5 26.5z" />
+<glyph unicode="&#xf091;" horiz-adv-x="1664" d="M0 928v128q0 40 28 68t68 28h288v96q0 66 47 113t113 47h576q66 0 113 -47t47 -113v-96h288q40 0 68 -28t28 -68v-128q0 -71 -41.5 -143t-112 -130t-173 -97.5t-215.5 -44.5q-42 -54 -95 -95q-38 -34 -52.5 -72.5t-14.5 -89.5q0 -54 30.5 -91t97.5 -37q75 0 133.5 -45.5 t58.5 -114.5v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 69 58.5 114.5t133.5 45.5q67 0 97.5 37t30.5 91q0 51 -14.5 89.5t-52.5 72.5q-53 41 -95 95q-113 5 -215.5 44.5t-173 97.5t-112 130t-41.5 143zM128 928q0 -78 94.5 -162t235.5 -113q-74 162 -74 371 h-256v-96zM1206 653q141 29 235.5 113t94.5 162v96h-256q0 -209 -74 -371z" />
+<glyph unicode="&#xf092;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-224q-16 0 -24.5 1t-19.5 5t-16 14.5t-5 27.5v239q0 97 -52 142q57 6 102.5 18t94 39t81 66.5t53 105t20.5 150.5q0 121 -79 206q37 91 -8 204 q-28 9 -81 -11t-92 -44l-38 -24q-93 26 -192 26t-192 -26q-16 11 -42.5 27t-83.5 38.5t-86 13.5q-44 -113 -7 -204q-79 -85 -79 -206q0 -85 20.5 -150t52.5 -105t80.5 -67t94 -39t102.5 -18q-40 -36 -49 -103q-21 -10 -45 -15t-57 -5t-65.5 21.5t-55.5 62.5q-19 32 -48.5 52 t-49.5 24l-20 3q-21 0 -29 -4.5t-5 -11.5t9 -14t13 -12l7 -5q22 -10 43.5 -38t31.5 -51l10 -23q13 -38 44 -61.5t67 -30t69.5 -7t55.5 3.5l23 4q0 -38 0.5 -103t0.5 -68q0 -22 -11 -33.5t-22 -13t-33 -1.5h-224q-119 0 -203.5 84.5t-84.5 203.5zM271 315q3 5 13 2 q10 -5 7 -12q-5 -7 -13 -2q-10 5 -7 12zM304 290q6 6 16 -3q9 -11 2 -16q-6 -7 -16 3q-9 11 -2 16zM335 233q-9 13 0 18q9 7 17 -6q9 -12 0 -19q-8 -6 -17 7zM370 206q8 9 20 -3q12 -11 4 -19q-8 -9 -20 3q-13 11 -4 19zM419 168q4 11 19 7q16 -5 13 -16q-4 -12 -19 -6 q-17 4 -13 15zM481 154q0 11 16 11q17 2 17 -11q0 -11 -16 -11q-17 -2 -17 11zM540 158q-2 12 14 15q16 2 18 -9q2 -10 -14 -14t-18 8z" />
+<glyph unicode="&#xf093;" horiz-adv-x="1664" d="M0 -32v320q0 40 28 68t68 28h427q21 -56 70.5 -92t110.5 -36h256q61 0 110.5 36t70.5 92h427q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68zM325 936q-17 39 14 69l448 448q18 19 45 19t45 -19l448 -448q31 -30 14 -69q-17 -40 -59 -40 h-256v-448q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v448h-256q-42 0 -59 40zM1152 64q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45zM1408 64q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45z" />
+<glyph unicode="&#xf094;" d="M0 433q0 111 18 217.5t54.5 209.5t100.5 194t150 156q78 59 232 120q194 78 316 78q60 0 175.5 -24t173.5 -24q19 0 57 5t58 5q81 0 118 -50.5t37 -134.5q0 -23 -5 -68t-5 -68q0 -10 1 -18.5t3 -17t4 -13.5t6.5 -16t6.5 -17q16 -40 25 -118.5t9 -136.5q0 -165 -70 -327.5 t-196 -288t-281 -180.5q-124 -44 -326 -44q-57 0 -170 14.5t-169 14.5q-24 0 -72.5 -14.5t-73.5 -14.5q-73 0 -123.5 55.5t-50.5 128.5q0 24 11 68t11 67q0 40 -12.5 120.5t-12.5 121.5zM128 434q0 -40 12.5 -120t12.5 -121q0 -23 -11 -66.5t-11 -65.5t12 -36.5t34 -14.5 q24 0 72.5 11t73.5 11q57 0 169.5 -15.5t169.5 -15.5q181 0 284 36q129 45 235.5 152.5t166 245.5t59.5 275q0 44 -7 113.5t-18 96.5q-12 30 -17 44t-9 36.5t-4 48.5q0 23 5 68.5t5 67.5q0 37 -10 55q-4 1 -13 1q-19 0 -58 -4.5t-59 -4.5q-60 0 -176 24t-175 24 q-43 0 -94.5 -11.5t-85 -23.5t-89.5 -34q-137 -54 -202 -103q-96 -73 -159.5 -189.5t-88 -236t-24.5 -248.5z" />
+<glyph unicode="&#xf095;" horiz-adv-x="1408" d="M0 1069q0 92 51 186q56 101 106 122q25 11 68.5 21t70.5 10q14 0 21 -3q18 -6 53 -76q11 -19 30 -54t35 -63.5t31 -53.5q3 -4 17.5 -25t21.5 -35.5t7 -28.5q0 -20 -28.5 -50t-62 -55t-62 -53t-28.5 -46q0 -9 5 -22.5t8.5 -20.5t14 -24t11.5 -19q76 -137 174 -235 t235 -174q2 -1 19 -11.5t24 -14t20.5 -8.5t22.5 -5q18 0 46 28.5t53 62t55 62t50 28.5q14 0 28.5 -7t35.5 -21.5t25 -17.5q25 -15 53.5 -31t63.5 -35t54 -30q70 -35 76 -53q3 -7 3 -21q0 -27 -10 -70.5t-21 -68.5q-21 -50 -122 -106q-94 -51 -186 -51q-27 0 -52.5 3.5 t-57.5 12.5t-47.5 14.5t-55.5 20.5t-49 18q-98 35 -175 83q-128 79 -264.5 215.5t-215.5 264.5q-48 77 -83 175q-3 9 -18 49t-20.5 55.5t-14.5 47.5t-12.5 57.5t-3.5 52.5z" />
+<glyph unicode="&#xf096;" horiz-adv-x="1408" d="M0 288v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5v-832q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5zM128 288q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v832q0 66 -47 113t-113 47h-832q-66 0 -113 -47 t-47 -113v-832z" />
+<glyph unicode="&#xf097;" horiz-adv-x="1280" d="M0 7v1289q0 34 19.5 62t52.5 41q21 9 44 9h1048q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62zM128 38l423 406l89 85l89 -85l423 -406 v1242h-1024v-1242z" />
+<glyph unicode="&#xf098;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 905q0 -16 2.5 -34t5 -30.5t9 -33t10 -29.5t12.5 -33t11 -30q60 -164 216.5 -320.5t320.5 -216.5 q6 -2 30 -11t33 -12.5t29.5 -10t33 -9t30.5 -5t34 -2.5q57 0 130.5 34t94.5 80q22 53 22 101q0 11 -2 16q-3 8 -38.5 29.5t-88.5 49.5l-53 29q-5 3 -19 13t-25 15t-21 5q-18 0 -47 -32.5t-57 -65.5t-44 -33q-7 0 -16.5 3.5t-15.5 6.5t-17 9.5t-14 8.5q-99 55 -170.5 126.5 t-126.5 170.5q-2 3 -8.5 14t-9.5 17t-6.5 15.5t-3.5 16.5q0 13 20.5 33.5t45 38.5t45 39.5t20.5 36.5q0 10 -5 21t-15 25t-13 19q-3 6 -15 28.5t-25 45.5t-26.5 47.5t-25 40.5t-16.5 18t-16 2q-48 0 -101 -22q-46 -21 -80 -94.5t-34 -130.5z" />
+<glyph unicode="&#xf099;" horiz-adv-x="1664" d="M44 145q35 -4 78 -4q225 0 401 138q-105 2 -188 64.5t-114 159.5q33 -5 61 -5q43 0 85 11q-112 23 -185.5 111.5t-73.5 205.5v4q68 -38 146 -41q-66 44 -105 115t-39 154q0 88 44 163q121 -149 294.5 -238.5t371.5 -99.5q-8 38 -8 74q0 134 94.5 228.5t228.5 94.5 q140 0 236 -102q109 21 205 78q-37 -115 -142 -178q93 10 186 50q-67 -98 -162 -167q1 -14 1 -42q0 -130 -38 -259.5t-115.5 -248.5t-184.5 -210.5t-258 -146t-323 -54.5q-271 0 -496 145z" />
+<glyph unicode="&#xf09a;" horiz-adv-x="1024" d="M95 631v296h255v218q0 186 104 288.5t277 102.5q147 0 228 -12v-264h-157q-86 0 -116 -36t-30 -108v-189h293l-39 -296h-254v-759h-306v759h-255z" />
+<glyph unicode="&#xf09b;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5q0 -251 -146.5 -451.5t-378.5 -277.5q-27 -5 -39.5 7t-12.5 30v211q0 97 -52 142q57 6 102.5 18t94 39t81 66.5t53 105t20.5 150.5q0 121 -79 206q37 91 -8 204q-28 9 -81 -11t-92 -44 l-38 -24q-93 26 -192 26t-192 -26q-16 11 -42.5 27t-83.5 38.5t-86 13.5q-44 -113 -7 -204q-79 -85 -79 -206q0 -85 20.5 -150t52.5 -105t80.5 -67t94 -39t102.5 -18q-40 -36 -49 -103q-21 -10 -45 -15t-57 -5t-65.5 21.5t-55.5 62.5q-19 32 -48.5 52t-49.5 24l-20 3 q-21 0 -29 -4.5t-5 -11.5t9 -14t13 -12l7 -5q22 -10 43.5 -38t31.5 -51l10 -23q13 -38 44 -61.5t67 -30t69.5 -7t55.5 3.5l23 4q0 -38 0.5 -89t0.5 -54q0 -18 -13 -30t-40 -7q-232 77 -378.5 277.5t-146.5 451.5z" />
+<glyph unicode="&#xf09c;" horiz-adv-x="1664" d="M0 96v576q0 40 28 68t68 28h672v192q0 185 131.5 316.5t316.5 131.5t316.5 -131.5t131.5 -316.5v-256q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45v256q0 106 -75 181t-181 75t-181 -75t-75 -181v-192h96q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960 q-40 0 -68 28t-28 68z" />
+<glyph unicode="&#xf09d;" horiz-adv-x="1920" d="M0 32v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113zM128 32q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5v608h-1664v-608zM128 1024h1664v224q0 13 -9.5 22.5t-22.5 9.5h-1600 q-13 0 -22.5 -9.5t-9.5 -22.5v-224zM256 128v128h256v-128h-256zM640 128v128h384v-128h-384z" />
+<glyph unicode="&#xf09e;" horiz-adv-x="1408" d="M0 192q0 80 56 136t136 56t136 -56t56 -136t-56 -136t-136 -56t-136 56t-56 136zM0 697v135q0 29 21 47q17 17 43 17h5q160 -13 306 -80.5t259 -181.5q114 -113 181.5 -259t80.5 -306q2 -28 -17 -48q-18 -21 -47 -21h-135q-25 0 -43 16.5t-20 41.5q-22 229 -184.5 391.5 t-391.5 184.5q-25 2 -41.5 20t-16.5 43zM0 1201v143q0 28 20 46q18 18 44 18h3q262 -13 501.5 -120t425.5 -294q187 -186 294 -425.5t120 -501.5q2 -27 -18 -47q-18 -20 -46 -20h-143q-26 0 -44.5 17.5t-19.5 42.5q-12 215 -101 408.5t-231.5 336t-336 231.5t-408.5 102 q-25 1 -42.5 19.5t-17.5 43.5z" />
+<glyph unicode="&#xf0a0;" d="M0 160v320q0 25 16 75l197 606q17 53 63 86t101 33h782q55 0 101 -33t63 -86l197 -606q16 -50 16 -75v-320q0 -66 -47 -113t-113 -47h-1216q-66 0 -113 47t-47 113zM128 160q0 -13 9.5 -22.5t22.5 -9.5h1216q13 0 22.5 9.5t9.5 22.5v320q0 13 -9.5 22.5t-22.5 9.5h-1216 q-13 0 -22.5 -9.5t-9.5 -22.5v-320zM178 640h1180l-157 482q-4 13 -16 21.5t-26 8.5h-782q-14 0 -26 -8.5t-16 -21.5zM880 320q0 33 23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5t-23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5zM1136 320q0 33 23.5 56.5t56.5 23.5 t56.5 -23.5t23.5 -56.5t-23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5z" />
+<glyph unicode="&#xf0a1;" horiz-adv-x="1792" d="M0 672v192q0 66 47 113t113 47h480q435 0 896 384q52 0 90 -38t38 -90v-384q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5v-384q0 -52 -38 -90t-90 -38q-417 347 -812 380q-58 -19 -91 -66t-31 -100.5t40 -92.5q-20 -33 -23 -65.5t6 -58t33.5 -55t48 -50 t61.5 -50.5q-29 -58 -111.5 -83t-168.5 -11.5t-132 55.5q-7 23 -29.5 87.5t-32 94.5t-23 89t-15 101t3.5 98.5t22 110.5h-122q-66 0 -113 47t-47 113zM768 633q377 -42 768 -341v954q-394 -302 -768 -343v-270z" />
+<glyph unicode="&#xf0a2;" horiz-adv-x="1664" d="M0 128q190 161 287 397.5t97 498.5q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117t96 -262q0 -262 97 -498.5t287 -397.5q0 -52 -38 -90t-90 -38h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38 t-38 90zM183 128h1298q-164 181 -246.5 411.5t-82.5 484.5q0 256 -320 256t-320 -256q0 -254 -82.5 -484.5t-246.5 -411.5zM656 0q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16t-16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16z" />
+<glyph unicode="&#xf0a3;" d="M2 435q-10 42 20 70l138 135l-138 135q-30 28 -20 70q12 41 52 51l188 48l-53 186q-12 41 19 70q29 31 70 19l186 -53l48 188q10 41 51 51q41 12 70 -19l135 -139l135 139q29 30 70 19q41 -10 51 -51l48 -188l186 53q41 12 70 -19q31 -29 19 -70l-53 -186l188 -48 q40 -10 52 -51q10 -42 -20 -70l-138 -135l138 -135q30 -28 20 -70q-12 -41 -52 -51l-188 -48l53 -186q12 -41 -19 -70q-29 -31 -70 -19l-186 53l-48 -188q-10 -40 -51 -52q-12 -2 -19 -2q-31 0 -51 22l-135 138l-135 -138q-28 -30 -70 -20q-41 11 -51 52l-48 188l-186 -53 q-41 -12 -70 19q-31 29 -19 70l53 186l-188 48q-40 10 -52 51z" />
+<glyph unicode="&#xf0a4;" horiz-adv-x="1792" d="M0 128v640q0 53 37.5 90.5t90.5 37.5h288q10 0 21.5 4.5t23.5 14t22.5 18t24 22.5t20.5 21.5t19 21.5t14 17q65 74 100 129q13 21 33 62t37 72t40.5 63t55 49.5t69.5 17.5q125 0 206.5 -67t81.5 -189q0 -68 -22 -128h374q104 0 180 -76t76 -179q0 -105 -75.5 -181 t-180.5 -76h-169q-4 -62 -37 -119q3 -21 3 -43q0 -101 -60 -178q1 -139 -85 -219.5t-227 -80.5q-133 0 -322 69q-164 59 -223 59h-288q-53 0 -90.5 37.5t-37.5 90.5zM128 192q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45zM384 128h32q72 0 167 -32 t193.5 -64t179.5 -32q189 0 189 167q0 26 -5 56q30 16 47.5 52.5t17.5 73.5t-18 69q53 50 53 119q0 25 -10 55.5t-25 47.5h331q52 0 90 38t38 90q0 51 -39 89.5t-89 38.5h-576q0 20 15 48.5t33 55t33 68t15 84.5q0 67 -44.5 97.5t-115.5 30.5q-24 0 -90 -139 q-24 -44 -37 -65q-40 -64 -112 -145q-71 -81 -101 -106q-69 -57 -140 -57h-32v-640z" />
+<glyph unicode="&#xf0a5;" horiz-adv-x="1792" d="M0 769q0 103 76 179t180 76h374q-22 60 -22 128q0 122 81.5 189t206.5 67q38 0 69.5 -17.5t55 -49.5t40.5 -63t37 -72t33 -62q35 -55 100 -129q2 -3 14 -17t19 -21.5t20.5 -21.5t24 -22.5t22.5 -18t23.5 -14t21.5 -4.5h288q53 0 90.5 -37.5t37.5 -90.5v-640 q0 -53 -37.5 -90.5t-90.5 -37.5h-288q-59 0 -223 -59q-190 -69 -317 -69q-142 0 -230 77.5t-87 217.5l1 5q-61 76 -61 178q0 22 3 43q-33 57 -37 119h-169q-105 0 -180.5 76t-75.5 181zM128 768q0 -52 38 -90t90 -38h331q-15 -17 -25 -47.5t-10 -55.5q0 -69 53 -119 q-18 -32 -18 -69t17.5 -73.5t47.5 -52.5q-4 -24 -4 -56q0 -85 48.5 -126t135.5 -41q84 0 183 32t194 64t167 32h32v640h-32q-35 0 -67.5 12t-62.5 37t-50 46t-49 54q-2 3 -3.5 4.5t-4 4.5t-4.5 5q-72 81 -112 145q-14 22 -38 68q-1 3 -10.5 22.5t-18.5 36t-20 35.5 t-21.5 30.5t-18.5 11.5q-71 0 -115.5 -30.5t-44.5 -97.5q0 -43 15 -84.5t33 -68t33 -55t15 -48.5h-576q-50 0 -89 -38.5t-39 -89.5zM1536 192q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45z" />
+<glyph unicode="&#xf0a6;" d="M0 640q0 125 67 206.5t189 81.5q68 0 128 -22v374q0 104 76 180t179 76q105 0 181 -75.5t76 -180.5v-169q62 -4 119 -37q21 3 43 3q101 0 178 -60q139 1 219.5 -85t80.5 -227q0 -133 -69 -322q-59 -164 -59 -223v-288q0 -53 -37.5 -90.5t-90.5 -37.5h-640 q-53 0 -90.5 37.5t-37.5 90.5v288q0 10 -4.5 21.5t-14 23.5t-18 22.5t-22.5 24t-21.5 20.5t-21.5 19t-17 14q-74 65 -129 100q-21 13 -62 33t-72 37t-63 40.5t-49.5 55t-17.5 69.5zM128 640q0 -24 139 -90q44 -24 65 -37q64 -40 145 -112q81 -71 106 -101q57 -69 57 -140 v-32h640v32q0 72 32 167t64 193.5t32 179.5q0 189 -167 189q-26 0 -56 -5q-16 30 -52.5 47.5t-73.5 17.5t-69 -18q-50 53 -119 53q-25 0 -55.5 -10t-47.5 -25v331q0 52 -38 90t-90 38q-51 0 -89.5 -39t-38.5 -89v-576q-20 0 -48.5 15t-55 33t-68 33t-84.5 15 q-67 0 -97.5 -44.5t-30.5 -115.5zM1152 -64q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45z" />
+<glyph unicode="&#xf0a7;" d="M0 640q0 38 17.5 69.5t49.5 55t63 40.5t72 37t62 33q55 35 129 100q3 2 17 14t21.5 19t21.5 20.5t22.5 24t18 22.5t14 23.5t4.5 21.5v288q0 53 37.5 90.5t90.5 37.5h640q53 0 90.5 -37.5t37.5 -90.5v-288q0 -59 59 -223q69 -190 69 -317q0 -142 -77.5 -230t-217.5 -87 l-5 1q-76 -61 -178 -61q-22 0 -43 3q-54 -30 -119 -37v-169q0 -105 -76 -180.5t-181 -75.5q-103 0 -179 76t-76 180v374q-54 -22 -128 -22q-121 0 -188.5 81.5t-67.5 206.5zM128 640q0 -71 30.5 -115.5t97.5 -44.5q43 0 84.5 15t68 33t55 33t48.5 15v-576q0 -50 38.5 -89 t89.5 -39q52 0 90 38t38 90v331q46 -35 103 -35q69 0 119 53q32 -18 69 -18t73.5 17.5t52.5 47.5q24 -4 56 -4q85 0 126 48.5t41 135.5q0 84 -32 183t-64 194t-32 167v32h-640v-32q0 -35 -12 -67.5t-37 -62.5t-46 -50t-54 -49q-9 -8 -14 -12q-81 -72 -145 -112 q-22 -14 -68 -38q-3 -1 -22.5 -10.5t-36 -18.5t-35.5 -20t-30.5 -21.5t-11.5 -18.5zM1152 1344q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45z" />
+<glyph unicode="&#xf0a8;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM251 640q0 -27 18 -45l91 -91l362 -362q18 -18 45 -18t45 18l91 91q18 18 18 45t-18 45l-189 189h502 q26 0 45 19t19 45v128q0 26 -19 45t-45 19h-502l189 189q19 19 19 45t-19 45l-91 91q-18 18 -45 18t-45 -18l-362 -362l-91 -91q-18 -18 -18 -45z" />
+<glyph unicode="&#xf0a9;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM256 576q0 -26 19 -45t45 -19h502l-189 -189q-19 -19 -19 -45t19 -45l91 -91q18 -18 45 -18t45 18 l362 362l91 91q18 18 18 45t-18 45l-91 91l-362 362q-18 18 -45 18t-45 -18l-91 -91q-18 -18 -18 -45t18 -45l189 -189h-502q-26 0 -45 -19t-19 -45v-128z" />
+<glyph unicode="&#xf0aa;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM252 641q0 -27 18 -45l91 -91q18 -18 45 -18t45 18l189 189v-502q0 -26 19 -45t45 -19h128q26 0 45 19 t19 45v502l189 -189q19 -19 45 -19t45 19l91 91q18 18 18 45t-18 45l-362 362l-91 91q-18 18 -45 18t-45 -18l-91 -91l-362 -362q-18 -18 -18 -45z" />
+<glyph unicode="&#xf0ab;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM252 639q0 -27 18 -45l362 -362l91 -91q18 -18 45 -18t45 18l91 91l362 362q18 18 18 45t-18 45l-91 91 q-18 18 -45 18t-45 -18l-189 -189v502q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-502l-189 189q-19 19 -45 19t-45 -19l-91 -91q-18 -18 -18 -45z" />
+<glyph unicode="&#xf0ac;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM226 979q7 -7 12 -8q4 -1 5 -9t2.5 -11t11.5 3q9 -8 3 -19q1 1 44 -27q19 -17 21 -21q3 -11 -10 -18 q-1 2 -9 9t-9 4q-3 -5 0.5 -18.5t10.5 -12.5q-7 0 -9.5 -16t-2.5 -35.5t-1 -23.5l2 -1q-3 -12 5.5 -34.5t21.5 -19.5q-13 -3 20 -43q6 -8 8 -9q3 -2 12 -7.5t15 -10t10 -10.5q4 -5 10 -22.5t14 -23.5q-2 -6 9.5 -20t10.5 -23q-1 0 -2.5 -1t-2.5 -1q3 -7 15.5 -14t15.5 -13 q1 -3 2 -10t3 -11t8 -2q2 20 -24 62q-15 25 -17 29q-3 5 -5.5 15.5t-4.5 14.5q2 0 6 -1.5t8.5 -3.5t7.5 -4t2 -3q-3 -7 2 -17.5t12 -18.5t17 -19t12 -13q6 -6 14 -19.5t0 -13.5q9 0 20 -10t17 -20q5 -8 8 -26t5 -24q2 -7 8.5 -13.5t12.5 -9.5l16 -8t13 -7q5 -2 18.5 -10.5 t21.5 -11.5q10 -4 16 -4t14.5 2.5t13.5 3.5q15 2 29 -15t21 -21q36 -19 55 -11q-2 -1 0.5 -7.5t8 -15.5t9 -14.5t5.5 -8.5q5 -6 18 -15t18 -15q6 4 7 9q-3 -8 7 -20t18 -10q14 3 14 32q-31 -15 -49 18q0 1 -2.5 5.5t-4 8.5t-2.5 8.5t0 7.5t5 3q9 0 10 3.5t-2 12.5t-4 13 q-1 8 -11 20t-12 15q-5 -9 -16 -8t-16 9q0 -1 -1.5 -5.5t-1.5 -6.5q-13 0 -15 1q1 3 2.5 17.5t3.5 22.5q1 4 5.5 12t7.5 14.5t4 12.5t-4.5 9.5t-17.5 2.5q-19 -1 -26 -20q-1 -3 -3 -10.5t-5 -11.5t-9 -7q-7 -3 -24 -2t-24 5q-13 8 -22.5 29t-9.5 37q0 10 2.5 26.5t3 25 t-5.5 24.5q3 2 9 9.5t10 10.5q2 1 4.5 1.5t4.5 0t4 1.5t3 6q-1 1 -4 3q-3 3 -4 3q7 -3 28.5 1.5t27.5 -1.5q15 -11 22 2q0 1 -2.5 9.5t-0.5 13.5q5 -27 29 -9q3 -3 15.5 -5t17.5 -5q3 -2 7 -5.5t5.5 -4.5t5 0.5t8.5 6.5q10 -14 12 -24q11 -40 19 -44q7 -3 11 -2t4.5 9.5 t0 14t-1.5 12.5l-1 8v18l-1 8q-15 3 -18.5 12t1.5 18.5t15 18.5q1 1 8 3.5t15.5 6.5t12.5 8q21 19 15 35q7 0 11 9q-1 0 -5 3t-7.5 5t-4.5 2q9 5 2 16q5 3 7.5 11t7.5 10q9 -12 21 -2q7 8 1 16q5 7 20.5 10.5t18.5 9.5q7 -2 8 2t1 12t3 12q4 5 15 9t13 5l17 11q3 4 0 4 q18 -2 31 11q10 11 -6 20q3 6 -3 9.5t-15 5.5q3 1 11.5 0.5t10.5 1.5q15 10 -7 16q-17 5 -43 -12q-2 -1 -9.5 -9.5t-13.5 -9.5q2 0 4.5 5t5 11t3.5 7q6 7 22 15q14 6 52 12q34 8 51 -11q-2 2 9.5 13t14.5 12q3 2 15 4.5t15 7.5l2 22q-12 -1 -17.5 7t-6.5 21q0 -2 -6 -8 q0 7 -4.5 8t-11.5 -1t-9 -1q-10 3 -15 7.5t-8 16.5t-4 15q-2 5 -9.5 10.5t-9.5 10.5q-1 2 -2.5 5.5t-3 6.5t-4 5.5t-5.5 2.5t-7 -5t-7.5 -10t-4.5 -5q-3 2 -6 1.5t-4.5 -1t-4.5 -3t-5 -3.5q-3 -2 -8.5 -3t-8.5 -2q15 5 -1 11q-10 4 -16 3q9 4 7.5 12t-8.5 14h5 q-1 4 -8.5 8.5t-17.5 8.5t-13 6q-8 5 -34 9.5t-33 0.5q-5 -6 -4.5 -10.5t4 -14t3.5 -12.5q1 -6 -5.5 -13t-6.5 -12q0 -7 14 -15.5t10 -21.5q-3 -8 -16 -16t-16 -12q-5 -8 -1.5 -18.5t10.5 -16.5q2 -2 1.5 -4t-3.5 -4.5t-5.5 -4t-6.5 -3.5l-3 -2q-11 -5 -20.5 6t-13.5 26 q-7 25 -16 30q-23 8 -29 -1q-5 13 -41 26q-25 9 -58 4q6 1 0 15q-7 15 -19 12q3 6 4 17.5t1 13.5q3 13 12 23q1 1 7 8.5t9.5 13.5t0.5 6q35 -4 50 11q5 5 11.5 17t10.5 17q9 6 14 5.5t14.5 -5.5t14.5 -5q14 -1 15.5 11t-7.5 20q12 -1 3 17q-5 7 -8 9q-12 4 -27 -5 q-8 -4 2 -8q-1 1 -9.5 -10.5t-16.5 -17.5t-16 5q-1 1 -5.5 13.5t-9.5 13.5q-8 0 -16 -15q3 8 -11 15t-24 8q19 12 -8 27q-7 4 -20.5 5t-19.5 -4q-5 -7 -5.5 -11.5t5 -8t10.5 -5.5t11.5 -4t8.5 -3q14 -10 8 -14q-2 -1 -8.5 -3.5t-11.5 -4.5t-6 -4q-3 -4 0 -14t-2 -14 q-5 5 -9 17.5t-7 16.5q7 -9 -25 -6l-10 1q-4 0 -16 -2t-20.5 -1t-13.5 8q-4 8 0 20q1 4 4 2q-4 3 -11 9.5t-10 8.5q-46 -15 -94 -41q6 -1 12 1q5 2 13 6.5t10 5.5q34 14 42 7l5 5q14 -16 20 -25q-7 4 -30 1q-20 -6 -22 -12q7 -12 5 -18q-4 3 -11.5 10t-14.5 11t-15 5 q-16 0 -22 -1q-146 -80 -235 -222zM877 26q0 -6 2 -16q206 36 351 189q-3 3 -12.5 4.5t-12.5 3.5q-18 7 -24 8q1 7 -2.5 13t-8 9t-12.5 8t-11 7q-2 2 -7 6t-7 5.5t-7.5 4.5t-8.5 2t-10 -1l-3 -1q-3 -1 -5.5 -2.5t-5.5 -3t-4 -3t0 -2.5q-21 17 -36 22q-5 1 -11 5.5t-10.5 7 t-10 1.5t-11.5 -7q-5 -5 -6 
+-15t-2 -13q-7 5 0 17.5t2 18.5q-3 6 -10.5 4.5t-12 -4.5t-11.5 -8.5t-9 -6.5t-8.5 -5.5t-8.5 -7.5q-3 -4 -6 -12t-5 -11q-2 4 -11.5 6.5t-9.5 5.5q2 -10 4 -35t5 -38q7 -31 -12 -48q-27 -25 -29 -40q-4 -22 12 -26q0 -7 -8 -20.5t-7 -21.5z" />
+<glyph unicode="&#xf0ad;" horiz-adv-x="1664" d="M21 0q0 53 38 91l681 681q39 -98 114.5 -173.5t173.5 -114.5l-682 -682q-37 -37 -90 -37q-52 0 -91 37l-106 108q-38 36 -38 90zM256 64q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45zM768 960q0 185 131.5 316.5t316.5 131.5q58 0 121.5 -16.5 t107.5 -46.5q16 -11 16 -28t-16 -28l-293 -169v-224l193 -107q5 3 79 48.5t135.5 81t70.5 35.5q15 0 23.5 -10t8.5 -25q0 -39 -23 -106q-47 -134 -164.5 -217.5t-258.5 -83.5q-185 0 -316.5 131.5t-131.5 316.5z" />
+<glyph unicode="&#xf0ae;" horiz-adv-x="1792" d="M0 64v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45zM0 576v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45zM0 1088v256q0 26 19 45t45 19h1664 q26 0 45 -19t19 -45v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45zM640 640h1024v128h-1024v-128zM1024 128h640v128h-640v-128zM1280 1152h384v128h-384v-128z" />
+<glyph unicode="&#xf0b0;" horiz-adv-x="1408" d="M5 1241q17 39 59 39h1280q42 0 59 -39q17 -41 -14 -70l-493 -493v-742q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-256 256q-19 19 -19 45v486l-493 493q-31 29 -14 70z" />
+<glyph unicode="&#xf0b1;" horiz-adv-x="1792" d="M0 160v480h672v-160q0 -26 19 -45t45 -19h320q26 0 45 19t19 45v160h672v-480q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113zM0 736v384q0 66 47 113t113 47h352v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h352q66 0 113 -47t47 -113v-384h-1792z M640 1280h512v128h-512v-128zM768 512v128h256v-128h-256z" />
+<glyph unicode="&#xf0b2;" d="M0 -64v448q0 42 40 59q39 17 69 -14l144 -144l355 355l-355 355l-144 -144q-19 -19 -45 -19q-12 0 -24 5q-40 17 -40 59v448q0 26 19 45t45 19h448q42 0 59 -40q17 -39 -14 -69l-144 -144l355 -355l355 355l-144 144q-31 30 -14 69q17 40 59 40h448q26 0 45 -19t19 -45 v-448q0 -42 -39 -59q-13 -5 -25 -5q-26 0 -45 19l-144 144l-355 -355l355 -355l144 144q29 31 70 14q39 -17 39 -59v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l144 144l-355 355l-355 -355l144 -144q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19 t-19 45z" />
+<glyph unicode="&#xf0c0;" horiz-adv-x="1920" d="M0 671q0 353 124 353q6 0 43.5 -21t97.5 -42.5t119 -21.5q67 0 133 23q-5 -37 -5 -66q0 -139 81 -256q-162 -5 -265 -128h-134q-82 0 -138 40.5t-56 118.5zM128 1280q0 106 75 181t181 75t181 -75t75 -181t-75 -181t-181 -75t-181 75t-75 181zM256 3q0 53 3.5 103.5 t14 109t26.5 108.5t43 97.5t62 81t85.5 53.5t111.5 20q10 0 43 -21.5t73 -48t107 -48t135 -21.5t135 21.5t107 48t73 48t43 21.5q61 0 111.5 -20t85.5 -53.5t62 -81t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5 zM576 896q0 159 112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5t-112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5zM1280 1280q0 106 75 181t181 75t181 -75t75 -181t-75 -181t-181 -75t-181 75t-75 181zM1327 640q81 117 81 256q0 29 -5 66q66 -23 133 -23 q59 0 119 21.5t97.5 42.5t43.5 21q124 0 124 -353q0 -78 -56 -118.5t-138 -40.5h-134q-103 123 -265 128z" />
+<glyph unicode="&#xf0c1;" horiz-adv-x="1664" d="M16 1088q0 120 85 203l147 146q83 83 203 83q121 0 204 -85l206 -207q83 -83 83 -203q0 -123 -88 -209l88 -88q86 88 208 88q120 0 204 -84l208 -208q84 -84 84 -204t-85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-206 207q-83 83 -83 203q0 123 88 209l-88 88 q-86 -88 -208 -88q-120 0 -204 84l-208 208q-84 84 -84 204zM208 1088q0 -40 28 -68l208 -208q27 -27 68 -27q42 0 72 31q-3 3 -19 18.5t-21.5 21.5t-15 19t-13 25.5t-3.5 27.5q0 40 28 68t68 28q15 0 27.5 -3.5t25.5 -13t19 -15t21.5 -21.5t18.5 -19q33 31 33 73 q0 40 -28 68l-206 207q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67zM911 383q0 -40 28 -68l206 -207q27 -27 68 -27q40 0 68 26l147 146q28 28 28 67q0 40 -28 68l-208 208q-28 28 -68 28q-42 0 -72 -32q3 -3 19 -18.5t21.5 -21.5t15 -19t13 -25.5t3.5 -27.5 q0 -40 -28 -68t-68 -28q-15 0 -27.5 3.5t-25.5 13t-19 15t-21.5 21.5t-18.5 19q-33 -31 -33 -73z" />
+<glyph unicode="&#xf0c2;" horiz-adv-x="1920" d="M0 448q0 132 71 241.5t187 163.5q-2 28 -2 43q0 212 150 362t362 150q158 0 286.5 -88t187.5 -230q70 62 166 62q106 0 181 -75t75 -181q0 -75 -41 -138q129 -30 213 -134.5t84 -239.5q0 -159 -112.5 -271.5t-271.5 -112.5h-1088q-185 0 -316.5 131.5t-131.5 316.5z" />
+<glyph unicode="&#xf0c3;" horiz-adv-x="1664" d="M115.5 -64.5q-34.5 63.5 21.5 152.5l503 793v399h-64q-26 0 -45 19t-19 45t19 45t45 19h512q26 0 45 -19t19 -45t-19 -45t-45 -19h-64v-399l503 -793q56 -89 21.5 -152.5t-140.5 -63.5h-1152q-106 0 -140.5 63.5zM476 384h712l-272 429l-20 31v37v399h-128v-399v-37 l-20 -31z" />
+<glyph unicode="&#xf0c4;" horiz-adv-x="1792" d="M1 157q7 76 56 147t131 124q132 84 278 84q83 0 151 -31q9 13 22 22l122 73l-122 73q-13 9 -22 22q-68 -31 -151 -31q-146 0 -278 84q-82 53 -131 124t-56 147q-5 59 15.5 113t63.5 93q85 79 222 79q145 0 277 -84q83 -52 132 -123t56 -148q4 -48 -10 -97q4 -1 12 -5 l110 -66l690 387q14 8 31 8q16 0 29 -7l128 -64q30 -16 35 -51q3 -36 -25 -56l-507 -398l507 -398q28 -20 25 -56q-5 -35 -35 -51l-128 -64q-13 -7 -29 -7q-17 0 -31 8l-690 387l-110 -66q-8 -4 -12 -5q14 -49 10 -97q-7 -77 -56 -147.5t-132 -123.5q-132 -84 -277 -84 q-136 0 -222 78q-90 84 -79 207zM168 176q-25 -66 21 -108q39 -36 113 -36q100 0 192 59q81 51 106 117t-21 108q-39 36 -113 36q-100 0 -192 -59q-81 -51 -106 -117zM168 976q25 -66 106 -117q92 -59 192 -59q74 0 113 36q46 42 21 108t-106 117q-92 59 -192 59 q-74 0 -113 -36q-46 -42 -21 -108zM672 448l9 -8q2 -2 7 -6q4 -4 11 -12t11 -12l26 -26l160 96l96 -32l736 576l-128 64l-768 -431v-113zM672 704l96 -58v11q0 36 33 56l14 8l-79 47l-26 -26q-3 -3 -10 -11t-12 -12q-2 -2 -4 -3.5t-3 -2.5zM896 576q0 26 19 45t45 19t45 -19 t19 -45t-19 -45t-45 -19t-45 19t-19 45zM1018 391l582 -327l128 64l-520 408l-177 -138q-2 -3 -13 -7z" />
+<glyph unicode="&#xf0c5;" horiz-adv-x="1792" d="M0 224v672q0 40 20 88t48 76l408 408q28 28 76 48t88 20h416q40 0 68 -28t28 -68v-328q68 40 128 40h416q40 0 68 -28t28 -68v-1216q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v288h-544q-40 0 -68 28t-28 68zM128 256h512v256q0 40 20 88t48 76l316 316v416h-384 v-416q0 -40 -28 -68t-68 -28h-416v-640zM213 1024h299v299zM768 -128h896v1152h-384v-416q0 -40 -28 -68t-68 -28h-416v-640zM853 640h299v299z" />
+<glyph unicode="&#xf0c6;" horiz-adv-x="1408" d="M4 1023q0 159 110 270t269 111q158 0 273 -113l605 -606q10 -10 10 -22q0 -16 -30.5 -46.5t-46.5 -30.5q-13 0 -23 10l-606 607q-79 77 -181 77q-106 0 -179 -75t-73 -181q0 -105 76 -181l776 -777q63 -63 145 -63q64 0 106 42t42 106q0 82 -63 145l-581 581 q-26 24 -60 24q-29 0 -48 -19t-19 -48q0 -32 25 -59l410 -410q10 -10 10 -22q0 -16 -31 -47t-47 -31q-12 0 -22 10l-410 410q-63 61 -63 149q0 82 57 139t139 57q88 0 149 -63l581 -581q100 -98 100 -235q0 -117 -79 -196t-196 -79q-135 0 -235 100l-777 776 q-113 115 -113 271z" />
+<glyph unicode="&#xf0c7;" d="M0 -32v1344q0 40 28 68t68 28h928q40 0 88 -20t76 -48l280 -280q28 -28 48 -76t20 -88v-928q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 0h128v416q0 40 28 68t68 28h832q40 0 68 -28t28 -68v-416h128v896q0 14 -10 38.5t-20 34.5l-281 281q-10 10 -34 20 t-39 10v-416q0 -40 -28 -68t-68 -28h-576q-40 0 -68 28t-28 68v416h-128v-1280zM384 0h768v384h-768v-384zM640 928q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5v320q0 13 -9.5 22.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-320z" />
+<glyph unicode="&#xf0c8;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5z" />
+<glyph unicode="&#xf0c9;" d="M0 64v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45zM0 576v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45zM0 1088v128q0 26 19 45t45 19h1408 q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf0ca;" horiz-adv-x="1792" d="M0 128q0 80 56 136t136 56t136 -56t56 -136t-56 -136t-136 -56t-136 56t-56 136zM0 640q0 80 56 136t136 56t136 -56t56 -136t-56 -136t-136 -56t-136 56t-56 136zM0 1152q0 80 56 136t136 56t136 -56t56 -136t-56 -136t-136 -56t-136 56t-56 136zM512 32v192 q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5zM512 544v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5z M512 1056v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf0cb;" horiz-adv-x="1792" d="M15 438q0 51 23.5 93t56.5 68t66 47.5t56.5 43.5t23.5 45q0 25 -14.5 38.5t-39.5 13.5q-46 0 -81 -58l-85 59q24 51 71.5 79.5t105.5 28.5q73 0 123 -41.5t50 -112.5q0 -50 -34 -91.5t-75 -64.5t-75.5 -50.5t-35.5 -52.5h127v60h105v-159h-362q-6 36 -6 54zM19 -190 l57 88q49 -45 106 -45q29 0 50.5 14.5t21.5 42.5q0 64 -105 56l-26 56q8 10 32.5 43.5t42.5 54t37 38.5v1q-16 0 -48.5 -1t-48.5 -1v-53h-106v152h333v-88l-95 -115q51 -12 81 -49t30 -88q0 -80 -54.5 -126t-135.5 -46q-106 0 -172 66zM34 1400l136 127h106v-404h108v-99 h-335v99h107q0 41 0.5 122t0.5 121v12h-2q-8 -17 -50 -54zM512 32v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5zM512 544v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5v-192 q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5zM512 1056v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf0cc;" horiz-adv-x="1792" d="M0 544v64q0 14 9 23t23 9h1728q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-1728q-14 0 -23 9t-9 23zM384 972q0 181 134 309q133 127 393 127q50 0 167 -19q66 -12 177 -48q10 -38 21 -118q14 -123 14 -183q0 -18 -5 -45l-12 -3l-84 6l-14 2q-50 149 -103 205 q-88 91 -210 91q-114 0 -182 -59q-67 -58 -67 -146q0 -73 66 -140t279 -129q69 -20 173 -66q58 -28 95 -52h-743q-28 35 -51 80q-48 97 -48 188zM414 154q-1 30 0 68l2 37v44l102 2q15 -34 30 -71t22.5 -56t12.5 -27q35 -57 80 -94q43 -36 105 -57q59 -22 132 -22 q64 0 139 27q77 26 122 86q47 61 47 129q0 84 -81 157q-34 29 -137 71h411q7 -39 7 -92q0 -111 -41 -212q-23 -55 -71 -104q-37 -35 -109 -81q-80 -48 -153 -66q-80 -21 -203 -21q-114 0 -195 23l-140 40q-57 16 -72 28q-8 8 -8 22v13q0 108 -2 156z" />
+<glyph unicode="&#xf0cd;" d="M0 -32v-64q0 -14 9 -23t23 -9h1472q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-1472q-14 0 -23 -9t-9 -23zM0 1405q13 1 40 1q60 0 112 -4q132 -7 166 -7q86 0 168 3q116 4 146 5q56 0 86 2l-1 -14l2 -64v-9q-60 -9 -124 -9q-60 0 -79 -25q-13 -14 -13 -132q0 -13 0.5 -32.5 t0.5 -25.5l1 -229l14 -280q6 -124 51 -202q35 -59 96 -92q88 -47 177 -47q104 0 191 28q56 18 99 51q48 36 65 64q36 56 53 114q21 73 21 229q0 79 -3.5 128t-11 122.5t-13.5 159.5l-4 59q-5 67 -24 88q-34 35 -77 34l-100 -2l-14 3l2 86h84l205 -10q76 -3 196 10l18 -2 q6 -38 6 -51q0 -7 -4 -31q-45 -12 -84 -13q-73 -11 -79 -17q-15 -15 -15 -41q0 -7 1.5 -27t1.5 -31q8 -19 22 -396q6 -195 -15 -304q-15 -76 -41 -122q-38 -65 -112 -123q-75 -57 -182 -89q-109 -33 -255 -33q-167 0 -284 46q-119 47 -179 122q-61 76 -83 195 q-16 80 -16 237v333q0 188 -17 213q-25 36 -147 39q-37 2 -45 4z" />
+<glyph unicode="&#xf0ce;" horiz-adv-x="1664" d="M0 160v1088q0 66 47 113t113 47h1344q66 0 113 -47t47 -113v-1088q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113zM128 160q0 -14 9 -23t23 -9h320q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192zM128 544q0 -14 9 -23t23 -9h320 q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192zM128 928q0 -14 9 -23t23 -9h320q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192zM640 160q0 -14 9 -23t23 -9h320q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9 t-9 -23v-192zM640 544q0 -14 9 -23t23 -9h320q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192zM640 928q0 -14 9 -23t23 -9h320q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192zM1152 160q0 -14 9 -23t23 -9h320q14 0 23 9t9 23 v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192zM1152 544q0 -14 9 -23t23 -9h320q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192zM1152 928q0 -14 9 -23t23 -9h320q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192z" />
+<glyph unicode="&#xf0d0;" horiz-adv-x="1664" d="M27 160q0 27 18 45l1286 1286q18 18 45 18t45 -18l198 -198q18 -18 18 -45t-18 -45l-1286 -1286q-18 -18 -45 -18t-45 18l-198 198q-18 18 -18 45zM128 1408l98 30l30 98l30 -98l98 -30l-98 -30l-30 -98l-30 98zM320 1216l196 60l60 196l60 -196l196 -60l-196 -60 l-60 -196l-60 196zM768 1408l98 30l30 98l30 -98l98 -30l-98 -30l-30 -98l-30 98zM1083 1062l107 -107l293 293l-107 107zM1408 768l98 30l30 98l30 -98l98 -30l-98 -30l-30 -98l-30 98z" />
+<glyph unicode="&#xf0d1;" horiz-adv-x="1792" d="M64 192q0 26 19 45t45 19v320q0 8 -0.5 35t0 38t2.5 34.5t6.5 37t14 30.5t22.5 30l198 198q19 19 50.5 32t58.5 13h160v192q0 26 19 45t45 19h1024q26 0 45 -19t19 -45v-1024q0 -15 -4 -26.5t-13.5 -18.5t-16.5 -11.5t-23.5 -6t-22.5 -2t-25.5 0t-22.5 0.5 q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-64q-3 0 -22.5 -0.5t-25.5 0t-22.5 2t-23.5 6t-16.5 11.5t-13.5 18.5t-4 26.5zM256 640h384v256h-158q-13 0 -22 -9l-195 -195q-9 -9 -9 -22v-30zM384 128q0 -52 38 -90t90 -38 t90 38t38 90t-38 90t-90 38t-90 -38t-38 -90zM1280 128q0 -52 38 -90t90 -38t90 38t38 90t-38 90t-90 38t-90 -38t-38 -90z" />
+<glyph unicode="&#xf0d2;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103q-111 0 -218 32q59 93 78 164q9 34 54 211q20 -39 73 -67.5t114 -28.5q121 0 216 68.5t147 188.5t52 270q0 114 -59.5 214t-172.5 163t-255 63 q-105 0 -196 -29t-154.5 -77t-109 -110.5t-67 -129.5t-21.5 -134q0 -104 40 -183t117 -111q30 -12 38 20q2 7 8 31t8 30q6 23 -11 43q-51 61 -51 151q0 151 104.5 259.5t273.5 108.5q151 0 235.5 -82t84.5 -213q0 -170 -68.5 -289t-175.5 -119q-61 0 -98 43.5t-23 104.5 q8 35 26.5 93.5t30 103t11.5 75.5q0 50 -27 83t-77 33q-62 0 -105 -57t-43 -142q0 -73 25 -122l-99 -418q-17 -70 -13 -177q-206 91 -333 281t-127 423z" />
+<glyph unicode="&#xf0d3;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-725q85 122 108 210q9 34 53 209q21 -39 73.5 -67t112.5 -28q181 0 295.5 147.5t114.5 373.5q0 84 -35 162.5t-96.5 139t-152.5 97t-197 36.5 q-104 0 -194.5 -28.5t-153 -76.5t-107.5 -109.5t-66.5 -128t-21.5 -132.5q0 -102 39.5 -180t116.5 -110q13 -5 23.5 0t14.5 19q10 44 15 61q6 23 -11 42q-50 62 -50 150q0 150 103.5 256.5t270.5 106.5q149 0 232.5 -81t83.5 -210q0 -168 -67.5 -286t-173.5 -118 q-60 0 -97 43.5t-23 103.5q8 34 26.5 92.5t29.5 102t11 74.5q0 49 -26.5 81.5t-75.5 32.5q-61 0 -103.5 -56.5t-42.5 -139.5q0 -72 24 -121l-98 -414q-24 -100 -7 -254h-183q-119 0 -203.5 84.5t-84.5 203.5z" />
+<glyph unicode="&#xf0d4;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM276 309q0 -43 18.5 -77.5t48.5 -56.5t69 -37t77.5 -21t76.5 -6q60 0 120.5 15.5t113.5 46t86 82.5t33 117 q0 49 -20 89.5t-49 66.5t-58 47.5t-49 44t-20 44.5t15.5 42.5t37.5 39.5t44 42t37.5 59.5t15.5 82.5q0 60 -22.5 99.5t-72.5 90.5h83l88 64h-265q-85 0 -161 -32t-127.5 -98t-51.5 -153q0 -93 64.5 -154.5t158.5 -61.5q22 0 43 3q-13 -29 -13 -54q0 -44 40 -94 q-175 -12 -257 -63q-47 -29 -75.5 -73t-28.5 -95zM395 338q0 46 25 80t65.5 51.5t82 25t84.5 7.5q20 0 31 -2q2 -1 23 -16.5t26 -19t23 -18t24.5 -22t19 -22.5t17 -26t9 -26.5t4.5 -31.5q0 -76 -58.5 -112.5t-139.5 -36.5q-41 0 -80.5 9.5t-75.5 28.5t-58 53t-22 78z M462 969q0 61 32 104t92 43q53 0 93.5 -45t58 -101t17.5 -107q0 -60 -33 -99.5t-92 -39.5q-53 0 -93 42.5t-57.5 96.5t-17.5 106zM960 672h128v-160h64v160h128v64h-128v128h-64v-128h-128v-64z" />
+<glyph unicode="&#xf0d5;" horiz-adv-x="1664" d="M32 182q0 81 44.5 150t118.5 115q131 82 404 100q-32 42 -47.5 74t-15.5 73q0 36 21 85q-46 -4 -68 -4q-148 0 -249.5 96.5t-101.5 244.5q0 82 36 159t99 131q77 66 182.5 98t217.5 32h418l-138 -88h-131q74 -63 112 -133t38 -160q0 -72 -24.5 -129.5t-59 -93t-69.5 -65 t-59.5 -61.5t-24.5 -66q0 -36 32 -70.5t77.5 -68t90.5 -73.5t77 -104t32 -142q0 -90 -48 -173q-72 -122 -211 -179.5t-298 -57.5q-132 0 -246.5 41.5t-171.5 137.5q-37 60 -37 131zM218 228q0 -70 35 -123.5t91.5 -83t119 -44t127.5 -14.5q58 0 111.5 13t99 39t73 73 t27.5 109q0 25 -7 49t-14.5 42t-27 41.5t-29.5 35t-38.5 34.5t-36.5 29t-41.5 30t-36.5 26q-16 2 -48 2q-53 0 -105 -7t-107.5 -25t-97 -46t-68.5 -74.5t-27 -105.5zM324 1222q0 -46 10 -97.5t31.5 -103t52 -92.5t75 -67t96.5 -26q38 0 78 16.5t66 43.5q53 57 53 159 q0 58 -17 125t-48.5 129.5t-84.5 103.5t-117 41q-42 0 -82.5 -19.5t-65.5 -52.5q-47 -59 -47 -160zM1084 731v108h212v217h105v-217h213v-108h-213v-219h-105v219h-212z" />
+<glyph unicode="&#xf0d6;" horiz-adv-x="1920" d="M0 64v1152q0 26 19 45t45 19h1792q26 0 45 -19t19 -45v-1152q0 -26 -19 -45t-45 -19h-1792q-26 0 -45 19t-19 45zM128 384q106 0 181 -75t75 -181h1152q0 106 75 181t181 75v512q-106 0 -181 75t-75 181h-1152q0 -106 -75 -181t-181 -75v-512zM640 640q0 70 21 142 t59.5 134t101.5 101t138 39t138 -39t101.5 -101t59.5 -134t21 -142t-21 -142t-59.5 -134t-101.5 -101t-138 -39t-138 39t-101.5 101t-59.5 134t-21 142zM762 791l77 -80q42 37 55 57h2v-288h-128v-96h384v96h-128v448h-114z" />
+<glyph unicode="&#xf0d7;" horiz-adv-x="1024" d="M0 832q0 26 19 45t45 19h896q26 0 45 -19t19 -45t-19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45z" />
+<glyph unicode="&#xf0d8;" horiz-adv-x="1024" d="M0 320q0 26 19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45t-19 -45t-45 -19h-896q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf0d9;" horiz-adv-x="640" d="M64 640q0 26 19 45l448 448q19 19 45 19t45 -19t19 -45v-896q0 -26 -19 -45t-45 -19t-45 19l-448 448q-19 19 -19 45z" />
+<glyph unicode="&#xf0da;" horiz-adv-x="640" d="M0 192v896q0 26 19 45t45 19t45 -19l448 -448q19 -19 19 -45t-19 -45l-448 -448q-19 -19 -45 -19t-45 19t-19 45z" />
+<glyph unicode="&#xf0db;" horiz-adv-x="1664" d="M0 32v1216q0 66 47 113t113 47h1344q66 0 113 -47t47 -113v-1216q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113zM128 32q0 -13 9.5 -22.5t22.5 -9.5h608v1152h-640v-1120zM896 0h608q13 0 22.5 9.5t9.5 22.5v1120h-640v-1152z" />
+<glyph unicode="&#xf0dc;" horiz-adv-x="1024" d="M0 448q0 26 19 45t45 19h896q26 0 45 -19t19 -45t-19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45zM0 832q0 26 19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45t-19 -45t-45 -19h-896q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf0dd;" horiz-adv-x="1024" d="M0 448q0 26 19 45t45 19h896q26 0 45 -19t19 -45t-19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45z" />
+<glyph unicode="&#xf0de;" horiz-adv-x="1024" d="M0 832q0 26 19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45t-19 -45t-45 -19h-896q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf0e0;" horiz-adv-x="1792" d="M0 32v794q44 -49 101 -87q362 -246 497 -345q57 -42 92.5 -65.5t94.5 -48t110 -24.5h1h1q51 0 110 24.5t94.5 48t92.5 65.5q170 123 498 345q57 39 100 87v-794q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113zM0 1098q0 78 41.5 130t118.5 52h1472 q65 0 112.5 -47t47.5 -113q0 -79 -49 -151t-122 -123q-376 -261 -468 -325q-10 -7 -42.5 -30.5t-54 -38t-52 -32.5t-57.5 -27t-50 -9h-1h-1q-23 0 -50 9t-57.5 27t-52 32.5t-54 38t-42.5 30.5q-91 64 -262 182.5t-205 142.5q-62 42 -117 115.5t-55 136.5z" />
+<glyph unicode="&#xf0e1;" d="M0 1217q0 74 51.5 122.5t134.5 48.5t133 -48.5t51 -122.5q1 -73 -50.5 -122t-135.5 -49h-2q-82 0 -132 49t-50 122zM19 -80v991h330v-991h-330zM531 -80q2 399 2 647t-1 296l-1 48h329v-144h-2q20 32 41 56t56.5 52t87 43.5t114.5 15.5q171 0 275 -113.5t104 -332.5v-568 h-329v530q0 105 -40.5 164.5t-126.5 59.5q-63 0 -105.5 -34.5t-63.5 -85.5q-11 -30 -11 -81v-553h-329z" />
+<glyph unicode="&#xf0e2;" d="M0 832v448q0 42 40 59q39 17 69 -14l130 -129q107 101 244.5 156.5t284.5 55.5q156 0 298 -61t245 -164t164 -245t61 -298t-61 -298t-164 -245t-245 -164t-298 -61q-172 0 -327 72.5t-264 204.5q-7 10 -6.5 22.5t8.5 20.5l137 138q10 9 25 9q16 -2 23 -12 q73 -95 179 -147t225 -52q104 0 198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5t-40.5 198.5t-109.5 163.5t-163.5 109.5t-198.5 40.5q-98 0 -188 -35.5t-160 -101.5l137 -138q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf0e3;" horiz-adv-x="1792" d="M40 736q0 13 4.5 26t9 22t15.5 22t16.5 18.5t20.5 19t18 16.5q30 28 68 28q10 0 18 -1.5t16.5 -5.5t13.5 -6t13.5 -10t11.5 -10t13 -12.5t12 -12.5q-14 14 -14 34t14 34l348 348q14 14 34 14t34 -14q-2 2 -12.5 12t-12.5 13t-10 11.5t-10 13.5t-6 13.5t-5.5 16.5t-1.5 18 q0 38 28 68q3 3 16.5 18t19 20.5t18.5 16.5t22 15.5t22 9t26 4.5q40 0 68 -28l408 -408q28 -28 28 -68q0 -13 -4.5 -26t-9 -22t-15.5 -22t-16.5 -18.5t-20.5 -19t-18 -16.5q-30 -28 -68 -28q-10 0 -18 1.5t-16.5 5.5t-13.5 6t-13.5 10t-11.5 10t-13 12.5t-12 12.5 q14 -14 14 -34t-14 -34l-126 -126l256 -256q43 43 96 43q52 0 91 -37l363 -363q37 -39 37 -91q0 -53 -37 -90l-107 -108q-39 -37 -91 -37q-53 0 -90 37l-363 364q-38 36 -38 90q0 53 43 96l-256 256l-126 -126q-14 -14 -34 -14t-34 14q2 -2 12.5 -12t12.5 -13t10 -11.5 t10 -13.5t6 -13.5t5.5 -16.5t1.5 -18q0 -38 -28 -68q-3 -3 -16.5 -18t-19 -20.5t-18.5 -16.5t-22 -15.5t-22 -9t-26 -4.5q-40 0 -68 28l-408 408q-28 28 -28 68z" />
+<glyph unicode="&#xf0e4;" horiz-adv-x="1792" d="M0 384q0 182 71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348q0 -261 -141 -483q-19 -29 -54 -29h-1402q-35 0 -54 29q-141 221 -141 483zM128 384q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5z M320 832q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5zM710 241q-20 -77 20 -146t117 -89t146 20t89 117q16 60 -6 117t-72 91l101 382q6 26 -7.5 48.5t-38.5 29.5t-48 -6.5t-30 -39.5l-101 -382q-60 -5 -107 -43.5 t-63 -98.5zM768 1024q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5zM1216 832q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5zM1408 384q0 -53 37.5 -90.5 t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5z" />
+<glyph unicode="&#xf0e5;" horiz-adv-x="1792" d="M0 640q0 174 120 321.5t326 233t450 85.5t450 -85.5t326 -233t120 -321.5t-120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22h-5q-15 0 -27 10.5t-16 27.5v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5 t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281zM128 640q0 -112 71.5 -213.5t201.5 -175.5l87 -50l-27 -96q-24 -91 -70 -172q152 63 275 171l43 38l57 -6q69 -8 130 -8q204 0 381.5 69.5t282 187.5t104.5 255t-104.5 255t-282 187.5t-381.5 69.5t-381.5 -69.5 t-282 -187.5t-104.5 -255z" />
+<glyph unicode="&#xf0e6;" horiz-adv-x="1792" d="M0 768q0 139 94 257t256.5 186.5t353.5 68.5t353.5 -68.5t256.5 -186.5t94 -257t-94 -257t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25 t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224zM128 768q0 -82 53 -158t149 -132l97 -56l-35 -84q34 20 62 39l44 31l53 -10q78 -14 153 -14q153 0 286 52t211.5 141t78.5 191t-78.5 191t-211.5 141t-286 52t-286 -52t-211.5 -141t-78.5 -191zM616 132 q58 -4 88 -4q161 0 309 45t264 129q125 92 192 212t67 254q0 77 -23 152q129 -71 204 -178t75 -230q0 -120 -71 -224.5t-195 -176.5q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5t4.5 -5t4 -5t3.5 -5.5l2.5 -5t2 -6t0.5 -6.5t-1 -6.5q-3 -14 -13 -22 t-22 -7q-50 7 -86 16q-154 40 -278 128q-90 -16 -176 -16q-271 0 -472 132z" />
+<glyph unicode="&#xf0e7;" horiz-adv-x="896" d="M1 551l201 825q4 14 16 23t28 9h328q19 0 32 -12.5t13 -29.5q0 -8 -5 -18l-171 -463l396 98q8 2 12 2q19 0 34 -15q18 -20 7 -44l-540 -1157q-13 -25 -42 -25q-4 0 -14 2q-17 5 -25.5 19t-4.5 30l197 808l-406 -101q-4 -1 -12 -1q-18 0 -31 11q-18 15 -13 39z" />
+<glyph unicode="&#xf0e8;" horiz-adv-x="1792" d="M0 -32v320q0 40 28 68t68 28h96v192q0 52 38 90t90 38h512v192h-96q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-96v-192h512q52 0 90 -38t38 -90v-192h96q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-320 q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68z" />
+<glyph unicode="&#xf0e9;" horiz-adv-x="1664" d="M0 681q0 5 1 7q45 183 172.5 319.5t298 204.5t360.5 68q140 0 274.5 -40t246.5 -113.5t194.5 -187t115.5 -251.5q1 -2 1 -7q0 -13 -9.5 -22.5t-22.5 -9.5q-11 0 -23 10q-49 46 -93 69t-102 23q-68 0 -128 -37t-103 -97q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -28 -17 q-18 0 -29 17q-4 6 -14.5 24t-17.5 28q-43 60 -102.5 97t-127.5 37t-127.5 -37t-102.5 -97q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -29 -17q-17 0 -28 17q-4 6 -14.5 24t-17.5 28q-43 60 -103 97t-128 37q-58 0 -102 -23t-93 -69q-12 -10 -23 -10q-13 0 -22.5 9.5t-9.5 22.5z M384 128q0 26 19 45t45 19t45 -19t19 -45q0 -50 39 -89t89 -39t89 39t39 89v580q33 11 64 11t64 -11v-580q0 -104 -76 -180t-180 -76t-180 76t-76 180zM768 1310v98q0 26 19 45t45 19t45 -19t19 -45v-98q-42 2 -64 2t-64 -2z" />
+<glyph unicode="&#xf0ea;" horiz-adv-x="1792" d="M0 96v1344q0 40 28 68t68 28h1088q40 0 68 -28t28 -68v-328q21 -13 36 -28l408 -408q28 -28 48 -76t20 -88v-672q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v160h-544q-40 0 -68 28t-28 68zM256 1312q0 -13 9.5 -22.5t22.5 -9.5h704q13 0 22.5 9.5t9.5 22.5v64 q0 13 -9.5 22.5t-22.5 9.5h-704q-13 0 -22.5 -9.5t-9.5 -22.5v-64zM768 -128h896v640h-416q-40 0 -68 28t-28 68v416h-384v-1152zM1280 640h299l-299 299v-299z" />
+<glyph unicode="&#xf0eb;" horiz-adv-x="1024" d="M0 960q0 99 44.5 184.5t117 142t164 89t186.5 32.5t186.5 -32.5t164 -89t117 -142t44.5 -184.5q0 -155 -103 -268q-45 -49 -74.5 -87t-59.5 -95.5t-34 -107.5q47 -28 47 -82q0 -37 -25 -64q25 -27 25 -64q0 -52 -45 -81q13 -23 13 -47q0 -46 -31.5 -71t-77.5 -25 q-20 -44 -60 -70t-87 -26t-87 26t-60 70q-46 0 -77.5 25t-31.5 71q0 24 13 47q-45 29 -45 81q0 37 25 64q-25 27 -25 64q0 54 47 82q-4 50 -34 107.5t-59.5 95.5t-74.5 87q-103 113 -103 268zM128 960q0 -101 68 -180q10 -11 30.5 -33t30.5 -33q128 -153 141 -298h228 q13 145 141 298q10 11 30.5 33t30.5 33q68 79 68 180q0 72 -34.5 134t-90 101.5t-123 62t-136.5 22.5t-136.5 -22.5t-123 -62t-90 -101.5t-34.5 -134zM480 1088q0 13 9.5 22.5t22.5 9.5q50 0 99.5 -16t87 -54t37.5 -90q0 -13 -9.5 -22.5t-22.5 -9.5t-22.5 9.5t-9.5 22.5 q0 46 -54 71t-106 25q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf0ec;" horiz-adv-x="1792" d="M0 256q0 14 9 23l320 320q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-192h1376q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5q-12 0 -24 10l-319 320q-9 9 -9 22zM0 800v192q0 13 9.5 22.5t22.5 9.5h1376v192q0 14 9 23 t23 9q12 0 24 -10l319 -319q9 -9 9 -23t-9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v192h-1376q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf0ed;" horiz-adv-x="1920" d="M0 448q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5q0 -159 -112.5 -271.5t-271.5 -112.5h-1088q-185 0 -316.5 131.5t-131.5 316.5z M512 608q0 -14 9 -23l352 -352q9 -9 23 -9t23 9l351 351q10 12 10 24q0 14 -9 23t-23 9h-224v352q0 13 -9.5 22.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-352h-224q-13 0 -22.5 -9.5t-9.5 -22.5z" />
+<glyph unicode="&#xf0ee;" horiz-adv-x="1920" d="M0 448q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5q0 -159 -112.5 -271.5t-271.5 -112.5h-1088q-185 0 -316.5 131.5t-131.5 316.5z M512 672q0 -14 9 -23t23 -9h224v-352q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5v352h224q13 0 22.5 9.5t9.5 22.5q0 14 -9 23l-352 352q-9 9 -23 9t-23 -9l-351 -351q-10 -12 -10 -24z" />
+<glyph unicode="&#xf0f0;" horiz-adv-x="1408" d="M0 131q0 68 5.5 131t24 138t47.5 132.5t81 103t120 60.5q-22 -52 -22 -120v-203q-58 -20 -93 -70t-35 -111q0 -80 56 -136t136 -56t136 56t56 136q0 61 -35.5 111t-92.5 70v203q0 62 25 93q132 -104 295 -104t295 104q25 -31 25 -93v-64q-106 0 -181 -75t-75 -181v-89 q-32 -29 -32 -71q0 -40 28 -68t68 -28t68 28t28 68q0 42 -32 71v89q0 52 38 90t90 38t90 -38t38 -90v-89q-32 -29 -32 -71q0 -40 28 -68t68 -28t68 28t28 68q0 42 -32 71v89q0 68 -34.5 127.5t-93.5 93.5q0 10 0.5 42.5t0 48t-2.5 41.5t-7 47t-13 40q68 -15 120 -60.5 t81 -103t47.5 -132.5t24 -138t5.5 -131q0 -121 -73 -190t-194 -69h-874q-121 0 -194 69t-73 190zM256 192q0 26 19 45t45 19t45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45zM320 1024q0 159 112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5t-112.5 -271.5t-271.5 -112.5 t-271.5 112.5t-112.5 271.5z" />
+<glyph unicode="&#xf0f1;" horiz-adv-x="1408" d="M0 768v512q0 26 19 45t45 19q6 0 16 -2q17 30 47 48t65 18q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5q-33 0 -64 18v-402q0 -106 94 -181t226 -75t226 75t94 181v402q-31 -18 -64 -18q-53 0 -90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5q35 0 65 -18t47 -48 q10 2 16 2q26 0 45 -19t19 -45v-512q0 -144 -110 -252t-274 -128v-132q0 -106 94 -181t226 -75t226 75t94 181v395q-57 21 -92.5 70t-35.5 111q0 80 56 136t136 56t136 -56t56 -136q0 -62 -35.5 -111t-92.5 -70v-395q0 -159 -131.5 -271.5t-316.5 -112.5t-316.5 112.5 t-131.5 271.5v132q-164 20 -274 128t-110 252zM1152 832q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45z" />
+<glyph unicode="&#xf0f2;" horiz-adv-x="1792" d="M0 96v832q0 92 66 158t158 66h64v-1280h-64q-92 0 -158 66t-66 158zM384 -128v1280h128v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h128v-1280h-1024zM640 1152h512v128h-512v-128zM1504 -128v1280h64q92 0 158 -66t66 -158v-832q0 -92 -66 -158t-158 -66h-64z " />
+<glyph unicode="&#xf0f3;" horiz-adv-x="1664" d="M0 128q190 161 287 397.5t97 498.5q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117t96 -262q0 -262 97 -498.5t287 -397.5q0 -52 -38 -90t-90 -38h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38 t-38 90zM656 0q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16t-16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16z" />
+<glyph unicode="&#xf0f4;" horiz-adv-x="1920" d="M0 128h1792q0 -106 -75 -181t-181 -75h-1280q-106 0 -181 75t-75 181zM256 480v736q0 26 19 45t45 19h1152q159 0 271.5 -112.5t112.5 -271.5t-112.5 -271.5t-271.5 -112.5h-64v-32q0 -92 -66 -158t-158 -66h-704q-92 0 -158 66t-66 158zM1408 704h64q80 0 136 56t56 136 t-56 136t-136 56h-64v-384z" />
+<glyph unicode="&#xf0f5;" horiz-adv-x="1408" d="M0 832v640q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45v-640q0 -61 -35.5 -111t-92.5 -70v-779q0 -52 -38 -90t-90 -38h-128 q-52 0 -90 38t-38 90v779q-57 20 -92.5 70t-35.5 111zM768 416v800q0 132 94 226t226 94h256q26 0 45 -19t19 -45v-1600q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v512h-224q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf0f6;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536zM384 160v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23v-64 q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23zM384 416v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23zM384 672v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23zM1024 1024h376 q-10 29 -22 41l-313 313q-12 12 -41 22v-376z" />
+<glyph unicode="&#xf0f7;" horiz-adv-x="1408" d="M0 -192v1664q0 26 19 45t45 19h1280q26 0 45 -19t19 -45v-1664q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45zM128 -128h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224h384v1536h-1152v-1536zM256 160v64q0 13 9.5 22.5t22.5 9.5h64 q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM256 416v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM256 672v64q0 13 9.5 22.5t22.5 9.5h64 q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM256 928v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM256 1184v64q0 13 9.5 22.5t22.5 9.5h64 q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM512 416v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM512 672v64q0 13 9.5 22.5t22.5 9.5h64 q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM512 928v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM512 1184v64q0 13 9.5 22.5t22.5 9.5h64 q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM768 416v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM768 672v64q0 13 9.5 22.5t22.5 9.5h64 q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM768 928v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM768 1184v64q0 13 9.5 22.5t22.5 9.5h64 q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM1024 160v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM1024 416v64q0 13 9.5 22.5t22.5 9.5h64 q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM1024 672v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM1024 928v64q0 13 9.5 22.5t22.5 9.5h64 q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM1024 1184v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf0f8;" horiz-adv-x="1408" d="M0 -192v1280q0 26 19 45t45 19h320v288q0 40 28 68t68 28h448q40 0 68 -28t28 -68v-288h320q26 0 45 -19t19 -45v-1280q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45zM128 -128h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224h384v1152h-256 v-32q0 -40 -28 -68t-68 -28h-448q-40 0 -68 28t-28 68v32h-256v-1152zM256 160v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM256 416v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5 v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM256 672v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM512 416v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64 q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM512 672v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM512 1056q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5v96h128 v-96q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5v320q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-96h-128v96q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-320zM768 416v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5 v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM768 672v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM1024 160v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5 v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM1024 416v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5zM1024 672v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5 v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf0f9;" horiz-adv-x="1920" d="M64 192q0 26 19 45t45 19v416q0 26 13 58t32 51l198 198q19 19 51 32t58 13h160v320q0 26 19 45t45 19h1152q26 0 45 -19t19 -45v-1152q0 -26 -19 -45t-45 -19h-192q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-128 q-26 0 -45 19t-19 45zM256 640h384v256h-158q-14 -2 -22 -9l-195 -195q-7 -12 -9 -22v-30zM384 128q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5zM896 800q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192 q14 0 23 9t9 23v224h224q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192zM1280 128q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5z" />
+<glyph unicode="&#xf0fa;" horiz-adv-x="1792" d="M0 96v832q0 92 66 158t158 66h32v-1280h-32q-92 0 -158 66t-66 158zM352 -128v1280h160v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h160v-1280h-1088zM512 416q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23v192 q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192zM640 1152h512v128h-512v-128zM1536 -128v1280h32q92 0 158 -66t66 -158v-832q0 -92 -66 -158t-158 -66h-32z" />
+<glyph unicode="&#xf0fb;" horiz-adv-x="1920" d="M0 512v128l192 24v8h-128v32h-32v192l32 32h96l192 -224h160v416h-64v32h64h160h96q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-69l293 -352h64l224 -64l352 -32q261 -58 287 -93l1 -3q-1 -32 -288 -96l-352 -32l-224 -64h-64l-293 -352h69q26 0 45 -4.5t19 -11.5 t-19 -11.5t-45 -4.5h-96h-160h-64v32h64v416h-160l-192 -224h-96l-32 32v192h32v32h128v8z" />
+<glyph unicode="&#xf0fc;" horiz-adv-x="1664" d="M64 1152l32 128h480l32 128h960l32 -192l-64 -32v-800l128 -192v-192h-1152v192l128 192h-128q-159 0 -271.5 112.5t-112.5 271.5v320zM384 768q0 -53 37.5 -90.5t90.5 -37.5h128v384h-256v-256z" />
+<glyph unicode="&#xf0fd;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 192q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h512v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45 v896q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-512v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-896z" />
+<glyph unicode="&#xf0fe;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 576q0 -26 19 -45t45 -19h320v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h320q26 0 45 19t19 45 v128q0 26 -19 45t-45 19h-320v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-320q-26 0 -45 -19t-19 -45v-128z" />
+<glyph unicode="&#xf100;" horiz-adv-x="1024" d="M45 576q0 13 10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23zM429 576q0 13 10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23 l-393 -393l393 -393q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23z" />
+<glyph unicode="&#xf101;" horiz-adv-x="1024" d="M13 160q0 13 10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23zM397 160q0 13 10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10 l466 -466q10 -10 10 -23t-10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23z" />
+<glyph unicode="&#xf102;" horiz-adv-x="1152" d="M77 224q0 13 10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23zM77 608q0 13 10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23 l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23z" />
+<glyph unicode="&#xf103;" horiz-adv-x="1152" d="M77 672q0 13 10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23zM77 1056q0 13 10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10 l50 -50q10 -10 10 -23t-10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23z" />
+<glyph unicode="&#xf104;" horiz-adv-x="640" d="M45 576q0 13 10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23z" />
+<glyph unicode="&#xf105;" horiz-adv-x="640" d="M13 160q0 13 10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23z" />
+<glyph unicode="&#xf106;" horiz-adv-x="1152" d="M77 352q0 13 10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23z" />
+<glyph unicode="&#xf107;" horiz-adv-x="1152" d="M77 800q0 13 10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23z" />
+<glyph unicode="&#xf108;" horiz-adv-x="1920" d="M0 288v1088q0 66 47 113t113 47h1600q66 0 113 -47t47 -113v-1088q0 -66 -47 -113t-113 -47h-544q0 -37 16 -77.5t32 -71t16 -43.5q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45q0 14 16 44t32 70t16 78h-544q-66 0 -113 47t-47 113zM128 544q0 -13 9.5 -22.5 t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5v832q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-832z" />
+<glyph unicode="&#xf109;" horiz-adv-x="1920" d="M0 96v96h160h1600h160v-96q0 -40 -47 -68t-113 -28h-1600q-66 0 -113 28t-47 68zM256 416v704q0 66 47 113t113 47h1088q66 0 113 -47t47 -113v-704q0 -66 -47 -113t-113 -47h-1088q-66 0 -113 47t-47 113zM384 416q0 -13 9.5 -22.5t22.5 -9.5h1088q13 0 22.5 9.5 t9.5 22.5v704q0 13 -9.5 22.5t-22.5 9.5h-1088q-13 0 -22.5 -9.5t-9.5 -22.5v-704zM864 112q0 -16 16 -16h160q16 0 16 16t-16 16h-160q-16 0 -16 -16z" />
+<glyph unicode="&#xf10a;" horiz-adv-x="1152" d="M0 160v1088q0 66 47 113t113 47h832q66 0 113 -47t47 -113v-1088q0 -66 -47 -113t-113 -47h-832q-66 0 -113 47t-47 113zM128 288q0 -13 9.5 -22.5t22.5 -9.5h832q13 0 22.5 9.5t9.5 22.5v960q0 13 -9.5 22.5t-22.5 9.5h-832q-13 0 -22.5 -9.5t-9.5 -22.5v-960zM512 128 q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45z" />
+<glyph unicode="&#xf10b;" horiz-adv-x="768" d="M0 128v1024q0 52 38 90t90 38h512q52 0 90 -38t38 -90v-1024q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90zM96 288q0 -13 9.5 -22.5t22.5 -9.5h512q13 0 22.5 9.5t9.5 22.5v704q0 13 -9.5 22.5t-22.5 9.5h-512q-13 0 -22.5 -9.5t-9.5 -22.5v-704zM288 1136 q0 -16 16 -16h160q16 0 16 16t-16 16h-160q-16 0 -16 -16zM304 128q0 -33 23.5 -56.5t56.5 -23.5t56.5 23.5t23.5 56.5t-23.5 56.5t-56.5 23.5t-56.5 -23.5t-23.5 -56.5z" />
+<glyph unicode="&#xf10c;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273z" />
+<glyph unicode="&#xf10d;" horiz-adv-x="1664" d="M0 192v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136z M896 192v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136z" />
+<glyph unicode="&#xf10e;" horiz-adv-x="1664" d="M0 832v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136zM896 832v384 q0 80 56 136t136 56h384q80 0 136 -56t56 -136v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136z" />
+<glyph unicode="&#xf110;" horiz-adv-x="1568" d="M0 640q0 66 47 113t113 47t113 -47t47 -113t-47 -113t-113 -47t-113 47t-47 113zM176 1088q0 73 51.5 124.5t124.5 51.5t124.5 -51.5t51.5 -124.5t-51.5 -124.5t-124.5 -51.5t-124.5 51.5t-51.5 124.5zM208 192q0 60 42 102t102 42q59 0 101.5 -42t42.5 -102t-42.5 -102 t-101.5 -42q-60 0 -102 42t-42 102zM608 1280q0 80 56 136t136 56t136 -56t56 -136t-56 -136t-136 -56t-136 56t-56 136zM672 0q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5zM1136 192q0 46 33 79t79 33t79 -33t33 -79 t-33 -79t-79 -33t-79 33t-33 79zM1168 1088q0 33 23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5t-23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5zM1344 640q0 40 28 68t68 28t68 -28t28 -68t-28 -68t-68 -28t-68 28t-28 68z" />
+<glyph unicode="&#xf111;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5z" />
+<glyph unicode="&#xf112;" horiz-adv-x="1792" d="M0 896q0 26 19 45l512 512q19 19 45 19t45 -19t19 -45v-256h224q713 0 875 -403q53 -134 53 -333q0 -166 -127 -451q-3 -7 -10.5 -24t-13.5 -30t-13 -22q-12 -17 -28 -17q-15 0 -23.5 10t-8.5 25q0 9 2.5 26.5t2.5 23.5q5 68 5 123q0 101 -17.5 181t-48.5 138.5t-80 101 t-105.5 69.5t-133 42.5t-154 21.5t-175.5 6h-224v-256q0 -26 -19 -45t-45 -19t-45 19l-512 512q-19 19 -19 45z" />
+<glyph unicode="&#xf113;" horiz-adv-x="1664" d="M0 496q0 237 136 396q-27 82 -27 170q0 116 51 218q108 0 190 -39.5t189 -123.5q147 35 309 35q148 0 280 -32q105 82 187 121t189 39q51 -102 51 -218q0 -87 -27 -168q136 -160 136 -398q0 -207 -61 -331q-38 -77 -105.5 -133t-141 -86t-170 -47.5t-171.5 -22t-167 -4.5 q-78 0 -142 3t-147.5 12.5t-152.5 30t-137 51.5t-121 81t-86 115q-62 123 -62 331zM224 320q0 -88 32 -153.5t81 -103t122 -60t140 -29.5t149 -7h168q82 0 149 7t140 29.5t122 60t81 103t32 153.5q0 120 -69 204t-187 84q-41 0 -195 -21q-71 -11 -157 -11t-157 11 q-152 21 -195 21q-118 0 -187 -84t-69 -204zM384 320q0 40 12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82t-12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82zM1024 320q0 40 12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82t-12.5 -82t-43 -76t-72.5 -34t-72.5 34 t-43 76t-12.5 82z" />
+<glyph unicode="&#xf114;" horiz-adv-x="1664" d="M0 224v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158zM128 224q0 -40 28 -68t68 -28h1216q40 0 68 28t28 68v704q0 40 -28 68t-68 28h-704q-40 0 -68 28t-28 68v64 q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68v-960z" />
+<glyph unicode="&#xf115;" horiz-adv-x="1920" d="M0 224v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158v-160h192q54 0 99 -24.5t67 -70.5q15 -32 15 -68q0 -62 -46 -120l-295 -363q-43 -53 -116 -87.5t-140 -34.5h-1088q-92 0 -158 66t-66 158zM128 331l256 315q44 53 116 87.5 t140 34.5h768v160q0 40 -28 68t-68 28h-576q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68v-853zM171 163q0 -35 53 -35h1088q40 0 86 22t71 53l294 363q18 22 18 39q0 35 -53 35h-1088q-40 0 -85.5 -21.5t-71.5 -52.5l-294 -363q-18 -24 -18 -40z " />
+<glyph unicode="&#xf116;" horiz-adv-x="1792" />
+<glyph unicode="&#xf117;" horiz-adv-x="1792" />
+<glyph unicode="&#xf118;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM128 640q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5 t-51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5zM384 896q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5zM402 461q-8 25 4 48.5t38 31.5q25 8 48.5 -4t31.5 -38 q25 -80 92.5 -129.5t151.5 -49.5t151.5 49.5t92.5 129.5q8 26 32 38t49 4t37 -31.5t4 -48.5q-37 -121 -138 -195t-228 -74t-228 74t-138 195zM896 896q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5z" />
+<glyph unicode="&#xf119;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM128 640q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5 t-51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5zM384 896q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5zM402 307q37 121 138 195t228 74t228 -74t138 -195q8 -25 -4 -48.5 t-37 -31.5t-49 4t-32 38q-25 80 -92.5 129.5t-151.5 49.5t-151.5 -49.5t-92.5 -129.5q-8 -26 -31.5 -38t-48.5 -4q-26 8 -38 31.5t-4 48.5zM896 896q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5z" />
+<glyph unicode="&#xf11a;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM128 640q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5 t-51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5zM384 448q0 26 19 45t45 19h640q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45zM384 896q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5t-37.5 -90.5 t-90.5 -37.5t-90.5 37.5t-37.5 90.5zM896 896q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5z" />
+<glyph unicode="&#xf11b;" horiz-adv-x="1920" d="M0 512q0 212 150 362t362 150h896q212 0 362 -150t150 -362t-150 -362t-362 -150q-192 0 -338 128h-220q-146 -128 -338 -128q-212 0 -362 150t-150 362zM192 448q0 -14 9 -23t23 -9h192v-192q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v192h192q14 0 23 9t9 23v128 q0 14 -9 23t-23 9h-192v192q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-192h-192q-14 0 -23 -9t-9 -23v-128zM1152 384q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5zM1408 640q0 -53 37.5 -90.5t90.5 -37.5 t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5z" />
+<glyph unicode="&#xf11c;" horiz-adv-x="1920" d="M0 128v896q0 53 37.5 90.5t90.5 37.5h1664q53 0 90.5 -37.5t37.5 -90.5v-896q0 -53 -37.5 -90.5t-90.5 -37.5h-1664q-53 0 -90.5 37.5t-37.5 90.5zM128 128h1664v896h-1664v-896zM256 272v96q0 16 16 16h96q16 0 16 -16v-96q0 -16 -16 -16h-96q-16 0 -16 16zM256 528v96 q0 16 16 16h224q16 0 16 -16v-96q0 -16 -16 -16h-224q-16 0 -16 16zM256 784v96q0 16 16 16h96q16 0 16 -16v-96q0 -16 -16 -16h-96q-16 0 -16 16zM512 272v96q0 16 16 16h864q16 0 16 -16v-96q0 -16 -16 -16h-864q-16 0 -16 16zM512 784v96q0 16 16 16h96q16 0 16 -16v-96 q0 -16 -16 -16h-96q-16 0 -16 16zM640 528v96q0 16 16 16h96q16 0 16 -16v-96q0 -16 -16 -16h-96q-16 0 -16 16zM768 784v96q0 16 16 16h96q16 0 16 -16v-96q0 -16 -16 -16h-96q-16 0 -16 16zM896 528v96q0 16 16 16h96q16 0 16 -16v-96q0 -16 -16 -16h-96q-16 0 -16 16z M1024 784v96q0 16 16 16h96q16 0 16 -16v-96q0 -16 -16 -16h-96q-16 0 -16 16zM1152 528v96q0 16 16 16h96q16 0 16 -16v-96q0 -16 -16 -16h-96q-16 0 -16 16zM1280 784v96q0 16 16 16h96q16 0 16 -16v-96q0 -16 -16 -16h-96q-16 0 -16 16zM1408 528v96q0 16 16 16h112v240 q0 16 16 16h96q16 0 16 -16v-352q0 -16 -16 -16h-224q-16 0 -16 16zM1536 272v96q0 16 16 16h96q16 0 16 -16v-96q0 -16 -16 -16h-96q-16 0 -16 16z" />
+<glyph unicode="&#xf11d;" horiz-adv-x="1792" d="M64 1280q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v1266q-29 17 -46.5 46t-17.5 64zM320 320v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86 q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102q-15 -9 -33 -9q-16 0 -32 8q-32 19 -32 56zM448 426 q245 113 433 113q55 0 103.5 -7.5t98 -26t77 -31t82.5 -39.5l28 -14q44 -22 101 -22q120 0 293 92v616q-169 -91 -306 -91q-82 0 -145 32q-100 49 -184 76.5t-178 27.5q-173 0 -403 -127v-599z" />
+<glyph unicode="&#xf11e;" horiz-adv-x="1792" d="M64 1280q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v1266q-29 17 -46.5 46t-17.5 64zM320 320v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86 q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102q-15 -9 -33 -9q-16 0 -32 8q-32 19 -32 56zM448 426 q205 96 384 110v192q-181 -16 -384 -117v-185zM448 836q215 111 384 118v197q-172 -8 -384 -126v-189zM832 730h19q102 0 192.5 -29t197.5 -82q19 -9 39 -15v-188q42 -17 91 -17q120 0 293 92v184q-235 -116 -384 -71v224q-20 6 -39 15q-5 3 -33 17t-34.5 17t-31.5 15 t-34.5 15.5t-32.5 13t-36 12.5t-35 8.5t-39.5 7.5t-39.5 4t-44 2q-23 0 -49 -3v-222zM1280 828q148 -42 384 90v189q-169 -91 -306 -91q-45 0 -78 8v-196z" />
+<glyph unicode="&#xf120;" horiz-adv-x="1664" d="M13 160q0 13 10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23zM640 32v64q0 14 9 23t23 9h960q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-960 q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf121;" horiz-adv-x="1920" d="M45 576q0 13 10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23zM712 -52l373 1291q4 13 15.5 19.5t23.5 2.5l62 -17q13 -4 19.5 -15.5t2.5 -24.5 l-373 -1291q-4 -13 -15.5 -19.5t-23.5 -2.5l-62 17q-13 4 -19.5 15.5t-2.5 24.5zM1293 160q0 13 10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23z" />
+<glyph unicode="&#xf122;" horiz-adv-x="1792" d="M0 896q0 26 19 45l512 512q29 31 70 14q39 -17 39 -59v-69l-397 -398q-19 -19 -19 -45t19 -45l397 -397v-70q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45zM384 896q0 26 19 45l512 512q29 31 70 14q39 -17 39 -59v-262q411 -28 599 -221 q169 -173 169 -509q0 -58 -17 -133.5t-38.5 -138t-48 -125t-40.5 -90.5l-20 -40q-8 -17 -28 -17q-6 0 -9 1q-25 8 -23 34q43 400 -106 565q-64 71 -170.5 110.5t-267.5 52.5v-251q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45z" />
+<glyph unicode="&#xf123;" horiz-adv-x="1664" d="M2 900.5q9 27.5 54 34.5l502 73l225 455q20 41 49 41q28 0 49 -41l225 -455l502 -73q45 -7 54 -34.5t-24 -59.5l-363 -354l86 -500q5 -33 -6 -51.5t-34 -18.5q-17 0 -40 12l-449 236l-449 -236q-23 -12 -40 -12q-23 0 -34 18.5t-6 51.5l86 500l-364 354q-32 32 -23 59.5z M832 310l59 -31l318 -168l-60 355l-12 66l49 47l257 250l-356 52l-66 10l-30 60l-159 322v-963z" />
+<glyph unicode="&#xf124;" horiz-adv-x="1408" d="M2 561q-5 22 4 42t29 30l1280 640q13 7 29 7q27 0 45 -19q15 -14 18.5 -34.5t-6.5 -39.5l-640 -1280q-17 -35 -57 -35q-5 0 -15 2q-22 5 -35.5 22.5t-13.5 39.5v576h-576q-22 0 -39.5 13.5t-22.5 35.5z" />
+<glyph unicode="&#xf125;" horiz-adv-x="1664" d="M0 928v192q0 14 9 23t23 9h224v224q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-224h851l246 247q10 9 23 9t23 -9q9 -10 9 -23t-9 -23l-247 -246v-851h224q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v224h-864 q-14 0 -23 9t-9 23v864h-224q-14 0 -23 9t-9 23zM512 301l595 595h-595v-595zM557 256h595v595z" />
+<glyph unicode="&#xf126;" horiz-adv-x="1024" d="M0 64q0 52 26 96.5t70 69.5v820q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136q0 -52 -26 -96.5t-70 -69.5v-497q54 26 154 57q55 17 87.5 29.5t70.5 31t59 39.5t40.5 51t28 69.5t8.5 91.5q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136 q0 -52 -26 -96.5t-70 -69.5q-2 -287 -226 -414q-68 -38 -203 -81q-128 -40 -169.5 -71t-41.5 -100v-26q44 -25 70 -69.5t26 -96.5q0 -80 -56 -136t-136 -56t-136 56t-56 136zM96 64q0 -40 28 -68t68 -28t68 28t28 68t-28 68t-68 28t-68 -28t-28 -68zM96 1216q0 -40 28 -68 t68 -28t68 28t28 68t-28 68t-68 28t-68 -28t-28 -68zM736 1088q0 -40 28 -68t68 -28t68 28t28 68t-28 68t-68 28t-68 -28t-28 -68z" />
+<glyph unicode="&#xf127;" horiz-adv-x="1664" d="M0 448q0 14 9 23t23 9h320q14 0 23 -9t9 -23t-9 -23t-23 -9h-320q-14 0 -23 9t-9 23zM16 1088q0 120 85 203l147 146q83 83 203 83q121 0 204 -85l334 -335q21 -21 42 -56l-239 -18l-273 274q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68 l274 -274l-18 -240q-35 21 -56 42l-336 336q-84 86 -84 204zM128 32q0 13 9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23l-256 -256q-10 -9 -23 -9q-12 0 -23 9q-9 10 -9 23zM544 -96v320q0 14 9 23t23 9t23 -9t9 -23v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23zM633 364 l239 18l273 -274q27 -27 68 -27.5t68 26.5l147 146q28 28 28 67q0 40 -28 68l-274 275l18 239q35 -21 56 -42l336 -336q84 -86 84 -204q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-334 335q-21 21 -42 56zM1056 1184v320q0 14 9 23t23 9t23 -9t9 -23v-320 q0 -14 -9 -23t-23 -9t-23 9t-9 23zM1216 1120q0 13 9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23l-256 -256q-11 -9 -23 -9t-23 9q-9 10 -9 23zM1280 960q0 14 9 23t23 9h320q14 0 23 -9t9 -23t-9 -23t-23 -9h-320q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf128;" horiz-adv-x="1024" d="M96.5 986q-2.5 15 5.5 28q160 266 464 266q80 0 161 -31t146 -83t106 -127.5t41 -158.5q0 -54 -15.5 -101t-35 -76.5t-55 -59.5t-57.5 -43.5t-61 -35.5q-41 -23 -68.5 -65t-27.5 -67q0 -17 -12 -32.5t-28 -15.5h-240q-15 0 -25.5 18.5t-10.5 37.5v45q0 83 65 156.5 t143 108.5q59 27 84 56t25 76q0 42 -46.5 74t-107.5 32q-65 0 -108 -29q-35 -25 -107 -115q-13 -16 -31 -16q-12 0 -25 8l-164 125q-13 10 -15.5 25zM384 40v240q0 16 12 28t28 12h240q16 0 28 -12t12 -28v-240q0 -16 -12 -28t-28 -12h-240q-16 0 -28 12t-12 28z" />
+<glyph unicode="&#xf129;" horiz-adv-x="640" d="M0 64v128q0 26 19 45t45 19h64v384h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-576h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45zM128 1152v192q0 26 19 45t45 19h256q26 0 45 -19t19 -45v-192 q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf12a;" horiz-adv-x="640" d="M98 1344q-1 26 17.5 45t44.5 19h320q26 0 44.5 -19t17.5 -45l-28 -768q-1 -26 -20.5 -45t-45.5 -19h-256q-26 0 -45.5 19t-20.5 45zM128 64v224q0 26 19 45t45 19h256q26 0 45 -19t19 -45v-224q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf12b;" d="M5 0v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258zM1013 713q0 64 26 117t65 86.5 t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q83 65 188 65q110 0 178 -59.5t68 -158.5q0 -56 -24.5 -103t-62 -76.5t-81.5 -58.5t-82 -50.5t-65.5 -51.5t-30.5 -63h232v80h126v-206h-514l-3 27q-4 28 -4 46z " />
+<glyph unicode="&#xf12c;" d="M5 0v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258zM1015 -183q0 64 26 117t65 86.5 t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q80 65 188 65q110 0 178 -59.5t68 -158.5q0 -66 -34.5 -118.5t-84 -86t-99.5 -62.5t-87 -63t-41 -73h232v80h126v-206h-514l-4 27q-3 45 -3 46z" />
+<glyph unicode="&#xf12d;" horiz-adv-x="1920" d="M1.5 146.5q5.5 37.5 30.5 65.5l896 1024q38 44 96 44h768q38 0 69.5 -20.5t47.5 -54.5q15 -34 9.5 -71.5t-30.5 -65.5l-896 -1024q-38 -44 -96 -44h-768q-38 0 -69.5 20.5t-47.5 54.5q-15 34 -9.5 71.5zM128 128h768l336 384h-768z" />
+<glyph unicode="&#xf12e;" horiz-adv-x="1664" d="M0 0v1024q2 -1 17.5 -3.5t34 -5t21.5 -3.5q150 -24 245 -24q80 0 117 35q46 44 46 89q0 22 -15 50.5t-33.5 53t-33.5 64.5t-15 83q0 82 59 127.5t144 45.5q80 0 134 -44.5t54 -123.5q0 -41 -17.5 -77.5t-38 -59t-38 -56.5t-17.5 -71q0 -57 42 -83.5t103 -26.5 q64 0 180 15t163 17v-2q-1 -2 -3.5 -17.5t-5 -34t-3.5 -21.5q-24 -150 -24 -245q0 -80 35 -117q44 -46 89 -46q22 0 50.5 15t53 33.5t64.5 33.5t83 15q82 0 127.5 -59t45.5 -143q0 -81 -44.5 -135t-123.5 -54q-41 0 -77.5 17.5t-59 38t-56.5 38t-71 17.5q-110 0 -110 -124 q0 -39 16 -115t15 -115v-5q-22 0 -33 -1q-34 -3 -97.5 -11.5t-115.5 -13.5t-98 -5q-61 0 -103 26.5t-42 83.5q0 37 17.5 71t38 56.5t38 59t17.5 77.5q0 79 -54 123.5t-135 44.5q-84 0 -143 -45.5t-59 -127.5q0 -43 15 -83t33.5 -64.5t33.5 -53t15 -50.5q0 -45 -46 -89 q-37 -35 -117 -35q-95 0 -245 24q-9 2 -27.5 4t-27.5 4l-13 2q-1 0 -3 1q-2 0 -2 1z" />
+<glyph unicode="&#xf130;" horiz-adv-x="1152" d="M0 704v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -185 131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45 t19 45t45 19h256v132q-217 24 -364.5 187.5t-147.5 384.5zM256 704v512q0 132 94 226t226 94t226 -94t94 -226v-512q0 -132 -94 -226t-226 -94t-226 94t-94 226z" />
+<glyph unicode="&#xf131;" horiz-adv-x="1408" d="M13 64q0 13 10 23l1234 1234q10 10 23 10t23 -10l82 -82q10 -10 10 -23t-10 -23l-361 -361v-128q0 -132 -94 -226t-226 -94q-55 0 -109 19l-96 -96q97 -51 205 -51q185 0 316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -221 -147.5 -384.5 t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-125 13 -235 81l-254 -254q-10 -10 -23 -10t-23 10l-82 82q-10 10 -10 23zM128 704v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -53 15 -113l-101 -101 q-42 103 -42 214zM384 704v512q0 132 94 226t226 94q102 0 184.5 -59t116.5 -152z" />
+<glyph unicode="&#xf132;" horiz-adv-x="1280" d="M0 576v768q0 26 19 45t45 19h1152q26 0 45 -19t19 -45v-768q0 -86 -33.5 -170.5t-83 -150t-118 -127.5t-126.5 -103t-121 -77.5t-89.5 -49.5t-42.5 -20q-12 -6 -26 -6t-26 6q-16 7 -42.5 20t-89.5 49.5t-121 77.5t-126.5 103t-118 127.5t-83 150t-33.5 170.5zM640 79 q119 63 213 137q235 184 235 360v640h-448v-1137z" />
+<glyph unicode="&#xf133;" horiz-adv-x="1664" d="M0 -128v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90v-1280q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90zM128 -128h1408v1024h-1408v-1024z M384 1088q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288zM1152 1088q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288z" />
+<glyph unicode="&#xf134;" horiz-adv-x="1408" d="M3.5 940q-8.5 25 3.5 49q5 10 14.5 26t37.5 53.5t60.5 70t85 67t108.5 52.5q-25 42 -25 86q0 66 47 113t113 47t113 -47t47 -113q0 -33 -14 -64h302q0 11 7 20t18 11l448 96q3 1 7 1q12 0 20 -7q12 -9 12 -25v-320q0 -16 -12 -25q-8 -7 -20 -7q-4 0 -7 1l-448 96 q-11 2 -18 11t-7 20h-256v-102q111 -23 183.5 -111t72.5 -203v-800q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v800q0 106 62.5 190.5t161.5 114.5v111h-32q-59 0 -115 -23.5t-91.5 -53t-66 -66.5t-40.5 -53.5t-14 -24.5q-17 -35 -57 -35q-16 0 -29 7q-23 12 -31.5 37 zM384 1344q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45z" />
+<glyph unicode="&#xf135;" horiz-adv-x="1664" d="M36 464l224 384q10 14 26 16l379 20q96 114 176 195q188 187 358 258t431 71q14 0 24 -9.5t10 -22.5q0 -249 -75.5 -430.5t-253.5 -360.5q-81 -80 -195 -176l-20 -379q-2 -16 -16 -26l-384 -224q-7 -4 -16 -4q-12 0 -23 9l-64 64q-13 14 -8 32l85 276l-281 281l-276 -85 q-3 -1 -9 -1q-14 0 -23 9l-64 64q-17 19 -5 39zM1248 1088q0 -40 28 -68t68 -28t68 28t28 68t-28 68t-68 28t-68 -28t-28 -68z" />
+<glyph unicode="&#xf136;" horiz-adv-x="1792" d="M0 0l204 953l-153 327h1276q101 0 189.5 -40.5t147.5 -113.5q60 -73 81 -168.5t0 -194.5l-164 -763h-334l178 832q13 56 -15 88q-27 33 -83 33h-169l-204 -953h-334l204 953h-286l-204 -953h-334z" />
+<glyph unicode="&#xf137;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM346 640q0 -26 19 -45l454 -454q19 -19 45 -19t45 19l102 102q19 19 19 45t-19 45l-307 307l307 307 q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45z" />
+<glyph unicode="&#xf138;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM506 288q0 -26 19 -45l102 -102q19 -19 45 -19t45 19l454 454q19 19 19 45t-19 45l-454 454 q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l307 -307l-307 -307q-19 -19 -19 -45z" />
+<glyph unicode="&#xf139;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM250 544q0 -26 19 -45l102 -102q19 -19 45 -19t45 19l307 307l307 -307q19 -19 45 -19t45 19l102 102 q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45z" />
+<glyph unicode="&#xf13a;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM250 736q0 -26 19 -45l454 -454q19 -19 45 -19t45 19l454 454q19 19 19 45t-19 45l-102 102 q-19 19 -45 19t-45 -19l-307 -307l-307 307q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45z" />
+<glyph unicode="&#xf13b;" horiz-adv-x="1408" d="M0 1408h1408l-128 -1438l-578 -162l-574 162zM262 1114l47 -534h612l-22 -228l-197 -53l-196 53l-13 140h-175l22 -278l362 -100h4v1l359 99l50 544h-644l-15 181h674l16 175h-884z" />
+<glyph unicode="&#xf13c;" horiz-adv-x="1792" d="M12 75l71 356h297l-29 -147l422 -161l486 161l68 339h-1208l58 297h1209l38 191h-1208l59 297h1505l-266 -1333l-804 -267z" />
+<glyph unicode="&#xf13d;" horiz-adv-x="1792" d="M0 0v352q0 14 9 23t23 9h352q22 0 30 -20q8 -19 -7 -35l-100 -100q67 -91 189.5 -153.5t271.5 -82.5v647h-192q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h192v163q-58 34 -93 92.5t-35 128.5q0 106 75 181t181 75t181 -75t75 -181q0 -70 -35 -128.5t-93 -92.5v-163h192 q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-192v-647q149 20 271.5 82.5t189.5 153.5l-100 100q-15 16 -7 35q8 20 30 20h352q14 0 23 -9t9 -23v-352q0 -22 -20 -30q-8 -2 -12 -2q-13 0 -23 9l-93 93q-119 -143 -318.5 -226.5t-429.5 -83.5t-429.5 83.5t-318.5 226.5 l-93 -93q-9 -9 -23 -9q-4 0 -12 2q-20 8 -20 30zM832 1280q0 -26 19 -45t45 -19t45 19t19 45t-19 45t-45 19t-45 -19t-19 -45z" />
+<glyph unicode="&#xf13e;" horiz-adv-x="1152" d="M0 96v576q0 40 28 68t68 28h32v320q0 185 131.5 316.5t316.5 131.5t316.5 -131.5t131.5 -316.5q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45q0 106 -75 181t-181 75t-181 -75t-75 -181v-320h736q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28 t-28 68z" />
+<glyph unicode="&#xf140;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM128 640q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5 t-51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5zM256 640q0 212 150 362t362 150t362 -150t150 -362t-150 -362t-362 -150t-362 150t-150 362zM384 640q0 -159 112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5t-112.5 271.5 t-271.5 112.5t-271.5 -112.5t-112.5 -271.5zM512 640q0 106 75 181t181 75t181 -75t75 -181t-75 -181t-181 -75t-181 75t-75 181z" />
+<glyph unicode="&#xf141;" horiz-adv-x="1408" d="M0 608v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68zM512 608v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68zM1024 608v192q0 40 28 68t68 28h192 q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68z" />
+<glyph unicode="&#xf142;" horiz-adv-x="384" d="M0 96v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68zM0 608v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68zM0 1120v192q0 40 28 68t68 28h192q40 0 68 -28 t28 -68v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68z" />
+<glyph unicode="&#xf143;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 256q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5t-37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5z M256 575q0 -13 8.5 -22t21.5 -10q154 -11 264 -121t121 -264q1 -13 10 -21.5t22 -8.5h128q13 0 23 10t9 24q-13 232 -177 396t-396 177q-14 1 -24 -9t-10 -23v-128zM256 959q0 -13 9 -22t22 -10q204 -7 378 -111.5t278.5 -278.5t111.5 -378q1 -13 10 -22t22 -9h128 q13 0 23 10q11 9 9 23q-5 154 -56 297.5t-139.5 260t-205 205t-260 139.5t-297.5 56q-14 1 -23 -9q-10 -10 -10 -23v-128z" />
+<glyph unicode="&#xf144;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM512 320q0 -37 32 -56q16 -8 32 -8q17 0 32 9l544 320q32 18 32 55t-32 55l-544 320q-31 19 -64 1 q-32 -19 -32 -56v-640z" />
+<glyph unicode="&#xf145;" horiz-adv-x="1792" d="M54 448.5q0 53.5 37 90.5l907 906q37 37 90.5 37t90.5 -37l125 -125q-56 -56 -56 -136t56 -136t136 -56t136 56l126 -125q37 -37 37 -90.5t-37 -90.5l-907 -908q-37 -37 -90.5 -37t-90.5 37l-126 126q56 56 56 136t-56 136t-136 56t-136 -56l-125 126q-37 37 -37 90.5z M342 512q0 -26 19 -45l362 -362q18 -18 45 -18t45 18l618 618q19 19 19 45t-19 45l-362 362q-18 18 -45 18t-45 -18l-618 -618q-19 -19 -19 -45zM452 512l572 572l316 -316l-572 -572z" />
+<glyph unicode="&#xf146;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 576q0 -26 19 -45t45 -19h896q26 0 45 19t19 45v128q0 26 -19 45t-45 19h-896q-26 0 -45 -19t-19 -45v-128 z" />
+<glyph unicode="&#xf147;" horiz-adv-x="1408" d="M0 288v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5v-832q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5zM128 288q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v832q0 66 -47 113t-113 47h-832q-66 0 -113 -47 t-47 -113v-832zM256 672v64q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf148;" horiz-adv-x="1024" d="M3 18q-8 20 4 35l160 192q9 11 25 11h320v640h-192q-40 0 -58 37q-17 37 9 68l320 384q18 22 49 22t49 -22l320 -384q27 -32 9 -68q-18 -37 -58 -37h-192v-864q0 -14 -9 -23t-23 -9h-704q-21 0 -29 18z" />
+<glyph unicode="&#xf149;" horiz-adv-x="1024" d="M3 1261q9 19 29 19h704q13 0 22.5 -9.5t9.5 -23.5v-863h192q40 0 58 -37t-9 -69l-320 -384q-18 -22 -49 -22t-49 22l-320 384q-26 31 -9 69q18 37 58 37h192v640h-320q-14 0 -25 11l-160 192q-13 14 -4 34z" />
+<glyph unicode="&#xf14a;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM218 640q0 -26 19 -45l358 -358q19 -19 45 -19t45 19l614 614q19 19 19 45t-19 45l-102 102q-19 19 -45 19 t-45 -19l-467 -467l-211 211q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45z" />
+<glyph unicode="&#xf14b;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 128h288l544 544l-288 288l-544 -544v-288zM352 320v56l52 52l152 -152l-52 -52h-56v96h-96zM494 494 q-14 13 3 30l291 291q17 17 30 3q14 -13 -3 -30l-291 -291q-17 -17 -30 -3zM864 1024l288 -288l92 92q28 28 28 68t-28 68l-152 152q-28 28 -68 28t-68 -28z" />
+<glyph unicode="&#xf14c;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM282 320q0 -26 19 -45l102 -102q19 -19 45 -19t45 19l534 534l144 -144q18 -19 45 -19q12 0 25 5q39 17 39 59 v480q0 26 -19 45t-45 19h-480q-42 0 -59 -39q-17 -41 14 -70l144 -144l-534 -534q-19 -19 -19 -45z" />
+<glyph unicode="&#xf14d;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 448q0 -181 167 -404q10 -12 25 -12q7 0 13 3q22 9 19 33q-44 354 62 473q46 52 130 75.5t224 23.5v-160 q0 -42 40 -59q12 -5 24 -5q26 0 45 19l352 352q19 19 19 45t-19 45l-352 352q-30 31 -69 14q-40 -17 -40 -59v-160q-119 0 -216 -19.5t-162.5 -51t-114 -79t-76.5 -95.5t-44.5 -109t-21.5 -111.5t-5 -110.5z" />
+<glyph unicode="&#xf14e;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273zM512 241v542l512 256v-542zM640 448l256 128l-256 128v-256z" />
+<glyph unicode="&#xf150;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 160q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5v960q0 13 -9.5 22.5t-22.5 9.5h-960 q-13 0 -22.5 -9.5t-9.5 -22.5v-960zM391 861q17 35 57 35h640q40 0 57 -35q18 -35 -5 -66l-320 -448q-19 -27 -52 -27t-52 27l-320 448q-23 31 -5 66z" />
+<glyph unicode="&#xf151;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 160q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5v960q0 13 -9.5 22.5t-22.5 9.5h-960 q-13 0 -22.5 -9.5t-9.5 -22.5v-960zM391 419q-18 35 5 66l320 448q19 27 52 27t52 -27l320 -448q23 -31 5 -66q-17 -35 -57 -35h-640q-40 0 -57 35z" />
+<glyph unicode="&#xf152;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 160q0 -14 9 -23t23 -9h960q14 0 23 9t9 23v960q0 14 -9 23t-23 9h-960q-14 0 -23 -9t-9 -23v-960z M512 320v640q0 40 35 57q35 18 66 -5l448 -320q27 -19 27 -52t-27 -52l-448 -320q-31 -23 -66 -5q-35 17 -35 57z" />
+<glyph unicode="&#xf153;" horiz-adv-x="1024" d="M0 514v113q0 13 9.5 22.5t22.5 9.5h66q-2 57 1 105h-67q-14 0 -23 9t-9 23v114q0 14 9 23t23 9h98q67 210 243.5 338t400.5 128q102 0 194 -23q11 -3 20 -15q6 -11 3 -24l-43 -159q-3 -13 -14 -19.5t-24 -2.5l-4 1q-4 1 -11.5 2.5l-17.5 3.5t-22.5 3.5t-26 3t-29 2.5 t-29.5 1q-126 0 -226 -64t-150 -176h468q16 0 25 -12q10 -12 7 -26l-24 -114q-5 -26 -32 -26h-488q-3 -37 0 -105h459q15 0 25 -12q9 -12 6 -27l-24 -112q-2 -11 -11 -18.5t-20 -7.5h-387q48 -117 149.5 -185.5t228.5 -68.5q18 0 36 1.5t33.5 3.5t29.5 4.5t24.5 5t18.5 4.5 l12 3l5 2q13 5 26 -2q12 -7 15 -21l35 -159q3 -12 -3 -22.5t-17 -14.5l-5 -1q-4 -2 -10.5 -3.5t-16 -4.5t-21.5 -5.5t-25.5 -5t-30 -5t-33.5 -4.5t-36.5 -3t-38.5 -1q-234 0 -409 130.5t-238 351.5h-95q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf154;" horiz-adv-x="1024" d="M0 32v150q0 13 9.5 22.5t22.5 9.5h97v383h-95q-14 0 -23 9.5t-9 22.5v131q0 14 9 23t23 9h95v223q0 171 123.5 282t314.5 111q185 0 335 -125q9 -8 10 -20.5t-7 -22.5l-103 -127q-9 -11 -22 -12q-13 -2 -23 7q-5 5 -26 19t-69 32t-93 18q-85 0 -137 -47t-52 -123v-215 h305q13 0 22.5 -9t9.5 -23v-131q0 -13 -9.5 -22.5t-22.5 -9.5h-305v-379h414v181q0 13 9 22.5t23 9.5h162q14 0 23 -9.5t9 -22.5v-367q0 -14 -9 -23t-23 -9h-956q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf155;" horiz-adv-x="1024" d="M52 171l103 135q7 10 23 12q15 2 24 -9l2 -2q113 -99 243 -125q37 -8 74 -8q81 0 142.5 43t61.5 122q0 28 -15 53t-33.5 42t-58.5 37.5t-66 32t-80 32.5q-39 16 -61.5 25t-61.5 26.5t-62.5 31t-56.5 35.5t-53.5 42.5t-43.5 49t-35.5 58t-21 66.5t-8.5 78q0 138 98 242 t255 134v180q0 13 9.5 22.5t22.5 9.5h135q14 0 23 -9t9 -23v-176q57 -6 110.5 -23t87 -33.5t63.5 -37.5t39 -29t15 -14q17 -18 5 -38l-81 -146q-8 -15 -23 -16q-14 -3 -27 7q-3 3 -14.5 12t-39 26.5t-58.5 32t-74.5 26t-85.5 11.5q-95 0 -155 -43t-60 -111q0 -26 8.5 -48 t29.5 -41.5t39.5 -33t56 -31t60.5 -27t70 -27.5q53 -20 81 -31.5t76 -35t75.5 -42.5t62 -50t53 -63.5t31.5 -76.5t13 -94q0 -153 -99.5 -263.5t-258.5 -136.5v-175q0 -14 -9 -23t-23 -9h-135q-13 0 -22.5 9.5t-9.5 22.5v175q-66 9 -127.5 31t-101.5 44.5t-74 48t-46.5 37.5 t-17.5 18q-17 21 -2 41z" />
+<glyph unicode="&#xf156;" horiz-adv-x="898" d="M0 605v127q0 13 9.5 22.5t22.5 9.5h112q132 0 212.5 43t102.5 125h-427q-14 0 -23 9t-9 23v102q0 14 9 23t23 9h413q-57 113 -268 113h-145q-13 0 -22.5 9.5t-9.5 22.5v133q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-102q0 -14 -9 -23t-23 -9h-233q47 -61 64 -144h171 q14 0 23 -9t9 -23v-102q0 -14 -9 -23t-23 -9h-168q-23 -144 -129 -234t-276 -110q167 -178 459 -536q14 -16 4 -34q-8 -18 -29 -18h-195q-16 0 -25 12q-306 367 -498 571q-9 9 -9 22z" />
+<glyph unicode="&#xf157;" horiz-adv-x="1027" d="M4 1360q-8 16 0 32q10 16 28 16h194q19 0 29 -18l215 -425q19 -38 56 -125q10 24 30.5 68t27.5 61l191 420q8 19 29 19h191q17 0 27 -16q9 -14 1 -31l-313 -579h215q13 0 22.5 -9.5t9.5 -22.5v-104q0 -14 -9.5 -23t-22.5 -9h-290v-85h290q13 0 22.5 -9.5t9.5 -22.5v-103 q0 -14 -9.5 -23t-22.5 -9h-290v-330q0 -13 -9.5 -22.5t-22.5 -9.5h-172q-13 0 -22.5 9t-9.5 23v330h-288q-13 0 -22.5 9t-9.5 23v103q0 13 9.5 22.5t22.5 9.5h288v85h-288q-13 0 -22.5 9t-9.5 23v104q0 13 9.5 22.5t22.5 9.5h214z" />
+<glyph unicode="&#xf158;" horiz-adv-x="1280" d="M0 256v128q0 14 9 23t23 9h224v118h-224q-14 0 -23 9t-9 23v149q0 13 9 22.5t23 9.5h224v629q0 14 9 23t23 9h539q200 0 326.5 -122t126.5 -315t-126.5 -315t-326.5 -122h-340v-118h505q14 0 23 -9t9 -23v-128q0 -14 -9 -23t-23 -9h-505v-192q0 -14 -9.5 -23t-22.5 -9 h-167q-14 0 -23 9t-9 23v192h-224q-14 0 -23 9t-9 23zM487 747h320q106 0 171 62t65 162t-65 162t-171 62h-320v-448z" />
+<glyph unicode="&#xf159;" horiz-adv-x="1792" d="M0 672v64q0 14 9 23t23 9h175l-33 128h-142q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h109l-89 344q-5 15 5 28q10 12 26 12h137q26 0 31 -24l90 -360h359l97 360q7 24 31 24h126q24 0 31 -24l98 -360h365l93 360q5 24 31 24h137q16 0 26 -12q10 -13 5 -28l-91 -344h111 q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-145l-34 -128h179q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-213l-164 -616q-7 -24 -31 -24h-159q-24 0 -31 24l-166 616h-209l-167 -616q-7 -24 -31 -24h-159q-11 0 -19.5 7t-10.5 17l-160 616h-208q-14 0 -23 9t-9 23z M373 896l32 -128h225l35 128h-292zM436 640l75 -300q1 -1 1 -3t1 -3q0 1 0.5 3.5t0.5 3.5l81 299h-159zM822 768h139l-35 128h-70zM1118 896l34 -128h230l33 128h-297zM1187 640l81 -299q0 -1 0.5 -3.5t1.5 -3.5q0 1 0.5 3t0.5 3l78 300h-162z" />
+<glyph unicode="&#xf15a;" horiz-adv-x="1280" d="M56 0l31 183h111q50 0 58 51v402h16q-6 1 -16 1v287q-13 68 -89 68h-111v164l212 -1q64 0 97 1v252h154v-247q82 2 122 2v245h154v-252q79 -7 140 -22.5t113 -45t82.5 -78t36.5 -114.5q18 -182 -131 -258q117 -28 175 -103t45 -214q-7 -71 -32.5 -125t-64.5 -89 t-97 -58.5t-121.5 -34.5t-145.5 -15v-255h-154v251q-80 0 -122 1v-252h-154v255q-18 0 -54 0.5t-55 0.5h-200zM522 182q8 0 37 -0.5t48 -0.5t53 1.5t58.5 4t57 8.5t55.5 14t47.5 21t39.5 30t24.5 40t9.5 51q0 36 -15 64t-37 46t-57.5 30.5t-65.5 18.5t-74 9t-69 3t-64.5 -1 t-47.5 -1v-338zM522 674q5 0 34.5 -0.5t46.5 0t50 2t55 5.5t51.5 11t48.5 18.5t37 27t27 38.5t9 51q0 33 -12.5 58.5t-30.5 42t-48 28t-55 16.5t-61.5 8t-58 2.5t-54 -1t-39.5 -0.5v-307z" />
+<glyph unicode="&#xf15b;" d="M0 -160v1600q0 40 28 68t68 28h800v-544q0 -40 28 -68t68 -28h544v-1056q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM1024 1024v472q22 -14 36 -28l408 -408q14 -14 28 -36h-472z" />
+<glyph unicode="&#xf15c;" d="M0 -160v1600q0 40 28 68t68 28h800v-544q0 -40 28 -68t68 -28h544v-1056q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM384 160q0 -14 9 -23t23 -9h704q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64zM384 416q0 -14 9 -23t23 -9h704 q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64zM384 672q0 -14 9 -23t23 -9h704q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64zM1024 1024v472q22 -14 36 -28l408 -408q14 -14 28 -36h-472z" />
+<glyph unicode="&#xf15d;" horiz-adv-x="1664" d="M34 108q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35zM899 768v106h70l230 662h162l230 -662h70v-106h-288v106h75l-47 144h-243l-47 -144h75v-106 h-287zM988 -166l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -11v-2l14 2q9 2 30 2h248v119h121v-233h-584v90zM1191 1128h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18 t-7.5 -29z" />
+<glyph unicode="&#xf15e;" horiz-adv-x="1664" d="M34 108q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35zM899 -150h70l230 662h162l230 -662h70v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287 v106zM988 768v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -10v-3l14 3q9 1 30 1h248v119h121v-233h-584zM1191 104h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29 z" />
+<glyph unicode="&#xf160;" horiz-adv-x="1792" d="M34 108q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35zM896 -32q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9 t-9 23v192zM896 288v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23zM896 800v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23zM896 1312v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23 v-192q0 -14 -9 -23t-23 -9h-256q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf161;" horiz-adv-x="1792" d="M34 108q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35zM896 -32q0 14 9 23t23 9h256q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-256q-14 0 -23 9 t-9 23v192zM896 288v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23zM896 800v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23zM896 1312v192q0 14 9 23t23 9h832q14 0 23 -9t9 -23 v-192q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf162;" d="M34 108q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35zM946 261q0 105 72 178t181 73q123 0 205 -94.5t82 -252.5q0 -62 -13 -121.5t-41 -114 t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5zM976 1351l192 185h123v-654h165v-114h-469v114h167v432q0 7 0.5 19t0.5 17 v16h-2l-7 -12q-8 -13 -26 -31l-62 -58zM1085 261q0 -57 36.5 -95t104.5 -38q50 0 85 27t35 68q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94z" />
+<glyph unicode="&#xf163;" d="M34 108q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35zM946 1285q0 105 72 178t181 73q123 0 205 -94.5t82 -252.5q0 -62 -13 -121.5t-41 -114 t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5zM976 327l192 185h123v-654h165v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16 h-2l-7 -12q-8 -13 -26 -31l-62 -58zM1085 1285q0 -57 36.5 -95t104.5 -38q50 0 85 27t35 68q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94z" />
+<glyph unicode="&#xf164;" horiz-adv-x="1664" d="M0 64v640q0 26 19 45t45 19h288q26 0 45 -19t19 -45v-640q0 -26 -19 -45t-45 -19h-288q-26 0 -45 19t-19 45zM128 192q0 -27 18.5 -45.5t45.5 -18.5q26 0 45 18.5t19 45.5q0 26 -19 45t-45 19q-27 0 -45.5 -19t-18.5 -45zM480 64v641q0 25 18 43.5t43 20.5q24 2 76 59 t101 121q68 87 101 120q18 18 31 48t17.5 48.5t13.5 60.5q7 39 12.5 61t19.5 52t34 50q19 19 45 19q46 0 82.5 -10.5t60 -26t40 -40.5t24 -45t12 -50t5 -45t0.5 -39q0 -38 -9.5 -76t-19 -60t-27.5 -56q-3 -6 -10 -18t-11 -22t-8 -24h277q78 0 135 -57t57 -135 q0 -86 -55 -149q15 -44 15 -76q3 -76 -43 -137q17 -56 0 -117q-15 -57 -54 -94q9 -112 -49 -181q-64 -76 -197 -78h-36h-76h-17q-66 0 -144 15.5t-121.5 29t-120.5 39.5q-123 43 -158 44q-26 1 -45 19.5t-19 44.5z" />
+<glyph unicode="&#xf165;" horiz-adv-x="1664" d="M0 448q0 -26 19 -45t45 -19h288q26 0 45 19t19 45v640q0 26 -19 45t-45 19h-288q-26 0 -45 -19t-19 -45v-640zM128 960q0 27 18.5 45.5t45.5 18.5q26 0 45 -18.5t19 -45.5q0 -26 -19 -45t-45 -19q-27 0 -45.5 19t-18.5 45zM480 447v641q0 26 19 44.5t45 19.5q35 1 158 44 q77 26 120.5 39.5t121.5 29t144 15.5h17h76h36q133 -2 197 -78q58 -69 49 -181q39 -37 54 -94q17 -61 0 -117q46 -61 43 -137q0 -32 -15 -76q55 -61 55 -149q-1 -78 -57.5 -135t-134.5 -57h-277q4 -14 8 -24t11 -22t10 -18q18 -37 27 -57t19 -58.5t10 -76.5q0 -24 -0.5 -39 t-5 -45t-12 -50t-24 -45t-40 -40.5t-60 -26t-82.5 -10.5q-26 0 -45 19q-20 20 -34 50t-19.5 52t-12.5 61q-9 42 -13.5 60.5t-17.5 48.5t-31 48q-33 33 -101 120q-49 64 -101 121t-76 59q-25 2 -43 20.5t-18 43.5z" />
+<glyph unicode="&#xf166;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM218 366q0 -176 20 -260q10 -43 42.5 -73t75.5 -35q137 -15 412 -15t412 15q43 5 75.5 35t42.5 73 q20 84 20 260q0 177 -19 260q-10 44 -43 73.5t-76 34.5q-136 15 -412 15q-275 0 -411 -15q-44 -5 -76.5 -34.5t-42.5 -73.5q-20 -87 -20 -260zM300 551v70h232v-70h-80v-423h-74v423h-78zM396 1313l24 -69t23 -69q35 -103 46 -158v-201h74v201l90 296h-75l-51 -195l-53 195 h-78zM542 205v290h66v-270q0 -24 1 -26q1 -15 15 -15q20 0 42 31v280h67v-367h-67v40q-39 -45 -76 -45q-33 0 -42 28q-6 16 -6 54zM654 936q0 -58 21 -87q27 -38 78 -38q49 0 78 38q21 27 21 87v130q0 58 -21 87q-29 38 -78 38q-51 0 -78 -38q-21 -29 -21 -87v-130zM721 923 v156q0 52 32 52t32 -52v-156q0 -51 -32 -51t-32 51zM790 128v493h67v-161q32 40 68 40q41 0 53 -42q7 -21 7 -74v-146q0 -52 -7 -73q-12 -42 -53 -42q-35 0 -68 41v-36h-67zM857 200q16 -16 33 -16q29 0 29 49v157q0 50 -29 50q-17 0 -33 -16v-224zM907 893q0 -37 6 -55 q11 -27 43 -27q36 0 77 45v-40h67v370h-67v-283q-22 -31 -42 -31q-15 0 -16 16q-1 2 -1 26v272h-67v-293zM1037 247v129q0 59 20 86q29 38 80 38t78 -38q21 -28 21 -86v-76h-133v-65q0 -51 34 -51q24 0 30 26q0 1 0.5 7t0.5 16.5v21.5h68v-9q0 -29 -2 -43q-3 -22 -15 -40 q-27 -40 -80 -40q-52 0 -81 38q-21 27 -21 86zM1103 355h66v34q0 51 -33 51t-33 -51v-34z" />
+<glyph unicode="&#xf167;" d="M27 260q0 234 26 350q14 59 58 99t103 47q183 20 554 20t555 -20q58 -7 102.5 -47t57.5 -99q26 -112 26 -350q0 -234 -26 -350q-14 -59 -58 -99t-102 -46q-184 -21 -555 -21t-555 21q-58 6 -102.5 46t-57.5 99q-26 112 -26 350zM138 509h105v-569h100v569h107v94h-312 v-94zM266 1536h106l71 -263l68 263h102l-121 -399v-271h-100v271q-14 74 -61 212q-37 103 -65 187zM463 43q0 -49 8 -73q12 -37 58 -37q48 0 102 61v-54h89v494h-89v-378q-30 -42 -57 -42q-18 0 -21 21q-1 3 -1 35v364h-89v-391zM614 1028v175q0 80 28 117q38 51 105 51 q69 0 106 -51q28 -37 28 -117v-175q0 -81 -28 -118q-37 -51 -106 -51q-67 0 -105 51q-28 38 -28 118zM704 1011q0 -70 43 -70t43 70v210q0 69 -43 69t-43 -69v-210zM798 -60h89v48q45 -55 93 -55q54 0 71 55q9 27 9 100v197q0 73 -9 99q-17 56 -71 56q-50 0 -93 -54v217h-89 v-663zM887 36v301q22 22 45 22q39 0 39 -67v-211q0 -67 -39 -67q-23 0 -45 22zM955 971v394h91v-367q0 -33 1 -35q3 -22 21 -22q27 0 57 43v381h91v-499h-91v55q-53 -62 -103 -62q-46 0 -59 37q-8 24 -8 75zM1130 100q0 -79 29 -116q39 -51 108 -51q72 0 108 53q18 27 21 54 q2 9 2 58v13h-91q0 -51 -2 -61q-7 -36 -40 -36q-46 0 -46 69v87h179v103q0 79 -27 116q-39 51 -106 51q-68 0 -107 -51q-28 -37 -28 -116v-173zM1219 245v46q0 68 45 68t45 -68v-46h-90z" />
+<glyph unicode="&#xf168;" horiz-adv-x="1408" d="M5 384q-10 17 0 36l253 448q1 0 0 1l-161 279q-12 22 -1 37q9 15 32 15h239q40 0 66 -45l164 -286q-10 -18 -257 -456q-27 -46 -65 -46h-239q-21 0 -31 17zM536 539q18 32 531 942q25 45 64 45h241q22 0 31 -15q11 -16 0 -37l-528 -934v-1l336 -615q11 -20 1 -37 q-10 -15 -32 -15h-239q-42 0 -66 45z" />
+<glyph unicode="&#xf169;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM227 396q8 -13 24 -13h185q31 0 50 36l199 352q0 1 -126 222q-21 34 -52 34h-184q-18 0 -26 -11q-7 -12 1 -29 l125 -216v-1l-196 -346q-9 -14 0 -28zM638 516q1 -2 262 -481q20 -35 52 -35h184q18 0 25 12q8 13 -1 28l-260 476v1l409 723q8 16 0 28q-7 12 -24 12h-187q-30 0 -49 -35z" />
+<glyph unicode="&#xf16a;" horiz-adv-x="1792" d="M0 640q0 96 1 150t8.5 136.5t22.5 147.5q16 73 69 123t124 58q222 25 671 25t671 -25q71 -8 124.5 -58t69.5 -123q14 -65 21.5 -147.5t8.5 -136.5t1 -150t-1 -150t-8.5 -136.5t-22.5 -147.5q-16 -73 -69 -123t-124 -58q-222 -25 -671 -25t-671 25q-71 8 -124.5 58 t-69.5 123q-14 65 -21.5 147.5t-8.5 136.5t-1 150zM640 320q0 -38 33 -56q16 -8 31 -8q20 0 34 10l512 320q30 17 30 54t-30 54l-512 320q-31 20 -65 2q-33 -18 -33 -56v-640z" />
+<glyph unicode="&#xf16b;" horiz-adv-x="1792" d="M64 558l338 271l494 -305l-342 -285zM64 1099l490 319l342 -285l-494 -304zM407 166v108l147 -96l342 284v2l1 -1l1 1v-2l343 -284l147 96v-108l-490 -293v-1l-1 1l-1 -1v1zM896 524l494 305l338 -271l-489 -319zM896 1133l343 285l489 -319l-338 -270z" />
+<glyph unicode="&#xf16c;" horiz-adv-x="1408" d="M0 -255v736h121v-618h928v618h120v-701l-1 -35v-1h-1132l-35 1h-1zM221 -17v151l707 1v-151zM227 243l14 150l704 -65l-13 -150zM270 563l39 146l683 -183l-39 -146zM395 928l77 130l609 -360l-77 -130zM707 1303l125 86l398 -585l-124 -85zM1136 1510l149 26l121 -697 l-149 -26z" />
+<glyph unicode="&#xf16d;" d="M0 69v1142q0 81 58 139t139 58h1142q81 0 139 -58t58 -139v-1142q0 -81 -58 -139t-139 -58h-1142q-81 0 -139 58t-58 139zM171 110q0 -26 17.5 -43.5t43.5 -17.5h1069q25 0 43 17.5t18 43.5v648h-135q20 -63 20 -131q0 -126 -64 -232.5t-174 -168.5t-240 -62 q-197 0 -337 135.5t-140 327.5q0 68 20 131h-141v-648zM461 643q0 -124 90.5 -211.5t217.5 -87.5q128 0 218.5 87.5t90.5 211.5t-90.5 211.5t-218.5 87.5q-127 0 -217.5 -87.5t-90.5 -211.5zM1050 1003q0 -29 20 -49t49 -20h174q29 0 49 20t20 49v165q0 28 -20 48.5 t-49 20.5h-174q-29 0 -49 -20.5t-20 -48.5v-165z" />
+<glyph unicode="&#xf16e;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM274 640q0 -88 62 -150t150 -62t150 62t62 150t-62 150t-150 62t-150 -62t-62 -150zM838 640q0 -88 62 -150 t150 -62t150 62t62 150t-62 150t-150 62t-150 -62t-62 -150z" />
+<glyph unicode="&#xf170;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM309 384h94l104 160h522l104 -160h94l-459 691zM567 608l201 306l201 -306h-402z" />
+<glyph unicode="&#xf171;" horiz-adv-x="1408" d="M0 1222q3 26 17.5 48.5t31.5 37.5t45 30t46 22.5t48 18.5q125 46 313 64q379 37 676 -50q155 -46 215 -122q16 -20 16.5 -51t-5.5 -54q-26 -167 -111 -655q-5 -30 -27 -56t-43.5 -40t-54.5 -31q-252 -126 -610 -88q-248 27 -394 139q-15 12 -25.5 26.5t-17 35t-9 34 t-6 39.5t-5.5 35q-9 50 -26.5 150t-28 161.5t-23.5 147.5t-22 158zM173 285l6 16l18 9q223 -148 506.5 -148t507.5 148q21 -6 24 -23t-5 -45t-8 -37q-8 -26 -15.5 -76.5t-14 -84t-28.5 -70t-58 -56.5q-86 -48 -189.5 -71.5t-202 -22t-201.5 18.5q-46 8 -81.5 18t-76.5 27 t-73 43.5t-52 61.5q-25 96 -57 292zM243 1240q30 -28 76 -45.5t73.5 -22t87.5 -11.5q228 -29 448 -1q63 8 89.5 12t72.5 21.5t75 46.5q-20 27 -56 44.5t-58 22t-71 12.5q-291 47 -566 -2q-43 -7 -66 -12t-55 -22t-50 -43zM481 657q4 -91 77.5 -155t165.5 -56q91 8 152 84 t50 168q-14 107 -113 164t-197 13q-63 -28 -100.5 -88.5t-34.5 -129.5zM599 710q14 41 52 58q36 18 72.5 12t64 -35.5t27.5 -67.5q8 -63 -50.5 -101t-111.5 -6q-39 17 -53.5 58t-0.5 82z" />
+<glyph unicode="&#xf172;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM260 1060q8 -68 19 -138t29 -171t24 -137q1 -5 5 -31t7 -36t12 -27t22 -28q105 -80 284 -100q259 -28 440 63 q24 13 39.5 23t31 29t19.5 40q48 267 80 473q9 53 -8 75q-43 55 -155 88q-216 63 -487 36q-132 -12 -226 -46q-38 -15 -59.5 -25t-47 -34t-29.5 -54zM385 384q26 -154 41 -210q47 -81 204 -108q249 -46 428 53q34 19 49 51.5t22.5 85.5t12.5 71q0 7 5.5 26.5t3 32 t-17.5 16.5q-161 -106 -365 -106t-366 106l-12 -6zM436 1073q13 19 36 31t40 15.5t47 8.5q198 35 408 1q33 -5 51 -8.5t43 -16t39 -31.5q-20 -21 -53.5 -34t-53 -16t-63.5 -8q-155 -20 -324 0q-44 6 -63 9.5t-52.5 16t-54.5 32.5zM607 653q-2 49 25.5 93t72.5 64 q70 31 141.5 -10t81.5 -118q8 -66 -36 -121t-110 -61t-119 40t-56 113zM687.5 660.5q0.5 -52.5 43.5 -70.5q39 -23 81 4t36 72q0 43 -41 66t-77 1q-43 -20 -42.5 -72.5z" />
+<glyph unicode="&#xf173;" horiz-adv-x="1024" d="M78 779v217q91 30 155 84q64 55 103 132q39 78 54 196h219v-388h364v-241h-364v-394q0 -136 14 -172q13 -37 52 -60q50 -31 117 -31q117 0 232 76v-242q-102 -48 -178 -65q-77 -19 -173 -19q-105 0 -186 27q-78 25 -138 75q-58 51 -79 105q-22 54 -22 161v539h-170z" />
+<glyph unicode="&#xf174;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM413 744h127v-404q0 -78 17 -121q17 -42 59 -78q43 -37 104 -57q62 -20 140 -20q67 0 129 14q57 13 134 49v181 q-88 -56 -174 -56q-51 0 -88 23q-29 17 -39 45q-11 30 -11 129v295h274v181h-274v291h-164q-11 -90 -40 -147t-78 -99q-48 -40 -116 -63v-163z" />
+<glyph unicode="&#xf175;" horiz-adv-x="768" d="M3 237q9 19 29 19h224v1248q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1248h224q21 0 29 -19t-5 -35l-350 -384q-10 -10 -23 -10q-14 0 -24 10l-355 384q-13 16 -5 35z" />
+<glyph unicode="&#xf176;" horiz-adv-x="768" d="M3 1043q-8 19 5 35l350 384q10 10 23 10q14 0 24 -10l355 -384q13 -16 5 -35q-9 -19 -29 -19h-224v-1248q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1248h-224q-21 0 -29 19z" />
+<glyph unicode="&#xf177;" horiz-adv-x="1792" d="M64 637q0 14 10 24l384 354q16 14 35 6q19 -9 19 -29v-224h1248q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-1248v-224q0 -21 -19 -29t-35 5l-384 350q-10 10 -10 23z" />
+<glyph unicode="&#xf178;" horiz-adv-x="1792" d="M0 544v192q0 14 9 23t23 9h1248v224q0 21 19 29t35 -5l384 -350q10 -10 10 -23q0 -14 -10 -24l-384 -354q-16 -14 -35 -6q-19 9 -19 29v224h-1248q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf179;" horiz-adv-x="1408" d="M0 634q0 228 113 374q112 144 284 144q72 0 177 -30q104 -30 138 -30q45 0 143 34q102 34 173 34q119 0 213 -65q52 -36 104 -100q-79 -67 -114 -118q-65 -94 -65 -207q0 -124 69 -223t158 -126q-39 -125 -123 -250q-129 -196 -257 -196q-49 0 -140 32q-86 32 -151 32 q-61 0 -142 -33q-81 -34 -132 -34q-152 0 -301 259q-147 261 -147 503zM683 1131q3 149 78 257q74 107 250 148q1 -3 2.5 -11t2.5 -11q0 -4 0.5 -10t0.5 -10q0 -61 -29 -136q-30 -75 -93 -138q-54 -54 -108 -72q-37 -11 -104 -17z" />
+<glyph unicode="&#xf17a;" horiz-adv-x="1664" d="M0 -27v557h682v-651zM0 614v565l682 94v-659h-682zM757 -131v661h907v-786zM757 614v669l907 125v-794h-907z" />
+<glyph unicode="&#xf17b;" horiz-adv-x="1408" d="M0 337v430q0 42 30 72t73 30q42 0 72 -30t30 -72v-430q0 -43 -29.5 -73t-72.5 -30t-73 30t-30 73zM241 886q0 117 64 215.5t172 153.5l-71 131q-7 13 5 20q13 6 20 -6l72 -132q95 42 201 42t201 -42l72 132q7 12 20 6q12 -7 5 -20l-71 -131q107 -55 171 -153.5t64 -215.5 h-925zM245 184v666h918v-666q0 -46 -32 -78t-77 -32h-75v-227q0 -43 -30 -73t-73 -30t-73 30t-30 73v227h-138v-227q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73l-1 227h-74q-46 0 -78 32t-32 78zM455 1092q0 -16 11 -27.5t27 -11.5t27.5 11.5t11.5 27.5t-11.5 27.5 t-27.5 11.5t-27 -11.5t-11 -27.5zM876 1092q0 -16 11.5 -27.5t27.5 -11.5t27 11.5t11 27.5t-11 27.5t-27 11.5t-27.5 -11.5t-11.5 -27.5zM1203 337v430q0 43 30 72.5t72 29.5q43 0 73 -29.5t30 -72.5v-430q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73z" />
+<glyph unicode="&#xf17c;" d="M11 -115q-10 23 7 66.5t18 54.5q1 16 -4 40t-10 42.5t-4.5 36.5t10.5 27q14 12 57 14t60 12q30 18 42 35t12 51q21 -73 -32 -106q-32 -20 -83 -15q-34 3 -43 -10q-13 -15 5 -57q2 -6 8 -18t8.5 -18t4.5 -17t1 -22q0 -15 -17 -49t-14 -48q3 -17 37 -26q20 -6 84.5 -18.5 t99.5 -20.5q24 -6 74 -22t82.5 -23t55.5 -4q43 6 64.5 28t23 48t-7.5 58.5t-19 52t-20 36.5q-121 190 -169 242q-68 74 -113 40q-11 -9 -15 15q-3 16 -2 38q1 29 10 52t24 47t22 42q8 21 26.5 72t29.5 78t30 61t39 54q110 143 124 195q-12 112 -16 310q-2 90 24 151.5 t106 104.5q39 21 104 21q53 1 106 -13.5t89 -41.5q57 -42 91.5 -121.5t29.5 -147.5q-5 -95 30 -214q34 -113 133 -218q55 -59 99.5 -163t59.5 -191q8 -49 5 -84.5t-12 -55.5t-20 -22q-10 -2 -23.5 -19t-27 -35.5t-40.5 -33.5t-61 -14q-18 1 -31.5 5t-22.5 13.5t-13.5 15.5 t-11.5 20.5t-9 19.5q-22 37 -41 30t-28 -49t7 -97q20 -70 1 -195q-10 -65 18 -100.5t73 -33t85 35.5q59 49 89.5 66.5t103.5 42.5q53 18 77 36.5t18.5 34.5t-25 28.5t-51.5 23.5q-33 11 -49.5 48t-15 72.5t15.5 47.5q1 -31 8 -56.5t14.5 -40.5t20.5 -28.5t21 -19t21.5 -13 t16.5 -9.5q20 -12 31 -24.5t12 -24t-2.5 -22.5t-15.5 -22t-23.5 -19.5t-30 -18.5t-31.5 -16.5t-32 -15.5t-27 -13q-38 -19 -85.5 -56t-75.5 -64q-17 -16 -68 -19.5t-89 14.5q-18 9 -29.5 23.5t-16.5 25.5t-22 19.5t-47 9.5q-44 1 -130 1q-19 0 -57 -1.5t-58 -2.5 q-44 -1 -79.5 -15t-53.5 -30t-43.5 -28.5t-53.5 -11.5q-29 1 -111 31t-146 43q-19 4 -51 9.5t-50 9t-39.5 9.5t-33.5 14.5t-17 19.5zM321 495q-36 -65 10 -166q5 -12 25 -28t24 -20q20 -23 104 -90.5t93 -76.5q16 -15 17.5 -38t-14 -43t-45.5 -23q8 -15 29 -44.5t28 -54 t7 -70.5q46 24 7 92q-4 8 -10.5 16t-9.5 12t-2 6q3 5 13 9.5t20 -2.5q46 -52 166 -36q133 15 177 87q23 38 34 30q12 -6 10 -52q-1 -25 -23 -92q-9 -23 -6 -37.5t24 -15.5q3 19 14.5 77t13.5 90q2 21 -6.5 73.5t-7.5 97t23 70.5q15 18 51 18q1 37 34.5 53t72.5 10.5 t60 -22.5q0 18 -55 42q4 15 7.5 27.5t5 26t3 21.5t0.5 22.5t-1 19.5t-3.5 22t-4 20.5t-5 25t-5.5 26.5q-10 48 -47 103t-72 75q24 -20 57 -83q87 -162 54 -278q-11 -40 -50 -42q-31 -4 -38.5 18.5t-8 83.5t-11.5 107q-9 39 -19.5 69t-19.5 45.5t-15.5 24.5t-13 15t-7.5 7 q-14 62 -31 103t-29.5 56t-23.5 33t-15 40q-4 21 6 53.5t4.5 49.5t-44.5 25q-15 3 -44.5 18t-35.5 16q-8 1 -11 26t8 51t36 27q37 3 51 -30t4 -58q-11 -19 -2 -26.5t30 -0.5q13 4 13 36v37q-5 30 -13.5 50t-21 30.5t-23.5 15t-27 7.5q-107 -8 -89 -134q0 -15 -1 -15 q-9 9 -29.5 10.5t-33 -0.5t-15.5 5q1 57 -16 90t-45 34q-27 1 -41.5 -27.5t-16.5 -59.5q-1 -15 3.5 -37t13 -37.5t15.5 -13.5q10 3 16 14q4 9 -7 8q-7 0 -15.5 14.5t-9.5 33.5q-1 22 9 37t34 14q17 0 27 -21t9.5 -39t-1.5 -22q-22 -15 -31 -29q-8 -12 -27.5 -23.5 t-20.5 -12.5q-13 -14 -15.5 -27t7.5 -18q14 -8 25 -19.5t16 -19t18.5 -13t35.5 -6.5q47 -2 102 15q2 1 23 7t34.5 10.5t29.5 13t21 17.5q9 14 20 8q5 -3 6.5 -8.5t-3 -12t-16.5 -9.5q-20 -6 -56.5 -21.5t-45.5 -19.5q-44 -19 -70 -23q-25 -5 -79 2q-10 2 -9 -2t17 -19 q25 -23 67 -22q17 1 36 7t36 14t33.5 17.5t30 17t24.5 12t17.5 2.5t8.5 -11q0 -2 -1 -4.5t-4 -5t-6 -4.5t-8.5 -5t-9 -4.5t-10 -5t-9.5 -4.5q-28 -14 -67.5 -44t-66.5 -43t-49 -1q-21 11 -63 73q-22 31 -25 22q-1 -3 -1 -10q0 -25 -15 -56.5t-29.5 -55.5t-21 -58t11.5 -63 q-23 -6 -62.5 -90t-47.5 -141q-2 -18 -1.5 -69t-5.5 -59q-8 -24 -29 -3q-32 31 -36 94q-2 28 4 56q4 19 -1 18zM372 630q4 -1 12.5 7t12.5 18q1 3 2 7t2 6t1.5 4.5t0.5 4v3t-1 2.5t-3 2q-4 1 -6 -3t-4.5 -12.5t-5.5 -13.5t-10 -13q-7 -10 -1 -12zM603 1190q2 -5 5 -6 q10 0 7 -15q-3 -20 8 -20q3 0 3 3q3 17 -2.5 30t-11.5 15q-9 2 -9 -7zM634 1110q0 12 19 15h10q-11 -1 -15.5 -10.5t-8.5 -9.5q-5 -1 -5 5zM721 1122q24 11 32 -2q3 -6 -3 -9q-4 -1 -11.5 6.5t-17.5 4.5zM835 1196l4 -2q14 -4 18 -31q0 -3 8 2l2 3q0 11 -5 19.5t-11 12.5 t-9 3q-14 -1 -7 -7zM851 
1381.5q-1 -2.5 3 -8.5q4 -3 8 0t11 9t15 9q1 1 9 1t15 2t9 7q0 2 -2.5 5t-9 7t-9.5 6q-15 15 -24 15q-9 -1 -11.5 -7.5t-1 -13t-0.5 -12.5q-1 -4 -6 -10.5t-6 -9zM981 1002q-14 -16 7 -43.5t39 -31.5q9 -1 14.5 8t3.5 20q-2 8 -6.5 11.5t-13 5 t-14.5 5.5q-5 3 -9.5 8t-7 8t-5.5 6.5t-4 4t-4 -1.5z" />
+<glyph unicode="&#xf17d;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM112 640q0 -124 44 -236.5t124 -201.5q50 89 123.5 166.5t142.5 124.5t130.5 81t99.5 48l37 13 q4 1 13 3.5t13 4.5q-21 49 -53 111q-311 -93 -673 -93q-1 -7 -1 -21zM126 775q302 0 606 80q-120 213 -244 378q-138 -65 -234 -186t-128 -272zM350 134q184 -150 418 -150q132 0 256 52q-42 241 -140 498h-2l-2 -1q-16 -6 -43 -16.5t-101 -49t-137 -82t-131 -114.5 t-103 -148zM609 1276q1 1 2 1q-1 0 -2 -1zM613 1277q131 -170 246 -382q69 26 130 60.5t96.5 61.5t65.5 57t37.5 40.5l12.5 17.5q-185 164 -433 164q-76 0 -155 -19zM909 797q25 -53 44 -95q2 -6 6.5 -17.5t7.5 -16.5q36 5 74.5 7t73.5 2t69 -1.5t64 -4t56.5 -5.5t48 -6.5 t36.5 -6t25 -4.5l10 -2q-3 232 -149 410l-1 -1q-9 -12 -19 -24.5t-43.5 -44.5t-71 -60.5t-100 -65t-131.5 -64.5zM1007 565q87 -239 128 -469q111 75 185 189.5t96 250.5q-210 60 -409 29z" />
+<glyph unicode="&#xf17e;" d="M0 1024q0 159 112.5 271.5t271.5 112.5q130 0 234 -80q77 16 150 16q143 0 273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -73 -16 -150q80 -104 80 -234q0 -159 -112.5 -271.5t-271.5 -112.5q-130 0 -234 80q-77 -16 -150 -16q-143 0 -273.5 55.5t-225 150t-150 225 t-55.5 273.5q0 73 16 150q-80 104 -80 234zM376 399q0 -92 122 -157.5t291 -65.5q73 0 140 18.5t122.5 53.5t88.5 93.5t33 131.5q0 50 -19.5 91.5t-48.5 68.5t-73 49t-82.5 34t-87.5 23l-104 24q-30 7 -44 10.5t-35 11.5t-30 16t-16.5 21t-7.5 30q0 77 144 77q43 0 77 -12 t54 -28.5t38 -33.5t40 -29t48 -12q47 0 75.5 32t28.5 77q0 55 -56 99.5t-142 67.5t-182 23q-68 0 -132 -15.5t-119.5 -47t-89 -87t-33.5 -128.5q0 -61 19 -106.5t56 -75.5t80 -48.5t103 -32.5l146 -36q90 -22 112 -36q32 -20 32 -60q0 -39 -40 -64.5t-105 -25.5 q-51 0 -91.5 16t-65 38.5t-45.5 45t-46 38.5t-54 16q-50 0 -75.5 -30t-25.5 -75z" />
+<glyph unicode="&#xf180;" horiz-adv-x="1664" d="M0 640q0 75 53 128l587 587q53 53 128 53t128 -53l265 -265l-398 -399l-188 188q-42 42 -99 42q-59 0 -100 -41l-120 -121q-42 -40 -42 -99q0 -58 42 -100l406 -408q30 -28 67 -37l6 -4h28q60 0 99 41l619 619l2 -3q53 -53 53 -128t-53 -128l-587 -587 q-52 -53 -127.5 -53t-128.5 53l-587 587q-53 53 -53 128zM302 660q0 21 14 35l121 120q13 15 35 15t36 -15l252 -252l574 575q15 15 36 15t36 -15l120 -120q14 -15 14 -36t-14 -36l-730 -730q-17 -15 -37 -15q-4 0 -6 1q-18 2 -30 14l-407 408q-14 15 -14 36z" />
+<glyph unicode="&#xf181;" d="M0 -64v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45zM160 192q0 -14 9 -23t23 -9h480q14 0 23 9t9 23v1024q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-1024zM832 576q0 -14 9 -23t23 -9h480q14 0 23 9t9 23 v640q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-640z" />
+<glyph unicode="&#xf182;" horiz-adv-x="1280" d="M0 480q0 29 16 53l256 384q73 107 176 107h384q103 0 176 -107l256 -384q16 -24 16 -53q0 -40 -28 -68t-68 -28q-51 0 -80 43l-227 341h-45v-132l247 -411q9 -15 9 -33q0 -26 -19 -45t-45 -19h-192v-272q0 -46 -33 -79t-79 -33h-160q-46 0 -79 33t-33 79v272h-192 q-26 0 -45 19t-19 45q0 18 9 33l247 411v132h-45l-227 -341q-29 -43 -80 -43q-40 0 -68 28t-28 68zM416 1280q0 93 65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5t-65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5z" />
+<glyph unicode="&#xf183;" horiz-adv-x="1024" d="M0 416v416q0 80 56 136t136 56h640q80 0 136 -56t56 -136v-416q0 -40 -28 -68t-68 -28t-68 28t-28 68v352h-64v-912q0 -46 -33 -79t-79 -33t-79 33t-33 79v464h-64v-464q0 -46 -33 -79t-79 -33t-79 33t-33 79v912h-64v-352q0 -40 -28 -68t-68 -28t-68 28t-28 68z M288 1280q0 93 65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5t-65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5z" />
+<glyph unicode="&#xf184;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM399.5 766q8.5 -37 24.5 -59l349 -473l350 473q16 22 24.5 59t-6 85t-61.5 79q-40 26 -83 25.5 t-73.5 -17.5t-54.5 -45q-36 -40 -96 -40q-59 0 -95 40q-24 28 -54.5 45t-73.5 17.5t-84 -25.5q-46 -31 -60.5 -79t-6 -85z" />
+<glyph unicode="&#xf185;" horiz-adv-x="1792" d="M44 363q-5 17 4 29l180 248l-180 248q-9 13 -4 29q4 15 20 20l292 96v306q0 16 13 26q15 10 29 4l292 -94l180 248q9 12 26 12t26 -12l180 -248l292 94q14 6 29 -4q13 -10 13 -26v-306l292 -96q16 -5 20 -20q5 -16 -4 -29l-180 -248l180 -248q9 -12 4 -29q-4 -15 -20 -20 l-292 -96v-306q0 -16 -13 -26q-15 -10 -29 -4l-292 94l-180 -248q-10 -13 -26 -13t-26 13l-180 248l-292 -94q-14 -6 -29 4q-13 10 -13 26v306l-292 96q-16 5 -20 20zM320 640q0 -117 45.5 -223.5t123 -184t184 -123t223.5 -45.5t223.5 45.5t184 123t123 184t45.5 223.5 t-45.5 223.5t-123 184t-184 123t-223.5 45.5t-223.5 -45.5t-184 -123t-123 -184t-45.5 -223.5z" />
+<glyph unicode="&#xf186;" d="M0 640q0 153 57.5 292.5t156 241.5t235.5 164.5t290 68.5q44 2 61 -39q18 -41 -15 -72q-86 -78 -131.5 -181.5t-45.5 -218.5q0 -148 73 -273t198 -198t273 -73q118 0 228 51q41 18 72 -13q14 -14 17.5 -34t-4.5 -38q-94 -203 -283.5 -324.5t-413.5 -121.5q-156 0 -298 61 t-245 164t-164 245t-61 298zM128 640q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51q144 0 273.5 61.5t220.5 171.5q-54 -9 -110 -9q-182 0 -337 90t-245 245t-90 337q0 192 104 357q-201 -60 -328.5 -229t-127.5 -384z" />
+<glyph unicode="&#xf187;" horiz-adv-x="1792" d="M64 1088v256q0 26 19 45t45 19h1536q26 0 45 -19t19 -45v-256q0 -26 -19 -45t-45 -19h-1536q-26 0 -45 19t-19 45zM128 -64v960q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-960q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45zM704 704q0 -26 19 -45t45 -19h256 q26 0 45 19t19 45t-19 45t-45 19h-256q-26 0 -45 -19t-19 -45z" />
+<glyph unicode="&#xf188;" horiz-adv-x="1664" d="M32 576q0 26 19 45t45 19h224v294l-173 173q-19 19 -19 45t19 45t45 19t45 -19l173 -173h844l173 173q19 19 45 19t45 -19t19 -45t-19 -45l-173 -173v-294h224q26 0 45 -19t19 -45t-19 -45t-45 -19h-224q0 -171 -67 -290l208 -209q19 -19 19 -45t-19 -45q-18 -19 -45 -19 t-45 19l-198 197q-5 -5 -15 -13t-42 -28.5t-65 -36.5t-82 -29t-97 -13v896h-128v-896q-51 0 -101.5 13.5t-87 33t-66 39t-43.5 32.5l-15 14l-183 -207q-20 -21 -48 -21q-24 0 -43 16q-19 18 -20.5 44.5t15.5 46.5l202 227q-58 114 -58 274h-224q-26 0 -45 19t-19 45z M512 1152q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5h-640z" />
+<glyph unicode="&#xf189;" horiz-adv-x="1920" d="M-1 1004q0 11 3 16l4 6q15 19 57 19l274 2q12 -2 23 -6.5t16 -8.5l5 -3q16 -11 24 -32q20 -50 46 -103.5t41 -81.5l16 -29q29 -60 56 -104t48.5 -68.5t41.5 -38.5t34 -14t27 5q2 1 5 5t12 22t13.5 47t9.5 81t0 125q-2 40 -9 73t-14 46l-6 12q-25 34 -85 43q-13 2 5 24 q17 19 38 30q53 26 239 24q82 -1 135 -13q20 -5 33.5 -13.5t20.5 -24t10.5 -32t3.5 -45.5t-1 -55t-2.5 -70.5t-1.5 -82.5q0 -11 -1 -42t-0.5 -48t3.5 -40.5t11.5 -39t22.5 -24.5q8 -2 17 -4t26 11t38 34.5t52 67t68 107.5q60 104 107 225q4 10 10 17.5t11 10.5l4 3l5 2.5 t13 3t20 0.5l288 2q39 5 64 -2.5t31 -16.5l6 -10q23 -64 -150 -294q-24 -32 -65 -85q-78 -100 -90 -131q-17 -41 14 -81q17 -21 81 -82h1l1 -1l1 -1l2 -2q141 -131 191 -221q3 -5 6.5 -12.5t7 -26.5t-0.5 -34t-25 -27.5t-59 -12.5l-256 -4q-24 -5 -56 5t-52 22l-20 12 q-30 21 -70 64t-68.5 77.5t-61 58t-56.5 15.5q-3 -1 -8 -3.5t-17 -14.5t-21.5 -29.5t-17 -52t-6.5 -77.5q0 -15 -3.5 -27.5t-7.5 -18.5l-4 -5q-18 -19 -53 -22h-115q-71 -4 -146 16.5t-131.5 53t-103 66t-70.5 57.5l-25 24q-10 10 -27.5 30t-71.5 91t-106 151t-122.5 211 t-130.5 272q-6 16 -6 27z" />
+<glyph unicode="&#xf18a;" horiz-adv-x="1792" d="M0 391q0 115 69.5 245t197.5 258q169 169 341.5 236t246.5 -7q65 -64 20 -209q-4 -14 -1 -20t10 -7t14.5 0.5t13.5 3.5l6 2q139 59 246 59t153 -61q45 -63 0 -178q-2 -13 -4.5 -20t4.5 -12.5t12 -7.5t17 -6q57 -18 103 -47t80 -81.5t34 -116.5q0 -68 -37 -139.5 t-109 -137t-168.5 -117.5t-226 -83t-270.5 -31t-275 33.5t-240.5 93t-171.5 151t-65 199.5zM181 320q9 -96 89 -170t208.5 -109t274.5 -21q223 23 369.5 141.5t132.5 264.5q-9 96 -89 170t-208.5 109t-274.5 21q-223 -23 -369.5 -141.5t-132.5 -264.5zM413.5 230.5 q-40.5 92.5 6.5 187.5q47 93 151.5 139t210.5 19q111 -29 158.5 -119.5t2.5 -190.5q-45 -102 -158 -150t-224 -12q-107 34 -147.5 126.5zM495 257.5q9 -34.5 43 -50.5t74.5 -2.5t62.5 47.5q21 34 11 69t-45 50q-34 14 -73 1t-60 -46q-22 -34 -13 -68.5zM705 399 q-17 -31 13 -45q14 -5 29 0.5t22 18.5q8 13 3.5 26.5t-17.5 18.5q-14 5 -28.5 -0.5t-21.5 -18.5zM1165 1274q-6 28 9.5 51.5t43.5 29.5q123 26 244 -11.5t208 -134.5q87 -96 112.5 -222.5t-13.5 -241.5q-9 -27 -34 -40t-52 -4t-40 34t-5 52q28 82 10 172t-80 158 q-62 69 -148 95.5t-173 8.5q-28 -6 -52 9.5t-30 43.5zM1224 1047q-5 24 8 44.5t37 25.5q60 13 119 -5.5t101 -65.5t54.5 -108.5t-6.5 -117.5q-8 -23 -29.5 -34t-44.5 -4q-23 8 -34 29.5t-4 44.5q20 63 -24 111t-107 35q-24 -5 -45 8t-25 37z" />
+<glyph unicode="&#xf18b;" d="M0 638q0 187 83.5 349.5t229.5 269.5t325 137v-485q0 -252 -126.5 -459.5t-330.5 -306.5q-181 215 -181 495zM398 -34q138 87 235.5 211t131.5 268q35 -144 132.5 -268t235.5 -211q-171 -94 -368 -94q-196 0 -367 94zM898 909v485q179 -30 325 -137t229.5 -269.5 t83.5 -349.5q0 -280 -181 -495q-204 99 -330.5 306.5t-126.5 459.5z" />
+<glyph unicode="&#xf18c;" horiz-adv-x="1408" d="M0 -211q0 19 13 31.5t32 12.5q173 1 322.5 107.5t251.5 294.5q-36 -14 -72 -23t-83 -13t-91 2.5t-93 28.5t-92 59t-84.5 100t-74.5 146q114 47 214 57t167.5 -7.5t124.5 -56.5t88.5 -77t56.5 -82q53 131 79 291q-7 -1 -18 -2.5t-46.5 -2.5t-69.5 0.5t-81.5 10t-88.5 23 t-84 42.5t-75 65t-54.5 94.5t-28.5 127.5q70 28 133.5 36.5t112.5 -1t92 -30t73.5 -50t56 -61t42 -63t27.5 -56t16 -39.5l4 -16q12 122 12 195q-8 6 -21.5 16t-49 44.5t-63.5 71.5t-54 93t-33 112.5t12 127t70 138.5q73 -25 127.5 -61.5t84.5 -76.5t48 -85t20.5 -89 t-0.5 -85.5t-13 -76.5t-19 -62t-17 -42l-7 -15q1 -5 1 -50.5t-1 -71.5q3 7 10 18.5t30.5 43t50.5 58t71 55.5t91.5 44.5t112 14.5t132.5 -24q-2 -78 -21.5 -141.5t-50 -104.5t-69.5 -71.5t-81.5 -45.5t-84.5 -24t-80 -9.5t-67.5 1t-46.5 4.5l-17 3q-23 -147 -73 -283 q6 7 18 18.5t49.5 41t77.5 52.5t99.5 42t117.5 20t129 -23.5t137 -77.5q-32 -80 -76 -138t-91 -88.5t-99 -46.5t-101.5 -14.5t-96.5 8.5t-86.5 22t-69.5 27.5t-46 22.5l-17 10q-113 -228 -289.5 -359.5t-384.5 -132.5q-19 0 -32 13t-13 32z" />
+<glyph unicode="&#xf18d;" horiz-adv-x="1280" d="M21 217v66h1238v-66q0 -85 -57.5 -144.5t-138.5 -59.5h-57l-260 -269v269h-529q-81 0 -138.5 59.5t-57.5 144.5zM21 354v255h1238v-255h-1238zM21 682v255h1238v-255h-1238zM21 1010v67q0 84 57.5 143.5t138.5 59.5h846q81 0 138.5 -59.5t57.5 -143.5v-67h-1238z" />
+<glyph unicode="&#xf18e;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273zM384 544v192q0 13 9.5 22.5t22.5 9.5h352v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23t-9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v192h-352q-13 0 -22.5 9.5t-9.5 22.5z" />
+<glyph unicode="&#xf190;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273zM384 640q0 14 9 23l320 320q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-192h352q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-352v-192q0 -14 -9 -23t-23 -9q-12 0 -24 10l-319 319q-9 9 -9 23z" />
+<glyph unicode="&#xf191;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 160q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5v960q0 13 -9.5 22.5t-22.5 9.5h-960 q-13 0 -22.5 -9.5t-9.5 -22.5v-960zM448 640q0 33 27 52l448 320q17 12 37 12q26 0 45 -19t19 -45v-640q0 -26 -19 -45t-45 -19q-20 0 -37 12l-448 320q-27 19 -27 52z" />
+<glyph unicode="&#xf192;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM224 640q0 -148 73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73t-273 -73 t-198 -198t-73 -273zM512 640q0 106 75 181t181 75t181 -75t75 -181t-75 -181t-181 -75t-181 75t-75 181z" />
+<glyph unicode="&#xf193;" horiz-adv-x="1664" d="M0 320q0 181 104.5 330t274.5 211l17 -131q-122 -54 -195 -165.5t-73 -244.5q0 -185 131.5 -316.5t316.5 -131.5q126 0 232.5 65t165 175.5t49.5 236.5l102 -204q-58 -179 -210 -290t-339 -111q-156 0 -288.5 77.5t-210 210t-77.5 288.5zM416 1348q-2 16 6 42 q14 51 57 82.5t97 31.5q66 0 113 -47t47 -113q0 -69 -52 -117.5t-120 -41.5l37 -289h423v-128h-407l16 -128h455q40 0 57 -35l228 -455l198 99l58 -114l-256 -128q-13 -7 -29 -7q-40 0 -57 35l-239 477h-472q-24 0 -42.5 16.5t-21.5 40.5z" />
+<glyph unicode="&#xf194;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM128 806q16 -8 25.5 -26t21.5 -20q21 -3 54.5 8.5t58 10.5t41.5 -30q11 -18 18.5 -38.5t15 -48t12.5 -40.5 q17 -46 53 -187q36 -146 57 -197q42 -99 103 -125q43 -12 85 -1.5t76 31.5q131 77 250 237q104 139 172.5 292.5t82.5 226.5q16 85 -21 132q-52 65 -187 45q-17 -3 -41 -12.5t-57.5 -30.5t-64.5 -48.5t-59.5 -70t-44.5 -91.5q80 7 113.5 -16t26.5 -99q-5 -52 -52 -143 q-43 -78 -71 -99q-44 -32 -87 14q-23 24 -37.5 64.5t-19 73t-10 84t-8.5 71.5q-23 129 -34 164q-12 37 -35.5 69t-50.5 40q-57 16 -127 -25q-54 -32 -136.5 -106t-122.5 -102v-7z" />
+<glyph unicode="&#xf195;" horiz-adv-x="1152" d="M0 608v128q0 23 23 31l233 71v93l-215 -66q-3 -1 -9 -1q-10 0 -19 6q-13 10 -13 26v128q0 23 23 31l233 71v250q0 14 9 23t23 9h160q14 0 23 -9t9 -23v-181l375 116q15 5 28 -5t13 -26v-128q0 -23 -23 -31l-393 -121v-93l375 116q15 5 28 -5t13 -26v-128q0 -23 -23 -31 l-393 -121v-487q188 13 318 151t130 328q0 14 9 23t23 9h160q14 0 23 -9t9 -23q0 -191 -94.5 -353t-256.5 -256.5t-353 -94.5h-160q-14 0 -23 9t-9 23v611l-215 -66q-3 -1 -9 -1q-10 0 -19 6q-13 10 -13 26z" />
+<glyph unicode="&#xf196;" horiz-adv-x="1408" d="M0 288v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5v-832q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5zM128 288q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v832q0 66 -47 113t-113 47h-832q-66 0 -113 -47 t-47 -113v-832zM256 672v64q0 14 9 23t23 9h352v352q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-352h352q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-352v-352q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v352h-352q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf197;" horiz-adv-x="2176" d="M0 576q0 12 38.5 20.5t96.5 10.5q-7 25 -7 49q0 33 9.5 56.5t22.5 23.5h64v64h128q158 0 268 -64h1113q42 -7 106.5 -18t80.5 -14q89 -15 150 -40.5t83.5 -47.5t22.5 -40t-22.5 -40t-83.5 -47.5t-150 -40.5q-16 -3 -80.5 -14t-106.5 -18h-1113q-110 -64 -268 -64h-128v64 h-64q-13 0 -22.5 23.5t-9.5 56.5q0 24 7 49q-58 2 -96.5 10.5t-38.5 20.5zM323 336h29q157 0 273 64h1015q-217 -38 -456 -80q-57 0 -113 -24t-83 -48l-28 -24l-288 -288q-26 -26 -70.5 -45t-89.5 -19h-96zM323 816l93 464h96q46 0 90 -19t70 -45l288 -288q4 -4 11 -10.5 t30.5 -23t48.5 -29t61.5 -23t72.5 -10.5l456 -80h-1015q-116 64 -273 64h-29zM1739 484l81 -30q68 48 68 122t-68 122l-81 -30q53 -36 53 -92t-53 -92z" />
+<glyph unicode="&#xf198;" horiz-adv-x="1664" d="M0 796q0 47 27.5 85t71.5 53l157 53l-53 159q-8 24 -8 47q0 60 42 102.5t102 42.5q47 0 85 -27t53 -72l54 -160l310 105l-54 160q-8 24 -8 47q0 59 42.5 102t101.5 43q47 0 85.5 -27.5t53.5 -71.5l53 -161l162 55q21 6 43 6q60 0 102.5 -39.5t42.5 -98.5q0 -45 -30 -81.5 t-74 -51.5l-157 -54l105 -316l164 56q24 8 46 8q62 0 103.5 -40.5t41.5 -101.5q0 -97 -93 -130l-172 -59l56 -167q7 -21 7 -47q0 -59 -42 -102t-101 -43q-47 0 -85.5 27t-53.5 72l-55 165l-310 -106l55 -164q8 -24 8 -47q0 -59 -42 -102t-102 -43q-47 0 -85 27t-53 72 l-55 163l-153 -53q-29 -9 -50 -9q-61 0 -101.5 40t-40.5 101q0 47 27.5 85t71.5 53l156 53l-105 313l-156 -54q-26 -8 -48 -8q-60 0 -101 40.5t-41 100.5zM620 811l105 -313l310 105l-105 315z" />
+<glyph unicode="&#xf199;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 352q0 -40 28 -68t68 -28h832q40 0 68 28t28 68v436q-31 -35 -64 -55q-34 -22 -132.5 -85t-151.5 -99 q-98 -69 -164 -69t-164 69q-46 32 -141.5 92.5t-142.5 92.5q-12 8 -33 27t-31 27v-436zM256 928q0 -37 30.5 -76.5t67.5 -64.5q47 -32 137.5 -89t129.5 -83q3 -2 17 -11.5t21 -14t21 -13t23.5 -13t21.5 -9.5t22.5 -7.5t20.5 -2.5t20.5 2.5t22.5 7.5t21.5 9.5t23.5 13t21 13 t21 14t17 11.5l267 174q35 23 66.5 62.5t31.5 73.5q0 41 -27.5 70t-68.5 29h-832q-40 0 -68 -28t-28 -68z" />
+<glyph unicode="&#xf19a;" horiz-adv-x="1792" d="M0 640q0 182 71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348t-71 -348t-191 -286t-286 -191t-348 -71t-348 71t-286 191t-191 286t-71 348zM41 640q0 -173 68 -331.5t182.5 -273t273 -182.5t331.5 -68t331.5 68t273 182.5t182.5 273t68 331.5 t-68 331.5t-182.5 273t-273 182.5t-331.5 68t-331.5 -68t-273 -182.5t-182.5 -273t-68 -331.5zM127 640q0 163 67 313l367 -1005q-196 95 -315 281t-119 411zM254 1062q105 160 274.5 253.5t367.5 93.5q147 0 280.5 -53t238.5 -149h-10q-55 0 -92 -40.5t-37 -95.5 q0 -12 2 -24t4 -21.5t8 -23t9 -21t12 -22.5t12.5 -21t14.5 -24t14 -23q63 -107 63 -212q0 -19 -2.5 -38.5t-10 -49.5t-11.5 -44t-17.5 -59t-17.5 -58l-76 -256l-278 826q46 3 88 8q19 2 26 18.5t-2.5 31t-28.5 13.5l-205 -10q-75 1 -202 10q-12 1 -20.5 -5t-11.5 -15 t-1.5 -18.5t9 -16.5t19.5 -8l80 -8l120 -328l-168 -504l-280 832q46 3 88 8q19 2 26 18.5t-2.5 31t-28.5 13.5l-205 -10q-7 0 -23 0.5t-26 0.5zM679 -97l230 670l237 -647q1 -6 5 -11q-126 -44 -255 -44q-112 0 -217 32zM1282 -24l235 678q59 169 59 276q0 42 -6 79 q95 -174 95 -369q0 -209 -104 -385.5t-279 -278.5z" />
+<glyph unicode="&#xf19b;" horiz-adv-x="1792" d="M0 455q0 140 100.5 263.5t275 205.5t391.5 108v-172q-217 -38 -356.5 -150t-139.5 -255q0 -152 154.5 -267t388.5 -145v1360l272 133v-1536l-272 -128q-228 20 -414 102t-293 208.5t-107 272.5zM1134 860v172q277 -33 481 -157l140 79l37 -390l-525 114l147 83 q-119 70 -280 99z" />
+<glyph unicode="&#xf19c;" horiz-adv-x="2048" d="M0 -128q0 26 20.5 45t48.5 19h1782q28 0 48.5 -19t20.5 -45v-128h-1920v128zM0 1024v128l960 384l960 -384v-128h-128q0 -26 -20.5 -45t-48.5 -19h-1526q-28 0 -48.5 19t-20.5 45h-128zM128 0v64q0 26 20.5 45t48.5 19h59v768h256v-768h128v768h256v-768h128v768h256 v-768h128v768h256v-768h59q28 0 48.5 -19t20.5 -45v-64h-1664z" />
+<glyph unicode="&#xf19d;" horiz-adv-x="2304" d="M0 1024q0 23 22 31l1120 352q4 1 10 1t10 -1l1120 -352q22 -8 22 -31t-22 -31l-1120 -352q-4 -1 -10 -1t-10 1l-652 206q-43 -34 -71 -111.5t-34 -178.5q63 -36 63 -109q0 -69 -58 -107l58 -433q2 -14 -8 -25q-9 -11 -24 -11h-192q-15 0 -24 11q-10 11 -8 25l58 433 q-58 38 -58 107q0 73 65 111q11 207 98 330l-333 104q-22 8 -22 31zM512 384l18 316l574 -181q22 -7 48 -7t48 7l574 181l18 -316q4 -69 -82 -128t-235 -93.5t-323 -34.5t-323 34.5t-235 93.5t-82 128z" />
+<glyph unicode="&#xf19e;" d="M109 1536q58 -15 108 -15q43 0 111 15q63 -111 133.5 -229.5t167 -276.5t138.5 -227q37 61 109.5 177.5t117.5 190t105 176t107 189.5q54 -14 107 -14q56 0 114 14q-28 -39 -60 -88.5t-49.5 -78.5t-56.5 -96t-49 -84q-146 -248 -353 -610l13 -707q-62 11 -105 11 q-41 0 -105 -11l13 707q-40 69 -168.5 295.5t-216.5 374.5t-181 287z" />
+<glyph unicode="&#xf1a0;" horiz-adv-x="1280" d="M111 182q0 81 44.5 150t118.5 115q131 82 404 100q-32 41 -47.5 73.5t-15.5 73.5q0 40 21 85q-46 -4 -68 -4q-148 0 -249.5 96.5t-101.5 244.5q0 82 36 159t99 131q76 66 182 98t218 32h417l-137 -88h-132q75 -63 113 -133t38 -160q0 -72 -24.5 -129.5t-59.5 -93 t-69.5 -65t-59 -61.5t-24.5 -66q0 -36 32 -70.5t77 -68t90.5 -73.5t77.5 -104t32 -142q0 -91 -49 -173q-71 -122 -209.5 -179.5t-298.5 -57.5q-132 0 -246.5 41.5t-172.5 137.5q-36 59 -36 131zM297 228q0 -56 23.5 -102t61 -75.5t87 -50t100 -29t101.5 -8.5q58 0 111.5 13 t99 39t73 73t27.5 109q0 25 -7 49t-14.5 42t-27 41.5t-29.5 35t-38.5 34.5t-36.5 29t-41.5 30t-36.5 26q-16 2 -49 2q-53 0 -104.5 -7t-107 -25t-97 -46t-68.5 -74.5t-27 -105.5zM403 1222q0 -46 10 -97.5t31.5 -103t52 -92.5t75 -67t96.5 -26q37 0 77.5 16.5t65.5 43.5 q53 56 53 159q0 59 -17 125.5t-48 129t-84 103.5t-117 41q-42 0 -82.5 -19.5t-66.5 -52.5q-46 -59 -46 -160z" />
+<glyph unicode="&#xf1a1;" horiz-adv-x="1984" d="M0 722q0 94 66 160t160 66q83 0 148 -55q248 158 592 164l134 423q4 14 17.5 21.5t28.5 4.5l347 -82q22 50 68.5 81t102.5 31q77 0 131.5 -54.5t54.5 -131.5t-54.5 -132t-131.5 -55q-76 0 -130.5 54t-55.5 131l-315 74l-116 -366q327 -14 560 -166q64 58 151 58 q94 0 160 -66t66 -160q0 -62 -31 -114t-83 -82q5 -33 5 -61q0 -121 -68.5 -230.5t-197.5 -193.5q-125 -82 -285.5 -125.5t-335.5 -43.5q-176 0 -336.5 43.5t-284.5 125.5q-129 84 -197.5 193t-68.5 231q0 29 5 66q-48 31 -77 81.5t-29 109.5zM77 722q0 -67 51 -111 q49 131 180 235q-36 25 -82 25q-62 0 -105.5 -43.5t-43.5 -105.5zM178 465q0 -101 59.5 -194t171.5 -166q116 -75 265.5 -115.5t313.5 -40.5t313.5 40.5t265.5 115.5q112 73 171.5 166t59.5 194t-59.5 193.5t-171.5 165.5q-116 75 -265.5 115.5t-313.5 40.5t-313.5 -40.5 t-265.5 -115.5q-112 -73 -171.5 -165.5t-59.5 -193.5zM555 572q0 57 41.5 98t97.5 41t96.5 -41t40.5 -98q0 -56 -40.5 -96t-96.5 -40q-57 0 -98 40t-41 96zM661 209.5q0 16.5 11 27.5t27 11t27 -11q77 -77 265 -77h2q188 0 265 77q11 11 27 11t27 -11t11 -27.5t-11 -27.5 q-99 -99 -319 -99h-2q-220 0 -319 99q-11 11 -11 27.5zM1153 572q0 57 41.5 98t97.5 41t96.5 -41t40.5 -98q0 -56 -40.5 -96t-96.5 -40q-57 0 -98 40t-41 96zM1555 1350q0 -45 32 -77t77 -32t77 32t32 77t-32 77t-77 32t-77 -32t-32 -77zM1672 843q131 -105 178 -238 q57 46 57 117q0 62 -43.5 105.5t-105.5 43.5q-49 0 -86 -28z" />
+<glyph unicode="&#xf1a2;" d="M0 193v894q0 133 94 227t226 94h896q132 0 226 -94t94 -227v-894q0 -133 -94 -227t-226 -94h-896q-132 0 -226 94t-94 227zM155 709q0 -37 19.5 -67.5t52.5 -45.5q-7 -25 -7 -54q0 -98 74 -181.5t201.5 -132t278.5 -48.5q150 0 277.5 48.5t201.5 132t74 181.5q0 27 -6 54 q35 14 57 45.5t22 70.5q0 51 -36 87.5t-87 36.5q-60 0 -98 -48q-151 107 -375 115l83 265l206 -49q1 -50 36.5 -85t84.5 -35q50 0 86 35.5t36 85.5t-36 86t-86 36q-36 0 -66 -20.5t-45 -53.5l-227 54q-9 2 -17.5 -2.5t-11.5 -14.5l-95 -302q-224 -4 -381 -113q-36 43 -93 43 q-51 0 -87 -36.5t-36 -87.5zM493 613q0 37 26 63t63 26t63 -26t26 -63t-26 -64t-63 -27t-63 27t-26 64zM560 375q0 11 8 18q7 7 17.5 7t17.5 -7q49 -51 172 -51h1h1q122 0 173 51q7 7 17.5 7t17.5 -7t7 -18t-7 -18q-65 -64 -208 -64h-1h-1q-143 0 -207 64q-8 7 -8 18z M882 613q0 37 26 63t63 26t63 -26t26 -63t-26 -64t-63 -27t-63 27t-26 64zM1143 1120q0 30 21 51t50 21q30 0 51 -21t21 -51q0 -29 -21 -50t-51 -21q-29 0 -50 21t-21 50z" />
+<glyph unicode="&#xf1a3;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM320 502q0 -82 57.5 -139t139.5 -57q81 0 138.5 56.5t57.5 136.5v280q0 19 13.5 33t33.5 14 q19 0 32.5 -14t13.5 -33v-54l60 -28l90 27v62q0 79 -58 135t-138 56t-138 -55.5t-58 -134.5v-283q0 -20 -14 -33.5t-33 -13.5t-32.5 13.5t-13.5 33.5v120h-151v-122zM806 500q0 -80 58 -137t139 -57t138.5 57t57.5 139v122h-150v-126q0 -20 -13.5 -33.5t-33.5 -13.5 q-19 0 -32.5 14t-13.5 33v123l-90 -26l-60 28v-123z" />
+<glyph unicode="&#xf1a4;" horiz-adv-x="1920" d="M0 336v266h328v-262q0 -43 30 -72.5t72 -29.5t72 29.5t30 72.5v620q0 171 126.5 292t301.5 121q176 0 302 -122t126 -294v-136l-195 -58l-131 61v118q0 42 -30 72t-72 30t-72 -30t-30 -72v-612q0 -175 -126 -299t-303 -124q-178 0 -303.5 125.5t-125.5 303.5zM1062 332 v268l131 -61l195 58v-270q0 -42 30 -71.5t72 -29.5t72 29.5t30 71.5v275h328v-266q0 -178 -125.5 -303.5t-303.5 -125.5q-177 0 -303 124.5t-126 300.5z" />
+<glyph unicode="&#xf1a5;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM64 640h704v-704h480q93 0 158.5 65.5t65.5 158.5v480h-704v704h-480q-93 0 -158.5 -65.5t-65.5 -158.5v-480z " />
+<glyph unicode="&#xf1a6;" horiz-adv-x="2048" d="M0 271v697h328v286h204v-983h-532zM205 435h123v369h-123v-369zM614 271h205v697h-205v-697zM614 1050h205v204h-205v-204zM901 26v163h328v82h-328v697h533v-942h-533zM1106 435h123v369h-123v-369zM1516 26v163h327v82h-327v697h532v-942h-532zM1720 435h123v369h-123 v-369z" />
+<glyph unicode="&#xf1a7;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM293 388l211 41v206q55 -19 116 -19q125 0 213.5 95t88.5 229t-88.5 229t-213.5 95q-74 0 -141 -36h-186v-840z M504 804v277q28 17 70 17q53 0 91 -45t38 -109t-38 -109.5t-91 -45.5q-43 0 -70 15zM636 -39l211 41v206q51 -19 117 -19q125 0 213 95t88 229t-88 229t-213 95q-20 0 -39 -3q-23 -78 -78 -136q-87 -95 -211 -101v-636zM847 377v277q28 17 70 17q53 0 91 -45.5t38 -109.5 t-38 -109t-91 -45q-43 0 -70 15z" />
+<glyph unicode="&#xf1a8;" horiz-adv-x="2038" d="M41 455q0 15 8.5 26.5t22.5 14.5l486 106q-8 14 -8 25t5.5 17.5t16 11.5t20 7t23 4.5t18.5 4.5q4 1 15.5 7.5t17.5 6.5q15 0 28 -16t20 -33q163 37 172 37q17 0 29.5 -11t12.5 -28q0 -15 -8.5 -26t-23.5 -14l-182 -40l-1 -16q-1 -26 81.5 -117.5t104.5 -91.5q47 0 119 80 t72 129q0 36 -23.5 53t-51 18.5t-51 11.5t-23.5 34q0 16 10 34l-68 19q43 44 43 117q0 26 -5 58q82 16 144 16q44 0 71.5 -1.5t48.5 -8.5t31 -13.5t20.5 -24.5t15.5 -33.5t17 -47.5t24 -60l50 25q-3 -40 -23 -60t-42.5 -21t-40 -6.5t-16.5 -20.5l1 -21q75 3 143.5 -20.5 t118 -58.5t101 -94.5t84 -108t75.5 -120.5q33 -56 78.5 -109t75.5 -80.5t99 -88.5q-48 -30 -108.5 -57.5t-138.5 -59t-114 -47.5q-44 37 -74 115t-43.5 164.5t-33 180.5t-42.5 168.5t-72.5 123t-122.5 48.5l-10 -2l-6 -4q4 -5 13 -14q6 -5 28 -23.5t25.5 -22t19 -18 t18 -20.5t11.5 -21t10.5 -27.5t4.5 -31t4 -40.5l1 -33q1 -26 -2.5 -57.5t-7.5 -52t-12.5 -58.5t-11.5 -53q-35 1 -101 -9.5t-98 -10.5q-39 0 -72 10q-2 16 -2 47q0 74 3 96q2 13 31.5 41.5t57 59t26.5 51.5q-24 2 -43 -24q-36 -53 -111.5 -99.5t-136.5 -46.5q-25 0 -75.5 63 t-106.5 139.5t-84 96.5q-6 4 -27 30q-482 -112 -513 -112q-16 0 -28 11t-12 27zM764 676q10 1 32.5 7t34.5 6q19 0 35 -10l-96 -20zM822 568l48 12l109 -177l-73 -48zM859 884q16 30 36 46.5t54 29.5t65.5 36t46 36.5t50 55t43.5 50.5q12 -9 28 -31.5t32 -36.5t38 -13l12 1 v-76l22 -1q247 95 371 190q28 21 50 39t42.5 37.5t33 31t29.5 34t24 31t24.5 37t23 38t27 47.5t29.5 53l7 9q-2 -53 -43 -139q-79 -165 -205 -264t-306 -142q-14 -3 -42 -7.5t-50 -9.5t-39 -14q3 -19 24.5 -46t21.5 -34q0 -11 -26 -30q-5 5 -13.5 15.5t-12 14.5t-10.5 11.5 t-10 10.5l-8 8t-8.5 7.5t-8 5t-8.5 4.5q-7 3 -14.5 5t-20.5 2.5t-22 0.5h-32.5h-37.5q-126 0 -217 -43zM1061 45h31l10 -83l-41 -12v95zM1061 -79q39 26 131.5 47.5t146.5 21.5q9 0 22.5 -15.5t28 -42.5t26 -50t24 -51t14.5 -33q-121 -45 -244 -45q-61 0 -125 11zM1116 29 q21 2 60.5 8.5t72 10t60.5 3.5h14q3 -15 3 -16q0 -7 -17.5 -14.5t-46 -13t-54 -9.5t-53.5 -7.5t-32 -4.5zM1947 1528l1 3l2 4l-1 -5zM1950 1535v1v-1zM1950 1535l1 1z" />
+<glyph unicode="&#xf1a9;" d="M0 520q0 89 19.5 172.5t49 145.5t70.5 118.5t78.5 94t78.5 69.5t64.5 46.5t42.5 24.5q14 8 51 26.5t54.5 28.5t48 30t60.5 44q36 28 58 72.5t30 125.5q129 -155 186 -193q44 -29 130 -68t129 -66q21 -13 39 -25t60.5 -46.5t76 -70.5t75 -95t69 -122t47 -148.5 t19.5 -177.5q0 -164 -62 -304.5t-166 -236t-242.5 -149.5t-290.5 -54t-293 57.5t-247.5 157t-170.5 241.5t-64 302zM333 256q-2 -112 74 -164q29 -20 62.5 -28.5t103.5 -8.5q57 0 132 32.5t134 71t120 70.5t93 31q26 -1 65 -31.5t71.5 -67t68 -67.5t55.5 -32q35 -3 58.5 14 t55.5 63q28 41 42.5 101t14.5 106q0 22 -5 44.5t-16.5 45t-34 36.5t-52.5 14q-33 0 -97 -41.5t-129 -83.5t-101 -42q-27 -1 -63.5 19t-76 49t-83.5 58t-100 49t-111 19q-115 -1 -197 -78.5t-84 -178.5zM685.5 -76q-0.5 -10 7.5 -20q34 -32 87.5 -46t102.5 -12.5t99 4.5 q41 4 84.5 20.5t65 30t28.5 20.5q12 12 7 29q-5 19 -24 5q-30 -22 -87 -39t-131 -17q-129 0 -193 49q-5 4 -13 4q-11 0 -26 -12q-7 -6 -7.5 -16zM852 31q9 -8 17.5 -4.5t31.5 23.5q3 2 10.5 8.5t10.5 8.5t10 7t11.5 7t12.5 5t15 4.5t16.5 2.5t20.5 1q27 0 44.5 -7.5 t23 -14.5t13.5 -22q10 -17 12.5 -20t12.5 1q23 12 14 34q-19 47 -39 61q-23 15 -76 15q-47 0 -71 -10q-29 -12 -78 -56q-26 -24 -12 -44z" />
+<glyph unicode="&#xf1aa;" d="M0 78q0 72 44.5 128t113.5 72q-22 86 1 173t88 152l12 12l151 -152l-11 -11q-37 -37 -37 -89t37 -90q37 -37 89 -37t89 37l30 30l151 152l161 160l151 -152l-160 -160l-151 -152l-30 -30q-65 -64 -151.5 -87t-171.5 -2q-16 -70 -72 -115t-129 -45q-85 0 -145 60.5 t-60 145.5zM2 1202q0 85 60 145.5t145 60.5q76 0 133.5 -49t69.5 -123q84 20 169.5 -3.5t149.5 -87.5l12 -12l-152 -152l-12 12q-37 37 -89 37t-89 -37t-37 -89.5t37 -89.5l29 -29l152 -152l160 -160l-151 -152l-161 160l-151 152l-30 30q-68 67 -90 159.5t5 179.5 q-70 15 -115 71t-45 129zM446 803l161 160l152 152l29 30q67 67 159 89.5t178 -3.5q11 75 68.5 126t135.5 51q85 0 145 -60.5t60 -145.5q0 -77 -51 -135t-127 -69q26 -85 3 -176.5t-90 -158.5l-12 -12l-151 152l12 12q37 37 37 89t-37 89t-89 37t-89 -37l-30 -30l-152 -152 l-160 -160zM776 793l152 152l160 -160l152 -152l29 -30q64 -64 87.5 -150.5t2.5 -171.5q76 -11 126.5 -68.5t50.5 -134.5q0 -85 -60 -145.5t-145 -60.5q-74 0 -131 47t-71 118q-86 -28 -179.5 -6t-161.5 90l-11 12l151 152l12 -12q37 -37 89 -37t89 37t37 89t-37 89l-30 30 l-152 152z" />
+<glyph unicode="&#xf1ab;" d="M0 -16v1078q3 9 4 10q5 6 20 11q106 35 149 50v384l558 -198q2 0 160.5 55t316 108.5t161.5 53.5q20 0 20 -21v-418l147 -47v-1079l-774 246q-14 -6 -375 -127.5t-368 -121.5q-13 0 -18 13q0 1 -1 3zM39 15l694 232v1032l-694 -233v-1031zM147 293q6 4 82 92 q21 24 85.5 115t78.5 118q17 30 51 98.5t36 77.5q-8 1 -110 -33q-8 -2 -27.5 -7.5t-34.5 -9.5t-17 -5q-2 -2 -2 -10.5t-1 -9.5q-5 -10 -31 -15q-23 -7 -47 0q-18 4 -28 21q-4 6 -5 23q6 2 24.5 5t29.5 6q58 16 105 32q100 35 102 35q10 2 43 19.5t44 21.5q9 3 21.5 8 t14.5 5.5t6 -0.5q2 -12 -1 -33q0 -2 -12.5 -27t-26.5 -53.5t-17 -33.5q-25 -50 -77 -131l64 -28q12 -6 74.5 -32t67.5 -28q4 -1 10.5 -25.5t4.5 -30.5q-1 -3 -12.5 0.5t-31.5 11.5l-20 9q-44 20 -87 49q-7 5 -41 31.5t-38 28.5q-67 -103 -134 -181q-81 -95 -105 -110 q-4 -2 -19.5 -4t-18.5 0zM268 933l1 3q3 -3 19.5 -5t26.5 0t58 16q36 12 55 14q17 0 21 -17q3 -15 -4 -28q-12 -23 -50 -38q-30 -12 -60 -12q-26 3 -49 26q-14 15 -18 41zM310 -116q0 8 5 13.5t13 5.5q4 0 18 -7.5t30.5 -16.5t20.5 -11q73 -37 159.5 -61.5t157.5 -24.5 q95 0 167 14.5t157 50.5q15 7 30.5 15.5t34 19t28.5 16.5l-43 73l158 -13l-54 -160l-40 66q-130 -83 -276 -108q-58 -12 -91 -12h-84q-79 0 -199.5 39t-183.5 85q-8 7 -8 16zM777 1294l573 -184v380zM885 453l102 -31l45 110l211 -65l37 -135l102 -31l-181 657l-100 31z M1071 630l76 185l63 -227z" />
+<glyph unicode="&#xf1ac;" horiz-adv-x="1792" d="M0 -96v1088q0 66 47 113t113 47h128q66 0 113 -47t47 -113v-1088q0 -66 -47 -113t-113 -47h-128q-66 0 -113 47t-47 113zM512 -96v1536q0 40 28 68t68 28h672q40 0 88 -20t76 -48l152 -152q28 -28 48 -76t20 -88v-163q58 -34 93 -93t35 -128v-768q0 -106 -75 -181 t-181 -75h-864q-66 0 -113 47t-47 113zM640 896h896v256h-160q-40 0 -68 28t-28 68v160h-640v-512zM736 0q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v128q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-128zM736 256q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v128q0 14 -9 23t-23 9 h-128q-14 0 -23 -9t-9 -23v-128zM736 512q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v128q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-128zM992 0q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v128q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-128zM992 256q0 -14 9 -23t23 -9h128 q14 0 23 9t9 23v128q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-128zM992 512q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v128q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-128zM1248 0q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v128q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23 v-128zM1248 256q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v128q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-128zM1248 512q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v128q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-128z" />
+<glyph unicode="&#xf1ad;" d="M0 -192v1664q0 26 19 45t45 19h1280q26 0 45 -19t19 -45v-1664q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45zM256 160q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM256 416q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64 q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM256 672q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM256 928q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM256 1184q0 -14 9 -23 t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM512 96v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23zM512 416q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9 t-9 -23v-64zM512 672q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM512 928q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM512 1184q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64 q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM768 416q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM768 672q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM768 928q0 -14 9 -23 t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM768 1184q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM1024 160q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9 t-9 -23v-64zM1024 416q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM1024 672q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM1024 928q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64 q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64zM1024 1184q0 -14 9 -23t23 -9h64q14 0 23 9t9 23v64q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-64z" />
+<glyph unicode="&#xf1ae;" horiz-adv-x="1280" d="M64 1056q0 40 28 68t68 28t68 -28l228 -228h368l228 228q28 28 68 28t68 -28t28 -68t-28 -68l-292 -292v-824q0 -46 -33 -79t-79 -33t-79 33t-33 79v384h-64v-384q0 -46 -33 -79t-79 -33t-79 33t-33 79v824l-292 292q-28 28 -28 68zM416 1152q0 93 65.5 158.5t158.5 65.5 t158.5 -65.5t65.5 -158.5t-65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5z" />
+<glyph unicode="&#xf1b0;" horiz-adv-x="1664" d="M0 724q0 80 42 139.5t119 59.5q76 0 141.5 -55.5t100.5 -134t35 -152.5q0 -80 -42 -139t-119 -59q-76 0 -141.5 55.5t-100.5 133.5t-35 152zM256 19q0 86 56 191.5t139.5 192.5t187.5 146t193 59q118 0 255 -97.5t229 -237t92 -254.5q0 -46 -17 -76.5t-48.5 -45 t-64.5 -20t-76 -5.5q-68 0 -187.5 45t-182.5 45q-66 0 -192.5 -44.5t-200.5 -44.5q-183 0 -183 146zM333 1163q0 60 19 113.5t63 92.5t105 39q77 0 138.5 -57.5t91.5 -135t30 -151.5q0 -60 -19 -113.5t-63 -92.5t-105 -39q-76 0 -138 57.5t-92 135.5t-30 151zM884 1064 q0 74 30 151.5t91.5 135t138.5 57.5q61 0 105 -39t63 -92.5t19 -113.5q0 -73 -30 -151t-92 -135.5t-138 -57.5q-61 0 -105 39t-63 92.5t-19 113.5zM1226 581q0 74 35 152.5t100.5 134t141.5 55.5q77 0 119 -59.5t42 -139.5q0 -74 -35 -152t-100.5 -133.5t-141.5 -55.5 q-77 0 -119 59t-42 139z" />
+<glyph unicode="&#xf1b1;" horiz-adv-x="768" d="M64 1008q0 128 42.5 249.5t117.5 200t160 78.5t160 -78.5t117.5 -200t42.5 -249.5q0 -145 -57 -243.5t-152 -135.5l45 -821q2 -26 -16 -45t-44 -19h-192q-26 0 -44 19t-16 45l45 821q-95 37 -152 135.5t-57 243.5z" />
+<glyph unicode="&#xf1b2;" horiz-adv-x="1792" d="M0 256v768q0 40 23 73t61 47l704 256q22 8 44 8t44 -8l704 -256q38 -14 61 -47t23 -73v-768q0 -35 -18 -65t-49 -47l-704 -384q-28 -16 -61 -16t-61 16l-704 384q-31 17 -49 47t-18 65zM134 1026l698 -254l698 254l-698 254zM896 -93l640 349v636l-640 -233v-752z" />
+<glyph unicode="&#xf1b3;" horiz-adv-x="2304" d="M0 96v416q0 38 21.5 70t56.5 48l434 186v400q0 38 21.5 70t56.5 48l448 192q23 10 50 10t50 -10l448 -192q35 -16 56.5 -48t21.5 -70v-400l434 -186q36 -16 57 -48t21 -70v-416q0 -36 -19 -67t-52 -47l-448 -224q-25 -14 -57 -14t-57 14l-448 224q-5 2 -7 4q-2 -2 -7 -4 l-448 -224q-25 -14 -57 -14t-57 14l-448 224q-33 16 -52 47t-19 67zM172 531l404 -173l404 173l-404 173zM640 -96l384 192v314l-384 -164v-342zM647 1219l441 -189l441 189l-441 189zM1152 651l384 165v266l-384 -164v-267zM1196 531l404 -173l404 173l-404 173zM1664 -96 l384 192v314l-384 -164v-342z" />
+<glyph unicode="&#xf1b4;" horiz-adv-x="2048" d="M0 22v1260h594q87 0 155 -14t126.5 -47.5t90 -96.5t31.5 -154q0 -181 -172 -263q114 -32 172 -115t58 -204q0 -75 -24.5 -136.5t-66 -103.5t-98.5 -71t-121 -42t-134 -13h-611zM277 236h296q205 0 205 167q0 180 -199 180h-302v-347zM277 773h281q78 0 123.5 36.5 t45.5 113.5q0 144 -190 144h-260v-294zM1137 477q0 208 130.5 345.5t336.5 137.5q138 0 240.5 -68t153 -179t50.5 -248q0 -17 -2 -47h-658q0 -111 57.5 -171.5t166.5 -60.5q63 0 122 32t76 87h221q-100 -307 -427 -307q-214 0 -340.5 132t-126.5 347zM1337 1073h511v124 h-511v-124zM1388 576h408q-18 195 -200 195q-90 0 -146 -52.5t-62 -142.5z" />
+<glyph unicode="&#xf1b5;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM128 254h382q117 0 197 57.5t80 170.5q0 158 -143 200q107 52 107 164q0 57 -19.5 96.5t-56.5 60.5t-79 29.5 t-97 8.5h-371v-787zM301 388v217h189q124 0 124 -113q0 -104 -128 -104h-185zM301 723v184h163q119 0 119 -90q0 -94 -106 -94h-176zM838 538q0 -135 79 -217t213 -82q205 0 267 191h-138q-11 -34 -47.5 -54t-75.5 -20q-68 0 -104 38t-36 107h411q1 10 1 30 q0 132 -74.5 220.5t-203.5 88.5q-128 0 -210 -86t-82 -216zM964 911v77h319v-77h-319zM996 600q4 56 39 89t91 33q113 0 124 -122h-254z" />
+<glyph unicode="&#xf1b6;" horiz-adv-x="2048" d="M0 764q0 86 61 146.5t146 60.5q73 0 130 -46t73 -117l783 -315q49 29 106 29q14 0 21 -1l173 248q1 114 82 194.5t195 80.5q115 0 196.5 -81t81.5 -196t-81.5 -196.5t-196.5 -81.5l-265 -194q-8 -80 -67.5 -133.5t-138.5 -53.5q-73 0 -130 46t-73 117l-783 315 q-51 -30 -106 -30q-85 0 -146 61t-61 147zM55 764q0 -64 44.5 -108.5t107.5 -44.5q11 0 33 4l-64 26q-33 14 -52.5 44.5t-19.5 66.5q0 50 35.5 85.5t85.5 35.5q20 0 41 -8v1l76 -31q-20 37 -56.5 59t-78.5 22q-63 0 -107.5 -44.5t-44.5 -107.5zM1164 244q19 -37 55.5 -59 t79.5 -22q63 0 107.5 44.5t44.5 107.5t-44.5 108t-107.5 45q-13 0 -33 -4q2 -1 20 -8t21.5 -8.5t18.5 -8.5t19 -10t16 -11t15.5 -13.5t11 -14.5t10 -18t5 -21t2.5 -25q0 -50 -35.5 -85.5t-85.5 -35.5q-14 0 -31.5 4.5t-29 9t-31.5 13.5t-28 12zM1584 767q0 -77 54.5 -131.5 t131.5 -54.5t132 54.5t55 131.5t-55 131.5t-132 54.5q-76 0 -131 -54.5t-55 -131.5zM1623 767q0 62 43.5 105.5t104.5 43.5t105 -44t44 -105t-43.5 -104.5t-105.5 -43.5q-61 0 -104.5 43.5t-43.5 104.5z" />
+<glyph unicode="&#xf1b7;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM128 693q0 -53 38 -91t92 -38q36 0 66 18l489 -197q10 -44 45.5 -73t81.5 -29q50 0 86.5 34t41.5 83l167 122 q71 0 122 50.5t51 122.5t-51 123t-122 51q-72 0 -122.5 -50.5t-51.5 -121.5l-108 -155q-2 0 -6.5 0.5t-6.5 0.5q-35 0 -67 -19l-489 197q-10 44 -45.5 73t-80.5 29q-54 0 -92 -38t-38 -92zM162 693q0 40 28 68t68 28q27 0 49.5 -14t34.5 -37l-48 19q-29 11 -56.5 -2 t-38.5 -41q-12 -29 -0.5 -57t39.5 -40v-1l40 -16q-14 -2 -20 -2q-40 0 -68 27.5t-28 67.5zM855 369q5 -2 47 -19q29 -12 58 0.5t41 41.5q11 29 -1 57.5t-41 40.5l-40 16q14 2 21 2q39 0 67 -27.5t28 -67.5t-28 -67.5t-67 -27.5q-59 0 -85 51zM1118 695q0 48 34 82t83 34 q48 0 82 -34t34 -82t-34 -82t-82 -34q-49 0 -83 34t-34 82zM1142 696q0 -39 27.5 -66t65.5 -27t65.5 27t27.5 66q0 38 -27.5 65.5t-65.5 27.5t-65.5 -27.5t-27.5 -65.5z" />
+<glyph unicode="&#xf1b8;" horiz-adv-x="1792" d="M16 970l433 -17l180 -379l-147 92q-63 -72 -111.5 -144.5t-72.5 -125t-39.5 -94.5t-18.5 -63l-4 -21l-190 357q-17 26 -18 56t6 47l8 18q35 63 114 188zM270.5 158q-3.5 28 4 65t12 55t21.5 64t19 53q78 -12 509 -28l-15 -368l-2 -22l-420 29q-36 3 -67 31.5t-47 65.5 q-11 27 -14.5 55zM294 1124l225 356q20 31 60 45t80 10q24 -2 48.5 -12t42 -21t41.5 -33t36 -34.5t36 -39.5t32 -35q-47 -63 -265 -435l-317 187zM782 1524l405 -1q31 3 58 -10.5t39 -28.5l11 -15q39 -61 112 -190l142 83l-220 -373l-419 20l151 86q-34 89 -75 166 t-75.5 123.5t-64.5 80t-47 46.5zM953 197l211 362l7 -173q170 -16 283 -5t170 33l56 22l-188 -359q-12 -29 -36.5 -46.5t-43.5 -20.5l-18 -4q-71 -7 -219 -12l8 -164zM1218 847l313 195l19 11l212 -363q18 -37 12.5 -76t-27.5 -74q-13 -20 -33 -37t-38 -28t-48.5 -22 t-47 -16t-51.5 -14t-46 -12q-34 72 -265 436z" />
+<glyph unicode="&#xf1b9;" horiz-adv-x="1984" d="M0 160v384q0 93 65.5 158.5t158.5 65.5h28l105 419q23 94 104 157.5t179 63.5h704q98 0 179 -63.5t104 -157.5l105 -419h28q93 0 158.5 -65.5t65.5 -158.5v-384q0 -14 -9 -23t-23 -9h-128v-128q0 -80 -56 -136t-136 -56t-136 56t-56 136v128h-928v-128q0 -80 -56 -136 t-136 -56t-136 56t-56 136v128h-96q-14 0 -23 9t-9 23zM160 448q0 -66 47 -113t113 -47t113 47t47 113t-47 113t-113 47t-113 -47t-47 -113zM516 768h952l-89 357q-2 8 -14 17.5t-21 9.5h-704q-9 0 -21 -9.5t-14 -17.5zM1472 448q0 -66 47 -113t113 -47t113 47t47 113 t-47 113t-113 47t-113 -47t-47 -113z" />
+<glyph unicode="&#xf1ba;" horiz-adv-x="1984" d="M0 32v384q0 93 65.5 158.5t158.5 65.5h28l105 419q23 94 104 157.5t179 63.5h128v224q0 14 9 23t23 9h448q14 0 23 -9t9 -23v-224h64q98 0 179 -63.5t104 -157.5l105 -419h28q93 0 158.5 -65.5t65.5 -158.5v-384q0 -14 -9 -23t-23 -9h-128v-64q0 -80 -56 -136t-136 -56 t-136 56t-56 136v64h-928v-64q0 -80 -56 -136t-136 -56t-136 56t-56 136v64h-96q-14 0 -23 9t-9 23zM160 320q0 -66 47 -113t113 -47t113 47t47 113t-47 113t-113 47t-113 -47t-47 -113zM516 640h952l-89 357q-2 8 -14 17.5t-21 9.5h-704q-9 0 -21 -9.5t-14 -17.5zM1472 320 q0 -66 47 -113t113 -47t113 47t47 113t-47 113t-113 47t-113 -47t-47 -113z" />
+<glyph unicode="&#xf1bb;" d="M32 64q0 26 19 45l402 403h-229q-26 0 -45 19t-19 45t19 45l402 403h-197q-26 0 -45 19t-19 45t19 45l384 384q19 19 45 19t45 -19l384 -384q19 -19 19 -45t-19 -45t-45 -19h-197l402 -403q19 -19 19 -45t-19 -45t-45 -19h-229l402 -403q19 -19 19 -45t-19 -45t-45 -19 h-462q1 -17 6 -87.5t5 -108.5q0 -25 -18 -42.5t-43 -17.5h-320q-25 0 -43 17.5t-18 42.5q0 38 5 108.5t6 87.5h-462q-26 0 -45 19t-19 45z" />
+<glyph unicode="&#xf1bc;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM237 886q0 -31 20.5 -52t51.5 -21q11 0 40 8q133 37 307 37q159 0 309.5 -34t253.5 -95q21 -12 40 -12 q29 0 50.5 20.5t21.5 51.5q0 47 -40 70q-126 73 -293 110.5t-343 37.5q-204 0 -364 -47q-23 -7 -38.5 -25.5t-15.5 -48.5zM289 637q0 -25 17.5 -42.5t42.5 -17.5q7 0 37 8q122 33 251 33q279 0 488 -124q24 -13 38 -13q25 0 42.5 17.5t17.5 42.5q0 40 -35 61 q-237 141 -548 141q-153 0 -303 -42q-48 -13 -48 -64zM321 406q0 -20 13.5 -34.5t35.5 -14.5q5 0 37 8q132 27 243 27q226 0 397 -103q19 -11 33 -11q19 0 33 13.5t14 34.5q0 32 -30 51q-193 115 -447 115q-133 0 -287 -34q-42 -9 -42 -52z" />
+<glyph unicode="&#xf1bd;" d="M0 11v1258q0 58 40.5 98.5t98.5 40.5h1258q58 0 98.5 -40.5t40.5 -98.5v-1258q0 -58 -40.5 -98.5t-98.5 -40.5h-1258q-58 0 -98.5 40.5t-40.5 98.5zM71 11q0 -28 20 -48t48 -20h1258q28 0 48 20t20 48v1258q0 28 -20 48t-48 20h-1258q-28 0 -48 -20t-20 -48v-1258z M121 11v141l711 195l-212 439q4 1 12 2.5t12 1.5q170 32 303.5 21.5t221 -46t143.5 -94.5q27 -28 -25 -42q-64 -16 -256 -62l-97 198q-111 7 -240 -16l188 -387l533 145v-496q0 -7 -5.5 -12.5t-12.5 -5.5h-1258q-7 0 -12.5 5.5t-5.5 12.5zM121 709v560q0 7 5.5 12.5 t12.5 5.5h1258q7 0 12.5 -5.5t5.5 -12.5v-428q-85 30 -188 52q-294 64 -645 12l-18 -3l-65 134h-233l85 -190q-132 -51 -230 -137zM246 413q-24 203 166 305l129 -270l-255 -61q-14 -3 -26 4.5t-14 21.5z" />
+<glyph unicode="&#xf1be;" horiz-adv-x="2304" d="M0 405l17 128q2 9 9 9t9 -9l20 -128l-20 -126q-2 -9 -9 -9t-9 9zM79 405l23 207q0 9 9 9q8 0 10 -9l26 -207l-26 -203q-2 -9 -10 -9q-9 0 -9 10zM169 405l21 245q2 12 12 12q11 0 11 -12l25 -245l-25 -237q0 -11 -11 -11q-10 0 -12 11zM259 405l21 252q0 13 13 13 q12 0 14 -13l23 -252l-23 -244q-2 -13 -14 -13q-13 0 -13 13zM350 405l20 234q0 6 4.5 10.5t10.5 4.5q14 0 16 -15l21 -234l-21 -246q-2 -16 -16 -16q-6 0 -10.5 4.5t-4.5 11.5zM401 159zM442 405l18 380q2 18 18 18q7 0 12 -5.5t5 -12.5l21 -380l-21 -246q0 -7 -5 -12.5 t-12 -5.5q-16 0 -18 18zM534 403l16 468q2 19 20 19q8 0 13.5 -5.5t5.5 -13.5l19 -468l-19 -244q0 -8 -5.5 -13.5t-13.5 -5.5q-18 0 -20 19zM628 405l16 506q0 9 6.5 15.5t14.5 6.5q9 0 15 -6.5t7 -15.5l18 -506l-18 -242q-2 -21 -22 -21q-19 0 -21 21zM723 405l14 -241 q1 -10 7.5 -16.5t15.5 -6.5q22 0 24 23l16 241l-16 523q-1 10 -7.5 17t-16.5 7q-9 0 -16 -7t-7 -17zM784 164zM817 405l14 510q0 11 7.5 18t17.5 7t17.5 -7t7.5 -18l15 -510l-15 -239q0 -10 -7.5 -17.5t-17.5 -7.5t-17 7t-8 18zM913 404l12 492q1 12 9 20t19 8t18.5 -8 t8.5 -20l14 -492l-14 -236q0 -11 -8 -19t-19 -8t-19 8t-9 19zM1010 405q0 -1 11 -236v-1q0 -10 6 -17q9 -11 23 -11q11 0 20 9q9 7 9 20l1 24l11 211l-12 586q0 16 -13 24q-8 5 -16 5t-16 -5q-13 -8 -13 -24l-1 -6zM1079 169zM1103 404l12 636v3q2 15 12 24q9 7 20 7 q8 0 15 -5q14 -8 16 -26l14 -639l-14 -231q0 -13 -9 -22t-22 -9t-22 9t-10 22l-6 114zM1204 174v899q0 23 28 33q85 34 181 34q195 0 338 -131.5t160 -323.5q53 22 110 22q117 0 200 -83t83 -201q0 -117 -83 -199.5t-200 -82.5h-786q-13 2 -22 11t-9 22z" />
+<glyph unicode="&#xf1c0;" d="M0 0v170q119 -84 325 -127t443 -43t443 43t325 127v-170q0 -69 -103 -128t-280 -93.5t-385 -34.5t-385 34.5t-280 93.5t-103 128zM0 384v170q119 -84 325 -127t443 -43t443 43t325 127v-170q0 -69 -103 -128t-280 -93.5t-385 -34.5t-385 34.5t-280 93.5t-103 128zM0 768 v170q119 -84 325 -127t443 -43t443 43t325 127v-170q0 -69 -103 -128t-280 -93.5t-385 -34.5t-385 34.5t-280 93.5t-103 128zM0 1152v128q0 69 103 128t280 93.5t385 34.5t385 -34.5t280 -93.5t103 -128v-128q0 -69 -103 -128t-280 -93.5t-385 -34.5t-385 34.5t-280 93.5 t-103 128z" />
+<glyph unicode="&#xf1c1;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536zM257 60q9 40 56 91.5t132 96.5q14 9 23 -6q2 -2 2 -4 q52 85 107 197q68 136 104 262q-24 82 -30.5 159.5t6.5 127.5q11 40 42 40h21h1q23 0 35 -15q18 -21 9 -68q-2 -6 -4 -8q1 -3 1 -8v-30q-2 -123 -14 -192q55 -164 146 -238q33 -26 84 -56q59 7 117 7q147 0 177 -49q16 -22 2 -52q0 -1 -1 -2l-2 -2v-1q-6 -38 -71 -38 q-48 0 -115 20t-130 53q-221 -24 -392 -83q-153 -262 -242 -262q-15 0 -28 7l-24 12q-1 1 -6 5q-10 10 -6 36zM318 54q52 24 137 158q-51 -40 -87.5 -84t-49.5 -74zM592 313q135 54 284 81q-2 1 -13 9.5t-16 13.5q-76 67 -127 176q-27 -86 -83 -197q-30 -56 -45 -83z M714 842q1 7 7 44q0 3 7 43q1 4 4 8q-1 1 -1 2t-0.5 1.5t-0.5 1.5q-1 22 -13 36q0 -1 -1 -2v-2q-15 -42 -2 -132zM1024 1024h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1098 353q76 -28 124 -28q14 0 18 1q0 1 -2 3q-24 24 -140 24z" />
+<glyph unicode="&#xf1c2;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536zM233 661h70l164 -661h159l128 485q7 20 10 46q2 16 2 24 h4l3 -24q1 -3 3.5 -20t5.5 -26l128 -485h159l164 661h70v107h-300v-107h90l-99 -438q-5 -20 -7 -46l-2 -21h-4l-3 21q-1 5 -4 21t-5 25l-144 545h-114l-144 -545q-2 -9 -4.5 -24.5t-3.5 -21.5l-4 -21h-4l-2 21q-2 26 -7 46l-99 438h90v107h-300v-107zM1024 1024h376 q-10 29 -22 41l-313 313q-12 12 -41 22v-376z" />
+<glyph unicode="&#xf1c3;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536zM429 0h281v106h-75l103 161q5 7 10 16.5t7.5 13.5t3.5 4 h2q1 -4 5 -10q2 -4 4.5 -7.5t6 -8t6.5 -8.5l107 -161h-76v-106h291v106h-68l-192 273l195 282h67v107h-279v-107h74l-103 -159q-4 -7 -10 -16.5t-9 -13.5l-2 -3h-2q-1 4 -5 10q-6 11 -17 23l-106 159h76v107h-290v-107h68l189 -272l-194 -283h-68v-106zM1024 1024h376 q-10 29 -22 41l-313 313q-12 12 -41 22v-376z" />
+<glyph unicode="&#xf1c4;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536zM416 0h327v106h-93v167h137q76 0 118 15q67 23 106.5 87 t39.5 146q0 81 -37 141t-100 87q-48 19 -130 19h-368v-107h92v-555h-92v-106zM650 386v268h120q52 0 83 -18q56 -33 56 -115q0 -89 -62 -120q-31 -15 -78 -15h-119zM1024 1024h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376z" />
+<glyph unicode="&#xf1c5;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536zM256 0v192l192 192l128 -128l384 384l320 -320v-320 h-1024zM256 704q0 80 56 136t136 56t136 -56t56 -136t-56 -136t-136 -56t-136 56t-56 136zM1024 1024h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376z" />
+<glyph unicode="&#xf1c6;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-128v-128h-128v128h-512v-1536zM384 192q0 25 8 52q21 63 120 396 v128h128v-128h79q22 0 39 -13t23 -34l107 -349q8 -27 8 -52q0 -83 -72.5 -137.5t-183.5 -54.5t-183.5 54.5t-72.5 137.5zM512 192q0 -26 37.5 -45t90.5 -19t90.5 19t37.5 45t-37.5 45t-90.5 19t-90.5 -19t-37.5 -45zM512 896h128v128h-128v-128zM512 1152h128v128h-128v-128 zM640 768h128v128h-128v-128zM640 1024h128v128h-128v-128zM1024 1024h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376z" />
+<glyph unicode="&#xf1c7;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536zM256 288v192q0 14 9 23t23 9h131l166 167q16 15 35 7 q20 -8 20 -30v-544q0 -22 -20 -30q-8 -2 -12 -2q-12 0 -23 9l-166 167h-131q-14 0 -23 9t-9 23zM762 206.5q1 -26.5 20 -44.5q20 -17 44 -17q27 0 47 20q87 93 87 219t-87 219q-18 19 -45 20t-46 -17t-20 -44.5t18 -46.5q52 -57 52 -131t-52 -131q-19 -20 -18 -46.5z M973.5 54.5q2.5 -26.5 23.5 -42.5q18 -15 40 -15q31 0 50 24q129 159 129 363t-129 363q-16 21 -43 24t-47 -14q-21 -17 -23.5 -43.5t14.5 -47.5q100 -123 100 -282t-100 -282q-17 -21 -14.5 -47.5zM1024 1024h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376z" />
+<glyph unicode="&#xf1c8;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536zM256 256v384q0 52 38 90t90 38h384q52 0 90 -38t38 -90 v-384q0 -52 -38 -90t-90 -38h-384q-52 0 -90 38t-38 90zM960 403v90l265 266q9 9 23 9q4 0 12 -2q20 -8 20 -30v-576q0 -22 -20 -30q-8 -2 -12 -2q-14 0 -23 9zM1024 1024h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376z" />
+<glyph unicode="&#xf1c9;" d="M0 -160v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68zM128 -128h1280v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536zM254 429q-14 19 0 38l226 301q8 11 21 12.5t24 -6.5 l51 -38q11 -8 12.5 -21t-6.5 -24l-182 -243l182 -243q8 -11 6.5 -24t-12.5 -21l-51 -38q-11 -8 -24 -6.5t-21 12.5zM636 43l138 831q2 13 13 20.5t24 5.5l63 -10q13 -2 20.5 -13t5.5 -24l-138 -831q-2 -13 -13 -20.5t-24 -5.5l-63 10q-13 2 -20.5 13t-5.5 24zM947.5 181 q-1.5 13 6.5 24l182 243l-182 243q-8 11 -6.5 24t12.5 21l51 38q11 8 24 6.5t21 -12.5l226 -301q14 -19 0 -38l-226 -301q-8 -11 -21 -12.5t-24 6.5l-51 38q-11 8 -12.5 21zM1024 1024h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376z" />
+<glyph unicode="&#xf1ca;" d="M39 1286h283q26 -218 70 -398.5t104.5 -317t121.5 -235.5t140 -195q169 169 287 406q-142 72 -223 220t-81 333q0 192 104 314.5t284 122.5q178 0 273 -105.5t95 -297.5q0 -159 -58 -286q-7 -1 -19.5 -3t-46 -2t-63 6t-62 25.5t-50.5 51.5q31 103 31 184q0 87 -29 132 t-79 45q-53 0 -85 -49.5t-32 -140.5q0 -186 105 -293.5t267 -107.5q62 0 121 14v-198q-101 -23 -198 -23q-65 -136 -165.5 -271t-181.5 -215.5t-128 -106.5q-80 -45 -162 3q-28 17 -60.5 43.5t-85 83.5t-102.5 128.5t-107.5 184t-105.5 244t-91.5 314.5t-70.5 390z" />
+<glyph unicode="&#xf1cb;" horiz-adv-x="1792" d="M0 367v546q0 41 34 64l819 546q21 13 43 13t43 -13l819 -546q34 -23 34 -64v-546q0 -41 -34 -64l-819 -546q-21 -13 -43 -13t-43 13l-819 546q-34 23 -34 64zM154 511l193 129l-193 129v-258zM216 367l603 -402v359l-334 223zM216 913l269 -180l334 223v359zM624 640 l272 -182l272 182l-272 182zM973 -35l603 402l-269 180l-334 -223v-359zM973 956l334 -223l269 180l-603 402v-359zM1445 640l193 -129v258z" />
+<glyph unicode="&#xf1cc;" horiz-adv-x="2048" d="M0 407q0 110 55 203t147 147q-12 39 -12 82q0 115 82 196t199 81q95 0 172 -58q75 154 222.5 248t326.5 94q166 0 306 -80.5t221.5 -218.5t81.5 -301q0 -6 -0.5 -18t-0.5 -18q111 -46 179.5 -145.5t68.5 -221.5q0 -164 -118 -280.5t-285 -116.5q-4 0 -11.5 0.5t-10.5 0.5 h-1209h-1h-2h-5q-170 10 -288 125.5t-118 280.5zM468 498q0 -122 84 -193t208 -71q137 0 240 99q-16 20 -47.5 56.5t-43.5 50.5q-67 -65 -144 -65q-55 0 -93.5 33.5t-38.5 87.5q0 53 38.5 87t91.5 34q44 0 84.5 -21t73 -55t65 -75t69 -82t77 -75t97 -55t121.5 -21 q121 0 204.5 71.5t83.5 190.5q0 121 -84 192t-207 71q-143 0 -241 -97q14 -16 29.5 -34t34.5 -40t29 -34q66 64 142 64q52 0 92 -33t40 -84q0 -57 -37 -91.5t-94 -34.5q-43 0 -82.5 21t-72 55t-65.5 75t-69.5 82t-77.5 75t-96.5 55t-118.5 21q-122 0 -207 -70.5t-85 -189.5z " />
+<glyph unicode="&#xf1cd;" horiz-adv-x="1792" d="M0 640q0 182 71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348t-71 -348t-191 -286t-286 -191t-348 -71t-348 71t-286 191t-191 286t-71 348zM128 640q0 -190 90 -361l194 194q-28 82 -28 167t28 167l-194 194q-90 -171 -90 -361zM512 640 q0 -159 112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5t-112.5 271.5t-271.5 112.5t-271.5 -112.5t-112.5 -271.5zM535 -38q171 -90 361 -90t361 90l-194 194q-82 -28 -167 -28t-167 28zM535 1318l194 -194q82 28 167 28t167 -28l194 194q-171 90 -361 90t-361 -90z M1380 473l194 -194q90 171 90 361t-90 361l-194 -194q28 -82 28 -167t-28 -167z" />
+<glyph unicode="&#xf1ce;" horiz-adv-x="1792" d="M0 640q0 222 101 414.5t276.5 317t390.5 155.5v-260q-221 -45 -366.5 -221t-145.5 -406q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5q0 230 -145.5 406t-366.5 221v260q215 -31 390.5 -155.5t276.5 -317t101 -414.5 q0 -182 -71 -348t-191 -286t-286 -191t-348 -71t-348 71t-286 191t-191 286t-71 348z" />
+<glyph unicode="&#xf1d0;" horiz-adv-x="1792" d="M19 662q8 217 116 406t305 318h5q0 -1 -1 -3q-8 -8 -28 -33.5t-52 -76.5t-60 -110.5t-44.5 -135.5t-14 -150.5t39 -157.5t108.5 -154q50 -50 102 -69.5t90.5 -11.5t69.5 23.5t47 32.5l16 16q39 51 53 116.5t6.5 122.5t-21 107t-26.5 80l-14 29q-10 25 -30.5 49.5t-43 41 t-43.5 29.5t-35 19l-13 6l104 115q39 -17 78 -52t59 -61l19 -27q1 48 -18.5 103.5t-40.5 87.5l-20 31l161 183l160 -181q-33 -46 -52.5 -102.5t-22.5 -90.5l-4 -33q22 37 61.5 72.5t67.5 52.5l28 17l103 -115q-44 -14 -85 -50t-60 -65l-19 -29q-31 -56 -48 -133.5t-7 -170 t57 -156.5q33 -45 77.5 -60.5t85 -5.5t76 26.5t57.5 33.5l21 16q60 53 96.5 115t48.5 121.5t10 121.5t-18 118t-37 107.5t-45.5 93t-45 72t-34.5 47.5l-13 17q-14 13 -7 13l10 -3q40 -29 62.5 -46t62 -50t64 -58t58.5 -65t55.5 -77t45.5 -88t38 -103t23.5 -117t10.5 -136 q3 -259 -108 -465t-312 -321t-456 -115q-185 0 -351 74t-283.5 198t-184 293t-60.5 353z" />
+<glyph unicode="&#xf1d1;" horiz-adv-x="1792" d="M0 640q0 182 71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348t-71 -348t-191 -286t-286 -191t-348 -71t-348 71t-286 191t-191 286t-71 348zM44 640q0 -173 67.5 -331t181.5 -272t272 -181.5t331 -67.5t331 67.5t272 181.5t181.5 272t67.5 331 t-67.5 331t-181.5 272t-272 181.5t-331 67.5t-331 -67.5t-272 -181.5t-181.5 -272t-67.5 -331zM87 640q0 205 98 385l57 -33q-30 -56 -49 -112l82 -28q-35 -100 -35 -212q0 -109 36 -212l-83 -28q22 -60 49 -112l-57 -33q-98 180 -98 385zM206 217l58 34q29 -49 73 -99 l65 57q148 -168 368 -212l-17 -86q65 -12 121 -13v-66q-208 6 -385 109.5t-283 275.5zM207 1063q106 172 282 275.5t385 109.5v-66q-65 -2 -121 -13l17 -86q-220 -42 -368 -211l-65 56q-38 -42 -73 -98zM415 805q33 93 99 169l185 -162q59 68 147 86l-48 240q44 10 98 10 t98 -10l-48 -240q88 -18 147 -86l185 162q66 -76 99 -169l-233 -80q14 -42 14 -85t-14 -85l232 -80q-31 -92 -98 -169l-185 162q-57 -67 -147 -85l48 -241q-52 -10 -98 -10t-98 10l48 241q-90 18 -147 85l-185 -162q-67 77 -98 169l232 80q-14 42 -14 85t14 85zM918 -102 q56 1 121 13l-17 86q220 44 368 212l65 -57q44 50 73 99l58 -34q-106 -172 -283 -275.5t-385 -109.5v66zM918 1382v66q209 -6 385 -109.5t282 -275.5l-57 -33q-35 56 -73 98l-65 -56q-148 169 -368 211l17 86q-56 11 -121 13zM1516 428q36 103 36 212q0 112 -35 212l82 28 q-19 56 -49 112l57 33q98 -180 98 -385t-98 -385l-57 33q27 52 49 112z" />
+<glyph unicode="&#xf1d2;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 218q0 -45 20 -78.5t54 -51t72 -25.5t81 -8q224 0 224 188q0 67 -48 99t-126 46q-27 5 -51.5 20.5 t-24.5 39.5q0 44 49 52q77 15 122 70t45 134q0 24 -10 52q37 9 49 13v125q-78 -29 -135 -29q-50 29 -110 29q-86 0 -145 -57t-59 -143q0 -50 29.5 -102t73.5 -67v-3q-38 -17 -38 -85q0 -53 41 -77v-3q-113 -37 -113 -139zM382 225q0 64 98 64q102 0 102 -61q0 -66 -93 -66 q-107 0 -107 63zM395 693q0 90 77 90q36 0 55 -25.5t19 -63.5q0 -85 -74 -85q-77 0 -77 84zM755 1072q0 -36 25 -62.5t60 -26.5t59.5 27t24.5 62q0 36 -24 63.5t-60 27.5t-60.5 -27t-24.5 -64zM771 350h137q-2 27 -2 82v387q0 46 2 69h-137q3 -23 3 -71v-392q0 -50 -3 -75z M966 771q36 3 37 3q3 0 11 -0.5t12 -0.5v-2h-2v-217q0 -37 2.5 -64t11.5 -56.5t24.5 -48.5t43.5 -31t66 -12q64 0 108 24v121q-30 -21 -68 -21q-53 0 -53 82v225h52q9 0 26.5 -1t26.5 -1v117h-105q0 82 3 102h-140q4 -24 4 -55v-47h-60v-117z" />
+<glyph unicode="&#xf1d3;" horiz-adv-x="1792" d="M68 7q0 165 182 225v4q-67 41 -67 126q0 109 63 137v4q-72 24 -119.5 108.5t-47.5 165.5q0 139 95 231.5t235 92.5q96 0 178 -47q98 0 218 47v-202q-36 -12 -79 -22q16 -43 16 -84q0 -127 -73 -216.5t-197 -112.5q-40 -8 -59.5 -27t-19.5 -58q0 -31 22.5 -51.5t58 -32 t78.5 -22t86 -25.5t78.5 -37.5t58 -64t22.5 -98.5q0 -304 -363 -304q-69 0 -130 12.5t-116 41t-87.5 82t-32.5 127.5zM272 18q0 -101 172 -101q151 0 151 105q0 100 -165 100q-158 0 -158 -104zM293 775q0 -135 124 -135q119 0 119 137q0 61 -30 102t-89 41 q-124 0 -124 -145zM875 1389q0 59 39.5 103t98.5 44q58 0 96.5 -44.5t38.5 -102.5t-39 -101.5t-96 -43.5q-58 0 -98 43.5t-40 101.5zM901 220q4 45 4 134v609q0 94 -4 128h222q-4 -33 -4 -124v-613q0 -89 4 -134h-222zM1217 901v190h96v76q0 54 -6 89h227q-6 -41 -6 -165 h171v-190q-15 0 -43.5 2t-42.5 2h-85v-365q0 -131 87 -131q61 0 109 33v-196q-71 -39 -174 -39q-62 0 -107 20t-70 50t-39.5 78t-18.5 92t-4 103v351h2v4q-7 0 -19 1t-18 1q-21 0 -59 -6z" />
+<glyph unicode="&#xf1d4;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM368 1135l323 -589v-435h134v436l343 588h-150q-21 -39 -63.5 -118.5t-68 -128.5t-59.5 -118.5t-60 -128.5h-3 q-21 48 -44.5 97t-52 105.5t-46.5 92t-54 104.5t-49 95h-150z" />
+<glyph unicode="&#xf1d5;" horiz-adv-x="1280" d="M57 953q0 119 46.5 227t124.5 186t186 124t226 46q158 0 292.5 -78t212.5 -212.5t78 -292.5t-78 -292t-212.5 -212t-292.5 -78q-64 0 -131 14q-21 5 -32.5 23.5t-6.5 39.5q5 20 23 31.5t39 7.5q51 -13 108 -13q97 0 186 38t153 102t102 153t38 186t-38 186t-102 153 t-153 102t-186 38t-186 -38t-153 -102t-102 -153t-38 -186q0 -114 52 -218q10 -20 3.5 -40t-25.5 -30t-39.5 -3t-30.5 26q-64 123 -64 265zM113.5 38.5q10.5 121.5 29.5 217t54 186t69 155.5t74 125q61 90 132 165q-16 35 -16 77q0 80 56.5 136.5t136.5 56.5t136.5 -56.5 t56.5 -136.5t-57 -136.5t-136 -56.5q-60 0 -111 35q-62 -67 -115 -146q-247 -371 -202 -859q1 -22 -12.5 -38.5t-34.5 -18.5h-5q-20 0 -35 13.5t-17 33.5q-14 126 -3.5 247.5z" />
+<glyph unicode="&#xf1d6;" horiz-adv-x="1792" d="M18 264q0 275 252 466q-8 19 -8 52q0 20 11 49t24 45q-1 22 7.5 53t22.5 43q0 139 92.5 288.5t217.5 209.5q139 66 324 66q133 0 266 -55q49 -21 90 -48t71 -56t55 -68t42 -74t32.5 -84.5t25.5 -89.5t22 -98l1 -5q55 -83 55 -150q0 -14 -9 -40t-9 -38q0 -1 1.5 -3.5 t3.5 -5t2 -3.5q77 -114 120.5 -214.5t43.5 -208.5q0 -43 -19.5 -100t-55.5 -57q-9 0 -19.5 7.5t-19 17.5t-19 26t-16 26.5t-13.5 26t-9 17.5q-1 1 -3 1l-5 -4q-59 -154 -132 -223q20 -20 61.5 -38.5t69 -41.5t35.5 -65q-2 -4 -4 -16t-7 -18q-64 -97 -302 -97q-53 0 -110.5 9 t-98 20t-104.5 30q-15 5 -23 7q-14 4 -46 4.5t-40 1.5q-41 -45 -127.5 -65t-168.5 -20q-35 0 -69 1.5t-93 9t-101 20.5t-74.5 40t-32.5 64q0 40 10 59.5t41 48.5q11 2 40.5 13t49.5 12q4 0 14 2q2 2 2 4l-2 3q-48 11 -108 105.5t-73 156.5l-5 3q-4 0 -12 -20 q-18 -41 -54.5 -74.5t-77.5 -37.5h-1q-4 0 -6 4.5t-5 5.5q-23 54 -23 100z" />
+<glyph unicode="&#xf1d7;" horiz-adv-x="2048" d="M0 858q0 169 97.5 311t264 223.5t363.5 81.5q176 0 332.5 -66t262 -182.5t136.5 -260.5q-31 4 -70 4q-169 0 -311 -77t-223.5 -208.5t-81.5 -287.5q0 -78 23 -152q-35 -3 -68 -3q-26 0 -50 1.5t-55 6.5t-44.5 7t-54.5 10.5t-50 10.5l-253 -127l72 218q-290 203 -290 490z M380 1075q0 -39 33 -64.5t76 -25.5q41 0 66 24.5t25 65.5t-25 66t-66 25q-43 0 -76 -25.5t-33 -65.5zM816 404q0 143 81.5 264t223.5 191.5t311 70.5q161 0 303 -70.5t227.5 -192t85.5 -263.5q0 -117 -68.5 -223.5t-185.5 -193.5l55 -181l-199 109q-150 -37 -218 -37 q-169 0 -311 70.5t-223.5 191.5t-81.5 264zM888 1075q0 -39 33 -64.5t76 -25.5q41 0 65.5 24.5t24.5 65.5t-24.5 66t-65.5 25q-43 0 -76 -25.5t-33 -65.5zM1160 568q0 -28 22.5 -50.5t49.5 -22.5q40 0 65.5 22t25.5 51q0 28 -25.5 50t-65.5 22q-27 0 -49.5 -22.5 t-22.5 -49.5zM1559 568q0 -28 22.5 -50.5t49.5 -22.5q39 0 65 22t26 51q0 28 -26 50t-65 22q-27 0 -49.5 -22.5t-22.5 -49.5z" />
+<glyph unicode="&#xf1d8;" horiz-adv-x="1792" d="M0 508q-2 40 32 59l1664 960q15 9 32 9q20 0 36 -11q33 -24 27 -64l-256 -1536q-5 -29 -32 -45q-14 -8 -31 -8q-11 0 -24 5l-453 185l-242 -295q-18 -23 -49 -23q-13 0 -22 4q-19 7 -30.5 23.5t-11.5 36.5v349l864 1059l-1069 -925l-395 162q-37 14 -40 55z" />
+<glyph unicode="&#xf1d9;" horiz-adv-x="1792" d="M0 508q-3 39 32 59l1664 960q35 21 68 -2q33 -24 27 -64l-256 -1536q-5 -29 -32 -45q-14 -8 -31 -8q-11 0 -24 5l-527 215l-298 -327q-18 -21 -47 -21q-14 0 -23 4q-19 7 -30 23.5t-11 36.5v452l-472 193q-37 14 -40 55zM209 522l336 -137l863 639l-478 -797l492 -201 l221 1323z" />
+<glyph unicode="&#xf1da;" d="M0 832v448q0 42 40 59q39 17 69 -14l130 -129q107 101 244.5 156.5t284.5 55.5q156 0 298 -61t245 -164t164 -245t61 -298t-61 -298t-164 -245t-245 -164t-298 -61q-172 0 -327 72.5t-264 204.5q-7 10 -6.5 22.5t8.5 20.5l137 138q10 9 25 9q16 -2 23 -12 q73 -95 179 -147t225 -52q104 0 198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5t-40.5 198.5t-109.5 163.5t-163.5 109.5t-198.5 40.5q-98 0 -188 -35.5t-160 -101.5l137 -138q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45zM512 480v64q0 14 9 23t23 9h224v352 q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf1db;" d="M0 640q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5zM128 640q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5 t-51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5z" />
+<glyph unicode="&#xf1dc;" horiz-adv-x="1792" d="M62 1338q0 26 12 48t36 22q46 0 138.5 -3.5t138.5 -3.5q42 0 126.5 3.5t126.5 3.5q25 0 37.5 -22t12.5 -48q0 -30 -17 -43.5t-38.5 -14.5t-49.5 -4t-43 -13q-35 -21 -35 -160l1 -320q0 -21 1 -32q13 -3 39 -3h699q25 0 38 3q1 11 1 32l1 320q0 139 -35 160 q-18 11 -58.5 12.5t-66 13t-25.5 49.5q0 26 12.5 48t37.5 22q44 0 132 -3.5t132 -3.5q43 0 129 3.5t129 3.5q25 0 37.5 -22t12.5 -48q0 -30 -17.5 -44t-40 -14.5t-51.5 -3t-44 -12.5q-35 -23 -35 -161l1 -943q0 -119 34 -140q16 -10 46 -13.5t53.5 -4.5t41.5 -15.5t18 -44.5 q0 -26 -12 -48t-36 -22q-44 0 -132.5 3.5t-133.5 3.5q-44 0 -132 -3.5t-132 -3.5q-24 0 -37 20.5t-13 45.5q0 31 17 46t39 17t51 7t45 15q33 21 33 140l-1 391q0 21 -1 31q-13 4 -50 4h-675q-38 0 -51 -4q-1 -10 -1 -31l-1 -371q0 -142 37 -164q16 -10 48 -13t57 -3.5 t45 -15t20 -45.5q0 -26 -12.5 -48t-36.5 -22q-47 0 -139.5 3.5t-138.5 3.5q-43 0 -128 -3.5t-127 -3.5q-23 0 -35.5 21t-12.5 45q0 30 15.5 45t36 17.5t47.5 7.5t42 15q33 23 33 143l-1 57v813q0 3 0.5 26t0 36.5t-1.5 38.5t-3.5 42t-6.5 36.5t-11 31.5t-16 18 q-15 10 -45 12t-53 2t-41 14t-18 45z" />
+<glyph unicode="&#xf1dd;" horiz-adv-x="1280" d="M24 926q0 166 88 286q88 118 209 159q111 37 417 37h479q25 0 43 -18t18 -43v-73q0 -29 -18.5 -61t-42.5 -32q-50 0 -54 -1q-26 -6 -32 -31q-3 -11 -3 -64v-1152q0 -25 -18 -43t-43 -18h-108q-25 0 -43 18t-18 43v1218h-143v-1218q0 -25 -17.5 -43t-43.5 -18h-108 q-26 0 -43.5 18t-17.5 43v496q-147 12 -245 59q-126 58 -192 179q-64 117 -64 259z" />
+<glyph unicode="&#xf1de;" d="M0 736v64q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-64q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM128 -96v672h256v-672q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23zM128 960v416q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-416h-256zM512 224v64q0 40 28 68 t68 28h320q40 0 68 -28t28 -68v-64q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM640 64h256v-160q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v160zM640 448v928q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-928h-256zM1024 992v64q0 40 28 68t68 28h320q40 0 68 -28 t28 -68v-64q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68zM1152 -96v928h256v-928q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23zM1152 1216v160q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-160h-256z" />
+<glyph unicode="&#xf1e0;" d="M0 640q0 133 93.5 226.5t226.5 93.5q126 0 218 -86l360 180q-2 22 -2 34q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5t-93.5 -226.5t-226.5 -93.5q-126 0 -218 86l-360 -180q2 -22 2 -34t-2 -34l360 -180q92 86 218 86q133 0 226.5 -93.5t93.5 -226.5 t-93.5 -226.5t-226.5 -93.5t-226.5 93.5t-93.5 226.5q0 12 2 34l-360 180q-92 -86 -218 -86q-133 0 -226.5 93.5t-93.5 226.5z" />
+<glyph unicode="&#xf1e1;" d="M0 160v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5zM256 640q0 -88 62.5 -150.5t150.5 -62.5q83 0 145 57l241 -120q-2 -16 -2 -23q0 -88 63 -150.5t151 -62.5 t150.5 62.5t62.5 150.5t-62.5 151t-150.5 63q-84 0 -145 -58l-241 120q2 16 2 23t-2 23l241 120q61 -58 145 -58q88 0 150.5 63t62.5 151t-62.5 150.5t-150.5 62.5t-151 -62.5t-63 -150.5q0 -7 2 -23l-241 -120q-62 57 -145 57q-88 0 -150.5 -62.5t-62.5 -150.5z" />
+<glyph unicode="&#xf1e2;" horiz-adv-x="1792" d="M0 448q0 143 55.5 273.5t150 225t225 150t273.5 55.5q182 0 343 -89l64 64q19 19 45.5 19t45.5 -19l68 -68l243 244l46 -46l-244 -243l68 -68q19 -19 19 -45.5t-19 -45.5l-64 -64q89 -161 89 -343q0 -143 -55.5 -273.5t-150 -225t-225 -150t-273.5 -55.5t-273.5 55.5 t-225 150t-150 225t-55.5 273.5zM170 615q10 -24 35 -34q13 -5 24 -5q42 0 60 40q34 84 98.5 148.5t148.5 98.5q25 11 35 35t0 49t-34 35t-49 0q-108 -44 -191 -127t-127 -191q-10 -25 0 -49zM1376 1472q0 13 9 23q10 9 23 9t23 -9l90 -91q10 -9 10 -22.5t-10 -22.5 q-10 -10 -22 -10q-13 0 -23 10l-91 90q-9 10 -9 23zM1536 1408v96q0 14 9 23t23 9t23 -9t9 -23v-96q0 -14 -9 -23t-23 -9t-23 9t-9 23zM1605 1242.5q0 13.5 10 22.5q9 10 22.5 10t22.5 -10l91 -90q9 -10 9 -23t-9 -23q-11 -9 -23 -9t-23 9l-90 91q-10 9 -10 22.5z M1605 1381.5q0 13.5 10 22.5l90 91q10 9 23 9t23 -9q9 -10 9 -23t-9 -23l-91 -90q-10 -10 -22 -10q-13 0 -23 10q-10 9 -10 22.5zM1632 1312q0 14 9 23t23 9h96q14 0 23 -9t9 -23t-9 -23t-23 -9h-96q-14 0 -23 9t-9 23z" />
+<glyph unicode="&#xf1e3;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1e4;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1e5;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1e6;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1e7;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1e8;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1e9;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1ea;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1eb;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1ec;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1ed;" horiz-adv-x="1792" />
+<glyph unicode="&#xf1ee;" horiz-adv-x="1792" />
+<glyph unicode="&#xf500;" horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+<glyph horiz-adv-x="1792" />
+</font>
+</defs></svg> \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.ttf b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.ttf
new file mode 100755
index 00000000000..5cd6cff6d6f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.ttf
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.woff b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.woff
new file mode 100755
index 00000000000..9eaecb37996
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/FontAwesome/fontawesome-webfont.woff
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/LICENSE.txt b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/LICENSE.txt
new file mode 100755
index 00000000000..75b52484ea4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/LICENSE.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Bold.ttf b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Bold.ttf
new file mode 100755
index 00000000000..fd79d43bea0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Bold.ttf
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Italic.ttf b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Italic.ttf
new file mode 100755
index 00000000000..c90da48ff3b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Italic.ttf
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Light.ttf b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Light.ttf
new file mode 100755
index 00000000000..0d381897da2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Light.ttf
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-LightItalic.ttf b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-LightItalic.ttf
new file mode 100755
index 00000000000..68299c4bc6b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-LightItalic.ttf
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Regular.ttf b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Regular.ttf
new file mode 100755
index 00000000000..db433349b70
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/OpenSans-Regular.ttf
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Orbitron/OFL.txt b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Orbitron/OFL.txt
new file mode 100755
index 00000000000..3b859d9138f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Orbitron/OFL.txt
@@ -0,0 +1,93 @@
+Copyright (c) 2009, Matt McInerney (matt@pixelspread.com),
+with Reserved Font Name Orbitron.
+This Font Software is licensed under the SIL Open Font License, Version 1.1.
+This license is copied below, and is also available with a FAQ at:
+http://scripts.sil.org/OFL
+
+
+-----------------------------------------------------------
+SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+-----------------------------------------------------------
+
+PREAMBLE
+The goals of the Open Font License (OFL) are to stimulate worldwide
+development of collaborative font projects, to support the font creation
+efforts of academic and linguistic communities, and to provide a free and
+open framework in which fonts may be shared and improved in partnership
+with others.
+
+The OFL allows the licensed fonts to be used, studied, modified and
+redistributed freely as long as they are not sold by themselves. The
+fonts, including any derivative works, can be bundled, embedded,
+redistributed and/or sold with any software provided that any reserved
+names are not used by derivative works. The fonts and derivatives,
+however, cannot be released under any other type of license. The
+requirement for fonts to remain under this license does not apply
+to any document created using the fonts or their derivatives.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this license and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Reserved Font Name" refers to any names specified as such after the
+copyright statement(s).
+
+"Original Version" refers to the collection of Font Software components as
+distributed by the Copyright Holder(s).
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to a
+new environment.
+
+"Author" refers to any designer, engineer, programmer, technical
+writer or other person who contributed to the Font Software.
+
+PERMISSION & CONDITIONS
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Font Software, to use, study, copy, merge, embed, modify,
+redistribute, and sell modified and unmodified copies of the Font
+Software, subject to the following conditions:
+
+1) Neither the Font Software nor any of its individual components,
+in Original or Modified Versions, may be sold by itself.
+
+2) Original or Modified Versions of the Font Software may be bundled,
+redistributed and/or sold with any software, provided that each copy
+contains the above copyright notice and this license. These can be
+included either as stand-alone text files, human-readable headers or
+in the appropriate machine-readable metadata fields within text or
+binary files as long as those fields can be easily viewed by the user.
+
+3) No Modified Version of the Font Software may use the Reserved Font
+Name(s) unless explicit written permission is granted by the corresponding
+Copyright Holder. This restriction only applies to the primary font name as
+presented to the users.
+
+4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
+Software shall not be used to promote, endorse or advertise any
+Modified Version, except to acknowledge the contribution(s) of the
+Copyright Holder(s) and the Author(s) or with their explicit written
+permission.
+
+5) The Font Software, modified or unmodified, in part or in whole,
+must be distributed entirely under this license, and must not be
+distributed under any other license. The requirement for fonts to
+remain under this license does not apply to any document created
+using the Font Software.
+
+TERMINATION
+This license becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
+OTHER DEALINGS IN THE FONT SOFTWARE.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Orbitron/Orbitron-Regular.ttf b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Orbitron/Orbitron-Regular.ttf
new file mode 100755
index 00000000000..42563d6b6ef
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Orbitron/Orbitron-Regular.ttf
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Oswald/OFL.txt b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Oswald/OFL.txt
new file mode 100755
index 00000000000..ff7febddcb2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Oswald/OFL.txt
@@ -0,0 +1,92 @@
+Copyright (c) 2011-2012, Vernon Adams (vern@newtypography.co.uk), with Reserved Font Names 'Oswald'
+This Font Software is licensed under the SIL Open Font License, Version 1.1.
+This license is copied below, and is also available with a FAQ at:
+http://scripts.sil.org/OFL
+
+
+-----------------------------------------------------------
+SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+-----------------------------------------------------------
+
+PREAMBLE
+The goals of the Open Font License (OFL) are to stimulate worldwide
+development of collaborative font projects, to support the font creation
+efforts of academic and linguistic communities, and to provide a free and
+open framework in which fonts may be shared and improved in partnership
+with others.
+
+The OFL allows the licensed fonts to be used, studied, modified and
+redistributed freely as long as they are not sold by themselves. The
+fonts, including any derivative works, can be bundled, embedded,
+redistributed and/or sold with any software provided that any reserved
+names are not used by derivative works. The fonts and derivatives,
+however, cannot be released under any other type of license. The
+requirement for fonts to remain under this license does not apply
+to any document created using the fonts or their derivatives.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this license and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Reserved Font Name" refers to any names specified as such after the
+copyright statement(s).
+
+"Original Version" refers to the collection of Font Software components as
+distributed by the Copyright Holder(s).
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to a
+new environment.
+
+"Author" refers to any designer, engineer, programmer, technical
+writer or other person who contributed to the Font Software.
+
+PERMISSION & CONDITIONS
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Font Software, to use, study, copy, merge, embed, modify,
+redistribute, and sell modified and unmodified copies of the Font
+Software, subject to the following conditions:
+
+1) Neither the Font Software nor any of its individual components,
+in Original or Modified Versions, may be sold by itself.
+
+2) Original or Modified Versions of the Font Software may be bundled,
+redistributed and/or sold with any software, provided that each copy
+contains the above copyright notice and this license. These can be
+included either as stand-alone text files, human-readable headers or
+in the appropriate machine-readable metadata fields within text or
+binary files as long as those fields can be easily viewed by the user.
+
+3) No Modified Version of the Font Software may use the Reserved Font
+Name(s) unless explicit written permission is granted by the corresponding
+Copyright Holder. This restriction only applies to the primary font name as
+presented to the users.
+
+4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
+Software shall not be used to promote, endorse or advertise any
+Modified Version, except to acknowledge the contribution(s) of the
+Copyright Holder(s) and the Author(s) or with their explicit written
+permission.
+
+5) The Font Software, modified or unmodified, in part or in whole,
+must be distributed entirely under this license, and must not be
+distributed under any other license. The requirement for fonts to
+remain under this license does not apply to any document created
+using the Font Software.
+
+TERMINATION
+This license becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
+OTHER DEALINGS IN THE FONT SOFTWARE.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Oswald/Oswald-Regular.ttf b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Oswald/Oswald-Regular.ttf
new file mode 100755
index 00000000000..0798e241955
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/fonts/Oswald/Oswald-Regular.ttf
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-buildfail.ico b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-buildfail.ico
new file mode 100644
index 00000000000..8fdb76e344a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-buildfail.ico
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-fail.ico b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-fail.ico
new file mode 100644
index 00000000000..e028baefaba
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-fail.ico
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-ok.ico b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-ok.ico
new file mode 100644
index 00000000000..19f0e173de8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-ok.ico
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-panic.ico b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-panic.ico
new file mode 100644
index 00000000000..46b1bd085a0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/ico/goconvey-panic.ico
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/composer.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/composer.js
new file mode 100644
index 00000000000..7ddb0c8d01d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/composer.js
@@ -0,0 +1,171 @@
+var composer = {
+ tab: "\t",
+ template: "",
+ isFunc: function(scope)
+ {
+ if (!scope.title || typeof scope.depth === 'undefined')
+ return false;
+
+ return scope.title.indexOf("Test") === 0 && scope.depth === 0;
+ },
+ discardLastKey: false
+};
+
+
+$(function()
+{
+ // Begin layout sizing
+ var headerHeight = $('header').outerHeight();
+ var padding = $('#input, #output').css('padding-top').replace("px", "") * 2 + 1;
+ var outputPlaceholder = $('#output').text();
+
+ $(window).resize(function()
+ {
+ $('#input, #output').height($(window).height() - headerHeight - padding);
+ });
+
+ $(window).resize();
+ // End layout sizing
+
+
+ $('#input').keydown(function(e)
+ {
+ // 13=Enter, 16=Shift
+ composer.discardLastKey = e.keyCode === 13
+ || e.keyCode === 16;
+ }).keyup(function(e)
+ {
+ if (!composer.discardLastKey)
+ generate($(this).val());
+ });
+
+ composer.template = $('#tpl-convey').text();
+
+ tabOverride.set(document.getElementById('input'));
+ $('#input').focus();
+});
+
+
+
+// Begin Markup.js custom pipes
+Mark.pipes.recursivelyRender = function(val)
+{
+ return !val || val.length === 0 ? "\n" : Mark.up(composer.template, val);
+}
+
+Mark.pipes.indent = function(val)
+{
+ return new Array(val + 1).join("\t");
+}
+
+Mark.pipes.notTestFunc = function(scope)
+{
+ return !composer.isFunc(scope);
+}
+
+Mark.pipes.safeFunc = function(val)
+{
+ return val.replace(/[^a-z0-9_]/gi, '');
+}
+
+Mark.pipes.properCase = function(str)
+{
+ if (str.length === 0)
+ return "";
+
+ str = str.charAt(0).toUpperCase() + str.substr(1);
+
+ if (str.length < 2)
+ return str;
+
+ return str.replace(/[\s_][a-z]+/g, function(txt)
+ {
+ return txt.charAt(0)
+ + txt.charAt(1).toUpperCase()
+ + txt.substr(2).toLowerCase();
+ });
+}
+
+Mark.pipes.showImports = function(item)
+{
+ console.log(item);
+ if (root.title === "(root)" && root.stories.length > 0)
+ return 'import (\n\t"testing"\n\t. "github.com/smartystreets/goconvey/convey"\n)\n';
+ else
+ return "";
+}
+// End Markup.js custom pipes
+
+
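+// Generates the GoConvey test skeleton from the composer input and writes it to #output,
+// prepending the import block when the top-level story looks like a Test function.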
+function generate(input)
+{
+ var root = parseInput(input);
+ $('#output').text(Mark.up(composer.template, root.stories));
+ if (root.stories.length > 0 && root.stories[0].title.substr(0, 4) === "Test")
+ $('#output').prepend('import (\n\t"testing"\n\t. "github.com/smartystreets/goconvey/convey"\n)\n\n');
+}
+
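+// Parses the tab-indented input into a tree of stories under a "(root)" node,
+// tracking each story's depth and whether it needs the *testing.T argument.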
+function parseInput(input)
+{
+ lines = input.split("\n");
+
+ if (!lines)
+ return;
+
+ var root = {
+ title: "(root)",
+ stories: []
+ };
+
+ for (i in lines)
+ {
+ line = lines[i];
+ lineText = $.trim(line);
+
+ if (!lineText)
+ continue;
+
+ // Figure out how deep to put this story
+ indent = line.match(new RegExp("^" + composer.tab + "+"));
+ tabs = indent ? indent[0].length / composer.tab.length : 0;
+
+ // Starting at root, traverse into the right spot in the arrays
+ var curScope = root, prevScope = root;
+ for (j = 0; j < tabs && curScope.stories.length > 0; j++)
+ {
+ curScope = curScope.stories[curScope.stories.length - 1];
+ prevScope = curScope;
+ }
+
+ // Don't go crazy, though! (avoid excessive indentation)
+ if (tabs > curScope.depth + 1)
+ tabs = curScope.depth + 1;
+
+ // Only top-level Convey() calls need the *testing.T object passed in
+ var showT = composer.isFunc(prevScope)
+ || (!composer.isFunc(curScope)
+ && tabs === 0);
+
+ // Save the story at this scope
+ curScope.stories.push({
+ title: lineText.replace(/"/g, "\\\""), // escape quotes
+ stories: [],
+ depth: tabs,
+ showT: showT
+ });
+ }
+
+ return root;
+}
+
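+// Swallows an event: prevents the default action, stops propagation, and returns false.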
+function suppress(event)
+{
+ if (!event)
+ return false;
+ if (event.preventDefault)
+ event.preventDefault();
+ if (event.stopPropagation)
+ event.stopPropagation();
+ event.cancelBubble = true;
+ return false;
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/config.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/config.js
new file mode 100644
index 00000000000..0ca1e457bd4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/config.js
@@ -0,0 +1,15 @@
+// Configure the GoConvey web UI client in here
+
+convey.config = {
+
+	// Install new themes by adding them here; the first one will be the default
+ themes: {
+ "dark": { name: "Dark", filename: "dark.css", coverage: "hsla({{hue}}, 75%, 30%, .5)" },
+ "dark-bigtext": { name: "Dark-BigText", filename: "dark-bigtext.css", coverage: "hsla({{hue}}, 75%, 30%, .5)" },
+ "light": { name: "Light", filename: "light.css", coverage: "hsla({{hue}}, 62%, 75%, 1)" }
+ },
+
+ // Path to the themes (end with forward-slash)
+ themePath: "/resources/css/themes/"
+
+};
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/convey.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/convey.js
new file mode 100644
index 00000000000..b4e6b525eca
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/convey.js
@@ -0,0 +1,46 @@
+var convey = {
+
+ // *** Don't edit in here unless you're brave ***
+
+ statuses: { // contains some constants related to overall test status
+		pass: { class: 'ok', text: "Pass" }, // class name must match the suffix in the favicon file names (e.g. goconvey-ok.ico)
+ fail: { class: 'fail', text: "Fail" },
+ panic: { class: 'panic', text: "Panic" },
+ buildfail: { class: 'buildfail', text: "Build Failure" }
+ },
+ frameCounter: 0, // gives each frame a unique ID
+ maxHistory: 20, // how many tests to keep in the history
+ notif: undefined, // the notification currently being displayed
+ notifTimer: undefined, // the timer that clears the notifications automatically
+ poller: new Poller(), // the server poller
+ status: "", // what the _server_ is currently doing (not overall test results)
+ overallClass: "", // class name of the "overall" status banner
+ theme: "", // theme currently being used
+ packageStates: {}, // packages manually collapsed or expanded during this page's lifetime
+ uiEffects: true, // whether visual effects are enabled
+ framesOnSamePath: 0, // number of consecutive frames on this same watch path
+ layout: {
+ selClass: "sel", // CSS class when an element is "selected"
+ header: undefined, // container element of the header area (overall, controls)
+ frame: undefined, // container element of the main body area (above footer)
+ footer: undefined // container element of the footer (stuck to bottom)
+ },
+ history: [], // complete history of states (test results and aggregated data), including the current one
+ moments: {}, // elements that display time relative to the current time, keyed by ID, with the moment() as a value
+	intervals: {}, // intervals that execute periodically
+ intervalFuncs: { // functions executed by each interval in convey.intervals
+ time: function()
+ {
+ var t = new Date();
+ var h = zerofill(t.getHours(), 2);
+ var m = zerofill(t.getMinutes(), 2);
+ var s = zerofill(t.getSeconds(), 2);
+ $('#time').text(h + ":" + m + ":" + s);
+ },
+ momentjs: function()
+ {
+ for (var id in convey.moments)
+ $('#'+id).html(convey.moments[id].fromNow());
+ }
+ }
+};
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/goconvey.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/goconvey.js
new file mode 100644
index 00000000000..3bc12c5a7cd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/goconvey.js
@@ -0,0 +1,1322 @@
+$(init);
+
+$(window).load(function()
+{
+ // Things may shift after all the elements (images/fonts) are loaded
+ // In Chrome, calling reframe() doesn't work (maybe a quirk); we need to trigger resize
+ $(window).resize();
+});
+
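+// Entry point: initializes overall state and the theme, starts the poller,
+// wires up the UI, and fetches the latest test results.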
+function init()
+{
+ log("Welcome to GoConvey!");
+ log("Initializing interface");
+ convey.overall = emptyOverall();
+ loadTheme();
+ $('body').show();
+ initPoller();
+ wireup();
+ latest();
+}
+
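+// Applies the requested theme (or the saved/default one) by swapping the theme
+// stylesheet link, then re-colors the coverage bars.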
+function loadTheme(thmID)
+{
+ var defaultTheme = "dark";
+ var linkTagId = "themeRef";
+
+ if (!thmID)
+ thmID = get('theme') || defaultTheme;
+
+ log("Initializing theme: " + thmID);
+
+ if (!convey.config.themes[thmID])
+ {
+ replacement = Object.keys(convey.config.themes)[0] || defaultTheme;
+ log("NOTICE: Could not find '" + thmID + "' theme; defaulting to '" + replacement + "'");
+ thmID = replacement;
+ }
+
+ convey.theme = thmID;
+ save('theme', convey.theme);
+
+ var linkTag = $('#'+linkTagId);
+ var fullPath = convey.config.themePath
+ + convey.config.themes[convey.theme].filename;
+
+ if (linkTag.length === 0)
+ {
+ $('head').append('<link rel="stylesheet" href="'
+ + fullPath + '" id="themeRef">');
+ }
+ else
+ linkTag.attr('href', fullPath);
+
+ colorizeCoverageBars();
+}
+
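+// Subscribes to poller events (starting, poll success/failure, executing, idle)
+// and starts polling the server.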
+function initPoller()
+{
+ $(convey.poller).on('serverstarting', function(event)
+ {
+ log("Server is starting...");
+ convey.status = "starting";
+ showServerDown("Server starting");
+ $('#run-tests').addClass('spin-slowly disabled');
+ });
+
+ $(convey.poller).on('pollsuccess', function(event, data)
+ {
+ if (convey.status !== "starting")
+ hideServerDown();
+
+ // These two if statements determine if the server is now busy
+ // (and wasn't before) or is not busy (regardless of whether it was before)
+ if ((!convey.status || convey.status === "idle")
+ && data.status && data.status !== "idle")
+ $('#run-tests').addClass('spin-slowly disabled');
+ else if (convey.status !== "idle" && data.status === "idle")
+ {
+ $('#run-tests').removeClass('spin-slowly disabled');
+ }
+
+ switch (data.status)
+ {
+ case "executing":
+ $(convey.poller).trigger('serverexec', data);
+ break;
+ case "idle":
+ $(convey.poller).trigger('serveridle', data);
+ break;
+ }
+
+ convey.status = data.status;
+ });
+
+ $(convey.poller).on('pollfail', function(event, data)
+ {
+ log("Poll failed; server down");
+ convey.status = "down";
+ showServerDown("Server down");
+ });
+
+ $(convey.poller).on('serverexec', function(event, data)
+ {
+ log("Server status: executing");
+ $('.favicon').attr('href', '/favicon.ico'); // indicates running tests
+ });
+
+ $(convey.poller).on('serveridle', function(event, data)
+ {
+ log("Server status: idle");
+ log("Tests have finished executing");
+ latest();
+ });
+
+ convey.poller.start();
+}
+
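+// Binds all UI event handlers: settings switches, control buttons, story toggles,
+// history clicks, tooltips, and keyboard shortcuts.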
+function wireup()
+{
+ log("Wireup");
+
+ customMarkupPipes();
+
+ var themes = [];
+ for (var k in convey.config.themes)
+ themes.push({ id: k, name: convey.config.themes[k].name });
+ $('#theme').html(render('tpl-theme-enum', themes));
+
+ enumSel("theme", convey.theme);
+
+ loadSettingsFromStorage();
+
+ $('#stories').on('click', '.toggle-all-pkg', function(event)
+ {
+ if ($(this).closest('.story-pkg').data('pkg-state') === "expanded")
+ collapseAll();
+ else
+ expandAll();
+ return suppress(event);
+ });
+
+ // Wireup the settings switches
+ $('.enum#theme').on('click', 'li:not(.sel)', function()
+ {
+ loadTheme($(this).data('theme'));
+ });
+ $('.enum#pkg-expand-collapse').on('click', 'li:not(.sel)', function()
+ {
+ var newSetting = $(this).data('pkg-expand-collapse');
+ convey.packageStates = {};
+ save('pkg-expand-collapse', newSetting);
+ if (newSetting === "expanded")
+ expandAll();
+ else
+ collapseAll();
+ });
+ $('.enum#show-debug-output').on('click', 'li:not(.sel)', function()
+ {
+ var newSetting = $(this).data('show-debug-output');
+ save('show-debug-output', newSetting);
+ if (newSetting === "show")
+ $('.story-line-desc .message').show();
+ else
+ $('.story-line-desc .message').hide();
+ });
+ $('.enum#ui-effects').on('click', 'li:not(.sel)', function()
+ {
+ var newSetting = $(this).data('ui-effects');
+ convey.uiEffects = newSetting;
+ save('ui-effects', newSetting);
+ });
+ // End settings wireup
+
+ convey.layout.header = $('header').first();
+ convey.layout.frame = $('.frame').first();
+ convey.layout.footer = $('footer').last();
+
+ updateWatchPath();
+
+ $('#path').change(function()
+ {
+ // Updates the watched directory with the server and makes sure it exists
+ var tb = $(this);
+ var newpath = encodeURIComponent($.trim(tb.val()));
+ $.post('/watch?root='+newpath)
+ .done(function() { tb.removeClass('error'); })
+ .fail(function() { tb.addClass('error'); });
+ convey.framesOnSamePath = 1;
+ });
+
+ $('#run-tests').click(function()
+ {
+ var self = $(this);
+ if (self.hasClass('spin-slowly') || self.hasClass('disabled'))
+ return;
+ log("Test run invoked from web UI");
+ $.get("/execute");
+ });
+
+ $('#play-pause').click(function()
+ {
+ $.get('/pause');
+
+ if ($(this).hasClass(convey.layout.selClass))
+ {
+ // Un-pausing
+ if (!$('footer .replay').is(':visible'))
+ $('footer .recording').show();
+ $('footer .paused').hide();
+ log("Resuming auto-execution of tests");
+ }
+ else
+ {
+ // Pausing
+ $('footer .recording').hide();
+ $('footer .paused').show();
+ log("Pausing auto-execution of tests");
+ }
+
+ $(this).toggleClass("throb " + convey.layout.selClass);
+ });
+
+ $('#toggle-notif').click(function()
+ {
+ log("Turning notifications " + (notif() ? "off" : "on"));
+ $(this).toggleClass("fa-bell-o fa-bell " + convey.layout.selClass);
+ save('notifications', !notif());
+
+ if (notif() && 'Notification' in window)
+ {
+ if (Notification.permission !== 'denied')
+ {
+ Notification.requestPermission(function(per)
+ {
+ if (!('permission' in Notification))
+ Notification.permission = per;
+ });
+ }
+ else
+ log("Permission denied to show desktop notification");
+ }
+ });
+
+ $('#show-history').click(function()
+ {
+ toggle($('.history'), $(this));
+ });
+
+ $('#show-settings').click(function()
+ {
+ toggle($('.settings'), $(this));
+ });
+
+ $('#show-gen').click(function() {
+ var writer = window.open("/composer.html");
+ if (window.focus)
+ writer.focus();
+ });
+
+ // Wire-up the tipsy tooltips
+ $('.controls li, .pkg-cover-name').tipsy({ live: true });
+ $('footer .replay').tipsy({ live: true, gravity: 'e' });
+ $('#path').tipsy({ delayIn: 500 });
+ $('.ignore').tipsy({ live: true, gravity: $.fn.tipsy.autoNS });
+ $('.disabled').tipsy({ live: true, gravity: $.fn.tipsy.autoNS });
+ $('#logo').tipsy({ gravity: 'w' });
+
+
+ $('.toggler').not('.narrow').prepend('<i class="fa fa-angle-up fa-lg"></i>');
+ $('.toggler.narrow').prepend('<i class="fa fa-angle-down fa-lg"></i>');
+
+ $('.toggler').not('.narrow').click(function()
+ {
+ var target = $('#' + $(this).data('toggle'));
+ $('.fa-angle-down, .fa-angle-up', this).toggleClass('fa-angle-down fa-angle-up');
+ target.toggle();
+ });
+
+ $('.toggler.narrow').click(function()
+ {
+ var target = $('#' + $(this).data('toggle'));
+ $('.fa-angle-down, .fa-angle-up', this).toggleClass('fa-angle-down fa-angle-up');
+ target.toggleClass('hide-narrow show-narrow');
+ });
+
+ // Enumerations are horizontal lists where one item can be selected at a time
+ $('.enum').on('click', 'li', enumSel);
+
+ // Start ticking time
+ convey.intervals.time = setInterval(convey.intervalFuncs.time, 1000);
+ convey.intervals.momentjs = setInterval(convey.intervalFuncs.momentjs, 5000);
+ convey.intervalFuncs.time();
+
+ // Ignore/un-ignore package
+ $('#stories').on('click', '.fa.ignore', function(event)
+ {
+ var pkg = $(this).data('pkg');
+ if ($(this).hasClass('disabled'))
+ return;
+ else if ($(this).hasClass('unwatch'))
+ $.get("/ignore", { paths: pkg });
+ else
+ $.get("/reinstate", { paths: pkg });
+ $(this).toggleClass('watch unwatch fa-eye fa-eye-slash clr-red');
+ return suppress(event);
+ });
+
+ // Show "All" link when hovering the toggler on packages in the stories
+ $('#stories').on({
+ mouseenter: function() { $('.toggle-all-pkg', this).stop().show('fast'); },
+ mouseleave: function() { $('.toggle-all-pkg', this).stop().hide('fast'); }
+ }, '.pkg-toggle-container');
+
+ // Toggle a package in the stories when clicked
+ $('#stories').on('click', '.story-pkg', function(event)
+ {
+ togglePackage(this, true);
+ return suppress(event);
+ });
+
+ // Select a story line when it is clicked
+ $('#stories').on('click', '.story-line', function()
+ {
+ $('.story-line-sel').not(this).removeClass('story-line-sel');
+ $(this).toggleClass('story-line-sel');
+ });
+
+ // Render a frame from the history when clicked
+ $('.history .container').on('click', '.item', function(event)
+ {
+ var frame = getFrame($(this).data("frameid"));
+ changeStatus(frame.overall.status, true);
+ renderFrame(frame);
+ $(this).addClass('selected');
+
+ // Update current status down in the footer
+ if ($(this).is(':first-child'))
+ {
+ // Now on current frame
+ $('footer .replay').hide();
+
+ if ($('#play-pause').hasClass(convey.layout.selClass)) // Was/is paused
+ $('footer .paused').show();
+ else
+ $('footer .recording').show(); // Was/is recording
+ }
+ else
+ {
+ $('footer .recording, footer .replay').hide();
+ $('footer .replay').show();
+ }
+ return suppress(event);
+ });
+
+ $('footer').on('click', '.replay', function()
+ {
+ // Clicking "REPLAY" in the corner should bring them back to the current frame
+ // and hide, if visible, the history panel for convenience
+ $('.history .item:first-child').click();
+ if ($('#show-history').hasClass('sel'))
+ $('#show-history').click();
+ });
+
+ // Keyboard shortcuts!
+ $(document).keydown(function(e)
+ {
+ if (e.ctrlKey || e.metaKey || e.shiftKey)
+ return;
+
+ switch (e.keyCode)
+ {
+ case 67: // c
+ $('#show-gen').click();
+ break;
+ case 82: // r
+ $('#run-tests').click();
+ break;
+ case 78: // n
+ $('#toggle-notif').click();
+ break;
+ case 87: // w
+ $('#path').focus();
+ break;
+ case 80: // p
+ $('#play-pause').click();
+ break;
+ }
+
+ return suppress(e);
+ });
+ $('body').on('keydown', 'input, textarea, select', function(e)
+ {
+ // If user is typing something, don't let this event bubble
+ // up to the document to annoyingly fire keyboard shortcuts
+ e.stopPropagation();
+ });
+
+ // Keep everything positioned and sized properly on window resize
+ reframe();
+ $(window).resize(reframe);
+}
+
+function expandAll()
+{
+ $('.story-pkg').each(function() { expandPackage($(this).data('pkg')); });
+}
+
+function collapseAll()
+{
+ $('.story-pkg').each(function() { collapsePackage($(this).data('pkg')); });
+}
+
+function expandPackage(pkgId)
+{
+ var pkg = $('.story-pkg.pkg-'+pkgId);
+ var rows = $('.story-line.pkg-'+pkgId);
+
+ pkg.data('pkg-state', "expanded").addClass('expanded').removeClass('collapsed');
+
+ $('.pkg-toggle', pkg)
+ .addClass('fa-minus-square-o')
+ .removeClass('fa-plus-square-o');
+
+ rows.show();
+}
+
+function collapsePackage(pkgId)
+{
+ var pkg = $('.story-pkg.pkg-'+pkgId);
+ var rows = $('.story-line.pkg-'+pkgId);
+
+ pkg.data('pkg-state', "collapsed").addClass('collapsed').removeClass('expanded');
+
+ $('.pkg-toggle', pkg)
+ .addClass('fa-plus-square-o')
+ .removeClass('fa-minus-square-o');
+
+ rows.hide();
+}
+
+function togglePackage(storyPkgElem)
+{
+ var pkgId = $(storyPkgElem).data('pkg');
+ if ($(storyPkgElem).data('pkg-state') === "expanded")
+ {
+ collapsePackage(pkgId);
+ convey.packageStates[$(storyPkgElem).data('pkg-name')] = "collapsed";
+ }
+ else
+ {
+ expandPackage(pkgId);
+ convey.packageStates[$(storyPkgElem).data('pkg-name')] = "expanded";
+ }
+}
+
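+// Restores saved settings (package expand/collapse, debug output, UI effects,
+// notifications) from localStorage.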
+function loadSettingsFromStorage()
+{
+ var pkgExpCollapse = get("pkg-expand-collapse");
+ if (!pkgExpCollapse)
+ {
+ pkgExpCollapse = "expanded";
+ save("pkg-expand-collapse", pkgExpCollapse);
+ }
+ enumSel("pkg-expand-collapse", pkgExpCollapse);
+
+ var showDebugOutput = get("show-debug-output");
+ if (!showDebugOutput)
+ {
+ showDebugOutput = "show";
+ save("show-debug-output", showDebugOutput);
+ }
+ enumSel("show-debug-output", showDebugOutput);
+
+ var uiEffects = get("ui-effects");
+ if (uiEffects === null)
+ uiEffects = "true";
+ convey.uiEffects = uiEffects === "true";
+ enumSel("ui-effects", uiEffects);
+
+ if (notif())
+ $('#toggle-notif').toggleClass("fa-bell-o fa-bell " + convey.layout.selClass);
+}
+
+
+
+
+
+
+
+
+
+
+
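+// Requests the latest test results from the server and passes them to process().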
+function latest()
+{
+ log("Fetching latest test results");
+ $.getJSON("/latest", process);
+}
+
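+// Turns a /latest response into a new history frame: tallies assertions and statuses
+// per package, computes coverage and its delta, then renders the frame and notifies.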
+function process(data, status, jqxhr)
+{
+ if (!data || !data.Revision)
+ {
+ log("No data received or revision timestamp was missing");
+ return;
+ }
+
+ if (data.Paused && !$('#play-pause').hasClass(convey.layout.selClass))
+ {
+ $('footer .recording').hide();
+ $('footer .paused').show();
+ $('#play-pause').toggleClass("throb " + convey.layout.selClass);
+ }
+
+ if (current() && data.Revision === current().results.Revision)
+ {
+ log("No changes");
+ changeStatus(current().overall.status); // re-assures that status is unchanged
+ return;
+ }
+
+
+ // Put the new frame in the queue so we can use current() to get to it
+ convey.history.push(newFrame());
+ convey.framesOnSamePath++;
+
+ // Store the raw results in our frame
+ current().results = data;
+
+ log("Updating watch path");
+ updateWatchPath();
+
+ // Remove all templated items from the DOM as we'll
+ // replace them with new ones; also remove tipsy tooltips
+ // that may have lingered around
+ $('.templated, .tipsy').remove();
+
+ var uniqueID = 0;
+ var coverageAvgHelper = { countedPackages: 0, coverageSum: 0 };
+ var packages = {
+ tested: [],
+ ignored: [],
+ coverage: {},
+ nogofiles: [],
+ notestfiles: [],
+ notestfn: []
+ };
+
+ log("Compiling package statistics");
+
+ // Look for failures and panics through the packages->tests->stories...
+ for (var i in data.Packages)
+ {
+ pkg = makeContext(data.Packages[i]);
+ current().overall.duration += pkg.Elapsed;
+ pkg._id = uniqueID++;
+
+ if (pkg.Outcome === "build failure")
+ {
+ current().overall.failedBuilds++;
+ current().failedBuilds.push(pkg);
+ continue;
+ }
+
+
+ if (pkg.Outcome === "no go code")
+ packages.nogofiles.push(pkg);
+ else if (pkg.Outcome === "no test files")
+ packages.notestfiles.push(pkg);
+ else if (pkg.Outcome === "no test functions")
+ packages.notestfn.push(pkg);
+ else if (pkg.Outcome === "ignored" || pkg.Outcome === "disabled")
+ packages.ignored.push(pkg);
+ else
+ {
+ if (pkg.Coverage >= 0)
+ coverageAvgHelper.coverageSum += pkg.Coverage;
+ coverageAvgHelper.countedPackages++;
+ packages.coverage[pkg.PackageName] = pkg.Coverage;
+ packages.tested.push(pkg);
+ }
+
+
+ for (var j in pkg.TestResults)
+ {
+ test = makeContext(pkg.TestResults[j]);
+ test._id = uniqueID++;
+ test._pkgid = pkg._id;
+ test._pkg = pkg.PackageName;
+
+ if (test.Stories.length === 0)
+ {
+ // Here we've got ourselves a classic Go test,
+ // not a GoConvey test that has stories and assertions
+ // so we'll treat this whole test as a single assertion
+ current().overall.assertions++;
+
+ if (test.Error)
+ {
+ test._status = convey.statuses.panic;
+ pkg._panicked++;
+ test._panicked++;
+ current().assertions.panicked.push(test);
+ }
+ else if (test.Passed === false)
+ {
+ test._status = convey.statuses.fail;
+ pkg._failed++;
+ test._failed++;
+ current().assertions.failed.push(test);
+ }
+ else if (test.Skipped)
+ {
+ test._status = convey.statuses.skipped;
+ pkg._skipped++;
+ test._skipped++;
+ current().assertions.skipped.push(test);
+ }
+ else
+ {
+ test._status = convey.statuses.pass;
+ pkg._passed++;
+ test._passed++;
+ current().assertions.passed.push(test);
+ }
+ }
+ else
+ test._status = convey.statuses.pass;
+
+ var storyPath = [{ Depth: -1, Title: test.TestName, _id: test._id }]; // Maintains the current assertion's story as we iterate
+
+ for (var k in test.Stories)
+ {
+ var story = makeContext(test.Stories[k]);
+
+ story._id = uniqueID;
+ story._pkgid = pkg._id;
+ current().overall.assertions += story.Assertions.length;
+
+ // Establish the current story path so we can report the context
+				// of failures and panics more conveniently at the top of the page
+ if (storyPath.length > 0)
+ for (var x = storyPath[storyPath.length - 1].Depth; x >= test.Stories[k].Depth; x--)
+ storyPath.pop();
+ storyPath.push({ Depth: test.Stories[k].Depth, Title: test.Stories[k].Title, _id: test.Stories[k]._id });
+
+
+ for (var l in story.Assertions)
+ {
+ var assertion = story.Assertions[l];
+ assertion._id = uniqueID;
+ assertion._pkg = pkg.PackageName;
+ assertion._pkgId = pkg._id;
+ assertion._failed = !!assertion.Failure;
+ assertion._panicked = !!assertion.Error;
+ assertion._maxDepth = storyPath[storyPath.length - 1].Depth;
+ $.extend(assertion._path = [], storyPath);
+
+ if (assertion.Failure)
+ {
+ current().assertions.failed.push(assertion);
+ pkg._failed++;
+ test._failed++;
+ story._failed++;
+ }
+ if (assertion.Error)
+ {
+ current().assertions.panicked.push(assertion);
+ pkg._panicked++;
+ test._panicked++;
+ story._panicked++;
+ }
+ if (assertion.Skipped)
+ {
+ current().assertions.skipped.push(assertion);
+ pkg._skipped++;
+ test._skipped++;
+ story._skipped++;
+ }
+ if (!assertion.Failure && !assertion.Error && !assertion.Skipped)
+ {
+ current().assertions.passed.push(assertion);
+ pkg._passed++;
+ test._passed++;
+ story._passed++;
+ }
+ }
+
+ assignStatus(story);
+ uniqueID++;
+ }
+
+ if (!test.Passed && !test._failed && !test._panicked)
+ {
+ // Edge case: Developer is using the GoConvey DSL, but maybe
+ // in some cases is using t.Error() instead of So() assertions.
+ // This can be detected, assuming all child stories with
+ // assertions (in this test) are passing.
+ test._status = convey.statuses.fail;
+ pkg._failed++;
+ test._failed++;
+ current().assertions.failed.push(test);
+ }
+ }
+ }
+
+ current().overall.passed = current().assertions.passed.length;
+ current().overall.panics = current().assertions.panicked.length;
+ current().overall.failures = current().assertions.failed.length;
+ current().overall.skipped = current().assertions.skipped.length;
+
+ current().overall.coverage = Math.round((coverageAvgHelper.coverageSum / (coverageAvgHelper.countedPackages || 1)) * 100) / 100;
+ current().overall.duration = Math.round(current().overall.duration * 1000) / 1000;
+
+ // Compute the coverage delta (difference in overall coverage between now and last frame)
+ // Only compare coverage on the same watch path
+ var coverDelta = current().overall.coverage;
+ if (convey.framesOnSamePath > 2)
+ coverDelta = current().overall.coverage - convey.history[convey.history.length - 2].overall.coverage;
+ current().coverDelta = Math.round(coverDelta * 100) / 100;
+
+
+ // Build failures trump panics,
+ // Panics trump failures,
+ // Failures trump pass.
+ if (current().overall.failedBuilds)
+ changeStatus(convey.statuses.buildfail);
+ else if (current().overall.panics)
+ changeStatus(convey.statuses.panic);
+ else if (current().overall.failures)
+ changeStatus(convey.statuses.fail);
+ else
+ changeStatus(convey.statuses.pass);
+
+ // Save our organized package lists
+ current().packages = packages;
+
+ log(" Assertions: " + current().overall.assertions);
+ log(" Passed: " + current().overall.passed);
+ log(" Skipped: " + current().overall.skipped);
+ log(" Failures: " + current().overall.failures);
+ log(" Panics: " + current().overall.panics);
+ log("Build Failures: " + current().overall.failedBuilds);
+ log(" Coverage: " + current().overall.coverage + "% (" + showCoverDelta(current().coverDelta) + ")");
+
+ // Save timestamp when this test was executed
+ convey.moments['last-test'] = moment();
+
+
+
+ // Render... render ALL THE THINGS! (All model/state modifications are DONE!)
+ renderFrame(current());
+ // Now, just finish up miscellaneous UI things
+
+
+ // Add this frame to the history pane
+ var framePiece = render('tpl-history', current());
+ $('.history .container').prepend(framePiece);
+ $('.history .item:first-child').addClass('selected');
+ convey.moments['frame-'+current().id] = moment();
+ if (convey.history.length > convey.maxHistory)
+ {
+ // Delete the oldest frame out of the history pane if we have too many
+ convey.history.splice(0, 1);
+ $('.history .container .item').last().remove();
+ }
+
+ // Now add the momentjs time to the new frame in the history
+ convey.intervalFuncs.momentjs();
+
+ // Show notification, if enabled
+ if (notif())
+ {
+ log("Showing notification");
+ if (convey.notif)
+ {
+ clearTimeout(convey.notifTimer);
+ convey.notif.close();
+ }
+
+		var notifText = notifSummary(current());
+
+ convey.notif = new Notification(notifText.title, {
+ body: notifText.body,
+ icon: $('.favicon').attr('href')
+ });
+
+ convey.notifTimer = setTimeout(function() { convey.notif.close(); }, 5000);
+ }
+
+ // Update title in title bar
+ if (current().overall.passed === current().overall.assertions && current().overall.status.class === "ok")
+ $('title').text("GoConvey (ALL PASS)");
+ else
+ $('title').text("GoConvey [" + current().overall.status.text + "] " + current().overall.passed + "/" + current().overall.assertions);
+
+ // All done!
+ log("Processing complete");
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+// Updates the entire UI given a frame from the history
+function renderFrame(frame)
+{
+ log("Rendering frame (id: " + frame.id + ")");
+
+ $('#coverage').html(render('tpl-coverage', frame.packages.tested.sort(sortPackages)));
+ $('#ignored').html(render('tpl-ignored', frame.packages.ignored.sort(sortPackages)));
+ $('#nogofiles').html(render('tpl-nogofiles', frame.packages.nogofiles.sort(sortPackages)));
+ $('#notestfiles').html(render('tpl-notestfiles', frame.packages.notestfiles.sort(sortPackages)));
+ $('#notestfn').html(render('tpl-notestfn', frame.packages.notestfn.sort(sortPackages)));
+
+ if (frame.overall.failedBuilds)
+ {
+ $('.buildfailures').show();
+ $('#buildfailures').html(render('tpl-buildfailures', frame.failedBuilds));
+ }
+ else
+ $('.buildfailures').hide();
+
+ if (frame.overall.panics)
+ {
+ $('.panics').show();
+ $('#panics').html(render('tpl-panics', frame.assertions.panicked));
+ }
+ else
+ $('.panics').hide();
+
+
+ if (frame.overall.failures)
+ {
+ $('.failures').show();
+ $('#failures').html(render('tpl-failures', frame.assertions.failed));
+ $(".failure").each(function() {
+ $(this).prettyTextDiff();
+ });
+ }
+ else
+ $('.failures').hide();
+
+ $('#stories').html(render('tpl-stories', frame.packages.tested.sort(sortPackages)));
+ $('#stories').append(render('tpl-stories', frame.packages.ignored.sort(sortPackages)));
+
+ var pkgDefaultView = get('pkg-expand-collapse');
+ $('.story-pkg.expanded').each(function()
+ {
+ if (pkgDefaultView === "collapsed" && convey.packageStates[$(this).data('pkg-name')] !== "expanded")
+ collapsePackage($(this).data('pkg'));
+ });
+
+ redrawCoverageBars();
+
+ $('#assert-count').html("<b>"+frame.overall.assertions+"</b> assertion"
+ + (frame.overall.assertions !== 1 ? "s" : ""));
+ $('#skip-count').html("<b>"+frame.assertions.skipped.length + "</b> skipped");
+ $('#fail-count').html("<b>"+frame.assertions.failed.length + "</b> failed");
+ $('#panic-count').html("<b>"+frame.assertions.panicked.length + "</b> panicked");
+ $('#duration').html("<b>"+frame.overall.duration + "</b>s");
+
+ $('#narrow-assert-count').html("<b>"+frame.overall.assertions+"</b>");
+ $('#narrow-skip-count').html("<b>"+frame.assertions.skipped.length + "</b>");
+ $('#narrow-fail-count').html("<b>"+frame.assertions.failed.length + "</b>");
+ $('#narrow-panic-count').html("<b>"+frame.assertions.panicked.length + "</b>");
+
+ $('.history .item').removeClass('selected');
+
+ if (get('show-debug-output') === "hide")
+ $('.story-line-desc .message').hide();
+
+ log("Rendering finished");
+}
+
+
+
+
+
+
+
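+// Selects the item matching val within the enum list with the given id;
+// when used directly as a click handler, selects the clicked item instead.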
+function enumSel(id, val)
+{
+ if (typeof id === "string" && typeof val === "string")
+ {
+ $('.enum#'+id+' > li').each(function()
+ {
+ if ($(this).data(id).toString() === val)
+ {
+ $(this).addClass(convey.layout.selClass).siblings().removeClass(convey.layout.selClass);
+ return false;
+ }
+ });
+ }
+ else
+ $(this).addClass(convey.layout.selClass).siblings().removeClass(convey.layout.selClass);
+}
+
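+// Slides a panel (history/settings) open or closed, fading its .container
+// and toggling the switch element's selected class.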
+function toggle(jqelem, switchelem)
+{
+ var speed = 250;
+ var transition = 'easeInOutQuart';
+ var containerSel = '.container';
+
+ if (!jqelem.is(':visible'))
+ {
+ $(containerSel, jqelem).css('opacity', 0);
+ jqelem.stop().slideDown(speed, transition, function()
+ {
+ if (switchelem)
+ switchelem.toggleClass(convey.layout.selClass);
+ $(containerSel, jqelem).stop().animate({
+ opacity: 1
+ }, speed);
+ reframe();
+ });
+ }
+ else
+ {
+ $(containerSel, jqelem).stop().animate({
+ opacity: 0
+ }, speed, function()
+ {
+ if (switchelem)
+ switchelem.toggleClass(convey.layout.selClass);
+ jqelem.stop().slideUp(speed, transition, function() { reframe(); });
+ });
+ }
+}
+
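+// Updates the overall status banner text, color class, and favicon,
+// optionally with pulsate/flash effects.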
+function changeStatus(newStatus, isHistoricalFrame)
+{
+ if (!newStatus || !newStatus.class || !newStatus.text)
+ newStatus = convey.statuses.pass;
+
+ var sameStatus = newStatus.class === convey.overallClass;
+
+ // The CSS class .flash and the jQuery UI 'pulsate' effect don't play well together.
+ // This series of callbacks does the flickering/pulsating as well as
+ // enabling/disabling flashing in the proper order so that they don't overlap.
+ // TODO: I suppose the pulsating could also be done with just CSS, maybe...?
+
+ if (convey.uiEffects)
+ {
+ var times = sameStatus ? 3 : 2;
+ var duration = sameStatus ? 500 : 300;
+
+ $('.overall .status').removeClass('flash').effect("pulsate", {times: times}, duration, function()
+ {
+ $(this).text(newStatus.text);
+
+ if (newStatus !== convey.statuses.pass) // only flicker extra when not currently passing
+ {
+ $(this).effect("pulsate", {times: 1}, 300, function()
+ {
+ $(this).effect("pulsate", {times: 1}, 500, function()
+ {
+ if (newStatus === convey.statuses.panic
+ || newStatus === convey.statuses.buildfail)
+ $(this).addClass('flash');
+ else
+ $(this).removeClass('flash');
+ });
+ });
+ }
+ });
+ }
+ else
+ $('.overall .status').text(newStatus.text);
+
+ if (!sameStatus) // change the color
+ $('.overall').switchClass(convey.overallClass, newStatus.class, 1000);
+
+ if (!isHistoricalFrame)
+ current().overall.status = newStatus;
+ convey.overallClass = newStatus.class;
+ $('.favicon').attr('href', '/resources/ico/goconvey-'+newStatus.class+'.ico');
+}
+
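+// Fetches the currently watched directory from the server and reflects it in the path box.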
+function updateWatchPath()
+{
+ $.get("/watch", function(data)
+ {
+ var newPath = $.trim(data);
+ if (newPath !== $('#path').val())
+ convey.framesOnSamePath = 1;
+ $('#path').val(newPath);
+ });
+}
+
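+// Builds the title and body text for a desktop notification from the frame's overall results.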
+function notifSummary(frame)
+{
+ var body = frame.overall.passed + " passed, ";
+
+ if (frame.overall.failedBuilds)
+ body += frame.overall.failedBuilds + " build" + (frame.overall.failedBuilds !== 1 ? "s" : "") + " failed, ";
+ if (frame.overall.failures)
+ body += frame.overall.failures + " failed, ";
+ if (frame.overall.panics)
+ body += frame.overall.panics + " panicked, ";
+ body += frame.overall.skipped + " skipped";
+
+ body += "\r\n" + frame.overall.duration + "s";
+
+ if (frame.coverDelta > 0)
+ body += "\r\n↑ coverage (" + showCoverDelta(frame.coverDelta) + ")";
+ else if (frame.coverDelta < 0)
+ body += "\r\n↓ coverage (" + showCoverDelta(frame.coverDelta) + ")";
+
+ return {
+ title: frame.overall.status.text.toUpperCase(),
+ body: body
+ };
+}
+
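+// Animates each package coverage bar from its previous width to the new one,
+// then re-applies the theme's colors.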
+function redrawCoverageBars()
+{
+ $('.pkg-cover-bar').each(function()
+ {
+ var pkgName = $(this).data("pkg");
+ var hue = $(this).data("width");
+ var hueDiff = hue;
+
+ if (convey.history.length > 1)
+ {
+ var oldHue = convey.history[convey.history.length - 2].packages.coverage[pkgName] || 0;
+ $(this).width(oldHue + "%");
+ hueDiff = hue - oldHue;
+ }
+
+ $(this).animate({
+ width: "+=" + hueDiff + "%"
+ }, 1250);
+ });
+
+ colorizeCoverageBars();
+}
+
+function colorizeCoverageBars()
+{
+ var colorTpl = convey.config.themes[convey.theme].coverage
+ || "hsla({{hue}}, 75%, 30%, .3)"; //default color template
+
+ $('.pkg-cover-bar').each(function()
+ {
+ var hue = $(this).data("width");
+ $(this).css({
+ background: colorTpl.replace("{{hue}}", hue)
+ });
+ });
+}
+
+
+function getFrame(id)
+{
+ for (var i in convey.history)
+ if (convey.history[i].id === id)
+ return convey.history[i];
+}
+
+function render(templateID, context)
+{
+ var tpl = $('#' + templateID).text();
+ return $($.trim(Mark.up(tpl, context)));
+}
+
+function reframe()
+{
+ var heightBelowHeader = $(window).height() - convey.layout.header.outerHeight();
+ var middleHeight = heightBelowHeader - convey.layout.footer.outerHeight();
+ convey.layout.frame.height(middleHeight);
+
+ var pathWidth = $(window).width() - $('#logo').outerWidth() - $('#control-buttons').outerWidth() - 10;
+ $('#path-container').width(pathWidth);
+}
+
+function notif()
+{
+ return get('notifications') === "true"; // stored as strings
+}
+
+function showServerDown(message)
+{
+ $('.server-down .notice-message').text(message);
+ $('.server-down').show();
+ $('.server-not-down').hide();
+ reframe();
+}
+
+function hideServerDown()
+{
+ $('.server-down').hide();
+ $('.server-not-down').show();
+ reframe();
+}
+
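+// Appends a timestamped message to the on-page log if present; otherwise falls back to console.log.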
+function log(msg)
+{
+ var jqLog = $('#log');
+ if (jqLog.length > 0)
+ {
+ var t = new Date();
+ var h = zerofill(t.getHours(), 2);
+ var m = zerofill(t.getMinutes(), 2);
+ var s = zerofill(t.getSeconds(), 2);
+ var ms = zerofill(t.getMilliseconds(), 3);
+		var date = h + ":" + m + ":" + s + "." + ms;
+
+ $(jqLog).append(render('tpl-log-line', { time: date, msg: msg }));
+ $(jqLog).parent('.col').scrollTop(jqLog[0].scrollHeight);
+ }
+ else
+ console.log(msg);
+}
+
+function zerofill(val, count)
+{
+ // Cheers to http://stackoverflow.com/a/9744576/1048862
+ var pad = new Array(1 + count).join('0');
+ return (pad + val).slice(-pad.length);
+}
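+
+// Illustrative only (not part of the upstream file): the pad-and-slice trick above
+// left-pads a value to a fixed width, e.g.
+//   zerofill(7, 2);   // -> "07"
+//   zerofill(123, 2); // -> "23" (slice keeps only the last `count` characters)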
+
+// Sorts packages ascending by only the last part of their name
+// Can be passed into Array.sort()
+function sortPackages(a, b)
+{
+ var aPkg = splitPathName(a.PackageName);
+ var bPkg = splitPathName(b.PackageName);
+
+ if (aPkg.parts.length === 0 || bPkg.parts.length === 0)
+ return 0;
+
+ var aName = aPkg.parts[aPkg.parts.length - 1].toLowerCase();
+ var bName = bPkg.parts[bPkg.parts.length - 1].toLowerCase();
+
+ if (aName < bName)
+ return -1;
+ else if (aName > bName)
+ return 1;
+ else
+ return 0;
+
+ /*
+ MEMO: Use to sort by entire package name:
+ if (a.PackageName < b.PackageName) return -1;
+ else if (a.PackageName > b.PackageName) return 1;
+ else return 0;
+ */
+}
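+
+// Illustrative only (not part of the upstream file): sortPackages is written to be
+// passed to Array.prototype.sort; with a hypothetical package list it compares only
+// the final path segment, case-insensitively:
+//   var pkgs = [{PackageName: "github.com/x/zebra"}, {PackageName: "github.com/y/alpha"}];
+//   pkgs.sort(sortPackages); // -> ".../y/alpha" first, ".../x/zebra" second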
+
+function get(key)
+{
+ var val = localStorage.getItem(key);
+ if (val && (val[0] === '[' || val[0] === '{'))
+ return JSON.parse(val);
+ else
+ return val;
+}
+
+function save(key, val)
+{
+ if (typeof val === 'object')
+ val = JSON.stringify(val);
+ else if (typeof val === 'number' || typeof val === 'boolean')
+ val = val.toString();
+ localStorage.setItem(key, val);
+}
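+
+// Illustrative only (not part of the upstream file; keys are hypothetical): get()/save()
+// round-trip values through localStorage as strings, re-parsing anything that looks like JSON:
+//   save('notifications', true);   // stored as the string "true" (see notif())
+//   save('frame', { failed: 2 });  // stored as '{"failed":2}'
+//   get('frame').failed;           // -> 2, parsed back because the value starts with '{'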
+
+function splitPathName(str)
+{
+ var delim = str.indexOf('\\') > -1 ? '\\' : '/';
+ return { delim: delim, parts: str.split(delim) };
+}
+
+function newFrame()
+{
+ return {
+ results: {}, // response from server (with some of our own context info)
+ packages: {}, // packages organized into statuses for convenience (like with coverage)
+ overall: emptyOverall(), // overall status info, compiled from server's response
+ assertions: emptyAssertions(), // lists of assertions, compiled from server's response
+ failedBuilds: [], // list of packages that failed to build
+ timestamp: moment(), // the timestamp of this "freeze-state"
+ id: convey.frameCounter++, // unique ID for this frame
+ coverDelta: 0 // difference in total coverage from the last frame to this one
+ };
+}
+
+function emptyOverall()
+{
+ return {
+ status: {},
+ duration: 0,
+ assertions: 0,
+ passed: 0,
+ panics: 0,
+ failures: 0,
+ skipped: 0,
+ failedBuilds: 0,
+ coverage: 0
+ };
+}
+
+function emptyAssertions()
+{
+ return {
+ passed: [],
+ failed: [],
+ panicked: [],
+ skipped: []
+ };
+}
+
+function makeContext(obj)
+{
+ obj._passed = 0;
+ obj._failed = 0;
+ obj._panicked = 0;
+ obj._skipped = 0;
+ obj._status = '';
+ return obj;
+}
+
+function current()
+{
+ return convey.history[convey.history.length - 1];
+}
+
+function assignStatus(obj)
+{
+ if (obj._skipped)
+ obj._status = 'skip';
+ else if (obj.Outcome === "ignored")
+ obj._status = convey.statuses.ignored;
+ else if (obj._panicked)
+ obj._status = convey.statuses.panic;
+ else if (obj._failed || obj.Outcome === "failed")
+ obj._status = convey.statuses.fail;
+ else
+ obj._status = convey.statuses.pass;
+}
+
+function showCoverDelta(delta)
+{
+ if (delta > 0)
+ return "+" + delta + "%";
+ else if (delta === 0)
+ return "±" + delta + "%";
+ else
+ return delta + "%";
+}
+
+function customMarkupPipes()
+{
+ // MARKUP.JS custom pipes
+ Mark.pipes.relativePath = function(str)
+ {
+ var basePath = new RegExp($('#path').val()+'[\\/]', 'gi');
+ return str.replace(basePath, '');
+ };
+ Mark.pipes.htmlSafe = function(str)
+ {
+ return str.replace(/</g, "&lt;").replace(/>/g, "&gt;");
+ };
+ Mark.pipes.ansiColours = ansispan;
+ Mark.pipes.boldPkgName = function(str)
+ {
+ var pkg = splitPathName(str);
+ pkg.parts[0] = '<span class="not-pkg-name">' + pkg.parts[0];
+ pkg.parts[pkg.parts.length - 1] = "</span><b>" + pkg.parts[pkg.parts.length - 1] + "</b>";
+ return pkg.parts.join(pkg.delim);
+ };
+ Mark.pipes.needsDiff = function(test)
+ {
+ return !!test.Failure && (test.Expected !== "" || test.Actual !== "");
+ };
+ Mark.pipes.coveragePct = function(str)
+ {
+ // Expected input: 75% to be represented as: "75.0"
+ var num = parseInt(str, 10); // we only need int precision
+ if (num < 0)
+ return "0";
+ else if (num <= 5)
+ return "5px"; // Still shows low coverage
+ else if (num > 100)
+ str = "100";
+ return str;
+ };
+ Mark.pipes.coverageDisplay = function(str)
+ {
+ var num = parseFloat(str);
+ return num < 0 ? "" : num + "% coverage";
+ };
+ Mark.pipes.coverageReportName = function(str)
+ {
+ return str.replace(/\//g, "-");
+ };
+}
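+
+// Illustrative only (not part of the upstream file): Markup.js applies these custom
+// pipes with the {{value|pipe}} syntax inside a template, e.g. a hypothetical line:
+//   <div class="pkg">{{PackageName|boldPkgName}} {{Coverage|coverageDisplay}}</div>
+// which render()/Mark.up() expands using the pipes registered above.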
+
+function suppress(event)
+{
+ if (!event)
+ return false;
+ if (event.preventDefault)
+ event.preventDefault();
+ if (event.stopPropagation)
+ event.stopPropagation();
+ event.cancelBubble = true;
+ return false;
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/ansispan.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/ansispan.js
new file mode 100644
index 00000000000..3d8603a6d1b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/ansispan.js
@@ -0,0 +1,67 @@
+/*
+Copyright (C) 2011 by Maciej Małecki
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+*/
+
+var ansispan = function (str) {
+ Object.keys(ansispan.foregroundColors).forEach(function (ansi) {
+ var span = '<span class="ansi-' + ansispan.foregroundColors[ansi] + '">';
+
+ //
+ // `\033[Xm` == `\033[0;Xm` sets foreground color to `X`.
+ //
+
+ str = str.replace(
+ new RegExp('\033\\[' + ansi + 'm', 'g'),
+ span
+ ).replace(
+ new RegExp('\033\\[0;' + ansi + 'm', 'g'),
+ span
+ );
+ });
+ //
+ // `\033[1m` enables bold font, `\033[22m` disables it
+ //
+ str = str.replace(/\033\[1m/g, '<b>').replace(/\033\[22m/g, '</b>');
+
+ //
+ // `\033[3m` enables italics font, `\033[23m` disables it
+ //
+ str = str.replace(/\033\[3m/g, '<i>').replace(/\033\[23m/g, '</i>');
+
+ str = str.replace(/\033\[m/g, '</span>');
+ str = str.replace(/\033\[0m/g, '</span>');
+ return str.replace(/\033\[39m/g, '</span>');
+};
+
+ansispan.foregroundColors = {
+ '30': 'black',
+ '31': 'red',
+ '32': 'green',
+ '33': 'yellow',
+ '34': 'blue',
+ '35': 'purple',
+ '36': 'cyan',
+ '37': 'white'
+};
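+
+// Illustrative only (not part of the upstream file): ansispan rewrites ANSI colour
+// escapes into spans the stylesheet can target, e.g.
+//   ansispan("\033[31mFAIL\033[0m"); // -> '<span class="ansi-red">FAIL</span>'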
+
+if (typeof module !== 'undefined' && module.exports) {
+ module.exports = ansispan;
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/diff-match-patch.min.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/diff-match-patch.min.js
new file mode 100644
index 00000000000..c78b7ffc46d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/diff-match-patch.min.js
@@ -0,0 +1,49 @@
+(function(){function diff_match_patch(){this.Diff_Timeout=1;this.Diff_EditCost=4;this.Match_Threshold=0.5;this.Match_Distance=1E3;this.Patch_DeleteThreshold=0.5;this.Patch_Margin=4;this.Match_MaxBits=32}
+diff_match_patch.prototype.diff_main=function(a,b,c,d){"undefined"==typeof d&&(d=0>=this.Diff_Timeout?Number.MAX_VALUE:(new Date).getTime()+1E3*this.Diff_Timeout);if(null==a||null==b)throw Error("Null input. (diff_main)");if(a==b)return a?[[0,a]]:[];"undefined"==typeof c&&(c=!0);var e=c,f=this.diff_commonPrefix(a,b);c=a.substring(0,f);a=a.substring(f);b=b.substring(f);var f=this.diff_commonSuffix(a,b),g=a.substring(a.length-f);a=a.substring(0,a.length-f);b=b.substring(0,b.length-f);a=this.diff_compute_(a,
+b,e,d);c&&a.unshift([0,c]);g&&a.push([0,g]);this.diff_cleanupMerge(a);return a};
+diff_match_patch.prototype.diff_compute_=function(a,b,c,d){if(!a)return[[1,b]];if(!b)return[[-1,a]];var e=a.length>b.length?a:b,f=a.length>b.length?b:a,g=e.indexOf(f);return-1!=g?(c=[[1,e.substring(0,g)],[0,f],[1,e.substring(g+f.length)]],a.length>b.length&&(c[0][0]=c[2][0]=-1),c):1==f.length?[[-1,a],[1,b]]:(e=this.diff_halfMatch_(a,b))?(f=e[0],a=e[1],g=e[2],b=e[3],e=e[4],f=this.diff_main(f,g,c,d),c=this.diff_main(a,b,c,d),f.concat([[0,e]],c)):c&&100<a.length&&100<b.length?this.diff_lineMode_(a,b,
+d):this.diff_bisect_(a,b,d)};
+diff_match_patch.prototype.diff_lineMode_=function(a,b,c){var d=this.diff_linesToChars_(a,b);a=d.chars1;b=d.chars2;d=d.lineArray;a=this.diff_main(a,b,!1,c);this.diff_charsToLines_(a,d);this.diff_cleanupSemantic(a);a.push([0,""]);for(var e=d=b=0,f="",g="";b<a.length;){switch(a[b][0]){case 1:e++;g+=a[b][1];break;case -1:d++;f+=a[b][1];break;case 0:if(1<=d&&1<=e){a.splice(b-d-e,d+e);b=b-d-e;d=this.diff_main(f,g,!1,c);for(e=d.length-1;0<=e;e--)a.splice(b,0,d[e]);b+=d.length}d=e=0;g=f=""}b++}a.pop();return a};
+diff_match_patch.prototype.diff_bisect_=function(a,b,c){for(var d=a.length,e=b.length,f=Math.ceil((d+e)/2),g=f,h=2*f,j=Array(h),i=Array(h),k=0;k<h;k++)j[k]=-1,i[k]=-1;j[g+1]=0;i[g+1]=0;for(var k=d-e,q=0!=k%2,r=0,t=0,p=0,w=0,v=0;v<f&&!((new Date).getTime()>c);v++){for(var n=-v+r;n<=v-t;n+=2){var l=g+n,m;m=n==-v||n!=v&&j[l-1]<j[l+1]?j[l+1]:j[l-1]+1;for(var s=m-n;m<d&&s<e&&a.charAt(m)==b.charAt(s);)m++,s++;j[l]=m;if(m>d)t+=2;else if(s>e)r+=2;else if(q&&(l=g+k-n,0<=l&&l<h&&-1!=i[l])){var u=d-i[l];if(m>=
+u)return this.diff_bisectSplit_(a,b,m,s,c)}}for(n=-v+p;n<=v-w;n+=2){l=g+n;u=n==-v||n!=v&&i[l-1]<i[l+1]?i[l+1]:i[l-1]+1;for(m=u-n;u<d&&m<e&&a.charAt(d-u-1)==b.charAt(e-m-1);)u++,m++;i[l]=u;if(u>d)w+=2;else if(m>e)p+=2;else if(!q&&(l=g+k-n,0<=l&&(l<h&&-1!=j[l])&&(m=j[l],s=g+m-l,u=d-u,m>=u)))return this.diff_bisectSplit_(a,b,m,s,c)}}return[[-1,a],[1,b]]};
+diff_match_patch.prototype.diff_bisectSplit_=function(a,b,c,d,e){var f=a.substring(0,c),g=b.substring(0,d);a=a.substring(c);b=b.substring(d);f=this.diff_main(f,g,!1,e);e=this.diff_main(a,b,!1,e);return f.concat(e)};
+diff_match_patch.prototype.diff_linesToChars_=function(a,b){function c(a){for(var b="",c=0,f=-1,g=d.length;f<a.length-1;){f=a.indexOf("\n",c);-1==f&&(f=a.length-1);var r=a.substring(c,f+1),c=f+1;(e.hasOwnProperty?e.hasOwnProperty(r):void 0!==e[r])?b+=String.fromCharCode(e[r]):(b+=String.fromCharCode(g),e[r]=g,d[g++]=r)}return b}var d=[],e={};d[0]="";var f=c(a),g=c(b);return{chars1:f,chars2:g,lineArray:d}};
+diff_match_patch.prototype.diff_charsToLines_=function(a,b){for(var c=0;c<a.length;c++){for(var d=a[c][1],e=[],f=0;f<d.length;f++)e[f]=b[d.charCodeAt(f)];a[c][1]=e.join("")}};diff_match_patch.prototype.diff_commonPrefix=function(a,b){if(!a||!b||a.charAt(0)!=b.charAt(0))return 0;for(var c=0,d=Math.min(a.length,b.length),e=d,f=0;c<e;)a.substring(f,e)==b.substring(f,e)?f=c=e:d=e,e=Math.floor((d-c)/2+c);return e};
+diff_match_patch.prototype.diff_commonSuffix=function(a,b){if(!a||!b||a.charAt(a.length-1)!=b.charAt(b.length-1))return 0;for(var c=0,d=Math.min(a.length,b.length),e=d,f=0;c<e;)a.substring(a.length-e,a.length-f)==b.substring(b.length-e,b.length-f)?f=c=e:d=e,e=Math.floor((d-c)/2+c);return e};
+diff_match_patch.prototype.diff_commonOverlap_=function(a,b){var c=a.length,d=b.length;if(0==c||0==d)return 0;c>d?a=a.substring(c-d):c<d&&(b=b.substring(0,c));c=Math.min(c,d);if(a==b)return c;for(var d=0,e=1;;){var f=a.substring(c-e),f=b.indexOf(f);if(-1==f)return d;e+=f;if(0==f||a.substring(c-e)==b.substring(0,e))d=e,e++}};
+diff_match_patch.prototype.diff_halfMatch_=function(a,b){function c(a,b,c){for(var d=a.substring(c,c+Math.floor(a.length/4)),e=-1,g="",h,j,n,l;-1!=(e=b.indexOf(d,e+1));){var m=f.diff_commonPrefix(a.substring(c),b.substring(e)),s=f.diff_commonSuffix(a.substring(0,c),b.substring(0,e));g.length<s+m&&(g=b.substring(e-s,e)+b.substring(e,e+m),h=a.substring(0,c-s),j=a.substring(c+m),n=b.substring(0,e-s),l=b.substring(e+m))}return 2*g.length>=a.length?[h,j,n,l,g]:null}if(0>=this.Diff_Timeout)return null;
+var d=a.length>b.length?a:b,e=a.length>b.length?b:a;if(4>d.length||2*e.length<d.length)return null;var f=this,g=c(d,e,Math.ceil(d.length/4)),d=c(d,e,Math.ceil(d.length/2)),h;if(!g&&!d)return null;h=d?g?g[4].length>d[4].length?g:d:d:g;var j;a.length>b.length?(g=h[0],d=h[1],e=h[2],j=h[3]):(e=h[0],j=h[1],g=h[2],d=h[3]);h=h[4];return[g,d,e,j,h]};
+diff_match_patch.prototype.diff_cleanupSemantic=function(a){for(var b=!1,c=[],d=0,e=null,f=0,g=0,h=0,j=0,i=0;f<a.length;)0==a[f][0]?(c[d++]=f,g=j,h=i,i=j=0,e=a[f][1]):(1==a[f][0]?j+=a[f][1].length:i+=a[f][1].length,e&&(e.length<=Math.max(g,h)&&e.length<=Math.max(j,i))&&(a.splice(c[d-1],0,[-1,e]),a[c[d-1]+1][0]=1,d--,d--,f=0<d?c[d-1]:-1,i=j=h=g=0,e=null,b=!0)),f++;b&&this.diff_cleanupMerge(a);this.diff_cleanupSemanticLossless(a);for(f=1;f<a.length;){if(-1==a[f-1][0]&&1==a[f][0]){b=a[f-1][1];c=a[f][1];
+d=this.diff_commonOverlap_(b,c);e=this.diff_commonOverlap_(c,b);if(d>=e){if(d>=b.length/2||d>=c.length/2)a.splice(f,0,[0,c.substring(0,d)]),a[f-1][1]=b.substring(0,b.length-d),a[f+1][1]=c.substring(d),f++}else if(e>=b.length/2||e>=c.length/2)a.splice(f,0,[0,b.substring(0,e)]),a[f-1][0]=1,a[f-1][1]=c.substring(0,c.length-e),a[f+1][0]=-1,a[f+1][1]=b.substring(e),f++;f++}f++}};
+diff_match_patch.prototype.diff_cleanupSemanticLossless=function(a){function b(a,b){if(!a||!b)return 6;var c=a.charAt(a.length-1),d=b.charAt(0),e=c.match(diff_match_patch.nonAlphaNumericRegex_),f=d.match(diff_match_patch.nonAlphaNumericRegex_),g=e&&c.match(diff_match_patch.whitespaceRegex_),h=f&&d.match(diff_match_patch.whitespaceRegex_),c=g&&c.match(diff_match_patch.linebreakRegex_),d=h&&d.match(diff_match_patch.linebreakRegex_),i=c&&a.match(diff_match_patch.blanklineEndRegex_),j=d&&b.match(diff_match_patch.blanklineStartRegex_);
+return i||j?5:c||d?4:e&&!g&&h?3:g||h?2:e||f?1:0}for(var c=1;c<a.length-1;){if(0==a[c-1][0]&&0==a[c+1][0]){var d=a[c-1][1],e=a[c][1],f=a[c+1][1],g=this.diff_commonSuffix(d,e);if(g)var h=e.substring(e.length-g),d=d.substring(0,d.length-g),e=h+e.substring(0,e.length-g),f=h+f;for(var g=d,h=e,j=f,i=b(d,e)+b(e,f);e.charAt(0)===f.charAt(0);){var d=d+e.charAt(0),e=e.substring(1)+f.charAt(0),f=f.substring(1),k=b(d,e)+b(e,f);k>=i&&(i=k,g=d,h=e,j=f)}a[c-1][1]!=g&&(g?a[c-1][1]=g:(a.splice(c-1,1),c--),a[c][1]=
+h,j?a[c+1][1]=j:(a.splice(c+1,1),c--))}c++}};diff_match_patch.nonAlphaNumericRegex_=/[^a-zA-Z0-9]/;diff_match_patch.whitespaceRegex_=/\s/;diff_match_patch.linebreakRegex_=/[\r\n]/;diff_match_patch.blanklineEndRegex_=/\n\r?\n$/;diff_match_patch.blanklineStartRegex_=/^\r?\n\r?\n/;
+diff_match_patch.prototype.diff_cleanupEfficiency=function(a){for(var b=!1,c=[],d=0,e=null,f=0,g=!1,h=!1,j=!1,i=!1;f<a.length;){if(0==a[f][0])a[f][1].length<this.Diff_EditCost&&(j||i)?(c[d++]=f,g=j,h=i,e=a[f][1]):(d=0,e=null),j=i=!1;else if(-1==a[f][0]?i=!0:j=!0,e&&(g&&h&&j&&i||e.length<this.Diff_EditCost/2&&3==g+h+j+i))a.splice(c[d-1],0,[-1,e]),a[c[d-1]+1][0]=1,d--,e=null,g&&h?(j=i=!0,d=0):(d--,f=0<d?c[d-1]:-1,j=i=!1),b=!0;f++}b&&this.diff_cleanupMerge(a)};
+diff_match_patch.prototype.diff_cleanupMerge=function(a){a.push([0,""]);for(var b=0,c=0,d=0,e="",f="",g;b<a.length;)switch(a[b][0]){case 1:d++;f+=a[b][1];b++;break;case -1:c++;e+=a[b][1];b++;break;case 0:1<c+d?(0!==c&&0!==d&&(g=this.diff_commonPrefix(f,e),0!==g&&(0<b-c-d&&0==a[b-c-d-1][0]?a[b-c-d-1][1]+=f.substring(0,g):(a.splice(0,0,[0,f.substring(0,g)]),b++),f=f.substring(g),e=e.substring(g)),g=this.diff_commonSuffix(f,e),0!==g&&(a[b][1]=f.substring(f.length-g)+a[b][1],f=f.substring(0,f.length-
+g),e=e.substring(0,e.length-g))),0===c?a.splice(b-d,c+d,[1,f]):0===d?a.splice(b-c,c+d,[-1,e]):a.splice(b-c-d,c+d,[-1,e],[1,f]),b=b-c-d+(c?1:0)+(d?1:0)+1):0!==b&&0==a[b-1][0]?(a[b-1][1]+=a[b][1],a.splice(b,1)):b++,c=d=0,f=e=""}""===a[a.length-1][1]&&a.pop();c=!1;for(b=1;b<a.length-1;)0==a[b-1][0]&&0==a[b+1][0]&&(a[b][1].substring(a[b][1].length-a[b-1][1].length)==a[b-1][1]?(a[b][1]=a[b-1][1]+a[b][1].substring(0,a[b][1].length-a[b-1][1].length),a[b+1][1]=a[b-1][1]+a[b+1][1],a.splice(b-1,1),c=!0):a[b][1].substring(0,
+a[b+1][1].length)==a[b+1][1]&&(a[b-1][1]+=a[b+1][1],a[b][1]=a[b][1].substring(a[b+1][1].length)+a[b+1][1],a.splice(b+1,1),c=!0)),b++;c&&this.diff_cleanupMerge(a)};diff_match_patch.prototype.diff_xIndex=function(a,b){var c=0,d=0,e=0,f=0,g;for(g=0;g<a.length;g++){1!==a[g][0]&&(c+=a[g][1].length);-1!==a[g][0]&&(d+=a[g][1].length);if(c>b)break;e=c;f=d}return a.length!=g&&-1===a[g][0]?f:f+(b-e)};
+diff_match_patch.prototype.diff_prettyHtml=function(a){for(var b=[],c=/&/g,d=/</g,e=/>/g,f=/\n/g,g=0;g<a.length;g++){var h=a[g][0],j=a[g][1],j=j.replace(c,"&amp;").replace(d,"&lt;").replace(e,"&gt;").replace(f,"&para;<br>");switch(h){case 1:b[g]='<ins style="background:#e6ffe6;">'+j+"</ins>";break;case -1:b[g]='<del style="background:#ffe6e6;">'+j+"</del>";break;case 0:b[g]="<span>"+j+"</span>"}}return b.join("")};
+diff_match_patch.prototype.diff_text1=function(a){for(var b=[],c=0;c<a.length;c++)1!==a[c][0]&&(b[c]=a[c][1]);return b.join("")};diff_match_patch.prototype.diff_text2=function(a){for(var b=[],c=0;c<a.length;c++)-1!==a[c][0]&&(b[c]=a[c][1]);return b.join("")};diff_match_patch.prototype.diff_levenshtein=function(a){for(var b=0,c=0,d=0,e=0;e<a.length;e++){var f=a[e][0],g=a[e][1];switch(f){case 1:c+=g.length;break;case -1:d+=g.length;break;case 0:b+=Math.max(c,d),d=c=0}}return b+=Math.max(c,d)};
+diff_match_patch.prototype.diff_toDelta=function(a){for(var b=[],c=0;c<a.length;c++)switch(a[c][0]){case 1:b[c]="+"+encodeURI(a[c][1]);break;case -1:b[c]="-"+a[c][1].length;break;case 0:b[c]="="+a[c][1].length}return b.join("\t").replace(/%20/g," ")};
+diff_match_patch.prototype.diff_fromDelta=function(a,b){for(var c=[],d=0,e=0,f=b.split(/\t/g),g=0;g<f.length;g++){var h=f[g].substring(1);switch(f[g].charAt(0)){case "+":try{c[d++]=[1,decodeURI(h)]}catch(j){throw Error("Illegal escape in diff_fromDelta: "+h);}break;case "-":case "=":var i=parseInt(h,10);if(isNaN(i)||0>i)throw Error("Invalid number in diff_fromDelta: "+h);h=a.substring(e,e+=i);"="==f[g].charAt(0)?c[d++]=[0,h]:c[d++]=[-1,h];break;default:if(f[g])throw Error("Invalid diff operation in diff_fromDelta: "+
+f[g]);}}if(e!=a.length)throw Error("Delta length ("+e+") does not equal source text length ("+a.length+").");return c};diff_match_patch.prototype.match_main=function(a,b,c){if(null==a||null==b||null==c)throw Error("Null input. (match_main)");c=Math.max(0,Math.min(c,a.length));return a==b?0:a.length?a.substring(c,c+b.length)==b?c:this.match_bitap_(a,b,c):-1};
+diff_match_patch.prototype.match_bitap_=function(a,b,c){function d(a,d){var e=a/b.length,g=Math.abs(c-d);return!f.Match_Distance?g?1:e:e+g/f.Match_Distance}if(b.length>this.Match_MaxBits)throw Error("Pattern too long for this browser.");var e=this.match_alphabet_(b),f=this,g=this.Match_Threshold,h=a.indexOf(b,c);-1!=h&&(g=Math.min(d(0,h),g),h=a.lastIndexOf(b,c+b.length),-1!=h&&(g=Math.min(d(0,h),g)));for(var j=1<<b.length-1,h=-1,i,k,q=b.length+a.length,r,t=0;t<b.length;t++){i=0;for(k=q;i<k;)d(t,c+
+k)<=g?i=k:q=k,k=Math.floor((q-i)/2+i);q=k;i=Math.max(1,c-k+1);var p=Math.min(c+k,a.length)+b.length;k=Array(p+2);for(k[p+1]=(1<<t)-1;p>=i;p--){var w=e[a.charAt(p-1)];k[p]=0===t?(k[p+1]<<1|1)&w:(k[p+1]<<1|1)&w|((r[p+1]|r[p])<<1|1)|r[p+1];if(k[p]&j&&(w=d(t,p-1),w<=g))if(g=w,h=p-1,h>c)i=Math.max(1,2*c-h);else break}if(d(t+1,c)>g)break;r=k}return h};
+diff_match_patch.prototype.match_alphabet_=function(a){for(var b={},c=0;c<a.length;c++)b[a.charAt(c)]=0;for(c=0;c<a.length;c++)b[a.charAt(c)]|=1<<a.length-c-1;return b};
+diff_match_patch.prototype.patch_addContext_=function(a,b){if(0!=b.length){for(var c=b.substring(a.start2,a.start2+a.length1),d=0;b.indexOf(c)!=b.lastIndexOf(c)&&c.length<this.Match_MaxBits-this.Patch_Margin-this.Patch_Margin;)d+=this.Patch_Margin,c=b.substring(a.start2-d,a.start2+a.length1+d);d+=this.Patch_Margin;(c=b.substring(a.start2-d,a.start2))&&a.diffs.unshift([0,c]);(d=b.substring(a.start2+a.length1,a.start2+a.length1+d))&&a.diffs.push([0,d]);a.start1-=c.length;a.start2-=c.length;a.length1+=
+c.length+d.length;a.length2+=c.length+d.length}};
+diff_match_patch.prototype.patch_make=function(a,b,c){var d;if("string"==typeof a&&"string"==typeof b&&"undefined"==typeof c)d=a,b=this.diff_main(d,b,!0),2<b.length&&(this.diff_cleanupSemantic(b),this.diff_cleanupEfficiency(b));else if(a&&"object"==typeof a&&"undefined"==typeof b&&"undefined"==typeof c)b=a,d=this.diff_text1(b);else if("string"==typeof a&&b&&"object"==typeof b&&"undefined"==typeof c)d=a;else if("string"==typeof a&&"string"==typeof b&&c&&"object"==typeof c)d=a,b=c;else throw Error("Unknown call format to patch_make.");
+if(0===b.length)return[];c=[];a=new diff_match_patch.patch_obj;for(var e=0,f=0,g=0,h=d,j=0;j<b.length;j++){var i=b[j][0],k=b[j][1];!e&&0!==i&&(a.start1=f,a.start2=g);switch(i){case 1:a.diffs[e++]=b[j];a.length2+=k.length;d=d.substring(0,g)+k+d.substring(g);break;case -1:a.length1+=k.length;a.diffs[e++]=b[j];d=d.substring(0,g)+d.substring(g+k.length);break;case 0:k.length<=2*this.Patch_Margin&&e&&b.length!=j+1?(a.diffs[e++]=b[j],a.length1+=k.length,a.length2+=k.length):k.length>=2*this.Patch_Margin&&
+e&&(this.patch_addContext_(a,h),c.push(a),a=new diff_match_patch.patch_obj,e=0,h=d,f=g)}1!==i&&(f+=k.length);-1!==i&&(g+=k.length)}e&&(this.patch_addContext_(a,h),c.push(a));return c};diff_match_patch.prototype.patch_deepCopy=function(a){for(var b=[],c=0;c<a.length;c++){var d=a[c],e=new diff_match_patch.patch_obj;e.diffs=[];for(var f=0;f<d.diffs.length;f++)e.diffs[f]=d.diffs[f].slice();e.start1=d.start1;e.start2=d.start2;e.length1=d.length1;e.length2=d.length2;b[c]=e}return b};
+diff_match_patch.prototype.patch_apply=function(a,b){if(0==a.length)return[b,[]];a=this.patch_deepCopy(a);var c=this.patch_addPadding(a);b=c+b+c;this.patch_splitMax(a);for(var d=0,e=[],f=0;f<a.length;f++){var g=a[f].start2+d,h=this.diff_text1(a[f].diffs),j,i=-1;if(h.length>this.Match_MaxBits){if(j=this.match_main(b,h.substring(0,this.Match_MaxBits),g),-1!=j&&(i=this.match_main(b,h.substring(h.length-this.Match_MaxBits),g+h.length-this.Match_MaxBits),-1==i||j>=i))j=-1}else j=this.match_main(b,h,g);
+if(-1==j)e[f]=!1,d-=a[f].length2-a[f].length1;else if(e[f]=!0,d=j-g,g=-1==i?b.substring(j,j+h.length):b.substring(j,i+this.Match_MaxBits),h==g)b=b.substring(0,j)+this.diff_text2(a[f].diffs)+b.substring(j+h.length);else if(g=this.diff_main(h,g,!1),h.length>this.Match_MaxBits&&this.diff_levenshtein(g)/h.length>this.Patch_DeleteThreshold)e[f]=!1;else{this.diff_cleanupSemanticLossless(g);for(var h=0,k,i=0;i<a[f].diffs.length;i++){var q=a[f].diffs[i];0!==q[0]&&(k=this.diff_xIndex(g,h));1===q[0]?b=b.substring(0,
+j+k)+q[1]+b.substring(j+k):-1===q[0]&&(b=b.substring(0,j+k)+b.substring(j+this.diff_xIndex(g,h+q[1].length)));-1!==q[0]&&(h+=q[1].length)}}}b=b.substring(c.length,b.length-c.length);return[b,e]};
+diff_match_patch.prototype.patch_addPadding=function(a){for(var b=this.Patch_Margin,c="",d=1;d<=b;d++)c+=String.fromCharCode(d);for(d=0;d<a.length;d++)a[d].start1+=b,a[d].start2+=b;var d=a[0],e=d.diffs;if(0==e.length||0!=e[0][0])e.unshift([0,c]),d.start1-=b,d.start2-=b,d.length1+=b,d.length2+=b;else if(b>e[0][1].length){var f=b-e[0][1].length;e[0][1]=c.substring(e[0][1].length)+e[0][1];d.start1-=f;d.start2-=f;d.length1+=f;d.length2+=f}d=a[a.length-1];e=d.diffs;0==e.length||0!=e[e.length-1][0]?(e.push([0,
+c]),d.length1+=b,d.length2+=b):b>e[e.length-1][1].length&&(f=b-e[e.length-1][1].length,e[e.length-1][1]+=c.substring(0,f),d.length1+=f,d.length2+=f);return c};
+diff_match_patch.prototype.patch_splitMax=function(a){for(var b=this.Match_MaxBits,c=0;c<a.length;c++)if(!(a[c].length1<=b)){var d=a[c];a.splice(c--,1);for(var e=d.start1,f=d.start2,g="";0!==d.diffs.length;){var h=new diff_match_patch.patch_obj,j=!0;h.start1=e-g.length;h.start2=f-g.length;""!==g&&(h.length1=h.length2=g.length,h.diffs.push([0,g]));for(;0!==d.diffs.length&&h.length1<b-this.Patch_Margin;){var g=d.diffs[0][0],i=d.diffs[0][1];1===g?(h.length2+=i.length,f+=i.length,h.diffs.push(d.diffs.shift()),
+j=!1):-1===g&&1==h.diffs.length&&0==h.diffs[0][0]&&i.length>2*b?(h.length1+=i.length,e+=i.length,j=!1,h.diffs.push([g,i]),d.diffs.shift()):(i=i.substring(0,b-h.length1-this.Patch_Margin),h.length1+=i.length,e+=i.length,0===g?(h.length2+=i.length,f+=i.length):j=!1,h.diffs.push([g,i]),i==d.diffs[0][1]?d.diffs.shift():d.diffs[0][1]=d.diffs[0][1].substring(i.length))}g=this.diff_text2(h.diffs);g=g.substring(g.length-this.Patch_Margin);i=this.diff_text1(d.diffs).substring(0,this.Patch_Margin);""!==i&&
+(h.length1+=i.length,h.length2+=i.length,0!==h.diffs.length&&0===h.diffs[h.diffs.length-1][0]?h.diffs[h.diffs.length-1][1]+=i:h.diffs.push([0,i]));j||a.splice(++c,0,h)}}};diff_match_patch.prototype.patch_toText=function(a){for(var b=[],c=0;c<a.length;c++)b[c]=a[c];return b.join("")};
+diff_match_patch.prototype.patch_fromText=function(a){var b=[];if(!a)return b;a=a.split("\n");for(var c=0,d=/^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$/;c<a.length;){var e=a[c].match(d);if(!e)throw Error("Invalid patch string: "+a[c]);var f=new diff_match_patch.patch_obj;b.push(f);f.start1=parseInt(e[1],10);""===e[2]?(f.start1--,f.length1=1):"0"==e[2]?f.length1=0:(f.start1--,f.length1=parseInt(e[2],10));f.start2=parseInt(e[3],10);""===e[4]?(f.start2--,f.length2=1):"0"==e[4]?f.length2=0:(f.start2--,f.length2=
+parseInt(e[4],10));for(c++;c<a.length;){e=a[c].charAt(0);try{var g=decodeURI(a[c].substring(1))}catch(h){throw Error("Illegal escape in patch_fromText: "+g);}if("-"==e)f.diffs.push([-1,g]);else if("+"==e)f.diffs.push([1,g]);else if(" "==e)f.diffs.push([0,g]);else if("@"==e)break;else if(""!==e)throw Error('Invalid patch mode "'+e+'" in: '+g);c++}}return b};diff_match_patch.patch_obj=function(){this.diffs=[];this.start2=this.start1=null;this.length2=this.length1=0};
+diff_match_patch.patch_obj.prototype.toString=function(){var a,b;a=0===this.length1?this.start1+",0":1==this.length1?this.start1+1:this.start1+1+","+this.length1;b=0===this.length2?this.start2+",0":1==this.length2?this.start2+1:this.start2+1+","+this.length2;a=["@@ -"+a+" +"+b+" @@\n"];var c;for(b=0;b<this.diffs.length;b++){switch(this.diffs[b][0]){case 1:c="+";break;case -1:c="-";break;case 0:c=" "}a[b+1]=c+encodeURI(this.diffs[b][1])+"\n"}return a.join("").replace(/%20/g," ")};
+this.diff_match_patch=diff_match_patch;this.DIFF_DELETE=-1;this.DIFF_INSERT=1;this.DIFF_EQUAL=0;})() \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery-2_1_0.min.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery-2_1_0.min.js
new file mode 100644
index 00000000000..2adda35a5b1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery-2_1_0.min.js
@@ -0,0 +1,4 @@
+/*! jQuery v2.1.0 | (c) 2005, 2014 jQuery Foundation, Inc. | jquery.org/license */
+!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k="".trim,l={},m=a.document,n="2.1.0",o=function(a,b){return new o.fn.init(a,b)},p=/^-ms-/,q=/-([\da-z])/gi,r=function(a,b){return b.toUpperCase()};o.fn=o.prototype={jquery:n,constructor:o,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=o.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return o.each(this,a,b)},map:function(a){return this.pushStack(o.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},o.extend=o.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||o.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(a=arguments[h]))for(b in a)c=g[b],d=a[b],g!==d&&(j&&d&&(o.isPlainObject(d)||(e=o.isArray(d)))?(e?(e=!1,f=c&&o.isArray(c)?c:[]):f=c&&o.isPlainObject(c)?c:{},g[b]=o.extend(j,f,d)):void 0!==d&&(g[b]=d));return g},o.extend({expando:"jQuery"+(n+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===o.type(a)},isArray:Array.isArray,isWindow:function(a){return null!=a&&a===a.window},isNumeric:function(a){return a-parseFloat(a)>=0},isPlainObject:function(a){if("object"!==o.type(a)||a.nodeType||o.isWindow(a))return!1;try{if(a.constructor&&!j.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(b){return!1}return!0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(a){var b,c=eval;a=o.trim(a),a&&(1===a.indexOf("use strict")?(b=m.createElement("script"),b.text=a,m.head.appendChild(b).parentNode.removeChild(b)):c(a))},camelCase:function(a){return a.replace(p,"ms-").replace(q,r)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=s(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":k.call(a)},makeArray:function(a,b){var c=b||[];return null!=a&&(s(Object(a))?o.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){return null==b?-1:g.call(b,a,c)},merge:function(a,b){for(var c=+b.length,d=0,e=a.length;c>d;d++)a[e++]=b[d];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=s(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in 
a)d=b(a[f],f,c),null!=d&&i.push(d);return e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(c=a[b],b=a,a=c),o.isFunction(a)?(e=d.call(arguments,2),f=function(){return a.apply(b||this,e.concat(d.call(arguments)))},f.guid=a.guid=a.guid||o.guid++,f):void 0},now:Date.now,support:l}),o.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function s(a){var b=a.length,c=o.type(a);return"function"===c||o.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var t=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s="sizzle"+-new Date,t=a.document,u=0,v=0,w=eb(),x=eb(),y=eb(),z=function(a,b){return a===b&&(j=!0),0},A="undefined",B=1<<31,C={}.hasOwnProperty,D=[],E=D.pop,F=D.push,G=D.push,H=D.slice,I=D.indexOf||function(a){for(var b=0,c=this.length;c>b;b++)if(this[b]===a)return b;return-1},J="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",K="[\\x20\\t\\r\\n\\f]",L="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",M=L.replace("w","w#"),N="\\["+K+"*("+L+")"+K+"*(?:([*^$|!~]?=)"+K+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+M+")|)|)"+K+"*\\]",O=":("+L+")(?:\\(((['\"])((?:\\\\.|[^\\\\])*?)\\3|((?:\\\\.|[^\\\\()[\\]]|"+N.replace(3,8)+")*)|.*)\\)|)",P=new RegExp("^"+K+"+|((?:^|[^\\\\])(?:\\\\.)*)"+K+"+$","g"),Q=new RegExp("^"+K+"*,"+K+"*"),R=new RegExp("^"+K+"*([>+~]|"+K+")"+K+"*"),S=new RegExp("="+K+"*([^\\]'\"]*?)"+K+"*\\]","g"),T=new RegExp(O),U=new RegExp("^"+M+"$"),V={ID:new RegExp("^#("+L+")"),CLASS:new RegExp("^\\.("+L+")"),TAG:new RegExp("^("+L.replace("w","w*")+")"),ATTR:new RegExp("^"+N),PSEUDO:new RegExp("^"+O),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+K+"*(even|odd|(([+-]|)(\\d*)n|)"+K+"*(?:([+-]|)"+K+"*(\\d+)|))"+K+"*\\)|)","i"),bool:new RegExp("^(?:"+J+")$","i"),needsContext:new RegExp("^"+K+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+K+"*((?:-\\d)?\\d*)"+K+"*\\)|)(?=[^-]|$)","i")},W=/^(?:input|select|textarea|button)$/i,X=/^h\d$/i,Y=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,$=/[+~]/,_=/'|\\/g,ab=new RegExp("\\\\([\\da-f]{1,6}"+K+"?|("+K+")|.)","ig"),bb=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)};try{G.apply(D=H.call(t.childNodes),t.childNodes),D[t.childNodes.length].nodeType}catch(cb){G={apply:D.length?function(a,b){F.apply(a,H.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function db(a,b,d,e){var f,g,h,i,j,m,p,q,u,v;if((b?b.ownerDocument||b:t)!==l&&k(b),b=b||l,d=d||[],!a||"string"!=typeof a)return d;if(1!==(i=b.nodeType)&&9!==i)return[];if(n&&!e){if(f=Z.exec(a))if(h=f[1]){if(9===i){if(g=b.getElementById(h),!g||!g.parentNode)return d;if(g.id===h)return d.push(g),d}else if(b.ownerDocument&&(g=b.ownerDocument.getElementById(h))&&r(b,g)&&g.id===h)return d.push(g),d}else{if(f[2])return G.apply(d,b.getElementsByTagName(a)),d;if((h=f[3])&&c.getElementsByClassName&&b.getElementsByClassName)return G.apply(d,b.getElementsByClassName(h)),d}if(c.qsa&&(!o||!o.test(a))){if(q=p=s,u=b,v=9===i&&a,1===i&&"object"!==b.nodeName.toLowerCase()){m=ob(a),(p=b.getAttribute("id"))?q=p.replace(_,"\\$&"):b.setAttribute("id",q),q="[id='"+q+"'] ",j=m.length;while(j--)m[j]=q+pb(m[j]);u=$.test(a)&&mb(b.parentNode)||b,v=m.join(",")}if(v)try{return G.apply(d,u.querySelectorAll(v)),d}catch(w){}finally{p||b.removeAttribute("id")}}}return 
xb(a.replace(P,"$1"),b,d,e)}function eb(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function fb(a){return a[s]=!0,a}function gb(a){var b=l.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function hb(a,b){var c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function ib(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||B)-(~a.sourceIndex||B);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function jb(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function kb(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function lb(a){return fb(function(b){return b=+b,fb(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function mb(a){return a&&typeof a.getElementsByTagName!==A&&a}c=db.support={},f=db.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},k=db.setDocument=function(a){var b,e=a?a.ownerDocument||a:t,g=e.defaultView;return e!==l&&9===e.nodeType&&e.documentElement?(l=e,m=e.documentElement,n=!f(e),g&&g!==g.top&&(g.addEventListener?g.addEventListener("unload",function(){k()},!1):g.attachEvent&&g.attachEvent("onunload",function(){k()})),c.attributes=gb(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=gb(function(a){return a.appendChild(e.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=Y.test(e.getElementsByClassName)&&gb(function(a){return a.innerHTML="<div class='a'></div><div class='a i'></div>",a.firstChild.className="i",2===a.getElementsByClassName("i").length}),c.getById=gb(function(a){return m.appendChild(a).id=s,!e.getElementsByName||!e.getElementsByName(s).length}),c.getById?(d.find.ID=function(a,b){if(typeof b.getElementById!==A&&n){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(ab,bb);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(ab,bb);return function(a){var c=typeof a.getAttributeNode!==A&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return typeof b.getElementsByTagName!==A?b.getElementsByTagName(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return typeof b.getElementsByClassName!==A&&n?b.getElementsByClassName(a):void 0},p=[],o=[],(c.qsa=Y.test(e.querySelectorAll))&&(gb(function(a){a.innerHTML="<select t=''><option selected=''></option></select>",a.querySelectorAll("[t^='']").length&&o.push("[*^$]="+K+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||o.push("\\["+K+"*(?:value|"+J+")"),a.querySelectorAll(":checked").length||o.push(":checked")}),gb(function(a){var 
b=e.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&o.push("name"+K+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||o.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),o.push(",.*:")})),(c.matchesSelector=Y.test(q=m.webkitMatchesSelector||m.mozMatchesSelector||m.oMatchesSelector||m.msMatchesSelector))&&gb(function(a){c.disconnectedMatch=q.call(a,"div"),q.call(a,"[s!='']:x"),p.push("!=",O)}),o=o.length&&new RegExp(o.join("|")),p=p.length&&new RegExp(p.join("|")),b=Y.test(m.compareDocumentPosition),r=b||Y.test(m.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},z=b?function(a,b){if(a===b)return j=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===e||a.ownerDocument===t&&r(t,a)?-1:b===e||b.ownerDocument===t&&r(t,b)?1:i?I.call(i,a)-I.call(i,b):0:4&d?-1:1)}:function(a,b){if(a===b)return j=!0,0;var c,d=0,f=a.parentNode,g=b.parentNode,h=[a],k=[b];if(!f||!g)return a===e?-1:b===e?1:f?-1:g?1:i?I.call(i,a)-I.call(i,b):0;if(f===g)return ib(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)k.unshift(c);while(h[d]===k[d])d++;return d?ib(h[d],k[d]):h[d]===t?-1:k[d]===t?1:0},e):l},db.matches=function(a,b){return db(a,null,null,b)},db.matchesSelector=function(a,b){if((a.ownerDocument||a)!==l&&k(a),b=b.replace(S,"='$1']"),!(!c.matchesSelector||!n||p&&p.test(b)||o&&o.test(b)))try{var d=q.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return db(b,l,null,[a]).length>0},db.contains=function(a,b){return(a.ownerDocument||a)!==l&&k(a),r(a,b)},db.attr=function(a,b){(a.ownerDocument||a)!==l&&k(a);var e=d.attrHandle[b.toLowerCase()],f=e&&C.call(d.attrHandle,b.toLowerCase())?e(a,b,!n):void 0;return void 0!==f?f:c.attributes||!n?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},db.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},db.uniqueSort=function(a){var b,d=[],e=0,f=0;if(j=!c.detectDuplicates,i=!c.sortStable&&a.slice(0),a.sort(z),j){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return i=null,a},e=db.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=db.selectors={cacheLength:50,createPseudo:fb,match:V,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(ab,bb),a[3]=(a[4]||a[5]||"").replace(ab,bb),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||db.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&db.error(a[0]),a},PSEUDO:function(a){var b,c=!a[5]&&a[2];return V.CHILD.test(a[0])?null:(a[3]&&void 
0!==a[4]?a[2]=a[4]:c&&T.test(c)&&(b=ob(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(ab,bb).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=w[a+" "];return b||(b=new RegExp("(^|"+K+")"+a+"("+K+"|$)"))&&w(a,function(a){return b.test("string"==typeof a.className&&a.className||typeof a.getAttribute!==A&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=db.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),t=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&t){k=q[s]||(q[s]={}),j=k[a]||[],n=j[0]===u&&j[1],m=j[0]===u&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[u,n,m];break}}else if(t&&(j=(b[s]||(b[s]={}))[a])&&j[0]===u)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(t&&((l[s]||(l[s]={}))[a]=[u,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||db.error("unsupported pseudo: "+a);return e[s]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?fb(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=I.call(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:fb(function(a){var b=[],c=[],d=g(a.replace(P,"$1"));return d[s]?fb(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),!c.pop()}}),has:fb(function(a){return function(b){return db(a,b).length>0}}),contains:fb(function(a){return function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:fb(function(a){return U.test(a||"")||db.error("unsupported lang: "+a),a=a.replace(ab,bb).toLowerCase(),function(b){var c;do if(c=n?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===m},focus:function(a){return a===l.activeElement&&(!l.hasFocus||l.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return X.test(a.nodeName)},input:function(a){return W.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var 
b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:lb(function(){return[0]}),last:lb(function(a,b){return[b-1]}),eq:lb(function(a,b,c){return[0>c?c+b:c]}),even:lb(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:lb(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:lb(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:lb(function(a,b,c){for(var d=0>c?c+b:c;++d<b;)a.push(d);return a})}},d.pseudos.nth=d.pseudos.eq;for(b in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})d.pseudos[b]=jb(b);for(b in{submit:!0,reset:!0})d.pseudos[b]=kb(b);function nb(){}nb.prototype=d.filters=d.pseudos,d.setFilters=new nb;function ob(a,b){var c,e,f,g,h,i,j,k=x[a+" "];if(k)return b?0:k.slice(0);h=a,i=[],j=d.preFilter;while(h){(!c||(e=Q.exec(h)))&&(e&&(h=h.slice(e[0].length)||h),i.push(f=[])),c=!1,(e=R.exec(h))&&(c=e.shift(),f.push({value:c,type:e[0].replace(P," ")}),h=h.slice(c.length));for(g in d.filter)!(e=V[g].exec(h))||j[g]&&!(e=j[g](e))||(c=e.shift(),f.push({value:c,type:g,matches:e}),h=h.slice(c.length));if(!c)break}return b?h.length:h?db.error(a):x(a,i).slice(0)}function pb(a){for(var b=0,c=a.length,d="";c>b;b++)d+=a[b].value;return d}function qb(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=v++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[u,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[s]||(b[s]={}),(h=i[d])&&h[0]===u&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function rb(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function sb(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function tb(a,b,c,d,e,f){return d&&!d[s]&&(d=tb(d)),e&&!e[s]&&(e=tb(e,f)),fb(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||wb(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:sb(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=sb(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?I.call(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=sb(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):G.apply(g,r)})}function ub(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],i=g||d.relative[" "],j=g?1:0,k=qb(function(a){return a===b},i,!0),l=qb(function(a){return I.call(b,a)>-1},i,!0),m=[function(a,c,d){return!g&&(d||c!==h)||((b=c).nodeType?k(a,c,d):l(a,c,d))}];f>j;j++)if(c=d.relative[a[j].type])m=[qb(rb(m),c)];else{if(c=d.filter[a[j].type].apply(null,a[j].matches),c[s]){for(e=++j;f>e;e++)if(d.relative[a[e].type])break;return tb(j>1&&rb(m),j>1&&pb(a.slice(0,j-1).concat({value:" "===a[j-2].type?"*":""})).replace(P,"$1"),c,e>j&&ub(a.slice(j,e)),f>e&&ub(a=a.slice(e)),f>e&&pb(a))}m.push(c)}return rb(m)}function vb(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,i,j,k){var m,n,o,p=0,q="0",r=f&&[],s=[],t=h,v=f||e&&d.find.TAG("*",k),w=u+=null==t?1:Math.random()||.1,x=v.length;for(k&&(h=g!==l&&g);q!==x&&null!=(m=v[q]);q++){if(e&&m){n=0;while(o=a[n++])if(o(m,g,i)){j.push(m);break}k&&(u=w)}c&&((m=!o&&m)&&p--,f&&r.push(m))}if(p+=q,c&&q!==p){n=0;while(o=b[n++])o(r,s,g,i);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=E.call(j));s=sb(s)}G.apply(j,s),k&&!f&&s.length>0&&p+b.length>1&&db.uniqueSort(j)}return k&&(u=w,h=t),r};return 
c?fb(f):f}g=db.compile=function(a,b){var c,d=[],e=[],f=y[a+" "];if(!f){b||(b=ob(a)),c=b.length;while(c--)f=ub(b[c]),f[s]?d.push(f):e.push(f);f=y(a,vb(e,d))}return f};function wb(a,b,c){for(var d=0,e=b.length;e>d;d++)db(a,b[d],c);return c}function xb(a,b,e,f){var h,i,j,k,l,m=ob(a);if(!f&&1===m.length){if(i=m[0]=m[0].slice(0),i.length>2&&"ID"===(j=i[0]).type&&c.getById&&9===b.nodeType&&n&&d.relative[i[1].type]){if(b=(d.find.ID(j.matches[0].replace(ab,bb),b)||[])[0],!b)return e;a=a.slice(i.shift().value.length)}h=V.needsContext.test(a)?0:i.length;while(h--){if(j=i[h],d.relative[k=j.type])break;if((l=d.find[k])&&(f=l(j.matches[0].replace(ab,bb),$.test(i[0].type)&&mb(b.parentNode)||b))){if(i.splice(h,1),a=f.length&&pb(i),!a)return G.apply(e,f),e;break}}}return g(a,m)(f,b,!n,e,$.test(a)&&mb(b.parentNode)||b),e}return c.sortStable=s.split("").sort(z).join("")===s,c.detectDuplicates=!!j,k(),c.sortDetached=gb(function(a){return 1&a.compareDocumentPosition(l.createElement("div"))}),gb(function(a){return a.innerHTML="<a href='#'></a>","#"===a.firstChild.getAttribute("href")})||hb("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&gb(function(a){return a.innerHTML="<input/>",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||hb("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),gb(function(a){return null==a.getAttribute("disabled")})||hb(J,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),db}(a);o.find=t,o.expr=t.selectors,o.expr[":"]=o.expr.pseudos,o.unique=t.uniqueSort,o.text=t.getText,o.isXMLDoc=t.isXML,o.contains=t.contains;var u=o.expr.match.needsContext,v=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,w=/^.[^:#\[\.,]*$/;function x(a,b,c){if(o.isFunction(b))return o.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return o.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(w.test(b))return o.filter(b,a,c);b=o.filter(b,a)}return o.grep(a,function(a){return g.call(b,a)>=0!==c})}o.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?o.find.matchesSelector(d,a)?[d]:[]:o.find.matches(a,o.grep(b,function(a){return 1===a.nodeType}))},o.fn.extend({find:function(a){var b,c=this.length,d=[],e=this;if("string"!=typeof a)return this.pushStack(o(a).filter(function(){for(b=0;c>b;b++)if(o.contains(e[b],this))return!0}));for(b=0;c>b;b++)o.find(a,e[b],d);return d=this.pushStack(c>1?o.unique(d):d),d.selector=this.selector?this.selector+" "+a:a,d},filter:function(a){return this.pushStack(x(this,a||[],!1))},not:function(a){return this.pushStack(x(this,a||[],!0))},is:function(a){return!!x(this,"string"==typeof a&&u.test(a)?o(a):a||[],!1).length}});var y,z=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,A=o.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof a){if(c="<"===a[0]&&">"===a[a.length-1]&&a.length>=3?[null,a,null]:z.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||y).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof o?b[0]:b,o.merge(this,o.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:m,!0)),v.test(c[1])&&o.isPlainObject(b))for(c in b)o.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}return d=m.getElementById(c[2]),d&&d.parentNode&&(this.length=1,this[0]=d),this.context=m,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):o.isFunction(a)?"undefined"!=typeof y.ready?y.ready(a):a(o):(void 
0!==a.selector&&(this.selector=a.selector,this.context=a.context),o.makeArray(a,this))};A.prototype=o.fn,y=o(m);var B=/^(?:parents|prev(?:Until|All))/,C={children:!0,contents:!0,next:!0,prev:!0};o.extend({dir:function(a,b,c){var d=[],e=void 0!==c;while((a=a[b])&&9!==a.nodeType)if(1===a.nodeType){if(e&&o(a).is(c))break;d.push(a)}return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),o.fn.extend({has:function(a){var b=o(a,this),c=b.length;return this.filter(function(){for(var a=0;c>a;a++)if(o.contains(this,b[a]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=u.test(a)||"string"!=typeof a?o(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&o.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?o.unique(f):f)},index:function(a){return a?"string"==typeof a?g.call(o(a),this[0]):g.call(this,a.jquery?a[0]:a):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(o.unique(o.merge(this.get(),o(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function D(a,b){while((a=a[b])&&1!==a.nodeType);return a}o.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return o.dir(a,"parentNode")},parentsUntil:function(a,b,c){return o.dir(a,"parentNode",c)},next:function(a){return D(a,"nextSibling")},prev:function(a){return D(a,"previousSibling")},nextAll:function(a){return o.dir(a,"nextSibling")},prevAll:function(a){return o.dir(a,"previousSibling")},nextUntil:function(a,b,c){return o.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return o.dir(a,"previousSibling",c)},siblings:function(a){return o.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return o.sibling(a.firstChild)},contents:function(a){return a.contentDocument||o.merge([],a.childNodes)}},function(a,b){o.fn[a]=function(c,d){var e=o.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=o.filter(d,e)),this.length>1&&(C[a]||o.unique(e),B.test(a)&&e.reverse()),this.pushStack(e)}});var E=/\S+/g,F={};function G(a){var b=F[a]={};return o.each(a.match(E)||[],function(a,c){b[c]=!0}),b}o.Callbacks=function(a){a="string"==typeof a?F[a]||G(a):o.extend({},a);var b,c,d,e,f,g,h=[],i=!a.once&&[],j=function(l){for(b=a.memory&&l,c=!0,g=e||0,e=0,f=h.length,d=!0;h&&f>g;g++)if(h[g].apply(l[0],l[1])===!1&&a.stopOnFalse){b=!1;break}d=!1,h&&(i?i.length&&j(i.shift()):b?h=[]:k.disable())},k={add:function(){if(h){var c=h.length;!function g(b){o.each(b,function(b,c){var d=o.type(c);"function"===d?a.unique&&k.has(c)||h.push(c):c&&c.length&&"string"!==d&&g(c)})}(arguments),d?f=h.length:b&&(e=c,j(b))}return this},remove:function(){return h&&o.each(arguments,function(a,b){var c;while((c=o.inArray(b,h,c))>-1)h.splice(c,1),d&&(f>=c&&f--,g>=c&&g--)}),this},has:function(a){return a?o.inArray(a,h)>-1:!(!h||!h.length)},empty:function(){return h=[],f=0,this},disable:function(){return h=i=b=void 0,this},disabled:function(){return!h},lock:function(){return i=void 0,b||k.disable(),this},locked:function(){return!i},fireWith:function(a,b){return!h||c&&!i||(b=b||[],b=[a,b.slice?b.slice():b],d?i.push(b):j(b)),this},fire:function(){return k.fireWith(this,arguments),this},fired:function(){return!!c}};return k},o.extend({Deferred:function(a){var b=[["resolve","done",o.Callbacks("once memory"),"resolved"],["reject","fail",o.Callbacks("once 
memory"),"rejected"],["notify","progress",o.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return o.Deferred(function(c){o.each(b,function(b,f){var g=o.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&o.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?o.extend(a,d):d}},e={};return d.pipe=d.then,o.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=d.call(arguments),e=c.length,f=1!==e||a&&o.isFunction(a.promise)?e:0,g=1===f?a:o.Deferred(),h=function(a,b,c){return function(e){b[a]=this,c[a]=arguments.length>1?d.call(arguments):e,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(e>1)for(i=new Array(e),j=new Array(e),k=new Array(e);e>b;b++)c[b]&&o.isFunction(c[b].promise)?c[b].promise().done(h(b,k,c)).fail(g.reject).progress(h(b,j,i)):--f;return f||g.resolveWith(k,c),g.promise()}});var H;o.fn.ready=function(a){return o.ready.promise().done(a),this},o.extend({isReady:!1,readyWait:1,holdReady:function(a){a?o.readyWait++:o.ready(!0)},ready:function(a){(a===!0?--o.readyWait:o.isReady)||(o.isReady=!0,a!==!0&&--o.readyWait>0||(H.resolveWith(m,[o]),o.fn.trigger&&o(m).trigger("ready").off("ready")))}});function I(){m.removeEventListener("DOMContentLoaded",I,!1),a.removeEventListener("load",I,!1),o.ready()}o.ready.promise=function(b){return H||(H=o.Deferred(),"complete"===m.readyState?setTimeout(o.ready):(m.addEventListener("DOMContentLoaded",I,!1),a.addEventListener("load",I,!1))),H.promise(b)},o.ready.promise();var J=o.access=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===o.type(c)){e=!0;for(h in c)o.access(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,o.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(o(a),c)})),b))for(;i>h;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f};o.acceptData=function(a){return 1===a.nodeType||9===a.nodeType||!+a.nodeType};function K(){Object.defineProperty(this.cache={},0,{get:function(){return{}}}),this.expando=o.expando+Math.random()}K.uid=1,K.accepts=o.acceptData,K.prototype={key:function(a){if(!K.accepts(a))return 0;var b={},c=a[this.expando];if(!c){c=K.uid++;try{b[this.expando]={value:c},Object.defineProperties(a,b)}catch(d){b[this.expando]=c,o.extend(a,b)}}return this.cache[c]||(this.cache[c]={}),c},set:function(a,b,c){var d,e=this.key(a),f=this.cache[e];if("string"==typeof b)f[b]=c;else if(o.isEmptyObject(f))o.extend(this.cache[e],b);else for(d in b)f[d]=b[d];return f},get:function(a,b){var c=this.cache[this.key(a)];return void 0===b?c:c[b]},access:function(a,b,c){var d;return void 0===b||b&&"string"==typeof b&&void 0===c?(d=this.get(a,b),void 0!==d?d:this.get(a,o.camelCase(b))):(this.set(a,b,c),void 0!==c?c:b)},remove:function(a,b){var c,d,e,f=this.key(a),g=this.cache[f];if(void 0===b)this.cache[f]={};else{o.isArray(b)?d=b.concat(b.map(o.camelCase)):(e=o.camelCase(b),b in g?d=[b,e]:(d=e,d=d in g?[d]:d.match(E)||[])),c=d.length;while(c--)delete 
g[d[c]]}},hasData:function(a){return!o.isEmptyObject(this.cache[a[this.expando]]||{})},discard:function(a){a[this.expando]&&delete this.cache[a[this.expando]]}};var L=new K,M=new K,N=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,O=/([A-Z])/g;function P(a,b,c){var d;if(void 0===c&&1===a.nodeType)if(d="data-"+b.replace(O,"-$1").toLowerCase(),c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:N.test(c)?o.parseJSON(c):c}catch(e){}M.set(a,b,c)}else c=void 0;return c}o.extend({hasData:function(a){return M.hasData(a)||L.hasData(a)},data:function(a,b,c){return M.access(a,b,c)},removeData:function(a,b){M.remove(a,b)},_data:function(a,b,c){return L.access(a,b,c)},_removeData:function(a,b){L.remove(a,b)}}),o.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=M.get(f),1===f.nodeType&&!L.get(f,"hasDataAttrs"))){c=g.length;
+while(c--)d=g[c].name,0===d.indexOf("data-")&&(d=o.camelCase(d.slice(5)),P(f,d,e[d]));L.set(f,"hasDataAttrs",!0)}return e}return"object"==typeof a?this.each(function(){M.set(this,a)}):J(this,function(b){var c,d=o.camelCase(a);if(f&&void 0===b){if(c=M.get(f,a),void 0!==c)return c;if(c=M.get(f,d),void 0!==c)return c;if(c=P(f,d,void 0),void 0!==c)return c}else this.each(function(){var c=M.get(this,d);M.set(this,d,b),-1!==a.indexOf("-")&&void 0!==c&&M.set(this,a,b)})},null,b,arguments.length>1,null,!0)},removeData:function(a){return this.each(function(){M.remove(this,a)})}}),o.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=L.get(a,b),c&&(!d||o.isArray(c)?d=L.access(a,b,o.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=o.queue(a,b),d=c.length,e=c.shift(),f=o._queueHooks(a,b),g=function(){o.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return L.get(a,c)||L.access(a,c,{empty:o.Callbacks("once memory").add(function(){L.remove(a,[b+"queue",c])})})}}),o.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length<c?o.queue(this[0],a):void 0===b?this:this.each(function(){var c=o.queue(this,a,b);o._queueHooks(this,a),"fx"===a&&"inprogress"!==c[0]&&o.dequeue(this,a)})},dequeue:function(a){return this.each(function(){o.dequeue(this,a)})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,b){var c,d=1,e=o.Deferred(),f=this,g=this.length,h=function(){--d||e.resolveWith(f,[f])};"string"!=typeof a&&(b=a,a=void 0),a=a||"fx";while(g--)c=L.get(f[g],a+"queueHooks"),c&&c.empty&&(d++,c.empty.add(h));return h(),e.promise(b)}});var Q=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,R=["Top","Right","Bottom","Left"],S=function(a,b){return a=b||a,"none"===o.css(a,"display")||!o.contains(a.ownerDocument,a)},T=/^(?:checkbox|radio)$/i;!function(){var a=m.createDocumentFragment(),b=a.appendChild(m.createElement("div"));b.innerHTML="<input type='radio' checked='checked' name='t'/>",l.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,b.innerHTML="<textarea>x</textarea>",l.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue}();var U="undefined";l.focusinBubbles="onfocusin"in a;var V=/^key/,W=/^(?:mouse|contextmenu)|click/,X=/^(?:focusinfocus|focusoutblur)$/,Y=/^([^.]*)(?:\.(.+)|)$/;function Z(){return!0}function $(){return!1}function _(){try{return m.activeElement}catch(a){}}o.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,n,p,q,r=L.get(a);if(r){c.handler&&(f=c,c=f.handler,e=f.selector),c.guid||(c.guid=o.guid++),(i=r.events)||(i=r.events={}),(g=r.handle)||(g=r.handle=function(b){return typeof o!==U&&o.event.triggered!==b.type?o.event.dispatch.apply(a,arguments):void 0}),b=(b||"").match(E)||[""],j=b.length;while(j--)h=Y.exec(b[j])||[],n=q=h[1],p=(h[2]||"").split(".").sort(),n&&(l=o.event.special[n]||{},n=(e?l.delegateType:l.bindType)||n,l=o.event.special[n]||{},k=o.extend({type:n,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&o.expr.match.needsContext.test(e),namespace:p.join(".")},f),(m=i[n])||(m=i[n]=[],m.delegateCount=0,l.setup&&l.setup.call(a,d,p,g)!==!1||a.addEventListener&&a.addEventListener(n,g,!1)),l.add&&(l.add.call(a,k),k.handler.guid||(k.handler.guid=c.guid)),e?m.splice(m.delegateCount++,0,k):m.push(k),o.event.global[n]=!0)}},remove:function(a,b,c,d,e){var 
f,g,h,i,j,k,l,m,n,p,q,r=L.hasData(a)&&L.get(a);if(r&&(i=r.events)){b=(b||"").match(E)||[""],j=b.length;while(j--)if(h=Y.exec(b[j])||[],n=q=h[1],p=(h[2]||"").split(".").sort(),n){l=o.event.special[n]||{},n=(d?l.delegateType:l.bindType)||n,m=i[n]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),g=f=m.length;while(f--)k=m[f],!e&&q!==k.origType||c&&c.guid!==k.guid||h&&!h.test(k.namespace)||d&&d!==k.selector&&("**"!==d||!k.selector)||(m.splice(f,1),k.selector&&m.delegateCount--,l.remove&&l.remove.call(a,k));g&&!m.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||o.removeEvent(a,n,r.handle),delete i[n])}else for(n in i)o.event.remove(a,n+b[j],c,d,!0);o.isEmptyObject(i)&&(delete r.handle,L.remove(a,"events"))}},trigger:function(b,c,d,e){var f,g,h,i,k,l,n,p=[d||m],q=j.call(b,"type")?b.type:b,r=j.call(b,"namespace")?b.namespace.split("."):[];if(g=h=d=d||m,3!==d.nodeType&&8!==d.nodeType&&!X.test(q+o.event.triggered)&&(q.indexOf(".")>=0&&(r=q.split("."),q=r.shift(),r.sort()),k=q.indexOf(":")<0&&"on"+q,b=b[o.expando]?b:new o.Event(q,"object"==typeof b&&b),b.isTrigger=e?2:3,b.namespace=r.join("."),b.namespace_re=b.namespace?new RegExp("(^|\\.)"+r.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 0,b.target||(b.target=d),c=null==c?[b]:o.makeArray(c,[b]),n=o.event.special[q]||{},e||!n.trigger||n.trigger.apply(d,c)!==!1)){if(!e&&!n.noBubble&&!o.isWindow(d)){for(i=n.delegateType||q,X.test(i+q)||(g=g.parentNode);g;g=g.parentNode)p.push(g),h=g;h===(d.ownerDocument||m)&&p.push(h.defaultView||h.parentWindow||a)}f=0;while((g=p[f++])&&!b.isPropagationStopped())b.type=f>1?i:n.bindType||q,l=(L.get(g,"events")||{})[b.type]&&L.get(g,"handle"),l&&l.apply(g,c),l=k&&g[k],l&&l.apply&&o.acceptData(g)&&(b.result=l.apply(g,c),b.result===!1&&b.preventDefault());return b.type=q,e||b.isDefaultPrevented()||n._default&&n._default.apply(p.pop(),c)!==!1||!o.acceptData(d)||k&&o.isFunction(d[q])&&!o.isWindow(d)&&(h=d[k],h&&(d[k]=null),o.event.triggered=q,d[q](),o.event.triggered=void 0,h&&(d[k]=h)),b.result}},dispatch:function(a){a=o.event.fix(a);var b,c,e,f,g,h=[],i=d.call(arguments),j=(L.get(this,"events")||{})[a.type]||[],k=o.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=o.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,c=0;while((g=f.handlers[c++])&&!a.isImmediatePropagationStopped())(!a.namespace_re||a.namespace_re.test(g.namespace))&&(a.handleObj=g,a.data=g.data,e=((o.event.special[g.origType]||{}).handle||g.handler).apply(f.elem,i),void 0!==e&&(a.result=e)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&(!a.button||"click"!==a.type))for(;i!==this;i=i.parentNode||this)if(i.disabled!==!0||"click"!==a.type){for(d=[],c=0;h>c;c++)f=b[c],e=f.selector+" ",void 0===d[e]&&(d[e]=f.needsContext?o(e,this).index(i)>=0:o.find(e,this,null,[i]).length),d[e]&&d.push(f);d.length&&g.push({elem:i,handlers:d})}return h<b.length&&g.push({elem:this,handlers:b.slice(h)}),g},props:"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){return null==a.which&&(a.which=null!=b.charCode?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY offsetX offsetY pageX pageY screenX screenY 
toElement".split(" "),filter:function(a,b){var c,d,e,f=b.button;return null==a.pageX&&null!=b.clientX&&(c=a.target.ownerDocument||m,d=c.documentElement,e=c.body,a.pageX=b.clientX+(d&&d.scrollLeft||e&&e.scrollLeft||0)-(d&&d.clientLeft||e&&e.clientLeft||0),a.pageY=b.clientY+(d&&d.scrollTop||e&&e.scrollTop||0)-(d&&d.clientTop||e&&e.clientTop||0)),a.which||void 0===f||(a.which=1&f?1:2&f?3:4&f?2:0),a}},fix:function(a){if(a[o.expando])return a;var b,c,d,e=a.type,f=a,g=this.fixHooks[e];g||(this.fixHooks[e]=g=W.test(e)?this.mouseHooks:V.test(e)?this.keyHooks:{}),d=g.props?this.props.concat(g.props):this.props,a=new o.Event(f),b=d.length;while(b--)c=d[b],a[c]=f[c];return a.target||(a.target=m),3===a.target.nodeType&&(a.target=a.target.parentNode),g.filter?g.filter(a,f):a},special:{load:{noBubble:!0},focus:{trigger:function(){return this!==_()&&this.focus?(this.focus(),!1):void 0},delegateType:"focusin"},blur:{trigger:function(){return this===_()&&this.blur?(this.blur(),!1):void 0},delegateType:"focusout"},click:{trigger:function(){return"checkbox"===this.type&&this.click&&o.nodeName(this,"input")?(this.click(),!1):void 0},_default:function(a){return o.nodeName(a.target,"a")}},beforeunload:{postDispatch:function(a){void 0!==a.result&&(a.originalEvent.returnValue=a.result)}}},simulate:function(a,b,c,d){var e=o.extend(new o.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?o.event.trigger(e,null,b):o.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},o.removeEvent=function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)},o.Event=function(a,b){return this instanceof o.Event?(a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||void 0===a.defaultPrevented&&a.getPreventDefault&&a.getPreventDefault()?Z:$):this.type=a,b&&o.extend(this,b),this.timeStamp=a&&a.timeStamp||o.now(),void(this[o.expando]=!0)):new o.Event(a,b)},o.Event.prototype={isDefaultPrevented:$,isPropagationStopped:$,isImmediatePropagationStopped:$,preventDefault:function(){var a=this.originalEvent;this.isDefaultPrevented=Z,a&&a.preventDefault&&a.preventDefault()},stopPropagation:function(){var a=this.originalEvent;this.isPropagationStopped=Z,a&&a.stopPropagation&&a.stopPropagation()},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z,this.stopPropagation()}},o.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){o.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c,d=this,e=a.relatedTarget,f=a.handleObj;return(!e||e!==d&&!o.contains(d,e))&&(a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b),c}}}),l.focusinBubbles||o.each({focus:"focusin",blur:"focusout"},function(a,b){var c=function(a){o.event.simulate(b,a.target,o.event.fix(a),!0)};o.event.special[b]={setup:function(){var d=this.ownerDocument||this,e=L.access(d,b);e||d.addEventListener(a,c,!0),L.access(d,b,(e||0)+1)},teardown:function(){var d=this.ownerDocument||this,e=L.access(d,b)-1;e?L.access(d,b,e):(d.removeEventListener(a,c,!0),L.remove(d,b))}}}),o.fn.extend({on:function(a,b,c,d,e){var f,g;if("object"==typeof a){"string"!=typeof b&&(c=c||b,b=void 0);for(g in a)this.on(g,b,c,a[g],e);return this}if(null==c&&null==d?(d=b,c=b=void 0):null==d&&("string"==typeof b?(d=c,c=void 0):(d=c,c=b,b=void 0)),d===!1)d=$;else if(!d)return this;return 1===e&&(f=d,d=function(a){return o().off(a),f.apply(this,arguments)},d.guid=f.guid||(f.guid=o.guid++)),this.each(function(){o.event.add(this,a,d,c,b)})},one:function(a,b,c,d){return 
this.on(a,b,c,d,1)},off:function(a,b,c){var d,e;if(a&&a.preventDefault&&a.handleObj)return d=a.handleObj,o(a.delegateTarget).off(d.namespace?d.origType+"."+d.namespace:d.origType,d.selector,d.handler),this;if("object"==typeof a){for(e in a)this.off(e,b,a[e]);return this}return(b===!1||"function"==typeof b)&&(c=b,b=void 0),c===!1&&(c=$),this.each(function(){o.event.remove(this,a,c,b)})},trigger:function(a,b){return this.each(function(){o.event.trigger(a,b,this)})},triggerHandler:function(a,b){var c=this[0];return c?o.event.trigger(a,b,c,!0):void 0}});var ab=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bb=/<([\w:]+)/,cb=/<|&#?\w+;/,db=/<(?:script|style|link)/i,eb=/checked\s*(?:[^=]|=\s*.checked.)/i,fb=/^$|\/(?:java|ecma)script/i,gb=/^true\/(.*)/,hb=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,ib={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};ib.optgroup=ib.option,ib.tbody=ib.tfoot=ib.colgroup=ib.caption=ib.thead,ib.th=ib.td;function jb(a,b){return o.nodeName(a,"table")&&o.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function kb(a){return a.type=(null!==a.getAttribute("type"))+"/"+a.type,a}function lb(a){var b=gb.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function mb(a,b){for(var c=0,d=a.length;d>c;c++)L.set(a[c],"globalEval",!b||L.get(b[c],"globalEval"))}function nb(a,b){var c,d,e,f,g,h,i,j;if(1===b.nodeType){if(L.hasData(a)&&(f=L.access(a),g=L.set(b,f),j=f.events)){delete g.handle,g.events={};for(e in j)for(c=0,d=j[e].length;d>c;c++)o.event.add(b,e,j[e][c])}M.hasData(a)&&(h=M.access(a),i=o.extend({},h),M.set(b,i))}}function ob(a,b){var c=a.getElementsByTagName?a.getElementsByTagName(b||"*"):a.querySelectorAll?a.querySelectorAll(b||"*"):[];return void 0===b||b&&o.nodeName(a,b)?o.merge([a],c):c}function pb(a,b){var c=b.nodeName.toLowerCase();"input"===c&&T.test(a.type)?b.checked=a.checked:("input"===c||"textarea"===c)&&(b.defaultValue=a.defaultValue)}o.extend({clone:function(a,b,c){var d,e,f,g,h=a.cloneNode(!0),i=o.contains(a.ownerDocument,a);if(!(l.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||o.isXMLDoc(a)))for(g=ob(h),f=ob(a),d=0,e=f.length;e>d;d++)pb(f[d],g[d]);if(b)if(c)for(f=f||ob(a),g=g||ob(h),d=0,e=f.length;e>d;d++)nb(f[d],g[d]);else nb(a,h);return g=ob(h,"script"),g.length>0&&mb(g,!i&&ob(a,"script")),h},buildFragment:function(a,b,c,d){for(var e,f,g,h,i,j,k=b.createDocumentFragment(),l=[],m=0,n=a.length;n>m;m++)if(e=a[m],e||0===e)if("object"===o.type(e))o.merge(l,e.nodeType?[e]:e);else if(cb.test(e)){f=f||k.appendChild(b.createElement("div")),g=(bb.exec(e)||["",""])[1].toLowerCase(),h=ib[g]||ib._default,f.innerHTML=h[1]+e.replace(ab,"<$1></$2>")+h[2],j=h[0];while(j--)f=f.lastChild;o.merge(l,f.childNodes),f=k.firstChild,f.textContent=""}else l.push(b.createTextNode(e));k.textContent="",m=0;while(e=l[m++])if((!d||-1===o.inArray(e,d))&&(i=o.contains(e.ownerDocument,e),f=ob(k.appendChild(e),"script"),i&&mb(f),c)){j=0;while(e=f[j++])fb.test(e.type||"")&&c.push(e)}return k},cleanData:function(a){for(var b,c,d,e,f,g,h=o.event.special,i=0;void 0!==(c=a[i]);i++){if(o.acceptData(c)&&(f=c[L.expando],f&&(b=L.cache[f]))){if(d=Object.keys(b.events||{}),d.length)for(g=0;void 
0!==(e=d[g]);g++)h[e]?o.event.remove(c,e):o.removeEvent(c,e,b.handle);L.cache[f]&&delete L.cache[f]}delete M.cache[c[M.expando]]}}}),o.fn.extend({text:function(a){return J(this,function(a){return void 0===a?o.text(this):this.empty().each(function(){(1===this.nodeType||11===this.nodeType||9===this.nodeType)&&(this.textContent=a)})},null,a,arguments.length)},append:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=jb(this,a);b.appendChild(a)}})},prepend:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=jb(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},remove:function(a,b){for(var c,d=a?o.filter(a,this):this,e=0;null!=(c=d[e]);e++)b||1!==c.nodeType||o.cleanData(ob(c)),c.parentNode&&(b&&o.contains(c.ownerDocument,c)&&mb(ob(c,"script")),c.parentNode.removeChild(c));return this},empty:function(){for(var a,b=0;null!=(a=this[b]);b++)1===a.nodeType&&(o.cleanData(ob(a,!1)),a.textContent="");return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return o.clone(this,a,b)})},html:function(a){return J(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a&&1===b.nodeType)return b.innerHTML;if("string"==typeof a&&!db.test(a)&&!ib[(bb.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(ab,"<$1></$2>");try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(o.cleanData(ob(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=arguments[0];return this.domManip(arguments,function(b){a=this.parentNode,o.cleanData(ob(this)),a&&a.replaceChild(b,this)}),a&&(a.length||a.nodeType)?this:this.remove()},detach:function(a){return this.remove(a,!0)},domManip:function(a,b){a=e.apply([],a);var c,d,f,g,h,i,j=0,k=this.length,m=this,n=k-1,p=a[0],q=o.isFunction(p);if(q||k>1&&"string"==typeof p&&!l.checkClone&&eb.test(p))return this.each(function(c){var d=m.eq(c);q&&(a[0]=p.call(this,c,d.html())),d.domManip(a,b)});if(k&&(c=o.buildFragment(a,this[0].ownerDocument,!1,this),d=c.firstChild,1===c.childNodes.length&&(c=d),d)){for(f=o.map(ob(c,"script"),kb),g=f.length;k>j;j++)h=c,j!==n&&(h=o.clone(h,!0,!0),g&&o.merge(f,ob(h,"script"))),b.call(this[j],h,j);if(g)for(i=f[f.length-1].ownerDocument,o.map(f,lb),j=0;g>j;j++)h=f[j],fb.test(h.type||"")&&!L.access(h,"globalEval")&&o.contains(i,h)&&(h.src?o._evalUrl&&o._evalUrl(h.src):o.globalEval(h.textContent.replace(hb,"")))}return this}}),o.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){o.fn[a]=function(a){for(var c,d=[],e=o(a),g=e.length-1,h=0;g>=h;h++)c=h===g?this:this.clone(!0),o(e[h])[b](c),f.apply(d,c.get());return this.pushStack(d)}});var qb,rb={};function sb(b,c){var d=o(c.createElement(b)).appendTo(c.body),e=a.getDefaultComputedStyle?a.getDefaultComputedStyle(d[0]).display:o.css(d[0],"display");return d.detach(),e}function tb(a){var b=m,c=rb[a];return c||(c=sb(a,b),"none"!==c&&c||(qb=(qb||o("<iframe frameborder='0' width='0' height='0'/>")).appendTo(b.documentElement),b=qb[0].contentDocument,b.write(),b.close(),c=sb(a,b),qb.detach()),rb[a]=c),c}var ub=/^margin/,vb=new RegExp("^("+Q+")(?!px)[a-z%]+$","i"),wb=function(a){return 
a.ownerDocument.defaultView.getComputedStyle(a,null)};function xb(a,b,c){var d,e,f,g,h=a.style;return c=c||wb(a),c&&(g=c.getPropertyValue(b)||c[b]),c&&(""!==g||o.contains(a.ownerDocument,a)||(g=o.style(a,b)),vb.test(g)&&ub.test(b)&&(d=h.width,e=h.minWidth,f=h.maxWidth,h.minWidth=h.maxWidth=h.width=g,g=c.width,h.width=d,h.minWidth=e,h.maxWidth=f)),void 0!==g?g+"":g}function yb(a,b){return{get:function(){return a()?void delete this.get:(this.get=b).apply(this,arguments)}}}!function(){var b,c,d="padding:0;margin:0;border:0;display:block;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box",e=m.documentElement,f=m.createElement("div"),g=m.createElement("div");g.style.backgroundClip="content-box",g.cloneNode(!0).style.backgroundClip="",l.clearCloneStyle="content-box"===g.style.backgroundClip,f.style.cssText="border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px",f.appendChild(g);function h(){g.style.cssText="-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%",e.appendChild(f);var d=a.getComputedStyle(g,null);b="1%"!==d.top,c="4px"===d.width,e.removeChild(f)}a.getComputedStyle&&o.extend(l,{pixelPosition:function(){return h(),b},boxSizingReliable:function(){return null==c&&h(),c},reliableMarginRight:function(){var b,c=g.appendChild(m.createElement("div"));return c.style.cssText=g.style.cssText=d,c.style.marginRight=c.style.width="0",g.style.width="1px",e.appendChild(f),b=!parseFloat(a.getComputedStyle(c,null).marginRight),e.removeChild(f),g.innerHTML="",b}})}(),o.swap=function(a,b,c,d){var e,f,g={};for(f in b)g[f]=a.style[f],a.style[f]=b[f];e=c.apply(a,d||[]);for(f in b)a.style[f]=g[f];return e};var zb=/^(none|table(?!-c[ea]).+)/,Ab=new RegExp("^("+Q+")(.*)$","i"),Bb=new RegExp("^([+-])=("+Q+")","i"),Cb={position:"absolute",visibility:"hidden",display:"block"},Db={letterSpacing:0,fontWeight:400},Eb=["Webkit","O","Moz","ms"];function Fb(a,b){if(b in a)return b;var c=b[0].toUpperCase()+b.slice(1),d=b,e=Eb.length;while(e--)if(b=Eb[e]+c,b in a)return b;return d}function Gb(a,b,c){var d=Ab.exec(b);return d?Math.max(0,d[1]-(c||0))+(d[2]||"px"):b}function Hb(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===b?1:0,g=0;4>f;f+=2)"margin"===c&&(g+=o.css(a,c+R[f],!0,e)),d?("content"===c&&(g-=o.css(a,"padding"+R[f],!0,e)),"margin"!==c&&(g-=o.css(a,"border"+R[f]+"Width",!0,e))):(g+=o.css(a,"padding"+R[f],!0,e),"padding"!==c&&(g+=o.css(a,"border"+R[f]+"Width",!0,e)));return g}function Ib(a,b,c){var d=!0,e="width"===b?a.offsetWidth:a.offsetHeight,f=wb(a),g="border-box"===o.css(a,"boxSizing",!1,f);if(0>=e||null==e){if(e=xb(a,b,f),(0>e||null==e)&&(e=a.style[b]),vb.test(e))return e;d=g&&(l.boxSizingReliable()||e===a.style[b]),e=parseFloat(e)||0}return e+Hb(a,b,c||(g?"border":"content"),d,f)+"px"}function Jb(a,b){for(var c,d,e,f=[],g=0,h=a.length;h>g;g++)d=a[g],d.style&&(f[g]=L.get(d,"olddisplay"),c=d.style.display,b?(f[g]||"none"!==c||(d.style.display=""),""===d.style.display&&S(d)&&(f[g]=L.access(d,"olddisplay",tb(d.nodeName)))):f[g]||(e=S(d),(c&&"none"!==c||!e)&&L.set(d,"olddisplay",e?c:o.css(d,"display"))));for(g=0;h>g;g++)d=a[g],d.style&&(b&&"none"!==d.style.display&&""!==d.style.display||(d.style.display=b?f[g]||"":"none"));return a}o.extend({cssHooks:{opacity:{get:function(a,b){if(b){var 
c=xb(a,"opacity");return""===c?"1":c}}}},cssNumber:{columnCount:!0,fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":"cssFloat"},style:function(a,b,c,d){if(a&&3!==a.nodeType&&8!==a.nodeType&&a.style){var e,f,g,h=o.camelCase(b),i=a.style;return b=o.cssProps[h]||(o.cssProps[h]=Fb(i,h)),g=o.cssHooks[b]||o.cssHooks[h],void 0===c?g&&"get"in g&&void 0!==(e=g.get(a,!1,d))?e:i[b]:(f=typeof c,"string"===f&&(e=Bb.exec(c))&&(c=(e[1]+1)*e[2]+parseFloat(o.css(a,b)),f="number"),null!=c&&c===c&&("number"!==f||o.cssNumber[h]||(c+="px"),l.clearCloneStyle||""!==c||0!==b.indexOf("background")||(i[b]="inherit"),g&&"set"in g&&void 0===(c=g.set(a,c,d))||(i[b]="",i[b]=c)),void 0)}},css:function(a,b,c,d){var e,f,g,h=o.camelCase(b);return b=o.cssProps[h]||(o.cssProps[h]=Fb(a.style,h)),g=o.cssHooks[b]||o.cssHooks[h],g&&"get"in g&&(e=g.get(a,!0,c)),void 0===e&&(e=xb(a,b,d)),"normal"===e&&b in Db&&(e=Db[b]),""===c||c?(f=parseFloat(e),c===!0||o.isNumeric(f)?f||0:e):e}}),o.each(["height","width"],function(a,b){o.cssHooks[b]={get:function(a,c,d){return c?0===a.offsetWidth&&zb.test(o.css(a,"display"))?o.swap(a,Cb,function(){return Ib(a,b,d)}):Ib(a,b,d):void 0},set:function(a,c,d){var e=d&&wb(a);return Gb(a,c,d?Hb(a,b,d,"border-box"===o.css(a,"boxSizing",!1,e),e):0)}}}),o.cssHooks.marginRight=yb(l.reliableMarginRight,function(a,b){return b?o.swap(a,{display:"inline-block"},xb,[a,"marginRight"]):void 0}),o.each({margin:"",padding:"",border:"Width"},function(a,b){o.cssHooks[a+b]={expand:function(c){for(var d=0,e={},f="string"==typeof c?c.split(" "):[c];4>d;d++)e[a+R[d]+b]=f[d]||f[d-2]||f[0];return e}},ub.test(a)||(o.cssHooks[a+b].set=Gb)}),o.fn.extend({css:function(a,b){return J(this,function(a,b,c){var d,e,f={},g=0;if(o.isArray(b)){for(d=wb(a),e=b.length;e>g;g++)f[b[g]]=o.css(a,b[g],!1,d);return f}return void 0!==c?o.style(a,b,c):o.css(a,b)},a,b,arguments.length>1)},show:function(){return Jb(this,!0)},hide:function(){return Jb(this)},toggle:function(a){return"boolean"==typeof a?a?this.show():this.hide():this.each(function(){S(this)?o(this).show():o(this).hide()})}});function Kb(a,b,c,d,e){return new Kb.prototype.init(a,b,c,d,e)}o.Tween=Kb,Kb.prototype={constructor:Kb,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||"swing",this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(o.cssNumber[c]?"":"px")},cur:function(){var a=Kb.propHooks[this.prop];return a&&a.get?a.get(this):Kb.propHooks._default.get(this)},run:function(a){var b,c=Kb.propHooks[this.prop];return this.pos=b=this.options.duration?o.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):Kb.propHooks._default.set(this),this}},Kb.prototype.init.prototype=Kb.prototype,Kb.propHooks={_default:{get:function(a){var b;return null==a.elem[a.prop]||a.elem.style&&null!=a.elem.style[a.prop]?(b=o.css(a.elem,a.prop,""),b&&"auto"!==b?b:0):a.elem[a.prop]},set:function(a){o.fx.step[a.prop]?o.fx.step[a.prop](a):a.elem.style&&(null!=a.elem.style[o.cssProps[a.prop]]||o.cssHooks[a.prop])?o.style(a.elem,a.prop,a.now+a.unit):a.elem[a.prop]=a.now}}},Kb.propHooks.scrollTop=Kb.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},o.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2}},o.fx=Kb.prototype.init,o.fx.step={};var 
Lb,Mb,Nb=/^(?:toggle|show|hide)$/,Ob=new RegExp("^(?:([+-])=|)("+Q+")([a-z%]*)$","i"),Pb=/queueHooks$/,Qb=[Vb],Rb={"*":[function(a,b){var c=this.createTween(a,b),d=c.cur(),e=Ob.exec(b),f=e&&e[3]||(o.cssNumber[a]?"":"px"),g=(o.cssNumber[a]||"px"!==f&&+d)&&Ob.exec(o.css(c.elem,a)),h=1,i=20;if(g&&g[3]!==f){f=f||g[3],e=e||[],g=+d||1;do h=h||".5",g/=h,o.style(c.elem,a,g+f);while(h!==(h=c.cur()/d)&&1!==h&&--i)}return e&&(g=c.start=+g||+d||0,c.unit=f,c.end=e[1]?g+(e[1]+1)*e[2]:+e[2]),c}]};function Sb(){return setTimeout(function(){Lb=void 0}),Lb=o.now()}function Tb(a,b){var c,d=0,e={height:a};for(b=b?1:0;4>d;d+=2-b)c=R[d],e["margin"+c]=e["padding"+c]=a;return b&&(e.opacity=e.width=a),e}function Ub(a,b,c){for(var d,e=(Rb[b]||[]).concat(Rb["*"]),f=0,g=e.length;g>f;f++)if(d=e[f].call(c,b,a))return d}function Vb(a,b,c){var d,e,f,g,h,i,j,k=this,l={},m=a.style,n=a.nodeType&&S(a),p=L.get(a,"fxshow");c.queue||(h=o._queueHooks(a,"fx"),null==h.unqueued&&(h.unqueued=0,i=h.empty.fire,h.empty.fire=function(){h.unqueued||i()}),h.unqueued++,k.always(function(){k.always(function(){h.unqueued--,o.queue(a,"fx").length||h.empty.fire()})})),1===a.nodeType&&("height"in b||"width"in b)&&(c.overflow=[m.overflow,m.overflowX,m.overflowY],j=o.css(a,"display"),"none"===j&&(j=tb(a.nodeName)),"inline"===j&&"none"===o.css(a,"float")&&(m.display="inline-block")),c.overflow&&(m.overflow="hidden",k.always(function(){m.overflow=c.overflow[0],m.overflowX=c.overflow[1],m.overflowY=c.overflow[2]}));for(d in b)if(e=b[d],Nb.exec(e)){if(delete b[d],f=f||"toggle"===e,e===(n?"hide":"show")){if("show"!==e||!p||void 0===p[d])continue;n=!0}l[d]=p&&p[d]||o.style(a,d)}if(!o.isEmptyObject(l)){p?"hidden"in p&&(n=p.hidden):p=L.access(a,"fxshow",{}),f&&(p.hidden=!n),n?o(a).show():k.done(function(){o(a).hide()}),k.done(function(){var b;L.remove(a,"fxshow");for(b in l)o.style(a,b,l[b])});for(d in l)g=Ub(n?p[d]:0,d,k),d in p||(p[d]=g.start,n&&(g.end=g.start,g.start="width"===d||"height"===d?1:0))}}function Wb(a,b){var c,d,e,f,g;for(c in a)if(d=o.camelCase(c),e=b[d],f=a[c],o.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete a[c]),g=o.cssHooks[d],g&&"expand"in g){f=g.expand(f),delete a[d];for(c in f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}function Xb(a,b,c){var d,e,f=0,g=Qb.length,h=o.Deferred().always(function(){delete i.elem}),i=function(){if(e)return!1;for(var b=Lb||Sb(),c=Math.max(0,j.startTime+j.duration-b),d=c/j.duration||0,f=1-d,g=0,i=j.tweens.length;i>g;g++)j.tweens[g].run(f);return h.notifyWith(a,[j,f,c]),1>f&&i?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:o.extend({},b),opts:o.extend(!0,{specialEasing:{}},c),originalProperties:b,originalOptions:c,startTime:Lb||Sb(),duration:c.duration,tweens:[],createTween:function(b,c){var d=o.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(d),d},stop:function(b){var c=0,d=b?j.tweens.length:0;if(e)return this;for(e=!0;d>c;c++)j.tweens[c].run(1);return b?h.resolveWith(a,[j,b]):h.rejectWith(a,[j,b]),this}}),k=j.props;for(Wb(k,j.opts.specialEasing);g>f;f++)if(d=Qb[f].call(j,a,k,j.opts))return d;return o.map(k,Ub,j),o.isFunction(j.opts.start)&&j.opts.start.call(a,j),o.fx.timer(o.extend(i,{elem:a,anim:j,queue:j.opts.queue})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}o.Animation=o.extend(Xb,{tweener:function(a,b){o.isFunction(a)?(b=a,a=["*"]):a=a.split(" ");for(var 
c,d=0,e=a.length;e>d;d++)c=a[d],Rb[c]=Rb[c]||[],Rb[c].unshift(b)},prefilter:function(a,b){b?Qb.unshift(a):Qb.push(a)}}),o.speed=function(a,b,c){var d=a&&"object"==typeof a?o.extend({},a):{complete:c||!c&&b||o.isFunction(a)&&a,duration:a,easing:c&&b||b&&!o.isFunction(b)&&b};return d.duration=o.fx.off?0:"number"==typeof d.duration?d.duration:d.duration in o.fx.speeds?o.fx.speeds[d.duration]:o.fx.speeds._default,(null==d.queue||d.queue===!0)&&(d.queue="fx"),d.old=d.complete,d.complete=function(){o.isFunction(d.old)&&d.old.call(this),d.queue&&o.dequeue(this,d.queue)},d},o.fn.extend({fadeTo:function(a,b,c,d){return this.filter(S).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=o.isEmptyObject(a),f=o.speed(b,c,d),g=function(){var b=Xb(this,o.extend({},a),f);(e||L.get(this,"finish"))&&b.stop(!0)};return g.finish=g,e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,b,c){var d=function(a){var b=a.stop;delete a.stop,b(c)};return"string"!=typeof a&&(c=b,b=a,a=void 0),b&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,e=null!=a&&a+"queueHooks",f=o.timers,g=L.get(this);if(e)g[e]&&g[e].stop&&d(g[e]);else for(e in g)g[e]&&g[e].stop&&Pb.test(e)&&d(g[e]);for(e=f.length;e--;)f[e].elem!==this||null!=a&&f[e].queue!==a||(f[e].anim.stop(c),b=!1,f.splice(e,1));(b||!c)&&o.dequeue(this,a)})},finish:function(a){return a!==!1&&(a=a||"fx"),this.each(function(){var b,c=L.get(this),d=c[a+"queue"],e=c[a+"queueHooks"],f=o.timers,g=d?d.length:0;for(c.finish=!0,o.queue(this,a,[]),e&&e.stop&&e.stop.call(this,!0),b=f.length;b--;)f[b].elem===this&&f[b].queue===a&&(f[b].anim.stop(!0),f.splice(b,1));for(b=0;g>b;b++)d[b]&&d[b].finish&&d[b].finish.call(this);delete c.finish})}}),o.each(["toggle","show","hide"],function(a,b){var c=o.fn[b];o.fn[b]=function(a,d,e){return null==a||"boolean"==typeof a?c.apply(this,arguments):this.animate(Tb(b,!0),a,d,e)}}),o.each({slideDown:Tb("show"),slideUp:Tb("hide"),slideToggle:Tb("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){o.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),o.timers=[],o.fx.tick=function(){var a,b=0,c=o.timers;for(Lb=o.now();b<c.length;b++)a=c[b],a()||c[b]!==a||c.splice(b--,1);c.length||o.fx.stop(),Lb=void 0},o.fx.timer=function(a){o.timers.push(a),a()?o.fx.start():o.timers.pop()},o.fx.interval=13,o.fx.start=function(){Mb||(Mb=setInterval(o.fx.tick,o.fx.interval))},o.fx.stop=function(){clearInterval(Mb),Mb=null},o.fx.speeds={slow:600,fast:200,_default:400},o.fn.delay=function(a,b){return a=o.fx?o.fx.speeds[a]||a:a,b=b||"fx",this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},function(){var a=m.createElement("input"),b=m.createElement("select"),c=b.appendChild(m.createElement("option"));a.type="checkbox",l.checkOn=""!==a.value,l.optSelected=c.selected,b.disabled=!0,l.optDisabled=!c.disabled,a=m.createElement("input"),a.value="t",a.type="radio",l.radioValue="t"===a.value}();var Yb,Zb,$b=o.expr.attrHandle;o.fn.extend({attr:function(a,b){return J(this,o.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){o.removeAttr(this,a)})}}),o.extend({attr:function(a,b,c){var d,e,f=a.nodeType;if(a&&3!==f&&8!==f&&2!==f)return typeof a.getAttribute===U?o.prop(a,b,c):(1===f&&o.isXMLDoc(a)||(b=b.toLowerCase(),d=o.attrHooks[b]||(o.expr.match.bool.test(b)?Zb:Yb)),void 0===c?d&&"get"in d&&null!==(e=d.get(a,b))?e:(e=o.find.attr(a,b),null==e?void 0:e):null!==c?d&&"set"in d&&void 
0!==(e=d.set(a,c,b))?e:(a.setAttribute(b,c+""),c):void o.removeAttr(a,b))},removeAttr:function(a,b){var c,d,e=0,f=b&&b.match(E);if(f&&1===a.nodeType)while(c=f[e++])d=o.propFix[c]||c,o.expr.match.bool.test(c)&&(a[d]=!1),a.removeAttribute(c)},attrHooks:{type:{set:function(a,b){if(!l.radioValue&&"radio"===b&&o.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}}}),Zb={set:function(a,b,c){return b===!1?o.removeAttr(a,c):a.setAttribute(c,c),c}},o.each(o.expr.match.bool.source.match(/\w+/g),function(a,b){var c=$b[b]||o.find.attr;$b[b]=function(a,b,d){var e,f;
+return d||(f=$b[b],$b[b]=e,e=null!=c(a,b,d)?b.toLowerCase():null,$b[b]=f),e}});var _b=/^(?:input|select|textarea|button)$/i;o.fn.extend({prop:function(a,b){return J(this,o.prop,a,b,arguments.length>1)},removeProp:function(a){return this.each(function(){delete this[o.propFix[a]||a]})}}),o.extend({propFix:{"for":"htmlFor","class":"className"},prop:function(a,b,c){var d,e,f,g=a.nodeType;if(a&&3!==g&&8!==g&&2!==g)return f=1!==g||!o.isXMLDoc(a),f&&(b=o.propFix[b]||b,e=o.propHooks[b]),void 0!==c?e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:a[b]=c:e&&"get"in e&&null!==(d=e.get(a,b))?d:a[b]},propHooks:{tabIndex:{get:function(a){return a.hasAttribute("tabindex")||_b.test(a.nodeName)||a.href?a.tabIndex:-1}}}}),l.optSelected||(o.propHooks.selected={get:function(a){var b=a.parentNode;return b&&b.parentNode&&b.parentNode.selectedIndex,null}}),o.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){o.propFix[this.toLowerCase()]=this});var ac=/[\t\r\n\f]/g;o.fn.extend({addClass:function(a){var b,c,d,e,f,g,h="string"==typeof a&&a,i=0,j=this.length;if(o.isFunction(a))return this.each(function(b){o(this).addClass(a.call(this,b,this.className))});if(h)for(b=(a||"").match(E)||[];j>i;i++)if(c=this[i],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(ac," "):" ")){f=0;while(e=b[f++])d.indexOf(" "+e+" ")<0&&(d+=e+" ");g=o.trim(d),c.className!==g&&(c.className=g)}return this},removeClass:function(a){var b,c,d,e,f,g,h=0===arguments.length||"string"==typeof a&&a,i=0,j=this.length;if(o.isFunction(a))return this.each(function(b){o(this).removeClass(a.call(this,b,this.className))});if(h)for(b=(a||"").match(E)||[];j>i;i++)if(c=this[i],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(ac," "):"")){f=0;while(e=b[f++])while(d.indexOf(" "+e+" ")>=0)d=d.replace(" "+e+" "," ");g=a?o.trim(d):"",c.className!==g&&(c.className=g)}return this},toggleClass:function(a,b){var c=typeof a;return"boolean"==typeof b&&"string"===c?b?this.addClass(a):this.removeClass(a):this.each(o.isFunction(a)?function(c){o(this).toggleClass(a.call(this,c,this.className,b),b)}:function(){if("string"===c){var b,d=0,e=o(this),f=a.match(E)||[];while(b=f[d++])e.hasClass(b)?e.removeClass(b):e.addClass(b)}else(c===U||"boolean"===c)&&(this.className&&L.set(this,"__className__",this.className),this.className=this.className||a===!1?"":L.get(this,"__className__")||"")})},hasClass:function(a){for(var b=" "+a+" ",c=0,d=this.length;d>c;c++)if(1===this[c].nodeType&&(" "+this[c].className+" ").replace(ac," ").indexOf(b)>=0)return!0;return!1}});var bc=/\r/g;o.fn.extend({val:function(a){var b,c,d,e=this[0];{if(arguments.length)return d=o.isFunction(a),this.each(function(c){var e;1===this.nodeType&&(e=d?a.call(this,c,o(this).val()):a,null==e?e="":"number"==typeof e?e+="":o.isArray(e)&&(e=o.map(e,function(a){return null==a?"":a+""})),b=o.valHooks[this.type]||o.valHooks[this.nodeName.toLowerCase()],b&&"set"in b&&void 0!==b.set(this,e,"value")||(this.value=e))});if(e)return b=o.valHooks[e.type]||o.valHooks[e.nodeName.toLowerCase()],b&&"get"in b&&void 0!==(c=b.get(e,"value"))?c:(c=e.value,"string"==typeof c?c.replace(bc,""):null==c?"":c)}}}),o.extend({valHooks:{select:{get:function(a){for(var 
b,c,d=a.options,e=a.selectedIndex,f="select-one"===a.type||0>e,g=f?null:[],h=f?e+1:d.length,i=0>e?h:f?e:0;h>i;i++)if(c=d[i],!(!c.selected&&i!==e||(l.optDisabled?c.disabled:null!==c.getAttribute("disabled"))||c.parentNode.disabled&&o.nodeName(c.parentNode,"optgroup"))){if(b=o(c).val(),f)return b;g.push(b)}return g},set:function(a,b){var c,d,e=a.options,f=o.makeArray(b),g=e.length;while(g--)d=e[g],(d.selected=o.inArray(o(d).val(),f)>=0)&&(c=!0);return c||(a.selectedIndex=-1),f}}}}),o.each(["radio","checkbox"],function(){o.valHooks[this]={set:function(a,b){return o.isArray(b)?a.checked=o.inArray(o(a).val(),b)>=0:void 0}},l.checkOn||(o.valHooks[this].get=function(a){return null===a.getAttribute("value")?"on":a.value})}),o.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){o.fn[b]=function(a,c){return arguments.length>0?this.on(b,null,a,c):this.trigger(b)}}),o.fn.extend({hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return 1===arguments.length?this.off(a,"**"):this.off(b,a||"**",c)}});var cc=o.now(),dc=/\?/;o.parseJSON=function(a){return JSON.parse(a+"")},o.parseXML=function(a){var b,c;if(!a||"string"!=typeof a)return null;try{c=new DOMParser,b=c.parseFromString(a,"text/xml")}catch(d){b=void 0}return(!b||b.getElementsByTagName("parsererror").length)&&o.error("Invalid XML: "+a),b};var ec,fc,gc=/#.*$/,hc=/([?&])_=[^&]*/,ic=/^(.*?):[ \t]*([^\r\n]*)$/gm,jc=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,kc=/^(?:GET|HEAD)$/,lc=/^\/\//,mc=/^([\w.+-]+:)(?:\/\/(?:[^\/?#]*@|)([^\/?#:]*)(?::(\d+)|)|)/,nc={},oc={},pc="*/".concat("*");try{fc=location.href}catch(qc){fc=m.createElement("a"),fc.href="",fc=fc.href}ec=mc.exec(fc.toLowerCase())||[];function rc(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var d,e=0,f=b.toLowerCase().match(E)||[];if(o.isFunction(c))while(d=f[e++])"+"===d[0]?(d=d.slice(1)||"*",(a[d]=a[d]||[]).unshift(c)):(a[d]=a[d]||[]).push(c)}}function sc(a,b,c,d){var e={},f=a===oc;function g(h){var i;return e[h]=!0,o.each(a[h]||[],function(a,h){var j=h(b,c,d);return"string"!=typeof j||f||e[j]?f?!(i=j):void 0:(b.dataTypes.unshift(j),g(j),!1)}),i}return g(b.dataTypes[0])||!e["*"]&&g("*")}function tc(a,b){var c,d,e=o.ajaxSettings.flatOptions||{};for(c in b)void 0!==b[c]&&((e[c]?a:d||(d={}))[c]=b[c]);return d&&o.extend(!0,a,d),a}function uc(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[0])i.shift(),void 0===d&&(d=a.mimeType||b.getResponseHeader("Content-Type"));if(d)for(e in h)if(h[e]&&h[e].test(d)){i.unshift(e);break}if(i[0]in c)f=i[0];else{for(e in c){if(!i[0]||a.converters[e+" "+i[0]]){f=e;break}g||(g=e)}f=f||g}return f?(f!==i[0]&&i.unshift(f),c[f]):void 0}function vc(a,b,c,d){var e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])for(g in a.converters)j[g.toLowerCase()]=a.converters[g];f=k.shift();while(f)if(a.responseFields[f]&&(c[a.responseFields[f]]=b),!i&&d&&a.dataFilter&&(b=a.dataFilter(b,a.dataType)),i=f,f=k.shift())if("*"===f)f=i;else if("*"!==i&&i!==f){if(g=j[i+" "+f]||j["* "+f],!g)for(e in j)if(h=e.split(" "),h[1]===f&&(g=j[i+" "+h[0]]||j["* "+h[0]])){g===!0?g=j[e]:j[e]!==!0&&(f=h[0],k.unshift(h[1]));break}if(g!==!0)if(g&&a["throws"])b=g(b);else 
try{b=g(b)}catch(l){return{state:"parsererror",error:g?l:"No conversion from "+i+" to "+f}}}return{state:"success",data:b}}o.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:fc,type:"GET",isLocal:jc.test(ec[1]),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":pc,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":o.parseJSON,"text xml":o.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(a,b){return b?tc(tc(a,o.ajaxSettings),b):tc(o.ajaxSettings,a)},ajaxPrefilter:rc(nc),ajaxTransport:rc(oc),ajax:function(a,b){"object"==typeof a&&(b=a,a=void 0),b=b||{};var c,d,e,f,g,h,i,j,k=o.ajaxSetup({},b),l=k.context||k,m=k.context&&(l.nodeType||l.jquery)?o(l):o.event,n=o.Deferred(),p=o.Callbacks("once memory"),q=k.statusCode||{},r={},s={},t=0,u="canceled",v={readyState:0,getResponseHeader:function(a){var b;if(2===t){if(!f){f={};while(b=ic.exec(e))f[b[1].toLowerCase()]=b[2]}b=f[a.toLowerCase()]}return null==b?null:b},getAllResponseHeaders:function(){return 2===t?e:null},setRequestHeader:function(a,b){var c=a.toLowerCase();return t||(a=s[c]=s[c]||a,r[a]=b),this},overrideMimeType:function(a){return t||(k.mimeType=a),this},statusCode:function(a){var b;if(a)if(2>t)for(b in a)q[b]=[q[b],a[b]];else v.always(a[v.status]);return this},abort:function(a){var b=a||u;return c&&c.abort(b),x(0,b),this}};if(n.promise(v).complete=p.add,v.success=v.done,v.error=v.fail,k.url=((a||k.url||fc)+"").replace(gc,"").replace(lc,ec[1]+"//"),k.type=b.method||b.type||k.method||k.type,k.dataTypes=o.trim(k.dataType||"*").toLowerCase().match(E)||[""],null==k.crossDomain&&(h=mc.exec(k.url.toLowerCase()),k.crossDomain=!(!h||h[1]===ec[1]&&h[2]===ec[2]&&(h[3]||("http:"===h[1]?"80":"443"))===(ec[3]||("http:"===ec[1]?"80":"443")))),k.data&&k.processData&&"string"!=typeof k.data&&(k.data=o.param(k.data,k.traditional)),sc(nc,k,b,v),2===t)return v;i=k.global,i&&0===o.active++&&o.event.trigger("ajaxStart"),k.type=k.type.toUpperCase(),k.hasContent=!kc.test(k.type),d=k.url,k.hasContent||(k.data&&(d=k.url+=(dc.test(d)?"&":"?")+k.data,delete k.data),k.cache===!1&&(k.url=hc.test(d)?d.replace(hc,"$1_="+cc++):d+(dc.test(d)?"&":"?")+"_="+cc++)),k.ifModified&&(o.lastModified[d]&&v.setRequestHeader("If-Modified-Since",o.lastModified[d]),o.etag[d]&&v.setRequestHeader("If-None-Match",o.etag[d])),(k.data&&k.hasContent&&k.contentType!==!1||b.contentType)&&v.setRequestHeader("Content-Type",k.contentType),v.setRequestHeader("Accept",k.dataTypes[0]&&k.accepts[k.dataTypes[0]]?k.accepts[k.dataTypes[0]]+("*"!==k.dataTypes[0]?", "+pc+"; q=0.01":""):k.accepts["*"]);for(j in k.headers)v.setRequestHeader(j,k.headers[j]);if(k.beforeSend&&(k.beforeSend.call(l,v,k)===!1||2===t))return v.abort();u="abort";for(j in{success:1,error:1,complete:1})v[j](k[j]);if(c=sc(oc,k,b,v)){v.readyState=1,i&&m.trigger("ajaxSend",[v,k]),k.async&&k.timeout>0&&(g=setTimeout(function(){v.abort("timeout")},k.timeout));try{t=1,c.send(r,x)}catch(w){if(!(2>t))throw w;x(-1,w)}}else x(-1,"No Transport");function x(a,b,f,h){var j,r,s,u,w,x=b;2!==t&&(t=2,g&&clearTimeout(g),c=void 
0,e=h||"",v.readyState=a>0?4:0,j=a>=200&&300>a||304===a,f&&(u=uc(k,v,f)),u=vc(k,u,v,j),j?(k.ifModified&&(w=v.getResponseHeader("Last-Modified"),w&&(o.lastModified[d]=w),w=v.getResponseHeader("etag"),w&&(o.etag[d]=w)),204===a||"HEAD"===k.type?x="nocontent":304===a?x="notmodified":(x=u.state,r=u.data,s=u.error,j=!s)):(s=x,(a||!x)&&(x="error",0>a&&(a=0))),v.status=a,v.statusText=(b||x)+"",j?n.resolveWith(l,[r,x,v]):n.rejectWith(l,[v,x,s]),v.statusCode(q),q=void 0,i&&m.trigger(j?"ajaxSuccess":"ajaxError",[v,k,j?r:s]),p.fireWith(l,[v,x]),i&&(m.trigger("ajaxComplete",[v,k]),--o.active||o.event.trigger("ajaxStop")))}return v},getJSON:function(a,b,c){return o.get(a,b,c,"json")},getScript:function(a,b){return o.get(a,void 0,b,"script")}}),o.each(["get","post"],function(a,b){o[b]=function(a,c,d,e){return o.isFunction(c)&&(e=e||d,d=c,c=void 0),o.ajax({url:a,type:b,dataType:e,data:c,success:d})}}),o.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(a,b){o.fn[b]=function(a){return this.on(b,a)}}),o._evalUrl=function(a){return o.ajax({url:a,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0})},o.fn.extend({wrapAll:function(a){var b;return o.isFunction(a)?this.each(function(b){o(this).wrapAll(a.call(this,b))}):(this[0]&&(b=o(a,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstElementChild)a=a.firstElementChild;return a}).append(this)),this)},wrapInner:function(a){return this.each(o.isFunction(a)?function(b){o(this).wrapInner(a.call(this,b))}:function(){var b=o(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=o.isFunction(a);return this.each(function(c){o(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){o.nodeName(this,"body")||o(this).replaceWith(this.childNodes)}).end()}}),o.expr.filters.hidden=function(a){return a.offsetWidth<=0&&a.offsetHeight<=0},o.expr.filters.visible=function(a){return!o.expr.filters.hidden(a)};var wc=/%20/g,xc=/\[\]$/,yc=/\r?\n/g,zc=/^(?:submit|button|image|reset|file)$/i,Ac=/^(?:input|select|textarea|keygen)/i;function Bc(a,b,c,d){var e;if(o.isArray(b))o.each(b,function(b,e){c||xc.test(a)?d(a,e):Bc(a+"["+("object"==typeof e?b:"")+"]",e,c,d)});else if(c||"object"!==o.type(b))d(a,b);else for(e in b)Bc(a+"["+e+"]",b[e],c,d)}o.param=function(a,b){var c,d=[],e=function(a,b){b=o.isFunction(b)?b():null==b?"":b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};if(void 0===b&&(b=o.ajaxSettings&&o.ajaxSettings.traditional),o.isArray(a)||a.jquery&&!o.isPlainObject(a))o.each(a,function(){e(this.name,this.value)});else for(c in a)Bc(c,a[c],b,e);return d.join("&").replace(wc,"+")},o.fn.extend({serialize:function(){return o.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var a=o.prop(this,"elements");return a?o.makeArray(a):this}).filter(function(){var a=this.type;return this.name&&!o(this).is(":disabled")&&Ac.test(this.nodeName)&&!zc.test(a)&&(this.checked||!T.test(a))}).map(function(a,b){var c=o(this).val();return null==c?null:o.isArray(c)?o.map(c,function(a){return{name:b.name,value:a.replace(yc,"\r\n")}}):{name:b.name,value:c.replace(yc,"\r\n")}}).get()}}),o.ajaxSettings.xhr=function(){try{return new XMLHttpRequest}catch(a){}};var Cc=0,Dc={},Ec={0:200,1223:204},Fc=o.ajaxSettings.xhr();a.ActiveXObject&&o(a).on("unload",function(){for(var a in Dc)Dc[a]()}),l.cors=!!Fc&&"withCredentials"in Fc,l.ajax=Fc=!!Fc,o.ajaxTransport(function(a){var b;return 
l.cors||Fc&&!a.crossDomain?{send:function(c,d){var e,f=a.xhr(),g=++Cc;if(f.open(a.type,a.url,a.async,a.username,a.password),a.xhrFields)for(e in a.xhrFields)f[e]=a.xhrFields[e];a.mimeType&&f.overrideMimeType&&f.overrideMimeType(a.mimeType),a.crossDomain||c["X-Requested-With"]||(c["X-Requested-With"]="XMLHttpRequest");for(e in c)f.setRequestHeader(e,c[e]);b=function(a){return function(){b&&(delete Dc[g],b=f.onload=f.onerror=null,"abort"===a?f.abort():"error"===a?d(f.status,f.statusText):d(Ec[f.status]||f.status,f.statusText,"string"==typeof f.responseText?{text:f.responseText}:void 0,f.getAllResponseHeaders()))}},f.onload=b(),f.onerror=b("error"),b=Dc[g]=b("abort"),f.send(a.hasContent&&a.data||null)},abort:function(){b&&b()}}:void 0}),o.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/(?:java|ecma)script/},converters:{"text script":function(a){return o.globalEval(a),a}}}),o.ajaxPrefilter("script",function(a){void 0===a.cache&&(a.cache=!1),a.crossDomain&&(a.type="GET")}),o.ajaxTransport("script",function(a){if(a.crossDomain){var b,c;return{send:function(d,e){b=o("<script>").prop({async:!0,charset:a.scriptCharset,src:a.url}).on("load error",c=function(a){b.remove(),c=null,a&&e("error"===a.type?404:200,a.type)}),m.head.appendChild(b[0])},abort:function(){c&&c()}}}});var Gc=[],Hc=/(=)\?(?=&|$)|\?\?/;o.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=Gc.pop()||o.expando+"_"+cc++;return this[a]=!0,a}}),o.ajaxPrefilter("json jsonp",function(b,c,d){var e,f,g,h=b.jsonp!==!1&&(Hc.test(b.url)?"url":"string"==typeof b.data&&!(b.contentType||"").indexOf("application/x-www-form-urlencoded")&&Hc.test(b.data)&&"data");return h||"jsonp"===b.dataTypes[0]?(e=b.jsonpCallback=o.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,h?b[h]=b[h].replace(Hc,"$1"+e):b.jsonp!==!1&&(b.url+=(dc.test(b.url)?"&":"?")+b.jsonp+"="+e),b.converters["script json"]=function(){return g||o.error(e+" was not called"),g[0]},b.dataTypes[0]="json",f=a[e],a[e]=function(){g=arguments},d.always(function(){a[e]=f,b[e]&&(b.jsonpCallback=c.jsonpCallback,Gc.push(e)),g&&o.isFunction(f)&&f(g[0]),g=f=void 0}),"script"):void 0}),o.parseHTML=function(a,b,c){if(!a||"string"!=typeof a)return null;"boolean"==typeof b&&(c=b,b=!1),b=b||m;var d=v.exec(a),e=!c&&[];return d?[b.createElement(d[1])]:(d=o.buildFragment([a],b,e),e&&e.length&&o(e).remove(),o.merge([],d.childNodes))};var Ic=o.fn.load;o.fn.load=function(a,b,c){if("string"!=typeof a&&Ic)return Ic.apply(this,arguments);var d,e,f,g=this,h=a.indexOf(" ");return h>=0&&(d=a.slice(h),a=a.slice(0,h)),o.isFunction(b)?(c=b,b=void 0):b&&"object"==typeof b&&(e="POST"),g.length>0&&o.ajax({url:a,type:e,dataType:"html",data:b}).done(function(a){f=arguments,g.html(d?o("<div>").append(o.parseHTML(a)).find(d):a)}).complete(c&&function(a,b){g.each(c,f||[a.responseText,b,a])}),this},o.expr.filters.animated=function(a){return o.grep(o.timers,function(b){return a===b.elem}).length};var Jc=a.document.documentElement;function Kc(a){return o.isWindow(a)?a:9===a.nodeType&&a.defaultView}o.offset={setOffset:function(a,b,c){var d,e,f,g,h,i,j,k=o.css(a,"position"),l=o(a),m={};"static"===k&&(a.style.position="relative"),h=l.offset(),f=o.css(a,"top"),i=o.css(a,"left"),j=("absolute"===k||"fixed"===k)&&(f+i).indexOf("auto")>-1,j?(d=l.position(),g=d.top,e=d.left):(g=parseFloat(f)||0,e=parseFloat(i)||0),o.isFunction(b)&&(b=b.call(a,c,h)),null!=b.top&&(m.top=b.top-h.top+g),null!=b.left&&(m.left=b.left-h.left+e),"using"in 
b?b.using.call(a,m):l.css(m)}},o.fn.extend({offset:function(a){if(arguments.length)return void 0===a?this:this.each(function(b){o.offset.setOffset(this,a,b)});var b,c,d=this[0],e={top:0,left:0},f=d&&d.ownerDocument;if(f)return b=f.documentElement,o.contains(b,d)?(typeof d.getBoundingClientRect!==U&&(e=d.getBoundingClientRect()),c=Kc(f),{top:e.top+c.pageYOffset-b.clientTop,left:e.left+c.pageXOffset-b.clientLeft}):e},position:function(){if(this[0]){var a,b,c=this[0],d={top:0,left:0};return"fixed"===o.css(c,"position")?b=c.getBoundingClientRect():(a=this.offsetParent(),b=this.offset(),o.nodeName(a[0],"html")||(d=a.offset()),d.top+=o.css(a[0],"borderTopWidth",!0),d.left+=o.css(a[0],"borderLeftWidth",!0)),{top:b.top-d.top-o.css(c,"marginTop",!0),left:b.left-d.left-o.css(c,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||Jc;while(a&&!o.nodeName(a,"html")&&"static"===o.css(a,"position"))a=a.offsetParent;return a||Jc})}}),o.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(b,c){var d="pageYOffset"===c;o.fn[b]=function(e){return J(this,function(b,e,f){var g=Kc(b);return void 0===f?g?g[c]:b[e]:void(g?g.scrollTo(d?a.pageXOffset:f,d?f:a.pageYOffset):b[e]=f)},b,e,arguments.length,null)}}),o.each(["top","left"],function(a,b){o.cssHooks[b]=yb(l.pixelPosition,function(a,c){return c?(c=xb(a,b),vb.test(c)?o(a).position()[b]+"px":c):void 0})}),o.each({Height:"height",Width:"width"},function(a,b){o.each({padding:"inner"+a,content:b,"":"outer"+a},function(c,d){o.fn[d]=function(d,e){var f=arguments.length&&(c||"boolean"!=typeof d),g=c||(d===!0||e===!0?"margin":"border");return J(this,function(b,c,d){var e;return o.isWindow(b)?b.document.documentElement["client"+a]:9===b.nodeType?(e=b.documentElement,Math.max(b.body["scroll"+a],e["scroll"+a],b.body["offset"+a],e["offset"+a],e["client"+a])):void 0===d?o.css(b,c,g):o.style(b,c,d,g)},b,f?d:void 0,f,null)}})}),o.fn.size=function(){return this.length},o.fn.andSelf=o.fn.addBack,"function"==typeof define&&define.amd&&define("jquery",[],function(){return o});var Lc=a.jQuery,Mc=a.$;return o.noConflict=function(b){return a.$===o&&(a.$=Mc),b&&a.jQuery===o&&(a.jQuery=Lc),o},typeof b===U&&(a.jQuery=a.$=o),o}); \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery-ui-1_10_3-custom.min.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery-ui-1_10_3-custom.min.js
new file mode 100644
index 00000000000..d9fffa07332
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery-ui-1_10_3-custom.min.js
@@ -0,0 +1,5 @@
+/*! jQuery UI - v1.10.3 - 2013-11-27
+* http://jqueryui.com
+* Includes: jquery.ui.effect.js, jquery.ui.effect-pulsate.js
+* Copyright 2013 jQuery Foundation and other contributors; Licensed MIT */
+(function(e,t){var i="ui-effects-";e.effects={effect:{}},function(e,t){function i(e,t,i){var s=c[t.type]||{};return null==e?i||!t.def?null:t.def:(e=s.floor?~~e:parseFloat(e),isNaN(e)?t.def:s.mod?(e+s.mod)%s.mod:0>e?0:e>s.max?s.max:e)}function s(i){var s=l(),a=s._rgba=[];return i=i.toLowerCase(),f(h,function(e,n){var r,o=n.re.exec(i),h=o&&n.parse(o),l=n.space||"rgba";return h?(r=s[l](h),s[u[l].cache]=r[u[l].cache],a=s._rgba=r._rgba,!1):t}),a.length?("0,0,0,0"===a.join()&&e.extend(a,n.transparent),s):n[i]}function a(e,t,i){return i=(i+1)%1,1>6*i?e+6*(t-e)*i:1>2*i?t:2>3*i?e+6*(t-e)*(2/3-i):e}var n,r="backgroundColor borderBottomColor borderLeftColor borderRightColor borderTopColor color columnRuleColor outlineColor textDecorationColor textEmphasisColor",o=/^([\-+])=\s*(\d+\.?\d*)/,h=[{re:/rgba?\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,parse:function(e){return[e[1],e[2],e[3],e[4]]}},{re:/rgba?\(\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,parse:function(e){return[2.55*e[1],2.55*e[2],2.55*e[3],e[4]]}},{re:/#([a-f0-9]{2})([a-f0-9]{2})([a-f0-9]{2})/,parse:function(e){return[parseInt(e[1],16),parseInt(e[2],16),parseInt(e[3],16)]}},{re:/#([a-f0-9])([a-f0-9])([a-f0-9])/,parse:function(e){return[parseInt(e[1]+e[1],16),parseInt(e[2]+e[2],16),parseInt(e[3]+e[3],16)]}},{re:/hsla?\(\s*(\d+(?:\.\d+)?)\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,space:"hsla",parse:function(e){return[e[1],e[2]/100,e[3]/100,e[4]]}}],l=e.Color=function(t,i,s,a){return new e.Color.fn.parse(t,i,s,a)},u={rgba:{props:{red:{idx:0,type:"byte"},green:{idx:1,type:"byte"},blue:{idx:2,type:"byte"}}},hsla:{props:{hue:{idx:0,type:"degrees"},saturation:{idx:1,type:"percent"},lightness:{idx:2,type:"percent"}}}},c={"byte":{floor:!0,max:255},percent:{max:1},degrees:{mod:360,floor:!0}},d=l.support={},p=e("<p>")[0],f=e.each;p.style.cssText="background-color:rgba(1,1,1,.5)",d.rgba=p.style.backgroundColor.indexOf("rgba")>-1,f(u,function(e,t){t.cache="_"+e,t.props.alpha={idx:3,type:"percent",def:1}}),l.fn=e.extend(l.prototype,{parse:function(a,r,o,h){if(a===t)return this._rgba=[null,null,null,null],this;(a.jquery||a.nodeType)&&(a=e(a).css(r),r=t);var c=this,d=e.type(a),p=this._rgba=[];return r!==t&&(a=[a,r,o,h],d="array"),"string"===d?this.parse(s(a)||n._default):"array"===d?(f(u.rgba.props,function(e,t){p[t.idx]=i(a[t.idx],t)}),this):"object"===d?(a instanceof l?f(u,function(e,t){a[t.cache]&&(c[t.cache]=a[t.cache].slice())}):f(u,function(t,s){var n=s.cache;f(s.props,function(e,t){if(!c[n]&&s.to){if("alpha"===e||null==a[e])return;c[n]=s.to(c._rgba)}c[n][t.idx]=i(a[e],t,!0)}),c[n]&&0>e.inArray(null,c[n].slice(0,3))&&(c[n][3]=1,s.from&&(c._rgba=s.from(c[n])))}),this):t},is:function(e){var i=l(e),s=!0,a=this;return f(u,function(e,n){var r,o=i[n.cache];return o&&(r=a[n.cache]||n.to&&n.to(a._rgba)||[],f(n.props,function(e,i){return null!=o[i.idx]?s=o[i.idx]===r[i.idx]:t})),s}),s},_space:function(){var e=[],t=this;return f(u,function(i,s){t[s.cache]&&e.push(i)}),e.pop()},transition:function(e,t){var s=l(e),a=s._space(),n=u[a],r=0===this.alpha()?l("transparent"):this,o=r[n.cache]||n.to(r._rgba),h=o.slice();return s=s[n.cache],f(n.props,function(e,a){var n=a.idx,r=o[n],l=s[n],u=c[a.type]||{};null!==l&&(null===r?h[n]=l:(u.mod&&(l-r>u.mod/2?r+=u.mod:r-l>u.mod/2&&(r-=u.mod)),h[n]=i((l-r)*t+r,a)))}),this[a](h)},blend:function(t){if(1===this._rgba[3])return this;var i=this._rgba.slice(),s=i.pop(),a=l(t)._rgba;return 
l(e.map(i,function(e,t){return(1-s)*a[t]+s*e}))},toRgbaString:function(){var t="rgba(",i=e.map(this._rgba,function(e,t){return null==e?t>2?1:0:e});return 1===i[3]&&(i.pop(),t="rgb("),t+i.join()+")"},toHslaString:function(){var t="hsla(",i=e.map(this.hsla(),function(e,t){return null==e&&(e=t>2?1:0),t&&3>t&&(e=Math.round(100*e)+"%"),e});return 1===i[3]&&(i.pop(),t="hsl("),t+i.join()+")"},toHexString:function(t){var i=this._rgba.slice(),s=i.pop();return t&&i.push(~~(255*s)),"#"+e.map(i,function(e){return e=(e||0).toString(16),1===e.length?"0"+e:e}).join("")},toString:function(){return 0===this._rgba[3]?"transparent":this.toRgbaString()}}),l.fn.parse.prototype=l.fn,u.hsla.to=function(e){if(null==e[0]||null==e[1]||null==e[2])return[null,null,null,e[3]];var t,i,s=e[0]/255,a=e[1]/255,n=e[2]/255,r=e[3],o=Math.max(s,a,n),h=Math.min(s,a,n),l=o-h,u=o+h,c=.5*u;return t=h===o?0:s===o?60*(a-n)/l+360:a===o?60*(n-s)/l+120:60*(s-a)/l+240,i=0===l?0:.5>=c?l/u:l/(2-u),[Math.round(t)%360,i,c,null==r?1:r]},u.hsla.from=function(e){if(null==e[0]||null==e[1]||null==e[2])return[null,null,null,e[3]];var t=e[0]/360,i=e[1],s=e[2],n=e[3],r=.5>=s?s*(1+i):s+i-s*i,o=2*s-r;return[Math.round(255*a(o,r,t+1/3)),Math.round(255*a(o,r,t)),Math.round(255*a(o,r,t-1/3)),n]},f(u,function(s,a){var n=a.props,r=a.cache,h=a.to,u=a.from;l.fn[s]=function(s){if(h&&!this[r]&&(this[r]=h(this._rgba)),s===t)return this[r].slice();var a,o=e.type(s),c="array"===o||"object"===o?s:arguments,d=this[r].slice();return f(n,function(e,t){var s=c["object"===o?e:t.idx];null==s&&(s=d[t.idx]),d[t.idx]=i(s,t)}),u?(a=l(u(d)),a[r]=d,a):l(d)},f(n,function(t,i){l.fn[t]||(l.fn[t]=function(a){var n,r=e.type(a),h="alpha"===t?this._hsla?"hsla":"rgba":s,l=this[h](),u=l[i.idx];return"undefined"===r?u:("function"===r&&(a=a.call(this,u),r=e.type(a)),null==a&&i.empty?this:("string"===r&&(n=o.exec(a),n&&(a=u+parseFloat(n[2])*("+"===n[1]?1:-1))),l[i.idx]=a,this[h](l)))})})}),l.hook=function(t){var i=t.split(" ");f(i,function(t,i){e.cssHooks[i]={set:function(t,a){var n,r,o="";if("transparent"!==a&&("string"!==e.type(a)||(n=s(a)))){if(a=l(n||a),!d.rgba&&1!==a._rgba[3]){for(r="backgroundColor"===i?t.parentNode:t;(""===o||"transparent"===o)&&r&&r.style;)try{o=e.css(r,"backgroundColor"),r=r.parentNode}catch(h){}a=a.blend(o&&"transparent"!==o?o:"_default")}a=a.toRgbaString()}try{t.style[i]=a}catch(h){}}},e.fx.step[i]=function(t){t.colorInit||(t.start=l(t.elem,i),t.end=l(t.end),t.colorInit=!0),e.cssHooks[i].set(t.elem,t.start.transition(t.end,t.pos))}})},l.hook(r),e.cssHooks.borderColor={expand:function(e){var t={};return f(["Top","Right","Bottom","Left"],function(i,s){t["border"+s+"Color"]=e}),t}},n=e.Color.names={aqua:"#00ffff",black:"#000000",blue:"#0000ff",fuchsia:"#ff00ff",gray:"#808080",green:"#008000",lime:"#00ff00",maroon:"#800000",navy:"#000080",olive:"#808000",purple:"#800080",red:"#ff0000",silver:"#c0c0c0",teal:"#008080",white:"#ffffff",yellow:"#ffff00",transparent:[null,null,null,0],_default:"#ffffff"}}(jQuery),function(){function i(t){var i,s,a=t.ownerDocument.defaultView?t.ownerDocument.defaultView.getComputedStyle(t,null):t.currentStyle,n={};if(a&&a.length&&a[0]&&a[a[0]])for(s=a.length;s--;)i=a[s],"string"==typeof a[i]&&(n[e.camelCase(i)]=a[i]);else for(i in a)"string"==typeof a[i]&&(n[i]=a[i]);return n}function s(t,i){var s,a,r={};for(s in i)a=i[s],t[s]!==a&&(n[s]||(e.fx.step[s]||!isNaN(parseFloat(a)))&&(r[s]=a));return r}var 
a=["add","remove","toggle"],n={border:1,borderBottom:1,borderColor:1,borderLeft:1,borderRight:1,borderTop:1,borderWidth:1,margin:1,padding:1};e.each(["borderLeftStyle","borderRightStyle","borderBottomStyle","borderTopStyle"],function(t,i){e.fx.step[i]=function(e){("none"!==e.end&&!e.setAttr||1===e.pos&&!e.setAttr)&&(jQuery.style(e.elem,i,e.end),e.setAttr=!0)}}),e.fn.addBack||(e.fn.addBack=function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}),e.effects.animateClass=function(t,n,r,o){var h=e.speed(n,r,o);return this.queue(function(){var n,r=e(this),o=r.attr("class")||"",l=h.children?r.find("*").addBack():r;l=l.map(function(){var t=e(this);return{el:t,start:i(this)}}),n=function(){e.each(a,function(e,i){t[i]&&r[i+"Class"](t[i])})},n(),l=l.map(function(){return this.end=i(this.el[0]),this.diff=s(this.start,this.end),this}),r.attr("class",o),l=l.map(function(){var t=this,i=e.Deferred(),s=e.extend({},h,{queue:!1,complete:function(){i.resolve(t)}});return this.el.animate(this.diff,s),i.promise()}),e.when.apply(e,l.get()).done(function(){n(),e.each(arguments,function(){var t=this.el;e.each(this.diff,function(e){t.css(e,"")})}),h.complete.call(r[0])})})},e.fn.extend({addClass:function(t){return function(i,s,a,n){return s?e.effects.animateClass.call(this,{add:i},s,a,n):t.apply(this,arguments)}}(e.fn.addClass),removeClass:function(t){return function(i,s,a,n){return arguments.length>1?e.effects.animateClass.call(this,{remove:i},s,a,n):t.apply(this,arguments)}}(e.fn.removeClass),toggleClass:function(i){return function(s,a,n,r,o){return"boolean"==typeof a||a===t?n?e.effects.animateClass.call(this,a?{add:s}:{remove:s},n,r,o):i.apply(this,arguments):e.effects.animateClass.call(this,{toggle:s},a,n,r)}}(e.fn.toggleClass),switchClass:function(t,i,s,a,n){return e.effects.animateClass.call(this,{add:i,remove:t},s,a,n)}})}(),function(){function s(t,i,s,a){return e.isPlainObject(t)&&(i=t,t=t.effect),t={effect:t},null==i&&(i={}),e.isFunction(i)&&(a=i,s=null,i={}),("number"==typeof i||e.fx.speeds[i])&&(a=s,s=i,i={}),e.isFunction(s)&&(a=s,s=null),i&&e.extend(t,i),s=s||i.duration,t.duration=e.fx.off?0:"number"==typeof s?s:s in e.fx.speeds?e.fx.speeds[s]:e.fx.speeds._default,t.complete=a||i.complete,t}function a(t){return!t||"number"==typeof t||e.fx.speeds[t]?!0:"string"!=typeof t||e.effects.effect[t]?e.isFunction(t)?!0:"object"!=typeof t||t.effect?!1:!0:!0}e.extend(e.effects,{version:"1.10.3",save:function(e,t){for(var s=0;t.length>s;s++)null!==t[s]&&e.data(i+t[s],e[0].style[t[s]])},restore:function(e,s){var a,n;for(n=0;s.length>n;n++)null!==s[n]&&(a=e.data(i+s[n]),a===t&&(a=""),e.css(s[n],a))},setMode:function(e,t){return"toggle"===t&&(t=e.is(":hidden")?"show":"hide"),t},getBaseline:function(e,t){var i,s;switch(e[0]){case"top":i=0;break;case"middle":i=.5;break;case"bottom":i=1;break;default:i=e[0]/t.height}switch(e[1]){case"left":s=0;break;case"center":s=.5;break;case"right":s=1;break;default:s=e[1]/t.width}return{x:s,y:i}},createWrapper:function(t){if(t.parent().is(".ui-effects-wrapper"))return t.parent();var i={width:t.outerWidth(!0),height:t.outerHeight(!0),"float":t.css("float")},s=e("<div></div>").addClass("ui-effects-wrapper").css({fontSize:"100%",background:"transparent",border:"none",margin:0,padding:0}),a={width:t.width(),height:t.height()},n=document.activeElement;try{n.id}catch(r){n=document.body}return 
t.wrap(s),(t[0]===n||e.contains(t[0],n))&&e(n).focus(),s=t.parent(),"static"===t.css("position")?(s.css({position:"relative"}),t.css({position:"relative"})):(e.extend(i,{position:t.css("position"),zIndex:t.css("z-index")}),e.each(["top","left","bottom","right"],function(e,s){i[s]=t.css(s),isNaN(parseInt(i[s],10))&&(i[s]="auto")}),t.css({position:"relative",top:0,left:0,right:"auto",bottom:"auto"})),t.css(a),s.css(i).show()},removeWrapper:function(t){var i=document.activeElement;return t.parent().is(".ui-effects-wrapper")&&(t.parent().replaceWith(t),(t[0]===i||e.contains(t[0],i))&&e(i).focus()),t},setTransition:function(t,i,s,a){return a=a||{},e.each(i,function(e,i){var n=t.cssUnit(i);n[0]>0&&(a[i]=n[0]*s+n[1])}),a}}),e.fn.extend({effect:function(){function t(t){function s(){e.isFunction(n)&&n.call(a[0]),e.isFunction(t)&&t()}var a=e(this),n=i.complete,o=i.mode;(a.is(":hidden")?"hide"===o:"show"===o)?(a[o](),s()):r.call(a[0],i,s)}var i=s.apply(this,arguments),a=i.mode,n=i.queue,r=e.effects.effect[i.effect];return e.fx.off||!r?a?this[a](i.duration,i.complete):this.each(function(){i.complete&&i.complete.call(this)}):n===!1?this.each(t):this.queue(n||"fx",t)},show:function(e){return function(t){if(a(t))return e.apply(this,arguments);var i=s.apply(this,arguments);return i.mode="show",this.effect.call(this,i)}}(e.fn.show),hide:function(e){return function(t){if(a(t))return e.apply(this,arguments);var i=s.apply(this,arguments);return i.mode="hide",this.effect.call(this,i)}}(e.fn.hide),toggle:function(e){return function(t){if(a(t)||"boolean"==typeof t)return e.apply(this,arguments);var i=s.apply(this,arguments);return i.mode="toggle",this.effect.call(this,i)}}(e.fn.toggle),cssUnit:function(t){var i=this.css(t),s=[];return e.each(["em","px","%","pt"],function(e,t){i.indexOf(t)>0&&(s=[parseFloat(i),t])}),s}})}(),function(){var t={};e.each(["Quad","Cubic","Quart","Quint","Expo"],function(e,i){t[i]=function(t){return Math.pow(t,e+2)}}),e.extend(t,{Sine:function(e){return 1-Math.cos(e*Math.PI/2)},Circ:function(e){return 1-Math.sqrt(1-e*e)},Elastic:function(e){return 0===e||1===e?e:-Math.pow(2,8*(e-1))*Math.sin((80*(e-1)-7.5)*Math.PI/15)},Back:function(e){return e*e*(3*e-2)},Bounce:function(e){for(var t,i=4;((t=Math.pow(2,--i))-1)/11>e;);return 1/Math.pow(4,3-i)-7.5625*Math.pow((3*t-2)/22-e,2)}}),e.each(t,function(t,i){e.easing["easeIn"+t]=i,e.easing["easeOut"+t]=function(e){return 1-i(1-e)},e.easing["easeInOut"+t]=function(e){return.5>e?i(2*e)/2:1-i(-2*e+2)/2}})}()})(jQuery);(function(e){e.effects.effect.pulsate=function(t,i){var s,a=e(this),n=e.effects.setMode(a,t.mode||"show"),r="show"===n,o="hide"===n,l=r||"hide"===n,h=2*(t.times||5)+(l?1:0),u=t.duration/h,d=0,c=a.queue(),p=c.length;for((r||!a.is(":visible"))&&(a.css("opacity",0).show(),d=1),s=1;h>s;s++)a.animate({opacity:d},u,t.easing),d=1-d;a.animate({opacity:d},u,t.easing),a.queue(function(){o&&a.hide(),i()}),p>1&&c.splice.apply(c,[1,0].concat(c.splice(p,h+1))),a.dequeue()}})(jQuery); \ No newline at end of file
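The minified block above is the jQuery UI 1.10.3 effects core: a jQuery.Color implementation with cssHooks for color animation, the animateClass/switchClass helpers, the easing table, and the pulsate effect. A minimal usage sketch, with the #panel selector and values chosen only for illustration:

    // Color animation works because the block registers cssHooks and fx.step entries for color properties.
    $("#panel").animate({ backgroundColor: "#ffff99" }, 400);
    // The pulsate effect defined at the end of the block toggles opacity a configurable number of times.
    $("#panel").effect("pulsate", { times: 3 }, 600);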
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery.pretty-text-diff.min.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery.pretty-text-diff.min.js
new file mode 100644
index 00000000000..d2c44be1fc8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery.pretty-text-diff.min.js
@@ -0,0 +1,5 @@
+/*
+@preserve jQuery.PrettyTextDiff 1.0.2
+See https://github.com/arnab/jQuery.PrettyTextDiff/
+*/
+(function(){var $;$=jQuery;$.fn.extend({prettyTextDiff:function(options){var dmp,settings;settings={originalContainer:".original",changedContainer:".changed",diffContainer:".diff",cleanup:true,debug:false};settings=$.extend(settings,options);$.fn.prettyTextDiff.debug("Options: ",settings,settings);dmp=new diff_match_patch;return this.each(function(){var changed,diff_as_html,diffs,original;original=$(settings.originalContainer,this).text();$.fn.prettyTextDiff.debug("Original text found: ",original,settings);changed=$(settings.changedContainer,this).text();$.fn.prettyTextDiff.debug("Changed text found: ",changed,settings);diffs=dmp.diff_main(original,changed);if(settings.cleanup){dmp.diff_cleanupSemantic(diffs)}$.fn.prettyTextDiff.debug("Diffs: ",diffs,settings);diff_as_html=diffs.map(function(diff){return $.fn.prettyTextDiff.createHTML(diff)});$(settings.diffContainer,this).html(diff_as_html.join(""));return this})}});$.fn.prettyTextDiff.debug=function(message,object,settings){if(settings.debug){return console.log(message,object)}};$.fn.prettyTextDiff.createHTML=function(diff){var data,html,operation,pattern_amp,pattern_gt,pattern_lt,pattern_para,text;html=[];pattern_amp=/&/g;pattern_lt=/</g;pattern_gt=/>/g;pattern_para=/\n/g;operation=diff[0],data=diff[1];text=data.replace(pattern_amp,"&amp;").replace(pattern_lt,"&lt;").replace(pattern_gt,"&gt;").replace(pattern_para,"<br>");switch(operation){case DIFF_INSERT:return"<ins>"+text+"</ins>";case DIFF_DELETE:return"<del>"+text+"</del>";case DIFF_EQUAL:return"<span>"+text+"</span>"}}}).call(this); \ No newline at end of file
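jQuery.PrettyTextDiff (above) diffs the text of two child elements and writes inline <ins>/<del>/<span> markup into a third, using the diff_match_patch library it expects to be loaded already. A minimal sketch with the plugin's default container classes (.original, .changed, .diff); the .row selector is only illustrative:

    // Each matched element must contain .original, .changed and .diff children.
    $(".row").prettyTextDiff({ cleanup: true });  // cleanup runs diff_cleanupSemantic on the raw diff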
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery.tipsy.min.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery.tipsy.min.js
new file mode 100644
index 00000000000..b1f97639fa2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/jquery.tipsy.min.js
@@ -0,0 +1,5 @@
+// tipsy, facebook style tooltips for jquery
+// version 1.0.0a
+// (c) 2008-2010 jason frame [jason@onehackoranother.com]
+// released under the MIT license
+(function(e){function t(e,t){return typeof e=="function"?e.call(t):e}function n(e){while(e=e.parentNode){if(e==document)return true}return false}function r(t,n){this.$element=e(t);this.options=n;this.enabled=true;this.fixTitle()}r.prototype={show:function(){var n=this.getTitle();if(n&&this.enabled){var r=this.tip();r.find(".tipsy-inner")[this.options.html?"html":"text"](n);r[0].className="tipsy";r.remove().css({top:0,left:0,visibility:"hidden",display:"block"}).prependTo(document.body);var i=e.extend({},this.$element.offset(),{width:this.$element[0].offsetWidth,height:this.$element[0].offsetHeight + 3});var s=r[0].offsetWidth,o=r[0].offsetHeight,u=t(this.options.gravity,this.$element[0]);var a;switch(u.charAt(0)){case"n":a={top:i.top+i.height+this.options.offset,left:i.left+i.width/2-s/2};break;case"s":a={top:i.top-o-this.options.offset,left:i.left+i.width/2-s/2};break;case"e":a={top:i.top+i.height/2-o/2,left:i.left-s-this.options.offset};break;case"w":a={top:i.top+i.height/2-o/2,left:i.left+i.width+this.options.offset};break}if(u.length==2){if(u.charAt(1)=="w"){a.left=i.left+i.width/2-15}else{a.left=i.left+i.width/2-s+15}}r.css(a).addClass("tipsy-"+u);r.find(".tipsy-arrow")[0].className="tipsy-arrow tipsy-arrow-"+u.charAt(0);if(this.options.className){r.addClass(t(this.options.className,this.$element[0]))}if(this.options.fade){r.stop().css({opacity:0,display:"block",visibility:"visible"}).animate({opacity:this.options.opacity})}else{r.css({visibility:"visible",opacity:this.options.opacity})}}},hide:function(){if(this.options.fade){this.tip().stop().fadeOut(function(){e(this).remove()})}else{this.tip().remove()}},fixTitle:function(){var e=this.$element;if(e.attr("title")||typeof e.attr("original-title")!="string"){e.attr("original-title",e.attr("title")||"").removeAttr("title")}},getTitle:function(){var e,t=this.$element,n=this.options;this.fixTitle();var e,n=this.options;if(typeof n.title=="string"){e=t.attr(n.title=="title"?"original-title":n.title)}else if(typeof n.title=="function"){e=n.title.call(t[0])}e=(""+e).replace(/(^\s*|\s*$)/,"");return e||n.fallback},tip:function(){if(!this.$tip){this.$tip=e('<div class="tipsy"></div>').html('<div class="tipsy-arrow"></div><div class="tipsy-inner"></div>');this.$tip.data("tipsy-pointee",this.$element[0])}return this.$tip},validate:function(){if(!this.$element[0].parentNode){this.hide();this.$element=null;this.options=null}},enable:function(){this.enabled=true},disable:function(){this.enabled=false},toggleEnabled:function(){this.enabled=!this.enabled}};e.fn.tipsy=function(t){function i(n){var i=e.data(n,"tipsy");if(!i){i=new r(n,e.fn.tipsy.elementOptions(n,t));e.data(n,"tipsy",i)}return i}function s(){var e=i(this);e.hoverState="in";if(t.delayIn==0){e.show()}else{e.fixTitle();setTimeout(function(){if(e.hoverState=="in")e.show()},t.delayIn)}}function o(){var e=i(this);e.hoverState="out";if(t.delayOut==0){e.hide()}else{setTimeout(function(){if(e.hoverState=="out")e.hide()},t.delayOut)}}if(t===true){return this.data("tipsy")}else if(typeof t=="string"){var n=this.data("tipsy");if(n)n[t]();return this}t=e.extend({},e.fn.tipsy.defaults,t);if(!t.live)this.each(function(){i(this)});if(t.trigger!="manual"){var u=t.trigger=="hover"?"mouseenter":"focus",a=t.trigger=="hover"?"mouseleave":"blur";if(t.live){e(this.context).on(u,this.selector,s).on(a,this.selector,o)}else{this.on(u,s).on(a,o)}}return 
this};e.fn.tipsy.defaults={className:null,delayIn:0,delayOut:0,fade:false,fallback:"",gravity:"n",html:false,live:false,offset:0,opacity:.9,title:"title",trigger:"hover"};e.fn.tipsy.revalidate=function(){e(".tipsy").each(function(){var t=e.data(this,"tipsy-pointee");if(!t||!n(t)){e(this).remove()}})};e.fn.tipsy.elementOptions=function(t,n){return e.metadata?e.extend({},n,e(t).metadata()):n};e.fn.tipsy.autoNS=function(){return e(this).offset().top>e(document).scrollTop()+e(window).height()/2?"s":"n"};e.fn.tipsy.autoWE=function(){return e(this).offset().left>e(document).scrollLeft()+e(window).width()/2?"e":"w"};e.fn.tipsy.autoBounds=function(t,n){return function(){var r={ns:n[0],ew:n.length>1?n[1]:false},i=e(document).scrollTop()+t,s=e(document).scrollLeft()+t,o=e(this);if(o.offset().top<i)r.ns="n";if(o.offset().left<s)r.ew="w";if(e(window).width()+e(document).scrollLeft()-o.offset().left<t)r.ew="e";if(e(window).height()+e(document).scrollTop()-o.offset().top<t)r.ns="s";return r.ns+(r.ew?r.ew:"")}}})(jQuery) \ No newline at end of file
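tipsy (above) turns an element's title attribute into a positioned tooltip. A minimal sketch using options that appear in e.fn.tipsy.defaults above; the a[title] selector is only illustrative:

    // "n" gravity places the tooltip below the element, with the arrow pointing up at it.
    $("a[title]").tipsy({ gravity: "n", fade: true, delayIn: 100 });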
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/markup.min.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/markup.min.js
new file mode 100644
index 00000000000..204f3e4971f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/markup.min.js
@@ -0,0 +1,6 @@
+/*
+ Markup.js v1.5.17: http://github.com/adammark/Markup.js
+ MIT License
+ (c) 2011 - 2013 Adam Mark
+*/
+var Mark={includes:{},globals:{},delimiter:">",compact:false,_copy:function(d,c){c=c||[];for(var e in d){c[e]=d[e]}return c},_size:function(b){return b instanceof Array?b.length:(b||0)},_iter:function(a,b){this.idx=a;this.size=b;this.length=b;this.sign="#";this.toString=function(){return this.idx+this.sign.length-1}},_pipe:function(h,c){var g,f,b,a;if((g=c.shift())){f=g.split(this.delimiter);b=f.shift().trim();try{a=Mark.pipes[b].apply(null,[h].concat(f));h=this._pipe(a,c)}catch(d){}}return h},_eval:function(e,g,h){var a=this._pipe(e,g),b=a,d=-1,c,f;if(a instanceof Array){a="";c=b.length;while(++d<c){f={iter:new this._iter(d,c)};a+=h?Mark.up(h,b[d],f):b[d]}}else{if(a instanceof Object){a=Mark.up(h,b)}}return a},_test:function(a,e,c,b){var d=Mark.up(e,c,b).split(/\{\{\s*else\s*\}\}/);return(a===false?d[1]:d[0])||""},_bridge:function(h,e){var f="{{\\s*"+e+"([^/}]+\\w*)?}}|{{/"+e+"\\s*}}",n=new RegExp(f,"g"),p=h.match(n)||[],o,g,m=0,l=0,k=-1,j=0;for(g=0;g<p.length;g++){o=g;k=h.indexOf(p[o],k+1);if(p[o].indexOf("{{/")>-1){l++}else{m++}if(m===l){break}}m=h.indexOf(p[0]);l=m+p[0].length;j=k+p[o].length;return[h.substring(m,j),h.substring(l,k)]}};Mark.up=function(s,b,e){b=b||{};e=e||{};var m=/\{\{(.+?)\}\}/g,l=s.match(m)||[],t,d,g,h=[],r,c,f,k,o,a,n,q=0,p=0;if(e.pipes){this._copy(e.pipes,this.pipes)}if(e.includes){this._copy(e.includes,this.includes)}if(e.globals){this._copy(e.globals,this.globals)}if(e.delimiter){this.delimiter=e.delimiter}if(e.compact!==undefined){this.compact=e.compact}while((t=l[q++])){k=undefined;f="";r=t.indexOf("/}}")>-1;d=t.substr(2,t.length-(r?5:4));d=d.replace(/`(.+?)`/g,function(i,j){return Mark.up("{{"+j+"}}",b)});c=d.trim().indexOf("if ")===0;h=d.split("|");h.shift();d=d.replace(/^\s*if/,"").split("|").shift().trim();g=c?"if":d.split("|")[0];n=b[d];if(c&&!h.length){h=["notempty"]}if(!r&&s.indexOf("{{/"+g)>-1){k=this._bridge(s,g);t=k[0];f=k[1];q+=t.match(m).length-1}if(/^\{\{\s*else\s*\}\}$/.test(t)){continue}else{if((o=this.globals[d])!==undefined){k=this._eval(o,h,f)}else{if((a=this.includes[d])){if(a instanceof Function){a=a()}k=this._pipe(Mark.up(a,b),h)}else{if(d.indexOf("#")>-1){e.iter.sign=d;k=this._pipe(e.iter,h)}else{if(d==="."){k=this._pipe(b,h)}else{if(d.indexOf(".")>-1){d=d.split(".");n=Mark.globals[d[0]];if(n){p=1}else{p=0;n=b}while(n&&p<d.length){n=n[d[p++]]}k=this._eval(n,h,f)}else{if(c){k=this._pipe(n,h)}else{if(n instanceof Array){k=this._eval(n,h,f)}else{if(f){k=n?Mark.up(f,n):undefined}else{if(b.hasOwnProperty(d)){k=this._pipe(n,h)}}}}}}}}}}if(k instanceof Array){k=this._eval(k,h,f)}if(c){k=this._test(k,f,b,e)}s=s.replace(t,k===undefined?"???":k)}return this.compact?s.replace(/>\s+</g,"><"):s};Mark.pipes={empty:function(a){return !a||(a+"").trim().length===0?a:false},notempty:function(a){return a&&(a+"").trim().length?a:false},blank:function(b,a){return !!b||b===0?b:a},more:function(d,c){return Mark._size(d)>c?d:false},less:function(d,c){return Mark._size(d)<c?d:false},ormore:function(d,c){return Mark._size(d)>=c?d:false},orless:function(d,c){return Mark._size(d)<=c?d:false},between:function(e,d,f){e=Mark._size(e);return e>=d&&e<=f?e:false},equals:function(d,c){return d==c?d:false},notequals:function(d,c){return d!=c?d:false},like:function(b,a){return new RegExp(a,"i").test(b)?b:false},notlike:function(b,a){return !Mark.pipes.like(b,a)?b:false},upcase:function(a){return String(a).toUpperCase()},downcase:function(a){return String(a).toLowerCase()},capcase:function(a){return a.replace(/\b\w/g,function(b){return 
b.toUpperCase()})},chop:function(a,b){return a.length>b?a.substr(0,b)+"...":a},tease:function(c,d){var b=c.split(/\s+/);return b.slice(0,d).join(" ")+(b.length>d?"...":"")},trim:function(a){return a.trim()},pack:function(a){return a.trim().replace(/\s{2,}/g," ")},round:function(a){return Math.round(+a)},clean:function(a){return String(a).replace(/<\/?[^>]+>/gi,"")},size:function(a){return a.length},length:function(a){return a.length},reverse:function(a){return[].concat(a).reverse()},join:function(a,b){return a.join(b)},limit:function(b,c,a){return b.slice(+a||0,+c+(+a||0))},split:function(b,a){return b.split(a||",")},choose:function(b,c,a){return !!b?c:(a||"")},toggle:function(c,b,a,d){return a.split(",")[b.match(/\w+/g).indexOf(c+"")]||d},sort:function(a,c){var b=function(e,d){return e[c]>d[c]?1:-1};return[].concat(a).sort(c?b:undefined)},fix:function(a,b){return(+a).toFixed(b)},mod:function(a,b){return(+a)%(+b)},divisible:function(a,b){return a&&(+a%b)===0?a:false},even:function(a){return a&&(+a&1)===0?a:false},odd:function(a){return a&&(+a&1)===1?a:false},number:function(a){return parseFloat(a.replace(/[^\-\d\.]/g,""))},url:function(a){return encodeURI(a)},bool:function(a){return !!a},falsy:function(a){return !a},first:function(a){return a.idx===0},last:function(a){return a.idx===a.size-1},call:function(b,a){return b[a].apply(b,[].slice.call(arguments,2))},set:function(b,a){Mark.globals[a]=b;return""},log:function(a){console.log(a);return a}};if(typeof String.prototype.trim!=="function"){String.prototype.trim=function(){return this.replace(/^\s+|\s+$/g,"")}}if(typeof module!=="undefined"&&module.exports){module.exports=Mark}else{if(typeof define==="function"&&define.amd){define(function(){return Mark})}}; \ No newline at end of file
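Markup.js (above) is a small {{...}} template engine whose built-in filters live in Mark.pipes. A minimal sketch of Mark.up with a context object and two of the pipes defined above (upcase, size):

    var template = "Hello, {{name|upcase}}! You have {{messages|size}} new messages.";
    var context = { name: "world", messages: ["a", "b", "c"] };
    // Mark.up resolves each {{...}} expression against the context, applying pipes left to right.
    Mark.up(template, context);  // "Hello, WORLD! You have 3 new messages."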
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/moment.min.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/moment.min.js
new file mode 100644
index 00000000000..d3425e9a1ee
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/moment.min.js
@@ -0,0 +1,6 @@
+//! moment.js
+//! version : 2.5.1
+//! authors : Tim Wood, Iskren Chernev, Moment.js contributors
+//! license : MIT
+//! momentjs.com
+;(function(a){function b(){return{empty:!1,unusedTokens:[],unusedInput:[],overflow:-2,charsLeftOver:0,nullInput:!1,invalidMonth:null,invalidFormat:!1,userInvalidated:!1,iso:!1}}function c(a,b){return function(c){return k(a.call(this,c),b)}}function d(a,b){return function(c){return this.lang().ordinal(a.call(this,c),b)}}function e(){}function f(a){w(a),h(this,a)}function g(a){var b=q(a),c=b.year||0,d=b.month||0,e=b.week||0,f=b.day||0,g=b.hour||0,h=b.minute||0,i=b.second||0,j=b.millisecond||0;this._milliseconds=+j+1e3*i+6e4*h+36e5*g,this._days=+f+7*e,this._months=+d+12*c,this._data={},this._bubble()}function h(a,b){for(var c in b)b.hasOwnProperty(c)&&(a[c]=b[c]);return b.hasOwnProperty("toString")&&(a.toString=b.toString),b.hasOwnProperty("valueOf")&&(a.valueOf=b.valueOf),a}function i(a){var b,c={};for(b in a)a.hasOwnProperty(b)&&qb.hasOwnProperty(b)&&(c[b]=a[b]);return c}function j(a){return 0>a?Math.ceil(a):Math.floor(a)}function k(a,b,c){for(var d=""+Math.abs(a),e=a>=0;d.length<b;)d="0"+d;return(e?c?"+":"":"-")+d}function l(a,b,c,d){var e,f,g=b._milliseconds,h=b._days,i=b._months;g&&a._d.setTime(+a._d+g*c),(h||i)&&(e=a.minute(),f=a.hour()),h&&a.date(a.date()+h*c),i&&a.month(a.month()+i*c),g&&!d&&db.updateOffset(a),(h||i)&&(a.minute(e),a.hour(f))}function m(a){return"[object Array]"===Object.prototype.toString.call(a)}function n(a){return"[object Date]"===Object.prototype.toString.call(a)||a instanceof Date}function o(a,b,c){var d,e=Math.min(a.length,b.length),f=Math.abs(a.length-b.length),g=0;for(d=0;e>d;d++)(c&&a[d]!==b[d]||!c&&s(a[d])!==s(b[d]))&&g++;return g+f}function p(a){if(a){var b=a.toLowerCase().replace(/(.)s$/,"$1");a=Tb[a]||Ub[b]||b}return a}function q(a){var b,c,d={};for(c in a)a.hasOwnProperty(c)&&(b=p(c),b&&(d[b]=a[c]));return d}function r(b){var c,d;if(0===b.indexOf("week"))c=7,d="day";else{if(0!==b.indexOf("month"))return;c=12,d="month"}db[b]=function(e,f){var g,h,i=db.fn._lang[b],j=[];if("number"==typeof e&&(f=e,e=a),h=function(a){var b=db().utc().set(d,a);return i.call(db.fn._lang,b,e||"")},null!=f)return h(f);for(g=0;c>g;g++)j.push(h(g));return j}}function s(a){var b=+a,c=0;return 0!==b&&isFinite(b)&&(c=b>=0?Math.floor(b):Math.ceil(b)),c}function t(a,b){return new Date(Date.UTC(a,b+1,0)).getUTCDate()}function u(a){return v(a)?366:365}function v(a){return a%4===0&&a%100!==0||a%400===0}function w(a){var b;a._a&&-2===a._pf.overflow&&(b=a._a[jb]<0||a._a[jb]>11?jb:a._a[kb]<1||a._a[kb]>t(a._a[ib],a._a[jb])?kb:a._a[lb]<0||a._a[lb]>23?lb:a._a[mb]<0||a._a[mb]>59?mb:a._a[nb]<0||a._a[nb]>59?nb:a._a[ob]<0||a._a[ob]>999?ob:-1,a._pf._overflowDayOfYear&&(ib>b||b>kb)&&(b=kb),a._pf.overflow=b)}function x(a){return null==a._isValid&&(a._isValid=!isNaN(a._d.getTime())&&a._pf.overflow<0&&!a._pf.empty&&!a._pf.invalidMonth&&!a._pf.nullInput&&!a._pf.invalidFormat&&!a._pf.userInvalidated,a._strict&&(a._isValid=a._isValid&&0===a._pf.charsLeftOver&&0===a._pf.unusedTokens.length)),a._isValid}function y(a){return a?a.toLowerCase().replace("_","-"):a}function z(a,b){return b._isUTC?db(a).zone(b._offset||0):db(a).local()}function A(a,b){return b.abbr=a,pb[a]||(pb[a]=new e),pb[a].set(b),pb[a]}function B(a){delete pb[a]}function C(a){var b,c,d,e,f=0,g=function(a){if(!pb[a]&&rb)try{require("./lang/"+a)}catch(b){}return pb[a]};if(!a)return db.fn._lang;if(!m(a)){if(c=g(a))return c;a=[a]}for(;f<a.length;){for(e=y(a[f]).split("-"),b=e.length,d=y(a[f+1]),d=d?d.split("-"):null;b>0;){if(c=g(e.slice(0,b).join("-")))return c;if(d&&d.length>=b&&o(e,d,!0)>=b-1)break;b--}f++}return db.fn._lang}function 
D(a){return a.match(/\[[\s\S]/)?a.replace(/^\[|\]$/g,""):a.replace(/\\/g,"")}function E(a){var b,c,d=a.match(vb);for(b=0,c=d.length;c>b;b++)d[b]=Yb[d[b]]?Yb[d[b]]:D(d[b]);return function(e){var f="";for(b=0;c>b;b++)f+=d[b]instanceof Function?d[b].call(e,a):d[b];return f}}function F(a,b){return a.isValid()?(b=G(b,a.lang()),Vb[b]||(Vb[b]=E(b)),Vb[b](a)):a.lang().invalidDate()}function G(a,b){function c(a){return b.longDateFormat(a)||a}var d=5;for(wb.lastIndex=0;d>=0&&wb.test(a);)a=a.replace(wb,c),wb.lastIndex=0,d-=1;return a}function H(a,b){var c,d=b._strict;switch(a){case"DDDD":return Ib;case"YYYY":case"GGGG":case"gggg":return d?Jb:zb;case"Y":case"G":case"g":return Lb;case"YYYYYY":case"YYYYY":case"GGGGG":case"ggggg":return d?Kb:Ab;case"S":if(d)return Gb;case"SS":if(d)return Hb;case"SSS":if(d)return Ib;case"DDD":return yb;case"MMM":case"MMMM":case"dd":case"ddd":case"dddd":return Cb;case"a":case"A":return C(b._l)._meridiemParse;case"X":return Fb;case"Z":case"ZZ":return Db;case"T":return Eb;case"SSSS":return Bb;case"MM":case"DD":case"YY":case"GG":case"gg":case"HH":case"hh":case"mm":case"ss":case"ww":case"WW":return d?Hb:xb;case"M":case"D":case"d":case"H":case"h":case"m":case"s":case"w":case"W":case"e":case"E":return xb;default:return c=new RegExp(P(O(a.replace("\\","")),"i"))}}function I(a){a=a||"";var b=a.match(Db)||[],c=b[b.length-1]||[],d=(c+"").match(Qb)||["-",0,0],e=+(60*d[1])+s(d[2]);return"+"===d[0]?-e:e}function J(a,b,c){var d,e=c._a;switch(a){case"M":case"MM":null!=b&&(e[jb]=s(b)-1);break;case"MMM":case"MMMM":d=C(c._l).monthsParse(b),null!=d?e[jb]=d:c._pf.invalidMonth=b;break;case"D":case"DD":null!=b&&(e[kb]=s(b));break;case"DDD":case"DDDD":null!=b&&(c._dayOfYear=s(b));break;case"YY":e[ib]=s(b)+(s(b)>68?1900:2e3);break;case"YYYY":case"YYYYY":case"YYYYYY":e[ib]=s(b);break;case"a":case"A":c._isPm=C(c._l).isPM(b);break;case"H":case"HH":case"h":case"hh":e[lb]=s(b);break;case"m":case"mm":e[mb]=s(b);break;case"s":case"ss":e[nb]=s(b);break;case"S":case"SS":case"SSS":case"SSSS":e[ob]=s(1e3*("0."+b));break;case"X":c._d=new Date(1e3*parseFloat(b));break;case"Z":case"ZZ":c._useUTC=!0,c._tzm=I(b);break;case"w":case"ww":case"W":case"WW":case"d":case"dd":case"ddd":case"dddd":case"e":case"E":a=a.substr(0,1);case"gg":case"gggg":case"GG":case"GGGG":case"GGGGG":a=a.substr(0,2),b&&(c._w=c._w||{},c._w[a]=b)}}function K(a){var b,c,d,e,f,g,h,i,j,k,l=[];if(!a._d){for(d=M(a),a._w&&null==a._a[kb]&&null==a._a[jb]&&(f=function(b){var c=parseInt(b,10);return b?b.length<3?c>68?1900+c:2e3+c:c:null==a._a[ib]?db().weekYear():a._a[ib]},g=a._w,null!=g.GG||null!=g.W||null!=g.E?h=Z(f(g.GG),g.W||1,g.E,4,1):(i=C(a._l),j=null!=g.d?V(g.d,i):null!=g.e?parseInt(g.e,10)+i._week.dow:0,k=parseInt(g.w,10)||1,null!=g.d&&j<i._week.dow&&k++,h=Z(f(g.gg),k,j,i._week.doy,i._week.dow)),a._a[ib]=h.year,a._dayOfYear=h.dayOfYear),a._dayOfYear&&(e=null==a._a[ib]?d[ib]:a._a[ib],a._dayOfYear>u(e)&&(a._pf._overflowDayOfYear=!0),c=U(e,0,a._dayOfYear),a._a[jb]=c.getUTCMonth(),a._a[kb]=c.getUTCDate()),b=0;3>b&&null==a._a[b];++b)a._a[b]=l[b]=d[b];for(;7>b;b++)a._a[b]=l[b]=null==a._a[b]?2===b?1:0:a._a[b];l[lb]+=s((a._tzm||0)/60),l[mb]+=s((a._tzm||0)%60),a._d=(a._useUTC?U:T).apply(null,l)}}function L(a){var b;a._d||(b=q(a._i),a._a=[b.year,b.month,b.day,b.hour,b.minute,b.second,b.millisecond],K(a))}function M(a){var b=new Date;return a._useUTC?[b.getUTCFullYear(),b.getUTCMonth(),b.getUTCDate()]:[b.getFullYear(),b.getMonth(),b.getDate()]}function N(a){a._a=[],a._pf.empty=!0;var 
b,c,d,e,f,g=C(a._l),h=""+a._i,i=h.length,j=0;for(d=G(a._f,g).match(vb)||[],b=0;b<d.length;b++)e=d[b],c=(h.match(H(e,a))||[])[0],c&&(f=h.substr(0,h.indexOf(c)),f.length>0&&a._pf.unusedInput.push(f),h=h.slice(h.indexOf(c)+c.length),j+=c.length),Yb[e]?(c?a._pf.empty=!1:a._pf.unusedTokens.push(e),J(e,c,a)):a._strict&&!c&&a._pf.unusedTokens.push(e);a._pf.charsLeftOver=i-j,h.length>0&&a._pf.unusedInput.push(h),a._isPm&&a._a[lb]<12&&(a._a[lb]+=12),a._isPm===!1&&12===a._a[lb]&&(a._a[lb]=0),K(a),w(a)}function O(a){return a.replace(/\\(\[)|\\(\])|\[([^\]\[]*)\]|\\(.)/g,function(a,b,c,d,e){return b||c||d||e})}function P(a){return a.replace(/[-\/\\^$*+?.()|[\]{}]/g,"\\$&")}function Q(a){var c,d,e,f,g;if(0===a._f.length)return a._pf.invalidFormat=!0,a._d=new Date(0/0),void 0;for(f=0;f<a._f.length;f++)g=0,c=h({},a),c._pf=b(),c._f=a._f[f],N(c),x(c)&&(g+=c._pf.charsLeftOver,g+=10*c._pf.unusedTokens.length,c._pf.score=g,(null==e||e>g)&&(e=g,d=c));h(a,d||c)}function R(a){var b,c,d=a._i,e=Mb.exec(d);if(e){for(a._pf.iso=!0,b=0,c=Ob.length;c>b;b++)if(Ob[b][1].exec(d)){a._f=Ob[b][0]+(e[6]||" ");break}for(b=0,c=Pb.length;c>b;b++)if(Pb[b][1].exec(d)){a._f+=Pb[b][0];break}d.match(Db)&&(a._f+="Z"),N(a)}else a._d=new Date(d)}function S(b){var c=b._i,d=sb.exec(c);c===a?b._d=new Date:d?b._d=new Date(+d[1]):"string"==typeof c?R(b):m(c)?(b._a=c.slice(0),K(b)):n(c)?b._d=new Date(+c):"object"==typeof c?L(b):b._d=new Date(c)}function T(a,b,c,d,e,f,g){var h=new Date(a,b,c,d,e,f,g);return 1970>a&&h.setFullYear(a),h}function U(a){var b=new Date(Date.UTC.apply(null,arguments));return 1970>a&&b.setUTCFullYear(a),b}function V(a,b){if("string"==typeof a)if(isNaN(a)){if(a=b.weekdaysParse(a),"number"!=typeof a)return null}else a=parseInt(a,10);return a}function W(a,b,c,d,e){return e.relativeTime(b||1,!!c,a,d)}function X(a,b,c){var d=hb(Math.abs(a)/1e3),e=hb(d/60),f=hb(e/60),g=hb(f/24),h=hb(g/365),i=45>d&&["s",d]||1===e&&["m"]||45>e&&["mm",e]||1===f&&["h"]||22>f&&["hh",f]||1===g&&["d"]||25>=g&&["dd",g]||45>=g&&["M"]||345>g&&["MM",hb(g/30)]||1===h&&["y"]||["yy",h];return i[2]=b,i[3]=a>0,i[4]=c,W.apply({},i)}function Y(a,b,c){var d,e=c-b,f=c-a.day();return f>e&&(f-=7),e-7>f&&(f+=7),d=db(a).add("d",f),{week:Math.ceil(d.dayOfYear()/7),year:d.year()}}function Z(a,b,c,d,e){var f,g,h=U(a,0,1).getUTCDay();return c=null!=c?c:e,f=e-h+(h>d?7:0)-(e>h?7:0),g=7*(b-1)+(c-e)+f+1,{year:g>0?a:a-1,dayOfYear:g>0?g:u(a-1)+g}}function $(a){var b=a._i,c=a._f;return null===b?db.invalid({nullInput:!0}):("string"==typeof b&&(a._i=b=C().preparse(b)),db.isMoment(b)?(a=i(b),a._d=new Date(+b._d)):c?m(c)?Q(a):N(a):S(a),new f(a))}function _(a,b){db.fn[a]=db.fn[a+"s"]=function(a){var c=this._isUTC?"UTC":"";return null!=a?(this._d["set"+c+b](a),db.updateOffset(this),this):this._d["get"+c+b]()}}function ab(a){db.duration.fn[a]=function(){return this._data[a]}}function bb(a,b){db.duration.fn["as"+a]=function(){return+this/b}}function cb(a){var b=!1,c=db;"undefined"==typeof ender&&(a?(gb.moment=function(){return!b&&console&&console.warn&&(b=!0,console.warn("Accessing Moment through the global scope is deprecated, and will be removed in an upcoming release.")),c.apply(null,arguments)},h(gb.moment,c)):gb.moment=db)}for(var db,eb,fb="2.5.1",gb=this,hb=Math.round,ib=0,jb=1,kb=2,lb=3,mb=4,nb=5,ob=6,pb={},qb={_isAMomentObject:null,_i:null,_f:null,_l:null,_strict:null,_isUTC:null,_offset:null,_pf:null,_lang:null},rb="undefined"!=typeof module&&module.exports&&"undefined"!=typeof 
require,sb=/^\/?Date\((\-?\d+)/i,tb=/(\-)?(?:(\d*)\.)?(\d+)\:(\d+)(?:\:(\d+)\.?(\d{3})?)?/,ub=/^(-)?P(?:(?:([0-9,.]*)Y)?(?:([0-9,.]*)M)?(?:([0-9,.]*)D)?(?:T(?:([0-9,.]*)H)?(?:([0-9,.]*)M)?(?:([0-9,.]*)S)?)?|([0-9,.]*)W)$/,vb=/(\[[^\[]*\])|(\\)?(Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|YYYYYY|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|S{1,4}|X|zz?|ZZ?|.)/g,wb=/(\[[^\[]*\])|(\\)?(LT|LL?L?L?|l{1,4})/g,xb=/\d\d?/,yb=/\d{1,3}/,zb=/\d{1,4}/,Ab=/[+\-]?\d{1,6}/,Bb=/\d+/,Cb=/[0-9]*['a-z\u00A0-\u05FF\u0700-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+|[\u0600-\u06FF\/]+(\s*?[\u0600-\u06FF]+){1,2}/i,Db=/Z|[\+\-]\d\d:?\d\d/gi,Eb=/T/i,Fb=/[\+\-]?\d+(\.\d{1,3})?/,Gb=/\d/,Hb=/\d\d/,Ib=/\d{3}/,Jb=/\d{4}/,Kb=/[+-]?\d{6}/,Lb=/[+-]?\d+/,Mb=/^\s*(?:[+-]\d{6}|\d{4})-(?:(\d\d-\d\d)|(W\d\d$)|(W\d\d-\d)|(\d\d\d))((T| )(\d\d(:\d\d(:\d\d(\.\d+)?)?)?)?([\+\-]\d\d(?::?\d\d)?|\s*Z)?)?$/,Nb="YYYY-MM-DDTHH:mm:ssZ",Ob=[["YYYYYY-MM-DD",/[+-]\d{6}-\d{2}-\d{2}/],["YYYY-MM-DD",/\d{4}-\d{2}-\d{2}/],["GGGG-[W]WW-E",/\d{4}-W\d{2}-\d/],["GGGG-[W]WW",/\d{4}-W\d{2}/],["YYYY-DDD",/\d{4}-\d{3}/]],Pb=[["HH:mm:ss.SSSS",/(T| )\d\d:\d\d:\d\d\.\d{1,3}/],["HH:mm:ss",/(T| )\d\d:\d\d:\d\d/],["HH:mm",/(T| )\d\d:\d\d/],["HH",/(T| )\d\d/]],Qb=/([\+\-]|\d\d)/gi,Rb="Date|Hours|Minutes|Seconds|Milliseconds".split("|"),Sb={Milliseconds:1,Seconds:1e3,Minutes:6e4,Hours:36e5,Days:864e5,Months:2592e6,Years:31536e6},Tb={ms:"millisecond",s:"second",m:"minute",h:"hour",d:"day",D:"date",w:"week",W:"isoWeek",M:"month",y:"year",DDD:"dayOfYear",e:"weekday",E:"isoWeekday",gg:"weekYear",GG:"isoWeekYear"},Ub={dayofyear:"dayOfYear",isoweekday:"isoWeekday",isoweek:"isoWeek",weekyear:"weekYear",isoweekyear:"isoWeekYear"},Vb={},Wb="DDD w W M D d".split(" "),Xb="M D H h m s w W".split(" "),Yb={M:function(){return this.month()+1},MMM:function(a){return this.lang().monthsShort(this,a)},MMMM:function(a){return this.lang().months(this,a)},D:function(){return this.date()},DDD:function(){return this.dayOfYear()},d:function(){return this.day()},dd:function(a){return this.lang().weekdaysMin(this,a)},ddd:function(a){return this.lang().weekdaysShort(this,a)},dddd:function(a){return this.lang().weekdays(this,a)},w:function(){return this.week()},W:function(){return this.isoWeek()},YY:function(){return k(this.year()%100,2)},YYYY:function(){return k(this.year(),4)},YYYYY:function(){return k(this.year(),5)},YYYYYY:function(){var a=this.year(),b=a>=0?"+":"-";return b+k(Math.abs(a),6)},gg:function(){return k(this.weekYear()%100,2)},gggg:function(){return k(this.weekYear(),4)},ggggg:function(){return k(this.weekYear(),5)},GG:function(){return k(this.isoWeekYear()%100,2)},GGGG:function(){return k(this.isoWeekYear(),4)},GGGGG:function(){return k(this.isoWeekYear(),5)},e:function(){return this.weekday()},E:function(){return this.isoWeekday()},a:function(){return this.lang().meridiem(this.hours(),this.minutes(),!0)},A:function(){return this.lang().meridiem(this.hours(),this.minutes(),!1)},H:function(){return this.hours()},h:function(){return this.hours()%12||12},m:function(){return this.minutes()},s:function(){return this.seconds()},S:function(){return s(this.milliseconds()/100)},SS:function(){return k(s(this.milliseconds()/10),2)},SSS:function(){return k(this.milliseconds(),3)},SSSS:function(){return k(this.milliseconds(),3)},Z:function(){var a=-this.zone(),b="+";return 0>a&&(a=-a,b="-"),b+k(s(a/60),2)+":"+k(s(a)%60,2)},ZZ:function(){var a=-this.zone(),b="+";return 0>a&&(a=-a,b="-"),b+k(s(a/60),2)+k(s(a)%60,2)},z:function(){return this.zoneAbbr()},zz:function(){return 
this.zoneName()},X:function(){return this.unix()},Q:function(){return this.quarter()}},Zb=["months","monthsShort","weekdays","weekdaysShort","weekdaysMin"];Wb.length;)eb=Wb.pop(),Yb[eb+"o"]=d(Yb[eb],eb);for(;Xb.length;)eb=Xb.pop(),Yb[eb+eb]=c(Yb[eb],2);for(Yb.DDDD=c(Yb.DDD,3),h(e.prototype,{set:function(a){var b,c;for(c in a)b=a[c],"function"==typeof b?this[c]=b:this["_"+c]=b},_months:"January_February_March_April_May_June_July_August_September_October_November_December".split("_"),months:function(a){return this._months[a.month()]},_monthsShort:"Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec".split("_"),monthsShort:function(a){return this._monthsShort[a.month()]},monthsParse:function(a){var b,c,d;for(this._monthsParse||(this._monthsParse=[]),b=0;12>b;b++)if(this._monthsParse[b]||(c=db.utc([2e3,b]),d="^"+this.months(c,"")+"|^"+this.monthsShort(c,""),this._monthsParse[b]=new RegExp(d.replace(".",""),"i")),this._monthsParse[b].test(a))return b},_weekdays:"Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),weekdays:function(a){return this._weekdays[a.day()]},_weekdaysShort:"Sun_Mon_Tue_Wed_Thu_Fri_Sat".split("_"),weekdaysShort:function(a){return this._weekdaysShort[a.day()]},_weekdaysMin:"Su_Mo_Tu_We_Th_Fr_Sa".split("_"),weekdaysMin:function(a){return this._weekdaysMin[a.day()]},weekdaysParse:function(a){var b,c,d;for(this._weekdaysParse||(this._weekdaysParse=[]),b=0;7>b;b++)if(this._weekdaysParse[b]||(c=db([2e3,1]).day(b),d="^"+this.weekdays(c,"")+"|^"+this.weekdaysShort(c,"")+"|^"+this.weekdaysMin(c,""),this._weekdaysParse[b]=new RegExp(d.replace(".",""),"i")),this._weekdaysParse[b].test(a))return b},_longDateFormat:{LT:"h:mm A",L:"MM/DD/YYYY",LL:"MMMM D YYYY",LLL:"MMMM D YYYY LT",LLLL:"dddd, MMMM D YYYY LT"},longDateFormat:function(a){var b=this._longDateFormat[a];return!b&&this._longDateFormat[a.toUpperCase()]&&(b=this._longDateFormat[a.toUpperCase()].replace(/MMMM|MM|DD|dddd/g,function(a){return a.slice(1)}),this._longDateFormat[a]=b),b},isPM:function(a){return"p"===(a+"").toLowerCase().charAt(0)},_meridiemParse:/[ap]\.?m?\.?/i,meridiem:function(a,b,c){return a>11?c?"pm":"PM":c?"am":"AM"},_calendar:{sameDay:"[Today at] LT",nextDay:"[Tomorrow at] LT",nextWeek:"dddd [at] LT",lastDay:"[Yesterday at] LT",lastWeek:"[Last] dddd [at] LT",sameElse:"L"},calendar:function(a,b){var c=this._calendar[a];return"function"==typeof c?c.apply(b):c},_relativeTime:{future:"in %s",past:"%s ago",s:"a few seconds",m:"a minute",mm:"%d minutes",h:"an hour",hh:"%d hours",d:"a day",dd:"%d days",M:"a month",MM:"%d months",y:"a year",yy:"%d years"},relativeTime:function(a,b,c,d){var e=this._relativeTime[c];return"function"==typeof e?e(a,b,c,d):e.replace(/%d/i,a)},pastFuture:function(a,b){var c=this._relativeTime[a>0?"future":"past"];return"function"==typeof c?c(b):c.replace(/%s/i,b)},ordinal:function(a){return this._ordinal.replace("%d",a)},_ordinal:"%d",preparse:function(a){return a},postformat:function(a){return a},week:function(a){return Y(a,this._week.dow,this._week.doy).week},_week:{dow:0,doy:6},_invalidDate:"Invalid date",invalidDate:function(){return this._invalidDate}}),db=function(c,d,e,f){var g;return"boolean"==typeof e&&(f=e,e=a),g={},g._isAMomentObject=!0,g._i=c,g._f=d,g._l=e,g._strict=f,g._isUTC=!1,g._pf=b(),$(g)},db.utc=function(c,d,e,f){var g;return"boolean"==typeof e&&(f=e,e=a),g={},g._isAMomentObject=!0,g._useUTC=!0,g._isUTC=!0,g._l=e,g._i=c,g._f=d,g._strict=f,g._pf=b(),$(g).utc()},db.unix=function(a){return db(1e3*a)},db.duration=function(a,b){var c,d,e,f=a,h=null;return 
db.isDuration(a)?f={ms:a._milliseconds,d:a._days,M:a._months}:"number"==typeof a?(f={},b?f[b]=a:f.milliseconds=a):(h=tb.exec(a))?(c="-"===h[1]?-1:1,f={y:0,d:s(h[kb])*c,h:s(h[lb])*c,m:s(h[mb])*c,s:s(h[nb])*c,ms:s(h[ob])*c}):(h=ub.exec(a))&&(c="-"===h[1]?-1:1,e=function(a){var b=a&&parseFloat(a.replace(",","."));return(isNaN(b)?0:b)*c},f={y:e(h[2]),M:e(h[3]),d:e(h[4]),h:e(h[5]),m:e(h[6]),s:e(h[7]),w:e(h[8])}),d=new g(f),db.isDuration(a)&&a.hasOwnProperty("_lang")&&(d._lang=a._lang),d},db.version=fb,db.defaultFormat=Nb,db.updateOffset=function(){},db.lang=function(a,b){var c;return a?(b?A(y(a),b):null===b?(B(a),a="en"):pb[a]||C(a),c=db.duration.fn._lang=db.fn._lang=C(a),c._abbr):db.fn._lang._abbr},db.langData=function(a){return a&&a._lang&&a._lang._abbr&&(a=a._lang._abbr),C(a)},db.isMoment=function(a){return a instanceof f||null!=a&&a.hasOwnProperty("_isAMomentObject")},db.isDuration=function(a){return a instanceof g},eb=Zb.length-1;eb>=0;--eb)r(Zb[eb]);for(db.normalizeUnits=function(a){return p(a)},db.invalid=function(a){var b=db.utc(0/0);return null!=a?h(b._pf,a):b._pf.userInvalidated=!0,b},db.parseZone=function(a){return db(a).parseZone()},h(db.fn=f.prototype,{clone:function(){return db(this)},valueOf:function(){return+this._d+6e4*(this._offset||0)},unix:function(){return Math.floor(+this/1e3)},toString:function(){return this.clone().lang("en").format("ddd MMM DD YYYY HH:mm:ss [GMT]ZZ")},toDate:function(){return this._offset?new Date(+this):this._d},toISOString:function(){var a=db(this).utc();return 0<a.year()&&a.year()<=9999?F(a,"YYYY-MM-DD[T]HH:mm:ss.SSS[Z]"):F(a,"YYYYYY-MM-DD[T]HH:mm:ss.SSS[Z]")},toArray:function(){var a=this;return[a.year(),a.month(),a.date(),a.hours(),a.minutes(),a.seconds(),a.milliseconds()]},isValid:function(){return x(this)},isDSTShifted:function(){return this._a?this.isValid()&&o(this._a,(this._isUTC?db.utc(this._a):db(this._a)).toArray())>0:!1},parsingFlags:function(){return h({},this._pf)},invalidAt:function(){return this._pf.overflow},utc:function(){return this.zone(0)},local:function(){return this.zone(0),this._isUTC=!1,this},format:function(a){var b=F(this,a||db.defaultFormat);return this.lang().postformat(b)},add:function(a,b){var c;return c="string"==typeof a?db.duration(+b,a):db.duration(a,b),l(this,c,1),this},subtract:function(a,b){var c;return c="string"==typeof a?db.duration(+b,a):db.duration(a,b),l(this,c,-1),this},diff:function(a,b,c){var d,e,f=z(a,this),g=6e4*(this.zone()-f.zone());return b=p(b),"year"===b||"month"===b?(d=432e5*(this.daysInMonth()+f.daysInMonth()),e=12*(this.year()-f.year())+(this.month()-f.month()),e+=(this-db(this).startOf("month")-(f-db(f).startOf("month")))/d,e-=6e4*(this.zone()-db(this).startOf("month").zone()-(f.zone()-db(f).startOf("month").zone()))/d,"year"===b&&(e/=12)):(d=this-f,e="second"===b?d/1e3:"minute"===b?d/6e4:"hour"===b?d/36e5:"day"===b?(d-g)/864e5:"week"===b?(d-g)/6048e5:d),c?e:j(e)},from:function(a,b){return db.duration(this.diff(a)).lang(this.lang()._abbr).humanize(!b)},fromNow:function(a){return this.from(db(),a)},calendar:function(){var a=z(db(),this).startOf("day"),b=this.diff(a,"days",!0),c=-6>b?"sameElse":-1>b?"lastWeek":0>b?"lastDay":1>b?"sameDay":2>b?"nextDay":7>b?"nextWeek":"sameElse";return this.format(this.lang().calendar(c,this))},isLeapYear:function(){return v(this.year())},isDST:function(){return this.zone()<this.clone().month(0).zone()||this.zone()<this.clone().month(5).zone()},day:function(a){var b=this._isUTC?this._d.getUTCDay():this._d.getDay();return 
null!=a?(a=V(a,this.lang()),this.add({d:a-b})):b},month:function(a){var b,c=this._isUTC?"UTC":"";return null!=a?"string"==typeof a&&(a=this.lang().monthsParse(a),"number"!=typeof a)?this:(b=this.date(),this.date(1),this._d["set"+c+"Month"](a),this.date(Math.min(b,this.daysInMonth())),db.updateOffset(this),this):this._d["get"+c+"Month"]()},startOf:function(a){switch(a=p(a)){case"year":this.month(0);case"month":this.date(1);case"week":case"isoWeek":case"day":this.hours(0);case"hour":this.minutes(0);case"minute":this.seconds(0);case"second":this.milliseconds(0)}return"week"===a?this.weekday(0):"isoWeek"===a&&this.isoWeekday(1),this},endOf:function(a){return a=p(a),this.startOf(a).add("isoWeek"===a?"week":a,1).subtract("ms",1)},isAfter:function(a,b){return b="undefined"!=typeof b?b:"millisecond",+this.clone().startOf(b)>+db(a).startOf(b)},isBefore:function(a,b){return b="undefined"!=typeof b?b:"millisecond",+this.clone().startOf(b)<+db(a).startOf(b)},isSame:function(a,b){return b=b||"ms",+this.clone().startOf(b)===+z(a,this).startOf(b)},min:function(a){return a=db.apply(null,arguments),this>a?this:a},max:function(a){return a=db.apply(null,arguments),a>this?this:a},zone:function(a){var b=this._offset||0;return null==a?this._isUTC?b:this._d.getTimezoneOffset():("string"==typeof a&&(a=I(a)),Math.abs(a)<16&&(a=60*a),this._offset=a,this._isUTC=!0,b!==a&&l(this,db.duration(b-a,"m"),1,!0),this)},zoneAbbr:function(){return this._isUTC?"UTC":""},zoneName:function(){return this._isUTC?"Coordinated Universal Time":""},parseZone:function(){return this._tzm?this.zone(this._tzm):"string"==typeof this._i&&this.zone(this._i),this},hasAlignedHourOffset:function(a){return a=a?db(a).zone():0,(this.zone()-a)%60===0},daysInMonth:function(){return t(this.year(),this.month())},dayOfYear:function(a){var b=hb((db(this).startOf("day")-db(this).startOf("year"))/864e5)+1;return null==a?b:this.add("d",a-b)},quarter:function(){return Math.ceil((this.month()+1)/3)},weekYear:function(a){var b=Y(this,this.lang()._week.dow,this.lang()._week.doy).year;return null==a?b:this.add("y",a-b)},isoWeekYear:function(a){var b=Y(this,1,4).year;return null==a?b:this.add("y",a-b)},week:function(a){var b=this.lang().week(this);return null==a?b:this.add("d",7*(a-b))},isoWeek:function(a){var b=Y(this,1,4).week;return null==a?b:this.add("d",7*(a-b))},weekday:function(a){var b=(this.day()+7-this.lang()._week.dow)%7;return null==a?b:this.add("d",a-b)},isoWeekday:function(a){return null==a?this.day()||7:this.day(this.day()%7?a:a-7)},get:function(a){return a=p(a),this[a]()},set:function(a,b){return a=p(a),"function"==typeof this[a]&&this[a](b),this},lang:function(b){return b===a?this._lang:(this._lang=C(b),this)}}),eb=0;eb<Rb.length;eb++)_(Rb[eb].toLowerCase().replace(/s$/,""),Rb[eb]);_("year","FullYear"),db.fn.days=db.fn.day,db.fn.months=db.fn.month,db.fn.weeks=db.fn.week,db.fn.isoWeeks=db.fn.isoWeek,db.fn.toJSON=db.fn.toISOString,h(db.duration.fn=g.prototype,{_bubble:function(){var a,b,c,d,e=this._milliseconds,f=this._days,g=this._months,h=this._data;h.milliseconds=e%1e3,a=j(e/1e3),h.seconds=a%60,b=j(a/60),h.minutes=b%60,c=j(b/60),h.hours=c%24,f+=j(c/24),h.days=f%30,g+=j(f/30),h.months=g%12,d=j(g/12),h.years=d},weeks:function(){return j(this.days()/7)},valueOf:function(){return this._milliseconds+864e5*this._days+this._months%12*2592e6+31536e6*s(this._months/12)},humanize:function(a){var b=+this,c=X(b,!a,this.lang());return a&&(c=this.lang().pastFuture(b,c)),this.lang().postformat(c)},add:function(a,b){var c=db.duration(a,b);return 
this._milliseconds+=c._milliseconds,this._days+=c._days,this._months+=c._months,this._bubble(),this},subtract:function(a,b){var c=db.duration(a,b);return this._milliseconds-=c._milliseconds,this._days-=c._days,this._months-=c._months,this._bubble(),this},get:function(a){return a=p(a),this[a.toLowerCase()+"s"]()},as:function(a){return a=p(a),this["as"+a.charAt(0).toUpperCase()+a.slice(1)+"s"]()},lang:db.fn.lang,toIsoString:function(){var a=Math.abs(this.years()),b=Math.abs(this.months()),c=Math.abs(this.days()),d=Math.abs(this.hours()),e=Math.abs(this.minutes()),f=Math.abs(this.seconds()+this.milliseconds()/1e3);return this.asSeconds()?(this.asSeconds()<0?"-":"")+"P"+(a?a+"Y":"")+(b?b+"M":"")+(c?c+"D":"")+(d||e||f?"T":"")+(d?d+"H":"")+(e?e+"M":"")+(f?f+"S":""):"P0D"}});for(eb in Sb)Sb.hasOwnProperty(eb)&&(bb(eb,Sb[eb]),ab(eb.toLowerCase()));bb("Weeks",6048e5),db.duration.fn.asMonths=function(){return(+this-31536e6*this.years())/2592e6+12*this.years()},db.lang("en",{ordinal:function(a){var b=a%10,c=1===s(a%100/10)?"th":1===b?"st":2===b?"nd":3===b?"rd":"th";return a+c}}),rb?(module.exports=db,cb(!0)):"function"==typeof define&&define.amd?define("moment",function(b,c,d){return d.config&&d.config()&&d.config().noGlobal!==!0&&cb(d.config().noGlobal===a),db}):cb()}).call(this); \ No newline at end of file
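moment.js 2.5.1 (above) handles date parsing, formatting, and relative times. A minimal sketch of the core API:

    var m = moment("2014-01-05", "YYYY-MM-DD");  // parse with an explicit format
    m.format("dddd, MMMM Do YYYY");              // "Sunday, January 5th 2014"
    moment().subtract("days", 3).fromNow();      // "3 days ago"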
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/taboverride.min.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/taboverride.min.js
new file mode 100644
index 00000000000..59ef198182d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/lib/taboverride.min.js
@@ -0,0 +1,3 @@
+/*! taboverride v4.0.2 | https://github.com/wjbryant/taboverride
+Copyright (c) 2014 Bill Bryant | http://opensource.org/licenses/mit */
+!function(a){"use strict";var b;"object"==typeof exports?a(exports):"function"==typeof define&&define.amd?define(["exports"],a):(b=window.tabOverride={},a(b))}(function(a){"use strict";function b(a,b){var c,d,e,f=["alt","ctrl","meta","shift"],g=a.length,h=!0;for(c=0;g>c;c+=1)if(!b[a[c]]){h=!1;break}if(h)for(c=0;c<f.length;c+=1){if(e=f[c]+"Key",b[e])if(g){for(h=!1,d=0;g>d;d+=1)if(e===a[d]){h=!0;break}}else h=!1;if(!h)break}return h}function c(a,c){return a===q&&b(s,c)}function d(a,c){return a===r&&b(t,c)}function e(a,b){return function(c,d){var e,f="";if(arguments.length){if("number"==typeof c&&(a(c),b.length=0,d&&d.length))for(e=0;e<d.length;e+=1)b.push(d[e]+"Key");return this}for(e=0;e<b.length;e+=1)f+=b[e].slice(0,-3)+"+";return f+a()}}function f(a){a=a||event;var b,e,f,g,h,i,j,k,l,s,t,w,x,y,z,A,B,C,D=a.currentTarget||a.srcElement,E=a.keyCode,F="character";if((!D.nodeName||"textarea"===D.nodeName.toLowerCase())&&(E===q||E===r||13===E&&u)){if(v=!1,f=D.value,k=D.scrollTop,"number"==typeof D.selectionStart)l=D.selectionStart,s=D.selectionEnd,t=f.slice(l,s);else{if(!o.selection)return;g=o.selection.createRange(),t=g.text,h=g.duplicate(),h.moveToElementText(D),h.setEndPoint("EndToEnd",g),s=h.text.length,l=s-t.length,n>1?(i=f.slice(0,l).split(m).length-1,j=t.split(m).length-1):i=j=0}if(E===q||E===r)if(b=p,e=b.length,y=0,z=0,A=0,l!==s&&-1!==t.indexOf("\n"))if(w=0===l||"\n"===f.charAt(l-1)?l:f.lastIndexOf("\n",l-1)+1,s===f.length||"\n"===f.charAt(s)?x=s:"\n"===f.charAt(s-1)?x=s-1:(x=f.indexOf("\n",s),-1===x&&(x=f.length)),c(E,a))y=1,D.value=f.slice(0,w)+b+f.slice(w,x).replace(/\n/g,function(){return y+=1,"\n"+b})+f.slice(x),g?(g.collapse(),g.moveEnd(F,s+y*e-j-i),g.moveStart(F,l+e-i),g.select()):(D.selectionStart=l+e,D.selectionEnd=s+y*e,D.scrollTop=k);else{if(!d(E,a))return;0===f.slice(w).indexOf(b)&&(w===l?t=t.slice(e):A=e,z=e),D.value=f.slice(0,w)+f.slice(w+A,l)+t.replace(new RegExp("\n"+b,"g"),function(){return y+=1,"\n"})+f.slice(s),g?(g.collapse(),g.moveEnd(F,s-z-y*e-j-i),g.moveStart(F,l-A-i),g.select()):(D.selectionStart=l-A,D.selectionEnd=s-z-y*e)}else if(c(E,a))g?(g.text=b,g.select()):(D.value=f.slice(0,l)+b+f.slice(s),D.selectionEnd=D.selectionStart=l+e,D.scrollTop=k);else{if(!d(E,a))return;0===f.slice(l-e).indexOf(b)&&(D.value=f.slice(0,l-e)+f.slice(l),g?(g.move(F,l-e-i),g.select()):(D.selectionEnd=D.selectionStart=l-e,D.scrollTop=k))}else if(u){if(0===l||"\n"===f.charAt(l-1))return void(v=!0);if(w=f.lastIndexOf("\n",l-1)+1,x=f.indexOf("\n",l),-1===x&&(x=f.length),B=f.slice(w,x).match(/^[ \t]*/)[0],C=B.length,w+C>l)return void(v=!0);g?(g.text="\n"+B,g.select()):(D.value=f.slice(0,l)+"\n"+B+f.slice(s),D.selectionEnd=D.selectionStart=l+n+C,D.scrollTop=k)}return a.preventDefault?void a.preventDefault():(a.returnValue=!1,!1)}}function g(a){a=a||event;var b=a.keyCode;if(c(b,a)||d(b,a)||13===b&&u&&!v){if(!a.preventDefault)return a.returnValue=!1,!1;a.preventDefault()}}function h(a,b){var c,d=x[a]||[],e=d.length;for(c=0;e>c;c+=1)d[c].apply(null,b)}function i(a){function b(b){for(c=0;f>c;c+=1)b(a[c].type,a[c].handler)}var c,d,e,f=a.length;return o.addEventListener?(d=function(a){b(function(b,c){a.removeEventListener(b,c,!1)})},e=function(a){d(a),b(function(b,c){a.addEventListener(b,c,!1)})}):o.attachEvent&&(d=function(a){b(function(b,c){a.detachEvent("on"+b,c)})},e=function(a){d(a),b(function(b,c){a.attachEvent("on"+b,c)})}),{add:e,remove:d}}function j(a){h("addListeners",[a]),l.add(a)}function k(a){h("removeListeners",[a]),l.remove(a)}var l,m,n,o=window.document,p=" 
",q=9,r=9,s=[],t=["shiftKey"],u=!0,v=!1,w=o.createElement("textarea"),x={};l=i([{type:"keydown",handler:f},{type:"keypress",handler:g}]),w.value="\n",m=w.value,n=m.length,w=null,a.utils={executeExtensions:h,isValidModifierKeyCombo:b,createListeners:i,addListeners:j,removeListeners:k},a.handlers={keydown:f,keypress:g},a.addExtension=function(a,b){return a&&"string"==typeof a&&"function"==typeof b&&(x[a]||(x[a]=[]),x[a].push(b)),this},a.set=function(a,b){var c,d,e,f,g,i,l;if(a)for(c=arguments.length<2||b,d=a,e=d.length,"number"!=typeof e&&(d=[d],e=1),c?(f=j,g="true"):(f=k,g=""),i=0;e>i;i+=1)l=d[i],l&&l.nodeName&&"textarea"===l.nodeName.toLowerCase()&&(h("set",[l,c]),l.setAttribute("data-taboverride-enabled",g),f(l));return this},a.tabSize=function(a){var b;if(arguments.length){if(a&&"number"==typeof a&&a>0)for(p="",b=0;a>b;b+=1)p+=" ";else p=" ";return this}return" "===p?0:p.length},a.autoIndent=function(a){return arguments.length?(u=a?!0:!1,this):u},a.tabKey=e(function(a){return arguments.length?void(q=a):q},s),a.untabKey=e(function(a){return arguments.length?void(r=a):r},t)}); \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/poller.js b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/poller.js
new file mode 100644
index 00000000000..d8ab1e8eb9f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/client/resources/js/poller.js
@@ -0,0 +1,130 @@
+function Poller(config)
+{
+ // CONFIGURABLE
+ var endpoints = {
+ up: "/status/poll", // url to poll when the server is up
+ down: "/status" // url to poll at regular intervals when the server is down
+ };
+ var timeout = 60000 * 2; // ms before each long-poll request times out (2 minutes)
+ var intervalMs = 1000; // ms between polls when the server is down
+
+ // INTERNAL STATE
+ var up = true; // whether or not we can connect to the server
+ var req; // the pending ajax request
+ var downPoller; // the setInterval for polling when the server is down
+ var self = this;
+
+ if (typeof config === 'object')
+ {
+ if (typeof config.endpoints === 'object')
+ {
+ endpoints.up = config.endpoints.up;
+ endpoints.down = config.endpoints.down;
+ }
+ if (config.timeout)
+ timeout = config.timeout;
+ if (config.interval)
+ intervalMs = config.interval;
+ }
+
+ $(self).on('pollstart', function(event, data) {
+ log("Started poller");
+ }).on('pollstop', function(event, data) {
+ log("Stopped poller");
+ });
+
+
+ this.start = function()
+ {
+ if (req)
+ return false;
+ doPoll();
+ $(self).trigger('pollstart', {url: endpoints.up, timeout: timeout});
+ return true;
+ };
+
+ this.stop = function()
+ {
+ if (!req)
+ return false;
+ req.abort();
+ req = undefined;
+ var stopped = true; // declared locally so this doesn't leak a global; nothing currently reads the flag
+ stopDownPoller();
+ $(self).trigger('pollstop', {});
+ return true;
+ };
+
+ this.setTimeout = function(tmout)
+ {
+ timeout = tmout; // takes effect at next poll
+ };
+
+ this.isUp = function()
+ {
+ return up;
+ };
+
+ function doPoll()
+ {
+ req = $.ajax({
+ url: endpoints.up + "?timeout=" + timeout,
+ timeout: timeout
+ }).done(pollSuccess).fail(pollFailed);
+ }
+
+ function pollSuccess(data, message, jqxhr)
+ {
+ stopDownPoller();
+ doPoll();
+
+ var wasUp = up;
+ up = true;
+ var status = data; // 'var' avoids accidentally assigning to window.status, which would coerce the object to a string
+
+ var arg = {
+ status: status,
+ data: data,
+ jqxhr: jqxhr
+ };
+
+ if (!wasUp)
+ $(convey.poller).trigger('serverstarting', arg);
+ else
+ $(self).trigger('pollsuccess', arg);
+ }
+
+ function pollFailed(jqxhr, message, exception)
+ {
+ if (message === "timeout")
+ {
+ log("Poller timeout; re-polling...", req);
+ doPoll(); // in our case, timeout actually means no activity; poll again
+ return;
+ }
+
+ up = false;
+
+ downPoller = setInterval(function()
+ {
+ // If the server is still down, do a ping to see
+ // if it's up; pollSuccess() will do the rest.
+ if (!up)
+ $.get(endpoints.down).done(pollSuccess);
+ }, intervalMs);
+
+ $(self).trigger('pollfail', {
+ exception: exception,
+ message: message,
+ jqxhr: jqxhr
+ });
+ }
+
+ function stopDownPoller()
+ {
+ if (!downPoller)
+ return;
+ clearInterval(downPoller);
+ downPoller = undefined;
+ }
+}
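A minimal sketch of how the Poller above is driven; the config keys and event names come from the code, while the handler bodies are only illustrative:

    var poller = new Poller({
        endpoints: { up: "/status/poll", down: "/status" },
        timeout: 60000 * 2,  // long-poll timeout in ms
        interval: 1000       // ping interval while the server is down
    });
    $(poller).on("pollsuccess", function (event, arg) {
        console.log("server status:", arg.status);
    }).on("pollfail", function (event, arg) {
        console.log("server unreachable:", arg.message);
    });
    poller.start();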
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/api.goconvey b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/api.goconvey
new file mode 100644
index 00000000000..79982854b53
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/api.goconvey
@@ -0,0 +1,2 @@
+#ignore
+-timeout=1s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/server.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/server.go
new file mode 100644
index 00000000000..6cea26daee8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/server.go
@@ -0,0 +1,164 @@
+package api
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "os"
+ "strconv"
+ "time"
+
+ "github.com/smartystreets/goconvey/web/server/contract"
+ "github.com/smartystreets/goconvey/web/server/messaging"
+)
+
+type HTTPServer struct {
+ watcher chan messaging.WatcherCommand
+ executor contract.Executor
+ latest *contract.CompleteOutput
+ currentRoot string
+ longpoll chan chan string
+ paused bool
+}
+
+func (self *HTTPServer) ReceiveUpdate(root string, update *contract.CompleteOutput) {
+ self.currentRoot = root
+ self.latest = update
+}
+
+func (self *HTTPServer) Watch(response http.ResponseWriter, request *http.Request) {
+ if request.Method == "POST" {
+ self.adjustRoot(response, request)
+ } else if request.Method == "GET" {
+ response.Write([]byte(self.currentRoot))
+ }
+}
+
+func (self *HTTPServer) adjustRoot(response http.ResponseWriter, request *http.Request) {
+ newRoot := self.parseQueryString("root", response, request)
+ if newRoot == "" {
+ return
+ }
+ info, err := os.Stat(newRoot) // TODO: how to unit test?
+ if err != nil { // check the error before touching info; os.Stat returns a nil FileInfo on failure
+ http.Error(response, err.Error(), http.StatusNotFound)
+ return
+ }
+ if !info.IsDir() {
+ http.Error(response, fmt.Sprintf("'%s' is not a directory.", newRoot), http.StatusNotFound)
+ return
+ }
+
+ self.watcher <- messaging.WatcherCommand{
+ Instruction: messaging.WatcherAdjustRoot,
+ Details: newRoot,
+ }
+}
+
+func (self *HTTPServer) Ignore(response http.ResponseWriter, request *http.Request) {
+ paths := self.parseQueryString("paths", response, request)
+ if paths != "" {
+ self.watcher <- messaging.WatcherCommand{
+ Instruction: messaging.WatcherIgnore,
+ Details: paths,
+ }
+ }
+}
+
+func (self *HTTPServer) Reinstate(response http.ResponseWriter, request *http.Request) {
+ paths := self.parseQueryString("paths", response, request)
+ if paths != "" {
+ self.watcher <- messaging.WatcherCommand{
+ Instruction: messaging.WatcherReinstate,
+ Details: paths,
+ }
+ }
+}
+
+func (self *HTTPServer) parseQueryString(key string, response http.ResponseWriter, request *http.Request) string {
+ value := request.URL.Query()[key]
+
+ if len(value) == 0 {
+ http.Error(response, fmt.Sprintf("No '%s' query string parameter included!", key), http.StatusBadRequest)
+ return ""
+ }
+
+ path := value[0]
+ if path == "" {
+ http.Error(response, "You must provide a non-blank path.", http.StatusBadRequest)
+ }
+ return path
+}
+
+func (self *HTTPServer) Status(response http.ResponseWriter, request *http.Request) {
+ status := self.executor.Status()
+ response.Write([]byte(status))
+}
+
+func (self *HTTPServer) LongPollStatus(response http.ResponseWriter, request *http.Request) {
+ if self.executor.ClearStatusFlag() {
+ response.Write([]byte(self.executor.Status()))
+ return
+ }
+
+ timeout, err := strconv.Atoi(request.URL.Query().Get("timeout"))
+ if err != nil || timeout > 180000 || timeout < 0 {
+ timeout = 60000 // default timeout is 60 seconds
+ }
+
+ myReqChan := make(chan string)
+
+ select {
+ case self.longpoll <- myReqChan: // this case means the executor's status is changing
+ case <-time.After(time.Duration(timeout) * time.Millisecond): // this case means the executor hasn't changed status
+ return
+ }
+
+ out := <-myReqChan
+
+ if out != "" { // TODO: Why is this check necessary? Sometimes it writes empty string...
+ response.Write([]byte(out))
+ }
+}
+
+func (self *HTTPServer) Results(response http.ResponseWriter, request *http.Request) {
+ response.Header().Set("Content-Type", "application/json")
+ response.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
+ response.Header().Set("Pragma", "no-cache")
+ response.Header().Set("Expires", "0")
+ if self.latest != nil {
+ self.latest.Paused = self.paused
+ }
+ stuff, _ := json.Marshal(self.latest)
+ response.Write(stuff)
+}
+
+func (self *HTTPServer) Execute(response http.ResponseWriter, request *http.Request) {
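+	// Kick off the execution asynchronously; the HTTP response returns immediately.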
+ go self.execute()
+}
+
+func (self *HTTPServer) execute() {
+ self.watcher <- messaging.WatcherCommand{Instruction: messaging.WatcherExecute}
+}
+
+func (self *HTTPServer) TogglePause(response http.ResponseWriter, request *http.Request) {
+ instruction := messaging.WatcherPause
+ if self.paused {
+ instruction = messaging.WatcherResume
+ }
+
+ self.watcher <- messaging.WatcherCommand{Instruction: instruction}
+ self.paused = !self.paused
+
+ fmt.Fprint(response, self.paused) // we could write out whatever helps keep the UI honest...
+}
+
+func NewHTTPServer(
+ root string,
+ watcher chan messaging.WatcherCommand,
+ executor contract.Executor,
+ status chan chan string) *HTTPServer {
+
+ self := new(HTTPServer)
+ self.currentRoot = root
+ self.watcher = watcher
+ self.executor = executor
+ self.longpoll = status
+ return self
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/server_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/server_test.go
new file mode 100644
index 00000000000..bd48e308015
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/api/server_test.go
@@ -0,0 +1,462 @@
+package api
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "strings"
+ "testing"
+ "time"
+
+ . "github.com/smartystreets/goconvey/convey"
+ "github.com/smartystreets/goconvey/web/server/contract"
+ "github.com/smartystreets/goconvey/web/server/messaging"
+)
+
+const initialRoot = "/root/gopath/src/github.com/smartystreets/project"
+const nonexistentRoot = "I don't exist"
+const unreadableContent = "!!error!!"
+
+func TestHTTPServer(t *testing.T) {
+ // TODO: fix the skipped tests...
+
+	Convey("Subject: HTTPServer responds to requests appropriately", t, func() {
+ fixture := newServerFixture()
+
+		Convey("Before any update is received", func() {
+ Convey("When the update is requested", func() {
+ update, _ := fixture.RequestLatest()
+
+ Convey("No panic should occur", func() {
+ So(func() { fixture.RequestLatest() }, ShouldNotPanic)
+ })
+
+ Convey("The update will be empty", func() {
+ So(update, ShouldResemble, new(contract.CompleteOutput))
+ })
+ })
+ })
+
+ Convey("Given an update is received", func() {
+ fixture.ReceiveUpdate("", &contract.CompleteOutput{Revision: "asdf"})
+
+ Convey("When the update is requested", func() {
+ update, response := fixture.RequestLatest()
+
+ Convey("The server returns it", func() {
+ So(update, ShouldResemble, &contract.CompleteOutput{Revision: "asdf"})
+ })
+
+ Convey("The server returns 200", func() {
+ So(response.Code, ShouldEqual, http.StatusOK)
+ })
+
+ Convey("The server should include important cache-related headers", func() {
+ So(len(response.HeaderMap), ShouldEqual, 4)
+ So(response.HeaderMap["Content-Type"][0], ShouldEqual, "application/json")
+ So(response.HeaderMap["Cache-Control"][0], ShouldEqual, "no-cache, no-store, must-revalidate")
+ So(response.HeaderMap["Pragma"][0], ShouldEqual, "no-cache")
+ So(response.HeaderMap["Expires"][0], ShouldEqual, "0")
+ })
+ })
+ })
+
+ Convey("When the root watch is queried", func() {
+ root, status := fixture.QueryRootWatch(false)
+
+ SkipConvey("The server returns it", func() {
+ So(root, ShouldEqual, initialRoot)
+ })
+
+ Convey("The server returns HTTP 200 - OK", func() {
+ So(status, ShouldEqual, http.StatusOK)
+ })
+ })
+
+ SkipConvey("When the root watch is adjusted", func() {
+
+ Convey("But the request has no root parameter", func() {
+ status, body := fixture.AdjustRootWatchMalformed()
+
+ Convey("The server returns HTTP 400 - Bad Input", func() {
+ So(status, ShouldEqual, http.StatusBadRequest)
+ })
+
+ Convey("The body should contain a helpful error message", func() {
+ So(body, ShouldEqual, "No 'root' query string parameter included!")
+ })
+
+ Convey("The server should not change the existing root", func() {
+ root, _ := fixture.QueryRootWatch(false)
+ So(root, ShouldEqual, initialRoot)
+ })
+ })
+
+ Convey("But the root parameter is empty", func() {
+ status, body := fixture.AdjustRootWatch("")
+
+ Convey("The server returns HTTP 400 - Bad Input", func() {
+ So(status, ShouldEqual, http.StatusBadRequest)
+ })
+
+ Convey("The server should provide a helpful error message", func() {
+ So(body, ShouldEqual, "You must provide a non-blank path.")
+ })
+
+ Convey("The server should not change the existing root", func() {
+ root, _ := fixture.QueryRootWatch(false)
+ So(root, ShouldEqual, initialRoot)
+ })
+ })
+
+ Convey("And the new root exists", func() {
+ status, body := fixture.AdjustRootWatch(initialRoot + "/package")
+
+ Convey("The server returns HTTP 200 - OK", func() {
+ So(status, ShouldEqual, http.StatusOK)
+ })
+
+ Convey("The body should NOT contain any error message or content", func() {
+ So(body, ShouldEqual, "")
+ })
+
+ Convey("The server informs the watcher of the new root", func() {
+ root, _ := fixture.QueryRootWatch(false)
+ So(root, ShouldEqual, initialRoot+"/package")
+ })
+ })
+
+ Convey("And the new root does NOT exist", func() {
+ status, body := fixture.AdjustRootWatch(nonexistentRoot)
+
+ Convey("The server returns HTTP 404 - Not Found", func() {
+ So(status, ShouldEqual, http.StatusNotFound)
+ })
+
+ Convey("The body should contain a helpful error message", func() {
+ So(body, ShouldEqual, fmt.Sprintf("Directory does not exist: '%s'", nonexistentRoot))
+ })
+
+ Convey("The server should not change the existing root", func() {
+ root, _ := fixture.QueryRootWatch(false)
+ So(root, ShouldEqual, initialRoot)
+ })
+ })
+ })
+
+		SkipConvey("When a package is ignored", func() {
+
+ Convey("But the request has no path parameter", func() {
+ status, body := fixture.IgnoreMalformed()
+
+ Convey("The server returns HTTP 400 - Bad Input", func() {
+ So(status, ShouldEqual, http.StatusBadRequest)
+ })
+
+ Convey("The body should contain a helpful error message", func() {
+ So(body, ShouldEqual, "No 'paths' query string parameter included!")
+ })
+
+ SkipConvey("The server should not ignore anything", func() {
+ // So(fixture.watcher.ignored, ShouldEqual, "")
+ })
+ })
+
+ Convey("But the request is blank", func() {
+ status, body := fixture.Ignore("")
+
+ Convey("The server returns HTTP 400 - Bad Input", func() {
+ So(status, ShouldEqual, http.StatusBadRequest)
+ })
+
+ Convey("The body should contain a helpful error message", func() {
+ So(body, ShouldEqual, "You must provide a non-blank path.")
+ })
+ })
+
+ Convey("And the request is well formed", func() {
+ status, _ := fixture.Ignore(initialRoot)
+
+ SkipConvey("The server informs the watcher", func() {
+ // So(fixture.watcher.ignored, ShouldEqual, initialRoot)
+ })
+ Convey("The server returns HTTP 200 - OK", func() {
+ So(status, ShouldEqual, http.StatusOK)
+ })
+ })
+ })
+
+ SkipConvey("When a package is reinstated", func() {
+ Convey("But the request has no path parameter", func() {
+ status, body := fixture.ReinstateMalformed()
+
+ Convey("The server returns HTTP 400 - Bad Input", func() {
+ So(status, ShouldEqual, http.StatusBadRequest)
+ })
+
+ Convey("The body should contain a helpful error message", func() {
+ So(body, ShouldEqual, "No 'paths' query string parameter included!")
+ })
+
+ SkipConvey("The server should not ignore anything", func() {
+ // So(fixture.watcher.reinstated, ShouldEqual, "")
+ })
+ })
+
+ Convey("But the request is blank", func() {
+ status, body := fixture.Reinstate("")
+
+ Convey("The server returns HTTP 400 - Bad Input", func() {
+ So(status, ShouldEqual, http.StatusBadRequest)
+ })
+
+ Convey("The body should contain a helpful error message", func() {
+ So(body, ShouldEqual, "You must provide a non-blank path.")
+ })
+ })
+
+ Convey("And the request is well formed", func() {
+ status, _ := fixture.Reinstate(initialRoot)
+
+ SkipConvey("The server informs the watcher", func() {
+ // So(fixture.watcher.reinstated, ShouldEqual, initialRoot)
+ })
+ Convey("The server returns HTTP 200 - OK", func() {
+ So(status, ShouldEqual, http.StatusOK)
+ })
+ })
+ })
+
+ Convey("When the status of the executor is requested", func() {
+ fixture.executor.status = "blah blah blah"
+ statusCode, statusBody := fixture.RequestExecutorStatus()
+
+ Convey("The server asks the executor its status and returns it", func() {
+ So(statusBody, ShouldEqual, "blah blah blah")
+ })
+
+ Convey("The server returns HTTP 200 - OK", func() {
+ So(statusCode, ShouldEqual, http.StatusOK)
+ })
+ })
+
+ Convey("When a manual execution of the test packages is requested", func() {
+ status := fixture.ManualExecution()
+ update, _ := fixture.RequestLatest()
+
+			SkipConvey("The server invokes the executor using the watcher's listing and saves the result", func() {
+ So(update, ShouldResemble, &contract.CompleteOutput{Revision: initialRoot})
+ })
+
+ Convey("The server returns HTTP 200 - OK", func() {
+ So(status, ShouldEqual, http.StatusOK)
+ })
+ })
+
+ SkipConvey("When the pause setting is toggled via the server", func() {
+ paused := fixture.TogglePause()
+
+ SkipConvey("The pause channel buffer should have a true value", func() {
+ // var value bool
+ // select {
+ // case value = <-fixture.pauseUpdate:
+ // default:
+ // }
+ // So(value, ShouldBeTrue)
+ })
+
+ Convey("The latest results should show that the server is paused", func() {
+ fixture.ReceiveUpdate("", &contract.CompleteOutput{Revision: "asdf"})
+ update, _ := fixture.RequestLatest()
+
+ So(update.Paused, ShouldBeTrue)
+ })
+
+ Convey("The toggle handler should return its new status", func() {
+ So(paused, ShouldEqual, "true")
+ })
+ })
+ })
+}
+
+/********* Server Fixture *********/
+
+type ServerFixture struct {
+ server *HTTPServer
+ watcher chan messaging.WatcherCommand
+ executor *FakeExecutor
+ statusUpdate chan bool
+}
+
+func (self *ServerFixture) ReceiveUpdate(root string, update *contract.CompleteOutput) {
+ self.server.ReceiveUpdate(root, update)
+}
+
+func (self *ServerFixture) RequestLatest() (*contract.CompleteOutput, *httptest.ResponseRecorder) {
+ request, _ := http.NewRequest("GET", "http://localhost:8080/results", nil)
+ response := httptest.NewRecorder()
+
+ self.server.Results(response, request)
+
+ decoder := json.NewDecoder(strings.NewReader(response.Body.String()))
+ update := new(contract.CompleteOutput)
+ decoder.Decode(update)
+ return update, response
+}
+
+func (self *ServerFixture) QueryRootWatch(newclient bool) (string, int) {
+ url := "http://localhost:8080/watch"
+ if newclient {
+ url += "?newclient=1"
+ }
+ request, _ := http.NewRequest("GET", url, nil)
+ response := httptest.NewRecorder()
+
+ self.server.Watch(response, request)
+
+ return strings.TrimSpace(response.Body.String()), response.Code
+}
+
+func (self *ServerFixture) AdjustRootWatchMalformed() (status int, body string) {
+ request, _ := http.NewRequest("POST", "http://localhost:8080/watch", nil)
+ response := httptest.NewRecorder()
+
+ self.server.Watch(response, request)
+
+ status, body = response.Code, strings.TrimSpace(response.Body.String())
+ return
+}
+
+func (self *ServerFixture) AdjustRootWatch(newRoot string) (status int, body string) {
+ escapedRoot := url.QueryEscape(newRoot)
+ request, _ := http.NewRequest("POST", "http://localhost:8080/watch?root="+escapedRoot, nil)
+ response := httptest.NewRecorder()
+
+ self.server.Watch(response, request)
+
+ status, body = response.Code, strings.TrimSpace(response.Body.String())
+ return
+}
+
+func (self *ServerFixture) IgnoreMalformed() (status int, body string) {
+ request, _ := http.NewRequest("POST", "http://localhost:8080/ignore", nil)
+ response := httptest.NewRecorder()
+
+ self.server.Ignore(response, request)
+
+ status, body = response.Code, strings.TrimSpace(response.Body.String())
+ return
+}
+
+func (self *ServerFixture) Ignore(folder string) (status int, body string) {
+ escapedFolder := url.QueryEscape(folder)
+ request, _ := http.NewRequest("POST", "http://localhost:8080/ignore?paths="+escapedFolder, nil)
+ response := httptest.NewRecorder()
+
+ self.server.Ignore(response, request)
+
+ status, body = response.Code, strings.TrimSpace(response.Body.String())
+ return
+}
+
+func (self *ServerFixture) ReinstateMalformed() (status int, body string) {
+ request, _ := http.NewRequest("POST", "http://localhost:8080/reinstate", nil)
+ response := httptest.NewRecorder()
+
+ self.server.Reinstate(response, request)
+
+ status, body = response.Code, strings.TrimSpace(response.Body.String())
+ return
+}
+
+func (self *ServerFixture) Reinstate(folder string) (status int, body string) {
+ escapedFolder := url.QueryEscape(folder)
+ request, _ := http.NewRequest("POST", "http://localhost:8080/reinstate?paths="+escapedFolder, nil)
+ response := httptest.NewRecorder()
+
+ self.server.Reinstate(response, request)
+
+ status, body = response.Code, strings.TrimSpace(response.Body.String())
+ return
+}
+
+func (self *ServerFixture) SetExecutorStatus(status string) {
+ // self.executor.status = status
+ // select {
+ // case self.executor.statusUpdate <- make(chan string):
+ // default:
+ // }
+}
+
+func (self *ServerFixture) RequestExecutorStatus() (code int, status string) {
+ request, _ := http.NewRequest("GET", "http://localhost:8080/status", nil)
+ response := httptest.NewRecorder()
+
+ self.server.Status(response, request)
+
+ code, status = response.Code, strings.TrimSpace(response.Body.String())
+ return
+}
+
+func (self *ServerFixture) ManualExecution() int {
+ request, _ := http.NewRequest("POST", "http://localhost:8080/execute", nil)
+ response := httptest.NewRecorder()
+
+ self.server.Execute(response, request)
+ nap, _ := time.ParseDuration("100ms")
+ time.Sleep(nap)
+ return response.Code
+}
+
+func (self *ServerFixture) TogglePause() string {
+ request, _ := http.NewRequest("POST", "http://localhost:8080/pause", nil)
+ response := httptest.NewRecorder()
+
+ self.server.TogglePause(response, request)
+
+ return response.Body.String()
+}
+
+func newServerFixture() *ServerFixture {
+ self := new(ServerFixture)
+ self.watcher = make(chan messaging.WatcherCommand)
+ // self.watcher.SetRootWatch(initialRoot)
+ statusUpdate := make(chan chan string)
+ self.executor = newFakeExecutor("", statusUpdate)
+ self.server = NewHTTPServer("initial-working-dir", self.watcher, self.executor, statusUpdate)
+ return self
+}
+
+/********* Fake Executor *********/
+
+type FakeExecutor struct {
+ status string
+ executed bool
+ statusFlag bool
+ statusUpdate chan chan string
+}
+
+func (self *FakeExecutor) Status() string {
+ return self.status
+}
+
+func (self *FakeExecutor) ClearStatusFlag() bool {
+ hasNewStatus := self.statusFlag
+ self.statusFlag = false
+ return hasNewStatus
+}
+
+func (self *FakeExecutor) ExecuteTests(watched []*contract.Package) *contract.CompleteOutput {
+ output := new(contract.CompleteOutput)
+ output.Revision = watched[0].Path
+ return output
+}
+
+func newFakeExecutor(status string, statusUpdate chan chan string) *FakeExecutor {
+ self := new(FakeExecutor)
+ self.status = status
+ self.statusUpdate = statusUpdate
+ return self
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/contracts.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/contracts.go
new file mode 100644
index 00000000000..e758f3e16e3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/contracts.go
@@ -0,0 +1,27 @@
+package contract
+
+import "net/http"
+
+type (
+ Server interface {
+ ReceiveUpdate(root string, update *CompleteOutput)
+ Watch(writer http.ResponseWriter, request *http.Request)
+ Ignore(writer http.ResponseWriter, request *http.Request)
+ Reinstate(writer http.ResponseWriter, request *http.Request)
+ Status(writer http.ResponseWriter, request *http.Request)
+ LongPollStatus(writer http.ResponseWriter, request *http.Request)
+ Results(writer http.ResponseWriter, request *http.Request)
+ Execute(writer http.ResponseWriter, request *http.Request)
+ TogglePause(writer http.ResponseWriter, request *http.Request)
+ }
+
+ Executor interface {
+ ExecuteTests([]*Package) *CompleteOutput
+ Status() string
+ ClearStatusFlag() bool
+ }
+
+ Shell interface {
+ GoTest(directory, packageName string, tags, arguments []string) (output string, err error)
+ }
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/doc_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/doc_test.go
new file mode 100644
index 00000000000..14f4d2d9c2e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/doc_test.go
@@ -0,0 +1 @@
+package contract
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/result.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/result.go
new file mode 100644
index 00000000000..c6f9bf2cfde
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/contract/result.go
@@ -0,0 +1,124 @@
+package contract
+
+import (
+ "path/filepath"
+ "strings"
+
+ "go/build"
+
+ "github.com/smartystreets/goconvey/convey/reporting"
+ "github.com/smartystreets/goconvey/web/server/messaging"
+)
+
+type Package struct {
+ Path string
+ Name string
+ Ignored bool
+ Disabled bool
+ BuildTags []string
+ TestArguments []string
+ Error error
+ Output string
+ Result *PackageResult
+
+ HasImportCycle bool
+}
+
+func NewPackage(folder *messaging.Folder, hasImportCycle bool) *Package {
+ self := new(Package)
+ self.Path = folder.Path
+ self.Name = resolvePackageName(self.Path)
+ self.Result = NewPackageResult(self.Name)
+ self.Ignored = folder.Ignored
+ self.Disabled = folder.Disabled
+ self.BuildTags = folder.BuildTags
+ self.TestArguments = folder.TestArguments
+ self.HasImportCycle = hasImportCycle
+ return self
+}
+
+func (self *Package) Active() bool {
+ return !self.Disabled && !self.Ignored
+}
+
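+// HasUsableResult reports whether this package is active and either ran without
+// error or at least produced some output worth parsing.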
+func (self *Package) HasUsableResult() bool {
+ return self.Active() && (self.Error == nil || (self.Output != ""))
+}
+
+type CompleteOutput struct {
+ Packages []*PackageResult
+ Revision string
+ Paused bool
+}
+
+var ( // PackageResult.Outcome values:
+ Ignored = "ignored"
+ Disabled = "disabled"
+ Passed = "passed"
+ Failed = "failed"
+ Panicked = "panicked"
+ BuildFailure = "build failure"
+ NoTestFiles = "no test files"
+ NoTestFunctions = "no test functions"
+ NoGoFiles = "no go code"
+
+ TestRunAbortedUnexpectedly = "test run aborted unexpectedly"
+)
+
+type PackageResult struct {
+ PackageName string
+ Elapsed float64
+ Coverage float64
+ Outcome string
+ BuildOutput string
+ TestResults []TestResult
+}
+
+func NewPackageResult(packageName string) *PackageResult {
+ self := new(PackageResult)
+ self.PackageName = packageName
+ self.TestResults = []TestResult{}
+ self.Coverage = -1
+ return self
+}
+
+type TestResult struct {
+ TestName string
+ Elapsed float64
+ Passed bool
+ Skipped bool
+ File string
+ Line int
+ Message string
+ Error string
+ Stories []reporting.ScopeResult
+
+ RawLines []string `json:",omitempty"`
+}
+
+func NewTestResult(testName string) *TestResult {
+ self := new(TestResult)
+ self.Stories = []reporting.ScopeResult{}
+ self.RawLines = []string{}
+ self.TestName = testName
+ return self
+}
+
+func resolvePackageName(path string) string {
+ pkg, err := build.ImportDir(path, build.FindOnly)
+ if err == nil {
+ return pkg.ImportPath
+ }
+
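+	// Fall back to the portion of the path after the last "src" directory,
+	// i.e. the GOPATH-relative import path.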
+ nameArr := strings.Split(path, endGoPath)
+ return nameArr[len(nameArr)-1]
+}
+
+const (
+ separator = string(filepath.Separator)
+ endGoPath = separator + "src" + separator
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/contract.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/contract.go
new file mode 100644
index 00000000000..209dbca5975
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/contract.go
@@ -0,0 +1,12 @@
+package executor
+
+import "github.com/smartystreets/goconvey/web/server/contract"
+
+type Parser interface {
+ Parse([]*contract.Package)
+}
+
+type Tester interface {
+ SetBatchSize(batchSize int)
+ TestAll(folders []*contract.Package)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/coordinator.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/coordinator.go
new file mode 100644
index 00000000000..117dd56d65a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/coordinator.go
@@ -0,0 +1,73 @@
+package executor
+
+import (
+ "errors"
+ "fmt"
+ "log"
+ "strings"
+ "sync"
+
+ "github.com/smartystreets/goconvey/web/server/contract"
+)
+
+type concurrentCoordinator struct {
+ batchSize int
+ queue chan *contract.Package
+ folders []*contract.Package
+ shell contract.Shell
+ waiter sync.WaitGroup
+}
+
+func (self *concurrentCoordinator) ExecuteConcurrently() {
+ self.enlistWorkers()
+ self.scheduleTasks()
+ self.awaitCompletion()
+}
+
+func (self *concurrentCoordinator) enlistWorkers() {
+ for i := 0; i < self.batchSize; i++ {
+ self.waiter.Add(1)
+ go self.worker(i)
+ }
+}
+func (self *concurrentCoordinator) worker(id int) {
+ for folder := range self.queue {
+ packageName := strings.Replace(folder.Name, "\\", "/", -1)
+ if !folder.Active() {
+ log.Printf("Skipping concurrent execution: %s\n", packageName)
+ continue
+ }
+
+ if folder.HasImportCycle {
+ message := fmt.Sprintf("can't load package: import cycle not allowed\npackage %s\n\timports %s", packageName, packageName)
+ log.Println(message)
+ folder.Output, folder.Error = message, errors.New(message)
+ } else {
+ log.Printf("Executing concurrent tests: %s\n", packageName)
+ folder.Output, folder.Error = self.shell.GoTest(folder.Path, packageName, folder.BuildTags, folder.TestArguments)
+ }
+ }
+ self.waiter.Done()
+}
+
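+// scheduleTasks feeds every package into the unbuffered queue; each send blocks
+// until one of the workers receives it.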
+func (self *concurrentCoordinator) scheduleTasks() {
+ for _, folder := range self.folders {
+ self.queue <- folder
+ }
+}
+
+func (self *concurrentCoordinator) awaitCompletion() {
+ close(self.queue)
+ self.waiter.Wait()
+}
+
+func newConcurrentCoordinator(folders []*contract.Package, batchSize int, shell contract.Shell) *concurrentCoordinator {
+ self := new(concurrentCoordinator)
+ self.queue = make(chan *contract.Package)
+ self.folders = folders
+ self.batchSize = batchSize
+ self.shell = shell
+ return self
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor.go
new file mode 100644
index 00000000000..887080ccd33
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor.go
@@ -0,0 +1,86 @@
+package executor
+
+import (
+ "log"
+ "time"
+
+ "github.com/smartystreets/goconvey/web/server/contract"
+)
+
+const (
+ Idle = "idle"
+ Executing = "executing"
+)
+
+type Executor struct {
+ tester Tester
+ parser Parser
+ status string
+ statusChan chan chan string
+ statusFlag bool
+}
+
+func (self *Executor) Status() string {
+ return self.status
+}
+
+func (self *Executor) ClearStatusFlag() bool {
+ hasNewStatus := self.statusFlag
+ self.statusFlag = false
+ return hasNewStatus
+}
+
+func (self *Executor) ExecuteTests(folders []*contract.Package) *contract.CompleteOutput {
+ defer func() { self.setStatus(Idle) }()
+ self.execute(folders)
+ result := self.parse(folders)
+ return result
+}
+
+func (self *Executor) execute(folders []*contract.Package) {
+ self.setStatus(Executing)
+ self.tester.TestAll(folders)
+}
+
+func (self *Executor) parse(folders []*contract.Package) *contract.CompleteOutput {
+ result := &contract.CompleteOutput{Revision: now().String()}
+ self.parser.Parse(folders)
+ for _, folder := range folders {
+ result.Packages = append(result.Packages, folder.Result)
+ }
+ return result
+}
+
+func (self *Executor) setStatus(status string) {
+ self.status = status
+ self.statusFlag = true
+
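+	// Hand the new status to any long-poll requests already waiting on the status
+	// channel; the default case keeps this from blocking when nobody is listening.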
+Loop:
+ for {
+ select {
+ case c := <-self.statusChan:
+ self.statusFlag = false
+ c <- status
+ default:
+ break Loop
+ }
+ }
+
+ log.Printf("Executor status: '%s'\n", self.status)
+}
+
+func NewExecutor(tester Tester, parser Parser, ch chan chan string) *Executor {
+ return &Executor{
+ tester: tester,
+ parser: parser,
+ status: Idle,
+ statusChan: ch,
+ statusFlag: false,
+ }
+}
+
+var now = func() time.Time {
+ return time.Now()
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor.goconvey b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor.goconvey
new file mode 100644
index 00000000000..79982854b53
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor.goconvey
@@ -0,0 +1,2 @@
+#ignore
+-timeout=1s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor_test.go
new file mode 100644
index 00000000000..d7221b0dc12
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/executor_test.go
@@ -0,0 +1,160 @@
+package executor
+
+import (
+ "strings"
+ "testing"
+ "time"
+
+ . "github.com/smartystreets/goconvey/convey"
+ "github.com/smartystreets/goconvey/web/server/contract"
+)
+
+func TestExecutor(t *testing.T) {
+ t.Skip("BROKEN!")
+
+ Convey("Subject: Execution of test packages and aggregation of parsed results", t, func() {
+ fixture := newExecutorFixture()
+
+ Convey("When tests packages are executed", func() {
+ fixture.ExecuteTests()
+
+ Convey("The result should include parsed results for each test package.",
+ fixture.ResultShouldBePopulated)
+ })
+
+ Convey("When the executor is idle", func() {
+ Convey("The status of the executor should be 'idle'", func() {
+ So(fixture.executor.Status(), ShouldEqual, Idle)
+ })
+ })
+
+ Convey("When the status is updated", func() {
+ fixture.executor.setStatus(Executing)
+
+ Convey("The status flag should be set to true", func() {
+ So(fixture.executor.statusFlag, ShouldBeTrue)
+ })
+ })
+
+ Convey("During test execution", func() {
+ status := fixture.CaptureStatusDuringExecutionPhase()
+
+ Convey("The status of the executor should be 'executing'", func() {
+ So(status, ShouldEqual, Executing)
+ })
+ })
+ })
+}
+
+type ExecutorFixture struct {
+ executor *Executor
+ tester *FakeTester
+ parser *FakeParser
+ folders []*contract.Package
+ result *contract.CompleteOutput
+ expected *contract.CompleteOutput
+ stamp time.Time
+}
+
+func (self *ExecutorFixture) ExecuteTests() {
+ self.result = self.executor.ExecuteTests(self.folders)
+}
+
+func (self *ExecutorFixture) CaptureStatusDuringExecutionPhase() string {
+ nap, _ := time.ParseDuration("25ms")
+ self.tester.addDelay(nap)
+ return self.delayedExecution(nap)
+}
+
+func (self *ExecutorFixture) delayedExecution(nap time.Duration) string {
+ go self.ExecuteTests()
+ time.Sleep(nap)
+ return self.executor.Status()
+}
+
+func (self *ExecutorFixture) ResultShouldBePopulated() {
+ So(self.result, ShouldResemble, self.expected)
+}
+
+var (
+ prefix = "/Users/blah/gopath/src/"
+ packageA = "github.com/smartystreets/goconvey/a"
+ packageB = "github.com/smartystreets/goconvey/b"
+ resultA = &contract.PackageResult{PackageName: packageA}
+ resultB = &contract.PackageResult{PackageName: packageB}
+)
+
+func newExecutorFixture() *ExecutorFixture {
+ self := new(ExecutorFixture)
+ self.tester = newFakeTester()
+ self.parser = newFakeParser()
+ self.executor = NewExecutor(self.tester, self.parser, make(chan chan string))
+ self.folders = []*contract.Package{
+ &contract.Package{Path: prefix + packageA, Name: packageA},
+ &contract.Package{Path: prefix + packageB, Name: packageB},
+ }
+ self.stamp = time.Now()
+ now = func() time.Time { return self.stamp }
+
+ self.expected = &contract.CompleteOutput{
+ Packages: []*contract.PackageResult{
+ resultA,
+ resultB,
+ },
+ Revision: self.stamp.String(),
+ }
+ return self
+}
+
+/******** FakeTester ********/
+
+type FakeTester struct {
+ nap time.Duration
+}
+
+func (self *FakeTester) SetBatchSize(batchSize int) { panic("NOT SUPPORTED") }
+func (self *FakeTester) TestAll(folders []*contract.Package) {
+ for _, p := range folders {
+ p.Output = p.Path
+ }
+ time.Sleep(self.nap)
+}
+func (self *FakeTester) addDelay(nap time.Duration) {
+ self.nap = nap
+}
+
+func newFakeTester() *FakeTester {
+ self := new(FakeTester)
+ zero, _ := time.ParseDuration("0")
+ self.nap = zero
+ return self
+}
+
+/******** FakeParser ********/
+
+type FakeParser struct {
+ nap time.Duration
+}
+
+func (self *FakeParser) Parse(packages []*contract.Package) {
+ time.Sleep(self.nap)
+ for _, package_ := range packages {
+ if package_.Name == packageA && strings.HasSuffix(package_.Output, packageA) {
+ package_.Result = resultA
+ }
+ if package_.Name == packageB && strings.HasSuffix(package_.Output, packageB) {
+ package_.Result = resultB
+ }
+ }
+}
+
+func (self *FakeParser) addDelay(nap time.Duration) {
+ self.nap = nap
+}
+
+func newFakeParser() *FakeParser {
+ self := new(FakeParser)
+ zero, _ := time.ParseDuration("0")
+ self.nap = zero
+ return self
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/tester.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/tester.go
new file mode 100644
index 00000000000..76f353a5baa
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/tester.go
@@ -0,0 +1,56 @@
+package executor
+
+import (
+ "errors"
+ "fmt"
+ "log"
+ "strings"
+
+ "github.com/smartystreets/goconvey/web/server/contract"
+)
+
+type ConcurrentTester struct {
+ shell contract.Shell
+ batchSize int
+}
+
+func (self *ConcurrentTester) SetBatchSize(batchSize int) {
+ self.batchSize = batchSize
+ log.Printf("Now configured to test %d packages concurrently.\n", self.batchSize)
+}
+
+func (self *ConcurrentTester) TestAll(folders []*contract.Package) {
+ if self.batchSize == 1 {
+ self.executeSynchronously(folders)
+ } else {
+ newConcurrentCoordinator(folders, self.batchSize, self.shell).ExecuteConcurrently()
+ }
+ return
+}
+
+func (self *ConcurrentTester) executeSynchronously(folders []*contract.Package) {
+ for _, folder := range folders {
+ packageName := strings.Replace(folder.Name, "\\", "/", -1)
+ if !folder.Active() {
+ log.Printf("Skipping execution: %s\n", packageName)
+ continue
+ }
+ if folder.HasImportCycle {
+ message := fmt.Sprintf("can't load package: import cycle not allowed\npackage %s\n\timports %s", packageName, packageName)
+ log.Println(message)
+ folder.Output, folder.Error = message, errors.New(message)
+ } else {
+ log.Printf("Executing tests: %s\n", packageName)
+ folder.Output, folder.Error = self.shell.GoTest(folder.Path, packageName, folder.BuildTags, folder.TestArguments)
+ }
+ }
+}
+
+func NewConcurrentTester(shell contract.Shell) *ConcurrentTester {
+ self := new(ConcurrentTester)
+ self.shell = shell
+ self.batchSize = defaultBatchSize
+ return self
+}
+
+const defaultBatchSize = 10
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/tester_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/tester_test.go
new file mode 100644
index 00000000000..d540c546d9a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/executor/tester_test.go
@@ -0,0 +1,258 @@
+package executor
+
+import (
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "testing"
+ "time"
+
+ . "github.com/smartystreets/goconvey/convey"
+ "github.com/smartystreets/goconvey/web/server/contract"
+)
+
+func init() {
+ log.SetOutput(ioutil.Discard)
+}
+
+func TestConcurrentTester(t *testing.T) {
+ t.Skip("BROKEN!")
+
+ Convey("Subject: Controlled execution of test packages", t, func() {
+ fixture := NewTesterFixture()
+
+ Convey("Whenever tests for each package are executed", func() {
+ fixture.InBatchesOf(1).RunTests()
+
+ Convey("The tester should execute the tests in each active package with the correct arguments",
+ fixture.ShouldHaveRecordOfExecutionCommands)
+
+ Convey("There should be a test output result for each active package",
+ fixture.ShouldHaveOneOutputPerInput)
+
+ Convey("The output should be as expected",
+ fixture.OutputShouldBeAsExpected)
+ })
+
+ Convey("When the tests for each package are executed synchronously", func() {
+ fixture.InBatchesOf(1).RunTests()
+
+ Convey("Each active package should be run synchronously and in the given order",
+ fixture.TestsShouldHaveRunContiguously)
+ })
+
+ Convey("When the tests for each package are executed synchronously with failures", func() {
+ fixture.InBatchesOf(1).SetupFailedTestSuites().RunTests()
+
+ Convey("The failed test packages should not result in any panics", func() {
+ So(fixture.recovered, ShouldBeNil)
+ })
+ })
+
+ Convey("When packages are tested concurrently", func() {
+ fixture.InBatchesOf(concurrentBatchSize).RunTests()
+
+ Convey("Active packages should be arranged and tested in batches of the appropriate size",
+ fixture.TestsShouldHaveRunInBatchesOfTwo)
+ })
+
+ Convey("When packages are tested concurrently with failures", func() {
+ fixture.InBatchesOf(concurrentBatchSize).SetupFailedTestSuites().RunTests()
+
+ Convey("The failed test packages should not result in any panics", func() {
+ So(fixture.recovered, ShouldBeNil)
+ })
+ })
+ })
+}
+
+const concurrentBatchSize = 2
+
+type TesterFixture struct {
+ tester *ConcurrentTester
+ shell *TimedShell
+ results []string
+ compilations []*ShellCommand
+ executions []*ShellCommand
+ packages []*contract.Package
+ recovered error
+}
+
+func NewTesterFixture() *TesterFixture {
+ self := new(TesterFixture)
+ self.shell = NewTimedShell()
+ self.tester = NewConcurrentTester(self.shell)
+ self.packages = []*contract.Package{
+ {Path: "a"},
+ {Path: "b"},
+ {Path: "c"},
+ {Path: "d"},
+ {Path: "e", Ignored: true},
+ {Path: "f"},
+ {Path: "g", HasImportCycle: true},
+ }
+ return self
+}
+
+func (self *TesterFixture) InBatchesOf(batchSize int) *TesterFixture {
+ self.tester.SetBatchSize(batchSize)
+ return self
+}
+
+func (self *TesterFixture) SetupAbnormalError(message string) *TesterFixture {
+ self.shell.setTripWire(message)
+ return self
+}
+
+func (self *TesterFixture) SetupFailedTestSuites() *TesterFixture {
+ self.shell.setExitWithError()
+ return self
+}
+
+func (self *TesterFixture) RunTests() {
+ defer func() {
+ if r := recover(); r != nil {
+ self.recovered = r.(error)
+ }
+ }()
+
+ self.tester.TestAll(self.packages)
+ for _, p := range self.packages {
+ self.results = append(self.results, p.Output)
+ }
+ self.executions = self.shell.Executions()
+}
+
+func (self *TesterFixture) ShouldHaveRecordOfExecutionCommands() {
+ executed := []string{"a", "b", "c", "d", "f"}
+ ignored := "e"
+ importCycle := "g"
+ actual := []string{}
+ for _, pkg := range self.executions {
+ actual = append(actual, pkg.Command)
+ }
+ So(actual, ShouldResemble, executed)
+ So(actual, ShouldNotContain, ignored)
+ So(actual, ShouldNotContain, importCycle)
+}
+
+func (self *TesterFixture) ShouldHaveOneOutputPerInput() {
+ So(len(self.results), ShouldEqual, len(self.packages))
+}
+
+func (self *TesterFixture) OutputShouldBeAsExpected() {
+ for _, p := range self.packages {
+ if p.HasImportCycle {
+ So(p.Output, ShouldContainSubstring, "can't load package: import cycle not allowed")
+ So(p.Error.Error(), ShouldContainSubstring, "can't load package: import cycle not allowed")
+ } else {
+ if p.Active() {
+ So(p.Output, ShouldEndWith, p.Path)
+ } else {
+ So(p.Output, ShouldBeBlank)
+ }
+ So(p.Error, ShouldBeNil)
+ }
+ }
+}
+
+func (self *TesterFixture) TestsShouldHaveRunContiguously() {
+ self.OutputShouldBeAsExpected()
+
+ So(self.shell.MaxConcurrentCommands(), ShouldEqual, 1)
+
+ for i := 0; i < len(self.executions)-1; i++ {
+ current := self.executions[i]
+ next := self.executions[i+1]
+ So(current.Started, ShouldHappenBefore, next.Started)
+ So(current.Ended, ShouldHappenOnOrBefore, next.Started)
+ }
+}
+
+func (self *TesterFixture) TestsShouldHaveRunInBatchesOfTwo() {
+ self.OutputShouldBeAsExpected()
+
+ So(self.shell.MaxConcurrentCommands(), ShouldEqual, concurrentBatchSize)
+}
+
+/**** Fakes ****/
+
+type ShellCommand struct {
+ Command string
+ Started time.Time
+ Ended time.Time
+}
+
+type TimedShell struct {
+ executions []*ShellCommand
+ panicMessage string
+ err error
+}
+
+func (self *TimedShell) Executions() []*ShellCommand {
+ return self.executions
+}
+
+func (self *TimedShell) MaxConcurrentCommands() int {
+ var concurrent int
+
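+	// For each command, count how many commands (including itself) overlapped its
+	// execution window; the largest such count is the peak concurrency observed.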
+ for x, current := range self.executions {
+ concurrentWith_x := 1
+ for y, comparison := range self.executions {
+ if y == x {
+ continue
+ } else if concurrentWith(current, comparison) {
+ concurrentWith_x++
+ }
+ }
+ if concurrentWith_x > concurrent {
+ concurrent = concurrentWith_x
+ }
+ }
+ return concurrent
+}
+
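+// concurrentWith reports whether comparison started on or after current started
+// and before current ended.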
+func concurrentWith(current, comparison *ShellCommand) bool {
+ return ((comparison.Started == current.Started || comparison.Started.After(current.Started)) &&
+ (comparison.Started.Before(current.Ended)))
+}
+
+func (self *TimedShell) setTripWire(message string) {
+ self.panicMessage = message
+}
+
+func (self *TimedShell) setExitWithError() {
+ self.err = errors.New("Simulate test failure")
+}
+
+func (self *TimedShell) GoTest(directory, packageName string, tags, arguments []string) (output string, err error) {
+ if self.panicMessage != "" {
+ return "", errors.New(self.panicMessage)
+ }
+
+ output = directory
+ err = self.err
+ self.executions = append(self.executions, self.composeCommand(directory))
+ return
+}
+
+func (self *TimedShell) composeCommand(commandText string) *ShellCommand {
+ start := time.Now()
+ time.Sleep(nap)
+ end := time.Now()
+ return &ShellCommand{commandText, start, end}
+}
+
+func NewTimedShell() *TimedShell {
+ self := new(TimedShell)
+ self.executions = []*ShellCommand{}
+ return self
+}
+
+var nap, _ = time.ParseDuration("10ms")
+var _ = fmt.Sprintf("fmt")
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/messaging/doc_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/messaging/doc_test.go
new file mode 100644
index 00000000000..cbb7a43f8ba
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/messaging/doc_test.go
@@ -0,0 +1 @@
+package messaging
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/messaging/messages.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/messaging/messages.go
new file mode 100644
index 00000000000..7a92091162a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/messaging/messages.go
@@ -0,0 +1,56 @@
+package messaging
+
+///////////////////////////////////////////////////////////////////////////////
+
+type WatcherCommand struct {
+ Instruction WatcherInstruction
+ Details string
+}
+
+type WatcherInstruction int
+
+func (this WatcherInstruction) String() string {
+ switch this {
+ case WatcherPause:
+ return "Pause"
+ case WatcherResume:
+ return "Resume"
+ case WatcherIgnore:
+ return "Ignore"
+ case WatcherReinstate:
+ return "Reinstate"
+ case WatcherAdjustRoot:
+ return "AdjustRoot"
+ case WatcherExecute:
+ return "Execute"
+ case WatcherStop:
+ return "Stop"
+ default:
+ return "UNKNOWN INSTRUCTION"
+ }
+}
+
+const (
+ WatcherPause WatcherInstruction = iota
+ WatcherResume
+ WatcherIgnore
+ WatcherReinstate
+ WatcherAdjustRoot
+ WatcherExecute
+ WatcherStop
+)
+
+///////////////////////////////////////////////////////////////////////////////
+
+type Folders map[string]*Folder
+
+type Folder struct {
+ Path string // key
+ Root string
+ Ignored bool
+ Disabled bool
+ BuildTags []string
+ TestArguments []string
+}
+
+///////////////////////////////////////////////////////////////////////////////
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/packageParser.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/packageParser.go
new file mode 100644
index 00000000000..628b137b39a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/packageParser.go
@@ -0,0 +1,179 @@
+package parser
+
+import (
+ "fmt"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/smartystreets/goconvey/web/server/contract"
+)
+
+var (
+ testNamePattern = regexp.MustCompile("^=== RUN:? +(.+)$")
+)
+
+func ParsePackageResults(result *contract.PackageResult, rawOutput string) {
+ newOutputParser(result, rawOutput).parse()
+}
+
+type outputParser struct {
+ raw string
+ lines []string
+ result *contract.PackageResult
+ tests []*contract.TestResult
+
+ // place holders for loops
+ line string
+ test *contract.TestResult
+ testMap map[string]*contract.TestResult
+}
+
+func newOutputParser(result *contract.PackageResult, rawOutput string) *outputParser {
+ self := new(outputParser)
+ self.raw = strings.TrimSpace(rawOutput)
+ self.lines = strings.Split(self.raw, "\n")
+ self.result = result
+ self.tests = []*contract.TestResult{}
+ self.testMap = make(map[string]*contract.TestResult)
+ return self
+}
+
+func (self *outputParser) parse() {
+ self.separateTestFunctionsAndMetadata()
+ self.parseEachTestFunction()
+}
+
+func (self *outputParser) separateTestFunctionsAndMetadata() {
+ for _, self.line = range self.lines {
+ if self.processNonTestOutput() {
+ break
+ }
+ self.processTestOutput()
+ }
+}
+func (self *outputParser) processNonTestOutput() bool {
+ if noGoFiles(self.line) {
+ self.recordFinalOutcome(contract.NoGoFiles)
+
+ } else if buildFailed(self.line) {
+ self.recordFinalOutcome(contract.BuildFailure)
+
+ } else if noTestFiles(self.line) {
+ self.recordFinalOutcome(contract.NoTestFiles)
+
+ } else if noTestFunctions(self.line) {
+ self.recordFinalOutcome(contract.NoTestFunctions)
+
+ } else {
+ return false
+ }
+ return true
+}
+
+func (self *outputParser) recordFinalOutcome(outcome string) {
+ self.result.Outcome = outcome
+ self.result.BuildOutput = strings.Join(self.lines, "\n")
+}
+
+func (self *outputParser) processTestOutput() {
+ if isNewTest(self.line) {
+ self.registerTestFunction()
+
+ } else if isTestResult(self.line) {
+ self.recordTestMetadata()
+
+ } else if isPackageReport(self.line) {
+ self.recordPackageMetadata()
+
+ } else {
+ self.saveLineForParsingLater()
+
+ }
+}
+
+func (self *outputParser) registerTestFunction() {
+ testName := testNamePattern.FindStringSubmatch(self.line)[1]
+ self.test = contract.NewTestResult(testName)
+ self.tests = append(self.tests, self.test)
+ self.testMap[self.test.TestName] = self.test
+}
+func (self *outputParser) recordTestMetadata() {
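+	// Result lines look like "--- PASS: TestName (0.05 seconds)";
+	// the third space-delimited field is the test name.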
+ testName := strings.Split(self.line, " ")[2]
+ if test, ok := self.testMap[testName]; ok {
+ self.test = test
+ self.test.Passed = !strings.HasPrefix(self.line, "--- FAIL: ")
+ self.test.Skipped = strings.HasPrefix(self.line, "--- SKIP: ")
+ self.test.Elapsed = parseTestFunctionDuration(self.line)
+ }
+}
+func (self *outputParser) recordPackageMetadata() {
+ if packageFailed(self.line) {
+ self.recordTestingOutcome(contract.Failed)
+
+ } else if packagePassed(self.line) {
+ self.recordTestingOutcome(contract.Passed)
+
+ } else if isCoverageSummary(self.line) {
+ self.recordCoverageSummary(self.line)
+ }
+}
+func (self *outputParser) recordTestingOutcome(outcome string) {
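+	// Package summary lines ("ok" or "FAIL") are tab-separated: status, package name, elapsed time.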
+ self.result.Outcome = outcome
+ fields := strings.Split(self.line, "\t")
+ self.result.PackageName = strings.TrimSpace(fields[1])
+ self.result.Elapsed = parseDurationInSeconds(fields[2], 3)
+}
+func (self *outputParser) recordCoverageSummary(summary string) {
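+	// Coverage lines look like "coverage: 87.5% of statements"; grab the number
+	// between the "coverage: " prefix and the "%".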
+ start := len("coverage: ")
+ end := strings.Index(summary, "%")
+ value := summary[start:end]
+ parsed, err := strconv.ParseFloat(value, 64)
+ if err != nil {
+ self.result.Coverage = -1
+ } else {
+ self.result.Coverage = parsed
+ }
+}
+func (self *outputParser) saveLineForParsingLater() {
+ self.line = strings.TrimLeft(self.line, "\t")
+ if self.test == nil {
+ fmt.Println("Potential error parsing output of", self.result.PackageName, "; couldn't handle this stray line:", self.line)
+ return
+ }
+ self.test.RawLines = append(self.test.RawLines, self.line)
+}
+
+// TestResults is a collection of TestResults that implements sort.Interface.
+type TestResults []contract.TestResult
+
+func (r TestResults) Len() int {
+ return len(r)
+}
+
+// Less compares TestResults on TestName
+func (r TestResults) Less(i, j int) bool {
+ return r[i].TestName < r[j].TestName
+}
+
+func (r TestResults) Swap(i, j int) {
+ r[i], r[j] = r[j], r[i]
+}
+
+func (self *outputParser) parseEachTestFunction() {
+ for _, self.test = range self.tests {
+ self.test = parseTestOutput(self.test)
+ if self.test.Error != "" {
+ self.result.Outcome = contract.Panicked
+ }
+ self.test.RawLines = []string{}
+ self.result.TestResults = append(self.result.TestResults, *self.test)
+ }
+ sort.Sort(TestResults(self.result.TestResults))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/package_parser_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/package_parser_test.go
new file mode 100644
index 00000000000..65f3f9830bf
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/package_parser_test.go
@@ -0,0 +1,792 @@
+package parser
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "strings"
+ "testing"
+
+ "github.com/smartystreets/goconvey/convey/reporting"
+ "github.com/smartystreets/goconvey/web/server/contract"
+)
+
+func init() {
+ log.SetOutput(ioutil.Discard)
+}
+
+func TestParsePackage_NoGoFiles_ReturnsPackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expected_NoGoFiles.PackageName}
+ ParsePackageResults(actual, input_NoGoFiles)
+ assertEqual(t, expected_NoGoFiles, *actual)
+}
+
+func TestParsePackage_NoTestFiles_ReturnsPackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expected_NoTestFiles.PackageName}
+ ParsePackageResults(actual, input_NoTestFiles)
+ assertEqual(t, expected_NoTestFiles, *actual)
+}
+
+func TestParsePackage_NoTestFunctions_ReturnsPackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expected_NoTestFunctions.PackageName}
+ ParsePackageResults(actual, input_NoTestFunctions)
+ assertEqual(t, expected_NoTestFunctions, *actual)
+}
+
+func TestParsePackage_BuildFailed_ReturnsPackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expected_BuildFailed_InvalidPackageDeclaration.PackageName}
+ ParsePackageResults(actual, input_BuildFailed_InvalidPackageDeclaration)
+ assertEqual(t, expected_BuildFailed_InvalidPackageDeclaration, *actual)
+
+ actual = &contract.PackageResult{PackageName: expected_BuildFailed_OtherErrors.PackageName}
+ ParsePackageResults(actual, input_BuildFailed_OtherErrors)
+ assertEqual(t, expected_BuildFailed_OtherErrors, *actual)
+
+ actual = &contract.PackageResult{PackageName: expected_BuildFailed_ImportCycle.PackageName}
+ ParsePackageResults(actual, input_BuildFailed_ImportCycle)
+ assertEqual(t, expected_BuildFailed_ImportCycle, *actual)
+
+ actual = &contract.PackageResult{PackageName: expected_BuildFailed_CantFindPackage.PackageName}
+ ParsePackageResults(actual, input_BuildFailed_CantFindPackage)
+ assertEqual(t, expected_BuildFailed_CantFindPackage, *actual)
+
+ actual = &contract.PackageResult{PackageName: expected_BuildFailed_ConflictingImport.PackageName}
+ ParsePackageResults(actual, input_BuildFailed_ConfictingImport)
+ assertEqual(t, expected_BuildFailed_ConflictingImport, *actual)
+}
+
+func TestParsePackage_OldSchoolWithFailureOutput_ReturnsCompletePackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expectedOldSchool_Fails.PackageName}
+ ParsePackageResults(actual, inputOldSchool_Fails)
+ assertEqual(t, expectedOldSchool_Fails, *actual)
+}
+
+func TestParsePackage_OldSchoolWithSuccessOutput_ReturnsCompletePackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expectedOldSchool_Passes.PackageName}
+ ParsePackageResults(actual, inputOldSchool_Passes)
+ assertEqual(t, expectedOldSchool_Passes, *actual)
+}
+
+func TestParsePackage_OldSchoolWithPanicOutput_ReturnsCompletePackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expectedOldSchool_Panics.PackageName}
+ ParsePackageResults(actual, inputOldSchool_Panics)
+ assertEqual(t, expectedOldSchool_Panics, *actual)
+}
+
+func TestParsePackage_GoConveyOutput_ReturnsCompletePackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expectedGoConvey.PackageName}
+ ParsePackageResults(actual, inputGoConvey)
+ assertEqual(t, expectedGoConvey, *actual)
+}
+
+func TestParsePackage_ActualPackageNameDifferentThanDirectoryName_ReturnsActualPackageName(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: strings.Replace(expectedGoConvey.PackageName, "examples", "stuff", -1)}
+ ParsePackageResults(actual, inputGoConvey)
+ assertEqual(t, expectedGoConvey, *actual)
+}
+
+func TestParsePackage_GoConveyOutputMalformed_CausesPanic(t *testing.T) {
+ defer func() {
+ if r := recover(); r != nil {
+ message := fmt.Sprintf("%v", r)
+ if !strings.Contains(message, "bug report") {
+ t.Errorf("Should have panicked with a request to file a bug report but we received this error instead: %s", message)
+ }
+ } else {
+ t.Errorf("Should have panicked with a request to file a bug report but we received no error.")
+ }
+ }()
+
+ actual := &contract.PackageResult{PackageName: expectedGoConvey.PackageName}
+ ParsePackageResults(actual, inputGoConvey_Malformed)
+}
+
+func TestParsePackage_GoConveyWithRandomOutput_ReturnsPackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expectedGoConvey_WithRandomOutput.PackageName}
+ ParsePackageResults(actual, inputGoConvey_WithRandomOutput)
+ assertEqual(t, expectedGoConvey_WithRandomOutput, *actual)
+}
+
+func TestParsePackage_OldSchoolWithSuccessAndBogusCoverage_ReturnsCompletePackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expectedOldSchool_PassesButCoverageIsBogus.PackageName}
+ ParsePackageResults(actual, inputOldSchool_PassesButCoverageIsBogus)
+ assertEqual(t, expectedOldSchool_PassesButCoverageIsBogus, *actual)
+}
+
+func TestParsePackage_NestedTests_ReturnsPackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expectedNestedTests.PackageName}
+ ParsePackageResults(actual, inputNestedTests)
+ assertEqual(t, expectedNestedTests, *actual)
+}
+
+func TestParsePackage_WithExampleFunctions_ReturnsPackageResult(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expectedExampleFunctions.PackageName}
+ ParsePackageResults(actual, inputExampleFunctions)
+ assertEqual(t, expectedExampleFunctions, *actual)
+}
+
+func TestParsePackage_Golang15Output_ShouldNotPanic(t *testing.T) {
+ actual := &contract.PackageResult{PackageName: expectedGolang15.PackageName}
+ ParsePackageResults(actual, inputGolang15)
+ assertEqual(t, expectedGolang15, *actual)
+}
+
+func assertEqual(t *testing.T, expected, actual interface{}) {
+ a, _ := json.Marshal(expected)
+ b, _ := json.Marshal(actual)
+ if string(a) != string(b) {
+ t.Errorf(failureTemplate, string(a), string(b))
+ }
+}
+
+const failureTemplate = "Comparison failed:\n Expected: %v\n Actual: %v\n"
+
+const input_NoGoFiles = `can't load package: package github.com/smartystreets/goconvey: no buildable Go source files in /Users/matt/Work/Dev/goconvey/src/github.com/smartystreets/goconvey`
+
+var expected_NoGoFiles = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey",
+ Outcome: contract.NoGoFiles,
+ BuildOutput: input_NoGoFiles,
+}
+
+const input_NoTestFiles = `? pkg.smartystreets.net/liveaddress-zipapi [no test files]`
+
+var expected_NoTestFiles = contract.PackageResult{
+ PackageName: "pkg.smartystreets.net/liveaddress-zipapi",
+ Outcome: contract.NoTestFiles,
+ BuildOutput: input_NoTestFiles,
+}
+
+const input_NoTestFunctions = `testing: warning: no tests to run
+PASS
+ok github.com/smartystreets/goconvey/scripts 0.011s`
+
+var expected_NoTestFunctions = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/scripts",
+ Outcome: contract.NoTestFunctions,
+ BuildOutput: input_NoTestFunctions,
+}
+
+const input_BuildFailed_InvalidPackageDeclaration = `
+can't load package: package github.com/smartystreets/goconvey/examples:
+bowling_game_test.go:9:1: expected 'package', found 'IDENT' asdf
+bowling_game_test.go:10:1: invalid package name _
+`
+
+var expected_BuildFailed_InvalidPackageDeclaration = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/examples",
+ Outcome: contract.BuildFailure,
+ BuildOutput: strings.TrimSpace(input_BuildFailed_InvalidPackageDeclaration),
+}
+
+const input_BuildFailed_CantFindPackage = `
+bowling_game.go:3:8: cannot find package "format" in any of:
+ /usr/local/go/src/pkg/format (from $GOROOT)
+ /Users/mike/work/dev/goconvey/src/format (from $GOPATH)
+FAIL github.com/smartystreets/goconvey/examples [setup failed]
+`
+
+var expected_BuildFailed_CantFindPackage = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/examples",
+ Outcome: contract.BuildFailure,
+ BuildOutput: strings.TrimSpace(input_BuildFailed_CantFindPackage),
+}
+
+const input_BuildFailed_ConfictingImport = `
+mutustus.go:4:2: found packages e (e.go) and err (prepend.go) in /Users/mike/src/utensils.git/e
+`
+
+var expected_BuildFailed_ConflictingImport = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/examples",
+ Outcome: contract.BuildFailure,
+ BuildOutput: strings.TrimSpace(input_BuildFailed_ConfictingImport),
+}
+
+const input_BuildFailed_OtherErrors = `
+# github.com/smartystreets/goconvey/examples
+./bowling_game_test.go:22: undefined: game
+./bowling_game_test.go:22: cannot assign to game
+./bowling_game_test.go:25: undefined: game
+./bowling_game_test.go:28: undefined: game
+./bowling_game_test.go:33: undefined: game
+./bowling_game_test.go:36: undefined: game
+./bowling_game_test.go:41: undefined: game
+./bowling_game_test.go:42: undefined: game
+./bowling_game_test.go:43: undefined: game
+./bowling_game_test.go:46: undefined: game
+./bowling_game_test.go:46: too many errors
+FAIL github.com/smartystreets/goconvey/examples [build failed]
+`
+
+var expected_BuildFailed_OtherErrors = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/examples",
+ Outcome: contract.BuildFailure,
+ BuildOutput: strings.TrimSpace(input_BuildFailed_OtherErrors),
+}
+
+const input_BuildFailed_ImportCycle = `
+# github.com/smartystreets/goconvey/t
+./t_test.go:23: import "github.com/smartystreets/goconvey/t" while compiling that package (import cycle)
+FAIL github.com/smartystreets/goconvey/t [build failed]
+`
+
+var expected_BuildFailed_ImportCycle = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/t",
+ Outcome: contract.BuildFailure,
+ BuildOutput: strings.TrimSpace(input_BuildFailed_ImportCycle),
+}
+
+const inputOldSchool_Passes = `
+=== RUN TestOldSchool_Passes
+--- PASS: TestOldSchool_Passes (0.02 seconds)
+=== RUN TestSkippingTests
+--- SKIP: TestSkippingTests (0.00 seconds)
+ old_school_test.go:8: blah
+=== RUN TestOldSchool_PassesWithMessage
+--- PASS: TestOldSchool_PassesWithMessage (0.05 seconds)
+ old_school_test.go:10: I am a passing test.
+ With a newline.
+PASS
+coverage: 100.0%% of statements in github.com/smartystreets/goconvey/convey, github.com/smartystreets/goconvey/convey/gotest, github.com/smartystreets/goconvey/convey/reporting
+ok github.com/smartystreets/goconvey/webserver/examples 0.018s
+`
+
+var expectedOldSchool_Passes = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/webserver/examples",
+ Elapsed: 0.018,
+ Coverage: 100,
+ Outcome: contract.Passed,
+ TestResults: []contract.TestResult{
+ contract.TestResult{
+ TestName: "TestOldSchool_Passes",
+ Elapsed: 0.02,
+ Passed: true,
+ File: "",
+ Line: 0,
+ Message: "",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestOldSchool_PassesWithMessage",
+ Elapsed: 0.05,
+ Passed: true,
+ File: "old_school_test.go",
+ Line: 10,
+ Message: "old_school_test.go:10: I am a passing test.\nWith a newline.",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestSkippingTests",
+ Elapsed: 0,
+ Passed: true,
+ Skipped: true,
+ File: "old_school_test.go",
+ Line: 8,
+ Message: "old_school_test.go:8: blah",
+ Stories: []reporting.ScopeResult{},
+ },
+ },
+}
+
+const inputOldSchool_Fails = `
+=== RUN TestOldSchool_Passes
+--- PASS: TestOldSchool_Passes (0.01 seconds)
+=== RUN TestOldSchool_PassesWithMessage
+--- PASS: TestOldSchool_PassesWithMessage (0.03 seconds)
+ old_school_test.go:10: I am a passing test.
+ With a newline.
+=== RUN TestOldSchool_Failure
+--- FAIL: TestOldSchool_Failure (0.06 seconds)
+=== RUN TestOldSchool_FailureWithReason
+--- FAIL: TestOldSchool_FailureWithReason (0.11 seconds)
+ old_school_test.go:18: I am a failing test.
+FAIL
+exit status 1
+FAIL github.com/smartystreets/goconvey/webserver/examples 0.017s
+`
+
+var expectedOldSchool_Fails = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/webserver/examples",
+ Outcome: contract.Failed,
+ Elapsed: 0.017,
+ TestResults: []contract.TestResult{
+ contract.TestResult{
+ TestName: "TestOldSchool_Failure",
+ Elapsed: 0.06,
+ Passed: false,
+ File: "",
+ Line: 0,
+ Message: "",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestOldSchool_FailureWithReason",
+ Elapsed: 0.11,
+ Passed: false,
+ File: "old_school_test.go",
+ Line: 18,
+ Message: "old_school_test.go:18: I am a failing test.",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestOldSchool_Passes",
+ Elapsed: 0.01,
+ Passed: true,
+ File: "",
+ Line: 0,
+ Message: "",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestOldSchool_PassesWithMessage",
+ Elapsed: 0.03,
+ Passed: true,
+ File: "old_school_test.go",
+ Line: 10,
+ Message: "old_school_test.go:10: I am a passing test.\nWith a newline.",
+ Stories: []reporting.ScopeResult{},
+ },
+ },
+}
+
+const inputOldSchool_Panics = `
+=== RUN TestOldSchool_Panics
+--- FAIL: TestOldSchool_Panics (0.02 seconds)
+panic: runtime error: index out of range [recovered]
+ panic: runtime error: index out of range
+
+goroutine 3 [running]:
+testing.func·004()
+ /usr/local/go/src/pkg/testing/testing.go:348 +0xcd
+github.com/smartystreets/goconvey/webserver/examples.TestOldSchool_Panics(0x210292000)
+ /Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/something_test.go:15 +0xec
+testing.tRunner(0x210292000, 0x1b09f0)
+ /usr/local/go/src/pkg/testing/testing.go:353 +0x8a
+created by testing.RunTests
+ /usr/local/go/src/pkg/testing/testing.go:433 +0x86b
+
+goroutine 1 [chan receive]:
+testing.RunTests(0x138f38, 0x1b09f0, 0x1, 0x1, 0x1, ...)
+ /usr/local/go/src/pkg/testing/testing.go:434 +0x88e
+testing.Main(0x138f38, 0x1b09f0, 0x1, 0x1, 0x1b7f60, ...)
+ /usr/local/go/src/pkg/testing/testing.go:365 +0x8a
+main.main()
+ github.com/smartystreets/goconvey/webserver/examples/_test/_testmain.go:43 +0x9a
+exit status 2
+FAIL github.com/smartystreets/goconvey/webserver/examples 0.014s
+`
+
+var expectedOldSchool_Panics = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/webserver/examples",
+ Elapsed: 0.014,
+ Outcome: contract.Panicked,
+ TestResults: []contract.TestResult{
+ contract.TestResult{
+ TestName: "TestOldSchool_Panics",
+ Elapsed: 0.02,
+ Passed: false,
+ File: "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/something_test.go",
+ Line: 15,
+ Message: "",
+ Error: strings.Replace(`panic: runtime error: index out of range [recovered]
+ panic: runtime error: index out of range
+
+goroutine 3 [running]:
+testing.func·004()
+ /usr/local/go/src/pkg/testing/testing.go:348 +0xcd
+github.com/smartystreets/goconvey/webserver/examples.TestOldSchool_Panics(0x210292000)
+ /Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/something_test.go:15 +0xec
+testing.tRunner(0x210292000, 0x1b09f0)
+ /usr/local/go/src/pkg/testing/testing.go:353 +0x8a
+created by testing.RunTests
+ /usr/local/go/src/pkg/testing/testing.go:433 +0x86b
+
+goroutine 1 [chan receive]:
+testing.RunTests(0x138f38, 0x1b09f0, 0x1, 0x1, 0x1, ...)
+ /usr/local/go/src/pkg/testing/testing.go:434 +0x88e
+testing.Main(0x138f38, 0x1b09f0, 0x1, 0x1, 0x1b7f60, ...)
+ /usr/local/go/src/pkg/testing/testing.go:365 +0x8a
+main.main()
+ github.com/smartystreets/goconvey/webserver/examples/_test/_testmain.go:43 +0x9a`, "\u0009", "\t", -1),
+ Stories: []reporting.ScopeResult{},
+ },
+ },
+}
+
+const inputGoConvey_Malformed = `
+=== RUN TestPassingStory
+>->->OPEN-JSON->->->
+{
+ "Title": "A passing story",
+ "File": "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go",
+ "Line": 11,
+ "Depth": 0,
+ "Assertions": [
+ {
+ "File": "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go",
+ "Line": 10,
+ "Failure": "",
+
+ ;aiwheopinen39 n3902n92m
+
+ "Error": null,
+ "Skipped": false,
+ "StackTrace": "goroutine 3 [running]:\ngithub.com/smartystreets/goconvey/webserver/examples.func·001()\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go:10 +0xe3\ngithub.com/smartystreets/goconvey/webserver/examples.TestPassingStory(0x210314000)\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go:11 +0xec\ntesting.tRunner(0x210314000, 0x21ab10)\n\u0009/usr/local/go/src/pkg/testing/testing.go:353 +0x8a\ncreated by testing.RunTests\n\u0009/usr/local/go/src/pkg/testing/testing.go:433 +0x86b\n"
+ }
+ ]
+},
+<-<-<-CLOSE-JSON<-<-<
+--- PASS: TestPassingStory (0.01 seconds)
+PASS
+ok github.com/smartystreets/goconvey/webserver/examples 0.019s
+`
+
+const inputGoConvey = `
+=== RUN TestPassingStory
+>->->OPEN-JSON->->->
+{
+ "Title": "A passing story",
+ "File": "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go",
+ "Line": 11,
+ "Depth": 0,
+ "Assertions": [
+ {
+ "File": "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go",
+ "Line": 10,
+ "Failure": "",
+ "Error": null,
+ "Skipped": false,
+ "StackTrace": "goroutine 3 [running]:\ngithub.com/smartystreets/goconvey/webserver/examples.func·001()\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go:10 +0xe3\ngithub.com/smartystreets/goconvey/webserver/examples.TestPassingStory(0x210314000)\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go:11 +0xec\ntesting.tRunner(0x210314000, 0x21ab10)\n\u0009/usr/local/go/src/pkg/testing/testing.go:353 +0x8a\ncreated by testing.RunTests\n\u0009/usr/local/go/src/pkg/testing/testing.go:433 +0x86b\n"
+ }
+ ]
+},
+<-<-<-CLOSE-JSON<-<-<
+--- PASS: TestPassingStory (0.01 seconds)
+PASS
+coverage: 75.5%% of statements
+ok github.com/smartystreets/goconvey/webserver/examples 0.019s
+`
+
+var expectedGoConvey = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/webserver/examples",
+ Elapsed: 0.019,
+ Outcome: contract.Passed,
+ Coverage: 75.5,
+ TestResults: []contract.TestResult{
+ contract.TestResult{
+ TestName: "TestPassingStory",
+ Elapsed: 0.01,
+ Passed: true,
+ File: "",
+ Line: 0,
+ Message: "",
+ Stories: []reporting.ScopeResult{
+ reporting.ScopeResult{
+ Title: "A passing story",
+ File: "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go",
+ Line: 11,
+ Depth: 0,
+ Assertions: []*reporting.AssertionResult{
+ &reporting.AssertionResult{
+ File: "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go",
+ Line: 10,
+ Failure: "",
+ Error: nil,
+ Skipped: false,
+ StackTrace: "goroutine 3 [running]:\ngithub.com/smartystreets/goconvey/webserver/examples.func·001()\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go:10 +0xe3\ngithub.com/smartystreets/goconvey/webserver/examples.TestPassingStory(0x210314000)\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/webserver/examples/old_school_test.go:11 +0xec\ntesting.tRunner(0x210314000, 0x21ab10)\n\u0009/usr/local/go/src/pkg/testing/testing.go:353 +0x8a\ncreated by testing.RunTests\n\u0009/usr/local/go/src/pkg/testing/testing.go:433 +0x86b\n",
+ },
+ },
+ },
+ },
+ },
+ },
+}
+
+const inputGoConvey_WithRandomOutput = `
+=== RUN TestPassingStory
+*** Hello, World! (1) ***
+*** Hello, World! (2) ***
+*** Hello, World! (3) ***>->->OPEN-JSON->->->
+{
+ "Title": "A passing story",
+ "File": "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go",
+ "Line": 16,
+ "Depth": 0,
+ "Assertions": [
+ {
+ "File": "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go",
+ "Line": 14,
+ "Failure": "",
+ "Error": null,
+ "Skipped": false,
+ "StackTrace": "goroutine 3 [running]:\ngithub.com/smartystreets/goconvey/web/server/testing.func·001()\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go:14 +0x186\ngithub.com/smartystreets/goconvey/web/server/testing.TestPassingStory(0x210315000)\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go:16 +0x1b9\ntesting.tRunner(0x210315000, 0x21bb10)\n\u0009/usr/local/go/src/pkg/testing/testing.go:353 +0x8a\ncreated by testing.RunTests\n\u0009/usr/local/go/src/pkg/testing/testing.go:433 +0x86b\n"
+ }
+ ]
+},
+<-<-<-CLOSE-JSON<-<-<
+*** Hello, World! (4)***
+*** Hello, World! (5) ***
+>->->OPEN-JSON->->->
+{
+ "Title": "A passing story",
+ "File": "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go",
+ "Line": 22,
+ "Depth": 0,
+ "Assertions": [
+ {
+ "File": "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go",
+ "Line": 20,
+ "Failure": "",
+ "Error": null,
+ "Skipped": false,
+ "StackTrace": "goroutine 3 [running]:\ngithub.com/smartystreets/goconvey/web/server/testing.func·002()\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go:20 +0x186\ngithub.com/smartystreets/goconvey/web/server/testing.TestPassingStory(0x210315000)\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go:22 +0x294\ntesting.tRunner(0x210315000, 0x21bb10)\n\u0009/usr/local/go/src/pkg/testing/testing.go:353 +0x8a\ncreated by testing.RunTests\n\u0009/usr/local/go/src/pkg/testing/testing.go:433 +0x86b\n"
+ }
+ ]
+},
+<-<-<-CLOSE-JSON<-<-<
+*** Hello, World! (6) ***
+--- PASS: TestPassingStory (0.03 seconds)
+PASS
+coverage: 45.0%% of statements
+ok github.com/smartystreets/goconvey/web/server/testing 0.024s
+`
+
+var expectedGoConvey_WithRandomOutput = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/web/server/testing",
+ Elapsed: 0.024,
+ Outcome: contract.Passed,
+ Coverage: 45.0,
+ TestResults: []contract.TestResult{
+ contract.TestResult{
+ TestName: "TestPassingStory",
+ Elapsed: 0.03,
+ Passed: true,
+ File: "",
+ Line: 0,
+ Message: "*** Hello, World! (1) ***\n*** Hello, World! (2) ***\n*** Hello, World! (3) ***\n*** Hello, World! (4)***\n*** Hello, World! (5) ***\n*** Hello, World! (6) ***",
+ Stories: []reporting.ScopeResult{
+ reporting.ScopeResult{
+ Title: "A passing story",
+ File: "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go",
+ Line: 16,
+ Depth: 0,
+ Assertions: []*reporting.AssertionResult{
+ &reporting.AssertionResult{
+ File: "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go",
+ Line: 14,
+ Failure: "",
+ Error: nil,
+ Skipped: false,
+ StackTrace: "goroutine 3 [running]:\ngithub.com/smartystreets/goconvey/web/server/testing.func·001()\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go:14 +0x186\ngithub.com/smartystreets/goconvey/web/server/testing.TestPassingStory(0x210315000)\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go:16 +0x1b9\ntesting.tRunner(0x210315000, 0x21bb10)\n\u0009/usr/local/go/src/pkg/testing/testing.go:353 +0x8a\ncreated by testing.RunTests\n\u0009/usr/local/go/src/pkg/testing/testing.go:433 +0x86b\n",
+ },
+ },
+ },
+ reporting.ScopeResult{
+ Title: "A passing story",
+ File: "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go",
+ Line: 22,
+ Depth: 0,
+ Assertions: []*reporting.AssertionResult{
+ &reporting.AssertionResult{
+ File: "/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go",
+ Line: 20,
+ Failure: "",
+ Error: nil,
+ Skipped: false,
+ StackTrace: "goroutine 3 [running]:\ngithub.com/smartystreets/goconvey/web/server/testing.func·002()\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go:20 +0x186\ngithub.com/smartystreets/goconvey/web/server/testing.TestPassingStory(0x210315000)\n\u0009/Users/mike/work/dev/goconvey/src/github.com/smartystreets/goconvey/web/server/testing/go_test.go:22 +0x294\ntesting.tRunner(0x210315000, 0x21bb10)\n\u0009/usr/local/go/src/pkg/testing/testing.go:353 +0x8a\ncreated by testing.RunTests\n\u0009/usr/local/go/src/pkg/testing/testing.go:433 +0x86b\n",
+ },
+ },
+ },
+ },
+ },
+ },
+}
+
+const inputOldSchool_PassesButCoverageIsBogus = `
+=== RUN TestOldSchool_Passes
+--- PASS: TestOldSchool_Passes (0.02 seconds)
+=== RUN TestOldSchool_PassesWithMessage
+--- PASS: TestOldSchool_PassesWithMessage (0.05 seconds)
+ old_school_test.go:10: I am a passing test.
+ With a newline.
+PASS
+coverage: bogus%% of statements
+ok github.com/smartystreets/goconvey/webserver/examples 0.018s
+`
+
+var expectedOldSchool_PassesButCoverageIsBogus = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/webserver/examples",
+ Elapsed: 0.018,
+ Coverage: -1,
+ Outcome: contract.Passed,
+ TestResults: []contract.TestResult{
+ contract.TestResult{
+ TestName: "TestOldSchool_Passes",
+ Elapsed: 0.02,
+ Passed: true,
+ File: "",
+ Line: 0,
+ Message: "",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestOldSchool_PassesWithMessage",
+ Elapsed: 0.05,
+ Passed: true,
+ File: "old_school_test.go",
+ Line: 10,
+ Message: "old_school_test.go:10: I am a passing test.\nWith a newline.",
+ Stories: []reporting.ScopeResult{},
+ },
+ },
+}
+
+const inputNestedTests = `
+=== RUN TestNestedTests
+=== RUN TestNestedTests_Passes
+--- PASS: TestNestedTests_Passes (0.02 seconds)
+=== RUN TestNestedTests_Failure
+--- FAIL: TestNestedTests_Failure (0.06 seconds)
+=== RUN TestNestedTests_FailureWithReason
+--- FAIL: TestNestedTests_FailureWithReason (0.11 seconds)
+ nested_test.go:18: I am a failing test.
+=== RUN TestNestedTests_Skipping
+--- SKIP: TestNestedTests_Skipping (0.00 seconds)
+ nested_test.go:8: blah
+=== RUN TestNestedTests_PassesWithMessage
+--- PASS: TestNestedTests_PassesWithMessage (0.05 seconds)
+ nested_test.go:10: I am a passing test.
+ With a newline.
+--- FAIL: TestNestedTests (0.25 seconds)
+FAIL
+exit status 1
+FAIL github.com/smartystreets/goconvey/webserver/examples 0.018s
+`
+
+var expectedNestedTests = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/webserver/examples",
+ Elapsed: 0.018,
+ Outcome: contract.Failed,
+ TestResults: []contract.TestResult{
+ contract.TestResult{
+ TestName: "TestNestedTests",
+ Elapsed: 0.25,
+ Passed: false,
+ File: "",
+ Line: 0,
+ Message: "",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestNestedTests_Failure",
+ Elapsed: 0.06,
+ Passed: false,
+ File: "",
+ Line: 0,
+ Message: "",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestNestedTests_FailureWithReason",
+ Elapsed: 0.11,
+ Passed: false,
+ File: "nested_test.go",
+ Line: 18,
+ Message: "nested_test.go:18: I am a failing test.",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestNestedTests_Passes",
+ Elapsed: 0.02,
+ Passed: true,
+ File: "",
+ Line: 0,
+ Message: "",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestNestedTests_PassesWithMessage",
+ Elapsed: 0.05,
+ Passed: true,
+ File: "nested_test.go",
+ Line: 10,
+ Message: "nested_test.go:10: I am a passing test.\nWith a newline.",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "TestNestedTests_Skipping",
+ Elapsed: 0.00,
+ Passed: true,
+ Skipped: true,
+ File: "nested_test.go",
+ Line: 8,
+ Message: "nested_test.go:8: blah",
+ Stories: []reporting.ScopeResult{},
+ },
+ },
+}
+
+const inputExampleFunctions = `
+=== RUN Example_Failure
+--- FAIL: Example_Failure (0.11 seconds)
+got:
+actual output
+want:
+real output
+=== RUN Example_Pass
+--- PASS: Example_Pass (0.06 seconds)
+FAIL
+exit status 1
+FAIL github.com/smartystreets/goconvey/webserver/examples 0.18s
+`
+
+var expectedExampleFunctions = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/webserver/examples",
+ Elapsed: 0.18,
+ Outcome: contract.Failed,
+ TestResults: []contract.TestResult{
+ contract.TestResult{
+ TestName: "Example_Failure",
+ Elapsed: 0.11,
+ Passed: false,
+ File: "",
+ Line: 0,
+ Message: "got:\nactuall output\nwant:\nreal output",
+ Stories: []reporting.ScopeResult{},
+ },
+ contract.TestResult{
+ TestName: "Example_Pass",
+ Elapsed: 0.06,
+ Passed: true,
+ File: "",
+ Line: 0,
+ Message: "",
+ Stories: []reporting.ScopeResult{},
+ },
+ },
+}
+
+const inputGolang15 = `
+=== RUN Golang15
+--- PASS: Golang15 (0.00s)
+PASS
+ok github.com/smartystreets/goconvey/webserver/examples 0.008s
+`
+
+var expectedGolang15 = contract.PackageResult{
+ PackageName: "github.com/smartystreets/goconvey/webserver/examples",
+ Elapsed: 0.008,
+ Outcome: contract.Passed,
+ TestResults: []contract.TestResult{
+ contract.TestResult{
+ TestName: "Golang15",
+ Elapsed: 0.00,
+ Passed: true,
+ File: "",
+ Line: 0,
+ Message: "",
+ Stories: []reporting.ScopeResult{},
+ },
+ },
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser.go
new file mode 100644
index 00000000000..f6250caf346
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser.go
@@ -0,0 +1,32 @@
+package parser
+
+import (
+ "log"
+
+ "github.com/smartystreets/goconvey/web/server/contract"
+)
+
+type Parser struct {
+ parser func(*contract.PackageResult, string)
+}
+
+func (self *Parser) Parse(packages []*contract.Package) {
+ for _, p := range packages {
+ if p.Active() && p.HasUsableResult() {
+ self.parser(p.Result, p.Output)
+ } else if p.Ignored {
+ p.Result.Outcome = contract.Ignored
+ } else if p.Disabled {
+ p.Result.Outcome = contract.Disabled
+ } else {
+ p.Result.Outcome = contract.TestRunAbortedUnexpectedly
+ }
+ log.Printf("[%s]: %s\n", p.Result.Outcome, p.Name)
+ }
+}
+
+func NewParser(helper func(*contract.PackageResult, string)) *Parser {
+ self := new(Parser)
+ self.parser = helper
+ return self
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser.goconvey b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser.goconvey
new file mode 100644
index 00000000000..79982854b53
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser.goconvey
@@ -0,0 +1,2 @@
+#ignore
+-timeout=1s
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser_test.go
new file mode 100644
index 00000000000..de695e3fd7e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/parser_test.go
@@ -0,0 +1,47 @@
+package parser
+
+import (
+ "errors"
+ "testing"
+
+ . "github.com/smartystreets/goconvey/convey"
+ "github.com/smartystreets/goconvey/web/server/contract"
+)
+
+func TestParser(t *testing.T) {
+
+ Convey("Subject: Parser parses test output for active packages", t, func() {
+ packages := []*contract.Package{
+ &contract.Package{Ignored: false, Output: "Active", Result: contract.NewPackageResult("asdf")},
+ &contract.Package{Ignored: true, Output: "Inactive", Result: contract.NewPackageResult("qwer")},
+ }
+ parser := NewParser(fakeParserImplementation)
+
+ Convey("When given a collection of packages", func() {
+ parser.Parse(packages)
+
+ Convey("The parser uses its internal parsing mechanism to parse the output of only the active packages", func() {
+ So(packages[0].Result.Outcome, ShouldEqual, packages[0].Output)
+ })
+
+ Convey("The parser should mark inactive packages as ignored", func() {
+ So(packages[1].Result.Outcome, ShouldEqual, contract.Ignored)
+ })
+ })
+
+ Convey("When a package could not be tested (maybe it was deleted between scanning and execution?)", func() {
+ packages[0].Output = ""
+ packages[0].Error = errors.New("Directory does not exist")
+
+ parser.Parse(packages)
+
+ Convey("The package result should not be parsed and the outcome should actually resemble the problem", func() {
+ So(packages[0].Result.Outcome, ShouldEqual, contract.TestRunAbortedUnexpectedly)
+ })
+ })
+ })
+}
+
+func fakeParserImplementation(result *contract.PackageResult, rawOutput string) {
+ result.Outcome = rawOutput
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/rules.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/rules.go
new file mode 100644
index 00000000000..f8d05d83066
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/rules.go
@@ -0,0 +1,43 @@
+package parser
+
+import "strings"
+
+func noGoFiles(line string) bool {
+ return strings.HasPrefix(line, "can't load package: ") &&
+ strings.Contains(line, ": no buildable Go source files in ")
+}
+func buildFailed(line string) bool {
+ return strings.HasPrefix(line, "# ") ||
+ strings.Contains(line, "cannot find package") ||
+ (strings.HasPrefix(line, "can't load package: ") && !strings.Contains(line, ": no Go source files in ")) ||
+ (strings.Contains(line, ": found packages ") && strings.Contains(line, ".go) and ") && strings.Contains(line, ".go) in "))
+}
+func noTestFunctions(line string) bool {
+ return line == "testing: warning: no tests to run"
+}
+func noTestFiles(line string) bool {
+ return strings.HasPrefix(line, "?") && strings.Contains(line, "[no test files]")
+}
+func isNewTest(line string) bool {
+ return strings.HasPrefix(line, "=== ")
+}
+func isTestResult(line string) bool {
+ return strings.HasPrefix(line, "--- ")
+}
+func isPackageReport(line string) bool {
+ return (strings.HasPrefix(line, "FAIL") ||
+ strings.HasPrefix(line, "exit status") ||
+ strings.HasPrefix(line, "PASS") ||
+ isCoverageSummary(line) ||
+ packagePassed(line))
+}
+
+func packageFailed(line string) bool {
+ return strings.HasPrefix(line, "FAIL\t")
+}
+func packagePassed(line string) bool {
+ return strings.HasPrefix(line, "ok \t")
+}
+func isCoverageSummary(line string) bool {
+ return strings.HasPrefix(line, "coverage: ") && strings.Contains(line, "% of statements")
+}
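Editorial note (not part of the vendored diff): the unexported predicates in rules.go are the vocabulary the parser package uses to walk raw `go test -v` output line by line. The sketch below is a hedged illustration of how they partition lines, using samples from the fixtures above; the classify helper and the ordering of its cases are assumptions for illustration, not goconvey API.

package parser

// classify is a hypothetical helper showing how the predicates above label
// single lines of `go test` output. For example,
// "? pkg.smartystreets.net/liveaddress-zipapi [no test files]" yields
// "no test files", and "--- PASS: TestOldSchool_Passes (0.02 seconds)"
// yields "test result".
func classify(line string) string {
	switch {
	case noGoFiles(line):
		return "no go files"
	case noTestFiles(line):
		return "no test files"
	case noTestFunctions(line):
		return "no test functions"
	case buildFailed(line):
		return "build failed"
	case isNewTest(line):
		return "test start"
	case isTestResult(line):
		return "test result"
	case isPackageReport(line):
		return "package report"
	default:
		return "captured test output"
	}
}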
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/testParser.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/testParser.go
new file mode 100644
index 00000000000..fe8f5110b6c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/testParser.go
@@ -0,0 +1,174 @@
+package parser
+
+import (
+ "encoding/json"
+ "fmt"
+ "strconv"
+ "strings"
+
+ "github.com/smartystreets/goconvey/convey/reporting"
+ "github.com/smartystreets/goconvey/web/server/contract"
+)
+
+type testParser struct {
+ test *contract.TestResult
+ line string
+ index int
+ inJson bool
+ jsonLines []string
+ otherLines []string
+}
+
+func parseTestOutput(test *contract.TestResult) *contract.TestResult {
+ parser := newTestParser(test)
+ parser.parseTestFunctionOutput()
+ return test
+}
+
+func newTestParser(test *contract.TestResult) *testParser {
+ self := new(testParser)
+ self.test = test
+ return self
+}
+
+func (self *testParser) parseTestFunctionOutput() {
+ if len(self.test.RawLines) > 0 {
+ self.processLines()
+ self.deserializeJson()
+ self.composeCapturedOutput()
+ }
+}
+
+func (self *testParser) processLines() {
+ for self.index, self.line = range self.test.RawLines {
+ if !self.processLine() {
+ break
+ }
+ }
+}
+
+func (self *testParser) processLine() bool {
+ if strings.HasSuffix(self.line, reporting.OpenJson) {
+ self.inJson = true
+ self.accountForOutputWithoutNewline()
+
+ } else if self.line == reporting.CloseJson {
+ self.inJson = false
+
+ } else if self.inJson {
+ self.jsonLines = append(self.jsonLines, self.line)
+
+ } else if isPanic(self.line) {
+ self.parsePanicOutput()
+ return false
+
+ } else if isGoTestLogOutput(self.line) {
+ self.parseLogLocation()
+
+ } else {
+ self.otherLines = append(self.otherLines, self.line)
+ }
+ return true
+}
+
+// If fmt.Print or fmt.Printf produces output with no trailing \n and that
+// output is the last output before the framework emits its JSON report
+// (which begins with the reporting.OpenJson marker), then without this code
+// all of the JSON would be counted as captured output instead of as JSON to
+// be parsed and displayed by the web UI.
+func (self *testParser) accountForOutputWithoutNewline() {
+ prefix := strings.Split(self.line, reporting.OpenJson)[0]
+ if prefix != "" {
+ self.otherLines = append(self.otherLines, prefix)
+ }
+}
+
+func (self *testParser) deserializeJson() {
+ formatted := createArrayForJsonItems(self.jsonLines)
+ var scopes []reporting.ScopeResult
+ err := json.Unmarshal(formatted, &scopes)
+ if err != nil {
+ panic(fmt.Sprintf(bugReportRequest, err, formatted))
+ }
+ self.test.Stories = scopes
+}
+func (self *testParser) parsePanicOutput() {
+ for index, line := range self.test.RawLines[self.index:] {
+ self.parsePanicLocation(index, line)
+ self.preserveStackTraceIndentation(index, line)
+ }
+ self.test.Error = strings.Join(self.test.RawLines, "\n")
+}
+func (self *testParser) parsePanicLocation(index int, line string) {
+ if !panicLineHasMetadata(line) {
+ return
+ }
+ metaLine := self.test.RawLines[index+4]
+ fields := strings.Split(metaLine, " ")
+ fileAndLine := strings.Split(fields[0], ":")
+ self.test.File = fileAndLine[0]
+ if len(fileAndLine) >= 2 {
+ self.test.Line, _ = strconv.Atoi(fileAndLine[1])
+ }
+}
+func (self *testParser) preserveStackTraceIndentation(index int, line string) {
+ if panicLineShouldBeIndented(index, line) {
+ self.test.RawLines[index] = "\t" + line
+ }
+}
+func (self *testParser) parseLogLocation() {
+ self.otherLines = append(self.otherLines, self.line)
+ lineFields := self.line
+ fields := strings.Split(lineFields, ":")
+ self.test.File = strings.TrimSpace(fields[0])
+ self.test.Line, _ = strconv.Atoi(fields[1])
+}
+
+func (self *testParser) composeCapturedOutput() {
+ self.test.Message = strings.Join(self.otherLines, "\n")
+}
+
+func createArrayForJsonItems(lines []string) []byte {
+ jsonArrayItems := strings.Join(lines, "")
+ jsonArrayItems = removeTrailingComma(jsonArrayItems)
+ return []byte(fmt.Sprintf("[%s]\n", jsonArrayItems))
+}
+func removeTrailingComma(rawJson string) string {
+ if trailingComma(rawJson) {
+ return rawJson[:len(rawJson)-1]
+ }
+ return rawJson
+}
+func trailingComma(value string) bool {
+ return strings.HasSuffix(value, ",")
+}
+
+func isGoTestLogOutput(line string) bool {
+ return strings.Count(line, ":") == 2
+}
+
+func isPanic(line string) bool {
+ return strings.HasPrefix(line, "panic: ")
+}
+
+func panicLineHasMetadata(line string) bool {
+ return strings.HasPrefix(line, "goroutine") && strings.Contains(line, "[running]")
+}
+func panicLineShouldBeIndented(index int, line string) bool {
+ return strings.Contains(line, "+") || (index > 0 && strings.Contains(line, "panic: "))
+}
+
+const bugReportRequest = `
+Uh-oh! Looks like something went wrong. Please copy the following text and file a bug report at:
+
+https://github.com/smartystreets/goconvey/issues?state=open
+
+======= BEGIN BUG REPORT =======
+
+ERROR: %v
+
+OUTPUT: %s
+
+======= END BUG REPORT =======
+
+`
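Editorial note (not part of the vendored diff): testParser.go consumes the captured lines of a single test function. Lines framed by reporting.OpenJson / reporting.CloseJson are collected and deserialized into test.Stories, a panic line switches to panic parsing, and every other line is joined into test.Message. Below is a hedged usage sketch; it assumes contract.TestResult exposes the exported RawLines, Stories, and Message fields referenced above, and the function name and sample lines are illustrative only.

package parser

import (
	"github.com/smartystreets/goconvey/convey/reporting"
	"github.com/smartystreets/goconvey/web/server/contract"
)

// exampleParseTestOutput feeds a few captured lines through parseTestOutput.
func exampleParseTestOutput() *contract.TestResult {
	test := &contract.TestResult{
		TestName: "TestPassingStory",
		RawLines: []string{
			"some plain captured output",
			reporting.OpenJson,
			`{"Title": "A passing story", "Depth": 0, "Assertions": []},`,
			reporting.CloseJson,
		},
	}
	// Afterwards test.Message == "some plain captured output" and
	// test.Stories holds one reporting.ScopeResult titled "A passing story".
	return parseTestOutput(test)
}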
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/util.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/util.go
new file mode 100644
index 00000000000..e2061603439
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/parser/util.go
@@ -0,0 +1,45 @@
+package parser
+
+import (
+ "math"
+ "strings"
+ "time"
+)
+
+// parseTestFunctionDuration parses the duration in seconds as a float64
+// from a line of go test output that looks something like this:
+// --- PASS: TestOldSchool_PassesWithMessage (0.03 seconds)
+func parseTestFunctionDuration(line string) float64 {
+ line = strings.Replace(line, "(", "", 1)
+ fields := strings.Split(line, " ")
+ return parseDurationInSeconds(fields[3]+"s", 2)
+}
+
+func parseDurationInSeconds(raw string, precision int) float64 {
+ elapsed, _ := time.ParseDuration(raw)
+ return round(elapsed.Seconds(), precision)
+}
+
+// round returns the rounded version of x with precision.
+//
+// Special cases are:
+// round(±0) = ±0
+// round(±Inf) = ±Inf
+// round(NaN) = NaN
+//
+// Why, oh why doesn't the math package come with a round function?
+// Inspiration: http://play.golang.org/p/ZmFfr07oHp
+func round(x float64, precision int) float64 {
+ var rounder float64
+ pow := math.Pow(10, float64(precision))
+ intermediate := x * pow
+
+ if intermediate < 0.0 {
+ intermediate -= 0.5
+ } else {
+ intermediate += 0.5
+ }
+ rounder = float64(int64(intermediate))
+
+ return rounder / float64(pow)
+}
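Editorial note (not part of the vendored diff): util.go is where the "(0.03 seconds)" fragment of a go test result line becomes the Elapsed values seen in the fixtures, rounded to two decimal places. A small illustrative sketch follows; the wrapper function name is an assumption.

package parser

// exampleDurations exercises the two helpers above in isolation.
func exampleDurations() (float64, float64) {
	// fields[3] of the line below is "0.02", so the parsed duration is 0.02s.
	elapsed := parseTestFunctionDuration("--- PASS: TestOldSchool_Passes (0.02 seconds)")

	// round uses add-0.5-and-truncate, so 0.018 rounds up at two decimals.
	rounded := round(0.018, 2) // == 0.02
	return elapsed, rounded
}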
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell.go
new file mode 100644
index 00000000000..f2fa10711db
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell.go
@@ -0,0 +1,174 @@
+package system
+
+import (
+ "log"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "strings"
+)
+
+///////////////////////////////////////////////////////////////////////////////
+// Integration: ///////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+
+type Shell struct {
+ coverage bool
+ gobin string
+ reportsPath string
+ defaultTimeout string
+}
+
+func NewShell(gobin, reportsPath string, coverage bool, defaultTimeout string) *Shell {
+ return &Shell{
+ coverage: coverage,
+ gobin: gobin,
+ reportsPath: reportsPath,
+ defaultTimeout: defaultTimeout,
+ }
+}
+
+func (self *Shell) GoTest(directory, packageName string, tags, arguments []string) (output string, err error) {
+ reportFilename := strings.Replace(packageName, "/", "-", -1)
+ reportPath := filepath.Join(self.reportsPath, reportFilename)
+ reportData := reportPath + ".txt"
+ reportHTML := reportPath + ".html"
+ tagsArg := "-tags=" + strings.Join(tags, ",")
+
+ goconvey := findGoConvey(directory, self.gobin, packageName, tagsArg).Execute()
+ compilation := compile(directory, self.gobin, tagsArg).Execute()
+ withCoverage := runWithCoverage(compilation, goconvey, self.coverage, reportData, directory, self.gobin, self.defaultTimeout, tagsArg, arguments).Execute()
+ final := runWithoutCoverage(compilation, withCoverage, goconvey, directory, self.gobin, self.defaultTimeout, tagsArg, arguments).Execute()
+ go generateReports(final, self.coverage, directory, self.gobin, reportData, reportHTML).Execute()
+
+ return final.Output, final.Error
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// Functional Core:////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+
+func findGoConvey(directory, gobin, packageName, tagsArg string) Command {
+ return NewCommand(directory, gobin, "list", "-f", "'{{.TestImports}}'", tagsArg, packageName)
+}
+
+func compile(directory, gobin, tagsArg string) Command {
+ return NewCommand(directory, gobin, "test", "-i", tagsArg)
+}
+
+func runWithCoverage(compile, goconvey Command, coverage bool, reportPath, directory, gobin, defaultTimeout, tagsArg string, customArguments []string) Command {
+ if compile.Error != nil || goconvey.Error != nil {
+ return compile
+ }
+
+ if !coverage {
+ return compile
+ }
+
+ arguments := []string{"test", "-v", "-coverprofile=" + reportPath, tagsArg}
+
+ customArgsText := strings.Join(customArguments, "\t")
+ if !strings.Contains(customArgsText, "-covermode=") {
+ arguments = append(arguments, "-covermode=set")
+ }
+
+ if !strings.Contains(customArgsText, "-timeout=") {
+ arguments = append(arguments, "-timeout="+defaultTimeout)
+ }
+
+ if strings.Contains(goconvey.Output, goconveyDSLImport) {
+ arguments = append(arguments, "-json")
+ }
+
+ arguments = append(arguments, customArguments...)
+
+ return NewCommand(directory, gobin, arguments...)
+}
+
+func runWithoutCoverage(compile, withCoverage, goconvey Command, directory, gobin, defaultTimeout, tagsArg string, customArguments []string) Command {
+ if compile.Error != nil {
+ return compile
+ }
+
+ if goconvey.Error != nil {
+ log.Println(gopathProblem, goconvey.Output, goconvey.Error)
+ return goconvey
+ }
+
+ if coverageStatementRE.MatchString(withCoverage.Output) {
+ return withCoverage
+ }
+
+ log.Printf("Coverage output: %v", withCoverage.Output)
+
+ log.Print("Run without coverage")
+
+ arguments := []string{"test", "-v", tagsArg}
+ customArgsText := strings.Join(customArguments, "\t")
+ if !strings.Contains(customArgsText, "-timeout=") {
+ arguments = append(arguments, "-timeout="+defaultTimeout)
+ }
+
+ if strings.Contains(goconvey.Output, goconveyDSLImport) {
+ arguments = append(arguments, "-json")
+ }
+ arguments = append(arguments, customArguments...)
+ return NewCommand(directory, gobin, arguments...)
+}
+
+func generateReports(previous Command, coverage bool, directory, gobin, reportData, reportHTML string) Command {
+ if previous.Error != nil {
+ return previous
+ }
+
+ if !coverage {
+ return previous
+ }
+
+ return NewCommand(directory, gobin, "tool", "cover", "-html="+reportData, "-o", reportHTML)
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// Imperative Shell: //////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+
+type Command struct {
+ directory string
+ executable string
+ arguments []string
+
+ Output string
+ Error error
+}
+
+func NewCommand(directory, executable string, arguments ...string) Command {
+ return Command{
+ directory: directory,
+ executable: executable,
+ arguments: arguments,
+ }
+}
+
+func (this Command) Execute() Command {
+ if len(this.executable) == 0 {
+ return this
+ }
+
+ if len(this.Output) > 0 || this.Error != nil {
+ return this
+ }
+
+ command := exec.Command(this.executable, this.arguments...)
+ command.Dir = this.directory
+ var rawOutput []byte
+ rawOutput, this.Error = command.CombinedOutput()
+ this.Output = string(rawOutput)
+ return this
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+const goconveyDSLImport = "github.com/smartystreets/goconvey/convey " // note the trailing space: we don't want to target packages nested in the /convey package.
+const gopathProblem = "Please run goconvey from within $GOPATH/src (also, symlinks might be problematic). Output and Error: "
+
+var coverageStatementRE = regexp.MustCompile(`(?m)^coverage: \d+\.\d% of statements(.*)$|^panic: test timed out after `)
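Editorial note (not part of the vendored diff): runWithoutCoverage only re-runs the tests when the coverage-enabled run produced neither a coverage summary nor a test-timeout panic, and coverageStatementRE is the gate for that decision. A minimal sketch of that check; the wrapper name is an assumption.

package system

// coverageRunLooksUsable reports whether a coverage-enabled `go test` run
// produced output worth keeping: either a "coverage: NN.N% of statements"
// line or a "panic: test timed out after ..." line (a timeout is a real
// result, not a coverage-instrumentation failure).
func coverageRunLooksUsable(goTestOutput string) bool {
	return coverageStatementRE.MatchString(goTestOutput)
}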
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell_integration_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell_integration_test.go
new file mode 100644
index 00000000000..88c696dbb81
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell_integration_test.go
@@ -0,0 +1,33 @@
+package system
+
+import (
+ "log"
+ "path/filepath"
+ "runtime"
+ "strings"
+
+ "testing"
+)
+
+func TestShellIntegration(t *testing.T) {
+ if testing.Short() {
+ t.Skip("Skipping potentially long-running integration test...")
+ return
+ }
+
+ log.SetFlags(log.LstdFlags | log.Lshortfile | log.Lmicroseconds)
+
+ _, filename, _, _ := runtime.Caller(0)
+ directory := filepath.Join(filepath.Dir(filename), "..", "watch", "integration_testing", "sub")
+ packageName := "github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub"
+
+ shell := NewShell("go", "", true, "5s")
+ output, err := shell.GoTest(directory, packageName, []string{}, []string{"-short"})
+
+ if !strings.Contains(output, "PASS\n") || !strings.Contains(output, "ok") {
+ t.Errorf("Expected output that resembed tests passing but got this instead: [%s]", output)
+ }
+ if err != nil {
+ t.Error("Test run resulted in the following error:", err)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell_test.go
new file mode 100644
index 00000000000..b115c4f356a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/shell_test.go
@@ -0,0 +1,217 @@
+package system
+
+import (
+ "errors"
+ "testing"
+
+ . "github.com/smartystreets/goconvey/convey"
+)
+
+func TestShellCommandComposition(t *testing.T) {
+ var (
+ buildFailed = Command{Error: errors.New("BUILD FAILURE!")}
+ buildSucceeded = Command{Output: "ok"}
+ goConvey = Command{Output: "[fmt github.com/smartystreets/goconvey/convey net/http net/http/httptest path runtime strconv strings testing time]"}
+ noGoConvey = Command{Output: "[fmt net/http net/http/httptest path runtime strconv strings testing time]"}
+ errorGoConvey = Command{Output: "This is a wacky error", Error: errors.New("This happens when running goconvey outside your $GOPATH (symlinked code).")}
+ noCoveragePassed = Command{Output: "PASS\nok github.com/smartystreets/goconvey/examples 0.012s"}
+ coveragePassed = Command{Output: "PASS\ncoverage: 100.0% of statements\nok github.com/smartystreets/goconvey/examples 0.012s"}
+ coverageFailed = Command{
+ Error: errors.New("Tests bombed!"),
+ Output: "--- FAIL: TestIntegerManipulation (0.00 seconds)\nFAIL\ncoverage: 100.0% of statements\nexit status 1\nFAIL github.com/smartystreets/goconvey/examples 0.013s",
+ }
+ coverageFailedTimeout = Command{
+ Error: errors.New("Tests bombed!"),
+ Output: "=== RUN SomeTest\n--- PASS: SomeTest (0.00 seconds)\n=== RUN TimeoutTest\npanic: test timed out after 5s\n\ngoroutine 27 [running]:\n",
+ }
+ )
+
+ const (
+ yesCoverage = true
+ noCoverage = false
+ )
+
+ Convey("When attempting to run tests with coverage flags", t, func() {
+ Convey("And buildSucceeded failed", func() {
+ result := runWithCoverage(buildFailed, goConvey, noCoverage, "", "", "", "", "-tags=", nil)
+
+ Convey("Then no action should be taken", func() {
+ So(result, ShouldResemble, buildFailed)
+ })
+ })
+
+ Convey("And coverage is not wanted", func() {
+ result := runWithCoverage(buildSucceeded, goConvey, noCoverage, "", "", "", "", "-tags=", nil)
+
+ Convey("Then no action should be taken", func() {
+ So(result, ShouldResemble, buildSucceeded)
+ })
+ })
+
+ Convey("And the package being tested usees the GoConvey DSL (`convey` package)", func() {
+ result := runWithCoverage(buildSucceeded, goConvey, yesCoverage, "reportsPath", "/directory", "go", "5s", "-tags=bob", []string{"-arg1", "-arg2"})
+
+ Convey("The returned command should be well formed (and include the -json flag)", func() {
+ So(result, ShouldResemble, Command{
+ directory: "/directory",
+ executable: "go",
+ arguments: []string{"test", "-v", "-coverprofile=reportsPath", "-tags=bob", "-covermode=set", "-timeout=5s", "-json", "-arg1", "-arg2"},
+ })
+ })
+ })
+
+ Convey("And the package being tested does NOT use the GoConvey DSL", func() {
+ result := runWithCoverage(buildSucceeded, noGoConvey, yesCoverage, "reportsPath", "/directory", "go", "1s", "-tags=bob", []string{"-arg1", "-arg2"})
+
+ Convey("The returned command should be well formed (and NOT include the -json flag)", func() {
+ So(result, ShouldResemble, Command{
+ directory: "/directory",
+ executable: "go",
+ arguments: []string{"test", "-v", "-coverprofile=reportsPath", "-tags=bob", "-covermode=set", "-timeout=1s", "-arg1", "-arg2"},
+ })
+ })
+ })
+
+ Convey("And the package being tested has been symlinked outside the $GOAPTH", func() {
+ result := runWithCoverage(buildSucceeded, errorGoConvey, yesCoverage, "reportsPath", "/directory", "go", "1s", "-tags=", nil)
+
+ Convey("The returned command should be the compilation command", func() {
+ So(result, ShouldResemble, buildSucceeded)
+ })
+ })
+
+ Convey("And the package being tested specifies an alternate covermode", func() {
+ result := runWithCoverage(buildSucceeded, noGoConvey, yesCoverage, "reportsPath", "/directory", "go", "1s", "-tags=", []string{"-covermode=atomic"})
+
+ Convey("The returned command should allow the alternate value", func() {
+ So(result, ShouldResemble, Command{
+ directory: "/directory",
+ executable: "go",
+ arguments: []string{"test", "-v", "-coverprofile=reportsPath", "-tags=", "-timeout=1s", "-covermode=atomic"},
+ })
+ })
+ })
+
+ Convey("And the package being tested specifies an alternate timeout", func() {
+ result := runWithCoverage(buildSucceeded, noGoConvey, yesCoverage, "reportsPath", "/directory", "go", "1s", "-tags=", []string{"-timeout=5s"})
+
+ Convey("The returned command should allow the alternate value", func() {
+ So(result, ShouldResemble, Command{
+ directory: "/directory",
+ executable: "go",
+ arguments: []string{"test", "-v", "-coverprofile=reportsPath", "-tags=", "-covermode=set", "-timeout=5s"},
+ })
+ })
+ })
+
+ })
+
+ Convey("When attempting to run tests without the coverage flags", t, func() {
+ Convey("And tests already succeeded with coverage", func() {
+ result := runWithoutCoverage(buildSucceeded, coveragePassed, goConvey, "/directory", "go", "1s", "-tags=", []string{"-arg1", "-arg2"})
+
+ Convey("Then no action should be taken", func() {
+ So(result, ShouldResemble, coveragePassed)
+ })
+ })
+
+ Convey("And tests already failed (legitimately) with coverage", func() {
+ result := runWithoutCoverage(buildSucceeded, coverageFailed, goConvey, "/directory", "go", "1s", "-tags=", []string{"-arg1", "-arg2"})
+
+ Convey("Then no action should be taken", func() {
+ So(result, ShouldResemble, coverageFailed)
+ })
+ })
+
+ Convey("And tests already failed (timeout) with coverage", func() {
+ result := runWithoutCoverage(buildSucceeded, coverageFailedTimeout, goConvey, "/directory", "go", "1s", "-tags=", []string{"-arg1", "-arg2"})
+
+ Convey("Then no action should be taken", func() {
+ So(result, ShouldResemble, coverageFailedTimeout)
+ })
+ })
+
+ Convey("And the build failed earlier", func() {
+ result := runWithoutCoverage(buildFailed, Command{}, goConvey, "/directory", "go", "1s", "-tags=", []string{"-arg1", "-arg2"})
+
+ Convey("Then no action should be taken", func() {
+ So(result, ShouldResemble, buildFailed)
+ })
+ })
+
+ Convey("And the goconvey dsl command failed (probably because of symlinks)", func() {
+ result := runWithoutCoverage(buildSucceeded, Command{}, errorGoConvey, "", "", "", "-tags=", nil)
+
+ Convey("Then no action should be taken", func() {
+ So(result, ShouldResemble, errorGoConvey)
+ })
+ })
+
+ Convey("And the package being tested uses the GoConvey DSL (`convey` package)", func() {
+ result := runWithoutCoverage(buildSucceeded, buildSucceeded, goConvey, "/directory", "go", "1s", "-tags=", []string{"-arg1", "-arg2"})
+
+ Convey("Then the returned command should be well formed (and include the -json flag)", func() {
+ So(result, ShouldResemble, Command{
+ directory: "/directory",
+ executable: "go",
+ arguments: []string{"test", "-v", "-tags=", "-timeout=1s", "-json", "-arg1", "-arg2"},
+ })
+ })
+ })
+
+ Convey("And the package being tested does NOT use the GoConvey DSL", func() {
+ result := runWithoutCoverage(buildSucceeded, noCoveragePassed, noGoConvey, "/directory", "go", "1s", "-tags=", []string{"-arg1", "-arg2"})
+
+ Convey("Then the returned command should be well formed (and NOT include the -json flag)", func() {
+ So(result, ShouldResemble, Command{
+ directory: "/directory",
+ executable: "go",
+ arguments: []string{"test", "-v", "-tags=", "-timeout=1s", "-arg1", "-arg2"},
+ })
+ })
+ })
+
+ Convey("And the package being tested specifies an alternate timeout", func() {
+ result := runWithoutCoverage(buildSucceeded, buildSucceeded, noGoConvey, "/directory", "go", "1s", "-tags=", []string{"-timeout=5s"})
+
+ Convey("The returned command should allow the alternate value", func() {
+ So(result, ShouldResemble, Command{
+ directory: "/directory",
+ executable: "go",
+ arguments: []string{"test", "-v", "-tags=", "-timeout=5s"},
+ })
+ })
+ })
+
+ })
+
+ Convey("When generating coverage reports", t, func() {
+ Convey("And the previous command failed for any reason (compilation or failed tests)", func() {
+ result := generateReports(buildFailed, yesCoverage, "/directory", "go", "reportData", "reportHTML")
+
+ Convey("Then no action should be taken", func() {
+ So(result, ShouldResemble, buildFailed)
+ })
+ })
+
+ Convey("And coverage reports are unwanted", func() {
+ result := generateReports(noCoveragePassed, noCoverage, "/directory", "go", "reportData", "reportHTML")
+
+ Convey("Then no action should beg taken", func() {
+ So(result, ShouldResemble, noCoveragePassed)
+ })
+ })
+
+ Convey("And tests passed and coverage reports are wanted", func() {
+ result := generateReports(coveragePassed, yesCoverage, "/directory", "go", "reportData", "reportHTML")
+
+ Convey("Then the resulting command should be well-formed", func() {
+ So(result, ShouldResemble, Command{
+ directory: "/directory",
+ executable: "go",
+ arguments: []string{"tool", "cover", "-html=reportData", "-o", "reportHTML"},
+ })
+ })
+ })
+ })
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/system.goconvey b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/system.goconvey
new file mode 100644
index 00000000000..aa26e8b739b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/system/system.goconvey
@@ -0,0 +1,3 @@
+#ignore
+-timeout=1s
+-short \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/functional_core.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/functional_core.go
new file mode 100644
index 00000000000..404a25d33ac
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/functional_core.go
@@ -0,0 +1,171 @@
+package watch
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/smartystreets/goconvey/web/server/messaging"
+)
+
+///////////////////////////////////////////////////////////////////////////////
+
+func Categorize(items chan *FileSystemItem, root string, watchSuffixes []string) (folders, profiles, goFiles []*FileSystemItem) {
+ for item := range items {
+ if item.IsFolder && !isHidden(item.Name) && !foundInHiddenDirectory(item, root) {
+ folders = append(folders, item)
+
+ } else if strings.HasSuffix(item.Name, ".goconvey") && len(item.Name) > len(".goconvey") {
+ profiles = append(profiles, item)
+
+ } else {
+ for _, suffix := range watchSuffixes {
+ if strings.HasSuffix(item.Name, suffix) && !isHidden(item.Name) && !foundInHiddenDirectory(item, root) {
+ goFiles = append(goFiles, item)
+ }
+ }
+ }
+ }
+ return folders, profiles, goFiles
+}
+
+func foundInHiddenDirectory(item *FileSystemItem, root string) bool {
+ path := item.Path
+ if len(path) > len(root) {
+ path = path[len(root):]
+ }
+
+ for _, folder := range strings.Split(filepath.Dir(path), slash) {
+ if isHidden(folder) {
+ return true
+ }
+ }
+
+ return false
+}
+func isHidden(path string) bool {
+ return strings.HasPrefix(path, ".") || strings.HasPrefix(path, "_") || strings.HasPrefix(path, "flymake_")
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+func ParseProfile(profile string) (isDisabled bool, tags, arguments []string) {
+ lines := strings.Split(profile, "\n")
+
+ for _, line := range lines {
+ line = strings.TrimSpace(line)
+
+ if len(arguments) == 0 && strings.ToLower(line) == "ignore" {
+ return true, nil, nil
+
+ } else if strings.HasPrefix(line, "-tags=") {
+ tags = append(tags, strings.Split(strings.SplitN(line, "=", 2)[1], ",")...)
+ continue
+
+ } else if len(line) == 0 {
+ continue
+
+ } else if strings.HasPrefix(line, "#") {
+ continue
+
+ } else if strings.HasPrefix(line, "//") {
+ continue
+
+ } else if line == "-cover" || strings.HasPrefix(line, "-coverprofile") {
+ continue
+
+ } else if line == "-v" {
+ continue // Verbose mode is always enabled so there is no need to record it here.
+
+ }
+
+ arguments = append(arguments, line)
+ }
+
+ return false, tags, arguments
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+func CreateFolders(items []*FileSystemItem) messaging.Folders {
+ folders := map[string]*messaging.Folder{}
+
+ for _, item := range items {
+ folders[item.Path] = &messaging.Folder{Path: item.Path, Root: item.Root}
+ }
+
+ return folders
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+func LimitDepth(folders messaging.Folders, depth int) {
+ if depth < 0 {
+ return
+ }
+
+ for path, folder := range folders {
+ if strings.Count(path[len(folder.Root):], slash) > depth {
+ delete(folders, path)
+ }
+ }
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+func AttachProfiles(folders messaging.Folders, items []*FileSystemItem) {
+ for _, profile := range items {
+ if folder, exists := folders[filepath.Dir(profile.Path)]; exists {
+ folder.Disabled, folder.BuildTags, folder.TestArguments = profile.ProfileDisabled, profile.ProfileTags, profile.ProfileArguments
+ }
+ }
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+func MarkIgnored(folders messaging.Folders, ignored map[string]struct{}) {
+ if len(ignored) == 0 {
+ return
+ }
+
+ for _, folder := range folders {
+ for ignored := range ignored {
+ if !folder.Ignored && strings.HasSuffix(folder.Path, ignored) {
+ folder.Ignored = true
+ }
+ }
+ }
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+func ActiveFolders(folders messaging.Folders) messaging.Folders {
+ var active messaging.Folders = map[string]*messaging.Folder{}
+
+ for path, folder := range folders {
+ if folder.Ignored || folder.Disabled {
+ continue
+ }
+
+ active[path] = folder
+ }
+ return active
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+func Sum(folders messaging.Folders, items []*FileSystemItem) int64 {
+ var sum int64
+ for _, item := range items {
+ if _, exists := folders[filepath.Dir(item.Path)]; exists {
+ sum += item.Size + item.Modified
+ }
+ }
+ return sum
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+const slash = string(os.PathSeparator)
+
+///////////////////////////////////////////////////////////////////////////////
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/functional_core_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/functional_core_test.go
new file mode 100644
index 00000000000..d0f74867acc
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/functional_core_test.go
@@ -0,0 +1,419 @@
+package watch
+
+import (
+ "fmt"
+ "testing"
+
+ . "github.com/smartystreets/goconvey/convey"
+ "github.com/smartystreets/goconvey/web/server/messaging"
+)
+
+func TestCategorize(t *testing.T) {
+ fileSystem := []*FileSystemItem{
+ {
+ Root: "/.hello",
+ Path: "/.hello",
+ Name: "hello",
+ IsFolder: true,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/1/hello/world.txt",
+ Name: "world.txt",
+ IsFolder: false,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/1/2/3/4/5/hello/world.go",
+ Name: "world.go",
+ IsFolder: false,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/world.go",
+ Name: "world.go",
+ IsFolder: false,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/hello/world.tmpl",
+ Name: "world.tmpl",
+ IsFolder: false,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/hello/.world.go",
+ Name: ".world.go",
+ IsFolder: false,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/hello/_world.go",
+ Name: ".world.go",
+ IsFolder: false,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/hello/flymake_world.go",
+ Name: "flymake_world.go",
+ IsFolder: false,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/.hello",
+ Name: ".hello",
+ IsFolder: true,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/.hello/hello",
+ Name: "hello",
+ IsFolder: true,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/.hello/world.go",
+ Name: "world.go",
+ IsFolder: false,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/hello/hi.goconvey",
+ Name: "hi.goconvey",
+ IsFolder: false,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/hello2/.goconvey",
+ Name: ".goconvey",
+ IsFolder: false,
+ },
+ {
+ Root: "/.hello",
+ Path: "/.hello/_hello",
+ Name: "_hello",
+ IsFolder: true,
+ },
+ }
+
+ Convey("A stream of file system items should be categorized correctly", t, func() {
+ items := make(chan *FileSystemItem)
+
+ go func() {
+ for _, item := range fileSystem {
+ items <- item
+ }
+ close(items)
+ }()
+
+ folders, profiles, goFiles := Categorize(items, "/.hello", []string{".go"})
+ So(folders, ShouldResemble, fileSystem[:1])
+ So(profiles, ShouldResemble, fileSystem[11:12])
+ So(goFiles, ShouldResemble, fileSystem[2:4])
+ })
+
+ Convey("A stream of file system items should be categorized correctly", t, func() {
+ items := make(chan *FileSystemItem)
+
+ go func() {
+ for _, item := range fileSystem {
+ items <- item
+ }
+ close(items)
+ }()
+
+ folders, profiles, goFiles := Categorize(items, "/.hello", []string{".go", ".tmpl"})
+ So(folders, ShouldResemble, fileSystem[:1])
+ So(profiles, ShouldResemble, fileSystem[11:12])
+ So(goFiles, ShouldResemble, fileSystem[2:5])
+ })
+}
+
+func TestParseProfile(t *testing.T) {
+ var parseProfileTestCases = []struct {
+ SKIP bool
+ description string
+ input string
+ resultIgnored bool
+ resultTestTags []string
+ resultTestArgs []string
+ }{
+ {
+ SKIP: false,
+ description: "Blank profile",
+ input: "",
+ resultIgnored: false,
+ },
+ {
+ SKIP: false,
+ description: "All lines are blank or whitespace",
+ input: "\n \n \t\t\t \n \n \n",
+ resultIgnored: false,
+ },
+ {
+ SKIP: false,
+ description: "Ignored package, no args included",
+ input: "IGNORE\n-timeout=4s",
+ resultIgnored: true,
+ },
+ {
+ SKIP: false,
+ description: "Ignore directive is commented, all args are included",
+ input: "#IGNORE\n-timeout=4s\n-parallel=5",
+ resultIgnored: false,
+ resultTestArgs: []string{"-timeout=4s", "-parallel=5"},
+ },
+ {
+ SKIP: false,
+ description: "No ignore directive, all args are included",
+ input: "-run=TestBlah\n-timeout=42s",
+ resultIgnored: false,
+ resultTestArgs: []string{"-run=TestBlah", "-timeout=42s"},
+ },
+ {
+ SKIP: false,
+ description: "Some args are commented, therefore ignored",
+ input: "-run=TestBlah\n #-timeout=42s",
+ resultIgnored: false,
+ resultTestArgs: []string{"-run=TestBlah"},
+ },
+ {
+ SKIP: false,
+ description: "All args are commented, therefore all are ignored",
+ input: "#-run=TestBlah\n//-timeout=42",
+ resultIgnored: false,
+ },
+ {
+ SKIP: false,
+			description:    "We ignore certain flags such as -v, -cover, and -coverprofile because they are specified by the shell",
+ input: "-v\n-cover\n-coverprofile=blah.out",
+ resultIgnored: false,
+ },
+ {
+ SKIP: false,
+ description: "We allow certain coverage flags like -coverpkg and -covermode",
+ input: "-coverpkg=blah\n-covermode=atomic",
+ resultIgnored: false,
+ resultTestArgs: []string{"-coverpkg=blah", "-covermode=atomic"},
+ },
+ {
+ SKIP: false,
+ description: "We parse out -tags particularly",
+ input: "-coverpkg=blah\n-covermode=atomic\n-tags=foo,bar",
+ resultIgnored: false,
+ resultTestTags: []string{"foo", "bar"},
+ resultTestArgs: []string{"-coverpkg=blah", "-covermode=atomic"},
+ },
+ }
+
+ for i, test := range parseProfileTestCases {
+ if test.SKIP {
+ SkipConvey(fmt.Sprintf("Profile Parsing, Test Case #%d: %s (SKIPPED)", i, test.description), t, nil)
+ } else {
+ Convey(fmt.Sprintf("Profile Parsing, Test Case #%d: %s", i, test.description), t, func() {
+ ignored, testTags, testArgs := ParseProfile(test.input)
+
+ So(ignored, ShouldEqual, test.resultIgnored)
+ So(testTags, ShouldResemble, test.resultTestTags)
+ So(testArgs, ShouldResemble, test.resultTestArgs)
+ })
+ }
+ }
+}
+
+func TestCreateFolders(t *testing.T) {
+ Convey("File system items that represent folders should be converted to folder structs correctly", t, func() {
+ expected := map[string]*messaging.Folder{
+ "/root/1": {Path: "/root/1", Root: "/root"},
+ "/root/1/2": {Path: "/root/1/2", Root: "/root"},
+ "/root/1/2/3": {Path: "/root/1/2/3", Root: "/root"},
+ }
+
+ inputs := []*FileSystemItem{
+ {Path: "/root/1", Root: "/root", IsFolder: true},
+ {Path: "/root/1/2", Root: "/root", IsFolder: true},
+ {Path: "/root/1/2/3", Root: "/root", IsFolder: true},
+ }
+
+ actual := CreateFolders(inputs)
+
+ for key, actualValue := range actual {
+ So(actualValue, ShouldResemble, expected[key])
+ }
+ })
+}
+
+func TestLimitDepth(t *testing.T) {
+ Convey("Subject: Limiting folders based on relative depth from a common root", t, func() {
+
+ folders := map[string]*messaging.Folder{
+ "/root/1": {
+ Path: "/root/1",
+ Root: "/root",
+ },
+ "/root/1/2": {
+ Path: "/root/1/2",
+ Root: "/root",
+ },
+ "/root/1/2/3": {
+ Path: "/root/1/2/3",
+ Root: "/root",
+ },
+ }
+
+ Convey("When there is no depth limit", func() {
+ LimitDepth(folders, -1)
+
+ Convey("No folders should be excluded", func() {
+ So(len(folders), ShouldEqual, 3)
+ })
+ })
+
+ Convey("When there is a limit", func() {
+ LimitDepth(folders, 2)
+
+ Convey("The deepest folder (in this case) should be excluded", func() {
+ So(len(folders), ShouldEqual, 2)
+ _, exists := folders["/root/1/2/3"]
+ So(exists, ShouldBeFalse)
+ })
+ })
+ })
+}
+
+func TestAttachProfiles(t *testing.T) {
+ Convey("Subject: Attaching profile information to a folder", t, func() {
+ folders := map[string]*messaging.Folder{
+ "/root/1": {
+ Path: "/root/1",
+ Root: "/root",
+ },
+ "/root/1/2": {
+ Path: "/root/1/2",
+ Root: "/root",
+ },
+ "/root/1/2/3": {
+ Path: "/root/1/2/3",
+ Root: "/root",
+ },
+ }
+
+ profiles := []*FileSystemItem{
+ {
+ Path: "/root/too-shallow.goconvey",
+ ProfileDisabled: true,
+ ProfileArguments: []string{"1", "2"},
+ },
+ {
+ Path: "/root/1/2/hi.goconvey",
+ ProfileDisabled: true,
+ ProfileArguments: []string{"1", "2"},
+ },
+ {
+ Path: "/root/1/2/3/4/does-not-exist",
+ ProfileDisabled: true,
+ ProfileArguments: []string{"1", "2", "3", "4"},
+ },
+ }
+
+ Convey("Profiles that match folders should be merged with those folders", func() {
+ AttachProfiles(folders, profiles)
+
+ Convey("No profiles matched the first folder, so no assignments should occur", func() {
+ So(folders["/root/1"].Disabled, ShouldBeFalse)
+ So(folders["/root/1"].TestArguments, ShouldBeEmpty)
+ })
+
+ Convey("The second folder should match the first profile", func() {
+ So(folders["/root/1/2"].Disabled, ShouldBeTrue)
+ So(folders["/root/1/2"].TestArguments, ShouldResemble, []string{"1", "2"})
+ })
+
+ Convey("No profiles match the third folder so no assignments should occur", func() {
+ So(folders["/root/1/2/3"].Disabled, ShouldBeFalse)
+ So(folders["/root/1/2/3"].TestArguments, ShouldBeEmpty)
+ })
+ })
+ })
+}
+
+func TestMarkIgnored(t *testing.T) {
+	Convey("Subject: Folders that have been ignored should be marked as such", t, func() {
+ folders := map[string]*messaging.Folder{
+ "/root/1": {
+ Path: "/root/1",
+ Root: "/root",
+ },
+ "/root/1/2": {
+ Path: "/root/1/2",
+ Root: "/root",
+ },
+ "/root/1/2/3": {
+ Path: "/root/1/2/3",
+ Root: "/root",
+ },
+ }
+
+ Convey("When there are no ignored folders", func() {
+ ignored := map[string]struct{}{}
+ MarkIgnored(folders, ignored)
+
+ Convey("No folders should be marked as ignored", func() {
+ So(folders["/root/1"].Ignored, ShouldBeFalse)
+ So(folders["/root/1/2"].Ignored, ShouldBeFalse)
+ So(folders["/root/1/2/3"].Ignored, ShouldBeFalse)
+ })
+ })
+ Convey("When there are ignored folders", func() {
+ ignored := map[string]struct{}{"1/2": {}}
+ MarkIgnored(folders, ignored)
+
+ Convey("The ignored folders should be marked as ignored", func() {
+ So(folders["/root/1"].Ignored, ShouldBeFalse)
+ So(folders["/root/1/2"].Ignored, ShouldBeTrue)
+ So(folders["/root/1/2/3"].Ignored, ShouldBeFalse)
+ })
+ })
+ })
+}
+
+func TestActiveFolders(t *testing.T) {
+ Convey("Subject: Folders that are not ignored or disabled are active", t, func() {
+ folders := map[string]*messaging.Folder{
+ "/root/1": {
+ Path: "/root/1",
+ Root: "/root",
+ Ignored: true,
+ },
+ "/root/1/2": {
+ Path: "/root/1/2",
+ Root: "/root",
+ },
+ "/root/1/2/3": {
+ Path: "/root/1/2/3",
+ Root: "/root",
+ Disabled: true,
+ },
+ }
+
+ active := ActiveFolders(folders)
+
+ So(len(active), ShouldEqual, 1)
+ So(active["/root/1/2"], ShouldResemble, folders["/root/1/2"])
+ })
+}
+
+func TestSum(t *testing.T) {
+	Convey("Subject: File system items within specified directories should be counted and summed", t, func() {
+ folders := map[string]*messaging.Folder{
+ "/root/1": {Path: "/root/1", Root: "/root", Ignored: true},
+ }
+ items := []*FileSystemItem{
+ {Size: 1, Modified: 3, Path: "/root/1/hi.go"},
+ {Size: 7, Modified: 13, Path: "/root/1/bye.go"},
+ {Size: 33, Modified: 45, Path: "/root/1/2/salutations.go"}, // not counted
+ }
+
+ So(Sum(folders, items), ShouldEqual, 1+3+7+13)
+ })
+}
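As a quick illustration of the profile format exercised above (behaviour inferred from the table-driven cases, not from any additional documentation): commented lines are dropped, -tags is split out separately, and everything else passes through as test arguments.

// Sketch only; the expected values follow from the test cases above.
func sketchParseProfile() {
	ignored, tags, args := ParseProfile("-run=TestStuff\n#-timeout=42s\n-tags=foo,bar")
	fmt.Println(ignored) // false (no IGNORE directive)
	fmt.Println(tags)    // [foo bar]
	fmt.Println(args)    // [-run=TestStuff] (the commented -timeout line is dropped)
}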
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/imperative_shell.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/imperative_shell.go
new file mode 100644
index 00000000000..f4e886c9283
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/imperative_shell.go
@@ -0,0 +1,77 @@
+package watch
+
+import (
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+)
+
+///////////////////////////////////////////////////////////////////////////////
+
+type FileSystemItem struct {
+ Root string
+ Path string
+ Name string
+ Size int64
+ Modified int64
+ IsFolder bool
+
+ ProfileDisabled bool
+ ProfileTags []string
+ ProfileArguments []string
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+func YieldFileSystemItems(root string, excludedDirs []string) chan *FileSystemItem {
+ items := make(chan *FileSystemItem)
+
+ go func() {
+ filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return filepath.SkipDir
+ }
+
+ basePath := filepath.Base(path)
+ for _, item := range excludedDirs {
+				if item != "" && item == basePath && info.IsDir() {
+ return filepath.SkipDir
+ }
+ }
+
+ items <- &FileSystemItem{
+ Root: root,
+ Path: path,
+ Name: info.Name(),
+ Size: info.Size(),
+ Modified: info.ModTime().Unix(),
+ IsFolder: info.IsDir(),
+ }
+
+ return nil
+ })
+ close(items)
+ }()
+
+ return items
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+// ReadContents reads files wholesale. This function is only called on files
+// that end in '.goconvey'. These files should be very small, probably not
+// ever more than a few hundred bytes. The ignored errors are ok because in
+// the event of an IO error all that need be returned is an empty string.
+func ReadContents(path string) string {
+ file, err := os.Open(path)
+ if err != nil {
+ return ""
+ }
+ defer file.Close()
+ reader := io.LimitReader(file, 1024*4)
+ content, _ := ioutil.ReadAll(reader)
+ return string(content)
+}
+
+///////////////////////////////////////////////////////////////////////////////
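YieldFileSystemItems runs the walk on its own goroutine and closes the channel when it finishes, so callers can simply range over it; ReadContents is then only applied to the small .goconvey profile files that turn up. A short sketch of that consumption pattern (illustrative only; the real pipeline routes the items through Categorize instead):

// collectProfiles drains the item channel and reads any .goconvey profiles found.
func collectProfiles(root string, excludedDirs []string) map[string]string {
	profiles := make(map[string]string)
	for item := range YieldFileSystemItems(root, excludedDirs) {
		if !item.IsFolder && filepath.Ext(item.Name) == ".goconvey" {
			profiles[item.Path] = ReadContents(item.Path)
		}
	}
	return profiles
}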
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration.go
new file mode 100644
index 00000000000..941241ab86a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration.go
@@ -0,0 +1,185 @@
+package watch
+
+import (
+ "log"
+ "os"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/smartystreets/goconvey/web/server/messaging"
+)
+
+type Watcher struct {
+ nap time.Duration
+ rootFolder string
+ folderDepth int
+ ignoredFolders map[string]struct{}
+ fileSystemState int64
+ paused bool
+ stopped bool
+ watchSuffixes []string
+ excludedDirs []string
+
+ input chan messaging.WatcherCommand
+ output chan messaging.Folders
+
+ lock sync.RWMutex
+}
+
+func NewWatcher(rootFolder string, folderDepth int, nap time.Duration,
+ input chan messaging.WatcherCommand, output chan messaging.Folders, watchSuffixes string, excludedDirs []string) *Watcher {
+
+ return &Watcher{
+ nap: nap,
+ rootFolder: rootFolder,
+ folderDepth: folderDepth,
+ input: input,
+ output: output,
+ watchSuffixes: strings.Split(watchSuffixes, ","),
+ excludedDirs: excludedDirs,
+
+ ignoredFolders: make(map[string]struct{}),
+ }
+}
+
+func (this *Watcher) Listen() {
+ for {
+ if this.stopped {
+ return
+ }
+
+ select {
+
+ case command := <-this.input:
+ this.respond(command)
+
+ default:
+ if !this.paused {
+ this.scan()
+ }
+			time.Sleep(this.nap)
+ }
+ }
+}
+
+func (this *Watcher) respond(command messaging.WatcherCommand) {
+ switch command.Instruction {
+
+ case messaging.WatcherAdjustRoot:
+ log.Println("Adjusting root...")
+ this.rootFolder = command.Details
+ this.execute()
+
+ case messaging.WatcherIgnore:
+ log.Println("Ignoring specified folders")
+ this.ignore(command.Details)
+ // Prevent a filesystem change due to the number of active folders changing
+ _, checksum := this.gather()
+ this.set(checksum)
+
+ case messaging.WatcherReinstate:
+ log.Println("Reinstating specified folders")
+ this.reinstate(command.Details)
+ // Prevent a filesystem change due to the number of active folders changing
+ _, checksum := this.gather()
+ this.set(checksum)
+
+ case messaging.WatcherPause:
+ log.Println("Pausing watcher...")
+ this.paused = true
+
+ case messaging.WatcherResume:
+ log.Println("Resuming watcher...")
+ this.paused = false
+
+ case messaging.WatcherExecute:
+ log.Println("Gathering folders for immediate execution...")
+ this.execute()
+
+ case messaging.WatcherStop:
+ log.Println("Stopping the watcher...")
+ close(this.output)
+ this.stopped = true
+
+ default:
+ log.Println("Unrecognized command from server:", command.Instruction)
+ }
+}
+
+func (this *Watcher) execute() {
+ folders, _ := this.gather()
+ this.sendToExecutor(folders)
+}
+
+func (this *Watcher) scan() {
+ folders, checksum := this.gather()
+
+ if checksum == this.fileSystemState {
+ return
+ }
+
+ log.Println("File system state modified, publishing current folders...", this.fileSystemState, checksum)
+
+ defer this.set(checksum)
+ this.sendToExecutor(folders)
+}
+
+func (this *Watcher) gather() (folders messaging.Folders, checksum int64) {
+ items := YieldFileSystemItems(this.rootFolder, this.excludedDirs)
+ folderItems, profileItems, goFileItems := Categorize(items, this.rootFolder, this.watchSuffixes)
+
+ for _, item := range profileItems {
+ // TODO: don't even bother if the item's size is over a few hundred bytes...
+ contents := ReadContents(item.Path)
+ item.ProfileDisabled, item.ProfileTags, item.ProfileArguments = ParseProfile(contents)
+ }
+
+ folders = CreateFolders(folderItems)
+ LimitDepth(folders, this.folderDepth)
+ AttachProfiles(folders, profileItems)
+ this.protectedRead(func() { MarkIgnored(folders, this.ignoredFolders) })
+
+ active := ActiveFolders(folders)
+ checksum = int64(len(active))
+ checksum += Sum(active, profileItems)
+ checksum += Sum(active, goFileItems)
+
+ return folders, checksum
+}
+
+func (this *Watcher) set(state int64) {
+ this.fileSystemState = state
+}
+
+func (this *Watcher) sendToExecutor(folders messaging.Folders) {
+ this.output <- folders
+}
+
+func (this *Watcher) ignore(paths string) {
+ this.protectedWrite(func() {
+ for _, folder := range strings.Split(paths, string(os.PathListSeparator)) {
+ this.ignoredFolders[folder] = struct{}{}
+ log.Println("Currently ignored folders:", this.ignoredFolders)
+ }
+ })
+}
+
+func (this *Watcher) reinstate(paths string) {
+	this.protectedWrite(func() {
+		for _, folder := range strings.Split(paths, string(os.PathListSeparator)) {
+			delete(this.ignoredFolders, folder)
+		}
+	})
+}
+
+func (this *Watcher) protectedWrite(protected func()) {
+	this.lock.Lock()
+	defer this.lock.Unlock()
+	protected()
+}
+
+func (this *Watcher) protectedRead(protected func()) {
+ this.lock.RLock()
+ defer this.lock.RUnlock()
+ protected()
+}
+
+const nap = time.Millisecond * 250
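Putting the shell together: a Watcher is driven entirely through its two channels, commands in and folder snapshots out. A minimal wiring sketch (assuming the root contains at least one watchable folder; the path and polling interval are placeholders):

// runOnce sketches the wiring: start the watcher, take one snapshot, then stop it.
func runOnce(root string) messaging.Folders {
	commands := make(chan messaging.WatcherCommand)
	snapshots := make(chan messaging.Folders)
	watcher := NewWatcher(root, -1, time.Millisecond*250, commands, snapshots, ".go", nil)

	go watcher.Listen()
	first := <-snapshots // the initial scan publishes the current folders
	commands <- messaging.WatcherCommand{Instruction: messaging.WatcherStop}
	return first
}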
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_test.go
new file mode 100644
index 00000000000..e26ad48c47a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_test.go
@@ -0,0 +1,200 @@
+package watch
+
+import (
+ "bytes"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "runtime"
+ "testing"
+ "time"
+
+ . "github.com/smartystreets/goconvey/convey"
+ "github.com/smartystreets/goconvey/web/server/messaging"
+)
+
+func TestWatcher(t *testing.T) {
+ if testing.Short() {
+ t.Skip("Skipping potentially long-running integration test...")
+ return
+ }
+
+ log.SetFlags(log.LstdFlags | log.Lshortfile | log.Lmicroseconds)
+ output := new(bytes.Buffer)
+ log.SetOutput(output)
+ defer func() { t.Log(output.String()) }()
+
+ _, filename, _, _ := runtime.Caller(0)
+ originalRoot := filepath.Join(filepath.Dir(filename), "integration_testing")
+ temporary, err := ioutil.TempDir("/tmp", "goconvey")
+ if err != nil {
+ t.Fatal(err)
+ }
+ root := filepath.Join(temporary, "integration_testing")
+ sub := filepath.Join(root, "sub")
+
+ err = CopyDir(originalRoot, root)
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer func() {
+ err = os.RemoveAll(temporary)
+ if err != nil {
+ t.Fatal(err)
+ }
+ }()
+
+ var ( // commands
+ pause = messaging.WatcherCommand{Instruction: messaging.WatcherPause}
+ resume = messaging.WatcherCommand{Instruction: messaging.WatcherResume}
+
+ ignore = messaging.WatcherCommand{Instruction: messaging.WatcherIgnore, Details: sub}
+ reinstate = messaging.WatcherCommand{Instruction: messaging.WatcherReinstate, Details: sub}
+
+ adjustToSub = messaging.WatcherCommand{Instruction: messaging.WatcherAdjustRoot, Details: sub}
+ adjustToRoot = messaging.WatcherCommand{Instruction: messaging.WatcherAdjustRoot, Details: root}
+
+ execute = messaging.WatcherCommand{Instruction: messaging.WatcherExecute}
+
+ bogus = messaging.WatcherCommand{Instruction: 42}
+
+ stop = messaging.WatcherCommand{Instruction: messaging.WatcherStop}
+ )
+
+ Convey("Subject: Watcher operations", t, func() {
+ input := make(chan messaging.WatcherCommand)
+ output := make(chan messaging.Folders)
+ excludedDirs := []string{}
+ watcher := NewWatcher(root, -1, time.Millisecond, input, output, ".go", excludedDirs)
+
+ go watcher.Listen()
+
+ Convey("Initial scan results", func() {
+ go func() { input <- stop }()
+
+ results := []messaging.Folders{}
+ for result := range output {
+ results = append(results, result)
+ }
+
+ So(len(results), ShouldEqual, 1)
+ })
+
+ Convey("Manual execution produces additional results", func() {
+ go func() {
+ input <- execute
+ input <- stop
+ }()
+
+ results := []messaging.Folders{}
+ for result := range output {
+ results = append(results, result)
+ }
+
+ So(len(results), ShouldEqual, 2)
+ So(len(results[0]), ShouldEqual, 2)
+ So(len(results[1]), ShouldEqual, 2)
+ })
+
+ Convey("Ignore and Reinstate commands are not reflected in the scan results", func() {
+ go func() {
+ input <- ignore
+ input <- reinstate
+ input <- stop
+ }()
+
+ results := []messaging.Folders{}
+ for result := range output {
+ results = append(results, result)
+ }
+
+ So(len(results), ShouldEqual, 1)
+ So(results[0][sub].Ignored, ShouldBeFalse) // initial
+ })
+
+ Convey("Adjusting the root changes the number of folders in the scanned results", func() {
+ go func() {
+ input <- adjustToSub
+ input <- adjustToRoot
+ input <- stop
+ }()
+
+ results := []messaging.Folders{}
+ for result := range output {
+ results = append(results, result)
+ }
+
+ So(len(results), ShouldEqual, 3)
+ So(len(results[0]), ShouldEqual, 2) // initial
+ So(len(results[1]), ShouldEqual, 1) // root moved to sub
+ So(len(results[2]), ShouldEqual, 2) // root moved back to original location
+ })
+
+ Convey("A bogus command does not cause any additional scanning beyond the initial scan", func() {
+ go func() {
+ input <- bogus
+ input <- stop
+ }()
+
+ results := []messaging.Folders{}
+ for result := range output {
+ results = append(results, result)
+ }
+
+ So(len(results), ShouldEqual, 1) // initial scan
+ })
+
+ Convey("Scanning occurs as a result of a file system update", func() {
+ go func() {
+ time.Sleep(time.Second)
+ exec.Command("touch", filepath.Join(root, "main.go")).Run()
+ time.Sleep(time.Second)
+ input <- stop
+ }()
+
+ results := []messaging.Folders{}
+ for result := range output {
+ results = append(results, result)
+ }
+
+ So(len(results), ShouldEqual, 2)
+ })
+
+ Convey("Scanning does not occur as a result of resuming after a pause", func() {
+ go func() {
+ input <- pause
+ input <- resume
+ input <- stop
+ }()
+
+ results := []messaging.Folders{}
+ for result := range output {
+ results = append(results, result)
+ }
+
+ So(len(results), ShouldEqual, 1)
+ })
+
+ Convey("Scanning should not occur when the watcher is paused", func() {
+ go func() {
+ input <- pause
+ for x := 0; x < 2; x++ {
+ time.Sleep(time.Millisecond * 250)
+ exec.Command("touch", filepath.Join(root, "main.go")).Run()
+ time.Sleep(time.Millisecond * 250)
+ }
+ input <- resume
+ input <- stop
+ }()
+
+ results := []messaging.Folders{}
+ for result := range output {
+ results = append(results, result)
+ }
+
+ So(len(results), ShouldEqual, 2)
+ })
+ })
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/doc_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/doc_test.go
new file mode 100644
index 00000000000..06ab7d0f9a3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/doc_test.go
@@ -0,0 +1 @@
+package main
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/main.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/main.go
new file mode 100644
index 00000000000..1cd335173be
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/main.go
@@ -0,0 +1,10 @@
+// This file's only purpose is to provide a realistic
+// environment from which to run integration tests
+// against the Watcher.
+package main
+
+import "fmt"
+
+func main() {
+ fmt.Println("Hello, World!")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/.gitignore b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/.gitignore
new file mode 100644
index 00000000000..2574d15c0cc
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/.gitignore
@@ -0,0 +1,2 @@
+github.com-smartystreets-goconvey-web-server-integration_testing-sub.html
+github.com-smartystreets-goconvey-web-server-integration_testing-sub.txt \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/stuff.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/stuff.go
new file mode 100644
index 00000000000..22673259756
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/stuff.go
@@ -0,0 +1,4 @@
+// This file's only purpose is to provide a realistic
+// environment from which to run integration tests
+// against the Watcher.
+package sub
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/stuff_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/stuff_test.go
new file mode 100644
index 00000000000..ea9eebd2d50
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/stuff_test.go
@@ -0,0 +1,17 @@
+// This file's only purpose is to provide a realistic
+// environment from which to run integration tests
+// against the Watcher.
+package sub
+
+import (
+ "fmt"
+ "testing"
+)
+
+func TestStuff(t *testing.T) {
+ if testing.Short() {
+ return
+ }
+
+ fmt.Println()
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/sub.goconvey b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/sub.goconvey
new file mode 100644
index 00000000000..6f6e9405e8f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/integration_testing/sub/sub.goconvey
@@ -0,0 +1,7 @@
+IGNORE
+-short
+-run=TestStuff
+
+// This file's only purpose is to provide a realistic
+// environment from which to run integration tests
+// against the Watcher.
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/util_test.go b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/util_test.go
new file mode 100644
index 00000000000..b5ac11fa02a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/util_test.go
@@ -0,0 +1,92 @@
+// Credits: https://gist.github.com/jaybill/2876519
+package watch
+
+import "os"
+import "io"
+import "io/ioutil"
+import "log"
+
+// Copies the original source file to the destination, preserving the file mode.
+func CopyFile(source string, destination string) (err error) {
+ originalFile, err := os.Open(source)
+ if err != nil {
+ return err
+ }
+ defer originalFile.Close()
+ destinationFile, err := os.Create(destination)
+ if err != nil {
+ return err
+ }
+ defer destinationFile.Close()
+ _, err = io.Copy(destinationFile, originalFile)
+ if err == nil {
+		var info os.FileInfo
+		if info, err = os.Stat(source); err == nil {
+			err = os.Chmod(destination, info.Mode())
+		}
+ }
+
+ return
+}
+
+// Recursively copies a directory tree, attempting to preserve permissions.
+// Source directory must exist, destination directory must *not* exist.
+func CopyDir(source string, destination string) (err error) {
+
+ // get properties of source dir
+ sourceFile, err := os.Stat(source)
+ if err != nil {
+ return err
+ }
+
+ if !sourceFile.IsDir() {
+ return &CustomError{Message: "Source is not a directory"}
+ }
+
+ // ensure destination dir does not already exist
+
+ _, err = os.Open(destination)
+ if !os.IsNotExist(err) {
+ return &CustomError{Message: "Destination already exists"}
+ }
+
+ // create destination dir
+
+ err = os.MkdirAll(destination, sourceFile.Mode())
+ if err != nil {
+ return err
+ }
+
+ entries, err := ioutil.ReadDir(source)
+
+ for _, entry := range entries {
+
+ sourcePath := source + "/" + entry.Name()
+ destinationPath := destination + "/" + entry.Name()
+ if entry.IsDir() {
+ err = CopyDir(sourcePath, destinationPath)
+ if err != nil {
+ log.Println(err)
+ }
+ } else {
+ // perform copy
+ err = CopyFile(sourcePath, destinationPath)
+ if err != nil {
+ log.Println(err)
+ }
+ }
+
+ }
+ return
+}
+
+// A struct for returning custom error messages
+type CustomError struct {
+ Message string
+}
+
+// Returns the error message defined in Message as a string
+func (this *CustomError) Error() string {
+ return this.Message
+}
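These helpers exist so the integration test above can clone its fixture tree into a throwaway directory before mutating it. A hedged usage sketch (the extra path/filepath import and the callback shape are assumptions for illustration):

// withTempCopy copies a fixture directory into a temp location, runs body
// against the copy, and removes the copy afterwards.
func withTempCopy(source string, body func(root string)) error {
	temp, err := ioutil.TempDir("", "goconvey")
	if err != nil {
		return err
	}
	defer os.RemoveAll(temp)

	root := filepath.Join(temp, filepath.Base(source)) // requires "path/filepath"
	if err := CopyDir(source, root); err != nil {
		return err
	}
	body(root)
	return nil
}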
diff --git a/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/watch.goconvey b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/watch.goconvey
new file mode 100644
index 00000000000..aa26e8b739b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/smartystreets/goconvey/web/server/watch/watch.goconvey
@@ -0,0 +1,3 @@
+#ignore
+-timeout=1s
+-short \ No newline at end of file
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/LICENSE b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/LICENSE
new file mode 100644
index 00000000000..37ec93a14fd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/LICENSE
@@ -0,0 +1,191 @@
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and
+distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright
+owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities
+that control, are controlled by, or are under common control with that entity.
+For the purposes of this definition, "control" means (i) the power, direct or
+indirect, to cause the direction or management of such entity, whether by
+contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
+outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising
+permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including
+but not limited to software source code, documentation source, and configuration
+files.
+
+"Object" form shall mean any form resulting from mechanical transformation or
+translation of a Source form, including but not limited to compiled object code,
+generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made
+available under the License, as indicated by a copyright notice that is included
+in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that
+is based on (or derived from) the Work and for which the editorial revisions,
+annotations, elaborations, or other modifications represent, as a whole, an
+original work of authorship. For the purposes of this License, Derivative Works
+shall not include works that remain separable from, or merely link (or bind by
+name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version
+of the Work and any modifications or additions to that Work or Derivative Works
+thereof, that is intentionally submitted to Licensor for inclusion in the Work
+by the copyright owner or by an individual or Legal Entity authorized to submit
+on behalf of the copyright owner. For the purposes of this definition,
+"submitted" means any form of electronic, verbal, or written communication sent
+to the Licensor or its representatives, including but not limited to
+communication on electronic mailing lists, source code control systems, and
+issue tracking systems that are managed by, or on behalf of, the Licensor for
+the purpose of discussing and improving the Work, but excluding communication
+that is conspicuously marked or otherwise designated in writing by the copyright
+owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
+of whom a Contribution has been received by Licensor and subsequently
+incorporated within the Work.
+
+2. Grant of Copyright License.
+
+Subject to the terms and conditions of this License, each Contributor hereby
+grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
+irrevocable copyright license to reproduce, prepare Derivative Works of,
+publicly display, publicly perform, sublicense, and distribute the Work and such
+Derivative Works in Source or Object form.
+
+3. Grant of Patent License.
+
+Subject to the terms and conditions of this License, each Contributor hereby
+grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
+irrevocable (except as stated in this section) patent license to make, have
+made, use, offer to sell, sell, import, and otherwise transfer the Work, where
+such license applies only to those patent claims licensable by such Contributor
+that are necessarily infringed by their Contribution(s) alone or by combination
+of their Contribution(s) with the Work to which such Contribution(s) was
+submitted. If You institute patent litigation against any entity (including a
+cross-claim or counterclaim in a lawsuit) alleging that the Work or a
+Contribution incorporated within the Work constitutes direct or contributory
+patent infringement, then any patent licenses granted to You under this License
+for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution.
+
+You may reproduce and distribute copies of the Work or Derivative Works thereof
+in any medium, with or without modifications, and in Source or Object form,
+provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of
+this License; and
+You must cause any modified files to carry prominent notices stating that You
+changed the files; and
+You must retain, in the Source form of any Derivative Works that You distribute,
+all copyright, patent, trademark, and attribution notices from the Source form
+of the Work, excluding those notices that do not pertain to any part of the
+Derivative Works; and
+If the Work includes a "NOTICE" text file as part of its distribution, then any
+Derivative Works that You distribute must include a readable copy of the
+attribution notices contained within such NOTICE file, excluding those notices
+that do not pertain to any part of the Derivative Works, in at least one of the
+following places: within a NOTICE text file distributed as part of the
+Derivative Works; within the Source form or documentation, if provided along
+with the Derivative Works; or, within a display generated by the Derivative
+Works, if and wherever such third-party notices normally appear. The contents of
+the NOTICE file are for informational purposes only and do not modify the
+License. You may add Your own attribution notices within Derivative Works that
+You distribute, alongside or as an addendum to the NOTICE text from the Work,
+provided that such additional attribution notices cannot be construed as
+modifying the License.
+You may add Your own copyright statement to Your modifications and may provide
+additional or different license terms and conditions for use, reproduction, or
+distribution of Your modifications, or for any such Derivative Works as a whole,
+provided Your use, reproduction, and distribution of the Work otherwise complies
+with the conditions stated in this License.
+
+5. Submission of Contributions.
+
+Unless You explicitly state otherwise, any Contribution intentionally submitted
+for inclusion in the Work by You to the Licensor shall be under the terms and
+conditions of this License, without any additional terms or conditions.
+Notwithstanding the above, nothing herein shall supersede or modify the terms of
+any separate license agreement you may have executed with Licensor regarding
+such Contributions.
+
+6. Trademarks.
+
+This License does not grant permission to use the trade names, trademarks,
+service marks, or product names of the Licensor, except as required for
+reasonable and customary use in describing the origin of the Work and
+reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty.
+
+Unless required by applicable law or agreed to in writing, Licensor provides the
+Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
+including, without limitation, any warranties or conditions of TITLE,
+NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
+solely responsible for determining the appropriateness of using or
+redistributing the Work and assume any risks associated with Your exercise of
+permissions under this License.
+
+8. Limitation of Liability.
+
+In no event and under no legal theory, whether in tort (including negligence),
+contract, or otherwise, unless required by applicable law (such as deliberate
+and grossly negligent acts) or agreed to in writing, shall any Contributor be
+liable to You for damages, including any direct, indirect, special, incidental,
+or consequential damages of any character arising as a result of this License or
+out of the use or inability to use the Work (including but not limited to
+damages for loss of goodwill, work stoppage, computer failure or malfunction, or
+any and all other commercial damages or losses), even if such Contributor has
+been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability.
+
+While redistributing the Work or Derivative Works thereof, You may choose to
+offer, and charge a fee for, acceptance of support, warranty, indemnity, or
+other liability obligations and/or rights consistent with this License. However,
+in accepting such obligations, You may act only on Your own behalf and on Your
+sole responsibility, not on behalf of any other Contributor, and only if You
+agree to indemnify, defend, and hold each Contributor harmless for any liability
+incurred by, or claims asserted against, such Contributor by reason of your
+accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work
+
+To apply the Apache License to your work, attach the following boilerplate
+notice, with the fields enclosed by brackets "[]" replaced with your own
+identifying information. (Don't include the brackets!) The text should be
+enclosed in the appropriate comment syntax for the file format. We also
+recommend that a file or class name and description of purpose be included on
+the same "printed page" as the copyright notice for easier identification within
+third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/README.md b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/README.md
new file mode 100644
index 00000000000..6bd3383a0e8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/README.md
@@ -0,0 +1,26 @@
+# OpenSSL bindings for Go
+
+Please see http://godoc.org/github.com/spacemonkeygo/openssl for more info
+
+### License
+
+Copyright (C) 2014 Space Monkey, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+### Using on Windows
+1. Install [mingw-w64](http://mingw-w64.sourceforge.net/)
+2. Install [pkg-config-lite](http://sourceforge.net/projects/pkgconfiglite)
+3. Build (or install precompiled) openssl for mingw32-w64
+4. Set __PKG\_CONFIG\_PATH__ to the directory containing openssl.pc
+   (e.g. c:\mingw64\mingw64\lib\pkgconfig)
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/bio.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/bio.go
new file mode 100644
index 00000000000..1be93aaa23a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/bio.go
@@ -0,0 +1,355 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+/*
+#include <string.h>
+#include <openssl/bio.h>
+
+extern int cbioNew(BIO *b);
+static int cbioFree(BIO *b) {
+ return 1;
+}
+
+extern int writeBioWrite(BIO *b, char *buf, int size);
+extern long writeBioCtrl(BIO *b, int cmd, long arg1, void *arg2);
+static int writeBioPuts(BIO *b, const char *str) {
+ return writeBioWrite(b, (char*)str, (int)strlen(str));
+}
+
+extern int readBioRead(BIO *b, char *buf, int size);
+extern long readBioCtrl(BIO *b, int cmd, long arg1, void *arg2);
+
+static BIO_METHOD writeBioMethod = {
+ BIO_TYPE_SOURCE_SINK,
+ "Go Write BIO",
+ (int (*)(BIO *, const char *, int))writeBioWrite,
+ NULL,
+ writeBioPuts,
+ NULL,
+ writeBioCtrl,
+ cbioNew,
+ cbioFree,
+ NULL};
+
+static BIO_METHOD* BIO_s_writeBio() { return &writeBioMethod; }
+
+static BIO_METHOD readBioMethod = {
+ BIO_TYPE_SOURCE_SINK,
+ "Go Read BIO",
+ NULL,
+ readBioRead,
+ NULL,
+ NULL,
+ readBioCtrl,
+ cbioNew,
+ cbioFree,
+ NULL};
+
+static BIO_METHOD* BIO_s_readBio() { return &readBioMethod; }
+*/
+import "C"
+
+import (
+ "errors"
+ "io"
+ "reflect"
+ "sync"
+ "unsafe"
+)
+
+const (
+ SSLRecordSize = 16 * 1024
+)
+
+func nonCopyGoBytes(ptr uintptr, length int) []byte {
+ var slice []byte
+ header := (*reflect.SliceHeader)(unsafe.Pointer(&slice))
+ header.Cap = length
+ header.Len = length
+ header.Data = ptr
+ return slice
+}
+
+func nonCopyCString(data *C.char, size C.int) []byte {
+ return nonCopyGoBytes(uintptr(unsafe.Pointer(data)), int(size))
+}
+
+//export cbioNew
+func cbioNew(b *C.BIO) C.int {
+ b.shutdown = 1
+ b.init = 1
+ b.num = -1
+ b.ptr = nil
+ b.flags = 0
+ return 1
+}
+
+type writeBio struct {
+ data_mtx sync.Mutex
+ op_mtx sync.Mutex
+ buf []byte
+ release_buffers bool
+}
+
+func loadWritePtr(b *C.BIO) *writeBio {
+ return (*writeBio)(unsafe.Pointer(b.ptr))
+}
+
+func bioClearRetryFlags(b *C.BIO) {
+ // from BIO_clear_retry_flags and BIO_clear_flags
+ b.flags &= ^(C.BIO_FLAGS_RWS | C.BIO_FLAGS_SHOULD_RETRY)
+}
+
+func bioSetRetryRead(b *C.BIO) {
+ // from BIO_set_retry_read and BIO_set_flags
+ b.flags |= (C.BIO_FLAGS_READ | C.BIO_FLAGS_SHOULD_RETRY)
+}
+
+//export writeBioWrite
+func writeBioWrite(b *C.BIO, data *C.char, size C.int) (rc C.int) {
+ defer func() {
+ if err := recover(); err != nil {
+ logger.Critf("openssl: writeBioWrite panic'd: %v", err)
+ rc = -1
+ }
+ }()
+ ptr := loadWritePtr(b)
+ if ptr == nil || data == nil || size < 0 {
+ return -1
+ }
+ ptr.data_mtx.Lock()
+ defer ptr.data_mtx.Unlock()
+ bioClearRetryFlags(b)
+ ptr.buf = append(ptr.buf, nonCopyCString(data, size)...)
+ return size
+}
+
+//export writeBioCtrl
+func writeBioCtrl(b *C.BIO, cmd C.int, arg1 C.long, arg2 unsafe.Pointer) (
+ rc C.long) {
+ defer func() {
+ if err := recover(); err != nil {
+ logger.Critf("openssl: writeBioCtrl panic'd: %v", err)
+ rc = -1
+ }
+ }()
+ switch cmd {
+ case C.BIO_CTRL_WPENDING:
+ return writeBioPending(b)
+ case C.BIO_CTRL_DUP, C.BIO_CTRL_FLUSH:
+ return 1
+ default:
+ return 0
+ }
+}
+
+func writeBioPending(b *C.BIO) C.long {
+ ptr := loadWritePtr(b)
+ if ptr == nil {
+ return 0
+ }
+ ptr.data_mtx.Lock()
+ defer ptr.data_mtx.Unlock()
+ return C.long(len(ptr.buf))
+}
+
+func (b *writeBio) WriteTo(w io.Writer) (rv int64, err error) {
+ b.op_mtx.Lock()
+ defer b.op_mtx.Unlock()
+
+ // write whatever data we currently have
+ b.data_mtx.Lock()
+ data := b.buf
+ b.data_mtx.Unlock()
+
+ if len(data) == 0 {
+ return 0, nil
+ }
+ n, err := w.Write(data)
+
+ // subtract however much data we wrote from the buffer
+ b.data_mtx.Lock()
+ b.buf = b.buf[:copy(b.buf, b.buf[n:])]
+ if b.release_buffers && len(b.buf) == 0 {
+ b.buf = nil
+ }
+ b.data_mtx.Unlock()
+
+ return int64(n), err
+}
+
+func (self *writeBio) Disconnect(b *C.BIO) {
+ if loadWritePtr(b) == self {
+ b.ptr = nil
+ }
+}
+
+func (b *writeBio) MakeCBIO() *C.BIO {
+ rv := C.BIO_new(C.BIO_s_writeBio())
+ rv.ptr = unsafe.Pointer(b)
+ return rv
+}
+
+type readBio struct {
+ data_mtx sync.Mutex
+ op_mtx sync.Mutex
+ buf []byte
+ eof bool
+ release_buffers bool
+}
+
+func loadReadPtr(b *C.BIO) *readBio {
+ return (*readBio)(unsafe.Pointer(b.ptr))
+}
+
+//export readBioRead
+func readBioRead(b *C.BIO, data *C.char, size C.int) (rc C.int) {
+ defer func() {
+ if err := recover(); err != nil {
+ logger.Critf("openssl: readBioRead panic'd: %v", err)
+ rc = -1
+ }
+ }()
+ ptr := loadReadPtr(b)
+ if ptr == nil || size < 0 {
+ return -1
+ }
+ ptr.data_mtx.Lock()
+ defer ptr.data_mtx.Unlock()
+ bioClearRetryFlags(b)
+ if len(ptr.buf) == 0 {
+ if ptr.eof {
+ return 0
+ }
+ bioSetRetryRead(b)
+ return -1
+ }
+ if size == 0 || data == nil {
+ return C.int(len(ptr.buf))
+ }
+ n := copy(nonCopyCString(data, size), ptr.buf)
+ ptr.buf = ptr.buf[:copy(ptr.buf, ptr.buf[n:])]
+ if ptr.release_buffers && len(ptr.buf) == 0 {
+ ptr.buf = nil
+ }
+ return C.int(n)
+}
+
+//export readBioCtrl
+func readBioCtrl(b *C.BIO, cmd C.int, arg1 C.long, arg2 unsafe.Pointer) (
+ rc C.long) {
+
+ defer func() {
+ if err := recover(); err != nil {
+ logger.Critf("openssl: readBioCtrl panic'd: %v", err)
+ rc = -1
+ }
+ }()
+ switch cmd {
+ case C.BIO_CTRL_PENDING:
+ return readBioPending(b)
+ case C.BIO_CTRL_DUP, C.BIO_CTRL_FLUSH:
+ return 1
+ default:
+ return 0
+ }
+}
+
+func readBioPending(b *C.BIO) C.long {
+ ptr := loadReadPtr(b)
+ if ptr == nil {
+ return 0
+ }
+ ptr.data_mtx.Lock()
+ defer ptr.data_mtx.Unlock()
+ return C.long(len(ptr.buf))
+}
+
+func (b *readBio) ReadFromOnce(r io.Reader) (n int, err error) {
+ b.op_mtx.Lock()
+ defer b.op_mtx.Unlock()
+
+ // make sure we have a destination that fits at least one SSL record
+ b.data_mtx.Lock()
+ if cap(b.buf) < len(b.buf)+SSLRecordSize {
+ new_buf := make([]byte, len(b.buf), len(b.buf)+SSLRecordSize)
+ copy(new_buf, b.buf)
+ b.buf = new_buf
+ }
+ dst := b.buf[len(b.buf):cap(b.buf)]
+ dst_slice := b.buf
+ b.data_mtx.Unlock()
+
+ n, err = r.Read(dst)
+ b.data_mtx.Lock()
+ defer b.data_mtx.Unlock()
+ if n > 0 {
+ if len(dst_slice) != len(b.buf) {
+			// someone shrank the buffer, so we read too far ahead and need to
+			// slide the freshly read bytes backwards
+ copy(b.buf[len(b.buf):len(b.buf)+n], dst)
+ }
+ b.buf = b.buf[:len(b.buf)+n]
+ }
+ return n, err
+}
+
+func (b *readBio) MakeCBIO() *C.BIO {
+ rv := C.BIO_new(C.BIO_s_readBio())
+ rv.ptr = unsafe.Pointer(b)
+ return rv
+}
+
+func (self *readBio) Disconnect(b *C.BIO) {
+ if loadReadPtr(b) == self {
+ b.ptr = nil
+ }
+}
+
+func (b *readBio) MarkEOF() {
+ b.data_mtx.Lock()
+ defer b.data_mtx.Unlock()
+ b.eof = true
+}
+
+type anyBio C.BIO
+
+func asAnyBio(b *C.BIO) *anyBio { return (*anyBio)(b) }
+
+func (b *anyBio) Read(buf []byte) (n int, err error) {
+ if len(buf) == 0 {
+ return 0, nil
+ }
+ n = int(C.BIO_read((*C.BIO)(b), unsafe.Pointer(&buf[0]), C.int(len(buf))))
+ if n <= 0 {
+ return 0, io.EOF
+ }
+ return n, nil
+}
+
+func (b *anyBio) Write(buf []byte) (written int, err error) {
+ if len(buf) == 0 {
+ return 0, nil
+ }
+ n := int(C.BIO_write((*C.BIO)(b), unsafe.Pointer(&buf[0]),
+ C.int(len(buf))))
+ if n != len(buf) {
+ return n, errors.New("BIO write failed")
+ }
+ return n, nil
+}
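The two BIO types above are the bridge between OpenSSL's in-memory BIOs and Go's io interfaces: OpenSSL writes outbound TLS records into the write BIO, which WriteTo flushes to the network, while ReadFromOnce pulls inbound bytes into the read BIO for OpenSSL to parse. A simplified sketch of that pump (error handling trimmed; the package's real connection code is more involved):

// pump pushes pending outbound bytes to the connection and pulls at most one
// read's worth of inbound bytes back in for OpenSSL to consume.
func pump(conn io.ReadWriter, in *readBio, out *writeBio) error {
	if _, err := out.WriteTo(conn); err != nil {
		return err
	}
	if _, err := in.ReadFromOnce(conn); err != nil && err != io.EOF {
		return err
	}
	return nil
}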
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/build.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/build.go
new file mode 100644
index 00000000000..dd72651d3ea
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/build.go
@@ -0,0 +1,23 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+// #cgo linux pkg-config: openssl
+// #cgo windows CFLAGS: -DWIN32_LEAN_AND_MEAN
+// #cgo darwin CFLAGS: -Wno-deprecated-declarations
+// #cgo darwin LDFLAGS: -lssl -lcrypto
+import "C"
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/cert.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/cert.go
new file mode 100644
index 00000000000..61637c649fa
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/cert.go
@@ -0,0 +1,407 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+// #include <openssl/conf.h>
+// #include <openssl/ssl.h>
+// #include <openssl/x509v3.h>
+//
+// void OPENSSL_free_not_a_macro(void *ref) { OPENSSL_free(ref); }
+//
+import "C"
+
+import (
+ "errors"
+ "io/ioutil"
+ "math/big"
+ "runtime"
+ "time"
+ "unsafe"
+)
+
+type EVP_MD int
+
+const (
+ EVP_NULL EVP_MD = iota
+ EVP_MD5 EVP_MD = iota
+ EVP_SHA EVP_MD = iota
+ EVP_SHA1 EVP_MD = iota
+ EVP_DSS EVP_MD = iota
+ EVP_DSS1 EVP_MD = iota
+ EVP_MDC2 EVP_MD = iota
+ EVP_RIPEMD160 EVP_MD = iota
+ EVP_SHA224 EVP_MD = iota
+ EVP_SHA256 EVP_MD = iota
+ EVP_SHA384 EVP_MD = iota
+ EVP_SHA512 EVP_MD = iota
+)
+
+type Certificate struct {
+ x *C.X509
+ Issuer *Certificate
+ ref interface{}
+ pubKey PublicKey
+}
+
+type CertificateInfo struct {
+ Serial *big.Int
+ Issued time.Duration
+ Expires time.Duration
+ Country string
+ Organization string
+ CommonName string
+}
+
+type Name struct {
+ name *C.X509_NAME
+}
+
+// Allocate and return a new Name object.
+func NewName() (*Name, error) {
+ n := C.X509_NAME_new()
+ if n == nil {
+ return nil, errors.New("could not create x509 name")
+ }
+ name := &Name{name: n}
+ runtime.SetFinalizer(name, func(n *Name) {
+ C.X509_NAME_free(n.name)
+ })
+ return name, nil
+}
+
+// AddTextEntry appends a text entry to an X509 NAME.
+func (n *Name) AddTextEntry(field, value string) error {
+ cfield := C.CString(field)
+ defer C.free(unsafe.Pointer(cfield))
+ cvalue := (*C.uchar)(unsafe.Pointer(C.CString(value)))
+ defer C.free(unsafe.Pointer(cvalue))
+ ret := C.X509_NAME_add_entry_by_txt(
+ n.name, cfield, C.MBSTRING_ASC, cvalue, -1, -1, 0)
+ if ret != 1 {
+ return errors.New("failed to add x509 name text entry")
+ }
+ return nil
+}
+
+// AddTextEntries allows adding multiple entries to a name in one call.
+func (n *Name) AddTextEntries(entries map[string]string) error {
+ for f, v := range entries {
+ if err := n.AddTextEntry(f, v); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// GetEntry returns a name entry based on NID. If no entry, then ("", false) is
+// returned.
+func (n *Name) GetEntry(nid NID) (entry string, ok bool) {
+ entrylen := C.X509_NAME_get_text_by_NID(n.name, C.int(nid), nil, 0)
+ if entrylen == -1 {
+ return "", false
+ }
+ buf := (*C.char)(C.malloc(C.size_t(entrylen + 1)))
+ defer C.free(unsafe.Pointer(buf))
+ C.X509_NAME_get_text_by_NID(n.name, C.int(nid), buf, entrylen+1)
+ return C.GoStringN(buf, entrylen), true
+}
+
+// NewCertificate generates a basic certificate based
+// on the provided CertificateInfo struct
+func NewCertificate(info *CertificateInfo, key PublicKey) (*Certificate, error) {
+ c := &Certificate{x: C.X509_new()}
+ runtime.SetFinalizer(c, func(c *Certificate) {
+ C.X509_free(c.x)
+ })
+
+ name, err := c.GetSubjectName()
+ if err != nil {
+ return nil, err
+ }
+ err = name.AddTextEntries(map[string]string{
+ "C": info.Country,
+ "O": info.Organization,
+ "CN": info.CommonName,
+ })
+ if err != nil {
+ return nil, err
+ }
+ // self-issue for now
+ if err := c.SetIssuerName(name); err != nil {
+ return nil, err
+ }
+ if err := c.SetSerial(info.Serial); err != nil {
+ return nil, err
+ }
+ if err := c.SetIssueDate(info.Issued); err != nil {
+ return nil, err
+ }
+ if err := c.SetExpireDate(info.Expires); err != nil {
+ return nil, err
+ }
+ if err := c.SetPubKey(key); err != nil {
+ return nil, err
+ }
+ return c, nil
+}
+
+func (c *Certificate) GetSubjectName() (*Name, error) {
+ n := C.X509_get_subject_name(c.x)
+ if n == nil {
+ return nil, errors.New("failed to get subject name")
+ }
+ return &Name{name: n}, nil
+}
+
+func (c *Certificate) GetIssuerName() (*Name, error) {
+ n := C.X509_get_issuer_name(c.x)
+ if n == nil {
+ return nil, errors.New("failed to get issuer name")
+ }
+ return &Name{name: n}, nil
+}
+
+func (c *Certificate) SetSubjectName(name *Name) error {
+ if C.X509_set_subject_name(c.x, name.name) != 1 {
+ return errors.New("failed to set subject name")
+ }
+ return nil
+}
+
+// SetIssuer updates the stored Issuer cert
+// and the internal x509 Issuer Name of a certificate.
+// The stored Issuer reference is used when adding extensions.
+func (c *Certificate) SetIssuer(issuer *Certificate) error {
+ name, err := issuer.GetSubjectName()
+ if err != nil {
+ return err
+ }
+ if err = c.SetIssuerName(name); err != nil {
+ return err
+ }
+ c.Issuer = issuer
+ return nil
+}
+
+// SetIssuerName populates the issuer name of a certificate.
+// Use SetIssuer instead, if possible.
+func (c *Certificate) SetIssuerName(name *Name) error {
+ if C.X509_set_issuer_name(c.x, name.name) != 1 {
+		return errors.New("failed to set issuer name")
+ }
+ return nil
+}
+
+// SetSerial sets the serial of a certificate.
+func (c *Certificate) SetSerial(serial *big.Int) error {
+ sno := C.ASN1_INTEGER_new()
+ defer C.ASN1_INTEGER_free(sno)
+ bn := C.BN_new()
+ defer C.BN_free(bn)
+
+ serialBytes := serial.Bytes()
+ if bn = C.BN_bin2bn((*C.uchar)(unsafe.Pointer(&serialBytes[0])), C.int(len(serialBytes)), bn); bn == nil {
+ return errors.New("failed to set serial")
+ }
+ if sno = C.BN_to_ASN1_INTEGER(bn, sno); sno == nil {
+ return errors.New("failed to set serial")
+ }
+ if C.X509_set_serialNumber(c.x, sno) != 1 {
+ return errors.New("failed to set serial")
+ }
+ return nil
+}
+
+// SetIssueDate sets the certificate issue date relative to the current time.
+func (c *Certificate) SetIssueDate(when time.Duration) error {
+ offset := C.long(when / time.Second)
+ result := C.X509_gmtime_adj(c.x.cert_info.validity.notBefore, offset)
+ if result == nil {
+ return errors.New("failed to set issue date")
+ }
+ return nil
+}
+
+// SetExpireDate sets the certificate expiration date relative to the current time.
+func (c *Certificate) SetExpireDate(when time.Duration) error {
+ offset := C.long(when / time.Second)
+ result := C.X509_gmtime_adj(c.x.cert_info.validity.notAfter, offset)
+ if result == nil {
+ return errors.New("failed to set expire date")
+ }
+ return nil
+}
+
+// SetPubKey assigns a new public key to a certificate.
+func (c *Certificate) SetPubKey(pubKey PublicKey) error {
+ c.pubKey = pubKey
+ if C.X509_set_pubkey(c.x, pubKey.evpPKey()) != 1 {
+ return errors.New("failed to set public key")
+ }
+ return nil
+}
+
+// Sign a certificate using a private key and a digest.
+// Accepted digests are EVP_SHA256, EVP_SHA384, and EVP_SHA512.
+func (c *Certificate) Sign(privKey PrivateKey, digest EVP_MD) error {
+ switch digest {
+ case EVP_SHA256:
+ case EVP_SHA384:
+ case EVP_SHA512:
+ default:
+		return errors.New("unsupported digest; " +
+			"you're probably looking for 'EVP_SHA256' or 'EVP_SHA512'")
+ }
+ return c.insecureSign(privKey, digest)
+}
+
+func (c *Certificate) insecureSign(privKey PrivateKey, digest EVP_MD) error {
+ var md *C.EVP_MD
+ switch digest {
+ // please don't use these digest functions
+ case EVP_NULL:
+ md = C.EVP_md_null()
+ case EVP_MD5:
+ md = C.EVP_md5()
+ case EVP_SHA:
+ md = C.EVP_sha()
+ case EVP_SHA1:
+ md = C.EVP_sha1()
+ case EVP_DSS:
+ md = C.EVP_dss()
+ case EVP_DSS1:
+ md = C.EVP_dss1()
+ case EVP_RIPEMD160:
+ md = C.EVP_ripemd160()
+ case EVP_SHA224:
+ md = C.EVP_sha224()
+ // you actually want one of these
+ case EVP_SHA256:
+ md = C.EVP_sha256()
+ case EVP_SHA384:
+ md = C.EVP_sha384()
+ case EVP_SHA512:
+ md = C.EVP_sha512()
+ }
+ if C.X509_sign(c.x, privKey.evpPKey(), md) <= 0 {
+ return errors.New("failed to sign certificate")
+ }
+ return nil
+}
+
+// Add an extension to a certificate.
+// Extension constants are NID_* as found in openssl.
+func (c *Certificate) AddExtension(nid NID, value string) error {
+ issuer := c
+ if c.Issuer != nil {
+ issuer = c.Issuer
+ }
+ var ctx C.X509V3_CTX
+ C.X509V3_set_ctx(&ctx, c.x, issuer.x, nil, nil, 0)
+ ex := C.X509V3_EXT_conf_nid(nil, &ctx, C.int(nid), C.CString(value))
+ if ex == nil {
+ return errors.New("failed to create x509v3 extension")
+ }
+ defer C.X509_EXTENSION_free(ex)
+ if C.X509_add_ext(c.x, ex, -1) <= 0 {
+ return errors.New("failed to add x509v3 extension")
+ }
+ return nil
+}
+
+// AddExtensions calls AddExtension for each NID/value pair in the map.
+// It stops and returns the first error it encounters.
+func (c *Certificate) AddExtensions(extensions map[NID]string) error {
+ for nid, value := range extensions {
+ if err := c.AddExtension(nid, value); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// LoadCertificateFromPEM loads an X509 certificate from a PEM-encoded block.
+func LoadCertificateFromPEM(pem_block []byte) (*Certificate, error) {
+ if len(pem_block) == 0 {
+ return nil, errors.New("empty pem block")
+ }
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ bio := C.BIO_new_mem_buf(unsafe.Pointer(&pem_block[0]),
+ C.int(len(pem_block)))
+ cert := C.PEM_read_bio_X509(bio, nil, nil, nil)
+ C.BIO_free(bio)
+ if cert == nil {
+ return nil, errorFromErrorQueue()
+ }
+ x := &Certificate{x: cert}
+ runtime.SetFinalizer(x, func(x *Certificate) {
+ C.X509_free(x.x)
+ })
+ return x, nil
+}
+
+// MarshalPEM converts the X509 certificate to PEM-encoded format
+func (c *Certificate) MarshalPEM() (pem_block []byte, err error) {
+ bio := C.BIO_new(C.BIO_s_mem())
+ if bio == nil {
+ return nil, errors.New("failed to allocate memory BIO")
+ }
+ defer C.BIO_free(bio)
+ if int(C.PEM_write_bio_X509(bio, c.x)) != 1 {
+ return nil, errors.New("failed dumping certificate")
+ }
+ return ioutil.ReadAll(asAnyBio(bio))
+}
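+
+// pemRoundTripExample is a hedged, illustrative sketch (not part of the
+// upstream API): it re-encodes a certificate by combining
+// LoadCertificateFromPEM with MarshalPEM. pemBlock is assumed to hold a
+// PEM-encoded certificate obtained elsewhere.
+func pemRoundTripExample(pemBlock []byte) ([]byte, error) {
+	cert, err := LoadCertificateFromPEM(pemBlock)
+	if err != nil {
+		return nil, err
+	}
+	// Re-encode the parsed certificate back into a PEM block.
+	return cert.MarshalPEM()
+}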
+
+// PublicKey returns the public key embedded in the X509 certificate.
+func (c *Certificate) PublicKey() (PublicKey, error) {
+ pkey := C.X509_get_pubkey(c.x)
+ if pkey == nil {
+ return nil, errors.New("no public key found")
+ }
+ key := &pKey{key: pkey}
+ runtime.SetFinalizer(key, func(key *pKey) {
+ C.EVP_PKEY_free(key.key)
+ })
+ return key, nil
+}
+
+// GetSerialNumberHex returns the certificate's serial number in hex format
+func (c *Certificate) GetSerialNumberHex() (serial string) {
+ asn1_i := C.X509_get_serialNumber(c.x)
+ bignum := C.ASN1_INTEGER_to_BN(asn1_i, nil)
+ hex := C.BN_bn2hex(bignum)
+ serial = C.GoString(hex)
+ C.BN_free(bignum)
+ C.OPENSSL_free_not_a_macro(unsafe.Pointer(hex))
+ return
+}
+
+func (c *Certificate) X509NamePrintEx() (out []byte, err error) {
+ bio := C.BIO_new(C.BIO_s_mem())
+ if bio == nil {
+ return nil, errors.New("failed to allocate memory BIO")
+ }
+ defer C.BIO_free(bio)
+ name := C.X509_get_subject_name(c.x)
+ // TODO, pass in flags instead of using this hardcoded one
+ if int(C.X509_NAME_print_ex(bio, name, 0, C.XN_FLAG_RFC2253)) < 0 {
+ return nil, errors.New("failed formatting subject")
+ }
+ return ioutil.ReadAll(asAnyBio(bio))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/cert_test.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/cert_test.go
new file mode 100644
index 00000000000..c32883ba4eb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/cert_test.go
@@ -0,0 +1,139 @@
+// Copyright (C) 2014 Ryan Hileman
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openssl
+
+import (
+ "math/big"
+ "testing"
+ "time"
+)
+
+func TestCertGenerate(t *testing.T) {
+ key, err := GenerateRSAKey(2048)
+ if err != nil {
+ t.Fatal(err)
+ }
+ info := &CertificateInfo{
+ Serial: big.NewInt(int64(1)),
+ Issued: 0,
+ Expires: 24 * time.Hour,
+ Country: "US",
+ Organization: "Test",
+ CommonName: "localhost",
+ }
+ cert, err := NewCertificate(info, key)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if err := cert.Sign(key, EVP_SHA256); err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestCAGenerate(t *testing.T) {
+ cakey, err := GenerateRSAKey(2048)
+ if err != nil {
+ t.Fatal(err)
+ }
+ info := &CertificateInfo{
+ Serial: big.NewInt(int64(1)),
+ Issued: 0,
+ Expires: 24 * time.Hour,
+ Country: "US",
+ Organization: "Test CA",
+ CommonName: "CA",
+ }
+ ca, err := NewCertificate(info, cakey)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if err := ca.AddExtensions(map[NID]string{
+ NID_basic_constraints: "critical,CA:TRUE",
+ NID_key_usage: "critical,keyCertSign,cRLSign",
+ NID_subject_key_identifier: "hash",
+ NID_netscape_cert_type: "sslCA",
+ }); err != nil {
+ t.Fatal(err)
+ }
+ if err := ca.Sign(cakey, EVP_SHA256); err != nil {
+ t.Fatal(err)
+ }
+ key, err := GenerateRSAKey(2048)
+ if err != nil {
+ t.Fatal(err)
+ }
+ info = &CertificateInfo{
+ Serial: big.NewInt(int64(1)),
+ Issued: 0,
+ Expires: 24 * time.Hour,
+ Country: "US",
+ Organization: "Test",
+ CommonName: "localhost",
+ }
+ cert, err := NewCertificate(info, key)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if err := cert.AddExtensions(map[NID]string{
+ NID_basic_constraints: "critical,CA:FALSE",
+ NID_key_usage: "keyEncipherment",
+ NID_ext_key_usage: "serverAuth",
+ }); err != nil {
+ t.Fatal(err)
+ }
+ if err := cert.SetIssuer(ca); err != nil {
+ t.Fatal(err)
+ }
+ if err := cert.Sign(cakey, EVP_SHA256); err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestCertGetNameEntry(t *testing.T) {
+ key, err := GenerateRSAKey(2048)
+ if err != nil {
+ t.Fatal(err)
+ }
+ info := &CertificateInfo{
+ Serial: big.NewInt(int64(1)),
+ Issued: 0,
+ Expires: 24 * time.Hour,
+ Country: "US",
+ Organization: "Test",
+ CommonName: "localhost",
+ }
+ cert, err := NewCertificate(info, key)
+ if err != nil {
+ t.Fatal(err)
+ }
+ name, err := cert.GetSubjectName()
+ if err != nil {
+ t.Fatal(err)
+ }
+ entry, ok := name.GetEntry(NID_commonName)
+ if !ok {
+ t.Fatal("no common name")
+ }
+ if entry != "localhost" {
+ t.Fatalf("expected localhost; got %q", entry)
+ }
+ entry, ok = name.GetEntry(NID_localityName)
+ if ok {
+ t.Fatal("did not expect a locality name")
+ }
+ if entry != "" {
+ t.Fatalf("entry should be empty; got %q", entry)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ciphers.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ciphers.go
new file mode 100644
index 00000000000..12662707f54
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ciphers.go
@@ -0,0 +1,355 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+// #include <openssl/evp.h>
+//
+// int EVP_CIPHER_block_size_not_a_macro(EVP_CIPHER *c) {
+// return EVP_CIPHER_block_size(c);
+// }
+//
+// int EVP_CIPHER_key_length_not_a_macro(EVP_CIPHER *c) {
+// return EVP_CIPHER_key_length(c);
+// }
+//
+// int EVP_CIPHER_iv_length_not_a_macro(EVP_CIPHER *c) {
+// return EVP_CIPHER_iv_length(c);
+// }
+//
+// int EVP_CIPHER_nid_not_a_macro(EVP_CIPHER *c) {
+// return EVP_CIPHER_nid(c);
+// }
+//
+// int EVP_CIPHER_CTX_block_size_not_a_macro(EVP_CIPHER_CTX *ctx) {
+// return EVP_CIPHER_CTX_block_size(ctx);
+// }
+//
+// int EVP_CIPHER_CTX_key_length_not_a_macro(EVP_CIPHER_CTX *ctx) {
+// return EVP_CIPHER_CTX_key_length(ctx);
+// }
+//
+// int EVP_CIPHER_CTX_iv_length_not_a_macro(EVP_CIPHER_CTX *ctx) {
+// return EVP_CIPHER_CTX_iv_length(ctx);
+// }
+//
+// const EVP_CIPHER *EVP_CIPHER_CTX_cipher_not_a_macro(EVP_CIPHER_CTX *ctx) {
+// return EVP_CIPHER_CTX_cipher(ctx);
+// }
+import "C"
+
+import (
+ "errors"
+ "fmt"
+ "runtime"
+ "unsafe"
+)
+
+const (
+ GCM_TAG_MAXLEN = 16
+)
+
+type CipherCtx interface {
+ Cipher() *Cipher
+ BlockSize() int
+ KeySize() int
+ IVSize() int
+}
+
+type Cipher struct {
+ ptr *C.EVP_CIPHER
+}
+
+func (c *Cipher) Nid() NID {
+ return NID(C.EVP_CIPHER_nid_not_a_macro(c.ptr))
+}
+
+func (c *Cipher) ShortName() (string, error) {
+ return Nid2ShortName(c.Nid())
+}
+
+func (c *Cipher) BlockSize() int {
+ return int(C.EVP_CIPHER_block_size_not_a_macro(c.ptr))
+}
+
+func (c *Cipher) KeySize() int {
+ return int(C.EVP_CIPHER_key_length_not_a_macro(c.ptr))
+}
+
+func (c *Cipher) IVSize() int {
+ return int(C.EVP_CIPHER_iv_length_not_a_macro(c.ptr))
+}
+
+func Nid2ShortName(nid NID) (string, error) {
+ sn := C.OBJ_nid2sn(C.int(nid))
+ if sn == nil {
+ return "", fmt.Errorf("NID %d not found", nid)
+ }
+ return C.GoString(sn), nil
+}
+
+func GetCipherByName(name string) (*Cipher, error) {
+ cname := C.CString(name)
+ defer C.free(unsafe.Pointer(cname))
+ p := C.EVP_get_cipherbyname(cname)
+ if p == nil {
+ return nil, fmt.Errorf("Cipher %v not found", name)
+ }
+ // we can consider ciphers to use static mem; don't need to free
+ return &Cipher{ptr: p}, nil
+}
+
+func GetCipherByNid(nid NID) (*Cipher, error) {
+ sn, err := Nid2ShortName(nid)
+ if err != nil {
+ return nil, err
+ }
+ return GetCipherByName(sn)
+}
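+
+// describeCipherExample is a hedged, illustrative sketch (not part of the
+// upstream API): it looks up a cipher by name and reports its block, key and
+// IV sizes. The name "aes-256-cbc" is only an example input.
+func describeCipherExample() (string, error) {
+	c, err := GetCipherByName("aes-256-cbc")
+	if err != nil {
+		return "", err
+	}
+	sn, err := c.ShortName()
+	if err != nil {
+		return "", err
+	}
+	// Sizes are reported in bytes.
+	return fmt.Sprintf("%s: block=%d key=%d iv=%d",
+		sn, c.BlockSize(), c.KeySize(), c.IVSize()), nil
+}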
+
+type cipherCtx struct {
+ ctx *C.EVP_CIPHER_CTX
+}
+
+func newCipherCtx() (*cipherCtx, error) {
+ cctx := C.EVP_CIPHER_CTX_new()
+ if cctx == nil {
+ return nil, errors.New("failed to allocate cipher context")
+ }
+ ctx := &cipherCtx{cctx}
+ runtime.SetFinalizer(ctx, func(ctx *cipherCtx) {
+ C.EVP_CIPHER_CTX_free(ctx.ctx)
+ })
+ return ctx, nil
+}
+
+func (ctx *cipherCtx) applyKeyAndIV(key, iv []byte) error {
+ var kptr, iptr *C.uchar
+ if key != nil {
+ if len(key) != ctx.KeySize() {
+ return fmt.Errorf("bad key size (%d bytes instead of %d)",
+ len(key), ctx.KeySize())
+ }
+ kptr = (*C.uchar)(&key[0])
+ }
+ if iv != nil {
+ if len(iv) != ctx.IVSize() {
+ return fmt.Errorf("bad IV size (%d bytes instead of %d)",
+ len(iv), ctx.IVSize())
+ }
+ iptr = (*C.uchar)(&iv[0])
+ }
+ if kptr != nil || iptr != nil {
+ var res C.int
+ if ctx.ctx.encrypt != 0 {
+ res = C.EVP_EncryptInit_ex(ctx.ctx, nil, nil, kptr, iptr)
+ } else {
+ res = C.EVP_DecryptInit_ex(ctx.ctx, nil, nil, kptr, iptr)
+ }
+ if 1 != res {
+ return errors.New("failed to apply key/IV")
+ }
+ }
+ return nil
+}
+
+func (ctx *cipherCtx) Cipher() *Cipher {
+ return &Cipher{ptr: C.EVP_CIPHER_CTX_cipher_not_a_macro(ctx.ctx)}
+}
+
+func (ctx *cipherCtx) BlockSize() int {
+ return int(C.EVP_CIPHER_CTX_block_size_not_a_macro(ctx.ctx))
+}
+
+func (ctx *cipherCtx) KeySize() int {
+ return int(C.EVP_CIPHER_CTX_key_length_not_a_macro(ctx.ctx))
+}
+
+func (ctx *cipherCtx) IVSize() int {
+ return int(C.EVP_CIPHER_CTX_iv_length_not_a_macro(ctx.ctx))
+}
+
+func (ctx *cipherCtx) setCtrl(code, arg int) error {
+ res := C.EVP_CIPHER_CTX_ctrl(ctx.ctx, C.int(code), C.int(arg), nil)
+ if res != 1 {
+ return fmt.Errorf("failed to set code %d to %d [result %d]",
+ code, arg, res)
+ }
+ return nil
+}
+
+func (ctx *cipherCtx) setCtrlBytes(code, arg int, value []byte) error {
+ res := C.EVP_CIPHER_CTX_ctrl(ctx.ctx, C.int(code), C.int(arg),
+ unsafe.Pointer(&value[0]))
+ if res != 1 {
+ return fmt.Errorf("failed to set code %d with arg %d to %x [result %d]",
+ code, arg, value, res)
+ }
+ return nil
+}
+
+func (ctx *cipherCtx) getCtrlInt(code, arg int) (int, error) {
+ var returnVal C.int
+ res := C.EVP_CIPHER_CTX_ctrl(ctx.ctx, C.int(code), C.int(arg),
+ unsafe.Pointer(&returnVal))
+ if res != 1 {
+ return 0, fmt.Errorf("failed to get code %d with arg %d [result %d]",
+ code, arg, res)
+ }
+ return int(returnVal), nil
+}
+
+func (ctx *cipherCtx) getCtrlBytes(code, arg, expectsize int) ([]byte, error) {
+ returnVal := make([]byte, expectsize)
+ res := C.EVP_CIPHER_CTX_ctrl(ctx.ctx, C.int(code), C.int(arg),
+ unsafe.Pointer(&returnVal[0]))
+ if res != 1 {
+ return nil, fmt.Errorf("failed to get code %d with arg %d [result %d]",
+ code, arg, res)
+ }
+ return returnVal, nil
+}
+
+type EncryptionCipherCtx interface {
+ CipherCtx
+
+ // pass in plaintext, get back ciphertext. can be called
+ // multiple times as needed
+ EncryptUpdate(input []byte) ([]byte, error)
+
+ // call after all plaintext has been passed in; may return
+ // additional ciphertext if needed to finish off a block
+ // or extra padding information
+ EncryptFinal() ([]byte, error)
+}
+
+type DecryptionCipherCtx interface {
+ CipherCtx
+
+ // pass in ciphertext, get back plaintext. can be called
+ // multiple times as needed
+ DecryptUpdate(input []byte) ([]byte, error)
+
+ // call after all ciphertext has been passed in; may return
+ // additional plaintext if needed to finish off a block
+ DecryptFinal() ([]byte, error)
+}
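+
+// cbcEncryptExample is a hedged, illustrative sketch (not part of the
+// upstream API): it shows the Update/Final calling pattern described by the
+// interfaces above, using AES-256-CBC with a caller-supplied key and IV of
+// the correct sizes.
+func cbcEncryptExample(key, iv, plaintext []byte) ([]byte, error) {
+	cipher, err := GetCipherByName("aes-256-cbc")
+	if err != nil {
+		return nil, err
+	}
+	ectx, err := NewEncryptionCipherCtx(cipher, nil, key, iv)
+	if err != nil {
+		return nil, err
+	}
+	// Feed plaintext in one or more Update calls, then finalize to flush
+	// any buffered block plus padding.
+	ct, err := ectx.EncryptUpdate(plaintext)
+	if err != nil {
+		return nil, err
+	}
+	final, err := ectx.EncryptFinal()
+	if err != nil {
+		return nil, err
+	}
+	return append(ct, final...), nil
+}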
+
+type encryptionCipherCtx struct {
+ *cipherCtx
+}
+
+type decryptionCipherCtx struct {
+ *cipherCtx
+}
+
+func newEncryptionCipherCtx(c *Cipher, e *Engine, key, iv []byte) (
+ *encryptionCipherCtx, error) {
+ if c == nil {
+ return nil, errors.New("null cipher not allowed")
+ }
+ ctx, err := newCipherCtx()
+ if err != nil {
+ return nil, err
+ }
+ var eptr *C.ENGINE
+ if e != nil {
+ eptr = e.e
+ }
+ if 1 != C.EVP_EncryptInit_ex(ctx.ctx, c.ptr, eptr, nil, nil) {
+ return nil, errors.New("failed to initialize cipher context")
+ }
+ err = ctx.applyKeyAndIV(key, iv)
+ if err != nil {
+ return nil, err
+ }
+ return &encryptionCipherCtx{cipherCtx: ctx}, nil
+}
+
+func newDecryptionCipherCtx(c *Cipher, e *Engine, key, iv []byte) (
+ *decryptionCipherCtx, error) {
+ if c == nil {
+ return nil, errors.New("null cipher not allowed")
+ }
+ ctx, err := newCipherCtx()
+ if err != nil {
+ return nil, err
+ }
+ var eptr *C.ENGINE
+ if e != nil {
+ eptr = e.e
+ }
+ if 1 != C.EVP_DecryptInit_ex(ctx.ctx, c.ptr, eptr, nil, nil) {
+ return nil, errors.New("failed to initialize cipher context")
+ }
+ err = ctx.applyKeyAndIV(key, iv)
+ if err != nil {
+ return nil, err
+ }
+ return &decryptionCipherCtx{cipherCtx: ctx}, nil
+}
+
+func NewEncryptionCipherCtx(c *Cipher, e *Engine, key, iv []byte) (
+ EncryptionCipherCtx, error) {
+ return newEncryptionCipherCtx(c, e, key, iv)
+}
+
+func NewDecryptionCipherCtx(c *Cipher, e *Engine, key, iv []byte) (
+ DecryptionCipherCtx, error) {
+ return newDecryptionCipherCtx(c, e, key, iv)
+}
+
+func (ctx *encryptionCipherCtx) EncryptUpdate(input []byte) ([]byte, error) {
+ outbuf := make([]byte, len(input)+ctx.BlockSize())
+ outlen := C.int(len(outbuf))
+ res := C.EVP_EncryptUpdate(ctx.ctx, (*C.uchar)(&outbuf[0]), &outlen,
+ (*C.uchar)(&input[0]), C.int(len(input)))
+ if res != 1 {
+ return nil, fmt.Errorf("failed to encrypt [result %d]", res)
+ }
+ return outbuf[:outlen], nil
+}
+
+func (ctx *decryptionCipherCtx) DecryptUpdate(input []byte) ([]byte, error) {
+ outbuf := make([]byte, len(input)+ctx.BlockSize())
+ outlen := C.int(len(outbuf))
+ res := C.EVP_DecryptUpdate(ctx.ctx, (*C.uchar)(&outbuf[0]), &outlen,
+ (*C.uchar)(&input[0]), C.int(len(input)))
+ if res != 1 {
+ return nil, fmt.Errorf("failed to decrypt [result %d]", res)
+ }
+ return outbuf[:outlen], nil
+}
+
+func (ctx *encryptionCipherCtx) EncryptFinal() ([]byte, error) {
+ outbuf := make([]byte, ctx.BlockSize())
+ var outlen C.int
+ if 1 != C.EVP_EncryptFinal_ex(ctx.ctx, (*C.uchar)(&outbuf[0]), &outlen) {
+ return nil, errors.New("encryption failed")
+ }
+ return outbuf[:outlen], nil
+}
+
+func (ctx *decryptionCipherCtx) DecryptFinal() ([]byte, error) {
+ outbuf := make([]byte, ctx.BlockSize())
+ var outlen C.int
+ if 1 != C.EVP_DecryptFinal_ex(ctx.ctx, (*C.uchar)(&outbuf[0]), &outlen) {
+		// this may mean the tag failed to verify; all previously
+		// returned plaintext must be considered faked and invalid
+ return nil, errors.New("decryption failed")
+ }
+ return outbuf[:outlen], nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ciphers_test.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ciphers_test.go
new file mode 100644
index 00000000000..d1d430b1e15
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ciphers_test.go
@@ -0,0 +1,307 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build !darwin
+
+package openssl
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+ "testing"
+)
+
+func expectError(t *testing.T, err error, msg string) {
+ if err == nil {
+ t.Fatalf("Expected error containing %#v, but got none", msg)
+ }
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("Expected error containing %#v, but got %s", msg, err)
+ }
+}
+
+func TestBadInputs(t *testing.T) {
+ _, err := NewGCMEncryptionCipherCtx(256, nil,
+ []byte("abcdefghijklmnopqrstuvwxyz"), nil)
+ expectError(t, err, "bad key size")
+ _, err = NewGCMEncryptionCipherCtx(128, nil,
+ []byte("abcdefghijklmnopqrstuvwxyz"), nil)
+ expectError(t, err, "bad key size")
+ _, err = NewGCMEncryptionCipherCtx(200, nil,
+ []byte("abcdefghijklmnopqrstuvwxy"), nil)
+ expectError(t, err, "unknown block size")
+ c, err := GetCipherByName("AES-128-CBC")
+ if err != nil {
+ t.Fatal("Could not look up AES-128-CBC")
+ }
+ _, err = NewEncryptionCipherCtx(c, nil, []byte("abcdefghijklmnop"),
+ []byte("abc"))
+ expectError(t, err, "bad IV size")
+}
+
+func doEncryption(key, iv, aad, plaintext []byte, blocksize, bufsize int) (
+ ciphertext, tag []byte, err error) {
+ ectx, err := NewGCMEncryptionCipherCtx(blocksize, nil, key, iv)
+ if err != nil {
+ return nil, nil, fmt.Errorf("Failed making GCM encryption ctx: %s", err)
+ }
+ err = ectx.ExtraData(aad)
+ if err != nil {
+ return nil, nil, fmt.Errorf("Failed to add authenticated data: %s",
+ err)
+ }
+ plainb := bytes.NewBuffer(plaintext)
+ cipherb := new(bytes.Buffer)
+ for plainb.Len() > 0 {
+ moar, err := ectx.EncryptUpdate(plainb.Next(bufsize))
+ if err != nil {
+ return nil, nil, fmt.Errorf("Failed to perform an encryption: %s",
+ err)
+ }
+ cipherb.Write(moar)
+ }
+ moar, err := ectx.EncryptFinal()
+ if err != nil {
+ return nil, nil, fmt.Errorf("Failed to finalize encryption: %s", err)
+ }
+ cipherb.Write(moar)
+ tag, err = ectx.GetTag()
+ if err != nil {
+ return nil, nil, fmt.Errorf("Failed to get GCM tag: %s", err)
+ }
+ return cipherb.Bytes(), tag, nil
+}
+
+func doDecryption(key, iv, aad, ciphertext, tag []byte, blocksize,
+ bufsize int) (plaintext []byte, err error) {
+ dctx, err := NewGCMDecryptionCipherCtx(blocksize, nil, key, iv)
+ if err != nil {
+ return nil, fmt.Errorf("Failed making GCM decryption ctx: %s", err)
+ }
+ aadbuf := bytes.NewBuffer(aad)
+ for aadbuf.Len() > 0 {
+ err = dctx.ExtraData(aadbuf.Next(bufsize))
+ if err != nil {
+ return nil, fmt.Errorf("Failed to add authenticated data: %s", err)
+ }
+ }
+ plainb := new(bytes.Buffer)
+ cipherb := bytes.NewBuffer(ciphertext)
+ for cipherb.Len() > 0 {
+ moar, err := dctx.DecryptUpdate(cipherb.Next(bufsize))
+ if err != nil {
+ return nil, fmt.Errorf("Failed to perform a decryption: %s", err)
+ }
+ plainb.Write(moar)
+ }
+ err = dctx.SetTag(tag)
+ if err != nil {
+ return nil, fmt.Errorf("Failed to set expected GCM tag: %s", err)
+ }
+ moar, err := dctx.DecryptFinal()
+ if err != nil {
+ return nil, fmt.Errorf("Failed to finalize decryption: %s", err)
+ }
+ plainb.Write(moar)
+ return plainb.Bytes(), nil
+}
+
+func checkEqual(t *testing.T, output []byte, original string) {
+ output_s := string(output)
+ if output_s != original {
+ t.Fatalf("output != original! %#v != %#v", output_s, original)
+ }
+}
+
+func TestGCM(t *testing.T) {
+ aad := []byte("foo bar baz")
+ key := []byte("nobody can guess this i'm sure..") // len=32
+ iv := []byte("just a bunch of bytes")
+ plaintext := "Long long ago, in a land far away..."
+
+ blocksizes_to_test := []int{256, 192, 128}
+
+ // best for this to have no common factors with blocksize, so that the
+ // buffering layer inside the CIPHER_CTX gets exercised
+ bufsize := 33
+
+ if len(plaintext)%8 == 0 {
+ plaintext += "!" // make sure padding is exercised
+ }
+
+ for _, bsize := range blocksizes_to_test {
+ subkey := key[:bsize/8]
+ ciphertext, tag, err := doEncryption(subkey, iv, aad, []byte(plaintext),
+ bsize, bufsize)
+ if err != nil {
+ t.Fatalf("Encryption with b=%d: %s", bsize, err)
+ }
+ plaintext_out, err := doDecryption(subkey, iv, aad, ciphertext, tag,
+ bsize, bufsize)
+ if err != nil {
+ t.Fatalf("Decryption with b=%d: %s", bsize, err)
+ }
+ checkEqual(t, plaintext_out, plaintext)
+ }
+}
+
+func TestGCMWithNoAAD(t *testing.T) {
+ key := []byte("0000111122223333")
+ iv := []byte("9999")
+ plaintext := "ABORT ABORT ABORT DANGAR"
+
+ ciphertext, tag, err := doEncryption(key, iv, nil, []byte(plaintext),
+ 128, 32)
+ if err != nil {
+ t.Fatal("Encryption failure:", err)
+ }
+ plaintext_out, err := doDecryption(key, iv, nil, ciphertext, tag, 128, 129)
+ if err != nil {
+ t.Fatal("Decryption failure:", err)
+ }
+ checkEqual(t, plaintext_out, plaintext)
+}
+
+func TestBadTag(t *testing.T) {
+ key := []byte("abcdefghijklmnop")
+ iv := []byte("v7239qjfv3qr793fuaj")
+ plaintext := "The red rooster has flown the coop I REPEAT" +
+ "the red rooster has flown the coop!!1!"
+
+ ciphertext, tag, err := doEncryption(key, iv, nil, []byte(plaintext),
+ 128, 32)
+ if err != nil {
+ t.Fatal("Encryption failure:", err)
+ }
+ // flip the last bit
+ tag[len(tag)-1] ^= 1
+ plaintext_out, err := doDecryption(key, iv, nil, ciphertext, tag, 128, 129)
+ if err == nil {
+ t.Fatal("Expected error for bad tag, but got none")
+ }
+ // flip it back, try again just to make sure
+ tag[len(tag)-1] ^= 1
+ plaintext_out, err = doDecryption(key, iv, nil, ciphertext, tag, 128, 129)
+ if err != nil {
+ t.Fatal("Decryption failure:", err)
+ }
+ checkEqual(t, plaintext_out, plaintext)
+}
+
+func TestBadCiphertext(t *testing.T) {
+ key := []byte("hard boiled eggs & bacon")
+ iv := []byte("x") // it's not a very /good/ IV, is it
+ aad := []byte("mu")
+ plaintext := "Roger roger bingo charlie, we have a niner fourteen tango"
+
+ ciphertext, tag, err := doEncryption(key, iv, aad, []byte(plaintext),
+ 192, 1)
+ if err != nil {
+ t.Fatal("Encryption failure:", err)
+ }
+ // flip the last bit
+ ciphertext[len(ciphertext)-1] ^= 1
+ plaintext_out, err := doDecryption(key, iv, aad, ciphertext, tag, 192, 192)
+ if err == nil {
+ t.Fatal("Expected error for bad ciphertext, but got none")
+ }
+ // flip it back, try again just to make sure
+ ciphertext[len(ciphertext)-1] ^= 1
+ plaintext_out, err = doDecryption(key, iv, aad, ciphertext, tag, 192, 192)
+ if err != nil {
+ t.Fatal("Decryption failure:", err)
+ }
+ checkEqual(t, plaintext_out, plaintext)
+}
+
+func TestBadAAD(t *testing.T) {
+ key := []byte("Ive got a lovely buncha coconuts")
+ iv := []byte("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab")
+ aad := []byte("Hi i am a plain")
+ plaintext := "Whatever."
+
+ ciphertext, tag, err := doEncryption(key, iv, aad, []byte(plaintext),
+ 256, 256)
+ if err != nil {
+ t.Fatal("Encryption failure:", err)
+ }
+ // flip the last bit
+ aad[len(aad)-1] ^= 1
+ plaintext_out, err := doDecryption(key, iv, aad, ciphertext, tag, 256, 256)
+ if err == nil {
+ t.Fatal("Expected error for bad AAD, but got none")
+ }
+ // flip it back, try again just to make sure
+ aad[len(aad)-1] ^= 1
+ plaintext_out, err = doDecryption(key, iv, aad, ciphertext, tag, 256, 256)
+ if err != nil {
+ t.Fatal("Decryption failure:", err)
+ }
+ checkEqual(t, plaintext_out, plaintext)
+}
+
+func TestNonAuthenticatedEncryption(t *testing.T) {
+ key := []byte("never gonna give you up, never g")
+ iv := []byte("onna let you dow")
+ plaintext1 := "n, never gonna run around"
+ plaintext2 := " and desert you"
+
+ cipher, err := GetCipherByName("aes-256-cbc")
+ if err != nil {
+ t.Fatal("Could not get cipher: ", err)
+ }
+
+ eCtx, err := NewEncryptionCipherCtx(cipher, nil, key, iv)
+ if err != nil {
+ t.Fatal("Could not create encryption context: ", err)
+ }
+ cipherbytes, err := eCtx.EncryptUpdate([]byte(plaintext1))
+ if err != nil {
+ t.Fatal("EncryptUpdate(plaintext1) failure: ", err)
+ }
+ ciphertext := string(cipherbytes)
+ cipherbytes, err = eCtx.EncryptUpdate([]byte(plaintext2))
+ if err != nil {
+ t.Fatal("EncryptUpdate(plaintext2) failure: ", err)
+ }
+ ciphertext += string(cipherbytes)
+ cipherbytes, err = eCtx.EncryptFinal()
+ if err != nil {
+ t.Fatal("EncryptFinal() failure: ", err)
+ }
+ ciphertext += string(cipherbytes)
+
+ dCtx, err := NewDecryptionCipherCtx(cipher, nil, key, iv)
+ if err != nil {
+ t.Fatal("Could not create decryption context: ", err)
+ }
+ plainbytes, err := dCtx.DecryptUpdate([]byte(ciphertext[:15]))
+ if err != nil {
+ t.Fatal("DecryptUpdate(ciphertext part 1) failure: ", err)
+ }
+ plainOutput := string(plainbytes)
+ plainbytes, err = dCtx.DecryptUpdate([]byte(ciphertext[15:]))
+ if err != nil {
+ t.Fatal("DecryptUpdate(ciphertext part 2) failure: ", err)
+ }
+ plainOutput += string(plainbytes)
+ plainbytes, err = dCtx.DecryptFinal()
+ if err != nil {
+ t.Fatal("DecryptFinal() failure: ", err)
+ }
+ plainOutput += string(plainbytes)
+
+ checkEqual(t, []byte(plainOutput), plaintext1+plaintext2)
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/conn.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/conn.go
new file mode 100644
index 00000000000..afc73a50ae3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/conn.go
@@ -0,0 +1,625 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+/*
+#include <stdlib.h>
+#include <openssl/ssl.h>
+#include <openssl/conf.h>
+#include <openssl/err.h>
+
+int sk_X509_num_not_a_macro(STACK_OF(X509) *sk) { return sk_X509_num(sk); }
+X509 *sk_X509_value_not_a_macro(STACK_OF(X509)* sk, int i) {
+ return sk_X509_value(sk, i);
+}
+const char * SSL_get_cipher_name_not_a_macro(const SSL *ssl) {
+ return SSL_get_cipher_name(ssl);
+}
+static int SSL_session_reused_not_a_macro(SSL *ssl) {
+ return SSL_session_reused(ssl);
+}
+*/
+import "C"
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "net"
+ "runtime"
+ "sync"
+ "time"
+ "unsafe"
+
+ "github.com/spacemonkeygo/openssl/utils"
+)
+
+var (
+ zeroReturn = errors.New("zero return")
+ wantRead = errors.New("want read")
+ wantWrite = errors.New("want write")
+ tryAgain = errors.New("try again")
+)
+
+type Conn struct {
+ conn net.Conn
+ ssl *C.SSL
+ ctx *Ctx // for gc
+ into_ssl *readBio
+ from_ssl *writeBio
+ is_shutdown bool
+ mtx sync.Mutex
+ want_read_future *utils.Future
+}
+
+type VerifyResult int
+
+const (
+ Ok VerifyResult = C.X509_V_OK
+ UnableToGetIssuerCert VerifyResult = C.X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT
+ UnableToGetCrl VerifyResult = C.X509_V_ERR_UNABLE_TO_GET_CRL
+ UnableToDecryptCertSignature VerifyResult = C.X509_V_ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE
+ UnableToDecryptCrlSignature VerifyResult = C.X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE
+ UnableToDecodeIssuerPublicKey VerifyResult = C.X509_V_ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY
+ CertSignatureFailure VerifyResult = C.X509_V_ERR_CERT_SIGNATURE_FAILURE
+ CrlSignatureFailure VerifyResult = C.X509_V_ERR_CRL_SIGNATURE_FAILURE
+ CertNotYetValid VerifyResult = C.X509_V_ERR_CERT_NOT_YET_VALID
+ CertHasExpired VerifyResult = C.X509_V_ERR_CERT_HAS_EXPIRED
+ CrlNotYetValid VerifyResult = C.X509_V_ERR_CRL_NOT_YET_VALID
+ CrlHasExpired VerifyResult = C.X509_V_ERR_CRL_HAS_EXPIRED
+ ErrorInCertNotBeforeField VerifyResult = C.X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD
+ ErrorInCertNotAfterField VerifyResult = C.X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD
+ ErrorInCrlLastUpdateField VerifyResult = C.X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD
+ ErrorInCrlNextUpdateField VerifyResult = C.X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD
+ OutOfMem VerifyResult = C.X509_V_ERR_OUT_OF_MEM
+ DepthZeroSelfSignedCert VerifyResult = C.X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT
+ SelfSignedCertInChain VerifyResult = C.X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN
+ UnableToGetIssuerCertLocally VerifyResult = C.X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY
+ UnableToVerifyLeafSignature VerifyResult = C.X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE
+ CertChainTooLong VerifyResult = C.X509_V_ERR_CERT_CHAIN_TOO_LONG
+ CertRevoked VerifyResult = C.X509_V_ERR_CERT_REVOKED
+ InvalidCa VerifyResult = C.X509_V_ERR_INVALID_CA
+ PathLengthExceeded VerifyResult = C.X509_V_ERR_PATH_LENGTH_EXCEEDED
+ InvalidPurpose VerifyResult = C.X509_V_ERR_INVALID_PURPOSE
+ CertUntrusted VerifyResult = C.X509_V_ERR_CERT_UNTRUSTED
+ CertRejected VerifyResult = C.X509_V_ERR_CERT_REJECTED
+ SubjectIssuerMismatch VerifyResult = C.X509_V_ERR_SUBJECT_ISSUER_MISMATCH
+ AkidSkidMismatch VerifyResult = C.X509_V_ERR_AKID_SKID_MISMATCH
+ AkidIssuerSerialMismatch VerifyResult = C.X509_V_ERR_AKID_ISSUER_SERIAL_MISMATCH
+ KeyusageNoCertsign VerifyResult = C.X509_V_ERR_KEYUSAGE_NO_CERTSIGN
+ UnableToGetCrlIssuer VerifyResult = C.X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER
+ UnhandledCriticalExtension VerifyResult = C.X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION
+ KeyusageNoCrlSign VerifyResult = C.X509_V_ERR_KEYUSAGE_NO_CRL_SIGN
+ UnhandledCriticalCrlExtension VerifyResult = C.X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION
+ InvalidNonCa VerifyResult = C.X509_V_ERR_INVALID_NON_CA
+ ProxyPathLengthExceeded VerifyResult = C.X509_V_ERR_PROXY_PATH_LENGTH_EXCEEDED
+ KeyusageNoDigitalSignature VerifyResult = C.X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE
+ ProxyCertificatesNotAllowed VerifyResult = C.X509_V_ERR_PROXY_CERTIFICATES_NOT_ALLOWED
+ InvalidExtension VerifyResult = C.X509_V_ERR_INVALID_EXTENSION
+ InvalidPolicyExtension VerifyResult = C.X509_V_ERR_INVALID_POLICY_EXTENSION
+ NoExplicitPolicy VerifyResult = C.X509_V_ERR_NO_EXPLICIT_POLICY
+ UnnestedResource VerifyResult = C.X509_V_ERR_UNNESTED_RESOURCE
+ ApplicationVerification VerifyResult = C.X509_V_ERR_APPLICATION_VERIFICATION
+)
+
+func newSSL(ctx *C.SSL_CTX) (*C.SSL, error) {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ ssl := C.SSL_new(ctx)
+ if ssl == nil {
+ return nil, errorFromErrorQueue()
+ }
+ return ssl, nil
+}
+
+func newConn(conn net.Conn, ctx *Ctx) (*Conn, error) {
+ ssl, err := newSSL(ctx.ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ into_ssl := &readBio{}
+ from_ssl := &writeBio{}
+
+ if ctx.GetMode()&ReleaseBuffers > 0 {
+ into_ssl.release_buffers = true
+ from_ssl.release_buffers = true
+ }
+
+ into_ssl_cbio := into_ssl.MakeCBIO()
+ from_ssl_cbio := from_ssl.MakeCBIO()
+ if into_ssl_cbio == nil || from_ssl_cbio == nil {
+ // these frees are null safe
+ C.BIO_free(into_ssl_cbio)
+ C.BIO_free(from_ssl_cbio)
+ C.SSL_free(ssl)
+ return nil, errors.New("failed to allocate memory BIO")
+ }
+
+ // the ssl object takes ownership of these objects now
+ C.SSL_set_bio(ssl, into_ssl_cbio, from_ssl_cbio)
+
+ c := &Conn{
+ conn: conn,
+ ssl: ssl,
+ ctx: ctx,
+ into_ssl: into_ssl,
+ from_ssl: from_ssl}
+ runtime.SetFinalizer(c, func(c *Conn) {
+ c.into_ssl.Disconnect(into_ssl_cbio)
+ c.from_ssl.Disconnect(from_ssl_cbio)
+ C.SSL_free(c.ssl)
+ })
+ return c, nil
+}
+
+// Client wraps an existing stream connection and puts it in the connect state
+// for any subsequent handshakes.
+//
+// IMPORTANT NOTE: if you use this method instead of Dial to construct an SSL
+// connection, you are responsible for verifying the peer's hostname.
+// Otherwise, you are vulnerable to MITM attacks.
+//
+// Client also does not set up SNI for you like Dial does.
+//
+// Client connections probably won't work for you unless you set a verify
+// location or add some certs to the certificate store of the client context
+// you're using. This library is not nice enough to use the system certificate
+// store by default for you yet.
+func Client(conn net.Conn, ctx *Ctx) (*Conn, error) {
+ c, err := newConn(conn, ctx)
+ if err != nil {
+ return nil, err
+ }
+ C.SSL_set_connect_state(c.ssl)
+ return c, nil
+}
+
+// Server wraps an existing stream connection and puts it in the accept state
+// for any subsequent handshakes.
+func Server(conn net.Conn, ctx *Ctx) (*Conn, error) {
+ c, err := newConn(conn, ctx)
+ if err != nil {
+ return nil, err
+ }
+ C.SSL_set_accept_state(c.ssl)
+ return c, nil
+}
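+
+// clientHandshakeExample is a hedged, illustrative sketch (not part of the
+// upstream API): it wraps an already-established TCP connection in a client
+// Conn, performs the handshake, and verifies the peer hostname as the Client
+// documentation above requires. addr and host are placeholders supplied by
+// the caller.
+func clientHandshakeExample(ctx *Ctx, addr, host string) (*Conn, error) {
+	raw, err := net.Dial("tcp", addr)
+	if err != nil {
+		return nil, err
+	}
+	conn, err := Client(raw, ctx)
+	if err != nil {
+		raw.Close()
+		return nil, err
+	}
+	if err := conn.Handshake(); err != nil {
+		conn.Close()
+		return nil, err
+	}
+	// Client does not verify the peer for us; check the hostname explicitly.
+	if err := conn.VerifyHostname(host); err != nil {
+		conn.Close()
+		return nil, err
+	}
+	return conn, nil
+}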
+
+func (c *Conn) CurrentCipher() (string, error) {
+ p := C.SSL_get_cipher_name_not_a_macro(c.ssl)
+ if p == nil {
+ return "", errors.New("Session not established")
+ }
+
+ return C.GoString(p), nil
+}
+
+func (c *Conn) fillInputBuffer() error {
+ for {
+ n, err := c.into_ssl.ReadFromOnce(c.conn)
+ if n == 0 && err == nil {
+ continue
+ }
+ if err == io.EOF {
+ c.into_ssl.MarkEOF()
+ return c.Close()
+ }
+ return err
+ }
+}
+
+func (c *Conn) flushOutputBuffer() error {
+ _, err := c.from_ssl.WriteTo(c.conn)
+ return err
+}
+
+func (c *Conn) getErrorHandler(rv C.int, errno error) func() error {
+ errcode := C.SSL_get_error(c.ssl, rv)
+ switch errcode {
+ case C.SSL_ERROR_ZERO_RETURN:
+ return func() error {
+ c.Close()
+ return io.ErrUnexpectedEOF
+ }
+ case C.SSL_ERROR_WANT_READ:
+ go c.flushOutputBuffer()
+ if c.want_read_future != nil {
+ want_read_future := c.want_read_future
+ return func() error {
+ _, err := want_read_future.Get()
+ return err
+ }
+ }
+ c.want_read_future = utils.NewFuture()
+ want_read_future := c.want_read_future
+ return func() (err error) {
+ defer func() {
+ c.mtx.Lock()
+ c.want_read_future = nil
+ c.mtx.Unlock()
+ want_read_future.Set(nil, err)
+ }()
+ err = c.fillInputBuffer()
+ if err != nil {
+ return err
+ }
+ return tryAgain
+ }
+ case C.SSL_ERROR_WANT_WRITE:
+ return func() error {
+ err := c.flushOutputBuffer()
+ if err != nil {
+ return err
+ }
+ return tryAgain
+ }
+ case C.SSL_ERROR_SYSCALL:
+ var err error
+ if C.ERR_peek_error() == 0 {
+ switch rv {
+ case 0:
+ err = errors.New("protocol-violating EOF")
+ case -1:
+ err = errno
+ default:
+ err = errorFromErrorQueue()
+ }
+ } else {
+ err = errorFromErrorQueue()
+ }
+ return func() error { return err }
+ default:
+ err := errorFromErrorQueue()
+ return func() error { return err }
+ }
+}
+
+func (c *Conn) handleError(errcb func() error) error {
+ if errcb != nil {
+ return errcb()
+ }
+ return nil
+}
+
+func (c *Conn) handshake() func() error {
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+ if c.is_shutdown {
+ return func() error { return io.ErrUnexpectedEOF }
+ }
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ rv, errno := C.SSL_do_handshake(c.ssl)
+ if rv > 0 {
+ return nil
+ }
+ return c.getErrorHandler(rv, errno)
+}
+
+// Handshake performs an SSL handshake. If a handshake is not manually
+// triggered, it will run before the first I/O on the encrypted stream.
+func (c *Conn) Handshake() error {
+ err := tryAgain
+ for err == tryAgain {
+ err = c.handleError(c.handshake())
+ }
+ go c.flushOutputBuffer()
+ return err
+}
+
+// PeerCertificate returns the Certificate of the peer with which you're
+// communicating. Only valid after a handshake.
+func (c *Conn) PeerCertificate() (*Certificate, error) {
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+ if c.is_shutdown {
+ return nil, errors.New("connection closed")
+ }
+ x := C.SSL_get_peer_certificate(c.ssl)
+ if x == nil {
+ return nil, errors.New("no peer certificate found")
+ }
+ cert := &Certificate{x: x}
+ runtime.SetFinalizer(cert, func(cert *Certificate) {
+ C.X509_free(cert.x)
+ })
+ return cert, nil
+}
+
+// PeerCertificateChain returns the certificate chain of the peer. If called on
+// the client side, the stack also contains the peer's certificate; if called
+// on the server side, the peer's certificate must be obtained separately using
+// PeerCertificate.
+func (c *Conn) PeerCertificateChain() (rv []*Certificate, err error) {
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+ if c.is_shutdown {
+ return nil, errors.New("connection closed")
+ }
+ sk := C.SSL_get_peer_cert_chain(c.ssl)
+ if sk == nil {
+ return nil, errors.New("no peer certificates found")
+ }
+ sk_num := int(C.sk_X509_num_not_a_macro(sk))
+ rv = make([]*Certificate, 0, sk_num)
+ for i := 0; i < sk_num; i++ {
+ x := C.sk_X509_value_not_a_macro(sk, C.int(i))
+ // ref holds on to the underlying connection memory so we don't need to
+ // worry about incrementing refcounts manually or freeing the X509
+ rv = append(rv, &Certificate{x: x, ref: c})
+ }
+ return rv, nil
+}
+
+// GetVerifyResults returns the result of the peer certificate verification.
+// SSL_get_verify_result() returns the result of the verification of the X509
+// certificate presented by the peer, if any. See
+// https://www.openssl.org/docs/ssl/SSL_get_verify_result.html
+func (c *Conn) GetVerifyResults() error {
+ result := C.SSL_get_verify_result(c.ssl)
+ if int(result) != 0 {
+ return errors.New(C.GoString(
+ C.X509_verify_cert_error_string(result)))
+ }
+ return nil
+}
+
+type ConnectionState struct {
+ Certificate *Certificate
+ CertificateError error
+ CertificateChain []*Certificate
+ CertificateChainError error
+ SessionReused bool
+}
+
+func (c *Conn) ConnectionState() (rv ConnectionState) {
+ rv.Certificate, rv.CertificateError = c.PeerCertificate()
+ rv.CertificateChain, rv.CertificateChainError = c.PeerCertificateChain()
+ rv.SessionReused = c.SessionReused()
+ return
+}
+
+func (c *Conn) shutdown() func() error {
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ rv, errno := C.SSL_shutdown(c.ssl)
+ if rv > 0 {
+ return nil
+ }
+ if rv == 0 {
+ // The OpenSSL docs say that in this case, the shutdown is not
+ // finished, and we should call SSL_shutdown() a second time, if a
+ // bidirectional shutdown is going to be performed. Further, the
+ // output of SSL_get_error may be misleading, as an erroneous
+ // SSL_ERROR_SYSCALL may be flagged even though no error occurred.
+		// So, TODO: revisit bidirectional shutdown, possibly trying again.
+ // Note: some broken clients won't engage in bidirectional shutdown
+ // without tickling them to close by sending a TCP_FIN packet, or
+ // shutting down the write-side of the connection.
+ return nil
+ } else {
+ return c.getErrorHandler(rv, errno)
+ }
+}
+
+func (c *Conn) shutdownLoop() error {
+ err := tryAgain
+ shutdown_tries := 0
+ for err == tryAgain {
+ shutdown_tries = shutdown_tries + 1
+ err = c.handleError(c.shutdown())
+ if err == nil {
+ return c.flushOutputBuffer()
+ }
+ if err == tryAgain && shutdown_tries >= 2 {
+ return errors.New("shutdown requested a third time?")
+ }
+ }
+ if err == io.ErrUnexpectedEOF {
+ err = nil
+ }
+ return err
+}
+
+// Close shuts down the SSL connection and closes the underlying wrapped
+// connection.
+func (c *Conn) Close() error {
+ c.mtx.Lock()
+ if c.is_shutdown {
+ c.mtx.Unlock()
+ return nil
+ }
+ c.is_shutdown = true
+ c.mtx.Unlock()
+ var errs utils.ErrorGroup
+ errs.Add(c.shutdownLoop())
+ errs.Add(c.conn.Close())
+ return errs.Finalize()
+}
+
+func (c *Conn) read(b []byte) (int, func() error) {
+ if len(b) == 0 {
+ return 0, nil
+ }
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+ if c.is_shutdown {
+ return 0, func() error { return io.EOF }
+ }
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ rv, errno := C.SSL_read(c.ssl, unsafe.Pointer(&b[0]), C.int(len(b)))
+ if rv > 0 {
+ return int(rv), nil
+ }
+ return 0, c.getErrorHandler(rv, errno)
+}
+
+// Read reads up to len(b) bytes into b. It returns the number of bytes read
+// and an error if applicable. io.EOF is returned when the caller can expect
+// to see no more data.
+func (c *Conn) Read(b []byte) (n int, err error) {
+ if len(b) == 0 {
+ return 0, nil
+ }
+ err = tryAgain
+ for err == tryAgain {
+ n, errcb := c.read(b)
+ err = c.handleError(errcb)
+ if err == nil {
+ go c.flushOutputBuffer()
+ return n, nil
+ }
+ if err == io.ErrUnexpectedEOF {
+ err = io.EOF
+ }
+ }
+ return 0, err
+}
+
+func (c *Conn) write(b []byte) (int, func() error) {
+ if len(b) == 0 {
+ return 0, nil
+ }
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+ if c.is_shutdown {
+ err := errors.New("connection closed")
+ return 0, func() error { return err }
+ }
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ rv, errno := C.SSL_write(c.ssl, unsafe.Pointer(&b[0]), C.int(len(b)))
+ if rv > 0 {
+ return int(rv), nil
+ }
+ return 0, c.getErrorHandler(rv, errno)
+}
+
+// Write will encrypt the contents of b and write it to the underlying stream.
+// Performance will be vastly improved if the size of b is a multiple of
+// SSLRecordSize.
+func (c *Conn) Write(b []byte) (written int, err error) {
+ if len(b) == 0 {
+ return 0, nil
+ }
+ err = tryAgain
+ for err == tryAgain {
+ n, errcb := c.write(b)
+ err = c.handleError(errcb)
+ if err == nil {
+ return n, c.flushOutputBuffer()
+ }
+ }
+ return 0, err
+}
+
+// VerifyHostname pulls the PeerCertificate and calls VerifyHostname on the
+// certificate.
+func (c *Conn) VerifyHostname(host string) error {
+ cert, err := c.PeerCertificate()
+ if err != nil {
+ return err
+ }
+ return cert.VerifyHostname(host)
+}
+
+// LocalAddr returns the underlying connection's local address
+func (c *Conn) LocalAddr() net.Addr {
+ return c.conn.LocalAddr()
+}
+
+// RemoteAddr returns the underlying connection's remote address
+func (c *Conn) RemoteAddr() net.Addr {
+ return c.conn.RemoteAddr()
+}
+
+// SetDeadline calls SetDeadline on the underlying connection.
+func (c *Conn) SetDeadline(t time.Time) error {
+ return c.conn.SetDeadline(t)
+}
+
+// SetReadDeadline calls SetReadDeadline on the underlying connection.
+func (c *Conn) SetReadDeadline(t time.Time) error {
+ return c.conn.SetReadDeadline(t)
+}
+
+// SetWriteDeadline calls SetWriteDeadline on the underlying connection.
+func (c *Conn) SetWriteDeadline(t time.Time) error {
+ return c.conn.SetWriteDeadline(t)
+}
+
+func (c *Conn) UnderlyingConn() net.Conn {
+ return c.conn
+}
+
+func (c *Conn) VerifyResult() VerifyResult {
+ return VerifyResult(C.SSL_get_verify_result(c.ssl))
+}
+
+func (c *Conn) SessionReused() bool {
+ return C.SSL_session_reused_not_a_macro(c.ssl) == 1
+}
+
+func (c *Conn) GetSession() ([]byte, error) {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+
+ // get1 increases the refcount of the session, so we have to free it.
+ session := (*C.SSL_SESSION)(C.SSL_get1_session(c.ssl))
+ if session == nil {
+ return nil, errors.New("failed to get session")
+ }
+ defer C.SSL_SESSION_free(session)
+
+ // get the size of the encoding
+ slen := C.i2d_SSL_SESSION(session, nil)
+
+ buf := (*C.uchar)(C.malloc(C.size_t(slen)))
+ defer C.free(unsafe.Pointer(buf))
+
+ // this modifies the value of buf (seriously), so we have to pass in a temp
+ // var so that we can actually read the bytes from buf.
+ tmp := buf
+ slen2 := C.i2d_SSL_SESSION(session, &tmp)
+ if slen != slen2 {
+ return nil, errors.New("session had different lengths")
+ }
+
+ return C.GoBytes(unsafe.Pointer(buf), slen), nil
+}
+
+func (c *Conn) setSession(session []byte) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+
+ ptr := (*C.uchar)(&session[0])
+ s := C.d2i_SSL_SESSION(nil, &ptr, C.long(len(session)))
+ if s == nil {
+ return fmt.Errorf("unable to load session: %s", errorFromErrorQueue())
+ }
+ defer C.SSL_SESSION_free(s)
+
+ ret := C.SSL_set_session(c.ssl, s)
+ if ret != 1 {
+ return fmt.Errorf("unable to set session: %s", errorFromErrorQueue())
+ }
+ return nil
+}
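+
+// savedSessionExample is a hedged, illustrative sketch (not part of the
+// upstream API): after a successful handshake it serializes the negotiated
+// SSL session with GetSession. The returned bytes could later be fed back
+// through the unexported setSession above to attempt session resumption on
+// a new Conn.
+func savedSessionExample(c *Conn) ([]byte, error) {
+	if err := c.Handshake(); err != nil {
+		return nil, err
+	}
+	// GetSession returns an i2d (DER) encoding of the current session.
+	return c.GetSession()
+}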
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ctx.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ctx.go
new file mode 100644
index 00000000000..74422f290a3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ctx.go
@@ -0,0 +1,831 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+/*
+#include <openssl/crypto.h>
+#include <openssl/ssl.h>
+#include <openssl/err.h>
+#include <openssl/conf.h>
+#include <openssl/x509.h>
+
+static long SSL_CTX_set_options_not_a_macro(SSL_CTX* ctx, long options) {
+ return SSL_CTX_set_options(ctx, options);
+}
+
+static long SSL_CTX_clear_options_not_a_macro(SSL_CTX* ctx, long options) {
+ return SSL_CTX_clear_options(ctx, options);
+}
+
+static long SSL_CTX_get_options_not_a_macro(SSL_CTX* ctx) {
+ return SSL_CTX_get_options(ctx);
+}
+
+static long SSL_CTX_set_mode_not_a_macro(SSL_CTX* ctx, long modes) {
+ return SSL_CTX_set_mode(ctx, modes);
+}
+
+static long SSL_CTX_get_mode_not_a_macro(SSL_CTX* ctx) {
+ return SSL_CTX_get_mode(ctx);
+}
+
+static long SSL_CTX_set_session_cache_mode_not_a_macro(SSL_CTX* ctx, long modes) {
+ return SSL_CTX_set_session_cache_mode(ctx, modes);
+}
+
+static long SSL_CTX_sess_set_cache_size_not_a_macro(SSL_CTX* ctx, long t) {
+ return SSL_CTX_sess_set_cache_size(ctx, t);
+}
+
+static long SSL_CTX_sess_get_cache_size_not_a_macro(SSL_CTX* ctx) {
+ return SSL_CTX_sess_get_cache_size(ctx);
+}
+
+static long SSL_CTX_set_timeout_not_a_macro(SSL_CTX* ctx, long t) {
+ return SSL_CTX_set_timeout(ctx, t);
+}
+
+static long SSL_CTX_get_timeout_not_a_macro(SSL_CTX* ctx) {
+ return SSL_CTX_get_timeout(ctx);
+}
+
+static int CRYPTO_add_not_a_macro(int *pointer,int amount,int type) {
+ return CRYPTO_add(pointer, amount, type);
+}
+
+static long SSL_CTX_add_extra_chain_cert_not_a_macro(SSL_CTX* ctx, X509 *cert) {
+ return SSL_CTX_add_extra_chain_cert(ctx, cert);
+}
+
+#ifndef SSL_MODE_RELEASE_BUFFERS
+#define SSL_MODE_RELEASE_BUFFERS 0
+#endif
+
+#ifndef SSL_OP_NO_COMPRESSION
+#define SSL_OP_NO_COMPRESSION 0
+#endif
+
+static const SSL_METHOD *OUR_TLSv1_1_method() {
+#if OPENSSL_VERSION_NUMBER > 0x1000100fL && defined(TLS1_1_VERSION) && !defined(OPENSSL_SYSNAME_MACOSX)
+ return TLSv1_1_method();
+#else
+ return NULL;
+#endif
+}
+
+static const SSL_METHOD *OUR_TLSv1_2_method() {
+#if OPENSSL_VERSION_NUMBER > 0x1000100fL && defined(TLS1_2_VERSION) && !defined(OPENSSL_SYSNAME_MACOSX)
+ return TLSv1_2_method();
+#else
+ return NULL;
+#endif
+}
+
+#if defined SSL_CTRL_SET_TLSEXT_HOSTNAME
+ extern int sni_cb(SSL *ssl_conn, int *ad, void *arg);
+#endif
+
+extern int verify_cb(int ok, X509_STORE_CTX* store);
+
+typedef STACK_OF(X509_NAME) *STACK_OF_X509_NAME_not_a_macro;
+
+static void sk_X509_NAME_pop_free_not_a_macro(STACK_OF_X509_NAME_not_a_macro st) {
+ sk_X509_NAME_pop_free(st, X509_NAME_free);
+}
+
+extern int password_cb(char *buf, int size, int rwflag, void *password);
+*/
+import "C"
+
+import (
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "runtime"
+ "time"
+ "unsafe"
+
+ "github.com/spacemonkeygo/spacelog"
+)
+
+var (
+ ssl_ctx_idx = C.SSL_CTX_get_ex_new_index(0, nil, nil, nil, nil)
+
+ logger = spacelog.GetLogger()
+)
+
+type Ctx struct {
+ ctx *C.SSL_CTX
+ cert *Certificate
+ chain []*Certificate
+ key PrivateKey
+ verify_cb VerifyCallback
+ sni_cb TLSExtServernameCallback
+}
+
+//export get_ssl_ctx_idx
+func get_ssl_ctx_idx() C.int {
+ return ssl_ctx_idx
+}
+
+func newCtx(method *C.SSL_METHOD) (*Ctx, error) {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ ctx := C.SSL_CTX_new(method)
+ if ctx == nil {
+ return nil, errorFromErrorQueue()
+ }
+ c := &Ctx{ctx: ctx}
+ C.SSL_CTX_set_ex_data(ctx, get_ssl_ctx_idx(), unsafe.Pointer(c))
+ runtime.SetFinalizer(c, func(c *Ctx) {
+ C.SSL_CTX_free(c.ctx)
+ })
+ return c, nil
+}
+
+type SSLVersion int
+
+const (
+ SSLv3 SSLVersion = 0x02 // Vulnerable to "POODLE" attack.
+ TLSv1 SSLVersion = 0x03
+ TLSv1_1 SSLVersion = 0x04
+ TLSv1_2 SSLVersion = 0x05
+
+ // Make sure to disable SSLv2 and SSLv3 if you use this. SSLv3 is vulnerable
+ // to the "POODLE" attack, and SSLv2 is what, just don't even.
+ AnyVersion SSLVersion = 0x06
+)
+
+// NewCtxWithVersion creates an SSL context that is specific to the provided
+// SSL version. See http://www.openssl.org/docs/ssl/SSL_CTX_new.html for more.
+func NewCtxWithVersion(version SSLVersion) (*Ctx, error) {
+ var method *C.SSL_METHOD
+ switch version {
+ case TLSv1:
+ method = C.TLSv1_method()
+ case TLSv1_1:
+ method = C.OUR_TLSv1_1_method()
+ case TLSv1_2:
+ method = C.OUR_TLSv1_2_method()
+ case AnyVersion:
+ method = C.SSLv23_method()
+ }
+ if method == nil {
+ return nil, errors.New("unknown ssl/tls version")
+ }
+ return newCtx(method)
+}
+
+// NewCtx creates a context that supports any TLS version 1.0 and newer.
+func NewCtx() (*Ctx, error) {
+ c, err := NewCtxWithVersion(AnyVersion)
+ if err == nil {
+ c.SetOptions(NoSSLv2 | NoSSLv3)
+ }
+ return c, err
+}
+
+// NewCtxFromFiles calls NewCtx, loads the provided files, and configures the
+// context to use them.
+func NewCtxFromFiles(cert_file string, key_file string) (*Ctx, error) {
+ ctx, err := NewCtx()
+ if err != nil {
+ return nil, err
+ }
+
+ cert_bytes, err := ioutil.ReadFile(cert_file)
+ if err != nil {
+ return nil, err
+ }
+
+ certs := SplitPEM(cert_bytes)
+ if len(certs) == 0 {
+ return nil, fmt.Errorf("No PEM certificate found in '%s'", cert_file)
+ }
+ first, certs := certs[0], certs[1:]
+ cert, err := LoadCertificateFromPEM(first)
+ if err != nil {
+ return nil, err
+ }
+
+ err = ctx.UseCertificate(cert)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, pem := range certs {
+ cert, err := LoadCertificateFromPEM(pem)
+ if err != nil {
+ return nil, err
+ }
+ err = ctx.AddChainCertificate(cert)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ key_bytes, err := ioutil.ReadFile(key_file)
+ if err != nil {
+ return nil, err
+ }
+
+ key, err := LoadPrivateKeyFromPEM(key_bytes)
+ if err != nil {
+ return nil, err
+ }
+
+ err = ctx.UsePrivateKey(key)
+ if err != nil {
+ return nil, err
+ }
+
+ return ctx, nil
+}
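+
+// ctxFromFilesExample is a hedged, illustrative sketch (not part of the
+// upstream API): it builds a context from PEM-encoded certificate and key
+// files and then confirms that the key matches the certificate. The file
+// paths are placeholders supplied by the caller.
+func ctxFromFilesExample(certFile, keyFile string) (*Ctx, error) {
+	ctx, err := NewCtxFromFiles(certFile, keyFile)
+	if err != nil {
+		return nil, err
+	}
+	// CheckPrivateKey reports whether the loaded key and certificate agree.
+	if err := ctx.CheckPrivateKey(); err != nil {
+		return nil, err
+	}
+	return ctx, nil
+}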
+
+// EllipticCurve represents the ASN.1 OID of an elliptic curve.
+// see https://www.openssl.org/docs/apps/ecparam.html for a list of implemented curves.
+type EllipticCurve int
+
+const (
+ // P-256: X9.62/SECG curve over a 256 bit prime field
+ Prime256v1 EllipticCurve = C.NID_X9_62_prime256v1
+ // P-384: NIST/SECG curve over a 384 bit prime field
+ Secp384r1 EllipticCurve = C.NID_secp384r1
+)
+
+// UseCertificate configures the context to present the given certificate to
+// peers.
+func (c *Ctx) UseCertificate(cert *Certificate) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ c.cert = cert
+ if int(C.SSL_CTX_use_certificate(c.ctx, cert.x)) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
+
+// UseCertificateChainFile loads a certificate chain from cert_file into ctx.
+// The certificates must be in PEM format and must be sorted starting with the
+// subject's certificate (actual client or server certificate), followed by
+// intermediate CA certificates if applicable, and ending at the highest level
+// (root) CA. See
+// https://www.openssl.org/docs/ssl/SSL_CTX_use_certificate.html
+func (c *Ctx) UseCertificateChainFile(cert_file string) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ var c_cert_file *C.char
+ if cert_file != "" {
+ c_cert_file = C.CString(cert_file)
+ defer C.free(unsafe.Pointer(c_cert_file))
+ }
+ if int(C.SSL_CTX_use_certificate_chain_file(c.ctx, c_cert_file)) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
+
+// UsePrivateKeyFile loads the first private key found in key_file into the
+// context. The encoding of the key file must be given as one of the known
+// types FiletypePEM or FiletypeASN1.
+func (c *Ctx) UsePrivateKeyFile(key_file string, file_type Filetypes) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ var c_key_file *C.char
+ if key_file != "" {
+ c_key_file = C.CString(key_file)
+ defer C.free(unsafe.Pointer(c_key_file))
+ }
+ if int(C.SSL_CTX_use_PrivateKey_file(c.ctx, c_key_file, C.int(file_type))) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
+
+func (c *Ctx) UsePrivateKeyFileWithPassword(key_file string, file_type Filetypes, password string) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ var c_key_file *C.char
+
+ c_pwd := C.CString(password)
+ defer C.free(unsafe.Pointer(c_pwd))
+ C.SSL_CTX_set_default_passwd_cb_userdata(c.ctx, unsafe.Pointer(c_pwd))
+ C.SSL_CTX_set_default_passwd_cb(c.ctx, (*C.pem_password_cb)(C.password_cb))
+
+ if key_file != "" {
+ c_key_file = C.CString(key_file)
+ defer C.free(unsafe.Pointer(c_key_file))
+ }
+ if int(C.SSL_CTX_use_PrivateKey_file(c.ctx, c_key_file, C.int(file_type))) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
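+
+// encryptedKeyExample is a hedged, illustrative sketch (not part of the
+// upstream API): it loads a password-protected PEM key into a context.
+// FiletypePEM is the PEM file-type constant referred to in the comments
+// above; keyFile and password are placeholders supplied by the caller.
+func encryptedKeyExample(c *Ctx, keyFile, password string) error {
+	// The password callback registered by this method decrypts the key
+	// transparently while the file is being read.
+	return c.UsePrivateKeyFileWithPassword(keyFile, FiletypePEM, password)
+}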
+
+// CheckPrivateKey verifies that the private key agrees with the corresponding
+// public key in the certificate
+func (c *Ctx) CheckPrivateKey() error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ if int(C.SSL_CTX_check_private_key(c.ctx)) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
+
+type StackOfX509Name struct {
+ stack C.STACK_OF_X509_NAME_not_a_macro
+	// shared indicates whether we are the sole owner of this pointer, and
+	// therefore whether we should free the underlying data structure when
+	// this Go data structure goes out of scope
+ shared bool
+}
+
+// LoadClientCAFile reads certificates from file and returns a StackOfX509Name
+// with the subject names found. See
+// https://www.openssl.org/docs/ssl/SSL_load_client_CA_file.html
+func LoadClientCAFile(ca_file string) (*StackOfX509Name, error) {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ var c_ca_file *C.char
+ if ca_file != "" {
+ c_ca_file = C.CString(ca_file)
+ defer C.free(unsafe.Pointer(c_ca_file))
+ }
+ stack := C.SSL_load_client_CA_file(c_ca_file)
+ if stack == nil {
+ return nil, errorFromErrorQueue()
+ }
+ caList := StackOfX509Name{
+ stack: stack,
+ shared: false,
+ }
+ runtime.SetFinalizer(&caList, func(c *StackOfX509Name) {
+ if !c.shared {
+ C.sk_X509_NAME_pop_free_not_a_macro(c.stack)
+ }
+ })
+ return &caList, nil
+}
+
+// SetClientCAList sets the list of CAs sent to the client when requesting a
+// client certificate for Ctx. See
+// https://www.openssl.org/docs/ssl/SSL_CTX_set_client_CA_list.html
+func (c *Ctx) SetClientCAList(caList *StackOfX509Name) {
+ C.SSL_CTX_set_client_CA_list(c.ctx, caList.stack)
+ caList.shared = true
+}
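+
+// requestClientCAsExample is a hedged, illustrative sketch (not part of the
+// upstream API): it loads acceptable CA subject names from a PEM file and
+// advertises them to clients when a client certificate is requested. caFile
+// is a placeholder supplied by the caller.
+func requestClientCAsExample(c *Ctx, caFile string) error {
+	caList, err := LoadClientCAFile(caFile)
+	if err != nil {
+		return err
+	}
+	// Ownership of the underlying stack transfers to the context here.
+	c.SetClientCAList(caList)
+	return nil
+}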
+
+// AddChainCertificate adds a certificate to the chain presented in the
+// handshake.
+func (c *Ctx) AddChainCertificate(cert *Certificate) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ c.chain = append(c.chain, cert)
+ if int(C.SSL_CTX_add_extra_chain_cert_not_a_macro(c.ctx, cert.x)) != 1 {
+ return errorFromErrorQueue()
+ }
+ // OpenSSL takes ownership via SSL_CTX_add_extra_chain_cert
+ runtime.SetFinalizer(cert, nil)
+ return nil
+}
+
+// UsePrivateKey configures the context to use the given private key for SSL
+// handshakes.
+func (c *Ctx) UsePrivateKey(key PrivateKey) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ c.key = key
+ if int(C.SSL_CTX_use_PrivateKey(c.ctx, key.evpPKey())) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
+
+type CertificateStore struct {
+ store *C.X509_STORE
+ // for GC
+ ctx *Ctx
+ certs []*Certificate
+}
+
+// Allocate a new, empty CertificateStore
+func NewCertificateStore() (*CertificateStore, error) {
+ s := C.X509_STORE_new()
+ if s == nil {
+ return nil, errors.New("failed to allocate X509_STORE")
+ }
+ store := &CertificateStore{store: s}
+ runtime.SetFinalizer(store, func(s *CertificateStore) {
+ C.X509_STORE_free(s.store)
+ })
+ return store, nil
+}
+
+// Parse a chained PEM file, loading all certificates into the Store.
+func (s *CertificateStore) LoadCertificatesFromPEM(data []byte) error {
+ pems := SplitPEM(data)
+ for _, pem := range pems {
+ cert, err := LoadCertificateFromPEM(pem)
+ if err != nil {
+ return err
+ }
+ err = s.AddCertificate(cert)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// GetCertificateStore returns the context's certificate store that will be
+// used for peer validation.
+func (c *Ctx) GetCertificateStore() *CertificateStore {
+	// The cert store pointer doesn't need to be freed here because it points
+	// at memory owned by the ctx; we do, however, need to keep the ctx alive.
+ return &CertificateStore{
+ store: C.SSL_CTX_get_cert_store(c.ctx),
+ ctx: c}
+}
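+
+// A minimal sketch of trusting additional CAs on a context, assuming a *Ctx
+// named ctx and PEM data already read into a hypothetical caPEM byte slice:
+//
+//	store := ctx.GetCertificateStore()
+//	if err := store.LoadCertificatesFromPEM(caPEM); err != nil {
+//		return err
+//	}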
+
+// AddCertificate marks the provided Certificate as a trusted certificate in
+// the given CertificateStore.
+func (s *CertificateStore) AddCertificate(cert *Certificate) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ s.certs = append(s.certs, cert)
+ if int(C.X509_STORE_add_cert(s.store, cert.x)) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
+
+type X509VerificationFlag int
+
+func (s *CertificateStore) SetFlags(flags X509VerificationFlag) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ if int(C.X509_STORE_set_flags(s.store, C.ulong(flags))) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
+
+// See https://www.openssl.org/docs/crypto/X509_VERIFY_PARAM_set_flags.html
+const (
+ CBIssuerCheck X509VerificationFlag = C.X509_V_FLAG_CB_ISSUER_CHECK
+ UseCheckTime X509VerificationFlag = C.X509_V_FLAG_USE_CHECK_TIME
+ CRLCheck X509VerificationFlag = C.X509_V_FLAG_CRL_CHECK
+ CRLCheckAll X509VerificationFlag = C.X509_V_FLAG_CRL_CHECK_ALL
+ IgnoreCritical X509VerificationFlag = C.X509_V_FLAG_IGNORE_CRITICAL
+ X509Strict X509VerificationFlag = C.X509_V_FLAG_X509_STRICT
+ AllowProxyCerts X509VerificationFlag = C.X509_V_FLAG_ALLOW_PROXY_CERTS
+ PolicyCheck X509VerificationFlag = C.X509_V_FLAG_POLICY_CHECK
+ ExplicitPolicy X509VerificationFlag = C.X509_V_FLAG_EXPLICIT_POLICY
+ InhibitAny X509VerificationFlag = C.X509_V_FLAG_INHIBIT_ANY
+ InhibitMap X509VerificationFlag = C.X509_V_FLAG_INHIBIT_MAP
+ NotifyPolicy X509VerificationFlag = C.X509_V_FLAG_NOTIFY_POLICY
+ // ExtendedCRLSupport X509VerificationFlag = C.X509_V_FLAG_EXTENDED_CRL_SUPPORT
+ // UseDeltas X509VerificationFlag = C.X509_V_FLAG_USE_DELTAS
+ // CheckSsSignature X509VerificationFlag = C.X509_V_FLAG_CHECK_SS_SIGNATURE
+ // TrustedFirst X509VerificationFlag = C.X509_V_FLAG_TRUSTED_FIRST
+ PolicyMask X509VerificationFlag = C.X509_V_FLAG_POLICY_MASK
+)
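+
+// For example, to require CRL checking across the whole chain (a sketch,
+// assuming an existing *CertificateStore named store):
+//
+//	if err := store.SetFlags(CRLCheck | CRLCheckAll); err != nil {
+//		return err
+//	}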
+
+type CertificateStoreLookup struct {
+ lookup *C.X509_LOOKUP
+ store *CertificateStore
+}
+
+// An X509LookupMethod is required to build a CertificateStoreLookup in a
+// CertificateStore. The X509LookupMethod indicates the type or functionality
+// of the CertificateStoreLookup.
+type X509LookupMethod *C.X509_LOOKUP_METHOD
+
+// CertificateStoreLookups with X509LookupFile methods look for certs in a file
+func X509LookupFile() X509LookupMethod {
+ return X509LookupMethod(C.X509_LOOKUP_file())
+}
+
+// CertificateStoreLookups with X509LookupHashDir methods look for certs in a
+// directory
+func X509LookupHashDir() X509LookupMethod {
+ return X509LookupMethod(C.X509_LOOKUP_hash_dir())
+}
+
+// AddLookup creates a CertificateStoreLookup of type X509LookupMethod in the
+// CertificateStore
+func (s *CertificateStore) AddLookup(method X509LookupMethod) (*CertificateStoreLookup, error) {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ var lookup *C.X509_LOOKUP
+ lookup = C.X509_STORE_add_lookup(s.store, method)
+ if lookup != nil {
+ return &CertificateStoreLookup{
+ lookup: lookup,
+ store: s,
+ }, nil
+ }
+ return nil, errorFromErrorQueue()
+}
+
+// LoadCRLFile adds a PEM-encoded CRL file to a CertificateStoreLookup in the
+// CertificateStore. The CertificateStoreLookup most likely needs to have been
+// created with X509LookupFile as its lookup method.
+func (l *CertificateStoreLookup) LoadCRLFile(crl_file string) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ var c_crl_file *C.char
+ if crl_file != "" {
+ c_crl_file = C.CString(crl_file)
+ defer C.free(unsafe.Pointer(c_crl_file))
+ }
+ if int(C.X509_load_crl_file(l.lookup, c_crl_file, C.X509_FILETYPE_PEM)) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
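+
+// A sketch of wiring a CRL into a context's store, assuming a *Ctx named ctx
+// and a hypothetical PEM-encoded CRL file on disk:
+//
+//	store := ctx.GetCertificateStore()
+//	lookup, err := store.AddLookup(X509LookupFile())
+//	if err != nil {
+//		return err
+//	}
+//	if err := lookup.LoadCRLFile("revoked.pem"); err != nil {
+//		return err
+//	}
+//	if err := store.SetFlags(CRLCheck | CRLCheckAll); err != nil {
+//		return err
+//	}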
+
+type CertificateStoreCtx struct {
+ ctx *C.X509_STORE_CTX
+ ssl_ctx *Ctx
+}
+
+func (self *CertificateStoreCtx) VerifyResult() VerifyResult {
+ return VerifyResult(C.X509_STORE_CTX_get_error(self.ctx))
+}
+
+func (self *CertificateStoreCtx) Err() error {
+ code := C.X509_STORE_CTX_get_error(self.ctx)
+ if code == C.X509_V_OK {
+ return nil
+ }
+ return fmt.Errorf("openssl: %s",
+ C.GoString(C.X509_verify_cert_error_string(C.long(code))))
+}
+
+func (self *CertificateStoreCtx) Depth() int {
+ return int(C.X509_STORE_CTX_get_error_depth(self.ctx))
+}
+
+// The certificate returned is only valid for the lifetime of the underlying
+// X509_STORE_CTX.
+func (self *CertificateStoreCtx) GetCurrentCert() *Certificate {
+ x509 := C.X509_STORE_CTX_get_current_cert(self.ctx)
+ if x509 == nil {
+ return nil
+ }
+ // add a ref
+ C.CRYPTO_add_not_a_macro(&x509.references, 1, C.CRYPTO_LOCK_X509)
+ cert := &Certificate{
+ x: x509,
+ }
+ runtime.SetFinalizer(cert, func(cert *Certificate) {
+ C.X509_free(cert.x)
+ })
+ return cert
+}
+
+// LoadVerifyLocations tells the context to trust all certificate authorities
+// provided in either the ca_file or the ca_path.
+// See http://www.openssl.org/docs/ssl/SSL_CTX_load_verify_locations.html for
+// more.
+func (c *Ctx) LoadVerifyLocations(ca_file string, ca_path string) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ var c_ca_file, c_ca_path *C.char
+ if ca_file != "" {
+ c_ca_file = C.CString(ca_file)
+ defer C.free(unsafe.Pointer(c_ca_file))
+ }
+ if ca_path != "" {
+ c_ca_path = C.CString(ca_path)
+ defer C.free(unsafe.Pointer(c_ca_path))
+ }
+ if C.SSL_CTX_load_verify_locations(c.ctx, c_ca_file, c_ca_path) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
+
+type Options uint
+
+const (
+ // NoCompression is only valid if you are using OpenSSL 1.0.1 or newer
+ NoCompression Options = C.SSL_OP_NO_COMPRESSION
+ NoSSLv2 Options = C.SSL_OP_NO_SSLv2
+ NoSSLv3 Options = C.SSL_OP_NO_SSLv3
+ NoTLSv1 Options = C.SSL_OP_NO_TLSv1
+ CipherServerPreference Options = C.SSL_OP_CIPHER_SERVER_PREFERENCE
+ NoSessionResumptionOrRenegotiation Options = C.SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION
+ OpAll Options = C.SSL_OP_ALL
+)
+
+// SetOptions sets context options. See
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_options.html
+func (c *Ctx) SetOptions(options Options) Options {
+ return Options(C.SSL_CTX_set_options_not_a_macro(
+ c.ctx, C.long(options)))
+}
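+
+// For example, a context restricted to TLS with server-side cipher
+// preference might be configured like this (a sketch, assuming a *Ctx named
+// ctx):
+//
+//	ctx.SetOptions(NoSSLv2 | NoSSLv3 | CipherServerPreference)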
+
+func (c *Ctx) ClearOptions(options Options) Options {
+ return Options(C.SSL_CTX_clear_options_not_a_macro(
+ c.ctx, C.long(options)))
+}
+
+// GetOptions returns context options. See
+// https://www.openssl.org/docs/ssl/SSL_CTX_set_options.html
+func (c *Ctx) GetOptions() Options {
+ return Options(C.SSL_CTX_get_options_not_a_macro(c.ctx))
+}
+
+type Modes int
+
+const (
+ // ReleaseBuffers is only valid if you are using OpenSSL 1.0.1 or newer
+ ReleaseBuffers Modes = C.SSL_MODE_RELEASE_BUFFERS
+ AutoRetry Modes = C.SSL_MODE_AUTO_RETRY
+)
+
+// SetMode sets context modes. See
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_mode.html
+func (c *Ctx) SetMode(modes Modes) Modes {
+ return Modes(C.SSL_CTX_set_mode_not_a_macro(c.ctx, C.long(modes)))
+}
+
+// GetMode returns context modes. See
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_mode.html
+func (c *Ctx) GetMode() Modes {
+ return Modes(C.SSL_CTX_get_mode_not_a_macro(c.ctx))
+}
+
+type VerifyOptions int
+
+const (
+ VerifyNone VerifyOptions = C.SSL_VERIFY_NONE
+ VerifyPeer VerifyOptions = C.SSL_VERIFY_PEER
+ VerifyFailIfNoPeerCert VerifyOptions = C.SSL_VERIFY_FAIL_IF_NO_PEER_CERT
+ VerifyClientOnce VerifyOptions = C.SSL_VERIFY_CLIENT_ONCE
+)
+
+type Filetypes int
+
+const (
+ FiletypePEM Filetypes = C.SSL_FILETYPE_PEM
+ FiletypeASN1 Filetypes = C.SSL_FILETYPE_ASN1
+)
+
+type VerifyCallback func(ok bool, store *CertificateStoreCtx) bool
+
+//export verify_cb_thunk
+func verify_cb_thunk(p unsafe.Pointer, ok C.int, ctx *C.X509_STORE_CTX) C.int {
+ defer func() {
+ if err := recover(); err != nil {
+ logger.Critf("openssl: verify callback panic'd: %v", err)
+ os.Exit(1)
+ }
+ }()
+ verify_cb := (*Ctx)(p).verify_cb
+	// if no Go callback is registered, leave ok as OpenSSL determined it
+ if verify_cb != nil {
+ store := &CertificateStoreCtx{ctx: ctx}
+ if verify_cb(ok == 1, store) {
+ ok = 1
+ } else {
+ ok = 0
+ }
+ }
+ return ok
+}
+
+// SetVerify controls peer verification settings. See
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html
+func (c *Ctx) SetVerify(options VerifyOptions, verify_cb VerifyCallback) {
+ c.verify_cb = verify_cb
+ if verify_cb != nil {
+ C.SSL_CTX_set_verify(c.ctx, C.int(options), (*[0]byte)(C.verify_cb))
+ } else {
+ C.SSL_CTX_set_verify(c.ctx, C.int(options), nil)
+ }
+}
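+
+// A sketch of requiring and inspecting client certificates, assuming a *Ctx
+// named ctx; the callback here simply defers to OpenSSL's own verdict:
+//
+//	ctx.SetVerify(VerifyPeer|VerifyFailIfNoPeerCert,
+//		func(ok bool, store *CertificateStoreCtx) bool {
+//			if !ok {
+//				// store.Err() and store.Depth() describe the failure
+//			}
+//			return ok
+//		})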
+
+func (c *Ctx) SetVerifyMode(options VerifyOptions) {
+ c.SetVerify(options, c.verify_cb)
+}
+
+func (c *Ctx) SetVerifyCallback(verify_cb VerifyCallback) {
+ c.SetVerify(c.VerifyMode(), verify_cb)
+}
+
+func (c *Ctx) GetVerifyCallback() VerifyCallback {
+ return c.verify_cb
+}
+
+func (c *Ctx) VerifyMode() VerifyOptions {
+ return VerifyOptions(C.SSL_CTX_get_verify_mode(c.ctx))
+}
+
+// SetVerifyDepth controls how many certificates deep the certificate
+// verification logic is willing to follow a certificate chain. See
+// https://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html
+func (c *Ctx) SetVerifyDepth(depth int) {
+ C.SSL_CTX_set_verify_depth(c.ctx, C.int(depth))
+}
+
+// GetVerifyDepth returns how many certificates deep the certificate
+// verification logic is willing to follow a certificate chain. See
+// https://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html
+func (c *Ctx) GetVerifyDepth() int {
+ return int(C.SSL_CTX_get_verify_depth(c.ctx))
+}
+
+type TLSExtServernameCallback func(ssl *SSL) SSLTLSExtErr
+
+func (c *Ctx) SetSessionId(session_id []byte) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ var ptr *C.uchar
+ if len(session_id) > 0 {
+ ptr = (*C.uchar)(unsafe.Pointer(&session_id[0]))
+ }
+ if int(C.SSL_CTX_set_session_id_context(c.ctx, ptr,
+ C.uint(len(session_id)))) == 0 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
+
+// SetCipherList sets the list of available ciphers. The format of the list is
+// described at http://www.openssl.org/docs/apps/ciphers.html, but see
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_cipher_list.html for more.
+func (c *Ctx) SetCipherList(list string) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+ clist := C.CString(list)
+ defer C.free(unsafe.Pointer(clist))
+ if int(C.SSL_CTX_set_cipher_list(c.ctx, clist)) == 0 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
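+
+// For example, to restrict the context to stronger ciphers (a sketch,
+// assuming a *Ctx named ctx; the cipher string is illustrative only):
+//
+//	if err := ctx.SetCipherList("HIGH:!aNULL:!MD5"); err != nil {
+//		return err
+//	}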
+
+type SessionCacheModes int
+
+const (
+ SessionCacheOff SessionCacheModes = C.SSL_SESS_CACHE_OFF
+ SessionCacheClient SessionCacheModes = C.SSL_SESS_CACHE_CLIENT
+ SessionCacheServer SessionCacheModes = C.SSL_SESS_CACHE_SERVER
+ SessionCacheBoth SessionCacheModes = C.SSL_SESS_CACHE_BOTH
+ NoAutoClear SessionCacheModes = C.SSL_SESS_CACHE_NO_AUTO_CLEAR
+ NoInternalLookup SessionCacheModes = C.SSL_SESS_CACHE_NO_INTERNAL_LOOKUP
+ NoInternalStore SessionCacheModes = C.SSL_SESS_CACHE_NO_INTERNAL_STORE
+ NoInternal SessionCacheModes = C.SSL_SESS_CACHE_NO_INTERNAL
+)
+
+// SetSessionCacheMode enables or disables session caching. See
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_session_cache_mode.html
+func (c *Ctx) SetSessionCacheMode(modes SessionCacheModes) SessionCacheModes {
+ return SessionCacheModes(
+ C.SSL_CTX_set_session_cache_mode_not_a_macro(c.ctx, C.long(modes)))
+}
+
+// SetTimeout sets the session cache timeout and returns the previously set
+// value. See https://www.openssl.org/docs/ssl/SSL_CTX_set_timeout.html
+func (c *Ctx) SetTimeout(t time.Duration) time.Duration {
+ prev := C.SSL_CTX_set_timeout_not_a_macro(c.ctx, C.long(t/time.Second))
+ return time.Duration(prev) * time.Second
+}
+
+// GetTimeout returns the session cache timeout.
+// See https://www.openssl.org/docs/ssl/SSL_CTX_set_timeout.html
+func (c *Ctx) GetTimeout() time.Duration {
+ return time.Duration(C.SSL_CTX_get_timeout_not_a_macro(c.ctx)) * time.Second
+}
+
+// SessSetCacheSize sets the session cache size and returns the previously set
+// value. See https://www.openssl.org/docs/ssl/SSL_CTX_sess_set_cache_size.html
+func (c *Ctx) SessSetCacheSize(t int) int {
+ return int(C.SSL_CTX_sess_set_cache_size_not_a_macro(c.ctx, C.long(t)))
+}
+
+// SessGetCacheSize returns the session cache size.
+// See https://www.openssl.org/docs/ssl/SSL_CTX_sess_set_cache_size.html
+func (c *Ctx) SessGetCacheSize() int {
+ return int(C.SSL_CTX_sess_get_cache_size_not_a_macro(c.ctx))
+}
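+
+// A sketch of server-side session cache tuning, assuming a *Ctx named ctx;
+// the specific values are illustrative only:
+//
+//	ctx.SetSessionCacheMode(SessionCacheServer)
+//	ctx.SetTimeout(5 * time.Minute) // sessions expire after five minutes
+//	ctx.SessSetCacheSize(20480)     // the previous size is returned and ignored here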
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ctx_test.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ctx_test.go
new file mode 100644
index 00000000000..9644e518bf3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ctx_test.go
@@ -0,0 +1,48 @@
+// Copyright (C) 2014 Ryan Hileman
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openssl
+
+import (
+ "testing"
+ "time"
+)
+
+func TestCtxTimeoutOption(t *testing.T) {
+ ctx, _ := NewCtx()
+ oldTimeout1 := ctx.GetTimeout()
+ newTimeout1 := oldTimeout1 + (time.Duration(99) * time.Second)
+ oldTimeout2 := ctx.SetTimeout(newTimeout1)
+ newTimeout2 := ctx.GetTimeout()
+ if oldTimeout1 != oldTimeout2 {
+ t.Error("SetTimeout() returns something undocumented")
+ }
+ if newTimeout1 != newTimeout2 {
+ t.Error("SetTimeout() does not save anything to ctx")
+ }
+}
+
+func TestCtxSessCacheSizeOption(t *testing.T) {
+ ctx, _ := NewCtx()
+ oldSize1 := ctx.SessGetCacheSize()
+ newSize1 := oldSize1 + 42
+ oldSize2 := ctx.SessSetCacheSize(newSize1)
+ newSize2 := ctx.SessGetCacheSize()
+ if oldSize1 != oldSize2 {
+ t.Error("SessSetCacheSize() returns something undocumented")
+ }
+ if newSize1 != newSize2 {
+ t.Error("SessSetCacheSize() does not save anything to ctx")
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/dhparam.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/dhparam.go
new file mode 100644
index 00000000000..a698645c1ec
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/dhparam.go
@@ -0,0 +1,65 @@
+// +build cgo
+
+package openssl
+
+/*
+#include <openssl/crypto.h>
+#include <openssl/ssl.h>
+#include <openssl/err.h>
+#include <openssl/conf.h>
+#include <openssl/dh.h>
+
+static long SSL_CTX_set_tmp_dh_not_a_macro(SSL_CTX* ctx, DH *dh) {
+ return SSL_CTX_set_tmp_dh(ctx, dh);
+}
+*/
+import "C"
+
+import (
+ "errors"
+ "runtime"
+ "unsafe"
+)
+
+type DH struct {
+ dh *C.struct_dh_st
+}
+
+// LoadDHParametersFromPEM loads the Diffie-Hellman parameters from
+// a PEM-encoded block.
+func LoadDHParametersFromPEM(pem_block []byte) (*DH, error) {
+ if len(pem_block) == 0 {
+ return nil, errors.New("empty pem block")
+ }
+ bio := C.BIO_new_mem_buf(unsafe.Pointer(&pem_block[0]),
+ C.int(len(pem_block)))
+ if bio == nil {
+ return nil, errors.New("failed creating bio")
+ }
+ defer C.BIO_free(bio)
+
+ params := C.PEM_read_bio_DHparams(bio, nil, nil, nil)
+ if params == nil {
+ return nil, errors.New("failed reading dh parameters")
+ }
+ dhparams := &DH{dh: params}
+ runtime.SetFinalizer(dhparams, func(dhparams *DH) {
+ C.DH_free(dhparams.dh)
+ })
+ return dhparams, nil
+}
+
+// SetDHParameters sets the DH group (DH parameters) used to
+// negotiate an ephemeral DH key during handshaking.
+func (c *Ctx) SetDHParameters(dh *DH) error {
+ runtime.LockOSThread()
+ defer runtime.UnlockOSThread()
+
+ if int(C.SSL_CTX_set_tmp_dh_not_a_macro(c.ctx, dh.dh)) != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
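+
+// A sketch of enabling ephemeral DH key exchange, assuming a *Ctx named ctx
+// and PEM-encoded parameters (e.g. generated with `openssl dhparam`) already
+// read into a hypothetical dhPEM byte slice:
+//
+//	dh, err := LoadDHParametersFromPEM(dhPEM)
+//	if err != nil {
+//		return err
+//	}
+//	if err := ctx.SetDHParameters(dh); err != nil {
+//		return err
+//	}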
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/digest.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/digest.go
new file mode 100644
index 00000000000..44d4d001b13
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/digest.go
@@ -0,0 +1,53 @@
+// Copyright (C) 2015 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+// #include <openssl/evp.h>
+import "C"
+
+import (
+ "fmt"
+ "unsafe"
+)
+
+// Digest represents an OpenSSL message digest.
+type Digest struct {
+ ptr *C.EVP_MD
+}
+
+// GetDigestByName returns the Digest with the given name, or nil and an error
+// if the digest was not found.
+func GetDigestByName(name string) (*Digest, error) {
+ cname := C.CString(name)
+ defer C.free(unsafe.Pointer(cname))
+ p := C.EVP_get_digestbyname(cname)
+ if p == nil {
+ return nil, fmt.Errorf("Digest %v not found", name)
+ }
+ // we can consider digests to use static mem; don't need to free
+ return &Digest{ptr: p}, nil
+}
+
+// GetDigestByNid returns the Digest with the given NID, or nil and an error
+// if the digest was not found.
+func GetDigestByNid(nid NID) (*Digest, error) {
+ sn, err := Nid2ShortName(nid)
+ if err != nil {
+ return nil, err
+ }
+ return GetDigestByName(sn)
+}
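+
+// A minimal lookup sketch; the digest name follows OpenSSL's own naming:
+//
+//	md, err := GetDigestByName("SHA256")
+//	if err != nil {
+//		return err // the digest is not available in the linked OpenSSL build
+//	}
+//	_ = md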
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/engine.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/engine.go
new file mode 100644
index 00000000000..7a175b70f7c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/engine.go
@@ -0,0 +1,52 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+/*
+#include "openssl/engine.h"
+*/
+import "C"
+
+import (
+ "fmt"
+ "runtime"
+ "unsafe"
+)
+
+type Engine struct {
+ e *C.ENGINE
+}
+
+func EngineById(name string) (*Engine, error) {
+ cname := C.CString(name)
+ defer C.free(unsafe.Pointer(cname))
+ e := &Engine{
+ e: C.ENGINE_by_id(cname),
+ }
+ if e.e == nil {
+ return nil, fmt.Errorf("engine %s missing", name)
+ }
+ if C.ENGINE_init(e.e) == 0 {
+ C.ENGINE_free(e.e)
+ return nil, fmt.Errorf("engine %s not initialized", name)
+ }
+ runtime.SetFinalizer(e, func(e *Engine) {
+ C.ENGINE_finish(e.e)
+ C.ENGINE_free(e.e)
+ })
+ return e, nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/fips.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/fips.go
new file mode 100644
index 00000000000..cc463f17a18
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/fips.go
@@ -0,0 +1,22 @@
+// +build cgo
+// +build !darwin
+
+package openssl
+
+/*
+#include <openssl/ssl.h>
+*/
+import "C"
+
+func FIPSModeSet(mode bool) error {
+ var r C.int
+ if mode {
+ r = C.FIPS_mode_set(1)
+ } else {
+ r = C.FIPS_mode_set(0)
+ }
+ if r != 1 {
+ return errorFromErrorQueue()
+ }
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/hostname.c b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/hostname.c
new file mode 100644
index 00000000000..9a610292067
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/hostname.c
@@ -0,0 +1,367 @@
+/* Go-OpenSSL notice:
+ This file is required for all OpenSSL versions prior to 1.1.0. This simply
+ provides the new 1.1.0 X509_check_* methods for hostname validation if they
+ don't already exist.
+ */
+
+#include <openssl/x509.h>
+
+#ifndef X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT
+
+/* portions from x509v3.h and v3_utl.c */
+/* Written by Dr Stephen N Henson (steve@openssl.org) for the OpenSSL
+ * project.
+ */
+/* ====================================================================
+ * Copyright (c) 1999-2003 The OpenSSL Project. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ *
+ * 3. All advertising materials mentioning features or use of this
+ * software must display the following acknowledgment:
+ * "This product includes software developed by the OpenSSL Project
+ * for use in the OpenSSL Toolkit. (http://www.OpenSSL.org/)"
+ *
+ * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+ * endorse or promote products derived from this software without
+ * prior written permission. For written permission, please contact
+ * licensing@OpenSSL.org.
+ *
+ * 5. Products derived from this software may not be called "OpenSSL"
+ * nor may "OpenSSL" appear in their names without prior written
+ * permission of the OpenSSL Project.
+ *
+ * 6. Redistributions of any form whatsoever must retain the following
+ * acknowledgment:
+ * "This product includes software developed by the OpenSSL Project
+ * for use in the OpenSSL Toolkit (http://www.OpenSSL.org/)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+ * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
+ * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ * OF THE POSSIBILITY OF SUCH DAMAGE.
+ * ====================================================================
+ *
+ * This product includes cryptographic software written by Eric Young
+ * (eay@cryptsoft.com). This product includes software written by Tim
+ * Hudson (tjh@cryptsoft.com).
+ *
+ */
+/* X509 v3 extension utilities */
+
+#include <stdlib.h>
+#include <openssl/ssl.h>
+#include <openssl/conf.h>
+#include <openssl/x509v3.h>
+
+#define X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT 0x1
+#define X509_CHECK_FLAG_NO_WILDCARDS 0x2
+
+typedef int (*equal_fn)(const unsigned char *pattern, size_t pattern_len,
+ const unsigned char *subject, size_t subject_len);
+
+/* Compare while ASCII ignoring case. */
+static int equal_nocase(const unsigned char *pattern, size_t pattern_len,
+ const unsigned char *subject, size_t subject_len)
+ {
+ if (pattern_len != subject_len)
+ return 0;
+ while (pattern_len)
+ {
+ unsigned char l = *pattern;
+ unsigned char r = *subject;
+ /* The pattern must not contain NUL characters. */
+ if (l == 0)
+ return 0;
+ if (l != r)
+ {
+ if ('A' <= l && l <= 'Z')
+ l = (l - 'A') + 'a';
+ if ('A' <= r && r <= 'Z')
+ r = (r - 'A') + 'a';
+ if (l != r)
+ return 0;
+ }
+ ++pattern;
+ ++subject;
+ --pattern_len;
+ }
+ return 1;
+ }
+
+/* Compare using memcmp. */
+static int equal_case(const unsigned char *pattern, size_t pattern_len,
+ const unsigned char *subject, size_t subject_len)
+{
+ /* The pattern must not contain NUL characters. */
+ if (memchr(pattern, '\0', pattern_len) != NULL)
+ return 0;
+ if (pattern_len != subject_len)
+ return 0;
+ return !memcmp(pattern, subject, pattern_len);
+}
+
+/* RFC 5280, section 7.5, requires that only the domain is compared in
+ a case-insensitive manner. */
+static int equal_email(const unsigned char *a, size_t a_len,
+ const unsigned char *b, size_t b_len)
+ {
+ size_t i = a_len;
+ if (a_len != b_len)
+ return 0;
+ /* We search backwards for the '@' character, so that we do
+ not have to deal with quoted local-parts. The domain part
+ is compared in a case-insensitive manner. */
+ while (i > 0)
+ {
+ --i;
+ if (a[i] == '@' || b[i] == '@')
+ {
+ if (!equal_nocase(a + i, a_len - i,
+ b + i, a_len - i))
+ return 0;
+ break;
+ }
+ }
+ if (i == 0)
+ i = a_len;
+ return equal_case(a, i, b, i);
+ }
+
+/* Compare the prefix and suffix with the subject, and check that the
+ characters in-between are valid. */
+static int wildcard_match(const unsigned char *prefix, size_t prefix_len,
+ const unsigned char *suffix, size_t suffix_len,
+ const unsigned char *subject, size_t subject_len)
+ {
+ const unsigned char *wildcard_start;
+ const unsigned char *wildcard_end;
+ const unsigned char *p;
+ if (subject_len < prefix_len + suffix_len)
+ return 0;
+ if (!equal_nocase(prefix, prefix_len, subject, prefix_len))
+ return 0;
+ wildcard_start = subject + prefix_len;
+ wildcard_end = subject + (subject_len - suffix_len);
+ if (!equal_nocase(wildcard_end, suffix_len, suffix, suffix_len))
+ return 0;
+ /* The wildcard must match at least one character. */
+ if (wildcard_start == wildcard_end)
+ return 0;
+ /* Check that the part matched by the wildcard contains only
+ permitted characters and only matches a single label. */
+ for (p = wildcard_start; p != wildcard_end; ++p)
+ if (!(('0' <= *p && *p <= '9') ||
+ ('A' <= *p && *p <= 'Z') ||
+ ('a' <= *p && *p <= 'z') ||
+ *p == '-'))
+ return 0;
+ return 1;
+ }
+
+/* Checks if the memory region consists of [0-9A-Za-z.-]. */
+static int valid_domain_characters(const unsigned char *p, size_t len)
+ {
+ while (len)
+ {
+ if (!(('0' <= *p && *p <= '9') ||
+ ('A' <= *p && *p <= 'Z') ||
+ ('a' <= *p && *p <= 'z') ||
+ *p == '-' || *p == '.'))
+ return 0;
+ ++p;
+ --len;
+ }
+ return 1;
+ }
+
+/* Find the '*' in a wildcard pattern. If no such character is found
+ or the pattern is otherwise invalid, returns NULL. */
+static const unsigned char *wildcard_find_star(const unsigned char *pattern,
+ size_t pattern_len)
+ {
+ const unsigned char *star = memchr(pattern, '*', pattern_len);
+ size_t dot_count = 0;
+ const unsigned char *suffix_start;
+ size_t suffix_length;
+ if (star == NULL)
+ return NULL;
+ suffix_start = star + 1;
+ suffix_length = (pattern + pattern_len) - (star + 1);
+ if (!(valid_domain_characters(pattern, star - pattern) &&
+ valid_domain_characters(suffix_start, suffix_length)))
+ return NULL;
+ /* Check that the suffix matches at least two labels. */
+ while (suffix_length)
+ {
+ if (*suffix_start == '.')
+ ++dot_count;
+ ++suffix_start;
+ --suffix_length;
+ }
+ if (dot_count < 2)
+ return NULL;
+ return star;
+ }
+
+/* Compare using wildcards. */
+static int equal_wildcard(const unsigned char *pattern, size_t pattern_len,
+ const unsigned char *subject, size_t subject_len)
+ {
+ const unsigned char *star = wildcard_find_star(pattern, pattern_len);
+ if (star == NULL)
+ return equal_nocase(pattern, pattern_len,
+ subject, subject_len);
+ return wildcard_match(pattern, star - pattern,
+ star + 1, (pattern + pattern_len) - star - 1,
+ subject, subject_len);
+ }
+
+/* Compare an ASN1_STRING to a supplied string. If they match
+ * return 1. If cmp_type > 0 only compare if string matches the
+ * type, otherwise convert it to UTF8.
+ */
+
+static int do_check_string(ASN1_STRING *a, int cmp_type, equal_fn equal,
+ const unsigned char *b, size_t blen)
+ {
+ if (!a->data || !a->length)
+ return 0;
+ if (cmp_type > 0)
+ {
+ if (cmp_type != a->type)
+ return 0;
+ if (cmp_type == V_ASN1_IA5STRING)
+ return equal(a->data, a->length, b, blen);
+ if (a->length == (int)blen && !memcmp(a->data, b, blen))
+ return 1;
+ else
+ return 0;
+ }
+ else
+ {
+ int astrlen, rv;
+ unsigned char *astr;
+ astrlen = ASN1_STRING_to_UTF8(&astr, a);
+ if (astrlen < 0)
+ return -1;
+ rv = equal(astr, astrlen, b, blen);
+ OPENSSL_free(astr);
+ return rv;
+ }
+ }
+
+static int do_x509_check(X509 *x, const unsigned char *chk, size_t chklen,
+ unsigned int flags, int check_type)
+ {
+ STACK_OF(GENERAL_NAME) *gens = NULL;
+ X509_NAME *name = NULL;
+ int i;
+ int cnid;
+ int alt_type;
+ equal_fn equal;
+ if (check_type == GEN_EMAIL)
+ {
+ cnid = NID_pkcs9_emailAddress;
+ alt_type = V_ASN1_IA5STRING;
+ equal = equal_email;
+ }
+ else if (check_type == GEN_DNS)
+ {
+ cnid = NID_commonName;
+ alt_type = V_ASN1_IA5STRING;
+ if (flags & X509_CHECK_FLAG_NO_WILDCARDS)
+ equal = equal_nocase;
+ else
+ equal = equal_wildcard;
+ }
+ else
+ {
+ cnid = 0;
+ alt_type = V_ASN1_OCTET_STRING;
+ equal = equal_case;
+ }
+
+ if (chklen == 0)
+ chklen = strlen((const char *)chk);
+
+ gens = X509_get_ext_d2i(x, NID_subject_alt_name, NULL, NULL);
+ if (gens)
+ {
+ int rv = 0;
+ for (i = 0; i < sk_GENERAL_NAME_num(gens); i++)
+ {
+ GENERAL_NAME *gen;
+ ASN1_STRING *cstr;
+ gen = sk_GENERAL_NAME_value(gens, i);
+ if(gen->type != check_type)
+ continue;
+ if (check_type == GEN_EMAIL)
+ cstr = gen->d.rfc822Name;
+ else if (check_type == GEN_DNS)
+ cstr = gen->d.dNSName;
+ else
+ cstr = gen->d.iPAddress;
+ if (do_check_string(cstr, alt_type, equal, chk, chklen))
+ {
+ rv = 1;
+ break;
+ }
+ }
+ GENERAL_NAMES_free(gens);
+ if (rv)
+ return 1;
+ if (!(flags & X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT) || !cnid)
+ return 0;
+ }
+ i = -1;
+ name = X509_get_subject_name(x);
+ while((i = X509_NAME_get_index_by_NID(name, cnid, i)) >= 0)
+ {
+ X509_NAME_ENTRY *ne;
+ ASN1_STRING *str;
+ ne = X509_NAME_get_entry(name, i);
+ str = X509_NAME_ENTRY_get_data(ne);
+ if (do_check_string(str, -1, equal, chk, chklen))
+ return 1;
+ }
+ return 0;
+ }
+
+int _X509_check_host(X509 *x, const unsigned char *chk, size_t chklen,
+ unsigned int flags)
+ {
+ return do_x509_check(x, chk, chklen, flags, GEN_DNS);
+ }
+
+int _X509_check_email(X509 *x, const unsigned char *chk, size_t chklen,
+ unsigned int flags)
+ {
+ return do_x509_check(x, chk, chklen, flags, GEN_EMAIL);
+ }
+
+int _X509_check_ip(X509 *x, const unsigned char *chk, size_t chklen,
+ unsigned int flags)
+ {
+ return do_x509_check(x, chk, chklen, flags, GEN_IPADD);
+ }
+
+#endif
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/hostname.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/hostname.go
new file mode 100644
index 00000000000..c1d1202fb65
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/hostname.go
@@ -0,0 +1,127 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+/*
+#include <openssl/ssl.h>
+#include <openssl/conf.h>
+#include <openssl/x509.h>
+
+#ifndef X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT
+#define X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT 0x1
+#define X509_CHECK_FLAG_NO_WILDCARDS 0x2
+
+extern int _X509_check_host(X509 *x, const unsigned char *chk, size_t chklen,
+ unsigned int flags);
+extern int _X509_check_email(X509 *x, const unsigned char *chk, size_t chklen,
+ unsigned int flags);
+extern int _X509_check_ip(X509 *x, const unsigned char *chk, size_t chklen,
+ unsigned int flags);
+#endif
+*/
+import "C"
+
+import (
+ "errors"
+ "net"
+ "unsafe"
+)
+
+var (
+ ValidationError = errors.New("Host validation error")
+)
+
+type CheckFlags int
+
+const (
+ AlwaysCheckSubject CheckFlags = C.X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT
+ NoWildcards CheckFlags = C.X509_CHECK_FLAG_NO_WILDCARDS
+)
+
+// CheckHost checks that the X509 certificate is signed for the provided
+// host name. See http://www.openssl.org/docs/crypto/X509_check_host.html for
+// more. Note that CheckHost does not check the IP field. See VerifyHostname.
+// Specifically returns ValidationError if the Certificate didn't match but
+// there was no internal error.
+func (c *Certificate) CheckHost(host string, flags CheckFlags) error {
+ chost := unsafe.Pointer(C.CString(host))
+ defer C.free(chost)
+ rv := C._X509_check_host(c.x, (*C.uchar)(chost), C.size_t(len(host)),
+ C.uint(flags))
+ if rv > 0 {
+ return nil
+ }
+ if rv == 0 {
+ return ValidationError
+ }
+ return errors.New("hostname validation had an internal failure")
+}
+
+// CheckEmail checks that the X509 certificate is signed for the provided
+// email address. See http://www.openssl.org/docs/crypto/X509_check_host.html
+// for more.
+// Specifically returns ValidationError if the Certificate didn't match but
+// there was no internal error.
+func (c *Certificate) CheckEmail(email string, flags CheckFlags) error {
+ cemail := unsafe.Pointer(C.CString(email))
+ defer C.free(cemail)
+ rv := C._X509_check_email(c.x, (*C.uchar)(cemail), C.size_t(len(email)),
+ C.uint(flags))
+ if rv > 0 {
+ return nil
+ }
+ if rv == 0 {
+ return ValidationError
+ }
+ return errors.New("email validation had an internal failure")
+}
+
+// CheckIP checks that the X509 certificate is signed for the provided
+// IP address. See http://www.openssl.org/docs/crypto/X509_check_host.html
+// for more.
+// Specifically returns ValidationError if the Certificate didn't match but
+// there was no internal error.
+func (c *Certificate) CheckIP(ip net.IP, flags CheckFlags) error {
+ cip := unsafe.Pointer(&ip[0])
+ rv := C._X509_check_ip(c.x, (*C.uchar)(cip), C.size_t(len(ip)),
+ C.uint(flags))
+ if rv > 0 {
+ return nil
+ }
+ if rv == 0 {
+ return ValidationError
+ }
+ return errors.New("ip validation had an internal failure")
+}
+
+// VerifyHostname is a combination of CheckHost and CheckIP. If the provided
+// hostname looks like an IP address, it will be checked as an IP address,
+// otherwise it will be checked as a hostname.
+// Specifically returns ValidationError if the Certificate didn't match but
+// there was no internal error.
+func (c *Certificate) VerifyHostname(host string) error {
+ var ip net.IP
+ if len(host) >= 3 && host[0] == '[' && host[len(host)-1] == ']' {
+ ip = net.ParseIP(host[1 : len(host)-1])
+ } else {
+ ip = net.ParseIP(host)
+ }
+ if ip != nil {
+ return c.CheckIP(ip, 0)
+ }
+ return c.CheckHost(host, 0)
+}
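+
+// A typical client-side check after a handshake, assuming a peer *Certificate
+// named cert and a hypothetical expected host name:
+//
+//	if err := cert.VerifyHostname("example.com"); err != nil {
+//		// err is ValidationError when the certificate simply doesn't match
+//		return err
+//	}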
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/http.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/http.go
new file mode 100644
index 00000000000..e3be32c264a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/http.go
@@ -0,0 +1,61 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openssl
+
+import (
+ "net/http"
+)
+
+// ListenAndServeTLS will take an http.Handler and serve it using OpenSSL over
+// the given tcp address, configured to use the provided cert and key files.
+func ListenAndServeTLS(addr string, cert_file string, key_file string,
+ handler http.Handler) error {
+ return ServerListenAndServeTLS(
+ &http.Server{Addr: addr, Handler: handler}, cert_file, key_file)
+}
+
+// ServerListenAndServeTLS will take an http.Server and serve it using OpenSSL
+// configured to use the provided cert and key files.
+func ServerListenAndServeTLS(srv *http.Server,
+ cert_file, key_file string) error {
+ addr := srv.Addr
+ if addr == "" {
+ addr = ":https"
+ }
+
+ ctx, err := NewCtxFromFiles(cert_file, key_file)
+ if err != nil {
+ return err
+ }
+
+ l, err := Listen("tcp", addr, ctx)
+ if err != nil {
+ return err
+ }
+
+ return srv.Serve(l)
+}
+
+// TODO: http client integration
+// Getting this integrated nicely with the Go stdlib HTTP client stack, so
+// that it does proxying, connection pooling, and most importantly hostname
+// verification, is really hard. So much is hardcoded to use the built-in TLS
+// library that making this work would require either some serious hackery,
+// an almost straight-up fork of the HTTP client, or significant stdlib
+// internal refactoring.
+// Worse still, there is no easy way to make OpenSSL use the operating
+// system's default root certificates if the user doesn't provide any.
+// NOTE: if you're going to try and write your own round tripper, at least use
+// openssl.Dial, or equivalent logic
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init.go
new file mode 100644
index 00000000000..7663a480ed2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init.go
@@ -0,0 +1,155 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+/*
+Package openssl is a light wrapper around OpenSSL for Go.
+
+It strives to provide a near-drop-in replacement for the Go standard library
+tls package, while allowing for:
+
+Performance
+
+OpenSSL is battle-tested and optimized C. While Go's built-in library shows
+great promise, it is still young and in some places inefficient. This simple
+OpenSSL wrapper can often be at least twice as fast with the same cipher and
+protocol.
+
+On my laptop, I get the following benchmark speeds:
+ BenchmarkSHA1Large_openssl 1000 2611282 ns/op 401.56 MB/s
+ BenchmarkSHA1Large_stdlib 500 3963983 ns/op 264.53 MB/s
+ BenchmarkSHA1Small_openssl 1000000 3476 ns/op 0.29 MB/s
+ BenchmarkSHA1Small_stdlib 5000000 550 ns/op 1.82 MB/s
+ BenchmarkSHA256Large_openssl 200 8085314 ns/op 129.69 MB/s
+ BenchmarkSHA256Large_stdlib 100 18948189 ns/op 55.34 MB/s
+ BenchmarkSHA256Small_openssl 1000000 4262 ns/op 0.23 MB/s
+ BenchmarkSHA256Small_stdlib 1000000 1444 ns/op 0.69 MB/s
+ BenchmarkOpenSSLThroughput 100000 21634 ns/op 47.33 MB/s
+ BenchmarkStdlibThroughput 50000 58974 ns/op 17.36 MB/s
+
+Interoperability
+
+Many systems support OpenSSL with a variety of plugins and modules for things,
+such as hardware acceleration in embedded devices.
+
+Greater flexibility and configuration
+
+OpenSSL allows for far greater configuration of corner cases and backwards
+compatibility (such as support of SSLv2). You shouldn't be using SSLv2 if you
+can help it, but sometimes you can't help it.
+
+Security
+
+Yeah yeah, Heartbleed. But according to the author of the standard library's
+TLS implementation, Go's TLS library is vulnerable to timing attacks. And
+whether or not OpenSSL received the appropriate amount of scrutiny
+pre-Heartbleed, it sure is receiving it now.
+
+Usage
+
+Starting an HTTP server that uses OpenSSL is very easy. It's as simple as:
+ log.Fatal(openssl.ListenAndServeTLS(
+ ":8443", "my_server.crt", "my_server.key", myHandler))
+
+Getting a net.Listener that uses OpenSSL is also easy:
+ ctx, err := openssl.NewCtxFromFiles("my_server.crt", "my_server.key")
+ if err != nil {
+ log.Fatal(err)
+ }
+ l, err := openssl.Listen("tcp", ":7777", ctx)
+
+Making a client connection is straightforward too:
+ ctx, err := NewCtx()
+ if err != nil {
+ log.Fatal(err)
+ }
+ err = ctx.LoadVerifyLocations("/etc/ssl/certs/ca-certificates.crt", "")
+ if err != nil {
+ log.Fatal(err)
+ }
+ conn, err := openssl.Dial("tcp", "localhost:7777", ctx, 0)
+
+Help wanted: To get this library to work with net/http's client, we
+had to fork net/http. It would be nice if an alternate http client library
+supported the generality needed to use OpenSSL instead of crypto/tls.
+*/
+package openssl
+
+/*
+#include <openssl/ssl.h>
+#include <openssl/conf.h>
+#include <openssl/err.h>
+#include <openssl/evp.h>
+#include <openssl/engine.h>
+
+extern int Goopenssl_init_locks();
+extern void Goopenssl_thread_locking_callback(int, int, const char*, int);
+
+static int Goopenssl_init_threadsafety() {
+ // Set up OPENSSL thread safety callbacks. We only set the locking
+ // callback because the default id callback implementation is good
+ // enough for us.
+ int rc = Goopenssl_init_locks();
+ if (rc == 0) {
+ CRYPTO_set_locking_callback(Goopenssl_thread_locking_callback);
+ }
+ return rc;
+}
+
+static void OpenSSL_add_all_algorithms_not_a_macro() {
+ OpenSSL_add_all_algorithms();
+}
+
+*/
+import "C"
+
+import (
+	"fmt"
+	"strings"
+	"sync"
+)
+
+var (
+ sslMutexes []sync.Mutex
+)
+
+func init() {
+ C.OPENSSL_config(nil)
+ C.ENGINE_load_builtin_engines()
+ C.SSL_load_error_strings()
+ C.SSL_library_init()
+ C.OpenSSL_add_all_algorithms_not_a_macro()
+ rc := C.Goopenssl_init_threadsafety()
+ if rc != 0 {
+ panic(fmt.Errorf("Goopenssl_init_locks failed with %d", rc))
+ }
+}
+
+// errorFromErrorQueue needs to run in the same OS thread as the operation
+// that caused the possible error
+func errorFromErrorQueue() error {
+ var errs []string
+ for {
+ err := C.ERR_get_error()
+ if err == 0 {
+ break
+ }
+ errs = append(errs, fmt.Sprintf("%s:%s:%s",
+ C.GoString(C.ERR_lib_error_string(err)),
+ C.GoString(C.ERR_func_error_string(err)),
+ C.GoString(C.ERR_reason_error_string(err))))
+ }
+	return fmt.Errorf("SSL errors: %s", strings.Join(errs, "\n"))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init_posix.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init_posix.go
new file mode 100644
index 00000000000..03ed0f01bd0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init_posix.go
@@ -0,0 +1,64 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build linux darwin cgo
+// +build !windows
+
+package openssl
+
+/*
+#include <errno.h>
+#include <openssl/crypto.h>
+#include <pthread.h>
+
+pthread_mutex_t* goopenssl_locks;
+
+int Goopenssl_init_locks() {
+ int rc = 0;
+ int nlock;
+ int i;
+ int locks_needed = CRYPTO_num_locks();
+
+ goopenssl_locks = (pthread_mutex_t*)malloc(
+ sizeof(pthread_mutex_t) * locks_needed);
+ if (!goopenssl_locks) {
+ return ENOMEM;
+ }
+ for (nlock = 0; nlock < locks_needed; ++nlock) {
+ rc = pthread_mutex_init(&goopenssl_locks[nlock], NULL);
+ if (rc != 0) {
+ break;
+ }
+ }
+
+ if (rc != 0) {
+ for (i = nlock - 1; i >= 0; --i) {
+ pthread_mutex_destroy(&goopenssl_locks[i]);
+ }
+ free(goopenssl_locks);
+ goopenssl_locks = NULL;
+ }
+ return rc;
+}
+
+void Goopenssl_thread_locking_callback(int mode, int n, const char *file,
+ int line) {
+ if (mode & CRYPTO_LOCK) {
+ pthread_mutex_lock(&goopenssl_locks[n]);
+ } else {
+ pthread_mutex_unlock(&goopenssl_locks[n]);
+ }
+}
+*/
+import "C"
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init_windows.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init_windows.go
new file mode 100644
index 00000000000..5eca9fa0eac
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/init_windows.go
@@ -0,0 +1,60 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build windows cgo
+
+package openssl
+
+/*
+
+#cgo windows LDFLAGS: -lssleay32 -llibeay32 -L c:/openssl/bin
+#cgo windows CFLAGS: -I"c:/openssl/include"
+
+#ifndef WIN32_LEAN_AND_MEAN
+#define WIN32_LEAN_AND_MEAN
+#endif
+#include <errno.h>
+#include <openssl/crypto.h>
+#include <windows.h>
+
+CRITICAL_SECTION* goopenssl_locks;
+
+int Goopenssl_init_locks() {
+ int rc = 0;
+ int nlock;
+ int i;
+ int locks_needed = CRYPTO_num_locks();
+
+ goopenssl_locks = (CRITICAL_SECTION*)malloc(
+ sizeof(*goopenssl_locks) * locks_needed);
+ if (!goopenssl_locks) {
+ return ENOMEM;
+ }
+ for (nlock = 0; nlock < locks_needed; ++nlock) {
+ InitializeCriticalSection(&goopenssl_locks[nlock]);
+ }
+
+ return 0;
+}
+
+void Goopenssl_thread_locking_callback(int mode, int n, const char *file,
+ int line) {
+ if (mode & CRYPTO_LOCK) {
+ EnterCriticalSection(&goopenssl_locks[n]);
+ } else {
+ LeaveCriticalSection(&goopenssl_locks[n]);
+ }
+}
+*/
+import "C"
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/key.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/key.go
new file mode 100644
index 00000000000..c69a101631f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/key.go
@@ -0,0 +1,374 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+// #include <openssl/evp.h>
+// #include <openssl/ssl.h>
+// #include <openssl/conf.h>
+//
+// int EVP_SignInit_not_a_macro(EVP_MD_CTX *ctx, const EVP_MD *type) {
+// return EVP_SignInit(ctx, type);
+// }
+//
+// int EVP_SignUpdate_not_a_macro(EVP_MD_CTX *ctx, const void *d,
+// unsigned int cnt) {
+// return EVP_SignUpdate(ctx, d, cnt);
+// }
+//
+// int EVP_VerifyInit_not_a_macro(EVP_MD_CTX *ctx, const EVP_MD *type) {
+// return EVP_VerifyInit(ctx, type);
+// }
+//
+// int EVP_VerifyUpdate_not_a_macro(EVP_MD_CTX *ctx, const void *d,
+// unsigned int cnt) {
+// return EVP_VerifyUpdate(ctx, d, cnt);
+// }
+//
+// int EVP_PKEY_assign_charp(EVP_PKEY *pkey, int type, char *key) {
+// return EVP_PKEY_assign(pkey, type, key);
+// }
+import "C"
+
+import (
+ "errors"
+ "io/ioutil"
+ "runtime"
+ "unsafe"
+)
+
+type Method *C.EVP_MD
+
+var (
+ SHA256_Method Method = C.EVP_sha256()
+)
+
+type PublicKey interface {
+	// VerifyPKCS1v15 verifies the data signature using PKCS #1 v1.5
+ VerifyPKCS1v15(method Method, data, sig []byte) error
+
+ // MarshalPKIXPublicKeyPEM converts the public key to PEM-encoded PKIX
+ // format
+ MarshalPKIXPublicKeyPEM() (pem_block []byte, err error)
+
+ // MarshalPKIXPublicKeyDER converts the public key to DER-encoded PKIX
+ // format
+ MarshalPKIXPublicKeyDER() (der_block []byte, err error)
+
+ evpPKey() *C.EVP_PKEY
+}
+
+type PrivateKey interface {
+ PublicKey
+
+	// SignPKCS1v15 signs the data using PKCS #1 v1.5
+ SignPKCS1v15(Method, []byte) ([]byte, error)
+
+ // MarshalPKCS1PrivateKeyPEM converts the private key to PEM-encoded PKCS1
+ // format
+ MarshalPKCS1PrivateKeyPEM() (pem_block []byte, err error)
+
+ // MarshalPKCS1PrivateKeyDER converts the private key to DER-encoded PKCS1
+ // format
+ MarshalPKCS1PrivateKeyDER() (der_block []byte, err error)
+}
+
+type pKey struct {
+ key *C.EVP_PKEY
+}
+
+func (key *pKey) evpPKey() *C.EVP_PKEY { return key.key }
+
+func (key *pKey) SignPKCS1v15(method Method, data []byte) ([]byte, error) {
+ var ctx C.EVP_MD_CTX
+ C.EVP_MD_CTX_init(&ctx)
+ defer C.EVP_MD_CTX_cleanup(&ctx)
+
+ if 1 != C.EVP_SignInit_not_a_macro(&ctx, method) {
+ return nil, errors.New("signpkcs1v15: failed to init signature")
+ }
+ if len(data) > 0 {
+ if 1 != C.EVP_SignUpdate_not_a_macro(
+ &ctx, unsafe.Pointer(&data[0]), C.uint(len(data))) {
+ return nil, errors.New("signpkcs1v15: failed to update signature")
+ }
+ }
+ sig := make([]byte, C.EVP_PKEY_size(key.key))
+ var sigblen C.uint
+ if 1 != C.EVP_SignFinal(&ctx,
+ ((*C.uchar)(unsafe.Pointer(&sig[0]))), &sigblen, key.key) {
+ return nil, errors.New("signpkcs1v15: failed to finalize signature")
+ }
+ return sig[:sigblen], nil
+}
+
+func (key *pKey) VerifyPKCS1v15(method Method, data, sig []byte) error {
+ var ctx C.EVP_MD_CTX
+ C.EVP_MD_CTX_init(&ctx)
+ defer C.EVP_MD_CTX_cleanup(&ctx)
+
+ if 1 != C.EVP_VerifyInit_not_a_macro(&ctx, method) {
+ return errors.New("verifypkcs1v15: failed to init verify")
+ }
+ if len(data) > 0 {
+ if 1 != C.EVP_VerifyUpdate_not_a_macro(
+ &ctx, unsafe.Pointer(&data[0]), C.uint(len(data))) {
+ return errors.New("verifypkcs1v15: failed to update verify")
+ }
+ }
+ if 1 != C.EVP_VerifyFinal(&ctx,
+ ((*C.uchar)(unsafe.Pointer(&sig[0]))), C.uint(len(sig)), key.key) {
+ return errors.New("verifypkcs1v15: failed to finalize verify")
+ }
+ return nil
+}
+
+func (key *pKey) MarshalPKCS1PrivateKeyPEM() (pem_block []byte,
+ err error) {
+ bio := C.BIO_new(C.BIO_s_mem())
+ if bio == nil {
+ return nil, errors.New("failed to allocate memory BIO")
+ }
+ defer C.BIO_free(bio)
+ rsa := (*C.RSA)(C.EVP_PKEY_get1_RSA(key.key))
+ if rsa == nil {
+ return nil, errors.New("failed getting rsa key")
+ }
+ defer C.RSA_free(rsa)
+ if int(C.PEM_write_bio_RSAPrivateKey(bio, rsa, nil, nil, C.int(0), nil,
+ nil)) != 1 {
+ return nil, errors.New("failed dumping private key")
+ }
+ return ioutil.ReadAll(asAnyBio(bio))
+}
+
+func (key *pKey) MarshalPKCS1PrivateKeyDER() (der_block []byte,
+ err error) {
+ bio := C.BIO_new(C.BIO_s_mem())
+ if bio == nil {
+ return nil, errors.New("failed to allocate memory BIO")
+ }
+ defer C.BIO_free(bio)
+ rsa := (*C.RSA)(C.EVP_PKEY_get1_RSA(key.key))
+ if rsa == nil {
+ return nil, errors.New("failed getting rsa key")
+ }
+ defer C.RSA_free(rsa)
+ if int(C.i2d_RSAPrivateKey_bio(bio, rsa)) != 1 {
+ return nil, errors.New("failed dumping private key der")
+ }
+ return ioutil.ReadAll(asAnyBio(bio))
+}
+
+func (key *pKey) MarshalPKIXPublicKeyPEM() (pem_block []byte,
+ err error) {
+ bio := C.BIO_new(C.BIO_s_mem())
+ if bio == nil {
+ return nil, errors.New("failed to allocate memory BIO")
+ }
+ defer C.BIO_free(bio)
+ rsa := (*C.RSA)(C.EVP_PKEY_get1_RSA(key.key))
+ if rsa == nil {
+ return nil, errors.New("failed getting rsa key")
+ }
+ defer C.RSA_free(rsa)
+ if int(C.PEM_write_bio_RSA_PUBKEY(bio, rsa)) != 1 {
+ return nil, errors.New("failed dumping public key pem")
+ }
+ return ioutil.ReadAll(asAnyBio(bio))
+}
+
+func (key *pKey) MarshalPKIXPublicKeyDER() (der_block []byte,
+ err error) {
+ bio := C.BIO_new(C.BIO_s_mem())
+ if bio == nil {
+ return nil, errors.New("failed to allocate memory BIO")
+ }
+ defer C.BIO_free(bio)
+ rsa := (*C.RSA)(C.EVP_PKEY_get1_RSA(key.key))
+ if rsa == nil {
+ return nil, errors.New("failed getting rsa key")
+ }
+ defer C.RSA_free(rsa)
+ if int(C.i2d_RSA_PUBKEY_bio(bio, rsa)) != 1 {
+ return nil, errors.New("failed dumping public key der")
+ }
+ return ioutil.ReadAll(asAnyBio(bio))
+}
+
+// LoadPrivateKeyFromPEM loads a private key from a PEM-encoded block.
+func LoadPrivateKeyFromPEM(pem_block []byte) (PrivateKey, error) {
+ if len(pem_block) == 0 {
+ return nil, errors.New("empty pem block")
+ }
+ bio := C.BIO_new_mem_buf(unsafe.Pointer(&pem_block[0]),
+ C.int(len(pem_block)))
+ if bio == nil {
+ return nil, errors.New("failed creating bio")
+ }
+ defer C.BIO_free(bio)
+
+ rsakey := C.PEM_read_bio_RSAPrivateKey(bio, nil, nil, nil)
+ if rsakey == nil {
+ return nil, errors.New("failed reading rsa key")
+ }
+ defer C.RSA_free(rsakey)
+
+ // convert to PKEY
+ key := C.EVP_PKEY_new()
+ if key == nil {
+ return nil, errors.New("failed converting to evp_pkey")
+ }
+ if C.EVP_PKEY_set1_RSA(key, (*C.struct_rsa_st)(rsakey)) != 1 {
+ C.EVP_PKEY_free(key)
+ return nil, errors.New("failed converting to evp_pkey")
+ }
+
+ p := &pKey{key: key}
+ runtime.SetFinalizer(p, func(p *pKey) {
+ C.EVP_PKEY_free(p.key)
+ })
+ return p, nil
+}
+
+// LoadPrivateKeyFromPEMWidthPassword loads a private key from a PEM-encoded
+// block, using the supplied password to decrypt it if necessary.
+func LoadPrivateKeyFromPEMWidthPassword(pem_block []byte, password string) (
+ PrivateKey, error) {
+ if len(pem_block) == 0 {
+ return nil, errors.New("empty pem block")
+ }
+ bio := C.BIO_new_mem_buf(unsafe.Pointer(&pem_block[0]),
+ C.int(len(pem_block)))
+ if bio == nil {
+ return nil, errors.New("failed creating bio")
+ }
+ defer C.BIO_free(bio)
+ cs := C.CString(password)
+ defer C.free(unsafe.Pointer(cs))
+ rsakey := C.PEM_read_bio_RSAPrivateKey(bio, nil, nil, unsafe.Pointer(cs))
+ if rsakey == nil {
+ return nil, errors.New("failed reading rsa key")
+ }
+ defer C.RSA_free(rsakey)
+
+ // convert to PKEY
+ key := C.EVP_PKEY_new()
+ if key == nil {
+ return nil, errors.New("failed converting to evp_pkey")
+ }
+ if C.EVP_PKEY_set1_RSA(key, (*C.struct_rsa_st)(rsakey)) != 1 {
+ C.EVP_PKEY_free(key)
+ return nil, errors.New("failed converting to evp_pkey")
+ }
+
+ p := &pKey{key: key}
+ runtime.SetFinalizer(p, func(p *pKey) {
+ C.EVP_PKEY_free(p.key)
+ })
+ return p, nil
+}
+
+// LoadPublicKeyFromPEM loads a public key from a PEM-encoded block.
+func LoadPublicKeyFromPEM(pem_block []byte) (PublicKey, error) {
+ if len(pem_block) == 0 {
+ return nil, errors.New("empty pem block")
+ }
+ bio := C.BIO_new_mem_buf(unsafe.Pointer(&pem_block[0]),
+ C.int(len(pem_block)))
+ if bio == nil {
+ return nil, errors.New("failed creating bio")
+ }
+ defer C.BIO_free(bio)
+
+ rsakey := C.PEM_read_bio_RSA_PUBKEY(bio, nil, nil, nil)
+ if rsakey == nil {
+ return nil, errors.New("failed reading rsa key")
+ }
+ defer C.RSA_free(rsakey)
+
+ // convert to PKEY
+ key := C.EVP_PKEY_new()
+ if key == nil {
+ return nil, errors.New("failed converting to evp_pkey")
+ }
+ if C.EVP_PKEY_set1_RSA(key, (*C.struct_rsa_st)(rsakey)) != 1 {
+ C.EVP_PKEY_free(key)
+ return nil, errors.New("failed converting to evp_pkey")
+ }
+
+ p := &pKey{key: key}
+ runtime.SetFinalizer(p, func(p *pKey) {
+ C.EVP_PKEY_free(p.key)
+ })
+ return p, nil
+}
+
+// LoadPublicKeyFromDER loads a public key from a DER-encoded block.
+func LoadPublicKeyFromDER(der_block []byte) (PublicKey, error) {
+ if len(der_block) == 0 {
+ return nil, errors.New("empty der block")
+ }
+ bio := C.BIO_new_mem_buf(unsafe.Pointer(&der_block[0]),
+ C.int(len(der_block)))
+ if bio == nil {
+ return nil, errors.New("failed creating bio")
+ }
+ defer C.BIO_free(bio)
+
+ rsakey := C.d2i_RSA_PUBKEY_bio(bio, nil)
+ if rsakey == nil {
+ return nil, errors.New("failed reading rsa key")
+ }
+ defer C.RSA_free(rsakey)
+
+ // convert to PKEY
+ key := C.EVP_PKEY_new()
+ if key == nil {
+ return nil, errors.New("failed converting to evp_pkey")
+ }
+ if C.EVP_PKEY_set1_RSA(key, (*C.struct_rsa_st)(rsakey)) != 1 {
+ C.EVP_PKEY_free(key)
+ return nil, errors.New("failed converting to evp_pkey")
+ }
+
+ p := &pKey{key: key}
+ runtime.SetFinalizer(p, func(p *pKey) {
+ C.EVP_PKEY_free(p.key)
+ })
+ return p, nil
+}
+
+// GenerateRSAKey generates a new RSA private key with an exponent of 3.
+func GenerateRSAKey(bits int) (PrivateKey, error) {
+ exponent := 3
+ rsa := C.RSA_generate_key(C.int(bits), C.ulong(exponent), nil, nil)
+ if rsa == nil {
+ return nil, errors.New("failed to generate RSA key")
+ }
+ key := C.EVP_PKEY_new()
+ if key == nil {
+ return nil, errors.New("failed to allocate EVP_PKEY")
+ }
+ if C.EVP_PKEY_assign_charp(key, C.EVP_PKEY_RSA, (*C.char)(unsafe.Pointer(rsa))) != 1 {
+ C.EVP_PKEY_free(key)
+ return nil, errors.New("failed to assign RSA key")
+ }
+ p := &pKey{key: key}
+ runtime.SetFinalizer(p, func(p *pKey) {
+ C.EVP_PKEY_free(p.key)
+ })
+ return p, nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/key_test.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/key_test.go
new file mode 100644
index 00000000000..54752d381bf
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/key_test.go
@@ -0,0 +1,149 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openssl
+
+import (
+ "bytes"
+ "crypto/rsa"
+ "crypto/tls"
+ "crypto/x509"
+ "encoding/hex"
+ pem_pkg "encoding/pem"
+ "io/ioutil"
+ "testing"
+)
+
+func TestMarshal(t *testing.T) {
+ key, err := LoadPrivateKeyFromPEM(keyBytes)
+ if err != nil {
+ t.Fatal(err)
+ }
+ cert, err := LoadCertificateFromPEM(certBytes)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ pem, err := cert.MarshalPEM()
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(pem, certBytes) {
+ ioutil.WriteFile("generated", pem, 0644)
+ ioutil.WriteFile("hardcoded", certBytes, 0644)
+ t.Fatal("invalid cert pem bytes")
+ }
+
+ pem, err = key.MarshalPKCS1PrivateKeyPEM()
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(pem, keyBytes) {
+ ioutil.WriteFile("generated", pem, 0644)
+ ioutil.WriteFile("hardcoded", keyBytes, 0644)
+ t.Fatal("invalid private key pem bytes")
+ }
+ tls_cert, err := tls.X509KeyPair(certBytes, keyBytes)
+ if err != nil {
+ t.Fatal(err)
+ }
+ tls_key, ok := tls_cert.PrivateKey.(*rsa.PrivateKey)
+ if !ok {
+ t.Fatal("FASDFASDF")
+ }
+
+ der, err := key.MarshalPKCS1PrivateKeyDER()
+ if err != nil {
+ t.Fatal(err)
+ }
+ tls_der := x509.MarshalPKCS1PrivateKey(tls_key)
+ if !bytes.Equal(der, tls_der) {
+ t.Fatal("invalid private key der bytes: %s\n v.s. %s\n",
+ hex.Dump(der), hex.Dump(tls_der))
+ }
+
+ der, err = key.MarshalPKIXPublicKeyDER()
+ if err != nil {
+ t.Fatal(err)
+ }
+ tls_der, err = x509.MarshalPKIXPublicKey(&tls_key.PublicKey)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(der, tls_der) {
+ ioutil.WriteFile("generated", []byte(hex.Dump(der)), 0644)
+ ioutil.WriteFile("hardcoded", []byte(hex.Dump(tls_der)), 0644)
+ t.Fatal("invalid public key der bytes")
+ }
+
+ pem, err = key.MarshalPKIXPublicKeyPEM()
+ if err != nil {
+ t.Fatal(err)
+ }
+ tls_pem := pem_pkg.EncodeToMemory(&pem_pkg.Block{
+ Type: "PUBLIC KEY", Bytes: tls_der})
+ if !bytes.Equal(pem, tls_pem) {
+ ioutil.WriteFile("generated", pem, 0644)
+ ioutil.WriteFile("hardcoded", tls_pem, 0644)
+ t.Fatal("invalid public key pem bytes")
+ }
+
+ loaded_pubkey_from_pem, err := LoadPublicKeyFromPEM(pem)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ loaded_pubkey_from_der, err := LoadPublicKeyFromDER(der)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ new_der_from_pem, err := loaded_pubkey_from_pem.MarshalPKIXPublicKeyDER()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ new_der_from_der, err := loaded_pubkey_from_der.MarshalPKIXPublicKeyDER()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if !bytes.Equal(new_der_from_der, tls_der) {
+ ioutil.WriteFile("generated", []byte(hex.Dump(new_der_from_der)), 0644)
+ ioutil.WriteFile("hardcoded", []byte(hex.Dump(tls_der)), 0644)
+ t.Fatal("invalid public key der bytes")
+ }
+
+ if !bytes.Equal(new_der_from_pem, tls_der) {
+ ioutil.WriteFile("generated", []byte(hex.Dump(new_der_from_pem)), 0644)
+ ioutil.WriteFile("hardcoded", []byte(hex.Dump(tls_der)), 0644)
+ t.Fatal("invalid public key der bytes")
+ }
+}
+
+func TestGenerate(t *testing.T) {
+ key, err := GenerateRSAKey(2048)
+ if err != nil {
+ t.Fatal(err)
+ }
+ _, err = key.MarshalPKIXPublicKeyPEM()
+ if err != nil {
+ t.Fatal(err)
+ }
+ _, err = key.MarshalPKCS1PrivateKeyPEM()
+ if err != nil {
+ t.Fatal(err)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/net.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/net.go
new file mode 100644
index 00000000000..3cdd040d4d4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/net.go
@@ -0,0 +1,134 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openssl
+
+import (
+ "errors"
+ "net"
+)
+
+type listener struct {
+ net.Listener
+ ctx *Ctx
+}
+
+func (l *listener) Accept() (c net.Conn, err error) {
+ c, err = l.Listener.Accept()
+ if err != nil {
+ return nil, err
+ }
+ ssl_c, err := Server(c, l.ctx)
+ if err != nil {
+ c.Close()
+ return nil, err
+ }
+ return ssl_c, nil
+}
+
+// NewListener wraps an existing net.Listener such that all accepted
+// connections are wrapped as OpenSSL server connections using the provided
+// context ctx.
+func NewListener(inner net.Listener, ctx *Ctx) net.Listener {
+ return &listener{
+ Listener: inner,
+ ctx: ctx}
+}
+
+// Listen is a wrapper around net.Listen that wraps incoming connections with
+// an OpenSSL server connection using the provided context ctx.
+func Listen(network, laddr string, ctx *Ctx) (net.Listener, error) {
+ if ctx == nil {
+ return nil, errors.New("no ssl context provided")
+ }
+ l, err := net.Listen(network, laddr)
+ if err != nil {
+ return nil, err
+ }
+ return NewListener(l, ctx), nil
+}
+
+type DialFlags int
+
+const (
+ InsecureSkipHostVerification DialFlags = 1 << iota
+ DisableSNI
+)
+
+// Dial will connect to network/address and then wrap the corresponding
+// underlying connection with an OpenSSL client connection using context ctx.
+// If flags includes InsecureSkipHostVerification, the server certificate's
+// hostname will not be checked to match the hostname in addr. Otherwise, flags
+// should be 0.
+//
+// Dial is unlikely to verify the server successfully unless you set a verify
+// location or add certificates to the certificate store of the client context
+// you're using. This library does not yet fall back to the system certificate
+// store by default.
+func Dial(network, addr string, ctx *Ctx, flags DialFlags) (*Conn, error) {
+ return DialSession(network, addr, ctx, flags, nil)
+}
+
+// DialSession will connect to network/address and then wrap the corresponding
+// underlying connection with an OpenSSL client connection using context ctx.
+// If flags includes InsecureSkipHostVerification, the server certificate's
+// hostname will not be checked to match the hostname in addr. Otherwise, flags
+// should be 0.
+//
+// DialSession is unlikely to verify the server successfully unless you set a
+// verify location or add certificates to the certificate store of the client
+// context you're using. This library does not yet fall back to the system
+// certificate store by default.
+//
+// If session is not nil it will be used to resume the tls state. The session
+// can be retrieved from the GetSession method on the Conn.
+func DialSession(network, addr string, ctx *Ctx, flags DialFlags,
+ session []byte) (*Conn, error) {
+
+ host, _, err := net.SplitHostPort(addr)
+ if err != nil {
+ return nil, err
+ }
+ if ctx == nil {
+ var err error
+ ctx, err = NewCtx()
+ if err != nil {
+ return nil, err
+ }
+ // TODO: use operating system default certificate chain?
+ }
+ c, err := net.Dial(network, addr)
+ if err != nil {
+ return nil, err
+ }
+ conn, err := Client(c, ctx)
+ if err != nil {
+ c.Close()
+ return nil, err
+ }
+ // XXX removed SNI
+ err = conn.Handshake()
+ if err != nil {
+ conn.Close()
+ return nil, err
+ }
+ if flags&InsecureSkipHostVerification == 0 {
+ err = conn.VerifyHostname(host)
+ if err != nil {
+ conn.Close()
+ return nil, err
+ }
+ }
+ return conn, nil
+}
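+
+// exampleListenAndDial is an illustrative sketch added for documentation; it
+// is not part of the upstream API. It wires Listen and Dial together end to
+// end. The certificate, key and CA file names, as well as the address, are
+// placeholders to be replaced with your own.
+func exampleListenAndDial() error {
+ serverCtx, err := NewCtxFromFiles("server.crt", "server.key")
+ if err != nil {
+ return err
+ }
+ l, err := Listen("tcp", "127.0.0.1:8443", serverCtx)
+ if err != nil {
+ return err
+ }
+ defer l.Close()
+ // Accept() would run in a server loop; each accepted net.Conn is already
+ // wrapped as an OpenSSL server connection.
+
+ clientCtx, err := NewCtx()
+ if err != nil {
+ return err
+ }
+ // Without a verify location (or certs added to the store), hostname
+ // verification in Dial will fail.
+ if err := clientCtx.LoadVerifyLocations("ca.pem", ""); err != nil {
+ return err
+ }
+ conn, err := Dial("tcp", "127.0.0.1:8443", clientCtx, 0)
+ if err != nil {
+ return err
+ }
+ return conn.Close()
+}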
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/nid.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/nid.go
new file mode 100644
index 00000000000..c80f237b605
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/nid.go
@@ -0,0 +1,199 @@
+// Copyright (C) 2014 Ryan Hileman
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openssl
+
+type NID int
+
+const (
+ NID_rsadsi NID = 1
+ NID_pkcs NID = 2
+ NID_md2 NID = 3
+ NID_md5 NID = 4
+ NID_rc4 NID = 5
+ NID_rsaEncryption NID = 6
+ NID_md2WithRSAEncryption NID = 7
+ NID_md5WithRSAEncryption NID = 8
+ NID_pbeWithMD2AndDES_CBC NID = 9
+ NID_pbeWithMD5AndDES_CBC NID = 10
+ NID_X500 NID = 11
+ NID_X509 NID = 12
+ NID_commonName NID = 13
+ NID_countryName NID = 14
+ NID_localityName NID = 15
+ NID_stateOrProvinceName NID = 16
+ NID_organizationName NID = 17
+ NID_organizationalUnitName NID = 18
+ NID_rsa NID = 19
+ NID_pkcs7 NID = 20
+ NID_pkcs7_data NID = 21
+ NID_pkcs7_signed NID = 22
+ NID_pkcs7_enveloped NID = 23
+ NID_pkcs7_signedAndEnveloped NID = 24
+ NID_pkcs7_digest NID = 25
+ NID_pkcs7_encrypted NID = 26
+ NID_pkcs3 NID = 27
+ NID_dhKeyAgreement NID = 28
+ NID_des_ecb NID = 29
+ NID_des_cfb64 NID = 30
+ NID_des_cbc NID = 31
+ NID_des_ede NID = 32
+ NID_des_ede3 NID = 33
+ NID_idea_cbc NID = 34
+ NID_idea_cfb64 NID = 35
+ NID_idea_ecb NID = 36
+ NID_rc2_cbc NID = 37
+ NID_rc2_ecb NID = 38
+ NID_rc2_cfb64 NID = 39
+ NID_rc2_ofb64 NID = 40
+ NID_sha NID = 41
+ NID_shaWithRSAEncryption NID = 42
+ NID_des_ede_cbc NID = 43
+ NID_des_ede3_cbc NID = 44
+ NID_des_ofb64 NID = 45
+ NID_idea_ofb64 NID = 46
+ NID_pkcs9 NID = 47
+ NID_pkcs9_emailAddress NID = 48
+ NID_pkcs9_unstructuredName NID = 49
+ NID_pkcs9_contentType NID = 50
+ NID_pkcs9_messageDigest NID = 51
+ NID_pkcs9_signingTime NID = 52
+ NID_pkcs9_countersignature NID = 53
+ NID_pkcs9_challengePassword NID = 54
+ NID_pkcs9_unstructuredAddress NID = 55
+ NID_pkcs9_extCertAttributes NID = 56
+ NID_netscape NID = 57
+ NID_netscape_cert_extension NID = 58
+ NID_netscape_data_type NID = 59
+ NID_des_ede_cfb64 NID = 60
+ NID_des_ede3_cfb64 NID = 61
+ NID_des_ede_ofb64 NID = 62
+ NID_des_ede3_ofb64 NID = 63
+ NID_sha1 NID = 64
+ NID_sha1WithRSAEncryption NID = 65
+ NID_dsaWithSHA NID = 66
+ NID_dsa_2 NID = 67
+ NID_pbeWithSHA1AndRC2_CBC NID = 68
+ NID_id_pbkdf2 NID = 69
+ NID_dsaWithSHA1_2 NID = 70
+ NID_netscape_cert_type NID = 71
+ NID_netscape_base_url NID = 72
+ NID_netscape_revocation_url NID = 73
+ NID_netscape_ca_revocation_url NID = 74
+ NID_netscape_renewal_url NID = 75
+ NID_netscape_ca_policy_url NID = 76
+ NID_netscape_ssl_server_name NID = 77
+ NID_netscape_comment NID = 78
+ NID_netscape_cert_sequence NID = 79
+ NID_desx_cbc NID = 80
+ NID_id_ce NID = 81
+ NID_subject_key_identifier NID = 82
+ NID_key_usage NID = 83
+ NID_private_key_usage_period NID = 84
+ NID_subject_alt_name NID = 85
+ NID_issuer_alt_name NID = 86
+ NID_basic_constraints NID = 87
+ NID_crl_number NID = 88
+ NID_certificate_policies NID = 89
+ NID_authority_key_identifier NID = 90
+ NID_bf_cbc NID = 91
+ NID_bf_ecb NID = 92
+ NID_bf_cfb64 NID = 93
+ NID_bf_ofb64 NID = 94
+ NID_mdc2 NID = 95
+ NID_mdc2WithRSA NID = 96
+ NID_rc4_40 NID = 97
+ NID_rc2_40_cbc NID = 98
+ NID_givenName NID = 99
+ NID_surname NID = 100
+ NID_initials NID = 101
+ NID_uniqueIdentifier NID = 102
+ NID_crl_distribution_points NID = 103
+ NID_md5WithRSA NID = 104
+ NID_serialNumber NID = 105
+ NID_title NID = 106
+ NID_description NID = 107
+ NID_cast5_cbc NID = 108
+ NID_cast5_ecb NID = 109
+ NID_cast5_cfb64 NID = 110
+ NID_cast5_ofb64 NID = 111
+ NID_pbeWithMD5AndCast5_CBC NID = 112
+ NID_dsaWithSHA1 NID = 113
+ NID_md5_sha1 NID = 114
+ NID_sha1WithRSA NID = 115
+ NID_dsa NID = 116
+ NID_ripemd160 NID = 117
+ NID_ripemd160WithRSA NID = 119
+ NID_rc5_cbc NID = 120
+ NID_rc5_ecb NID = 121
+ NID_rc5_cfb64 NID = 122
+ NID_rc5_ofb64 NID = 123
+ NID_rle_compression NID = 124
+ NID_zlib_compression NID = 125
+ NID_ext_key_usage NID = 126
+ NID_id_pkix NID = 127
+ NID_id_kp NID = 128
+ NID_server_auth NID = 129
+ NID_client_auth NID = 130
+ NID_code_sign NID = 131
+ NID_email_protect NID = 132
+ NID_time_stamp NID = 133
+ NID_ms_code_ind NID = 134
+ NID_ms_code_com NID = 135
+ NID_ms_ctl_sign NID = 136
+ NID_ms_sgc NID = 137
+ NID_ms_efs NID = 138
+ NID_ns_sgc NID = 139
+ NID_delta_crl NID = 140
+ NID_crl_reason NID = 141
+ NID_invalidity_date NID = 142
+ NID_sxnet NID = 143
+ NID_pbe_WithSHA1And128BitRC4 NID = 144
+ NID_pbe_WithSHA1And40BitRC4 NID = 145
+ NID_pbe_WithSHA1And3_Key_TripleDES_CBC NID = 146
+ NID_pbe_WithSHA1And2_Key_TripleDES_CBC NID = 147
+ NID_pbe_WithSHA1And128BitRC2_CBC NID = 148
+ NID_pbe_WithSHA1And40BitRC2_CBC NID = 149
+ NID_keyBag NID = 150
+ NID_pkcs8ShroudedKeyBag NID = 151
+ NID_certBag NID = 152
+ NID_crlBag NID = 153
+ NID_secretBag NID = 154
+ NID_safeContentsBag NID = 155
+ NID_friendlyName NID = 156
+ NID_localKeyID NID = 157
+ NID_x509Certificate NID = 158
+ NID_sdsiCertificate NID = 159
+ NID_x509Crl NID = 160
+ NID_pbes2 NID = 161
+ NID_pbmac1 NID = 162
+ NID_hmacWithSHA1 NID = 163
+ NID_id_qt_cps NID = 164
+ NID_id_qt_unotice NID = 165
+ NID_rc2_64_cbc NID = 166
+ NID_SMIMECapabilities NID = 167
+ NID_pbeWithMD2AndRC2_CBC NID = 168
+ NID_pbeWithMD5AndRC2_CBC NID = 169
+ NID_pbeWithSHA1AndDES_CBC NID = 170
+ NID_ms_ext_req NID = 171
+ NID_ext_req NID = 172
+ NID_name NID = 173
+ NID_dnQualifier NID = 174
+ NID_id_pe NID = 175
+ NID_id_ad NID = 176
+ NID_info_access NID = 177
+ NID_ad_OCSP NID = 178
+ NID_ad_ca_issuers NID = 179
+ NID_OCSP_sign NID = 180
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/oracle_stubs.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/oracle_stubs.go
new file mode 100644
index 00000000000..30492f3b9d8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/oracle_stubs.go
@@ -0,0 +1,162 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build !cgo
+
+package openssl
+
+import (
+ "errors"
+ "net"
+ "time"
+)
+
+const (
+ SSLRecordSize = 16 * 1024
+)
+
+type Conn struct{}
+
+func Client(conn net.Conn, ctx *Ctx) (*Conn, error)
+func Server(conn net.Conn, ctx *Ctx) (*Conn, error)
+
+func (c *Conn) Handshake() error
+func (c *Conn) PeerCertificate() (*Certificate, error)
+func (c *Conn) Close() error
+func (c *Conn) Read(b []byte) (n int, err error)
+func (c *Conn) Write(b []byte) (written int, err error)
+
+func (c *Conn) VerifyHostname(host string) error
+
+func (c *Conn) LocalAddr() net.Addr
+func (c *Conn) RemoteAddr() net.Addr
+func (c *Conn) SetDeadline(t time.Time) error
+func (c *Conn) SetReadDeadline(t time.Time) error
+func (c *Conn) SetWriteDeadline(t time.Time) error
+
+type Ctx struct{}
+
+type SSLVersion int
+
+const (
+ SSLv3 SSLVersion = 0x02
+ TLSv1 SSLVersion = 0x03
+ TLSv1_1 SSLVersion = 0x04
+ TLSv1_2 SSLVersion = 0x05
+ AnyVersion SSLVersion = 0x06
+)
+
+func NewCtxWithVersion(version SSLVersion) (*Ctx, error)
+func NewCtx() (*Ctx, error)
+func NewCtxFromFiles(cert_file string, key_file string) (*Ctx, error)
+func (c *Ctx) UseCertificate(cert *Certificate) error
+func (c *Ctx) UsePrivateKey(key PrivateKey) error
+
+type CertificateStore struct{}
+
+func (c *Ctx) GetCertificateStore() *CertificateStore
+
+func (s *CertificateStore) AddCertificate(cert *Certificate) error
+
+func (c *Ctx) LoadVerifyLocations(ca_file string, ca_path string) error
+
+type Options int
+
+const (
+ NoCompression Options = 0
+ NoSSLv2 Options = 0
+ NoSSLv3 Options = 0
+ NoTLSv1 Options = 0
+ CipherServerPreference Options = 0
+ NoSessionResumptionOrRenegotiation Options = 0
+ NoTicket Options = 0
+)
+
+func (c *Ctx) SetOptions(options Options) Options
+
+type Modes int
+
+const (
+ ReleaseBuffers Modes = 0
+)
+
+func (c *Ctx) SetMode(modes Modes) Modes
+
+type VerifyOptions int
+
+const (
+ VerifyNone VerifyOptions = 0
+ VerifyPeer VerifyOptions = 0
+ VerifyFailIfNoPeerCert VerifyOptions = 0
+ VerifyClientOnce VerifyOptions = 0
+)
+
+func (c *Ctx) SetVerify(options VerifyOptions)
+func (c *Ctx) SetVerifyDepth(depth int)
+func (c *Ctx) SetSessionId(session_id []byte) error
+
+func (c *Ctx) SetCipherList(list string) error
+
+type SessionCacheModes int
+
+const (
+ SessionCacheOff SessionCacheModes = 0
+ SessionCacheClient SessionCacheModes = 0
+ SessionCacheServer SessionCacheModes = 0
+ SessionCacheBoth SessionCacheModes = 0
+ NoAutoClear SessionCacheModes = 0
+ NoInternalLookup SessionCacheModes = 0
+ NoInternalStore SessionCacheModes = 0
+ NoInternal SessionCacheModes = 0
+)
+
+func (c *Ctx) SetSessionCacheMode(modes SessionCacheModes) SessionCacheModes
+
+var (
+ ValidationError = errors.New("Host validation error")
+)
+
+type CheckFlags int
+
+const (
+ AlwaysCheckSubject CheckFlags = 0
+ NoWildcards CheckFlags = 0
+)
+
+func (c *Certificate) CheckHost(host string, flags CheckFlags) error
+func (c *Certificate) CheckEmail(email string, flags CheckFlags) error
+func (c *Certificate) CheckIP(ip net.IP, flags CheckFlags) error
+func (c *Certificate) VerifyHostname(host string) error
+
+type PublicKey interface {
+ MarshalPKIXPublicKeyPEM() (pem_block []byte, err error)
+ MarshalPKIXPublicKeyDER() (der_block []byte, err error)
+ evpPKey() struct{}
+}
+
+type PrivateKey interface {
+ PublicKey
+ MarshalPKCS1PrivateKeyPEM() (pem_block []byte, err error)
+ MarshalPKCS1PrivateKeyDER() (der_block []byte, err error)
+}
+
+func LoadPrivateKeyFromPEM(pem_block []byte) (PrivateKey, error)
+
+type Certificate struct{}
+
+func LoadCertificateFromPEM(pem_block []byte) (*Certificate, error)
+
+func (c *Certificate) MarshalPEM() (pem_block []byte, err error)
+
+func (c *Certificate) PublicKey() (PublicKey, error)
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/password.c b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/password.c
new file mode 100644
index 00000000000..db9582ca726
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/password.c
@@ -0,0 +1,10 @@
+#include <openssl/ssl.h>
+#include "_cgo_export.h"
+
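+/*
+ * password_cb is an OpenSSL pem_password_cb: it copies the NUL-terminated
+ * password supplied through userdata into buf and returns its length, or 0
+ * if the password (including the terminator) does not fit into buf_len.
+ */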
+int password_cb(char *buf,int buf_len, int rwflag,void *userdata) {
+ char* pw = (char *)userdata;
+ int l = strlen(pw);
+ if (l + 1 > buf_len) return 0;
+ strcpy(buf,pw);
+ return l;
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/pem.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/pem.go
new file mode 100644
index 00000000000..6dad5972dbd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/pem.go
@@ -0,0 +1,32 @@
+// Copyright (C) 2014 Ryan Hileman
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openssl
+
+import (
+ "regexp"
+)
+
+var pemSplit *regexp.Regexp = regexp.MustCompile(`(?sm)` +
+ `(^-----[\s-]*?BEGIN.*?-----$` +
+ `.*?` +
+ `^-----[\s-]*?END.*?-----$)`)
+
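+// SplitPEM splits data containing one or more concatenated PEM blocks into a
+// slice holding each "-----BEGIN ...-----"/"-----END ...-----" block separately.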
+func SplitPEM(data []byte) [][]byte {
+ var results [][]byte
+ for _, block := range pemSplit.FindAll(data, -1) {
+ results = append(results, block)
+ }
+ return results
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha1.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha1.go
new file mode 100644
index 00000000000..2592b6627d1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha1.go
@@ -0,0 +1,99 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+/*
+#include <errno.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#include "openssl/evp.h"
+*/
+import "C"
+
+import (
+ "errors"
+ "runtime"
+ "unsafe"
+)
+
+type SHA1Hash struct {
+ ctx C.EVP_MD_CTX
+ engine *Engine
+}
+
+func NewSHA1Hash() (*SHA1Hash, error) { return NewSHA1HashWithEngine(nil) }
+
+func NewSHA1HashWithEngine(e *Engine) (*SHA1Hash, error) {
+ hash := &SHA1Hash{engine: e}
+ C.EVP_MD_CTX_init(&hash.ctx)
+ runtime.SetFinalizer(hash, func(hash *SHA1Hash) { hash.Close() })
+ if err := hash.Reset(); err != nil {
+ return nil, err
+ }
+ return hash, nil
+}
+
+func (s *SHA1Hash) Close() {
+ C.EVP_MD_CTX_cleanup(&s.ctx)
+}
+
+func engineRef(e *Engine) *C.ENGINE {
+ if e == nil {
+ return nil
+ }
+ return e.e
+}
+
+func (s *SHA1Hash) Reset() error {
+ if 1 != C.EVP_DigestInit_ex(&s.ctx, C.EVP_sha1(), engineRef(s.engine)) {
+ return errors.New("openssl: sha1: cannot init digest ctx")
+ }
+ return nil
+}
+
+func (s *SHA1Hash) Write(p []byte) (n int, err error) {
+ if len(p) == 0 {
+ return 0, nil
+ }
+ if 1 != C.EVP_DigestUpdate(&s.ctx, unsafe.Pointer(&p[0]),
+ C.size_t(len(p))) {
+ return 0, errors.New("openssl: sha1: cannot update digest")
+ }
+ return len(p), nil
+}
+
+func (s *SHA1Hash) Sum() (result [20]byte, err error) {
+ if 1 != C.EVP_DigestFinal_ex(&s.ctx,
+ (*C.uchar)(unsafe.Pointer(&result[0])), nil) {
+ return result, errors.New("openssl: sha1: cannot finalize ctx")
+ }
+ return result, s.Reset()
+}
+
+func SHA1(data []byte) (result [20]byte, err error) {
+ hash, err := NewSHA1Hash()
+ if err != nil {
+ return result, err
+ }
+ defer hash.Close()
+ if _, err := hash.Write(data); err != nil {
+ return result, err
+ }
+ return hash.Sum()
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha1_test.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha1_test.go
new file mode 100644
index 00000000000..37037e4468b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha1_test.go
@@ -0,0 +1,111 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+import (
+ "crypto/rand"
+ "crypto/sha1"
+ "io"
+ "testing"
+)
+
+func TestSHA1(t *testing.T) {
+ for i := 0; i < 100; i++ {
+ buf := make([]byte, 10*1024-i)
+ if _, err := io.ReadFull(rand.Reader, buf); err != nil {
+ t.Fatal(err)
+ }
+
+ expected := sha1.Sum(buf)
+ got, err := SHA1(buf)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if expected != got {
+ t.Fatal("exp:%x got:%x", expected, got)
+ }
+ }
+}
+
+func TestSHA1Writer(t *testing.T) {
+ ohash, err := NewSHA1Hash()
+ if err != nil {
+ t.Fatal(err)
+ }
+ hash := sha1.New()
+
+ for i := 0; i < 100; i++ {
+ if err := ohash.Reset(); err != nil {
+ t.Fatal(err)
+ }
+ hash.Reset()
+ buf := make([]byte, 10*1024-i)
+ if _, err := io.ReadFull(rand.Reader, buf); err != nil {
+ t.Fatal(err)
+ }
+
+ if _, err := ohash.Write(buf); err != nil {
+ t.Fatal(err)
+ }
+ if _, err := hash.Write(buf); err != nil {
+ t.Fatal(err)
+ }
+
+ var got, exp [20]byte
+
+ hash.Sum(exp[:0])
+ got, err := ohash.Sum()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if got != exp {
+ t.Fatal("exp:%x got:%x", exp, got)
+ }
+ }
+}
+
+type shafunc func([]byte)
+
+func benchmarkSHA1(b *testing.B, length int64, fn shafunc) {
+ buf := make([]byte, length)
+ if _, err := io.ReadFull(rand.Reader, buf); err != nil {
+ b.Fatal(err)
+ }
+ b.SetBytes(length)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ fn(buf)
+ }
+}
+
+func BenchmarkSHA1Large_openssl(b *testing.B) {
+ benchmarkSHA1(b, 1024*1024, func(buf []byte) { SHA1(buf) })
+}
+
+func BenchmarkSHA1Large_stdlib(b *testing.B) {
+ benchmarkSHA1(b, 1024*1024, func(buf []byte) { sha1.Sum(buf) })
+}
+
+func BenchmarkSHA1Small_openssl(b *testing.B) {
+ benchmarkSHA1(b, 1, func(buf []byte) { SHA1(buf) })
+}
+
+func BenchmarkSHA1Small_stdlib(b *testing.B) {
+ benchmarkSHA1(b, 1, func(buf []byte) { sha1.Sum(buf) })
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha256.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha256.go
new file mode 100644
index 00000000000..6785b32f881
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha256.go
@@ -0,0 +1,92 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+/*
+#include <errno.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#include "openssl/evp.h"
+*/
+import "C"
+
+import (
+ "errors"
+ "runtime"
+ "unsafe"
+)
+
+type SHA256Hash struct {
+ ctx C.EVP_MD_CTX
+ engine *Engine
+}
+
+func NewSHA256Hash() (*SHA256Hash, error) { return NewSHA256HashWithEngine(nil) }
+
+func NewSHA256HashWithEngine(e *Engine) (*SHA256Hash, error) {
+ hash := &SHA256Hash{engine: e}
+ C.EVP_MD_CTX_init(&hash.ctx)
+ runtime.SetFinalizer(hash, func(hash *SHA256Hash) { hash.Close() })
+ if err := hash.Reset(); err != nil {
+ return nil, err
+ }
+ return hash, nil
+}
+
+func (s *SHA256Hash) Close() {
+ C.EVP_MD_CTX_cleanup(&s.ctx)
+}
+
+func (s *SHA256Hash) Reset() error {
+ if 1 != C.EVP_DigestInit_ex(&s.ctx, C.EVP_sha256(), engineRef(s.engine)) {
+ return errors.New("openssl: sha256: cannot init digest ctx")
+ }
+ return nil
+}
+
+func (s *SHA256Hash) Write(p []byte) (n int, err error) {
+ if len(p) == 0 {
+ return 0, nil
+ }
+ if 1 != C.EVP_DigestUpdate(&s.ctx, unsafe.Pointer(&p[0]),
+ C.size_t(len(p))) {
+ return 0, errors.New("openssl: sha256: cannot update digest")
+ }
+ return len(p), nil
+}
+
+func (s *SHA256Hash) Sum() (result [32]byte, err error) {
+ if 1 != C.EVP_DigestFinal_ex(&s.ctx,
+ (*C.uchar)(unsafe.Pointer(&result[0])), nil) {
+ return result, errors.New("openssl: sha256: cannot finalize ctx")
+ }
+ return result, s.Reset()
+}
+
+func SHA256(data []byte) (result [32]byte, err error) {
+ hash, err := NewSHA256Hash()
+ if err != nil {
+ return result, err
+ }
+ defer hash.Close()
+ if _, err := hash.Write(data); err != nil {
+ return result, err
+ }
+ return hash.Sum()
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha256_test.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha256_test.go
new file mode 100644
index 00000000000..89df88afd44
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sha256_test.go
@@ -0,0 +1,109 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+import (
+ "crypto/rand"
+ "crypto/sha256"
+ "io"
+ "testing"
+)
+
+func TestSHA256(t *testing.T) {
+ for i := 0; i < 100; i++ {
+ buf := make([]byte, 10*1024-i)
+ if _, err := io.ReadFull(rand.Reader, buf); err != nil {
+ t.Fatal(err)
+ }
+
+ expected := sha256.Sum256(buf)
+ got, err := SHA256(buf)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if expected != got {
+ t.Fatal("exp:%x got:%x", expected, got)
+ }
+ }
+}
+
+func TestSHA256Writer(t *testing.T) {
+ ohash, err := NewSHA256Hash()
+ if err != nil {
+ t.Fatal(err)
+ }
+ hash := sha256.New()
+
+ for i := 0; i < 100; i++ {
+ if err := ohash.Reset(); err != nil {
+ t.Fatal(err)
+ }
+ hash.Reset()
+ buf := make([]byte, 10*1024-i)
+ if _, err := io.ReadFull(rand.Reader, buf); err != nil {
+ t.Fatal(err)
+ }
+
+ if _, err := ohash.Write(buf); err != nil {
+ t.Fatal(err)
+ }
+ if _, err := hash.Write(buf); err != nil {
+ t.Fatal(err)
+ }
+
+ var got, exp [32]byte
+
+ hash.Sum(exp[:0])
+ got, err := ohash.Sum()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if got != exp {
+ t.Fatal("exp:%x got:%x", exp, got)
+ }
+ }
+}
+
+func benchmarkSHA256(b *testing.B, length int64, fn shafunc) {
+ buf := make([]byte, length)
+ if _, err := io.ReadFull(rand.Reader, buf); err != nil {
+ b.Fatal(err)
+ }
+ b.SetBytes(length)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ fn(buf)
+ }
+}
+
+func BenchmarkSHA256Large_openssl(b *testing.B) {
+ benchmarkSHA256(b, 1024*1024, func(buf []byte) { SHA256(buf) })
+}
+
+func BenchmarkSHA256Large_stdlib(b *testing.B) {
+ benchmarkSHA256(b, 1024*1024, func(buf []byte) { sha256.Sum256(buf) })
+}
+
+func BenchmarkSHA256Small_openssl(b *testing.B) {
+ benchmarkSHA256(b, 1, func(buf []byte) { SHA256(buf) })
+}
+
+func BenchmarkSHA256Small_stdlib(b *testing.B) {
+ benchmarkSHA256(b, 1, func(buf []byte) { sha256.Sum256(buf) })
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sni.c b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sni.c
new file mode 100644
index 00000000000..5398da869b8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sni.c
@@ -0,0 +1,23 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <openssl/ssl.h>
+#include "_cgo_export.h"
+#include <stdio.h>
+
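+/*
+ * sni_cb bridges OpenSSL's TLS servername (SNI) callback into Go: it fetches
+ * the Go context pointer stored in the SSL_CTX's ex_data slot and hands the
+ * call off to the exported Go thunk sni_cb_thunk.
+ */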
+int sni_cb(SSL *con, int *ad, void *arg) {
+ SSL_CTX* ssl_ctx = SSL_get_SSL_CTX(con);
+ void* p = SSL_CTX_get_ex_data(ssl_ctx, get_ssl_ctx_idx());
+ return sni_cb_thunk(p, con, ad, arg);
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sni_test.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sni_test.go
new file mode 100644
index 00000000000..ee3b1a8bbaf
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/sni_test.go
@@ -0,0 +1,23 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openssl
+
+import "fmt"
+
+// We can implement SNI (RFC 6066, http://tools.ietf.org/html/rfc6066) on the
+// server side using the following callback. You should implement the context
+// storage (tlsCtxStorage) yourself.
+func ExampleSetTLSExtServernameCallback() {
+ fmt.Println("Hello")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ssl.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ssl.go
new file mode 100644
index 00000000000..d6120e15d99
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ssl.go
@@ -0,0 +1,167 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+/*
+#include <openssl/crypto.h>
+#include <openssl/ssl.h>
+#include <openssl/err.h>
+#include <openssl/conf.h>
+
+static long SSL_set_options_not_a_macro(SSL* ssl, long options) {
+ return SSL_set_options(ssl, options);
+}
+
+static long SSL_get_options_not_a_macro(SSL* ssl) {
+ return SSL_get_options(ssl);
+}
+
+static long SSL_clear_options_not_a_macro(SSL* ssl, long options) {
+ return SSL_clear_options(ssl, options);
+}
+
+extern int verify_ssl_cb(int ok, X509_STORE_CTX* store);
+*/
+import "C"
+
+import (
+ "os"
+ "unsafe"
+)
+
+type SSLTLSExtErr int
+
+var (
+ ssl_idx = C.SSL_get_ex_new_index(0, nil, nil, nil, nil)
+)
+
+//export get_ssl_idx
+func get_ssl_idx() C.int {
+ return ssl_idx
+}
+
+type SSL struct {
+ ssl *C.SSL
+ verify_cb VerifyCallback
+}
+
+//export verify_ssl_cb_thunk
+func verify_ssl_cb_thunk(p unsafe.Pointer, ok C.int, ctx *C.X509_STORE_CTX) C.int {
+ defer func() {
+ if err := recover(); err != nil {
+ logger.Critf("openssl: verify callback panic'd: %v", err)
+ os.Exit(1)
+ }
+ }()
+ verify_cb := (*SSL)(p).verify_cb
+ // if no Go callback is registered, leave OpenSSL's verdict (ok) unchanged
+ if verify_cb != nil {
+ store := &CertificateStoreCtx{ctx: ctx}
+ if verify_cb(ok == 1, store) {
+ ok = 1
+ } else {
+ ok = 0
+ }
+ }
+ return ok
+}
+
+// GetOptions returns SSL options. See
+// https://www.openssl.org/docs/ssl/SSL_CTX_set_options.html
+func (s *SSL) GetOptions() Options {
+ return Options(C.SSL_get_options_not_a_macro(s.ssl))
+}
+
+// SetOptions sets SSL options. See
+// https://www.openssl.org/docs/ssl/SSL_CTX_set_options.html
+func (s *SSL) SetOptions(options Options) Options {
+ return Options(C.SSL_set_options_not_a_macro(s.ssl, C.long(options)))
+}
+
+// ClearOptions clear SSL options. See
+// https://www.openssl.org/docs/ssl/SSL_CTX_set_options.html
+func (s *SSL) ClearOptions(options Options) Options {
+ return Options(C.SSL_clear_options_not_a_macro(s.ssl, C.long(options)))
+}
+
+// SetVerify controls peer verification settings. See
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html
+func (s *SSL) SetVerify(options VerifyOptions, verify_cb VerifyCallback) {
+ s.verify_cb = verify_cb
+ if verify_cb != nil {
+ C.SSL_set_verify(s.ssl, C.int(options), (*[0]byte)(C.verify_ssl_cb))
+ } else {
+ C.SSL_set_verify(s.ssl, C.int(options), nil)
+ }
+}
+
+// SetVerifyMode controls peer verification setting. See
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html
+func (s *SSL) SetVerifyMode(options VerifyOptions) {
+ s.SetVerify(options, s.verify_cb)
+}
+
+// SetVerifyCallback controls peer verification setting. See
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html
+func (s *SSL) SetVerifyCallback(verify_cb VerifyCallback) {
+ s.SetVerify(s.VerifyMode(), verify_cb)
+}
+
+// GetVerifyCallback returns callback function. See
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html
+func (s *SSL) GetVerifyCallback() VerifyCallback {
+ return s.verify_cb
+}
+
+// VerifyMode returns peer verification setting. See
+// http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html
+func (s *SSL) VerifyMode() VerifyOptions {
+ return VerifyOptions(C.SSL_get_verify_mode(s.ssl))
+}
+
+// SetVerifyDepth controls how many certificates deep the certificate
+// verification logic is willing to follow a certificate chain. See
+// https://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html
+func (s *SSL) SetVerifyDepth(depth int) {
+ C.SSL_set_verify_depth(s.ssl, C.int(depth))
+}
+
+// GetVerifyDepth controls how many certificates deep the certificate
+// verification logic is willing to follow a certificate chain. See
+// https://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html
+func (s *SSL) GetVerifyDepth() int {
+ return int(C.SSL_get_verify_depth(s.ssl))
+}
+
+//export sni_cb_thunk
+func sni_cb_thunk(p unsafe.Pointer, con *C.SSL, ad unsafe.Pointer, arg unsafe.Pointer) C.int {
+ defer func() {
+ if err := recover(); err != nil {
+ logger.Critf("openssl: verify callback sni panic'd: %v", err)
+ os.Exit(1)
+ }
+ }()
+
+ sni_cb := (*Ctx)(p).sni_cb
+
+ s := &SSL{ssl: con}
+ // This attaches a pointer to our SSL struct into the SNI callback.
+ C.SSL_set_ex_data(s.ssl, get_ssl_idx(), unsafe.Pointer(s))
+
+ // Note: this is ctx.sni_cb, not C.sni_cb
+ return C.int(sni_cb(s))
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ssl_test.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ssl_test.go
new file mode 100644
index 00000000000..f83225dec97
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/ssl_test.go
@@ -0,0 +1,633 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openssl
+
+import (
+ "bytes"
+ "crypto/rand"
+ "crypto/tls"
+ "io"
+ "io/ioutil"
+ "net"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/spacemonkeygo/openssl/utils"
+)
+
+var (
+ certBytes = []byte(`-----BEGIN CERTIFICATE-----
+MIIDxDCCAqygAwIBAgIVAMcK/0VWQr2O3MNfJCydqR7oVELcMA0GCSqGSIb3DQEB
+BQUAMIGQMUkwRwYDVQQDE0A1NjdjZGRmYzRjOWZiNTYwZTk1M2ZlZjA1N2M0NGFm
+MDdiYjc4MDIzODIxYTA5NThiY2RmMGMwNzJhOTdiMThhMQswCQYDVQQGEwJVUzEN
+MAsGA1UECBMEVXRhaDEQMA4GA1UEBxMHTWlkdmFsZTEVMBMGA1UEChMMU3BhY2Ug
+TW9ua2V5MB4XDTEzMTIxNzE4MzgyMloXDTIzMTIxNTE4MzgyMlowgZAxSTBHBgNV
+BAMTQDM4NTg3ODRkMjU1NTdiNTM1MWZmNjRmMmQzMTQ1ZjkwYTJlMTIzMDM4Y2Yz
+Mjc1Yzg1OTM1MjcxYWIzMmNiMDkxCzAJBgNVBAYTAlVTMQ0wCwYDVQQIEwRVdGFo
+MRAwDgYDVQQHEwdNaWR2YWxlMRUwEwYDVQQKEwxTcGFjZSBNb25rZXkwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDdf3icNvFsrlrnNLi8SocscqlSbFq+
+pEvmhcSoqgDLqebnqu8Ld73HJJ74MGXEgRX8xZT5FinOML31CR6t9E/j3dqV6p+G
+fdlFLe3IqtC0/bPVnCDBirBygBI4uCrMq+1VhAxPWclrDo7l9QRYbsExH9lfn+Ry
+vxeNMZiOASasvVZNncY8E9usBGRdH17EfDL/TPwXqWOLyxSN5o54GTztjjy9w9CG
+QP7jcCueKYyQJQCtEmnwc6P/q6/EPv5R6drBkX6loAPtmCUAkHqxkWOJrRq/v7Pw
+zRYhfY+ZpVHGc7WEkDnLzRiUypr1C9oxvLKS10etZEIwEdKyOkSg2fdPAgMBAAGj
+EzARMA8GA1UdEwEB/wQFMAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEcz0RTTJ99l
+HTK/zTyfV5VZEhtwqu6bwre/hD7lhI+1ji0DZYGIgCbJLKuZhj+cHn2h5nPhN7zE
+M9tc4pn0TgeVS0SVFSe6TGnIFipNogvP17E+vXpDZcW/xn9kPKeVCZc1hlDt1W4Z
+5q+ub3aUwuMwYs7bcArtDrumCmciJ3LFyNhebPi4mntb5ooeLFLaujEmVYyrQnpo
+tWKC9sMlJmLm4yAso64Sv9KLS2T9ivJBNn0ZtougozBCCTqrqgZVjha+B2yjHe9f
+sRkg/uxcJf7wC5Y0BLlp1+aPwdmZD87T3a1uQ1Ij93jmHG+2T9U20MklHAePOl0q
+yTqdSPnSH1c=
+-----END CERTIFICATE-----
+`)
+ keyBytes = []byte(`-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEA3X94nDbxbK5a5zS4vEqHLHKpUmxavqRL5oXEqKoAy6nm56rv
+C3e9xySe+DBlxIEV/MWU+RYpzjC99QkerfRP493aleqfhn3ZRS3tyKrQtP2z1Zwg
+wYqwcoASOLgqzKvtVYQMT1nJaw6O5fUEWG7BMR/ZX5/kcr8XjTGYjgEmrL1WTZ3G
+PBPbrARkXR9exHwy/0z8F6lji8sUjeaOeBk87Y48vcPQhkD+43ArnimMkCUArRJp
+8HOj/6uvxD7+UenawZF+paAD7ZglAJB6sZFjia0av7+z8M0WIX2PmaVRxnO1hJA5
+y80YlMqa9QvaMbyyktdHrWRCMBHSsjpEoNn3TwIDAQABAoIBAQCwgp6YzmgCFce3
+LBpzYmjqEM3CMzr1ZXRe1gbr6d4Mbu7leyBX4SpJAnP0kIzo1X2yG7ol7XWPLOST
+2pqqQWFQ00EX6wsJYEy+hmVRXl5HfU3MUkkAMwd9l3Xt4UWqKPBPD5XHvmN2fvl9
+Y4388vXdseXGAGNK1eFs0TMjJuOtDxDyrmJcnxpJ7y/77y/Hb5rUa9DCvj8tkKHg
+HmeIwQE0HhIFofj+qCYbqeVyjbPAaYZMrISXb2HmcyULKEOGRbMH24IzInKA0NxV
+kdP9qmV8Y2bJ609Fft/y8Vpj31iEdq/OFXyobdVvnXMnaVyAetoaWy7AOTIQ2Cnw
+wGbJ/F8BAoGBAN/pCnLQrWREeVMuFjf+MgYgCtRRaQ8EOVvjYcXXi0PhtOMFTAb7
+djqhlgmBOFsmeXcb8YRZsF+pNtu1xk5RJOquyKfK8j1rUdAJfoxGHiaUFI2/1i9E
+zuXX/Ao0xNRkWMxMKuwYBmmt1fMuVo+1M8UEwFMdHRtgxe+/+eOV1J2PAoGBAP09
+7GLOYSYAI1OO3BN/bEVNau6tAxP5YShGmX2Qxy0+ooxHZ1V3D8yo6C0hSg+H+fPT
+mjMgGcvaW6K+QyCdHDjgbk2hfdZ+Beq92JApPrH9gMV7MPhwHzgwjzDDio9OFxYY
+3vjBQ2yX+9jvz9lkvq2NM3fqFqbsG6Et+5mCc6pBAoGBAI62bxVtEgbladrtdfXs
+S6ABzkUzOl362EBL9iZuUnJKqstDtgiBQALwuLuIJA5cwHB9W/t6WuMt7CwveJy0
+NW5rRrNDtBAXlgad9o2bp135ZfxO+EoadjCi8B7lMUsaRkq4hWcDjRrQVJxxvXRN
+DxkVBSw0Uzf+/0nnN3OqLODbAoGACCY+/isAC1YDzQOS53m5RT2pjEa7C6CB1Ob4
+t4a6MiWK25LMq35qXr6swg8JMBjDHWqY0r5ctievvTv8Mwd7SgVG526j+wwRKq2z
+U2hQYS/0Peap+8S37Hn7kakpQ1VS/t4MBttJTSxS6XdGLAvG6xTZLCm3UuXUOcqe
+ByGgkUECgYEAmop45kRi974g4MPvyLplcE4syb19ifrHj76gPRBi94Cp8jZosY1T
+ucCCa4lOGgPtXJ0Qf1c8yq5vh4yqkQjrgUTkr+CFDGR6y4CxmNDQxEMYIajaIiSY
+qmgvgyRayemfO2zR0CPgC6wSoGBth+xW6g+WA8y0z76ZSaWpFi8lVM4=
+-----END RSA PRIVATE KEY-----
+`)
+)
+
+func NetPipe(t testing.TB) (net.Conn, net.Conn) {
+ l, err := net.Listen("tcp", "localhost:0")
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer l.Close()
+ client_future := utils.NewFuture()
+ go func() {
+ client_future.Set(net.Dial(l.Addr().Network(), l.Addr().String()))
+ }()
+ var errs utils.ErrorGroup
+ server_conn, err := l.Accept()
+ errs.Add(err)
+ client_conn, err := client_future.Get()
+ errs.Add(err)
+ err = errs.Finalize()
+ if err != nil {
+ if server_conn != nil {
+ server_conn.Close()
+ }
+ if client_conn != nil {
+ client_conn.(net.Conn).Close()
+ }
+ t.Fatal(err)
+ }
+ return server_conn, client_conn.(net.Conn)
+}
+
+type HandshakingConn interface {
+ net.Conn
+ Handshake() error
+}
+
+func SimpleConnTest(t testing.TB, constructor func(
+ t testing.TB, conn1, conn2 net.Conn) (sslconn1, sslconn2 HandshakingConn)) {
+ server_conn, client_conn := NetPipe(t)
+ defer server_conn.Close()
+ defer client_conn.Close()
+
+ data := "first test string\n"
+
+ server, client := constructor(t, server_conn, client_conn)
+ defer close_both(server, client)
+
+ var wg sync.WaitGroup
+ wg.Add(2)
+ go func() {
+ defer wg.Done()
+
+ err := client.Handshake()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ _, err = io.Copy(client, bytes.NewReader([]byte(data)))
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ err = client.Close()
+ if err != nil {
+ t.Fatal(err)
+ }
+ }()
+ go func() {
+ defer wg.Done()
+
+ err := server.Handshake()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ buf := bytes.NewBuffer(make([]byte, 0, len(data)))
+ _, err = io.CopyN(buf, server, int64(len(data)))
+ if err != nil {
+ t.Fatal(err)
+ }
+ if string(buf.Bytes()) != data {
+ t.Fatal("mismatched data")
+ }
+
+ err = server.Close()
+ if err != nil {
+ t.Fatal(err)
+ }
+ }()
+ wg.Wait()
+}
+
+func close_both(closer1, closer2 io.Closer) {
+ var wg sync.WaitGroup
+ wg.Add(2)
+ go func() {
+ defer wg.Done()
+ closer1.Close()
+ }()
+ go func() {
+ defer wg.Done()
+ closer2.Close()
+ }()
+ wg.Wait()
+}
+
+func ClosingTest(t testing.TB, constructor func(
+ t testing.TB, conn1, conn2 net.Conn) (sslconn1, sslconn2 HandshakingConn)) {
+
+ run_test := func(close_tcp bool, server_writes bool) {
+ server_conn, client_conn := NetPipe(t)
+ defer server_conn.Close()
+ defer client_conn.Close()
+ server, client := constructor(t, server_conn, client_conn)
+ defer close_both(server, client)
+
+ var sslconn1, sslconn2 HandshakingConn
+ var conn1 net.Conn
+ if server_writes {
+ sslconn1 = server
+ conn1 = server_conn
+ sslconn2 = client
+ } else {
+ sslconn1 = client
+ conn1 = client_conn
+ sslconn2 = server
+ }
+
+ var wg sync.WaitGroup
+ wg.Add(2)
+ go func() {
+ defer wg.Done()
+ _, err := sslconn1.Write([]byte("hello"))
+ if err != nil {
+ t.Fatal(err)
+ }
+ if close_tcp {
+ err = conn1.Close()
+ } else {
+ err = sslconn1.Close()
+ }
+ if err != nil {
+ t.Fatal(err)
+ }
+ }()
+
+ go func() {
+ defer wg.Done()
+ data, err := ioutil.ReadAll(sslconn2)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(data, []byte("hello")) {
+ t.Fatal("bytes don't match")
+ }
+ }()
+
+ wg.Wait()
+ }
+
+ run_test(true, false)
+ run_test(false, false)
+ run_test(true, true)
+ run_test(false, true)
+}
+
+func ThroughputBenchmark(b *testing.B, constructor func(
+ t testing.TB, conn1, conn2 net.Conn) (sslconn1, sslconn2 HandshakingConn)) {
+ server_conn, client_conn := NetPipe(b)
+ defer server_conn.Close()
+ defer client_conn.Close()
+
+ server, client := constructor(b, server_conn, client_conn)
+ defer close_both(server, client)
+
+ b.SetBytes(1024)
+ data := make([]byte, b.N*1024)
+ _, err := io.ReadFull(rand.Reader, data[:])
+ if err != nil {
+ b.Fatal(err)
+ }
+
+ b.ResetTimer()
+ var wg sync.WaitGroup
+ wg.Add(2)
+ go func() {
+ defer wg.Done()
+ _, err = io.Copy(client, bytes.NewReader([]byte(data)))
+ if err != nil {
+ b.Fatal(err)
+ }
+ }()
+ go func() {
+ defer wg.Done()
+
+ buf := &bytes.Buffer{}
+ _, err = io.CopyN(buf, server, int64(len(data)))
+ if err != nil {
+ b.Fatal(err)
+ }
+ if !bytes.Equal(buf.Bytes(), data) {
+ b.Fatal("mismatched data")
+ }
+ }()
+ wg.Wait()
+ b.StopTimer()
+}
+
+func StdlibConstructor(t testing.TB, server_conn, client_conn net.Conn) (
+ server, client HandshakingConn) {
+ cert, err := tls.X509KeyPair(certBytes, keyBytes)
+ if err != nil {
+ t.Fatal(err)
+ }
+ config := &tls.Config{
+ Certificates: []tls.Certificate{cert},
+ InsecureSkipVerify: true,
+ CipherSuites: []uint16{tls.TLS_RSA_WITH_AES_128_CBC_SHA}}
+ server = tls.Server(server_conn, config)
+ client = tls.Client(client_conn, config)
+ return server, client
+}
+
+func passThruVerify(t testing.TB) func(bool, *CertificateStoreCtx) bool {
+ x := func(ok bool, store *CertificateStoreCtx) bool {
+ cert := store.GetCurrentCert()
+ if cert == nil {
+ t.Fatalf("Could not obtain cert from store\n")
+ }
+ sn := cert.GetSerialNumberHex()
+ if len(sn) == 0 {
+ t.Fatalf("Could not obtain serial number from cert")
+ }
+ return ok
+ }
+ return x
+}
+
+func OpenSSLConstructor(t testing.TB, server_conn, client_conn net.Conn) (
+ server, client HandshakingConn) {
+ ctx, err := NewCtx()
+ if err != nil {
+ t.Fatal(err)
+ }
+ ctx.SetVerify(VerifyNone, passThruVerify(t))
+ key, err := LoadPrivateKeyFromPEM(keyBytes)
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = ctx.UsePrivateKey(key)
+ if err != nil {
+ t.Fatal(err)
+ }
+ cert, err := LoadCertificateFromPEM(certBytes)
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = ctx.UseCertificate(cert)
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = ctx.SetCipherList("AES128-SHA")
+ if err != nil {
+ t.Fatal(err)
+ }
+ server, err = Server(server_conn, ctx)
+ if err != nil {
+ t.Fatal(err)
+ }
+ client, err = Client(client_conn, ctx)
+ if err != nil {
+ t.Fatal(err)
+ }
+ return server, client
+}
+
+func StdlibOpenSSLConstructor(t testing.TB, server_conn, client_conn net.Conn) (
+ server, client HandshakingConn) {
+ server_std, _ := StdlibConstructor(t, server_conn, client_conn)
+ _, client_ssl := OpenSSLConstructor(t, server_conn, client_conn)
+ return server_std, client_ssl
+}
+
+func OpenSSLStdlibConstructor(t testing.TB, server_conn, client_conn net.Conn) (
+ server, client HandshakingConn) {
+ _, client_std := StdlibConstructor(t, server_conn, client_conn)
+ server_ssl, _ := OpenSSLConstructor(t, server_conn, client_conn)
+ return server_ssl, client_std
+}
+
+func TestStdlibSimple(t *testing.T) {
+ SimpleConnTest(t, StdlibConstructor)
+}
+
+func TestOpenSSLSimple(t *testing.T) {
+ SimpleConnTest(t, OpenSSLConstructor)
+}
+
+func TestStdlibClosing(t *testing.T) {
+ ClosingTest(t, StdlibConstructor)
+}
+
+func TestOpenSSLClosing(t *testing.T) {
+ ClosingTest(t, OpenSSLConstructor)
+}
+
+func BenchmarkStdlibThroughput(b *testing.B) {
+ ThroughputBenchmark(b, StdlibConstructor)
+}
+
+func BenchmarkOpenSSLThroughput(b *testing.B) {
+ ThroughputBenchmark(b, OpenSSLConstructor)
+}
+
+func TestStdlibOpenSSLSimple(t *testing.T) {
+ SimpleConnTest(t, StdlibOpenSSLConstructor)
+}
+
+func TestOpenSSLStdlibSimple(t *testing.T) {
+ SimpleConnTest(t, OpenSSLStdlibConstructor)
+}
+
+func TestStdlibOpenSSLClosing(t *testing.T) {
+ ClosingTest(t, StdlibOpenSSLConstructor)
+}
+
+func TestOpenSSLStdlibClosing(t *testing.T) {
+ ClosingTest(t, OpenSSLStdlibConstructor)
+}
+
+func BenchmarkStdlibOpenSSLThroughput(b *testing.B) {
+ ThroughputBenchmark(b, StdlibOpenSSLConstructor)
+}
+
+func BenchmarkOpenSSLStdlibThroughput(b *testing.B) {
+ ThroughputBenchmark(b, OpenSSLStdlibConstructor)
+}
+
+func FullDuplexRenegotiationTest(t testing.TB, constructor func(
+ t testing.TB, conn1, conn2 net.Conn) (sslconn1, sslconn2 HandshakingConn)) {
+
+ server_conn, client_conn := NetPipe(t)
+ defer server_conn.Close()
+ defer client_conn.Close()
+
+ times := 256
+ data_len := 4 * SSLRecordSize
+ data1 := make([]byte, data_len)
+ _, err := io.ReadFull(rand.Reader, data1[:])
+ if err != nil {
+ t.Fatal(err)
+ }
+ data2 := make([]byte, data_len)
+ _, err = io.ReadFull(rand.Reader, data2[:])
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ server, client := constructor(t, server_conn, client_conn)
+ defer close_both(server, client)
+
+ var wg sync.WaitGroup
+
+ send_func := func(sender HandshakingConn, data []byte) {
+ defer wg.Done()
+ for i := 0; i < times; i++ {
+ if i == times/2 {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ err := sender.Handshake()
+ if err != nil {
+ t.Fatal(err)
+ }
+ }()
+ }
+ _, err := sender.Write(data)
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+ }
+
+ recv_func := func(receiver net.Conn, data []byte) {
+ defer wg.Done()
+
+ buf := make([]byte, len(data))
+ for i := 0; i < times; i++ {
+ n, err := io.ReadFull(receiver, buf[:])
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(buf[:n], data) {
+ t.Fatal("mismatched data")
+ }
+ }
+ }
+
+ wg.Add(4)
+ go recv_func(server, data1)
+ go send_func(client, data1)
+ go send_func(server, data2)
+ go recv_func(client, data2)
+ wg.Wait()
+}
+
+func TestStdlibFullDuplexRenegotiation(t *testing.T) {
+ FullDuplexRenegotiationTest(t, StdlibConstructor)
+}
+
+func TestOpenSSLFullDuplexRenegotiation(t *testing.T) {
+ FullDuplexRenegotiationTest(t, OpenSSLConstructor)
+}
+
+func TestOpenSSLStdlibFullDuplexRenegotiation(t *testing.T) {
+ FullDuplexRenegotiationTest(t, OpenSSLStdlibConstructor)
+}
+
+func TestStdlibOpenSSLFullDuplexRenegotiation(t *testing.T) {
+ FullDuplexRenegotiationTest(t, StdlibOpenSSLConstructor)
+}
+
+func LotsOfConns(t *testing.T, payload_size int64, loops, clients int,
+ sleep time.Duration, newListener func(net.Listener) net.Listener,
+ newClient func(net.Conn) (net.Conn, error)) {
+ tcp_listener, err := net.Listen("tcp", "localhost:0")
+ if err != nil {
+ t.Fatal(err)
+ }
+ ssl_listener := newListener(tcp_listener)
+ go func() {
+ for {
+ conn, err := ssl_listener.Accept()
+ if err != nil {
+ t.Fatalf("failed accept: %s", err)
+ continue
+ }
+ go func() {
+ defer func() {
+ err = conn.Close()
+ if err != nil {
+ t.Fatalf("failed closing: %s", err)
+ }
+ }()
+ for i := 0; i < loops; i++ {
+ _, err := io.Copy(ioutil.Discard,
+ io.LimitReader(conn, payload_size))
+ if err != nil {
+ t.Fatalf("failed reading: %s", err)
+ return
+ }
+ _, err = io.Copy(conn, io.LimitReader(rand.Reader,
+ payload_size))
+ if err != nil {
+ t.Fatalf("failed writing: %s", err)
+ return
+ }
+ }
+ time.Sleep(sleep)
+ }()
+ }
+ }()
+ var wg sync.WaitGroup
+ for i := 0; i < clients; i++ {
+ tcp_client, err := net.Dial(tcp_listener.Addr().Network(),
+ tcp_listener.Addr().String())
+ if err != nil {
+ t.Fatal(err)
+ }
+ ssl_client, err := newClient(tcp_client)
+ if err != nil {
+ t.Fatal(err)
+ }
+ wg.Add(1)
+ go func(i int) {
+ defer func() {
+ err = ssl_client.Close()
+ if err != nil {
+ t.Fatalf("failed closing: %s", err)
+ }
+ wg.Done()
+ }()
+ for i := 0; i < loops; i++ {
+ _, err := io.Copy(ssl_client, io.LimitReader(rand.Reader,
+ payload_size))
+ if err != nil {
+ t.Fatalf("failed writing: %s", err)
+ return
+ }
+ _, err = io.Copy(ioutil.Discard,
+ io.LimitReader(ssl_client, payload_size))
+ if err != nil {
+ t.Fatalf("failed reading: %s", err)
+ return
+ }
+ }
+ time.Sleep(sleep)
+ }(i)
+ }
+ wg.Wait()
+}
+
+func TestStdlibLotsOfConns(t *testing.T) {
+ tls_cert, err := tls.X509KeyPair(certBytes, keyBytes)
+ if err != nil {
+ t.Fatal(err)
+ }
+ tls_config := &tls.Config{
+ Certificates: []tls.Certificate{tls_cert},
+ InsecureSkipVerify: true,
+ CipherSuites: []uint16{tls.TLS_RSA_WITH_AES_128_CBC_SHA}}
+ LotsOfConns(t, 1024*64, 10, 100, 0*time.Second,
+ func(l net.Listener) net.Listener {
+ return tls.NewListener(l, tls_config)
+ }, func(c net.Conn) (net.Conn, error) {
+ return tls.Client(c, tls_config), nil
+ })
+}
+
+func TestOpenSSLLotsOfConns(t *testing.T) {
+ ctx, err := NewCtx()
+ if err != nil {
+ t.Fatal(err)
+ }
+ key, err := LoadPrivateKeyFromPEM(keyBytes)
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = ctx.UsePrivateKey(key)
+ if err != nil {
+ t.Fatal(err)
+ }
+ cert, err := LoadCertificateFromPEM(certBytes)
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = ctx.UseCertificate(cert)
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = ctx.SetCipherList("AES128-SHA")
+ if err != nil {
+ t.Fatal(err)
+ }
+ LotsOfConns(t, 1024*64, 10, 100, 0*time.Second,
+ func(l net.Listener) net.Listener {
+ return NewListener(l, ctx)
+ }, func(c net.Conn) (net.Conn, error) {
+ return Client(c, ctx)
+ })
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/utils/errors.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/utils/errors.go
new file mode 100644
index 00000000000..bab314c95d7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/utils/errors.go
@@ -0,0 +1,50 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package utils
+
+import (
+ "errors"
+ "strings"
+)
+
+// ErrorGroup collates errors
+type ErrorGroup struct {
+ Errors []error
+}
+
+// Add adds an error to an existing error group
+func (e *ErrorGroup) Add(err error) {
+ if err != nil {
+ e.Errors = append(e.Errors, err)
+ }
+}
+
+// Finalize returns an error corresponding to the ErrorGroup state. If there
+// are no errors in the group, Finalize returns nil. If there is only one
+// error, Finalize returns that error. Otherwise, Finalize returns a new error
+// built from the messages of the constituent errors.
+func (e *ErrorGroup) Finalize() error {
+ if len(e.Errors) == 0 {
+ return nil
+ }
+ if len(e.Errors) == 1 {
+ return e.Errors[0]
+ }
+ msgs := make([]string, 0, len(e.Errors))
+ for _, err := range e.Errors {
+ msgs = append(msgs, err.Error())
+ }
+ return errors.New(strings.Join(msgs, "\n"))
+}
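+
+// Illustrative usage sketch (not part of the upstream API): an ErrorGroup can
+// collapse the errors from several cleanup calls into a single error value.
+//
+//   func closeAll(closers ...io.Closer) error {
+//       var group ErrorGroup
+//       for _, c := range closers {
+//           group.Add(c.Close())
+//       }
+//       return group.Finalize()
+//   }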
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/utils/future.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/utils/future.go
new file mode 100644
index 00000000000..fa1bbbfb861
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/utils/future.go
@@ -0,0 +1,79 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package utils
+
+import (
+ "sync"
+)
+
+// Future is a type that is essentially the inverse of a channel. With a
+// channel, you have multiple senders and one receiver. With a future, you can
+// have multiple receivers and one sender. Additionally, a future protects
+// against double-sends. Since this is usually used for returning function
+// results, we capture and return error values as well. Use NewFuture
+// to initialize.
+type Future struct {
+ mutex *sync.Mutex
+ cond *sync.Cond
+ received bool
+ val interface{}
+ err error
+}
+
+// NewFuture returns an initialized and ready Future.
+func NewFuture() *Future {
+ mutex := &sync.Mutex{}
+ return &Future{
+ mutex: mutex,
+ cond: sync.NewCond(mutex),
+ received: false,
+ val: nil,
+ err: nil,
+ }
+}
+
+// Get blocks until the Future has a value set.
+func (self *Future) Get() (interface{}, error) {
+ self.mutex.Lock()
+ defer self.mutex.Unlock()
+ for {
+ if self.received {
+ return self.val, self.err
+ }
+ self.cond.Wait()
+ }
+}
+
+// Fired returns whether or not a value has been set. If Fired is true, Get
+// won't block.
+func (self *Future) Fired() bool {
+ self.mutex.Lock()
+ defer self.mutex.Unlock()
+ return self.received
+}
+
+// Set provides the value to present and future Get calls. If Set has already
+// been called, this is a no-op.
+func (self *Future) Set(val interface{}, err error) {
+ self.mutex.Lock()
+ defer self.mutex.Unlock()
+ if self.received {
+ return
+ }
+ self.received = true
+ self.val = val
+ self.err = err
+ self.cond.Broadcast()
+}
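+
+// Illustrative sketch: one goroutine resolves the Future while any number of
+// callers block on Get and observe the same result. doSlowWork is a
+// hypothetical helper, not part of this package.
+//
+//   f := NewFuture()
+//   go func() {
+//       val, err := doSlowWork()
+//       f.Set(val, err)
+//   }()
+//   val, err := f.Get() // blocks until Set has been called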
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/verify.c b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/verify.c
new file mode 100644
index 00000000000..d55866c4cf0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/verify.c
@@ -0,0 +1,31 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <openssl/ssl.h>
+#include "_cgo_export.h"
+
+int verify_cb(int ok, X509_STORE_CTX* store) {
+ SSL* ssl = (SSL *)X509_STORE_CTX_get_app_data(store);
+ SSL_CTX* ssl_ctx = SSL_get_SSL_CTX(ssl);
+ void* p = SSL_CTX_get_ex_data(ssl_ctx, get_ssl_ctx_idx());
+ // get the pointer to the go Ctx object and pass it back into the thunk
+ return verify_cb_thunk(p, ok, store);
+}
+
+int verify_ssl_cb(int ok, X509_STORE_CTX* store) {
+ SSL* ssl = (SSL *)X509_STORE_CTX_get_app_data(store);
+ void* p = SSL_get_ex_data(ssl, get_ssl_idx());
+ // get the pointer to the go Ctx object and pass it back into the thunk
+ return verify_ssl_cb_thunk(p, ok, store);
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/version.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/version.go
new file mode 100644
index 00000000000..8f3d392cde8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/openssl/version.go
@@ -0,0 +1,22 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build cgo
+
+package openssl
+
+// #include <openssl/opensslv.h>
+import "C"
+
+const Version string = C.OPENSSL_VERSION_TEXT
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/.travis.yml b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/.travis.yml
new file mode 100644
index 00000000000..d87fbdcf39c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/.travis.yml
@@ -0,0 +1,6 @@
+language: go
+
+go:
+ - 1.2
+ - release
+ - tip
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/LICENSE b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/LICENSE
new file mode 100644
index 00000000000..37ec93a14fd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/LICENSE
@@ -0,0 +1,191 @@
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and
+distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright
+owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities
+that control, are controlled by, or are under common control with that entity.
+For the purposes of this definition, "control" means (i) the power, direct or
+indirect, to cause the direction or management of such entity, whether by
+contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
+outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising
+permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including
+but not limited to software source code, documentation source, and configuration
+files.
+
+"Object" form shall mean any form resulting from mechanical transformation or
+translation of a Source form, including but not limited to compiled object code,
+generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made
+available under the License, as indicated by a copyright notice that is included
+in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that
+is based on (or derived from) the Work and for which the editorial revisions,
+annotations, elaborations, or other modifications represent, as a whole, an
+original work of authorship. For the purposes of this License, Derivative Works
+shall not include works that remain separable from, or merely link (or bind by
+name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version
+of the Work and any modifications or additions to that Work or Derivative Works
+thereof, that is intentionally submitted to Licensor for inclusion in the Work
+by the copyright owner or by an individual or Legal Entity authorized to submit
+on behalf of the copyright owner. For the purposes of this definition,
+"submitted" means any form of electronic, verbal, or written communication sent
+to the Licensor or its representatives, including but not limited to
+communication on electronic mailing lists, source code control systems, and
+issue tracking systems that are managed by, or on behalf of, the Licensor for
+the purpose of discussing and improving the Work, but excluding communication
+that is conspicuously marked or otherwise designated in writing by the copyright
+owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
+of whom a Contribution has been received by Licensor and subsequently
+incorporated within the Work.
+
+2. Grant of Copyright License.
+
+Subject to the terms and conditions of this License, each Contributor hereby
+grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
+irrevocable copyright license to reproduce, prepare Derivative Works of,
+publicly display, publicly perform, sublicense, and distribute the Work and such
+Derivative Works in Source or Object form.
+
+3. Grant of Patent License.
+
+Subject to the terms and conditions of this License, each Contributor hereby
+grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
+irrevocable (except as stated in this section) patent license to make, have
+made, use, offer to sell, sell, import, and otherwise transfer the Work, where
+such license applies only to those patent claims licensable by such Contributor
+that are necessarily infringed by their Contribution(s) alone or by combination
+of their Contribution(s) with the Work to which such Contribution(s) was
+submitted. If You institute patent litigation against any entity (including a
+cross-claim or counterclaim in a lawsuit) alleging that the Work or a
+Contribution incorporated within the Work constitutes direct or contributory
+patent infringement, then any patent licenses granted to You under this License
+for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution.
+
+You may reproduce and distribute copies of the Work or Derivative Works thereof
+in any medium, with or without modifications, and in Source or Object form,
+provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of
+this License; and
+You must cause any modified files to carry prominent notices stating that You
+changed the files; and
+You must retain, in the Source form of any Derivative Works that You distribute,
+all copyright, patent, trademark, and attribution notices from the Source form
+of the Work, excluding those notices that do not pertain to any part of the
+Derivative Works; and
+If the Work includes a "NOTICE" text file as part of its distribution, then any
+Derivative Works that You distribute must include a readable copy of the
+attribution notices contained within such NOTICE file, excluding those notices
+that do not pertain to any part of the Derivative Works, in at least one of the
+following places: within a NOTICE text file distributed as part of the
+Derivative Works; within the Source form or documentation, if provided along
+with the Derivative Works; or, within a display generated by the Derivative
+Works, if and wherever such third-party notices normally appear. The contents of
+the NOTICE file are for informational purposes only and do not modify the
+License. You may add Your own attribution notices within Derivative Works that
+You distribute, alongside or as an addendum to the NOTICE text from the Work,
+provided that such additional attribution notices cannot be construed as
+modifying the License.
+You may add Your own copyright statement to Your modifications and may provide
+additional or different license terms and conditions for use, reproduction, or
+distribution of Your modifications, or for any such Derivative Works as a whole,
+provided Your use, reproduction, and distribution of the Work otherwise complies
+with the conditions stated in this License.
+
+5. Submission of Contributions.
+
+Unless You explicitly state otherwise, any Contribution intentionally submitted
+for inclusion in the Work by You to the Licensor shall be under the terms and
+conditions of this License, without any additional terms or conditions.
+Notwithstanding the above, nothing herein shall supersede or modify the terms of
+any separate license agreement you may have executed with Licensor regarding
+such Contributions.
+
+6. Trademarks.
+
+This License does not grant permission to use the trade names, trademarks,
+service marks, or product names of the Licensor, except as required for
+reasonable and customary use in describing the origin of the Work and
+reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty.
+
+Unless required by applicable law or agreed to in writing, Licensor provides the
+Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
+including, without limitation, any warranties or conditions of TITLE,
+NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
+solely responsible for determining the appropriateness of using or
+redistributing the Work and assume any risks associated with Your exercise of
+permissions under this License.
+
+8. Limitation of Liability.
+
+In no event and under no legal theory, whether in tort (including negligence),
+contract, or otherwise, unless required by applicable law (such as deliberate
+and grossly negligent acts) or agreed to in writing, shall any Contributor be
+liable to You for damages, including any direct, indirect, special, incidental,
+or consequential damages of any character arising as a result of this License or
+out of the use or inability to use the Work (including but not limited to
+damages for loss of goodwill, work stoppage, computer failure or malfunction, or
+any and all other commercial damages or losses), even if such Contributor has
+been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability.
+
+While redistributing the Work or Derivative Works thereof, You may choose to
+offer, and charge a fee for, acceptance of support, warranty, indemnity, or
+other liability obligations and/or rights consistent with this License. However,
+in accepting such obligations, You may act only on Your own behalf and on Your
+sole responsibility, not on behalf of any other Contributor, and only if You
+agree to indemnify, defend, and hold each Contributor harmless for any liability
+incurred by, or claims asserted against, such Contributor by reason of your
+accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work
+
+To apply the Apache License to your work, attach the following boilerplate
+notice, with the fields enclosed by brackets "[]" replaced with your own
+identifying information. (Don't include the brackets!) The text should be
+enclosed in the appropriate comment syntax for the file format. We also
+recommend that a file or class name and description of purpose be included on
+the same "printed page" as the copyright notice for easier identification within
+third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/README.md b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/README.md
new file mode 100644
index 00000000000..28033f68d9c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/README.md
@@ -0,0 +1,19 @@
+# spacelog [![Build Status](https://api.travis-ci.org/spacemonkeygo/spacelog.svg?branch=master)](https://travis-ci.org/spacemonkeygo/spacelog)
+
+Please see http://godoc.org/github.com/spacemonkeygo/spacelog for info
+
+### License
+
+Copyright (C) 2014 Space Monkey, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture.go
new file mode 100644
index 00000000000..d7ea1ca31a6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture.go
@@ -0,0 +1,67 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+)
+
+// CaptureOutputToFile opens a filehandle using the given path, then calls
+// CaptureOutputToFd on the associated filehandle.
+func CaptureOutputToFile(path string) error {
+ fh, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE, 0644)
+ if err != nil {
+ return err
+ }
+ defer fh.Close()
+ return CaptureOutputToFd(int(fh.Fd()))
+}
+
+// CaptureOutputToProcess starts a process and, using CaptureOutputToFd,
+// redirects stdout and stderr to the subprocess' stdin.
+// CaptureOutputToProcess expects the subprocess to last the lifetime of the
+// process and will panic if the subprocess dies.
+func CaptureOutputToProcess(command string, args ...string) error {
+ cmd := exec.Command(command, args...)
+ out, err := cmd.StdinPipe()
+ if err != nil {
+ return err
+ }
+ defer out.Close()
+ type fder interface {
+ Fd() uintptr
+ }
+ out_fder, ok := out.(fder)
+ if !ok {
+ return fmt.Errorf("unable to get underlying pipe")
+ }
+ err = CaptureOutputToFd(int(out_fder.Fd()))
+ if err != nil {
+ return err
+ }
+ err = cmd.Start()
+ if err != nil {
+ return err
+ }
+ go func() {
+ err := cmd.Wait()
+ if err != nil {
+ panic(fmt.Errorf("captured output process died! %s", err))
+ }
+ }()
+ return nil
+}
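+
+// Illustrative sketch: pipe this process' stdout and stderr into an external
+// log program. The command name and arguments are examples only.
+//
+//   if err := CaptureOutputToProcess("logger", "-t", "myservice"); err != nil {
+//       panic(err)
+//   }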
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture_other.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture_other.go
new file mode 100644
index 00000000000..5a62a2accaf
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture_other.go
@@ -0,0 +1,35 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build !windows
+
+package spacelog
+
+import (
+ "syscall"
+)
+
+// CaptureOutputToFd redirects the current process' stdout and stderr file
+// descriptors to the given file descriptor, using the dup2 syscall.
+func CaptureOutputToFd(fd int) error {
+ err := syscall.Dup2(fd, syscall.Stdout)
+ if err != nil {
+ return err
+ }
+ err = syscall.Dup2(fd, syscall.Stderr)
+ if err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture_windows.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture_windows.go
new file mode 100644
index 00000000000..e9f061dcf47
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/capture_windows.go
@@ -0,0 +1,23 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "fmt"
+)
+
+// CaptureOutputToFd is not supported on Windows and always returns an error.
+func CaptureOutputToFd(fd int) error {
+ return fmt.Errorf("CaptureOutputToFd not supported on Windows")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/collection.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/collection.go
new file mode 100644
index 00000000000..fd612db6ebd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/collection.go
@@ -0,0 +1,229 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "regexp"
+ "runtime"
+ "strings"
+ "sync"
+ "text/template"
+)
+
+var (
+ // If set, these prefixes will be stripped out of automatic logger names.
+ IgnoredPrefixes []string
+
+ badChars = regexp.MustCompile("[^a-zA-Z0-9_.-]")
+ slashes = regexp.MustCompile("[/]")
+)
+
+func callerName() string {
+ pc, _, _, ok := runtime.Caller(2)
+ if !ok {
+ return "unknown.unknown"
+ }
+ f := runtime.FuncForPC(pc)
+ if f == nil {
+ return "unknown.unknown"
+ }
+ name := f.Name()
+ for _, prefix := range IgnoredPrefixes {
+ name = strings.TrimPrefix(name, prefix)
+ }
+ return badChars.ReplaceAllLiteralString(
+ slashes.ReplaceAllLiteralString(name, "."), "_")
+}
+
+// A LoggerCollection contains all of the loggers a program might use.
+// Typically a codebase will just use the default logger collection.
+type LoggerCollection struct {
+ mtx sync.Mutex
+ loggers map[string]*Logger
+ level LogLevel
+ handler Handler
+}
+
+// NewLoggerCollection creates a new logger collection. It's unlikely you will
+// ever practically need this method. Use the DefaultLoggerCollection instead.
+func NewLoggerCollection() *LoggerCollection {
+ return &LoggerCollection{
+ loggers: make(map[string]*Logger),
+ level: DefaultLevel,
+ handler: defaultHandler}
+}
+
+// GetLogger returns a new Logger with a name automatically generated using
+// the callstack. If you want to avoid automatic name generation, check out
+// GetLoggerNamed.
+func (c *LoggerCollection) GetLogger() *Logger {
+ return c.GetLoggerNamed(callerName())
+}
+
+func (c *LoggerCollection) getLogger(name string, level LogLevel,
+ handler Handler) *Logger {
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+
+ logger, exists := c.loggers[name]
+ if !exists {
+ logger = &Logger{level: level,
+ collection: c,
+ name: name,
+ handler: handler}
+ c.loggers[name] = logger
+ }
+ return logger
+}
+
+// GetLoggerNamed returns a new Logger with the provided name. GetLogger is
+// more frequently used.
+func (c *LoggerCollection) GetLoggerNamed(name string) *Logger {
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+
+ logger, exists := c.loggers[name]
+ if !exists {
+ logger = &Logger{level: c.level,
+ collection: c,
+ name: name,
+ handler: c.handler}
+ c.loggers[name] = logger
+ }
+ return logger
+}
+
+// SetLevel will set the current log level for all loggers with names that
+// match a provided regular expression. If the regular expression is nil, then
+// all loggers match.
+func (c *LoggerCollection) SetLevel(re *regexp.Regexp, level LogLevel) {
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+
+ if re == nil {
+ c.level = level
+ }
+ for name, logger := range c.loggers {
+ if re == nil || re.MatchString(name) {
+ logger.setLevel(level)
+ }
+ }
+}
+
+// SetHandler will set the current log handler for all loggers with names that
+// match a provided regular expression. If the regular expression is nil, then
+// all loggers match.
+func (c *LoggerCollection) SetHandler(re *regexp.Regexp, handler Handler) {
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+
+ if re == nil {
+ c.handler = handler
+ }
+ for name, logger := range c.loggers {
+ if re == nil || re.MatchString(name) {
+ logger.setHandler(handler)
+ }
+ }
+}
+
+// SetTextTemplate will set the current text template for all loggers with
+// names that match a provided regular expression. If the regular expression
+// is nil, then all loggers match. Note that not every handler is guaranteed
+// to support text templates and a text template will only apply to
+// text-oriented and unstructured handlers.
+func (c *LoggerCollection) SetTextTemplate(re *regexp.Regexp,
+ t *template.Template) {
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+
+ if re == nil {
+ c.handler.SetTextTemplate(t)
+ }
+ for name, logger := range c.loggers {
+ if re == nil || re.MatchString(name) {
+ logger.getHandler().SetTextTemplate(t)
+ }
+ }
+}
+
+// SetTextOutput will set the current output interface for all loggers with
+// names that match a provided regular expression. If the regular expression
+// is nil, then all loggers match. Note that not every handler is guaranteed
+// to support text output and a text output interface will only apply to
+// text-oriented and unstructured handlers.
+func (c *LoggerCollection) SetTextOutput(re *regexp.Regexp,
+ output TextOutput) {
+ c.mtx.Lock()
+ defer c.mtx.Unlock()
+
+ if re == nil {
+ c.handler.SetTextOutput(output)
+ }
+ for name, logger := range c.loggers {
+ if re == nil || re.MatchString(name) {
+ logger.getHandler().SetTextOutput(output)
+ }
+ }
+}
+
+var (
+ // It's unlikely you'll need to use this directly
+ DefaultLoggerCollection = NewLoggerCollection()
+)
+
+// GetLogger returns an automatically-named logger on the default logger
+// collection.
+func GetLogger() *Logger {
+ return DefaultLoggerCollection.GetLoggerNamed(callerName())
+}
+
+// GetLoggerNamed returns a new Logger with the provided name on the default
+// logger collection. GetLogger is more frequently used.
+func GetLoggerNamed(name string) *Logger {
+ return DefaultLoggerCollection.GetLoggerNamed(name)
+}
+
+// SetLevel will set the current log level for all loggers on the default
+// collection with names that match a provided regular expression. If the
+// regular expression is nil, then all loggers match.
+func SetLevel(re *regexp.Regexp, level LogLevel) {
+ DefaultLoggerCollection.SetLevel(re, level)
+}
+
+// SetHandler will set the current log handler for all loggers on the default
+// collection with names that match a provided regular expression. If the
+// regular expression is nil, then all loggers match.
+func SetHandler(re *regexp.Regexp, handler Handler) {
+ DefaultLoggerCollection.SetHandler(re, handler)
+}
+
+// SetTextTemplate will set the current text template for all loggers on the
+// default collection with names that match a provided regular expression. If
+// the regular expression is nil, then all loggers match. Note that not every
+// handler is guaranteed to support text templates and a text template will
+// only apply to text-oriented and unstructured handlers.
+func SetTextTemplate(re *regexp.Regexp, t *template.Template) {
+ DefaultLoggerCollection.SetTextTemplate(re, t)
+}
+
+// SetTextOutput will set the current output interface for all loggers on the
+// default collection with names that match a provided regular expression. If
+// the regular expression is nil, then all loggers match. Note that not every
+// handler is guaranteed to support text output and a text output interface
+// will only apply to text-oriented and unstructured handlers.
+func SetTextOutput(re *regexp.Regexp, output TextOutput) {
+ DefaultLoggerCollection.SetTextOutput(re, output)
+}
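+
+// Illustrative sketch, written from a client package's point of view (the
+// logger names and pattern are examples only): fetch a named logger, then use
+// a regular expression with SetLevel to raise verbosity for one subsystem.
+//
+//   var netLog = spacelog.GetLoggerNamed("myapp.net")
+//
+//   func enableNetDebugging() {
+//       spacelog.SetLevel(regexp.MustCompile(`^myapp\.net`), spacelog.Debug)
+//   }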
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/convenience.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/convenience.go
new file mode 100644
index 00000000000..4b4efd22389
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/convenience.go
@@ -0,0 +1,266 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "fmt"
+ "io"
+)
+
+// Debug logs a collection of values if the logger's level is debug or even
+// more permissive.
+func (l *Logger) Debug(v ...interface{}) {
+ if l.getLevel() <= Debug {
+ l.getHandler().Log(l.name, Debug, fmt.Sprint(v...), 1)
+ }
+}
+
+// Debugf logs a format string with values if the logger's level is debug or
+// even more permissive.
+func (l *Logger) Debugf(format string, v ...interface{}) {
+ if l.getLevel() <= Debug {
+ l.getHandler().Log(l.name, Debug, fmt.Sprintf(format, v...), 1)
+ }
+}
+
+// Debuge logs an error value if the error is not nil and the logger's level
+// is debug or even more permissive.
+func (l *Logger) Debuge(err error) {
+ if l.getLevel() <= Debug && err != nil {
+ l.getHandler().Log(l.name, Debug, err.Error(), 1)
+ }
+}
+
+// DebugEnabled returns true if the logger's level is debug or even more
+// permissive.
+func (l *Logger) DebugEnabled() bool {
+ return l.getLevel() <= Debug
+}
+
+// Info logs a collection of values if the logger's level is info or even
+// more permissive.
+func (l *Logger) Info(v ...interface{}) {
+ if l.getLevel() <= Info {
+ l.getHandler().Log(l.name, Info, fmt.Sprint(v...), 1)
+ }
+}
+
+// Infof logs a format string with values if the logger's level is info or
+// even more permissive.
+func (l *Logger) Infof(format string, v ...interface{}) {
+ if l.getLevel() <= Info {
+ l.getHandler().Log(l.name, Info, fmt.Sprintf(format, v...), 1)
+ }
+}
+
+// Infoe logs an error value if the error is not nil and the logger's level
+// is info or even more permissive.
+func (l *Logger) Infoe(err error) {
+ if l.getLevel() <= Info && err != nil {
+ l.getHandler().Log(l.name, Info, err.Error(), 1)
+ }
+}
+
+// InfoEnabled returns true if the logger's level is info or even more
+// permissive.
+func (l *Logger) InfoEnabled() bool {
+ return l.getLevel() <= Info
+}
+
+// Notice logs a collection of values if the logger's level is notice or even
+// more permissive.
+func (l *Logger) Notice(v ...interface{}) {
+ if l.getLevel() <= Notice {
+ l.getHandler().Log(l.name, Notice, fmt.Sprint(v...), 1)
+ }
+}
+
+// Noticef logs a format string with values if the logger's level is notice or
+// even more permissive.
+func (l *Logger) Noticef(format string, v ...interface{}) {
+ if l.getLevel() <= Notice {
+ l.getHandler().Log(l.name, Notice, fmt.Sprintf(format, v...), 1)
+ }
+}
+
+// Noticee logs an error value if the error is not nil and the logger's level
+// is notice or even more permissive.
+func (l *Logger) Noticee(err error) {
+ if l.getLevel() <= Notice && err != nil {
+ l.getHandler().Log(l.name, Notice, err.Error(), 1)
+ }
+}
+
+// NoticeEnabled returns true if the logger's level is notice or even more
+// permissive.
+func (l *Logger) NoticeEnabled() bool {
+ return l.getLevel() <= Notice
+}
+
+// Warn logs a collection of values if the logger's level is warning or even
+// more permissive.
+func (l *Logger) Warn(v ...interface{}) {
+ if l.getLevel() <= Warning {
+ l.getHandler().Log(l.name, Warning, fmt.Sprint(v...), 1)
+ }
+}
+
+// Warnf logs a format string with values if the logger's level is warning or
+// even more permissive.
+func (l *Logger) Warnf(format string, v ...interface{}) {
+ if l.getLevel() <= Warning {
+ l.getHandler().Log(l.name, Warning, fmt.Sprintf(format, v...), 1)
+ }
+}
+
+// Warne logs an error value if the error is not nil and the logger's level
+// is warning or even more permissive.
+func (l *Logger) Warne(err error) {
+ if l.getLevel() <= Warning && err != nil {
+ l.getHandler().Log(l.name, Warning, err.Error(), 1)
+ }
+}
+
+// WarnEnabled returns true if the logger's level is warning or even more
+// permissive.
+func (l *Logger) WarnEnabled() bool {
+ return l.getLevel() <= Warning
+}
+
+// Error logs a collection of values if the logger's level is error or even
+// more permissive.
+func (l *Logger) Error(v ...interface{}) {
+ if l.getLevel() <= Error {
+ l.getHandler().Log(l.name, Error, fmt.Sprint(v...), 1)
+ }
+}
+
+// Errorf logs a format string with values if the logger's level is error or
+// even more permissive.
+func (l *Logger) Errorf(format string, v ...interface{}) {
+ if l.getLevel() <= Error {
+ l.getHandler().Log(l.name, Error, fmt.Sprintf(format, v...), 1)
+ }
+}
+
+// Errore logs an error value if the error is not nil and the logger's level
+// is error or even more permissive.
+func (l *Logger) Errore(err error) {
+ if l.getLevel() <= Error && err != nil {
+ l.getHandler().Log(l.name, Error, err.Error(), 1)
+ }
+}
+
+// ErrorEnabled returns true if the logger's level is error or even more
+// permissive.
+func (l *Logger) ErrorEnabled() bool {
+ return l.getLevel() <= Error
+}
+
+// Crit logs a collection of values if the logger's level is critical or even
+// more permissive.
+func (l *Logger) Crit(v ...interface{}) {
+ if l.getLevel() <= Critical {
+ l.getHandler().Log(l.name, Critical, fmt.Sprint(v...), 1)
+ }
+}
+
+// Critf logs a format string with values if the logger's level is critical or
+// even more permissive.
+func (l *Logger) Critf(format string, v ...interface{}) {
+ if l.getLevel() <= Critical {
+ l.getHandler().Log(l.name, Critical, fmt.Sprintf(format, v...), 1)
+ }
+}
+
+// Crite logs an error value if the error is not nil and the logger's level
+// is critical or even more permissive.
+func (l *Logger) Crite(err error) {
+ if l.getLevel() <= Critical && err != nil {
+ l.getHandler().Log(l.name, Critical, err.Error(), 1)
+ }
+}
+
+// CritEnabled returns true if the logger's level is critical or even more
+// permissive.
+func (l *Logger) CritEnabled() bool {
+ return l.getLevel() <= Critical
+}
+
+// Log logs a collection of values if the logger's level is the provided level
+// or even more permissive.
+func (l *Logger) Log(level LogLevel, v ...interface{}) {
+ if l.getLevel() <= level {
+ l.getHandler().Log(l.name, level, fmt.Sprint(v...), 1)
+ }
+}
+
+// Logf logs a format string with values if the logger's level is the provided
+// level or even more permissive.
+func (l *Logger) Logf(level LogLevel, format string, v ...interface{}) {
+ if l.getLevel() <= level {
+ l.getHandler().Log(l.name, level, fmt.Sprintf(format, v...), 1)
+ }
+}
+
+// Loge logs an error value if the error is not nil and the logger's level
+// is the provided level or even more permissive.
+func (l *Logger) Loge(level LogLevel, err error) {
+ if l.getLevel() <= level && err != nil {
+ l.getHandler().Log(l.name, level, err.Error(), 1)
+ }
+}
+
+// LevelEnabled returns true if the logger's level is the provided level or
+// even more permissive.
+func (l *Logger) LevelEnabled(level LogLevel) bool {
+ return l.getLevel() <= level
+}
+
+type writer struct {
+ l *Logger
+ level LogLevel
+}
+
+func (w *writer) Write(data []byte) (int, error) {
+ if w.l.getLevel() <= w.level {
+ w.l.getHandler().Log(w.l.name, w.level, string(data), 1)
+ }
+ return len(data), nil
+}
+
+// Writer returns an io.Writer that writes messages at the given log level.
+func (l *Logger) Writer(level LogLevel) io.Writer {
+ return &writer{l: l, level: level}
+}
+
+type writerNoCaller struct {
+ l *Logger
+ level LogLevel
+}
+
+func (w *writerNoCaller) Write(data []byte) (int, error) {
+ if w.l.getLevel() <= w.level {
+ w.l.getHandler().Log(w.l.name, w.level, string(data), -1)
+ }
+ return len(data), nil
+}
+
+// WriterWithoutCaller returns an io.Writer that writes messages at the given
+// log level, but does not attempt to collect the Write caller, and provides
+// no caller information to the log event.
+func (l *Logger) WriterWithoutCaller(level LogLevel) io.Writer {
+ return &writerNoCaller{l: l, level: level}
+}
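+
+// Illustrative sketch of typical call sites (logger, addr, and err are example
+// names); Writer can bridge code that expects an io.Writer, such as the
+// standard library's log.New.
+//
+//   logger := spacelog.GetLogger()
+//   logger.Noticef("listening on %s", addr)
+//   logger.Warne(err) // logs only when err != nil
+//
+//   errorLog := log.New(logger.Writer(spacelog.Error), "", 0)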
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/doc.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/doc.go
new file mode 100644
index 00000000000..28c25b4db64
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/doc.go
@@ -0,0 +1,39 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package spacelog is a collection of interface lego bricks designed to help you
+build a flexible logging system.
+
+spacelog is loosely inspired by the Python logging library.
+
+The basic interaction is between a Logger and a Handler. A Logger is
+what the programmer typically interacts with for creating log messages. A
+Logger will be at a given log level, and if log messages can clear that
+specific logger's log level filter, they will be passed off to the Handler.
+
+Loggers are instantiated from GetLogger and GetLoggerNamed.
+
+A Handler is a very generic interface for handling log events. You can provide
+your own Handler for doing structured JSON output or colorized output or
+countless other things.
+
+Provided are a simple TextHandler with a variety of log event templates and
+TextOutput sinks, such as io.Writer, Syslog, and so forth.
+
+Make sure to see the source of the setup subpackage for an example of easy and
+configurable logging setup at process start:
+ http://godoc.org/github.com/spacemonkeygo/spacelog/setup
+*/
+package spacelog
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/event.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/event.go
new file mode 100644
index 00000000000..da863cbf2c2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/event.go
@@ -0,0 +1,75 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "path/filepath"
+ "strings"
+ "time"
+)
+
+// TermColors is a type that knows how to output terminal colors and formatting
+type TermColors struct{}
+
+// LogEvent is a type made by the default text handler for feeding to log
+// templates. It has as much contextual data about the log event as possible.
+type LogEvent struct {
+ LoggerName string
+ Level LogLevel
+ Message string
+ Filepath string
+ Line int
+ Timestamp time.Time
+
+ TermColors
+}
+
+// Reset resets the color palette for terminals that support color
+func (TermColors) Reset() string { return "\x1b[0m" }
+func (TermColors) Bold() string { return "\x1b[1m" }
+func (TermColors) Underline() string { return "\x1b[4m" }
+func (TermColors) Black() string { return "\x1b[30m" }
+func (TermColors) Red() string { return "\x1b[31m" }
+func (TermColors) Green() string { return "\x1b[32m" }
+func (TermColors) Yellow() string { return "\x1b[33m" }
+func (TermColors) Blue() string { return "\x1b[34m" }
+func (TermColors) Magenta() string { return "\x1b[35m" }
+func (TermColors) Cyan() string { return "\x1b[36m" }
+func (TermColors) White() string { return "\x1b[37m" }
+
+func (l *LogEvent) Filename() string {
+ if l.Filepath == "" {
+ return ""
+ }
+ return filepath.Base(l.Filepath)
+}
+
+func (l *LogEvent) Time() string {
+ return l.Timestamp.Format("15:04:05")
+}
+
+func (l *LogEvent) Date() string {
+ return l.Timestamp.Format("2006/01/02")
+}
+
+// LevelJustified returns the log level in string form justified so that all
+// log levels take the same text width.
+func (l *LogEvent) LevelJustified() (rv string) {
+ rv = l.Level.String()
+ if len(rv) < 5 {
+ rv += strings.Repeat(" ", 5-len(rv))
+ }
+ return rv
+}
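+
+// Illustrative sketch of the kind of text/template a text handler could render
+// for each LogEvent; every field and method used below is defined above.
+//
+//   var exampleTemplate = template.Must(template.New("line").Parse(
+//       "{{.Blue}}{{.Date}} {{.Time}}{{.Reset}} {{.LevelJustified}} " +
+//           "{{.LoggerName}} {{.Filename}}:{{.Line}} - {{.Message}}\n"))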
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/handler.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/handler.go
new file mode 100644
index 00000000000..e3db0865479
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/handler.go
@@ -0,0 +1,53 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "text/template"
+)
+
+// Handler is an interface that knows how to process log events. This is the
+// basic interface type for building a logging system. If you want to route
+// structured log data somewhere, you would implement this interface.
+type Handler interface {
+ // Log is called for every message. If calldepth is negative, caller
+ // information is missing.
+ Log(logger_name string, level LogLevel, msg string, calldepth int)
+
+ // These two calls are expected to be no-ops on non-text-output handlers
+ SetTextTemplate(t *template.Template)
+ SetTextOutput(output TextOutput)
+}
+
+// HandlerFunc is a type to make implementation of the Handler interface easier
+type HandlerFunc func(logger_name string, level LogLevel, msg string,
+ calldepth int)
+
+// Log simply calls f(logger_name, level, msg, calldepth)
+func (f HandlerFunc) Log(logger_name string, level LogLevel, msg string,
+ calldepth int) {
+ f(logger_name, level, msg, calldepth)
+}
+
+// SetTextTemplate is a no-op
+func (HandlerFunc) SetTextTemplate(t *template.Template) {}
+
+// SetTextOutput is a no-op
+func (HandlerFunc) SetTextOutput(output TextOutput) {}
+
+var (
+ defaultHandler = NewTextHandler(StdlibTemplate,
+ &StdlibOutput{})
+)
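+
+// Illustrative sketch: HandlerFunc turns a plain function into a Handler, for
+// example to count log events per level. levelCounts is a hypothetical
+// map[string]int owned by the caller.
+//
+//   counter := HandlerFunc(func(name string, level LogLevel, msg string,
+//       calldepth int) {
+//       levelCounts[level.Name()]++
+//   })
+//   SetHandler(nil, counter) // a nil regexp matches every logger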
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/level.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/level.go
new file mode 100644
index 00000000000..1797be04041
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/level.go
@@ -0,0 +1,126 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+type LogLevel int32
+
+const (
+ Debug LogLevel = 10
+ Info LogLevel = 20
+ Notice LogLevel = 30
+ Warning LogLevel = 40
+ Error LogLevel = 50
+ Critical LogLevel = 60
+ // syslog has Alert
+ // syslog has Emerg
+
+ DefaultLevel = Notice
+)
+
+// String returns the log level name in short form
+func (l LogLevel) String() string {
+ switch l.Match() {
+ case Critical:
+ return "CRIT"
+ case Error:
+ return "ERR"
+ case Warning:
+ return "WARN"
+ case Notice:
+ return "NOTE"
+ case Info:
+ return "INFO"
+ case Debug:
+ return "DEBUG"
+ default:
+ return "UNSET"
+ }
+}
+
+// Name returns the log level name in long, human-readable form
+func (l LogLevel) Name() string {
+ switch l.Match() {
+ case Critical:
+ return "critical"
+ case Error:
+ return "error"
+ case Warning:
+ return "warning"
+ case Notice:
+ return "notice"
+ case Info:
+ return "info"
+ case Debug:
+ return "debug"
+ default:
+ return "unset"
+ }
+}
+
+// Match returns the greatest named log level that is less than or equal to
+// the receiver log level. For example, if the log level is 43, Match() will
+// return 40 (Warning)
+func (l LogLevel) Match() LogLevel {
+ if l >= Critical {
+ return Critical
+ }
+ if l >= Error {
+ return Error
+ }
+ if l >= Warning {
+ return Warning
+ }
+ if l >= Notice {
+ return Notice
+ }
+ if l >= Info {
+ return Info
+ }
+ if l >= Debug {
+ return Debug
+ }
+ return 0
+}
+
+// LevelFromString converts a named log level to its corresponding value, or
+// returns an error if the name is unknown and the string cannot be parsed as
+// an integer level.
+func LevelFromString(str string) (LogLevel, error) {
+ switch strings.ToLower(str) {
+ case "crit", "critical":
+ return Critical, nil
+ case "err", "error":
+ return Error, nil
+ case "warn", "warning":
+ return Warning, nil
+ case "note", "notice":
+ return Notice, nil
+ case "info":
+ return Info, nil
+ case "debug":
+ return Debug, nil
+ }
+ val, err := strconv.ParseInt(str, 10, 32)
+ if err == nil {
+ return LogLevel(val), nil
+ }
+ return 0, fmt.Errorf("Invalid log level: %s", str)
+}
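+
+// Illustrative sketch: turning a command-line flag into a log level (the flag
+// name and default are examples only).
+//
+//   levelName := flag.String("log.level", "notice", "minimum log level")
+//   flag.Parse()
+//   level, err := LevelFromString(*levelName)
+//   if err != nil {
+//       panic(err)
+//   }
+//   SetLevel(nil, level)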
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/logger.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/logger.go
new file mode 100644
index 00000000000..ae1734b2780
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/logger.go
@@ -0,0 +1,61 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "sync"
+ "sync/atomic"
+)
+
+// Logger is the basic type that allows for logging. A logger has an associated
+// name, given to it during construction, either through a logger collection,
+// GetLogger, GetLoggerNamed, or another Logger's Scope method. A logger also
+// has an associated level and handler, typically configured through the logger
+// collection to which it belongs.
+type Logger struct {
+ level LogLevel
+ name string
+ collection *LoggerCollection
+
+ handler_mtx sync.RWMutex
+ handler Handler
+}
+
+// Scope returns a new Logger with the same level and handler, using the
+// receiver Logger's name as a prefix.
+func (l *Logger) Scope(name string) *Logger {
+ return l.collection.getLogger(l.name+"."+name, l.getLevel(),
+ l.getHandler())
+}
+
+func (l *Logger) setLevel(level LogLevel) {
+ atomic.StoreInt32((*int32)(&l.level), int32(level))
+}
+
+func (l *Logger) getLevel() LogLevel {
+ return LogLevel(atomic.LoadInt32((*int32)(&l.level)))
+}
+
+func (l *Logger) setHandler(handler Handler) {
+ l.handler_mtx.Lock()
+ defer l.handler_mtx.Unlock()
+ l.handler = handler
+}
+
+func (l *Logger) getHandler() Handler {
+ l.handler_mtx.RLock()
+ defer l.handler_mtx.RUnlock()
+ return l.handler
+}
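+
+// Illustrative sketch: Scope derives child loggers that share the parent's
+// level and handler but carry a dotted name suffix (names are examples).
+//
+//   base := GetLoggerNamed("myapp")
+//   dbLog := base.Scope("db")   // named "myapp.db"
+//   netLog := base.Scope("net") // named "myapp.net"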
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output.go
new file mode 100644
index 00000000000..8751268fbe6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output.go
@@ -0,0 +1,178 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "sync"
+)
+
+type TextOutput interface {
+ Output(LogLevel, []byte)
+}
+
+// WriterOutput is an io.Writer wrapper that matches the TextOutput interface
+type WriterOutput struct {
+ w io.Writer
+}
+
+// NewWriterOutput returns a TextOutput that writes messages to an io.Writer
+func NewWriterOutput(w io.Writer) *WriterOutput {
+ return &WriterOutput{w: w}
+}
+
+func (o *WriterOutput) Output(_ LogLevel, message []byte) {
+ o.w.Write(append(bytes.TrimRight(message, "\r\n"), platformNewline...))
+}
+
+// StdlibOutput is a TextOutput that simply writes to the default Go stdlib
+// logging system. It is the default. If you configure the Go stdlib to write
+// to spacelog, make sure to provide a new TextOutput to your logging
+// collection
+type StdlibOutput struct{}
+
+func (*StdlibOutput) Output(_ LogLevel, message []byte) {
+ log.Print(string(message))
+}
+
+type bufferMsg struct {
+ level LogLevel
+ message []byte
+}
+
+// BufferedOutput uses a channel to synchronize writes to a wrapped TextOutput
+// and allows for buffering a limited amount of log events.
+type BufferedOutput struct {
+ o TextOutput
+ c chan bufferMsg
+ running sync.Mutex
+ close_once sync.Once
+}
+
+// NewBufferedOutput returns a BufferedOutput wrapping output with a buffer
+// size of buffer.
+func NewBufferedOutput(output TextOutput, buffer int) *BufferedOutput {
+ if buffer < 0 {
+ buffer = 0
+ }
+ b := &BufferedOutput{
+ o: output,
+ c: make(chan bufferMsg, buffer)}
+ go b.process()
+ return b
+}
+
+// Close shuts down the BufferedOutput's processing
+func (b *BufferedOutput) Close() {
+ b.close_once.Do(func() {
+ close(b.c)
+ })
+ b.running.Lock()
+ b.running.Unlock()
+}
+
+func (b *BufferedOutput) Output(level LogLevel, message []byte) {
+ b.c <- bufferMsg{level: level, message: message}
+}
+
+func (b *BufferedOutput) process() {
+ b.running.Lock()
+ defer b.running.Unlock()
+ for {
+ msg, open := <-b.c
+ if !open {
+ break
+ }
+ b.o.Output(msg.level, msg.message)
+ }
+}
+
+// A TextOutput object that also implements HupHandlingTextOutput may have its
+// OnHup() method called when an administrative signal is sent to this process.
+type HupHandlingTextOutput interface {
+ TextOutput
+ OnHup()
+}
+
+// FileWriterOutput is like WriterOutput with a plain file handle, but it
+// knows how to reopen the file (or try to reopen it) if it hasn't been able
+// to open the file previously, or if an appropriate signal has been received.
+type FileWriterOutput struct {
+ *WriterOutput
+ path string
+}
+
+// NewFileWriterOutput creates a new FileWriterOutput object. This is the
+// only case where an error opening the file is reported to the caller; if
+// we try to reopen it later and the reopen fails, we'll just keep trying
+// until it works.
+func NewFileWriterOutput(path string) (*FileWriterOutput, error) {
+ fo := &FileWriterOutput{path: path}
+ fh, err := fo.openFile()
+ if err != nil {
+ return nil, err
+ }
+ fo.WriterOutput = NewWriterOutput(fh)
+ return fo, nil
+}
+
+// Try to open the file with the path associated with this object.
+func (fo *FileWriterOutput) openFile() (*os.File, error) {
+ return os.OpenFile(fo.path, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
+}
+
+// Try to communicate a message without using our log file. In all likelihood,
+// stderr is closed or redirected to /dev/null, but at least we can try
+// writing there. In the very worst case, if an admin attaches a ptrace to
+// this process, it will be more clear what the problem is.
+func (fo *FileWriterOutput) fallbackLog(tmpl string, args ...interface{}) {
+ fmt.Fprintf(os.Stderr, tmpl, args...)
+}
+
+// Output a log line by writing it to the file. If the file has been
+// released, try to open it again. If that fails, cry for a little
+// while, then throw away the message and carry on.
+func (fo *FileWriterOutput) Output(ll LogLevel, message []byte) {
+ if fo.WriterOutput == nil {
+ fh, err := fo.openFile()
+ if err != nil {
+ fo.fallbackLog("Could not open %#v: %s", fo.path, err)
+ return
+ }
+ fo.WriterOutput = NewWriterOutput(fh)
+ }
+ fo.WriterOutput.Output(ll, message)
+}
+
+// Throw away any references/handles to the output file. This probably
+// means the admin wants to rotate the file out and have this process
+// open a new one. Close the underlying io.Writer if that is a thing
+// that it knows how to do.
+func (fo *FileWriterOutput) OnHup() {
+ if fo.WriterOutput != nil {
+ wc, ok := fo.WriterOutput.w.(io.Closer)
+ if ok {
+ err := wc.Close()
+ if err != nil {
+ fo.fallbackLog("Closing %#v failed: %s", fo.path, err)
+ }
+ }
+ fo.WriterOutput = nil
+ }
+}
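
The concrete TextOutput implementations above compose: FileWriterOutput handles (re)opening the file, and BufferedOutput moves the actual writes onto a background goroutine. A minimal sketch of wiring them together, assuming the vendored import path github.com/spacemonkeygo/spacelog, a hypothetical log file path, and the Info level constant referenced by syslog.go later in this diff:

package main

import (
	"log"

	"github.com/spacemonkeygo/spacelog"
)

func main() {
	// Open errors are only reported here; after OnHup releases the handle,
	// Output reopens the file on the next write.
	fileOut, err := spacelog.NewFileWriterOutput("/var/log/myapp.log")
	if err != nil {
		log.Fatal(err)
	}

	// Move the disk writes off the calling goroutine, buffering up to
	// 128 messages.
	buffered := spacelog.NewBufferedOutput(fileOut, 128)
	defer buffered.Close()

	buffered.Output(spacelog.Info, []byte("service started"))
}
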
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output_other.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output_other.go
new file mode 100644
index 00000000000..2be240a1781
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output_other.go
@@ -0,0 +1,19 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build !windows
+
+package spacelog
+
+var platformNewline = []byte("\n")
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output_windows.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output_windows.go
new file mode 100644
index 00000000000..58b71daba69
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/output_windows.go
@@ -0,0 +1,17 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+var platformNewline = []byte("\r\n")
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/setup.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/setup.go
new file mode 100644
index 00000000000..26ad00572c9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/setup.go
@@ -0,0 +1,183 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "bytes"
+ "fmt"
+ "log"
+ "math"
+ "os"
+ "os/signal"
+ "regexp"
+ "strings"
+ "syscall"
+ "text/template"
+)
+
+// SetupConfig is a configuration struct meant to be used with
+// github.com/spacemonkeygo/flagfile/utils.Setup
+// but can be used independently.
+type SetupConfig struct {
+ Output string `default:"stderr" usage:"log output. can be stdout, stderr, syslog, or a path"`
+ Level string `default:"" usage:"base logger level"`
+ Filter string `default:"" usage:"sets loggers matching this regular expression to the lowest level"`
+ Format string `default:"" usage:"format string to use"`
+ Stdlevel string `default:"warn" usage:"logger level for stdlib log integration"`
+ Subproc string `default:"" usage:"process to run for stdout/stderr-captured logging. The command is first processed as a Go template that supports {{.Facility}}, {{.Level}}, and {{.Name}} fields, and then passed to sh. If set, will redirect stdout and stderr to the given process. A good default is 'setsid logger --priority {{.Facility}}.{{.Level}} --tag {{.Name}}'"`
+ Buffer int `default:"0" usage:"the number of messages to buffer. 0 for no buffer"`
+ // Facility defaults to syslog.LOG_USER (which is 8)
+ Facility int `default:"8" usage:"the syslog facility to use if syslog output is configured"`
+ HupRotate bool `default:"false" usage:"if true, sending a HUP signal will reopen log files"`
+}
+
+var (
+ stdlog = GetLoggerNamed("stdlog")
+ funcmap = template.FuncMap{"ColorizeLevel": ColorizeLevel}
+)
+
+// SetFormatMethod adds functions to the template function map, such that
+// command-line and Setup provided templates can call methods added to the map
+// via this method. The map comes prepopulated with ColorizeLevel, but can be
+// overridden. SetFormatMethod should be called (if at all) before one of
+// this package's Setup methods.
+func SetFormatMethod(name string, fn interface{}) {
+ funcmap[name] = fn
+}
+
+// MustSetup is the same as Setup, but panics instead of returning an error
+func MustSetup(procname string, config SetupConfig) {
+ err := Setup(procname, config)
+ if err != nil {
+ panic(err)
+ }
+}
+
+type subprocInfo struct {
+ Facility string
+ Level string
+ Name string
+}
+
+// Setup takes a given procname and sets spacelog up with the given
+// configuration. Setup supports:
+// * capturing stdout and stderr to a subprocess
+// * configuring the default level
+// * configuring log filters (enabling only some loggers)
+// * configuring the logging template
+// * configuring the output (a file, syslog, stdout, stderr)
+// * configuring log event buffering
+// * capturing all standard library logging with configurable log level
+// It is expected that this method will be called once at process start.
+func Setup(procname string, config SetupConfig) error {
+ if config.Subproc != "" {
+ t, err := template.New("subproc").Parse(config.Subproc)
+ if err != nil {
+ return err
+ }
+ var buf bytes.Buffer
+ err = t.Execute(&buf, &subprocInfo{
+ Facility: fmt.Sprintf("%d", config.Facility),
+ Level: fmt.Sprintf("%d", 2), // syslog.LOG_CRIT
+ Name: procname})
+ if err != nil {
+ return err
+ }
+ err = CaptureOutputToProcess("sh", "-c", string(buf.Bytes()))
+ if err != nil {
+ return err
+ }
+ }
+ if config.Level != "" {
+ level_val, err := LevelFromString(config.Level)
+ if err != nil {
+ return err
+ }
+ if level_val != DefaultLevel {
+ SetLevel(nil, level_val)
+ }
+ }
+ if config.Filter != "" {
+ re, err := regexp.Compile(config.Filter)
+ if err != nil {
+ return err
+ }
+ SetLevel(re, LogLevel(math.MinInt32))
+ }
+ var t *template.Template
+ if config.Format != "" {
+ var err error
+ t, err = template.New("user").Funcs(funcmap).Parse(config.Format)
+ if err != nil {
+ return err
+ }
+ }
+ var textout TextOutput
+ switch strings.ToLower(config.Output) {
+ case "syslog":
+ w, err := NewSyslogOutput(SyslogPriority(config.Facility), procname)
+ if err != nil {
+ return err
+ }
+ if t == nil {
+ t = SyslogTemplate
+ }
+ textout = w
+ case "stdout":
+ if t == nil {
+ t = DefaultTemplate
+ }
+ textout = NewWriterOutput(os.Stdout)
+ case "stderr", "":
+ if t == nil {
+ t = DefaultTemplate
+ }
+ textout = NewWriterOutput(os.Stderr)
+ default:
+ if t == nil {
+ t = StandardTemplate
+ }
+ var err error
+ textout, err = NewFileWriterOutput(config.Output)
+ if err != nil {
+ return err
+ }
+ }
+ if config.HupRotate {
+ if hh, ok := textout.(HupHandlingTextOutput); ok {
+ sigchan := make(chan os.Signal)
+ signal.Notify(sigchan, syscall.SIGHUP)
+ go func() {
+ for _ = range sigchan {
+ hh.OnHup()
+ }
+ }()
+ }
+ }
+ if config.Buffer > 0 {
+ textout = NewBufferedOutput(textout, config.Buffer)
+ }
+ SetHandler(nil, NewTextHandler(t, textout))
+ log.SetFlags(log.Lshortfile)
+ if config.Stdlevel == "" {
+ config.Stdlevel = "warn"
+ }
+ stdlog_level_val, err := LevelFromString(config.Stdlevel)
+ if err != nil {
+ return err
+ }
+ log.SetOutput(stdlog.WriterWithoutCaller(stdlog_level_val))
+ return nil
+}
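
Setup (or MustSetup) can also be driven with a literal SetupConfig when flagfile is not in use; empty fields fall through to the defaults handled in the code above (stderr output, "warn" stdlib level, the platform DefaultTemplate). A minimal sketch, assuming the vendored import path and that LevelFromString accepts the level name "info":

package main

import (
	"log"

	"github.com/spacemonkeygo/spacelog"
)

func main() {
	// Log to a file at info level, buffer up to 256 events, and reopen
	// the file when SIGHUP arrives (HupRotate).
	spacelog.MustSetup("myapp", spacelog.SetupConfig{
		Output:    "/var/log/myapp.log",
		Level:     "info",
		Stdlevel:  "warn",
		Buffer:    256,
		HupRotate: true,
	})

	// Setup also captures the standard library logger, so this line is
	// routed through spacelog at the configured Stdlevel.
	log.Println("service started")
}
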
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/setup/setup.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/setup/setup.go
new file mode 100644
index 00000000000..22186888afd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/setup/setup.go
@@ -0,0 +1,80 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package setup provides simple helpers for configuring spacelog from flags.
+
+This package adds the following flags:
+ --log.output - can either be stdout, stderr, syslog, or a file path
+ --log.level - the base logger level
+ --log.filter - loggers that match this regular expression get set to the
+ lowest level
+ --log.format - a go text template for log lines
+ --log.stdlevel - the logger level to assume the standard library logger is
+ using
+ --log.subproc - a process to run for stdout/stderr capturing
+  --log.buffer - the number of messages to buffer
+*/
+package setup
+
+import (
+ "github.com/spacemonkeygo/flagfile/utils"
+ "github.com/spacemonkeygo/spacelog"
+)
+
+var (
+ config spacelog.SetupConfig
+)
+
+func init() {
+ utils.Setup("log", &config)
+}
+
+// SetFormatMethod in this subpackage is deprecated and will be removed soon.
+// Please see spacelog.SetFormatMethod instead
+func SetFormatMethod(name string, fn interface{}) {
+ spacelog.SetFormatMethod(name, fn)
+}
+
+// MustSetup calls spacelog.MustSetup with a flag-configured config struct
+// It's pretty useless to call this method without parsing flags first, via
+// flagfile.Load()
+func MustSetup(procname string) {
+ spacelog.MustSetup(procname, config)
+}
+
+// Setup calls spacelog.Setup with a flag-configured config struct
+// It's pretty useless to call this method without parsing flags first, via
+// flagfile.Load()
+func Setup(procname string) error {
+ return spacelog.Setup(procname, config)
+}
+
+// MustSetupWithFacility is deprecated and will be removed soon. Please
+// configure facility through the facility flag option.
+func MustSetupWithFacility(procname string, facility spacelog.SyslogPriority) {
+ err := SetupWithFacility(procname, facility)
+ if err != nil {
+ panic(err)
+ }
+}
+
+// SetupWithFacility is deprecated and will be removed soon. Please
+// configure facility through the facility flag option.
+func SetupWithFacility(procname string,
+ facility spacelog.SyslogPriority) error {
+ config_copy := config
+ config_copy.Facility = int(facility)
+ return spacelog.Setup(procname, config_copy)
+}
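
This subpackage registers its flags under the log. prefix at init time, so MustSetup is only useful after the flags have been parsed. A minimal sketch, assuming flagfile's top-level package provides the Load() call mentioned in the comments above (the import path is inferred from the utils import):

package main

import (
	"github.com/spacemonkeygo/flagfile"
	"github.com/spacemonkeygo/spacelog/setup"
)

func main() {
	// Parse flags (and flag files) first; this fills in the log.* flags
	// that this package registered in its init().
	flagfile.Load()

	// Hand the flag-populated SetupConfig to spacelog.
	setup.MustSetup("myapp")
}
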
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/syslog.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/syslog.go
new file mode 100644
index 00000000000..0408a5a553b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/syslog.go
@@ -0,0 +1,63 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build !windows
+
+package spacelog
+
+import (
+ "bytes"
+ "log/syslog"
+)
+
+type SyslogPriority syslog.Priority
+
+// SyslogOutput is a syslog client that matches the TextOutput interface
+type SyslogOutput struct {
+ w *syslog.Writer
+}
+
+// NewSyslogOutput returns a TextOutput object that writes to syslog using
+// the given facility and tag. The log level will be determined by the log
+// event.
+func NewSyslogOutput(facility SyslogPriority, tag string) (
+ TextOutput, error) {
+ w, err := syslog.New(syslog.Priority(facility), tag)
+ if err != nil {
+ return nil, err
+ }
+ return &SyslogOutput{w: w}, nil
+}
+
+func (o *SyslogOutput) Output(level LogLevel, message []byte) {
+ level = level.Match()
+ for _, msg := range bytes.Split(message, []byte{'\n'}) {
+ switch level {
+ case Critical:
+ o.w.Crit(string(msg))
+ case Error:
+ o.w.Err(string(msg))
+ case Warning:
+ o.w.Warning(string(msg))
+ case Notice:
+ o.w.Notice(string(msg))
+ case Info:
+ o.w.Info(string(msg))
+ case Debug:
+ fallthrough
+ default:
+ o.w.Debug(string(msg))
+ }
+ }
+}
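
On non-Windows builds, syslog output can be constructed directly instead of via Setup's "syslog" case. A minimal sketch, assuming syslog.LOG_USER as the facility and the Error level constant used in the switch above:

package main

import (
	"log"
	"log/syslog"

	"github.com/spacemonkeygo/spacelog"
)

func main() {
	// Tag messages with the process name; the syslog severity is derived
	// from each event's LogLevel inside SyslogOutput.Output.
	out, err := spacelog.NewSyslogOutput(
		spacelog.SyslogPriority(syslog.LOG_USER), "myapp")
	if err != nil {
		log.Fatal(err)
	}
	out.Output(spacelog.Error, []byte("something went wrong"))
}
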
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/syslog_windows.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/syslog_windows.go
new file mode 100644
index 00000000000..edba3c2a56b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/syslog_windows.go
@@ -0,0 +1,26 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "fmt"
+)
+
+type SyslogPriority int
+
+func NewSyslogOutput(facility SyslogPriority, tag string) (
+ TextOutput, error) {
+ return nil, fmt.Errorf("SyslogOutput not supported on Windows")
+}
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates.go
new file mode 100644
index 00000000000..4ac0fdc0f23
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates.go
@@ -0,0 +1,69 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "text/template"
+)
+
+// ColorizeLevel returns a TermColor byte sequence for the appropriate color
+// for the level. If you'd like to configure your own color choices, you can
+// make your own template with its own function map to your own colorize
+// function.
+func ColorizeLevel(level LogLevel) string {
+ switch level.Match() {
+ case Critical, Error:
+ return TermColors{}.Red()
+ case Warning:
+ return TermColors{}.Magenta()
+ case Notice:
+ return TermColors{}.Yellow()
+ case Info, Debug:
+ return TermColors{}.Green()
+ }
+ return ""
+}
+
+var (
+ // ColorTemplate uses the default ColorizeLevel method for color choices.
+ ColorTemplate = template.Must(template.New("color").Funcs(template.FuncMap{
+ "ColorizeLevel": ColorizeLevel}).Parse(
+ `{{.Blue}}{{.Date}} {{.Time}}{{.Reset}} ` +
+ `{{.Bold}}{{ColorizeLevel .Level}}{{.LevelJustified}}{{.Reset}} ` +
+ `{{.Underline}}{{.LoggerName}}{{.Reset}} ` +
+ `{{if .Filename}}{{.Filename}}:{{.Line}} {{end}}- ` +
+ `{{ColorizeLevel .Level}}{{.Message}}{{.Reset}}`))
+
+ // StandardTemplate is like ColorTemplate with no color.
+ StandardTemplate = template.Must(template.New("standard").Parse(
+ `{{.Date}} {{.Time}} ` +
+ `{{.Level}} {{.LoggerName}} ` +
+ `{{if .Filename}}{{.Filename}}:{{.Line}} {{end}}` +
+ `- {{.Message}}`))
+
+ // SyslogTemplate is missing the date and time as syslog adds those
+ // things.
+ SyslogTemplate = template.Must(template.New("syslog").Parse(
+ `{{.Level}} {{.LoggerName}} ` +
+ `{{if .Filename}}{{.Filename}}:{{.Line}} {{end}}` +
+ `- {{.Message}}`))
+
+ // StdlibTemplate is missing the date and time as the stdlib logger often
+ // adds those things.
+ StdlibTemplate = template.Must(template.New("stdlib").Parse(
+ `{{.Level}} {{.LoggerName}} ` +
+ `{{if .Filename}}{{.Filename}}:{{.Line}} {{end}}` +
+ `- {{.Message}}`))
+)
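
A SetupConfig.Format string is parsed with the same function map that backs ColorTemplate, so ColorizeLevel (and anything registered through SetFormatMethod) is available to user templates. A minimal sketch of a custom one-line format that only uses fields and methods the built-in templates above already reference:

package main

import (
	"github.com/spacemonkeygo/spacelog"
)

func main() {
	// {{ColorizeLevel .Level}} comes from the prepopulated function map;
	// the other fields are the same ones ColorTemplate uses.
	spacelog.MustSetup("myapp", spacelog.SetupConfig{
		Output: "stderr",
		Format: `{{.Time}} {{ColorizeLevel .Level}}{{.Level}}{{.Reset}} ` +
			`{{.LoggerName}} - {{.Message}}`,
	})
}
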
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates_others.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates_others.go
new file mode 100644
index 00000000000..114e2e14312
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates_others.go
@@ -0,0 +1,22 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build !windows
+
+package spacelog
+
+var (
+	// DefaultTemplate is the default template for stdout/stderr for the platform
+ DefaultTemplate = ColorTemplate
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates_windows.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates_windows.go
new file mode 100644
index 00000000000..512b600481e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/templates_windows.go
@@ -0,0 +1,20 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+var (
+	// DefaultTemplate is the default template for stdout/stderr for the platform
+ DefaultTemplate = StandardTemplate
+)
diff --git a/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/text.go b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/text.go
new file mode 100644
index 00000000000..8b36ce99f50
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/github.com/spacemonkeygo/spacelog/text.go
@@ -0,0 +1,80 @@
+// Copyright (C) 2014 Space Monkey, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spacelog
+
+import (
+ "bytes"
+ "fmt"
+ "runtime"
+ "strings"
+ "sync"
+ "text/template"
+ "time"
+)
+
+// TextHandler is the default implementation of the Handler interface. A
+// TextHandler, on log events, makes LogEvent structures, passes them to the
+// configured template, and then passes that output to a configured TextOutput
+// interface.
+type TextHandler struct {
+ mtx sync.RWMutex
+ template *template.Template
+ output TextOutput
+}
+
+// NewTextHandler creates a Handler that creates LogEvents, passes them to
+// the given template, and passes the result to output
+func NewTextHandler(t *template.Template, output TextOutput) *TextHandler {
+ return &TextHandler{template: t, output: output}
+}
+
+// Log makes a LogEvent, formats it with the configured template, then passes
+// the output to configured output sink
+func (h *TextHandler) Log(logger_name string, level LogLevel, msg string,
+ calldepth int) {
+ h.mtx.RLock()
+ output, template := h.output, h.template
+ h.mtx.RUnlock()
+ event := LogEvent{
+ LoggerName: logger_name,
+ Level: level,
+ Message: strings.TrimRight(msg, "\n\r"),
+ Timestamp: time.Now()}
+ if calldepth >= 0 {
+ _, event.Filepath, event.Line, _ = runtime.Caller(calldepth + 1)
+ }
+ var buf bytes.Buffer
+ err := template.Execute(&buf, &event)
+ if err != nil {
+ output.Output(level, []byte(
+ fmt.Sprintf("log format template failed: %s", err)))
+ return
+ }
+ output.Output(level, buf.Bytes())
+}
+
+// SetTextTemplate changes the TextHandler's text formatting template
+func (h *TextHandler) SetTextTemplate(t *template.Template) {
+ h.mtx.Lock()
+ defer h.mtx.Unlock()
+ h.template = t
+}
+
+// SetTextOutput changes the TextHandler's TextOutput sink
+func (h *TextHandler) SetTextOutput(output TextOutput) {
+ h.mtx.Lock()
+ defer h.mtx.Unlock()
+ h.output = output
+}
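
A handler can also be assembled by hand the same way Setup does in its final lines: build a TextHandler from a template and a TextOutput, then install it with SetHandler (a nil regexp applies it to all loggers, mirroring the call in setup.go). A minimal sketch:

package main

import (
	"os"

	"github.com/spacemonkeygo/spacelog"
)

func main() {
	// Format events with the plain (colorless) template and write them
	// to stderr, for every logger.
	handler := spacelog.NewTextHandler(
		spacelog.StandardTemplate, spacelog.NewWriterOutput(os.Stderr))
	spacelog.SetHandler(nil, handler)
}
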
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/.gitattributes b/src/mongo/gotools/vendor/src/golang.org/x/crypto/.gitattributes
new file mode 100644
index 00000000000..d2f212e5da8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/.gitattributes
@@ -0,0 +1,10 @@
+# Treat all files in this repo as binary, with no git magic updating
+# line endings. Windows users contributing to Go will need to use a
+# modern version of git and editors capable of LF line endings.
+#
+# We'll prevent accidental CRLF line endings from entering the repo
+# via the git-review gofmt checks.
+#
+# See golang.org/issue/9281
+
+* -text
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/.gitignore b/src/mongo/gotools/vendor/src/golang.org/x/crypto/.gitignore
new file mode 100644
index 00000000000..8339fd61d3f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/.gitignore
@@ -0,0 +1,2 @@
+# Add no patterns to .hgignore except for files generated by the build.
+last-change
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/AUTHORS b/src/mongo/gotools/vendor/src/golang.org/x/crypto/AUTHORS
new file mode 100644
index 00000000000..15167cd746c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/AUTHORS
@@ -0,0 +1,3 @@
+# This source code refers to The Go Authors for copyright purposes.
+# The master list of authors is in the main Go distribution,
+# visible at http://tip.golang.org/AUTHORS.
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/CONTRIBUTING.md b/src/mongo/gotools/vendor/src/golang.org/x/crypto/CONTRIBUTING.md
new file mode 100644
index 00000000000..88dff59bc7d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/CONTRIBUTING.md
@@ -0,0 +1,31 @@
+# Contributing to Go
+
+Go is an open source project.
+
+It is the work of hundreds of contributors. We appreciate your help!
+
+
+## Filing issues
+
+When [filing an issue](https://golang.org/issue/new), make sure to answer these five questions:
+
+1. What version of Go are you using (`go version`)?
+2. What operating system and processor architecture are you using?
+3. What did you do?
+4. What did you expect to see?
+5. What did you see instead?
+
+General questions should go to the [golang-nuts mailing list](https://groups.google.com/group/golang-nuts) instead of the issue tracker.
+The gophers there will answer or ask you to file an issue if you've tripped over a bug.
+
+## Contributing code
+
+Please read the [Contribution Guidelines](https://golang.org/doc/contribute.html)
+before sending patches.
+
+**We do not accept GitHub pull requests**
+(we use [Gerrit](https://code.google.com/p/gerrit/) instead for code review).
+
+Unless otherwise noted, the Go source files are distributed under
+the BSD-style license found in the LICENSE file.
+
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/CONTRIBUTORS b/src/mongo/gotools/vendor/src/golang.org/x/crypto/CONTRIBUTORS
new file mode 100644
index 00000000000..1c4577e9680
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/CONTRIBUTORS
@@ -0,0 +1,3 @@
+# This source code was written by the Go contributors.
+# The master list of contributors is in the main Go distribution,
+# visible at http://tip.golang.org/CONTRIBUTORS.
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/LICENSE b/src/mongo/gotools/vendor/src/golang.org/x/crypto/LICENSE
new file mode 100644
index 00000000000..6a66aea5eaf
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/PATENTS b/src/mongo/gotools/vendor/src/golang.org/x/crypto/PATENTS
new file mode 100644
index 00000000000..733099041f8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/README b/src/mongo/gotools/vendor/src/golang.org/x/crypto/README
new file mode 100644
index 00000000000..f1e0cbf94e0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/README
@@ -0,0 +1,3 @@
+This repository holds supplementary Go cryptography libraries.
+
+To submit changes to this repository, see http://golang.org/doc/contribute.html.
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/base64.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/base64.go
new file mode 100644
index 00000000000..fc311609081
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/base64.go
@@ -0,0 +1,35 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bcrypt
+
+import "encoding/base64"
+
+const alphabet = "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
+
+var bcEncoding = base64.NewEncoding(alphabet)
+
+func base64Encode(src []byte) []byte {
+ n := bcEncoding.EncodedLen(len(src))
+ dst := make([]byte, n)
+ bcEncoding.Encode(dst, src)
+ for dst[n-1] == '=' {
+ n--
+ }
+ return dst[:n]
+}
+
+func base64Decode(src []byte) ([]byte, error) {
+ numOfEquals := 4 - (len(src) % 4)
+ for i := 0; i < numOfEquals; i++ {
+ src = append(src, '=')
+ }
+
+ dst := make([]byte, bcEncoding.DecodedLen(len(src)))
+ n, err := bcEncoding.Decode(dst, src)
+ if err != nil {
+ return nil, err
+ }
+ return dst[:n], nil
+}
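
The helpers above implement bcrypt's unpadded base64 with a nonstandard alphabet by trimming and re-adding '=' by hand; with a modern standard library the same encoding can be expressed via WithPadding. An illustrative sketch (not how the package itself does it), reusing the salt test vector from bcrypt_test.go below:

package main

import (
	"encoding/base64"
	"fmt"
)

// bcrypt's alphabet, copied from the vendored base64.go above.
const bcAlphabet = "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"

func main() {
	enc := base64.NewEncoding(bcAlphabet).WithPadding(base64.NoPadding)

	// The salt bytes from TestUnpaddedBase64Encoding; this prints
	// XajjQvNhvvRt5GSeFk1xFe, the same unpadded form base64Encode produces.
	salt := []byte{101, 201, 101, 75, 19, 227, 199, 20, 239, 236, 133, 32, 30, 109, 243, 30}
	fmt.Println(enc.EncodeToString(salt))
}
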
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/bcrypt.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/bcrypt.go
new file mode 100644
index 00000000000..f8b807f9c3a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/bcrypt.go
@@ -0,0 +1,294 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package bcrypt implements Provos and Mazières's bcrypt adaptive hashing
+// algorithm. See http://www.usenix.org/event/usenix99/provos/provos.pdf
+package bcrypt // import "golang.org/x/crypto/bcrypt"
+
+// The code is a port of Provos and Mazières's C implementation.
+import (
+ "crypto/rand"
+ "crypto/subtle"
+ "errors"
+ "fmt"
+ "golang.org/x/crypto/blowfish"
+ "io"
+ "strconv"
+)
+
+const (
+ MinCost int = 4 // the minimum allowable cost as passed in to GenerateFromPassword
+ MaxCost int = 31 // the maximum allowable cost as passed in to GenerateFromPassword
+ DefaultCost int = 10 // the cost that will actually be set if a cost below MinCost is passed into GenerateFromPassword
+)
+
+// The error returned from CompareHashAndPassword when a password and hash do
+// not match.
+var ErrMismatchedHashAndPassword = errors.New("crypto/bcrypt: hashedPassword is not the hash of the given password")
+
+// The error returned from CompareHashAndPassword when a hash is too short to
+// be a bcrypt hash.
+var ErrHashTooShort = errors.New("crypto/bcrypt: hashedSecret too short to be a bcrypted password")
+
+// The error returned from CompareHashAndPassword when a hash was created with
+// a bcrypt algorithm newer than this implementation.
+type HashVersionTooNewError byte
+
+func (hv HashVersionTooNewError) Error() string {
+ return fmt.Sprintf("crypto/bcrypt: bcrypt algorithm version '%c' requested is newer than current version '%c'", byte(hv), majorVersion)
+}
+
+// The error returned from CompareHashAndPassword when a hash starts with something other than '$'
+type InvalidHashPrefixError byte
+
+func (ih InvalidHashPrefixError) Error() string {
+ return fmt.Sprintf("crypto/bcrypt: bcrypt hashes must start with '$', but hashedSecret started with '%c'", byte(ih))
+}
+
+type InvalidCostError int
+
+func (ic InvalidCostError) Error() string {
+ return fmt.Sprintf("crypto/bcrypt: cost %d is outside allowed range (%d,%d)", int(ic), int(MinCost), int(MaxCost))
+}
+
+const (
+ majorVersion = '2'
+ minorVersion = 'a'
+ maxSaltSize = 16
+ maxCryptedHashSize = 23
+ encodedSaltSize = 22
+ encodedHashSize = 31
+ minHashSize = 59
+)
+
+// magicCipherData is an IV for the 64 Blowfish encryption calls in
+// bcrypt(). It's the string "OrpheanBeholderScryDoubt" in big-endian bytes.
+var magicCipherData = []byte{
+ 0x4f, 0x72, 0x70, 0x68,
+ 0x65, 0x61, 0x6e, 0x42,
+ 0x65, 0x68, 0x6f, 0x6c,
+ 0x64, 0x65, 0x72, 0x53,
+ 0x63, 0x72, 0x79, 0x44,
+ 0x6f, 0x75, 0x62, 0x74,
+}
+
+type hashed struct {
+ hash []byte
+ salt []byte
+ cost int // allowed range is MinCost to MaxCost
+ major byte
+ minor byte
+}
+
+// GenerateFromPassword returns the bcrypt hash of the password at the given
+// cost. If the cost given is less than MinCost, the cost will be set to
+// DefaultCost, instead. Use CompareHashAndPassword, as defined in this package,
+// to compare the returned hashed password with its cleartext version.
+func GenerateFromPassword(password []byte, cost int) ([]byte, error) {
+ p, err := newFromPassword(password, cost)
+ if err != nil {
+ return nil, err
+ }
+ return p.Hash(), nil
+}
+
+// CompareHashAndPassword compares a bcrypt hashed password with its possible
+// plaintext equivalent. Returns nil on success, or an error on failure.
+func CompareHashAndPassword(hashedPassword, password []byte) error {
+ p, err := newFromHash(hashedPassword)
+ if err != nil {
+ return err
+ }
+
+ otherHash, err := bcrypt(password, p.cost, p.salt)
+ if err != nil {
+ return err
+ }
+
+ otherP := &hashed{otherHash, p.salt, p.cost, p.major, p.minor}
+ if subtle.ConstantTimeCompare(p.Hash(), otherP.Hash()) == 1 {
+ return nil
+ }
+
+ return ErrMismatchedHashAndPassword
+}
+
+// Cost returns the hashing cost used to create the given hashed
+// password. When, in the future, the hashing cost of a password system needs
+// to be increased in order to adjust for greater computational power, this
+// function allows one to establish which passwords need to be updated.
+func Cost(hashedPassword []byte) (int, error) {
+ p, err := newFromHash(hashedPassword)
+ if err != nil {
+ return 0, err
+ }
+ return p.cost, nil
+}
+
+func newFromPassword(password []byte, cost int) (*hashed, error) {
+ if cost < MinCost {
+ cost = DefaultCost
+ }
+ p := new(hashed)
+ p.major = majorVersion
+ p.minor = minorVersion
+
+ err := checkCost(cost)
+ if err != nil {
+ return nil, err
+ }
+ p.cost = cost
+
+ unencodedSalt := make([]byte, maxSaltSize)
+ _, err = io.ReadFull(rand.Reader, unencodedSalt)
+ if err != nil {
+ return nil, err
+ }
+
+ p.salt = base64Encode(unencodedSalt)
+ hash, err := bcrypt(password, p.cost, p.salt)
+ if err != nil {
+ return nil, err
+ }
+ p.hash = hash
+ return p, err
+}
+
+func newFromHash(hashedSecret []byte) (*hashed, error) {
+ if len(hashedSecret) < minHashSize {
+ return nil, ErrHashTooShort
+ }
+ p := new(hashed)
+ n, err := p.decodeVersion(hashedSecret)
+ if err != nil {
+ return nil, err
+ }
+ hashedSecret = hashedSecret[n:]
+ n, err = p.decodeCost(hashedSecret)
+ if err != nil {
+ return nil, err
+ }
+ hashedSecret = hashedSecret[n:]
+
+ // The "+2" is here because we'll have to append at most 2 '=' to the salt
+ // when base64 decoding it in expensiveBlowfishSetup().
+ p.salt = make([]byte, encodedSaltSize, encodedSaltSize+2)
+ copy(p.salt, hashedSecret[:encodedSaltSize])
+
+ hashedSecret = hashedSecret[encodedSaltSize:]
+ p.hash = make([]byte, len(hashedSecret))
+ copy(p.hash, hashedSecret)
+
+ return p, nil
+}
+
+func bcrypt(password []byte, cost int, salt []byte) ([]byte, error) {
+ cipherData := make([]byte, len(magicCipherData))
+ copy(cipherData, magicCipherData)
+
+ c, err := expensiveBlowfishSetup(password, uint32(cost), salt)
+ if err != nil {
+ return nil, err
+ }
+
+ for i := 0; i < 24; i += 8 {
+ for j := 0; j < 64; j++ {
+ c.Encrypt(cipherData[i:i+8], cipherData[i:i+8])
+ }
+ }
+
+ // Bug compatibility with C bcrypt implementations. We only encode 23 of
+ // the 24 bytes encrypted.
+ hsh := base64Encode(cipherData[:maxCryptedHashSize])
+ return hsh, nil
+}
+
+func expensiveBlowfishSetup(key []byte, cost uint32, salt []byte) (*blowfish.Cipher, error) {
+
+ csalt, err := base64Decode(salt)
+ if err != nil {
+ return nil, err
+ }
+
+ // Bug compatibility with C bcrypt implementations. They use the trailing
+ // NULL in the key string during expansion.
+ ckey := append(key, 0)
+
+ c, err := blowfish.NewSaltedCipher(ckey, csalt)
+ if err != nil {
+ return nil, err
+ }
+
+ var i, rounds uint64
+ rounds = 1 << cost
+ for i = 0; i < rounds; i++ {
+ blowfish.ExpandKey(ckey, c)
+ blowfish.ExpandKey(csalt, c)
+ }
+
+ return c, nil
+}
+
+func (p *hashed) Hash() []byte {
+ arr := make([]byte, 60)
+ arr[0] = '$'
+ arr[1] = p.major
+ n := 2
+ if p.minor != 0 {
+ arr[2] = p.minor
+ n = 3
+ }
+ arr[n] = '$'
+ n += 1
+ copy(arr[n:], []byte(fmt.Sprintf("%02d", p.cost)))
+ n += 2
+ arr[n] = '$'
+ n += 1
+ copy(arr[n:], p.salt)
+ n += encodedSaltSize
+ copy(arr[n:], p.hash)
+ n += encodedHashSize
+ return arr[:n]
+}
+
+func (p *hashed) decodeVersion(sbytes []byte) (int, error) {
+ if sbytes[0] != '$' {
+ return -1, InvalidHashPrefixError(sbytes[0])
+ }
+ if sbytes[1] > majorVersion {
+ return -1, HashVersionTooNewError(sbytes[1])
+ }
+ p.major = sbytes[1]
+ n := 3
+ if sbytes[2] != '$' {
+ p.minor = sbytes[2]
+ n++
+ }
+ return n, nil
+}
+
+// sbytes should begin where decodeVersion left off.
+func (p *hashed) decodeCost(sbytes []byte) (int, error) {
+ cost, err := strconv.Atoi(string(sbytes[0:2]))
+ if err != nil {
+ return -1, err
+ }
+ err = checkCost(cost)
+ if err != nil {
+ return -1, err
+ }
+ p.cost = cost
+ return 3, nil
+}
+
+func (p *hashed) String() string {
+ return fmt.Sprintf("&{hash: %#v, salt: %#v, cost: %d, major: %c, minor: %c}", string(p.hash), p.salt, p.cost, p.major, p.minor)
+}
+
+func checkCost(cost int) error {
+ if cost < MinCost || cost > MaxCost {
+ return InvalidCostError(cost)
+ }
+ return nil
+}
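
Typical callers only touch GenerateFromPassword and CompareHashAndPassword. A minimal usage sketch:

package main

import (
	"fmt"
	"log"

	"golang.org/x/crypto/bcrypt"
)

func main() {
	// Costs below MinCost are silently bumped to DefaultCost.
	hash, err := bcrypt.GenerateFromPassword([]byte("s3cret"), bcrypt.DefaultCost)
	if err != nil {
		log.Fatal(err)
	}

	// nil means the password matches; a mismatch returns
	// bcrypt.ErrMismatchedHashAndPassword.
	if err := bcrypt.CompareHashAndPassword(hash, []byte("s3cret")); err != nil {
		log.Fatal(err)
	}
	fmt.Println("password accepted")
}
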
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/bcrypt_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/bcrypt_test.go
new file mode 100644
index 00000000000..f08a6f5b229
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bcrypt/bcrypt_test.go
@@ -0,0 +1,226 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bcrypt
+
+import (
+ "bytes"
+ "fmt"
+ "testing"
+)
+
+func TestBcryptingIsEasy(t *testing.T) {
+ pass := []byte("mypassword")
+ hp, err := GenerateFromPassword(pass, 0)
+ if err != nil {
+ t.Fatalf("GenerateFromPassword error: %s", err)
+ }
+
+ if CompareHashAndPassword(hp, pass) != nil {
+ t.Errorf("%v should hash %s correctly", hp, pass)
+ }
+
+ notPass := "notthepass"
+ err = CompareHashAndPassword(hp, []byte(notPass))
+ if err != ErrMismatchedHashAndPassword {
+ t.Errorf("%v and %s should be mismatched", hp, notPass)
+ }
+}
+
+func TestBcryptingIsCorrect(t *testing.T) {
+ pass := []byte("allmine")
+ salt := []byte("XajjQvNhvvRt5GSeFk1xFe")
+ expectedHash := []byte("$2a$10$XajjQvNhvvRt5GSeFk1xFeyqRrsxkhBkUiQeg0dt.wU1qD4aFDcga")
+
+ hash, err := bcrypt(pass, 10, salt)
+ if err != nil {
+ t.Fatalf("bcrypt blew up: %v", err)
+ }
+ if !bytes.HasSuffix(expectedHash, hash) {
+ t.Errorf("%v should be the suffix of %v", hash, expectedHash)
+ }
+
+ h, err := newFromHash(expectedHash)
+ if err != nil {
+ t.Errorf("Unable to parse %s: %v", string(expectedHash), err)
+ }
+
+ // This is not the safe way to compare these hashes. We do this only for
+ // testing clarity. Use bcrypt.CompareHashAndPassword()
+ if err == nil && !bytes.Equal(expectedHash, h.Hash()) {
+ t.Errorf("Parsed hash %v should equal %v", h.Hash(), expectedHash)
+ }
+}
+
+func TestVeryShortPasswords(t *testing.T) {
+ key := []byte("k")
+ salt := []byte("XajjQvNhvvRt5GSeFk1xFe")
+ _, err := bcrypt(key, 10, salt)
+ if err != nil {
+ t.Errorf("One byte key resulted in error: %s", err)
+ }
+}
+
+func TestTooLongPasswordsWork(t *testing.T) {
+ salt := []byte("XajjQvNhvvRt5GSeFk1xFe")
+ // One byte over the usual 56 byte limit that blowfish has
+ tooLongPass := []byte("012345678901234567890123456789012345678901234567890123456")
+ tooLongExpected := []byte("$2a$10$XajjQvNhvvRt5GSeFk1xFe5l47dONXg781AmZtd869sO8zfsHuw7C")
+ hash, err := bcrypt(tooLongPass, 10, salt)
+ if err != nil {
+ t.Fatalf("bcrypt blew up on long password: %v", err)
+ }
+ if !bytes.HasSuffix(tooLongExpected, hash) {
+ t.Errorf("%v should be the suffix of %v", hash, tooLongExpected)
+ }
+}
+
+type InvalidHashTest struct {
+ err error
+ hash []byte
+}
+
+var invalidTests = []InvalidHashTest{
+ {ErrHashTooShort, []byte("$2a$10$fooo")},
+ {ErrHashTooShort, []byte("$2a")},
+ {HashVersionTooNewError('3'), []byte("$3a$10$sssssssssssssssssssssshhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh")},
+ {InvalidHashPrefixError('%'), []byte("%2a$10$sssssssssssssssssssssshhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh")},
+ {InvalidCostError(32), []byte("$2a$32$sssssssssssssssssssssshhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh")},
+}
+
+func TestInvalidHashErrors(t *testing.T) {
+ check := func(name string, expected, err error) {
+ if err == nil {
+ t.Errorf("%s: Should have returned an error", name)
+ }
+ if err != nil && err != expected {
+ t.Errorf("%s gave err %v but should have given %v", name, err, expected)
+ }
+ }
+ for _, iht := range invalidTests {
+ _, err := newFromHash(iht.hash)
+ check("newFromHash", iht.err, err)
+ err = CompareHashAndPassword(iht.hash, []byte("anything"))
+ check("CompareHashAndPassword", iht.err, err)
+ }
+}
+
+func TestUnpaddedBase64Encoding(t *testing.T) {
+ original := []byte{101, 201, 101, 75, 19, 227, 199, 20, 239, 236, 133, 32, 30, 109, 243, 30}
+ encodedOriginal := []byte("XajjQvNhvvRt5GSeFk1xFe")
+
+ encoded := base64Encode(original)
+
+ if !bytes.Equal(encodedOriginal, encoded) {
+ t.Errorf("Encoded %v should have equaled %v", encoded, encodedOriginal)
+ }
+
+ decoded, err := base64Decode(encodedOriginal)
+ if err != nil {
+ t.Fatalf("base64Decode blew up: %s", err)
+ }
+
+ if !bytes.Equal(decoded, original) {
+ t.Errorf("Decoded %v should have equaled %v", decoded, original)
+ }
+}
+
+func TestCost(t *testing.T) {
+ suffix := "XajjQvNhvvRt5GSeFk1xFe5l47dONXg781AmZtd869sO8zfsHuw7C"
+ for _, vers := range []string{"2a", "2"} {
+ for _, cost := range []int{4, 10} {
+ s := fmt.Sprintf("$%s$%02d$%s", vers, cost, suffix)
+ h := []byte(s)
+ actual, err := Cost(h)
+ if err != nil {
+ t.Errorf("Cost, error: %s", err)
+ continue
+ }
+ if actual != cost {
+ t.Errorf("Cost, expected: %d, actual: %d", cost, actual)
+ }
+ }
+ }
+ _, err := Cost([]byte("$a$a$" + suffix))
+ if err == nil {
+ t.Errorf("Cost, malformed but no error returned")
+ }
+}
+
+func TestCostValidationInHash(t *testing.T) {
+ if testing.Short() {
+ return
+ }
+
+ pass := []byte("mypassword")
+
+ for c := 0; c < MinCost; c++ {
+ p, _ := newFromPassword(pass, c)
+ if p.cost != DefaultCost {
+ t.Errorf("newFromPassword should default costs below %d to %d, but was %d", MinCost, DefaultCost, p.cost)
+ }
+ }
+
+ p, _ := newFromPassword(pass, 14)
+ if p.cost != 14 {
+ t.Errorf("newFromPassword should default cost to 14, but was %d", p.cost)
+ }
+
+ hp, _ := newFromHash(p.Hash())
+ if p.cost != hp.cost {
+ t.Errorf("newFromHash should maintain the cost at %d, but was %d", p.cost, hp.cost)
+ }
+
+ _, err := newFromPassword(pass, 32)
+ if err == nil {
+ t.Fatalf("newFromPassword: should return a cost error")
+ }
+ if err != InvalidCostError(32) {
+ t.Errorf("newFromPassword: should return cost error, got %#v", err)
+ }
+}
+
+func TestCostReturnsWithLeadingZeroes(t *testing.T) {
+ hp, _ := newFromPassword([]byte("abcdefgh"), 7)
+ cost := hp.Hash()[4:7]
+ expected := []byte("07$")
+
+ if !bytes.Equal(expected, cost) {
+ t.Errorf("single digit costs in hash should have leading zeros: was %v instead of %v", cost, expected)
+ }
+}
+
+func TestMinorNotRequired(t *testing.T) {
+ noMinorHash := []byte("$2$10$XajjQvNhvvRt5GSeFk1xFeyqRrsxkhBkUiQeg0dt.wU1qD4aFDcga")
+ h, err := newFromHash(noMinorHash)
+ if err != nil {
+ t.Fatalf("No minor hash blew up: %s", err)
+ }
+ if h.minor != 0 {
+ t.Errorf("Should leave minor version at 0, but was %d", h.minor)
+ }
+
+ if !bytes.Equal(noMinorHash, h.Hash()) {
+ t.Errorf("Should generate hash %v, but created %v", noMinorHash, h.Hash())
+ }
+}
+
+func BenchmarkEqual(b *testing.B) {
+ b.StopTimer()
+ passwd := []byte("somepasswordyoulike")
+ hash, _ := GenerateFromPassword(passwd, 10)
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ CompareHashAndPassword(hash, passwd)
+ }
+}
+
+func BenchmarkGeneration(b *testing.B) {
+ b.StopTimer()
+ passwd := []byte("mylongpassword1234")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ GenerateFromPassword(passwd, 10)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/block.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/block.go
new file mode 100644
index 00000000000..9d80f19521b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/block.go
@@ -0,0 +1,159 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package blowfish
+
+// getNextWord returns the next big-endian uint32 value from the byte slice
+// at the given position in a circular manner, updating the position.
+func getNextWord(b []byte, pos *int) uint32 {
+ var w uint32
+ j := *pos
+ for i := 0; i < 4; i++ {
+ w = w<<8 | uint32(b[j])
+ j++
+ if j >= len(b) {
+ j = 0
+ }
+ }
+ *pos = j
+ return w
+}
+
+// ExpandKey performs a key expansion on the given *Cipher. Specifically, it
+// performs the Blowfish algorithm's key schedule which sets up the *Cipher's
+// pi and substitution tables for calls to Encrypt. This is used, primarily,
+// by the bcrypt package to reuse the Blowfish key schedule during its
+// set up. It's unlikely that you need to use this directly.
+func ExpandKey(key []byte, c *Cipher) {
+ j := 0
+ for i := 0; i < 18; i++ {
+ // Using inlined getNextWord for performance.
+ var d uint32
+ for k := 0; k < 4; k++ {
+ d = d<<8 | uint32(key[j])
+ j++
+ if j >= len(key) {
+ j = 0
+ }
+ }
+ c.p[i] ^= d
+ }
+
+ var l, r uint32
+ for i := 0; i < 18; i += 2 {
+ l, r = encryptBlock(l, r, c)
+ c.p[i], c.p[i+1] = l, r
+ }
+
+ for i := 0; i < 256; i += 2 {
+ l, r = encryptBlock(l, r, c)
+ c.s0[i], c.s0[i+1] = l, r
+ }
+ for i := 0; i < 256; i += 2 {
+ l, r = encryptBlock(l, r, c)
+ c.s1[i], c.s1[i+1] = l, r
+ }
+ for i := 0; i < 256; i += 2 {
+ l, r = encryptBlock(l, r, c)
+ c.s2[i], c.s2[i+1] = l, r
+ }
+ for i := 0; i < 256; i += 2 {
+ l, r = encryptBlock(l, r, c)
+ c.s3[i], c.s3[i+1] = l, r
+ }
+}
+
+// This is similar to ExpandKey, but folds the salt during the key
+// schedule. While ExpandKey is essentially expandKeyWithSalt with an all-zero
+// salt passed in, reusing ExpandKey turns out to be a place of inefficiency
+// and specializing it here is useful.
+func expandKeyWithSalt(key []byte, salt []byte, c *Cipher) {
+ j := 0
+ for i := 0; i < 18; i++ {
+ c.p[i] ^= getNextWord(key, &j)
+ }
+
+ j = 0
+ var l, r uint32
+ for i := 0; i < 18; i += 2 {
+ l ^= getNextWord(salt, &j)
+ r ^= getNextWord(salt, &j)
+ l, r = encryptBlock(l, r, c)
+ c.p[i], c.p[i+1] = l, r
+ }
+
+ for i := 0; i < 256; i += 2 {
+ l ^= getNextWord(salt, &j)
+ r ^= getNextWord(salt, &j)
+ l, r = encryptBlock(l, r, c)
+ c.s0[i], c.s0[i+1] = l, r
+ }
+
+ for i := 0; i < 256; i += 2 {
+ l ^= getNextWord(salt, &j)
+ r ^= getNextWord(salt, &j)
+ l, r = encryptBlock(l, r, c)
+ c.s1[i], c.s1[i+1] = l, r
+ }
+
+ for i := 0; i < 256; i += 2 {
+ l ^= getNextWord(salt, &j)
+ r ^= getNextWord(salt, &j)
+ l, r = encryptBlock(l, r, c)
+ c.s2[i], c.s2[i+1] = l, r
+ }
+
+ for i := 0; i < 256; i += 2 {
+ l ^= getNextWord(salt, &j)
+ r ^= getNextWord(salt, &j)
+ l, r = encryptBlock(l, r, c)
+ c.s3[i], c.s3[i+1] = l, r
+ }
+}
+
+func encryptBlock(l, r uint32, c *Cipher) (uint32, uint32) {
+ xl, xr := l, r
+ xl ^= c.p[0]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[1]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[2]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[3]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[4]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[5]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[6]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[7]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[8]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[9]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[10]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[11]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[12]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[13]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[14]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[15]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[16]
+ xr ^= c.p[17]
+ return xr, xl
+}
+
+func decryptBlock(l, r uint32, c *Cipher) (uint32, uint32) {
+ xl, xr := l, r
+ xl ^= c.p[17]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[16]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[15]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[14]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[13]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[12]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[11]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[10]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[9]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[8]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[7]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[6]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[5]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[4]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[3]
+ xr ^= ((c.s0[byte(xl>>24)] + c.s1[byte(xl>>16)]) ^ c.s2[byte(xl>>8)]) + c.s3[byte(xl)] ^ c.p[2]
+ xl ^= ((c.s0[byte(xr>>24)] + c.s1[byte(xr>>16)]) ^ c.s2[byte(xr>>8)]) + c.s3[byte(xr)] ^ c.p[1]
+ xr ^= c.p[0]
+ return xr, xl
+}
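
ExpandKey is exported mainly so bcrypt can rerun the key schedule many times, as expensiveBlowfishSetup in bcrypt.go above shows. A minimal sketch of the same salted-setup-plus-extra-rounds pattern, assuming NewSaltedCipher and BlockSize from the rest of this vendored blowfish package:

package main

import (
	"fmt"
	"log"

	"golang.org/x/crypto/blowfish"
)

func main() {
	key := []byte("some key material")
	salt := []byte("0123456789abcdef")

	// Fold the salt into the key schedule, then keep strengthening it with
	// extra ExpandKey rounds, the way expensiveBlowfishSetup does.
	c, err := blowfish.NewSaltedCipher(key, salt)
	if err != nil {
		log.Fatal(err)
	}
	for i := 0; i < 1<<4; i++ { // bcrypt runs 1<<cost of these rounds
		blowfish.ExpandKey(key, c)
		blowfish.ExpandKey(salt, c)
	}

	block := make([]byte, blowfish.BlockSize)
	copy(block, "8 bytes!")
	c.Encrypt(block, block)
	fmt.Printf("%x\n", block)
}
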
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/blowfish_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/blowfish_test.go
new file mode 100644
index 00000000000..7afa1fdf3d5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/blowfish_test.go
@@ -0,0 +1,274 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package blowfish
+
+import "testing"
+
+type CryptTest struct {
+ key []byte
+ in []byte
+ out []byte
+}
+
+// Test vector values are from http://www.schneier.com/code/vectors.txt.
+var encryptTests = []CryptTest{
+ {
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x4E, 0xF9, 0x97, 0x45, 0x61, 0x98, 0xDD, 0x78}},
+ {
+ []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
+ []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
+ []byte{0x51, 0x86, 0x6F, 0xD5, 0xB8, 0x5E, 0xCB, 0x8A}},
+ {
+ []byte{0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01},
+ []byte{0x7D, 0x85, 0x6F, 0x9A, 0x61, 0x30, 0x63, 0xF2}},
+ {
+ []byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11},
+ []byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11},
+ []byte{0x24, 0x66, 0xDD, 0x87, 0x8B, 0x96, 0x3C, 0x9D}},
+
+ {
+ []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF},
+ []byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11},
+ []byte{0x61, 0xF9, 0xC3, 0x80, 0x22, 0x81, 0xB0, 0x96}},
+ {
+ []byte{0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11},
+ []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF},
+ []byte{0x7D, 0x0C, 0xC6, 0x30, 0xAF, 0xDA, 0x1E, 0xC7}},
+ {
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x4E, 0xF9, 0x97, 0x45, 0x61, 0x98, 0xDD, 0x78}},
+ {
+ []byte{0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54, 0x32, 0x10},
+ []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF},
+ []byte{0x0A, 0xCE, 0xAB, 0x0F, 0xC6, 0xA0, 0xA2, 0x8D}},
+ {
+ []byte{0x7C, 0xA1, 0x10, 0x45, 0x4A, 0x1A, 0x6E, 0x57},
+ []byte{0x01, 0xA1, 0xD6, 0xD0, 0x39, 0x77, 0x67, 0x42},
+ []byte{0x59, 0xC6, 0x82, 0x45, 0xEB, 0x05, 0x28, 0x2B}},
+ {
+ []byte{0x01, 0x31, 0xD9, 0x61, 0x9D, 0xC1, 0x37, 0x6E},
+ []byte{0x5C, 0xD5, 0x4C, 0xA8, 0x3D, 0xEF, 0x57, 0xDA},
+ []byte{0xB1, 0xB8, 0xCC, 0x0B, 0x25, 0x0F, 0x09, 0xA0}},
+ {
+ []byte{0x07, 0xA1, 0x13, 0x3E, 0x4A, 0x0B, 0x26, 0x86},
+ []byte{0x02, 0x48, 0xD4, 0x38, 0x06, 0xF6, 0x71, 0x72},
+ []byte{0x17, 0x30, 0xE5, 0x77, 0x8B, 0xEA, 0x1D, 0xA4}},
+ {
+ []byte{0x38, 0x49, 0x67, 0x4C, 0x26, 0x02, 0x31, 0x9E},
+ []byte{0x51, 0x45, 0x4B, 0x58, 0x2D, 0xDF, 0x44, 0x0A},
+ []byte{0xA2, 0x5E, 0x78, 0x56, 0xCF, 0x26, 0x51, 0xEB}},
+ {
+ []byte{0x04, 0xB9, 0x15, 0xBA, 0x43, 0xFE, 0xB5, 0xB6},
+ []byte{0x42, 0xFD, 0x44, 0x30, 0x59, 0x57, 0x7F, 0xA2},
+ []byte{0x35, 0x38, 0x82, 0xB1, 0x09, 0xCE, 0x8F, 0x1A}},
+ {
+ []byte{0x01, 0x13, 0xB9, 0x70, 0xFD, 0x34, 0xF2, 0xCE},
+ []byte{0x05, 0x9B, 0x5E, 0x08, 0x51, 0xCF, 0x14, 0x3A},
+ []byte{0x48, 0xF4, 0xD0, 0x88, 0x4C, 0x37, 0x99, 0x18}},
+ {
+ []byte{0x01, 0x70, 0xF1, 0x75, 0x46, 0x8F, 0xB5, 0xE6},
+ []byte{0x07, 0x56, 0xD8, 0xE0, 0x77, 0x47, 0x61, 0xD2},
+ []byte{0x43, 0x21, 0x93, 0xB7, 0x89, 0x51, 0xFC, 0x98}},
+ {
+ []byte{0x43, 0x29, 0x7F, 0xAD, 0x38, 0xE3, 0x73, 0xFE},
+ []byte{0x76, 0x25, 0x14, 0xB8, 0x29, 0xBF, 0x48, 0x6A},
+ []byte{0x13, 0xF0, 0x41, 0x54, 0xD6, 0x9D, 0x1A, 0xE5}},
+ {
+ []byte{0x07, 0xA7, 0x13, 0x70, 0x45, 0xDA, 0x2A, 0x16},
+ []byte{0x3B, 0xDD, 0x11, 0x90, 0x49, 0x37, 0x28, 0x02},
+ []byte{0x2E, 0xED, 0xDA, 0x93, 0xFF, 0xD3, 0x9C, 0x79}},
+ {
+ []byte{0x04, 0x68, 0x91, 0x04, 0xC2, 0xFD, 0x3B, 0x2F},
+ []byte{0x26, 0x95, 0x5F, 0x68, 0x35, 0xAF, 0x60, 0x9A},
+ []byte{0xD8, 0x87, 0xE0, 0x39, 0x3C, 0x2D, 0xA6, 0xE3}},
+ {
+ []byte{0x37, 0xD0, 0x6B, 0xB5, 0x16, 0xCB, 0x75, 0x46},
+ []byte{0x16, 0x4D, 0x5E, 0x40, 0x4F, 0x27, 0x52, 0x32},
+ []byte{0x5F, 0x99, 0xD0, 0x4F, 0x5B, 0x16, 0x39, 0x69}},
+ {
+ []byte{0x1F, 0x08, 0x26, 0x0D, 0x1A, 0xC2, 0x46, 0x5E},
+ []byte{0x6B, 0x05, 0x6E, 0x18, 0x75, 0x9F, 0x5C, 0xCA},
+ []byte{0x4A, 0x05, 0x7A, 0x3B, 0x24, 0xD3, 0x97, 0x7B}},
+ {
+ []byte{0x58, 0x40, 0x23, 0x64, 0x1A, 0xBA, 0x61, 0x76},
+ []byte{0x00, 0x4B, 0xD6, 0xEF, 0x09, 0x17, 0x60, 0x62},
+ []byte{0x45, 0x20, 0x31, 0xC1, 0xE4, 0xFA, 0xDA, 0x8E}},
+ {
+ []byte{0x02, 0x58, 0x16, 0x16, 0x46, 0x29, 0xB0, 0x07},
+ []byte{0x48, 0x0D, 0x39, 0x00, 0x6E, 0xE7, 0x62, 0xF2},
+ []byte{0x75, 0x55, 0xAE, 0x39, 0xF5, 0x9B, 0x87, 0xBD}},
+ {
+ []byte{0x49, 0x79, 0x3E, 0xBC, 0x79, 0xB3, 0x25, 0x8F},
+ []byte{0x43, 0x75, 0x40, 0xC8, 0x69, 0x8F, 0x3C, 0xFA},
+ []byte{0x53, 0xC5, 0x5F, 0x9C, 0xB4, 0x9F, 0xC0, 0x19}},
+ {
+ []byte{0x4F, 0xB0, 0x5E, 0x15, 0x15, 0xAB, 0x73, 0xA7},
+ []byte{0x07, 0x2D, 0x43, 0xA0, 0x77, 0x07, 0x52, 0x92},
+ []byte{0x7A, 0x8E, 0x7B, 0xFA, 0x93, 0x7E, 0x89, 0xA3}},
+ {
+ []byte{0x49, 0xE9, 0x5D, 0x6D, 0x4C, 0xA2, 0x29, 0xBF},
+ []byte{0x02, 0xFE, 0x55, 0x77, 0x81, 0x17, 0xF1, 0x2A},
+ []byte{0xCF, 0x9C, 0x5D, 0x7A, 0x49, 0x86, 0xAD, 0xB5}},
+ {
+ []byte{0x01, 0x83, 0x10, 0xDC, 0x40, 0x9B, 0x26, 0xD6},
+ []byte{0x1D, 0x9D, 0x5C, 0x50, 0x18, 0xF7, 0x28, 0xC2},
+ []byte{0xD1, 0xAB, 0xB2, 0x90, 0x65, 0x8B, 0xC7, 0x78}},
+ {
+ []byte{0x1C, 0x58, 0x7F, 0x1C, 0x13, 0x92, 0x4F, 0xEF},
+ []byte{0x30, 0x55, 0x32, 0x28, 0x6D, 0x6F, 0x29, 0x5A},
+ []byte{0x55, 0xCB, 0x37, 0x74, 0xD1, 0x3E, 0xF2, 0x01}},
+ {
+ []byte{0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01},
+ []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF},
+ []byte{0xFA, 0x34, 0xEC, 0x48, 0x47, 0xB2, 0x68, 0xB2}},
+ {
+ []byte{0x1F, 0x1F, 0x1F, 0x1F, 0x0E, 0x0E, 0x0E, 0x0E},
+ []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF},
+ []byte{0xA7, 0x90, 0x79, 0x51, 0x08, 0xEA, 0x3C, 0xAE}},
+ {
+ []byte{0xE0, 0xFE, 0xE0, 0xFE, 0xF1, 0xFE, 0xF1, 0xFE},
+ []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF},
+ []byte{0xC3, 0x9E, 0x07, 0x2D, 0x9F, 0xAC, 0x63, 0x1D}},
+ {
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
+ []byte{0x01, 0x49, 0x33, 0xE0, 0xCD, 0xAF, 0xF6, 0xE4}},
+ {
+ []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0xF2, 0x1E, 0x9A, 0x77, 0xB7, 0x1C, 0x49, 0xBC}},
+ {
+ []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF},
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x24, 0x59, 0x46, 0x88, 0x57, 0x54, 0x36, 0x9A}},
+ {
+ []byte{0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54, 0x32, 0x10},
+ []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
+ []byte{0x6B, 0x5C, 0x5A, 0x9C, 0x5D, 0x9E, 0x0A, 0x5A}},
+}
+
+func TestCipherEncrypt(t *testing.T) {
+ for i, tt := range encryptTests {
+ c, err := NewCipher(tt.key)
+ if err != nil {
+ t.Errorf("NewCipher(%d bytes) = %s", len(tt.key), err)
+ continue
+ }
+ ct := make([]byte, len(tt.out))
+ c.Encrypt(ct, tt.in)
+ for j, v := range ct {
+ if v != tt.out[j] {
+ t.Errorf("Cipher.Encrypt, test vector #%d: cipher-text[%d] = %#x, expected %#x", i, j, v, tt.out[j])
+ break
+ }
+ }
+ }
+}
+
+func TestCipherDecrypt(t *testing.T) {
+ for i, tt := range encryptTests {
+ c, err := NewCipher(tt.key)
+ if err != nil {
+ t.Errorf("NewCipher(%d bytes) = %s", len(tt.key), err)
+ continue
+ }
+ pt := make([]byte, len(tt.in))
+ c.Decrypt(pt, tt.out)
+ for j, v := range pt {
+ if v != tt.in[j] {
+ t.Errorf("Cipher.Decrypt, test vector #%d: plain-text[%d] = %#x, expected %#x", i, j, v, tt.in[j])
+ break
+ }
+ }
+ }
+}
+
+func TestSaltedCipherKeyLength(t *testing.T) {
+ if _, err := NewSaltedCipher(nil, []byte{'a'}); err != KeySizeError(0) {
+ t.Errorf("NewSaltedCipher with short key, gave error %#v, expected %#v", err, KeySizeError(0))
+ }
+
+ // A 57-byte key. One over the typical blowfish restriction.
+ key := []byte("012345678901234567890123456789012345678901234567890123456")
+ if _, err := NewSaltedCipher(key, []byte{'a'}); err != nil {
+ t.Errorf("NewSaltedCipher with long key, gave error %#v", err)
+ }
+}
+
+// Test vectors generated with Blowfish from OpenSSH.
+var saltedVectors = [][8]byte{
+ {0x0c, 0x82, 0x3b, 0x7b, 0x8d, 0x01, 0x4b, 0x7e},
+ {0xd1, 0xe1, 0x93, 0xf0, 0x70, 0xa6, 0xdb, 0x12},
+ {0xfc, 0x5e, 0xba, 0xde, 0xcb, 0xf8, 0x59, 0xad},
+ {0x8a, 0x0c, 0x76, 0xe7, 0xdd, 0x2c, 0xd3, 0xa8},
+ {0x2c, 0xcb, 0x7b, 0xee, 0xac, 0x7b, 0x7f, 0xf8},
+ {0xbb, 0xf6, 0x30, 0x6f, 0xe1, 0x5d, 0x62, 0xbf},
+ {0x97, 0x1e, 0xc1, 0x3d, 0x3d, 0xe0, 0x11, 0xe9},
+ {0x06, 0xd7, 0x4d, 0xb1, 0x80, 0xa3, 0xb1, 0x38},
+ {0x67, 0xa1, 0xa9, 0x75, 0x0e, 0x5b, 0xc6, 0xb4},
+ {0x51, 0x0f, 0x33, 0x0e, 0x4f, 0x67, 0xd2, 0x0c},
+ {0xf1, 0x73, 0x7e, 0xd8, 0x44, 0xea, 0xdb, 0xe5},
+ {0x14, 0x0e, 0x16, 0xce, 0x7f, 0x4a, 0x9c, 0x7b},
+ {0x4b, 0xfe, 0x43, 0xfd, 0xbf, 0x36, 0x04, 0x47},
+ {0xb1, 0xeb, 0x3e, 0x15, 0x36, 0xa7, 0xbb, 0xe2},
+ {0x6d, 0x0b, 0x41, 0xdd, 0x00, 0x98, 0x0b, 0x19},
+ {0xd3, 0xce, 0x45, 0xce, 0x1d, 0x56, 0xb7, 0xfc},
+ {0xd9, 0xf0, 0xfd, 0xda, 0xc0, 0x23, 0xb7, 0x93},
+ {0x4c, 0x6f, 0xa1, 0xe4, 0x0c, 0xa8, 0xca, 0x57},
+ {0xe6, 0x2f, 0x28, 0xa7, 0x0c, 0x94, 0x0d, 0x08},
+ {0x8f, 0xe3, 0xf0, 0xb6, 0x29, 0xe3, 0x44, 0x03},
+ {0xff, 0x98, 0xdd, 0x04, 0x45, 0xb4, 0x6d, 0x1f},
+ {0x9e, 0x45, 0x4d, 0x18, 0x40, 0x53, 0xdb, 0xef},
+ {0xb7, 0x3b, 0xef, 0x29, 0xbe, 0xa8, 0x13, 0x71},
+ {0x02, 0x54, 0x55, 0x41, 0x8e, 0x04, 0xfc, 0xad},
+ {0x6a, 0x0a, 0xee, 0x7c, 0x10, 0xd9, 0x19, 0xfe},
+ {0x0a, 0x22, 0xd9, 0x41, 0xcc, 0x23, 0x87, 0x13},
+ {0x6e, 0xff, 0x1f, 0xff, 0x36, 0x17, 0x9c, 0xbe},
+ {0x79, 0xad, 0xb7, 0x40, 0xf4, 0x9f, 0x51, 0xa6},
+ {0x97, 0x81, 0x99, 0xa4, 0xde, 0x9e, 0x9f, 0xb6},
+ {0x12, 0x19, 0x7a, 0x28, 0xd0, 0xdc, 0xcc, 0x92},
+ {0x81, 0xda, 0x60, 0x1e, 0x0e, 0xdd, 0x65, 0x56},
+ {0x7d, 0x76, 0x20, 0xb2, 0x73, 0xc9, 0x9e, 0xee},
+}
+
+func TestSaltedCipher(t *testing.T) {
+ var key, salt [32]byte
+ for i := range key {
+ key[i] = byte(i)
+ salt[i] = byte(i + 32)
+ }
+ for i, v := range saltedVectors {
+ c, err := NewSaltedCipher(key[:], salt[:i])
+ if err != nil {
+ t.Fatal(err)
+ }
+ var buf [8]byte
+ c.Encrypt(buf[:], buf[:])
+ if v != buf {
+ t.Errorf("%d: expected %x, got %x", i, v, buf)
+ }
+ }
+}
+
+func BenchmarkExpandKeyWithSalt(b *testing.B) {
+ key := make([]byte, 32)
+ salt := make([]byte, 16)
+ c, _ := NewCipher(key)
+ for i := 0; i < b.N; i++ {
+ expandKeyWithSalt(key, salt, c)
+ }
+}
+
+func BenchmarkExpandKey(b *testing.B) {
+ key := make([]byte, 32)
+ c, _ := NewCipher(key)
+ for i := 0; i < b.N; i++ {
+ ExpandKey(key, c)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/cipher.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/cipher.go
new file mode 100644
index 00000000000..542984aa8da
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/cipher.go
@@ -0,0 +1,91 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package blowfish implements Bruce Schneier's Blowfish encryption algorithm.
+package blowfish // import "golang.org/x/crypto/blowfish"
+
+// The code is a port of Bruce Schneier's C implementation.
+// See http://www.schneier.com/blowfish.html.
+
+import "strconv"
+
+// The Blowfish block size in bytes.
+const BlockSize = 8
+
+// A Cipher is an instance of Blowfish encryption using a particular key.
+type Cipher struct {
+ p [18]uint32
+ s0, s1, s2, s3 [256]uint32
+}
+
+type KeySizeError int
+
+func (k KeySizeError) Error() string {
+ return "crypto/blowfish: invalid key size " + strconv.Itoa(int(k))
+}
+
+// NewCipher creates and returns a Cipher.
+// The key argument should be the Blowfish key, from 1 to 56 bytes.
+func NewCipher(key []byte) (*Cipher, error) {
+ var result Cipher
+ if k := len(key); k < 1 || k > 56 {
+ return nil, KeySizeError(k)
+ }
+ initCipher(&result)
+ ExpandKey(key, &result)
+ return &result, nil
+}
+
+// NewSaltedCipher creates and returns a Cipher that folds a salt into its key
+// schedule. For most purposes, NewCipher, instead of NewSaltedCipher, is
+// sufficient and desirable. For bcrypt compatibility, the key can be over 56
+// bytes.
+func NewSaltedCipher(key, salt []byte) (*Cipher, error) {
+ if len(salt) == 0 {
+ return NewCipher(key)
+ }
+ var result Cipher
+ if k := len(key); k < 1 {
+ return nil, KeySizeError(k)
+ }
+ initCipher(&result)
+ expandKeyWithSalt(key, salt, &result)
+ return &result, nil
+}
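For context, the x/crypto bcrypt package builds on this constructor. A minimal, hedged usage sketch follows (not part of the vendored file; the example key and the 16-byte salt length are illustrative choices only):

package main

import (
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/blowfish"
)

func main() {
	// Any non-empty salt is accepted; 16 bytes is an arbitrary illustrative length.
	salt := make([]byte, 16)
	if _, err := rand.Read(salt); err != nil {
		panic(err)
	}
	// Unlike NewCipher, NewSaltedCipher accepts keys longer than 56 bytes.
	key := []byte("a passphrase that may exceed the usual 56-byte Blowfish key limit")
	c, err := blowfish.NewSaltedCipher(key, salt)
	if err != nil {
		panic(err)
	}
	fmt.Println(c.BlockSize()) // 8
}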
+
+// BlockSize returns the Blowfish block size, 8 bytes.
+// It is necessary to satisfy the Block interface in the
+// package "crypto/cipher".
+func (c *Cipher) BlockSize() int { return BlockSize }
+
+// Encrypt encrypts the 8-byte buffer src using the key k
+// and stores the result in dst.
+// Note that for amounts of data larger than a block,
+// it is not safe to just call Encrypt on successive blocks;
+// instead, use an encryption mode like CBC (see crypto/cipher/cbc.go).
+func (c *Cipher) Encrypt(dst, src []byte) {
+ l := uint32(src[0])<<24 | uint32(src[1])<<16 | uint32(src[2])<<8 | uint32(src[3])
+ r := uint32(src[4])<<24 | uint32(src[5])<<16 | uint32(src[6])<<8 | uint32(src[7])
+ l, r = encryptBlock(l, r, c)
+ dst[0], dst[1], dst[2], dst[3] = byte(l>>24), byte(l>>16), byte(l>>8), byte(l)
+ dst[4], dst[5], dst[6], dst[7] = byte(r>>24), byte(r>>16), byte(r>>8), byte(r)
+}
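As the comment above notes, data longer than one block needs a proper mode of operation. A hedged, standalone sketch using the standard crypto/cipher CBC wrappers (not part of the vendored file; the hard-coded key, all-zero IV, and fixed-length plaintext are for illustration only, and real code should use a random IV and padding):

package main

import (
	"crypto/cipher"
	"fmt"

	"golang.org/x/crypto/blowfish"
)

func main() {
	block, err := blowfish.NewCipher([]byte("opensesame")) // key: 1 to 56 bytes
	if err != nil {
		panic(err)
	}
	iv := make([]byte, blowfish.BlockSize)  // 8-byte IV; all zero here for illustration only
	plaintext := []byte("exactly 16 bytes") // length must be a multiple of BlockSize
	ciphertext := make([]byte, len(plaintext))
	cipher.NewCBCEncrypter(block, iv).CryptBlocks(ciphertext, plaintext)

	decrypted := make([]byte, len(ciphertext))
	cipher.NewCBCDecrypter(block, iv).CryptBlocks(decrypted, ciphertext)
	fmt.Printf("%x -> %q\n", ciphertext, decrypted)
}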
+
+// Decrypt decrypts the 8-byte buffer src using the key k
+// and stores the result in dst.
+func (c *Cipher) Decrypt(dst, src []byte) {
+ l := uint32(src[0])<<24 | uint32(src[1])<<16 | uint32(src[2])<<8 | uint32(src[3])
+ r := uint32(src[4])<<24 | uint32(src[5])<<16 | uint32(src[6])<<8 | uint32(src[7])
+ l, r = decryptBlock(l, r, c)
+ dst[0], dst[1], dst[2], dst[3] = byte(l>>24), byte(l>>16), byte(l>>8), byte(l)
+ dst[4], dst[5], dst[6], dst[7] = byte(r>>24), byte(r>>16), byte(r>>8), byte(r)
+}
+
+func initCipher(c *Cipher) {
+ copy(c.p[0:], p[0:])
+ copy(c.s0[0:], s0[0:])
+ copy(c.s1[0:], s1[0:])
+ copy(c.s2[0:], s2[0:])
+ copy(c.s3[0:], s3[0:])
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/const.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/const.go
new file mode 100644
index 00000000000..8c5ee4cb08a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/blowfish/const.go
@@ -0,0 +1,199 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// The startup permutation array and substitution boxes.
+// They are the hexadecimal digits of PI; see:
+// http://www.schneier.com/code/constants.txt.
+
+package blowfish
+
+var s0 = [256]uint32{
+ 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96,
+ 0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16,
+ 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, 0x0d95748f, 0x728eb658,
+ 0x718bcd58, 0x82154aee, 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013,
+ 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, 0x8e79dcb0, 0x603a180e,
+ 0x6c9e0e8b, 0xb01e8a3e, 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60,
+ 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, 0x55ca396a, 0x2aab10b6,
+ 0xb4cc5c34, 0x1141e8ce, 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a,
+ 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, 0xafd6ba33, 0x6c24cf5c,
+ 0x7a325381, 0x28958677, 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193,
+ 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, 0xef845d5d, 0xe98575b1,
+ 0xdc262302, 0xeb651b88, 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239,
+ 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, 0x21c66842, 0xf6e96c9a,
+ 0x670c9c61, 0xabd388f0, 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3,
+ 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, 0xa1f1651d, 0x39af0176,
+ 0x66ca593e, 0x82430e88, 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe,
+ 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, 0x4ed3aa62, 0x363f7706,
+ 0x1bfedf72, 0x429b023d, 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b,
+ 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, 0xe3fe501a, 0xb6794c3b,
+ 0x976ce0bd, 0x04c006ba, 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463,
+ 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, 0x6dfc511f, 0x9b30952c,
+ 0xcc814544, 0xaf5ebd09, 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3,
+ 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, 0x5579c0bd, 0x1a60320a,
+ 0xd6a100c6, 0x402c7279, 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8,
+ 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, 0x323db5fa, 0xfd238760,
+ 0x53317b48, 0x3e00df82, 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db,
+ 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, 0x695b27b0, 0xbbca58c8,
+ 0xe1ffa35d, 0xb8f011a0, 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b,
+ 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, 0xe1ddf2da, 0xa4cb7e33,
+ 0x62fb1341, 0xcee4c6e8, 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4,
+ 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, 0xd08ed1d0, 0xafc725e0,
+ 0x8e3c5b2f, 0x8e7594b7, 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c,
+ 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, 0x2f2f2218, 0xbe0e1777,
+ 0xea752dfe, 0x8b021fa1, 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299,
+ 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, 0x165fa266, 0x80957705,
+ 0x93cc7314, 0x211a1477, 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf,
+ 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, 0x00250e2d, 0x2071b35e,
+ 0x226800bb, 0x57b8e0af, 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa,
+ 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, 0x83260376, 0x6295cfa9,
+ 0x11c81968, 0x4e734a41, 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915,
+ 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, 0x08ba6fb5, 0x571be91f,
+ 0xf296ec6b, 0x2a0dd915, 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664,
+ 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a,
+}
+
+var s1 = [256]uint32{
+ 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, 0xad6ea6b0, 0x49a7df7d,
+ 0x9cee60b8, 0x8fedb266, 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1,
+ 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 0x3f54989a, 0x5b429d65,
+ 0x6b8fe4d6, 0x99f73fd6, 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1,
+ 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, 0x09686b3f, 0x3ebaefc9,
+ 0x3c971814, 0x6b6a70a1, 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737,
+ 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, 0xb03ada37, 0xf0500c0d,
+ 0xf01c1f04, 0x0200b3ff, 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd,
+ 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, 0x3ae5e581, 0x37c2dadc,
+ 0xc8b57634, 0x9af3dda7, 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41,
+ 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, 0x4e548b38, 0x4f6db908,
+ 0x6f420d03, 0xf60a04bf, 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af,
+ 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, 0x5512721f, 0x2e6b7124,
+ 0x501adde6, 0x9f84cd87, 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c,
+ 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, 0xef1c1847, 0x3215d908,
+ 0xdd433b37, 0x24c2ba16, 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd,
+ 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, 0x043556f1, 0xd7a3c76b,
+ 0x3c11183b, 0x5924a509, 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e,
+ 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, 0x771fe71c, 0x4e3d06fa,
+ 0x2965dcb9, 0x99e71d0f, 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a,
+ 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, 0xf2f74ea7, 0x361d2b3d,
+ 0x1939260f, 0x19c27960, 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66,
+ 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, 0xc332ddef, 0xbe6c5aa5,
+ 0x65582185, 0x68ab9802, 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84,
+ 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, 0x13cca830, 0xeb61bd96,
+ 0x0334fe1e, 0xaa0363cf, 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14,
+ 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, 0x648b1eaf, 0x19bdf0ca,
+ 0xa02369b9, 0x655abb50, 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7,
+ 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, 0xf837889a, 0x97e32d77,
+ 0x11ed935f, 0x16681281, 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99,
+ 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, 0xcdb30aeb, 0x532e3054,
+ 0x8fd948e4, 0x6dbc3128, 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73,
+ 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, 0x45eee2b6, 0xa3aaabea,
+ 0xdb6c4f15, 0xfacb4fd0, 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105,
+ 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, 0xcf62a1f2, 0x5b8d2646,
+ 0xfc8883a0, 0xc1c7b6a3, 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285,
+ 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, 0x58428d2a, 0x0c55f5ea,
+ 0x1dadf43e, 0x233f7061, 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb,
+ 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, 0xa6078084, 0x19f8509e,
+ 0xe8efd855, 0x61d99735, 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc,
+ 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, 0xdb73dbd3, 0x105588cd,
+ 0x675fda79, 0xe3674340, 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20,
+ 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7,
+}
+
+var s2 = [256]uint32{
+ 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, 0x411520f7, 0x7602d4f7,
+ 0xbcf46b2e, 0xd4a20068, 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af,
+ 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, 0x4d95fc1d, 0x96b591af,
+ 0x70f4ddd3, 0x66a02f45, 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504,
+ 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, 0x28507825, 0x530429f4,
+ 0x0a2c86da, 0xe9b66dfb, 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee,
+ 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, 0xaace1e7c, 0xd3375fec,
+ 0xce78a399, 0x406b2a42, 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b,
+ 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, 0x3a6efa74, 0xdd5b4332,
+ 0x6841e7f7, 0xca7820fb, 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527,
+ 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, 0x55a867bc, 0xa1159a58,
+ 0xcca92963, 0x99e1db33, 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c,
+ 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, 0x95c11548, 0xe4c66d22,
+ 0x48c1133f, 0xc70f86dc, 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17,
+ 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, 0x257b7834, 0x602a9c60,
+ 0xdff8e8a3, 0x1f636c1b, 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115,
+ 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, 0x85b2a20e, 0xe6ba0d99,
+ 0xde720c8c, 0x2da2f728, 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0,
+ 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, 0x0a476341, 0x992eff74,
+ 0x3a6f6eab, 0xf4f8fd37, 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d,
+ 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, 0xf1290dc7, 0xcc00ffa3,
+ 0xb5390f92, 0x690fed0b, 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3,
+ 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, 0x37392eb3, 0xcc115979,
+ 0x8026e297, 0xf42e312d, 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c,
+ 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, 0x1a6b1018, 0x11caedfa,
+ 0x3d25bdd8, 0xe2e1c3c9, 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a,
+ 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, 0x9dbc8057, 0xf0f7c086,
+ 0x60787bf8, 0x6003604d, 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc,
+ 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, 0x77a057be, 0xbde8ae24,
+ 0x55464299, 0xbf582e61, 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2,
+ 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, 0x7aeb2661, 0x8b1ddf84,
+ 0x846a0e79, 0x915f95e2, 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c,
+ 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, 0xb77f19b6, 0xe0a9dc09,
+ 0x662d09a1, 0xc4324633, 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10,
+ 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, 0xdcb7da83, 0x573906fe,
+ 0xa1e2ce9b, 0x4fcd7f52, 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027,
+ 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, 0xf0177a28, 0xc0f586e0,
+ 0x006058aa, 0x30dc7d62, 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634,
+ 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, 0x6f05e409, 0x4b7c0188,
+ 0x39720a3d, 0x7c927c24, 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc,
+ 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, 0x1e50ef5e, 0xb161e6f8,
+ 0xa28514d9, 0x6c51133c, 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837,
+ 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0,
+}
+
+var s3 = [256]uint32{
+ 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, 0x5cb0679e, 0x4fa33742,
+ 0xd3822740, 0x99bc9bbe, 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b,
+ 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, 0x5748ab2f, 0xbc946e79,
+ 0xc6a376d2, 0x6549c2c8, 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6,
+ 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, 0xa1fad5f0, 0x6a2d519a,
+ 0x63ef8ce2, 0x9a86ee22, 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4,
+ 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, 0x2826a2f9, 0xa73a3ae1,
+ 0x4ba99586, 0xef5562e9, 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59,
+ 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, 0xe990fd5a, 0x9e34d797,
+ 0x2cf0b7d9, 0x022b8b51, 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28,
+ 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, 0xe029ac71, 0xe019a5e6,
+ 0x47b0acfd, 0xed93fa9b, 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28,
+ 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, 0x15056dd4, 0x88f46dba,
+ 0x03a16125, 0x0564f0bd, 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a,
+ 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, 0x7533d928, 0xb155fdf5,
+ 0x03563482, 0x8aba3cbb, 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f,
+ 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, 0xea7a90c2, 0xfb3e7bce,
+ 0x5121ce64, 0x774fbe32, 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680,
+ 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, 0xb39a460a, 0x6445c0dd,
+ 0x586cdecf, 0x1c20c8ae, 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb,
+ 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, 0x72eacea8, 0xfa6484bb,
+ 0x8d6612ae, 0xbf3c6f47, 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370,
+ 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, 0x4040cb08, 0x4eb4e2cc,
+ 0x34d2466a, 0x0115af84, 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048,
+ 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, 0x611560b1, 0xe7933fdc,
+ 0xbb3a792b, 0x344525bd, 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9,
+ 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, 0x1a908749, 0xd44fbd9a,
+ 0xd0dadecb, 0xd50ada38, 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f,
+ 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, 0xbf97222c, 0x15e6fc2a,
+ 0x0f91fc71, 0x9b941525, 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1,
+ 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, 0xe0ec6e0e, 0x1698db3b,
+ 0x4c98a0be, 0x3278e964, 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e,
+ 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, 0xdf359f8d, 0x9b992f2e,
+ 0xe60b6f47, 0x0fe3f11d, 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f,
+ 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, 0xf523f357, 0xa6327623,
+ 0x93a83531, 0x56cccd02, 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc,
+ 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, 0xe6c6c7bd, 0x327a140a,
+ 0x45e1d006, 0xc3f27b9a, 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6,
+ 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, 0x53113ec0, 0x1640e3d3,
+ 0x38abbd60, 0x2547adf0, 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060,
+ 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, 0x1948c25c, 0x02fb8a8c,
+ 0x01c36ae4, 0xd6ebe1f9, 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f,
+ 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6,
+}
+
+var p = [18]uint32{
+ 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, 0xa4093822, 0x299f31d0,
+ 0x082efa98, 0xec4e6c89, 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c,
+ 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, 0x9216d5d9, 0x8979fb1b,
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/bn256.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/bn256.go
new file mode 100644
index 00000000000..014f8b3557c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/bn256.go
@@ -0,0 +1,404 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package bn256 implements a particular bilinear group at the 128-bit security level.
+//
+// Bilinear groups are the basis of many of the new cryptographic protocols
+// that have been proposed over the past decade. They consist of a triplet of
+// groups (G₁, G₂ and GT) such that there exists a function e(g₁ˣ,g₂ʸ)=gTˣʸ
+// (where gₓ is a generator of the respective group). That function is called
+// a pairing function.
+//
+// This package specifically implements the Optimal Ate pairing over a 256-bit
+// Barreto-Naehrig curve as described in
+// http://cryptojedi.org/papers/dclxvi-20100714.pdf. Its output is compatible
+// with the implementation described in that paper.
+package bn256 // import "golang.org/x/crypto/bn256"
+
+import (
+ "crypto/rand"
+ "io"
+ "math/big"
+)
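To make the pairing relation e(g₁ˣ,g₂ʸ)=gTˣʸ from the package comment concrete, here is a hedged standalone sketch against the exported API (not part of the vendored file; comparing Marshal output is simply a convenient equality check for GT elements):

package main

import (
	"bytes"
	"crypto/rand"
	"fmt"
	"math/big"

	"golang.org/x/crypto/bn256"
)

func main() {
	a, ga, _ := bn256.RandomG1(rand.Reader) // a and g₁ᵃ
	b, hb, _ := bn256.RandomG2(rand.Reader) // b and g₂ᵇ

	lhs := bn256.Pair(ga, hb) // e(g₁ᵃ, g₂ᵇ)

	one := big.NewInt(1)
	rhs := bn256.Pair(new(bn256.G1).ScalarBaseMult(one), new(bn256.G2).ScalarBaseMult(one))
	rhs.ScalarMult(rhs, a) // e(g₁, g₂)ᵃ
	rhs.ScalarMult(rhs, b) // e(g₁, g₂)ᵃᵇ

	fmt.Println(bytes.Equal(lhs.Marshal(), rhs.Marshal())) // true: bilinearity holds
}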
+
+// BUG(agl): this implementation is not constant time.
+// TODO(agl): keep GF(p²) elements in Montgomery form.
+
+// G1 is an abstract cyclic group. The zero value is suitable for use as the
+// output of an operation, but cannot be used as an input.
+type G1 struct {
+ p *curvePoint
+}
+
+// RandomG1 returns x and g₁ˣ where x is a random, non-zero number read from r.
+func RandomG1(r io.Reader) (*big.Int, *G1, error) {
+ var k *big.Int
+ var err error
+
+ for {
+ k, err = rand.Int(r, Order)
+ if err != nil {
+ return nil, nil, err
+ }
+ if k.Sign() > 0 {
+ break
+ }
+ }
+
+ return k, new(G1).ScalarBaseMult(k), nil
+}
+
+func (g *G1) String() string {
+ return "bn256.G1" + g.p.String()
+}
+
+// ScalarBaseMult sets e to g*k where g is the generator of the group and
+// then returns e.
+func (e *G1) ScalarBaseMult(k *big.Int) *G1 {
+ if e.p == nil {
+ e.p = newCurvePoint(nil)
+ }
+ e.p.Mul(curveGen, k, new(bnPool))
+ return e
+}
+
+// ScalarMult sets e to a*k and then returns e.
+func (e *G1) ScalarMult(a *G1, k *big.Int) *G1 {
+ if e.p == nil {
+ e.p = newCurvePoint(nil)
+ }
+ e.p.Mul(a.p, k, new(bnPool))
+ return e
+}
+
+// Add sets e to a+b and then returns e.
+// BUG(agl): this function is not complete: a==b fails.
+func (e *G1) Add(a, b *G1) *G1 {
+ if e.p == nil {
+ e.p = newCurvePoint(nil)
+ }
+ e.p.Add(a.p, b.p, new(bnPool))
+ return e
+}
+
+// Neg sets e to -a and then returns e.
+func (e *G1) Neg(a *G1) *G1 {
+ if e.p == nil {
+ e.p = newCurvePoint(nil)
+ }
+ e.p.Negative(a.p)
+ return e
+}
+
+// Marshal converts n to a byte slice.
+func (n *G1) Marshal() []byte {
+ n.p.MakeAffine(nil)
+
+ xBytes := new(big.Int).Mod(n.p.x, p).Bytes()
+ yBytes := new(big.Int).Mod(n.p.y, p).Bytes()
+
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+
+ ret := make([]byte, numBytes*2)
+ copy(ret[1*numBytes-len(xBytes):], xBytes)
+ copy(ret[2*numBytes-len(yBytes):], yBytes)
+
+ return ret
+}
+
+// Unmarshal sets e to the result of converting the output of Marshal back into
+// a group element and then returns e.
+func (e *G1) Unmarshal(m []byte) (*G1, bool) {
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+
+ if len(m) != 2*numBytes {
+ return nil, false
+ }
+
+ if e.p == nil {
+ e.p = newCurvePoint(nil)
+ }
+
+ e.p.x.SetBytes(m[0*numBytes : 1*numBytes])
+ e.p.y.SetBytes(m[1*numBytes : 2*numBytes])
+
+ if e.p.x.Sign() == 0 && e.p.y.Sign() == 0 {
+ // This is the point at infinity.
+ e.p.y.SetInt64(1)
+ e.p.z.SetInt64(0)
+ e.p.t.SetInt64(0)
+ } else {
+ e.p.z.SetInt64(1)
+ e.p.t.SetInt64(1)
+
+ if !e.p.IsOnCurve() {
+ return nil, false
+ }
+ }
+
+ return e, true
+}
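A short round-trip sketch for the encoding above (assuming the bytes, math/big and bn256 imports; not part of the vendored file). Marshal always produces 64 bytes, a 32-byte big-endian x followed by a 32-byte big-endian y, and the all-zero encoding is reserved for the point at infinity, as Unmarshal shows:

g := new(bn256.G1).ScalarBaseMult(big.NewInt(12345))
buf := g.Marshal() // len(buf) == 64
if h, ok := new(bn256.G1).Unmarshal(buf); !ok || !bytes.Equal(h.Marshal(), buf) {
	panic("G1 marshal round trip failed")
}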
+
+// G2 is an abstract cyclic group. The zero value is suitable for use as the
+// output of an operation, but cannot be used as an input.
+type G2 struct {
+ p *twistPoint
+}
+
+// RandomG2 returns x and g₂ˣ where x is a random, non-zero number read from r.
+func RandomG2(r io.Reader) (*big.Int, *G2, error) {
+ var k *big.Int
+ var err error
+
+ for {
+ k, err = rand.Int(r, Order)
+ if err != nil {
+ return nil, nil, err
+ }
+ if k.Sign() > 0 {
+ break
+ }
+ }
+
+ return k, new(G2).ScalarBaseMult(k), nil
+}
+
+func (g *G2) String() string {
+ return "bn256.G2" + g.p.String()
+}
+
+// ScalarBaseMult sets e to g*k where g is the generator of the group and
+// then returns e.
+func (e *G2) ScalarBaseMult(k *big.Int) *G2 {
+ if e.p == nil {
+ e.p = newTwistPoint(nil)
+ }
+ e.p.Mul(twistGen, k, new(bnPool))
+ return e
+}
+
+// ScalarMult sets e to a*k and then returns e.
+func (e *G2) ScalarMult(a *G2, k *big.Int) *G2 {
+ if e.p == nil {
+ e.p = newTwistPoint(nil)
+ }
+ e.p.Mul(a.p, k, new(bnPool))
+ return e
+}
+
+// Add sets e to a+b and then returns e.
+// BUG(agl): this function is not complete: a==b fails.
+func (e *G2) Add(a, b *G2) *G2 {
+ if e.p == nil {
+ e.p = newTwistPoint(nil)
+ }
+ e.p.Add(a.p, b.p, new(bnPool))
+ return e
+}
+
+// Marshal converts n into a byte slice.
+func (n *G2) Marshal() []byte {
+ n.p.MakeAffine(nil)
+
+ xxBytes := new(big.Int).Mod(n.p.x.x, p).Bytes()
+ xyBytes := new(big.Int).Mod(n.p.x.y, p).Bytes()
+ yxBytes := new(big.Int).Mod(n.p.y.x, p).Bytes()
+ yyBytes := new(big.Int).Mod(n.p.y.y, p).Bytes()
+
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+
+ ret := make([]byte, numBytes*4)
+ copy(ret[1*numBytes-len(xxBytes):], xxBytes)
+ copy(ret[2*numBytes-len(xyBytes):], xyBytes)
+ copy(ret[3*numBytes-len(yxBytes):], yxBytes)
+ copy(ret[4*numBytes-len(yyBytes):], yyBytes)
+
+ return ret
+}
+
+// Unmarshal sets e to the result of converting the output of Marshal back into
+// a group element and then returns e.
+func (e *G2) Unmarshal(m []byte) (*G2, bool) {
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+
+ if len(m) != 4*numBytes {
+ return nil, false
+ }
+
+ if e.p == nil {
+ e.p = newTwistPoint(nil)
+ }
+
+ e.p.x.x.SetBytes(m[0*numBytes : 1*numBytes])
+ e.p.x.y.SetBytes(m[1*numBytes : 2*numBytes])
+ e.p.y.x.SetBytes(m[2*numBytes : 3*numBytes])
+ e.p.y.y.SetBytes(m[3*numBytes : 4*numBytes])
+
+ if e.p.x.x.Sign() == 0 &&
+ e.p.x.y.Sign() == 0 &&
+ e.p.y.x.Sign() == 0 &&
+ e.p.y.y.Sign() == 0 {
+ // This is the point at infinity.
+ e.p.y.SetOne()
+ e.p.z.SetZero()
+ e.p.t.SetZero()
+ } else {
+ e.p.z.SetOne()
+ e.p.t.SetOne()
+
+ if !e.p.IsOnCurve() {
+ return nil, false
+ }
+ }
+
+ return e, true
+}
+
+// GT is an abstract cyclic group. The zero value is suitable for use as the
+// output of an operation, but cannot be used as an input.
+type GT struct {
+ p *gfP12
+}
+
+func (g *GT) String() string {
+ return "bn256.GT" + g.p.String()
+}
+
+// ScalarMult sets e to a*k and then returns e.
+func (e *GT) ScalarMult(a *GT, k *big.Int) *GT {
+ if e.p == nil {
+ e.p = newGFp12(nil)
+ }
+ e.p.Exp(a.p, k, new(bnPool))
+ return e
+}
+
+// Add sets e to a+b and then returns e.
+func (e *GT) Add(a, b *GT) *GT {
+ if e.p == nil {
+ e.p = newGFp12(nil)
+ }
+ e.p.Mul(a.p, b.p, new(bnPool))
+ return e
+}
+
+// Neg sets e to -a and then returns e.
+func (e *GT) Neg(a *GT) *GT {
+ if e.p == nil {
+ e.p = newGFp12(nil)
+ }
+ e.p.Invert(a.p, new(bnPool))
+ return e
+}
+
+// Marshal converts n into a byte slice.
+func (n *GT) Marshal() []byte {
+ n.p.Minimal()
+
+ xxxBytes := n.p.x.x.x.Bytes()
+ xxyBytes := n.p.x.x.y.Bytes()
+ xyxBytes := n.p.x.y.x.Bytes()
+ xyyBytes := n.p.x.y.y.Bytes()
+ xzxBytes := n.p.x.z.x.Bytes()
+ xzyBytes := n.p.x.z.y.Bytes()
+ yxxBytes := n.p.y.x.x.Bytes()
+ yxyBytes := n.p.y.x.y.Bytes()
+ yyxBytes := n.p.y.y.x.Bytes()
+ yyyBytes := n.p.y.y.y.Bytes()
+ yzxBytes := n.p.y.z.x.Bytes()
+ yzyBytes := n.p.y.z.y.Bytes()
+
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+
+ ret := make([]byte, numBytes*12)
+ copy(ret[1*numBytes-len(xxxBytes):], xxxBytes)
+ copy(ret[2*numBytes-len(xxyBytes):], xxyBytes)
+ copy(ret[3*numBytes-len(xyxBytes):], xyxBytes)
+ copy(ret[4*numBytes-len(xyyBytes):], xyyBytes)
+ copy(ret[5*numBytes-len(xzxBytes):], xzxBytes)
+ copy(ret[6*numBytes-len(xzyBytes):], xzyBytes)
+ copy(ret[7*numBytes-len(yxxBytes):], yxxBytes)
+ copy(ret[8*numBytes-len(yxyBytes):], yxyBytes)
+ copy(ret[9*numBytes-len(yyxBytes):], yyxBytes)
+ copy(ret[10*numBytes-len(yyyBytes):], yyyBytes)
+ copy(ret[11*numBytes-len(yzxBytes):], yzxBytes)
+ copy(ret[12*numBytes-len(yzyBytes):], yzyBytes)
+
+ return ret
+}
+
+// Unmarshal sets e to the result of converting the output of Marshal back into
+// a group element and then returns e.
+func (e *GT) Unmarshal(m []byte) (*GT, bool) {
+ // Each value is a 256-bit number.
+ const numBytes = 256 / 8
+
+ if len(m) != 12*numBytes {
+ return nil, false
+ }
+
+ if e.p == nil {
+ e.p = newGFp12(nil)
+ }
+
+ e.p.x.x.x.SetBytes(m[0*numBytes : 1*numBytes])
+ e.p.x.x.y.SetBytes(m[1*numBytes : 2*numBytes])
+ e.p.x.y.x.SetBytes(m[2*numBytes : 3*numBytes])
+ e.p.x.y.y.SetBytes(m[3*numBytes : 4*numBytes])
+ e.p.x.z.x.SetBytes(m[4*numBytes : 5*numBytes])
+ e.p.x.z.y.SetBytes(m[5*numBytes : 6*numBytes])
+ e.p.y.x.x.SetBytes(m[6*numBytes : 7*numBytes])
+ e.p.y.x.y.SetBytes(m[7*numBytes : 8*numBytes])
+ e.p.y.y.x.SetBytes(m[8*numBytes : 9*numBytes])
+ e.p.y.y.y.SetBytes(m[9*numBytes : 10*numBytes])
+ e.p.y.z.x.SetBytes(m[10*numBytes : 11*numBytes])
+ e.p.y.z.y.SetBytes(m[11*numBytes : 12*numBytes])
+
+ return e, true
+}
+
+// Pair calculates an Optimal Ate pairing.
+func Pair(g1 *G1, g2 *G2) *GT {
+ return &GT{optimalAte(g2.p, g1.p, new(bnPool))}
+}
+
+// bnPool implements a tiny cache of *big.Int objects that's used to reduce the
+// number of allocations made during processing.
+type bnPool struct {
+ bns []*big.Int
+ count int
+}
+
+func (pool *bnPool) Get() *big.Int {
+ if pool == nil {
+ return new(big.Int)
+ }
+
+ pool.count++
+ l := len(pool.bns)
+ if l == 0 {
+ return new(big.Int)
+ }
+
+ bn := pool.bns[l-1]
+ pool.bns = pool.bns[:l-1]
+ return bn
+}
+
+func (pool *bnPool) Put(bn *big.Int) {
+ if pool == nil {
+ return
+ }
+ pool.bns = append(pool.bns, bn)
+ pool.count--
+}
+
+func (pool *bnPool) Count() int {
+ return pool.count
+}
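For orientation, the Get/Put discipline the rest of the package follows, as an in-package sketch (not part of the vendored file); note that a nil *bnPool is also valid and simply allocates on every Get:

pool := new(bnPool)
x := pool.Get() // reuses a cached *big.Int when one is available
x.SetInt64(42)
// ... arithmetic on x ...
pool.Put(x) // hand the value back so later Gets can reuse the allocation
// pool.Count() tracks Gets minus Puts; the tests below use it to catch leaks.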
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/bn256_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/bn256_test.go
new file mode 100644
index 00000000000..1cec3884ecd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/bn256_test.go
@@ -0,0 +1,304 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bn256
+
+import (
+ "bytes"
+ "crypto/rand"
+ "math/big"
+ "testing"
+)
+
+func TestGFp2Invert(t *testing.T) {
+ pool := new(bnPool)
+
+ a := newGFp2(pool)
+ a.x.SetString("23423492374", 10)
+ a.y.SetString("12934872398472394827398470", 10)
+
+ inv := newGFp2(pool)
+ inv.Invert(a, pool)
+
+ b := newGFp2(pool).Mul(inv, a, pool)
+ if b.x.Int64() != 0 || b.y.Int64() != 1 {
+ t.Fatalf("bad result for a^-1*a: %s %s", b.x, b.y)
+ }
+
+ a.Put(pool)
+ b.Put(pool)
+ inv.Put(pool)
+
+ if c := pool.Count(); c > 0 {
+ t.Errorf("Pool count non-zero: %d\n", c)
+ }
+}
+
+func isZero(n *big.Int) bool {
+ return new(big.Int).Mod(n, p).Int64() == 0
+}
+
+func isOne(n *big.Int) bool {
+ return new(big.Int).Mod(n, p).Int64() == 1
+}
+
+func TestGFp6Invert(t *testing.T) {
+ pool := new(bnPool)
+
+ a := newGFp6(pool)
+ a.x.x.SetString("239487238491", 10)
+ a.x.y.SetString("2356249827341", 10)
+ a.y.x.SetString("082659782", 10)
+ a.y.y.SetString("182703523765", 10)
+ a.z.x.SetString("978236549263", 10)
+ a.z.y.SetString("64893242", 10)
+
+ inv := newGFp6(pool)
+ inv.Invert(a, pool)
+
+ b := newGFp6(pool).Mul(inv, a, pool)
+ if !isZero(b.x.x) ||
+ !isZero(b.x.y) ||
+ !isZero(b.y.x) ||
+ !isZero(b.y.y) ||
+ !isZero(b.z.x) ||
+ !isOne(b.z.y) {
+ t.Fatalf("bad result for a^-1*a: %s", b)
+ }
+
+ a.Put(pool)
+ b.Put(pool)
+ inv.Put(pool)
+
+ if c := pool.Count(); c > 0 {
+ t.Errorf("Pool count non-zero: %d\n", c)
+ }
+}
+
+func TestGFp12Invert(t *testing.T) {
+ pool := new(bnPool)
+
+ a := newGFp12(pool)
+ a.x.x.x.SetString("239846234862342323958623", 10)
+ a.x.x.y.SetString("2359862352529835623", 10)
+ a.x.y.x.SetString("928836523", 10)
+ a.x.y.y.SetString("9856234", 10)
+ a.x.z.x.SetString("235635286", 10)
+ a.x.z.y.SetString("5628392833", 10)
+ a.y.x.x.SetString("252936598265329856238956532167968", 10)
+ a.y.x.y.SetString("23596239865236954178968", 10)
+ a.y.y.x.SetString("95421692834", 10)
+ a.y.y.y.SetString("236548", 10)
+ a.y.z.x.SetString("924523", 10)
+ a.y.z.y.SetString("12954623", 10)
+
+ inv := newGFp12(pool)
+ inv.Invert(a, pool)
+
+ b := newGFp12(pool).Mul(inv, a, pool)
+ if !isZero(b.x.x.x) ||
+ !isZero(b.x.x.y) ||
+ !isZero(b.x.y.x) ||
+ !isZero(b.x.y.y) ||
+ !isZero(b.x.z.x) ||
+ !isZero(b.x.z.y) ||
+ !isZero(b.y.x.x) ||
+ !isZero(b.y.x.y) ||
+ !isZero(b.y.y.x) ||
+ !isZero(b.y.y.y) ||
+ !isZero(b.y.z.x) ||
+ !isOne(b.y.z.y) {
+ t.Fatalf("bad result for a^-1*a: %s", b)
+ }
+
+ a.Put(pool)
+ b.Put(pool)
+ inv.Put(pool)
+
+ if c := pool.Count(); c > 0 {
+ t.Errorf("Pool count non-zero: %d\n", c)
+ }
+}
+
+func TestCurveImpl(t *testing.T) {
+ pool := new(bnPool)
+
+ g := &curvePoint{
+ pool.Get().SetInt64(1),
+ pool.Get().SetInt64(-2),
+ pool.Get().SetInt64(1),
+ pool.Get().SetInt64(0),
+ }
+
+ x := pool.Get().SetInt64(32498273234)
+ X := newCurvePoint(pool).Mul(g, x, pool)
+
+ y := pool.Get().SetInt64(98732423523)
+ Y := newCurvePoint(pool).Mul(g, y, pool)
+
+ s1 := newCurvePoint(pool).Mul(X, y, pool).MakeAffine(pool)
+ s2 := newCurvePoint(pool).Mul(Y, x, pool).MakeAffine(pool)
+
+ if s1.x.Cmp(s2.x) != 0 ||
+		s1.y.Cmp(s2.y) != 0 {
+ t.Errorf("DH points don't match: (%s, %s) (%s, %s)", s1.x, s1.y, s2.x, s2.y)
+ }
+
+ pool.Put(x)
+ X.Put(pool)
+ pool.Put(y)
+ Y.Put(pool)
+ s1.Put(pool)
+ s2.Put(pool)
+ g.Put(pool)
+
+ if c := pool.Count(); c > 0 {
+ t.Errorf("Pool count non-zero: %d\n", c)
+ }
+}
+
+func TestOrderG1(t *testing.T) {
+ g := new(G1).ScalarBaseMult(Order)
+ if !g.p.IsInfinity() {
+ t.Error("G1 has incorrect order")
+ }
+
+ one := new(G1).ScalarBaseMult(new(big.Int).SetInt64(1))
+ g.Add(g, one)
+ g.p.MakeAffine(nil)
+ if g.p.x.Cmp(one.p.x) != 0 || g.p.y.Cmp(one.p.y) != 0 {
+ t.Errorf("1+0 != 1 in G1")
+ }
+}
+
+func TestOrderG2(t *testing.T) {
+ g := new(G2).ScalarBaseMult(Order)
+ if !g.p.IsInfinity() {
+ t.Error("G2 has incorrect order")
+ }
+
+ one := new(G2).ScalarBaseMult(new(big.Int).SetInt64(1))
+ g.Add(g, one)
+ g.p.MakeAffine(nil)
+ if g.p.x.x.Cmp(one.p.x.x) != 0 ||
+ g.p.x.y.Cmp(one.p.x.y) != 0 ||
+ g.p.y.x.Cmp(one.p.y.x) != 0 ||
+ g.p.y.y.Cmp(one.p.y.y) != 0 {
+ t.Errorf("1+0 != 1 in G2")
+ }
+}
+
+func TestOrderGT(t *testing.T) {
+ gt := Pair(&G1{curveGen}, &G2{twistGen})
+ g := new(GT).ScalarMult(gt, Order)
+ if !g.p.IsOne() {
+ t.Error("GT has incorrect order")
+ }
+}
+
+func TestBilinearity(t *testing.T) {
+ for i := 0; i < 2; i++ {
+ a, p1, _ := RandomG1(rand.Reader)
+ b, p2, _ := RandomG2(rand.Reader)
+ e1 := Pair(p1, p2)
+
+ e2 := Pair(&G1{curveGen}, &G2{twistGen})
+ e2.ScalarMult(e2, a)
+ e2.ScalarMult(e2, b)
+
+ minusE2 := new(GT).Neg(e2)
+ e1.Add(e1, minusE2)
+
+ if !e1.p.IsOne() {
+ t.Fatalf("bad pairing result: %s", e1)
+ }
+ }
+}
+
+func TestG1Marshal(t *testing.T) {
+ g := new(G1).ScalarBaseMult(new(big.Int).SetInt64(1))
+ form := g.Marshal()
+ _, ok := new(G1).Unmarshal(form)
+ if !ok {
+ t.Fatalf("failed to unmarshal")
+ }
+
+ g.ScalarBaseMult(Order)
+ form = g.Marshal()
+ g2, ok := new(G1).Unmarshal(form)
+ if !ok {
+ t.Fatalf("failed to unmarshal ∞")
+ }
+ if !g2.p.IsInfinity() {
+ t.Fatalf("∞ unmarshaled incorrectly")
+ }
+}
+
+func TestG2Marshal(t *testing.T) {
+ g := new(G2).ScalarBaseMult(new(big.Int).SetInt64(1))
+ form := g.Marshal()
+ _, ok := new(G2).Unmarshal(form)
+ if !ok {
+ t.Fatalf("failed to unmarshal")
+ }
+
+ g.ScalarBaseMult(Order)
+ form = g.Marshal()
+ g2, ok := new(G2).Unmarshal(form)
+ if !ok {
+ t.Fatalf("failed to unmarshal ∞")
+ }
+ if !g2.p.IsInfinity() {
+ t.Fatalf("∞ unmarshaled incorrectly")
+ }
+}
+
+func TestG1Identity(t *testing.T) {
+ g := new(G1).ScalarBaseMult(new(big.Int).SetInt64(0))
+ if !g.p.IsInfinity() {
+ t.Error("failure")
+ }
+}
+
+func TestG2Identity(t *testing.T) {
+ g := new(G2).ScalarBaseMult(new(big.Int).SetInt64(0))
+ if !g.p.IsInfinity() {
+ t.Error("failure")
+ }
+}
+
+func TestTripartiteDiffieHellman(t *testing.T) {
+ a, _ := rand.Int(rand.Reader, Order)
+ b, _ := rand.Int(rand.Reader, Order)
+ c, _ := rand.Int(rand.Reader, Order)
+
+ pa, _ := new(G1).Unmarshal(new(G1).ScalarBaseMult(a).Marshal())
+ qa, _ := new(G2).Unmarshal(new(G2).ScalarBaseMult(a).Marshal())
+ pb, _ := new(G1).Unmarshal(new(G1).ScalarBaseMult(b).Marshal())
+ qb, _ := new(G2).Unmarshal(new(G2).ScalarBaseMult(b).Marshal())
+ pc, _ := new(G1).Unmarshal(new(G1).ScalarBaseMult(c).Marshal())
+ qc, _ := new(G2).Unmarshal(new(G2).ScalarBaseMult(c).Marshal())
+
+ k1 := Pair(pb, qc)
+ k1.ScalarMult(k1, a)
+ k1Bytes := k1.Marshal()
+
+ k2 := Pair(pc, qa)
+ k2.ScalarMult(k2, b)
+ k2Bytes := k2.Marshal()
+
+ k3 := Pair(pa, qb)
+ k3.ScalarMult(k3, c)
+ k3Bytes := k3.Marshal()
+
+ if !bytes.Equal(k1Bytes, k2Bytes) || !bytes.Equal(k2Bytes, k3Bytes) {
+ t.Errorf("keys didn't agree")
+ }
+}
+
+func BenchmarkPairing(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ Pair(&G1{curveGen}, &G2{twistGen})
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/constants.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/constants.go
new file mode 100644
index 00000000000..08ccfdf3d68
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/constants.go
@@ -0,0 +1,44 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bn256
+
+import (
+ "math/big"
+)
+
+func bigFromBase10(s string) *big.Int {
+ n, _ := new(big.Int).SetString(s, 10)
+ return n
+}
+
+// u is the BN parameter that determines the prime: 1868033³.
+var u = bigFromBase10("6518589491078791937")
+
+// p is a prime over which we form a basic field: 36u⁴+36u³+24u²+6u+1.
+var p = bigFromBase10("65000549695646603732796438742359905742825358107623003571877145026864184071783")
+
+// Order is the number of elements in both G₁ and G₂: 36u⁴+36u³+18u²+6u+1.
+var Order = bigFromBase10("65000549695646603732796438742359905742570406053903786389881062969044166799969")
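The two polynomial relations above can be checked mechanically. A hedged in-package sketch follows (intended for a _test.go file in this package, with math/big and testing imported; the function name is hypothetical):

func TestBNParameterRelations(t *testing.T) {
	u2 := new(big.Int).Mul(u, u)
	u3 := new(big.Int).Mul(u2, u)
	u4 := new(big.Int).Mul(u3, u)

	// eval computes 36u⁴ + 36u³ + c2·u² + 6u + 1 for the given quadratic coefficient c2.
	eval := func(c2 int64) *big.Int {
		n := new(big.Int).Mul(u4, big.NewInt(36))
		n.Add(n, new(big.Int).Mul(u3, big.NewInt(36)))
		n.Add(n, new(big.Int).Mul(u2, big.NewInt(c2)))
		n.Add(n, new(big.Int).Mul(u, big.NewInt(6)))
		return n.Add(n, big.NewInt(1))
	}

	if eval(24).Cmp(p) != 0 {
		t.Error("p != 36u⁴+36u³+24u²+6u+1")
	}
	if eval(18).Cmp(Order) != 0 {
		t.Error("Order != 36u⁴+36u³+18u²+6u+1")
	}
}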
+
+// xiToPMinus1Over6 is ξ^((p-1)/6) where ξ = i+3.
+var xiToPMinus1Over6 = &gfP2{bigFromBase10("8669379979083712429711189836753509758585994370025260553045152614783263110636"), bigFromBase10("19998038925833620163537568958541907098007303196759855091367510456613536016040")}
+
+// xiToPMinus1Over3 is ξ^((p-1)/3) where ξ = i+3.
+var xiToPMinus1Over3 = &gfP2{bigFromBase10("26098034838977895781559542626833399156321265654106457577426020397262786167059"), bigFromBase10("15931493369629630809226283458085260090334794394361662678240713231519278691715")}
+
+// xiToPMinus1Over2 is ξ^((p-1)/2) where ξ = i+3.
+var xiToPMinus1Over2 = &gfP2{bigFromBase10("50997318142241922852281555961173165965672272825141804376761836765206060036244"), bigFromBase10("38665955945962842195025998234511023902832543644254935982879660597356748036009")}
+
+// xiToPSquaredMinus1Over3 is ξ^((p²-1)/3) where ξ = i+3.
+var xiToPSquaredMinus1Over3 = bigFromBase10("65000549695646603727810655408050771481677621702948236658134783353303381437752")
+
+// xiTo2PSquaredMinus2Over3 is ξ^((2p²-2)/3) where ξ = i+3 (a cubic root of unity, mod p).
+var xiTo2PSquaredMinus2Over3 = bigFromBase10("4985783334309134261147736404674766913742361673560802634030")
+
+// xiToPSquaredMinus1Over6 is ξ^((p²-1)/6) where ξ = i+3 (a cubic root of -1, mod p).
+var xiToPSquaredMinus1Over6 = bigFromBase10("65000549695646603727810655408050771481677621702948236658134783353303381437753")
+
+// xiTo2PMinus2Over3 is ξ^((2p-2)/3) where ξ = i+3.
+var xiTo2PMinus2Over3 = &gfP2{bigFromBase10("19885131339612776214803633203834694332692106372356013117629940868870585019582"), bigFromBase10("21645619881471562101905880913352894726728173167203616652430647841922248593627")}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/curve.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/curve.go
new file mode 100644
index 00000000000..55b7063f163
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/curve.go
@@ -0,0 +1,278 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bn256
+
+import (
+ "math/big"
+)
+
+// curvePoint implements the elliptic curve y²=x³+3. Points are kept in
+// Jacobian form and t=z² when valid. G₁ is the set of points of this curve on
+// GF(p).
+type curvePoint struct {
+ x, y, z, t *big.Int
+}
+
+var curveB = new(big.Int).SetInt64(3)
+
+// curveGen is the generator of G₁.
+var curveGen = &curvePoint{
+ new(big.Int).SetInt64(1),
+ new(big.Int).SetInt64(-2),
+ new(big.Int).SetInt64(1),
+ new(big.Int).SetInt64(1),
+}
+
+func newCurvePoint(pool *bnPool) *curvePoint {
+ return &curvePoint{
+ pool.Get(),
+ pool.Get(),
+ pool.Get(),
+ pool.Get(),
+ }
+}
+
+func (c *curvePoint) String() string {
+ c.MakeAffine(new(bnPool))
+ return "(" + c.x.String() + ", " + c.y.String() + ")"
+}
+
+func (c *curvePoint) Put(pool *bnPool) {
+ pool.Put(c.x)
+ pool.Put(c.y)
+ pool.Put(c.z)
+ pool.Put(c.t)
+}
+
+func (c *curvePoint) Set(a *curvePoint) {
+ c.x.Set(a.x)
+ c.y.Set(a.y)
+ c.z.Set(a.z)
+ c.t.Set(a.t)
+}
+
+// IsOnCurve returns true iff c is on the curve where c must be in affine form.
+func (c *curvePoint) IsOnCurve() bool {
+ yy := new(big.Int).Mul(c.y, c.y)
+ xxx := new(big.Int).Mul(c.x, c.x)
+ xxx.Mul(xxx, c.x)
+ yy.Sub(yy, xxx)
+ yy.Sub(yy, curveB)
+ if yy.Sign() < 0 || yy.Cmp(p) >= 0 {
+ yy.Mod(yy, p)
+ }
+ return yy.Sign() == 0
+}
+
+func (c *curvePoint) SetInfinity() {
+ c.z.SetInt64(0)
+}
+
+func (c *curvePoint) IsInfinity() bool {
+ return c.z.Sign() == 0
+}
+
+func (c *curvePoint) Add(a, b *curvePoint, pool *bnPool) {
+ if a.IsInfinity() {
+ c.Set(b)
+ return
+ }
+ if b.IsInfinity() {
+ c.Set(a)
+ return
+ }
+
+ // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/addition/add-2007-bl.op3
+
+ // Normalize the points by replacing a = [x1:y1:z1] and b = [x2:y2:z2]
+ // by [u1:s1:z1·z2] and [u2:s2:z1·z2]
+	// where u1 = x1·z2², s1 = y1·z2³ and u2 = x2·z1², s2 = y2·z1³
+ z1z1 := pool.Get().Mul(a.z, a.z)
+ z1z1.Mod(z1z1, p)
+ z2z2 := pool.Get().Mul(b.z, b.z)
+ z2z2.Mod(z2z2, p)
+ u1 := pool.Get().Mul(a.x, z2z2)
+ u1.Mod(u1, p)
+ u2 := pool.Get().Mul(b.x, z1z1)
+ u2.Mod(u2, p)
+
+ t := pool.Get().Mul(b.z, z2z2)
+ t.Mod(t, p)
+ s1 := pool.Get().Mul(a.y, t)
+ s1.Mod(s1, p)
+
+ t.Mul(a.z, z1z1)
+ t.Mod(t, p)
+ s2 := pool.Get().Mul(b.y, t)
+ s2.Mod(s2, p)
+
+ // Compute x = (2h)²(s²-u1-u2)
+ // where s = (s2-s1)/(u2-u1) is the slope of the line through
+ // (u1,s1) and (u2,s2). The extra factor 2h = 2(u2-u1) comes from the value of z below.
+ // This is also:
+ // 4(s2-s1)² - 4h²(u1+u2) = 4(s2-s1)² - 4h³ - 4h²(2u1)
+ // = r² - j - 2v
+ // with the notations below.
+ h := pool.Get().Sub(u2, u1)
+ xEqual := h.Sign() == 0
+
+ t.Add(h, h)
+ // i = 4h²
+ i := pool.Get().Mul(t, t)
+ i.Mod(i, p)
+ // j = 4h³
+ j := pool.Get().Mul(h, i)
+ j.Mod(j, p)
+
+ t.Sub(s2, s1)
+ yEqual := t.Sign() == 0
+ if xEqual && yEqual {
+ c.Double(a, pool)
+ return
+ }
+ r := pool.Get().Add(t, t)
+
+ v := pool.Get().Mul(u1, i)
+ v.Mod(v, p)
+
+ // t4 = 4(s2-s1)²
+ t4 := pool.Get().Mul(r, r)
+ t4.Mod(t4, p)
+ t.Add(v, v)
+ t6 := pool.Get().Sub(t4, j)
+ c.x.Sub(t6, t)
+
+ // Set y = -(2h)³(s1 + s*(x/4h²-u1))
+ // This is also
+ // y = - 2·s1·j - (s2-s1)(2x - 2i·u1) = r(v-x) - 2·s1·j
+ t.Sub(v, c.x) // t7
+ t4.Mul(s1, j) // t8
+ t4.Mod(t4, p)
+ t6.Add(t4, t4) // t9
+ t4.Mul(r, t) // t10
+ t4.Mod(t4, p)
+ c.y.Sub(t4, t6)
+
+ // Set z = 2(u2-u1)·z1·z2 = 2h·z1·z2
+ t.Add(a.z, b.z) // t11
+ t4.Mul(t, t) // t12
+ t4.Mod(t4, p)
+ t.Sub(t4, z1z1) // t13
+ t4.Sub(t, z2z2) // t14
+ c.z.Mul(t4, h)
+ c.z.Mod(c.z, p)
+
+ pool.Put(z1z1)
+ pool.Put(z2z2)
+ pool.Put(u1)
+ pool.Put(u2)
+ pool.Put(t)
+ pool.Put(s1)
+ pool.Put(s2)
+ pool.Put(h)
+ pool.Put(i)
+ pool.Put(j)
+ pool.Put(r)
+ pool.Put(v)
+ pool.Put(t4)
+ pool.Put(t6)
+}
+
+func (c *curvePoint) Double(a *curvePoint, pool *bnPool) {
+ // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/doubling/dbl-2009-l.op3
+ A := pool.Get().Mul(a.x, a.x)
+ A.Mod(A, p)
+ B := pool.Get().Mul(a.y, a.y)
+ B.Mod(B, p)
+ C := pool.Get().Mul(B, B)
+ C.Mod(C, p)
+
+ t := pool.Get().Add(a.x, B)
+ t2 := pool.Get().Mul(t, t)
+ t2.Mod(t2, p)
+ t.Sub(t2, A)
+ t2.Sub(t, C)
+ d := pool.Get().Add(t2, t2)
+ t.Add(A, A)
+ e := pool.Get().Add(t, A)
+ f := pool.Get().Mul(e, e)
+ f.Mod(f, p)
+
+ t.Add(d, d)
+ c.x.Sub(f, t)
+
+ t.Add(C, C)
+ t2.Add(t, t)
+ t.Add(t2, t2)
+ c.y.Sub(d, c.x)
+ t2.Mul(e, c.y)
+ t2.Mod(t2, p)
+ c.y.Sub(t2, t)
+
+ t.Mul(a.y, a.z)
+ t.Mod(t, p)
+ c.z.Add(t, t)
+
+ pool.Put(A)
+ pool.Put(B)
+ pool.Put(C)
+ pool.Put(t)
+ pool.Put(t2)
+ pool.Put(d)
+ pool.Put(e)
+ pool.Put(f)
+}
+
+func (c *curvePoint) Mul(a *curvePoint, scalar *big.Int, pool *bnPool) *curvePoint {
+ sum := newCurvePoint(pool)
+ sum.SetInfinity()
+ t := newCurvePoint(pool)
+
+ for i := scalar.BitLen(); i >= 0; i-- {
+ t.Double(sum, pool)
+ if scalar.Bit(i) != 0 {
+ sum.Add(t, a, pool)
+ } else {
+ sum.Set(t)
+ }
+ }
+
+ c.Set(sum)
+ sum.Put(pool)
+ t.Put(pool)
+ return c
+}
+
+func (c *curvePoint) MakeAffine(pool *bnPool) *curvePoint {
+ if words := c.z.Bits(); len(words) == 1 && words[0] == 1 {
+ return c
+ }
+
+ zInv := pool.Get().ModInverse(c.z, p)
+ t := pool.Get().Mul(c.y, zInv)
+ t.Mod(t, p)
+ zInv2 := pool.Get().Mul(zInv, zInv)
+ zInv2.Mod(zInv2, p)
+ c.y.Mul(t, zInv2)
+ c.y.Mod(c.y, p)
+ t.Mul(c.x, zInv2)
+ t.Mod(t, p)
+ c.x.Set(t)
+ c.z.SetInt64(1)
+ c.t.SetInt64(1)
+
+ pool.Put(zInv)
+ pool.Put(t)
+ pool.Put(zInv2)
+
+ return c
+}
+
+func (c *curvePoint) Negative(a *curvePoint) {
+ c.x.Set(a.x)
+ c.y.Neg(a.y)
+ c.z.Set(a.z)
+ c.t.SetInt64(0)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/example_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/example_test.go
new file mode 100644
index 00000000000..b2d19807a25
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/example_test.go
@@ -0,0 +1,43 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bn256
+
+import (
+ "crypto/rand"
+)
+
+func ExamplePair() {
+ // This implements the tripartite Diffie-Hellman algorithm from "A One
+ // Round Protocol for Tripartite Diffie-Hellman", A. Joux.
+ // http://www.springerlink.com/content/cddc57yyva0hburb/fulltext.pdf
+
+ // Each of three parties, a, b and c, generate a private value.
+ a, _ := rand.Int(rand.Reader, Order)
+ b, _ := rand.Int(rand.Reader, Order)
+ c, _ := rand.Int(rand.Reader, Order)
+
+ // Then each party calculates g₁ and g₂ times their private value.
+ pa := new(G1).ScalarBaseMult(a)
+ qa := new(G2).ScalarBaseMult(a)
+
+ pb := new(G1).ScalarBaseMult(b)
+ qb := new(G2).ScalarBaseMult(b)
+
+ pc := new(G1).ScalarBaseMult(c)
+ qc := new(G2).ScalarBaseMult(c)
+
+ // Now each party exchanges its public values with the other two and
+ // all parties can calculate the shared key.
+ k1 := Pair(pb, qc)
+ k1.ScalarMult(k1, a)
+
+ k2 := Pair(pc, qa)
+ k2.ScalarMult(k2, b)
+
+ k3 := Pair(pa, qb)
+ k3.ScalarMult(k3, c)
+
+ // k1, k2 and k3 will all be equal.
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp12.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp12.go
new file mode 100644
index 00000000000..f084eddf212
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp12.go
@@ -0,0 +1,200 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bn256
+
+// For details of the algorithms used, see "Multiplication and Squaring on
+// Pairing-Friendly Fields", Devegili et al.
+// http://eprint.iacr.org/2006/471.pdf.
+
+import (
+ "math/big"
+)
+
+// gfP12 implements the field of size p¹² as a quadratic extension of gfP6
+// where ω²=τ.
+type gfP12 struct {
+ x, y *gfP6 // value is xω + y
+}
+
+func newGFp12(pool *bnPool) *gfP12 {
+ return &gfP12{newGFp6(pool), newGFp6(pool)}
+}
+
+func (e *gfP12) String() string {
+ return "(" + e.x.String() + "," + e.y.String() + ")"
+}
+
+func (e *gfP12) Put(pool *bnPool) {
+ e.x.Put(pool)
+ e.y.Put(pool)
+}
+
+func (e *gfP12) Set(a *gfP12) *gfP12 {
+ e.x.Set(a.x)
+ e.y.Set(a.y)
+ return e
+}
+
+func (e *gfP12) SetZero() *gfP12 {
+ e.x.SetZero()
+ e.y.SetZero()
+ return e
+}
+
+func (e *gfP12) SetOne() *gfP12 {
+ e.x.SetZero()
+ e.y.SetOne()
+ return e
+}
+
+func (e *gfP12) Minimal() {
+ e.x.Minimal()
+ e.y.Minimal()
+}
+
+func (e *gfP12) IsZero() bool {
+ e.Minimal()
+ return e.x.IsZero() && e.y.IsZero()
+}
+
+func (e *gfP12) IsOne() bool {
+ e.Minimal()
+ return e.x.IsZero() && e.y.IsOne()
+}
+
+func (e *gfP12) Conjugate(a *gfP12) *gfP12 {
+ e.x.Negative(a.x)
+ e.y.Set(a.y)
+	return e
+}
+
+func (e *gfP12) Negative(a *gfP12) *gfP12 {
+ e.x.Negative(a.x)
+ e.y.Negative(a.y)
+ return e
+}
+
+// Frobenius computes (xω+y)^p = x^p ω·ξ^((p-1)/6) + y^p
+func (e *gfP12) Frobenius(a *gfP12, pool *bnPool) *gfP12 {
+ e.x.Frobenius(a.x, pool)
+ e.y.Frobenius(a.y, pool)
+ e.x.MulScalar(e.x, xiToPMinus1Over6, pool)
+ return e
+}
+
+// FrobeniusP2 computes (xω+y)^p² = x^p² ω·ξ^((p²-1)/6) + y^p²
+func (e *gfP12) FrobeniusP2(a *gfP12, pool *bnPool) *gfP12 {
+ e.x.FrobeniusP2(a.x)
+ e.x.MulGFP(e.x, xiToPSquaredMinus1Over6)
+ e.y.FrobeniusP2(a.y)
+ return e
+}
+
+func (e *gfP12) Add(a, b *gfP12) *gfP12 {
+ e.x.Add(a.x, b.x)
+ e.y.Add(a.y, b.y)
+ return e
+}
+
+func (e *gfP12) Sub(a, b *gfP12) *gfP12 {
+ e.x.Sub(a.x, b.x)
+ e.y.Sub(a.y, b.y)
+ return e
+}
+
+func (e *gfP12) Mul(a, b *gfP12, pool *bnPool) *gfP12 {
+ tx := newGFp6(pool)
+ tx.Mul(a.x, b.y, pool)
+ t := newGFp6(pool)
+ t.Mul(b.x, a.y, pool)
+ tx.Add(tx, t)
+
+ ty := newGFp6(pool)
+ ty.Mul(a.y, b.y, pool)
+ t.Mul(a.x, b.x, pool)
+ t.MulTau(t, pool)
+ e.y.Add(ty, t)
+ e.x.Set(tx)
+
+ tx.Put(pool)
+ ty.Put(pool)
+ t.Put(pool)
+ return e
+}
+
+func (e *gfP12) MulScalar(a *gfP12, b *gfP6, pool *bnPool) *gfP12 {
+ e.x.Mul(e.x, b, pool)
+ e.y.Mul(e.y, b, pool)
+ return e
+}
+
+func (c *gfP12) Exp(a *gfP12, power *big.Int, pool *bnPool) *gfP12 {
+ sum := newGFp12(pool)
+ sum.SetOne()
+ t := newGFp12(pool)
+
+ for i := power.BitLen() - 1; i >= 0; i-- {
+ t.Square(sum, pool)
+ if power.Bit(i) != 0 {
+ sum.Mul(t, a, pool)
+ } else {
+ sum.Set(t)
+ }
+ }
+
+ c.Set(sum)
+
+ sum.Put(pool)
+ t.Put(pool)
+
+ return c
+}
+
+func (e *gfP12) Square(a *gfP12, pool *bnPool) *gfP12 {
+ // Complex squaring algorithm
+ v0 := newGFp6(pool)
+ v0.Mul(a.x, a.y, pool)
+
+ t := newGFp6(pool)
+ t.MulTau(a.x, pool)
+ t.Add(a.y, t)
+ ty := newGFp6(pool)
+ ty.Add(a.x, a.y)
+ ty.Mul(ty, t, pool)
+ ty.Sub(ty, v0)
+ t.MulTau(v0, pool)
+ ty.Sub(ty, t)
+
+ e.y.Set(ty)
+ e.x.Double(v0)
+
+ v0.Put(pool)
+ t.Put(pool)
+ ty.Put(pool)
+
+ return e
+}
+
+func (e *gfP12) Invert(a *gfP12, pool *bnPool) *gfP12 {
+ // See "Implementing cryptographic pairings", M. Scott, section 3.2.
+ // ftp://136.206.11.249/pub/crypto/pairings.pdf
+ t1 := newGFp6(pool)
+ t2 := newGFp6(pool)
+
+ t1.Square(a.x, pool)
+ t2.Square(a.y, pool)
+ t1.MulTau(t1, pool)
+ t1.Sub(t2, t1)
+ t2.Invert(t1, pool)
+
+ e.x.Negative(a.x)
+ e.y.Set(a.y)
+ e.MulScalar(e, t2, pool)
+
+ t1.Put(pool)
+ t2.Put(pool)
+
+ return e
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp2.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp2.go
new file mode 100644
index 00000000000..97f3f1f3fa1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp2.go
@@ -0,0 +1,219 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bn256
+
+// For details of the algorithms used, see "Multiplication and Squaring on
+// Pairing-Friendly Fields", Devegili et al.
+// http://eprint.iacr.org/2006/471.pdf.
+
+import (
+ "math/big"
+)
+
+// gfP2 implements a field of size p² as a quadratic extension of the base
+// field where i²=-1.
+type gfP2 struct {
+ x, y *big.Int // value is xi+y.
+}
+
+func newGFp2(pool *bnPool) *gfP2 {
+ return &gfP2{pool.Get(), pool.Get()}
+}
+
+func (e *gfP2) String() string {
+ x := new(big.Int).Mod(e.x, p)
+ y := new(big.Int).Mod(e.y, p)
+ return "(" + x.String() + "," + y.String() + ")"
+}
+
+func (e *gfP2) Put(pool *bnPool) {
+ pool.Put(e.x)
+ pool.Put(e.y)
+}
+
+func (e *gfP2) Set(a *gfP2) *gfP2 {
+ e.x.Set(a.x)
+ e.y.Set(a.y)
+ return e
+}
+
+func (e *gfP2) SetZero() *gfP2 {
+ e.x.SetInt64(0)
+ e.y.SetInt64(0)
+ return e
+}
+
+func (e *gfP2) SetOne() *gfP2 {
+ e.x.SetInt64(0)
+ e.y.SetInt64(1)
+ return e
+}
+
+func (e *gfP2) Minimal() {
+ if e.x.Sign() < 0 || e.x.Cmp(p) >= 0 {
+ e.x.Mod(e.x, p)
+ }
+ if e.y.Sign() < 0 || e.y.Cmp(p) >= 0 {
+ e.y.Mod(e.y, p)
+ }
+}
+
+func (e *gfP2) IsZero() bool {
+ return e.x.Sign() == 0 && e.y.Sign() == 0
+}
+
+func (e *gfP2) IsOne() bool {
+ if e.x.Sign() != 0 {
+ return false
+ }
+ words := e.y.Bits()
+ return len(words) == 1 && words[0] == 1
+}
+
+func (e *gfP2) Conjugate(a *gfP2) *gfP2 {
+ e.y.Set(a.y)
+ e.x.Neg(a.x)
+ return e
+}
+
+func (e *gfP2) Negative(a *gfP2) *gfP2 {
+ e.x.Neg(a.x)
+ e.y.Neg(a.y)
+ return e
+}
+
+func (e *gfP2) Add(a, b *gfP2) *gfP2 {
+ e.x.Add(a.x, b.x)
+ e.y.Add(a.y, b.y)
+ return e
+}
+
+func (e *gfP2) Sub(a, b *gfP2) *gfP2 {
+ e.x.Sub(a.x, b.x)
+ e.y.Sub(a.y, b.y)
+ return e
+}
+
+func (e *gfP2) Double(a *gfP2) *gfP2 {
+ e.x.Lsh(a.x, 1)
+ e.y.Lsh(a.y, 1)
+ return e
+}
+
+func (c *gfP2) Exp(a *gfP2, power *big.Int, pool *bnPool) *gfP2 {
+ sum := newGFp2(pool)
+ sum.SetOne()
+ t := newGFp2(pool)
+
+ for i := power.BitLen() - 1; i >= 0; i-- {
+ t.Square(sum, pool)
+ if power.Bit(i) != 0 {
+ sum.Mul(t, a, pool)
+ } else {
+ sum.Set(t)
+ }
+ }
+
+ c.Set(sum)
+
+ sum.Put(pool)
+ t.Put(pool)
+
+ return c
+}
+
+// See "Multiplication and Squaring in Pairing-Friendly Fields",
+// http://eprint.iacr.org/2006/471.pdf
+func (e *gfP2) Mul(a, b *gfP2, pool *bnPool) *gfP2 {
+ tx := pool.Get().Mul(a.x, b.y)
+ t := pool.Get().Mul(b.x, a.y)
+ tx.Add(tx, t)
+ tx.Mod(tx, p)
+
+ ty := pool.Get().Mul(a.y, b.y)
+ t.Mul(a.x, b.x)
+ ty.Sub(ty, t)
+ e.y.Mod(ty, p)
+ e.x.Set(tx)
+
+ pool.Put(tx)
+ pool.Put(ty)
+ pool.Put(t)
+
+ return e
+}
+
+func (e *gfP2) MulScalar(a *gfP2, b *big.Int) *gfP2 {
+ e.x.Mul(a.x, b)
+ e.y.Mul(a.y, b)
+ return e
+}
+
+// MulXi sets e=ξa where ξ=i+3 and then returns e.
+func (e *gfP2) MulXi(a *gfP2, pool *bnPool) *gfP2 {
+ // (xi+y)(i+3) = (3x+y)i+(3y-x)
+ tx := pool.Get().Lsh(a.x, 1)
+ tx.Add(tx, a.x)
+ tx.Add(tx, a.y)
+
+ ty := pool.Get().Lsh(a.y, 1)
+ ty.Add(ty, a.y)
+ ty.Sub(ty, a.x)
+
+ e.x.Set(tx)
+ e.y.Set(ty)
+
+ pool.Put(tx)
+ pool.Put(ty)
+
+ return e
+}
+
+func (e *gfP2) Square(a *gfP2, pool *bnPool) *gfP2 {
+ // Complex squaring algorithm:
+ // (xi+y)² = (x+y)(y-x) + 2*i*x*y
+ t1 := pool.Get().Sub(a.y, a.x)
+ t2 := pool.Get().Add(a.x, a.y)
+ ty := pool.Get().Mul(t1, t2)
+ ty.Mod(ty, p)
+
+ t1.Mul(a.x, a.y)
+ t1.Lsh(t1, 1)
+
+ e.x.Mod(t1, p)
+ e.y.Set(ty)
+
+ pool.Put(t1)
+ pool.Put(t2)
+ pool.Put(ty)
+
+ return e
+}
+
+func (e *gfP2) Invert(a *gfP2, pool *bnPool) *gfP2 {
+ // See "Implementing cryptographic pairings", M. Scott, section 3.2.
+ // ftp://136.206.11.249/pub/crypto/pairings.pdf
+ t := pool.Get()
+ t.Mul(a.y, a.y)
+ t2 := pool.Get()
+ t2.Mul(a.x, a.x)
+ t.Add(t, t2)
+
+ inv := pool.Get()
+ inv.ModInverse(t, p)
+
+ e.x.Neg(a.x)
+ e.x.Mul(e.x, inv)
+ e.x.Mod(e.x, p)
+
+ e.y.Mul(a.y, inv)
+ e.y.Mod(e.y, p)
+
+ pool.Put(t)
+ pool.Put(t2)
+ pool.Put(inv)
+
+ return e
+}
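gfP2.Square above relies on the complex-squaring identity: with i² = -1, (xi+y)² = 2xy·i + (x+y)(y-x), so the real part costs one multiplication instead of the two squarings x² and y². The toy check below compares the shortcut against the schoolbook real part modulo a small prime (97, arbitrary and unrelated to the curve's p).

package main

import (
	"fmt"
	"math/big"
)

// Compare the (x+y)(y-x) shortcut used by gfP2.Square against the schoolbook
// real part y²-x², for arbitrary small x and y.
func main() {
	p := big.NewInt(97)
	x, y := big.NewInt(5), big.NewInt(11)

	schoolbook := new(big.Int).Sub(new(big.Int).Mul(y, y), new(big.Int).Mul(x, x))
	schoolbook.Mod(schoolbook, p)

	shortcut := new(big.Int).Mul(new(big.Int).Add(x, y), new(big.Int).Sub(y, x))
	shortcut.Mod(shortcut, p)

	fmt.Println(schoolbook.Cmp(shortcut) == 0) // true: both equal the real part of (xi+y)²
}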
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp6.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp6.go
new file mode 100644
index 00000000000..f98ae782cc1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/gfp6.go
@@ -0,0 +1,296 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bn256
+
+// For details of the algorithms used, see "Multiplication and Squaring on
+// Pairing-Friendly Fields", Devegili et al.
+// http://eprint.iacr.org/2006/471.pdf.
+
+import (
+ "math/big"
+)
+
+// gfP6 implements the field of size p⁶ as a cubic extension of gfP2 where τ³=ξ
+// and ξ=i+3.
+type gfP6 struct {
+ x, y, z *gfP2 // value is xτ² + yτ + z
+}
+
+func newGFp6(pool *bnPool) *gfP6 {
+ return &gfP6{newGFp2(pool), newGFp2(pool), newGFp2(pool)}
+}
+
+func (e *gfP6) String() string {
+ return "(" + e.x.String() + "," + e.y.String() + "," + e.z.String() + ")"
+}
+
+func (e *gfP6) Put(pool *bnPool) {
+ e.x.Put(pool)
+ e.y.Put(pool)
+ e.z.Put(pool)
+}
+
+func (e *gfP6) Set(a *gfP6) *gfP6 {
+ e.x.Set(a.x)
+ e.y.Set(a.y)
+ e.z.Set(a.z)
+ return e
+}
+
+func (e *gfP6) SetZero() *gfP6 {
+ e.x.SetZero()
+ e.y.SetZero()
+ e.z.SetZero()
+ return e
+}
+
+func (e *gfP6) SetOne() *gfP6 {
+ e.x.SetZero()
+ e.y.SetZero()
+ e.z.SetOne()
+ return e
+}
+
+func (e *gfP6) Minimal() {
+ e.x.Minimal()
+ e.y.Minimal()
+ e.z.Minimal()
+}
+
+func (e *gfP6) IsZero() bool {
+ return e.x.IsZero() && e.y.IsZero() && e.z.IsZero()
+}
+
+func (e *gfP6) IsOne() bool {
+ return e.x.IsZero() && e.y.IsZero() && e.z.IsOne()
+}
+
+func (e *gfP6) Negative(a *gfP6) *gfP6 {
+ e.x.Negative(a.x)
+ e.y.Negative(a.y)
+ e.z.Negative(a.z)
+ return e
+}
+
+func (e *gfP6) Frobenius(a *gfP6, pool *bnPool) *gfP6 {
+ e.x.Conjugate(a.x)
+ e.y.Conjugate(a.y)
+ e.z.Conjugate(a.z)
+
+ e.x.Mul(e.x, xiTo2PMinus2Over3, pool)
+ e.y.Mul(e.y, xiToPMinus1Over3, pool)
+ return e
+}
+
+// FrobeniusP2 computes (xτ²+yτ+z)^(p²) = xτ^(2p²) + yτ^(p²) + z
+func (e *gfP6) FrobeniusP2(a *gfP6) *gfP6 {
+ // τ^(2p²) = τ²τ^(2p²-2) = τ²ξ^((2p²-2)/3)
+ e.x.MulScalar(a.x, xiTo2PSquaredMinus2Over3)
+ // τ^(p²) = ττ^(p²-1) = τξ^((p²-1)/3)
+ e.y.MulScalar(a.y, xiToPSquaredMinus1Over3)
+ e.z.Set(a.z)
+ return e
+}
+
+func (e *gfP6) Add(a, b *gfP6) *gfP6 {
+ e.x.Add(a.x, b.x)
+ e.y.Add(a.y, b.y)
+ e.z.Add(a.z, b.z)
+ return e
+}
+
+func (e *gfP6) Sub(a, b *gfP6) *gfP6 {
+ e.x.Sub(a.x, b.x)
+ e.y.Sub(a.y, b.y)
+ e.z.Sub(a.z, b.z)
+ return e
+}
+
+func (e *gfP6) Double(a *gfP6) *gfP6 {
+ e.x.Double(a.x)
+ e.y.Double(a.y)
+ e.z.Double(a.z)
+ return e
+}
+
+func (e *gfP6) Mul(a, b *gfP6, pool *bnPool) *gfP6 {
+ // "Multiplication and Squaring on Pairing-Friendly Fields"
+ // Section 4, Karatsuba method.
+ // http://eprint.iacr.org/2006/471.pdf
+
+ v0 := newGFp2(pool)
+ v0.Mul(a.z, b.z, pool)
+ v1 := newGFp2(pool)
+ v1.Mul(a.y, b.y, pool)
+ v2 := newGFp2(pool)
+ v2.Mul(a.x, b.x, pool)
+
+ t0 := newGFp2(pool)
+ t0.Add(a.x, a.y)
+ t1 := newGFp2(pool)
+ t1.Add(b.x, b.y)
+ tz := newGFp2(pool)
+ tz.Mul(t0, t1, pool)
+
+ tz.Sub(tz, v1)
+ tz.Sub(tz, v2)
+ tz.MulXi(tz, pool)
+ tz.Add(tz, v0)
+
+ t0.Add(a.y, a.z)
+ t1.Add(b.y, b.z)
+ ty := newGFp2(pool)
+ ty.Mul(t0, t1, pool)
+ ty.Sub(ty, v0)
+ ty.Sub(ty, v1)
+ t0.MulXi(v2, pool)
+ ty.Add(ty, t0)
+
+ t0.Add(a.x, a.z)
+ t1.Add(b.x, b.z)
+ tx := newGFp2(pool)
+ tx.Mul(t0, t1, pool)
+ tx.Sub(tx, v0)
+ tx.Add(tx, v1)
+ tx.Sub(tx, v2)
+
+ e.x.Set(tx)
+ e.y.Set(ty)
+ e.z.Set(tz)
+
+ t0.Put(pool)
+ t1.Put(pool)
+ tx.Put(pool)
+ ty.Put(pool)
+ tz.Put(pool)
+ v0.Put(pool)
+ v1.Put(pool)
+ v2.Put(pool)
+ return e
+}
+
+func (e *gfP6) MulScalar(a *gfP6, b *gfP2, pool *bnPool) *gfP6 {
+ e.x.Mul(a.x, b, pool)
+ e.y.Mul(a.y, b, pool)
+ e.z.Mul(a.z, b, pool)
+ return e
+}
+
+func (e *gfP6) MulGFP(a *gfP6, b *big.Int) *gfP6 {
+ e.x.MulScalar(a.x, b)
+ e.y.MulScalar(a.y, b)
+ e.z.MulScalar(a.z, b)
+ return e
+}
+
+// MulTau computes τ·(aτ²+bτ+c) = bτ²+cτ+aξ
+func (e *gfP6) MulTau(a *gfP6, pool *bnPool) {
+ tz := newGFp2(pool)
+ tz.MulXi(a.x, pool)
+ ty := newGFp2(pool)
+ ty.Set(a.y)
+ e.y.Set(a.z)
+ e.x.Set(ty)
+ e.z.Set(tz)
+ tz.Put(pool)
+ ty.Put(pool)
+}
+
+func (e *gfP6) Square(a *gfP6, pool *bnPool) *gfP6 {
+ v0 := newGFp2(pool).Square(a.z, pool)
+ v1 := newGFp2(pool).Square(a.y, pool)
+ v2 := newGFp2(pool).Square(a.x, pool)
+
+ c0 := newGFp2(pool).Add(a.x, a.y)
+ c0.Square(c0, pool)
+ c0.Sub(c0, v1)
+ c0.Sub(c0, v2)
+ c0.MulXi(c0, pool)
+ c0.Add(c0, v0)
+
+ c1 := newGFp2(pool).Add(a.y, a.z)
+ c1.Square(c1, pool)
+ c1.Sub(c1, v0)
+ c1.Sub(c1, v1)
+ xiV2 := newGFp2(pool).MulXi(v2, pool)
+ c1.Add(c1, xiV2)
+
+ c2 := newGFp2(pool).Add(a.x, a.z)
+ c2.Square(c2, pool)
+ c2.Sub(c2, v0)
+ c2.Add(c2, v1)
+ c2.Sub(c2, v2)
+
+ e.x.Set(c2)
+ e.y.Set(c1)
+ e.z.Set(c0)
+
+ v0.Put(pool)
+ v1.Put(pool)
+ v2.Put(pool)
+ c0.Put(pool)
+ c1.Put(pool)
+ c2.Put(pool)
+ xiV2.Put(pool)
+
+ return e
+}
+
+func (e *gfP6) Invert(a *gfP6, pool *bnPool) *gfP6 {
+ // See "Implementing cryptographic pairings", M. Scott, section 3.2.
+ // ftp://136.206.11.249/pub/crypto/pairings.pdf
+
+ // Here we can give a short explanation of how it works: let j be a cubic root of
+ // unity in GF(p²) so that 1+j+j²=0.
+ // Then (xτ² + yτ + z)(xj²τ² + yjτ + z)(xjτ² + yj²τ + z)
+ // = (xτ² + yτ + z)(Cτ²+Bτ+A)
+ // = (x³ξ²+y³ξ+z³-3ξxyz) = F is an element of the base field (the norm).
+ //
+ // On the other hand (xj²τ² + yjτ + z)(xjτ² + yj²τ + z)
+ // = τ²(y²-ξxz) + τ(ξx²-yz) + (z²-ξxy)
+ //
+ // So that's why A = (z²-ξxy), B = (ξx²-yz), C = (y²-ξxz)
+ t1 := newGFp2(pool)
+
+ A := newGFp2(pool)
+ A.Square(a.z, pool)
+ t1.Mul(a.x, a.y, pool)
+ t1.MulXi(t1, pool)
+ A.Sub(A, t1)
+
+ B := newGFp2(pool)
+ B.Square(a.x, pool)
+ B.MulXi(B, pool)
+ t1.Mul(a.y, a.z, pool)
+ B.Sub(B, t1)
+
+ C := newGFp2(pool)
+ C.Square(a.y, pool)
+ t1.Mul(a.x, a.z, pool)
+ C.Sub(C, t1)
+
+ F := newGFp2(pool)
+ F.Mul(C, a.y, pool)
+ F.MulXi(F, pool)
+ t1.Mul(A, a.z, pool)
+ F.Add(F, t1)
+ t1.Mul(B, a.x, pool)
+ t1.MulXi(t1, pool)
+ F.Add(F, t1)
+
+ F.Invert(F, pool)
+
+ e.x.Mul(C, F, pool)
+ e.y.Mul(B, F, pool)
+ e.z.Mul(A, F, pool)
+
+ t1.Put(pool)
+ A.Put(pool)
+ B.Put(pool)
+ C.Put(pool)
+ F.Put(pool)
+
+ return e
+}
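gfP6.Mul above is a Karatsuba-style product of degree-2 polynomials in τ reduced by τ³ = ξ, using six multiplications instead of the nine schoolbook ones. The toy check below uses plain ints and ξ = 2 as a stand-in for the real ξ = i+3 in GF(p²); the coefficient layout matches the code (value is x·τ² + y·τ + z), and all numbers are made up.

package main

import "fmt"

// Verify the interpolation used in gfP6.Mul against the schoolbook product.
func main() {
	const xi = 2
	az, ay, ax := 3, 5, 7    // a = ax·τ² + ay·τ + az
	bz, by, bx := 11, 13, 17 // b = bx·τ² + by·τ + bz

	// Schoolbook product, reduced with τ³ = ξ and τ⁴ = ξ·τ.
	cz := az*bz + xi*(ay*bx+ax*by)
	cy := az*by + ay*bz + xi*ax*bx
	cx := az*bx + ay*by + ax*bz

	// The interpolation used in gfP6.Mul (v0, v1, v2 plus three cross terms).
	v0, v1, v2 := az*bz, ay*by, ax*bx
	tz := xi*((ax+ay)*(bx+by)-v1-v2) + v0
	ty := (ay+az)*(by+bz) - v0 - v1 + xi*v2
	tx := (ax+az)*(bx+bz) - v0 + v1 - v2

	fmt.Println(cz == tz, cy == ty, cx == tx) // true true true
}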
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/optate.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/optate.go
new file mode 100644
index 00000000000..7ae0746eb10
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/optate.go
@@ -0,0 +1,395 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bn256
+
+func lineFunctionAdd(r, p *twistPoint, q *curvePoint, r2 *gfP2, pool *bnPool) (a, b, c *gfP2, rOut *twistPoint) {
+ // See the mixed addition algorithm from "Faster Computation of the
+ // Tate Pairing", http://arxiv.org/pdf/0904.0854v3.pdf
+
+ B := newGFp2(pool).Mul(p.x, r.t, pool)
+
+ D := newGFp2(pool).Add(p.y, r.z)
+ D.Square(D, pool)
+ D.Sub(D, r2)
+ D.Sub(D, r.t)
+ D.Mul(D, r.t, pool)
+
+ H := newGFp2(pool).Sub(B, r.x)
+ I := newGFp2(pool).Square(H, pool)
+
+ E := newGFp2(pool).Add(I, I)
+ E.Add(E, E)
+
+ J := newGFp2(pool).Mul(H, E, pool)
+
+ L1 := newGFp2(pool).Sub(D, r.y)
+ L1.Sub(L1, r.y)
+
+ V := newGFp2(pool).Mul(r.x, E, pool)
+
+ rOut = newTwistPoint(pool)
+ rOut.x.Square(L1, pool)
+ rOut.x.Sub(rOut.x, J)
+ rOut.x.Sub(rOut.x, V)
+ rOut.x.Sub(rOut.x, V)
+
+ rOut.z.Add(r.z, H)
+ rOut.z.Square(rOut.z, pool)
+ rOut.z.Sub(rOut.z, r.t)
+ rOut.z.Sub(rOut.z, I)
+
+ t := newGFp2(pool).Sub(V, rOut.x)
+ t.Mul(t, L1, pool)
+ t2 := newGFp2(pool).Mul(r.y, J, pool)
+ t2.Add(t2, t2)
+ rOut.y.Sub(t, t2)
+
+ rOut.t.Square(rOut.z, pool)
+
+ t.Add(p.y, rOut.z)
+ t.Square(t, pool)
+ t.Sub(t, r2)
+ t.Sub(t, rOut.t)
+
+ t2.Mul(L1, p.x, pool)
+ t2.Add(t2, t2)
+ a = newGFp2(pool)
+ a.Sub(t2, t)
+
+ c = newGFp2(pool)
+ c.MulScalar(rOut.z, q.y)
+ c.Add(c, c)
+
+ b = newGFp2(pool)
+ b.SetZero()
+ b.Sub(b, L1)
+ b.MulScalar(b, q.x)
+ b.Add(b, b)
+
+ B.Put(pool)
+ D.Put(pool)
+ H.Put(pool)
+ I.Put(pool)
+ E.Put(pool)
+ J.Put(pool)
+ L1.Put(pool)
+ V.Put(pool)
+ t.Put(pool)
+ t2.Put(pool)
+
+ return
+}
+
+func lineFunctionDouble(r *twistPoint, q *curvePoint, pool *bnPool) (a, b, c *gfP2, rOut *twistPoint) {
+ // See the doubling algorithm for a=0 from "Faster Computation of the
+ // Tate Pairing", http://arxiv.org/pdf/0904.0854v3.pdf
+
+ A := newGFp2(pool).Square(r.x, pool)
+ B := newGFp2(pool).Square(r.y, pool)
+ C := newGFp2(pool).Square(B, pool)
+
+ D := newGFp2(pool).Add(r.x, B)
+ D.Square(D, pool)
+ D.Sub(D, A)
+ D.Sub(D, C)
+ D.Add(D, D)
+
+ E := newGFp2(pool).Add(A, A)
+ E.Add(E, A)
+
+ G := newGFp2(pool).Square(E, pool)
+
+ rOut = newTwistPoint(pool)
+ rOut.x.Sub(G, D)
+ rOut.x.Sub(rOut.x, D)
+
+ rOut.z.Add(r.y, r.z)
+ rOut.z.Square(rOut.z, pool)
+ rOut.z.Sub(rOut.z, B)
+ rOut.z.Sub(rOut.z, r.t)
+
+ rOut.y.Sub(D, rOut.x)
+ rOut.y.Mul(rOut.y, E, pool)
+ t := newGFp2(pool).Add(C, C)
+ t.Add(t, t)
+ t.Add(t, t)
+ rOut.y.Sub(rOut.y, t)
+
+ rOut.t.Square(rOut.z, pool)
+
+ t.Mul(E, r.t, pool)
+ t.Add(t, t)
+ b = newGFp2(pool)
+ b.SetZero()
+ b.Sub(b, t)
+ b.MulScalar(b, q.x)
+
+ a = newGFp2(pool)
+ a.Add(r.x, E)
+ a.Square(a, pool)
+ a.Sub(a, A)
+ a.Sub(a, G)
+ t.Add(B, B)
+ t.Add(t, t)
+ a.Sub(a, t)
+
+ c = newGFp2(pool)
+ c.Mul(rOut.z, r.t, pool)
+ c.Add(c, c)
+ c.MulScalar(c, q.y)
+
+ A.Put(pool)
+ B.Put(pool)
+ C.Put(pool)
+ D.Put(pool)
+ E.Put(pool)
+ G.Put(pool)
+ t.Put(pool)
+
+ return
+}
+
+func mulLine(ret *gfP12, a, b, c *gfP2, pool *bnPool) {
+ a2 := newGFp6(pool)
+ a2.x.SetZero()
+ a2.y.Set(a)
+ a2.z.Set(b)
+ a2.Mul(a2, ret.x, pool)
+ t3 := newGFp6(pool).MulScalar(ret.y, c, pool)
+
+ t := newGFp2(pool)
+ t.Add(b, c)
+ t2 := newGFp6(pool)
+ t2.x.SetZero()
+ t2.y.Set(a)
+ t2.z.Set(t)
+ ret.x.Add(ret.x, ret.y)
+
+ ret.y.Set(t3)
+
+ ret.x.Mul(ret.x, t2, pool)
+ ret.x.Sub(ret.x, a2)
+ ret.x.Sub(ret.x, ret.y)
+ a2.MulTau(a2, pool)
+ ret.y.Add(ret.y, a2)
+
+ a2.Put(pool)
+ t3.Put(pool)
+ t2.Put(pool)
+ t.Put(pool)
+}
+
+// sixuPlus2NAF is 6u+2 in non-adjacent form.
+var sixuPlus2NAF = []int8{0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, -1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, -1, 0, 1, 0, 0, 0, 1, 0, -1, 0, 0, 0, -1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, -1, 0, -1, 0, 0, 0, 0, 1, 0, 0, 0, 1}
+
+// miller implements the Miller loop for calculating the Optimal Ate pairing.
+// See algorithm 1 from http://cryptojedi.org/papers/dclxvi-20100714.pdf
+func miller(q *twistPoint, p *curvePoint, pool *bnPool) *gfP12 {
+ ret := newGFp12(pool)
+ ret.SetOne()
+
+ aAffine := newTwistPoint(pool)
+ aAffine.Set(q)
+ aAffine.MakeAffine(pool)
+
+ bAffine := newCurvePoint(pool)
+ bAffine.Set(p)
+ bAffine.MakeAffine(pool)
+
+ minusA := newTwistPoint(pool)
+ minusA.Negative(aAffine, pool)
+
+ r := newTwistPoint(pool)
+ r.Set(aAffine)
+
+ r2 := newGFp2(pool)
+ r2.Square(aAffine.y, pool)
+
+ for i := len(sixuPlus2NAF) - 1; i > 0; i-- {
+ a, b, c, newR := lineFunctionDouble(r, bAffine, pool)
+ if i != len(sixuPlus2NAF)-1 {
+ ret.Square(ret, pool)
+ }
+
+ mulLine(ret, a, b, c, pool)
+ a.Put(pool)
+ b.Put(pool)
+ c.Put(pool)
+ r.Put(pool)
+ r = newR
+
+ switch sixuPlus2NAF[i-1] {
+ case 1:
+ a, b, c, newR = lineFunctionAdd(r, aAffine, bAffine, r2, pool)
+ case -1:
+ a, b, c, newR = lineFunctionAdd(r, minusA, bAffine, r2, pool)
+ default:
+ continue
+ }
+
+ mulLine(ret, a, b, c, pool)
+ a.Put(pool)
+ b.Put(pool)
+ c.Put(pool)
+ r.Put(pool)
+ r = newR
+ }
+
+ // In order to calculate Q1 we have to convert q from the sextic twist
+ // to the full GF(p^12) group, apply the Frobenius there, and convert
+ // back.
+ //
+ // The twist isomorphism is (x', y') -> (xω², yω³). If we consider just
+ // x for a moment, then after applying the Frobenius, we have x̄ω^(2p)
+ // where x̄ is the conjugate of x. If we are going to apply the inverse
+ // isomorphism we need a value with a single coefficient of ω² so we
+ // rewrite this as x̄ω^(2p-2)ω². ω⁶ = ξ and, due to the construction of
+ // p, 2p-2 is a multiple of six. Therefore we can rewrite as
+ // x̄ξ^((p-1)/3)ω² and applying the inverse isomorphism eliminates the
+ // ω².
+ //
+ // A similar argument can be made for the y value.
+
+ q1 := newTwistPoint(pool)
+ q1.x.Conjugate(aAffine.x)
+ q1.x.Mul(q1.x, xiToPMinus1Over3, pool)
+ q1.y.Conjugate(aAffine.y)
+ q1.y.Mul(q1.y, xiToPMinus1Over2, pool)
+ q1.z.SetOne()
+ q1.t.SetOne()
+
+ // For Q2 we are applying the p² Frobenius. The two conjugations cancel
+ // out and we are left only with the factors from the isomorphism. In
+ // the case of x, we end up with a pure number which is why
+ // xiToPSquaredMinus1Over3 is ∈ GF(p). With y we get a factor of -1. We
+ // ignore this to end up with -Q2.
+
+ minusQ2 := newTwistPoint(pool)
+ minusQ2.x.MulScalar(aAffine.x, xiToPSquaredMinus1Over3)
+ minusQ2.y.Set(aAffine.y)
+ minusQ2.z.SetOne()
+ minusQ2.t.SetOne()
+
+ r2.Square(q1.y, pool)
+ a, b, c, newR := lineFunctionAdd(r, q1, bAffine, r2, pool)
+ mulLine(ret, a, b, c, pool)
+ a.Put(pool)
+ b.Put(pool)
+ c.Put(pool)
+ r.Put(pool)
+ r = newR
+
+ r2.Square(minusQ2.y, pool)
+ a, b, c, newR = lineFunctionAdd(r, minusQ2, bAffine, r2, pool)
+ mulLine(ret, a, b, c, pool)
+ a.Put(pool)
+ b.Put(pool)
+ c.Put(pool)
+ r.Put(pool)
+ r = newR
+
+ aAffine.Put(pool)
+ bAffine.Put(pool)
+ minusA.Put(pool)
+ r.Put(pool)
+ r2.Put(pool)
+
+ return ret
+}
+
+// finalExponentiation computes the (p¹²-1)/Order-th power of an element of
+// GF(p¹²) to obtain an element of GT (steps 13-15 of algorithm 1 from
+// http://cryptojedi.org/papers/dclxvi-20100714.pdf)
+func finalExponentiation(in *gfP12, pool *bnPool) *gfP12 {
+ t1 := newGFp12(pool)
+
+ // This is the p^6-Frobenius
+ t1.x.Negative(in.x)
+ t1.y.Set(in.y)
+
+ inv := newGFp12(pool)
+ inv.Invert(in, pool)
+ t1.Mul(t1, inv, pool)
+
+ t2 := newGFp12(pool).FrobeniusP2(t1, pool)
+ t1.Mul(t1, t2, pool)
+
+ fp := newGFp12(pool).Frobenius(t1, pool)
+ fp2 := newGFp12(pool).FrobeniusP2(t1, pool)
+ fp3 := newGFp12(pool).Frobenius(fp2, pool)
+
+ fu, fu2, fu3 := newGFp12(pool), newGFp12(pool), newGFp12(pool)
+ fu.Exp(t1, u, pool)
+ fu2.Exp(fu, u, pool)
+ fu3.Exp(fu2, u, pool)
+
+ y3 := newGFp12(pool).Frobenius(fu, pool)
+ fu2p := newGFp12(pool).Frobenius(fu2, pool)
+ fu3p := newGFp12(pool).Frobenius(fu3, pool)
+ y2 := newGFp12(pool).FrobeniusP2(fu2, pool)
+
+ y0 := newGFp12(pool)
+ y0.Mul(fp, fp2, pool)
+ y0.Mul(y0, fp3, pool)
+
+ y1, y4, y5 := newGFp12(pool), newGFp12(pool), newGFp12(pool)
+ y1.Conjugate(t1)
+ y5.Conjugate(fu2)
+ y3.Conjugate(y3)
+ y4.Mul(fu, fu2p, pool)
+ y4.Conjugate(y4)
+
+ y6 := newGFp12(pool)
+ y6.Mul(fu3, fu3p, pool)
+ y6.Conjugate(y6)
+
+ t0 := newGFp12(pool)
+ t0.Square(y6, pool)
+ t0.Mul(t0, y4, pool)
+ t0.Mul(t0, y5, pool)
+ t1.Mul(y3, y5, pool)
+ t1.Mul(t1, t0, pool)
+ t0.Mul(t0, y2, pool)
+ t1.Square(t1, pool)
+ t1.Mul(t1, t0, pool)
+ t1.Square(t1, pool)
+ t0.Mul(t1, y1, pool)
+ t1.Mul(t1, y0, pool)
+ t0.Square(t0, pool)
+ t0.Mul(t0, t1, pool)
+
+ inv.Put(pool)
+ t1.Put(pool)
+ t2.Put(pool)
+ fp.Put(pool)
+ fp2.Put(pool)
+ fp3.Put(pool)
+ fu.Put(pool)
+ fu2.Put(pool)
+ fu3.Put(pool)
+ fu2p.Put(pool)
+ fu3p.Put(pool)
+ y0.Put(pool)
+ y1.Put(pool)
+ y2.Put(pool)
+ y3.Put(pool)
+ y4.Put(pool)
+ y5.Put(pool)
+ y6.Put(pool)
+
+ return t0
+}
+
+func optimalAte(a *twistPoint, b *curvePoint, pool *bnPool) *gfP12 {
+ e := miller(a, b, pool)
+ ret := finalExponentiation(e, pool)
+ e.Put(pool)
+
+ if a.IsInfinity() || b.IsInfinity() {
+ ret.SetOne()
+ }
+
+ return ret
+}
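sixuPlus2NAF above stores the Miller-loop scalar 6u+2 as signed digits in {-1, 0, 1} with no two adjacent non-zeros (non-adjacent form). The sketch below derives the NAF of an arbitrary small integer and checks that the digits reconstruct it; the value 29 is purely illustrative and unrelated to the curve parameter u.

package main

import "fmt"

// naf returns the non-adjacent form of a non-negative integer,
// least significant digit first.
func naf(n int) []int8 {
	var digits []int8
	for n != 0 {
		var d int8
		if n%2 != 0 {
			d = int8(2 - n%4) // ±1, chosen so n-d is divisible by 4
			n -= int(d)
		}
		digits = append(digits, d)
		n /= 2
	}
	return digits
}

func main() {
	const n = 29
	digits := naf(n)
	sum, pow := 0, 1
	for _, d := range digits {
		sum += int(d) * pow
		pow *= 2
	}
	fmt.Println(digits, sum == n) // [1 0 -1 0 0 1] true
}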
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/twist.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/twist.go
new file mode 100644
index 00000000000..4f8b3fede42
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/bn256/twist.go
@@ -0,0 +1,249 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package bn256
+
+import (
+ "math/big"
+)
+
+// twistPoint implements the elliptic curve y²=x³+3/ξ over GF(p²). Points are
+// kept in Jacobian form and t=z² when valid. The group G₂ is the set of
+// n-torsion points of this curve over GF(p²) (where n = Order)
+type twistPoint struct {
+ x, y, z, t *gfP2
+}
+
+var twistB = &gfP2{
+ bigFromBase10("6500054969564660373279643874235990574282535810762300357187714502686418407178"),
+ bigFromBase10("45500384786952622612957507119651934019977750675336102500314001518804928850249"),
+}
+
+// twistGen is the generator of group G₂.
+var twistGen = &twistPoint{
+ &gfP2{
+ bigFromBase10("21167961636542580255011770066570541300993051739349375019639421053990175267184"),
+ bigFromBase10("64746500191241794695844075326670126197795977525365406531717464316923369116492"),
+ },
+ &gfP2{
+ bigFromBase10("20666913350058776956210519119118544732556678129809273996262322366050359951122"),
+ bigFromBase10("17778617556404439934652658462602675281523610326338642107814333856843981424549"),
+ },
+ &gfP2{
+ bigFromBase10("0"),
+ bigFromBase10("1"),
+ },
+ &gfP2{
+ bigFromBase10("0"),
+ bigFromBase10("1"),
+ },
+}
+
+func newTwistPoint(pool *bnPool) *twistPoint {
+ return &twistPoint{
+ newGFp2(pool),
+ newGFp2(pool),
+ newGFp2(pool),
+ newGFp2(pool),
+ }
+}
+
+func (c *twistPoint) String() string {
+ return "(" + c.x.String() + ", " + c.y.String() + ", " + c.z.String() + ")"
+}
+
+func (c *twistPoint) Put(pool *bnPool) {
+ c.x.Put(pool)
+ c.y.Put(pool)
+ c.z.Put(pool)
+ c.t.Put(pool)
+}
+
+func (c *twistPoint) Set(a *twistPoint) {
+ c.x.Set(a.x)
+ c.y.Set(a.y)
+ c.z.Set(a.z)
+ c.t.Set(a.t)
+}
+
+// IsOnCurve returns true iff c is on the curve where c must be in affine form.
+func (c *twistPoint) IsOnCurve() bool {
+ pool := new(bnPool)
+ yy := newGFp2(pool).Square(c.y, pool)
+ xxx := newGFp2(pool).Square(c.x, pool)
+ xxx.Mul(xxx, c.x, pool)
+ yy.Sub(yy, xxx)
+ yy.Sub(yy, twistB)
+ yy.Minimal()
+ return yy.x.Sign() == 0 && yy.y.Sign() == 0
+}
+
+func (c *twistPoint) SetInfinity() {
+ c.z.SetZero()
+}
+
+func (c *twistPoint) IsInfinity() bool {
+ return c.z.IsZero()
+}
+
+func (c *twistPoint) Add(a, b *twistPoint, pool *bnPool) {
+ // For additional comments, see the same function in curve.go.
+
+ if a.IsInfinity() {
+ c.Set(b)
+ return
+ }
+ if b.IsInfinity() {
+ c.Set(a)
+ return
+ }
+
+ // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/addition/add-2007-bl.op3
+ z1z1 := newGFp2(pool).Square(a.z, pool)
+ z2z2 := newGFp2(pool).Square(b.z, pool)
+ u1 := newGFp2(pool).Mul(a.x, z2z2, pool)
+ u2 := newGFp2(pool).Mul(b.x, z1z1, pool)
+
+ t := newGFp2(pool).Mul(b.z, z2z2, pool)
+ s1 := newGFp2(pool).Mul(a.y, t, pool)
+
+ t.Mul(a.z, z1z1, pool)
+ s2 := newGFp2(pool).Mul(b.y, t, pool)
+
+ h := newGFp2(pool).Sub(u2, u1)
+ xEqual := h.IsZero()
+
+ t.Add(h, h)
+ i := newGFp2(pool).Square(t, pool)
+ j := newGFp2(pool).Mul(h, i, pool)
+
+ t.Sub(s2, s1)
+ yEqual := t.IsZero()
+ if xEqual && yEqual {
+ c.Double(a, pool)
+ return
+ }
+ r := newGFp2(pool).Add(t, t)
+
+ v := newGFp2(pool).Mul(u1, i, pool)
+
+ t4 := newGFp2(pool).Square(r, pool)
+ t.Add(v, v)
+ t6 := newGFp2(pool).Sub(t4, j)
+ c.x.Sub(t6, t)
+
+ t.Sub(v, c.x) // t7
+ t4.Mul(s1, j, pool) // t8
+ t6.Add(t4, t4) // t9
+ t4.Mul(r, t, pool) // t10
+ c.y.Sub(t4, t6)
+
+ t.Add(a.z, b.z) // t11
+ t4.Square(t, pool) // t12
+ t.Sub(t4, z1z1) // t13
+ t4.Sub(t, z2z2) // t14
+ c.z.Mul(t4, h, pool)
+
+ z1z1.Put(pool)
+ z2z2.Put(pool)
+ u1.Put(pool)
+ u2.Put(pool)
+ t.Put(pool)
+ s1.Put(pool)
+ s2.Put(pool)
+ h.Put(pool)
+ i.Put(pool)
+ j.Put(pool)
+ r.Put(pool)
+ v.Put(pool)
+ t4.Put(pool)
+ t6.Put(pool)
+}
+
+func (c *twistPoint) Double(a *twistPoint, pool *bnPool) {
+ // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/doubling/dbl-2009-l.op3
+ A := newGFp2(pool).Square(a.x, pool)
+ B := newGFp2(pool).Square(a.y, pool)
+ C := newGFp2(pool).Square(B, pool)
+
+ t := newGFp2(pool).Add(a.x, B)
+ t2 := newGFp2(pool).Square(t, pool)
+ t.Sub(t2, A)
+ t2.Sub(t, C)
+ d := newGFp2(pool).Add(t2, t2)
+ t.Add(A, A)
+ e := newGFp2(pool).Add(t, A)
+ f := newGFp2(pool).Square(e, pool)
+
+ t.Add(d, d)
+ c.x.Sub(f, t)
+
+ t.Add(C, C)
+ t2.Add(t, t)
+ t.Add(t2, t2)
+ c.y.Sub(d, c.x)
+ t2.Mul(e, c.y, pool)
+ c.y.Sub(t2, t)
+
+ t.Mul(a.y, a.z, pool)
+ c.z.Add(t, t)
+
+ A.Put(pool)
+ B.Put(pool)
+ C.Put(pool)
+ t.Put(pool)
+ t2.Put(pool)
+ d.Put(pool)
+ e.Put(pool)
+ f.Put(pool)
+}
+
+func (c *twistPoint) Mul(a *twistPoint, scalar *big.Int, pool *bnPool) *twistPoint {
+ sum := newTwistPoint(pool)
+ sum.SetInfinity()
+ t := newTwistPoint(pool)
+
+ for i := scalar.BitLen(); i >= 0; i-- {
+ t.Double(sum, pool)
+ if scalar.Bit(i) != 0 {
+ sum.Add(t, a, pool)
+ } else {
+ sum.Set(t)
+ }
+ }
+
+ c.Set(sum)
+ sum.Put(pool)
+ t.Put(pool)
+ return c
+}
+
+func (c *twistPoint) MakeAffine(pool *bnPool) *twistPoint {
+ if c.z.IsOne() {
+ return c
+ }
+
+ zInv := newGFp2(pool).Invert(c.z, pool)
+ t := newGFp2(pool).Mul(c.y, zInv, pool)
+ zInv2 := newGFp2(pool).Square(zInv, pool)
+ c.y.Mul(t, zInv2, pool)
+ t.Mul(c.x, zInv2, pool)
+ c.x.Set(t)
+ c.z.SetOne()
+ c.t.SetOne()
+
+ zInv.Put(pool)
+ t.Put(pool)
+ zInv2.Put(pool)
+
+ return c
+}
+
+func (c *twistPoint) Negative(a *twistPoint, pool *bnPool) {
+ c.x.Set(a.x)
+ c.y.SetZero()
+ c.y.Sub(c.y, a.y)
+ c.z.Set(a.z)
+ c.t.SetZero()
+}
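twistPoint (like curvePoint) keeps Jacobian coordinates: the affine point is (X/Z², Y/Z³), which MakeAffine above recovers with a single inversion. The sketch below checks, over a toy prime, that rescaling (X, Y, Z) to (λ²X, λ³Y, λZ) leaves the affine coordinates unchanged; every value here is arbitrary and unrelated to the actual curve.

package main

import (
	"fmt"
	"math/big"
)

// affine converts Jacobian (x, y, z) to affine coordinates modulo p,
// mirroring the zInv/zInv² steps of MakeAffine.
func affine(x, y, z, p *big.Int) (*big.Int, *big.Int) {
	zInv := new(big.Int).ModInverse(z, p)
	zInv2 := new(big.Int).Mul(zInv, zInv)
	ax := new(big.Int).Mod(new(big.Int).Mul(x, zInv2), p)
	ay := new(big.Int).Mod(new(big.Int).Mul(y, new(big.Int).Mul(zInv2, zInv)), p)
	return ax, ay
}

func main() {
	p := big.NewInt(101)
	x, y, z := big.NewInt(9), big.NewInt(40), big.NewInt(7)
	l := big.NewInt(5)

	x2 := new(big.Int).Mod(new(big.Int).Mul(x, new(big.Int).Mul(l, l)), p)
	y2 := new(big.Int).Mod(new(big.Int).Mul(y, new(big.Int).Exp(l, big.NewInt(3), p)), p)
	z2 := new(big.Int).Mod(new(big.Int).Mul(z, l), p)

	ax1, ay1 := affine(x, y, z, p)
	ax2, ay2 := affine(x2, y2, z2, p)
	fmt.Println(ax1.Cmp(ax2) == 0, ay1.Cmp(ay2) == 0) // true true
}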
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/cast5/cast5.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/cast5/cast5.go
new file mode 100644
index 00000000000..0b4af37bdc2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/cast5/cast5.go
@@ -0,0 +1,526 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cast5 implements CAST5, as defined in RFC 2144. CAST5 is a common
+// OpenPGP cipher.
+package cast5 // import "golang.org/x/crypto/cast5"
+
+import "errors"
+
+const BlockSize = 8
+const KeySize = 16
+
+type Cipher struct {
+ masking [16]uint32
+ rotate [16]uint8
+}
+
+func NewCipher(key []byte) (c *Cipher, err error) {
+ if len(key) != KeySize {
+ return nil, errors.New("CAST5: keys must be 16 bytes")
+ }
+
+ c = new(Cipher)
+ c.keySchedule(key)
+ return
+}
+
+func (c *Cipher) BlockSize() int {
+ return BlockSize
+}
+
+func (c *Cipher) Encrypt(dst, src []byte) {
+ l := uint32(src[0])<<24 | uint32(src[1])<<16 | uint32(src[2])<<8 | uint32(src[3])
+ r := uint32(src[4])<<24 | uint32(src[5])<<16 | uint32(src[6])<<8 | uint32(src[7])
+
+ l, r = r, l^f1(r, c.masking[0], c.rotate[0])
+ l, r = r, l^f2(r, c.masking[1], c.rotate[1])
+ l, r = r, l^f3(r, c.masking[2], c.rotate[2])
+ l, r = r, l^f1(r, c.masking[3], c.rotate[3])
+
+ l, r = r, l^f2(r, c.masking[4], c.rotate[4])
+ l, r = r, l^f3(r, c.masking[5], c.rotate[5])
+ l, r = r, l^f1(r, c.masking[6], c.rotate[6])
+ l, r = r, l^f2(r, c.masking[7], c.rotate[7])
+
+ l, r = r, l^f3(r, c.masking[8], c.rotate[8])
+ l, r = r, l^f1(r, c.masking[9], c.rotate[9])
+ l, r = r, l^f2(r, c.masking[10], c.rotate[10])
+ l, r = r, l^f3(r, c.masking[11], c.rotate[11])
+
+ l, r = r, l^f1(r, c.masking[12], c.rotate[12])
+ l, r = r, l^f2(r, c.masking[13], c.rotate[13])
+ l, r = r, l^f3(r, c.masking[14], c.rotate[14])
+ l, r = r, l^f1(r, c.masking[15], c.rotate[15])
+
+ dst[0] = uint8(r >> 24)
+ dst[1] = uint8(r >> 16)
+ dst[2] = uint8(r >> 8)
+ dst[3] = uint8(r)
+ dst[4] = uint8(l >> 24)
+ dst[5] = uint8(l >> 16)
+ dst[6] = uint8(l >> 8)
+ dst[7] = uint8(l)
+}
+
+func (c *Cipher) Decrypt(dst, src []byte) {
+ l := uint32(src[0])<<24 | uint32(src[1])<<16 | uint32(src[2])<<8 | uint32(src[3])
+ r := uint32(src[4])<<24 | uint32(src[5])<<16 | uint32(src[6])<<8 | uint32(src[7])
+
+ l, r = r, l^f1(r, c.masking[15], c.rotate[15])
+ l, r = r, l^f3(r, c.masking[14], c.rotate[14])
+ l, r = r, l^f2(r, c.masking[13], c.rotate[13])
+ l, r = r, l^f1(r, c.masking[12], c.rotate[12])
+
+ l, r = r, l^f3(r, c.masking[11], c.rotate[11])
+ l, r = r, l^f2(r, c.masking[10], c.rotate[10])
+ l, r = r, l^f1(r, c.masking[9], c.rotate[9])
+ l, r = r, l^f3(r, c.masking[8], c.rotate[8])
+
+ l, r = r, l^f2(r, c.masking[7], c.rotate[7])
+ l, r = r, l^f1(r, c.masking[6], c.rotate[6])
+ l, r = r, l^f3(r, c.masking[5], c.rotate[5])
+ l, r = r, l^f2(r, c.masking[4], c.rotate[4])
+
+ l, r = r, l^f1(r, c.masking[3], c.rotate[3])
+ l, r = r, l^f3(r, c.masking[2], c.rotate[2])
+ l, r = r, l^f2(r, c.masking[1], c.rotate[1])
+ l, r = r, l^f1(r, c.masking[0], c.rotate[0])
+
+ dst[0] = uint8(r >> 24)
+ dst[1] = uint8(r >> 16)
+ dst[2] = uint8(r >> 8)
+ dst[3] = uint8(r)
+ dst[4] = uint8(l >> 24)
+ dst[5] = uint8(l >> 16)
+ dst[6] = uint8(l >> 8)
+ dst[7] = uint8(l)
+}
+
+type keyScheduleA [4][7]uint8
+type keyScheduleB [4][5]uint8
+
+// keyScheduleRound contains the magic values for a round of the key schedule.
+// The keyScheduleA deals with the lines like:
+// z0z1z2z3 = x0x1x2x3 ^ S5[xD] ^ S6[xF] ^ S7[xC] ^ S8[xE] ^ S7[x8]
+// Conceptually, both x and z are in the same array, x first. The first
+// element describes which word of this array gets written to and the
+// second, which word gets read. So, for the line above, it's "4, 0", because
+// it's writing to the first word of z, which, being after x, is word 4, and
+// reading from the first word of x: word 0.
+//
+// Next are the indexes into the S-boxes. Now the array is treated as bytes. So
+// "xD" is 0xd. The first byte of z is written as "16 + 0", just to be clear
+// that it's z that we're indexing.
+//
+// keyScheduleB deals with lines like:
+// K1 = S5[z8] ^ S6[z9] ^ S7[z7] ^ S8[z6] ^ S5[z2]
+// "K1" is ignored because key words are always written in order. So the five
+// elements are the S-box indexes. They use the same form as in keyScheduleA,
+// above.
+
+type keyScheduleRound struct{}
+type keySchedule []keyScheduleRound
+
+var schedule = []struct {
+ a keyScheduleA
+ b keyScheduleB
+}{
+ {
+ keyScheduleA{
+ {4, 0, 0xd, 0xf, 0xc, 0xe, 0x8},
+ {5, 2, 16 + 0, 16 + 2, 16 + 1, 16 + 3, 0xa},
+ {6, 3, 16 + 7, 16 + 6, 16 + 5, 16 + 4, 9},
+ {7, 1, 16 + 0xa, 16 + 9, 16 + 0xb, 16 + 8, 0xb},
+ },
+ keyScheduleB{
+ {16 + 8, 16 + 9, 16 + 7, 16 + 6, 16 + 2},
+ {16 + 0xa, 16 + 0xb, 16 + 5, 16 + 4, 16 + 6},
+ {16 + 0xc, 16 + 0xd, 16 + 3, 16 + 2, 16 + 9},
+ {16 + 0xe, 16 + 0xf, 16 + 1, 16 + 0, 16 + 0xc},
+ },
+ },
+ {
+ keyScheduleA{
+ {0, 6, 16 + 5, 16 + 7, 16 + 4, 16 + 6, 16 + 0},
+ {1, 4, 0, 2, 1, 3, 16 + 2},
+ {2, 5, 7, 6, 5, 4, 16 + 1},
+ {3, 7, 0xa, 9, 0xb, 8, 16 + 3},
+ },
+ keyScheduleB{
+ {3, 2, 0xc, 0xd, 8},
+ {1, 0, 0xe, 0xf, 0xd},
+ {7, 6, 8, 9, 3},
+ {5, 4, 0xa, 0xb, 7},
+ },
+ },
+ {
+ keyScheduleA{
+ {4, 0, 0xd, 0xf, 0xc, 0xe, 8},
+ {5, 2, 16 + 0, 16 + 2, 16 + 1, 16 + 3, 0xa},
+ {6, 3, 16 + 7, 16 + 6, 16 + 5, 16 + 4, 9},
+ {7, 1, 16 + 0xa, 16 + 9, 16 + 0xb, 16 + 8, 0xb},
+ },
+ keyScheduleB{
+ {16 + 3, 16 + 2, 16 + 0xc, 16 + 0xd, 16 + 9},
+ {16 + 1, 16 + 0, 16 + 0xe, 16 + 0xf, 16 + 0xc},
+ {16 + 7, 16 + 6, 16 + 8, 16 + 9, 16 + 2},
+ {16 + 5, 16 + 4, 16 + 0xa, 16 + 0xb, 16 + 6},
+ },
+ },
+ {
+ keyScheduleA{
+ {0, 6, 16 + 5, 16 + 7, 16 + 4, 16 + 6, 16 + 0},
+ {1, 4, 0, 2, 1, 3, 16 + 2},
+ {2, 5, 7, 6, 5, 4, 16 + 1},
+ {3, 7, 0xa, 9, 0xb, 8, 16 + 3},
+ },
+ keyScheduleB{
+ {8, 9, 7, 6, 3},
+ {0xa, 0xb, 5, 4, 7},
+ {0xc, 0xd, 3, 2, 8},
+ {0xe, 0xf, 1, 0, 0xd},
+ },
+ },
+}
+
+func (c *Cipher) keySchedule(in []byte) {
+ var t [8]uint32
+ var k [32]uint32
+
+ for i := 0; i < 4; i++ {
+ j := i * 4
+ t[i] = uint32(in[j])<<24 | uint32(in[j+1])<<16 | uint32(in[j+2])<<8 | uint32(in[j+3])
+ }
+
+ x := []byte{6, 7, 4, 5}
+ ki := 0
+
+ for half := 0; half < 2; half++ {
+ for _, round := range schedule {
+ for j := 0; j < 4; j++ {
+ var a [7]uint8
+ copy(a[:], round.a[j][:])
+ w := t[a[1]]
+ w ^= sBox[4][(t[a[2]>>2]>>(24-8*(a[2]&3)))&0xff]
+ w ^= sBox[5][(t[a[3]>>2]>>(24-8*(a[3]&3)))&0xff]
+ w ^= sBox[6][(t[a[4]>>2]>>(24-8*(a[4]&3)))&0xff]
+ w ^= sBox[7][(t[a[5]>>2]>>(24-8*(a[5]&3)))&0xff]
+ w ^= sBox[x[j]][(t[a[6]>>2]>>(24-8*(a[6]&3)))&0xff]
+ t[a[0]] = w
+ }
+
+ for j := 0; j < 4; j++ {
+ var b [5]uint8
+ copy(b[:], round.b[j][:])
+ w := sBox[4][(t[b[0]>>2]>>(24-8*(b[0]&3)))&0xff]
+ w ^= sBox[5][(t[b[1]>>2]>>(24-8*(b[1]&3)))&0xff]
+ w ^= sBox[6][(t[b[2]>>2]>>(24-8*(b[2]&3)))&0xff]
+ w ^= sBox[7][(t[b[3]>>2]>>(24-8*(b[3]&3)))&0xff]
+ w ^= sBox[4+j][(t[b[4]>>2]>>(24-8*(b[4]&3)))&0xff]
+ k[ki] = w
+ ki++
+ }
+ }
+ }
+
+ for i := 0; i < 16; i++ {
+ c.masking[i] = k[i]
+ c.rotate[i] = uint8(k[16+i] & 0x1f)
+ }
+}
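The comment block above the schedule table describes how byte indexes address the x and z words held in t. The self-contained sketch below (illustrative only, with made-up data) checks the extraction expression that keySchedule uses for those indexes.

package main

import "fmt"

// The eight big-endian 32-bit words of t hold the bytes x0..xF and z0..zF;
// keySchedule extracts byte index b with (t[b>>2] >> (24 - 8*(b&3))) & 0xff.
func main() {
	var raw [32]byte
	for i := range raw {
		raw[i] = byte(i) // each byte's value is its own index
	}

	var t [8]uint32
	for i := 0; i < 8; i++ {
		j := i * 4
		t[i] = uint32(raw[j])<<24 | uint32(raw[j+1])<<16 | uint32(raw[j+2])<<8 | uint32(raw[j+3])
	}

	ok := true
	for b := uint(0); b < 32; b++ {
		got := (t[b>>2] >> (24 - 8*(b&3))) & 0xff
		ok = ok && got == uint32(raw[b])
	}
	fmt.Println(ok) // true: "16 + n" addresses the n-th byte of z, as documented
}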
+
+// These are the three 'f' functions. See RFC 2144, section 2.2.
+func f1(d, m uint32, r uint8) uint32 {
+ t := m + d
+ I := (t << r) | (t >> (32 - r))
+ return ((sBox[0][I>>24] ^ sBox[1][(I>>16)&0xff]) - sBox[2][(I>>8)&0xff]) + sBox[3][I&0xff]
+}
+
+func f2(d, m uint32, r uint8) uint32 {
+ t := m ^ d
+ I := (t << r) | (t >> (32 - r))
+ return ((sBox[0][I>>24] - sBox[1][(I>>16)&0xff]) + sBox[2][(I>>8)&0xff]) ^ sBox[3][I&0xff]
+}
+
+func f3(d, m uint32, r uint8) uint32 {
+ t := m - d
+ I := (t << r) | (t >> (32 - r))
+ return ((sBox[0][I>>24] + sBox[1][(I>>16)&0xff]) ^ sBox[2][(I>>8)&0xff]) - sBox[3][I&0xff]
+}
+
+var sBox = [8][256]uint32{
+ {
+ 0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f, 0x9c004dd3, 0x6003e540, 0xcf9fc949,
+ 0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0, 0x15c361d2, 0xc2e7661d, 0x22d4ff8e,
+ 0x28683b6f, 0xc07fd059, 0xff2379c8, 0x775f50e2, 0x43c340d3, 0xdf2f8656, 0x887ca41a, 0xa2d2bd2d,
+ 0xa1c9e0d6, 0x346c4819, 0x61b76d87, 0x22540f2f, 0x2abe32e1, 0xaa54166b, 0x22568e3a, 0xa2d341d0,
+ 0x66db40c8, 0xa784392f, 0x004dff2f, 0x2db9d2de, 0x97943fac, 0x4a97c1d8, 0x527644b7, 0xb5f437a7,
+ 0xb82cbaef, 0xd751d159, 0x6ff7f0ed, 0x5a097a1f, 0x827b68d0, 0x90ecf52e, 0x22b0c054, 0xbc8e5935,
+ 0x4b6d2f7f, 0x50bb64a2, 0xd2664910, 0xbee5812d, 0xb7332290, 0xe93b159f, 0xb48ee411, 0x4bff345d,
+ 0xfd45c240, 0xad31973f, 0xc4f6d02e, 0x55fc8165, 0xd5b1caad, 0xa1ac2dae, 0xa2d4b76d, 0xc19b0c50,
+ 0x882240f2, 0x0c6e4f38, 0xa4e4bfd7, 0x4f5ba272, 0x564c1d2f, 0xc59c5319, 0xb949e354, 0xb04669fe,
+ 0xb1b6ab8a, 0xc71358dd, 0x6385c545, 0x110f935d, 0x57538ad5, 0x6a390493, 0xe63d37e0, 0x2a54f6b3,
+ 0x3a787d5f, 0x6276a0b5, 0x19a6fcdf, 0x7a42206a, 0x29f9d4d5, 0xf61b1891, 0xbb72275e, 0xaa508167,
+ 0x38901091, 0xc6b505eb, 0x84c7cb8c, 0x2ad75a0f, 0x874a1427, 0xa2d1936b, 0x2ad286af, 0xaa56d291,
+ 0xd7894360, 0x425c750d, 0x93b39e26, 0x187184c9, 0x6c00b32d, 0x73e2bb14, 0xa0bebc3c, 0x54623779,
+ 0x64459eab, 0x3f328b82, 0x7718cf82, 0x59a2cea6, 0x04ee002e, 0x89fe78e6, 0x3fab0950, 0x325ff6c2,
+ 0x81383f05, 0x6963c5c8, 0x76cb5ad6, 0xd49974c9, 0xca180dcf, 0x380782d5, 0xc7fa5cf6, 0x8ac31511,
+ 0x35e79e13, 0x47da91d0, 0xf40f9086, 0xa7e2419e, 0x31366241, 0x051ef495, 0xaa573b04, 0x4a805d8d,
+ 0x548300d0, 0x00322a3c, 0xbf64cddf, 0xba57a68e, 0x75c6372b, 0x50afd341, 0xa7c13275, 0x915a0bf5,
+ 0x6b54bfab, 0x2b0b1426, 0xab4cc9d7, 0x449ccd82, 0xf7fbf265, 0xab85c5f3, 0x1b55db94, 0xaad4e324,
+ 0xcfa4bd3f, 0x2deaa3e2, 0x9e204d02, 0xc8bd25ac, 0xeadf55b3, 0xd5bd9e98, 0xe31231b2, 0x2ad5ad6c,
+ 0x954329de, 0xadbe4528, 0xd8710f69, 0xaa51c90f, 0xaa786bf6, 0x22513f1e, 0xaa51a79b, 0x2ad344cc,
+ 0x7b5a41f0, 0xd37cfbad, 0x1b069505, 0x41ece491, 0xb4c332e6, 0x032268d4, 0xc9600acc, 0xce387e6d,
+ 0xbf6bb16c, 0x6a70fb78, 0x0d03d9c9, 0xd4df39de, 0xe01063da, 0x4736f464, 0x5ad328d8, 0xb347cc96,
+ 0x75bb0fc3, 0x98511bfb, 0x4ffbcc35, 0xb58bcf6a, 0xe11f0abc, 0xbfc5fe4a, 0xa70aec10, 0xac39570a,
+ 0x3f04442f, 0x6188b153, 0xe0397a2e, 0x5727cb79, 0x9ceb418f, 0x1cacd68d, 0x2ad37c96, 0x0175cb9d,
+ 0xc69dff09, 0xc75b65f0, 0xd9db40d8, 0xec0e7779, 0x4744ead4, 0xb11c3274, 0xdd24cb9e, 0x7e1c54bd,
+ 0xf01144f9, 0xd2240eb1, 0x9675b3fd, 0xa3ac3755, 0xd47c27af, 0x51c85f4d, 0x56907596, 0xa5bb15e6,
+ 0x580304f0, 0xca042cf1, 0x011a37ea, 0x8dbfaadb, 0x35ba3e4a, 0x3526ffa0, 0xc37b4d09, 0xbc306ed9,
+ 0x98a52666, 0x5648f725, 0xff5e569d, 0x0ced63d0, 0x7c63b2cf, 0x700b45e1, 0xd5ea50f1, 0x85a92872,
+ 0xaf1fbda7, 0xd4234870, 0xa7870bf3, 0x2d3b4d79, 0x42e04198, 0x0cd0ede7, 0x26470db8, 0xf881814c,
+ 0x474d6ad7, 0x7c0c5e5c, 0xd1231959, 0x381b7298, 0xf5d2f4db, 0xab838653, 0x6e2f1e23, 0x83719c9e,
+ 0xbd91e046, 0x9a56456e, 0xdc39200c, 0x20c8c571, 0x962bda1c, 0xe1e696ff, 0xb141ab08, 0x7cca89b9,
+ 0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c, 0x5ac9f049, 0xdd8f0f00, 0x5c8165bf,
+ },
+ {
+ 0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a, 0xeec5207a, 0x55889c94, 0x72fc0651,
+ 0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef, 0x5f0c0794, 0x18dcdb7d, 0xa1d6eff3,
+ 0xa0b52f7b, 0x59e83605, 0xee15b094, 0xe9ffd909, 0xdc440086, 0xef944459, 0xba83ccb3, 0xe0c3cdfb,
+ 0xd1da4181, 0x3b092ab1, 0xf997f1c1, 0xa5e6cf7b, 0x01420ddb, 0xe4e7ef5b, 0x25a1ff41, 0xe180f806,
+ 0x1fc41080, 0x179bee7a, 0xd37ac6a9, 0xfe5830a4, 0x98de8b7f, 0x77e83f4e, 0x79929269, 0x24fa9f7b,
+ 0xe113c85b, 0xacc40083, 0xd7503525, 0xf7ea615f, 0x62143154, 0x0d554b63, 0x5d681121, 0xc866c359,
+ 0x3d63cf73, 0xcee234c0, 0xd4d87e87, 0x5c672b21, 0x071f6181, 0x39f7627f, 0x361e3084, 0xe4eb573b,
+ 0x602f64a4, 0xd63acd9c, 0x1bbc4635, 0x9e81032d, 0x2701f50c, 0x99847ab4, 0xa0e3df79, 0xba6cf38c,
+ 0x10843094, 0x2537a95e, 0xf46f6ffe, 0xa1ff3b1f, 0x208cfb6a, 0x8f458c74, 0xd9e0a227, 0x4ec73a34,
+ 0xfc884f69, 0x3e4de8df, 0xef0e0088, 0x3559648d, 0x8a45388c, 0x1d804366, 0x721d9bfd, 0xa58684bb,
+ 0xe8256333, 0x844e8212, 0x128d8098, 0xfed33fb4, 0xce280ae1, 0x27e19ba5, 0xd5a6c252, 0xe49754bd,
+ 0xc5d655dd, 0xeb667064, 0x77840b4d, 0xa1b6a801, 0x84db26a9, 0xe0b56714, 0x21f043b7, 0xe5d05860,
+ 0x54f03084, 0x066ff472, 0xa31aa153, 0xdadc4755, 0xb5625dbf, 0x68561be6, 0x83ca6b94, 0x2d6ed23b,
+ 0xeccf01db, 0xa6d3d0ba, 0xb6803d5c, 0xaf77a709, 0x33b4a34c, 0x397bc8d6, 0x5ee22b95, 0x5f0e5304,
+ 0x81ed6f61, 0x20e74364, 0xb45e1378, 0xde18639b, 0x881ca122, 0xb96726d1, 0x8049a7e8, 0x22b7da7b,
+ 0x5e552d25, 0x5272d237, 0x79d2951c, 0xc60d894c, 0x488cb402, 0x1ba4fe5b, 0xa4b09f6b, 0x1ca815cf,
+ 0xa20c3005, 0x8871df63, 0xb9de2fcb, 0x0cc6c9e9, 0x0beeff53, 0xe3214517, 0xb4542835, 0x9f63293c,
+ 0xee41e729, 0x6e1d2d7c, 0x50045286, 0x1e6685f3, 0xf33401c6, 0x30a22c95, 0x31a70850, 0x60930f13,
+ 0x73f98417, 0xa1269859, 0xec645c44, 0x52c877a9, 0xcdff33a6, 0xa02b1741, 0x7cbad9a2, 0x2180036f,
+ 0x50d99c08, 0xcb3f4861, 0xc26bd765, 0x64a3f6ab, 0x80342676, 0x25a75e7b, 0xe4e6d1fc, 0x20c710e6,
+ 0xcdf0b680, 0x17844d3b, 0x31eef84d, 0x7e0824e4, 0x2ccb49eb, 0x846a3bae, 0x8ff77888, 0xee5d60f6,
+ 0x7af75673, 0x2fdd5cdb, 0xa11631c1, 0x30f66f43, 0xb3faec54, 0x157fd7fa, 0xef8579cc, 0xd152de58,
+ 0xdb2ffd5e, 0x8f32ce19, 0x306af97a, 0x02f03ef8, 0x99319ad5, 0xc242fa0f, 0xa7e3ebb0, 0xc68e4906,
+ 0xb8da230c, 0x80823028, 0xdcdef3c8, 0xd35fb171, 0x088a1bc8, 0xbec0c560, 0x61a3c9e8, 0xbca8f54d,
+ 0xc72feffa, 0x22822e99, 0x82c570b4, 0xd8d94e89, 0x8b1c34bc, 0x301e16e6, 0x273be979, 0xb0ffeaa6,
+ 0x61d9b8c6, 0x00b24869, 0xb7ffce3f, 0x08dc283b, 0x43daf65a, 0xf7e19798, 0x7619b72f, 0x8f1c9ba4,
+ 0xdc8637a0, 0x16a7d3b1, 0x9fc393b7, 0xa7136eeb, 0xc6bcc63e, 0x1a513742, 0xef6828bc, 0x520365d6,
+ 0x2d6a77ab, 0x3527ed4b, 0x821fd216, 0x095c6e2e, 0xdb92f2fb, 0x5eea29cb, 0x145892f5, 0x91584f7f,
+ 0x5483697b, 0x2667a8cc, 0x85196048, 0x8c4bacea, 0x833860d4, 0x0d23e0f9, 0x6c387e8a, 0x0ae6d249,
+ 0xb284600c, 0xd835731d, 0xdcb1c647, 0xac4c56ea, 0x3ebd81b3, 0x230eabb0, 0x6438bc87, 0xf0b5b1fa,
+ 0x8f5ea2b3, 0xfc184642, 0x0a036b7a, 0x4fb089bd, 0x649da589, 0xa345415e, 0x5c038323, 0x3e5d3bb9,
+ 0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539, 0x73bfbe70, 0x83877605, 0x4523ecf1,
+ },
+ {
+ 0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff, 0x369fe44b, 0x8c1fc644, 0xaececa90,
+ 0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806, 0xf0ad0548, 0xe13c8d83, 0x927010d5,
+ 0x11107d9f, 0x07647db9, 0xb2e3e4d4, 0x3d4f285e, 0xb9afa820, 0xfade82e0, 0xa067268b, 0x8272792e,
+ 0x553fb2c0, 0x489ae22b, 0xd4ef9794, 0x125e3fbc, 0x21fffcee, 0x825b1bfd, 0x9255c5ed, 0x1257a240,
+ 0x4e1a8302, 0xbae07fff, 0x528246e7, 0x8e57140e, 0x3373f7bf, 0x8c9f8188, 0xa6fc4ee8, 0xc982b5a5,
+ 0xa8c01db7, 0x579fc264, 0x67094f31, 0xf2bd3f5f, 0x40fff7c1, 0x1fb78dfc, 0x8e6bd2c1, 0x437be59b,
+ 0x99b03dbf, 0xb5dbc64b, 0x638dc0e6, 0x55819d99, 0xa197c81c, 0x4a012d6e, 0xc5884a28, 0xccc36f71,
+ 0xb843c213, 0x6c0743f1, 0x8309893c, 0x0feddd5f, 0x2f7fe850, 0xd7c07f7e, 0x02507fbf, 0x5afb9a04,
+ 0xa747d2d0, 0x1651192e, 0xaf70bf3e, 0x58c31380, 0x5f98302e, 0x727cc3c4, 0x0a0fb402, 0x0f7fef82,
+ 0x8c96fdad, 0x5d2c2aae, 0x8ee99a49, 0x50da88b8, 0x8427f4a0, 0x1eac5790, 0x796fb449, 0x8252dc15,
+ 0xefbd7d9b, 0xa672597d, 0xada840d8, 0x45f54504, 0xfa5d7403, 0xe83ec305, 0x4f91751a, 0x925669c2,
+ 0x23efe941, 0xa903f12e, 0x60270df2, 0x0276e4b6, 0x94fd6574, 0x927985b2, 0x8276dbcb, 0x02778176,
+ 0xf8af918d, 0x4e48f79e, 0x8f616ddf, 0xe29d840e, 0x842f7d83, 0x340ce5c8, 0x96bbb682, 0x93b4b148,
+ 0xef303cab, 0x984faf28, 0x779faf9b, 0x92dc560d, 0x224d1e20, 0x8437aa88, 0x7d29dc96, 0x2756d3dc,
+ 0x8b907cee, 0xb51fd240, 0xe7c07ce3, 0xe566b4a1, 0xc3e9615e, 0x3cf8209d, 0x6094d1e3, 0xcd9ca341,
+ 0x5c76460e, 0x00ea983b, 0xd4d67881, 0xfd47572c, 0xf76cedd9, 0xbda8229c, 0x127dadaa, 0x438a074e,
+ 0x1f97c090, 0x081bdb8a, 0x93a07ebe, 0xb938ca15, 0x97b03cff, 0x3dc2c0f8, 0x8d1ab2ec, 0x64380e51,
+ 0x68cc7bfb, 0xd90f2788, 0x12490181, 0x5de5ffd4, 0xdd7ef86a, 0x76a2e214, 0xb9a40368, 0x925d958f,
+ 0x4b39fffa, 0xba39aee9, 0xa4ffd30b, 0xfaf7933b, 0x6d498623, 0x193cbcfa, 0x27627545, 0x825cf47a,
+ 0x61bd8ba0, 0xd11e42d1, 0xcead04f4, 0x127ea392, 0x10428db7, 0x8272a972, 0x9270c4a8, 0x127de50b,
+ 0x285ba1c8, 0x3c62f44f, 0x35c0eaa5, 0xe805d231, 0x428929fb, 0xb4fcdf82, 0x4fb66a53, 0x0e7dc15b,
+ 0x1f081fab, 0x108618ae, 0xfcfd086d, 0xf9ff2889, 0x694bcc11, 0x236a5cae, 0x12deca4d, 0x2c3f8cc5,
+ 0xd2d02dfe, 0xf8ef5896, 0xe4cf52da, 0x95155b67, 0x494a488c, 0xb9b6a80c, 0x5c8f82bc, 0x89d36b45,
+ 0x3a609437, 0xec00c9a9, 0x44715253, 0x0a874b49, 0xd773bc40, 0x7c34671c, 0x02717ef6, 0x4feb5536,
+ 0xa2d02fff, 0xd2bf60c4, 0xd43f03c0, 0x50b4ef6d, 0x07478cd1, 0x006e1888, 0xa2e53f55, 0xb9e6d4bc,
+ 0xa2048016, 0x97573833, 0xd7207d67, 0xde0f8f3d, 0x72f87b33, 0xabcc4f33, 0x7688c55d, 0x7b00a6b0,
+ 0x947b0001, 0x570075d2, 0xf9bb88f8, 0x8942019e, 0x4264a5ff, 0x856302e0, 0x72dbd92b, 0xee971b69,
+ 0x6ea22fde, 0x5f08ae2b, 0xaf7a616d, 0xe5c98767, 0xcf1febd2, 0x61efc8c2, 0xf1ac2571, 0xcc8239c2,
+ 0x67214cb8, 0xb1e583d1, 0xb7dc3e62, 0x7f10bdce, 0xf90a5c38, 0x0ff0443d, 0x606e6dc6, 0x60543a49,
+ 0x5727c148, 0x2be98a1d, 0x8ab41738, 0x20e1be24, 0xaf96da0f, 0x68458425, 0x99833be5, 0x600d457d,
+ 0x282f9350, 0x8334b362, 0xd91d1120, 0x2b6d8da0, 0x642b1e31, 0x9c305a00, 0x52bce688, 0x1b03588a,
+ 0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636, 0xa133c501, 0xe9d3531c, 0xee353783,
+ },
+ {
+ 0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb, 0x64ad8c57, 0x85510443, 0xfa020ed1,
+ 0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43, 0x6497b7b1, 0xf3641f63, 0x241e4adf,
+ 0x28147f5f, 0x4fa2b8cd, 0xc9430040, 0x0cc32220, 0xfdd30b30, 0xc0a5374f, 0x1d2d00d9, 0x24147b15,
+ 0xee4d111a, 0x0fca5167, 0x71ff904c, 0x2d195ffe, 0x1a05645f, 0x0c13fefe, 0x081b08ca, 0x05170121,
+ 0x80530100, 0xe83e5efe, 0xac9af4f8, 0x7fe72701, 0xd2b8ee5f, 0x06df4261, 0xbb9e9b8a, 0x7293ea25,
+ 0xce84ffdf, 0xf5718801, 0x3dd64b04, 0xa26f263b, 0x7ed48400, 0x547eebe6, 0x446d4ca0, 0x6cf3d6f5,
+ 0x2649abdf, 0xaea0c7f5, 0x36338cc1, 0x503f7e93, 0xd3772061, 0x11b638e1, 0x72500e03, 0xf80eb2bb,
+ 0xabe0502e, 0xec8d77de, 0x57971e81, 0xe14f6746, 0xc9335400, 0x6920318f, 0x081dbb99, 0xffc304a5,
+ 0x4d351805, 0x7f3d5ce3, 0xa6c866c6, 0x5d5bcca9, 0xdaec6fea, 0x9f926f91, 0x9f46222f, 0x3991467d,
+ 0xa5bf6d8e, 0x1143c44f, 0x43958302, 0xd0214eeb, 0x022083b8, 0x3fb6180c, 0x18f8931e, 0x281658e6,
+ 0x26486e3e, 0x8bd78a70, 0x7477e4c1, 0xb506e07c, 0xf32d0a25, 0x79098b02, 0xe4eabb81, 0x28123b23,
+ 0x69dead38, 0x1574ca16, 0xdf871b62, 0x211c40b7, 0xa51a9ef9, 0x0014377b, 0x041e8ac8, 0x09114003,
+ 0xbd59e4d2, 0xe3d156d5, 0x4fe876d5, 0x2f91a340, 0x557be8de, 0x00eae4a7, 0x0ce5c2ec, 0x4db4bba6,
+ 0xe756bdff, 0xdd3369ac, 0xec17b035, 0x06572327, 0x99afc8b0, 0x56c8c391, 0x6b65811c, 0x5e146119,
+ 0x6e85cb75, 0xbe07c002, 0xc2325577, 0x893ff4ec, 0x5bbfc92d, 0xd0ec3b25, 0xb7801ab7, 0x8d6d3b24,
+ 0x20c763ef, 0xc366a5fc, 0x9c382880, 0x0ace3205, 0xaac9548a, 0xeca1d7c7, 0x041afa32, 0x1d16625a,
+ 0x6701902c, 0x9b757a54, 0x31d477f7, 0x9126b031, 0x36cc6fdb, 0xc70b8b46, 0xd9e66a48, 0x56e55a79,
+ 0x026a4ceb, 0x52437eff, 0x2f8f76b4, 0x0df980a5, 0x8674cde3, 0xedda04eb, 0x17a9be04, 0x2c18f4df,
+ 0xb7747f9d, 0xab2af7b4, 0xefc34d20, 0x2e096b7c, 0x1741a254, 0xe5b6a035, 0x213d42f6, 0x2c1c7c26,
+ 0x61c2f50f, 0x6552daf9, 0xd2c231f8, 0x25130f69, 0xd8167fa2, 0x0418f2c8, 0x001a96a6, 0x0d1526ab,
+ 0x63315c21, 0x5e0a72ec, 0x49bafefd, 0x187908d9, 0x8d0dbd86, 0x311170a7, 0x3e9b640c, 0xcc3e10d7,
+ 0xd5cad3b6, 0x0caec388, 0xf73001e1, 0x6c728aff, 0x71eae2a1, 0x1f9af36e, 0xcfcbd12f, 0xc1de8417,
+ 0xac07be6b, 0xcb44a1d8, 0x8b9b0f56, 0x013988c3, 0xb1c52fca, 0xb4be31cd, 0xd8782806, 0x12a3a4e2,
+ 0x6f7de532, 0x58fd7eb6, 0xd01ee900, 0x24adffc2, 0xf4990fc5, 0x9711aac5, 0x001d7b95, 0x82e5e7d2,
+ 0x109873f6, 0x00613096, 0xc32d9521, 0xada121ff, 0x29908415, 0x7fbb977f, 0xaf9eb3db, 0x29c9ed2a,
+ 0x5ce2a465, 0xa730f32c, 0xd0aa3fe8, 0x8a5cc091, 0xd49e2ce7, 0x0ce454a9, 0xd60acd86, 0x015f1919,
+ 0x77079103, 0xdea03af6, 0x78a8565e, 0xdee356df, 0x21f05cbe, 0x8b75e387, 0xb3c50651, 0xb8a5c3ef,
+ 0xd8eeb6d2, 0xe523be77, 0xc2154529, 0x2f69efdf, 0xafe67afb, 0xf470c4b2, 0xf3e0eb5b, 0xd6cc9876,
+ 0x39e4460c, 0x1fda8538, 0x1987832f, 0xca007367, 0xa99144f8, 0x296b299e, 0x492fc295, 0x9266beab,
+ 0xb5676e69, 0x9bd3ddda, 0xdf7e052f, 0xdb25701c, 0x1b5e51ee, 0xf65324e6, 0x6afce36c, 0x0316cc04,
+ 0x8644213e, 0xb7dc59d0, 0x7965291f, 0xccd6fd43, 0x41823979, 0x932bcdf6, 0xb657c34d, 0x4edfd282,
+ 0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0, 0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2,
+ },
+ {
+ 0x7ec90c04, 0x2c6e74b9, 0x9b0e66df, 0xa6337911, 0xb86a7fff, 0x1dd358f5, 0x44dd9d44, 0x1731167f,
+ 0x08fbf1fa, 0xe7f511cc, 0xd2051b00, 0x735aba00, 0x2ab722d8, 0x386381cb, 0xacf6243a, 0x69befd7a,
+ 0xe6a2e77f, 0xf0c720cd, 0xc4494816, 0xccf5c180, 0x38851640, 0x15b0a848, 0xe68b18cb, 0x4caadeff,
+ 0x5f480a01, 0x0412b2aa, 0x259814fc, 0x41d0efe2, 0x4e40b48d, 0x248eb6fb, 0x8dba1cfe, 0x41a99b02,
+ 0x1a550a04, 0xba8f65cb, 0x7251f4e7, 0x95a51725, 0xc106ecd7, 0x97a5980a, 0xc539b9aa, 0x4d79fe6a,
+ 0xf2f3f763, 0x68af8040, 0xed0c9e56, 0x11b4958b, 0xe1eb5a88, 0x8709e6b0, 0xd7e07156, 0x4e29fea7,
+ 0x6366e52d, 0x02d1c000, 0xc4ac8e05, 0x9377f571, 0x0c05372a, 0x578535f2, 0x2261be02, 0xd642a0c9,
+ 0xdf13a280, 0x74b55bd2, 0x682199c0, 0xd421e5ec, 0x53fb3ce8, 0xc8adedb3, 0x28a87fc9, 0x3d959981,
+ 0x5c1ff900, 0xfe38d399, 0x0c4eff0b, 0x062407ea, 0xaa2f4fb1, 0x4fb96976, 0x90c79505, 0xb0a8a774,
+ 0xef55a1ff, 0xe59ca2c2, 0xa6b62d27, 0xe66a4263, 0xdf65001f, 0x0ec50966, 0xdfdd55bc, 0x29de0655,
+ 0x911e739a, 0x17af8975, 0x32c7911c, 0x89f89468, 0x0d01e980, 0x524755f4, 0x03b63cc9, 0x0cc844b2,
+ 0xbcf3f0aa, 0x87ac36e9, 0xe53a7426, 0x01b3d82b, 0x1a9e7449, 0x64ee2d7e, 0xcddbb1da, 0x01c94910,
+ 0xb868bf80, 0x0d26f3fd, 0x9342ede7, 0x04a5c284, 0x636737b6, 0x50f5b616, 0xf24766e3, 0x8eca36c1,
+ 0x136e05db, 0xfef18391, 0xfb887a37, 0xd6e7f7d4, 0xc7fb7dc9, 0x3063fcdf, 0xb6f589de, 0xec2941da,
+ 0x26e46695, 0xb7566419, 0xf654efc5, 0xd08d58b7, 0x48925401, 0xc1bacb7f, 0xe5ff550f, 0xb6083049,
+ 0x5bb5d0e8, 0x87d72e5a, 0xab6a6ee1, 0x223a66ce, 0xc62bf3cd, 0x9e0885f9, 0x68cb3e47, 0x086c010f,
+ 0xa21de820, 0xd18b69de, 0xf3f65777, 0xfa02c3f6, 0x407edac3, 0xcbb3d550, 0x1793084d, 0xb0d70eba,
+ 0x0ab378d5, 0xd951fb0c, 0xded7da56, 0x4124bbe4, 0x94ca0b56, 0x0f5755d1, 0xe0e1e56e, 0x6184b5be,
+ 0x580a249f, 0x94f74bc0, 0xe327888e, 0x9f7b5561, 0xc3dc0280, 0x05687715, 0x646c6bd7, 0x44904db3,
+ 0x66b4f0a3, 0xc0f1648a, 0x697ed5af, 0x49e92ff6, 0x309e374f, 0x2cb6356a, 0x85808573, 0x4991f840,
+ 0x76f0ae02, 0x083be84d, 0x28421c9a, 0x44489406, 0x736e4cb8, 0xc1092910, 0x8bc95fc6, 0x7d869cf4,
+ 0x134f616f, 0x2e77118d, 0xb31b2be1, 0xaa90b472, 0x3ca5d717, 0x7d161bba, 0x9cad9010, 0xaf462ba2,
+ 0x9fe459d2, 0x45d34559, 0xd9f2da13, 0xdbc65487, 0xf3e4f94e, 0x176d486f, 0x097c13ea, 0x631da5c7,
+ 0x445f7382, 0x175683f4, 0xcdc66a97, 0x70be0288, 0xb3cdcf72, 0x6e5dd2f3, 0x20936079, 0x459b80a5,
+ 0xbe60e2db, 0xa9c23101, 0xeba5315c, 0x224e42f2, 0x1c5c1572, 0xf6721b2c, 0x1ad2fff3, 0x8c25404e,
+ 0x324ed72f, 0x4067b7fd, 0x0523138e, 0x5ca3bc78, 0xdc0fd66e, 0x75922283, 0x784d6b17, 0x58ebb16e,
+ 0x44094f85, 0x3f481d87, 0xfcfeae7b, 0x77b5ff76, 0x8c2302bf, 0xaaf47556, 0x5f46b02a, 0x2b092801,
+ 0x3d38f5f7, 0x0ca81f36, 0x52af4a8a, 0x66d5e7c0, 0xdf3b0874, 0x95055110, 0x1b5ad7a8, 0xf61ed5ad,
+ 0x6cf6e479, 0x20758184, 0xd0cefa65, 0x88f7be58, 0x4a046826, 0x0ff6f8f3, 0xa09c7f70, 0x5346aba0,
+ 0x5ce96c28, 0xe176eda3, 0x6bac307f, 0x376829d2, 0x85360fa9, 0x17e3fe2a, 0x24b79767, 0xf5a96b20,
+ 0xd6cd2595, 0x68ff1ebf, 0x7555442c, 0xf19f06be, 0xf9e0659a, 0xeeb9491d, 0x34010718, 0xbb30cab8,
+ 0xe822fe15, 0x88570983, 0x750e6249, 0xda627e55, 0x5e76ffa8, 0xb1534546, 0x6d47de08, 0xefe9e7d4,
+ },
+ {
+ 0xf6fa8f9d, 0x2cac6ce1, 0x4ca34867, 0xe2337f7c, 0x95db08e7, 0x016843b4, 0xeced5cbc, 0x325553ac,
+ 0xbf9f0960, 0xdfa1e2ed, 0x83f0579d, 0x63ed86b9, 0x1ab6a6b8, 0xde5ebe39, 0xf38ff732, 0x8989b138,
+ 0x33f14961, 0xc01937bd, 0xf506c6da, 0xe4625e7e, 0xa308ea99, 0x4e23e33c, 0x79cbd7cc, 0x48a14367,
+ 0xa3149619, 0xfec94bd5, 0xa114174a, 0xeaa01866, 0xa084db2d, 0x09a8486f, 0xa888614a, 0x2900af98,
+ 0x01665991, 0xe1992863, 0xc8f30c60, 0x2e78ef3c, 0xd0d51932, 0xcf0fec14, 0xf7ca07d2, 0xd0a82072,
+ 0xfd41197e, 0x9305a6b0, 0xe86be3da, 0x74bed3cd, 0x372da53c, 0x4c7f4448, 0xdab5d440, 0x6dba0ec3,
+ 0x083919a7, 0x9fbaeed9, 0x49dbcfb0, 0x4e670c53, 0x5c3d9c01, 0x64bdb941, 0x2c0e636a, 0xba7dd9cd,
+ 0xea6f7388, 0xe70bc762, 0x35f29adb, 0x5c4cdd8d, 0xf0d48d8c, 0xb88153e2, 0x08a19866, 0x1ae2eac8,
+ 0x284caf89, 0xaa928223, 0x9334be53, 0x3b3a21bf, 0x16434be3, 0x9aea3906, 0xefe8c36e, 0xf890cdd9,
+ 0x80226dae, 0xc340a4a3, 0xdf7e9c09, 0xa694a807, 0x5b7c5ecc, 0x221db3a6, 0x9a69a02f, 0x68818a54,
+ 0xceb2296f, 0x53c0843a, 0xfe893655, 0x25bfe68a, 0xb4628abc, 0xcf222ebf, 0x25ac6f48, 0xa9a99387,
+ 0x53bddb65, 0xe76ffbe7, 0xe967fd78, 0x0ba93563, 0x8e342bc1, 0xe8a11be9, 0x4980740d, 0xc8087dfc,
+ 0x8de4bf99, 0xa11101a0, 0x7fd37975, 0xda5a26c0, 0xe81f994f, 0x9528cd89, 0xfd339fed, 0xb87834bf,
+ 0x5f04456d, 0x22258698, 0xc9c4c83b, 0x2dc156be, 0x4f628daa, 0x57f55ec5, 0xe2220abe, 0xd2916ebf,
+ 0x4ec75b95, 0x24f2c3c0, 0x42d15d99, 0xcd0d7fa0, 0x7b6e27ff, 0xa8dc8af0, 0x7345c106, 0xf41e232f,
+ 0x35162386, 0xe6ea8926, 0x3333b094, 0x157ec6f2, 0x372b74af, 0x692573e4, 0xe9a9d848, 0xf3160289,
+ 0x3a62ef1d, 0xa787e238, 0xf3a5f676, 0x74364853, 0x20951063, 0x4576698d, 0xb6fad407, 0x592af950,
+ 0x36f73523, 0x4cfb6e87, 0x7da4cec0, 0x6c152daa, 0xcb0396a8, 0xc50dfe5d, 0xfcd707ab, 0x0921c42f,
+ 0x89dff0bb, 0x5fe2be78, 0x448f4f33, 0x754613c9, 0x2b05d08d, 0x48b9d585, 0xdc049441, 0xc8098f9b,
+ 0x7dede786, 0xc39a3373, 0x42410005, 0x6a091751, 0x0ef3c8a6, 0x890072d6, 0x28207682, 0xa9a9f7be,
+ 0xbf32679d, 0xd45b5b75, 0xb353fd00, 0xcbb0e358, 0x830f220a, 0x1f8fb214, 0xd372cf08, 0xcc3c4a13,
+ 0x8cf63166, 0x061c87be, 0x88c98f88, 0x6062e397, 0x47cf8e7a, 0xb6c85283, 0x3cc2acfb, 0x3fc06976,
+ 0x4e8f0252, 0x64d8314d, 0xda3870e3, 0x1e665459, 0xc10908f0, 0x513021a5, 0x6c5b68b7, 0x822f8aa0,
+ 0x3007cd3e, 0x74719eef, 0xdc872681, 0x073340d4, 0x7e432fd9, 0x0c5ec241, 0x8809286c, 0xf592d891,
+ 0x08a930f6, 0x957ef305, 0xb7fbffbd, 0xc266e96f, 0x6fe4ac98, 0xb173ecc0, 0xbc60b42a, 0x953498da,
+ 0xfba1ae12, 0x2d4bd736, 0x0f25faab, 0xa4f3fceb, 0xe2969123, 0x257f0c3d, 0x9348af49, 0x361400bc,
+ 0xe8816f4a, 0x3814f200, 0xa3f94043, 0x9c7a54c2, 0xbc704f57, 0xda41e7f9, 0xc25ad33a, 0x54f4a084,
+ 0xb17f5505, 0x59357cbe, 0xedbd15c8, 0x7f97c5ab, 0xba5ac7b5, 0xb6f6deaf, 0x3a479c3a, 0x5302da25,
+ 0x653d7e6a, 0x54268d49, 0x51a477ea, 0x5017d55b, 0xd7d25d88, 0x44136c76, 0x0404a8c8, 0xb8e5a121,
+ 0xb81a928a, 0x60ed5869, 0x97c55b96, 0xeaec991b, 0x29935913, 0x01fdb7f1, 0x088e8dfa, 0x9ab6f6f5,
+ 0x3b4cbf9f, 0x4a5de3ab, 0xe6051d35, 0xa0e1d855, 0xd36b4cf1, 0xf544edeb, 0xb0e93524, 0xbebb8fbd,
+ 0xa2d762cf, 0x49c92f54, 0x38b5f331, 0x7128a454, 0x48392905, 0xa65b1db8, 0x851c97bd, 0xd675cf2f,
+ },
+ {
+ 0x85e04019, 0x332bf567, 0x662dbfff, 0xcfc65693, 0x2a8d7f6f, 0xab9bc912, 0xde6008a1, 0x2028da1f,
+ 0x0227bce7, 0x4d642916, 0x18fac300, 0x50f18b82, 0x2cb2cb11, 0xb232e75c, 0x4b3695f2, 0xb28707de,
+ 0xa05fbcf6, 0xcd4181e9, 0xe150210c, 0xe24ef1bd, 0xb168c381, 0xfde4e789, 0x5c79b0d8, 0x1e8bfd43,
+ 0x4d495001, 0x38be4341, 0x913cee1d, 0x92a79c3f, 0x089766be, 0xbaeeadf4, 0x1286becf, 0xb6eacb19,
+ 0x2660c200, 0x7565bde4, 0x64241f7a, 0x8248dca9, 0xc3b3ad66, 0x28136086, 0x0bd8dfa8, 0x356d1cf2,
+ 0x107789be, 0xb3b2e9ce, 0x0502aa8f, 0x0bc0351e, 0x166bf52a, 0xeb12ff82, 0xe3486911, 0xd34d7516,
+ 0x4e7b3aff, 0x5f43671b, 0x9cf6e037, 0x4981ac83, 0x334266ce, 0x8c9341b7, 0xd0d854c0, 0xcb3a6c88,
+ 0x47bc2829, 0x4725ba37, 0xa66ad22b, 0x7ad61f1e, 0x0c5cbafa, 0x4437f107, 0xb6e79962, 0x42d2d816,
+ 0x0a961288, 0xe1a5c06e, 0x13749e67, 0x72fc081a, 0xb1d139f7, 0xf9583745, 0xcf19df58, 0xbec3f756,
+ 0xc06eba30, 0x07211b24, 0x45c28829, 0xc95e317f, 0xbc8ec511, 0x38bc46e9, 0xc6e6fa14, 0xbae8584a,
+ 0xad4ebc46, 0x468f508b, 0x7829435f, 0xf124183b, 0x821dba9f, 0xaff60ff4, 0xea2c4e6d, 0x16e39264,
+ 0x92544a8b, 0x009b4fc3, 0xaba68ced, 0x9ac96f78, 0x06a5b79a, 0xb2856e6e, 0x1aec3ca9, 0xbe838688,
+ 0x0e0804e9, 0x55f1be56, 0xe7e5363b, 0xb3a1f25d, 0xf7debb85, 0x61fe033c, 0x16746233, 0x3c034c28,
+ 0xda6d0c74, 0x79aac56c, 0x3ce4e1ad, 0x51f0c802, 0x98f8f35a, 0x1626a49f, 0xeed82b29, 0x1d382fe3,
+ 0x0c4fb99a, 0xbb325778, 0x3ec6d97b, 0x6e77a6a9, 0xcb658b5c, 0xd45230c7, 0x2bd1408b, 0x60c03eb7,
+ 0xb9068d78, 0xa33754f4, 0xf430c87d, 0xc8a71302, 0xb96d8c32, 0xebd4e7be, 0xbe8b9d2d, 0x7979fb06,
+ 0xe7225308, 0x8b75cf77, 0x11ef8da4, 0xe083c858, 0x8d6b786f, 0x5a6317a6, 0xfa5cf7a0, 0x5dda0033,
+ 0xf28ebfb0, 0xf5b9c310, 0xa0eac280, 0x08b9767a, 0xa3d9d2b0, 0x79d34217, 0x021a718d, 0x9ac6336a,
+ 0x2711fd60, 0x438050e3, 0x069908a8, 0x3d7fedc4, 0x826d2bef, 0x4eeb8476, 0x488dcf25, 0x36c9d566,
+ 0x28e74e41, 0xc2610aca, 0x3d49a9cf, 0xbae3b9df, 0xb65f8de6, 0x92aeaf64, 0x3ac7d5e6, 0x9ea80509,
+ 0xf22b017d, 0xa4173f70, 0xdd1e16c3, 0x15e0d7f9, 0x50b1b887, 0x2b9f4fd5, 0x625aba82, 0x6a017962,
+ 0x2ec01b9c, 0x15488aa9, 0xd716e740, 0x40055a2c, 0x93d29a22, 0xe32dbf9a, 0x058745b9, 0x3453dc1e,
+ 0xd699296e, 0x496cff6f, 0x1c9f4986, 0xdfe2ed07, 0xb87242d1, 0x19de7eae, 0x053e561a, 0x15ad6f8c,
+ 0x66626c1c, 0x7154c24c, 0xea082b2a, 0x93eb2939, 0x17dcb0f0, 0x58d4f2ae, 0x9ea294fb, 0x52cf564c,
+ 0x9883fe66, 0x2ec40581, 0x763953c3, 0x01d6692e, 0xd3a0c108, 0xa1e7160e, 0xe4f2dfa6, 0x693ed285,
+ 0x74904698, 0x4c2b0edd, 0x4f757656, 0x5d393378, 0xa132234f, 0x3d321c5d, 0xc3f5e194, 0x4b269301,
+ 0xc79f022f, 0x3c997e7e, 0x5e4f9504, 0x3ffafbbd, 0x76f7ad0e, 0x296693f4, 0x3d1fce6f, 0xc61e45be,
+ 0xd3b5ab34, 0xf72bf9b7, 0x1b0434c0, 0x4e72b567, 0x5592a33d, 0xb5229301, 0xcfd2a87f, 0x60aeb767,
+ 0x1814386b, 0x30bcc33d, 0x38a0c07d, 0xfd1606f2, 0xc363519b, 0x589dd390, 0x5479f8e6, 0x1cb8d647,
+ 0x97fd61a9, 0xea7759f4, 0x2d57539d, 0x569a58cf, 0xe84e63ad, 0x462e1b78, 0x6580f87e, 0xf3817914,
+ 0x91da55f4, 0x40a230f3, 0xd1988f35, 0xb6e318d2, 0x3ffa50bc, 0x3d40f021, 0xc3c0bdae, 0x4958c24c,
+ 0x518f36b2, 0x84b1d370, 0x0fedce83, 0x878ddada, 0xf2a279c7, 0x94e01be8, 0x90716f4b, 0x954b8aa3,
+ },
+ {
+ 0xe216300d, 0xbbddfffc, 0xa7ebdabd, 0x35648095, 0x7789f8b7, 0xe6c1121b, 0x0e241600, 0x052ce8b5,
+ 0x11a9cfb0, 0xe5952f11, 0xece7990a, 0x9386d174, 0x2a42931c, 0x76e38111, 0xb12def3a, 0x37ddddfc,
+ 0xde9adeb1, 0x0a0cc32c, 0xbe197029, 0x84a00940, 0xbb243a0f, 0xb4d137cf, 0xb44e79f0, 0x049eedfd,
+ 0x0b15a15d, 0x480d3168, 0x8bbbde5a, 0x669ded42, 0xc7ece831, 0x3f8f95e7, 0x72df191b, 0x7580330d,
+ 0x94074251, 0x5c7dcdfa, 0xabbe6d63, 0xaa402164, 0xb301d40a, 0x02e7d1ca, 0x53571dae, 0x7a3182a2,
+ 0x12a8ddec, 0xfdaa335d, 0x176f43e8, 0x71fb46d4, 0x38129022, 0xce949ad4, 0xb84769ad, 0x965bd862,
+ 0x82f3d055, 0x66fb9767, 0x15b80b4e, 0x1d5b47a0, 0x4cfde06f, 0xc28ec4b8, 0x57e8726e, 0x647a78fc,
+ 0x99865d44, 0x608bd593, 0x6c200e03, 0x39dc5ff6, 0x5d0b00a3, 0xae63aff2, 0x7e8bd632, 0x70108c0c,
+ 0xbbd35049, 0x2998df04, 0x980cf42a, 0x9b6df491, 0x9e7edd53, 0x06918548, 0x58cb7e07, 0x3b74ef2e,
+ 0x522fffb1, 0xd24708cc, 0x1c7e27cd, 0xa4eb215b, 0x3cf1d2e2, 0x19b47a38, 0x424f7618, 0x35856039,
+ 0x9d17dee7, 0x27eb35e6, 0xc9aff67b, 0x36baf5b8, 0x09c467cd, 0xc18910b1, 0xe11dbf7b, 0x06cd1af8,
+ 0x7170c608, 0x2d5e3354, 0xd4de495a, 0x64c6d006, 0xbcc0c62c, 0x3dd00db3, 0x708f8f34, 0x77d51b42,
+ 0x264f620f, 0x24b8d2bf, 0x15c1b79e, 0x46a52564, 0xf8d7e54e, 0x3e378160, 0x7895cda5, 0x859c15a5,
+ 0xe6459788, 0xc37bc75f, 0xdb07ba0c, 0x0676a3ab, 0x7f229b1e, 0x31842e7b, 0x24259fd7, 0xf8bef472,
+ 0x835ffcb8, 0x6df4c1f2, 0x96f5b195, 0xfd0af0fc, 0xb0fe134c, 0xe2506d3d, 0x4f9b12ea, 0xf215f225,
+ 0xa223736f, 0x9fb4c428, 0x25d04979, 0x34c713f8, 0xc4618187, 0xea7a6e98, 0x7cd16efc, 0x1436876c,
+ 0xf1544107, 0xbedeee14, 0x56e9af27, 0xa04aa441, 0x3cf7c899, 0x92ecbae6, 0xdd67016d, 0x151682eb,
+ 0xa842eedf, 0xfdba60b4, 0xf1907b75, 0x20e3030f, 0x24d8c29e, 0xe139673b, 0xefa63fb8, 0x71873054,
+ 0xb6f2cf3b, 0x9f326442, 0xcb15a4cc, 0xb01a4504, 0xf1e47d8d, 0x844a1be5, 0xbae7dfdc, 0x42cbda70,
+ 0xcd7dae0a, 0x57e85b7a, 0xd53f5af6, 0x20cf4d8c, 0xcea4d428, 0x79d130a4, 0x3486ebfb, 0x33d3cddc,
+ 0x77853b53, 0x37effcb5, 0xc5068778, 0xe580b3e6, 0x4e68b8f4, 0xc5c8b37e, 0x0d809ea2, 0x398feb7c,
+ 0x132a4f94, 0x43b7950e, 0x2fee7d1c, 0x223613bd, 0xdd06caa2, 0x37df932b, 0xc4248289, 0xacf3ebc3,
+ 0x5715f6b7, 0xef3478dd, 0xf267616f, 0xc148cbe4, 0x9052815e, 0x5e410fab, 0xb48a2465, 0x2eda7fa4,
+ 0xe87b40e4, 0xe98ea084, 0x5889e9e1, 0xefd390fc, 0xdd07d35b, 0xdb485694, 0x38d7e5b2, 0x57720101,
+ 0x730edebc, 0x5b643113, 0x94917e4f, 0x503c2fba, 0x646f1282, 0x7523d24a, 0xe0779695, 0xf9c17a8f,
+ 0x7a5b2121, 0xd187b896, 0x29263a4d, 0xba510cdf, 0x81f47c9f, 0xad1163ed, 0xea7b5965, 0x1a00726e,
+ 0x11403092, 0x00da6d77, 0x4a0cdd61, 0xad1f4603, 0x605bdfb0, 0x9eedc364, 0x22ebe6a8, 0xcee7d28a,
+ 0xa0e736a0, 0x5564a6b9, 0x10853209, 0xc7eb8f37, 0x2de705ca, 0x8951570f, 0xdf09822b, 0xbd691a6c,
+ 0xaa12e4f2, 0x87451c0f, 0xe0f6a27a, 0x3ada4819, 0x4cf1764f, 0x0d771c2b, 0x67cdb156, 0x350d8384,
+ 0x5938fa0f, 0x42399ef3, 0x36997b07, 0x0e84093d, 0x4aa93e61, 0x8360d87b, 0x1fa98b0c, 0x1149382c,
+ 0xe97625a5, 0x0614d1b7, 0x0e25244b, 0x0c768347, 0x589e8d82, 0x0d2059d1, 0xa466bb1e, 0xf8da0a82,
+ 0x04f19130, 0xba6e4ec0, 0x99265164, 0x1ee7230d, 0x50b2ad80, 0xeaee6801, 0x8db2a283, 0xea8bf59e,
+ },
+}
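The tables above are the tail of cast5.go's fixed S-box array. Per RFC 2144, the cipher's round functions index four such 256-entry tables one byte at a time; as a hedged illustration only (the helper name, signature, and the zeroed tables below are invented for this sketch and are not the names used in cast5.go), the Type 1 round looks roughly like:

package main

import (
	"fmt"
	"math/bits"
)

// roundType1 sketches RFC 2144's Type 1 round function:
//   I = ((Km + D) <<< Kr);  f = ((S1[Ia] ^ S2[Ib]) - S3[Ic]) + S4[Id]
// where Ia is the most significant byte of I and Id the least.
func roundType1(d, km uint32, kr uint8, s1, s2, s3, s4 *[256]uint32) uint32 {
	i := bits.RotateLeft32(km+d, int(kr))
	ia, ib := i>>24, (i>>16)&0xff
	ic, id := (i>>8)&0xff, i&0xff
	return ((s1[ia] ^ s2[ib]) - s3[ic]) + s4[id]
}

func main() {
	var s1, s2, s3, s4 [256]uint32 // zeroed tables, only to show the call shape
	fmt.Printf("%08x\n", roundType1(0x01234567, 0x89abcdef, 11, &s1, &s2, &s3, &s4))
}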
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/cast5/cast5_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/cast5/cast5_test.go
new file mode 100644
index 00000000000..778b272a638
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/cast5/cast5_test.go
@@ -0,0 +1,106 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cast5
+
+import (
+ "bytes"
+ "encoding/hex"
+ "testing"
+)
+
+// This test vector is taken from RFC 2144, App B.1.
+// Since the other two test vectors are for reduced-round variants, we can't
+// use them.
+var basicTests = []struct {
+ key, plainText, cipherText string
+}{
+ {
+ "0123456712345678234567893456789a",
+ "0123456789abcdef",
+ "238b4fe5847e44b2",
+ },
+}
+
+func TestBasic(t *testing.T) {
+ for i, test := range basicTests {
+ key, _ := hex.DecodeString(test.key)
+ plainText, _ := hex.DecodeString(test.plainText)
+ expected, _ := hex.DecodeString(test.cipherText)
+
+ c, err := NewCipher(key)
+ if err != nil {
+ t.Errorf("#%d: failed to create Cipher: %s", i, err)
+ continue
+ }
+ var cipherText [BlockSize]byte
+ c.Encrypt(cipherText[:], plainText)
+ if !bytes.Equal(cipherText[:], expected) {
+ t.Errorf("#%d: got:%x want:%x", i, cipherText, expected)
+ }
+
+ var plainTextAgain [BlockSize]byte
+ c.Decrypt(plainTextAgain[:], cipherText[:])
+ if !bytes.Equal(plainTextAgain[:], plainText) {
+ t.Errorf("#%d: got:%x want:%x", i, plainTextAgain, plainText)
+ }
+ }
+}
+
+// TestFull performs the test specified in RFC 2144, App B.2.
+// However, due to the length of time taken, it's disabled here and a more
+// limited version is included, below.
+func TestFull(t *testing.T) {
+ if testing.Short() {
+ // This is too slow for normal testing
+ return
+ }
+
+ a, b := iterate(1000000)
+
+ const expectedA = "eea9d0a249fd3ba6b3436fb89d6dca92"
+ const expectedB = "b2c95eb00c31ad7180ac05b8e83d696e"
+
+ if hex.EncodeToString(a) != expectedA {
+ t.Errorf("a: got:%x want:%s", a, expectedA)
+ }
+ if hex.EncodeToString(b) != expectedB {
+ t.Errorf("b: got:%x want:%s", b, expectedB)
+ }
+}
+
+func iterate(iterations int) ([]byte, []byte) {
+ const initValueHex = "0123456712345678234567893456789a"
+
+ initValue, _ := hex.DecodeString(initValueHex)
+
+ var a, b [16]byte
+ copy(a[:], initValue)
+ copy(b[:], initValue)
+
+ for i := 0; i < iterations; i++ {
+ c, _ := NewCipher(b[:])
+ c.Encrypt(a[:8], a[:8])
+ c.Encrypt(a[8:], a[8:])
+ c, _ = NewCipher(a[:])
+ c.Encrypt(b[:8], b[:8])
+ c.Encrypt(b[8:], b[8:])
+ }
+
+ return a[:], b[:]
+}
+
+func TestLimited(t *testing.T) {
+ a, b := iterate(1000)
+
+ const expectedA = "23f73b14b02a2ad7dfb9f2c35644798d"
+ const expectedB = "e5bf37eff14c456a40b21ce369370a9f"
+
+ if hex.EncodeToString(a) != expectedA {
+ t.Errorf("a: got:%x want:%s", a, expectedA)
+ }
+ if hex.EncodeToString(b) != expectedB {
+ t.Errorf("b: got:%x want:%s", b, expectedB)
+ }
+}
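For orientation, a minimal standalone use of the API exercised by TestBasic might look like the sketch below; the key and block are the RFC 2144 App B.1 vector from the test, and the vendored import path is assumed.

package main

import (
	"encoding/hex"
	"fmt"

	"golang.org/x/crypto/cast5"
)

func main() {
	key, _ := hex.DecodeString("0123456712345678234567893456789a") // 128-bit key
	pt, _ := hex.DecodeString("0123456789abcdef")                  // one 8-byte block

	c, err := cast5.NewCipher(key)
	if err != nil {
		panic(err)
	}

	var ct [cast5.BlockSize]byte
	c.Encrypt(ct[:], pt)
	fmt.Printf("%x\n", ct[:]) // 238b4fe5847e44b2 per RFC 2144 App B.1
}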
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/codereview.cfg b/src/mongo/gotools/vendor/src/golang.org/x/crypto/codereview.cfg
new file mode 100644
index 00000000000..3f8b14b64e8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/codereview.cfg
@@ -0,0 +1 @@
+issuerepo: golang/go
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/const_amd64.s b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/const_amd64.s
new file mode 100644
index 00000000000..797f9b051df
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/const_amd64.s
@@ -0,0 +1,20 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code was translated into a form compatible with 6a from the public
+// domain sources in SUPERCOP: http://bench.cr.yp.to/supercop.html
+
+// +build amd64,!gccgo,!appengine
+
+DATA ·REDMASK51(SB)/8, $0x0007FFFFFFFFFFFF
+GLOBL ·REDMASK51(SB), 8, $8
+
+DATA ·_121666_213(SB)/8, $996687872
+GLOBL ·_121666_213(SB), 8, $8
+
+DATA ·_2P0(SB)/8, $0xFFFFFFFFFFFDA
+GLOBL ·_2P0(SB), 8, $8
+
+DATA ·_2P1234(SB)/8, $0xFFFFFFFFFFFFE
+GLOBL ·_2P1234(SB), 8, $8
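These four constants are radix-2^51 arithmetic facts: REDMASK51 is 2^51-1 (the per-limb mask), _121666_213 is 121666·2^13 (so a multiply followed by a 13-bit shift yields ·121666), and _2P0/_2P1234 are the low and remaining limbs of 2·(2^255-19), added before subtractions to keep limbs non-negative. A quick standalone sanity check of those values:

package main

import "fmt"

func main() {
	redmask51 := uint64(1)<<51 - 1      // 0x0007FFFFFFFFFFFF
	c121666_213 := uint64(121666) << 13 // 996687872
	twoP0 := 2 * (uint64(1)<<51 - 19)   // 0xFFFFFFFFFFFDA: low limb of 2*(2^255-19)
	twoP1234 := 2 * (uint64(1)<<51 - 1) // 0xFFFFFFFFFFFFE: limbs 1..4 of 2*(2^255-19)

	fmt.Printf("%#x %d %#x %#x\n", redmask51, c121666_213, twoP0, twoP1234)
}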
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/cswap_amd64.s b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/cswap_amd64.s
new file mode 100644
index 00000000000..45484d1b596
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/cswap_amd64.s
@@ -0,0 +1,88 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code was translated into a form compatible with 6a from the public
+// domain sources in SUPERCOP: http://bench.cr.yp.to/supercop.html
+
+// +build amd64,!gccgo,!appengine
+
+// func cswap(inout *[5]uint64, v uint64)
+TEXT ·cswap(SB),7,$0
+ MOVQ inout+0(FP),DI
+ MOVQ v+8(FP),SI
+
+ CMPQ SI,$1
+ MOVQ 0(DI),SI
+ MOVQ 80(DI),DX
+ MOVQ 8(DI),CX
+ MOVQ 88(DI),R8
+ MOVQ SI,R9
+ CMOVQEQ DX,SI
+ CMOVQEQ R9,DX
+ MOVQ CX,R9
+ CMOVQEQ R8,CX
+ CMOVQEQ R9,R8
+ MOVQ SI,0(DI)
+ MOVQ DX,80(DI)
+ MOVQ CX,8(DI)
+ MOVQ R8,88(DI)
+ MOVQ 16(DI),SI
+ MOVQ 96(DI),DX
+ MOVQ 24(DI),CX
+ MOVQ 104(DI),R8
+ MOVQ SI,R9
+ CMOVQEQ DX,SI
+ CMOVQEQ R9,DX
+ MOVQ CX,R9
+ CMOVQEQ R8,CX
+ CMOVQEQ R9,R8
+ MOVQ SI,16(DI)
+ MOVQ DX,96(DI)
+ MOVQ CX,24(DI)
+ MOVQ R8,104(DI)
+ MOVQ 32(DI),SI
+ MOVQ 112(DI),DX
+ MOVQ 40(DI),CX
+ MOVQ 120(DI),R8
+ MOVQ SI,R9
+ CMOVQEQ DX,SI
+ CMOVQEQ R9,DX
+ MOVQ CX,R9
+ CMOVQEQ R8,CX
+ CMOVQEQ R9,R8
+ MOVQ SI,32(DI)
+ MOVQ DX,112(DI)
+ MOVQ CX,40(DI)
+ MOVQ R8,120(DI)
+ MOVQ 48(DI),SI
+ MOVQ 128(DI),DX
+ MOVQ 56(DI),CX
+ MOVQ 136(DI),R8
+ MOVQ SI,R9
+ CMOVQEQ DX,SI
+ CMOVQEQ R9,DX
+ MOVQ CX,R9
+ CMOVQEQ R8,CX
+ CMOVQEQ R9,R8
+ MOVQ SI,48(DI)
+ MOVQ DX,128(DI)
+ MOVQ CX,56(DI)
+ MOVQ R8,136(DI)
+ MOVQ 64(DI),SI
+ MOVQ 144(DI),DX
+ MOVQ 72(DI),CX
+ MOVQ 152(DI),R8
+ MOVQ SI,R9
+ CMOVQEQ DX,SI
+ CMOVQEQ R9,DX
+ MOVQ CX,R9
+ CMOVQEQ R8,CX
+ CMOVQEQ R9,R8
+ MOVQ SI,64(DI)
+ MOVQ DX,144(DI)
+ MOVQ CX,72(DI)
+ MOVQ R8,152(DI)
+ MOVQ DI,AX
+ MOVQ SI,DX
+ RET
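The CMOVQEQ sequence above conditionally swaps pairs of limbs without branching on the secret bit; it is the assembly counterpart of the mask-and-XOR trick that feCSwap uses in the generic curve25519.go further down. A plain-Go sketch of the same constant-time idea over two 5-limb values (illustrative only; the real cswap operates in place on the ladder's working state):

package main

import "fmt"

// condSwap swaps a and b limb-wise when v == 1 and leaves them untouched
// when v == 0, with no data-dependent branch.
func condSwap(a, b *[5]uint64, v uint64) {
	mask := -v // all ones if v == 1, all zeros if v == 0
	for i := range a {
		t := mask & (a[i] ^ b[i])
		a[i] ^= t
		b[i] ^= t
	}
}

func main() {
	x := [5]uint64{1, 2, 3, 4, 5}
	y := [5]uint64{6, 7, 8, 9, 10}
	condSwap(&x, &y, 1)
	fmt.Println(x, y) // [6 7 8 9 10] [1 2 3 4 5]
}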
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/curve25519.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/curve25519.go
new file mode 100644
index 00000000000..6918c47fc2e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/curve25519.go
@@ -0,0 +1,841 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// We have an implementation in amd64 assembly so this code is only run on
+// non-amd64 platforms. The amd64 assembly does not support gccgo.
+// +build !amd64 gccgo appengine
+
+package curve25519
+
+// This code is a port of the public domain, "ref10" implementation of
+// curve25519 from SUPERCOP 20130419 by D. J. Bernstein.
+
+// fieldElement represents an element of the field GF(2^255 - 19). An element
+// t, entries t[0]...t[9], represents the integer t[0]+2^26 t[1]+2^51 t[2]+2^77
+// t[3]+2^102 t[4]+...+2^230 t[9]. Bounds on each t[i] vary depending on
+// context.
+type fieldElement [10]int32
+
+func feZero(fe *fieldElement) {
+ for i := range fe {
+ fe[i] = 0
+ }
+}
+
+func feOne(fe *fieldElement) {
+ feZero(fe)
+ fe[0] = 1
+}
+
+func feAdd(dst, a, b *fieldElement) {
+ for i := range dst {
+ dst[i] = a[i] + b[i]
+ }
+}
+
+func feSub(dst, a, b *fieldElement) {
+ for i := range dst {
+ dst[i] = a[i] - b[i]
+ }
+}
+
+func feCopy(dst, src *fieldElement) {
+ for i := range dst {
+ dst[i] = src[i]
+ }
+}
+
+// feCSwap replaces (f,g) with (g,f) if b == 1; replaces (f,g) with (f,g) if b == 0.
+//
+// Preconditions: b in {0,1}.
+func feCSwap(f, g *fieldElement, b int32) {
+ var x fieldElement
+ b = -b
+ for i := range x {
+ x[i] = b & (f[i] ^ g[i])
+ }
+
+ for i := range f {
+ f[i] ^= x[i]
+ }
+ for i := range g {
+ g[i] ^= x[i]
+ }
+}
+
+// load3 reads a 24-bit, little-endian value from in.
+func load3(in []byte) int64 {
+ var r int64
+ r = int64(in[0])
+ r |= int64(in[1]) << 8
+ r |= int64(in[2]) << 16
+ return r
+}
+
+// load4 reads a 32-bit, little-endian value from in.
+func load4(in []byte) int64 {
+ var r int64
+ r = int64(in[0])
+ r |= int64(in[1]) << 8
+ r |= int64(in[2]) << 16
+ r |= int64(in[3]) << 24
+ return r
+}
+
+func feFromBytes(dst *fieldElement, src *[32]byte) {
+ h0 := load4(src[:])
+ h1 := load3(src[4:]) << 6
+ h2 := load3(src[7:]) << 5
+ h3 := load3(src[10:]) << 3
+ h4 := load3(src[13:]) << 2
+ h5 := load4(src[16:])
+ h6 := load3(src[20:]) << 7
+ h7 := load3(src[23:]) << 5
+ h8 := load3(src[26:]) << 4
+ h9 := load3(src[29:]) << 2
+
+ var carry [10]int64
+ carry[9] = (h9 + 1<<24) >> 25
+ h0 += carry[9] * 19
+ h9 -= carry[9] << 25
+ carry[1] = (h1 + 1<<24) >> 25
+ h2 += carry[1]
+ h1 -= carry[1] << 25
+ carry[3] = (h3 + 1<<24) >> 25
+ h4 += carry[3]
+ h3 -= carry[3] << 25
+ carry[5] = (h5 + 1<<24) >> 25
+ h6 += carry[5]
+ h5 -= carry[5] << 25
+ carry[7] = (h7 + 1<<24) >> 25
+ h8 += carry[7]
+ h7 -= carry[7] << 25
+
+ carry[0] = (h0 + 1<<25) >> 26
+ h1 += carry[0]
+ h0 -= carry[0] << 26
+ carry[2] = (h2 + 1<<25) >> 26
+ h3 += carry[2]
+ h2 -= carry[2] << 26
+ carry[4] = (h4 + 1<<25) >> 26
+ h5 += carry[4]
+ h4 -= carry[4] << 26
+ carry[6] = (h6 + 1<<25) >> 26
+ h7 += carry[6]
+ h6 -= carry[6] << 26
+ carry[8] = (h8 + 1<<25) >> 26
+ h9 += carry[8]
+ h8 -= carry[8] << 26
+
+ dst[0] = int32(h0)
+ dst[1] = int32(h1)
+ dst[2] = int32(h2)
+ dst[3] = int32(h3)
+ dst[4] = int32(h4)
+ dst[5] = int32(h5)
+ dst[6] = int32(h6)
+ dst[7] = int32(h7)
+ dst[8] = int32(h8)
+ dst[9] = int32(h9)
+}
+
+// feToBytes marshals h to s.
+// Preconditions:
+// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
+//
+// Write p=2^255-19; q=floor(h/p).
+// Basic claim: q = floor(2^(-255)(h + 19 2^(-25)h9 + 2^(-1))).
+//
+// Proof:
+// Have |h|<=p so |q|<=1 so |19^2 2^(-255) q|<1/4.
+// Also have |h-2^230 h9|<2^230 so |19 2^(-255)(h-2^230 h9)|<1/4.
+//
+// Write y=2^(-1)-19^2 2^(-255)q-19 2^(-255)(h-2^230 h9).
+// Then 0<y<1.
+//
+// Write r=h-pq.
+// Have 0<=r<=p-1=2^255-20.
+// Thus 0<=r+19(2^-255)r<r+19(2^-255)2^255<=2^255-1.
+//
+// Write x=r+19(2^-255)r+y.
+// Then 0<x<2^255 so floor(2^(-255)x) = 0 so floor(q+2^(-255)x) = q.
+//
+// Have q+2^(-255)x = 2^(-255)(h + 19 2^(-25) h9 + 2^(-1))
+// so floor(2^(-255)(h + 19 2^(-25) h9 + 2^(-1))) = q.
+func feToBytes(s *[32]byte, h *fieldElement) {
+ var carry [10]int32
+
+ q := (19*h[9] + (1 << 24)) >> 25
+ q = (h[0] + q) >> 26
+ q = (h[1] + q) >> 25
+ q = (h[2] + q) >> 26
+ q = (h[3] + q) >> 25
+ q = (h[4] + q) >> 26
+ q = (h[5] + q) >> 25
+ q = (h[6] + q) >> 26
+ q = (h[7] + q) >> 25
+ q = (h[8] + q) >> 26
+ q = (h[9] + q) >> 25
+
+ // Goal: Output h-(2^255-19)q, which is between 0 and 2^255-20.
+ h[0] += 19 * q
+ // Goal: Output h-2^255 q, which is between 0 and 2^255-20.
+
+ carry[0] = h[0] >> 26
+ h[1] += carry[0]
+ h[0] -= carry[0] << 26
+ carry[1] = h[1] >> 25
+ h[2] += carry[1]
+ h[1] -= carry[1] << 25
+ carry[2] = h[2] >> 26
+ h[3] += carry[2]
+ h[2] -= carry[2] << 26
+ carry[3] = h[3] >> 25
+ h[4] += carry[3]
+ h[3] -= carry[3] << 25
+ carry[4] = h[4] >> 26
+ h[5] += carry[4]
+ h[4] -= carry[4] << 26
+ carry[5] = h[5] >> 25
+ h[6] += carry[5]
+ h[5] -= carry[5] << 25
+ carry[6] = h[6] >> 26
+ h[7] += carry[6]
+ h[6] -= carry[6] << 26
+ carry[7] = h[7] >> 25
+ h[8] += carry[7]
+ h[7] -= carry[7] << 25
+ carry[8] = h[8] >> 26
+ h[9] += carry[8]
+ h[8] -= carry[8] << 26
+ carry[9] = h[9] >> 25
+ h[9] -= carry[9] << 25
+ // h10 = carry9
+
+ // Goal: Output h[0]+...+2^255 h10-2^255 q, which is between 0 and 2^255-20.
+ // Have h[0]+...+2^230 h[9] between 0 and 2^255-1;
+ // evidently 2^255 h10-2^255 q = 0.
+ // Goal: Output h[0]+...+2^230 h[9].
+
+ s[0] = byte(h[0] >> 0)
+ s[1] = byte(h[0] >> 8)
+ s[2] = byte(h[0] >> 16)
+ s[3] = byte((h[0] >> 24) | (h[1] << 2))
+ s[4] = byte(h[1] >> 6)
+ s[5] = byte(h[1] >> 14)
+ s[6] = byte((h[1] >> 22) | (h[2] << 3))
+ s[7] = byte(h[2] >> 5)
+ s[8] = byte(h[2] >> 13)
+ s[9] = byte((h[2] >> 21) | (h[3] << 5))
+ s[10] = byte(h[3] >> 3)
+ s[11] = byte(h[3] >> 11)
+ s[12] = byte((h[3] >> 19) | (h[4] << 6))
+ s[13] = byte(h[4] >> 2)
+ s[14] = byte(h[4] >> 10)
+ s[15] = byte(h[4] >> 18)
+ s[16] = byte(h[5] >> 0)
+ s[17] = byte(h[5] >> 8)
+ s[18] = byte(h[5] >> 16)
+ s[19] = byte((h[5] >> 24) | (h[6] << 1))
+ s[20] = byte(h[6] >> 7)
+ s[21] = byte(h[6] >> 15)
+ s[22] = byte((h[6] >> 23) | (h[7] << 3))
+ s[23] = byte(h[7] >> 5)
+ s[24] = byte(h[7] >> 13)
+ s[25] = byte((h[7] >> 21) | (h[8] << 4))
+ s[26] = byte(h[8] >> 4)
+ s[27] = byte(h[8] >> 12)
+ s[28] = byte((h[8] >> 20) | (h[9] << 6))
+ s[29] = byte(h[9] >> 2)
+ s[30] = byte(h[9] >> 10)
+ s[31] = byte(h[9] >> 18)
+}
+
+// feMul calculates h = f * g
+// Can overlap h with f or g.
+//
+// Preconditions:
+// |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
+// |g| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
+//
+// Postconditions:
+// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
+//
+// Notes on implementation strategy:
+//
+// Using schoolbook multiplication.
+// Karatsuba would save a little in some cost models.
+//
+// Most multiplications by 2 and 19 are 32-bit precomputations;
+// cheaper than 64-bit postcomputations.
+//
+// There is one remaining multiplication by 19 in the carry chain;
+// one *19 precomputation can be merged into this,
+// but the resulting data flow is considerably less clean.
+//
+// There are 12 carries below.
+// 10 of them are 2-way parallelizable and vectorizable.
+// Can get away with 11 carries, but then data flow is much deeper.
+//
+// With tighter constraints on inputs can squeeze carries into int32.
+func feMul(h, f, g *fieldElement) {
+ f0 := f[0]
+ f1 := f[1]
+ f2 := f[2]
+ f3 := f[3]
+ f4 := f[4]
+ f5 := f[5]
+ f6 := f[6]
+ f7 := f[7]
+ f8 := f[8]
+ f9 := f[9]
+ g0 := g[0]
+ g1 := g[1]
+ g2 := g[2]
+ g3 := g[3]
+ g4 := g[4]
+ g5 := g[5]
+ g6 := g[6]
+ g7 := g[7]
+ g8 := g[8]
+ g9 := g[9]
+ g1_19 := 19 * g1 // 1.4*2^29
+ g2_19 := 19 * g2 // 1.4*2^30; still ok
+ g3_19 := 19 * g3
+ g4_19 := 19 * g4
+ g5_19 := 19 * g5
+ g6_19 := 19 * g6
+ g7_19 := 19 * g7
+ g8_19 := 19 * g8
+ g9_19 := 19 * g9
+ f1_2 := 2 * f1
+ f3_2 := 2 * f3
+ f5_2 := 2 * f5
+ f7_2 := 2 * f7
+ f9_2 := 2 * f9
+ f0g0 := int64(f0) * int64(g0)
+ f0g1 := int64(f0) * int64(g1)
+ f0g2 := int64(f0) * int64(g2)
+ f0g3 := int64(f0) * int64(g3)
+ f0g4 := int64(f0) * int64(g4)
+ f0g5 := int64(f0) * int64(g5)
+ f0g6 := int64(f0) * int64(g6)
+ f0g7 := int64(f0) * int64(g7)
+ f0g8 := int64(f0) * int64(g8)
+ f0g9 := int64(f0) * int64(g9)
+ f1g0 := int64(f1) * int64(g0)
+ f1g1_2 := int64(f1_2) * int64(g1)
+ f1g2 := int64(f1) * int64(g2)
+ f1g3_2 := int64(f1_2) * int64(g3)
+ f1g4 := int64(f1) * int64(g4)
+ f1g5_2 := int64(f1_2) * int64(g5)
+ f1g6 := int64(f1) * int64(g6)
+ f1g7_2 := int64(f1_2) * int64(g7)
+ f1g8 := int64(f1) * int64(g8)
+ f1g9_38 := int64(f1_2) * int64(g9_19)
+ f2g0 := int64(f2) * int64(g0)
+ f2g1 := int64(f2) * int64(g1)
+ f2g2 := int64(f2) * int64(g2)
+ f2g3 := int64(f2) * int64(g3)
+ f2g4 := int64(f2) * int64(g4)
+ f2g5 := int64(f2) * int64(g5)
+ f2g6 := int64(f2) * int64(g6)
+ f2g7 := int64(f2) * int64(g7)
+ f2g8_19 := int64(f2) * int64(g8_19)
+ f2g9_19 := int64(f2) * int64(g9_19)
+ f3g0 := int64(f3) * int64(g0)
+ f3g1_2 := int64(f3_2) * int64(g1)
+ f3g2 := int64(f3) * int64(g2)
+ f3g3_2 := int64(f3_2) * int64(g3)
+ f3g4 := int64(f3) * int64(g4)
+ f3g5_2 := int64(f3_2) * int64(g5)
+ f3g6 := int64(f3) * int64(g6)
+ f3g7_38 := int64(f3_2) * int64(g7_19)
+ f3g8_19 := int64(f3) * int64(g8_19)
+ f3g9_38 := int64(f3_2) * int64(g9_19)
+ f4g0 := int64(f4) * int64(g0)
+ f4g1 := int64(f4) * int64(g1)
+ f4g2 := int64(f4) * int64(g2)
+ f4g3 := int64(f4) * int64(g3)
+ f4g4 := int64(f4) * int64(g4)
+ f4g5 := int64(f4) * int64(g5)
+ f4g6_19 := int64(f4) * int64(g6_19)
+ f4g7_19 := int64(f4) * int64(g7_19)
+ f4g8_19 := int64(f4) * int64(g8_19)
+ f4g9_19 := int64(f4) * int64(g9_19)
+ f5g0 := int64(f5) * int64(g0)
+ f5g1_2 := int64(f5_2) * int64(g1)
+ f5g2 := int64(f5) * int64(g2)
+ f5g3_2 := int64(f5_2) * int64(g3)
+ f5g4 := int64(f5) * int64(g4)
+ f5g5_38 := int64(f5_2) * int64(g5_19)
+ f5g6_19 := int64(f5) * int64(g6_19)
+ f5g7_38 := int64(f5_2) * int64(g7_19)
+ f5g8_19 := int64(f5) * int64(g8_19)
+ f5g9_38 := int64(f5_2) * int64(g9_19)
+ f6g0 := int64(f6) * int64(g0)
+ f6g1 := int64(f6) * int64(g1)
+ f6g2 := int64(f6) * int64(g2)
+ f6g3 := int64(f6) * int64(g3)
+ f6g4_19 := int64(f6) * int64(g4_19)
+ f6g5_19 := int64(f6) * int64(g5_19)
+ f6g6_19 := int64(f6) * int64(g6_19)
+ f6g7_19 := int64(f6) * int64(g7_19)
+ f6g8_19 := int64(f6) * int64(g8_19)
+ f6g9_19 := int64(f6) * int64(g9_19)
+ f7g0 := int64(f7) * int64(g0)
+ f7g1_2 := int64(f7_2) * int64(g1)
+ f7g2 := int64(f7) * int64(g2)
+ f7g3_38 := int64(f7_2) * int64(g3_19)
+ f7g4_19 := int64(f7) * int64(g4_19)
+ f7g5_38 := int64(f7_2) * int64(g5_19)
+ f7g6_19 := int64(f7) * int64(g6_19)
+ f7g7_38 := int64(f7_2) * int64(g7_19)
+ f7g8_19 := int64(f7) * int64(g8_19)
+ f7g9_38 := int64(f7_2) * int64(g9_19)
+ f8g0 := int64(f8) * int64(g0)
+ f8g1 := int64(f8) * int64(g1)
+ f8g2_19 := int64(f8) * int64(g2_19)
+ f8g3_19 := int64(f8) * int64(g3_19)
+ f8g4_19 := int64(f8) * int64(g4_19)
+ f8g5_19 := int64(f8) * int64(g5_19)
+ f8g6_19 := int64(f8) * int64(g6_19)
+ f8g7_19 := int64(f8) * int64(g7_19)
+ f8g8_19 := int64(f8) * int64(g8_19)
+ f8g9_19 := int64(f8) * int64(g9_19)
+ f9g0 := int64(f9) * int64(g0)
+ f9g1_38 := int64(f9_2) * int64(g1_19)
+ f9g2_19 := int64(f9) * int64(g2_19)
+ f9g3_38 := int64(f9_2) * int64(g3_19)
+ f9g4_19 := int64(f9) * int64(g4_19)
+ f9g5_38 := int64(f9_2) * int64(g5_19)
+ f9g6_19 := int64(f9) * int64(g6_19)
+ f9g7_38 := int64(f9_2) * int64(g7_19)
+ f9g8_19 := int64(f9) * int64(g8_19)
+ f9g9_38 := int64(f9_2) * int64(g9_19)
+ h0 := f0g0 + f1g9_38 + f2g8_19 + f3g7_38 + f4g6_19 + f5g5_38 + f6g4_19 + f7g3_38 + f8g2_19 + f9g1_38
+ h1 := f0g1 + f1g0 + f2g9_19 + f3g8_19 + f4g7_19 + f5g6_19 + f6g5_19 + f7g4_19 + f8g3_19 + f9g2_19
+ h2 := f0g2 + f1g1_2 + f2g0 + f3g9_38 + f4g8_19 + f5g7_38 + f6g6_19 + f7g5_38 + f8g4_19 + f9g3_38
+ h3 := f0g3 + f1g2 + f2g1 + f3g0 + f4g9_19 + f5g8_19 + f6g7_19 + f7g6_19 + f8g5_19 + f9g4_19
+ h4 := f0g4 + f1g3_2 + f2g2 + f3g1_2 + f4g0 + f5g9_38 + f6g8_19 + f7g7_38 + f8g6_19 + f9g5_38
+ h5 := f0g5 + f1g4 + f2g3 + f3g2 + f4g1 + f5g0 + f6g9_19 + f7g8_19 + f8g7_19 + f9g6_19
+ h6 := f0g6 + f1g5_2 + f2g4 + f3g3_2 + f4g2 + f5g1_2 + f6g0 + f7g9_38 + f8g8_19 + f9g7_38
+ h7 := f0g7 + f1g6 + f2g5 + f3g4 + f4g3 + f5g2 + f6g1 + f7g0 + f8g9_19 + f9g8_19
+ h8 := f0g8 + f1g7_2 + f2g6 + f3g5_2 + f4g4 + f5g3_2 + f6g2 + f7g1_2 + f8g0 + f9g9_38
+ h9 := f0g9 + f1g8 + f2g7 + f3g6 + f4g5 + f5g4 + f6g3 + f7g2 + f8g1 + f9g0
+ var carry [10]int64
+
+ // |h0| <= (1.1*1.1*2^52*(1+19+19+19+19)+1.1*1.1*2^50*(38+38+38+38+38))
+ // i.e. |h0| <= 1.2*2^59; narrower ranges for h2, h4, h6, h8
+ // |h1| <= (1.1*1.1*2^51*(1+1+19+19+19+19+19+19+19+19))
+ // i.e. |h1| <= 1.5*2^58; narrower ranges for h3, h5, h7, h9
+
+ carry[0] = (h0 + (1 << 25)) >> 26
+ h1 += carry[0]
+ h0 -= carry[0] << 26
+ carry[4] = (h4 + (1 << 25)) >> 26
+ h5 += carry[4]
+ h4 -= carry[4] << 26
+ // |h0| <= 2^25
+ // |h4| <= 2^25
+ // |h1| <= 1.51*2^58
+ // |h5| <= 1.51*2^58
+
+ carry[1] = (h1 + (1 << 24)) >> 25
+ h2 += carry[1]
+ h1 -= carry[1] << 25
+ carry[5] = (h5 + (1 << 24)) >> 25
+ h6 += carry[5]
+ h5 -= carry[5] << 25
+ // |h1| <= 2^24; from now on fits into int32
+ // |h5| <= 2^24; from now on fits into int32
+ // |h2| <= 1.21*2^59
+ // |h6| <= 1.21*2^59
+
+ carry[2] = (h2 + (1 << 25)) >> 26
+ h3 += carry[2]
+ h2 -= carry[2] << 26
+ carry[6] = (h6 + (1 << 25)) >> 26
+ h7 += carry[6]
+ h6 -= carry[6] << 26
+ // |h2| <= 2^25; from now on fits into int32 unchanged
+ // |h6| <= 2^25; from now on fits into int32 unchanged
+ // |h3| <= 1.51*2^58
+ // |h7| <= 1.51*2^58
+
+ carry[3] = (h3 + (1 << 24)) >> 25
+ h4 += carry[3]
+ h3 -= carry[3] << 25
+ carry[7] = (h7 + (1 << 24)) >> 25
+ h8 += carry[7]
+ h7 -= carry[7] << 25
+ // |h3| <= 2^24; from now on fits into int32 unchanged
+ // |h7| <= 2^24; from now on fits into int32 unchanged
+ // |h4| <= 1.52*2^33
+ // |h8| <= 1.52*2^33
+
+ carry[4] = (h4 + (1 << 25)) >> 26
+ h5 += carry[4]
+ h4 -= carry[4] << 26
+ carry[8] = (h8 + (1 << 25)) >> 26
+ h9 += carry[8]
+ h8 -= carry[8] << 26
+ // |h4| <= 2^25; from now on fits into int32 unchanged
+ // |h8| <= 2^25; from now on fits into int32 unchanged
+ // |h5| <= 1.01*2^24
+ // |h9| <= 1.51*2^58
+
+ carry[9] = (h9 + (1 << 24)) >> 25
+ h0 += carry[9] * 19
+ h9 -= carry[9] << 25
+ // |h9| <= 2^24; from now on fits into int32 unchanged
+ // |h0| <= 1.8*2^37
+
+ carry[0] = (h0 + (1 << 25)) >> 26
+ h1 += carry[0]
+ h0 -= carry[0] << 26
+ // |h0| <= 2^25; from now on fits into int32 unchanged
+ // |h1| <= 1.01*2^24
+
+ h[0] = int32(h0)
+ h[1] = int32(h1)
+ h[2] = int32(h2)
+ h[3] = int32(h3)
+ h[4] = int32(h4)
+ h[5] = int32(h5)
+ h[6] = int32(h6)
+ h[7] = int32(h7)
+ h[8] = int32(h8)
+ h[9] = int32(h9)
+}
+
+// feSquare calculates h = f*f. Can overlap h with f.
+//
+// Preconditions:
+// |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
+//
+// Postconditions:
+// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
+func feSquare(h, f *fieldElement) {
+ f0 := f[0]
+ f1 := f[1]
+ f2 := f[2]
+ f3 := f[3]
+ f4 := f[4]
+ f5 := f[5]
+ f6 := f[6]
+ f7 := f[7]
+ f8 := f[8]
+ f9 := f[9]
+ f0_2 := 2 * f0
+ f1_2 := 2 * f1
+ f2_2 := 2 * f2
+ f3_2 := 2 * f3
+ f4_2 := 2 * f4
+ f5_2 := 2 * f5
+ f6_2 := 2 * f6
+ f7_2 := 2 * f7
+ f5_38 := 38 * f5 // 1.31*2^30
+ f6_19 := 19 * f6 // 1.31*2^30
+ f7_38 := 38 * f7 // 1.31*2^30
+ f8_19 := 19 * f8 // 1.31*2^30
+ f9_38 := 38 * f9 // 1.31*2^30
+ f0f0 := int64(f0) * int64(f0)
+ f0f1_2 := int64(f0_2) * int64(f1)
+ f0f2_2 := int64(f0_2) * int64(f2)
+ f0f3_2 := int64(f0_2) * int64(f3)
+ f0f4_2 := int64(f0_2) * int64(f4)
+ f0f5_2 := int64(f0_2) * int64(f5)
+ f0f6_2 := int64(f0_2) * int64(f6)
+ f0f7_2 := int64(f0_2) * int64(f7)
+ f0f8_2 := int64(f0_2) * int64(f8)
+ f0f9_2 := int64(f0_2) * int64(f9)
+ f1f1_2 := int64(f1_2) * int64(f1)
+ f1f2_2 := int64(f1_2) * int64(f2)
+ f1f3_4 := int64(f1_2) * int64(f3_2)
+ f1f4_2 := int64(f1_2) * int64(f4)
+ f1f5_4 := int64(f1_2) * int64(f5_2)
+ f1f6_2 := int64(f1_2) * int64(f6)
+ f1f7_4 := int64(f1_2) * int64(f7_2)
+ f1f8_2 := int64(f1_2) * int64(f8)
+ f1f9_76 := int64(f1_2) * int64(f9_38)
+ f2f2 := int64(f2) * int64(f2)
+ f2f3_2 := int64(f2_2) * int64(f3)
+ f2f4_2 := int64(f2_2) * int64(f4)
+ f2f5_2 := int64(f2_2) * int64(f5)
+ f2f6_2 := int64(f2_2) * int64(f6)
+ f2f7_2 := int64(f2_2) * int64(f7)
+ f2f8_38 := int64(f2_2) * int64(f8_19)
+ f2f9_38 := int64(f2) * int64(f9_38)
+ f3f3_2 := int64(f3_2) * int64(f3)
+ f3f4_2 := int64(f3_2) * int64(f4)
+ f3f5_4 := int64(f3_2) * int64(f5_2)
+ f3f6_2 := int64(f3_2) * int64(f6)
+ f3f7_76 := int64(f3_2) * int64(f7_38)
+ f3f8_38 := int64(f3_2) * int64(f8_19)
+ f3f9_76 := int64(f3_2) * int64(f9_38)
+ f4f4 := int64(f4) * int64(f4)
+ f4f5_2 := int64(f4_2) * int64(f5)
+ f4f6_38 := int64(f4_2) * int64(f6_19)
+ f4f7_38 := int64(f4) * int64(f7_38)
+ f4f8_38 := int64(f4_2) * int64(f8_19)
+ f4f9_38 := int64(f4) * int64(f9_38)
+ f5f5_38 := int64(f5) * int64(f5_38)
+ f5f6_38 := int64(f5_2) * int64(f6_19)
+ f5f7_76 := int64(f5_2) * int64(f7_38)
+ f5f8_38 := int64(f5_2) * int64(f8_19)
+ f5f9_76 := int64(f5_2) * int64(f9_38)
+ f6f6_19 := int64(f6) * int64(f6_19)
+ f6f7_38 := int64(f6) * int64(f7_38)
+ f6f8_38 := int64(f6_2) * int64(f8_19)
+ f6f9_38 := int64(f6) * int64(f9_38)
+ f7f7_38 := int64(f7) * int64(f7_38)
+ f7f8_38 := int64(f7_2) * int64(f8_19)
+ f7f9_76 := int64(f7_2) * int64(f9_38)
+ f8f8_19 := int64(f8) * int64(f8_19)
+ f8f9_38 := int64(f8) * int64(f9_38)
+ f9f9_38 := int64(f9) * int64(f9_38)
+ h0 := f0f0 + f1f9_76 + f2f8_38 + f3f7_76 + f4f6_38 + f5f5_38
+ h1 := f0f1_2 + f2f9_38 + f3f8_38 + f4f7_38 + f5f6_38
+ h2 := f0f2_2 + f1f1_2 + f3f9_76 + f4f8_38 + f5f7_76 + f6f6_19
+ h3 := f0f3_2 + f1f2_2 + f4f9_38 + f5f8_38 + f6f7_38
+ h4 := f0f4_2 + f1f3_4 + f2f2 + f5f9_76 + f6f8_38 + f7f7_38
+ h5 := f0f5_2 + f1f4_2 + f2f3_2 + f6f9_38 + f7f8_38
+ h6 := f0f6_2 + f1f5_4 + f2f4_2 + f3f3_2 + f7f9_76 + f8f8_19
+ h7 := f0f7_2 + f1f6_2 + f2f5_2 + f3f4_2 + f8f9_38
+ h8 := f0f8_2 + f1f7_4 + f2f6_2 + f3f5_4 + f4f4 + f9f9_38
+ h9 := f0f9_2 + f1f8_2 + f2f7_2 + f3f6_2 + f4f5_2
+ var carry [10]int64
+
+ carry[0] = (h0 + (1 << 25)) >> 26
+ h1 += carry[0]
+ h0 -= carry[0] << 26
+ carry[4] = (h4 + (1 << 25)) >> 26
+ h5 += carry[4]
+ h4 -= carry[4] << 26
+
+ carry[1] = (h1 + (1 << 24)) >> 25
+ h2 += carry[1]
+ h1 -= carry[1] << 25
+ carry[5] = (h5 + (1 << 24)) >> 25
+ h6 += carry[5]
+ h5 -= carry[5] << 25
+
+ carry[2] = (h2 + (1 << 25)) >> 26
+ h3 += carry[2]
+ h2 -= carry[2] << 26
+ carry[6] = (h6 + (1 << 25)) >> 26
+ h7 += carry[6]
+ h6 -= carry[6] << 26
+
+ carry[3] = (h3 + (1 << 24)) >> 25
+ h4 += carry[3]
+ h3 -= carry[3] << 25
+ carry[7] = (h7 + (1 << 24)) >> 25
+ h8 += carry[7]
+ h7 -= carry[7] << 25
+
+ carry[4] = (h4 + (1 << 25)) >> 26
+ h5 += carry[4]
+ h4 -= carry[4] << 26
+ carry[8] = (h8 + (1 << 25)) >> 26
+ h9 += carry[8]
+ h8 -= carry[8] << 26
+
+ carry[9] = (h9 + (1 << 24)) >> 25
+ h0 += carry[9] * 19
+ h9 -= carry[9] << 25
+
+ carry[0] = (h0 + (1 << 25)) >> 26
+ h1 += carry[0]
+ h0 -= carry[0] << 26
+
+ h[0] = int32(h0)
+ h[1] = int32(h1)
+ h[2] = int32(h2)
+ h[3] = int32(h3)
+ h[4] = int32(h4)
+ h[5] = int32(h5)
+ h[6] = int32(h6)
+ h[7] = int32(h7)
+ h[8] = int32(h8)
+ h[9] = int32(h9)
+}
+
+// feMul121666 calculates h = f * 121666. Can overlap h with f.
+//
+// Preconditions:
+// |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
+//
+// Postconditions:
+// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
+func feMul121666(h, f *fieldElement) {
+ h0 := int64(f[0]) * 121666
+ h1 := int64(f[1]) * 121666
+ h2 := int64(f[2]) * 121666
+ h3 := int64(f[3]) * 121666
+ h4 := int64(f[4]) * 121666
+ h5 := int64(f[5]) * 121666
+ h6 := int64(f[6]) * 121666
+ h7 := int64(f[7]) * 121666
+ h8 := int64(f[8]) * 121666
+ h9 := int64(f[9]) * 121666
+ var carry [10]int64
+
+ carry[9] = (h9 + (1 << 24)) >> 25
+ h0 += carry[9] * 19
+ h9 -= carry[9] << 25
+ carry[1] = (h1 + (1 << 24)) >> 25
+ h2 += carry[1]
+ h1 -= carry[1] << 25
+ carry[3] = (h3 + (1 << 24)) >> 25
+ h4 += carry[3]
+ h3 -= carry[3] << 25
+ carry[5] = (h5 + (1 << 24)) >> 25
+ h6 += carry[5]
+ h5 -= carry[5] << 25
+ carry[7] = (h7 + (1 << 24)) >> 25
+ h8 += carry[7]
+ h7 -= carry[7] << 25
+
+ carry[0] = (h0 + (1 << 25)) >> 26
+ h1 += carry[0]
+ h0 -= carry[0] << 26
+ carry[2] = (h2 + (1 << 25)) >> 26
+ h3 += carry[2]
+ h2 -= carry[2] << 26
+ carry[4] = (h4 + (1 << 25)) >> 26
+ h5 += carry[4]
+ h4 -= carry[4] << 26
+ carry[6] = (h6 + (1 << 25)) >> 26
+ h7 += carry[6]
+ h6 -= carry[6] << 26
+ carry[8] = (h8 + (1 << 25)) >> 26
+ h9 += carry[8]
+ h8 -= carry[8] << 26
+
+ h[0] = int32(h0)
+ h[1] = int32(h1)
+ h[2] = int32(h2)
+ h[3] = int32(h3)
+ h[4] = int32(h4)
+ h[5] = int32(h5)
+ h[6] = int32(h6)
+ h[7] = int32(h7)
+ h[8] = int32(h8)
+ h[9] = int32(h9)
+}
+
+// feInvert sets out = z^-1.
+func feInvert(out, z *fieldElement) {
+ var t0, t1, t2, t3 fieldElement
+ var i int
+
+ feSquare(&t0, z)
+ for i = 1; i < 1; i++ {
+ feSquare(&t0, &t0)
+ }
+ feSquare(&t1, &t0)
+ for i = 1; i < 2; i++ {
+ feSquare(&t1, &t1)
+ }
+ feMul(&t1, z, &t1)
+ feMul(&t0, &t0, &t1)
+ feSquare(&t2, &t0)
+ for i = 1; i < 1; i++ {
+ feSquare(&t2, &t2)
+ }
+ feMul(&t1, &t1, &t2)
+ feSquare(&t2, &t1)
+ for i = 1; i < 5; i++ {
+ feSquare(&t2, &t2)
+ }
+ feMul(&t1, &t2, &t1)
+ feSquare(&t2, &t1)
+ for i = 1; i < 10; i++ {
+ feSquare(&t2, &t2)
+ }
+ feMul(&t2, &t2, &t1)
+ feSquare(&t3, &t2)
+ for i = 1; i < 20; i++ {
+ feSquare(&t3, &t3)
+ }
+ feMul(&t2, &t3, &t2)
+ feSquare(&t2, &t2)
+ for i = 1; i < 10; i++ {
+ feSquare(&t2, &t2)
+ }
+ feMul(&t1, &t2, &t1)
+ feSquare(&t2, &t1)
+ for i = 1; i < 50; i++ {
+ feSquare(&t2, &t2)
+ }
+ feMul(&t2, &t2, &t1)
+ feSquare(&t3, &t2)
+ for i = 1; i < 100; i++ {
+ feSquare(&t3, &t3)
+ }
+ feMul(&t2, &t3, &t2)
+ feSquare(&t2, &t2)
+ for i = 1; i < 50; i++ {
+ feSquare(&t2, &t2)
+ }
+ feMul(&t1, &t2, &t1)
+ feSquare(&t1, &t1)
+ for i = 1; i < 5; i++ {
+ feSquare(&t1, &t1)
+ }
+ feMul(out, &t1, &t0)
+}
+
+func scalarMult(out, in, base *[32]byte) {
+ var e [32]byte
+
+ copy(e[:], in[:])
+ e[0] &= 248
+ e[31] &= 127
+ e[31] |= 64
+
+ var x1, x2, z2, x3, z3, tmp0, tmp1 fieldElement
+ feFromBytes(&x1, base)
+ feOne(&x2)
+ feCopy(&x3, &x1)
+ feOne(&z3)
+
+ swap := int32(0)
+ for pos := 254; pos >= 0; pos-- {
+ b := e[pos/8] >> uint(pos&7)
+ b &= 1
+ swap ^= int32(b)
+ feCSwap(&x2, &x3, swap)
+ feCSwap(&z2, &z3, swap)
+ swap = int32(b)
+
+ feSub(&tmp0, &x3, &z3)
+ feSub(&tmp1, &x2, &z2)
+ feAdd(&x2, &x2, &z2)
+ feAdd(&z2, &x3, &z3)
+ feMul(&z3, &tmp0, &x2)
+ feMul(&z2, &z2, &tmp1)
+ feSquare(&tmp0, &tmp1)
+ feSquare(&tmp1, &x2)
+ feAdd(&x3, &z3, &z2)
+ feSub(&z2, &z3, &z2)
+ feMul(&x2, &tmp1, &tmp0)
+ feSub(&tmp1, &tmp1, &tmp0)
+ feSquare(&z2, &z2)
+ feMul121666(&z3, &tmp1)
+ feSquare(&x3, &x3)
+ feAdd(&tmp0, &tmp0, &z3)
+ feMul(&z3, &x1, &z2)
+ feMul(&z2, &tmp1, &tmp0)
+ }
+
+ feCSwap(&x2, &x3, swap)
+ feCSwap(&z2, &z3, swap)
+
+ feInvert(&z2, &z2)
+ feMul(&x2, &x2, &z2)
+ feToBytes(out, &x2)
+}
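As the fieldElement comment at the top of this file says, limb t[i] carries weight 2^ceil(25.5·i), i.e. exponents 0, 26, 51, 77, 102, 128, 153, 179, 204, 230. A hypothetical helper (not part of the package; the type is repeated so the snippet stands alone) that flattens a fieldElement into a math/big integer makes that packing concrete:

package main

import (
	"fmt"
	"math/big"
)

type fieldElement [10]int32 // same shape as the package's type

// feToBig returns the sum over i of t[i] * 2^ceil(25.5*i), reduced mod 2^255-19.
func feToBig(t *fieldElement) *big.Int {
	p := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 255), big.NewInt(19))
	n := new(big.Int)
	for i, limb := range t {
		exp := uint((51*i + 1) / 2) // 0, 26, 51, 77, ...
		n.Add(n, new(big.Int).Lsh(big.NewInt(int64(limb)), exp))
	}
	return n.Mod(n, p)
}

func main() {
	var fe fieldElement
	fe[0], fe[1] = 7, 3
	fmt.Println(feToBig(&fe)) // 7 + 3*2^26 = 201326599
}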
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/curve25519_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/curve25519_test.go
new file mode 100644
index 00000000000..14b0ee87cdd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/curve25519_test.go
@@ -0,0 +1,29 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package curve25519
+
+import (
+ "fmt"
+ "testing"
+)
+
+const expectedHex = "89161fde887b2b53de549af483940106ecc114d6982daa98256de23bdf77661a"
+
+func TestBaseScalarMult(t *testing.T) {
+ var a, b [32]byte
+ in := &a
+ out := &b
+ a[0] = 1
+
+ for i := 0; i < 200; i++ {
+ ScalarBaseMult(out, in)
+ in, out = out, in
+ }
+
+ result := fmt.Sprintf("%x", in[:])
+ if result != expectedHex {
+ t.Errorf("incorrect result: got %s, want %s", result, expectedHex)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/doc.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/doc.go
new file mode 100644
index 00000000000..ebeea3c2d6a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/doc.go
@@ -0,0 +1,23 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package curve25519 provides an implementation of scalar multiplication on
+// the elliptic curve known as curve25519. See http://cr.yp.to/ecdh.html
+package curve25519 // import "golang.org/x/crypto/curve25519"
+
+// basePoint is the x coordinate of the generator of the curve.
+var basePoint = [32]byte{9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
+
+// ScalarMult sets dst to the product in*base where dst and base are the x
+// coordinates of group points and all values are in little-endian form.
+func ScalarMult(dst, in, base *[32]byte) {
+ scalarMult(dst, in, base)
+}
+
+// ScalarBaseMult sets dst to the product in*base where dst and base are the x
+// coordinates of group points, base is the standard generator and all values
+// are in little-endian form.
+func ScalarBaseMult(dst, in *[32]byte) {
+ ScalarMult(dst, in, &basePoint)
+}
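ScalarBaseMult and ScalarMult are all that an X25519-style Diffie-Hellman exchange needs; a hedged sketch assuming the vendored import path (the scalar is clamped inside scalarMult, so raw random bytes serve as private keys here):

package main

import (
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/curve25519"
)

func main() {
	var alicePriv, alicePub, bobPriv, bobPub [32]byte
	if _, err := rand.Read(alicePriv[:]); err != nil {
		panic(err)
	}
	if _, err := rand.Read(bobPriv[:]); err != nil {
		panic(err)
	}

	// Public keys: each private scalar times the base point (x = 9).
	curve25519.ScalarBaseMult(&alicePub, &alicePriv)
	curve25519.ScalarBaseMult(&bobPub, &bobPriv)

	// Each side multiplies its own scalar by the peer's public point;
	// both end up with the same shared x coordinate.
	var aliceShared, bobShared [32]byte
	curve25519.ScalarMult(&aliceShared, &alicePriv, &bobPub)
	curve25519.ScalarMult(&bobShared, &bobPriv, &alicePub)

	fmt.Println(aliceShared == bobShared) // true
}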
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/freeze_amd64.s b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/freeze_amd64.s
new file mode 100644
index 00000000000..37599fac043
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/freeze_amd64.s
@@ -0,0 +1,94 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code was translated into a form compatible with 6a from the public
+// domain sources in SUPERCOP: http://bench.cr.yp.to/supercop.html
+
+// +build amd64,!gccgo,!appengine
+
+// func freeze(inout *[5]uint64)
+TEXT ·freeze(SB),7,$96-8
+ MOVQ inout+0(FP), DI
+
+ MOVQ SP,R11
+ MOVQ $31,CX
+ NOTQ CX
+ ANDQ CX,SP
+ ADDQ $32,SP
+
+ MOVQ R11,0(SP)
+ MOVQ R12,8(SP)
+ MOVQ R13,16(SP)
+ MOVQ R14,24(SP)
+ MOVQ R15,32(SP)
+ MOVQ BX,40(SP)
+ MOVQ BP,48(SP)
+ MOVQ 0(DI),SI
+ MOVQ 8(DI),DX
+ MOVQ 16(DI),CX
+ MOVQ 24(DI),R8
+ MOVQ 32(DI),R9
+ MOVQ ·REDMASK51(SB),AX
+ MOVQ AX,R10
+ SUBQ $18,R10
+ MOVQ $3,R11
+REDUCELOOP:
+ MOVQ SI,R12
+ SHRQ $51,R12
+ ANDQ AX,SI
+ ADDQ R12,DX
+ MOVQ DX,R12
+ SHRQ $51,R12
+ ANDQ AX,DX
+ ADDQ R12,CX
+ MOVQ CX,R12
+ SHRQ $51,R12
+ ANDQ AX,CX
+ ADDQ R12,R8
+ MOVQ R8,R12
+ SHRQ $51,R12
+ ANDQ AX,R8
+ ADDQ R12,R9
+ MOVQ R9,R12
+ SHRQ $51,R12
+ ANDQ AX,R9
+ IMUL3Q $19,R12,R12
+ ADDQ R12,SI
+ SUBQ $1,R11
+ JA REDUCELOOP
+ MOVQ $1,R12
+ CMPQ R10,SI
+ CMOVQLT R11,R12
+ CMPQ AX,DX
+ CMOVQNE R11,R12
+ CMPQ AX,CX
+ CMOVQNE R11,R12
+ CMPQ AX,R8
+ CMOVQNE R11,R12
+ CMPQ AX,R9
+ CMOVQNE R11,R12
+ NEGQ R12
+ ANDQ R12,AX
+ ANDQ R12,R10
+ SUBQ R10,SI
+ SUBQ AX,DX
+ SUBQ AX,CX
+ SUBQ AX,R8
+ SUBQ AX,R9
+ MOVQ SI,0(DI)
+ MOVQ DX,8(DI)
+ MOVQ CX,16(DI)
+ MOVQ R8,24(DI)
+ MOVQ R9,32(DI)
+ MOVQ 0(SP),R11
+ MOVQ 8(SP),R12
+ MOVQ 16(SP),R13
+ MOVQ 24(SP),R14
+ MOVQ 32(SP),R15
+ MOVQ 40(SP),BX
+ MOVQ 48(SP),BP
+ MOVQ R11,SP
+ MOVQ DI,AX
+ MOVQ SI,DX
+ RET
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/ladderstep_amd64.s b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/ladderstep_amd64.s
new file mode 100644
index 00000000000..3949f9cfaf4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/ladderstep_amd64.s
@@ -0,0 +1,1398 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code was translated into a form compatible with 6a from the public
+// domain sources in SUPERCOP: http://bench.cr.yp.to/supercop.html
+
+// +build amd64,!gccgo,!appengine
+
+// func ladderstep(inout *[5][5]uint64)
+TEXT ·ladderstep(SB),0,$384-8
+ MOVQ inout+0(FP),DI
+
+ MOVQ SP,R11
+ MOVQ $31,CX
+ NOTQ CX
+ ANDQ CX,SP
+ ADDQ $32,SP
+
+ MOVQ R11,0(SP)
+ MOVQ R12,8(SP)
+ MOVQ R13,16(SP)
+ MOVQ R14,24(SP)
+ MOVQ R15,32(SP)
+ MOVQ BX,40(SP)
+ MOVQ BP,48(SP)
+ MOVQ 40(DI),SI
+ MOVQ 48(DI),DX
+ MOVQ 56(DI),CX
+ MOVQ 64(DI),R8
+ MOVQ 72(DI),R9
+ MOVQ SI,AX
+ MOVQ DX,R10
+ MOVQ CX,R11
+ MOVQ R8,R12
+ MOVQ R9,R13
+ ADDQ ·_2P0(SB),AX
+ ADDQ ·_2P1234(SB),R10
+ ADDQ ·_2P1234(SB),R11
+ ADDQ ·_2P1234(SB),R12
+ ADDQ ·_2P1234(SB),R13
+ ADDQ 80(DI),SI
+ ADDQ 88(DI),DX
+ ADDQ 96(DI),CX
+ ADDQ 104(DI),R8
+ ADDQ 112(DI),R9
+ SUBQ 80(DI),AX
+ SUBQ 88(DI),R10
+ SUBQ 96(DI),R11
+ SUBQ 104(DI),R12
+ SUBQ 112(DI),R13
+ MOVQ SI,56(SP)
+ MOVQ DX,64(SP)
+ MOVQ CX,72(SP)
+ MOVQ R8,80(SP)
+ MOVQ R9,88(SP)
+ MOVQ AX,96(SP)
+ MOVQ R10,104(SP)
+ MOVQ R11,112(SP)
+ MOVQ R12,120(SP)
+ MOVQ R13,128(SP)
+ MOVQ 96(SP),AX
+ MULQ 96(SP)
+ MOVQ AX,SI
+ MOVQ DX,CX
+ MOVQ 96(SP),AX
+ SHLQ $1,AX
+ MULQ 104(SP)
+ MOVQ AX,R8
+ MOVQ DX,R9
+ MOVQ 96(SP),AX
+ SHLQ $1,AX
+ MULQ 112(SP)
+ MOVQ AX,R10
+ MOVQ DX,R11
+ MOVQ 96(SP),AX
+ SHLQ $1,AX
+ MULQ 120(SP)
+ MOVQ AX,R12
+ MOVQ DX,R13
+ MOVQ 96(SP),AX
+ SHLQ $1,AX
+ MULQ 128(SP)
+ MOVQ AX,R14
+ MOVQ DX,R15
+ MOVQ 104(SP),AX
+ MULQ 104(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 104(SP),AX
+ SHLQ $1,AX
+ MULQ 112(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 104(SP),AX
+ SHLQ $1,AX
+ MULQ 120(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 104(SP),DX
+ IMUL3Q $38,DX,AX
+ MULQ 128(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 112(SP),AX
+ MULQ 112(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 112(SP),DX
+ IMUL3Q $38,DX,AX
+ MULQ 120(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 112(SP),DX
+ IMUL3Q $38,DX,AX
+ MULQ 128(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 120(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 120(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 120(SP),DX
+ IMUL3Q $38,DX,AX
+ MULQ 128(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 128(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 128(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ ·REDMASK51(SB),DX
+ SHLQ $13,CX:SI
+ ANDQ DX,SI
+ SHLQ $13,R9:R8
+ ANDQ DX,R8
+ ADDQ CX,R8
+ SHLQ $13,R11:R10
+ ANDQ DX,R10
+ ADDQ R9,R10
+ SHLQ $13,R13:R12
+ ANDQ DX,R12
+ ADDQ R11,R12
+ SHLQ $13,R15:R14
+ ANDQ DX,R14
+ ADDQ R13,R14
+ IMUL3Q $19,R15,CX
+ ADDQ CX,SI
+ MOVQ SI,CX
+ SHRQ $51,CX
+ ADDQ R8,CX
+ ANDQ DX,SI
+ MOVQ CX,R8
+ SHRQ $51,CX
+ ADDQ R10,CX
+ ANDQ DX,R8
+ MOVQ CX,R9
+ SHRQ $51,CX
+ ADDQ R12,CX
+ ANDQ DX,R9
+ MOVQ CX,AX
+ SHRQ $51,CX
+ ADDQ R14,CX
+ ANDQ DX,AX
+ MOVQ CX,R10
+ SHRQ $51,CX
+ IMUL3Q $19,CX,CX
+ ADDQ CX,SI
+ ANDQ DX,R10
+ MOVQ SI,136(SP)
+ MOVQ R8,144(SP)
+ MOVQ R9,152(SP)
+ MOVQ AX,160(SP)
+ MOVQ R10,168(SP)
+ MOVQ 56(SP),AX
+ MULQ 56(SP)
+ MOVQ AX,SI
+ MOVQ DX,CX
+ MOVQ 56(SP),AX
+ SHLQ $1,AX
+ MULQ 64(SP)
+ MOVQ AX,R8
+ MOVQ DX,R9
+ MOVQ 56(SP),AX
+ SHLQ $1,AX
+ MULQ 72(SP)
+ MOVQ AX,R10
+ MOVQ DX,R11
+ MOVQ 56(SP),AX
+ SHLQ $1,AX
+ MULQ 80(SP)
+ MOVQ AX,R12
+ MOVQ DX,R13
+ MOVQ 56(SP),AX
+ SHLQ $1,AX
+ MULQ 88(SP)
+ MOVQ AX,R14
+ MOVQ DX,R15
+ MOVQ 64(SP),AX
+ MULQ 64(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 64(SP),AX
+ SHLQ $1,AX
+ MULQ 72(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 64(SP),AX
+ SHLQ $1,AX
+ MULQ 80(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 64(SP),DX
+ IMUL3Q $38,DX,AX
+ MULQ 88(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 72(SP),AX
+ MULQ 72(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 72(SP),DX
+ IMUL3Q $38,DX,AX
+ MULQ 80(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 72(SP),DX
+ IMUL3Q $38,DX,AX
+ MULQ 88(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 80(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 80(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 80(SP),DX
+ IMUL3Q $38,DX,AX
+ MULQ 88(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 88(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 88(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ ·REDMASK51(SB),DX
+ SHLQ $13,CX:SI
+ ANDQ DX,SI
+ SHLQ $13,R9:R8
+ ANDQ DX,R8
+ ADDQ CX,R8
+ SHLQ $13,R11:R10
+ ANDQ DX,R10
+ ADDQ R9,R10
+ SHLQ $13,R13:R12
+ ANDQ DX,R12
+ ADDQ R11,R12
+ SHLQ $13,R15:R14
+ ANDQ DX,R14
+ ADDQ R13,R14
+ IMUL3Q $19,R15,CX
+ ADDQ CX,SI
+ MOVQ SI,CX
+ SHRQ $51,CX
+ ADDQ R8,CX
+ ANDQ DX,SI
+ MOVQ CX,R8
+ SHRQ $51,CX
+ ADDQ R10,CX
+ ANDQ DX,R8
+ MOVQ CX,R9
+ SHRQ $51,CX
+ ADDQ R12,CX
+ ANDQ DX,R9
+ MOVQ CX,AX
+ SHRQ $51,CX
+ ADDQ R14,CX
+ ANDQ DX,AX
+ MOVQ CX,R10
+ SHRQ $51,CX
+ IMUL3Q $19,CX,CX
+ ADDQ CX,SI
+ ANDQ DX,R10
+ MOVQ SI,176(SP)
+ MOVQ R8,184(SP)
+ MOVQ R9,192(SP)
+ MOVQ AX,200(SP)
+ MOVQ R10,208(SP)
+ MOVQ SI,SI
+ MOVQ R8,DX
+ MOVQ R9,CX
+ MOVQ AX,R8
+ MOVQ R10,R9
+ ADDQ ·_2P0(SB),SI
+ ADDQ ·_2P1234(SB),DX
+ ADDQ ·_2P1234(SB),CX
+ ADDQ ·_2P1234(SB),R8
+ ADDQ ·_2P1234(SB),R9
+ SUBQ 136(SP),SI
+ SUBQ 144(SP),DX
+ SUBQ 152(SP),CX
+ SUBQ 160(SP),R8
+ SUBQ 168(SP),R9
+ MOVQ SI,216(SP)
+ MOVQ DX,224(SP)
+ MOVQ CX,232(SP)
+ MOVQ R8,240(SP)
+ MOVQ R9,248(SP)
+ MOVQ 120(DI),SI
+ MOVQ 128(DI),DX
+ MOVQ 136(DI),CX
+ MOVQ 144(DI),R8
+ MOVQ 152(DI),R9
+ MOVQ SI,AX
+ MOVQ DX,R10
+ MOVQ CX,R11
+ MOVQ R8,R12
+ MOVQ R9,R13
+ ADDQ ·_2P0(SB),AX
+ ADDQ ·_2P1234(SB),R10
+ ADDQ ·_2P1234(SB),R11
+ ADDQ ·_2P1234(SB),R12
+ ADDQ ·_2P1234(SB),R13
+ ADDQ 160(DI),SI
+ ADDQ 168(DI),DX
+ ADDQ 176(DI),CX
+ ADDQ 184(DI),R8
+ ADDQ 192(DI),R9
+ SUBQ 160(DI),AX
+ SUBQ 168(DI),R10
+ SUBQ 176(DI),R11
+ SUBQ 184(DI),R12
+ SUBQ 192(DI),R13
+ MOVQ SI,256(SP)
+ MOVQ DX,264(SP)
+ MOVQ CX,272(SP)
+ MOVQ R8,280(SP)
+ MOVQ R9,288(SP)
+ MOVQ AX,296(SP)
+ MOVQ R10,304(SP)
+ MOVQ R11,312(SP)
+ MOVQ R12,320(SP)
+ MOVQ R13,328(SP)
+ MOVQ 280(SP),SI
+ IMUL3Q $19,SI,AX
+ MOVQ AX,336(SP)
+ MULQ 112(SP)
+ MOVQ AX,SI
+ MOVQ DX,CX
+ MOVQ 288(SP),DX
+ IMUL3Q $19,DX,AX
+ MOVQ AX,344(SP)
+ MULQ 104(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 256(SP),AX
+ MULQ 96(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 256(SP),AX
+ MULQ 104(SP)
+ MOVQ AX,R8
+ MOVQ DX,R9
+ MOVQ 256(SP),AX
+ MULQ 112(SP)
+ MOVQ AX,R10
+ MOVQ DX,R11
+ MOVQ 256(SP),AX
+ MULQ 120(SP)
+ MOVQ AX,R12
+ MOVQ DX,R13
+ MOVQ 256(SP),AX
+ MULQ 128(SP)
+ MOVQ AX,R14
+ MOVQ DX,R15
+ MOVQ 264(SP),AX
+ MULQ 96(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 264(SP),AX
+ MULQ 104(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 264(SP),AX
+ MULQ 112(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 264(SP),AX
+ MULQ 120(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 264(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 128(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 272(SP),AX
+ MULQ 96(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 272(SP),AX
+ MULQ 104(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 272(SP),AX
+ MULQ 112(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 272(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 120(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 272(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 128(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 280(SP),AX
+ MULQ 96(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 280(SP),AX
+ MULQ 104(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 336(SP),AX
+ MULQ 120(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 336(SP),AX
+ MULQ 128(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 288(SP),AX
+ MULQ 96(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 344(SP),AX
+ MULQ 112(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 344(SP),AX
+ MULQ 120(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 344(SP),AX
+ MULQ 128(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ ·REDMASK51(SB),DX
+ SHLQ $13,CX:SI
+ ANDQ DX,SI
+ SHLQ $13,R9:R8
+ ANDQ DX,R8
+ ADDQ CX,R8
+ SHLQ $13,R11:R10
+ ANDQ DX,R10
+ ADDQ R9,R10
+ SHLQ $13,R13:R12
+ ANDQ DX,R12
+ ADDQ R11,R12
+ SHLQ $13,R15:R14
+ ANDQ DX,R14
+ ADDQ R13,R14
+ IMUL3Q $19,R15,CX
+ ADDQ CX,SI
+ MOVQ SI,CX
+ SHRQ $51,CX
+ ADDQ R8,CX
+ MOVQ CX,R8
+ SHRQ $51,CX
+ ANDQ DX,SI
+ ADDQ R10,CX
+ MOVQ CX,R9
+ SHRQ $51,CX
+ ANDQ DX,R8
+ ADDQ R12,CX
+ MOVQ CX,AX
+ SHRQ $51,CX
+ ANDQ DX,R9
+ ADDQ R14,CX
+ MOVQ CX,R10
+ SHRQ $51,CX
+ ANDQ DX,AX
+ IMUL3Q $19,CX,CX
+ ADDQ CX,SI
+ ANDQ DX,R10
+ MOVQ SI,96(SP)
+ MOVQ R8,104(SP)
+ MOVQ R9,112(SP)
+ MOVQ AX,120(SP)
+ MOVQ R10,128(SP)
+ MOVQ 320(SP),SI
+ IMUL3Q $19,SI,AX
+ MOVQ AX,256(SP)
+ MULQ 72(SP)
+ MOVQ AX,SI
+ MOVQ DX,CX
+ MOVQ 328(SP),DX
+ IMUL3Q $19,DX,AX
+ MOVQ AX,264(SP)
+ MULQ 64(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 296(SP),AX
+ MULQ 56(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 296(SP),AX
+ MULQ 64(SP)
+ MOVQ AX,R8
+ MOVQ DX,R9
+ MOVQ 296(SP),AX
+ MULQ 72(SP)
+ MOVQ AX,R10
+ MOVQ DX,R11
+ MOVQ 296(SP),AX
+ MULQ 80(SP)
+ MOVQ AX,R12
+ MOVQ DX,R13
+ MOVQ 296(SP),AX
+ MULQ 88(SP)
+ MOVQ AX,R14
+ MOVQ DX,R15
+ MOVQ 304(SP),AX
+ MULQ 56(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 304(SP),AX
+ MULQ 64(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 304(SP),AX
+ MULQ 72(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 304(SP),AX
+ MULQ 80(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 304(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 88(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 312(SP),AX
+ MULQ 56(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 312(SP),AX
+ MULQ 64(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 312(SP),AX
+ MULQ 72(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 312(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 80(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 312(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 88(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 320(SP),AX
+ MULQ 56(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 320(SP),AX
+ MULQ 64(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 256(SP),AX
+ MULQ 80(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 256(SP),AX
+ MULQ 88(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 328(SP),AX
+ MULQ 56(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 264(SP),AX
+ MULQ 72(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 264(SP),AX
+ MULQ 80(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 264(SP),AX
+ MULQ 88(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ ·REDMASK51(SB),DX
+ SHLQ $13,CX:SI
+ ANDQ DX,SI
+ SHLQ $13,R9:R8
+ ANDQ DX,R8
+ ADDQ CX,R8
+ SHLQ $13,R11:R10
+ ANDQ DX,R10
+ ADDQ R9,R10
+ SHLQ $13,R13:R12
+ ANDQ DX,R12
+ ADDQ R11,R12
+ SHLQ $13,R15:R14
+ ANDQ DX,R14
+ ADDQ R13,R14
+ IMUL3Q $19,R15,CX
+ ADDQ CX,SI
+ MOVQ SI,CX
+ SHRQ $51,CX
+ ADDQ R8,CX
+ MOVQ CX,R8
+ SHRQ $51,CX
+ ANDQ DX,SI
+ ADDQ R10,CX
+ MOVQ CX,R9
+ SHRQ $51,CX
+ ANDQ DX,R8
+ ADDQ R12,CX
+ MOVQ CX,AX
+ SHRQ $51,CX
+ ANDQ DX,R9
+ ADDQ R14,CX
+ MOVQ CX,R10
+ SHRQ $51,CX
+ ANDQ DX,AX
+ IMUL3Q $19,CX,CX
+ ADDQ CX,SI
+ ANDQ DX,R10
+ MOVQ SI,DX
+ MOVQ R8,CX
+ MOVQ R9,R11
+ MOVQ AX,R12
+ MOVQ R10,R13
+ ADDQ ·_2P0(SB),DX
+ ADDQ ·_2P1234(SB),CX
+ ADDQ ·_2P1234(SB),R11
+ ADDQ ·_2P1234(SB),R12
+ ADDQ ·_2P1234(SB),R13
+ ADDQ 96(SP),SI
+ ADDQ 104(SP),R8
+ ADDQ 112(SP),R9
+ ADDQ 120(SP),AX
+ ADDQ 128(SP),R10
+ SUBQ 96(SP),DX
+ SUBQ 104(SP),CX
+ SUBQ 112(SP),R11
+ SUBQ 120(SP),R12
+ SUBQ 128(SP),R13
+ MOVQ SI,120(DI)
+ MOVQ R8,128(DI)
+ MOVQ R9,136(DI)
+ MOVQ AX,144(DI)
+ MOVQ R10,152(DI)
+ MOVQ DX,160(DI)
+ MOVQ CX,168(DI)
+ MOVQ R11,176(DI)
+ MOVQ R12,184(DI)
+ MOVQ R13,192(DI)
+ MOVQ 120(DI),AX
+ MULQ 120(DI)
+ MOVQ AX,SI
+ MOVQ DX,CX
+ MOVQ 120(DI),AX
+ SHLQ $1,AX
+ MULQ 128(DI)
+ MOVQ AX,R8
+ MOVQ DX,R9
+ MOVQ 120(DI),AX
+ SHLQ $1,AX
+ MULQ 136(DI)
+ MOVQ AX,R10
+ MOVQ DX,R11
+ MOVQ 120(DI),AX
+ SHLQ $1,AX
+ MULQ 144(DI)
+ MOVQ AX,R12
+ MOVQ DX,R13
+ MOVQ 120(DI),AX
+ SHLQ $1,AX
+ MULQ 152(DI)
+ MOVQ AX,R14
+ MOVQ DX,R15
+ MOVQ 128(DI),AX
+ MULQ 128(DI)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 128(DI),AX
+ SHLQ $1,AX
+ MULQ 136(DI)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 128(DI),AX
+ SHLQ $1,AX
+ MULQ 144(DI)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 128(DI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 152(DI)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 136(DI),AX
+ MULQ 136(DI)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 136(DI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 144(DI)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 136(DI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 152(DI)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 144(DI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 144(DI)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 144(DI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 152(DI)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 152(DI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 152(DI)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ ·REDMASK51(SB),DX
+ SHLQ $13,CX:SI
+ ANDQ DX,SI
+ SHLQ $13,R9:R8
+ ANDQ DX,R8
+ ADDQ CX,R8
+ SHLQ $13,R11:R10
+ ANDQ DX,R10
+ ADDQ R9,R10
+ SHLQ $13,R13:R12
+ ANDQ DX,R12
+ ADDQ R11,R12
+ SHLQ $13,R15:R14
+ ANDQ DX,R14
+ ADDQ R13,R14
+ IMUL3Q $19,R15,CX
+ ADDQ CX,SI
+ MOVQ SI,CX
+ SHRQ $51,CX
+ ADDQ R8,CX
+ ANDQ DX,SI
+ MOVQ CX,R8
+ SHRQ $51,CX
+ ADDQ R10,CX
+ ANDQ DX,R8
+ MOVQ CX,R9
+ SHRQ $51,CX
+ ADDQ R12,CX
+ ANDQ DX,R9
+ MOVQ CX,AX
+ SHRQ $51,CX
+ ADDQ R14,CX
+ ANDQ DX,AX
+ MOVQ CX,R10
+ SHRQ $51,CX
+ IMUL3Q $19,CX,CX
+ ADDQ CX,SI
+ ANDQ DX,R10
+ MOVQ SI,120(DI)
+ MOVQ R8,128(DI)
+ MOVQ R9,136(DI)
+ MOVQ AX,144(DI)
+ MOVQ R10,152(DI)
+ MOVQ 160(DI),AX
+ MULQ 160(DI)
+ MOVQ AX,SI
+ MOVQ DX,CX
+ MOVQ 160(DI),AX
+ SHLQ $1,AX
+ MULQ 168(DI)
+ MOVQ AX,R8
+ MOVQ DX,R9
+ MOVQ 160(DI),AX
+ SHLQ $1,AX
+ MULQ 176(DI)
+ MOVQ AX,R10
+ MOVQ DX,R11
+ MOVQ 160(DI),AX
+ SHLQ $1,AX
+ MULQ 184(DI)
+ MOVQ AX,R12
+ MOVQ DX,R13
+ MOVQ 160(DI),AX
+ SHLQ $1,AX
+ MULQ 192(DI)
+ MOVQ AX,R14
+ MOVQ DX,R15
+ MOVQ 168(DI),AX
+ MULQ 168(DI)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 168(DI),AX
+ SHLQ $1,AX
+ MULQ 176(DI)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 168(DI),AX
+ SHLQ $1,AX
+ MULQ 184(DI)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 168(DI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 192(DI)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 176(DI),AX
+ MULQ 176(DI)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 176(DI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 184(DI)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 176(DI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 192(DI)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 184(DI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 184(DI)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 184(DI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 192(DI)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 192(DI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 192(DI)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ ·REDMASK51(SB),DX
+ SHLQ $13,CX:SI
+ ANDQ DX,SI
+ SHLQ $13,R9:R8
+ ANDQ DX,R8
+ ADDQ CX,R8
+ SHLQ $13,R11:R10
+ ANDQ DX,R10
+ ADDQ R9,R10
+ SHLQ $13,R13:R12
+ ANDQ DX,R12
+ ADDQ R11,R12
+ SHLQ $13,R15:R14
+ ANDQ DX,R14
+ ADDQ R13,R14
+ IMUL3Q $19,R15,CX
+ ADDQ CX,SI
+ MOVQ SI,CX
+ SHRQ $51,CX
+ ADDQ R8,CX
+ ANDQ DX,SI
+ MOVQ CX,R8
+ SHRQ $51,CX
+ ADDQ R10,CX
+ ANDQ DX,R8
+ MOVQ CX,R9
+ SHRQ $51,CX
+ ADDQ R12,CX
+ ANDQ DX,R9
+ MOVQ CX,AX
+ SHRQ $51,CX
+ ADDQ R14,CX
+ ANDQ DX,AX
+ MOVQ CX,R10
+ SHRQ $51,CX
+ IMUL3Q $19,CX,CX
+ ADDQ CX,SI
+ ANDQ DX,R10
+ MOVQ SI,160(DI)
+ MOVQ R8,168(DI)
+ MOVQ R9,176(DI)
+ MOVQ AX,184(DI)
+ MOVQ R10,192(DI)
+ MOVQ 184(DI),SI
+ IMUL3Q $19,SI,AX
+ MOVQ AX,56(SP)
+ MULQ 16(DI)
+ MOVQ AX,SI
+ MOVQ DX,CX
+ MOVQ 192(DI),DX
+ IMUL3Q $19,DX,AX
+ MOVQ AX,64(SP)
+ MULQ 8(DI)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 160(DI),AX
+ MULQ 0(DI)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 160(DI),AX
+ MULQ 8(DI)
+ MOVQ AX,R8
+ MOVQ DX,R9
+ MOVQ 160(DI),AX
+ MULQ 16(DI)
+ MOVQ AX,R10
+ MOVQ DX,R11
+ MOVQ 160(DI),AX
+ MULQ 24(DI)
+ MOVQ AX,R12
+ MOVQ DX,R13
+ MOVQ 160(DI),AX
+ MULQ 32(DI)
+ MOVQ AX,R14
+ MOVQ DX,R15
+ MOVQ 168(DI),AX
+ MULQ 0(DI)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 168(DI),AX
+ MULQ 8(DI)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 168(DI),AX
+ MULQ 16(DI)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 168(DI),AX
+ MULQ 24(DI)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 168(DI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 32(DI)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 176(DI),AX
+ MULQ 0(DI)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 176(DI),AX
+ MULQ 8(DI)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 176(DI),AX
+ MULQ 16(DI)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 176(DI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 24(DI)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 176(DI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 32(DI)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 184(DI),AX
+ MULQ 0(DI)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 184(DI),AX
+ MULQ 8(DI)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 56(SP),AX
+ MULQ 24(DI)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 56(SP),AX
+ MULQ 32(DI)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 192(DI),AX
+ MULQ 0(DI)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 64(SP),AX
+ MULQ 16(DI)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 64(SP),AX
+ MULQ 24(DI)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 64(SP),AX
+ MULQ 32(DI)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ ·REDMASK51(SB),DX
+ SHLQ $13,CX:SI
+ ANDQ DX,SI
+ SHLQ $13,R9:R8
+ ANDQ DX,R8
+ ADDQ CX,R8
+ SHLQ $13,R11:R10
+ ANDQ DX,R10
+ ADDQ R9,R10
+ SHLQ $13,R13:R12
+ ANDQ DX,R12
+ ADDQ R11,R12
+ SHLQ $13,R15:R14
+ ANDQ DX,R14
+ ADDQ R13,R14
+ IMUL3Q $19,R15,CX
+ ADDQ CX,SI
+ MOVQ SI,CX
+ SHRQ $51,CX
+ ADDQ R8,CX
+ MOVQ CX,R8
+ SHRQ $51,CX
+ ANDQ DX,SI
+ ADDQ R10,CX
+ MOVQ CX,R9
+ SHRQ $51,CX
+ ANDQ DX,R8
+ ADDQ R12,CX
+ MOVQ CX,AX
+ SHRQ $51,CX
+ ANDQ DX,R9
+ ADDQ R14,CX
+ MOVQ CX,R10
+ SHRQ $51,CX
+ ANDQ DX,AX
+ IMUL3Q $19,CX,CX
+ ADDQ CX,SI
+ ANDQ DX,R10
+ MOVQ SI,160(DI)
+ MOVQ R8,168(DI)
+ MOVQ R9,176(DI)
+ MOVQ AX,184(DI)
+ MOVQ R10,192(DI)
+ MOVQ 200(SP),SI
+ IMUL3Q $19,SI,AX
+ MOVQ AX,56(SP)
+ MULQ 152(SP)
+ MOVQ AX,SI
+ MOVQ DX,CX
+ MOVQ 208(SP),DX
+ IMUL3Q $19,DX,AX
+ MOVQ AX,64(SP)
+ MULQ 144(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 176(SP),AX
+ MULQ 136(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 176(SP),AX
+ MULQ 144(SP)
+ MOVQ AX,R8
+ MOVQ DX,R9
+ MOVQ 176(SP),AX
+ MULQ 152(SP)
+ MOVQ AX,R10
+ MOVQ DX,R11
+ MOVQ 176(SP),AX
+ MULQ 160(SP)
+ MOVQ AX,R12
+ MOVQ DX,R13
+ MOVQ 176(SP),AX
+ MULQ 168(SP)
+ MOVQ AX,R14
+ MOVQ DX,R15
+ MOVQ 184(SP),AX
+ MULQ 136(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 184(SP),AX
+ MULQ 144(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 184(SP),AX
+ MULQ 152(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 184(SP),AX
+ MULQ 160(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 184(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 168(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 192(SP),AX
+ MULQ 136(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 192(SP),AX
+ MULQ 144(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 192(SP),AX
+ MULQ 152(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 192(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 160(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 192(SP),DX
+ IMUL3Q $19,DX,AX
+ MULQ 168(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 200(SP),AX
+ MULQ 136(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 200(SP),AX
+ MULQ 144(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 56(SP),AX
+ MULQ 160(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 56(SP),AX
+ MULQ 168(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 208(SP),AX
+ MULQ 136(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 64(SP),AX
+ MULQ 152(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 64(SP),AX
+ MULQ 160(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 64(SP),AX
+ MULQ 168(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ ·REDMASK51(SB),DX
+ SHLQ $13,CX:SI
+ ANDQ DX,SI
+ SHLQ $13,R9:R8
+ ANDQ DX,R8
+ ADDQ CX,R8
+ SHLQ $13,R11:R10
+ ANDQ DX,R10
+ ADDQ R9,R10
+ SHLQ $13,R13:R12
+ ANDQ DX,R12
+ ADDQ R11,R12
+ SHLQ $13,R15:R14
+ ANDQ DX,R14
+ ADDQ R13,R14
+ IMUL3Q $19,R15,CX
+ ADDQ CX,SI
+ MOVQ SI,CX
+ SHRQ $51,CX
+ ADDQ R8,CX
+ MOVQ CX,R8
+ SHRQ $51,CX
+ ANDQ DX,SI
+ ADDQ R10,CX
+ MOVQ CX,R9
+ SHRQ $51,CX
+ ANDQ DX,R8
+ ADDQ R12,CX
+ MOVQ CX,AX
+ SHRQ $51,CX
+ ANDQ DX,R9
+ ADDQ R14,CX
+ MOVQ CX,R10
+ SHRQ $51,CX
+ ANDQ DX,AX
+ IMUL3Q $19,CX,CX
+ ADDQ CX,SI
+ ANDQ DX,R10
+ MOVQ SI,40(DI)
+ MOVQ R8,48(DI)
+ MOVQ R9,56(DI)
+ MOVQ AX,64(DI)
+ MOVQ R10,72(DI)
+ MOVQ 216(SP),AX
+ MULQ ·_121666_213(SB)
+ SHRQ $13,AX
+ MOVQ AX,SI
+ MOVQ DX,CX
+ MOVQ 224(SP),AX
+ MULQ ·_121666_213(SB)
+ SHRQ $13,AX
+ ADDQ AX,CX
+ MOVQ DX,R8
+ MOVQ 232(SP),AX
+ MULQ ·_121666_213(SB)
+ SHRQ $13,AX
+ ADDQ AX,R8
+ MOVQ DX,R9
+ MOVQ 240(SP),AX
+ MULQ ·_121666_213(SB)
+ SHRQ $13,AX
+ ADDQ AX,R9
+ MOVQ DX,R10
+ MOVQ 248(SP),AX
+ MULQ ·_121666_213(SB)
+ SHRQ $13,AX
+ ADDQ AX,R10
+ IMUL3Q $19,DX,DX
+ ADDQ DX,SI
+ ADDQ 136(SP),SI
+ ADDQ 144(SP),CX
+ ADDQ 152(SP),R8
+ ADDQ 160(SP),R9
+ ADDQ 168(SP),R10
+ MOVQ SI,80(DI)
+ MOVQ CX,88(DI)
+ MOVQ R8,96(DI)
+ MOVQ R9,104(DI)
+ MOVQ R10,112(DI)
+ MOVQ 104(DI),SI
+ IMUL3Q $19,SI,AX
+ MOVQ AX,56(SP)
+ MULQ 232(SP)
+ MOVQ AX,SI
+ MOVQ DX,CX
+ MOVQ 112(DI),DX
+ IMUL3Q $19,DX,AX
+ MOVQ AX,64(SP)
+ MULQ 224(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 80(DI),AX
+ MULQ 216(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 80(DI),AX
+ MULQ 224(SP)
+ MOVQ AX,R8
+ MOVQ DX,R9
+ MOVQ 80(DI),AX
+ MULQ 232(SP)
+ MOVQ AX,R10
+ MOVQ DX,R11
+ MOVQ 80(DI),AX
+ MULQ 240(SP)
+ MOVQ AX,R12
+ MOVQ DX,R13
+ MOVQ 80(DI),AX
+ MULQ 248(SP)
+ MOVQ AX,R14
+ MOVQ DX,R15
+ MOVQ 88(DI),AX
+ MULQ 216(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 88(DI),AX
+ MULQ 224(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 88(DI),AX
+ MULQ 232(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 88(DI),AX
+ MULQ 240(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 88(DI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 248(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 96(DI),AX
+ MULQ 216(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 96(DI),AX
+ MULQ 224(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 96(DI),AX
+ MULQ 232(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 96(DI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 240(SP)
+ ADDQ AX,SI
+ ADCQ DX,CX
+ MOVQ 96(DI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 248(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 104(DI),AX
+ MULQ 216(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 104(DI),AX
+ MULQ 224(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 56(SP),AX
+ MULQ 240(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 56(SP),AX
+ MULQ 248(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 112(DI),AX
+ MULQ 216(SP)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 64(SP),AX
+ MULQ 232(SP)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 64(SP),AX
+ MULQ 240(SP)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 64(SP),AX
+ MULQ 248(SP)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ ·REDMASK51(SB),DX
+ SHLQ $13,CX:SI
+ ANDQ DX,SI
+ SHLQ $13,R9:R8
+ ANDQ DX,R8
+ ADDQ CX,R8
+ SHLQ $13,R11:R10
+ ANDQ DX,R10
+ ADDQ R9,R10
+ SHLQ $13,R13:R12
+ ANDQ DX,R12
+ ADDQ R11,R12
+ SHLQ $13,R15:R14
+ ANDQ DX,R14
+ ADDQ R13,R14
+ IMUL3Q $19,R15,CX
+ ADDQ CX,SI
+ MOVQ SI,CX
+ SHRQ $51,CX
+ ADDQ R8,CX
+ MOVQ CX,R8
+ SHRQ $51,CX
+ ANDQ DX,SI
+ ADDQ R10,CX
+ MOVQ CX,R9
+ SHRQ $51,CX
+ ANDQ DX,R8
+ ADDQ R12,CX
+ MOVQ CX,AX
+ SHRQ $51,CX
+ ANDQ DX,R9
+ ADDQ R14,CX
+ MOVQ CX,R10
+ SHRQ $51,CX
+ ANDQ DX,AX
+ IMUL3Q $19,CX,CX
+ ADDQ CX,SI
+ ANDQ DX,R10
+ MOVQ SI,80(DI)
+ MOVQ R8,88(DI)
+ MOVQ R9,96(DI)
+ MOVQ AX,104(DI)
+ MOVQ R10,112(DI)
+ MOVQ 0(SP),R11
+ MOVQ 8(SP),R12
+ MOVQ 16(SP),R13
+ MOVQ 24(SP),R14
+ MOVQ 32(SP),R15
+ MOVQ 40(SP),BX
+ MOVQ 48(SP),BP
+ MOVQ R11,SP
+ MOVQ DI,AX
+ MOVQ SI,DX
+ RET
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/mont25519_amd64.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/mont25519_amd64.go
new file mode 100644
index 00000000000..5822bd53383
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/mont25519_amd64.go
@@ -0,0 +1,240 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build amd64,!gccgo,!appengine
+
+package curve25519
+
+// These functions are implemented in the .s files. The names of the functions
+// in the rest of the file are also taken from the SUPERCOP sources to help
+// people following along.
+
+//go:noescape
+
+func cswap(inout *[5]uint64, v uint64)
+
+//go:noescape
+
+func ladderstep(inout *[5][5]uint64)
+
+//go:noescape
+
+func freeze(inout *[5]uint64)
+
+//go:noescape
+
+func mul(dest, a, b *[5]uint64)
+
+//go:noescape
+
+func square(out, in *[5]uint64)
+
+// mladder uses a Montgomery ladder to calculate (xr/zr) *= s.
+func mladder(xr, zr *[5]uint64, s *[32]byte) {
+ var work [5][5]uint64
+
+ work[0] = *xr
+ setint(&work[1], 1)
+ setint(&work[2], 0)
+ work[3] = *xr
+ setint(&work[4], 1)
+
+ j := uint(6)
+ var prevbit byte
+
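+	// The scalar passed in by scalarMult is clamped so that bit 255 is zero and
+	// bit 254 is one, so the ladder starts at bit 6 of the last byte (bit 254).
+	// prevbit records whether the working values are currently swapped, so each
+	// step only needs a single conditional swap before the ladder step.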
+ for i := 31; i >= 0; i-- {
+ for j < 8 {
+ bit := ((*s)[i] >> j) & 1
+ swap := bit ^ prevbit
+ prevbit = bit
+ cswap(&work[1], uint64(swap))
+ ladderstep(&work)
+ j--
+ }
+ j = 7
+ }
+
+ *xr = work[1]
+ *zr = work[2]
+}
+
+func scalarMult(out, in, base *[32]byte) {
+ var e [32]byte
+ copy(e[:], (*in)[:])
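+	// Clamp the scalar as specified for curve25519: clear the three low bits and
+	// the top bit, and set bit 254.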
+ e[0] &= 248
+ e[31] &= 127
+ e[31] |= 64
+
+ var t, z [5]uint64
+ unpack(&t, base)
+ mladder(&t, &z, &e)
+ invert(&z, &z)
+ mul(&t, &t, &z)
+ pack(out, &t)
+}
+
+func setint(r *[5]uint64, v uint64) {
+ r[0] = v
+ r[1] = 0
+ r[2] = 0
+ r[3] = 0
+ r[4] = 0
+}
+
+// unpack sets r = x where r consists of 5, 51-bit limbs in little-endian
+// order.
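+// The represented value is r[0] + 2^51*r[1] + 2^102*r[2] + 2^153*r[3] + 2^204*r[4].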
+func unpack(r *[5]uint64, x *[32]byte) {
+ r[0] = uint64(x[0]) |
+ uint64(x[1])<<8 |
+ uint64(x[2])<<16 |
+ uint64(x[3])<<24 |
+ uint64(x[4])<<32 |
+ uint64(x[5])<<40 |
+ uint64(x[6]&7)<<48
+
+ r[1] = uint64(x[6])>>3 |
+ uint64(x[7])<<5 |
+ uint64(x[8])<<13 |
+ uint64(x[9])<<21 |
+ uint64(x[10])<<29 |
+ uint64(x[11])<<37 |
+ uint64(x[12]&63)<<45
+
+ r[2] = uint64(x[12])>>6 |
+ uint64(x[13])<<2 |
+ uint64(x[14])<<10 |
+ uint64(x[15])<<18 |
+ uint64(x[16])<<26 |
+ uint64(x[17])<<34 |
+ uint64(x[18])<<42 |
+ uint64(x[19]&1)<<50
+
+ r[3] = uint64(x[19])>>1 |
+ uint64(x[20])<<7 |
+ uint64(x[21])<<15 |
+ uint64(x[22])<<23 |
+ uint64(x[23])<<31 |
+ uint64(x[24])<<39 |
+ uint64(x[25]&15)<<47
+
+ r[4] = uint64(x[25])>>4 |
+ uint64(x[26])<<4 |
+ uint64(x[27])<<12 |
+ uint64(x[28])<<20 |
+ uint64(x[29])<<28 |
+ uint64(x[30])<<36 |
+ uint64(x[31]&127)<<44
+}
+
+// pack sets out = x where out is the usual, little-endian form of the 5,
+// 51-bit limbs in x.
+func pack(out *[32]byte, x *[5]uint64) {
+ t := *x
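+	// freeze reduces t to its unique representative modulo 2^255 - 19 so that
+	// the byte encoding below is canonical.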
+ freeze(&t)
+
+ out[0] = byte(t[0])
+ out[1] = byte(t[0] >> 8)
+ out[2] = byte(t[0] >> 16)
+ out[3] = byte(t[0] >> 24)
+ out[4] = byte(t[0] >> 32)
+ out[5] = byte(t[0] >> 40)
+ out[6] = byte(t[0] >> 48)
+
+ out[6] ^= byte(t[1]<<3) & 0xf8
+ out[7] = byte(t[1] >> 5)
+ out[8] = byte(t[1] >> 13)
+ out[9] = byte(t[1] >> 21)
+ out[10] = byte(t[1] >> 29)
+ out[11] = byte(t[1] >> 37)
+ out[12] = byte(t[1] >> 45)
+
+ out[12] ^= byte(t[2]<<6) & 0xc0
+ out[13] = byte(t[2] >> 2)
+ out[14] = byte(t[2] >> 10)
+ out[15] = byte(t[2] >> 18)
+ out[16] = byte(t[2] >> 26)
+ out[17] = byte(t[2] >> 34)
+ out[18] = byte(t[2] >> 42)
+ out[19] = byte(t[2] >> 50)
+
+ out[19] ^= byte(t[3]<<1) & 0xfe
+ out[20] = byte(t[3] >> 7)
+ out[21] = byte(t[3] >> 15)
+ out[22] = byte(t[3] >> 23)
+ out[23] = byte(t[3] >> 31)
+ out[24] = byte(t[3] >> 39)
+ out[25] = byte(t[3] >> 47)
+
+ out[25] ^= byte(t[4]<<4) & 0xf0
+ out[26] = byte(t[4] >> 4)
+ out[27] = byte(t[4] >> 12)
+ out[28] = byte(t[4] >> 20)
+ out[29] = byte(t[4] >> 28)
+ out[30] = byte(t[4] >> 36)
+ out[31] = byte(t[4] >> 44)
+}
+
+// invert calculates r = x^-1 mod p using Fermat's little theorem.
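+// Since p = 2^255 - 19, x^-1 = x^(p-2) = x^(2^255 - 21) (mod p); the chain of
+// squarings and multiplications below computes exactly that exponent.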
+func invert(r *[5]uint64, x *[5]uint64) {
+ var z2, z9, z11, z2_5_0, z2_10_0, z2_20_0, z2_50_0, z2_100_0, t [5]uint64
+
+ square(&z2, x) /* 2 */
+ square(&t, &z2) /* 4 */
+ square(&t, &t) /* 8 */
+ mul(&z9, &t, x) /* 9 */
+ mul(&z11, &z9, &z2) /* 11 */
+ square(&t, &z11) /* 22 */
+ mul(&z2_5_0, &t, &z9) /* 2^5 - 2^0 = 31 */
+
+ square(&t, &z2_5_0) /* 2^6 - 2^1 */
+	for i := 1; i < 5; i++ { /* 2^10 - 2^5 */
+ square(&t, &t)
+ }
+ mul(&z2_10_0, &t, &z2_5_0) /* 2^10 - 2^0 */
+
+ square(&t, &z2_10_0) /* 2^11 - 2^1 */
+ for i := 1; i < 10; i++ { /* 2^20 - 2^10 */
+ square(&t, &t)
+ }
+ mul(&z2_20_0, &t, &z2_10_0) /* 2^20 - 2^0 */
+
+ square(&t, &z2_20_0) /* 2^21 - 2^1 */
+ for i := 1; i < 20; i++ { /* 2^40 - 2^20 */
+ square(&t, &t)
+ }
+ mul(&t, &t, &z2_20_0) /* 2^40 - 2^0 */
+
+ square(&t, &t) /* 2^41 - 2^1 */
+ for i := 1; i < 10; i++ { /* 2^50 - 2^10 */
+ square(&t, &t)
+ }
+ mul(&z2_50_0, &t, &z2_10_0) /* 2^50 - 2^0 */
+
+ square(&t, &z2_50_0) /* 2^51 - 2^1 */
+ for i := 1; i < 50; i++ { /* 2^100 - 2^50 */
+ square(&t, &t)
+ }
+ mul(&z2_100_0, &t, &z2_50_0) /* 2^100 - 2^0 */
+
+ square(&t, &z2_100_0) /* 2^101 - 2^1 */
+ for i := 1; i < 100; i++ { /* 2^200 - 2^100 */
+ square(&t, &t)
+ }
+ mul(&t, &t, &z2_100_0) /* 2^200 - 2^0 */
+
+ square(&t, &t) /* 2^201 - 2^1 */
+ for i := 1; i < 50; i++ { /* 2^250 - 2^50 */
+ square(&t, &t)
+ }
+ mul(&t, &t, &z2_50_0) /* 2^250 - 2^0 */
+
+ square(&t, &t) /* 2^251 - 2^1 */
+ square(&t, &t) /* 2^252 - 2^2 */
+ square(&t, &t) /* 2^253 - 2^3 */
+
+ square(&t, &t) /* 2^254 - 2^4 */
+
+ square(&t, &t) /* 2^255 - 2^5 */
+ mul(r, &t, &z11) /* 2^255 - 21 */
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/mul_amd64.s b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/mul_amd64.s
new file mode 100644
index 00000000000..e48d183ee56
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/mul_amd64.s
@@ -0,0 +1,191 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code was translated into a form compatible with 6a from the public
+// domain sources in SUPERCOP: http://bench.cr.yp.to/supercop.html
+
+// +build amd64,!gccgo,!appengine
+
+// func mul(dest, a, b *[5]uint64)
+TEXT ·mul(SB),0,$128-24
+ MOVQ dest+0(FP), DI
+ MOVQ a+8(FP), SI
+ MOVQ b+16(FP), DX
+
+ MOVQ SP,R11
+ MOVQ $31,CX
+ NOTQ CX
+ ANDQ CX,SP
+ ADDQ $32,SP
+
+ MOVQ R11,0(SP)
+ MOVQ R12,8(SP)
+ MOVQ R13,16(SP)
+ MOVQ R14,24(SP)
+ MOVQ R15,32(SP)
+ MOVQ BX,40(SP)
+ MOVQ BP,48(SP)
+ MOVQ DI,56(SP)
+ MOVQ DX,CX
+ MOVQ 24(SI),DX
+ IMUL3Q $19,DX,AX
+ MOVQ AX,64(SP)
+ MULQ 16(CX)
+ MOVQ AX,R8
+ MOVQ DX,R9
+ MOVQ 32(SI),DX
+ IMUL3Q $19,DX,AX
+ MOVQ AX,72(SP)
+ MULQ 8(CX)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 0(SI),AX
+ MULQ 0(CX)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 0(SI),AX
+ MULQ 8(CX)
+ MOVQ AX,R10
+ MOVQ DX,R11
+ MOVQ 0(SI),AX
+ MULQ 16(CX)
+ MOVQ AX,R12
+ MOVQ DX,R13
+ MOVQ 0(SI),AX
+ MULQ 24(CX)
+ MOVQ AX,R14
+ MOVQ DX,R15
+ MOVQ 0(SI),AX
+ MULQ 32(CX)
+ MOVQ AX,BX
+ MOVQ DX,BP
+ MOVQ 8(SI),AX
+ MULQ 0(CX)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 8(SI),AX
+ MULQ 8(CX)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 8(SI),AX
+ MULQ 16(CX)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 8(SI),AX
+ MULQ 24(CX)
+ ADDQ AX,BX
+ ADCQ DX,BP
+ MOVQ 8(SI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 32(CX)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 16(SI),AX
+ MULQ 0(CX)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 16(SI),AX
+ MULQ 8(CX)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 16(SI),AX
+ MULQ 16(CX)
+ ADDQ AX,BX
+ ADCQ DX,BP
+ MOVQ 16(SI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 24(CX)
+ ADDQ AX,R8
+ ADCQ DX,R9
+ MOVQ 16(SI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 32(CX)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 24(SI),AX
+ MULQ 0(CX)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ 24(SI),AX
+ MULQ 8(CX)
+ ADDQ AX,BX
+ ADCQ DX,BP
+ MOVQ 64(SP),AX
+ MULQ 24(CX)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 64(SP),AX
+ MULQ 32(CX)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 32(SI),AX
+ MULQ 0(CX)
+ ADDQ AX,BX
+ ADCQ DX,BP
+ MOVQ 72(SP),AX
+ MULQ 16(CX)
+ ADDQ AX,R10
+ ADCQ DX,R11
+ MOVQ 72(SP),AX
+ MULQ 24(CX)
+ ADDQ AX,R12
+ ADCQ DX,R13
+ MOVQ 72(SP),AX
+ MULQ 32(CX)
+ ADDQ AX,R14
+ ADCQ DX,R15
+ MOVQ ·REDMASK51(SB),SI
+ SHLQ $13,R9:R8
+ ANDQ SI,R8
+ SHLQ $13,R11:R10
+ ANDQ SI,R10
+ ADDQ R9,R10
+ SHLQ $13,R13:R12
+ ANDQ SI,R12
+ ADDQ R11,R12
+ SHLQ $13,R15:R14
+ ANDQ SI,R14
+ ADDQ R13,R14
+ SHLQ $13,BP:BX
+ ANDQ SI,BX
+ ADDQ R15,BX
+ IMUL3Q $19,BP,DX
+ ADDQ DX,R8
+ MOVQ R8,DX
+ SHRQ $51,DX
+ ADDQ R10,DX
+ MOVQ DX,CX
+ SHRQ $51,DX
+ ANDQ SI,R8
+ ADDQ R12,DX
+ MOVQ DX,R9
+ SHRQ $51,DX
+ ANDQ SI,CX
+ ADDQ R14,DX
+ MOVQ DX,AX
+ SHRQ $51,DX
+ ANDQ SI,R9
+ ADDQ BX,DX
+ MOVQ DX,R10
+ SHRQ $51,DX
+ ANDQ SI,AX
+ IMUL3Q $19,DX,DX
+ ADDQ DX,R8
+ ANDQ SI,R10
+ MOVQ R8,0(DI)
+ MOVQ CX,8(DI)
+ MOVQ R9,16(DI)
+ MOVQ AX,24(DI)
+ MOVQ R10,32(DI)
+ MOVQ 0(SP),R11
+ MOVQ 8(SP),R12
+ MOVQ 16(SP),R13
+ MOVQ 24(SP),R14
+ MOVQ 32(SP),R15
+ MOVQ 40(SP),BX
+ MOVQ 48(SP),BP
+ MOVQ R11,SP
+ MOVQ DI,AX
+ MOVQ SI,DX
+ RET
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/square_amd64.s b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/square_amd64.s
new file mode 100644
index 00000000000..78d1a50ddca
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/curve25519/square_amd64.s
@@ -0,0 +1,153 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code was translated into a form compatible with 6a from the public
+// domain sources in SUPERCOP: http://bench.cr.yp.to/supercop.html
+
+// +build amd64,!gccgo,!appengine
+
+// func square(out, in *[5]uint64)
+TEXT ·square(SB),7,$96-16
+ MOVQ out+0(FP), DI
+ MOVQ in+8(FP), SI
+
+ MOVQ SP,R11
+ MOVQ $31,CX
+ NOTQ CX
+ ANDQ CX,SP
+ ADDQ $32, SP
+
+ MOVQ R11,0(SP)
+ MOVQ R12,8(SP)
+ MOVQ R13,16(SP)
+ MOVQ R14,24(SP)
+ MOVQ R15,32(SP)
+ MOVQ BX,40(SP)
+ MOVQ BP,48(SP)
+ MOVQ 0(SI),AX
+ MULQ 0(SI)
+ MOVQ AX,CX
+ MOVQ DX,R8
+ MOVQ 0(SI),AX
+ SHLQ $1,AX
+ MULQ 8(SI)
+ MOVQ AX,R9
+ MOVQ DX,R10
+ MOVQ 0(SI),AX
+ SHLQ $1,AX
+ MULQ 16(SI)
+ MOVQ AX,R11
+ MOVQ DX,R12
+ MOVQ 0(SI),AX
+ SHLQ $1,AX
+ MULQ 24(SI)
+ MOVQ AX,R13
+ MOVQ DX,R14
+ MOVQ 0(SI),AX
+ SHLQ $1,AX
+ MULQ 32(SI)
+ MOVQ AX,R15
+ MOVQ DX,BX
+ MOVQ 8(SI),AX
+ MULQ 8(SI)
+ ADDQ AX,R11
+ ADCQ DX,R12
+ MOVQ 8(SI),AX
+ SHLQ $1,AX
+ MULQ 16(SI)
+ ADDQ AX,R13
+ ADCQ DX,R14
+ MOVQ 8(SI),AX
+ SHLQ $1,AX
+ MULQ 24(SI)
+ ADDQ AX,R15
+ ADCQ DX,BX
+ MOVQ 8(SI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 32(SI)
+ ADDQ AX,CX
+ ADCQ DX,R8
+ MOVQ 16(SI),AX
+ MULQ 16(SI)
+ ADDQ AX,R15
+ ADCQ DX,BX
+ MOVQ 16(SI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 24(SI)
+ ADDQ AX,CX
+ ADCQ DX,R8
+ MOVQ 16(SI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 32(SI)
+ ADDQ AX,R9
+ ADCQ DX,R10
+ MOVQ 24(SI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 24(SI)
+ ADDQ AX,R9
+ ADCQ DX,R10
+ MOVQ 24(SI),DX
+ IMUL3Q $38,DX,AX
+ MULQ 32(SI)
+ ADDQ AX,R11
+ ADCQ DX,R12
+ MOVQ 32(SI),DX
+ IMUL3Q $19,DX,AX
+ MULQ 32(SI)
+ ADDQ AX,R13
+ ADCQ DX,R14
+ MOVQ ·REDMASK51(SB),SI
+ SHLQ $13,R8:CX
+ ANDQ SI,CX
+ SHLQ $13,R10:R9
+ ANDQ SI,R9
+ ADDQ R8,R9
+ SHLQ $13,R12:R11
+ ANDQ SI,R11
+ ADDQ R10,R11
+ SHLQ $13,R14:R13
+ ANDQ SI,R13
+ ADDQ R12,R13
+ SHLQ $13,BX:R15
+ ANDQ SI,R15
+ ADDQ R14,R15
+ IMUL3Q $19,BX,DX
+ ADDQ DX,CX
+ MOVQ CX,DX
+ SHRQ $51,DX
+ ADDQ R9,DX
+ ANDQ SI,CX
+ MOVQ DX,R8
+ SHRQ $51,DX
+ ADDQ R11,DX
+ ANDQ SI,R8
+ MOVQ DX,R9
+ SHRQ $51,DX
+ ADDQ R13,DX
+ ANDQ SI,R9
+ MOVQ DX,AX
+ SHRQ $51,DX
+ ADDQ R15,DX
+ ANDQ SI,AX
+ MOVQ DX,R10
+ SHRQ $51,DX
+ IMUL3Q $19,DX,DX
+ ADDQ DX,CX
+ ANDQ SI,R10
+ MOVQ CX,0(DI)
+ MOVQ R8,8(DI)
+ MOVQ R9,16(DI)
+ MOVQ AX,24(DI)
+ MOVQ R10,32(DI)
+ MOVQ 0(SP),R11
+ MOVQ 8(SP),R12
+ MOVQ 16(SP),R13
+ MOVQ 24(SP),R14
+ MOVQ 32(SP),R15
+ MOVQ 40(SP),BX
+ MOVQ 48(SP),BP
+ MOVQ R11,SP
+ MOVQ DI,AX
+ MOVQ SI,DX
+ RET
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/example_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/example_test.go
new file mode 100644
index 00000000000..df843951291
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/example_test.go
@@ -0,0 +1,61 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package hkdf_test
+
+import (
+ "bytes"
+ "crypto/rand"
+ "crypto/sha256"
+ "fmt"
+ "golang.org/x/crypto/hkdf"
+ "io"
+)
+
+// Usage example that expands one master key into three other cryptographically
+// secure keys.
+func Example_usage() {
+ // Underlying hash function to use
+ hash := sha256.New
+
+ // Cryptographically secure master key.
+ master := []byte{0x00, 0x01, 0x02, 0x03} // i.e. NOT this.
+
+	// Non-secret salt, optional (can be nil).
+	// Recommended: a hash-length random value.
+ salt := make([]byte, hash().Size())
+ n, err := io.ReadFull(rand.Reader, salt)
+ if n != len(salt) || err != nil {
+ fmt.Println("error:", err)
+ return
+ }
+
+	// Non-secret, context-specific info, optional (can be nil).
+	// Note that it is independent of the master key.
+ info := []byte{0x03, 0x14, 0x15, 0x92, 0x65}
+
+ // Create the key derivation function
+ hkdf := hkdf.New(hash, master, salt, info)
+
+ // Generate the required keys
+ keys := make([][]byte, 3)
+ for i := 0; i < len(keys); i++ {
+ keys[i] = make([]byte, 24)
+ n, err := io.ReadFull(hkdf, keys[i])
+ if n != len(keys[i]) || err != nil {
+ fmt.Println("error:", err)
+ return
+ }
+ }
+
+	// keys should now contain three 192-bit derived keys.
+ for i := 1; i <= len(keys); i++ {
+ fmt.Printf("Key #%d: %v\n", i, !bytes.Equal(keys[i-1], make([]byte, 24)))
+ }
+
+ // Output:
+ // Key #1: true
+ // Key #2: true
+ // Key #3: true
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/hkdf.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/hkdf.go
new file mode 100644
index 00000000000..5bc246355a2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/hkdf.go
@@ -0,0 +1,75 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package hkdf implements the HMAC-based Extract-and-Expand Key Derivation
+// Function (HKDF) as defined in RFC 5869.
+//
+// HKDF is a cryptographic key derivation function (KDF) with the goal of
+// expanding limited input keying material into one or more cryptographically
+// strong secret keys.
+//
+// RFC 5869: https://tools.ietf.org/html/rfc5869
+package hkdf // import "golang.org/x/crypto/hkdf"
+
+import (
+ "crypto/hmac"
+ "errors"
+ "hash"
+ "io"
+)
+
+type hkdf struct {
+ expander hash.Hash
+ size int
+
+ info []byte
+ counter byte
+
+ prev []byte
+ cache []byte
+}
+
+func (f *hkdf) Read(p []byte) (int, error) {
+ // Check whether enough data can be generated
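+	// RFC 5869 limits the total output to 255*HashLen bytes; the counter starts
+	// at 1, so (255 - counter + 1) further blocks of f.size bytes remain.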
+ need := len(p)
+ remains := len(f.cache) + int(255-f.counter+1)*f.size
+ if remains < need {
+ return 0, errors.New("hkdf: entropy limit reached")
+ }
+ // Read from the cache, if enough data is present
+ n := copy(p, f.cache)
+ p = p[n:]
+
+ // Fill the buffer
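+	// Each iteration computes T(i) = HMAC-Hash(PRK, T(i-1) | info | i), the
+	// expand step from RFC 5869, section 2.3.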
+ for len(p) > 0 {
+ f.expander.Reset()
+ f.expander.Write(f.prev)
+ f.expander.Write(f.info)
+ f.expander.Write([]byte{f.counter})
+ f.prev = f.expander.Sum(f.prev[:0])
+ f.counter++
+
+ // Copy the new batch into p
+ f.cache = f.prev
+ n = copy(p, f.cache)
+ p = p[n:]
+ }
+ // Save leftovers for next run
+ f.cache = f.cache[n:]
+
+ return need, nil
+}
+
+// New returns a new HKDF using the given hash, the secret keying material to expand
+// and optional salt and info fields.
+func New(hash func() hash.Hash, secret, salt, info []byte) io.Reader {
+ if salt == nil {
+ salt = make([]byte, hash().Size())
+ }
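+	// Extract step (RFC 5869, section 2.2): PRK = HMAC-Hash(salt, secret).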
+ extractor := hmac.New(hash, salt)
+ extractor.Write(secret)
+ prk := extractor.Sum(nil)
+
+ return &hkdf{hmac.New(hash, prk), extractor.Size(), info, 1, nil, nil}
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/hkdf_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/hkdf_test.go
new file mode 100644
index 00000000000..cee659bcdb9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/hkdf/hkdf_test.go
@@ -0,0 +1,370 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+package hkdf
+
+import (
+ "bytes"
+ "crypto/md5"
+ "crypto/sha1"
+ "crypto/sha256"
+ "crypto/sha512"
+ "hash"
+ "io"
+ "testing"
+)
+
+type hkdfTest struct {
+ hash func() hash.Hash
+ master []byte
+ salt []byte
+ info []byte
+ out []byte
+}
+
+var hkdfTests = []hkdfTest{
+ // Tests from RFC 5869
+ {
+ sha256.New,
+ []byte{
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ },
+ []byte{
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ 0x08, 0x09, 0x0a, 0x0b, 0x0c,
+ },
+ []byte{
+ 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
+ 0xf8, 0xf9,
+ },
+ []byte{
+ 0x3c, 0xb2, 0x5f, 0x25, 0xfa, 0xac, 0xd5, 0x7a,
+ 0x90, 0x43, 0x4f, 0x64, 0xd0, 0x36, 0x2f, 0x2a,
+ 0x2d, 0x2d, 0x0a, 0x90, 0xcf, 0x1a, 0x5a, 0x4c,
+ 0x5d, 0xb0, 0x2d, 0x56, 0xec, 0xc4, 0xc5, 0xbf,
+ 0x34, 0x00, 0x72, 0x08, 0xd5, 0xb8, 0x87, 0x18,
+ 0x58, 0x65,
+ },
+ },
+ {
+ sha256.New,
+ []byte{
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+ 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+ 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
+ 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
+ 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
+ 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
+ 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
+ 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
+ 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
+ },
+ []byte{
+ 0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67,
+ 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f,
+ 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
+ 0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
+ 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+ 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
+ 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
+ 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
+ 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
+ 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
+ },
+ []byte{
+ 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
+ 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
+ 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+ 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf,
+ 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
+ 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf,
+ 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
+ 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
+ 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
+ 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
+ },
+ []byte{
+ 0xb1, 0x1e, 0x39, 0x8d, 0xc8, 0x03, 0x27, 0xa1,
+ 0xc8, 0xe7, 0xf7, 0x8c, 0x59, 0x6a, 0x49, 0x34,
+ 0x4f, 0x01, 0x2e, 0xda, 0x2d, 0x4e, 0xfa, 0xd8,
+ 0xa0, 0x50, 0xcc, 0x4c, 0x19, 0xaf, 0xa9, 0x7c,
+ 0x59, 0x04, 0x5a, 0x99, 0xca, 0xc7, 0x82, 0x72,
+ 0x71, 0xcb, 0x41, 0xc6, 0x5e, 0x59, 0x0e, 0x09,
+ 0xda, 0x32, 0x75, 0x60, 0x0c, 0x2f, 0x09, 0xb8,
+ 0x36, 0x77, 0x93, 0xa9, 0xac, 0xa3, 0xdb, 0x71,
+ 0xcc, 0x30, 0xc5, 0x81, 0x79, 0xec, 0x3e, 0x87,
+ 0xc1, 0x4c, 0x01, 0xd5, 0xc1, 0xf3, 0x43, 0x4f,
+ 0x1d, 0x87,
+ },
+ },
+ {
+ sha256.New,
+ []byte{
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ },
+ []byte{},
+ []byte{},
+ []byte{
+ 0x8d, 0xa4, 0xe7, 0x75, 0xa5, 0x63, 0xc1, 0x8f,
+ 0x71, 0x5f, 0x80, 0x2a, 0x06, 0x3c, 0x5a, 0x31,
+ 0xb8, 0xa1, 0x1f, 0x5c, 0x5e, 0xe1, 0x87, 0x9e,
+ 0xc3, 0x45, 0x4e, 0x5f, 0x3c, 0x73, 0x8d, 0x2d,
+ 0x9d, 0x20, 0x13, 0x95, 0xfa, 0xa4, 0xb6, 0x1a,
+ 0x96, 0xc8,
+ },
+ },
+ {
+ sha1.New,
+ []byte{
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ 0x0b, 0x0b, 0x0b,
+ },
+ []byte{
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ 0x08, 0x09, 0x0a, 0x0b, 0x0c,
+ },
+ []byte{
+ 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
+ 0xf8, 0xf9,
+ },
+ []byte{
+ 0x08, 0x5a, 0x01, 0xea, 0x1b, 0x10, 0xf3, 0x69,
+ 0x33, 0x06, 0x8b, 0x56, 0xef, 0xa5, 0xad, 0x81,
+ 0xa4, 0xf1, 0x4b, 0x82, 0x2f, 0x5b, 0x09, 0x15,
+ 0x68, 0xa9, 0xcd, 0xd4, 0xf1, 0x55, 0xfd, 0xa2,
+ 0xc2, 0x2e, 0x42, 0x24, 0x78, 0xd3, 0x05, 0xf3,
+ 0xf8, 0x96,
+ },
+ },
+ {
+ sha1.New,
+ []byte{
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+ 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+ 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
+ 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
+ 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
+ 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
+ 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
+ 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
+ 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
+ },
+ []byte{
+ 0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67,
+ 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f,
+ 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
+ 0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
+ 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+ 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
+ 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
+ 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
+ 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
+ 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
+ },
+ []byte{
+ 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
+ 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
+ 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+ 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf,
+ 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
+ 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf,
+ 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
+ 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
+ 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
+ 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
+ },
+ []byte{
+ 0x0b, 0xd7, 0x70, 0xa7, 0x4d, 0x11, 0x60, 0xf7,
+ 0xc9, 0xf1, 0x2c, 0xd5, 0x91, 0x2a, 0x06, 0xeb,
+ 0xff, 0x6a, 0xdc, 0xae, 0x89, 0x9d, 0x92, 0x19,
+ 0x1f, 0xe4, 0x30, 0x56, 0x73, 0xba, 0x2f, 0xfe,
+ 0x8f, 0xa3, 0xf1, 0xa4, 0xe5, 0xad, 0x79, 0xf3,
+ 0xf3, 0x34, 0xb3, 0xb2, 0x02, 0xb2, 0x17, 0x3c,
+ 0x48, 0x6e, 0xa3, 0x7c, 0xe3, 0xd3, 0x97, 0xed,
+ 0x03, 0x4c, 0x7f, 0x9d, 0xfe, 0xb1, 0x5c, 0x5e,
+ 0x92, 0x73, 0x36, 0xd0, 0x44, 0x1f, 0x4c, 0x43,
+ 0x00, 0xe2, 0xcf, 0xf0, 0xd0, 0x90, 0x0b, 0x52,
+ 0xd3, 0xb4,
+ },
+ },
+ {
+ sha1.New,
+ []byte{
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ },
+ []byte{},
+ []byte{},
+ []byte{
+ 0x0a, 0xc1, 0xaf, 0x70, 0x02, 0xb3, 0xd7, 0x61,
+ 0xd1, 0xe5, 0x52, 0x98, 0xda, 0x9d, 0x05, 0x06,
+ 0xb9, 0xae, 0x52, 0x05, 0x72, 0x20, 0xa3, 0x06,
+ 0xe0, 0x7b, 0x6b, 0x87, 0xe8, 0xdf, 0x21, 0xd0,
+ 0xea, 0x00, 0x03, 0x3d, 0xe0, 0x39, 0x84, 0xd3,
+ 0x49, 0x18,
+ },
+ },
+ {
+ sha1.New,
+ []byte{
+ 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
+ 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
+ 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
+ },
+ nil,
+ []byte{},
+ []byte{
+ 0x2c, 0x91, 0x11, 0x72, 0x04, 0xd7, 0x45, 0xf3,
+ 0x50, 0x0d, 0x63, 0x6a, 0x62, 0xf6, 0x4f, 0x0a,
+ 0xb3, 0xba, 0xe5, 0x48, 0xaa, 0x53, 0xd4, 0x23,
+ 0xb0, 0xd1, 0xf2, 0x7e, 0xbb, 0xa6, 0xf5, 0xe5,
+ 0x67, 0x3a, 0x08, 0x1d, 0x70, 0xcc, 0xe7, 0xac,
+ 0xfc, 0x48,
+ },
+ },
+}
+
+func TestHKDF(t *testing.T) {
+ for i, tt := range hkdfTests {
+ hkdf := New(tt.hash, tt.master, tt.salt, tt.info)
+ out := make([]byte, len(tt.out))
+
+ n, err := io.ReadFull(hkdf, out)
+ if n != len(tt.out) || err != nil {
+ t.Errorf("test %d: not enough output bytes: %d.", i, n)
+ }
+
+ if !bytes.Equal(out, tt.out) {
+ t.Errorf("test %d: incorrect output: have %v, need %v.", i, out, tt.out)
+ }
+ }
+}
+
+func TestHKDFMultiRead(t *testing.T) {
+ for i, tt := range hkdfTests {
+ hkdf := New(tt.hash, tt.master, tt.salt, tt.info)
+ out := make([]byte, len(tt.out))
+
+ for b := 0; b < len(tt.out); b++ {
+ n, err := io.ReadFull(hkdf, out[b:b+1])
+ if n != 1 || err != nil {
+			t.Errorf("test %d.%d: not enough output bytes: have %d, need %d.", i, b, n, len(tt.out))
+ }
+ }
+
+ if !bytes.Equal(out, tt.out) {
+ t.Errorf("test %d: incorrect output: have %v, need %v.", i, out, tt.out)
+ }
+ }
+}
+
+func TestHKDFLimit(t *testing.T) {
+ hash := sha1.New
+ master := []byte{0x00, 0x01, 0x02, 0x03}
+ info := []byte{}
+
+ hkdf := New(hash, master, nil, info)
+ limit := hash().Size() * 255
+ out := make([]byte, limit)
+
+ // The maximum output bytes should be extractable
+ n, err := io.ReadFull(hkdf, out)
+ if n != limit || err != nil {
+ t.Errorf("not enough output bytes: %d, %v.", n, err)
+ }
+
+ // Reading one more should fail
+ n, err = io.ReadFull(hkdf, make([]byte, 1))
+ if n > 0 || err == nil {
+ t.Errorf("key expansion overflowed: n = %d, err = %v", n, err)
+ }
+}
+
+func Benchmark16ByteMD5Single(b *testing.B) {
+ benchmarkHKDFSingle(md5.New, 16, b)
+}
+
+func Benchmark20ByteSHA1Single(b *testing.B) {
+ benchmarkHKDFSingle(sha1.New, 20, b)
+}
+
+func Benchmark32ByteSHA256Single(b *testing.B) {
+ benchmarkHKDFSingle(sha256.New, 32, b)
+}
+
+func Benchmark64ByteSHA512Single(b *testing.B) {
+ benchmarkHKDFSingle(sha512.New, 64, b)
+}
+
+func Benchmark8ByteMD5Stream(b *testing.B) {
+ benchmarkHKDFStream(md5.New, 8, b)
+}
+
+func Benchmark16ByteMD5Stream(b *testing.B) {
+ benchmarkHKDFStream(md5.New, 16, b)
+}
+
+func Benchmark8ByteSHA1Stream(b *testing.B) {
+ benchmarkHKDFStream(sha1.New, 8, b)
+}
+
+func Benchmark20ByteSHA1Stream(b *testing.B) {
+ benchmarkHKDFStream(sha1.New, 20, b)
+}
+
+func Benchmark8ByteSHA256Stream(b *testing.B) {
+ benchmarkHKDFStream(sha256.New, 8, b)
+}
+
+func Benchmark32ByteSHA256Stream(b *testing.B) {
+ benchmarkHKDFStream(sha256.New, 32, b)
+}
+
+func Benchmark8ByteSHA512Stream(b *testing.B) {
+ benchmarkHKDFStream(sha512.New, 8, b)
+}
+
+func Benchmark64ByteSHA512Stream(b *testing.B) {
+ benchmarkHKDFStream(sha512.New, 64, b)
+}
+
+func benchmarkHKDFSingle(hasher func() hash.Hash, block int, b *testing.B) {
+ master := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}
+ salt := []byte{0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17}
+ info := []byte{0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27}
+ out := make([]byte, block)
+
+ b.SetBytes(int64(block))
+ b.ResetTimer()
+
+ for i := 0; i < b.N; i++ {
+ hkdf := New(hasher, master, salt, info)
+ io.ReadFull(hkdf, out)
+ }
+}
+
+func benchmarkHKDFStream(hasher func() hash.Hash, block int, b *testing.B) {
+ master := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}
+ salt := []byte{0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17}
+ info := []byte{0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27}
+ out := make([]byte, block)
+
+ b.SetBytes(int64(block))
+ b.ResetTimer()
+
+ hkdf := New(hasher, master, salt, info)
+ for i := 0; i < b.N; i++ {
+ _, err := io.ReadFull(hkdf, out)
+ if err != nil {
+ hkdf = New(hasher, master, salt, info)
+ i--
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4.go
new file mode 100644
index 00000000000..6d9ba9e5f33
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4.go
@@ -0,0 +1,118 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package md4 implements the MD4 hash algorithm as defined in RFC 1320.
+package md4 // import "golang.org/x/crypto/md4"
+
+import (
+ "crypto"
+ "hash"
+)
+
+func init() {
+ crypto.RegisterHash(crypto.MD4, New)
+}
+
+// The size of an MD4 checksum in bytes.
+const Size = 16
+
+// The blocksize of MD4 in bytes.
+const BlockSize = 64
+
+const (
+ _Chunk = 64
+ _Init0 = 0x67452301
+ _Init1 = 0xEFCDAB89
+ _Init2 = 0x98BADCFE
+ _Init3 = 0x10325476
+)
+
+// digest represents the partial evaluation of a checksum.
+type digest struct {
+ s [4]uint32
+ x [_Chunk]byte
+ nx int
+ len uint64
+}
+
+func (d *digest) Reset() {
+ d.s[0] = _Init0
+ d.s[1] = _Init1
+ d.s[2] = _Init2
+ d.s[3] = _Init3
+ d.nx = 0
+ d.len = 0
+}
+
+// New returns a new hash.Hash computing the MD4 checksum.
+func New() hash.Hash {
+ d := new(digest)
+ d.Reset()
+ return d
+}
+
+func (d *digest) Size() int { return Size }
+
+func (d *digest) BlockSize() int { return BlockSize }
+
+func (d *digest) Write(p []byte) (nn int, err error) {
+ nn = len(p)
+ d.len += uint64(nn)
+ if d.nx > 0 {
+ n := len(p)
+ if n > _Chunk-d.nx {
+ n = _Chunk - d.nx
+ }
+ for i := 0; i < n; i++ {
+ d.x[d.nx+i] = p[i]
+ }
+ d.nx += n
+ if d.nx == _Chunk {
+ _Block(d, d.x[0:])
+ d.nx = 0
+ }
+ p = p[n:]
+ }
+ n := _Block(d, p)
+ p = p[n:]
+ if len(p) > 0 {
+ d.nx = copy(d.x[:], p)
+ }
+ return
+}
+
+func (d0 *digest) Sum(in []byte) []byte {
+ // Make a copy of d0, so that caller can keep writing and summing.
+ d := new(digest)
+ *d = *d0
+
+ // Padding. Add a 1 bit and 0 bits until 56 bytes mod 64.
+ len := d.len
+ var tmp [64]byte
+ tmp[0] = 0x80
+ if len%64 < 56 {
+ d.Write(tmp[0 : 56-len%64])
+ } else {
+ d.Write(tmp[0 : 64+56-len%64])
+ }
+
+ // Length in bits.
+ len <<= 3
+ for i := uint(0); i < 8; i++ {
+ tmp[i] = byte(len >> (8 * i))
+ }
+ d.Write(tmp[0:8])
+
+ if d.nx != 0 {
+ panic("d.nx != 0")
+ }
+
+ for _, s := range d.s {
+ in = append(in, byte(s>>0))
+ in = append(in, byte(s>>8))
+ in = append(in, byte(s>>16))
+ in = append(in, byte(s>>24))
+ }
+ return in
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4_test.go
new file mode 100644
index 00000000000..b56edd7875d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4_test.go
@@ -0,0 +1,71 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package md4
+
+import (
+ "fmt"
+ "io"
+ "testing"
+)
+
+type md4Test struct {
+ out string
+ in string
+}
+
+var golden = []md4Test{
+ {"31d6cfe0d16ae931b73c59d7e0c089c0", ""},
+ {"bde52cb31de33e46245e05fbdbd6fb24", "a"},
+ {"ec388dd78999dfc7cf4632465693b6bf", "ab"},
+ {"a448017aaf21d8525fc10ae87aa6729d", "abc"},
+ {"41decd8f579255c5200f86a4bb3ba740", "abcd"},
+ {"9803f4a34e8eb14f96adba49064a0c41", "abcde"},
+ {"804e7f1c2586e50b49ac65db5b645131", "abcdef"},
+ {"752f4adfe53d1da0241b5bc216d098fc", "abcdefg"},
+ {"ad9daf8d49d81988590a6f0e745d15dd", "abcdefgh"},
+ {"1e4e28b05464316b56402b3815ed2dfd", "abcdefghi"},
+ {"dc959c6f5d6f9e04e4380777cc964b3d", "abcdefghij"},
+ {"1b5701e265778898ef7de5623bbe7cc0", "Discard medicine more than two years old."},
+ {"d7f087e090fe7ad4a01cb59dacc9a572", "He who has a shady past knows that nice guys finish last."},
+ {"a6f8fd6df617c72837592fc3570595c9", "I wouldn't marry him with a ten foot pole."},
+ {"c92a84a9526da8abc240c05d6b1a1ce0", "Free! Free!/A trip/to Mars/for 900/empty jars/Burma Shave"},
+ {"f6013160c4dcb00847069fee3bb09803", "The days of the digital watch are numbered. -Tom Stoppard"},
+ {"2c3bb64f50b9107ed57640fe94bec09f", "Nepal premier won't resign."},
+ {"45b7d8a32c7806f2f7f897332774d6e4", "For every action there is an equal and opposite government program."},
+ {"b5b4f9026b175c62d7654bdc3a1cd438", "His money is twice tainted: 'taint yours and 'taint mine."},
+ {"caf44e80f2c20ce19b5ba1cab766e7bd", "There is no reason for any individual to have a computer in their home. -Ken Olsen, 1977"},
+ {"191fae6707f496aa54a6bce9f2ecf74d", "It's a tiny change to the code and not completely disgusting. - Bob Manchek"},
+ {"9ddc753e7a4ccee6081cd1b45b23a834", "size: a.out: bad magic"},
+ {"8d050f55b1cadb9323474564be08a521", "The major problem is with sendmail. -Mark Horton"},
+ {"ad6e2587f74c3e3cc19146f6127fa2e3", "Give me a rock, paper and scissors and I will move the world. CCFestoon"},
+ {"1d616d60a5fabe85589c3f1566ca7fca", "If the enemy is within range, then so are you."},
+ {"aec3326a4f496a2ced65a1963f84577f", "It's well we cannot hear the screams/That we create in others' dreams."},
+ {"77b4fd762d6b9245e61c50bf6ebf118b", "You remind me of a TV show, but that's all right: I watch it anyway."},
+ {"e8f48c726bae5e516f6ddb1a4fe62438", "C is as portable as Stonehedge!!"},
+ {"a3a84366e7219e887423b01f9be7166e", "Even if I could be Shakespeare, I think I should still choose to be Faraday. - A. Huxley"},
+ {"a6b7aa35157e984ef5d9b7f32e5fbb52", "The fugacity of a constituent in a mixture of gases at a given temperature is proportional to its mole fraction. Lewis-Randall Rule"},
+ {"75661f0545955f8f9abeeb17845f3fd6", "How can you write a big system without C++? -Paul Glick"},
+}
+
+func TestGolden(t *testing.T) {
+ for i := 0; i < len(golden); i++ {
+ g := golden[i]
+ c := New()
+ for j := 0; j < 3; j++ {
+ if j < 2 {
+ io.WriteString(c, g.in)
+ } else {
+ io.WriteString(c, g.in[0:len(g.in)/2])
+ c.Sum(nil)
+ io.WriteString(c, g.in[len(g.in)/2:])
+ }
+ s := fmt.Sprintf("%x", c.Sum(nil))
+ if s != g.out {
+ t.Fatalf("md4[%d](%s) = %s want %s", j, g.in, s, g.out)
+ }
+ c.Reset()
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4block.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4block.go
new file mode 100644
index 00000000000..3fed475f3f6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/md4/md4block.go
@@ -0,0 +1,89 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// MD4 block step.
+// In its own file so that a faster assembly or C version
+// can be substituted easily.
+
+package md4
+
+var shift1 = []uint{3, 7, 11, 19}
+var shift2 = []uint{3, 5, 9, 13}
+var shift3 = []uint{3, 9, 11, 15}
+
+var xIndex2 = []uint{0, 4, 8, 12, 1, 5, 9, 13, 2, 6, 10, 14, 3, 7, 11, 15}
+var xIndex3 = []uint{0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15}
+
+func _Block(dig *digest, p []byte) int {
+ a := dig.s[0]
+ b := dig.s[1]
+ c := dig.s[2]
+ d := dig.s[3]
+ n := 0
+ var X [16]uint32
+ for len(p) >= _Chunk {
+ aa, bb, cc, dd := a, b, c, d
+
+ j := 0
+ for i := 0; i < 16; i++ {
+ X[i] = uint32(p[j]) | uint32(p[j+1])<<8 | uint32(p[j+2])<<16 | uint32(p[j+3])<<24
+ j += 4
+ }
+
+ // If this needs to be made faster in the future,
+ // the usual trick is to unroll each of these
+ // loops by a factor of 4; that lets you replace
+ // the shift[] lookups with constants and,
+ // with suitable variable renaming in each
+ // unrolled body, delete the a, b, c, d = d, a, b, c
+ // (or you can let the optimizer do the renaming).
+ //
+ // The index variables are uint so that % by a power
+ // of two can be optimized easily by a compiler.
+
+ // Round 1.
+ for i := uint(0); i < 16; i++ {
+ x := i
+ s := shift1[i%4]
+ f := ((c ^ d) & b) ^ d
+ a += f + X[x]
+ a = a<<s | a>>(32-s)
+ a, b, c, d = d, a, b, c
+ }
+
+ // Round 2.
+ for i := uint(0); i < 16; i++ {
+ x := xIndex2[i]
+ s := shift2[i%4]
+ g := (b & c) | (b & d) | (c & d)
+ a += g + X[x] + 0x5a827999
+ a = a<<s | a>>(32-s)
+ a, b, c, d = d, a, b, c
+ }
+
+ // Round 3.
+ for i := uint(0); i < 16; i++ {
+ x := xIndex3[i]
+ s := shift3[i%4]
+ h := b ^ c ^ d
+ a += h + X[x] + 0x6ed9eba1
+ a = a<<s | a>>(32-s)
+ a, b, c, d = d, a, b, c
+ }
+
+ a += aa
+ b += bb
+ c += cc
+ d += dd
+
+ p = p[_Chunk:]
+ n += _Chunk
+ }
+
+ dig.s[0] = a
+ dig.s[1] = b
+ dig.s[2] = c
+ dig.s[3] = d
+ return n
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/box/box.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/box/box.go
new file mode 100644
index 00000000000..ca48a6dbff9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/box/box.go
@@ -0,0 +1,85 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package box authenticates and encrypts messages using public-key cryptography.
+
+Box uses Curve25519, XSalsa20 and Poly1305 to encrypt and authenticate
+messages. The length of messages is not hidden.
+
+It is the caller's responsibility to ensure the uniqueness of nonces—for
+example, by using nonce 1 for the first message, nonce 2 for the second
+message, etc. Nonces are long enough that randomly generated nonces have
+negligible risk of collision.
+
+This package is interoperable with NaCl: http://nacl.cr.yp.to/box.html.
+*/
+package box // import "golang.org/x/crypto/nacl/box"
+
+import (
+ "golang.org/x/crypto/curve25519"
+ "golang.org/x/crypto/nacl/secretbox"
+ "golang.org/x/crypto/salsa20/salsa"
+ "io"
+)
+
+// Overhead is the number of bytes of overhead when boxing a message.
+const Overhead = secretbox.Overhead
+
+// GenerateKey generates a new public/private key pair suitable for use with
+// Seal and Open.
+func GenerateKey(rand io.Reader) (publicKey, privateKey *[32]byte, err error) {
+ publicKey = new([32]byte)
+ privateKey = new([32]byte)
+ _, err = io.ReadFull(rand, privateKey[:])
+ if err != nil {
+ publicKey = nil
+ privateKey = nil
+ return
+ }
+
+ curve25519.ScalarBaseMult(publicKey, privateKey)
+ return
+}
+
+var zeros [16]byte
+
+// Precompute calculates the shared key between peersPublicKey and privateKey
+// and writes it to sharedKey. The shared key can be used with
+// OpenAfterPrecomputation and SealAfterPrecomputation to speed up processing
+// when using the same pair of keys repeatedly.
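+// The shared key is HSalsa20(ScalarMult(privateKey, peersPublicKey)), which
+// matches NaCl's crypto_box_beforenm.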
+func Precompute(sharedKey, peersPublicKey, privateKey *[32]byte) {
+ curve25519.ScalarMult(sharedKey, privateKey, peersPublicKey)
+ salsa.HSalsa20(sharedKey, &zeros, sharedKey, &salsa.Sigma)
+}
+
+// Seal appends an encrypted and authenticated copy of message to out, which
+// will be Overhead bytes longer than the original and must not overlap. The
+// nonce must be unique for each distinct message for a given pair of keys.
+func Seal(out, message []byte, nonce *[24]byte, peersPublicKey, privateKey *[32]byte) []byte {
+ var sharedKey [32]byte
+ Precompute(&sharedKey, peersPublicKey, privateKey)
+ return secretbox.Seal(out, message, nonce, &sharedKey)
+}
+
+// SealAfterPrecomputation performs the same actions as Seal, but takes a
+// shared key as generated by Precompute.
+func SealAfterPrecomputation(out, message []byte, nonce *[24]byte, sharedKey *[32]byte) []byte {
+ return secretbox.Seal(out, message, nonce, sharedKey)
+}
+
+// Open authenticates and decrypts a box produced by Seal and appends the
+// message to out, which must not overlap box. The output will be Overhead
+// bytes smaller than box.
+func Open(out, box []byte, nonce *[24]byte, peersPublicKey, privateKey *[32]byte) ([]byte, bool) {
+ var sharedKey [32]byte
+ Precompute(&sharedKey, peersPublicKey, privateKey)
+ return secretbox.Open(out, box, nonce, &sharedKey)
+}
+
+// OpenAfterPrecomputation performs the same actions as Open, but takes a
+// shared key as generated by Precompute.
+func OpenAfterPrecomputation(out, box []byte, nonce *[24]byte, sharedKey *[32]byte) ([]byte, bool) {
+ return secretbox.Open(out, box, nonce, sharedKey)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/box/box_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/box/box_test.go
new file mode 100644
index 00000000000..481ade28aec
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/box/box_test.go
@@ -0,0 +1,78 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package box
+
+import (
+ "bytes"
+ "crypto/rand"
+ "encoding/hex"
+ "testing"
+
+ "golang.org/x/crypto/curve25519"
+)
+
+func TestSealOpen(t *testing.T) {
+ publicKey1, privateKey1, _ := GenerateKey(rand.Reader)
+ publicKey2, privateKey2, _ := GenerateKey(rand.Reader)
+
+ if *privateKey1 == *privateKey2 {
+ t.Fatalf("private keys are equal!")
+ }
+ if *publicKey1 == *publicKey2 {
+ t.Fatalf("public keys are equal!")
+ }
+ message := []byte("test message")
+ var nonce [24]byte
+
+ box := Seal(nil, message, &nonce, publicKey1, privateKey2)
+ opened, ok := Open(nil, box, &nonce, publicKey2, privateKey1)
+ if !ok {
+ t.Fatalf("failed to open box")
+ }
+
+ if !bytes.Equal(opened, message) {
+ t.Fatalf("got %x, want %x", opened, message)
+ }
+
+ for i := range box {
+ box[i] ^= 0x40
+ _, ok := Open(nil, box, &nonce, publicKey2, privateKey1)
+ if ok {
+ t.Fatalf("opened box with byte %d corrupted", i)
+ }
+ box[i] ^= 0x40
+ }
+}
+
+func TestBox(t *testing.T) {
+ var privateKey1, privateKey2 [32]byte
+ for i := range privateKey1[:] {
+ privateKey1[i] = 1
+ }
+ for i := range privateKey2[:] {
+ privateKey2[i] = 2
+ }
+
+ var publicKey1 [32]byte
+ curve25519.ScalarBaseMult(&publicKey1, &privateKey1)
+ var message [64]byte
+ for i := range message[:] {
+ message[i] = 3
+ }
+
+ var nonce [24]byte
+ for i := range nonce[:] {
+ nonce[i] = 4
+ }
+
+ box := Seal(nil, message[:], &nonce, &publicKey1, &privateKey2)
+
+ // expected was generated using the C implementation of NaCl.
+ expected, _ := hex.DecodeString("78ea30b19d2341ebbdba54180f821eec265cf86312549bea8a37652a8bb94f07b78a73ed1708085e6ddd0e943bbdeb8755079a37eb31d86163ce241164a47629c0539f330b4914cd135b3855bc2a2dfc")
+
+ if !bytes.Equal(box, expected) {
+ t.Fatalf("box didn't match, got\n%x\n, expected\n%x", box, expected)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/secretbox/secretbox.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/secretbox/secretbox.go
new file mode 100644
index 00000000000..dbf31bbf40f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/secretbox/secretbox.go
@@ -0,0 +1,149 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package secretbox encrypts and authenticates small messages.
+
+Secretbox uses XSalsa20 and Poly1305 to encrypt and authenticate messages with
+secret-key cryptography. The length of messages is not hidden.
+
+It is the caller's responsibility to ensure the uniqueness of nonces—for
+example, by using nonce 1 for the first message, nonce 2 for the second
+message, etc. Nonces are long enough that randomly generated nonces have
+negligible risk of collision.
+
+This package is interoperable with NaCl: http://nacl.cr.yp.to/secretbox.html.
+*/
+package secretbox // import "golang.org/x/crypto/nacl/secretbox"
+
+import (
+ "golang.org/x/crypto/poly1305"
+ "golang.org/x/crypto/salsa20/salsa"
+)
+
+// Overhead is the number of bytes of overhead when boxing a message.
+const Overhead = poly1305.TagSize
+
+// setup produces a sub-key and Salsa20 counter given a nonce and key.
+func setup(subKey *[32]byte, counter *[16]byte, nonce *[24]byte, key *[32]byte) {
+ // We use XSalsa20 for encryption so first we need to generate a
+ // key and nonce with HSalsa20.
+ var hNonce [16]byte
+ copy(hNonce[:], nonce[:])
+ salsa.HSalsa20(subKey, &hNonce, key, &salsa.Sigma)
+
+ // The final 8 bytes of the original nonce form the new nonce.
+ copy(counter[:], nonce[16:])
+}
+
+// sliceForAppend takes a slice and a requested number of bytes. It returns a
+// slice with the contents of the given slice followed by that many bytes and a
+// second slice that aliases into it and contains only the extra bytes. If the
+// original slice has sufficient capacity then no allocation is performed.
+func sliceForAppend(in []byte, n int) (head, tail []byte) {
+ if total := len(in) + n; cap(in) >= total {
+ head = in[:total]
+ } else {
+ head = make([]byte, total)
+ copy(head, in)
+ }
+ tail = head[len(in):]
+ return
+}
+
+// Seal appends an encrypted and authenticated copy of message to out, which
+// must not overlap message. The key and nonce pair must be unique for each
+// distinct message and the output will be Overhead bytes longer than message.
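+// The appended data is the 16-byte Poly1305 tag followed by the ciphertext.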
+func Seal(out, message []byte, nonce *[24]byte, key *[32]byte) []byte {
+ var subKey [32]byte
+ var counter [16]byte
+ setup(&subKey, &counter, nonce, key)
+
+ // The Poly1305 key is generated by encrypting 32 bytes of zeros. Since
+ // Salsa20 works with 64-byte blocks, we also generate 32 bytes of
+ // keystream as a side effect.
+ var firstBlock [64]byte
+ salsa.XORKeyStream(firstBlock[:], firstBlock[:], &counter, &subKey)
+
+ var poly1305Key [32]byte
+ copy(poly1305Key[:], firstBlock[:])
+
+ ret, out := sliceForAppend(out, len(message)+poly1305.TagSize)
+
+ // We XOR up to 32 bytes of message with the keystream generated from
+ // the first block.
+ firstMessageBlock := message
+ if len(firstMessageBlock) > 32 {
+ firstMessageBlock = firstMessageBlock[:32]
+ }
+
+ tagOut := out
+ out = out[poly1305.TagSize:]
+ for i, x := range firstMessageBlock {
+ out[i] = firstBlock[32+i] ^ x
+ }
+ message = message[len(firstMessageBlock):]
+ ciphertext := out
+ out = out[len(firstMessageBlock):]
+
+ // Now encrypt the rest.
+ counter[8] = 1
+ salsa.XORKeyStream(out, message, &counter, &subKey)
+
+ var tag [poly1305.TagSize]byte
+ poly1305.Sum(&tag, ciphertext, &poly1305Key)
+ copy(tagOut, tag[:])
+
+ return ret
+}
+
+// Open authenticates and decrypts a box produced by Seal and appends the
+// message to out, which must not overlap box. The output will be Overhead
+// bytes smaller than box.
+func Open(out []byte, box []byte, nonce *[24]byte, key *[32]byte) ([]byte, bool) {
+ if len(box) < Overhead {
+ return nil, false
+ }
+
+ var subKey [32]byte
+ var counter [16]byte
+ setup(&subKey, &counter, nonce, key)
+
+ // The Poly1305 key is generated by encrypting 32 bytes of zeros. Since
+ // Salsa20 works with 64-byte blocks, we also generate 32 bytes of
+ // keystream as a side effect.
+ var firstBlock [64]byte
+ salsa.XORKeyStream(firstBlock[:], firstBlock[:], &counter, &subKey)
+
+ var poly1305Key [32]byte
+ copy(poly1305Key[:], firstBlock[:])
+ var tag [poly1305.TagSize]byte
+ copy(tag[:], box)
+
+ if !poly1305.Verify(&tag, box[poly1305.TagSize:], &poly1305Key) {
+ return nil, false
+ }
+
+ ret, out := sliceForAppend(out, len(box)-Overhead)
+
+ // We XOR up to 32 bytes of box with the keystream generated from
+ // the first block.
+ box = box[Overhead:]
+ firstMessageBlock := box
+ if len(firstMessageBlock) > 32 {
+ firstMessageBlock = firstMessageBlock[:32]
+ }
+ for i, x := range firstMessageBlock {
+ out[i] = firstBlock[32+i] ^ x
+ }
+
+ box = box[len(firstMessageBlock):]
+ out = out[len(firstMessageBlock):]
+
+ // Now decrypt the rest.
+ counter[8] = 1
+ salsa.XORKeyStream(out, box, &counter, &subKey)
+
+ return ret, true
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/secretbox/secretbox_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/secretbox/secretbox_test.go
new file mode 100644
index 00000000000..664dc1521d3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/nacl/secretbox/secretbox_test.go
@@ -0,0 +1,91 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package secretbox
+
+import (
+ "bytes"
+ "crypto/rand"
+ "encoding/hex"
+ "testing"
+)
+
+func TestSealOpen(t *testing.T) {
+ var key [32]byte
+ var nonce [24]byte
+
+ rand.Reader.Read(key[:])
+ rand.Reader.Read(nonce[:])
+
+ var box, opened []byte
+
+ for msgLen := 0; msgLen < 128; msgLen += 17 {
+ message := make([]byte, msgLen)
+ rand.Reader.Read(message)
+
+ box = Seal(box[:0], message, &nonce, &key)
+ var ok bool
+ opened, ok = Open(opened[:0], box, &nonce, &key)
+ if !ok {
+ t.Errorf("%d: failed to open box", msgLen)
+ continue
+ }
+
+ if !bytes.Equal(opened, message) {
+ t.Errorf("%d: got %x, expected %x", msgLen, opened, message)
+ continue
+ }
+ }
+
+ for i := range box {
+ box[i] ^= 0x20
+ _, ok := Open(opened[:0], box, &nonce, &key)
+ if ok {
+ t.Errorf("box was opened after corrupting byte %d", i)
+ }
+ box[i] ^= 0x20
+ }
+}
+
+func TestSecretBox(t *testing.T) {
+ var key [32]byte
+ var nonce [24]byte
+ var message [64]byte
+
+ for i := range key[:] {
+ key[i] = 1
+ }
+ for i := range nonce[:] {
+ nonce[i] = 2
+ }
+ for i := range message[:] {
+ message[i] = 3
+ }
+
+ box := Seal(nil, message[:], &nonce, &key)
+ // expected was generated using the C implementation of NaCl.
+ expected, _ := hex.DecodeString("8442bc313f4626f1359e3b50122b6ce6fe66ddfe7d39d14e637eb4fd5b45beadab55198df6ab5368439792a23c87db70acb6156dc5ef957ac04f6276cf6093b84be77ff0849cc33e34b7254d5a8f65ad")
+
+ if !bytes.Equal(box, expected) {
+ t.Fatalf("box didn't match, got\n%x\n, expected\n%x", box, expected)
+ }
+}
+
+func TestAppend(t *testing.T) {
+ var key [32]byte
+ var nonce [24]byte
+ var message [8]byte
+
+ out := make([]byte, 4)
+ box := Seal(out, message[:], &nonce, &key)
+ if !bytes.Equal(box[:4], out[:4]) {
+ t.Fatalf("Seal didn't correctly append")
+ }
+
+ out = make([]byte, 4, 100)
+ box = Seal(out, message[:], &nonce, &key)
+ if !bytes.Equal(box[:4], out[:4]) {
+ t.Fatalf("Seal didn't correctly append with sufficient capacity.")
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ocsp/ocsp.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ocsp/ocsp.go
new file mode 100644
index 00000000000..ea61cf49852
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ocsp/ocsp.go
@@ -0,0 +1,673 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package ocsp parses OCSP responses as specified in RFC 2560. OCSP responses
+// are signed messages attesting to the validity of a certificate for a small
+// period of time. This is used to manage revocation for X.509 certificates.
+package ocsp // import "golang.org/x/crypto/ocsp"
+
+import (
+ "crypto"
+ "crypto/ecdsa"
+ "crypto/elliptic"
+ "crypto/rand"
+ "crypto/rsa"
+ "crypto/sha1"
+ "crypto/x509"
+ "crypto/x509/pkix"
+ "encoding/asn1"
+ "errors"
+ "math/big"
+ "strconv"
+ "time"
+)
+
+var idPKIXOCSPBasic = asn1.ObjectIdentifier([]int{1, 3, 6, 1, 5, 5, 7, 48, 1, 1})
+
+// ResponseStatus contains the result of an OCSP request. See
+// https://tools.ietf.org/html/rfc6960#section-2.3
+type ResponseStatus int
+
+const (
+ Success ResponseStatus = 0
+ Malformed ResponseStatus = 1
+ InternalError ResponseStatus = 2
+ TryLater ResponseStatus = 3
+	// Status code four is unused in OCSP. See
+ // https://tools.ietf.org/html/rfc6960#section-4.2.1
+ SignatureRequired ResponseStatus = 5
+ Unauthorized ResponseStatus = 6
+)
+
+func (r ResponseStatus) String() string {
+ switch r {
+ case Success:
+ return "success"
+ case Malformed:
+ return "malformed"
+ case InternalError:
+ return "internal error"
+ case TryLater:
+ return "try later"
+ case SignatureRequired:
+ return "signature required"
+ case Unauthorized:
+ return "unauthorized"
+ default:
+ return "unknown OCSP status: " + strconv.Itoa(int(r))
+ }
+}
+
+// ResponseError is an error that may be returned by ParseResponse to indicate
+// that the response itself is an error, not just that it indicates that a
+// certificate is revoked, unknown, etc.
+type ResponseError struct {
+ Status ResponseStatus
+}
+
+func (r ResponseError) Error() string {
+ return "ocsp: error from server: " + r.Status.String()
+}
+
+// These are internal structures that reflect the ASN.1 structure of an OCSP
+// response. See RFC 2560, section 4.2.
+
+type certID struct {
+ HashAlgorithm pkix.AlgorithmIdentifier
+ NameHash []byte
+ IssuerKeyHash []byte
+ SerialNumber *big.Int
+}
+
+// https://tools.ietf.org/html/rfc2560#section-4.1.1
+type ocspRequest struct {
+ TBSRequest tbsRequest
+}
+
+type tbsRequest struct {
+ Version int `asn1:"explicit,tag:0,default:0,optional"`
+ RequestorName pkix.RDNSequence `asn1:"explicit,tag:1,optional"`
+ RequestList []request
+}
+
+type request struct {
+ Cert certID
+}
+
+type responseASN1 struct {
+ Status asn1.Enumerated
+ Response responseBytes `asn1:"explicit,tag:0,optional"`
+}
+
+type responseBytes struct {
+ ResponseType asn1.ObjectIdentifier
+ Response []byte
+}
+
+type basicResponse struct {
+ TBSResponseData responseData
+ SignatureAlgorithm pkix.AlgorithmIdentifier
+ Signature asn1.BitString
+ Certificates []asn1.RawValue `asn1:"explicit,tag:0,optional"`
+}
+
+type responseData struct {
+ Raw asn1.RawContent
+ Version int `asn1:"optional,default:1,explicit,tag:0"`
+ RawResponderName asn1.RawValue `asn1:"optional,explicit,tag:1"`
+ KeyHash []byte `asn1:"optional,explicit,tag:2"`
+ ProducedAt time.Time `asn1:"generalized"`
+ Responses []singleResponse
+}
+
+type singleResponse struct {
+ CertID certID
+ Good asn1.Flag `asn1:"tag:0,optional"`
+ Revoked revokedInfo `asn1:"tag:1,optional"`
+ Unknown asn1.Flag `asn1:"tag:2,optional"`
+ ThisUpdate time.Time `asn1:"generalized"`
+ NextUpdate time.Time `asn1:"generalized,explicit,tag:0,optional"`
+ SingleExtensions []pkix.Extension `asn1:"explicit,tag:1,optional"`
+}
+
+type revokedInfo struct {
+ RevocationTime time.Time `asn1:"generalized"`
+ Reason asn1.Enumerated `asn1:"explicit,tag:0,optional"`
+}
+
+var (
+ oidSignatureMD2WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 2}
+ oidSignatureMD5WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 4}
+ oidSignatureSHA1WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 5}
+ oidSignatureSHA256WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 11}
+ oidSignatureSHA384WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 12}
+ oidSignatureSHA512WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 13}
+ oidSignatureDSAWithSHA1 = asn1.ObjectIdentifier{1, 2, 840, 10040, 4, 3}
+ oidSignatureDSAWithSHA256 = asn1.ObjectIdentifier{2, 16, 840, 1, 101, 4, 3, 2}
+ oidSignatureECDSAWithSHA1 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 1}
+ oidSignatureECDSAWithSHA256 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 3, 2}
+ oidSignatureECDSAWithSHA384 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 3, 3}
+ oidSignatureECDSAWithSHA512 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 3, 4}
+)
+
+var hashOIDs = map[crypto.Hash]asn1.ObjectIdentifier{
+ crypto.SHA1: asn1.ObjectIdentifier([]int{1, 3, 14, 3, 2, 26}),
+ crypto.SHA256: asn1.ObjectIdentifier([]int{2, 16, 840, 1, 101, 3, 4, 2, 1}),
+ crypto.SHA384: asn1.ObjectIdentifier([]int{2, 16, 840, 1, 101, 3, 4, 2, 2}),
+ crypto.SHA512: asn1.ObjectIdentifier([]int{2, 16, 840, 1, 101, 3, 4, 2, 3}),
+}
+
+// TODO(rlb): This is also from crypto/x509, so same comment as AGL's below
+var signatureAlgorithmDetails = []struct {
+ algo x509.SignatureAlgorithm
+ oid asn1.ObjectIdentifier
+ pubKeyAlgo x509.PublicKeyAlgorithm
+ hash crypto.Hash
+}{
+ {x509.MD2WithRSA, oidSignatureMD2WithRSA, x509.RSA, crypto.Hash(0) /* no value for MD2 */},
+ {x509.MD5WithRSA, oidSignatureMD5WithRSA, x509.RSA, crypto.MD5},
+ {x509.SHA1WithRSA, oidSignatureSHA1WithRSA, x509.RSA, crypto.SHA1},
+ {x509.SHA256WithRSA, oidSignatureSHA256WithRSA, x509.RSA, crypto.SHA256},
+ {x509.SHA384WithRSA, oidSignatureSHA384WithRSA, x509.RSA, crypto.SHA384},
+ {x509.SHA512WithRSA, oidSignatureSHA512WithRSA, x509.RSA, crypto.SHA512},
+ {x509.DSAWithSHA1, oidSignatureDSAWithSHA1, x509.DSA, crypto.SHA1},
+ {x509.DSAWithSHA256, oidSignatureDSAWithSHA256, x509.DSA, crypto.SHA256},
+ {x509.ECDSAWithSHA1, oidSignatureECDSAWithSHA1, x509.ECDSA, crypto.SHA1},
+ {x509.ECDSAWithSHA256, oidSignatureECDSAWithSHA256, x509.ECDSA, crypto.SHA256},
+ {x509.ECDSAWithSHA384, oidSignatureECDSAWithSHA384, x509.ECDSA, crypto.SHA384},
+ {x509.ECDSAWithSHA512, oidSignatureECDSAWithSHA512, x509.ECDSA, crypto.SHA512},
+}
+
+// TODO(rlb): This is also from crypto/x509, so same comment as AGL's below
+func signingParamsForPublicKey(pub interface{}, requestedSigAlgo x509.SignatureAlgorithm) (hashFunc crypto.Hash, sigAlgo pkix.AlgorithmIdentifier, err error) {
+ var pubType x509.PublicKeyAlgorithm
+
+ switch pub := pub.(type) {
+ case *rsa.PublicKey:
+ pubType = x509.RSA
+ hashFunc = crypto.SHA256
+ sigAlgo.Algorithm = oidSignatureSHA256WithRSA
+ sigAlgo.Parameters = asn1.RawValue{
+ Tag: 5,
+ }
+
+ case *ecdsa.PublicKey:
+ pubType = x509.ECDSA
+
+ switch pub.Curve {
+ case elliptic.P224(), elliptic.P256():
+ hashFunc = crypto.SHA256
+ sigAlgo.Algorithm = oidSignatureECDSAWithSHA256
+ case elliptic.P384():
+ hashFunc = crypto.SHA384
+ sigAlgo.Algorithm = oidSignatureECDSAWithSHA384
+ case elliptic.P521():
+ hashFunc = crypto.SHA512
+ sigAlgo.Algorithm = oidSignatureECDSAWithSHA512
+ default:
+ err = errors.New("x509: unknown elliptic curve")
+ }
+
+ default:
+ err = errors.New("x509: only RSA and ECDSA keys supported")
+ }
+
+ if err != nil {
+ return
+ }
+
+ if requestedSigAlgo == 0 {
+ return
+ }
+
+ found := false
+ for _, details := range signatureAlgorithmDetails {
+ if details.algo == requestedSigAlgo {
+ if details.pubKeyAlgo != pubType {
+ err = errors.New("x509: requested SignatureAlgorithm does not match private key type")
+ return
+ }
+ sigAlgo.Algorithm, hashFunc = details.oid, details.hash
+ if hashFunc == 0 {
+ err = errors.New("x509: cannot sign with hash function requested")
+ return
+ }
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ err = errors.New("x509: unknown SignatureAlgorithm")
+ }
+
+ return
+}
+
+// TODO(agl): this is taken from crypto/x509 and so should probably be exported
+// from crypto/x509 or crypto/x509/pkix.
+func getSignatureAlgorithmFromOID(oid asn1.ObjectIdentifier) x509.SignatureAlgorithm {
+ for _, details := range signatureAlgorithmDetails {
+ if oid.Equal(details.oid) {
+ return details.algo
+ }
+ }
+ return x509.UnknownSignatureAlgorithm
+}
+
+// TODO(rlb): This is not taken from crypto/x509, but it's of the same general form.
+func getHashAlgorithmFromOID(target asn1.ObjectIdentifier) crypto.Hash {
+ for hash, oid := range hashOIDs {
+ if oid.Equal(target) {
+ return hash
+ }
+ }
+ return crypto.Hash(0)
+}
+
+// This is the exposed reflection of the internal OCSP structures.
+
+// The status values that can be expressed in OCSP. See RFC 6960.
+const (
+ // Good means that the certificate is valid.
+ Good = iota
+ // Revoked means that the certificate has been deliberately revoked.
+ Revoked
+ // Unknown means that the OCSP responder doesn't know about the certificate.
+ Unknown
+ // ServerFailed is unused; it was never returned by this package (see
+ // https://go-review.googlesource.com/#/c/18944). ParseResponse will
+ // return a ResponseError when an error response is parsed.
+ ServerFailed
+)
+
+// The enumerated reasons for revoking a certificate. See RFC 5280.
+const (
+ Unspecified = iota
+ KeyCompromise
+ CACompromise
+ AffiliationChanged
+ Superseded
+ CessationOfOperation
+ CertificateHold
+ _
+ RemoveFromCRL
+ PrivilegeWithdrawn
+ AACompromise
+)
+
+// Request represents an OCSP request. See RFC 6960.
+type Request struct {
+ HashAlgorithm crypto.Hash
+ IssuerNameHash []byte
+ IssuerKeyHash []byte
+ SerialNumber *big.Int
+}
+
+// Response represents an OCSP response containing a single SingleResponse. See
+// RFC 6960.
+type Response struct {
+ // Status is one of {Good, Revoked, Unknown}
+ Status int
+ SerialNumber *big.Int
+ ProducedAt, ThisUpdate, NextUpdate, RevokedAt time.Time
+ RevocationReason int
+ Certificate *x509.Certificate
+ // TBSResponseData contains the raw bytes of the signed response. If
+ // Certificate is nil then this can be used to verify Signature.
+ TBSResponseData []byte
+ Signature []byte
+ SignatureAlgorithm x509.SignatureAlgorithm
+
+ // Extensions contains raw X.509 extensions from the singleExtensions field
+ // of the OCSP response. When parsing responses, this can be used to extract
+ // non-critical extensions that are not parsed by this package. When
+ // marshaling OCSP responses, the Extensions field is ignored, see
+ // ExtraExtensions.
+ Extensions []pkix.Extension
+
+ // ExtraExtensions contains extensions to be copied, raw, into any marshaled
+ // OCSP response (in the singleExtensions field). Values override any
+ // extensions that would otherwise be produced based on the other fields. The
+ // ExtraExtensions field is not populated when parsing responses, see
+ // Extensions.
+ ExtraExtensions []pkix.Extension
+}
+
+// These are pre-serialized error responses for the various non-success codes
+// defined by OCSP. The Unauthorized code in particular can be used by an OCSP
+// responder that supports only pre-signed responses as a response to requests
+// for certificates with unknown status. See RFC 5019.
+var (
+ MalformedRequestErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x01}
+ InternalErrorErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x02}
+ TryLaterErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x03}
+ SigRequredErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x05}
+ UnauthorizedErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x06}
+)
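+
+// As an illustrative sketch (the handler shape, the writer w and the
+// lookupPresignedResponse helper are assumptions, not part of this package),
+// a responder that serves only pre-signed responses might write one of these
+// byte slices directly when it cannot answer for a serial number:
+//
+//	func respond(w http.ResponseWriter, serial *big.Int) {
+//		der, ok := lookupPresignedResponse(serial)
+//		if !ok {
+//			w.Write(UnauthorizedErrorResponse)
+//			return
+//		}
+//		w.Write(der)
+//	}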
+
+// CheckSignatureFrom checks that the signature in resp is a valid signature
+// from issuer. This should only be used if resp.Certificate is nil. Otherwise,
+// the OCSP response contained an intermediate certificate that created the
+// signature. That signature is checked by ParseResponse and only
+// resp.Certificate remains to be validated.
+func (resp *Response) CheckSignatureFrom(issuer *x509.Certificate) error {
+ return issuer.CheckSignature(resp.SignatureAlgorithm, resp.TBSResponseData, resp.Signature)
+}
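+
+// A minimal sketch of the intended use, assuming issuerDER and responseDER
+// hold DER bytes obtained elsewhere (error handling abbreviated):
+//
+//	issuer, err := x509.ParseCertificate(issuerDER)
+//	if err != nil {
+//		return err
+//	}
+//	resp, err := ParseResponse(responseDER, nil)
+//	if err != nil {
+//		return err
+//	}
+//	if resp.Certificate == nil {
+//		if err := resp.CheckSignatureFrom(issuer); err != nil {
+//			return err // response was not signed by the expected issuer
+//		}
+//	}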
+
+// ParseError results from an invalid OCSP response.
+type ParseError string
+
+func (p ParseError) Error() string {
+ return string(p)
+}
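+
+// Callers can distinguish ResponseError and ParseError with a type switch;
+// respDER here is an assumed variable holding DER response bytes:
+//
+//	_, err := ParseResponse(respDER, nil)
+//	switch e := err.(type) {
+//	case ResponseError:
+//		// the responder answered with a non-success status, e.Status
+//	case ParseError:
+//		// the response bytes could not be parsed: e
+//	}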
+
+// ParseRequest parses an OCSP request in DER form. It only supports
+// requests for a single certificate. Signed requests are not supported.
+// If a request includes a signature, it will result in a ParseError.
+func ParseRequest(bytes []byte) (*Request, error) {
+ var req ocspRequest
+ rest, err := asn1.Unmarshal(bytes, &req)
+ if err != nil {
+ return nil, err
+ }
+ if len(rest) > 0 {
+ return nil, ParseError("trailing data in OCSP request")
+ }
+
+ if len(req.TBSRequest.RequestList) == 0 {
+ return nil, ParseError("OCSP request contains no request body")
+ }
+ innerRequest := req.TBSRequest.RequestList[0]
+
+ hashFunc := getHashAlgorithmFromOID(innerRequest.Cert.HashAlgorithm.Algorithm)
+ if hashFunc == crypto.Hash(0) {
+ return nil, ParseError("OCSP request uses unknown hash function")
+ }
+
+ return &Request{
+ HashAlgorithm: hashFunc,
+ IssuerNameHash: innerRequest.Cert.NameHash,
+ IssuerKeyHash: innerRequest.Cert.IssuerKeyHash,
+ SerialNumber: innerRequest.Cert.SerialNumber,
+ }, nil
+}
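+
+// A minimal sketch of a responder reading a POSTed request; the HTTP handler
+// shape and variable names are illustrative assumptions, not part of this
+// package:
+//
+//	func handleOCSP(w http.ResponseWriter, r *http.Request) {
+//		body, err := ioutil.ReadAll(r.Body)
+//		if err != nil {
+//			w.Write(MalformedRequestErrorResponse)
+//			return
+//		}
+//		req, err := ParseRequest(body)
+//		if err != nil {
+//			w.Write(MalformedRequestErrorResponse)
+//			return
+//		}
+//		// look up the certificate status for req.SerialNumber and reply with
+//		// a pre-signed response or one built with CreateResponse.
+//	}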
+
+// ParseResponse parses an OCSP response in DER form. It only supports
+// responses for a single certificate. If the response contains a certificate
+// then the signature over the response is checked. If issuer is not nil then
+// it will be used to validate the signature or embedded certificate.
+//
+// Invalid signatures or parse failures will result in a ParseError. Error
+// responses will result in a ResponseError.
+func ParseResponse(bytes []byte, issuer *x509.Certificate) (*Response, error) {
+ var resp responseASN1
+ rest, err := asn1.Unmarshal(bytes, &resp)
+ if err != nil {
+ return nil, err
+ }
+ if len(rest) > 0 {
+ return nil, ParseError("trailing data in OCSP response")
+ }
+
+ if status := ResponseStatus(resp.Status); status != Success {
+ return nil, ResponseError{status}
+ }
+
+ if !resp.Response.ResponseType.Equal(idPKIXOCSPBasic) {
+ return nil, ParseError("bad OCSP response type")
+ }
+
+ var basicResp basicResponse
+ rest, err = asn1.Unmarshal(resp.Response.Response, &basicResp)
+ if err != nil {
+ return nil, err
+ }
+
+ if len(basicResp.Certificates) > 1 {
+ return nil, ParseError("OCSP response contains bad number of certificates")
+ }
+
+ if len(basicResp.TBSResponseData.Responses) != 1 {
+ return nil, ParseError("OCSP response contains bad number of responses")
+ }
+
+ ret := &Response{
+ TBSResponseData: basicResp.TBSResponseData.Raw,
+ Signature: basicResp.Signature.RightAlign(),
+ SignatureAlgorithm: getSignatureAlgorithmFromOID(basicResp.SignatureAlgorithm.Algorithm),
+ }
+
+ if len(basicResp.Certificates) > 0 {
+ ret.Certificate, err = x509.ParseCertificate(basicResp.Certificates[0].FullBytes)
+ if err != nil {
+ return nil, err
+ }
+
+ if err := ret.CheckSignatureFrom(ret.Certificate); err != nil {
+ return nil, ParseError("bad OCSP signature")
+ }
+
+ if issuer != nil {
+ if err := issuer.CheckSignature(ret.Certificate.SignatureAlgorithm, ret.Certificate.RawTBSCertificate, ret.Certificate.Signature); err != nil {
+ return nil, ParseError("bad signature on embedded certificate")
+ }
+ }
+ } else if issuer != nil {
+ if err := ret.CheckSignatureFrom(issuer); err != nil {
+ return nil, ParseError("bad OCSP signature")
+ }
+ }
+
+ r := basicResp.TBSResponseData.Responses[0]
+
+ for _, ext := range r.SingleExtensions {
+ if ext.Critical {
+ return nil, ParseError("unsupported critical extension")
+ }
+ }
+ ret.Extensions = r.SingleExtensions
+
+ ret.SerialNumber = r.CertID.SerialNumber
+
+ switch {
+ case bool(r.Good):
+ ret.Status = Good
+ case bool(r.Unknown):
+ ret.Status = Unknown
+ default:
+ ret.Status = Revoked
+ ret.RevokedAt = r.Revoked.RevocationTime
+ ret.RevocationReason = int(r.Revoked.Reason)
+ }
+
+ ret.ProducedAt = basicResp.TBSResponseData.ProducedAt
+ ret.ThisUpdate = r.ThisUpdate
+ ret.NextUpdate = r.NextUpdate
+
+ return ret, nil
+}
+
+// RequestOptions contains options for constructing OCSP requests.
+type RequestOptions struct {
+ // Hash contains the hash function that should be used when
+ // constructing the OCSP request. If zero, SHA-1 will be used.
+ Hash crypto.Hash
+}
+
+func (opts *RequestOptions) hash() crypto.Hash {
+ if opts == nil || opts.Hash == 0 {
+ // SHA-1 is nearly universally used in OCSP.
+ return crypto.SHA1
+ }
+ return opts.Hash
+}
+
+// CreateRequest returns a DER-encoded OCSP request for the status of cert. If
+// opts is nil then sensible defaults are used.
+func CreateRequest(cert, issuer *x509.Certificate, opts *RequestOptions) ([]byte, error) {
+ hashFunc := opts.hash()
+
+ // OCSP seems to be the only place where these raw hash identifiers are
+ // used. I took the following from
+ // http://msdn.microsoft.com/en-us/library/ff635603.aspx
+ var hashOID asn1.ObjectIdentifier
+ hashOID, ok := hashOIDs[hashFunc]
+ if !ok {
+ return nil, x509.ErrUnsupportedAlgorithm
+ }
+
+ if !hashFunc.Available() {
+ return nil, x509.ErrUnsupportedAlgorithm
+ }
+ h := hashFunc.New()
+
+ var publicKeyInfo struct {
+ Algorithm pkix.AlgorithmIdentifier
+ PublicKey asn1.BitString
+ }
+ if _, err := asn1.Unmarshal(issuer.RawSubjectPublicKeyInfo, &publicKeyInfo); err != nil {
+ return nil, err
+ }
+
+ h.Write(publicKeyInfo.PublicKey.RightAlign())
+ issuerKeyHash := h.Sum(nil)
+
+ h.Reset()
+ h.Write(issuer.RawSubject)
+ issuerNameHash := h.Sum(nil)
+
+ return asn1.Marshal(ocspRequest{
+ tbsRequest{
+ Version: 0,
+ RequestList: []request{
+ {
+ Cert: certID{
+ pkix.AlgorithmIdentifier{
+ Algorithm: hashOID,
+ Parameters: asn1.RawValue{Tag: 5 /* ASN.1 NULL */},
+ },
+ issuerNameHash,
+ issuerKeyHash,
+ cert.SerialNumber,
+ },
+ },
+ },
+ },
+ })
+}
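+
+// A minimal client-side sketch, assuming cert and issuer are parsed
+// *x509.Certificate values and ocspURL is one of the entries in
+// cert.OCSPServer (error handling abbreviated):
+//
+//	reqDER, err := CreateRequest(cert, issuer, nil)
+//	if err != nil {
+//		return err
+//	}
+//	httpResp, err := http.Post(ocspURL, "application/ocsp-request", bytes.NewReader(reqDER))
+//	if err != nil {
+//		return err
+//	}
+//	defer httpResp.Body.Close()
+//	respDER, err := ioutil.ReadAll(httpResp.Body)
+//	if err != nil {
+//		return err
+//	}
+//	resp, err := ParseResponse(respDER, issuer)
+//	if err != nil {
+//		return err
+//	}
+//	switch resp.Status {
+//	case Good:
+//		// the certificate is not revoked
+//	case Revoked:
+//		// revoked at resp.RevokedAt for reason resp.RevocationReason
+//	case Unknown:
+//		// the responder does not know about the certificate
+//	}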
+
+// CreateResponse returns a DER-encoded OCSP response with the specified contents.
+// The fields in the response are populated as follows:
+//
+// The responder cert is used to populate the ResponderName field; if
+// template.Certificate is set, that certificate is included alongside the OCSP
+// response signature.
+//
+// The issuer cert is used to populate the IssuerNameHash and IssuerKeyHash fields.
+// (SHA-1 is used for the hash function; this is not configurable.)
+//
+// The template is used to populate the SerialNumber, Status, RevokedAt,
+// RevocationReason, ThisUpdate, and NextUpdate fields.
+//
+// The ProducedAt date is automatically set to the current date, to the nearest minute.
+func CreateResponse(issuer, responderCert *x509.Certificate, template Response, priv crypto.Signer) ([]byte, error) {
+ var publicKeyInfo struct {
+ Algorithm pkix.AlgorithmIdentifier
+ PublicKey asn1.BitString
+ }
+ if _, err := asn1.Unmarshal(issuer.RawSubjectPublicKeyInfo, &publicKeyInfo); err != nil {
+ return nil, err
+ }
+
+ h := sha1.New()
+ h.Write(publicKeyInfo.PublicKey.RightAlign())
+ issuerKeyHash := h.Sum(nil)
+
+ h.Reset()
+ h.Write(issuer.RawSubject)
+ issuerNameHash := h.Sum(nil)
+
+ innerResponse := singleResponse{
+ CertID: certID{
+ HashAlgorithm: pkix.AlgorithmIdentifier{
+ Algorithm: hashOIDs[crypto.SHA1],
+ Parameters: asn1.RawValue{Tag: 5 /* ASN.1 NULL */},
+ },
+ NameHash: issuerNameHash,
+ IssuerKeyHash: issuerKeyHash,
+ SerialNumber: template.SerialNumber,
+ },
+ ThisUpdate: template.ThisUpdate.UTC(),
+ NextUpdate: template.NextUpdate.UTC(),
+ SingleExtensions: template.ExtraExtensions,
+ }
+
+ switch template.Status {
+ case Good:
+ innerResponse.Good = true
+ case Unknown:
+ innerResponse.Unknown = true
+ case Revoked:
+ innerResponse.Revoked = revokedInfo{
+ RevocationTime: template.RevokedAt.UTC(),
+ Reason: asn1.Enumerated(template.RevocationReason),
+ }
+ }
+
+ responderName := asn1.RawValue{
+ Class: 2, // context-specific
+ Tag: 1, // explicit tag
+ IsCompound: true,
+ Bytes: responderCert.RawSubject,
+ }
+ tbsResponseData := responseData{
+ Version: 0,
+ RawResponderName: responderName,
+ ProducedAt: time.Now().Truncate(time.Minute).UTC(),
+ Responses: []singleResponse{innerResponse},
+ }
+
+ tbsResponseDataDER, err := asn1.Marshal(tbsResponseData)
+ if err != nil {
+ return nil, err
+ }
+
+ hashFunc, signatureAlgorithm, err := signingParamsForPublicKey(priv.Public(), template.SignatureAlgorithm)
+ if err != nil {
+ return nil, err
+ }
+
+ responseHash := hashFunc.New()
+ responseHash.Write(tbsResponseDataDER)
+ signature, err := priv.Sign(rand.Reader, responseHash.Sum(nil), hashFunc)
+ if err != nil {
+ return nil, err
+ }
+
+ response := basicResponse{
+ TBSResponseData: tbsResponseData,
+ SignatureAlgorithm: signatureAlgorithm,
+ Signature: asn1.BitString{
+ Bytes: signature,
+ BitLength: 8 * len(signature),
+ },
+ }
+ if template.Certificate != nil {
+ response.Certificates = []asn1.RawValue{
+ asn1.RawValue{FullBytes: template.Certificate.Raw},
+ }
+ }
+ responseDER, err := asn1.Marshal(response)
+ if err != nil {
+ return nil, err
+ }
+
+ return asn1.Marshal(responseASN1{
+ Status: asn1.Enumerated(Success),
+ Response: responseBytes{
+ ResponseType: idPKIXOCSPBasic,
+ Response: responseDER,
+ },
+ })
+}
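+
+// A minimal responder-side sketch, assuming issuer and responderCert are
+// parsed certificates, key is a crypto.Signer for responderCert, req came from
+// ParseRequest and w is an http.ResponseWriter (all illustrative names):
+//
+//	template := Response{
+//		Status:       Good,
+//		SerialNumber: req.SerialNumber,
+//		ThisUpdate:   time.Now(),
+//		NextUpdate:   time.Now().Add(24 * time.Hour),
+//		Certificate:  responderCert,
+//	}
+//	respDER, err := CreateResponse(issuer, responderCert, template, key)
+//	if err != nil {
+//		return err
+//	}
+//	w.Header().Set("Content-Type", "application/ocsp-response")
+//	w.Write(respDER)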
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ocsp/ocsp_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ocsp/ocsp_test.go
new file mode 100644
index 00000000000..33868497381
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ocsp/ocsp_test.go
@@ -0,0 +1,584 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ocsp
+
+import (
+ "bytes"
+ "crypto"
+ "crypto/sha1"
+ "crypto/x509"
+ "crypto/x509/pkix"
+ "encoding/asn1"
+ "encoding/hex"
+ "math/big"
+ "reflect"
+ "testing"
+ "time"
+)
+
+func TestOCSPDecode(t *testing.T) {
+ responseBytes, _ := hex.DecodeString(ocspResponseHex)
+ resp, err := ParseResponse(responseBytes, nil)
+ if err != nil {
+ t.Error(err)
+ }
+
+ expected := Response{
+ Status: Good,
+ SerialNumber: big.NewInt(0x1d0fa),
+ RevocationReason: Unspecified,
+ ThisUpdate: time.Date(2010, 7, 7, 15, 1, 5, 0, time.UTC),
+ NextUpdate: time.Date(2010, 7, 7, 18, 35, 17, 0, time.UTC),
+ }
+
+ if !reflect.DeepEqual(resp.ThisUpdate, expected.ThisUpdate) {
+ t.Errorf("resp.ThisUpdate: got %d, want %d", resp.ThisUpdate, expected.ThisUpdate)
+ }
+
+ if !reflect.DeepEqual(resp.NextUpdate, expected.NextUpdate) {
+ t.Errorf("resp.NextUpdate: got %d, want %d", resp.NextUpdate, expected.NextUpdate)
+ }
+
+ if resp.Status != expected.Status {
+ t.Errorf("resp.Status: got %d, want %d", resp.Status, expected.Status)
+ }
+
+ if resp.SerialNumber.Cmp(expected.SerialNumber) != 0 {
+ t.Errorf("resp.SerialNumber: got %x, want %x", resp.SerialNumber, expected.SerialNumber)
+ }
+
+ if resp.RevocationReason != expected.RevocationReason {
+ t.Errorf("resp.RevocationReason: got %d, want %d", resp.RevocationReason, expected.RevocationReason)
+ }
+}
+
+func TestOCSPDecodeWithoutCert(t *testing.T) {
+ responseBytes, _ := hex.DecodeString(ocspResponseWithoutCertHex)
+ _, err := ParseResponse(responseBytes, nil)
+ if err != nil {
+ t.Error(err)
+ }
+}
+
+func TestOCSPDecodeWithExtensions(t *testing.T) {
+ responseBytes, _ := hex.DecodeString(ocspResponseWithCriticalExtensionHex)
+ _, err := ParseResponse(responseBytes, nil)
+ if err == nil {
+ t.Error("ParseResponse did not fail on response with unsupported critical extension")
+ }
+
+ responseBytes, _ = hex.DecodeString(ocspResponseWithExtensionHex)
+ response, err := ParseResponse(responseBytes, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(response.Extensions) != 1 {
+ t.Errorf("len(response.Extensions): got %v, want %v", len(response.Extensions), 1)
+ }
+
+ extensionBytes := response.Extensions[0].Value
+ expectedBytes, _ := hex.DecodeString(ocspExtensionValueHex)
+ if !bytes.Equal(extensionBytes, expectedBytes) {
+ t.Errorf("response.Extensions[0]: got %x, want %x", extensionBytes, expectedBytes)
+ }
+}
+
+func TestOCSPSignature(t *testing.T) {
+ issuerCert, _ := hex.DecodeString(startComHex)
+ issuer, err := x509.ParseCertificate(issuerCert)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ response, _ := hex.DecodeString(ocspResponseHex)
+ if _, err := ParseResponse(response, issuer); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestOCSPRequest(t *testing.T) {
+ leafCert, _ := hex.DecodeString(leafCertHex)
+ cert, err := x509.ParseCertificate(leafCert)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ issuerCert, _ := hex.DecodeString(issuerCertHex)
+ issuer, err := x509.ParseCertificate(issuerCert)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ request, err := CreateRequest(cert, issuer, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ expectedBytes, _ := hex.DecodeString(ocspRequestHex)
+ if !bytes.Equal(request, expectedBytes) {
+ t.Errorf("request: got %x, wanted %x", request, expectedBytes)
+ }
+
+ decodedRequest, err := ParseRequest(expectedBytes)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if decodedRequest.HashAlgorithm != crypto.SHA1 {
+ t.Errorf("request.HashAlgorithm: got %v, want %v", decodedRequest.HashAlgorithm, crypto.SHA1)
+ }
+
+ var publicKeyInfo struct {
+ Algorithm pkix.AlgorithmIdentifier
+ PublicKey asn1.BitString
+ }
+ _, err = asn1.Unmarshal(issuer.RawSubjectPublicKeyInfo, &publicKeyInfo)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ h := sha1.New()
+ h.Write(publicKeyInfo.PublicKey.RightAlign())
+ issuerKeyHash := h.Sum(nil)
+
+ h.Reset()
+ h.Write(issuer.RawSubject)
+ issuerNameHash := h.Sum(nil)
+
+ if got := decodedRequest.IssuerKeyHash; !bytes.Equal(got, issuerKeyHash) {
+ t.Errorf("request.IssuerKeyHash: got %x, want %x", got, issuerKeyHash)
+ }
+
+ if got := decodedRequest.IssuerNameHash; !bytes.Equal(got, issuerNameHash) {
+ t.Errorf("request.IssuerKeyHash: got %x, want %x", got, issuerNameHash)
+ }
+
+ if got := decodedRequest.SerialNumber; got.Cmp(cert.SerialNumber) != 0 {
+ t.Errorf("request.SerialNumber: got %x, want %x", got, cert.SerialNumber)
+ }
+}
+
+func TestOCSPResponse(t *testing.T) {
+ leafCert, _ := hex.DecodeString(leafCertHex)
+ leaf, err := x509.ParseCertificate(leafCert)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ issuerCert, _ := hex.DecodeString(issuerCertHex)
+ issuer, err := x509.ParseCertificate(issuerCert)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ responderCert, _ := hex.DecodeString(responderCertHex)
+ responder, err := x509.ParseCertificate(responderCert)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ responderPrivateKeyDER, _ := hex.DecodeString(responderPrivateKeyHex)
+ responderPrivateKey, err := x509.ParsePKCS1PrivateKey(responderPrivateKeyDER)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ extensionBytes, _ := hex.DecodeString(ocspExtensionValueHex)
+ extensions := []pkix.Extension{
+ pkix.Extension{
+ Id: ocspExtensionOID,
+ Critical: false,
+ Value: extensionBytes,
+ },
+ }
+
+ producedAt := time.Now().Truncate(time.Minute)
+ thisUpdate := time.Date(2010, 7, 7, 15, 1, 5, 0, time.UTC)
+ nextUpdate := time.Date(2010, 7, 7, 18, 35, 17, 0, time.UTC)
+ template := Response{
+ Status: Revoked,
+ SerialNumber: leaf.SerialNumber,
+ ThisUpdate: thisUpdate,
+ NextUpdate: nextUpdate,
+ RevokedAt: thisUpdate,
+ RevocationReason: KeyCompromise,
+ Certificate: responder,
+ ExtraExtensions: extensions,
+ }
+
+ responseBytes, err := CreateResponse(issuer, responder, template, responderPrivateKey)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ resp, err := ParseResponse(responseBytes, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if !reflect.DeepEqual(resp.ThisUpdate, template.ThisUpdate) {
+ t.Errorf("resp.ThisUpdate: got %d, want %d", resp.ThisUpdate, template.ThisUpdate)
+ }
+
+ if !reflect.DeepEqual(resp.NextUpdate, template.NextUpdate) {
+ t.Errorf("resp.NextUpdate: got %d, want %d", resp.NextUpdate, template.NextUpdate)
+ }
+
+ if !reflect.DeepEqual(resp.RevokedAt, template.RevokedAt) {
+ t.Errorf("resp.RevokedAt: got %d, want %d", resp.RevokedAt, template.RevokedAt)
+ }
+
+ if !reflect.DeepEqual(resp.Extensions, template.ExtraExtensions) {
+ t.Errorf("resp.Extensions: got %v, want %v", resp.Extensions, template.ExtraExtensions)
+ }
+
+ if !resp.ProducedAt.Equal(producedAt) {
+ t.Errorf("resp.ProducedAt: got %d, want %d", resp.ProducedAt, producedAt)
+ }
+
+ if resp.Status != template.Status {
+ t.Errorf("resp.Status: got %d, want %d", resp.Status, template.Status)
+ }
+
+ if resp.SerialNumber.Cmp(template.SerialNumber) != 0 {
+ t.Errorf("resp.SerialNumber: got %x, want %x", resp.SerialNumber, template.SerialNumber)
+ }
+
+ if resp.RevocationReason != template.RevocationReason {
+ t.Errorf("resp.RevocationReason: got %d, want %d", resp.RevocationReason, template.RevocationReason)
+ }
+}
+
+func TestErrorResponse(t *testing.T) {
+ responseBytes, _ := hex.DecodeString(errorResponseHex)
+ _, err := ParseResponse(responseBytes, nil)
+
+ respErr, ok := err.(ResponseError)
+ if !ok {
+ t.Fatalf("expected ResponseError from ParseResponse but got %#v", err)
+ }
+ if respErr.Status != Malformed {
+ t.Fatalf("expected Malformed status from ParseResponse but got %d", respErr.Status)
+ }
+}
+
+// This OCSP response was taken from Thawte's public OCSP responder.
+// To recreate:
+// $ openssl s_client -tls1 -showcerts -servername www.google.com -connect www.google.com:443
+// Copy and paste the first certificate into /tmp/cert.crt and the second into
+// /tmp/intermediate.crt
+// $ openssl ocsp -issuer /tmp/intermediate.crt -cert /tmp/cert.crt -url http://ocsp.thawte.com -resp_text -respout /tmp/ocsp.der
+// Then hex encode the result:
+// $ python -c 'print file("/tmp/ocsp.der", "r").read().encode("hex")'
+
+const ocspResponseHex = "308206bc0a0100a08206b5308206b106092b0601050507300101048206a23082069e3081" +
+ "c9a14e304c310b300906035504061302494c31163014060355040a130d5374617274436f" +
+ "6d204c74642e312530230603550403131c5374617274436f6d20436c6173732031204f43" +
+ "5350205369676e6572180f32303130303730373137333531375a30663064303c30090605" +
+ "2b0e03021a050004146568874f40750f016a3475625e1f5c93e5a26d580414eb4234d098" +
+ "b0ab9ff41b6b08f7cc642eef0e2c45020301d0fa8000180f323031303037303731353031" +
+ "30355aa011180f32303130303730373138333531375a300d06092a864886f70d01010505" +
+ "000382010100ab557ff070d1d7cebbb5f0ec91a15c3fed22eb2e1b8244f1b84545f013a4" +
+ "fb46214c5e3fbfbebb8a56acc2b9db19f68fd3c3201046b3824d5ba689f99864328710cb" +
+ "467195eb37d84f539e49f859316b32964dc3e47e36814ce94d6c56dd02733b1d0802f7ff" +
+ "4eebdbbd2927dcf580f16cbc290f91e81b53cb365e7223f1d6e20a88ea064104875e0145" +
+ "672b20fc14829d51ca122f5f5d77d3ad6c83889c55c7dc43680ba2fe3cef8b05dbcabdc0" +
+ "d3e09aaf9725597f8c858c2fa38c0d6aed2e6318194420dd1a1137445d13e1c97ab47896" +
+ "17a4e08925f46f867b72e3a4dc1f08cb870b2b0717f7207faa0ac512e628a029aba7457a" +
+ "e63dcf3281e2162d9349a08204ba308204b6308204b23082039aa003020102020101300d" +
+ "06092a864886f70d010105050030818c310b300906035504061302494c31163014060355" +
+ "040a130d5374617274436f6d204c74642e312b3029060355040b13225365637572652044" +
+ "69676974616c204365727469666963617465205369676e696e6731383036060355040313" +
+ "2f5374617274436f6d20436c6173732031205072696d61727920496e7465726d65646961" +
+ "746520536572766572204341301e170d3037313032353030323330365a170d3132313032" +
+ "333030323330365a304c310b300906035504061302494c31163014060355040a130d5374" +
+ "617274436f6d204c74642e312530230603550403131c5374617274436f6d20436c617373" +
+ "2031204f435350205369676e657230820122300d06092a864886f70d0101010500038201" +
+ "0f003082010a0282010100b9561b4c45318717178084e96e178df2255e18ed8d8ecc7c2b" +
+ "7b51a6c1c2e6bf0aa3603066f132fe10ae97b50e99fa24b83fc53dd2777496387d14e1c3" +
+ "a9b6a4933e2ac12413d085570a95b8147414a0bc007c7bcf222446ef7f1a156d7ea1c577" +
+ "fc5f0facdfd42eb0f5974990cb2f5cefebceef4d1bdc7ae5c1075c5a99a93171f2b0845b" +
+ "4ff0864e973fcfe32f9d7511ff87a3e943410c90a4493a306b6944359340a9ca96f02b66" +
+ "ce67f028df2980a6aaee8d5d5d452b8b0eb93f923cc1e23fcccbdbe7ffcb114d08fa7a6a" +
+ "3c404f825d1a0e715935cf623a8c7b59670014ed0622f6089a9447a7a19010f7fe58f841" +
+ "29a2765ea367824d1c3bb2fda308530203010001a382015c30820158300c0603551d1301" +
+ "01ff04023000300b0603551d0f0404030203a8301e0603551d250417301506082b060105" +
+ "0507030906092b0601050507300105301d0603551d0e0416041445e0a36695414c5dd449" +
+ "bc00e33cdcdbd2343e173081a80603551d230481a030819d8014eb4234d098b0ab9ff41b" +
+ "6b08f7cc642eef0e2c45a18181a47f307d310b300906035504061302494c311630140603" +
+ "55040a130d5374617274436f6d204c74642e312b3029060355040b132253656375726520" +
+ "4469676974616c204365727469666963617465205369676e696e67312930270603550403" +
+ "13205374617274436f6d2043657274696669636174696f6e20417574686f726974798201" +
+ "0a30230603551d12041c301a8618687474703a2f2f7777772e737461727473736c2e636f" +
+ "6d2f302c06096086480186f842010d041f161d5374617274436f6d205265766f63617469" +
+ "6f6e20417574686f72697479300d06092a864886f70d01010505000382010100182d2215" +
+ "8f0fc0291324fa8574c49bb8ff2835085adcbf7b7fc4191c397ab6951328253fffe1e5ec" +
+ "2a7da0d50fca1a404e6968481366939e666c0a6209073eca57973e2fefa9ed1718e8176f" +
+ "1d85527ff522c08db702e3b2b180f1cbff05d98128252cf0f450f7dd2772f4188047f19d" +
+ "c85317366f94bc52d60f453a550af58e308aaab00ced33040b62bf37f5b1ab2a4f7f0f80" +
+ "f763bf4d707bc8841d7ad9385ee2a4244469260b6f2bf085977af9074796048ecc2f9d48" +
+ "a1d24ce16e41a9941568fec5b42771e118f16c106a54ccc339a4b02166445a167902e75e" +
+ "6d8620b0825dcd18a069b90fd851d10fa8effd409deec02860d26d8d833f304b10669b42"
+
+const startComHex = "308206343082041ca003020102020118300d06092a864886f70d0101050500307d310b30" +
+ "0906035504061302494c31163014060355040a130d5374617274436f6d204c74642e312b" +
+ "3029060355040b1322536563757265204469676974616c20436572746966696361746520" +
+ "5369676e696e6731293027060355040313205374617274436f6d20436572746966696361" +
+ "74696f6e20417574686f72697479301e170d3037313032343230353431375a170d313731" +
+ "3032343230353431375a30818c310b300906035504061302494c31163014060355040a13" +
+ "0d5374617274436f6d204c74642e312b3029060355040b13225365637572652044696769" +
+ "74616c204365727469666963617465205369676e696e67313830360603550403132f5374" +
+ "617274436f6d20436c6173732031205072696d61727920496e7465726d65646961746520" +
+ "53657276657220434130820122300d06092a864886f70d01010105000382010f00308201" +
+ "0a0282010100b689c6acef09527807ac9263d0f44418188480561f91aee187fa3250b4d3" +
+ "4706f0e6075f700e10f71dc0ce103634855a0f92ac83c6ac58523fba38e8fce7a724e240" +
+ "a60876c0926e9e2a6d4d3f6e61200adb59ded27d63b33e46fefa215118d7cd30a6ed076e" +
+ "3b7087b4f9faebee823c056f92f7a4dc0a301e9373fe07cad75f809d225852ae06da8b87" +
+ "2369b0e42ad8ea83d2bdf371db705a280faf5a387045123f304dcd3baf17e50fcba0a95d" +
+ "48aab16150cb34cd3c5cc30be810c08c9bf0030362feb26c3e720eee1c432ac9480e5739" +
+ "c43121c810c12c87fe5495521f523c31129b7fe7c0a0a559d5e28f3ef0d5a8e1d77031a9" +
+ "c4b3cfaf6d532f06f4a70203010001a38201ad308201a9300f0603551d130101ff040530" +
+ "030101ff300e0603551d0f0101ff040403020106301d0603551d0e04160414eb4234d098" +
+ "b0ab9ff41b6b08f7cc642eef0e2c45301f0603551d230418301680144e0bef1aa4405ba5" +
+ "17698730ca346843d041aef2306606082b06010505070101045a3058302706082b060105" +
+ "05073001861b687474703a2f2f6f6373702e737461727473736c2e636f6d2f6361302d06" +
+ "082b060105050730028621687474703a2f2f7777772e737461727473736c2e636f6d2f73" +
+ "667363612e637274305b0603551d1f045430523027a025a0238621687474703a2f2f7777" +
+ "772e737461727473736c2e636f6d2f73667363612e63726c3027a025a023862168747470" +
+ "3a2f2f63726c2e737461727473736c2e636f6d2f73667363612e63726c3081800603551d" +
+ "20047930773075060b2b0601040181b5370102013066302e06082b060105050702011622" +
+ "687474703a2f2f7777772e737461727473736c2e636f6d2f706f6c6963792e7064663034" +
+ "06082b060105050702011628687474703a2f2f7777772e737461727473736c2e636f6d2f" +
+ "696e7465726d6564696174652e706466300d06092a864886f70d01010505000382020100" +
+ "2109493ea5886ee00b8b48da314d8ff75657a2e1d36257e9b556f38545753be5501f048b" +
+ "e6a05a3ee700ae85d0fbff200364cbad02e1c69172f8a34dd6dee8cc3fa18aa2e37c37a7" +
+ "c64f8f35d6f4d66e067bdd21d9cf56ffcb302249fe8904f385e5aaf1e71fe875904dddf9" +
+ "46f74234f745580c110d84b0c6da5d3ef9019ee7e1da5595be741c7bfc4d144fac7e5547" +
+ "7d7bf4a50d491e95e8f712c1ccff76a62547d0f37535be97b75816ebaa5c786fec5330af" +
+ "ea044dcca902e3f0b60412f630b1113d904e5664d7dc3c435f7339ef4baf87ebf6fe6888" +
+ "4472ead207c669b0c1a18bef1749d761b145485f3b2021e95bb2ccf4d7e931f50b15613b" +
+ "7a94e3ebd9bc7f94ae6ae3626296a8647cb887f399327e92a252bebbf865cfc9f230fc8b" +
+ "c1c2a696d75f89e15c3480f58f47072fb491bfb1a27e5f4b5ad05b9f248605515a690365" +
+ "434971c5e06f94346bf61bd8a9b04c7e53eb8f48dfca33b548fa364a1a53a6330cd089cd" +
+ "4915cd89313c90c072d7654b52358a461144b93d8e2865a63e799e5c084429adb035112e" +
+ "214eb8d2e7103e5d8483b3c3c2e4d2c6fd094b7409ddf1b3d3193e800da20b19f038e7c5" +
+ "c2afe223db61e29d5c6e2089492e236ab262c145b49faf8ba7f1223bf87de290d07a19fb" +
+ "4a4ce3d27d5f4a8303ed27d6239e6b8db459a2d9ef6c8229dd75193c3f4c108defbb7527" +
+ "d2ae83a7a8ce5ba7"
+
+const ocspResponseWithoutCertHex = "308201d40a0100a08201cd308201c906092b0601050507300101048201ba3082" +
+ "01b630819fa2160414884451ff502a695e2d88f421bad90cf2cecbea7c180f3230313330" +
+ "3631383037323434335a30743072304a300906052b0e03021a0500041448b60d38238df8" +
+ "456e4ee5843ea394111802979f0414884451ff502a695e2d88f421bad90cf2cecbea7c02" +
+ "1100f78b13b946fc9635d8ab49de9d2148218000180f3230313330363138303732343433" +
+ "5aa011180f32303133303632323037323434335a300d06092a864886f70d010105050003" +
+ "82010100103e18b3d297a5e7a6c07a4fc52ac46a15c0eba96f3be17f0ffe84de5b8c8e05" +
+ "5a8f577586a849dc4abd6440eb6fedde4622451e2823c1cbf3558b4e8184959c9fe96eff" +
+ "8bc5f95866c58c6d087519faabfdae37e11d9874f1bc0db292208f645dd848185e4dd38b" +
+ "6a8547dfa7b74d514a8470015719064d35476b95bebb03d4d2845c5ca15202d2784878f2" +
+ "0f904c24f09736f044609e9c271381713400e563023d212db422236440c6f377bbf24b2b" +
+ "9e7dec8698e36a8df68b7592ad3489fb2937afb90eb85d2aa96b81c94c25057dbd4759d9" +
+ "20a1a65c7f0b6427a224b3c98edd96b9b61f706099951188b0289555ad30a216fb774651" +
+ "5a35fca2e054dfa8"
+
+// PKIX nonce extension
+var ocspExtensionOID = asn1.ObjectIdentifier{1, 3, 6, 1, 5, 5, 7, 48, 1, 2}
+var ocspExtensionValueHex = "0403000000"
+
+const ocspResponseWithCriticalExtensionHex = "308204fe0a0100a08204f7308204f306092b0601050507300101048204e4308204e03081" +
+ "dba003020100a11b3019311730150603550403130e4f43535020526573706f6e64657218" +
+ "0f32303136303130343137303130305a3081a53081a23049300906052b0e03021a050004" +
+ "14c0fe0278fc99188891b3f212e9c7e1b21ab7bfc004140dfc1df0a9e0f01ce7f2b21317" +
+ "7e6f8d157cd4f60210017f77deb3bcbb235d44ccc7dba62e72a116180f32303130303730" +
+ "373135303130355aa0030a0101180f32303130303730373135303130355aa011180f3230" +
+ "3130303730373138333531375aa1193017301506092b06010505073001020101ff040504" +
+ "03000000300d06092a864886f70d01010b0500038201010031c730ca60a7a0d92d8e4010" +
+ "911b469de95b4d27e89de6537552436237967694f76f701cf6b45c932bd308bca4a8d092" +
+ "5c604ba94796903091d9e6c000178e72c1f0a24a277dd262835af5d17d3f9d7869606c9f" +
+ "e7c8e708a41645699895beee38bfa63bb46296683761c5d1d65439b8ab868dc3017c9eeb" +
+ "b70b82dbf3a31c55b457d48bb9e82b335ed49f445042eaf606b06a3e0639824924c89c63" +
+ "eccddfe85e6694314138b2536f5e15e07085d0f6e26d4b2f8244bab0d70de07283ac6384" +
+ "a0501fc3dea7cf0adfd4c7f34871080900e252ddc403e3f0265f2a704af905d3727504ed" +
+ "28f3214a219d898a022463c78439799ca81c8cbafdbcec34ea937cd6a08202ea308202e6" +
+ "308202e2308201caa003020102020101300d06092a864886f70d01010b05003019311730" +
+ "150603550403130e4f43535020526573706f6e646572301e170d31353031333031353530" +
+ "33335a170d3136303133303135353033335a3019311730150603550403130e4f43535020" +
+ "526573706f6e64657230820122300d06092a864886f70d01010105000382010f00308201" +
+ "0a0282010100e8155f2d3e6f2e8d14c62a788bd462f9f844e7a6977c83ef1099f0f6616e" +
+ "c5265b56f356e62c5400f0b06a2e7945a82752c636df32a895152d6074df1701dc6ccfbc" +
+ "bec75a70bd2b55ae2be7e6cad3b5fd4cd5b7790ab401a436d3f5f346074ffde8a99d5b72" +
+ "3350f0a112076614b12ef79c78991b119453445acf2416ab0046b540db14c9fc0f27b898" +
+ "9ad0f63aa4b8aefc91aa8a72160c36307c60fec78a93d3fddf4259902aa77e7332971c7d" +
+ "285b6a04f648993c6922a3e9da9adf5f81508c3228791843e5d49f24db2f1290bafd97e6" +
+ "55b1049a199f652cd603c4fafa330c390b0da78fbbc67e8fa021cbd74eb96222b12ace31" +
+ "a77dcf920334dc94581b0203010001a3353033300e0603551d0f0101ff04040302078030" +
+ "130603551d25040c300a06082b06010505070309300c0603551d130101ff04023000300d" +
+ "06092a864886f70d01010b05000382010100718012761b5063e18f0dc44644d8e6ab8612" +
+ "31c15fd5357805425d82aec1de85bf6d3e30fce205e3e3b8b795bbe52e40a439286d2288" +
+ "9064f4aeeb150359b9425f1da51b3a5c939018555d13ac42c565a0603786a919328f3267" +
+ "09dce52c22ad958ecb7873b9771d1148b1c4be2efe80ba868919fc9f68b6090c2f33c156" +
+ "d67156e42766a50b5d51e79637b7e58af74c2a951b1e642fa7741fec982cc937de37eff5" +
+ "9e2005d5939bfc031589ca143e6e8ab83f40ee08cc20a6b4a95a318352c28d18528dcaf9" +
+ "66705de17afa19d6e8ae91ddf33179d16ebb6ac2c69cae8373d408ebf8c55308be6c04d9" +
+ "3a25439a94299a65a709756c7a3e568be049d5c38839"
+
+const ocspResponseWithExtensionHex = "308204fb0a0100a08204f4308204f006092b0601050507300101048204e1308204dd3081" +
+ "d8a003020100a11b3019311730150603550403130e4f43535020526573706f6e64657218" +
+ "0f32303136303130343136353930305a3081a230819f3049300906052b0e03021a050004" +
+ "14c0fe0278fc99188891b3f212e9c7e1b21ab7bfc004140dfc1df0a9e0f01ce7f2b21317" +
+ "7e6f8d157cd4f60210017f77deb3bcbb235d44ccc7dba62e72a116180f32303130303730" +
+ "373135303130355aa0030a0101180f32303130303730373135303130355aa011180f3230" +
+ "3130303730373138333531375aa1163014301206092b0601050507300102040504030000" +
+ "00300d06092a864886f70d01010b05000382010100c09a33e0b2324c852421bb83f85ac9" +
+ "9113f5426012bd2d2279a8166e9241d18a33c870894250622ffc7ed0c4601b16d624f90b" +
+ "779265442cdb6868cf40ab304ab4b66e7315ed02cf663b1601d1d4751772b31bc299db23" +
+ "9aebac78ed6797c06ed815a7a8d18d63cfbb609cafb47ec2e89e37db255216eb09307848" +
+ "d01be0a3e943653c78212b96ff524b74c9ec456b17cdfb950cc97645c577b2e09ff41dde" +
+ "b03afb3adaa381cc0f7c1d95663ef22a0f72f2c45613ae8e2b2d1efc96e8463c7d1d8a1d" +
+ "7e3b35df8fe73a301fc3f804b942b2b3afa337ff105fc1462b7b1c1d75eb4566c8665e59" +
+ "f80393b0adbf8004ff6c3327ed34f007cb4a3348a7d55e06e3a08202ea308202e6308202" +
+ "e2308201caa003020102020101300d06092a864886f70d01010b05003019311730150603" +
+ "550403130e4f43535020526573706f6e646572301e170d3135303133303135353033335a" +
+ "170d3136303133303135353033335a3019311730150603550403130e4f43535020526573" +
+ "706f6e64657230820122300d06092a864886f70d01010105000382010f003082010a0282" +
+ "010100e8155f2d3e6f2e8d14c62a788bd462f9f844e7a6977c83ef1099f0f6616ec5265b" +
+ "56f356e62c5400f0b06a2e7945a82752c636df32a895152d6074df1701dc6ccfbcbec75a" +
+ "70bd2b55ae2be7e6cad3b5fd4cd5b7790ab401a436d3f5f346074ffde8a99d5b723350f0" +
+ "a112076614b12ef79c78991b119453445acf2416ab0046b540db14c9fc0f27b8989ad0f6" +
+ "3aa4b8aefc91aa8a72160c36307c60fec78a93d3fddf4259902aa77e7332971c7d285b6a" +
+ "04f648993c6922a3e9da9adf5f81508c3228791843e5d49f24db2f1290bafd97e655b104" +
+ "9a199f652cd603c4fafa330c390b0da78fbbc67e8fa021cbd74eb96222b12ace31a77dcf" +
+ "920334dc94581b0203010001a3353033300e0603551d0f0101ff04040302078030130603" +
+ "551d25040c300a06082b06010505070309300c0603551d130101ff04023000300d06092a" +
+ "864886f70d01010b05000382010100718012761b5063e18f0dc44644d8e6ab861231c15f" +
+ "d5357805425d82aec1de85bf6d3e30fce205e3e3b8b795bbe52e40a439286d22889064f4" +
+ "aeeb150359b9425f1da51b3a5c939018555d13ac42c565a0603786a919328f326709dce5" +
+ "2c22ad958ecb7873b9771d1148b1c4be2efe80ba868919fc9f68b6090c2f33c156d67156" +
+ "e42766a50b5d51e79637b7e58af74c2a951b1e642fa7741fec982cc937de37eff59e2005" +
+ "d5939bfc031589ca143e6e8ab83f40ee08cc20a6b4a95a318352c28d18528dcaf966705d" +
+ "e17afa19d6e8ae91ddf33179d16ebb6ac2c69cae8373d408ebf8c55308be6c04d93a2543" +
+ "9a94299a65a709756c7a3e568be049d5c38839"
+
+const ocspRequestHex = "3051304f304d304b3049300906052b0e03021a05000414c0fe0278fc99188891b3f212e9" +
+ "c7e1b21ab7bfc004140dfc1df0a9e0f01ce7f2b213177e6f8d157cd4f60210017f77deb3" +
+ "bcbb235d44ccc7dba62e72"
+
+const leafCertHex = "308203c830820331a0030201020210017f77deb3bcbb235d44ccc7dba62e72300d06092a" +
+ "864886f70d01010505003081ba311f301d060355040a1316566572695369676e20547275" +
+ "7374204e6574776f726b31173015060355040b130e566572695369676e2c20496e632e31" +
+ "333031060355040b132a566572695369676e20496e7465726e6174696f6e616c20536572" +
+ "766572204341202d20436c617373203331493047060355040b13407777772e7665726973" +
+ "69676e2e636f6d2f43505320496e636f72702e6279205265662e204c494142494c495459" +
+ "204c54442e286329393720566572695369676e301e170d3132303632313030303030305a" +
+ "170d3133313233313233353935395a3068310b3009060355040613025553311330110603" +
+ "550408130a43616c69666f726e6961311230100603550407130950616c6f20416c746f31" +
+ "173015060355040a130e46616365626f6f6b2c20496e632e311730150603550403140e2a" +
+ "2e66616365626f6f6b2e636f6d30819f300d06092a864886f70d010101050003818d0030" +
+ "818902818100ae94b171e2deccc1693e051063240102e0689ae83c39b6b3e74b97d48d7b" +
+ "23689100b0b496ee62f0e6d356bcf4aa0f50643402f5d1766aa972835a7564723f39bbef" +
+ "5290ded9bcdbf9d3d55dfad23aa03dc604c54d29cf1d4b3bdbd1a809cfae47b44c7eae17" +
+ "c5109bee24a9cf4a8d911bb0fd0415ae4c3f430aa12a557e2ae10203010001a382011e30" +
+ "82011a30090603551d130402300030440603551d20043d303b3039060b6086480186f845" +
+ "01071703302a302806082b06010505070201161c68747470733a2f2f7777772e76657269" +
+ "7369676e2e636f6d2f727061303c0603551d1f043530333031a02fa02d862b687474703a" +
+ "2f2f535652496e746c2d63726c2e766572697369676e2e636f6d2f535652496e746c2e63" +
+ "726c301d0603551d250416301406082b0601050507030106082b06010505070302300b06" +
+ "03551d0f0404030205a0303406082b0601050507010104283026302406082b0601050507" +
+ "30018618687474703a2f2f6f6373702e766572697369676e2e636f6d30270603551d1104" +
+ "20301e820e2a2e66616365626f6f6b2e636f6d820c66616365626f6f6b2e636f6d300d06" +
+ "092a864886f70d0101050500038181005b6c2b75f8ed30aa51aad36aba595e555141951f" +
+ "81a53b447910ac1f76ff78fc2781616b58f3122afc1c87010425e9ed43df1a7ba6498060" +
+ "67e2688af03db58c7df4ee03309a6afc247ccb134dc33e54c6bc1d5133a532a73273b1d7" +
+ "9cadc08e7e1a83116d34523340b0305427a21742827c98916698ee7eaf8c3bdd71700817"
+
+const issuerCertHex = "30820383308202eca003020102021046fcebbab4d02f0f926098233f93078f300d06092a" +
+ "864886f70d0101050500305f310b300906035504061302555331173015060355040a130e" +
+ "566572695369676e2c20496e632e31373035060355040b132e436c617373203320507562" +
+ "6c6963205072696d6172792043657274696669636174696f6e20417574686f7269747930" +
+ "1e170d3937303431373030303030305a170d3136313032343233353935395a3081ba311f" +
+ "301d060355040a1316566572695369676e205472757374204e6574776f726b3117301506" +
+ "0355040b130e566572695369676e2c20496e632e31333031060355040b132a5665726953" +
+ "69676e20496e7465726e6174696f6e616c20536572766572204341202d20436c61737320" +
+ "3331493047060355040b13407777772e766572697369676e2e636f6d2f43505320496e63" +
+ "6f72702e6279205265662e204c494142494c495459204c54442e28632939372056657269" +
+ "5369676e30819f300d06092a864886f70d010101050003818d0030818902818100d88280" +
+ "e8d619027d1f85183925a2652be1bfd405d3bce6363baaf04c6c5bb6e7aa3c734555b2f1" +
+ "bdea9742ed9a340a15d4a95cf54025ddd907c132b2756cc4cabba3fe56277143aa63f530" +
+ "3e9328e5faf1093bf3b74d4e39f75c495ab8c11dd3b28afe70309542cbfe2b518b5a3c3a" +
+ "f9224f90b202a7539c4f34e7ab04b27b6f0203010001a381e33081e0300f0603551d1304" +
+ "0830060101ff02010030440603551d20043d303b3039060b6086480186f8450107010130" +
+ "2a302806082b06010505070201161c68747470733a2f2f7777772e766572697369676e2e" +
+ "636f6d2f43505330340603551d25042d302b06082b0601050507030106082b0601050507" +
+ "030206096086480186f8420401060a6086480186f845010801300b0603551d0f04040302" +
+ "0106301106096086480186f842010104040302010630310603551d1f042a30283026a024" +
+ "a0228620687474703a2f2f63726c2e766572697369676e2e636f6d2f706361332e63726c" +
+ "300d06092a864886f70d010105050003818100408e4997968a73dd8e4def3e61b7caa062" +
+ "adf40e0abb753de26ed82cc7bff4b98c369bcaa2d09c724639f6a682036511c4bcbf2da6" +
+ "f5d93b0ab598fab378b91ef22b4c62d5fdb27a1ddf33fd73f9a5d82d8c2aead1fcb028b6" +
+ "e94948134b838a1b487b24f738de6f4154b8ab576b06dfc7a2d4a9f6f136628088f28b75" +
+ "d68071"
+
+// Key and certificate for the OCSP responder were not taken from the Thawte
+// responder, since CreateResponse requires that we have the private key.
+// Instead, they were generated randomly.
+const responderPrivateKeyHex = "308204a40201000282010100e8155f2d3e6f2e8d14c62a788bd462f9f844e7a6977c83ef" +
+ "1099f0f6616ec5265b56f356e62c5400f0b06a2e7945a82752c636df32a895152d6074df" +
+ "1701dc6ccfbcbec75a70bd2b55ae2be7e6cad3b5fd4cd5b7790ab401a436d3f5f346074f" +
+ "fde8a99d5b723350f0a112076614b12ef79c78991b119453445acf2416ab0046b540db14" +
+ "c9fc0f27b8989ad0f63aa4b8aefc91aa8a72160c36307c60fec78a93d3fddf4259902aa7" +
+ "7e7332971c7d285b6a04f648993c6922a3e9da9adf5f81508c3228791843e5d49f24db2f" +
+ "1290bafd97e655b1049a199f652cd603c4fafa330c390b0da78fbbc67e8fa021cbd74eb9" +
+ "6222b12ace31a77dcf920334dc94581b02030100010282010100bcf0b93d7238bda329a8" +
+ "72e7149f61bcb37c154330ccb3f42a85c9002c2e2bdea039d77d8581cd19bed94078794e" +
+ "56293d601547fc4bf6a2f9002fe5772b92b21b254403b403585e3130cc99ccf08f0ef81a" +
+ "575b38f597ba4660448b54f44bfbb97072b5a2bf043bfeca828cf7741d13698e3f38162b" +
+ "679faa646b82abd9a72c5c7d722c5fc577a76d2c2daac588accad18516d1bbad10b0dfa2" +
+ "05cfe246b59e28608a43942e1b71b0c80498075121de5b900d727c31c42c78cf1db5c0aa" +
+ "5b491e10ea4ed5c0962aaf2ae025dd81fa4ce490d9d6b4a4465411d8e542fc88617e5695" +
+ "1aa4fc8ea166f2b4d0eb89ef17f2b206bd5f1014bf8fe0e71fe62f2cccf102818100f2dc" +
+ "ddf878d553286daad68bac4070a82ffec3dc4666a2750f47879eec913f91836f1d976b60" +
+ "daf9356e078446dafab5bd2e489e5d64f8572ba24a4ba4f3729b5e106c4dd831cc2497a7" +
+ "e6c7507df05cb64aeb1bbc81c1e340d58b5964cf39cff84ea30c29ec5d3f005ee1362698" +
+ "07395037955955655292c3e85f6187fa1f9502818100f4a33c102630840705f8c778a47b" +
+ "87e8da31e68809af981ac5e5999cf1551685d761cdf0d6520361b99aebd5777a940fa64d" +
+ "327c09fa63746fbb3247ec73a86edf115f1fe5c83598db803881ade71c33c6e956118345" +
+ "497b98b5e07bb5be75971465ec78f2f9467e1b74956ca9d4c7c3e314e742a72d8b33889c" +
+ "6c093a466cef0281801d3df0d02124766dd0be98349b19eb36a508c4e679e793ba0a8bef" +
+ "4d786888c1e9947078b1ea28938716677b4ad8c5052af12eb73ac194915264a913709a0b" +
+ "7b9f98d4a18edd781a13d49899f91c20dbd8eb2e61d991ba19b5cdc08893f5cb9d39e5a6" +
+ "0629ea16d426244673b1b3ee72bd30e41fac8395acac40077403de5efd028180050731dd" +
+ "d71b1a2b96c8d538ba90bb6b62c8b1c74c03aae9a9f59d21a7a82b0d572ef06fa9c807bf" +
+ "c373d6b30d809c7871df96510c577421d9860c7383fda0919ece19996b3ca13562159193" +
+ "c0c246471e287f975e8e57034e5136aaf44254e2650def3d51292474c515b1588969112e" +
+ "0a85cc77073e9d64d2c2fc497844284b02818100d71d63eabf416cf677401ebf965f8314" +
+ "120b568a57dd3bd9116c629c40dc0c6948bab3a13cc544c31c7da40e76132ef5dd3f7534" +
+ "45a635930c74326ae3df0edd1bfb1523e3aa259873ac7cf1ac31151ec8f37b528c275622" +
+ "48f99b8bed59fd4da2576aa6ee20d93a684900bf907e80c66d6e2261ae15e55284b4ed9d" +
+ "6bdaa059"
+
+const responderCertHex = "308202e2308201caa003020102020101300d06092a864886f70d01010b05003019311730" +
+ "150603550403130e4f43535020526573706f6e646572301e170d31353031333031353530" +
+ "33335a170d3136303133303135353033335a3019311730150603550403130e4f43535020" +
+ "526573706f6e64657230820122300d06092a864886f70d01010105000382010f00308201" +
+ "0a0282010100e8155f2d3e6f2e8d14c62a788bd462f9f844e7a6977c83ef1099f0f6616e" +
+ "c5265b56f356e62c5400f0b06a2e7945a82752c636df32a895152d6074df1701dc6ccfbc" +
+ "bec75a70bd2b55ae2be7e6cad3b5fd4cd5b7790ab401a436d3f5f346074ffde8a99d5b72" +
+ "3350f0a112076614b12ef79c78991b119453445acf2416ab0046b540db14c9fc0f27b898" +
+ "9ad0f63aa4b8aefc91aa8a72160c36307c60fec78a93d3fddf4259902aa77e7332971c7d" +
+ "285b6a04f648993c6922a3e9da9adf5f81508c3228791843e5d49f24db2f1290bafd97e6" +
+ "55b1049a199f652cd603c4fafa330c390b0da78fbbc67e8fa021cbd74eb96222b12ace31" +
+ "a77dcf920334dc94581b0203010001a3353033300e0603551d0f0101ff04040302078030" +
+ "130603551d25040c300a06082b06010505070309300c0603551d130101ff04023000300d" +
+ "06092a864886f70d01010b05000382010100718012761b5063e18f0dc44644d8e6ab8612" +
+ "31c15fd5357805425d82aec1de85bf6d3e30fce205e3e3b8b795bbe52e40a439286d2288" +
+ "9064f4aeeb150359b9425f1da51b3a5c939018555d13ac42c565a0603786a919328f3267" +
+ "09dce52c22ad958ecb7873b9771d1148b1c4be2efe80ba868919fc9f68b6090c2f33c156" +
+ "d67156e42766a50b5d51e79637b7e58af74c2a951b1e642fa7741fec982cc937de37eff5" +
+ "9e2005d5939bfc031589ca143e6e8ab83f40ee08cc20a6b4a95a318352c28d18528dcaf9" +
+ "66705de17afa19d6e8ae91ddf33179d16ebb6ac2c69cae8373d408ebf8c55308be6c04d9" +
+ "3a25439a94299a65a709756c7a3e568be049d5c38839"
+
+const errorResponseHex = "30030a0101"
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/armor.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/armor.go
new file mode 100644
index 00000000000..592d1864361
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/armor.go
@@ -0,0 +1,219 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package armor implements OpenPGP ASCII Armor, see RFC 4880. OpenPGP Armor is
+// very similar to PEM except that it has an additional CRC checksum.
+package armor // import "golang.org/x/crypto/openpgp/armor"
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/base64"
+ "golang.org/x/crypto/openpgp/errors"
+ "io"
+)
+
+// A Block represents an OpenPGP armored structure.
+//
+// The encoded form is:
+// -----BEGIN Type-----
+// Headers
+//
+// base64-encoded Bytes
+// '=' base64 encoded checksum
+// -----END Type-----
+// where Headers is a possibly empty sequence of Key: Value lines.
+//
+// Since the armored data can be very large, this package presents a streaming
+// interface.
+type Block struct {
+ Type string // The type, taken from the preamble (i.e. "PGP SIGNATURE").
+ Header map[string]string // Optional headers.
+ Body io.Reader // A Reader from which the contents can be read
+ lReader lineReader
+ oReader openpgpReader
+}
+
+// ArmorCorrupt is returned when the armored data is structurally invalid.
+var ArmorCorrupt error = errors.StructuralError("armor invalid")
+
+const crc24Init = 0xb704ce
+const crc24Poly = 0x1864cfb
+const crc24Mask = 0xffffff
+
+// crc24 calculates the OpenPGP checksum as specified in RFC 4880, section 6.1
+func crc24(crc uint32, d []byte) uint32 {
+ for _, b := range d {
+ crc ^= uint32(b) << 16
+ for i := 0; i < 8; i++ {
+ crc <<= 1
+ if crc&0x1000000 != 0 {
+ crc ^= crc24Poly
+ }
+ }
+ }
+ return crc
+}
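+
+// For illustration, the four base64 characters after '=' on the final armor
+// line encode this 24-bit checksum as three big-endian bytes; body below is an
+// assumed variable holding the raw (un-armored) data:
+//
+//	sum := crc24(crc24Init, body) & crc24Mask
+//	var raw [3]byte
+//	raw[0] = byte(sum >> 16)
+//	raw[1] = byte(sum >> 8)
+//	raw[2] = byte(sum)
+//	var b64 [4]byte
+//	base64.StdEncoding.Encode(b64[:], raw[:])
+//	checksumLine := "=" + string(b64[:])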
+
+var armorStart = []byte("-----BEGIN ")
+var armorEnd = []byte("-----END ")
+var armorEndOfLine = []byte("-----")
+
+// lineReader wraps a line based reader. It watches for the end of an armor
+// block and records the expected CRC value.
+type lineReader struct {
+ in *bufio.Reader
+ buf []byte
+ eof bool
+ crc uint32
+}
+
+func (l *lineReader) Read(p []byte) (n int, err error) {
+ if l.eof {
+ return 0, io.EOF
+ }
+
+ if len(l.buf) > 0 {
+ n = copy(p, l.buf)
+ l.buf = l.buf[n:]
+ return
+ }
+
+ line, isPrefix, err := l.in.ReadLine()
+ if err != nil {
+ return
+ }
+ if isPrefix {
+ return 0, ArmorCorrupt
+ }
+
+ if len(line) == 5 && line[0] == '=' {
+ // This is the checksum line
+ var expectedBytes [3]byte
+ var m int
+ m, err = base64.StdEncoding.Decode(expectedBytes[0:], line[1:])
+ if m != 3 || err != nil {
+ return
+ }
+ l.crc = uint32(expectedBytes[0])<<16 |
+ uint32(expectedBytes[1])<<8 |
+ uint32(expectedBytes[2])
+
+ line, _, err = l.in.ReadLine()
+ if err != nil && err != io.EOF {
+ return
+ }
+ if !bytes.HasPrefix(line, armorEnd) {
+ return 0, ArmorCorrupt
+ }
+
+ l.eof = true
+ return 0, io.EOF
+ }
+
+ if len(line) > 96 {
+ return 0, ArmorCorrupt
+ }
+
+ n = copy(p, line)
+ bytesToSave := len(line) - n
+ if bytesToSave > 0 {
+ if cap(l.buf) < bytesToSave {
+ l.buf = make([]byte, 0, bytesToSave)
+ }
+ l.buf = l.buf[0:bytesToSave]
+ copy(l.buf, line[n:])
+ }
+
+ return
+}
+
+// openpgpReader passes Read calls to the underlying base64 decoder, but keeps
+// a running CRC of the resulting data and checks the CRC against the value
+// found by the lineReader at EOF.
+type openpgpReader struct {
+ lReader *lineReader
+ b64Reader io.Reader
+ currentCRC uint32
+}
+
+func (r *openpgpReader) Read(p []byte) (n int, err error) {
+ n, err = r.b64Reader.Read(p)
+ r.currentCRC = crc24(r.currentCRC, p[:n])
+
+ if err == io.EOF {
+ if r.lReader.crc != uint32(r.currentCRC&crc24Mask) {
+ return 0, ArmorCorrupt
+ }
+ }
+
+ return
+}
+
+// Decode reads a PGP armored block from the given Reader. It will ignore
+// leading garbage. If it doesn't find a block, it will return nil, io.EOF. The
+// given Reader is not usable after calling this function: an arbitrary amount
+// of data may have been read past the end of the block.
+func Decode(in io.Reader) (p *Block, err error) {
+ r := bufio.NewReaderSize(in, 100)
+ var line []byte
+ ignoreNext := false
+
+TryNextBlock:
+ p = nil
+
+ // Skip leading garbage
+ for {
+ ignoreThis := ignoreNext
+ line, ignoreNext, err = r.ReadLine()
+ if err != nil {
+ return
+ }
+ if ignoreNext || ignoreThis {
+ continue
+ }
+ line = bytes.TrimSpace(line)
+ if len(line) > len(armorStart)+len(armorEndOfLine) && bytes.HasPrefix(line, armorStart) {
+ break
+ }
+ }
+
+ p = new(Block)
+ p.Type = string(line[len(armorStart) : len(line)-len(armorEndOfLine)])
+ p.Header = make(map[string]string)
+ nextIsContinuation := false
+ var lastKey string
+
+ // Read headers
+ for {
+ isContinuation := nextIsContinuation
+ line, nextIsContinuation, err = r.ReadLine()
+ if err != nil {
+ p = nil
+ return
+ }
+ if isContinuation {
+ p.Header[lastKey] += string(line)
+ continue
+ }
+ line = bytes.TrimSpace(line)
+ if len(line) == 0 {
+ break
+ }
+
+ i := bytes.Index(line, []byte(": "))
+ if i == -1 {
+ goto TryNextBlock
+ }
+ lastKey = string(line[:i])
+ p.Header[lastKey] = string(line[i+2:])
+ }
+
+ p.lReader.in = r
+ p.oReader.currentCRC = crc24Init
+ p.oReader.lReader = &p.lReader
+ p.oReader.b64Reader = base64.NewDecoder(base64.StdEncoding, &p.lReader)
+ p.Body = &p.oReader
+
+ return
+}
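+
+// A minimal usage sketch, assuming armoredText is a string holding an
+// ASCII-armored message such as a PGP signature (error handling abbreviated):
+//
+//	block, err := Decode(strings.NewReader(armoredText))
+//	if err != nil {
+//		return err
+//	}
+//	if block.Type != "PGP SIGNATURE" {
+//		// unexpected block type
+//	}
+//	// optional headers, e.g. block.Header["Version"], are available here
+//	payload, err := ioutil.ReadAll(block.Body)
+//	if err != nil {
+//		return err // includes ArmorCorrupt if the CRC does not match
+//	}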
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/armor_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/armor_test.go
new file mode 100644
index 00000000000..9334e94e96c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/armor_test.go
@@ -0,0 +1,95 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package armor
+
+import (
+ "bytes"
+ "hash/adler32"
+ "io/ioutil"
+ "testing"
+)
+
+func TestDecodeEncode(t *testing.T) {
+ buf := bytes.NewBuffer([]byte(armorExample1))
+ result, err := Decode(buf)
+ if err != nil {
+ t.Error(err)
+ }
+ expectedType := "PGP SIGNATURE"
+ if result.Type != expectedType {
+ t.Errorf("result.Type: got:%s want:%s", result.Type, expectedType)
+ }
+ if len(result.Header) != 1 {
+ t.Errorf("len(result.Header): got:%d want:1", len(result.Header))
+ }
+ v, ok := result.Header["Version"]
+ if !ok || v != "GnuPG v1.4.10 (GNU/Linux)" {
+ t.Errorf("result.Header: got:%#v", result.Header)
+ }
+
+ contents, err := ioutil.ReadAll(result.Body)
+ if err != nil {
+ t.Error(err)
+ }
+
+ if adler32.Checksum(contents) != 0x27b144be {
+ t.Errorf("contents: got: %x", contents)
+ }
+
+ buf = bytes.NewBuffer(nil)
+ w, err := Encode(buf, result.Type, result.Header)
+ if err != nil {
+ t.Error(err)
+ }
+ _, err = w.Write(contents)
+ if err != nil {
+ t.Error(err)
+ }
+ w.Close()
+
+ if !bytes.Equal(buf.Bytes(), []byte(armorExample1)) {
+ t.Errorf("got: %s\nwant: %s", string(buf.Bytes()), armorExample1)
+ }
+}
+
+func TestLongHeader(t *testing.T) {
+ buf := bytes.NewBuffer([]byte(armorLongLine))
+ result, err := Decode(buf)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+ value, ok := result.Header["Version"]
+ if !ok {
+ t.Errorf("missing Version header")
+ }
+ if value != longValueExpected {
+ t.Errorf("got: %s want: %s", value, longValueExpected)
+ }
+}
+
+const armorExample1 = `-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1.4.10 (GNU/Linux)
+
+iJwEAAECAAYFAk1Fv/0ACgkQo01+GMIMMbsYTwQAiAw+QAaNfY6WBdplZ/uMAccm
+4g+81QPmTSGHnetSb6WBiY13kVzK4HQiZH8JSkmmroMLuGeJwsRTEL4wbjRyUKEt
+p1xwUZDECs234F1xiG5enc5SGlRtP7foLBz9lOsjx+LEcA4sTl5/2eZR9zyFZqWW
+TxRjs+fJCIFuo71xb1g=
+=/teI
+-----END PGP SIGNATURE-----`
+
+const armorLongLine = `-----BEGIN PGP SIGNATURE-----
+Version: 0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz
+
+iQEcBAABAgAGBQJMtFESAAoJEKsQXJGvOPsVj40H/1WW6jaMXv4BW+1ueDSMDwM8
+kx1fLOXbVM5/Kn5LStZNt1jWWnpxdz7eq3uiqeCQjmqUoRde3YbB2EMnnwRbAhpp
+cacnAvy9ZQ78OTxUdNW1mhX5bS6q1MTEJnl+DcyigD70HG/yNNQD7sOPMdYQw0TA
+byQBwmLwmTsuZsrYqB68QyLHI+DUugn+kX6Hd2WDB62DKa2suoIUIHQQCd/ofwB3
+WfCYInXQKKOSxu2YOg2Eb4kLNhSMc1i9uKUWAH+sdgJh7NBgdoE4MaNtBFkHXRvv
+okWuf3+xA9ksp1npSY/mDvgHijmjvtpRDe6iUeqfCn8N9u9CBg8geANgaG8+QA4=
+=wfQG
+-----END PGP SIGNATURE-----`
+
+const longValueExpected = "0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz"
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/encode.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/encode.go
new file mode 100644
index 00000000000..6f07582c37c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/armor/encode.go
@@ -0,0 +1,160 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package armor
+
+import (
+ "encoding/base64"
+ "io"
+)
+
+var armorHeaderSep = []byte(": ")
+var blockEnd = []byte("\n=")
+var newline = []byte("\n")
+var armorEndOfLineOut = []byte("-----\n")
+
+// writeSlices writes its arguments to the given Writer.
+func writeSlices(out io.Writer, slices ...[]byte) (err error) {
+ for _, s := range slices {
+ _, err = out.Write(s)
+ if err != nil {
+ return err
+ }
+ }
+ return
+}
+
+// lineBreaker breaks data across several lines, all of the same byte length
+// (except possibly the last). Lines are broken with a single '\n'.
+type lineBreaker struct {
+ lineLength int
+ line []byte
+ used int
+ out io.Writer
+ haveWritten bool
+}
+
+func newLineBreaker(out io.Writer, lineLength int) *lineBreaker {
+ return &lineBreaker{
+ lineLength: lineLength,
+ line: make([]byte, lineLength),
+ used: 0,
+ out: out,
+ }
+}
+
+func (l *lineBreaker) Write(b []byte) (n int, err error) {
+ n = len(b)
+
+ if n == 0 {
+ return
+ }
+
+ if l.used == 0 && l.haveWritten {
+ _, err = l.out.Write([]byte{'\n'})
+ if err != nil {
+ return
+ }
+ }
+
+ if l.used+len(b) < l.lineLength {
+ l.used += copy(l.line[l.used:], b)
+ return
+ }
+
+ l.haveWritten = true
+ _, err = l.out.Write(l.line[0:l.used])
+ if err != nil {
+ return
+ }
+ excess := l.lineLength - l.used
+ l.used = 0
+
+ _, err = l.out.Write(b[0:excess])
+ if err != nil {
+ return
+ }
+
+ _, err = l.Write(b[excess:])
+ return
+}
+
+func (l *lineBreaker) Close() (err error) {
+ if l.used > 0 {
+ _, err = l.out.Write(l.line[0:l.used])
+ if err != nil {
+ return
+ }
+ }
+
+ return
+}
+
+// encoding keeps track of a running CRC24 over the data which has been written
+// to it and outputs an OpenPGP checksum when closed, followed by an armor
+// trailer.
+//
+// It's built into a stack of io.Writers:
+// encoding -> base64 encoder -> lineBreaker -> out
+type encoding struct {
+ out io.Writer
+ breaker *lineBreaker
+ b64 io.WriteCloser
+ crc uint32
+ blockType []byte
+}
+
+func (e *encoding) Write(data []byte) (n int, err error) {
+ e.crc = crc24(e.crc, data)
+ return e.b64.Write(data)
+}
+
+func (e *encoding) Close() (err error) {
+ err = e.b64.Close()
+ if err != nil {
+ return
+ }
+ e.breaker.Close()
+
+ var checksumBytes [3]byte
+ checksumBytes[0] = byte(e.crc >> 16)
+ checksumBytes[1] = byte(e.crc >> 8)
+ checksumBytes[2] = byte(e.crc)
+
+ var b64ChecksumBytes [4]byte
+ base64.StdEncoding.Encode(b64ChecksumBytes[:], checksumBytes[:])
+
+ return writeSlices(e.out, blockEnd, b64ChecksumBytes[:], newline, armorEnd, e.blockType, armorEndOfLine)
+}
+
+// Encode returns a WriteCloser which will encode the data written to it in
+// OpenPGP armor.
+func Encode(out io.Writer, blockType string, headers map[string]string) (w io.WriteCloser, err error) {
+ bType := []byte(blockType)
+ err = writeSlices(out, armorStart, bType, armorEndOfLineOut)
+ if err != nil {
+ return
+ }
+
+ for k, v := range headers {
+ err = writeSlices(out, []byte(k), armorHeaderSep, []byte(v), newline)
+ if err != nil {
+ return
+ }
+ }
+
+ _, err = out.Write(newline)
+ if err != nil {
+ return
+ }
+
+ e := &encoding{
+ out: out,
+ breaker: newLineBreaker(out, 64),
+ crc: crc24Init,
+ blockType: bType,
+ }
+ e.b64 = base64.NewEncoder(base64.StdEncoding, e.breaker)
+ return e, nil
+}
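
For orientation, here is a minimal sketch of how the Encode writer stack above (encoding -> base64 encoder -> lineBreaker -> out) might be driven; it is not part of the vendored diff, and the block type, header, and message text are made up for illustration.

package main

import (
	"bytes"
	"fmt"

	"golang.org/x/crypto/openpgp/armor"
)

func main() {
	var buf bytes.Buffer

	// Encode writes the "-----BEGIN EXAMPLE DATA-----" header and any header
	// lines immediately; the returned WriteCloser base64-encodes the payload,
	// wraps lines at 64 characters, and keeps a running CRC24.
	w, err := armor.Encode(&buf, "EXAMPLE DATA", map[string]string{"Comment": "illustrative header"})
	if err != nil {
		panic(err)
	}
	if _, err := w.Write([]byte("hello, armor")); err != nil {
		panic(err)
	}
	// Close flushes the base64 data and appends the "=" checksum line and the
	// matching END trailer.
	if err := w.Close(); err != nil {
		panic(err)
	}

	fmt.Print(buf.String())
}
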
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/canonical_text.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/canonical_text.go
new file mode 100644
index 00000000000..e601e389f12
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/canonical_text.go
@@ -0,0 +1,59 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package openpgp
+
+import "hash"
+
+// NewCanonicalTextHash reformats text written to it into the canonical
+// form and then applies the hash h. See RFC 4880, section 5.2.1.
+func NewCanonicalTextHash(h hash.Hash) hash.Hash {
+ return &canonicalTextHash{h, 0}
+}
+
+type canonicalTextHash struct {
+ h hash.Hash
+ s int
+}
+
+var newline = []byte{'\r', '\n'}
+
+func (cth *canonicalTextHash) Write(buf []byte) (int, error) {
+ start := 0
+
+ for i, c := range buf {
+ switch cth.s {
+ case 0:
+ if c == '\r' {
+ cth.s = 1
+ } else if c == '\n' {
+ cth.h.Write(buf[start:i])
+ cth.h.Write(newline)
+ start = i + 1
+ }
+ case 1:
+ cth.s = 0
+ }
+ }
+
+ cth.h.Write(buf[start:])
+ return len(buf), nil
+}
+
+func (cth *canonicalTextHash) Sum(in []byte) []byte {
+ return cth.h.Sum(in)
+}
+
+func (cth *canonicalTextHash) Reset() {
+ cth.h.Reset()
+ cth.s = 0
+}
+
+func (cth *canonicalTextHash) Size() int {
+ return cth.h.Size()
+}
+
+func (cth *canonicalTextHash) BlockSize() int {
+ return cth.h.BlockSize()
+}
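
As a quick illustration of the CR/LF normalization above (not part of the diff), the following sketch hashes the same text once with a bare LF and once with CRLF and gets identical digests.

package main

import (
	"crypto/sha256"
	"fmt"

	"golang.org/x/crypto/openpgp"
)

func main() {
	// "foo\n" and "foo\r\n" canonicalize to the same CRLF-terminated text,
	// so both wrappers produce the same SHA-256 digest.
	a := openpgp.NewCanonicalTextHash(sha256.New())
	a.Write([]byte("foo\n"))

	b := openpgp.NewCanonicalTextHash(sha256.New())
	b.Write([]byte("foo\r\n"))

	fmt.Printf("%x\n%x\n", a.Sum(nil), b.Sum(nil))
}
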
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/canonical_text_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/canonical_text_test.go
new file mode 100644
index 00000000000..8f3ba2a8814
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/canonical_text_test.go
@@ -0,0 +1,52 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package openpgp
+
+import (
+ "bytes"
+ "testing"
+)
+
+type recordingHash struct {
+ buf *bytes.Buffer
+}
+
+func (r recordingHash) Write(b []byte) (n int, err error) {
+ return r.buf.Write(b)
+}
+
+func (r recordingHash) Sum(in []byte) []byte {
+ return append(in, r.buf.Bytes()...)
+}
+
+func (r recordingHash) Reset() {
+ panic("shouldn't be called")
+}
+
+func (r recordingHash) Size() int {
+ panic("shouldn't be called")
+}
+
+func (r recordingHash) BlockSize() int {
+ panic("shouldn't be called")
+}
+
+func testCanonicalText(t *testing.T, input, expected string) {
+ r := recordingHash{bytes.NewBuffer(nil)}
+ c := NewCanonicalTextHash(r)
+ c.Write([]byte(input))
+ result := c.Sum(nil)
+ if expected != string(result) {
+ t.Errorf("input: %x got: %x want: %x", input, result, expected)
+ }
+}
+
+func TestCanonicalText(t *testing.T) {
+ testCanonicalText(t, "foo\n", "foo\r\n")
+ testCanonicalText(t, "foo", "foo")
+ testCanonicalText(t, "foo\r\n", "foo\r\n")
+ testCanonicalText(t, "foo\r\nbar", "foo\r\nbar")
+ testCanonicalText(t, "foo\r\nbar\n\n", "foo\r\nbar\r\n\r\n")
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/clearsign/clearsign.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/clearsign/clearsign.go
new file mode 100644
index 00000000000..6454d22c7e3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/clearsign/clearsign.go
@@ -0,0 +1,372 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package clearsign generates and processes OpenPGP clear-signed data. See
+// RFC 4880, section 7.
+//
+// Clearsigned messages are cryptographically signed, but the contents of the
+// message are kept in plaintext so that they can be read without special tools.
+package clearsign // import "golang.org/x/crypto/openpgp/clearsign"
+
+import (
+ "bufio"
+ "bytes"
+ "crypto"
+ "hash"
+ "io"
+ "net/textproto"
+ "strconv"
+
+ "golang.org/x/crypto/openpgp/armor"
+ "golang.org/x/crypto/openpgp/errors"
+ "golang.org/x/crypto/openpgp/packet"
+)
+
+// A Block represents a clearsigned message. A signature on a Block can
+// be checked by passing Bytes into openpgp.CheckDetachedSignature.
+type Block struct {
+ Headers textproto.MIMEHeader // Optional message headers
+ Plaintext []byte // The original message text
+ Bytes []byte // The signed message
+ ArmoredSignature *armor.Block // The signature block
+}
+
+// start is the marker which denotes the beginning of a clearsigned message.
+var start = []byte("\n-----BEGIN PGP SIGNED MESSAGE-----")
+
+// dashEscape is prefixed to any lines that begin with a hyphen so that they
+// can't be confused with endText.
+var dashEscape = []byte("- ")
+
+// endText is a marker which denotes the end of the message and the start of
+// an armored signature.
+var endText = []byte("-----BEGIN PGP SIGNATURE-----")
+
+// end is a marker which denotes the end of the armored signature.
+var end = []byte("\n-----END PGP SIGNATURE-----")
+
+var crlf = []byte("\r\n")
+var lf = byte('\n')
+
+// getLine returns the first \r\n or \n delimited line from the given byte
+// array. The line does not include the \r\n or \n. The remainder of the byte
+// array (also not including the new line bytes) is also returned and this will
+// always be smaller than the original argument.
+func getLine(data []byte) (line, rest []byte) {
+ i := bytes.Index(data, []byte{'\n'})
+ var j int
+ if i < 0 {
+ i = len(data)
+ j = i
+ } else {
+ j = i + 1
+ if i > 0 && data[i-1] == '\r' {
+ i--
+ }
+ }
+ return data[0:i], data[j:]
+}
+
+// Decode finds the first clearsigned message in data and returns it, as well
+// as the suffix of data which remains after the message.
+func Decode(data []byte) (b *Block, rest []byte) {
+ // start begins with a newline. However, at the very beginning of
+ // the byte array, we'll accept the start string without it.
+ rest = data
+ if bytes.HasPrefix(data, start[1:]) {
+ rest = rest[len(start)-1:]
+ } else if i := bytes.Index(data, start); i >= 0 {
+ rest = rest[i+len(start):]
+ } else {
+ return nil, data
+ }
+
+ // Consume the start line.
+ _, rest = getLine(rest)
+
+ var line []byte
+ b = &Block{
+ Headers: make(textproto.MIMEHeader),
+ }
+
+ // Next come a series of header lines.
+ for {
+ // This loop terminates because getLine's second result is
+ // always smaller than its argument.
+ if len(rest) == 0 {
+ return nil, data
+ }
+ // An empty line marks the end of the headers.
+ if line, rest = getLine(rest); len(line) == 0 {
+ break
+ }
+
+ i := bytes.Index(line, []byte{':'})
+ if i == -1 {
+ return nil, data
+ }
+
+ key, val := line[0:i], line[i+1:]
+ key = bytes.TrimSpace(key)
+ val = bytes.TrimSpace(val)
+ b.Headers.Add(string(key), string(val))
+ }
+
+ firstLine := true
+ for {
+ start := rest
+
+ line, rest = getLine(rest)
+ if bytes.Equal(line, endText) {
+ // Back up to the start of the line because armor expects to see the
+ // header line.
+ rest = start
+ break
+ }
+
+ // The final CRLF isn't included in the hash so we don't write it until
+ // we've seen the next line.
+ if firstLine {
+ firstLine = false
+ } else {
+ b.Bytes = append(b.Bytes, crlf...)
+ }
+
+ if bytes.HasPrefix(line, dashEscape) {
+ line = line[2:]
+ }
+ line = bytes.TrimRight(line, " \t")
+ b.Bytes = append(b.Bytes, line...)
+
+ b.Plaintext = append(b.Plaintext, line...)
+ b.Plaintext = append(b.Plaintext, lf)
+ }
+
+ // We want to find the extent of the armored data (including any newlines at
+ // the end).
+ i := bytes.Index(rest, end)
+ if i == -1 {
+ return nil, data
+ }
+ i += len(end)
+ for i < len(rest) && (rest[i] == '\r' || rest[i] == '\n') {
+ i++
+ }
+ armored := rest[:i]
+ rest = rest[i:]
+
+ var err error
+ b.ArmoredSignature, err = armor.Decode(bytes.NewBuffer(armored))
+ if err != nil {
+ return nil, data
+ }
+
+ return b, rest
+}
+
+// A dashEscaper is an io.WriteCloser which processes the body of a clear-signed
+// message. The clear-signed message is written to buffered and a hash, suitable
+// for signing, is maintained in h.
+//
+// When closed, an armored signature is created and written to complete the
+// message.
+type dashEscaper struct {
+ buffered *bufio.Writer
+ h hash.Hash
+ hashType crypto.Hash
+
+ atBeginningOfLine bool
+ isFirstLine bool
+
+ whitespace []byte
+ byteBuf []byte // a one byte buffer to save allocations
+
+ privateKey *packet.PrivateKey
+ config *packet.Config
+}
+
+func (d *dashEscaper) Write(data []byte) (n int, err error) {
+ for _, b := range data {
+ d.byteBuf[0] = b
+
+ if d.atBeginningOfLine {
+ // The final CRLF isn't included in the hash so we have to wait
+ // until this point (the start of the next line) before writing it.
+ if !d.isFirstLine {
+ d.h.Write(crlf)
+ }
+ d.isFirstLine = false
+ }
+
+ // Any whitespace at the end of the line has to be removed so we
+ // buffer it until we find out whether there's more on this line.
+ if b == ' ' || b == '\t' || b == '\r' {
+ d.whitespace = append(d.whitespace, b)
+ d.atBeginningOfLine = false
+ continue
+ }
+
+ if d.atBeginningOfLine {
+ // At the beginning of a line, hyphens have to be escaped.
+ if b == '-' {
+ // The signature isn't calculated over the dash-escaped text so
+ // the escape is only written to buffered.
+ if _, err = d.buffered.Write(dashEscape); err != nil {
+ return
+ }
+ d.h.Write(d.byteBuf)
+ d.atBeginningOfLine = false
+ } else if b == '\n' {
+ // Nothing to do because we delay writing CRLF to the hash.
+ } else {
+ d.h.Write(d.byteBuf)
+ d.atBeginningOfLine = false
+ }
+ if err = d.buffered.WriteByte(b); err != nil {
+ return
+ }
+ } else {
+ if b == '\n' {
+ // We got a raw \n. Drop any trailing whitespace and write a
+ // CRLF.
+ d.whitespace = d.whitespace[:0]
+ // We delay writing CRLF to the hash until the start of the
+ // next line.
+ if err = d.buffered.WriteByte(b); err != nil {
+ return
+ }
+ d.atBeginningOfLine = true
+ } else {
+ // Any buffered whitespace wasn't at the end of the line so
+ // we need to write it out.
+ if len(d.whitespace) > 0 {
+ d.h.Write(d.whitespace)
+ if _, err = d.buffered.Write(d.whitespace); err != nil {
+ return
+ }
+ d.whitespace = d.whitespace[:0]
+ }
+ d.h.Write(d.byteBuf)
+ if err = d.buffered.WriteByte(b); err != nil {
+ return
+ }
+ }
+ }
+ }
+
+ n = len(data)
+ return
+}
+
+func (d *dashEscaper) Close() (err error) {
+ if !d.atBeginningOfLine {
+ if err = d.buffered.WriteByte(lf); err != nil {
+ return
+ }
+ }
+ sig := new(packet.Signature)
+ sig.SigType = packet.SigTypeText
+ sig.PubKeyAlgo = d.privateKey.PubKeyAlgo
+ sig.Hash = d.hashType
+ sig.CreationTime = d.config.Now()
+ sig.IssuerKeyId = &d.privateKey.KeyId
+
+ if err = sig.Sign(d.h, d.privateKey, d.config); err != nil {
+ return
+ }
+
+ out, err := armor.Encode(d.buffered, "PGP SIGNATURE", nil)
+ if err != nil {
+ return
+ }
+
+ if err = sig.Serialize(out); err != nil {
+ return
+ }
+ if err = out.Close(); err != nil {
+ return
+ }
+ if err = d.buffered.Flush(); err != nil {
+ return
+ }
+ return
+}
+
+// Encode returns a WriteCloser which will clear-sign a message with privateKey
+// and write it to w. If config is nil, sensible defaults are used.
+func Encode(w io.Writer, privateKey *packet.PrivateKey, config *packet.Config) (plaintext io.WriteCloser, err error) {
+ if privateKey.Encrypted {
+ return nil, errors.InvalidArgumentError("signing key is encrypted")
+ }
+
+ hashType := config.Hash()
+ name := nameOfHash(hashType)
+ if len(name) == 0 {
+ return nil, errors.UnsupportedError("unknown hash type: " + strconv.Itoa(int(hashType)))
+ }
+
+ if !hashType.Available() {
+ return nil, errors.UnsupportedError("unsupported hash type: " + strconv.Itoa(int(hashType)))
+ }
+ h := hashType.New()
+
+ buffered := bufio.NewWriter(w)
+ // start has a \n at the beginning that we don't want here.
+ if _, err = buffered.Write(start[1:]); err != nil {
+ return
+ }
+ if err = buffered.WriteByte(lf); err != nil {
+ return
+ }
+ if _, err = buffered.WriteString("Hash: "); err != nil {
+ return
+ }
+ if _, err = buffered.WriteString(name); err != nil {
+ return
+ }
+ if err = buffered.WriteByte(lf); err != nil {
+ return
+ }
+ if err = buffered.WriteByte(lf); err != nil {
+ return
+ }
+
+ plaintext = &dashEscaper{
+ buffered: buffered,
+ h: h,
+ hashType: hashType,
+
+ atBeginningOfLine: true,
+ isFirstLine: true,
+
+ byteBuf: make([]byte, 1),
+
+ privateKey: privateKey,
+ config: config,
+ }
+
+ return
+}
+
+// nameOfHash returns the OpenPGP name for the given hash, or the empty string
+// if the name isn't known. See RFC 4880, section 9.4.
+func nameOfHash(h crypto.Hash) string {
+ switch h {
+ case crypto.MD5:
+ return "MD5"
+ case crypto.SHA1:
+ return "SHA1"
+ case crypto.RIPEMD160:
+ return "RIPEMD160"
+ case crypto.SHA224:
+ return "SHA224"
+ case crypto.SHA256:
+ return "SHA256"
+ case crypto.SHA384:
+ return "SHA384"
+ case crypto.SHA512:
+ return "SHA512"
+ }
+ return ""
+}
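
To see Encode and Decode working end to end, here is a minimal sketch (not part of the diff) that generates a throwaway key, clear-signs a message, decodes it, and verifies the signature; the identity fields and message text are placeholders.

package main

import (
	"bytes"
	_ "crypto/sha256" // make sure the default signing hash is linked in
	"fmt"

	"golang.org/x/crypto/openpgp"
	"golang.org/x/crypto/openpgp/clearsign"
)

func main() {
	// A throwaway, unencrypted signing key; the identity fields are placeholders.
	signer, err := openpgp.NewEntity("Example Signer", "", "signer@example.invalid", nil)
	if err != nil {
		panic(err)
	}

	var buf bytes.Buffer
	w, err := clearsign.Encode(&buf, signer.PrivateKey, nil)
	if err != nil {
		panic(err)
	}
	fmt.Fprintln(w, "Hello, clearsign")
	if err := w.Close(); err != nil {
		panic(err)
	}

	// Decode recovers the signed text and the armored signature, which can
	// then be checked against the signer's public key.
	block, _ := clearsign.Decode(buf.Bytes())
	if block == nil {
		panic("no clearsigned block found")
	}
	if _, err := openpgp.CheckDetachedSignature(openpgp.EntityList{signer}, bytes.NewReader(block.Bytes), block.ArmoredSignature.Body); err != nil {
		panic(err)
	}
	fmt.Println("signature verified")
}
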
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/clearsign/clearsign_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/clearsign/clearsign_test.go
new file mode 100644
index 00000000000..406377c6712
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/clearsign/clearsign_test.go
@@ -0,0 +1,197 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package clearsign
+
+import (
+ "bytes"
+ "golang.org/x/crypto/openpgp"
+ "testing"
+)
+
+func testParse(t *testing.T, input []byte, expected, expectedPlaintext string) {
+ b, rest := Decode(input)
+ if b == nil {
+ t.Fatal("failed to decode clearsign message")
+ }
+ if !bytes.Equal(rest, []byte("trailing")) {
+ t.Errorf("unexpected remaining bytes returned: %s", string(rest))
+ }
+ if b.ArmoredSignature.Type != "PGP SIGNATURE" {
+ t.Errorf("bad armor type, got:%s, want:PGP SIGNATURE", b.ArmoredSignature.Type)
+ }
+ if !bytes.Equal(b.Bytes, []byte(expected)) {
+ t.Errorf("bad body, got:%x want:%x", b.Bytes, expected)
+ }
+
+ if !bytes.Equal(b.Plaintext, []byte(expectedPlaintext)) {
+ t.Errorf("bad plaintext, got:%x want:%x", b.Plaintext, expectedPlaintext)
+ }
+
+ keyring, err := openpgp.ReadArmoredKeyRing(bytes.NewBufferString(signingKey))
+ if err != nil {
+ t.Errorf("failed to parse public key: %s", err)
+ }
+
+ if _, err := openpgp.CheckDetachedSignature(keyring, bytes.NewBuffer(b.Bytes), b.ArmoredSignature.Body); err != nil {
+ t.Errorf("failed to check signature: %s", err)
+ }
+}
+
+func TestParse(t *testing.T) {
+ testParse(t, clearsignInput, "Hello world\r\nline 2", "Hello world\nline 2\n")
+ testParse(t, clearsignInput2, "\r\n\r\n(This message has a couple of blank lines at the start and end.)\r\n\r\n", "\n\n(This message has a couple of blank lines at the start and end.)\n\n\n")
+}
+
+func TestParseWithNoNewlineAtEnd(t *testing.T) {
+ input := clearsignInput
+ input = input[:len(input)-len("trailing")-1]
+ b, rest := Decode(input)
+ if b == nil {
+ t.Fatal("failed to decode clearsign message")
+ }
+ if len(rest) > 0 {
+ t.Errorf("unexpected remaining bytes returned: %s", string(rest))
+ }
+}
+
+var signingTests = []struct {
+ in, signed, plaintext string
+}{
+ {"", "", ""},
+ {"a", "a", "a\n"},
+ {"a\n", "a", "a\n"},
+ {"-a\n", "-a", "-a\n"},
+ {"--a\nb", "--a\r\nb", "--a\nb\n"},
+ // leading whitespace
+ {" a\n", " a", " a\n"},
+ {" a\n", " a", " a\n"},
+ // trailing whitespace (should be stripped)
+ {"a \n", "a", "a\n"},
+ {"a ", "a", "a\n"},
+ // whitespace-only lines (should be stripped)
+ {" \n", "", "\n"},
+ {" ", "", "\n"},
+ {"a\n \n \nb\n", "a\r\n\r\n\r\nb", "a\n\n\nb\n"},
+}
+
+func TestSigning(t *testing.T) {
+ keyring, err := openpgp.ReadArmoredKeyRing(bytes.NewBufferString(signingKey))
+ if err != nil {
+ t.Errorf("failed to parse public key: %s", err)
+ }
+
+ for i, test := range signingTests {
+ var buf bytes.Buffer
+
+ plaintext, err := Encode(&buf, keyring[0].PrivateKey, nil)
+ if err != nil {
+ t.Errorf("#%d: error from Encode: %s", i, err)
+ continue
+ }
+ if _, err := plaintext.Write([]byte(test.in)); err != nil {
+ t.Errorf("#%d: error from Write: %s", i, err)
+ continue
+ }
+ if err := plaintext.Close(); err != nil {
+ t.Fatalf("#%d: error from Close: %s", i, err)
+ continue
+ }
+
+ b, _ := Decode(buf.Bytes())
+ if b == nil {
+ t.Errorf("#%d: failed to decode clearsign message", i)
+ continue
+ }
+ if !bytes.Equal(b.Bytes, []byte(test.signed)) {
+ t.Errorf("#%d: bad result, got:%x, want:%x", i, b.Bytes, test.signed)
+ continue
+ }
+ if !bytes.Equal(b.Plaintext, []byte(test.plaintext)) {
+ t.Errorf("#%d: bad result, got:%x, want:%x", i, b.Plaintext, test.plaintext)
+ continue
+ }
+
+ if _, err := openpgp.CheckDetachedSignature(keyring, bytes.NewBuffer(b.Bytes), b.ArmoredSignature.Body); err != nil {
+ t.Errorf("#%d: failed to check signature: %s", i, err)
+ }
+ }
+}
+
+var clearsignInput = []byte(`
+;lasjlkfdsa
+
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA1
+
+Hello world
+line 2
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1.4.10 (GNU/Linux)
+
+iJwEAQECAAYFAk8kMuEACgkQO9o98PRieSpMsAQAhmY/vwmNpflrPgmfWsYhk5O8
+pjnBUzZwqTDoDeINjZEoPDSpQAHGhjFjgaDx/Gj4fAl0dM4D0wuUEBb6QOrwflog
+2A2k9kfSOMOtk0IH/H5VuFN1Mie9L/erYXjTQIptv9t9J7NoRBMU0QOOaFU0JaO9
+MyTpno24AjIAGb+mH1U=
+=hIJ6
+-----END PGP SIGNATURE-----
+trailing`)
+
+var clearsignInput2 = []byte(`
+asdlfkjasdlkfjsadf
+
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA256
+
+
+
+(This message has a couple of blank lines at the start and end.)
+
+
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1.4.11 (GNU/Linux)
+
+iJwEAQEIAAYFAlPpSREACgkQO9o98PRieSpZTAP+M8QUoCt/7Rf3YbXPcdzIL32v
+pt1I+cMNeopzfLy0u4ioEFi8s5VkwpL1AFmirvgViCwlf82inoRxzZRiW05JQ5LI
+ESEzeCoy2LIdRCQ2hcrG8pIUPzUO4TqO5D/dMbdHwNH4h5nNmGJUAEG6FpURlPm+
+qZg6BaTvOxepqOxnhVU=
+=e+C6
+-----END PGP SIGNATURE-----
+
+trailing`)
+
+var signingKey = `-----BEGIN PGP PRIVATE KEY BLOCK-----
+Version: GnuPG v1.4.10 (GNU/Linux)
+
+lQHYBE2rFNoBBADFwqWQIW/DSqcB4yCQqnAFTJ27qS5AnB46ccAdw3u4Greeu3Bp
+idpoHdjULy7zSKlwR1EA873dO/k/e11Ml3dlAFUinWeejWaK2ugFP6JjiieSsrKn
+vWNicdCS4HTWn0X4sjl0ZiAygw6GNhqEQ3cpLeL0g8E9hnYzJKQ0LWJa0QARAQAB
+AAP/TB81EIo2VYNmTq0pK1ZXwUpxCrvAAIG3hwKjEzHcbQznsjNvPUihZ+NZQ6+X
+0HCfPAdPkGDCLCb6NavcSW+iNnLTrdDnSI6+3BbIONqWWdRDYJhqZCkqmG6zqSfL
+IdkJgCw94taUg5BWP/AAeQrhzjChvpMQTVKQL5mnuZbUCeMCAN5qrYMP2S9iKdnk
+VANIFj7656ARKt/nf4CBzxcpHTyB8+d2CtPDKCmlJP6vL8t58Jmih+kHJMvC0dzn
+gr5f5+sCAOOe5gt9e0am7AvQWhdbHVfJU0TQJx+m2OiCJAqGTB1nvtBLHdJnfdC9
+TnXXQ6ZXibqLyBies/xeY2sCKL5qtTMCAKnX9+9d/5yQxRyrQUHt1NYhaXZnJbHx
+q4ytu0eWz+5i68IYUSK69jJ1NWPM0T6SkqpB3KCAIv68VFm9PxqG1KmhSrQIVGVz
+dCBLZXmIuAQTAQIAIgUCTasU2gIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AA
+CgkQO9o98PRieSoLhgQAkLEZex02Qt7vGhZzMwuN0R22w3VwyYyjBx+fM3JFETy1
+ut4xcLJoJfIaF5ZS38UplgakHG0FQ+b49i8dMij0aZmDqGxrew1m4kBfjXw9B/v+
+eIqpODryb6cOSwyQFH0lQkXC040pjq9YqDsO5w0WYNXYKDnzRV0p4H1pweo2VDid
+AdgETasU2gEEAN46UPeWRqKHvA99arOxee38fBt2CI08iiWyI8T3J6ivtFGixSqV
+bRcPxYO/qLpVe5l84Nb3X71GfVXlc9hyv7CD6tcowL59hg1E/DC5ydI8K8iEpUmK
+/UnHdIY5h8/kqgGxkY/T/hgp5fRQgW1ZoZxLajVlMRZ8W4tFtT0DeA+JABEBAAEA
+A/0bE1jaaZKj6ndqcw86jd+QtD1SF+Cf21CWRNeLKnUds4FRRvclzTyUMuWPkUeX
+TaNNsUOFqBsf6QQ2oHUBBK4VCHffHCW4ZEX2cd6umz7mpHW6XzN4DECEzOVksXtc
+lUC1j4UB91DC/RNQqwX1IV2QLSwssVotPMPqhOi0ZLNY7wIA3n7DWKInxYZZ4K+6
+rQ+POsz6brEoRHwr8x6XlHenq1Oki855pSa1yXIARoTrSJkBtn5oI+f8AzrnN0BN
+oyeQAwIA/7E++3HDi5aweWrViiul9cd3rcsS0dEnksPhvS0ozCJiHsq/6GFmy7J8
+QSHZPteedBnZyNp5jR+H7cIfVN3KgwH/Skq4PsuPhDq5TKK6i8Pc1WW8MA6DXTdU
+nLkX7RGmMwjC0DBf7KWAlPjFaONAX3a8ndnz//fy1q7u2l9AZwrj1qa1iJ8EGAEC
+AAkFAk2rFNoCGwwACgkQO9o98PRieSo2/QP/WTzr4ioINVsvN1akKuekmEMI3LAp
+BfHwatufxxP1U+3Si/6YIk7kuPB9Hs+pRqCXzbvPRrI8NHZBmc8qIGthishdCYad
+AHcVnXjtxrULkQFGbGvhKURLvS9WnzD/m1K2zzwxzkPTzT9/Yf06O6Mal5AdugPL
+VrM0m72/jnpKo04=
+=zNCn
+-----END PGP PRIVATE KEY BLOCK-----
+`
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/elgamal/elgamal.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/elgamal/elgamal.go
new file mode 100644
index 00000000000..73f4fe37859
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/elgamal/elgamal.go
@@ -0,0 +1,122 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package elgamal implements ElGamal encryption, suitable for OpenPGP,
+// as specified in "A Public-Key Cryptosystem and a Signature Scheme Based on
+// Discrete Logarithms," IEEE Transactions on Information Theory, v. IT-31,
+// n. 4, 1985, pp. 469-472.
+//
+// This form of ElGamal embeds PKCS#1 v1.5 padding, which may make it
+// unsuitable for other protocols. RSA should be used in preference in any
+// case.
+package elgamal // import "golang.org/x/crypto/openpgp/elgamal"
+
+import (
+ "crypto/rand"
+ "crypto/subtle"
+ "errors"
+ "io"
+ "math/big"
+)
+
+// PublicKey represents an ElGamal public key.
+type PublicKey struct {
+ G, P, Y *big.Int
+}
+
+// PrivateKey represents an ElGamal private key.
+type PrivateKey struct {
+ PublicKey
+ X *big.Int
+}
+
+// Encrypt encrypts the given message to the given public key. The result is a
+// pair of integers. Errors can result from reading random, or because msg is
+// too large to be encrypted to the public key.
+func Encrypt(random io.Reader, pub *PublicKey, msg []byte) (c1, c2 *big.Int, err error) {
+ pLen := (pub.P.BitLen() + 7) / 8
+ if len(msg) > pLen-11 {
+ err = errors.New("elgamal: message too long")
+ return
+ }
+
+ // EM = 0x02 || PS || 0x00 || M
+ em := make([]byte, pLen-1)
+ em[0] = 2
+ ps, mm := em[1:len(em)-len(msg)-1], em[len(em)-len(msg):]
+ err = nonZeroRandomBytes(ps, random)
+ if err != nil {
+ return
+ }
+ em[len(em)-len(msg)-1] = 0
+ copy(mm, msg)
+
+ m := new(big.Int).SetBytes(em)
+
+ k, err := rand.Int(random, pub.P)
+ if err != nil {
+ return
+ }
+
+ c1 = new(big.Int).Exp(pub.G, k, pub.P)
+ s := new(big.Int).Exp(pub.Y, k, pub.P)
+ c2 = s.Mul(s, m)
+ c2.Mod(c2, pub.P)
+
+ return
+}
+
+// Decrypt takes two integers, resulting from an ElGamal encryption, and
+// returns the plaintext of the message. An error can result only if the
+// ciphertext is invalid. Users should keep in mind that this is a padding
+// oracle and thus, if exposed to an adaptive chosen ciphertext attack, can
+// be used to break the cryptosystem. See ``Chosen Ciphertext Attacks
+// Against Protocols Based on the RSA Encryption Standard PKCS #1'', Daniel
+// Bleichenbacher, Advances in Cryptology (Crypto '98).
+func Decrypt(priv *PrivateKey, c1, c2 *big.Int) (msg []byte, err error) {
+ s := new(big.Int).Exp(c1, priv.X, priv.P)
+ s.ModInverse(s, priv.P)
+ s.Mul(s, c2)
+ s.Mod(s, priv.P)
+ em := s.Bytes()
+
+ firstByteIsTwo := subtle.ConstantTimeByteEq(em[0], 2)
+
+ // The remainder of the plaintext must be a string of non-zero random
+ // octets, followed by a 0, followed by the message.
+ // lookingForIndex: 1 iff we are still looking for the zero.
+ // index: the offset of the first zero byte.
+ var lookingForIndex, index int
+ lookingForIndex = 1
+
+ for i := 1; i < len(em); i++ {
+ equals0 := subtle.ConstantTimeByteEq(em[i], 0)
+ index = subtle.ConstantTimeSelect(lookingForIndex&equals0, i, index)
+ lookingForIndex = subtle.ConstantTimeSelect(equals0, 0, lookingForIndex)
+ }
+
+ if firstByteIsTwo != 1 || lookingForIndex != 0 || index < 9 {
+ return nil, errors.New("elgamal: decryption error")
+ }
+ return em[index+1:], nil
+}
+
+// nonZeroRandomBytes fills the given slice with non-zero random octets.
+func nonZeroRandomBytes(s []byte, rand io.Reader) (err error) {
+ _, err = io.ReadFull(rand, s)
+ if err != nil {
+ return
+ }
+
+ for i := 0; i < len(s); i++ {
+ for s[i] == 0 {
+ _, err = io.ReadFull(rand, s[i:i+1])
+ if err != nil {
+ return
+ }
+ }
+ }
+
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/elgamal/elgamal_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/elgamal/elgamal_test.go
new file mode 100644
index 00000000000..c4f99f5c48c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/elgamal/elgamal_test.go
@@ -0,0 +1,49 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package elgamal
+
+import (
+ "bytes"
+ "crypto/rand"
+ "math/big"
+ "testing"
+)
+
+// This is the 1024-bit MODP group from RFC 5114, section 2.1:
+const primeHex = "B10B8F96A080E01DDE92DE5EAE5D54EC52C99FBCFB06A3C69A6A9DCA52D23B616073E28675A23D189838EF1E2EE652C013ECB4AEA906112324975C3CD49B83BFACCBDD7D90C4BD7098488E9C219A73724EFFD6FAE5644738FAA31A4FF55BCCC0A151AF5F0DC8B4BD45BF37DF365C1A65E68CFDA76D4DA708DF1FB2BC2E4A4371"
+
+const generatorHex = "A4D1CBD5C3FD34126765A442EFB99905F8104DD258AC507FD6406CFF14266D31266FEA1E5C41564B777E690F5504F213160217B4B01B886A5E91547F9E2749F4D7FBD7D3B9A92EE1909D0D2263F80A76A6A24C087A091F531DBF0A0169B6A28AD662A4D18E73AFA32D779D5918D08BC8858F4DCEF97C2A24855E6EEB22B3B2E5"
+
+func fromHex(hex string) *big.Int {
+ n, ok := new(big.Int).SetString(hex, 16)
+ if !ok {
+ panic("failed to parse hex number")
+ }
+ return n
+}
+
+func TestEncryptDecrypt(t *testing.T) {
+ priv := &PrivateKey{
+ PublicKey: PublicKey{
+ G: fromHex(generatorHex),
+ P: fromHex(primeHex),
+ },
+ X: fromHex("42"),
+ }
+ priv.Y = new(big.Int).Exp(priv.G, priv.X, priv.P)
+
+ message := []byte("hello world")
+ c1, c2, err := Encrypt(rand.Reader, &priv.PublicKey, message)
+ if err != nil {
+ t.Errorf("error encrypting: %s", err)
+ }
+ message2, err := Decrypt(priv, c1, c2)
+ if err != nil {
+ t.Errorf("error decrypting: %s", err)
+ }
+ if !bytes.Equal(message2, message) {
+ t.Errorf("decryption failed, got: %x, want: %x", message2, message)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/errors/errors.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/errors/errors.go
new file mode 100644
index 00000000000..eb0550b2d04
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/errors/errors.go
@@ -0,0 +1,72 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package errors contains common error types for the OpenPGP packages.
+package errors // import "golang.org/x/crypto/openpgp/errors"
+
+import (
+ "strconv"
+)
+
+// A StructuralError is returned when OpenPGP data is found to be syntactically
+// invalid.
+type StructuralError string
+
+func (s StructuralError) Error() string {
+ return "openpgp: invalid data: " + string(s)
+}
+
+// UnsupportedError indicates that, although the OpenPGP data is valid, it
+// makes use of currently unimplemented features.
+type UnsupportedError string
+
+func (s UnsupportedError) Error() string {
+ return "openpgp: unsupported feature: " + string(s)
+}
+
+// InvalidArgumentError indicates that the caller is in error and passed an
+// incorrect value.
+type InvalidArgumentError string
+
+func (i InvalidArgumentError) Error() string {
+ return "openpgp: invalid argument: " + string(i)
+}
+
+// SignatureError indicates that a syntactically valid signature failed to
+// validate.
+type SignatureError string
+
+func (b SignatureError) Error() string {
+ return "openpgp: invalid signature: " + string(b)
+}
+
+type keyIncorrectError int
+
+func (ki keyIncorrectError) Error() string {
+ return "openpgp: incorrect key"
+}
+
+var ErrKeyIncorrect error = keyIncorrectError(0)
+
+type unknownIssuerError int
+
+func (unknownIssuerError) Error() string {
+ return "openpgp: signature made by unknown entity"
+}
+
+var ErrUnknownIssuer error = unknownIssuerError(0)
+
+type keyRevokedError int
+
+func (keyRevokedError) Error() string {
+ return "openpgp: signature made by revoked key"
+}
+
+var ErrKeyRevoked error = keyRevokedError(0)
+
+type UnknownPacketTypeError uint8
+
+func (upte UnknownPacketTypeError) Error() string {
+ return "openpgp: unknown packet type: " + strconv.Itoa(int(upte))
+}
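
A small sketch (not from the diff) of how callers typically tell these error kinds apart: a type switch, as ReadKeyRing does later in this diff. The classify helper and the sample error strings are made up for illustration.

package main

import (
	"fmt"

	pgperrors "golang.org/x/crypto/openpgp/errors"
)

// classify maps the package's error types to short descriptions.
func classify(err error) string {
	switch err.(type) {
	case pgperrors.StructuralError:
		return "malformed OpenPGP data"
	case pgperrors.UnsupportedError:
		return "valid data using an unimplemented feature"
	case pgperrors.SignatureError:
		return "signature failed to validate"
	default:
		return "other error"
	}
}

func main() {
	fmt.Println(classify(pgperrors.UnsupportedError("public key type 99")))
	fmt.Println(classify(pgperrors.StructuralError("truncated packet")))
}
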
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/keys.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/keys.go
new file mode 100644
index 00000000000..bfe32603155
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/keys.go
@@ -0,0 +1,633 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package openpgp
+
+import (
+ "crypto/rsa"
+ "io"
+ "time"
+
+ "golang.org/x/crypto/openpgp/armor"
+ "golang.org/x/crypto/openpgp/errors"
+ "golang.org/x/crypto/openpgp/packet"
+)
+
+// PublicKeyType is the armor type for a PGP public key.
+var PublicKeyType = "PGP PUBLIC KEY BLOCK"
+
+// PrivateKeyType is the armor type for a PGP private key.
+var PrivateKeyType = "PGP PRIVATE KEY BLOCK"
+
+// An Entity represents the components of an OpenPGP key: a primary public key
+// (which must be a signing key), one or more identities claimed by that key,
+// and zero or more subkeys, which may be encryption keys.
+type Entity struct {
+ PrimaryKey *packet.PublicKey
+ PrivateKey *packet.PrivateKey
+ Identities map[string]*Identity // indexed by Identity.Name
+ Revocations []*packet.Signature
+ Subkeys []Subkey
+}
+
+// An Identity represents an identity claimed by an Entity and zero or more
+// assertions by other entities about that claim.
+type Identity struct {
+ Name string // by convention, has the form "Full Name (comment) <email@example.com>"
+ UserId *packet.UserId
+ SelfSignature *packet.Signature
+ Signatures []*packet.Signature
+}
+
+// A Subkey is an additional public key in an Entity. Subkeys can be used for
+// encryption.
+type Subkey struct {
+ PublicKey *packet.PublicKey
+ PrivateKey *packet.PrivateKey
+ Sig *packet.Signature
+}
+
+// A Key identifies a specific public key in an Entity. This is either the
+// Entity's primary key or a subkey.
+type Key struct {
+ Entity *Entity
+ PublicKey *packet.PublicKey
+ PrivateKey *packet.PrivateKey
+ SelfSignature *packet.Signature
+}
+
+// A KeyRing provides access to public and private keys.
+type KeyRing interface {
+ // KeysById returns the set of keys that have the given key id.
+ KeysById(id uint64) []Key
+	// KeysByIdUsage returns the set of keys with the given id
+ // that also meet the key usage given by requiredUsage.
+ // The requiredUsage is expressed as the bitwise-OR of
+ // packet.KeyFlag* values.
+ KeysByIdUsage(id uint64, requiredUsage byte) []Key
+ // DecryptionKeys returns all private keys that are valid for
+ // decryption.
+ DecryptionKeys() []Key
+}
+
+// primaryIdentity returns the Identity marked as primary or the first identity
+// if none are so marked.
+func (e *Entity) primaryIdentity() *Identity {
+ var firstIdentity *Identity
+ for _, ident := range e.Identities {
+ if firstIdentity == nil {
+ firstIdentity = ident
+ }
+ if ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId {
+ return ident
+ }
+ }
+ return firstIdentity
+}
+
+// encryptionKey returns the best candidate Key for encrypting a message to the
+// given Entity.
+func (e *Entity) encryptionKey(now time.Time) (Key, bool) {
+ candidateSubkey := -1
+
+ // Iterate the keys to find the newest key
+ var maxTime time.Time
+ for i, subkey := range e.Subkeys {
+ if subkey.Sig.FlagsValid &&
+ subkey.Sig.FlagEncryptCommunications &&
+ subkey.PublicKey.PubKeyAlgo.CanEncrypt() &&
+ !subkey.Sig.KeyExpired(now) &&
+ (maxTime.IsZero() || subkey.Sig.CreationTime.After(maxTime)) {
+ candidateSubkey = i
+ maxTime = subkey.Sig.CreationTime
+ }
+ }
+
+ if candidateSubkey != -1 {
+ subkey := e.Subkeys[candidateSubkey]
+ return Key{e, subkey.PublicKey, subkey.PrivateKey, subkey.Sig}, true
+ }
+
+ // If we don't have any candidate subkeys for encryption and
+ // the primary key doesn't have any usage metadata then we
+ // assume that the primary key is ok. Or, if the primary key is
+ // marked as ok to encrypt to, then we can obviously use it.
+ i := e.primaryIdentity()
+ if !i.SelfSignature.FlagsValid || i.SelfSignature.FlagEncryptCommunications &&
+ e.PrimaryKey.PubKeyAlgo.CanEncrypt() &&
+ !i.SelfSignature.KeyExpired(now) {
+ return Key{e, e.PrimaryKey, e.PrivateKey, i.SelfSignature}, true
+ }
+
+ // This Entity appears to be signing only.
+ return Key{}, false
+}
+
+// signingKey returns the best candidate Key for signing a message with this
+// Entity.
+func (e *Entity) signingKey(now time.Time) (Key, bool) {
+ candidateSubkey := -1
+
+ for i, subkey := range e.Subkeys {
+ if subkey.Sig.FlagsValid &&
+ subkey.Sig.FlagSign &&
+ subkey.PublicKey.PubKeyAlgo.CanSign() &&
+ !subkey.Sig.KeyExpired(now) {
+ candidateSubkey = i
+ break
+ }
+ }
+
+ if candidateSubkey != -1 {
+ subkey := e.Subkeys[candidateSubkey]
+ return Key{e, subkey.PublicKey, subkey.PrivateKey, subkey.Sig}, true
+ }
+
+ // If we have no candidate subkey then we assume that it's ok to sign
+ // with the primary key.
+ i := e.primaryIdentity()
+ if !i.SelfSignature.FlagsValid || i.SelfSignature.FlagSign &&
+ !i.SelfSignature.KeyExpired(now) {
+ return Key{e, e.PrimaryKey, e.PrivateKey, i.SelfSignature}, true
+ }
+
+ return Key{}, false
+}
+
+// An EntityList contains one or more Entities.
+type EntityList []*Entity
+
+// KeysById returns the set of keys that have the given key id.
+func (el EntityList) KeysById(id uint64) (keys []Key) {
+ for _, e := range el {
+ if e.PrimaryKey.KeyId == id {
+ var selfSig *packet.Signature
+ for _, ident := range e.Identities {
+ if selfSig == nil {
+ selfSig = ident.SelfSignature
+ } else if ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId {
+ selfSig = ident.SelfSignature
+ break
+ }
+ }
+ keys = append(keys, Key{e, e.PrimaryKey, e.PrivateKey, selfSig})
+ }
+
+ for _, subKey := range e.Subkeys {
+ if subKey.PublicKey.KeyId == id {
+ keys = append(keys, Key{e, subKey.PublicKey, subKey.PrivateKey, subKey.Sig})
+ }
+ }
+ }
+ return
+}
+
+// KeysByIdUsage returns the set of keys with the given id that also meet
+// the key usage given by requiredUsage. The requiredUsage is expressed as
+// the bitwise-OR of packet.KeyFlag* values.
+func (el EntityList) KeysByIdUsage(id uint64, requiredUsage byte) (keys []Key) {
+ for _, key := range el.KeysById(id) {
+ if len(key.Entity.Revocations) > 0 {
+ continue
+ }
+
+ if key.SelfSignature.RevocationReason != nil {
+ continue
+ }
+
+ if key.SelfSignature.FlagsValid && requiredUsage != 0 {
+ var usage byte
+ if key.SelfSignature.FlagCertify {
+ usage |= packet.KeyFlagCertify
+ }
+ if key.SelfSignature.FlagSign {
+ usage |= packet.KeyFlagSign
+ }
+ if key.SelfSignature.FlagEncryptCommunications {
+ usage |= packet.KeyFlagEncryptCommunications
+ }
+ if key.SelfSignature.FlagEncryptStorage {
+ usage |= packet.KeyFlagEncryptStorage
+ }
+ if usage&requiredUsage != requiredUsage {
+ continue
+ }
+ }
+
+ keys = append(keys, key)
+ }
+ return
+}
+
+// DecryptionKeys returns all private keys that are valid for decryption.
+func (el EntityList) DecryptionKeys() (keys []Key) {
+ for _, e := range el {
+ for _, subKey := range e.Subkeys {
+ if subKey.PrivateKey != nil && (!subKey.Sig.FlagsValid || subKey.Sig.FlagEncryptStorage || subKey.Sig.FlagEncryptCommunications) {
+ keys = append(keys, Key{e, subKey.PublicKey, subKey.PrivateKey, subKey.Sig})
+ }
+ }
+ }
+ return
+}
+
+// ReadArmoredKeyRing reads one or more public/private keys from an armored keyring.
+func ReadArmoredKeyRing(r io.Reader) (EntityList, error) {
+ block, err := armor.Decode(r)
+ if err == io.EOF {
+ return nil, errors.InvalidArgumentError("no armored data found")
+ }
+ if err != nil {
+ return nil, err
+ }
+ if block.Type != PublicKeyType && block.Type != PrivateKeyType {
+ return nil, errors.InvalidArgumentError("expected public or private key block, got: " + block.Type)
+ }
+
+ return ReadKeyRing(block.Body)
+}
+
+// ReadKeyRing reads one or more public/private keys. Unsupported keys are
+// ignored as long as at least a single valid key is found.
+func ReadKeyRing(r io.Reader) (el EntityList, err error) {
+ packets := packet.NewReader(r)
+ var lastUnsupportedError error
+
+ for {
+ var e *Entity
+ e, err = ReadEntity(packets)
+ if err != nil {
+ // TODO: warn about skipped unsupported/unreadable keys
+ if _, ok := err.(errors.UnsupportedError); ok {
+ lastUnsupportedError = err
+ err = readToNextPublicKey(packets)
+ } else if _, ok := err.(errors.StructuralError); ok {
+ // Skip unreadable, badly-formatted keys
+ lastUnsupportedError = err
+ err = readToNextPublicKey(packets)
+ }
+ if err == io.EOF {
+ err = nil
+ break
+ }
+ if err != nil {
+ el = nil
+ break
+ }
+ } else {
+ el = append(el, e)
+ }
+ }
+
+ if len(el) == 0 && err == nil {
+ err = lastUnsupportedError
+ }
+ return
+}
+
+// readToNextPublicKey reads packets until the start of the next entity and leaves
+// the first packet of the new entity in the Reader.
+func readToNextPublicKey(packets *packet.Reader) (err error) {
+ var p packet.Packet
+ for {
+ p, err = packets.Next()
+ if err == io.EOF {
+ return
+ } else if err != nil {
+ if _, ok := err.(errors.UnsupportedError); ok {
+ err = nil
+ continue
+ }
+ return
+ }
+
+ if pk, ok := p.(*packet.PublicKey); ok && !pk.IsSubkey {
+ packets.Unread(p)
+ return
+ }
+ }
+
+ panic("unreachable")
+}
+
+// ReadEntity reads an entity (public key, identities, subkeys etc) from the
+// given Reader.
+func ReadEntity(packets *packet.Reader) (*Entity, error) {
+ e := new(Entity)
+ e.Identities = make(map[string]*Identity)
+
+ p, err := packets.Next()
+ if err != nil {
+ return nil, err
+ }
+
+ var ok bool
+ if e.PrimaryKey, ok = p.(*packet.PublicKey); !ok {
+ if e.PrivateKey, ok = p.(*packet.PrivateKey); !ok {
+ packets.Unread(p)
+ return nil, errors.StructuralError("first packet was not a public/private key")
+ } else {
+ e.PrimaryKey = &e.PrivateKey.PublicKey
+ }
+ }
+
+ if !e.PrimaryKey.PubKeyAlgo.CanSign() {
+ return nil, errors.StructuralError("primary key cannot be used for signatures")
+ }
+
+ var current *Identity
+ var revocations []*packet.Signature
+EachPacket:
+ for {
+ p, err := packets.Next()
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ return nil, err
+ }
+
+ switch pkt := p.(type) {
+ case *packet.UserId:
+ current = new(Identity)
+ current.Name = pkt.Id
+ current.UserId = pkt
+ e.Identities[pkt.Id] = current
+
+ for {
+ p, err = packets.Next()
+ if err == io.EOF {
+ return nil, io.ErrUnexpectedEOF
+ } else if err != nil {
+ return nil, err
+ }
+
+ sig, ok := p.(*packet.Signature)
+ if !ok {
+ return nil, errors.StructuralError("user ID packet not followed by self-signature")
+ }
+
+ if (sig.SigType == packet.SigTypePositiveCert || sig.SigType == packet.SigTypeGenericCert) && sig.IssuerKeyId != nil && *sig.IssuerKeyId == e.PrimaryKey.KeyId {
+ if err = e.PrimaryKey.VerifyUserIdSignature(pkt.Id, e.PrimaryKey, sig); err != nil {
+ return nil, errors.StructuralError("user ID self-signature invalid: " + err.Error())
+ }
+ current.SelfSignature = sig
+ break
+ }
+ current.Signatures = append(current.Signatures, sig)
+ }
+ case *packet.Signature:
+ if pkt.SigType == packet.SigTypeKeyRevocation {
+ revocations = append(revocations, pkt)
+ } else if pkt.SigType == packet.SigTypeDirectSignature {
+ // TODO: RFC4880 5.2.1 permits signatures
+ // directly on keys (eg. to bind additional
+ // revocation keys).
+ } else if current == nil {
+ return nil, errors.StructuralError("signature packet found before user id packet")
+ } else {
+ current.Signatures = append(current.Signatures, pkt)
+ }
+ case *packet.PrivateKey:
+ if pkt.IsSubkey == false {
+ packets.Unread(p)
+ break EachPacket
+ }
+ err = addSubkey(e, packets, &pkt.PublicKey, pkt)
+ if err != nil {
+ return nil, err
+ }
+ case *packet.PublicKey:
+ if pkt.IsSubkey == false {
+ packets.Unread(p)
+ break EachPacket
+ }
+ err = addSubkey(e, packets, pkt, nil)
+ if err != nil {
+ return nil, err
+ }
+ default:
+ // we ignore unknown packets
+ }
+ }
+
+ if len(e.Identities) == 0 {
+ return nil, errors.StructuralError("entity without any identities")
+ }
+
+ for _, revocation := range revocations {
+ err = e.PrimaryKey.VerifyRevocationSignature(revocation)
+ if err == nil {
+ e.Revocations = append(e.Revocations, revocation)
+ } else {
+ // TODO: RFC 4880 5.2.3.15 defines revocation keys.
+ return nil, errors.StructuralError("revocation signature signed by alternate key")
+ }
+ }
+
+ return e, nil
+}
+
+func addSubkey(e *Entity, packets *packet.Reader, pub *packet.PublicKey, priv *packet.PrivateKey) error {
+ var subKey Subkey
+ subKey.PublicKey = pub
+ subKey.PrivateKey = priv
+ p, err := packets.Next()
+ if err == io.EOF {
+ return io.ErrUnexpectedEOF
+ }
+ if err != nil {
+ return errors.StructuralError("subkey signature invalid: " + err.Error())
+ }
+ var ok bool
+ subKey.Sig, ok = p.(*packet.Signature)
+ if !ok {
+ return errors.StructuralError("subkey packet not followed by signature")
+ }
+ if subKey.Sig.SigType != packet.SigTypeSubkeyBinding && subKey.Sig.SigType != packet.SigTypeSubkeyRevocation {
+ return errors.StructuralError("subkey signature with wrong type")
+ }
+ err = e.PrimaryKey.VerifyKeySignature(subKey.PublicKey, subKey.Sig)
+ if err != nil {
+ return errors.StructuralError("subkey signature invalid: " + err.Error())
+ }
+ e.Subkeys = append(e.Subkeys, subKey)
+ return nil
+}
+
+const defaultRSAKeyBits = 2048
+
+// NewEntity returns an Entity that contains a fresh RSA/RSA keypair with a
+// single identity composed of the given full name, comment and email, any of
+// which may be empty but must not contain any of "()<>\x00".
+// If config is nil, sensible defaults will be used.
+func NewEntity(name, comment, email string, config *packet.Config) (*Entity, error) {
+ currentTime := config.Now()
+
+ bits := defaultRSAKeyBits
+ if config != nil && config.RSABits != 0 {
+ bits = config.RSABits
+ }
+
+ uid := packet.NewUserId(name, comment, email)
+ if uid == nil {
+ return nil, errors.InvalidArgumentError("user id field contained invalid characters")
+ }
+ signingPriv, err := rsa.GenerateKey(config.Random(), bits)
+ if err != nil {
+ return nil, err
+ }
+ encryptingPriv, err := rsa.GenerateKey(config.Random(), bits)
+ if err != nil {
+ return nil, err
+ }
+
+ e := &Entity{
+ PrimaryKey: packet.NewRSAPublicKey(currentTime, &signingPriv.PublicKey),
+ PrivateKey: packet.NewRSAPrivateKey(currentTime, signingPriv),
+ Identities: make(map[string]*Identity),
+ }
+ isPrimaryId := true
+ e.Identities[uid.Id] = &Identity{
+ Name: uid.Name,
+ UserId: uid,
+ SelfSignature: &packet.Signature{
+ CreationTime: currentTime,
+ SigType: packet.SigTypePositiveCert,
+ PubKeyAlgo: packet.PubKeyAlgoRSA,
+ Hash: config.Hash(),
+ IsPrimaryId: &isPrimaryId,
+ FlagsValid: true,
+ FlagSign: true,
+ FlagCertify: true,
+ IssuerKeyId: &e.PrimaryKey.KeyId,
+ },
+ }
+
+ e.Subkeys = make([]Subkey, 1)
+ e.Subkeys[0] = Subkey{
+ PublicKey: packet.NewRSAPublicKey(currentTime, &encryptingPriv.PublicKey),
+ PrivateKey: packet.NewRSAPrivateKey(currentTime, encryptingPriv),
+ Sig: &packet.Signature{
+ CreationTime: currentTime,
+ SigType: packet.SigTypeSubkeyBinding,
+ PubKeyAlgo: packet.PubKeyAlgoRSA,
+ Hash: config.Hash(),
+ FlagsValid: true,
+ FlagEncryptStorage: true,
+ FlagEncryptCommunications: true,
+ IssuerKeyId: &e.PrimaryKey.KeyId,
+ },
+ }
+ e.Subkeys[0].PublicKey.IsSubkey = true
+ e.Subkeys[0].PrivateKey.IsSubkey = true
+
+ return e, nil
+}
+
+// SerializePrivate serializes an Entity, including private key material, to
+// the given Writer. For now, it must only be used on an Entity returned from
+// NewEntity.
+// If config is nil, sensible defaults will be used.
+func (e *Entity) SerializePrivate(w io.Writer, config *packet.Config) (err error) {
+ err = e.PrivateKey.Serialize(w)
+ if err != nil {
+ return
+ }
+ for _, ident := range e.Identities {
+ err = ident.UserId.Serialize(w)
+ if err != nil {
+ return
+ }
+ err = ident.SelfSignature.SignUserId(ident.UserId.Id, e.PrimaryKey, e.PrivateKey, config)
+ if err != nil {
+ return
+ }
+ err = ident.SelfSignature.Serialize(w)
+ if err != nil {
+ return
+ }
+ }
+ for _, subkey := range e.Subkeys {
+ err = subkey.PrivateKey.Serialize(w)
+ if err != nil {
+ return
+ }
+ err = subkey.Sig.SignKey(subkey.PublicKey, e.PrivateKey, config)
+ if err != nil {
+ return
+ }
+ err = subkey.Sig.Serialize(w)
+ if err != nil {
+ return
+ }
+ }
+ return nil
+}
+
+// Serialize writes the public part of the given Entity to w. (No private
+// key material will be output).
+func (e *Entity) Serialize(w io.Writer) error {
+ err := e.PrimaryKey.Serialize(w)
+ if err != nil {
+ return err
+ }
+ for _, ident := range e.Identities {
+ err = ident.UserId.Serialize(w)
+ if err != nil {
+ return err
+ }
+ err = ident.SelfSignature.Serialize(w)
+ if err != nil {
+ return err
+ }
+ for _, sig := range ident.Signatures {
+ err = sig.Serialize(w)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ for _, subkey := range e.Subkeys {
+ err = subkey.PublicKey.Serialize(w)
+ if err != nil {
+ return err
+ }
+ err = subkey.Sig.Serialize(w)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// SignIdentity adds a signature to e, from signer, attesting that identity is
+// associated with e. The provided identity must already be an element of
+// e.Identities and the private key of signer must have been decrypted if
+// necessary.
+// If config is nil, sensible defaults will be used.
+func (e *Entity) SignIdentity(identity string, signer *Entity, config *packet.Config) error {
+ if signer.PrivateKey == nil {
+ return errors.InvalidArgumentError("signing Entity must have a private key")
+ }
+ if signer.PrivateKey.Encrypted {
+ return errors.InvalidArgumentError("signing Entity's private key must be decrypted")
+ }
+ ident, ok := e.Identities[identity]
+ if !ok {
+ return errors.InvalidArgumentError("given identity string not found in Entity")
+ }
+
+ sig := &packet.Signature{
+ SigType: packet.SigTypeGenericCert,
+ PubKeyAlgo: signer.PrivateKey.PubKeyAlgo,
+ Hash: config.Hash(),
+ CreationTime: config.Now(),
+ IssuerKeyId: &signer.PrivateKey.KeyId,
+ }
+ if err := sig.SignUserId(identity, e.PrimaryKey, signer.PrivateKey, config); err != nil {
+ return err
+ }
+ ident.Signatures = append(ident.Signatures, sig)
+ return nil
+}
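
To tie the entity-handling pieces together, here is a minimal sketch (not part of the diff) that generates a fresh entity, writes it out as an armored private key block, and reads it back; the identity fields are placeholders.

package main

import (
	"bytes"
	_ "crypto/sha256" // make sure the default signing hash is linked in
	"fmt"

	"golang.org/x/crypto/openpgp"
	"golang.org/x/crypto/openpgp/armor"
)

func main() {
	// Identity fields are placeholders; a nil config means package defaults
	// (2048-bit RSA keys).
	e, err := openpgp.NewEntity("Example User", "", "user@example.invalid", nil)
	if err != nil {
		panic(err)
	}

	var buf bytes.Buffer
	w, err := armor.Encode(&buf, openpgp.PrivateKeyType, nil)
	if err != nil {
		panic(err)
	}
	// SerializePrivate also signs the identity and subkey, so the entity can
	// be read back with ReadArmoredKeyRing afterwards.
	if err := e.SerializePrivate(w, nil); err != nil {
		panic(err)
	}
	if err := w.Close(); err != nil {
		panic(err)
	}

	if _, err := openpgp.ReadArmoredKeyRing(bytes.NewReader(buf.Bytes())); err != nil {
		panic(err)
	}
	fmt.Print(buf.String())
}
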
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/keys_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/keys_test.go
new file mode 100644
index 00000000000..d5e2056bb82
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/keys_test.go
@@ -0,0 +1,370 @@
+package openpgp
+
+import (
+ "bytes"
+ "strings"
+ "testing"
+ "time"
+
+ "golang.org/x/crypto/openpgp/errors"
+ "golang.org/x/crypto/openpgp/packet"
+)
+
+func TestKeyExpiry(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(expiringKeyHex))
+ entity := kring[0]
+
+ const timeFormat = "2006-01-02"
+ time1, _ := time.Parse(timeFormat, "2013-07-01")
+
+ // The expiringKeyHex key is structured as:
+ //
+ // pub 1024R/5E237D8C created: 2013-07-01 expires: 2013-07-31 usage: SC
+ // sub 1024R/1ABB25A0 created: 2013-07-01 23:11:07 +0200 CEST expires: 2013-07-08 usage: E
+ // sub 1024R/96A672F5 created: 2013-07-01 23:11:23 +0200 CEST expires: 2013-07-31 usage: E
+ //
+ // So this should select the newest, non-expired encryption key.
+ key, _ := entity.encryptionKey(time1)
+ if id := key.PublicKey.KeyIdShortString(); id != "96A672F5" {
+		t.Errorf("Expected key 96A672F5 at time %s, but got key %s", time1.Format(timeFormat), id)
+ }
+
+ // Once the first encryption subkey has expired, the second should be
+ // selected.
+ time2, _ := time.Parse(timeFormat, "2013-07-09")
+ key, _ = entity.encryptionKey(time2)
+ if id := key.PublicKey.KeyIdShortString(); id != "96A672F5" {
+ t.Errorf("Expected key 96A672F5 at time %s, but got key %s", time2.Format(timeFormat), id)
+ }
+
+ // Once all the keys have expired, nothing should be returned.
+ time3, _ := time.Parse(timeFormat, "2013-08-01")
+ if key, ok := entity.encryptionKey(time3); ok {
+ t.Errorf("Expected no key at time %s, but got key %s", time3.Format(timeFormat), key.PublicKey.KeyIdShortString())
+ }
+}
+
+func TestMissingCrossSignature(t *testing.T) {
+ // This public key has a signing subkey, but the subkey does not
+ // contain a cross-signature.
+ keys, err := ReadArmoredKeyRing(bytes.NewBufferString(missingCrossSignatureKey))
+ if len(keys) != 0 {
+ t.Errorf("Accepted key with missing cross signature")
+ }
+ if err == nil {
+ t.Fatal("Failed to detect error in keyring with missing cross signature")
+ }
+ structural, ok := err.(errors.StructuralError)
+ if !ok {
+ t.Fatalf("Unexpected class of error: %T. Wanted StructuralError", err)
+ }
+ const expectedMsg = "signing subkey is missing cross-signature"
+ if !strings.Contains(string(structural), expectedMsg) {
+ t.Fatalf("Unexpected error: %q. Expected it to contain %q", err, expectedMsg)
+ }
+}
+
+func TestInvalidCrossSignature(t *testing.T) {
+ // This public key has a signing subkey, and the subkey has an
+ // embedded cross-signature. However, the cross-signature does
+ // not correctly validate over the primary and subkey.
+ keys, err := ReadArmoredKeyRing(bytes.NewBufferString(invalidCrossSignatureKey))
+ if len(keys) != 0 {
+ t.Errorf("Accepted key with invalid cross signature")
+ }
+ if err == nil {
+ t.Fatal("Failed to detect error in keyring with an invalid cross signature")
+ }
+ structural, ok := err.(errors.StructuralError)
+ if !ok {
+ t.Fatalf("Unexpected class of error: %T. Wanted StructuralError", err)
+ }
+ const expectedMsg = "subkey signature invalid"
+ if !strings.Contains(string(structural), expectedMsg) {
+ t.Fatalf("Unexpected error: %q. Expected it to contain %q", err, expectedMsg)
+ }
+}
+
+func TestGoodCrossSignature(t *testing.T) {
+ // This public key has a signing subkey, and the subkey has an
+ // embedded cross-signature which correctly validates over the
+ // primary and subkey.
+ keys, err := ReadArmoredKeyRing(bytes.NewBufferString(goodCrossSignatureKey))
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(keys) != 1 {
+ t.Errorf("Failed to accept key with good cross signature, %d", len(keys))
+ }
+ if len(keys[0].Subkeys) != 1 {
+ t.Errorf("Failed to accept good subkey, %d", len(keys[0].Subkeys))
+ }
+}
+
+// TestExternallyRevokableKey attempts to load and parse a key with a third party revocation permission.
+func TestExternallyRevocableKey(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(subkeyUsageHex))
+
+ // The 0xA42704B92866382A key can be revoked by 0xBE3893CB843D0FE70C
+ // according to this signature that appears within the key:
+ // :signature packet: algo 1, keyid A42704B92866382A
+ // version 4, created 1396409682, md5len 0, sigclass 0x1f
+ // digest algo 2, begin of digest a9 84
+ // hashed subpkt 2 len 4 (sig created 2014-04-02)
+ // hashed subpkt 12 len 22 (revocation key: c=80 a=1 f=CE094AA433F7040BB2DDF0BE3893CB843D0FE70C)
+ // hashed subpkt 7 len 1 (not revocable)
+ // subpkt 16 len 8 (issuer key ID A42704B92866382A)
+ // data: [1024 bits]
+
+ id := uint64(0xA42704B92866382A)
+ keys := kring.KeysById(id)
+ if len(keys) != 1 {
+ t.Errorf("Expected to find key id %X, but got %d matches", id, len(keys))
+ }
+}
+
+func TestKeyRevocation(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(revokedKeyHex))
+
+ // revokedKeyHex contains these keys:
+ // pub 1024R/9A34F7C0 2014-03-25 [revoked: 2014-03-25]
+ // sub 1024R/1BA3CD60 2014-03-25 [revoked: 2014-03-25]
+ ids := []uint64{0xA401D9F09A34F7C0, 0x5CD3BE0A1BA3CD60}
+
+ for _, id := range ids {
+ keys := kring.KeysById(id)
+ if len(keys) != 1 {
+ t.Errorf("Expected KeysById to find revoked key %X, but got %d matches", id, len(keys))
+ }
+ keys = kring.KeysByIdUsage(id, 0)
+ if len(keys) != 0 {
+ t.Errorf("Expected KeysByIdUsage to filter out revoked key %X, but got %d matches", id, len(keys))
+ }
+ }
+}
+
+func TestSubkeyRevocation(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(revokedSubkeyHex))
+
+ // revokedSubkeyHex contains these keys:
+ // pub 1024R/4EF7E4BECCDE97F0 2014-03-25
+ // sub 1024R/D63636E2B96AE423 2014-03-25
+ // sub 1024D/DBCE4EE19529437F 2014-03-25
+ // sub 1024R/677815E371C2FD23 2014-03-25 [revoked: 2014-03-25]
+ validKeys := []uint64{0x4EF7E4BECCDE97F0, 0xD63636E2B96AE423, 0xDBCE4EE19529437F}
+ revokedKey := uint64(0x677815E371C2FD23)
+
+ for _, id := range validKeys {
+ keys := kring.KeysById(id)
+ if len(keys) != 1 {
+ t.Errorf("Expected KeysById to find key %X, but got %d matches", id, len(keys))
+ }
+ keys = kring.KeysByIdUsage(id, 0)
+ if len(keys) != 1 {
+ t.Errorf("Expected KeysByIdUsage to find key %X, but got %d matches", id, len(keys))
+ }
+ }
+
+ keys := kring.KeysById(revokedKey)
+ if len(keys) != 1 {
+ t.Errorf("Expected KeysById to find key %X, but got %d matches", revokedKey, len(keys))
+ }
+
+ keys = kring.KeysByIdUsage(revokedKey, 0)
+ if len(keys) != 0 {
+ t.Errorf("Expected KeysByIdUsage to filter out revoked key %X, but got %d matches", revokedKey, len(keys))
+ }
+}
+
+func TestKeyUsage(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(subkeyUsageHex))
+
+ // subkeyUsageHex contains these keys:
+ // pub 1024R/2866382A created: 2014-04-01 expires: never usage: SC
+ // sub 1024R/936C9153 created: 2014-04-01 expires: never usage: E
+ // sub 1024R/64D5F5BB created: 2014-04-02 expires: never usage: E
+ // sub 1024D/BC0BA992 created: 2014-04-02 expires: never usage: S
+ certifiers := []uint64{0xA42704B92866382A}
+ signers := []uint64{0xA42704B92866382A, 0x42CE2C64BC0BA992}
+ encrypters := []uint64{0x09C0C7D9936C9153, 0xC104E98664D5F5BB}
+
+ for _, id := range certifiers {
+ keys := kring.KeysByIdUsage(id, packet.KeyFlagCertify)
+ if len(keys) == 1 {
+ if keys[0].PublicKey.KeyId != id {
+ t.Errorf("Expected to find certifier key id %X, but got %X", id, keys[0].PublicKey.KeyId)
+ }
+ } else {
+ t.Errorf("Expected one match for certifier key id %X, but got %d matches", id, len(keys))
+ }
+ }
+
+ for _, id := range signers {
+ keys := kring.KeysByIdUsage(id, packet.KeyFlagSign)
+ if len(keys) == 1 {
+ if keys[0].PublicKey.KeyId != id {
+ t.Errorf("Expected to find signing key id %X, but got %X", id, keys[0].PublicKey.KeyId)
+ }
+ } else {
+ t.Errorf("Expected one match for signing key id %X, but got %d matches", id, len(keys))
+ }
+
+ // This keyring contains no encryption keys that are also good for signing.
+ keys = kring.KeysByIdUsage(id, packet.KeyFlagEncryptStorage|packet.KeyFlagEncryptCommunications)
+ if len(keys) != 0 {
+ t.Errorf("Unexpected match for encryption key id %X", id)
+ }
+ }
+
+ for _, id := range encrypters {
+ keys := kring.KeysByIdUsage(id, packet.KeyFlagEncryptStorage|packet.KeyFlagEncryptCommunications)
+ if len(keys) == 1 {
+ if keys[0].PublicKey.KeyId != id {
+ t.Errorf("Expected to find encryption key id %X, but got %X", id, keys[0].PublicKey.KeyId)
+ }
+ } else {
+ t.Errorf("Expected one match for encryption key id %X, but got %d matches", id, len(keys))
+ }
+
+ // This keyring contains no encryption keys that are also good for signing.
+ keys = kring.KeysByIdUsage(id, packet.KeyFlagSign)
+ if len(keys) != 0 {
+ t.Errorf("Unexpected match for signing key id %X", id)
+ }
+ }
+}
+
+func TestIdVerification(t *testing.T) {
+ kring, err := ReadKeyRing(readerFromHex(testKeys1And2PrivateHex))
+ if err != nil {
+ t.Fatal(err)
+ }
+ if err := kring[1].PrivateKey.Decrypt([]byte("passphrase")); err != nil {
+ t.Fatal(err)
+ }
+
+ const identity = "Test Key 1 (RSA)"
+ if err := kring[0].SignIdentity(identity, kring[1], nil); err != nil {
+ t.Fatal(err)
+ }
+
+ ident, ok := kring[0].Identities[identity]
+ if !ok {
+ t.Fatal("identity missing from key after signing")
+ }
+
+ checked := false
+ for _, sig := range ident.Signatures {
+ if sig.IssuerKeyId == nil || *sig.IssuerKeyId != kring[1].PrimaryKey.KeyId {
+ continue
+ }
+
+ if err := kring[1].PrimaryKey.VerifyUserIdSignature(identity, kring[0].PrimaryKey, sig); err != nil {
+ t.Fatalf("error verifying new identity signature: %s", err)
+ }
+ checked = true
+ break
+ }
+
+ if !checked {
+ t.Fatal("didn't find identity signature in Entity")
+ }
+}
+
+const expiringKeyHex = "988d0451d1ec5d010400ba3385721f2dc3f4ab096b2ee867ab77213f0a27a8538441c35d2fa225b08798a1439a66a5150e6bdc3f40f5d28d588c712394c632b6299f77db8c0d48d37903fb72ebd794d61be6aa774688839e5fdecfe06b2684cc115d240c98c66cb1ef22ae84e3aa0c2b0c28665c1e7d4d044e7f270706193f5223c8d44e0d70b7b8da830011010001b40f4578706972792074657374206b657988be041301020028050251d1ec5d021b03050900278d00060b090807030206150802090a0b0416020301021e01021780000a091072589ad75e237d8c033503fd10506d72837834eb7f994117740723adc39227104b0d326a1161871c0b415d25b4aedef946ca77ea4c05af9c22b32cf98be86ab890111fced1ee3f75e87b7cc3c00dc63bbc85dfab91c0dc2ad9de2c4d13a34659333a85c6acc1a669c5e1d6cecb0cf1e56c10e72d855ae177ddc9e766f9b2dda57ccbb75f57156438bbdb4e42b88d0451d1ec5d0104009c64906559866c5cb61578f5846a94fcee142a489c9b41e67b12bb54cfe86eb9bc8566460f9a720cb00d6526fbccfd4f552071a8e3f7744b1882d01036d811ee5a3fb91a1c568055758f43ba5d2c6a9676b012f3a1a89e47bbf624f1ad571b208f3cc6224eb378f1645dd3d47584463f9eadeacfd1ce6f813064fbfdcc4b5a53001101000188a504180102000f021b0c050251d1f06b050900093e89000a091072589ad75e237d8c20e00400ab8310a41461425b37889c4da28129b5fae6084fafbc0a47dd1adc74a264c6e9c9cc125f40462ee1433072a58384daef88c961c390ed06426a81b464a53194c4e291ddd7e2e2ba3efced01537d713bd111f48437bde2363446200995e8e0d4e528dda377fd1e8f8ede9c8e2198b393bd86852ce7457a7e3daf74d510461a5b77b88d0451d1ece8010400b3a519f83ab0010307e83bca895170acce8964a044190a2b368892f7a244758d9fc193482648acb1fb9780d28cc22d171931f38bb40279389fc9bf2110876d4f3db4fcfb13f22f7083877fe56592b3b65251312c36f83ffcb6d313c6a17f197dd471f0712aad15a8537b435a92471ba2e5b0c72a6c72536c3b567c558d7b6051001101000188a504180102000f021b0c050251d1f07b050900279091000a091072589ad75e237d8ce69e03fe286026afacf7c97ee20673864d4459a2240b5655219950643c7dba0ac384b1d4359c67805b21d98211f7b09c2a0ccf6410c8c04d4ff4a51293725d8d6570d9d8bb0e10c07d22357caeb49626df99c180be02d77d1fe8ed25e7a54481237646083a9f89a11566cd20b9e995b1487c5f9e02aeb434f3a1897cd416dd0a87861838da3e9e"
+const subkeyUsageHex = "988d04533a52bc010400d26af43085558f65b9e7dbc90cb9238015259aed5e954637adcfa2181548b2d0b60c65f1f42ec5081cbf1bc0a8aa4900acfb77070837c58f26012fbce297d70afe96e759ad63531f0037538e70dbf8e384569b9720d99d8eb39d8d0a2947233ed242436cb6ac7dfe74123354b3d0119b5c235d3dd9c9d6c004f8ffaf67ad8583001101000188b7041f010200210502533b8552170c8001ce094aa433f7040bb2ddf0be3893cb843d0fe70c020700000a0910a42704b92866382aa98404009d63d916a27543da4221c60087c33f1c44bec9998c5438018ed370cca4962876c748e94b73eb39c58eb698063f3fd6346d58dd2a11c0247934c4a9d71f24754f7468f96fb24c3e791dd2392b62f626148ad724189498cbf993db2df7c0cdc2d677c35da0f16cb16c9ce7c33b4de65a4a91b1d21a130ae9cc26067718910ef8e2b417556d627261203c756d627261407379642e65642e61753e88b80413010200220502533a52bc021b03060b090807030206150802090a0b0416020301021e01021780000a0910a42704b92866382a47840400c0c2bd04f5fca586de408b395b3c280a278259c93eaaa8b79a53b97003f8ed502a8a00446dd9947fb462677e4fcac0dac2f0701847d15130aadb6cd9e0705ea0cf5f92f129136c7be21a718d46c8e641eb7f044f2adae573e11ae423a0a9ca51324f03a8a2f34b91fa40c3cc764bee4dccadedb54c768ba0469b683ea53f1c29b88d04533a52bc01040099c92a5d6f8b744224da27bc2369127c35269b58bec179de6bbc038f749344222f85a31933224f26b70243c4e4b2d242f0c4777eaef7b5502f9dad6d8bf3aaeb471210674b74de2d7078af497d55f5cdad97c7bedfbc1b41e8065a97c9c3d344b21fc81d27723af8e374bc595da26ea242dccb6ae497be26eea57e563ed517e90011010001889f0418010200090502533a52bc021b0c000a0910a42704b92866382afa1403ff70284c2de8a043ff51d8d29772602fa98009b7861c540535f874f2c230af8caf5638151a636b21f8255003997ccd29747fdd06777bb24f9593bd7d98a3e887689bf902f999915fcc94625ae487e5d13e6616f89090ebc4fdc7eb5cad8943e4056995bb61c6af37f8043016876a958ec7ebf39c43d20d53b7f546cfa83e8d2604b88d04533b8283010400c0b529316dbdf58b4c54461e7e669dc11c09eb7f73819f178ccd4177b9182b91d138605fcf1e463262fabefa73f94a52b5e15d1904635541c7ea540f07050ce0fb51b73e6f88644cec86e91107c957a114f69554548a85295d2b70bd0b203992f76eb5d493d86d9eabcaa7ef3fc7db7e458438db3fcdb0ca1cc97c638439a9170011010001889f0418010200090502533b8283021b0c000a0910a42704b92866382adc6d0400cfff6258485a21675adb7a811c3e19ebca18851533f75a7ba317950b9997fda8d1a4c8c76505c08c04b6c2cc31dc704d33da36a21273f2b388a1a706f7c3378b66d887197a525936ed9a69acb57fe7f718133da85ec742001c5d1864e9c6c8ea1b94f1c3759cebfd93b18606066c063a63be86085b7e37bdbc65f9a915bf084bb901a204533b85cd110400aed3d2c52af2b38b5b67904b0ef73d6dd7aef86adb770e2b153cd22489654dcc91730892087bb9856ae2d9f7ed1eb48f214243fe86bfe87b349ebd7c30e630e49c07b21fdabf78b7a95c8b7f969e97e3d33f2e074c63552ba64a2ded7badc05ce0ea2be6d53485f6900c7860c7aa76560376ce963d7271b9b54638a4028b573f00a0d8854bfcdb04986141568046202192263b9b67350400aaa1049dbc7943141ef590a70dcb028d730371d92ea4863de715f7f0f16d168bd3dc266c2450457d46dcbbf0b071547e5fbee7700a820c3750b236335d8d5848adb3c0da010e998908dfd93d961480084f3aea20b247034f8988eccb5546efaa35a92d0451df3aaf1aee5aa36a4c4d462c760ecd9cebcabfbe1412b1f21450f203fd126687cd486496e971a87fd9e1a8a765fe654baa219a6871ab97768596ab05c26c1aeea8f1a2c72395a58dbc12ef9640d2b95784e974a4d2d5a9b17c25fedacfe551bda52602de8f6d2e48443f5dd1a2a2a8e6a5e70ecdb88cd6e766ad9745c7ee91d78cc55c3d06536b49c3fee6c3d0b6ff0fb2bf13a314f57c953b8f4d93bf88e70418010200090502533b85cd021b0200520910a42704b92866382a47200419110200060502533b85cd000a091042ce2c64bc0ba99214b2009e26b26852c8b13b10c35768e40e78fbbb48bd084100a0c79d9ea0844fa5853dd3c85ff3ecae6f2c9dd6c557aa04008bbbc964cd65b9b8299d4ebf31f41cc7264b8cf33a00e82c5af022331fac79efc9563a822497ba012953cefe2629f1242fcdcb911dbb2315985bab060bfd58261ace3c654bdbbe2e8ed27a46e836490145c86dc7bae15c011f7e1ffc
33730109b9338cd9f483e7cef3d2f396aab5bd80efb6646d7e778270ee99d934d187dd98"
+const revokedKeyHex = "988d045331ce82010400c4fdf7b40a5477f206e6ee278eaef888ca73bf9128a9eef9f2f1ddb8b7b71a4c07cfa241f028a04edb405e4d916c61d6beabc333813dc7b484d2b3c52ee233c6a79b1eea4e9cc51596ba9cd5ac5aeb9df62d86ea051055b79d03f8a4fa9f38386f5bd17529138f3325d46801514ea9047977e0829ed728e68636802796801be10011010001889f04200102000905025331d0e3021d03000a0910a401d9f09a34f7c042aa040086631196405b7e6af71026b88e98012eab44aa9849f6ef3fa930c7c9f23deaedba9db1538830f8652fb7648ec3fcade8dbcbf9eaf428e83c6cbcc272201bfe2fbb90d41963397a7c0637a1a9d9448ce695d9790db2dc95433ad7be19eb3de72dacf1d6db82c3644c13eae2a3d072b99bb341debba012c5ce4006a7d34a1f4b94b444526567205265766f6b657220283c52656727732022424d204261726973746122204b657920262530305c303e5c29203c72656740626d626172697374612e636f2e61753e88b704130102002205025331ce82021b03060b090807030206150802090a0b0416020301021e01021780000a0910a401d9f09a34f7c0019c03f75edfbeb6a73e7225ad3cc52724e2872e04260d7daf0d693c170d8c4b243b8767bc7785763533febc62ec2600c30603c433c095453ede59ff2fcabeb84ce32e0ed9d5cf15ffcbc816202b64370d4d77c1e9077d74e94a16fb4fa2e5bec23a56d7a73cf275f91691ae1801a976fcde09e981a2f6327ac27ea1fecf3185df0d56889c04100102000605025331cfb5000a0910fe9645554e8266b64b4303fc084075396674fb6f778d302ac07cef6bc0b5d07b66b2004c44aef711cbac79617ef06d836b4957522d8772dd94bf41a2f4ac8b1ee6d70c57503f837445a74765a076d07b829b8111fc2a918423ddb817ead7ca2a613ef0bfb9c6b3562aec6c3cf3c75ef3031d81d95f6563e4cdcc9960bcb386c5d757b104fcca5fe11fc709df884604101102000605025331cfe7000a09107b15a67f0b3ddc0317f6009e360beea58f29c1d963a22b962b80788c3fa6c84e009d148cfde6b351469b8eae91187eff07ad9d08fcaab88d045331ce820104009f25e20a42b904f3fa555530fe5c46737cf7bd076c35a2a0d22b11f7e0b61a69320b768f4a80fe13980ce380d1cfc4a0cd8fbe2d2e2ef85416668b77208baa65bf973fe8e500e78cc310d7c8705cdb34328bf80e24f0385fce5845c33bc7943cf6b11b02348a23da0bf6428e57c05135f2dc6bd7c1ce325d666d5a5fd2fd5e410011010001889f04180102000905025331ce82021b0c000a0910a401d9f09a34f7c0418003fe34feafcbeaef348a800a0d908a7a6809cc7304017d820f70f0474d5e23cb17e38b67dc6dca282c6ca00961f4ec9edf2738d0f087b1d81e4871ef08e1798010863afb4eac4c44a376cb343be929c5be66a78cfd4456ae9ec6a99d97f4e1c3ff3583351db2147a65c0acef5c003fb544ab3a2e2dc4d43646f58b811a6c3a369d1f"
+const revokedSubkeyHex = "988d04533121f6010400aefc803a3e4bb1a61c86e8a86d2726c6a43e0079e9f2713f1fa017e9854c83877f4aced8e331d675c67ea83ddab80aacbfa0b9040bb12d96f5a3d6be09455e2a76546cbd21677537db941cab710216b6d24ec277ee0bd65b910f416737ed120f6b93a9d3b306245c8cfd8394606fdb462e5cf43c551438d2864506c63367fc890011010001b41d416c696365203c616c69636540626d626172697374612e636f2e61753e88bb041301020025021b03060b090807030206150802090a0b0416020301021e01021780050253312798021901000a09104ef7e4beccde97f015a803ff5448437780f63263b0df8442a995e7f76c221351a51edd06f2063d8166cf3157aada4923dfc44aa0f2a6a4da5cf83b7fe722ba8ab416c976e77c6b5682e7f1069026673bd0de56ba06fd5d7a9f177607f277d9b55ff940a638c3e68525c67517e2b3d976899b93ca267f705b3e5efad7d61220e96b618a4497eab8d04403d23f8846041011020006050253312910000a09107b15a67f0b3ddc03d96e009f50b6365d86c4be5d5e9d0ea42d5e56f5794c617700a0ab274e19c2827780016d23417ce89e0a2c0d987d889c04100102000605025331cf7a000a0910a401d9f09a34f7c0ee970400aca292f213041c9f3b3fc49148cbda9d84afee6183c8dd6c5ff2600b29482db5fecd4303797be1ee6d544a20a858080fec43412061c9a71fae4039fd58013b4ae341273e6c66ad4c7cdd9e68245bedb260562e7b166f2461a1032f2b38c0e0e5715fb3d1656979e052b55ca827a76f872b78a9fdae64bc298170bfcebedc1271b41a416c696365203c616c696365407379646973702e6f722e61753e88b804130102002205025331278b021b03060b090807030206150802090a0b0416020301021e01021780000a09104ef7e4beccde97f06a7003fa03c3af68d272ebc1fa08aa72a03b02189c26496a2833d90450801c4e42c5b5f51ad96ce2d2c9cef4b7c02a6a2fcf1412d6a2d486098eb762f5010a201819c17fd2888aec8eda20c65a3b75744de7ee5cc8ac7bfc470cbe3cb982720405a27a3c6a8c229cfe36905f881b02ed5680f6a8f05866efb9d6c5844897e631deb949ca8846041011020006050253312910000a09107b15a67f0b3ddc0347bc009f7fa35db59147469eb6f2c5aaf6428accb138b22800a0caa2f5f0874bacc5909c652a57a31beda65eddd5889c04100102000605025331cf7a000a0910a401d9f09a34f7c0316403ff46f2a5c101256627f16384d34a38fb47a6c88ba60506843e532d91614339fccae5f884a5741e7582ffaf292ba38ee10a270a05f139bde3814b6a077e8cd2db0f105ebea2a83af70d385f13b507fac2ad93ff79d84950328bb86f3074745a8b7f9b64990fb142e2a12976e27e8d09a28dc5621f957ac49091116da410ac3cbde1b88d04533121f6010400cbd785b56905e4192e2fb62a720727d43c4fa487821203cf72138b884b78b701093243e1d8c92a0248a6c0203a5a88693da34af357499abacaf4b3309c640797d03093870a323b4b6f37865f6eaa2838148a67df4735d43a90ca87942554cdf1c4a751b1e75f9fd4ce4e97e278d6c1c7ed59d33441df7d084f3f02beb68896c70011010001889f0418010200090502533121f6021b0c000a09104ef7e4beccde97f0b98b03fc0a5ccf6a372995835a2f5da33b282a7d612c0ab2a97f59cf9fff73e9110981aac2858c41399afa29624a7fd8a0add11654e3d882c0fd199e161bdad65e5e2548f7b68a437ea64293db1246e3011cbb94dc1bcdeaf0f2539bd88ff16d95547144d97cead6a8c5927660a91e6db0d16eb36b7b49a3525b54d1644e65599b032b7eb901a204533127a0110400bd3edaa09eff9809c4edc2c2a0ebe52e53c50a19c1e49ab78e6167bf61473bb08f2050d78a5cbbc6ed66aff7b42cd503f16b4a0b99fa1609681fca9b7ce2bbb1a5b3864d6cdda4d7ef7849d156d534dea30fb0efb9e4cf8959a2b2ce623905882d5430b995a15c3b9fe92906086788b891002924f94abe139b42cbbfaaabe42f00a0b65dc1a1ad27d798adbcb5b5ad02d2688c89477b03ff4eebb6f7b15a73b96a96bed201c0e5e4ea27e4c6e2dd1005b94d4b90137a5b1cf5e01c6226c070c4cc999938101578877ee76d296b9aab8246d57049caacf489e80a3f40589cade790a020b1ac146d6f7a6241184b8c7fcde680eae3188f5dcbe846d7f7bdad34f6fcfca08413e19c1d5df83fc7c7c627d493492e009c2f52a80400a2fe82de87136fd2e8845888c4431b032ba29d9a29a804277e31002a8201fb8591a3e55c7a0d0881496caf8b9fb07544a5a4879291d0dc026a0ea9e5bd88eb4aa4947bbd694b25012e208a250d65ddc6f1eea59d3aed3b4ec15fcab85e2afaa23a40ab1ef9ce3e11e1bc1c34a0e758e7aa64deb8739276df0af7d4121f834a9b88e7
0418010200090502533127a0021b02005209104ef7e4beccde97f047200419110200060502533127a0000a0910dbce4ee19529437fe045009c0b32f5ead48ee8a7e98fac0dea3d3e6c0e2c552500a0ad71fadc5007cfaf842d9b7db3335a8cdad15d3d1a6404009b08e2c68fe8f3b45c1bb72a4b3278cdf3012aa0f229883ad74aa1f6000bb90b18301b2f85372ca5d6b9bf478d235b733b1b197d19ccca48e9daf8e890cb64546b4ce1b178faccfff07003c172a2d4f5ebaba9f57153955f3f61a9b80a4f5cb959908f8b211b03b7026a8a82fc612bfedd3794969bcf458c4ce92be215a1176ab88d045331d144010400a5063000c5aaf34953c1aa3bfc95045b3aab9882b9a8027fecfe2142dc6b47ba8aca667399990244d513dd0504716908c17d92c65e74219e004f7b83fc125e575dd58efec3ab6dd22e3580106998523dea42ec75bf9aa111734c82df54630bebdff20fe981cfc36c76f865eb1c2fb62c9e85bc3a6e5015a361a2eb1c8431578d0011010001889f04280102000905025331d433021d03000a09104ef7e4beccde97f02e5503ff5e0630d1b65291f4882b6d40a29da4616bb5088717d469fbcc3648b8276de04a04988b1f1b9f3e18f52265c1f8b6c85861691c1a6b8a3a25a1809a0b32ad330aec5667cb4262f4450649184e8113849b05e5ad06a316ea80c001e8e71838190339a6e48bbde30647bcf245134b9a97fa875c1d83a9862cae87ffd7e2c4ce3a1b89013d04180102000905025331d144021b0200a809104ef7e4beccde97f09d2004190102000605025331d144000a0910677815e371c2fd23522203fe22ab62b8e7a151383cea3edd3a12995693911426f8ccf125e1f6426388c0010f88d9ca7da2224aee8d1c12135998640c5e1813d55a93df472faae75bef858457248db41b4505827590aeccf6f9eb646da7f980655dd3050c6897feddddaca90676dee856d66db8923477d251712bb9b3186b4d0114daf7d6b59272b53218dd1da94a03ff64006fcbe71211e5daecd9961fba66cdb6de3f914882c58ba5beddeba7dcb950c1156d7fba18c19ea880dccc800eae335deec34e3b84ac75ffa24864f782f87815cda1c0f634b3dd2fa67cea30811d21723d21d9551fa12ccbcfa62b6d3a15d01307b99925707992556d50065505b090aadb8579083a20fe65bd2a270da9b011"
+const missingCrossSignatureKey = `-----BEGIN PGP PUBLIC KEY BLOCK-----
+Charset: UTF-8
+
+mQENBFMYynYBCACVOZ3/e8Bm2b9KH9QyIlHGo/i1bnkpqsgXj8tpJ2MIUOnXMMAY
+ztW7kKFLCmgVdLIC0vSoLA4yhaLcMojznh/2CcUglZeb6Ao8Gtelr//Rd5DRfPpG
+zqcfUo+m+eO1co2Orabw0tZDfGpg5p3AYl0hmxhUyYSc/xUq93xL1UJzBFgYXY54
+QsM8dgeQgFseSk/YvdP5SMx1ev+eraUyiiUtWzWrWC1TdyRa5p4UZg6Rkoppf+WJ
+QrW6BWrhAtqATHc8ozV7uJjeONjUEq24roRc/OFZdmQQGK6yrzKnnbA6MdHhqpdo
+9kWDcXYb7pSE63Lc+OBa5X2GUVvXJLS/3nrtABEBAAG0F2ludmFsaWQtc2lnbmlu
+Zy1zdWJrZXlziQEoBBMBAgASBQJTnKB5AhsBAgsHAhUIAh4BAAoJEO3UDQUIHpI/
+dN4H/idX4FQ1LIZCnpHS/oxoWQWfpRgdKAEM0qCqjMgiipJeEwSQbqjTCynuh5/R
+JlODDz85ABR06aoF4l5ebGLQWFCYifPnJZ/Yf5OYcMGtb7dIbqxWVFL9iLMO/oDL
+ioI3dotjPui5e+2hI9pVH1UHB/bZ/GvMGo6Zg0XxLPolKQODMVjpjLAQ0YJ3spew
+RAmOGre6tIvbDsMBnm8qREt7a07cBJ6XK7xjxYaZHQBiHVxyEWDa6gyANONx8duW
+/fhQ/zDTnyVM/ik6VO0Ty9BhPpcEYLFwh5c1ilFari1ta3e6qKo6ZGa9YMk/REhu
+yBHd9nTkI+0CiQUmbckUiVjDKKe5AQ0EUxjKdgEIAJcXQeP+NmuciE99YcJoffxv
+2gVLU4ZXBNHEaP0mgaJ1+tmMD089vUQAcyGRvw8jfsNsVZQIOAuRxY94aHQhIRHR
+bUzBN28ofo/AJJtfx62C15xt6fDKRV6HXYqAiygrHIpEoRLyiN69iScUsjIJeyFL
+C8wa72e8pSL6dkHoaV1N9ZH/xmrJ+k0vsgkQaAh9CzYufncDxcwkoP+aOlGtX1gP
+WwWoIbz0JwLEMPHBWvDDXQcQPQTYQyj+LGC9U6f9VZHN25E94subM1MjuT9OhN9Y
+MLfWaaIc5WyhLFyQKW2Upofn9wSFi8ubyBnv640Dfd0rVmaWv7LNTZpoZ/GbJAMA
+EQEAAYkBHwQYAQIACQUCU5ygeQIbAgAKCRDt1A0FCB6SP0zCB/sEzaVR38vpx+OQ
+MMynCBJrakiqDmUZv9xtplY7zsHSQjpd6xGflbU2n+iX99Q+nav0ETQZifNUEd4N
+1ljDGQejcTyKD6Pkg6wBL3x9/RJye7Zszazm4+toJXZ8xJ3800+BtaPoI39akYJm
++ijzbskvN0v/j5GOFJwQO0pPRAFtdHqRs9Kf4YanxhedB4dIUblzlIJuKsxFit6N
+lgGRblagG3Vv2eBszbxzPbJjHCgVLR3RmrVezKOsZjr/2i7X+xLWIR0uD3IN1qOW
+CXQxLBizEEmSNVNxsp7KPGTLnqO3bPtqFirxS9PJLIMPTPLNBY7ZYuPNTMqVIUWF
+4artDmrG
+=7FfJ
+-----END PGP PUBLIC KEY BLOCK-----`
+
+const invalidCrossSignatureKey = `-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQENBFMYynYBCACVOZ3/e8Bm2b9KH9QyIlHGo/i1bnkpqsgXj8tpJ2MIUOnXMMAY
+ztW7kKFLCmgVdLIC0vSoLA4yhaLcMojznh/2CcUglZeb6Ao8Gtelr//Rd5DRfPpG
+zqcfUo+m+eO1co2Orabw0tZDfGpg5p3AYl0hmxhUyYSc/xUq93xL1UJzBFgYXY54
+QsM8dgeQgFseSk/YvdP5SMx1ev+eraUyiiUtWzWrWC1TdyRa5p4UZg6Rkoppf+WJ
+QrW6BWrhAtqATHc8ozV7uJjeONjUEq24roRc/OFZdmQQGK6yrzKnnbA6MdHhqpdo
+9kWDcXYb7pSE63Lc+OBa5X2GUVvXJLS/3nrtABEBAAG0F2ludmFsaWQtc2lnbmlu
+Zy1zdWJrZXlziQEoBBMBAgASBQJTnKB5AhsBAgsHAhUIAh4BAAoJEO3UDQUIHpI/
+dN4H/idX4FQ1LIZCnpHS/oxoWQWfpRgdKAEM0qCqjMgiipJeEwSQbqjTCynuh5/R
+JlODDz85ABR06aoF4l5ebGLQWFCYifPnJZ/Yf5OYcMGtb7dIbqxWVFL9iLMO/oDL
+ioI3dotjPui5e+2hI9pVH1UHB/bZ/GvMGo6Zg0XxLPolKQODMVjpjLAQ0YJ3spew
+RAmOGre6tIvbDsMBnm8qREt7a07cBJ6XK7xjxYaZHQBiHVxyEWDa6gyANONx8duW
+/fhQ/zDTnyVM/ik6VO0Ty9BhPpcEYLFwh5c1ilFari1ta3e6qKo6ZGa9YMk/REhu
+yBHd9nTkI+0CiQUmbckUiVjDKKe5AQ0EUxjKdgEIAIINDqlj7X6jYKc6DjwrOkjQ
+UIRWbQQar0LwmNilehmt70g5DCL1SYm9q4LcgJJ2Nhxj0/5qqsYib50OSWMcKeEe
+iRXpXzv1ObpcQtI5ithp0gR53YPXBib80t3bUzomQ5UyZqAAHzMp3BKC54/vUrSK
+FeRaxDzNLrCeyI00+LHNUtwghAqHvdNcsIf8VRumK8oTm3RmDh0TyjASWYbrt9c8
+R1Um3zuoACOVy+mEIgIzsfHq0u7dwYwJB5+KeM7ZLx+HGIYdUYzHuUE1sLwVoELh
++SHIGHI1HDicOjzqgajShuIjj5hZTyQySVprrsLKiXS6NEwHAP20+XjayJ/R3tEA
+EQEAAYkCPgQYAQIBKAUCU5ygeQIbAsBdIAQZAQIABgUCU5ygeQAKCRCpVlnFZmhO
+52RJB/9uD1MSa0wjY6tHOIgquZcP3bHBvHmrHNMw9HR2wRCMO91ZkhrpdS3ZHtgb
+u3/55etj0FdvDo1tb8P8FGSVtO5Vcwf5APM8sbbqoi8L951Q3i7qt847lfhu6sMl
+w0LWFvPTOLHrliZHItPRjOltS1WAWfr2jUYhsU9ytaDAJmvf9DujxEOsN5G1YJep
+54JCKVCkM/y585Zcnn+yxk/XwqoNQ0/iJUT9qRrZWvoeasxhl1PQcwihCwss44A+
+YXaAt3hbk+6LEQuZoYS73yR3WHj+42tfm7YxRGeubXfgCEz/brETEWXMh4pe0vCL
+bfWrmfSPq2rDegYcAybxRQz0lF8PAAoJEO3UDQUIHpI/exkH/0vQfdHA8g/N4T6E
+i6b1CUVBAkvtdJpCATZjWPhXmShOw62gkDw306vHPilL4SCvEEi4KzG72zkp6VsB
+DSRcpxCwT4mHue+duiy53/aRMtSJ+vDfiV1Vhq+3sWAck/yUtfDU9/u4eFaiNok1
+8/Gd7reyuZt5CiJnpdPpjCwelK21l2w7sHAnJF55ITXdOxI8oG3BRKufz0z5lyDY
+s2tXYmhhQIggdgelN8LbcMhWs/PBbtUr6uZlNJG2lW1yscD4aI529VjwJlCeo745
+U7pO4eF05VViUJ2mmfoivL3tkhoTUWhx8xs8xCUcCg8DoEoSIhxtOmoTPR22Z9BL
+6LCg2mg=
+=Dhm4
+-----END PGP PUBLIC KEY BLOCK-----`
+
+const goodCrossSignatureKey = `-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1
+
+mI0EVUqeVwEEAMufHRrMPWK3gyvi0O0tABCs/oON9zV9KDZlr1a1M91ShCSFwCPo
+7r80PxdWVWcj0V5h50/CJYtpN3eE/mUIgW2z1uDYQF1OzrQ8ubrksfsJvpAhENom
+lTQEppv9mV8qhcM278teb7TX0pgrUHLYF5CfPdp1L957JLLXoQR/lwLVABEBAAG0
+E2dvb2Qtc2lnbmluZy1zdWJrZXmIuAQTAQIAIgUCVUqeVwIbAwYLCQgHAwIGFQgC
+CQoLBBYCAwECHgECF4AACgkQNRjL95IRWP69XQQAlH6+eyXJN4DZTLX78KGjHrsw
+6FCvxxClEPtPUjcJy/1KCRQmtLAt9PbbA78dvgzjDeZMZqRAwdjyJhjyg/fkU2OH
+7wq4ktjUu+dLcOBb+BFMEY+YjKZhf6EJuVfxoTVr5f82XNPbYHfTho9/OABKH6kv
+X70PaKZhbwnwij8Nts65AaIEVUqftREEAJ3WxZfqAX0bTDbQPf2CMT2IVMGDfhK7
+GyubOZgDFFjwUJQvHNvsrbeGLZ0xOBumLINyPO1amIfTgJNm1iiWFWfmnHReGcDl
+y5mpYG60Mb79Whdcer7CMm3AqYh/dW4g6IB02NwZMKoUHo3PXmFLxMKXnWyJ0clw
+R0LI/Qn509yXAKDh1SO20rqrBM+EAP2c5bfI98kyNwQAi3buu94qo3RR1ZbvfxgW
+CKXDVm6N99jdZGNK7FbRifXqzJJDLcXZKLnstnC4Sd3uyfyf1uFhmDLIQRryn5m+
+LBYHfDBPN3kdm7bsZDDq9GbTHiFZUfm/tChVKXWxkhpAmHhU/tH6GGzNSMXuIWSO
+aOz3Rqq0ED4NXyNKjdF9MiwD/i83S0ZBc0LmJYt4Z10jtH2B6tYdqnAK29uQaadx
+yZCX2scE09UIm32/w7pV77CKr1Cp/4OzAXS1tmFzQ+bX7DR+Gl8t4wxr57VeEMvl
+BGw4Vjh3X8//m3xynxycQU18Q1zJ6PkiMyPw2owZ/nss3hpSRKFJsxMLhW3fKmKr
+Ey2KiOcEGAECAAkFAlVKn7UCGwIAUgkQNRjL95IRWP5HIAQZEQIABgUCVUqftQAK
+CRD98VjDN10SqkWrAKDTpEY8D8HC02E/KVC5YUI01B30wgCgurpILm20kXEDCeHp
+C5pygfXw1DJrhAP+NyPJ4um/bU1I+rXaHHJYroYJs8YSweiNcwiHDQn0Engh/mVZ
+SqLHvbKh2dL/RXymC3+rjPvQf5cup9bPxNMa6WagdYBNAfzWGtkVISeaQW+cTEp/
+MtgVijRGXR/lGLGETPg2X3Afwn9N9bLMBkBprKgbBqU7lpaoPupxT61bL70=
+=vtbN
+-----END PGP PUBLIC KEY BLOCK-----`
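
A minimal usage sketch of the lookup pattern the tests above exercise, assuming a binary (non-armored) keyring file on disk; the file name and the key ID below are placeholders for illustration, not part of the vendored code:

package main

import (
	"fmt"
	"log"
	"os"

	"golang.org/x/crypto/openpgp"
	"golang.org/x/crypto/openpgp/packet"
)

func main() {
	f, err := os.Open("keyring.gpg") // placeholder keyring path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	kring, err := openpgp.ReadKeyRing(f)
	if err != nil {
		log.Fatal(err)
	}

	keyID := uint64(0xA42704B92866382A) // example ID taken from subkeyUsageHex above
	for _, k := range kring.KeysByIdUsage(keyID, packet.KeyFlagSign) {
		fmt.Printf("signing-capable key: %X\n", k.PublicKey.KeyId)
	}
}
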
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/compressed.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/compressed.go
new file mode 100644
index 00000000000..e8f0b5caa7d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/compressed.go
@@ -0,0 +1,123 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "compress/bzip2"
+ "compress/flate"
+ "compress/zlib"
+ "golang.org/x/crypto/openpgp/errors"
+ "io"
+ "strconv"
+)
+
+// Compressed represents a compressed OpenPGP packet. The decompressed contents
+// will contain more OpenPGP packets. See RFC 4880, section 5.6.
+type Compressed struct {
+ Body io.Reader
+}
+
+const (
+ NoCompression = flate.NoCompression
+ BestSpeed = flate.BestSpeed
+ BestCompression = flate.BestCompression
+ DefaultCompression = flate.DefaultCompression
+)
+
+// CompressionConfig contains compressor configuration settings.
+type CompressionConfig struct {
+ // Level is the compression level to use. It must be set to
+ // between -1 and 9, with -1 causing the compressor to use the
+ // default compression level, 0 causing the compressor to use
+ // no compression and 1 to 9 representing increasing (better,
+ // slower) compression levels. If Level is less than -1 or
+	// more than 9, a non-nil error will be returned during
+ // encryption. See the constants above for convenient common
+ // settings for Level.
+ Level int
+}
+
+func (c *Compressed) parse(r io.Reader) error {
+ var buf [1]byte
+ _, err := readFull(r, buf[:])
+ if err != nil {
+ return err
+ }
+
+ switch buf[0] {
+ case 1:
+ c.Body = flate.NewReader(r)
+ case 2:
+ c.Body, err = zlib.NewReader(r)
+ case 3:
+ c.Body = bzip2.NewReader(r)
+ default:
+ err = errors.UnsupportedError("unknown compression algorithm: " + strconv.Itoa(int(buf[0])))
+ }
+
+ return err
+}
+
+// compressedWriteCloser represents the serialized compression stream
+// header and the compressor. Its Close() method ensures that both the
+// compressor and serialized stream header are closed. Its Write()
+// method writes to the compressor.
+type compressedWriteCloser struct {
+ sh io.Closer // Stream Header
+ c io.WriteCloser // Compressor
+}
+
+func (cwc compressedWriteCloser) Write(p []byte) (int, error) {
+ return cwc.c.Write(p)
+}
+
+func (cwc compressedWriteCloser) Close() (err error) {
+ err = cwc.c.Close()
+ if err != nil {
+ return err
+ }
+
+ return cwc.sh.Close()
+}
+
+// SerializeCompressed serializes a compressed data packet to w and
+// returns a WriteCloser to which the literal data packets themselves
+// can be written and which MUST be closed on completion. If cc is
+// nil, sensible defaults will be used to configure the compression
+// algorithm.
+func SerializeCompressed(w io.WriteCloser, algo CompressionAlgo, cc *CompressionConfig) (literaldata io.WriteCloser, err error) {
+ compressed, err := serializeStreamHeader(w, packetTypeCompressed)
+ if err != nil {
+ return
+ }
+
+ _, err = compressed.Write([]byte{uint8(algo)})
+ if err != nil {
+ return
+ }
+
+ level := DefaultCompression
+ if cc != nil {
+ level = cc.Level
+ }
+
+ var compressor io.WriteCloser
+ switch algo {
+ case CompressionZIP:
+ compressor, err = flate.NewWriter(compressed, level)
+ case CompressionZLIB:
+ compressor, err = zlib.NewWriterLevel(compressed, level)
+ default:
+ s := strconv.Itoa(int(algo))
+ err = errors.UnsupportedError("Unsupported compression algorithm: " + s)
+ }
+ if err != nil {
+ return
+ }
+
+ literaldata = compressedWriteCloser{compressed, compressor}
+
+ return
+}
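
A minimal write-side sketch of wiring SerializeCompressed together with SerializeLiteral (added in literal.go later in this change), since the compressed packet's payload must itself consist of OpenPGP packets; the nopCloser helper and the file contents are assumptions for illustration:

package main

import (
	"bytes"
	"io"
	"log"
	"time"

	"golang.org/x/crypto/openpgp/packet"
)

// nopCloser adapts a plain io.Writer to the io.WriteCloser that
// SerializeCompressed expects (helper added only for this sketch).
type nopCloser struct{ io.Writer }

func (nopCloser) Close() error { return nil }

func main() {
	var out bytes.Buffer

	// Open a compressed data packet; a nil config means DefaultCompression.
	compressed, err := packet.SerializeCompressed(nopCloser{&out}, packet.CompressionZLIB, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Write a literal data packet inside the compressed packet.
	literal, err := packet.SerializeLiteral(compressed, true, "hello.txt", uint32(time.Now().Unix()))
	if err != nil {
		log.Fatal(err)
	}
	io.WriteString(literal, "hello, OpenPGP\n")

	// Closing the literal writer cascades: it finishes the literal packet,
	// then closes the compressor and the compressed packet's stream header.
	if err := literal.Close(); err != nil {
		log.Fatal(err)
	}
	log.Printf("wrote %d bytes of packet data", out.Len())
}
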
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/compressed_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/compressed_test.go
new file mode 100644
index 00000000000..cb2d70bd411
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/compressed_test.go
@@ -0,0 +1,41 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "encoding/hex"
+ "io"
+ "io/ioutil"
+ "testing"
+)
+
+func TestCompressed(t *testing.T) {
+ packet, err := Read(readerFromHex(compressedHex))
+ if err != nil {
+ t.Errorf("failed to read Compressed: %s", err)
+ return
+ }
+
+ c, ok := packet.(*Compressed)
+ if !ok {
+ t.Error("didn't find Compressed packet")
+ return
+ }
+
+ contents, err := ioutil.ReadAll(c.Body)
+ if err != nil && err != io.EOF {
+ t.Error(err)
+ return
+ }
+
+ expected, _ := hex.DecodeString(compressedExpectedHex)
+ if !bytes.Equal(expected, contents) {
+ t.Errorf("got:%x want:%x", contents, expected)
+ }
+}
+
+const compressedHex = "a3013b2d90c4e02b72e25f727e5e496a5e49b11e1700"
+const compressedExpectedHex = "cb1062004d14c8fe636f6e74656e74732e0a"
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/config.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/config.go
new file mode 100644
index 00000000000..c76eecc963a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/config.go
@@ -0,0 +1,91 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "crypto"
+ "crypto/rand"
+ "io"
+ "time"
+)
+
+// Config collects a number of parameters along with sensible defaults.
+// A nil *Config is valid and results in all default values.
+type Config struct {
+ // Rand provides the source of entropy.
+ // If nil, the crypto/rand Reader is used.
+ Rand io.Reader
+ // DefaultHash is the default hash function to be used.
+ // If zero, SHA-256 is used.
+ DefaultHash crypto.Hash
+ // DefaultCipher is the cipher to be used.
+ // If zero, AES-128 is used.
+ DefaultCipher CipherFunction
+ // Time returns the current time as the number of seconds since the
+ // epoch. If Time is nil, time.Now is used.
+ Time func() time.Time
+ // DefaultCompressionAlgo is the compression algorithm to be
+ // applied to the plaintext before encryption. If zero, no
+ // compression is done.
+ DefaultCompressionAlgo CompressionAlgo
+ // CompressionConfig configures the compression settings.
+ CompressionConfig *CompressionConfig
+ // S2KCount is only used for symmetric encryption. It
+ // determines the strength of the passphrase stretching when
+ // the said passphrase is hashed to produce a key. S2KCount
+ // should be between 1024 and 65011712, inclusive. If Config
+	// is nil or S2KCount is 0, the value 65536 is used. Not all
+	// values in the above range can be represented. S2KCount will
+	// be rounded up to the next representable value if it cannot
+	// be encoded exactly. When set, it is strongly encouraged to
+ // use a value that is at least 65536. See RFC 4880 Section
+ // 3.7.1.3.
+ S2KCount int
+ // RSABits is the number of bits in new RSA keys made with NewEntity.
+ // If zero, then 2048 bit keys are created.
+ RSABits int
+}
+
+func (c *Config) Random() io.Reader {
+ if c == nil || c.Rand == nil {
+ return rand.Reader
+ }
+ return c.Rand
+}
+
+func (c *Config) Hash() crypto.Hash {
+ if c == nil || uint(c.DefaultHash) == 0 {
+ return crypto.SHA256
+ }
+ return c.DefaultHash
+}
+
+func (c *Config) Cipher() CipherFunction {
+ if c == nil || uint8(c.DefaultCipher) == 0 {
+ return CipherAES128
+ }
+ return c.DefaultCipher
+}
+
+func (c *Config) Now() time.Time {
+ if c == nil || c.Time == nil {
+ return time.Now()
+ }
+ return c.Time()
+}
+
+func (c *Config) Compression() CompressionAlgo {
+ if c == nil {
+ return CompressionNone
+ }
+ return c.DefaultCompressionAlgo
+}
+
+func (c *Config) PasswordHashIterations() int {
+ if c == nil || c.S2KCount == 0 {
+ return 0
+ }
+ return c.S2KCount
+}
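
A short sketch of the nil-is-valid contract described above: every accessor falls back to its default when the *Config pointer is nil or the corresponding field is zero.

package main

import (
	"crypto"
	"fmt"

	"golang.org/x/crypto/openpgp/packet"
)

func main() {
	var cfg *packet.Config // nil is valid
	fmt.Println(cfg.Hash() == crypto.SHA256)         // true: default hash
	fmt.Println(cfg.Cipher() == packet.CipherAES128) // true: default cipher

	// Override a few fields; anything left zero still resolves to a default.
	custom := &packet.Config{
		DefaultHash:            crypto.SHA512,
		DefaultCipher:          packet.CipherAES256,
		DefaultCompressionAlgo: packet.CompressionZLIB,
		RSABits:                4096,
	}
	fmt.Println(custom.Hash() == crypto.SHA512) // true
}
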
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/encrypted_key.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/encrypted_key.go
new file mode 100644
index 00000000000..266840d05a3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/encrypted_key.go
@@ -0,0 +1,199 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "crypto/rsa"
+ "encoding/binary"
+ "io"
+ "math/big"
+ "strconv"
+
+ "golang.org/x/crypto/openpgp/elgamal"
+ "golang.org/x/crypto/openpgp/errors"
+)
+
+const encryptedKeyVersion = 3
+
+// EncryptedKey represents a public-key encrypted session key. See RFC 4880,
+// section 5.1.
+type EncryptedKey struct {
+ KeyId uint64
+ Algo PublicKeyAlgorithm
+ CipherFunc CipherFunction // only valid after a successful Decrypt
+ Key []byte // only valid after a successful Decrypt
+
+ encryptedMPI1, encryptedMPI2 parsedMPI
+}
+
+func (e *EncryptedKey) parse(r io.Reader) (err error) {
+ var buf [10]byte
+ _, err = readFull(r, buf[:])
+ if err != nil {
+ return
+ }
+ if buf[0] != encryptedKeyVersion {
+ return errors.UnsupportedError("unknown EncryptedKey version " + strconv.Itoa(int(buf[0])))
+ }
+ e.KeyId = binary.BigEndian.Uint64(buf[1:9])
+ e.Algo = PublicKeyAlgorithm(buf[9])
+ switch e.Algo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
+ e.encryptedMPI1.bytes, e.encryptedMPI1.bitLength, err = readMPI(r)
+ case PubKeyAlgoElGamal:
+ e.encryptedMPI1.bytes, e.encryptedMPI1.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
+ e.encryptedMPI2.bytes, e.encryptedMPI2.bitLength, err = readMPI(r)
+ }
+ _, err = consumeAll(r)
+ return
+}
+
+func checksumKeyMaterial(key []byte) uint16 {
+ var checksum uint16
+ for _, v := range key {
+ checksum += uint16(v)
+ }
+ return checksum
+}
+
+// Decrypt decrypts an encrypted session key with the given private key. The
+// private key must have been decrypted first.
+// If config is nil, sensible defaults will be used.
+func (e *EncryptedKey) Decrypt(priv *PrivateKey, config *Config) error {
+ var err error
+ var b []byte
+
+ // TODO(agl): use session key decryption routines here to avoid
+ // padding oracle attacks.
+ switch priv.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
+ b, err = rsa.DecryptPKCS1v15(config.Random(), priv.PrivateKey.(*rsa.PrivateKey), e.encryptedMPI1.bytes)
+ case PubKeyAlgoElGamal:
+ c1 := new(big.Int).SetBytes(e.encryptedMPI1.bytes)
+ c2 := new(big.Int).SetBytes(e.encryptedMPI2.bytes)
+ b, err = elgamal.Decrypt(priv.PrivateKey.(*elgamal.PrivateKey), c1, c2)
+ default:
+		err = errors.InvalidArgumentError("cannot decrypt encrypted session key with private key of type " + strconv.Itoa(int(priv.PubKeyAlgo)))
+ }
+
+ if err != nil {
+ return err
+ }
+
+ e.CipherFunc = CipherFunction(b[0])
+ e.Key = b[1 : len(b)-2]
+ expectedChecksum := uint16(b[len(b)-2])<<8 | uint16(b[len(b)-1])
+ checksum := checksumKeyMaterial(e.Key)
+ if checksum != expectedChecksum {
+ return errors.StructuralError("EncryptedKey checksum incorrect")
+ }
+
+ return nil
+}
+
+// Serialize writes the encrypted key packet, e, to w.
+func (e *EncryptedKey) Serialize(w io.Writer) error {
+ var mpiLen int
+ switch e.Algo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
+ mpiLen = 2 + len(e.encryptedMPI1.bytes)
+ case PubKeyAlgoElGamal:
+ mpiLen = 2 + len(e.encryptedMPI1.bytes) + 2 + len(e.encryptedMPI2.bytes)
+ default:
+ return errors.InvalidArgumentError("don't know how to serialize encrypted key type " + strconv.Itoa(int(e.Algo)))
+ }
+
+ serializeHeader(w, packetTypeEncryptedKey, 1 /* version */ +8 /* key id */ +1 /* algo */ +mpiLen)
+
+ w.Write([]byte{encryptedKeyVersion})
+ binary.Write(w, binary.BigEndian, e.KeyId)
+ w.Write([]byte{byte(e.Algo)})
+
+ switch e.Algo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
+ writeMPIs(w, e.encryptedMPI1)
+ case PubKeyAlgoElGamal:
+ writeMPIs(w, e.encryptedMPI1, e.encryptedMPI2)
+ default:
+ panic("internal error")
+ }
+
+ return nil
+}
+
+// SerializeEncryptedKey serializes an encrypted key packet to w that contains
+// key, encrypted to pub.
+// If config is nil, sensible defaults will be used.
+func SerializeEncryptedKey(w io.Writer, pub *PublicKey, cipherFunc CipherFunction, key []byte, config *Config) error {
+ var buf [10]byte
+ buf[0] = encryptedKeyVersion
+ binary.BigEndian.PutUint64(buf[1:9], pub.KeyId)
+ buf[9] = byte(pub.PubKeyAlgo)
+
+ keyBlock := make([]byte, 1 /* cipher type */ +len(key)+2 /* checksum */)
+ keyBlock[0] = byte(cipherFunc)
+ copy(keyBlock[1:], key)
+ checksum := checksumKeyMaterial(key)
+ keyBlock[1+len(key)] = byte(checksum >> 8)
+ keyBlock[1+len(key)+1] = byte(checksum)
+
+ switch pub.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
+ return serializeEncryptedKeyRSA(w, config.Random(), buf, pub.PublicKey.(*rsa.PublicKey), keyBlock)
+ case PubKeyAlgoElGamal:
+ return serializeEncryptedKeyElGamal(w, config.Random(), buf, pub.PublicKey.(*elgamal.PublicKey), keyBlock)
+ case PubKeyAlgoDSA, PubKeyAlgoRSASignOnly:
+ return errors.InvalidArgumentError("cannot encrypt to public key of type " + strconv.Itoa(int(pub.PubKeyAlgo)))
+ }
+
+ return errors.UnsupportedError("encrypting a key to public key of type " + strconv.Itoa(int(pub.PubKeyAlgo)))
+}
+
+func serializeEncryptedKeyRSA(w io.Writer, rand io.Reader, header [10]byte, pub *rsa.PublicKey, keyBlock []byte) error {
+ cipherText, err := rsa.EncryptPKCS1v15(rand, pub, keyBlock)
+ if err != nil {
+ return errors.InvalidArgumentError("RSA encryption failed: " + err.Error())
+ }
+
+ packetLen := 10 /* header length */ + 2 /* mpi size */ + len(cipherText)
+
+ err = serializeHeader(w, packetTypeEncryptedKey, packetLen)
+ if err != nil {
+ return err
+ }
+ _, err = w.Write(header[:])
+ if err != nil {
+ return err
+ }
+ return writeMPI(w, 8*uint16(len(cipherText)), cipherText)
+}
+
+func serializeEncryptedKeyElGamal(w io.Writer, rand io.Reader, header [10]byte, pub *elgamal.PublicKey, keyBlock []byte) error {
+ c1, c2, err := elgamal.Encrypt(rand, pub, keyBlock)
+ if err != nil {
+ return errors.InvalidArgumentError("ElGamal encryption failed: " + err.Error())
+ }
+
+ packetLen := 10 /* header length */
+ packetLen += 2 /* mpi size */ + (c1.BitLen()+7)/8
+ packetLen += 2 /* mpi size */ + (c2.BitLen()+7)/8
+
+ err = serializeHeader(w, packetTypeEncryptedKey, packetLen)
+ if err != nil {
+ return err
+ }
+ _, err = w.Write(header[:])
+ if err != nil {
+ return err
+ }
+ err = writeBig(w, c1)
+ if err != nil {
+ return err
+ }
+ return writeBig(w, c2)
+}
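
For reference, a standalone restatement of the session-key block that SerializeEncryptedKey assembles before public-key encryption: one cipher-function byte, the raw session key, and a big-endian 16-bit checksum computed exactly as in checksumKeyMaterial. The helper name is illustrative only.

// buildSessionKeyBlock mirrors the keyBlock layout built in SerializeEncryptedKey:
// cipher byte || session key || 16-bit checksum of the key bytes.
func buildSessionKeyBlock(cipherFunc byte, key []byte) []byte {
	block := make([]byte, 1+len(key)+2)
	block[0] = cipherFunc
	copy(block[1:], key)

	var checksum uint16
	for _, v := range key {
		checksum += uint16(v) // same sum as checksumKeyMaterial
	}
	block[1+len(key)] = byte(checksum >> 8)
	block[1+len(key)+1] = byte(checksum)
	return block
}
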
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/encrypted_key_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/encrypted_key_test.go
new file mode 100644
index 00000000000..fee14cf3cf0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/encrypted_key_test.go
@@ -0,0 +1,146 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "crypto/rsa"
+ "encoding/hex"
+ "fmt"
+ "math/big"
+ "testing"
+)
+
+func bigFromBase10(s string) *big.Int {
+ b, ok := new(big.Int).SetString(s, 10)
+ if !ok {
+ panic("bigFromBase10 failed")
+ }
+ return b
+}
+
+var encryptedKeyPub = rsa.PublicKey{
+ E: 65537,
+ N: bigFromBase10("115804063926007623305902631768113868327816898845124614648849934718568541074358183759250136204762053879858102352159854352727097033322663029387610959884180306668628526686121021235757016368038585212410610742029286439607686208110250133174279811431933746643015923132833417396844716207301518956640020862630546868823"),
+}
+
+var encryptedKeyRSAPriv = &rsa.PrivateKey{
+ PublicKey: encryptedKeyPub,
+ D: bigFromBase10("32355588668219869544751561565313228297765464314098552250409557267371233892496951383426602439009993875125222579159850054973310859166139474359774543943714622292329487391199285040721944491839695981199720170366763547754915493640685849961780092241140181198779299712578774460837139360803883139311171713302987058393"),
+}
+
+var encryptedKeyPriv = &PrivateKey{
+ PublicKey: PublicKey{
+ PubKeyAlgo: PubKeyAlgoRSA,
+ },
+ PrivateKey: encryptedKeyRSAPriv,
+}
+
+func TestDecryptingEncryptedKey(t *testing.T) {
+ const encryptedKeyHex = "c18c032a67d68660df41c70104005789d0de26b6a50c985a02a13131ca829c413a35d0e6fa8d6842599252162808ac7439c72151c8c6183e76923fe3299301414d0c25a2f06a2257db3839e7df0ec964773f6e4c4ac7ff3b48c444237166dd46ba8ff443a5410dc670cb486672fdbe7c9dfafb75b4fea83af3a204fe2a7dfa86bd20122b4f3d2646cbeecb8f7be8"
+ const expectedKeyHex = "d930363f7e0308c333b9618617ea728963d8df993665ae7be1092d4926fd864b"
+
+ p, err := Read(readerFromHex(encryptedKeyHex))
+ if err != nil {
+ t.Errorf("error from Read: %s", err)
+ return
+ }
+ ek, ok := p.(*EncryptedKey)
+ if !ok {
+ t.Errorf("didn't parse an EncryptedKey, got %#v", p)
+ return
+ }
+
+ if ek.KeyId != 0x2a67d68660df41c7 || ek.Algo != PubKeyAlgoRSA {
+ t.Errorf("unexpected EncryptedKey contents: %#v", ek)
+ return
+ }
+
+ err = ek.Decrypt(encryptedKeyPriv, nil)
+ if err != nil {
+ t.Errorf("error from Decrypt: %s", err)
+ return
+ }
+
+ if ek.CipherFunc != CipherAES256 {
+ t.Errorf("unexpected EncryptedKey contents: %#v", ek)
+ return
+ }
+
+ keyHex := fmt.Sprintf("%x", ek.Key)
+ if keyHex != expectedKeyHex {
+ t.Errorf("bad key, got %s want %x", keyHex, expectedKeyHex)
+ }
+}
+
+func TestEncryptingEncryptedKey(t *testing.T) {
+ key := []byte{1, 2, 3, 4}
+ const expectedKeyHex = "01020304"
+ const keyId = 42
+
+ pub := &PublicKey{
+ PublicKey: &encryptedKeyPub,
+ KeyId: keyId,
+ PubKeyAlgo: PubKeyAlgoRSAEncryptOnly,
+ }
+
+ buf := new(bytes.Buffer)
+ err := SerializeEncryptedKey(buf, pub, CipherAES128, key, nil)
+ if err != nil {
+ t.Errorf("error writing encrypted key packet: %s", err)
+ }
+
+ p, err := Read(buf)
+ if err != nil {
+ t.Errorf("error from Read: %s", err)
+ return
+ }
+ ek, ok := p.(*EncryptedKey)
+ if !ok {
+ t.Errorf("didn't parse an EncryptedKey, got %#v", p)
+ return
+ }
+
+ if ek.KeyId != keyId || ek.Algo != PubKeyAlgoRSAEncryptOnly {
+ t.Errorf("unexpected EncryptedKey contents: %#v", ek)
+ return
+ }
+
+ err = ek.Decrypt(encryptedKeyPriv, nil)
+ if err != nil {
+ t.Errorf("error from Decrypt: %s", err)
+ return
+ }
+
+ if ek.CipherFunc != CipherAES128 {
+ t.Errorf("unexpected EncryptedKey contents: %#v", ek)
+ return
+ }
+
+ keyHex := fmt.Sprintf("%x", ek.Key)
+ if keyHex != expectedKeyHex {
+ t.Errorf("bad key, got %s want %x", keyHex, expectedKeyHex)
+ }
+}
+
+func TestSerializingEncryptedKey(t *testing.T) {
+ const encryptedKeyHex = "c18c032a67d68660df41c70104005789d0de26b6a50c985a02a13131ca829c413a35d0e6fa8d6842599252162808ac7439c72151c8c6183e76923fe3299301414d0c25a2f06a2257db3839e7df0ec964773f6e4c4ac7ff3b48c444237166dd46ba8ff443a5410dc670cb486672fdbe7c9dfafb75b4fea83af3a204fe2a7dfa86bd20122b4f3d2646cbeecb8f7be8"
+
+ p, err := Read(readerFromHex(encryptedKeyHex))
+ if err != nil {
+ t.Fatalf("error from Read: %s", err)
+ }
+ ek, ok := p.(*EncryptedKey)
+ if !ok {
+ t.Fatalf("didn't parse an EncryptedKey, got %#v", p)
+ }
+
+ var buf bytes.Buffer
+ ek.Serialize(&buf)
+
+ if bufHex := hex.EncodeToString(buf.Bytes()); bufHex != encryptedKeyHex {
+ t.Fatalf("serialization of encrypted key differed from original. Original was %s, but reserialized as %s", encryptedKeyHex, bufHex)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/literal.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/literal.go
new file mode 100644
index 00000000000..1a9ec6e51e8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/literal.go
@@ -0,0 +1,89 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "encoding/binary"
+ "io"
+)
+
+// LiteralData represents a literal data packet holding the (unencrypted) body of a message or file. See RFC 4880, section 5.9.
+type LiteralData struct {
+ IsBinary bool
+ FileName string
+ Time uint32 // Unix epoch time. Either creation time or modification time. 0 means undefined.
+ Body io.Reader
+}
+
+// ForEyesOnly returns whether the contents of the LiteralData have been marked
+// as especially sensitive.
+func (l *LiteralData) ForEyesOnly() bool {
+ return l.FileName == "_CONSOLE"
+}
+
+func (l *LiteralData) parse(r io.Reader) (err error) {
+ var buf [256]byte
+
+ _, err = readFull(r, buf[:2])
+ if err != nil {
+ return
+ }
+
+ l.IsBinary = buf[0] == 'b'
+ fileNameLen := int(buf[1])
+
+ _, err = readFull(r, buf[:fileNameLen])
+ if err != nil {
+ return
+ }
+
+ l.FileName = string(buf[:fileNameLen])
+
+ _, err = readFull(r, buf[:4])
+ if err != nil {
+ return
+ }
+
+ l.Time = binary.BigEndian.Uint32(buf[:4])
+ l.Body = r
+ return
+}
+
+// SerializeLiteral serializes a literal data packet to w and returns a
+// WriteCloser to which the data itself can be written and which MUST be closed
+// on completion. The fileName is truncated to 255 bytes.
+func SerializeLiteral(w io.WriteCloser, isBinary bool, fileName string, time uint32) (plaintext io.WriteCloser, err error) {
+ var buf [4]byte
+ buf[0] = 't'
+ if isBinary {
+ buf[0] = 'b'
+ }
+ if len(fileName) > 255 {
+ fileName = fileName[:255]
+ }
+ buf[1] = byte(len(fileName))
+
+ inner, err := serializeStreamHeader(w, packetTypeLiteralData)
+ if err != nil {
+ return
+ }
+
+ _, err = inner.Write(buf[:2])
+ if err != nil {
+ return
+ }
+ _, err = inner.Write([]byte(fileName))
+ if err != nil {
+ return
+ }
+ binary.BigEndian.PutUint32(buf[:], time)
+ _, err = inner.Write(buf[:])
+ if err != nil {
+ return
+ }
+
+ plaintext = inner
+ return
+}
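
A minimal read-side sketch, assuming a file containing a single literal data packet (the file name is a placeholder); packet.Read hands back a *LiteralData whose Body streams the contents:

package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"os"

	"golang.org/x/crypto/openpgp/packet"
)

func main() {
	f, err := os.Open("literal.pgp") // placeholder input path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	p, err := packet.Read(f)
	if err != nil {
		log.Fatal(err)
	}
	lit, ok := p.(*packet.LiteralData)
	if !ok {
		log.Fatalf("expected a literal data packet, got %T", p)
	}
	if lit.ForEyesOnly() {
		fmt.Println("contents are marked as especially sensitive")
	}
	body, err := ioutil.ReadAll(lit.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s (%d bytes)\n", lit.FileName, len(body))
}
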
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/ocfb.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/ocfb.go
new file mode 100644
index 00000000000..ce2a33a547c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/ocfb.go
@@ -0,0 +1,143 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// OpenPGP CFB Mode. http://tools.ietf.org/html/rfc4880#section-13.9
+
+package packet
+
+import (
+ "crypto/cipher"
+)
+
+type ocfbEncrypter struct {
+ b cipher.Block
+ fre []byte
+ outUsed int
+}
+
+// An OCFBResyncOption determines if the "resynchronization step" of OCFB is
+// performed.
+type OCFBResyncOption bool
+
+const (
+ OCFBResync OCFBResyncOption = true
+ OCFBNoResync OCFBResyncOption = false
+)
+
+// NewOCFBEncrypter returns a cipher.Stream which encrypts data with OpenPGP's
+// cipher feedback mode using the given cipher.Block, and an initial amount of
+// ciphertext. randData must be random bytes and be the same length as the
+// cipher.Block's block size. Resync determines if the "resynchronization step"
+// from RFC 4880, 13.9 step 7 is performed. Different parts of OpenPGP vary on
+// this point.
+func NewOCFBEncrypter(block cipher.Block, randData []byte, resync OCFBResyncOption) (cipher.Stream, []byte) {
+ blockSize := block.BlockSize()
+ if len(randData) != blockSize {
+ return nil, nil
+ }
+
+ x := &ocfbEncrypter{
+ b: block,
+ fre: make([]byte, blockSize),
+ outUsed: 0,
+ }
+ prefix := make([]byte, blockSize+2)
+
+ block.Encrypt(x.fre, x.fre)
+ for i := 0; i < blockSize; i++ {
+ prefix[i] = randData[i] ^ x.fre[i]
+ }
+
+ block.Encrypt(x.fre, prefix[:blockSize])
+ prefix[blockSize] = x.fre[0] ^ randData[blockSize-2]
+ prefix[blockSize+1] = x.fre[1] ^ randData[blockSize-1]
+
+ if resync {
+ block.Encrypt(x.fre, prefix[2:])
+ } else {
+ x.fre[0] = prefix[blockSize]
+ x.fre[1] = prefix[blockSize+1]
+ x.outUsed = 2
+ }
+ return x, prefix
+}
+
+func (x *ocfbEncrypter) XORKeyStream(dst, src []byte) {
+ for i := 0; i < len(src); i++ {
+ if x.outUsed == len(x.fre) {
+ x.b.Encrypt(x.fre, x.fre)
+ x.outUsed = 0
+ }
+
+ x.fre[x.outUsed] ^= src[i]
+ dst[i] = x.fre[x.outUsed]
+ x.outUsed++
+ }
+}
+
+type ocfbDecrypter struct {
+ b cipher.Block
+ fre []byte
+ outUsed int
+}
+
+// NewOCFBDecrypter returns a cipher.Stream which decrypts data with OpenPGP's
+// cipher feedback mode using the given cipher.Block. Prefix must be the first
+// blockSize + 2 bytes of the ciphertext, where blockSize is the cipher.Block's
+// block size. If an incorrect key is detected then nil is returned. On
+// successful exit, blockSize+2 bytes of decrypted data are written into
+// prefix. Resync determines if the "resynchronization step" from RFC 4880,
+// 13.9 step 7 is performed. Different parts of OpenPGP vary on this point.
+func NewOCFBDecrypter(block cipher.Block, prefix []byte, resync OCFBResyncOption) cipher.Stream {
+ blockSize := block.BlockSize()
+ if len(prefix) != blockSize+2 {
+ return nil
+ }
+
+ x := &ocfbDecrypter{
+ b: block,
+ fre: make([]byte, blockSize),
+ outUsed: 0,
+ }
+ prefixCopy := make([]byte, len(prefix))
+ copy(prefixCopy, prefix)
+
+ block.Encrypt(x.fre, x.fre)
+ for i := 0; i < blockSize; i++ {
+ prefixCopy[i] ^= x.fre[i]
+ }
+
+ block.Encrypt(x.fre, prefix[:blockSize])
+ prefixCopy[blockSize] ^= x.fre[0]
+ prefixCopy[blockSize+1] ^= x.fre[1]
+
+ if prefixCopy[blockSize-2] != prefixCopy[blockSize] ||
+ prefixCopy[blockSize-1] != prefixCopy[blockSize+1] {
+ return nil
+ }
+
+ if resync {
+ block.Encrypt(x.fre, prefix[2:])
+ } else {
+ x.fre[0] = prefix[blockSize]
+ x.fre[1] = prefix[blockSize+1]
+ x.outUsed = 2
+ }
+ copy(prefix, prefixCopy)
+ return x
+}
+
+func (x *ocfbDecrypter) XORKeyStream(dst, src []byte) {
+ for i := 0; i < len(src); i++ {
+ if x.outUsed == len(x.fre) {
+ x.b.Encrypt(x.fre, x.fre)
+ x.outUsed = 0
+ }
+
+ c := src[i]
+ dst[i] = x.fre[x.outUsed] ^ src[i]
+ x.fre[x.outUsed] = c
+ x.outUsed++
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/ocfb_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/ocfb_test.go
new file mode 100644
index 00000000000..91022c042d4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/ocfb_test.go
@@ -0,0 +1,46 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "crypto/aes"
+ "crypto/rand"
+ "testing"
+)
+
+var commonKey128 = []byte{0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c}
+
+func testOCFB(t *testing.T, resync OCFBResyncOption) {
+ block, err := aes.NewCipher(commonKey128)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ plaintext := []byte("this is the plaintext, which is long enough to span several blocks.")
+ randData := make([]byte, block.BlockSize())
+ rand.Reader.Read(randData)
+ ocfb, prefix := NewOCFBEncrypter(block, randData, resync)
+ ciphertext := make([]byte, len(plaintext))
+ ocfb.XORKeyStream(ciphertext, plaintext)
+
+ ocfbdec := NewOCFBDecrypter(block, prefix, resync)
+ if ocfbdec == nil {
+ t.Errorf("NewOCFBDecrypter failed (resync: %t)", resync)
+ return
+ }
+ plaintextCopy := make([]byte, len(plaintext))
+ ocfbdec.XORKeyStream(plaintextCopy, ciphertext)
+
+ if !bytes.Equal(plaintextCopy, plaintext) {
+ t.Errorf("got: %x, want: %x (resync: %t)", plaintextCopy, plaintext, resync)
+ }
+}
+
+func TestOCFB(t *testing.T) {
+ testOCFB(t, OCFBNoResync)
+ testOCFB(t, OCFBResync)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/one_pass_signature.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/one_pass_signature.go
new file mode 100644
index 00000000000..1713503395e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/one_pass_signature.go
@@ -0,0 +1,73 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "crypto"
+ "encoding/binary"
+ "golang.org/x/crypto/openpgp/errors"
+ "golang.org/x/crypto/openpgp/s2k"
+ "io"
+ "strconv"
+)
+
+// OnePassSignature represents a one-pass signature packet. See RFC 4880,
+// section 5.4.
+type OnePassSignature struct {
+ SigType SignatureType
+ Hash crypto.Hash
+ PubKeyAlgo PublicKeyAlgorithm
+ KeyId uint64
+ IsLast bool
+}
+
+const onePassSignatureVersion = 3
+
+func (ops *OnePassSignature) parse(r io.Reader) (err error) {
+ var buf [13]byte
+
+ _, err = readFull(r, buf[:])
+ if err != nil {
+ return
+ }
+ if buf[0] != onePassSignatureVersion {
+ err = errors.UnsupportedError("one-pass-signature packet version " + strconv.Itoa(int(buf[0])))
+ }
+
+ var ok bool
+ ops.Hash, ok = s2k.HashIdToHash(buf[2])
+ if !ok {
+ return errors.UnsupportedError("hash function: " + strconv.Itoa(int(buf[2])))
+ }
+
+ ops.SigType = SignatureType(buf[1])
+ ops.PubKeyAlgo = PublicKeyAlgorithm(buf[3])
+ ops.KeyId = binary.BigEndian.Uint64(buf[4:12])
+ ops.IsLast = buf[12] != 0
+ return
+}
+
+// Serialize marshals the given OnePassSignature to w.
+func (ops *OnePassSignature) Serialize(w io.Writer) error {
+ var buf [13]byte
+ buf[0] = onePassSignatureVersion
+ buf[1] = uint8(ops.SigType)
+ var ok bool
+ buf[2], ok = s2k.HashToHashId(ops.Hash)
+ if !ok {
+ return errors.UnsupportedError("hash type: " + strconv.Itoa(int(ops.Hash)))
+ }
+ buf[3] = uint8(ops.PubKeyAlgo)
+ binary.BigEndian.PutUint64(buf[4:12], ops.KeyId)
+ if ops.IsLast {
+ buf[12] = 1
+ }
+
+ if err := serializeHeader(w, packetTypeOnePassSignature, len(buf)); err != nil {
+ return err
+ }
+ _, err := w.Write(buf[:])
+ return err
+}
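
A small sketch of filling in and serializing a OnePassSignature; the key ID is an example value reused from encrypted_key_test.go earlier in this change, and the remaining fields are illustrative assumptions:

package main

import (
	"bytes"
	"crypto"
	"log"

	"golang.org/x/crypto/openpgp/packet"
)

func main() {
	ops := &packet.OnePassSignature{
		SigType:    packet.SigTypeBinary,
		Hash:       crypto.SHA256,
		PubKeyAlgo: packet.PubKeyAlgoRSA,
		KeyId:      0x2a67d68660df41c7, // example key ID, not tied to a real key here
		IsLast:     true,
	}

	var buf bytes.Buffer
	if err := ops.Serialize(&buf); err != nil {
		log.Fatal(err)
	}
	log.Printf("one-pass signature packet: %x", buf.Bytes())
}
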
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/opaque.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/opaque.go
new file mode 100644
index 00000000000..456d807f255
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/opaque.go
@@ -0,0 +1,162 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "io"
+ "io/ioutil"
+
+ "golang.org/x/crypto/openpgp/errors"
+)
+
+// OpaquePacket represents an OpenPGP packet as raw, unparsed data. This is
+// useful for splitting and storing the original packet contents separately,
+// handling unsupported packet types or accessing parts of the packet not yet
+// implemented by this package.
+type OpaquePacket struct {
+ // Packet type
+ Tag uint8
+ // Reason why the packet was parsed opaquely
+ Reason error
+ // Binary contents of the packet data
+ Contents []byte
+}
+
+func (op *OpaquePacket) parse(r io.Reader) (err error) {
+ op.Contents, err = ioutil.ReadAll(r)
+ return
+}
+
+// Serialize marshals the packet to a writer in its original form, including
+// the packet header.
+func (op *OpaquePacket) Serialize(w io.Writer) (err error) {
+ err = serializeHeader(w, packetType(op.Tag), len(op.Contents))
+ if err == nil {
+ _, err = w.Write(op.Contents)
+ }
+ return
+}
+
+// Parse attempts to parse the opaque contents into a structure supported by
+// this package. If the packet is not known then the result will be another
+// OpaquePacket.
+func (op *OpaquePacket) Parse() (p Packet, err error) {
+ hdr := bytes.NewBuffer(nil)
+ err = serializeHeader(hdr, packetType(op.Tag), len(op.Contents))
+ if err != nil {
+ op.Reason = err
+ return op, err
+ }
+ p, err = Read(io.MultiReader(hdr, bytes.NewBuffer(op.Contents)))
+ if err != nil {
+ op.Reason = err
+ p = op
+ }
+ return
+}
+
+// OpaqueReader reads OpaquePackets from an io.Reader.
+type OpaqueReader struct {
+ r io.Reader
+}
+
+func NewOpaqueReader(r io.Reader) *OpaqueReader {
+ return &OpaqueReader{r: r}
+}
+
+// Next reads the next OpaquePacket.
+func (or *OpaqueReader) Next() (op *OpaquePacket, err error) {
+ tag, _, contents, err := readHeader(or.r)
+ if err != nil {
+ return
+ }
+ op = &OpaquePacket{Tag: uint8(tag), Reason: err}
+ err = op.parse(contents)
+ if err != nil {
+ consumeAll(contents)
+ }
+ return
+}
+
+// OpaqueSubpacket represents an unparsed OpenPGP subpacket,
+// as found in signature and user attribute packets.
+type OpaqueSubpacket struct {
+ SubType uint8
+ Contents []byte
+}
+
+// OpaqueSubpackets extracts opaque, unparsed OpenPGP subpackets from
+// their byte representation.
+func OpaqueSubpackets(contents []byte) (result []*OpaqueSubpacket, err error) {
+ var (
+ subHeaderLen int
+ subPacket *OpaqueSubpacket
+ )
+ for len(contents) > 0 {
+ subHeaderLen, subPacket, err = nextSubpacket(contents)
+ if err != nil {
+ break
+ }
+ result = append(result, subPacket)
+ contents = contents[subHeaderLen+len(subPacket.Contents):]
+ }
+ return
+}
+
+func nextSubpacket(contents []byte) (subHeaderLen int, subPacket *OpaqueSubpacket, err error) {
+ // RFC 4880, section 5.2.3.1
+ var subLen uint32
+ if len(contents) < 1 {
+ goto Truncated
+ }
+ subPacket = &OpaqueSubpacket{}
+ switch {
+ case contents[0] < 192:
+ subHeaderLen = 2 // 1 length byte, 1 subtype byte
+ if len(contents) < subHeaderLen {
+ goto Truncated
+ }
+ subLen = uint32(contents[0])
+ contents = contents[1:]
+ case contents[0] < 255:
+ subHeaderLen = 3 // 2 length bytes, 1 subtype
+ if len(contents) < subHeaderLen {
+ goto Truncated
+ }
+ subLen = uint32(contents[0]-192)<<8 + uint32(contents[1]) + 192
+ contents = contents[2:]
+ default:
+ subHeaderLen = 6 // 5 length bytes, 1 subtype
+ if len(contents) < subHeaderLen {
+ goto Truncated
+ }
+ subLen = uint32(contents[1])<<24 |
+ uint32(contents[2])<<16 |
+ uint32(contents[3])<<8 |
+ uint32(contents[4])
+ contents = contents[5:]
+ }
+ if subLen > uint32(len(contents)) || subLen == 0 {
+ goto Truncated
+ }
+ subPacket.SubType = contents[0]
+ subPacket.Contents = contents[1:subLen]
+ return
+Truncated:
+ err = errors.StructuralError("subpacket truncated")
+ return
+}
+
+func (osp *OpaqueSubpacket) Serialize(w io.Writer) (err error) {
+ buf := make([]byte, 6)
+ n := serializeSubpacketLength(buf, len(osp.Contents)+1)
+ buf[n] = osp.SubType
+ if _, err = w.Write(buf[:n+1]); err != nil {
+ return
+ }
+ _, err = w.Write(osp.Contents)
+ return
+}
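
A minimal sketch of using OpaqueReader to walk a stream packet by packet and re-emit each packet unchanged, e.g. to copy or split a keyring without interpreting it; the file names are placeholders:

package main

import (
	"io"
	"log"
	"os"

	"golang.org/x/crypto/openpgp/packet"
)

func main() {
	in, err := os.Open("pubring.gpg") // placeholder input keyring
	if err != nil {
		log.Fatal(err)
	}
	defer in.Close()

	out, err := os.Create("copy.gpg") // placeholder output path
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	or := packet.NewOpaqueReader(in)
	for {
		op, err := or.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		// op.Tag identifies the packet type; op.Contents is the raw body.
		// Serialize writes the header and contents back out verbatim.
		if err := op.Serialize(out); err != nil {
			log.Fatal(err)
		}
	}
}
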
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/opaque_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/opaque_test.go
new file mode 100644
index 00000000000..f27bbfe090b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/opaque_test.go
@@ -0,0 +1,67 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "encoding/hex"
+ "io"
+ "testing"
+)
+
+// Test packet.Read error handling in OpaquePacket.Parse,
+// which attempts to re-read an OpaquePacket as a supported
+// Packet type.
+func TestOpaqueParseReason(t *testing.T) {
+ buf, err := hex.DecodeString(UnsupportedKeyHex)
+ if err != nil {
+ t.Fatal(err)
+ }
+ or := NewOpaqueReader(bytes.NewBuffer(buf))
+ count := 0
+ badPackets := 0
+ var uid *UserId
+ for {
+ op, err := or.Next()
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ t.Errorf("#%d: opaque read error: %v", count, err)
+ break
+ }
+ // try to parse opaque packet
+ p, err := op.Parse()
+ switch pkt := p.(type) {
+ case *UserId:
+ uid = pkt
+ case *OpaquePacket:
+ // If an OpaquePacket can't re-parse, packet.Read
+ // certainly had its reasons.
+ if pkt.Reason == nil {
+ t.Errorf("#%d: opaque packet, no reason", count)
+ } else {
+ badPackets++
+ }
+ }
+ count++
+ }
+
+ const expectedBad = 3
+ // Test post-conditions, make sure we actually parsed packets as expected.
+ if badPackets != expectedBad {
+ t.Errorf("unexpected # unparseable packets: %d (want %d)", badPackets, expectedBad)
+ }
+ if uid == nil {
+ t.Errorf("failed to find expected UID in unsupported keyring")
+ } else if uid.Id != "Armin M. Warda <warda@nephilim.ruhr.de>" {
+ t.Errorf("unexpected UID: %v", uid.Id)
+ }
+}
+
+// This key material has public key and signature packet versions modified to
+// an unsupported value (1), so that trying to parse the OpaquePacket to
+// a typed packet will get an error. It also contains a GnuPG trust packet.
+// (Created with: od -An -t x1 pubring.gpg | xargs | sed 's/ //g')
+const UnsupportedKeyHex = `988d012e7a18a20000010400d6ac00d92b89c1f4396c243abb9b76d2e9673ad63483291fed88e22b82e255e441c078c6abbbf7d2d195e50b62eeaa915b85b0ec20c225ce2c64c167cacb6e711daf2e45da4a8356a059b8160e3b3628ac0dd8437b31f06d53d6e8ea4214d4a26406a6b63e1001406ef23e0bb3069fac9a99a91f77dfafd5de0f188a5da5e3c9000511b42741726d696e204d2e205761726461203c7761726461406e657068696c696d2e727568722e64653e8900950105102e8936c705d1eb399e58489901013f0e03ff5a0c4f421e34fcfa388129166420c08cd76987bcdec6f01bd0271459a85cc22048820dd4e44ac2c7d23908d540f54facf1b36b0d9c20488781ce9dca856531e76e2e846826e9951338020a03a09b57aa5faa82e9267458bd76105399885ac35af7dc1cbb6aaed7c39e1039f3b5beda2c0e916bd38560509bab81235d1a0ead83b0020000`
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/packet.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/packet.go
new file mode 100644
index 00000000000..e2bde1111e2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/packet.go
@@ -0,0 +1,539 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package packet implements parsing and serialization of OpenPGP packets, as
+// specified in RFC 4880.
+package packet // import "golang.org/x/crypto/openpgp/packet"
+
+import (
+ "bufio"
+ "crypto/aes"
+ "crypto/cipher"
+ "crypto/des"
+ "golang.org/x/crypto/cast5"
+ "golang.org/x/crypto/openpgp/errors"
+ "io"
+ "math/big"
+)
+
+// readFull is the same as io.ReadFull except that reading zero bytes returns
+// ErrUnexpectedEOF rather than EOF.
+func readFull(r io.Reader, buf []byte) (n int, err error) {
+ n, err = io.ReadFull(r, buf)
+ if err == io.EOF {
+ err = io.ErrUnexpectedEOF
+ }
+ return
+}
+
+// readLength reads an OpenPGP length from r. See RFC 4880, section 4.2.2.
+func readLength(r io.Reader) (length int64, isPartial bool, err error) {
+ var buf [4]byte
+ _, err = readFull(r, buf[:1])
+ if err != nil {
+ return
+ }
+ switch {
+ case buf[0] < 192:
+ length = int64(buf[0])
+ case buf[0] < 224:
+ length = int64(buf[0]-192) << 8
+ _, err = readFull(r, buf[0:1])
+ if err != nil {
+ return
+ }
+ length += int64(buf[0]) + 192
+ case buf[0] < 255:
+ length = int64(1) << (buf[0] & 0x1f)
+ isPartial = true
+ default:
+ _, err = readFull(r, buf[0:4])
+ if err != nil {
+ return
+ }
+ length = int64(buf[0])<<24 |
+ int64(buf[1])<<16 |
+ int64(buf[2])<<8 |
+ int64(buf[3])
+ }
+ return
+}
+
+// partialLengthReader wraps an io.Reader and handles OpenPGP partial lengths.
+// The continuation lengths are parsed and removed from the stream and EOF is
+// returned at the end of the packet. See RFC 4880, section 4.2.2.4.
+type partialLengthReader struct {
+ r io.Reader
+ remaining int64
+ isPartial bool
+}
+
+func (r *partialLengthReader) Read(p []byte) (n int, err error) {
+ for r.remaining == 0 {
+ if !r.isPartial {
+ return 0, io.EOF
+ }
+ r.remaining, r.isPartial, err = readLength(r.r)
+ if err != nil {
+ return 0, err
+ }
+ }
+
+ toRead := int64(len(p))
+ if toRead > r.remaining {
+ toRead = r.remaining
+ }
+
+ n, err = r.r.Read(p[:int(toRead)])
+ r.remaining -= int64(n)
+ if n < int(toRead) && err == io.EOF {
+ err = io.ErrUnexpectedEOF
+ }
+ return
+}
+
+// partialLengthWriter writes a stream of data using OpenPGP partial lengths.
+// See RFC 4880, section 4.2.2.4.
+type partialLengthWriter struct {
+ w io.WriteCloser
+ lengthByte [1]byte
+}
+
+func (w *partialLengthWriter) Write(p []byte) (n int, err error) {
+ for len(p) > 0 {
+ for power := uint(14); power < 32; power-- {
+ l := 1 << power
+ if len(p) >= l {
+ w.lengthByte[0] = 224 + uint8(power)
+ _, err = w.w.Write(w.lengthByte[:])
+ if err != nil {
+ return
+ }
+ var m int
+ m, err = w.w.Write(p[:l])
+ n += m
+ if err != nil {
+ return
+ }
+ p = p[l:]
+ break
+ }
+ }
+ }
+ return
+}
+
+func (w *partialLengthWriter) Close() error {
+ w.lengthByte[0] = 0
+ _, err := w.w.Write(w.lengthByte[:])
+ if err != nil {
+ return err
+ }
+ return w.w.Close()
+}
+
+// A spanReader is an io.LimitReader, but it returns ErrUnexpectedEOF if the
+// underlying Reader returns EOF before the limit has been reached.
+type spanReader struct {
+ r io.Reader
+ n int64
+}
+
+func (l *spanReader) Read(p []byte) (n int, err error) {
+ if l.n <= 0 {
+ return 0, io.EOF
+ }
+ if int64(len(p)) > l.n {
+ p = p[0:l.n]
+ }
+ n, err = l.r.Read(p)
+ l.n -= int64(n)
+ if l.n > 0 && err == io.EOF {
+ err = io.ErrUnexpectedEOF
+ }
+ return
+}
+
+// readHeader parses a packet header and returns an io.Reader which will return
+// the contents of the packet. See RFC 4880, section 4.2.
+func readHeader(r io.Reader) (tag packetType, length int64, contents io.Reader, err error) {
+ var buf [4]byte
+ _, err = io.ReadFull(r, buf[:1])
+ if err != nil {
+ return
+ }
+ if buf[0]&0x80 == 0 {
+ err = errors.StructuralError("tag byte does not have MSB set")
+ return
+ }
+ if buf[0]&0x40 == 0 {
+ // Old format packet
+ tag = packetType((buf[0] & 0x3f) >> 2)
+ lengthType := buf[0] & 3
+ if lengthType == 3 {
+ length = -1
+ contents = r
+ return
+ }
+ lengthBytes := 1 << lengthType
+ _, err = readFull(r, buf[0:lengthBytes])
+ if err != nil {
+ return
+ }
+ for i := 0; i < lengthBytes; i++ {
+ length <<= 8
+ length |= int64(buf[i])
+ }
+ contents = &spanReader{r, length}
+ return
+ }
+
+ // New format packet
+ tag = packetType(buf[0] & 0x3f)
+ length, isPartial, err := readLength(r)
+ if err != nil {
+ return
+ }
+ if isPartial {
+ contents = &partialLengthReader{
+ remaining: length,
+ isPartial: true,
+ r: r,
+ }
+ length = -1
+ } else {
+ contents = &spanReader{r, length}
+ }
+ return
+}
+
+// serializeHeader writes an OpenPGP packet header to w. See RFC 4880, section
+// 4.2.
+func serializeHeader(w io.Writer, ptype packetType, length int) (err error) {
+ var buf [6]byte
+ var n int
+
+ buf[0] = 0x80 | 0x40 | byte(ptype)
+ if length < 192 {
+ buf[1] = byte(length)
+ n = 2
+ } else if length < 8384 {
+ length -= 192
+ buf[1] = 192 + byte(length>>8)
+ buf[2] = byte(length)
+ n = 3
+ } else {
+ buf[1] = 255
+ buf[2] = byte(length >> 24)
+ buf[3] = byte(length >> 16)
+ buf[4] = byte(length >> 8)
+ buf[5] = byte(length)
+ n = 6
+ }
+
+ _, err = w.Write(buf[:n])
+ return
+}
+
+// serializeStreamHeader writes an OpenPGP packet header to w where the
+// length of the packet is unknown. It returns a io.WriteCloser which can be
+// used to write the contents of the packet. See RFC 4880, section 4.2.
+func serializeStreamHeader(w io.WriteCloser, ptype packetType) (out io.WriteCloser, err error) {
+ var buf [1]byte
+ buf[0] = 0x80 | 0x40 | byte(ptype)
+ _, err = w.Write(buf[:])
+ if err != nil {
+ return
+ }
+ out = &partialLengthWriter{w: w}
+ return
+}
+
+// Packet represents an OpenPGP packet. Users are expected to try casting
+// instances of this interface to specific packet types.
+type Packet interface {
+ parse(io.Reader) error
+}
+
+// consumeAll reads from the given Reader until error, returning the number of
+// bytes read.
+func consumeAll(r io.Reader) (n int64, err error) {
+ var m int
+ var buf [1024]byte
+
+ for {
+ m, err = r.Read(buf[:])
+ n += int64(m)
+ if err == io.EOF {
+ err = nil
+ return
+ }
+ if err != nil {
+ return
+ }
+ }
+
+ panic("unreachable")
+}
+
+// packetType represents the numeric ids of the different OpenPGP packet types. See
+// http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-2
+type packetType uint8
+
+const (
+ packetTypeEncryptedKey packetType = 1
+ packetTypeSignature packetType = 2
+ packetTypeSymmetricKeyEncrypted packetType = 3
+ packetTypeOnePassSignature packetType = 4
+ packetTypePrivateKey packetType = 5
+ packetTypePublicKey packetType = 6
+ packetTypePrivateSubkey packetType = 7
+ packetTypeCompressed packetType = 8
+ packetTypeSymmetricallyEncrypted packetType = 9
+ packetTypeLiteralData packetType = 11
+ packetTypeUserId packetType = 13
+ packetTypePublicSubkey packetType = 14
+ packetTypeUserAttribute packetType = 17
+ packetTypeSymmetricallyEncryptedMDC packetType = 18
+)
+
+// peekVersion detects the version of a public key packet about to
+// be read. A bufio.Reader at the original position of the io.Reader
+// is returned.
+func peekVersion(r io.Reader) (bufr *bufio.Reader, ver byte, err error) {
+ bufr = bufio.NewReader(r)
+ var verBuf []byte
+ if verBuf, err = bufr.Peek(1); err != nil {
+ return
+ }
+ ver = verBuf[0]
+ return
+}
+
+// Read reads a single OpenPGP packet from the given io.Reader. If there is an
+// error parsing a packet, the whole packet is consumed from the input.
+func Read(r io.Reader) (p Packet, err error) {
+ tag, _, contents, err := readHeader(r)
+ if err != nil {
+ return
+ }
+
+ switch tag {
+ case packetTypeEncryptedKey:
+ p = new(EncryptedKey)
+ case packetTypeSignature:
+ var version byte
+ // Detect signature version
+ if contents, version, err = peekVersion(contents); err != nil {
+ return
+ }
+ if version < 4 {
+ p = new(SignatureV3)
+ } else {
+ p = new(Signature)
+ }
+ case packetTypeSymmetricKeyEncrypted:
+ p = new(SymmetricKeyEncrypted)
+ case packetTypeOnePassSignature:
+ p = new(OnePassSignature)
+ case packetTypePrivateKey, packetTypePrivateSubkey:
+ pk := new(PrivateKey)
+ if tag == packetTypePrivateSubkey {
+ pk.IsSubkey = true
+ }
+ p = pk
+ case packetTypePublicKey, packetTypePublicSubkey:
+ var version byte
+ if contents, version, err = peekVersion(contents); err != nil {
+ return
+ }
+ isSubkey := tag == packetTypePublicSubkey
+ if version < 4 {
+ p = &PublicKeyV3{IsSubkey: isSubkey}
+ } else {
+ p = &PublicKey{IsSubkey: isSubkey}
+ }
+ case packetTypeCompressed:
+ p = new(Compressed)
+ case packetTypeSymmetricallyEncrypted:
+ p = new(SymmetricallyEncrypted)
+ case packetTypeLiteralData:
+ p = new(LiteralData)
+ case packetTypeUserId:
+ p = new(UserId)
+ case packetTypeUserAttribute:
+ p = new(UserAttribute)
+ case packetTypeSymmetricallyEncryptedMDC:
+ se := new(SymmetricallyEncrypted)
+ se.MDC = true
+ p = se
+ default:
+ err = errors.UnknownPacketTypeError(tag)
+ }
+ if p != nil {
+ err = p.parse(contents)
+ }
+ if err != nil {
+ consumeAll(contents)
+ }
+ return
+}
+
+// SignatureType represents the different semantic meanings of an OpenPGP
+// signature. See RFC 4880, section 5.2.1.
+type SignatureType uint8
+
+const (
+ SigTypeBinary SignatureType = 0
+ SigTypeText = 1
+ SigTypeGenericCert = 0x10
+ SigTypePersonaCert = 0x11
+ SigTypeCasualCert = 0x12
+ SigTypePositiveCert = 0x13
+ SigTypeSubkeyBinding = 0x18
+ SigTypePrimaryKeyBinding = 0x19
+ SigTypeDirectSignature = 0x1F
+ SigTypeKeyRevocation = 0x20
+ SigTypeSubkeyRevocation = 0x28
+)
+
+// PublicKeyAlgorithm represents the different public key system specified for
+// OpenPGP. See
+// http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-12
+type PublicKeyAlgorithm uint8
+
+const (
+ PubKeyAlgoRSA PublicKeyAlgorithm = 1
+ PubKeyAlgoRSAEncryptOnly PublicKeyAlgorithm = 2
+ PubKeyAlgoRSASignOnly PublicKeyAlgorithm = 3
+ PubKeyAlgoElGamal PublicKeyAlgorithm = 16
+ PubKeyAlgoDSA PublicKeyAlgorithm = 17
+ // RFC 6637, Section 5.
+ PubKeyAlgoECDH PublicKeyAlgorithm = 18
+ PubKeyAlgoECDSA PublicKeyAlgorithm = 19
+)
+
+// CanEncrypt returns true if it's possible to encrypt a message to a public
+// key of the given type.
+func (pka PublicKeyAlgorithm) CanEncrypt() bool {
+ switch pka {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoElGamal:
+ return true
+ }
+ return false
+}
+
+// CanSign returns true if it's possible for a public key of the given type to
+// sign a message.
+func (pka PublicKeyAlgorithm) CanSign() bool {
+ switch pka {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly, PubKeyAlgoDSA, PubKeyAlgoECDSA:
+ return true
+ }
+ return false
+}
+
+// CipherFunction represents the different block ciphers specified for OpenPGP. See
+// http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-13
+type CipherFunction uint8
+
+const (
+ Cipher3DES CipherFunction = 2
+ CipherCAST5 CipherFunction = 3
+ CipherAES128 CipherFunction = 7
+ CipherAES192 CipherFunction = 8
+ CipherAES256 CipherFunction = 9
+)
+
+// KeySize returns the key size, in bytes, of cipher.
+func (cipher CipherFunction) KeySize() int {
+ switch cipher {
+ case Cipher3DES:
+ return 24
+ case CipherCAST5:
+ return cast5.KeySize
+ case CipherAES128:
+ return 16
+ case CipherAES192:
+ return 24
+ case CipherAES256:
+ return 32
+ }
+ return 0
+}
+
+// blockSize returns the block size, in bytes, of cipher.
+func (cipher CipherFunction) blockSize() int {
+ switch cipher {
+ case Cipher3DES:
+ return des.BlockSize
+ case CipherCAST5:
+ return 8
+ case CipherAES128, CipherAES192, CipherAES256:
+ return 16
+ }
+ return 0
+}
+
+// new returns a fresh instance of the given cipher.
+func (cipher CipherFunction) new(key []byte) (block cipher.Block) {
+ switch cipher {
+ case Cipher3DES:
+ block, _ = des.NewTripleDESCipher(key)
+ case CipherCAST5:
+ block, _ = cast5.NewCipher(key)
+ case CipherAES128, CipherAES192, CipherAES256:
+ block, _ = aes.NewCipher(key)
+ }
+ return
+}
+
+// readMPI reads a big integer from r. The bit length returned is the bit
+// length that was specified in r. This is preserved so that the integer can be
+// reserialized exactly.
+func readMPI(r io.Reader) (mpi []byte, bitLength uint16, err error) {
+ var buf [2]byte
+ _, err = readFull(r, buf[0:])
+ if err != nil {
+ return
+ }
+ bitLength = uint16(buf[0])<<8 | uint16(buf[1])
+ numBytes := (int(bitLength) + 7) / 8
+ mpi = make([]byte, numBytes)
+ _, err = readFull(r, mpi)
+ return
+}
+
+// mpiLength returns the length of the given *big.Int when serialized as an
+// MPI.
+func mpiLength(n *big.Int) (mpiLengthInBytes int) {
+ mpiLengthInBytes = 2 /* MPI length */
+ mpiLengthInBytes += (n.BitLen() + 7) / 8
+ return
+}
+
+// writeMPI serializes a big integer to w.
+func writeMPI(w io.Writer, bitLength uint16, mpiBytes []byte) (err error) {
+ _, err = w.Write([]byte{byte(bitLength >> 8), byte(bitLength)})
+ if err == nil {
+ _, err = w.Write(mpiBytes)
+ }
+ return
+}
+
+// writeBig serializes a *big.Int to w.
+func writeBig(w io.Writer, i *big.Int) error {
+ return writeMPI(w, uint16(i.BitLen()), i.Bytes())
+}
+
+// CompressionAlgo represents the different compression algorithms
+// supported by OpenPGP (except for BZIP2, which is not currently
+// supported). See Section 9.3 of RFC 4880.
+type CompressionAlgo uint8
+
+const (
+ CompressionNone CompressionAlgo = 0
+ CompressionZIP CompressionAlgo = 1
+ CompressionZLIB CompressionAlgo = 2
+)
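As a reading aid (not part of the vendored diff), here is a standalone sketch of the new-format length bands that serializeHeader and readLength above implement per RFC 4880, section 4.2.2: one octet below 192, a two-octet form up to 8383, and a five-octet form beyond that.

package main

import "fmt"

// encodeLength mirrors the length portion of serializeHeader above:
// one octet below 192, two octets up to 8383, five octets otherwise.
func encodeLength(length int) []byte {
	switch {
	case length < 192:
		return []byte{byte(length)}
	case length < 8384:
		length -= 192
		return []byte{192 + byte(length>>8), byte(length)}
	default:
		return []byte{255, byte(length >> 24), byte(length >> 16), byte(length >> 8), byte(length)}
	}
}

func main() {
	// Print the encoded form at and around each band boundary.
	for _, n := range []int{0, 191, 192, 8383, 8384, 100000} {
		fmt.Printf("%6d -> % x\n", n, encodeLength(n))
	}
}
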
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/packet_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/packet_test.go
new file mode 100644
index 00000000000..1dab5c3d588
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/packet_test.go
@@ -0,0 +1,255 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "encoding/hex"
+ "fmt"
+ "golang.org/x/crypto/openpgp/errors"
+ "io"
+ "io/ioutil"
+ "testing"
+)
+
+func TestReadFull(t *testing.T) {
+ var out [4]byte
+
+ b := bytes.NewBufferString("foo")
+ n, err := readFull(b, out[:3])
+ if n != 3 || err != nil {
+ t.Errorf("full read failed n:%d err:%s", n, err)
+ }
+
+ b = bytes.NewBufferString("foo")
+ n, err = readFull(b, out[:4])
+ if n != 3 || err != io.ErrUnexpectedEOF {
+ t.Errorf("partial read failed n:%d err:%s", n, err)
+ }
+
+ b = bytes.NewBuffer(nil)
+ n, err = readFull(b, out[:3])
+ if n != 0 || err != io.ErrUnexpectedEOF {
+ t.Errorf("empty read failed n:%d err:%s", n, err)
+ }
+}
+
+func readerFromHex(s string) io.Reader {
+ data, err := hex.DecodeString(s)
+ if err != nil {
+ panic("readerFromHex: bad input")
+ }
+ return bytes.NewBuffer(data)
+}
+
+var readLengthTests = []struct {
+ hexInput string
+ length int64
+ isPartial bool
+ err error
+}{
+ {"", 0, false, io.ErrUnexpectedEOF},
+ {"1f", 31, false, nil},
+ {"c0", 0, false, io.ErrUnexpectedEOF},
+ {"c101", 256 + 1 + 192, false, nil},
+ {"e0", 1, true, nil},
+ {"e1", 2, true, nil},
+ {"e2", 4, true, nil},
+ {"ff", 0, false, io.ErrUnexpectedEOF},
+ {"ff00", 0, false, io.ErrUnexpectedEOF},
+ {"ff0000", 0, false, io.ErrUnexpectedEOF},
+ {"ff000000", 0, false, io.ErrUnexpectedEOF},
+ {"ff00000000", 0, false, nil},
+ {"ff01020304", 16909060, false, nil},
+}
+
+func TestReadLength(t *testing.T) {
+ for i, test := range readLengthTests {
+ length, isPartial, err := readLength(readerFromHex(test.hexInput))
+ if test.err != nil {
+ if err != test.err {
+ t.Errorf("%d: expected different error got:%s want:%s", i, err, test.err)
+ }
+ continue
+ }
+ if err != nil {
+ t.Errorf("%d: unexpected error: %s", i, err)
+ continue
+ }
+ if length != test.length || isPartial != test.isPartial {
+ t.Errorf("%d: bad result got:(%d,%t) want:(%d,%t)", i, length, isPartial, test.length, test.isPartial)
+ }
+ }
+}
+
+var partialLengthReaderTests = []struct {
+ hexInput string
+ err error
+ hexOutput string
+}{
+ {"e0", io.ErrUnexpectedEOF, ""},
+ {"e001", io.ErrUnexpectedEOF, ""},
+ {"e0010102", nil, "0102"},
+ {"ff00000000", nil, ""},
+ {"e10102e1030400", nil, "01020304"},
+ {"e101", io.ErrUnexpectedEOF, ""},
+}
+
+func TestPartialLengthReader(t *testing.T) {
+ for i, test := range partialLengthReaderTests {
+ r := &partialLengthReader{readerFromHex(test.hexInput), 0, true}
+ out, err := ioutil.ReadAll(r)
+ if test.err != nil {
+ if err != test.err {
+ t.Errorf("%d: expected different error got:%s want:%s", i, err, test.err)
+ }
+ continue
+ }
+ if err != nil {
+ t.Errorf("%d: unexpected error: %s", i, err)
+ continue
+ }
+
+ got := fmt.Sprintf("%x", out)
+ if got != test.hexOutput {
+ t.Errorf("%d: got:%s want:%s", i, test.hexOutput, got)
+ }
+ }
+}
+
+var readHeaderTests = []struct {
+ hexInput string
+ structuralError bool
+ unexpectedEOF bool
+ tag int
+ length int64
+ hexOutput string
+}{
+ {"", false, false, 0, 0, ""},
+ {"7f", true, false, 0, 0, ""},
+
+ // Old format headers
+ {"80", false, true, 0, 0, ""},
+ {"8001", false, true, 0, 1, ""},
+ {"800102", false, false, 0, 1, "02"},
+ {"81000102", false, false, 0, 1, "02"},
+ {"820000000102", false, false, 0, 1, "02"},
+ {"860000000102", false, false, 1, 1, "02"},
+ {"83010203", false, false, 0, -1, "010203"},
+
+ // New format headers
+ {"c0", false, true, 0, 0, ""},
+ {"c000", false, false, 0, 0, ""},
+ {"c00102", false, false, 0, 1, "02"},
+ {"c0020203", false, false, 0, 2, "0203"},
+ {"c00202", false, true, 0, 2, ""},
+ {"c3020203", false, false, 3, 2, "0203"},
+}
+
+func TestReadHeader(t *testing.T) {
+ for i, test := range readHeaderTests {
+ tag, length, contents, err := readHeader(readerFromHex(test.hexInput))
+ if test.structuralError {
+ if _, ok := err.(errors.StructuralError); ok {
+ continue
+ }
+ t.Errorf("%d: expected StructuralError, got:%s", i, err)
+ continue
+ }
+ if err != nil {
+ if len(test.hexInput) == 0 && err == io.EOF {
+ continue
+ }
+ if !test.unexpectedEOF || err != io.ErrUnexpectedEOF {
+ t.Errorf("%d: unexpected error from readHeader: %s", i, err)
+ }
+ continue
+ }
+ if int(tag) != test.tag || length != test.length {
+ t.Errorf("%d: got:(%d,%d) want:(%d,%d)", i, int(tag), length, test.tag, test.length)
+ continue
+ }
+
+ body, err := ioutil.ReadAll(contents)
+ if err != nil {
+ if !test.unexpectedEOF || err != io.ErrUnexpectedEOF {
+ t.Errorf("%d: unexpected error from contents: %s", i, err)
+ }
+ continue
+ }
+ if test.unexpectedEOF {
+ t.Errorf("%d: expected ErrUnexpectedEOF from contents but got no error", i)
+ continue
+ }
+ got := fmt.Sprintf("%x", body)
+ if got != test.hexOutput {
+ t.Errorf("%d: got:%s want:%s", i, got, test.hexOutput)
+ }
+ }
+}
+
+func TestSerializeHeader(t *testing.T) {
+ tag := packetTypePublicKey
+ lengths := []int{0, 1, 2, 64, 192, 193, 8000, 8384, 8385, 10000}
+
+ for _, length := range lengths {
+ buf := bytes.NewBuffer(nil)
+ serializeHeader(buf, tag, length)
+ tag2, length2, _, err := readHeader(buf)
+ if err != nil {
+ t.Errorf("length %d, err: %s", length, err)
+ }
+ if tag2 != tag {
+ t.Errorf("length %d, tag incorrect (got %d, want %d)", length, tag2, tag)
+ }
+ if int(length2) != length {
+ t.Errorf("length %d, length incorrect (got %d)", length, length2)
+ }
+ }
+}
+
+func TestPartialLengths(t *testing.T) {
+ buf := bytes.NewBuffer(nil)
+ w := new(partialLengthWriter)
+ w.w = noOpCloser{buf}
+
+ const maxChunkSize = 64
+
+ var b [maxChunkSize]byte
+ var n uint8
+ for l := 1; l <= maxChunkSize; l++ {
+ for i := 0; i < l; i++ {
+ b[i] = n
+ n++
+ }
+ m, err := w.Write(b[:l])
+ if m != l {
+ t.Errorf("short write got: %d want: %d", m, l)
+ }
+ if err != nil {
+ t.Errorf("error from write: %s", err)
+ }
+ }
+ w.Close()
+
+ want := (maxChunkSize * (maxChunkSize + 1)) / 2
+ copyBuf := bytes.NewBuffer(nil)
+ r := &partialLengthReader{buf, 0, true}
+ m, err := io.Copy(copyBuf, r)
+ if m != int64(want) {
+ t.Errorf("short copy got: %d want: %d", m, want)
+ }
+ if err != nil {
+ t.Errorf("error from copy: %s", err)
+ }
+
+ copyBytes := copyBuf.Bytes()
+ for i := 0; i < want; i++ {
+ if copyBytes[i] != uint8(i) {
+ t.Errorf("bad pattern in copy at %d", i)
+ break
+ }
+ }
+}
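A small sketch (not part of the vendored diff) that decodes the partial-length marker octets used in the hex vectors above; per readLength in packet.go, an octet in 224..254 announces a chunk of 1<<(octet&0x1f) bytes, and the stream ends with a non-partial length (here a zero-length final chunk).

package main

import "fmt"

func main() {
	// Marker octets in the range 224..254 declare a chunk of 1<<(b&0x1f) bytes.
	for _, b := range []byte{0xe0, 0xe1, 0xe2, 0xfe} {
		fmt.Printf("0x%02x -> chunk of %d bytes\n", b, 1<<(b&0x1f))
	}
	// "e10102e1030400": a 2-byte chunk 0102, another 2-byte chunk 0304, then a
	// zero-length final chunk (00), so the reassembled stream is 01020304.
}
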
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/private_key.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/private_key.go
new file mode 100644
index 00000000000..740a27deacb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/private_key.go
@@ -0,0 +1,326 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "crypto/cipher"
+ "crypto/dsa"
+ "crypto/rsa"
+ "crypto/sha1"
+ "golang.org/x/crypto/openpgp/elgamal"
+ "golang.org/x/crypto/openpgp/errors"
+ "golang.org/x/crypto/openpgp/s2k"
+ "io"
+ "io/ioutil"
+ "math/big"
+ "strconv"
+ "time"
+)
+
+// PrivateKey represents a possibly encrypted private key. See RFC 4880,
+// section 5.5.3.
+type PrivateKey struct {
+ PublicKey
+ Encrypted bool // if true then the private key is unavailable until Decrypt has been called.
+ encryptedData []byte
+ cipher CipherFunction
+ s2k func(out, in []byte)
+ PrivateKey interface{} // An *rsa.PrivateKey, *dsa.PrivateKey or *elgamal.PrivateKey.
+ sha1Checksum bool
+ iv []byte
+}
+
+func NewRSAPrivateKey(currentTime time.Time, priv *rsa.PrivateKey) *PrivateKey {
+ pk := new(PrivateKey)
+ pk.PublicKey = *NewRSAPublicKey(currentTime, &priv.PublicKey)
+ pk.PrivateKey = priv
+ return pk
+}
+
+func NewDSAPrivateKey(currentTime time.Time, priv *dsa.PrivateKey) *PrivateKey {
+ pk := new(PrivateKey)
+ pk.PublicKey = *NewDSAPublicKey(currentTime, &priv.PublicKey)
+ pk.PrivateKey = priv
+ return pk
+}
+
+func NewElGamalPrivateKey(currentTime time.Time, priv *elgamal.PrivateKey) *PrivateKey {
+ pk := new(PrivateKey)
+ pk.PublicKey = *NewElGamalPublicKey(currentTime, &priv.PublicKey)
+ pk.PrivateKey = priv
+ return pk
+}
+
+func (pk *PrivateKey) parse(r io.Reader) (err error) {
+ err = (&pk.PublicKey).parse(r)
+ if err != nil {
+ return
+ }
+ var buf [1]byte
+ _, err = readFull(r, buf[:])
+ if err != nil {
+ return
+ }
+
+ s2kType := buf[0]
+
+ switch s2kType {
+ case 0:
+ pk.s2k = nil
+ pk.Encrypted = false
+ case 254, 255:
+ _, err = readFull(r, buf[:])
+ if err != nil {
+ return
+ }
+ pk.cipher = CipherFunction(buf[0])
+ pk.Encrypted = true
+ pk.s2k, err = s2k.Parse(r)
+ if err != nil {
+ return
+ }
+ if s2kType == 254 {
+ pk.sha1Checksum = true
+ }
+ default:
+ return errors.UnsupportedError("deprecated s2k function in private key")
+ }
+
+ if pk.Encrypted {
+ blockSize := pk.cipher.blockSize()
+ if blockSize == 0 {
+ return errors.UnsupportedError("unsupported cipher in private key: " + strconv.Itoa(int(pk.cipher)))
+ }
+ pk.iv = make([]byte, blockSize)
+ _, err = readFull(r, pk.iv)
+ if err != nil {
+ return
+ }
+ }
+
+ pk.encryptedData, err = ioutil.ReadAll(r)
+ if err != nil {
+ return
+ }
+
+ if !pk.Encrypted {
+ return pk.parsePrivateKey(pk.encryptedData)
+ }
+
+ return
+}
+
+func mod64kHash(d []byte) uint16 {
+ var h uint16
+ for _, b := range d {
+ h += uint16(b)
+ }
+ return h
+}
+
+func (pk *PrivateKey) Serialize(w io.Writer) (err error) {
+ // TODO(agl): support encrypted private keys
+ buf := bytes.NewBuffer(nil)
+ err = pk.PublicKey.serializeWithoutHeaders(buf)
+ if err != nil {
+ return
+ }
+ buf.WriteByte(0 /* no encryption */)
+
+ privateKeyBuf := bytes.NewBuffer(nil)
+
+ switch priv := pk.PrivateKey.(type) {
+ case *rsa.PrivateKey:
+ err = serializeRSAPrivateKey(privateKeyBuf, priv)
+ case *dsa.PrivateKey:
+ err = serializeDSAPrivateKey(privateKeyBuf, priv)
+ case *elgamal.PrivateKey:
+ err = serializeElGamalPrivateKey(privateKeyBuf, priv)
+ default:
+ err = errors.InvalidArgumentError("unknown private key type")
+ }
+ if err != nil {
+ return
+ }
+
+ ptype := packetTypePrivateKey
+ contents := buf.Bytes()
+ privateKeyBytes := privateKeyBuf.Bytes()
+ if pk.IsSubkey {
+ ptype = packetTypePrivateSubkey
+ }
+ err = serializeHeader(w, ptype, len(contents)+len(privateKeyBytes)+2)
+ if err != nil {
+ return
+ }
+ _, err = w.Write(contents)
+ if err != nil {
+ return
+ }
+ _, err = w.Write(privateKeyBytes)
+ if err != nil {
+ return
+ }
+
+ checksum := mod64kHash(privateKeyBytes)
+ var checksumBytes [2]byte
+ checksumBytes[0] = byte(checksum >> 8)
+ checksumBytes[1] = byte(checksum)
+ _, err = w.Write(checksumBytes[:])
+
+ return
+}
+
+func serializeRSAPrivateKey(w io.Writer, priv *rsa.PrivateKey) error {
+ err := writeBig(w, priv.D)
+ if err != nil {
+ return err
+ }
+ err = writeBig(w, priv.Primes[1])
+ if err != nil {
+ return err
+ }
+ err = writeBig(w, priv.Primes[0])
+ if err != nil {
+ return err
+ }
+ return writeBig(w, priv.Precomputed.Qinv)
+}
+
+func serializeDSAPrivateKey(w io.Writer, priv *dsa.PrivateKey) error {
+ return writeBig(w, priv.X)
+}
+
+func serializeElGamalPrivateKey(w io.Writer, priv *elgamal.PrivateKey) error {
+ return writeBig(w, priv.X)
+}
+
+// Decrypt decrypts an encrypted private key using a passphrase.
+func (pk *PrivateKey) Decrypt(passphrase []byte) error {
+ if !pk.Encrypted {
+ return nil
+ }
+
+ key := make([]byte, pk.cipher.KeySize())
+ pk.s2k(key, passphrase)
+ block := pk.cipher.new(key)
+ cfb := cipher.NewCFBDecrypter(block, pk.iv)
+
+ data := make([]byte, len(pk.encryptedData))
+ cfb.XORKeyStream(data, pk.encryptedData)
+
+ if pk.sha1Checksum {
+ if len(data) < sha1.Size {
+ return errors.StructuralError("truncated private key data")
+ }
+ h := sha1.New()
+ h.Write(data[:len(data)-sha1.Size])
+ sum := h.Sum(nil)
+ if !bytes.Equal(sum, data[len(data)-sha1.Size:]) {
+ return errors.StructuralError("private key checksum failure")
+ }
+ data = data[:len(data)-sha1.Size]
+ } else {
+ if len(data) < 2 {
+ return errors.StructuralError("truncated private key data")
+ }
+ var sum uint16
+ for i := 0; i < len(data)-2; i++ {
+ sum += uint16(data[i])
+ }
+ if data[len(data)-2] != uint8(sum>>8) ||
+ data[len(data)-1] != uint8(sum) {
+ return errors.StructuralError("private key checksum failure")
+ }
+ data = data[:len(data)-2]
+ }
+
+ return pk.parsePrivateKey(data)
+}
+
+func (pk *PrivateKey) parsePrivateKey(data []byte) (err error) {
+ switch pk.PublicKey.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly, PubKeyAlgoRSAEncryptOnly:
+ return pk.parseRSAPrivateKey(data)
+ case PubKeyAlgoDSA:
+ return pk.parseDSAPrivateKey(data)
+ case PubKeyAlgoElGamal:
+ return pk.parseElGamalPrivateKey(data)
+ }
+ panic("impossible")
+}
+
+func (pk *PrivateKey) parseRSAPrivateKey(data []byte) (err error) {
+ rsaPub := pk.PublicKey.PublicKey.(*rsa.PublicKey)
+ rsaPriv := new(rsa.PrivateKey)
+ rsaPriv.PublicKey = *rsaPub
+
+ buf := bytes.NewBuffer(data)
+ d, _, err := readMPI(buf)
+ if err != nil {
+ return
+ }
+ p, _, err := readMPI(buf)
+ if err != nil {
+ return
+ }
+ q, _, err := readMPI(buf)
+ if err != nil {
+ return
+ }
+
+ rsaPriv.D = new(big.Int).SetBytes(d)
+ rsaPriv.Primes = make([]*big.Int, 2)
+ rsaPriv.Primes[0] = new(big.Int).SetBytes(p)
+ rsaPriv.Primes[1] = new(big.Int).SetBytes(q)
+ if err := rsaPriv.Validate(); err != nil {
+ return err
+ }
+ rsaPriv.Precompute()
+ pk.PrivateKey = rsaPriv
+ pk.Encrypted = false
+ pk.encryptedData = nil
+
+ return nil
+}
+
+func (pk *PrivateKey) parseDSAPrivateKey(data []byte) (err error) {
+ dsaPub := pk.PublicKey.PublicKey.(*dsa.PublicKey)
+ dsaPriv := new(dsa.PrivateKey)
+ dsaPriv.PublicKey = *dsaPub
+
+ buf := bytes.NewBuffer(data)
+ x, _, err := readMPI(buf)
+ if err != nil {
+ return
+ }
+
+ dsaPriv.X = new(big.Int).SetBytes(x)
+ pk.PrivateKey = dsaPriv
+ pk.Encrypted = false
+ pk.encryptedData = nil
+
+ return nil
+}
+
+func (pk *PrivateKey) parseElGamalPrivateKey(data []byte) (err error) {
+ pub := pk.PublicKey.PublicKey.(*elgamal.PublicKey)
+ priv := new(elgamal.PrivateKey)
+ priv.PublicKey = *pub
+
+ buf := bytes.NewBuffer(data)
+ x, _, err := readMPI(buf)
+ if err != nil {
+ return
+ }
+
+ priv.X = new(big.Int).SetBytes(x)
+ pk.PrivateKey = priv
+ pk.Encrypted = false
+ pk.encryptedData = nil
+
+ return nil
+}
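For clarity (not part of the vendored diff), a tiny sketch of the additive mod-64k checksum that Serialize appends after the algorithm-specific key material, mirroring mod64kHash above.

package main

import "fmt"

// mod64kHash re-implements the checksum above for illustration: the sum of
// all octets, reduced modulo 65536 by uint16 wraparound.
func mod64kHash(d []byte) uint16 {
	var h uint16
	for _, b := range d {
		h += uint16(b)
	}
	return h
}

func main() {
	data := []byte{0x01, 0x02, 0xff}
	sum := mod64kHash(data)
	// Serialize writes the checksum big-endian as the final two octets.
	fmt.Printf("checksum 0x%04x, stored as the two octets %02x %02x\n",
		sum, byte(sum>>8), byte(sum))
}
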
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/private_key_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/private_key_test.go
new file mode 100644
index 00000000000..25c8931f2ca
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/private_key_test.go
@@ -0,0 +1,69 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "testing"
+ "time"
+)
+
+var privateKeyTests = []struct {
+ privateKeyHex string
+ creationTime time.Time
+}{
+ {
+ privKeyRSAHex,
+ time.Unix(0x4cc349a8, 0),
+ },
+ {
+ privKeyElGamalHex,
+ time.Unix(0x4df9ee1a, 0),
+ },
+}
+
+func TestPrivateKeyRead(t *testing.T) {
+ for i, test := range privateKeyTests {
+ packet, err := Read(readerFromHex(test.privateKeyHex))
+ if err != nil {
+ t.Errorf("#%d: failed to parse: %s", i, err)
+ continue
+ }
+
+ privKey := packet.(*PrivateKey)
+
+ if !privKey.Encrypted {
+ t.Errorf("#%d: private key isn't encrypted", i)
+ continue
+ }
+
+ err = privKey.Decrypt([]byte("wrong password"))
+ if err == nil {
+ t.Errorf("#%d: decrypted with incorrect key", i)
+ continue
+ }
+
+ err = privKey.Decrypt([]byte("testing"))
+ if err != nil {
+ t.Errorf("#%d: failed to decrypt: %s", i, err)
+ continue
+ }
+
+ if !privKey.CreationTime.Equal(test.creationTime) || privKey.Encrypted {
+ t.Errorf("#%d: bad result, got: %#v", i, privKey)
+ }
+ }
+}
+
+func TestIssue11505(t *testing.T) {
+ // parsing an RSA private key with p or q == 1 used to panic due to a divide by zero
+ _, _ = Read(readerFromHex("9c3004303030300100000011303030000000000000010130303030303030303030303030303030303030303030303030303030303030303030303030303030303030"))
+}
+
+// Generated with `gpg --export-secret-keys "Test Key 2"`
+const privKeyRSAHex = "9501fe044cc349a8010400b70ca0010e98c090008d45d1ee8f9113bd5861fd57b88bacb7c68658747663f1e1a3b5a98f32fda6472373c024b97359cd2efc88ff60f77751adfbf6af5e615e6a1408cfad8bf0cea30b0d5f53aa27ad59089ba9b15b7ebc2777a25d7b436144027e3bcd203909f147d0e332b240cf63d3395f5dfe0df0a6c04e8655af7eacdf0011010001fe0303024a252e7d475fd445607de39a265472aa74a9320ba2dac395faa687e9e0336aeb7e9a7397e511b5afd9dc84557c80ac0f3d4d7bfec5ae16f20d41c8c84a04552a33870b930420e230e179564f6d19bb153145e76c33ae993886c388832b0fa042ddda7f133924f3854481533e0ede31d51278c0519b29abc3bf53da673e13e3e1214b52413d179d7f66deee35cac8eacb060f78379d70ef4af8607e68131ff529439668fc39c9ce6dfef8a5ac234d234802cbfb749a26107db26406213ae5c06d4673253a3cbee1fcbae58d6ab77e38d6e2c0e7c6317c48e054edadb5a40d0d48acb44643d998139a8a66bb820be1f3f80185bc777d14b5954b60effe2448a036d565c6bc0b915fcea518acdd20ab07bc1529f561c58cd044f723109b93f6fd99f876ff891d64306b5d08f48bab59f38695e9109c4dec34013ba3153488ce070268381ba923ee1eb77125b36afcb4347ec3478c8f2735b06ef17351d872e577fa95d0c397c88c71b59629a36aec"
+
+// Generated by `gpg --export-secret-keys` followed by a manual extraction of
+// the ElGamal subkey from the packets.
+const privKeyElGamalHex = "9d0157044df9ee1a100400eb8e136a58ec39b582629cdadf830bc64e0a94ed8103ca8bb247b27b11b46d1d25297ef4bcc3071785ba0c0bedfe89eabc5287fcc0edf81ab5896c1c8e4b20d27d79813c7aede75320b33eaeeaa586edc00fd1036c10133e6ba0ff277245d0d59d04b2b3421b7244aca5f4a8d870c6f1c1fbff9e1c26699a860b9504f35ca1d700030503fd1ededd3b840795be6d9ccbe3c51ee42e2f39233c432b831ddd9c4e72b7025a819317e47bf94f9ee316d7273b05d5fcf2999c3a681f519b1234bbfa6d359b4752bd9c3f77d6b6456cde152464763414ca130f4e91d91041432f90620fec0e6d6b5116076c2985d5aeaae13be492b9b329efcaf7ee25120159a0a30cd976b42d7afe030302dae7eb80db744d4960c4df930d57e87fe81412eaace9f900e6c839817a614ddb75ba6603b9417c33ea7b6c93967dfa2bcff3fa3c74a5ce2c962db65b03aece14c96cbd0038fc"
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key.go
new file mode 100644
index 00000000000..37a6472e561
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key.go
@@ -0,0 +1,724 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "crypto"
+ "crypto/dsa"
+ "crypto/ecdsa"
+ "crypto/elliptic"
+ "crypto/rsa"
+ "crypto/sha1"
+ _ "crypto/sha256"
+ _ "crypto/sha512"
+ "encoding/binary"
+ "fmt"
+ "hash"
+ "io"
+ "math/big"
+ "strconv"
+ "time"
+
+ "golang.org/x/crypto/openpgp/elgamal"
+ "golang.org/x/crypto/openpgp/errors"
+)
+
+var (
+ // NIST curve P-256
+ oidCurveP256 []byte = []byte{0x2A, 0x86, 0x48, 0xCE, 0x3D, 0x03, 0x01, 0x07}
+ // NIST curve P-384
+ oidCurveP384 []byte = []byte{0x2B, 0x81, 0x04, 0x00, 0x22}
+ // NIST curve P-521
+ oidCurveP521 []byte = []byte{0x2B, 0x81, 0x04, 0x00, 0x23}
+)
+
+const maxOIDLength = 8
+
+// ecdsaKey stores the algorithm-specific fields for ECDSA keys.
+// as defined in RFC 6637, Section 9.
+type ecdsaKey struct {
+ // oid contains the OID byte sequence identifying the elliptic curve used
+ oid []byte
+ // p contains the elliptic curve point that represents the public key
+ p parsedMPI
+}
+
+// parseOID reads the OID for the curve as defined in RFC 6637, Section 9.
+func parseOID(r io.Reader) (oid []byte, err error) {
+ buf := make([]byte, maxOIDLength)
+ if _, err = readFull(r, buf[:1]); err != nil {
+ return
+ }
+ oidLen := buf[0]
+ if int(oidLen) > len(buf) {
+ err = errors.UnsupportedError("invalid oid length: " + strconv.Itoa(int(oidLen)))
+ return
+ }
+ oid = buf[:oidLen]
+ _, err = readFull(r, oid)
+ return
+}
+
+func (f *ecdsaKey) parse(r io.Reader) (err error) {
+ if f.oid, err = parseOID(r); err != nil {
+ return err
+ }
+ f.p.bytes, f.p.bitLength, err = readMPI(r)
+ return
+}
+
+func (f *ecdsaKey) serialize(w io.Writer) (err error) {
+ buf := make([]byte, maxOIDLength+1)
+ buf[0] = byte(len(f.oid))
+ copy(buf[1:], f.oid)
+ if _, err = w.Write(buf[:len(f.oid)+1]); err != nil {
+ return
+ }
+ return writeMPIs(w, f.p)
+}
+
+func (f *ecdsaKey) newECDSA() (*ecdsa.PublicKey, error) {
+ var c elliptic.Curve
+ if bytes.Equal(f.oid, oidCurveP256) {
+ c = elliptic.P256()
+ } else if bytes.Equal(f.oid, oidCurveP384) {
+ c = elliptic.P384()
+ } else if bytes.Equal(f.oid, oidCurveP521) {
+ c = elliptic.P521()
+ } else {
+ return nil, errors.UnsupportedError(fmt.Sprintf("unsupported oid: %x", f.oid))
+ }
+ x, y := elliptic.Unmarshal(c, f.p.bytes)
+ if x == nil {
+ return nil, errors.UnsupportedError("failed to parse EC point")
+ }
+ return &ecdsa.PublicKey{Curve: c, X: x, Y: y}, nil
+}
+
+func (f *ecdsaKey) byteLen() int {
+ return 1 + len(f.oid) + 2 + len(f.p.bytes)
+}
+
+type kdfHashFunction byte
+type kdfAlgorithm byte
+
+// ecdhKdf stores key derivation function parameters
+// used for ECDH encryption. See RFC 6637, Section 9.
+type ecdhKdf struct {
+ KdfHash kdfHashFunction
+ KdfAlgo kdfAlgorithm
+}
+
+func (f *ecdhKdf) parse(r io.Reader) (err error) {
+ buf := make([]byte, 1)
+ if _, err = readFull(r, buf); err != nil {
+ return
+ }
+ kdfLen := int(buf[0])
+ if kdfLen < 3 {
+ return errors.UnsupportedError("Unsupported ECDH KDF length: " + strconv.Itoa(kdfLen))
+ }
+ buf = make([]byte, kdfLen)
+ if _, err = readFull(r, buf); err != nil {
+ return
+ }
+ reserved := int(buf[0])
+ f.KdfHash = kdfHashFunction(buf[1])
+ f.KdfAlgo = kdfAlgorithm(buf[2])
+ if reserved != 0x01 {
+ return errors.UnsupportedError("Unsupported KDF reserved field: " + strconv.Itoa(reserved))
+ }
+ return
+}
+
+func (f *ecdhKdf) serialize(w io.Writer) (err error) {
+ buf := make([]byte, 4)
+ // See RFC 6637, Section 9, Algorithm-Specific Fields for ECDH keys.
+ buf[0] = byte(0x03) // Length of the following fields
+ buf[1] = byte(0x01) // Reserved for future extensions, must be 1 for now
+ buf[2] = byte(f.KdfHash)
+ buf[3] = byte(f.KdfAlgo)
+ _, err = w.Write(buf[:])
+ return
+}
+
+func (f *ecdhKdf) byteLen() int {
+ return 4
+}
+
+// PublicKey represents an OpenPGP public key. See RFC 4880, section 5.5.2.
+type PublicKey struct {
+ CreationTime time.Time
+ PubKeyAlgo PublicKeyAlgorithm
+ PublicKey interface{} // *rsa.PublicKey, *dsa.PublicKey or *ecdsa.PublicKey
+ Fingerprint [20]byte
+ KeyId uint64
+ IsSubkey bool
+
+ n, e, p, q, g, y parsedMPI
+
+ // RFC 6637 fields
+ ec *ecdsaKey
+ ecdh *ecdhKdf
+}
+
+// signingKey provides a convenient abstraction over signature verification
+// for v3 and v4 public keys.
+type signingKey interface {
+ SerializeSignaturePrefix(io.Writer)
+ serializeWithoutHeaders(io.Writer) error
+}
+
+func fromBig(n *big.Int) parsedMPI {
+ return parsedMPI{
+ bytes: n.Bytes(),
+ bitLength: uint16(n.BitLen()),
+ }
+}
+
+// NewRSAPublicKey returns a PublicKey that wraps the given rsa.PublicKey.
+func NewRSAPublicKey(creationTime time.Time, pub *rsa.PublicKey) *PublicKey {
+ pk := &PublicKey{
+ CreationTime: creationTime,
+ PubKeyAlgo: PubKeyAlgoRSA,
+ PublicKey: pub,
+ n: fromBig(pub.N),
+ e: fromBig(big.NewInt(int64(pub.E))),
+ }
+
+ pk.setFingerPrintAndKeyId()
+ return pk
+}
+
+// NewDSAPublicKey returns a PublicKey that wraps the given dsa.PublicKey.
+func NewDSAPublicKey(creationTime time.Time, pub *dsa.PublicKey) *PublicKey {
+ pk := &PublicKey{
+ CreationTime: creationTime,
+ PubKeyAlgo: PubKeyAlgoDSA,
+ PublicKey: pub,
+ p: fromBig(pub.P),
+ q: fromBig(pub.Q),
+ g: fromBig(pub.G),
+ y: fromBig(pub.Y),
+ }
+
+ pk.setFingerPrintAndKeyId()
+ return pk
+}
+
+// NewElGamalPublicKey returns a PublicKey that wraps the given elgamal.PublicKey.
+func NewElGamalPublicKey(creationTime time.Time, pub *elgamal.PublicKey) *PublicKey {
+ pk := &PublicKey{
+ CreationTime: creationTime,
+ PubKeyAlgo: PubKeyAlgoElGamal,
+ PublicKey: pub,
+ p: fromBig(pub.P),
+ g: fromBig(pub.G),
+ y: fromBig(pub.Y),
+ }
+
+ pk.setFingerPrintAndKeyId()
+ return pk
+}
+
+func (pk *PublicKey) parse(r io.Reader) (err error) {
+ // RFC 4880, section 5.5.2
+ var buf [6]byte
+ _, err = readFull(r, buf[:])
+ if err != nil {
+ return
+ }
+ if buf[0] != 4 {
+ return errors.UnsupportedError("public key version")
+ }
+ pk.CreationTime = time.Unix(int64(uint32(buf[1])<<24|uint32(buf[2])<<16|uint32(buf[3])<<8|uint32(buf[4])), 0)
+ pk.PubKeyAlgo = PublicKeyAlgorithm(buf[5])
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+ err = pk.parseRSA(r)
+ case PubKeyAlgoDSA:
+ err = pk.parseDSA(r)
+ case PubKeyAlgoElGamal:
+ err = pk.parseElGamal(r)
+ case PubKeyAlgoECDSA:
+ pk.ec = new(ecdsaKey)
+ if err = pk.ec.parse(r); err != nil {
+ return err
+ }
+ pk.PublicKey, err = pk.ec.newECDSA()
+ case PubKeyAlgoECDH:
+ pk.ec = new(ecdsaKey)
+ if err = pk.ec.parse(r); err != nil {
+ return
+ }
+ pk.ecdh = new(ecdhKdf)
+ if err = pk.ecdh.parse(r); err != nil {
+ return
+ }
+ // The ECDH key is stored in an ecdsa.PublicKey for convenience.
+ pk.PublicKey, err = pk.ec.newECDSA()
+ default:
+ err = errors.UnsupportedError("public key type: " + strconv.Itoa(int(pk.PubKeyAlgo)))
+ }
+ if err != nil {
+ return
+ }
+
+ pk.setFingerPrintAndKeyId()
+ return
+}
+
+func (pk *PublicKey) setFingerPrintAndKeyId() {
+ // RFC 4880, section 12.2
+ fingerPrint := sha1.New()
+ pk.SerializeSignaturePrefix(fingerPrint)
+ pk.serializeWithoutHeaders(fingerPrint)
+ copy(pk.Fingerprint[:], fingerPrint.Sum(nil))
+ pk.KeyId = binary.BigEndian.Uint64(pk.Fingerprint[12:20])
+}
+
+// parseRSA parses RSA public key material from the given Reader. See RFC 4880,
+// section 5.5.2.
+func (pk *PublicKey) parseRSA(r io.Reader) (err error) {
+ pk.n.bytes, pk.n.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
+ pk.e.bytes, pk.e.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
+
+ if len(pk.e.bytes) > 3 {
+ err = errors.UnsupportedError("large public exponent")
+ return
+ }
+ rsa := &rsa.PublicKey{
+ N: new(big.Int).SetBytes(pk.n.bytes),
+ E: 0,
+ }
+ for i := 0; i < len(pk.e.bytes); i++ {
+ rsa.E <<= 8
+ rsa.E |= int(pk.e.bytes[i])
+ }
+ pk.PublicKey = rsa
+ return
+}
+
+// parseDSA parses DSA public key material from the given Reader. See RFC 4880,
+// section 5.5.2.
+func (pk *PublicKey) parseDSA(r io.Reader) (err error) {
+ pk.p.bytes, pk.p.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
+ pk.q.bytes, pk.q.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
+ pk.g.bytes, pk.g.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
+ pk.y.bytes, pk.y.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
+
+ dsa := new(dsa.PublicKey)
+ dsa.P = new(big.Int).SetBytes(pk.p.bytes)
+ dsa.Q = new(big.Int).SetBytes(pk.q.bytes)
+ dsa.G = new(big.Int).SetBytes(pk.g.bytes)
+ dsa.Y = new(big.Int).SetBytes(pk.y.bytes)
+ pk.PublicKey = dsa
+ return
+}
+
+// parseElGamal parses ElGamal public key material from the given Reader. See
+// RFC 4880, section 5.5.2.
+func (pk *PublicKey) parseElGamal(r io.Reader) (err error) {
+ pk.p.bytes, pk.p.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
+ pk.g.bytes, pk.g.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
+ pk.y.bytes, pk.y.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
+
+ elgamal := new(elgamal.PublicKey)
+ elgamal.P = new(big.Int).SetBytes(pk.p.bytes)
+ elgamal.G = new(big.Int).SetBytes(pk.g.bytes)
+ elgamal.Y = new(big.Int).SetBytes(pk.y.bytes)
+ pk.PublicKey = elgamal
+ return
+}
+
+// SerializeSignaturePrefix writes the prefix for this public key to the given Writer.
+// The prefix is used when calculating a signature over this public key. See
+// RFC 4880, section 5.2.4.
+func (pk *PublicKey) SerializeSignaturePrefix(h io.Writer) {
+ var pLength uint16
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+ pLength += 2 + uint16(len(pk.n.bytes))
+ pLength += 2 + uint16(len(pk.e.bytes))
+ case PubKeyAlgoDSA:
+ pLength += 2 + uint16(len(pk.p.bytes))
+ pLength += 2 + uint16(len(pk.q.bytes))
+ pLength += 2 + uint16(len(pk.g.bytes))
+ pLength += 2 + uint16(len(pk.y.bytes))
+ case PubKeyAlgoElGamal:
+ pLength += 2 + uint16(len(pk.p.bytes))
+ pLength += 2 + uint16(len(pk.g.bytes))
+ pLength += 2 + uint16(len(pk.y.bytes))
+ case PubKeyAlgoECDSA:
+ pLength += uint16(pk.ec.byteLen())
+ case PubKeyAlgoECDH:
+ pLength += uint16(pk.ec.byteLen())
+ pLength += uint16(pk.ecdh.byteLen())
+ default:
+ panic("unknown public key algorithm")
+ }
+ pLength += 6
+ h.Write([]byte{0x99, byte(pLength >> 8), byte(pLength)})
+ return
+}
+
+func (pk *PublicKey) Serialize(w io.Writer) (err error) {
+ length := 6 // 6 byte header
+
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+ length += 2 + len(pk.n.bytes)
+ length += 2 + len(pk.e.bytes)
+ case PubKeyAlgoDSA:
+ length += 2 + len(pk.p.bytes)
+ length += 2 + len(pk.q.bytes)
+ length += 2 + len(pk.g.bytes)
+ length += 2 + len(pk.y.bytes)
+ case PubKeyAlgoElGamal:
+ length += 2 + len(pk.p.bytes)
+ length += 2 + len(pk.g.bytes)
+ length += 2 + len(pk.y.bytes)
+ case PubKeyAlgoECDSA:
+ length += pk.ec.byteLen()
+ case PubKeyAlgoECDH:
+ length += pk.ec.byteLen()
+ length += pk.ecdh.byteLen()
+ default:
+ panic("unknown public key algorithm")
+ }
+
+ packetType := packetTypePublicKey
+ if pk.IsSubkey {
+ packetType = packetTypePublicSubkey
+ }
+ err = serializeHeader(w, packetType, length)
+ if err != nil {
+ return
+ }
+ return pk.serializeWithoutHeaders(w)
+}
+
+// serializeWithoutHeaders marshals the PublicKey to w in the form of an
+// OpenPGP public key packet, not including the packet header.
+func (pk *PublicKey) serializeWithoutHeaders(w io.Writer) (err error) {
+ var buf [6]byte
+ buf[0] = 4
+ t := uint32(pk.CreationTime.Unix())
+ buf[1] = byte(t >> 24)
+ buf[2] = byte(t >> 16)
+ buf[3] = byte(t >> 8)
+ buf[4] = byte(t)
+ buf[5] = byte(pk.PubKeyAlgo)
+
+ _, err = w.Write(buf[:])
+ if err != nil {
+ return
+ }
+
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+ return writeMPIs(w, pk.n, pk.e)
+ case PubKeyAlgoDSA:
+ return writeMPIs(w, pk.p, pk.q, pk.g, pk.y)
+ case PubKeyAlgoElGamal:
+ return writeMPIs(w, pk.p, pk.g, pk.y)
+ case PubKeyAlgoECDSA:
+ return pk.ec.serialize(w)
+ case PubKeyAlgoECDH:
+ if err = pk.ec.serialize(w); err != nil {
+ return
+ }
+ return pk.ecdh.serialize(w)
+ }
+ return errors.InvalidArgumentError("bad public-key algorithm")
+}
+
+// CanSign returns true iff this public key can generate signatures
+func (pk *PublicKey) CanSign() bool {
+ return pk.PubKeyAlgo != PubKeyAlgoRSAEncryptOnly && pk.PubKeyAlgo != PubKeyAlgoElGamal
+}
+
+// VerifySignature returns nil iff sig is a valid signature, made by this
+// public key, of the data hashed into signed. signed is mutated by this call.
+func (pk *PublicKey) VerifySignature(signed hash.Hash, sig *Signature) (err error) {
+ if !pk.CanSign() {
+ return errors.InvalidArgumentError("public key cannot generate signatures")
+ }
+
+ signed.Write(sig.HashSuffix)
+ hashBytes := signed.Sum(nil)
+
+ if hashBytes[0] != sig.HashTag[0] || hashBytes[1] != sig.HashTag[1] {
+ return errors.SignatureError("hash tag doesn't match")
+ }
+
+ if pk.PubKeyAlgo != sig.PubKeyAlgo {
+ return errors.InvalidArgumentError("public key and signature use different algorithms")
+ }
+
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+ rsaPublicKey, _ := pk.PublicKey.(*rsa.PublicKey)
+ err = rsa.VerifyPKCS1v15(rsaPublicKey, sig.Hash, hashBytes, sig.RSASignature.bytes)
+ if err != nil {
+ return errors.SignatureError("RSA verification failure")
+ }
+ return nil
+ case PubKeyAlgoDSA:
+ dsaPublicKey, _ := pk.PublicKey.(*dsa.PublicKey)
+ // Need to truncate hashBytes to match FIPS 186-3 section 4.6.
+ subgroupSize := (dsaPublicKey.Q.BitLen() + 7) / 8
+ if len(hashBytes) > subgroupSize {
+ hashBytes = hashBytes[:subgroupSize]
+ }
+ if !dsa.Verify(dsaPublicKey, hashBytes, new(big.Int).SetBytes(sig.DSASigR.bytes), new(big.Int).SetBytes(sig.DSASigS.bytes)) {
+ return errors.SignatureError("DSA verification failure")
+ }
+ return nil
+ case PubKeyAlgoECDSA:
+ ecdsaPublicKey := pk.PublicKey.(*ecdsa.PublicKey)
+ if !ecdsa.Verify(ecdsaPublicKey, hashBytes, new(big.Int).SetBytes(sig.ECDSASigR.bytes), new(big.Int).SetBytes(sig.ECDSASigS.bytes)) {
+ return errors.SignatureError("ECDSA verification failure")
+ }
+ return nil
+ default:
+ return errors.SignatureError("Unsupported public key algorithm used in signature")
+ }
+ panic("unreachable")
+}
+
+// VerifySignatureV3 returns nil iff sig is a valid signature, made by this
+// public key, of the data hashed into signed. signed is mutated by this call.
+func (pk *PublicKey) VerifySignatureV3(signed hash.Hash, sig *SignatureV3) (err error) {
+ if !pk.CanSign() {
+ return errors.InvalidArgumentError("public key cannot generate signatures")
+ }
+
+ suffix := make([]byte, 5)
+ suffix[0] = byte(sig.SigType)
+ binary.BigEndian.PutUint32(suffix[1:], uint32(sig.CreationTime.Unix()))
+ signed.Write(suffix)
+ hashBytes := signed.Sum(nil)
+
+ if hashBytes[0] != sig.HashTag[0] || hashBytes[1] != sig.HashTag[1] {
+ return errors.SignatureError("hash tag doesn't match")
+ }
+
+ if pk.PubKeyAlgo != sig.PubKeyAlgo {
+ return errors.InvalidArgumentError("public key and signature use different algorithms")
+ }
+
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+ rsaPublicKey := pk.PublicKey.(*rsa.PublicKey)
+ if err = rsa.VerifyPKCS1v15(rsaPublicKey, sig.Hash, hashBytes, sig.RSASignature.bytes); err != nil {
+ return errors.SignatureError("RSA verification failure")
+ }
+ return
+ case PubKeyAlgoDSA:
+ dsaPublicKey := pk.PublicKey.(*dsa.PublicKey)
+ // Need to truncate hashBytes to match FIPS 186-3 section 4.6.
+ subgroupSize := (dsaPublicKey.Q.BitLen() + 7) / 8
+ if len(hashBytes) > subgroupSize {
+ hashBytes = hashBytes[:subgroupSize]
+ }
+ if !dsa.Verify(dsaPublicKey, hashBytes, new(big.Int).SetBytes(sig.DSASigR.bytes), new(big.Int).SetBytes(sig.DSASigS.bytes)) {
+ return errors.SignatureError("DSA verification failure")
+ }
+ return nil
+ default:
+ panic("shouldn't happen")
+ }
+ panic("unreachable")
+}
+
+// keySignatureHash returns a Hash of the message that needs to be signed for
+// pk to assert a subkey relationship to signed.
+func keySignatureHash(pk, signed signingKey, hashFunc crypto.Hash) (h hash.Hash, err error) {
+ if !hashFunc.Available() {
+ return nil, errors.UnsupportedError("hash function")
+ }
+ h = hashFunc.New()
+
+ // RFC 4880, section 5.2.4
+ pk.SerializeSignaturePrefix(h)
+ pk.serializeWithoutHeaders(h)
+ signed.SerializeSignaturePrefix(h)
+ signed.serializeWithoutHeaders(h)
+ return
+}
+
+// VerifyKeySignature returns nil iff sig is a valid signature, made by this
+// public key, of signed.
+func (pk *PublicKey) VerifyKeySignature(signed *PublicKey, sig *Signature) error {
+ h, err := keySignatureHash(pk, signed, sig.Hash)
+ if err != nil {
+ return err
+ }
+ if err = pk.VerifySignature(h, sig); err != nil {
+ return err
+ }
+
+ if sig.FlagSign {
+ // Signing subkeys must be cross-signed. See
+ // https://www.gnupg.org/faq/subkey-cross-certify.html.
+ if sig.EmbeddedSignature == nil {
+ return errors.StructuralError("signing subkey is missing cross-signature")
+ }
+ // Verify the cross-signature. This is calculated over the same
+ // data as the main signature, so we cannot just recursively
+ // call signed.VerifyKeySignature(...)
+ if h, err = keySignatureHash(pk, signed, sig.EmbeddedSignature.Hash); err != nil {
+ return errors.StructuralError("error while hashing for cross-signature: " + err.Error())
+ }
+ if err := signed.VerifySignature(h, sig.EmbeddedSignature); err != nil {
+ return errors.StructuralError("error while verifying cross-signature: " + err.Error())
+ }
+ }
+
+ return nil
+}
+
+func keyRevocationHash(pk signingKey, hashFunc crypto.Hash) (h hash.Hash, err error) {
+ if !hashFunc.Available() {
+ return nil, errors.UnsupportedError("hash function")
+ }
+ h = hashFunc.New()
+
+ // RFC 4880, section 5.2.4
+ pk.SerializeSignaturePrefix(h)
+ pk.serializeWithoutHeaders(h)
+
+ return
+}
+
+// VerifyRevocationSignature returns nil iff sig is a valid signature, made by this
+// public key.
+func (pk *PublicKey) VerifyRevocationSignature(sig *Signature) (err error) {
+ h, err := keyRevocationHash(pk, sig.Hash)
+ if err != nil {
+ return err
+ }
+ return pk.VerifySignature(h, sig)
+}
+
+// userIdSignatureHash returns a Hash of the message that needs to be signed
+// to assert that pk is a valid key for id.
+func userIdSignatureHash(id string, pk *PublicKey, hashFunc crypto.Hash) (h hash.Hash, err error) {
+ if !hashFunc.Available() {
+ return nil, errors.UnsupportedError("hash function")
+ }
+ h = hashFunc.New()
+
+ // RFC 4880, section 5.2.4
+ pk.SerializeSignaturePrefix(h)
+ pk.serializeWithoutHeaders(h)
+
+ var buf [5]byte
+ buf[0] = 0xb4
+ buf[1] = byte(len(id) >> 24)
+ buf[2] = byte(len(id) >> 16)
+ buf[3] = byte(len(id) >> 8)
+ buf[4] = byte(len(id))
+ h.Write(buf[:])
+ h.Write([]byte(id))
+
+ return
+}
+
+// VerifyUserIdSignature returns nil iff sig is a valid signature, made by this
+// public key, that id is the identity of pub.
+func (pk *PublicKey) VerifyUserIdSignature(id string, pub *PublicKey, sig *Signature) (err error) {
+ h, err := userIdSignatureHash(id, pub, sig.Hash)
+ if err != nil {
+ return err
+ }
+ return pk.VerifySignature(h, sig)
+}
+
+// VerifyUserIdSignatureV3 returns nil iff sig is a valid signature, made by this
+// public key, that id is the identity of pub.
+func (pk *PublicKey) VerifyUserIdSignatureV3(id string, pub *PublicKey, sig *SignatureV3) (err error) {
+ h, err := userIdSignatureV3Hash(id, pub, sig.Hash)
+ if err != nil {
+ return err
+ }
+ return pk.VerifySignatureV3(h, sig)
+}
+
+// KeyIdString returns the public key's 64-bit key ID (the last eight bytes
+// of its fingerprint) in capital hex (e.g. "6C7EE1B8621CC013").
+func (pk *PublicKey) KeyIdString() string {
+ return fmt.Sprintf("%X", pk.Fingerprint[12:20])
+}
+
+// KeyIdShortString returns the short (32-bit) form of the public key's key ID
+// in capital hex, as shown by gpg --list-keys (e.g. "621CC013").
+func (pk *PublicKey) KeyIdShortString() string {
+ return fmt.Sprintf("%X", pk.Fingerprint[16:20])
+}
+
+// A parsedMPI is used to store the contents of a big integer, along with the
+// bit length that was specified in the original input. This allows the MPI to
+// be reserialized exactly.
+type parsedMPI struct {
+ bytes []byte
+ bitLength uint16
+}
+
+// writeMPIs is a utility function for serializing several big integers to the
+// given Writer.
+func writeMPIs(w io.Writer, mpis ...parsedMPI) (err error) {
+ for _, mpi := range mpis {
+ err = writeMPI(w, mpi.bitLength, mpi.bytes)
+ if err != nil {
+ return
+ }
+ }
+ return
+}
+
+// BitLength returns the bit length for the given public key.
+func (pk *PublicKey) BitLength() (bitLength uint16, err error) {
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+ bitLength = pk.n.bitLength
+ case PubKeyAlgoDSA:
+ bitLength = pk.p.bitLength
+ case PubKeyAlgoElGamal:
+ bitLength = pk.p.bitLength
+ default:
+ err = errors.InvalidArgumentError("bad public-key algorithm")
+ }
+ return
+}
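A usage sketch (not part of the vendored diff) showing the exported constructor and serialization defined above; it assumes the package's canonical import path golang.org/x/crypto/openpgp/packet and a throwaway 1024-bit RSA key generated only for illustration.

package main

import (
	"bytes"
	"crypto/rand"
	"crypto/rsa"
	"fmt"
	"time"

	"golang.org/x/crypto/openpgp/packet"
)

func main() {
	// Throwaway key, generated only so the example is self-contained.
	priv, err := rsa.GenerateKey(rand.Reader, 1024)
	if err != nil {
		panic(err)
	}

	// Wrap the crypto/rsa public key and emit an OpenPGP public key packet.
	pk := packet.NewRSAPublicKey(time.Now(), &priv.PublicKey)
	var buf bytes.Buffer
	if err := pk.Serialize(&buf); err != nil {
		panic(err)
	}
	fmt.Printf("key id %s, packet of %d bytes\n", pk.KeyIdString(), buf.Len())
}
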
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_test.go
new file mode 100644
index 00000000000..7ad7d91856d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_test.go
@@ -0,0 +1,202 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "encoding/hex"
+ "testing"
+ "time"
+)
+
+var pubKeyTests = []struct {
+ hexData string
+ hexFingerprint string
+ creationTime time.Time
+ pubKeyAlgo PublicKeyAlgorithm
+ keyId uint64
+ keyIdString string
+ keyIdShort string
+}{
+ {rsaPkDataHex, rsaFingerprintHex, time.Unix(0x4d3c5c10, 0), PubKeyAlgoRSA, 0xa34d7e18c20c31bb, "A34D7E18C20C31BB", "C20C31BB"},
+ {dsaPkDataHex, dsaFingerprintHex, time.Unix(0x4d432f89, 0), PubKeyAlgoDSA, 0x8e8fbe54062f19ed, "8E8FBE54062F19ED", "062F19ED"},
+ {ecdsaPkDataHex, ecdsaFingerprintHex, time.Unix(0x5071c294, 0), PubKeyAlgoECDSA, 0x43fe956c542ca00b, "43FE956C542CA00B", "542CA00B"},
+}
+
+func TestPublicKeyRead(t *testing.T) {
+ for i, test := range pubKeyTests {
+ packet, err := Read(readerFromHex(test.hexData))
+ if err != nil {
+ t.Errorf("#%d: Read error: %s", i, err)
+ continue
+ }
+ pk, ok := packet.(*PublicKey)
+ if !ok {
+ t.Errorf("#%d: failed to parse, got: %#v", i, packet)
+ continue
+ }
+ if pk.PubKeyAlgo != test.pubKeyAlgo {
+ t.Errorf("#%d: bad public key algorithm got:%x want:%x", i, pk.PubKeyAlgo, test.pubKeyAlgo)
+ }
+ if !pk.CreationTime.Equal(test.creationTime) {
+ t.Errorf("#%d: bad creation time got:%v want:%v", i, pk.CreationTime, test.creationTime)
+ }
+ expectedFingerprint, _ := hex.DecodeString(test.hexFingerprint)
+ if !bytes.Equal(expectedFingerprint, pk.Fingerprint[:]) {
+ t.Errorf("#%d: bad fingerprint got:%x want:%x", i, pk.Fingerprint[:], expectedFingerprint)
+ }
+ if pk.KeyId != test.keyId {
+ t.Errorf("#%d: bad keyid got:%x want:%x", i, pk.KeyId, test.keyId)
+ }
+ if g, e := pk.KeyIdString(), test.keyIdString; g != e {
+ t.Errorf("#%d: bad KeyIdString got:%q want:%q", i, g, e)
+ }
+ if g, e := pk.KeyIdShortString(), test.keyIdShort; g != e {
+ t.Errorf("#%d: bad KeyIdShortString got:%q want:%q", i, g, e)
+ }
+ }
+}
+
+func TestPublicKeySerialize(t *testing.T) {
+ for i, test := range pubKeyTests {
+ packet, err := Read(readerFromHex(test.hexData))
+ if err != nil {
+ t.Errorf("#%d: Read error: %s", i, err)
+ continue
+ }
+ pk, ok := packet.(*PublicKey)
+ if !ok {
+ t.Errorf("#%d: failed to parse, got: %#v", i, packet)
+ continue
+ }
+ serializeBuf := bytes.NewBuffer(nil)
+ err = pk.Serialize(serializeBuf)
+ if err != nil {
+ t.Errorf("#%d: failed to serialize: %s", i, err)
+ continue
+ }
+
+ packet, err = Read(serializeBuf)
+ if err != nil {
+ t.Errorf("#%d: Read error (from serialized data): %s", i, err)
+ continue
+ }
+ pk, ok = packet.(*PublicKey)
+ if !ok {
+ t.Errorf("#%d: failed to parse serialized data, got: %#v", i, packet)
+ continue
+ }
+ }
+}
+
+func TestEcc384Serialize(t *testing.T) {
+ r := readerFromHex(ecc384PubHex)
+ var w bytes.Buffer
+ for i := 0; i < 2; i++ {
+ // Public key
+ p, err := Read(r)
+ if err != nil {
+ t.Error(err)
+ }
+ pubkey := p.(*PublicKey)
+ if !bytes.Equal(pubkey.ec.oid, []byte{0x2b, 0x81, 0x04, 0x00, 0x22}) {
+ t.Errorf("Unexpected pubkey OID: %x", pubkey.ec.oid)
+ }
+ if !bytes.Equal(pubkey.ec.p.bytes[:5], []byte{0x04, 0xf6, 0xb8, 0xc5, 0xac}) {
+ t.Errorf("Unexpected pubkey P[:5]: %x", pubkey.ec.p.bytes)
+ }
+ if pubkey.KeyId != 0x098033880F54719F {
+ t.Errorf("Unexpected pubkey ID: %x", pubkey.KeyId)
+ }
+ err = pubkey.Serialize(&w)
+ if err != nil {
+ t.Error(err)
+ }
+ // User ID
+ p, err = Read(r)
+ if err != nil {
+ t.Error(err)
+ }
+ uid := p.(*UserId)
+ if uid.Id != "ec_dsa_dh_384 <openpgp@brainhub.org>" {
+ t.Error("Unexpected UID:", uid.Id)
+ }
+ err = uid.Serialize(&w)
+ if err != nil {
+ t.Error(err)
+ }
+ // User ID Sig
+ p, err = Read(r)
+ if err != nil {
+ t.Error(err)
+ }
+ uidSig := p.(*Signature)
+ err = pubkey.VerifyUserIdSignature(uid.Id, pubkey, uidSig)
+ if err != nil {
+ t.Error(err, ": UID")
+ }
+ err = uidSig.Serialize(&w)
+ if err != nil {
+ t.Error(err)
+ }
+ // Subkey
+ p, err = Read(r)
+ if err != nil {
+ t.Error(err)
+ }
+ subkey := p.(*PublicKey)
+ if !bytes.Equal(subkey.ec.oid, []byte{0x2b, 0x81, 0x04, 0x00, 0x22}) {
+ t.Errorf("Unexpected subkey OID: %x", subkey.ec.oid)
+ }
+ if !bytes.Equal(subkey.ec.p.bytes[:5], []byte{0x04, 0x2f, 0xaa, 0x84, 0x02}) {
+ t.Errorf("Unexpected subkey P[:5]: %x", subkey.ec.p.bytes)
+ }
+ if subkey.ecdh.KdfHash != 0x09 {
+ t.Error("Expected KDF hash function SHA384 (0x09), got", subkey.ecdh.KdfHash)
+ }
+ if subkey.ecdh.KdfAlgo != 0x09 {
+ t.Error("Expected KDF symmetric alg AES256 (0x09), got", subkey.ecdh.KdfAlgo)
+ }
+ if subkey.KeyId != 0xAA8B938F9A201946 {
+ t.Errorf("Unexpected subkey ID: %x", subkey.KeyId)
+ }
+ err = subkey.Serialize(&w)
+ if err != nil {
+ t.Error(err)
+ }
+ // Subkey Sig
+ p, err = Read(r)
+ if err != nil {
+ t.Error(err)
+ }
+ subkeySig := p.(*Signature)
+ err = pubkey.VerifyKeySignature(subkey, subkeySig)
+ if err != nil {
+ t.Error(err)
+ }
+ err = subkeySig.Serialize(&w)
+ if err != nil {
+ t.Error(err)
+ }
+ // Now read back what we've written again
+ r = bytes.NewBuffer(w.Bytes())
+ w.Reset()
+ }
+}
+
+const rsaFingerprintHex = "5fb74b1d03b1e3cb31bc2f8aa34d7e18c20c31bb"
+
+const rsaPkDataHex = "988d044d3c5c10010400b1d13382944bd5aba23a4312968b5095d14f947f600eb478e14a6fcb16b0e0cac764884909c020bc495cfcc39a935387c661507bdb236a0612fb582cac3af9b29cc2c8c70090616c41b662f4da4c1201e195472eb7f4ae1ccbcbf9940fe21d985e379a5563dde5b9a23d35f1cfaa5790da3b79db26f23695107bfaca8e7b5bcd0011010001"
+
+const dsaFingerprintHex = "eece4c094db002103714c63c8e8fbe54062f19ed"
+
+const dsaPkDataHex = "9901a2044d432f89110400cd581334f0d7a1e1bdc8b9d6d8c0baf68793632735d2bb0903224cbaa1dfbf35a60ee7a13b92643421e1eb41aa8d79bea19a115a677f6b8ba3c7818ce53a6c2a24a1608bd8b8d6e55c5090cbde09dd26e356267465ae25e69ec8bdd57c7bbb2623e4d73336f73a0a9098f7f16da2e25252130fd694c0e8070c55a812a423ae7f00a0ebf50e70c2f19c3520a551bd4b08d30f23530d3d03ff7d0bf4a53a64a09dc5e6e6e35854b7d70c882b0c60293401958b1bd9e40abec3ea05ba87cf64899299d4bd6aa7f459c201d3fbbd6c82004bdc5e8a9eb8082d12054cc90fa9d4ec251a843236a588bf49552441817436c4f43326966fe85447d4e6d0acf8fa1ef0f014730770603ad7634c3088dc52501c237328417c31c89ed70400b2f1a98b0bf42f11fefc430704bebbaa41d9f355600c3facee1e490f64208e0e094ea55e3a598a219a58500bf78ac677b670a14f4e47e9cf8eab4f368cc1ddcaa18cc59309d4cc62dd4f680e73e6cc3e1ce87a84d0925efbcb26c575c093fc42eecf45135fabf6403a25c2016e1774c0484e440a18319072c617cc97ac0a3bb0"
+
+const ecdsaFingerprintHex = "9892270b38b8980b05c8d56d43fe956c542ca00b"
+
+const ecdsaPkDataHex = "9893045071c29413052b8104002304230401f4867769cedfa52c325018896245443968e52e51d0c2df8d939949cb5b330f2921711fbee1c9b9dddb95d15cb0255e99badeddda7cc23d9ddcaacbc290969b9f24019375d61c2e4e3b36953a28d8b2bc95f78c3f1d592fb24499be348656a7b17e3963187b4361afe497bc5f9f81213f04069f8e1fb9e6a6290ae295ca1a92b894396cb4"
+
+// Source: https://sites.google.com/site/brainhub/pgpecckeys#TOC-ECC-NIST-P-384-key
+const ecc384PubHex = `99006f044d53059213052b81040022030304f6b8c5aced5b84ef9f4a209db2e4a9dfb70d28cb8c10ecd57674a9fa5a67389942b62d5e51367df4c7bfd3f8e500feecf07ed265a621a8ebbbe53e947ec78c677eba143bd1533c2b350e1c29f82313e1e1108eba063be1e64b10e6950e799c2db42465635f6473615f64685f333834203c6f70656e70677040627261696e6875622e6f72673e8900cb04101309005305024d530592301480000000002000077072656665727265642d656d61696c2d656e636f64696e67407067702e636f6d7067706d696d65040b090807021901051b03000000021602051e010000000415090a08000a0910098033880f54719fca2b0180aa37350968bd5f115afd8ce7bc7b103822152dbff06d0afcda835329510905b98cb469ba208faab87c7412b799e7b633017f58364ea480e8a1a3f253a0c5f22c446e8be9a9fce6210136ee30811abbd49139de28b5bdf8dc36d06ae748579e9ff503b90073044d53059212052b810400220303042faa84024a20b6735c4897efa5bfb41bf85b7eefeab5ca0cb9ffc8ea04a46acb25534a577694f9e25340a4ab5223a9dd1eda530c8aa2e6718db10d7e672558c7736fe09369ea5739a2a3554bf16d41faa50562f11c6d39bbd5dffb6b9a9ec9180301090989008404181309000c05024d530592051b0c000000000a0910098033880f54719f80970180eee7a6d8fcee41ee4f9289df17f9bcf9d955dca25c583b94336f3a2b2d4986dc5cf417b8d2dc86f741a9e1a6d236c0e3017d1c76575458a0cfb93ae8a2b274fcc65ceecd7a91eec83656ba13219969f06945b48c56bd04152c3a0553c5f2f4bd1267`
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_v3.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_v3.go
new file mode 100644
index 00000000000..26337f5aaf1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_v3.go
@@ -0,0 +1,280 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "crypto"
+ "crypto/md5"
+ "crypto/rsa"
+ "encoding/binary"
+ "fmt"
+ "hash"
+ "io"
+ "math/big"
+ "strconv"
+ "time"
+
+ "golang.org/x/crypto/openpgp/errors"
+)
+
+// PublicKeyV3 represents older, version 3 public keys. These keys are less secure and
+// should not be used for signing or encrypting. They are supported here only for
+// parsing version 3 key material and validating signatures.
+// See RFC 4880, section 5.5.2.
+type PublicKeyV3 struct {
+ CreationTime time.Time
+ DaysToExpire uint16
+ PubKeyAlgo PublicKeyAlgorithm
+ PublicKey *rsa.PublicKey
+ Fingerprint [16]byte
+ KeyId uint64
+ IsSubkey bool
+
+ n, e parsedMPI
+}
+
+// newRSAPublicKeyV3 returns a PublicKey that wraps the given rsa.PublicKey.
+// Included here for testing purposes only. RFC 4880, section 5.5.2:
+// "an implementation MUST NOT generate a V3 key, but MAY accept it."
+func newRSAPublicKeyV3(creationTime time.Time, pub *rsa.PublicKey) *PublicKeyV3 {
+ pk := &PublicKeyV3{
+ CreationTime: creationTime,
+ PublicKey: pub,
+ n: fromBig(pub.N),
+ e: fromBig(big.NewInt(int64(pub.E))),
+ }
+
+ pk.setFingerPrintAndKeyId()
+ return pk
+}
+
+func (pk *PublicKeyV3) parse(r io.Reader) (err error) {
+ // RFC 4880, section 5.5.2
+ var buf [8]byte
+ if _, err = readFull(r, buf[:]); err != nil {
+ return
+ }
+ if buf[0] < 2 || buf[0] > 3 {
+ return errors.UnsupportedError("public key version")
+ }
+ pk.CreationTime = time.Unix(int64(uint32(buf[1])<<24|uint32(buf[2])<<16|uint32(buf[3])<<8|uint32(buf[4])), 0)
+ pk.DaysToExpire = binary.BigEndian.Uint16(buf[5:7])
+ pk.PubKeyAlgo = PublicKeyAlgorithm(buf[7])
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+ err = pk.parseRSA(r)
+ default:
+ err = errors.UnsupportedError("public key type: " + strconv.Itoa(int(pk.PubKeyAlgo)))
+ }
+ if err != nil {
+ return
+ }
+
+ pk.setFingerPrintAndKeyId()
+ return
+}
+
+func (pk *PublicKeyV3) setFingerPrintAndKeyId() {
+ // RFC 4880, section 12.2
+ fingerPrint := md5.New()
+ fingerPrint.Write(pk.n.bytes)
+ fingerPrint.Write(pk.e.bytes)
+ fingerPrint.Sum(pk.Fingerprint[:0])
+ pk.KeyId = binary.BigEndian.Uint64(pk.n.bytes[len(pk.n.bytes)-8:])
+}
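+
+// As a worked illustration of the derivation above: the fingerprint is the
+// 16-byte MD5 digest of the modulus and exponent bodies (without their MPI
+// length prefixes), and the key ID is simply the low 64 bits of the modulus.
+// A modulus whose final eight octets are de 0f 18 8a 5d a5 e3 c9 therefore
+// yields KeyId == 0xDE0F188A5DA5E3C9 and KeyIdString() == "DE0F188A5DA5E3C9",
+// matching the expectations in public_key_v3_test.go.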
+
+// parseRSA parses RSA public key material from the given Reader. See RFC 4880,
+// section 5.5.2.
+func (pk *PublicKeyV3) parseRSA(r io.Reader) (err error) {
+ if pk.n.bytes, pk.n.bitLength, err = readMPI(r); err != nil {
+ return
+ }
+ if pk.e.bytes, pk.e.bitLength, err = readMPI(r); err != nil {
+ return
+ }
+
+ // RFC 4880 Section 12.2 requires the low 8 bytes of the
+ // modulus to form the key id.
+ if len(pk.n.bytes) < 8 {
+ return errors.StructuralError("v3 public key modulus is too short")
+ }
+ if len(pk.e.bytes) > 3 {
+ err = errors.UnsupportedError("large public exponent")
+ return
+ }
+ rsa := &rsa.PublicKey{N: new(big.Int).SetBytes(pk.n.bytes)}
+ for i := 0; i < len(pk.e.bytes); i++ {
+ rsa.E <<= 8
+ rsa.E |= int(pk.e.bytes[i])
+ }
+ pk.PublicKey = rsa
+ return
+}
+
+// SerializeSignaturePrefix writes the prefix for this public key to the given Writer.
+// The prefix is used when calculating a signature over this public key. See
+// RFC 4880, section 5.2.4.
+func (pk *PublicKeyV3) SerializeSignaturePrefix(w io.Writer) {
+ var pLength uint16
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+ pLength += 2 + uint16(len(pk.n.bytes))
+ pLength += 2 + uint16(len(pk.e.bytes))
+ default:
+ panic("unknown public key algorithm")
+ }
+ pLength += 6
+ w.Write([]byte{0x99, byte(pLength >> 8), byte(pLength)})
+ return
+}
+
+func (pk *PublicKeyV3) Serialize(w io.Writer) (err error) {
+ length := 8 // 8 byte header
+
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+ length += 2 + len(pk.n.bytes)
+ length += 2 + len(pk.e.bytes)
+ default:
+ panic("unknown public key algorithm")
+ }
+
+ packetType := packetTypePublicKey
+ if pk.IsSubkey {
+ packetType = packetTypePublicSubkey
+ }
+ if err = serializeHeader(w, packetType, length); err != nil {
+ return
+ }
+ return pk.serializeWithoutHeaders(w)
+}
+
+// serializeWithoutHeaders marshals the PublicKey to w in the form of an
+// OpenPGP public key packet, not including the packet header.
+func (pk *PublicKeyV3) serializeWithoutHeaders(w io.Writer) (err error) {
+ var buf [8]byte
+ // Version 3
+ buf[0] = 3
+ // Creation time
+ t := uint32(pk.CreationTime.Unix())
+ buf[1] = byte(t >> 24)
+ buf[2] = byte(t >> 16)
+ buf[3] = byte(t >> 8)
+ buf[4] = byte(t)
+ // Days to expire
+ buf[5] = byte(pk.DaysToExpire >> 8)
+ buf[6] = byte(pk.DaysToExpire)
+ // Public key algorithm
+ buf[7] = byte(pk.PubKeyAlgo)
+
+ if _, err = w.Write(buf[:]); err != nil {
+ return
+ }
+
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+ return writeMPIs(w, pk.n, pk.e)
+ }
+ return errors.InvalidArgumentError("bad public-key algorithm")
+}
+
+// CanSign returns true iff this public key can generate signatures
+func (pk *PublicKeyV3) CanSign() bool {
+ return pk.PubKeyAlgo != PubKeyAlgoRSAEncryptOnly
+}
+
+// VerifySignatureV3 returns nil iff sig is a valid signature, made by this
+// public key, of the data hashed into signed. signed is mutated by this call.
+func (pk *PublicKeyV3) VerifySignatureV3(signed hash.Hash, sig *SignatureV3) (err error) {
+ if !pk.CanSign() {
+ return errors.InvalidArgumentError("public key cannot generate signatures")
+ }
+
+ suffix := make([]byte, 5)
+ suffix[0] = byte(sig.SigType)
+ binary.BigEndian.PutUint32(suffix[1:], uint32(sig.CreationTime.Unix()))
+ signed.Write(suffix)
+ hashBytes := signed.Sum(nil)
+
+ if hashBytes[0] != sig.HashTag[0] || hashBytes[1] != sig.HashTag[1] {
+ return errors.SignatureError("hash tag doesn't match")
+ }
+
+ if pk.PubKeyAlgo != sig.PubKeyAlgo {
+ return errors.InvalidArgumentError("public key and signature use different algorithms")
+ }
+
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+ if err = rsa.VerifyPKCS1v15(pk.PublicKey, sig.Hash, hashBytes, sig.RSASignature.bytes); err != nil {
+ return errors.SignatureError("RSA verification failure")
+ }
+ return
+ default:
+ // V3 public keys only support RSA.
+ panic("shouldn't happen")
+ }
+ panic("unreachable")
+}
+
+// VerifyUserIdSignatureV3 returns nil iff sig is a valid signature, made by this
+// public key, certifying that id is the identity of pub.
+func (pk *PublicKeyV3) VerifyUserIdSignatureV3(id string, pub *PublicKeyV3, sig *SignatureV3) (err error) {
+ h, err := userIdSignatureV3Hash(id, pk, sig.Hash)
+ if err != nil {
+ return err
+ }
+ return pk.VerifySignatureV3(h, sig)
+}
+
+// VerifyKeySignatureV3 returns nil iff sig is a valid signature, made by this
+// public key, of signed.
+func (pk *PublicKeyV3) VerifyKeySignatureV3(signed *PublicKeyV3, sig *SignatureV3) (err error) {
+ h, err := keySignatureHash(pk, signed, sig.Hash)
+ if err != nil {
+ return err
+ }
+ return pk.VerifySignatureV3(h, sig)
+}
+
+// userIdSignatureV3Hash returns a Hash of the message that needs to be signed
+// to assert that pk is a valid key for id.
+func userIdSignatureV3Hash(id string, pk signingKey, hfn crypto.Hash) (h hash.Hash, err error) {
+ if !hfn.Available() {
+ return nil, errors.UnsupportedError("hash function")
+ }
+ h = hfn.New()
+
+ // RFC 4880, section 5.2.4
+ pk.SerializeSignaturePrefix(h)
+ pk.serializeWithoutHeaders(h)
+
+ h.Write([]byte(id))
+
+ return
+}
+
+// KeyIdString returns the public key's key ID in capital hex
+// (e.g. "6C7EE1B8621CC013").
+func (pk *PublicKeyV3) KeyIdString() string {
+ return fmt.Sprintf("%X", pk.KeyId)
+}
+
+// KeyIdShortString returns the short form of the public key's key ID
+// in capital hex, as shown by gpg --list-keys (e.g. "621CC013").
+func (pk *PublicKeyV3) KeyIdShortString() string {
+ return fmt.Sprintf("%X", pk.KeyId&0xFFFFFFFF)
+}
+
+// BitLength returns the bit length for the given public key.
+func (pk *PublicKeyV3) BitLength() (bitLength uint16, err error) {
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly, PubKeyAlgoRSASignOnly:
+ bitLength = pk.n.bitLength
+ default:
+ err = errors.InvalidArgumentError("bad public-key algorithm")
+ }
+ return
+}
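+
+// readV3PublicKey is an illustrative usage sketch, not referenced elsewhere in
+// this package: it reads a single packet with Read (defined in packet.go) and
+// expects it to be a version 2 or 3 public key, as exercised by
+// public_key_v3_test.go.
+func readV3PublicKey(r io.Reader) (*PublicKeyV3, error) {
+ p, err := Read(r)
+ if err != nil {
+ return nil, err
+ }
+ pk, ok := p.(*PublicKeyV3)
+ if !ok {
+ return nil, errors.InvalidArgumentError("packet is not a version 3 public key")
+ }
+ return pk, nil
+}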
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_v3_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_v3_test.go
new file mode 100644
index 00000000000..e06405904b3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/public_key_v3_test.go
@@ -0,0 +1,82 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "encoding/hex"
+ "testing"
+ "time"
+)
+
+var pubKeyV3Test = struct {
+ hexFingerprint string
+ creationTime time.Time
+ pubKeyAlgo PublicKeyAlgorithm
+ keyId uint64
+ keyIdString string
+ keyIdShort string
+}{
+ "103BECF5BD1E837C89D19E98487767F7",
+ time.Unix(779753634, 0),
+ PubKeyAlgoRSA,
+ 0xDE0F188A5DA5E3C9,
+ "DE0F188A5DA5E3C9",
+ "5DA5E3C9"}
+
+func TestPublicKeyV3Read(t *testing.T) {
+ i, test := 0, pubKeyV3Test
+ packet, err := Read(v3KeyReader(t))
+ if err != nil {
+ t.Fatalf("#%d: Read error: %s", i, err)
+ }
+ pk, ok := packet.(*PublicKeyV3)
+ if !ok {
+ t.Fatalf("#%d: failed to parse, got: %#v", i, packet)
+ }
+ if pk.PubKeyAlgo != test.pubKeyAlgo {
+ t.Errorf("#%d: bad public key algorithm got:%x want:%x", i, pk.PubKeyAlgo, test.pubKeyAlgo)
+ }
+ if !pk.CreationTime.Equal(test.creationTime) {
+ t.Errorf("#%d: bad creation time got:%v want:%v", i, pk.CreationTime, test.creationTime)
+ }
+ expectedFingerprint, _ := hex.DecodeString(test.hexFingerprint)
+ if !bytes.Equal(expectedFingerprint, pk.Fingerprint[:]) {
+ t.Errorf("#%d: bad fingerprint got:%x want:%x", i, pk.Fingerprint[:], expectedFingerprint)
+ }
+ if pk.KeyId != test.keyId {
+ t.Errorf("#%d: bad keyid got:%x want:%x", i, pk.KeyId, test.keyId)
+ }
+ if g, e := pk.KeyIdString(), test.keyIdString; g != e {
+ t.Errorf("#%d: bad KeyIdString got:%q want:%q", i, g, e)
+ }
+ if g, e := pk.KeyIdShortString(), test.keyIdShort; g != e {
+ t.Errorf("#%d: bad KeyIdShortString got:%q want:%q", i, g, e)
+ }
+}
+
+func TestPublicKeyV3Serialize(t *testing.T) {
+ //for i, test := range pubKeyV3Tests {
+ i := 0
+ packet, err := Read(v3KeyReader(t))
+ if err != nil {
+ t.Fatalf("#%d: Read error: %s", i, err)
+ }
+ pk, ok := packet.(*PublicKeyV3)
+ if !ok {
+ t.Fatalf("#%d: failed to parse, got: %#v", i, packet)
+ }
+ var serializeBuf bytes.Buffer
+ if err = pk.Serialize(&serializeBuf); err != nil {
+ t.Fatalf("#%d: failed to serialize: %s", i, err)
+ }
+
+ if packet, err = Read(bytes.NewBuffer(serializeBuf.Bytes())); err != nil {
+ t.Fatalf("#%d: Read error (from serialized data): %s", i, err)
+ }
+ if pk, ok = packet.(*PublicKeyV3); !ok {
+ t.Fatalf("#%d: failed to parse serialized data, got: %#v", i, packet)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/reader.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/reader.go
new file mode 100644
index 00000000000..34bc7c613e6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/reader.go
@@ -0,0 +1,76 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "golang.org/x/crypto/openpgp/errors"
+ "io"
+)
+
+// Reader reads packets from an io.Reader and allows packets to be 'unread' so
+// that they result from the next call to Next.
+type Reader struct {
+ q []Packet
+ readers []io.Reader
+}
+
+// New io.Readers are pushed when a compressed or encrypted packet is processed
+// and recursively treated as a new source of packets. However, a carefully
+// crafted packet can trigger an infinite recursive sequence of packets. See
+// http://mumble.net/~campbell/misc/pgp-quine
+// https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2013-4402
+// This constant limits the number of recursive packets that may be pushed.
+const maxReaders = 32
+
+// Next returns the most recently unread Packet, or reads another packet from
+// the top-most io.Reader. Unknown packet types are skipped.
+func (r *Reader) Next() (p Packet, err error) {
+ if len(r.q) > 0 {
+ p = r.q[len(r.q)-1]
+ r.q = r.q[:len(r.q)-1]
+ return
+ }
+
+ for len(r.readers) > 0 {
+ p, err = Read(r.readers[len(r.readers)-1])
+ if err == nil {
+ return
+ }
+ if err == io.EOF {
+ r.readers = r.readers[:len(r.readers)-1]
+ continue
+ }
+ if _, ok := err.(errors.UnknownPacketTypeError); !ok {
+ return nil, err
+ }
+ }
+
+ return nil, io.EOF
+}
+
+// Push causes the Reader to start reading from a new io.Reader. When an EOF
+// error is seen from the new io.Reader, it is popped and the Reader continues
+// to read from the next most recent io.Reader. Push returns a StructuralError
+// if pushing the reader would exceed the maximum recursion level, otherwise it
+// returns nil.
+func (r *Reader) Push(reader io.Reader) (err error) {
+ if len(r.readers) >= maxReaders {
+ return errors.StructuralError("too many layers of packets")
+ }
+ r.readers = append(r.readers, reader)
+ return nil
+}
+
+// Unread causes the given Packet to be returned from the next call to Next.
+func (r *Reader) Unread(p Packet) {
+ r.q = append(r.q, p)
+}
+
+// NewReader returns a Reader that reads packets from r.
+func NewReader(r io.Reader) *Reader {
+ return &Reader{
+ q: nil,
+ readers: []io.Reader{r},
+ }
+}
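+
+// drainPackets is an illustrative usage sketch, not referenced elsewhere in
+// this package: callers typically loop over Next until io.EOF, treating any
+// other error as fatal.
+func drainPackets(r *Reader) ([]Packet, error) {
+ var packets []Packet
+ for {
+ p, err := r.Next()
+ if err == io.EOF {
+ return packets, nil
+ }
+ if err != nil {
+ return nil, err
+ }
+ packets = append(packets, p)
+ }
+}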
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature.go
new file mode 100644
index 00000000000..1f29d3df388
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature.go
@@ -0,0 +1,699 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "crypto"
+ "crypto/dsa"
+ "crypto/rsa"
+ "encoding/binary"
+ "hash"
+ "io"
+ "strconv"
+ "time"
+
+ "golang.org/x/crypto/openpgp/errors"
+ "golang.org/x/crypto/openpgp/s2k"
+)
+
+const (
+ // See RFC 4880, section 5.2.3.21 for details.
+ KeyFlagCertify = 1 << iota
+ KeyFlagSign
+ KeyFlagEncryptCommunications
+ KeyFlagEncryptStorage
+)
+
+// Signature represents a signature. See RFC 4880, section 5.2.
+type Signature struct {
+ SigType SignatureType
+ PubKeyAlgo PublicKeyAlgorithm
+ Hash crypto.Hash
+
+ // HashSuffix is extra data that is hashed in after the signed data.
+ HashSuffix []byte
+ // HashTag contains the first two bytes of the hash for fast rejection
+ // of bad signed data.
+ HashTag [2]byte
+ CreationTime time.Time
+
+ RSASignature parsedMPI
+ DSASigR, DSASigS parsedMPI
+ ECDSASigR, ECDSASigS parsedMPI
+
+ // rawSubpackets contains the unparsed subpackets, in order.
+ rawSubpackets []outputSubpacket
+
+ // The following are optional so are nil when not included in the
+ // signature.
+
+ SigLifetimeSecs, KeyLifetimeSecs *uint32
+ PreferredSymmetric, PreferredHash, PreferredCompression []uint8
+ IssuerKeyId *uint64
+ IsPrimaryId *bool
+
+ // FlagsValid is set if any flags were given. See RFC 4880, section
+ // 5.2.3.21 for details.
+ FlagsValid bool
+ FlagCertify, FlagSign, FlagEncryptCommunications, FlagEncryptStorage bool
+
+ // RevocationReason is set if this signature has been revoked.
+ // See RFC 4880, section 5.2.3.23 for details.
+ RevocationReason *uint8
+ RevocationReasonText string
+
+ // MDC is set if this signature has a feature packet that indicates
+ // support for MDC subpackets.
+ MDC bool
+
+ // EmbeddedSignature, if non-nil, is a signature of the parent key, by
+ // this key. This prevents an attacker from claiming another's signing
+ // subkey as their own.
+ EmbeddedSignature *Signature
+
+ outSubpackets []outputSubpacket
+}
+
+func (sig *Signature) parse(r io.Reader) (err error) {
+ // RFC 4880, section 5.2.3
+ var buf [5]byte
+ _, err = readFull(r, buf[:1])
+ if err != nil {
+ return
+ }
+ if buf[0] != 4 {
+ err = errors.UnsupportedError("signature packet version " + strconv.Itoa(int(buf[0])))
+ return
+ }
+
+ _, err = readFull(r, buf[:5])
+ if err != nil {
+ return
+ }
+ sig.SigType = SignatureType(buf[0])
+ sig.PubKeyAlgo = PublicKeyAlgorithm(buf[1])
+ switch sig.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly, PubKeyAlgoDSA, PubKeyAlgoECDSA:
+ default:
+ err = errors.UnsupportedError("public key algorithm " + strconv.Itoa(int(sig.PubKeyAlgo)))
+ return
+ }
+
+ var ok bool
+ sig.Hash, ok = s2k.HashIdToHash(buf[2])
+ if !ok {
+ return errors.UnsupportedError("hash function " + strconv.Itoa(int(buf[2])))
+ }
+
+ hashedSubpacketsLength := int(buf[3])<<8 | int(buf[4])
+ l := 6 + hashedSubpacketsLength
+ sig.HashSuffix = make([]byte, l+6)
+ sig.HashSuffix[0] = 4
+ copy(sig.HashSuffix[1:], buf[:5])
+ hashedSubpackets := sig.HashSuffix[6:l]
+ _, err = readFull(r, hashedSubpackets)
+ if err != nil {
+ return
+ }
+ // See RFC 4880, section 5.2.4
+ trailer := sig.HashSuffix[l:]
+ trailer[0] = 4
+ trailer[1] = 0xff
+ trailer[2] = uint8(l >> 24)
+ trailer[3] = uint8(l >> 16)
+ trailer[4] = uint8(l >> 8)
+ trailer[5] = uint8(l)
+
+ err = parseSignatureSubpackets(sig, hashedSubpackets, true)
+ if err != nil {
+ return
+ }
+
+ _, err = readFull(r, buf[:2])
+ if err != nil {
+ return
+ }
+ unhashedSubpacketsLength := int(buf[0])<<8 | int(buf[1])
+ unhashedSubpackets := make([]byte, unhashedSubpacketsLength)
+ _, err = readFull(r, unhashedSubpackets)
+ if err != nil {
+ return
+ }
+ err = parseSignatureSubpackets(sig, unhashedSubpackets, false)
+ if err != nil {
+ return
+ }
+
+ _, err = readFull(r, sig.HashTag[:2])
+ if err != nil {
+ return
+ }
+
+ switch sig.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+ sig.RSASignature.bytes, sig.RSASignature.bitLength, err = readMPI(r)
+ case PubKeyAlgoDSA:
+ sig.DSASigR.bytes, sig.DSASigR.bitLength, err = readMPI(r)
+ if err == nil {
+ sig.DSASigS.bytes, sig.DSASigS.bitLength, err = readMPI(r)
+ }
+ case PubKeyAlgoECDSA:
+ sig.ECDSASigR.bytes, sig.ECDSASigR.bitLength, err = readMPI(r)
+ if err == nil {
+ sig.ECDSASigS.bytes, sig.ECDSASigS.bitLength, err = readMPI(r)
+ }
+ default:
+ panic("unreachable")
+ }
+ return
+}
+
+// parseSignatureSubpackets parses subpackets of the main signature packet. See
+// RFC 4880, section 5.2.3.1.
+func parseSignatureSubpackets(sig *Signature, subpackets []byte, isHashed bool) (err error) {
+ for len(subpackets) > 0 {
+ subpackets, err = parseSignatureSubpacket(sig, subpackets, isHashed)
+ if err != nil {
+ return
+ }
+ }
+
+ if sig.CreationTime.IsZero() {
+ err = errors.StructuralError("no creation time in signature")
+ }
+
+ return
+}
+
+type signatureSubpacketType uint8
+
+const (
+ creationTimeSubpacket signatureSubpacketType = 2
+ signatureExpirationSubpacket signatureSubpacketType = 3
+ keyExpirationSubpacket signatureSubpacketType = 9
+ prefSymmetricAlgosSubpacket signatureSubpacketType = 11
+ issuerSubpacket signatureSubpacketType = 16
+ prefHashAlgosSubpacket signatureSubpacketType = 21
+ prefCompressionSubpacket signatureSubpacketType = 22
+ primaryUserIdSubpacket signatureSubpacketType = 25
+ keyFlagsSubpacket signatureSubpacketType = 27
+ reasonForRevocationSubpacket signatureSubpacketType = 29
+ featuresSubpacket signatureSubpacketType = 30
+ embeddedSignatureSubpacket signatureSubpacketType = 32
+)
+
+// parseSignatureSubpacket parses a single subpacket. len(subpacket) is >= 1.
+func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (rest []byte, err error) {
+ // RFC 4880, section 5.2.3.1
+ var (
+ length uint32
+ packetType signatureSubpacketType
+ isCritical bool
+ )
+ switch {
+ case subpacket[0] < 192:
+ length = uint32(subpacket[0])
+ subpacket = subpacket[1:]
+ case subpacket[0] < 255:
+ if len(subpacket) < 2 {
+ goto Truncated
+ }
+ length = uint32(subpacket[0]-192)<<8 + uint32(subpacket[1]) + 192
+ subpacket = subpacket[2:]
+ default:
+ if len(subpacket) < 5 {
+ goto Truncated
+ }
+ length = uint32(subpacket[1])<<24 |
+ uint32(subpacket[2])<<16 |
+ uint32(subpacket[3])<<8 |
+ uint32(subpacket[4])
+ subpacket = subpacket[5:]
+ }
+ if length > uint32(len(subpacket)) {
+ goto Truncated
+ }
+ rest = subpacket[length:]
+ subpacket = subpacket[:length]
+ if len(subpacket) == 0 {
+ err = errors.StructuralError("zero length signature subpacket")
+ return
+ }
+ packetType = signatureSubpacketType(subpacket[0] & 0x7f)
+ isCritical = subpacket[0]&0x80 == 0x80
+ subpacket = subpacket[1:]
+ sig.rawSubpackets = append(sig.rawSubpackets, outputSubpacket{isHashed, packetType, isCritical, subpacket})
+ switch packetType {
+ case creationTimeSubpacket:
+ if !isHashed {
+ err = errors.StructuralError("signature creation time in non-hashed area")
+ return
+ }
+ if len(subpacket) != 4 {
+ err = errors.StructuralError("signature creation time not four bytes")
+ return
+ }
+ t := binary.BigEndian.Uint32(subpacket)
+ sig.CreationTime = time.Unix(int64(t), 0)
+ case signatureExpirationSubpacket:
+ // Signature expiration time, section 5.2.3.10
+ if !isHashed {
+ return
+ }
+ if len(subpacket) != 4 {
+ err = errors.StructuralError("expiration subpacket with bad length")
+ return
+ }
+ sig.SigLifetimeSecs = new(uint32)
+ *sig.SigLifetimeSecs = binary.BigEndian.Uint32(subpacket)
+ case keyExpirationSubpacket:
+ // Key expiration time, section 5.2.3.6
+ if !isHashed {
+ return
+ }
+ if len(subpacket) != 4 {
+ err = errors.StructuralError("key expiration subpacket with bad length")
+ return
+ }
+ sig.KeyLifetimeSecs = new(uint32)
+ *sig.KeyLifetimeSecs = binary.BigEndian.Uint32(subpacket)
+ case prefSymmetricAlgosSubpacket:
+ // Preferred symmetric algorithms, section 5.2.3.7
+ if !isHashed {
+ return
+ }
+ sig.PreferredSymmetric = make([]byte, len(subpacket))
+ copy(sig.PreferredSymmetric, subpacket)
+ case issuerSubpacket:
+ // Issuer, section 5.2.3.5
+ if len(subpacket) != 8 {
+ err = errors.StructuralError("issuer subpacket with bad length")
+ return
+ }
+ sig.IssuerKeyId = new(uint64)
+ *sig.IssuerKeyId = binary.BigEndian.Uint64(subpacket)
+ case prefHashAlgosSubpacket:
+ // Preferred hash algorithms, section 5.2.3.8
+ if !isHashed {
+ return
+ }
+ sig.PreferredHash = make([]byte, len(subpacket))
+ copy(sig.PreferredHash, subpacket)
+ case prefCompressionSubpacket:
+ // Preferred compression algorithms, section 5.2.3.9
+ if !isHashed {
+ return
+ }
+ sig.PreferredCompression = make([]byte, len(subpacket))
+ copy(sig.PreferredCompression, subpacket)
+ case primaryUserIdSubpacket:
+ // Primary User ID, section 5.2.3.19
+ if !isHashed {
+ return
+ }
+ if len(subpacket) != 1 {
+ err = errors.StructuralError("primary user id subpacket with bad length")
+ return
+ }
+ sig.IsPrimaryId = new(bool)
+ if subpacket[0] > 0 {
+ *sig.IsPrimaryId = true
+ }
+ case keyFlagsSubpacket:
+ // Key flags, section 5.2.3.21
+ if !isHashed {
+ return
+ }
+ if len(subpacket) == 0 {
+ err = errors.StructuralError("empty key flags subpacket")
+ return
+ }
+ sig.FlagsValid = true
+ if subpacket[0]&KeyFlagCertify != 0 {
+ sig.FlagCertify = true
+ }
+ if subpacket[0]&KeyFlagSign != 0 {
+ sig.FlagSign = true
+ }
+ if subpacket[0]&KeyFlagEncryptCommunications != 0 {
+ sig.FlagEncryptCommunications = true
+ }
+ if subpacket[0]&KeyFlagEncryptStorage != 0 {
+ sig.FlagEncryptStorage = true
+ }
+ case reasonForRevocationSubpacket:
+ // Reason For Revocation, section 5.2.3.23
+ if !isHashed {
+ return
+ }
+ if len(subpacket) == 0 {
+ err = errors.StructuralError("empty revocation reason subpacket")
+ return
+ }
+ sig.RevocationReason = new(uint8)
+ *sig.RevocationReason = subpacket[0]
+ sig.RevocationReasonText = string(subpacket[1:])
+ case featuresSubpacket:
+ // Features subpacket, section 5.2.3.24 specifies a very general
+ // mechanism for OpenPGP implementations to signal support for new
+ // features. In practice, the subpacket is used exclusively to
+ // indicate support for MDC-protected encryption.
+ sig.MDC = len(subpacket) >= 1 && subpacket[0]&1 == 1
+ case embeddedSignatureSubpacket:
+ // Only usage is in signatures that cross-certify
+ // signing subkeys. section 5.2.3.26 describes the
+ // format, with its usage described in section 11.1
+ if sig.EmbeddedSignature != nil {
+ err = errors.StructuralError("Cannot have multiple embedded signatures")
+ return
+ }
+ sig.EmbeddedSignature = new(Signature)
+ // Embedded signatures are required to be v4 signatures see
+ // section 12.1. However, we only parse v4 signatures in this
+ // file anyway.
+ if err := sig.EmbeddedSignature.parse(bytes.NewBuffer(subpacket)); err != nil {
+ return nil, err
+ }
+ if sigType := sig.EmbeddedSignature.SigType; sigType != SigTypePrimaryKeyBinding {
+ return nil, errors.StructuralError("cross-signature has unexpected type " + strconv.Itoa(int(sigType)))
+ }
+ default:
+ if isCritical {
+ err = errors.UnsupportedError("unknown critical signature subpacket type " + strconv.Itoa(int(packetType)))
+ return
+ }
+ }
+ return
+
+Truncated:
+ err = errors.StructuralError("signature subpacket truncated")
+ return
+}
+
+// subpacketLengthLength returns the length, in bytes, of an encoded length value.
+func subpacketLengthLength(length int) int {
+ if length < 192 {
+ return 1
+ }
+ if length < 16320 {
+ return 2
+ }
+ return 5
+}
+
+// serializeSubpacketLength marshals the given length into to.
+func serializeSubpacketLength(to []byte, length int) int {
+ // RFC 4880, Section 4.2.2.
+ if length < 192 {
+ to[0] = byte(length)
+ return 1
+ }
+ if length < 16320 {
+ length -= 192
+ to[0] = byte((length >> 8) + 192)
+ to[1] = byte(length)
+ return 2
+ }
+ to[0] = 255
+ to[1] = byte(length >> 24)
+ to[2] = byte(length >> 16)
+ to[3] = byte(length >> 8)
+ to[4] = byte(length)
+ return 5
+}
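+
+// Worked examples of the length encoding above (RFC 4880, section 4.2.2):
+//
+// length 100 -> 64 (one octet)
+// length 192 -> c0 00 (two octets)
+// length 1000 -> c3 28 (two octets: 1000-192 = 808 = 0x0328)
+// length 16320 -> ff 00 00 3f c0 (five octets)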
+
+// subpacketsLength returns the serialized length, in bytes, of the given
+// subpackets.
+func subpacketsLength(subpackets []outputSubpacket, hashed bool) (length int) {
+ for _, subpacket := range subpackets {
+ if subpacket.hashed == hashed {
+ length += subpacketLengthLength(len(subpacket.contents) + 1)
+ length += 1 // type byte
+ length += len(subpacket.contents)
+ }
+ }
+ return
+}
+
+// serializeSubpackets marshals the given subpackets into to.
+func serializeSubpackets(to []byte, subpackets []outputSubpacket, hashed bool) {
+ for _, subpacket := range subpackets {
+ if subpacket.hashed == hashed {
+ n := serializeSubpacketLength(to, len(subpacket.contents)+1)
+ to[n] = byte(subpacket.subpacketType)
+ to = to[1+n:]
+ n = copy(to, subpacket.contents)
+ to = to[n:]
+ }
+ }
+ return
+}
+
+// KeyExpired returns whether sig is a self-signature of a key that has
+// expired.
+func (sig *Signature) KeyExpired(currentTime time.Time) bool {
+ if sig.KeyLifetimeSecs == nil {
+ return false
+ }
+ expiry := sig.CreationTime.Add(time.Duration(*sig.KeyLifetimeSecs) * time.Second)
+ return currentTime.After(expiry)
+}
+
+// buildHashSuffix constructs the HashSuffix member of sig in preparation for signing.
+func (sig *Signature) buildHashSuffix() (err error) {
+ hashedSubpacketsLen := subpacketsLength(sig.outSubpackets, true)
+
+ var ok bool
+ l := 6 + hashedSubpacketsLen
+ sig.HashSuffix = make([]byte, l+6)
+ sig.HashSuffix[0] = 4
+ sig.HashSuffix[1] = uint8(sig.SigType)
+ sig.HashSuffix[2] = uint8(sig.PubKeyAlgo)
+ sig.HashSuffix[3], ok = s2k.HashToHashId(sig.Hash)
+ if !ok {
+ sig.HashSuffix = nil
+ return errors.InvalidArgumentError("hash cannot be represented in OpenPGP: " + strconv.Itoa(int(sig.Hash)))
+ }
+ sig.HashSuffix[4] = byte(hashedSubpacketsLen >> 8)
+ sig.HashSuffix[5] = byte(hashedSubpacketsLen)
+ serializeSubpackets(sig.HashSuffix[6:l], sig.outSubpackets, true)
+ trailer := sig.HashSuffix[l:]
+ trailer[0] = 4
+ trailer[1] = 0xff
+ trailer[2] = byte(l >> 24)
+ trailer[3] = byte(l >> 16)
+ trailer[4] = byte(l >> 8)
+ trailer[5] = byte(l)
+ return
+}
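+
+// For reference, the suffix built above is laid out as follows, where l is
+// 6 + the length of the hashed subpacket data (see RFC 4880, section 5.2.4):
+//
+// octet 0 version (4)
+// octet 1 signature type
+// octet 2 public key algorithm
+// octet 3 hash algorithm ID
+// octets 4-5 hashed subpacket data length, big endian
+// octets 6..l-1 hashed subpacket data
+// octets l..l+5 trailer: 0x04, 0xFF, then l as a big-endian uint32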
+
+func (sig *Signature) signPrepareHash(h hash.Hash) (digest []byte, err error) {
+ err = sig.buildHashSuffix()
+ if err != nil {
+ return
+ }
+
+ h.Write(sig.HashSuffix)
+ digest = h.Sum(nil)
+ copy(sig.HashTag[:], digest)
+ return
+}
+
+// Sign signs a message with a private key. The hash, h, must contain
+// the hash of the message to be signed and will be mutated by this function.
+// On success, the signature is stored in sig. Call Serialize to write it out.
+// If config is nil, sensible defaults will be used.
+func (sig *Signature) Sign(h hash.Hash, priv *PrivateKey, config *Config) (err error) {
+ sig.outSubpackets = sig.buildSubpackets()
+ digest, err := sig.signPrepareHash(h)
+ if err != nil {
+ return
+ }
+
+ switch priv.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+ sig.RSASignature.bytes, err = rsa.SignPKCS1v15(config.Random(), priv.PrivateKey.(*rsa.PrivateKey), sig.Hash, digest)
+ sig.RSASignature.bitLength = uint16(8 * len(sig.RSASignature.bytes))
+ case PubKeyAlgoDSA:
+ dsaPriv := priv.PrivateKey.(*dsa.PrivateKey)
+
+ // Need to truncate hashBytes to match FIPS 186-3 section 4.6.
+ subgroupSize := (dsaPriv.Q.BitLen() + 7) / 8
+ if len(digest) > subgroupSize {
+ digest = digest[:subgroupSize]
+ }
+ r, s, err := dsa.Sign(config.Random(), dsaPriv, digest)
+ if err == nil {
+ sig.DSASigR.bytes = r.Bytes()
+ sig.DSASigR.bitLength = uint16(8 * len(sig.DSASigR.bytes))
+ sig.DSASigS.bytes = s.Bytes()
+ sig.DSASigS.bitLength = uint16(8 * len(sig.DSASigS.bytes))
+ }
+ default:
+ err = errors.UnsupportedError("public key algorithm: " + strconv.Itoa(int(sig.PubKeyAlgo)))
+ }
+
+ return
+}
+
+// SignUserId computes a signature from priv, asserting that pub is a valid
+// key for the identity id. On success, the signature is stored in sig. Call
+// Serialize to write it out.
+// If config is nil, sensible defaults will be used.
+func (sig *Signature) SignUserId(id string, pub *PublicKey, priv *PrivateKey, config *Config) error {
+ h, err := userIdSignatureHash(id, pub, sig.Hash)
+ if err != nil {
+ return err
+ }
+ return sig.Sign(h, priv, config)
+}
+
+// SignKey computes a signature from priv, asserting that pub is a subkey. On
+// success, the signature is stored in sig. Call Serialize to write it out.
+// If config is nil, sensible defaults will be used.
+func (sig *Signature) SignKey(pub *PublicKey, priv *PrivateKey, config *Config) error {
+ h, err := keySignatureHash(&priv.PublicKey, pub, sig.Hash)
+ if err != nil {
+ return err
+ }
+ return sig.Sign(h, priv, config)
+}
+
+// Serialize marshals sig to w. Sign, SignUserId or SignKey must have been
+// called first.
+func (sig *Signature) Serialize(w io.Writer) (err error) {
+ if len(sig.outSubpackets) == 0 {
+ sig.outSubpackets = sig.rawSubpackets
+ }
+ if sig.RSASignature.bytes == nil && sig.DSASigR.bytes == nil && sig.ECDSASigR.bytes == nil {
+ return errors.InvalidArgumentError("Signature: need to call Sign, SignUserId or SignKey before Serialize")
+ }
+
+ sigLength := 0
+ switch sig.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+ sigLength = 2 + len(sig.RSASignature.bytes)
+ case PubKeyAlgoDSA:
+ sigLength = 2 + len(sig.DSASigR.bytes)
+ sigLength += 2 + len(sig.DSASigS.bytes)
+ case PubKeyAlgoECDSA:
+ sigLength = 2 + len(sig.ECDSASigR.bytes)
+ sigLength += 2 + len(sig.ECDSASigS.bytes)
+ default:
+ panic("impossible")
+ }
+
+ unhashedSubpacketsLen := subpacketsLength(sig.outSubpackets, false)
+ length := len(sig.HashSuffix) - 6 /* trailer not included */ +
+ 2 /* length of unhashed subpackets */ + unhashedSubpacketsLen +
+ 2 /* hash tag */ + sigLength
+ err = serializeHeader(w, packetTypeSignature, length)
+ if err != nil {
+ return
+ }
+
+ _, err = w.Write(sig.HashSuffix[:len(sig.HashSuffix)-6])
+ if err != nil {
+ return
+ }
+
+ unhashedSubpackets := make([]byte, 2+unhashedSubpacketsLen)
+ unhashedSubpackets[0] = byte(unhashedSubpacketsLen >> 8)
+ unhashedSubpackets[1] = byte(unhashedSubpacketsLen)
+ serializeSubpackets(unhashedSubpackets[2:], sig.outSubpackets, false)
+
+ _, err = w.Write(unhashedSubpackets)
+ if err != nil {
+ return
+ }
+ _, err = w.Write(sig.HashTag[:])
+ if err != nil {
+ return
+ }
+
+ switch sig.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+ err = writeMPIs(w, sig.RSASignature)
+ case PubKeyAlgoDSA:
+ err = writeMPIs(w, sig.DSASigR, sig.DSASigS)
+ case PubKeyAlgoECDSA:
+ err = writeMPIs(w, sig.ECDSASigR, sig.ECDSASigS)
+ default:
+ panic("impossible")
+ }
+ return
+}
+
+// outputSubpacket represents a subpacket to be marshaled.
+type outputSubpacket struct {
+ hashed bool // true if this subpacket is in the hashed area.
+ subpacketType signatureSubpacketType
+ isCritical bool
+ contents []byte
+}
+
+func (sig *Signature) buildSubpackets() (subpackets []outputSubpacket) {
+ creationTime := make([]byte, 4)
+ binary.BigEndian.PutUint32(creationTime, uint32(sig.CreationTime.Unix()))
+ subpackets = append(subpackets, outputSubpacket{true, creationTimeSubpacket, false, creationTime})
+
+ if sig.IssuerKeyId != nil {
+ keyId := make([]byte, 8)
+ binary.BigEndian.PutUint64(keyId, *sig.IssuerKeyId)
+ subpackets = append(subpackets, outputSubpacket{true, issuerSubpacket, false, keyId})
+ }
+
+ if sig.SigLifetimeSecs != nil && *sig.SigLifetimeSecs != 0 {
+ sigLifetime := make([]byte, 4)
+ binary.BigEndian.PutUint32(sigLifetime, *sig.SigLifetimeSecs)
+ subpackets = append(subpackets, outputSubpacket{true, signatureExpirationSubpacket, true, sigLifetime})
+ }
+
+ // Key flags may only appear in self-signatures or certification signatures.
+
+ if sig.FlagsValid {
+ var flags byte
+ if sig.FlagCertify {
+ flags |= KeyFlagCertify
+ }
+ if sig.FlagSign {
+ flags |= KeyFlagSign
+ }
+ if sig.FlagEncryptCommunications {
+ flags |= KeyFlagEncryptCommunications
+ }
+ if sig.FlagEncryptStorage {
+ flags |= KeyFlagEncryptStorage
+ }
+ subpackets = append(subpackets, outputSubpacket{true, keyFlagsSubpacket, false, []byte{flags}})
+ }
+
+ // The following subpackets may only appear in self-signatures
+
+ if sig.KeyLifetimeSecs != nil && *sig.KeyLifetimeSecs != 0 {
+ keyLifetime := make([]byte, 4)
+ binary.BigEndian.PutUint32(keyLifetime, *sig.KeyLifetimeSecs)
+ subpackets = append(subpackets, outputSubpacket{true, keyExpirationSubpacket, true, keyLifetime})
+ }
+
+ if sig.IsPrimaryId != nil && *sig.IsPrimaryId {
+ subpackets = append(subpackets, outputSubpacket{true, primaryUserIdSubpacket, false, []byte{1}})
+ }
+
+ if len(sig.PreferredSymmetric) > 0 {
+ subpackets = append(subpackets, outputSubpacket{true, prefSymmetricAlgosSubpacket, false, sig.PreferredSymmetric})
+ }
+
+ if len(sig.PreferredHash) > 0 {
+ subpackets = append(subpackets, outputSubpacket{true, prefHashAlgosSubpacket, false, sig.PreferredHash})
+ }
+
+ if len(sig.PreferredCompression) > 0 {
+ subpackets = append(subpackets, outputSubpacket{true, prefCompressionSubpacket, false, sig.PreferredCompression})
+ }
+
+ return
+}
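+
+// signAndSerialize is an illustrative sketch, not referenced elsewhere in this
+// package, of the signing flow described above: fill in the signature
+// metadata, hash the message, then call Sign followed by Serialize. hashFunc
+// must be registered by the caller (for example via an import of
+// crypto/sha256) and representable in OpenPGP; config may be nil, in which
+// case Sign uses its defaults.
+func signAndSerialize(w io.Writer, message []byte, priv *PrivateKey, hashFunc crypto.Hash, config *Config) error {
+ sig := &Signature{
+ SigType: SigTypeBinary,
+ PubKeyAlgo: priv.PubKeyAlgo,
+ Hash: hashFunc,
+ CreationTime: time.Now(),
+ IssuerKeyId: &priv.PublicKey.KeyId,
+ }
+ h := hashFunc.New()
+ h.Write(message)
+ if err := sig.Sign(h, priv, config); err != nil {
+ return err
+ }
+ return sig.Serialize(w)
+}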
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_test.go
new file mode 100644
index 00000000000..c1bbde8b0c3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_test.go
@@ -0,0 +1,42 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "crypto"
+ "encoding/hex"
+ "testing"
+)
+
+func TestSignatureRead(t *testing.T) {
+ packet, err := Read(readerFromHex(signatureDataHex))
+ if err != nil {
+ t.Error(err)
+ return
+ }
+ sig, ok := packet.(*Signature)
+ if !ok || sig.SigType != SigTypeBinary || sig.PubKeyAlgo != PubKeyAlgoRSA || sig.Hash != crypto.SHA1 {
+ t.Errorf("failed to parse, got: %#v", packet)
+ }
+}
+
+func TestSignatureReserialize(t *testing.T) {
+ packet, _ := Read(readerFromHex(signatureDataHex))
+ sig := packet.(*Signature)
+ out := new(bytes.Buffer)
+ err := sig.Serialize(out)
+ if err != nil {
+ t.Errorf("error reserializing: %s", err)
+ return
+ }
+
+ expected, _ := hex.DecodeString(signatureDataHex)
+ if !bytes.Equal(expected, out.Bytes()) {
+ t.Errorf("output doesn't match input (got vs expected):\n%s\n%s", hex.Dump(out.Bytes()), hex.Dump(expected))
+ }
+}
+
+const signatureDataHex = "c2c05c04000102000605024cb45112000a0910ab105c91af38fb158f8d07ff5596ea368c5efe015bed6e78348c0f033c931d5f2ce5db54ce7f2a7e4b4ad64db758d65a7a71773edeab7ba2a9e0908e6a94a1175edd86c1d843279f045b021a6971a72702fcbd650efc393c5474d5b59a15f96d2eaad4c4c426797e0dcca2803ef41c6ff234d403eec38f31d610c344c06f2401c262f0993b2e66cad8a81ebc4322c723e0d4ba09fe917e8777658307ad8329adacba821420741009dfe87f007759f0982275d028a392c6ed983a0d846f890b36148c7358bdb8a516007fac760261ecd06076813831a36d0459075d1befa245ae7f7fb103d92ca759e9498fe60ef8078a39a3beda510deea251ea9f0a7f0df6ef42060f20780360686f3e400e"
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_v3.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_v3.go
new file mode 100644
index 00000000000..6edff889349
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_v3.go
@@ -0,0 +1,146 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "crypto"
+ "encoding/binary"
+ "fmt"
+ "io"
+ "strconv"
+ "time"
+
+ "golang.org/x/crypto/openpgp/errors"
+ "golang.org/x/crypto/openpgp/s2k"
+)
+
+// SignatureV3 represents older version 3 signatures. These signatures are less secure
+// than version 4 and should not be used to create new signatures. They are included
+// here for backwards compatibility, to read and validate signatures made with older key material.
+// See RFC 4880, section 5.2.2.
+type SignatureV3 struct {
+ SigType SignatureType
+ CreationTime time.Time
+ IssuerKeyId uint64
+ PubKeyAlgo PublicKeyAlgorithm
+ Hash crypto.Hash
+ HashTag [2]byte
+
+ RSASignature parsedMPI
+ DSASigR, DSASigS parsedMPI
+}
+
+func (sig *SignatureV3) parse(r io.Reader) (err error) {
+ // RFC 4880, section 5.2.2
+ var buf [8]byte
+ if _, err = readFull(r, buf[:1]); err != nil {
+ return
+ }
+ if buf[0] < 2 || buf[0] > 3 {
+ err = errors.UnsupportedError("signature packet version " + strconv.Itoa(int(buf[0])))
+ return
+ }
+ if _, err = readFull(r, buf[:1]); err != nil {
+ return
+ }
+ if buf[0] != 5 {
+ err = errors.UnsupportedError(
+ "invalid hashed material length " + strconv.Itoa(int(buf[0])))
+ return
+ }
+
+ // Read hashed material: signature type + creation time
+ if _, err = readFull(r, buf[:5]); err != nil {
+ return
+ }
+ sig.SigType = SignatureType(buf[0])
+ t := binary.BigEndian.Uint32(buf[1:5])
+ sig.CreationTime = time.Unix(int64(t), 0)
+
+ // Eight-octet Key ID of signer.
+ if _, err = readFull(r, buf[:8]); err != nil {
+ return
+ }
+ sig.IssuerKeyId = binary.BigEndian.Uint64(buf[:])
+
+ // Public-key and hash algorithm
+ if _, err = readFull(r, buf[:2]); err != nil {
+ return
+ }
+ sig.PubKeyAlgo = PublicKeyAlgorithm(buf[0])
+ switch sig.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly, PubKeyAlgoDSA:
+ default:
+ err = errors.UnsupportedError("public key algorithm " + strconv.Itoa(int(sig.PubKeyAlgo)))
+ return
+ }
+ var ok bool
+ if sig.Hash, ok = s2k.HashIdToHash(buf[1]); !ok {
+ return errors.UnsupportedError("hash function " + strconv.Itoa(int(buf[2])))
+ }
+
+ // Two-octet field holding left 16 bits of signed hash value.
+ if _, err = readFull(r, sig.HashTag[:2]); err != nil {
+ return
+ }
+
+ switch sig.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+ sig.RSASignature.bytes, sig.RSASignature.bitLength, err = readMPI(r)
+ case PubKeyAlgoDSA:
+ if sig.DSASigR.bytes, sig.DSASigR.bitLength, err = readMPI(r); err != nil {
+ return
+ }
+ sig.DSASigS.bytes, sig.DSASigS.bitLength, err = readMPI(r)
+ default:
+ panic("unreachable")
+ }
+ return
+}
+
+// Serialize marshals sig to w. The signature fields must already be populated,
+// which for SignatureV3 values happens by parsing existing signature material.
+func (sig *SignatureV3) Serialize(w io.Writer) (err error) {
+ buf := make([]byte, 8)
+
+ // Write the sig type and creation time
+ buf[0] = byte(sig.SigType)
+ binary.BigEndian.PutUint32(buf[1:5], uint32(sig.CreationTime.Unix()))
+ if _, err = w.Write(buf[:5]); err != nil {
+ return
+ }
+
+ // Write the issuer long key ID
+ binary.BigEndian.PutUint64(buf[:8], sig.IssuerKeyId)
+ if _, err = w.Write(buf[:8]); err != nil {
+ return
+ }
+
+ // Write public key algorithm, hash ID, and hash value
+ buf[0] = byte(sig.PubKeyAlgo)
+ hashId, ok := s2k.HashToHashId(sig.Hash)
+ if !ok {
+ return errors.UnsupportedError(fmt.Sprintf("hash function %v", sig.Hash))
+ }
+ buf[1] = hashId
+ copy(buf[2:4], sig.HashTag[:])
+ if _, err = w.Write(buf[:4]); err != nil {
+ return
+ }
+
+ if sig.RSASignature.bytes == nil && sig.DSASigR.bytes == nil {
+ return errors.InvalidArgumentError("Signature: need to call Sign, SignUserId or SignKey before Serialize")
+ }
+
+ switch sig.PubKeyAlgo {
+ case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly:
+ err = writeMPIs(w, sig.RSASignature)
+ case PubKeyAlgoDSA:
+ err = writeMPIs(w, sig.DSASigR, sig.DSASigS)
+ default:
+ panic("impossible")
+ }
+ return
+}
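+
+// verifyV3Certification is an illustrative sketch, not referenced elsewhere in
+// this package, of how parsed V3 material fits together: read a public key, a
+// user ID and a certification signature in sequence, then check the signature
+// with VerifyUserIdSignatureV3 from public_key_v3.go.
+func verifyV3Certification(r io.Reader) error {
+ p, err := Read(r)
+ if err != nil {
+ return err
+ }
+ pk, ok := p.(*PublicKeyV3)
+ if !ok {
+ return errors.InvalidArgumentError("expected a version 3 public key packet")
+ }
+ if p, err = Read(r); err != nil {
+ return err
+ }
+ uid, ok := p.(*UserId)
+ if !ok {
+ return errors.InvalidArgumentError("expected a user ID packet")
+ }
+ if p, err = Read(r); err != nil {
+ return err
+ }
+ sig, ok := p.(*SignatureV3)
+ if !ok {
+ return errors.InvalidArgumentError("expected a version 3 signature packet")
+ }
+ return pk.VerifyUserIdSignatureV3(uid.Id, pk, sig)
+}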
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_v3_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_v3_test.go
new file mode 100644
index 00000000000..ad7b62ac193
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/signature_v3_test.go
@@ -0,0 +1,92 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "crypto"
+ "encoding/hex"
+ "io"
+ "io/ioutil"
+ "testing"
+
+ "golang.org/x/crypto/openpgp/armor"
+)
+
+func TestSignatureV3Read(t *testing.T) {
+ r := v3KeyReader(t)
+ Read(r) // Skip public key
+ Read(r) // Skip uid
+ packet, err := Read(r) // Signature
+ if err != nil {
+ t.Error(err)
+ return
+ }
+ sig, ok := packet.(*SignatureV3)
+ if !ok || sig.SigType != SigTypeGenericCert || sig.PubKeyAlgo != PubKeyAlgoRSA || sig.Hash != crypto.MD5 {
+ t.Errorf("failed to parse, got: %#v", packet)
+ }
+}
+
+func TestSignatureV3Reserialize(t *testing.T) {
+ r := v3KeyReader(t)
+ Read(r) // Skip public key
+ Read(r) // Skip uid
+ packet, err := Read(r)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+ sig := packet.(*SignatureV3)
+ out := new(bytes.Buffer)
+ if err = sig.Serialize(out); err != nil {
+ t.Errorf("error reserializing: %s", err)
+ return
+ }
+ expected, err := ioutil.ReadAll(v3KeyReader(t))
+ if err != nil {
+ t.Error(err)
+ return
+ }
+ expected = expected[4+141+4+39:] // See the pgpdump offsets below; this is where the signature starts.
+ if !bytes.Equal(expected, out.Bytes()) {
+ t.Errorf("output doesn't match input (got vs expected):\n%s\n%s", hex.Dump(out.Bytes()), hex.Dump(expected))
+ }
+}
+
+func v3KeyReader(t *testing.T) io.Reader {
+ armorBlock, err := armor.Decode(bytes.NewBufferString(keySigV3Armor))
+ if err != nil {
+ t.Fatalf("armor Decode failed: %v", err)
+ }
+ return armorBlock.Body
+}
+
+// keySigV3Armor is some V3 public key I found in an SKS dump.
+// Old: Public Key Packet(tag 6)(141 bytes)
+// Ver 4 - new
+// Public key creation time - Fri Sep 16 17:13:54 CDT 1994
+// Pub alg - unknown(pub 0)
+// Unknown public key(pub 0)
+// Old: User ID Packet(tag 13)(39 bytes)
+// User ID - Armin M. Warda <warda@nephilim.ruhr.de>
+// Old: Signature Packet(tag 2)(149 bytes)
+// Ver 4 - new
+// Sig type - unknown(05)
+// Pub alg - ElGamal Encrypt-Only(pub 16)
+// Hash alg - unknown(hash 46)
+// Hashed Sub: unknown(sub 81, critical)(1988 bytes)
+const keySigV3Armor = `-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: SKS 1.0.10
+
+mI0CLnoYogAAAQQA1qwA2SuJwfQ5bCQ6u5t20ulnOtY0gykf7YjiK4LiVeRBwHjGq7v30tGV
+5Qti7qqRW4Ww7CDCJc4sZMFnystucR2vLkXaSoNWoFm4Fg47NiisDdhDezHwbVPW6OpCFNSi
+ZAamtj4QAUBu8j4LswafrJqZqR9336/V3g8Yil2l48kABRG0J0FybWluIE0uIFdhcmRhIDx3
+YXJkYUBuZXBoaWxpbS5ydWhyLmRlPoiVAgUQLok2xwXR6zmeWEiZAQE/DgP/WgxPQh40/Po4
+gSkWZCDAjNdph7zexvAb0CcUWahcwiBIgg3U5ErCx9I5CNVA9U+s8bNrDZwgSIeBzp3KhWUx
+524uhGgm6ZUTOAIKA6CbV6pfqoLpJnRYvXYQU5mIWsNa99wcu2qu18OeEDnztb7aLA6Ra9OF
+YFCbq4EjXRoOrYM=
+=LPjs
+-----END PGP PUBLIC KEY BLOCK-----`
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted.go
new file mode 100644
index 00000000000..4b1105b6f6b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted.go
@@ -0,0 +1,155 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "crypto/cipher"
+ "io"
+ "strconv"
+
+ "golang.org/x/crypto/openpgp/errors"
+ "golang.org/x/crypto/openpgp/s2k"
+)
+
+// This is the largest session key that we'll support. Since no 512-bit cipher
+// has ever been seriously used, this is comfortably large.
+const maxSessionKeySizeInBytes = 64
+
+// SymmetricKeyEncrypted represents a passphrase protected session key. See RFC
+// 4880, section 5.3.
+type SymmetricKeyEncrypted struct {
+ CipherFunc CipherFunction
+ s2k func(out, in []byte)
+ encryptedKey []byte
+}
+
+const symmetricKeyEncryptedVersion = 4
+
+func (ske *SymmetricKeyEncrypted) parse(r io.Reader) error {
+ // RFC 4880, section 5.3.
+ var buf [2]byte
+ if _, err := readFull(r, buf[:]); err != nil {
+ return err
+ }
+ if buf[0] != symmetricKeyEncryptedVersion {
+ return errors.UnsupportedError("SymmetricKeyEncrypted version")
+ }
+ ske.CipherFunc = CipherFunction(buf[1])
+
+ if ske.CipherFunc.KeySize() == 0 {
+ return errors.UnsupportedError("unknown cipher: " + strconv.Itoa(int(buf[1])))
+ }
+
+ var err error
+ ske.s2k, err = s2k.Parse(r)
+ if err != nil {
+ return err
+ }
+
+ encryptedKey := make([]byte, maxSessionKeySizeInBytes)
+ // The session key may follow. We just have to try and read to find
+ // out. If it exists then we limit it to maxSessionKeySizeInBytes.
+ n, err := readFull(r, encryptedKey)
+ if err != nil && err != io.ErrUnexpectedEOF {
+ return err
+ }
+
+ if n != 0 {
+ if n == maxSessionKeySizeInBytes {
+ return errors.UnsupportedError("oversized encrypted session key")
+ }
+ ske.encryptedKey = encryptedKey[:n]
+ }
+
+ return nil
+}
+
+// Decrypt attempts to decrypt an encrypted session key and returns the key and
+// the cipher to use when decrypting a subsequent Symmetrically Encrypted Data
+// packet.
+func (ske *SymmetricKeyEncrypted) Decrypt(passphrase []byte) ([]byte, CipherFunction, error) {
+ key := make([]byte, ske.CipherFunc.KeySize())
+ ske.s2k(key, passphrase)
+
+ if len(ske.encryptedKey) == 0 {
+ return key, ske.CipherFunc, nil
+ }
+
+ // the IV is all zeros
+ iv := make([]byte, ske.CipherFunc.blockSize())
+ c := cipher.NewCFBDecrypter(ske.CipherFunc.new(key), iv)
+ plaintextKey := make([]byte, len(ske.encryptedKey))
+ c.XORKeyStream(plaintextKey, ske.encryptedKey)
+ cipherFunc := CipherFunction(plaintextKey[0])
+ if cipherFunc.blockSize() == 0 {
+ return nil, ske.CipherFunc, errors.UnsupportedError("unknown cipher: " + strconv.Itoa(int(cipherFunc)))
+ }
+ plaintextKey = plaintextKey[1:]
+ if l := len(plaintextKey); l == 0 || l%cipherFunc.blockSize() != 0 {
+ return nil, cipherFunc, errors.StructuralError("length of decrypted key not a multiple of block size")
+ }
+
+ return plaintextKey, cipherFunc, nil
+}
+
+// SerializeSymmetricKeyEncrypted serializes a symmetric key packet to w. The
+// packet contains a random session key, encrypted by a key derived from the
+// given passphrase. The session key is returned and must be passed to
+// SerializeSymmetricallyEncrypted.
+// If config is nil, sensible defaults will be used.
+func SerializeSymmetricKeyEncrypted(w io.Writer, passphrase []byte, config *Config) (key []byte, err error) {
+ cipherFunc := config.Cipher()
+ keySize := cipherFunc.KeySize()
+ if keySize == 0 {
+ return nil, errors.UnsupportedError("unknown cipher: " + strconv.Itoa(int(cipherFunc)))
+ }
+
+ s2kBuf := new(bytes.Buffer)
+ keyEncryptingKey := make([]byte, keySize)
+ // s2k.Serialize salts and stretches the passphrase, and writes the
+ // resulting key to keyEncryptingKey and the s2k descriptor to s2kBuf.
+ err = s2k.Serialize(s2kBuf, keyEncryptingKey, config.Random(), passphrase, &s2k.Config{Hash: config.Hash(), S2KCount: config.PasswordHashIterations()})
+ if err != nil {
+ return
+ }
+ s2kBytes := s2kBuf.Bytes()
+
+ packetLength := 2 /* header */ + len(s2kBytes) + 1 /* cipher type */ + keySize
+ err = serializeHeader(w, packetTypeSymmetricKeyEncrypted, packetLength)
+ if err != nil {
+ return
+ }
+
+ var buf [2]byte
+ buf[0] = symmetricKeyEncryptedVersion
+ buf[1] = byte(cipherFunc)
+ _, err = w.Write(buf[:])
+ if err != nil {
+ return
+ }
+ _, err = w.Write(s2kBytes)
+ if err != nil {
+ return
+ }
+
+ sessionKey := make([]byte, keySize)
+ _, err = io.ReadFull(config.Random(), sessionKey)
+ if err != nil {
+ return
+ }
+ iv := make([]byte, cipherFunc.blockSize())
+ c := cipher.NewCFBEncrypter(cipherFunc.new(keyEncryptingKey), iv)
+ encryptedCipherAndKey := make([]byte, keySize+1)
+ c.XORKeyStream(encryptedCipherAndKey, buf[1:])
+ c.XORKeyStream(encryptedCipherAndKey[1:], sessionKey)
+ _, err = w.Write(encryptedCipherAndKey)
+ if err != nil {
+ return
+ }
+
+ key = sessionKey
+ return
+}
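+
+// encryptWithPassphrase is an illustrative sketch, not referenced elsewhere in
+// this package, of the flow described above: derive a session key bound to the
+// passphrase, then encrypt the payload with it. It assumes the
+// SerializeSymmetricallyEncrypted helper from symmetrically_encrypted.go,
+// which takes the writer, cipher, session key and config and returns an
+// io.WriteCloser for the plaintext.
+func encryptWithPassphrase(w io.Writer, passphrase, plaintext []byte, config *Config) error {
+ key, err := SerializeSymmetricKeyEncrypted(w, passphrase, config)
+ if err != nil {
+ return err
+ }
+ contents, err := SerializeSymmetricallyEncrypted(w, config.Cipher(), key, config)
+ if err != nil {
+ return err
+ }
+ if _, err := contents.Write(plaintext); err != nil {
+ contents.Close()
+ return err
+ }
+ return contents.Close()
+}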
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted_test.go
new file mode 100644
index 00000000000..19538df77c9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetric_key_encrypted_test.go
@@ -0,0 +1,103 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "encoding/hex"
+ "io"
+ "io/ioutil"
+ "testing"
+)
+
+func TestSymmetricKeyEncrypted(t *testing.T) {
+ buf := readerFromHex(symmetricallyEncryptedHex)
+ packet, err := Read(buf)
+ if err != nil {
+ t.Errorf("failed to read SymmetricKeyEncrypted: %s", err)
+ return
+ }
+ ske, ok := packet.(*SymmetricKeyEncrypted)
+ if !ok {
+ t.Error("didn't find SymmetricKeyEncrypted packet")
+ return
+ }
+ key, cipherFunc, err := ske.Decrypt([]byte("password"))
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ packet, err = Read(buf)
+ if err != nil {
+ t.Errorf("failed to read SymmetricallyEncrypted: %s", err)
+ return
+ }
+ se, ok := packet.(*SymmetricallyEncrypted)
+ if !ok {
+ t.Error("didn't find SymmetricallyEncrypted packet")
+ return
+ }
+ r, err := se.Decrypt(cipherFunc, key)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ contents, err := ioutil.ReadAll(r)
+ if err != nil && err != io.EOF {
+ t.Error(err)
+ return
+ }
+
+ expectedContents, _ := hex.DecodeString(symmetricallyEncryptedContentsHex)
+ if !bytes.Equal(expectedContents, contents) {
+ t.Errorf("bad contents got:%x want:%x", contents, expectedContents)
+ }
+}
+
+const symmetricallyEncryptedHex = "8c0d04030302371a0b38d884f02060c91cf97c9973b8e58e028e9501708ccfe618fb92afef7fa2d80ddadd93cf"
+const symmetricallyEncryptedContentsHex = "cb1062004d14c4df636f6e74656e74732e0a"
+
+func TestSerializeSymmetricKeyEncrypted(t *testing.T) {
+ buf := bytes.NewBuffer(nil)
+ passphrase := []byte("testing")
+ const cipherFunc = CipherAES128
+ config := &Config{
+ DefaultCipher: cipherFunc,
+ }
+
+ key, err := SerializeSymmetricKeyEncrypted(buf, passphrase, config)
+ if err != nil {
+ t.Errorf("failed to serialize: %s", err)
+ return
+ }
+
+ p, err := Read(buf)
+ if err != nil {
+ t.Errorf("failed to reparse: %s", err)
+ return
+ }
+ ske, ok := p.(*SymmetricKeyEncrypted)
+ if !ok {
+ t.Errorf("parsed a different packet type: %#v", p)
+ return
+ }
+
+ if ske.CipherFunc != config.DefaultCipher {
+ t.Errorf("SKE cipher function is %d (expected %d)", ske.CipherFunc, config.DefaultCipher)
+ }
+ parsedKey, parsedCipherFunc, err := ske.Decrypt(passphrase)
+ if err != nil {
+ t.Errorf("failed to decrypt reparsed SKE: %s", err)
+ return
+ }
+ if !bytes.Equal(key, parsedKey) {
+ t.Errorf("keys don't match after Decrypt: %x (original) vs %x (parsed)", key, parsedKey)
+ }
+ if parsedCipherFunc != cipherFunc {
+ t.Errorf("cipher function doesn't match after Decrypt: %d (original) vs %d (parsed)", cipherFunc, parsedCipherFunc)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted.go
new file mode 100644
index 00000000000..6126030eb90
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted.go
@@ -0,0 +1,290 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "crypto/cipher"
+ "crypto/sha1"
+ "crypto/subtle"
+ "golang.org/x/crypto/openpgp/errors"
+ "hash"
+ "io"
+ "strconv"
+)
+
+// SymmetricallyEncrypted represents a symmetrically encrypted byte string. The
+// encrypted contents will consist of more OpenPGP packets. See RFC 4880,
+// sections 5.7 and 5.13.
+type SymmetricallyEncrypted struct {
+ MDC bool // true iff this is a type 18 packet and thus has an embedded MAC.
+ contents io.Reader
+ prefix []byte
+}
+
+const symmetricallyEncryptedVersion = 1
+
+func (se *SymmetricallyEncrypted) parse(r io.Reader) error {
+ if se.MDC {
+ // See RFC 4880, section 5.13.
+ var buf [1]byte
+ _, err := readFull(r, buf[:])
+ if err != nil {
+ return err
+ }
+ if buf[0] != symmetricallyEncryptedVersion {
+ return errors.UnsupportedError("unknown SymmetricallyEncrypted version")
+ }
+ }
+ se.contents = r
+ return nil
+}
+
+// Decrypt returns a ReadCloser, from which the decrypted contents of the
+// packet can be read. An incorrect key can, with high probability, be detected
+// immediately and this will result in a KeyIncorrect error being returned.
+func (se *SymmetricallyEncrypted) Decrypt(c CipherFunction, key []byte) (io.ReadCloser, error) {
+ keySize := c.KeySize()
+ if keySize == 0 {
+ return nil, errors.UnsupportedError("unknown cipher: " + strconv.Itoa(int(c)))
+ }
+ if len(key) != keySize {
+ return nil, errors.InvalidArgumentError("SymmetricallyEncrypted: incorrect key length")
+ }
+
+ if se.prefix == nil {
+ se.prefix = make([]byte, c.blockSize()+2)
+ _, err := readFull(se.contents, se.prefix)
+ if err != nil {
+ return nil, err
+ }
+ } else if len(se.prefix) != c.blockSize()+2 {
+ return nil, errors.InvalidArgumentError("can't try ciphers with different block lengths")
+ }
+
+ ocfbResync := OCFBResync
+ if se.MDC {
+ // MDC packets use a different form of OCFB mode.
+ ocfbResync = OCFBNoResync
+ }
+
+ s := NewOCFBDecrypter(c.new(key), se.prefix, ocfbResync)
+ if s == nil {
+ return nil, errors.ErrKeyIncorrect
+ }
+
+ plaintext := cipher.StreamReader{S: s, R: se.contents}
+
+ if se.MDC {
+ // MDC packets have an embedded hash that we need to check.
+ h := sha1.New()
+ h.Write(se.prefix)
+ return &seMDCReader{in: plaintext, h: h}, nil
+ }
+
+ // Otherwise, we just need to wrap plaintext so that it's a valid ReadCloser.
+ return seReader{plaintext}, nil
+}
+
+// seReader wraps an io.Reader with a no-op Close method.
+type seReader struct {
+ in io.Reader
+}
+
+func (ser seReader) Read(buf []byte) (int, error) {
+ return ser.in.Read(buf)
+}
+
+func (ser seReader) Close() error {
+ return nil
+}
+
+const mdcTrailerSize = 1 /* tag byte */ + 1 /* length byte */ + sha1.Size
+
+// An seMDCReader wraps an io.Reader, maintains a running hash and keeps hold
+// of the most recent 22 bytes (mdcTrailerSize). Upon EOF, those bytes form an
+// MDC packet containing a hash of the previous contents which is checked
+// against the running hash. See RFC 4880, section 5.13.
+type seMDCReader struct {
+ in io.Reader
+ h hash.Hash
+ trailer [mdcTrailerSize]byte
+ scratch [mdcTrailerSize]byte
+ trailerUsed int
+ error bool
+ eof bool
+}
+
+func (ser *seMDCReader) Read(buf []byte) (n int, err error) {
+ if ser.error {
+ err = io.ErrUnexpectedEOF
+ return
+ }
+ if ser.eof {
+ err = io.EOF
+ return
+ }
+
+ // If we haven't yet filled the trailer buffer then we must do that
+ // first.
+ for ser.trailerUsed < mdcTrailerSize {
+ n, err = ser.in.Read(ser.trailer[ser.trailerUsed:])
+ ser.trailerUsed += n
+ if err == io.EOF {
+ if ser.trailerUsed != mdcTrailerSize {
+ n = 0
+ err = io.ErrUnexpectedEOF
+ ser.error = true
+ return
+ }
+ ser.eof = true
+ n = 0
+ return
+ }
+
+ if err != nil {
+ n = 0
+ return
+ }
+ }
+
+ // If it's a short read then we read into a temporary buffer and shift
+ // the data into the caller's buffer.
+ if len(buf) <= mdcTrailerSize {
+ n, err = readFull(ser.in, ser.scratch[:len(buf)])
+ copy(buf, ser.trailer[:n])
+ ser.h.Write(buf[:n])
+ copy(ser.trailer[:], ser.trailer[n:])
+ copy(ser.trailer[mdcTrailerSize-n:], ser.scratch[:])
+ if n < len(buf) {
+ ser.eof = true
+ err = io.EOF
+ }
+ return
+ }
+
+ n, err = ser.in.Read(buf[mdcTrailerSize:])
+ copy(buf, ser.trailer[:])
+ ser.h.Write(buf[:n])
+ copy(ser.trailer[:], buf[n:])
+
+ if err == io.EOF {
+ ser.eof = true
+ }
+ return
+}
+
+// This is a new-format packet tag byte for a type 19 (MDC) packet.
+const mdcPacketTagByte = byte(0x80) | 0x40 | 19
+
+func (ser *seMDCReader) Close() error {
+ if ser.error {
+ return errors.SignatureError("error during reading")
+ }
+
+ for !ser.eof {
+ // We haven't seen EOF so we need to read to the end
+ var buf [1024]byte
+ _, err := ser.Read(buf[:])
+ if err == io.EOF {
+ break
+ }
+ if err != nil {
+ return errors.SignatureError("error during reading")
+ }
+ }
+
+ if ser.trailer[0] != mdcPacketTagByte || ser.trailer[1] != sha1.Size {
+ return errors.SignatureError("MDC packet not found")
+ }
+ ser.h.Write(ser.trailer[:2])
+
+ final := ser.h.Sum(nil)
+ if subtle.ConstantTimeCompare(final, ser.trailer[2:]) != 1 {
+ return errors.SignatureError("hash mismatch")
+ }
+ return nil
+}
+
+// An seMDCWriter writes through to an io.WriteCloser while maintaining a running
+// hash of the data written. On close, it emits an MDC packet containing the
+// running hash.
+type seMDCWriter struct {
+ w io.WriteCloser
+ h hash.Hash
+}
+
+func (w *seMDCWriter) Write(buf []byte) (n int, err error) {
+ w.h.Write(buf)
+ return w.w.Write(buf)
+}
+
+func (w *seMDCWriter) Close() (err error) {
+ var buf [mdcTrailerSize]byte
+
+ buf[0] = mdcPacketTagByte
+ buf[1] = sha1.Size
+ w.h.Write(buf[:2])
+ digest := w.h.Sum(nil)
+ copy(buf[2:], digest)
+
+ _, err = w.w.Write(buf[:])
+ if err != nil {
+ return
+ }
+ return w.w.Close()
+}
+
+// noOpCloser is like an ioutil.NopCloser, but for an io.Writer.
+type noOpCloser struct {
+ w io.Writer
+}
+
+func (c noOpCloser) Write(data []byte) (n int, err error) {
+ return c.w.Write(data)
+}
+
+func (c noOpCloser) Close() error {
+ return nil
+}
+
+// SerializeSymmetricallyEncrypted serializes a symmetrically encrypted packet
+// to w and returns a WriteCloser to which the to-be-encrypted packets can be
+// written.
+// If config is nil, sensible defaults will be used.
+func SerializeSymmetricallyEncrypted(w io.Writer, c CipherFunction, key []byte, config *Config) (contents io.WriteCloser, err error) {
+ if c.KeySize() != len(key) {
+ return nil, errors.InvalidArgumentError("SymmetricallyEncrypted.Serialize: bad key length")
+ }
+ writeCloser := noOpCloser{w}
+ ciphertext, err := serializeStreamHeader(writeCloser, packetTypeSymmetricallyEncryptedMDC)
+ if err != nil {
+ return
+ }
+
+ _, err = ciphertext.Write([]byte{symmetricallyEncryptedVersion})
+ if err != nil {
+ return
+ }
+
+ block := c.new(key)
+ blockSize := block.BlockSize()
+ iv := make([]byte, blockSize)
+ _, err = config.Random().Read(iv)
+ if err != nil {
+ return
+ }
+ s, prefix := NewOCFBEncrypter(block, iv, OCFBNoResync)
+ _, err = ciphertext.Write(prefix)
+ if err != nil {
+ return
+ }
+ plaintext := cipher.StreamWriter{S: s, W: ciphertext}
+
+ h := sha1.New()
+ h.Write(iv)
+ h.Write(iv[blockSize-2:])
+ contents = &seMDCWriter{w: plaintext, h: h}
+ return
+}
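
For reference, a minimal standalone sketch of the 22-byte MDC trailer that seMDCWriter.Close above emits and seMDCReader.Close verifies: a new-format tag byte for a type 19 packet, a length byte of 20, and a SHA-1 digest that also covers those two header bytes. The helper name appendMDCTrailer is invented for illustration, and the sketch skips the OCFB prefix bytes that the real writer hashes first.

package main

import (
	"crypto/sha1"
	"fmt"
)

// New-format packet tag byte for a type 19 (MDC) packet, as in the code above.
const mdcTag = byte(0x80) | 0x40 | 19

// appendMDCTrailer hashes the plaintext plus the two trailer header bytes and
// returns the plaintext followed by the trailer: tag byte, length byte, SHA-1.
func appendMDCTrailer(plaintext []byte) []byte {
	h := sha1.New()
	h.Write(plaintext)
	h.Write([]byte{mdcTag, sha1.Size}) // the header bytes are included in the hash
	return append(append(plaintext, mdcTag, sha1.Size), h.Sum(nil)...)
}

func main() {
	out := appendMDCTrailer([]byte("hello world\n"))
	fmt.Printf("trailer: % x\n", out[len(out)-22:])
}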
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted_test.go
new file mode 100644
index 00000000000..c5c00f7b9c3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/symmetrically_encrypted_test.go
@@ -0,0 +1,123 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "crypto/sha1"
+ "encoding/hex"
+ "golang.org/x/crypto/openpgp/errors"
+ "io"
+ "io/ioutil"
+ "testing"
+)
+
+// testReader wraps a []byte and returns reads of a specific length.
+type testReader struct {
+ data []byte
+ stride int
+}
+
+func (t *testReader) Read(buf []byte) (n int, err error) {
+ n = t.stride
+ if n > len(t.data) {
+ n = len(t.data)
+ }
+ if n > len(buf) {
+ n = len(buf)
+ }
+ copy(buf, t.data)
+ t.data = t.data[n:]
+ if len(t.data) == 0 {
+ err = io.EOF
+ }
+ return
+}
+
+func testMDCReader(t *testing.T) {
+ mdcPlaintext, _ := hex.DecodeString(mdcPlaintextHex)
+
+ for stride := 1; stride < len(mdcPlaintext)/2; stride++ {
+ r := &testReader{data: mdcPlaintext, stride: stride}
+ mdcReader := &seMDCReader{in: r, h: sha1.New()}
+ body, err := ioutil.ReadAll(mdcReader)
+ if err != nil {
+ t.Errorf("stride: %d, error: %s", stride, err)
+ continue
+ }
+ if !bytes.Equal(body, mdcPlaintext[:len(mdcPlaintext)-22]) {
+ t.Errorf("stride: %d: bad contents %x", stride, body)
+ continue
+ }
+
+ err = mdcReader.Close()
+ if err != nil {
+ t.Errorf("stride: %d, error on Close: %s", stride, err)
+ }
+ }
+
+ mdcPlaintext[15] ^= 80
+
+ r := &testReader{data: mdcPlaintext, stride: 2}
+ mdcReader := &seMDCReader{in: r, h: sha1.New()}
+ _, err := ioutil.ReadAll(mdcReader)
+ if err != nil {
+ t.Errorf("corruption test, error: %s", err)
+ return
+ }
+ err = mdcReader.Close()
+ if err == nil {
+ t.Error("corruption: no error")
+ } else if _, ok := err.(*errors.SignatureError); !ok {
+ t.Errorf("corruption: expected SignatureError, got: %s", err)
+ }
+}
+
+const mdcPlaintextHex = "a302789c3b2d93c4e0eb9aba22283539b3203335af44a134afb800c849cb4c4de10200aff40b45d31432c80cb384299a0655966d6939dfdeed1dddf980"
+
+func TestSerialize(t *testing.T) {
+ buf := bytes.NewBuffer(nil)
+ c := CipherAES128
+ key := make([]byte, c.KeySize())
+
+ w, err := SerializeSymmetricallyEncrypted(buf, c, key, nil)
+ if err != nil {
+ t.Errorf("error from SerializeSymmetricallyEncrypted: %s", err)
+ return
+ }
+
+ contents := []byte("hello world\n")
+
+ w.Write(contents)
+ w.Close()
+
+ p, err := Read(buf)
+ if err != nil {
+ t.Errorf("error from Read: %s", err)
+ return
+ }
+
+ se, ok := p.(*SymmetricallyEncrypted)
+ if !ok {
+ t.Errorf("didn't read a *SymmetricallyEncrypted")
+ return
+ }
+
+ r, err := se.Decrypt(c, key)
+ if err != nil {
+ t.Errorf("error from Decrypt: %s", err)
+ return
+ }
+
+ contentsCopy := bytes.NewBuffer(nil)
+ _, err = io.Copy(contentsCopy, r)
+ if err != nil {
+ t.Errorf("error from io.Copy: %s", err)
+ return
+ }
+ if !bytes.Equal(contentsCopy.Bytes(), contents) {
+ t.Errorf("contents not equal got: %x want: %x", contentsCopy.Bytes(), contents)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userattribute.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userattribute.go
new file mode 100644
index 00000000000..96a2b382a1d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userattribute.go
@@ -0,0 +1,91 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "image"
+ "image/jpeg"
+ "io"
+ "io/ioutil"
+)
+
+const UserAttrImageSubpacket = 1
+
+// UserAttribute is capable of storing other types of data about a user
+// beyond name, email and a text comment. In practice, user attributes are typically used
+// to store a signed thumbnail photo JPEG image of the user.
+// See RFC 4880, section 5.12.
+type UserAttribute struct {
+ Contents []*OpaqueSubpacket
+}
+
+// NewUserAttributePhoto creates a user attribute packet
+// containing the given images.
+func NewUserAttributePhoto(photos ...image.Image) (uat *UserAttribute, err error) {
+ uat = new(UserAttribute)
+ for _, photo := range photos {
+ var buf bytes.Buffer
+ // RFC 4880, Section 5.12.1.
+ data := []byte{
+ 0x10, 0x00, // Little-endian image header length (16 bytes)
+ 0x01, // Image header version 1
+ 0x01, // JPEG
+ 0, 0, 0, 0, // 12 reserved octets, must be all zero.
+ 0, 0, 0, 0,
+ 0, 0, 0, 0}
+ if _, err = buf.Write(data); err != nil {
+ return
+ }
+ if err = jpeg.Encode(&buf, photo, nil); err != nil {
+ return
+ }
+ uat.Contents = append(uat.Contents, &OpaqueSubpacket{
+ SubType: UserAttrImageSubpacket,
+ Contents: buf.Bytes()})
+ }
+ return
+}
+
+// NewUserAttribute creates a new user attribute packet containing the given subpackets.
+func NewUserAttribute(contents ...*OpaqueSubpacket) *UserAttribute {
+ return &UserAttribute{Contents: contents}
+}
+
+func (uat *UserAttribute) parse(r io.Reader) (err error) {
+ // RFC 4880, section 5.12
+ b, err := ioutil.ReadAll(r)
+ if err != nil {
+ return
+ }
+ uat.Contents, err = OpaqueSubpackets(b)
+ return
+}
+
+// Serialize marshals the user attribute to w in the form of an OpenPGP packet, including
+// header.
+func (uat *UserAttribute) Serialize(w io.Writer) (err error) {
+ var buf bytes.Buffer
+ for _, sp := range uat.Contents {
+ sp.Serialize(&buf)
+ }
+ if err = serializeHeader(w, packetTypeUserAttribute, buf.Len()); err != nil {
+ return err
+ }
+ _, err = w.Write(buf.Bytes())
+ return
+}
+
+// ImageData returns zero or more byte slices, each containing
+// JPEG File Interchange Format (JFIF), for each photo in the
+// user attribute packet.
+func (uat *UserAttribute) ImageData() (imageData [][]byte) {
+ for _, sp := range uat.Contents {
+ if sp.SubType == UserAttrImageSubpacket && len(sp.Contents) > 16 {
+ imageData = append(imageData, sp.Contents[16:])
+ }
+ }
+ return
+}
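
A rough usage sketch for the packet added above (not part of the diff): build a photo user attribute from an image.Image, serialize it, and read it back with packet.Read. The 8x8 image and the exact print statements are illustrative assumptions.

package main

import (
	"bytes"
	"fmt"
	"image"
	"log"

	"golang.org/x/crypto/openpgp/packet"
)

func main() {
	// A tiny 8x8 image stands in for a real photo; NewUserAttributePhoto
	// JPEG-encodes it and prepends the 16-byte image header.
	img := image.NewRGBA(image.Rect(0, 0, 8, 8))

	uat, err := packet.NewUserAttributePhoto(img)
	if err != nil {
		log.Fatal(err)
	}

	var buf bytes.Buffer
	if err := uat.Serialize(&buf); err != nil {
		log.Fatal(err)
	}

	// Reading the packet back yields a *packet.UserAttribute whose ImageData
	// returns the raw JPEG bytes with the 16-byte image header stripped.
	p, err := packet.Read(&buf)
	if err != nil {
		log.Fatal(err)
	}
	parsed := p.(*packet.UserAttribute)
	imgs := parsed.ImageData()
	fmt.Printf("photos: %d, first JPEG is %d bytes\n", len(imgs), len(imgs[0]))
}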
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userattribute_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userattribute_test.go
new file mode 100644
index 00000000000..13ca5143cee
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userattribute_test.go
@@ -0,0 +1,109 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "bytes"
+ "encoding/base64"
+ "image/color"
+ "image/jpeg"
+ "testing"
+)
+
+func TestParseUserAttribute(t *testing.T) {
+ r := base64.NewDecoder(base64.StdEncoding, bytes.NewBufferString(userAttributePacket))
+ for i := 0; i < 2; i++ {
+ p, err := Read(r)
+ if err != nil {
+ t.Fatal(err)
+ }
+ uat := p.(*UserAttribute)
+ imgs := uat.ImageData()
+ if len(imgs) != 1 {
+ t.Errorf("Unexpected number of images in user attribute packet: %d", len(imgs))
+ }
+ if len(imgs[0]) != 3395 {
+ t.Errorf("Unexpected JPEG image size: %d", len(imgs[0]))
+ }
+ img, err := jpeg.Decode(bytes.NewBuffer(imgs[0]))
+ if err != nil {
+ t.Errorf("Error decoding JPEG image: %v", err)
+ }
+ // A pixel in my right eye.
+ pixel := color.NRGBAModel.Convert(img.At(56, 36))
+ ref := color.NRGBA{R: 157, G: 128, B: 124, A: 255}
+ if pixel != ref {
+ t.Errorf("Unexpected pixel color: %v", pixel)
+ }
+ w := bytes.NewBuffer(nil)
+ err = uat.Serialize(w)
+ if err != nil {
+ t.Errorf("Error writing user attribute: %v", err)
+ }
+ r = bytes.NewBuffer(w.Bytes())
+ }
+}
+
+const userAttributePacket = `
+0cyWzJQBEAABAQAAAAAAAAAAAAAAAP/Y/+AAEEpGSUYAAQIAAAEAAQAA/9sAQwAFAwQEBAMFBAQE
+BQUFBgcMCAcHBwcPCgsJDBEPEhIRDxEQExYcFxMUGhUQERghGBocHR8fHxMXIiQiHiQcHh8e/9sA
+QwEFBQUHBgcOCAgOHhQRFB4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4e
+Hh4eHh4eHh4e/8AAEQgAZABkAwEiAAIRAQMRAf/EAB8AAAEFAQEBAQEBAAAAAAAAAAABAgMEBQYH
+CAkKC//EALUQAAIBAwMCBAMFBQQEAAABfQECAwAEEQUSITFBBhNRYQcicRQygZGhCCNCscEVUtHw
+JDNicoIJChYXGBkaJSYnKCkqNDU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6
+g4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2drh4uPk
+5ebn6Onq8fLz9PX29/j5+v/EAB8BAAMBAQEBAQEBAQEAAAAAAAABAgMEBQYHCAkKC//EALURAAIB
+AgQEAwQHBQQEAAECdwABAgMRBAUhMQYSQVEHYXETIjKBCBRCkaGxwQkjM1LwFWJy0QoWJDThJfEX
+GBkaJicoKSo1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoKDhIWGh4iJipKT
+lJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uLj5OXm5+jp6vLz9PX2
+9/j5+v/aAAwDAQACEQMRAD8A5uGP06VehQ4pIox04q5EnHSvAep+hIIl4zVuMHGPWmRrUWtalaaN
+pU2oXsgSGJSxPr6ClvoitErs0Itqjc7BQOpPAFYmrfEnwjojtHNqaXEynBjtx5hH4jj9a8B8d+Od
+W8UXZjWR4LJT+7t0Jwfc+prnIdO1CWZEW2mZ3HyDactXXDB3V5s8evm1namj6r0H4weCLtxG+ova
+ueP30RA/MV6not1bX0Ed1ZzxzwyDKvGwZSPqK+Ff+ES8R8t/ZV2oHUmM10Hgbxp4m8BatEfNnWBH
+/eWshOxx9Kmpg4te49RUM1kn+8Wh9zQ4P1FaMC7l465rjPh14y0fxnoseoaXOpfaPOgJ+eI98j09
+67W19M15bi4uzPSqTU480WXkjZkAyAR61DPE6OCSOalWRRgZxjvTb598sfU4FBwx5uY4T4feIm8P
+TeJbAgc65NIM+8cX+FFeLfF3Vr3SfiNrMFrMypJMJcDPUqP8KK+kpVFyLU+ar037SXqX4hxVpMY7
+1UhPpVlT2rybKx9smWYz3NeH/EDVLzxt40j8O6bITaQybPlbKkjq39K9O8fasdH8IahfKxWQRFIy
+Ou9uB/OuE/Z/0y3j1d9TuyoZCMs5xjuea1pLli5nn46q240l13PcfhN8EvDNtpcEl/CklyVBLuMk
+mvU/Dfwo0BL/AO13FjEDD/qyV7Vn+CvGPg8zRpJrVm8ikLtEg6+1ew2dxZ3EQaJgysuQPasH7eXW
+1zzsbVhT92kk/PsYieEND+zlPs6c/wCyAPyryH4wfCPRtW0u6j+xRLOxLxSoADkDpXY+MPjJ4c0S
+9k082d3O8ZKkxw5XI96ytK+IGk+IpFjRpod+Qq3C7QT6A1E6NenaXbqRg6rlLlqS0fRnxjpd1r/w
+w8afa7GWRPKbZLGeBKmeVNfZngLxNaeKfDdprVjxHcLlkJ5Vh1H5185/tDad9h8XOsqAw3Cb0cjq
+CfX61P8AsveKf7L8T3fhe5nxa3g324YniQdh9R/KuivTdSmp9TXB1/Z1nRlsfU249QBx1pWfcwI7
+Cq6u2Ovamb9rYz16V5x7Psz5q/aJhZfibcupIElvE3H+7j+lFbXx9szP45jlUfeso8/99OKK9elL
+3EeNVopzZVharCtxVRGGMk02S5JyFOB69zWTieypnL/GksfB+0cr9oQt69awPhPpD69Y3Ky3DWth
+CWluGU4LAdq3vibGs/g68BJygVxjrwRW5+ztoRv/AAs8EeCZnO/J/hzz/Kumi4wp3kePjlOdZKPY
+ml8Mvo6WM9ppi7J0EkQYMzkb1X0wW+bJHGACa+ivg14huZPCkjXUO6SImIYOQAP6UQ2sGneHmiWF
+CYoSAAuM8etXfhBpMr+EZ3SSNRcMx6ZxWdes6ytBGSwkMNFuo7pnP614Ut9Zn1C4uLySKcwObGFA
+Qnm4+XcR71h+CfDHiKCQWuv2YWFtw+bBZQD8rcE8n2Ney+GbGGQSM6I7xvtI681rXdp8hKRRp6t3
+FYPE1VDlsY1nQjWdl+J8w/tOeDZZ/AMd/EGefTHyxxyYjwfyODXg3waRh8UtEcFh+8Jb8FNfZPxh
+Ak8J6nbPIsiyW7LnseK+Ofh99ptPHFnf2lu0y2twGcKuSEPB/Q1WHk50miq1o14TXU+xop+On61H
+NMC6Nis1LgsAcUTSt1APFcXJZn0EqmhyvxA037friTYziBV6f7Tf40Vr3k4aXLx5OMZIzRXZB2ik
+efJXbPHJJcnaD9aN2R1qoGO8/WkuLlIV+YjdjpXSonQ5lTxfiTwzqCnkeQxx9BWx+zPrQsrBFYja
+zEfrXL6lfie3khcjY6lSPUGud+G3iA6FrY0uQ/KJsA9gCa0jSvFpnBi6tpKSPu++nsIfDFxeXciR
+qIicscY4rxTwB8RUkn1axsPEf2LTYx85kTGzqCUP8VcJ47+JOs+I0Hhq1njjt/ufIeSvq1VtE+Gs
+eoaUbSHUrkHdu3WtuX5Ix81XRh7OL5jirVpV5Whdn0F8C/iX4auVn0i612T7bASoe8wjTAd89K9g
+vtSt5NMa4t5lkRhgOh3Dn6V8aaz8KZrIR3OlQ6r56LySmSxxz06Vo/CHx34h0rxBP4XvJ5AjK2RP
+nEbAEj6ZxjPrWM6fMmoswqJxqJ1VZnqHxn1NLPwveqWHmNC2BnnNcD8DfDkGi+CH1m+ijN1qMzNA
+4GSIiAMf+hVxPxU8Tapc3c0F9MGCn5GU5BX0Pau3+HmrT3XgXSIJCBHDGdgAx1NYSpezha52Yauq
+1dya2Wh2onAIwTj1p0lxxWWLkhRyCKWa5O3ORXOos9KVQluZm83j0oqi84JyWH50Vdmc7ep43d3I
+t1Z2Iz2FYdxeSTsxyRnvTdVuDNcNluM9KrKcg817NOnZGNbEXdkNckjrXGeIIprPxFFdRHAlIwem
+COtdmxrG8Q2cd/ZNExw45RvQ1bVjim+dWNzw7eaTD4mN3dndCQCo6hmI5zXpj/Ea/wBHjkh0kwRW
+xXEfl4yTxXzXZalJDL9nuWKMmRnHcV2Hh3WreCyYXW2SWQhd5P3F6n+lS43d2cTm6d7Ox9EWPxH1
+ODQxPqWpCaSU/ukUc4z3/WvKW8UhviAdaMewYZG98gj9c1ymoa8LyWOJHwkTDaVPb0qpr+q2m6Nb
+cfvNo349az9mou9iZVXNWbub3jm98/Vza2ReV7lsJg/e3dsV654UR9N0K0sZP9ZDGFbHr3rzL4P+
+H7rXfEEWr3I3W1qf3IYdW9fwqDxf4k8UeH/G95p08kscHmk25dPlZT0we9YTj7SXKjpw1aNG8mj3
+FLv5ccU959ycnmvKPDnxB82YQarGsZPAlTp+IrvIr1ZIgySKwIyCOhFYTpyg9T0qWIhVV4svzPvf
+IdhgY4orPachj81FRdmtzxqdiZmJ9aQEgdqZcPtmbJ71DJcAZ5r20kkeXJtsfPIQDwPzrG1a+S3i
+LyHAHvmp7y7HOD1rlNdm+1T7Acovf3o+J2RMpezjzMvrob67pX9o2ShZlYgg/wAWKxZLLWLZ/Ke3
+mVh14yK9M+BMC3dre2ko3LHKCB7EV7EngeGQJdQ7HyBkMKS0djgq1W3c+XtK03U522RwzsTwNiEk
+ntXoHgf4calql9El/G8UZbLfLyfr7V9FeGvh+s+0Lbxxcglu2K1NW1nwN4Gk/wBLuI57tV5jjwzE
+/QVNS+0dWYRqNvXRFv4eeCodKsY1ggVIY1G3K4z714h+1Jqul3GpwaXYeXJLbzgyyrg4b+6D+HNb
+vjz436zq9m+naHF/ZdkeGfOZXH17V4Vqt2b29K+ZuOc5bnce5zWdPBShL2lTfojSeJhy+zp/NjVz
+1Bwa6DSfFGq6fbJFDKrov8DjPFcu97ZxsUe4jVhwVJ5Bpp1mwQiLewJPXacVq6fNpYyjOUXdHoKf
+EG8VQHsInbuVcgflRXnt5fIs2FYHgcgUVi8LG+xusdW/mN7U2KgEVkTzPt60UVfQ9eHxGHrV1MGi
+iD4V25x1qvdgLAMd6KK0pbHm4x++dp8FtUubLxJ5EIjMc+A4Za+qfD8pe1JZVOBmiinW3RyRPMfi
+R8QPE638+k2l6LK0Hylbddhb6nOa80mlkcmWR2kcnlnOSaKK7qCXKcNdu5narcSrAoBxvODWJIga
+VckjDdqKKwq/EaQ0gUdbjQ6mr7QGBUcd6tPBC6gtGpOOuKKKie5qn7qIpEXd0HSiiimSf//Z`
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userid.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userid.go
new file mode 100644
index 00000000000..d6bea7d4acc
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userid.go
@@ -0,0 +1,160 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "io"
+ "io/ioutil"
+ "strings"
+)
+
+// UserId contains text that is intended to represent the name and email
+// address of the key holder. See RFC 4880, section 5.11. By convention, this
+// takes the form "Full Name (Comment) <email@example.com>"
+type UserId struct {
+ Id string // By convention, this takes the form "Full Name (Comment) <email@example.com>" which is split out in the fields below.
+
+ Name, Comment, Email string
+}
+
+func hasInvalidCharacters(s string) bool {
+ for _, c := range s {
+ switch c {
+ case '(', ')', '<', '>', 0:
+ return true
+ }
+ }
+ return false
+}
+
+// NewUserId returns a UserId or nil if any of the arguments contain invalid
+// characters. The invalid characters are '\x00', '(', ')', '<' and '>'
+func NewUserId(name, comment, email string) *UserId {
+ // RFC 4880 doesn't deal with the structure of userid strings; the
+ // name, comment and email form is just a convention. However, there's
+ // no convention about escaping the metacharacters and GPG just refuses
+ // to create user ids where, say, the name contains a '('. We mirror
+ // this behaviour.
+
+ if hasInvalidCharacters(name) || hasInvalidCharacters(comment) || hasInvalidCharacters(email) {
+ return nil
+ }
+
+ uid := new(UserId)
+ uid.Name, uid.Comment, uid.Email = name, comment, email
+ uid.Id = name
+ if len(comment) > 0 {
+ if len(uid.Id) > 0 {
+ uid.Id += " "
+ }
+ uid.Id += "("
+ uid.Id += comment
+ uid.Id += ")"
+ }
+ if len(email) > 0 {
+ if len(uid.Id) > 0 {
+ uid.Id += " "
+ }
+ uid.Id += "<"
+ uid.Id += email
+ uid.Id += ">"
+ }
+ return uid
+}
+
+func (uid *UserId) parse(r io.Reader) (err error) {
+ // RFC 4880, section 5.11
+ b, err := ioutil.ReadAll(r)
+ if err != nil {
+ return
+ }
+ uid.Id = string(b)
+ uid.Name, uid.Comment, uid.Email = parseUserId(uid.Id)
+ return
+}
+
+// Serialize marshals uid to w in the form of an OpenPGP packet, including
+// header.
+func (uid *UserId) Serialize(w io.Writer) error {
+ err := serializeHeader(w, packetTypeUserId, len(uid.Id))
+ if err != nil {
+ return err
+ }
+ _, err = w.Write([]byte(uid.Id))
+ return err
+}
+
+// parseUserId extracts the name, comment and email from a user id string that
+// is formatted as "Full Name (Comment) <email@example.com>".
+func parseUserId(id string) (name, comment, email string) {
+ var n, c, e struct {
+ start, end int
+ }
+ var state int
+
+ for offset, rune := range id {
+ switch state {
+ case 0:
+ // Entering name
+ n.start = offset
+ state = 1
+ fallthrough
+ case 1:
+ // In name
+ if rune == '(' {
+ state = 2
+ n.end = offset
+ } else if rune == '<' {
+ state = 5
+ n.end = offset
+ }
+ case 2:
+ // Entering comment
+ c.start = offset
+ state = 3
+ fallthrough
+ case 3:
+ // In comment
+ if rune == ')' {
+ state = 4
+ c.end = offset
+ }
+ case 4:
+ // Between comment and email
+ if rune == '<' {
+ state = 5
+ }
+ case 5:
+ // Entering email
+ e.start = offset
+ state = 6
+ fallthrough
+ case 6:
+ // In email
+ if rune == '>' {
+ state = 7
+ e.end = offset
+ }
+ default:
+ // After email
+ }
+ }
+ switch state {
+ case 1:
+ // ended in the name
+ n.end = len(id)
+ case 3:
+ // ended in comment
+ c.end = len(id)
+ case 6:
+ // ended in email
+ e.end = len(id)
+ }
+
+ name = strings.TrimSpace(id[n.start:n.end])
+ comment = strings.TrimSpace(id[c.start:c.end])
+ email = strings.TrimSpace(id[e.start:e.end])
+ return
+}
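
A short usage sketch (not from the diff) of the convention NewUserId enforces; the name, comment, and email below are placeholders.

package main

import (
	"fmt"

	"golang.org/x/crypto/openpgp/packet"
)

func main() {
	uid := packet.NewUserId("Alice Example", "work key", "alice@example.com")
	fmt.Println(uid.Id) // Alice Example (work key) <alice@example.com>

	// Any of '(', ')', '<', '>' or NUL in a field makes NewUserId return nil,
	// mirroring GPG's refusal to create such user ids.
	if packet.NewUserId("Alice (spoof)", "", "") == nil {
		fmt.Println("rejected")
	}
}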
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userid_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userid_test.go
new file mode 100644
index 00000000000..29681938938
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/packet/userid_test.go
@@ -0,0 +1,87 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packet
+
+import (
+ "testing"
+)
+
+var userIdTests = []struct {
+ id string
+ name, comment, email string
+}{
+ {"", "", "", ""},
+ {"John Smith", "John Smith", "", ""},
+ {"John Smith ()", "John Smith", "", ""},
+ {"John Smith () <>", "John Smith", "", ""},
+ {"(comment", "", "comment", ""},
+ {"(comment)", "", "comment", ""},
+ {"<email", "", "", "email"},
+ {"<email> sdfk", "", "", "email"},
+ {" John Smith ( Comment ) asdkflj < email > lksdfj", "John Smith", "Comment", "email"},
+ {" John Smith < email > lksdfj", "John Smith", "", "email"},
+ {"(<foo", "", "<foo", ""},
+ {"René Descartes (العربي)", "René Descartes", "العربي", ""},
+}
+
+func TestParseUserId(t *testing.T) {
+ for i, test := range userIdTests {
+ name, comment, email := parseUserId(test.id)
+ if name != test.name {
+ t.Errorf("%d: name mismatch got:%s want:%s", i, name, test.name)
+ }
+ if comment != test.comment {
+ t.Errorf("%d: comment mismatch got:%s want:%s", i, comment, test.comment)
+ }
+ if email != test.email {
+ t.Errorf("%d: email mismatch got:%s want:%s", i, email, test.email)
+ }
+ }
+}
+
+var newUserIdTests = []struct {
+ name, comment, email, id string
+}{
+ {"foo", "", "", "foo"},
+ {"", "bar", "", "(bar)"},
+ {"", "", "baz", "<baz>"},
+ {"foo", "bar", "", "foo (bar)"},
+ {"foo", "", "baz", "foo <baz>"},
+ {"", "bar", "baz", "(bar) <baz>"},
+ {"foo", "bar", "baz", "foo (bar) <baz>"},
+}
+
+func TestNewUserId(t *testing.T) {
+ for i, test := range newUserIdTests {
+ uid := NewUserId(test.name, test.comment, test.email)
+ if uid == nil {
+ t.Errorf("#%d: returned nil", i)
+ continue
+ }
+ if uid.Id != test.id {
+ t.Errorf("#%d: got '%s', want '%s'", i, uid.Id, test.id)
+ }
+ }
+}
+
+var invalidNewUserIdTests = []struct {
+ name, comment, email string
+}{
+ {"foo(", "", ""},
+ {"foo<", "", ""},
+ {"", "bar)", ""},
+ {"", "bar<", ""},
+ {"", "", "baz>"},
+ {"", "", "baz)"},
+ {"", "", "baz\x00"},
+}
+
+func TestNewUserIdWithInvalidInput(t *testing.T) {
+ for i, test := range invalidNewUserIdTests {
+ if uid := NewUserId(test.name, test.comment, test.email); uid != nil {
+ t.Errorf("#%d: returned non-nil value: %#v", i, uid)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/read.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/read.go
new file mode 100644
index 00000000000..dfffc398d5b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/read.go
@@ -0,0 +1,439 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package openpgp implements high level operations on OpenPGP messages.
+package openpgp // import "golang.org/x/crypto/openpgp"
+
+import (
+ "crypto"
+ _ "crypto/sha256"
+ "hash"
+ "io"
+ "strconv"
+
+ "golang.org/x/crypto/openpgp/armor"
+ "golang.org/x/crypto/openpgp/errors"
+ "golang.org/x/crypto/openpgp/packet"
+)
+
+// SignatureType is the armor type for a PGP signature.
+var SignatureType = "PGP SIGNATURE"
+
+// readArmored reads an armored block with the given type.
+func readArmored(r io.Reader, expectedType string) (body io.Reader, err error) {
+ block, err := armor.Decode(r)
+ if err != nil {
+ return
+ }
+
+ if block.Type != expectedType {
+ return nil, errors.InvalidArgumentError("expected '" + expectedType + "', got: " + block.Type)
+ }
+
+ return block.Body, nil
+}
+
+// MessageDetails contains the result of parsing an OpenPGP encrypted and/or
+// signed message.
+type MessageDetails struct {
+ IsEncrypted bool // true if the message was encrypted.
+ EncryptedToKeyIds []uint64 // the list of recipient key ids.
+ IsSymmetricallyEncrypted bool // true if a passphrase could have decrypted the message.
+ DecryptedWith Key // the private key used to decrypt the message, if any.
+ IsSigned bool // true if the message is signed.
+ SignedByKeyId uint64 // the key id of the signer, if any.
+ SignedBy *Key // the key of the signer, if available.
+ LiteralData *packet.LiteralData // the metadata of the contents
+ UnverifiedBody io.Reader // the contents of the message.
+
+ // If IsSigned is true and SignedBy is non-zero then the signature will
+ // be verified as UnverifiedBody is read. The signature cannot be
+ // checked until the whole of UnverifiedBody is read so UnverifiedBody
+ // must be consumed until EOF before the data can be trusted. Even if a
+ // message isn't signed (or the signer is unknown) the data may contain
+ // an authentication code that is only checked once UnverifiedBody has
+ // been consumed. Once EOF has been seen, the following fields are
+ // valid. (An authentication code failure is reported as a
+ // SignatureError error when reading from UnverifiedBody.)
+ SignatureError error // nil if the signature is good.
+ Signature *packet.Signature // the signature packet itself.
+
+ decrypted io.ReadCloser
+}
+
+// A PromptFunction is used as a callback by functions that may need to decrypt
+// a private key, or prompt for a passphrase. It is called with a list of
+// acceptable, encrypted private keys and a boolean that indicates whether a
+// passphrase is usable. It should either decrypt a private key or return a
+// passphrase to try. If the decrypted private key or given passphrase isn't
+// correct, the function will be called again, forever. Any error returned will
+// be passed up.
+type PromptFunction func(keys []Key, symmetric bool) ([]byte, error)
+
+// A keyEnvelopePair is used to store a private key with the envelope that
+// contains a symmetric key, encrypted with that key.
+type keyEnvelopePair struct {
+ key Key
+ encryptedKey *packet.EncryptedKey
+}
+
+// ReadMessage parses an OpenPGP message that may be signed and/or encrypted.
+// The given KeyRing should contain both public keys (for signature
+// verification) and, possibly encrypted, private keys for decrypting.
+// If config is nil, sensible defaults will be used.
+func ReadMessage(r io.Reader, keyring KeyRing, prompt PromptFunction, config *packet.Config) (md *MessageDetails, err error) {
+ var p packet.Packet
+
+ var symKeys []*packet.SymmetricKeyEncrypted
+ var pubKeys []keyEnvelopePair
+ var se *packet.SymmetricallyEncrypted
+
+ packets := packet.NewReader(r)
+ md = new(MessageDetails)
+ md.IsEncrypted = true
+
+ // The message, if encrypted, starts with a number of packets
+ // containing an encrypted decryption key. The decryption key is either
+ // encrypted to a public key, or with a passphrase. This loop
+ // collects these packets.
+ParsePackets:
+ for {
+ p, err = packets.Next()
+ if err != nil {
+ return nil, err
+ }
+ switch p := p.(type) {
+ case *packet.SymmetricKeyEncrypted:
+ // This packet contains the decryption key encrypted with a passphrase.
+ md.IsSymmetricallyEncrypted = true
+ symKeys = append(symKeys, p)
+ case *packet.EncryptedKey:
+ // This packet contains the decryption key encrypted to a public key.
+ md.EncryptedToKeyIds = append(md.EncryptedToKeyIds, p.KeyId)
+ switch p.Algo {
+ case packet.PubKeyAlgoRSA, packet.PubKeyAlgoRSAEncryptOnly, packet.PubKeyAlgoElGamal:
+ break
+ default:
+ continue
+ }
+ var keys []Key
+ if p.KeyId == 0 {
+ keys = keyring.DecryptionKeys()
+ } else {
+ keys = keyring.KeysById(p.KeyId)
+ }
+ for _, k := range keys {
+ pubKeys = append(pubKeys, keyEnvelopePair{k, p})
+ }
+ case *packet.SymmetricallyEncrypted:
+ se = p
+ break ParsePackets
+ case *packet.Compressed, *packet.LiteralData, *packet.OnePassSignature:
+ // This message isn't encrypted.
+ if len(symKeys) != 0 || len(pubKeys) != 0 {
+ return nil, errors.StructuralError("key material not followed by encrypted message")
+ }
+ packets.Unread(p)
+ return readSignedMessage(packets, nil, keyring)
+ }
+ }
+
+ var candidates []Key
+ var decrypted io.ReadCloser
+
+ // Now that we have the list of encrypted keys we need to decrypt at
+ // least one of them or, if we cannot, we need to call the prompt
+ // function so that it can decrypt a key or give us a passphrase.
+FindKey:
+ for {
+ // See if any of the keys already have a private key available
+ candidates = candidates[:0]
+ candidateFingerprints := make(map[string]bool)
+
+ for _, pk := range pubKeys {
+ if pk.key.PrivateKey == nil {
+ continue
+ }
+ if !pk.key.PrivateKey.Encrypted {
+ if len(pk.encryptedKey.Key) == 0 {
+ pk.encryptedKey.Decrypt(pk.key.PrivateKey, config)
+ }
+ if len(pk.encryptedKey.Key) == 0 {
+ continue
+ }
+ decrypted, err = se.Decrypt(pk.encryptedKey.CipherFunc, pk.encryptedKey.Key)
+ if err != nil && err != errors.ErrKeyIncorrect {
+ return nil, err
+ }
+ if decrypted != nil {
+ md.DecryptedWith = pk.key
+ break FindKey
+ }
+ } else {
+ fpr := string(pk.key.PublicKey.Fingerprint[:])
+ if v := candidateFingerprints[fpr]; v {
+ continue
+ }
+ candidates = append(candidates, pk.key)
+ candidateFingerprints[fpr] = true
+ }
+ }
+
+ if len(candidates) == 0 && len(symKeys) == 0 {
+ return nil, errors.ErrKeyIncorrect
+ }
+
+ if prompt == nil {
+ return nil, errors.ErrKeyIncorrect
+ }
+
+ passphrase, err := prompt(candidates, len(symKeys) != 0)
+ if err != nil {
+ return nil, err
+ }
+
+ // Try the symmetric passphrase first
+ if len(symKeys) != 0 && passphrase != nil {
+ for _, s := range symKeys {
+ key, cipherFunc, err := s.Decrypt(passphrase)
+ if err == nil {
+ decrypted, err = se.Decrypt(cipherFunc, key)
+ if err != nil && err != errors.ErrKeyIncorrect {
+ return nil, err
+ }
+ if decrypted != nil {
+ break FindKey
+ }
+ }
+
+ }
+ }
+ }
+
+ md.decrypted = decrypted
+ if err := packets.Push(decrypted); err != nil {
+ return nil, err
+ }
+ return readSignedMessage(packets, md, keyring)
+}
+
+// readSignedMessage reads a possibly signed message. If mdin is non-nil then
+// that structure is updated and returned; otherwise a fresh MessageDetails is
+// used.
+func readSignedMessage(packets *packet.Reader, mdin *MessageDetails, keyring KeyRing) (md *MessageDetails, err error) {
+ if mdin == nil {
+ mdin = new(MessageDetails)
+ }
+ md = mdin
+
+ var p packet.Packet
+ var h hash.Hash
+ var wrappedHash hash.Hash
+FindLiteralData:
+ for {
+ p, err = packets.Next()
+ if err != nil {
+ return nil, err
+ }
+ switch p := p.(type) {
+ case *packet.Compressed:
+ if err := packets.Push(p.Body); err != nil {
+ return nil, err
+ }
+ case *packet.OnePassSignature:
+ if !p.IsLast {
+ return nil, errors.UnsupportedError("nested signatures")
+ }
+
+ h, wrappedHash, err = hashForSignature(p.Hash, p.SigType)
+ if err != nil {
+ md = nil
+ return
+ }
+
+ md.IsSigned = true
+ md.SignedByKeyId = p.KeyId
+ keys := keyring.KeysByIdUsage(p.KeyId, packet.KeyFlagSign)
+ if len(keys) > 0 {
+ md.SignedBy = &keys[0]
+ }
+ case *packet.LiteralData:
+ md.LiteralData = p
+ break FindLiteralData
+ }
+ }
+
+ if md.SignedBy != nil {
+ md.UnverifiedBody = &signatureCheckReader{packets, h, wrappedHash, md}
+ } else if md.decrypted != nil {
+ md.UnverifiedBody = checkReader{md}
+ } else {
+ md.UnverifiedBody = md.LiteralData.Body
+ }
+
+ return md, nil
+}
+
+// hashForSignature returns a pair of hashes that can be used to verify a
+// signature. The signature may specify that the contents of the signed message
+// should be preprocessed (i.e. to normalize line endings). Thus this function
+// returns two hashes. The second should be used to hash the message itself and
+// performs any needed preprocessing.
+func hashForSignature(hashId crypto.Hash, sigType packet.SignatureType) (hash.Hash, hash.Hash, error) {
+ if !hashId.Available() {
+ return nil, nil, errors.UnsupportedError("hash not available: " + strconv.Itoa(int(hashId)))
+ }
+ h := hashId.New()
+
+ switch sigType {
+ case packet.SigTypeBinary:
+ return h, h, nil
+ case packet.SigTypeText:
+ return h, NewCanonicalTextHash(h), nil
+ }
+
+ return nil, nil, errors.UnsupportedError("unsupported signature type: " + strconv.Itoa(int(sigType)))
+}
+
+// checkReader wraps an io.Reader from a LiteralData packet. When it sees EOF
+// it closes the ReadCloser from any SymmetricallyEncrypted packet to trigger
+// MDC checks.
+type checkReader struct {
+ md *MessageDetails
+}
+
+func (cr checkReader) Read(buf []byte) (n int, err error) {
+ n, err = cr.md.LiteralData.Body.Read(buf)
+ if err == io.EOF {
+ mdcErr := cr.md.decrypted.Close()
+ if mdcErr != nil {
+ err = mdcErr
+ }
+ }
+ return
+}
+
+// signatureCheckReader wraps an io.Reader from a LiteralData packet and hashes
+// the data as it is read. When it sees an EOF from the underlying io.Reader
+// it parses and checks a trailing Signature packet and triggers any MDC checks.
+type signatureCheckReader struct {
+ packets *packet.Reader
+ h, wrappedHash hash.Hash
+ md *MessageDetails
+}
+
+func (scr *signatureCheckReader) Read(buf []byte) (n int, err error) {
+ n, err = scr.md.LiteralData.Body.Read(buf)
+ scr.wrappedHash.Write(buf[:n])
+ if err == io.EOF {
+ var p packet.Packet
+ p, scr.md.SignatureError = scr.packets.Next()
+ if scr.md.SignatureError != nil {
+ return
+ }
+
+ var ok bool
+ if scr.md.Signature, ok = p.(*packet.Signature); !ok {
+ scr.md.SignatureError = errors.StructuralError("LiteralData not followed by Signature")
+ return
+ }
+
+ scr.md.SignatureError = scr.md.SignedBy.PublicKey.VerifySignature(scr.h, scr.md.Signature)
+
+ // The SymmetricallyEncrypted packet, if any, might have an
+ // unsigned hash of its own. In order to check this we need to
+ // close that Reader.
+ if scr.md.decrypted != nil {
+ mdcErr := scr.md.decrypted.Close()
+ if mdcErr != nil {
+ err = mdcErr
+ }
+ }
+ }
+ return
+}
+
+// CheckDetachedSignature takes a signed file and a detached signature and
+// returns the signer if the signature is valid. If the signer isn't known,
+// ErrUnknownIssuer is returned.
+func CheckDetachedSignature(keyring KeyRing, signed, signature io.Reader) (signer *Entity, err error) {
+ var issuerKeyId uint64
+ var hashFunc crypto.Hash
+ var sigType packet.SignatureType
+ var keys []Key
+ var p packet.Packet
+
+ packets := packet.NewReader(signature)
+ for {
+ p, err = packets.Next()
+ if err == io.EOF {
+ return nil, errors.ErrUnknownIssuer
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ switch sig := p.(type) {
+ case *packet.Signature:
+ if sig.IssuerKeyId == nil {
+ return nil, errors.StructuralError("signature doesn't have an issuer")
+ }
+ issuerKeyId = *sig.IssuerKeyId
+ hashFunc = sig.Hash
+ sigType = sig.SigType
+ case *packet.SignatureV3:
+ issuerKeyId = sig.IssuerKeyId
+ hashFunc = sig.Hash
+ sigType = sig.SigType
+ default:
+ return nil, errors.StructuralError("non signature packet found")
+ }
+
+ keys = keyring.KeysByIdUsage(issuerKeyId, packet.KeyFlagSign)
+ if len(keys) > 0 {
+ break
+ }
+ }
+
+ if len(keys) == 0 {
+ panic("unreachable")
+ }
+
+ h, wrappedHash, err := hashForSignature(hashFunc, sigType)
+ if err != nil {
+ return nil, err
+ }
+
+ if _, err := io.Copy(wrappedHash, signed); err != nil && err != io.EOF {
+ return nil, err
+ }
+
+ for _, key := range keys {
+ switch sig := p.(type) {
+ case *packet.Signature:
+ err = key.PublicKey.VerifySignature(h, sig)
+ case *packet.SignatureV3:
+ err = key.PublicKey.VerifySignatureV3(h, sig)
+ default:
+ panic("unreachable")
+ }
+
+ if err == nil {
+ return key.Entity, nil
+ }
+ }
+
+ return nil, err
+}
+
+// CheckArmoredDetachedSignature performs the same actions as
+// CheckDetachedSignature but expects the signature to be armored.
+func CheckArmoredDetachedSignature(keyring KeyRing, signed, signature io.Reader) (signer *Entity, err error) {
+ body, err := readArmored(signature, SignatureType)
+ if err != nil {
+ return
+ }
+
+ return CheckDetachedSignature(keyring, signed, body)
+}
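
A hedged end-to-end sketch of the ReadMessage contract described above: the prompt callback supplies a passphrase, and UnverifiedBody must be drained to EOF before SignatureError (and any MDC check) is meaningful. The file names and passphrase are placeholders, not part of the diff.

package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"os"

	"golang.org/x/crypto/openpgp"
)

func main() {
	keyringFile, err := os.Open("keyring.gpg") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer keyringFile.Close()
	keyring, err := openpgp.ReadKeyRing(keyringFile)
	if err != nil {
		log.Fatal(err)
	}

	msgFile, err := os.Open("message.gpg") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer msgFile.Close()

	// Called only if decryption needs a passphrase or an encrypted private key.
	prompt := func(keys []openpgp.Key, symmetric bool) ([]byte, error) {
		return []byte("placeholder passphrase"), nil
	}

	md, err := openpgp.ReadMessage(msgFile, keyring, prompt, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Drain the body first: signature and MDC checks only complete at EOF.
	plaintext, err := ioutil.ReadAll(md.UnverifiedBody)
	if err != nil {
		log.Fatal(err)
	}
	if md.IsSigned && md.SignatureError != nil {
		log.Fatal("bad signature: ", md.SignatureError)
	}
	fmt.Printf("read %d plaintext bytes\n", len(plaintext))
}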
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/read_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/read_test.go
new file mode 100644
index 00000000000..7524a02e56d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/read_test.go
@@ -0,0 +1,512 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package openpgp
+
+import (
+ "bytes"
+ _ "crypto/sha512"
+ "encoding/hex"
+ "io"
+ "io/ioutil"
+ "strings"
+ "testing"
+
+ "golang.org/x/crypto/openpgp/errors"
+)
+
+func readerFromHex(s string) io.Reader {
+ data, err := hex.DecodeString(s)
+ if err != nil {
+ panic("readerFromHex: bad input")
+ }
+ return bytes.NewBuffer(data)
+}
+
+func TestReadKeyRing(t *testing.T) {
+ kring, err := ReadKeyRing(readerFromHex(testKeys1And2Hex))
+ if err != nil {
+ t.Error(err)
+ return
+ }
+ if len(kring) != 2 || uint32(kring[0].PrimaryKey.KeyId) != 0xC20C31BB || uint32(kring[1].PrimaryKey.KeyId) != 0x1E35246B {
+ t.Errorf("bad keyring: %#v", kring)
+ }
+}
+
+func TestRereadKeyRing(t *testing.T) {
+ kring, err := ReadKeyRing(readerFromHex(testKeys1And2Hex))
+ if err != nil {
+ t.Errorf("error in initial parse: %s", err)
+ return
+ }
+ out := new(bytes.Buffer)
+ err = kring[0].Serialize(out)
+ if err != nil {
+ t.Errorf("error in serialization: %s", err)
+ return
+ }
+ kring, err = ReadKeyRing(out)
+ if err != nil {
+ t.Errorf("error in second parse: %s", err)
+ return
+ }
+
+ if len(kring) != 1 || uint32(kring[0].PrimaryKey.KeyId) != 0xC20C31BB {
+ t.Errorf("bad keyring: %#v", kring)
+ }
+}
+
+func TestReadPrivateKeyRing(t *testing.T) {
+ kring, err := ReadKeyRing(readerFromHex(testKeys1And2PrivateHex))
+ if err != nil {
+ t.Error(err)
+ return
+ }
+ if len(kring) != 2 || uint32(kring[0].PrimaryKey.KeyId) != 0xC20C31BB || uint32(kring[1].PrimaryKey.KeyId) != 0x1E35246B || kring[0].PrimaryKey == nil {
+ t.Errorf("bad keyring: %#v", kring)
+ }
+}
+
+func TestReadDSAKey(t *testing.T) {
+ kring, err := ReadKeyRing(readerFromHex(dsaTestKeyHex))
+ if err != nil {
+ t.Error(err)
+ return
+ }
+ if len(kring) != 1 || uint32(kring[0].PrimaryKey.KeyId) != 0x0CCC0360 {
+ t.Errorf("bad parse: %#v", kring)
+ }
+}
+
+func TestDSAHashTruncatation(t *testing.T) {
+ // dsaKeyWithSHA512 was generated with GnuPG and --cert-digest-algo
+ // SHA512 in order to require DSA hash truncation to verify correctly.
+ _, err := ReadKeyRing(readerFromHex(dsaKeyWithSHA512))
+ if err != nil {
+ t.Error(err)
+ }
+}
+
+func TestGetKeyById(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex))
+
+ keys := kring.KeysById(0xa34d7e18c20c31bb)
+ if len(keys) != 1 || keys[0].Entity != kring[0] {
+ t.Errorf("bad result for 0xa34d7e18c20c31bb: %#v", keys)
+ }
+
+ keys = kring.KeysById(0xfd94408d4543314f)
+ if len(keys) != 1 || keys[0].Entity != kring[0] {
+ t.Errorf("bad result for 0xfd94408d4543314f: %#v", keys)
+ }
+}
+
+func checkSignedMessage(t *testing.T, signedHex, expected string) {
+ kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex))
+
+ md, err := ReadMessage(readerFromHex(signedHex), kring, nil, nil)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ if !md.IsSigned || md.SignedByKeyId != 0xa34d7e18c20c31bb || md.SignedBy == nil || md.IsEncrypted || md.IsSymmetricallyEncrypted || len(md.EncryptedToKeyIds) != 0 || md.IsSymmetricallyEncrypted {
+ t.Errorf("bad MessageDetails: %#v", md)
+ }
+
+ contents, err := ioutil.ReadAll(md.UnverifiedBody)
+ if err != nil {
+ t.Errorf("error reading UnverifiedBody: %s", err)
+ }
+ if string(contents) != expected {
+ t.Errorf("bad UnverifiedBody got:%s want:%s", string(contents), expected)
+ }
+ if md.SignatureError != nil || md.Signature == nil {
+ t.Errorf("failed to validate: %s", md.SignatureError)
+ }
+}
+
+func TestSignedMessage(t *testing.T) {
+ checkSignedMessage(t, signedMessageHex, signedInput)
+}
+
+func TestTextSignedMessage(t *testing.T) {
+ checkSignedMessage(t, signedTextMessageHex, signedTextInput)
+}
+
+// The reader should detect "compressed quines", which are compressed
+// packets that expand into themselves and cause an infinite recursive
+// parsing loop.
+// The packet in this test case comes from Taylor R. Campbell at
+// http://mumble.net/~campbell/misc/pgp-quine/
+func TestCampbellQuine(t *testing.T) {
+ md, err := ReadMessage(readerFromHex(campbellQuine), nil, nil, nil)
+ if md != nil {
+ t.Errorf("Reading a compressed quine should not return any data: %#v", md)
+ }
+ structural, ok := err.(errors.StructuralError)
+ if !ok {
+ t.Fatalf("Unexpected class of error: %T", err)
+ }
+ if !strings.Contains(string(structural), "too many layers of packets") {
+ t.Fatalf("Unexpected error: %s", err)
+ }
+}
+
+var signedEncryptedMessageTests = []struct {
+ keyRingHex string
+ messageHex string
+ signedByKeyId uint64
+ encryptedToKeyId uint64
+}{
+ {
+ testKeys1And2PrivateHex,
+ signedEncryptedMessageHex,
+ 0xa34d7e18c20c31bb,
+ 0x2a67d68660df41c7,
+ },
+ {
+ dsaElGamalTestKeysHex,
+ signedEncryptedMessage2Hex,
+ 0x33af447ccd759b09,
+ 0xcf6a7abcd43e3673,
+ },
+}
+
+func TestSignedEncryptedMessage(t *testing.T) {
+ for i, test := range signedEncryptedMessageTests {
+ expected := "Signed and encrypted message\n"
+ kring, _ := ReadKeyRing(readerFromHex(test.keyRingHex))
+ prompt := func(keys []Key, symmetric bool) ([]byte, error) {
+ if symmetric {
+ t.Errorf("prompt: message was marked as symmetrically encrypted")
+ return nil, errors.ErrKeyIncorrect
+ }
+
+ if len(keys) == 0 {
+ t.Error("prompt: no keys requested")
+ return nil, errors.ErrKeyIncorrect
+ }
+
+ err := keys[0].PrivateKey.Decrypt([]byte("passphrase"))
+ if err != nil {
+ t.Errorf("prompt: error decrypting key: %s", err)
+ return nil, errors.ErrKeyIncorrect
+ }
+
+ return nil, nil
+ }
+
+ md, err := ReadMessage(readerFromHex(test.messageHex), kring, prompt, nil)
+ if err != nil {
+ t.Errorf("#%d: error reading message: %s", i, err)
+ return
+ }
+
+ if !md.IsSigned || md.SignedByKeyId != test.signedByKeyId || md.SignedBy == nil || !md.IsEncrypted || md.IsSymmetricallyEncrypted || len(md.EncryptedToKeyIds) == 0 || md.EncryptedToKeyIds[0] != test.encryptedToKeyId {
+ t.Errorf("#%d: bad MessageDetails: %#v", i, md)
+ }
+
+ contents, err := ioutil.ReadAll(md.UnverifiedBody)
+ if err != nil {
+ t.Errorf("#%d: error reading UnverifiedBody: %s", i, err)
+ }
+ if string(contents) != expected {
+ t.Errorf("#%d: bad UnverifiedBody got:%s want:%s", i, string(contents), expected)
+ }
+
+ if md.SignatureError != nil || md.Signature == nil {
+ t.Errorf("#%d: failed to validate: %s", i, md.SignatureError)
+ }
+ }
+}
+
+func TestUnspecifiedRecipient(t *testing.T) {
+ expected := "Recipient unspecified\n"
+ kring, _ := ReadKeyRing(readerFromHex(testKeys1And2PrivateHex))
+
+ md, err := ReadMessage(readerFromHex(recipientUnspecifiedHex), kring, nil, nil)
+ if err != nil {
+ t.Errorf("error reading message: %s", err)
+ return
+ }
+
+ contents, err := ioutil.ReadAll(md.UnverifiedBody)
+ if err != nil {
+ t.Errorf("error reading UnverifiedBody: %s", err)
+ }
+ if string(contents) != expected {
+ t.Errorf("bad UnverifiedBody got:%s want:%s", string(contents), expected)
+ }
+}
+
+func TestSymmetricallyEncrypted(t *testing.T) {
+ firstTimeCalled := true
+
+ prompt := func(keys []Key, symmetric bool) ([]byte, error) {
+ if len(keys) != 0 {
+ t.Errorf("prompt: len(keys) = %d (want 0)", len(keys))
+ }
+
+ if !symmetric {
+ t.Errorf("symmetric is not set")
+ }
+
+ if firstTimeCalled {
+ firstTimeCalled = false
+ return []byte("wrongpassword"), nil
+ }
+
+ return []byte("password"), nil
+ }
+
+ md, err := ReadMessage(readerFromHex(symmetricallyEncryptedCompressedHex), nil, prompt, nil)
+ if err != nil {
+ t.Errorf("ReadMessage: %s", err)
+ return
+ }
+
+ contents, err := ioutil.ReadAll(md.UnverifiedBody)
+ if err != nil {
+ t.Errorf("ReadAll: %s", err)
+ }
+
+ expectedCreationTime := uint32(1295992998)
+ if md.LiteralData.Time != expectedCreationTime {
+ t.Errorf("LiteralData.Time is %d, want %d", md.LiteralData.Time, expectedCreationTime)
+ }
+
+ const expected = "Symmetrically encrypted.\n"
+ if string(contents) != expected {
+ t.Errorf("contents got: %s want: %s", string(contents), expected)
+ }
+}
+
+func testDetachedSignature(t *testing.T, kring KeyRing, signature io.Reader, sigInput, tag string, expectedSignerKeyId uint64) {
+ signed := bytes.NewBufferString(sigInput)
+ signer, err := CheckDetachedSignature(kring, signed, signature)
+ if err != nil {
+ t.Errorf("%s: signature error: %s", tag, err)
+ return
+ }
+ if signer == nil {
+ t.Errorf("%s: signer is nil", tag)
+ return
+ }
+ if signer.PrimaryKey.KeyId != expectedSignerKeyId {
+ t.Errorf("%s: wrong signer got:%x want:%x", tag, signer.PrimaryKey.KeyId, expectedSignerKeyId)
+ }
+}
+
+func TestDetachedSignature(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex))
+ testDetachedSignature(t, kring, readerFromHex(detachedSignatureHex), signedInput, "binary", testKey1KeyId)
+ testDetachedSignature(t, kring, readerFromHex(detachedSignatureTextHex), signedInput, "text", testKey1KeyId)
+ testDetachedSignature(t, kring, readerFromHex(detachedSignatureV3TextHex), signedInput, "v3", testKey1KeyId)
+
+ incorrectSignedInput := signedInput + "X"
+ _, err := CheckDetachedSignature(kring, bytes.NewBufferString(incorrectSignedInput), readerFromHex(detachedSignatureHex))
+ if err == nil {
+ t.Fatal("CheckDetachedSignature returned without error for bad signature")
+ }
+ if err == errors.ErrUnknownIssuer {
+ t.Fatal("CheckDetachedSignature returned ErrUnknownIssuer when the signer was known, but the signature invalid")
+ }
+}
+
+func TestDetachedSignatureDSA(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(dsaTestKeyHex))
+ testDetachedSignature(t, kring, readerFromHex(detachedSignatureDSAHex), signedInput, "binary", testKey3KeyId)
+}
+
+func TestMultipleSignaturePacketsDSA(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(dsaTestKeyHex))
+ testDetachedSignature(t, kring, readerFromHex(missingHashFunctionHex+detachedSignatureDSAHex), signedInput, "binary", testKey3KeyId)
+}
+
+func testHashFunctionError(t *testing.T, signatureHex string) {
+ kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex))
+ _, err := CheckDetachedSignature(kring, nil, readerFromHex(signatureHex))
+ if err == nil {
+ t.Fatal("Packet with bad hash type was correctly parsed")
+ }
+ unsupported, ok := err.(errors.UnsupportedError)
+ if !ok {
+ t.Fatalf("Unexpected class of error: %s", err)
+ }
+ if !strings.Contains(string(unsupported), "hash ") {
+ t.Fatalf("Unexpected error: %s", err)
+ }
+}
+
+func TestUnknownHashFunction(t *testing.T) {
+ // unknownHashFunctionHex contains a signature packet with hash
+ // function type 153 (which isn't a real hash function id).
+ testHashFunctionError(t, unknownHashFunctionHex)
+}
+
+func TestMissingHashFunction(t *testing.T) {
+ // missingHashFunctionHex contains a signature packet that uses
+ // RIPEMD160, which isn't compiled in. Since that's the only signature
+ // packet, we don't find any suitable packets and end up with ErrUnknownIssuer.
+ kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex))
+ _, err := CheckDetachedSignature(kring, nil, readerFromHex(missingHashFunctionHex))
+ if err == nil {
+ t.Fatal("Packet with missing hash type was correctly parsed")
+ }
+ if err != errors.ErrUnknownIssuer {
+ t.Fatalf("Unexpected class of error: %s", err)
+ }
+}
+
+func TestReadingArmoredPrivateKey(t *testing.T) {
+ el, err := ReadArmoredKeyRing(bytes.NewBufferString(armoredPrivateKeyBlock))
+ if err != nil {
+ t.Error(err)
+ }
+ if len(el) != 1 {
+ t.Errorf("got %d entities, wanted 1\n", len(el))
+ }
+}
+
+func TestReadingArmoredPublicKey(t *testing.T) {
+ el, err := ReadArmoredKeyRing(bytes.NewBufferString(e2ePublicKey))
+ if err != nil {
+ t.Error(err)
+ }
+ if len(el) != 1 {
+ t.Errorf("didn't get a valid entity")
+ }
+}
+
+func TestNoArmoredData(t *testing.T) {
+ _, err := ReadArmoredKeyRing(bytes.NewBufferString("foo"))
+ if _, ok := err.(errors.InvalidArgumentError); !ok {
+ t.Errorf("error was not an InvalidArgumentError: %s", err)
+ }
+}
+
+func testReadMessageError(t *testing.T, messageHex string) {
+ buf, err := hex.DecodeString(messageHex)
+ if err != nil {
+ t.Errorf("hex.DecodeString(): %v", err)
+ }
+
+ kr, err := ReadKeyRing(new(bytes.Buffer))
+ if err != nil {
+ t.Errorf("ReadKeyring(): %v", err)
+ }
+
+ _, err = ReadMessage(bytes.NewBuffer(buf), kr,
+ func([]Key, bool) ([]byte, error) {
+ return []byte("insecure"), nil
+ }, nil)
+
+ if err == nil {
+ t.Errorf("ReadMessage(): Unexpected nil error")
+ }
+}
+
+func TestIssue11503(t *testing.T) {
+ testReadMessageError(t, "8c040402000aa430aa8228b9248b01fc899a91197130303030")
+}
+
+func TestIssue11504(t *testing.T) {
+ testReadMessageError(t, "9303000130303030303030303030983002303030303030030000000130")
+}
+
+const testKey1KeyId = 0xA34D7E18C20C31BB
+const testKey3KeyId = 0x338934250CCC0360
+
+const signedInput = "Signed message\nline 2\nline 3\n"
+const signedTextInput = "Signed message\r\nline 2\r\nline 3\r\n"
+
+const recipientUnspecifiedHex = "848c0300000000000000000103ff62d4d578d03cf40c3da998dfe216c074fa6ddec5e31c197c9666ba292830d91d18716a80f699f9d897389a90e6d62d0238f5f07a5248073c0f24920e4bc4a30c2d17ee4e0cae7c3d4aaa4e8dced50e3010a80ee692175fa0385f62ecca4b56ee6e9980aa3ec51b61b077096ac9e800edaf161268593eedb6cc7027ff5cb32745d250010d407a6221ae22ef18469b444f2822478c4d190b24d36371a95cb40087cdd42d9399c3d06a53c0673349bfb607927f20d1e122bde1e2bf3aa6cae6edf489629bcaa0689539ae3b718914d88ededc3b"
+
+const detachedSignatureHex = "889c04000102000605024d449cd1000a0910a34d7e18c20c31bb167603ff57718d09f28a519fdc7b5a68b6a3336da04df85e38c5cd5d5bd2092fa4629848a33d85b1729402a2aab39c3ac19f9d573f773cc62c264dc924c067a79dfd8a863ae06c7c8686120760749f5fd9b1e03a64d20a7df3446ddc8f0aeadeaeba7cbaee5c1e366d65b6a0c6cc749bcb912d2f15013f812795c2e29eb7f7b77f39ce77"
+
+const detachedSignatureTextHex = "889c04010102000605024d449d21000a0910a34d7e18c20c31bbc8c60400a24fbef7342603a41cb1165767bd18985d015fb72fe05db42db36cfb2f1d455967f1e491194fbf6cf88146222b23bf6ffbd50d17598d976a0417d3192ff9cc0034fd00f287b02e90418bbefe609484b09231e4e7a5f3562e199bf39909ab5276c4d37382fe088f6b5c3426fc1052865da8b3ab158672d58b6264b10823dc4b39"
+
+const detachedSignatureV3TextHex = "8900950305005255c25ca34d7e18c20c31bb0102bb3f04009f6589ef8a028d6e54f6eaf25432e590d31c3a41f4710897585e10c31e5e332c7f9f409af8512adceaff24d0da1474ab07aa7bce4f674610b010fccc5b579ae5eb00a127f272fb799f988ab8e4574c141da6dbfecfef7e6b2c478d9a3d2551ba741f260ee22bec762812f0053e05380bfdd55ad0f22d8cdf71b233fe51ae8a24"
+
+const detachedSignatureDSAHex = "884604001102000605024d6c4eac000a0910338934250ccc0360f18d00a087d743d6405ed7b87755476629600b8b694a39e900a0abff8126f46faf1547c1743c37b21b4ea15b8f83"
+
+const testKeys1And2Hex = "988d044d3c5c10010400b1d13382944bd5aba23a4312968b5095d14f947f600eb478e14a6fcb16b0e0cac764884909c020bc495cfcc39a935387c661507bdb236a0612fb582cac3af9b29cc2c8c70090616c41b662f4da4c1201e195472eb7f4ae1ccbcbf9940fe21d985e379a5563dde5b9a23d35f1cfaa5790da3b79db26f23695107bfaca8e7b5bcd0011010001b41054657374204b6579203120285253412988b804130102002205024d3c5c10021b03060b090807030206150802090a0b0416020301021e01021780000a0910a34d7e18c20c31bbb5b304009cc45fe610b641a2c146331be94dade0a396e73ca725e1b25c21708d9cab46ecca5ccebc23055879df8f99eea39b377962a400f2ebdc36a7c99c333d74aeba346315137c3ff9d0a09b0273299090343048afb8107cf94cbd1400e3026f0ccac7ecebbc4d78588eb3e478fe2754d3ca664bcf3eac96ca4a6b0c8d7df5102f60f6b0020003b88d044d3c5c10010400b201df61d67487301f11879d514f4248ade90c8f68c7af1284c161098de4c28c2850f1ec7b8e30f959793e571542ffc6532189409cb51c3d30dad78c4ad5165eda18b20d9826d8707d0f742e2ab492103a85bbd9ddf4f5720f6de7064feb0d39ee002219765bb07bcfb8b877f47abe270ddeda4f676108cecb6b9bb2ad484a4f0011010001889f04180102000905024d3c5c10021b0c000a0910a34d7e18c20c31bb1a03040085c8d62e16d05dc4e9dad64953c8a2eed8b6c12f92b1575eeaa6dcf7be9473dd5b24b37b6dffbb4e7c99ed1bd3cb11634be19b3e6e207bed7505c7ca111ccf47cb323bf1f8851eb6360e8034cbff8dd149993c959de89f8f77f38e7e98b8e3076323aa719328e2b408db5ec0d03936efd57422ba04f925cdc7b4c1af7590e40ab0020003988d044d3c5c33010400b488c3e5f83f4d561f317817538d9d0397981e9aef1321ca68ebfae1cf8b7d388e19f4b5a24a82e2fbbf1c6c26557a6c5845307a03d815756f564ac7325b02bc83e87d5480a8fae848f07cb891f2d51ce7df83dcafdc12324517c86d472cc0ee10d47a68fd1d9ae49a6c19bbd36d82af597a0d88cc9c49de9df4e696fc1f0b5d0011010001b42754657374204b6579203220285253412c20656e637279707465642070726976617465206b65792988b804130102002205024d3c5c33021b03060b090807030206150802090a0b0416020301021e01021780000a0910d4984f961e35246b98940400908a73b6a6169f700434f076c6c79015a49bee37130eaf23aaa3cfa9ce60bfe4acaa7bc95f1146ada5867e0079babb38804891f4f0b8ebca57a86b249dee786161a755b7a342e68ccf3f78ed6440a93a6626beb9a37aa66afcd4f888790cb4bb46d94a4ae3eb3d7d3e6b00f6bfec940303e89ec5b32a1eaaacce66497d539328b0020003b88d044d3c5c33010400a4e913f9442abcc7f1804ccab27d2f787ffa592077ca935a8bb23165bd8d57576acac647cc596b2c3f814518cc8c82953c7a4478f32e0cf645630a5ba38d9618ef2bc3add69d459ae3dece5cab778938d988239f8c5ae437807075e06c828019959c644ff05ef6a5a1dab72227c98e3a040b0cf219026640698d7a13d8538a570011010001889f04180102000905024d3c5c33021b0c000a0910d4984f961e35246b26c703ff7ee29ef53bc1ae1ead533c408fa136db508434e233d6e62be621e031e5940bbd4c08142aed0f82217e7c3e1ec8de574bc06ccf3c36633be41ad78a9eacd209f861cae7b064100758545cc9dd83db71806dc1cfd5fb9ae5c7474bba0c19c44034ae61bae5eca379383339dece94ff56ff7aa44a582f3e5c38f45763af577c0934b0020003"
+
+const testKeys1And2PrivateHex = "9501d8044d3c5c10010400b1d13382944bd5aba23a4312968b5095d14f947f600eb478e14a6fcb16b0e0cac764884909c020bc495cfcc39a935387c661507bdb236a0612fb582cac3af9b29cc2c8c70090616c41b662f4da4c1201e195472eb7f4ae1ccbcbf9940fe21d985e379a5563dde5b9a23d35f1cfaa5790da3b79db26f23695107bfaca8e7b5bcd00110100010003ff4d91393b9a8e3430b14d6209df42f98dc927425b881f1209f319220841273a802a97c7bdb8b3a7740b3ab5866c4d1d308ad0d3a79bd1e883aacf1ac92dfe720285d10d08752a7efe3c609b1d00f17f2805b217be53999a7da7e493bfc3e9618fd17018991b8128aea70a05dbce30e4fbe626aa45775fa255dd9177aabf4df7cf0200c1ded12566e4bc2bb590455e5becfb2e2c9796482270a943343a7835de41080582c2be3caf5981aa838140e97afa40ad652a0b544f83eb1833b0957dce26e47b0200eacd6046741e9ce2ec5beb6fb5e6335457844fb09477f83b050a96be7da043e17f3a9523567ed40e7a521f818813a8b8a72209f1442844843ccc7eb9805442570200bdafe0438d97ac36e773c7162028d65844c4d463e2420aa2228c6e50dc2743c3d6c72d0d782a5173fe7be2169c8a9f4ef8a7cf3e37165e8c61b89c346cdc6c1799d2b41054657374204b6579203120285253412988b804130102002205024d3c5c10021b03060b090807030206150802090a0b0416020301021e01021780000a0910a34d7e18c20c31bbb5b304009cc45fe610b641a2c146331be94dade0a396e73ca725e1b25c21708d9cab46ecca5ccebc23055879df8f99eea39b377962a400f2ebdc36a7c99c333d74aeba346315137c3ff9d0a09b0273299090343048afb8107cf94cbd1400e3026f0ccac7ecebbc4d78588eb3e478fe2754d3ca664bcf3eac96ca4a6b0c8d7df5102f60f6b00200009d01d8044d3c5c10010400b201df61d67487301f11879d514f4248ade90c8f68c7af1284c161098de4c28c2850f1ec7b8e30f959793e571542ffc6532189409cb51c3d30dad78c4ad5165eda18b20d9826d8707d0f742e2ab492103a85bbd9ddf4f5720f6de7064feb0d39ee002219765bb07bcfb8b877f47abe270ddeda4f676108cecb6b9bb2ad484a4f00110100010003fd17a7490c22a79c59281fb7b20f5e6553ec0c1637ae382e8adaea295f50241037f8997cf42c1ce26417e015091451b15424b2c59eb8d4161b0975630408e394d3b00f88d4b4e18e2cc85e8251d4753a27c639c83f5ad4a571c4f19d7cd460b9b73c25ade730c99df09637bd173d8e3e981ac64432078263bb6dc30d3e974150dd0200d0ee05be3d4604d2146fb0457f31ba17c057560785aa804e8ca5530a7cd81d3440d0f4ba6851efcfd3954b7e68908fc0ba47f7ac37bf559c6c168b70d3a7c8cd0200da1c677c4bce06a068070f2b3733b0a714e88d62aa3f9a26c6f5216d48d5c2b5624144f3807c0df30be66b3268eeeca4df1fbded58faf49fc95dc3c35f134f8b01fd1396b6c0fc1b6c4f0eb8f5e44b8eace1e6073e20d0b8bc5385f86f1cf3f050f66af789f3ef1fc107b7f4421e19e0349c730c68f0a226981f4e889054fdb4dc149e8e889f04180102000905024d3c5c10021b0c000a0910a34d7e18c20c31bb1a03040085c8d62e16d05dc4e9dad64953c8a2eed8b6c12f92b1575eeaa6dcf7be9473dd5b24b37b6dffbb4e7c99ed1bd3cb11634be19b3e6e207bed7505c7ca111ccf47cb323bf1f8851eb6360e8034cbff8dd149993c959de89f8f77f38e7e98b8e3076323aa719328e2b408db5ec0d03936efd57422ba04f925cdc7b4c1af7590e40ab00200009501fe044d3c5c33010400b488c3e5f83f4d561f317817538d9d0397981e9aef1321ca68ebfae1cf8b7d388e19f4b5a24a82e2fbbf1c6c26557a6c5845307a03d815756f564ac7325b02bc83e87d5480a8fae848f07cb891f2d51ce7df83dcafdc12324517c86d472cc0ee10d47a68fd1d9ae49a6c19bbd36d82af597a0d88cc9c49de9df4e696fc1f0b5d0011010001fe030302e9030f3c783e14856063f16938530e148bc57a7aa3f3e4f90df9dceccdc779bc0835e1ad3d006e4a8d7b36d08b8e0de5a0d947254ecfbd22037e6572b426bcfdc517796b224b0036ff90bc574b5509bede85512f2eefb520fb4b02aa523ba739bff424a6fe81c5041f253f8d757e69a503d3563a104d0d49e9e890b9d0c26f96b55b743883b472caa7050c4acfd4a21f875bdf1258d88bd61224d303dc9df77f743137d51e6d5246b88c406780528fd9a3e15bab5452e5b93970d9dcc79f48b38651b9f15bfbcf6da452837e9cc70683d1bdca94507870f743e4ad902005812488dd342f836e72869afd00ce1850eea4cfa53ce10e3608e13d3c149394ee3cbd0e23d018fcbcb6e2ec5a1a22972d1d462ca05355d0d290dd2751e550d5efb38c6c8
9686344df64852bf4ff86638708f644e8ec6bd4af9b50d8541cb91891a431326ab2e332faa7ae86cfb6e0540aa63160c1e5cdd5a4add518b303fff0a20117c6bc77f7cfbaf36b04c865c6c2b42754657374204b6579203220285253412c20656e637279707465642070726976617465206b65792988b804130102002205024d3c5c33021b03060b090807030206150802090a0b0416020301021e01021780000a0910d4984f961e35246b98940400908a73b6a6169f700434f076c6c79015a49bee37130eaf23aaa3cfa9ce60bfe4acaa7bc95f1146ada5867e0079babb38804891f4f0b8ebca57a86b249dee786161a755b7a342e68ccf3f78ed6440a93a6626beb9a37aa66afcd4f888790cb4bb46d94a4ae3eb3d7d3e6b00f6bfec940303e89ec5b32a1eaaacce66497d539328b00200009d01fe044d3c5c33010400a4e913f9442abcc7f1804ccab27d2f787ffa592077ca935a8bb23165bd8d57576acac647cc596b2c3f814518cc8c82953c7a4478f32e0cf645630a5ba38d9618ef2bc3add69d459ae3dece5cab778938d988239f8c5ae437807075e06c828019959c644ff05ef6a5a1dab72227c98e3a040b0cf219026640698d7a13d8538a570011010001fe030302e9030f3c783e148560f936097339ae381d63116efcf802ff8b1c9360767db5219cc987375702a4123fd8657d3e22700f23f95020d1b261eda5257e9a72f9a918e8ef22dd5b3323ae03bbc1923dd224db988cadc16acc04b120a9f8b7e84da9716c53e0334d7b66586ddb9014df604b41be1e960dcfcbc96f4ed150a1a0dd070b9eb14276b9b6be413a769a75b519a53d3ecc0c220e85cd91ca354d57e7344517e64b43b6e29823cbd87eae26e2b2e78e6dedfbb76e3e9f77bcb844f9a8932eb3db2c3f9e44316e6f5d60e9e2a56e46b72abe6b06dc9a31cc63f10023d1f5e12d2a3ee93b675c96f504af0001220991c88db759e231b3320dcedf814dcf723fd9857e3d72d66a0f2af26950b915abdf56c1596f46a325bf17ad4810d3535fb02a259b247ac3dbd4cc3ecf9c51b6c07cebb009c1506fba0a89321ec8683e3fd009a6e551d50243e2d5092fefb3321083a4bad91320dc624bd6b5dddf93553e3d53924c05bfebec1fb4bd47e89a1a889f04180102000905024d3c5c33021b0c000a0910d4984f961e35246b26c703ff7ee29ef53bc1ae1ead533c408fa136db508434e233d6e62be621e031e5940bbd4c08142aed0f82217e7c3e1ec8de574bc06ccf3c36633be41ad78a9eacd209f861cae7b064100758545cc9dd83db71806dc1cfd5fb9ae5c7474bba0c19c44034ae61bae5eca379383339dece94ff56ff7aa44a582f3e5c38f45763af577c0934b0020000"
+
+const dsaElGamalTestKeysHex = "9501e1044dfcb16a110400aa3e5c1a1f43dd28c2ffae8abf5cfce555ee874134d8ba0a0f7b868ce2214beddc74e5e1e21ded354a95d18acdaf69e5e342371a71fbb9093162e0c5f3427de413a7f2c157d83f5cd2f9d791256dc4f6f0e13f13c3302af27f2384075ab3021dff7a050e14854bbde0a1094174855fc02f0bae8e00a340d94a1f22b32e48485700a0cec672ac21258fb95f61de2ce1af74b2c4fa3e6703ff698edc9be22c02ae4d916e4fa223f819d46582c0516235848a77b577ea49018dcd5e9e15cff9dbb4663a1ae6dd7580fa40946d40c05f72814b0f88481207e6c0832c3bded4853ebba0a7e3bd8e8c66df33d5a537cd4acf946d1080e7a3dcea679cb2b11a72a33a2b6a9dc85f466ad2ddf4c3db6283fa645343286971e3dd700703fc0c4e290d45767f370831a90187e74e9972aae5bff488eeff7d620af0362bfb95c1a6c3413ab5d15a2e4139e5d07a54d72583914661ed6a87cce810be28a0aa8879a2dd39e52fb6fe800f4f181ac7e328f740cde3d09a05cecf9483e4cca4253e60d4429ffd679d9996a520012aad119878c941e3cf151459873bdfc2a9563472fe0303027a728f9feb3b864260a1babe83925ce794710cfd642ee4ae0e5b9d74cee49e9c67b6cd0ea5dfbb582132195a121356a1513e1bca73e5b80c58c7ccb4164453412f456c47616d616c2054657374204b65792031886204131102002205024dfcb16a021b03060b090807030206150802090a0b0416020301021e01021780000a091033af447ccd759b09fadd00a0b8fd6f5a790bad7e9f2dbb7632046dc4493588db009c087c6a9ba9f7f49fab221587a74788c00db4889ab00200009d0157044dfcb16a1004008dec3f9291205255ccff8c532318133a6840739dd68b03ba942676f9038612071447bf07d00d559c5c0875724ea16a4c774f80d8338b55fca691a0522e530e604215b467bbc9ccfd483a1da99d7bc2648b4318fdbd27766fc8bfad3fddb37c62b8ae7ccfe9577e9b8d1e77c1d417ed2c2ef02d52f4da11600d85d3229607943700030503ff506c94c87c8cab778e963b76cf63770f0a79bf48fb49d3b4e52234620fc9f7657f9f8d56c96a2b7c7826ae6b57ebb2221a3fe154b03b6637cea7e6d98e3e45d87cf8dc432f723d3d71f89c5192ac8d7290684d2c25ce55846a80c9a7823f6acd9bb29fa6cd71f20bc90eccfca20451d0c976e460e672b000df49466408d527affe0303027a728f9feb3b864260abd761730327bca2aaa4ea0525c175e92bf240682a0e83b226f97ecb2e935b62c9a133858ce31b271fa8eb41f6a1b3cd72a63025ce1a75ee4180dcc284884904181102000905024dfcb16a021b0c000a091033af447ccd759b09dd0b009e3c3e7296092c81bee5a19929462caaf2fff3ae26009e218c437a2340e7ea628149af1ec98ec091a43992b00200009501e1044dfcb1be1104009f61faa61aa43df75d128cbe53de528c4aec49ce9360c992e70c77072ad5623de0a3a6212771b66b39a30dad6781799e92608316900518ec01184a85d872365b7d2ba4bacfb5882ea3c2473d3750dc6178cc1cf82147fb58caa28b28e9f12f6d1efcb0534abed644156c91cca4ab78834268495160b2400bc422beb37d237c2300a0cac94911b6d493bda1e1fbc6feeca7cb7421d34b03fe22cec6ccb39675bb7b94a335c2b7be888fd3906a1125f33301d8aa6ec6ee6878f46f73961c8d57a3e9544d8ef2a2cbfd4d52da665b1266928cfe4cb347a58c412815f3b2d2369dec04b41ac9a71cc9547426d5ab941cccf3b18575637ccfb42df1a802df3cfe0a999f9e7109331170e3a221991bf868543960f8c816c28097e503fe319db10fb98049f3a57d7c80c420da66d56f3644371631fad3f0ff4040a19a4fedc2d07727a1b27576f75a4d28c47d8246f27071e12d7a8de62aad216ddbae6aa02efd6b8a3e2818cda48526549791ab277e447b3a36c57cefe9b592f5eab73959743fcc8e83cbefec03a329b55018b53eec196765ae40ef9e20521a603c551efe0303020950d53a146bf9c66034d00c23130cce95576a2ff78016ca471276e8227fb30b1ffbd92e61804fb0c3eff9e30b1a826ee8f3e4730b4d86273ca977b4164453412f456c47616d616c2054657374204b65792032886204131102002205024dfcb1be021b03060b090807030206150802090a0b0416020301021e01021780000a0910a86bf526325b21b22bd9009e34511620415c974750a20df5cb56b182f3b48e6600a0a9466cb1a1305a84953445f77d461593f1d42bc1b00200009d0157044dfcb1be1004009565a951da1ee87119d600c077198f1c1bceb0f7aa54552489298e41ff788fa8f0d43a69871f0f6f77ebdfb14a4260cf9fbeb65d5844b4272a1904dd95136d06c3da745dc46327dd44a0f16f60135914368c8039a34033862261806bb2c5ce1152e28402546
97872c85441ccb7321431d75a747a4bfb1d2c66362b51ce76311700030503fc0ea76601c196768070b7365a200e6ddb09307f262d5f39eec467b5f5784e22abdf1aa49226f59ab37cb49969d8f5230ea65caf56015abda62604544ed526c5c522bf92bed178a078789f6c807b6d34885688024a5bed9e9f8c58d11d4b82487b44c5f470c5606806a0443b79cadb45e0f897a561a53f724e5349b9267c75ca17fe0303020950d53a146bf9c660bc5f4ce8f072465e2d2466434320c1e712272fafc20e342fe7608101580fa1a1a367e60486a7cd1246b7ef5586cf5e10b32762b710a30144f12dd17dd4884904181102000905024dfcb1be021b0c000a0910a86bf526325b21b2904c00a0b2b66b4b39ccffda1d10f3ea8d58f827e30a8b8e009f4255b2d8112a184e40cde43a34e8655ca7809370b0020000"
+
+const signedMessageHex = "a3019bc0cbccc0c4b8d8b74ee2108fe16ec6d3ca490cbe362d3f8333d3f352531472538b8b13d353b97232f352158c20943157c71c16064626063656269052062e4e01987e9b6fccff4b7df3a34c534b23e679cbec3bc0f8f6e64dfb4b55fe3f8efa9ce110ddb5cd79faf1d753c51aecfa669f7e7aa043436596cccc3359cb7dd6bbe9ecaa69e5989d9e57209571edc0b2fa7f57b9b79a64ee6e99ce1371395fee92fec2796f7b15a77c386ff668ee27f6d38f0baa6c438b561657377bf6acff3c5947befd7bf4c196252f1d6e5c524d0300"
+
+const signedTextMessageHex = "a3019bc0cbccc8c4b8d8b74ee2108fe16ec6d36a250cbece0c178233d3f352531472538b8b13d35379b97232f352158ca0b4312f57c71c1646462606365626906a062e4e019811591798ff99bf8afee860b0d8a8c2a85c3387e3bcf0bb3b17987f2bbcfab2aa526d930cbfd3d98757184df3995c9f3e7790e36e3e9779f06089d4c64e9e47dd6202cb6e9bc73c5d11bb59fbaf89d22d8dc7cf199ddf17af96e77c5f65f9bbed56f427bd8db7af37f6c9984bf9385efaf5f184f986fb3e6adb0ecfe35bbf92d16a7aa2a344fb0bc52fb7624f0200"
+
+const signedEncryptedMessageHex = "848c032a67d68660df41c70103ff5789d0de26b6a50c985a02a13131ca829c413a35d0e6fa8d6842599252162808ac7439c72151c8c6183e76923fe3299301414d0c25a2f06a2257db3839e7df0ec964773f6e4c4ac7ff3b48c444237166dd46ba8ff443a5410dc670cb486672fdbe7c9dfafb75b4fea83af3a204fe2a7dfa86bd20122b4f3d2646cbeecb8f7be8d2c03b018bd210b1d3791e1aba74b0f1034e122ab72e760492c192383cf5e20b5628bd043272d63df9b923f147eb6091cd897553204832aba48fec54aa447547bb16305a1024713b90e77fd0065f1918271947549205af3c74891af22ee0b56cd29bfec6d6e351901cd4ab3ece7c486f1e32a792d4e474aed98ee84b3f591c7dff37b64e0ecd68fd036d517e412dcadf85840ce184ad7921ad446c4ee28db80447aea1ca8d4f574db4d4e37688158ddd19e14ee2eab4873d46947d65d14a23e788d912cf9a19624ca7352469b72a83866b7c23cb5ace3deab3c7018061b0ba0f39ed2befe27163e5083cf9b8271e3e3d52cc7ad6e2a3bd81d4c3d7022f8d"
+
+const signedEncryptedMessage2Hex = "85010e03cf6a7abcd43e36731003fb057f5495b79db367e277cdbe4ab90d924ddee0c0381494112ff8c1238fb0184af35d1731573b01bc4c55ecacd2aafbe2003d36310487d1ecc9ac994f3fada7f9f7f5c3a64248ab7782906c82c6ff1303b69a84d9a9529c31ecafbcdb9ba87e05439897d87e8a2a3dec55e14df19bba7f7bd316291c002ae2efd24f83f9e3441203fc081c0c23dc3092a454ca8a082b27f631abf73aca341686982e8fbda7e0e7d863941d68f3de4a755c2964407f4b5e0477b3196b8c93d551dd23c8beef7d0f03fbb1b6066f78907faf4bf1677d8fcec72651124080e0b7feae6b476e72ab207d38d90b958759fdedfc3c6c35717c9dbfc979b3cfbbff0a76d24a5e57056bb88acbd2a901ef64bc6e4db02adc05b6250ff378de81dca18c1910ab257dff1b9771b85bb9bbe0a69f5989e6d1710a35e6dfcceb7d8fb5ccea8db3932b3d9ff3fe0d327597c68b3622aec8e3716c83a6c93f497543b459b58ba504ed6bcaa747d37d2ca746fe49ae0a6ce4a8b694234e941b5159ff8bd34b9023da2814076163b86f40eed7c9472f81b551452d5ab87004a373c0172ec87ea6ce42ccfa7dbdad66b745496c4873d8019e8c28d6b3"
+
+const symmetricallyEncryptedCompressedHex = "8c0d04030302eb4a03808145d0d260c92f714339e13de5a79881216431925bf67ee2898ea61815f07894cd0703c50d0a76ef64d482196f47a8bc729af9b80bb6"
+
+const dsaTestKeyHex = "9901a2044d6c49de110400cb5ce438cf9250907ac2ba5bf6547931270b89f7c4b53d9d09f4d0213a5ef2ec1f26806d3d259960f872a4a102ef1581ea3f6d6882d15134f21ef6a84de933cc34c47cc9106efe3bd84c6aec12e78523661e29bc1a61f0aab17fa58a627fd5fd33f5149153fbe8cd70edf3d963bc287ef875270ff14b5bfdd1bca4483793923b00a0fe46d76cb6e4cbdc568435cd5480af3266d610d303fe33ae8273f30a96d4d34f42fa28ce1112d425b2e3bf7ea553d526e2db6b9255e9dc7419045ce817214d1a0056dbc8d5289956a4b1b69f20f1105124096e6a438f41f2e2495923b0f34b70642607d45559595c7fe94d7fa85fc41bf7d68c1fd509ebeaa5f315f6059a446b9369c277597e4f474a9591535354c7e7f4fd98a08aa60400b130c24ff20bdfbf683313f5daebf1c9b34b3bdadfc77f2ddd72ee1fb17e56c473664bc21d66467655dd74b9005e3a2bacce446f1920cd7017231ae447b67036c9b431b8179deacd5120262d894c26bc015bffe3d827ba7087ad9b700d2ca1f6d16cc1786581e5dd065f293c31209300f9b0afcc3f7c08dd26d0a22d87580b4db41054657374204b65792033202844534129886204131102002205024d6c49de021b03060b090807030206150802090a0b0416020301021e01021780000a0910338934250ccc03607e0400a0bdb9193e8a6b96fc2dfc108ae848914b504481f100a09c4dc148cb693293a67af24dd40d2b13a9e36794"
+
+const dsaTestKeyPrivateHex = "9501bb044d6c49de110400cb5ce438cf9250907ac2ba5bf6547931270b89f7c4b53d9d09f4d0213a5ef2ec1f26806d3d259960f872a4a102ef1581ea3f6d6882d15134f21ef6a84de933cc34c47cc9106efe3bd84c6aec12e78523661e29bc1a61f0aab17fa58a627fd5fd33f5149153fbe8cd70edf3d963bc287ef875270ff14b5bfdd1bca4483793923b00a0fe46d76cb6e4cbdc568435cd5480af3266d610d303fe33ae8273f30a96d4d34f42fa28ce1112d425b2e3bf7ea553d526e2db6b9255e9dc7419045ce817214d1a0056dbc8d5289956a4b1b69f20f1105124096e6a438f41f2e2495923b0f34b70642607d45559595c7fe94d7fa85fc41bf7d68c1fd509ebeaa5f315f6059a446b9369c277597e4f474a9591535354c7e7f4fd98a08aa60400b130c24ff20bdfbf683313f5daebf1c9b34b3bdadfc77f2ddd72ee1fb17e56c473664bc21d66467655dd74b9005e3a2bacce446f1920cd7017231ae447b67036c9b431b8179deacd5120262d894c26bc015bffe3d827ba7087ad9b700d2ca1f6d16cc1786581e5dd065f293c31209300f9b0afcc3f7c08dd26d0a22d87580b4d00009f592e0619d823953577d4503061706843317e4fee083db41054657374204b65792033202844534129886204131102002205024d6c49de021b03060b090807030206150802090a0b0416020301021e01021780000a0910338934250ccc03607e0400a0bdb9193e8a6b96fc2dfc108ae848914b504481f100a09c4dc148cb693293a67af24dd40d2b13a9e36794"
+
+const armoredPrivateKeyBlock = `-----BEGIN PGP PRIVATE KEY BLOCK-----
+Version: GnuPG v1.4.10 (GNU/Linux)
+
+lQHYBE2rFNoBBADFwqWQIW/DSqcB4yCQqnAFTJ27qS5AnB46ccAdw3u4Greeu3Bp
+idpoHdjULy7zSKlwR1EA873dO/k/e11Ml3dlAFUinWeejWaK2ugFP6JjiieSsrKn
+vWNicdCS4HTWn0X4sjl0ZiAygw6GNhqEQ3cpLeL0g8E9hnYzJKQ0LWJa0QARAQAB
+AAP/TB81EIo2VYNmTq0pK1ZXwUpxCrvAAIG3hwKjEzHcbQznsjNvPUihZ+NZQ6+X
+0HCfPAdPkGDCLCb6NavcSW+iNnLTrdDnSI6+3BbIONqWWdRDYJhqZCkqmG6zqSfL
+IdkJgCw94taUg5BWP/AAeQrhzjChvpMQTVKQL5mnuZbUCeMCAN5qrYMP2S9iKdnk
+VANIFj7656ARKt/nf4CBzxcpHTyB8+d2CtPDKCmlJP6vL8t58Jmih+kHJMvC0dzn
+gr5f5+sCAOOe5gt9e0am7AvQWhdbHVfJU0TQJx+m2OiCJAqGTB1nvtBLHdJnfdC9
+TnXXQ6ZXibqLyBies/xeY2sCKL5qtTMCAKnX9+9d/5yQxRyrQUHt1NYhaXZnJbHx
+q4ytu0eWz+5i68IYUSK69jJ1NWPM0T6SkqpB3KCAIv68VFm9PxqG1KmhSrQIVGVz
+dCBLZXmIuAQTAQIAIgUCTasU2gIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AA
+CgkQO9o98PRieSoLhgQAkLEZex02Qt7vGhZzMwuN0R22w3VwyYyjBx+fM3JFETy1
+ut4xcLJoJfIaF5ZS38UplgakHG0FQ+b49i8dMij0aZmDqGxrew1m4kBfjXw9B/v+
+eIqpODryb6cOSwyQFH0lQkXC040pjq9YqDsO5w0WYNXYKDnzRV0p4H1pweo2VDid
+AdgETasU2gEEAN46UPeWRqKHvA99arOxee38fBt2CI08iiWyI8T3J6ivtFGixSqV
+bRcPxYO/qLpVe5l84Nb3X71GfVXlc9hyv7CD6tcowL59hg1E/DC5ydI8K8iEpUmK
+/UnHdIY5h8/kqgGxkY/T/hgp5fRQgW1ZoZxLajVlMRZ8W4tFtT0DeA+JABEBAAEA
+A/0bE1jaaZKj6ndqcw86jd+QtD1SF+Cf21CWRNeLKnUds4FRRvclzTyUMuWPkUeX
+TaNNsUOFqBsf6QQ2oHUBBK4VCHffHCW4ZEX2cd6umz7mpHW6XzN4DECEzOVksXtc
+lUC1j4UB91DC/RNQqwX1IV2QLSwssVotPMPqhOi0ZLNY7wIA3n7DWKInxYZZ4K+6
+rQ+POsz6brEoRHwr8x6XlHenq1Oki855pSa1yXIARoTrSJkBtn5oI+f8AzrnN0BN
+oyeQAwIA/7E++3HDi5aweWrViiul9cd3rcsS0dEnksPhvS0ozCJiHsq/6GFmy7J8
+QSHZPteedBnZyNp5jR+H7cIfVN3KgwH/Skq4PsuPhDq5TKK6i8Pc1WW8MA6DXTdU
+nLkX7RGmMwjC0DBf7KWAlPjFaONAX3a8ndnz//fy1q7u2l9AZwrj1qa1iJ8EGAEC
+AAkFAk2rFNoCGwwACgkQO9o98PRieSo2/QP/WTzr4ioINVsvN1akKuekmEMI3LAp
+BfHwatufxxP1U+3Si/6YIk7kuPB9Hs+pRqCXzbvPRrI8NHZBmc8qIGthishdCYad
+AHcVnXjtxrULkQFGbGvhKURLvS9WnzD/m1K2zzwxzkPTzT9/Yf06O6Mal5AdugPL
+VrM0m72/jnpKo04=
+=zNCn
+-----END PGP PRIVATE KEY BLOCK-----`
+
+const e2ePublicKey = `-----BEGIN PGP PUBLIC KEY BLOCK-----
+Charset: UTF-8
+
+xv8AAABSBAAAAAATCCqGSM49AwEHAgME1LRoXSpOxtHXDUdmuvzchyg6005qIBJ4
+sfaSxX7QgH9RV2ONUhC+WiayCNADq+UMzuR/vunSr4aQffXvuGnR383/AAAAFDxk
+Z2lsQHlhaG9vLWluYy5jb20+wv8AAACGBBATCAA4/wAAAAWCVGvAG/8AAAACiwn/
+AAAACZC2VkQCOjdvYf8AAAAFlQgJCgv/AAAAA5YBAv8AAAACngEAAE1BAP0X8veD
+24IjmI5/C6ZAfVNXxgZZFhTAACFX75jUA3oD6AEAzoSwKf1aqH6oq62qhCN/pekX
++WAsVMBhNwzLpqtCRjLO/wAAAFYEAAAAABIIKoZIzj0DAQcCAwT50ain7vXiIRv8
+B1DO3x3cE/aattZ5sHNixJzRCXi2vQIA5QmOxZ6b5jjUekNbdHG3SZi1a2Ak5mfX
+fRxC/5VGAwEIB8L/AAAAZQQYEwgAGP8AAAAFglRrwBz/AAAACZC2VkQCOjdvYQAA
+FJAA9isX3xtGyMLYwp2F3nXm7QEdY5bq5VUcD/RJlj792VwA/1wH0pCzVLl4Q9F9
+ex7En5r7rHR5xwX82Msc+Rq9dSyO
+=7MrZ
+-----END PGP PUBLIC KEY BLOCK-----`
+
+const dsaKeyWithSHA512 = `9901a2044f04b07f110400db244efecc7316553ee08d179972aab87bb1214de7692593fcf5b6feb1c80fba268722dd464748539b85b81d574cd2d7ad0ca2444de4d849b8756bad7768c486c83a824f9bba4af773d11742bdfb4ac3b89ef8cc9452d4aad31a37e4b630d33927bff68e879284a1672659b8b298222fc68f370f3e24dccacc4a862442b9438b00a0ea444a24088dc23e26df7daf8f43cba3bffc4fe703fe3d6cd7fdca199d54ed8ae501c30e3ec7871ea9cdd4cf63cfe6fc82281d70a5b8bb493f922cd99fba5f088935596af087c8d818d5ec4d0b9afa7f070b3d7c1dd32a84fca08d8280b4890c8da1dde334de8e3cad8450eed2a4a4fcc2db7b8e5528b869a74a7f0189e11ef097ef1253582348de072bb07a9fa8ab838e993cef0ee203ff49298723e2d1f549b00559f886cd417a41692ce58d0ac1307dc71d85a8af21b0cf6eaa14baf2922d3a70389bedf17cc514ba0febbd107675a372fe84b90162a9e88b14d4b1c6be855b96b33fb198c46f058568817780435b6936167ebb3724b680f32bf27382ada2e37a879b3d9de2abe0c3f399350afd1ad438883f4791e2e3b4184453412068617368207472756e636174696f6e207465737488620413110a002205024f04b07f021b03060b090807030206150802090a0b0416020301021e01021780000a0910ef20e0cefca131581318009e2bf3bf047a44d75a9bacd00161ee04d435522397009a03a60d51bd8a568c6c021c8d7cf1be8d990d6417b0020003`
+
+const unknownHashFunctionHex = `8a00000040040001990006050253863c24000a09103b4fe6acc0b21f32ffff01010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101`
+
+const missingHashFunctionHex = `8a00000040040001030006050253863c24000a09103b4fe6acc0b21f32ffff0101010101010101010101010101010101010101010101010101010101010101010101010101`
+
+const campbellQuine = `a0b001000300fcffa0b001000d00f2ff000300fcffa0b001000d00f2ff8270a01c00000500faff8270a01c00000500faff000500faff001400ebff8270a01c00000500faff000500faff001400ebff428821c400001400ebff428821c400001400ebff428821c400001400ebff428821c400001400ebff428821c400000000ffff000000ffff000b00f4ff428821c400000000ffff000000ffff000b00f4ff0233214c40000100feff000233214c40000100feff0000`
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/s2k/s2k.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/s2k/s2k.go
new file mode 100644
index 00000000000..0e8641ed1b4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/s2k/s2k.go
@@ -0,0 +1,273 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package s2k implements the various OpenPGP string-to-key transforms as
+// specified in RFC 4880 section 3.7.1.
+package s2k // import "golang.org/x/crypto/openpgp/s2k"
+
+import (
+ "crypto"
+ "hash"
+ "io"
+ "strconv"
+
+ "golang.org/x/crypto/openpgp/errors"
+)
+
+// Config collects configuration parameters for s2k key-stretching
+// transformations. A nil *Config is valid and results in all default
+// values. Currently, Config is used only by the Serialize function in
+// this package.
+type Config struct {
+ // Hash is the default hash function to be used. If
+ // nil, SHA1 is used.
+ Hash crypto.Hash
+ // S2KCount is only used for symmetric encryption. It
+ // determines the strength of the passphrase stretching when
+ // the said passphrase is hashed to produce a key. S2KCount
+ // should be between 1024 and 65011712, inclusive. If Config
+	// is nil or S2KCount is 0, the value 65536 is used. Not all
+	// values in the above range can be represented. S2KCount will
+	// be rounded up to the next representable value if it cannot
+	// be encoded exactly. When set, it is strongly encouraged to
+ // use a value that is at least 65536. See RFC 4880 Section
+ // 3.7.1.3.
+ S2KCount int
+}
+
+func (c *Config) hash() crypto.Hash {
+ if c == nil || uint(c.Hash) == 0 {
+ // SHA1 is the historical default in this package.
+ return crypto.SHA1
+ }
+
+ return c.Hash
+}
+
+func (c *Config) encodedCount() uint8 {
+ if c == nil || c.S2KCount == 0 {
+		return 96 // The common case, corresponding to a count of 65536.
+ }
+
+ i := c.S2KCount
+ switch {
+ // Behave like GPG. Should we make 65536 the lowest value used?
+ case i < 1024:
+ i = 1024
+ case i > 65011712:
+ i = 65011712
+ }
+
+ return encodeCount(i)
+}
+
+// encodeCount converts an iterative "count" in the range 1024 to
+// 65011712, inclusive, to an encoded count. The return value is the
+// octet that is actually stored in the GPG file. encodeCount panics
+// if i is not in the above range (encodedCount above takes care to
+// pass i in the correct range). See RFC 4880 Section 3.7.1.3.
+func encodeCount(i int) uint8 {
+ if i < 1024 || i > 65011712 {
+ panic("count arg i outside the required range")
+ }
+
+ for encoded := 0; encoded < 256; encoded++ {
+ count := decodeCount(uint8(encoded))
+ if count >= i {
+ return uint8(encoded)
+ }
+ }
+
+ return 255
+}
+
+// decodeCount returns the s2k mode 3 iterative "count" corresponding to
+// the encoded octet c.
+func decodeCount(c uint8) int {
+ return (16 + int(c&15)) << (uint32(c>>4) + 6)
+}
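A quick sketch of how this count encoding behaves (illustrative only; encodeCount and decodeCount are unexported, so such calls would need to run inside package s2k, e.g. in a test, with fmt imported):

	// Low nibble = mantissa (16..31), high nibble = exponent bias.
	fmt.Println(decodeCount(96))                 // (16+0)<<(6+6)   = 65536, the default
	fmt.Println(decodeCount(255))                // (16+15)<<(15+6) = 65011712, the maximum
	fmt.Println(decodeCount(encodeCount(65537))) // 69632: 65537 rounds up to octet 97, i.e. (16+1)<<(6+6)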
+
+// Simple writes to out the result of computing the Simple S2K function (RFC
+// 4880, section 3.7.1.1) using the given hash and input passphrase.
+func Simple(out []byte, h hash.Hash, in []byte) {
+ Salted(out, h, in, nil)
+}
+
+var zero [1]byte
+
+// Salted writes to out the result of computing the Salted S2K function (RFC
+// 4880, section 3.7.1.2) using the given hash, input passphrase and salt.
+func Salted(out []byte, h hash.Hash, in []byte, salt []byte) {
+ done := 0
+ var digest []byte
+
+ for i := 0; done < len(out); i++ {
+ h.Reset()
+ for j := 0; j < i; j++ {
+ h.Write(zero[:])
+ }
+ h.Write(salt)
+ h.Write(in)
+ digest = h.Sum(digest[:0])
+ n := copy(out[done:], digest)
+ done += n
+ }
+}
+
+// Iterated writes to out the result of computing the Iterated and Salted S2K
+// function (RFC 4880, section 3.7.1.3) using the given hash, input passphrase,
+// salt and iteration count.
+func Iterated(out []byte, h hash.Hash, in []byte, salt []byte, count int) {
+ combined := make([]byte, len(in)+len(salt))
+ copy(combined, salt)
+ copy(combined[len(salt):], in)
+
+ if count < len(combined) {
+ count = len(combined)
+ }
+
+ done := 0
+ var digest []byte
+ for i := 0; done < len(out); i++ {
+ h.Reset()
+ for j := 0; j < i; j++ {
+ h.Write(zero[:])
+ }
+ written := 0
+ for written < count {
+ if written+len(combined) > count {
+ todo := count - written
+ h.Write(combined[:todo])
+ written = count
+ } else {
+ h.Write(combined)
+ written += len(combined)
+ }
+ }
+ digest = h.Sum(digest[:0])
+ n := copy(out[done:], digest)
+ done += n
+ }
+}
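A minimal, self-contained sketch of calling the exported Iterated function; the passphrase, salt, and key size are illustration values, and the count of 65536 matches the package default (encoded octet 96):

package main

import (
	"crypto/sha256"
	"fmt"

	"golang.org/x/crypto/openpgp/s2k"
)

func main() {
	key := make([]byte, 32)
	salt := []byte{1, 2, 3, 4, 5, 6, 7, 8}
	// count is the number of bytes hashed, not an iteration count; it is
	// clamped up to at least len(salt)+len(passphrase).
	s2k.Iterated(key, sha256.New(), []byte("correct horse"), salt, 65536)
	fmt.Printf("derived key: %x\n", key)
}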
+
+// Parse reads a binary specification for a string-to-key transformation from r
+// and returns a function which performs that transform.
+func Parse(r io.Reader) (f func(out, in []byte), err error) {
+ var buf [9]byte
+
+ _, err = io.ReadFull(r, buf[:2])
+ if err != nil {
+ return
+ }
+
+ hash, ok := HashIdToHash(buf[1])
+ if !ok {
+ return nil, errors.UnsupportedError("hash for S2K function: " + strconv.Itoa(int(buf[1])))
+ }
+ if !hash.Available() {
+ return nil, errors.UnsupportedError("hash not available: " + strconv.Itoa(int(hash)))
+ }
+ h := hash.New()
+
+ switch buf[0] {
+ case 0:
+ f := func(out, in []byte) {
+ Simple(out, h, in)
+ }
+ return f, nil
+ case 1:
+ _, err = io.ReadFull(r, buf[:8])
+ if err != nil {
+ return
+ }
+ f := func(out, in []byte) {
+ Salted(out, h, in, buf[:8])
+ }
+ return f, nil
+ case 3:
+ _, err = io.ReadFull(r, buf[:9])
+ if err != nil {
+ return
+ }
+ count := decodeCount(buf[8])
+ f := func(out, in []byte) {
+ Iterated(out, h, in, buf[:8], count)
+ }
+ return f, nil
+ }
+
+ return nil, errors.UnsupportedError("S2K function")
+}
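A sketch of Parse in use, built from the iterated-and-salted specifier in the parseTests table of the vendored s2k_test.go below (mode 3, SHA-1, salt 0102030405060708, count octet f1); the expected 4-byte output f2a57b7c is that table's test vector:

package main

import (
	"bytes"
	_ "crypto/sha1" // registers crypto.SHA1 so hash.Available() is true
	"encoding/hex"
	"fmt"

	"golang.org/x/crypto/openpgp/s2k"
)

func main() {
	spec, _ := hex.DecodeString("03020102030405060708f1")
	f, err := s2k.Parse(bytes.NewReader(spec))
	if err != nil {
		panic(err)
	}
	out := make([]byte, 4)
	f(out, []byte("hello"))
	fmt.Printf("%x\n", out) // f2a57b7c
}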
+
+// Serialize salts and stretches the given passphrase and writes the
+// resulting key into key. It also serializes an S2K descriptor to
+// w. The key stretching can be configured with c, which may be
+// nil. In that case, sensible defaults will be used.
+func Serialize(w io.Writer, key []byte, rand io.Reader, passphrase []byte, c *Config) error {
+ var buf [11]byte
+ buf[0] = 3 /* iterated and salted */
+ buf[1], _ = HashToHashId(c.hash())
+ salt := buf[2:10]
+ if _, err := io.ReadFull(rand, salt); err != nil {
+ return err
+ }
+ encodedCount := c.encodedCount()
+ count := decodeCount(encodedCount)
+ buf[10] = encodedCount
+ if _, err := w.Write(buf[:]); err != nil {
+ return err
+ }
+
+ Iterated(key, c.hash().New(), passphrase, salt, count)
+ return nil
+}
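A round-trip sketch of Serialize followed by Parse, mirroring what testSerializeConfig in the vendored s2k_test.go checks; the config values are illustrative:

package main

import (
	"bytes"
	"crypto"
	"crypto/rand"
	_ "crypto/sha256" // registers crypto.SHA256
	"fmt"

	"golang.org/x/crypto/openpgp/s2k"
)

func main() {
	var spec bytes.Buffer
	key := make([]byte, 16)
	cfg := &s2k.Config{Hash: crypto.SHA256, S2KCount: 65536}
	if err := s2k.Serialize(&spec, key, rand.Reader, []byte("passphrase"), cfg); err != nil {
		panic(err)
	}

	f, err := s2k.Parse(&spec)
	if err != nil {
		panic(err)
	}
	key2 := make([]byte, 16)
	f(key2, []byte("passphrase"))
	fmt.Println(bytes.Equal(key, key2)) // true: same hash, salt and count
}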
+
+// hashToHashIdMapping contains pairs relating OpenPGP's hash identifier with
+// Go's crypto.Hash type. See RFC 4880, section 9.4.
+var hashToHashIdMapping = []struct {
+ id byte
+ hash crypto.Hash
+ name string
+}{
+ {1, crypto.MD5, "MD5"},
+ {2, crypto.SHA1, "SHA1"},
+ {3, crypto.RIPEMD160, "RIPEMD160"},
+ {8, crypto.SHA256, "SHA256"},
+ {9, crypto.SHA384, "SHA384"},
+ {10, crypto.SHA512, "SHA512"},
+ {11, crypto.SHA224, "SHA224"},
+}
+
+// HashIdToHash returns a crypto.Hash which corresponds to the given OpenPGP
+// hash id.
+func HashIdToHash(id byte) (h crypto.Hash, ok bool) {
+ for _, m := range hashToHashIdMapping {
+ if m.id == id {
+ return m.hash, true
+ }
+ }
+ return 0, false
+}
+
+// HashIdToString returns the name of the hash function corresponding to the
+// given OpenPGP hash id. The boolean return is false if id is unknown.
+func HashIdToString(id byte) (name string, ok bool) {
+ for _, m := range hashToHashIdMapping {
+ if m.id == id {
+ return m.name, true
+ }
+ }
+
+ return "", false
+}
+
+// HashToHashId returns an OpenPGP hash id which corresponds to the given Hash.
+func HashToHashId(h crypto.Hash) (id byte, ok bool) {
+ for _, m := range hashToHashIdMapping {
+ if m.hash == h {
+ return m.id, true
+ }
+ }
+ return 0, false
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/s2k/s2k_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/s2k/s2k_test.go
new file mode 100644
index 00000000000..183d26056b1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/s2k/s2k_test.go
@@ -0,0 +1,137 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package s2k
+
+import (
+ "bytes"
+ "crypto"
+ _ "crypto/md5"
+ "crypto/rand"
+ "crypto/sha1"
+ _ "crypto/sha256"
+ _ "crypto/sha512"
+ "encoding/hex"
+ "testing"
+
+ _ "golang.org/x/crypto/ripemd160"
+)
+
+var saltedTests = []struct {
+ in, out string
+}{
+ {"hello", "10295ac1"},
+ {"world", "ac587a5e"},
+ {"foo", "4dda8077"},
+ {"bar", "bd8aac6b9ea9cae04eae6a91c6133b58b5d9a61c14f355516ed9370456"},
+ {"x", "f1d3f289"},
+ {"xxxxxxxxxxxxxxxxxxxxxxx", "e00d7b45"},
+}
+
+func TestSalted(t *testing.T) {
+ h := sha1.New()
+ salt := [4]byte{1, 2, 3, 4}
+
+ for i, test := range saltedTests {
+ expected, _ := hex.DecodeString(test.out)
+ out := make([]byte, len(expected))
+ Salted(out, h, []byte(test.in), salt[:])
+ if !bytes.Equal(expected, out) {
+ t.Errorf("#%d, got: %x want: %x", i, out, expected)
+ }
+ }
+}
+
+var iteratedTests = []struct {
+ in, out string
+}{
+ {"hello", "83126105"},
+ {"world", "6fa317f9"},
+ {"foo", "8fbc35b9"},
+ {"bar", "2af5a99b54f093789fd657f19bd245af7604d0f6ae06f66602a46a08ae"},
+ {"x", "5a684dfe"},
+ {"xxxxxxxxxxxxxxxxxxxxxxx", "18955174"},
+}
+
+func TestIterated(t *testing.T) {
+ h := sha1.New()
+ salt := [4]byte{4, 3, 2, 1}
+
+ for i, test := range iteratedTests {
+ expected, _ := hex.DecodeString(test.out)
+ out := make([]byte, len(expected))
+ Iterated(out, h, []byte(test.in), salt[:], 31)
+ if !bytes.Equal(expected, out) {
+ t.Errorf("#%d, got: %x want: %x", i, out, expected)
+ }
+ }
+}
+
+var parseTests = []struct {
+ spec, in, out string
+}{
+ /* Simple with SHA1 */
+ {"0002", "hello", "aaf4c61d"},
+ /* Salted with SHA1 */
+ {"01020102030405060708", "hello", "f4f7d67e"},
+ /* Iterated with SHA1 */
+ {"03020102030405060708f1", "hello", "f2a57b7c"},
+}
+
+func TestParse(t *testing.T) {
+ for i, test := range parseTests {
+ spec, _ := hex.DecodeString(test.spec)
+ buf := bytes.NewBuffer(spec)
+ f, err := Parse(buf)
+ if err != nil {
+ t.Errorf("%d: Parse returned error: %s", i, err)
+ continue
+ }
+
+ expected, _ := hex.DecodeString(test.out)
+ out := make([]byte, len(expected))
+ f(out, []byte(test.in))
+ if !bytes.Equal(out, expected) {
+ t.Errorf("%d: output got: %x want: %x", i, out, expected)
+ }
+ if testing.Short() {
+ break
+ }
+ }
+}
+
+func TestSerialize(t *testing.T) {
+ hashes := []crypto.Hash{crypto.MD5, crypto.SHA1, crypto.RIPEMD160,
+ crypto.SHA256, crypto.SHA384, crypto.SHA512, crypto.SHA224}
+ testCounts := []int{-1, 0, 1024, 65536, 4063232, 65011712}
+ for _, h := range hashes {
+ for _, c := range testCounts {
+ testSerializeConfig(t, &Config{Hash: h, S2KCount: c})
+ }
+ }
+}
+
+func testSerializeConfig(t *testing.T, c *Config) {
+ t.Logf("Running testSerializeConfig() with config: %+v", c)
+
+ buf := bytes.NewBuffer(nil)
+ key := make([]byte, 16)
+ passphrase := []byte("testing")
+ err := Serialize(buf, key, rand.Reader, passphrase, c)
+ if err != nil {
+ t.Errorf("failed to serialize: %s", err)
+ return
+ }
+
+ f, err := Parse(buf)
+ if err != nil {
+ t.Errorf("failed to reparse: %s", err)
+ return
+ }
+ key2 := make([]byte, len(key))
+ f(key2, passphrase)
+ if !bytes.Equal(key2, key) {
+		t.Errorf("keys don't match: %x (serialized) vs %x (parsed)", key, key2)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/write.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/write.go
new file mode 100644
index 00000000000..15aaa1a0151
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/write.go
@@ -0,0 +1,378 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package openpgp
+
+import (
+ "crypto"
+ "hash"
+ "io"
+ "strconv"
+ "time"
+
+ "golang.org/x/crypto/openpgp/armor"
+ "golang.org/x/crypto/openpgp/errors"
+ "golang.org/x/crypto/openpgp/packet"
+ "golang.org/x/crypto/openpgp/s2k"
+)
+
+// DetachSign signs message with the private key from signer (which must
+// already have been decrypted) and writes the signature to w.
+// If config is nil, sensible defaults will be used.
+func DetachSign(w io.Writer, signer *Entity, message io.Reader, config *packet.Config) error {
+ return detachSign(w, signer, message, packet.SigTypeBinary, config)
+}
+
+// ArmoredDetachSign signs message with the private key from signer (which
+// must already have been decrypted) and writes an armored signature to w.
+// If config is nil, sensible defaults will be used.
+func ArmoredDetachSign(w io.Writer, signer *Entity, message io.Reader, config *packet.Config) (err error) {
+ return armoredDetachSign(w, signer, message, packet.SigTypeBinary, config)
+}
+
+// DetachSignText signs message (after canonicalising the line endings) with
+// the private key from signer (which must already have been decrypted) and
+// writes the signature to w.
+// If config is nil, sensible defaults will be used.
+func DetachSignText(w io.Writer, signer *Entity, message io.Reader, config *packet.Config) error {
+ return detachSign(w, signer, message, packet.SigTypeText, config)
+}
+
+// ArmoredDetachSignText signs message (after canonicalising the line endings)
+// with the private key from signer (which must already have been decrypted)
+// and writes an armored signature to w.
+// If config is nil, sensible defaults will be used.
+func ArmoredDetachSignText(w io.Writer, signer *Entity, message io.Reader, config *packet.Config) error {
+ return armoredDetachSign(w, signer, message, packet.SigTypeText, config)
+}
+
+func armoredDetachSign(w io.Writer, signer *Entity, message io.Reader, sigType packet.SignatureType, config *packet.Config) (err error) {
+ out, err := armor.Encode(w, SignatureType, nil)
+ if err != nil {
+ return
+ }
+ err = detachSign(out, signer, message, sigType, config)
+ if err != nil {
+ return
+ }
+ return out.Close()
+}
+
+func detachSign(w io.Writer, signer *Entity, message io.Reader, sigType packet.SignatureType, config *packet.Config) (err error) {
+ if signer.PrivateKey == nil {
+ return errors.InvalidArgumentError("signing key doesn't have a private key")
+ }
+ if signer.PrivateKey.Encrypted {
+ return errors.InvalidArgumentError("signing key is encrypted")
+ }
+
+ sig := new(packet.Signature)
+ sig.SigType = sigType
+ sig.PubKeyAlgo = signer.PrivateKey.PubKeyAlgo
+ sig.Hash = config.Hash()
+ sig.CreationTime = config.Now()
+ sig.IssuerKeyId = &signer.PrivateKey.KeyId
+
+ h, wrappedHash, err := hashForSignature(sig.Hash, sig.SigType)
+ if err != nil {
+ return
+ }
+ io.Copy(wrappedHash, message)
+
+ err = sig.Sign(h, signer.PrivateKey, config)
+ if err != nil {
+ return
+ }
+
+ return sig.Serialize(w)
+}
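A usage sketch for the detached-signing entry points above; the file paths are placeholders, and the signing key is assumed to be unencrypted (otherwise PrivateKey.Decrypt must be called first):

package main

import (
	"os"

	"golang.org/x/crypto/openpgp"
)

func main() {
	keyFile, err := os.Open("private.asc") // placeholder path
	if err != nil {
		panic(err)
	}
	entities, err := openpgp.ReadArmoredKeyRing(keyFile)
	if err != nil {
		panic(err)
	}

	message, err := os.Open("message.txt") // placeholder path
	if err != nil {
		panic(err)
	}
	sig, err := os.Create("message.txt.asc")
	if err != nil {
		panic(err)
	}
	// nil config selects the sensible defaults mentioned above.
	if err := openpgp.ArmoredDetachSign(sig, entities[0], message, nil); err != nil {
		panic(err)
	}
	sig.Close()
}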
+
+// FileHints contains metadata about encrypted files. This metadata is, itself,
+// encrypted.
+type FileHints struct {
+ // IsBinary can be set to hint that the contents are binary data.
+ IsBinary bool
+ // FileName hints at the name of the file that should be written. It's
+ // truncated to 255 bytes if longer. It may be empty to suggest that the
+ // file should not be written to disk. It may be equal to "_CONSOLE" to
+ // suggest the data should not be written to disk.
+ FileName string
+ // ModTime contains the modification time of the file, or the zero time if not applicable.
+ ModTime time.Time
+}
+
+// SymmetricallyEncrypt acts like gpg -c: it encrypts a file with a passphrase.
+// The resulting WriteCloser must be closed after the contents of the file have
+// been written.
+// If config is nil, sensible defaults will be used.
+func SymmetricallyEncrypt(ciphertext io.Writer, passphrase []byte, hints *FileHints, config *packet.Config) (plaintext io.WriteCloser, err error) {
+ if hints == nil {
+ hints = &FileHints{}
+ }
+
+ key, err := packet.SerializeSymmetricKeyEncrypted(ciphertext, passphrase, config)
+ if err != nil {
+ return
+ }
+ w, err := packet.SerializeSymmetricallyEncrypted(ciphertext, config.Cipher(), key, config)
+ if err != nil {
+ return
+ }
+
+ literaldata := w
+ if algo := config.Compression(); algo != packet.CompressionNone {
+ var compConfig *packet.CompressionConfig
+ if config != nil {
+ compConfig = config.CompressionConfig
+ }
+ literaldata, err = packet.SerializeCompressed(w, algo, compConfig)
+ if err != nil {
+ return
+ }
+ }
+
+ var epochSeconds uint32
+ if !hints.ModTime.IsZero() {
+ epochSeconds = uint32(hints.ModTime.Unix())
+ }
+ return packet.SerializeLiteral(literaldata, hints.IsBinary, hints.FileName, epochSeconds)
+}
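A short sketch of the gpg -c style workflow described above; the passphrase and message are illustrative, and the output is binary OpenPGP (wrap the writer with armor.Encode if ASCII armor is wanted):

package main

import (
	"bytes"
	"fmt"
	"io"

	"golang.org/x/crypto/openpgp"
)

func main() {
	var out bytes.Buffer
	plaintext, err := openpgp.SymmetricallyEncrypt(&out, []byte("passphrase"), nil, nil)
	if err != nil {
		panic(err)
	}
	if _, err := io.WriteString(plaintext, "hello world\n"); err != nil {
		panic(err)
	}
	// Closing flushes the literal-data and encryption packets.
	if err := plaintext.Close(); err != nil {
		panic(err)
	}
	fmt.Printf("%d bytes of ciphertext\n", out.Len())
}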
+
+// intersectPreferences mutates and returns a prefix of a that contains only
+// the values in the intersection of a and b. The order of a is preserved.
+func intersectPreferences(a []uint8, b []uint8) (intersection []uint8) {
+ var j int
+ for _, v := range a {
+ for _, v2 := range b {
+ if v == v2 {
+ a[j] = v
+ j++
+ break
+ }
+ }
+ }
+
+ return a[:j]
+}
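An illustrative check of the order-preserving behaviour (intersectPreferences is unexported, so this fragment would have to live inside package openpgp, e.g. in a test, with fmt imported):

	a := []uint8{9, 8, 7} // preference order to preserve
	b := []uint8{7, 9}
	fmt.Println(intersectPreferences(a, b)) // [9 7]: a's order kept, 8 dropped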
+
+func hashToHashId(h crypto.Hash) uint8 {
+ v, ok := s2k.HashToHashId(h)
+ if !ok {
+ panic("tried to convert unknown hash")
+ }
+ return v
+}
+
+// Encrypt encrypts a message to a number of recipients and, optionally, signs
+// it. hints contains optional information, itself encrypted, that aids
+// the recipients in processing the message. The resulting WriteCloser must
+// be closed after the contents of the file have been written.
+// If config is nil, sensible defaults will be used.
+func Encrypt(ciphertext io.Writer, to []*Entity, signed *Entity, hints *FileHints, config *packet.Config) (plaintext io.WriteCloser, err error) {
+ var signer *packet.PrivateKey
+ if signed != nil {
+ signKey, ok := signed.signingKey(config.Now())
+ if !ok {
+ return nil, errors.InvalidArgumentError("no valid signing keys")
+ }
+ signer = signKey.PrivateKey
+ if signer == nil {
+ return nil, errors.InvalidArgumentError("no private key in signing key")
+ }
+ if signer.Encrypted {
+ return nil, errors.InvalidArgumentError("signing key must be decrypted")
+ }
+ }
+
+ // These are the possible ciphers that we'll use for the message.
+ candidateCiphers := []uint8{
+ uint8(packet.CipherAES128),
+ uint8(packet.CipherAES256),
+ uint8(packet.CipherCAST5),
+ }
+ // These are the possible hash functions that we'll use for the signature.
+ candidateHashes := []uint8{
+ hashToHashId(crypto.SHA256),
+ hashToHashId(crypto.SHA512),
+ hashToHashId(crypto.SHA1),
+ hashToHashId(crypto.RIPEMD160),
+ }
+ // In the event that a recipient doesn't specify any supported ciphers
+ // or hash functions, these are the ones that we assume that every
+ // implementation supports.
+ defaultCiphers := candidateCiphers[len(candidateCiphers)-1:]
+ defaultHashes := candidateHashes[len(candidateHashes)-1:]
+
+ encryptKeys := make([]Key, len(to))
+ for i := range to {
+ var ok bool
+ encryptKeys[i], ok = to[i].encryptionKey(config.Now())
+ if !ok {
+ return nil, errors.InvalidArgumentError("cannot encrypt a message to key id " + strconv.FormatUint(to[i].PrimaryKey.KeyId, 16) + " because it has no encryption keys")
+ }
+
+ sig := to[i].primaryIdentity().SelfSignature
+
+ preferredSymmetric := sig.PreferredSymmetric
+ if len(preferredSymmetric) == 0 {
+ preferredSymmetric = defaultCiphers
+ }
+ preferredHashes := sig.PreferredHash
+ if len(preferredHashes) == 0 {
+ preferredHashes = defaultHashes
+ }
+ candidateCiphers = intersectPreferences(candidateCiphers, preferredSymmetric)
+ candidateHashes = intersectPreferences(candidateHashes, preferredHashes)
+ }
+
+ if len(candidateCiphers) == 0 || len(candidateHashes) == 0 {
+ return nil, errors.InvalidArgumentError("cannot encrypt because recipient set shares no common algorithms")
+ }
+
+ cipher := packet.CipherFunction(candidateCiphers[0])
+	// If the cipher specified by config is a candidate, we'll use that.
+ configuredCipher := config.Cipher()
+ for _, c := range candidateCiphers {
+ cipherFunc := packet.CipherFunction(c)
+ if cipherFunc == configuredCipher {
+ cipher = cipherFunc
+ break
+ }
+ }
+
+ var hash crypto.Hash
+ for _, hashId := range candidateHashes {
+ if h, ok := s2k.HashIdToHash(hashId); ok && h.Available() {
+ hash = h
+ break
+ }
+ }
+
+ // If the hash specified by config is a candidate, we'll use that.
+ if configuredHash := config.Hash(); configuredHash.Available() {
+ for _, hashId := range candidateHashes {
+ if h, ok := s2k.HashIdToHash(hashId); ok && h == configuredHash {
+ hash = h
+ break
+ }
+ }
+ }
+
+ if hash == 0 {
+ hashId := candidateHashes[0]
+ name, ok := s2k.HashIdToString(hashId)
+ if !ok {
+ name = "#" + strconv.Itoa(int(hashId))
+ }
+ return nil, errors.InvalidArgumentError("cannot encrypt because no candidate hash functions are compiled in. (Wanted " + name + " in this case.)")
+ }
+
+ symKey := make([]byte, cipher.KeySize())
+ if _, err := io.ReadFull(config.Random(), symKey); err != nil {
+ return nil, err
+ }
+
+ for _, key := range encryptKeys {
+ if err := packet.SerializeEncryptedKey(ciphertext, key.PublicKey, cipher, symKey, config); err != nil {
+ return nil, err
+ }
+ }
+
+ encryptedData, err := packet.SerializeSymmetricallyEncrypted(ciphertext, cipher, symKey, config)
+ if err != nil {
+ return
+ }
+
+ if signer != nil {
+ ops := &packet.OnePassSignature{
+ SigType: packet.SigTypeBinary,
+ Hash: hash,
+ PubKeyAlgo: signer.PubKeyAlgo,
+ KeyId: signer.KeyId,
+ IsLast: true,
+ }
+ if err := ops.Serialize(encryptedData); err != nil {
+ return nil, err
+ }
+ }
+
+ if hints == nil {
+ hints = &FileHints{}
+ }
+
+ w := encryptedData
+ if signer != nil {
+ // If we need to write a signature packet after the literal
+ // data then we need to stop literalData from closing
+ // encryptedData.
+ w = noOpCloser{encryptedData}
+
+ }
+ var epochSeconds uint32
+ if !hints.ModTime.IsZero() {
+ epochSeconds = uint32(hints.ModTime.Unix())
+ }
+ literalData, err := packet.SerializeLiteral(w, hints.IsBinary, hints.FileName, epochSeconds)
+ if err != nil {
+ return nil, err
+ }
+
+ if signer != nil {
+ return signatureWriter{encryptedData, literalData, hash, hash.New(), signer, config}, nil
+ }
+ return literalData, nil
+}
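A usage sketch for Encrypt; the key-ring path is a placeholder, the message is unsigned (signed == nil), and nil hints/config select the defaults described above:

package main

import (
	"bytes"
	"fmt"
	"os"

	"golang.org/x/crypto/openpgp"
)

func main() {
	f, err := os.Open("recipients.asc") // placeholder path
	if err != nil {
		panic(err)
	}
	recipients, err := openpgp.ReadArmoredKeyRing(f)
	if err != nil {
		panic(err)
	}

	var ciphertext bytes.Buffer
	w, err := openpgp.Encrypt(&ciphertext, recipients, nil /* unsigned */, nil, nil)
	if err != nil {
		panic(err)
	}
	fmt.Fprintln(w, "attack at dawn")
	// Close finalizes the literal-data packet (and the trailing signature
	// packet when a signer is supplied).
	if err := w.Close(); err != nil {
		panic(err)
	}
	fmt.Printf("%d bytes of ciphertext\n", ciphertext.Len())
}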
+
+// signatureWriter hashes the contents of a message while passing it along to
+// literalData. When closed, it closes literalData, writes a signature packet
+// to encryptedData and then also closes encryptedData.
+type signatureWriter struct {
+ encryptedData io.WriteCloser
+ literalData io.WriteCloser
+ hashType crypto.Hash
+ h hash.Hash
+ signer *packet.PrivateKey
+ config *packet.Config
+}
+
+func (s signatureWriter) Write(data []byte) (int, error) {
+ s.h.Write(data)
+ return s.literalData.Write(data)
+}
+
+func (s signatureWriter) Close() error {
+ sig := &packet.Signature{
+ SigType: packet.SigTypeBinary,
+ PubKeyAlgo: s.signer.PubKeyAlgo,
+ Hash: s.hashType,
+ CreationTime: s.config.Now(),
+ IssuerKeyId: &s.signer.KeyId,
+ }
+
+ if err := sig.Sign(s.h, s.signer, s.config); err != nil {
+ return err
+ }
+ if err := s.literalData.Close(); err != nil {
+ return err
+ }
+ if err := sig.Serialize(s.encryptedData); err != nil {
+ return err
+ }
+ return s.encryptedData.Close()
+}
+
+// noOpCloser is like an ioutil.NopCloser, but for an io.Writer.
+// TODO: we have two of these in OpenPGP packages alone. This probably needs
+// to be promoted somewhere more common.
+type noOpCloser struct {
+ w io.Writer
+}
+
+func (c noOpCloser) Write(data []byte) (n int, err error) {
+ return c.w.Write(data)
+}
+
+func (c noOpCloser) Close() error {
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/write_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/write_test.go
new file mode 100644
index 00000000000..8e9a33583f5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/openpgp/write_test.go
@@ -0,0 +1,259 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package openpgp
+
+import (
+ "bytes"
+ "io"
+ "io/ioutil"
+ "testing"
+ "time"
+
+ "golang.org/x/crypto/openpgp/packet"
+)
+
+func TestSignDetached(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(testKeys1And2PrivateHex))
+ out := bytes.NewBuffer(nil)
+ message := bytes.NewBufferString(signedInput)
+ err := DetachSign(out, kring[0], message, nil)
+ if err != nil {
+ t.Error(err)
+ }
+
+ testDetachedSignature(t, kring, out, signedInput, "check", testKey1KeyId)
+}
+
+func TestSignTextDetached(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(testKeys1And2PrivateHex))
+ out := bytes.NewBuffer(nil)
+ message := bytes.NewBufferString(signedInput)
+ err := DetachSignText(out, kring[0], message, nil)
+ if err != nil {
+ t.Error(err)
+ }
+
+ testDetachedSignature(t, kring, out, signedInput, "check", testKey1KeyId)
+}
+
+func TestSignDetachedDSA(t *testing.T) {
+ kring, _ := ReadKeyRing(readerFromHex(dsaTestKeyPrivateHex))
+ out := bytes.NewBuffer(nil)
+ message := bytes.NewBufferString(signedInput)
+ err := DetachSign(out, kring[0], message, nil)
+ if err != nil {
+ t.Error(err)
+ }
+
+ testDetachedSignature(t, kring, out, signedInput, "check", testKey3KeyId)
+}
+
+func TestNewEntity(t *testing.T) {
+ if testing.Short() {
+ return
+ }
+
+ // Check bit-length with no config.
+ e, err := NewEntity("Test User", "test", "test@example.com", nil)
+ if err != nil {
+ t.Errorf("failed to create entity: %s", err)
+ return
+ }
+ bl, err := e.PrimaryKey.BitLength()
+ if err != nil {
+ t.Errorf("failed to find bit length: %s", err)
+ }
+ if int(bl) != defaultRSAKeyBits {
+		t.Errorf("BitLength %v, expected %v", bl, defaultRSAKeyBits)
+ }
+
+ // Check bit-length with a config.
+ cfg := &packet.Config{RSABits: 1024}
+ e, err = NewEntity("Test User", "test", "test@example.com", cfg)
+ if err != nil {
+ t.Errorf("failed to create entity: %s", err)
+ return
+ }
+ bl, err = e.PrimaryKey.BitLength()
+ if err != nil {
+ t.Errorf("failed to find bit length: %s", err)
+ }
+ if int(bl) != cfg.RSABits {
+ t.Errorf("BitLength %v, expected %v", bl, cfg.RSABits)
+ }
+
+ w := bytes.NewBuffer(nil)
+ if err := e.SerializePrivate(w, nil); err != nil {
+ t.Errorf("failed to serialize entity: %s", err)
+ return
+ }
+ serialized := w.Bytes()
+
+ el, err := ReadKeyRing(w)
+ if err != nil {
+ t.Errorf("failed to reparse entity: %s", err)
+ return
+ }
+
+ if len(el) != 1 {
+ t.Errorf("wrong number of entities found, got %d, want 1", len(el))
+ }
+
+ w = bytes.NewBuffer(nil)
+ if err := e.SerializePrivate(w, nil); err != nil {
+ t.Errorf("failed to serialize entity second time: %s", err)
+ return
+ }
+
+ if !bytes.Equal(w.Bytes(), serialized) {
+ t.Errorf("results differed")
+ }
+}
+
+func TestSymmetricEncryption(t *testing.T) {
+ buf := new(bytes.Buffer)
+ plaintext, err := SymmetricallyEncrypt(buf, []byte("testing"), nil, nil)
+ if err != nil {
+ t.Errorf("error writing headers: %s", err)
+ return
+ }
+ message := []byte("hello world\n")
+ _, err = plaintext.Write(message)
+ if err != nil {
+ t.Errorf("error writing to plaintext writer: %s", err)
+ }
+ err = plaintext.Close()
+ if err != nil {
+ t.Errorf("error closing plaintext writer: %s", err)
+ }
+
+ md, err := ReadMessage(buf, nil, func(keys []Key, symmetric bool) ([]byte, error) {
+ return []byte("testing"), nil
+ }, nil)
+ if err != nil {
+ t.Errorf("error rereading message: %s", err)
+ }
+ messageBuf := bytes.NewBuffer(nil)
+ _, err = io.Copy(messageBuf, md.UnverifiedBody)
+ if err != nil {
+ t.Errorf("error rereading message: %s", err)
+ }
+ if !bytes.Equal(message, messageBuf.Bytes()) {
+ t.Errorf("recovered message incorrect got '%s', want '%s'", messageBuf.Bytes(), message)
+ }
+}
+
+var testEncryptionTests = []struct {
+ keyRingHex string
+ isSigned bool
+}{
+ {
+ testKeys1And2PrivateHex,
+ false,
+ },
+ {
+ testKeys1And2PrivateHex,
+ true,
+ },
+ {
+ dsaElGamalTestKeysHex,
+ false,
+ },
+ {
+ dsaElGamalTestKeysHex,
+ true,
+ },
+}
+
+func TestEncryption(t *testing.T) {
+ for i, test := range testEncryptionTests {
+ kring, _ := ReadKeyRing(readerFromHex(test.keyRingHex))
+
+ passphrase := []byte("passphrase")
+ for _, entity := range kring {
+ if entity.PrivateKey != nil && entity.PrivateKey.Encrypted {
+ err := entity.PrivateKey.Decrypt(passphrase)
+ if err != nil {
+ t.Errorf("#%d: failed to decrypt key", i)
+ }
+ }
+ for _, subkey := range entity.Subkeys {
+ if subkey.PrivateKey != nil && subkey.PrivateKey.Encrypted {
+ err := subkey.PrivateKey.Decrypt(passphrase)
+ if err != nil {
+ t.Errorf("#%d: failed to decrypt subkey", i)
+ }
+ }
+ }
+ }
+
+ var signed *Entity
+ if test.isSigned {
+ signed = kring[0]
+ }
+
+ buf := new(bytes.Buffer)
+ w, err := Encrypt(buf, kring[:1], signed, nil /* no hints */, nil)
+ if err != nil {
+ t.Errorf("#%d: error in Encrypt: %s", i, err)
+ continue
+ }
+
+ const message = "testing"
+ _, err = w.Write([]byte(message))
+ if err != nil {
+ t.Errorf("#%d: error writing plaintext: %s", i, err)
+ continue
+ }
+ err = w.Close()
+ if err != nil {
+ t.Errorf("#%d: error closing WriteCloser: %s", i, err)
+ continue
+ }
+
+ md, err := ReadMessage(buf, kring, nil /* no prompt */, nil)
+ if err != nil {
+ t.Errorf("#%d: error reading message: %s", i, err)
+ continue
+ }
+
+ testTime, _ := time.Parse("2006-01-02", "2013-07-01")
+ if test.isSigned {
+ signKey, _ := kring[0].signingKey(testTime)
+ expectedKeyId := signKey.PublicKey.KeyId
+ if md.SignedByKeyId != expectedKeyId {
+				t.Errorf("#%d: message signed by wrong key id, got: %d, want: %d", i, md.SignedByKeyId, expectedKeyId)
+ }
+ if md.SignedBy == nil {
+ t.Errorf("#%d: failed to find the signing Entity", i)
+ }
+ }
+
+ plaintext, err := ioutil.ReadAll(md.UnverifiedBody)
+ if err != nil {
+ t.Errorf("#%d: error reading encrypted contents: %s", i, err)
+ continue
+ }
+
+ encryptKey, _ := kring[0].encryptionKey(testTime)
+ expectedKeyId := encryptKey.PublicKey.KeyId
+ if len(md.EncryptedToKeyIds) != 1 || md.EncryptedToKeyIds[0] != expectedKeyId {
+ t.Errorf("#%d: expected message to be encrypted to %v, but got %#v", i, expectedKeyId, md.EncryptedToKeyIds)
+ }
+
+ if string(plaintext) != message {
+ t.Errorf("#%d: got: %s, want: %s", i, string(plaintext), message)
+ }
+
+ if test.isSigned {
+ if md.SignatureError != nil {
+ t.Errorf("#%d: signature error: %s", i, md.SignatureError)
+ }
+ if md.Signature == nil {
+ t.Error("signature missing")
+ }
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/libotr_test_helper.c b/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/libotr_test_helper.c
new file mode 100644
index 00000000000..b3ca072d480
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/libotr_test_helper.c
@@ -0,0 +1,197 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code can be compiled and used to test the otr package against libotr.
+// See otr_test.go.
+
+// +build ignore
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#include <proto.h>
+#include <message.h>
+#include <privkey.h>
+
+static int g_session_established = 0;
+
+OtrlPolicy policy(void *opdata, ConnContext *context) {
+ return OTRL_POLICY_ALWAYS;
+}
+
+int is_logged_in(void *opdata, const char *accountname, const char *protocol,
+ const char *recipient) {
+ return 1;
+}
+
+void inject_message(void *opdata, const char *accountname, const char *protocol,
+ const char *recipient, const char *message) {
+ printf("%s\n", message);
+ fflush(stdout);
+ fprintf(stderr, "libotr helper sent: %s\n", message);
+}
+
+void update_context_list(void *opdata) {}
+
+void new_fingerprint(void *opdata, OtrlUserState us, const char *accountname,
+ const char *protocol, const char *username,
+ unsigned char fingerprint[20]) {
+ fprintf(stderr, "NEW FINGERPRINT\n");
+ g_session_established = 1;
+}
+
+void write_fingerprints(void *opdata) {}
+
+void gone_secure(void *opdata, ConnContext *context) {}
+
+void gone_insecure(void *opdata, ConnContext *context) {}
+
+void still_secure(void *opdata, ConnContext *context, int is_reply) {}
+
+int max_message_size(void *opdata, ConnContext *context) { return 99999; }
+
+const char *account_name(void *opdata, const char *account,
+ const char *protocol) {
+ return "ACCOUNT";
+}
+
+void account_name_free(void *opdata, const char *account_name) {}
+
+const char *error_message(void *opdata, ConnContext *context,
+ OtrlErrorCode err_code) {
+ return "ERR";
+}
+
+void error_message_free(void *opdata, const char *msg) {}
+
+void resent_msg_prefix_free(void *opdata, const char *prefix) {}
+
+void handle_smp_event(void *opdata, OtrlSMPEvent smp_event,
+ ConnContext *context, unsigned short progress_event,
+ char *question) {}
+
+void handle_msg_event(void *opdata, OtrlMessageEvent msg_event,
+ ConnContext *context, const char *message,
+ gcry_error_t err) {
+ fprintf(stderr, "msg event: %d %s\n", msg_event, message);
+}
+
+OtrlMessageAppOps uiops = {
+ policy,
+ NULL,
+ is_logged_in,
+ inject_message,
+ update_context_list,
+ new_fingerprint,
+ write_fingerprints,
+ gone_secure,
+ gone_insecure,
+ still_secure,
+ max_message_size,
+ account_name,
+ account_name_free,
+ NULL, /* received_symkey */
+ error_message,
+ error_message_free,
+ NULL, /* resent_msg_prefix */
+ resent_msg_prefix_free,
+ handle_smp_event,
+ handle_msg_event,
+ NULL /* create_instag */,
+ NULL /* convert_msg */,
+ NULL /* convert_free */,
+ NULL /* timer_control */,
+};
+
+static const char kPrivateKeyData[] =
+ "(privkeys (account (name \"account\") (protocol proto) (private-key (dsa "
+ "(p "
+ "#00FC07ABCF0DC916AFF6E9AE47BEF60C7AB9B4D6B2469E436630E36F8A489BE812486A09F"
+ "30B71224508654940A835301ACC525A4FF133FC152CC53DCC59D65C30A54F1993FE13FE63E"
+ "5823D4C746DB21B90F9B9C00B49EC7404AB1D929BA7FBA12F2E45C6E0A651689750E8528AB"
+ "8C031D3561FECEE72EBB4A090D450A9B7A857#) (q "
+ "#00997BD266EF7B1F60A5C23F3A741F2AEFD07A2081#) (g "
+ "#535E360E8A95EBA46A4F7DE50AD6E9B2A6DB785A66B64EB9F20338D2A3E8FB0E94725848F"
+ "1AA6CC567CB83A1CC517EC806F2E92EAE71457E80B2210A189B91250779434B41FC8A8873F"
+ "6DB94BEA7D177F5D59E7E114EE10A49CFD9CEF88AE43387023B672927BA74B04EB6BBB5E57"
+ "597766A2F9CE3857D7ACE3E1E3BC1FC6F26#) (y "
+ "#0AC8670AD767D7A8D9D14CC1AC6744CD7D76F993B77FFD9E39DF01E5A6536EF65E775FCEF"
+ "2A983E2A19BD6415500F6979715D9FD1257E1FE2B6F5E1E74B333079E7C880D39868462A93"
+ "454B41877BE62E5EF0A041C2EE9C9E76BD1E12AE25D9628DECB097025DD625EF49C3258A1A"
+ "3C0FF501E3DC673B76D7BABF349009B6ECF#) (x "
+ "#14D0345A3562C480A039E3C72764F72D79043216#)))))\n";
+
+int main() {
+ OTRL_INIT;
+
+ // We have to write the private key information to a file because the libotr
+ // API demands a filename to read from.
+ const char *tmpdir = "/tmp";
+ if (getenv("TMP")) {
+ tmpdir = getenv("TMP");
+ }
+
+ char private_key_file[256];
+ snprintf(private_key_file, sizeof(private_key_file),
+ "%s/libotr_test_helper_privatekeys-XXXXXX", tmpdir);
+ int fd = mkstemp(private_key_file);
+ if (fd == -1) {
+ perror("creating temp file");
+ }
+ write(fd, kPrivateKeyData, sizeof(kPrivateKeyData) - 1);
+ close(fd);
+
+ OtrlUserState userstate = otrl_userstate_create();
+ otrl_privkey_read(userstate, private_key_file);
+ unlink(private_key_file);
+
+ fprintf(stderr, "libotr helper started\n");
+
+ char buf[4096];
+
+ for (;;) {
+    char *message = fgets(buf, sizeof(buf), stdin);
+    if (message == NULL || strlen(message) == 0) {
+ break;
+ }
+ message[strlen(message) - 1] = 0;
+ fprintf(stderr, "libotr helper got: %s\n", message);
+
+ char *newmessage = NULL;
+ OtrlTLV *tlvs;
+ int ignore_message = otrl_message_receiving(
+ userstate, &uiops, NULL, "account", "proto", "peer", message,
+ &newmessage, &tlvs, NULL, NULL, NULL);
+ if (tlvs) {
+ otrl_tlv_free(tlvs);
+ }
+
+ if (newmessage != NULL) {
+ fprintf(stderr, "libotr got: %s\n", newmessage);
+ otrl_message_free(newmessage);
+
+ gcry_error_t err;
+ char *newmessage = NULL;
+
+ err = otrl_message_sending(userstate, &uiops, NULL, "account", "proto",
+ "peer", 0, "test message", NULL, &newmessage,
+ OTRL_FRAGMENT_SEND_SKIP, NULL, NULL, NULL);
+ if (newmessage == NULL) {
+ fprintf(stderr, "libotr didn't encrypt message\n");
+ return 1;
+ }
+ write(1, newmessage, strlen(newmessage));
+ write(1, "\n", 1);
+ fprintf(stderr, "libotr sent: %s\n", newmessage);
+ otrl_message_free(newmessage);
+
+ g_session_established = 0;
+ write(1, "?OTRv2?\n", 8);
+      fprintf(stderr, "libotr sent: ?OTRv2?\n");
+ }
+ }
+
+ return 0;
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/otr.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/otr.go
new file mode 100644
index 00000000000..549be116df2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/otr.go
@@ -0,0 +1,1408 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package otr implements the Off The Record protocol as specified in
+// http://www.cypherpunks.ca/otr/Protocol-v2-3.1.0.html
+package otr // import "golang.org/x/crypto/otr"
+
+import (
+ "bytes"
+ "crypto/aes"
+ "crypto/cipher"
+ "crypto/dsa"
+ "crypto/hmac"
+ "crypto/rand"
+ "crypto/sha1"
+ "crypto/sha256"
+ "crypto/subtle"
+ "encoding/base64"
+ "encoding/hex"
+ "errors"
+ "hash"
+ "io"
+ "math/big"
+ "strconv"
+)
+
+// SecurityChange describes a change in the security state of a Conversation.
+type SecurityChange int
+
+const (
+ NoChange SecurityChange = iota
+ // NewKeys indicates that a key exchange has completed. This occurs
+ // when a conversation first becomes encrypted, and when the keys are
+ // renegotiated within an encrypted conversation.
+ NewKeys
+ // SMPSecretNeeded indicates that the peer has started an
+ // authentication and that we need to supply a secret. Call SMPQuestion
+ // to get the optional, human readable challenge and then Authenticate
+ // to supply the matching secret.
+ SMPSecretNeeded
+ // SMPComplete indicates that an authentication completed. The identity
+ // of the peer has now been confirmed.
+ SMPComplete
+ // SMPFailed indicates that an authentication failed.
+ SMPFailed
+ // ConversationEnded indicates that the peer ended the secure
+ // conversation.
+ ConversationEnded
+)
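+
+// Illustrative sketch (editor's addition, not part of the upstream file): one
+// way a caller could react to the SecurityChange values returned by Receive.
+// The send and askSecret callbacks are hypothetical stand-ins for the
+// application's transport and user interface.
+func handleSecurityChangeExample(c *Conversation, change SecurityChange, send func([][]byte) error, askSecret func(question string) []byte) error {
+	switch change {
+	case NewKeys:
+		// The conversation just became encrypted, or was re-keyed.
+	case SMPSecretNeeded:
+		// The peer started an authentication; answer it with the shared secret.
+		msgs, err := c.Authenticate("", askSecret(c.SMPQuestion()))
+		if err != nil {
+			return err
+		}
+		return send(msgs)
+	case SMPComplete:
+		// TheirPublicKey is now confirmed to belong to the peer.
+	case SMPFailed:
+		// Authentication failed; treat the peer as unverified.
+	case ConversationEnded:
+		// The peer ended the secure conversation; stop sending through it.
+	}
+	return nil
+}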
+
+// QueryMessage can be sent to a peer to start an OTR conversation.
+var QueryMessage = "?OTRv2?"
+
+// ErrorPrefix can be used to make an OTR error by appending an error message
+// to it.
+var ErrorPrefix = "?OTR Error:"
+
+var (
+ fragmentPartSeparator = []byte(",")
+ fragmentPrefix = []byte("?OTR,")
+ msgPrefix = []byte("?OTR:")
+ queryMarker = []byte("?OTR")
+)
+
+// isQuery attempts to parse an OTR query from msg and returns the greatest
+// common version, or 0 if msg is not an OTR query.
+func isQuery(msg []byte) (greatestCommonVersion int) {
+ pos := bytes.Index(msg, queryMarker)
+ if pos == -1 {
+ return 0
+ }
+ for i, c := range msg[pos+len(queryMarker):] {
+ if i == 0 {
+ if c == '?' {
+ // Indicates support for version 1, but we don't
+ // implement that.
+ continue
+ }
+
+ if c != 'v' {
+ // Invalid message
+ return 0
+ }
+
+ continue
+ }
+
+ if c == '?' {
+ // End of message
+ return
+ }
+
+ if c == ' ' || c == '\t' {
+ // Probably an invalid message
+ return 0
+ }
+
+ if c == '2' {
+ greatestCommonVersion = 2
+ }
+ }
+
+ return 0
+}
+
+const (
+ statePlaintext = iota
+ stateEncrypted
+ stateFinished
+)
+
+const (
+ authStateNone = iota
+ authStateAwaitingDHKey
+ authStateAwaitingRevealSig
+ authStateAwaitingSig
+)
+
+const (
+ msgTypeDHCommit = 2
+ msgTypeData = 3
+ msgTypeDHKey = 10
+ msgTypeRevealSig = 17
+ msgTypeSig = 18
+)
+
+const (
+ // If the requested fragment size is less than this, it will be ignored.
+ minFragmentSize = 18
+ // Messages are padded to a multiple of this number of bytes.
+ paddingGranularity = 256
+ // The number of bytes in a Diffie-Hellman private value (320-bits).
+ dhPrivateBytes = 40
+ // The number of bytes needed to represent an element of the DSA
+ // subgroup (160-bits).
+ dsaSubgroupBytes = 20
+ // The number of bytes of the MAC that are sent on the wire (160-bits).
+ macPrefixBytes = 20
+)
+
+// These are the global, common group parameters for OTR.
+var (
+ p *big.Int // group prime
+ g *big.Int // group generator
+ q *big.Int // group order
+ pMinus2 *big.Int
+)
+
+func init() {
+ p, _ = new(big.Int).SetString("FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF", 16)
+ q, _ = new(big.Int).SetString("7FFFFFFFFFFFFFFFE487ED5110B4611A62633145C06E0E68948127044533E63A0105DF531D89CD9128A5043CC71A026EF7CA8CD9E69D218D98158536F92F8A1BA7F09AB6B6A8E122F242DABB312F3F637A262174D31BF6B585FFAE5B7A035BF6F71C35FDAD44CFD2D74F9208BE258FF324943328F6722D9EE1003E5C50B1DF82CC6D241B0E2AE9CD348B1FD47E9267AFC1B2AE91EE51D6CB0E3179AB1042A95DCF6A9483B84B4B36B3861AA7255E4C0278BA36046511B993FFFFFFFFFFFFFFFF", 16)
+ g = new(big.Int).SetInt64(2)
+ pMinus2 = new(big.Int).Sub(p, g)
+}
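+
+// Illustrative sketch (editor's addition): for this group, q is intended to be
+// (p-1)/2 and pMinus2 is p-2, so the hard-coded constants above can be
+// cross-checked like this.
+func checkGroupParametersExample() bool {
+	halfPMinus1 := new(big.Int).Sub(p, big.NewInt(1))
+	halfPMinus1.Rsh(halfPMinus1, 1) // (p-1)/2
+	pm2 := new(big.Int).Sub(p, big.NewInt(2))
+	return halfPMinus1.Cmp(q) == 0 && pm2.Cmp(pMinus2) == 0
+}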
+
+// Conversation represents a relation with a peer. The zero value is a valid
+// Conversation, although PrivateKey must be set.
+//
+// When communicating with a peer, all inbound messages should be passed to
+// Conversation.Receive and all outbound messages to Conversation.Send. The
+// Conversation will take care of maintaining the encryption state and
+// negotiating encryption as needed.
+type Conversation struct {
+ // PrivateKey contains the private key to use to sign key exchanges.
+ PrivateKey *PrivateKey
+
+ // Rand can be set to override the entropy source. Otherwise,
+ // crypto/rand will be used.
+ Rand io.Reader
+ // If FragmentSize is set, all messages produced by Receive and Send
+ // will be fragmented into messages of, at most, this number of bytes.
+ FragmentSize int
+
+ // Once Receive has returned NewKeys once, the following fields are
+ // valid.
+ SSID [8]byte
+ TheirPublicKey PublicKey
+
+ state, authState int
+
+ r [16]byte
+ x, y *big.Int
+ gx, gy *big.Int
+ gxBytes []byte
+ digest [sha256.Size]byte
+
+ revealKeys, sigKeys akeKeys
+
+ myKeyId uint32
+ myCurrentDHPub *big.Int
+ myCurrentDHPriv *big.Int
+ myLastDHPub *big.Int
+ myLastDHPriv *big.Int
+
+ theirKeyId uint32
+ theirCurrentDHPub *big.Int
+ theirLastDHPub *big.Int
+
+ keySlots [4]keySlot
+
+ myCounter [8]byte
+ theirLastCtr [8]byte
+ oldMACs []byte
+
+ k, n int // fragment state
+ frag []byte
+
+ smp smpState
+}
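+
+// Illustrative sketch (editor's addition): minimal wiring of a Conversation to
+// an application, as the comment above describes. Every inbound network
+// message goes through Receive, anything Receive hands back in toSend is
+// transmitted unchanged, and decrypted (or plaintext) output is shown to the
+// user. The transmit and display callbacks are hypothetical; the returned
+// SecurityChange would be handled as in the sketch above.
+func conversationPumpExample(c *Conversation, inbound []byte, transmit func([]byte) error, display func(msg []byte, encrypted bool)) error {
+	out, encrypted, _, toSend, err := c.Receive(inbound)
+	if err != nil {
+		return err
+	}
+	for _, msg := range toSend {
+		// Protocol messages produced by Receive are sent as-is, without
+		// another pass through Send.
+		if err := transmit(msg); err != nil {
+			return err
+		}
+	}
+	if len(out) > 0 {
+		display(out, encrypted)
+	}
+	return nil
+}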
+
+// A keySlot contains key material for a specific (their keyid, my keyid) pair.
+type keySlot struct {
+ // used is true if this slot is valid. If false, it's free for reuse.
+ used bool
+ theirKeyId uint32
+ myKeyId uint32
+ sendAESKey, recvAESKey []byte
+ sendMACKey, recvMACKey []byte
+ theirLastCtr [8]byte
+}
+
+// akeKeys are generated during key exchange. There's one set for the reveal
+// signature message and another for the signature message. In the protocol
+// spec the latter are indicated with a prime mark.
+type akeKeys struct {
+ c [16]byte
+ m1, m2 [32]byte
+}
+
+func (c *Conversation) rand() io.Reader {
+ if c.Rand != nil {
+ return c.Rand
+ }
+ return rand.Reader
+}
+
+func (c *Conversation) randMPI(buf []byte) *big.Int {
+ _, err := io.ReadFull(c.rand(), buf)
+ if err != nil {
+ panic("otr: short read from random source")
+ }
+
+ return new(big.Int).SetBytes(buf)
+}
+
+// tlv represents the type-length value from the protocol.
+type tlv struct {
+ typ, length uint16
+ data []byte
+}
+
+const (
+ tlvTypePadding = 0
+ tlvTypeDisconnected = 1
+ tlvTypeSMP1 = 2
+ tlvTypeSMP2 = 3
+ tlvTypeSMP3 = 4
+ tlvTypeSMP4 = 5
+ tlvTypeSMPAbort = 6
+ tlvTypeSMP1WithQuestion = 7
+)
+
+// Receive handles a message from a peer. It returns a human readable message,
+// an indicator of whether that message was encrypted, a hint about the
+// encryption state and zero or more messages to send back to the peer.
+// These messages do not need to be passed to Send before transmission.
+func (c *Conversation) Receive(in []byte) (out []byte, encrypted bool, change SecurityChange, toSend [][]byte, err error) {
+ if bytes.HasPrefix(in, fragmentPrefix) {
+ in, err = c.processFragment(in)
+ if in == nil || err != nil {
+ return
+ }
+ }
+
+ if bytes.HasPrefix(in, msgPrefix) && in[len(in)-1] == '.' {
+ in = in[len(msgPrefix) : len(in)-1]
+ } else if version := isQuery(in); version > 0 {
+ c.authState = authStateAwaitingDHKey
+ c.reset()
+ toSend = c.encode(c.generateDHCommit())
+ return
+ } else {
+ // plaintext message
+ out = in
+ return
+ }
+
+ msg := make([]byte, base64.StdEncoding.DecodedLen(len(in)))
+ msgLen, err := base64.StdEncoding.Decode(msg, in)
+ if err != nil {
+ err = errors.New("otr: invalid base64 encoding in message")
+ return
+ }
+ msg = msg[:msgLen]
+
+ // The first two bytes are the protocol version (2)
+ if len(msg) < 3 || msg[0] != 0 || msg[1] != 2 {
+ err = errors.New("otr: invalid OTR message")
+ return
+ }
+
+ msgType := int(msg[2])
+ msg = msg[3:]
+
+ switch msgType {
+ case msgTypeDHCommit:
+ switch c.authState {
+ case authStateNone:
+ c.authState = authStateAwaitingRevealSig
+ if err = c.processDHCommit(msg); err != nil {
+ return
+ }
+ c.reset()
+ toSend = c.encode(c.generateDHKey())
+ return
+ case authStateAwaitingDHKey:
+ // This is a 'SYN-crossing'. The greater digest wins.
+ var cmp int
+ if cmp, err = c.compareToDHCommit(msg); err != nil {
+ return
+ }
+ if cmp > 0 {
+ // We win. Retransmit DH commit.
+ toSend = c.encode(c.serializeDHCommit())
+ return
+ } else {
+ // They win. We forget about our DH commit.
+ c.authState = authStateAwaitingRevealSig
+ if err = c.processDHCommit(msg); err != nil {
+ return
+ }
+ c.reset()
+ toSend = c.encode(c.generateDHKey())
+ return
+ }
+ case authStateAwaitingRevealSig:
+ if err = c.processDHCommit(msg); err != nil {
+ return
+ }
+ toSend = c.encode(c.serializeDHKey())
+ case authStateAwaitingSig:
+ if err = c.processDHCommit(msg); err != nil {
+ return
+ }
+ c.reset()
+ toSend = c.encode(c.generateDHKey())
+ c.authState = authStateAwaitingRevealSig
+ default:
+ panic("bad state")
+ }
+ case msgTypeDHKey:
+ switch c.authState {
+ case authStateAwaitingDHKey:
+ var isSame bool
+ if isSame, err = c.processDHKey(msg); err != nil {
+ return
+ }
+ if isSame {
+ err = errors.New("otr: unexpected duplicate DH key")
+ return
+ }
+ toSend = c.encode(c.generateRevealSig())
+ c.authState = authStateAwaitingSig
+ case authStateAwaitingSig:
+ var isSame bool
+ if isSame, err = c.processDHKey(msg); err != nil {
+ return
+ }
+ if isSame {
+ toSend = c.encode(c.serializeDHKey())
+ }
+ }
+ case msgTypeRevealSig:
+ if c.authState != authStateAwaitingRevealSig {
+ return
+ }
+ if err = c.processRevealSig(msg); err != nil {
+ return
+ }
+ toSend = c.encode(c.generateSig())
+ c.authState = authStateNone
+ c.state = stateEncrypted
+ change = NewKeys
+ case msgTypeSig:
+ if c.authState != authStateAwaitingSig {
+ return
+ }
+ if err = c.processSig(msg); err != nil {
+ return
+ }
+ c.authState = authStateNone
+ c.state = stateEncrypted
+ change = NewKeys
+ case msgTypeData:
+ if c.state != stateEncrypted {
+ err = errors.New("otr: encrypted message received without encrypted session established")
+ return
+ }
+ var tlvs []tlv
+ out, tlvs, err = c.processData(msg)
+ encrypted = true
+
+ EachTLV:
+ for _, inTLV := range tlvs {
+ switch inTLV.typ {
+ case tlvTypeDisconnected:
+ change = ConversationEnded
+ c.state = stateFinished
+ break EachTLV
+ case tlvTypeSMP1, tlvTypeSMP2, tlvTypeSMP3, tlvTypeSMP4, tlvTypeSMPAbort, tlvTypeSMP1WithQuestion:
+ var reply tlv
+ var complete bool
+ reply, complete, err = c.processSMP(inTLV)
+ if err == smpSecretMissingError {
+ err = nil
+ change = SMPSecretNeeded
+ c.smp.saved = &inTLV
+ return
+ }
+ if err == smpFailureError {
+ err = nil
+ change = SMPFailed
+ } else if complete {
+ change = SMPComplete
+ }
+ if reply.typ != 0 {
+ toSend = c.encode(c.generateData(nil, &reply))
+ }
+ break EachTLV
+ default:
+ // skip unknown TLVs
+ }
+ }
+ default:
+ err = errors.New("otr: unknown message type " + strconv.Itoa(msgType))
+ }
+
+ return
+}
+
+// Send takes a human readable message from the local user, possibly encrypts
+// it and returns zero, one or more messages to send to the peer.
+func (c *Conversation) Send(msg []byte) ([][]byte, error) {
+ switch c.state {
+ case statePlaintext:
+ return [][]byte{msg}, nil
+ case stateEncrypted:
+ return c.encode(c.generateData(msg, nil)), nil
+ case stateFinished:
+ return nil, errors.New("otr: cannot send message because secure conversation has finished")
+ }
+
+ return nil, errors.New("otr: cannot send message in current state")
+}
+
+// SMPQuestion returns the human readable challenge question from the peer.
+// It's only valid after Receive has returned SMPSecretNeeded.
+func (c *Conversation) SMPQuestion() string {
+ return c.smp.question
+}
+
+// Authenticate begins an authentication with the peer. Authentication involves
+// an optional challenge message and a shared secret. The authentication
+// proceeds until either Receive returns SMPComplete, SMPSecretNeeded (which
+// indicates that a new authentication is happening and thus this one was
+// aborted) or SMPFailed.
+func (c *Conversation) Authenticate(question string, mutualSecret []byte) (toSend [][]byte, err error) {
+ if c.state != stateEncrypted {
+ err = errors.New("otr: can't authenticate a peer without a secure conversation established")
+ return
+ }
+
+ if c.smp.saved != nil {
+ c.calcSMPSecret(mutualSecret, false /* they started it */)
+
+ var out tlv
+ var complete bool
+ out, complete, err = c.processSMP(*c.smp.saved)
+ if complete {
+ panic("SMP completed on the first message")
+ }
+ c.smp.saved = nil
+ if out.typ != 0 {
+ toSend = c.encode(c.generateData(nil, &out))
+ }
+ return
+ }
+
+ c.calcSMPSecret(mutualSecret, true /* we started it */)
+ outs := c.startSMP(question)
+ for _, out := range outs {
+ toSend = append(toSend, c.encode(c.generateData(nil, &out))...)
+ }
+ return
+}
+
+// End ends a secure conversation by generating a termination message for
+// the peer and switches to unencrypted communication.
+func (c *Conversation) End() (toSend [][]byte) {
+ switch c.state {
+ case statePlaintext:
+ return nil
+ case stateEncrypted:
+ c.state = statePlaintext
+ return c.encode(c.generateData(nil, &tlv{typ: tlvTypeDisconnected}))
+ case stateFinished:
+ c.state = statePlaintext
+ return nil
+ }
+ panic("unreachable")
+}
+
+// IsEncrypted returns true if a message passed to Send would be encrypted
+// before transmission. This result remains valid until the next call to
+// Receive or End, which may change the state of the Conversation.
+func (c *Conversation) IsEncrypted() bool {
+ return c.state == stateEncrypted
+}
+
+var fragmentError = errors.New("otr: invalid OTR fragment")
+
+// processFragment processes a fragmented OTR message and possibly returns a
+// complete message. Fragmented messages look like "?OTR,k,n,msg," where k is
+// the fragment number (starting from 1), n is the number of fragments in this
+// message and msg is a substring of the base64 encoded message.
+func (c *Conversation) processFragment(in []byte) (out []byte, err error) {
+ in = in[len(fragmentPrefix):] // remove "?OTR,"
+ parts := bytes.Split(in, fragmentPartSeparator)
+ if len(parts) != 4 || len(parts[3]) != 0 {
+ return nil, fragmentError
+ }
+
+ k, err := strconv.Atoi(string(parts[0]))
+ if err != nil {
+ return nil, fragmentError
+ }
+
+ n, err := strconv.Atoi(string(parts[1]))
+ if err != nil {
+ return nil, fragmentError
+ }
+
+ if k < 1 || n < 1 || k > n {
+ return nil, fragmentError
+ }
+
+ if k == 1 {
+ c.frag = append(c.frag[:0], parts[2]...)
+ c.k, c.n = k, n
+ } else if n == c.n && k == c.k+1 {
+ c.frag = append(c.frag, parts[2]...)
+ c.k++
+ } else {
+ c.frag = c.frag[:0]
+ c.n, c.k = 0, 0
+ }
+
+ if c.n > 0 && c.k == c.n {
+ c.n, c.k = 0, 0
+ return c.frag, nil
+ }
+
+ return nil, nil
+}
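+
+// Illustrative sketch (editor's addition): building the "?OTR,k,n,msg," form
+// described above for an already-encoded payload, to make the wire format
+// concrete. The real splitting applied to outgoing messages lives in encode.
+func buildFragmentsExample(encoded []byte, pieceLen int) [][]byte {
+	if pieceLen <= 0 || len(encoded) <= pieceLen {
+		return [][]byte{encoded}
+	}
+	n := (len(encoded) + pieceLen - 1) / pieceLen
+	var frags [][]byte
+	for k := 1; k <= n; k++ {
+		todo := pieceLen
+		if todo > len(encoded) {
+			todo = len(encoded)
+		}
+		frag := []byte("?OTR," + strconv.Itoa(k) + "," + strconv.Itoa(n) + "," + string(encoded[:todo]) + ",")
+		encoded = encoded[todo:]
+		frags = append(frags, frag)
+	}
+	return frags
+}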
+
+func (c *Conversation) generateDHCommit() []byte {
+ _, err := io.ReadFull(c.rand(), c.r[:])
+ if err != nil {
+ panic("otr: short read from random source")
+ }
+
+ var xBytes [dhPrivateBytes]byte
+ c.x = c.randMPI(xBytes[:])
+ c.gx = new(big.Int).Exp(g, c.x, p)
+ c.gy = nil
+ c.gxBytes = appendMPI(nil, c.gx)
+
+ h := sha256.New()
+ h.Write(c.gxBytes)
+ h.Sum(c.digest[:0])
+
+ aesCipher, err := aes.NewCipher(c.r[:])
+ if err != nil {
+ panic(err.Error())
+ }
+
+ var iv [aes.BlockSize]byte
+ ctr := cipher.NewCTR(aesCipher, iv[:])
+ ctr.XORKeyStream(c.gxBytes, c.gxBytes)
+
+ return c.serializeDHCommit()
+}
+
+func (c *Conversation) serializeDHCommit() []byte {
+ var ret []byte
+ ret = appendU16(ret, 2) // protocol version
+ ret = append(ret, msgTypeDHCommit)
+ ret = appendData(ret, c.gxBytes)
+ ret = appendData(ret, c.digest[:])
+ return ret
+}
+
+func (c *Conversation) processDHCommit(in []byte) error {
+ var ok1, ok2 bool
+ c.gxBytes, in, ok1 = getData(in)
+ digest, in, ok2 := getData(in)
+ if !ok1 || !ok2 || len(in) > 0 {
+ return errors.New("otr: corrupt DH commit message")
+ }
+ copy(c.digest[:], digest)
+ return nil
+}
+
+func (c *Conversation) compareToDHCommit(in []byte) (int, error) {
+ _, in, ok1 := getData(in)
+ digest, in, ok2 := getData(in)
+ if !ok1 || !ok2 || len(in) > 0 {
+ return 0, errors.New("otr: corrupt DH commit message")
+ }
+ return bytes.Compare(c.digest[:], digest), nil
+}
+
+func (c *Conversation) generateDHKey() []byte {
+ var yBytes [dhPrivateBytes]byte
+ c.y = c.randMPI(yBytes[:])
+ c.gy = new(big.Int).Exp(g, c.y, p)
+ return c.serializeDHKey()
+}
+
+func (c *Conversation) serializeDHKey() []byte {
+ var ret []byte
+ ret = appendU16(ret, 2) // protocol version
+ ret = append(ret, msgTypeDHKey)
+ ret = appendMPI(ret, c.gy)
+ return ret
+}
+
+func (c *Conversation) processDHKey(in []byte) (isSame bool, err error) {
+ gy, in, ok := getMPI(in)
+ if !ok {
+ err = errors.New("otr: corrupt DH key message")
+ return
+ }
+ if gy.Cmp(g) < 0 || gy.Cmp(pMinus2) > 0 {
+ err = errors.New("otr: DH value out of range")
+ return
+ }
+ if c.gy != nil {
+ isSame = c.gy.Cmp(gy) == 0
+ return
+ }
+ c.gy = gy
+ return
+}
+
+func (c *Conversation) generateEncryptedSignature(keys *akeKeys, xFirst bool) ([]byte, []byte) {
+ var xb []byte
+ xb = c.PrivateKey.PublicKey.Serialize(xb)
+
+ var verifyData []byte
+ if xFirst {
+ verifyData = appendMPI(verifyData, c.gx)
+ verifyData = appendMPI(verifyData, c.gy)
+ } else {
+ verifyData = appendMPI(verifyData, c.gy)
+ verifyData = appendMPI(verifyData, c.gx)
+ }
+ verifyData = append(verifyData, xb...)
+ verifyData = appendU32(verifyData, c.myKeyId)
+
+ mac := hmac.New(sha256.New, keys.m1[:])
+ mac.Write(verifyData)
+ mb := mac.Sum(nil)
+
+ xb = appendU32(xb, c.myKeyId)
+ xb = append(xb, c.PrivateKey.Sign(c.rand(), mb)...)
+
+ aesCipher, err := aes.NewCipher(keys.c[:])
+ if err != nil {
+ panic(err.Error())
+ }
+ var iv [aes.BlockSize]byte
+ ctr := cipher.NewCTR(aesCipher, iv[:])
+ ctr.XORKeyStream(xb, xb)
+
+ mac = hmac.New(sha256.New, keys.m2[:])
+ encryptedSig := appendData(nil, xb)
+ mac.Write(encryptedSig)
+
+ return encryptedSig, mac.Sum(nil)
+}
+
+func (c *Conversation) generateRevealSig() []byte {
+ s := new(big.Int).Exp(c.gy, c.x, p)
+ c.calcAKEKeys(s)
+ c.myKeyId++
+
+ encryptedSig, mac := c.generateEncryptedSignature(&c.revealKeys, true /* gx comes first */)
+
+ c.myCurrentDHPub = c.gx
+ c.myCurrentDHPriv = c.x
+ c.rotateDHKeys()
+ incCounter(&c.myCounter)
+
+ var ret []byte
+ ret = appendU16(ret, 2)
+ ret = append(ret, msgTypeRevealSig)
+ ret = appendData(ret, c.r[:])
+ ret = append(ret, encryptedSig...)
+ ret = append(ret, mac[:20]...)
+ return ret
+}
+
+func (c *Conversation) processEncryptedSig(encryptedSig, theirMAC []byte, keys *akeKeys, xFirst bool) error {
+ mac := hmac.New(sha256.New, keys.m2[:])
+ mac.Write(appendData(nil, encryptedSig))
+ myMAC := mac.Sum(nil)[:20]
+
+ if len(myMAC) != len(theirMAC) || subtle.ConstantTimeCompare(myMAC, theirMAC) == 0 {
+ return errors.New("bad signature MAC in encrypted signature")
+ }
+
+ aesCipher, err := aes.NewCipher(keys.c[:])
+ if err != nil {
+ panic(err.Error())
+ }
+ var iv [aes.BlockSize]byte
+ ctr := cipher.NewCTR(aesCipher, iv[:])
+ ctr.XORKeyStream(encryptedSig, encryptedSig)
+
+ sig := encryptedSig
+ sig, ok1 := c.TheirPublicKey.Parse(sig)
+ keyId, sig, ok2 := getU32(sig)
+ if !ok1 || !ok2 {
+ return errors.New("otr: corrupt encrypted signature")
+ }
+
+ var verifyData []byte
+ if xFirst {
+ verifyData = appendMPI(verifyData, c.gx)
+ verifyData = appendMPI(verifyData, c.gy)
+ } else {
+ verifyData = appendMPI(verifyData, c.gy)
+ verifyData = appendMPI(verifyData, c.gx)
+ }
+ verifyData = c.TheirPublicKey.Serialize(verifyData)
+ verifyData = appendU32(verifyData, keyId)
+
+ mac = hmac.New(sha256.New, keys.m1[:])
+ mac.Write(verifyData)
+ mb := mac.Sum(nil)
+
+ sig, ok1 = c.TheirPublicKey.Verify(mb, sig)
+ if !ok1 {
+ return errors.New("bad signature in encrypted signature")
+ }
+ if len(sig) > 0 {
+ return errors.New("corrupt encrypted signature")
+ }
+
+ c.theirKeyId = keyId
+ zero(c.theirLastCtr[:])
+ return nil
+}
+
+func (c *Conversation) processRevealSig(in []byte) error {
+ r, in, ok1 := getData(in)
+ encryptedSig, in, ok2 := getData(in)
+ theirMAC := in
+ if !ok1 || !ok2 || len(theirMAC) != 20 {
+ return errors.New("otr: corrupt reveal signature message")
+ }
+
+ aesCipher, err := aes.NewCipher(r)
+ if err != nil {
+ return errors.New("otr: cannot create AES cipher from reveal signature message: " + err.Error())
+ }
+ var iv [aes.BlockSize]byte
+ ctr := cipher.NewCTR(aesCipher, iv[:])
+ ctr.XORKeyStream(c.gxBytes, c.gxBytes)
+ h := sha256.New()
+ h.Write(c.gxBytes)
+ digest := h.Sum(nil)
+ if len(digest) != len(c.digest) || subtle.ConstantTimeCompare(digest, c.digest[:]) == 0 {
+ return errors.New("otr: bad commit MAC in reveal signature message")
+ }
+ var rest []byte
+ c.gx, rest, ok1 = getMPI(c.gxBytes)
+ if !ok1 || len(rest) > 0 {
+ return errors.New("otr: gx corrupt after decryption")
+ }
+ if c.gx.Cmp(g) < 0 || c.gx.Cmp(pMinus2) > 0 {
+ return errors.New("otr: DH value out of range")
+ }
+ s := new(big.Int).Exp(c.gx, c.y, p)
+ c.calcAKEKeys(s)
+
+ if err := c.processEncryptedSig(encryptedSig, theirMAC, &c.revealKeys, true /* gx comes first */); err != nil {
+ return errors.New("otr: in reveal signature message: " + err.Error())
+ }
+
+ c.theirCurrentDHPub = c.gx
+ c.theirLastDHPub = nil
+
+ return nil
+}
+
+func (c *Conversation) generateSig() []byte {
+ c.myKeyId++
+
+ encryptedSig, mac := c.generateEncryptedSignature(&c.sigKeys, false /* gy comes first */)
+
+ c.myCurrentDHPub = c.gy
+ c.myCurrentDHPriv = c.y
+ c.rotateDHKeys()
+ incCounter(&c.myCounter)
+
+ var ret []byte
+ ret = appendU16(ret, 2)
+ ret = append(ret, msgTypeSig)
+ ret = append(ret, encryptedSig...)
+ ret = append(ret, mac[:macPrefixBytes]...)
+ return ret
+}
+
+func (c *Conversation) processSig(in []byte) error {
+ encryptedSig, in, ok1 := getData(in)
+ theirMAC := in
+ if !ok1 || len(theirMAC) != macPrefixBytes {
+ return errors.New("otr: corrupt signature message")
+ }
+
+ if err := c.processEncryptedSig(encryptedSig, theirMAC, &c.sigKeys, false /* gy comes first */); err != nil {
+ return errors.New("otr: in signature message: " + err.Error())
+ }
+
+ c.theirCurrentDHPub = c.gy
+ c.theirLastDHPub = nil
+
+ return nil
+}
+
+func (c *Conversation) rotateDHKeys() {
+ // evict slots using our retired key id
+ for i := range c.keySlots {
+ slot := &c.keySlots[i]
+ if slot.used && slot.myKeyId == c.myKeyId-1 {
+ slot.used = false
+ c.oldMACs = append(c.oldMACs, slot.recvMACKey...)
+ }
+ }
+
+ c.myLastDHPriv = c.myCurrentDHPriv
+ c.myLastDHPub = c.myCurrentDHPub
+
+ var xBytes [dhPrivateBytes]byte
+ c.myCurrentDHPriv = c.randMPI(xBytes[:])
+ c.myCurrentDHPub = new(big.Int).Exp(g, c.myCurrentDHPriv, p)
+ c.myKeyId++
+}
+
+func (c *Conversation) processData(in []byte) (out []byte, tlvs []tlv, err error) {
+ origIn := in
+ flags, in, ok1 := getU8(in)
+ theirKeyId, in, ok2 := getU32(in)
+ myKeyId, in, ok3 := getU32(in)
+ y, in, ok4 := getMPI(in)
+ counter, in, ok5 := getNBytes(in, 8)
+ encrypted, in, ok6 := getData(in)
+ macedData := origIn[:len(origIn)-len(in)]
+ theirMAC, in, ok7 := getNBytes(in, macPrefixBytes)
+ _, in, ok8 := getData(in)
+ if !ok1 || !ok2 || !ok3 || !ok4 || !ok5 || !ok6 || !ok7 || !ok8 || len(in) > 0 {
+ err = errors.New("otr: corrupt data message")
+ return
+ }
+
+ ignoreErrors := flags&1 != 0
+
+ slot, err := c.calcDataKeys(myKeyId, theirKeyId)
+ if err != nil {
+ if ignoreErrors {
+ err = nil
+ }
+ return
+ }
+
+ mac := hmac.New(sha1.New, slot.recvMACKey)
+ mac.Write([]byte{0, 2, 3})
+ mac.Write(macedData)
+ myMAC := mac.Sum(nil)
+ if len(myMAC) != len(theirMAC) || subtle.ConstantTimeCompare(myMAC, theirMAC) == 0 {
+ if !ignoreErrors {
+ err = errors.New("otr: bad MAC on data message")
+ }
+ return
+ }
+
+ if bytes.Compare(counter, slot.theirLastCtr[:]) <= 0 {
+ err = errors.New("otr: counter regressed")
+ return
+ }
+ copy(slot.theirLastCtr[:], counter)
+
+ var iv [aes.BlockSize]byte
+ copy(iv[:], counter)
+ aesCipher, err := aes.NewCipher(slot.recvAESKey)
+ if err != nil {
+ panic(err.Error())
+ }
+ ctr := cipher.NewCTR(aesCipher, iv[:])
+ ctr.XORKeyStream(encrypted, encrypted)
+ decrypted := encrypted
+
+ if myKeyId == c.myKeyId {
+ c.rotateDHKeys()
+ }
+ if theirKeyId == c.theirKeyId {
+ // evict slots using their retired key id
+ for i := range c.keySlots {
+ slot := &c.keySlots[i]
+ if slot.used && slot.theirKeyId == theirKeyId-1 {
+ slot.used = false
+ c.oldMACs = append(c.oldMACs, slot.recvMACKey...)
+ }
+ }
+
+ c.theirLastDHPub = c.theirCurrentDHPub
+ c.theirKeyId++
+ c.theirCurrentDHPub = y
+ }
+
+ if nulPos := bytes.IndexByte(decrypted, 0); nulPos >= 0 {
+ out = decrypted[:nulPos]
+ tlvData := decrypted[nulPos+1:]
+ for len(tlvData) > 0 {
+ var t tlv
+ var ok1, ok2, ok3 bool
+
+ t.typ, tlvData, ok1 = getU16(tlvData)
+ t.length, tlvData, ok2 = getU16(tlvData)
+ t.data, tlvData, ok3 = getNBytes(tlvData, int(t.length))
+ if !ok1 || !ok2 || !ok3 {
+				err = errors.New("otr: corrupt tlv data")
+				return
+ }
+ tlvs = append(tlvs, t)
+ }
+ } else {
+ out = decrypted
+ }
+
+ return
+}
+
+func (c *Conversation) generateData(msg []byte, extra *tlv) []byte {
+ slot, err := c.calcDataKeys(c.myKeyId-1, c.theirKeyId)
+ if err != nil {
+ panic("otr: failed to generate sending keys: " + err.Error())
+ }
+
+ var plaintext []byte
+ plaintext = append(plaintext, msg...)
+ plaintext = append(plaintext, 0)
+
+ padding := paddingGranularity - ((len(plaintext) + 4) % paddingGranularity)
+ plaintext = appendU16(plaintext, tlvTypePadding)
+ plaintext = appendU16(plaintext, uint16(padding))
+ for i := 0; i < padding; i++ {
+ plaintext = append(plaintext, 0)
+ }
+
+ if extra != nil {
+ plaintext = appendU16(plaintext, extra.typ)
+ plaintext = appendU16(plaintext, uint16(len(extra.data)))
+ plaintext = append(plaintext, extra.data...)
+ }
+
+ encrypted := make([]byte, len(plaintext))
+
+ var iv [aes.BlockSize]byte
+ copy(iv[:], c.myCounter[:])
+ aesCipher, err := aes.NewCipher(slot.sendAESKey)
+ if err != nil {
+ panic(err.Error())
+ }
+ ctr := cipher.NewCTR(aesCipher, iv[:])
+ ctr.XORKeyStream(encrypted, plaintext)
+
+ var ret []byte
+ ret = appendU16(ret, 2)
+ ret = append(ret, msgTypeData)
+ ret = append(ret, 0 /* flags */)
+ ret = appendU32(ret, c.myKeyId-1)
+ ret = appendU32(ret, c.theirKeyId)
+ ret = appendMPI(ret, c.myCurrentDHPub)
+ ret = append(ret, c.myCounter[:]...)
+ ret = appendData(ret, encrypted)
+
+ mac := hmac.New(sha1.New, slot.sendMACKey)
+ mac.Write(ret)
+ ret = append(ret, mac.Sum(nil)[:macPrefixBytes]...)
+ ret = appendData(ret, c.oldMACs)
+ c.oldMACs = nil
+ incCounter(&c.myCounter)
+
+ return ret
+}
+
+func incCounter(counter *[8]byte) {
+ for i := 7; i >= 0; i-- {
+ counter[i]++
+ if counter[i] > 0 {
+ break
+ }
+ }
+}
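+
+// Illustrative sketch (editor's addition): incCounter treats the 8 bytes as a
+// big-endian integer, carrying into the next byte on overflow. For example:
+func incCounterExample() bool {
+	ctr := [8]byte{0, 0, 0, 0, 0, 0, 0, 0xff}
+	incCounter(&ctr)
+	return ctr == [8]byte{0, 0, 0, 0, 0, 0, 1, 0}
+}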
+
+// calcDataKeys computes the keys used to encrypt a data message given the key
+// IDs.
+func (c *Conversation) calcDataKeys(myKeyId, theirKeyId uint32) (slot *keySlot, err error) {
+ // Check for a cache hit.
+ for i := range c.keySlots {
+ slot = &c.keySlots[i]
+ if slot.used && slot.theirKeyId == theirKeyId && slot.myKeyId == myKeyId {
+ return
+ }
+ }
+
+ // Find an empty slot to write into.
+ slot = nil
+ for i := range c.keySlots {
+ if !c.keySlots[i].used {
+ slot = &c.keySlots[i]
+ break
+ }
+ }
+ if slot == nil {
+ return nil, errors.New("otr: internal error: no more key slots")
+ }
+
+ var myPriv, myPub, theirPub *big.Int
+
+ if myKeyId == c.myKeyId {
+ myPriv = c.myCurrentDHPriv
+ myPub = c.myCurrentDHPub
+ } else if myKeyId == c.myKeyId-1 {
+ myPriv = c.myLastDHPriv
+ myPub = c.myLastDHPub
+ } else {
+ err = errors.New("otr: peer requested keyid " + strconv.FormatUint(uint64(myKeyId), 10) + " when I'm on " + strconv.FormatUint(uint64(c.myKeyId), 10))
+ return
+ }
+
+ if theirKeyId == c.theirKeyId {
+ theirPub = c.theirCurrentDHPub
+ } else if theirKeyId == c.theirKeyId-1 && c.theirLastDHPub != nil {
+ theirPub = c.theirLastDHPub
+ } else {
+		err = errors.New("otr: peer requested keyid " + strconv.FormatUint(uint64(theirKeyId), 10) + " when they're on " + strconv.FormatUint(uint64(c.theirKeyId), 10))
+ return
+ }
+
+ var sendPrefixByte, recvPrefixByte [1]byte
+
+ if myPub.Cmp(theirPub) > 0 {
+ // we're the high end
+ sendPrefixByte[0], recvPrefixByte[0] = 1, 2
+ } else {
+ // we're the low end
+ sendPrefixByte[0], recvPrefixByte[0] = 2, 1
+ }
+
+ s := new(big.Int).Exp(theirPub, myPriv, p)
+ sBytes := appendMPI(nil, s)
+
+ h := sha1.New()
+ h.Write(sendPrefixByte[:])
+ h.Write(sBytes)
+ slot.sendAESKey = h.Sum(slot.sendAESKey[:0])[:16]
+
+ h.Reset()
+ h.Write(slot.sendAESKey)
+ slot.sendMACKey = h.Sum(slot.sendMACKey[:0])
+
+ h.Reset()
+ h.Write(recvPrefixByte[:])
+ h.Write(sBytes)
+ slot.recvAESKey = h.Sum(slot.recvAESKey[:0])[:16]
+
+ h.Reset()
+ h.Write(slot.recvAESKey)
+ slot.recvMACKey = h.Sum(slot.recvMACKey[:0])
+
+ slot.theirKeyId = theirKeyId
+ slot.myKeyId = myKeyId
+ slot.used = true
+
+ zero(slot.theirLastCtr[:])
+ return
+}
+
+func (c *Conversation) calcAKEKeys(s *big.Int) {
+ mpi := appendMPI(nil, s)
+ h := sha256.New()
+
+ var cBytes [32]byte
+ hashWithPrefix(c.SSID[:], 0, mpi, h)
+
+ hashWithPrefix(cBytes[:], 1, mpi, h)
+ copy(c.revealKeys.c[:], cBytes[:16])
+ copy(c.sigKeys.c[:], cBytes[16:])
+
+ hashWithPrefix(c.revealKeys.m1[:], 2, mpi, h)
+ hashWithPrefix(c.revealKeys.m2[:], 3, mpi, h)
+ hashWithPrefix(c.sigKeys.m1[:], 4, mpi, h)
+ hashWithPrefix(c.sigKeys.m2[:], 5, mpi, h)
+}
+
+func hashWithPrefix(out []byte, prefix byte, in []byte, h hash.Hash) {
+ h.Reset()
+ var p [1]byte
+ p[0] = prefix
+ h.Write(p[:])
+ h.Write(in)
+ if len(out) == h.Size() {
+ h.Sum(out[:0])
+ } else {
+ digest := h.Sum(nil)
+ copy(out, digest)
+ }
+}
+
+func (c *Conversation) encode(msg []byte) [][]byte {
+ b64 := make([]byte, base64.StdEncoding.EncodedLen(len(msg))+len(msgPrefix)+1)
+ base64.StdEncoding.Encode(b64[len(msgPrefix):], msg)
+ copy(b64, msgPrefix)
+ b64[len(b64)-1] = '.'
+
+ if c.FragmentSize < minFragmentSize || len(b64) <= c.FragmentSize {
+ // We can encode this in a single fragment.
+ return [][]byte{b64}
+ }
+
+ // We have to fragment this message.
+ var ret [][]byte
+ bytesPerFragment := c.FragmentSize - minFragmentSize
+ numFragments := (len(b64) + bytesPerFragment) / bytesPerFragment
+
+ for i := 0; i < numFragments; i++ {
+ frag := []byte("?OTR," + strconv.Itoa(i+1) + "," + strconv.Itoa(numFragments) + ",")
+ todo := bytesPerFragment
+ if todo > len(b64) {
+ todo = len(b64)
+ }
+ frag = append(frag, b64[:todo]...)
+ b64 = b64[todo:]
+ frag = append(frag, ',')
+ ret = append(ret, frag)
+ }
+
+ return ret
+}
+
+func (c *Conversation) reset() {
+ c.myKeyId = 0
+
+ for i := range c.keySlots {
+ c.keySlots[i].used = false
+ }
+}
+
+type PublicKey struct {
+ dsa.PublicKey
+}
+
+func (pk *PublicKey) Parse(in []byte) ([]byte, bool) {
+ var ok bool
+ var pubKeyType uint16
+
+ if pubKeyType, in, ok = getU16(in); !ok || pubKeyType != 0 {
+ return nil, false
+ }
+ if pk.P, in, ok = getMPI(in); !ok {
+ return nil, false
+ }
+ if pk.Q, in, ok = getMPI(in); !ok {
+ return nil, false
+ }
+ if pk.G, in, ok = getMPI(in); !ok {
+ return nil, false
+ }
+ if pk.Y, in, ok = getMPI(in); !ok {
+ return nil, false
+ }
+
+ return in, true
+}
+
+func (pk *PublicKey) Serialize(in []byte) []byte {
+ in = appendU16(in, 0)
+ in = appendMPI(in, pk.P)
+ in = appendMPI(in, pk.Q)
+ in = appendMPI(in, pk.G)
+ in = appendMPI(in, pk.Y)
+ return in
+}
+
+// Fingerprint returns the 20-byte, binary fingerprint of the PublicKey.
+func (pk *PublicKey) Fingerprint() []byte {
+ b := pk.Serialize(nil)
+ h := sha1.New()
+ h.Write(b[2:])
+ return h.Sum(nil)
+}
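+
+// Illustrative sketch (editor's addition): OTR clients conventionally show the
+// 20-byte fingerprint to users as five groups of eight hex digits. A
+// formatting helper (not part of this package's API) could look like this.
+func formatFingerprintExample(pk *PublicKey) string {
+	fp := hex.EncodeToString(pk.Fingerprint()) // 40 hex characters
+	var out []byte
+	for i := 0; i < len(fp); i += 8 {
+		if i > 0 {
+			out = append(out, ' ')
+		}
+		out = append(out, fp[i:i+8]...)
+	}
+	return string(out)
+}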
+
+func (pk *PublicKey) Verify(hashed, sig []byte) ([]byte, bool) {
+ if len(sig) != 2*dsaSubgroupBytes {
+ return nil, false
+ }
+ r := new(big.Int).SetBytes(sig[:dsaSubgroupBytes])
+ s := new(big.Int).SetBytes(sig[dsaSubgroupBytes:])
+ ok := dsa.Verify(&pk.PublicKey, hashed, r, s)
+ return sig[dsaSubgroupBytes*2:], ok
+}
+
+type PrivateKey struct {
+ PublicKey
+ dsa.PrivateKey
+}
+
+func (priv *PrivateKey) Sign(rand io.Reader, hashed []byte) []byte {
+ r, s, err := dsa.Sign(rand, &priv.PrivateKey, hashed)
+ if err != nil {
+ panic(err.Error())
+ }
+ rBytes := r.Bytes()
+ sBytes := s.Bytes()
+ if len(rBytes) > dsaSubgroupBytes || len(sBytes) > dsaSubgroupBytes {
+ panic("DSA signature too large")
+ }
+
+ out := make([]byte, 2*dsaSubgroupBytes)
+ copy(out[dsaSubgroupBytes-len(rBytes):], rBytes)
+ copy(out[len(out)-len(sBytes):], sBytes)
+ return out
+}
+
+func (priv *PrivateKey) Serialize(in []byte) []byte {
+ in = priv.PublicKey.Serialize(in)
+ in = appendMPI(in, priv.PrivateKey.X)
+ return in
+}
+
+func (priv *PrivateKey) Parse(in []byte) ([]byte, bool) {
+ in, ok := priv.PublicKey.Parse(in)
+ if !ok {
+ return in, ok
+ }
+ priv.PrivateKey.PublicKey = priv.PublicKey.PublicKey
+ priv.PrivateKey.X, in, ok = getMPI(in)
+ return in, ok
+}
+
+func (priv *PrivateKey) Generate(rand io.Reader) {
+ if err := dsa.GenerateParameters(&priv.PrivateKey.PublicKey.Parameters, rand, dsa.L1024N160); err != nil {
+ panic(err.Error())
+ }
+ if err := dsa.GenerateKey(&priv.PrivateKey, rand); err != nil {
+ panic(err.Error())
+ }
+ priv.PublicKey.PublicKey = priv.PrivateKey.PublicKey
+}
+
+func notHex(r rune) bool {
+ if r >= '0' && r <= '9' ||
+ r >= 'a' && r <= 'f' ||
+ r >= 'A' && r <= 'F' {
+ return false
+ }
+
+ return true
+}
+
+// Import parses the contents of a libotr private key file.
+func (priv *PrivateKey) Import(in []byte) bool {
+ mpiStart := []byte(" #")
+
+ mpis := make([]*big.Int, 5)
+
+ for i := 0; i < len(mpis); i++ {
+ start := bytes.Index(in, mpiStart)
+ if start == -1 {
+ return false
+ }
+ in = in[start+len(mpiStart):]
+ end := bytes.IndexFunc(in, notHex)
+ if end == -1 {
+ return false
+ }
+ hexBytes := in[:end]
+ in = in[end:]
+
+ if len(hexBytes)&1 != 0 {
+ return false
+ }
+
+ mpiBytes := make([]byte, len(hexBytes)/2)
+ if _, err := hex.Decode(mpiBytes, hexBytes); err != nil {
+ return false
+ }
+
+ mpis[i] = new(big.Int).SetBytes(mpiBytes)
+ }
+
+ priv.PrivateKey.P = mpis[0]
+ priv.PrivateKey.Q = mpis[1]
+ priv.PrivateKey.G = mpis[2]
+ priv.PrivateKey.Y = mpis[3]
+ priv.PrivateKey.X = mpis[4]
+ priv.PublicKey.PublicKey = priv.PrivateKey.PublicKey
+
+ a := new(big.Int).Exp(priv.PrivateKey.G, priv.PrivateKey.X, priv.PrivateKey.P)
+ return a.Cmp(priv.PrivateKey.Y) == 0
+}
+
+func getU8(in []byte) (uint8, []byte, bool) {
+ if len(in) < 1 {
+ return 0, in, false
+ }
+ return in[0], in[1:], true
+}
+
+func getU16(in []byte) (uint16, []byte, bool) {
+ if len(in) < 2 {
+ return 0, in, false
+ }
+ r := uint16(in[0])<<8 | uint16(in[1])
+ return r, in[2:], true
+}
+
+func getU32(in []byte) (uint32, []byte, bool) {
+ if len(in) < 4 {
+ return 0, in, false
+ }
+ r := uint32(in[0])<<24 | uint32(in[1])<<16 | uint32(in[2])<<8 | uint32(in[3])
+ return r, in[4:], true
+}
+
+func getMPI(in []byte) (*big.Int, []byte, bool) {
+ l, in, ok := getU32(in)
+ if !ok || uint32(len(in)) < l {
+ return nil, in, false
+ }
+ r := new(big.Int).SetBytes(in[:l])
+ return r, in[l:], true
+}
+
+func getData(in []byte) ([]byte, []byte, bool) {
+ l, in, ok := getU32(in)
+ if !ok || uint32(len(in)) < l {
+ return nil, in, false
+ }
+ return in[:l], in[l:], true
+}
+
+func getNBytes(in []byte, n int) ([]byte, []byte, bool) {
+ if len(in) < n {
+ return nil, in, false
+ }
+ return in[:n], in[n:], true
+}
+
+func appendU16(out []byte, v uint16) []byte {
+ out = append(out, byte(v>>8), byte(v))
+ return out
+}
+
+func appendU32(out []byte, v uint32) []byte {
+ out = append(out, byte(v>>24), byte(v>>16), byte(v>>8), byte(v))
+ return out
+}
+
+func appendData(out, v []byte) []byte {
+ out = appendU32(out, uint32(len(v)))
+ out = append(out, v...)
+ return out
+}
+
+func appendMPI(out []byte, v *big.Int) []byte {
+ vBytes := v.Bytes()
+ out = appendU32(out, uint32(len(vBytes)))
+ out = append(out, vBytes...)
+ return out
+}
+
+func appendMPIs(out []byte, mpis ...*big.Int) []byte {
+ for _, mpi := range mpis {
+ out = appendMPI(out, mpi)
+ }
+ return out
+}
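+
+// Illustrative sketch (editor's addition): the get*/append* helpers above
+// implement the protocol's length-prefixed wire types (SHORT, INT, DATA, MPI).
+// A round trip through them looks like this.
+func wireRoundTripExample() bool {
+	var buf []byte
+	buf = appendU16(buf, 2)                             // protocol version
+	buf = append(buf, msgTypeDHKey)                     // single message-type byte
+	buf = appendMPI(buf, new(big.Int).SetInt64(0x1234)) // MPI
+	buf = appendData(buf, []byte("payload"))            // length-prefixed bytes
+
+	version, rest, ok1 := getU16(buf)
+	typ, rest, ok2 := getU8(rest)
+	mpi, rest, ok3 := getMPI(rest)
+	data, rest, ok4 := getData(rest)
+	return ok1 && ok2 && ok3 && ok4 && version == 2 &&
+		typ == msgTypeDHKey && mpi.Int64() == 0x1234 &&
+		string(data) == "payload" && len(rest) == 0
+}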
+
+func zero(b []byte) {
+ for i := range b {
+ b[i] = 0
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/otr_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/otr_test.go
new file mode 100644
index 00000000000..cfcd062b274
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/otr_test.go
@@ -0,0 +1,470 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package otr
+
+import (
+ "bufio"
+ "bytes"
+ "crypto/rand"
+ "encoding/hex"
+ "math/big"
+ "os"
+ "os/exec"
+ "testing"
+)
+
+var isQueryTests = []struct {
+ msg string
+ expectedVersion int
+}{
+ {"foo", 0},
+ {"?OtR", 0},
+ {"?OtR?", 0},
+ {"?OTR?", 0},
+ {"?OTRv?", 0},
+ {"?OTRv1?", 0},
+ {"?OTR?v1?", 0},
+ {"?OTR?v?", 0},
+ {"?OTR?v2?", 2},
+ {"?OTRv2?", 2},
+ {"?OTRv23?", 2},
+ {"?OTRv23 ?", 0},
+}
+
+func TestIsQuery(t *testing.T) {
+ for i, test := range isQueryTests {
+ version := isQuery([]byte(test.msg))
+ if version != test.expectedVersion {
+ t.Errorf("#%d: got %d, want %d", i, version, test.expectedVersion)
+ }
+ }
+}
+
+var alicePrivateKeyHex = "000000000080c81c2cb2eb729b7e6fd48e975a932c638b3a9055478583afa46755683e30102447f6da2d8bec9f386bbb5da6403b0040fee8650b6ab2d7f32c55ab017ae9b6aec8c324ab5844784e9a80e194830d548fb7f09a0410df2c4d5c8bc2b3e9ad484e65412be689cf0834694e0839fb2954021521ffdffb8f5c32c14dbf2020b3ce7500000014da4591d58def96de61aea7b04a8405fe1609308d000000808ddd5cb0b9d66956e3dea5a915d9aba9d8a6e7053b74dadb2fc52f9fe4e5bcc487d2305485ed95fed026ad93f06ebb8c9e8baf693b7887132c7ffdd3b0f72f4002ff4ed56583ca7c54458f8c068ca3e8a4dfa309d1dd5d34e2a4b68e6f4338835e5e0fb4317c9e4c7e4806dafda3ef459cd563775a586dd91b1319f72621bf3f00000080b8147e74d8c45e6318c37731b8b33b984a795b3653c2cd1d65cc99efe097cb7eb2fa49569bab5aab6e8a1c261a27d0f7840a5e80b317e6683042b59b6dceca2879c6ffc877a465be690c15e4a42f9a7588e79b10faac11b1ce3741fcef7aba8ce05327a2c16d279ee1b3d77eb783fb10e3356caa25635331e26dd42b8396c4d00000001420bec691fea37ecea58a5c717142f0b804452f57"
+
+var aliceFingerprintHex = "0bb01c360424522e94ee9c346ce877a1a4288b2f"
+
+var bobPrivateKeyHex = "000000000080a5138eb3d3eb9c1d85716faecadb718f87d31aaed1157671d7fee7e488f95e8e0ba60ad449ec732710a7dec5190f7182af2e2f98312d98497221dff160fd68033dd4f3a33b7c078d0d9f66e26847e76ca7447d4bab35486045090572863d9e4454777f24d6706f63e02548dfec2d0a620af37bbc1d24f884708a212c343b480d00000014e9c58f0ea21a5e4dfd9f44b6a9f7f6a9961a8fa9000000803c4d111aebd62d3c50c2889d420a32cdf1e98b70affcc1fcf44d59cca2eb019f6b774ef88153fb9b9615441a5fe25ea2d11b74ce922ca0232bd81b3c0fcac2a95b20cb6e6c0c5c1ace2e26f65dc43c751af0edbb10d669890e8ab6beea91410b8b2187af1a8347627a06ecea7e0f772c28aae9461301e83884860c9b656c722f0000008065af8625a555ea0e008cd04743671a3cda21162e83af045725db2eb2bb52712708dc0cc1a84c08b3649b88a966974bde27d8612c2861792ec9f08786a246fcadd6d8d3a81a32287745f309238f47618c2bd7612cb8b02d940571e0f30b96420bcd462ff542901b46109b1e5ad6423744448d20a57818a8cbb1647d0fea3b664e0000001440f9f2eb554cb00d45a5826b54bfa419b6980e48"
+
+func TestKeySerialization(t *testing.T) {
+ var priv PrivateKey
+ alicePrivateKey, _ := hex.DecodeString(alicePrivateKeyHex)
+ rest, ok := priv.Parse(alicePrivateKey)
+ if !ok {
+ t.Error("failed to parse private key")
+ }
+ if len(rest) > 0 {
+ t.Error("data remaining after parsing private key")
+ }
+
+ out := priv.Serialize(nil)
+ if !bytes.Equal(alicePrivateKey, out) {
+ t.Errorf("serialization (%x) is not equal to original (%x)", out, alicePrivateKey)
+ }
+
+ aliceFingerprint, _ := hex.DecodeString(aliceFingerprintHex)
+ fingerprint := priv.PublicKey.Fingerprint()
+ if !bytes.Equal(aliceFingerprint, fingerprint) {
+ t.Errorf("fingerprint (%x) is not equal to expected value (%x)", fingerprint, aliceFingerprint)
+ }
+}
+
+const libOTRPrivateKey = `(privkeys
+ (account
+(name "foo@example.com")
+(protocol prpl-jabber)
+(private-key
+ (dsa
+ (p #00FC07ABCF0DC916AFF6E9AE47BEF60C7AB9B4D6B2469E436630E36F8A489BE812486A09F30B71224508654940A835301ACC525A4FF133FC152CC53DCC59D65C30A54F1993FE13FE63E5823D4C746DB21B90F9B9C00B49EC7404AB1D929BA7FBA12F2E45C6E0A651689750E8528AB8C031D3561FECEE72EBB4A090D450A9B7A857#)
+ (q #00997BD266EF7B1F60A5C23F3A741F2AEFD07A2081#)
+ (g #535E360E8A95EBA46A4F7DE50AD6E9B2A6DB785A66B64EB9F20338D2A3E8FB0E94725848F1AA6CC567CB83A1CC517EC806F2E92EAE71457E80B2210A189B91250779434B41FC8A8873F6DB94BEA7D177F5D59E7E114EE10A49CFD9CEF88AE43387023B672927BA74B04EB6BBB5E57597766A2F9CE3857D7ACE3E1E3BC1FC6F26#)
+ (y #0AC8670AD767D7A8D9D14CC1AC6744CD7D76F993B77FFD9E39DF01E5A6536EF65E775FCEF2A983E2A19BD6415500F6979715D9FD1257E1FE2B6F5E1E74B333079E7C880D39868462A93454B41877BE62E5EF0A041C2EE9C9E76BD1E12AE25D9628DECB097025DD625EF49C3258A1A3C0FF501E3DC673B76D7BABF349009B6ECF#)
+ (x #14D0345A3562C480A039E3C72764F72D79043216#)
+ )
+ )
+ )
+)`
+
+func TestParseLibOTRPrivateKey(t *testing.T) {
+ var priv PrivateKey
+
+ if !priv.Import([]byte(libOTRPrivateKey)) {
+ t.Fatalf("Failed to import sample private key")
+ }
+}
+
+func TestSignVerify(t *testing.T) {
+ var priv PrivateKey
+ alicePrivateKey, _ := hex.DecodeString(alicePrivateKeyHex)
+ _, ok := priv.Parse(alicePrivateKey)
+ if !ok {
+ t.Error("failed to parse private key")
+ }
+
+ var msg [32]byte
+ rand.Reader.Read(msg[:])
+
+ sig := priv.Sign(rand.Reader, msg[:])
+ rest, ok := priv.PublicKey.Verify(msg[:], sig)
+ if !ok {
+ t.Errorf("signature (%x) of %x failed to verify", sig, msg[:])
+ } else if len(rest) > 0 {
+ t.Error("signature data remains after verification")
+ }
+
+ sig[10] ^= 80
+ _, ok = priv.PublicKey.Verify(msg[:], sig)
+ if ok {
+ t.Errorf("corrupted signature (%x) of %x verified", sig, msg[:])
+ }
+}
+
+func setupConversation(t *testing.T) (alice, bob *Conversation) {
+ alicePrivateKey, _ := hex.DecodeString(alicePrivateKeyHex)
+ bobPrivateKey, _ := hex.DecodeString(bobPrivateKeyHex)
+
+ alice, bob = new(Conversation), new(Conversation)
+
+ alice.PrivateKey = new(PrivateKey)
+ bob.PrivateKey = new(PrivateKey)
+ alice.PrivateKey.Parse(alicePrivateKey)
+ bob.PrivateKey.Parse(bobPrivateKey)
+ alice.FragmentSize = 100
+ bob.FragmentSize = 100
+
+ if alice.IsEncrypted() {
+ t.Error("Alice believes that the conversation is secure before we've started")
+ }
+ if bob.IsEncrypted() {
+ t.Error("Bob believes that the conversation is secure before we've started")
+ }
+
+ performHandshake(t, alice, bob)
+ return alice, bob
+}
+
+func performHandshake(t *testing.T, alice, bob *Conversation) {
+ var alicesMessage, bobsMessage [][]byte
+ var out []byte
+ var aliceChange, bobChange SecurityChange
+ var err error
+ alicesMessage = append(alicesMessage, []byte(QueryMessage))
+
+ for round := 0; len(alicesMessage) > 0 || len(bobsMessage) > 0; round++ {
+ bobsMessage = nil
+ for i, msg := range alicesMessage {
+ out, _, bobChange, bobsMessage, err = bob.Receive(msg)
+ if len(out) > 0 {
+ t.Errorf("Bob generated output during key exchange, round %d, message %d", round, i)
+ }
+ if err != nil {
+ t.Fatalf("Bob returned an error, round %d, message %d (%x): %s", round, i, msg, err)
+ }
+ if len(bobsMessage) > 0 && i != len(alicesMessage)-1 {
+ t.Errorf("Bob produced output while processing a fragment, round %d, message %d", round, i)
+ }
+ }
+
+ alicesMessage = nil
+ for i, msg := range bobsMessage {
+ out, _, aliceChange, alicesMessage, err = alice.Receive(msg)
+ if len(out) > 0 {
+ t.Errorf("Alice generated output during key exchange, round %d, message %d", round, i)
+ }
+ if err != nil {
+ t.Fatalf("Alice returned an error, round %d, message %d (%x): %s", round, i, msg, err)
+ }
+ if len(alicesMessage) > 0 && i != len(bobsMessage)-1 {
+ t.Errorf("Alice produced output while processing a fragment, round %d, message %d", round, i)
+ }
+ }
+ }
+
+ if aliceChange != NewKeys {
+ t.Errorf("Alice terminated without signaling new keys")
+ }
+ if bobChange != NewKeys {
+ t.Errorf("Bob terminated without signaling new keys")
+ }
+
+ if !bytes.Equal(alice.SSID[:], bob.SSID[:]) {
+ t.Errorf("Session identifiers don't match. Alice has %x, Bob has %x", alice.SSID[:], bob.SSID[:])
+ }
+
+ if !alice.IsEncrypted() {
+ t.Error("Alice doesn't believe that the conversation is secure")
+ }
+ if !bob.IsEncrypted() {
+ t.Error("Bob doesn't believe that the conversation is secure")
+ }
+}
+
+const (
+ firstRoundTrip = iota
+ subsequentRoundTrip
+ noMACKeyCheck
+)
+
+func roundTrip(t *testing.T, alice, bob *Conversation, message []byte, macKeyCheck int) {
+ alicesMessage, err := alice.Send(message)
+ if err != nil {
+ t.Errorf("Error from Alice sending message: %s", err)
+ }
+
+ if len(alice.oldMACs) != 0 {
+ t.Errorf("Alice has not revealed all MAC keys")
+ }
+
+ for i, msg := range alicesMessage {
+ out, encrypted, _, _, err := bob.Receive(msg)
+
+ if err != nil {
+ t.Errorf("Error generated while processing test message: %s", err.Error())
+ }
+ if len(out) > 0 {
+ if i != len(alicesMessage)-1 {
+ t.Fatal("Bob produced a message while processing a fragment of Alice's")
+ }
+ if !encrypted {
+ t.Errorf("Message was not marked as encrypted")
+ }
+ if !bytes.Equal(out, message) {
+ t.Errorf("Message corrupted: got %x, want %x", out, message)
+ }
+ }
+ }
+
+ switch macKeyCheck {
+ case firstRoundTrip:
+ if len(bob.oldMACs) != 0 {
+ t.Errorf("Bob should not have MAC keys to reveal")
+ }
+ case subsequentRoundTrip:
+ if len(bob.oldMACs) != 40 {
+ t.Errorf("Bob has %d bytes of MAC keys to reveal, but should have 40", len(bob.oldMACs))
+ }
+ }
+
+ bobsMessage, err := bob.Send(message)
+ if err != nil {
+ t.Errorf("Error from Bob sending message: %s", err)
+ }
+
+ if len(bob.oldMACs) != 0 {
+ t.Errorf("Bob has not revealed all MAC keys")
+ }
+
+ for i, msg := range bobsMessage {
+ out, encrypted, _, _, err := alice.Receive(msg)
+
+ if err != nil {
+ t.Errorf("Error generated while processing test message: %s", err.Error())
+ }
+ if len(out) > 0 {
+ if i != len(bobsMessage)-1 {
+ t.Fatal("Alice produced a message while processing a fragment of Bob's")
+ }
+ if !encrypted {
+ t.Errorf("Message was not marked as encrypted")
+ }
+ if !bytes.Equal(out, message) {
+ t.Errorf("Message corrupted: got %x, want %x", out, message)
+ }
+ }
+ }
+
+ switch macKeyCheck {
+ case firstRoundTrip:
+ if len(alice.oldMACs) != 20 {
+ t.Errorf("Alice has %d bytes of MAC keys to reveal, but should have 20", len(alice.oldMACs))
+ }
+ case subsequentRoundTrip:
+ if len(alice.oldMACs) != 40 {
+ t.Errorf("Alice has %d bytes of MAC keys to reveal, but should have 40", len(alice.oldMACs))
+ }
+ }
+}
+
+func TestConversation(t *testing.T) {
+ alice, bob := setupConversation(t)
+
+ var testMessages = [][]byte{
+ []byte("hello"), []byte("bye"),
+ }
+
+ roundTripType := firstRoundTrip
+
+ for _, testMessage := range testMessages {
+ roundTrip(t, alice, bob, testMessage, roundTripType)
+ roundTripType = subsequentRoundTrip
+ }
+}
+
+func TestGoodSMP(t *testing.T) {
+ var alice, bob Conversation
+
+ alice.smp.secret = new(big.Int).SetInt64(42)
+ bob.smp.secret = alice.smp.secret
+
+ var alicesMessages, bobsMessages []tlv
+ var aliceComplete, bobComplete bool
+ var err error
+ var out tlv
+
+ alicesMessages = alice.startSMP("")
+ for round := 0; len(alicesMessages) > 0 || len(bobsMessages) > 0; round++ {
+ bobsMessages = bobsMessages[:0]
+ for i, msg := range alicesMessages {
+ out, bobComplete, err = bob.processSMP(msg)
+ if err != nil {
+ t.Errorf("Error from Bob in round %d: %s", round, err)
+ }
+ if bobComplete && i != len(alicesMessages)-1 {
+ t.Errorf("Bob returned a completed signal before processing all of Alice's messages in round %d", round)
+ }
+ if out.typ != 0 {
+ bobsMessages = append(bobsMessages, out)
+ }
+ }
+
+ alicesMessages = alicesMessages[:0]
+ for i, msg := range bobsMessages {
+ out, aliceComplete, err = alice.processSMP(msg)
+ if err != nil {
+ t.Errorf("Error from Alice in round %d: %s", round, err)
+ }
+ if aliceComplete && i != len(bobsMessages)-1 {
+ t.Errorf("Alice returned a completed signal before processing all of Bob's messages in round %d", round)
+ }
+ if out.typ != 0 {
+ alicesMessages = append(alicesMessages, out)
+ }
+ }
+ }
+
+ if !aliceComplete || !bobComplete {
+ t.Errorf("SMP completed without both sides reporting success: alice: %v, bob: %v\n", aliceComplete, bobComplete)
+ }
+}
+
+func TestBadSMP(t *testing.T) {
+ var alice, bob Conversation
+
+ alice.smp.secret = new(big.Int).SetInt64(42)
+ bob.smp.secret = new(big.Int).SetInt64(43)
+
+ var alicesMessages, bobsMessages []tlv
+
+ alicesMessages = alice.startSMP("")
+ for round := 0; len(alicesMessages) > 0 || len(bobsMessages) > 0; round++ {
+ bobsMessages = bobsMessages[:0]
+ for _, msg := range alicesMessages {
+ out, complete, _ := bob.processSMP(msg)
+ if complete {
+ t.Errorf("Bob signaled completion in round %d", round)
+ }
+ if out.typ != 0 {
+ bobsMessages = append(bobsMessages, out)
+ }
+ }
+
+ alicesMessages = alicesMessages[:0]
+ for _, msg := range bobsMessages {
+ out, complete, _ := alice.processSMP(msg)
+ if complete {
+ t.Errorf("Alice signaled completion in round %d", round)
+ }
+ if out.typ != 0 {
+ alicesMessages = append(alicesMessages, out)
+ }
+ }
+ }
+}
+
+func TestRehandshaking(t *testing.T) {
+ alice, bob := setupConversation(t)
+ roundTrip(t, alice, bob, []byte("test"), firstRoundTrip)
+ roundTrip(t, alice, bob, []byte("test 2"), subsequentRoundTrip)
+ roundTrip(t, alice, bob, []byte("test 3"), subsequentRoundTrip)
+ roundTrip(t, alice, bob, []byte("test 4"), subsequentRoundTrip)
+ roundTrip(t, alice, bob, []byte("test 5"), subsequentRoundTrip)
+ roundTrip(t, alice, bob, []byte("test 6"), subsequentRoundTrip)
+ roundTrip(t, alice, bob, []byte("test 7"), subsequentRoundTrip)
+ roundTrip(t, alice, bob, []byte("test 8"), subsequentRoundTrip)
+ performHandshake(t, alice, bob)
+ roundTrip(t, alice, bob, []byte("test"), noMACKeyCheck)
+ roundTrip(t, alice, bob, []byte("test 2"), noMACKeyCheck)
+}
+
+func TestAgainstLibOTR(t *testing.T) {
+ // This test requires otr.c.test to be built as /tmp/a.out.
+	// If enabled, this test runs forever performing OTR handshakes in a
+ // loop.
+ return
+
+ alicePrivateKey, _ := hex.DecodeString(alicePrivateKeyHex)
+ var alice Conversation
+ alice.PrivateKey = new(PrivateKey)
+ alice.PrivateKey.Parse(alicePrivateKey)
+
+ cmd := exec.Command("/tmp/a.out")
+ cmd.Stderr = os.Stderr
+
+ out, err := cmd.StdinPipe()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer out.Close()
+ stdout, err := cmd.StdoutPipe()
+ if err != nil {
+ t.Fatal(err)
+ }
+ in := bufio.NewReader(stdout)
+
+ if err := cmd.Start(); err != nil {
+ t.Fatal(err)
+ }
+
+ out.Write([]byte(QueryMessage))
+ out.Write([]byte("\n"))
+ var expectedText = []byte("test message")
+
+ for {
+ line, isPrefix, err := in.ReadLine()
+ if isPrefix {
+ t.Fatal("line from subprocess too long")
+ }
+ if err != nil {
+ t.Fatal(err)
+ }
+ text, encrypted, change, alicesMessage, err := alice.Receive(line)
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, msg := range alicesMessage {
+ out.Write(msg)
+ out.Write([]byte("\n"))
+ }
+ if change == NewKeys {
+ alicesMessage, err := alice.Send([]byte("Go -> libotr test message"))
+ if err != nil {
+ t.Fatalf("error sending message: %s", err.Error())
+ } else {
+ for _, msg := range alicesMessage {
+ out.Write(msg)
+ out.Write([]byte("\n"))
+ }
+ }
+ }
+ if len(text) > 0 {
+ if !bytes.Equal(text, expectedText) {
+ t.Fatalf("expected %x, but got %x", expectedText, text)
+ }
+ if !encrypted {
+ t.Fatal("message wasn't encrypted")
+ }
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/smp.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/smp.go
new file mode 100644
index 00000000000..dc6de4ee0eb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/otr/smp.go
@@ -0,0 +1,572 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file implements the Socialist Millionaires Protocol as described in
+// http://www.cypherpunks.ca/otr/Protocol-v2-3.1.0.html. The protocol
+// specification is required in order to understand this code and, where
+// possible, the variable names in the code match up with the spec.
+
+package otr
+
+import (
+ "bytes"
+ "crypto/sha256"
+ "errors"
+ "hash"
+ "math/big"
+)
+
+type smpFailure string
+
+func (s smpFailure) Error() string {
+ return string(s)
+}
+
+var smpFailureError = smpFailure("otr: SMP protocol failed")
+var smpSecretMissingError = smpFailure("otr: mutual secret needed")
+
+const smpVersion = 1
+
+const (
+ smpState1 = iota
+ smpState2
+ smpState3
+ smpState4
+)
+
+type smpState struct {
+ state int
+ a2, a3, b2, b3, pb, qb *big.Int
+ g2a, g3a *big.Int
+ g2, g3 *big.Int
+ g3b, papb, qaqb, ra *big.Int
+ saved *tlv
+ secret *big.Int
+ question string
+}
+
+func (c *Conversation) startSMP(question string) (tlvs []tlv) {
+ if c.smp.state != smpState1 {
+ tlvs = append(tlvs, c.generateSMPAbort())
+ }
+ tlvs = append(tlvs, c.generateSMP1(question))
+ c.smp.question = ""
+ c.smp.state = smpState2
+ return
+}
+
+func (c *Conversation) resetSMP() {
+ c.smp.state = smpState1
+ c.smp.secret = nil
+ c.smp.question = ""
+}
+
+func (c *Conversation) processSMP(in tlv) (out tlv, complete bool, err error) {
+ data := in.data
+
+ switch in.typ {
+ case tlvTypeSMPAbort:
+ if c.smp.state != smpState1 {
+ err = smpFailureError
+ }
+ c.resetSMP()
+ return
+ case tlvTypeSMP1WithQuestion:
+		// We preprocess this into an SMP1 message.
+ nulPos := bytes.IndexByte(data, 0)
+ if nulPos == -1 {
+ err = errors.New("otr: SMP message with question didn't contain a NUL byte")
+ return
+ }
+ c.smp.question = string(data[:nulPos])
+ data = data[nulPos+1:]
+ }
+
+ numMPIs, data, ok := getU32(data)
+ if !ok || numMPIs > 20 {
+ err = errors.New("otr: corrupt SMP message")
+ return
+ }
+
+ mpis := make([]*big.Int, numMPIs)
+ for i := range mpis {
+ var ok bool
+ mpis[i], data, ok = getMPI(data)
+ if !ok {
+ err = errors.New("otr: corrupt SMP message")
+ return
+ }
+ }
+
+ switch in.typ {
+ case tlvTypeSMP1, tlvTypeSMP1WithQuestion:
+ if c.smp.state != smpState1 {
+ c.resetSMP()
+ out = c.generateSMPAbort()
+ return
+ }
+ if c.smp.secret == nil {
+ err = smpSecretMissingError
+ return
+ }
+ if err = c.processSMP1(mpis); err != nil {
+ return
+ }
+ c.smp.state = smpState3
+ out = c.generateSMP2()
+ case tlvTypeSMP2:
+ if c.smp.state != smpState2 {
+ c.resetSMP()
+ out = c.generateSMPAbort()
+ return
+ }
+ if out, err = c.processSMP2(mpis); err != nil {
+ out = c.generateSMPAbort()
+ return
+ }
+ c.smp.state = smpState4
+ case tlvTypeSMP3:
+ if c.smp.state != smpState3 {
+ c.resetSMP()
+ out = c.generateSMPAbort()
+ return
+ }
+ if out, err = c.processSMP3(mpis); err != nil {
+ return
+ }
+ c.smp.state = smpState1
+ c.smp.secret = nil
+ complete = true
+ case tlvTypeSMP4:
+ if c.smp.state != smpState4 {
+ c.resetSMP()
+ out = c.generateSMPAbort()
+ return
+ }
+ if err = c.processSMP4(mpis); err != nil {
+ out = c.generateSMPAbort()
+ return
+ }
+ c.smp.state = smpState1
+ c.smp.secret = nil
+ complete = true
+ default:
+ panic("unknown SMP message")
+ }
+
+ return
+}
+
+func (c *Conversation) calcSMPSecret(mutualSecret []byte, weStarted bool) {
+ h := sha256.New()
+ h.Write([]byte{smpVersion})
+ if weStarted {
+ h.Write(c.PrivateKey.PublicKey.Fingerprint())
+ h.Write(c.TheirPublicKey.Fingerprint())
+ } else {
+ h.Write(c.TheirPublicKey.Fingerprint())
+ h.Write(c.PrivateKey.PublicKey.Fingerprint())
+ }
+ h.Write(c.SSID[:])
+ h.Write(mutualSecret)
+ c.smp.secret = new(big.Int).SetBytes(h.Sum(nil))
+}
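+
+// Illustrative sketch (editor's addition): weStarted only controls the order
+// of the two fingerprints, so both ends of an established conversation (same
+// SSID, exchanged public keys) hash identical input and derive the same SMP
+// secret from the same mutual secret.
+func smpSecretsMatchExample(initiator, responder *Conversation, mutualSecret []byte) bool {
+	initiator.calcSMPSecret(mutualSecret, true)
+	responder.calcSMPSecret(mutualSecret, false)
+	return initiator.smp.secret.Cmp(responder.smp.secret) == 0
+}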
+
+func (c *Conversation) generateSMP1(question string) tlv {
+ var randBuf [16]byte
+ c.smp.a2 = c.randMPI(randBuf[:])
+ c.smp.a3 = c.randMPI(randBuf[:])
+ g2a := new(big.Int).Exp(g, c.smp.a2, p)
+ g3a := new(big.Int).Exp(g, c.smp.a3, p)
+ h := sha256.New()
+
+ r2 := c.randMPI(randBuf[:])
+ r := new(big.Int).Exp(g, r2, p)
+ c2 := new(big.Int).SetBytes(hashMPIs(h, 1, r))
+ d2 := new(big.Int).Mul(c.smp.a2, c2)
+ d2.Sub(r2, d2)
+ d2.Mod(d2, q)
+ if d2.Sign() < 0 {
+ d2.Add(d2, q)
+ }
+
+ r3 := c.randMPI(randBuf[:])
+ r.Exp(g, r3, p)
+ c3 := new(big.Int).SetBytes(hashMPIs(h, 2, r))
+ d3 := new(big.Int).Mul(c.smp.a3, c3)
+ d3.Sub(r3, d3)
+ d3.Mod(d3, q)
+ if d3.Sign() < 0 {
+ d3.Add(d3, q)
+ }
+
+ var ret tlv
+ if len(question) > 0 {
+ ret.typ = tlvTypeSMP1WithQuestion
+ ret.data = append(ret.data, question...)
+ ret.data = append(ret.data, 0)
+ } else {
+ ret.typ = tlvTypeSMP1
+ }
+ ret.data = appendU32(ret.data, 6)
+ ret.data = appendMPIs(ret.data, g2a, c2, d2, g3a, c3, d3)
+ return ret
+}
+
+func (c *Conversation) processSMP1(mpis []*big.Int) error {
+ if len(mpis) != 6 {
+ return errors.New("otr: incorrect number of arguments in SMP1 message")
+ }
+ g2a := mpis[0]
+ c2 := mpis[1]
+ d2 := mpis[2]
+ g3a := mpis[3]
+ c3 := mpis[4]
+ d3 := mpis[5]
+ h := sha256.New()
+
+ r := new(big.Int).Exp(g, d2, p)
+ s := new(big.Int).Exp(g2a, c2, p)
+ r.Mul(r, s)
+ r.Mod(r, p)
+ t := new(big.Int).SetBytes(hashMPIs(h, 1, r))
+ if c2.Cmp(t) != 0 {
+ return errors.New("otr: ZKP c2 incorrect in SMP1 message")
+ }
+ r.Exp(g, d3, p)
+ s.Exp(g3a, c3, p)
+ r.Mul(r, s)
+ r.Mod(r, p)
+ t.SetBytes(hashMPIs(h, 2, r))
+ if c3.Cmp(t) != 0 {
+ return errors.New("otr: ZKP c3 incorrect in SMP1 message")
+ }
+
+ c.smp.g2a = g2a
+ c.smp.g3a = g3a
+ return nil
+}
+
+func (c *Conversation) generateSMP2() tlv {
+ var randBuf [16]byte
+ b2 := c.randMPI(randBuf[:])
+ c.smp.b3 = c.randMPI(randBuf[:])
+ r2 := c.randMPI(randBuf[:])
+ r3 := c.randMPI(randBuf[:])
+ r4 := c.randMPI(randBuf[:])
+ r5 := c.randMPI(randBuf[:])
+ r6 := c.randMPI(randBuf[:])
+
+ g2b := new(big.Int).Exp(g, b2, p)
+ g3b := new(big.Int).Exp(g, c.smp.b3, p)
+
+ r := new(big.Int).Exp(g, r2, p)
+ h := sha256.New()
+ c2 := new(big.Int).SetBytes(hashMPIs(h, 3, r))
+ d2 := new(big.Int).Mul(b2, c2)
+ d2.Sub(r2, d2)
+ d2.Mod(d2, q)
+ if d2.Sign() < 0 {
+ d2.Add(d2, q)
+ }
+
+ r.Exp(g, r3, p)
+ c3 := new(big.Int).SetBytes(hashMPIs(h, 4, r))
+ d3 := new(big.Int).Mul(c.smp.b3, c3)
+ d3.Sub(r3, d3)
+ d3.Mod(d3, q)
+ if d3.Sign() < 0 {
+ d3.Add(d3, q)
+ }
+
+ c.smp.g2 = new(big.Int).Exp(c.smp.g2a, b2, p)
+ c.smp.g3 = new(big.Int).Exp(c.smp.g3a, c.smp.b3, p)
+ c.smp.pb = new(big.Int).Exp(c.smp.g3, r4, p)
+ c.smp.qb = new(big.Int).Exp(g, r4, p)
+ r.Exp(c.smp.g2, c.smp.secret, p)
+ c.smp.qb.Mul(c.smp.qb, r)
+ c.smp.qb.Mod(c.smp.qb, p)
+
+ s := new(big.Int)
+ s.Exp(c.smp.g2, r6, p)
+ r.Exp(g, r5, p)
+ s.Mul(r, s)
+ s.Mod(s, p)
+ r.Exp(c.smp.g3, r5, p)
+ cp := new(big.Int).SetBytes(hashMPIs(h, 5, r, s))
+
+ // D5 = r5 - r4 cP mod q and D6 = r6 - y cP mod q
+
+ s.Mul(r4, cp)
+ r.Sub(r5, s)
+ d5 := new(big.Int).Mod(r, q)
+ if d5.Sign() < 0 {
+ d5.Add(d5, q)
+ }
+
+ s.Mul(c.smp.secret, cp)
+ r.Sub(r6, s)
+ d6 := new(big.Int).Mod(r, q)
+ if d6.Sign() < 0 {
+ d6.Add(d6, q)
+ }
+
+ var ret tlv
+ ret.typ = tlvTypeSMP2
+ ret.data = appendU32(ret.data, 11)
+ ret.data = appendMPIs(ret.data, g2b, c2, d2, g3b, c3, d3, c.smp.pb, c.smp.qb, cp, d5, d6)
+ return ret
+}
+
+func (c *Conversation) processSMP2(mpis []*big.Int) (out tlv, err error) {
+ if len(mpis) != 11 {
+ err = errors.New("otr: incorrect number of arguments in SMP2 message")
+ return
+ }
+ g2b := mpis[0]
+ c2 := mpis[1]
+ d2 := mpis[2]
+ g3b := mpis[3]
+ c3 := mpis[4]
+ d3 := mpis[5]
+ pb := mpis[6]
+ qb := mpis[7]
+ cp := mpis[8]
+ d5 := mpis[9]
+ d6 := mpis[10]
+ h := sha256.New()
+
+ r := new(big.Int).Exp(g, d2, p)
+ s := new(big.Int).Exp(g2b, c2, p)
+ r.Mul(r, s)
+ r.Mod(r, p)
+ s.SetBytes(hashMPIs(h, 3, r))
+ if c2.Cmp(s) != 0 {
+ err = errors.New("otr: ZKP c2 failed in SMP2 message")
+ return
+ }
+
+ r.Exp(g, d3, p)
+ s.Exp(g3b, c3, p)
+ r.Mul(r, s)
+ r.Mod(r, p)
+ s.SetBytes(hashMPIs(h, 4, r))
+ if c3.Cmp(s) != 0 {
+ err = errors.New("otr: ZKP c3 failed in SMP2 message")
+ return
+ }
+
+ c.smp.g2 = new(big.Int).Exp(g2b, c.smp.a2, p)
+ c.smp.g3 = new(big.Int).Exp(g3b, c.smp.a3, p)
+
+ r.Exp(g, d5, p)
+ s.Exp(c.smp.g2, d6, p)
+ r.Mul(r, s)
+ s.Exp(qb, cp, p)
+ r.Mul(r, s)
+ r.Mod(r, p)
+
+ s.Exp(c.smp.g3, d5, p)
+ t := new(big.Int).Exp(pb, cp, p)
+ s.Mul(s, t)
+ s.Mod(s, p)
+ t.SetBytes(hashMPIs(h, 5, s, r))
+ if cp.Cmp(t) != 0 {
+ err = errors.New("otr: ZKP cP failed in SMP2 message")
+ return
+ }
+
+ var randBuf [16]byte
+ r4 := c.randMPI(randBuf[:])
+ r5 := c.randMPI(randBuf[:])
+ r6 := c.randMPI(randBuf[:])
+ r7 := c.randMPI(randBuf[:])
+
+ pa := new(big.Int).Exp(c.smp.g3, r4, p)
+ r.Exp(c.smp.g2, c.smp.secret, p)
+ qa := new(big.Int).Exp(g, r4, p)
+ qa.Mul(qa, r)
+ qa.Mod(qa, p)
+
+ r.Exp(g, r5, p)
+ s.Exp(c.smp.g2, r6, p)
+ r.Mul(r, s)
+ r.Mod(r, p)
+
+ s.Exp(c.smp.g3, r5, p)
+ cp.SetBytes(hashMPIs(h, 6, s, r))
+
+ r.Mul(r4, cp)
+ d5 = new(big.Int).Sub(r5, r)
+ d5.Mod(d5, q)
+ if d5.Sign() < 0 {
+ d5.Add(d5, q)
+ }
+
+ r.Mul(c.smp.secret, cp)
+ d6 = new(big.Int).Sub(r6, r)
+ d6.Mod(d6, q)
+ if d6.Sign() < 0 {
+ d6.Add(d6, q)
+ }
+
+ r.ModInverse(qb, p)
+ qaqb := new(big.Int).Mul(qa, r)
+ qaqb.Mod(qaqb, p)
+
+ ra := new(big.Int).Exp(qaqb, c.smp.a3, p)
+ r.Exp(qaqb, r7, p)
+ s.Exp(g, r7, p)
+ cr := new(big.Int).SetBytes(hashMPIs(h, 7, s, r))
+
+ r.Mul(c.smp.a3, cr)
+ d7 := new(big.Int).Sub(r7, r)
+ d7.Mod(d7, q)
+ if d7.Sign() < 0 {
+ d7.Add(d7, q)
+ }
+
+ c.smp.g3b = g3b
+ c.smp.qaqb = qaqb
+
+ r.ModInverse(pb, p)
+ c.smp.papb = new(big.Int).Mul(pa, r)
+ c.smp.papb.Mod(c.smp.papb, p)
+ c.smp.ra = ra
+
+ out.typ = tlvTypeSMP3
+ out.data = appendU32(out.data, 8)
+ out.data = appendMPIs(out.data, pa, qa, cp, d5, d6, ra, cr, d7)
+ return
+}
+
+func (c *Conversation) processSMP3(mpis []*big.Int) (out tlv, err error) {
+ if len(mpis) != 8 {
+ err = errors.New("otr: incorrect number of arguments in SMP3 message")
+ return
+ }
+ pa := mpis[0]
+ qa := mpis[1]
+ cp := mpis[2]
+ d5 := mpis[3]
+ d6 := mpis[4]
+ ra := mpis[5]
+ cr := mpis[6]
+ d7 := mpis[7]
+ h := sha256.New()
+
+ r := new(big.Int).Exp(g, d5, p)
+ s := new(big.Int).Exp(c.smp.g2, d6, p)
+ r.Mul(r, s)
+ s.Exp(qa, cp, p)
+ r.Mul(r, s)
+ r.Mod(r, p)
+
+ s.Exp(c.smp.g3, d5, p)
+ t := new(big.Int).Exp(pa, cp, p)
+ s.Mul(s, t)
+ s.Mod(s, p)
+ t.SetBytes(hashMPIs(h, 6, s, r))
+ if t.Cmp(cp) != 0 {
+ err = errors.New("otr: ZKP cP failed in SMP3 message")
+ return
+ }
+
+ r.ModInverse(c.smp.qb, p)
+ qaqb := new(big.Int).Mul(qa, r)
+ qaqb.Mod(qaqb, p)
+
+ r.Exp(qaqb, d7, p)
+ s.Exp(ra, cr, p)
+ r.Mul(r, s)
+ r.Mod(r, p)
+
+ s.Exp(g, d7, p)
+ t.Exp(c.smp.g3a, cr, p)
+ s.Mul(s, t)
+ s.Mod(s, p)
+ t.SetBytes(hashMPIs(h, 7, s, r))
+ if t.Cmp(cr) != 0 {
+ err = errors.New("otr: ZKP cR failed in SMP3 message")
+ return
+ }
+
+ var randBuf [16]byte
+ r7 := c.randMPI(randBuf[:])
+ rb := new(big.Int).Exp(qaqb, c.smp.b3, p)
+
+ r.Exp(qaqb, r7, p)
+ s.Exp(g, r7, p)
+ cr = new(big.Int).SetBytes(hashMPIs(h, 8, s, r))
+
+ r.Mul(c.smp.b3, cr)
+ d7 = new(big.Int).Sub(r7, r)
+ d7.Mod(d7, q)
+ if d7.Sign() < 0 {
+ d7.Add(d7, q)
+ }
+
+ out.typ = tlvTypeSMP4
+ out.data = appendU32(out.data, 3)
+ out.data = appendMPIs(out.data, rb, cr, d7)
+
+ r.ModInverse(c.smp.pb, p)
+ r.Mul(pa, r)
+ r.Mod(r, p)
+ s.Exp(ra, c.smp.b3, p)
+ if r.Cmp(s) != 0 {
+ err = smpFailureError
+ }
+
+ return
+}
+
+func (c *Conversation) processSMP4(mpis []*big.Int) error {
+ if len(mpis) != 3 {
+ return errors.New("otr: incorrect number of arguments in SMP4 message")
+ }
+ rb := mpis[0]
+ cr := mpis[1]
+ d7 := mpis[2]
+ h := sha256.New()
+
+ r := new(big.Int).Exp(c.smp.qaqb, d7, p)
+ s := new(big.Int).Exp(rb, cr, p)
+ r.Mul(r, s)
+ r.Mod(r, p)
+
+ s.Exp(g, d7, p)
+ t := new(big.Int).Exp(c.smp.g3b, cr, p)
+ s.Mul(s, t)
+ s.Mod(s, p)
+ t.SetBytes(hashMPIs(h, 8, s, r))
+ if t.Cmp(cr) != 0 {
+ return errors.New("otr: ZKP cR failed in SMP4 message")
+ }
+
+ r.Exp(rb, c.smp.a3, p)
+ if r.Cmp(c.smp.papb) != 0 {
+ return smpFailureError
+ }
+
+ return nil
+}
+
+func (c *Conversation) generateSMPAbort() tlv {
+ return tlv{typ: tlvTypeSMPAbort}
+}
+
+func hashMPIs(h hash.Hash, magic byte, mpis ...*big.Int) []byte {
+ if h != nil {
+ h.Reset()
+ } else {
+ h = sha256.New()
+ }
+
+ h.Write([]byte{magic})
+ for _, mpi := range mpis {
+ h.Write(appendMPI(nil, mpi))
+ }
+ return h.Sum(nil)
+}
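
The SMP plumbing above is reachable only through Conversation: calcSMPSecret hashes the shared low-entropy secret (together with both fingerprints and the SSID) into smp.secret, startSMP emits SMP1, and every received TLV is then fed through processSMP until it reports complete. A minimal in-package sketch of the happy path, assuming two Conversation values whose key exchange has already finished (the helper name runSMP is illustrative, not part of the package):

func runSMP(alice, bob *Conversation, mutualSecret []byte, question string) error {
	// Both sides hash the secret into smp.secret; alice is the initiator.
	alice.calcSMPSecret(mutualSecret, true)
	bob.calcSMPSecret(mutualSecret, false)

	// Alice opens with SMP1 (optionally carrying a question) ...
	smp1 := alice.startSMP(question)

	// ... and the TLVs then alternate: SMP1 -> SMP2 -> SMP3 -> SMP4.
	smp2, _, err := bob.processSMP(smp1[len(smp1)-1])
	if err != nil {
		return err
	}
	smp3, _, err := alice.processSMP(smp2)
	if err != nil {
		return err
	}
	smp4, bobDone, err := bob.processSMP(smp3)
	if err != nil {
		return err
	}
	_, aliceDone, err := alice.processSMP(smp4)
	if err != nil {
		return err
	}
	if !bobDone || !aliceDone {
		return errors.New("otr: SMP exchange did not complete")
	}
	return nil
}
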
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pbkdf2/pbkdf2.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pbkdf2/pbkdf2.go
new file mode 100644
index 00000000000..593f6530084
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pbkdf2/pbkdf2.go
@@ -0,0 +1,77 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package pbkdf2 implements the key derivation function PBKDF2 as defined in RFC
+2898 / PKCS #5 v2.0.
+
+A key derivation function is useful when encrypting data based on a password
+or any other not-fully-random data. It uses a pseudorandom function to derive
+a secure encryption key based on the password.
+
+While v2.0 of the standard defines only one pseudorandom function to use,
+HMAC-SHA1, the drafted v2.1 specification allows use of all five FIPS Approved
+Hash Functions SHA-1, SHA-224, SHA-256, SHA-384 and SHA-512 for HMAC. To
+choose, you can pass the `New` functions from the different SHA packages to
+pbkdf2.Key.
+*/
+package pbkdf2 // import "golang.org/x/crypto/pbkdf2"
+
+import (
+ "crypto/hmac"
+ "hash"
+)
+
+// Key derives a key from the password, salt and iteration count, returning a
+// []byte of length keylen that can be used as cryptographic key. The key is
+// derived based on the method described as PBKDF2 with the HMAC variant using
+// the supplied hash function.
+//
+// For example, to use an HMAC-SHA-1 based PBKDF2 key derivation function, you
+// can get a derived key for e.g. AES-256 (which needs a 32-byte key) by
+// doing:
+//
+// dk := pbkdf2.Key([]byte("some password"), salt, 4096, 32, sha1.New)
+//
+// Remember to get a good random salt. At least 8 bytes is recommended by the
+// RFC.
+//
+// Using a higher iteration count will increase the cost of an exhaustive
+// search but will also make derivation proportionally slower.
+func Key(password, salt []byte, iter, keyLen int, h func() hash.Hash) []byte {
+ prf := hmac.New(h, password)
+ hashLen := prf.Size()
+ numBlocks := (keyLen + hashLen - 1) / hashLen
+
+ var buf [4]byte
+ dk := make([]byte, 0, numBlocks*hashLen)
+ U := make([]byte, hashLen)
+ for block := 1; block <= numBlocks; block++ {
+ // N.B.: || means concatenation, ^ means XOR
+ // for each block T_i = U_1 ^ U_2 ^ ... ^ U_iter
+ // U_1 = PRF(password, salt || uint(i))
+ prf.Reset()
+ prf.Write(salt)
+ buf[0] = byte(block >> 24)
+ buf[1] = byte(block >> 16)
+ buf[2] = byte(block >> 8)
+ buf[3] = byte(block)
+ prf.Write(buf[:4])
+ dk = prf.Sum(dk)
+ T := dk[len(dk)-hashLen:]
+ copy(U, T)
+
+ // U_n = PRF(password, U_(n-1))
+ for n := 2; n <= iter; n++ {
+ prf.Reset()
+ prf.Write(U)
+ U = U[:0]
+ U = prf.Sum(U)
+ for x := range U {
+ T[x] ^= U[x]
+ }
+ }
+ }
+ return dk[:keyLen]
+}
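
The package comment notes that any of the SHA constructors can be passed as the PRF; a short sketch deriving a 32-byte AES-256 key with HMAC-SHA-256 (the salt handling and the iteration count of 4096 are illustrative, not recommendations):

package main

import (
	"crypto/rand"
	"crypto/sha256"
	"fmt"

	"golang.org/x/crypto/pbkdf2"
)

func main() {
	// A fresh random salt; the RFC recommends at least 8 bytes.
	salt := make([]byte, 16)
	if _, err := rand.Read(salt); err != nil {
		panic(err)
	}

	// 32-byte key suitable for AES-256, with HMAC-SHA-256 as the PRF.
	key := pbkdf2.Key([]byte("some password"), salt, 4096, 32, sha256.New)
	fmt.Printf("derived key: %x\n", key)
}
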
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pbkdf2/pbkdf2_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pbkdf2/pbkdf2_test.go
new file mode 100644
index 00000000000..1379240610b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pbkdf2/pbkdf2_test.go
@@ -0,0 +1,157 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pbkdf2
+
+import (
+ "bytes"
+ "crypto/sha1"
+ "crypto/sha256"
+ "hash"
+ "testing"
+)
+
+type testVector struct {
+ password string
+ salt string
+ iter int
+ output []byte
+}
+
+// Test vectors from RFC 6070, http://tools.ietf.org/html/rfc6070
+var sha1TestVectors = []testVector{
+ {
+ "password",
+ "salt",
+ 1,
+ []byte{
+ 0x0c, 0x60, 0xc8, 0x0f, 0x96, 0x1f, 0x0e, 0x71,
+ 0xf3, 0xa9, 0xb5, 0x24, 0xaf, 0x60, 0x12, 0x06,
+ 0x2f, 0xe0, 0x37, 0xa6,
+ },
+ },
+ {
+ "password",
+ "salt",
+ 2,
+ []byte{
+ 0xea, 0x6c, 0x01, 0x4d, 0xc7, 0x2d, 0x6f, 0x8c,
+ 0xcd, 0x1e, 0xd9, 0x2a, 0xce, 0x1d, 0x41, 0xf0,
+ 0xd8, 0xde, 0x89, 0x57,
+ },
+ },
+ {
+ "password",
+ "salt",
+ 4096,
+ []byte{
+ 0x4b, 0x00, 0x79, 0x01, 0xb7, 0x65, 0x48, 0x9a,
+ 0xbe, 0xad, 0x49, 0xd9, 0x26, 0xf7, 0x21, 0xd0,
+ 0x65, 0xa4, 0x29, 0xc1,
+ },
+ },
+ // // This one takes too long
+ // {
+ // "password",
+ // "salt",
+ // 16777216,
+ // []byte{
+ // 0xee, 0xfe, 0x3d, 0x61, 0xcd, 0x4d, 0xa4, 0xe4,
+ // 0xe9, 0x94, 0x5b, 0x3d, 0x6b, 0xa2, 0x15, 0x8c,
+ // 0x26, 0x34, 0xe9, 0x84,
+ // },
+ // },
+ {
+ "passwordPASSWORDpassword",
+ "saltSALTsaltSALTsaltSALTsaltSALTsalt",
+ 4096,
+ []byte{
+ 0x3d, 0x2e, 0xec, 0x4f, 0xe4, 0x1c, 0x84, 0x9b,
+ 0x80, 0xc8, 0xd8, 0x36, 0x62, 0xc0, 0xe4, 0x4a,
+ 0x8b, 0x29, 0x1a, 0x96, 0x4c, 0xf2, 0xf0, 0x70,
+ 0x38,
+ },
+ },
+ {
+ "pass\000word",
+ "sa\000lt",
+ 4096,
+ []byte{
+ 0x56, 0xfa, 0x6a, 0xa7, 0x55, 0x48, 0x09, 0x9d,
+ 0xcc, 0x37, 0xd7, 0xf0, 0x34, 0x25, 0xe0, 0xc3,
+ },
+ },
+}
+
+// Test vectors from
+// http://stackoverflow.com/questions/5130513/pbkdf2-hmac-sha2-test-vectors
+var sha256TestVectors = []testVector{
+ {
+ "password",
+ "salt",
+ 1,
+ []byte{
+ 0x12, 0x0f, 0xb6, 0xcf, 0xfc, 0xf8, 0xb3, 0x2c,
+ 0x43, 0xe7, 0x22, 0x52, 0x56, 0xc4, 0xf8, 0x37,
+ 0xa8, 0x65, 0x48, 0xc9,
+ },
+ },
+ {
+ "password",
+ "salt",
+ 2,
+ []byte{
+ 0xae, 0x4d, 0x0c, 0x95, 0xaf, 0x6b, 0x46, 0xd3,
+ 0x2d, 0x0a, 0xdf, 0xf9, 0x28, 0xf0, 0x6d, 0xd0,
+ 0x2a, 0x30, 0x3f, 0x8e,
+ },
+ },
+ {
+ "password",
+ "salt",
+ 4096,
+ []byte{
+ 0xc5, 0xe4, 0x78, 0xd5, 0x92, 0x88, 0xc8, 0x41,
+ 0xaa, 0x53, 0x0d, 0xb6, 0x84, 0x5c, 0x4c, 0x8d,
+ 0x96, 0x28, 0x93, 0xa0,
+ },
+ },
+ {
+ "passwordPASSWORDpassword",
+ "saltSALTsaltSALTsaltSALTsaltSALTsalt",
+ 4096,
+ []byte{
+ 0x34, 0x8c, 0x89, 0xdb, 0xcb, 0xd3, 0x2b, 0x2f,
+ 0x32, 0xd8, 0x14, 0xb8, 0x11, 0x6e, 0x84, 0xcf,
+ 0x2b, 0x17, 0x34, 0x7e, 0xbc, 0x18, 0x00, 0x18,
+ 0x1c,
+ },
+ },
+ {
+ "pass\000word",
+ "sa\000lt",
+ 4096,
+ []byte{
+ 0x89, 0xb6, 0x9d, 0x05, 0x16, 0xf8, 0x29, 0x89,
+ 0x3c, 0x69, 0x62, 0x26, 0x65, 0x0a, 0x86, 0x87,
+ },
+ },
+}
+
+func testHash(t *testing.T, h func() hash.Hash, hashName string, vectors []testVector) {
+ for i, v := range vectors {
+ o := Key([]byte(v.password), []byte(v.salt), v.iter, len(v.output), h)
+ if !bytes.Equal(o, v.output) {
+ t.Errorf("%s %d: expected %x, got %x", hashName, i, v.output, o)
+ }
+ }
+}
+
+func TestWithHMACSHA1(t *testing.T) {
+ testHash(t, sha1.New, "SHA1", sha1TestVectors)
+}
+
+func TestWithHMACSHA256(t *testing.T) {
+ testHash(t, sha256.New, "SHA256", sha256TestVectors)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/bmp-string.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/bmp-string.go
new file mode 100644
index 00000000000..284d2a68f1e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/bmp-string.go
@@ -0,0 +1,50 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import (
+ "errors"
+ "unicode/utf16"
+)
+
+// bmpString returns s encoded in UCS-2 with a zero terminator.
+func bmpString(s string) ([]byte, error) {
+ // References:
+ // https://tools.ietf.org/html/rfc7292#appendix-B.1
+ // http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane
+ // - non-BMP characters are encoded in UTF 16 by using a surrogate pair of 16-bit codes
+ // EncodeRune returns 0xfffd if the rune does not need special encoding
+ // - the above RFC provides the info that BMPStrings are NULL terminated.
+
+ ret := make([]byte, 0, 2*len(s)+2)
+
+ for _, r := range s {
+ if t, _ := utf16.EncodeRune(r); t != 0xfffd {
+ return nil, errors.New("pkcs12: string contains characters that cannot be encoded in UCS-2")
+ }
+ ret = append(ret, byte(r/256), byte(r%256))
+ }
+
+ return append(ret, 0, 0), nil
+}
+
+func decodeBMPString(bmpString []byte) (string, error) {
+ if len(bmpString)%2 != 0 {
+ return "", errors.New("pkcs12: odd-length BMP string")
+ }
+
+ // strip terminator if present
+ if l := len(bmpString); l >= 2 && bmpString[l-1] == 0 && bmpString[l-2] == 0 {
+ bmpString = bmpString[:l-2]
+ }
+
+ s := make([]uint16, 0, len(bmpString)/2)
+ for len(bmpString) > 0 {
+ s = append(s, uint16(bmpString[0])<<8+uint16(bmpString[1]))
+ bmpString = bmpString[2:]
+ }
+
+ return string(utf16.Decode(s)), nil
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/bmp-string_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/bmp-string_test.go
new file mode 100644
index 00000000000..7fca55f4e8b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/bmp-string_test.go
@@ -0,0 +1,63 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import (
+ "bytes"
+ "encoding/hex"
+ "testing"
+)
+
+var bmpStringTests = []struct {
+ in string
+ expectedHex string
+ shouldFail bool
+}{
+ {"", "0000", false},
+ // Example from https://tools.ietf.org/html/rfc7292#appendix-B.
+ {"Beavis", "0042006500610076006900730000", false},
+ // Some characters from the "Letterlike Symbols Unicode block".
+ {"\u2115 - Double-struck N", "21150020002d00200044006f00750062006c0065002d00730074007200750063006b0020004e0000", false},
+ // any character outside the BMP should trigger an error.
+ {"\U0001f000 East wind (Mahjong)", "", true},
+}
+
+func TestBMPString(t *testing.T) {
+ for i, test := range bmpStringTests {
+ expected, err := hex.DecodeString(test.expectedHex)
+ if err != nil {
+ t.Fatalf("#%d: failed to decode expectation", i)
+ }
+
+ out, err := bmpString(test.in)
+ if err == nil && test.shouldFail {
+ t.Errorf("#%d: expected to fail, but produced %x", i, out)
+ continue
+ }
+
+ if err != nil && !test.shouldFail {
+ t.Errorf("#%d: failed unexpectedly: %s", i, err)
+ continue
+ }
+
+ if !test.shouldFail {
+ if !bytes.Equal(out, expected) {
+ t.Errorf("#%d: expected %s, got %x", i, test.expectedHex, out)
+ continue
+ }
+
+ roundTrip, err := decodeBMPString(out)
+ if err != nil {
+ t.Errorf("#%d: decoding output gave an error: %s", i, err)
+ continue
+ }
+
+ if roundTrip != test.in {
+ t.Errorf("#%d: decoding output resulted in %q, but it should have been %q", i, roundTrip, test.in)
+ continue
+ }
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/crypto.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/crypto.go
new file mode 100644
index 00000000000..4bd4470ec04
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/crypto.go
@@ -0,0 +1,131 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import (
+ "bytes"
+ "crypto/cipher"
+ "crypto/des"
+ "crypto/x509/pkix"
+ "encoding/asn1"
+ "errors"
+
+ "golang.org/x/crypto/pkcs12/internal/rc2"
+)
+
+var (
+ oidPBEWithSHAAnd3KeyTripleDESCBC = asn1.ObjectIdentifier([]int{1, 2, 840, 113549, 1, 12, 1, 3})
+ oidPBEWithSHAAnd40BitRC2CBC = asn1.ObjectIdentifier([]int{1, 2, 840, 113549, 1, 12, 1, 6})
+)
+
+// pbeCipher is an abstraction of a PKCS#12 cipher.
+type pbeCipher interface {
+ // create returns a cipher.Block given a key.
+ create(key []byte) (cipher.Block, error)
+ // deriveKey returns a key derived from the given password and salt.
+ deriveKey(salt, password []byte, iterations int) []byte
+	// deriveIV returns an IV derived from the given password and salt.
+ deriveIV(salt, password []byte, iterations int) []byte
+}
+
+type shaWithTripleDESCBC struct{}
+
+func (shaWithTripleDESCBC) create(key []byte) (cipher.Block, error) {
+ return des.NewTripleDESCipher(key)
+}
+
+func (shaWithTripleDESCBC) deriveKey(salt, password []byte, iterations int) []byte {
+ return pbkdf(sha1Sum, 20, 64, salt, password, iterations, 1, 24)
+}
+
+func (shaWithTripleDESCBC) deriveIV(salt, password []byte, iterations int) []byte {
+ return pbkdf(sha1Sum, 20, 64, salt, password, iterations, 2, 8)
+}
+
+type shaWith40BitRC2CBC struct{}
+
+func (shaWith40BitRC2CBC) create(key []byte) (cipher.Block, error) {
+ return rc2.New(key, len(key)*8)
+}
+
+func (shaWith40BitRC2CBC) deriveKey(salt, password []byte, iterations int) []byte {
+ return pbkdf(sha1Sum, 20, 64, salt, password, iterations, 1, 5)
+}
+
+func (shaWith40BitRC2CBC) deriveIV(salt, password []byte, iterations int) []byte {
+ return pbkdf(sha1Sum, 20, 64, salt, password, iterations, 2, 8)
+}
+
+type pbeParams struct {
+ Salt []byte
+ Iterations int
+}
+
+func pbDecrypterFor(algorithm pkix.AlgorithmIdentifier, password []byte) (cipher.BlockMode, int, error) {
+ var cipherType pbeCipher
+
+ switch {
+ case algorithm.Algorithm.Equal(oidPBEWithSHAAnd3KeyTripleDESCBC):
+ cipherType = shaWithTripleDESCBC{}
+ case algorithm.Algorithm.Equal(oidPBEWithSHAAnd40BitRC2CBC):
+ cipherType = shaWith40BitRC2CBC{}
+ default:
+ return nil, 0, NotImplementedError("algorithm " + algorithm.Algorithm.String() + " is not supported")
+ }
+
+ var params pbeParams
+ if err := unmarshal(algorithm.Parameters.FullBytes, &params); err != nil {
+ return nil, 0, err
+ }
+
+ key := cipherType.deriveKey(params.Salt, password, params.Iterations)
+ iv := cipherType.deriveIV(params.Salt, password, params.Iterations)
+
+ block, err := cipherType.create(key)
+ if err != nil {
+ return nil, 0, err
+ }
+
+ return cipher.NewCBCDecrypter(block, iv), block.BlockSize(), nil
+}
+
+func pbDecrypt(info decryptable, password []byte) (decrypted []byte, err error) {
+ cbc, blockSize, err := pbDecrypterFor(info.Algorithm(), password)
+ if err != nil {
+ return nil, err
+ }
+
+ encrypted := info.Data()
+ if len(encrypted) == 0 {
+ return nil, errors.New("pkcs12: empty encrypted data")
+ }
+ if len(encrypted)%blockSize != 0 {
+ return nil, errors.New("pkcs12: input is not a multiple of the block size")
+ }
+ decrypted = make([]byte, len(encrypted))
+ cbc.CryptBlocks(decrypted, encrypted)
+
+ psLen := int(decrypted[len(decrypted)-1])
+ if psLen == 0 || psLen > blockSize {
+ return nil, ErrDecryption
+ }
+
+ if len(decrypted) < psLen {
+ return nil, ErrDecryption
+ }
+ ps := decrypted[len(decrypted)-psLen:]
+ decrypted = decrypted[:len(decrypted)-psLen]
+ if bytes.Compare(ps, bytes.Repeat([]byte{byte(psLen)}, psLen)) != 0 {
+ return nil, ErrDecryption
+ }
+
+ return
+}
+
+// decryptable abstracts an object that contains ciphertext.
+type decryptable interface {
+ Algorithm() pkix.AlgorithmIdentifier
+ Data() []byte
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/crypto_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/crypto_test.go
new file mode 100644
index 00000000000..eb4dae8fceb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/crypto_test.go
@@ -0,0 +1,125 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import (
+ "bytes"
+ "crypto/x509/pkix"
+ "encoding/asn1"
+ "testing"
+)
+
+var sha1WithTripleDES = asn1.ObjectIdentifier([]int{1, 2, 840, 113549, 1, 12, 1, 3})
+
+func TestPbDecrypterFor(t *testing.T) {
+ params, _ := asn1.Marshal(pbeParams{
+ Salt: []byte{1, 2, 3, 4, 5, 6, 7, 8},
+ Iterations: 2048,
+ })
+ alg := pkix.AlgorithmIdentifier{
+ Algorithm: asn1.ObjectIdentifier([]int{1, 2, 3}),
+ Parameters: asn1.RawValue{
+ FullBytes: params,
+ },
+ }
+
+ pass, _ := bmpString("Sesame open")
+
+ _, _, err := pbDecrypterFor(alg, pass)
+ if _, ok := err.(NotImplementedError); !ok {
+ t.Errorf("expected not implemented error, got: %T %s", err, err)
+ }
+
+ alg.Algorithm = sha1WithTripleDES
+ cbc, blockSize, err := pbDecrypterFor(alg, pass)
+ if err != nil {
+ t.Errorf("unexpected error from pbDecrypterFor %v", err)
+ }
+ if blockSize != 8 {
+ t.Errorf("unexpected block size %d, wanted 8", blockSize)
+ }
+
+ plaintext := []byte{1, 2, 3, 4, 5, 6, 7, 8}
+ expectedCiphertext := []byte{185, 73, 135, 249, 137, 1, 122, 247}
+ ciphertext := make([]byte, len(plaintext))
+ cbc.CryptBlocks(ciphertext, plaintext)
+
+ if bytes.Compare(ciphertext, expectedCiphertext) != 0 {
+ t.Errorf("bad ciphertext, got %x but wanted %x", ciphertext, expectedCiphertext)
+ }
+}
+
+var pbDecryptTests = []struct {
+ in []byte
+ expected []byte
+ expectedError error
+}{
+ {
+ []byte("\x33\x73\xf3\x9f\xda\x49\xae\xfc\xa0\x9a\xdf\x5a\x58\xa0\xea\x46"), // 7 padding bytes
+ []byte("A secret!"),
+ nil,
+ },
+ {
+ []byte("\x33\x73\xf3\x9f\xda\x49\xae\xfc\x96\x24\x2f\x71\x7e\x32\x3f\xe7"), // 8 padding bytes
+ []byte("A secret"),
+ nil,
+ },
+ {
+ []byte("\x35\x0c\xc0\x8d\xab\xa9\x5d\x30\x7f\x9a\xec\x6a\xd8\x9b\x9c\xd9"), // 9 padding bytes, incorrect
+ nil,
+ ErrDecryption,
+ },
+ {
+ []byte("\xb2\xf9\x6e\x06\x60\xae\x20\xcf\x08\xa0\x7b\xd9\x6b\x20\xef\x41"), // incorrect padding bytes: [ ... 0x04 0x02 ]
+ nil,
+ ErrDecryption,
+ },
+}
+
+func TestPbDecrypt(t *testing.T) {
+ for i, test := range pbDecryptTests {
+ decryptable := testDecryptable{
+ data: test.in,
+ algorithm: pkix.AlgorithmIdentifier{
+ Algorithm: sha1WithTripleDES,
+ Parameters: pbeParams{
+ Salt: []byte("\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8"),
+ Iterations: 4096,
+ }.RawASN1(),
+ },
+ }
+ password, _ := bmpString("sesame")
+
+ plaintext, err := pbDecrypt(decryptable, password)
+ if err != test.expectedError {
+ t.Errorf("#%d: got error %q, but wanted %q", i, err, test.expectedError)
+ continue
+ }
+
+ if !bytes.Equal(plaintext, test.expected) {
+ t.Errorf("#%d: got %x, but wanted %x", i, plaintext, test.expected)
+ }
+ }
+}
+
+type testDecryptable struct {
+ data []byte
+ algorithm pkix.AlgorithmIdentifier
+}
+
+func (d testDecryptable) Algorithm() pkix.AlgorithmIdentifier { return d.algorithm }
+func (d testDecryptable) Data() []byte { return d.data }
+
+func (params pbeParams) RawASN1() (raw asn1.RawValue) {
+ asn1Bytes, err := asn1.Marshal(params)
+ if err != nil {
+ panic(err)
+ }
+ _, err = asn1.Unmarshal(asn1Bytes, &raw)
+ if err != nil {
+ panic(err)
+ }
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/errors.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/errors.go
new file mode 100644
index 00000000000..7377ce6fb2b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/errors.go
@@ -0,0 +1,23 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import "errors"
+
+var (
+ // ErrDecryption represents a failure to decrypt the input.
+ ErrDecryption = errors.New("pkcs12: decryption error, incorrect padding")
+
+ // ErrIncorrectPassword is returned when an incorrect password is detected.
+ // Usually, P12/PFX data is signed to be able to verify the password.
+ ErrIncorrectPassword = errors.New("pkcs12: decryption password incorrect")
+)
+
+// NotImplementedError indicates that the input is not currently supported.
+type NotImplementedError string
+
+func (e NotImplementedError) Error() string {
+ return "pkcs12: " + string(e)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/bench_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/bench_test.go
new file mode 100644
index 00000000000..3347f338c18
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/bench_test.go
@@ -0,0 +1,27 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package rc2
+
+import (
+ "testing"
+)
+
+func BenchmarkEncrypt(b *testing.B) {
+ r, _ := New([]byte{0, 0, 0, 0, 0, 0, 0, 0}, 64)
+ b.ResetTimer()
+ var src [8]byte
+ for i := 0; i < b.N; i++ {
+ r.Encrypt(src[:], src[:])
+ }
+}
+
+func BenchmarkDecrypt(b *testing.B) {
+ r, _ := New([]byte{0, 0, 0, 0, 0, 0, 0, 0}, 64)
+ b.ResetTimer()
+ var src [8]byte
+ for i := 0; i < b.N; i++ {
+ r.Decrypt(src[:], src[:])
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/rc2.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/rc2.go
new file mode 100644
index 00000000000..8c7090258c5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/rc2.go
@@ -0,0 +1,274 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package rc2 implements the RC2 cipher
+/*
+https://www.ietf.org/rfc/rfc2268.txt
+http://people.csail.mit.edu/rivest/pubs/KRRR98.pdf
+
+This code is licensed under the MIT license.
+*/
+package rc2
+
+import (
+ "crypto/cipher"
+ "encoding/binary"
+)
+
+// BlockSize is the RC2 block size in bytes.
+const BlockSize = 8
+
+type rc2Cipher struct {
+ k [64]uint16
+}
+
+// New returns a new RC2 cipher with the given key and effective key length t1 in bits.
+func New(key []byte, t1 int) (cipher.Block, error) {
+ // TODO(dgryski): error checking for key length
+ return &rc2Cipher{
+ k: expandKey(key, t1),
+ }, nil
+}
+
+func (*rc2Cipher) BlockSize() int { return BlockSize }
+
+var piTable = [256]byte{
+ 0xd9, 0x78, 0xf9, 0xc4, 0x19, 0xdd, 0xb5, 0xed, 0x28, 0xe9, 0xfd, 0x79, 0x4a, 0xa0, 0xd8, 0x9d,
+ 0xc6, 0x7e, 0x37, 0x83, 0x2b, 0x76, 0x53, 0x8e, 0x62, 0x4c, 0x64, 0x88, 0x44, 0x8b, 0xfb, 0xa2,
+ 0x17, 0x9a, 0x59, 0xf5, 0x87, 0xb3, 0x4f, 0x13, 0x61, 0x45, 0x6d, 0x8d, 0x09, 0x81, 0x7d, 0x32,
+ 0xbd, 0x8f, 0x40, 0xeb, 0x86, 0xb7, 0x7b, 0x0b, 0xf0, 0x95, 0x21, 0x22, 0x5c, 0x6b, 0x4e, 0x82,
+ 0x54, 0xd6, 0x65, 0x93, 0xce, 0x60, 0xb2, 0x1c, 0x73, 0x56, 0xc0, 0x14, 0xa7, 0x8c, 0xf1, 0xdc,
+ 0x12, 0x75, 0xca, 0x1f, 0x3b, 0xbe, 0xe4, 0xd1, 0x42, 0x3d, 0xd4, 0x30, 0xa3, 0x3c, 0xb6, 0x26,
+ 0x6f, 0xbf, 0x0e, 0xda, 0x46, 0x69, 0x07, 0x57, 0x27, 0xf2, 0x1d, 0x9b, 0xbc, 0x94, 0x43, 0x03,
+ 0xf8, 0x11, 0xc7, 0xf6, 0x90, 0xef, 0x3e, 0xe7, 0x06, 0xc3, 0xd5, 0x2f, 0xc8, 0x66, 0x1e, 0xd7,
+ 0x08, 0xe8, 0xea, 0xde, 0x80, 0x52, 0xee, 0xf7, 0x84, 0xaa, 0x72, 0xac, 0x35, 0x4d, 0x6a, 0x2a,
+ 0x96, 0x1a, 0xd2, 0x71, 0x5a, 0x15, 0x49, 0x74, 0x4b, 0x9f, 0xd0, 0x5e, 0x04, 0x18, 0xa4, 0xec,
+ 0xc2, 0xe0, 0x41, 0x6e, 0x0f, 0x51, 0xcb, 0xcc, 0x24, 0x91, 0xaf, 0x50, 0xa1, 0xf4, 0x70, 0x39,
+ 0x99, 0x7c, 0x3a, 0x85, 0x23, 0xb8, 0xb4, 0x7a, 0xfc, 0x02, 0x36, 0x5b, 0x25, 0x55, 0x97, 0x31,
+ 0x2d, 0x5d, 0xfa, 0x98, 0xe3, 0x8a, 0x92, 0xae, 0x05, 0xdf, 0x29, 0x10, 0x67, 0x6c, 0xba, 0xc9,
+ 0xd3, 0x00, 0xe6, 0xcf, 0xe1, 0x9e, 0xa8, 0x2c, 0x63, 0x16, 0x01, 0x3f, 0x58, 0xe2, 0x89, 0xa9,
+ 0x0d, 0x38, 0x34, 0x1b, 0xab, 0x33, 0xff, 0xb0, 0xbb, 0x48, 0x0c, 0x5f, 0xb9, 0xb1, 0xcd, 0x2e,
+ 0xc5, 0xf3, 0xdb, 0x47, 0xe5, 0xa5, 0x9c, 0x77, 0x0a, 0xa6, 0x20, 0x68, 0xfe, 0x7f, 0xc1, 0xad,
+}
+
+func expandKey(key []byte, t1 int) [64]uint16 {
+
+ l := make([]byte, 128)
+ copy(l, key)
+
+ var t = len(key)
+ var t8 = (t1 + 7) / 8
+ var tm = byte(255 % uint(1<<(8+uint(t1)-8*uint(t8))))
+
+ for i := len(key); i < 128; i++ {
+ l[i] = piTable[l[i-1]+l[uint8(i-t)]]
+ }
+
+ l[128-t8] = piTable[l[128-t8]&tm]
+
+ for i := 127 - t8; i >= 0; i-- {
+ l[i] = piTable[l[i+1]^l[i+t8]]
+ }
+
+ var k [64]uint16
+
+ for i := range k {
+ k[i] = uint16(l[2*i]) + uint16(l[2*i+1])*256
+ }
+
+ return k
+}
+
+func rotl16(x uint16, b uint) uint16 {
+ return (x >> (16 - b)) | (x << b)
+}
+
+func (c *rc2Cipher) Encrypt(dst, src []byte) {
+
+ r0 := binary.LittleEndian.Uint16(src[0:])
+ r1 := binary.LittleEndian.Uint16(src[2:])
+ r2 := binary.LittleEndian.Uint16(src[4:])
+ r3 := binary.LittleEndian.Uint16(src[6:])
+
+ var j int
+
+ for j <= 16 {
+ // mix r0
+ r0 = r0 + c.k[j] + (r3 & r2) + ((^r3) & r1)
+ r0 = rotl16(r0, 1)
+ j++
+
+ // mix r1
+ r1 = r1 + c.k[j] + (r0 & r3) + ((^r0) & r2)
+ r1 = rotl16(r1, 2)
+ j++
+
+ // mix r2
+ r2 = r2 + c.k[j] + (r1 & r0) + ((^r1) & r3)
+ r2 = rotl16(r2, 3)
+ j++
+
+ // mix r3
+ r3 = r3 + c.k[j] + (r2 & r1) + ((^r2) & r0)
+ r3 = rotl16(r3, 5)
+ j++
+
+ }
+
+ r0 = r0 + c.k[r3&63]
+ r1 = r1 + c.k[r0&63]
+ r2 = r2 + c.k[r1&63]
+ r3 = r3 + c.k[r2&63]
+
+ for j <= 40 {
+
+ // mix r0
+ r0 = r0 + c.k[j] + (r3 & r2) + ((^r3) & r1)
+ r0 = rotl16(r0, 1)
+ j++
+
+ // mix r1
+ r1 = r1 + c.k[j] + (r0 & r3) + ((^r0) & r2)
+ r1 = rotl16(r1, 2)
+ j++
+
+ // mix r2
+ r2 = r2 + c.k[j] + (r1 & r0) + ((^r1) & r3)
+ r2 = rotl16(r2, 3)
+ j++
+
+ // mix r3
+ r3 = r3 + c.k[j] + (r2 & r1) + ((^r2) & r0)
+ r3 = rotl16(r3, 5)
+ j++
+
+ }
+
+ r0 = r0 + c.k[r3&63]
+ r1 = r1 + c.k[r0&63]
+ r2 = r2 + c.k[r1&63]
+ r3 = r3 + c.k[r2&63]
+
+ for j <= 60 {
+
+ // mix r0
+ r0 = r0 + c.k[j] + (r3 & r2) + ((^r3) & r1)
+ r0 = rotl16(r0, 1)
+ j++
+
+ // mix r1
+ r1 = r1 + c.k[j] + (r0 & r3) + ((^r0) & r2)
+ r1 = rotl16(r1, 2)
+ j++
+
+ // mix r2
+ r2 = r2 + c.k[j] + (r1 & r0) + ((^r1) & r3)
+ r2 = rotl16(r2, 3)
+ j++
+
+ // mix r3
+ r3 = r3 + c.k[j] + (r2 & r1) + ((^r2) & r0)
+ r3 = rotl16(r3, 5)
+ j++
+ }
+
+ binary.LittleEndian.PutUint16(dst[0:], r0)
+ binary.LittleEndian.PutUint16(dst[2:], r1)
+ binary.LittleEndian.PutUint16(dst[4:], r2)
+ binary.LittleEndian.PutUint16(dst[6:], r3)
+}
+
+func (c *rc2Cipher) Decrypt(dst, src []byte) {
+
+ r0 := binary.LittleEndian.Uint16(src[0:])
+ r1 := binary.LittleEndian.Uint16(src[2:])
+ r2 := binary.LittleEndian.Uint16(src[4:])
+ r3 := binary.LittleEndian.Uint16(src[6:])
+
+ j := 63
+
+ for j >= 44 {
+ // unmix r3
+ r3 = rotl16(r3, 16-5)
+ r3 = r3 - c.k[j] - (r2 & r1) - ((^r2) & r0)
+ j--
+
+ // unmix r2
+ r2 = rotl16(r2, 16-3)
+ r2 = r2 - c.k[j] - (r1 & r0) - ((^r1) & r3)
+ j--
+
+ // unmix r1
+ r1 = rotl16(r1, 16-2)
+ r1 = r1 - c.k[j] - (r0 & r3) - ((^r0) & r2)
+ j--
+
+ // unmix r0
+ r0 = rotl16(r0, 16-1)
+ r0 = r0 - c.k[j] - (r3 & r2) - ((^r3) & r1)
+ j--
+ }
+
+ r3 = r3 - c.k[r2&63]
+ r2 = r2 - c.k[r1&63]
+ r1 = r1 - c.k[r0&63]
+ r0 = r0 - c.k[r3&63]
+
+ for j >= 20 {
+ // unmix r3
+ r3 = rotl16(r3, 16-5)
+ r3 = r3 - c.k[j] - (r2 & r1) - ((^r2) & r0)
+ j--
+
+ // unmix r2
+ r2 = rotl16(r2, 16-3)
+ r2 = r2 - c.k[j] - (r1 & r0) - ((^r1) & r3)
+ j--
+
+ // unmix r1
+ r1 = rotl16(r1, 16-2)
+ r1 = r1 - c.k[j] - (r0 & r3) - ((^r0) & r2)
+ j--
+
+ // unmix r0
+ r0 = rotl16(r0, 16-1)
+ r0 = r0 - c.k[j] - (r3 & r2) - ((^r3) & r1)
+ j--
+
+ }
+
+ r3 = r3 - c.k[r2&63]
+ r2 = r2 - c.k[r1&63]
+ r1 = r1 - c.k[r0&63]
+ r0 = r0 - c.k[r3&63]
+
+ for j >= 0 {
+
+ // unmix r3
+ r3 = rotl16(r3, 16-5)
+ r3 = r3 - c.k[j] - (r2 & r1) - ((^r2) & r0)
+ j--
+
+ // unmix r2
+ r2 = rotl16(r2, 16-3)
+ r2 = r2 - c.k[j] - (r1 & r0) - ((^r1) & r3)
+ j--
+
+ // unmix r1
+ r1 = rotl16(r1, 16-2)
+ r1 = r1 - c.k[j] - (r0 & r3) - ((^r0) & r2)
+ j--
+
+ // unmix r0
+ r0 = rotl16(r0, 16-1)
+ r0 = r0 - c.k[j] - (r3 & r2) - ((^r3) & r1)
+ j--
+
+ }
+
+ binary.LittleEndian.PutUint16(dst[0:], r0)
+ binary.LittleEndian.PutUint16(dst[2:], r1)
+ binary.LittleEndian.PutUint16(dst[4:], r2)
+ binary.LittleEndian.PutUint16(dst[6:], r3)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/rc2_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/rc2_test.go
new file mode 100644
index 00000000000..8a49dfaf3c6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/internal/rc2/rc2_test.go
@@ -0,0 +1,93 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package rc2
+
+import (
+ "bytes"
+ "encoding/hex"
+ "testing"
+)
+
+func TestEncryptDecrypt(t *testing.T) {
+
+ // TODO(dgryski): add the rest of the test vectors from the RFC
+ var tests = []struct {
+ key string
+ plain string
+ cipher string
+ t1 int
+ }{
+ {
+ "0000000000000000",
+ "0000000000000000",
+ "ebb773f993278eff",
+ 63,
+ },
+ {
+ "ffffffffffffffff",
+ "ffffffffffffffff",
+ "278b27e42e2f0d49",
+ 64,
+ },
+ {
+ "3000000000000000",
+ "1000000000000001",
+ "30649edf9be7d2c2",
+ 64,
+ },
+ {
+ "88",
+ "0000000000000000",
+ "61a8a244adacccf0",
+ 64,
+ },
+ {
+ "88bca90e90875a",
+ "0000000000000000",
+ "6ccf4308974c267f",
+ 64,
+ },
+ {
+ "88bca90e90875a7f0f79c384627bafb2",
+ "0000000000000000",
+ "1a807d272bbe5db1",
+ 64,
+ },
+ {
+ "88bca90e90875a7f0f79c384627bafb2",
+ "0000000000000000",
+ "2269552ab0f85ca6",
+ 128,
+ },
+ {
+ "88bca90e90875a7f0f79c384627bafb216f80a6f85920584c42fceb0be255daf1e",
+ "0000000000000000",
+ "5b78d3a43dfff1f1",
+ 129,
+ },
+ }
+
+ for _, tt := range tests {
+ k, _ := hex.DecodeString(tt.key)
+ p, _ := hex.DecodeString(tt.plain)
+ c, _ := hex.DecodeString(tt.cipher)
+
+ b, _ := New(k, tt.t1)
+
+ var dst [8]byte
+
+ b.Encrypt(dst[:], p)
+
+ if !bytes.Equal(dst[:], c) {
+ t.Errorf("encrypt failed: got % 2x wanted % 2x\n", dst, c)
+ }
+
+ b.Decrypt(dst[:], c)
+
+ if !bytes.Equal(dst[:], p) {
+ t.Errorf("decrypt failed: got % 2x wanted % 2x\n", dst, p)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/mac.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/mac.go
new file mode 100644
index 00000000000..5f38aa7de83
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/mac.go
@@ -0,0 +1,45 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import (
+ "crypto/hmac"
+ "crypto/sha1"
+ "crypto/x509/pkix"
+ "encoding/asn1"
+)
+
+type macData struct {
+ Mac digestInfo
+ MacSalt []byte
+ Iterations int `asn1:"optional,default:1"`
+}
+
+// from PKCS#7:
+type digestInfo struct {
+ Algorithm pkix.AlgorithmIdentifier
+ Digest []byte
+}
+
+var (
+ oidSHA1 = asn1.ObjectIdentifier([]int{1, 3, 14, 3, 2, 26})
+)
+
+func verifyMac(macData *macData, message, password []byte) error {
+ if !macData.Mac.Algorithm.Algorithm.Equal(oidSHA1) {
+ return NotImplementedError("unknown digest algorithm: " + macData.Mac.Algorithm.Algorithm.String())
+ }
+
+ key := pbkdf(sha1Sum, 20, 64, macData.MacSalt, password, macData.Iterations, 3, 20)
+
+ mac := hmac.New(sha1.New, key)
+ mac.Write(message)
+ expectedMAC := mac.Sum(nil)
+
+ if !hmac.Equal(macData.Mac.Digest, expectedMAC) {
+ return ErrIncorrectPassword
+ }
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/mac_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/mac_test.go
new file mode 100644
index 00000000000..1ed4ff21e14
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/mac_test.go
@@ -0,0 +1,42 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import (
+ "encoding/asn1"
+ "testing"
+)
+
+func TestVerifyMac(t *testing.T) {
+ td := macData{
+ Mac: digestInfo{
+ Digest: []byte{0x18, 0x20, 0x3d, 0xff, 0x1e, 0x16, 0xf4, 0x92, 0xf2, 0xaf, 0xc8, 0x91, 0xa9, 0xba, 0xd6, 0xca, 0x9d, 0xee, 0x51, 0x93},
+ },
+ MacSalt: []byte{1, 2, 3, 4, 5, 6, 7, 8},
+ Iterations: 2048,
+ }
+
+ message := []byte{11, 12, 13, 14, 15}
+ password, _ := bmpString("")
+
+ td.Mac.Algorithm.Algorithm = asn1.ObjectIdentifier([]int{1, 2, 3})
+ err := verifyMac(&td, message, password)
+ if _, ok := err.(NotImplementedError); !ok {
+ t.Errorf("err: %v", err)
+ }
+
+ td.Mac.Algorithm.Algorithm = asn1.ObjectIdentifier([]int{1, 3, 14, 3, 2, 26})
+ err = verifyMac(&td, message, password)
+ if err != ErrIncorrectPassword {
+ t.Errorf("Expected incorrect password, got err: %v", err)
+ }
+
+ password, _ = bmpString("Sesame open")
+ err = verifyMac(&td, message, password)
+ if err != nil {
+ t.Errorf("err: %v", err)
+ }
+
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pbkdf.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pbkdf.go
new file mode 100644
index 00000000000..5c419d41e32
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pbkdf.go
@@ -0,0 +1,170 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import (
+ "bytes"
+ "crypto/sha1"
+ "math/big"
+)
+
+var (
+ one = big.NewInt(1)
+)
+
+// sha1Sum returns the SHA-1 hash of in.
+func sha1Sum(in []byte) []byte {
+ sum := sha1.Sum(in)
+ return sum[:]
+}
+
+// fillWithRepeats returns v*ceiling(len(pattern) / v) bytes consisting of
+// repeats of pattern.
+func fillWithRepeats(pattern []byte, v int) []byte {
+ if len(pattern) == 0 {
+ return nil
+ }
+ outputLen := v * ((len(pattern) + v - 1) / v)
+ return bytes.Repeat(pattern, (outputLen+len(pattern)-1)/len(pattern))[:outputLen]
+}
+
+func pbkdf(hash func([]byte) []byte, u, v int, salt, password []byte, r int, ID byte, size int) (key []byte) {
+ // implementation of https://tools.ietf.org/html/rfc7292#appendix-B.2 , RFC text verbatim in comments
+
+ // Let H be a hash function built around a compression function f:
+
+ // Z_2^u x Z_2^v -> Z_2^u
+
+ // (that is, H has a chaining variable and output of length u bits, and
+ // the message input to the compression function of H is v bits). The
+ // values for u and v are as follows:
+
+ // HASH FUNCTION VALUE u VALUE v
+ // MD2, MD5 128 512
+ // SHA-1 160 512
+ // SHA-224 224 512
+ // SHA-256 256 512
+ // SHA-384 384 1024
+ // SHA-512 512 1024
+ // SHA-512/224 224 1024
+ // SHA-512/256 256 1024
+
+ // Furthermore, let r be the iteration count.
+
+ // We assume here that u and v are both multiples of 8, as are the
+ // lengths of the password and salt strings (which we denote by p and s,
+ // respectively) and the number n of pseudorandom bits required. In
+ // addition, u and v are of course non-zero.
+
+ // For information on security considerations for MD5 [19], see [25] and
+ // [1], and on those for MD2, see [18].
+
+ // The following procedure can be used to produce pseudorandom bits for
+ // a particular "purpose" that is identified by a byte called "ID".
+ // This standard specifies 3 different values for the ID byte:
+
+ // 1. If ID=1, then the pseudorandom bits being produced are to be used
+ // as key material for performing encryption or decryption.
+
+ // 2. If ID=2, then the pseudorandom bits being produced are to be used
+ // as an IV (Initial Value) for encryption or decryption.
+
+ // 3. If ID=3, then the pseudorandom bits being produced are to be used
+ // as an integrity key for MACing.
+
+ // 1. Construct a string, D (the "diversifier"), by concatenating v/8
+ // copies of ID.
+ var D []byte
+ for i := 0; i < v; i++ {
+ D = append(D, ID)
+ }
+
+ // 2. Concatenate copies of the salt together to create a string S of
+ // length v(ceiling(s/v)) bits (the final copy of the salt may be
+ // truncated to create S). Note that if the salt is the empty
+ // string, then so is S.
+
+ S := fillWithRepeats(salt, v)
+
+ // 3. Concatenate copies of the password together to create a string P
+ // of length v(ceiling(p/v)) bits (the final copy of the password
+ // may be truncated to create P). Note that if the password is the
+ // empty string, then so is P.
+
+ P := fillWithRepeats(password, v)
+
+ // 4. Set I=S||P to be the concatenation of S and P.
+ I := append(S, P...)
+
+ // 5. Set c=ceiling(n/u).
+ c := (size + u - 1) / u
+
+ // 6. For i=1, 2, ..., c, do the following:
+	A := make([]byte, c*20) // 20 == u for SHA-1, the only hash used in this package
+ var IjBuf []byte
+ for i := 0; i < c; i++ {
+		// A. Set A_i=H^r(D||I). (i.e., the r-th hash of D||I,
+		// H(H(H(... H(D||I))))
+ Ai := hash(append(D, I...))
+ for j := 1; j < r; j++ {
+ Ai = hash(Ai)
+ }
+ copy(A[i*20:], Ai[:])
+
+ if i < c-1 { // skip on last iteration
+ // B. Concatenate copies of Ai to create a string B of length v
+ // bits (the final copy of Ai may be truncated to create B).
+ var B []byte
+ for len(B) < v {
+ B = append(B, Ai[:]...)
+ }
+ B = B[:v]
+
+ // C. Treating I as a concatenation I_0, I_1, ..., I_(k-1) of v-bit
+ // blocks, where k=ceiling(s/v)+ceiling(p/v), modify I by
+ // setting I_j=(I_j+B+1) mod 2^v for each j.
+ {
+ Bbi := new(big.Int).SetBytes(B)
+ Ij := new(big.Int)
+
+ for j := 0; j < len(I)/v; j++ {
+ Ij.SetBytes(I[j*v : (j+1)*v])
+ Ij.Add(Ij, Bbi)
+ Ij.Add(Ij, one)
+ Ijb := Ij.Bytes()
+ // We expect Ijb to be exactly v bytes,
+ // if it is longer or shorter we must
+ // adjust it accordingly.
+ if len(Ijb) > v {
+ Ijb = Ijb[len(Ijb)-v:]
+ }
+ if len(Ijb) < v {
+ if IjBuf == nil {
+ IjBuf = make([]byte, v)
+ }
+ bytesShort := v - len(Ijb)
+ for i := 0; i < bytesShort; i++ {
+ IjBuf[i] = 0
+ }
+ copy(IjBuf[bytesShort:], Ijb)
+ Ijb = IjBuf
+ }
+ copy(I[j*v:(j+1)*v], Ijb)
+ }
+ }
+ }
+ }
+ // 7. Concatenate A_1, A_2, ..., A_c together to form a pseudorandom
+ // bit string, A.
+
+ // 8. Use the first n bits of A as the output of this entire process.
+ return A[:size]
+
+ // If the above process is being used to generate a DES key, the process
+ // should be used to create 64 random bits, and the key's parity bits
+ // should be set after the 64 bits have been produced. Similar concerns
+ // hold for 2-key and 3-key triple-DES keys, for CDMF keys, and for any
+ // similar keys with parity bits "built into them".
+}
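
For the SHA-1-based schemes in this package u is 20 (hash output bytes) and v is 64 (hash block bytes), and the ID byte selects what the output is for, exactly as the callers in crypto.go and mac.go do. A small in-package sketch collecting those three calls (the helper name is an illustrative assumption):

// deriveTripleDESMaterial mirrors how crypto.go and mac.go call pbkdf for a
// SHA-1 based PBE scheme: ID=1 for the cipher key, ID=2 for the IV and ID=3
// for the MAC key. salt, password and iterations are caller-supplied.
func deriveTripleDESMaterial(salt, password []byte, iterations int) (key, iv, macKey []byte) {
	key = pbkdf(sha1Sum, 20, 64, salt, password, iterations, 1, 24)    // 24-byte 3DES key
	iv = pbkdf(sha1Sum, 20, 64, salt, password, iterations, 2, 8)      // 8-byte CBC IV
	macKey = pbkdf(sha1Sum, 20, 64, salt, password, iterations, 3, 20) // 20-byte HMAC-SHA-1 key
	return
}
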
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pbkdf_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pbkdf_test.go
new file mode 100644
index 00000000000..262037d7eba
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pbkdf_test.go
@@ -0,0 +1,34 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import (
+ "bytes"
+ "testing"
+)
+
+func TestThatPBKDFWorksCorrectlyForLongKeys(t *testing.T) {
+ cipherInfo := shaWithTripleDESCBC{}
+
+ salt := []byte("\xff\xff\xff\xff\xff\xff\xff\xff")
+ password, _ := bmpString("sesame")
+ key := cipherInfo.deriveKey(salt, password, 2048)
+
+ if expected := []byte("\x7c\xd9\xfd\x3e\x2b\x3b\xe7\x69\x1a\x44\xe3\xbe\xf0\xf9\xea\x0f\xb9\xb8\x97\xd4\xe3\x25\xd9\xd1"); bytes.Compare(key, expected) != 0 {
+ t.Fatalf("expected key '%x', but found '%x'", expected, key)
+ }
+}
+
+func TestThatPBKDFHandlesLeadingZeros(t *testing.T) {
+// This test triggers a case where I_j (in step 6C) ends up with a leading
+// zero byte, meaning that len(Ijb) < v (leading zeros get stripped by
+// big.Int). This previously caused a bug whereby certain inputs would break
+// the derivation and produce the wrong output.
+ key := pbkdf(sha1Sum, 20, 64, []byte("\xf3\x7e\x05\xb5\x18\x32\x4b\x4b"), []byte("\x00\x00"), 2048, 1, 24)
+ expected := []byte("\x00\xf7\x59\xff\x47\xd1\x4d\xd0\x36\x65\xd5\x94\x3c\xb3\xc4\xa3\x9a\x25\x55\xc0\x2a\xed\x66\xe1")
+ if bytes.Compare(key, expected) != 0 {
+ t.Fatalf("expected key '%x', but found '%x'", expected, key)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pkcs12.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pkcs12.go
new file mode 100644
index 00000000000..ad6341e60fa
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pkcs12.go
@@ -0,0 +1,342 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package pkcs12 implements some of PKCS#12.
+//
+// This implementation is distilled from https://tools.ietf.org/html/rfc7292
+// and referenced documents. It is intended for decoding P12/PFX-stored
+// certificates and keys for use with the crypto/tls package.
+package pkcs12
+
+import (
+ "crypto/ecdsa"
+ "crypto/rsa"
+ "crypto/x509"
+ "crypto/x509/pkix"
+ "encoding/asn1"
+ "encoding/hex"
+ "encoding/pem"
+ "errors"
+)
+
+var (
+ oidDataContentType = asn1.ObjectIdentifier([]int{1, 2, 840, 113549, 1, 7, 1})
+ oidEncryptedDataContentType = asn1.ObjectIdentifier([]int{1, 2, 840, 113549, 1, 7, 6})
+
+ oidFriendlyName = asn1.ObjectIdentifier([]int{1, 2, 840, 113549, 1, 9, 20})
+ oidLocalKeyID = asn1.ObjectIdentifier([]int{1, 2, 840, 113549, 1, 9, 21})
+ oidMicrosoftCSPName = asn1.ObjectIdentifier([]int{1, 3, 6, 1, 4, 1, 311, 17, 1})
+)
+
+type pfxPdu struct {
+ Version int
+ AuthSafe contentInfo
+ MacData macData `asn1:"optional"`
+}
+
+type contentInfo struct {
+ ContentType asn1.ObjectIdentifier
+ Content asn1.RawValue `asn1:"tag:0,explicit,optional"`
+}
+
+type encryptedData struct {
+ Version int
+ EncryptedContentInfo encryptedContentInfo
+}
+
+type encryptedContentInfo struct {
+ ContentType asn1.ObjectIdentifier
+ ContentEncryptionAlgorithm pkix.AlgorithmIdentifier
+ EncryptedContent []byte `asn1:"tag:0,optional"`
+}
+
+func (i encryptedContentInfo) Algorithm() pkix.AlgorithmIdentifier {
+ return i.ContentEncryptionAlgorithm
+}
+
+func (i encryptedContentInfo) Data() []byte { return i.EncryptedContent }
+
+type safeBag struct {
+ Id asn1.ObjectIdentifier
+ Value asn1.RawValue `asn1:"tag:0,explicit"`
+ Attributes []pkcs12Attribute `asn1:"set,optional"`
+}
+
+type pkcs12Attribute struct {
+ Id asn1.ObjectIdentifier
+ Value asn1.RawValue `asn1:"set"`
+}
+
+type encryptedPrivateKeyInfo struct {
+ AlgorithmIdentifier pkix.AlgorithmIdentifier
+ EncryptedData []byte
+}
+
+func (i encryptedPrivateKeyInfo) Algorithm() pkix.AlgorithmIdentifier {
+ return i.AlgorithmIdentifier
+}
+
+func (i encryptedPrivateKeyInfo) Data() []byte {
+ return i.EncryptedData
+}
+
+// PEM block types
+const (
+ certificateType = "CERTIFICATE"
+ privateKeyType = "PRIVATE KEY"
+)
+
+// unmarshal calls asn1.Unmarshal, but also returns an error if there is any
+// trailing data after unmarshaling.
+func unmarshal(in []byte, out interface{}) error {
+ trailing, err := asn1.Unmarshal(in, out)
+ if err != nil {
+ return err
+ }
+ if len(trailing) != 0 {
+ return errors.New("pkcs12: trailing data found")
+ }
+ return nil
+}
+
+// ToPEM converts all "safe bags" contained in pfxData to PEM blocks.
+func ToPEM(pfxData []byte, password string) ([]*pem.Block, error) {
+ encodedPassword, err := bmpString(password)
+ if err != nil {
+ return nil, ErrIncorrectPassword
+ }
+
+	bags, encodedPassword, err := getSafeContents(pfxData, encodedPassword)
+	if err != nil {
+		return nil, err
+	}
+
+ blocks := make([]*pem.Block, 0, len(bags))
+ for _, bag := range bags {
+ block, err := convertBag(&bag, encodedPassword)
+ if err != nil {
+ return nil, err
+ }
+ blocks = append(blocks, block)
+ }
+
+ return blocks, nil
+}
+
+func convertBag(bag *safeBag, password []byte) (*pem.Block, error) {
+ block := &pem.Block{
+ Headers: make(map[string]string),
+ }
+
+ for _, attribute := range bag.Attributes {
+ k, v, err := convertAttribute(&attribute)
+ if err != nil {
+ return nil, err
+ }
+ block.Headers[k] = v
+ }
+
+ switch {
+ case bag.Id.Equal(oidCertBag):
+ block.Type = certificateType
+ certsData, err := decodeCertBag(bag.Value.Bytes)
+ if err != nil {
+ return nil, err
+ }
+ block.Bytes = certsData
+ case bag.Id.Equal(oidPKCS8ShroundedKeyBag):
+ block.Type = privateKeyType
+
+ key, err := decodePkcs8ShroudedKeyBag(bag.Value.Bytes, password)
+ if err != nil {
+ return nil, err
+ }
+
+ switch key := key.(type) {
+ case *rsa.PrivateKey:
+ block.Bytes = x509.MarshalPKCS1PrivateKey(key)
+ case *ecdsa.PrivateKey:
+ block.Bytes, err = x509.MarshalECPrivateKey(key)
+ if err != nil {
+ return nil, err
+ }
+ default:
+ return nil, errors.New("found unknown private key type in PKCS#8 wrapping")
+ }
+ default:
+ return nil, errors.New("don't know how to convert a safe bag of type " + bag.Id.String())
+ }
+ return block, nil
+}
+
+func convertAttribute(attribute *pkcs12Attribute) (key, value string, err error) {
+ isString := false
+
+ switch {
+ case attribute.Id.Equal(oidFriendlyName):
+ key = "friendlyName"
+ isString = true
+ case attribute.Id.Equal(oidLocalKeyID):
+ key = "localKeyId"
+ case attribute.Id.Equal(oidMicrosoftCSPName):
+ // This key is chosen to match OpenSSL.
+ key = "Microsoft CSP Name"
+ isString = true
+ default:
+ return "", "", errors.New("pkcs12: unknown attribute with OID " + attribute.Id.String())
+ }
+
+ if isString {
+ if err := unmarshal(attribute.Value.Bytes, &attribute.Value); err != nil {
+ return "", "", err
+ }
+ if value, err = decodeBMPString(attribute.Value.Bytes); err != nil {
+ return "", "", err
+ }
+ } else {
+ var id []byte
+ if err := unmarshal(attribute.Value.Bytes, &id); err != nil {
+ return "", "", err
+ }
+ value = hex.EncodeToString(id)
+ }
+
+ return key, value, nil
+}
+
+// Decode extracts a certificate and private key from pfxData. This function
+// assumes that there is only one certificate and only one private key in the
+// pfxData.
+func Decode(pfxData []byte, password string) (privateKey interface{}, certificate *x509.Certificate, err error) {
+ encodedPassword, err := bmpString(password)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ bags, encodedPassword, err := getSafeContents(pfxData, encodedPassword)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ if len(bags) != 2 {
+ err = errors.New("pkcs12: expected exactly two safe bags in the PFX PDU")
+ return
+ }
+
+ for _, bag := range bags {
+ switch {
+ case bag.Id.Equal(oidCertBag):
+ if certificate != nil {
+ err = errors.New("pkcs12: expected exactly one certificate bag")
+ }
+
+ certsData, err := decodeCertBag(bag.Value.Bytes)
+ if err != nil {
+ return nil, nil, err
+ }
+ certs, err := x509.ParseCertificates(certsData)
+ if err != nil {
+ return nil, nil, err
+ }
+ if len(certs) != 1 {
+ err = errors.New("pkcs12: expected exactly one certificate in the certBag")
+ return nil, nil, err
+ }
+ certificate = certs[0]
+
+ case bag.Id.Equal(oidPKCS8ShroundedKeyBag):
+ if privateKey != nil {
+ err = errors.New("pkcs12: expected exactly one key bag")
+ }
+
+ if privateKey, err = decodePkcs8ShroudedKeyBag(bag.Value.Bytes, encodedPassword); err != nil {
+ return nil, nil, err
+ }
+ }
+ }
+
+ if certificate == nil {
+ return nil, nil, errors.New("pkcs12: certificate missing")
+ }
+ if privateKey == nil {
+ return nil, nil, errors.New("pkcs12: private key missing")
+ }
+
+ return
+}
+
+func getSafeContents(p12Data, password []byte) (bags []safeBag, updatedPassword []byte, err error) {
+ pfx := new(pfxPdu)
+ if err := unmarshal(p12Data, pfx); err != nil {
+ return nil, nil, errors.New("pkcs12: error reading P12 data: " + err.Error())
+ }
+
+ if pfx.Version != 3 {
+ return nil, nil, NotImplementedError("can only decode v3 PFX PDU's")
+ }
+
+ if !pfx.AuthSafe.ContentType.Equal(oidDataContentType) {
+ return nil, nil, NotImplementedError("only password-protected PFX is implemented")
+ }
+
+ // unmarshal the explicit bytes in the content for type 'data'
+ if err := unmarshal(pfx.AuthSafe.Content.Bytes, &pfx.AuthSafe.Content); err != nil {
+ return nil, nil, err
+ }
+
+ if len(pfx.MacData.Mac.Algorithm.Algorithm) == 0 {
+ return nil, nil, errors.New("pkcs12: no MAC in data")
+ }
+
+ if err := verifyMac(&pfx.MacData, pfx.AuthSafe.Content.Bytes, password); err != nil {
+ if err == ErrIncorrectPassword && len(password) == 2 && password[0] == 0 && password[1] == 0 {
+			// Some implementations use an empty byte array for the
+			// empty-string password; try once more with a nil password.
+ password = nil
+ err = verifyMac(&pfx.MacData, pfx.AuthSafe.Content.Bytes, password)
+ }
+ if err != nil {
+ return nil, nil, err
+ }
+ }
+
+ var authenticatedSafe []contentInfo
+ if err := unmarshal(pfx.AuthSafe.Content.Bytes, &authenticatedSafe); err != nil {
+ return nil, nil, err
+ }
+
+ if len(authenticatedSafe) != 2 {
+ return nil, nil, NotImplementedError("expected exactly two items in the authenticated safe")
+ }
+
+ for _, ci := range authenticatedSafe {
+ var data []byte
+
+ switch {
+ case ci.ContentType.Equal(oidDataContentType):
+ if err := unmarshal(ci.Content.Bytes, &data); err != nil {
+ return nil, nil, err
+ }
+ case ci.ContentType.Equal(oidEncryptedDataContentType):
+ var encryptedData encryptedData
+ if err := unmarshal(ci.Content.Bytes, &encryptedData); err != nil {
+ return nil, nil, err
+ }
+ if encryptedData.Version != 0 {
+ return nil, nil, NotImplementedError("only version 0 of EncryptedData is supported")
+ }
+ if data, err = pbDecrypt(encryptedData.EncryptedContentInfo, password); err != nil {
+ return nil, nil, err
+ }
+ default:
+ return nil, nil, NotImplementedError("only data and encryptedData content types are supported in authenticated safe")
+ }
+
+ var safeContents []safeBag
+ if err := unmarshal(data, &safeContents); err != nil {
+ return nil, nil, err
+ }
+ bags = append(bags, safeContents...)
+ }
+
+ return bags, password, nil
+}
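
Decode covers the common single-key, single-certificate PFX and feeds naturally into crypto/tls; a short sketch, where the file path and password are placeholders:

package main

import (
	"crypto/tls"
	"io/ioutil"
	"log"

	"golang.org/x/crypto/pkcs12"
)

func main() {
	// Placeholder path and password; Decode expects exactly one key and
	// one certificate in the PFX, per the checks above.
	pfxData, err := ioutil.ReadFile("client.p12")
	if err != nil {
		log.Fatal(err)
	}

	priv, cert, err := pkcs12.Decode(pfxData, "password")
	if err != nil {
		log.Fatal(err)
	}

	tlsCert := tls.Certificate{
		Certificate: [][]byte{cert.Raw},
		PrivateKey:  priv,
		Leaf:        cert,
	}
	cfg := &tls.Config{Certificates: []tls.Certificate{tlsCert}}
	log.Printf("loaded %q with %d certificate(s)", cert.Subject.CommonName, len(cfg.Certificates))
}
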
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pkcs12_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pkcs12_test.go
new file mode 100644
index 00000000000..14dd2a6c5d6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/pkcs12_test.go
@@ -0,0 +1,138 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import (
+ "crypto/rsa"
+ "crypto/tls"
+ "encoding/base64"
+ "encoding/pem"
+ "testing"
+)
+
+func TestPfx(t *testing.T) {
+ for commonName, base64P12 := range testdata {
+ p12, _ := base64.StdEncoding.DecodeString(base64P12)
+
+ priv, cert, err := Decode(p12, "")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if err := priv.(*rsa.PrivateKey).Validate(); err != nil {
+ t.Errorf("error while validating private key: %v", err)
+ }
+
+ if cert.Subject.CommonName != commonName {
+ t.Errorf("expected common name to be %q, but found %q", commonName, cert.Subject.CommonName)
+ }
+ }
+}
+
+func TestPEM(t *testing.T) {
+ for commonName, base64P12 := range testdata {
+ p12, _ := base64.StdEncoding.DecodeString(base64P12)
+
+ blocks, err := ToPEM(p12, "")
+ if err != nil {
+ t.Fatalf("error while converting to PEM: %s", err)
+ }
+
+ var pemData []byte
+ for _, b := range blocks {
+ pemData = append(pemData, pem.EncodeToMemory(b)...)
+ }
+
+ cert, err := tls.X509KeyPair(pemData, pemData)
+ if err != nil {
+			t.Errorf("error while converting to key pair: %v", err)
+ }
+ config := tls.Config{
+ Certificates: []tls.Certificate{cert},
+ }
+ config.BuildNameToCertificate()
+
+ if _, exists := config.NameToCertificate[commonName]; !exists {
+			t.Errorf("did not find our certificate in NameToCertificate: %v", config.NameToCertificate)
+ }
+ }
+}
+
+func ExampleToPEM() {
+ p12, _ := base64.StdEncoding.DecodeString(`MIIJzgIBAzCCCZQGCS ... CA+gwggPk==`)
+
+ blocks, err := ToPEM(p12, "password")
+ if err != nil {
+ panic(err)
+ }
+
+ var pemData []byte
+ for _, b := range blocks {
+ pemData = append(pemData, pem.EncodeToMemory(b)...)
+ }
+
+	// then use the PEM data with the tls package to construct a TLS certificate:
+ cert, err := tls.X509KeyPair(pemData, pemData)
+ if err != nil {
+ panic(err)
+ }
+
+ config := &tls.Config{
+ Certificates: []tls.Certificate{cert},
+ }
+
+ _ = config
+}
+
+var testdata = map[string]string{
+ // 'null' password test case
+ "Windows Azure Tools": `MIIKDAIBAzCCCcwGCSqGSIb3DQEHAaCCCb0Eggm5MIIJtTCCBe4GCSqGSIb3DQEHAaCCBd8EggXbMIIF1zCCBdMGCyqGSIb3DQEMCgECoIIE7jCCBOowHAYKKoZIhvcNAQwBAzAOBAhStUNnlTGV+gICB9AEggTIJ81JIossF6boFWpPtkiQRPtI6DW6e9QD4/WvHAVrM2bKdpMzSMsCML5NyuddANTKHBVq00Jc9keqGNAqJPKkjhSUebzQFyhe0E1oI9T4zY5UKr/I8JclOeccH4QQnsySzYUG2SnniXnQ+JrG3juetli7EKth9h6jLc6xbubPadY5HMB3wL/eG/kJymiXwU2KQ9Mgd4X6jbcV+NNCE/8jbZHvSTCPeYTJIjxfeX61Sj5kFKUCzERbsnpyevhY3X0eYtEDezZQarvGmXtMMdzf8HJHkWRdk9VLDLgjk8uiJif/+X4FohZ37ig0CpgC2+dP4DGugaZZ51hb8tN9GeCKIsrmWogMXDIVd0OACBp/EjJVmFB6y0kUCXxUE0TZt0XA1tjAGJcjDUpBvTntZjPsnH/4ZySy+s2d9OOhJ6pzRQBRm360TzkFdSwk9DLiLdGfv4pwMMu/vNGBlqjP/1sQtj+jprJiD1sDbCl4AdQZVoMBQHadF2uSD4/o17XG/Ci0r2h6Htc2yvZMAbEY4zMjjIn2a+vqIxD6onexaek1R3zbkS9j19D6EN9EWn8xgz80YRCyW65znZk8xaIhhvlU/mg7sTxeyuqroBZNcq6uDaQTehDpyH7bY2l4zWRpoj10a6JfH2q5shYz8Y6UZC/kOTfuGqbZDNZWro/9pYquvNNW0M847E5t9bsf9VkAAMHRGBbWoVoU9VpI0UnoXSfvpOo+aXa2DSq5sHHUTVY7A9eov3z5IqT+pligx11xcs+YhDWcU8di3BTJisohKvv5Y8WSkm/rloiZd4ig269k0jTRk1olP/vCksPli4wKG2wdsd5o42nX1yL7mFfXocOANZbB+5qMkiwdyoQSk+Vq+C8nAZx2bbKhUq2MbrORGMzOe0Hh0x2a0PeObycN1Bpyv7Mp3ZI9h5hBnONKCnqMhtyQHUj/nNvbJUnDVYNfoOEqDiEqqEwB7YqWzAKz8KW0OIqdlM8uiQ4JqZZlFllnWJUfaiDrdFM3lYSnFQBkzeVlts6GpDOOBjCYd7dcCNS6kq6pZC6p6HN60Twu0JnurZD6RT7rrPkIGE8vAenFt4iGe/yF52fahCSY8Ws4K0UTwN7bAS+4xRHVCWvE8sMRZsRCHizb5laYsVrPZJhE6+hux6OBb6w8kwPYXc+ud5v6UxawUWgt6uPwl8mlAtU9Z7Miw4Nn/wtBkiLL/ke1UI1gqJtcQXgHxx6mzsjh41+nAgTvdbsSEyU6vfOmxGj3Rwc1eOrIhJUqn5YjOWfzzsz/D5DzWKmwXIwdspt1p+u+kol1N3f2wT9fKPnd/RGCb4g/1hc3Aju4DQYgGY782l89CEEdalpQ/35bQczMFk6Fje12HykakWEXd/bGm9Unh82gH84USiRpeOfQvBDYoqEyrY3zkFZzBjhDqa+jEcAj41tcGx47oSfDq3iVYCdL7HSIjtnyEktVXd7mISZLoMt20JACFcMw+mrbjlug+eU7o2GR7T+LwtOp/p4LZqyLa7oQJDwde1BNZtm3TCK2P1mW94QDL0nDUps5KLtr1DaZXEkRbjSJub2ZE9WqDHyU3KA8G84Tq/rN1IoNu/if45jacyPje1Npj9IftUZSP22nV7HMwZtwQ4P4MYHRMBMGCSqGSIb3DQEJFTEGBAQBAAAAMFsGCSqGSIb3DQEJFDFOHkwAewBCADQAQQA0AEYARQBCADAALQBBADEAOABBAC0ANAA0AEIAQgAtAEIANQBGADIALQA0ADkAMQBFAEYAMQA1ADIAQgBBADEANgB9MF0GCSsGAQQBgjcRATFQHk4ATQBpAGMAcgBvAHMAbwBmAHQAIABTAG8AZgB0AHcAYQByAGUAIABLAGUAeQAgAFMAdABvAHIAYQBnAGUAIABQAHIAbwB2AGkAZABlAHIwggO/BgkqhkiG9w0BBwagggOwMIIDrAIBADCCA6UGCSqGSIb3DQEHATAcBgoqhkiG9w0BDAEGMA4ECEBk5ZAYpu0WAgIH0ICCA3hik4mQFGpw9Ha8TQPtk+j2jwWdxfF0+sTk6S8PTsEfIhB7wPltjiCK92Uv2tCBQnodBUmatIfkpnRDEySmgmdglmOCzj204lWAMRs94PoALGn3JVBXbO1vIDCbAPOZ7Z0Hd0/1t2hmk8v3//QJGUg+qr59/4y/MuVfIg4qfkPcC2QSvYWcK3oTf6SFi5rv9B1IOWFgN5D0+C+x/9Lb/myPYX+rbOHrwtJ4W1fWKoz9g7wwmGFA9IJ2DYGuH8ifVFbDFT1Vcgsvs8arSX7oBsJVW0qrP7XkuDRe3EqCmKW7rBEwYrFznhxZcRDEpMwbFoSvgSIZ4XhFY9VKYglT+JpNH5iDceYEBOQL4vBLpxNUk3l5jKaBNxVa14AIBxq18bVHJ+STInhLhad4u10v/Xbx7wIL3f9DX1yLAkPrpBYbNHS2/ew6H/ySDJnoIDxkw2zZ4qJ+qUJZ1S0lbZVG+VT0OP5uF6tyOSpbMlcGkdl3z254n6MlCrTifcwkzscysDsgKXaYQw06rzrPW6RDub+t+hXzGny799fS9jhQMLDmOggaQ7+LA4oEZsfT89HLMWxJYDqjo3gIfjciV2mV54R684qLDS+AO09U49e6yEbwGlq8lpmO/pbXCbpGbB1b3EomcQbxdWxW2WEkkEd/VBn81K4M3obmywwXJkw+tPXDXfBmzzaqqCR+onMQ5ME1nMkY8ybnfoCc1bDIupjVWsEL2Wvq752RgI6KqzVNr1ew1IdqV5AWN2fOfek+0vi3Jd9FHF3hx8JMwjJL9dZsETV5kHtYJtE7wJ23J68BnCt2eI0GEuwXcCf5EdSKN/xXCTlIokc4Qk/gzRdIZsvcEJ6B1lGovKG54X4IohikqTjiepjbsMWj38yxDmK3mtENZ9ci8FPfbbvIEcOCZIinuY3qFUlRSbx7VUerEoV1IP3clUwexVQo4lHFee2jd7ocWsdSqSapW7OWUupBtDzRkqVhE7tGria+i1W2d6YLlJ21QTjyapWJehAMO637OdbJCCzDs1cXbodRRE7bsP492ocJy8OX66rKdhYbg8srSFNKdb3pF3UDNbN9jhI/t8iagRhNBhlQtTr1me2E/c86Q18qcRXl4bcXTt6acgCeffK6Y26LcVlrgjlD33AEYRRUeyC+rpxbT0aMjdFderlndKRIyG23mSp0HaUwNzAfMAcGBSsOAwIaBBRlviCbIyRrhIysg2dc/KbLFTc2vQQUg4rfwHMM4IKYRD/fsd1x6dda+wQ=`,
+ // empty string password test case
+ "testing@example.com": `MIIJzgIBAzCCCZQGCSqGSIb3DQEHAaCCCYUEggmBMIIJfTCCA/cGCSqGSIb3DQEHBqCCA+gwggPk
+AgEAMIID3QYJKoZIhvcNAQcBMBwGCiqGSIb3DQEMAQYwDgQIIszfRGqcmPcCAggAgIIDsOZ9Eg1L
+s5Wx8JhYoV3HAL4aRnkAWvTYB5NISZOgSgIQTssmt/3A7134dibTmaT/93LikkL3cTKLnQzJ4wDf
+YZ1bprpVJvUqz+HFT79m27bP9zYXFrvxWBJbxjYKTSjQMgz+h8LAEpXXGajCmxMJ1oCOtdXkhhzc
+LdZN6SAYgtmtyFnCdMEDskSggGuLb3fw84QEJ/Sj6FAULXunW/CPaS7Ce0TMsKmNU/jfFWj3yXXw
+ro0kwjKiVLpVFlnBlHo2OoVU7hmkm59YpGhLgS7nxLD3n7nBroQ0ID1+8R01NnV9XLGoGzxMm1te
+6UyTCkr5mj+kEQ8EP1Ys7g/TC411uhVWySMt/rcpkx7Vz1r9kYEAzJpONAfr6cuEVkPKrxpq4Fh0
+2fzlKBky0i/hrfIEUmngh+ERHUb/Mtv/fkv1j5w9suESbhsMLLiCXAlsP1UWMX+3bNizi3WVMEts
+FM2k9byn+p8IUD/A8ULlE4kEaWeoc+2idkCNQkLGuIdGUXUFVm58se0auUkVRoRJx8x4CkMesT8j
+b1H831W66YRWoEwwDQp2kK1lA2vQXxdVHWlFevMNxJeromLzj3ayiaFrfByeUXhR2S+Hpm+c0yNR
+4UVU9WED2kacsZcpRm9nlEa5sr28mri5JdBrNa/K02OOhvKCxr5ZGmbOVzUQKla2z4w+Ku9k8POm
+dfDNU/fGx1b5hcFWtghXe3msWVsSJrQihnN6q1ughzNiYZlJUGcHdZDRtiWwCFI0bR8h/Dmg9uO9
+4rawQQrjIRT7B8yF3UbkZyAqs8Ppb1TsMeNPHh1rxEfGVQknh/48ouJYsmtbnzugTUt3mJCXXiL+
+XcPMV6bBVAUu4aaVKSmg9+yJtY4/VKv10iw88ktv29fViIdBe3t6l/oPuvQgbQ8dqf4T8w0l/uKZ
+9lS1Na9jfT1vCoS7F5TRi+tmyj1vL5kr/amEIW6xKEP6oeAMvCMtbPAzVEj38zdJ1R22FfuIBxkh
+f0Zl7pdVbmzRxl/SBx9iIBJSqAvcXItiT0FIj8HxQ+0iZKqMQMiBuNWJf5pYOLWGrIyntCWwHuaQ
+wrx0sTGuEL9YXLEAsBDrsvzLkx/56E4INGZFrH8G7HBdW6iGqb22IMI4GHltYSyBRKbB0gadYTyv
+abPEoqww8o7/85aPSzOTJ/53ozD438Q+d0u9SyDuOb60SzCD/zPuCEd78YgtXJwBYTuUNRT27FaM
+3LGMX8Hz+6yPNRnmnA2XKPn7dx/IlaqAjIs8MIIFfgYJKoZIhvcNAQcBoIIFbwSCBWswggVnMIIF
+YwYLKoZIhvcNAQwKAQKgggTuMIIE6jAcBgoqhkiG9w0BDAEDMA4ECJr0cClYqOlcAgIIAASCBMhe
+OQSiP2s0/46ONXcNeVAkz2ksW3u/+qorhSiskGZ0b3dFa1hhgBU2Q7JVIkc4Hf7OXaT1eVQ8oqND
+uhqsNz83/kqYo70+LS8Hocj49jFgWAKrf/yQkdyP1daHa2yzlEw4mkpqOfnIORQHvYCa8nEApspZ
+wVu8y6WVuLHKU67mel7db2xwstQp7PRuSAYqGjTfAylElog8ASdaqqYbYIrCXucF8iF9oVgmb/Qo
+xrXshJ9aSLO4MuXlTPELmWgj07AXKSb90FKNihE+y0bWb9LPVFY1Sly3AX9PfrtkSXIZwqW3phpv
+MxGxQl/R6mr1z+hlTfY9Wdpb5vlKXPKA0L0Rt8d2pOesylFi6esJoS01QgP1kJILjbrV731kvDc0
+Jsd+Oxv4BMwA7ClG8w1EAOInc/GrV1MWFGw/HeEqj3CZ/l/0jv9bwkbVeVCiIhoL6P6lVx9pXq4t
+KZ0uKg/tk5TVJmG2vLcMLvezD0Yk3G2ZOMrywtmskrwoF7oAUpO9e87szoH6fEvUZlkDkPVW1NV4
+cZk3DBSQiuA3VOOg8qbo/tx/EE3H59P0axZWno2GSB0wFPWd1aj+b//tJEJHaaNR6qPRj4IWj9ru
+Qbc8eRAcVWleHg8uAehSvUXlFpyMQREyrnpvMGddpiTC8N4UMrrBRhV7+UbCOWhxPCbItnInBqgl
+1JpSZIP7iUtsIMdu3fEC2cdbXMTRul+4rdzUR7F9OaezV3jjvcAbDvgbK1CpyC+MJ1Mxm/iTgk9V
+iUArydhlR8OniN84GyGYoYCW9O/KUwb6ASmeFOu/msx8x6kAsSQHIkKqMKv0TUR3kZnkxUvdpBGP
+KTl4YCTvNGX4dYALBqrAETRDhua2KVBD/kEttDHwBNVbN2xi81+Mc7ml461aADfk0c66R/m2sjHB
+2tN9+wG12OIWFQjL6wF/UfJMYamxx2zOOExiId29Opt57uYiNVLOO4ourPewHPeH0u8Gz35aero7
+lkt7cZAe1Q0038JUuE/QGlnK4lESK9UkSIQAjSaAlTsrcfwtQxB2EjoOoLhwH5mvxUEmcNGNnXUc
+9xj3M5BD3zBz3Ft7G3YMMDwB1+zC2l+0UG0MGVjMVaeoy32VVNvxgX7jk22OXG1iaOB+PY9kdk+O
+X+52BGSf/rD6X0EnqY7XuRPkMGgjtpZeAYxRQnFtCZgDY4wYheuxqSSpdF49yNczSPLkgB3CeCfS
++9NTKN7aC6hBbmW/8yYh6OvSiCEwY0lFS/T+7iaVxr1loE4zI1y/FFp4Pe1qfLlLttVlkygga2UU
+SCunTQ8UB/M5IXWKkhMOO11dP4niWwb39Y7pCWpau7mwbXOKfRPX96cgHnQJK5uG+BesDD1oYnX0
+6frN7FOnTSHKruRIwuI8KnOQ/I+owmyz71wiv5LMQt+yM47UrEjB/EZa5X8dpEwOZvkdqL7utcyo
+l0XH5kWMXdW856LL/FYftAqJIDAmtX1TXF/rbP6mPyN/IlDC0gjP84Uzd/a2UyTIWr+wk49Ek3vQ
+/uDamq6QrwAxVmNh5Tset5Vhpc1e1kb7mRMZIzxSP8JcTuYd45oFKi98I8YjvueHVZce1g7OudQP
+SbFQoJvdT46iBg1TTatlltpOiH2mFaxWVS0xYjAjBgkqhkiG9w0BCRUxFgQUdA9eVqvETX4an/c8
+p8SsTugkit8wOwYJKoZIhvcNAQkUMS4eLABGAHIAaQBlAG4AZABsAHkAIABuAGEAbQBlACAAZgBv
+AHIAIABjAGUAcgB0MDEwITAJBgUrDgMCGgUABBRFsNz3Zd1O1GI8GTuFwCWuDOjEEwQIuBEfIcAy
+HQ8CAggA`,
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/safebags.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/safebags.go
new file mode 100644
index 00000000000..def1f7b98d7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/pkcs12/safebags.go
@@ -0,0 +1,57 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkcs12
+
+import (
+ "crypto/x509"
+ "encoding/asn1"
+ "errors"
+)
+
+var (
+ // see https://tools.ietf.org/html/rfc7292#appendix-D
+ oidCertTypeX509Certificate = asn1.ObjectIdentifier([]int{1, 2, 840, 113549, 1, 9, 22, 1})
+ oidPKCS8ShroundedKeyBag = asn1.ObjectIdentifier([]int{1, 2, 840, 113549, 1, 12, 10, 1, 2})
+ oidCertBag = asn1.ObjectIdentifier([]int{1, 2, 840, 113549, 1, 12, 10, 1, 3})
+)
+
+type certBag struct {
+ Id asn1.ObjectIdentifier
+ Data []byte `asn1:"tag:0,explicit"`
+}
+
+func decodePkcs8ShroudedKeyBag(asn1Data, password []byte) (privateKey interface{}, err error) {
+ pkinfo := new(encryptedPrivateKeyInfo)
+ if err = unmarshal(asn1Data, pkinfo); err != nil {
+ return nil, errors.New("pkcs12: error decoding PKCS#8 shrouded key bag: " + err.Error())
+ }
+
+ pkData, err := pbDecrypt(pkinfo, password)
+ if err != nil {
+ return nil, errors.New("pkcs12: error decrypting PKCS#8 shrouded key bag: " + err.Error())
+ }
+
+ ret := new(asn1.RawValue)
+ if err = unmarshal(pkData, ret); err != nil {
+ return nil, errors.New("pkcs12: error unmarshaling decrypted private key: " + err.Error())
+ }
+
+ if privateKey, err = x509.ParsePKCS8PrivateKey(pkData); err != nil {
+ return nil, errors.New("pkcs12: error parsing PKCS#8 private key: " + err.Error())
+ }
+
+ return privateKey, nil
+}
+
+func decodeCertBag(asn1Data []byte) (x509Certificates []byte, err error) {
+ bag := new(certBag)
+ if err := unmarshal(asn1Data, bag); err != nil {
+ return nil, errors.New("pkcs12: error decoding cert bag: " + err.Error())
+ }
+ if !bag.Id.Equal(oidCertTypeX509Certificate) {
+ return nil, NotImplementedError("only X509 certificates are supported")
+ }
+ return bag.Data, nil
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/const_amd64.s b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/const_amd64.s
new file mode 100644
index 00000000000..8e861f337cd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/const_amd64.s
@@ -0,0 +1,45 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code was translated into a form compatible with 6a from the public
+// domain sources in SUPERCOP: http://bench.cr.yp.to/supercop.html
+
+// +build amd64,!gccgo,!appengine
+
+DATA ·SCALE(SB)/8, $0x37F4000000000000
+GLOBL ·SCALE(SB), 8, $8
+DATA ·TWO32(SB)/8, $0x41F0000000000000
+GLOBL ·TWO32(SB), 8, $8
+DATA ·TWO64(SB)/8, $0x43F0000000000000
+GLOBL ·TWO64(SB), 8, $8
+DATA ·TWO96(SB)/8, $0x45F0000000000000
+GLOBL ·TWO96(SB), 8, $8
+DATA ·ALPHA32(SB)/8, $0x45E8000000000000
+GLOBL ·ALPHA32(SB), 8, $8
+DATA ·ALPHA64(SB)/8, $0x47E8000000000000
+GLOBL ·ALPHA64(SB), 8, $8
+DATA ·ALPHA96(SB)/8, $0x49E8000000000000
+GLOBL ·ALPHA96(SB), 8, $8
+DATA ·ALPHA130(SB)/8, $0x4C08000000000000
+GLOBL ·ALPHA130(SB), 8, $8
+DATA ·DOFFSET0(SB)/8, $0x4330000000000000
+GLOBL ·DOFFSET0(SB), 8, $8
+DATA ·DOFFSET1(SB)/8, $0x4530000000000000
+GLOBL ·DOFFSET1(SB), 8, $8
+DATA ·DOFFSET2(SB)/8, $0x4730000000000000
+GLOBL ·DOFFSET2(SB), 8, $8
+DATA ·DOFFSET3(SB)/8, $0x4930000000000000
+GLOBL ·DOFFSET3(SB), 8, $8
+DATA ·DOFFSET3MINUSTWO128(SB)/8, $0x492FFFFE00000000
+GLOBL ·DOFFSET3MINUSTWO128(SB), 8, $8
+DATA ·HOFFSET0(SB)/8, $0x43300001FFFFFFFB
+GLOBL ·HOFFSET0(SB), 8, $8
+DATA ·HOFFSET1(SB)/8, $0x45300001FFFFFFFE
+GLOBL ·HOFFSET1(SB), 8, $8
+DATA ·HOFFSET2(SB)/8, $0x47300001FFFFFFFE
+GLOBL ·HOFFSET2(SB), 8, $8
+DATA ·HOFFSET3(SB)/8, $0x49300003FFFFFFFE
+GLOBL ·HOFFSET3(SB), 8, $8
+DATA ·ROUNDING(SB)/2, $0x137f
+GLOBL ·ROUNDING(SB), 8, $2
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305.go
new file mode 100644
index 00000000000..4a5f826f7a6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305.go
@@ -0,0 +1,32 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package poly1305 implements the Poly1305 one-time message authentication code as specified in http://cr.yp.to/mac/poly1305-20050329.pdf.
+
+Poly1305 is a fast, one-time authentication function. It is infeasible for an
+attacker to generate an authenticator for a message without the key. However, a
+key must only be used for a single message. Authenticating two different
+messages with the same key allows an attacker to forge authenticators for other
+messages with the same key.
+
+Poly1305 was originally coupled with AES in order to make Poly1305-AES. AES was
+used with a fixed key in order to generate one-time keys from a nonce.
+However, in this package AES isn't used and the one-time key is specified
+directly.
+*/
+package poly1305 // import "golang.org/x/crypto/poly1305"
+
+import "crypto/subtle"
+
+// TagSize is the size, in bytes, of a poly1305 authenticator.
+const TagSize = 16
+
+// Verify returns true if mac is a valid authenticator for m with the given
+// key.
+func Verify(mac *[16]byte, m []byte, key *[32]byte) bool {
+ var tmp [16]byte
+ Sum(&tmp, m, key)
+ return subtle.ConstantTimeCompare(tmp[:], mac[:]) == 1
+}
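
A minimal usage sketch of Sum and Verify as declared above (not part of the vendored file; the message text and the use of crypto/rand to draw the one-time key are illustrative assumptions):

package main

import (
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/poly1305"
)

func main() {
	// A Poly1305 key must authenticate only a single message, so draw a
	// fresh key for this one use.
	var key [32]byte
	if _, err := rand.Read(key[:]); err != nil {
		panic(err)
	}

	msg := []byte("attack at dawn")

	var tag [16]byte
	poly1305.Sum(&tag, msg, &key)

	// Verify compares in constant time via crypto/subtle; prefer it over
	// bytes.Equal when checking an authenticator.
	fmt.Println(poly1305.Verify(&tag, msg, &key)) // true
}
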
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_amd64.s b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_amd64.s
new file mode 100644
index 00000000000..f8d4ee92898
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_amd64.s
@@ -0,0 +1,497 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code was translated into a form compatible with 6a from the public
+// domain sources in SUPERCOP: http://bench.cr.yp.to/supercop.html
+
+// +build amd64,!gccgo,!appengine
+
+// func poly1305(out *[16]byte, m *byte, mlen uint64, key *[32]byte)
+TEXT ·poly1305(SB),0,$224-32
+ MOVQ out+0(FP),DI
+ MOVQ m+8(FP),SI
+ MOVQ mlen+16(FP),DX
+ MOVQ key+24(FP),CX
+
+ MOVQ SP,R11
+ MOVQ $31,R9
+ NOTQ R9
+ ANDQ R9,SP
+ ADDQ $32,SP
+
+ MOVQ R11,32(SP)
+ MOVQ R12,40(SP)
+ MOVQ R13,48(SP)
+ MOVQ R14,56(SP)
+ MOVQ R15,64(SP)
+ MOVQ BX,72(SP)
+ MOVQ BP,80(SP)
+ FLDCW ·ROUNDING(SB)
+ MOVL 0(CX),R8
+ MOVL 4(CX),R9
+ MOVL 8(CX),AX
+ MOVL 12(CX),R10
+ MOVQ DI,88(SP)
+ MOVQ CX,96(SP)
+ MOVL $0X43300000,108(SP)
+ MOVL $0X45300000,116(SP)
+ MOVL $0X47300000,124(SP)
+ MOVL $0X49300000,132(SP)
+ ANDL $0X0FFFFFFF,R8
+ ANDL $0X0FFFFFFC,R9
+ ANDL $0X0FFFFFFC,AX
+ ANDL $0X0FFFFFFC,R10
+ MOVL R8,104(SP)
+ MOVL R9,112(SP)
+ MOVL AX,120(SP)
+ MOVL R10,128(SP)
+ FMOVD 104(SP), F0
+ FSUBD ·DOFFSET0(SB), F0
+ FMOVD 112(SP), F0
+ FSUBD ·DOFFSET1(SB), F0
+ FMOVD 120(SP), F0
+ FSUBD ·DOFFSET2(SB), F0
+ FMOVD 128(SP), F0
+ FSUBD ·DOFFSET3(SB), F0
+ FXCHD F0, F3
+ FMOVDP F0, 136(SP)
+ FXCHD F0, F1
+ FMOVD F0, 144(SP)
+ FMULD ·SCALE(SB), F0
+ FMOVDP F0, 152(SP)
+ FMOVD F0, 160(SP)
+ FMULD ·SCALE(SB), F0
+ FMOVDP F0, 168(SP)
+ FMOVD F0, 176(SP)
+ FMULD ·SCALE(SB), F0
+ FMOVDP F0, 184(SP)
+ FLDZ
+ FLDZ
+ FLDZ
+ FLDZ
+ CMPQ DX,$16
+ JB ADDATMOST15BYTES
+ INITIALATLEAST16BYTES:
+ MOVL 12(SI),DI
+ MOVL 8(SI),CX
+ MOVL 4(SI),R8
+ MOVL 0(SI),R9
+ MOVL DI,128(SP)
+ MOVL CX,120(SP)
+ MOVL R8,112(SP)
+ MOVL R9,104(SP)
+ ADDQ $16,SI
+ SUBQ $16,DX
+ FXCHD F0, F3
+ FADDD 128(SP), F0
+ FSUBD ·DOFFSET3MINUSTWO128(SB), F0
+ FXCHD F0, F1
+ FADDD 112(SP), F0
+ FSUBD ·DOFFSET1(SB), F0
+ FXCHD F0, F2
+ FADDD 120(SP), F0
+ FSUBD ·DOFFSET2(SB), F0
+ FXCHD F0, F3
+ FADDD 104(SP), F0
+ FSUBD ·DOFFSET0(SB), F0
+ CMPQ DX,$16
+ JB MULTIPLYADDATMOST15BYTES
+ MULTIPLYADDATLEAST16BYTES:
+ MOVL 12(SI),DI
+ MOVL 8(SI),CX
+ MOVL 4(SI),R8
+ MOVL 0(SI),R9
+ MOVL DI,128(SP)
+ MOVL CX,120(SP)
+ MOVL R8,112(SP)
+ MOVL R9,104(SP)
+ ADDQ $16,SI
+ SUBQ $16,DX
+ FMOVD ·ALPHA130(SB), F0
+ FADDD F2,F0
+ FSUBD ·ALPHA130(SB), F0
+ FSUBD F0,F2
+ FMULD ·SCALE(SB), F0
+ FMOVD ·ALPHA32(SB), F0
+ FADDD F2,F0
+ FSUBD ·ALPHA32(SB), F0
+ FSUBD F0,F2
+ FXCHD F0, F2
+ FADDDP F0,F1
+ FMOVD ·ALPHA64(SB), F0
+ FADDD F4,F0
+ FSUBD ·ALPHA64(SB), F0
+ FSUBD F0,F4
+ FMOVD ·ALPHA96(SB), F0
+ FADDD F6,F0
+ FSUBD ·ALPHA96(SB), F0
+ FSUBD F0,F6
+ FXCHD F0, F6
+ FADDDP F0,F1
+ FXCHD F0, F3
+ FADDDP F0,F5
+ FXCHD F0, F3
+ FADDDP F0,F1
+ FMOVD 176(SP), F0
+ FMULD F3,F0
+ FMOVD 160(SP), F0
+ FMULD F4,F0
+ FMOVD 144(SP), F0
+ FMULD F5,F0
+ FMOVD 136(SP), F0
+ FMULDP F0,F6
+ FMOVD 160(SP), F0
+ FMULD F4,F0
+ FADDDP F0,F3
+ FMOVD 144(SP), F0
+ FMULD F4,F0
+ FADDDP F0,F2
+ FMOVD 136(SP), F0
+ FMULD F4,F0
+ FADDDP F0,F1
+ FMOVD 184(SP), F0
+ FMULDP F0,F4
+ FXCHD F0, F3
+ FADDDP F0,F5
+ FMOVD 144(SP), F0
+ FMULD F4,F0
+ FADDDP F0,F2
+ FMOVD 136(SP), F0
+ FMULD F4,F0
+ FADDDP F0,F1
+ FMOVD 184(SP), F0
+ FMULD F4,F0
+ FADDDP F0,F3
+ FMOVD 168(SP), F0
+ FMULDP F0,F4
+ FXCHD F0, F3
+ FADDDP F0,F4
+ FMOVD 136(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F1
+ FXCHD F0, F3
+ FMOVD 184(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F3
+ FXCHD F0, F1
+ FMOVD 168(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F1
+ FMOVD 152(SP), F0
+ FMULDP F0,F5
+ FXCHD F0, F4
+ FADDDP F0,F1
+ CMPQ DX,$16
+ FXCHD F0, F2
+ FMOVD 128(SP), F0
+ FSUBD ·DOFFSET3MINUSTWO128(SB), F0
+ FADDDP F0,F1
+ FXCHD F0, F1
+ FMOVD 120(SP), F0
+ FSUBD ·DOFFSET2(SB), F0
+ FADDDP F0,F1
+ FXCHD F0, F3
+ FMOVD 112(SP), F0
+ FSUBD ·DOFFSET1(SB), F0
+ FADDDP F0,F1
+ FXCHD F0, F2
+ FMOVD 104(SP), F0
+ FSUBD ·DOFFSET0(SB), F0
+ FADDDP F0,F1
+ JAE MULTIPLYADDATLEAST16BYTES
+ MULTIPLYADDATMOST15BYTES:
+ FMOVD ·ALPHA130(SB), F0
+ FADDD F2,F0
+ FSUBD ·ALPHA130(SB), F0
+ FSUBD F0,F2
+ FMULD ·SCALE(SB), F0
+ FMOVD ·ALPHA32(SB), F0
+ FADDD F2,F0
+ FSUBD ·ALPHA32(SB), F0
+ FSUBD F0,F2
+ FMOVD ·ALPHA64(SB), F0
+ FADDD F5,F0
+ FSUBD ·ALPHA64(SB), F0
+ FSUBD F0,F5
+ FMOVD ·ALPHA96(SB), F0
+ FADDD F7,F0
+ FSUBD ·ALPHA96(SB), F0
+ FSUBD F0,F7
+ FXCHD F0, F7
+ FADDDP F0,F1
+ FXCHD F0, F5
+ FADDDP F0,F1
+ FXCHD F0, F3
+ FADDDP F0,F5
+ FADDDP F0,F1
+ FMOVD 176(SP), F0
+ FMULD F1,F0
+ FMOVD 160(SP), F0
+ FMULD F2,F0
+ FMOVD 144(SP), F0
+ FMULD F3,F0
+ FMOVD 136(SP), F0
+ FMULDP F0,F4
+ FMOVD 160(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F3
+ FMOVD 144(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F2
+ FMOVD 136(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F1
+ FMOVD 184(SP), F0
+ FMULDP F0,F5
+ FXCHD F0, F4
+ FADDDP F0,F3
+ FMOVD 144(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F2
+ FMOVD 136(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F1
+ FMOVD 184(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F4
+ FMOVD 168(SP), F0
+ FMULDP F0,F5
+ FXCHD F0, F4
+ FADDDP F0,F2
+ FMOVD 136(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F1
+ FMOVD 184(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F4
+ FMOVD 168(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F3
+ FMOVD 152(SP), F0
+ FMULDP F0,F5
+ FXCHD F0, F4
+ FADDDP F0,F1
+ ADDATMOST15BYTES:
+ CMPQ DX,$0
+ JE NOMOREBYTES
+ MOVL $0,0(SP)
+ MOVL $0, 4 (SP)
+ MOVL $0, 8 (SP)
+ MOVL $0, 12 (SP)
+ LEAQ 0(SP),DI
+ MOVQ DX,CX
+ REP; MOVSB
+ MOVB $1,0(DI)
+ MOVL 12 (SP),DI
+ MOVL 8 (SP),SI
+ MOVL 4 (SP),DX
+ MOVL 0(SP),CX
+ MOVL DI,128(SP)
+ MOVL SI,120(SP)
+ MOVL DX,112(SP)
+ MOVL CX,104(SP)
+ FXCHD F0, F3
+ FADDD 128(SP), F0
+ FSUBD ·DOFFSET3(SB), F0
+ FXCHD F0, F2
+ FADDD 120(SP), F0
+ FSUBD ·DOFFSET2(SB), F0
+ FXCHD F0, F1
+ FADDD 112(SP), F0
+ FSUBD ·DOFFSET1(SB), F0
+ FXCHD F0, F3
+ FADDD 104(SP), F0
+ FSUBD ·DOFFSET0(SB), F0
+ FMOVD ·ALPHA130(SB), F0
+ FADDD F3,F0
+ FSUBD ·ALPHA130(SB), F0
+ FSUBD F0,F3
+ FMULD ·SCALE(SB), F0
+ FMOVD ·ALPHA32(SB), F0
+ FADDD F2,F0
+ FSUBD ·ALPHA32(SB), F0
+ FSUBD F0,F2
+ FMOVD ·ALPHA64(SB), F0
+ FADDD F6,F0
+ FSUBD ·ALPHA64(SB), F0
+ FSUBD F0,F6
+ FMOVD ·ALPHA96(SB), F0
+ FADDD F5,F0
+ FSUBD ·ALPHA96(SB), F0
+ FSUBD F0,F5
+ FXCHD F0, F4
+ FADDDP F0,F3
+ FXCHD F0, F6
+ FADDDP F0,F1
+ FXCHD F0, F3
+ FADDDP F0,F5
+ FXCHD F0, F3
+ FADDDP F0,F1
+ FMOVD 176(SP), F0
+ FMULD F3,F0
+ FMOVD 160(SP), F0
+ FMULD F4,F0
+ FMOVD 144(SP), F0
+ FMULD F5,F0
+ FMOVD 136(SP), F0
+ FMULDP F0,F6
+ FMOVD 160(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F3
+ FMOVD 144(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F2
+ FMOVD 136(SP), F0
+ FMULD F5,F0
+ FADDDP F0,F1
+ FMOVD 184(SP), F0
+ FMULDP F0,F5
+ FXCHD F0, F4
+ FADDDP F0,F5
+ FMOVD 144(SP), F0
+ FMULD F6,F0
+ FADDDP F0,F2
+ FMOVD 136(SP), F0
+ FMULD F6,F0
+ FADDDP F0,F1
+ FMOVD 184(SP), F0
+ FMULD F6,F0
+ FADDDP F0,F4
+ FMOVD 168(SP), F0
+ FMULDP F0,F6
+ FXCHD F0, F5
+ FADDDP F0,F4
+ FMOVD 136(SP), F0
+ FMULD F2,F0
+ FADDDP F0,F1
+ FMOVD 184(SP), F0
+ FMULD F2,F0
+ FADDDP F0,F5
+ FMOVD 168(SP), F0
+ FMULD F2,F0
+ FADDDP F0,F3
+ FMOVD 152(SP), F0
+ FMULDP F0,F2
+ FXCHD F0, F1
+ FADDDP F0,F3
+ FXCHD F0, F3
+ FXCHD F0, F2
+ NOMOREBYTES:
+ MOVL $0,R10
+ FMOVD ·ALPHA130(SB), F0
+ FADDD F4,F0
+ FSUBD ·ALPHA130(SB), F0
+ FSUBD F0,F4
+ FMULD ·SCALE(SB), F0
+ FMOVD ·ALPHA32(SB), F0
+ FADDD F2,F0
+ FSUBD ·ALPHA32(SB), F0
+ FSUBD F0,F2
+ FMOVD ·ALPHA64(SB), F0
+ FADDD F4,F0
+ FSUBD ·ALPHA64(SB), F0
+ FSUBD F0,F4
+ FMOVD ·ALPHA96(SB), F0
+ FADDD F6,F0
+ FSUBD ·ALPHA96(SB), F0
+ FXCHD F0, F6
+ FSUBD F6,F0
+ FXCHD F0, F4
+ FADDDP F0,F3
+ FXCHD F0, F4
+ FADDDP F0,F1
+ FXCHD F0, F2
+ FADDDP F0,F3
+ FXCHD F0, F4
+ FADDDP F0,F3
+ FXCHD F0, F3
+ FADDD ·HOFFSET0(SB), F0
+ FXCHD F0, F3
+ FADDD ·HOFFSET1(SB), F0
+ FXCHD F0, F1
+ FADDD ·HOFFSET2(SB), F0
+ FXCHD F0, F2
+ FADDD ·HOFFSET3(SB), F0
+ FXCHD F0, F3
+ FMOVDP F0, 104(SP)
+ FMOVDP F0, 112(SP)
+ FMOVDP F0, 120(SP)
+ FMOVDP F0, 128(SP)
+ MOVL 108(SP),DI
+ ANDL $63,DI
+ MOVL 116(SP),SI
+ ANDL $63,SI
+ MOVL 124(SP),DX
+ ANDL $63,DX
+ MOVL 132(SP),CX
+ ANDL $63,CX
+ MOVL 112(SP),R8
+ ADDL DI,R8
+ MOVQ R8,112(SP)
+ MOVL 120(SP),DI
+ ADCL SI,DI
+ MOVQ DI,120(SP)
+ MOVL 128(SP),DI
+ ADCL DX,DI
+ MOVQ DI,128(SP)
+ MOVL R10,DI
+ ADCL CX,DI
+ MOVQ DI,136(SP)
+ MOVQ $5,DI
+ MOVL 104(SP),SI
+ ADDL SI,DI
+ MOVQ DI,104(SP)
+ MOVL R10,DI
+ MOVQ 112(SP),DX
+ ADCL DX,DI
+ MOVQ DI,112(SP)
+ MOVL R10,DI
+ MOVQ 120(SP),CX
+ ADCL CX,DI
+ MOVQ DI,120(SP)
+ MOVL R10,DI
+ MOVQ 128(SP),R8
+ ADCL R8,DI
+ MOVQ DI,128(SP)
+ MOVQ $0XFFFFFFFC,DI
+ MOVQ 136(SP),R9
+ ADCL R9,DI
+ SARL $16,DI
+ MOVQ DI,R9
+ XORL $0XFFFFFFFF,R9
+ ANDQ DI,SI
+ MOVQ 104(SP),AX
+ ANDQ R9,AX
+ ORQ AX,SI
+ ANDQ DI,DX
+ MOVQ 112(SP),AX
+ ANDQ R9,AX
+ ORQ AX,DX
+ ANDQ DI,CX
+ MOVQ 120(SP),AX
+ ANDQ R9,AX
+ ORQ AX,CX
+ ANDQ DI,R8
+ MOVQ 128(SP),DI
+ ANDQ R9,DI
+ ORQ DI,R8
+ MOVQ 88(SP),DI
+ MOVQ 96(SP),R9
+ ADDL 16(R9),SI
+ ADCL 20(R9),DX
+ ADCL 24(R9),CX
+ ADCL 28(R9),R8
+ MOVL SI,0(DI)
+ MOVL DX,4(DI)
+ MOVL CX,8(DI)
+ MOVL R8,12(DI)
+ MOVQ 32(SP),R11
+ MOVQ 40(SP),R12
+ MOVQ 48(SP),R13
+ MOVQ 56(SP),R14
+ MOVQ 64(SP),R15
+ MOVQ 72(SP),BX
+ MOVQ 80(SP),BP
+ MOVQ R11,SP
+ RET
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_arm.s b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_arm.s
new file mode 100644
index 00000000000..c15386744dd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_arm.s
@@ -0,0 +1,379 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code was translated into a form compatible with 5a from the public
+// domain source by Andrew Moon: github.com/floodyberry/poly1305-opt/blob/master/app/extensions/poly1305.
+
+// +build arm,!gccgo,!appengine
+
+DATA poly1305_init_constants_armv6<>+0x00(SB)/4, $0x3ffffff
+DATA poly1305_init_constants_armv6<>+0x04(SB)/4, $0x3ffff03
+DATA poly1305_init_constants_armv6<>+0x08(SB)/4, $0x3ffc0ff
+DATA poly1305_init_constants_armv6<>+0x0c(SB)/4, $0x3f03fff
+DATA poly1305_init_constants_armv6<>+0x10(SB)/4, $0x00fffff
+GLOBL poly1305_init_constants_armv6<>(SB), 8, $20
+
+// Warning: the linker may use R11 to synthesize certain instructions. Please
+// take care and verify that no synthetic instructions use it.
+
+TEXT poly1305_init_ext_armv6<>(SB),4,$-4
+ MOVM.DB.W [R4-R11], (R13)
+ MOVM.IA.W (R1), [R2-R5]
+ MOVW $poly1305_init_constants_armv6<>(SB), R7
+ MOVW R2, R8
+ MOVW R2>>26, R9
+ MOVW R3>>20, g
+ MOVW R4>>14, R11
+ MOVW R5>>8, R12
+ ORR R3<<6, R9, R9
+ ORR R4<<12, g, g
+ ORR R5<<18, R11, R11
+ MOVM.IA (R7), [R2-R6]
+ AND R8, R2, R2
+ AND R9, R3, R3
+ AND g, R4, R4
+ AND R11, R5, R5
+ AND R12, R6, R6
+ MOVM.IA.W [R2-R6], (R0)
+ EOR R2, R2, R2
+ EOR R3, R3, R3
+ EOR R4, R4, R4
+ EOR R5, R5, R5
+ EOR R6, R6, R6
+ MOVM.IA.W [R2-R6], (R0)
+ MOVM.IA.W (R1), [R2-R5]
+ MOVM.IA [R2-R6], (R0)
+ MOVM.IA.W (R13), [R4-R11]
+ RET
+
+#define MOVW_UNALIGNED(Rsrc, Rdst, Rtmp, offset) \
+ MOVBU (offset+0)(Rsrc), Rtmp; \
+ MOVBU Rtmp, (offset+0)(Rdst); \
+ MOVBU (offset+1)(Rsrc), Rtmp; \
+ MOVBU Rtmp, (offset+1)(Rdst); \
+ MOVBU (offset+2)(Rsrc), Rtmp; \
+ MOVBU Rtmp, (offset+2)(Rdst); \
+ MOVBU (offset+3)(Rsrc), Rtmp; \
+ MOVBU Rtmp, (offset+3)(Rdst)
+
+TEXT poly1305_blocks_armv6<>(SB),4,$-4
+ MOVM.DB.W [R4, R5, R6, R7, R8, R9, g, R11, R14], (R13)
+ SUB $128, R13
+ MOVW R0, 36(R13)
+ MOVW R1, 40(R13)
+ MOVW R2, 44(R13)
+ MOVW R1, R14
+ MOVW R2, R12
+ MOVW 56(R0), R8
+ WORD $0xe1180008 // TST R8, R8 not working see issue 5921
+ EOR R6, R6, R6
+ MOVW.EQ $(1<<24), R6
+ MOVW R6, 32(R13)
+ ADD $64, R13, g
+ MOVM.IA (R0), [R0-R9]
+ MOVM.IA [R0-R4], (g)
+ CMP $16, R12
+ BLO poly1305_blocks_armv6_done
+poly1305_blocks_armv6_mainloop:
+ WORD $0xe31e0003 // TST R14, #3 not working see issue 5921
+ BEQ poly1305_blocks_armv6_mainloop_aligned
+ ADD $48, R13, g
+ MOVW_UNALIGNED(R14, g, R0, 0)
+ MOVW_UNALIGNED(R14, g, R0, 4)
+ MOVW_UNALIGNED(R14, g, R0, 8)
+ MOVW_UNALIGNED(R14, g, R0, 12)
+ MOVM.IA (g), [R0-R3]
+ ADD $16, R14
+ B poly1305_blocks_armv6_mainloop_loaded
+poly1305_blocks_armv6_mainloop_aligned:
+ MOVM.IA.W (R14), [R0-R3]
+poly1305_blocks_armv6_mainloop_loaded:
+ MOVW R0>>26, g
+ MOVW R1>>20, R11
+ MOVW R2>>14, R12
+ MOVW R14, 40(R13)
+ MOVW R3>>8, R4
+ ORR R1<<6, g, g
+ ORR R2<<12, R11, R11
+ ORR R3<<18, R12, R12
+ BIC $0xfc000000, R0, R0
+ BIC $0xfc000000, g, g
+ MOVW 32(R13), R3
+ BIC $0xfc000000, R11, R11
+ BIC $0xfc000000, R12, R12
+ ADD R0, R5, R5
+ ADD g, R6, R6
+ ORR R3, R4, R4
+ ADD R11, R7, R7
+ ADD $64, R13, R14
+ ADD R12, R8, R8
+ ADD R4, R9, R9
+ MOVM.IA (R14), [R0-R4]
+ MULLU R4, R5, (R11, g)
+ MULLU R3, R5, (R14, R12)
+ MULALU R3, R6, (R11, g)
+ MULALU R2, R6, (R14, R12)
+ MULALU R2, R7, (R11, g)
+ MULALU R1, R7, (R14, R12)
+ ADD R4<<2, R4, R4
+ ADD R3<<2, R3, R3
+ MULALU R1, R8, (R11, g)
+ MULALU R0, R8, (R14, R12)
+ MULALU R0, R9, (R11, g)
+ MULALU R4, R9, (R14, R12)
+ MOVW g, 24(R13)
+ MOVW R11, 28(R13)
+ MOVW R12, 16(R13)
+ MOVW R14, 20(R13)
+ MULLU R2, R5, (R11, g)
+ MULLU R1, R5, (R14, R12)
+ MULALU R1, R6, (R11, g)
+ MULALU R0, R6, (R14, R12)
+ MULALU R0, R7, (R11, g)
+ MULALU R4, R7, (R14, R12)
+ ADD R2<<2, R2, R2
+ ADD R1<<2, R1, R1
+ MULALU R4, R8, (R11, g)
+ MULALU R3, R8, (R14, R12)
+ MULALU R3, R9, (R11, g)
+ MULALU R2, R9, (R14, R12)
+ MOVW g, 8(R13)
+ MOVW R11, 12(R13)
+ MOVW R12, 0(R13)
+ MOVW R14, w+4(SP)
+ MULLU R0, R5, (R11, g)
+ MULALU R4, R6, (R11, g)
+ MULALU R3, R7, (R11, g)
+ MULALU R2, R8, (R11, g)
+ MULALU R1, R9, (R11, g)
+ MOVM.IA (R13), [R0-R7]
+ MOVW g>>26, R12
+ MOVW R4>>26, R14
+ ORR R11<<6, R12, R12
+ ORR R5<<6, R14, R14
+ BIC $0xfc000000, g, g
+ BIC $0xfc000000, R4, R4
+ ADD.S R12, R0, R0
+ ADC $0, R1, R1
+ ADD.S R14, R6, R6
+ ADC $0, R7, R7
+ MOVW R0>>26, R12
+ MOVW R6>>26, R14
+ ORR R1<<6, R12, R12
+ ORR R7<<6, R14, R14
+ BIC $0xfc000000, R0, R0
+ BIC $0xfc000000, R6, R6
+ ADD R14<<2, R14, R14
+ ADD.S R12, R2, R2
+ ADC $0, R3, R3
+ ADD R14, g, g
+ MOVW R2>>26, R12
+ MOVW g>>26, R14
+ ORR R3<<6, R12, R12
+ BIC $0xfc000000, g, R5
+ BIC $0xfc000000, R2, R7
+ ADD R12, R4, R4
+ ADD R14, R0, R0
+ MOVW R4>>26, R12
+ BIC $0xfc000000, R4, R8
+ ADD R12, R6, R9
+ MOVW w+44(SP), R12
+ MOVW w+40(SP), R14
+ MOVW R0, R6
+ CMP $32, R12
+ SUB $16, R12, R12
+ MOVW R12, 44(R13)
+ BHS poly1305_blocks_armv6_mainloop
+poly1305_blocks_armv6_done:
+ MOVW 36(R13), R12
+ MOVW R5, 20(R12)
+ MOVW R6, 24(R12)
+ MOVW R7, 28(R12)
+ MOVW R8, 32(R12)
+ MOVW R9, 36(R12)
+ ADD $128, R13, R13
+ MOVM.IA.W (R13), [R4, R5, R6, R7, R8, R9, g, R11, R14]
+ RET
+
+#define MOVHUP_UNALIGNED(Rsrc, Rdst, Rtmp) \
+ MOVBU.P 1(Rsrc), Rtmp; \
+ MOVBU.P Rtmp, 1(Rdst); \
+ MOVBU.P 1(Rsrc), Rtmp; \
+ MOVBU.P Rtmp, 1(Rdst)
+
+#define MOVWP_UNALIGNED(Rsrc, Rdst, Rtmp) \
+ MOVHUP_UNALIGNED(Rsrc, Rdst, Rtmp); \
+ MOVHUP_UNALIGNED(Rsrc, Rdst, Rtmp)
+
+TEXT poly1305_finish_ext_armv6<>(SB),4,$-4
+ MOVM.DB.W [R4, R5, R6, R7, R8, R9, g, R11, R14], (R13)
+ SUB $16, R13, R13
+ MOVW R0, R5
+ MOVW R1, R6
+ MOVW R2, R7
+ MOVW R3, R8
+ AND.S R2, R2, R2
+ BEQ poly1305_finish_ext_armv6_noremaining
+ EOR R0, R0
+ MOVW R13, R9
+ MOVW R0, 0(R13)
+ MOVW R0, 4(R13)
+ MOVW R0, 8(R13)
+ MOVW R0, 12(R13)
+ WORD $0xe3110003 // TST R1, #3 not working see issue 5921
+ BEQ poly1305_finish_ext_armv6_aligned
+ WORD $0xe3120008 // TST R2, #8 not working see issue 5921
+ BEQ poly1305_finish_ext_armv6_skip8
+ MOVWP_UNALIGNED(R1, R9, g)
+ MOVWP_UNALIGNED(R1, R9, g)
+poly1305_finish_ext_armv6_skip8:
+ WORD $0xe3120004 // TST $4, R2 not working see issue 5921
+ BEQ poly1305_finish_ext_armv6_skip4
+ MOVWP_UNALIGNED(R1, R9, g)
+poly1305_finish_ext_armv6_skip4:
+ WORD $0xe3120002 // TST $2, R2 not working see issue 5921
+ BEQ poly1305_finish_ext_armv6_skip2
+ MOVHUP_UNALIGNED(R1, R9, g)
+ B poly1305_finish_ext_armv6_skip2
+poly1305_finish_ext_armv6_aligned:
+ WORD $0xe3120008 // TST R2, #8 not working see issue 5921
+ BEQ poly1305_finish_ext_armv6_skip8_aligned
+ MOVM.IA.W (R1), [g-R11]
+ MOVM.IA.W [g-R11], (R9)
+poly1305_finish_ext_armv6_skip8_aligned:
+ WORD $0xe3120004 // TST $4, R2 not working see issue 5921
+ BEQ poly1305_finish_ext_armv6_skip4_aligned
+ MOVW.P 4(R1), g
+ MOVW.P g, 4(R9)
+poly1305_finish_ext_armv6_skip4_aligned:
+ WORD $0xe3120002 // TST $2, R2 not working see issue 5921
+ BEQ poly1305_finish_ext_armv6_skip2
+ MOVHU.P 2(R1), g
+ MOVH.P g, 2(R9)
+poly1305_finish_ext_armv6_skip2:
+ WORD $0xe3120001 // TST $1, R2 not working see issue 5921
+ BEQ poly1305_finish_ext_armv6_skip1
+ MOVBU.P 1(R1), g
+ MOVBU.P g, 1(R9)
+poly1305_finish_ext_armv6_skip1:
+ MOVW $1, R11
+ MOVBU R11, 0(R9)
+ MOVW R11, 56(R5)
+ MOVW R5, R0
+ MOVW R13, R1
+ MOVW $16, R2
+ BL poly1305_blocks_armv6<>(SB)
+poly1305_finish_ext_armv6_noremaining:
+ MOVW 20(R5), R0
+ MOVW 24(R5), R1
+ MOVW 28(R5), R2
+ MOVW 32(R5), R3
+ MOVW 36(R5), R4
+ MOVW R4>>26, R12
+ BIC $0xfc000000, R4, R4
+ ADD R12<<2, R12, R12
+ ADD R12, R0, R0
+ MOVW R0>>26, R12
+ BIC $0xfc000000, R0, R0
+ ADD R12, R1, R1
+ MOVW R1>>26, R12
+ BIC $0xfc000000, R1, R1
+ ADD R12, R2, R2
+ MOVW R2>>26, R12
+ BIC $0xfc000000, R2, R2
+ ADD R12, R3, R3
+ MOVW R3>>26, R12
+ BIC $0xfc000000, R3, R3
+ ADD R12, R4, R4
+ ADD $5, R0, R6
+ MOVW R6>>26, R12
+ BIC $0xfc000000, R6, R6
+ ADD R12, R1, R7
+ MOVW R7>>26, R12
+ BIC $0xfc000000, R7, R7
+ ADD R12, R2, g
+ MOVW g>>26, R12
+ BIC $0xfc000000, g, g
+ ADD R12, R3, R11
+ MOVW $-(1<<26), R12
+ ADD R11>>26, R12, R12
+ BIC $0xfc000000, R11, R11
+ ADD R12, R4, R14
+ MOVW R14>>31, R12
+ SUB $1, R12
+ AND R12, R6, R6
+ AND R12, R7, R7
+ AND R12, g, g
+ AND R12, R11, R11
+ AND R12, R14, R14
+ MVN R12, R12
+ AND R12, R0, R0
+ AND R12, R1, R1
+ AND R12, R2, R2
+ AND R12, R3, R3
+ AND R12, R4, R4
+ ORR R6, R0, R0
+ ORR R7, R1, R1
+ ORR g, R2, R2
+ ORR R11, R3, R3
+ ORR R14, R4, R4
+ ORR R1<<26, R0, R0
+ MOVW R1>>6, R1
+ ORR R2<<20, R1, R1
+ MOVW R2>>12, R2
+ ORR R3<<14, R2, R2
+ MOVW R3>>18, R3
+ ORR R4<<8, R3, R3
+ MOVW 40(R5), R6
+ MOVW 44(R5), R7
+ MOVW 48(R5), g
+ MOVW 52(R5), R11
+ ADD.S R6, R0, R0
+ ADC.S R7, R1, R1
+ ADC.S g, R2, R2
+ ADC.S R11, R3, R3
+ MOVM.IA [R0-R3], (R8)
+ MOVW R5, R12
+ EOR R0, R0, R0
+ EOR R1, R1, R1
+ EOR R2, R2, R2
+ EOR R3, R3, R3
+ EOR R4, R4, R4
+ EOR R5, R5, R5
+ EOR R6, R6, R6
+ EOR R7, R7, R7
+ MOVM.IA.W [R0-R7], (R12)
+ MOVM.IA [R0-R7], (R12)
+ ADD $16, R13, R13
+ MOVM.IA.W (R13), [R4, R5, R6, R7, R8, R9, g, R11, R14]
+ RET
+
+// func poly1305_auth_armv6(out *[16]byte, m *byte, mlen uint32, key *[32]byte)
+TEXT ·poly1305_auth_armv6(SB),0,$280-16
+ MOVW out+0(FP), R4
+ MOVW m+4(FP), R5
+ MOVW mlen+8(FP), R6
+ MOVW key+12(FP), R7
+
+ MOVW R13, R8
+ BIC $63, R13
+ SUB $64, R13, R13
+ MOVW R13, R0
+ MOVW R7, R1
+ BL poly1305_init_ext_armv6<>(SB)
+ BIC.S $15, R6, R2
+ BEQ poly1305_auth_armv6_noblocks
+ MOVW R13, R0
+ MOVW R5, R1
+ ADD R2, R5, R5
+ SUB R2, R6, R6
+ BL poly1305_blocks_armv6<>(SB)
+poly1305_auth_armv6_noblocks:
+ MOVW R13, R0
+ MOVW R5, R1
+ MOVW R6, R2
+ MOVW R4, R3
+ BL poly1305_finish_ext_armv6<>(SB)
+ MOVW R8, R13
+ RET
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_test.go
new file mode 100644
index 00000000000..b3e92310b5d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/poly1305_test.go
@@ -0,0 +1,86 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package poly1305
+
+import (
+ "bytes"
+ "testing"
+ "unsafe"
+)
+
+var testData = []struct {
+ in, k, correct []byte
+}{
+ {
+ []byte("Hello world!"),
+ []byte("this is 32-byte key for Poly1305"),
+ []byte{0xa6, 0xf7, 0x45, 0x00, 0x8f, 0x81, 0xc9, 0x16, 0xa2, 0x0d, 0xcc, 0x74, 0xee, 0xf2, 0xb2, 0xf0},
+ },
+ {
+ make([]byte, 32),
+ []byte("this is 32-byte key for Poly1305"),
+ []byte{0x49, 0xec, 0x78, 0x09, 0x0e, 0x48, 0x1e, 0xc6, 0xc2, 0x6b, 0x33, 0xb9, 0x1c, 0xcc, 0x03, 0x07},
+ },
+ {
+ make([]byte, 2007),
+ []byte("this is 32-byte key for Poly1305"),
+ []byte{0xda, 0x84, 0xbc, 0xab, 0x02, 0x67, 0x6c, 0x38, 0xcd, 0xb0, 0x15, 0x60, 0x42, 0x74, 0xc2, 0xaa},
+ },
+ {
+ make([]byte, 2007),
+ make([]byte, 32),
+ make([]byte, 16),
+ },
+}
+
+func testSum(t *testing.T, unaligned bool) {
+ var out [16]byte
+ var key [32]byte
+
+ for i, v := range testData {
+ in := v.in
+ if unaligned {
+ in = unalignBytes(in)
+ }
+ copy(key[:], v.k)
+ Sum(&out, in, &key)
+ if !bytes.Equal(out[:], v.correct) {
+ t.Errorf("%d: expected %x, got %x", i, v.correct, out[:])
+ }
+ }
+}
+
+func TestSum(t *testing.T) { testSum(t, false) }
+func TestSumUnaligned(t *testing.T) { testSum(t, true) }
+
+func benchmark(b *testing.B, size int, unaligned bool) {
+ var out [16]byte
+ var key [32]byte
+ in := make([]byte, size)
+ if unaligned {
+ in = unalignBytes(in)
+ }
+ b.SetBytes(int64(len(in)))
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ Sum(&out, in, &key)
+ }
+}
+
+func Benchmark64(b *testing.B) { benchmark(b, 64, false) }
+func Benchmark1K(b *testing.B) { benchmark(b, 1024, false) }
+func Benchmark64Unaligned(b *testing.B) { benchmark(b, 64, true) }
+func Benchmark1KUnaligned(b *testing.B) { benchmark(b, 1024, true) }
+
+func unalignBytes(in []byte) []byte {
+ out := make([]byte, len(in)+1)
+ if uintptr(unsafe.Pointer(&out[0]))&(unsafe.Alignof(uint32(0))-1) == 0 {
+ out = out[1:]
+ } else {
+ out = out[:len(in)]
+ }
+ copy(out, in)
+ return out
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_amd64.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_amd64.go
new file mode 100644
index 00000000000..6775c703f61
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_amd64.go
@@ -0,0 +1,24 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build amd64,!gccgo,!appengine
+
+package poly1305
+
+// This function is implemented in poly1305_amd64.s
+
+//go:noescape
+
+func poly1305(out *[16]byte, m *byte, mlen uint64, key *[32]byte)
+
+// Sum generates an authenticator for m using a one-time key and puts the
+// 16-byte result into out. Authenticating two different messages with the same
+// key allows an attacker to forge messages at will.
+func Sum(out *[16]byte, m []byte, key *[32]byte) {
+ var mPtr *byte
+ if len(m) > 0 {
+ mPtr = &m[0]
+ }
+ poly1305(out, mPtr, uint64(len(m)), key)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_arm.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_arm.go
new file mode 100644
index 00000000000..50b979c24c6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_arm.go
@@ -0,0 +1,24 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build arm,!gccgo,!appengine
+
+package poly1305
+
+// This function is implemented in poly1305_arm.s
+
+//go:noescape
+
+func poly1305_auth_armv6(out *[16]byte, m *byte, mlen uint32, key *[32]byte)
+
+// Sum generates an authenticator for m using a one-time key and puts the
+// 16-byte result into out. Authenticating two different messages with the same
+// key allows an attacker to forge messages at will.
+func Sum(out *[16]byte, m []byte, key *[32]byte) {
+ var mPtr *byte
+ if len(m) > 0 {
+ mPtr = &m[0]
+ }
+ poly1305_auth_armv6(out, mPtr, uint32(len(m)), key)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_ref.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_ref.go
new file mode 100644
index 00000000000..0b24fc78b93
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/poly1305/sum_ref.go
@@ -0,0 +1,1531 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !amd64,!arm gccgo appengine
+
+package poly1305
+
+// Based on the original, public domain implementation from NaCl by D. J.
+// Bernstein.
+
+import "math"
+
+const (
+ alpham80 = 0.00000000558793544769287109375
+ alpham48 = 24.0
+ alpham16 = 103079215104.0
+ alpha0 = 6755399441055744.0
+ alpha18 = 1770887431076116955136.0
+ alpha32 = 29014219670751100192948224.0
+ alpha50 = 7605903601369376408980219232256.0
+ alpha64 = 124615124604835863084731911901282304.0
+ alpha82 = 32667107224410092492483962313449748299776.0
+ alpha96 = 535217884764734955396857238543560676143529984.0
+ alpha112 = 35076039295941670036888435985190792471742381031424.0
+ alpha130 = 9194973245195333150150082162901855101712434733101613056.0
+ scale = 0.0000000000000000000000000000000000000036734198463196484624023016788195177431833298649127735047148490821200539357960224151611328125
+ offset0 = 6755408030990331.0
+ offset1 = 29014256564239239022116864.0
+ offset2 = 124615283061160854719918951570079744.0
+ offset3 = 535219245894202480694386063513315216128475136.0
+)
+
+// Sum generates an authenticator for m using a one-time key and puts the
+// 16-byte result into out. Authenticating two different messages with the same
+// key allows an attacker to forge messages at will.
+func Sum(out *[16]byte, m []byte, key *[32]byte) {
+ r := key
+ s := key[16:]
+ var (
+ y7 float64
+ y6 float64
+ y1 float64
+ y0 float64
+ y5 float64
+ y4 float64
+ x7 float64
+ x6 float64
+ x1 float64
+ x0 float64
+ y3 float64
+ y2 float64
+ x5 float64
+ r3lowx0 float64
+ x4 float64
+ r0lowx6 float64
+ x3 float64
+ r3highx0 float64
+ x2 float64
+ r0highx6 float64
+ r0lowx0 float64
+ sr1lowx6 float64
+ r0highx0 float64
+ sr1highx6 float64
+ sr3low float64
+ r1lowx0 float64
+ sr2lowx6 float64
+ r1highx0 float64
+ sr2highx6 float64
+ r2lowx0 float64
+ sr3lowx6 float64
+ r2highx0 float64
+ sr3highx6 float64
+ r1highx4 float64
+ r1lowx4 float64
+ r0highx4 float64
+ r0lowx4 float64
+ sr3highx4 float64
+ sr3lowx4 float64
+ sr2highx4 float64
+ sr2lowx4 float64
+ r0lowx2 float64
+ r0highx2 float64
+ r1lowx2 float64
+ r1highx2 float64
+ r2lowx2 float64
+ r2highx2 float64
+ sr3lowx2 float64
+ sr3highx2 float64
+ z0 float64
+ z1 float64
+ z2 float64
+ z3 float64
+ m0 int64
+ m1 int64
+ m2 int64
+ m3 int64
+ m00 uint32
+ m01 uint32
+ m02 uint32
+ m03 uint32
+ m10 uint32
+ m11 uint32
+ m12 uint32
+ m13 uint32
+ m20 uint32
+ m21 uint32
+ m22 uint32
+ m23 uint32
+ m30 uint32
+ m31 uint32
+ m32 uint32
+ m33 uint64
+ lbelow2 int32
+ lbelow3 int32
+ lbelow4 int32
+ lbelow5 int32
+ lbelow6 int32
+ lbelow7 int32
+ lbelow8 int32
+ lbelow9 int32
+ lbelow10 int32
+ lbelow11 int32
+ lbelow12 int32
+ lbelow13 int32
+ lbelow14 int32
+ lbelow15 int32
+ s00 uint32
+ s01 uint32
+ s02 uint32
+ s03 uint32
+ s10 uint32
+ s11 uint32
+ s12 uint32
+ s13 uint32
+ s20 uint32
+ s21 uint32
+ s22 uint32
+ s23 uint32
+ s30 uint32
+ s31 uint32
+ s32 uint32
+ s33 uint32
+ bits32 uint64
+ f uint64
+ f0 uint64
+ f1 uint64
+ f2 uint64
+ f3 uint64
+ f4 uint64
+ g uint64
+ g0 uint64
+ g1 uint64
+ g2 uint64
+ g3 uint64
+ g4 uint64
+ )
+
+ var p int32
+
+ l := int32(len(m))
+
+ r00 := uint32(r[0])
+
+ r01 := uint32(r[1])
+
+ r02 := uint32(r[2])
+ r0 := int64(2151)
+
+ r03 := uint32(r[3])
+ r03 &= 15
+ r0 <<= 51
+
+ r10 := uint32(r[4])
+ r10 &= 252
+ r01 <<= 8
+ r0 += int64(r00)
+
+ r11 := uint32(r[5])
+ r02 <<= 16
+ r0 += int64(r01)
+
+ r12 := uint32(r[6])
+ r03 <<= 24
+ r0 += int64(r02)
+
+ r13 := uint32(r[7])
+ r13 &= 15
+ r1 := int64(2215)
+ r0 += int64(r03)
+
+ d0 := r0
+ r1 <<= 51
+ r2 := int64(2279)
+
+ r20 := uint32(r[8])
+ r20 &= 252
+ r11 <<= 8
+ r1 += int64(r10)
+
+ r21 := uint32(r[9])
+ r12 <<= 16
+ r1 += int64(r11)
+
+ r22 := uint32(r[10])
+ r13 <<= 24
+ r1 += int64(r12)
+
+ r23 := uint32(r[11])
+ r23 &= 15
+ r2 <<= 51
+ r1 += int64(r13)
+
+ d1 := r1
+ r21 <<= 8
+ r2 += int64(r20)
+
+ r30 := uint32(r[12])
+ r30 &= 252
+ r22 <<= 16
+ r2 += int64(r21)
+
+ r31 := uint32(r[13])
+ r23 <<= 24
+ r2 += int64(r22)
+
+ r32 := uint32(r[14])
+ r2 += int64(r23)
+ r3 := int64(2343)
+
+ d2 := r2
+ r3 <<= 51
+
+ r33 := uint32(r[15])
+ r33 &= 15
+ r31 <<= 8
+ r3 += int64(r30)
+
+ r32 <<= 16
+ r3 += int64(r31)
+
+ r33 <<= 24
+ r3 += int64(r32)
+
+ r3 += int64(r33)
+ h0 := alpha32 - alpha32
+
+ d3 := r3
+ h1 := alpha32 - alpha32
+
+ h2 := alpha32 - alpha32
+
+ h3 := alpha32 - alpha32
+
+ h4 := alpha32 - alpha32
+
+ r0low := math.Float64frombits(uint64(d0))
+ h5 := alpha32 - alpha32
+
+ r1low := math.Float64frombits(uint64(d1))
+ h6 := alpha32 - alpha32
+
+ r2low := math.Float64frombits(uint64(d2))
+ h7 := alpha32 - alpha32
+
+ r0low -= alpha0
+
+ r1low -= alpha32
+
+ r2low -= alpha64
+
+ r0high := r0low + alpha18
+
+ r3low := math.Float64frombits(uint64(d3))
+
+ r1high := r1low + alpha50
+ sr1low := scale * r1low
+
+ r2high := r2low + alpha82
+ sr2low := scale * r2low
+
+ r0high -= alpha18
+ r0high_stack := r0high
+
+ r3low -= alpha96
+
+ r1high -= alpha50
+ r1high_stack := r1high
+
+ sr1high := sr1low + alpham80
+
+ r0low -= r0high
+
+ r2high -= alpha82
+ sr3low = scale * r3low
+
+ sr2high := sr2low + alpham48
+
+ r1low -= r1high
+ r1low_stack := r1low
+
+ sr1high -= alpham80
+ sr1high_stack := sr1high
+
+ r2low -= r2high
+ r2low_stack := r2low
+
+ sr2high -= alpham48
+ sr2high_stack := sr2high
+
+ r3high := r3low + alpha112
+ r0low_stack := r0low
+
+ sr1low -= sr1high
+ sr1low_stack := sr1low
+
+ sr3high := sr3low + alpham16
+ r2high_stack := r2high
+
+ sr2low -= sr2high
+ sr2low_stack := sr2low
+
+ r3high -= alpha112
+ r3high_stack := r3high
+
+ sr3high -= alpham16
+ sr3high_stack := sr3high
+
+ r3low -= r3high
+ r3low_stack := r3low
+
+ sr3low -= sr3high
+ sr3low_stack := sr3low
+
+ if l < 16 {
+ goto addatmost15bytes
+ }
+
+ m00 = uint32(m[p+0])
+ m0 = 2151
+
+ m0 <<= 51
+ m1 = 2215
+ m01 = uint32(m[p+1])
+
+ m1 <<= 51
+ m2 = 2279
+ m02 = uint32(m[p+2])
+
+ m2 <<= 51
+ m3 = 2343
+ m03 = uint32(m[p+3])
+
+ m10 = uint32(m[p+4])
+ m01 <<= 8
+ m0 += int64(m00)
+
+ m11 = uint32(m[p+5])
+ m02 <<= 16
+ m0 += int64(m01)
+
+ m12 = uint32(m[p+6])
+ m03 <<= 24
+ m0 += int64(m02)
+
+ m13 = uint32(m[p+7])
+ m3 <<= 51
+ m0 += int64(m03)
+
+ m20 = uint32(m[p+8])
+ m11 <<= 8
+ m1 += int64(m10)
+
+ m21 = uint32(m[p+9])
+ m12 <<= 16
+ m1 += int64(m11)
+
+ m22 = uint32(m[p+10])
+ m13 <<= 24
+ m1 += int64(m12)
+
+ m23 = uint32(m[p+11])
+ m1 += int64(m13)
+
+ m30 = uint32(m[p+12])
+ m21 <<= 8
+ m2 += int64(m20)
+
+ m31 = uint32(m[p+13])
+ m22 <<= 16
+ m2 += int64(m21)
+
+ m32 = uint32(m[p+14])
+ m23 <<= 24
+ m2 += int64(m22)
+
+ m33 = uint64(m[p+15])
+ m2 += int64(m23)
+
+ d0 = m0
+ m31 <<= 8
+ m3 += int64(m30)
+
+ d1 = m1
+ m32 <<= 16
+ m3 += int64(m31)
+
+ d2 = m2
+ m33 += 256
+
+ m33 <<= 24
+ m3 += int64(m32)
+
+ m3 += int64(m33)
+ d3 = m3
+
+ p += 16
+ l -= 16
+
+ z0 = math.Float64frombits(uint64(d0))
+
+ z1 = math.Float64frombits(uint64(d1))
+
+ z2 = math.Float64frombits(uint64(d2))
+
+ z3 = math.Float64frombits(uint64(d3))
+
+ z0 -= alpha0
+
+ z1 -= alpha32
+
+ z2 -= alpha64
+
+ z3 -= alpha96
+
+ h0 += z0
+
+ h1 += z1
+
+ h3 += z2
+
+ h5 += z3
+
+ if l < 16 {
+ goto multiplyaddatmost15bytes
+ }
+
+multiplyaddatleast16bytes:
+
+ m2 = 2279
+ m20 = uint32(m[p+8])
+ y7 = h7 + alpha130
+
+ m2 <<= 51
+ m3 = 2343
+ m21 = uint32(m[p+9])
+ y6 = h6 + alpha130
+
+ m3 <<= 51
+ m0 = 2151
+ m22 = uint32(m[p+10])
+ y1 = h1 + alpha32
+
+ m0 <<= 51
+ m1 = 2215
+ m23 = uint32(m[p+11])
+ y0 = h0 + alpha32
+
+ m1 <<= 51
+ m30 = uint32(m[p+12])
+ y7 -= alpha130
+
+ m21 <<= 8
+ m2 += int64(m20)
+ m31 = uint32(m[p+13])
+ y6 -= alpha130
+
+ m22 <<= 16
+ m2 += int64(m21)
+ m32 = uint32(m[p+14])
+ y1 -= alpha32
+
+ m23 <<= 24
+ m2 += int64(m22)
+ m33 = uint64(m[p+15])
+ y0 -= alpha32
+
+ m2 += int64(m23)
+ m00 = uint32(m[p+0])
+ y5 = h5 + alpha96
+
+ m31 <<= 8
+ m3 += int64(m30)
+ m01 = uint32(m[p+1])
+ y4 = h4 + alpha96
+
+ m32 <<= 16
+ m02 = uint32(m[p+2])
+ x7 = h7 - y7
+ y7 *= scale
+
+ m33 += 256
+ m03 = uint32(m[p+3])
+ x6 = h6 - y6
+ y6 *= scale
+
+ m33 <<= 24
+ m3 += int64(m31)
+ m10 = uint32(m[p+4])
+ x1 = h1 - y1
+
+ m01 <<= 8
+ m3 += int64(m32)
+ m11 = uint32(m[p+5])
+ x0 = h0 - y0
+
+ m3 += int64(m33)
+ m0 += int64(m00)
+ m12 = uint32(m[p+6])
+ y5 -= alpha96
+
+ m02 <<= 16
+ m0 += int64(m01)
+ m13 = uint32(m[p+7])
+ y4 -= alpha96
+
+ m03 <<= 24
+ m0 += int64(m02)
+ d2 = m2
+ x1 += y7
+
+ m0 += int64(m03)
+ d3 = m3
+ x0 += y6
+
+ m11 <<= 8
+ m1 += int64(m10)
+ d0 = m0
+ x7 += y5
+
+ m12 <<= 16
+ m1 += int64(m11)
+ x6 += y4
+
+ m13 <<= 24
+ m1 += int64(m12)
+ y3 = h3 + alpha64
+
+ m1 += int64(m13)
+ d1 = m1
+ y2 = h2 + alpha64
+
+ x0 += x1
+
+ x6 += x7
+
+ y3 -= alpha64
+ r3low = r3low_stack
+
+ y2 -= alpha64
+ r0low = r0low_stack
+
+ x5 = h5 - y5
+ r3lowx0 = r3low * x0
+ r3high = r3high_stack
+
+ x4 = h4 - y4
+ r0lowx6 = r0low * x6
+ r0high = r0high_stack
+
+ x3 = h3 - y3
+ r3highx0 = r3high * x0
+ sr1low = sr1low_stack
+
+ x2 = h2 - y2
+ r0highx6 = r0high * x6
+ sr1high = sr1high_stack
+
+ x5 += y3
+ r0lowx0 = r0low * x0
+ r1low = r1low_stack
+
+ h6 = r3lowx0 + r0lowx6
+ sr1lowx6 = sr1low * x6
+ r1high = r1high_stack
+
+ x4 += y2
+ r0highx0 = r0high * x0
+ sr2low = sr2low_stack
+
+ h7 = r3highx0 + r0highx6
+ sr1highx6 = sr1high * x6
+ sr2high = sr2high_stack
+
+ x3 += y1
+ r1lowx0 = r1low * x0
+ r2low = r2low_stack
+
+ h0 = r0lowx0 + sr1lowx6
+ sr2lowx6 = sr2low * x6
+ r2high = r2high_stack
+
+ x2 += y0
+ r1highx0 = r1high * x0
+ sr3low = sr3low_stack
+
+ h1 = r0highx0 + sr1highx6
+ sr2highx6 = sr2high * x6
+ sr3high = sr3high_stack
+
+ x4 += x5
+ r2lowx0 = r2low * x0
+ z2 = math.Float64frombits(uint64(d2))
+
+ h2 = r1lowx0 + sr2lowx6
+ sr3lowx6 = sr3low * x6
+
+ x2 += x3
+ r2highx0 = r2high * x0
+ z3 = math.Float64frombits(uint64(d3))
+
+ h3 = r1highx0 + sr2highx6
+ sr3highx6 = sr3high * x6
+
+ r1highx4 = r1high * x4
+ z2 -= alpha64
+
+ h4 = r2lowx0 + sr3lowx6
+ r1lowx4 = r1low * x4
+
+ r0highx4 = r0high * x4
+ z3 -= alpha96
+
+ h5 = r2highx0 + sr3highx6
+ r0lowx4 = r0low * x4
+
+ h7 += r1highx4
+ sr3highx4 = sr3high * x4
+
+ h6 += r1lowx4
+ sr3lowx4 = sr3low * x4
+
+ h5 += r0highx4
+ sr2highx4 = sr2high * x4
+
+ h4 += r0lowx4
+ sr2lowx4 = sr2low * x4
+
+ h3 += sr3highx4
+ r0lowx2 = r0low * x2
+
+ h2 += sr3lowx4
+ r0highx2 = r0high * x2
+
+ h1 += sr2highx4
+ r1lowx2 = r1low * x2
+
+ h0 += sr2lowx4
+ r1highx2 = r1high * x2
+
+ h2 += r0lowx2
+ r2lowx2 = r2low * x2
+
+ h3 += r0highx2
+ r2highx2 = r2high * x2
+
+ h4 += r1lowx2
+ sr3lowx2 = sr3low * x2
+
+ h5 += r1highx2
+ sr3highx2 = sr3high * x2
+
+ p += 16
+ l -= 16
+ h6 += r2lowx2
+
+ h7 += r2highx2
+
+ z1 = math.Float64frombits(uint64(d1))
+ h0 += sr3lowx2
+
+ z0 = math.Float64frombits(uint64(d0))
+ h1 += sr3highx2
+
+ z1 -= alpha32
+
+ z0 -= alpha0
+
+ h5 += z3
+
+ h3 += z2
+
+ h1 += z1
+
+ h0 += z0
+
+ if l >= 16 {
+ goto multiplyaddatleast16bytes
+ }
+
+multiplyaddatmost15bytes:
+
+ y7 = h7 + alpha130
+
+ y6 = h6 + alpha130
+
+ y1 = h1 + alpha32
+
+ y0 = h0 + alpha32
+
+ y7 -= alpha130
+
+ y6 -= alpha130
+
+ y1 -= alpha32
+
+ y0 -= alpha32
+
+ y5 = h5 + alpha96
+
+ y4 = h4 + alpha96
+
+ x7 = h7 - y7
+ y7 *= scale
+
+ x6 = h6 - y6
+ y6 *= scale
+
+ x1 = h1 - y1
+
+ x0 = h0 - y0
+
+ y5 -= alpha96
+
+ y4 -= alpha96
+
+ x1 += y7
+
+ x0 += y6
+
+ x7 += y5
+
+ x6 += y4
+
+ y3 = h3 + alpha64
+
+ y2 = h2 + alpha64
+
+ x0 += x1
+
+ x6 += x7
+
+ y3 -= alpha64
+ r3low = r3low_stack
+
+ y2 -= alpha64
+ r0low = r0low_stack
+
+ x5 = h5 - y5
+ r3lowx0 = r3low * x0
+ r3high = r3high_stack
+
+ x4 = h4 - y4
+ r0lowx6 = r0low * x6
+ r0high = r0high_stack
+
+ x3 = h3 - y3
+ r3highx0 = r3high * x0
+ sr1low = sr1low_stack
+
+ x2 = h2 - y2
+ r0highx6 = r0high * x6
+ sr1high = sr1high_stack
+
+ x5 += y3
+ r0lowx0 = r0low * x0
+ r1low = r1low_stack
+
+ h6 = r3lowx0 + r0lowx6
+ sr1lowx6 = sr1low * x6
+ r1high = r1high_stack
+
+ x4 += y2
+ r0highx0 = r0high * x0
+ sr2low = sr2low_stack
+
+ h7 = r3highx0 + r0highx6
+ sr1highx6 = sr1high * x6
+ sr2high = sr2high_stack
+
+ x3 += y1
+ r1lowx0 = r1low * x0
+ r2low = r2low_stack
+
+ h0 = r0lowx0 + sr1lowx6
+ sr2lowx6 = sr2low * x6
+ r2high = r2high_stack
+
+ x2 += y0
+ r1highx0 = r1high * x0
+ sr3low = sr3low_stack
+
+ h1 = r0highx0 + sr1highx6
+ sr2highx6 = sr2high * x6
+ sr3high = sr3high_stack
+
+ x4 += x5
+ r2lowx0 = r2low * x0
+
+ h2 = r1lowx0 + sr2lowx6
+ sr3lowx6 = sr3low * x6
+
+ x2 += x3
+ r2highx0 = r2high * x0
+
+ h3 = r1highx0 + sr2highx6
+ sr3highx6 = sr3high * x6
+
+ r1highx4 = r1high * x4
+
+ h4 = r2lowx0 + sr3lowx6
+ r1lowx4 = r1low * x4
+
+ r0highx4 = r0high * x4
+
+ h5 = r2highx0 + sr3highx6
+ r0lowx4 = r0low * x4
+
+ h7 += r1highx4
+ sr3highx4 = sr3high * x4
+
+ h6 += r1lowx4
+ sr3lowx4 = sr3low * x4
+
+ h5 += r0highx4
+ sr2highx4 = sr2high * x4
+
+ h4 += r0lowx4
+ sr2lowx4 = sr2low * x4
+
+ h3 += sr3highx4
+ r0lowx2 = r0low * x2
+
+ h2 += sr3lowx4
+ r0highx2 = r0high * x2
+
+ h1 += sr2highx4
+ r1lowx2 = r1low * x2
+
+ h0 += sr2lowx4
+ r1highx2 = r1high * x2
+
+ h2 += r0lowx2
+ r2lowx2 = r2low * x2
+
+ h3 += r0highx2
+ r2highx2 = r2high * x2
+
+ h4 += r1lowx2
+ sr3lowx2 = sr3low * x2
+
+ h5 += r1highx2
+ sr3highx2 = sr3high * x2
+
+ h6 += r2lowx2
+
+ h7 += r2highx2
+
+ h0 += sr3lowx2
+
+ h1 += sr3highx2
+
+addatmost15bytes:
+
+ if l == 0 {
+ goto nomorebytes
+ }
+
+ lbelow2 = l - 2
+
+ lbelow3 = l - 3
+
+ lbelow2 >>= 31
+ lbelow4 = l - 4
+
+ m00 = uint32(m[p+0])
+ lbelow3 >>= 31
+ p += lbelow2
+
+ m01 = uint32(m[p+1])
+ lbelow4 >>= 31
+ p += lbelow3
+
+ m02 = uint32(m[p+2])
+ p += lbelow4
+ m0 = 2151
+
+ m03 = uint32(m[p+3])
+ m0 <<= 51
+ m1 = 2215
+
+ m0 += int64(m00)
+ m01 &^= uint32(lbelow2)
+
+ m02 &^= uint32(lbelow3)
+ m01 -= uint32(lbelow2)
+
+ m01 <<= 8
+ m03 &^= uint32(lbelow4)
+
+ m0 += int64(m01)
+ lbelow2 -= lbelow3
+
+ m02 += uint32(lbelow2)
+ lbelow3 -= lbelow4
+
+ m02 <<= 16
+ m03 += uint32(lbelow3)
+
+ m03 <<= 24
+ m0 += int64(m02)
+
+ m0 += int64(m03)
+ lbelow5 = l - 5
+
+ lbelow6 = l - 6
+ lbelow7 = l - 7
+
+ lbelow5 >>= 31
+ lbelow8 = l - 8
+
+ lbelow6 >>= 31
+ p += lbelow5
+
+ m10 = uint32(m[p+4])
+ lbelow7 >>= 31
+ p += lbelow6
+
+ m11 = uint32(m[p+5])
+ lbelow8 >>= 31
+ p += lbelow7
+
+ m12 = uint32(m[p+6])
+ m1 <<= 51
+ p += lbelow8
+
+ m13 = uint32(m[p+7])
+ m10 &^= uint32(lbelow5)
+ lbelow4 -= lbelow5
+
+ m10 += uint32(lbelow4)
+ lbelow5 -= lbelow6
+
+ m11 &^= uint32(lbelow6)
+ m11 += uint32(lbelow5)
+
+ m11 <<= 8
+ m1 += int64(m10)
+
+ m1 += int64(m11)
+ m12 &^= uint32(lbelow7)
+
+ lbelow6 -= lbelow7
+ m13 &^= uint32(lbelow8)
+
+ m12 += uint32(lbelow6)
+ lbelow7 -= lbelow8
+
+ m12 <<= 16
+ m13 += uint32(lbelow7)
+
+ m13 <<= 24
+ m1 += int64(m12)
+
+ m1 += int64(m13)
+ m2 = 2279
+
+ lbelow9 = l - 9
+ m3 = 2343
+
+ lbelow10 = l - 10
+ lbelow11 = l - 11
+
+ lbelow9 >>= 31
+ lbelow12 = l - 12
+
+ lbelow10 >>= 31
+ p += lbelow9
+
+ m20 = uint32(m[p+8])
+ lbelow11 >>= 31
+ p += lbelow10
+
+ m21 = uint32(m[p+9])
+ lbelow12 >>= 31
+ p += lbelow11
+
+ m22 = uint32(m[p+10])
+ m2 <<= 51
+ p += lbelow12
+
+ m23 = uint32(m[p+11])
+ m20 &^= uint32(lbelow9)
+ lbelow8 -= lbelow9
+
+ m20 += uint32(lbelow8)
+ lbelow9 -= lbelow10
+
+ m21 &^= uint32(lbelow10)
+ m21 += uint32(lbelow9)
+
+ m21 <<= 8
+ m2 += int64(m20)
+
+ m2 += int64(m21)
+ m22 &^= uint32(lbelow11)
+
+ lbelow10 -= lbelow11
+ m23 &^= uint32(lbelow12)
+
+ m22 += uint32(lbelow10)
+ lbelow11 -= lbelow12
+
+ m22 <<= 16
+ m23 += uint32(lbelow11)
+
+ m23 <<= 24
+ m2 += int64(m22)
+
+ m3 <<= 51
+ lbelow13 = l - 13
+
+ lbelow13 >>= 31
+ lbelow14 = l - 14
+
+ lbelow14 >>= 31
+ p += lbelow13
+ lbelow15 = l - 15
+
+ m30 = uint32(m[p+12])
+ lbelow15 >>= 31
+ p += lbelow14
+
+ m31 = uint32(m[p+13])
+ p += lbelow15
+ m2 += int64(m23)
+
+ m32 = uint32(m[p+14])
+ m30 &^= uint32(lbelow13)
+ lbelow12 -= lbelow13
+
+ m30 += uint32(lbelow12)
+ lbelow13 -= lbelow14
+
+ m3 += int64(m30)
+ m31 &^= uint32(lbelow14)
+
+ m31 += uint32(lbelow13)
+ m32 &^= uint32(lbelow15)
+
+ m31 <<= 8
+ lbelow14 -= lbelow15
+
+ m3 += int64(m31)
+ m32 += uint32(lbelow14)
+ d0 = m0
+
+ m32 <<= 16
+ m33 = uint64(lbelow15 + 1)
+ d1 = m1
+
+ m33 <<= 24
+ m3 += int64(m32)
+ d2 = m2
+
+ m3 += int64(m33)
+ d3 = m3
+
+ z3 = math.Float64frombits(uint64(d3))
+
+ z2 = math.Float64frombits(uint64(d2))
+
+ z1 = math.Float64frombits(uint64(d1))
+
+ z0 = math.Float64frombits(uint64(d0))
+
+ z3 -= alpha96
+
+ z2 -= alpha64
+
+ z1 -= alpha32
+
+ z0 -= alpha0
+
+ h5 += z3
+
+ h3 += z2
+
+ h1 += z1
+
+ h0 += z0
+
+ y7 = h7 + alpha130
+
+ y6 = h6 + alpha130
+
+ y1 = h1 + alpha32
+
+ y0 = h0 + alpha32
+
+ y7 -= alpha130
+
+ y6 -= alpha130
+
+ y1 -= alpha32
+
+ y0 -= alpha32
+
+ y5 = h5 + alpha96
+
+ y4 = h4 + alpha96
+
+ x7 = h7 - y7
+ y7 *= scale
+
+ x6 = h6 - y6
+ y6 *= scale
+
+ x1 = h1 - y1
+
+ x0 = h0 - y0
+
+ y5 -= alpha96
+
+ y4 -= alpha96
+
+ x1 += y7
+
+ x0 += y6
+
+ x7 += y5
+
+ x6 += y4
+
+ y3 = h3 + alpha64
+
+ y2 = h2 + alpha64
+
+ x0 += x1
+
+ x6 += x7
+
+ y3 -= alpha64
+ r3low = r3low_stack
+
+ y2 -= alpha64
+ r0low = r0low_stack
+
+ x5 = h5 - y5
+ r3lowx0 = r3low * x0
+ r3high = r3high_stack
+
+ x4 = h4 - y4
+ r0lowx6 = r0low * x6
+ r0high = r0high_stack
+
+ x3 = h3 - y3
+ r3highx0 = r3high * x0
+ sr1low = sr1low_stack
+
+ x2 = h2 - y2
+ r0highx6 = r0high * x6
+ sr1high = sr1high_stack
+
+ x5 += y3
+ r0lowx0 = r0low * x0
+ r1low = r1low_stack
+
+ h6 = r3lowx0 + r0lowx6
+ sr1lowx6 = sr1low * x6
+ r1high = r1high_stack
+
+ x4 += y2
+ r0highx0 = r0high * x0
+ sr2low = sr2low_stack
+
+ h7 = r3highx0 + r0highx6
+ sr1highx6 = sr1high * x6
+ sr2high = sr2high_stack
+
+ x3 += y1
+ r1lowx0 = r1low * x0
+ r2low = r2low_stack
+
+ h0 = r0lowx0 + sr1lowx6
+ sr2lowx6 = sr2low * x6
+ r2high = r2high_stack
+
+ x2 += y0
+ r1highx0 = r1high * x0
+ sr3low = sr3low_stack
+
+ h1 = r0highx0 + sr1highx6
+ sr2highx6 = sr2high * x6
+ sr3high = sr3high_stack
+
+ x4 += x5
+ r2lowx0 = r2low * x0
+
+ h2 = r1lowx0 + sr2lowx6
+ sr3lowx6 = sr3low * x6
+
+ x2 += x3
+ r2highx0 = r2high * x0
+
+ h3 = r1highx0 + sr2highx6
+ sr3highx6 = sr3high * x6
+
+ r1highx4 = r1high * x4
+
+ h4 = r2lowx0 + sr3lowx6
+ r1lowx4 = r1low * x4
+
+ r0highx4 = r0high * x4
+
+ h5 = r2highx0 + sr3highx6
+ r0lowx4 = r0low * x4
+
+ h7 += r1highx4
+ sr3highx4 = sr3high * x4
+
+ h6 += r1lowx4
+ sr3lowx4 = sr3low * x4
+
+ h5 += r0highx4
+ sr2highx4 = sr2high * x4
+
+ h4 += r0lowx4
+ sr2lowx4 = sr2low * x4
+
+ h3 += sr3highx4
+ r0lowx2 = r0low * x2
+
+ h2 += sr3lowx4
+ r0highx2 = r0high * x2
+
+ h1 += sr2highx4
+ r1lowx2 = r1low * x2
+
+ h0 += sr2lowx4
+ r1highx2 = r1high * x2
+
+ h2 += r0lowx2
+ r2lowx2 = r2low * x2
+
+ h3 += r0highx2
+ r2highx2 = r2high * x2
+
+ h4 += r1lowx2
+ sr3lowx2 = sr3low * x2
+
+ h5 += r1highx2
+ sr3highx2 = sr3high * x2
+
+ h6 += r2lowx2
+
+ h7 += r2highx2
+
+ h0 += sr3lowx2
+
+ h1 += sr3highx2
+
+nomorebytes:
+
+ y7 = h7 + alpha130
+
+ y0 = h0 + alpha32
+
+ y1 = h1 + alpha32
+
+ y2 = h2 + alpha64
+
+ y7 -= alpha130
+
+ y3 = h3 + alpha64
+
+ y4 = h4 + alpha96
+
+ y5 = h5 + alpha96
+
+ x7 = h7 - y7
+ y7 *= scale
+
+ y0 -= alpha32
+
+ y1 -= alpha32
+
+ y2 -= alpha64
+
+ h6 += x7
+
+ y3 -= alpha64
+
+ y4 -= alpha96
+
+ y5 -= alpha96
+
+ y6 = h6 + alpha130
+
+ x0 = h0 - y0
+
+ x1 = h1 - y1
+
+ x2 = h2 - y2
+
+ y6 -= alpha130
+
+ x0 += y7
+
+ x3 = h3 - y3
+
+ x4 = h4 - y4
+
+ x5 = h5 - y5
+
+ x6 = h6 - y6
+
+ y6 *= scale
+
+ x2 += y0
+
+ x3 += y1
+
+ x4 += y2
+
+ x0 += y6
+
+ x5 += y3
+
+ x6 += y4
+
+ x2 += x3
+
+ x0 += x1
+
+ x4 += x5
+
+ x6 += y5
+
+ x2 += offset1
+ d1 = int64(math.Float64bits(x2))
+
+ x0 += offset0
+ d0 = int64(math.Float64bits(x0))
+
+ x4 += offset2
+ d2 = int64(math.Float64bits(x4))
+
+ x6 += offset3
+ d3 = int64(math.Float64bits(x6))
+
+ f0 = uint64(d0)
+
+ f1 = uint64(d1)
+ bits32 = math.MaxUint64
+
+ f2 = uint64(d2)
+ bits32 >>= 32
+
+ f3 = uint64(d3)
+ f = f0 >> 32
+
+ f0 &= bits32
+ f &= 255
+
+ f1 += f
+ g0 = f0 + 5
+
+ g = g0 >> 32
+ g0 &= bits32
+
+ f = f1 >> 32
+ f1 &= bits32
+
+ f &= 255
+ g1 = f1 + g
+
+ g = g1 >> 32
+ f2 += f
+
+ f = f2 >> 32
+ g1 &= bits32
+
+ f2 &= bits32
+ f &= 255
+
+ f3 += f
+ g2 = f2 + g
+
+ g = g2 >> 32
+ g2 &= bits32
+
+ f4 = f3 >> 32
+ f3 &= bits32
+
+ f4 &= 255
+ g3 = f3 + g
+
+ g = g3 >> 32
+ g3 &= bits32
+
+ g4 = f4 + g
+
+ g4 = g4 - 4
+ s00 = uint32(s[0])
+
+ f = uint64(int64(g4) >> 63)
+ s01 = uint32(s[1])
+
+ f0 &= f
+ g0 &^= f
+ s02 = uint32(s[2])
+
+ f1 &= f
+ f0 |= g0
+ s03 = uint32(s[3])
+
+ g1 &^= f
+ f2 &= f
+ s10 = uint32(s[4])
+
+ f3 &= f
+ g2 &^= f
+ s11 = uint32(s[5])
+
+ g3 &^= f
+ f1 |= g1
+ s12 = uint32(s[6])
+
+ f2 |= g2
+ f3 |= g3
+ s13 = uint32(s[7])
+
+ s01 <<= 8
+ f0 += uint64(s00)
+ s20 = uint32(s[8])
+
+ s02 <<= 16
+ f0 += uint64(s01)
+ s21 = uint32(s[9])
+
+ s03 <<= 24
+ f0 += uint64(s02)
+ s22 = uint32(s[10])
+
+ s11 <<= 8
+ f1 += uint64(s10)
+ s23 = uint32(s[11])
+
+ s12 <<= 16
+ f1 += uint64(s11)
+ s30 = uint32(s[12])
+
+ s13 <<= 24
+ f1 += uint64(s12)
+ s31 = uint32(s[13])
+
+ f0 += uint64(s03)
+ f1 += uint64(s13)
+ s32 = uint32(s[14])
+
+ s21 <<= 8
+ f2 += uint64(s20)
+ s33 = uint32(s[15])
+
+ s22 <<= 16
+ f2 += uint64(s21)
+
+ s23 <<= 24
+ f2 += uint64(s22)
+
+ s31 <<= 8
+ f3 += uint64(s30)
+
+ s32 <<= 16
+ f3 += uint64(s31)
+
+ s33 <<= 24
+ f3 += uint64(s32)
+
+ f2 += uint64(s23)
+ f3 += uint64(s33)
+
+ out[0] = byte(f0)
+ f0 >>= 8
+ out[1] = byte(f0)
+ f0 >>= 8
+ out[2] = byte(f0)
+ f0 >>= 8
+ out[3] = byte(f0)
+ f0 >>= 8
+ f1 += f0
+
+ out[4] = byte(f1)
+ f1 >>= 8
+ out[5] = byte(f1)
+ f1 >>= 8
+ out[6] = byte(f1)
+ f1 >>= 8
+ out[7] = byte(f1)
+ f1 >>= 8
+ f2 += f1
+
+ out[8] = byte(f2)
+ f2 >>= 8
+ out[9] = byte(f2)
+ f2 >>= 8
+ out[10] = byte(f2)
+ f2 >>= 8
+ out[11] = byte(f2)
+ f2 >>= 8
+ f3 += f2
+
+ out[12] = byte(f3)
+ f3 >>= 8
+ out[13] = byte(f3)
+ f3 >>= 8
+ out[14] = byte(f3)
+ f3 >>= 8
+ out[15] = byte(f3)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160.go
new file mode 100644
index 00000000000..6c6e84236ab
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160.go
@@ -0,0 +1,120 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package ripemd160 implements the RIPEMD-160 hash algorithm.
+package ripemd160 // import "golang.org/x/crypto/ripemd160"
+
+// RIPEMD-160 is designed by Hans Dobbertin, Antoon Bosselaers, and Bart
+// Preneel with specifications available at:
+// http://homes.esat.kuleuven.be/~cosicart/pdf/AB-9601/AB-9601.pdf.
+
+import (
+ "crypto"
+ "hash"
+)
+
+func init() {
+ crypto.RegisterHash(crypto.RIPEMD160, New)
+}
+
+// The size of the checksum in bytes.
+const Size = 20
+
+// The block size of the hash algorithm in bytes.
+const BlockSize = 64
+
+const (
+ _s0 = 0x67452301
+ _s1 = 0xefcdab89
+ _s2 = 0x98badcfe
+ _s3 = 0x10325476
+ _s4 = 0xc3d2e1f0
+)
+
+// digest represents the partial evaluation of a checksum.
+type digest struct {
+ s [5]uint32 // running context
+ x [BlockSize]byte // temporary buffer
+ nx int // index into x
+ tc uint64 // total count of bytes processed
+}
+
+func (d *digest) Reset() {
+ d.s[0], d.s[1], d.s[2], d.s[3], d.s[4] = _s0, _s1, _s2, _s3, _s4
+ d.nx = 0
+ d.tc = 0
+}
+
+// New returns a new hash.Hash computing the checksum.
+func New() hash.Hash {
+ result := new(digest)
+ result.Reset()
+ return result
+}
+
+func (d *digest) Size() int { return Size }
+
+func (d *digest) BlockSize() int { return BlockSize }
+
+func (d *digest) Write(p []byte) (nn int, err error) {
+ nn = len(p)
+ d.tc += uint64(nn)
+ if d.nx > 0 {
+ n := len(p)
+ if n > BlockSize-d.nx {
+ n = BlockSize - d.nx
+ }
+ for i := 0; i < n; i++ {
+ d.x[d.nx+i] = p[i]
+ }
+ d.nx += n
+ if d.nx == BlockSize {
+ _Block(d, d.x[0:])
+ d.nx = 0
+ }
+ p = p[n:]
+ }
+ n := _Block(d, p)
+ p = p[n:]
+ if len(p) > 0 {
+ d.nx = copy(d.x[:], p)
+ }
+ return
+}
+
+func (d0 *digest) Sum(in []byte) []byte {
+ // Make a copy of d0 so that caller can keep writing and summing.
+ d := *d0
+
+ // Padding. Add a 1 bit and 0 bits until 56 bytes mod 64.
+ tc := d.tc
+ var tmp [64]byte
+ tmp[0] = 0x80
+ if tc%64 < 56 {
+ d.Write(tmp[0 : 56-tc%64])
+ } else {
+ d.Write(tmp[0 : 64+56-tc%64])
+ }
+
+ // Length in bits.
+ tc <<= 3
+ for i := uint(0); i < 8; i++ {
+ tmp[i] = byte(tc >> (8 * i))
+ }
+ d.Write(tmp[0:8])
+
+ if d.nx != 0 {
+ panic("d.nx != 0")
+ }
+
+ var digest [Size]byte
+ for i, s := range d.s {
+ digest[i*4] = byte(s)
+ digest[i*4+1] = byte(s >> 8)
+ digest[i*4+2] = byte(s >> 16)
+ digest[i*4+3] = byte(s >> 24)
+ }
+
+ return append(in, digest[:]...)
+}
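
A minimal usage sketch for the package above, assuming it is imported by its canonical path golang.org/x/crypto/ripemd160; the expected digest for "abc" matches the test vector in ripemd160_test.go below.

package main

import (
	"fmt"

	"golang.org/x/crypto/ripemd160"
)

func main() {
	// New returns a hash.Hash, so the usual Write/Sum pattern applies.
	h := ripemd160.New()
	h.Write([]byte("abc"))
	// Prints 8eb208f7e05d987a9b044a8e98c6b087f15a0bfc (see the "abc" test vector below).
	fmt.Printf("%x\n", h.Sum(nil))
}
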
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160_test.go
new file mode 100644
index 00000000000..5df1b2593d2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160_test.go
@@ -0,0 +1,64 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ripemd160
+
+// Test vectors are from:
+// http://homes.esat.kuleuven.be/~bosselae/ripemd160.html
+
+import (
+ "fmt"
+ "io"
+ "testing"
+)
+
+type mdTest struct {
+ out string
+ in string
+}
+
+var vectors = [...]mdTest{
+ {"9c1185a5c5e9fc54612808977ee8f548b2258d31", ""},
+ {"0bdc9d2d256b3ee9daae347be6f4dc835a467ffe", "a"},
+ {"8eb208f7e05d987a9b044a8e98c6b087f15a0bfc", "abc"},
+ {"5d0689ef49d2fae572b881b123a85ffa21595f36", "message digest"},
+ {"f71c27109c692c1b56bbdceb5b9d2865b3708dbc", "abcdefghijklmnopqrstuvwxyz"},
+ {"12a053384a9c0c88e405a06c27dcf49ada62eb2b", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
+ {"b0e20b6e3116640286ed3a87a5713079b21f5189", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
+ {"9b752e45573d4b39f4dbd3323cab82bf63326bfb", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
+}
+
+func TestVectors(t *testing.T) {
+ for i := 0; i < len(vectors); i++ {
+ tv := vectors[i]
+ md := New()
+ for j := 0; j < 3; j++ {
+ if j < 2 {
+ io.WriteString(md, tv.in)
+ } else {
+ io.WriteString(md, tv.in[0:len(tv.in)/2])
+ md.Sum(nil)
+ io.WriteString(md, tv.in[len(tv.in)/2:])
+ }
+ s := fmt.Sprintf("%x", md.Sum(nil))
+ if s != tv.out {
+ t.Fatalf("RIPEMD-160[%d](%s) = %s, expected %s", j, tv.in, s, tv.out)
+ }
+ md.Reset()
+ }
+ }
+}
+
+func TestMillionA(t *testing.T) {
+ md := New()
+ for i := 0; i < 100000; i++ {
+ io.WriteString(md, "aaaaaaaaaa")
+ }
+ out := "52783243c1697bdbe16d37f97f68f08325dc1528"
+ s := fmt.Sprintf("%x", md.Sum(nil))
+ if s != out {
+ t.Fatalf("RIPEMD-160 (1 million 'a') = %s, expected %s", s, out)
+ }
+ md.Reset()
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160block.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160block.go
new file mode 100644
index 00000000000..7bc8e6c485e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ripemd160/ripemd160block.go
@@ -0,0 +1,161 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// RIPEMD-160 block step.
+// In its own file so that a faster assembly or C version
+// can be substituted easily.
+
+package ripemd160
+
+// work buffer indices and roll amounts for one line
+var _n = [80]uint{
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8,
+ 3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12,
+ 1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2,
+ 4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13,
+}
+
+var _r = [80]uint{
+ 11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8,
+ 7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12,
+ 11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5,
+ 11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12,
+ 9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6,
+}
+
+// same for the other parallel one
+var n_ = [80]uint{
+ 5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12,
+ 6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2,
+ 15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13,
+ 8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14,
+ 12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11,
+}
+
+var r_ = [80]uint{
+ 8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6,
+ 9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11,
+ 9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5,
+ 15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8,
+ 8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11,
+}
+
+func _Block(md *digest, p []byte) int {
+ n := 0
+ var x [16]uint32
+ var alpha, beta uint32
+ for len(p) >= BlockSize {
+ a, b, c, d, e := md.s[0], md.s[1], md.s[2], md.s[3], md.s[4]
+ aa, bb, cc, dd, ee := a, b, c, d, e
+ j := 0
+ for i := 0; i < 16; i++ {
+ x[i] = uint32(p[j]) | uint32(p[j+1])<<8 | uint32(p[j+2])<<16 | uint32(p[j+3])<<24
+ j += 4
+ }
+
+ // round 1
+ i := 0
+ for i < 16 {
+ alpha = a + (b ^ c ^ d) + x[_n[i]]
+ s := _r[i]
+ alpha = (alpha<<s | alpha>>(32-s)) + e
+ beta = c<<10 | c>>22
+ a, b, c, d, e = e, alpha, b, beta, d
+
+ // parallel line
+ alpha = aa + (bb ^ (cc | ^dd)) + x[n_[i]] + 0x50a28be6
+ s = r_[i]
+ alpha = (alpha<<s | alpha>>(32-s)) + ee
+ beta = cc<<10 | cc>>22
+ aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
+
+ i++
+ }
+
+ // round 2
+ for i < 32 {
+ alpha = a + (b&c | ^b&d) + x[_n[i]] + 0x5a827999
+ s := _r[i]
+ alpha = (alpha<<s | alpha>>(32-s)) + e
+ beta = c<<10 | c>>22
+ a, b, c, d, e = e, alpha, b, beta, d
+
+ // parallel line
+ alpha = aa + (bb&dd | cc&^dd) + x[n_[i]] + 0x5c4dd124
+ s = r_[i]
+ alpha = (alpha<<s | alpha>>(32-s)) + ee
+ beta = cc<<10 | cc>>22
+ aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
+
+ i++
+ }
+
+ // round 3
+ for i < 48 {
+ alpha = a + (b | ^c ^ d) + x[_n[i]] + 0x6ed9eba1
+ s := _r[i]
+ alpha = (alpha<<s | alpha>>(32-s)) + e
+ beta = c<<10 | c>>22
+ a, b, c, d, e = e, alpha, b, beta, d
+
+ // parallel line
+ alpha = aa + (bb | ^cc ^ dd) + x[n_[i]] + 0x6d703ef3
+ s = r_[i]
+ alpha = (alpha<<s | alpha>>(32-s)) + ee
+ beta = cc<<10 | cc>>22
+ aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
+
+ i++
+ }
+
+ // round 4
+ for i < 64 {
+ alpha = a + (b&d | c&^d) + x[_n[i]] + 0x8f1bbcdc
+ s := _r[i]
+ alpha = (alpha<<s | alpha>>(32-s)) + e
+ beta = c<<10 | c>>22
+ a, b, c, d, e = e, alpha, b, beta, d
+
+ // parallel line
+ alpha = aa + (bb&cc | ^bb&dd) + x[n_[i]] + 0x7a6d76e9
+ s = r_[i]
+ alpha = (alpha<<s | alpha>>(32-s)) + ee
+ beta = cc<<10 | cc>>22
+ aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
+
+ i++
+ }
+
+ // round 5
+ for i < 80 {
+ alpha = a + (b ^ (c | ^d)) + x[_n[i]] + 0xa953fd4e
+ s := _r[i]
+ alpha = (alpha<<s | alpha>>(32-s)) + e
+ beta = c<<10 | c>>22
+ a, b, c, d, e = e, alpha, b, beta, d
+
+ // parallel line
+ alpha = aa + (bb ^ cc ^ dd) + x[n_[i]]
+ s = r_[i]
+ alpha = (alpha<<s | alpha>>(32-s)) + ee
+ beta = cc<<10 | cc>>22
+ aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
+
+ i++
+ }
+
+ // combine results
+ dd += c + md.s[1]
+ md.s[1] = md.s[2] + d + ee
+ md.s[2] = md.s[3] + e + aa
+ md.s[3] = md.s[4] + a + bb
+ md.s[4] = md.s[0] + b + cc
+ md.s[0] = dd
+
+ p = p[BlockSize:]
+ n += BlockSize
+ }
+ return n
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/hsalsa20.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/hsalsa20.go
new file mode 100644
index 00000000000..4c96147c86b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/hsalsa20.go
@@ -0,0 +1,144 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package salsa provides low-level access to functions in the Salsa family.
+package salsa // import "golang.org/x/crypto/salsa20/salsa"
+
+// Sigma is the Salsa20 constant for 256-bit keys.
+var Sigma = [16]byte{'e', 'x', 'p', 'a', 'n', 'd', ' ', '3', '2', '-', 'b', 'y', 't', 'e', ' ', 'k'}
+
+// HSalsa20 applies the HSalsa20 core function to a 16-byte input in, 32-byte
+// key k, and 16-byte constant c, and puts the result into the 32-byte array
+// out.
+func HSalsa20(out *[32]byte, in *[16]byte, k *[32]byte, c *[16]byte) {
+ x0 := uint32(c[0]) | uint32(c[1])<<8 | uint32(c[2])<<16 | uint32(c[3])<<24
+ x1 := uint32(k[0]) | uint32(k[1])<<8 | uint32(k[2])<<16 | uint32(k[3])<<24
+ x2 := uint32(k[4]) | uint32(k[5])<<8 | uint32(k[6])<<16 | uint32(k[7])<<24
+ x3 := uint32(k[8]) | uint32(k[9])<<8 | uint32(k[10])<<16 | uint32(k[11])<<24
+ x4 := uint32(k[12]) | uint32(k[13])<<8 | uint32(k[14])<<16 | uint32(k[15])<<24
+ x5 := uint32(c[4]) | uint32(c[5])<<8 | uint32(c[6])<<16 | uint32(c[7])<<24
+ x6 := uint32(in[0]) | uint32(in[1])<<8 | uint32(in[2])<<16 | uint32(in[3])<<24
+ x7 := uint32(in[4]) | uint32(in[5])<<8 | uint32(in[6])<<16 | uint32(in[7])<<24
+ x8 := uint32(in[8]) | uint32(in[9])<<8 | uint32(in[10])<<16 | uint32(in[11])<<24
+ x9 := uint32(in[12]) | uint32(in[13])<<8 | uint32(in[14])<<16 | uint32(in[15])<<24
+ x10 := uint32(c[8]) | uint32(c[9])<<8 | uint32(c[10])<<16 | uint32(c[11])<<24
+ x11 := uint32(k[16]) | uint32(k[17])<<8 | uint32(k[18])<<16 | uint32(k[19])<<24
+ x12 := uint32(k[20]) | uint32(k[21])<<8 | uint32(k[22])<<16 | uint32(k[23])<<24
+ x13 := uint32(k[24]) | uint32(k[25])<<8 | uint32(k[26])<<16 | uint32(k[27])<<24
+ x14 := uint32(k[28]) | uint32(k[29])<<8 | uint32(k[30])<<16 | uint32(k[31])<<24
+ x15 := uint32(c[12]) | uint32(c[13])<<8 | uint32(c[14])<<16 | uint32(c[15])<<24
+
+ for i := 0; i < 20; i += 2 {
+ u := x0 + x12
+ x4 ^= u<<7 | u>>(32-7)
+ u = x4 + x0
+ x8 ^= u<<9 | u>>(32-9)
+ u = x8 + x4
+ x12 ^= u<<13 | u>>(32-13)
+ u = x12 + x8
+ x0 ^= u<<18 | u>>(32-18)
+
+ u = x5 + x1
+ x9 ^= u<<7 | u>>(32-7)
+ u = x9 + x5
+ x13 ^= u<<9 | u>>(32-9)
+ u = x13 + x9
+ x1 ^= u<<13 | u>>(32-13)
+ u = x1 + x13
+ x5 ^= u<<18 | u>>(32-18)
+
+ u = x10 + x6
+ x14 ^= u<<7 | u>>(32-7)
+ u = x14 + x10
+ x2 ^= u<<9 | u>>(32-9)
+ u = x2 + x14
+ x6 ^= u<<13 | u>>(32-13)
+ u = x6 + x2
+ x10 ^= u<<18 | u>>(32-18)
+
+ u = x15 + x11
+ x3 ^= u<<7 | u>>(32-7)
+ u = x3 + x15
+ x7 ^= u<<9 | u>>(32-9)
+ u = x7 + x3
+ x11 ^= u<<13 | u>>(32-13)
+ u = x11 + x7
+ x15 ^= u<<18 | u>>(32-18)
+
+ u = x0 + x3
+ x1 ^= u<<7 | u>>(32-7)
+ u = x1 + x0
+ x2 ^= u<<9 | u>>(32-9)
+ u = x2 + x1
+ x3 ^= u<<13 | u>>(32-13)
+ u = x3 + x2
+ x0 ^= u<<18 | u>>(32-18)
+
+ u = x5 + x4
+ x6 ^= u<<7 | u>>(32-7)
+ u = x6 + x5
+ x7 ^= u<<9 | u>>(32-9)
+ u = x7 + x6
+ x4 ^= u<<13 | u>>(32-13)
+ u = x4 + x7
+ x5 ^= u<<18 | u>>(32-18)
+
+ u = x10 + x9
+ x11 ^= u<<7 | u>>(32-7)
+ u = x11 + x10
+ x8 ^= u<<9 | u>>(32-9)
+ u = x8 + x11
+ x9 ^= u<<13 | u>>(32-13)
+ u = x9 + x8
+ x10 ^= u<<18 | u>>(32-18)
+
+ u = x15 + x14
+ x12 ^= u<<7 | u>>(32-7)
+ u = x12 + x15
+ x13 ^= u<<9 | u>>(32-9)
+ u = x13 + x12
+ x14 ^= u<<13 | u>>(32-13)
+ u = x14 + x13
+ x15 ^= u<<18 | u>>(32-18)
+ }
+ out[0] = byte(x0)
+ out[1] = byte(x0 >> 8)
+ out[2] = byte(x0 >> 16)
+ out[3] = byte(x0 >> 24)
+
+ out[4] = byte(x5)
+ out[5] = byte(x5 >> 8)
+ out[6] = byte(x5 >> 16)
+ out[7] = byte(x5 >> 24)
+
+ out[8] = byte(x10)
+ out[9] = byte(x10 >> 8)
+ out[10] = byte(x10 >> 16)
+ out[11] = byte(x10 >> 24)
+
+ out[12] = byte(x15)
+ out[13] = byte(x15 >> 8)
+ out[14] = byte(x15 >> 16)
+ out[15] = byte(x15 >> 24)
+
+ out[16] = byte(x6)
+ out[17] = byte(x6 >> 8)
+ out[18] = byte(x6 >> 16)
+ out[19] = byte(x6 >> 24)
+
+ out[20] = byte(x7)
+ out[21] = byte(x7 >> 8)
+ out[22] = byte(x7 >> 16)
+ out[23] = byte(x7 >> 24)
+
+ out[24] = byte(x8)
+ out[25] = byte(x8 >> 8)
+ out[26] = byte(x8 >> 16)
+ out[27] = byte(x8 >> 24)
+
+ out[28] = byte(x9)
+ out[29] = byte(x9 >> 8)
+ out[30] = byte(x9 >> 16)
+ out[31] = byte(x9 >> 24)
+}
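
A short sketch of how HSalsa20 is typically used to derive an XSalsa20 subkey from a 32-byte key and the first 16 bytes of a 24-byte nonce, mirroring the XSalsa20 path in salsa20.go further down; the helper name and the all-zero key/nonce values are illustrative only.

package main

import (
	"fmt"

	"golang.org/x/crypto/salsa20/salsa"
)

// deriveXSalsa20SubKey is a hypothetical helper showing the HSalsa20 call:
// the first 16 bytes of a 24-byte nonce feed HSalsa20 together with the key
// and the Sigma constant, producing the subkey used by XSalsa20.
func deriveXSalsa20SubKey(key *[32]byte, nonce *[24]byte) [32]byte {
	var hNonce [16]byte
	var subKey [32]byte
	copy(hNonce[:], nonce[:16])
	salsa.HSalsa20(&subKey, &hNonce, key, &salsa.Sigma)
	return subKey
}

func main() {
	var key [32]byte
	var nonce [24]byte // all-zero values, for illustration only
	fmt.Printf("%x\n", deriveXSalsa20SubKey(&key, &nonce))
}
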
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa2020_amd64.s b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa2020_amd64.s
new file mode 100644
index 00000000000..6e1df963917
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa2020_amd64.s
@@ -0,0 +1,902 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build amd64,!appengine,!gccgo
+
+// This code was translated into a form compatible with 6a from the public
+// domain sources in SUPERCOP: http://bench.cr.yp.to/supercop.html
+
+// func salsa2020XORKeyStream(out, in *byte, n uint64, nonce, key *byte)
+TEXT ·salsa2020XORKeyStream(SB),0,$512-40
+ MOVQ out+0(FP),DI
+ MOVQ in+8(FP),SI
+ MOVQ n+16(FP),DX
+ MOVQ nonce+24(FP),CX
+ MOVQ key+32(FP),R8
+
+ MOVQ SP,R11
+ MOVQ $31,R9
+ NOTQ R9
+ ANDQ R9,SP
+ ADDQ $32,SP
+
+ MOVQ R11,352(SP)
+ MOVQ R12,360(SP)
+ MOVQ R13,368(SP)
+ MOVQ R14,376(SP)
+ MOVQ R15,384(SP)
+ MOVQ BX,392(SP)
+ MOVQ BP,400(SP)
+ MOVQ DX,R9
+ MOVQ CX,DX
+ MOVQ R8,R10
+ CMPQ R9,$0
+ JBE DONE
+ START:
+ MOVL 20(R10),CX
+ MOVL 0(R10),R8
+ MOVL 0(DX),AX
+ MOVL 16(R10),R11
+ MOVL CX,0(SP)
+ MOVL R8, 4 (SP)
+ MOVL AX, 8 (SP)
+ MOVL R11, 12 (SP)
+ MOVL 8(DX),CX
+ MOVL 24(R10),R8
+ MOVL 4(R10),AX
+ MOVL 4(DX),R11
+ MOVL CX,16(SP)
+ MOVL R8, 20 (SP)
+ MOVL AX, 24 (SP)
+ MOVL R11, 28 (SP)
+ MOVL 12(DX),CX
+ MOVL 12(R10),DX
+ MOVL 28(R10),R8
+ MOVL 8(R10),AX
+ MOVL DX,32(SP)
+ MOVL CX, 36 (SP)
+ MOVL R8, 40 (SP)
+ MOVL AX, 44 (SP)
+ MOVQ $1634760805,DX
+ MOVQ $857760878,CX
+ MOVQ $2036477234,R8
+ MOVQ $1797285236,AX
+ MOVL DX,48(SP)
+ MOVL CX, 52 (SP)
+ MOVL R8, 56 (SP)
+ MOVL AX, 60 (SP)
+ CMPQ R9,$256
+ JB BYTESBETWEEN1AND255
+ MOVOA 48(SP),X0
+ PSHUFL $0X55,X0,X1
+ PSHUFL $0XAA,X0,X2
+ PSHUFL $0XFF,X0,X3
+ PSHUFL $0X00,X0,X0
+ MOVOA X1,64(SP)
+ MOVOA X2,80(SP)
+ MOVOA X3,96(SP)
+ MOVOA X0,112(SP)
+ MOVOA 0(SP),X0
+ PSHUFL $0XAA,X0,X1
+ PSHUFL $0XFF,X0,X2
+ PSHUFL $0X00,X0,X3
+ PSHUFL $0X55,X0,X0
+ MOVOA X1,128(SP)
+ MOVOA X2,144(SP)
+ MOVOA X3,160(SP)
+ MOVOA X0,176(SP)
+ MOVOA 16(SP),X0
+ PSHUFL $0XFF,X0,X1
+ PSHUFL $0X55,X0,X2
+ PSHUFL $0XAA,X0,X0
+ MOVOA X1,192(SP)
+ MOVOA X2,208(SP)
+ MOVOA X0,224(SP)
+ MOVOA 32(SP),X0
+ PSHUFL $0X00,X0,X1
+ PSHUFL $0XAA,X0,X2
+ PSHUFL $0XFF,X0,X0
+ MOVOA X1,240(SP)
+ MOVOA X2,256(SP)
+ MOVOA X0,272(SP)
+ BYTESATLEAST256:
+ MOVL 16(SP),DX
+ MOVL 36 (SP),CX
+ MOVL DX,288(SP)
+ MOVL CX,304(SP)
+ ADDQ $1,DX
+ SHLQ $32,CX
+ ADDQ CX,DX
+ MOVQ DX,CX
+ SHRQ $32,CX
+ MOVL DX, 292 (SP)
+ MOVL CX, 308 (SP)
+ ADDQ $1,DX
+ SHLQ $32,CX
+ ADDQ CX,DX
+ MOVQ DX,CX
+ SHRQ $32,CX
+ MOVL DX, 296 (SP)
+ MOVL CX, 312 (SP)
+ ADDQ $1,DX
+ SHLQ $32,CX
+ ADDQ CX,DX
+ MOVQ DX,CX
+ SHRQ $32,CX
+ MOVL DX, 300 (SP)
+ MOVL CX, 316 (SP)
+ ADDQ $1,DX
+ SHLQ $32,CX
+ ADDQ CX,DX
+ MOVQ DX,CX
+ SHRQ $32,CX
+ MOVL DX,16(SP)
+ MOVL CX, 36 (SP)
+ MOVQ R9,408(SP)
+ MOVQ $20,DX
+ MOVOA 64(SP),X0
+ MOVOA 80(SP),X1
+ MOVOA 96(SP),X2
+ MOVOA 256(SP),X3
+ MOVOA 272(SP),X4
+ MOVOA 128(SP),X5
+ MOVOA 144(SP),X6
+ MOVOA 176(SP),X7
+ MOVOA 192(SP),X8
+ MOVOA 208(SP),X9
+ MOVOA 224(SP),X10
+ MOVOA 304(SP),X11
+ MOVOA 112(SP),X12
+ MOVOA 160(SP),X13
+ MOVOA 240(SP),X14
+ MOVOA 288(SP),X15
+ MAINLOOP1:
+ MOVOA X1,320(SP)
+ MOVOA X2,336(SP)
+ MOVOA X13,X1
+ PADDL X12,X1
+ MOVOA X1,X2
+ PSLLL $7,X1
+ PXOR X1,X14
+ PSRLL $25,X2
+ PXOR X2,X14
+ MOVOA X7,X1
+ PADDL X0,X1
+ MOVOA X1,X2
+ PSLLL $7,X1
+ PXOR X1,X11
+ PSRLL $25,X2
+ PXOR X2,X11
+ MOVOA X12,X1
+ PADDL X14,X1
+ MOVOA X1,X2
+ PSLLL $9,X1
+ PXOR X1,X15
+ PSRLL $23,X2
+ PXOR X2,X15
+ MOVOA X0,X1
+ PADDL X11,X1
+ MOVOA X1,X2
+ PSLLL $9,X1
+ PXOR X1,X9
+ PSRLL $23,X2
+ PXOR X2,X9
+ MOVOA X14,X1
+ PADDL X15,X1
+ MOVOA X1,X2
+ PSLLL $13,X1
+ PXOR X1,X13
+ PSRLL $19,X2
+ PXOR X2,X13
+ MOVOA X11,X1
+ PADDL X9,X1
+ MOVOA X1,X2
+ PSLLL $13,X1
+ PXOR X1,X7
+ PSRLL $19,X2
+ PXOR X2,X7
+ MOVOA X15,X1
+ PADDL X13,X1
+ MOVOA X1,X2
+ PSLLL $18,X1
+ PXOR X1,X12
+ PSRLL $14,X2
+ PXOR X2,X12
+ MOVOA 320(SP),X1
+ MOVOA X12,320(SP)
+ MOVOA X9,X2
+ PADDL X7,X2
+ MOVOA X2,X12
+ PSLLL $18,X2
+ PXOR X2,X0
+ PSRLL $14,X12
+ PXOR X12,X0
+ MOVOA X5,X2
+ PADDL X1,X2
+ MOVOA X2,X12
+ PSLLL $7,X2
+ PXOR X2,X3
+ PSRLL $25,X12
+ PXOR X12,X3
+ MOVOA 336(SP),X2
+ MOVOA X0,336(SP)
+ MOVOA X6,X0
+ PADDL X2,X0
+ MOVOA X0,X12
+ PSLLL $7,X0
+ PXOR X0,X4
+ PSRLL $25,X12
+ PXOR X12,X4
+ MOVOA X1,X0
+ PADDL X3,X0
+ MOVOA X0,X12
+ PSLLL $9,X0
+ PXOR X0,X10
+ PSRLL $23,X12
+ PXOR X12,X10
+ MOVOA X2,X0
+ PADDL X4,X0
+ MOVOA X0,X12
+ PSLLL $9,X0
+ PXOR X0,X8
+ PSRLL $23,X12
+ PXOR X12,X8
+ MOVOA X3,X0
+ PADDL X10,X0
+ MOVOA X0,X12
+ PSLLL $13,X0
+ PXOR X0,X5
+ PSRLL $19,X12
+ PXOR X12,X5
+ MOVOA X4,X0
+ PADDL X8,X0
+ MOVOA X0,X12
+ PSLLL $13,X0
+ PXOR X0,X6
+ PSRLL $19,X12
+ PXOR X12,X6
+ MOVOA X10,X0
+ PADDL X5,X0
+ MOVOA X0,X12
+ PSLLL $18,X0
+ PXOR X0,X1
+ PSRLL $14,X12
+ PXOR X12,X1
+ MOVOA 320(SP),X0
+ MOVOA X1,320(SP)
+ MOVOA X4,X1
+ PADDL X0,X1
+ MOVOA X1,X12
+ PSLLL $7,X1
+ PXOR X1,X7
+ PSRLL $25,X12
+ PXOR X12,X7
+ MOVOA X8,X1
+ PADDL X6,X1
+ MOVOA X1,X12
+ PSLLL $18,X1
+ PXOR X1,X2
+ PSRLL $14,X12
+ PXOR X12,X2
+ MOVOA 336(SP),X12
+ MOVOA X2,336(SP)
+ MOVOA X14,X1
+ PADDL X12,X1
+ MOVOA X1,X2
+ PSLLL $7,X1
+ PXOR X1,X5
+ PSRLL $25,X2
+ PXOR X2,X5
+ MOVOA X0,X1
+ PADDL X7,X1
+ MOVOA X1,X2
+ PSLLL $9,X1
+ PXOR X1,X10
+ PSRLL $23,X2
+ PXOR X2,X10
+ MOVOA X12,X1
+ PADDL X5,X1
+ MOVOA X1,X2
+ PSLLL $9,X1
+ PXOR X1,X8
+ PSRLL $23,X2
+ PXOR X2,X8
+ MOVOA X7,X1
+ PADDL X10,X1
+ MOVOA X1,X2
+ PSLLL $13,X1
+ PXOR X1,X4
+ PSRLL $19,X2
+ PXOR X2,X4
+ MOVOA X5,X1
+ PADDL X8,X1
+ MOVOA X1,X2
+ PSLLL $13,X1
+ PXOR X1,X14
+ PSRLL $19,X2
+ PXOR X2,X14
+ MOVOA X10,X1
+ PADDL X4,X1
+ MOVOA X1,X2
+ PSLLL $18,X1
+ PXOR X1,X0
+ PSRLL $14,X2
+ PXOR X2,X0
+ MOVOA 320(SP),X1
+ MOVOA X0,320(SP)
+ MOVOA X8,X0
+ PADDL X14,X0
+ MOVOA X0,X2
+ PSLLL $18,X0
+ PXOR X0,X12
+ PSRLL $14,X2
+ PXOR X2,X12
+ MOVOA X11,X0
+ PADDL X1,X0
+ MOVOA X0,X2
+ PSLLL $7,X0
+ PXOR X0,X6
+ PSRLL $25,X2
+ PXOR X2,X6
+ MOVOA 336(SP),X2
+ MOVOA X12,336(SP)
+ MOVOA X3,X0
+ PADDL X2,X0
+ MOVOA X0,X12
+ PSLLL $7,X0
+ PXOR X0,X13
+ PSRLL $25,X12
+ PXOR X12,X13
+ MOVOA X1,X0
+ PADDL X6,X0
+ MOVOA X0,X12
+ PSLLL $9,X0
+ PXOR X0,X15
+ PSRLL $23,X12
+ PXOR X12,X15
+ MOVOA X2,X0
+ PADDL X13,X0
+ MOVOA X0,X12
+ PSLLL $9,X0
+ PXOR X0,X9
+ PSRLL $23,X12
+ PXOR X12,X9
+ MOVOA X6,X0
+ PADDL X15,X0
+ MOVOA X0,X12
+ PSLLL $13,X0
+ PXOR X0,X11
+ PSRLL $19,X12
+ PXOR X12,X11
+ MOVOA X13,X0
+ PADDL X9,X0
+ MOVOA X0,X12
+ PSLLL $13,X0
+ PXOR X0,X3
+ PSRLL $19,X12
+ PXOR X12,X3
+ MOVOA X15,X0
+ PADDL X11,X0
+ MOVOA X0,X12
+ PSLLL $18,X0
+ PXOR X0,X1
+ PSRLL $14,X12
+ PXOR X12,X1
+ MOVOA X9,X0
+ PADDL X3,X0
+ MOVOA X0,X12
+ PSLLL $18,X0
+ PXOR X0,X2
+ PSRLL $14,X12
+ PXOR X12,X2
+ MOVOA 320(SP),X12
+ MOVOA 336(SP),X0
+ SUBQ $2,DX
+ JA MAINLOOP1
+ PADDL 112(SP),X12
+ PADDL 176(SP),X7
+ PADDL 224(SP),X10
+ PADDL 272(SP),X4
+ MOVD X12,DX
+ MOVD X7,CX
+ MOVD X10,R8
+ MOVD X4,R9
+ PSHUFL $0X39,X12,X12
+ PSHUFL $0X39,X7,X7
+ PSHUFL $0X39,X10,X10
+ PSHUFL $0X39,X4,X4
+ XORL 0(SI),DX
+ XORL 4(SI),CX
+ XORL 8(SI),R8
+ XORL 12(SI),R9
+ MOVL DX,0(DI)
+ MOVL CX,4(DI)
+ MOVL R8,8(DI)
+ MOVL R9,12(DI)
+ MOVD X12,DX
+ MOVD X7,CX
+ MOVD X10,R8
+ MOVD X4,R9
+ PSHUFL $0X39,X12,X12
+ PSHUFL $0X39,X7,X7
+ PSHUFL $0X39,X10,X10
+ PSHUFL $0X39,X4,X4
+ XORL 64(SI),DX
+ XORL 68(SI),CX
+ XORL 72(SI),R8
+ XORL 76(SI),R9
+ MOVL DX,64(DI)
+ MOVL CX,68(DI)
+ MOVL R8,72(DI)
+ MOVL R9,76(DI)
+ MOVD X12,DX
+ MOVD X7,CX
+ MOVD X10,R8
+ MOVD X4,R9
+ PSHUFL $0X39,X12,X12
+ PSHUFL $0X39,X7,X7
+ PSHUFL $0X39,X10,X10
+ PSHUFL $0X39,X4,X4
+ XORL 128(SI),DX
+ XORL 132(SI),CX
+ XORL 136(SI),R8
+ XORL 140(SI),R9
+ MOVL DX,128(DI)
+ MOVL CX,132(DI)
+ MOVL R8,136(DI)
+ MOVL R9,140(DI)
+ MOVD X12,DX
+ MOVD X7,CX
+ MOVD X10,R8
+ MOVD X4,R9
+ XORL 192(SI),DX
+ XORL 196(SI),CX
+ XORL 200(SI),R8
+ XORL 204(SI),R9
+ MOVL DX,192(DI)
+ MOVL CX,196(DI)
+ MOVL R8,200(DI)
+ MOVL R9,204(DI)
+ PADDL 240(SP),X14
+ PADDL 64(SP),X0
+ PADDL 128(SP),X5
+ PADDL 192(SP),X8
+ MOVD X14,DX
+ MOVD X0,CX
+ MOVD X5,R8
+ MOVD X8,R9
+ PSHUFL $0X39,X14,X14
+ PSHUFL $0X39,X0,X0
+ PSHUFL $0X39,X5,X5
+ PSHUFL $0X39,X8,X8
+ XORL 16(SI),DX
+ XORL 20(SI),CX
+ XORL 24(SI),R8
+ XORL 28(SI),R9
+ MOVL DX,16(DI)
+ MOVL CX,20(DI)
+ MOVL R8,24(DI)
+ MOVL R9,28(DI)
+ MOVD X14,DX
+ MOVD X0,CX
+ MOVD X5,R8
+ MOVD X8,R9
+ PSHUFL $0X39,X14,X14
+ PSHUFL $0X39,X0,X0
+ PSHUFL $0X39,X5,X5
+ PSHUFL $0X39,X8,X8
+ XORL 80(SI),DX
+ XORL 84(SI),CX
+ XORL 88(SI),R8
+ XORL 92(SI),R9
+ MOVL DX,80(DI)
+ MOVL CX,84(DI)
+ MOVL R8,88(DI)
+ MOVL R9,92(DI)
+ MOVD X14,DX
+ MOVD X0,CX
+ MOVD X5,R8
+ MOVD X8,R9
+ PSHUFL $0X39,X14,X14
+ PSHUFL $0X39,X0,X0
+ PSHUFL $0X39,X5,X5
+ PSHUFL $0X39,X8,X8
+ XORL 144(SI),DX
+ XORL 148(SI),CX
+ XORL 152(SI),R8
+ XORL 156(SI),R9
+ MOVL DX,144(DI)
+ MOVL CX,148(DI)
+ MOVL R8,152(DI)
+ MOVL R9,156(DI)
+ MOVD X14,DX
+ MOVD X0,CX
+ MOVD X5,R8
+ MOVD X8,R9
+ XORL 208(SI),DX
+ XORL 212(SI),CX
+ XORL 216(SI),R8
+ XORL 220(SI),R9
+ MOVL DX,208(DI)
+ MOVL CX,212(DI)
+ MOVL R8,216(DI)
+ MOVL R9,220(DI)
+ PADDL 288(SP),X15
+ PADDL 304(SP),X11
+ PADDL 80(SP),X1
+ PADDL 144(SP),X6
+ MOVD X15,DX
+ MOVD X11,CX
+ MOVD X1,R8
+ MOVD X6,R9
+ PSHUFL $0X39,X15,X15
+ PSHUFL $0X39,X11,X11
+ PSHUFL $0X39,X1,X1
+ PSHUFL $0X39,X6,X6
+ XORL 32(SI),DX
+ XORL 36(SI),CX
+ XORL 40(SI),R8
+ XORL 44(SI),R9
+ MOVL DX,32(DI)
+ MOVL CX,36(DI)
+ MOVL R8,40(DI)
+ MOVL R9,44(DI)
+ MOVD X15,DX
+ MOVD X11,CX
+ MOVD X1,R8
+ MOVD X6,R9
+ PSHUFL $0X39,X15,X15
+ PSHUFL $0X39,X11,X11
+ PSHUFL $0X39,X1,X1
+ PSHUFL $0X39,X6,X6
+ XORL 96(SI),DX
+ XORL 100(SI),CX
+ XORL 104(SI),R8
+ XORL 108(SI),R9
+ MOVL DX,96(DI)
+ MOVL CX,100(DI)
+ MOVL R8,104(DI)
+ MOVL R9,108(DI)
+ MOVD X15,DX
+ MOVD X11,CX
+ MOVD X1,R8
+ MOVD X6,R9
+ PSHUFL $0X39,X15,X15
+ PSHUFL $0X39,X11,X11
+ PSHUFL $0X39,X1,X1
+ PSHUFL $0X39,X6,X6
+ XORL 160(SI),DX
+ XORL 164(SI),CX
+ XORL 168(SI),R8
+ XORL 172(SI),R9
+ MOVL DX,160(DI)
+ MOVL CX,164(DI)
+ MOVL R8,168(DI)
+ MOVL R9,172(DI)
+ MOVD X15,DX
+ MOVD X11,CX
+ MOVD X1,R8
+ MOVD X6,R9
+ XORL 224(SI),DX
+ XORL 228(SI),CX
+ XORL 232(SI),R8
+ XORL 236(SI),R9
+ MOVL DX,224(DI)
+ MOVL CX,228(DI)
+ MOVL R8,232(DI)
+ MOVL R9,236(DI)
+ PADDL 160(SP),X13
+ PADDL 208(SP),X9
+ PADDL 256(SP),X3
+ PADDL 96(SP),X2
+ MOVD X13,DX
+ MOVD X9,CX
+ MOVD X3,R8
+ MOVD X2,R9
+ PSHUFL $0X39,X13,X13
+ PSHUFL $0X39,X9,X9
+ PSHUFL $0X39,X3,X3
+ PSHUFL $0X39,X2,X2
+ XORL 48(SI),DX
+ XORL 52(SI),CX
+ XORL 56(SI),R8
+ XORL 60(SI),R9
+ MOVL DX,48(DI)
+ MOVL CX,52(DI)
+ MOVL R8,56(DI)
+ MOVL R9,60(DI)
+ MOVD X13,DX
+ MOVD X9,CX
+ MOVD X3,R8
+ MOVD X2,R9
+ PSHUFL $0X39,X13,X13
+ PSHUFL $0X39,X9,X9
+ PSHUFL $0X39,X3,X3
+ PSHUFL $0X39,X2,X2
+ XORL 112(SI),DX
+ XORL 116(SI),CX
+ XORL 120(SI),R8
+ XORL 124(SI),R9
+ MOVL DX,112(DI)
+ MOVL CX,116(DI)
+ MOVL R8,120(DI)
+ MOVL R9,124(DI)
+ MOVD X13,DX
+ MOVD X9,CX
+ MOVD X3,R8
+ MOVD X2,R9
+ PSHUFL $0X39,X13,X13
+ PSHUFL $0X39,X9,X9
+ PSHUFL $0X39,X3,X3
+ PSHUFL $0X39,X2,X2
+ XORL 176(SI),DX
+ XORL 180(SI),CX
+ XORL 184(SI),R8
+ XORL 188(SI),R9
+ MOVL DX,176(DI)
+ MOVL CX,180(DI)
+ MOVL R8,184(DI)
+ MOVL R9,188(DI)
+ MOVD X13,DX
+ MOVD X9,CX
+ MOVD X3,R8
+ MOVD X2,R9
+ XORL 240(SI),DX
+ XORL 244(SI),CX
+ XORL 248(SI),R8
+ XORL 252(SI),R9
+ MOVL DX,240(DI)
+ MOVL CX,244(DI)
+ MOVL R8,248(DI)
+ MOVL R9,252(DI)
+ MOVQ 408(SP),R9
+ SUBQ $256,R9
+ ADDQ $256,SI
+ ADDQ $256,DI
+ CMPQ R9,$256
+ JAE BYTESATLEAST256
+ CMPQ R9,$0
+ JBE DONE
+ BYTESBETWEEN1AND255:
+ CMPQ R9,$64
+ JAE NOCOPY
+ MOVQ DI,DX
+ LEAQ 416(SP),DI
+ MOVQ R9,CX
+ REP; MOVSB
+ LEAQ 416(SP),DI
+ LEAQ 416(SP),SI
+ NOCOPY:
+ MOVQ R9,408(SP)
+ MOVOA 48(SP),X0
+ MOVOA 0(SP),X1
+ MOVOA 16(SP),X2
+ MOVOA 32(SP),X3
+ MOVOA X1,X4
+ MOVQ $20,CX
+ MAINLOOP2:
+ PADDL X0,X4
+ MOVOA X0,X5
+ MOVOA X4,X6
+ PSLLL $7,X4
+ PSRLL $25,X6
+ PXOR X4,X3
+ PXOR X6,X3
+ PADDL X3,X5
+ MOVOA X3,X4
+ MOVOA X5,X6
+ PSLLL $9,X5
+ PSRLL $23,X6
+ PXOR X5,X2
+ PSHUFL $0X93,X3,X3
+ PXOR X6,X2
+ PADDL X2,X4
+ MOVOA X2,X5
+ MOVOA X4,X6
+ PSLLL $13,X4
+ PSRLL $19,X6
+ PXOR X4,X1
+ PSHUFL $0X4E,X2,X2
+ PXOR X6,X1
+ PADDL X1,X5
+ MOVOA X3,X4
+ MOVOA X5,X6
+ PSLLL $18,X5
+ PSRLL $14,X6
+ PXOR X5,X0
+ PSHUFL $0X39,X1,X1
+ PXOR X6,X0
+ PADDL X0,X4
+ MOVOA X0,X5
+ MOVOA X4,X6
+ PSLLL $7,X4
+ PSRLL $25,X6
+ PXOR X4,X1
+ PXOR X6,X1
+ PADDL X1,X5
+ MOVOA X1,X4
+ MOVOA X5,X6
+ PSLLL $9,X5
+ PSRLL $23,X6
+ PXOR X5,X2
+ PSHUFL $0X93,X1,X1
+ PXOR X6,X2
+ PADDL X2,X4
+ MOVOA X2,X5
+ MOVOA X4,X6
+ PSLLL $13,X4
+ PSRLL $19,X6
+ PXOR X4,X3
+ PSHUFL $0X4E,X2,X2
+ PXOR X6,X3
+ PADDL X3,X5
+ MOVOA X1,X4
+ MOVOA X5,X6
+ PSLLL $18,X5
+ PSRLL $14,X6
+ PXOR X5,X0
+ PSHUFL $0X39,X3,X3
+ PXOR X6,X0
+ PADDL X0,X4
+ MOVOA X0,X5
+ MOVOA X4,X6
+ PSLLL $7,X4
+ PSRLL $25,X6
+ PXOR X4,X3
+ PXOR X6,X3
+ PADDL X3,X5
+ MOVOA X3,X4
+ MOVOA X5,X6
+ PSLLL $9,X5
+ PSRLL $23,X6
+ PXOR X5,X2
+ PSHUFL $0X93,X3,X3
+ PXOR X6,X2
+ PADDL X2,X4
+ MOVOA X2,X5
+ MOVOA X4,X6
+ PSLLL $13,X4
+ PSRLL $19,X6
+ PXOR X4,X1
+ PSHUFL $0X4E,X2,X2
+ PXOR X6,X1
+ PADDL X1,X5
+ MOVOA X3,X4
+ MOVOA X5,X6
+ PSLLL $18,X5
+ PSRLL $14,X6
+ PXOR X5,X0
+ PSHUFL $0X39,X1,X1
+ PXOR X6,X0
+ PADDL X0,X4
+ MOVOA X0,X5
+ MOVOA X4,X6
+ PSLLL $7,X4
+ PSRLL $25,X6
+ PXOR X4,X1
+ PXOR X6,X1
+ PADDL X1,X5
+ MOVOA X1,X4
+ MOVOA X5,X6
+ PSLLL $9,X5
+ PSRLL $23,X6
+ PXOR X5,X2
+ PSHUFL $0X93,X1,X1
+ PXOR X6,X2
+ PADDL X2,X4
+ MOVOA X2,X5
+ MOVOA X4,X6
+ PSLLL $13,X4
+ PSRLL $19,X6
+ PXOR X4,X3
+ PSHUFL $0X4E,X2,X2
+ PXOR X6,X3
+ SUBQ $4,CX
+ PADDL X3,X5
+ MOVOA X1,X4
+ MOVOA X5,X6
+ PSLLL $18,X5
+ PXOR X7,X7
+ PSRLL $14,X6
+ PXOR X5,X0
+ PSHUFL $0X39,X3,X3
+ PXOR X6,X0
+ JA MAINLOOP2
+ PADDL 48(SP),X0
+ PADDL 0(SP),X1
+ PADDL 16(SP),X2
+ PADDL 32(SP),X3
+ MOVD X0,CX
+ MOVD X1,R8
+ MOVD X2,R9
+ MOVD X3,AX
+ PSHUFL $0X39,X0,X0
+ PSHUFL $0X39,X1,X1
+ PSHUFL $0X39,X2,X2
+ PSHUFL $0X39,X3,X3
+ XORL 0(SI),CX
+ XORL 48(SI),R8
+ XORL 32(SI),R9
+ XORL 16(SI),AX
+ MOVL CX,0(DI)
+ MOVL R8,48(DI)
+ MOVL R9,32(DI)
+ MOVL AX,16(DI)
+ MOVD X0,CX
+ MOVD X1,R8
+ MOVD X2,R9
+ MOVD X3,AX
+ PSHUFL $0X39,X0,X0
+ PSHUFL $0X39,X1,X1
+ PSHUFL $0X39,X2,X2
+ PSHUFL $0X39,X3,X3
+ XORL 20(SI),CX
+ XORL 4(SI),R8
+ XORL 52(SI),R9
+ XORL 36(SI),AX
+ MOVL CX,20(DI)
+ MOVL R8,4(DI)
+ MOVL R9,52(DI)
+ MOVL AX,36(DI)
+ MOVD X0,CX
+ MOVD X1,R8
+ MOVD X2,R9
+ MOVD X3,AX
+ PSHUFL $0X39,X0,X0
+ PSHUFL $0X39,X1,X1
+ PSHUFL $0X39,X2,X2
+ PSHUFL $0X39,X3,X3
+ XORL 40(SI),CX
+ XORL 24(SI),R8
+ XORL 8(SI),R9
+ XORL 56(SI),AX
+ MOVL CX,40(DI)
+ MOVL R8,24(DI)
+ MOVL R9,8(DI)
+ MOVL AX,56(DI)
+ MOVD X0,CX
+ MOVD X1,R8
+ MOVD X2,R9
+ MOVD X3,AX
+ XORL 60(SI),CX
+ XORL 44(SI),R8
+ XORL 28(SI),R9
+ XORL 12(SI),AX
+ MOVL CX,60(DI)
+ MOVL R8,44(DI)
+ MOVL R9,28(DI)
+ MOVL AX,12(DI)
+ MOVQ 408(SP),R9
+ MOVL 16(SP),CX
+ MOVL 36 (SP),R8
+ ADDQ $1,CX
+ SHLQ $32,R8
+ ADDQ R8,CX
+ MOVQ CX,R8
+ SHRQ $32,R8
+ MOVL CX,16(SP)
+ MOVL R8, 36 (SP)
+ CMPQ R9,$64
+ JA BYTESATLEAST65
+ JAE BYTESATLEAST64
+ MOVQ DI,SI
+ MOVQ DX,DI
+ MOVQ R9,CX
+ REP; MOVSB
+ BYTESATLEAST64:
+ DONE:
+ MOVQ 352(SP),R11
+ MOVQ 360(SP),R12
+ MOVQ 368(SP),R13
+ MOVQ 376(SP),R14
+ MOVQ 384(SP),R15
+ MOVQ 392(SP),BX
+ MOVQ 400(SP),BP
+ MOVQ R11,SP
+ RET
+ BYTESATLEAST65:
+ SUBQ $64,R9
+ ADDQ $64,DI
+ ADDQ $64,SI
+ JMP BYTESBETWEEN1AND255
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa208.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa208.go
new file mode 100644
index 00000000000..9bfc0927ce8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa208.go
@@ -0,0 +1,199 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package salsa
+
+// Core208 applies the Salsa20/8 core function to the 64-byte array in and puts
+// the result into the 64-byte array out. The input and output may be the same array.
+func Core208(out *[64]byte, in *[64]byte) {
+ j0 := uint32(in[0]) | uint32(in[1])<<8 | uint32(in[2])<<16 | uint32(in[3])<<24
+ j1 := uint32(in[4]) | uint32(in[5])<<8 | uint32(in[6])<<16 | uint32(in[7])<<24
+ j2 := uint32(in[8]) | uint32(in[9])<<8 | uint32(in[10])<<16 | uint32(in[11])<<24
+ j3 := uint32(in[12]) | uint32(in[13])<<8 | uint32(in[14])<<16 | uint32(in[15])<<24
+ j4 := uint32(in[16]) | uint32(in[17])<<8 | uint32(in[18])<<16 | uint32(in[19])<<24
+ j5 := uint32(in[20]) | uint32(in[21])<<8 | uint32(in[22])<<16 | uint32(in[23])<<24
+ j6 := uint32(in[24]) | uint32(in[25])<<8 | uint32(in[26])<<16 | uint32(in[27])<<24
+ j7 := uint32(in[28]) | uint32(in[29])<<8 | uint32(in[30])<<16 | uint32(in[31])<<24
+ j8 := uint32(in[32]) | uint32(in[33])<<8 | uint32(in[34])<<16 | uint32(in[35])<<24
+ j9 := uint32(in[36]) | uint32(in[37])<<8 | uint32(in[38])<<16 | uint32(in[39])<<24
+ j10 := uint32(in[40]) | uint32(in[41])<<8 | uint32(in[42])<<16 | uint32(in[43])<<24
+ j11 := uint32(in[44]) | uint32(in[45])<<8 | uint32(in[46])<<16 | uint32(in[47])<<24
+ j12 := uint32(in[48]) | uint32(in[49])<<8 | uint32(in[50])<<16 | uint32(in[51])<<24
+ j13 := uint32(in[52]) | uint32(in[53])<<8 | uint32(in[54])<<16 | uint32(in[55])<<24
+ j14 := uint32(in[56]) | uint32(in[57])<<8 | uint32(in[58])<<16 | uint32(in[59])<<24
+ j15 := uint32(in[60]) | uint32(in[61])<<8 | uint32(in[62])<<16 | uint32(in[63])<<24
+
+ x0, x1, x2, x3, x4, x5, x6, x7, x8 := j0, j1, j2, j3, j4, j5, j6, j7, j8
+ x9, x10, x11, x12, x13, x14, x15 := j9, j10, j11, j12, j13, j14, j15
+
+ for i := 0; i < 8; i += 2 {
+ u := x0 + x12
+ x4 ^= u<<7 | u>>(32-7)
+ u = x4 + x0
+ x8 ^= u<<9 | u>>(32-9)
+ u = x8 + x4
+ x12 ^= u<<13 | u>>(32-13)
+ u = x12 + x8
+ x0 ^= u<<18 | u>>(32-18)
+
+ u = x5 + x1
+ x9 ^= u<<7 | u>>(32-7)
+ u = x9 + x5
+ x13 ^= u<<9 | u>>(32-9)
+ u = x13 + x9
+ x1 ^= u<<13 | u>>(32-13)
+ u = x1 + x13
+ x5 ^= u<<18 | u>>(32-18)
+
+ u = x10 + x6
+ x14 ^= u<<7 | u>>(32-7)
+ u = x14 + x10
+ x2 ^= u<<9 | u>>(32-9)
+ u = x2 + x14
+ x6 ^= u<<13 | u>>(32-13)
+ u = x6 + x2
+ x10 ^= u<<18 | u>>(32-18)
+
+ u = x15 + x11
+ x3 ^= u<<7 | u>>(32-7)
+ u = x3 + x15
+ x7 ^= u<<9 | u>>(32-9)
+ u = x7 + x3
+ x11 ^= u<<13 | u>>(32-13)
+ u = x11 + x7
+ x15 ^= u<<18 | u>>(32-18)
+
+ u = x0 + x3
+ x1 ^= u<<7 | u>>(32-7)
+ u = x1 + x0
+ x2 ^= u<<9 | u>>(32-9)
+ u = x2 + x1
+ x3 ^= u<<13 | u>>(32-13)
+ u = x3 + x2
+ x0 ^= u<<18 | u>>(32-18)
+
+ u = x5 + x4
+ x6 ^= u<<7 | u>>(32-7)
+ u = x6 + x5
+ x7 ^= u<<9 | u>>(32-9)
+ u = x7 + x6
+ x4 ^= u<<13 | u>>(32-13)
+ u = x4 + x7
+ x5 ^= u<<18 | u>>(32-18)
+
+ u = x10 + x9
+ x11 ^= u<<7 | u>>(32-7)
+ u = x11 + x10
+ x8 ^= u<<9 | u>>(32-9)
+ u = x8 + x11
+ x9 ^= u<<13 | u>>(32-13)
+ u = x9 + x8
+ x10 ^= u<<18 | u>>(32-18)
+
+ u = x15 + x14
+ x12 ^= u<<7 | u>>(32-7)
+ u = x12 + x15
+ x13 ^= u<<9 | u>>(32-9)
+ u = x13 + x12
+ x14 ^= u<<13 | u>>(32-13)
+ u = x14 + x13
+ x15 ^= u<<18 | u>>(32-18)
+ }
+ x0 += j0
+ x1 += j1
+ x2 += j2
+ x3 += j3
+ x4 += j4
+ x5 += j5
+ x6 += j6
+ x7 += j7
+ x8 += j8
+ x9 += j9
+ x10 += j10
+ x11 += j11
+ x12 += j12
+ x13 += j13
+ x14 += j14
+ x15 += j15
+
+ out[0] = byte(x0)
+ out[1] = byte(x0 >> 8)
+ out[2] = byte(x0 >> 16)
+ out[3] = byte(x0 >> 24)
+
+ out[4] = byte(x1)
+ out[5] = byte(x1 >> 8)
+ out[6] = byte(x1 >> 16)
+ out[7] = byte(x1 >> 24)
+
+ out[8] = byte(x2)
+ out[9] = byte(x2 >> 8)
+ out[10] = byte(x2 >> 16)
+ out[11] = byte(x2 >> 24)
+
+ out[12] = byte(x3)
+ out[13] = byte(x3 >> 8)
+ out[14] = byte(x3 >> 16)
+ out[15] = byte(x3 >> 24)
+
+ out[16] = byte(x4)
+ out[17] = byte(x4 >> 8)
+ out[18] = byte(x4 >> 16)
+ out[19] = byte(x4 >> 24)
+
+ out[20] = byte(x5)
+ out[21] = byte(x5 >> 8)
+ out[22] = byte(x5 >> 16)
+ out[23] = byte(x5 >> 24)
+
+ out[24] = byte(x6)
+ out[25] = byte(x6 >> 8)
+ out[26] = byte(x6 >> 16)
+ out[27] = byte(x6 >> 24)
+
+ out[28] = byte(x7)
+ out[29] = byte(x7 >> 8)
+ out[30] = byte(x7 >> 16)
+ out[31] = byte(x7 >> 24)
+
+ out[32] = byte(x8)
+ out[33] = byte(x8 >> 8)
+ out[34] = byte(x8 >> 16)
+ out[35] = byte(x8 >> 24)
+
+ out[36] = byte(x9)
+ out[37] = byte(x9 >> 8)
+ out[38] = byte(x9 >> 16)
+ out[39] = byte(x9 >> 24)
+
+ out[40] = byte(x10)
+ out[41] = byte(x10 >> 8)
+ out[42] = byte(x10 >> 16)
+ out[43] = byte(x10 >> 24)
+
+ out[44] = byte(x11)
+ out[45] = byte(x11 >> 8)
+ out[46] = byte(x11 >> 16)
+ out[47] = byte(x11 >> 24)
+
+ out[48] = byte(x12)
+ out[49] = byte(x12 >> 8)
+ out[50] = byte(x12 >> 16)
+ out[51] = byte(x12 >> 24)
+
+ out[52] = byte(x13)
+ out[53] = byte(x13 >> 8)
+ out[54] = byte(x13 >> 16)
+ out[55] = byte(x13 >> 24)
+
+ out[56] = byte(x14)
+ out[57] = byte(x14 >> 8)
+ out[58] = byte(x14 >> 16)
+ out[59] = byte(x14 >> 24)
+
+ out[60] = byte(x15)
+ out[61] = byte(x15 >> 8)
+ out[62] = byte(x15 >> 16)
+ out[63] = byte(x15 >> 24)
+}
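
A tiny sketch of applying Core208 in place, as the test in salsa_test.go below does; the input bytes here are arbitrary placeholders.

package main

import (
	"fmt"

	"golang.org/x/crypto/salsa20/salsa"
)

func main() {
	// Core208 may be applied in place: in and out are the same array here.
	var block [64]byte
	copy(block[:], "an arbitrary input block; unused trailing bytes stay zero")
	salsa.Core208(&block, &block)
	fmt.Printf("%x\n", block)
}
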
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.go
new file mode 100644
index 00000000000..903c7858e43
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.go
@@ -0,0 +1,23 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build amd64,!appengine,!gccgo
+
+package salsa
+
+// This function is implemented in salsa2020_amd64.s.
+
+//go:noescape
+func salsa2020XORKeyStream(out, in *byte, n uint64, nonce, key *byte)
+
+// XORKeyStream crypts bytes from in to out using the given key and counters.
+// In and out may be the same slice but otherwise should not overlap. Counter
+// contains the raw salsa20 counter bytes (both nonce and block counter).
+func XORKeyStream(out, in []byte, counter *[16]byte, key *[32]byte) {
+ if len(in) == 0 {
+ return
+ }
+ salsa2020XORKeyStream(&out[0], &in[0], uint64(len(in)), &counter[0], &key[0])
+}
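
A small sketch of calling the package-level XORKeyStream directly, assuming the counter layout used by the reference implementation below (nonce in the first 8 bytes, little-endian block counter in the last 8); the key, counter, and message values are placeholders.

package main

import (
	"fmt"

	"golang.org/x/crypto/salsa20/salsa"
)

func main() {
	var key [32]byte
	var counter [16]byte // first 8 bytes: nonce; last 8 bytes: block counter (all zero here)

	msg := []byte("sixteen byte msg")
	out := make([]byte, len(msg))
	salsa.XORKeyStream(out, msg, &counter, &key)
	fmt.Printf("%x\n", out)
}
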
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa20_ref.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa20_ref.go
new file mode 100644
index 00000000000..95f8ca5bb96
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa20_ref.go
@@ -0,0 +1,234 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !amd64 appengine gccgo
+
+package salsa
+
+const rounds = 20
+
+// core applies the Salsa20 core function to 16-byte input in, 32-byte key k,
+// and 16-byte constant c, and puts the result into 64-byte array out.
+func core(out *[64]byte, in *[16]byte, k *[32]byte, c *[16]byte) {
+ j0 := uint32(c[0]) | uint32(c[1])<<8 | uint32(c[2])<<16 | uint32(c[3])<<24
+ j1 := uint32(k[0]) | uint32(k[1])<<8 | uint32(k[2])<<16 | uint32(k[3])<<24
+ j2 := uint32(k[4]) | uint32(k[5])<<8 | uint32(k[6])<<16 | uint32(k[7])<<24
+ j3 := uint32(k[8]) | uint32(k[9])<<8 | uint32(k[10])<<16 | uint32(k[11])<<24
+ j4 := uint32(k[12]) | uint32(k[13])<<8 | uint32(k[14])<<16 | uint32(k[15])<<24
+ j5 := uint32(c[4]) | uint32(c[5])<<8 | uint32(c[6])<<16 | uint32(c[7])<<24
+ j6 := uint32(in[0]) | uint32(in[1])<<8 | uint32(in[2])<<16 | uint32(in[3])<<24
+ j7 := uint32(in[4]) | uint32(in[5])<<8 | uint32(in[6])<<16 | uint32(in[7])<<24
+ j8 := uint32(in[8]) | uint32(in[9])<<8 | uint32(in[10])<<16 | uint32(in[11])<<24
+ j9 := uint32(in[12]) | uint32(in[13])<<8 | uint32(in[14])<<16 | uint32(in[15])<<24
+ j10 := uint32(c[8]) | uint32(c[9])<<8 | uint32(c[10])<<16 | uint32(c[11])<<24
+ j11 := uint32(k[16]) | uint32(k[17])<<8 | uint32(k[18])<<16 | uint32(k[19])<<24
+ j12 := uint32(k[20]) | uint32(k[21])<<8 | uint32(k[22])<<16 | uint32(k[23])<<24
+ j13 := uint32(k[24]) | uint32(k[25])<<8 | uint32(k[26])<<16 | uint32(k[27])<<24
+ j14 := uint32(k[28]) | uint32(k[29])<<8 | uint32(k[30])<<16 | uint32(k[31])<<24
+ j15 := uint32(c[12]) | uint32(c[13])<<8 | uint32(c[14])<<16 | uint32(c[15])<<24
+
+ x0, x1, x2, x3, x4, x5, x6, x7, x8 := j0, j1, j2, j3, j4, j5, j6, j7, j8
+ x9, x10, x11, x12, x13, x14, x15 := j9, j10, j11, j12, j13, j14, j15
+
+ for i := 0; i < rounds; i += 2 {
+ u := x0 + x12
+ x4 ^= u<<7 | u>>(32-7)
+ u = x4 + x0
+ x8 ^= u<<9 | u>>(32-9)
+ u = x8 + x4
+ x12 ^= u<<13 | u>>(32-13)
+ u = x12 + x8
+ x0 ^= u<<18 | u>>(32-18)
+
+ u = x5 + x1
+ x9 ^= u<<7 | u>>(32-7)
+ u = x9 + x5
+ x13 ^= u<<9 | u>>(32-9)
+ u = x13 + x9
+ x1 ^= u<<13 | u>>(32-13)
+ u = x1 + x13
+ x5 ^= u<<18 | u>>(32-18)
+
+ u = x10 + x6
+ x14 ^= u<<7 | u>>(32-7)
+ u = x14 + x10
+ x2 ^= u<<9 | u>>(32-9)
+ u = x2 + x14
+ x6 ^= u<<13 | u>>(32-13)
+ u = x6 + x2
+ x10 ^= u<<18 | u>>(32-18)
+
+ u = x15 + x11
+ x3 ^= u<<7 | u>>(32-7)
+ u = x3 + x15
+ x7 ^= u<<9 | u>>(32-9)
+ u = x7 + x3
+ x11 ^= u<<13 | u>>(32-13)
+ u = x11 + x7
+ x15 ^= u<<18 | u>>(32-18)
+
+ u = x0 + x3
+ x1 ^= u<<7 | u>>(32-7)
+ u = x1 + x0
+ x2 ^= u<<9 | u>>(32-9)
+ u = x2 + x1
+ x3 ^= u<<13 | u>>(32-13)
+ u = x3 + x2
+ x0 ^= u<<18 | u>>(32-18)
+
+ u = x5 + x4
+ x6 ^= u<<7 | u>>(32-7)
+ u = x6 + x5
+ x7 ^= u<<9 | u>>(32-9)
+ u = x7 + x6
+ x4 ^= u<<13 | u>>(32-13)
+ u = x4 + x7
+ x5 ^= u<<18 | u>>(32-18)
+
+ u = x10 + x9
+ x11 ^= u<<7 | u>>(32-7)
+ u = x11 + x10
+ x8 ^= u<<9 | u>>(32-9)
+ u = x8 + x11
+ x9 ^= u<<13 | u>>(32-13)
+ u = x9 + x8
+ x10 ^= u<<18 | u>>(32-18)
+
+ u = x15 + x14
+ x12 ^= u<<7 | u>>(32-7)
+ u = x12 + x15
+ x13 ^= u<<9 | u>>(32-9)
+ u = x13 + x12
+ x14 ^= u<<13 | u>>(32-13)
+ u = x14 + x13
+ x15 ^= u<<18 | u>>(32-18)
+ }
+ x0 += j0
+ x1 += j1
+ x2 += j2
+ x3 += j3
+ x4 += j4
+ x5 += j5
+ x6 += j6
+ x7 += j7
+ x8 += j8
+ x9 += j9
+ x10 += j10
+ x11 += j11
+ x12 += j12
+ x13 += j13
+ x14 += j14
+ x15 += j15
+
+ out[0] = byte(x0)
+ out[1] = byte(x0 >> 8)
+ out[2] = byte(x0 >> 16)
+ out[3] = byte(x0 >> 24)
+
+ out[4] = byte(x1)
+ out[5] = byte(x1 >> 8)
+ out[6] = byte(x1 >> 16)
+ out[7] = byte(x1 >> 24)
+
+ out[8] = byte(x2)
+ out[9] = byte(x2 >> 8)
+ out[10] = byte(x2 >> 16)
+ out[11] = byte(x2 >> 24)
+
+ out[12] = byte(x3)
+ out[13] = byte(x3 >> 8)
+ out[14] = byte(x3 >> 16)
+ out[15] = byte(x3 >> 24)
+
+ out[16] = byte(x4)
+ out[17] = byte(x4 >> 8)
+ out[18] = byte(x4 >> 16)
+ out[19] = byte(x4 >> 24)
+
+ out[20] = byte(x5)
+ out[21] = byte(x5 >> 8)
+ out[22] = byte(x5 >> 16)
+ out[23] = byte(x5 >> 24)
+
+ out[24] = byte(x6)
+ out[25] = byte(x6 >> 8)
+ out[26] = byte(x6 >> 16)
+ out[27] = byte(x6 >> 24)
+
+ out[28] = byte(x7)
+ out[29] = byte(x7 >> 8)
+ out[30] = byte(x7 >> 16)
+ out[31] = byte(x7 >> 24)
+
+ out[32] = byte(x8)
+ out[33] = byte(x8 >> 8)
+ out[34] = byte(x8 >> 16)
+ out[35] = byte(x8 >> 24)
+
+ out[36] = byte(x9)
+ out[37] = byte(x9 >> 8)
+ out[38] = byte(x9 >> 16)
+ out[39] = byte(x9 >> 24)
+
+ out[40] = byte(x10)
+ out[41] = byte(x10 >> 8)
+ out[42] = byte(x10 >> 16)
+ out[43] = byte(x10 >> 24)
+
+ out[44] = byte(x11)
+ out[45] = byte(x11 >> 8)
+ out[46] = byte(x11 >> 16)
+ out[47] = byte(x11 >> 24)
+
+ out[48] = byte(x12)
+ out[49] = byte(x12 >> 8)
+ out[50] = byte(x12 >> 16)
+ out[51] = byte(x12 >> 24)
+
+ out[52] = byte(x13)
+ out[53] = byte(x13 >> 8)
+ out[54] = byte(x13 >> 16)
+ out[55] = byte(x13 >> 24)
+
+ out[56] = byte(x14)
+ out[57] = byte(x14 >> 8)
+ out[58] = byte(x14 >> 16)
+ out[59] = byte(x14 >> 24)
+
+ out[60] = byte(x15)
+ out[61] = byte(x15 >> 8)
+ out[62] = byte(x15 >> 16)
+ out[63] = byte(x15 >> 24)
+}
+
+// XORKeyStream crypts bytes from in to out using the given key and counters.
+// In and out may be the same slice but otherwise should not overlap. Counter
+// contains the raw salsa20 counter bytes (both nonce and block counter).
+func XORKeyStream(out, in []byte, counter *[16]byte, key *[32]byte) {
+ var block [64]byte
+ var counterCopy [16]byte
+ copy(counterCopy[:], counter[:])
+
+ for len(in) >= 64 {
+ core(&block, &counterCopy, key, &Sigma)
+ for i, x := range block {
+ out[i] = in[i] ^ x
+ }
+ u := uint32(1)
+ for i := 8; i < 16; i++ {
+ u += uint32(counterCopy[i])
+ counterCopy[i] = byte(u)
+ u >>= 8
+ }
+ in = in[64:]
+ out = out[64:]
+ }
+
+ if len(in) > 0 {
+ core(&block, &counterCopy, key, &Sigma)
+ for i, v := range in {
+ out[i] = v ^ block[i]
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa_test.go
new file mode 100644
index 00000000000..f8cecd9e6e9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa/salsa_test.go
@@ -0,0 +1,35 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package salsa
+
+import "testing"
+
+func TestCore208(t *testing.T) {
+ in := [64]byte{
+ 0x7e, 0x87, 0x9a, 0x21, 0x4f, 0x3e, 0xc9, 0x86,
+ 0x7c, 0xa9, 0x40, 0xe6, 0x41, 0x71, 0x8f, 0x26,
+ 0xba, 0xee, 0x55, 0x5b, 0x8c, 0x61, 0xc1, 0xb5,
+ 0x0d, 0xf8, 0x46, 0x11, 0x6d, 0xcd, 0x3b, 0x1d,
+ 0xee, 0x24, 0xf3, 0x19, 0xdf, 0x9b, 0x3d, 0x85,
+ 0x14, 0x12, 0x1e, 0x4b, 0x5a, 0xc5, 0xaa, 0x32,
+ 0x76, 0x02, 0x1d, 0x29, 0x09, 0xc7, 0x48, 0x29,
+ 0xed, 0xeb, 0xc6, 0x8d, 0xb8, 0xb8, 0xc2, 0x5e}
+
+ out := [64]byte{
+ 0xa4, 0x1f, 0x85, 0x9c, 0x66, 0x08, 0xcc, 0x99,
+ 0x3b, 0x81, 0xca, 0xcb, 0x02, 0x0c, 0xef, 0x05,
+ 0x04, 0x4b, 0x21, 0x81, 0xa2, 0xfd, 0x33, 0x7d,
+ 0xfd, 0x7b, 0x1c, 0x63, 0x96, 0x68, 0x2f, 0x29,
+ 0xb4, 0x39, 0x31, 0x68, 0xe3, 0xc9, 0xe6, 0xbc,
+ 0xfe, 0x6b, 0xc5, 0xb7, 0xa0, 0x6d, 0x96, 0xba,
+ 0xe4, 0x24, 0xcc, 0x10, 0x2c, 0x91, 0x74, 0x5c,
+ 0x24, 0xad, 0x67, 0x3d, 0xc7, 0x61, 0x8f, 0x81,
+ }
+
+ Core208(&in, &in)
+ if in != out {
+ t.Errorf("expected %x, got %x", out, in)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa20.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa20.go
new file mode 100644
index 00000000000..fde9846b6eb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa20.go
@@ -0,0 +1,54 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package salsa20 implements the Salsa20 stream cipher as specified in http://cr.yp.to/snuffle/spec.pdf.
+
+Salsa20 differs from many other stream ciphers in that it is message oriented
+rather than byte oriented. Keystream blocks are not preserved between calls,
+therefore each side must encrypt/decrypt data with the same segmentation.
+
+Another aspect of this difference is that part of the counter is exposed as
+a nonce in each call. Encrypting two different messages with the same (key,
+nonce) pair leads to trivial plaintext recovery. This is analogous to
+encrypting two different messages with the same key with a traditional stream
+cipher.
+
+This package also implements XSalsa20: a version of Salsa20 with a 24-byte
+nonce as specified in http://cr.yp.to/snuffle/xsalsa-20081128.pdf. Simply
+passing a 24-byte slice as the nonce triggers XSalsa20.
+*/
+package salsa20 // import "golang.org/x/crypto/salsa20"
+
+// TODO(agl): implement XORKeyStream12 and XORKeyStream8 - the reduced round variants of Salsa20.
+
+import (
+ "golang.org/x/crypto/salsa20/salsa"
+)
+
+// XORKeyStream crypts bytes from in to out using the given key and nonce. In
+// and out may be the same slice but otherwise should not overlap. Nonce must
+// be either 8 or 24 bytes long.
+func XORKeyStream(out, in []byte, nonce []byte, key *[32]byte) {
+ if len(out) < len(in) {
+ in = in[:len(out)]
+ }
+
+ var subNonce [16]byte
+
+ if len(nonce) == 24 {
+ var subKey [32]byte
+ var hNonce [16]byte
+ copy(hNonce[:], nonce[:16])
+ salsa.HSalsa20(&subKey, &hNonce, key, &salsa.Sigma)
+ copy(subNonce[:], nonce[16:])
+ key = &subKey
+ } else if len(nonce) == 8 {
+ copy(subNonce[:], nonce[:])
+ } else {
+ panic("salsa20: nonce must be 8 or 24 bytes")
+ }
+
+ salsa.XORKeyStream(out, in, &subNonce, key)
+}
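
A minimal encrypt/decrypt sketch for the package above, reusing the 24-byte-nonce test data from salsa20_test.go below; since XORKeyStream is its own inverse, decryption is the same call with the same key and nonce.

package main

import (
	"fmt"

	"golang.org/x/crypto/salsa20"
)

func main() {
	var key [32]byte
	copy(key[:], "this is 32-byte key for xsalsa20")
	nonce := []byte("24-byte nonce for xsalsa")

	msg := []byte("Hello world!")
	ct := make([]byte, len(msg))
	salsa20.XORKeyStream(ct, msg, nonce, &key)

	// Applying the keystream again recovers the plaintext.
	pt := make([]byte, len(ct))
	salsa20.XORKeyStream(pt, ct, nonce, &key)
	fmt.Printf("%x %q\n", ct, pt)
}
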
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa20_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa20_test.go
new file mode 100644
index 00000000000..0ef3328eb0a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/salsa20/salsa20_test.go
@@ -0,0 +1,139 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package salsa20
+
+import (
+ "bytes"
+ "encoding/hex"
+ "testing"
+)
+
+func fromHex(s string) []byte {
+ ret, err := hex.DecodeString(s)
+ if err != nil {
+ panic(err)
+ }
+ return ret
+}
+
+// testVectors was taken from set 6 of the ECRYPT test vectors:
+// http://www.ecrypt.eu.org/stream/svn/viewcvs.cgi/ecrypt/trunk/submissions/salsa20/full/verified.test-vectors?logsort=rev&rev=210&view=markup
+var testVectors = []struct {
+ key []byte
+ iv []byte
+ numBytes int
+ xor []byte
+}{
+ {
+ fromHex("0053A6F94C9FF24598EB3E91E4378ADD3083D6297CCF2275C81B6EC11467BA0D"),
+ fromHex("0D74DB42A91077DE"),
+ 131072,
+ fromHex("C349B6A51A3EC9B712EAED3F90D8BCEE69B7628645F251A996F55260C62EF31FD6C6B0AEA94E136C9D984AD2DF3578F78E457527B03A0450580DD874F63B1AB9"),
+ },
+ {
+ fromHex("0558ABFE51A4F74A9DF04396E93C8FE23588DB2E81D4277ACD2073C6196CBF12"),
+ fromHex("167DE44BB21980E7"),
+ 131072,
+ fromHex("C3EAAF32836BACE32D04E1124231EF47E101367D6305413A0EEB07C60698A2876E4D031870A739D6FFDDD208597AFF0A47AC17EDB0167DD67EBA84F1883D4DFD"),
+ },
+ {
+ fromHex("0A5DB00356A9FC4FA2F5489BEE4194E73A8DE03386D92C7FD22578CB1E71C417"),
+ fromHex("1F86ED54BB2289F0"),
+ 131072,
+ fromHex("3CD23C3DC90201ACC0CF49B440B6C417F0DC8D8410A716D5314C059E14B1A8D9A9FB8EA3D9C8DAE12B21402F674AA95C67B1FC514E994C9D3F3A6E41DFF5BBA6"),
+ },
+ {
+ fromHex("0F62B5085BAE0154A7FA4DA0F34699EC3F92E5388BDE3184D72A7DD02376C91C"),
+ fromHex("288FF65DC42B92F9"),
+ 131072,
+ fromHex("E00EBCCD70D69152725F9987982178A2E2E139C7BCBE04CA8A0E99E318D9AB76F988C8549F75ADD790BA4F81C176DA653C1A043F11A958E169B6D2319F4EEC1A"),
+ },
+}
+
+func TestSalsa20(t *testing.T) {
+ var inBuf, outBuf []byte
+ var key [32]byte
+
+ for i, test := range testVectors {
+ if test.numBytes%64 != 0 {
+ t.Errorf("#%d: numBytes is not a multiple of 64", i)
+ continue
+ }
+
+ if test.numBytes > len(inBuf) {
+ inBuf = make([]byte, test.numBytes)
+ outBuf = make([]byte, test.numBytes)
+ }
+ in := inBuf[:test.numBytes]
+ out := outBuf[:test.numBytes]
+ copy(key[:], test.key)
+ XORKeyStream(out, in, test.iv, &key)
+
+ var xor [64]byte
+ for len(out) > 0 {
+ for i := 0; i < 64; i++ {
+ xor[i] ^= out[i]
+ }
+ out = out[64:]
+ }
+
+ if !bytes.Equal(xor[:], test.xor) {
+ t.Errorf("#%d: bad result", i)
+ }
+ }
+}
+
+var xSalsa20TestData = []struct {
+ in, nonce, key, out []byte
+}{
+ {
+ []byte("Hello world!"),
+ []byte("24-byte nonce for xsalsa"),
+ []byte("this is 32-byte key for xsalsa20"),
+ []byte{0x00, 0x2d, 0x45, 0x13, 0x84, 0x3f, 0xc2, 0x40, 0xc4, 0x01, 0xe5, 0x41},
+ },
+ {
+ make([]byte, 64),
+ []byte("24-byte nonce for xsalsa"),
+ []byte("this is 32-byte key for xsalsa20"),
+ []byte{0x48, 0x48, 0x29, 0x7f, 0xeb, 0x1f, 0xb5, 0x2f, 0xb6,
+ 0x6d, 0x81, 0x60, 0x9b, 0xd5, 0x47, 0xfa, 0xbc, 0xbe, 0x70,
+ 0x26, 0xed, 0xc8, 0xb5, 0xe5, 0xe4, 0x49, 0xd0, 0x88, 0xbf,
+ 0xa6, 0x9c, 0x08, 0x8f, 0x5d, 0x8d, 0xa1, 0xd7, 0x91, 0x26,
+ 0x7c, 0x2c, 0x19, 0x5a, 0x7f, 0x8c, 0xae, 0x9c, 0x4b, 0x40,
+ 0x50, 0xd0, 0x8c, 0xe6, 0xd3, 0xa1, 0x51, 0xec, 0x26, 0x5f,
+ 0x3a, 0x58, 0xe4, 0x76, 0x48},
+ },
+}
+
+func TestXSalsa20(t *testing.T) {
+ var key [32]byte
+
+ for i, test := range xSalsa20TestData {
+ out := make([]byte, len(test.in))
+ copy(key[:], test.key)
+ XORKeyStream(out, test.in, test.nonce, &key)
+ if !bytes.Equal(out, test.out) {
+ t.Errorf("%d: expected %x, got %x", i, test.out, out)
+ }
+ }
+}
+
+var (
+ keyArray [32]byte
+ key = &keyArray
+ nonce [8]byte
+ msg = make([]byte, 1<<10)
+)
+
+func BenchmarkXOR1K(b *testing.B) {
+ b.StopTimer()
+ out := make([]byte, 1024)
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ XORKeyStream(out, msg[:1024], nonce[:], key)
+ }
+ b.SetBytes(1024)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/scrypt/scrypt.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/scrypt/scrypt.go
new file mode 100644
index 00000000000..dc0124b1fce
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/scrypt/scrypt.go
@@ -0,0 +1,243 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package scrypt implements the scrypt key derivation function as defined in
+// Colin Percival's paper "Stronger Key Derivation via Sequential Memory-Hard
+// Functions" (http://www.tarsnap.com/scrypt/scrypt.pdf).
+package scrypt // import "golang.org/x/crypto/scrypt"
+
+import (
+ "crypto/sha256"
+ "errors"
+
+ "golang.org/x/crypto/pbkdf2"
+)
+
+const maxInt = int(^uint(0) >> 1)
+
+// blockCopy copies n numbers from src into dst.
+func blockCopy(dst, src []uint32, n int) {
+ copy(dst, src[:n])
+}
+
+// blockXOR XORs numbers from dst with n numbers from src.
+func blockXOR(dst, src []uint32, n int) {
+ for i, v := range src[:n] {
+ dst[i] ^= v
+ }
+}
+
+// salsaXOR applies Salsa20/8 to the XOR of 16 numbers from tmp and in,
+// and puts the result into both tmp and out.
+func salsaXOR(tmp *[16]uint32, in, out []uint32) {
+ w0 := tmp[0] ^ in[0]
+ w1 := tmp[1] ^ in[1]
+ w2 := tmp[2] ^ in[2]
+ w3 := tmp[3] ^ in[3]
+ w4 := tmp[4] ^ in[4]
+ w5 := tmp[5] ^ in[5]
+ w6 := tmp[6] ^ in[6]
+ w7 := tmp[7] ^ in[7]
+ w8 := tmp[8] ^ in[8]
+ w9 := tmp[9] ^ in[9]
+ w10 := tmp[10] ^ in[10]
+ w11 := tmp[11] ^ in[11]
+ w12 := tmp[12] ^ in[12]
+ w13 := tmp[13] ^ in[13]
+ w14 := tmp[14] ^ in[14]
+ w15 := tmp[15] ^ in[15]
+
+ x0, x1, x2, x3, x4, x5, x6, x7, x8 := w0, w1, w2, w3, w4, w5, w6, w7, w8
+ x9, x10, x11, x12, x13, x14, x15 := w9, w10, w11, w12, w13, w14, w15
+
+ for i := 0; i < 8; i += 2 {
+ u := x0 + x12
+ x4 ^= u<<7 | u>>(32-7)
+ u = x4 + x0
+ x8 ^= u<<9 | u>>(32-9)
+ u = x8 + x4
+ x12 ^= u<<13 | u>>(32-13)
+ u = x12 + x8
+ x0 ^= u<<18 | u>>(32-18)
+
+ u = x5 + x1
+ x9 ^= u<<7 | u>>(32-7)
+ u = x9 + x5
+ x13 ^= u<<9 | u>>(32-9)
+ u = x13 + x9
+ x1 ^= u<<13 | u>>(32-13)
+ u = x1 + x13
+ x5 ^= u<<18 | u>>(32-18)
+
+ u = x10 + x6
+ x14 ^= u<<7 | u>>(32-7)
+ u = x14 + x10
+ x2 ^= u<<9 | u>>(32-9)
+ u = x2 + x14
+ x6 ^= u<<13 | u>>(32-13)
+ u = x6 + x2
+ x10 ^= u<<18 | u>>(32-18)
+
+ u = x15 + x11
+ x3 ^= u<<7 | u>>(32-7)
+ u = x3 + x15
+ x7 ^= u<<9 | u>>(32-9)
+ u = x7 + x3
+ x11 ^= u<<13 | u>>(32-13)
+ u = x11 + x7
+ x15 ^= u<<18 | u>>(32-18)
+
+ u = x0 + x3
+ x1 ^= u<<7 | u>>(32-7)
+ u = x1 + x0
+ x2 ^= u<<9 | u>>(32-9)
+ u = x2 + x1
+ x3 ^= u<<13 | u>>(32-13)
+ u = x3 + x2
+ x0 ^= u<<18 | u>>(32-18)
+
+ u = x5 + x4
+ x6 ^= u<<7 | u>>(32-7)
+ u = x6 + x5
+ x7 ^= u<<9 | u>>(32-9)
+ u = x7 + x6
+ x4 ^= u<<13 | u>>(32-13)
+ u = x4 + x7
+ x5 ^= u<<18 | u>>(32-18)
+
+ u = x10 + x9
+ x11 ^= u<<7 | u>>(32-7)
+ u = x11 + x10
+ x8 ^= u<<9 | u>>(32-9)
+ u = x8 + x11
+ x9 ^= u<<13 | u>>(32-13)
+ u = x9 + x8
+ x10 ^= u<<18 | u>>(32-18)
+
+ u = x15 + x14
+ x12 ^= u<<7 | u>>(32-7)
+ u = x12 + x15
+ x13 ^= u<<9 | u>>(32-9)
+ u = x13 + x12
+ x14 ^= u<<13 | u>>(32-13)
+ u = x14 + x13
+ x15 ^= u<<18 | u>>(32-18)
+ }
+ x0 += w0
+ x1 += w1
+ x2 += w2
+ x3 += w3
+ x4 += w4
+ x5 += w5
+ x6 += w6
+ x7 += w7
+ x8 += w8
+ x9 += w9
+ x10 += w10
+ x11 += w11
+ x12 += w12
+ x13 += w13
+ x14 += w14
+ x15 += w15
+
+ out[0], tmp[0] = x0, x0
+ out[1], tmp[1] = x1, x1
+ out[2], tmp[2] = x2, x2
+ out[3], tmp[3] = x3, x3
+ out[4], tmp[4] = x4, x4
+ out[5], tmp[5] = x5, x5
+ out[6], tmp[6] = x6, x6
+ out[7], tmp[7] = x7, x7
+ out[8], tmp[8] = x8, x8
+ out[9], tmp[9] = x9, x9
+ out[10], tmp[10] = x10, x10
+ out[11], tmp[11] = x11, x11
+ out[12], tmp[12] = x12, x12
+ out[13], tmp[13] = x13, x13
+ out[14], tmp[14] = x14, x14
+ out[15], tmp[15] = x15, x15
+}
+
+func blockMix(tmp *[16]uint32, in, out []uint32, r int) {
+ blockCopy(tmp[:], in[(2*r-1)*16:], 16)
+ for i := 0; i < 2*r; i += 2 {
+ salsaXOR(tmp, in[i*16:], out[i*8:])
+ salsaXOR(tmp, in[i*16+16:], out[i*8+r*16:])
+ }
+}
+
+func integer(b []uint32, r int) uint64 {
+ j := (2*r - 1) * 16
+ return uint64(b[j]) | uint64(b[j+1])<<32
+}
+
+func smix(b []byte, r, N int, v, xy []uint32) {
+ var tmp [16]uint32
+ x := xy
+ y := xy[32*r:]
+
+ j := 0
+ for i := 0; i < 32*r; i++ {
+ x[i] = uint32(b[j]) | uint32(b[j+1])<<8 | uint32(b[j+2])<<16 | uint32(b[j+3])<<24
+ j += 4
+ }
+ for i := 0; i < N; i += 2 {
+ blockCopy(v[i*(32*r):], x, 32*r)
+ blockMix(&tmp, x, y, r)
+
+ blockCopy(v[(i+1)*(32*r):], y, 32*r)
+ blockMix(&tmp, y, x, r)
+ }
+ for i := 0; i < N; i += 2 {
+ j := int(integer(x, r) & uint64(N-1))
+ blockXOR(x, v[j*(32*r):], 32*r)
+ blockMix(&tmp, x, y, r)
+
+ j = int(integer(y, r) & uint64(N-1))
+ blockXOR(y, v[j*(32*r):], 32*r)
+ blockMix(&tmp, y, x, r)
+ }
+ j = 0
+ for _, v := range x[:32*r] {
+ b[j+0] = byte(v >> 0)
+ b[j+1] = byte(v >> 8)
+ b[j+2] = byte(v >> 16)
+ b[j+3] = byte(v >> 24)
+ j += 4
+ }
+}
+
+// Key derives a key from the password, salt, and cost parameters, returning
+// a byte slice of length keyLen that can be used as a cryptographic key.
+//
+// N is a CPU/memory cost parameter, which must be a power of two greater than 1.
+// r and p must satisfy r * p < 2³⁰. If the parameters do not satisfy the
+// limits, the function returns a nil byte slice and an error.
+//
+// For example, you can get a derived key for AES-256 (which needs a
+// 32-byte key) by doing:
+//
+// dk := scrypt.Key([]byte("some password"), salt, 16384, 8, 1, 32)
+//
+// The recommended parameters for interactive logins as of 2009 are N=16384,
+// r=8, p=1. They should be increased as memory latency and CPU parallelism
+// increase. Remember to get a good random salt.
+func Key(password, salt []byte, N, r, p, keyLen int) ([]byte, error) {
+ if N <= 1 || N&(N-1) != 0 {
+ return nil, errors.New("scrypt: N must be > 1 and a power of 2")
+ }
+ if uint64(r)*uint64(p) >= 1<<30 || r > maxInt/128/p || r > maxInt/256 || N > maxInt/128/r {
+ return nil, errors.New("scrypt: parameters are too large")
+ }
+
+ xy := make([]uint32, 64*r)
+ v := make([]uint32, 32*N*r)
+ b := pbkdf2.Key(password, salt, 1, p*128*r, sha256.New)
+
+ for i := 0; i < p; i++ {
+ smix(b[i*128*r:], r, N, v, xy)
+ }
+
+ return pbkdf2.Key(password, b, 1, keyLen, sha256.New), nil
+}
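As a quick illustration of the parameters documented in the Key doc comment above, here is a minimal sketch of deriving an AES-256 key with this package; the password literal and the 16-byte salt length are illustrative choices, not requirements of the API:

package main

import (
	"crypto/rand"
	"fmt"
	"log"

	"golang.org/x/crypto/scrypt"
)

func main() {
	// A fresh random salt, as the doc comment above recommends.
	salt := make([]byte, 16)
	if _, err := rand.Read(salt); err != nil {
		log.Fatal(err)
	}
	// N=16384, r=8, p=1 are the 2009 interactive-login parameters; 32 bytes
	// of output is enough for an AES-256 key.
	dk, err := scrypt.Key([]byte("some password"), salt, 16384, 8, 1, 32)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("derived key: %x\n", dk)
}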
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/scrypt/scrypt_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/scrypt/scrypt_test.go
new file mode 100644
index 00000000000..e096c3a31ab
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/scrypt/scrypt_test.go
@@ -0,0 +1,160 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package scrypt
+
+import (
+ "bytes"
+ "testing"
+)
+
+type testVector struct {
+ password string
+ salt string
+ N, r, p int
+ output []byte
+}
+
+var good = []testVector{
+ {
+ "password",
+ "salt",
+ 2, 10, 10,
+ []byte{
+ 0x48, 0x2c, 0x85, 0x8e, 0x22, 0x90, 0x55, 0xe6, 0x2f,
+ 0x41, 0xe0, 0xec, 0x81, 0x9a, 0x5e, 0xe1, 0x8b, 0xdb,
+ 0x87, 0x25, 0x1a, 0x53, 0x4f, 0x75, 0xac, 0xd9, 0x5a,
+ 0xc5, 0xe5, 0xa, 0xa1, 0x5f,
+ },
+ },
+ {
+ "password",
+ "salt",
+ 16, 100, 100,
+ []byte{
+ 0x88, 0xbd, 0x5e, 0xdb, 0x52, 0xd1, 0xdd, 0x0, 0x18,
+ 0x87, 0x72, 0xad, 0x36, 0x17, 0x12, 0x90, 0x22, 0x4e,
+ 0x74, 0x82, 0x95, 0x25, 0xb1, 0x8d, 0x73, 0x23, 0xa5,
+ 0x7f, 0x91, 0x96, 0x3c, 0x37,
+ },
+ },
+ {
+ "this is a long \000 password",
+ "and this is a long \000 salt",
+ 16384, 8, 1,
+ []byte{
+ 0xc3, 0xf1, 0x82, 0xee, 0x2d, 0xec, 0x84, 0x6e, 0x70,
+ 0xa6, 0x94, 0x2f, 0xb5, 0x29, 0x98, 0x5a, 0x3a, 0x09,
+ 0x76, 0x5e, 0xf0, 0x4c, 0x61, 0x29, 0x23, 0xb1, 0x7f,
+ 0x18, 0x55, 0x5a, 0x37, 0x07, 0x6d, 0xeb, 0x2b, 0x98,
+ 0x30, 0xd6, 0x9d, 0xe5, 0x49, 0x26, 0x51, 0xe4, 0x50,
+ 0x6a, 0xe5, 0x77, 0x6d, 0x96, 0xd4, 0x0f, 0x67, 0xaa,
+ 0xee, 0x37, 0xe1, 0x77, 0x7b, 0x8a, 0xd5, 0xc3, 0x11,
+ 0x14, 0x32, 0xbb, 0x3b, 0x6f, 0x7e, 0x12, 0x64, 0x40,
+ 0x18, 0x79, 0xe6, 0x41, 0xae,
+ },
+ },
+ {
+ "p",
+ "s",
+ 2, 1, 1,
+ []byte{
+ 0x48, 0xb0, 0xd2, 0xa8, 0xa3, 0x27, 0x26, 0x11, 0x98,
+ 0x4c, 0x50, 0xeb, 0xd6, 0x30, 0xaf, 0x52,
+ },
+ },
+
+ {
+ "",
+ "",
+ 16, 1, 1,
+ []byte{
+ 0x77, 0xd6, 0x57, 0x62, 0x38, 0x65, 0x7b, 0x20, 0x3b,
+ 0x19, 0xca, 0x42, 0xc1, 0x8a, 0x04, 0x97, 0xf1, 0x6b,
+ 0x48, 0x44, 0xe3, 0x07, 0x4a, 0xe8, 0xdf, 0xdf, 0xfa,
+ 0x3f, 0xed, 0xe2, 0x14, 0x42, 0xfc, 0xd0, 0x06, 0x9d,
+ 0xed, 0x09, 0x48, 0xf8, 0x32, 0x6a, 0x75, 0x3a, 0x0f,
+ 0xc8, 0x1f, 0x17, 0xe8, 0xd3, 0xe0, 0xfb, 0x2e, 0x0d,
+ 0x36, 0x28, 0xcf, 0x35, 0xe2, 0x0c, 0x38, 0xd1, 0x89,
+ 0x06,
+ },
+ },
+ {
+ "password",
+ "NaCl",
+ 1024, 8, 16,
+ []byte{
+ 0xfd, 0xba, 0xbe, 0x1c, 0x9d, 0x34, 0x72, 0x00, 0x78,
+ 0x56, 0xe7, 0x19, 0x0d, 0x01, 0xe9, 0xfe, 0x7c, 0x6a,
+ 0xd7, 0xcb, 0xc8, 0x23, 0x78, 0x30, 0xe7, 0x73, 0x76,
+ 0x63, 0x4b, 0x37, 0x31, 0x62, 0x2e, 0xaf, 0x30, 0xd9,
+ 0x2e, 0x22, 0xa3, 0x88, 0x6f, 0xf1, 0x09, 0x27, 0x9d,
+ 0x98, 0x30, 0xda, 0xc7, 0x27, 0xaf, 0xb9, 0x4a, 0x83,
+ 0xee, 0x6d, 0x83, 0x60, 0xcb, 0xdf, 0xa2, 0xcc, 0x06,
+ 0x40,
+ },
+ },
+ {
+ "pleaseletmein", "SodiumChloride",
+ 16384, 8, 1,
+ []byte{
+ 0x70, 0x23, 0xbd, 0xcb, 0x3a, 0xfd, 0x73, 0x48, 0x46,
+ 0x1c, 0x06, 0xcd, 0x81, 0xfd, 0x38, 0xeb, 0xfd, 0xa8,
+ 0xfb, 0xba, 0x90, 0x4f, 0x8e, 0x3e, 0xa9, 0xb5, 0x43,
+ 0xf6, 0x54, 0x5d, 0xa1, 0xf2, 0xd5, 0x43, 0x29, 0x55,
+ 0x61, 0x3f, 0x0f, 0xcf, 0x62, 0xd4, 0x97, 0x05, 0x24,
+ 0x2a, 0x9a, 0xf9, 0xe6, 0x1e, 0x85, 0xdc, 0x0d, 0x65,
+ 0x1e, 0x40, 0xdf, 0xcf, 0x01, 0x7b, 0x45, 0x57, 0x58,
+ 0x87,
+ },
+ },
+ /*
+ // Disabled: needs 1 GiB RAM and takes too long for a simple test.
+ {
+ "pleaseletmein", "SodiumChloride",
+ 1048576, 8, 1,
+ []byte{
+ 0x21, 0x01, 0xcb, 0x9b, 0x6a, 0x51, 0x1a, 0xae, 0xad,
+ 0xdb, 0xbe, 0x09, 0xcf, 0x70, 0xf8, 0x81, 0xec, 0x56,
+ 0x8d, 0x57, 0x4a, 0x2f, 0xfd, 0x4d, 0xab, 0xe5, 0xee,
+ 0x98, 0x20, 0xad, 0xaa, 0x47, 0x8e, 0x56, 0xfd, 0x8f,
+ 0x4b, 0xa5, 0xd0, 0x9f, 0xfa, 0x1c, 0x6d, 0x92, 0x7c,
+ 0x40, 0xf4, 0xc3, 0x37, 0x30, 0x40, 0x49, 0xe8, 0xa9,
+ 0x52, 0xfb, 0xcb, 0xf4, 0x5c, 0x6f, 0xa7, 0x7a, 0x41,
+ 0xa4,
+ },
+ },
+ */
+}
+
+var bad = []testVector{
+ {"p", "s", 0, 1, 1, nil}, // N == 0
+ {"p", "s", 1, 1, 1, nil}, // N == 1
+ {"p", "s", 7, 8, 1, nil}, // N is not power of 2
+ {"p", "s", 16, maxInt / 2, maxInt / 2, nil}, // p * r too large
+}
+
+func TestKey(t *testing.T) {
+ for i, v := range good {
+ k, err := Key([]byte(v.password), []byte(v.salt), v.N, v.r, v.p, len(v.output))
+ if err != nil {
+ t.Errorf("%d: got unexpected error: %s", i, err)
+ }
+ if !bytes.Equal(k, v.output) {
+ t.Errorf("%d: expected %x, got %x", i, v.output, k)
+ }
+ }
+ for i, v := range bad {
+ _, err := Key([]byte(v.password), []byte(v.salt), v.N, v.r, v.p, 32)
+ if err == nil {
+ t.Errorf("%d: expected error, got nil", i)
+ }
+ }
+}
+
+func BenchmarkKey(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ Key([]byte("password"), []byte("salt"), 16384, 8, 1, 64)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/doc.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/doc.go
new file mode 100644
index 00000000000..a0ee3ae725d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/doc.go
@@ -0,0 +1,66 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package sha3 implements the SHA-3 fixed-output-length hash functions and
+// the SHAKE variable-output-length hash functions defined by FIPS-202.
+//
+// Both types of hash function use the "sponge" construction and the Keccak
+// permutation. For a detailed specification see http://keccak.noekeon.org/
+//
+//
+// Guidance
+//
+// If you aren't sure what function you need, use SHAKE256 with at least 64
+// bytes of output. The SHAKE instances are faster than the SHA3 instances;
+// the latter have to allocate memory to conform to the hash.Hash interface.
+//
+// If you need a secret-key MAC (message authentication code), prepend the
+// secret key to the input, hash with SHAKE256 and read at least 32 bytes of
+// output.
+//
+//
+// Security strengths
+//
+// The SHA3-x (x equals 224, 256, 384, or 512) functions have a security
+// strength against preimage attacks of x bits. Since they only produce "x"
+// bits of output, their collision-resistance is only "x/2" bits.
+//
+// The SHAKE-256 and -128 functions have a generic security strength of 256 and
+// 128 bits against all attacks, provided that at least 2x bits of their output
+// are used. Requesting more than 64 or 32 bytes of output, respectively, does
+// not increase the collision-resistance of the SHAKE functions.
+//
+//
+// The sponge construction
+//
+// A sponge builds a pseudo-random function from a public pseudo-random
+// permutation, by applying the permutation to a state of "rate + capacity"
+// bytes, but hiding "capacity" of the bytes.
+//
+// A sponge starts out with a zero state. To hash an input using a sponge, up
+// to "rate" bytes of the input are XORed into the sponge's state. The sponge
+// is then "full" and the permutation is applied to "empty" it. This process is
+// repeated until all the input has been "absorbed". The input is then padded.
+// The digest is "squeezed" from the sponge in the same way, except that output
+// is copied out instead of input being XORed in.
+//
+// A sponge is parameterized by its generic security strength, which is equal
+// to half its capacity; capacity + rate is equal to the permutation's width.
+// Since the KeccakF-1600 permutation is 1600 bits (200 bytes) wide, this means
+// that the security strength of a sponge instance is equal to (1600 - bitrate) / 2.
+//
+//
+// Recommendations
+//
+// The SHAKE functions are recommended for most new uses. They can produce
+// output of arbitrary length. SHAKE256, with an output length of at least
+// 64 bytes, provides 256-bit security against all attacks. The Keccak team
+// recommends it for most applications upgrading from SHA2-512. (NIST chose a
+// much stronger, but much slower, sponge instance for SHA3-512.)
+//
+// The SHA-3 functions are "drop-in" replacements for the SHA-2 functions.
+// They produce output of the same length, with the same security strengths
+// against all attacks. This means, in particular, that SHA3-256 only has
+// 128-bit collision resistance, because its output length is 32 bytes.
+package sha3 // import "golang.org/x/crypto/sha3"
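A minimal sketch of the guidance above (SHAKE256 with at least 64 bytes of output), using the ShakeSum256 helper defined later in this patch; the input bytes are only placeholders:

package main

import (
	"fmt"

	"golang.org/x/crypto/sha3"
)

func main() {
	msg := []byte("some data to hash")
	// SHAKE256 with 64 bytes of output, per the guidance above.
	out := make([]byte, 64)
	sha3.ShakeSum256(out, msg)
	fmt.Printf("%x\n", out)
}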
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/hashes.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/hashes.go
new file mode 100644
index 00000000000..2b51cf4e9b4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/hashes.go
@@ -0,0 +1,65 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package sha3
+
+// This file provides functions for creating instances of the SHA-3
+// and SHAKE hash functions, as well as utility functions for hashing
+// bytes.
+
+import (
+ "hash"
+)
+
+// New224 creates a new SHA3-224 hash.
+// Its generic security strength is 224 bits against preimage attacks,
+// and 112 bits against collision attacks.
+func New224() hash.Hash { return &state{rate: 144, outputLen: 28, dsbyte: 0x06} }
+
+// New256 creates a new SHA3-256 hash.
+// Its generic security strength is 256 bits against preimage attacks,
+// and 128 bits against collision attacks.
+func New256() hash.Hash { return &state{rate: 136, outputLen: 32, dsbyte: 0x06} }
+
+// New384 creates a new SHA3-384 hash.
+// Its generic security strength is 384 bits against preimage attacks,
+// and 192 bits against collision attacks.
+func New384() hash.Hash { return &state{rate: 104, outputLen: 48, dsbyte: 0x06} }
+
+// New512 creates a new SHA3-512 hash.
+// Its generic security strength is 512 bits against preimage attacks,
+// and 256 bits against collision attacks.
+func New512() hash.Hash { return &state{rate: 72, outputLen: 64, dsbyte: 0x06} }
+
+// Sum224 returns the SHA3-224 digest of the data.
+func Sum224(data []byte) (digest [28]byte) {
+ h := New224()
+ h.Write(data)
+ h.Sum(digest[:0])
+ return
+}
+
+// Sum256 returns the SHA3-256 digest of the data.
+func Sum256(data []byte) (digest [32]byte) {
+ h := New256()
+ h.Write(data)
+ h.Sum(digest[:0])
+ return
+}
+
+// Sum384 returns the SHA3-384 digest of the data.
+func Sum384(data []byte) (digest [48]byte) {
+ h := New384()
+ h.Write(data)
+ h.Sum(digest[:0])
+ return
+}
+
+// Sum512 returns the SHA3-512 digest of the data.
+func Sum512(data []byte) (digest [64]byte) {
+ h := New512()
+ h.Write(data)
+ h.Sum(digest[:0])
+ return
+}
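For reference, a small sketch of the two ways these constructors are typically used: the one-shot Sum functions and the streaming hash.Hash form. The input bytes are placeholders:

package main

import (
	"fmt"

	"golang.org/x/crypto/sha3"
)

func main() {
	data := []byte("hello")

	// One-shot digest.
	sum := sha3.Sum256(data)
	fmt.Printf("%x\n", sum)

	// Streaming form through the hash.Hash interface.
	h := sha3.New256()
	h.Write(data)
	fmt.Printf("%x\n", h.Sum(nil))
}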
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/keccakf.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/keccakf.go
new file mode 100644
index 00000000000..13e7058fa98
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/keccakf.go
@@ -0,0 +1,410 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package sha3
+
+// rc stores the round constants for use in the ι step.
+var rc = [24]uint64{
+ 0x0000000000000001,
+ 0x0000000000008082,
+ 0x800000000000808A,
+ 0x8000000080008000,
+ 0x000000000000808B,
+ 0x0000000080000001,
+ 0x8000000080008081,
+ 0x8000000000008009,
+ 0x000000000000008A,
+ 0x0000000000000088,
+ 0x0000000080008009,
+ 0x000000008000000A,
+ 0x000000008000808B,
+ 0x800000000000008B,
+ 0x8000000000008089,
+ 0x8000000000008003,
+ 0x8000000000008002,
+ 0x8000000000000080,
+ 0x000000000000800A,
+ 0x800000008000000A,
+ 0x8000000080008081,
+ 0x8000000000008080,
+ 0x0000000080000001,
+ 0x8000000080008008,
+}
+
+// keccakF1600 applies the Keccak permutation to a 1600-bit-wide
+// state represented as an array of 25 uint64s.
+func keccakF1600(a *[25]uint64) {
+ // Implementation translated from Keccak-inplace.c
+ // in the keccak reference code.
+ var t, bc0, bc1, bc2, bc3, bc4, d0, d1, d2, d3, d4 uint64
+
+ for i := 0; i < 24; i += 4 {
+ // Combines the 5 steps in each round into 2 steps.
+ // Unrolls 4 rounds per loop and spreads some steps across rounds.
+
+ // Round 1
+ bc0 = a[0] ^ a[5] ^ a[10] ^ a[15] ^ a[20]
+ bc1 = a[1] ^ a[6] ^ a[11] ^ a[16] ^ a[21]
+ bc2 = a[2] ^ a[7] ^ a[12] ^ a[17] ^ a[22]
+ bc3 = a[3] ^ a[8] ^ a[13] ^ a[18] ^ a[23]
+ bc4 = a[4] ^ a[9] ^ a[14] ^ a[19] ^ a[24]
+ d0 = bc4 ^ (bc1<<1 | bc1>>63)
+ d1 = bc0 ^ (bc2<<1 | bc2>>63)
+ d2 = bc1 ^ (bc3<<1 | bc3>>63)
+ d3 = bc2 ^ (bc4<<1 | bc4>>63)
+ d4 = bc3 ^ (bc0<<1 | bc0>>63)
+
+ bc0 = a[0] ^ d0
+ t = a[6] ^ d1
+ bc1 = t<<44 | t>>(64-44)
+ t = a[12] ^ d2
+ bc2 = t<<43 | t>>(64-43)
+ t = a[18] ^ d3
+ bc3 = t<<21 | t>>(64-21)
+ t = a[24] ^ d4
+ bc4 = t<<14 | t>>(64-14)
+ a[0] = bc0 ^ (bc2 &^ bc1) ^ rc[i]
+ a[6] = bc1 ^ (bc3 &^ bc2)
+ a[12] = bc2 ^ (bc4 &^ bc3)
+ a[18] = bc3 ^ (bc0 &^ bc4)
+ a[24] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[10] ^ d0
+ bc2 = t<<3 | t>>(64-3)
+ t = a[16] ^ d1
+ bc3 = t<<45 | t>>(64-45)
+ t = a[22] ^ d2
+ bc4 = t<<61 | t>>(64-61)
+ t = a[3] ^ d3
+ bc0 = t<<28 | t>>(64-28)
+ t = a[9] ^ d4
+ bc1 = t<<20 | t>>(64-20)
+ a[10] = bc0 ^ (bc2 &^ bc1)
+ a[16] = bc1 ^ (bc3 &^ bc2)
+ a[22] = bc2 ^ (bc4 &^ bc3)
+ a[3] = bc3 ^ (bc0 &^ bc4)
+ a[9] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[20] ^ d0
+ bc4 = t<<18 | t>>(64-18)
+ t = a[1] ^ d1
+ bc0 = t<<1 | t>>(64-1)
+ t = a[7] ^ d2
+ bc1 = t<<6 | t>>(64-6)
+ t = a[13] ^ d3
+ bc2 = t<<25 | t>>(64-25)
+ t = a[19] ^ d4
+ bc3 = t<<8 | t>>(64-8)
+ a[20] = bc0 ^ (bc2 &^ bc1)
+ a[1] = bc1 ^ (bc3 &^ bc2)
+ a[7] = bc2 ^ (bc4 &^ bc3)
+ a[13] = bc3 ^ (bc0 &^ bc4)
+ a[19] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[5] ^ d0
+ bc1 = t<<36 | t>>(64-36)
+ t = a[11] ^ d1
+ bc2 = t<<10 | t>>(64-10)
+ t = a[17] ^ d2
+ bc3 = t<<15 | t>>(64-15)
+ t = a[23] ^ d3
+ bc4 = t<<56 | t>>(64-56)
+ t = a[4] ^ d4
+ bc0 = t<<27 | t>>(64-27)
+ a[5] = bc0 ^ (bc2 &^ bc1)
+ a[11] = bc1 ^ (bc3 &^ bc2)
+ a[17] = bc2 ^ (bc4 &^ bc3)
+ a[23] = bc3 ^ (bc0 &^ bc4)
+ a[4] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[15] ^ d0
+ bc3 = t<<41 | t>>(64-41)
+ t = a[21] ^ d1
+ bc4 = t<<2 | t>>(64-2)
+ t = a[2] ^ d2
+ bc0 = t<<62 | t>>(64-62)
+ t = a[8] ^ d3
+ bc1 = t<<55 | t>>(64-55)
+ t = a[14] ^ d4
+ bc2 = t<<39 | t>>(64-39)
+ a[15] = bc0 ^ (bc2 &^ bc1)
+ a[21] = bc1 ^ (bc3 &^ bc2)
+ a[2] = bc2 ^ (bc4 &^ bc3)
+ a[8] = bc3 ^ (bc0 &^ bc4)
+ a[14] = bc4 ^ (bc1 &^ bc0)
+
+ // Round 2
+ bc0 = a[0] ^ a[5] ^ a[10] ^ a[15] ^ a[20]
+ bc1 = a[1] ^ a[6] ^ a[11] ^ a[16] ^ a[21]
+ bc2 = a[2] ^ a[7] ^ a[12] ^ a[17] ^ a[22]
+ bc3 = a[3] ^ a[8] ^ a[13] ^ a[18] ^ a[23]
+ bc4 = a[4] ^ a[9] ^ a[14] ^ a[19] ^ a[24]
+ d0 = bc4 ^ (bc1<<1 | bc1>>63)
+ d1 = bc0 ^ (bc2<<1 | bc2>>63)
+ d2 = bc1 ^ (bc3<<1 | bc3>>63)
+ d3 = bc2 ^ (bc4<<1 | bc4>>63)
+ d4 = bc3 ^ (bc0<<1 | bc0>>63)
+
+ bc0 = a[0] ^ d0
+ t = a[16] ^ d1
+ bc1 = t<<44 | t>>(64-44)
+ t = a[7] ^ d2
+ bc2 = t<<43 | t>>(64-43)
+ t = a[23] ^ d3
+ bc3 = t<<21 | t>>(64-21)
+ t = a[14] ^ d4
+ bc4 = t<<14 | t>>(64-14)
+ a[0] = bc0 ^ (bc2 &^ bc1) ^ rc[i+1]
+ a[16] = bc1 ^ (bc3 &^ bc2)
+ a[7] = bc2 ^ (bc4 &^ bc3)
+ a[23] = bc3 ^ (bc0 &^ bc4)
+ a[14] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[20] ^ d0
+ bc2 = t<<3 | t>>(64-3)
+ t = a[11] ^ d1
+ bc3 = t<<45 | t>>(64-45)
+ t = a[2] ^ d2
+ bc4 = t<<61 | t>>(64-61)
+ t = a[18] ^ d3
+ bc0 = t<<28 | t>>(64-28)
+ t = a[9] ^ d4
+ bc1 = t<<20 | t>>(64-20)
+ a[20] = bc0 ^ (bc2 &^ bc1)
+ a[11] = bc1 ^ (bc3 &^ bc2)
+ a[2] = bc2 ^ (bc4 &^ bc3)
+ a[18] = bc3 ^ (bc0 &^ bc4)
+ a[9] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[15] ^ d0
+ bc4 = t<<18 | t>>(64-18)
+ t = a[6] ^ d1
+ bc0 = t<<1 | t>>(64-1)
+ t = a[22] ^ d2
+ bc1 = t<<6 | t>>(64-6)
+ t = a[13] ^ d3
+ bc2 = t<<25 | t>>(64-25)
+ t = a[4] ^ d4
+ bc3 = t<<8 | t>>(64-8)
+ a[15] = bc0 ^ (bc2 &^ bc1)
+ a[6] = bc1 ^ (bc3 &^ bc2)
+ a[22] = bc2 ^ (bc4 &^ bc3)
+ a[13] = bc3 ^ (bc0 &^ bc4)
+ a[4] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[10] ^ d0
+ bc1 = t<<36 | t>>(64-36)
+ t = a[1] ^ d1
+ bc2 = t<<10 | t>>(64-10)
+ t = a[17] ^ d2
+ bc3 = t<<15 | t>>(64-15)
+ t = a[8] ^ d3
+ bc4 = t<<56 | t>>(64-56)
+ t = a[24] ^ d4
+ bc0 = t<<27 | t>>(64-27)
+ a[10] = bc0 ^ (bc2 &^ bc1)
+ a[1] = bc1 ^ (bc3 &^ bc2)
+ a[17] = bc2 ^ (bc4 &^ bc3)
+ a[8] = bc3 ^ (bc0 &^ bc4)
+ a[24] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[5] ^ d0
+ bc3 = t<<41 | t>>(64-41)
+ t = a[21] ^ d1
+ bc4 = t<<2 | t>>(64-2)
+ t = a[12] ^ d2
+ bc0 = t<<62 | t>>(64-62)
+ t = a[3] ^ d3
+ bc1 = t<<55 | t>>(64-55)
+ t = a[19] ^ d4
+ bc2 = t<<39 | t>>(64-39)
+ a[5] = bc0 ^ (bc2 &^ bc1)
+ a[21] = bc1 ^ (bc3 &^ bc2)
+ a[12] = bc2 ^ (bc4 &^ bc3)
+ a[3] = bc3 ^ (bc0 &^ bc4)
+ a[19] = bc4 ^ (bc1 &^ bc0)
+
+ // Round 3
+ bc0 = a[0] ^ a[5] ^ a[10] ^ a[15] ^ a[20]
+ bc1 = a[1] ^ a[6] ^ a[11] ^ a[16] ^ a[21]
+ bc2 = a[2] ^ a[7] ^ a[12] ^ a[17] ^ a[22]
+ bc3 = a[3] ^ a[8] ^ a[13] ^ a[18] ^ a[23]
+ bc4 = a[4] ^ a[9] ^ a[14] ^ a[19] ^ a[24]
+ d0 = bc4 ^ (bc1<<1 | bc1>>63)
+ d1 = bc0 ^ (bc2<<1 | bc2>>63)
+ d2 = bc1 ^ (bc3<<1 | bc3>>63)
+ d3 = bc2 ^ (bc4<<1 | bc4>>63)
+ d4 = bc3 ^ (bc0<<1 | bc0>>63)
+
+ bc0 = a[0] ^ d0
+ t = a[11] ^ d1
+ bc1 = t<<44 | t>>(64-44)
+ t = a[22] ^ d2
+ bc2 = t<<43 | t>>(64-43)
+ t = a[8] ^ d3
+ bc3 = t<<21 | t>>(64-21)
+ t = a[19] ^ d4
+ bc4 = t<<14 | t>>(64-14)
+ a[0] = bc0 ^ (bc2 &^ bc1) ^ rc[i+2]
+ a[11] = bc1 ^ (bc3 &^ bc2)
+ a[22] = bc2 ^ (bc4 &^ bc3)
+ a[8] = bc3 ^ (bc0 &^ bc4)
+ a[19] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[15] ^ d0
+ bc2 = t<<3 | t>>(64-3)
+ t = a[1] ^ d1
+ bc3 = t<<45 | t>>(64-45)
+ t = a[12] ^ d2
+ bc4 = t<<61 | t>>(64-61)
+ t = a[23] ^ d3
+ bc0 = t<<28 | t>>(64-28)
+ t = a[9] ^ d4
+ bc1 = t<<20 | t>>(64-20)
+ a[15] = bc0 ^ (bc2 &^ bc1)
+ a[1] = bc1 ^ (bc3 &^ bc2)
+ a[12] = bc2 ^ (bc4 &^ bc3)
+ a[23] = bc3 ^ (bc0 &^ bc4)
+ a[9] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[5] ^ d0
+ bc4 = t<<18 | t>>(64-18)
+ t = a[16] ^ d1
+ bc0 = t<<1 | t>>(64-1)
+ t = a[2] ^ d2
+ bc1 = t<<6 | t>>(64-6)
+ t = a[13] ^ d3
+ bc2 = t<<25 | t>>(64-25)
+ t = a[24] ^ d4
+ bc3 = t<<8 | t>>(64-8)
+ a[5] = bc0 ^ (bc2 &^ bc1)
+ a[16] = bc1 ^ (bc3 &^ bc2)
+ a[2] = bc2 ^ (bc4 &^ bc3)
+ a[13] = bc3 ^ (bc0 &^ bc4)
+ a[24] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[20] ^ d0
+ bc1 = t<<36 | t>>(64-36)
+ t = a[6] ^ d1
+ bc2 = t<<10 | t>>(64-10)
+ t = a[17] ^ d2
+ bc3 = t<<15 | t>>(64-15)
+ t = a[3] ^ d3
+ bc4 = t<<56 | t>>(64-56)
+ t = a[14] ^ d4
+ bc0 = t<<27 | t>>(64-27)
+ a[20] = bc0 ^ (bc2 &^ bc1)
+ a[6] = bc1 ^ (bc3 &^ bc2)
+ a[17] = bc2 ^ (bc4 &^ bc3)
+ a[3] = bc3 ^ (bc0 &^ bc4)
+ a[14] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[10] ^ d0
+ bc3 = t<<41 | t>>(64-41)
+ t = a[21] ^ d1
+ bc4 = t<<2 | t>>(64-2)
+ t = a[7] ^ d2
+ bc0 = t<<62 | t>>(64-62)
+ t = a[18] ^ d3
+ bc1 = t<<55 | t>>(64-55)
+ t = a[4] ^ d4
+ bc2 = t<<39 | t>>(64-39)
+ a[10] = bc0 ^ (bc2 &^ bc1)
+ a[21] = bc1 ^ (bc3 &^ bc2)
+ a[7] = bc2 ^ (bc4 &^ bc3)
+ a[18] = bc3 ^ (bc0 &^ bc4)
+ a[4] = bc4 ^ (bc1 &^ bc0)
+
+ // Round 4
+ bc0 = a[0] ^ a[5] ^ a[10] ^ a[15] ^ a[20]
+ bc1 = a[1] ^ a[6] ^ a[11] ^ a[16] ^ a[21]
+ bc2 = a[2] ^ a[7] ^ a[12] ^ a[17] ^ a[22]
+ bc3 = a[3] ^ a[8] ^ a[13] ^ a[18] ^ a[23]
+ bc4 = a[4] ^ a[9] ^ a[14] ^ a[19] ^ a[24]
+ d0 = bc4 ^ (bc1<<1 | bc1>>63)
+ d1 = bc0 ^ (bc2<<1 | bc2>>63)
+ d2 = bc1 ^ (bc3<<1 | bc3>>63)
+ d3 = bc2 ^ (bc4<<1 | bc4>>63)
+ d4 = bc3 ^ (bc0<<1 | bc0>>63)
+
+ bc0 = a[0] ^ d0
+ t = a[1] ^ d1
+ bc1 = t<<44 | t>>(64-44)
+ t = a[2] ^ d2
+ bc2 = t<<43 | t>>(64-43)
+ t = a[3] ^ d3
+ bc3 = t<<21 | t>>(64-21)
+ t = a[4] ^ d4
+ bc4 = t<<14 | t>>(64-14)
+ a[0] = bc0 ^ (bc2 &^ bc1) ^ rc[i+3]
+ a[1] = bc1 ^ (bc3 &^ bc2)
+ a[2] = bc2 ^ (bc4 &^ bc3)
+ a[3] = bc3 ^ (bc0 &^ bc4)
+ a[4] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[5] ^ d0
+ bc2 = t<<3 | t>>(64-3)
+ t = a[6] ^ d1
+ bc3 = t<<45 | t>>(64-45)
+ t = a[7] ^ d2
+ bc4 = t<<61 | t>>(64-61)
+ t = a[8] ^ d3
+ bc0 = t<<28 | t>>(64-28)
+ t = a[9] ^ d4
+ bc1 = t<<20 | t>>(64-20)
+ a[5] = bc0 ^ (bc2 &^ bc1)
+ a[6] = bc1 ^ (bc3 &^ bc2)
+ a[7] = bc2 ^ (bc4 &^ bc3)
+ a[8] = bc3 ^ (bc0 &^ bc4)
+ a[9] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[10] ^ d0
+ bc4 = t<<18 | t>>(64-18)
+ t = a[11] ^ d1
+ bc0 = t<<1 | t>>(64-1)
+ t = a[12] ^ d2
+ bc1 = t<<6 | t>>(64-6)
+ t = a[13] ^ d3
+ bc2 = t<<25 | t>>(64-25)
+ t = a[14] ^ d4
+ bc3 = t<<8 | t>>(64-8)
+ a[10] = bc0 ^ (bc2 &^ bc1)
+ a[11] = bc1 ^ (bc3 &^ bc2)
+ a[12] = bc2 ^ (bc4 &^ bc3)
+ a[13] = bc3 ^ (bc0 &^ bc4)
+ a[14] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[15] ^ d0
+ bc1 = t<<36 | t>>(64-36)
+ t = a[16] ^ d1
+ bc2 = t<<10 | t>>(64-10)
+ t = a[17] ^ d2
+ bc3 = t<<15 | t>>(64-15)
+ t = a[18] ^ d3
+ bc4 = t<<56 | t>>(64-56)
+ t = a[19] ^ d4
+ bc0 = t<<27 | t>>(64-27)
+ a[15] = bc0 ^ (bc2 &^ bc1)
+ a[16] = bc1 ^ (bc3 &^ bc2)
+ a[17] = bc2 ^ (bc4 &^ bc3)
+ a[18] = bc3 ^ (bc0 &^ bc4)
+ a[19] = bc4 ^ (bc1 &^ bc0)
+
+ t = a[20] ^ d0
+ bc3 = t<<41 | t>>(64-41)
+ t = a[21] ^ d1
+ bc4 = t<<2 | t>>(64-2)
+ t = a[22] ^ d2
+ bc0 = t<<62 | t>>(64-62)
+ t = a[23] ^ d3
+ bc1 = t<<55 | t>>(64-55)
+ t = a[24] ^ d4
+ bc2 = t<<39 | t>>(64-39)
+ a[20] = bc0 ^ (bc2 &^ bc1)
+ a[21] = bc1 ^ (bc3 &^ bc2)
+ a[22] = bc2 ^ (bc4 &^ bc3)
+ a[23] = bc3 ^ (bc0 &^ bc4)
+ a[24] = bc4 ^ (bc1 &^ bc0)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/register.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/register.go
new file mode 100644
index 00000000000..3cf6a22e093
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/register.go
@@ -0,0 +1,18 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.4
+
+package sha3
+
+import (
+ "crypto"
+)
+
+func init() {
+ crypto.RegisterHash(crypto.SHA3_224, New224)
+ crypto.RegisterHash(crypto.SHA3_256, New256)
+ crypto.RegisterHash(crypto.SHA3_384, New384)
+ crypto.RegisterHash(crypto.SHA3_512, New512)
+}
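Because of this registration, code built with Go 1.4 or later can obtain the SHA-3 hashes through the standard crypto package; a minimal sketch, where the blank import and input bytes are illustrative:

package main

import (
	"crypto"
	"fmt"

	_ "golang.org/x/crypto/sha3" // registers the SHA3 hashes via init on go1.4+
)

func main() {
	h := crypto.SHA3_256.New()
	h.Write([]byte("hello"))
	fmt.Printf("%x\n", h.Sum(nil))
}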
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/sha3.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/sha3.go
new file mode 100644
index 00000000000..c8fd31cb0ba
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/sha3.go
@@ -0,0 +1,193 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package sha3
+
+// spongeDirection indicates the direction bytes are flowing through the sponge.
+type spongeDirection int
+
+const (
+ // spongeAbsorbing indicates that the sponge is absorbing input.
+ spongeAbsorbing spongeDirection = iota
+ // spongeSqueezing indicates that the sponge is being squeezed.
+ spongeSqueezing
+)
+
+const (
+ // maxRate is the maximum size of the internal buffer. SHAKE-128
+ // currently needs the largest buffer.
+ maxRate = 168
+)
+
+type state struct {
+ // Generic sponge components.
+ a [25]uint64 // main state of the hash
+ buf []byte // points into storage
+ rate int // the number of bytes of state to use
+
+ // dsbyte contains the "domain separation" bits and the first bit of
+ // the padding. Sections 6.1 and 6.2 of [1] separate the outputs of the
+ // SHA-3 and SHAKE functions by appending bitstrings to the message.
+ // Using a little-endian bit-ordering convention, these are "01" for SHA-3
+ // and "1111" for SHAKE, or 00000010b and 00001111b, respectively. Then the
+ // padding rule from section 5.1 is applied to pad the message to a multiple
+ // of the rate, which involves adding a "1" bit, zero or more "0" bits, and
+ // a final "1" bit. We merge the first "1" bit from the padding into dsbyte,
+ // giving 00000110b (0x06) and 00011111b (0x1f).
+ // [1] http://csrc.nist.gov/publications/drafts/fips-202/fips_202_draft.pdf
+ // "Draft FIPS 202: SHA-3 Standard: Permutation-Based Hash and
+ // Extendable-Output Functions (May 2014)"
+ dsbyte byte
+ storage [maxRate]byte
+
+ // Specific to SHA-3 and SHAKE.
+ fixedOutput bool // whether this is a fixed-output-length instance
+ outputLen int // the default output size in bytes
+ state spongeDirection // whether the sponge is absorbing or squeezing
+}
+
+// BlockSize returns the rate of the sponge underlying this hash function.
+func (d *state) BlockSize() int { return d.rate }
+
+// Size returns the output size of the hash function in bytes.
+func (d *state) Size() int { return d.outputLen }
+
+// Reset clears the internal state by zeroing the sponge state and
+// the byte buffer, and setting the state back to absorbing.
+func (d *state) Reset() {
+ // Zero the permutation's state.
+ for i := range d.a {
+ d.a[i] = 0
+ }
+ d.state = spongeAbsorbing
+ d.buf = d.storage[:0]
+}
+
+func (d *state) clone() *state {
+ ret := *d
+ if ret.state == spongeAbsorbing {
+ ret.buf = ret.storage[:len(ret.buf)]
+ } else {
+ ret.buf = ret.storage[d.rate-cap(d.buf) : d.rate]
+ }
+
+ return &ret
+}
+
+// permute applies the KeccakF-1600 permutation. It handles
+// any input-output buffering.
+func (d *state) permute() {
+ switch d.state {
+ case spongeAbsorbing:
+ // If we're absorbing, we need to xor the input into the state
+ // before applying the permutation.
+ xorIn(d, d.buf)
+ d.buf = d.storage[:0]
+ keccakF1600(&d.a)
+ case spongeSqueezing:
+ // If we're squeezing, we need to apply the permutation before
+ // copying more output.
+ keccakF1600(&d.a)
+ d.buf = d.storage[:d.rate]
+ copyOut(d, d.buf)
+ }
+}
+
+// padAndPermute appends the domain separation bits in dsbyte, applies
+// the multi-bitrate 10..1 padding rule, and permutes the state.
+func (d *state) padAndPermute(dsbyte byte) {
+ if d.buf == nil {
+ d.buf = d.storage[:0]
+ }
+ // Pad with this instance's domain-separator bits. We know that there's
+ // at least one byte of space in d.buf because, if it were full,
+ // permute would have been called to empty it. dsbyte also contains the
+ // first one bit for the padding. See the comment in the state struct.
+ d.buf = append(d.buf, dsbyte)
+ zerosStart := len(d.buf)
+ d.buf = d.storage[:d.rate]
+ for i := zerosStart; i < d.rate; i++ {
+ d.buf[i] = 0
+ }
+ // This adds the final one bit for the padding. Because of the way that
+ // bits are numbered from the LSB upwards, the final bit is the MSB of
+ // the last byte.
+ d.buf[d.rate-1] ^= 0x80
+ // Apply the permutation
+ d.permute()
+ d.state = spongeSqueezing
+ d.buf = d.storage[:d.rate]
+ copyOut(d, d.buf)
+}
+
+// Write absorbs more data into the hash's state. It panics if more data
+// is written after output has been read from it.
+func (d *state) Write(p []byte) (written int, err error) {
+ if d.state != spongeAbsorbing {
+ panic("sha3: write to sponge after read")
+ }
+ if d.buf == nil {
+ d.buf = d.storage[:0]
+ }
+ written = len(p)
+
+ for len(p) > 0 {
+ if len(d.buf) == 0 && len(p) >= d.rate {
+ // The fast path; absorb a full "rate" bytes of input and apply the permutation.
+ xorIn(d, p[:d.rate])
+ p = p[d.rate:]
+ keccakF1600(&d.a)
+ } else {
+ // The slow path; buffer the input until we can fill the sponge, and then xor it in.
+ todo := d.rate - len(d.buf)
+ if todo > len(p) {
+ todo = len(p)
+ }
+ d.buf = append(d.buf, p[:todo]...)
+ p = p[todo:]
+
+ // If the sponge is full, apply the permutation.
+ if len(d.buf) == d.rate {
+ d.permute()
+ }
+ }
+ }
+
+ return
+}
+
+// Read squeezes an arbitrary number of bytes from the sponge.
+func (d *state) Read(out []byte) (n int, err error) {
+ // If we're still absorbing, pad and apply the permutation.
+ if d.state == spongeAbsorbing {
+ d.padAndPermute(d.dsbyte)
+ }
+
+ n = len(out)
+
+ // Now, do the squeezing.
+ for len(out) > 0 {
+ n := copy(out, d.buf)
+ d.buf = d.buf[n:]
+ out = out[n:]
+
+ // Apply the permutation if we've squeezed the sponge dry.
+ if len(d.buf) == 0 {
+ d.permute()
+ }
+ }
+
+ return
+}
+
+// Sum applies padding to the hash state and then squeezes out the desired
+// number of output bytes.
+func (d *state) Sum(in []byte) []byte {
+ // Make a copy of the original hash so that caller can keep writing
+ // and summing.
+ dup := d.clone()
+ hash := make([]byte, dup.outputLen)
+ dup.Read(hash)
+ return append(in, hash...)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/sha3_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/sha3_test.go
new file mode 100644
index 00000000000..caf72f279f1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/sha3_test.go
@@ -0,0 +1,306 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package sha3
+
+// Tests include all the ShortMsgKATs provided by the Keccak team at
+// https://github.com/gvanas/KeccakCodePackage
+//
+// They only include the zero-bit case of the bitwise testvectors
+// published by NIST in the draft of FIPS-202.
+
+import (
+ "bytes"
+ "compress/flate"
+ "encoding/hex"
+ "encoding/json"
+ "hash"
+ "os"
+ "strings"
+ "testing"
+)
+
+const (
+ testString = "brekeccakkeccak koax koax"
+ katFilename = "testdata/keccakKats.json.deflate"
+)
+
+// Internal-use instances of SHAKE used to test against KATs.
+func newHashShake128() hash.Hash {
+ return &state{rate: 168, dsbyte: 0x1f, outputLen: 512}
+}
+func newHashShake256() hash.Hash {
+ return &state{rate: 136, dsbyte: 0x1f, outputLen: 512}
+}
+
+// testDigests contains functions returning hash.Hash instances
+// with output-length equal to the KAT length for both SHA-3 and
+// SHAKE instances.
+var testDigests = map[string]func() hash.Hash{
+ "SHA3-224": New224,
+ "SHA3-256": New256,
+ "SHA3-384": New384,
+ "SHA3-512": New512,
+ "SHAKE128": newHashShake128,
+ "SHAKE256": newHashShake256,
+}
+
+// testShakes contains functions that return ShakeHash instances for
+// testing the ShakeHash-specific interface.
+var testShakes = map[string]func() ShakeHash{
+ "SHAKE128": NewShake128,
+ "SHAKE256": NewShake256,
+}
+
+// decodeHex converts a hex-encoded string into a raw byte string.
+func decodeHex(s string) []byte {
+ b, err := hex.DecodeString(s)
+ if err != nil {
+ panic(err)
+ }
+ return b
+}
+
+// structs used to marshal JSON test-cases.
+type KeccakKats struct {
+ Kats map[string][]struct {
+ Digest string `json:"digest"`
+ Length int64 `json:"length"`
+ Message string `json:"message"`
+ }
+}
+
+func testUnalignedAndGeneric(t *testing.T, testf func(impl string)) {
+ xorInOrig, copyOutOrig := xorIn, copyOut
+ xorIn, copyOut = xorInGeneric, copyOutGeneric
+ testf("generic")
+ if xorImplementationUnaligned != "generic" {
+ xorIn, copyOut = xorInUnaligned, copyOutUnaligned
+ testf("unaligned")
+ }
+ xorIn, copyOut = xorInOrig, copyOutOrig
+}
+
+// TestKeccakKats tests the SHA-3 and Shake implementations against all the
+// ShortMsgKATs from https://github.com/gvanas/KeccakCodePackage
+// (The testvectors are stored in keccakKats.json.deflate due to their length.)
+func TestKeccakKats(t *testing.T) {
+ testUnalignedAndGeneric(t, func(impl string) {
+ // Read the KATs.
+ deflated, err := os.Open(katFilename)
+ if err != nil {
+ t.Errorf("error opening %s: %s", katFilename, err)
+ }
+ file := flate.NewReader(deflated)
+ dec := json.NewDecoder(file)
+ var katSet KeccakKats
+ err = dec.Decode(&katSet)
+ if err != nil {
+ t.Errorf("error decoding KATs: %s", err)
+ }
+
+ // Do the KATs.
+ for functionName, kats := range katSet.Kats {
+ d := testDigests[functionName]()
+ for _, kat := range kats {
+ d.Reset()
+ in, err := hex.DecodeString(kat.Message)
+ if err != nil {
+ t.Errorf("error decoding KAT: %s", err)
+ }
+ d.Write(in[:kat.Length/8])
+ got := strings.ToUpper(hex.EncodeToString(d.Sum(nil)))
+ if got != kat.Digest {
+ t.Errorf("function=%s, implementation=%s, length=%d\nmessage:\n %s\ngot:\n %s\nwanted:\n %s",
+ functionName, impl, kat.Length, kat.Message, got, kat.Digest)
+ t.Logf("wanted %+v", kat)
+ t.FailNow()
+ }
+ continue
+ }
+ }
+ })
+}
+
+// testUnalignedWrite checks that writing data in an arbitrary pattern of
+// small input buffers produces the same digest as a single aligned write.
+func testUnalignedWrite(t *testing.T) {
+ testUnalignedAndGeneric(t, func(impl string) {
+ buf := sequentialBytes(0x10000)
+ for alg, df := range testDigests {
+ d := df()
+ d.Reset()
+ d.Write(buf)
+ want := d.Sum(nil)
+ d.Reset()
+ for i := 0; i < len(buf); {
+ // Cycle through offsets which make a 137 byte sequence.
+ // Because 137 is prime this sequence should exercise all corner cases.
+ offsets := [17]int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 1}
+ for _, j := range offsets {
+ if v := len(buf) - i; v < j {
+ j = v
+ }
+ d.Write(buf[i : i+j])
+ i += j
+ }
+ }
+ got := d.Sum(nil)
+ if !bytes.Equal(got, want) {
+ t.Errorf("Unaligned writes, implementation=%s, alg=%s\ngot %q, want %q", impl, alg, got, want)
+ }
+ }
+ })
+}
+
+// TestAppend checks that appending works when reallocation is necessary.
+func TestAppend(t *testing.T) {
+ testUnalignedAndGeneric(t, func(impl string) {
+ d := New224()
+
+ for capacity := 2; capacity <= 66; capacity += 64 {
+ // The first time around the loop, Sum will have to reallocate.
+ // The second time, it will not.
+ buf := make([]byte, 2, capacity)
+ d.Reset()
+ d.Write([]byte{0xcc})
+ buf = d.Sum(buf)
+ expected := "0000DF70ADC49B2E76EEE3A6931B93FA41841C3AF2CDF5B32A18B5478C39"
+ if got := strings.ToUpper(hex.EncodeToString(buf)); got != expected {
+ t.Errorf("got %s, want %s", got, expected)
+ }
+ }
+ })
+}
+
+// TestAppendNoRealloc tests that appending works when no reallocation is necessary.
+func TestAppendNoRealloc(t *testing.T) {
+ testUnalignedAndGeneric(t, func(impl string) {
+ buf := make([]byte, 1, 200)
+ d := New224()
+ d.Write([]byte{0xcc})
+ buf = d.Sum(buf)
+ expected := "00DF70ADC49B2E76EEE3A6931B93FA41841C3AF2CDF5B32A18B5478C39"
+ if got := strings.ToUpper(hex.EncodeToString(buf)); got != expected {
+ t.Errorf("%s: got %s, want %s", impl, got, expected)
+ }
+ })
+}
+
+// TestSqueezing checks that squeezing the full output a single time produces
+// the same output as repeatedly squeezing the instance.
+func TestSqueezing(t *testing.T) {
+ testUnalignedAndGeneric(t, func(impl string) {
+ for functionName, newShakeHash := range testShakes {
+ d0 := newShakeHash()
+ d0.Write([]byte(testString))
+ ref := make([]byte, 32)
+ d0.Read(ref)
+
+ d1 := newShakeHash()
+ d1.Write([]byte(testString))
+ var multiple []byte
+ for _ = range ref {
+ one := make([]byte, 1)
+ d1.Read(one)
+ multiple = append(multiple, one...)
+ }
+ if !bytes.Equal(ref, multiple) {
+ t.Errorf("%s (%s): squeezing %d bytes one at a time failed", functionName, impl, len(ref))
+ }
+ }
+ })
+}
+
+// sequentialBytes produces a buffer of size consecutive bytes 0x00, 0x01, ..., used for testing.
+func sequentialBytes(size int) []byte {
+ result := make([]byte, size)
+ for i := range result {
+ result[i] = byte(i)
+ }
+ return result
+}
+
+// BenchmarkPermutationFunction measures the speed of the permutation function
+// with no input data.
+func BenchmarkPermutationFunction(b *testing.B) {
+ b.SetBytes(int64(200))
+ var lanes [25]uint64
+ for i := 0; i < b.N; i++ {
+ keccakF1600(&lanes)
+ }
+}
+
+// benchmarkHash tests the speed to hash num buffers of size bytes each.
+func benchmarkHash(b *testing.B, h hash.Hash, size, num int) {
+ b.StopTimer()
+ h.Reset()
+ data := sequentialBytes(size)
+ b.SetBytes(int64(size * num))
+ b.StartTimer()
+
+ var state []byte
+ for i := 0; i < b.N; i++ {
+ for j := 0; j < num; j++ {
+ h.Write(data)
+ }
+ state = h.Sum(state[:0])
+ }
+ b.StopTimer()
+ h.Reset()
+}
+
+// benchmarkShake is specialized to the Shake instances, which don't
+// require a copy on reading output.
+func benchmarkShake(b *testing.B, h ShakeHash, size, num int) {
+ b.StopTimer()
+ h.Reset()
+ data := sequentialBytes(size)
+ d := make([]byte, 32)
+
+ b.SetBytes(int64(size * num))
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ h.Reset()
+ for j := 0; j < num; j++ {
+ h.Write(data)
+ }
+ h.Read(d)
+ }
+}
+
+func BenchmarkSha3_512_MTU(b *testing.B) { benchmarkHash(b, New512(), 1350, 1) }
+func BenchmarkSha3_384_MTU(b *testing.B) { benchmarkHash(b, New384(), 1350, 1) }
+func BenchmarkSha3_256_MTU(b *testing.B) { benchmarkHash(b, New256(), 1350, 1) }
+func BenchmarkSha3_224_MTU(b *testing.B) { benchmarkHash(b, New224(), 1350, 1) }
+
+func BenchmarkShake128_MTU(b *testing.B) { benchmarkShake(b, NewShake128(), 1350, 1) }
+func BenchmarkShake256_MTU(b *testing.B) { benchmarkShake(b, NewShake256(), 1350, 1) }
+func BenchmarkShake256_16x(b *testing.B) { benchmarkShake(b, NewShake256(), 16, 1024) }
+func BenchmarkShake256_1MiB(b *testing.B) { benchmarkShake(b, NewShake256(), 1024, 1024) }
+
+func BenchmarkSha3_512_1MiB(b *testing.B) { benchmarkHash(b, New512(), 1024, 1024) }
+
+func Example_sum() {
+ buf := []byte("some data to hash")
+ // A hash needs to be 64 bytes long to have 256-bit collision resistance.
+ h := make([]byte, 64)
+ // Compute a 64-byte hash of buf and put it in h.
+ ShakeSum256(h, buf)
+}
+
+func Example_mac() {
+ k := []byte("this is a secret key; you should generate a strong random key that's at least 32 bytes long")
+ buf := []byte("and this is some data to authenticate")
+ // A MAC with 32 bytes of output has 256-bit security strength -- if you use at least a 32-byte-long key.
+ h := make([]byte, 32)
+ d := NewShake256()
+ // Write the key into the hash.
+ d.Write(k)
+ // Now write the data.
+ d.Write(buf)
+ // Read 32 bytes of output from the hash into h.
+ d.Read(h)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/shake.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/shake.go
new file mode 100644
index 00000000000..841f9860f03
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/shake.go
@@ -0,0 +1,60 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package sha3
+
+// This file defines the ShakeHash interface, and provides
+// functions for creating SHAKE instances, as well as utility
+// functions for hashing bytes to arbitrary-length output.
+
+import (
+ "io"
+)
+
+// ShakeHash defines the interface to hash functions that
+// support arbitrary-length output.
+type ShakeHash interface {
+ // Write absorbs more data into the hash's state. It panics if input is
+ // written to it after output has been read from it.
+ io.Writer
+
+ // Read reads more output from the hash; reading affects the hash's
+ // state. (ShakeHash.Read is thus very different from Hash.Sum)
+ // It never returns an error.
+ io.Reader
+
+ // Clone returns a copy of the ShakeHash in its current state.
+ Clone() ShakeHash
+
+ // Reset resets the ShakeHash to its initial state.
+ Reset()
+}
+
+func (d *state) Clone() ShakeHash {
+ return d.clone()
+}
+
+// NewShake128 creates a new SHAKE128 variable-output-length ShakeHash.
+// Its generic security strength is 128 bits against all attacks if at
+// least 32 bytes of its output are used.
+func NewShake128() ShakeHash { return &state{rate: 168, dsbyte: 0x1f} }
+
+// NewShake256 creates a new SHAKE256 variable-output-length ShakeHash.
+// Its generic security strength is 256 bits against all attacks if
+// at least 64 bytes of its output are used.
+func NewShake256() ShakeHash { return &state{rate: 136, dsbyte: 0x1f} }
+
+// ShakeSum128 writes an arbitrary-length digest of data into hash.
+func ShakeSum128(hash, data []byte) {
+ h := NewShake128()
+ h.Write(data)
+ h.Read(hash)
+}
+
+// ShakeSum256 writes an arbitrary-length digest of data into hash.
+func ShakeSum256(hash, data []byte) {
+ h := NewShake256()
+ h.Write(data)
+ h.Read(hash)
+}
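A small sketch of the streaming ShakeHash interface defined above: output can be squeezed in arbitrary chunks, and the concatenation matches a single larger Read. The chunk sizes here are arbitrary choices:

package main

import (
	"fmt"

	"golang.org/x/crypto/sha3"
)

func main() {
	d := sha3.NewShake256()
	d.Write([]byte("some input"))

	// Read 64 bytes of output in two 32-byte chunks; together they equal
	// one 64-byte Read from an identical instance.
	out := make([]byte, 32)
	d.Read(out)
	fmt.Printf("%x", out)
	d.Read(out)
	fmt.Printf("%x\n", out)
}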
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/testdata/keccakKats.json.deflate b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/testdata/keccakKats.json.deflate
new file mode 100644
index 00000000000..62e85ae2423
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/testdata/keccakKats.json.deflate
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor.go
new file mode 100644
index 00000000000..d622979c115
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor.go
@@ -0,0 +1,16 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !amd64,!386 appengine
+
+package sha3
+
+var (
+ xorIn = xorInGeneric
+ copyOut = copyOutGeneric
+ xorInUnaligned = xorInGeneric
+ copyOutUnaligned = copyOutGeneric
+)
+
+const xorImplementationUnaligned = "generic"
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor_generic.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor_generic.go
new file mode 100644
index 00000000000..fd35f02ef6e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor_generic.go
@@ -0,0 +1,28 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package sha3
+
+import "encoding/binary"
+
+// xorInGeneric xors the bytes in buf into the state; it
+// makes no non-portable assumptions about memory layout
+// or alignment.
+func xorInGeneric(d *state, buf []byte) {
+ n := len(buf) / 8
+
+ for i := 0; i < n; i++ {
+ a := binary.LittleEndian.Uint64(buf)
+ d.a[i] ^= a
+ buf = buf[8:]
+ }
+}
+
+// copyOutGeneric copies uint64s to a byte buffer.
+func copyOutGeneric(d *state, b []byte) {
+ for i := 0; len(b) >= 8; i++ {
+ binary.LittleEndian.PutUint64(b, d.a[i])
+ b = b[8:]
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor_unaligned.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor_unaligned.go
new file mode 100644
index 00000000000..c7851a1d850
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/sha3/xor_unaligned.go
@@ -0,0 +1,58 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build amd64 386
+// +build !appengine
+
+package sha3
+
+import "unsafe"
+
+func xorInUnaligned(d *state, buf []byte) {
+ bw := (*[maxRate / 8]uint64)(unsafe.Pointer(&buf[0]))
+ n := len(buf)
+ if n >= 72 {
+ d.a[0] ^= bw[0]
+ d.a[1] ^= bw[1]
+ d.a[2] ^= bw[2]
+ d.a[3] ^= bw[3]
+ d.a[4] ^= bw[4]
+ d.a[5] ^= bw[5]
+ d.a[6] ^= bw[6]
+ d.a[7] ^= bw[7]
+ d.a[8] ^= bw[8]
+ }
+ if n >= 104 {
+ d.a[9] ^= bw[9]
+ d.a[10] ^= bw[10]
+ d.a[11] ^= bw[11]
+ d.a[12] ^= bw[12]
+ }
+ if n >= 136 {
+ d.a[13] ^= bw[13]
+ d.a[14] ^= bw[14]
+ d.a[15] ^= bw[15]
+ d.a[16] ^= bw[16]
+ }
+ if n >= 144 {
+ d.a[17] ^= bw[17]
+ }
+ if n >= 168 {
+ d.a[18] ^= bw[18]
+ d.a[19] ^= bw[19]
+ d.a[20] ^= bw[20]
+ }
+}
+
+func copyOutUnaligned(d *state, buf []byte) {
+ ab := (*[maxRate]uint8)(unsafe.Pointer(&d.a[0]))
+ copy(buf, ab[:])
+}
+
+var (
+ xorIn = xorInUnaligned
+ copyOut = copyOutUnaligned
+)
+
+const xorImplementationUnaligned = "unaligned"
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/client.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/client.go
new file mode 100644
index 00000000000..8c856a08c18
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/client.go
@@ -0,0 +1,615 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+ Package agent implements a client for an ssh-agent daemon.
+
+References:
+ [PROTOCOL.agent]: http://cvsweb.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL.agent?rev=HEAD
+*/
+package agent // import "golang.org/x/crypto/ssh/agent"
+
+import (
+ "bytes"
+ "crypto/dsa"
+ "crypto/ecdsa"
+ "crypto/elliptic"
+ "crypto/rsa"
+ "encoding/base64"
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "io"
+ "math/big"
+ "sync"
+
+ "golang.org/x/crypto/ssh"
+)
+
+// Agent represents the capabilities of an ssh-agent.
+type Agent interface {
+ // List returns the identities known to the agent.
+ List() ([]*Key, error)
+
+ // Sign has the agent sign the data using a protocol 2 key as defined
+ // in [PROTOCOL.agent] section 2.6.2.
+ Sign(key ssh.PublicKey, data []byte) (*ssh.Signature, error)
+
+ // Add adds a private key to the agent.
+ Add(key AddedKey) error
+
+ // Remove removes all identities with the given public key.
+ Remove(key ssh.PublicKey) error
+
+ // RemoveAll removes all identities.
+ RemoveAll() error
+
+ // Lock locks the agent. Sign and Remove will fail, and List will return an empty list.
+ Lock(passphrase []byte) error
+
+ // Unlock undoes the effect of Lock.
+ Unlock(passphrase []byte) error
+
+ // Signers returns signers for all the known keys.
+ Signers() ([]ssh.Signer, error)
+}
+
+// AddedKey describes an SSH key to be added to an Agent.
+type AddedKey struct {
+ // PrivateKey must be a *rsa.PrivateKey, *dsa.PrivateKey or
+ // *ecdsa.PrivateKey, which will be inserted into the agent.
+ PrivateKey interface{}
+ // Certificate, if not nil, is communicated to the agent and will be
+ // stored with the key.
+ Certificate *ssh.Certificate
+ // Comment is an optional, free-form string.
+ Comment string
+ // LifetimeSecs, if not zero, is the number of seconds that the
+ // agent will store the key for.
+ LifetimeSecs uint32
+ // ConfirmBeforeUse, if true, requests that the agent confirm with the
+ // user before each use of this key.
+ ConfirmBeforeUse bool
+}
+
+// See [PROTOCOL.agent], section 3.
+const (
+ agentRequestV1Identities = 1
+
+ // 3.2 Requests from client to agent for protocol 2 key operations
+ agentAddIdentity = 17
+ agentRemoveIdentity = 18
+ agentRemoveAllIdentities = 19
+ agentAddIdConstrained = 25
+
+ // 3.3 Key-type independent requests from client to agent
+ agentAddSmartcardKey = 20
+ agentRemoveSmartcardKey = 21
+ agentLock = 22
+ agentUnlock = 23
+ agentAddSmartcardKeyConstrained = 26
+
+ // 3.7 Key constraint identifiers
+ agentConstrainLifetime = 1
+ agentConstrainConfirm = 2
+)
+
+// maxAgentResponseBytes is the maximum agent reply size that is accepted. This
+// is a sanity check, not a limit in the spec.
+const maxAgentResponseBytes = 16 << 20
+
+// Agent messages:
+// These structures mirror the wire format of the corresponding ssh agent
+// messages found in [PROTOCOL.agent].
+
+// 3.4 Generic replies from agent to client
+const agentFailure = 5
+
+type failureAgentMsg struct{}
+
+const agentSuccess = 6
+
+type successAgentMsg struct{}
+
+// See [PROTOCOL.agent], section 2.5.2.
+const agentRequestIdentities = 11
+
+type requestIdentitiesAgentMsg struct{}
+
+// See [PROTOCOL.agent], section 2.5.2.
+const agentIdentitiesAnswer = 12
+
+type identitiesAnswerAgentMsg struct {
+ NumKeys uint32 `sshtype:"12"`
+ Keys []byte `ssh:"rest"`
+}
+
+// See [PROTOCOL.agent], section 2.6.2.
+const agentSignRequest = 13
+
+type signRequestAgentMsg struct {
+ KeyBlob []byte `sshtype:"13"`
+ Data []byte
+ Flags uint32
+}
+
+// See [PROTOCOL.agent], section 2.6.2.
+
+// 3.6 Replies from agent to client for protocol 2 key operations
+const agentSignResponse = 14
+
+type signResponseAgentMsg struct {
+ SigBlob []byte `sshtype:"14"`
+}
+
+type publicKey struct {
+ Format string
+ Rest []byte `ssh:"rest"`
+}
+
+// Key represents a protocol 2 public key as defined in
+// [PROTOCOL.agent], section 2.5.2.
+type Key struct {
+ Format string
+ Blob []byte
+ Comment string
+}
+
+func clientErr(err error) error {
+ return fmt.Errorf("agent: client error: %v", err)
+}
+
+// String returns the storage form of an agent key with the format, base64
+// encoded serialized key, and the comment if it is not empty.
+func (k *Key) String() string {
+ s := string(k.Format) + " " + base64.StdEncoding.EncodeToString(k.Blob)
+
+ if k.Comment != "" {
+ s += " " + k.Comment
+ }
+
+ return s
+}
+
+// Type returns the public key type.
+func (k *Key) Type() string {
+ return k.Format
+}
+
+// Marshal returns the key blob to satisfy the ssh.PublicKey interface.
+func (k *Key) Marshal() []byte {
+ return k.Blob
+}
+
+// Verify satisfies the ssh.PublicKey interface, but is not
+// implemented for agent keys.
+func (k *Key) Verify(data []byte, sig *ssh.Signature) error {
+ return errors.New("agent: agent key does not know how to verify")
+}
+
+type wireKey struct {
+ Format string
+ Rest []byte `ssh:"rest"`
+}
+
+func parseKey(in []byte) (out *Key, rest []byte, err error) {
+ var record struct {
+ Blob []byte
+ Comment string
+ Rest []byte `ssh:"rest"`
+ }
+
+ if err := ssh.Unmarshal(in, &record); err != nil {
+ return nil, nil, err
+ }
+
+ var wk wireKey
+ if err := ssh.Unmarshal(record.Blob, &wk); err != nil {
+ return nil, nil, err
+ }
+
+ return &Key{
+ Format: wk.Format,
+ Blob: record.Blob,
+ Comment: record.Comment,
+ }, record.Rest, nil
+}
+
+// client is a client for an ssh-agent process.
+type client struct {
+ // conn is typically a *net.UnixConn
+ conn io.ReadWriter
+ // mu is used to prevent concurrent access to the agent
+ mu sync.Mutex
+}
+
+// NewClient returns an Agent that talks to an ssh-agent process over
+// the given connection.
+func NewClient(rw io.ReadWriter) Agent {
+ return &client{conn: rw}
+}
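A minimal sketch of wiring NewClient to a running ssh-agent over the socket named by SSH_AUTH_SOCK and listing its keys; the socket lookup and error handling here are illustrative, not part of this package:

package main

import (
	"log"
	"net"
	"os"

	"golang.org/x/crypto/ssh/agent"
)

func main() {
	// Connect to the agent over its unix-domain socket.
	sock, err := net.Dial("unix", os.Getenv("SSH_AUTH_SOCK"))
	if err != nil {
		log.Fatal(err)
	}
	ag := agent.NewClient(sock)

	keys, err := ag.List()
	if err != nil {
		log.Fatal(err)
	}
	for _, k := range keys {
		log.Println(k.String())
	}
}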
+
+// call sends an RPC to the agent. On success, the reply is unmarshaled
+// into a message struct chosen by the type tag in the first byte of the
+// reply, and that struct is returned as reply.
+func (c *client) call(req []byte) (reply interface{}, err error) {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+
+ msg := make([]byte, 4+len(req))
+ binary.BigEndian.PutUint32(msg, uint32(len(req)))
+ copy(msg[4:], req)
+ if _, err = c.conn.Write(msg); err != nil {
+ return nil, clientErr(err)
+ }
+
+ var respSizeBuf [4]byte
+ if _, err = io.ReadFull(c.conn, respSizeBuf[:]); err != nil {
+ return nil, clientErr(err)
+ }
+ respSize := binary.BigEndian.Uint32(respSizeBuf[:])
+ if respSize > maxAgentResponseBytes {
+ return nil, clientErr(errors.New("response too large"))
+ }
+
+ buf := make([]byte, respSize)
+ if _, err = io.ReadFull(c.conn, buf); err != nil {
+ return nil, clientErr(err)
+ }
+ reply, err = unmarshal(buf)
+ if err != nil {
+ return nil, clientErr(err)
+ }
+ return reply, err
+}
+
+func (c *client) simpleCall(req []byte) error {
+ resp, err := c.call(req)
+ if err != nil {
+ return err
+ }
+ if _, ok := resp.(*successAgentMsg); ok {
+ return nil
+ }
+ return errors.New("agent: failure")
+}
+
+func (c *client) RemoveAll() error {
+ return c.simpleCall([]byte{agentRemoveAllIdentities})
+}
+
+func (c *client) Remove(key ssh.PublicKey) error {
+ req := ssh.Marshal(&agentRemoveIdentityMsg{
+ KeyBlob: key.Marshal(),
+ })
+ return c.simpleCall(req)
+}
+
+func (c *client) Lock(passphrase []byte) error {
+ req := ssh.Marshal(&agentLockMsg{
+ Passphrase: passphrase,
+ })
+ return c.simpleCall(req)
+}
+
+func (c *client) Unlock(passphrase []byte) error {
+ req := ssh.Marshal(&agentUnlockMsg{
+ Passphrase: passphrase,
+ })
+ return c.simpleCall(req)
+}
+
+// List returns the identities known to the agent.
+func (c *client) List() ([]*Key, error) {
+ // see [PROTOCOL.agent] section 2.5.2.
+ req := []byte{agentRequestIdentities}
+
+ msg, err := c.call(req)
+ if err != nil {
+ return nil, err
+ }
+
+ switch msg := msg.(type) {
+ case *identitiesAnswerAgentMsg:
+ if msg.NumKeys > maxAgentResponseBytes/8 {
+ return nil, errors.New("agent: too many keys in agent reply")
+ }
+ keys := make([]*Key, msg.NumKeys)
+ data := msg.Keys
+ for i := uint32(0); i < msg.NumKeys; i++ {
+ var key *Key
+ var err error
+ if key, data, err = parseKey(data); err != nil {
+ return nil, err
+ }
+ keys[i] = key
+ }
+ return keys, nil
+ case *failureAgentMsg:
+ return nil, errors.New("agent: failed to list keys")
+ }
+ panic("unreachable")
+}
+
+// Sign has the agent sign the data using a protocol 2 key as defined
+// in [PROTOCOL.agent] section 2.6.2.
+func (c *client) Sign(key ssh.PublicKey, data []byte) (*ssh.Signature, error) {
+ req := ssh.Marshal(signRequestAgentMsg{
+ KeyBlob: key.Marshal(),
+ Data: data,
+ })
+
+ msg, err := c.call(req)
+ if err != nil {
+ return nil, err
+ }
+
+ switch msg := msg.(type) {
+ case *signResponseAgentMsg:
+ var sig ssh.Signature
+ if err := ssh.Unmarshal(msg.SigBlob, &sig); err != nil {
+ return nil, err
+ }
+
+ return &sig, nil
+ case *failureAgentMsg:
+ return nil, errors.New("agent: failed to sign challenge")
+ }
+ panic("unreachable")
+}
+
+// unmarshal parses an agent message in packet, returning the parsed
+// form of the message.
+func unmarshal(packet []byte) (interface{}, error) {
+ if len(packet) < 1 {
+ return nil, errors.New("agent: empty packet")
+ }
+ var msg interface{}
+ switch packet[0] {
+ case agentFailure:
+ return new(failureAgentMsg), nil
+ case agentSuccess:
+ return new(successAgentMsg), nil
+ case agentIdentitiesAnswer:
+ msg = new(identitiesAnswerAgentMsg)
+ case agentSignResponse:
+ msg = new(signResponseAgentMsg)
+ default:
+ return nil, fmt.Errorf("agent: unknown type tag %d", packet[0])
+ }
+ if err := ssh.Unmarshal(packet, msg); err != nil {
+ return nil, err
+ }
+ return msg, nil
+}
+
+type rsaKeyMsg struct {
+ Type string `sshtype:"17"`
+ N *big.Int
+ E *big.Int
+ D *big.Int
+ Iqmp *big.Int // IQMP = Inverse Q Mod P
+ P *big.Int
+ Q *big.Int
+ Comments string
+ Constraints []byte `ssh:"rest"`
+}
+
+type dsaKeyMsg struct {
+ Type string `sshtype:"17"`
+ P *big.Int
+ Q *big.Int
+ G *big.Int
+ Y *big.Int
+ X *big.Int
+ Comments string
+ Constraints []byte `ssh:"rest"`
+}
+
+type ecdsaKeyMsg struct {
+ Type string `sshtype:"17"`
+ Curve string
+ KeyBytes []byte
+ D *big.Int
+ Comments string
+ Constraints []byte `ssh:"rest"`
+}
+
+// insertKey adds a private key to the agent.
+func (c *client) insertKey(s interface{}, comment string, constraints []byte) error {
+ var req []byte
+ switch k := s.(type) {
+ case *rsa.PrivateKey:
+ if len(k.Primes) != 2 {
+ return fmt.Errorf("agent: unsupported RSA key with %d primes", len(k.Primes))
+ }
+ k.Precompute()
+ req = ssh.Marshal(rsaKeyMsg{
+ Type: ssh.KeyAlgoRSA,
+ N: k.N,
+ E: big.NewInt(int64(k.E)),
+ D: k.D,
+ Iqmp: k.Precomputed.Qinv,
+ P: k.Primes[0],
+ Q: k.Primes[1],
+ Comments: comment,
+ Constraints: constraints,
+ })
+ case *dsa.PrivateKey:
+ req = ssh.Marshal(dsaKeyMsg{
+ Type: ssh.KeyAlgoDSA,
+ P: k.P,
+ Q: k.Q,
+ G: k.G,
+ Y: k.Y,
+ X: k.X,
+ Comments: comment,
+ Constraints: constraints,
+ })
+ case *ecdsa.PrivateKey:
+ nistID := fmt.Sprintf("nistp%d", k.Params().BitSize)
+ req = ssh.Marshal(ecdsaKeyMsg{
+ Type: "ecdsa-sha2-" + nistID,
+ Curve: nistID,
+ KeyBytes: elliptic.Marshal(k.Curve, k.X, k.Y),
+ D: k.D,
+ Comments: comment,
+ Constraints: constraints,
+ })
+ default:
+ return fmt.Errorf("agent: unsupported key type %T", s)
+ }
+
+ // if constraints are present then the message type needs to be changed.
+ if len(constraints) != 0 {
+ req[0] = agentAddIdConstrained
+ }
+
+ resp, err := c.call(req)
+ if err != nil {
+ return err
+ }
+ if _, ok := resp.(*successAgentMsg); ok {
+ return nil
+ }
+ return errors.New("agent: failure")
+}
+
+type rsaCertMsg struct {
+ Type string `sshtype:"17"`
+ CertBytes []byte
+ D *big.Int
+ Iqmp *big.Int // IQMP = Inverse Q Mod P
+ P *big.Int
+ Q *big.Int
+ Comments string
+ Constraints []byte `ssh:"rest"`
+}
+
+type dsaCertMsg struct {
+ Type string `sshtype:"17"`
+ CertBytes []byte
+ X *big.Int
+ Comments string
+ Constraints []byte `ssh:"rest"`
+}
+
+type ecdsaCertMsg struct {
+ Type string `sshtype:"17"`
+ CertBytes []byte
+ D *big.Int
+ Comments string
+ Constraints []byte `ssh:"rest"`
+}
+
+// Add adds a private key to the agent. If a certificate is given,
+// that certificate is added as the public key instead.
+func (c *client) Add(key AddedKey) error {
+ var constraints []byte
+
+ if secs := key.LifetimeSecs; secs != 0 {
+ constraints = append(constraints, agentConstrainLifetime)
+
+ var secsBytes [4]byte
+ binary.BigEndian.PutUint32(secsBytes[:], secs)
+ constraints = append(constraints, secsBytes[:]...)
+ }
+
+ if key.ConfirmBeforeUse {
+ constraints = append(constraints, agentConstrainConfirm)
+ }
+
+ if cert := key.Certificate; cert == nil {
+ return c.insertKey(key.PrivateKey, key.Comment, constraints)
+ } else {
+ return c.insertCert(key.PrivateKey, cert, key.Comment, constraints)
+ }
+}
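+
+// Illustrative sketch (not from the upstream sources): adding a key that the
+// agent forgets after an hour and that requires confirmation before each use.
+// The ag variable, the generated key, and the comment string are placeholders.
+//
+//    priv, err := rsa.GenerateKey(rand.Reader, 2048)
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    err = ag.Add(AddedKey{
+//        PrivateKey:       priv,
+//        Comment:          "scratch key",
+//        LifetimeSecs:     3600,
+//        ConfirmBeforeUse: true,
+//    })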
+
+func (c *client) insertCert(s interface{}, cert *ssh.Certificate, comment string, constraints []byte) error {
+ var req []byte
+ switch k := s.(type) {
+ case *rsa.PrivateKey:
+ if len(k.Primes) != 2 {
+ return fmt.Errorf("agent: unsupported RSA key with %d primes", len(k.Primes))
+ }
+ k.Precompute()
+ req = ssh.Marshal(rsaCertMsg{
+ Type: cert.Type(),
+ CertBytes: cert.Marshal(),
+ D: k.D,
+ Iqmp: k.Precomputed.Qinv,
+ P: k.Primes[0],
+ Q: k.Primes[1],
+ Comments: comment,
+ Constraints: constraints,
+ })
+ case *dsa.PrivateKey:
+ req = ssh.Marshal(dsaCertMsg{
+ Type: cert.Type(),
+ CertBytes: cert.Marshal(),
+ X: k.X,
+ Comments: comment,
+ })
+ case *ecdsa.PrivateKey:
+ req = ssh.Marshal(ecdsaCertMsg{
+ Type: cert.Type(),
+ CertBytes: cert.Marshal(),
+ D: k.D,
+ Comments: comment,
+ })
+ default:
+ return fmt.Errorf("agent: unsupported key type %T", s)
+ }
+
+ // if constraints are present then the message type needs to be changed.
+ if len(constraints) != 0 {
+ req[0] = agentAddIdConstrained
+ }
+
+ signer, err := ssh.NewSignerFromKey(s)
+ if err != nil {
+ return err
+ }
+ if !bytes.Equal(cert.Key.Marshal(), signer.PublicKey().Marshal()) {
+ return errors.New("agent: signer and cert have different public key")
+ }
+
+ resp, err := c.call(req)
+ if err != nil {
+ return err
+ }
+ if _, ok := resp.(*successAgentMsg); ok {
+ return nil
+ }
+ return errors.New("agent: failure")
+}
+
+// Signers returns a signer for each key held by the agent; the method
+// can be used as a callback for client authentication (for example via
+// ssh.PublicKeysCallback).
+func (c *client) Signers() ([]ssh.Signer, error) {
+ keys, err := c.List()
+ if err != nil {
+ return nil, err
+ }
+
+ var result []ssh.Signer
+ for _, k := range keys {
+ result = append(result, &agentKeyringSigner{c, k})
+ }
+ return result, nil
+}
+
+type agentKeyringSigner struct {
+ agent *client
+ pub ssh.PublicKey
+}
+
+func (s *agentKeyringSigner) PublicKey() ssh.PublicKey {
+ return s.pub
+}
+
+func (s *agentKeyringSigner) Sign(rand io.Reader, data []byte) (*ssh.Signature, error) {
+ // The agent has its own entropy source, so the rand argument is ignored.
+ return s.agent.Sign(s.pub, data)
+}
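+
+// Illustrative sketch (not part of the upstream sources): a typical way to
+// consume this client is to dial the socket named by SSH_AUTH_SOCK, wrap it
+// with NewClient, and feed the agent-backed signers into an ssh.ClientConfig.
+// The user name and host address below are placeholders.
+//
+//    sock, err := net.Dial("unix", os.Getenv("SSH_AUTH_SOCK"))
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    ag := NewClient(sock)
+//    config := &ssh.ClientConfig{
+//        User: "gopher",
+//        Auth: []ssh.AuthMethod{ssh.PublicKeysCallback(ag.Signers)},
+//    }
+//    conn, err := ssh.Dial("tcp", "example.com:22", config)
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    defer conn.Close()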
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/client_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/client_test.go
new file mode 100644
index 00000000000..ec7198d549d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/client_test.go
@@ -0,0 +1,287 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package agent
+
+import (
+ "bytes"
+ "crypto/rand"
+ "errors"
+ "net"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strconv"
+ "testing"
+
+ "golang.org/x/crypto/ssh"
+)
+
+// startAgent executes ssh-agent and returns an Agent interface to it.
+func startAgent(t *testing.T) (client Agent, socket string, cleanup func()) {
+ if testing.Short() {
+ // ssh-agent is not always available, and the key
+ // types supported vary by platform.
+ t.Skip("skipping test due to -short")
+ }
+
+ bin, err := exec.LookPath("ssh-agent")
+ if err != nil {
+ t.Skip("could not find ssh-agent")
+ }
+
+ cmd := exec.Command(bin, "-s")
+ out, err := cmd.Output()
+ if err != nil {
+ t.Fatalf("cmd.Output: %v", err)
+ }
+
+ /* Output looks like:
+
+ SSH_AUTH_SOCK=/tmp/ssh-P65gpcqArqvH/agent.15541; export SSH_AUTH_SOCK;
+ SSH_AGENT_PID=15542; export SSH_AGENT_PID;
+ echo Agent pid 15542;
+ */
+ fields := bytes.Split(out, []byte(";"))
+ line := bytes.SplitN(fields[0], []byte("="), 2)
+ line[0] = bytes.TrimLeft(line[0], "\n")
+ if string(line[0]) != "SSH_AUTH_SOCK" {
+ t.Fatalf("could not find key SSH_AUTH_SOCK in %q", fields[0])
+ }
+ socket = string(line[1])
+
+ line = bytes.SplitN(fields[2], []byte("="), 2)
+ line[0] = bytes.TrimLeft(line[0], "\n")
+ if string(line[0]) != "SSH_AGENT_PID" {
+ t.Fatalf("could not find key SSH_AGENT_PID in %q", fields[2])
+ }
+ pidStr := line[1]
+ pid, err := strconv.Atoi(string(pidStr))
+ if err != nil {
+ t.Fatalf("Atoi(%q): %v", pidStr, err)
+ }
+
+ conn, err := net.Dial("unix", string(socket))
+ if err != nil {
+ t.Fatalf("net.Dial: %v", err)
+ }
+
+ ac := NewClient(conn)
+ return ac, socket, func() {
+ proc, _ := os.FindProcess(pid)
+ if proc != nil {
+ proc.Kill()
+ }
+ conn.Close()
+ os.RemoveAll(filepath.Dir(socket))
+ }
+}
+
+func testAgent(t *testing.T, key interface{}, cert *ssh.Certificate, lifetimeSecs uint32) {
+ agent, _, cleanup := startAgent(t)
+ defer cleanup()
+
+ testAgentInterface(t, agent, key, cert, lifetimeSecs)
+}
+
+func testAgentInterface(t *testing.T, agent Agent, key interface{}, cert *ssh.Certificate, lifetimeSecs uint32) {
+ signer, err := ssh.NewSignerFromKey(key)
+ if err != nil {
+ t.Fatalf("NewSignerFromKey(%T): %v", key, err)
+ }
+ // The agent should start up empty.
+ if keys, err := agent.List(); err != nil {
+ t.Fatalf("RequestIdentities: %v", err)
+ } else if len(keys) > 0 {
+ t.Fatalf("got %d keys, want 0: %v", len(keys), keys)
+ }
+
+ // Attempt to insert the key, with certificate if specified.
+ var pubKey ssh.PublicKey
+ if cert != nil {
+ err = agent.Add(AddedKey{
+ PrivateKey: key,
+ Certificate: cert,
+ Comment: "comment",
+ LifetimeSecs: lifetimeSecs,
+ })
+ pubKey = cert
+ } else {
+ err = agent.Add(AddedKey{PrivateKey: key, Comment: "comment", LifetimeSecs: lifetimeSecs})
+ pubKey = signer.PublicKey()
+ }
+ if err != nil {
+ t.Fatalf("insert(%T): %v", key, err)
+ }
+
+ // Did the key get inserted successfully?
+ if keys, err := agent.List(); err != nil {
+ t.Fatalf("List: %v", err)
+ } else if len(keys) != 1 {
+ t.Fatalf("got %v, want 1 key", keys)
+ } else if keys[0].Comment != "comment" {
+ t.Fatalf("key comment: got %v, want %v", keys[0].Comment, "comment")
+ } else if !bytes.Equal(keys[0].Blob, pubKey.Marshal()) {
+ t.Fatalf("key mismatch")
+ }
+
+ // Can the agent make a valid signature?
+ data := []byte("hello")
+ sig, err := agent.Sign(pubKey, data)
+ if err != nil {
+ t.Fatalf("Sign(%s): %v", pubKey.Type(), err)
+ }
+
+ if err := pubKey.Verify(data, sig); err != nil {
+ t.Fatalf("Verify(%s): %v", pubKey.Type(), err)
+ }
+}
+
+func TestAgent(t *testing.T) {
+ for _, keyType := range []string{"rsa", "dsa", "ecdsa"} {
+ testAgent(t, testPrivateKeys[keyType], nil, 0)
+ }
+}
+
+func TestCert(t *testing.T) {
+ cert := &ssh.Certificate{
+ Key: testPublicKeys["rsa"],
+ ValidBefore: ssh.CertTimeInfinity,
+ CertType: ssh.UserCert,
+ }
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+
+ testAgent(t, testPrivateKeys["rsa"], cert, 0)
+}
+
+func TestConstraints(t *testing.T) {
+ testAgent(t, testPrivateKeys["rsa"], nil, 3600 /* lifetime in seconds */)
+}
+
+// netPipe is analogous to net.Pipe, but it uses a real net.Conn, and
+// therefore is buffered (net.Pipe deadlocks if both sides start with
+// a write).
+func netPipe() (net.Conn, net.Conn, error) {
+ listener, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ return nil, nil, err
+ }
+ defer listener.Close()
+ c1, err := net.Dial("tcp", listener.Addr().String())
+ if err != nil {
+ return nil, nil, err
+ }
+
+ c2, err := listener.Accept()
+ if err != nil {
+ c1.Close()
+ return nil, nil, err
+ }
+
+ return c1, c2, nil
+}
+
+func TestAuth(t *testing.T) {
+ a, b, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+
+ defer a.Close()
+ defer b.Close()
+
+ agent, _, cleanup := startAgent(t)
+ defer cleanup()
+
+ if err := agent.Add(AddedKey{PrivateKey: testPrivateKeys["rsa"], Comment: "comment"}); err != nil {
+ t.Errorf("Add: %v", err)
+ }
+
+ serverConf := ssh.ServerConfig{}
+ serverConf.AddHostKey(testSigners["rsa"])
+ serverConf.PublicKeyCallback = func(c ssh.ConnMetadata, key ssh.PublicKey) (*ssh.Permissions, error) {
+ if bytes.Equal(key.Marshal(), testPublicKeys["rsa"].Marshal()) {
+ return nil, nil
+ }
+
+ return nil, errors.New("pubkey rejected")
+ }
+
+ go func() {
+ conn, _, _, err := ssh.NewServerConn(a, &serverConf)
+ if err != nil {
+ t.Fatalf("Server: %v", err)
+ }
+ conn.Close()
+ }()
+
+ conf := ssh.ClientConfig{}
+ conf.Auth = append(conf.Auth, ssh.PublicKeysCallback(agent.Signers))
+ conn, _, _, err := ssh.NewClientConn(b, "", &conf)
+ if err != nil {
+ t.Fatalf("NewClientConn: %v", err)
+ }
+ conn.Close()
+}
+
+func TestLockClient(t *testing.T) {
+ agent, _, cleanup := startAgent(t)
+ defer cleanup()
+ testLockAgent(agent, t)
+}
+
+func testLockAgent(agent Agent, t *testing.T) {
+ if err := agent.Add(AddedKey{PrivateKey: testPrivateKeys["rsa"], Comment: "comment 1"}); err != nil {
+ t.Errorf("Add: %v", err)
+ }
+ if err := agent.Add(AddedKey{PrivateKey: testPrivateKeys["dsa"], Comment: "comment dsa"}); err != nil {
+ t.Errorf("Add: %v", err)
+ }
+ if keys, err := agent.List(); err != nil {
+ t.Errorf("List: %v", err)
+ } else if len(keys) != 2 {
+ t.Errorf("Want 2 keys, got %v", keys)
+ }
+
+ passphrase := []byte("secret")
+ if err := agent.Lock(passphrase); err != nil {
+ t.Errorf("Lock: %v", err)
+ }
+
+ if keys, err := agent.List(); err != nil {
+ t.Errorf("List: %v", err)
+ } else if len(keys) != 0 {
+ t.Errorf("Want 0 keys, got %v", keys)
+ }
+
+ signer, _ := ssh.NewSignerFromKey(testPrivateKeys["rsa"])
+ if _, err := agent.Sign(signer.PublicKey(), []byte("hello")); err == nil {
+ t.Fatalf("Sign did not fail")
+ }
+
+ if err := agent.Remove(signer.PublicKey()); err == nil {
+ t.Fatalf("Remove did not fail")
+ }
+
+ if err := agent.RemoveAll(); err == nil {
+ t.Fatalf("RemoveAll did not fail")
+ }
+
+ if err := agent.Unlock(nil); err == nil {
+ t.Errorf("Unlock with wrong passphrase succeeded")
+ }
+ if err := agent.Unlock(passphrase); err != nil {
+ t.Errorf("Unlock: %v", err)
+ }
+
+ if err := agent.Remove(signer.PublicKey()); err != nil {
+ t.Fatalf("Remove: %v", err)
+ }
+
+ if keys, err := agent.List(); err != nil {
+ t.Errorf("List: %v", err)
+ } else if len(keys) != 1 {
+ t.Errorf("Want 1 keys, got %v", keys)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/forward.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/forward.go
new file mode 100644
index 00000000000..fd24ba900d2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/forward.go
@@ -0,0 +1,103 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package agent
+
+import (
+ "errors"
+ "io"
+ "net"
+ "sync"
+
+ "golang.org/x/crypto/ssh"
+)
+
+// RequestAgentForwarding sets up agent forwarding for the session.
+// ForwardToAgent or ForwardToRemote should be called to route
+// the authentication requests.
+func RequestAgentForwarding(session *ssh.Session) error {
+ ok, err := session.SendRequest("auth-agent-req@openssh.com", true, nil)
+ if err != nil {
+ return err
+ }
+ if !ok {
+ return errors.New("forwarding request denied")
+ }
+ return nil
+}
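+
+// Illustrative sketch (not part of the upstream sources): wiring the pieces
+// together for agent forwarding over an existing *ssh.Client. The client and
+// keyring variables are placeholders.
+//
+//    if err := ForwardToAgent(client, keyring); err != nil {
+//        log.Fatal(err)
+//    }
+//    session, err := client.NewSession()
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    defer session.Close()
+//    if err := RequestAgentForwarding(session); err != nil {
+//        log.Fatal(err)
+//    }
+//    // Commands run in this session can now reach the local keyring,
+//    // provided the remote sshd exposes the forwarded channel via
+//    // SSH_AUTH_SOCK.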
+
+// ForwardToAgent routes authentication requests to the given keyring.
+func ForwardToAgent(client *ssh.Client, keyring Agent) error {
+ channels := client.HandleChannelOpen(channelType)
+ if channels == nil {
+ return errors.New("agent: already have handler for " + channelType)
+ }
+
+ go func() {
+ for ch := range channels {
+ channel, reqs, err := ch.Accept()
+ if err != nil {
+ continue
+ }
+ go ssh.DiscardRequests(reqs)
+ go func() {
+ ServeAgent(keyring, channel)
+ channel.Close()
+ }()
+ }
+ }()
+ return nil
+}
+
+const channelType = "auth-agent@openssh.com"
+
+// ForwardToRemote routes authentication requests to the ssh-agent
+// process serving on the given unix socket.
+func ForwardToRemote(client *ssh.Client, addr string) error {
+ channels := client.HandleChannelOpen(channelType)
+ if channels == nil {
+ return errors.New("agent: already have handler for " + channelType)
+ }
+ conn, err := net.Dial("unix", addr)
+ if err != nil {
+ return err
+ }
+ conn.Close()
+
+ go func() {
+ for ch := range channels {
+ channel, reqs, err := ch.Accept()
+ if err != nil {
+ continue
+ }
+ go ssh.DiscardRequests(reqs)
+ go forwardUnixSocket(channel, addr)
+ }
+ }()
+ return nil
+}
+
+func forwardUnixSocket(channel ssh.Channel, addr string) {
+ conn, err := net.Dial("unix", addr)
+ if err != nil {
+ return
+ }
+
+ var wg sync.WaitGroup
+ wg.Add(2)
+ go func() {
+ io.Copy(conn, channel)
+ conn.(*net.UnixConn).CloseWrite()
+ wg.Done()
+ }()
+ go func() {
+ io.Copy(channel, conn)
+ channel.CloseWrite()
+ wg.Done()
+ }()
+
+ wg.Wait()
+ conn.Close()
+ channel.Close()
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/keyring.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/keyring.go
new file mode 100644
index 00000000000..12ffa82b1a0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/keyring.go
@@ -0,0 +1,184 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package agent
+
+import (
+ "bytes"
+ "crypto/rand"
+ "crypto/subtle"
+ "errors"
+ "fmt"
+ "sync"
+
+ "golang.org/x/crypto/ssh"
+)
+
+type privKey struct {
+ signer ssh.Signer
+ comment string
+}
+
+type keyring struct {
+ mu sync.Mutex
+ keys []privKey
+
+ locked bool
+ passphrase []byte
+}
+
+var errLocked = errors.New("agent: locked")
+
+// NewKeyring returns an Agent that holds keys in memory. It is safe
+// for concurrent use by multiple goroutines.
+func NewKeyring() Agent {
+ return &keyring{}
+}
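+
+// Illustrative sketch (not from the upstream sources): an in-memory keyring
+// can stand in for a real ssh-agent, for example in tests. The pemBytes
+// variable is a placeholder for a PEM-encoded private key.
+//
+//    priv, err := ssh.ParseRawPrivateKey(pemBytes)
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    kr := NewKeyring()
+//    if err := kr.Add(AddedKey{PrivateKey: priv, Comment: "test key"}); err != nil {
+//        log.Fatal(err)
+//    }
+//    signers, err := kr.Signers()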
+
+// RemoveAll removes all identities.
+func (r *keyring) RemoveAll() error {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+ if r.locked {
+ return errLocked
+ }
+
+ r.keys = nil
+ return nil
+}
+
+// Remove removes all identities with the given public key.
+func (r *keyring) Remove(key ssh.PublicKey) error {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+ if r.locked {
+ return errLocked
+ }
+
+ want := key.Marshal()
+ found := false
+ for i := 0; i < len(r.keys); {
+ if bytes.Equal(r.keys[i].signer.PublicKey().Marshal(), want) {
+ found = true
+ r.keys[i] = r.keys[len(r.keys)-1]
+ r.keys = r.keys[:len(r.keys)-1]
+ continue
+ } else {
+ i++
+ }
+ }
+
+ if !found {
+ return errors.New("agent: key not found")
+ }
+ return nil
+}
+
+// Lock locks the agent. Sign and Remove will fail, and List will return an empty list.
+func (r *keyring) Lock(passphrase []byte) error {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+ if r.locked {
+ return errLocked
+ }
+
+ r.locked = true
+ r.passphrase = passphrase
+ return nil
+}
+
+// Unlock undoes the effect of Lock.
+func (r *keyring) Unlock(passphrase []byte) error {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+ if !r.locked {
+ return errors.New("agent: not locked")
+ }
+ if len(passphrase) != len(r.passphrase) || 1 != subtle.ConstantTimeCompare(passphrase, r.passphrase) {
+ return fmt.Errorf("agent: incorrect passphrase")
+ }
+
+ r.locked = false
+ r.passphrase = nil
+ return nil
+}
+
+// List returns the identities known to the agent.
+func (r *keyring) List() ([]*Key, error) {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+ if r.locked {
+ // section 2.7: locked agents return empty.
+ return nil, nil
+ }
+
+ var ids []*Key
+ for _, k := range r.keys {
+ pub := k.signer.PublicKey()
+ ids = append(ids, &Key{
+ Format: pub.Type(),
+ Blob: pub.Marshal(),
+ Comment: k.comment})
+ }
+ return ids, nil
+}
+
+// Add adds a private key to the keyring. If a certificate
+// is given, that certificate is added as the public key. Note that
+// any constraints given are ignored.
+func (r *keyring) Add(key AddedKey) error {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+ if r.locked {
+ return errLocked
+ }
+ signer, err := ssh.NewSignerFromKey(key.PrivateKey)
+
+ if err != nil {
+ return err
+ }
+
+ if cert := key.Certificate; cert != nil {
+ signer, err = ssh.NewCertSigner(cert, signer)
+ if err != nil {
+ return err
+ }
+ }
+
+ r.keys = append(r.keys, privKey{signer, key.Comment})
+
+ return nil
+}
+
+// Sign returns a signature for the data.
+func (r *keyring) Sign(key ssh.PublicKey, data []byte) (*ssh.Signature, error) {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+ if r.locked {
+ return nil, errLocked
+ }
+
+ wanted := key.Marshal()
+ for _, k := range r.keys {
+ if bytes.Equal(k.signer.PublicKey().Marshal(), wanted) {
+ return k.signer.Sign(rand.Reader, data)
+ }
+ }
+ return nil, errors.New("not found")
+}
+
+// Signers returns signers for all the known keys.
+func (r *keyring) Signers() ([]ssh.Signer, error) {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+ if r.locked {
+ return nil, errLocked
+ }
+
+ s := make([]ssh.Signer, 0, len(r.keys))
+ for _, k := range r.keys {
+ s = append(s, k.signer)
+ }
+ return s, nil
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/keyring_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/keyring_test.go
new file mode 100644
index 00000000000..7f05905712d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/keyring_test.go
@@ -0,0 +1,78 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package agent
+
+import (
+ "testing"
+)
+
+func addTestKey(t *testing.T, a Agent, keyName string) {
+ err := a.Add(AddedKey{
+ PrivateKey: testPrivateKeys[keyName],
+ Comment: keyName,
+ })
+ if err != nil {
+ t.Fatalf("failed to add key %q: %v", keyName, err)
+ }
+}
+
+func removeTestKey(t *testing.T, a Agent, keyName string) {
+ err := a.Remove(testPublicKeys[keyName])
+ if err != nil {
+ t.Fatalf("failed to remove key %q: %v", keyName, err)
+ }
+}
+
+func validateListedKeys(t *testing.T, a Agent, expectedKeys []string) {
+ listedKeys, err := a.List()
+ if err != nil {
+ t.Fatalf("failed to list keys: %v", err)
+ return
+ }
+ actualKeys := make(map[string]bool)
+ for _, key := range listedKeys {
+ actualKeys[key.Comment] = true
+ }
+
+ matchedKeys := make(map[string]bool)
+ for _, expectedKey := range expectedKeys {
+ if !actualKeys[expectedKey] {
+ t.Fatalf("expected key %q, but was not found", expectedKey)
+ } else {
+ matchedKeys[expectedKey] = true
+ }
+ }
+
+ for actualKey := range actualKeys {
+ if !matchedKeys[actualKey] {
+ t.Fatalf("key %q was found, but was not expected", actualKey)
+ }
+ }
+}
+
+func TestKeyringAddingAndRemoving(t *testing.T) {
+ keyNames := []string{"dsa", "ecdsa", "rsa", "user"}
+
+ // add all test private keys
+ k := NewKeyring()
+ for _, keyName := range keyNames {
+ addTestKey(t, k, keyName)
+ }
+ validateListedKeys(t, k, keyNames)
+
+ // remove a key in the middle
+ keyToRemove := keyNames[1]
+ keyNames = append(keyNames[:1], keyNames[2:]...)
+
+ removeTestKey(t, k, keyToRemove)
+ validateListedKeys(t, k, keyNames)
+
+ // remove all keys
+ err := k.RemoveAll()
+ if err != nil {
+ t.Fatalf("failed to remove all keys: %v", err)
+ }
+ validateListedKeys(t, k, []string{})
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/server.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/server.go
new file mode 100644
index 00000000000..b21a20180fe
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/server.go
@@ -0,0 +1,209 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package agent
+
+import (
+ "crypto/rsa"
+ "encoding/binary"
+ "fmt"
+ "io"
+ "log"
+ "math/big"
+
+ "golang.org/x/crypto/ssh"
+)
+
+// server wraps an Agent and uses it to implement the agent side of
+// the SSH agent wire protocol.
+type server struct {
+ agent Agent
+}
+
+func (s *server) processRequestBytes(reqData []byte) []byte {
+ rep, err := s.processRequest(reqData)
+ if err != nil {
+ if err != errLocked {
+ // TODO(hanwen): provide better logging interface?
+ log.Printf("agent %d: %v", reqData[0], err)
+ }
+ return []byte{agentFailure}
+ }
+
+ if err == nil && rep == nil {
+ return []byte{agentSuccess}
+ }
+
+ return ssh.Marshal(rep)
+}
+
+func marshalKey(k *Key) []byte {
+ var record struct {
+ Blob []byte
+ Comment string
+ }
+ record.Blob = k.Marshal()
+ record.Comment = k.Comment
+
+ return ssh.Marshal(&record)
+}
+
+type agentV1IdentityMsg struct {
+ Numkeys uint32 `sshtype:"2"`
+}
+
+type agentRemoveIdentityMsg struct {
+ KeyBlob []byte `sshtype:"18"`
+}
+
+type agentLockMsg struct {
+ Passphrase []byte `sshtype:"22"`
+}
+
+type agentUnlockMsg struct {
+ Passphrase []byte `sshtype:"23"`
+}
+
+func (s *server) processRequest(data []byte) (interface{}, error) {
+ switch data[0] {
+ case agentRequestV1Identities:
+ return &agentV1IdentityMsg{0}, nil
+ case agentRemoveIdentity:
+ var req agentRemoveIdentityMsg
+ if err := ssh.Unmarshal(data, &req); err != nil {
+ return nil, err
+ }
+
+ var wk wireKey
+ if err := ssh.Unmarshal(req.KeyBlob, &wk); err != nil {
+ return nil, err
+ }
+
+ return nil, s.agent.Remove(&Key{Format: wk.Format, Blob: req.KeyBlob})
+
+ case agentRemoveAllIdentities:
+ return nil, s.agent.RemoveAll()
+
+ case agentLock:
+ var req agentLockMsg
+ if err := ssh.Unmarshal(data, &req); err != nil {
+ return nil, err
+ }
+
+ return nil, s.agent.Lock(req.Passphrase)
+
+ case agentUnlock:
+ var req agentLockMsg
+ if err := ssh.Unmarshal(data, &req); err != nil {
+ return nil, err
+ }
+ return nil, s.agent.Unlock(req.Passphrase)
+
+ case agentSignRequest:
+ var req signRequestAgentMsg
+ if err := ssh.Unmarshal(data, &req); err != nil {
+ return nil, err
+ }
+
+ var wk wireKey
+ if err := ssh.Unmarshal(req.KeyBlob, &wk); err != nil {
+ return nil, err
+ }
+
+ k := &Key{
+ Format: wk.Format,
+ Blob: req.KeyBlob,
+ }
+
+ sig, err := s.agent.Sign(k, req.Data) // TODO(hanwen): flags.
+ if err != nil {
+ return nil, err
+ }
+ return &signResponseAgentMsg{SigBlob: ssh.Marshal(sig)}, nil
+ case agentRequestIdentities:
+ keys, err := s.agent.List()
+ if err != nil {
+ return nil, err
+ }
+
+ rep := identitiesAnswerAgentMsg{
+ NumKeys: uint32(len(keys)),
+ }
+ for _, k := range keys {
+ rep.Keys = append(rep.Keys, marshalKey(k)...)
+ }
+ return rep, nil
+ case agentAddIdentity:
+ return nil, s.insertIdentity(data)
+ }
+
+ return nil, fmt.Errorf("unknown opcode %d", data[0])
+}
+
+func (s *server) insertIdentity(req []byte) error {
+ var record struct {
+ Type string `sshtype:"17"`
+ Rest []byte `ssh:"rest"`
+ }
+ if err := ssh.Unmarshal(req, &record); err != nil {
+ return err
+ }
+
+ switch record.Type {
+ case ssh.KeyAlgoRSA:
+ var k rsaKeyMsg
+ if err := ssh.Unmarshal(req, &k); err != nil {
+ return err
+ }
+
+ priv := rsa.PrivateKey{
+ PublicKey: rsa.PublicKey{
+ E: int(k.E.Int64()),
+ N: k.N,
+ },
+ D: k.D,
+ Primes: []*big.Int{k.P, k.Q},
+ }
+ priv.Precompute()
+
+ return s.agent.Add(AddedKey{PrivateKey: &priv, Comment: k.Comments})
+ }
+ return fmt.Errorf("not implemented: %s", record.Type)
+}
+
+// ServeAgent serves the agent protocol on the given connection. It
+// returns when an I/O error occurs.
+func ServeAgent(agent Agent, c io.ReadWriter) error {
+ s := &server{agent}
+
+ var length [4]byte
+ for {
+ if _, err := io.ReadFull(c, length[:]); err != nil {
+ return err
+ }
+ l := binary.BigEndian.Uint32(length[:])
+ if l > maxAgentResponseBytes {
+ // We also cap requests.
+ return fmt.Errorf("agent: request too large: %d", l)
+ }
+
+ req := make([]byte, l)
+ if _, err := io.ReadFull(c, req); err != nil {
+ return err
+ }
+
+ repData := s.processRequestBytes(req)
+ if len(repData) > maxAgentResponseBytes {
+ return fmt.Errorf("agent: reply too large: %d bytes", len(repData))
+ }
+
+ binary.BigEndian.PutUint32(length[:], uint32(len(repData)))
+ if _, err := c.Write(length[:]); err != nil {
+ return err
+ }
+ if _, err := c.Write(repData); err != nil {
+ return err
+ }
+ }
+}
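+
+// Illustrative sketch (not part of the upstream sources): exposing a keyring
+// over a unix socket so that other processes can talk to it using the agent
+// protocol. The socket path is a placeholder.
+//
+//    ln, err := net.Listen("unix", "/tmp/my-agent.sock")
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    keyring := NewKeyring()
+//    for {
+//        conn, err := ln.Accept()
+//        if err != nil {
+//            log.Fatal(err)
+//        }
+//        go func() {
+//            ServeAgent(keyring, conn)
+//            conn.Close()
+//        }()
+//    }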
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/server_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/server_test.go
new file mode 100644
index 00000000000..ef0ab293487
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/server_test.go
@@ -0,0 +1,77 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package agent
+
+import (
+ "testing"
+
+ "golang.org/x/crypto/ssh"
+)
+
+func TestServer(t *testing.T) {
+ c1, c2, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+ defer c1.Close()
+ defer c2.Close()
+ client := NewClient(c1)
+
+ go ServeAgent(NewKeyring(), c2)
+
+ testAgentInterface(t, client, testPrivateKeys["rsa"], nil, 0)
+}
+
+func TestLockServer(t *testing.T) {
+ testLockAgent(NewKeyring(), t)
+}
+
+func TestSetupForwardAgent(t *testing.T) {
+ a, b, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+
+ defer a.Close()
+ defer b.Close()
+
+ _, socket, cleanup := startAgent(t)
+ defer cleanup()
+
+ serverConf := ssh.ServerConfig{
+ NoClientAuth: true,
+ }
+ serverConf.AddHostKey(testSigners["rsa"])
+ incoming := make(chan *ssh.ServerConn, 1)
+ go func() {
+ conn, _, _, err := ssh.NewServerConn(a, &serverConf)
+ if err != nil {
+ t.Fatalf("Server: %v", err)
+ }
+ incoming <- conn
+ }()
+
+ conf := ssh.ClientConfig{}
+ conn, chans, reqs, err := ssh.NewClientConn(b, "", &conf)
+ if err != nil {
+ t.Fatalf("NewClientConn: %v", err)
+ }
+ client := ssh.NewClient(conn, chans, reqs)
+
+ if err := ForwardToRemote(client, socket); err != nil {
+ t.Fatalf("SetupForwardAgent: %v", err)
+ }
+
+ server := <-incoming
+ ch, reqs, err := server.OpenChannel(channelType, nil)
+ if err != nil {
+ t.Fatalf("OpenChannel(%q): %v", channelType, err)
+ }
+ go ssh.DiscardRequests(reqs)
+
+ agentClient := NewClient(ch)
+ testAgentInterface(t, agentClient, testPrivateKeys["rsa"], nil, 0)
+ conn.Close()
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/testdata_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/testdata_test.go
new file mode 100644
index 00000000000..b7a8781e1a5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/agent/testdata_test.go
@@ -0,0 +1,64 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// IMPLEMENTOR NOTE: To avoid a package loop, this file is in three places:
+// ssh/, ssh/agent, and ssh/test/. It should be kept in sync across all three
+// instances.
+
+package agent
+
+import (
+ "crypto/rand"
+ "fmt"
+
+ "golang.org/x/crypto/ssh"
+ "golang.org/x/crypto/ssh/testdata"
+)
+
+var (
+ testPrivateKeys map[string]interface{}
+ testSigners map[string]ssh.Signer
+ testPublicKeys map[string]ssh.PublicKey
+)
+
+func init() {
+ var err error
+
+ n := len(testdata.PEMBytes)
+ testPrivateKeys = make(map[string]interface{}, n)
+ testSigners = make(map[string]ssh.Signer, n)
+ testPublicKeys = make(map[string]ssh.PublicKey, n)
+ for t, k := range testdata.PEMBytes {
+ testPrivateKeys[t], err = ssh.ParseRawPrivateKey(k)
+ if err != nil {
+ panic(fmt.Sprintf("Unable to parse test key %s: %v", t, err))
+ }
+ testSigners[t], err = ssh.NewSignerFromKey(testPrivateKeys[t])
+ if err != nil {
+ panic(fmt.Sprintf("Unable to create signer for test key %s: %v", t, err))
+ }
+ testPublicKeys[t] = testSigners[t].PublicKey()
+ }
+
+ // Create a cert and sign it for use in tests.
+ testCert := &ssh.Certificate{
+ Nonce: []byte{}, // To pass reflect.DeepEqual after marshal & parse, this must be non-nil
+ ValidPrincipals: []string{"gopher1", "gopher2"}, // increases test coverage
+ ValidAfter: 0, // unix epoch
+ ValidBefore: ssh.CertTimeInfinity, // The end of currently representable time.
+ Reserved: []byte{}, // To pass reflect.DeepEqual after marshal & parse, this must be non-nil
+ Key: testPublicKeys["ecdsa"],
+ SignatureKey: testPublicKeys["rsa"],
+ Permissions: ssh.Permissions{
+ CriticalOptions: map[string]string{},
+ Extensions: map[string]string{},
+ },
+ }
+ testCert.SignCert(rand.Reader, testSigners["rsa"])
+ testPrivateKeys["cert"] = testPrivateKeys["ecdsa"]
+ testSigners["cert"], err = ssh.NewCertSigner(testCert, testSigners["ecdsa"])
+ if err != nil {
+ panic(fmt.Sprintf("Unable to create certificate signer: %v", err))
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/benchmark_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/benchmark_test.go
new file mode 100644
index 00000000000..d9f7eb9b60a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/benchmark_test.go
@@ -0,0 +1,122 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "errors"
+ "io"
+ "net"
+ "testing"
+)
+
+type server struct {
+ *ServerConn
+ chans <-chan NewChannel
+}
+
+func newServer(c net.Conn, conf *ServerConfig) (*server, error) {
+ sconn, chans, reqs, err := NewServerConn(c, conf)
+ if err != nil {
+ return nil, err
+ }
+ go DiscardRequests(reqs)
+ return &server{sconn, chans}, nil
+}
+
+func (s *server) Accept() (NewChannel, error) {
+ n, ok := <-s.chans
+ if !ok {
+ return nil, io.EOF
+ }
+ return n, nil
+}
+
+func sshPipe() (Conn, *server, error) {
+ c1, c2, err := netPipe()
+ if err != nil {
+ return nil, nil, err
+ }
+
+ clientConf := ClientConfig{
+ User: "user",
+ }
+ serverConf := ServerConfig{
+ NoClientAuth: true,
+ }
+ serverConf.AddHostKey(testSigners["ecdsa"])
+ done := make(chan *server, 1)
+ go func() {
+ server, err := newServer(c2, &serverConf)
+ if err != nil {
+ done <- nil
+ }
+ done <- server
+ }()
+
+ client, _, reqs, err := NewClientConn(c1, "", &clientConf)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ server := <-done
+ if server == nil {
+ return nil, nil, errors.New("server handshake failed")
+ }
+ go DiscardRequests(reqs)
+
+ return client, server, nil
+}
+
+func BenchmarkEndToEnd(b *testing.B) {
+ b.StopTimer()
+
+ client, server, err := sshPipe()
+ if err != nil {
+ b.Fatalf("sshPipe: %v", err)
+ }
+
+ defer client.Close()
+ defer server.Close()
+
+ size := (1 << 20)
+ input := make([]byte, size)
+ output := make([]byte, size)
+ b.SetBytes(int64(size))
+ done := make(chan int, 1)
+
+ go func() {
+ newCh, err := server.Accept()
+ if err != nil {
+ b.Fatalf("Client: %v", err)
+ }
+ ch, incoming, err := newCh.Accept()
+ if err != nil {
+ b.Fatalf("Accept: %v", err)
+ }
+ go DiscardRequests(incoming)
+ for i := 0; i < b.N; i++ {
+ if _, err := io.ReadFull(ch, output); err != nil {
+ b.Fatalf("ReadFull: %v", err)
+ }
+ }
+ ch.Close()
+ done <- 1
+ }()
+
+ ch, in, err := client.OpenChannel("speed", nil)
+ if err != nil {
+ b.Fatalf("OpenChannel: %v", err)
+ }
+ go DiscardRequests(in)
+
+ b.ResetTimer()
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if _, err := ch.Write(input); err != nil {
+ b.Fatalf("WriteFull: %v", err)
+ }
+ }
+ ch.Close()
+ b.StopTimer()
+
+ <-done
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/buffer.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/buffer.go
new file mode 100644
index 00000000000..6931b5114fe
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/buffer.go
@@ -0,0 +1,98 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "io"
+ "sync"
+)
+
+// buffer provides a linked list buffer for data exchange
+// between producer and consumer. The buffer is effectively unbounded,
+// since it links the caller-provided slices together rather than
+// copying them into a fixed-size backing array.
+type buffer struct {
+ // protects concurrent access to head, tail and closed
+ *sync.Cond
+
+ head *element // the buffer that will be read first
+ tail *element // the buffer that will be read last
+
+ closed bool
+}
+
+// An element represents a single link in a linked list.
+type element struct {
+ buf []byte
+ next *element
+}
+
+// newBuffer returns an empty buffer that is not closed.
+func newBuffer() *buffer {
+ e := new(element)
+ b := &buffer{
+ Cond: newCond(),
+ head: e,
+ tail: e,
+ }
+ return b
+}
+
+// write makes buf available for Read to receive.
+// buf must not be modified after the call to write.
+func (b *buffer) write(buf []byte) {
+ b.Cond.L.Lock()
+ e := &element{buf: buf}
+ b.tail.next = e
+ b.tail = e
+ b.Cond.Signal()
+ b.Cond.L.Unlock()
+}
+
+// eof closes the buffer. Reads from the buffer once all
+// the data has been consumed will receive io.EOF.
+func (b *buffer) eof() error {
+ b.Cond.L.Lock()
+ b.closed = true
+ b.Cond.Signal()
+ b.Cond.L.Unlock()
+ return nil
+}
+
+// Read reads data from the internal buffer into buf. It blocks
+// until data is available or the buffer is closed.
+func (b *buffer) Read(buf []byte) (n int, err error) {
+ b.Cond.L.Lock()
+ defer b.Cond.L.Unlock()
+
+ for len(buf) > 0 {
+ // if there is data in b.head, copy it
+ if len(b.head.buf) > 0 {
+ r := copy(buf, b.head.buf)
+ buf, b.head.buf = buf[r:], b.head.buf[r:]
+ n += r
+ continue
+ }
+ // if there is a next buffer, make it the head
+ if len(b.head.buf) == 0 && b.head != b.tail {
+ b.head = b.head.next
+ continue
+ }
+
+ // if at least one byte has been copied, return
+ if n > 0 {
+ break
+ }
+
+ // if nothing was read, and there is nothing outstanding
+ // check to see if the buffer is closed.
+ if b.closed {
+ err = io.EOF
+ break
+ }
+ // out of buffers, wait for producer
+ b.Cond.Wait()
+ }
+ return
+}
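+
+// Illustrative sketch (not part of the upstream sources): the expected
+// producer/consumer interaction, mirroring how channel code uses this type.
+//
+//    b := newBuffer()
+//    b.write([]byte("hello")) // producer side
+//    b.eof()                  // no more data will arrive
+//
+//    p := make([]byte, 16)
+//    n, err := b.Read(p) // n == 5, err == nil
+//    _, err = b.Read(p)  // err == io.EOF once the data is drained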
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/buffer_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/buffer_test.go
new file mode 100644
index 00000000000..d5781cb3da9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/buffer_test.go
@@ -0,0 +1,87 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "io"
+ "testing"
+)
+
+var alphabet = []byte("abcdefghijklmnopqrstuvwxyz")
+
+func TestBufferReadwrite(t *testing.T) {
+ b := newBuffer()
+ b.write(alphabet[:10])
+ r, _ := b.Read(make([]byte, 10))
+ if r != 10 {
+ t.Fatalf("Expected written == read == 10, written: 10, read %d", r)
+ }
+
+ b = newBuffer()
+ b.write(alphabet[:5])
+ r, _ = b.Read(make([]byte, 10))
+ if r != 5 {
+ t.Fatalf("Expected written == read == 5, written: 5, read %d", r)
+ }
+
+ b = newBuffer()
+ b.write(alphabet[:10])
+ r, _ = b.Read(make([]byte, 5))
+ if r != 5 {
+ t.Fatalf("Expected written == 10, read == 5, written: 10, read %d", r)
+ }
+
+ b = newBuffer()
+ b.write(alphabet[:5])
+ b.write(alphabet[5:15])
+ r, _ = b.Read(make([]byte, 10))
+ r2, _ := b.Read(make([]byte, 10))
+ if r != 10 || r2 != 5 || 15 != r+r2 {
+ t.Fatal("Expected written == read == 15")
+ }
+}
+
+func TestBufferClose(t *testing.T) {
+ b := newBuffer()
+ b.write(alphabet[:10])
+ b.eof()
+ _, err := b.Read(make([]byte, 5))
+ if err != nil {
+ t.Fatal("expected read of 5 to not return EOF")
+ }
+ b = newBuffer()
+ b.write(alphabet[:10])
+ b.eof()
+ r, err := b.Read(make([]byte, 5))
+ r2, err2 := b.Read(make([]byte, 10))
+ if r != 5 || r2 != 5 || err != nil || err2 != nil {
+ t.Fatal("expected reads of 5 and 5")
+ }
+
+ b = newBuffer()
+ b.write(alphabet[:10])
+ b.eof()
+ r, err = b.Read(make([]byte, 5))
+ r2, err2 = b.Read(make([]byte, 10))
+ r3, err3 := b.Read(make([]byte, 10))
+ if r != 5 || r2 != 5 || r3 != 0 || err != nil || err2 != nil || err3 != io.EOF {
+ t.Fatal("expected reads of 5 and 5 and 0, with EOF")
+ }
+
+ b = newBuffer()
+ b.write(make([]byte, 5))
+ b.write(make([]byte, 10))
+ b.eof()
+ r, err = b.Read(make([]byte, 9))
+ r2, err2 = b.Read(make([]byte, 3))
+ r3, err3 = b.Read(make([]byte, 3))
+ r4, err4 := b.Read(make([]byte, 10))
+ if err != nil || err2 != nil || err3 != nil || err4 != io.EOF {
+ t.Fatalf("Expected EOF on forth read only, err=%v, err2=%v, err3=%v, err4=%v", err, err2, err3, err4)
+ }
+ if r != 9 || r2 != 3 || r3 != 3 || r4 != 0 {
+ t.Fatal("Expected written == read == 15", r, r2, r3, r4)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/certs.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/certs.go
new file mode 100644
index 00000000000..385770036a7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/certs.go
@@ -0,0 +1,501 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "net"
+ "sort"
+ "time"
+)
+
+// These constants from [PROTOCOL.certkeys] represent the algorithm names
+// for certificate types supported by this package.
+const (
+ CertAlgoRSAv01 = "ssh-rsa-cert-v01@openssh.com"
+ CertAlgoDSAv01 = "ssh-dss-cert-v01@openssh.com"
+ CertAlgoECDSA256v01 = "ecdsa-sha2-nistp256-cert-v01@openssh.com"
+ CertAlgoECDSA384v01 = "ecdsa-sha2-nistp384-cert-v01@openssh.com"
+ CertAlgoECDSA521v01 = "ecdsa-sha2-nistp521-cert-v01@openssh.com"
+)
+
+// Certificate types distinguish between host and user
+// certificates. The values can be set in the CertType field of
+// Certificate.
+const (
+ UserCert = 1
+ HostCert = 2
+)
+
+// Signature represents a cryptographic signature.
+type Signature struct {
+ Format string
+ Blob []byte
+}
+
+// CertTimeInfinity can be used for Certificate.ValidBefore to indicate that
+// a certificate does not expire.
+const CertTimeInfinity = 1<<64 - 1
+
+// A Certificate represents an OpenSSH certificate as defined in
+// [PROTOCOL.certkeys]?rev=1.8.
+type Certificate struct {
+ Nonce []byte
+ Key PublicKey
+ Serial uint64
+ CertType uint32
+ KeyId string
+ ValidPrincipals []string
+ ValidAfter uint64
+ ValidBefore uint64
+ Permissions
+ Reserved []byte
+ SignatureKey PublicKey
+ Signature *Signature
+}
+
+// genericCertData holds the key-independent part of the certificate data.
+// Overall, certificates contain a nonce, public key fields and
+// key-independent fields.
+type genericCertData struct {
+ Serial uint64
+ CertType uint32
+ KeyId string
+ ValidPrincipals []byte
+ ValidAfter uint64
+ ValidBefore uint64
+ CriticalOptions []byte
+ Extensions []byte
+ Reserved []byte
+ SignatureKey []byte
+ Signature []byte
+}
+
+func marshalStringList(namelist []string) []byte {
+ var to []byte
+ for _, name := range namelist {
+ s := struct{ N string }{name}
+ to = append(to, Marshal(&s)...)
+ }
+ return to
+}
+
+type optionsTuple struct {
+ Key string
+ Value []byte
+}
+
+type optionsTupleValue struct {
+ Value string
+}
+
+// marshalTuples serializes a map of critical options or extensions.
+// Issue #10569: per [PROTOCOL.certkeys] and the OpenSSH implementation,
+// a non-empty string value needs two length prefixes.
+func marshalTuples(tups map[string]string) []byte {
+ keys := make([]string, 0, len(tups))
+ for key := range tups {
+ keys = append(keys, key)
+ }
+ sort.Strings(keys)
+
+ var ret []byte
+ for _, key := range keys {
+ s := optionsTuple{Key: key}
+ if value := tups[key]; len(value) > 0 {
+ s.Value = Marshal(&optionsTupleValue{value})
+ }
+ ret = append(ret, Marshal(&s)...)
+ }
+ return ret
+}
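+
+// Illustrative note (not part of the upstream sources): for a tuple such as
+// force-command=/bin/sleep, the encoding produced above is
+//
+//    string  "force-command"
+//    string  (string "/bin/sleep")
+//
+// i.e. the value carries an inner length prefix in addition to the outer one,
+// which is what the "two length prefixes" comments refer to.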
+
+// Issue #10569: per [PROTOCOL.certkeys] and the OpenSSH implementation,
+// a non-empty option value needs two length prefixes.
+func parseTuples(in []byte) (map[string]string, error) {
+ tups := map[string]string{}
+ var lastKey string
+ var haveLastKey bool
+
+ for len(in) > 0 {
+ var key, val, extra []byte
+ var ok bool
+
+ if key, in, ok = parseString(in); !ok {
+ return nil, errShortRead
+ }
+ keyStr := string(key)
+ // according to [PROTOCOL.certkeys], the names must be in
+ // lexical order.
+ if haveLastKey && keyStr <= lastKey {
+ return nil, fmt.Errorf("ssh: certificate options are not in lexical order")
+ }
+ lastKey, haveLastKey = keyStr, true
+ // the next field is a data field, which if non-empty has a string embedded
+ if val, in, ok = parseString(in); !ok {
+ return nil, errShortRead
+ }
+ if len(val) > 0 {
+ val, extra, ok = parseString(val)
+ if !ok {
+ return nil, errShortRead
+ }
+ if len(extra) > 0 {
+ return nil, fmt.Errorf("ssh: unexpected trailing data after certificate option value")
+ }
+ tups[keyStr] = string(val)
+ } else {
+ tups[keyStr] = ""
+ }
+ }
+ return tups, nil
+}
+
+func parseCert(in []byte, privAlgo string) (*Certificate, error) {
+ nonce, rest, ok := parseString(in)
+ if !ok {
+ return nil, errShortRead
+ }
+
+ key, rest, err := parsePubKey(rest, privAlgo)
+ if err != nil {
+ return nil, err
+ }
+
+ var g genericCertData
+ if err := Unmarshal(rest, &g); err != nil {
+ return nil, err
+ }
+
+ c := &Certificate{
+ Nonce: nonce,
+ Key: key,
+ Serial: g.Serial,
+ CertType: g.CertType,
+ KeyId: g.KeyId,
+ ValidAfter: g.ValidAfter,
+ ValidBefore: g.ValidBefore,
+ }
+
+ for principals := g.ValidPrincipals; len(principals) > 0; {
+ principal, rest, ok := parseString(principals)
+ if !ok {
+ return nil, errShortRead
+ }
+ c.ValidPrincipals = append(c.ValidPrincipals, string(principal))
+ principals = rest
+ }
+
+ c.CriticalOptions, err = parseTuples(g.CriticalOptions)
+ if err != nil {
+ return nil, err
+ }
+ c.Extensions, err = parseTuples(g.Extensions)
+ if err != nil {
+ return nil, err
+ }
+ c.Reserved = g.Reserved
+ k, err := ParsePublicKey(g.SignatureKey)
+ if err != nil {
+ return nil, err
+ }
+
+ c.SignatureKey = k
+ c.Signature, rest, ok = parseSignatureBody(g.Signature)
+ if !ok || len(rest) > 0 {
+ return nil, errors.New("ssh: signature parse error")
+ }
+
+ return c, nil
+}
+
+type openSSHCertSigner struct {
+ pub *Certificate
+ signer Signer
+}
+
+// NewCertSigner returns a Signer that signs with the given Certificate, whose
+// private key is held by signer. It returns an error if the public key in cert
+// doesn't match the key used by signer.
+func NewCertSigner(cert *Certificate, signer Signer) (Signer, error) {
+ if !bytes.Equal(cert.Key.Marshal(), signer.PublicKey().Marshal()) {
+ return nil, errors.New("ssh: signer and cert have different public key")
+ }
+
+ return &openSSHCertSigner{cert, signer}, nil
+}
+
+func (s *openSSHCertSigner) Sign(rand io.Reader, data []byte) (*Signature, error) {
+ return s.signer.Sign(rand, data)
+}
+
+func (s *openSSHCertSigner) PublicKey() PublicKey {
+ return s.pub
+}
+
+const sourceAddressCriticalOption = "source-address"
+
+// CertChecker does the work of verifying a certificate. Its methods
+// can be plugged into ClientConfig.HostKeyCallback and
+// ServerConfig.PublicKeyCallback. For the CertChecker to work,
+// minimally, the IsAuthority callback should be set.
+type CertChecker struct {
+ // SupportedCriticalOptions lists the CriticalOptions that the
+ // server application layer understands. These are only used
+ // for user certificates.
+ SupportedCriticalOptions []string
+
+ // IsAuthority should return true if the key is recognized as
+ // an authority. This allows for certificates to be signed by other
+ // certificates.
+ IsAuthority func(auth PublicKey) bool
+
+ // Clock is used for verifying time stamps. If nil, time.Now
+ // is used.
+ Clock func() time.Time
+
+ // UserKeyFallback is called when CertChecker.Authenticate encounters a
+ // public key that is not a certificate. It must implement validation
+ // of user keys or else, if nil, all such keys are rejected.
+ UserKeyFallback func(conn ConnMetadata, key PublicKey) (*Permissions, error)
+
+ // HostKeyFallback is called when CertChecker.CheckHostKey encounters a
+ // public key that is not a certificate. It must implement host key
+ // validation or else, if nil, all such keys are rejected.
+ HostKeyFallback func(addr string, remote net.Addr, key PublicKey) error
+
+ // IsRevoked is called for each certificate so that revocation checking
+ // can be implemented. It should return true if the given certificate
+ // is revoked and false otherwise. If nil, no certificates are
+ // considered to have been revoked.
+ IsRevoked func(cert *Certificate) bool
+}
+
+// CheckHostKey checks a host key certificate. This method can be
+// plugged into ClientConfig.HostKeyCallback.
+func (c *CertChecker) CheckHostKey(addr string, remote net.Addr, key PublicKey) error {
+ cert, ok := key.(*Certificate)
+ if !ok {
+ if c.HostKeyFallback != nil {
+ return c.HostKeyFallback(addr, remote, key)
+ }
+ return errors.New("ssh: non-certificate host key")
+ }
+ if cert.CertType != HostCert {
+ return fmt.Errorf("ssh: certificate presented as a host key has type %d", cert.CertType)
+ }
+
+ return c.CheckCert(addr, cert)
+}
+
+// Authenticate checks a user certificate. Authenticate can be used as
+// a value for ServerConfig.PublicKeyCallback.
+func (c *CertChecker) Authenticate(conn ConnMetadata, pubKey PublicKey) (*Permissions, error) {
+ cert, ok := pubKey.(*Certificate)
+ if !ok {
+ if c.UserKeyFallback != nil {
+ return c.UserKeyFallback(conn, pubKey)
+ }
+ return nil, errors.New("ssh: normal key pairs not accepted")
+ }
+
+ if cert.CertType != UserCert {
+ return nil, fmt.Errorf("ssh: cert has type %d", cert.CertType)
+ }
+
+ if err := c.CheckCert(conn.User(), cert); err != nil {
+ return nil, err
+ }
+
+ return &cert.Permissions, nil
+}
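+
+// Illustrative sketch (not part of the upstream sources): accepting only user
+// certificates signed by a known CA. The caPub variable is a placeholder for
+// the CA's PublicKey.
+//
+//    checker := &CertChecker{
+//        IsAuthority: func(auth PublicKey) bool {
+//            return bytes.Equal(auth.Marshal(), caPub.Marshal())
+//        },
+//    }
+//    conf := &ServerConfig{
+//        PublicKeyCallback: checker.Authenticate,
+//    }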
+
+// CheckCert checks CriticalOptions, ValidPrincipals, revocation, timestamp and
+// the signature of the certificate.
+func (c *CertChecker) CheckCert(principal string, cert *Certificate) error {
+ if c.IsRevoked != nil && c.IsRevoked(cert) {
+ return fmt.Errorf("ssh: certicate serial %d revoked", cert.Serial)
+ }
+
+ for opt := range cert.CriticalOptions {
+ // sourceAddressCriticalOption will be enforced by
+ // serverAuthenticate
+ if opt == sourceAddressCriticalOption {
+ continue
+ }
+
+ found := false
+ for _, supp := range c.SupportedCriticalOptions {
+ if supp == opt {
+ found = true
+ break
+ }
+ }
+ if !found {
+ return fmt.Errorf("ssh: unsupported critical option %q in certificate", opt)
+ }
+ }
+
+ if len(cert.ValidPrincipals) > 0 {
+ // By default, certs are valid for all users/hosts.
+ found := false
+ for _, p := range cert.ValidPrincipals {
+ if p == principal {
+ found = true
+ break
+ }
+ }
+ if !found {
+ return fmt.Errorf("ssh: principal %q not in the set of valid principals for given certificate: %q", principal, cert.ValidPrincipals)
+ }
+ }
+
+ if !c.IsAuthority(cert.SignatureKey) {
+ return fmt.Errorf("ssh: certificate signed by unrecognized authority")
+ }
+
+ clock := c.Clock
+ if clock == nil {
+ clock = time.Now
+ }
+
+ unixNow := clock().Unix()
+ if after := int64(cert.ValidAfter); after < 0 || unixNow < int64(cert.ValidAfter) {
+ return fmt.Errorf("ssh: cert is not yet valid")
+ }
+ if before := int64(cert.ValidBefore); cert.ValidBefore != uint64(CertTimeInfinity) && (unixNow >= before || before < 0) {
+ return fmt.Errorf("ssh: cert has expired")
+ }
+ if err := cert.SignatureKey.Verify(cert.bytesForSigning(), cert.Signature); err != nil {
+ return fmt.Errorf("ssh: certificate signature does not verify")
+ }
+
+ return nil
+}
+
+// SignCert sets c.SignatureKey to the authority's public key and stores a
+// Signature, by authority, in the certificate.
+func (c *Certificate) SignCert(rand io.Reader, authority Signer) error {
+ c.Nonce = make([]byte, 32)
+ if _, err := io.ReadFull(rand, c.Nonce); err != nil {
+ return err
+ }
+ c.SignatureKey = authority.PublicKey()
+
+ sig, err := authority.Sign(rand, c.bytesForSigning())
+ if err != nil {
+ return err
+ }
+ c.Signature = sig
+ return nil
+}
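+
+// Illustrative sketch (not part of the upstream sources): minting a user
+// certificate for an existing public key. userPub and caSigner are
+// placeholders for the key being certified and the CA's Signer.
+//
+//    cert := &Certificate{
+//        Key:             userPub,
+//        KeyId:           "gopher",
+//        ValidPrincipals: []string{"gopher"},
+//        ValidAfter:      0,
+//        ValidBefore:     CertTimeInfinity,
+//        CertType:        UserCert,
+//    }
+//    if err := cert.SignCert(rand.Reader, caSigner); err != nil {
+//        log.Fatal(err)
+//    }
+//    line := MarshalAuthorizedKey(cert) // suitable for an authorized_keys-style file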
+
+var certAlgoNames = map[string]string{
+ KeyAlgoRSA: CertAlgoRSAv01,
+ KeyAlgoDSA: CertAlgoDSAv01,
+ KeyAlgoECDSA256: CertAlgoECDSA256v01,
+ KeyAlgoECDSA384: CertAlgoECDSA384v01,
+ KeyAlgoECDSA521: CertAlgoECDSA521v01,
+}
+
+// certToPrivAlgo returns the underlying algorithm for a certificate algorithm.
+// Panics if a non-certificate algorithm is passed.
+func certToPrivAlgo(algo string) string {
+ for privAlgo, pubAlgo := range certAlgoNames {
+ if pubAlgo == algo {
+ return privAlgo
+ }
+ }
+ panic("unknown cert algorithm")
+}
+
+func (cert *Certificate) bytesForSigning() []byte {
+ c2 := *cert
+ c2.Signature = nil
+ out := c2.Marshal()
+ // Drop trailing signature length.
+ return out[:len(out)-4]
+}
+
+// Marshal serializes c into OpenSSH's wire format. It is part of the
+// PublicKey interface.
+func (c *Certificate) Marshal() []byte {
+ generic := genericCertData{
+ Serial: c.Serial,
+ CertType: c.CertType,
+ KeyId: c.KeyId,
+ ValidPrincipals: marshalStringList(c.ValidPrincipals),
+ ValidAfter: uint64(c.ValidAfter),
+ ValidBefore: uint64(c.ValidBefore),
+ CriticalOptions: marshalTuples(c.CriticalOptions),
+ Extensions: marshalTuples(c.Extensions),
+ Reserved: c.Reserved,
+ SignatureKey: c.SignatureKey.Marshal(),
+ }
+ if c.Signature != nil {
+ generic.Signature = Marshal(c.Signature)
+ }
+ genericBytes := Marshal(&generic)
+ keyBytes := c.Key.Marshal()
+ _, keyBytes, _ = parseString(keyBytes)
+ prefix := Marshal(&struct {
+ Name string
+ Nonce []byte
+ Key []byte `ssh:"rest"`
+ }{c.Type(), c.Nonce, keyBytes})
+
+ result := make([]byte, 0, len(prefix)+len(genericBytes))
+ result = append(result, prefix...)
+ result = append(result, genericBytes...)
+ return result
+}
+
+// Type returns the key name. It is part of the PublicKey interface.
+func (c *Certificate) Type() string {
+ algo, ok := certAlgoNames[c.Key.Type()]
+ if !ok {
+ panic("unknown cert key type")
+ }
+ return algo
+}
+
+// Verify verifies a signature against the certificate's public
+// key. It is part of the PublicKey interface.
+func (c *Certificate) Verify(data []byte, sig *Signature) error {
+ return c.Key.Verify(data, sig)
+}
+
+func parseSignatureBody(in []byte) (out *Signature, rest []byte, ok bool) {
+ format, in, ok := parseString(in)
+ if !ok {
+ return
+ }
+
+ out = &Signature{
+ Format: string(format),
+ }
+
+ if out.Blob, in, ok = parseString(in); !ok {
+ return
+ }
+
+ return out, in, ok
+}
+
+func parseSignature(in []byte) (out *Signature, rest []byte, ok bool) {
+ sigBytes, rest, ok := parseString(in)
+ if !ok {
+ return
+ }
+
+ out, trailing, ok := parseSignatureBody(sigBytes)
+ if !ok || len(trailing) > 0 {
+ return nil, nil, false
+ }
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/certs_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/certs_test.go
new file mode 100644
index 00000000000..c5f2e533043
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/certs_test.go
@@ -0,0 +1,216 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "crypto/rand"
+ "reflect"
+ "testing"
+ "time"
+)
+
+// Cert generated by ssh-keygen 6.0p1 Debian-4.
+// % ssh-keygen -s ca-key -I test user-key
+const exampleSSHCert = `ssh-rsa-cert-v01@openssh.com AAAAHHNzaC1yc2EtY2VydC12MDFAb3BlbnNzaC5jb20AAAAgb1srW/W3ZDjYAO45xLYAwzHBDLsJ4Ux6ICFIkTjb1LEAAAADAQABAAAAYQCkoR51poH0wE8w72cqSB8Sszx+vAhzcMdCO0wqHTj7UNENHWEXGrU0E0UQekD7U+yhkhtoyjbPOVIP7hNa6aRk/ezdh/iUnCIt4Jt1v3Z1h1P+hA4QuYFMHNB+rmjPwAcAAAAAAAAAAAAAAAEAAAAEdGVzdAAAAAAAAAAAAAAAAP//////////AAAAAAAAAIIAAAAVcGVybWl0LVgxMS1mb3J3YXJkaW5nAAAAAAAAABdwZXJtaXQtYWdlbnQtZm9yd2FyZGluZwAAAAAAAAAWcGVybWl0LXBvcnQtZm9yd2FyZGluZwAAAAAAAAAKcGVybWl0LXB0eQAAAAAAAAAOcGVybWl0LXVzZXItcmMAAAAAAAAAAAAAAHcAAAAHc3NoLXJzYQAAAAMBAAEAAABhANFS2kaktpSGc+CcmEKPyw9mJC4nZKxHKTgLVZeaGbFZOvJTNzBspQHdy7Q1uKSfktxpgjZnksiu/tFF9ngyY2KFoc+U88ya95IZUycBGCUbBQ8+bhDtw/icdDGQD5WnUwAAAG8AAAAHc3NoLXJzYQAAAGC8Y9Z2LQKhIhxf52773XaWrXdxP0t3GBVo4A10vUWiYoAGepr6rQIoGGXFxT4B9Gp+nEBJjOwKDXPrAevow0T9ca8gZN+0ykbhSrXLE5Ao48rqr3zP4O1/9P7e6gp0gw8=`
+
+func TestParseCert(t *testing.T) {
+ authKeyBytes := []byte(exampleSSHCert)
+
+ key, _, _, rest, err := ParseAuthorizedKey(authKeyBytes)
+ if err != nil {
+ t.Fatalf("ParseAuthorizedKey: %v", err)
+ }
+ if len(rest) > 0 {
+ t.Errorf("rest: got %q, want empty", rest)
+ }
+
+ if _, ok := key.(*Certificate); !ok {
+ t.Fatalf("got %v (%T), want *Certificate", key, key)
+ }
+
+ marshaled := MarshalAuthorizedKey(key)
+ // Before comparison, remove the trailing newline that
+ // MarshalAuthorizedKey adds.
+ marshaled = marshaled[:len(marshaled)-1]
+ if !bytes.Equal(authKeyBytes, marshaled) {
+ t.Errorf("marshaled certificate does not match original: got %q, want %q", marshaled, authKeyBytes)
+ }
+}
+
+// Cert generated by ssh-keygen OpenSSH_6.8p1 OS X 10.10.3
+// % ssh-keygen -s ca -I testcert -O source-address=192.168.1.0/24 -O force-command=/bin/sleep user.pub
+// user.pub key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDACh1rt2DXfV3hk6fszSQcQ/rueMId0kVD9U7nl8cfEnFxqOCrNT92g4laQIGl2mn8lsGZfTLg8ksHq3gkvgO3oo/0wHy4v32JeBOHTsN5AL4gfHNEhWeWb50ev47hnTsRIt9P4dxogeUo/hTu7j9+s9lLpEQXCvq6xocXQt0j8MV9qZBBXFLXVT3cWIkSqOdwt/5ZBg+1GSrc7WfCXVWgTk4a20uPMuJPxU4RQwZW6X3+O8Pqo8C3cW0OzZRFP6gUYUKUsTI5WntlS+LAxgw1mZNsozFGdbiOPRnEryE3SRldh9vjDR3tin1fGpA5P7+CEB/bqaXtG3V+F2OkqaMN
+// Critical Options:
+// force-command /bin/sleep
+// source-address 192.168.1.0/24
+// Extensions:
+// permit-X11-forwarding
+// permit-agent-forwarding
+// permit-port-forwarding
+// permit-pty
+// permit-user-rc
+const exampleSSHCertWithOptions = `ssh-rsa-cert-v01@openssh.com AAAAHHNzaC1yc2EtY2VydC12MDFAb3BlbnNzaC5jb20AAAAgDyysCJY0XrO1n03EeRRoITnTPdjENFmWDs9X58PP3VUAAAADAQABAAABAQDACh1rt2DXfV3hk6fszSQcQ/rueMId0kVD9U7nl8cfEnFxqOCrNT92g4laQIGl2mn8lsGZfTLg8ksHq3gkvgO3oo/0wHy4v32JeBOHTsN5AL4gfHNEhWeWb50ev47hnTsRIt9P4dxogeUo/hTu7j9+s9lLpEQXCvq6xocXQt0j8MV9qZBBXFLXVT3cWIkSqOdwt/5ZBg+1GSrc7WfCXVWgTk4a20uPMuJPxU4RQwZW6X3+O8Pqo8C3cW0OzZRFP6gUYUKUsTI5WntlS+LAxgw1mZNsozFGdbiOPRnEryE3SRldh9vjDR3tin1fGpA5P7+CEB/bqaXtG3V+F2OkqaMNAAAAAAAAAAAAAAABAAAACHRlc3RjZXJ0AAAAAAAAAAAAAAAA//////////8AAABLAAAADWZvcmNlLWNvbW1hbmQAAAAOAAAACi9iaW4vc2xlZXAAAAAOc291cmNlLWFkZHJlc3MAAAASAAAADjE5Mi4xNjguMS4wLzI0AAAAggAAABVwZXJtaXQtWDExLWZvcndhcmRpbmcAAAAAAAAAF3Blcm1pdC1hZ2VudC1mb3J3YXJkaW5nAAAAAAAAABZwZXJtaXQtcG9ydC1mb3J3YXJkaW5nAAAAAAAAAApwZXJtaXQtcHR5AAAAAAAAAA5wZXJtaXQtdXNlci1yYwAAAAAAAAAAAAABFwAAAAdzc2gtcnNhAAAAAwEAAQAAAQEAwU+c5ui5A8+J/CFpjW8wCa52bEODA808WWQDCSuTG/eMXNf59v9Y8Pk0F1E9dGCosSNyVcB/hacUrc6He+i97+HJCyKavBsE6GDxrjRyxYqAlfcOXi/IVmaUGiO8OQ39d4GHrjToInKvExSUeleQyH4Y4/e27T/pILAqPFL3fyrvMLT5qU9QyIt6zIpa7GBP5+urouNavMprV3zsfIqNBbWypinOQAw823a5wN+zwXnhZrgQiHZ/USG09Y6k98y1dTVz8YHlQVR4D3lpTAsKDKJ5hCH9WU4fdf+lU8OyNGaJ/vz0XNqxcToe1l4numLTnaoSuH89pHryjqurB7lJKwAAAQ8AAAAHc3NoLXJzYQAAAQCaHvUIoPL1zWUHIXLvu96/HU1s/i4CAW2IIEuGgxCUCiFj6vyTyYtgxQxcmbfZf6eaITlS6XJZa7Qq4iaFZh75C1DXTX8labXhRSD4E2t//AIP9MC1rtQC5xo6FmbQ+BoKcDskr+mNACcbRSxs3IL3bwCfWDnIw2WbVox9ZdcthJKk4UoCW4ix4QwdHw7zlddlz++fGEEVhmTbll1SUkycGApPFBsAYRTMupUJcYPIeReBI/m8XfkoMk99bV8ZJQTAd7OekHY2/48Ff53jLmyDjP7kNw1F8OaPtkFs6dGJXta4krmaekPy87j+35In5hFj7yoOqvSbmYUkeX70/GGQ`
+
+func TestParseCertWithOptions(t *testing.T) {
+ opts := map[string]string{
+ "source-address": "192.168.1.0/24",
+ "force-command": "/bin/sleep",
+ }
+ exts := map[string]string{
+ "permit-X11-forwarding": "",
+ "permit-agent-forwarding": "",
+ "permit-port-forwarding": "",
+ "permit-pty": "",
+ "permit-user-rc": "",
+ }
+ authKeyBytes := []byte(exampleSSHCertWithOptions)
+
+ key, _, _, rest, err := ParseAuthorizedKey(authKeyBytes)
+ if err != nil {
+ t.Fatalf("ParseAuthorizedKey: %v", err)
+ }
+ if len(rest) > 0 {
+ t.Errorf("rest: got %q, want empty", rest)
+ }
+ cert, ok := key.(*Certificate)
+ if !ok {
+ t.Fatalf("got %v (%T), want *Certificate", key, key)
+ }
+ if !reflect.DeepEqual(cert.CriticalOptions, opts) {
+ t.Errorf("unexpected critical options - got %v, want %v", cert.CriticalOptions, opts)
+ }
+ if !reflect.DeepEqual(cert.Extensions, exts) {
+ t.Errorf("unexpected Extensions - got %v, want %v", cert.Extensions, exts)
+ }
+ marshaled := MarshalAuthorizedKey(key)
+ // Before comparison, remove the trailing newline that
+ // MarshalAuthorizedKey adds.
+ marshaled = marshaled[:len(marshaled)-1]
+ if !bytes.Equal(authKeyBytes, marshaled) {
+ t.Errorf("marshaled certificate does not match original: got %q, want %q", marshaled, authKeyBytes)
+ }
+}
+
+func TestValidateCert(t *testing.T) {
+ key, _, _, _, err := ParseAuthorizedKey([]byte(exampleSSHCert))
+ if err != nil {
+ t.Fatalf("ParseAuthorizedKey: %v", err)
+ }
+ validCert, ok := key.(*Certificate)
+ if !ok {
+ t.Fatalf("got %v (%T), want *Certificate", key, key)
+ }
+ checker := CertChecker{}
+ checker.IsAuthority = func(k PublicKey) bool {
+ return bytes.Equal(k.Marshal(), validCert.SignatureKey.Marshal())
+ }
+
+ if err := checker.CheckCert("user", validCert); err != nil {
+ t.Errorf("Unable to validate certificate: %v", err)
+ }
+ invalidCert := &Certificate{
+ Key: testPublicKeys["rsa"],
+ SignatureKey: testPublicKeys["ecdsa"],
+ ValidBefore: CertTimeInfinity,
+ Signature: &Signature{},
+ }
+ if err := checker.CheckCert("user", invalidCert); err == nil {
+ t.Error("Invalid cert signature passed validation")
+ }
+}
+
+func TestValidateCertTime(t *testing.T) {
+ cert := Certificate{
+ ValidPrincipals: []string{"user"},
+ Key: testPublicKeys["rsa"],
+ ValidAfter: 50,
+ ValidBefore: 100,
+ }
+
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+
+ for ts, ok := range map[int64]bool{
+ 25: false,
+ 50: true,
+ 99: true,
+ 100: false,
+ 125: false,
+ } {
+ checker := CertChecker{
+ Clock: func() time.Time { return time.Unix(ts, 0) },
+ }
+ checker.IsAuthority = func(k PublicKey) bool {
+ return bytes.Equal(k.Marshal(),
+ testPublicKeys["ecdsa"].Marshal())
+ }
+
+ if v := checker.CheckCert("user", &cert); (v == nil) != ok {
+ t.Errorf("Authenticate(%d): %v", ts, v)
+ }
+ }
+}
+
+// TODO(hanwen): tests for
+//
+// host keys:
+// * fallbacks
+
+func TestHostKeyCert(t *testing.T) {
+ cert := &Certificate{
+ ValidPrincipals: []string{"hostname", "hostname.domain"},
+ Key: testPublicKeys["rsa"],
+ ValidBefore: CertTimeInfinity,
+ CertType: HostCert,
+ }
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+
+ checker := &CertChecker{
+ IsAuthority: func(p PublicKey) bool {
+ return bytes.Equal(testPublicKeys["ecdsa"].Marshal(), p.Marshal())
+ },
+ }
+
+ certSigner, err := NewCertSigner(cert, testSigners["rsa"])
+ if err != nil {
+ t.Errorf("NewCertSigner: %v", err)
+ }
+
+ for _, name := range []string{"hostname", "otherhost"} {
+ c1, c2, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+ defer c1.Close()
+ defer c2.Close()
+
+ errc := make(chan error)
+
+ go func() {
+ conf := ServerConfig{
+ NoClientAuth: true,
+ }
+ conf.AddHostKey(certSigner)
+ _, _, _, err := NewServerConn(c1, &conf)
+ errc <- err
+ }()
+
+ config := &ClientConfig{
+ User: "user",
+ HostKeyCallback: checker.CheckHostKey,
+ }
+ _, _, _, err = NewClientConn(c2, name, config)
+
+ succeed := name == "hostname"
+ if (err == nil) != succeed {
+ t.Fatalf("NewClientConn(%q): %v", name, err)
+ }
+
+ err = <-errc
+ if (err == nil) != succeed {
+ t.Fatalf("NewServerConn(%q): %v", name, err)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/channel.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/channel.go
new file mode 100644
index 00000000000..5403c7e45fb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/channel.go
@@ -0,0 +1,631 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "io"
+ "log"
+ "sync"
+)
+
+const (
+ minPacketLength = 9
+ // channelMaxPacket contains the maximum number of bytes that will be
+ // sent in a single packet. As per RFC 4253, section 6.1, 32k is also
+ // the minimum.
+ channelMaxPacket = 1 << 15
+ // We follow OpenSSH here.
+ channelWindowSize = 64 * channelMaxPacket
+)
+
+// NewChannel represents an incoming request to a channel. It must either be
+// accepted for use by calling Accept, or rejected by calling Reject.
+type NewChannel interface {
+ // Accept accepts the channel creation request. It returns the Channel
+ // and a Go channel containing SSH requests. The Go channel must be
+ // serviced otherwise the Channel will hang.
+ Accept() (Channel, <-chan *Request, error)
+
+ // Reject rejects the channel creation request. After calling
+ // this, no other methods on the Channel may be called.
+ Reject(reason RejectionReason, message string) error
+
+ // ChannelType returns the type of the channel, as supplied by the
+ // client.
+ ChannelType() string
+
+ // ExtraData returns the arbitrary payload for this channel, as supplied
+ // by the client. This data is specific to the channel type.
+ ExtraData() []byte
+}
+
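+// A typical consumer drains the stream of NewChannel values and either
+// accepts or rejects each one. A minimal sketch (variable names here are
+// illustrative only):
+//
+//	for newCh := range chans {
+//		if newCh.ChannelType() != "session" {
+//			newCh.Reject(UnknownChannelType, "unsupported channel type")
+//			continue
+//		}
+//		ch, reqs, err := newCh.Accept()
+//		if err != nil {
+//			continue
+//		}
+//		go func() {
+//			for req := range reqs {
+//				req.Reply(false, nil)
+//			}
+//		}()
+//		// ... use ch, then call ch.Close().
+//	}
+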
+// A Channel is an ordered, reliable, flow-controlled, duplex stream
+// that is multiplexed over an SSH connection.
+type Channel interface {
+ // Read reads up to len(data) bytes from the channel.
+ Read(data []byte) (int, error)
+
+ // Write writes len(data) bytes to the channel.
+ Write(data []byte) (int, error)
+
+ // Close signals end of channel use. No data may be sent after this
+ // call.
+ Close() error
+
+ // CloseWrite signals the end of sending in-band
+ // data. Requests may still be sent, and the other side may
+ // still send data.
+ CloseWrite() error
+
+ // SendRequest sends a channel request. If wantReply is true,
+ // it will wait for a reply and return the result as a
+ // boolean, otherwise the return value will be false. Channel
+ // requests are out-of-band messages so they may be sent even
+ // if the data stream is closed or blocked by flow control.
+ SendRequest(name string, wantReply bool, payload []byte) (bool, error)
+
+ // Stderr returns an io.ReadWriter that writes to this channel
+ // with the extended data type set to stderr. Stderr may
+ // safely be read and written from a different goroutine than
+ // Read and Write respectively.
+ Stderr() io.ReadWriter
+}
+
+// Request is a request sent outside of the normal stream of
+// data. Requests can either be specific to an SSH channel, or they
+// can be global.
+type Request struct {
+ Type string
+ WantReply bool
+ Payload []byte
+
+ ch *channel
+ mux *mux
+}
+
+// Reply sends a response to a request. It must be called for all requests
+// where WantReply is true and is a no-op otherwise. The payload argument is
+// ignored for replies to channel-specific requests.
+func (r *Request) Reply(ok bool, payload []byte) error {
+ if !r.WantReply {
+ return nil
+ }
+
+ if r.ch == nil {
+ return r.mux.ackRequest(ok, payload)
+ }
+
+ return r.ch.ackRequest(ok)
+}
+
+// RejectionReason is an enumeration used when rejecting channel creation
+// requests. See RFC 4254, section 5.1.
+type RejectionReason uint32
+
+const (
+ Prohibited RejectionReason = iota + 1
+ ConnectionFailed
+ UnknownChannelType
+ ResourceShortage
+)
+
+// String converts the rejection reason to human readable form.
+func (r RejectionReason) String() string {
+ switch r {
+ case Prohibited:
+ return "administratively prohibited"
+ case ConnectionFailed:
+ return "connect failed"
+ case UnknownChannelType:
+ return "unknown channel type"
+ case ResourceShortage:
+ return "resource shortage"
+ }
+ return fmt.Sprintf("unknown reason %d", int(r))
+}
+
+func min(a uint32, b int) uint32 {
+ if a < uint32(b) {
+ return a
+ }
+ return uint32(b)
+}
+
+type channelDirection uint8
+
+const (
+ channelInbound channelDirection = iota
+ channelOutbound
+)
+
+// channel is an implementation of the Channel interface that works
+// with the mux class.
+type channel struct {
+ // R/O after creation
+ chanType string
+ extraData []byte
+ localId, remoteId uint32
+
+ // maxIncomingPayload and maxRemotePayload are the maximum
+ // payload sizes of normal and extended data packets for
+ // receiving and sending, respectively. The wire packet will
+ // be 9 or 13 bytes larger (excluding encryption overhead).
+ maxIncomingPayload uint32
+ maxRemotePayload uint32
+
+ mux *mux
+
+ // decided is set to true if an accept or reject message has been sent
+ // (for outbound channels) or received (for inbound channels).
+ decided bool
+
+ // direction contains either channelOutbound, for channels created
+ // locally, or channelInbound, for channels created by the peer.
+ direction channelDirection
+
+ // Pending internal channel messages.
+ msg chan interface{}
+
+ // Since requests have no ID, there can be only one request
+ // with WantReply=true outstanding. This lock is held by a
+ // goroutine that has such an outgoing request pending.
+ sentRequestMu sync.Mutex
+
+ incomingRequests chan *Request
+
+ sentEOF bool
+
+ // thread-safe data
+ remoteWin window
+ pending *buffer
+ extPending *buffer
+
+ // windowMu protects myWindow, the flow-control window.
+ windowMu sync.Mutex
+ myWindow uint32
+
+ // writeMu serializes calls to mux.conn.writePacket() and
+ // protects sentClose and packetPool. This mutex must be
+ // different from windowMu, as writePacket can block if there
+ // is a key exchange pending.
+ writeMu sync.Mutex
+ sentClose bool
+
+ // packetPool has a buffer for each extended channel ID to
+ // save allocations during writes.
+ packetPool map[uint32][]byte
+}
+
+// writePacket sends a packet. If the packet is a channel close, it updates
+// sentClose. This method takes the lock c.writeMu.
+func (c *channel) writePacket(packet []byte) error {
+ c.writeMu.Lock()
+ if c.sentClose {
+ c.writeMu.Unlock()
+ return io.EOF
+ }
+ c.sentClose = (packet[0] == msgChannelClose)
+ err := c.mux.conn.writePacket(packet)
+ c.writeMu.Unlock()
+ return err
+}
+
+func (c *channel) sendMessage(msg interface{}) error {
+ if debugMux {
+ log.Printf("send %d: %#v", c.mux.chanList.offset, msg)
+ }
+
+ p := Marshal(msg)
+ binary.BigEndian.PutUint32(p[1:], c.remoteId)
+ return c.writePacket(p)
+}
+
+// WriteExtended writes data to a specific extended stream. These streams are
+// used, for example, for stderr.
+func (c *channel) WriteExtended(data []byte, extendedCode uint32) (n int, err error) {
+ if c.sentEOF {
+ return 0, io.EOF
+ }
+ // 1 byte message type, 4 bytes remoteId, 4 bytes data length
+ opCode := byte(msgChannelData)
+ headerLength := uint32(9)
+ if extendedCode > 0 {
+ headerLength += 4
+ opCode = msgChannelExtendedData
+ }
+
+ c.writeMu.Lock()
+ packet := c.packetPool[extendedCode]
+ // We don't remove the buffer from packetPool, so
+ // WriteExtended calls from different goroutines will be
+ // flagged as errors by the race detector.
+ c.writeMu.Unlock()
+
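+ // Send the data in chunks: each iteration reserves space in the
+ // remote window, builds a data (or extended data) packet no larger
+ // than the peer's maximum payload size, and writes it out.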
+ for len(data) > 0 {
+ space := min(c.maxRemotePayload, len(data))
+ if space, err = c.remoteWin.reserve(space); err != nil {
+ return n, err
+ }
+ if want := headerLength + space; uint32(cap(packet)) < want {
+ packet = make([]byte, want)
+ } else {
+ packet = packet[:want]
+ }
+
+ todo := data[:space]
+
+ packet[0] = opCode
+ binary.BigEndian.PutUint32(packet[1:], c.remoteId)
+ if extendedCode > 0 {
+ binary.BigEndian.PutUint32(packet[5:], uint32(extendedCode))
+ }
+ binary.BigEndian.PutUint32(packet[headerLength-4:], uint32(len(todo)))
+ copy(packet[headerLength:], todo)
+ if err = c.writePacket(packet); err != nil {
+ return n, err
+ }
+
+ n += len(todo)
+ data = data[len(todo):]
+ }
+
+ c.writeMu.Lock()
+ c.packetPool[extendedCode] = packet
+ c.writeMu.Unlock()
+
+ return n, err
+}
+
+func (c *channel) handleData(packet []byte) error {
+ headerLen := 9
+ isExtendedData := packet[0] == msgChannelExtendedData
+ if isExtendedData {
+ headerLen = 13
+ }
+ if len(packet) < headerLen {
+ // malformed data packet
+ return parseError(packet[0])
+ }
+
+ var extended uint32
+ if isExtendedData {
+ extended = binary.BigEndian.Uint32(packet[5:])
+ }
+
+ length := binary.BigEndian.Uint32(packet[headerLen-4 : headerLen])
+ if length == 0 {
+ return nil
+ }
+ if length > c.maxIncomingPayload {
+ // TODO(hanwen): should send Disconnect?
+ return errors.New("ssh: incoming packet exceeds maximum payload size")
+ }
+
+ data := packet[headerLen:]
+ if length != uint32(len(data)) {
+ return errors.New("ssh: wrong packet length")
+ }
+
+ c.windowMu.Lock()
+ if c.myWindow < length {
+ c.windowMu.Unlock()
+ // TODO(hanwen): should send Disconnect with reason?
+ return errors.New("ssh: remote side wrote too much")
+ }
+ c.myWindow -= length
+ c.windowMu.Unlock()
+
+ if extended == 1 {
+ c.extPending.write(data)
+ } else if extended > 0 {
+ // discard other extended data.
+ } else {
+ c.pending.write(data)
+ }
+ return nil
+}
+
+func (c *channel) adjustWindow(n uint32) error {
+ c.windowMu.Lock()
+ // Since myWindow is managed on our side, and can never exceed
+ // the initial window setting, we don't worry about overflow.
+ c.myWindow += uint32(n)
+ c.windowMu.Unlock()
+ return c.sendMessage(windowAdjustMsg{
+ AdditionalBytes: uint32(n),
+ })
+}
+
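+// ReadExtended reads from the given extended stream (0 is the standard data
+// stream, 1 is stderr) and, after data has been consumed, widens the local
+// flow-control window by the number of bytes read.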
+func (c *channel) ReadExtended(data []byte, extended uint32) (n int, err error) {
+ switch extended {
+ case 1:
+ n, err = c.extPending.Read(data)
+ case 0:
+ n, err = c.pending.Read(data)
+ default:
+ return 0, fmt.Errorf("ssh: extended code %d unimplemented", extended)
+ }
+
+ if n > 0 {
+ err = c.adjustWindow(uint32(n))
+ // sendWindowAdjust can return io.EOF if the remote
+ // peer has closed the connection, however we want to
+ // defer forwarding io.EOF to the caller of Read until
+ // the buffer has been drained.
+ if n > 0 && err == io.EOF {
+ err = nil
+ }
+ }
+
+ return n, err
+}
+
+func (c *channel) close() {
+ c.pending.eof()
+ c.extPending.eof()
+ close(c.msg)
+ close(c.incomingRequests)
+ c.writeMu.Lock()
+ // This is not necessary for a normal channel teardown, but if
+ // there was another error, it is.
+ c.sentClose = true
+ c.writeMu.Unlock()
+ // Unblock writers.
+ c.remoteWin.close()
+}
+
+// responseMessageReceived is called when a success or failure message is
+// received on a channel to check that such a message is reasonable for the
+// given channel.
+func (c *channel) responseMessageReceived() error {
+ if c.direction == channelInbound {
+ return errors.New("ssh: channel response message received on inbound channel")
+ }
+ if c.decided {
+ return errors.New("ssh: duplicate response received for channel")
+ }
+ c.decided = true
+ return nil
+}
+
+func (c *channel) handlePacket(packet []byte) error {
+ switch packet[0] {
+ case msgChannelData, msgChannelExtendedData:
+ return c.handleData(packet)
+ case msgChannelClose:
+ c.sendMessage(channelCloseMsg{PeersId: c.remoteId})
+ c.mux.chanList.remove(c.localId)
+ c.close()
+ return nil
+ case msgChannelEOF:
+ // RFC 4254 is silent on how EOF affects dataExt messages, but
+ // it is logical to signal EOF at the same time.
+ c.extPending.eof()
+ c.pending.eof()
+ return nil
+ }
+
+ decoded, err := decode(packet)
+ if err != nil {
+ return err
+ }
+
+ switch msg := decoded.(type) {
+ case *channelOpenFailureMsg:
+ if err := c.responseMessageReceived(); err != nil {
+ return err
+ }
+ c.mux.chanList.remove(msg.PeersId)
+ c.msg <- msg
+ case *channelOpenConfirmMsg:
+ if err := c.responseMessageReceived(); err != nil {
+ return err
+ }
+ if msg.MaxPacketSize < minPacketLength || msg.MaxPacketSize > 1<<31 {
+ return fmt.Errorf("ssh: invalid MaxPacketSize %d from peer", msg.MaxPacketSize)
+ }
+ c.remoteId = msg.MyId
+ c.maxRemotePayload = msg.MaxPacketSize
+ c.remoteWin.add(msg.MyWindow)
+ c.msg <- msg
+ case *windowAdjustMsg:
+ if !c.remoteWin.add(msg.AdditionalBytes) {
+ return fmt.Errorf("ssh: invalid window update for %d bytes", msg.AdditionalBytes)
+ }
+ case *channelRequestMsg:
+ req := Request{
+ Type: msg.Request,
+ WantReply: msg.WantReply,
+ Payload: msg.RequestSpecificData,
+ ch: c,
+ }
+
+ c.incomingRequests <- &req
+ default:
+ c.msg <- msg
+ }
+ return nil
+}
+
+func (m *mux) newChannel(chanType string, direction channelDirection, extraData []byte) *channel {
+ ch := &channel{
+ remoteWin: window{Cond: newCond()},
+ myWindow: channelWindowSize,
+ pending: newBuffer(),
+ extPending: newBuffer(),
+ direction: direction,
+ incomingRequests: make(chan *Request, 16),
+ msg: make(chan interface{}, 16),
+ chanType: chanType,
+ extraData: extraData,
+ mux: m,
+ packetPool: make(map[uint32][]byte),
+ }
+ ch.localId = m.chanList.add(ch)
+ return ch
+}
+
+var errUndecided = errors.New("ssh: must Accept or Reject channel")
+var errDecidedAlready = errors.New("ssh: can call Accept or Reject only once")
+
+type extChannel struct {
+ code uint32
+ ch *channel
+}
+
+func (e *extChannel) Write(data []byte) (n int, err error) {
+ return e.ch.WriteExtended(data, e.code)
+}
+
+func (e *extChannel) Read(data []byte) (n int, err error) {
+ return e.ch.ReadExtended(data, e.code)
+}
+
+func (c *channel) Accept() (Channel, <-chan *Request, error) {
+ if c.decided {
+ return nil, nil, errDecidedAlready
+ }
+ c.maxIncomingPayload = channelMaxPacket
+ confirm := channelOpenConfirmMsg{
+ PeersId: c.remoteId,
+ MyId: c.localId,
+ MyWindow: c.myWindow,
+ MaxPacketSize: c.maxIncomingPayload,
+ }
+ c.decided = true
+ if err := c.sendMessage(confirm); err != nil {
+ return nil, nil, err
+ }
+
+ return c, c.incomingRequests, nil
+}
+
+func (ch *channel) Reject(reason RejectionReason, message string) error {
+ if ch.decided {
+ return errDecidedAlready
+ }
+ reject := channelOpenFailureMsg{
+ PeersId: ch.remoteId,
+ Reason: reason,
+ Message: message,
+ Language: "en",
+ }
+ ch.decided = true
+ return ch.sendMessage(reject)
+}
+
+func (ch *channel) Read(data []byte) (int, error) {
+ if !ch.decided {
+ return 0, errUndecided
+ }
+ return ch.ReadExtended(data, 0)
+}
+
+func (ch *channel) Write(data []byte) (int, error) {
+ if !ch.decided {
+ return 0, errUndecided
+ }
+ return ch.WriteExtended(data, 0)
+}
+
+func (ch *channel) CloseWrite() error {
+ if !ch.decided {
+ return errUndecided
+ }
+ ch.sentEOF = true
+ return ch.sendMessage(channelEOFMsg{
+ PeersId: ch.remoteId})
+}
+
+func (ch *channel) Close() error {
+ if !ch.decided {
+ return errUndecided
+ }
+
+ return ch.sendMessage(channelCloseMsg{
+ PeersId: ch.remoteId})
+}
+
+// Extended returns an io.ReadWriter that sends and receives data on the
+// given SSH extended stream. Such streams are used, for example, for stderr.
+func (ch *channel) Extended(code uint32) io.ReadWriter {
+ if !ch.decided {
+ return nil
+ }
+ return &extChannel{code, ch}
+}
+
+func (ch *channel) Stderr() io.ReadWriter {
+ return ch.Extended(1)
+}
+
+func (ch *channel) SendRequest(name string, wantReply bool, payload []byte) (bool, error) {
+ if !ch.decided {
+ return false, errUndecided
+ }
+
+ if wantReply {
+ ch.sentRequestMu.Lock()
+ defer ch.sentRequestMu.Unlock()
+ }
+
+ msg := channelRequestMsg{
+ PeersId: ch.remoteId,
+ Request: name,
+ WantReply: wantReply,
+ RequestSpecificData: payload,
+ }
+
+ if err := ch.sendMessage(msg); err != nil {
+ return false, err
+ }
+
+ if wantReply {
+ m, ok := (<-ch.msg)
+ if !ok {
+ return false, io.EOF
+ }
+ switch m.(type) {
+ case *channelRequestFailureMsg:
+ return false, nil
+ case *channelRequestSuccessMsg:
+ return true, nil
+ default:
+ return false, fmt.Errorf("ssh: unexpected response to channel request: %#v", m)
+ }
+ }
+
+ return false, nil
+}
+
+// ackRequest either sends an ack or nack to the channel request.
+func (ch *channel) ackRequest(ok bool) error {
+ if !ch.decided {
+ return errUndecided
+ }
+
+ var msg interface{}
+ if !ok {
+ msg = channelRequestFailureMsg{
+ PeersId: ch.remoteId,
+ }
+ } else {
+ msg = channelRequestSuccessMsg{
+ PeersId: ch.remoteId,
+ }
+ }
+ return ch.sendMessage(msg)
+}
+
+func (ch *channel) ChannelType() string {
+ return ch.chanType
+}
+
+func (ch *channel) ExtraData() []byte {
+ return ch.extraData
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/cipher.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/cipher.go
new file mode 100644
index 00000000000..2732963f39e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/cipher.go
@@ -0,0 +1,552 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "crypto/aes"
+ "crypto/cipher"
+ "crypto/rc4"
+ "crypto/subtle"
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "hash"
+ "io"
+ "io/ioutil"
+)
+
+const (
+ packetSizeMultiple = 16 // TODO(huin) this should be determined by the cipher.
+
+ // RFC 4253 section 6.1 defines a minimum packet size of 32768 that implementations
+ // MUST be able to process (plus a few more kilobytes for padding and mac). The RFC
+ // indicates implementations SHOULD be able to handle larger packet sizes, but then
+ // waffles on about reasonable limits.
+ //
+ // OpenSSH caps their maxPacket at 256kB so we choose to do
+ // the same. maxPacket is also used to ensure that uint32
+ // length fields do not overflow, so it should remain well
+ // below 4G.
+ maxPacket = 256 * 1024
+)
+
+// noneCipher implements cipher.Stream and provides no encryption. It is used
+// by the transport before the first key-exchange.
+type noneCipher struct{}
+
+func (c noneCipher) XORKeyStream(dst, src []byte) {
+ copy(dst, src)
+}
+
+func newAESCTR(key, iv []byte) (cipher.Stream, error) {
+ c, err := aes.NewCipher(key)
+ if err != nil {
+ return nil, err
+ }
+ return cipher.NewCTR(c, iv), nil
+}
+
+func newRC4(key, iv []byte) (cipher.Stream, error) {
+ return rc4.NewCipher(key)
+}
+
+type streamCipherMode struct {
+ keySize int
+ ivSize int
+ skip int
+ createFunc func(key, iv []byte) (cipher.Stream, error)
+}
+
+func (c *streamCipherMode) createStream(key, iv []byte) (cipher.Stream, error) {
+ if len(key) < c.keySize {
+ panic("ssh: key length too small for cipher")
+ }
+ if len(iv) < c.ivSize {
+ panic("ssh: iv too small for cipher")
+ }
+
+ stream, err := c.createFunc(key[:c.keySize], iv[:c.ivSize])
+ if err != nil {
+ return nil, err
+ }
+
+ var streamDump []byte
+ if c.skip > 0 {
+ streamDump = make([]byte, 512)
+ }
+
+ for remainingToDump := c.skip; remainingToDump > 0; {
+ dumpThisTime := remainingToDump
+ if dumpThisTime > len(streamDump) {
+ dumpThisTime = len(streamDump)
+ }
+ stream.XORKeyStream(streamDump[:dumpThisTime], streamDump[:dumpThisTime])
+ remainingToDump -= dumpThisTime
+ }
+
+ return stream, nil
+}
+
+// cipherModes documents properties of supported ciphers. Ciphers not included
+// are not supported and will not be negotiated, even if explicitly requested in
+// Config.Ciphers.
+var cipherModes = map[string]*streamCipherMode{
+ // Ciphers from RFC4344, which introduced many CTR-based ciphers. Algorithms
+ // are defined in the order specified in the RFC.
+ "aes128-ctr": {16, aes.BlockSize, 0, newAESCTR},
+ "aes192-ctr": {24, aes.BlockSize, 0, newAESCTR},
+ "aes256-ctr": {32, aes.BlockSize, 0, newAESCTR},
+
+ // Ciphers from RFC4345, which introduces security-improved arcfour ciphers.
+ // They are defined in the order specified in the RFC.
+ "arcfour128": {16, 0, 1536, newRC4},
+ "arcfour256": {32, 0, 1536, newRC4},
+
+ // Cipher defined in RFC 4253, which describes SSH Transport Layer Protocol.
+ // Note that this cipher is not safe, as stated in RFC 4253: "Arcfour (and
+ // RC4) has problems with weak keys, and should be used with caution."
+ // RFC4345 introduces improved versions of Arcfour.
+ "arcfour": {16, 0, 0, newRC4},
+
+ // AES-GCM is not a stream cipher, so it is constructed with a
+ // special case. If we add any more non-stream ciphers, we
+ // should invest in a cleaner way to do this.
+ gcmCipherID: {16, 12, 0, nil},
+
+ // CBC mode is insecure and so is not included in the default config.
+ // (See http://www.isg.rhul.ac.uk/~kp/SandPfinal.pdf). If absolutely
+ // needed, it's possible to specify a custom Config to enable it.
+ // You should expect that an active attacker can recover plaintext if
+ // you do.
+ aes128cbcID: {16, aes.BlockSize, 0, nil},
+}
+
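+// As a sketch, a caller that really needs CBC could list the cipher
+// explicitly in the shared Config (the Ciphers field, assumed here to be the
+// preference list defined in common.go, replaces the default set):
+//
+//	config := &ClientConfig{User: "user"}
+//	config.Ciphers = []string{"aes128-ctr", "aes128-cbc"}
+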
+// prefixLen is the length of the packet prefix that contains the packet length
+// and number of padding bytes.
+const prefixLen = 5
+
+// streamPacketCipher is a packetCipher using a stream cipher.
+type streamPacketCipher struct {
+ mac hash.Hash
+ cipher cipher.Stream
+
+ // The following members are to avoid per-packet allocations.
+ prefix [prefixLen]byte
+ seqNumBytes [4]byte
+ padding [2 * packetSizeMultiple]byte
+ packetData []byte
+ macResult []byte
+}
+
+// readPacket reads and decrypts a single packet from the reader argument.
+func (s *streamPacketCipher) readPacket(seqNum uint32, r io.Reader) ([]byte, error) {
+ if _, err := io.ReadFull(r, s.prefix[:]); err != nil {
+ return nil, err
+ }
+
+ s.cipher.XORKeyStream(s.prefix[:], s.prefix[:])
+ length := binary.BigEndian.Uint32(s.prefix[0:4])
+ paddingLength := uint32(s.prefix[4])
+
+ var macSize uint32
+ if s.mac != nil {
+ s.mac.Reset()
+ binary.BigEndian.PutUint32(s.seqNumBytes[:], seqNum)
+ s.mac.Write(s.seqNumBytes[:])
+ s.mac.Write(s.prefix[:])
+ macSize = uint32(s.mac.Size())
+ }
+
+ if length <= paddingLength+1 {
+ return nil, errors.New("ssh: invalid packet length, packet too small")
+ }
+
+ if length > maxPacket {
+ return nil, errors.New("ssh: invalid packet length, packet too large")
+ }
+
+ // the maxPacket check above ensures that length-1+macSize
+ // does not overflow.
+ if uint32(cap(s.packetData)) < length-1+macSize {
+ s.packetData = make([]byte, length-1+macSize)
+ } else {
+ s.packetData = s.packetData[:length-1+macSize]
+ }
+
+ if _, err := io.ReadFull(r, s.packetData); err != nil {
+ return nil, err
+ }
+ mac := s.packetData[length-1:]
+ data := s.packetData[:length-1]
+ s.cipher.XORKeyStream(data, data)
+
+ if s.mac != nil {
+ s.mac.Write(data)
+ s.macResult = s.mac.Sum(s.macResult[:0])
+ if subtle.ConstantTimeCompare(s.macResult, mac) != 1 {
+ return nil, errors.New("ssh: MAC failure")
+ }
+ }
+
+ return s.packetData[:length-paddingLength-1], nil
+}
+
+// writePacket encrypts and sends a packet of data to the writer argument.
+func (s *streamPacketCipher) writePacket(seqNum uint32, w io.Writer, rand io.Reader, packet []byte) error {
+ if len(packet) > maxPacket {
+ return errors.New("ssh: packet too large")
+ }
+
+ paddingLength := packetSizeMultiple - (prefixLen+len(packet))%packetSizeMultiple
+ if paddingLength < 4 {
+ paddingLength += packetSizeMultiple
+ }
+
+ length := len(packet) + 1 + paddingLength
+ binary.BigEndian.PutUint32(s.prefix[:], uint32(length))
+ s.prefix[4] = byte(paddingLength)
+ padding := s.padding[:paddingLength]
+ if _, err := io.ReadFull(rand, padding); err != nil {
+ return err
+ }
+
+ if s.mac != nil {
+ s.mac.Reset()
+ binary.BigEndian.PutUint32(s.seqNumBytes[:], seqNum)
+ s.mac.Write(s.seqNumBytes[:])
+ s.mac.Write(s.prefix[:])
+ s.mac.Write(packet)
+ s.mac.Write(padding)
+ }
+
+ s.cipher.XORKeyStream(s.prefix[:], s.prefix[:])
+ s.cipher.XORKeyStream(packet, packet)
+ s.cipher.XORKeyStream(padding, padding)
+
+ if _, err := w.Write(s.prefix[:]); err != nil {
+ return err
+ }
+ if _, err := w.Write(packet); err != nil {
+ return err
+ }
+ if _, err := w.Write(padding); err != nil {
+ return err
+ }
+
+ if s.mac != nil {
+ s.macResult = s.mac.Sum(s.macResult[:0])
+ if _, err := w.Write(s.macResult); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+type gcmCipher struct {
+ aead cipher.AEAD
+ prefix [4]byte
+ iv []byte
+ buf []byte
+}
+
+func newGCMCipher(iv, key, macKey []byte) (packetCipher, error) {
+ c, err := aes.NewCipher(key)
+ if err != nil {
+ return nil, err
+ }
+
+ aead, err := cipher.NewGCM(c)
+ if err != nil {
+ return nil, err
+ }
+
+ return &gcmCipher{
+ aead: aead,
+ iv: iv,
+ }, nil
+}
+
+const gcmTagSize = 16
+
+func (c *gcmCipher) writePacket(seqNum uint32, w io.Writer, rand io.Reader, packet []byte) error {
+ // Pad out to multiple of 16 bytes. This is different from the
+ // stream cipher because that encrypts the length too.
+ padding := byte(packetSizeMultiple - (1+len(packet))%packetSizeMultiple)
+ if padding < 4 {
+ padding += packetSizeMultiple
+ }
+
+ length := uint32(len(packet) + int(padding) + 1)
+ binary.BigEndian.PutUint32(c.prefix[:], length)
+ if _, err := w.Write(c.prefix[:]); err != nil {
+ return err
+ }
+
+ if cap(c.buf) < int(length) {
+ c.buf = make([]byte, length)
+ } else {
+ c.buf = c.buf[:length]
+ }
+
+ c.buf[0] = padding
+ copy(c.buf[1:], packet)
+ if _, err := io.ReadFull(rand, c.buf[1+len(packet):]); err != nil {
+ return err
+ }
+ c.buf = c.aead.Seal(c.buf[:0], c.iv, c.buf, c.prefix[:])
+ if _, err := w.Write(c.buf); err != nil {
+ return err
+ }
+ c.incIV()
+
+ return nil
+}
+
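+// incIV increments the invocation-counter portion of the GCM nonce (its last
+// eight bytes), leaving the four-byte fixed field untouched, as described in
+// RFC 5647.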
+func (c *gcmCipher) incIV() {
+ for i := 4 + 7; i >= 4; i-- {
+ c.iv[i]++
+ if c.iv[i] != 0 {
+ break
+ }
+ }
+}
+
+func (c *gcmCipher) readPacket(seqNum uint32, r io.Reader) ([]byte, error) {
+ if _, err := io.ReadFull(r, c.prefix[:]); err != nil {
+ return nil, err
+ }
+ length := binary.BigEndian.Uint32(c.prefix[:])
+ if length > maxPacket {
+ return nil, errors.New("ssh: max packet length exceeded.")
+ }
+
+ if cap(c.buf) < int(length+gcmTagSize) {
+ c.buf = make([]byte, length+gcmTagSize)
+ } else {
+ c.buf = c.buf[:length+gcmTagSize]
+ }
+
+ if _, err := io.ReadFull(r, c.buf); err != nil {
+ return nil, err
+ }
+
+ plain, err := c.aead.Open(c.buf[:0], c.iv, c.buf, c.prefix[:])
+ if err != nil {
+ return nil, err
+ }
+ c.incIV()
+
+ padding := plain[0]
+ if padding < 4 || padding >= 20 {
+ return nil, fmt.Errorf("ssh: illegal padding %d", padding)
+ }
+
+ if int(padding+1) >= len(plain) {
+ return nil, fmt.Errorf("ssh: padding %d too large", padding)
+ }
+ plain = plain[1 : length-uint32(padding)]
+ return plain, nil
+}
+
+// cbcCipher implements the aes128-cbc cipher defined in RFC 4253, section 6.1.
+type cbcCipher struct {
+ mac hash.Hash
+ macSize uint32
+ decrypter cipher.BlockMode
+ encrypter cipher.BlockMode
+
+ // The following members are to avoid per-packet allocations.
+ seqNumBytes [4]byte
+ packetData []byte
+ macResult []byte
+
+ // Amount of data we should still read to hide which
+ // verification error triggered.
+ oracleCamouflage uint32
+}
+
+func newAESCBCCipher(iv, key, macKey []byte, algs directionAlgorithms) (packetCipher, error) {
+ c, err := aes.NewCipher(key)
+ if err != nil {
+ return nil, err
+ }
+
+ cbc := &cbcCipher{
+ mac: macModes[algs.MAC].new(macKey),
+ decrypter: cipher.NewCBCDecrypter(c, iv),
+ encrypter: cipher.NewCBCEncrypter(c, iv),
+ packetData: make([]byte, 1024),
+ }
+ if cbc.mac != nil {
+ cbc.macSize = uint32(cbc.mac.Size())
+ }
+
+ return cbc, nil
+}
+
+func maxUInt32(a, b int) uint32 {
+ if a > b {
+ return uint32(a)
+ }
+ return uint32(b)
+}
+
+const (
+ cbcMinPacketSizeMultiple = 8
+ cbcMinPacketSize = 16
+ cbcMinPaddingSize = 4
+)
+
+// cbcError represents a verification error that may leak information.
+type cbcError string
+
+func (e cbcError) Error() string { return string(e) }
+
+func (c *cbcCipher) readPacket(seqNum uint32, r io.Reader) ([]byte, error) {
+ p, err := c.readPacketLeaky(seqNum, r)
+ if err != nil {
+ if _, ok := err.(cbcError); ok {
+ // Verification error: read a fixed amount of
+ // data, to make distinguishing between
+ // failing MAC and failing length check more
+ // difficult.
+ io.CopyN(ioutil.Discard, r, int64(c.oracleCamouflage))
+ }
+ }
+ return p, err
+}
+
+func (c *cbcCipher) readPacketLeaky(seqNum uint32, r io.Reader) ([]byte, error) {
+ blockSize := c.decrypter.BlockSize()
+
+ // Read the header, which will include some of the subsequent data in the
+ // case of block ciphers - this is copied back to the payload later.
+ // firstBlockLength is the number of header, payload and padding bytes
+ // covered by this first, block-aligned read.
+ firstBlockLength := uint32((prefixLen + blockSize - 1) / blockSize * blockSize)
+ firstBlock := c.packetData[:firstBlockLength]
+ if _, err := io.ReadFull(r, firstBlock); err != nil {
+ return nil, err
+ }
+
+ c.oracleCamouflage = maxPacket + 4 + c.macSize - firstBlockLength
+
+ c.decrypter.CryptBlocks(firstBlock, firstBlock)
+ length := binary.BigEndian.Uint32(firstBlock[:4])
+ if length > maxPacket {
+ return nil, cbcError("ssh: packet too large")
+ }
+ if length+4 < maxUInt32(cbcMinPacketSize, blockSize) {
+ // The minimum size of a packet is 16 (or the cipher block size, whichever
+ // is larger) bytes.
+ return nil, cbcError("ssh: packet too small")
+ }
+ // The length of the packet (including the length field but not the MAC) must
+ // be a multiple of the block size or 8, whichever is larger.
+ if (length+4)%maxUInt32(cbcMinPacketSizeMultiple, blockSize) != 0 {
+ return nil, cbcError("ssh: invalid packet length multiple")
+ }
+
+ paddingLength := uint32(firstBlock[4])
+ if paddingLength < cbcMinPaddingSize || length <= paddingLength+1 {
+ return nil, cbcError("ssh: invalid packet length")
+ }
+
+ // Positions within the c.packetData buffer:
+ macStart := 4 + length
+ paddingStart := macStart - paddingLength
+
+ // Entire packet size, starting before length, ending at end of mac.
+ entirePacketSize := macStart + c.macSize
+
+ // Ensure c.packetData is large enough for the entire packet data.
+ if uint32(cap(c.packetData)) < entirePacketSize {
+ // Still need to upsize and copy, but this should be rare at runtime, only
+ // on upsizing the packetData buffer.
+ c.packetData = make([]byte, entirePacketSize)
+ copy(c.packetData, firstBlock)
+ } else {
+ c.packetData = c.packetData[:entirePacketSize]
+ }
+
+ n, err := io.ReadFull(r, c.packetData[firstBlockLength:])
+ if err != nil {
+ return nil, err
+ }
+ c.oracleCamouflage -= uint32(n)
+
+ remainingCrypted := c.packetData[firstBlockLength:macStart]
+ c.decrypter.CryptBlocks(remainingCrypted, remainingCrypted)
+
+ mac := c.packetData[macStart:]
+ if c.mac != nil {
+ c.mac.Reset()
+ binary.BigEndian.PutUint32(c.seqNumBytes[:], seqNum)
+ c.mac.Write(c.seqNumBytes[:])
+ c.mac.Write(c.packetData[:macStart])
+ c.macResult = c.mac.Sum(c.macResult[:0])
+ if subtle.ConstantTimeCompare(c.macResult, mac) != 1 {
+ return nil, cbcError("ssh: MAC failure")
+ }
+ }
+
+ return c.packetData[prefixLen:paddingStart], nil
+}
+
+func (c *cbcCipher) writePacket(seqNum uint32, w io.Writer, rand io.Reader, packet []byte) error {
+ effectiveBlockSize := maxUInt32(cbcMinPacketSizeMultiple, c.encrypter.BlockSize())
+
+ // Length of encrypted portion of the packet (header, payload, padding).
+ // Enforce minimum padding and packet size.
+ encLength := maxUInt32(prefixLen+len(packet)+cbcMinPaddingSize, cbcMinPaddingSize)
+ // Enforce block size.
+ encLength = (encLength + effectiveBlockSize - 1) / effectiveBlockSize * effectiveBlockSize
+
+ length := encLength - 4
+ paddingLength := int(length) - (1 + len(packet))
+
+ // Overall buffer contains: header, payload, padding, mac.
+ // Space for the MAC is reserved in the capacity but not the slice length.
+ bufferSize := encLength + c.macSize
+ if uint32(cap(c.packetData)) < bufferSize {
+ c.packetData = make([]byte, encLength, bufferSize)
+ } else {
+ c.packetData = c.packetData[:encLength]
+ }
+
+ p := c.packetData
+
+ // Packet header.
+ binary.BigEndian.PutUint32(p, length)
+ p = p[4:]
+ p[0] = byte(paddingLength)
+
+ // Payload.
+ p = p[1:]
+ copy(p, packet)
+
+ // Padding.
+ p = p[len(packet):]
+ if _, err := io.ReadFull(rand, p); err != nil {
+ return err
+ }
+
+ if c.mac != nil {
+ c.mac.Reset()
+ binary.BigEndian.PutUint32(c.seqNumBytes[:], seqNum)
+ c.mac.Write(c.seqNumBytes[:])
+ c.mac.Write(c.packetData)
+ // The MAC is now appended into the capacity reserved for it earlier.
+ c.packetData = c.mac.Sum(c.packetData)
+ }
+
+ c.encrypter.CryptBlocks(c.packetData[:encLength], c.packetData[:encLength])
+
+ if _, err := w.Write(c.packetData); err != nil {
+ return err
+ }
+
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/cipher_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/cipher_test.go
new file mode 100644
index 00000000000..54b92b6edce
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/cipher_test.go
@@ -0,0 +1,127 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "crypto"
+ "crypto/aes"
+ "crypto/rand"
+ "testing"
+)
+
+func TestDefaultCiphersExist(t *testing.T) {
+ for _, cipherAlgo := range supportedCiphers {
+ if _, ok := cipherModes[cipherAlgo]; !ok {
+ t.Errorf("default cipher %q is unknown", cipherAlgo)
+ }
+ }
+}
+
+func TestPacketCiphers(t *testing.T) {
+ // Still test the aes128cbc cipher although it's commented out.
+ cipherModes[aes128cbcID] = &streamCipherMode{16, aes.BlockSize, 0, nil}
+ defer delete(cipherModes, aes128cbcID)
+
+ for cipher := range cipherModes {
+ kr := &kexResult{Hash: crypto.SHA1}
+ algs := directionAlgorithms{
+ Cipher: cipher,
+ MAC: "hmac-sha1",
+ Compression: "none",
+ }
+ client, err := newPacketCipher(clientKeys, algs, kr)
+ if err != nil {
+ t.Errorf("newPacketCipher(client, %q): %v", cipher, err)
+ continue
+ }
+ server, err := newPacketCipher(clientKeys, algs, kr)
+ if err != nil {
+ t.Errorf("newPacketCipher(client, %q): %v", cipher, err)
+ continue
+ }
+
+ want := "bla bla"
+ input := []byte(want)
+ buf := &bytes.Buffer{}
+ if err := client.writePacket(0, buf, rand.Reader, input); err != nil {
+ t.Errorf("writePacket(%q): %v", cipher, err)
+ continue
+ }
+
+ packet, err := server.readPacket(0, buf)
+ if err != nil {
+ t.Errorf("readPacket(%q): %v", cipher, err)
+ continue
+ }
+
+ if string(packet) != want {
+ t.Errorf("roundtrip(%q): got %q, want %q", cipher, packet, want)
+ }
+ }
+}
+
+func TestCBCOracleCounterMeasure(t *testing.T) {
+ cipherModes[aes128cbcID] = &streamCipherMode{16, aes.BlockSize, 0, nil}
+ defer delete(cipherModes, aes128cbcID)
+
+ kr := &kexResult{Hash: crypto.SHA1}
+ algs := directionAlgorithms{
+ Cipher: aes128cbcID,
+ MAC: "hmac-sha1",
+ Compression: "none",
+ }
+ client, err := newPacketCipher(clientKeys, algs, kr)
+ if err != nil {
+ t.Fatalf("newPacketCipher(client): %v", err)
+ }
+
+ want := "bla bla"
+ input := []byte(want)
+ buf := &bytes.Buffer{}
+ if err := client.writePacket(0, buf, rand.Reader, input); err != nil {
+ t.Errorf("writePacket: %v", err)
+ }
+
+ packetSize := buf.Len()
+ buf.Write(make([]byte, 2*maxPacket))
+
+ // We corrupt each byte, but this usually will only test the
+ // 'packet too large' or 'MAC failure' cases.
+ lastRead := -1
+ for i := 0; i < packetSize; i++ {
+ server, err := newPacketCipher(clientKeys, algs, kr)
+ if err != nil {
+ t.Fatalf("newPacketCipher(client): %v", err)
+ }
+
+ fresh := &bytes.Buffer{}
+ fresh.Write(buf.Bytes())
+ fresh.Bytes()[i] ^= 0x01
+
+ before := fresh.Len()
+ _, err = server.readPacket(0, fresh)
+ if err == nil {
+ t.Errorf("corrupt byte %d: readPacket succeeded ", i)
+ continue
+ }
+ if _, ok := err.(cbcError); !ok {
+ t.Errorf("corrupt byte %d: got %v (%T), want cbcError", i, err, err)
+ continue
+ }
+
+ after := fresh.Len()
+ bytesRead := before - after
+ if bytesRead < maxPacket {
+ t.Errorf("corrupt byte %d: read %d bytes, want more than %d", i, bytesRead, maxPacket)
+ continue
+ }
+
+ if i > 0 && bytesRead != lastRead {
+ t.Errorf("corrupt byte %d: read %d bytes, want %d bytes read", i, bytesRead, lastRead)
+ }
+ lastRead = bytesRead
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client.go
new file mode 100644
index 00000000000..0b9fbe5002a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client.go
@@ -0,0 +1,213 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "errors"
+ "fmt"
+ "net"
+ "sync"
+)
+
+// Client implements a traditional SSH client that supports shells,
+// subprocesses, port forwarding and tunneled dialing.
+type Client struct {
+ Conn
+
+ forwards forwardList // forwarded tcpip connections from the remote side
+ mu sync.Mutex
+ channelHandlers map[string]chan NewChannel
+}
+
+// HandleChannelOpen returns a channel on which NewChannel requests
+// for the given type are sent. If the type is already being handled,
+// nil is returned. The channel is closed when the connection is closed.
+func (c *Client) HandleChannelOpen(channelType string) <-chan NewChannel {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ if c.channelHandlers == nil {
+ // The SSH channel has been closed.
+ c := make(chan NewChannel)
+ close(c)
+ return c
+ }
+
+ ch := c.channelHandlers[channelType]
+ if ch != nil {
+ return nil
+ }
+
+ ch = make(chan NewChannel, 16)
+ c.channelHandlers[channelType] = ch
+ return ch
+}
+
+// NewClient creates a Client on top of the given connection.
+func NewClient(c Conn, chans <-chan NewChannel, reqs <-chan *Request) *Client {
+ conn := &Client{
+ Conn: c,
+ channelHandlers: make(map[string]chan NewChannel, 1),
+ }
+
+ go conn.handleGlobalRequests(reqs)
+ go conn.handleChannelOpens(chans)
+ go func() {
+ conn.Wait()
+ conn.forwards.closeAll()
+ }()
+ go conn.forwards.handleChannels(conn.HandleChannelOpen("forwarded-tcpip"))
+ return conn
+}
+
+// NewClientConn establishes an authenticated SSH connection using c
+// as the underlying transport. The Request and NewChannel channels
+// must be serviced or the connection will hang.
+func NewClientConn(c net.Conn, addr string, config *ClientConfig) (Conn, <-chan NewChannel, <-chan *Request, error) {
+ fullConf := *config
+ fullConf.SetDefaults()
+ conn := &connection{
+ sshConn: sshConn{conn: c},
+ }
+
+ if err := conn.clientHandshake(addr, &fullConf); err != nil {
+ c.Close()
+ return nil, nil, nil, fmt.Errorf("ssh: handshake failed: %v", err)
+ }
+ conn.mux = newMux(conn.transport)
+ return conn, conn.mux.incomingChannels, conn.mux.incomingRequests, nil
+}
+
+// clientHandshake performs the client side key exchange. See RFC 4253 Section
+// 7.
+func (c *connection) clientHandshake(dialAddress string, config *ClientConfig) error {
+ if config.ClientVersion != "" {
+ c.clientVersion = []byte(config.ClientVersion)
+ } else {
+ c.clientVersion = []byte(packageVersion)
+ }
+ var err error
+ c.serverVersion, err = exchangeVersions(c.sshConn.conn, c.clientVersion)
+ if err != nil {
+ return err
+ }
+
+ c.transport = newClientTransport(
+ newTransport(c.sshConn.conn, config.Rand, true /* is client */),
+ c.clientVersion, c.serverVersion, config, dialAddress, c.sshConn.RemoteAddr())
+ if err := c.transport.requestKeyChange(); err != nil {
+ return err
+ }
+
+ if packet, err := c.transport.readPacket(); err != nil {
+ return err
+ } else if packet[0] != msgNewKeys {
+ return unexpectedMessageError(msgNewKeys, packet[0])
+ }
+
+ // We just did the key change, so the session ID is established.
+ c.sessionID = c.transport.getSessionID()
+
+ return c.clientAuthenticate(config)
+}
+
+// verifyHostKeySignature verifies the host key obtained in the key
+// exchange.
+func verifyHostKeySignature(hostKey PublicKey, result *kexResult) error {
+ sig, rest, ok := parseSignatureBody(result.Signature)
+ if len(rest) > 0 || !ok {
+ return errors.New("ssh: signature parse error")
+ }
+
+ return hostKey.Verify(result.H, sig)
+}
+
+// NewSession opens a new Session for this client. (A session is a remote
+// execution of a program.)
+func (c *Client) NewSession() (*Session, error) {
+ ch, in, err := c.OpenChannel("session", nil)
+ if err != nil {
+ return nil, err
+ }
+ return newSession(ch, in)
+}
+
+func (c *Client) handleGlobalRequests(incoming <-chan *Request) {
+ for r := range incoming {
+ // This handles keepalive messages and matches
+ // the behaviour of OpenSSH.
+ r.Reply(false, nil)
+ }
+}
+
+// handleChannelOpens handles channel open messages from the remote side.
+func (c *Client) handleChannelOpens(in <-chan NewChannel) {
+ for ch := range in {
+ c.mu.Lock()
+ handler := c.channelHandlers[ch.ChannelType()]
+ c.mu.Unlock()
+
+ if handler != nil {
+ handler <- ch
+ } else {
+ ch.Reject(UnknownChannelType, fmt.Sprintf("unknown channel type: %v", ch.ChannelType()))
+ }
+ }
+
+ c.mu.Lock()
+ for _, ch := range c.channelHandlers {
+ close(ch)
+ }
+ c.channelHandlers = nil
+ c.mu.Unlock()
+}
+
+// Dial starts a client connection to the given SSH server. It is a
+// convenience function that connects to the given network address,
+// initiates the SSH handshake, and then sets up a Client. For access
+// to incoming channels and requests, use net.Dial with NewClientConn
+// instead.
+func Dial(network, addr string, config *ClientConfig) (*Client, error) {
+ conn, err := net.Dial(network, addr)
+ if err != nil {
+ return nil, err
+ }
+ c, chans, reqs, err := NewClientConn(conn, addr, config)
+ if err != nil {
+ return nil, err
+ }
+ return NewClient(c, chans, reqs), nil
+}
+
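+// A minimal dial-and-session sketch (the host, credentials and error handling
+// are placeholders):
+//
+//	config := &ClientConfig{
+//		User: "username",
+//		Auth: []AuthMethod{Password("password")},
+//	}
+//	client, err := Dial("tcp", "host.example.com:22", config)
+//	if err != nil {
+//		// handle error
+//	}
+//	defer client.Close()
+//
+//	session, err := client.NewSession()
+//	if err != nil {
+//		// handle error
+//	}
+//	defer session.Close()
+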
+// A ClientConfig structure is used to configure a Client. It must not be
+// modified after having been passed to an SSH function.
+type ClientConfig struct {
+ // Config contains configuration that is shared between clients and
+ // servers.
+ Config
+
+ // User contains the username to authenticate as.
+ User string
+
+ // Auth contains possible authentication methods to use with the
+ // server. Only the first instance of a particular RFC 4252 method will
+ // be used during authentication.
+ Auth []AuthMethod
+
+ // HostKeyCallback, if not nil, is called during the cryptographic
+ // handshake to validate the server's host key. A nil HostKeyCallback
+ // implies that all host keys are accepted.
+ HostKeyCallback func(hostname string, remote net.Addr, key PublicKey) error
+
+ // ClientVersion contains the version identification string that will
+ // be used for the connection. If empty, a reasonable default is used.
+ ClientVersion string
+
+ // HostKeyAlgorithms lists the key types that the client will
+ // accept from the server as host key, in order of
+ // preference. If empty, a reasonable default is used. Any
+ // string returned from PublicKey.Type method may be used, or
+ // any of the CertAlgoXxxx and KeyAlgoXxxx constants.
+ HostKeyAlgorithms []string
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_auth.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_auth.go
new file mode 100644
index 00000000000..e15be3ef29d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_auth.go
@@ -0,0 +1,441 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+)
+
+// clientAuthenticate authenticates with the remote server. See RFC 4252.
+func (c *connection) clientAuthenticate(config *ClientConfig) error {
+ // initiate user auth session
+ if err := c.transport.writePacket(Marshal(&serviceRequestMsg{serviceUserAuth})); err != nil {
+ return err
+ }
+ packet, err := c.transport.readPacket()
+ if err != nil {
+ return err
+ }
+ var serviceAccept serviceAcceptMsg
+ if err := Unmarshal(packet, &serviceAccept); err != nil {
+ return err
+ }
+
+ // During the authentication phase the client first attempts the "none"
+ // method, then any untried methods suggested by the server.
+ tried := make(map[string]bool)
+ var lastMethods []string
+ for auth := AuthMethod(new(noneAuth)); auth != nil; {
+ ok, methods, err := auth.auth(c.transport.getSessionID(), config.User, c.transport, config.Rand)
+ if err != nil {
+ return err
+ }
+ if ok {
+ // success
+ return nil
+ }
+ tried[auth.method()] = true
+ if methods == nil {
+ methods = lastMethods
+ }
+ lastMethods = methods
+
+ auth = nil
+
+ findNext:
+ for _, a := range config.Auth {
+ candidateMethod := a.method()
+ if tried[candidateMethod] {
+ continue
+ }
+ for _, meth := range methods {
+ if meth == candidateMethod {
+ auth = a
+ break findNext
+ }
+ }
+ }
+ }
+ return fmt.Errorf("ssh: unable to authenticate, attempted methods %v, no supported methods remain", keys(tried))
+}
+
+func keys(m map[string]bool) []string {
+ s := make([]string, 0, len(m))
+
+ for key := range m {
+ s = append(s, key)
+ }
+ return s
+}
+
+// An AuthMethod represents an instance of an RFC 4252 authentication method.
+type AuthMethod interface {
+ // auth authenticates user over transport t.
+ // Returns true if authentication is successful.
+ // If authentication is not successful, a []string of alternative
+ // method names is returned. If the slice is nil, it will be ignored
+ // and the previous set of possible methods will be reused.
+ auth(session []byte, user string, p packetConn, rand io.Reader) (bool, []string, error)
+
+ // method returns the RFC 4252 method name.
+ method() string
+}
+
+// "none" authentication, RFC 4252 section 5.2.
+type noneAuth int
+
+func (n *noneAuth) auth(session []byte, user string, c packetConn, rand io.Reader) (bool, []string, error) {
+ if err := c.writePacket(Marshal(&userAuthRequestMsg{
+ User: user,
+ Service: serviceSSH,
+ Method: "none",
+ })); err != nil {
+ return false, nil, err
+ }
+
+ return handleAuthResponse(c)
+}
+
+func (n *noneAuth) method() string {
+ return "none"
+}
+
+// passwordCallback is an AuthMethod that fetches the password through
+// a function call, e.g. by prompting the user.
+type passwordCallback func() (password string, err error)
+
+func (cb passwordCallback) auth(session []byte, user string, c packetConn, rand io.Reader) (bool, []string, error) {
+ type passwordAuthMsg struct {
+ User string `sshtype:"50"`
+ Service string
+ Method string
+ Reply bool
+ Password string
+ }
+
+ pw, err := cb()
+ // REVIEW NOTE: is there a need to support skipping a password attempt?
+ // The program may only find out that the user doesn't have a password
+ // when prompting.
+ if err != nil {
+ return false, nil, err
+ }
+
+ if err := c.writePacket(Marshal(&passwordAuthMsg{
+ User: user,
+ Service: serviceSSH,
+ Method: cb.method(),
+ Reply: false,
+ Password: pw,
+ })); err != nil {
+ return false, nil, err
+ }
+
+ return handleAuthResponse(c)
+}
+
+func (cb passwordCallback) method() string {
+ return "password"
+}
+
+// Password returns an AuthMethod using the given password.
+func Password(secret string) AuthMethod {
+ return passwordCallback(func() (string, error) { return secret, nil })
+}
+
+// PasswordCallback returns an AuthMethod that uses a callback for
+// fetching a password.
+func PasswordCallback(prompt func() (secret string, err error)) AuthMethod {
+ return passwordCallback(prompt)
+}
+
+type publickeyAuthMsg struct {
+ User string `sshtype:"50"`
+ Service string
+ Method string
+ // HasSig indicates to the receiver that the auth request is signed and
+ // should be used for authentication of the request.
+ HasSig bool
+ Algoname string
+ PubKey []byte
+ // Sig is tagged with "rest" so Marshal will exclude it during
+ // validateKey
+ Sig []byte `ssh:"rest"`
+}
+
+// publicKeyCallback is an AuthMethod that uses a set of key
+// pairs for authentication.
+type publicKeyCallback func() ([]Signer, error)
+
+func (cb publicKeyCallback) method() string {
+ return "publickey"
+}
+
+func (cb publicKeyCallback) auth(session []byte, user string, c packetConn, rand io.Reader) (bool, []string, error) {
+ // Authentication is performed in two stages. The first stage sends an
+ // enquiry to test if each key is acceptable to the remote. The second
+ // stage attempts to authenticate with the valid keys obtained in the
+ // first stage.
+
+ signers, err := cb()
+ if err != nil {
+ return false, nil, err
+ }
+ var validKeys []Signer
+ for _, signer := range signers {
+ if ok, err := validateKey(signer.PublicKey(), user, c); ok {
+ validKeys = append(validKeys, signer)
+ } else {
+ if err != nil {
+ return false, nil, err
+ }
+ }
+ }
+
+ // methods that may continue if this auth is not successful.
+ var methods []string
+ for _, signer := range validKeys {
+ pub := signer.PublicKey()
+
+ pubKey := pub.Marshal()
+ sign, err := signer.Sign(rand, buildDataSignedForAuth(session, userAuthRequestMsg{
+ User: user,
+ Service: serviceSSH,
+ Method: cb.method(),
+ }, []byte(pub.Type()), pubKey))
+ if err != nil {
+ return false, nil, err
+ }
+
+ // manually wrap the serialized signature in a string
+ s := Marshal(sign)
+ sig := make([]byte, stringLength(len(s)))
+ marshalString(sig, s)
+ msg := publickeyAuthMsg{
+ User: user,
+ Service: serviceSSH,
+ Method: cb.method(),
+ HasSig: true,
+ Algoname: pub.Type(),
+ PubKey: pubKey,
+ Sig: sig,
+ }
+ p := Marshal(&msg)
+ if err := c.writePacket(p); err != nil {
+ return false, nil, err
+ }
+ var success bool
+ success, methods, err = handleAuthResponse(c)
+ if err != nil {
+ return false, nil, err
+ }
+ if success {
+ return success, methods, err
+ }
+ }
+ return false, methods, nil
+}
+
+// validateKey checks that the provided public key is acceptable to the server.
+func validateKey(key PublicKey, user string, c packetConn) (bool, error) {
+ pubKey := key.Marshal()
+ msg := publickeyAuthMsg{
+ User: user,
+ Service: serviceSSH,
+ Method: "publickey",
+ HasSig: false,
+ Algoname: key.Type(),
+ PubKey: pubKey,
+ }
+ if err := c.writePacket(Marshal(&msg)); err != nil {
+ return false, err
+ }
+
+ return confirmKeyAck(key, c)
+}
+
+func confirmKeyAck(key PublicKey, c packetConn) (bool, error) {
+ pubKey := key.Marshal()
+ algoname := key.Type()
+
+ for {
+ packet, err := c.readPacket()
+ if err != nil {
+ return false, err
+ }
+ switch packet[0] {
+ case msgUserAuthBanner:
+ // TODO(gpaul): add callback to present the banner to the user
+ case msgUserAuthPubKeyOk:
+ var msg userAuthPubKeyOkMsg
+ if err := Unmarshal(packet, &msg); err != nil {
+ return false, err
+ }
+ if msg.Algo != algoname || !bytes.Equal(msg.PubKey, pubKey) {
+ return false, nil
+ }
+ return true, nil
+ case msgUserAuthFailure:
+ return false, nil
+ default:
+ return false, unexpectedMessageError(msgUserAuthSuccess, packet[0])
+ }
+ }
+}
+
+// PublicKeys returns an AuthMethod that uses the given key
+// pairs.
+func PublicKeys(signers ...Signer) AuthMethod {
+ return publicKeyCallback(func() ([]Signer, error) { return signers, nil })
+}
+
+// PublicKeysCallback returns an AuthMethod that runs the given
+// function to obtain a list of key pairs.
+func PublicKeysCallback(getSigners func() (signers []Signer, err error)) AuthMethod {
+ return publicKeyCallback(getSigners)
+}
+
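+// A sketch of public-key authentication from a PEM-encoded private key
+// (ParsePrivateKey is defined in keys.go; the file path is a placeholder):
+//
+//	pemBytes, err := ioutil.ReadFile("/path/to/id_rsa")
+//	if err != nil {
+//		// handle error
+//	}
+//	signer, err := ParsePrivateKey(pemBytes)
+//	if err != nil {
+//		// handle error
+//	}
+//	config := &ClientConfig{
+//		User: "username",
+//		Auth: []AuthMethod{PublicKeys(signer)},
+//	}
+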
+// handleAuthResponse returns whether the preceding authentication request
+// succeeded, along with a list of remaining authentication methods to try
+// next, and an error if an unexpected response was received.
+func handleAuthResponse(c packetConn) (bool, []string, error) {
+ for {
+ packet, err := c.readPacket()
+ if err != nil {
+ return false, nil, err
+ }
+
+ switch packet[0] {
+ case msgUserAuthBanner:
+ // TODO: add callback to present the banner to the user
+ case msgUserAuthFailure:
+ var msg userAuthFailureMsg
+ if err := Unmarshal(packet, &msg); err != nil {
+ return false, nil, err
+ }
+ return false, msg.Methods, nil
+ case msgUserAuthSuccess:
+ return true, nil, nil
+ case msgDisconnect:
+ return false, nil, io.EOF
+ default:
+ return false, nil, unexpectedMessageError(msgUserAuthSuccess, packet[0])
+ }
+ }
+}
+
+// KeyboardInteractiveChallenge should print questions, optionally
+// disabling echoing (e.g. for passwords), and return all the answers.
+// Challenge may be called multiple times in a single session. After
+// successful authentication, the server may send a challenge with no
+// questions, for which the user and instruction messages should be
+// printed. RFC 4256 section 3.3 details how the UI should behave for
+// both CLI and GUI environments.
+type KeyboardInteractiveChallenge func(user, instruction string, questions []string, echos []bool) (answers []string, err error)
+
+// KeyboardInteractive returns an AuthMethod using a prompt/response
+// sequence controlled by the server.
+func KeyboardInteractive(challenge KeyboardInteractiveChallenge) AuthMethod {
+ return challenge
+}
+
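+// A sketch of a challenge callback that answers every prompt with a fixed
+// string (a real callback would prompt the user and honor the echos flags):
+//
+//	answer := func(user, instruction string, questions []string, echos []bool) ([]string, error) {
+//		answers := make([]string, len(questions))
+//		for i := range answers {
+//			answers[i] = "secret"
+//		}
+//		return answers, nil
+//	}
+//	auth := KeyboardInteractive(answer)
+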
+func (cb KeyboardInteractiveChallenge) method() string {
+ return "keyboard-interactive"
+}
+
+func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packetConn, rand io.Reader) (bool, []string, error) {
+ type initiateMsg struct {
+ User string `sshtype:"50"`
+ Service string
+ Method string
+ Language string
+ Submethods string
+ }
+
+ if err := c.writePacket(Marshal(&initiateMsg{
+ User: user,
+ Service: serviceSSH,
+ Method: "keyboard-interactive",
+ })); err != nil {
+ return false, nil, err
+ }
+
+ for {
+ packet, err := c.readPacket()
+ if err != nil {
+ return false, nil, err
+ }
+
+		// Like handleAuthResponse, but with fewer options.
+ switch packet[0] {
+ case msgUserAuthBanner:
+ // TODO: Print banners during userauth.
+ continue
+ case msgUserAuthInfoRequest:
+ // OK
+ case msgUserAuthFailure:
+ var msg userAuthFailureMsg
+ if err := Unmarshal(packet, &msg); err != nil {
+ return false, nil, err
+ }
+ return false, msg.Methods, nil
+ case msgUserAuthSuccess:
+ return true, nil, nil
+ default:
+ return false, nil, unexpectedMessageError(msgUserAuthInfoRequest, packet[0])
+ }
+
+ var msg userAuthInfoRequestMsg
+ if err := Unmarshal(packet, &msg); err != nil {
+ return false, nil, err
+ }
+
+ // Manually unpack the prompt/echo pairs.
+ rest := msg.Prompts
+ var prompts []string
+ var echos []bool
+ for i := 0; i < int(msg.NumPrompts); i++ {
+ prompt, r, ok := parseString(rest)
+ if !ok || len(r) == 0 {
+ return false, nil, errors.New("ssh: prompt format error")
+ }
+ prompts = append(prompts, string(prompt))
+ echos = append(echos, r[0] != 0)
+ rest = r[1:]
+ }
+
+ if len(rest) != 0 {
+ return false, nil, errors.New("ssh: extra data following keyboard-interactive pairs")
+ }
+
+ answers, err := cb(msg.User, msg.Instruction, prompts, echos)
+ if err != nil {
+ return false, nil, err
+ }
+
+ if len(answers) != len(prompts) {
+ return false, nil, errors.New("ssh: not enough answers from keyboard-interactive callback")
+ }
+ responseLength := 1 + 4
+ for _, a := range answers {
+ responseLength += stringLength(len(a))
+ }
+ serialized := make([]byte, responseLength)
+ p := serialized
+ p[0] = msgUserAuthInfoResponse
+ p = p[1:]
+ p = marshalUint32(p, uint32(len(answers)))
+ for _, a := range answers {
+ p = marshalString(p, []byte(a))
+ }
+
+ if err := c.writePacket(serialized); err != nil {
+ return false, nil, err
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_auth_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_auth_test.go
new file mode 100644
index 00000000000..2ea44624fc1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_auth_test.go
@@ -0,0 +1,393 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "crypto/rand"
+ "errors"
+ "fmt"
+ "strings"
+ "testing"
+)
+
+type keyboardInteractive map[string]string
+
+func (cr keyboardInteractive) Challenge(user string, instruction string, questions []string, echos []bool) ([]string, error) {
+ var answers []string
+ for _, q := range questions {
+ answers = append(answers, cr[q])
+ }
+ return answers, nil
+}
+
+// reused internally by tests
+var clientPassword = "tiger"
+
+// tryAuth runs a handshake with a given config against an SSH server
+// with config serverConfig.
+func tryAuth(t *testing.T, config *ClientConfig) error {
+ c1, c2, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+ defer c1.Close()
+ defer c2.Close()
+
+ certChecker := CertChecker{
+ IsAuthority: func(k PublicKey) bool {
+ return bytes.Equal(k.Marshal(), testPublicKeys["ecdsa"].Marshal())
+ },
+ UserKeyFallback: func(conn ConnMetadata, key PublicKey) (*Permissions, error) {
+ if conn.User() == "testuser" && bytes.Equal(key.Marshal(), testPublicKeys["rsa"].Marshal()) {
+ return nil, nil
+ }
+
+ return nil, fmt.Errorf("pubkey for %q not acceptable", conn.User())
+ },
+ IsRevoked: func(c *Certificate) bool {
+ return c.Serial == 666
+ },
+ }
+
+ serverConfig := &ServerConfig{
+ PasswordCallback: func(conn ConnMetadata, pass []byte) (*Permissions, error) {
+ if conn.User() == "testuser" && string(pass) == clientPassword {
+ return nil, nil
+ }
+ return nil, errors.New("password auth failed")
+ },
+ PublicKeyCallback: certChecker.Authenticate,
+ KeyboardInteractiveCallback: func(conn ConnMetadata, challenge KeyboardInteractiveChallenge) (*Permissions, error) {
+ ans, err := challenge("user",
+ "instruction",
+ []string{"question1", "question2"},
+ []bool{true, true})
+ if err != nil {
+ return nil, err
+ }
+ ok := conn.User() == "testuser" && ans[0] == "answer1" && ans[1] == "answer2"
+ if ok {
+ challenge("user", "motd", nil, nil)
+ return nil, nil
+ }
+ return nil, errors.New("keyboard-interactive failed")
+ },
+ AuthLogCallback: func(conn ConnMetadata, method string, err error) {
+ t.Logf("user %q, method %q: %v", conn.User(), method, err)
+ },
+ }
+ serverConfig.AddHostKey(testSigners["rsa"])
+
+ go newServer(c1, serverConfig)
+ _, _, _, err = NewClientConn(c2, "", config)
+ return err
+}
+
+func TestClientAuthPublicKey(t *testing.T) {
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ PublicKeys(testSigners["rsa"]),
+ },
+ }
+ if err := tryAuth(t, config); err != nil {
+ t.Fatalf("unable to dial remote side: %s", err)
+ }
+}
+
+func TestAuthMethodPassword(t *testing.T) {
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ Password(clientPassword),
+ },
+ }
+
+ if err := tryAuth(t, config); err != nil {
+ t.Fatalf("unable to dial remote side: %s", err)
+ }
+}
+
+func TestAuthMethodFallback(t *testing.T) {
+ var passwordCalled bool
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ PublicKeys(testSigners["rsa"]),
+ PasswordCallback(
+ func() (string, error) {
+ passwordCalled = true
+ return "WRONG", nil
+ }),
+ },
+ }
+
+ if err := tryAuth(t, config); err != nil {
+ t.Fatalf("unable to dial remote side: %s", err)
+ }
+
+ if passwordCalled {
+ t.Errorf("password auth tried before public-key auth.")
+ }
+}
+
+func TestAuthMethodWrongPassword(t *testing.T) {
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ Password("wrong"),
+ PublicKeys(testSigners["rsa"]),
+ },
+ }
+
+ if err := tryAuth(t, config); err != nil {
+ t.Fatalf("unable to dial remote side: %s", err)
+ }
+}
+
+func TestAuthMethodKeyboardInteractive(t *testing.T) {
+ answers := keyboardInteractive(map[string]string{
+ "question1": "answer1",
+ "question2": "answer2",
+ })
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ KeyboardInteractive(answers.Challenge),
+ },
+ }
+
+ if err := tryAuth(t, config); err != nil {
+ t.Fatalf("unable to dial remote side: %s", err)
+ }
+}
+
+func TestAuthMethodWrongKeyboardInteractive(t *testing.T) {
+ answers := keyboardInteractive(map[string]string{
+ "question1": "answer1",
+ "question2": "WRONG",
+ })
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ KeyboardInteractive(answers.Challenge),
+ },
+ }
+
+ if err := tryAuth(t, config); err == nil {
+ t.Fatalf("wrong answers should not have authenticated with KeyboardInteractive")
+ }
+}
+
+// the mock server will only authenticate ssh-rsa keys
+func TestAuthMethodInvalidPublicKey(t *testing.T) {
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ PublicKeys(testSigners["dsa"]),
+ },
+ }
+
+ if err := tryAuth(t, config); err == nil {
+ t.Fatalf("dsa private key should not have authenticated with rsa public key")
+ }
+}
+
+// the client should authenticate with the second key
+func TestAuthMethodRSAandDSA(t *testing.T) {
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ PublicKeys(testSigners["dsa"], testSigners["rsa"]),
+ },
+ }
+ if err := tryAuth(t, config); err != nil {
+ t.Fatalf("client could not authenticate with rsa key: %v", err)
+ }
+}
+
+func TestClientHMAC(t *testing.T) {
+ for _, mac := range supportedMACs {
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ PublicKeys(testSigners["rsa"]),
+ },
+ Config: Config{
+ MACs: []string{mac},
+ },
+ }
+ if err := tryAuth(t, config); err != nil {
+ t.Fatalf("client could not authenticate with mac algo %s: %v", mac, err)
+ }
+ }
+}
+
+// issue 4285.
+func TestClientUnsupportedCipher(t *testing.T) {
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ PublicKeys(),
+ },
+ Config: Config{
+ Ciphers: []string{"aes128-cbc"}, // not currently supported
+ },
+ }
+ if err := tryAuth(t, config); err == nil {
+ t.Errorf("expected no ciphers in common")
+ }
+}
+
+func TestClientUnsupportedKex(t *testing.T) {
+ config := &ClientConfig{
+ User: "testuser",
+ Auth: []AuthMethod{
+ PublicKeys(),
+ },
+ Config: Config{
+ KeyExchanges: []string{"diffie-hellman-group-exchange-sha256"}, // not currently supported
+ },
+ }
+ if err := tryAuth(t, config); err == nil || !strings.Contains(err.Error(), "common algorithm") {
+ t.Errorf("got %v, expected 'common algorithm'", err)
+ }
+}
+
+func TestClientLoginCert(t *testing.T) {
+ cert := &Certificate{
+ Key: testPublicKeys["rsa"],
+ ValidBefore: CertTimeInfinity,
+ CertType: UserCert,
+ }
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+ certSigner, err := NewCertSigner(cert, testSigners["rsa"])
+ if err != nil {
+ t.Fatalf("NewCertSigner: %v", err)
+ }
+
+ clientConfig := &ClientConfig{
+ User: "user",
+ }
+ clientConfig.Auth = append(clientConfig.Auth, PublicKeys(certSigner))
+
+ t.Log("should succeed")
+ if err := tryAuth(t, clientConfig); err != nil {
+ t.Errorf("cert login failed: %v", err)
+ }
+
+ t.Log("corrupted signature")
+ cert.Signature.Blob[0]++
+ if err := tryAuth(t, clientConfig); err == nil {
+ t.Errorf("cert login passed with corrupted sig")
+ }
+
+ t.Log("revoked")
+ cert.Serial = 666
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+ if err := tryAuth(t, clientConfig); err == nil {
+ t.Errorf("revoked cert login succeeded")
+ }
+ cert.Serial = 1
+
+ t.Log("sign with wrong key")
+ cert.SignCert(rand.Reader, testSigners["dsa"])
+ if err := tryAuth(t, clientConfig); err == nil {
+		t.Errorf("cert login passed with non-authoritative key")
+ }
+
+ t.Log("host cert")
+ cert.CertType = HostCert
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+ if err := tryAuth(t, clientConfig); err == nil {
+ t.Errorf("cert login passed with wrong type")
+ }
+ cert.CertType = UserCert
+
+ t.Log("principal specified")
+ cert.ValidPrincipals = []string{"user"}
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+ if err := tryAuth(t, clientConfig); err != nil {
+ t.Errorf("cert login failed: %v", err)
+ }
+
+ t.Log("wrong principal specified")
+ cert.ValidPrincipals = []string{"fred"}
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+ if err := tryAuth(t, clientConfig); err == nil {
+ t.Errorf("cert login passed with wrong principal")
+ }
+ cert.ValidPrincipals = nil
+
+ t.Log("added critical option")
+ cert.CriticalOptions = map[string]string{"root-access": "yes"}
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+ if err := tryAuth(t, clientConfig); err == nil {
+ t.Errorf("cert login passed with unrecognized critical option")
+ }
+
+ t.Log("allowed source address")
+ cert.CriticalOptions = map[string]string{"source-address": "127.0.0.42/24"}
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+ if err := tryAuth(t, clientConfig); err != nil {
+ t.Errorf("cert login with source-address failed: %v", err)
+ }
+
+ t.Log("disallowed source address")
+ cert.CriticalOptions = map[string]string{"source-address": "127.0.0.42"}
+ cert.SignCert(rand.Reader, testSigners["ecdsa"])
+ if err := tryAuth(t, clientConfig); err == nil {
+ t.Errorf("cert login with source-address succeeded")
+ }
+}
+
+func testPermissionsPassing(withPermissions bool, t *testing.T) {
+ serverConfig := &ServerConfig{
+ PublicKeyCallback: func(conn ConnMetadata, key PublicKey) (*Permissions, error) {
+			if conn.User() == "nopermissions" {
+				return nil, nil
+			}
+			return &Permissions{}, nil
+ },
+ }
+ serverConfig.AddHostKey(testSigners["rsa"])
+
+ clientConfig := &ClientConfig{
+ Auth: []AuthMethod{
+ PublicKeys(testSigners["rsa"]),
+ },
+ }
+ if withPermissions {
+ clientConfig.User = "permissions"
+ } else {
+ clientConfig.User = "nopermissions"
+ }
+
+ c1, c2, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+ defer c1.Close()
+ defer c2.Close()
+
+ go NewClientConn(c2, "", clientConfig)
+ serverConn, err := newServer(c1, serverConfig)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if p := serverConn.Permissions; (p != nil) != withPermissions {
+ t.Fatalf("withPermissions is %t, but Permissions object is %#v", withPermissions, p)
+ }
+}
+
+func TestPermissionsPassing(t *testing.T) {
+ testPermissionsPassing(true, t)
+}
+
+func TestNoPermissionsPassing(t *testing.T) {
+ testPermissionsPassing(false, t)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_test.go
new file mode 100644
index 00000000000..1fe790cb494
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/client_test.go
@@ -0,0 +1,39 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "net"
+ "testing"
+)
+
+func testClientVersion(t *testing.T, config *ClientConfig, expected string) {
+ clientConn, serverConn := net.Pipe()
+ defer clientConn.Close()
+ receivedVersion := make(chan string, 1)
+ go func() {
+ version, err := readVersion(serverConn)
+ if err != nil {
+ receivedVersion <- ""
+ } else {
+ receivedVersion <- string(version)
+ }
+ serverConn.Close()
+ }()
+ NewClientConn(clientConn, "", config)
+ actual := <-receivedVersion
+ if actual != expected {
+ t.Fatalf("got %s; want %s", actual, expected)
+ }
+}
+
+func TestCustomClientVersion(t *testing.T) {
+ version := "Test-Client-Version-0.0"
+ testClientVersion(t, &ClientConfig{ClientVersion: version}, version)
+}
+
+func TestDefaultClientVersion(t *testing.T) {
+ testClientVersion(t, &ClientConfig{}, packageVersion)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/common.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/common.go
new file mode 100644
index 00000000000..9fc739e1d01
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/common.go
@@ -0,0 +1,354 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "crypto"
+ "crypto/rand"
+ "fmt"
+ "io"
+ "sync"
+
+ _ "crypto/sha1"
+ _ "crypto/sha256"
+ _ "crypto/sha512"
+)
+
+// These are string constants in the SSH protocol.
+const (
+ compressionNone = "none"
+ serviceUserAuth = "ssh-userauth"
+ serviceSSH = "ssh-connection"
+)
+
+// supportedCiphers specifies the supported ciphers in preference order.
+var supportedCiphers = []string{
+ "aes128-ctr", "aes192-ctr", "aes256-ctr",
+ "aes128-gcm@openssh.com",
+ "arcfour256", "arcfour128",
+}
+
+// supportedKexAlgos specifies the supported key-exchange algorithms in
+// preference order.
+var supportedKexAlgos = []string{
+ kexAlgoCurve25519SHA256,
+ // P384 and P521 are not constant-time yet, but since we don't
+ // reuse ephemeral keys, using them for ECDH should be OK.
+ kexAlgoECDH256, kexAlgoECDH384, kexAlgoECDH521,
+ kexAlgoDH14SHA1, kexAlgoDH1SHA1,
+}
+
+// supportedHostKeyAlgos specifies the supported host-key algorithms (i.e.
+// methods of authenticating servers) in preference order.
+var supportedHostKeyAlgos = []string{
+ CertAlgoRSAv01, CertAlgoDSAv01, CertAlgoECDSA256v01,
+ CertAlgoECDSA384v01, CertAlgoECDSA521v01,
+
+ KeyAlgoECDSA256, KeyAlgoECDSA384, KeyAlgoECDSA521,
+ KeyAlgoRSA, KeyAlgoDSA,
+}
+
+// supportedMACs specifies a default set of MAC algorithms in preference order.
+// This is based on RFC 4253, section 6.4, but with hmac-md5 variants removed
+// because they have reached the end of their useful life.
+var supportedMACs = []string{
+ "hmac-sha2-256", "hmac-sha1", "hmac-sha1-96",
+}
+
+var supportedCompressions = []string{compressionNone}
+
+// hashFuncs keeps the mapping of supported algorithms to their respective
+// hashes needed for signature verification.
+var hashFuncs = map[string]crypto.Hash{
+ KeyAlgoRSA: crypto.SHA1,
+ KeyAlgoDSA: crypto.SHA1,
+ KeyAlgoECDSA256: crypto.SHA256,
+ KeyAlgoECDSA384: crypto.SHA384,
+ KeyAlgoECDSA521: crypto.SHA512,
+ CertAlgoRSAv01: crypto.SHA1,
+ CertAlgoDSAv01: crypto.SHA1,
+ CertAlgoECDSA256v01: crypto.SHA256,
+ CertAlgoECDSA384v01: crypto.SHA384,
+ CertAlgoECDSA521v01: crypto.SHA512,
+}
+
+// unexpectedMessageError results when the SSH message that we received didn't
+// match what we wanted.
+func unexpectedMessageError(expected, got uint8) error {
+ return fmt.Errorf("ssh: unexpected message type %d (expected %d)", got, expected)
+}
+
+// parseError results from a malformed SSH message.
+func parseError(tag uint8) error {
+ return fmt.Errorf("ssh: parse error in message type %d", tag)
+}
+
+func findCommon(what string, client []string, server []string) (common string, err error) {
+ for _, c := range client {
+ for _, s := range server {
+ if c == s {
+ return c, nil
+ }
+ }
+ }
+ return "", fmt.Errorf("ssh: no common algorithm for %s; client offered: %v, server offered: %v", what, client, server)
+}
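+
+// For example, with client = {"aes128-ctr", "arcfour256"} and
+// server = {"arcfour256", "aes128-ctr"}, findCommon picks "aes128-ctr":
+// the client's preference order wins.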
+
+type directionAlgorithms struct {
+ Cipher string
+ MAC string
+ Compression string
+}
+
+type algorithms struct {
+ kex string
+ hostKey string
+ w directionAlgorithms
+ r directionAlgorithms
+}
+
+func findAgreedAlgorithms(clientKexInit, serverKexInit *kexInitMsg) (algs *algorithms, err error) {
+ result := &algorithms{}
+
+ result.kex, err = findCommon("key exchange", clientKexInit.KexAlgos, serverKexInit.KexAlgos)
+ if err != nil {
+ return
+ }
+
+ result.hostKey, err = findCommon("host key", clientKexInit.ServerHostKeyAlgos, serverKexInit.ServerHostKeyAlgos)
+ if err != nil {
+ return
+ }
+
+ result.w.Cipher, err = findCommon("client to server cipher", clientKexInit.CiphersClientServer, serverKexInit.CiphersClientServer)
+ if err != nil {
+ return
+ }
+
+ result.r.Cipher, err = findCommon("server to client cipher", clientKexInit.CiphersServerClient, serverKexInit.CiphersServerClient)
+ if err != nil {
+ return
+ }
+
+ result.w.MAC, err = findCommon("client to server MAC", clientKexInit.MACsClientServer, serverKexInit.MACsClientServer)
+ if err != nil {
+ return
+ }
+
+ result.r.MAC, err = findCommon("server to client MAC", clientKexInit.MACsServerClient, serverKexInit.MACsServerClient)
+ if err != nil {
+ return
+ }
+
+ result.w.Compression, err = findCommon("client to server compression", clientKexInit.CompressionClientServer, serverKexInit.CompressionClientServer)
+ if err != nil {
+ return
+ }
+
+ result.r.Compression, err = findCommon("server to client compression", clientKexInit.CompressionServerClient, serverKexInit.CompressionServerClient)
+ if err != nil {
+ return
+ }
+
+ return result, nil
+}
+
+// If RekeyThreshold is too small, we can't make any progress sending data.
+const minRekeyThreshold uint64 = 256
+
+// Config contains configuration data common to both ServerConfig and
+// ClientConfig.
+type Config struct {
+ // Rand provides the source of entropy for cryptographic
+ // primitives. If Rand is nil, the cryptographic random reader
+ // in package crypto/rand will be used.
+ Rand io.Reader
+
+ // The maximum number of bytes sent or received after which a
+ // new key is negotiated. It must be at least 256. If
+ // unspecified, 1 gigabyte is used.
+ RekeyThreshold uint64
+
+	// The allowed key exchange algorithms. If unspecified then a
+ // default set of algorithms is used.
+ KeyExchanges []string
+
+ // The allowed cipher algorithms. If unspecified then a sensible
+ // default is used.
+ Ciphers []string
+
+ // The allowed MAC algorithms. If unspecified then a sensible default
+ // is used.
+ MACs []string
+}
+
+// SetDefaults sets sensible values for unset fields in config. This is
+// exported for testing: Configs passed to SSH functions are copied and have
+// default values set automatically.
+func (c *Config) SetDefaults() {
+ if c.Rand == nil {
+ c.Rand = rand.Reader
+ }
+ if c.Ciphers == nil {
+ c.Ciphers = supportedCiphers
+ }
+ var ciphers []string
+ for _, c := range c.Ciphers {
+ if cipherModes[c] != nil {
+			// Keep only ciphers for which we have a cipherModes definition.
+ ciphers = append(ciphers, c)
+ }
+ }
+ c.Ciphers = ciphers
+
+ if c.KeyExchanges == nil {
+ c.KeyExchanges = supportedKexAlgos
+ }
+
+ if c.MACs == nil {
+ c.MACs = supportedMACs
+ }
+
+ if c.RekeyThreshold == 0 {
+ // RFC 4253, section 9 suggests rekeying after 1G.
+ c.RekeyThreshold = 1 << 30
+ }
+ if c.RekeyThreshold < minRekeyThreshold {
+ c.RekeyThreshold = minRekeyThreshold
+ }
+}
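+
+// A minimal sketch of narrowing the negotiated algorithms; fields left nil
+// fall back to the package defaults (the particular cipher and MAC choices
+// below are illustrative only):
+//
+//	cfg := Config{
+//		Ciphers: []string{"aes256-ctr", "aes128-ctr"},
+//		MACs:    []string{"hmac-sha2-256"},
+//	}
+//	cfg.SetDefaults() // fills in Rand, KeyExchanges and RekeyThreshold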
+
+// buildDataSignedForAuth returns the data that is signed in order to prove
+// possession of a private key. See RFC 4252, section 7.
+func buildDataSignedForAuth(sessionId []byte, req userAuthRequestMsg, algo, pubKey []byte) []byte {
+ data := struct {
+ Session []byte
+ Type byte
+ User string
+ Service string
+ Method string
+ Sign bool
+ Algo []byte
+ PubKey []byte
+ }{
+ sessionId,
+ msgUserAuthRequest,
+ req.User,
+ req.Service,
+ req.Method,
+ true,
+ algo,
+ pubKey,
+ }
+ return Marshal(data)
+}
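+
+// A minimal sketch of how a client uses this blob to prove key possession
+// (session, req and signer are assumed to be in scope; the server verifies
+// the same data):
+//
+//	data := buildDataSignedForAuth(session, req,
+//		[]byte(signer.PublicKey().Type()), signer.PublicKey().Marshal())
+//	sig, err := signer.Sign(rand.Reader, data)
+//	if err != nil {
+//		return err
+//	}
+//	// sig is then marshaled into the signed userAuthRequestMsg.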
+
+func appendU16(buf []byte, n uint16) []byte {
+ return append(buf, byte(n>>8), byte(n))
+}
+
+func appendU32(buf []byte, n uint32) []byte {
+ return append(buf, byte(n>>24), byte(n>>16), byte(n>>8), byte(n))
+}
+
+func appendU64(buf []byte, n uint64) []byte {
+ return append(buf,
+ byte(n>>56), byte(n>>48), byte(n>>40), byte(n>>32),
+ byte(n>>24), byte(n>>16), byte(n>>8), byte(n))
+}
+
+func appendInt(buf []byte, n int) []byte {
+ return appendU32(buf, uint32(n))
+}
+
+func appendString(buf []byte, s string) []byte {
+ buf = appendU32(buf, uint32(len(s)))
+ buf = append(buf, s...)
+ return buf
+}
+
+func appendBool(buf []byte, b bool) []byte {
+ if b {
+ return append(buf, 1)
+ }
+ return append(buf, 0)
+}
+
+// newCond is a helper to hide the fact that there is no usable zero
+// value for sync.Cond.
+func newCond() *sync.Cond { return sync.NewCond(new(sync.Mutex)) }
+
+// window represents the buffer available to clients
+// wishing to write to a channel.
+type window struct {
+ *sync.Cond
+ win uint32 // RFC 4254 5.2 says the window size can grow to 2^32-1
+ writeWaiters int
+ closed bool
+}
+
+// add adds win to the amount of window available
+// for consumers.
+func (w *window) add(win uint32) bool {
+	// A zero-sized window adjustment is a no-op.
+ if win == 0 {
+ return true
+ }
+ w.L.Lock()
+ if w.win+win < win {
+ w.L.Unlock()
+ return false
+ }
+ w.win += win
+	// It is unusual for multiple goroutines to be attempting to reserve
+	// window space at once, but it is possible. Use Broadcast to notify all
+	// waiters that additional window is available.
+ w.Broadcast()
+ w.L.Unlock()
+ return true
+}
+
+// close sets the window to closed, so all reservations fail
+// immediately.
+func (w *window) close() {
+ w.L.Lock()
+ w.closed = true
+ w.Broadcast()
+ w.L.Unlock()
+}
+
+// reserve reserves win from the available window capacity.
+// If no capacity remains, reserve will block. reserve may
+// return less than requested.
+func (w *window) reserve(win uint32) (uint32, error) {
+ var err error
+ w.L.Lock()
+ w.writeWaiters++
+ w.Broadcast()
+ for w.win == 0 && !w.closed {
+ w.Wait()
+ }
+ w.writeWaiters--
+ if w.win < win {
+ win = w.win
+ }
+ w.win -= win
+ if w.closed {
+ err = io.EOF
+ }
+ w.L.Unlock()
+ return win, err
+}
+
+// waitWriterBlocked waits until some goroutine is blocked for further
+// writes. It is used in tests only.
+func (w *window) waitWriterBlocked() {
+ w.Cond.L.Lock()
+ for w.writeWaiters == 0 {
+ w.Cond.Wait()
+ }
+ w.Cond.L.Unlock()
+}
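+
+// A minimal sketch of the flow-control interplay (unexported helpers, shown
+// for illustration only): reserve blocks until the peer grants space via add.
+//
+//	w := &window{Cond: newCond()}
+//	go w.add(1024)          // peer sends a window adjust
+//	n, _ := w.reserve(2048) // returns at most 1024 once add has run
+//	_ = n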
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/connection.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/connection.go
new file mode 100644
index 00000000000..979d919e81e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/connection.go
@@ -0,0 +1,144 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "fmt"
+ "net"
+)
+
+// OpenChannelError is returned if the other side rejects an
+// OpenChannel request.
+type OpenChannelError struct {
+ Reason RejectionReason
+ Message string
+}
+
+func (e *OpenChannelError) Error() string {
+ return fmt.Sprintf("ssh: rejected: %s (%s)", e.Reason, e.Message)
+}
+
+// ConnMetadata holds metadata for the connection.
+type ConnMetadata interface {
+ // User returns the user ID for this connection.
+ // It is empty if no authentication is used.
+ User() string
+
+	// SessionID returns the session hash, also denoted by H.
+ SessionID() []byte
+
+ // ClientVersion returns the client's version string as hashed
+ // into the session ID.
+ ClientVersion() []byte
+
+ // ServerVersion returns the server's version string as hashed
+ // into the session ID.
+ ServerVersion() []byte
+
+ // RemoteAddr returns the remote address for this connection.
+ RemoteAddr() net.Addr
+
+ // LocalAddr returns the local address for this connection.
+ LocalAddr() net.Addr
+}
+
+// Conn represents an SSH connection for both server and client roles.
+// Conn is the basis for implementing an application layer, such
+// as ClientConn, which implements the traditional shell access for
+// clients.
+type Conn interface {
+ ConnMetadata
+
+ // SendRequest sends a global request, and returns the
+ // reply. If wantReply is true, it returns the response status
+ // and payload. See also RFC4254, section 4.
+ SendRequest(name string, wantReply bool, payload []byte) (bool, []byte, error)
+
+	// OpenChannel tries to open a channel. If the request is
+ // rejected, it returns *OpenChannelError. On success it returns
+ // the SSH Channel and a Go channel for incoming, out-of-band
+ // requests. The Go channel must be serviced, or the
+ // connection will hang.
+ OpenChannel(name string, data []byte) (Channel, <-chan *Request, error)
+
+	// Close closes the underlying network connection.
+ Close() error
+
+ // Wait blocks until the connection has shut down, and returns the
+ // error causing the shutdown.
+ Wait() error
+
+ // TODO(hanwen): consider exposing:
+ // RequestKeyChange
+ // Disconnect
+}
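+
+// A minimal sketch of opening a channel on an established Conn; the
+// "session" channel type is the common case and error handling is elided:
+//
+//	ch, reqs, err := conn.OpenChannel("session", nil)
+//	if err != nil {
+//		return err
+//	}
+//	go DiscardRequests(reqs) // the request channel must be serviced
+//	defer ch.Close()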
+
+// DiscardRequests consumes and rejects all requests from the
+// passed-in channel.
+func DiscardRequests(in <-chan *Request) {
+ for req := range in {
+ if req.WantReply {
+ req.Reply(false, nil)
+ }
+ }
+}
+
+// A connection represents an incoming connection.
+type connection struct {
+ transport *handshakeTransport
+ sshConn
+
+ // The connection protocol.
+ *mux
+}
+
+func (c *connection) Close() error {
+ return c.sshConn.conn.Close()
+}
+
+// sshConn provides net.Conn metadata, but disallows direct reads and
+// writes.
+type sshConn struct {
+ conn net.Conn
+
+ user string
+ sessionID []byte
+ clientVersion []byte
+ serverVersion []byte
+}
+
+func dup(src []byte) []byte {
+ dst := make([]byte, len(src))
+ copy(dst, src)
+ return dst
+}
+
+func (c *sshConn) User() string {
+ return c.user
+}
+
+func (c *sshConn) RemoteAddr() net.Addr {
+ return c.conn.RemoteAddr()
+}
+
+func (c *sshConn) Close() error {
+ return c.conn.Close()
+}
+
+func (c *sshConn) LocalAddr() net.Addr {
+ return c.conn.LocalAddr()
+}
+
+func (c *sshConn) SessionID() []byte {
+ return dup(c.sessionID)
+}
+
+func (c *sshConn) ClientVersion() []byte {
+ return dup(c.clientVersion)
+}
+
+func (c *sshConn) ServerVersion() []byte {
+ return dup(c.serverVersion)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/doc.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/doc.go
new file mode 100644
index 00000000000..d6be8946629
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/doc.go
@@ -0,0 +1,18 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package ssh implements an SSH client and server.
+
+SSH is a transport security protocol, an authentication protocol and a
+family of application protocols. The most typical application-level
+protocol is a remote shell, and this is specifically implemented. However,
+the multiplexed nature of SSH is exposed to users who wish to support
+other protocols.
+
+References:
+ [PROTOCOL.certkeys]: http://cvsweb.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL.certkeys?rev=HEAD
+ [SSH-PARAMETERS]: http://www.iana.org/assignments/ssh-parameters/ssh-parameters.xml#ssh-parameters-1
+*/
+package ssh // import "golang.org/x/crypto/ssh"
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/example_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/example_test.go
new file mode 100644
index 00000000000..dfd9dcab606
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/example_test.go
@@ -0,0 +1,211 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh_test
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "net"
+ "net/http"
+
+ "golang.org/x/crypto/ssh"
+ "golang.org/x/crypto/ssh/terminal"
+)
+
+func ExampleNewServerConn() {
+ // An SSH server is represented by a ServerConfig, which holds
+ // certificate details and handles authentication of ServerConns.
+ config := &ssh.ServerConfig{
+ PasswordCallback: func(c ssh.ConnMetadata, pass []byte) (*ssh.Permissions, error) {
+ // Should use constant-time compare (or better, salt+hash) in
+ // a production setting.
+ if c.User() == "testuser" && string(pass) == "tiger" {
+ return nil, nil
+ }
+ return nil, fmt.Errorf("password rejected for %q", c.User())
+ },
+ }
+
+ privateBytes, err := ioutil.ReadFile("id_rsa")
+ if err != nil {
+ panic("Failed to load private key")
+ }
+
+ private, err := ssh.ParsePrivateKey(privateBytes)
+ if err != nil {
+ panic("Failed to parse private key")
+ }
+
+ config.AddHostKey(private)
+
+ // Once a ServerConfig has been configured, connections can be
+ // accepted.
+ listener, err := net.Listen("tcp", "0.0.0.0:2022")
+ if err != nil {
+ panic("failed to listen for connection")
+ }
+ nConn, err := listener.Accept()
+ if err != nil {
+ panic("failed to accept incoming connection")
+ }
+
+ // Before use, a handshake must be performed on the incoming
+ // net.Conn.
+ _, chans, reqs, err := ssh.NewServerConn(nConn, config)
+ if err != nil {
+ panic("failed to handshake")
+ }
+ // The incoming Request channel must be serviced.
+ go ssh.DiscardRequests(reqs)
+
+ // Service the incoming Channel channel.
+ for newChannel := range chans {
+ // Channels have a type, depending on the application level
+ // protocol intended. In the case of a shell, the type is
+ // "session" and ServerShell may be used to present a simple
+ // terminal interface.
+ if newChannel.ChannelType() != "session" {
+ newChannel.Reject(ssh.UnknownChannelType, "unknown channel type")
+ continue
+ }
+ channel, requests, err := newChannel.Accept()
+ if err != nil {
+ panic("could not accept channel.")
+ }
+
+ // Sessions have out-of-band requests such as "shell",
+ // "pty-req" and "env". Here we handle only the
+ // "shell" request.
+ go func(in <-chan *ssh.Request) {
+ for req := range in {
+ ok := false
+ switch req.Type {
+ case "shell":
+ ok = true
+ if len(req.Payload) > 0 {
+ // We don't accept any
+ // commands, only the
+ // default shell.
+ ok = false
+ }
+ }
+ req.Reply(ok, nil)
+ }
+ }(requests)
+
+ term := terminal.NewTerminal(channel, "> ")
+
+ go func() {
+ defer channel.Close()
+ for {
+ line, err := term.ReadLine()
+ if err != nil {
+ break
+ }
+ fmt.Println(line)
+ }
+ }()
+ }
+}
+
+func ExampleDial() {
+	// An SSH client is represented with a ClientConn. This example
+	// authenticates with the "password" method.
+ //
+ // To authenticate with the remote server you must pass at least one
+ // implementation of AuthMethod via the Auth field in ClientConfig.
+ config := &ssh.ClientConfig{
+ User: "username",
+ Auth: []ssh.AuthMethod{
+ ssh.Password("yourpassword"),
+ },
+ }
+ client, err := ssh.Dial("tcp", "yourserver.com:22", config)
+ if err != nil {
+ panic("Failed to dial: " + err.Error())
+ }
+
+ // Each ClientConn can support multiple interactive sessions,
+ // represented by a Session.
+ session, err := client.NewSession()
+ if err != nil {
+ panic("Failed to create session: " + err.Error())
+ }
+ defer session.Close()
+
+ // Once a Session is created, you can execute a single command on
+ // the remote side using the Run method.
+ var b bytes.Buffer
+ session.Stdout = &b
+ if err := session.Run("/usr/bin/whoami"); err != nil {
+ panic("Failed to run: " + err.Error())
+ }
+ fmt.Println(b.String())
+}
+
+func ExampleClient_Listen() {
+ config := &ssh.ClientConfig{
+ User: "username",
+ Auth: []ssh.AuthMethod{
+ ssh.Password("password"),
+ },
+ }
+ // Dial your ssh server.
+ conn, err := ssh.Dial("tcp", "localhost:22", config)
+ if err != nil {
+ log.Fatalf("unable to connect: %s", err)
+ }
+ defer conn.Close()
+
+ // Request the remote side to open port 8080 on all interfaces.
+ l, err := conn.Listen("tcp", "0.0.0.0:8080")
+ if err != nil {
+ log.Fatalf("unable to register tcp forward: %v", err)
+ }
+ defer l.Close()
+
+ // Serve HTTP with your SSH server acting as a reverse proxy.
+ http.Serve(l, http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
+ fmt.Fprintf(resp, "Hello world!\n")
+ }))
+}
+
+func ExampleSession_RequestPty() {
+ // Create client config
+ config := &ssh.ClientConfig{
+ User: "username",
+ Auth: []ssh.AuthMethod{
+ ssh.Password("password"),
+ },
+ }
+ // Connect to ssh server
+ conn, err := ssh.Dial("tcp", "localhost:22", config)
+ if err != nil {
+ log.Fatalf("unable to connect: %s", err)
+ }
+ defer conn.Close()
+ // Create a session
+ session, err := conn.NewSession()
+ if err != nil {
+ log.Fatalf("unable to create session: %s", err)
+ }
+ defer session.Close()
+ // Set up terminal modes
+ modes := ssh.TerminalModes{
+ ssh.ECHO: 0, // disable echoing
+ ssh.TTY_OP_ISPEED: 14400, // input speed = 14.4kbaud
+ ssh.TTY_OP_OSPEED: 14400, // output speed = 14.4kbaud
+ }
+ // Request pseudo terminal
+ if err := session.RequestPty("xterm", 80, 40, modes); err != nil {
+ log.Fatalf("request for pseudo terminal failed: %s", err)
+ }
+ // Start remote shell
+ if err := session.Shell(); err != nil {
+ log.Fatalf("failed to start shell: %s", err)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/handshake.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/handshake.go
new file mode 100644
index 00000000000..1c54f758781
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/handshake.go
@@ -0,0 +1,412 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "crypto/rand"
+ "errors"
+ "fmt"
+ "io"
+ "log"
+ "net"
+ "sync"
+)
+
+// debugHandshake, if set, prints messages sent and received. Key
+// exchange messages are printed as if DH were used, so the debug
+// messages are wrong when using ECDH.
+const debugHandshake = false
+
+// keyingTransport is a packet based transport that supports key
+// changes. It need not be thread-safe. It should pass through
+// msgNewKeys in both directions.
+type keyingTransport interface {
+ packetConn
+
+ // prepareKeyChange sets up a key change. The key change for a
+ // direction will be effected if a msgNewKeys message is sent
+ // or received.
+ prepareKeyChange(*algorithms, *kexResult) error
+
+ // getSessionID returns the session ID. prepareKeyChange must
+ // have been called once.
+ getSessionID() []byte
+}
+
+// rekeyingTransport is the interface of handshakeTransport that we
+// (internally) expose to ClientConn and ServerConn.
+type rekeyingTransport interface {
+ packetConn
+
+ // requestKeyChange asks the remote side to change keys. All
+ // writes are blocked until the key change succeeds, which is
+ // signaled by reading a msgNewKeys.
+ requestKeyChange() error
+
+ // getSessionID returns the session ID. This is only valid
+ // after the first key change has completed.
+ getSessionID() []byte
+}
+
+// handshakeTransport implements rekeying on top of a keyingTransport
+// and offers a thread-safe writePacket() interface.
+type handshakeTransport struct {
+ conn keyingTransport
+ config *Config
+
+ serverVersion []byte
+ clientVersion []byte
+
+ // hostKeys is non-empty if we are the server. In that case,
+ // it contains all host keys that can be used to sign the
+ // connection.
+ hostKeys []Signer
+
+ // hostKeyAlgorithms is non-empty if we are the client. In that case,
+ // we accept these key types from the server as host key.
+ hostKeyAlgorithms []string
+
+ // On read error, incoming is closed, and readError is set.
+ incoming chan []byte
+ readError error
+
+ // data for host key checking
+ hostKeyCallback func(hostname string, remote net.Addr, key PublicKey) error
+ dialAddress string
+ remoteAddr net.Addr
+
+ readSinceKex uint64
+
+ // Protects the writing side of the connection
+ mu sync.Mutex
+ cond *sync.Cond
+ sentInitPacket []byte
+ sentInitMsg *kexInitMsg
+ writtenSinceKex uint64
+ writeError error
+}
+
+func newHandshakeTransport(conn keyingTransport, config *Config, clientVersion, serverVersion []byte) *handshakeTransport {
+ t := &handshakeTransport{
+ conn: conn,
+ serverVersion: serverVersion,
+ clientVersion: clientVersion,
+ incoming: make(chan []byte, 16),
+ config: config,
+ }
+ t.cond = sync.NewCond(&t.mu)
+ return t
+}
+
+func newClientTransport(conn keyingTransport, clientVersion, serverVersion []byte, config *ClientConfig, dialAddr string, addr net.Addr) *handshakeTransport {
+ t := newHandshakeTransport(conn, &config.Config, clientVersion, serverVersion)
+ t.dialAddress = dialAddr
+ t.remoteAddr = addr
+ t.hostKeyCallback = config.HostKeyCallback
+ if config.HostKeyAlgorithms != nil {
+ t.hostKeyAlgorithms = config.HostKeyAlgorithms
+ } else {
+ t.hostKeyAlgorithms = supportedHostKeyAlgos
+ }
+ go t.readLoop()
+ return t
+}
+
+func newServerTransport(conn keyingTransport, clientVersion, serverVersion []byte, config *ServerConfig) *handshakeTransport {
+ t := newHandshakeTransport(conn, &config.Config, clientVersion, serverVersion)
+ t.hostKeys = config.hostKeys
+ go t.readLoop()
+ return t
+}
+
+func (t *handshakeTransport) getSessionID() []byte {
+ return t.conn.getSessionID()
+}
+
+func (t *handshakeTransport) id() string {
+ if len(t.hostKeys) > 0 {
+ return "server"
+ }
+ return "client"
+}
+
+func (t *handshakeTransport) readPacket() ([]byte, error) {
+ p, ok := <-t.incoming
+ if !ok {
+ return nil, t.readError
+ }
+ return p, nil
+}
+
+func (t *handshakeTransport) readLoop() {
+ for {
+ p, err := t.readOnePacket()
+ if err != nil {
+ t.readError = err
+ close(t.incoming)
+ break
+ }
+ if p[0] == msgIgnore || p[0] == msgDebug {
+ continue
+ }
+ t.incoming <- p
+ }
+
+ // If we can't read, declare the writing part dead too.
+ t.mu.Lock()
+ defer t.mu.Unlock()
+ if t.writeError == nil {
+ t.writeError = t.readError
+ }
+ t.cond.Broadcast()
+}
+
+func (t *handshakeTransport) readOnePacket() ([]byte, error) {
+ if t.readSinceKex > t.config.RekeyThreshold {
+ if err := t.requestKeyChange(); err != nil {
+ return nil, err
+ }
+ }
+
+ p, err := t.conn.readPacket()
+ if err != nil {
+ return nil, err
+ }
+
+ t.readSinceKex += uint64(len(p))
+ if debugHandshake {
+ msg, err := decode(p)
+ log.Printf("%s got %T %v (%v)", t.id(), msg, msg, err)
+ }
+ if p[0] != msgKexInit {
+ return p, nil
+ }
+ err = t.enterKeyExchange(p)
+
+ t.mu.Lock()
+ if err != nil {
+ // drop connection
+ t.conn.Close()
+ t.writeError = err
+ }
+
+ if debugHandshake {
+ log.Printf("%s exited key exchange, err %v", t.id(), err)
+ }
+
+ // Unblock writers.
+ t.sentInitMsg = nil
+ t.sentInitPacket = nil
+ t.cond.Broadcast()
+ t.writtenSinceKex = 0
+ t.mu.Unlock()
+
+ if err != nil {
+ return nil, err
+ }
+
+ t.readSinceKex = 0
+ return []byte{msgNewKeys}, nil
+}
+
+// sendKexInit sends a key change message, and returns the message
+// that was sent. After initiating the key change, all writes will be
+// blocked until the change is done, and a failed key change will
+// close the underlying transport. This function is safe for
+// concurrent use by multiple goroutines.
+func (t *handshakeTransport) sendKexInit() (*kexInitMsg, []byte, error) {
+ t.mu.Lock()
+ defer t.mu.Unlock()
+ return t.sendKexInitLocked()
+}
+
+func (t *handshakeTransport) requestKeyChange() error {
+ _, _, err := t.sendKexInit()
+ return err
+}
+
+// sendKexInitLocked sends a key change message. t.mu must be locked
+// while this happens.
+func (t *handshakeTransport) sendKexInitLocked() (*kexInitMsg, []byte, error) {
+ // kexInits may be sent either in response to the other side,
+ // or because our side wants to initiate a key change, so we
+ // may have already sent a kexInit. In that case, don't send a
+ // second kexInit.
+ if t.sentInitMsg != nil {
+ return t.sentInitMsg, t.sentInitPacket, nil
+ }
+ msg := &kexInitMsg{
+ KexAlgos: t.config.KeyExchanges,
+ CiphersClientServer: t.config.Ciphers,
+ CiphersServerClient: t.config.Ciphers,
+ MACsClientServer: t.config.MACs,
+ MACsServerClient: t.config.MACs,
+ CompressionClientServer: supportedCompressions,
+ CompressionServerClient: supportedCompressions,
+ }
+ io.ReadFull(rand.Reader, msg.Cookie[:])
+
+ if len(t.hostKeys) > 0 {
+ for _, k := range t.hostKeys {
+ msg.ServerHostKeyAlgos = append(
+ msg.ServerHostKeyAlgos, k.PublicKey().Type())
+ }
+ } else {
+ msg.ServerHostKeyAlgos = t.hostKeyAlgorithms
+ }
+ packet := Marshal(msg)
+
+ // writePacket destroys the contents, so save a copy.
+ packetCopy := make([]byte, len(packet))
+ copy(packetCopy, packet)
+
+ if err := t.conn.writePacket(packetCopy); err != nil {
+ return nil, nil, err
+ }
+
+ t.sentInitMsg = msg
+ t.sentInitPacket = packet
+ return msg, packet, nil
+}
+
+func (t *handshakeTransport) writePacket(p []byte) error {
+ t.mu.Lock()
+ defer t.mu.Unlock()
+
+ if t.writtenSinceKex > t.config.RekeyThreshold {
+ t.sendKexInitLocked()
+ }
+ for t.sentInitMsg != nil && t.writeError == nil {
+ t.cond.Wait()
+ }
+ if t.writeError != nil {
+ return t.writeError
+ }
+ t.writtenSinceKex += uint64(len(p))
+
+ switch p[0] {
+ case msgKexInit:
+ return errors.New("ssh: only handshakeTransport can send kexInit")
+ case msgNewKeys:
+ return errors.New("ssh: only handshakeTransport can send newKeys")
+ default:
+ return t.conn.writePacket(p)
+ }
+}
+
+func (t *handshakeTransport) Close() error {
+ return t.conn.Close()
+}
+
+// enterKeyExchange runs the key exchange.
+func (t *handshakeTransport) enterKeyExchange(otherInitPacket []byte) error {
+ if debugHandshake {
+ log.Printf("%s entered key exchange", t.id())
+ }
+ myInit, myInitPacket, err := t.sendKexInit()
+ if err != nil {
+ return err
+ }
+
+ otherInit := &kexInitMsg{}
+ if err := Unmarshal(otherInitPacket, otherInit); err != nil {
+ return err
+ }
+
+ magics := handshakeMagics{
+ clientVersion: t.clientVersion,
+ serverVersion: t.serverVersion,
+ clientKexInit: otherInitPacket,
+ serverKexInit: myInitPacket,
+ }
+
+ clientInit := otherInit
+ serverInit := myInit
+ if len(t.hostKeys) == 0 {
+ clientInit = myInit
+ serverInit = otherInit
+
+ magics.clientKexInit = myInitPacket
+ magics.serverKexInit = otherInitPacket
+ }
+
+ algs, err := findAgreedAlgorithms(clientInit, serverInit)
+ if err != nil {
+ return err
+ }
+
+ // We don't send FirstKexFollows, but we handle receiving it.
+ if otherInit.FirstKexFollows && algs.kex != otherInit.KexAlgos[0] {
+ // other side sent a kex message for the wrong algorithm,
+ // which we have to ignore.
+ if _, err := t.conn.readPacket(); err != nil {
+ return err
+ }
+ }
+
+ kex, ok := kexAlgoMap[algs.kex]
+ if !ok {
+ return fmt.Errorf("ssh: unexpected key exchange algorithm %v", algs.kex)
+ }
+
+ var result *kexResult
+ if len(t.hostKeys) > 0 {
+ result, err = t.server(kex, algs, &magics)
+ } else {
+ result, err = t.client(kex, algs, &magics)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ t.conn.prepareKeyChange(algs, result)
+ if err = t.conn.writePacket([]byte{msgNewKeys}); err != nil {
+ return err
+ }
+ if packet, err := t.conn.readPacket(); err != nil {
+ return err
+ } else if packet[0] != msgNewKeys {
+ return unexpectedMessageError(msgNewKeys, packet[0])
+ }
+ return nil
+}
+
+func (t *handshakeTransport) server(kex kexAlgorithm, algs *algorithms, magics *handshakeMagics) (*kexResult, error) {
+ var hostKey Signer
+ for _, k := range t.hostKeys {
+ if algs.hostKey == k.PublicKey().Type() {
+ hostKey = k
+ }
+ }
+
+ r, err := kex.Server(t.conn, t.config.Rand, magics, hostKey)
+ return r, err
+}
+
+func (t *handshakeTransport) client(kex kexAlgorithm, algs *algorithms, magics *handshakeMagics) (*kexResult, error) {
+ result, err := kex.Client(t.conn, t.config.Rand, magics)
+ if err != nil {
+ return nil, err
+ }
+
+ hostKey, err := ParsePublicKey(result.HostKey)
+ if err != nil {
+ return nil, err
+ }
+
+ if err := verifyHostKeySignature(hostKey, result); err != nil {
+ return nil, err
+ }
+
+ if t.hostKeyCallback != nil {
+ err = t.hostKeyCallback(t.dialAddress, t.remoteAddr, hostKey)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return result, nil
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/handshake_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/handshake_test.go
new file mode 100644
index 00000000000..b86d369cced
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/handshake_test.go
@@ -0,0 +1,415 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "crypto/rand"
+ "errors"
+ "fmt"
+ "net"
+ "runtime"
+ "strings"
+ "sync"
+ "testing"
+)
+
+type testChecker struct {
+ calls []string
+}
+
+func (t *testChecker) Check(dialAddr string, addr net.Addr, key PublicKey) error {
+ if dialAddr == "bad" {
+ return fmt.Errorf("dialAddr is bad")
+ }
+
+ if tcpAddr, ok := addr.(*net.TCPAddr); !ok || tcpAddr == nil {
+ return fmt.Errorf("testChecker: got %T want *net.TCPAddr", addr)
+ }
+
+ t.calls = append(t.calls, fmt.Sprintf("%s %v %s %x", dialAddr, addr, key.Type(), key.Marshal()))
+
+ return nil
+}
+
+// netPipe is analogous to net.Pipe, but it uses a real net.Conn, and
+// therefore is buffered (net.Pipe deadlocks if both sides start with
+// a write).
+func netPipe() (net.Conn, net.Conn, error) {
+ listener, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ return nil, nil, err
+ }
+ defer listener.Close()
+ c1, err := net.Dial("tcp", listener.Addr().String())
+ if err != nil {
+ return nil, nil, err
+ }
+
+ c2, err := listener.Accept()
+ if err != nil {
+ c1.Close()
+ return nil, nil, err
+ }
+
+ return c1, c2, nil
+}
+
+func handshakePair(clientConf *ClientConfig, addr string) (client *handshakeTransport, server *handshakeTransport, err error) {
+ a, b, err := netPipe()
+ if err != nil {
+ return nil, nil, err
+ }
+
+ trC := newTransport(a, rand.Reader, true)
+ trS := newTransport(b, rand.Reader, false)
+ clientConf.SetDefaults()
+
+ v := []byte("version")
+ client = newClientTransport(trC, v, v, clientConf, addr, a.RemoteAddr())
+
+ serverConf := &ServerConfig{}
+ serverConf.AddHostKey(testSigners["ecdsa"])
+ serverConf.AddHostKey(testSigners["rsa"])
+ serverConf.SetDefaults()
+ server = newServerTransport(trS, v, v, serverConf)
+
+ return client, server, nil
+}
+
+func TestHandshakeBasic(t *testing.T) {
+ if runtime.GOOS == "plan9" {
+ t.Skip("see golang.org/issue/7237")
+ }
+ checker := &testChecker{}
+ trC, trS, err := handshakePair(&ClientConfig{HostKeyCallback: checker.Check}, "addr")
+ if err != nil {
+ t.Fatalf("handshakePair: %v", err)
+ }
+
+ defer trC.Close()
+ defer trS.Close()
+
+ go func() {
+		// The client writes several packets and requests a key
+		// change in the middle. This should not confuse the
+		// handshake in progress.
+ for i := 0; i < 10; i++ {
+ p := []byte{msgRequestSuccess, byte(i)}
+ if err := trC.writePacket(p); err != nil {
+				t.Fatalf("writePacket: %v", err)
+ }
+ if i == 5 {
+ // halfway through, we request a key change.
+ _, _, err := trC.sendKexInit()
+ if err != nil {
+ t.Fatalf("sendKexInit: %v", err)
+ }
+ }
+ }
+ trC.Close()
+ }()
+
+ // Server checks that client messages come in cleanly
+ i := 0
+ for {
+ p, err := trS.readPacket()
+ if err != nil {
+ break
+ }
+ if p[0] == msgNewKeys {
+ continue
+ }
+ want := []byte{msgRequestSuccess, byte(i)}
+ if bytes.Compare(p, want) != 0 {
+ t.Errorf("message %d: got %q, want %q", i, p, want)
+ }
+ i++
+ }
+ if i != 10 {
+ t.Errorf("received %d messages, want 10.", i)
+ }
+
+ // If all went well, we registered exactly 1 key change.
+ if len(checker.calls) != 1 {
+ t.Fatalf("got %d host key checks, want 1", len(checker.calls))
+ }
+
+ pub := testSigners["ecdsa"].PublicKey()
+ want := fmt.Sprintf("%s %v %s %x", "addr", trC.remoteAddr, pub.Type(), pub.Marshal())
+ if want != checker.calls[0] {
+ t.Errorf("got %q want %q for host key check", checker.calls[0], want)
+ }
+}
+
+func TestHandshakeError(t *testing.T) {
+ checker := &testChecker{}
+ trC, trS, err := handshakePair(&ClientConfig{HostKeyCallback: checker.Check}, "bad")
+ if err != nil {
+ t.Fatalf("handshakePair: %v", err)
+ }
+ defer trC.Close()
+ defer trS.Close()
+
+ // send a packet
+ packet := []byte{msgRequestSuccess, 42}
+ if err := trC.writePacket(packet); err != nil {
+ t.Errorf("writePacket: %v", err)
+ }
+
+ // Now request a key change.
+ _, _, err = trC.sendKexInit()
+ if err != nil {
+ t.Errorf("sendKexInit: %v", err)
+ }
+
+ // the key change will fail, and afterwards we can't write.
+ if err := trC.writePacket([]byte{msgRequestSuccess, 43}); err == nil {
+ t.Errorf("writePacket after botched rekey succeeded.")
+ }
+
+ readback, err := trS.readPacket()
+ if err != nil {
+ t.Fatalf("server closed too soon: %v", err)
+ }
+ if bytes.Compare(readback, packet) != 0 {
+ t.Errorf("got %q want %q", readback, packet)
+ }
+ readback, err = trS.readPacket()
+ if err == nil {
+ t.Errorf("got a message %q after failed key change", readback)
+ }
+}
+
+func TestHandshakeTwice(t *testing.T) {
+ checker := &testChecker{}
+ trC, trS, err := handshakePair(&ClientConfig{HostKeyCallback: checker.Check}, "addr")
+ if err != nil {
+ t.Fatalf("handshakePair: %v", err)
+ }
+
+ defer trC.Close()
+ defer trS.Close()
+
+ // send a packet
+ packet := make([]byte, 5)
+ packet[0] = msgRequestSuccess
+ if err := trC.writePacket(packet); err != nil {
+ t.Errorf("writePacket: %v", err)
+ }
+
+ // Now request a key change.
+ _, _, err = trC.sendKexInit()
+ if err != nil {
+ t.Errorf("sendKexInit: %v", err)
+ }
+
+	// Send another packet. Use a fresh one, since writePacket destroys its input.
+ packet = make([]byte, 5)
+ packet[0] = msgRequestSuccess
+ if err := trC.writePacket(packet); err != nil {
+ t.Errorf("writePacket: %v", err)
+ }
+
+ // 2nd key change.
+ _, _, err = trC.sendKexInit()
+ if err != nil {
+ t.Errorf("sendKexInit: %v", err)
+ }
+
+ packet = make([]byte, 5)
+ packet[0] = msgRequestSuccess
+ if err := trC.writePacket(packet); err != nil {
+ t.Errorf("writePacket: %v", err)
+ }
+
+ packet = make([]byte, 5)
+ packet[0] = msgRequestSuccess
+ for i := 0; i < 5; i++ {
+ msg, err := trS.readPacket()
+ if err != nil {
+ t.Fatalf("server closed too soon: %v", err)
+ }
+ if msg[0] == msgNewKeys {
+ continue
+ }
+
+ if bytes.Compare(msg, packet) != 0 {
+ t.Errorf("packet %d: got %q want %q", i, msg, packet)
+ }
+ }
+ if len(checker.calls) != 2 {
+ t.Errorf("got %d key changes, want 2", len(checker.calls))
+ }
+}
+
+func TestHandshakeAutoRekeyWrite(t *testing.T) {
+ checker := &testChecker{}
+ clientConf := &ClientConfig{HostKeyCallback: checker.Check}
+ clientConf.RekeyThreshold = 500
+ trC, trS, err := handshakePair(clientConf, "addr")
+ if err != nil {
+ t.Fatalf("handshakePair: %v", err)
+ }
+ defer trC.Close()
+ defer trS.Close()
+
+ for i := 0; i < 5; i++ {
+ packet := make([]byte, 251)
+ packet[0] = msgRequestSuccess
+ if err := trC.writePacket(packet); err != nil {
+ t.Errorf("writePacket: %v", err)
+ }
+ }
+
+ j := 0
+ for ; j < 5; j++ {
+ _, err := trS.readPacket()
+ if err != nil {
+ break
+ }
+ }
+
+ if j != 5 {
+ t.Errorf("got %d, want 5 messages", j)
+ }
+
+ if len(checker.calls) != 2 {
+ t.Errorf("got %d key changes, wanted 2", len(checker.calls))
+ }
+}
+
+type syncChecker struct {
+ called chan int
+}
+
+func (t *syncChecker) Check(dialAddr string, addr net.Addr, key PublicKey) error {
+ t.called <- 1
+ return nil
+}
+
+func TestHandshakeAutoRekeyRead(t *testing.T) {
+ sync := &syncChecker{make(chan int, 2)}
+ clientConf := &ClientConfig{
+ HostKeyCallback: sync.Check,
+ }
+ clientConf.RekeyThreshold = 500
+
+ trC, trS, err := handshakePair(clientConf, "addr")
+ if err != nil {
+ t.Fatalf("handshakePair: %v", err)
+ }
+ defer trC.Close()
+ defer trS.Close()
+
+ packet := make([]byte, 501)
+ packet[0] = msgRequestSuccess
+ if err := trS.writePacket(packet); err != nil {
+ t.Fatalf("writePacket: %v", err)
+ }
+ // While we read out the packet, a key change will be
+ // initiated.
+ if _, err := trC.readPacket(); err != nil {
+ t.Fatalf("readPacket(client): %v", err)
+ }
+
+ <-sync.called
+}
+
+// errorKeyingTransport generates errors after a given number of
+// read/write operations.
+type errorKeyingTransport struct {
+ packetConn
+ readLeft, writeLeft int
+}
+
+func (n *errorKeyingTransport) prepareKeyChange(*algorithms, *kexResult) error {
+ return nil
+}
+func (n *errorKeyingTransport) getSessionID() []byte {
+ return nil
+}
+
+func (n *errorKeyingTransport) writePacket(packet []byte) error {
+ if n.writeLeft == 0 {
+ n.Close()
+ return errors.New("barf")
+ }
+
+ n.writeLeft--
+ return n.packetConn.writePacket(packet)
+}
+
+func (n *errorKeyingTransport) readPacket() ([]byte, error) {
+ if n.readLeft == 0 {
+ n.Close()
+ return nil, errors.New("barf")
+ }
+
+ n.readLeft--
+ return n.packetConn.readPacket()
+}
+
+func TestHandshakeErrorHandlingRead(t *testing.T) {
+ for i := 0; i < 20; i++ {
+ testHandshakeErrorHandlingN(t, i, -1)
+ }
+}
+
+func TestHandshakeErrorHandlingWrite(t *testing.T) {
+ for i := 0; i < 20; i++ {
+ testHandshakeErrorHandlingN(t, -1, i)
+ }
+}
+
+// testHandshakeErrorHandlingN runs handshakes, injecting errors. If
+// handshakeTransport deadlocks, the Go runtime will detect it and
+// panic.
+func testHandshakeErrorHandlingN(t *testing.T, readLimit, writeLimit int) {
+ msg := Marshal(&serviceRequestMsg{strings.Repeat("x", int(minRekeyThreshold)/4)})
+
+ a, b := memPipe()
+ defer a.Close()
+ defer b.Close()
+
+ key := testSigners["ecdsa"]
+ serverConf := Config{RekeyThreshold: minRekeyThreshold}
+ serverConf.SetDefaults()
+ serverConn := newHandshakeTransport(&errorKeyingTransport{a, readLimit, writeLimit}, &serverConf, []byte{'a'}, []byte{'b'})
+ serverConn.hostKeys = []Signer{key}
+ go serverConn.readLoop()
+
+ clientConf := Config{RekeyThreshold: 10 * minRekeyThreshold}
+ clientConf.SetDefaults()
+ clientConn := newHandshakeTransport(&errorKeyingTransport{b, -1, -1}, &clientConf, []byte{'a'}, []byte{'b'})
+ clientConn.hostKeyAlgorithms = []string{key.PublicKey().Type()}
+ go clientConn.readLoop()
+
+ var wg sync.WaitGroup
+ wg.Add(4)
+
+ for _, hs := range []packetConn{serverConn, clientConn} {
+ go func(c packetConn) {
+ for {
+ err := c.writePacket(msg)
+ if err != nil {
+ break
+ }
+ }
+ wg.Done()
+ }(hs)
+ go func(c packetConn) {
+ for {
+ _, err := c.readPacket()
+ if err != nil {
+ break
+ }
+ }
+ wg.Done()
+ }(hs)
+ }
+
+ wg.Wait()
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/kex.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/kex.go
new file mode 100644
index 00000000000..3ec603c0a15
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/kex.go
@@ -0,0 +1,526 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "crypto"
+ "crypto/ecdsa"
+ "crypto/elliptic"
+ "crypto/rand"
+ "crypto/subtle"
+ "errors"
+ "io"
+ "math/big"
+
+ "golang.org/x/crypto/curve25519"
+)
+
+const (
+ kexAlgoDH1SHA1 = "diffie-hellman-group1-sha1"
+ kexAlgoDH14SHA1 = "diffie-hellman-group14-sha1"
+ kexAlgoECDH256 = "ecdh-sha2-nistp256"
+ kexAlgoECDH384 = "ecdh-sha2-nistp384"
+ kexAlgoECDH521 = "ecdh-sha2-nistp521"
+ kexAlgoCurve25519SHA256 = "curve25519-sha256@libssh.org"
+)
+
+// kexResult captures the outcome of a key exchange.
+type kexResult struct {
+ // Session hash. See also RFC 4253, section 8.
+ H []byte
+
+ // Shared secret. See also RFC 4253, section 8.
+ K []byte
+
+ // Host key as hashed into H.
+ HostKey []byte
+
+ // Signature of H.
+ Signature []byte
+
+ // A cryptographic hash function that matches the security
+ // level of the key exchange algorithm. It is used for
+ // calculating H, and for deriving keys from H and K.
+ Hash crypto.Hash
+
+ // The session ID, which is the first H computed. This is used
+ // to signal data inside transport.
+ SessionID []byte
+}
+
+// handshakeMagics contains data that is always included in the
+// session hash.
+type handshakeMagics struct {
+ clientVersion, serverVersion []byte
+ clientKexInit, serverKexInit []byte
+}
+
+func (m *handshakeMagics) write(w io.Writer) {
+ writeString(w, m.clientVersion)
+ writeString(w, m.serverVersion)
+ writeString(w, m.clientKexInit)
+ writeString(w, m.serverKexInit)
+}
+
+// kexAlgorithm abstracts different key exchange algorithms.
+type kexAlgorithm interface {
+ // Server runs server-side key agreement, signing the result
+ // with a hostkey.
+ Server(p packetConn, rand io.Reader, magics *handshakeMagics, s Signer) (*kexResult, error)
+
+ // Client runs the client-side key agreement. Caller is
+ // responsible for verifying the host key signature.
+ Client(p packetConn, rand io.Reader, magics *handshakeMagics) (*kexResult, error)
+}
+
+// dhGroup is a multiplicative group suitable for implementing Diffie-Hellman key agreement.
+type dhGroup struct {
+ g, p *big.Int
+}
+
+func (group *dhGroup) diffieHellman(theirPublic, myPrivate *big.Int) (*big.Int, error) {
+ if theirPublic.Sign() <= 0 || theirPublic.Cmp(group.p) >= 0 {
+ return nil, errors.New("ssh: DH parameter out of bounds")
+ }
+ return new(big.Int).Exp(theirPublic, myPrivate, group.p), nil
+}
+
+func (group *dhGroup) Client(c packetConn, randSource io.Reader, magics *handshakeMagics) (*kexResult, error) {
+ hashFunc := crypto.SHA1
+
+ x, err := rand.Int(randSource, group.p)
+ if err != nil {
+ return nil, err
+ }
+ X := new(big.Int).Exp(group.g, x, group.p)
+ kexDHInit := kexDHInitMsg{
+ X: X,
+ }
+ if err := c.writePacket(Marshal(&kexDHInit)); err != nil {
+ return nil, err
+ }
+
+ packet, err := c.readPacket()
+ if err != nil {
+ return nil, err
+ }
+
+ var kexDHReply kexDHReplyMsg
+ if err = Unmarshal(packet, &kexDHReply); err != nil {
+ return nil, err
+ }
+
+ kInt, err := group.diffieHellman(kexDHReply.Y, x)
+ if err != nil {
+ return nil, err
+ }
+
+ h := hashFunc.New()
+ magics.write(h)
+ writeString(h, kexDHReply.HostKey)
+ writeInt(h, X)
+ writeInt(h, kexDHReply.Y)
+ K := make([]byte, intLength(kInt))
+ marshalInt(K, kInt)
+ h.Write(K)
+
+ return &kexResult{
+ H: h.Sum(nil),
+ K: K,
+ HostKey: kexDHReply.HostKey,
+ Signature: kexDHReply.Signature,
+ Hash: crypto.SHA1,
+ }, nil
+}
+
+func (group *dhGroup) Server(c packetConn, randSource io.Reader, magics *handshakeMagics, priv Signer) (result *kexResult, err error) {
+ hashFunc := crypto.SHA1
+ packet, err := c.readPacket()
+ if err != nil {
+ return
+ }
+ var kexDHInit kexDHInitMsg
+ if err = Unmarshal(packet, &kexDHInit); err != nil {
+ return
+ }
+
+ y, err := rand.Int(randSource, group.p)
+ if err != nil {
+ return
+ }
+
+ Y := new(big.Int).Exp(group.g, y, group.p)
+ kInt, err := group.diffieHellman(kexDHInit.X, y)
+ if err != nil {
+ return nil, err
+ }
+
+ hostKeyBytes := priv.PublicKey().Marshal()
+
+ h := hashFunc.New()
+ magics.write(h)
+ writeString(h, hostKeyBytes)
+ writeInt(h, kexDHInit.X)
+ writeInt(h, Y)
+
+ K := make([]byte, intLength(kInt))
+ marshalInt(K, kInt)
+ h.Write(K)
+
+ H := h.Sum(nil)
+
+ // H is already a hash, but the hostkey signing will apply its
+ // own key-specific hash algorithm.
+ sig, err := signAndMarshal(priv, randSource, H)
+ if err != nil {
+ return nil, err
+ }
+
+ kexDHReply := kexDHReplyMsg{
+ HostKey: hostKeyBytes,
+ Y: Y,
+ Signature: sig,
+ }
+ packet = Marshal(&kexDHReply)
+
+ err = c.writePacket(packet)
+ return &kexResult{
+ H: H,
+ K: K,
+ HostKey: hostKeyBytes,
+ Signature: sig,
+ Hash: crypto.SHA1,
+ }, err
+}
+
+// ecdh performs Elliptic Curve Diffie-Hellman key exchange as
+// described in RFC 5656, section 4.
+type ecdh struct {
+ curve elliptic.Curve
+}
+
+func (kex *ecdh) Client(c packetConn, rand io.Reader, magics *handshakeMagics) (*kexResult, error) {
+ ephKey, err := ecdsa.GenerateKey(kex.curve, rand)
+ if err != nil {
+ return nil, err
+ }
+
+ kexInit := kexECDHInitMsg{
+ ClientPubKey: elliptic.Marshal(kex.curve, ephKey.PublicKey.X, ephKey.PublicKey.Y),
+ }
+
+ serialized := Marshal(&kexInit)
+ if err := c.writePacket(serialized); err != nil {
+ return nil, err
+ }
+
+ packet, err := c.readPacket()
+ if err != nil {
+ return nil, err
+ }
+
+ var reply kexECDHReplyMsg
+ if err = Unmarshal(packet, &reply); err != nil {
+ return nil, err
+ }
+
+ x, y, err := unmarshalECKey(kex.curve, reply.EphemeralPubKey)
+ if err != nil {
+ return nil, err
+ }
+
+ // generate shared secret
+ secret, _ := kex.curve.ScalarMult(x, y, ephKey.D.Bytes())
+
+ h := ecHash(kex.curve).New()
+ magics.write(h)
+ writeString(h, reply.HostKey)
+ writeString(h, kexInit.ClientPubKey)
+ writeString(h, reply.EphemeralPubKey)
+ K := make([]byte, intLength(secret))
+ marshalInt(K, secret)
+ h.Write(K)
+
+ return &kexResult{
+ H: h.Sum(nil),
+ K: K,
+ HostKey: reply.HostKey,
+ Signature: reply.Signature,
+ Hash: ecHash(kex.curve),
+ }, nil
+}
+
+// unmarshalECKey parses and checks an EC key.
+func unmarshalECKey(curve elliptic.Curve, pubkey []byte) (x, y *big.Int, err error) {
+ x, y = elliptic.Unmarshal(curve, pubkey)
+ if x == nil {
+ return nil, nil, errors.New("ssh: elliptic.Unmarshal failure")
+ }
+ if !validateECPublicKey(curve, x, y) {
+ return nil, nil, errors.New("ssh: public key not on curve")
+ }
+ return x, y, nil
+}
+
+// validateECPublicKey checks that the point is a valid public key for
+// the given curve. See [SEC1], 3.2.2
+func validateECPublicKey(curve elliptic.Curve, x, y *big.Int) bool {
+ if x.Sign() == 0 && y.Sign() == 0 {
+ return false
+ }
+
+ if x.Cmp(curve.Params().P) >= 0 {
+ return false
+ }
+
+ if y.Cmp(curve.Params().P) >= 0 {
+ return false
+ }
+
+ if !curve.IsOnCurve(x, y) {
+ return false
+ }
+
+ // We don't check if N * PubKey == 0, since
+ //
+ // - the NIST curves have cofactor = 1, so this is implicit.
+ // (We don't foresee an implementation that supports non NIST
+ // curves)
+ //
+ // - for ephemeral keys, we don't need to worry about small
+ // subgroup attacks.
+ return true
+}
+
+func (kex *ecdh) Server(c packetConn, rand io.Reader, magics *handshakeMagics, priv Signer) (result *kexResult, err error) {
+ packet, err := c.readPacket()
+ if err != nil {
+ return nil, err
+ }
+
+ var kexECDHInit kexECDHInitMsg
+ if err = Unmarshal(packet, &kexECDHInit); err != nil {
+ return nil, err
+ }
+
+ clientX, clientY, err := unmarshalECKey(kex.curve, kexECDHInit.ClientPubKey)
+ if err != nil {
+ return nil, err
+ }
+
+ // We could cache this key across multiple users/multiple
+ // connection attempts, but the benefit is small. OpenSSH
+ // generates a new key for each incoming connection.
+ ephKey, err := ecdsa.GenerateKey(kex.curve, rand)
+ if err != nil {
+ return nil, err
+ }
+
+ hostKeyBytes := priv.PublicKey().Marshal()
+
+ serializedEphKey := elliptic.Marshal(kex.curve, ephKey.PublicKey.X, ephKey.PublicKey.Y)
+
+ // generate shared secret
+ secret, _ := kex.curve.ScalarMult(clientX, clientY, ephKey.D.Bytes())
+
+ h := ecHash(kex.curve).New()
+ magics.write(h)
+ writeString(h, hostKeyBytes)
+ writeString(h, kexECDHInit.ClientPubKey)
+ writeString(h, serializedEphKey)
+
+ K := make([]byte, intLength(secret))
+ marshalInt(K, secret)
+ h.Write(K)
+
+ H := h.Sum(nil)
+
+ // H is already a hash, but the hostkey signing will apply its
+ // own key-specific hash algorithm.
+ sig, err := signAndMarshal(priv, rand, H)
+ if err != nil {
+ return nil, err
+ }
+
+ reply := kexECDHReplyMsg{
+ EphemeralPubKey: serializedEphKey,
+ HostKey: hostKeyBytes,
+ Signature: sig,
+ }
+
+ serialized := Marshal(&reply)
+ if err := c.writePacket(serialized); err != nil {
+ return nil, err
+ }
+
+ return &kexResult{
+ H: H,
+ K: K,
+ HostKey: reply.HostKey,
+ Signature: sig,
+ Hash: ecHash(kex.curve),
+ }, nil
+}
+
+var kexAlgoMap = map[string]kexAlgorithm{}
+
+func init() {
+ // This is the group called diffie-hellman-group1-sha1 in RFC
+ // 4253 and Oakley Group 2 in RFC 2409.
+ p, _ := new(big.Int).SetString("FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF", 16)
+ kexAlgoMap[kexAlgoDH1SHA1] = &dhGroup{
+ g: new(big.Int).SetInt64(2),
+ p: p,
+ }
+
+ // This is the group called diffie-hellman-group14-sha1 in RFC
+ // 4253 and Oakley Group 14 in RFC 3526.
+ p, _ = new(big.Int).SetString("FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF", 16)
+
+ kexAlgoMap[kexAlgoDH14SHA1] = &dhGroup{
+ g: new(big.Int).SetInt64(2),
+ p: p,
+ }
+
+ kexAlgoMap[kexAlgoECDH521] = &ecdh{elliptic.P521()}
+ kexAlgoMap[kexAlgoECDH384] = &ecdh{elliptic.P384()}
+ kexAlgoMap[kexAlgoECDH256] = &ecdh{elliptic.P256()}
+ kexAlgoMap[kexAlgoCurve25519SHA256] = &curve25519sha256{}
+}
+
+// curve25519sha256 implements the curve25519-sha256@libssh.org key
+// agreement protocol, as described in
+// https://git.libssh.org/projects/libssh.git/tree/doc/curve25519-sha256@libssh.org.txt
+type curve25519sha256 struct{}
+
+type curve25519KeyPair struct {
+ priv [32]byte
+ pub [32]byte
+}
+
+func (kp *curve25519KeyPair) generate(rand io.Reader) error {
+ if _, err := io.ReadFull(rand, kp.priv[:]); err != nil {
+ return err
+ }
+ curve25519.ScalarBaseMult(&kp.pub, &kp.priv)
+ return nil
+}
+
+// curve25519Zeros is just an array of 32 zero bytes so that we have something
+// convenient to compare against in order to reject curve25519 points with the
+// wrong order.
+var curve25519Zeros [32]byte
+
+func (kex *curve25519sha256) Client(c packetConn, rand io.Reader, magics *handshakeMagics) (*kexResult, error) {
+ var kp curve25519KeyPair
+ if err := kp.generate(rand); err != nil {
+ return nil, err
+ }
+ if err := c.writePacket(Marshal(&kexECDHInitMsg{kp.pub[:]})); err != nil {
+ return nil, err
+ }
+
+ packet, err := c.readPacket()
+ if err != nil {
+ return nil, err
+ }
+
+ var reply kexECDHReplyMsg
+ if err = Unmarshal(packet, &reply); err != nil {
+ return nil, err
+ }
+ if len(reply.EphemeralPubKey) != 32 {
+ return nil, errors.New("ssh: peer's curve25519 public value has wrong length")
+ }
+
+ var servPub, secret [32]byte
+ copy(servPub[:], reply.EphemeralPubKey)
+ curve25519.ScalarMult(&secret, &kp.priv, &servPub)
+ if subtle.ConstantTimeCompare(secret[:], curve25519Zeros[:]) == 1 {
+ return nil, errors.New("ssh: peer's curve25519 public value has wrong order")
+ }
+
+ h := crypto.SHA256.New()
+ magics.write(h)
+ writeString(h, reply.HostKey)
+ writeString(h, kp.pub[:])
+ writeString(h, reply.EphemeralPubKey)
+
+ kInt := new(big.Int).SetBytes(secret[:])
+ K := make([]byte, intLength(kInt))
+ marshalInt(K, kInt)
+ h.Write(K)
+
+ return &kexResult{
+ H: h.Sum(nil),
+ K: K,
+ HostKey: reply.HostKey,
+ Signature: reply.Signature,
+ Hash: crypto.SHA256,
+ }, nil
+}
+
+func (kex *curve25519sha256) Server(c packetConn, rand io.Reader, magics *handshakeMagics, priv Signer) (result *kexResult, err error) {
+ packet, err := c.readPacket()
+ if err != nil {
+ return
+ }
+ var kexInit kexECDHInitMsg
+ if err = Unmarshal(packet, &kexInit); err != nil {
+ return
+ }
+
+ if len(kexInit.ClientPubKey) != 32 {
+ return nil, errors.New("ssh: peer's curve25519 public value has wrong length")
+ }
+
+ var kp curve25519KeyPair
+ if err := kp.generate(rand); err != nil {
+ return nil, err
+ }
+
+ var clientPub, secret [32]byte
+ copy(clientPub[:], kexInit.ClientPubKey)
+ curve25519.ScalarMult(&secret, &kp.priv, &clientPub)
+ if subtle.ConstantTimeCompare(secret[:], curve25519Zeros[:]) == 1 {
+ return nil, errors.New("ssh: peer's curve25519 public value has wrong order")
+ }
+
+ hostKeyBytes := priv.PublicKey().Marshal()
+
+ h := crypto.SHA256.New()
+ magics.write(h)
+ writeString(h, hostKeyBytes)
+ writeString(h, kexInit.ClientPubKey)
+ writeString(h, kp.pub[:])
+
+ kInt := new(big.Int).SetBytes(secret[:])
+ K := make([]byte, intLength(kInt))
+ marshalInt(K, kInt)
+ h.Write(K)
+
+ H := h.Sum(nil)
+
+ sig, err := signAndMarshal(priv, rand, H)
+ if err != nil {
+ return nil, err
+ }
+
+ reply := kexECDHReplyMsg{
+ EphemeralPubKey: kp.pub[:],
+ HostKey: hostKeyBytes,
+ Signature: sig,
+ }
+ if err := c.writePacket(Marshal(&reply)); err != nil {
+ return nil, err
+ }
+ return &kexResult{
+ H: H,
+ K: K,
+ HostKey: hostKeyBytes,
+ Signature: sig,
+ Hash: crypto.SHA256,
+ }, nil
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/kex_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/kex_test.go
new file mode 100644
index 00000000000..12ca0acd31d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/kex_test.go
@@ -0,0 +1,50 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+// Key exchange tests.
+
+import (
+ "crypto/rand"
+ "reflect"
+ "testing"
+)
+
+func TestKexes(t *testing.T) {
+ type kexResultErr struct {
+ result *kexResult
+ err error
+ }
+
+ for name, kex := range kexAlgoMap {
+ a, b := memPipe()
+
+ s := make(chan kexResultErr, 1)
+ c := make(chan kexResultErr, 1)
+ var magics handshakeMagics
+ go func() {
+ r, e := kex.Client(a, rand.Reader, &magics)
+ a.Close()
+ c <- kexResultErr{r, e}
+ }()
+ go func() {
+ r, e := kex.Server(b, rand.Reader, &magics, testSigners["ecdsa"])
+ b.Close()
+ s <- kexResultErr{r, e}
+ }()
+
+ clientRes := <-c
+ serverRes := <-s
+ if clientRes.err != nil {
+ t.Errorf("client: %v", clientRes.err)
+ }
+ if serverRes.err != nil {
+ t.Errorf("server: %v", serverRes.err)
+ }
+ if !reflect.DeepEqual(clientRes.result, serverRes.result) {
+ t.Errorf("kex %q: mismatch %#v, %#v", name, clientRes.result, serverRes.result)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/keys.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/keys.go
new file mode 100644
index 00000000000..cfc970b2cd2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/keys.go
@@ -0,0 +1,720 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "crypto"
+ "crypto/dsa"
+ "crypto/ecdsa"
+ "crypto/elliptic"
+ "crypto/rsa"
+ "crypto/x509"
+ "encoding/asn1"
+ "encoding/base64"
+ "encoding/pem"
+ "errors"
+ "fmt"
+ "io"
+ "math/big"
+ "strings"
+)
+
+// These constants represent the algorithm names for key types supported by this
+// package.
+const (
+ KeyAlgoRSA = "ssh-rsa"
+ KeyAlgoDSA = "ssh-dss"
+ KeyAlgoECDSA256 = "ecdsa-sha2-nistp256"
+ KeyAlgoECDSA384 = "ecdsa-sha2-nistp384"
+ KeyAlgoECDSA521 = "ecdsa-sha2-nistp521"
+)
+
+// parsePubKey parses a public key of the given algorithm.
+// Use ParsePublicKey for keys that have the algorithm name prepended.
+func parsePubKey(in []byte, algo string) (pubKey PublicKey, rest []byte, err error) {
+ switch algo {
+ case KeyAlgoRSA:
+ return parseRSA(in)
+ case KeyAlgoDSA:
+ return parseDSA(in)
+ case KeyAlgoECDSA256, KeyAlgoECDSA384, KeyAlgoECDSA521:
+ return parseECDSA(in)
+ case CertAlgoRSAv01, CertAlgoDSAv01, CertAlgoECDSA256v01, CertAlgoECDSA384v01, CertAlgoECDSA521v01:
+ cert, err := parseCert(in, certToPrivAlgo(algo))
+ if err != nil {
+ return nil, nil, err
+ }
+ return cert, nil, nil
+ }
+ return nil, nil, fmt.Errorf("ssh: unknown key algorithm: %v", algo)
+}
+
+// parseAuthorizedKey parses a public key in OpenSSH authorized_keys format
+// (see sshd(8) manual page) once the options and key type fields have been
+// removed.
+func parseAuthorizedKey(in []byte) (out PublicKey, comment string, err error) {
+ in = bytes.TrimSpace(in)
+
+ i := bytes.IndexAny(in, " \t")
+ if i == -1 {
+ i = len(in)
+ }
+ base64Key := in[:i]
+
+ key := make([]byte, base64.StdEncoding.DecodedLen(len(base64Key)))
+ n, err := base64.StdEncoding.Decode(key, base64Key)
+ if err != nil {
+ return nil, "", err
+ }
+ key = key[:n]
+ out, err = ParsePublicKey(key)
+ if err != nil {
+ return nil, "", err
+ }
+ comment = string(bytes.TrimSpace(in[i:]))
+ return out, comment, nil
+}
+
+// ParseKnownHosts parses an entry in the format of the known_hosts file.
+//
+// The known_hosts format is documented in the sshd(8) manual page. This
+// function will parse a single entry from in. On successful return, marker
+// will contain the optional marker value (i.e. "cert-authority" or "revoked")
+// or else be empty, hosts will contain the hosts that this entry matches,
+// pubKey will contain the public key and comment will contain any trailing
+// comment at the end of the line. See the sshd(8) manual page for the various
+// forms that a host string can take.
+//
+// The unparsed remainder of the input will be returned in rest. This function
+// can be called repeatedly to parse multiple entries.
+//
+// If no entries were found in the input then err will be io.EOF. Otherwise a
+// non-nil err value indicates a parse error.
+func ParseKnownHosts(in []byte) (marker string, hosts []string, pubKey PublicKey, comment string, rest []byte, err error) {
+ for len(in) > 0 {
+ end := bytes.IndexByte(in, '\n')
+ if end != -1 {
+ rest = in[end+1:]
+ in = in[:end]
+ } else {
+ rest = nil
+ }
+
+ end = bytes.IndexByte(in, '\r')
+ if end != -1 {
+ in = in[:end]
+ }
+
+ in = bytes.TrimSpace(in)
+ if len(in) == 0 || in[0] == '#' {
+ in = rest
+ continue
+ }
+
+ i := bytes.IndexAny(in, " \t")
+ if i == -1 {
+ in = rest
+ continue
+ }
+
+ // Strip out the beginning of the known_hosts entry.
+ // This is either an optional marker or a (set of) hostname(s).
+ keyFields := bytes.Fields(in)
+ if len(keyFields) < 3 || len(keyFields) > 5 {
+ return "", nil, nil, "", nil, errors.New("ssh: invalid entry in known_hosts data")
+ }
+
+ // keyFields[0] is either "@cert-authority", "@revoked" or a comma separated
+ // list of hosts
+ marker := ""
+ if keyFields[0][0] == '@' {
+ marker = string(keyFields[0][1:])
+ keyFields = keyFields[1:]
+ }
+
+ hosts := string(keyFields[0])
+ // keyFields[1] contains the key type (e.g. “ssh-rsa”).
+ // However, that information is duplicated inside the
+ // base64-encoded key and so is ignored here.
+
+ key := bytes.Join(keyFields[2:], []byte(" "))
+ if pubKey, comment, err = parseAuthorizedKey(key); err != nil {
+ return "", nil, nil, "", nil, err
+ }
+
+ return marker, strings.Split(hosts, ","), pubKey, comment, rest, nil
+ }
+
+ return "", nil, nil, "", nil, io.EOF
+}
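+
+// exampleKnownHostsScan is an illustrative sketch added for documentation
+// purposes; it is not part of the upstream golang.org/x/crypto/ssh API. It
+// shows the intended calling pattern for ParseKnownHosts: feed the unparsed
+// remainder back in until the function reports io.EOF.
+func exampleKnownHostsScan(data []byte) ([]PublicKey, error) {
+ var keys []PublicKey
+ for {
+ _, _, pubKey, _, rest, err := ParseKnownHosts(data)
+ if err == io.EOF {
+ // Input exhausted; no more entries.
+ return keys, nil
+ }
+ if err != nil {
+ return nil, err
+ }
+ keys = append(keys, pubKey)
+ data = rest
+ }
+}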
+
+// ParseAuthorizedKey parses a public key from an authorized_keys
+// file used in OpenSSH according to the sshd(8) manual page.
+func ParseAuthorizedKey(in []byte) (out PublicKey, comment string, options []string, rest []byte, err error) {
+ for len(in) > 0 {
+ end := bytes.IndexByte(in, '\n')
+ if end != -1 {
+ rest = in[end+1:]
+ in = in[:end]
+ } else {
+ rest = nil
+ }
+
+ end = bytes.IndexByte(in, '\r')
+ if end != -1 {
+ in = in[:end]
+ }
+
+ in = bytes.TrimSpace(in)
+ if len(in) == 0 || in[0] == '#' {
+ in = rest
+ continue
+ }
+
+ i := bytes.IndexAny(in, " \t")
+ if i == -1 {
+ in = rest
+ continue
+ }
+
+ if out, comment, err = parseAuthorizedKey(in[i:]); err == nil {
+ return out, comment, options, rest, nil
+ }
+
+ // No key type recognised. Maybe there's an options field at
+ // the beginning.
+ var b byte
+ inQuote := false
+ var candidateOptions []string
+ optionStart := 0
+ for i, b = range in {
+ isEnd := !inQuote && (b == ' ' || b == '\t')
+ if (b == ',' && !inQuote) || isEnd {
+ if i-optionStart > 0 {
+ candidateOptions = append(candidateOptions, string(in[optionStart:i]))
+ }
+ optionStart = i + 1
+ }
+ if isEnd {
+ break
+ }
+ if b == '"' && (i == 0 || (i > 0 && in[i-1] != '\\')) {
+ inQuote = !inQuote
+ }
+ }
+ for i < len(in) && (in[i] == ' ' || in[i] == '\t') {
+ i++
+ }
+ if i == len(in) {
+ // Invalid line: unmatched quote
+ in = rest
+ continue
+ }
+
+ in = in[i:]
+ i = bytes.IndexAny(in, " \t")
+ if i == -1 {
+ in = rest
+ continue
+ }
+
+ if out, comment, err = parseAuthorizedKey(in[i:]); err == nil {
+ options = candidateOptions
+ return out, comment, options, rest, nil
+ }
+
+ in = rest
+ continue
+ }
+
+ return nil, "", nil, nil, errors.New("ssh: no key found")
+}
+
+// ParsePublicKey parses an SSH public key formatted for use in
+// the SSH wire protocol according to RFC 4253, section 6.6.
+func ParsePublicKey(in []byte) (out PublicKey, err error) {
+ algo, in, ok := parseString(in)
+ if !ok {
+ return nil, errShortRead
+ }
+ var rest []byte
+ out, rest, err = parsePubKey(in, string(algo))
+ if len(rest) > 0 {
+ return nil, errors.New("ssh: trailing junk in public key")
+ }
+
+ return out, err
+}
+
+// MarshalAuthorizedKey serializes key for inclusion in an OpenSSH
+// authorized_keys file. The return value ends with newline.
+func MarshalAuthorizedKey(key PublicKey) []byte {
+ b := &bytes.Buffer{}
+ b.WriteString(key.Type())
+ b.WriteByte(' ')
+ e := base64.NewEncoder(base64.StdEncoding, b)
+ e.Write(key.Marshal())
+ e.Close()
+ b.WriteByte('\n')
+ return b.Bytes()
+}
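+
+// exampleAuthorizedKeyRoundTrip is an illustrative sketch added for
+// documentation purposes; it is not part of the upstream API. It shows that
+// MarshalAuthorizedKey output feeds straight back into ParseAuthorizedKey;
+// only the key material survives the round trip, since the marshaled line
+// carries no options and no comment.
+func exampleAuthorizedKeyRoundTrip(key PublicKey) (PublicKey, error) {
+ line := MarshalAuthorizedKey(key) // e.g. "ssh-rsa AAAA...\n"
+ parsed, _, _, _, err := ParseAuthorizedKey(line)
+ return parsed, err
+}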
+
+// PublicKey is an abstraction of different types of public keys.
+type PublicKey interface {
+ // Type returns the key's type, e.g. "ssh-rsa".
+ Type() string
+
+ // Marshal returns the serialized key data in SSH wire format,
+ // with the name prefix.
+ Marshal() []byte
+
+ // Verify that sig is a signature on the given data using this
+ // key. This function will hash the data appropriately first.
+ Verify(data []byte, sig *Signature) error
+}
+
+// A Signer can create signatures that verify against a public key.
+type Signer interface {
+ // PublicKey returns an associated PublicKey instance.
+ PublicKey() PublicKey
+
+ // Sign returns raw signature for the given data. This method
+ // will apply the hash specified for the keytype to the data.
+ Sign(rand io.Reader, data []byte) (*Signature, error)
+}
+
+type rsaPublicKey rsa.PublicKey
+
+func (r *rsaPublicKey) Type() string {
+ return "ssh-rsa"
+}
+
+// parseRSA parses an RSA key according to RFC 4253, section 6.6.
+func parseRSA(in []byte) (out PublicKey, rest []byte, err error) {
+ var w struct {
+ E *big.Int
+ N *big.Int
+ Rest []byte `ssh:"rest"`
+ }
+ if err := Unmarshal(in, &w); err != nil {
+ return nil, nil, err
+ }
+
+ if w.E.BitLen() > 24 {
+ return nil, nil, errors.New("ssh: exponent too large")
+ }
+ e := w.E.Int64()
+ if e < 3 || e&1 == 0 {
+ return nil, nil, errors.New("ssh: incorrect exponent")
+ }
+
+ var key rsa.PublicKey
+ key.E = int(e)
+ key.N = w.N
+ return (*rsaPublicKey)(&key), w.Rest, nil
+}
+
+func (r *rsaPublicKey) Marshal() []byte {
+ e := new(big.Int).SetInt64(int64(r.E))
+ wirekey := struct {
+ Name string
+ E *big.Int
+ N *big.Int
+ }{
+ KeyAlgoRSA,
+ e,
+ r.N,
+ }
+ return Marshal(&wirekey)
+}
+
+func (r *rsaPublicKey) Verify(data []byte, sig *Signature) error {
+ if sig.Format != r.Type() {
+ return fmt.Errorf("ssh: signature type %s for key type %s", sig.Format, r.Type())
+ }
+ h := crypto.SHA1.New()
+ h.Write(data)
+ digest := h.Sum(nil)
+ return rsa.VerifyPKCS1v15((*rsa.PublicKey)(r), crypto.SHA1, digest, sig.Blob)
+}
+
+type dsaPublicKey dsa.PublicKey
+
+func (r *dsaPublicKey) Type() string {
+ return "ssh-dss"
+}
+
+// parseDSA parses a DSA key according to RFC 4253, section 6.6.
+func parseDSA(in []byte) (out PublicKey, rest []byte, err error) {
+ var w struct {
+ P, Q, G, Y *big.Int
+ Rest []byte `ssh:"rest"`
+ }
+ if err := Unmarshal(in, &w); err != nil {
+ return nil, nil, err
+ }
+
+ key := &dsaPublicKey{
+ Parameters: dsa.Parameters{
+ P: w.P,
+ Q: w.Q,
+ G: w.G,
+ },
+ Y: w.Y,
+ }
+ return key, w.Rest, nil
+}
+
+func (k *dsaPublicKey) Marshal() []byte {
+ w := struct {
+ Name string
+ P, Q, G, Y *big.Int
+ }{
+ k.Type(),
+ k.P,
+ k.Q,
+ k.G,
+ k.Y,
+ }
+
+ return Marshal(&w)
+}
+
+func (k *dsaPublicKey) Verify(data []byte, sig *Signature) error {
+ if sig.Format != k.Type() {
+ return fmt.Errorf("ssh: signature type %s for key type %s", sig.Format, k.Type())
+ }
+ h := crypto.SHA1.New()
+ h.Write(data)
+ digest := h.Sum(nil)
+
+ // Per RFC 4253, section 6.6,
+ // The value for 'dss_signature_blob' is encoded as a string containing
+ // r, followed by s (which are 160-bit integers, without lengths or
+ // padding, unsigned, and in network byte order).
+ // For DSS purposes, sig.Blob should be exactly 40 bytes in length.
+ if len(sig.Blob) != 40 {
+ return errors.New("ssh: DSA signature parse error")
+ }
+ r := new(big.Int).SetBytes(sig.Blob[:20])
+ s := new(big.Int).SetBytes(sig.Blob[20:])
+ if dsa.Verify((*dsa.PublicKey)(k), digest, r, s) {
+ return nil
+ }
+ return errors.New("ssh: signature did not verify")
+}
+
+type dsaPrivateKey struct {
+ *dsa.PrivateKey
+}
+
+func (k *dsaPrivateKey) PublicKey() PublicKey {
+ return (*dsaPublicKey)(&k.PrivateKey.PublicKey)
+}
+
+func (k *dsaPrivateKey) Sign(rand io.Reader, data []byte) (*Signature, error) {
+ h := crypto.SHA1.New()
+ h.Write(data)
+ digest := h.Sum(nil)
+ r, s, err := dsa.Sign(rand, k.PrivateKey, digest)
+ if err != nil {
+ return nil, err
+ }
+
+ sig := make([]byte, 40)
+ rb := r.Bytes()
+ sb := s.Bytes()
+
+ copy(sig[20-len(rb):20], rb)
+ copy(sig[40-len(sb):], sb)
+
+ return &Signature{
+ Format: k.PublicKey().Type(),
+ Blob: sig,
+ }, nil
+}
+
+type ecdsaPublicKey ecdsa.PublicKey
+
+func (key *ecdsaPublicKey) Type() string {
+ return "ecdsa-sha2-" + key.nistID()
+}
+
+func (key *ecdsaPublicKey) nistID() string {
+ switch key.Params().BitSize {
+ case 256:
+ return "nistp256"
+ case 384:
+ return "nistp384"
+ case 521:
+ return "nistp521"
+ }
+ panic("ssh: unsupported ecdsa key size")
+}
+
+func supportedEllipticCurve(curve elliptic.Curve) bool {
+ return curve == elliptic.P256() || curve == elliptic.P384() || curve == elliptic.P521()
+}
+
+// ecHash returns the hash to match the given elliptic curve, see RFC
+// 5656, section 6.2.1
+func ecHash(curve elliptic.Curve) crypto.Hash {
+ bitSize := curve.Params().BitSize
+ switch {
+ case bitSize <= 256:
+ return crypto.SHA256
+ case bitSize <= 384:
+ return crypto.SHA384
+ }
+ return crypto.SHA512
+}
+
+// parseECDSA parses an ECDSA key according to RFC 5656, section 3.1.
+func parseECDSA(in []byte) (out PublicKey, rest []byte, err error) {
+ var w struct {
+ Curve string
+ KeyBytes []byte
+ Rest []byte `ssh:"rest"`
+ }
+
+ if err := Unmarshal(in, &w); err != nil {
+ return nil, nil, err
+ }
+
+ key := new(ecdsa.PublicKey)
+
+ switch w.Curve {
+ case "nistp256":
+ key.Curve = elliptic.P256()
+ case "nistp384":
+ key.Curve = elliptic.P384()
+ case "nistp521":
+ key.Curve = elliptic.P521()
+ default:
+ return nil, nil, errors.New("ssh: unsupported curve")
+ }
+
+ key.X, key.Y = elliptic.Unmarshal(key.Curve, w.KeyBytes)
+ if key.X == nil || key.Y == nil {
+ return nil, nil, errors.New("ssh: invalid curve point")
+ }
+ return (*ecdsaPublicKey)(key), w.Rest, nil
+}
+
+func (key *ecdsaPublicKey) Marshal() []byte {
+ // See RFC 5656, section 3.1.
+ keyBytes := elliptic.Marshal(key.Curve, key.X, key.Y)
+ w := struct {
+ Name string
+ ID string
+ Key []byte
+ }{
+ key.Type(),
+ key.nistID(),
+ keyBytes,
+ }
+
+ return Marshal(&w)
+}
+
+func (key *ecdsaPublicKey) Verify(data []byte, sig *Signature) error {
+ if sig.Format != key.Type() {
+ return fmt.Errorf("ssh: signature type %s for key type %s", sig.Format, key.Type())
+ }
+
+ h := ecHash(key.Curve).New()
+ h.Write(data)
+ digest := h.Sum(nil)
+
+ // Per RFC 5656, section 3.1.2,
+ // The ecdsa_signature_blob value has the following specific encoding:
+ // mpint r
+ // mpint s
+ var ecSig struct {
+ R *big.Int
+ S *big.Int
+ }
+
+ if err := Unmarshal(sig.Blob, &ecSig); err != nil {
+ return err
+ }
+
+ if ecdsa.Verify((*ecdsa.PublicKey)(key), digest, ecSig.R, ecSig.S) {
+ return nil
+ }
+ return errors.New("ssh: signature did not verify")
+}
+
+// NewSignerFromKey takes an *rsa.PrivateKey, *dsa.PrivateKey,
+// *ecdsa.PrivateKey or any other crypto.Signer and returns a corresponding
+// Signer instance. ECDSA keys must use P-256, P-384 or P-521.
+func NewSignerFromKey(key interface{}) (Signer, error) {
+ switch key := key.(type) {
+ case crypto.Signer:
+ return NewSignerFromSigner(key)
+ case *dsa.PrivateKey:
+ return &dsaPrivateKey{key}, nil
+ default:
+ return nil, fmt.Errorf("ssh: unsupported key type %T", key)
+ }
+}
+
+type wrappedSigner struct {
+ signer crypto.Signer
+ pubKey PublicKey
+}
+
+// NewSignerFromSigner takes any crypto.Signer implementation and
+// returns a corresponding Signer interface. This can be used, for
+// example, with keys kept in hardware modules.
+func NewSignerFromSigner(signer crypto.Signer) (Signer, error) {
+ pubKey, err := NewPublicKey(signer.Public())
+ if err != nil {
+ return nil, err
+ }
+
+ return &wrappedSigner{signer, pubKey}, nil
+}
+
+func (s *wrappedSigner) PublicKey() PublicKey {
+ return s.pubKey
+}
+
+func (s *wrappedSigner) Sign(rand io.Reader, data []byte) (*Signature, error) {
+ var hashFunc crypto.Hash
+
+ switch key := s.pubKey.(type) {
+ case *rsaPublicKey, *dsaPublicKey:
+ hashFunc = crypto.SHA1
+ case *ecdsaPublicKey:
+ hashFunc = ecHash(key.Curve)
+ default:
+ return nil, fmt.Errorf("ssh: unsupported key type %T", key)
+ }
+
+ h := hashFunc.New()
+ h.Write(data)
+ digest := h.Sum(nil)
+
+ signature, err := s.signer.Sign(rand, digest, hashFunc)
+ if err != nil {
+ return nil, err
+ }
+
+ // crypto.Signer.Sign is expected to return an ASN.1-encoded signature
+ // for ECDSA and DSA, but that's not the encoding expected by SSH, so
+ // re-encode.
+ switch s.pubKey.(type) {
+ case *ecdsaPublicKey, *dsaPublicKey:
+ type asn1Signature struct {
+ R, S *big.Int
+ }
+ asn1Sig := new(asn1Signature)
+ _, err := asn1.Unmarshal(signature, asn1Sig)
+ if err != nil {
+ return nil, err
+ }
+
+ switch s.pubKey.(type) {
+ case *ecdsaPublicKey:
+ signature = Marshal(asn1Sig)
+
+ case *dsaPublicKey:
+ signature = make([]byte, 40)
+ r := asn1Sig.R.Bytes()
+ s := asn1Sig.S.Bytes()
+ copy(signature[20-len(r):20], r)
+ copy(signature[40-len(s):40], s)
+ }
+ }
+
+ return &Signature{
+ Format: s.pubKey.Type(),
+ Blob: signature,
+ }, nil
+}
+
+// NewPublicKey takes an *rsa.PublicKey, *dsa.PublicKey or *ecdsa.PublicKey
+// and returns a corresponding PublicKey instance. ECDSA keys must use P-256,
+// P-384 or P-521.
+func NewPublicKey(key interface{}) (PublicKey, error) {
+ switch key := key.(type) {
+ case *rsa.PublicKey:
+ return (*rsaPublicKey)(key), nil
+ case *ecdsa.PublicKey:
+ if !supportedEllipticCurve(key.Curve) {
+ return nil, errors.New("ssh: only P-256, P-384 and P-521 EC keys are supported.")
+ }
+ return (*ecdsaPublicKey)(key), nil
+ case *dsa.PublicKey:
+ return (*dsaPublicKey)(key), nil
+ default:
+ return nil, fmt.Errorf("ssh: unsupported key type %T", key)
+ }
+}
+
+// ParsePrivateKey returns a Signer from a PEM encoded private key. It supports
+// the same keys as ParseRawPrivateKey.
+func ParsePrivateKey(pemBytes []byte) (Signer, error) {
+ key, err := ParseRawPrivateKey(pemBytes)
+ if err != nil {
+ return nil, err
+ }
+
+ return NewSignerFromKey(key)
+}
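+
+// exampleSignWithPEMKey is an illustrative sketch added for documentation
+// purposes; it is not part of the upstream API. It shows the usual flow built
+// on ParsePrivateKey: obtain a Signer from PEM bytes, sign some data, and
+// check the signature against the matching public key. pemBytes is assumed
+// to hold one of the PEM types accepted by ParseRawPrivateKey.
+func exampleSignWithPEMKey(pemBytes, data []byte, randSource io.Reader) error {
+ signer, err := ParsePrivateKey(pemBytes)
+ if err != nil {
+ return err
+ }
+ sig, err := signer.Sign(randSource, data)
+ if err != nil {
+ return err
+ }
+ return signer.PublicKey().Verify(data, sig)
+}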
+
+// ParseRawPrivateKey returns a private key from a PEM encoded private key. It
+// supports RSA (PKCS#1), DSA (OpenSSL), and ECDSA private keys.
+func ParseRawPrivateKey(pemBytes []byte) (interface{}, error) {
+ block, _ := pem.Decode(pemBytes)
+ if block == nil {
+ return nil, errors.New("ssh: no key found")
+ }
+
+ switch block.Type {
+ case "RSA PRIVATE KEY":
+ return x509.ParsePKCS1PrivateKey(block.Bytes)
+ case "EC PRIVATE KEY":
+ return x509.ParseECPrivateKey(block.Bytes)
+ case "DSA PRIVATE KEY":
+ return ParseDSAPrivateKey(block.Bytes)
+ default:
+ return nil, fmt.Errorf("ssh: unsupported key type %q", block.Type)
+ }
+}
+
+// ParseDSAPrivateKey returns a DSA private key from its ASN.1 DER encoding, as
+// specified by the OpenSSL DSA man page.
+func ParseDSAPrivateKey(der []byte) (*dsa.PrivateKey, error) {
+ var k struct {
+ Version int
+ P *big.Int
+ Q *big.Int
+ G *big.Int
+ Pub *big.Int // public value y; ASN.1 SEQUENCE order is version, p, q, g, pub, priv
+ Priv *big.Int // private value x; asn1.Unmarshal matches fields positionally
+ }
+ rest, err := asn1.Unmarshal(der, &k)
+ if err != nil {
+ return nil, errors.New("ssh: failed to parse DSA key: " + err.Error())
+ }
+ if len(rest) > 0 {
+ return nil, errors.New("ssh: garbage after DSA key")
+ }
+
+ return &dsa.PrivateKey{
+ PublicKey: dsa.PublicKey{
+ Parameters: dsa.Parameters{
+ P: k.P,
+ Q: k.Q,
+ G: k.G,
+ },
+ Y: k.Pub,
+ },
+ X: k.Priv,
+ }, nil
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/keys_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/keys_test.go
new file mode 100644
index 00000000000..27569473fcf
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/keys_test.go
@@ -0,0 +1,437 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "crypto/dsa"
+ "crypto/ecdsa"
+ "crypto/elliptic"
+ "crypto/rand"
+ "crypto/rsa"
+ "encoding/base64"
+ "fmt"
+ "reflect"
+ "strings"
+ "testing"
+
+ "golang.org/x/crypto/ssh/testdata"
+)
+
+func rawKey(pub PublicKey) interface{} {
+ switch k := pub.(type) {
+ case *rsaPublicKey:
+ return (*rsa.PublicKey)(k)
+ case *dsaPublicKey:
+ return (*dsa.PublicKey)(k)
+ case *ecdsaPublicKey:
+ return (*ecdsa.PublicKey)(k)
+ case *Certificate:
+ return k
+ }
+ panic("unknown key type")
+}
+
+func TestKeyMarshalParse(t *testing.T) {
+ for _, priv := range testSigners {
+ pub := priv.PublicKey()
+ roundtrip, err := ParsePublicKey(pub.Marshal())
+ if err != nil {
+ t.Errorf("ParsePublicKey(%T): %v", pub, err)
+ }
+
+ k1 := rawKey(pub)
+ k2 := rawKey(roundtrip)
+
+ if !reflect.DeepEqual(k1, k2) {
+ t.Errorf("got %#v in roundtrip, want %#v", k2, k1)
+ }
+ }
+}
+
+func TestUnsupportedCurves(t *testing.T) {
+ raw, err := ecdsa.GenerateKey(elliptic.P224(), rand.Reader)
+ if err != nil {
+ t.Fatalf("GenerateKey: %v", err)
+ }
+
+ if _, err = NewSignerFromKey(raw); err == nil || !strings.Contains(err.Error(), "only P-256") {
+ t.Fatalf("NewPrivateKey should not succeed with P-224, got: %v", err)
+ }
+
+ if _, err = NewPublicKey(&raw.PublicKey); err == nil || !strings.Contains(err.Error(), "only P-256") {
+ t.Fatalf("NewPublicKey should not succeed with P-224, got: %v", err)
+ }
+}
+
+func TestNewPublicKey(t *testing.T) {
+ for _, k := range testSigners {
+ raw := rawKey(k.PublicKey())
+ // Skip certificates, as NewPublicKey does not support them.
+ if _, ok := raw.(*Certificate); ok {
+ continue
+ }
+ pub, err := NewPublicKey(raw)
+ if err != nil {
+ t.Errorf("NewPublicKey(%#v): %v", raw, err)
+ }
+ if !reflect.DeepEqual(k.PublicKey(), pub) {
+ t.Errorf("NewPublicKey(%#v) = %#v, want %#v", raw, pub, k.PublicKey())
+ }
+ }
+}
+
+func TestKeySignVerify(t *testing.T) {
+ for _, priv := range testSigners {
+ pub := priv.PublicKey()
+
+ data := []byte("sign me")
+ sig, err := priv.Sign(rand.Reader, data)
+ if err != nil {
+ t.Fatalf("Sign(%T): %v", priv, err)
+ }
+
+ if err := pub.Verify(data, sig); err != nil {
+ t.Errorf("publicKey.Verify(%T): %v", priv, err)
+ }
+ sig.Blob[5]++
+ if err := pub.Verify(data, sig); err == nil {
+ t.Errorf("publicKey.Verify on broken sig did not fail")
+ }
+ }
+}
+
+func TestParseRSAPrivateKey(t *testing.T) {
+ key := testPrivateKeys["rsa"]
+
+ rsa, ok := key.(*rsa.PrivateKey)
+ if !ok {
+ t.Fatalf("got %T, want *rsa.PrivateKey", rsa)
+ }
+
+ if err := rsa.Validate(); err != nil {
+ t.Errorf("Validate: %v", err)
+ }
+}
+
+func TestParseECPrivateKey(t *testing.T) {
+ key := testPrivateKeys["ecdsa"]
+
+ ecKey, ok := key.(*ecdsa.PrivateKey)
+ if !ok {
+ t.Fatalf("got %T, want *ecdsa.PrivateKey", ecKey)
+ }
+
+ if !validateECPublicKey(ecKey.Curve, ecKey.X, ecKey.Y) {
+ t.Fatalf("public key does not validate.")
+ }
+}
+
+func TestParseDSA(t *testing.T) {
+ // We actually exercise the ParsePrivateKey codepath here, as opposed to
+ // using the ParseRawPrivateKey+NewSignerFromKey path that testdata_test.go
+ // uses.
+ s, err := ParsePrivateKey(testdata.PEMBytes["dsa"])
+ if err != nil {
+ t.Fatalf("ParsePrivateKey returned error: %s", err)
+ }
+
+ data := []byte("sign me")
+ sig, err := s.Sign(rand.Reader, data)
+ if err != nil {
+ t.Fatalf("dsa.Sign: %v", err)
+ }
+
+ if err := s.PublicKey().Verify(data, sig); err != nil {
+ t.Errorf("Verify failed: %v", err)
+ }
+}
+
+// Tests for authorized_keys parsing.
+
+// getTestKey returns a public key, and its base64 encoding.
+func getTestKey() (PublicKey, string) {
+ k := testPublicKeys["rsa"]
+
+ b := &bytes.Buffer{}
+ e := base64.NewEncoder(base64.StdEncoding, b)
+ e.Write(k.Marshal())
+ e.Close()
+
+ return k, b.String()
+}
+
+func TestMarshalParsePublicKey(t *testing.T) {
+ pub, pubSerialized := getTestKey()
+ line := fmt.Sprintf("%s %s user@host", pub.Type(), pubSerialized)
+
+ authKeys := MarshalAuthorizedKey(pub)
+ actualFields := strings.Fields(string(authKeys))
+ if len(actualFields) == 0 {
+ t.Fatalf("failed authKeys: %v", authKeys)
+ }
+
+ // drop the comment
+ expectedFields := strings.Fields(line)[0:2]
+
+ if !reflect.DeepEqual(actualFields, expectedFields) {
+ t.Errorf("got %v, expected %v", actualFields, expectedFields)
+ }
+
+ actPub, _, _, _, err := ParseAuthorizedKey([]byte(line))
+ if err != nil {
+ t.Fatalf("cannot parse %v: %v", line, err)
+ }
+ if !reflect.DeepEqual(actPub, pub) {
+ t.Errorf("got %v, expected %v", actPub, pub)
+ }
+}
+
+type authResult struct {
+ pubKey PublicKey
+ options []string
+ comments string
+ rest string
+ ok bool
+}
+
+func testAuthorizedKeys(t *testing.T, authKeys []byte, expected []authResult) {
+ rest := authKeys
+ var values []authResult
+ for len(rest) > 0 {
+ var r authResult
+ var err error
+ r.pubKey, r.comments, r.options, rest, err = ParseAuthorizedKey(rest)
+ r.ok = (err == nil)
+ t.Log(err)
+ r.rest = string(rest)
+ values = append(values, r)
+ }
+
+ if !reflect.DeepEqual(values, expected) {
+ t.Errorf("got %#v, expected %#v", values, expected)
+ }
+}
+
+func TestAuthorizedKeyBasic(t *testing.T) {
+ pub, pubSerialized := getTestKey()
+ line := "ssh-rsa " + pubSerialized + " user@host"
+ testAuthorizedKeys(t, []byte(line),
+ []authResult{
+ {pub, nil, "user@host", "", true},
+ })
+}
+
+func TestAuth(t *testing.T) {
+ pub, pubSerialized := getTestKey()
+ authWithOptions := []string{
+ `# comments to ignore before any keys...`,
+ ``,
+ `env="HOME=/home/root",no-port-forwarding ssh-rsa ` + pubSerialized + ` user@host`,
+ `# comments to ignore, along with a blank line`,
+ ``,
+ `env="HOME=/home/root2" ssh-rsa ` + pubSerialized + ` user2@host2`,
+ ``,
+ `# more comments, plus an invalid entry`,
+ `ssh-rsa data-that-will-not-parse user@host3`,
+ }
+ for _, eol := range []string{"\n", "\r\n"} {
+ authOptions := strings.Join(authWithOptions, eol)
+ rest2 := strings.Join(authWithOptions[3:], eol)
+ rest3 := strings.Join(authWithOptions[6:], eol)
+ testAuthorizedKeys(t, []byte(authOptions), []authResult{
+ {pub, []string{`env="HOME=/home/root"`, "no-port-forwarding"}, "user@host", rest2, true},
+ {pub, []string{`env="HOME=/home/root2"`}, "user2@host2", rest3, true},
+ {nil, nil, "", "", false},
+ })
+ }
+}
+
+func TestAuthWithQuotedSpaceInEnv(t *testing.T) {
+ pub, pubSerialized := getTestKey()
+ authWithQuotedSpaceInEnv := []byte(`env="HOME=/home/root dir",no-port-forwarding ssh-rsa ` + pubSerialized + ` user@host`)
+ testAuthorizedKeys(t, []byte(authWithQuotedSpaceInEnv), []authResult{
+ {pub, []string{`env="HOME=/home/root dir"`, "no-port-forwarding"}, "user@host", "", true},
+ })
+}
+
+func TestAuthWithQuotedCommaInEnv(t *testing.T) {
+ pub, pubSerialized := getTestKey()
+ authWithQuotedCommaInEnv := []byte(`env="HOME=/home/root,dir",no-port-forwarding ssh-rsa ` + pubSerialized + ` user@host`)
+ testAuthorizedKeys(t, []byte(authWithQuotedCommaInEnv), []authResult{
+ {pub, []string{`env="HOME=/home/root,dir"`, "no-port-forwarding"}, "user@host", "", true},
+ })
+}
+
+func TestAuthWithQuotedQuoteInEnv(t *testing.T) {
+ pub, pubSerialized := getTestKey()
+ authWithQuotedQuoteInEnv := []byte(`env="HOME=/home/\"root dir",no-port-forwarding` + "\t" + `ssh-rsa` + "\t" + pubSerialized + ` user@host`)
+ authWithDoubleQuotedQuote := []byte(`no-port-forwarding,env="HOME=/home/ \"root dir\"" ssh-rsa ` + pubSerialized + "\t" + `user@host`)
+ testAuthorizedKeys(t, []byte(authWithQuotedQuoteInEnv), []authResult{
+ {pub, []string{`env="HOME=/home/\"root dir"`, "no-port-forwarding"}, "user@host", "", true},
+ })
+
+ testAuthorizedKeys(t, []byte(authWithDoubleQuotedQuote), []authResult{
+ {pub, []string{"no-port-forwarding", `env="HOME=/home/ \"root dir\""`}, "user@host", "", true},
+ })
+}
+
+func TestAuthWithInvalidSpace(t *testing.T) {
+ _, pubSerialized := getTestKey()
+ authWithInvalidSpace := []byte(`env="HOME=/home/root dir", no-port-forwarding ssh-rsa ` + pubSerialized + ` user@host
+#more to follow but still no valid keys`)
+ testAuthorizedKeys(t, []byte(authWithInvalidSpace), []authResult{
+ {nil, nil, "", "", false},
+ })
+}
+
+func TestAuthWithMissingQuote(t *testing.T) {
+ pub, pubSerialized := getTestKey()
+ authWithMissingQuote := []byte(`env="HOME=/home/root,no-port-forwarding ssh-rsa ` + pubSerialized + ` user@host
+env="HOME=/home/root",shared-control ssh-rsa ` + pubSerialized + ` user@host`)
+
+ testAuthorizedKeys(t, []byte(authWithMissingQuote), []authResult{
+ {pub, []string{`env="HOME=/home/root"`, `shared-control`}, "user@host", "", true},
+ })
+}
+
+func TestInvalidEntry(t *testing.T) {
+ authInvalid := []byte(`ssh-rsa`)
+ _, _, _, _, err := ParseAuthorizedKey(authInvalid)
+ if err == nil {
+ t.Errorf("got valid entry for %q", authInvalid)
+ }
+}
+
+var knownHostsParseTests = []struct {
+ input string
+ err string
+
+ marker string
+ comment string
+ hosts []string
+ rest string
+} {
+ {
+ "",
+ "EOF",
+
+ "", "", nil, "",
+ },
+ {
+ "# Just a comment",
+ "EOF",
+
+ "", "", nil, "",
+ },
+ {
+ " \t ",
+ "EOF",
+
+ "", "", nil, "",
+ },
+ {
+ "localhost ssh-rsa {RSAPUB}",
+ "",
+
+ "", "", []string{"localhost"}, "",
+ },
+ {
+ "localhost\tssh-rsa {RSAPUB}",
+ "",
+
+ "", "", []string{"localhost"}, "",
+ },
+ {
+ "localhost\tssh-rsa {RSAPUB}\tcomment comment",
+ "",
+
+ "", "comment comment", []string{"localhost"}, "",
+ },
+ {
+ "localhost\tssh-rsa {RSAPUB}\tcomment comment\n",
+ "",
+
+ "", "comment comment", []string{"localhost"}, "",
+ },
+ {
+ "localhost\tssh-rsa {RSAPUB}\tcomment comment\r\n",
+ "",
+
+ "", "comment comment", []string{"localhost"}, "",
+ },
+ {
+ "localhost\tssh-rsa {RSAPUB}\tcomment comment\r\nnext line",
+ "",
+
+ "", "comment comment", []string{"localhost"}, "next line",
+ },
+ {
+ "localhost,[host2:123]\tssh-rsa {RSAPUB}\tcomment comment",
+ "",
+
+ "", "comment comment", []string{"localhost","[host2:123]"}, "",
+ },
+ {
+ "@marker \tlocalhost,[host2:123]\tssh-rsa {RSAPUB}",
+ "",
+
+ "marker", "", []string{"localhost","[host2:123]"}, "",
+ },
+ {
+ "@marker \tlocalhost,[host2:123]\tssh-rsa aabbccdd",
+ "short read",
+
+ "", "", nil, "",
+ },
+}
+
+func TestKnownHostsParsing(t *testing.T) {
+ rsaPub, rsaPubSerialized := getTestKey()
+
+ for i, test := range knownHostsParseTests {
+ var expectedKey PublicKey
+ const rsaKeyToken = "{RSAPUB}"
+
+ input := test.input
+ if strings.Contains(input, rsaKeyToken) {
+ expectedKey = rsaPub
+ input = strings.Replace(test.input, rsaKeyToken, rsaPubSerialized, -1)
+ }
+
+ marker, hosts, pubKey, comment, rest, err := ParseKnownHosts([]byte(input))
+ if err != nil {
+ if len(test.err) == 0 {
+ t.Errorf("#%d: unexpectedly failed with %q", i, err)
+ } else if !strings.Contains(err.Error(), test.err) {
+ t.Errorf("#%d: expected error containing %q, but got %q", i, test.err, err)
+ }
+ continue
+ } else if len(test.err) != 0 {
+ t.Errorf("#%d: succeeded but expected error including %q", i, test.err)
+ continue
+ }
+
+ if !reflect.DeepEqual(expectedKey, pubKey) {
+ t.Errorf("#%d: expected key %#v, but got %#v", i, expectedKey, pubKey)
+ }
+
+ if marker != test.marker {
+ t.Errorf("#%d: expected marker %q, but got %q", i, test.marker, marker)
+ }
+
+ if comment != test.comment {
+ t.Errorf("#%d: expected comment %q, but got %q", i, test.comment, comment)
+ }
+
+ if !reflect.DeepEqual(test.hosts, hosts) {
+ t.Errorf("#%d: expected hosts %#v, but got %#v", i, test.hosts, hosts)
+ }
+
+ if rest := string(rest); rest != test.rest {
+ t.Errorf("#%d: expected remaining input to be %q, but got %q", i, test.rest, rest)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mac.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mac.go
new file mode 100644
index 00000000000..07744ad6713
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mac.go
@@ -0,0 +1,57 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+// Message authentication support
+
+import (
+ "crypto/hmac"
+ "crypto/sha1"
+ "crypto/sha256"
+ "hash"
+)
+
+type macMode struct {
+ keySize int
+ new func(key []byte) hash.Hash
+}
+
+// truncatingMAC wraps around a hash.Hash and truncates the output digest to
+// a given size.
+type truncatingMAC struct {
+ length int
+ hmac hash.Hash
+}
+
+func (t truncatingMAC) Write(data []byte) (int, error) {
+ return t.hmac.Write(data)
+}
+
+func (t truncatingMAC) Sum(in []byte) []byte {
+ out := t.hmac.Sum(in)
+ return out[:len(in)+t.length]
+}
+
+func (t truncatingMAC) Reset() {
+ t.hmac.Reset()
+}
+
+func (t truncatingMAC) Size() int {
+ return t.length
+}
+
+func (t truncatingMAC) BlockSize() int { return t.hmac.BlockSize() }
+
+var macModes = map[string]*macMode{
+ "hmac-sha2-256": {32, func(key []byte) hash.Hash {
+ return hmac.New(sha256.New, key)
+ }},
+ "hmac-sha1": {20, func(key []byte) hash.Hash {
+ return hmac.New(sha1.New, key)
+ }},
+ "hmac-sha1-96": {20, func(key []byte) hash.Hash {
+ return truncatingMAC{12, hmac.New(sha1.New, key)}
+ }},
+}
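+
+// exampleComputeMAC is an illustrative sketch added for documentation
+// purposes; it is not part of the upstream package. It shows how a macMode
+// entry is used: new returns a keyed hash.Hash, and Sum yields the MAC over
+// everything written to it (12 bytes here, because hmac-sha1-96 truncates).
+// In the real transport the derived key is exactly keySize bytes long; plain
+// HMAC accepts any key length, so this sketch passes key through unchanged.
+func exampleComputeMAC(key, data []byte) []byte {
+ mac := macModes["hmac-sha1-96"].new(key)
+ mac.Write(data)
+ return mac.Sum(nil)
+}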
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mempipe_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mempipe_test.go
new file mode 100644
index 00000000000..8697cd6140a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mempipe_test.go
@@ -0,0 +1,110 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "io"
+ "sync"
+ "testing"
+)
+
+// An in-memory packetConn. It is safe to call Close and writePacket
+// from different goroutines.
+type memTransport struct {
+ eof bool
+ pending [][]byte
+ write *memTransport
+ sync.Mutex
+ *sync.Cond
+}
+
+func (t *memTransport) readPacket() ([]byte, error) {
+ t.Lock()
+ defer t.Unlock()
+ for {
+ if len(t.pending) > 0 {
+ r := t.pending[0]
+ t.pending = t.pending[1:]
+ return r, nil
+ }
+ if t.eof {
+ return nil, io.EOF
+ }
+ t.Cond.Wait()
+ }
+}
+
+func (t *memTransport) closeSelf() error {
+ t.Lock()
+ defer t.Unlock()
+ if t.eof {
+ return io.EOF
+ }
+ t.eof = true
+ t.Cond.Broadcast()
+ return nil
+}
+
+func (t *memTransport) Close() error {
+ err := t.write.closeSelf()
+ t.closeSelf()
+ return err
+}
+
+func (t *memTransport) writePacket(p []byte) error {
+ t.write.Lock()
+ defer t.write.Unlock()
+ if t.write.eof {
+ return io.EOF
+ }
+ c := make([]byte, len(p))
+ copy(c, p)
+ t.write.pending = append(t.write.pending, c)
+ t.write.Cond.Signal()
+ return nil
+}
+
+func memPipe() (a, b packetConn) {
+ t1 := memTransport{}
+ t2 := memTransport{}
+ t1.write = &t2
+ t2.write = &t1
+ t1.Cond = sync.NewCond(&t1.Mutex)
+ t2.Cond = sync.NewCond(&t2.Mutex)
+ return &t1, &t2
+}
+
+func TestMemPipe(t *testing.T) {
+ a, b := memPipe()
+ if err := a.writePacket([]byte{42}); err != nil {
+ t.Fatalf("writePacket: %v", err)
+ }
+ if err := a.Close(); err != nil {
+ t.Fatal("Close: ", err)
+ }
+ p, err := b.readPacket()
+ if err != nil {
+ t.Fatal("readPacket: ", err)
+ }
+ if len(p) != 1 || p[0] != 42 {
+ t.Fatalf("got %v, want {42}", p)
+ }
+ p, err = b.readPacket()
+ if err != io.EOF {
+ t.Fatalf("got %v, %v, want EOF", p, err)
+ }
+}
+
+func TestDoubleClose(t *testing.T) {
+ a, _ := memPipe()
+ err := a.Close()
+ if err != nil {
+ t.Errorf("Close: %v", err)
+ }
+ err = a.Close()
+ if err != io.EOF {
+ t.Errorf("expect EOF on double close.")
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/messages.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/messages.go
new file mode 100644
index 00000000000..eaf6106698e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/messages.go
@@ -0,0 +1,725 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "io"
+ "math/big"
+ "reflect"
+ "strconv"
+)
+
+// These are SSH message type numbers. They are scattered around several
+// documents but many were taken from [SSH-PARAMETERS].
+const (
+ msgIgnore = 2
+ msgUnimplemented = 3
+ msgDebug = 4
+ msgNewKeys = 21
+
+ // Standard authentication messages
+ msgUserAuthSuccess = 52
+ msgUserAuthBanner = 53
+)
+
+// SSH messages:
+//
+// These structures mirror the wire format of the corresponding SSH messages.
+// They are marshaled using reflection with the marshal and unmarshal functions
+// in this file. The only wrinkle is that a final member of type []byte with a
+// ssh tag of "rest" receives the remainder of a packet when unmarshaling.
+
+// See RFC 4253, section 11.1.
+const msgDisconnect = 1
+
+// disconnectMsg is the message that signals a disconnect. It is also
+// the error type returned from mux.Wait()
+type disconnectMsg struct {
+ Reason uint32 `sshtype:"1"`
+ Message string
+ Language string
+}
+
+func (d *disconnectMsg) Error() string {
+ return fmt.Sprintf("ssh: disconnect reason %d: %s", d.Reason, d.Message)
+}
+
+// See RFC 4253, section 7.1.
+const msgKexInit = 20
+
+type kexInitMsg struct {
+ Cookie [16]byte `sshtype:"20"`
+ KexAlgos []string
+ ServerHostKeyAlgos []string
+ CiphersClientServer []string
+ CiphersServerClient []string
+ MACsClientServer []string
+ MACsServerClient []string
+ CompressionClientServer []string
+ CompressionServerClient []string
+ LanguagesClientServer []string
+ LanguagesServerClient []string
+ FirstKexFollows bool
+ Reserved uint32
+}
+
+// See RFC 4253, section 8.
+
+// Diffie-Hellman
+const msgKexDHInit = 30
+
+type kexDHInitMsg struct {
+ X *big.Int `sshtype:"30"`
+}
+
+const msgKexECDHInit = 30
+
+type kexECDHInitMsg struct {
+ ClientPubKey []byte `sshtype:"30"`
+}
+
+const msgKexECDHReply = 31
+
+type kexECDHReplyMsg struct {
+ HostKey []byte `sshtype:"31"`
+ EphemeralPubKey []byte
+ Signature []byte
+}
+
+const msgKexDHReply = 31
+
+type kexDHReplyMsg struct {
+ HostKey []byte `sshtype:"31"`
+ Y *big.Int
+ Signature []byte
+}
+
+// See RFC 4253, section 10.
+const msgServiceRequest = 5
+
+type serviceRequestMsg struct {
+ Service string `sshtype:"5"`
+}
+
+// See RFC 4253, section 10.
+const msgServiceAccept = 6
+
+type serviceAcceptMsg struct {
+ Service string `sshtype:"6"`
+}
+
+// See RFC 4252, section 5.
+const msgUserAuthRequest = 50
+
+type userAuthRequestMsg struct {
+ User string `sshtype:"50"`
+ Service string
+ Method string
+ Payload []byte `ssh:"rest"`
+}
+
+// See RFC 4252, section 5.1
+const msgUserAuthFailure = 51
+
+type userAuthFailureMsg struct {
+ Methods []string `sshtype:"51"`
+ PartialSuccess bool
+}
+
+// See RFC 4256, section 3.2
+const msgUserAuthInfoRequest = 60
+const msgUserAuthInfoResponse = 61
+
+type userAuthInfoRequestMsg struct {
+ User string `sshtype:"60"`
+ Instruction string
+ DeprecatedLanguage string
+ NumPrompts uint32
+ Prompts []byte `ssh:"rest"`
+}
+
+// See RFC 4254, section 5.1.
+const msgChannelOpen = 90
+
+type channelOpenMsg struct {
+ ChanType string `sshtype:"90"`
+ PeersId uint32
+ PeersWindow uint32
+ MaxPacketSize uint32
+ TypeSpecificData []byte `ssh:"rest"`
+}
+
+const msgChannelExtendedData = 95
+const msgChannelData = 94
+
+// See RFC 4254, section 5.1.
+const msgChannelOpenConfirm = 91
+
+type channelOpenConfirmMsg struct {
+ PeersId uint32 `sshtype:"91"`
+ MyId uint32
+ MyWindow uint32
+ MaxPacketSize uint32
+ TypeSpecificData []byte `ssh:"rest"`
+}
+
+// See RFC 4254, section 5.1.
+const msgChannelOpenFailure = 92
+
+type channelOpenFailureMsg struct {
+ PeersId uint32 `sshtype:"92"`
+ Reason RejectionReason
+ Message string
+ Language string
+}
+
+const msgChannelRequest = 98
+
+type channelRequestMsg struct {
+ PeersId uint32 `sshtype:"98"`
+ Request string
+ WantReply bool
+ RequestSpecificData []byte `ssh:"rest"`
+}
+
+// See RFC 4254, section 5.4.
+const msgChannelSuccess = 99
+
+type channelRequestSuccessMsg struct {
+ PeersId uint32 `sshtype:"99"`
+}
+
+// See RFC 4254, section 5.4.
+const msgChannelFailure = 100
+
+type channelRequestFailureMsg struct {
+ PeersId uint32 `sshtype:"100"`
+}
+
+// See RFC 4254, section 5.3
+const msgChannelClose = 97
+
+type channelCloseMsg struct {
+ PeersId uint32 `sshtype:"97"`
+}
+
+// See RFC 4254, section 5.3
+const msgChannelEOF = 96
+
+type channelEOFMsg struct {
+ PeersId uint32 `sshtype:"96"`
+}
+
+// See RFC 4254, section 4
+const msgGlobalRequest = 80
+
+type globalRequestMsg struct {
+ Type string `sshtype:"80"`
+ WantReply bool
+ Data []byte `ssh:"rest"`
+}
+
+// See RFC 4254, section 4
+const msgRequestSuccess = 81
+
+type globalRequestSuccessMsg struct {
+ Data []byte `ssh:"rest" sshtype:"81"`
+}
+
+// See RFC 4254, section 4
+const msgRequestFailure = 82
+
+type globalRequestFailureMsg struct {
+ Data []byte `ssh:"rest" sshtype:"82"`
+}
+
+// See RFC 4254, section 5.2
+const msgChannelWindowAdjust = 93
+
+type windowAdjustMsg struct {
+ PeersId uint32 `sshtype:"93"`
+ AdditionalBytes uint32
+}
+
+// See RFC 4252, section 7
+const msgUserAuthPubKeyOk = 60
+
+type userAuthPubKeyOkMsg struct {
+ Algo string `sshtype:"60"`
+ PubKey []byte
+}
+
+// typeTag returns the type byte for the given type. The type should
+// be a struct.
+func typeTag(structType reflect.Type) byte {
+ var tag byte
+ var tagStr string
+ tagStr = structType.Field(0).Tag.Get("sshtype")
+ i, err := strconv.Atoi(tagStr)
+ if err == nil {
+ tag = byte(i)
+ }
+ return tag
+}
+
+func fieldError(t reflect.Type, field int, problem string) error {
+ if problem != "" {
+ problem = ": " + problem
+ }
+ return fmt.Errorf("ssh: unmarshal error for field %s of type %s%s", t.Field(field).Name, t.Name(), problem)
+}
+
+var errShortRead = errors.New("ssh: short read")
+
+// Unmarshal parses data in SSH wire format into a structure. The out
+// argument should be a pointer to a struct. If the first member of the
+// struct has the "sshtype" tag set to a number in decimal, the packet
+// must start with that number. In case of error, Unmarshal returns a
+// ParseError or UnexpectedMessageError.
+func Unmarshal(data []byte, out interface{}) error {
+ v := reflect.ValueOf(out).Elem()
+ structType := v.Type()
+ expectedType := typeTag(structType)
+ if len(data) == 0 {
+ return parseError(expectedType)
+ }
+ if expectedType > 0 {
+ if data[0] != expectedType {
+ return unexpectedMessageError(expectedType, data[0])
+ }
+ data = data[1:]
+ }
+
+ var ok bool
+ for i := 0; i < v.NumField(); i++ {
+ field := v.Field(i)
+ t := field.Type()
+ switch t.Kind() {
+ case reflect.Bool:
+ if len(data) < 1 {
+ return errShortRead
+ }
+ field.SetBool(data[0] != 0)
+ data = data[1:]
+ case reflect.Array:
+ if t.Elem().Kind() != reflect.Uint8 {
+ return fieldError(structType, i, "array of unsupported type")
+ }
+ if len(data) < t.Len() {
+ return errShortRead
+ }
+ for j, n := 0, t.Len(); j < n; j++ {
+ field.Index(j).Set(reflect.ValueOf(data[j]))
+ }
+ data = data[t.Len():]
+ case reflect.Uint64:
+ var u64 uint64
+ if u64, data, ok = parseUint64(data); !ok {
+ return errShortRead
+ }
+ field.SetUint(u64)
+ case reflect.Uint32:
+ var u32 uint32
+ if u32, data, ok = parseUint32(data); !ok {
+ return errShortRead
+ }
+ field.SetUint(uint64(u32))
+ case reflect.Uint8:
+ if len(data) < 1 {
+ return errShortRead
+ }
+ field.SetUint(uint64(data[0]))
+ data = data[1:]
+ case reflect.String:
+ var s []byte
+ if s, data, ok = parseString(data); !ok {
+ return fieldError(structType, i, "")
+ }
+ field.SetString(string(s))
+ case reflect.Slice:
+ switch t.Elem().Kind() {
+ case reflect.Uint8:
+ if structType.Field(i).Tag.Get("ssh") == "rest" {
+ field.Set(reflect.ValueOf(data))
+ data = nil
+ } else {
+ var s []byte
+ if s, data, ok = parseString(data); !ok {
+ return errShortRead
+ }
+ field.Set(reflect.ValueOf(s))
+ }
+ case reflect.String:
+ var nl []string
+ if nl, data, ok = parseNameList(data); !ok {
+ return errShortRead
+ }
+ field.Set(reflect.ValueOf(nl))
+ default:
+ return fieldError(structType, i, "slice of unsupported type")
+ }
+ case reflect.Ptr:
+ if t == bigIntType {
+ var n *big.Int
+ if n, data, ok = parseInt(data); !ok {
+ return errShortRead
+ }
+ field.Set(reflect.ValueOf(n))
+ } else {
+ return fieldError(structType, i, "pointer to unsupported type")
+ }
+ default:
+ return fieldError(structType, i, "unsupported type")
+ }
+ }
+
+ if len(data) != 0 {
+ return parseError(expectedType)
+ }
+
+ return nil
+}
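+
+// Illustrative sketch: exampleUnmarshalChannelEOF is a hypothetical helper,
+// not part of this package. It decodes a hand-built channelEOFMsg packet,
+// showing the leading type byte that the "sshtype" tag makes mandatory.
+func exampleUnmarshalChannelEOF() (uint32, error) {
+ // One type byte (msgChannelEOF, 96) followed by a big-endian uint32 PeersId.
+ packet := []byte{msgChannelEOF, 0x00, 0x00, 0x00, 0x2a}
+ var msg channelEOFMsg
+ if err := Unmarshal(packet, &msg); err != nil {
+ return 0, err
+ }
+ return msg.PeersId, nil // 42
+}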
+
+// Marshal serializes the message in msg to SSH wire format. The msg
+// argument should be a struct or a pointer to a struct. If the first
+// member has the "sshtype" tag set to a number in decimal, that
+// number is prepended to the result. If the last member has the
+// "ssh" tag set to "rest", its contents are appended to the output.
+func Marshal(msg interface{}) []byte {
+ out := make([]byte, 0, 64)
+ return marshalStruct(out, msg)
+}
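+
+// Illustrative sketch: exampleMarshalGlobalRequest is a hypothetical helper,
+// not part of this package. Marshaling a globalRequestMsg yields the type
+// byte 80, a length-prefixed Type string, the WantReply byte, and then the
+// "rest"-tagged Data verbatim.
+func exampleMarshalGlobalRequest() []byte {
+ msg := globalRequestMsg{Type: "keepalive@openssh.com", WantReply: true}
+ // 1 type byte + 4-byte length + 21-byte string + 1 bool byte = 27 bytes.
+ return Marshal(&msg)
+}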
+
+func marshalStruct(out []byte, msg interface{}) []byte {
+ v := reflect.Indirect(reflect.ValueOf(msg))
+ msgType := typeTag(v.Type())
+ if msgType > 0 {
+ out = append(out, msgType)
+ }
+
+ for i, n := 0, v.NumField(); i < n; i++ {
+ field := v.Field(i)
+ switch t := field.Type(); t.Kind() {
+ case reflect.Bool:
+ var v uint8
+ if field.Bool() {
+ v = 1
+ }
+ out = append(out, v)
+ case reflect.Array:
+ if t.Elem().Kind() != reflect.Uint8 {
+ panic(fmt.Sprintf("array of non-uint8 in field %d: %T", i, field.Interface()))
+ }
+ for j, l := 0, t.Len(); j < l; j++ {
+ out = append(out, uint8(field.Index(j).Uint()))
+ }
+ case reflect.Uint32:
+ out = appendU32(out, uint32(field.Uint()))
+ case reflect.Uint64:
+ out = appendU64(out, uint64(field.Uint()))
+ case reflect.Uint8:
+ out = append(out, uint8(field.Uint()))
+ case reflect.String:
+ s := field.String()
+ out = appendInt(out, len(s))
+ out = append(out, s...)
+ case reflect.Slice:
+ switch t.Elem().Kind() {
+ case reflect.Uint8:
+ if v.Type().Field(i).Tag.Get("ssh") != "rest" {
+ out = appendInt(out, field.Len())
+ }
+ out = append(out, field.Bytes()...)
+ case reflect.String:
+ offset := len(out)
+ out = appendU32(out, 0)
+ if n := field.Len(); n > 0 {
+ for j := 0; j < n; j++ {
+ f := field.Index(j)
+ if j != 0 {
+ out = append(out, ',')
+ }
+ out = append(out, f.String()...)
+ }
+ // overwrite length value
+ binary.BigEndian.PutUint32(out[offset:], uint32(len(out)-offset-4))
+ }
+ default:
+ panic(fmt.Sprintf("slice of unknown type in field %d: %T", i, field.Interface()))
+ }
+ case reflect.Ptr:
+ if t == bigIntType {
+ var n *big.Int
+ nValue := reflect.ValueOf(&n)
+ nValue.Elem().Set(field)
+ needed := intLength(n)
+ oldLength := len(out)
+
+ if cap(out)-len(out) < needed {
+ newOut := make([]byte, len(out), 2*(len(out)+needed))
+ copy(newOut, out)
+ out = newOut
+ }
+ out = out[:oldLength+needed]
+ marshalInt(out[oldLength:], n)
+ } else {
+ panic(fmt.Sprintf("pointer to unknown type in field %d: %T", i, field.Interface()))
+ }
+ }
+ }
+
+ return out
+}
+
+var bigOne = big.NewInt(1)
+
+func parseString(in []byte) (out, rest []byte, ok bool) {
+ if len(in) < 4 {
+ return
+ }
+ length := binary.BigEndian.Uint32(in)
+ in = in[4:]
+ if uint32(len(in)) < length {
+ return
+ }
+ out = in[:length]
+ rest = in[length:]
+ ok = true
+ return
+}
+
+var (
+ comma = []byte{','}
+ emptyNameList = []string{}
+)
+
+func parseNameList(in []byte) (out []string, rest []byte, ok bool) {
+ contents, rest, ok := parseString(in)
+ if !ok {
+ return
+ }
+ if len(contents) == 0 {
+ out = emptyNameList
+ return
+ }
+ parts := bytes.Split(contents, comma)
+ out = make([]string, len(parts))
+ for i, part := range parts {
+ out[i] = string(part)
+ }
+ return
+}
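+
+// Illustrative sketch: exampleParseNameList is a hypothetical helper, not
+// part of this package. A name-list is one length-prefixed string whose
+// contents are comma-separated names (RFC 4251, section 5).
+func exampleParseNameList() ([]string, bool) {
+ const list = "aes128-ctr,aes256-ctr"
+ wire := appendU32(nil, uint32(len(list)))
+ wire = append(wire, list...)
+ names, rest, ok := parseNameList(wire)
+ return names, ok && len(rest) == 0 // names: ["aes128-ctr" "aes256-ctr"]
+}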
+
+func parseInt(in []byte) (out *big.Int, rest []byte, ok bool) {
+ contents, rest, ok := parseString(in)
+ if !ok {
+ return
+ }
+ out = new(big.Int)
+
+ if len(contents) > 0 && contents[0]&0x80 == 0x80 {
+ // This is a negative number
+ notBytes := make([]byte, len(contents))
+ for i := range notBytes {
+ notBytes[i] = ^contents[i]
+ }
+ out.SetBytes(notBytes)
+ out.Add(out, bigOne)
+ out.Neg(out)
+ } else {
+ // Positive number
+ out.SetBytes(contents)
+ }
+ ok = true
+ return
+}
+
+func parseUint32(in []byte) (uint32, []byte, bool) {
+ if len(in) < 4 {
+ return 0, nil, false
+ }
+ return binary.BigEndian.Uint32(in), in[4:], true
+}
+
+func parseUint64(in []byte) (uint64, []byte, bool) {
+ if len(in) < 8 {
+ return 0, nil, false
+ }
+ return binary.BigEndian.Uint64(in), in[8:], true
+}
+
+func intLength(n *big.Int) int {
+ length := 4 /* length bytes */
+ if n.Sign() < 0 {
+ nMinus1 := new(big.Int).Neg(n)
+ nMinus1.Sub(nMinus1, bigOne)
+ bitLen := nMinus1.BitLen()
+ if bitLen%8 == 0 {
+ // The number will need 0xff padding
+ length++
+ }
+ length += (bitLen + 7) / 8
+ } else if n.Sign() == 0 {
+ // A zero is encoded as a zero-length string
+ } else {
+ bitLen := n.BitLen()
+ if bitLen%8 == 0 {
+ // The number will need 0x00 padding
+ length++
+ }
+ length += (bitLen + 7) / 8
+ }
+
+ return length
+}
+
+func marshalUint32(to []byte, n uint32) []byte {
+ binary.BigEndian.PutUint32(to, n)
+ return to[4:]
+}
+
+func marshalUint64(to []byte, n uint64) []byte {
+ binary.BigEndian.PutUint64(to, n)
+ return to[8:]
+}
+
+func marshalInt(to []byte, n *big.Int) []byte {
+ lengthBytes := to
+ to = to[4:]
+ length := 0
+
+ if n.Sign() < 0 {
+ // A negative number has to be converted to two's-complement
+ // form. So we'll subtract 1 and invert. If the
+ // most-significant-bit isn't set then we'll need to pad the
+ // beginning with 0xff in order to keep the number negative.
+ nMinus1 := new(big.Int).Neg(n)
+ nMinus1.Sub(nMinus1, bigOne)
+ bytes := nMinus1.Bytes()
+ for i := range bytes {
+ bytes[i] ^= 0xff
+ }
+ if len(bytes) == 0 || bytes[0]&0x80 == 0 {
+ to[0] = 0xff
+ to = to[1:]
+ length++
+ }
+ nBytes := copy(to, bytes)
+ to = to[nBytes:]
+ length += nBytes
+ } else if n.Sign() == 0 {
+ // A zero is encoded as a zero-length string
+ } else {
+ bytes := n.Bytes()
+ if len(bytes) > 0 && bytes[0]&0x80 != 0 {
+ // We'll have to pad this with a 0x00 in order to
+ // stop it looking like a negative number.
+ to[0] = 0
+ to = to[1:]
+ length++
+ }
+ nBytes := copy(to, bytes)
+ to = to[nBytes:]
+ length += nBytes
+ }
+
+ lengthBytes[0] = byte(length >> 24)
+ lengthBytes[1] = byte(length >> 16)
+ lengthBytes[2] = byte(length >> 8)
+ lengthBytes[3] = byte(length)
+ return to
+}
+
+func writeInt(w io.Writer, n *big.Int) {
+ length := intLength(n)
+ buf := make([]byte, length)
+ marshalInt(buf, n)
+ w.Write(buf)
+}
+
+func writeString(w io.Writer, s []byte) {
+ var lengthBytes [4]byte
+ lengthBytes[0] = byte(len(s) >> 24)
+ lengthBytes[1] = byte(len(s) >> 16)
+ lengthBytes[2] = byte(len(s) >> 8)
+ lengthBytes[3] = byte(len(s))
+ w.Write(lengthBytes[:])
+ w.Write(s)
+}
+
+func stringLength(n int) int {
+ return 4 + n
+}
+
+func marshalString(to []byte, s []byte) []byte {
+ to[0] = byte(len(s) >> 24)
+ to[1] = byte(len(s) >> 16)
+ to[2] = byte(len(s) >> 8)
+ to[3] = byte(len(s))
+ to = to[4:]
+ copy(to, s)
+ return to[len(s):]
+}
+
+var bigIntType = reflect.TypeOf((*big.Int)(nil))
+
+// Decode a packet into its corresponding message.
+func decode(packet []byte) (interface{}, error) {
+ var msg interface{}
+ switch packet[0] {
+ case msgDisconnect:
+ msg = new(disconnectMsg)
+ case msgServiceRequest:
+ msg = new(serviceRequestMsg)
+ case msgServiceAccept:
+ msg = new(serviceAcceptMsg)
+ case msgKexInit:
+ msg = new(kexInitMsg)
+ case msgKexDHInit:
+ msg = new(kexDHInitMsg)
+ case msgKexDHReply:
+ msg = new(kexDHReplyMsg)
+ case msgUserAuthRequest:
+ msg = new(userAuthRequestMsg)
+ case msgUserAuthFailure:
+ msg = new(userAuthFailureMsg)
+ case msgUserAuthPubKeyOk:
+ msg = new(userAuthPubKeyOkMsg)
+ case msgGlobalRequest:
+ msg = new(globalRequestMsg)
+ case msgRequestSuccess:
+ msg = new(globalRequestSuccessMsg)
+ case msgRequestFailure:
+ msg = new(globalRequestFailureMsg)
+ case msgChannelOpen:
+ msg = new(channelOpenMsg)
+ case msgChannelOpenConfirm:
+ msg = new(channelOpenConfirmMsg)
+ case msgChannelOpenFailure:
+ msg = new(channelOpenFailureMsg)
+ case msgChannelWindowAdjust:
+ msg = new(windowAdjustMsg)
+ case msgChannelEOF:
+ msg = new(channelEOFMsg)
+ case msgChannelClose:
+ msg = new(channelCloseMsg)
+ case msgChannelRequest:
+ msg = new(channelRequestMsg)
+ case msgChannelSuccess:
+ msg = new(channelRequestSuccessMsg)
+ case msgChannelFailure:
+ msg = new(channelRequestFailureMsg)
+ default:
+ return nil, unexpectedMessageError(0, packet[0])
+ }
+ if err := Unmarshal(packet, msg); err != nil {
+ return nil, err
+ }
+ return msg, nil
+}
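+
+// Illustrative sketch: exampleDecode is a hypothetical helper, not part of
+// this package. decode dispatches on the leading type byte, so a marshaled
+// channelCloseMsg comes back as a *channelCloseMsg.
+func exampleDecode() (*channelCloseMsg, error) {
+ packet := Marshal(&channelCloseMsg{PeersId: 7})
+ msg, err := decode(packet)
+ if err != nil {
+ return nil, err
+ }
+ return msg.(*channelCloseMsg), nil
+}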
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/messages_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/messages_test.go
new file mode 100644
index 00000000000..955b5127f9b
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/messages_test.go
@@ -0,0 +1,254 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "math/big"
+ "math/rand"
+ "reflect"
+ "testing"
+ "testing/quick"
+)
+
+var intLengthTests = []struct {
+ val, length int
+}{
+ {0, 4 + 0},
+ {1, 4 + 1},
+ {127, 4 + 1},
+ {128, 4 + 2},
+ {-1, 4 + 1},
+}
+
+func TestIntLength(t *testing.T) {
+ for _, test := range intLengthTests {
+ v := new(big.Int).SetInt64(int64(test.val))
+ length := intLength(v)
+ if length != test.length {
+ t.Errorf("For %d, got length %d but expected %d", test.val, length, test.length)
+ }
+ }
+}
+
+type msgAllTypes struct {
+ Bool bool `sshtype:"21"`
+ Array [16]byte
+ Uint64 uint64
+ Uint32 uint32
+ Uint8 uint8
+ String string
+ Strings []string
+ Bytes []byte
+ Int *big.Int
+ Rest []byte `ssh:"rest"`
+}
+
+func (t *msgAllTypes) Generate(rand *rand.Rand, size int) reflect.Value {
+ m := &msgAllTypes{}
+ m.Bool = rand.Intn(2) == 1
+ randomBytes(m.Array[:], rand)
+ m.Uint64 = uint64(rand.Int63n(1<<63 - 1))
+ m.Uint32 = uint32(rand.Intn((1 << 31) - 1))
+ m.Uint8 = uint8(rand.Intn(1 << 8))
+ m.String = string(m.Array[:])
+ m.Strings = randomNameList(rand)
+ m.Bytes = m.Array[:]
+ m.Int = randomInt(rand)
+ m.Rest = m.Array[:]
+ return reflect.ValueOf(m)
+}
+
+func TestMarshalUnmarshal(t *testing.T) {
+ rand := rand.New(rand.NewSource(0))
+ iface := &msgAllTypes{}
+ ty := reflect.ValueOf(iface).Type()
+
+ n := 100
+ if testing.Short() {
+ n = 5
+ }
+ for j := 0; j < n; j++ {
+ v, ok := quick.Value(ty, rand)
+ if !ok {
+ t.Errorf("failed to create value")
+ break
+ }
+
+ m1 := v.Elem().Interface()
+ m2 := iface
+
+ marshaled := Marshal(m1)
+ if err := Unmarshal(marshaled, m2); err != nil {
+ t.Errorf("Unmarshal %#v: %s", m1, err)
+ break
+ }
+
+ if !reflect.DeepEqual(v.Interface(), m2) {
+ t.Errorf("got: %#v\nwant:%#v\n%x", m2, m1, marshaled)
+ break
+ }
+ }
+}
+
+func TestUnmarshalEmptyPacket(t *testing.T) {
+ var b []byte
+ var m channelRequestSuccessMsg
+ if err := Unmarshal(b, &m); err == nil {
+ t.Fatalf("unmarshal of empty slice succeeded")
+ }
+}
+
+func TestUnmarshalUnexpectedPacket(t *testing.T) {
+ type S struct {
+ I uint32 `sshtype:"43"`
+ S string
+ B bool
+ }
+
+ s := S{11, "hello", true}
+ packet := Marshal(s)
+ packet[0] = 42
+ roundtrip := S{}
+ err := Unmarshal(packet, &roundtrip)
+ if err == nil {
+ t.Fatal("expected error, not nil")
+ }
+}
+
+func TestMarshalPtr(t *testing.T) {
+ s := struct {
+ S string
+ }{"hello"}
+
+ m1 := Marshal(s)
+ m2 := Marshal(&s)
+ if !bytes.Equal(m1, m2) {
+ t.Errorf("got %q, want %q for marshaled pointer", m2, m1)
+ }
+}
+
+func TestBareMarshalUnmarshal(t *testing.T) {
+ type S struct {
+ I uint32
+ S string
+ B bool
+ }
+
+ s := S{42, "hello", true}
+ packet := Marshal(s)
+ roundtrip := S{}
+ Unmarshal(packet, &roundtrip)
+
+ if !reflect.DeepEqual(s, roundtrip) {
+ t.Errorf("got %#v, want %#v", roundtrip, s)
+ }
+}
+
+func TestBareMarshal(t *testing.T) {
+ type S2 struct {
+ I uint32
+ }
+ s := S2{42}
+ packet := Marshal(s)
+ i, rest, ok := parseUint32(packet)
+ if len(rest) > 0 || !ok {
+ t.Errorf("parseInt(%q): parse error", packet)
+ }
+ if i != s.I {
+ t.Errorf("got %d, want %d", i, s.I)
+ }
+}
+
+func TestUnmarshalShortKexInitPacket(t *testing.T) {
+ // This used to panic.
+ // Issue 11348
+ packet := []byte{0x14, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0xff, 0xff, 0xff, 0xff}
+ kim := &kexInitMsg{}
+ if err := Unmarshal(packet, kim); err == nil {
+ t.Error("truncated packet unmarshaled without error")
+ }
+}
+
+func randomBytes(out []byte, rand *rand.Rand) {
+ for i := 0; i < len(out); i++ {
+ out[i] = byte(rand.Int31())
+ }
+}
+
+func randomNameList(rand *rand.Rand) []string {
+ ret := make([]string, rand.Int31()&15)
+ for i := range ret {
+ s := make([]byte, 1+(rand.Int31()&15))
+ for j := range s {
+ s[j] = 'a' + uint8(rand.Int31()&15)
+ }
+ ret[i] = string(s)
+ }
+ return ret
+}
+
+func randomInt(rand *rand.Rand) *big.Int {
+ return new(big.Int).SetInt64(int64(int32(rand.Uint32())))
+}
+
+func (*kexInitMsg) Generate(rand *rand.Rand, size int) reflect.Value {
+ ki := &kexInitMsg{}
+ randomBytes(ki.Cookie[:], rand)
+ ki.KexAlgos = randomNameList(rand)
+ ki.ServerHostKeyAlgos = randomNameList(rand)
+ ki.CiphersClientServer = randomNameList(rand)
+ ki.CiphersServerClient = randomNameList(rand)
+ ki.MACsClientServer = randomNameList(rand)
+ ki.MACsServerClient = randomNameList(rand)
+ ki.CompressionClientServer = randomNameList(rand)
+ ki.CompressionServerClient = randomNameList(rand)
+ ki.LanguagesClientServer = randomNameList(rand)
+ ki.LanguagesServerClient = randomNameList(rand)
+ if rand.Int31()&1 == 1 {
+ ki.FirstKexFollows = true
+ }
+ return reflect.ValueOf(ki)
+}
+
+func (*kexDHInitMsg) Generate(rand *rand.Rand, size int) reflect.Value {
+ dhi := &kexDHInitMsg{}
+ dhi.X = randomInt(rand)
+ return reflect.ValueOf(dhi)
+}
+
+var (
+ _kexInitMsg = new(kexInitMsg).Generate(rand.New(rand.NewSource(0)), 10).Elem().Interface()
+ _kexDHInitMsg = new(kexDHInitMsg).Generate(rand.New(rand.NewSource(0)), 10).Elem().Interface()
+
+ _kexInit = Marshal(_kexInitMsg)
+ _kexDHInit = Marshal(_kexDHInitMsg)
+)
+
+func BenchmarkMarshalKexInitMsg(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ Marshal(_kexInitMsg)
+ }
+}
+
+func BenchmarkUnmarshalKexInitMsg(b *testing.B) {
+ m := new(kexInitMsg)
+ for i := 0; i < b.N; i++ {
+ Unmarshal(_kexInit, m)
+ }
+}
+
+func BenchmarkMarshalKexDHInitMsg(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ Marshal(_kexDHInitMsg)
+ }
+}
+
+func BenchmarkUnmarshalKexDHInitMsg(b *testing.B) {
+ m := new(kexDHInitMsg)
+ for i := 0; i < b.N; i++ {
+ Unmarshal(_kexDHInit, m)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mux.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mux.go
new file mode 100644
index 00000000000..321880ad9a4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mux.go
@@ -0,0 +1,356 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "encoding/binary"
+ "fmt"
+ "io"
+ "log"
+ "sync"
+ "sync/atomic"
+)
+
+// debugMux, if set, causes messages in the connection protocol to be
+// logged.
+const debugMux = false
+
+// chanList is a thread safe channel list.
+type chanList struct {
+ // protects concurrent access to chans
+ sync.Mutex
+
+ // chans are indexed by the local id of the channel, which the
+ // other side should send in the PeersId field.
+ chans []*channel
+
+ // This is a debugging aid: it offsets all IDs by this
+ // amount. This helps distinguish otherwise identical
+ // server/client muxes
+ offset uint32
+}
+
+// Assigns a channel ID to the given channel.
+func (c *chanList) add(ch *channel) uint32 {
+ c.Lock()
+ defer c.Unlock()
+ for i := range c.chans {
+ if c.chans[i] == nil {
+ c.chans[i] = ch
+ return uint32(i) + c.offset
+ }
+ }
+ c.chans = append(c.chans, ch)
+ return uint32(len(c.chans)-1) + c.offset
+}
+
+// getChan returns the channel for the given ID.
+func (c *chanList) getChan(id uint32) *channel {
+ id -= c.offset
+
+ c.Lock()
+ defer c.Unlock()
+ if id < uint32(len(c.chans)) {
+ return c.chans[id]
+ }
+ return nil
+}
+
+func (c *chanList) remove(id uint32) {
+ id -= c.offset
+ c.Lock()
+ if id < uint32(len(c.chans)) {
+ c.chans[id] = nil
+ }
+ c.Unlock()
+}
+
+// dropAll forgets all channels it knows, returning them in a slice.
+func (c *chanList) dropAll() []*channel {
+ c.Lock()
+ defer c.Unlock()
+ var r []*channel
+
+ for _, ch := range c.chans {
+ if ch == nil {
+ continue
+ }
+ r = append(r, ch)
+ }
+ c.chans = nil
+ return r
+}
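+
+// Illustrative sketch: exampleChanListRoundTrip is a hypothetical helper, not
+// part of this package. The id returned by add is what the peer must echo in
+// PeersId, and getChan/remove are keyed on that same id.
+func exampleChanListRoundTrip() bool {
+ var l chanList
+ ch := &channel{}
+ id := l.add(ch)
+ same := l.getChan(id) == ch
+ l.remove(id)
+ return same && l.getChan(id) == nil
+}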
+
+// mux represents the state for the SSH connection protocol, which
+// multiplexes many channels onto a single packet transport.
+type mux struct {
+ conn packetConn
+ chanList chanList
+
+ incomingChannels chan NewChannel
+
+ globalSentMu sync.Mutex
+ globalResponses chan interface{}
+ incomingRequests chan *Request
+
+ errCond *sync.Cond
+ err error
+}
+
+// When debugging, each new chanList instantiation has a different
+// offset.
+var globalOff uint32
+
+func (m *mux) Wait() error {
+ m.errCond.L.Lock()
+ defer m.errCond.L.Unlock()
+ for m.err == nil {
+ m.errCond.Wait()
+ }
+ return m.err
+}
+
+// newMux returns a mux that runs over the given connection.
+func newMux(p packetConn) *mux {
+ m := &mux{
+ conn: p,
+ incomingChannels: make(chan NewChannel, 16),
+ globalResponses: make(chan interface{}, 1),
+ incomingRequests: make(chan *Request, 16),
+ errCond: newCond(),
+ }
+ if debugMux {
+ m.chanList.offset = atomic.AddUint32(&globalOff, 1)
+ }
+
+ go m.loop()
+ return m
+}
+
+func (m *mux) sendMessage(msg interface{}) error {
+ p := Marshal(msg)
+ return m.conn.writePacket(p)
+}
+
+func (m *mux) SendRequest(name string, wantReply bool, payload []byte) (bool, []byte, error) {
+ if wantReply {
+ m.globalSentMu.Lock()
+ defer m.globalSentMu.Unlock()
+ }
+
+ if err := m.sendMessage(globalRequestMsg{
+ Type: name,
+ WantReply: wantReply,
+ Data: payload,
+ }); err != nil {
+ return false, nil, err
+ }
+
+ if !wantReply {
+ return false, nil, nil
+ }
+
+ msg, ok := <-m.globalResponses
+ if !ok {
+ return false, nil, io.EOF
+ }
+ switch msg := msg.(type) {
+ case *globalRequestFailureMsg:
+ return false, msg.Data, nil
+ case *globalRequestSuccessMsg:
+ return true, msg.Data, nil
+ default:
+ return false, nil, fmt.Errorf("ssh: unexpected response to request: %#v", msg)
+ }
+}
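+
+// Illustrative sketch: exampleKeepalive is a hypothetical helper, not part of
+// this package; the request name is a placeholder. With wantReply set,
+// SendRequest blocks until the peer answers; any reply, success or failure,
+// shows the connection is still alive.
+func exampleKeepalive(m *mux) error {
+ _, _, err := m.SendRequest("keepalive@example.com", true, nil)
+ return err
+}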
+
+// ackRequest must be called after processing a global request that
+// has WantReply set.
+func (m *mux) ackRequest(ok bool, data []byte) error {
+ if ok {
+ return m.sendMessage(globalRequestSuccessMsg{Data: data})
+ }
+ return m.sendMessage(globalRequestFailureMsg{Data: data})
+}
+
+// TODO(hanwen): Disconnect is a transport layer message. We should
+// probably send and receive Disconnect somewhere in the transport
+// code.
+
+// Disconnect sends a disconnect message.
+func (m *mux) Disconnect(reason uint32, message string) error {
+ return m.sendMessage(disconnectMsg{
+ Reason: reason,
+ Message: message,
+ })
+}
+
+func (m *mux) Close() error {
+ return m.conn.Close()
+}
+
+// loop runs the connection machine. It will process packets until an
+// error is encountered. To synchronize on loop exit, use mux.Wait.
+func (m *mux) loop() {
+ var err error
+ for err == nil {
+ err = m.onePacket()
+ }
+
+ for _, ch := range m.chanList.dropAll() {
+ ch.close()
+ }
+
+ close(m.incomingChannels)
+ close(m.incomingRequests)
+ close(m.globalResponses)
+
+ m.conn.Close()
+
+ m.errCond.L.Lock()
+ m.err = err
+ m.errCond.Broadcast()
+ m.errCond.L.Unlock()
+
+ if debugMux {
+ log.Println("loop exit", err)
+ }
+}
+
+// onePacket reads and processes one packet.
+func (m *mux) onePacket() error {
+ packet, err := m.conn.readPacket()
+ if err != nil {
+ return err
+ }
+
+ if debugMux {
+ if packet[0] == msgChannelData || packet[0] == msgChannelExtendedData {
+ log.Printf("decoding(%d): data packet - %d bytes", m.chanList.offset, len(packet))
+ } else {
+ p, _ := decode(packet)
+ log.Printf("decoding(%d): %d %#v - %d bytes", m.chanList.offset, packet[0], p, len(packet))
+ }
+ }
+
+ switch packet[0] {
+ case msgNewKeys:
+ // Ignore notification of key change.
+ return nil
+ case msgDisconnect:
+ return m.handleDisconnect(packet)
+ case msgChannelOpen:
+ return m.handleChannelOpen(packet)
+ case msgGlobalRequest, msgRequestSuccess, msgRequestFailure:
+ return m.handleGlobalPacket(packet)
+ }
+
+ // assume a channel packet.
+ if len(packet) < 5 {
+ return parseError(packet[0])
+ }
+ id := binary.BigEndian.Uint32(packet[1:])
+ ch := m.chanList.getChan(id)
+ if ch == nil {
+ return fmt.Errorf("ssh: invalid channel %d", id)
+ }
+
+ return ch.handlePacket(packet)
+}
+
+func (m *mux) handleDisconnect(packet []byte) error {
+ var d disconnectMsg
+ if err := Unmarshal(packet, &d); err != nil {
+ return err
+ }
+
+ if debugMux {
+ log.Printf("caught disconnect: %v", d)
+ }
+ return &d
+}
+
+func (m *mux) handleGlobalPacket(packet []byte) error {
+ msg, err := decode(packet)
+ if err != nil {
+ return err
+ }
+
+ switch msg := msg.(type) {
+ case *globalRequestMsg:
+ m.incomingRequests <- &Request{
+ Type: msg.Type,
+ WantReply: msg.WantReply,
+ Payload: msg.Data,
+ mux: m,
+ }
+ case *globalRequestSuccessMsg, *globalRequestFailureMsg:
+ m.globalResponses <- msg
+ default:
+ panic(fmt.Sprintf("not a global message %#v", msg))
+ }
+
+ return nil
+}
+
+// handleChannelOpen schedules a channel to be Accept()ed.
+func (m *mux) handleChannelOpen(packet []byte) error {
+ var msg channelOpenMsg
+ if err := Unmarshal(packet, &msg); err != nil {
+ return err
+ }
+
+ if msg.MaxPacketSize < minPacketLength || msg.MaxPacketSize > 1<<31 {
+ failMsg := channelOpenFailureMsg{
+ PeersId: msg.PeersId,
+ Reason: ConnectionFailed,
+ Message: "invalid request",
+ Language: "en_US.UTF-8",
+ }
+ return m.sendMessage(failMsg)
+ }
+
+ c := m.newChannel(msg.ChanType, channelInbound, msg.TypeSpecificData)
+ c.remoteId = msg.PeersId
+ c.maxRemotePayload = msg.MaxPacketSize
+ c.remoteWin.add(msg.PeersWindow)
+ m.incomingChannels <- c
+ return nil
+}
+
+func (m *mux) OpenChannel(chanType string, extra []byte) (Channel, <-chan *Request, error) {
+ ch, err := m.openChannel(chanType, extra)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ return ch, ch.incomingRequests, nil
+}
+
+func (m *mux) openChannel(chanType string, extra []byte) (*channel, error) {
+ ch := m.newChannel(chanType, channelOutbound, extra)
+
+ ch.maxIncomingPayload = channelMaxPacket
+
+ open := channelOpenMsg{
+ ChanType: chanType,
+ PeersWindow: ch.myWindow,
+ MaxPacketSize: ch.maxIncomingPayload,
+ TypeSpecificData: extra,
+ PeersId: ch.localId,
+ }
+ if err := m.sendMessage(open); err != nil {
+ return nil, err
+ }
+
+ switch msg := (<-ch.msg).(type) {
+ case *channelOpenConfirmMsg:
+ return ch, nil
+ case *channelOpenFailureMsg:
+ return nil, &OpenChannelError{msg.Reason, msg.Message}
+ default:
+ return nil, fmt.Errorf("ssh: unexpected packet in response to channel open: %T", msg)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mux_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mux_test.go
new file mode 100644
index 00000000000..523038960f2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/mux_test.go
@@ -0,0 +1,525 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "io"
+ "io/ioutil"
+ "sync"
+ "testing"
+)
+
+func muxPair() (*mux, *mux) {
+ a, b := memPipe()
+
+ s := newMux(a)
+ c := newMux(b)
+
+ return s, c
+}
+
+// Returns both ends of a channel, and the mux for the 2nd
+// channel.
+func channelPair(t *testing.T) (*channel, *channel, *mux) {
+ c, s := muxPair()
+
+ res := make(chan *channel, 1)
+ go func() {
+ newCh, ok := <-s.incomingChannels
+ if !ok {
+ t.Fatalf("No incoming channel")
+ }
+ if newCh.ChannelType() != "chan" {
+ t.Fatalf("got type %q want chan", newCh.ChannelType())
+ }
+ ch, _, err := newCh.Accept()
+ if err != nil {
+ t.Fatalf("Accept %v", err)
+ }
+ res <- ch.(*channel)
+ }()
+
+ ch, err := c.openChannel("chan", nil)
+ if err != nil {
+ t.Fatalf("OpenChannel: %v", err)
+ }
+
+ return <-res, ch, c
+}
+
+// Test that stderr and stdout can be addressed from different
+// goroutines. This is intended for use with the race detector.
+func TestMuxChannelExtendedThreadSafety(t *testing.T) {
+ writer, reader, mux := channelPair(t)
+ defer writer.Close()
+ defer reader.Close()
+ defer mux.Close()
+
+ var wr, rd sync.WaitGroup
+ magic := "hello world"
+
+ wr.Add(2)
+ go func() {
+ io.WriteString(writer, magic)
+ wr.Done()
+ }()
+ go func() {
+ io.WriteString(writer.Stderr(), magic)
+ wr.Done()
+ }()
+
+ rd.Add(2)
+ go func() {
+ c, err := ioutil.ReadAll(reader)
+ if string(c) != magic {
+ t.Fatalf("stdout read got %q, want %q (error %s)", c, magic, err)
+ }
+ rd.Done()
+ }()
+ go func() {
+ c, err := ioutil.ReadAll(reader.Stderr())
+ if string(c) != magic {
+ t.Fatalf("stderr read got %q, want %q (error %s)", c, magic, err)
+ }
+ rd.Done()
+ }()
+
+ wr.Wait()
+ writer.CloseWrite()
+ rd.Wait()
+}
+
+func TestMuxReadWrite(t *testing.T) {
+ s, c, mux := channelPair(t)
+ defer s.Close()
+ defer c.Close()
+ defer mux.Close()
+
+ magic := "hello world"
+ magicExt := "hello stderr"
+ go func() {
+ _, err := s.Write([]byte(magic))
+ if err != nil {
+ t.Fatalf("Write: %v", err)
+ }
+ _, err = s.Extended(1).Write([]byte(magicExt))
+ if err != nil {
+ t.Fatalf("Write: %v", err)
+ }
+ err = s.Close()
+ if err != nil {
+ t.Fatalf("Close: %v", err)
+ }
+ }()
+
+ var buf [1024]byte
+ n, err := c.Read(buf[:])
+ if err != nil {
+ t.Fatalf("server Read: %v", err)
+ }
+ got := string(buf[:n])
+ if got != magic {
+ t.Fatalf("server: got %q want %q", got, magic)
+ }
+
+ n, err = c.Extended(1).Read(buf[:])
+ if err != nil {
+ t.Fatalf("server Read: %v", err)
+ }
+
+ got = string(buf[:n])
+ if got != magicExt {
+ t.Fatalf("server: got %q want %q", got, magicExt)
+ }
+}
+
+func TestMuxChannelOverflow(t *testing.T) {
+ reader, writer, mux := channelPair(t)
+ defer reader.Close()
+ defer writer.Close()
+ defer mux.Close()
+
+ wDone := make(chan int, 1)
+ go func() {
+ if _, err := writer.Write(make([]byte, channelWindowSize)); err != nil {
+ t.Errorf("could not fill window: %v", err)
+ }
+ writer.Write(make([]byte, 1))
+ wDone <- 1
+ }()
+ writer.remoteWin.waitWriterBlocked()
+
+ // Send 1 byte.
+ packet := make([]byte, 1+4+4+1)
+ packet[0] = msgChannelData
+ marshalUint32(packet[1:], writer.remoteId)
+ marshalUint32(packet[5:], uint32(1))
+ packet[9] = 42
+
+ if err := writer.mux.conn.writePacket(packet); err != nil {
+ t.Errorf("could not send packet")
+ }
+ if _, err := reader.SendRequest("hello", true, nil); err == nil {
+ t.Errorf("SendRequest succeeded.")
+ }
+ <-wDone
+}
+
+func TestMuxChannelCloseWriteUnblock(t *testing.T) {
+ reader, writer, mux := channelPair(t)
+ defer reader.Close()
+ defer writer.Close()
+ defer mux.Close()
+
+ wDone := make(chan int, 1)
+ go func() {
+ if _, err := writer.Write(make([]byte, channelWindowSize)); err != nil {
+ t.Errorf("could not fill window: %v", err)
+ }
+ if _, err := writer.Write(make([]byte, 1)); err != io.EOF {
+ t.Errorf("got %v, want EOF for unblock write", err)
+ }
+ wDone <- 1
+ }()
+
+ writer.remoteWin.waitWriterBlocked()
+ reader.Close()
+ <-wDone
+}
+
+func TestMuxConnectionCloseWriteUnblock(t *testing.T) {
+ reader, writer, mux := channelPair(t)
+ defer reader.Close()
+ defer writer.Close()
+ defer mux.Close()
+
+ wDone := make(chan int, 1)
+ go func() {
+ if _, err := writer.Write(make([]byte, channelWindowSize)); err != nil {
+ t.Errorf("could not fill window: %v", err)
+ }
+ if _, err := writer.Write(make([]byte, 1)); err != io.EOF {
+ t.Errorf("got %v, want EOF for unblock write", err)
+ }
+ wDone <- 1
+ }()
+
+ writer.remoteWin.waitWriterBlocked()
+ mux.Close()
+ <-wDone
+}
+
+func TestMuxReject(t *testing.T) {
+ client, server := muxPair()
+ defer server.Close()
+ defer client.Close()
+
+ go func() {
+ ch, ok := <-server.incomingChannels
+ if !ok {
+ t.Fatalf("Accept")
+ }
+ if ch.ChannelType() != "ch" || string(ch.ExtraData()) != "extra" {
+ t.Fatalf("unexpected channel: %q, %q", ch.ChannelType(), ch.ExtraData())
+ }
+ ch.Reject(RejectionReason(42), "message")
+ }()
+
+ ch, err := client.openChannel("ch", []byte("extra"))
+ if ch != nil {
+ t.Fatal("openChannel not rejected")
+ }
+
+ ocf, ok := err.(*OpenChannelError)
+ if !ok {
+ t.Errorf("got %#v want *OpenChannelError", err)
+ } else if ocf.Reason != 42 || ocf.Message != "message" {
+ t.Errorf("got %#v, want {Reason: 42, Message: %q}", ocf, "message")
+ }
+
+ want := "ssh: rejected: unknown reason 42 (message)"
+ if err.Error() != want {
+ t.Errorf("got %q, want %q", err.Error(), want)
+ }
+}
+
+func TestMuxChannelRequest(t *testing.T) {
+ client, server, mux := channelPair(t)
+ defer server.Close()
+ defer client.Close()
+ defer mux.Close()
+
+ var received int
+ var wg sync.WaitGroup
+ wg.Add(1)
+ go func() {
+ for r := range server.incomingRequests {
+ received++
+ r.Reply(r.Type == "yes", nil)
+ }
+ wg.Done()
+ }()
+ _, err := client.SendRequest("yes", false, nil)
+ if err != nil {
+ t.Fatalf("SendRequest: %v", err)
+ }
+ ok, err := client.SendRequest("yes", true, nil)
+ if err != nil {
+ t.Fatalf("SendRequest: %v", err)
+ }
+
+ if !ok {
+ t.Errorf("SendRequest(yes): %v", ok)
+
+ }
+
+ ok, err = client.SendRequest("no", true, nil)
+ if err != nil {
+ t.Fatalf("SendRequest: %v", err)
+ }
+ if ok {
+ t.Errorf("SendRequest(no): %v", ok)
+
+ }
+
+ client.Close()
+ wg.Wait()
+
+ if received != 3 {
+ t.Errorf("got %d requests, want %d", received, 3)
+ }
+}
+
+func TestMuxGlobalRequest(t *testing.T) {
+ clientMux, serverMux := muxPair()
+ defer serverMux.Close()
+ defer clientMux.Close()
+
+ var seen bool
+ go func() {
+ for r := range serverMux.incomingRequests {
+ seen = seen || r.Type == "peek"
+ if r.WantReply {
+ err := r.Reply(r.Type == "yes",
+ append([]byte(r.Type), r.Payload...))
+ if err != nil {
+ t.Errorf("AckRequest: %v", err)
+ }
+ }
+ }
+ }()
+
+ _, _, err := clientMux.SendRequest("peek", false, nil)
+ if err != nil {
+ t.Errorf("SendRequest: %v", err)
+ }
+
+ ok, data, err := clientMux.SendRequest("yes", true, []byte("a"))
+ if !ok || string(data) != "yesa" || err != nil {
+ t.Errorf("SendRequest(\"yes\", true, \"a\"): %v %v %v",
+ ok, data, err)
+ }
+ if ok, data, err := clientMux.SendRequest("yes", true, []byte("a")); !ok || string(data) != "yesa" || err != nil {
+ t.Errorf("SendRequest(\"yes\", true, \"a\"): %v %v %v",
+ ok, data, err)
+ }
+
+ if ok, data, err := clientMux.SendRequest("no", true, []byte("a")); ok || string(data) != "noa" || err != nil {
+ t.Errorf("SendRequest(\"no\", true, \"a\"): %v %v %v",
+ ok, data, err)
+ }
+
+ clientMux.Disconnect(0, "")
+ if !seen {
+ t.Errorf("never saw 'peek' request")
+ }
+}
+
+func TestMuxGlobalRequestUnblock(t *testing.T) {
+ clientMux, serverMux := muxPair()
+ defer serverMux.Close()
+ defer clientMux.Close()
+
+ result := make(chan error, 1)
+ go func() {
+ _, _, err := clientMux.SendRequest("hello", true, nil)
+ result <- err
+ }()
+
+ <-serverMux.incomingRequests
+ serverMux.conn.Close()
+ err := <-result
+
+ if err != io.EOF {
+ t.Errorf("want EOF, got %v", err)
+ }
+}
+
+func TestMuxChannelRequestUnblock(t *testing.T) {
+ a, b, connB := channelPair(t)
+ defer a.Close()
+ defer b.Close()
+ defer connB.Close()
+
+ result := make(chan error, 1)
+ go func() {
+ _, err := a.SendRequest("hello", true, nil)
+ result <- err
+ }()
+
+ <-b.incomingRequests
+ connB.conn.Close()
+ err := <-result
+
+ if err != io.EOF {
+ t.Errorf("want EOF, got %v", err)
+ }
+}
+
+func TestMuxDisconnect(t *testing.T) {
+ a, b := muxPair()
+ defer a.Close()
+ defer b.Close()
+
+ go func() {
+ for r := range b.incomingRequests {
+ r.Reply(true, nil)
+ }
+ }()
+
+ a.Disconnect(42, "whatever")
+ ok, _, err := a.SendRequest("hello", true, nil)
+ if ok || err == nil {
+ t.Errorf("got reply after disconnecting")
+ }
+ err = b.Wait()
+ if d, ok := err.(*disconnectMsg); !ok || d.Reason != 42 {
+ t.Errorf("got %#v, want disconnectMsg{Reason:42}", err)
+ }
+}
+
+func TestMuxCloseChannel(t *testing.T) {
+ r, w, mux := channelPair(t)
+ defer mux.Close()
+ defer r.Close()
+ defer w.Close()
+
+ result := make(chan error, 1)
+ go func() {
+ var b [1024]byte
+ _, err := r.Read(b[:])
+ result <- err
+ }()
+ if err := w.Close(); err != nil {
+ t.Errorf("w.Close: %v", err)
+ }
+
+ if _, err := w.Write([]byte("hello")); err != io.EOF {
+ t.Errorf("got err %v, want io.EOF after Close", err)
+ }
+
+ if err := <-result; err != io.EOF {
+ t.Errorf("got %v (%T), want io.EOF", err, err)
+ }
+}
+
+func TestMuxCloseWriteChannel(t *testing.T) {
+ r, w, mux := channelPair(t)
+ defer mux.Close()
+
+ result := make(chan error, 1)
+ go func() {
+ var b [1024]byte
+ _, err := r.Read(b[:])
+ result <- err
+ }()
+ if err := w.CloseWrite(); err != nil {
+ t.Errorf("w.CloseWrite: %v", err)
+ }
+
+ if _, err := w.Write([]byte("hello")); err != io.EOF {
+ t.Errorf("got err %v, want io.EOF after CloseWrite", err)
+ }
+
+ if err := <-result; err != io.EOF {
+ t.Errorf("got %v (%T), want io.EOF", err, err)
+ }
+}
+
+func TestMuxInvalidRecord(t *testing.T) {
+ a, b := muxPair()
+ defer a.Close()
+ defer b.Close()
+
+ packet := make([]byte, 1+4+4+1)
+ packet[0] = msgChannelData
+ marshalUint32(packet[1:], 29348723 /* invalid channel id */)
+ marshalUint32(packet[5:], 1)
+ packet[9] = 42
+
+ a.conn.writePacket(packet)
+ go a.SendRequest("hello", false, nil)
+ // 'a' wrote an invalid packet, so 'b' has exited.
+ req, ok := <-b.incomingRequests
+ if ok {
+ t.Errorf("got request %#v after receiving invalid packet", req)
+ }
+}
+
+func TestZeroWindowAdjust(t *testing.T) {
+ a, b, mux := channelPair(t)
+ defer a.Close()
+ defer b.Close()
+ defer mux.Close()
+
+ go func() {
+ io.WriteString(a, "hello")
+ // bogus adjust.
+ a.sendMessage(windowAdjustMsg{})
+ io.WriteString(a, "world")
+ a.Close()
+ }()
+
+ want := "helloworld"
+ c, _ := ioutil.ReadAll(b)
+ if string(c) != want {
+ t.Errorf("got %q want %q", c, want)
+ }
+}
+
+func TestMuxMaxPacketSize(t *testing.T) {
+ a, b, mux := channelPair(t)
+ defer a.Close()
+ defer b.Close()
+ defer mux.Close()
+
+ large := make([]byte, a.maxRemotePayload+1)
+ packet := make([]byte, 1+4+4+1+len(large))
+ packet[0] = msgChannelData
+ marshalUint32(packet[1:], a.remoteId)
+ marshalUint32(packet[5:], uint32(len(large)))
+ packet[9] = 42
+
+ if err := a.mux.conn.writePacket(packet); err != nil {
+ t.Errorf("could not send packet")
+ }
+
+ go a.SendRequest("hello", false, nil)
+
+ _, ok := <-b.incomingRequests
+ if ok {
+ t.Errorf("connection still alive after receiving large packet.")
+ }
+}
+
+// Don't ship code with debug=true.
+func TestDebug(t *testing.T) {
+ if debugMux {
+ t.Error("mux debug switched on")
+ }
+ if debugHandshake {
+ t.Error("handshake debug switched on")
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/server.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/server.go
new file mode 100644
index 00000000000..4781eb78050
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/server.go
@@ -0,0 +1,495 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "net"
+)
+
+// The Permissions type holds fine-grained permissions that are
+// specific to a user or a specific authentication method for a
+// user. Permissions, except for "source-address", must be enforced in
+// the server application layer, after successful authentication. The
+// Permissions are passed on in ServerConn so a server implementation
+// can honor them.
+type Permissions struct {
+ // Critical options restrict default permissions. Common
+ // restrictions are "source-address" and "force-command". If
+ // the server cannot enforce the restriction, or does not
+ // recognize it, the user should not authenticate.
+ CriticalOptions map[string]string
+
+ // Extensions are extra functionality that the server may
+ // offer on authenticated connections. Common extensions are
+ // "permit-agent-forwarding", "permit-X11-forwarding". Lack of
+ // support for an extension does not preclude authenticating a
+ // user.
+ Extensions map[string]string
+}
+
+// ServerConfig holds server specific configuration data.
+type ServerConfig struct {
+ // Config contains configuration shared between client and server.
+ Config
+
+ hostKeys []Signer
+
+ // NoClientAuth is true if clients are allowed to connect without
+ // authenticating.
+ NoClientAuth bool
+
+ // PasswordCallback, if non-nil, is called when a user
+ // attempts to authenticate using a password.
+ PasswordCallback func(conn ConnMetadata, password []byte) (*Permissions, error)
+
+ // PublicKeyCallback, if non-nil, is called when a client attempts public
+ // key authentication. It must return a nil error if the given public key
+ // is valid for the given user. For example, see CertChecker.Authenticate.
+ PublicKeyCallback func(conn ConnMetadata, key PublicKey) (*Permissions, error)
+
+ // KeyboardInteractiveCallback, if non-nil, is called when
+ // keyboard-interactive authentication is selected (RFC
+ // 4256). The client object's Challenge function should be
+ // used to query the user. The callback may offer multiple
+ // Challenge rounds. To avoid information leaks, the client
+ // should be presented a challenge even if the user is
+ // unknown.
+ KeyboardInteractiveCallback func(conn ConnMetadata, client KeyboardInteractiveChallenge) (*Permissions, error)
+
+ // AuthLogCallback, if non-nil, is called to log all authentication
+ // attempts.
+ AuthLogCallback func(conn ConnMetadata, method string, err error)
+
+ // ServerVersion is the version identification string to announce in
+ // the public handshake.
+ // If empty, a reasonable default is used.
+ // Note that RFC 4253 section 4.2 requires that this string start with
+ // "SSH-2.0-".
+ ServerVersion string
+}
+
+// AddHostKey adds a private key as a host key. If a host key with the
+// same algorithm already exists, it is overwritten. Each server
+// config must have at least one host key.
+func (s *ServerConfig) AddHostKey(key Signer) {
+ for i, k := range s.hostKeys {
+ if k.PublicKey().Type() == key.PublicKey().Type() {
+ s.hostKeys[i] = key
+ return
+ }
+ }
+
+ s.hostKeys = append(s.hostKeys, key)
+}
+
+// cachedPubKey contains the results of querying whether a public key is
+// acceptable for a user.
+type cachedPubKey struct {
+ user string
+ pubKeyData []byte
+ result error
+ perms *Permissions
+}
+
+const maxCachedPubKeys = 16
+
+// pubKeyCache caches tests for public keys. Since SSH clients
+// will query whether a public key is acceptable before attempting to
+// authenticate with it, we end up with duplicate queries for public
+// key validity. The cache only applies to a single ServerConn.
+type pubKeyCache struct {
+ keys []cachedPubKey
+}
+
+// get returns the result for a given user/algo/key tuple.
+func (c *pubKeyCache) get(user string, pubKeyData []byte) (cachedPubKey, bool) {
+ for _, k := range c.keys {
+ if k.user == user && bytes.Equal(k.pubKeyData, pubKeyData) {
+ return k, true
+ }
+ }
+ return cachedPubKey{}, false
+}
+
+// add adds the given tuple to the cache.
+func (c *pubKeyCache) add(candidate cachedPubKey) {
+ if len(c.keys) < maxCachedPubKeys {
+ c.keys = append(c.keys, candidate)
+ }
+}
+
+// ServerConn is an authenticated SSH connection, as seen from the
+// server
+type ServerConn struct {
+ Conn
+
+ // If the authentication callback that succeeded returned a
+ // non-nil Permissions pointer, it is stored here.
+ Permissions *Permissions
+}
+
+// NewServerConn starts a new SSH server with c as the underlying
+// transport. It starts with a handshake and, if the handshake is
+// unsuccessful, it closes the connection and returns an error. The
+// Request and NewChannel channels must be serviced, or the connection
+// will hang.
+func NewServerConn(c net.Conn, config *ServerConfig) (*ServerConn, <-chan NewChannel, <-chan *Request, error) {
+ fullConf := *config
+ fullConf.SetDefaults()
+ s := &connection{
+ sshConn: sshConn{conn: c},
+ }
+ perms, err := s.serverHandshake(&fullConf)
+ if err != nil {
+ c.Close()
+ return nil, nil, nil, err
+ }
+ return &ServerConn{s, perms}, s.mux.incomingChannels, s.mux.incomingRequests, nil
+}
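+
+// Illustrative sketch: exampleAcceptOne is a hypothetical helper, not part of
+// this package; the listen address, user name, and password are placeholders.
+// It accepts one TCP connection, runs the server handshake with password
+// authentication, and then services (and rejects) the mandatory channels.
+func exampleAcceptOne(hostKey Signer) error {
+ config := &ServerConfig{
+ PasswordCallback: func(conn ConnMetadata, password []byte) (*Permissions, error) {
+ if conn.User() == "testuser" && string(password) == "tiger" {
+ return nil, nil
+ }
+ return nil, fmt.Errorf("ssh: password rejected for %q", conn.User())
+ },
+ }
+ config.AddHostKey(hostKey)
+
+ listener, err := net.Listen("tcp", "127.0.0.1:2022")
+ if err != nil {
+ return err
+ }
+ nConn, err := listener.Accept()
+ if err != nil {
+ return err
+ }
+
+ sConn, chans, reqs, err := NewServerConn(nConn, config)
+ if err != nil {
+ return err
+ }
+ defer sConn.Close()
+
+ // The Request and NewChannel channels must be serviced.
+ go func() {
+ for req := range reqs {
+ if req.WantReply {
+ req.Reply(false, nil)
+ }
+ }
+ }()
+ for newCh := range chans {
+ newCh.Reject(UnknownChannelType, "channels not implemented in this sketch")
+ }
+ return sConn.Wait()
+}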
+
+// signAndMarshal signs the data with the appropriate algorithm,
+// and serializes the result in SSH wire format.
+func signAndMarshal(k Signer, rand io.Reader, data []byte) ([]byte, error) {
+ sig, err := k.Sign(rand, data)
+ if err != nil {
+ return nil, err
+ }
+
+ return Marshal(sig), nil
+}
+
+// handshake performs key exchange and user authentication.
+func (s *connection) serverHandshake(config *ServerConfig) (*Permissions, error) {
+ if len(config.hostKeys) == 0 {
+ return nil, errors.New("ssh: server has no host keys")
+ }
+
+ if !config.NoClientAuth && config.PasswordCallback == nil && config.PublicKeyCallback == nil && config.KeyboardInteractiveCallback == nil {
+ return nil, errors.New("ssh: no authentication methods configured but NoClientAuth is also false")
+ }
+
+ if config.ServerVersion != "" {
+ s.serverVersion = []byte(config.ServerVersion)
+ } else {
+ s.serverVersion = []byte(packageVersion)
+ }
+ var err error
+ s.clientVersion, err = exchangeVersions(s.sshConn.conn, s.serverVersion)
+ if err != nil {
+ return nil, err
+ }
+
+ tr := newTransport(s.sshConn.conn, config.Rand, false /* not client */)
+ s.transport = newServerTransport(tr, s.clientVersion, s.serverVersion, config)
+
+ if err := s.transport.requestKeyChange(); err != nil {
+ return nil, err
+ }
+
+ if packet, err := s.transport.readPacket(); err != nil {
+ return nil, err
+ } else if packet[0] != msgNewKeys {
+ return nil, unexpectedMessageError(msgNewKeys, packet[0])
+ }
+
+ // We just did the key change, so the session ID is established.
+ s.sessionID = s.transport.getSessionID()
+
+ var packet []byte
+ if packet, err = s.transport.readPacket(); err != nil {
+ return nil, err
+ }
+
+ var serviceRequest serviceRequestMsg
+ if err = Unmarshal(packet, &serviceRequest); err != nil {
+ return nil, err
+ }
+ if serviceRequest.Service != serviceUserAuth {
+ return nil, errors.New("ssh: requested service '" + serviceRequest.Service + "' before authenticating")
+ }
+ serviceAccept := serviceAcceptMsg{
+ Service: serviceUserAuth,
+ }
+ if err := s.transport.writePacket(Marshal(&serviceAccept)); err != nil {
+ return nil, err
+ }
+
+ perms, err := s.serverAuthenticate(config)
+ if err != nil {
+ return nil, err
+ }
+ s.mux = newMux(s.transport)
+ return perms, err
+}
+
+func isAcceptableAlgo(algo string) bool {
+ switch algo {
+ case KeyAlgoRSA, KeyAlgoDSA, KeyAlgoECDSA256, KeyAlgoECDSA384, KeyAlgoECDSA521,
+ CertAlgoRSAv01, CertAlgoDSAv01, CertAlgoECDSA256v01, CertAlgoECDSA384v01, CertAlgoECDSA521v01:
+ return true
+ }
+ return false
+}
+
+func checkSourceAddress(addr net.Addr, sourceAddr string) error {
+ if addr == nil {
+ return errors.New("ssh: no address known for client, but source-address match required")
+ }
+
+ tcpAddr, ok := addr.(*net.TCPAddr)
+ if !ok {
+ return fmt.Errorf("ssh: remote address %v is not a TCP address when checking source-address match", addr)
+ }
+
+ if allowedIP := net.ParseIP(sourceAddr); allowedIP != nil {
+ if bytes.Equal(allowedIP, tcpAddr.IP) {
+ return nil
+ }
+ } else {
+ _, ipNet, err := net.ParseCIDR(sourceAddr)
+ if err != nil {
+ return fmt.Errorf("ssh: error parsing source-address restriction %q: %v", sourceAddr, err)
+ }
+
+ if ipNet.Contains(tcpAddr.IP) {
+ return nil
+ }
+ }
+
+ return fmt.Errorf("ssh: remote address %v is not allowed because of source-address restriction", addr)
+}
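+
+// Illustrative sketch: exampleSourceAddressCheck is a hypothetical helper, not
+// part of this package; the addresses are documentation-range placeholders.
+// Both an exact IP and a CIDR block satisfy a "source-address" restriction.
+func exampleSourceAddressCheck() error {
+ client := &net.TCPAddr{IP: net.ParseIP("192.0.2.10"), Port: 2222}
+ if err := checkSourceAddress(client, "192.0.2.10"); err != nil {
+ return err
+ }
+ return checkSourceAddress(client, "192.0.2.0/24")
+}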
+
+func (s *connection) serverAuthenticate(config *ServerConfig) (*Permissions, error) {
+ var err error
+ var cache pubKeyCache
+ var perms *Permissions
+
+userAuthLoop:
+ for {
+ var userAuthReq userAuthRequestMsg
+ if packet, err := s.transport.readPacket(); err != nil {
+ return nil, err
+ } else if err = Unmarshal(packet, &userAuthReq); err != nil {
+ return nil, err
+ }
+
+ if userAuthReq.Service != serviceSSH {
+ return nil, errors.New("ssh: client attempted to negotiate for unknown service: " + userAuthReq.Service)
+ }
+
+ s.user = userAuthReq.User
+ perms = nil
+ authErr := errors.New("no auth passed yet")
+
+ switch userAuthReq.Method {
+ case "none":
+ if config.NoClientAuth {
+ s.user = ""
+ authErr = nil
+ }
+ case "password":
+ if config.PasswordCallback == nil {
+ authErr = errors.New("ssh: password auth not configured")
+ break
+ }
+ payload := userAuthReq.Payload
+ if len(payload) < 1 || payload[0] != 0 {
+ return nil, parseError(msgUserAuthRequest)
+ }
+ payload = payload[1:]
+ password, payload, ok := parseString(payload)
+ if !ok || len(payload) > 0 {
+ return nil, parseError(msgUserAuthRequest)
+ }
+
+ perms, authErr = config.PasswordCallback(s, password)
+ case "keyboard-interactive":
+ if config.KeyboardInteractiveCallback == nil {
+ authErr = errors.New("ssh: keyboard-interactive auth not configured")
+ break
+ }
+
+ prompter := &sshClientKeyboardInteractive{s}
+ perms, authErr = config.KeyboardInteractiveCallback(s, prompter.Challenge)
+ case "publickey":
+ if config.PublicKeyCallback == nil {
+ authErr = errors.New("ssh: publickey auth not configured")
+ break
+ }
+ payload := userAuthReq.Payload
+ if len(payload) < 1 {
+ return nil, parseError(msgUserAuthRequest)
+ }
+ isQuery := payload[0] == 0
+ payload = payload[1:]
+ algoBytes, payload, ok := parseString(payload)
+ if !ok {
+ return nil, parseError(msgUserAuthRequest)
+ }
+ algo := string(algoBytes)
+ if !isAcceptableAlgo(algo) {
+ authErr = fmt.Errorf("ssh: algorithm %q not accepted", algo)
+ break
+ }
+
+ pubKeyData, payload, ok := parseString(payload)
+ if !ok {
+ return nil, parseError(msgUserAuthRequest)
+ }
+
+ pubKey, err := ParsePublicKey(pubKeyData)
+ if err != nil {
+ return nil, err
+ }
+
+ candidate, ok := cache.get(s.user, pubKeyData)
+ if !ok {
+ candidate.user = s.user
+ candidate.pubKeyData = pubKeyData
+ candidate.perms, candidate.result = config.PublicKeyCallback(s, pubKey)
+ if candidate.result == nil && candidate.perms != nil && candidate.perms.CriticalOptions != nil && candidate.perms.CriticalOptions[sourceAddressCriticalOption] != "" {
+ candidate.result = checkSourceAddress(
+ s.RemoteAddr(),
+ candidate.perms.CriticalOptions[sourceAddressCriticalOption])
+ }
+ cache.add(candidate)
+ }
+
+ if isQuery {
+ // The client can query if the given public key
+ // would be okay.
+ if len(payload) > 0 {
+ return nil, parseError(msgUserAuthRequest)
+ }
+
+ if candidate.result == nil {
+ okMsg := userAuthPubKeyOkMsg{
+ Algo: algo,
+ PubKey: pubKeyData,
+ }
+ if err = s.transport.writePacket(Marshal(&okMsg)); err != nil {
+ return nil, err
+ }
+ continue userAuthLoop
+ }
+ authErr = candidate.result
+ } else {
+ sig, payload, ok := parseSignature(payload)
+ if !ok || len(payload) > 0 {
+ return nil, parseError(msgUserAuthRequest)
+ }
+ // Ensure the public key algo and signature algo
+ // are supported. Compare the private key
+ // algorithm name that corresponds to algo with
+ // sig.Format. This is usually the same, but
+ // for certs, the names differ.
+ if !isAcceptableAlgo(sig.Format) {
+ break
+ }
+ signedData := buildDataSignedForAuth(s.transport.getSessionID(), userAuthReq, algoBytes, pubKeyData)
+
+ if err := pubKey.Verify(signedData, sig); err != nil {
+ return nil, err
+ }
+
+ authErr = candidate.result
+ perms = candidate.perms
+ }
+ default:
+ authErr = fmt.Errorf("ssh: unknown method %q", userAuthReq.Method)
+ }
+
+ if config.AuthLogCallback != nil {
+ config.AuthLogCallback(s, userAuthReq.Method, authErr)
+ }
+
+ if authErr == nil {
+ break userAuthLoop
+ }
+
+ var failureMsg userAuthFailureMsg
+ if config.PasswordCallback != nil {
+ failureMsg.Methods = append(failureMsg.Methods, "password")
+ }
+ if config.PublicKeyCallback != nil {
+ failureMsg.Methods = append(failureMsg.Methods, "publickey")
+ }
+ if config.KeyboardInteractiveCallback != nil {
+ failureMsg.Methods = append(failureMsg.Methods, "keyboard-interactive")
+ }
+
+ if len(failureMsg.Methods) == 0 {
+ return nil, errors.New("ssh: no authentication methods configured but NoClientAuth is also false")
+ }
+
+ if err = s.transport.writePacket(Marshal(&failureMsg)); err != nil {
+ return nil, err
+ }
+ }
+
+ if err = s.transport.writePacket([]byte{msgUserAuthSuccess}); err != nil {
+ return nil, err
+ }
+ return perms, nil
+}
+
+// sshClientKeyboardInteractive implements a ClientKeyboardInteractive by
+// asking the client on the other side of a ServerConn.
+type sshClientKeyboardInteractive struct {
+ *connection
+}
+
+func (c *sshClientKeyboardInteractive) Challenge(user, instruction string, questions []string, echos []bool) (answers []string, err error) {
+ if len(questions) != len(echos) {
+ return nil, errors.New("ssh: echos and questions must have equal length")
+ }
+
+ var prompts []byte
+ for i := range questions {
+ prompts = appendString(prompts, questions[i])
+ prompts = appendBool(prompts, echos[i])
+ }
+
+ if err := c.transport.writePacket(Marshal(&userAuthInfoRequestMsg{
+ Instruction: instruction,
+ NumPrompts: uint32(len(questions)),
+ Prompts: prompts,
+ })); err != nil {
+ return nil, err
+ }
+
+ packet, err := c.transport.readPacket()
+ if err != nil {
+ return nil, err
+ }
+ if packet[0] != msgUserAuthInfoResponse {
+ return nil, unexpectedMessageError(msgUserAuthInfoResponse, packet[0])
+ }
+ packet = packet[1:]
+
+ n, packet, ok := parseUint32(packet)
+ if !ok || int(n) != len(questions) {
+ return nil, parseError(msgUserAuthInfoResponse)
+ }
+
+ for i := uint32(0); i < n; i++ {
+ ans, rest, ok := parseString(packet)
+ if !ok {
+ return nil, parseError(msgUserAuthInfoResponse)
+ }
+
+ answers = append(answers, string(ans))
+ packet = rest
+ }
+ if len(packet) != 0 {
+ return nil, errors.New("ssh: junk at end of message")
+ }
+
+ return answers, nil
+}
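+
+// Illustrative sketch: exampleKeyboardInteractiveCallback is a hypothetical
+// helper, not part of this package; the prompt and expected code are
+// placeholders. A KeyboardInteractiveCallback drives the client through the
+// Challenge function it is handed, using one or more rounds as needed.
+func exampleKeyboardInteractiveCallback(conn ConnMetadata, client KeyboardInteractiveChallenge) (*Permissions, error) {
+ answers, err := client(conn.User(), "second factor", []string{"Code: "}, []bool{true})
+ if err != nil {
+ return nil, err
+ }
+ if len(answers) == 1 && answers[0] == "424242" {
+ return nil, nil
+ }
+ return nil, errors.New("ssh: keyboard-interactive authentication failed")
+}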
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/session.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/session.go
new file mode 100644
index 00000000000..fd10cd1aaf2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/session.go
@@ -0,0 +1,605 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+// Session implements an interactive session described in
+// "RFC 4254, section 6".
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "sync"
+)
+
+type Signal string
+
+// POSIX signals as listed in RFC 4254 Section 6.10.
+const (
+ SIGABRT Signal = "ABRT"
+ SIGALRM Signal = "ALRM"
+ SIGFPE Signal = "FPE"
+ SIGHUP Signal = "HUP"
+ SIGILL Signal = "ILL"
+ SIGINT Signal = "INT"
+ SIGKILL Signal = "KILL"
+ SIGPIPE Signal = "PIPE"
+ SIGQUIT Signal = "QUIT"
+ SIGSEGV Signal = "SEGV"
+ SIGTERM Signal = "TERM"
+ SIGUSR1 Signal = "USR1"
+ SIGUSR2 Signal = "USR2"
+)
+
+var signals = map[Signal]int{
+ SIGABRT: 6,
+ SIGALRM: 14,
+ SIGFPE: 8,
+ SIGHUP: 1,
+ SIGILL: 4,
+ SIGINT: 2,
+ SIGKILL: 9,
+ SIGPIPE: 13,
+ SIGQUIT: 3,
+ SIGSEGV: 11,
+ SIGTERM: 15,
+}
+
+type TerminalModes map[uint8]uint32
+
+// POSIX terminal mode flags as listed in RFC 4254 Section 8.
+const (
+ tty_OP_END = 0
+ VINTR = 1
+ VQUIT = 2
+ VERASE = 3
+ VKILL = 4
+ VEOF = 5
+ VEOL = 6
+ VEOL2 = 7
+ VSTART = 8
+ VSTOP = 9
+ VSUSP = 10
+ VDSUSP = 11
+ VREPRINT = 12
+ VWERASE = 13
+ VLNEXT = 14
+ VFLUSH = 15
+ VSWTCH = 16
+ VSTATUS = 17
+ VDISCARD = 18
+ IGNPAR = 30
+ PARMRK = 31
+ INPCK = 32
+ ISTRIP = 33
+ INLCR = 34
+ IGNCR = 35
+ ICRNL = 36
+ IUCLC = 37
+ IXON = 38
+ IXANY = 39
+ IXOFF = 40
+ IMAXBEL = 41
+ ISIG = 50
+ ICANON = 51
+ XCASE = 52
+ ECHO = 53
+ ECHOE = 54
+ ECHOK = 55
+ ECHONL = 56
+ NOFLSH = 57
+ TOSTOP = 58
+ IEXTEN = 59
+ ECHOCTL = 60
+ ECHOKE = 61
+ PENDIN = 62
+ OPOST = 70
+ OLCUC = 71
+ ONLCR = 72
+ OCRNL = 73
+ ONOCR = 74
+ ONLRET = 75
+ CS7 = 90
+ CS8 = 91
+ PARENB = 92
+ PARODD = 93
+ TTY_OP_ISPEED = 128
+ TTY_OP_OSPEED = 129
+)
+
+// A Session represents a connection to a remote command or shell.
+type Session struct {
+ // Stdin specifies the remote process's standard input.
+ // If Stdin is nil, the remote process reads from an empty
+ // bytes.Buffer.
+ Stdin io.Reader
+
+ // Stdout and Stderr specify the remote process's standard
+ // output and error.
+ //
+ // If either is nil, Run connects the corresponding file
+ // descriptor to ioutil.Discard. There is a
+ // fixed amount of buffering that is shared for the two streams.
+ // If either blocks it may eventually cause the remote
+ // command to block.
+ Stdout io.Writer
+ Stderr io.Writer
+
+ ch Channel // the channel backing this session
+ started bool // true once Start, Run or Shell is invoked.
+ copyFuncs []func() error
+ errors chan error // one send per copyFunc
+
+ // true if pipe method is active
+ stdinpipe, stdoutpipe, stderrpipe bool
+
+ // stdinPipeWriter is non-nil if StdinPipe has not been called
+ // and Stdin was specified by the user; it is the write end of
+ // a pipe connecting Session.Stdin to the stdin channel.
+ stdinPipeWriter io.WriteCloser
+
+ exitStatus chan error
+}
+
+// SendRequest sends an out-of-band channel request on the SSH channel
+// underlying the session.
+func (s *Session) SendRequest(name string, wantReply bool, payload []byte) (bool, error) {
+ return s.ch.SendRequest(name, wantReply, payload)
+}
+
+func (s *Session) Close() error {
+ return s.ch.Close()
+}
+
+// RFC 4254 Section 6.4.
+type setenvRequest struct {
+ Name string
+ Value string
+}
+
+// Setenv sets an environment variable that will be applied to any
+// command executed by Shell or Run.
+func (s *Session) Setenv(name, value string) error {
+ msg := setenvRequest{
+ Name: name,
+ Value: value,
+ }
+ ok, err := s.ch.SendRequest("env", true, Marshal(&msg))
+ if err == nil && !ok {
+ err = errors.New("ssh: setenv failed")
+ }
+ return err
+}
+
+// RFC 4254 Section 6.2.
+type ptyRequestMsg struct {
+ Term string
+ Columns uint32
+ Rows uint32
+ Width uint32
+ Height uint32
+ Modelist string
+}
+
+// RequestPty requests the association of a pty with the session on the remote host.
+func (s *Session) RequestPty(term string, h, w int, termmodes TerminalModes) error {
+ var tm []byte
+ for k, v := range termmodes {
+ kv := struct {
+ Key byte
+ Val uint32
+ }{k, v}
+
+ tm = append(tm, Marshal(&kv)...)
+ }
+ tm = append(tm, tty_OP_END)
+ req := ptyRequestMsg{
+ Term: term,
+ Columns: uint32(w),
+ Rows: uint32(h),
+ Width: uint32(w * 8),
+ Height: uint32(h * 8),
+ Modelist: string(tm),
+ }
+ ok, err := s.ch.SendRequest("pty-req", true, Marshal(&req))
+ if err == nil && !ok {
+ err = errors.New("ssh: pty-req failed")
+ }
+ return err
+}
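+
+// Illustrative sketch: exampleRequestPtyAndShell is a hypothetical helper, not
+// part of this package; the terminal name, size, and speeds are placeholders.
+// It asks for a pty with a small set of terminal modes and then starts a shell.
+func exampleRequestPtyAndShell(s *Session) error {
+ modes := TerminalModes{
+ ECHO: 1, // enable echoing
+ TTY_OP_ISPEED: 14400, // input speed = 14.4 kbaud
+ TTY_OP_OSPEED: 14400, // output speed = 14.4 kbaud
+ }
+ if err := s.RequestPty("xterm", 40, 80, modes); err != nil {
+ return err
+ }
+ return s.Shell()
+}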
+
+// RFC 4254 Section 6.5.
+type subsystemRequestMsg struct {
+ Subsystem string
+}
+
+// RequestSubsystem requests the association of a subsystem with the session on the remote host.
+// A subsystem is a predefined command that runs in the background when the SSH session is initiated.
+func (s *Session) RequestSubsystem(subsystem string) error {
+ msg := subsystemRequestMsg{
+ Subsystem: subsystem,
+ }
+ ok, err := s.ch.SendRequest("subsystem", true, Marshal(&msg))
+ if err == nil && !ok {
+ err = errors.New("ssh: subsystem request failed")
+ }
+ return err
+}
+
+// RFC 4254 Section 6.9.
+type signalMsg struct {
+ Signal string
+}
+
+// Signal sends the given signal to the remote process.
+// sig is one of the SIG* constants.
+func (s *Session) Signal(sig Signal) error {
+ msg := signalMsg{
+ Signal: string(sig),
+ }
+
+ _, err := s.ch.SendRequest("signal", false, Marshal(&msg))
+ return err
+}
+
+// RFC 4254 Section 6.5.
+type execMsg struct {
+ Command string
+}
+
+// Start runs cmd on the remote host. Typically, the remote
+// server passes cmd to the shell for interpretation.
+// A Session only accepts one call to Run, Start or Shell.
+func (s *Session) Start(cmd string) error {
+ if s.started {
+ return errors.New("ssh: session already started")
+ }
+ req := execMsg{
+ Command: cmd,
+ }
+
+ ok, err := s.ch.SendRequest("exec", true, Marshal(&req))
+ if err == nil && !ok {
+ err = fmt.Errorf("ssh: command %v failed", cmd)
+ }
+ if err != nil {
+ return err
+ }
+ return s.start()
+}
+
+// Run runs cmd on the remote host. Typically, the remote
+// server passes cmd to the shell for interpretation.
+// A Session only accepts one call to Run, Start, Shell, Output,
+// or CombinedOutput.
+//
+// The returned error is nil if the command runs, has no problems
+// copying stdin, stdout, and stderr, and exits with a zero exit
+// status.
+//
+// If the command fails to run or doesn't complete successfully, the
+// error is of type *ExitError. Other error types may be
+// returned for I/O problems.
+func (s *Session) Run(cmd string) error {
+ err := s.Start(cmd)
+ if err != nil {
+ return err
+ }
+ return s.Wait()
+}
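+
+// A minimal, hedged usage sketch (assumes a *Session from Client.NewSession;
+// the command is illustrative only):
+//
+//	var stdout bytes.Buffer
+//	session.Stdout = &stdout
+//	if err := session.Run("/usr/bin/whoami"); err != nil {
+//		if exitErr, ok := err.(*ExitError); ok {
+//			_ = exitErr.ExitStatus() // the remote command's exit code
+//		}
+//	}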
+
+// Output runs cmd on the remote host and returns its standard output.
+func (s *Session) Output(cmd string) ([]byte, error) {
+ if s.Stdout != nil {
+ return nil, errors.New("ssh: Stdout already set")
+ }
+ var b bytes.Buffer
+ s.Stdout = &b
+ err := s.Run(cmd)
+ return b.Bytes(), err
+}
+
+type singleWriter struct {
+ b bytes.Buffer
+ mu sync.Mutex
+}
+
+func (w *singleWriter) Write(p []byte) (int, error) {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+ return w.b.Write(p)
+}
+
+// CombinedOutput runs cmd on the remote host and returns its combined
+// standard output and standard error.
+func (s *Session) CombinedOutput(cmd string) ([]byte, error) {
+ if s.Stdout != nil {
+ return nil, errors.New("ssh: Stdout already set")
+ }
+ if s.Stderr != nil {
+ return nil, errors.New("ssh: Stderr already set")
+ }
+ var b singleWriter
+ s.Stdout = &b
+ s.Stderr = &b
+ err := s.Run(cmd)
+ return b.b.Bytes(), err
+}
+
+// Shell starts a login shell on the remote host. A Session only
+// accepts one call to Run, Start, Shell, Output, or CombinedOutput.
+func (s *Session) Shell() error {
+ if s.started {
+ return errors.New("ssh: session already started")
+ }
+
+ ok, err := s.ch.SendRequest("shell", true, nil)
+ if err == nil && !ok {
+ return errors.New("ssh: could not start shell")
+ }
+ if err != nil {
+ return err
+ }
+ return s.start()
+}
+
+func (s *Session) start() error {
+ s.started = true
+
+ type F func(*Session)
+ for _, setupFd := range []F{(*Session).stdin, (*Session).stdout, (*Session).stderr} {
+ setupFd(s)
+ }
+
+ s.errors = make(chan error, len(s.copyFuncs))
+ for _, fn := range s.copyFuncs {
+ go func(fn func() error) {
+ s.errors <- fn()
+ }(fn)
+ }
+ return nil
+}
+
+// Wait waits for the remote command to exit.
+//
+// The returned error is nil if the command runs, has no problems
+// copying stdin, stdout, and stderr, and exits with a zero exit
+// status.
+//
+// If the command fails to run or doesn't complete successfully, the
+// error is of type *ExitError. Other error types may be
+// returned for I/O problems.
+func (s *Session) Wait() error {
+ if !s.started {
+ return errors.New("ssh: session not started")
+ }
+ waitErr := <-s.exitStatus
+
+ if s.stdinPipeWriter != nil {
+ s.stdinPipeWriter.Close()
+ }
+ var copyError error
+	for range s.copyFuncs {
+ if err := <-s.errors; err != nil && copyError == nil {
+ copyError = err
+ }
+ }
+ if waitErr != nil {
+ return waitErr
+ }
+ return copyError
+}
+
+func (s *Session) wait(reqs <-chan *Request) error {
+ wm := Waitmsg{status: -1}
+ // Wait for msg channel to be closed before returning.
+ for msg := range reqs {
+ switch msg.Type {
+ case "exit-status":
+ d := msg.Payload
+ wm.status = int(d[0])<<24 | int(d[1])<<16 | int(d[2])<<8 | int(d[3])
+ case "exit-signal":
+ var sigval struct {
+ Signal string
+ CoreDumped bool
+ Error string
+ Lang string
+ }
+ if err := Unmarshal(msg.Payload, &sigval); err != nil {
+ return err
+ }
+
+ // Must sanitize strings?
+ wm.signal = sigval.Signal
+ wm.msg = sigval.Error
+ wm.lang = sigval.Lang
+ default:
+ // This handles keepalives and matches
+ // OpenSSH's behaviour.
+ if msg.WantReply {
+ msg.Reply(false, nil)
+ }
+ }
+ }
+ if wm.status == 0 {
+ return nil
+ }
+ if wm.status == -1 {
+ // exit-status was never sent from server
+ if wm.signal == "" {
+ return errors.New("wait: remote command exited without exit status or exit signal")
+ }
+ wm.status = 128
+		if sig, ok := signals[Signal(wm.signal)]; ok {
+			wm.status += sig
+		}
+ }
+ return &ExitError{wm}
+}
+
+func (s *Session) stdin() {
+ if s.stdinpipe {
+ return
+ }
+ var stdin io.Reader
+ if s.Stdin == nil {
+ stdin = new(bytes.Buffer)
+ } else {
+ r, w := io.Pipe()
+ go func() {
+ _, err := io.Copy(w, s.Stdin)
+ w.CloseWithError(err)
+ }()
+ stdin, s.stdinPipeWriter = r, w
+ }
+ s.copyFuncs = append(s.copyFuncs, func() error {
+ _, err := io.Copy(s.ch, stdin)
+ if err1 := s.ch.CloseWrite(); err == nil && err1 != io.EOF {
+ err = err1
+ }
+ return err
+ })
+}
+
+func (s *Session) stdout() {
+ if s.stdoutpipe {
+ return
+ }
+ if s.Stdout == nil {
+ s.Stdout = ioutil.Discard
+ }
+ s.copyFuncs = append(s.copyFuncs, func() error {
+ _, err := io.Copy(s.Stdout, s.ch)
+ return err
+ })
+}
+
+func (s *Session) stderr() {
+ if s.stderrpipe {
+ return
+ }
+ if s.Stderr == nil {
+ s.Stderr = ioutil.Discard
+ }
+ s.copyFuncs = append(s.copyFuncs, func() error {
+ _, err := io.Copy(s.Stderr, s.ch.Stderr())
+ return err
+ })
+}
+
+// sessionStdin reroutes Close to CloseWrite.
+type sessionStdin struct {
+ io.Writer
+ ch Channel
+}
+
+func (s *sessionStdin) Close() error {
+ return s.ch.CloseWrite()
+}
+
+// StdinPipe returns a pipe that will be connected to the
+// remote command's standard input when the command starts.
+func (s *Session) StdinPipe() (io.WriteCloser, error) {
+ if s.Stdin != nil {
+ return nil, errors.New("ssh: Stdin already set")
+ }
+ if s.started {
+ return nil, errors.New("ssh: StdinPipe after process started")
+ }
+ s.stdinpipe = true
+ return &sessionStdin{s.ch, s.ch}, nil
+}
+
+// StdoutPipe returns a pipe that will be connected to the
+// remote command's standard output when the command starts.
+// There is a fixed amount of buffering that is shared between
+// stdout and stderr streams. If the StdoutPipe reader is
+// not serviced fast enough it may eventually cause the
+// remote command to block.
+func (s *Session) StdoutPipe() (io.Reader, error) {
+ if s.Stdout != nil {
+ return nil, errors.New("ssh: Stdout already set")
+ }
+ if s.started {
+ return nil, errors.New("ssh: StdoutPipe after process started")
+ }
+ s.stdoutpipe = true
+ return s.ch, nil
+}
+
+// StderrPipe returns a pipe that will be connected to the
+// remote command's standard error when the command starts.
+// There is a fixed amount of buffering that is shared between
+// stdout and stderr streams. If the StderrPipe reader is
+// not serviced fast enough it may eventually cause the
+// remote command to block.
+func (s *Session) StderrPipe() (io.Reader, error) {
+ if s.Stderr != nil {
+ return nil, errors.New("ssh: Stderr already set")
+ }
+ if s.started {
+ return nil, errors.New("ssh: StderrPipe after process started")
+ }
+ s.stderrpipe = true
+ return s.ch.Stderr(), nil
+}
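+
+// A minimal, hedged streaming sketch: request the pipes before Start and keep
+// draining them, because the buffering shared by stdout and stderr is finite
+// (os.Stdout and the command are illustrative only):
+//
+//	stdout, err := session.StdoutPipe()
+//	if err != nil {
+//		// handle error
+//	}
+//	if err := session.Start("tail -f /var/log/syslog"); err != nil {
+//		// handle error
+//	}
+//	go io.Copy(os.Stdout, stdout)
+//	err = session.Wait()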
+
+// newSession returns a new interactive session on the remote host.
+func newSession(ch Channel, reqs <-chan *Request) (*Session, error) {
+ s := &Session{
+ ch: ch,
+ }
+ s.exitStatus = make(chan error, 1)
+ go func() {
+ s.exitStatus <- s.wait(reqs)
+ }()
+
+ return s, nil
+}
+
+// An ExitError reports unsuccessful completion of a remote command.
+type ExitError struct {
+ Waitmsg
+}
+
+func (e *ExitError) Error() string {
+ return e.Waitmsg.String()
+}
+
+// Waitmsg stores the information about an exited remote command
+// as reported by Wait.
+type Waitmsg struct {
+ status int
+ signal string
+ msg string
+ lang string
+}
+
+// ExitStatus returns the exit status of the remote command.
+func (w Waitmsg) ExitStatus() int {
+ return w.status
+}
+
+// Signal returns the exit signal of the remote command if
+// it was terminated violently.
+func (w Waitmsg) Signal() string {
+ return w.signal
+}
+
+// Msg returns the exit message given by the remote command.
+func (w Waitmsg) Msg() string {
+ return w.msg
+}
+
+// Lang returns the language tag. See RFC 3066.
+func (w Waitmsg) Lang() string {
+ return w.lang
+}
+
+func (w Waitmsg) String() string {
+ return fmt.Sprintf("Process exited with: %v. Reason was: %v (%v)", w.status, w.msg, w.signal)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/session_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/session_test.go
new file mode 100644
index 00000000000..f7f0f7642e7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/session_test.go
@@ -0,0 +1,774 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+// Session tests.
+
+import (
+ "bytes"
+ crypto_rand "crypto/rand"
+ "errors"
+ "io"
+ "io/ioutil"
+ "math/rand"
+ "net"
+ "testing"
+
+ "golang.org/x/crypto/ssh/terminal"
+)
+
+type serverType func(Channel, <-chan *Request, *testing.T)
+
+// dial constructs a new test server and returns a *ClientConn.
+func dial(handler serverType, t *testing.T) *Client {
+ c1, c2, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+
+ go func() {
+ defer c1.Close()
+ conf := ServerConfig{
+ NoClientAuth: true,
+ }
+ conf.AddHostKey(testSigners["rsa"])
+
+ _, chans, reqs, err := NewServerConn(c1, &conf)
+ if err != nil {
+ t.Fatalf("Unable to handshake: %v", err)
+ }
+ go DiscardRequests(reqs)
+
+ for newCh := range chans {
+ if newCh.ChannelType() != "session" {
+ newCh.Reject(UnknownChannelType, "unknown channel type")
+ continue
+ }
+
+ ch, inReqs, err := newCh.Accept()
+ if err != nil {
+ t.Errorf("Accept: %v", err)
+ continue
+ }
+ go func() {
+ handler(ch, inReqs, t)
+ }()
+ }
+ }()
+
+ config := &ClientConfig{
+ User: "testuser",
+ }
+
+ conn, chans, reqs, err := NewClientConn(c2, "", config)
+ if err != nil {
+ t.Fatalf("unable to dial remote side: %v", err)
+ }
+
+ return NewClient(conn, chans, reqs)
+}
+
+// Test a simple string is returned to session.Stdout.
+func TestSessionShell(t *testing.T) {
+ conn := dial(shellHandler, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("Unable to request new session: %v", err)
+ }
+ defer session.Close()
+ stdout := new(bytes.Buffer)
+ session.Stdout = stdout
+ if err := session.Shell(); err != nil {
+ t.Fatalf("Unable to execute command: %s", err)
+ }
+ if err := session.Wait(); err != nil {
+ t.Fatalf("Remote command did not exit cleanly: %v", err)
+ }
+ actual := stdout.String()
+ if actual != "golang" {
+ t.Fatalf("Remote shell did not return expected string: expected=golang, actual=%s", actual)
+ }
+}
+
+// TODO(dfc) add support for Std{in,err}Pipe when the Server supports it.
+
+// Test a simple string is returned via StdoutPipe.
+func TestSessionStdoutPipe(t *testing.T) {
+ conn := dial(shellHandler, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("Unable to request new session: %v", err)
+ }
+ defer session.Close()
+ stdout, err := session.StdoutPipe()
+ if err != nil {
+ t.Fatalf("Unable to request StdoutPipe(): %v", err)
+ }
+ var buf bytes.Buffer
+ if err := session.Shell(); err != nil {
+ t.Fatalf("Unable to execute command: %v", err)
+ }
+ done := make(chan bool, 1)
+ go func() {
+ if _, err := io.Copy(&buf, stdout); err != nil {
+ t.Errorf("Copy of stdout failed: %v", err)
+ }
+ done <- true
+ }()
+ if err := session.Wait(); err != nil {
+ t.Fatalf("Remote command did not exit cleanly: %v", err)
+ }
+ <-done
+ actual := buf.String()
+ if actual != "golang" {
+ t.Fatalf("Remote shell did not return expected string: expected=golang, actual=%s", actual)
+ }
+}
+
+// Test that a simple string is returned via the Output helper,
+// and that stderr is discarded.
+func TestSessionOutput(t *testing.T) {
+ conn := dial(fixedOutputHandler, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("Unable to request new session: %v", err)
+ }
+ defer session.Close()
+
+ buf, err := session.Output("") // cmd is ignored by fixedOutputHandler
+ if err != nil {
+ t.Error("Remote command did not exit cleanly:", err)
+ }
+ w := "this-is-stdout."
+ g := string(buf)
+ if g != w {
+ t.Error("Remote command did not return expected string:")
+ t.Logf("want %q", w)
+ t.Logf("got %q", g)
+ }
+}
+
+// Test that both stdout and stderr are returned
+// via the CombinedOutput helper.
+func TestSessionCombinedOutput(t *testing.T) {
+ conn := dial(fixedOutputHandler, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("Unable to request new session: %v", err)
+ }
+ defer session.Close()
+
+ buf, err := session.CombinedOutput("") // cmd is ignored by fixedOutputHandler
+ if err != nil {
+ t.Error("Remote command did not exit cleanly:", err)
+ }
+ const stdout = "this-is-stdout."
+ const stderr = "this-is-stderr."
+ g := string(buf)
+ if g != stdout+stderr && g != stderr+stdout {
+ t.Error("Remote command did not return expected string:")
+ t.Logf("want %q, or %q", stdout+stderr, stderr+stdout)
+ t.Logf("got %q", g)
+ }
+}
+
+// Test non-0 exit status is returned correctly.
+func TestExitStatusNonZero(t *testing.T) {
+ conn := dial(exitStatusNonZeroHandler, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("Unable to request new session: %v", err)
+ }
+ defer session.Close()
+ if err := session.Shell(); err != nil {
+ t.Fatalf("Unable to execute command: %v", err)
+ }
+ err = session.Wait()
+ if err == nil {
+ t.Fatalf("expected command to fail but it didn't")
+ }
+ e, ok := err.(*ExitError)
+ if !ok {
+ t.Fatalf("expected *ExitError but got %T", err)
+ }
+ if e.ExitStatus() != 15 {
+ t.Fatalf("expected command to exit with 15 but got %v", e.ExitStatus())
+ }
+}
+
+// Test 0 exit status is returned correctly.
+func TestExitStatusZero(t *testing.T) {
+ conn := dial(exitStatusZeroHandler, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("Unable to request new session: %v", err)
+ }
+ defer session.Close()
+
+ if err := session.Shell(); err != nil {
+ t.Fatalf("Unable to execute command: %v", err)
+ }
+ err = session.Wait()
+ if err != nil {
+ t.Fatalf("expected nil but got %v", err)
+ }
+}
+
+// Test exit signal and status are both returned correctly.
+func TestExitSignalAndStatus(t *testing.T) {
+ conn := dial(exitSignalAndStatusHandler, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("Unable to request new session: %v", err)
+ }
+ defer session.Close()
+ if err := session.Shell(); err != nil {
+ t.Fatalf("Unable to execute command: %v", err)
+ }
+ err = session.Wait()
+ if err == nil {
+ t.Fatalf("expected command to fail but it didn't")
+ }
+ e, ok := err.(*ExitError)
+ if !ok {
+ t.Fatalf("expected *ExitError but got %T", err)
+ }
+ if e.Signal() != "TERM" || e.ExitStatus() != 15 {
+ t.Fatalf("expected command to exit with signal TERM and status 15 but got signal %s and status %v", e.Signal(), e.ExitStatus())
+ }
+}
+
+// Test that a known exit signal with no explicit exit status is mapped to 128+signal (TERM => 143).
+func TestKnownExitSignalOnly(t *testing.T) {
+ conn := dial(exitSignalHandler, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("Unable to request new session: %v", err)
+ }
+ defer session.Close()
+ if err := session.Shell(); err != nil {
+ t.Fatalf("Unable to execute command: %v", err)
+ }
+ err = session.Wait()
+ if err == nil {
+ t.Fatalf("expected command to fail but it didn't")
+ }
+ e, ok := err.(*ExitError)
+ if !ok {
+ t.Fatalf("expected *ExitError but got %T", err)
+ }
+ if e.Signal() != "TERM" || e.ExitStatus() != 143 {
+ t.Fatalf("expected command to exit with signal TERM and status 143 but got signal %s and status %v", e.Signal(), e.ExitStatus())
+ }
+}
+
+// Test that an exit signal missing from the signals map is reported with status 128.
+func TestUnknownExitSignal(t *testing.T) {
+ conn := dial(exitSignalUnknownHandler, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("Unable to request new session: %v", err)
+ }
+ defer session.Close()
+ if err := session.Shell(); err != nil {
+ t.Fatalf("Unable to execute command: %v", err)
+ }
+ err = session.Wait()
+ if err == nil {
+ t.Fatalf("expected command to fail but it didn't")
+ }
+ e, ok := err.(*ExitError)
+ if !ok {
+ t.Fatalf("expected *ExitError but got %T", err)
+ }
+ if e.Signal() != "SYS" || e.ExitStatus() != 128 {
+ t.Fatalf("expected command to exit with signal SYS and status 128 but got signal %s and status %v", e.Signal(), e.ExitStatus())
+ }
+}
+
+// Test that an *ExitError is not returned if the channel closes abruptly
+// without sending an exit status or signal.
+func TestExitWithoutStatusOrSignal(t *testing.T) {
+ conn := dial(exitWithoutSignalOrStatus, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("Unable to request new session: %v", err)
+ }
+ defer session.Close()
+ if err := session.Shell(); err != nil {
+ t.Fatalf("Unable to execute command: %v", err)
+ }
+ err = session.Wait()
+ if err == nil {
+ t.Fatalf("expected command to fail but it didn't")
+ }
+ _, ok := err.(*ExitError)
+ if ok {
+ // you can't actually test for errors.errorString
+ // because it's not exported.
+ t.Fatalf("expected *errorString but got %T", err)
+ }
+}
+
+// windowTestBytes is the number of bytes that we'll send to the SSH server.
+const windowTestBytes = 16000 * 200
+
+// TestServerWindow writes random data to the server. The server is expected to echo
+// the same data back, which is compared against the original.
+func TestServerWindow(t *testing.T) {
+ origBuf := bytes.NewBuffer(make([]byte, 0, windowTestBytes))
+ io.CopyN(origBuf, crypto_rand.Reader, windowTestBytes)
+ origBytes := origBuf.Bytes()
+
+ conn := dial(echoHandler, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer session.Close()
+ result := make(chan []byte)
+
+ go func() {
+ defer close(result)
+ echoedBuf := bytes.NewBuffer(make([]byte, 0, windowTestBytes))
+ serverStdout, err := session.StdoutPipe()
+ if err != nil {
+ t.Errorf("StdoutPipe failed: %v", err)
+ return
+ }
+ n, err := copyNRandomly("stdout", echoedBuf, serverStdout, windowTestBytes)
+ if err != nil && err != io.EOF {
+ t.Errorf("Read only %d bytes from server, expected %d: %v", n, windowTestBytes, err)
+ }
+ result <- echoedBuf.Bytes()
+ }()
+
+ serverStdin, err := session.StdinPipe()
+ if err != nil {
+ t.Fatalf("StdinPipe failed: %v", err)
+ }
+ written, err := copyNRandomly("stdin", serverStdin, origBuf, windowTestBytes)
+ if err != nil {
+ t.Fatalf("failed to copy origBuf to serverStdin: %v", err)
+ }
+ if written != windowTestBytes {
+ t.Fatalf("Wrote only %d of %d bytes to server", written, windowTestBytes)
+ }
+
+ echoedBytes := <-result
+
+ if !bytes.Equal(origBytes, echoedBytes) {
+ t.Fatalf("Echoed buffer differed from original, orig %d, echoed %d", len(origBytes), len(echoedBytes))
+ }
+}
+
+// Verify the client can handle a keepalive packet from the server.
+func TestClientHandlesKeepalives(t *testing.T) {
+ conn := dial(channelKeepaliveSender, t)
+ defer conn.Close()
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer session.Close()
+ if err := session.Shell(); err != nil {
+ t.Fatalf("Unable to execute command: %v", err)
+ }
+ err = session.Wait()
+ if err != nil {
+ t.Fatalf("expected nil but got: %v", err)
+ }
+}
+
+type exitStatusMsg struct {
+ Status uint32
+}
+
+type exitSignalMsg struct {
+ Signal string
+ CoreDumped bool
+ Errmsg string
+ Lang string
+}
+
+func handleTerminalRequests(in <-chan *Request) {
+ for req := range in {
+ ok := false
+ switch req.Type {
+ case "shell":
+ ok = true
+ if len(req.Payload) > 0 {
+ // We don't accept any commands, only the default shell.
+ ok = false
+ }
+ case "env":
+ ok = true
+ }
+ req.Reply(ok, nil)
+ }
+}
+
+func newServerShell(ch Channel, in <-chan *Request, prompt string) *terminal.Terminal {
+ term := terminal.NewTerminal(ch, prompt)
+ go handleTerminalRequests(in)
+ return term
+}
+
+func exitStatusZeroHandler(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+	// the exit status is all this handler's test checks; stdout is not examined
+ shell := newServerShell(ch, in, "> ")
+ readLine(shell, t)
+ sendStatus(0, ch, t)
+}
+
+func exitStatusNonZeroHandler(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+ shell := newServerShell(ch, in, "> ")
+ readLine(shell, t)
+ sendStatus(15, ch, t)
+}
+
+func exitSignalAndStatusHandler(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+ shell := newServerShell(ch, in, "> ")
+ readLine(shell, t)
+ sendStatus(15, ch, t)
+ sendSignal("TERM", ch, t)
+}
+
+func exitSignalHandler(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+ shell := newServerShell(ch, in, "> ")
+ readLine(shell, t)
+ sendSignal("TERM", ch, t)
+}
+
+func exitSignalUnknownHandler(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+ shell := newServerShell(ch, in, "> ")
+ readLine(shell, t)
+ sendSignal("SYS", ch, t)
+}
+
+func exitWithoutSignalOrStatus(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+ shell := newServerShell(ch, in, "> ")
+ readLine(shell, t)
+}
+
+func shellHandler(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+ // this string is returned to stdout
+ shell := newServerShell(ch, in, "golang")
+ readLine(shell, t)
+ sendStatus(0, ch, t)
+}
+
+// Ignores the command, writes fixed strings to stderr and stdout.
+// Strings are "this-is-stdout." and "this-is-stderr.".
+func fixedOutputHandler(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+ _, err := ch.Read(nil)
+
+ req, ok := <-in
+ if !ok {
+ t.Fatalf("error: expected channel request, got: %#v", err)
+ return
+ }
+
+ // ignore request, always send some text
+ req.Reply(true, nil)
+
+ _, err = io.WriteString(ch, "this-is-stdout.")
+ if err != nil {
+ t.Fatalf("error writing on server: %v", err)
+ }
+ _, err = io.WriteString(ch.Stderr(), "this-is-stderr.")
+ if err != nil {
+ t.Fatalf("error writing on server: %v", err)
+ }
+ sendStatus(0, ch, t)
+}
+
+func readLine(shell *terminal.Terminal, t *testing.T) {
+ if _, err := shell.ReadLine(); err != nil && err != io.EOF {
+ t.Errorf("unable to read line: %v", err)
+ }
+}
+
+func sendStatus(status uint32, ch Channel, t *testing.T) {
+ msg := exitStatusMsg{
+ Status: status,
+ }
+ if _, err := ch.SendRequest("exit-status", false, Marshal(&msg)); err != nil {
+ t.Errorf("unable to send status: %v", err)
+ }
+}
+
+func sendSignal(signal string, ch Channel, t *testing.T) {
+ sig := exitSignalMsg{
+ Signal: signal,
+ CoreDumped: false,
+ Errmsg: "Process terminated",
+ Lang: "en-GB-oed",
+ }
+ if _, err := ch.SendRequest("exit-signal", false, Marshal(&sig)); err != nil {
+ t.Errorf("unable to send signal: %v", err)
+ }
+}
+
+func discardHandler(ch Channel, t *testing.T) {
+ defer ch.Close()
+ io.Copy(ioutil.Discard, ch)
+}
+
+func echoHandler(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+ if n, err := copyNRandomly("echohandler", ch, ch, windowTestBytes); err != nil {
+ t.Errorf("short write, wrote %d, expected %d: %v ", n, windowTestBytes, err)
+ }
+}
+
+// copyNRandomly copies n bytes from src to dst. It uses a randomly sized
+// buffer on each iteration to exercise more code paths.
+func copyNRandomly(title string, dst io.Writer, src io.Reader, n int) (int, error) {
+ var (
+ buf = make([]byte, 32*1024)
+ written int
+ remaining = n
+ )
+ for remaining > 0 {
+ l := rand.Intn(1 << 15)
+ if remaining < l {
+ l = remaining
+ }
+ nr, er := src.Read(buf[:l])
+ nw, ew := dst.Write(buf[:nr])
+ remaining -= nw
+ written += nw
+ if ew != nil {
+ return written, ew
+ }
+ if nr != nw {
+ return written, io.ErrShortWrite
+ }
+ if er != nil && er != io.EOF {
+ return written, er
+ }
+ }
+ return written, nil
+}
+
+func channelKeepaliveSender(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+ shell := newServerShell(ch, in, "> ")
+ readLine(shell, t)
+ if _, err := ch.SendRequest("keepalive@openssh.com", true, nil); err != nil {
+ t.Errorf("unable to send channel keepalive request: %v", err)
+ }
+ sendStatus(0, ch, t)
+}
+
+func TestClientWriteEOF(t *testing.T) {
+ conn := dial(simpleEchoHandler, t)
+ defer conn.Close()
+
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer session.Close()
+ stdin, err := session.StdinPipe()
+ if err != nil {
+ t.Fatalf("StdinPipe failed: %v", err)
+ }
+ stdout, err := session.StdoutPipe()
+ if err != nil {
+ t.Fatalf("StdoutPipe failed: %v", err)
+ }
+
+ data := []byte(`0000`)
+ _, err = stdin.Write(data)
+ if err != nil {
+ t.Fatalf("Write failed: %v", err)
+ }
+ stdin.Close()
+
+ res, err := ioutil.ReadAll(stdout)
+ if err != nil {
+ t.Fatalf("Read failed: %v", err)
+ }
+
+ if !bytes.Equal(data, res) {
+ t.Fatalf("Read differed from write, wrote: %v, read: %v", data, res)
+ }
+}
+
+func simpleEchoHandler(ch Channel, in <-chan *Request, t *testing.T) {
+ defer ch.Close()
+ data, err := ioutil.ReadAll(ch)
+ if err != nil {
+ t.Errorf("handler read error: %v", err)
+ }
+ _, err = ch.Write(data)
+ if err != nil {
+ t.Errorf("handler write error: %v", err)
+ }
+}
+
+func TestSessionID(t *testing.T) {
+ c1, c2, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+ defer c1.Close()
+ defer c2.Close()
+
+ serverID := make(chan []byte, 1)
+ clientID := make(chan []byte, 1)
+
+ serverConf := &ServerConfig{
+ NoClientAuth: true,
+ }
+ serverConf.AddHostKey(testSigners["ecdsa"])
+ clientConf := &ClientConfig{
+ User: "user",
+ }
+
+ go func() {
+ conn, chans, reqs, err := NewServerConn(c1, serverConf)
+ if err != nil {
+ t.Fatalf("server handshake: %v", err)
+ }
+ serverID <- conn.SessionID()
+ go DiscardRequests(reqs)
+ for ch := range chans {
+ ch.Reject(Prohibited, "")
+ }
+ }()
+
+ go func() {
+ conn, chans, reqs, err := NewClientConn(c2, "", clientConf)
+ if err != nil {
+ t.Fatalf("client handshake: %v", err)
+ }
+ clientID <- conn.SessionID()
+ go DiscardRequests(reqs)
+ for ch := range chans {
+ ch.Reject(Prohibited, "")
+ }
+ }()
+
+ s := <-serverID
+ c := <-clientID
+	if !bytes.Equal(s, c) {
+ t.Errorf("server session ID (%x) != client session ID (%x)", s, c)
+ } else if len(s) == 0 {
+ t.Errorf("client and server SessionID were empty.")
+ }
+}
+
+type noReadConn struct {
+ readSeen bool
+ net.Conn
+}
+
+func (c *noReadConn) Close() error {
+ return nil
+}
+
+func (c *noReadConn) Read(b []byte) (int, error) {
+ c.readSeen = true
+ return 0, errors.New("noReadConn error")
+}
+
+func TestInvalidServerConfiguration(t *testing.T) {
+ c1, c2, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+ defer c1.Close()
+ defer c2.Close()
+
+ serveConn := noReadConn{Conn: c1}
+ serverConf := &ServerConfig{}
+
+ NewServerConn(&serveConn, serverConf)
+ if serveConn.readSeen {
+ t.Fatalf("NewServerConn attempted to Read() from Conn while configuration is missing host key")
+ }
+
+ serverConf.AddHostKey(testSigners["ecdsa"])
+
+ NewServerConn(&serveConn, serverConf)
+ if serveConn.readSeen {
+ t.Fatalf("NewServerConn attempted to Read() from Conn while configuration is missing authentication method")
+ }
+}
+
+func TestHostKeyAlgorithms(t *testing.T) {
+ serverConf := &ServerConfig{
+ NoClientAuth: true,
+ }
+ serverConf.AddHostKey(testSigners["rsa"])
+ serverConf.AddHostKey(testSigners["ecdsa"])
+
+ connect := func(clientConf *ClientConfig, want string) {
+ var alg string
+ clientConf.HostKeyCallback = func(h string, a net.Addr, key PublicKey) error {
+ alg = key.Type()
+ return nil
+ }
+ c1, c2, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+ defer c1.Close()
+ defer c2.Close()
+
+ go NewServerConn(c1, serverConf)
+ _, _, _, err = NewClientConn(c2, "", clientConf)
+ if err != nil {
+ t.Fatalf("NewClientConn: %v", err)
+ }
+ if alg != want {
+ t.Errorf("selected key algorithm %s, want %s", alg, want)
+ }
+ }
+
+ // By default, we get the preferred algorithm, which is ECDSA 256.
+
+ clientConf := &ClientConfig{}
+ connect(clientConf, KeyAlgoECDSA256)
+
+ // Client asks for RSA explicitly.
+ clientConf.HostKeyAlgorithms = []string{KeyAlgoRSA}
+ connect(clientConf, KeyAlgoRSA)
+
+ c1, c2, err := netPipe()
+ if err != nil {
+ t.Fatalf("netPipe: %v", err)
+ }
+ defer c1.Close()
+ defer c2.Close()
+
+ go NewServerConn(c1, serverConf)
+ clientConf.HostKeyAlgorithms = []string{"nonexistent-hostkey-algo"}
+ _, _, _, err = NewClientConn(c2, "", clientConf)
+ if err == nil {
+ t.Fatal("succeeded connecting with unknown hostkey algorithm")
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/tcpip.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/tcpip.go
new file mode 100644
index 00000000000..6151241ff08
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/tcpip.go
@@ -0,0 +1,407 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "math/rand"
+ "net"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+)
+
+// Listen requests the remote peer open a listening socket on
+// addr. Incoming connections will be available by calling Accept on
+// the returned net.Listener. The listener must be serviced, or the
+// SSH connection may hang.
+func (c *Client) Listen(n, addr string) (net.Listener, error) {
+ laddr, err := net.ResolveTCPAddr(n, addr)
+ if err != nil {
+ return nil, err
+ }
+ return c.ListenTCP(laddr)
+}
+
+// Automatic port allocation is broken with OpenSSH before 6.0. See
+// also https://bugzilla.mindrot.org/show_bug.cgi?id=2017. In
+// particular, OpenSSH 5.9 sends a channelOpenMsg with port number 0,
+// rather than the actual port number. This means you can never open
+// two different listeners with auto allocated ports. We work around
+// this by trying explicit ports until we succeed.
+
+const openSSHPrefix = "OpenSSH_"
+
+var portRandomizer = rand.New(rand.NewSource(time.Now().UnixNano()))
+
+// isBrokenOpenSSHVersion returns true if the given version string
+// specifies a version of OpenSSH that is known to have a bug in port
+// forwarding.
+func isBrokenOpenSSHVersion(versionStr string) bool {
+ i := strings.Index(versionStr, openSSHPrefix)
+ if i < 0 {
+ return false
+ }
+ i += len(openSSHPrefix)
+ j := i
+ for ; j < len(versionStr); j++ {
+ if versionStr[j] < '0' || versionStr[j] > '9' {
+ break
+ }
+ }
+ version, _ := strconv.Atoi(versionStr[i:j])
+ return version < 6
+}
+
+// autoPortListenWorkaround simulates automatic port allocation by
+// trying random ports repeatedly.
+func (c *Client) autoPortListenWorkaround(laddr *net.TCPAddr) (net.Listener, error) {
+ var sshListener net.Listener
+ var err error
+ const tries = 10
+ for i := 0; i < tries; i++ {
+ addr := *laddr
+ addr.Port = 1024 + portRandomizer.Intn(60000)
+ sshListener, err = c.ListenTCP(&addr)
+ if err == nil {
+ laddr.Port = addr.Port
+ return sshListener, err
+ }
+ }
+ return nil, fmt.Errorf("ssh: listen on random port failed after %d tries: %v", tries, err)
+}
+
+// RFC 4254 7.1
+type channelForwardMsg struct {
+ addr string
+ rport uint32
+}
+
+// ListenTCP requests the remote peer open a listening socket
+// on laddr. Incoming connections will be available by calling
+// Accept on the returned net.Listener.
+func (c *Client) ListenTCP(laddr *net.TCPAddr) (net.Listener, error) {
+ if laddr.Port == 0 && isBrokenOpenSSHVersion(string(c.ServerVersion())) {
+ return c.autoPortListenWorkaround(laddr)
+ }
+
+ m := channelForwardMsg{
+ laddr.IP.String(),
+ uint32(laddr.Port),
+ }
+ // send message
+ ok, resp, err := c.SendRequest("tcpip-forward", true, Marshal(&m))
+ if err != nil {
+ return nil, err
+ }
+ if !ok {
+ return nil, errors.New("ssh: tcpip-forward request denied by peer")
+ }
+
+ // If the original port was 0, then the remote side will
+ // supply a real port number in the response.
+ if laddr.Port == 0 {
+ var p struct {
+ Port uint32
+ }
+ if err := Unmarshal(resp, &p); err != nil {
+ return nil, err
+ }
+ laddr.Port = int(p.Port)
+ }
+
+ // Register this forward, using the port number we obtained.
+ ch := c.forwards.add(*laddr)
+
+ return &tcpListener{laddr, c, ch}, nil
+}
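+
+// A minimal, hedged remote-forwarding sketch (assumes an established *Client;
+// the address and handleConn are illustrative placeholders):
+//
+//	l, err := client.Listen("tcp", "0.0.0.0:8080")
+//	if err != nil {
+//		// handle error
+//	}
+//	defer l.Close()
+//	for {
+//		conn, err := l.Accept()
+//		if err != nil {
+//			break // the listener was closed
+//		}
+//		go handleConn(conn)
+//	}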
+
+// forwardList stores a mapping between remote
+// forward requests and the tcpListeners.
+type forwardList struct {
+ sync.Mutex
+ entries []forwardEntry
+}
+
+// forwardEntry represents an established mapping of a laddr on a
+// remote ssh server to a channel connected to a tcpListener.
+type forwardEntry struct {
+ laddr net.TCPAddr
+ c chan forward
+}
+
+// forward represents an incoming forwarded tcpip connection. The
+// arguments to add/remove/lookup should be address as specified in
+// the original forward-request.
+type forward struct {
+ newCh NewChannel // the ssh client channel underlying this forward
+ raddr *net.TCPAddr // the raddr of the incoming connection
+}
+
+func (l *forwardList) add(addr net.TCPAddr) chan forward {
+ l.Lock()
+ defer l.Unlock()
+ f := forwardEntry{
+ addr,
+ make(chan forward, 1),
+ }
+ l.entries = append(l.entries, f)
+ return f.c
+}
+
+// See RFC 4254, section 7.2
+type forwardedTCPPayload struct {
+ Addr string
+ Port uint32
+ OriginAddr string
+ OriginPort uint32
+}
+
+// parseTCPAddr parses the originating address from the remote into a *net.TCPAddr.
+func parseTCPAddr(addr string, port uint32) (*net.TCPAddr, error) {
+ if port == 0 || port > 65535 {
+ return nil, fmt.Errorf("ssh: port number out of range: %d", port)
+ }
+	ip := net.ParseIP(addr)
+ if ip == nil {
+ return nil, fmt.Errorf("ssh: cannot parse IP address %q", addr)
+ }
+ return &net.TCPAddr{IP: ip, Port: int(port)}, nil
+}
+
+func (l *forwardList) handleChannels(in <-chan NewChannel) {
+ for ch := range in {
+ var payload forwardedTCPPayload
+ if err := Unmarshal(ch.ExtraData(), &payload); err != nil {
+ ch.Reject(ConnectionFailed, "could not parse forwarded-tcpip payload: "+err.Error())
+ continue
+ }
+
+ // RFC 4254 section 7.2 specifies that incoming
+ // addresses should list the address, in string
+ // format. It is implied that this should be an IP
+ // address, as it would be impossible to connect to it
+ // otherwise.
+ laddr, err := parseTCPAddr(payload.Addr, payload.Port)
+ if err != nil {
+ ch.Reject(ConnectionFailed, err.Error())
+ continue
+ }
+ raddr, err := parseTCPAddr(payload.OriginAddr, payload.OriginPort)
+ if err != nil {
+ ch.Reject(ConnectionFailed, err.Error())
+ continue
+ }
+
+ if ok := l.forward(*laddr, *raddr, ch); !ok {
+ // Section 7.2, implementations MUST reject spurious incoming
+ // connections.
+ ch.Reject(Prohibited, "no forward for address")
+ continue
+ }
+ }
+}
+
+// remove removes the forward entry, and the channel feeding its
+// listener.
+func (l *forwardList) remove(addr net.TCPAddr) {
+ l.Lock()
+ defer l.Unlock()
+ for i, f := range l.entries {
+ if addr.IP.Equal(f.laddr.IP) && addr.Port == f.laddr.Port {
+ l.entries = append(l.entries[:i], l.entries[i+1:]...)
+ close(f.c)
+ return
+ }
+ }
+}
+
+// closeAll closes and clears all forwards.
+func (l *forwardList) closeAll() {
+ l.Lock()
+ defer l.Unlock()
+ for _, f := range l.entries {
+ close(f.c)
+ }
+ l.entries = nil
+}
+
+func (l *forwardList) forward(laddr, raddr net.TCPAddr, ch NewChannel) bool {
+ l.Lock()
+ defer l.Unlock()
+ for _, f := range l.entries {
+ if laddr.IP.Equal(f.laddr.IP) && laddr.Port == f.laddr.Port {
+ f.c <- forward{ch, &raddr}
+ return true
+ }
+ }
+ return false
+}
+
+type tcpListener struct {
+ laddr *net.TCPAddr
+
+ conn *Client
+ in <-chan forward
+}
+
+// Accept waits for and returns the next connection to the listener.
+func (l *tcpListener) Accept() (net.Conn, error) {
+ s, ok := <-l.in
+ if !ok {
+ return nil, io.EOF
+ }
+ ch, incoming, err := s.newCh.Accept()
+ if err != nil {
+ return nil, err
+ }
+ go DiscardRequests(incoming)
+
+ return &tcpChanConn{
+ Channel: ch,
+ laddr: l.laddr,
+ raddr: s.raddr,
+ }, nil
+}
+
+// Close closes the listener.
+func (l *tcpListener) Close() error {
+ m := channelForwardMsg{
+ l.laddr.IP.String(),
+ uint32(l.laddr.Port),
+ }
+
+ // this also closes the listener.
+ l.conn.forwards.remove(*l.laddr)
+ ok, _, err := l.conn.SendRequest("cancel-tcpip-forward", true, Marshal(&m))
+ if err == nil && !ok {
+ err = errors.New("ssh: cancel-tcpip-forward failed")
+ }
+ return err
+}
+
+// Addr returns the listener's network address.
+func (l *tcpListener) Addr() net.Addr {
+ return l.laddr
+}
+
+// Dial initiates a connection to the addr from the remote host.
+// The resulting connection has a zero LocalAddr() and RemoteAddr().
+func (c *Client) Dial(n, addr string) (net.Conn, error) {
+ // Parse the address into host and numeric port.
+ host, portString, err := net.SplitHostPort(addr)
+ if err != nil {
+ return nil, err
+ }
+ port, err := strconv.ParseUint(portString, 10, 16)
+ if err != nil {
+ return nil, err
+ }
+ // Use a zero address for local and remote address.
+ zeroAddr := &net.TCPAddr{
+ IP: net.IPv4zero,
+ Port: 0,
+ }
+ ch, err := c.dial(net.IPv4zero.String(), 0, host, int(port))
+ if err != nil {
+ return nil, err
+ }
+ return &tcpChanConn{
+ Channel: ch,
+ laddr: zeroAddr,
+ raddr: zeroAddr,
+ }, nil
+}
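+
+// A minimal, hedged tunnelling sketch (assumes an established *Client; the
+// target address is illustrative only):
+//
+//	remote, err := client.Dial("tcp", "10.0.0.5:80")
+//	if err != nil {
+//		// handle error
+//	}
+//	defer remote.Close()
+//	fmt.Fprint(remote, "GET / HTTP/1.0\r\n\r\n")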
+
+// DialTCP connects to the remote address raddr on the network net,
+// which must be "tcp", "tcp4", or "tcp6". If laddr is not nil, it is used
+// as the local address for the connection.
+func (c *Client) DialTCP(n string, laddr, raddr *net.TCPAddr) (net.Conn, error) {
+ if laddr == nil {
+ laddr = &net.TCPAddr{
+ IP: net.IPv4zero,
+ Port: 0,
+ }
+ }
+ ch, err := c.dial(laddr.IP.String(), laddr.Port, raddr.IP.String(), raddr.Port)
+ if err != nil {
+ return nil, err
+ }
+ return &tcpChanConn{
+ Channel: ch,
+ laddr: laddr,
+ raddr: raddr,
+ }, nil
+}
+
+// RFC 4254 7.2
+type channelOpenDirectMsg struct {
+ raddr string
+ rport uint32
+ laddr string
+ lport uint32
+}
+
+func (c *Client) dial(laddr string, lport int, raddr string, rport int) (Channel, error) {
+ msg := channelOpenDirectMsg{
+ raddr: raddr,
+ rport: uint32(rport),
+ laddr: laddr,
+ lport: uint32(lport),
+ }
+ ch, in, err := c.OpenChannel("direct-tcpip", Marshal(&msg))
+ if err != nil {
+ return nil, err
+ }
+ go DiscardRequests(in)
+ return ch, err
+}
+
+type tcpChan struct {
+ Channel // the backing channel
+}
+
+// tcpChanConn fulfills the net.Conn interface without
+// the tcpChan having to hold laddr or raddr directly.
+type tcpChanConn struct {
+ Channel
+ laddr, raddr net.Addr
+}
+
+// LocalAddr returns the local network address.
+func (t *tcpChanConn) LocalAddr() net.Addr {
+ return t.laddr
+}
+
+// RemoteAddr returns the remote network address.
+func (t *tcpChanConn) RemoteAddr() net.Addr {
+ return t.raddr
+}
+
+// SetDeadline sets the read and write deadlines associated
+// with the connection.
+func (t *tcpChanConn) SetDeadline(deadline time.Time) error {
+ if err := t.SetReadDeadline(deadline); err != nil {
+ return err
+ }
+ return t.SetWriteDeadline(deadline)
+}
+
+// SetReadDeadline exists to satisfy the net.Conn interface
+// but is not implemented by this type. It always returns an error.
+func (t *tcpChanConn) SetReadDeadline(deadline time.Time) error {
+ return errors.New("ssh: tcpChan: deadline not supported")
+}
+
+// SetWriteDeadline exists to satisfy the net.Conn interface
+// but is not implemented by this type. It always returns an error.
+func (t *tcpChanConn) SetWriteDeadline(deadline time.Time) error {
+ return errors.New("ssh: tcpChan: deadline not supported")
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/tcpip_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/tcpip_test.go
new file mode 100644
index 00000000000..f1265cb4964
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/tcpip_test.go
@@ -0,0 +1,20 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "testing"
+)
+
+func TestAutoPortListenBroken(t *testing.T) {
+ broken := "SSH-2.0-OpenSSH_5.9hh11"
+ works := "SSH-2.0-OpenSSH_6.1"
+ if !isBrokenOpenSSHVersion(broken) {
+ t.Errorf("version %q not marked as broken", broken)
+ }
+ if isBrokenOpenSSHVersion(works) {
+ t.Errorf("version %q marked as broken", works)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/terminal.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/terminal.go
new file mode 100644
index 00000000000..741eeb13f0f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/terminal.go
@@ -0,0 +1,892 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package terminal
+
+import (
+ "bytes"
+ "io"
+ "sync"
+ "unicode/utf8"
+)
+
+// EscapeCodes contains escape sequences that can be written to the terminal in
+// order to achieve different styles of text.
+type EscapeCodes struct {
+ // Foreground colors
+ Black, Red, Green, Yellow, Blue, Magenta, Cyan, White []byte
+
+ // Reset all attributes
+ Reset []byte
+}
+
+var vt100EscapeCodes = EscapeCodes{
+ Black: []byte{keyEscape, '[', '3', '0', 'm'},
+ Red: []byte{keyEscape, '[', '3', '1', 'm'},
+ Green: []byte{keyEscape, '[', '3', '2', 'm'},
+ Yellow: []byte{keyEscape, '[', '3', '3', 'm'},
+ Blue: []byte{keyEscape, '[', '3', '4', 'm'},
+ Magenta: []byte{keyEscape, '[', '3', '5', 'm'},
+ Cyan: []byte{keyEscape, '[', '3', '6', 'm'},
+ White: []byte{keyEscape, '[', '3', '7', 'm'},
+
+ Reset: []byte{keyEscape, '[', '0', 'm'},
+}
+
+// Terminal contains the state for running a VT100 terminal that is capable of
+// reading lines of input.
+type Terminal struct {
+	// AutoCompleteCallback, if non-nil, is called for each keypress with
+ // the full input line and the current position of the cursor (in
+ // bytes, as an index into |line|). If it returns ok=false, the key
+ // press is processed normally. Otherwise it returns a replacement line
+ // and the new cursor position.
+ AutoCompleteCallback func(line string, pos int, key rune) (newLine string, newPos int, ok bool)
+
+ // Escape contains a pointer to the escape codes for this terminal.
+ // It's always a valid pointer, although the escape codes themselves
+ // may be empty if the terminal doesn't support them.
+ Escape *EscapeCodes
+
+ // lock protects the terminal and the state in this object from
+ // concurrent processing of a key press and a Write() call.
+ lock sync.Mutex
+
+ c io.ReadWriter
+ prompt []rune
+
+ // line is the current line being entered.
+ line []rune
+ // pos is the logical position of the cursor in line
+ pos int
+ // echo is true if local echo is enabled
+ echo bool
+ // pasteActive is true iff there is a bracketed paste operation in
+ // progress.
+ pasteActive bool
+
+ // cursorX contains the current X value of the cursor where the left
+ // edge is 0. cursorY contains the row number where the first row of
+ // the current line is 0.
+ cursorX, cursorY int
+ // maxLine is the greatest value of cursorY so far.
+ maxLine int
+
+ termWidth, termHeight int
+
+ // outBuf contains the terminal data to be sent.
+ outBuf []byte
+ // remainder contains the remainder of any partial key sequences after
+ // a read. It aliases into inBuf.
+ remainder []byte
+ inBuf [256]byte
+
+ // history contains previously entered commands so that they can be
+ // accessed with the up and down keys.
+ history stRingBuffer
+ // historyIndex stores the currently accessed history entry, where zero
+ // means the immediately previous entry.
+ historyIndex int
+ // When navigating up and down the history it's possible to return to
+ // the incomplete, initial line. That value is stored in
+ // historyPending.
+ historyPending string
+}
+
+// NewTerminal runs a VT100 terminal on the given ReadWriter. If the ReadWriter is
+// a local terminal, that terminal must first have been put into raw mode.
+// prompt is a string that is written at the start of each input line (i.e.
+// "> ").
+func NewTerminal(c io.ReadWriter, prompt string) *Terminal {
+ return &Terminal{
+ Escape: &vt100EscapeCodes,
+ c: c,
+ prompt: []rune(prompt),
+ termWidth: 80,
+ termHeight: 24,
+ echo: true,
+ historyIndex: -1,
+ }
+}
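+
+// A minimal, hedged usage sketch (c can be any io.ReadWriter, for example an
+// ssh Channel; a local terminal must already be in raw mode):
+//
+//	term := NewTerminal(c, "> ")
+//	for {
+//		line, err := term.ReadLine()
+//		if err != nil {
+//			break // io.EOF when Ctrl-D is pressed on an empty line
+//		}
+//		_ = line // process the entered line
+//	}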
+
+const (
+ keyCtrlD = 4
+ keyCtrlU = 21
+ keyEnter = '\r'
+ keyEscape = 27
+ keyBackspace = 127
+ keyUnknown = 0xd800 /* UTF-16 surrogate area */ + iota
+ keyUp
+ keyDown
+ keyLeft
+ keyRight
+ keyAltLeft
+ keyAltRight
+ keyHome
+ keyEnd
+ keyDeleteWord
+ keyDeleteLine
+ keyClearScreen
+ keyPasteStart
+ keyPasteEnd
+)
+
+var pasteStart = []byte{keyEscape, '[', '2', '0', '0', '~'}
+var pasteEnd = []byte{keyEscape, '[', '2', '0', '1', '~'}
+
+// bytesToKey tries to parse a key sequence from b. If successful, it returns
+// the key and the remainder of the input. Otherwise it returns utf8.RuneError.
+func bytesToKey(b []byte, pasteActive bool) (rune, []byte) {
+ if len(b) == 0 {
+ return utf8.RuneError, nil
+ }
+
+ if !pasteActive {
+ switch b[0] {
+ case 1: // ^A
+ return keyHome, b[1:]
+ case 5: // ^E
+ return keyEnd, b[1:]
+ case 8: // ^H
+ return keyBackspace, b[1:]
+ case 11: // ^K
+ return keyDeleteLine, b[1:]
+ case 12: // ^L
+ return keyClearScreen, b[1:]
+ case 23: // ^W
+ return keyDeleteWord, b[1:]
+ }
+ }
+
+ if b[0] != keyEscape {
+ if !utf8.FullRune(b) {
+ return utf8.RuneError, b
+ }
+ r, l := utf8.DecodeRune(b)
+ return r, b[l:]
+ }
+
+ if !pasteActive && len(b) >= 3 && b[0] == keyEscape && b[1] == '[' {
+ switch b[2] {
+ case 'A':
+ return keyUp, b[3:]
+ case 'B':
+ return keyDown, b[3:]
+ case 'C':
+ return keyRight, b[3:]
+ case 'D':
+ return keyLeft, b[3:]
+ case 'H':
+ return keyHome, b[3:]
+ case 'F':
+ return keyEnd, b[3:]
+ }
+ }
+
+ if !pasteActive && len(b) >= 6 && b[0] == keyEscape && b[1] == '[' && b[2] == '1' && b[3] == ';' && b[4] == '3' {
+ switch b[5] {
+ case 'C':
+ return keyAltRight, b[6:]
+ case 'D':
+ return keyAltLeft, b[6:]
+ }
+ }
+
+ if !pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteStart) {
+ return keyPasteStart, b[6:]
+ }
+
+ if pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteEnd) {
+ return keyPasteEnd, b[6:]
+ }
+
+ // If we get here then we have a key that we don't recognise, or a
+ // partial sequence. It's not clear how one should find the end of a
+ // sequence without knowing them all, but it seems that [a-zA-Z~] only
+ // appears at the end of a sequence.
+	for i, c := range b {
+ if c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' || c == '~' {
+ return keyUnknown, b[i+1:]
+ }
+ }
+
+ return utf8.RuneError, b
+}
+
+// queue appends data to the end of t.outBuf
+func (t *Terminal) queue(data []rune) {
+ t.outBuf = append(t.outBuf, []byte(string(data))...)
+}
+
+var eraseUnderCursor = []rune{' ', keyEscape, '[', 'D'}
+var space = []rune{' '}
+
+func isPrintable(key rune) bool {
+ isInSurrogateArea := key >= 0xd800 && key <= 0xdbff
+ return key >= 32 && !isInSurrogateArea
+}
+
+// moveCursorToPos appends data to t.outBuf which will move the cursor to the
+// given, logical position in the text.
+func (t *Terminal) moveCursorToPos(pos int) {
+ if !t.echo {
+ return
+ }
+
+ x := visualLength(t.prompt) + pos
+ y := x / t.termWidth
+ x = x % t.termWidth
+
+ up := 0
+ if y < t.cursorY {
+ up = t.cursorY - y
+ }
+
+ down := 0
+ if y > t.cursorY {
+ down = y - t.cursorY
+ }
+
+ left := 0
+ if x < t.cursorX {
+ left = t.cursorX - x
+ }
+
+ right := 0
+ if x > t.cursorX {
+ right = x - t.cursorX
+ }
+
+ t.cursorX = x
+ t.cursorY = y
+ t.move(up, down, left, right)
+}
+
+func (t *Terminal) move(up, down, left, right int) {
+ movement := make([]rune, 3*(up+down+left+right))
+ m := movement
+ for i := 0; i < up; i++ {
+ m[0] = keyEscape
+ m[1] = '['
+ m[2] = 'A'
+ m = m[3:]
+ }
+ for i := 0; i < down; i++ {
+ m[0] = keyEscape
+ m[1] = '['
+ m[2] = 'B'
+ m = m[3:]
+ }
+ for i := 0; i < left; i++ {
+ m[0] = keyEscape
+ m[1] = '['
+ m[2] = 'D'
+ m = m[3:]
+ }
+ for i := 0; i < right; i++ {
+ m[0] = keyEscape
+ m[1] = '['
+ m[2] = 'C'
+ m = m[3:]
+ }
+
+ t.queue(movement)
+}
+
+func (t *Terminal) clearLineToRight() {
+ op := []rune{keyEscape, '[', 'K'}
+ t.queue(op)
+}
+
+const maxLineLength = 4096
+
+func (t *Terminal) setLine(newLine []rune, newPos int) {
+ if t.echo {
+ t.moveCursorToPos(0)
+ t.writeLine(newLine)
+ for i := len(newLine); i < len(t.line); i++ {
+ t.writeLine(space)
+ }
+ t.moveCursorToPos(newPos)
+ }
+ t.line = newLine
+ t.pos = newPos
+}
+
+func (t *Terminal) advanceCursor(places int) {
+ t.cursorX += places
+ t.cursorY += t.cursorX / t.termWidth
+ if t.cursorY > t.maxLine {
+ t.maxLine = t.cursorY
+ }
+ t.cursorX = t.cursorX % t.termWidth
+
+ if places > 0 && t.cursorX == 0 {
+ // Normally terminals will advance the current position
+ // when writing a character. But that doesn't happen
+ // for the last character in a line. However, when
+ // writing a character (except a new line) that causes
+ // a line wrap, the position will be advanced two
+ // places.
+ //
+ // So, if we are stopping at the end of a line, we
+ // need to write a newline so that our cursor can be
+ // advanced to the next line.
+ t.outBuf = append(t.outBuf, '\n')
+ }
+}
+
+func (t *Terminal) eraseNPreviousChars(n int) {
+ if n == 0 {
+ return
+ }
+
+ if t.pos < n {
+ n = t.pos
+ }
+ t.pos -= n
+ t.moveCursorToPos(t.pos)
+
+ copy(t.line[t.pos:], t.line[n+t.pos:])
+ t.line = t.line[:len(t.line)-n]
+ if t.echo {
+ t.writeLine(t.line[t.pos:])
+ for i := 0; i < n; i++ {
+ t.queue(space)
+ }
+ t.advanceCursor(n)
+ t.moveCursorToPos(t.pos)
+ }
+}
+
+// countToLeftWord returns the number of characters from the cursor to the
+// start of the previous word.
+func (t *Terminal) countToLeftWord() int {
+ if t.pos == 0 {
+ return 0
+ }
+
+ pos := t.pos - 1
+ for pos > 0 {
+ if t.line[pos] != ' ' {
+ break
+ }
+ pos--
+ }
+ for pos > 0 {
+ if t.line[pos] == ' ' {
+ pos++
+ break
+ }
+ pos--
+ }
+
+ return t.pos - pos
+}
+
+// countToRightWord returns the number of characters from the cursor to the
+// start of the next word.
+func (t *Terminal) countToRightWord() int {
+ pos := t.pos
+ for pos < len(t.line) {
+ if t.line[pos] == ' ' {
+ break
+ }
+ pos++
+ }
+ for pos < len(t.line) {
+ if t.line[pos] != ' ' {
+ break
+ }
+ pos++
+ }
+ return pos - t.pos
+}
+
+// visualLength returns the number of visible glyphs in s.
+func visualLength(runes []rune) int {
+ inEscapeSeq := false
+ length := 0
+
+ for _, r := range runes {
+ switch {
+ case inEscapeSeq:
+ if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') {
+ inEscapeSeq = false
+ }
+ case r == '\x1b':
+ inEscapeSeq = true
+ default:
+ length++
+ }
+ }
+
+ return length
+}
+
+// handleKey processes the given key and, optionally, returns a line of text
+// that the user has entered.
+func (t *Terminal) handleKey(key rune) (line string, ok bool) {
+ if t.pasteActive && key != keyEnter {
+ t.addKeyToLine(key)
+ return
+ }
+
+ switch key {
+ case keyBackspace:
+ if t.pos == 0 {
+ return
+ }
+ t.eraseNPreviousChars(1)
+ case keyAltLeft:
+ // move left by a word.
+ t.pos -= t.countToLeftWord()
+ t.moveCursorToPos(t.pos)
+ case keyAltRight:
+ // move right by a word.
+ t.pos += t.countToRightWord()
+ t.moveCursorToPos(t.pos)
+ case keyLeft:
+ if t.pos == 0 {
+ return
+ }
+ t.pos--
+ t.moveCursorToPos(t.pos)
+ case keyRight:
+ if t.pos == len(t.line) {
+ return
+ }
+ t.pos++
+ t.moveCursorToPos(t.pos)
+ case keyHome:
+ if t.pos == 0 {
+ return
+ }
+ t.pos = 0
+ t.moveCursorToPos(t.pos)
+ case keyEnd:
+ if t.pos == len(t.line) {
+ return
+ }
+ t.pos = len(t.line)
+ t.moveCursorToPos(t.pos)
+ case keyUp:
+ entry, ok := t.history.NthPreviousEntry(t.historyIndex + 1)
+ if !ok {
+ return "", false
+ }
+ if t.historyIndex == -1 {
+ t.historyPending = string(t.line)
+ }
+ t.historyIndex++
+ runes := []rune(entry)
+ t.setLine(runes, len(runes))
+ case keyDown:
+ switch t.historyIndex {
+ case -1:
+ return
+ case 0:
+ runes := []rune(t.historyPending)
+ t.setLine(runes, len(runes))
+ t.historyIndex--
+ default:
+ entry, ok := t.history.NthPreviousEntry(t.historyIndex - 1)
+ if ok {
+ t.historyIndex--
+ runes := []rune(entry)
+ t.setLine(runes, len(runes))
+ }
+ }
+ case keyEnter:
+ t.moveCursorToPos(len(t.line))
+ t.queue([]rune("\r\n"))
+ line = string(t.line)
+ ok = true
+ t.line = t.line[:0]
+ t.pos = 0
+ t.cursorX = 0
+ t.cursorY = 0
+ t.maxLine = 0
+ case keyDeleteWord:
+ // Delete zero or more spaces and then one or more characters.
+ t.eraseNPreviousChars(t.countToLeftWord())
+ case keyDeleteLine:
+ // Delete everything from the current cursor position to the
+ // end of line.
+ for i := t.pos; i < len(t.line); i++ {
+ t.queue(space)
+ t.advanceCursor(1)
+ }
+ t.line = t.line[:t.pos]
+ t.moveCursorToPos(t.pos)
+ case keyCtrlD:
+ // Erase the character under the current position.
+ // The EOF case when the line is empty is handled in
+ // readLine().
+ if t.pos < len(t.line) {
+ t.pos++
+ t.eraseNPreviousChars(1)
+ }
+ case keyCtrlU:
+ t.eraseNPreviousChars(t.pos)
+ case keyClearScreen:
+ // Erases the screen and moves the cursor to the home position.
+ t.queue([]rune("\x1b[2J\x1b[H"))
+ t.queue(t.prompt)
+ t.cursorX, t.cursorY = 0, 0
+ t.advanceCursor(visualLength(t.prompt))
+ t.setLine(t.line, t.pos)
+ default:
+ if t.AutoCompleteCallback != nil {
+ prefix := string(t.line[:t.pos])
+ suffix := string(t.line[t.pos:])
+
+ t.lock.Unlock()
+ newLine, newPos, completeOk := t.AutoCompleteCallback(prefix+suffix, len(prefix), key)
+ t.lock.Lock()
+
+ if completeOk {
+ t.setLine([]rune(newLine), utf8.RuneCount([]byte(newLine)[:newPos]))
+ return
+ }
+ }
+ if !isPrintable(key) {
+ return
+ }
+ if len(t.line) == maxLineLength {
+ return
+ }
+ t.addKeyToLine(key)
+ }
+ return
+}
+
+// addKeyToLine inserts the given key at the current position in the current
+// line.
+func (t *Terminal) addKeyToLine(key rune) {
+ if len(t.line) == cap(t.line) {
+ newLine := make([]rune, len(t.line), 2*(1+len(t.line)))
+ copy(newLine, t.line)
+ t.line = newLine
+ }
+ t.line = t.line[:len(t.line)+1]
+ copy(t.line[t.pos+1:], t.line[t.pos:])
+ t.line[t.pos] = key
+ if t.echo {
+ t.writeLine(t.line[t.pos:])
+ }
+ t.pos++
+ t.moveCursorToPos(t.pos)
+}
+
+func (t *Terminal) writeLine(line []rune) {
+ for len(line) != 0 {
+ remainingOnLine := t.termWidth - t.cursorX
+ todo := len(line)
+ if todo > remainingOnLine {
+ todo = remainingOnLine
+ }
+ t.queue(line[:todo])
+ t.advanceCursor(visualLength(line[:todo]))
+ line = line[todo:]
+ }
+}
+
+func (t *Terminal) Write(buf []byte) (n int, err error) {
+ t.lock.Lock()
+ defer t.lock.Unlock()
+
+ if t.cursorX == 0 && t.cursorY == 0 {
+ // This is the easy case: there's nothing on the screen that we
+ // have to move out of the way.
+ return t.c.Write(buf)
+ }
+
+ // We have a prompt and possibly user input on the screen. We
+ // have to clear it first.
+ t.move(0 /* up */, 0 /* down */, t.cursorX /* left */, 0 /* right */)
+ t.cursorX = 0
+ t.clearLineToRight()
+
+ for t.cursorY > 0 {
+ t.move(1 /* up */, 0, 0, 0)
+ t.cursorY--
+ t.clearLineToRight()
+ }
+
+ if _, err = t.c.Write(t.outBuf); err != nil {
+ return
+ }
+ t.outBuf = t.outBuf[:0]
+
+ if n, err = t.c.Write(buf); err != nil {
+ return
+ }
+
+ t.writeLine(t.prompt)
+ if t.echo {
+ t.writeLine(t.line)
+ }
+
+ t.moveCursorToPos(t.pos)
+
+ if _, err = t.c.Write(t.outBuf); err != nil {
+ return
+ }
+ t.outBuf = t.outBuf[:0]
+ return
+}
+
+// ReadPassword temporarily changes the prompt and reads a password, without
+// echo, from the terminal.
+func (t *Terminal) ReadPassword(prompt string) (line string, err error) {
+ t.lock.Lock()
+ defer t.lock.Unlock()
+
+ oldPrompt := t.prompt
+ t.prompt = []rune(prompt)
+ t.echo = false
+
+ line, err = t.readLine()
+
+ t.prompt = oldPrompt
+ t.echo = true
+
+ return
+}
+
+// ReadLine returns a line of input from the terminal.
+func (t *Terminal) ReadLine() (line string, err error) {
+ t.lock.Lock()
+ defer t.lock.Unlock()
+
+ return t.readLine()
+}
+
+func (t *Terminal) readLine() (line string, err error) {
+ // t.lock must be held at this point
+
+ if t.cursorX == 0 && t.cursorY == 0 {
+ t.writeLine(t.prompt)
+ t.c.Write(t.outBuf)
+ t.outBuf = t.outBuf[:0]
+ }
+
+ lineIsPasted := t.pasteActive
+
+ for {
+ rest := t.remainder
+ lineOk := false
+ for !lineOk {
+ var key rune
+ key, rest = bytesToKey(rest, t.pasteActive)
+ if key == utf8.RuneError {
+ break
+ }
+ if !t.pasteActive {
+ if key == keyCtrlD {
+ if len(t.line) == 0 {
+ return "", io.EOF
+ }
+ }
+ if key == keyPasteStart {
+ t.pasteActive = true
+ if len(t.line) == 0 {
+ lineIsPasted = true
+ }
+ continue
+ }
+ } else if key == keyPasteEnd {
+ t.pasteActive = false
+ continue
+ }
+ if !t.pasteActive {
+ lineIsPasted = false
+ }
+ line, lineOk = t.handleKey(key)
+ }
+ if len(rest) > 0 {
+ n := copy(t.inBuf[:], rest)
+ t.remainder = t.inBuf[:n]
+ } else {
+ t.remainder = nil
+ }
+ t.c.Write(t.outBuf)
+ t.outBuf = t.outBuf[:0]
+ if lineOk {
+ if t.echo {
+ t.historyIndex = -1
+ t.history.Add(line)
+ }
+ if lineIsPasted {
+ err = ErrPasteIndicator
+ }
+ return
+ }
+
+ // t.remainder is a slice at the beginning of t.inBuf
+ // containing a partial key sequence
+ readBuf := t.inBuf[len(t.remainder):]
+ var n int
+
+ t.lock.Unlock()
+ n, err = t.c.Read(readBuf)
+ t.lock.Lock()
+
+ if err != nil {
+ return
+ }
+
+ t.remainder = t.inBuf[:n+len(t.remainder)]
+ }
+
+ panic("unreachable") // for Go 1.0.
+}
+
+// SetPrompt sets the prompt to be used when reading subsequent lines.
+func (t *Terminal) SetPrompt(prompt string) {
+ t.lock.Lock()
+ defer t.lock.Unlock()
+
+ t.prompt = []rune(prompt)
+}
+
+func (t *Terminal) clearAndRepaintLinePlusNPrevious(numPrevLines int) {
+ // Move the cursor to column zero at the start of the first line.
+ t.move(t.cursorY, 0, t.cursorX, 0)
+ t.cursorX, t.cursorY = 0, 0
+ t.clearLineToRight()
+ for t.cursorY < numPrevLines {
+ // Move down a line
+ t.move(0, 1, 0, 0)
+ t.cursorY++
+ t.clearLineToRight()
+ }
+ // Move back to beginning.
+ t.move(t.cursorY, 0, 0, 0)
+ t.cursorX, t.cursorY = 0, 0
+
+ t.queue(t.prompt)
+ t.advanceCursor(visualLength(t.prompt))
+ t.writeLine(t.line)
+ t.moveCursorToPos(t.pos)
+}
+
+func (t *Terminal) SetSize(width, height int) error {
+ t.lock.Lock()
+ defer t.lock.Unlock()
+
+ if width == 0 {
+ width = 1
+ }
+
+ oldWidth := t.termWidth
+ t.termWidth, t.termHeight = width, height
+
+ switch {
+ case width == oldWidth:
+ // If the width didn't change then nothing else needs to be
+ // done.
+ return nil
+ case len(t.line) == 0 && t.cursorX == 0 && t.cursorY == 0:
+ // If there is nothing on the current line and no prompt has been
+ // printed, there is nothing to do.
+ return nil
+ case width < oldWidth:
+ // Some terminals (e.g. xterm) will truncate lines that were
+ // too long when shrinking. Others (e.g. gnome-terminal) will
+ // attempt to wrap them. For the former, repainting t.maxLine
+ // works great, but that behaviour goes badly wrong in the case
+ // of the latter because they have doubled every full line.
+
+ // We assume that we are working on a terminal that wraps lines
+ // and adjust the cursor position based on every previous line
+ // wrapping and turning into two. This causes the prompt on
+ // xterms to move upwards, which isn't great, but it avoids a
+ // huge mess with gnome-terminal.
+ if t.cursorX >= t.termWidth {
+ t.cursorX = t.termWidth - 1
+ }
+ t.cursorY *= 2
+ t.clearAndRepaintLinePlusNPrevious(t.maxLine * 2)
+ case width > oldWidth:
+ // If the terminal expands then our position calculations will
+ // be wrong in the future because we think the cursor is
+ // |t.pos| chars into the string, but there will be a gap at
+ // the end of any wrapped line.
+ //
+ // But the position will actually be correct until we move, so
+ // we can move back to the beginning and repaint everything.
+ t.clearAndRepaintLinePlusNPrevious(t.maxLine)
+ }
+
+ _, err := t.c.Write(t.outBuf)
+ t.outBuf = t.outBuf[:0]
+ return err
+}
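+
+// A usage sketch (illustrative only, not part of the upstream file): when the
+// underlying terminal is resized, query the new dimensions with GetSize from
+// util.go and pass them on so the Terminal can repaint. fd and term are
+// assumed names.
+//
+//	if width, height, err := GetSize(fd); err == nil {
+//		term.SetSize(width, height)
+//	}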
+
+type pasteIndicatorError struct{}
+
+func (pasteIndicatorError) Error() string {
+ return "terminal: ErrPasteIndicator not correctly handled"
+}
+
+// ErrPasteIndicator may be returned from ReadLine as the error, in addition
+// to valid line data. It indicates that bracketed paste mode is enabled and
+// that the returned line consists only of pasted data. Programs may wish to
+// interpret pasted data more literally than typed data.
+var ErrPasteIndicator = pasteIndicatorError{}
+
+// SetBracketedPasteMode requests that the terminal bracket paste operations
+// with markers. Not all terminals support this but, if it is supported, then
+// enabling this mode will stop any autocomplete callback from running due to
+// pastes. Additionally, any lines that are completely pasted will be returned
+// from ReadLine with the error set to ErrPasteIndicator.
+func (t *Terminal) SetBracketedPasteMode(on bool) {
+ if on {
+ io.WriteString(t.c, "\x1b[?2004h")
+ } else {
+ io.WriteString(t.c, "\x1b[?2004l")
+ }
+}
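+
+// A usage sketch (illustrative only, not part of the upstream file): with
+// bracketed paste mode enabled, a line consisting entirely of pasted data is
+// still returned, but with ErrPasteIndicator as the error.
+//
+//	term.SetBracketedPasteMode(true)
+//	line, err := term.ReadLine()
+//	if err == ErrPasteIndicator {
+//		// line is valid but was pasted rather than typed.
+//		err = nil
+//	}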
+
+// stRingBuffer is a ring buffer of strings.
+type stRingBuffer struct {
+ // entries contains max elements.
+ entries []string
+ max int
+ // head contains the index of the element most recently added to the ring.
+ head int
+ // size contains the number of elements in the ring.
+ size int
+}
+
+func (s *stRingBuffer) Add(a string) {
+ if s.entries == nil {
+ const defaultNumEntries = 100
+ s.entries = make([]string, defaultNumEntries)
+ s.max = defaultNumEntries
+ }
+
+ s.head = (s.head + 1) % s.max
+ s.entries[s.head] = a
+ if s.size < s.max {
+ s.size++
+ }
+}
+
+// NthPreviousEntry returns the value passed to the nth previous call to Add.
+// If n is zero then the immediately prior value is returned; if n is one, the
+// next most recent; and so on. If no such element exists then ok is false.
+func (s *stRingBuffer) NthPreviousEntry(n int) (value string, ok bool) {
+ if n >= s.size {
+ return "", false
+ }
+ index := s.head - n
+ if index < 0 {
+ index += s.max
+ }
+ return s.entries[index], true
+}
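+
+// A behaviour sketch (illustrative only, not part of the upstream file): the
+// buffer is indexed backwards from the most recently added entry.
+//
+//	var h stRingBuffer
+//	h.Add("first")
+//	h.Add("second")
+//	s, ok := h.NthPreviousEntry(0) // s == "second", ok == true
+//	s, ok = h.NthPreviousEntry(1)  // s == "first", ok == true
+//	s, ok = h.NthPreviousEntry(2)  // s == "", ok == false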
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/terminal_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/terminal_test.go
new file mode 100644
index 00000000000..a663fe41b77
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/terminal_test.go
@@ -0,0 +1,269 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package terminal
+
+import (
+ "io"
+ "testing"
+)
+
+type MockTerminal struct {
+ toSend []byte
+ bytesPerRead int
+ received []byte
+}
+
+func (c *MockTerminal) Read(data []byte) (n int, err error) {
+ n = len(data)
+ if n == 0 {
+ return
+ }
+ if n > len(c.toSend) {
+ n = len(c.toSend)
+ }
+ if n == 0 {
+ return 0, io.EOF
+ }
+ if c.bytesPerRead > 0 && n > c.bytesPerRead {
+ n = c.bytesPerRead
+ }
+ copy(data, c.toSend[:n])
+ c.toSend = c.toSend[n:]
+ return
+}
+
+func (c *MockTerminal) Write(data []byte) (n int, err error) {
+ c.received = append(c.received, data...)
+ return len(data), nil
+}
+
+func TestClose(t *testing.T) {
+ c := &MockTerminal{}
+ ss := NewTerminal(c, "> ")
+ line, err := ss.ReadLine()
+ if line != "" {
+ t.Errorf("Expected empty line but got: %s", line)
+ }
+ if err != io.EOF {
+ t.Errorf("Error should have been EOF but got: %s", err)
+ }
+}
+
+var keyPressTests = []struct {
+ in string
+ line string
+ err error
+ throwAwayLines int
+}{
+ {
+ err: io.EOF,
+ },
+ {
+ in: "\r",
+ line: "",
+ },
+ {
+ in: "foo\r",
+ line: "foo",
+ },
+ {
+ in: "a\x1b[Cb\r", // right
+ line: "ab",
+ },
+ {
+ in: "a\x1b[Db\r", // left
+ line: "ba",
+ },
+ {
+ in: "a\177b\r", // backspace
+ line: "b",
+ },
+ {
+ in: "\x1b[A\r", // up
+ },
+ {
+ in: "\x1b[B\r", // down
+ },
+ {
+ in: "line\x1b[A\x1b[B\r", // up then down
+ line: "line",
+ },
+ {
+ in: "line1\rline2\x1b[A\r", // recall previous line.
+ line: "line1",
+ throwAwayLines: 1,
+ },
+ {
+ // recall two previous lines and append.
+ in: "line1\rline2\rline3\x1b[A\x1b[Axxx\r",
+ line: "line1xxx",
+ throwAwayLines: 2,
+ },
+ {
+ // Ctrl-A to move to beginning of line followed by ^K to kill
+ // line.
+ in: "a b \001\013\r",
+ line: "",
+ },
+ {
+ // Ctrl-A to move to beginning of line, Ctrl-E to move to end,
+ // finally ^K to kill nothing.
+ in: "a b \001\005\013\r",
+ line: "a b ",
+ },
+ {
+ in: "\027\r",
+ line: "",
+ },
+ {
+ in: "a\027\r",
+ line: "",
+ },
+ {
+ in: "a \027\r",
+ line: "",
+ },
+ {
+ in: "a b\027\r",
+ line: "a ",
+ },
+ {
+ in: "a b \027\r",
+ line: "a ",
+ },
+ {
+ in: "one two thr\x1b[D\027\r",
+ line: "one two r",
+ },
+ {
+ in: "\013\r",
+ line: "",
+ },
+ {
+ in: "a\013\r",
+ line: "a",
+ },
+ {
+ in: "ab\x1b[D\013\r",
+ line: "a",
+ },
+ {
+ in: "Ξεσκεπάζω\r",
+ line: "Ξεσκεπάζω",
+ },
+ {
+ in: "£\r\x1b[A\177\r", // non-ASCII char, enter, up, backspace.
+ line: "",
+ throwAwayLines: 1,
+ },
+ {
+ in: "£\r££\x1b[A\x1b[B\177\r", // non-ASCII char, enter, 2x non-ASCII, up, down, backspace, enter.
+ line: "£",
+ throwAwayLines: 1,
+ },
+ {
+ // Ctrl-D at the end of the line should be ignored.
+ in: "a\004\r",
+ line: "a",
+ },
+ {
+ // a, b, left, Ctrl-D should erase the b.
+ in: "ab\x1b[D\004\r",
+ line: "a",
+ },
+ {
+ // a, b, c, d, left, left, ^U should erase to the beginning of
+ // the line.
+ in: "abcd\x1b[D\x1b[D\025\r",
+ line: "cd",
+ },
+ {
+ // Bracketed paste mode: control sequences should be returned
+ // verbatim in paste mode.
+ in: "abc\x1b[200~de\177f\x1b[201~\177\r",
+ line: "abcde\177",
+ },
+ {
+ // Enter in bracketed paste mode should still work.
+ in: "abc\x1b[200~d\refg\x1b[201~h\r",
+ line: "efgh",
+ throwAwayLines: 1,
+ },
+ {
+ // Lines consisting entirely of pasted data should be indicated as such.
+ in: "\x1b[200~a\r",
+ line: "a",
+ err: ErrPasteIndicator,
+ },
+}
+
+func TestKeyPresses(t *testing.T) {
+ for i, test := range keyPressTests {
+ for j := 1; j < len(test.in); j++ {
+ c := &MockTerminal{
+ toSend: []byte(test.in),
+ bytesPerRead: j,
+ }
+ ss := NewTerminal(c, "> ")
+ for k := 0; k < test.throwAwayLines; k++ {
+ _, err := ss.ReadLine()
+ if err != nil {
+ t.Errorf("Throwaway line %d from test %d resulted in error: %s", k, i, err)
+ }
+ }
+ line, err := ss.ReadLine()
+ if line != test.line {
+ t.Errorf("Line resulting from test %d (%d bytes per read) was '%s', expected '%s'", i, j, line, test.line)
+ break
+ }
+ if err != test.err {
+ t.Errorf("Error resulting from test %d (%d bytes per read) was '%v', expected '%v'", i, j, err, test.err)
+ break
+ }
+ }
+ }
+}
+
+func TestPasswordNotSaved(t *testing.T) {
+ c := &MockTerminal{
+ toSend: []byte("password\r\x1b[A\r"),
+ bytesPerRead: 1,
+ }
+ ss := NewTerminal(c, "> ")
+ pw, _ := ss.ReadPassword("> ")
+ if pw != "password" {
+ t.Fatalf("failed to read password, got %s", pw)
+ }
+ line, _ := ss.ReadLine()
+ if len(line) > 0 {
+ t.Fatalf("password was saved in history")
+ }
+}
+
+var setSizeTests = []struct {
+ width, height int
+}{
+ {40, 13},
+ {80, 24},
+ {132, 43},
+}
+
+func TestTerminalSetSize(t *testing.T) {
+ for _, setSize := range setSizeTests {
+ c := &MockTerminal{
+ toSend: []byte("password\r\x1b[A\r"),
+ bytesPerRead: 1,
+ }
+ ss := NewTerminal(c, "> ")
+ ss.SetSize(setSize.width, setSize.height)
+ pw, _ := ss.ReadPassword("Password: ")
+ if pw != "password" {
+ t.Fatalf("failed to read password, got %s", pw)
+ }
+ if string(c.received) != "Password: \r\n" {
+ t.Errorf("failed to set the temporary prompt: expected %q, got %q", "Password: ", c.received)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util.go
new file mode 100644
index 00000000000..598e3df77e7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util.go
@@ -0,0 +1,128 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build darwin dragonfly freebsd linux,!appengine netbsd openbsd
+
+// Package terminal provides support functions for dealing with terminals, as
+// commonly found on UNIX systems.
+//
+// Putting a terminal into raw mode is the most common requirement:
+//
+// oldState, err := terminal.MakeRaw(0)
+// if err != nil {
+// panic(err)
+// }
+// defer terminal.Restore(0, oldState)
+package terminal // import "golang.org/x/crypto/ssh/terminal"
+
+import (
+ "io"
+ "syscall"
+ "unsafe"
+)
+
+// State contains the state of a terminal.
+type State struct {
+ termios syscall.Termios
+}
+
+// IsTerminal returns true if the given file descriptor is a terminal.
+func IsTerminal(fd int) bool {
+ var termios syscall.Termios
+ _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0)
+ return err == 0
+}
+
+// MakeRaw puts the terminal connected to the given file descriptor into raw
+// mode and returns the previous state of the terminal so that it can be
+// restored.
+func MakeRaw(fd int) (*State, error) {
+ var oldState State
+ if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState.termios)), 0, 0, 0); err != 0 {
+ return nil, err
+ }
+
+ newState := oldState.termios
+ newState.Iflag &^= syscall.ISTRIP | syscall.INLCR | syscall.ICRNL | syscall.IGNCR | syscall.IXON | syscall.IXOFF
+ newState.Lflag &^= syscall.ECHO | syscall.ICANON | syscall.ISIG
+ if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&newState)), 0, 0, 0); err != 0 {
+ return nil, err
+ }
+
+ return &oldState, nil
+}
+
+// GetState returns the current state of a terminal, which may be useful for
+// restoring the terminal after a signal.
+func GetState(fd int) (*State, error) {
+ var oldState State
+ if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState.termios)), 0, 0, 0); err != 0 {
+ return nil, err
+ }
+
+ return &oldState, nil
+}
+
+// Restore restores the terminal connected to the given file descriptor to a
+// previous state.
+func Restore(fd int, state *State) error {
+ _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&state.termios)), 0, 0, 0)
+ return err
+}
+
+// GetSize returns the dimensions of the given terminal.
+func GetSize(fd int) (width, height int, err error) {
+ var dimensions [4]uint16
+
+ if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), uintptr(syscall.TIOCGWINSZ), uintptr(unsafe.Pointer(&dimensions)), 0, 0, 0); err != 0 {
+ return -1, -1, err
+ }
+ return int(dimensions[1]), int(dimensions[0]), nil
+}
+
+// ReadPassword reads a line of input from a terminal without local echo. This
+// is commonly used for inputting passwords and other sensitive data. The slice
+// returned does not include the \n.
+func ReadPassword(fd int) ([]byte, error) {
+ var oldState syscall.Termios
+ if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState)), 0, 0, 0); err != 0 {
+ return nil, err
+ }
+
+ newState := oldState
+ newState.Lflag &^= syscall.ECHO
+ newState.Lflag |= syscall.ICANON | syscall.ISIG
+ newState.Iflag |= syscall.ICRNL
+ if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&newState)), 0, 0, 0); err != 0 {
+ return nil, err
+ }
+
+ defer func() {
+ syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&oldState)), 0, 0, 0)
+ }()
+
+ var buf [16]byte
+ var ret []byte
+ for {
+ n, err := syscall.Read(fd, buf[:])
+ if err != nil {
+ return nil, err
+ }
+ if n == 0 {
+ if len(ret) == 0 {
+ return nil, io.EOF
+ }
+ break
+ }
+ if buf[n-1] == '\n' {
+ n--
+ }
+ ret = append(ret, buf[:n]...)
+ if n < len(buf) {
+ break
+ }
+ }
+
+ return ret, nil
+}
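+
+// A usage sketch (illustrative only, not part of the upstream file): prompt
+// for a password on standard input (file descriptor 0), assuming it is a
+// terminal; fmt and errors are from the standard library.
+//
+//	if !IsTerminal(0) {
+//		return errors.New("stdin is not a terminal")
+//	}
+//	fmt.Print("Password: ")
+//	password, err := ReadPassword(0)
+//	fmt.Println()
+//	if err != nil {
+//		return err
+//	}
+//	// password does not include the trailing newline.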
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_bsd.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_bsd.go
new file mode 100644
index 00000000000..9c1ffd145a7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_bsd.go
@@ -0,0 +1,12 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build darwin dragonfly freebsd netbsd openbsd
+
+package terminal
+
+import "syscall"
+
+const ioctlReadTermios = syscall.TIOCGETA
+const ioctlWriteTermios = syscall.TIOCSETA
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_linux.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_linux.go
new file mode 100644
index 00000000000..5883b22d780
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_linux.go
@@ -0,0 +1,11 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package terminal
+
+// These constants are declared here rather than imported from the syscall
+// package because some syscall implementations, even on Linux (for example
+// gccgo), do not declare them.
+const ioctlReadTermios = 0x5401 // syscall.TCGETS
+const ioctlWriteTermios = 0x5402 // syscall.TCSETS
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_windows.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_windows.go
new file mode 100644
index 00000000000..2dd6c3d9788
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/terminal/util_windows.go
@@ -0,0 +1,174 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build windows
+
+// Package terminal provides support functions for dealing with terminals, as
+// commonly found on UNIX systems.
+//
+// Putting a terminal into raw mode is the most common requirement:
+//
+// oldState, err := terminal.MakeRaw(0)
+// if err != nil {
+// panic(err)
+// }
+// defer terminal.Restore(0, oldState)
+package terminal
+
+import (
+ "io"
+ "syscall"
+ "unsafe"
+)
+
+const (
+ enableLineInput = 2
+ enableEchoInput = 4
+ enableProcessedInput = 1
+ enableWindowInput = 8
+ enableMouseInput = 16
+ enableInsertMode = 32
+ enableQuickEditMode = 64
+ enableExtendedFlags = 128
+ enableAutoPosition = 256
+ enableProcessedOutput = 1
+ enableWrapAtEolOutput = 2
+)
+
+var kernel32 = syscall.NewLazyDLL("kernel32.dll")
+
+var (
+ procGetConsoleMode = kernel32.NewProc("GetConsoleMode")
+ procSetConsoleMode = kernel32.NewProc("SetConsoleMode")
+ procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
+)
+
+type (
+ short int16
+ word uint16
+
+ coord struct {
+ x short
+ y short
+ }
+ smallRect struct {
+ left short
+ top short
+ right short
+ bottom short
+ }
+ consoleScreenBufferInfo struct {
+ size coord
+ cursorPosition coord
+ attributes word
+ window smallRect
+ maximumWindowSize coord
+ }
+)
+
+type State struct {
+ mode uint32
+}
+
+// IsTerminal returns true if the given file descriptor is a terminal.
+func IsTerminal(fd int) bool {
+ var st uint32
+ r, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0)
+ return r != 0 && e == 0
+}
+
+// MakeRaw puts the terminal connected to the given file descriptor into raw
+// mode and returns the previous state of the terminal so that it can be
+// restored.
+func MakeRaw(fd int) (*State, error) {
+ var st uint32
+ _, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0)
+ if e != 0 {
+ return nil, error(e)
+ }
+ st &^= (enableEchoInput | enableProcessedInput | enableLineInput | enableProcessedOutput)
+ _, _, e = syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(st), 0)
+ if e != 0 {
+ return nil, error(e)
+ }
+ return &State{st}, nil
+}
+
+// GetState returns the current state of a terminal, which may be useful for
+// restoring the terminal after a signal.
+func GetState(fd int) (*State, error) {
+ var st uint32
+ _, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0)
+ if e != 0 {
+ return nil, error(e)
+ }
+ return &State{st}, nil
+}
+
+// Restore restores the terminal connected to the given file descriptor to a
+// previous state.
+func Restore(fd int, state *State) error {
+ _, _, err := syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(state.mode), 0)
+ return err
+}
+
+// GetSize returns the dimensions of the given terminal.
+func GetSize(fd int) (width, height int, err error) {
+ var info consoleScreenBufferInfo
+ _, _, e := syscall.Syscall(procGetConsoleScreenBufferInfo.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&info)), 0)
+ if e != 0 {
+ return 0, 0, error(e)
+ }
+ return int(info.size.x), int(info.size.y), nil
+}
+
+// ReadPassword reads a line of input from a terminal without local echo. This
+// is commonly used for inputting passwords and other sensitive data. The slice
+// returned does not include the \n.
+func ReadPassword(fd int) ([]byte, error) {
+ var st uint32
+ _, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0)
+ if e != 0 {
+ return nil, error(e)
+ }
+ old := st
+
+ st &^= (enableEchoInput)
+ st |= (enableProcessedInput | enableLineInput | enableProcessedOutput)
+ _, _, e = syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(st), 0)
+ if e != 0 {
+ return nil, error(e)
+ }
+
+ defer func() {
+ syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(old), 0)
+ }()
+
+ var buf [16]byte
+ var ret []byte
+ for {
+ n, err := syscall.Read(syscall.Handle(fd), buf[:])
+ if err != nil {
+ return nil, err
+ }
+ if n == 0 {
+ if len(ret) == 0 {
+ return nil, io.EOF
+ }
+ break
+ }
+ if buf[n-1] == '\n' {
+ n--
+ }
+ if n > 0 && buf[n-1] == '\r' {
+ n--
+ }
+ ret = append(ret, buf[:n]...)
+ if n < len(buf) {
+ break
+ }
+ }
+
+ return ret, nil
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/agent_unix_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/agent_unix_test.go
new file mode 100644
index 00000000000..f481253c9eb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/agent_unix_test.go
@@ -0,0 +1,59 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build darwin dragonfly freebsd linux netbsd openbsd
+
+package test
+
+import (
+ "bytes"
+ "testing"
+
+ "golang.org/x/crypto/ssh"
+ "golang.org/x/crypto/ssh/agent"
+)
+
+func TestAgentForward(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+ defer conn.Close()
+
+ keyring := agent.NewKeyring()
+ if err := keyring.Add(agent.AddedKey{PrivateKey: testPrivateKeys["dsa"]}); err != nil {
+ t.Fatalf("Error adding key: %s", err)
+ }
+ if err := keyring.Add(agent.AddedKey{
+ PrivateKey: testPrivateKeys["dsa"],
+ ConfirmBeforeUse: true,
+ LifetimeSecs: 3600,
+ }); err != nil {
+ t.Fatalf("Error adding key with constraints: %s", err)
+ }
+ pub := testPublicKeys["dsa"]
+
+ sess, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("NewSession: %v", err)
+ }
+ if err := agent.RequestAgentForwarding(sess); err != nil {
+ t.Fatalf("RequestAgentForwarding: %v", err)
+ }
+
+ if err := agent.ForwardToAgent(conn, keyring); err != nil {
+ t.Fatalf("SetupForwardKeyring: %v", err)
+ }
+ out, err := sess.CombinedOutput("ssh-add -L")
+ if err != nil {
+ t.Fatalf("running ssh-add: %v, out %s", err, out)
+ }
+ key, _, _, _, err := ssh.ParseAuthorizedKey(out)
+ if err != nil {
+ t.Fatalf("ParseAuthorizedKey(%q): %v", out, err)
+ }
+
+ if !bytes.Equal(key.Marshal(), pub.Marshal()) {
+ t.Fatalf("got key %s, want %s", ssh.MarshalAuthorizedKey(key), ssh.MarshalAuthorizedKey(pub))
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/cert_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/cert_test.go
new file mode 100644
index 00000000000..364790f17d7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/cert_test.go
@@ -0,0 +1,47 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build darwin dragonfly freebsd linux netbsd openbsd
+
+package test
+
+import (
+ "crypto/rand"
+ "testing"
+
+ "golang.org/x/crypto/ssh"
+)
+
+func TestCertLogin(t *testing.T) {
+ s := newServer(t)
+ defer s.Shutdown()
+
+ // Use a key different from the default.
+ clientKey := testSigners["dsa"]
+ caAuthKey := testSigners["ecdsa"]
+ cert := &ssh.Certificate{
+ Key: clientKey.PublicKey(),
+ ValidPrincipals: []string{username()},
+ CertType: ssh.UserCert,
+ ValidBefore: ssh.CertTimeInfinity,
+ }
+ if err := cert.SignCert(rand.Reader, caAuthKey); err != nil {
+ t.Fatalf("SignCert: %v", err)
+ }
+
+ certSigner, err := ssh.NewCertSigner(cert, clientKey)
+ if err != nil {
+ t.Fatalf("NewCertSigner: %v", err)
+ }
+
+ conf := &ssh.ClientConfig{
+ User: username(),
+ }
+ conf.Auth = append(conf.Auth, ssh.PublicKeys(certSigner))
+ client, err := s.TryDial(conf)
+ if err != nil {
+ t.Fatalf("TryDial: %v", err)
+ }
+ client.Close()
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/doc.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/doc.go
new file mode 100644
index 00000000000..3f9b3346dfa
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/doc.go
@@ -0,0 +1,7 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package test contains integration tests for the
+// golang.org/x/crypto/ssh package.
+package test // import "golang.org/x/crypto/ssh/test"
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/forward_unix_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/forward_unix_test.go
new file mode 100644
index 00000000000..877a88cde3d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/forward_unix_test.go
@@ -0,0 +1,160 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build darwin dragonfly freebsd linux netbsd openbsd
+
+package test
+
+import (
+ "bytes"
+ "io"
+ "io/ioutil"
+ "math/rand"
+ "net"
+ "testing"
+ "time"
+)
+
+func TestPortForward(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+ defer conn.Close()
+
+ sshListener, err := conn.Listen("tcp", "localhost:0")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ go func() {
+ sshConn, err := sshListener.Accept()
+ if err != nil {
+ t.Fatalf("listen.Accept failed: %v", err)
+ }
+
+ _, err = io.Copy(sshConn, sshConn)
+ if err != nil && err != io.EOF {
+ t.Fatalf("ssh client copy: %v", err)
+ }
+ sshConn.Close()
+ }()
+
+ forwardedAddr := sshListener.Addr().String()
+ tcpConn, err := net.Dial("tcp", forwardedAddr)
+ if err != nil {
+ t.Fatalf("TCP dial failed: %v", err)
+ }
+
+ readChan := make(chan []byte)
+ go func() {
+ data, _ := ioutil.ReadAll(tcpConn)
+ readChan <- data
+ }()
+
+ // Invent some data.
+ data := make([]byte, 100*1000)
+ for i := range data {
+ data[i] = byte(i % 255)
+ }
+
+ var sent []byte
+ for len(sent) < 1000*1000 {
+ // Send random sized chunks
+ m := rand.Intn(len(data))
+ n, err := tcpConn.Write(data[:m])
+ if err != nil {
+ break
+ }
+ sent = append(sent, data[:n]...)
+ }
+ if err := tcpConn.(*net.TCPConn).CloseWrite(); err != nil {
+ t.Errorf("tcpConn.CloseWrite: %v", err)
+ }
+
+ read := <-readChan
+
+ if len(sent) != len(read) {
+ t.Fatalf("got %d bytes, want %d", len(read), len(sent))
+ }
+ if !bytes.Equal(sent, read) {
+ t.Fatalf("read back data does not match")
+ }
+
+ if err := sshListener.Close(); err != nil {
+ t.Fatalf("sshListener.Close: %v", err)
+ }
+
+ // Check that the forward disappeared.
+ tcpConn, err = net.Dial("tcp", forwardedAddr)
+ if err == nil {
+ tcpConn.Close()
+ t.Errorf("still listening to %s after closing", forwardedAddr)
+ }
+}
+
+func TestAcceptClose(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+
+ sshListener, err := conn.Listen("tcp", "localhost:0")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ quit := make(chan error, 1)
+ go func() {
+ for {
+ c, err := sshListener.Accept()
+ if err != nil {
+ quit <- err
+ break
+ }
+ c.Close()
+ }
+ }()
+ sshListener.Close()
+
+ select {
+ case <-time.After(1 * time.Second):
+ t.Errorf("timeout: listener did not close.")
+ case err := <-quit:
+ t.Logf("quit as expected (error %v)", err)
+ }
+}
+
+// Check that listeners exit if the underlying client transport dies.
+func TestPortForwardConnectionClose(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+
+ sshListener, err := conn.Listen("tcp", "localhost:0")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ quit := make(chan error, 1)
+ go func() {
+ for {
+ c, err := sshListener.Accept()
+ if err != nil {
+ quit <- err
+ break
+ }
+ c.Close()
+ }
+ }()
+
+ // It would be even nicer if we closed the server side, but it
+ // is more involved as the fd for that side is dup()ed.
+ server.clientConn.Close()
+
+ select {
+ case <-time.After(1 * time.Second):
+ t.Errorf("timeout: listener did not close.")
+ case err := <-quit:
+ t.Logf("quit as expected (error %v)", err)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/session_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/session_test.go
new file mode 100644
index 00000000000..c0e714ba906
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/session_test.go
@@ -0,0 +1,340 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !windows
+
+package test
+
+// Session functional tests.
+
+import (
+ "bytes"
+ "errors"
+ "io"
+ "strings"
+ "testing"
+
+ "golang.org/x/crypto/ssh"
+)
+
+func TestRunCommandSuccess(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+ defer conn.Close()
+
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("session failed: %v", err)
+ }
+ defer session.Close()
+ err = session.Run("true")
+ if err != nil {
+ t.Fatalf("session failed: %v", err)
+ }
+}
+
+func TestHostKeyCheck(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+
+ conf := clientConfig()
+ hostDB := hostKeyDB()
+ conf.HostKeyCallback = hostDB.Check
+
+ // change the keys.
+ hostDB.keys[ssh.KeyAlgoRSA][25]++
+ hostDB.keys[ssh.KeyAlgoDSA][25]++
+ hostDB.keys[ssh.KeyAlgoECDSA256][25]++
+
+ conn, err := server.TryDial(conf)
+ if err == nil {
+ conn.Close()
+ t.Fatalf("dial should have failed.")
+ } else if !strings.Contains(err.Error(), "host key mismatch") {
+ t.Fatalf("'host key mismatch' not found in %v", err)
+ }
+}
+
+func TestRunCommandStdin(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+ defer conn.Close()
+
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("session failed: %v", err)
+ }
+ defer session.Close()
+
+ r, w := io.Pipe()
+ defer r.Close()
+ defer w.Close()
+ session.Stdin = r
+
+ err = session.Run("true")
+ if err != nil {
+ t.Fatalf("session failed: %v", err)
+ }
+}
+
+func TestRunCommandStdinError(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+ defer conn.Close()
+
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("session failed: %v", err)
+ }
+ defer session.Close()
+
+ r, w := io.Pipe()
+ defer r.Close()
+ session.Stdin = r
+ pipeErr := errors.New("closing write end of pipe")
+ w.CloseWithError(pipeErr)
+
+ err = session.Run("true")
+ if err != pipeErr {
+ t.Fatalf("expected %v, found %v", pipeErr, err)
+ }
+}
+
+func TestRunCommandFailed(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+ defer conn.Close()
+
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("session failed: %v", err)
+ }
+ defer session.Close()
+ err = session.Run(`bash -c "kill -9 $$"`)
+ if err == nil {
+ t.Fatalf("session succeeded: %v", err)
+ }
+}
+
+func TestRunCommandWeClosed(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+ defer conn.Close()
+
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("session failed: %v", err)
+ }
+ err = session.Shell()
+ if err != nil {
+ t.Fatalf("shell failed: %v", err)
+ }
+ err = session.Close()
+ if err != nil {
+ t.Fatalf("shell failed: %v", err)
+ }
+}
+
+func TestFuncLargeRead(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+ defer conn.Close()
+
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("unable to create new session: %s", err)
+ }
+
+ stdout, err := session.StdoutPipe()
+ if err != nil {
+ t.Fatalf("unable to acquire stdout pipe: %s", err)
+ }
+
+ err = session.Start("dd if=/dev/urandom bs=2048 count=1024")
+ if err != nil {
+ t.Fatalf("unable to execute remote command: %s", err)
+ }
+
+ buf := new(bytes.Buffer)
+ n, err := io.Copy(buf, stdout)
+ if err != nil {
+ t.Fatalf("error reading from remote stdout: %s", err)
+ }
+
+ if n != 2048*1024 {
+ t.Fatalf("Expected %d bytes but read only %d from remote command", 2048*1024, n)
+ }
+}
+
+func TestKeyChange(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conf := clientConfig()
+ hostDB := hostKeyDB()
+ conf.HostKeyCallback = hostDB.Check
+ conf.RekeyThreshold = 1024
+ conn := server.Dial(conf)
+ defer conn.Close()
+
+ for i := 0; i < 4; i++ {
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("unable to create new session: %s", err)
+ }
+
+ stdout, err := session.StdoutPipe()
+ if err != nil {
+ t.Fatalf("unable to acquire stdout pipe: %s", err)
+ }
+
+ err = session.Start("dd if=/dev/urandom bs=1024 count=1")
+ if err != nil {
+ t.Fatalf("unable to execute remote command: %s", err)
+ }
+ buf := new(bytes.Buffer)
+ n, err := io.Copy(buf, stdout)
+ if err != nil {
+ t.Fatalf("error reading from remote stdout: %s", err)
+ }
+
+ want := int64(1024)
+ if n != want {
+ t.Fatalf("Expected %d bytes but read only %d from remote command", want, n)
+ }
+ }
+
+ if changes := hostDB.checkCount; changes < 4 {
+ t.Errorf("got %d key changes, want 4", changes)
+ }
+}
+
+func TestInvalidTerminalMode(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+ defer conn.Close()
+
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("session failed: %v", err)
+ }
+ defer session.Close()
+
+ if err = session.RequestPty("vt100", 80, 40, ssh.TerminalModes{255: 1984}); err == nil {
+ t.Fatalf("req-pty failed: successful request with invalid mode")
+ }
+}
+
+func TestValidTerminalMode(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ conn := server.Dial(clientConfig())
+ defer conn.Close()
+
+ session, err := conn.NewSession()
+ if err != nil {
+ t.Fatalf("session failed: %v", err)
+ }
+ defer session.Close()
+
+ stdout, err := session.StdoutPipe()
+ if err != nil {
+ t.Fatalf("unable to acquire stdout pipe: %s", err)
+ }
+
+ stdin, err := session.StdinPipe()
+ if err != nil {
+ t.Fatalf("unable to acquire stdin pipe: %s", err)
+ }
+
+ tm := ssh.TerminalModes{ssh.ECHO: 0}
+ if err = session.RequestPty("xterm", 80, 40, tm); err != nil {
+ t.Fatalf("req-pty failed: %s", err)
+ }
+
+ err = session.Shell()
+ if err != nil {
+ t.Fatalf("session failed: %s", err)
+ }
+
+ stdin.Write([]byte("stty -a && exit\n"))
+
+ var buf bytes.Buffer
+ if _, err := io.Copy(&buf, stdout); err != nil {
+ t.Fatalf("reading failed: %s", err)
+ }
+
+ if sttyOutput := buf.String(); !strings.Contains(sttyOutput, "-echo ") {
+ t.Fatalf("terminal mode failure: expected -echo in stty output, got %s", sttyOutput)
+ }
+}
+
+func TestCiphers(t *testing.T) {
+ var config ssh.Config
+ config.SetDefaults()
+ cipherOrder := config.Ciphers
+ // If this cipher is commented out in cipher.go it will not be tested;
+ // the connection will fall back to the next available cipher as per line 292.
+ cipherOrder = append(cipherOrder, "aes128-cbc")
+
+ for _, ciph := range cipherOrder {
+ server := newServer(t)
+ defer server.Shutdown()
+ conf := clientConfig()
+ conf.Ciphers = []string{ciph}
+ // Don't fail if sshd doesn't have the cipher.
+ conf.Ciphers = append(conf.Ciphers, cipherOrder...)
+ conn, err := server.TryDial(conf)
+ if err == nil {
+ conn.Close()
+ } else {
+ t.Fatalf("failed for cipher %q", ciph)
+ }
+ }
+}
+
+func TestMACs(t *testing.T) {
+ var config ssh.Config
+ config.SetDefaults()
+ macOrder := config.MACs
+
+ for _, mac := range macOrder {
+ server := newServer(t)
+ defer server.Shutdown()
+ conf := clientConfig()
+ conf.MACs = []string{mac}
+ // Don't fail if sshd doesn't have the MAC.
+ conf.MACs = append(conf.MACs, macOrder...)
+ if conn, err := server.TryDial(conf); err == nil {
+ conn.Close()
+ } else {
+ t.Fatalf("failed for MAC %q", mac)
+ }
+ }
+}
+
+func TestKeyExchanges(t *testing.T) {
+ var config ssh.Config
+ config.SetDefaults()
+ kexOrder := config.KeyExchanges
+ for _, kex := range kexOrder {
+ server := newServer(t)
+ defer server.Shutdown()
+ conf := clientConfig()
+ // Don't fail if sshd doesn't have the kex.
+ conf.KeyExchanges = append([]string{kex}, kexOrder...)
+ conn, err := server.TryDial(conf)
+ if err == nil {
+ conn.Close()
+ } else {
+ t.Errorf("failed for kex %q", kex)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/tcpip_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/tcpip_test.go
new file mode 100644
index 00000000000..a2eb9358d02
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/tcpip_test.go
@@ -0,0 +1,46 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !windows
+
+package test
+
+// direct-tcpip functional tests
+
+import (
+ "io"
+ "net"
+ "testing"
+)
+
+func TestDial(t *testing.T) {
+ server := newServer(t)
+ defer server.Shutdown()
+ sshConn := server.Dial(clientConfig())
+ defer sshConn.Close()
+
+ l, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ t.Fatalf("Listen: %v", err)
+ }
+ defer l.Close()
+
+ go func() {
+ for {
+ c, err := l.Accept()
+ if err != nil {
+ break
+ }
+
+ io.WriteString(c, c.RemoteAddr().String())
+ c.Close()
+ }
+ }()
+
+ conn, err := sshConn.Dial("tcp", l.Addr().String())
+ if err != nil {
+ t.Fatalf("Dial: %v", err)
+ }
+ defer conn.Close()
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/test_unix_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/test_unix_test.go
new file mode 100644
index 00000000000..f1fc50b2e48
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/test_unix_test.go
@@ -0,0 +1,261 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build darwin dragonfly freebsd linux netbsd openbsd plan9
+
+package test
+
+// functional test harness for unix.
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "net"
+ "os"
+ "os/exec"
+ "os/user"
+ "path/filepath"
+ "testing"
+ "text/template"
+
+ "golang.org/x/crypto/ssh"
+ "golang.org/x/crypto/ssh/testdata"
+)
+
+const sshd_config = `
+Protocol 2
+HostKey {{.Dir}}/id_rsa
+HostKey {{.Dir}}/id_dsa
+HostKey {{.Dir}}/id_ecdsa
+Pidfile {{.Dir}}/sshd.pid
+#UsePrivilegeSeparation no
+KeyRegenerationInterval 3600
+ServerKeyBits 768
+SyslogFacility AUTH
+LogLevel DEBUG2
+LoginGraceTime 120
+PermitRootLogin no
+StrictModes no
+RSAAuthentication yes
+PubkeyAuthentication yes
+AuthorizedKeysFile {{.Dir}}/id_user.pub
+TrustedUserCAKeys {{.Dir}}/id_ecdsa.pub
+IgnoreRhosts yes
+RhostsRSAAuthentication no
+HostbasedAuthentication no
+`
+
+var configTmpl = template.Must(template.New("").Parse(sshd_config))
+
+type server struct {
+ t *testing.T
+ cleanup func() // executed during Shutdown
+ configfile string
+ cmd *exec.Cmd
+ output bytes.Buffer // holds stderr from sshd process
+
+ // Client half of the network connection.
+ clientConn net.Conn
+}
+
+func username() string {
+ var username string
+ if user, err := user.Current(); err == nil {
+ username = user.Username
+ } else {
+ // user.Current() currently requires cgo. If an error is
+ // returned, attempt to get the username from the environment.
+ log.Printf("user.Current: %v; falling back on $USER", err)
+ username = os.Getenv("USER")
+ }
+ if username == "" {
+ panic("Unable to get username")
+ }
+ return username
+}
+
+type storedHostKey struct {
+ // keys map from an algorithm string to binary key data.
+ keys map[string][]byte
+
+ // checkCount counts the Check calls. Used for testing
+ // rekeying.
+ checkCount int
+}
+
+func (k *storedHostKey) Add(key ssh.PublicKey) {
+ if k.keys == nil {
+ k.keys = map[string][]byte{}
+ }
+ k.keys[key.Type()] = key.Marshal()
+}
+
+func (k *storedHostKey) Check(addr string, remote net.Addr, key ssh.PublicKey) error {
+ k.checkCount++
+ algo := key.Type()
+
+ if k.keys == nil || !bytes.Equal(key.Marshal(), k.keys[algo]) {
+ return fmt.Errorf("host key mismatch. Got %q, want %q", key, k.keys[algo])
+ }
+ return nil
+}
+
+func hostKeyDB() *storedHostKey {
+ keyChecker := &storedHostKey{}
+ keyChecker.Add(testPublicKeys["ecdsa"])
+ keyChecker.Add(testPublicKeys["rsa"])
+ keyChecker.Add(testPublicKeys["dsa"])
+ return keyChecker
+}
+
+func clientConfig() *ssh.ClientConfig {
+ config := &ssh.ClientConfig{
+ User: username(),
+ Auth: []ssh.AuthMethod{
+ ssh.PublicKeys(testSigners["user"]),
+ },
+ HostKeyCallback: hostKeyDB().Check,
+ }
+ return config
+}
+
+// unixConnection creates two halves of a connected net.UnixConn. It
+// is used for connecting the Go SSH client with sshd without opening
+// ports.
+func unixConnection() (*net.UnixConn, *net.UnixConn, error) {
+ dir, err := ioutil.TempDir("", "unixConnection")
+ if err != nil {
+ return nil, nil, err
+ }
+ defer os.Remove(dir)
+
+ addr := filepath.Join(dir, "ssh")
+ listener, err := net.Listen("unix", addr)
+ if err != nil {
+ return nil, nil, err
+ }
+ defer listener.Close()
+ c1, err := net.Dial("unix", addr)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ c2, err := listener.Accept()
+ if err != nil {
+ c1.Close()
+ return nil, nil, err
+ }
+
+ return c1.(*net.UnixConn), c2.(*net.UnixConn), nil
+}
+
+func (s *server) TryDial(config *ssh.ClientConfig) (*ssh.Client, error) {
+ sshd, err := exec.LookPath("sshd")
+ if err != nil {
+ s.t.Skipf("skipping test: %v", err)
+ }
+
+ c1, c2, err := unixConnection()
+ if err != nil {
+ s.t.Fatalf("unixConnection: %v", err)
+ }
+
+ s.cmd = exec.Command(sshd, "-f", s.configfile, "-i", "-e")
+ f, err := c2.File()
+ if err != nil {
+ s.t.Fatalf("UnixConn.File: %v", err)
+ }
+ defer f.Close()
+ s.cmd.Stdin = f
+ s.cmd.Stdout = f
+ s.cmd.Stderr = &s.output
+ if err := s.cmd.Start(); err != nil {
+ s.t.Fail()
+ s.Shutdown()
+ s.t.Fatalf("s.cmd.Start: %v", err)
+ }
+ s.clientConn = c1
+ conn, chans, reqs, err := ssh.NewClientConn(c1, "", config)
+ if err != nil {
+ return nil, err
+ }
+ return ssh.NewClient(conn, chans, reqs), nil
+}
+
+func (s *server) Dial(config *ssh.ClientConfig) *ssh.Client {
+ conn, err := s.TryDial(config)
+ if err != nil {
+ s.t.Fail()
+ s.Shutdown()
+ s.t.Fatalf("ssh.Client: %v", err)
+ }
+ return conn
+}
+
+func (s *server) Shutdown() {
+ if s.cmd != nil && s.cmd.Process != nil {
+ // Don't check for errors; if it fails it's most
+ // likely "os: process already finished", and we don't
+ // care about that. Use os.Interrupt, so child
+ // processes are killed too.
+ s.cmd.Process.Signal(os.Interrupt)
+ s.cmd.Wait()
+ }
+ if s.t.Failed() {
+ // log any output from sshd process
+ s.t.Logf("sshd: %s", s.output.String())
+ }
+ s.cleanup()
+}
+
+func writeFile(path string, contents []byte) {
+ f, err := os.OpenFile(path, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0600)
+ if err != nil {
+ panic(err)
+ }
+ defer f.Close()
+ if _, err := f.Write(contents); err != nil {
+ panic(err)
+ }
+}
+
+// newServer returns a new mock ssh server.
+func newServer(t *testing.T) *server {
+ if testing.Short() {
+ t.Skip("skipping test due to -short")
+ }
+ dir, err := ioutil.TempDir("", "sshtest")
+ if err != nil {
+ t.Fatal(err)
+ }
+ f, err := os.Create(filepath.Join(dir, "sshd_config"))
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = configTmpl.Execute(f, map[string]string{
+ "Dir": dir,
+ })
+ if err != nil {
+ t.Fatal(err)
+ }
+ f.Close()
+
+ for k, v := range testdata.PEMBytes {
+ filename := "id_" + k
+ writeFile(filepath.Join(dir, filename), v)
+ writeFile(filepath.Join(dir, filename+".pub"), ssh.MarshalAuthorizedKey(testPublicKeys[k]))
+ }
+
+ return &server{
+ t: t,
+ configfile: f.Name(),
+ cleanup: func() {
+ if err := os.RemoveAll(dir); err != nil {
+ t.Error(err)
+ }
+ },
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/testdata_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/testdata_test.go
new file mode 100644
index 00000000000..ae48c7516cb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/test/testdata_test.go
@@ -0,0 +1,64 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// IMPLEMENTOR NOTE: To avoid a package loop, this file is in three places:
+// ssh/, ssh/agent, and ssh/test/. It should be kept in sync across all three
+// instances.
+
+package test
+
+import (
+ "crypto/rand"
+ "fmt"
+
+ "golang.org/x/crypto/ssh"
+ "golang.org/x/crypto/ssh/testdata"
+)
+
+var (
+ testPrivateKeys map[string]interface{}
+ testSigners map[string]ssh.Signer
+ testPublicKeys map[string]ssh.PublicKey
+)
+
+func init() {
+ var err error
+
+ n := len(testdata.PEMBytes)
+ testPrivateKeys = make(map[string]interface{}, n)
+ testSigners = make(map[string]ssh.Signer, n)
+ testPublicKeys = make(map[string]ssh.PublicKey, n)
+ for t, k := range testdata.PEMBytes {
+ testPrivateKeys[t], err = ssh.ParseRawPrivateKey(k)
+ if err != nil {
+ panic(fmt.Sprintf("Unable to parse test key %s: %v", t, err))
+ }
+ testSigners[t], err = ssh.NewSignerFromKey(testPrivateKeys[t])
+ if err != nil {
+ panic(fmt.Sprintf("Unable to create signer for test key %s: %v", t, err))
+ }
+ testPublicKeys[t] = testSigners[t].PublicKey()
+ }
+
+ // Create a cert and sign it for use in tests.
+ testCert := &ssh.Certificate{
+ Nonce: []byte{}, // To pass reflect.DeepEqual after marshal & parse, this must be non-nil
+ ValidPrincipals: []string{"gopher1", "gopher2"}, // increases test coverage
+ ValidAfter: 0, // unix epoch
+ ValidBefore: ssh.CertTimeInfinity, // The end of currently representable time.
+ Reserved: []byte{}, // To pass reflect.DeepEqual after marshal & parse, this must be non-nil
+ Key: testPublicKeys["ecdsa"],
+ SignatureKey: testPublicKeys["rsa"],
+ Permissions: ssh.Permissions{
+ CriticalOptions: map[string]string{},
+ Extensions: map[string]string{},
+ },
+ }
+ testCert.SignCert(rand.Reader, testSigners["rsa"])
+ testPrivateKeys["cert"] = testPrivateKeys["ecdsa"]
+ testSigners["cert"], err = ssh.NewCertSigner(testCert, testSigners["ecdsa"])
+ if err != nil {
+ panic(fmt.Sprintf("Unable to create certificate signer: %v", err))
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata/doc.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata/doc.go
new file mode 100644
index 00000000000..fcae47ca687
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata/doc.go
@@ -0,0 +1,8 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package testdata contains test data shared between the various subpackages
+// of the golang.org/x/crypto/ssh package. Under no circumstances should this
+// data be used for production code.
+package testdata // import "golang.org/x/crypto/ssh/testdata"
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata/keys.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata/keys.go
new file mode 100644
index 00000000000..5ff1c0e0358
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata/keys.go
@@ -0,0 +1,43 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package testdata
+
+var PEMBytes = map[string][]byte{
+ "dsa": []byte(`-----BEGIN DSA PRIVATE KEY-----
+MIIBuwIBAAKBgQD6PDSEyXiI9jfNs97WuM46MSDCYlOqWw80ajN16AohtBncs1YB
+lHk//dQOvCYOsYaE+gNix2jtoRjwXhDsc25/IqQbU1ahb7mB8/rsaILRGIbA5WH3
+EgFtJmXFovDz3if6F6TzvhFpHgJRmLYVR8cqsezL3hEZOvvs2iH7MorkxwIVAJHD
+nD82+lxh2fb4PMsIiaXudAsBAoGAQRf7Q/iaPRn43ZquUhd6WwvirqUj+tkIu6eV
+2nZWYmXLlqFQKEy4Tejl7Wkyzr2OSYvbXLzo7TNxLKoWor6ips0phYPPMyXld14r
+juhT24CrhOzuLMhDduMDi032wDIZG4Y+K7ElU8Oufn8Sj5Wge8r6ANmmVgmFfynr
+FhdYCngCgYEA3ucGJ93/Mx4q4eKRDxcWD3QzWyqpbRVRRV1Vmih9Ha/qC994nJFz
+DQIdjxDIT2Rk2AGzMqFEB68Zc3O+Wcsmz5eWWzEwFxaTwOGWTyDqsDRLm3fD+QYj
+nOwuxb0Kce+gWI8voWcqC9cyRm09jGzu2Ab3Bhtpg8JJ8L7gS3MRZK4CFEx4UAfY
+Fmsr0W6fHB9nhS4/UXM8
+-----END DSA PRIVATE KEY-----
+`),
+ "ecdsa": []byte(`-----BEGIN EC PRIVATE KEY-----
+MHcCAQEEINGWx0zo6fhJ/0EAfrPzVFyFC9s18lBt3cRoEDhS3ARooAoGCCqGSM49
+AwEHoUQDQgAEi9Hdw6KvZcWxfg2IDhA7UkpDtzzt6ZqJXSsFdLd+Kx4S3Sx4cVO+
+6/ZOXRnPmNAlLUqjShUsUBBngG0u2fqEqA==
+-----END EC PRIVATE KEY-----
+`),
+ "rsa": []byte(`-----BEGIN RSA PRIVATE KEY-----
+MIIBOwIBAAJBALdGZxkXDAjsYk10ihwU6Id2KeILz1TAJuoq4tOgDWxEEGeTrcld
+r/ZwVaFzjWzxaf6zQIJbfaSEAhqD5yo72+sCAwEAAQJBAK8PEVU23Wj8mV0QjwcJ
+tZ4GcTUYQL7cF4+ezTCE9a1NrGnCP2RuQkHEKxuTVrxXt+6OF15/1/fuXnxKjmJC
+nxkCIQDaXvPPBi0c7vAxGwNY9726x01/dNbHCE0CBtcotobxpwIhANbbQbh3JHVW
+2haQh4fAG5mhesZKAGcxTyv4mQ7uMSQdAiAj+4dzMpJWdSzQ+qGHlHMIBvVHLkqB
+y2VdEyF7DPCZewIhAI7GOI/6LDIFOvtPo6Bj2nNmyQ1HU6k/LRtNIXi4c9NJAiAr
+rrxx26itVhJmcvoUhOjwuzSlP2bE5VHAvkGB352YBg==
+-----END RSA PRIVATE KEY-----
+`),
+ "user": []byte(`-----BEGIN EC PRIVATE KEY-----
+MHcCAQEEILYCAeq8f7V4vSSypRw7pxy8yz3V5W4qg8kSC3zJhqpQoAoGCCqGSM49
+AwEHoUQDQgAEYcO2xNKiRUYOLEHM7VYAp57HNyKbOdYtHD83Z4hzNPVC4tM5mdGD
+PLL8IEwvYu2wq+lpXfGQnNMbzYf9gspG0w==
+-----END EC PRIVATE KEY-----
+`),
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata_test.go
new file mode 100644
index 00000000000..f2828c1b5fb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/testdata_test.go
@@ -0,0 +1,63 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// IMPLEMENTOR NOTE: To avoid a package loop, this file is in three places:
+// ssh/, ssh/agent, and ssh/test/. It should be kept in sync across all three
+// instances.
+
+package ssh
+
+import (
+ "crypto/rand"
+ "fmt"
+
+ "golang.org/x/crypto/ssh/testdata"
+)
+
+var (
+ testPrivateKeys map[string]interface{}
+ testSigners map[string]Signer
+ testPublicKeys map[string]PublicKey
+)
+
+func init() {
+ var err error
+
+ n := len(testdata.PEMBytes)
+ testPrivateKeys = make(map[string]interface{}, n)
+ testSigners = make(map[string]Signer, n)
+ testPublicKeys = make(map[string]PublicKey, n)
+ for t, k := range testdata.PEMBytes {
+ testPrivateKeys[t], err = ParseRawPrivateKey(k)
+ if err != nil {
+ panic(fmt.Sprintf("Unable to parse test key %s: %v", t, err))
+ }
+ testSigners[t], err = NewSignerFromKey(testPrivateKeys[t])
+ if err != nil {
+ panic(fmt.Sprintf("Unable to create signer for test key %s: %v", t, err))
+ }
+ testPublicKeys[t] = testSigners[t].PublicKey()
+ }
+
+ // Create a cert and sign it for use in tests.
+ testCert := &Certificate{
+ Nonce: []byte{}, // To pass reflect.DeepEqual after marshal & parse, this must be non-nil
+ ValidPrincipals: []string{"gopher1", "gopher2"}, // increases test coverage
+ ValidAfter: 0, // unix epoch
+ ValidBefore: CertTimeInfinity, // The end of currently representable time.
+ Reserved: []byte{}, // To pass reflect.DeepEqual after marshal & parse, this must be non-nil
+ Key: testPublicKeys["ecdsa"],
+ SignatureKey: testPublicKeys["rsa"],
+ Permissions: Permissions{
+ CriticalOptions: map[string]string{},
+ Extensions: map[string]string{},
+ },
+ }
+ testCert.SignCert(rand.Reader, testSigners["rsa"])
+ testPrivateKeys["cert"] = testPrivateKeys["ecdsa"]
+ testSigners["cert"], err = NewCertSigner(testCert, testSigners["ecdsa"])
+ if err != nil {
+ panic(fmt.Sprintf("Unable to create certificate signer: %v", err))
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/transport.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/transport.go
new file mode 100644
index 00000000000..8351d378e7e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/transport.go
@@ -0,0 +1,332 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bufio"
+ "errors"
+ "io"
+)
+
+const (
+ gcmCipherID = "aes128-gcm@openssh.com"
+ aes128cbcID = "aes128-cbc"
+)
+
+// packetConn represents a transport that implements packet-based
+// operations.
+type packetConn interface {
+ // Encrypt and send a packet of data to the remote peer.
+ writePacket(packet []byte) error
+
+ // Read a packet from the connection
+ readPacket() ([]byte, error)
+
+ // Close closes the write-side of the connection.
+ Close() error
+}
+
+// transport is the keyingTransport that implements the SSH packet
+// protocol.
+type transport struct {
+ reader connectionState
+ writer connectionState
+
+ bufReader *bufio.Reader
+ bufWriter *bufio.Writer
+ rand io.Reader
+
+ io.Closer
+
+ // Initial H used for the session ID. Once assigned this does
+ // not change, even during subsequent key exchanges.
+ sessionID []byte
+}
+
+// getSessionID returns the ID of the SSH connection. The return value
+// should not be modified.
+func (t *transport) getSessionID() []byte {
+ if t.sessionID == nil {
+ panic("session ID not set yet")
+ }
+ return t.sessionID
+}
+
+// packetCipher represents a combination of SSH encryption/MAC
+// protocol. A single instance should be used for one direction only.
+type packetCipher interface {
+ // writePacket encrypts the packet and writes it to w. The
+ // contents of the packet are generally scrambled.
+ writePacket(seqnum uint32, w io.Writer, rand io.Reader, packet []byte) error
+
+ // readPacket reads and decrypts a packet of data. The
+ // returned packet may be overwritten by future calls of
+ // readPacket.
+ readPacket(seqnum uint32, r io.Reader) ([]byte, error)
+}
+
+// connectionState represents one side (read or write) of the
+// connection. This is necessary because each direction has its own
+// keys, and can even have its own algorithms.
+type connectionState struct {
+ packetCipher
+ seqNum uint32
+ dir direction
+ pendingKeyChange chan packetCipher
+}
+
+// prepareKeyChange sets up key material for a keychange. The key changes in
+// both directions are triggered by reading and writing a msgNewKey packet
+// respectively.
+func (t *transport) prepareKeyChange(algs *algorithms, kexResult *kexResult) error {
+ if t.sessionID == nil {
+ t.sessionID = kexResult.H
+ }
+
+ kexResult.SessionID = t.sessionID
+
+ ciph, err := newPacketCipher(t.reader.dir, algs.r, kexResult)
+ if err != nil {
+ return err
+ }
+ t.reader.pendingKeyChange <- ciph
+
+ ciph, err = newPacketCipher(t.writer.dir, algs.w, kexResult)
+ if err != nil {
+ return err
+ }
+ t.writer.pendingKeyChange <- ciph
+
+ return nil
+}
+
+// Read and decrypt next packet.
+func (t *transport) readPacket() ([]byte, error) {
+ return t.reader.readPacket(t.bufReader)
+}
+
+func (s *connectionState) readPacket(r *bufio.Reader) ([]byte, error) {
+ packet, err := s.packetCipher.readPacket(s.seqNum, r)
+ s.seqNum++
+ if err == nil && len(packet) == 0 {
+ err = errors.New("ssh: zero length packet")
+ }
+
+ if len(packet) > 0 && packet[0] == msgNewKeys {
+ select {
+ case cipher := <-s.pendingKeyChange:
+ s.packetCipher = cipher
+ default:
+ return nil, errors.New("ssh: got bogus newkeys message.")
+ }
+ }
+
+ // The packet may point to an internal buffer, so copy the
+ // packet out here.
+ fresh := make([]byte, len(packet))
+ copy(fresh, packet)
+
+ return fresh, err
+}
+
+func (t *transport) writePacket(packet []byte) error {
+ return t.writer.writePacket(t.bufWriter, t.rand, packet)
+}
+
+func (s *connectionState) writePacket(w *bufio.Writer, rand io.Reader, packet []byte) error {
+ changeKeys := len(packet) > 0 && packet[0] == msgNewKeys
+
+ err := s.packetCipher.writePacket(s.seqNum, w, rand, packet)
+ if err != nil {
+ return err
+ }
+ if err = w.Flush(); err != nil {
+ return err
+ }
+ s.seqNum++
+ if changeKeys {
+ select {
+ case cipher := <-s.pendingKeyChange:
+ s.packetCipher = cipher
+ default:
+ panic("ssh: no key material for msgNewKeys")
+ }
+ }
+ return err
+}
+
+func newTransport(rwc io.ReadWriteCloser, rand io.Reader, isClient bool) *transport {
+ t := &transport{
+ bufReader: bufio.NewReader(rwc),
+ bufWriter: bufio.NewWriter(rwc),
+ rand: rand,
+ reader: connectionState{
+ packetCipher: &streamPacketCipher{cipher: noneCipher{}},
+ pendingKeyChange: make(chan packetCipher, 1),
+ },
+ writer: connectionState{
+ packetCipher: &streamPacketCipher{cipher: noneCipher{}},
+ pendingKeyChange: make(chan packetCipher, 1),
+ },
+ Closer: rwc,
+ }
+ if isClient {
+ t.reader.dir = serverKeys
+ t.writer.dir = clientKeys
+ } else {
+ t.reader.dir = clientKeys
+ t.writer.dir = serverKeys
+ }
+
+ return t
+}
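+
+// Editorial usage sketch (assumes an established connection conn, e.g. a
+// net.Conn, and crypto/rand for entropy; not part of the upstream file):
+//
+//	t := newTransport(conn, rand.Reader, true) // true: we are the client
+//	packet, err := t.readPacket()              // starts out with the "none" cipher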
+
+type direction struct {
+ ivTag []byte
+ keyTag []byte
+ macKeyTag []byte
+}
+
+var (
+ serverKeys = direction{[]byte{'B'}, []byte{'D'}, []byte{'F'}}
+ clientKeys = direction{[]byte{'A'}, []byte{'C'}, []byte{'E'}}
+)
+
+// generateKeys generates key material for IV, MAC and encryption.
+func generateKeys(d direction, algs directionAlgorithms, kex *kexResult) (iv, key, macKey []byte) {
+ cipherMode := cipherModes[algs.Cipher]
+ macMode := macModes[algs.MAC]
+
+ iv = make([]byte, cipherMode.ivSize)
+ key = make([]byte, cipherMode.keySize)
+ macKey = make([]byte, macMode.keySize)
+
+ generateKeyMaterial(iv, d.ivTag, kex)
+ generateKeyMaterial(key, d.keyTag, kex)
+ generateKeyMaterial(macKey, d.macKeyTag, kex)
+ return
+}
+
+// newPacketCipher creates a packetCipher using the cipher and MAC keys
+// derived from kex.K, kex.H and sessionId, as described in RFC 4253,
+// section 6.4. direction should be either serverKeys (to set up
+// server->client keys) or clientKeys (for client->server keys).
+func newPacketCipher(d direction, algs directionAlgorithms, kex *kexResult) (packetCipher, error) {
+ iv, key, macKey := generateKeys(d, algs, kex)
+
+ if algs.Cipher == gcmCipherID {
+ return newGCMCipher(iv, key, macKey)
+ }
+
+ if algs.Cipher == aes128cbcID {
+ return newAESCBCCipher(iv, key, macKey, algs)
+ }
+
+ c := &streamPacketCipher{
+ mac: macModes[algs.MAC].new(macKey),
+ }
+ c.macResult = make([]byte, c.mac.Size())
+
+ var err error
+ c.cipher, err = cipherModes[algs.Cipher].createStream(key, iv)
+ if err != nil {
+ return nil, err
+ }
+
+ return c, nil
+}
+
+// generateKeyMaterial fills the out slice with key material generated from
+// tag, K, H and sessionId, as specified in RFC 4253, section 7.2.
+func generateKeyMaterial(out, tag []byte, r *kexResult) {
+ var digestsSoFar []byte
+
+ h := r.Hash.New()
+ for len(out) > 0 {
+ h.Reset()
+ h.Write(r.K)
+ h.Write(r.H)
+
+ if len(digestsSoFar) == 0 {
+ h.Write(tag)
+ h.Write(r.SessionID)
+ } else {
+ h.Write(digestsSoFar)
+ }
+
+ digest := h.Sum(nil)
+ n := copy(out, digest)
+ out = out[n:]
+ if len(out) > 0 {
+ digestsSoFar = append(digestsSoFar, digest...)
+ }
+ }
+}
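+
+// Editorial sketch mirroring generateKeys/newPacketCipher above, for one
+// hypothetical negotiated directionAlgorithms value algs and a completed
+// key exchange kex (not part of the upstream file):
+//
+//	iv, key, macKey := generateKeys(clientKeys, algs, kex)
+//	// iv, key and macKey are sized via cipherModes[algs.Cipher] and
+//	// macModes[algs.MAC], and filled as specified in RFC 4253, section 7.2.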
+
+const packageVersion = "SSH-2.0-Go"
+
+// Sends and receives a version line. The versionLine string should
+// be US ASCII, start with "SSH-2.0-", and should not include a
+// newline. exchangeVersions returns the other side's version line.
+func exchangeVersions(rw io.ReadWriter, versionLine []byte) (them []byte, err error) {
+ // Contrary to the RFC, we do not ignore lines that don't
+ // start with "SSH-2.0-" to make the library usable with
+ // nonconforming servers.
+ for _, c := range versionLine {
+ // The spec disallows non US-ASCII chars, and
+ // specifically forbids null chars.
+ if c < 32 {
+ return nil, errors.New("ssh: junk character in version line")
+ }
+ }
+ if _, err = rw.Write(append(versionLine, '\r', '\n')); err != nil {
+ return
+ }
+
+ them, err = readVersion(rw)
+ return them, err
+}
+
+// maxVersionStringBytes is the maximum number of bytes that we'll
+// accept as a version string. RFC 4253 section 4.2 limits this to 255
+// chars.
+const maxVersionStringBytes = 255
+
+// Read version string as specified by RFC 4253, section 4.2.
+func readVersion(r io.Reader) ([]byte, error) {
+ versionString := make([]byte, 0, 64)
+ var ok bool
+ var buf [1]byte
+
+ for len(versionString) < maxVersionStringBytes {
+ _, err := io.ReadFull(r, buf[:])
+ if err != nil {
+ return nil, err
+ }
+ // The RFC says that the version should be terminated with \r\n
+ // but several SSH servers actually only send a \n.
+ if buf[0] == '\n' {
+ ok = true
+ break
+ }
+
+ // Non-ASCII chars are disallowed by the spec, but we are
+ // lenient here, since Go strings are not null-terminated and
+ // stray bytes do no harm.
+
+ // The RFC allows a comment after a space, however,
+ // all of it (version and comments) goes into the
+ // session hash.
+ versionString = append(versionString, buf[0])
+ }
+
+ if !ok {
+ return nil, errors.New("ssh: overflow reading version string")
+ }
+
+ // There might be a '\r' on the end which we should remove.
+ if len(versionString) > 0 && versionString[len(versionString)-1] == '\r' {
+ versionString = versionString[:len(versionString)-1]
+ }
+ return versionString, nil
+}
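+
+// Editorial usage sketch (conn is a hypothetical, already established
+// net.Conn; not part of the upstream file):
+//
+//	them, err := exchangeVersions(conn, []byte(packageVersion))
+//	// On success, them holds the peer's version line with the trailing
+//	// newline (and any '\r') stripped.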
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/transport_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/transport_test.go
new file mode 100644
index 00000000000..92d83abf93f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/ssh/transport_test.go
@@ -0,0 +1,109 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssh
+
+import (
+ "bytes"
+ "crypto/rand"
+ "encoding/binary"
+ "strings"
+ "testing"
+)
+
+func TestReadVersion(t *testing.T) {
+ longversion := strings.Repeat("SSH-2.0-bla", 50)[:253]
+ cases := map[string]string{
+ "SSH-2.0-bla\r\n": "SSH-2.0-bla",
+ "SSH-2.0-bla\n": "SSH-2.0-bla",
+ longversion + "\r\n": longversion,
+ }
+
+ for in, want := range cases {
+ result, err := readVersion(bytes.NewBufferString(in))
+ if err != nil {
+ t.Errorf("readVersion(%q): %s", in, err)
+ }
+ got := string(result)
+ if got != want {
+ t.Errorf("got %q, want %q", got, want)
+ }
+ }
+}
+
+func TestReadVersionError(t *testing.T) {
+ longversion := strings.Repeat("SSH-2.0-bla", 50)[:253]
+ cases := []string{
+ longversion + "too-long\r\n",
+ }
+ for _, in := range cases {
+ if _, err := readVersion(bytes.NewBufferString(in)); err == nil {
+ t.Errorf("readVersion(%q) should have failed", in)
+ }
+ }
+}
+
+func TestExchangeVersionsBasic(t *testing.T) {
+ v := "SSH-2.0-bla"
+ buf := bytes.NewBufferString(v + "\r\n")
+ them, err := exchangeVersions(buf, []byte("xyz"))
+ if err != nil {
+ t.Errorf("exchangeVersions: %v", err)
+ }
+
+ if want := "SSH-2.0-bla"; string(them) != want {
+ t.Errorf("got %q want %q for our version", them, want)
+ }
+}
+
+func TestExchangeVersions(t *testing.T) {
+ cases := []string{
+ "not\x000allowed",
+ "not allowed\n",
+ }
+ for _, c := range cases {
+ buf := bytes.NewBufferString("SSH-2.0-bla\r\n")
+ if _, err := exchangeVersions(buf, []byte(c)); err == nil {
+ t.Errorf("exchangeVersions(%q): should have failed", c)
+ }
+ }
+}
+
+type closerBuffer struct {
+ bytes.Buffer
+}
+
+func (b *closerBuffer) Close() error {
+ return nil
+}
+
+func TestTransportMaxPacketWrite(t *testing.T) {
+ buf := &closerBuffer{}
+ tr := newTransport(buf, rand.Reader, true)
+ huge := make([]byte, maxPacket+1)
+ err := tr.writePacket(huge)
+ if err == nil {
+ t.Errorf("transport accepted write for a huge packet.")
+ }
+}
+
+func TestTransportMaxPacketReader(t *testing.T) {
+ var header [5]byte
+ huge := make([]byte, maxPacket+128)
+ binary.BigEndian.PutUint32(header[0:], uint32(len(huge)))
+ // padding.
+ header[4] = 0
+
+ buf := &closerBuffer{}
+ buf.Write(header[:])
+ buf.Write(huge)
+
+ tr := newTransport(buf, rand.Reader, true)
+ _, err := tr.readPacket()
+ if err == nil {
+ t.Errorf("transport succeeded reading huge packet.")
+ } else if !strings.Contains(err.Error(), "large") {
+ t.Errorf("got %q, should mention %q", err.Error(), "large")
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/tea/cipher.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/tea/cipher.go
new file mode 100644
index 00000000000..9c13d12a22c
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/tea/cipher.go
@@ -0,0 +1,109 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package tea implements the TEA algorithm, as defined in Needham and
+// Wheeler's 1994 technical report, “TEA, a Tiny Encryption Algorithm”. See
+// http://www.cix.co.uk/~klockstone/tea.pdf for details.
+package tea
+
+import (
+ "crypto/cipher"
+ "encoding/binary"
+ "errors"
+)
+
+const (
+ // BlockSize is the size of a TEA block, in bytes.
+ BlockSize = 8
+
+ // KeySize is the size of a TEA key, in bytes.
+ KeySize = 16
+
+ // delta is the TEA key schedule constant.
+ delta = 0x9e3779b9
+
+ // numRounds is the standard number of rounds in TEA.
+ numRounds = 64
+)
+
+// tea is an instance of the TEA cipher with a particular key.
+type tea struct {
+ key [16]byte
+ rounds int
+}
+
+// NewCipher returns an instance of the TEA cipher with the standard number of
+// rounds. The key argument must be 16 bytes long.
+func NewCipher(key []byte) (cipher.Block, error) {
+ return NewCipherWithRounds(key, numRounds)
+}
+
+// NewCipherWithRounds returns an instance of the TEA cipher with a given
+// number of rounds, which must be even. The key argument must be 16 bytes
+// long.
+func NewCipherWithRounds(key []byte, rounds int) (cipher.Block, error) {
+ if len(key) != 16 {
+ return nil, errors.New("tea: incorrect key size")
+ }
+
+ if rounds&1 != 0 {
+ return nil, errors.New("tea: odd number of rounds specified")
+ }
+
+ c := &tea{
+ rounds: rounds,
+ }
+ copy(c.key[:], key)
+
+ return c, nil
+}
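+
+// Editorial usage sketch (key is an assumed 16-byte key and src an 8-byte
+// plaintext block; not part of the upstream file):
+//
+//	c, err := NewCipher(key)
+//	if err != nil {
+//		// handle the error
+//	}
+//	var dst [BlockSize]byte
+//	c.Encrypt(dst[:], src)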
+
+// BlockSize returns the TEA block size, which is eight bytes. It is necessary
+// to satisfy the Block interface in the package "crypto/cipher".
+func (*tea) BlockSize() int {
+ return BlockSize
+}
+
+// Encrypt encrypts the 8 byte buffer src using the key in t and stores the
+// result in dst. Note that for amounts of data larger than a block, it is not
+// safe to just call Encrypt on successive blocks; instead, use an encryption
+// mode like CBC (see crypto/cipher/cbc.go).
+func (t *tea) Encrypt(dst, src []byte) {
+ e := binary.BigEndian
+ v0, v1 := e.Uint32(src), e.Uint32(src[4:])
+ k0, k1, k2, k3 := e.Uint32(t.key[0:]), e.Uint32(t.key[4:]), e.Uint32(t.key[8:]), e.Uint32(t.key[12:])
+
+ sum := uint32(0)
+ delta := uint32(delta)
+
+ for i := 0; i < t.rounds/2; i++ {
+ sum += delta
+ v0 += ((v1 << 4) + k0) ^ (v1 + sum) ^ ((v1 >> 5) + k1)
+ v1 += ((v0 << 4) + k2) ^ (v0 + sum) ^ ((v0 >> 5) + k3)
+ }
+
+ e.PutUint32(dst, v0)
+ e.PutUint32(dst[4:], v1)
+}
+
+// Decrypt decrypts the 8 byte buffer src using the key in t and stores the
+// result in dst.
+func (t *tea) Decrypt(dst, src []byte) {
+ e := binary.BigEndian
+ v0, v1 := e.Uint32(src), e.Uint32(src[4:])
+ k0, k1, k2, k3 := e.Uint32(t.key[0:]), e.Uint32(t.key[4:]), e.Uint32(t.key[8:]), e.Uint32(t.key[12:])
+
+ delta := uint32(delta)
+ sum := delta * uint32(t.rounds/2) // in general, sum = delta * n
+
+ for i := 0; i < t.rounds/2; i++ {
+ v1 -= ((v0 << 4) + k2) ^ (v0 + sum) ^ ((v0 >> 5) + k3)
+ v0 -= ((v1 << 4) + k0) ^ (v1 + sum) ^ ((v1 >> 5) + k1)
+ sum -= delta
+ }
+
+ e.PutUint32(dst, v0)
+ e.PutUint32(dst[4:], v1)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/tea/tea_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/tea/tea_test.go
new file mode 100644
index 00000000000..eb98d1e0e03
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/tea/tea_test.go
@@ -0,0 +1,93 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package tea
+
+import (
+ "bytes"
+ "testing"
+)
+
+// A sample test key for when we just want to initialize a cipher
+var testKey = []byte{0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF}
+
+// Test that the block size for tea is correct
+func TestBlocksize(t *testing.T) {
+ c, err := NewCipher(testKey)
+ if err != nil {
+ t.Fatalf("NewCipher returned error: %s", err)
+ }
+
+ if result := c.BlockSize(); result != BlockSize {
+ t.Errorf("cipher.BlockSize returned %d, but expected %d", result, BlockSize)
+ }
+}
+
+// Test that invalid key sizes return an error
+func TestInvalidKeySize(t *testing.T) {
+ var key [KeySize + 1]byte
+
+ if _, err := NewCipher(key[:]); err == nil {
+ t.Errorf("invalid key size %d didn't result in an error.", len(key))
+ }
+
+ if _, err := NewCipher(key[:KeySize-1]); err == nil {
+ t.Errorf("invalid key size %d didn't result in an error.", KeySize-1)
+ }
+}
+
+// Test Vectors
+type teaTest struct {
+ rounds int
+ key []byte
+ plaintext []byte
+ ciphertext []byte
+}
+
+var teaTests = []teaTest{
+ // These were sourced from https://github.com/froydnj/ironclad/blob/master/testing/test-vectors/tea.testvec
+ {
+ numRounds,
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x41, 0xea, 0x3a, 0x0a, 0x94, 0xba, 0xa9, 0x40},
+ },
+ {
+ numRounds,
+ []byte{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff},
+ []byte{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff},
+ []byte{0x31, 0x9b, 0xbe, 0xfb, 0x01, 0x6a, 0xbd, 0xb2},
+ },
+ {
+ 16,
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0xed, 0x28, 0x5d, 0xa1, 0x45, 0x5b, 0x33, 0xc1},
+ },
+}
+
+// Test encryption
+func TestCipherEncrypt(t *testing.T) {
+ // Test encryption and decryption round trips for each test vector
+ for i, test := range teaTests {
+ c, err := NewCipherWithRounds(test.key, test.rounds)
+ if err != nil {
+ t.Fatalf("#%d: NewCipher returned error: %s", i, err)
+ }
+
+ var ciphertext [BlockSize]byte
+ c.Encrypt(ciphertext[:], test.plaintext)
+
+ if !bytes.Equal(ciphertext[:], test.ciphertext) {
+ t.Errorf("#%d: incorrect ciphertext. Got %x, wanted %x", i, ciphertext, test.ciphertext)
+ }
+
+ var plaintext2 [BlockSize]byte
+ c.Decrypt(plaintext2[:], ciphertext[:])
+
+ if !bytes.Equal(plaintext2[:], test.plaintext) {
+ t.Errorf("#%d: incorrect plaintext. Got %x, wanted %x", i, plaintext2, test.plaintext)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/twofish/twofish.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/twofish/twofish.go
new file mode 100644
index 00000000000..376fa0ec2d9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/twofish/twofish.go
@@ -0,0 +1,342 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package twofish implements Bruce Schneier's Twofish encryption algorithm.
+package twofish // import "golang.org/x/crypto/twofish"
+
+// Twofish is defined in http://www.schneier.com/paper-twofish-paper.pdf [TWOFISH]
+
+// This code is a port of the LibTom C implementation.
+// See http://libtom.org/?page=features&newsitems=5&whatfile=crypt.
+// LibTomCrypt is free for all purposes under the public domain.
+// It was heavily inspired by the go blowfish package.
+
+import "strconv"
+
+// BlockSize is the constant block size of Twofish.
+const BlockSize = 16
+
+const mdsPolynomial = 0x169 // x^8 + x^6 + x^5 + x^3 + 1, see [TWOFISH] 4.2
+const rsPolynomial = 0x14d // x^8 + x^6 + x^3 + x^2 + 1, see [TWOFISH] 4.3
+
+// A Cipher is an instance of Twofish encryption using a particular key.
+type Cipher struct {
+ s [4][256]uint32
+ k [40]uint32
+}
+
+type KeySizeError int
+
+func (k KeySizeError) Error() string {
+ return "crypto/twofish: invalid key size " + strconv.Itoa(int(k))
+}
+
+// NewCipher creates and returns a Cipher.
+// The key argument should be the Twofish key, 16, 24 or 32 bytes.
+func NewCipher(key []byte) (*Cipher, error) {
+ keylen := len(key)
+
+ if keylen != 16 && keylen != 24 && keylen != 32 {
+ return nil, KeySizeError(keylen)
+ }
+
+ // k is the number of 64 bit words in key
+ k := keylen / 8
+
+ // Create the S[..] words
+ var S [4 * 4]byte
+ for i := 0; i < k; i++ {
+ // Computes [y0 y1 y2 y3] = rs . [x0 x1 x2 x3 x4 x5 x6 x7]
+ for j, rsRow := range rs {
+ for k, rsVal := range rsRow {
+ S[4*i+j] ^= gfMult(key[8*i+k], rsVal, rsPolynomial)
+ }
+ }
+ }
+
+ // Calculate subkeys
+ c := new(Cipher)
+ var tmp [4]byte
+ for i := byte(0); i < 20; i++ {
+ // A = h(p * 2x, Me)
+ for j := range tmp {
+ tmp[j] = 2 * i
+ }
+ A := h(tmp[:], key, 0)
+
+ // B = rolc(h(p * (2x + 1), Mo), 8)
+ for j := range tmp {
+ tmp[j] = 2*i + 1
+ }
+ B := h(tmp[:], key, 1)
+ B = rol(B, 8)
+
+ c.k[2*i] = A + B
+
+ // K[2i+1] = (A + 2B) <<< 9
+ c.k[2*i+1] = rol(2*B+A, 9)
+ }
+
+ // Calculate sboxes
+ switch k {
+ case 2:
+ for i := range c.s[0] {
+ c.s[0][i] = mdsColumnMult(sbox[1][sbox[0][sbox[0][byte(i)]^S[0]]^S[4]], 0)
+ c.s[1][i] = mdsColumnMult(sbox[0][sbox[0][sbox[1][byte(i)]^S[1]]^S[5]], 1)
+ c.s[2][i] = mdsColumnMult(sbox[1][sbox[1][sbox[0][byte(i)]^S[2]]^S[6]], 2)
+ c.s[3][i] = mdsColumnMult(sbox[0][sbox[1][sbox[1][byte(i)]^S[3]]^S[7]], 3)
+ }
+ case 3:
+ for i := range c.s[0] {
+ c.s[0][i] = mdsColumnMult(sbox[1][sbox[0][sbox[0][sbox[1][byte(i)]^S[0]]^S[4]]^S[8]], 0)
+ c.s[1][i] = mdsColumnMult(sbox[0][sbox[0][sbox[1][sbox[1][byte(i)]^S[1]]^S[5]]^S[9]], 1)
+ c.s[2][i] = mdsColumnMult(sbox[1][sbox[1][sbox[0][sbox[0][byte(i)]^S[2]]^S[6]]^S[10]], 2)
+ c.s[3][i] = mdsColumnMult(sbox[0][sbox[1][sbox[1][sbox[0][byte(i)]^S[3]]^S[7]]^S[11]], 3)
+ }
+ default:
+ for i := range c.s[0] {
+ c.s[0][i] = mdsColumnMult(sbox[1][sbox[0][sbox[0][sbox[1][sbox[1][byte(i)]^S[0]]^S[4]]^S[8]]^S[12]], 0)
+ c.s[1][i] = mdsColumnMult(sbox[0][sbox[0][sbox[1][sbox[1][sbox[0][byte(i)]^S[1]]^S[5]]^S[9]]^S[13]], 1)
+ c.s[2][i] = mdsColumnMult(sbox[1][sbox[1][sbox[0][sbox[0][sbox[0][byte(i)]^S[2]]^S[6]]^S[10]]^S[14]], 2)
+ c.s[3][i] = mdsColumnMult(sbox[0][sbox[1][sbox[1][sbox[0][sbox[1][byte(i)]^S[3]]^S[7]]^S[11]]^S[15]], 3)
+ }
+ }
+
+ return c, nil
+}
+
+// BlockSize returns the Twofish block size, 16 bytes.
+func (c *Cipher) BlockSize() int { return BlockSize }
+
+// store32l stores src in dst in little-endian form.
+func store32l(dst []byte, src uint32) {
+ dst[0] = byte(src)
+ dst[1] = byte(src >> 8)
+ dst[2] = byte(src >> 16)
+ dst[3] = byte(src >> 24)
+ return
+}
+
+// load32l reads a little-endian uint32 from src.
+func load32l(src []byte) uint32 {
+ return uint32(src[0]) | uint32(src[1])<<8 | uint32(src[2])<<16 | uint32(src[3])<<24
+}
+
+// rol returns x after a left circular rotation of y bits.
+func rol(x, y uint32) uint32 {
+ return (x << (y & 31)) | (x >> (32 - (y & 31)))
+}
+
+// ror returns x after a right circular rotation of y bits.
+func ror(x, y uint32) uint32 {
+ return (x >> (y & 31)) | (x << (32 - (y & 31)))
+}
+
+// The RS matrix. See [TWOFISH] 4.3
+var rs = [4][8]byte{
+ {0x01, 0xA4, 0x55, 0x87, 0x5A, 0x58, 0xDB, 0x9E},
+ {0xA4, 0x56, 0x82, 0xF3, 0x1E, 0xC6, 0x68, 0xE5},
+ {0x02, 0xA1, 0xFC, 0xC1, 0x47, 0xAE, 0x3D, 0x19},
+ {0xA4, 0x55, 0x87, 0x5A, 0x58, 0xDB, 0x9E, 0x03},
+}
+
+// sbox tables
+var sbox = [2][256]byte{
+ {
+ 0xa9, 0x67, 0xb3, 0xe8, 0x04, 0xfd, 0xa3, 0x76, 0x9a, 0x92, 0x80, 0x78, 0xe4, 0xdd, 0xd1, 0x38,
+ 0x0d, 0xc6, 0x35, 0x98, 0x18, 0xf7, 0xec, 0x6c, 0x43, 0x75, 0x37, 0x26, 0xfa, 0x13, 0x94, 0x48,
+ 0xf2, 0xd0, 0x8b, 0x30, 0x84, 0x54, 0xdf, 0x23, 0x19, 0x5b, 0x3d, 0x59, 0xf3, 0xae, 0xa2, 0x82,
+ 0x63, 0x01, 0x83, 0x2e, 0xd9, 0x51, 0x9b, 0x7c, 0xa6, 0xeb, 0xa5, 0xbe, 0x16, 0x0c, 0xe3, 0x61,
+ 0xc0, 0x8c, 0x3a, 0xf5, 0x73, 0x2c, 0x25, 0x0b, 0xbb, 0x4e, 0x89, 0x6b, 0x53, 0x6a, 0xb4, 0xf1,
+ 0xe1, 0xe6, 0xbd, 0x45, 0xe2, 0xf4, 0xb6, 0x66, 0xcc, 0x95, 0x03, 0x56, 0xd4, 0x1c, 0x1e, 0xd7,
+ 0xfb, 0xc3, 0x8e, 0xb5, 0xe9, 0xcf, 0xbf, 0xba, 0xea, 0x77, 0x39, 0xaf, 0x33, 0xc9, 0x62, 0x71,
+ 0x81, 0x79, 0x09, 0xad, 0x24, 0xcd, 0xf9, 0xd8, 0xe5, 0xc5, 0xb9, 0x4d, 0x44, 0x08, 0x86, 0xe7,
+ 0xa1, 0x1d, 0xaa, 0xed, 0x06, 0x70, 0xb2, 0xd2, 0x41, 0x7b, 0xa0, 0x11, 0x31, 0xc2, 0x27, 0x90,
+ 0x20, 0xf6, 0x60, 0xff, 0x96, 0x5c, 0xb1, 0xab, 0x9e, 0x9c, 0x52, 0x1b, 0x5f, 0x93, 0x0a, 0xef,
+ 0x91, 0x85, 0x49, 0xee, 0x2d, 0x4f, 0x8f, 0x3b, 0x47, 0x87, 0x6d, 0x46, 0xd6, 0x3e, 0x69, 0x64,
+ 0x2a, 0xce, 0xcb, 0x2f, 0xfc, 0x97, 0x05, 0x7a, 0xac, 0x7f, 0xd5, 0x1a, 0x4b, 0x0e, 0xa7, 0x5a,
+ 0x28, 0x14, 0x3f, 0x29, 0x88, 0x3c, 0x4c, 0x02, 0xb8, 0xda, 0xb0, 0x17, 0x55, 0x1f, 0x8a, 0x7d,
+ 0x57, 0xc7, 0x8d, 0x74, 0xb7, 0xc4, 0x9f, 0x72, 0x7e, 0x15, 0x22, 0x12, 0x58, 0x07, 0x99, 0x34,
+ 0x6e, 0x50, 0xde, 0x68, 0x65, 0xbc, 0xdb, 0xf8, 0xc8, 0xa8, 0x2b, 0x40, 0xdc, 0xfe, 0x32, 0xa4,
+ 0xca, 0x10, 0x21, 0xf0, 0xd3, 0x5d, 0x0f, 0x00, 0x6f, 0x9d, 0x36, 0x42, 0x4a, 0x5e, 0xc1, 0xe0,
+ },
+ {
+ 0x75, 0xf3, 0xc6, 0xf4, 0xdb, 0x7b, 0xfb, 0xc8, 0x4a, 0xd3, 0xe6, 0x6b, 0x45, 0x7d, 0xe8, 0x4b,
+ 0xd6, 0x32, 0xd8, 0xfd, 0x37, 0x71, 0xf1, 0xe1, 0x30, 0x0f, 0xf8, 0x1b, 0x87, 0xfa, 0x06, 0x3f,
+ 0x5e, 0xba, 0xae, 0x5b, 0x8a, 0x00, 0xbc, 0x9d, 0x6d, 0xc1, 0xb1, 0x0e, 0x80, 0x5d, 0xd2, 0xd5,
+ 0xa0, 0x84, 0x07, 0x14, 0xb5, 0x90, 0x2c, 0xa3, 0xb2, 0x73, 0x4c, 0x54, 0x92, 0x74, 0x36, 0x51,
+ 0x38, 0xb0, 0xbd, 0x5a, 0xfc, 0x60, 0x62, 0x96, 0x6c, 0x42, 0xf7, 0x10, 0x7c, 0x28, 0x27, 0x8c,
+ 0x13, 0x95, 0x9c, 0xc7, 0x24, 0x46, 0x3b, 0x70, 0xca, 0xe3, 0x85, 0xcb, 0x11, 0xd0, 0x93, 0xb8,
+ 0xa6, 0x83, 0x20, 0xff, 0x9f, 0x77, 0xc3, 0xcc, 0x03, 0x6f, 0x08, 0xbf, 0x40, 0xe7, 0x2b, 0xe2,
+ 0x79, 0x0c, 0xaa, 0x82, 0x41, 0x3a, 0xea, 0xb9, 0xe4, 0x9a, 0xa4, 0x97, 0x7e, 0xda, 0x7a, 0x17,
+ 0x66, 0x94, 0xa1, 0x1d, 0x3d, 0xf0, 0xde, 0xb3, 0x0b, 0x72, 0xa7, 0x1c, 0xef, 0xd1, 0x53, 0x3e,
+ 0x8f, 0x33, 0x26, 0x5f, 0xec, 0x76, 0x2a, 0x49, 0x81, 0x88, 0xee, 0x21, 0xc4, 0x1a, 0xeb, 0xd9,
+ 0xc5, 0x39, 0x99, 0xcd, 0xad, 0x31, 0x8b, 0x01, 0x18, 0x23, 0xdd, 0x1f, 0x4e, 0x2d, 0xf9, 0x48,
+ 0x4f, 0xf2, 0x65, 0x8e, 0x78, 0x5c, 0x58, 0x19, 0x8d, 0xe5, 0x98, 0x57, 0x67, 0x7f, 0x05, 0x64,
+ 0xaf, 0x63, 0xb6, 0xfe, 0xf5, 0xb7, 0x3c, 0xa5, 0xce, 0xe9, 0x68, 0x44, 0xe0, 0x4d, 0x43, 0x69,
+ 0x29, 0x2e, 0xac, 0x15, 0x59, 0xa8, 0x0a, 0x9e, 0x6e, 0x47, 0xdf, 0x34, 0x35, 0x6a, 0xcf, 0xdc,
+ 0x22, 0xc9, 0xc0, 0x9b, 0x89, 0xd4, 0xed, 0xab, 0x12, 0xa2, 0x0d, 0x52, 0xbb, 0x02, 0x2f, 0xa9,
+ 0xd7, 0x61, 0x1e, 0xb4, 0x50, 0x04, 0xf6, 0xc2, 0x16, 0x25, 0x86, 0x56, 0x55, 0x09, 0xbe, 0x91,
+ },
+}
+
+// gfMult returns a·b in GF(2^8)/p
+func gfMult(a, b byte, p uint32) byte {
+ B := [2]uint32{0, uint32(b)}
+ P := [2]uint32{0, p}
+ var result uint32
+
+ // branchless GF multiplier
+ for i := 0; i < 7; i++ {
+ result ^= B[a&1]
+ a >>= 1
+ B[1] = P[B[1]>>7] ^ (B[1] << 1)
+ }
+ result ^= B[a&1]
+ return byte(result)
+}
+
+// mdsColumnMult calculates y{col} where [y0 y1 y2 y3] = MDS · [x0]
+func mdsColumnMult(in byte, col int) uint32 {
+ mul01 := in
+ mul5B := gfMult(in, 0x5B, mdsPolynomial)
+ mulEF := gfMult(in, 0xEF, mdsPolynomial)
+
+ switch col {
+ case 0:
+ return uint32(mul01) | uint32(mul5B)<<8 | uint32(mulEF)<<16 | uint32(mulEF)<<24
+ case 1:
+ return uint32(mulEF) | uint32(mulEF)<<8 | uint32(mul5B)<<16 | uint32(mul01)<<24
+ case 2:
+ return uint32(mul5B) | uint32(mulEF)<<8 | uint32(mul01)<<16 | uint32(mulEF)<<24
+ case 3:
+ return uint32(mul5B) | uint32(mul01)<<8 | uint32(mulEF)<<16 | uint32(mul5B)<<24
+ }
+
+ panic("unreachable")
+}
+
+// h implements the S-box generation function. See [TWOFISH] 4.3.5
+func h(in, key []byte, offset int) uint32 {
+ var y [4]byte
+ for x := range y {
+ y[x] = in[x]
+ }
+ switch len(key) / 8 {
+ case 4:
+ y[0] = sbox[1][y[0]] ^ key[4*(6+offset)+0]
+ y[1] = sbox[0][y[1]] ^ key[4*(6+offset)+1]
+ y[2] = sbox[0][y[2]] ^ key[4*(6+offset)+2]
+ y[3] = sbox[1][y[3]] ^ key[4*(6+offset)+3]
+ fallthrough
+ case 3:
+ y[0] = sbox[1][y[0]] ^ key[4*(4+offset)+0]
+ y[1] = sbox[1][y[1]] ^ key[4*(4+offset)+1]
+ y[2] = sbox[0][y[2]] ^ key[4*(4+offset)+2]
+ y[3] = sbox[0][y[3]] ^ key[4*(4+offset)+3]
+ fallthrough
+ case 2:
+ y[0] = sbox[1][sbox[0][sbox[0][y[0]]^key[4*(2+offset)+0]]^key[4*(0+offset)+0]]
+ y[1] = sbox[0][sbox[0][sbox[1][y[1]]^key[4*(2+offset)+1]]^key[4*(0+offset)+1]]
+ y[2] = sbox[1][sbox[1][sbox[0][y[2]]^key[4*(2+offset)+2]]^key[4*(0+offset)+2]]
+ y[3] = sbox[0][sbox[1][sbox[1][y[3]]^key[4*(2+offset)+3]]^key[4*(0+offset)+3]]
+ }
+ // [y0 y1 y2 y3] = MDS . [x0 x1 x2 x3]
+ var mdsMult uint32
+ for i := range y {
+ mdsMult ^= mdsColumnMult(y[i], i)
+ }
+ return mdsMult
+}
+
+// Encrypt encrypts a 16-byte block from src to dst, which may overlap.
+// Note that for amounts of data larger than a block,
+// it is not safe to just call Encrypt on successive blocks;
+// instead, use an encryption mode like CBC (see crypto/cipher/cbc.go).
+func (c *Cipher) Encrypt(dst, src []byte) {
+ S1 := c.s[0]
+ S2 := c.s[1]
+ S3 := c.s[2]
+ S4 := c.s[3]
+
+ // Load input
+ ia := load32l(src[0:4])
+ ib := load32l(src[4:8])
+ ic := load32l(src[8:12])
+ id := load32l(src[12:16])
+
+ // Pre-whitening
+ ia ^= c.k[0]
+ ib ^= c.k[1]
+ ic ^= c.k[2]
+ id ^= c.k[3]
+
+ for i := 0; i < 8; i++ {
+ k := c.k[8+i*4 : 12+i*4]
+ t2 := S2[byte(ib)] ^ S3[byte(ib>>8)] ^ S4[byte(ib>>16)] ^ S1[byte(ib>>24)]
+ t1 := S1[byte(ia)] ^ S2[byte(ia>>8)] ^ S3[byte(ia>>16)] ^ S4[byte(ia>>24)] + t2
+ ic = ror(ic^(t1+k[0]), 1)
+ id = rol(id, 1) ^ (t2 + t1 + k[1])
+
+ t2 = S2[byte(id)] ^ S3[byte(id>>8)] ^ S4[byte(id>>16)] ^ S1[byte(id>>24)]
+ t1 = S1[byte(ic)] ^ S2[byte(ic>>8)] ^ S3[byte(ic>>16)] ^ S4[byte(ic>>24)] + t2
+ ia = ror(ia^(t1+k[2]), 1)
+ ib = rol(ib, 1) ^ (t2 + t1 + k[3])
+ }
+
+ // Output with "undo last swap"
+ ta := ic ^ c.k[4]
+ tb := id ^ c.k[5]
+ tc := ia ^ c.k[6]
+ td := ib ^ c.k[7]
+
+ store32l(dst[0:4], ta)
+ store32l(dst[4:8], tb)
+ store32l(dst[8:12], tc)
+ store32l(dst[12:16], td)
+}
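+
+// Editorial sketch: because *Cipher satisfies crypto/cipher.Block, larger
+// messages should go through a mode from crypto/cipher rather than calling
+// Encrypt block by block. Assuming "crypto/cipher" is imported, key is a
+// valid Twofish key, iv a 16-byte IV and buf a multiple of BlockSize
+// (not part of the upstream file):
+//
+//	block, _ := NewCipher(key)
+//	mode := cipher.NewCBCEncrypter(block, iv)
+//	mode.CryptBlocks(buf, buf)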
+
+// Decrypt decrypts a 16-byte block from src to dst, which may overlap.
+func (c *Cipher) Decrypt(dst, src []byte) {
+ S1 := c.s[0]
+ S2 := c.s[1]
+ S3 := c.s[2]
+ S4 := c.s[3]
+
+ // Load input
+ ta := load32l(src[0:4])
+ tb := load32l(src[4:8])
+ tc := load32l(src[8:12])
+ td := load32l(src[12:16])
+
+ // Undo the "undo last swap" from Encrypt
+ ia := tc ^ c.k[6]
+ ib := td ^ c.k[7]
+ ic := ta ^ c.k[4]
+ id := tb ^ c.k[5]
+
+ for i := 8; i > 0; i-- {
+ k := c.k[4+i*4 : 8+i*4]
+ t2 := S2[byte(id)] ^ S3[byte(id>>8)] ^ S4[byte(id>>16)] ^ S1[byte(id>>24)]
+ t1 := S1[byte(ic)] ^ S2[byte(ic>>8)] ^ S3[byte(ic>>16)] ^ S4[byte(ic>>24)] + t2
+ ia = rol(ia, 1) ^ (t1 + k[2])
+ ib = ror(ib^(t2+t1+k[3]), 1)
+
+ t2 = S2[byte(ib)] ^ S3[byte(ib>>8)] ^ S4[byte(ib>>16)] ^ S1[byte(ib>>24)]
+ t1 = S1[byte(ia)] ^ S2[byte(ia>>8)] ^ S3[byte(ia>>16)] ^ S4[byte(ia>>24)] + t2
+ ic = rol(ic, 1) ^ (t1 + k[0])
+ id = ror(id^(t2+t1+k[1]), 1)
+ }
+
+ // Undo pre-whitening
+ ia ^= c.k[0]
+ ib ^= c.k[1]
+ ic ^= c.k[2]
+ id ^= c.k[3]
+
+ store32l(dst[0:4], ia)
+ store32l(dst[4:8], ib)
+ store32l(dst[8:12], ic)
+ store32l(dst[12:16], id)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/twofish/twofish_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/twofish/twofish_test.go
new file mode 100644
index 00000000000..303081f3f28
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/twofish/twofish_test.go
@@ -0,0 +1,129 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package twofish
+
+import (
+ "bytes"
+ "testing"
+)
+
+var qbox = [2][4][16]byte{
+ {
+ {0x8, 0x1, 0x7, 0xD, 0x6, 0xF, 0x3, 0x2, 0x0, 0xB, 0x5, 0x9, 0xE, 0xC, 0xA, 0x4},
+ {0xE, 0xC, 0xB, 0x8, 0x1, 0x2, 0x3, 0x5, 0xF, 0x4, 0xA, 0x6, 0x7, 0x0, 0x9, 0xD},
+ {0xB, 0xA, 0x5, 0xE, 0x6, 0xD, 0x9, 0x0, 0xC, 0x8, 0xF, 0x3, 0x2, 0x4, 0x7, 0x1},
+ {0xD, 0x7, 0xF, 0x4, 0x1, 0x2, 0x6, 0xE, 0x9, 0xB, 0x3, 0x0, 0x8, 0x5, 0xC, 0xA},
+ },
+ {
+ {0x2, 0x8, 0xB, 0xD, 0xF, 0x7, 0x6, 0xE, 0x3, 0x1, 0x9, 0x4, 0x0, 0xA, 0xC, 0x5},
+ {0x1, 0xE, 0x2, 0xB, 0x4, 0xC, 0x3, 0x7, 0x6, 0xD, 0xA, 0x5, 0xF, 0x9, 0x0, 0x8},
+ {0x4, 0xC, 0x7, 0x5, 0x1, 0x6, 0x9, 0xA, 0x0, 0xE, 0xD, 0x8, 0x2, 0xB, 0x3, 0xF},
+ {0xB, 0x9, 0x5, 0x1, 0xC, 0x3, 0xD, 0xE, 0x6, 0x4, 0x7, 0xF, 0x2, 0x0, 0x8, 0xA},
+ },
+}
+
+// genSbox generates the variable sbox
+func genSbox(qi int, x byte) byte {
+ a0, b0 := x/16, x%16
+ for i := 0; i < 2; i++ {
+ a1 := a0 ^ b0
+ b1 := (a0 ^ ((b0 << 3) | (b0 >> 1)) ^ (a0 << 3)) & 15
+ a0 = qbox[qi][2*i][a1]
+ b0 = qbox[qi][2*i+1][b1]
+ }
+ return (b0 << 4) + a0
+}
+
+func TestSbox(t *testing.T) {
+ for n := range sbox {
+ for m := range sbox[n] {
+ if genSbox(n, byte(m)) != sbox[n][m] {
+ t.Errorf("#%d|%d: sbox value = %d want %d", n, m, sbox[n][m], genSbox(n, byte(m)))
+ }
+ }
+ }
+}
+
+var testVectors = []struct {
+ key []byte
+ dec []byte
+ enc []byte
+}{
+ // These tests are extracted from LibTom
+ {
+ []byte{0x9F, 0x58, 0x9F, 0x5C, 0xF6, 0x12, 0x2C, 0x32, 0xB6, 0xBF, 0xEC, 0x2F, 0x2A, 0xE8, 0xC3, 0x5A},
+ []byte{0xD4, 0x91, 0xDB, 0x16, 0xE7, 0xB1, 0xC3, 0x9E, 0x86, 0xCB, 0x08, 0x6B, 0x78, 0x9F, 0x54, 0x19},
+ []byte{0x01, 0x9F, 0x98, 0x09, 0xDE, 0x17, 0x11, 0x85, 0x8F, 0xAA, 0xC3, 0xA3, 0xBA, 0x20, 0xFB, 0xC3},
+ },
+ {
+ []byte{0x88, 0xB2, 0xB2, 0x70, 0x6B, 0x10, 0x5E, 0x36, 0xB4, 0x46, 0xBB, 0x6D, 0x73, 0x1A, 0x1E, 0x88,
+ 0xEF, 0xA7, 0x1F, 0x78, 0x89, 0x65, 0xBD, 0x44},
+ []byte{0x39, 0xDA, 0x69, 0xD6, 0xBA, 0x49, 0x97, 0xD5, 0x85, 0xB6, 0xDC, 0x07, 0x3C, 0xA3, 0x41, 0xB2},
+ []byte{0x18, 0x2B, 0x02, 0xD8, 0x14, 0x97, 0xEA, 0x45, 0xF9, 0xDA, 0xAC, 0xDC, 0x29, 0x19, 0x3A, 0x65},
+ },
+ {
+ []byte{0xD4, 0x3B, 0xB7, 0x55, 0x6E, 0xA3, 0x2E, 0x46, 0xF2, 0xA2, 0x82, 0xB7, 0xD4, 0x5B, 0x4E, 0x0D,
+ 0x57, 0xFF, 0x73, 0x9D, 0x4D, 0xC9, 0x2C, 0x1B, 0xD7, 0xFC, 0x01, 0x70, 0x0C, 0xC8, 0x21, 0x6F},
+ []byte{0x90, 0xAF, 0xE9, 0x1B, 0xB2, 0x88, 0x54, 0x4F, 0x2C, 0x32, 0xDC, 0x23, 0x9B, 0x26, 0x35, 0xE6},
+ []byte{0x6C, 0xB4, 0x56, 0x1C, 0x40, 0xBF, 0x0A, 0x97, 0x05, 0x93, 0x1C, 0xB6, 0xD4, 0x08, 0xE7, 0xFA},
+ },
+ // These tests are derived from http://www.schneier.com/code/ecb_ival.txt
+ {
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x9F, 0x58, 0x9F, 0x5C, 0xF6, 0x12, 0x2C, 0x32, 0xB6, 0xBF, 0xEC, 0x2F, 0x2A, 0xE8, 0xC3, 0x5A},
+ },
+ {
+ []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF, 0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54, 0x32, 0x10,
+ 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77,
+ },
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0xCF, 0xD1, 0xD2, 0xE5, 0xA9, 0xBE, 0x9C, 0xDF, 0x50, 0x1F, 0x13, 0xB8, 0x92, 0xBD, 0x22, 0x48},
+ },
+ {
+ []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF, 0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54, 0x32, 0x10,
+ 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF,
+ },
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x37, 0x52, 0x7B, 0xE0, 0x05, 0x23, 0x34, 0xB8, 0x9F, 0x0C, 0xFC, 0xCA, 0xE8, 0x7C, 0xFA, 0x20},
+ },
+}
+
+func TestCipher(t *testing.T) {
+ for n, tt := range testVectors {
+ // Test that the plaintext (dec) encrypts to the given
+ // ciphertext (enc) using the given key, and that enc can
+ // be decrypted back into dec.
+ c, err := NewCipher(tt.key)
+ if err != nil {
+ t.Errorf("#%d: NewCipher: %v", n, err)
+ return
+ }
+
+ buf := make([]byte, 16)
+ c.Encrypt(buf, tt.dec)
+ if !bytes.Equal(buf, tt.enc) {
+ t.Errorf("#%d: encrypt = %x want %x", n, buf, tt.enc)
+ }
+ c.Decrypt(buf, tt.enc)
+ if !bytes.Equal(buf, tt.dec) {
+ t.Errorf("#%d: decrypt = %x want %x", n, buf, tt.dec)
+ }
+
+ // Test that 16 zero bytes, encrypted 1000 times and then decrypted
+ // 1000 times, result in zero bytes again.
+ zero := make([]byte, 16)
+ buf = make([]byte, 16)
+ for i := 0; i < 1000; i++ {
+ c.Encrypt(buf, buf)
+ }
+ for i := 0; i < 1000; i++ {
+ c.Decrypt(buf, buf)
+ }
+ if !bytes.Equal(buf, zero) {
+ t.Errorf("#%d: encrypt/decrypt 1000: have %x want %x", n, buf, zero)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/block.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/block.go
new file mode 100644
index 00000000000..bf5d245992d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/block.go
@@ -0,0 +1,66 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+ Implementation adapted from Needham and Wheeler's paper:
+ http://www.cix.co.uk/~klockstone/xtea.pdf
+
+ A precalculated lookup table is used during encryption/decryption for values that are based purely on the key.
+*/
+
+package xtea
+
+// XTEA is based on 64 rounds.
+const numRounds = 64
+
+// blockToUint32 reads an 8 byte slice into two uint32s.
+// The block is treated as big endian.
+func blockToUint32(src []byte) (uint32, uint32) {
+ r0 := uint32(src[0])<<24 | uint32(src[1])<<16 | uint32(src[2])<<8 | uint32(src[3])
+ r1 := uint32(src[4])<<24 | uint32(src[5])<<16 | uint32(src[6])<<8 | uint32(src[7])
+ return r0, r1
+}
+
+// uint32ToBlock writes two uint32s into an 8 byte data block.
+// Values are written as big endian.
+func uint32ToBlock(v0, v1 uint32, dst []byte) {
+ dst[0] = byte(v0 >> 24)
+ dst[1] = byte(v0 >> 16)
+ dst[2] = byte(v0 >> 8)
+ dst[3] = byte(v0)
+ dst[4] = byte(v1 >> 24)
+ dst[5] = byte(v1 >> 16)
+ dst[6] = byte(v1 >> 8)
+ dst[7] = byte(v1 >> 0)
+}
+
+// encryptBlock encrypts a single 8 byte block using XTEA.
+func encryptBlock(c *Cipher, dst, src []byte) {
+ v0, v1 := blockToUint32(src)
+
+ // Two rounds of XTEA applied per loop
+ for i := 0; i < numRounds; {
+ v0 += ((v1<<4 ^ v1>>5) + v1) ^ c.table[i]
+ i++
+ v1 += ((v0<<4 ^ v0>>5) + v0) ^ c.table[i]
+ i++
+ }
+
+ uint32ToBlock(v0, v1, dst)
+}
+
+// decryptBlock decrypts a single 8 byte block using XTEA.
+func decryptBlock(c *Cipher, dst, src []byte) {
+ v0, v1 := blockToUint32(src)
+
+ // Two rounds of XTEA applied per loop
+ for i := numRounds; i > 0; {
+ i--
+ v1 -= ((v0<<4 ^ v0>>5) + v0) ^ c.table[i]
+ i--
+ v0 -= ((v1<<4 ^ v1>>5) + v1) ^ c.table[i]
+ }
+
+ uint32ToBlock(v0, v1, dst)
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/cipher.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/cipher.go
new file mode 100644
index 00000000000..108b4263559
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/cipher.go
@@ -0,0 +1,82 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package xtea implements XTEA encryption, as defined in Needham and Wheeler's
+// 1997 technical report, "Tea extensions."
+package xtea // import "golang.org/x/crypto/xtea"
+
+// For details, see http://www.cix.co.uk/~klockstone/xtea.pdf
+
+import "strconv"
+
+// BlockSize is the XTEA block size in bytes.
+const BlockSize = 8
+
+// A Cipher is an instance of an XTEA cipher using a particular key.
+// table contains a series of precalculated values that are used each round.
+type Cipher struct {
+ table [64]uint32
+}
+
+type KeySizeError int
+
+func (k KeySizeError) Error() string {
+ return "crypto/xtea: invalid key size " + strconv.Itoa(int(k))
+}
+
+// NewCipher creates and returns a new Cipher.
+// The key argument should be the XTEA key.
+// XTEA only supports 128 bit (16 byte) keys.
+func NewCipher(key []byte) (*Cipher, error) {
+ k := len(key)
+ switch k {
+ default:
+ return nil, KeySizeError(k)
+ case 16:
+ break
+ }
+
+ c := new(Cipher)
+ initCipher(c, key)
+
+ return c, nil
+}
+
+// BlockSize returns the XTEA block size, 8 bytes.
+// It is necessary to satisfy the Block interface in the
+// package "crypto/cipher".
+func (c *Cipher) BlockSize() int { return BlockSize }
+
+// Encrypt encrypts the 8 byte buffer src using the key and stores the result in dst.
+// Note that for amounts of data larger than a block,
+// it is not safe to just call Encrypt on successive blocks;
+// instead, use an encryption mode like CBC (see crypto/cipher/cbc.go).
+func (c *Cipher) Encrypt(dst, src []byte) { encryptBlock(c, dst, src) }
+
+// Decrypt decrypts the 8 byte buffer src using the key and stores the result in dst.
+func (c *Cipher) Decrypt(dst, src []byte) { decryptBlock(c, dst, src) }
+
+// initCipher initializes the cipher context by creating a lookup table
+// of precalculated values that are based on the key.
+func initCipher(c *Cipher, key []byte) {
+ // Load the key into four uint32s
+ var k [4]uint32
+ for i := 0; i < len(k); i++ {
+ j := i << 2 // Multiply by 4
+ k[i] = uint32(key[j+0])<<24 | uint32(key[j+1])<<16 | uint32(key[j+2])<<8 | uint32(key[j+3])
+ }
+
+ // Precalculate the table
+ const delta = 0x9E3779B9
+ var sum uint32 = 0
+
+ // Two rounds of XTEA applied per loop
+ for i := 0; i < numRounds; {
+ c.table[i] = sum + k[sum&3]
+ i++
+ sum += delta
+ c.table[i] = sum + k[(sum>>11)&3]
+ i++
+ }
+}
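+
+// Editorial worked example of the loop above: sum starts at 0, so
+// table[0] = k[0]; after sum += delta, table[1] = delta + k[(delta>>11)&3].
+// The testTable values in the package tests can be reproduced this way for
+// the sample key.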
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/xtea_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/xtea_test.go
new file mode 100644
index 00000000000..be711bf5af0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/xtea/xtea_test.go
@@ -0,0 +1,229 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xtea
+
+import (
+ "testing"
+)
+
+// A sample test key for when we just want to initialize a cipher
+var testKey = []byte{0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF}
+
+// Test that the block size for XTEA is correct
+func TestBlocksize(t *testing.T) {
+ if BlockSize != 8 {
+ t.Errorf("BlockSize constant - expected 8, got %d", BlockSize)
+ return
+ }
+
+ c, err := NewCipher(testKey)
+ if err != nil {
+ t.Errorf("NewCipher(%d bytes) = %s", len(testKey), err)
+ return
+ }
+
+ result := c.BlockSize()
+ if result != 8 {
+ t.Errorf("BlockSize function - expected 8, got %d", result)
+ return
+ }
+}
+
+// A series of test values to confirm that the Cipher.table array was initialized correctly
+var testTable = []uint32{
+ 0x00112233, 0x6B1568B8, 0xE28CE030, 0xC5089E2D, 0xC5089E2D, 0x1EFBD3A2, 0xA7845C2A, 0x78EF0917,
+ 0x78EF0917, 0x172682D0, 0x5B6AC714, 0x822AC955, 0x3DE68511, 0xDC1DFECA, 0x2062430E, 0x3611343F,
+ 0xF1CCEFFB, 0x900469B4, 0xD448ADF8, 0x2E3BE36D, 0xB6C46BF5, 0x994029F2, 0x994029F2, 0xF3335F67,
+ 0x6AAAD6DF, 0x4D2694DC, 0x4D2694DC, 0xEB5E0E95, 0x2FA252D9, 0x4551440A, 0x121E10D6, 0xB0558A8F,
+ 0xE388BDC3, 0x0A48C004, 0xC6047BC0, 0x643BF579, 0xA88039BD, 0x02736F32, 0x8AFBF7BA, 0x5C66A4A7,
+ 0x5C66A4A7, 0xC76AEB2C, 0x3EE262A4, 0x215E20A1, 0x215E20A1, 0x7B515616, 0x03D9DE9E, 0x1988CFCF,
+ 0xD5448B8B, 0x737C0544, 0xB7C04988, 0xDE804BC9, 0x9A3C0785, 0x3873813E, 0x7CB7C582, 0xD6AAFAF7,
+ 0x4E22726F, 0x309E306C, 0x309E306C, 0x8A9165E1, 0x1319EE69, 0xF595AC66, 0xF595AC66, 0x4F88E1DB,
+}
+
+// Test that the cipher context is initialized correctly
+func TestCipherInit(t *testing.T) {
+ c, err := NewCipher(testKey)
+ if err != nil {
+ t.Errorf("NewCipher(%d bytes) = %s", len(testKey), err)
+ return
+ }
+
+ for i := 0; i < len(c.table); i++ {
+ if c.table[i] != testTable[i] {
+ t.Errorf("NewCipher() failed to initialize Cipher.table[%d] correctly. Expected %08X, got %08X", i, testTable[i], c.table[i])
+ break
+ }
+ }
+}
+
+// Test that invalid key sizes return an error
+func TestInvalidKeySize(t *testing.T) {
+ // Test a long key
+ key := []byte{
+ 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF,
+ 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF,
+ }
+
+ _, err := NewCipher(key)
+ if err == nil {
+ t.Errorf("Invalid key size %d didn't result in an error.", len(key))
+ }
+
+ // Test a short key
+ key = []byte{0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77}
+
+ _, err = NewCipher(key)
+ if err == nil {
+ t.Errorf("Invalid key size %d didn't result in an error.", len(key))
+ }
+}
+
+// Test that we can correctly decode some bytes we have encoded
+func TestEncodeDecode(t *testing.T) {
+ original := []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF}
+ input := original
+ output := make([]byte, BlockSize)
+
+ c, err := NewCipher(testKey)
+ if err != nil {
+ t.Errorf("NewCipher(%d bytes) = %s", len(testKey), err)
+ return
+ }
+
+ // Encrypt the input block
+ c.Encrypt(output, input)
+
+ // Check that the output does not match the input
+ differs := false
+ for i := 0; i < len(input); i++ {
+ if output[i] != input[i] {
+ differs = true
+ break
+ }
+ }
+ if differs == false {
+ t.Error("Cipher.Encrypt: Failed to encrypt the input block.")
+ return
+ }
+
+ // Decrypt the block we just encrypted
+ input = output
+ output = make([]byte, BlockSize)
+ c.Decrypt(output, input)
+
+ // Check that the output from decrypt matches our initial input
+ for i := 0; i < len(input); i++ {
+ if output[i] != original[i] {
+ t.Errorf("Decrypted byte %d differed. Expected %02X, got %02X\n", i, original[i], output[i])
+ return
+ }
+ }
+}
+
+// Test Vectors
+type CryptTest struct {
+ key []byte
+ plainText []byte
+ cipherText []byte
+}
+
+var CryptTests = []CryptTest{
+ // These were sourced from http://www.freemedialibrary.com/index.php/XTEA_test_vectors
+ {
+ []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f},
+ []byte{0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48},
+ []byte{0x49, 0x7d, 0xf3, 0xd0, 0x72, 0x61, 0x2c, 0xb5},
+ },
+ {
+ []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f},
+ []byte{0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41},
+ []byte{0xe7, 0x8f, 0x2d, 0x13, 0x74, 0x43, 0x41, 0xd8},
+ },
+ {
+ []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f},
+ []byte{0x5a, 0x5b, 0x6e, 0x27, 0x89, 0x48, 0xd7, 0x7f},
+ []byte{0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41},
+ },
+ {
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48},
+ []byte{0xa0, 0x39, 0x05, 0x89, 0xf8, 0xb8, 0xef, 0xa5},
+ },
+ {
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41},
+ []byte{0xed, 0x23, 0x37, 0x5a, 0x82, 0x1a, 0x8c, 0x2d},
+ },
+ {
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x70, 0xe1, 0x22, 0x5d, 0x6e, 0x4e, 0x76, 0x55},
+ []byte{0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41, 0x41},
+ },
+
+ // These vectors are from http://wiki.secondlife.com/wiki/XTEA_Strong_Encryption_Implementation#Bouncy_Castle_C.23_API
+ {
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0xDE, 0xE9, 0xD4, 0xD8, 0xF7, 0x13, 0x1E, 0xD9},
+ },
+ {
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08},
+ []byte{0x06, 0x5C, 0x1B, 0x89, 0x75, 0xC6, 0xA8, 0x16},
+ },
+ {
+ []byte{0x01, 0x23, 0x45, 0x67, 0x12, 0x34, 0x56, 0x78, 0x23, 0x45, 0x67, 0x89, 0x34, 0x56, 0x78, 0x9A},
+ []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
+ []byte{0x1F, 0xF9, 0xA0, 0x26, 0x1A, 0xC6, 0x42, 0x64},
+ },
+ {
+ []byte{0x01, 0x23, 0x45, 0x67, 0x12, 0x34, 0x56, 0x78, 0x23, 0x45, 0x67, 0x89, 0x34, 0x56, 0x78, 0x9A},
+ []byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08},
+ []byte{0x8C, 0x67, 0x15, 0x5B, 0x2E, 0xF9, 0x1E, 0xAD},
+ },
+}
+
+// Test encryption
+func TestCipherEncrypt(t *testing.T) {
+ for i, tt := range CryptTests {
+ c, err := NewCipher(tt.key)
+ if err != nil {
+ t.Errorf("NewCipher(%d bytes), vector %d = %s", len(tt.key), i, err)
+ continue
+ }
+
+ out := make([]byte, len(tt.plainText))
+ c.Encrypt(out, tt.plainText)
+
+ for j := 0; j < len(out); j++ {
+ if out[j] != tt.cipherText[j] {
+ t.Errorf("Cipher.Encrypt %d: out[%d] = %02X, expected %02X", i, j, out[j], tt.cipherText[j])
+ break
+ }
+ }
+ }
+}
+
+// Test decryption
+func TestCipherDecrypt(t *testing.T) {
+ for i, tt := range CryptTests {
+ c, err := NewCipher(tt.key)
+ if err != nil {
+ t.Errorf("NewCipher(%d bytes), vector %d = %s", len(tt.key), i, err)
+ continue
+ }
+
+ out := make([]byte, len(tt.cipherText))
+ c.Decrypt(out, tt.cipherText)
+
+ for j := 0; j < len(out); j++ {
+ if out[j] != tt.plainText[j] {
+ t.Errorf("Cipher.Decrypt %d: out[%d] = %02X, expected %02X", i, j, out[j], tt.plainText[j])
+ break
+ }
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/xts/xts.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/xts/xts.go
new file mode 100644
index 00000000000..c9a283b2e1d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/xts/xts.go
@@ -0,0 +1,138 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package xts implements the XTS cipher mode as specified in IEEE P1619/D16.
+//
+// XTS mode is typically used for disk encryption, which presents a number of
+// novel problems that make more common modes inapplicable. The disk is
+// conceptually an array of sectors and we must be able to encrypt and decrypt
+// a sector in isolation. However, an attacker must not be able to transpose
+// two sectors of plaintext by transposing their ciphertext.
+//
+// XTS wraps a block cipher with Rogaway's XEX mode in order to build a
+// tweakable block cipher. This allows each sector to have a unique tweak and
+// effectively create a unique key for each sector.
+//
+// XTS does not provide any authentication. An attacker can manipulate the
+// ciphertext and randomise a block (16 bytes) of the plaintext.
+//
+// (Note: this package does not implement ciphertext-stealing so sectors must
+// be a multiple of 16 bytes.)
+package xts // import "golang.org/x/crypto/xts"
+
+import (
+ "crypto/cipher"
+ "errors"
+)
+
+// Cipher contains an expanded key structure. It doesn't contain mutable state
+// and therefore can be used concurrently.
+type Cipher struct {
+ k1, k2 cipher.Block
+}
+
+// blockSize is the block size that the underlying cipher must have. XTS is
+// only defined for 16-byte ciphers.
+const blockSize = 16
+
+// NewCipher creates a Cipher given a function for creating the underlying
+// block cipher (which must have a block size of 16 bytes). The key must be
+// twice the length of the underlying cipher's key.
+func NewCipher(cipherFunc func([]byte) (cipher.Block, error), key []byte) (c *Cipher, err error) {
+ c = new(Cipher)
+ if c.k1, err = cipherFunc(key[:len(key)/2]); err != nil {
+ return
+ }
+ c.k2, err = cipherFunc(key[len(key)/2:])
+
+ if c.k1.BlockSize() != blockSize {
+ err = errors.New("xts: cipher does not have a block size of 16")
+ }
+
+ return
+}
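+
+// Editorial usage sketch (assumes a 64-byte key, giving two AES-256 keys,
+// and a sector buffer whose length is a multiple of 16; mirrors the usage
+// in the package tests and is not part of the upstream file):
+//
+//	c, err := NewCipher(aes.NewCipher, key) // key[:32] encrypts data, key[32:] makes tweaks
+//	if err != nil {
+//		// handle the error
+//	}
+//	c.Encrypt(sector, sector, sectorNum) // in-place encryption of one sector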
+
+// Encrypt encrypts a sector of plaintext and puts the result into ciphertext.
+// Plaintext and ciphertext may be the same slice but should not overlap.
+// Sectors must be a multiple of 16 bytes and less than 2²⁴ bytes.
+func (c *Cipher) Encrypt(ciphertext, plaintext []byte, sectorNum uint64) {
+ if len(ciphertext) < len(plaintext) {
+ panic("xts: ciphertext is smaller than plaintext")
+ }
+ if len(plaintext)%blockSize != 0 {
+ panic("xts: plaintext is not a multiple of the block size")
+ }
+
+ var tweak [blockSize]byte
+ for i := 0; i < 8; i++ {
+ tweak[i] = byte(sectorNum)
+ sectorNum >>= 8
+ }
+
+ c.k2.Encrypt(tweak[:], tweak[:])
+
+ for i := 0; i < len(plaintext); i += blockSize {
+ for j := 0; j < blockSize; j++ {
+ ciphertext[i+j] = plaintext[i+j] ^ tweak[j]
+ }
+ c.k1.Encrypt(ciphertext[i:], ciphertext[i:])
+ for j := 0; j < blockSize; j++ {
+ ciphertext[i+j] ^= tweak[j]
+ }
+
+ mul2(&tweak)
+ }
+}
+
+// Decrypt decrypts a sector of ciphertext and puts the result into plaintext.
+// Plaintext and ciphertext may be the same slice but should not overlap.
+// Sectors must be a multiple of 16 bytes and less than 2²⁴ bytes.
+func (c *Cipher) Decrypt(plaintext, ciphertext []byte, sectorNum uint64) {
+ if len(plaintext) < len(ciphertext) {
+ panic("xts: plaintext is smaller than ciphertext")
+ }
+ if len(ciphertext)%blockSize != 0 {
+ panic("xts: ciphertext is not a multiple of the block size")
+ }
+
+ var tweak [blockSize]byte
+ for i := 0; i < 8; i++ {
+ tweak[i] = byte(sectorNum)
+ sectorNum >>= 8
+ }
+
+ c.k2.Encrypt(tweak[:], tweak[:])
+
+ for i := 0; i < len(plaintext); i += blockSize {
+ for j := 0; j < blockSize; j++ {
+ plaintext[i+j] = ciphertext[i+j] ^ tweak[j]
+ }
+ c.k1.Decrypt(plaintext[i:], plaintext[i:])
+ for j := 0; j < blockSize; j++ {
+ plaintext[i+j] ^= tweak[j]
+ }
+
+ mul2(&tweak)
+ }
+}
+
+// mul2 multiplies tweak by 2 in GF(2¹²⁸) with an irreducible polynomial of
+// x¹²⁸ + x⁷ + x² + x + 1.
+func mul2(tweak *[blockSize]byte) {
+ var carryIn byte
+ for j := range tweak {
+ carryOut := tweak[j] >> 7
+ tweak[j] = (tweak[j] << 1) + carryIn
+ carryIn = carryOut
+ }
+ if carryIn != 0 {
+ // If we have a carry bit then we need to subtract a multiple
+ // of the irreducible polynomial (x¹²⁸ + x⁷ + x² + x + 1).
+ // By dropping the carry bit, we're subtracting the x^128 term
+ // so all that remains is to subtract x⁷ + x² + x + 1.
+ // Subtraction (and addition) in this representation is just
+ // XOR.
+ tweak[0] ^= 1<<7 | 1<<2 | 1<<1 | 1
+ }
+}
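+
+// Editorial worked example: doubling a tweak that is all zeros except for
+// tweak[15] = 0x80 shifts that top bit out, so the loop leaves every byte
+// zero with a final carry of 1, and the reduction sets tweak[0] = 0x87
+// (1<<7 | 1<<2 | 1<<1 | 1).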
diff --git a/src/mongo/gotools/vendor/src/golang.org/x/crypto/xts/xts_test.go b/src/mongo/gotools/vendor/src/golang.org/x/crypto/xts/xts_test.go
new file mode 100644
index 00000000000..7a5e9fadd60
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/golang.org/x/crypto/xts/xts_test.go
@@ -0,0 +1,85 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xts
+
+import (
+ "bytes"
+ "crypto/aes"
+ "encoding/hex"
+ "testing"
+)
+
+// These test vectors have been taken from IEEE P1619/D16, Annex B.
+var xtsTestVectors = []struct {
+ key string
+ sector uint64
+ plaintext string
+ ciphertext string
+}{
+ {
+ "0000000000000000000000000000000000000000000000000000000000000000",
+ 0,
+ "0000000000000000000000000000000000000000000000000000000000000000",
+ "917cf69ebd68b2ec9b9fe9a3eadda692cd43d2f59598ed858c02c2652fbf922e",
+ }, {
+ "1111111111111111111111111111111122222222222222222222222222222222",
+ 0x3333333333,
+ "4444444444444444444444444444444444444444444444444444444444444444",
+ "c454185e6a16936e39334038acef838bfb186fff7480adc4289382ecd6d394f0",
+ }, {
+ "fffefdfcfbfaf9f8f7f6f5f4f3f2f1f022222222222222222222222222222222",
+ 0x3333333333,
+ "4444444444444444444444444444444444444444444444444444444444444444",
+ "af85336b597afc1a900b2eb21ec949d292df4c047e0b21532186a5971a227a89",
+ }, {
+ "2718281828459045235360287471352631415926535897932384626433832795",
+ 0,
+ "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
+ "27a7479befa1d476489f308cd4cfa6e2a96e4bbe3208ff25287dd3819616e89cc78cf7f5e543445f8333d8fa7f56000005279fa5d8b5e4ad40e736ddb4d35412328063fd2aab53e5ea1e0a9f332500a5df9487d07a5c92cc512c8866c7e860ce93fdf166a24912b422976146ae20ce846bb7dc9ba94a767aaef20c0d61ad02655ea92dc4c4e41a8952c651d33174be51a10c421110e6d81588ede82103a252d8a750e8768defffed9122810aaeb99f9172af82b604dc4b8e51bcb08235a6f4341332e4ca60482a4ba1a03b3e65008fc5da76b70bf1690db4eae29c5f1badd03c5ccf2a55d705ddcd86d449511ceb7ec30bf12b1fa35b913f9f747a8afd1b130e94bff94effd01a91735ca1726acd0b197c4e5b03393697e126826fb6bbde8ecc1e08298516e2c9ed03ff3c1b7860f6de76d4cecd94c8119855ef5297ca67e9f3e7ff72b1e99785ca0a7e7720c5b36dc6d72cac9574c8cbbc2f801e23e56fd344b07f22154beba0f08ce8891e643ed995c94d9a69c9f1b5f499027a78572aeebd74d20cc39881c213ee770b1010e4bea718846977ae119f7a023ab58cca0ad752afe656bb3c17256a9f6e9bf19fdd5a38fc82bbe872c5539edb609ef4f79c203ebb140f2e583cb2ad15b4aa5b655016a8449277dbd477ef2c8d6c017db738b18deb4a427d1923ce3ff262735779a418f20a282df920147beabe421ee5319d0568",
+ }, {
+ "2718281828459045235360287471352631415926535897932384626433832795",
+ 1,
+ "27a7479befa1d476489f308cd4cfa6e2a96e4bbe3208ff25287dd3819616e89cc78cf7f5e543445f8333d8fa7f56000005279fa5d8b5e4ad40e736ddb4d35412328063fd2aab53e5ea1e0a9f332500a5df9487d07a5c92cc512c8866c7e860ce93fdf166a24912b422976146ae20ce846bb7dc9ba94a767aaef20c0d61ad02655ea92dc4c4e41a8952c651d33174be51a10c421110e6d81588ede82103a252d8a750e8768defffed9122810aaeb99f9172af82b604dc4b8e51bcb08235a6f4341332e4ca60482a4ba1a03b3e65008fc5da76b70bf1690db4eae29c5f1badd03c5ccf2a55d705ddcd86d449511ceb7ec30bf12b1fa35b913f9f747a8afd1b130e94bff94effd01a91735ca1726acd0b197c4e5b03393697e126826fb6bbde8ecc1e08298516e2c9ed03ff3c1b7860f6de76d4cecd94c8119855ef5297ca67e9f3e7ff72b1e99785ca0a7e7720c5b36dc6d72cac9574c8cbbc2f801e23e56fd344b07f22154beba0f08ce8891e643ed995c94d9a69c9f1b5f499027a78572aeebd74d20cc39881c213ee770b1010e4bea718846977ae119f7a023ab58cca0ad752afe656bb3c17256a9f6e9bf19fdd5a38fc82bbe872c5539edb609ef4f79c203ebb140f2e583cb2ad15b4aa5b655016a8449277dbd477ef2c8d6c017db738b18deb4a427d1923ce3ff262735779a418f20a282df920147beabe421ee5319d0568",
+ "264d3ca8512194fec312c8c9891f279fefdd608d0c027b60483a3fa811d65ee59d52d9e40ec5672d81532b38b6b089ce951f0f9c35590b8b978d175213f329bb1c2fd30f2f7f30492a61a532a79f51d36f5e31a7c9a12c286082ff7d2394d18f783e1a8e72c722caaaa52d8f065657d2631fd25bfd8e5baad6e527d763517501c68c5edc3cdd55435c532d7125c8614deed9adaa3acade5888b87bef641c4c994c8091b5bcd387f3963fb5bc37aa922fbfe3df4e5b915e6eb514717bdd2a74079a5073f5c4bfd46adf7d282e7a393a52579d11a028da4d9cd9c77124f9648ee383b1ac763930e7162a8d37f350b2f74b8472cf09902063c6b32e8c2d9290cefbd7346d1c779a0df50edcde4531da07b099c638e83a755944df2aef1aa31752fd323dcb710fb4bfbb9d22b925bc3577e1b8949e729a90bbafeacf7f7879e7b1147e28ba0bae940db795a61b15ecf4df8db07b824bb062802cc98a9545bb2aaeed77cb3fc6db15dcd7d80d7d5bc406c4970a3478ada8899b329198eb61c193fb6275aa8ca340344a75a862aebe92eee1ce032fd950b47d7704a3876923b4ad62844bf4a09c4dbe8b4397184b7471360c9564880aedddb9baa4af2e75394b08cd32ff479c57a07d3eab5d54de5f9738b8d27f27a9f0ab11799d7b7ffefb2704c95c6ad12c39f1e867a4b7b1d7818a4b753dfd2a89ccb45e001a03a867b187f225dd",
+ }, {
+ "27182818284590452353602874713526624977572470936999595749669676273141592653589793238462643383279502884197169399375105820974944592",
+ 0xff,
+ "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
+ "1c3b3a102f770386e4836c99e370cf9bea00803f5e482357a4ae12d414a3e63b5d31e276f8fe4a8d66b317f9ac683f44680a86ac35adfc3345befecb4bb188fd5776926c49a3095eb108fd1098baec70aaa66999a72a82f27d848b21d4a741b0c5cd4d5fff9dac89aeba122961d03a757123e9870f8acf1000020887891429ca2a3e7a7d7df7b10355165c8b9a6d0a7de8b062c4500dc4cd120c0f7418dae3d0b5781c34803fa75421c790dfe1de1834f280d7667b327f6c8cd7557e12ac3a0f93ec05c52e0493ef31a12d3d9260f79a289d6a379bc70c50841473d1a8cc81ec583e9645e07b8d9670655ba5bbcfecc6dc3966380ad8fecb17b6ba02469a020a84e18e8f84252070c13e9f1f289be54fbc481457778f616015e1327a02b140f1505eb309326d68378f8374595c849d84f4c333ec4423885143cb47bd71c5edae9be69a2ffeceb1bec9de244fbe15992b11b77c040f12bd8f6a975a44a0f90c29a9abc3d4d893927284c58754cce294529f8614dcd2aba991925fedc4ae74ffac6e333b93eb4aff0479da9a410e4450e0dd7ae4c6e2910900575da401fc07059f645e8b7e9bfdef33943054ff84011493c27b3429eaedb4ed5376441a77ed43851ad77f16f541dfd269d50d6a5f14fb0aab1cbb4c1550be97f7ab4066193c4caa773dad38014bd2092fa755c824bb5e54c4f36ffda9fcea70b9c6e693e148c151",
+ },
+}
+
+func fromHex(s string) []byte {
+ ret, err := hex.DecodeString(s)
+ if err != nil {
+ panic("xts: invalid hex in test")
+ }
+ return ret
+}
+
+func TestXTS(t *testing.T) {
+ for i, test := range xtsTestVectors {
+ c, err := NewCipher(aes.NewCipher, fromHex(test.key))
+ if err != nil {
+ t.Errorf("#%d: failed to create cipher: %s", i, err)
+ continue
+ }
+ plaintext := fromHex(test.plaintext)
+ ciphertext := make([]byte, len(plaintext))
+ c.Encrypt(ciphertext, plaintext, test.sector)
+
+ expectedCiphertext := fromHex(test.ciphertext)
+ if !bytes.Equal(ciphertext, expectedCiphertext) {
+ t.Errorf("#%d: encrypted failed, got: %x, want: %x", i, ciphertext, expectedCiphertext)
+ continue
+ }
+
+ decrypted := make([]byte, len(ciphertext))
+ c.Decrypt(decrypted, ciphertext, test.sector)
+ if !bytes.Equal(decrypted, plaintext) {
+ t.Errorf("#%d: decryption failed, got: %x, want: %x", i, decrypted, plaintext)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/.travis.yml b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/.travis.yml
new file mode 100644
index 00000000000..45b38cf13fb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/.travis.yml
@@ -0,0 +1,45 @@
+language: go
+
+go_import_path: gopkg.in/mgo.v2
+
+addons:
+ apt:
+ packages:
+
+env:
+ global:
+ - BUCKET=https://niemeyer.s3.amazonaws.com
+ matrix:
+ - GO=1.4.1 MONGODB=x86_64-2.2.7
+ - GO=1.4.1 MONGODB=x86_64-2.4.14
+ - GO=1.4.1 MONGODB=x86_64-2.6.11
+ - GO=1.4.1 MONGODB=x86_64-3.0.9
+ - GO=1.4.1 MONGODB=x86_64-3.2.3-nojournal
+ - GO=1.5.3 MONGODB=x86_64-3.0.9
+ - GO=1.6 MONGODB=x86_64-3.0.9
+
+install:
+ - eval "$(gimme $GO)"
+
+ - wget $BUCKET/mongodb-linux-$MONGODB.tgz
+ - tar xzvf mongodb-linux-$MONGODB.tgz
+ - export PATH=$PWD/mongodb-linux-$MONGODB/bin:$PATH
+
+ - wget $BUCKET/daemontools.tar.gz
+ - tar xzvf daemontools.tar.gz
+ - export PATH=$PWD/daemontools:$PATH
+
+ - go get gopkg.in/check.v1
+ - go get gopkg.in/yaml.v2
+ - go get gopkg.in/tomb.v2
+
+before_script:
+ - export NOIPV6=1
+ - make startdb
+
+script:
+ - (cd bson && go test -check.v)
+ - go test -check.v -fast
+ - (cd txn && go test -check.v)
+
+# vim:sw=4:ts=4:et
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/LICENSE b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/LICENSE
new file mode 100644
index 00000000000..770c7672b45
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/LICENSE
@@ -0,0 +1,25 @@
+mgo - MongoDB driver for Go
+
+Copyright (c) 2010-2013 - Gustavo Niemeyer <gustavo@niemeyer.net>
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/Makefile b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/Makefile
new file mode 100644
index 00000000000..d1027d45090
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/Makefile
@@ -0,0 +1,5 @@
+startdb:
+ @harness/setup.sh start
+
+stopdb:
+ @harness/setup.sh stop
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/README.md b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/README.md
new file mode 100644
index 00000000000..f4e452c04e3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/README.md
@@ -0,0 +1,4 @@
+The MongoDB driver for Go
+-------------------------
+
+Please go to [http://labix.org/mgo](http://labix.org/mgo) for all project details.
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/auth.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/auth.go
new file mode 100644
index 00000000000..dc26e52f583
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/auth.go
@@ -0,0 +1,467 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo
+
+import (
+ "crypto/md5"
+ "crypto/sha1"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "sync"
+
+ "gopkg.in/mgo.v2/bson"
+ "gopkg.in/mgo.v2/internal/scram"
+)
+
+type authCmd struct {
+ Authenticate int
+
+ Nonce string
+ User string
+ Key string
+}
+
+type startSaslCmd struct {
+ StartSASL int `bson:"startSasl"`
+}
+
+type authResult struct {
+ ErrMsg string
+ Ok bool
+}
+
+type getNonceCmd struct {
+ GetNonce int
+}
+
+type getNonceResult struct {
+ Nonce string
+ Err string "$err"
+ Code int
+}
+
+type logoutCmd struct {
+ Logout int
+}
+
+type saslCmd struct {
+ Start int `bson:"saslStart,omitempty"`
+ Continue int `bson:"saslContinue,omitempty"`
+ ConversationId int `bson:"conversationId,omitempty"`
+ Mechanism string `bson:"mechanism,omitempty"`
+ Payload []byte
+}
+
+type saslResult struct {
+ Ok bool `bson:"ok"`
+ NotOk bool `bson:"code"` // Server <= 2.3.2 returns ok=1 & code>0 on errors (WTF?)
+ Done bool
+
+ ConversationId int `bson:"conversationId"`
+ Payload []byte
+ ErrMsg string
+}
+
+type saslStepper interface {
+ Step(serverData []byte) (clientData []byte, done bool, err error)
+ Close()
+}
+
+func (socket *mongoSocket) getNonce() (nonce string, err error) {
+ socket.Lock()
+ for socket.cachedNonce == "" && socket.dead == nil {
+ debugf("Socket %p to %s: waiting for nonce", socket, socket.addr)
+ socket.gotNonce.Wait()
+ }
+ if socket.cachedNonce == "mongos" {
+ socket.Unlock()
+ return "", errors.New("Can't authenticate with mongos; see http://j.mp/mongos-auth")
+ }
+ debugf("Socket %p to %s: got nonce", socket, socket.addr)
+ nonce, err = socket.cachedNonce, socket.dead
+ socket.cachedNonce = ""
+ socket.Unlock()
+ if err != nil {
+ nonce = ""
+ }
+ return
+}
+
+func (socket *mongoSocket) resetNonce() {
+ debugf("Socket %p to %s: requesting a new nonce", socket, socket.addr)
+ op := &queryOp{}
+ op.query = &getNonceCmd{GetNonce: 1}
+ op.collection = "admin.$cmd"
+ op.limit = -1
+ op.replyFunc = func(err error, reply *replyOp, docNum int, docData []byte) {
+ if err != nil {
+ socket.kill(errors.New("getNonce: "+err.Error()), true)
+ return
+ }
+ result := &getNonceResult{}
+ err = bson.Unmarshal(docData, &result)
+ if err != nil {
+ socket.kill(errors.New("Failed to unmarshal nonce: "+err.Error()), true)
+ return
+ }
+ debugf("Socket %p to %s: nonce unmarshalled: %#v", socket, socket.addr, result)
+ if result.Code == 13390 {
+ // mongos doesn't yet support auth (see http://j.mp/mongos-auth)
+ result.Nonce = "mongos"
+ } else if result.Nonce == "" {
+ var msg string
+ if result.Err != "" {
+ msg = fmt.Sprintf("Got an empty nonce: %s (%d)", result.Err, result.Code)
+ } else {
+ msg = "Got an empty nonce"
+ }
+ socket.kill(errors.New(msg), true)
+ return
+ }
+ socket.Lock()
+ if socket.cachedNonce != "" {
+ socket.Unlock()
+ panic("resetNonce: nonce already cached")
+ }
+ socket.cachedNonce = result.Nonce
+ socket.gotNonce.Signal()
+ socket.Unlock()
+ }
+ err := socket.Query(op)
+ if err != nil {
+ socket.kill(errors.New("resetNonce: "+err.Error()), true)
+ }
+}
+
+func (socket *mongoSocket) Login(cred Credential) error {
+ socket.Lock()
+ if cred.Mechanism == "" && socket.serverInfo.MaxWireVersion >= 3 {
+ cred.Mechanism = "SCRAM-SHA-1"
+ }
+ for _, sockCred := range socket.creds {
+ if sockCred == cred {
+ debugf("Socket %p to %s: login: db=%q user=%q (already logged in)", socket, socket.addr, cred.Source, cred.Username)
+ socket.Unlock()
+ return nil
+ }
+ }
+ if socket.dropLogout(cred) {
+ debugf("Socket %p to %s: login: db=%q user=%q (cached)", socket, socket.addr, cred.Source, cred.Username)
+ socket.creds = append(socket.creds, cred)
+ socket.Unlock()
+ return nil
+ }
+ socket.Unlock()
+
+ debugf("Socket %p to %s: login: db=%q user=%q", socket, socket.addr, cred.Source, cred.Username)
+
+ var err error
+ switch cred.Mechanism {
+ case "", "MONGODB-CR", "MONGO-CR": // Name changed to MONGODB-CR in SERVER-8501.
+ err = socket.loginClassic(cred)
+ case "PLAIN":
+ err = socket.loginPlain(cred)
+ case "MONGODB-X509":
+ err = socket.loginX509(cred)
+ default:
+ // Try SASL for everything else, if it is available.
+ err = socket.loginSASL(cred)
+ }
+
+ if err != nil {
+ debugf("Socket %p to %s: login error: %s", socket, socket.addr, err)
+ } else {
+ debugf("Socket %p to %s: login successful", socket, socket.addr)
+ }
+ return err
+}
+
+func (socket *mongoSocket) loginClassic(cred Credential) error {
+ // Note that this only works properly because this function is
+ // synchronous, which means the nonce won't get reset while we're
+ // using it and any other login requests will block waiting for a
+ // new nonce provided in the defer call below.
+ nonce, err := socket.getNonce()
+ if err != nil {
+ return err
+ }
+ defer socket.resetNonce()
+
+ psum := md5.New()
+ psum.Write([]byte(cred.Username + ":mongo:" + cred.Password))
+
+ ksum := md5.New()
+ ksum.Write([]byte(nonce + cred.Username))
+ ksum.Write([]byte(hex.EncodeToString(psum.Sum(nil))))
+
+ key := hex.EncodeToString(ksum.Sum(nil))
+
+ cmd := authCmd{Authenticate: 1, User: cred.Username, Nonce: nonce, Key: key}
+ res := authResult{}
+ return socket.loginRun(cred.Source, &cmd, &res, func() error {
+ if !res.Ok {
+ return errors.New(res.ErrMsg)
+ }
+ socket.Lock()
+ socket.dropAuth(cred.Source)
+ socket.creds = append(socket.creds, cred)
+ socket.Unlock()
+ return nil
+ })
+}
+
+type authX509Cmd struct {
+ Authenticate int
+ User string
+ Mechanism string
+}
+
+func (socket *mongoSocket) loginX509(cred Credential) error {
+ cmd := authX509Cmd{Authenticate: 1, User: cred.Username, Mechanism: "MONGODB-X509"}
+ res := authResult{}
+ return socket.loginRun(cred.Source, &cmd, &res, func() error {
+ if !res.Ok {
+ return errors.New(res.ErrMsg)
+ }
+ socket.Lock()
+ socket.dropAuth(cred.Source)
+ socket.creds = append(socket.creds, cred)
+ socket.Unlock()
+ return nil
+ })
+}
+
+func (socket *mongoSocket) loginPlain(cred Credential) error {
+ cmd := saslCmd{Start: 1, Mechanism: "PLAIN", Payload: []byte("\x00" + cred.Username + "\x00" + cred.Password)}
+ res := authResult{}
+ return socket.loginRun(cred.Source, &cmd, &res, func() error {
+ if !res.Ok {
+ return errors.New(res.ErrMsg)
+ }
+ socket.Lock()
+ socket.dropAuth(cred.Source)
+ socket.creds = append(socket.creds, cred)
+ socket.Unlock()
+ return nil
+ })
+}
+
+func (socket *mongoSocket) loginSASL(cred Credential) error {
+ var sasl saslStepper
+ var err error
+ if cred.Mechanism == "SCRAM-SHA-1" {
+ // SCRAM is handled without external libraries.
+ sasl = saslNewScram(cred)
+ } else if len(cred.ServiceHost) > 0 {
+ sasl, err = saslNew(cred, cred.ServiceHost)
+ } else {
+ sasl, err = saslNew(cred, socket.Server().Addr)
+ }
+ if err != nil {
+ return err
+ }
+ defer sasl.Close()
+
+ // The goal of this logic is to carry a locked socket until the
+ // local SASL step confirms the auth is valid; the socket needs to be
+ // locked so that concurrent action doesn't leave the socket in an
+ // auth state that doesn't reflect the operations that took place.
+ // As a simple case, imagine inverting login=>logout to logout=>login.
+ //
+ // The logic below works because the lock func isn't called concurrently.
+ locked := false
+ lock := func(b bool) {
+ if locked != b {
+ locked = b
+ if b {
+ socket.Lock()
+ } else {
+ socket.Unlock()
+ }
+ }
+ }
+
+ lock(true)
+ defer lock(false)
+
+ start := 1
+ cmd := saslCmd{}
+ res := saslResult{}
+ for {
+ payload, done, err := sasl.Step(res.Payload)
+ if err != nil {
+ return err
+ }
+ if done && res.Done {
+ socket.dropAuth(cred.Source)
+ socket.creds = append(socket.creds, cred)
+ break
+ }
+ lock(false)
+
+ cmd = saslCmd{
+ Start: start,
+ Continue: 1 - start,
+ ConversationId: res.ConversationId,
+ Mechanism: cred.Mechanism,
+ Payload: payload,
+ }
+ start = 0
+ err = socket.loginRun(cred.Source, &cmd, &res, func() error {
+ // See the comment on lock for why this is necessary.
+ lock(true)
+ if !res.Ok || res.NotOk {
+ return fmt.Errorf("server returned error on SASL authentication step: %s", res.ErrMsg)
+ }
+ return nil
+ })
+ if err != nil {
+ return err
+ }
+ if done && res.Done {
+ socket.dropAuth(cred.Source)
+ socket.creds = append(socket.creds, cred)
+ break
+ }
+ }
+
+ return nil
+}
+
+func saslNewScram(cred Credential) *saslScram {
+ credsum := md5.New()
+ credsum.Write([]byte(cred.Username + ":mongo:" + cred.Password))
+ client := scram.NewClient(sha1.New, cred.Username, hex.EncodeToString(credsum.Sum(nil)))
+ return &saslScram{cred: cred, client: client}
+}
+
+type saslScram struct {
+ cred Credential
+ client *scram.Client
+}
+
+func (s *saslScram) Close() {}
+
+func (s *saslScram) Step(serverData []byte) (clientData []byte, done bool, err error) {
+ more := s.client.Step(serverData)
+ return s.client.Out(), !more, s.client.Err()
+}
+
+func (socket *mongoSocket) loginRun(db string, query, result interface{}, f func() error) error {
+ var mutex sync.Mutex
+ var replyErr error
+ mutex.Lock()
+
+ op := queryOp{}
+ op.query = query
+ op.collection = db + ".$cmd"
+ op.limit = -1
+ op.replyFunc = func(err error, reply *replyOp, docNum int, docData []byte) {
+ defer mutex.Unlock()
+
+ if err != nil {
+ replyErr = err
+ return
+ }
+
+ err = bson.Unmarshal(docData, result)
+ if err != nil {
+ replyErr = err
+ } else {
+ // Must handle this within the read loop for the socket, so
+ // that concurrent login requests are properly ordered.
+ replyErr = f()
+ }
+ }
+
+ err := socket.Query(&op)
+ if err != nil {
+ return err
+ }
+ mutex.Lock() // Wait.
+ return replyErr
+}
+
+func (socket *mongoSocket) Logout(db string) {
+ socket.Lock()
+ cred, found := socket.dropAuth(db)
+ if found {
+ debugf("Socket %p to %s: logout: db=%q (flagged)", socket, socket.addr, db)
+ socket.logout = append(socket.logout, cred)
+ }
+ socket.Unlock()
+}
+
+func (socket *mongoSocket) LogoutAll() {
+ socket.Lock()
+ if l := len(socket.creds); l > 0 {
+ debugf("Socket %p to %s: logout all (flagged %d)", socket, socket.addr, l)
+ socket.logout = append(socket.logout, socket.creds...)
+ socket.creds = socket.creds[0:0]
+ }
+ socket.Unlock()
+}
+
+func (socket *mongoSocket) flushLogout() (ops []interface{}) {
+ socket.Lock()
+ if l := len(socket.logout); l > 0 {
+ debugf("Socket %p to %s: logout all (flushing %d)", socket, socket.addr, l)
+ for i := 0; i != l; i++ {
+ op := queryOp{}
+ op.query = &logoutCmd{1}
+ op.collection = socket.logout[i].Source + ".$cmd"
+ op.limit = -1
+ ops = append(ops, &op)
+ }
+ socket.logout = socket.logout[0:0]
+ }
+ socket.Unlock()
+ return
+}
+
+func (socket *mongoSocket) dropAuth(db string) (cred Credential, found bool) {
+ for i, sockCred := range socket.creds {
+ if sockCred.Source == db {
+ copy(socket.creds[i:], socket.creds[i+1:])
+ socket.creds = socket.creds[:len(socket.creds)-1]
+ return sockCred, true
+ }
+ }
+ return cred, false
+}
+
+func (socket *mongoSocket) dropLogout(cred Credential) (found bool) {
+ for i, sockCred := range socket.logout {
+ if sockCred == cred {
+ copy(socket.logout[i:], socket.logout[i+1:])
+ socket.logout = socket.logout[:len(socket.logout)-1]
+ return true
+ }
+ }
+ return false
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/auth_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/auth_test.go
new file mode 100644
index 00000000000..9952734757e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/auth_test.go
@@ -0,0 +1,1180 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo_test
+
+import (
+ "crypto/tls"
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "net"
+ "net/url"
+ "os"
+ "runtime"
+ "sync"
+ "time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
+)
+
+func (s *S) TestAuthLoginDatabase(c *C) {
+ // Test both with a normal database and with an authenticated shard.
+ for _, addr := range []string{"localhost:40002", "localhost:40203"} {
+ session, err := mgo.Dial(addr)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|need to login|not authorized .*")
+
+ admindb := session.DB("admin")
+
+ err = admindb.Login("root", "wrong")
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
+
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+ }
+}
+
+func (s *S) TestAuthLoginSession(c *C) {
+ // Test both with a normal database and with an authenticated shard.
+ for _, addr := range []string{"localhost:40002", "localhost:40203"} {
+ session, err := mgo.Dial(addr)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|need to login|not authorized .*")
+
+ cred := mgo.Credential{
+ Username: "root",
+ Password: "wrong",
+ }
+ err = session.Login(&cred)
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
+
+ cred.Password = "rapadura"
+
+ err = session.Login(&cred)
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+ }
+}
+
+func (s *S) TestAuthLoginLogout(c *C) {
+ // Test both with a normal database and with an authenticated shard.
+ for _, addr := range []string{"localhost:40002", "localhost:40203"} {
+ session, err := mgo.Dial(addr)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ admindb.Logout()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|need to login|not authorized .*")
+
+ // Must have dropped auth from the session too.
+ session = session.Copy()
+ defer session.Close()
+
+ coll = session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|need to login|not authorized .*")
+ }
+}
+
+func (s *S) TestAuthLoginLogoutAll(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ session.LogoutAll()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|need to login|not authorized .*")
+
+ // Must have dropped auth from the session too.
+ session = session.Copy()
+ defer session.Close()
+
+ coll = session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|need to login|not authorized .*")
+}
+
+func (s *S) TestAuthUpsertUserErrors(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ mydb := session.DB("mydb")
+
+ err = mydb.UpsertUser(&mgo.User{})
+ c.Assert(err, ErrorMatches, "user has no Username")
+
+ err = mydb.UpsertUser(&mgo.User{Username: "user", Password: "pass", UserSource: "source"})
+ c.Assert(err, ErrorMatches, "user has both Password/PasswordHash and UserSource set")
+
+ err = mydb.UpsertUser(&mgo.User{Username: "user", Password: "pass", OtherDBRoles: map[string][]mgo.Role{"db": nil}})
+ c.Assert(err, ErrorMatches, "user with OtherDBRoles is only supported in the admin or \\$external databases")
+}
+
+func (s *S) TestAuthUpsertUser(c *C) {
+ if !s.versionAtLeast(2, 4) {
+ c.Skip("UpsertUser only works on 2.4+")
+ }
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ mydb := session.DB("mydb")
+
+ ruser := &mgo.User{
+ Username: "myruser",
+ Password: "mypass",
+ Roles: []mgo.Role{mgo.RoleRead},
+ }
+ rwuser := &mgo.User{
+ Username: "myrwuser",
+ Password: "mypass",
+ Roles: []mgo.Role{mgo.RoleReadWrite},
+ }
+
+ err = mydb.UpsertUser(ruser)
+ c.Assert(err, IsNil)
+ err = mydb.UpsertUser(rwuser)
+ c.Assert(err, IsNil)
+
+ err = mydb.Login("myruser", "mypass")
+ c.Assert(err, IsNil)
+
+ admindb.Logout()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+
+ err = mydb.Login("myrwuser", "mypass")
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+
+ myotherdb := session.DB("myotherdb")
+
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ // Test UserSource.
+ rwuserother := &mgo.User{
+ Username: "myrwuser",
+ UserSource: "mydb",
+ Roles: []mgo.Role{mgo.RoleRead},
+ }
+
+ err = myotherdb.UpsertUser(rwuserother)
+ if s.versionAtLeast(2, 6) {
+ c.Assert(err, ErrorMatches, `MongoDB 2.6\+ does not support the UserSource setting`)
+ return
+ }
+ c.Assert(err, IsNil)
+
+ admindb.Logout()
+
+ // Test indirection via UserSource: we can't write to it, because
+ // the roles for myrwuser are different there.
+ othercoll := myotherdb.C("myothercoll")
+ err = othercoll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+
+ // Reading works, though.
+ err = othercoll.Find(nil).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ // Can't login directly into the database using UserSource, though.
+ err = myotherdb.Login("myrwuser", "mypass")
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
+}
+
+func (s *S) TestAuthUpsertUserOtherDBRoles(c *C) {
+ if !s.versionAtLeast(2, 4) {
+ c.Skip("UpsertUser only works on 2.4+")
+ }
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ ruser := &mgo.User{
+ Username: "myruser",
+ Password: "mypass",
+ OtherDBRoles: map[string][]mgo.Role{"mydb": []mgo.Role{mgo.RoleRead}},
+ }
+
+ err = admindb.UpsertUser(ruser)
+ c.Assert(err, IsNil)
+ defer admindb.RemoveUser("myruser")
+
+ admindb.Logout()
+	err = admindb.Login("myruser", "mypass")
+	c.Assert(err, IsNil)
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+
+ err = coll.Find(nil).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+}
+
+func (s *S) TestAuthUpsertUserUpdates(c *C) {
+ if !s.versionAtLeast(2, 4) {
+ c.Skip("UpsertUser only works on 2.4+")
+ }
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ mydb := session.DB("mydb")
+
+ // Insert a user that can read.
+ user := &mgo.User{
+ Username: "myruser",
+ Password: "mypass",
+ Roles: []mgo.Role{mgo.RoleRead},
+ }
+ err = mydb.UpsertUser(user)
+ c.Assert(err, IsNil)
+
+ // Now update the user password.
+ user = &mgo.User{
+ Username: "myruser",
+ Password: "mynewpass",
+ }
+ err = mydb.UpsertUser(user)
+ c.Assert(err, IsNil)
+
+ // Login with the new user.
+ usession, err := mgo.Dial("myruser:mynewpass@localhost:40002/mydb")
+ c.Assert(err, IsNil)
+ defer usession.Close()
+
+ // Can read, but not write.
+ err = usession.DB("mydb").C("mycoll").Find(nil).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+ err = usession.DB("mydb").C("mycoll").Insert(M{"ok": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+
+ // Update the user role.
+ user = &mgo.User{
+ Username: "myruser",
+ Roles: []mgo.Role{mgo.RoleReadWrite},
+ }
+ err = mydb.UpsertUser(user)
+ c.Assert(err, IsNil)
+
+ // Dial again to ensure the password hasn't changed.
+ usession, err = mgo.Dial("myruser:mynewpass@localhost:40002/mydb")
+ c.Assert(err, IsNil)
+ defer usession.Close()
+
+ // Now it can write.
+ err = usession.DB("mydb").C("mycoll").Insert(M{"ok": 1})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestAuthAddUser(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ mydb := session.DB("mydb")
+ err = mydb.AddUser("myruser", "mypass", true)
+ c.Assert(err, IsNil)
+ err = mydb.AddUser("mywuser", "mypass", false)
+ c.Assert(err, IsNil)
+
+ err = mydb.Login("myruser", "mypass")
+ c.Assert(err, IsNil)
+
+ admindb.Logout()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+
+ err = mydb.Login("mywuser", "mypass")
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestAuthAddUserReplaces(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ mydb := session.DB("mydb")
+ err = mydb.AddUser("myuser", "myoldpass", false)
+ c.Assert(err, IsNil)
+ err = mydb.AddUser("myuser", "mynewpass", true)
+ c.Assert(err, IsNil)
+
+ admindb.Logout()
+
+ err = mydb.Login("myuser", "myoldpass")
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
+ err = mydb.Login("myuser", "mynewpass")
+ c.Assert(err, IsNil)
+
+ // ReadOnly flag was changed too.
+ err = mydb.C("mycoll").Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+}
+
+func (s *S) TestAuthRemoveUser(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ mydb := session.DB("mydb")
+ err = mydb.AddUser("myuser", "mypass", true)
+ c.Assert(err, IsNil)
+ err = mydb.RemoveUser("myuser")
+ c.Assert(err, IsNil)
+ err = mydb.RemoveUser("myuser")
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = mydb.Login("myuser", "mypass")
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
+}
+
+func (s *S) TestAuthLoginTwiceDoesNothing(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ oldStats := mgo.GetStats()
+
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ newStats := mgo.GetStats()
+ c.Assert(newStats.SentOps, Equals, oldStats.SentOps)
+}
+
+func (s *S) TestAuthLoginLogoutLoginDoesNothing(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ oldStats := mgo.GetStats()
+
+ admindb.Logout()
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ newStats := mgo.GetStats()
+ c.Assert(newStats.SentOps, Equals, oldStats.SentOps)
+}
+
+func (s *S) TestAuthLoginSwitchUser(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+
+ err = admindb.Login("reader", "rapadura")
+ c.Assert(err, IsNil)
+
+ // Can't write.
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+
+ // But can read.
+ result := struct{ N int }{}
+ err = coll.Find(nil).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 1)
+}
+
+func (s *S) TestAuthLoginChangePassword(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ mydb := session.DB("mydb")
+ err = mydb.AddUser("myuser", "myoldpass", false)
+ c.Assert(err, IsNil)
+
+ err = mydb.Login("myuser", "myoldpass")
+ c.Assert(err, IsNil)
+
+ err = mydb.AddUser("myuser", "mynewpass", true)
+ c.Assert(err, IsNil)
+
+ err = mydb.Login("myuser", "mynewpass")
+ c.Assert(err, IsNil)
+
+ admindb.Logout()
+
+ // The second login must be in effect, which means read-only.
+ err = mydb.C("mycoll").Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+}
+
+func (s *S) TestAuthLoginCachingWithSessionRefresh(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ session.Refresh()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestAuthLoginCachingWithSessionCopy(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ session = session.Copy()
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestAuthLoginCachingWithSessionClone(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ session = session.Clone()
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestAuthLoginCachingWithNewSession(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ session = session.New()
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|need to login|not authorized .*")
+}
+
+func (s *S) TestAuthLoginCachingAcrossPool(c *C) {
+	// Logins are cached even when the connection goes back
+ // into the pool.
+
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ // Add another user to test the logout case at the same time.
+ mydb := session.DB("mydb")
+ err = mydb.AddUser("myuser", "mypass", false)
+ c.Assert(err, IsNil)
+
+ err = mydb.Login("myuser", "mypass")
+ c.Assert(err, IsNil)
+
+ // Logout root explicitly, to test both cases.
+ admindb.Logout()
+
+ // Give socket back to pool.
+ session.Refresh()
+
+ // Brand new session, should use socket from the pool.
+ other := session.New()
+ defer other.Close()
+
+ oldStats := mgo.GetStats()
+
+ err = other.DB("admin").Login("root", "rapadura")
+ c.Assert(err, IsNil)
+ err = other.DB("mydb").Login("myuser", "mypass")
+ c.Assert(err, IsNil)
+
+ // Both logins were cached, so no ops.
+ newStats := mgo.GetStats()
+ c.Assert(newStats.SentOps, Equals, oldStats.SentOps)
+
+ // And they actually worked.
+ err = other.DB("mydb").C("mycoll").Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+
+ other.DB("admin").Logout()
+
+ err = other.DB("mydb").C("mycoll").Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestAuthLoginCachingAcrossPoolWithLogout(c *C) {
+ // Now verify that logouts are properly flushed if they
+ // are not revalidated after leaving the pool.
+
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ // Add another user to test the logout case at the same time.
+ mydb := session.DB("mydb")
+ err = mydb.AddUser("myuser", "mypass", true)
+ c.Assert(err, IsNil)
+
+ err = mydb.Login("myuser", "mypass")
+ c.Assert(err, IsNil)
+
+ // Just some data to query later.
+ err = session.DB("mydb").C("mycoll").Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+
+ // Give socket back to pool.
+ session.Refresh()
+
+ // Brand new session, should use socket from the pool.
+ other := session.New()
+ defer other.Close()
+
+ oldStats := mgo.GetStats()
+
+ err = other.DB("mydb").Login("myuser", "mypass")
+ c.Assert(err, IsNil)
+
+ // Login was cached, so no ops.
+ newStats := mgo.GetStats()
+ c.Assert(newStats.SentOps, Equals, oldStats.SentOps)
+
+ // Can't write, since root has been implicitly logged out
+	// when the connection went back into the pool, and not revalidated.
+ err = other.DB("mydb").C("mycoll").Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+
+ // But can read due to the revalidated myuser login.
+ result := struct{ N int }{}
+ err = other.DB("mydb").C("mycoll").Find(nil).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 1)
+}
+
+func (s *S) TestAuthEventual(c *C) {
+ // Eventual sessions don't keep sockets around, so they are
+ // an interesting test case.
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ admindb := session.DB("admin")
+ err = admindb.Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ err = session.DB("mydb").C("mycoll").Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+
+ var wg sync.WaitGroup
+ wg.Add(20)
+
+ for i := 0; i != 10; i++ {
+ go func() {
+ defer wg.Done()
+ var result struct{ N int }
+ err := session.DB("mydb").C("mycoll").Find(nil).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 1)
+ }()
+ }
+
+ for i := 0; i != 10; i++ {
+ go func() {
+ defer wg.Done()
+ err := session.DB("mydb").C("mycoll").Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+ }()
+ }
+
+ wg.Wait()
+}
+
+func (s *S) TestAuthURL(c *C) {
+ session, err := mgo.Dial("mongodb://root:rapadura@localhost:40002/")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ err = session.DB("mydb").C("mycoll").Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestAuthURLWrongCredentials(c *C) {
+ session, err := mgo.Dial("mongodb://root:wrong@localhost:40002/")
+ if session != nil {
+ session.Close()
+ }
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
+ c.Assert(session, IsNil)
+}
+
+func (s *S) TestAuthURLWithNewSession(c *C) {
+ // When authentication is in the URL, the new session will
+ // actually carry it on as well, even if logged out explicitly.
+ session, err := mgo.Dial("mongodb://root:rapadura@localhost:40002/")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.DB("admin").Logout()
+
+ // Do it twice to ensure it passes the needed data on.
+ session = session.New()
+ defer session.Close()
+ session = session.New()
+ defer session.Close()
+
+ err = session.DB("mydb").C("mycoll").Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestAuthURLWithDatabase(c *C) {
+ session, err := mgo.Dial("mongodb://root:rapadura@localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ mydb := session.DB("mydb")
+ err = mydb.AddUser("myruser", "mypass", true)
+ c.Assert(err, IsNil)
+
+ // Test once with database, and once with source.
+ for i := 0; i < 2; i++ {
+ var url string
+ if i == 0 {
+ url = "mongodb://myruser:mypass@localhost:40002/mydb"
+ } else {
+ url = "mongodb://myruser:mypass@localhost:40002/admin?authSource=mydb"
+ }
+ usession, err := mgo.Dial(url)
+ c.Assert(err, IsNil)
+ defer usession.Close()
+
+ ucoll := usession.DB("mydb").C("mycoll")
+ err = ucoll.FindId(0).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+ err = ucoll.Insert(M{"n": 1})
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+ }
+}
+
+func (s *S) TestDefaultDatabase(c *C) {
+ tests := []struct{ url, db string }{
+ {"mongodb://root:rapadura@localhost:40002", "test"},
+ {"mongodb://root:rapadura@localhost:40002/admin", "admin"},
+ {"mongodb://localhost:40001", "test"},
+ {"mongodb://localhost:40001/", "test"},
+ {"mongodb://localhost:40001/mydb", "mydb"},
+ }
+
+ for _, test := range tests {
+ session, err := mgo.Dial(test.url)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ c.Logf("test: %#v", test)
+ c.Assert(session.DB("").Name, Equals, test.db)
+
+ scopy := session.Copy()
+ c.Check(scopy.DB("").Name, Equals, test.db)
+ scopy.Close()
+ }
+}
+
+func (s *S) TestAuthDirect(c *C) {
+ // Direct connections must work to the master and slaves.
+ for _, port := range []string{"40031", "40032", "40033"} {
+ url := fmt.Sprintf("mongodb://root:rapadura@localhost:%s/?connect=direct", port)
+ session, err := mgo.Dial(url)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Monotonic, true)
+
+ var result struct{}
+ err = session.DB("mydb").C("mycoll").Find(nil).One(&result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+ }
+}
+
+func (s *S) TestAuthDirectWithLogin(c *C) {
+ // Direct connections must work to the master and slaves.
+ for _, port := range []string{"40031", "40032", "40033"} {
+ url := fmt.Sprintf("mongodb://localhost:%s/?connect=direct", port)
+ session, err := mgo.Dial(url)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Monotonic, true)
+ session.SetSyncTimeout(3 * time.Second)
+
+ err = session.DB("admin").Login("root", "rapadura")
+ c.Assert(err, IsNil)
+
+ var result struct{}
+ err = session.DB("mydb").C("mycoll").Find(nil).One(&result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+ }
+}
+
+func (s *S) TestAuthScramSha1Cred(c *C) {
+ if !s.versionAtLeast(2, 7, 7) {
+ c.Skip("SCRAM-SHA-1 tests depend on 2.7.7")
+ }
+ cred := &mgo.Credential{
+ Username: "root",
+ Password: "rapadura",
+ Mechanism: "SCRAM-SHA-1",
+ Source: "admin",
+ }
+ host := "localhost:40002"
+ c.Logf("Connecting to %s...", host)
+ session, err := mgo.Dial(host)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ mycoll := session.DB("admin").C("mycoll")
+
+ c.Logf("Connected! Testing the need for authentication...")
+ err = mycoll.Find(nil).One(nil)
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+
+ c.Logf("Authenticating...")
+ err = session.Login(cred)
+ c.Assert(err, IsNil)
+ c.Logf("Authenticated!")
+
+ c.Logf("Connected! Testing the need for authentication...")
+ err = mycoll.Find(nil).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+}
+
+func (s *S) TestAuthScramSha1URL(c *C) {
+ if !s.versionAtLeast(2, 7, 7) {
+ c.Skip("SCRAM-SHA-1 tests depend on 2.7.7")
+ }
+ host := "localhost:40002"
+ c.Logf("Connecting to %s...", host)
+ session, err := mgo.Dial(fmt.Sprintf("root:rapadura@%s?authMechanism=SCRAM-SHA-1", host))
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ mycoll := session.DB("admin").C("mycoll")
+
+ c.Logf("Connected! Testing the need for authentication...")
+ err = mycoll.Find(nil).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+}
+
+func (s *S) TestAuthX509Cred(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ binfo, err := session.BuildInfo()
+ c.Assert(err, IsNil)
+ if binfo.OpenSSLVersion == "" {
+ c.Skip("server does not support SSL")
+ }
+
+ clientCertPEM, err := ioutil.ReadFile("harness/certs/client.pem")
+ c.Assert(err, IsNil)
+
+ clientCert, err := tls.X509KeyPair(clientCertPEM, clientCertPEM)
+ c.Assert(err, IsNil)
+
+ tlsConfig := &tls.Config{
+ // Isolating tests to client certs, don't care about server validation.
+ InsecureSkipVerify: true,
+ Certificates: []tls.Certificate{clientCert},
+ }
+
+ var host = "localhost:40003"
+ c.Logf("Connecting to %s...", host)
+ session, err = mgo.DialWithInfo(&mgo.DialInfo{
+ Addrs: []string{host},
+ DialServer: func(addr *mgo.ServerAddr) (net.Conn, error) {
+ return tls.Dial("tcp", addr.String(), tlsConfig)
+ },
+ })
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ err = session.Login(&mgo.Credential{Username: "root", Password: "rapadura"})
+ c.Assert(err, IsNil)
+
+ // This needs to be kept in sync with client.pem
+ x509Subject := "CN=localhost,OU=Client,O=MGO,L=MGO,ST=MGO,C=GO"
+
+ externalDB := session.DB("$external")
+ var x509User mgo.User = mgo.User{
+ Username: x509Subject,
+ OtherDBRoles: map[string][]mgo.Role{"admin": []mgo.Role{mgo.RoleRoot}},
+ }
+ err = externalDB.UpsertUser(&x509User)
+ c.Assert(err, IsNil)
+
+ session.LogoutAll()
+
+ c.Logf("Connected! Ensuring authentication is required...")
+ names, err := session.DatabaseNames()
+ c.Assert(err, ErrorMatches, "not authorized .*")
+
+ cred := &mgo.Credential{
+ Username: x509Subject,
+ Mechanism: "MONGODB-X509",
+ Source: "$external",
+ }
+
+ c.Logf("Authenticating...")
+ err = session.Login(cred)
+ c.Assert(err, IsNil)
+ c.Logf("Authenticated!")
+
+ names, err = session.DatabaseNames()
+ c.Assert(err, IsNil)
+ c.Assert(len(names) > 0, Equals, true)
+}
+
+var (
+ plainFlag = flag.String("plain", "", "Host to test PLAIN authentication against (depends on custom environment)")
+ plainUser = "einstein"
+ plainPass = "password"
+)
+
+func (s *S) TestAuthPlainCred(c *C) {
+ if *plainFlag == "" {
+ c.Skip("no -plain")
+ }
+ cred := &mgo.Credential{
+ Username: plainUser,
+ Password: plainPass,
+ Source: "$external",
+ Mechanism: "PLAIN",
+ }
+ c.Logf("Connecting to %s...", *plainFlag)
+ session, err := mgo.Dial(*plainFlag)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ records := session.DB("records").C("records")
+
+ c.Logf("Connected! Testing the need for authentication...")
+ err = records.Find(nil).One(nil)
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+
+ c.Logf("Authenticating...")
+ err = session.Login(cred)
+ c.Assert(err, IsNil)
+ c.Logf("Authenticated!")
+
+ c.Logf("Connected! Testing the need for authentication...")
+ err = records.Find(nil).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+}
+
+func (s *S) TestAuthPlainURL(c *C) {
+ if *plainFlag == "" {
+ c.Skip("no -plain")
+ }
+ c.Logf("Connecting to %s...", *plainFlag)
+ session, err := mgo.Dial(fmt.Sprintf("%s:%s@%s?authMechanism=PLAIN", url.QueryEscape(plainUser), url.QueryEscape(plainPass), *plainFlag))
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ c.Logf("Connected! Testing the need for authentication...")
+ err = session.DB("records").C("records").Find(nil).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+}
+
+var (
+ kerberosFlag = flag.Bool("kerberos", false, "Test Kerberos authentication (depends on custom environment)")
+ kerberosHost = "ldaptest.10gen.cc"
+ kerberosUser = "drivers@LDAPTEST.10GEN.CC"
+
+ winKerberosPasswordEnv = "MGO_KERBEROS_PASSWORD"
+)
+
+// Kerberos has its own suite because it talks to a remote server
+// that is prepared to authenticate against a kerberos deployment.
+type KerberosSuite struct{}
+
+var _ = Suite(&KerberosSuite{})
+
+func (kerberosSuite *KerberosSuite) SetUpSuite(c *C) {
+ mgo.SetDebug(true)
+ mgo.SetStats(true)
+}
+
+func (kerberosSuite *KerberosSuite) TearDownSuite(c *C) {
+ mgo.SetDebug(false)
+ mgo.SetStats(false)
+}
+
+func (kerberosSuite *KerberosSuite) SetUpTest(c *C) {
+ mgo.SetLogger((*cLogger)(c))
+ mgo.ResetStats()
+}
+
+func (kerberosSuite *KerberosSuite) TearDownTest(c *C) {
+ mgo.SetLogger(nil)
+}
+
+func (kerberosSuite *KerberosSuite) TestAuthKerberosCred(c *C) {
+ if !*kerberosFlag {
+ c.Skip("no -kerberos")
+ }
+ cred := &mgo.Credential{
+ Username: kerberosUser,
+ Mechanism: "GSSAPI",
+ }
+ windowsAppendPasswordToCredential(cred)
+ c.Logf("Connecting to %s...", kerberosHost)
+ session, err := mgo.Dial(kerberosHost)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ c.Logf("Connected! Testing the need for authentication...")
+ n, err := session.DB("kerberos").C("test").Find(M{}).Count()
+ c.Assert(err, ErrorMatches, ".*authorized.*")
+
+ c.Logf("Authenticating...")
+ err = session.Login(cred)
+ c.Assert(err, IsNil)
+ c.Logf("Authenticated!")
+
+ n, err = session.DB("kerberos").C("test").Find(M{}).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 1)
+}
+
+func (kerberosSuite *KerberosSuite) TestAuthKerberosURL(c *C) {
+ if !*kerberosFlag {
+ c.Skip("no -kerberos")
+ }
+ c.Logf("Connecting to %s...", kerberosHost)
+ connectUri := url.QueryEscape(kerberosUser) + "@" + kerberosHost + "?authMechanism=GSSAPI"
+ if runtime.GOOS == "windows" {
+ connectUri = url.QueryEscape(kerberosUser) + ":" + url.QueryEscape(getWindowsKerberosPassword()) + "@" + kerberosHost + "?authMechanism=GSSAPI"
+ }
+ session, err := mgo.Dial(connectUri)
+ c.Assert(err, IsNil)
+ defer session.Close()
+ n, err := session.DB("kerberos").C("test").Find(M{}).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 1)
+}
+
+func (kerberosSuite *KerberosSuite) TestAuthKerberosServiceName(c *C) {
+ if !*kerberosFlag {
+ c.Skip("no -kerberos")
+ }
+
+ wrongServiceName := "wrong"
+ rightServiceName := "mongodb"
+
+ cred := &mgo.Credential{
+ Username: kerberosUser,
+ Mechanism: "GSSAPI",
+ Service: wrongServiceName,
+ }
+ windowsAppendPasswordToCredential(cred)
+
+ c.Logf("Connecting to %s...", kerberosHost)
+ session, err := mgo.Dial(kerberosHost)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ c.Logf("Authenticating with incorrect service name...")
+ err = session.Login(cred)
+ c.Assert(err, ErrorMatches, ".*@LDAPTEST.10GEN.CC not found.*")
+
+ cred.Service = rightServiceName
+ c.Logf("Authenticating with correct service name...")
+ err = session.Login(cred)
+ c.Assert(err, IsNil)
+ c.Logf("Authenticated!")
+
+ n, err := session.DB("kerberos").C("test").Find(M{}).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 1)
+}
+
+func (kerberosSuite *KerberosSuite) TestAuthKerberosServiceHost(c *C) {
+ if !*kerberosFlag {
+ c.Skip("no -kerberos")
+ }
+
+ wrongServiceHost := "eggs.bacon.tk"
+ rightServiceHost := kerberosHost
+
+ cred := &mgo.Credential{
+ Username: kerberosUser,
+ Mechanism: "GSSAPI",
+ ServiceHost: wrongServiceHost,
+ }
+ windowsAppendPasswordToCredential(cred)
+
+ c.Logf("Connecting to %s...", kerberosHost)
+ session, err := mgo.Dial(kerberosHost)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ c.Logf("Authenticating with incorrect service host...")
+ err = session.Login(cred)
+ c.Assert(err, ErrorMatches, ".*@LDAPTEST.10GEN.CC not found.*")
+
+ cred.ServiceHost = rightServiceHost
+ c.Logf("Authenticating with correct service host...")
+ err = session.Login(cred)
+ c.Assert(err, IsNil)
+ c.Logf("Authenticated!")
+
+ n, err := session.DB("kerberos").C("test").Find(M{}).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 1)
+}
+
+// No kinit on SSPI-style Kerberos, so we need to provide a password. In order
+// to avoid inlining the password, require it to be set as an environment variable,
+// for instance: `SET MGO_KERBEROS_PASSWORD=this_isnt_the_password`
+func getWindowsKerberosPassword() string {
+ pw := os.Getenv(winKerberosPasswordEnv)
+ if pw == "" {
+ panic(fmt.Sprintf("Need to set %v environment variable to run Kerberos tests on Windows", winKerberosPasswordEnv))
+ }
+ return pw
+}
+
+func windowsAppendPasswordToCredential(cred *mgo.Credential) {
+ if runtime.GOOS == "windows" {
+ cred.Password = getWindowsKerberosPassword()
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/LICENSE b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/LICENSE
new file mode 100644
index 00000000000..890326017b8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/LICENSE
@@ -0,0 +1,25 @@
+BSON library for Go
+
+Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/bson.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/bson.go
new file mode 100644
index 00000000000..7fb7f8cae48
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/bson.go
@@ -0,0 +1,738 @@
+// BSON library for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Package bson is an implementation of the BSON specification for Go:
+//
+// http://bsonspec.org
+//
+// It was created as part of the mgo MongoDB driver for Go, but is standalone
+// and may be used on its own without the driver.
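+//
+// As an illustrative sketch, a round trip through the package's Marshal and
+// Unmarshal functions looks like this:
+//
+//    data, err := bson.Marshal(bson.M{"n": 1})
+//    if err != nil {
+//        // handle the encoding error
+//    }
+//    var doc bson.M
+//    err = bson.Unmarshal(data, &doc)
+//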
+package bson
+
+import (
+ "bytes"
+ "crypto/md5"
+ "crypto/rand"
+ "encoding/binary"
+ "encoding/hex"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "reflect"
+ "runtime"
+ "strings"
+ "sync"
+ "sync/atomic"
+ "time"
+)
+
+// --------------------------------------------------------------------------
+// The public API.
+
+// A value implementing the bson.Getter interface will have its GetBSON
+// method called when the given value has to be marshalled, and the result
+// of this method will be marshalled in place of the actual object.
+//
+// If GetBSON returns a non-nil error, the marshalling procedure
+// will stop and error out with the provided value.
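+//
+// As an illustrative sketch (the MyDoc type is hypothetical):
+//
+//    type MyDoc struct{ Count int }
+//
+//    func (d MyDoc) GetBSON() (interface{}, error) {
+//        return bson.M{"count": d.Count}, nil
+//    }
+//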
+type Getter interface {
+ GetBSON() (interface{}, error)
+}
+
+// A value implementing the bson.Setter interface will receive the BSON
+// value via the SetBSON method during unmarshaling, and the object
+// itself will not be changed as usual.
+//
+// If setting the value works, the method should return nil or alternatively
+// bson.SetZero to set the respective field to its zero value (nil for
+// pointer types). If SetBSON returns a value of type bson.TypeError, the
+// BSON value will be omitted from a map or slice being decoded and the
+// unmarshalling will continue. If it returns any other non-nil error, the
+// unmarshalling procedure will stop and error out with the provided value.
+//
+// This interface is generally useful in pointer receivers, since the method
+// will want to change the receiver. A type field that implements the Setter
+// interface doesn't have to be a pointer, though.
+//
+// Unlike the usual behavior, unmarshalling onto a value that implements a
+// Setter interface will NOT reset the value to its zero state. This allows
+// the value to decide by itself how to be unmarshalled.
+//
+// For example:
+//
+// type MyString string
+//
+// func (s *MyString) SetBSON(raw bson.Raw) error {
+// return raw.Unmarshal(s)
+// }
+//
+type Setter interface {
+ SetBSON(raw Raw) error
+}
+
+// SetZero may be returned from a SetBSON method to have the value set to
+// its respective zero value. When used in pointer values, this will set the
+// field to nil rather than to the pre-allocated value.
+var SetZero = errors.New("set to zero")
+
+// M is a convenient alias for a map[string]interface{} map, useful for
+// dealing with BSON in a native way. For instance:
+//
+// bson.M{"a": 1, "b": true}
+//
+// There's no special handling for this type in addition to what's done anyway
+// for an equivalent map type. Elements in the map will be dumped in an
+// undefined order. See also the bson.D type for an ordered alternative.
+type M map[string]interface{}
+
+// D represents a BSON document containing ordered elements. For example:
+//
+// bson.D{{"a", 1}, {"b", true}}
+//
+// In some situations, such as when creating indexes for MongoDB, the order in
+// which the elements are defined is important. If the order is not important,
+// using a map is generally more comfortable. See bson.M and bson.RawD.
+type D []DocElem
+
+// DocElem is an element of the bson.D document representation.
+type DocElem struct {
+ Name string
+ Value interface{}
+}
+
+// Map returns a map out of the ordered element name/value pairs in d.
+func (d D) Map() (m M) {
+ m = make(M, len(d))
+ for _, item := range d {
+ m[item.Name] = item.Value
+ }
+ return m
+}
+
+// The Raw type represents raw unprocessed BSON documents and elements.
+// Kind is the kind of element as defined per the BSON specification, and
+// Data is the raw unprocessed data for the respective element.
+// Using this type it is possible to unmarshal or marshal values partially.
+//
+// Relevant documentation:
+//
+// http://bsonspec.org/#/specification
+//
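+// For instance, part of a document may be decoded lazily by deferring it to
+// a Raw field and unmarshalling it later; the Envelope and Header types in
+// this sketch are hypothetical:
+//
+// type Envelope struct {
+// Kind string
+// Payload bson.Raw
+// }
+//
+// var env Envelope
+// if err := bson.Unmarshal(data, &env); err != nil {
+// return err
+// }
+// var hdr Header
+// if err := env.Payload.Unmarshal(&hdr); err != nil {
+// return err
+// }
+//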
+type Raw struct {
+ Kind byte
+ Data []byte
+}
+
+// RawD represents a BSON document containing raw unprocessed elements.
+// This low-level representation may be useful when lazily processing
+// documents of uncertain content, or when manipulating the raw content of
+// documents in general.
+type RawD []RawDocElem
+
+// RawDocElem is an element of the RawD document representation. See the
+// RawD type.
+type RawDocElem struct {
+ Name string
+ Value Raw
+}
+
+// ObjectId is a unique ID identifying a BSON value. It must be exactly 12 bytes
+// long. MongoDB objects by default have such a property set in their "_id"
+// property.
+//
+// http://www.mongodb.org/display/DOCS/Object+IDs
+type ObjectId string
+
+// ObjectIdHex returns an ObjectId from the provided hex representation.
+// Calling this function with an invalid hex representation will
+// cause a runtime panic. See the IsObjectIdHex function.
+func ObjectIdHex(s string) ObjectId {
+ d, err := hex.DecodeString(s)
+ if err != nil || len(d) != 12 {
+ panic(fmt.Sprintf("invalid input to ObjectIdHex: %q", s))
+ }
+ return ObjectId(d)
+}
+
+// IsObjectIdHex returns whether s is a valid hex representation of
+// an ObjectId. See the ObjectIdHex function.
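+//
+// A typical guarded conversion looks roughly like this:
+//
+// var id bson.ObjectId
+// if bson.IsObjectIdHex(s) {
+// id = bson.ObjectIdHex(s)
+// }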
+func IsObjectIdHex(s string) bool {
+ if len(s) != 24 {
+ return false
+ }
+ _, err := hex.DecodeString(s)
+ return err == nil
+}
+
+// objectIdCounter is atomically incremented when generating a new ObjectId
+// using the NewObjectId function. It's used as the counter part of an id.
+var objectIdCounter uint32 = readRandomUint32()
+
+// readRandomUint32 returns a random objectIdCounter.
+func readRandomUint32() uint32 {
+ var b [4]byte
+ _, err := io.ReadFull(rand.Reader, b[:])
+ if err != nil {
+ panic(fmt.Errorf("cannot read random object id: %v", err))
+ }
+ return uint32((uint32(b[0]) << 0) | (uint32(b[1]) << 8) | (uint32(b[2]) << 16) | (uint32(b[3]) << 24))
+}
+
+// machineId stores the machine id generated once and used in subsequent
+// calls to the NewObjectId function.
+var machineId = readMachineId()
+var processId = os.Getpid()
+
+// readMachineId generates and returns a machine id.
+// If it fails to get the hostname, it falls back to random bytes; if that
+// also fails it will cause a runtime error.
+func readMachineId() []byte {
+ var sum [3]byte
+ id := sum[:]
+ hostname, err1 := os.Hostname()
+ if err1 != nil {
+ _, err2 := io.ReadFull(rand.Reader, id)
+ if err2 != nil {
+ panic(fmt.Errorf("cannot get hostname: %v; %v", err1, err2))
+ }
+ return id
+ }
+ hw := md5.New()
+ hw.Write([]byte(hostname))
+ copy(id, hw.Sum(nil))
+ return id
+}
+
+// NewObjectId returns a new unique ObjectId.
+func NewObjectId() ObjectId {
+ var b [12]byte
+ // Timestamp, 4 bytes, big endian
+ binary.BigEndian.PutUint32(b[:], uint32(time.Now().Unix()))
+ // Machine, first 3 bytes of md5(hostname)
+ b[4] = machineId[0]
+ b[5] = machineId[1]
+ b[6] = machineId[2]
+ // Pid, 2 bytes, specs don't specify endianness, but we use big endian.
+ b[7] = byte(processId >> 8)
+ b[8] = byte(processId)
+ // Increment, 3 bytes, big endian
+ i := atomic.AddUint32(&objectIdCounter, 1)
+ b[9] = byte(i >> 16)
+ b[10] = byte(i >> 8)
+ b[11] = byte(i)
+ return ObjectId(b[:])
+}
+
+// NewObjectIdWithTime returns a dummy ObjectId with the timestamp part filled
+// with the provided number of seconds from epoch UTC, and all other parts
+// filled with zeroes. It's not safe to insert a document with an id generated
+// by this method; it is useful only for queries to find documents with ids
+// generated before or after the specified timestamp.
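+//
+// For example, a query document selecting ids generated after a time t might
+// be built as:
+//
+// query := bson.M{"_id": bson.M{"$gt": bson.NewObjectIdWithTime(t)}}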
+func NewObjectIdWithTime(t time.Time) ObjectId {
+ var b [12]byte
+ binary.BigEndian.PutUint32(b[:4], uint32(t.Unix()))
+ return ObjectId(string(b[:]))
+}
+
+// String returns a hex string representation of the id.
+// Example: ObjectIdHex("4d88e15b60f486e428412dc9").
+func (id ObjectId) String() string {
+ return fmt.Sprintf(`ObjectIdHex("%x")`, string(id))
+}
+
+// Hex returns a hex representation of the ObjectId.
+func (id ObjectId) Hex() string {
+ return hex.EncodeToString([]byte(id))
+}
+
+// MarshalJSON turns a bson.ObjectId into a json.Marshaler.
+func (id ObjectId) MarshalJSON() ([]byte, error) {
+ return []byte(fmt.Sprintf(`"%x"`, string(id))), nil
+}
+
+var nullBytes = []byte("null")
+
+// UnmarshalJSON turns *bson.ObjectId into a json.Unmarshaler.
+func (id *ObjectId) UnmarshalJSON(data []byte) error {
+ if len(data) > 0 && (data[0] == '{' || data[0] == 'O') {
+ var v struct {
+ Id json.RawMessage `json:"$oid"`
+ Func struct {
+ Id json.RawMessage
+ } `json:"$oidFunc"`
+ }
+ err := jdec(data, &v)
+ if err == nil {
+ if len(v.Id) > 0 {
+ data = []byte(v.Id)
+ } else {
+ data = []byte(v.Func.Id)
+ }
+ }
+ }
+ if len(data) == 2 && data[0] == '"' && data[1] == '"' || bytes.Equal(data, nullBytes) {
+ *id = ""
+ return nil
+ }
+ if len(data) != 26 || data[0] != '"' || data[25] != '"' {
+ return fmt.Errorf("invalid ObjectId in JSON: %s", string(data))
+ }
+ var buf [12]byte
+ _, err := hex.Decode(buf[:], data[1:25])
+ if err != nil {
+ return fmt.Errorf("invalid ObjectId in JSON: %s (%s)", string(data), err)
+ }
+ *id = ObjectId(string(buf[:]))
+ return nil
+}
+
+// MarshalText turns bson.ObjectId into an encoding.TextMarshaler.
+func (id ObjectId) MarshalText() ([]byte, error) {
+ return []byte(fmt.Sprintf("%x", string(id))), nil
+}
+
+// UnmarshalText turns *bson.ObjectId into an encoding.TextUnmarshaler.
+func (id *ObjectId) UnmarshalText(data []byte) error {
+ if len(data) == 1 && data[0] == ' ' || len(data) == 0 {
+ *id = ""
+ return nil
+ }
+ if len(data) != 24 {
+ return fmt.Errorf("invalid ObjectId: %s", data)
+ }
+ var buf [12]byte
+ _, err := hex.Decode(buf[:], data[:])
+ if err != nil {
+ return fmt.Errorf("invalid ObjectId: %s (%s)", data, err)
+ }
+ *id = ObjectId(string(buf[:]))
+ return nil
+}
+
+// Valid returns true if id is valid. A valid id must contain exactly 12 bytes.
+func (id ObjectId) Valid() bool {
+ return len(id) == 12
+}
+
+// byteSlice returns byte slice of id from start to end.
+// Calling this function with an invalid id will cause a runtime panic.
+func (id ObjectId) byteSlice(start, end int) []byte {
+ if len(id) != 12 {
+ panic(fmt.Sprintf("invalid ObjectId: %q", string(id)))
+ }
+ return []byte(string(id)[start:end])
+}
+
+// Time returns the timestamp part of the id.
+// It's a runtime error to call this method with an invalid id.
+func (id ObjectId) Time() time.Time {
+ // First 4 bytes of ObjectId is 32-bit big-endian seconds from epoch.
+ secs := int64(binary.BigEndian.Uint32(id.byteSlice(0, 4)))
+ return time.Unix(secs, 0)
+}
+
+// Machine returns the 3-byte machine id part of the id.
+// It's a runtime error to call this method with an invalid id.
+func (id ObjectId) Machine() []byte {
+ return id.byteSlice(4, 7)
+}
+
+// Pid returns the process id part of the id.
+// It's a runtime error to call this method with an invalid id.
+func (id ObjectId) Pid() uint16 {
+ return binary.BigEndian.Uint16(id.byteSlice(7, 9))
+}
+
+// Counter returns the incrementing value part of the id.
+// It's a runtime error to call this method with an invalid id.
+func (id ObjectId) Counter() int32 {
+ b := id.byteSlice(9, 12)
+ // Counter is stored as big-endian 3-byte value
+ return int32(uint32(b[0])<<16 | uint32(b[1])<<8 | uint32(b[2]))
+}
+
+// The Symbol type is similar to a string and is used in languages with a
+// distinct symbol type.
+type Symbol string
+
+// Now returns the current time with millisecond precision. MongoDB stores
+// timestamps with the same precision, so a Time returned from this method
+// will not change after a roundtrip to the database. That's the only reason
+// why this function exists. Using the time.Now function also works fine
+// otherwise.
+func Now() time.Time {
+ return time.Unix(0, time.Now().UnixNano()/1e6*1e6)
+}
+
+// MongoTimestamp is a special internal type used by MongoDB that for some
+// strange reason has its own datatype defined in BSON.
+type MongoTimestamp int64
+
+type orderKey int64
+
+// MaxKey is a special value that compares higher than all other possible BSON
+// values in a MongoDB database.
+var MaxKey = orderKey(1<<63 - 1)
+
+// MinKey is a special value that compares lower than all other possible BSON
+// values in a MongoDB database.
+var MinKey = orderKey(-1 << 63)
+
+type undefined struct{}
+
+// Undefined represents the undefined BSON value.
+var Undefined undefined
+
+// Binary is a representation for non-standard binary values. Any kind should
+// work, but the following are known as of this writing:
+//
+// 0x00 - Generic. This is decoded as []byte(data), not Binary{0x00, data}.
+// 0x01 - Function (!?)
+// 0x02 - Obsolete generic.
+// 0x03 - UUID
+// 0x05 - MD5
+// 0x80 - User defined.
+//
+type Binary struct {
+ Kind byte
+ Data []byte
+}
+
+// RegEx represents a regular expression. The Options field may contain
+// individual characters defining the way in which the pattern should be
+// applied, and must be sorted. Valid options as of this writing are 'i' for
+// case insensitive matching, 'm' for multi-line matching, 'x' for verbose
+// mode, 'l' to make \w, \W, and similar be locale-dependent, 's' for dot-all
+// mode (a '.' matches everything), and 'u' to make \w, \W, and similar match
+// unicode. The value of the Options parameter is not verified before being
+// marshaled into the BSON format.
+type RegEx struct {
+ Pattern string
+ Options string
+}
+
+// JavaScript is a type that holds JavaScript code. If Scope is non-nil, it
+// will be marshaled as a mapping from identifiers to values that may be
+// used when evaluating the provided Code.
+type JavaScript struct {
+ Code string
+ Scope interface{}
+}
+
+// DBPointer refers to a document id in a namespace.
+//
+// This type is deprecated in the BSON specification and should not be used
+// except for backwards compatibility with ancient applications.
+type DBPointer struct {
+ Namespace string
+ Id ObjectId
+}
+
+const initialBufferSize = 64
+
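+// handleErr recovers panics raised while marshalling or unmarshalling and
+// stores them in *err, so that Marshal and Unmarshal can return them as
+// plain errors. Runtime errors, externalPanic values, and panics of any
+// other unexpected type are re-raised.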
+func handleErr(err *error) {
+ if r := recover(); r != nil {
+ if _, ok := r.(runtime.Error); ok {
+ panic(r)
+ } else if _, ok := r.(externalPanic); ok {
+ panic(r)
+ } else if s, ok := r.(string); ok {
+ *err = errors.New(s)
+ } else if e, ok := r.(error); ok {
+ *err = e
+ } else {
+ panic(r)
+ }
+ }
+}
+
+// Marshal serializes the in value, which may be a map or a struct value.
+// In the case of struct values, only exported fields will be serialized,
+// and the order of serialized fields will match that of the struct itself.
+// The lowercased field name is used as the key for each exported field,
+// but this behavior may be changed using the respective field tag.
+// The tag may also contain flags to tweak the marshalling behavior for
+// the field. The tag formats accepted are:
+//
+// "[<key>][,<flag1>[,<flag2>]]"
+//
+// `(...) bson:"[<key>][,<flag1>[,<flag2>]]" (...)`
+//
+// The following flags are currently supported:
+//
+// omitempty Only include the field if it's not set to the zero
+// value for the type or to empty slices or maps.
+//
+// minsize Marshal an int64 value as an int32, if that's feasible
+// while preserving the numeric value.
+//
+// inline Inline the field, which must be a struct or a map,
+// causing all of its fields or keys to be processed as if
+// they were part of the outer struct. For maps, keys must
+// not conflict with the bson keys of other struct fields.
+//
+// Some examples:
+//
+// type T struct {
+// A bool
+// B int "myb"
+// C string "myc,omitempty"
+// D string `bson:",omitempty" json:"jsonkey"`
+// E int64 ",minsize"
+// F int64 "myf,omitempty,minsize"
+// }
+//
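+// As a rough usage sketch, a value of the T type above could be marshalled
+// and decoded back into a map, which then holds the keys "a", "myb" and "e"
+// while the omitempty fields are dropped:
+//
+// data, err := bson.Marshal(&T{A: true, B: 7})
+// if err != nil {
+// panic(err)
+// }
+// m := bson.M{}
+// err = bson.Unmarshal(data, m)
+//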
+func Marshal(in interface{}) (out []byte, err error) {
+ defer handleErr(&err)
+ e := &encoder{make([]byte, 0, initialBufferSize)}
+ e.addDoc(reflect.ValueOf(in))
+ return e.out, nil
+}
+
+// Unmarshal deserializes data from in into the out value. The out value
+// must be a map, a pointer to a struct, or a pointer to a bson.D value.
+// In the case of struct values, only exported fields will be deserialized.
+// The lowercased field name is used as the key for each exported field,
+// but this behavior may be changed using the respective field tag.
+// The tag may also contain flags to tweak the marshalling behavior for
+// the field. The tag formats accepted are:
+//
+// "[<key>][,<flag1>[,<flag2>]]"
+//
+// `(...) bson:"[<key>][,<flag1>[,<flag2>]]" (...)`
+//
+// The following flags are currently supported during unmarshal (see the
+// Marshal method for other flags):
+//
+// inline Inline the field, which must be a struct or a map.
+// Inlined structs are handled as if their fields were part
+// of the outer struct. An inlined map causes keys that do
+// not match any other struct field to be inserted in the
+// map rather than being discarded as usual.
+//
+// The target field or element types of out may not necessarily match
+// the BSON values of the provided data. The following conversions are
+// made automatically:
+//
+// - Numeric types are converted if at least the integer part of the
+// value would be preserved correctly
+// - Bools are converted to numeric types as 1 or 0
+// - Numeric types are converted to bools as true if not 0 or false otherwise
+// - Binary and string BSON data is converted to a string, array or byte slice
+//
+// If the value would not fit the type and cannot be converted, it's
+// silently skipped.
+//
+// Pointer values are initialized when necessary.
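+//
+// For instance, keys that do not match any struct field may be collected
+// into an inlined map, so that any key other than "name" below ends up in
+// the Extra map (the Event type is hypothetical):
+//
+// type Event struct {
+// Name string
+// Extra map[string]interface{} `bson:",inline"`
+// }
+//
+// var ev Event
+// err := bson.Unmarshal(data, &ev)
+//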
+func Unmarshal(in []byte, out interface{}) (err error) {
+ if raw, ok := out.(*Raw); ok {
+ raw.Kind = 3
+ raw.Data = in
+ return nil
+ }
+ defer handleErr(&err)
+ v := reflect.ValueOf(out)
+ switch v.Kind() {
+ case reflect.Ptr:
+ fallthrough
+ case reflect.Map:
+ d := newDecoder(in)
+ d.readDocTo(v)
+ case reflect.Struct:
+ return errors.New("Unmarshal can't deal with struct values. Use a pointer.")
+ default:
+ return errors.New("Unmarshal needs a map or a pointer to a struct.")
+ }
+ return nil
+}
+
+// Unmarshal deserializes raw into the out value. If the out value type
+// is not compatible with raw, a *bson.TypeError is returned.
+//
+// See the Unmarshal function documentation for more details on the
+// unmarshalling process.
+func (raw Raw) Unmarshal(out interface{}) (err error) {
+ defer handleErr(&err)
+ v := reflect.ValueOf(out)
+ switch v.Kind() {
+ case reflect.Ptr:
+ v = v.Elem()
+ fallthrough
+ case reflect.Map:
+ d := newDecoder(raw.Data)
+ good := d.readElemTo(v, raw.Kind)
+ if !good {
+ return &TypeError{v.Type(), raw.Kind}
+ }
+ case reflect.Struct:
+ return errors.New("Raw Unmarshal can't deal with struct values. Use a pointer.")
+ default:
+ return errors.New("Raw Unmarshal needs a map or a valid pointer.")
+ }
+ return nil
+}
+
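+// TypeError is the error returned when a BSON value cannot be decoded into
+// the target Go type. Raw.Unmarshal returns it directly, and a SetBSON method
+// may return it to have the offending value omitted from a map or slice
+// being decoded (see the Setter documentation).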
+type TypeError struct {
+ Type reflect.Type
+ Kind byte
+}
+
+func (e *TypeError) Error() string {
+ return fmt.Sprintf("BSON kind 0x%02x isn't compatible with type %s", e.Kind, e.Type.String())
+}
+
+// --------------------------------------------------------------------------
+// Maintain a mapping of keys to structure field indexes
+
+type structInfo struct {
+ FieldsMap map[string]fieldInfo
+ FieldsList []fieldInfo
+ InlineMap int
+ Zero reflect.Value
+}
+
+type fieldInfo struct {
+ Key string
+ Num int
+ OmitEmpty bool
+ MinSize bool
+ Inline []int
+}
+
+var structMap = make(map[reflect.Type]*structInfo)
+var structMapMutex sync.RWMutex
+
+type externalPanic string
+
+func (e externalPanic) String() string {
+ return string(e)
+}
+
+func getStructInfo(st reflect.Type) (*structInfo, error) {
+ structMapMutex.RLock()
+ sinfo, found := structMap[st]
+ structMapMutex.RUnlock()
+ if found {
+ return sinfo, nil
+ }
+ n := st.NumField()
+ fieldsMap := make(map[string]fieldInfo)
+ fieldsList := make([]fieldInfo, 0, n)
+ inlineMap := -1
+ for i := 0; i != n; i++ {
+ field := st.Field(i)
+ if field.PkgPath != "" && !field.Anonymous {
+ continue // Private field
+ }
+
+ info := fieldInfo{Num: i}
+
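+ // The tag may use the standard `bson:"..."` form or, when the raw tag
+ // string contains no colon at all, be taken verbatim as the bson tag
+ // (e.g. a bare "myname,omitempty" tag, as used by older code).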
+ tag := field.Tag.Get("bson")
+ if tag == "" && strings.Index(string(field.Tag), ":") < 0 {
+ tag = string(field.Tag)
+ }
+ if tag == "-" {
+ continue
+ }
+
+ inline := false
+ fields := strings.Split(tag, ",")
+ if len(fields) > 1 {
+ for _, flag := range fields[1:] {
+ switch flag {
+ case "omitempty":
+ info.OmitEmpty = true
+ case "minsize":
+ info.MinSize = true
+ case "inline":
+ inline = true
+ default:
+ msg := fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st)
+ panic(externalPanic(msg))
+ }
+ }
+ tag = fields[0]
+ }
+
+ if inline {
+ switch field.Type.Kind() {
+ case reflect.Map:
+ if inlineMap >= 0 {
+ return nil, errors.New("Multiple ,inline maps in struct " + st.String())
+ }
+ if field.Type.Key() != reflect.TypeOf("") {
+ return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String())
+ }
+ inlineMap = info.Num
+ case reflect.Struct:
+ sinfo, err := getStructInfo(field.Type)
+ if err != nil {
+ return nil, err
+ }
+ for _, finfo := range sinfo.FieldsList {
+ if _, found := fieldsMap[finfo.Key]; found {
+ msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String()
+ return nil, errors.New(msg)
+ }
+ if finfo.Inline == nil {
+ finfo.Inline = []int{i, finfo.Num}
+ } else {
+ finfo.Inline = append([]int{i}, finfo.Inline...)
+ }
+ fieldsMap[finfo.Key] = finfo
+ fieldsList = append(fieldsList, finfo)
+ }
+ default:
+ panic("Option ,inline needs a struct value or map field")
+ }
+ continue
+ }
+
+ if tag != "" {
+ info.Key = tag
+ } else {
+ info.Key = strings.ToLower(field.Name)
+ }
+
+ if _, found = fieldsMap[info.Key]; found {
+ msg := "Duplicated key '" + info.Key + "' in struct " + st.String()
+ return nil, errors.New(msg)
+ }
+
+ fieldsList = append(fieldsList, info)
+ fieldsMap[info.Key] = info
+ }
+ sinfo = &structInfo{
+ fieldsMap,
+ fieldsList,
+ inlineMap,
+ reflect.New(st).Elem(),
+ }
+ structMapMutex.Lock()
+ structMap[st] = sinfo
+ structMapMutex.Unlock()
+ return sinfo, nil
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/bson_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/bson_test.go
new file mode 100644
index 00000000000..37451f9fdc2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/bson_test.go
@@ -0,0 +1,1832 @@
+// BSON library for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// gobson - BSON library for Go.
+
+package bson_test
+
+import (
+ "encoding/binary"
+ "encoding/hex"
+ "encoding/json"
+ "encoding/xml"
+ "errors"
+ "net/url"
+ "reflect"
+ "strings"
+ "testing"
+ "time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2/bson"
+ "gopkg.in/yaml.v2"
+)
+
+func TestAll(t *testing.T) {
+ TestingT(t)
+}
+
+type S struct{}
+
+var _ = Suite(&S{})
+
+// Wrap up the document elements contained in data, prepending the int32
+// length of the resulting document and appending the '\x00' that closes it.
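+//
+// For example, wrapInDoc("\x08b\x00\x01") yields
+// "\x09\x00\x00\x00\x08b\x00\x01\x00": a 9-byte document holding a single
+// boolean element named "b".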
+func wrapInDoc(data string) string {
+ result := make([]byte, len(data)+5)
+ binary.LittleEndian.PutUint32(result, uint32(len(result)))
+ copy(result[4:], []byte(data))
+ return string(result)
+}
+
+func makeZeroDoc(value interface{}) (zero interface{}) {
+ v := reflect.ValueOf(value)
+ t := v.Type()
+ switch t.Kind() {
+ case reflect.Map:
+ mv := reflect.MakeMap(t)
+ zero = mv.Interface()
+ case reflect.Ptr:
+ pv := reflect.New(v.Type().Elem())
+ zero = pv.Interface()
+ case reflect.Slice, reflect.Int, reflect.Int64, reflect.Struct:
+ zero = reflect.New(t).Interface()
+ default:
+ panic("unsupported doc type: " + t.Name())
+ }
+ return zero
+}
+
+func testUnmarshal(c *C, data string, obj interface{}) {
+ zero := makeZeroDoc(obj)
+ err := bson.Unmarshal([]byte(data), zero)
+ c.Assert(err, IsNil)
+ c.Assert(zero, DeepEquals, obj)
+}
+
+type testItemType struct {
+ obj interface{}
+ data string
+}
+
+// --------------------------------------------------------------------------
+// Samples from bsonspec.org:
+
+var sampleItems = []testItemType{
+ {bson.M{"hello": "world"},
+ "\x16\x00\x00\x00\x02hello\x00\x06\x00\x00\x00world\x00\x00"},
+ {bson.M{"BSON": []interface{}{"awesome", float64(5.05), 1986}},
+ "1\x00\x00\x00\x04BSON\x00&\x00\x00\x00\x020\x00\x08\x00\x00\x00" +
+ "awesome\x00\x011\x00333333\x14@\x102\x00\xc2\x07\x00\x00\x00\x00"},
+}
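+
+// Reading the first sample byte by byte: \x16\x00\x00\x00 is the total
+// document length (22), \x02 starts a string element, "hello\x00" is the
+// element name, \x06\x00\x00\x00 is the value length including its trailing
+// NUL, and "world\x00" is the value; the final \x00 closes the document.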
+
+func (s *S) TestMarshalSampleItems(c *C) {
+ for i, item := range sampleItems {
+ data, err := bson.Marshal(item.obj)
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, item.data, Commentf("Failed on item %d", i))
+ }
+}
+
+func (s *S) TestUnmarshalSampleItems(c *C) {
+ for i, item := range sampleItems {
+ value := bson.M{}
+ err := bson.Unmarshal([]byte(item.data), value)
+ c.Assert(err, IsNil)
+ c.Assert(value, DeepEquals, item.obj, Commentf("Failed on item %d", i))
+ }
+}
+
+// --------------------------------------------------------------------------
+// Every type, ordered by the type flag. These are not wrapped with the
+// length and last \x00 from the document. wrapInDoc() computes them.
+// Note that all of them should be supported as two-way conversions.
+
+var allItems = []testItemType{
+ {bson.M{},
+ ""},
+ {bson.M{"_": float64(5.05)},
+ "\x01_\x00333333\x14@"},
+ {bson.M{"_": "yo"},
+ "\x02_\x00\x03\x00\x00\x00yo\x00"},
+ {bson.M{"_": bson.M{"a": true}},
+ "\x03_\x00\x09\x00\x00\x00\x08a\x00\x01\x00"},
+ {bson.M{"_": []interface{}{true, false}},
+ "\x04_\x00\r\x00\x00\x00\x080\x00\x01\x081\x00\x00\x00"},
+ {bson.M{"_": []byte("yo")},
+ "\x05_\x00\x02\x00\x00\x00\x00yo"},
+ {bson.M{"_": bson.Binary{0x80, []byte("udef")}},
+ "\x05_\x00\x04\x00\x00\x00\x80udef"},
+ {bson.M{"_": bson.Undefined}, // Obsolete, but still seen in the wild.
+ "\x06_\x00"},
+ {bson.M{"_": bson.ObjectId("0123456789ab")},
+ "\x07_\x000123456789ab"},
+ {bson.M{"_": bson.DBPointer{"testnamespace", bson.ObjectId("0123456789ab")}},
+ "\x0C_\x00\x0e\x00\x00\x00testnamespace\x000123456789ab"},
+ {bson.M{"_": false},
+ "\x08_\x00\x00"},
+ {bson.M{"_": true},
+ "\x08_\x00\x01"},
+ {bson.M{"_": time.Unix(0, 258e6)}, // Note the NS <=> MS conversion.
+ "\x09_\x00\x02\x01\x00\x00\x00\x00\x00\x00"},
+ {bson.M{"_": nil},
+ "\x0A_\x00"},
+ {bson.M{"_": bson.RegEx{"ab", "cd"}},
+ "\x0B_\x00ab\x00cd\x00"},
+ {bson.M{"_": bson.JavaScript{"code", nil}},
+ "\x0D_\x00\x05\x00\x00\x00code\x00"},
+ {bson.M{"_": bson.Symbol("sym")},
+ "\x0E_\x00\x04\x00\x00\x00sym\x00"},
+ {bson.M{"_": bson.JavaScript{"code", bson.M{"": nil}}},
+ "\x0F_\x00\x14\x00\x00\x00\x05\x00\x00\x00code\x00" +
+ "\x07\x00\x00\x00\x0A\x00\x00"},
+ {bson.M{"_": 258},
+ "\x10_\x00\x02\x01\x00\x00"},
+ {bson.M{"_": bson.MongoTimestamp(258)},
+ "\x11_\x00\x02\x01\x00\x00\x00\x00\x00\x00"},
+ {bson.M{"_": int64(258)},
+ "\x12_\x00\x02\x01\x00\x00\x00\x00\x00\x00"},
+ {bson.M{"_": int64(258 << 32)},
+ "\x12_\x00\x00\x00\x00\x00\x02\x01\x00\x00"},
+ {bson.M{"_": bson.MaxKey},
+ "\x7F_\x00"},
+ {bson.M{"_": bson.MinKey},
+ "\xFF_\x00"},
+}
+
+func (s *S) TestMarshalAllItems(c *C) {
+ for i, item := range allItems {
+ data, err := bson.Marshal(item.obj)
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, wrapInDoc(item.data), Commentf("Failed on item %d: %#v", i, item))
+ }
+}
+
+func (s *S) TestUnmarshalAllItems(c *C) {
+ for i, item := range allItems {
+ value := bson.M{}
+ err := bson.Unmarshal([]byte(wrapInDoc(item.data)), value)
+ c.Assert(err, IsNil)
+ c.Assert(value, DeepEquals, item.obj, Commentf("Failed on item %d: %#v", i, item))
+ }
+}
+
+func (s *S) TestUnmarshalRawAllItems(c *C) {
+ for i, item := range allItems {
+ if len(item.data) == 0 {
+ continue
+ }
+ value := item.obj.(bson.M)["_"]
+ if value == nil {
+ continue
+ }
+ pv := reflect.New(reflect.ValueOf(value).Type())
+ raw := bson.Raw{item.data[0], []byte(item.data[3:])}
+ c.Logf("Unmarshal raw: %#v, %#v", raw, pv.Interface())
+ err := raw.Unmarshal(pv.Interface())
+ c.Assert(err, IsNil)
+ c.Assert(pv.Elem().Interface(), DeepEquals, value, Commentf("Failed on item %d: %#v", i, item))
+ }
+}
+
+func (s *S) TestUnmarshalRawIncompatible(c *C) {
+ raw := bson.Raw{0x08, []byte{0x01}} // true
+ err := raw.Unmarshal(&struct{}{})
+ c.Assert(err, ErrorMatches, "BSON kind 0x08 isn't compatible with type struct \\{\\}")
+}
+
+func (s *S) TestUnmarshalZeroesStruct(c *C) {
+ data, err := bson.Marshal(bson.M{"b": 2})
+ c.Assert(err, IsNil)
+ type T struct{ A, B int }
+ v := T{A: 1}
+ err = bson.Unmarshal(data, &v)
+ c.Assert(err, IsNil)
+ c.Assert(v.A, Equals, 0)
+ c.Assert(v.B, Equals, 2)
+}
+
+func (s *S) TestUnmarshalZeroesMap(c *C) {
+ data, err := bson.Marshal(bson.M{"b": 2})
+ c.Assert(err, IsNil)
+ m := bson.M{"a": 1}
+ err = bson.Unmarshal(data, &m)
+ c.Assert(err, IsNil)
+ c.Assert(m, DeepEquals, bson.M{"b": 2})
+}
+
+func (s *S) TestUnmarshalNonNilInterface(c *C) {
+ data, err := bson.Marshal(bson.M{"b": 2})
+ c.Assert(err, IsNil)
+ m := bson.M{"a": 1}
+ var i interface{}
+ i = m
+ err = bson.Unmarshal(data, &i)
+ c.Assert(err, IsNil)
+ c.Assert(i, DeepEquals, bson.M{"b": 2})
+ c.Assert(m, DeepEquals, bson.M{"a": 1})
+}
+
+// --------------------------------------------------------------------------
+// Some one-way marshaling operations which would unmarshal differently.
+
+var oneWayMarshalItems = []testItemType{
+ // These are being passed as pointers, and will unmarshal as values.
+ {bson.M{"": &bson.Binary{0x02, []byte("old")}},
+ "\x05\x00\x07\x00\x00\x00\x02\x03\x00\x00\x00old"},
+ {bson.M{"": &bson.Binary{0x80, []byte("udef")}},
+ "\x05\x00\x04\x00\x00\x00\x80udef"},
+ {bson.M{"": &bson.RegEx{"ab", "cd"}},
+ "\x0B\x00ab\x00cd\x00"},
+ {bson.M{"": &bson.JavaScript{"code", nil}},
+ "\x0D\x00\x05\x00\x00\x00code\x00"},
+ {bson.M{"": &bson.JavaScript{"code", bson.M{"": nil}}},
+ "\x0F\x00\x14\x00\x00\x00\x05\x00\x00\x00code\x00" +
+ "\x07\x00\x00\x00\x0A\x00\x00"},
+
+ // There's no float32 type in BSON. Will encode as a float64.
+ {bson.M{"": float32(5.05)},
+ "\x01\x00\x00\x00\x00@33\x14@"},
+
+ // The array will be unmarshaled as a slice instead.
+ {bson.M{"": [2]bool{true, false}},
+ "\x04\x00\r\x00\x00\x00\x080\x00\x01\x081\x00\x00\x00"},
+
+ // The typed slice will be unmarshaled as []interface{}.
+ {bson.M{"": []bool{true, false}},
+ "\x04\x00\r\x00\x00\x00\x080\x00\x01\x081\x00\x00\x00"},
+
+ // Will unmarshal as a []byte.
+ {bson.M{"": bson.Binary{0x00, []byte("yo")}},
+ "\x05\x00\x02\x00\x00\x00\x00yo"},
+ {bson.M{"": bson.Binary{0x02, []byte("old")}},
+ "\x05\x00\x07\x00\x00\x00\x02\x03\x00\x00\x00old"},
+
+ // No way to preserve the type information here. We might encode as a zero
+ // value, but this would mean that pointer values in structs wouldn't be
+ // able to correctly distinguish between unset and set to the zero value.
+ {bson.M{"": (*byte)(nil)},
+ "\x0A\x00"},
+
+ // No int types smaller than int32 in BSON. Could encode this as a char,
+// but it would still be ambiguous, take more space, and be awkward in Go when
+ // loaded without typing information.
+ {bson.M{"": byte(8)},
+ "\x10\x00\x08\x00\x00\x00"},
+
+ // There are no unsigned types in BSON. Will unmarshal as int32 or int64.
+ {bson.M{"": uint32(258)},
+ "\x10\x00\x02\x01\x00\x00"},
+ {bson.M{"": uint64(258)},
+ "\x12\x00\x02\x01\x00\x00\x00\x00\x00\x00"},
+ {bson.M{"": uint64(258 << 32)},
+ "\x12\x00\x00\x00\x00\x00\x02\x01\x00\x00"},
+
+ // This will unmarshal as int.
+ {bson.M{"": int32(258)},
+ "\x10\x00\x02\x01\x00\x00"},
+
+ // That's a special case. The unsigned value is too large for an int32,
+ // so an int64 is used instead.
+ {bson.M{"": uint32(1<<32 - 1)},
+ "\x12\x00\xFF\xFF\xFF\xFF\x00\x00\x00\x00"},
+ {bson.M{"": uint(1<<32 - 1)},
+ "\x12\x00\xFF\xFF\xFF\xFF\x00\x00\x00\x00"},
+}
+
+func (s *S) TestOneWayMarshalItems(c *C) {
+ for i, item := range oneWayMarshalItems {
+ data, err := bson.Marshal(item.obj)
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, wrapInDoc(item.data),
+ Commentf("Failed on item %d", i))
+ }
+}
+
+// --------------------------------------------------------------------------
+// Two-way tests for user-defined structures using the samples
+// from bsonspec.org.
+
+type specSample1 struct {
+ Hello string
+}
+
+type specSample2 struct {
+ BSON []interface{} "BSON"
+}
+
+var structSampleItems = []testItemType{
+ {&specSample1{"world"},
+ "\x16\x00\x00\x00\x02hello\x00\x06\x00\x00\x00world\x00\x00"},
+ {&specSample2{[]interface{}{"awesome", float64(5.05), 1986}},
+ "1\x00\x00\x00\x04BSON\x00&\x00\x00\x00\x020\x00\x08\x00\x00\x00" +
+ "awesome\x00\x011\x00333333\x14@\x102\x00\xc2\x07\x00\x00\x00\x00"},
+}
+
+func (s *S) TestMarshalStructSampleItems(c *C) {
+ for i, item := range structSampleItems {
+ data, err := bson.Marshal(item.obj)
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, item.data,
+ Commentf("Failed on item %d", i))
+ }
+}
+
+func (s *S) TestUnmarshalStructSampleItems(c *C) {
+ for _, item := range structSampleItems {
+ testUnmarshal(c, item.data, item.obj)
+ }
+}
+
+func (s *S) Test64bitInt(c *C) {
+ var i int64 = (1 << 31)
+ if int(i) > 0 {
+ data, err := bson.Marshal(bson.M{"i": int(i)})
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, wrapInDoc("\x12i\x00\x00\x00\x00\x80\x00\x00\x00\x00"))
+
+ var result struct{ I int }
+ err = bson.Unmarshal(data, &result)
+ c.Assert(err, IsNil)
+ c.Assert(int64(result.I), Equals, i)
+ }
+}
+
+// --------------------------------------------------------------------------
+// Generic two-way struct marshaling tests.
+
+var bytevar = byte(8)
+var byteptr = &bytevar
+
+var structItems = []testItemType{
+ {&struct{ Ptr *byte }{nil},
+ "\x0Aptr\x00"},
+ {&struct{ Ptr *byte }{&bytevar},
+ "\x10ptr\x00\x08\x00\x00\x00"},
+ {&struct{ Ptr **byte }{&byteptr},
+ "\x10ptr\x00\x08\x00\x00\x00"},
+ {&struct{ Byte byte }{8},
+ "\x10byte\x00\x08\x00\x00\x00"},
+ {&struct{ Byte byte }{0},
+ "\x10byte\x00\x00\x00\x00\x00"},
+ {&struct {
+ V byte "Tag"
+ }{8},
+ "\x10Tag\x00\x08\x00\x00\x00"},
+ {&struct {
+ V *struct {
+ Byte byte
+ }
+ }{&struct{ Byte byte }{8}},
+ "\x03v\x00" + "\x0f\x00\x00\x00\x10byte\x00\b\x00\x00\x00\x00"},
+ {&struct{ priv byte }{}, ""},
+
+ // The order of the dumped fields should be the same in the struct.
+ {&struct{ A, C, B, D, F, E *byte }{},
+ "\x0Aa\x00\x0Ac\x00\x0Ab\x00\x0Ad\x00\x0Af\x00\x0Ae\x00"},
+
+ {&struct{ V bson.Raw }{bson.Raw{0x03, []byte("\x0f\x00\x00\x00\x10byte\x00\b\x00\x00\x00\x00")}},
+ "\x03v\x00" + "\x0f\x00\x00\x00\x10byte\x00\b\x00\x00\x00\x00"},
+ {&struct{ V bson.Raw }{bson.Raw{0x10, []byte("\x00\x00\x00\x00")}},
+ "\x10v\x00" + "\x00\x00\x00\x00"},
+
+ // Byte arrays.
+ {&struct{ V [2]byte }{[2]byte{'y', 'o'}},
+ "\x05v\x00\x02\x00\x00\x00\x00yo"},
+}
+
+func (s *S) TestMarshalStructItems(c *C) {
+ for i, item := range structItems {
+ data, err := bson.Marshal(item.obj)
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, wrapInDoc(item.data),
+ Commentf("Failed on item %d", i))
+ }
+}
+
+func (s *S) TestUnmarshalStructItems(c *C) {
+ for _, item := range structItems {
+ testUnmarshal(c, wrapInDoc(item.data), item.obj)
+ }
+}
+
+func (s *S) TestUnmarshalRawStructItems(c *C) {
+ for i, item := range structItems {
+ raw := bson.Raw{0x03, []byte(wrapInDoc(item.data))}
+ zero := makeZeroDoc(item.obj)
+ err := raw.Unmarshal(zero)
+ c.Assert(err, IsNil)
+ c.Assert(zero, DeepEquals, item.obj, Commentf("Failed on item %d: %#v", i, item))
+ }
+}
+
+func (s *S) TestUnmarshalRawNil(c *C) {
+ // Regression test: shouldn't try to nil out the pointer itself,
+ // as it's not settable.
+ raw := bson.Raw{0x0A, []byte{}}
+ err := raw.Unmarshal(&struct{}{})
+ c.Assert(err, IsNil)
+}
+
+// --------------------------------------------------------------------------
+// One-way marshaling tests.
+
+type dOnIface struct {
+ D interface{}
+}
+
+type ignoreField struct {
+ Before string
+ Ignore string `bson:"-"`
+ After string
+}
+
+var marshalItems = []testItemType{
+ // Ordered document dump. Will unmarshal as a dictionary by default.
+ {bson.D{{"a", nil}, {"c", nil}, {"b", nil}, {"d", nil}, {"f", nil}, {"e", true}},
+ "\x0Aa\x00\x0Ac\x00\x0Ab\x00\x0Ad\x00\x0Af\x00\x08e\x00\x01"},
+ {MyD{{"a", nil}, {"c", nil}, {"b", nil}, {"d", nil}, {"f", nil}, {"e", true}},
+ "\x0Aa\x00\x0Ac\x00\x0Ab\x00\x0Ad\x00\x0Af\x00\x08e\x00\x01"},
+ {&dOnIface{bson.D{{"a", nil}, {"c", nil}, {"b", nil}, {"d", true}}},
+ "\x03d\x00" + wrapInDoc("\x0Aa\x00\x0Ac\x00\x0Ab\x00\x08d\x00\x01")},
+
+ {bson.RawD{{"a", bson.Raw{0x0A, nil}}, {"c", bson.Raw{0x0A, nil}}, {"b", bson.Raw{0x08, []byte{0x01}}}},
+ "\x0Aa\x00" + "\x0Ac\x00" + "\x08b\x00\x01"},
+ {MyRawD{{"a", bson.Raw{0x0A, nil}}, {"c", bson.Raw{0x0A, nil}}, {"b", bson.Raw{0x08, []byte{0x01}}}},
+ "\x0Aa\x00" + "\x0Ac\x00" + "\x08b\x00\x01"},
+ {&dOnIface{bson.RawD{{"a", bson.Raw{0x0A, nil}}, {"c", bson.Raw{0x0A, nil}}, {"b", bson.Raw{0x08, []byte{0x01}}}}},
+ "\x03d\x00" + wrapInDoc("\x0Aa\x00"+"\x0Ac\x00"+"\x08b\x00\x01")},
+
+ {&ignoreField{"before", "ignore", "after"},
+ "\x02before\x00\a\x00\x00\x00before\x00\x02after\x00\x06\x00\x00\x00after\x00"},
+
+ // Marshalling a Raw document does nothing.
+ {bson.Raw{0x03, []byte(wrapInDoc("anything"))},
+ "anything"},
+ {bson.Raw{Data: []byte(wrapInDoc("anything"))},
+ "anything"},
+}
+
+func (s *S) TestMarshalOneWayItems(c *C) {
+ for _, item := range marshalItems {
+ data, err := bson.Marshal(item.obj)
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, wrapInDoc(item.data))
+ }
+}
+
+// --------------------------------------------------------------------------
+// One-way unmarshaling tests.
+
+var unmarshalItems = []testItemType{
+ // Field is private. Should not attempt to unmarshal it.
+ {&struct{ priv byte }{},
+ "\x10priv\x00\x08\x00\x00\x00"},
+
+ // Wrong casing. Field names are lowercased.
+ {&struct{ Byte byte }{},
+ "\x10Byte\x00\x08\x00\x00\x00"},
+
+ // Ignore non-existing field.
+ {&struct{ Byte byte }{9},
+ "\x10boot\x00\x08\x00\x00\x00" + "\x10byte\x00\x09\x00\x00\x00"},
+
+ // Do not unmarshal on ignored field.
+ {&ignoreField{"before", "", "after"},
+ "\x02before\x00\a\x00\x00\x00before\x00" +
+ "\x02-\x00\a\x00\x00\x00ignore\x00" +
+ "\x02after\x00\x06\x00\x00\x00after\x00"},
+
+ // Ignore unsuitable types silently.
+ {map[string]string{"str": "s"},
+ "\x02str\x00\x02\x00\x00\x00s\x00" + "\x10int\x00\x01\x00\x00\x00"},
+ {map[string][]int{"array": []int{5, 9}},
+ "\x04array\x00" + wrapInDoc("\x100\x00\x05\x00\x00\x00"+"\x021\x00\x02\x00\x00\x00s\x00"+"\x102\x00\x09\x00\x00\x00")},
+
+ // Wrong type. Shouldn't init pointer.
+ {&struct{ Str *byte }{},
+ "\x02str\x00\x02\x00\x00\x00s\x00"},
+ {&struct{ Str *struct{ Str string } }{},
+ "\x02str\x00\x02\x00\x00\x00s\x00"},
+
+ // Ordered document.
+ {&struct{ bson.D }{bson.D{{"a", nil}, {"c", nil}, {"b", nil}, {"d", true}}},
+ "\x03d\x00" + wrapInDoc("\x0Aa\x00\x0Ac\x00\x0Ab\x00\x08d\x00\x01")},
+
+ // Raw document.
+ {&bson.Raw{0x03, []byte(wrapInDoc("\x10byte\x00\x08\x00\x00\x00"))},
+ "\x10byte\x00\x08\x00\x00\x00"},
+
+ // RawD document.
+ {&struct{ bson.RawD }{bson.RawD{{"a", bson.Raw{0x0A, []byte{}}}, {"c", bson.Raw{0x0A, []byte{}}}, {"b", bson.Raw{0x08, []byte{0x01}}}}},
+ "\x03rawd\x00" + wrapInDoc("\x0Aa\x00\x0Ac\x00\x08b\x00\x01")},
+
+ // Decode old binary.
+ {bson.M{"_": []byte("old")},
+ "\x05_\x00\x07\x00\x00\x00\x02\x03\x00\x00\x00old"},
+
+ // Decode old binary without length. According to the spec, this shouldn't happen.
+ {bson.M{"_": []byte("old")},
+ "\x05_\x00\x03\x00\x00\x00\x02old"},
+
+// Decode a doc within a doc into a slice within a doc; shouldn't error
+ {&struct{ Foo []string }{},
+ "\x03\x66\x6f\x6f\x00\x05\x00\x00\x00\x00"},
+}
+
+func (s *S) TestUnmarshalOneWayItems(c *C) {
+ for _, item := range unmarshalItems {
+ testUnmarshal(c, wrapInDoc(item.data), item.obj)
+ }
+}
+
+func (s *S) TestUnmarshalNilInStruct(c *C) {
+ // Nil is the default value, so we need to ensure it's indeed being set.
+ b := byte(1)
+ v := &struct{ Ptr *byte }{&b}
+ err := bson.Unmarshal([]byte(wrapInDoc("\x0Aptr\x00")), v)
+ c.Assert(err, IsNil)
+ c.Assert(v, DeepEquals, &struct{ Ptr *byte }{nil})
+}
+
+// --------------------------------------------------------------------------
+// Marshalling error cases.
+
+type structWithDupKeys struct {
+ Name byte
+ Other byte "name" // Tag should precede.
+}
+
+var marshalErrorItems = []testItemType{
+ {bson.M{"": uint64(1 << 63)},
+ "BSON has no uint64 type, and value is too large to fit correctly in an int64"},
+ {bson.M{"": bson.ObjectId("tooshort")},
+ "ObjectIDs must be exactly 12 bytes long \\(got 8\\)"},
+ {int64(123),
+ "Can't marshal int64 as a BSON document"},
+ {bson.M{"": 1i},
+ "Can't marshal complex128 in a BSON document"},
+ {&structWithDupKeys{},
+ "Duplicated key 'name' in struct bson_test.structWithDupKeys"},
+ {bson.Raw{0xA, []byte{}},
+ "Attempted to marshal Raw kind 10 as a document"},
+ {bson.Raw{0x3, []byte{}},
+ "Attempted to marshal empty Raw document"},
+ {bson.M{"w": bson.Raw{0x3, []byte{}}},
+ "Attempted to marshal empty Raw document"},
+ {&inlineCantPtr{&struct{ A, B int }{1, 2}},
+ "Option ,inline needs a struct value or map field"},
+ {&inlineDupName{1, struct{ A, B int }{2, 3}},
+ "Duplicated key 'a' in struct bson_test.inlineDupName"},
+ {&inlineDupMap{},
+ "Multiple ,inline maps in struct bson_test.inlineDupMap"},
+ {&inlineBadKeyMap{},
+ "Option ,inline needs a map with string keys in struct bson_test.inlineBadKeyMap"},
+ {&inlineMap{A: 1, M: map[string]interface{}{"a": 1}},
+ `Can't have key "a" in inlined map; conflicts with struct field`},
+}
+
+func (s *S) TestMarshalErrorItems(c *C) {
+ for _, item := range marshalErrorItems {
+ data, err := bson.Marshal(item.obj)
+ c.Assert(err, ErrorMatches, item.data)
+ c.Assert(data, IsNil)
+ }
+}
+
+// --------------------------------------------------------------------------
+// Unmarshalling error cases.
+
+type unmarshalErrorType struct {
+ obj interface{}
+ data string
+ error string
+}
+
+var unmarshalErrorItems = []unmarshalErrorType{
+ // Tag name conflicts with existing parameter.
+ {&structWithDupKeys{},
+ "\x10name\x00\x08\x00\x00\x00",
+ "Duplicated key 'name' in struct bson_test.structWithDupKeys"},
+
+ // Non-string map key.
+ {map[int]interface{}{},
+ "\x10name\x00\x08\x00\x00\x00",
+ "BSON map must have string keys. Got: map\\[int\\]interface \\{\\}"},
+
+ {nil,
+ "\xEEname\x00",
+ "Unknown element kind \\(0xEE\\)"},
+
+ {struct{ Name bool }{},
+ "\x10name\x00\x08\x00\x00\x00",
+ "Unmarshal can't deal with struct values. Use a pointer."},
+
+ {123,
+ "\x10name\x00\x08\x00\x00\x00",
+ "Unmarshal needs a map or a pointer to a struct."},
+
+ {nil,
+ "\x08\x62\x00\x02",
+ "encoded boolean must be 1 or 0, found 2"},
+}
+
+func (s *S) TestUnmarshalErrorItems(c *C) {
+ for _, item := range unmarshalErrorItems {
+ data := []byte(wrapInDoc(item.data))
+ var value interface{}
+ switch reflect.ValueOf(item.obj).Kind() {
+ case reflect.Map, reflect.Ptr:
+ value = makeZeroDoc(item.obj)
+ case reflect.Invalid:
+ value = bson.M{}
+ default:
+ value = item.obj
+ }
+ err := bson.Unmarshal(data, value)
+ c.Assert(err, ErrorMatches, item.error)
+ }
+}
+
+type unmarshalRawErrorType struct {
+ obj interface{}
+ raw bson.Raw
+ error string
+}
+
+var unmarshalRawErrorItems = []unmarshalRawErrorType{
+ // Tag name conflicts with existing parameter.
+ {&structWithDupKeys{},
+ bson.Raw{0x03, []byte("\x10byte\x00\x08\x00\x00\x00")},
+ "Duplicated key 'name' in struct bson_test.structWithDupKeys"},
+
+ {&struct{}{},
+ bson.Raw{0xEE, []byte{}},
+ "Unknown element kind \\(0xEE\\)"},
+
+ {struct{ Name bool }{},
+ bson.Raw{0x10, []byte("\x08\x00\x00\x00")},
+ "Raw Unmarshal can't deal with struct values. Use a pointer."},
+
+ {123,
+ bson.Raw{0x10, []byte("\x08\x00\x00\x00")},
+ "Raw Unmarshal needs a map or a valid pointer."},
+}
+
+func (s *S) TestUnmarshalRawErrorItems(c *C) {
+ for i, item := range unmarshalRawErrorItems {
+ err := item.raw.Unmarshal(item.obj)
+ c.Assert(err, ErrorMatches, item.error, Commentf("Failed on item %d: %#v\n", i, item))
+ }
+}
+
+var corruptedData = []string{
+ "\x04\x00\x00\x00\x00", // Document shorter than minimum
+ "\x06\x00\x00\x00\x00", // Not enough data
+ "\x05\x00\x00", // Broken length
+ "\x05\x00\x00\x00\xff", // Corrupted termination
+ "\x0A\x00\x00\x00\x0Aooop\x00", // Unfinished C string
+
+ // Array end past end of string (s[2]=0x07 is correct)
+ wrapInDoc("\x04\x00\x09\x00\x00\x00\x0A\x00\x00"),
+
+ // Array end within string, but past acceptable.
+ wrapInDoc("\x04\x00\x08\x00\x00\x00\x0A\x00\x00"),
+
+ // Document end within string, but past acceptable.
+ wrapInDoc("\x03\x00\x08\x00\x00\x00\x0A\x00\x00"),
+
+ // String with corrupted end.
+ wrapInDoc("\x02\x00\x03\x00\x00\x00yo\xFF"),
+
+ // String with negative length (issue #116).
+ "\x0c\x00\x00\x00\x02x\x00\xff\xff\xff\xff\x00",
+
+ // String with zero length (must include trailing '\x00')
+ "\x0c\x00\x00\x00\x02x\x00\x00\x00\x00\x00\x00",
+
+ // Binary with negative length.
+ "\r\x00\x00\x00\x05x\x00\xff\xff\xff\xff\x00\x00",
+}
+
+func (s *S) TestUnmarshalMapDocumentTooShort(c *C) {
+ for _, data := range corruptedData {
+ err := bson.Unmarshal([]byte(data), bson.M{})
+ c.Assert(err, ErrorMatches, "Document is corrupted")
+
+ err = bson.Unmarshal([]byte(data), &struct{}{})
+ c.Assert(err, ErrorMatches, "Document is corrupted")
+ }
+}
+
+// --------------------------------------------------------------------------
+// Setter test cases.
+
+var setterResult = map[string]error{}
+
+type setterType struct {
+ received interface{}
+}
+
+func (o *setterType) SetBSON(raw bson.Raw) error {
+ err := raw.Unmarshal(&o.received)
+ if err != nil {
+ panic("The panic:" + err.Error())
+ }
+ if s, ok := o.received.(string); ok {
+ if result, ok := setterResult[s]; ok {
+ return result
+ }
+ }
+ return nil
+}
+
+type ptrSetterDoc struct {
+ Field *setterType "_"
+}
+
+type valSetterDoc struct {
+ Field setterType "_"
+}
+
+func (s *S) TestUnmarshalAllItemsWithPtrSetter(c *C) {
+ for _, item := range allItems {
+ for i := 0; i != 2; i++ {
+ var field *setterType
+ if i == 0 {
+ obj := &ptrSetterDoc{}
+ err := bson.Unmarshal([]byte(wrapInDoc(item.data)), obj)
+ c.Assert(err, IsNil)
+ field = obj.Field
+ } else {
+ obj := &valSetterDoc{}
+ err := bson.Unmarshal([]byte(wrapInDoc(item.data)), obj)
+ c.Assert(err, IsNil)
+ field = &obj.Field
+ }
+ if item.data == "" {
+ // Nothing to unmarshal. Should be untouched.
+ if i == 0 {
+ c.Assert(field, IsNil)
+ } else {
+ c.Assert(field.received, IsNil)
+ }
+ } else {
+ expected := item.obj.(bson.M)["_"]
+ c.Assert(field, NotNil, Commentf("Pointer not initialized (%#v)", expected))
+ c.Assert(field.received, DeepEquals, expected)
+ }
+ }
+ }
+}
+
+func (s *S) TestUnmarshalWholeDocumentWithSetter(c *C) {
+ obj := &setterType{}
+ err := bson.Unmarshal([]byte(sampleItems[0].data), obj)
+ c.Assert(err, IsNil)
+ c.Assert(obj.received, DeepEquals, bson.M{"hello": "world"})
+}
+
+func (s *S) TestUnmarshalSetterOmits(c *C) {
+ setterResult["2"] = &bson.TypeError{}
+ setterResult["4"] = &bson.TypeError{}
+ defer func() {
+ delete(setterResult, "2")
+ delete(setterResult, "4")
+ }()
+
+ m := map[string]*setterType{}
+ data := wrapInDoc("\x02abc\x00\x02\x00\x00\x001\x00" +
+ "\x02def\x00\x02\x00\x00\x002\x00" +
+ "\x02ghi\x00\x02\x00\x00\x003\x00" +
+ "\x02jkl\x00\x02\x00\x00\x004\x00")
+ err := bson.Unmarshal([]byte(data), m)
+ c.Assert(err, IsNil)
+ c.Assert(m["abc"], NotNil)
+ c.Assert(m["def"], IsNil)
+ c.Assert(m["ghi"], NotNil)
+ c.Assert(m["jkl"], IsNil)
+
+ c.Assert(m["abc"].received, Equals, "1")
+ c.Assert(m["ghi"].received, Equals, "3")
+}
+
+func (s *S) TestUnmarshalSetterErrors(c *C) {
+ boom := errors.New("BOOM")
+ setterResult["2"] = boom
+ defer delete(setterResult, "2")
+
+ m := map[string]*setterType{}
+ data := wrapInDoc("\x02abc\x00\x02\x00\x00\x001\x00" +
+ "\x02def\x00\x02\x00\x00\x002\x00" +
+ "\x02ghi\x00\x02\x00\x00\x003\x00")
+ err := bson.Unmarshal([]byte(data), m)
+ c.Assert(err, Equals, boom)
+ c.Assert(m["abc"], NotNil)
+ c.Assert(m["def"], IsNil)
+ c.Assert(m["ghi"], IsNil)
+
+ c.Assert(m["abc"].received, Equals, "1")
+}
+
+func (s *S) TestDMap(c *C) {
+ d := bson.D{{"a", 1}, {"b", 2}}
+ c.Assert(d.Map(), DeepEquals, bson.M{"a": 1, "b": 2})
+}
+
+func (s *S) TestUnmarshalSetterSetZero(c *C) {
+ setterResult["foo"] = bson.SetZero
+ defer delete(setterResult, "foo")
+
+ data, err := bson.Marshal(bson.M{"field": "foo"})
+ c.Assert(err, IsNil)
+
+ m := map[string]*setterType{}
+ err = bson.Unmarshal([]byte(data), m)
+ c.Assert(err, IsNil)
+
+ value, ok := m["field"]
+ c.Assert(ok, Equals, true)
+ c.Assert(value, IsNil)
+}
+
+// --------------------------------------------------------------------------
+// Getter test cases.
+
+type typeWithGetter struct {
+ result interface{}
+ err error
+}
+
+func (t *typeWithGetter) GetBSON() (interface{}, error) {
+ if t == nil {
+ return "<value is nil>", nil
+ }
+ return t.result, t.err
+}
+
+type docWithGetterField struct {
+ Field *typeWithGetter "_"
+}
+
+func (s *S) TestMarshalAllItemsWithGetter(c *C) {
+ for i, item := range allItems {
+ if item.data == "" {
+ continue
+ }
+ obj := &docWithGetterField{}
+ obj.Field = &typeWithGetter{result: item.obj.(bson.M)["_"]}
+ data, err := bson.Marshal(obj)
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, wrapInDoc(item.data),
+ Commentf("Failed on item #%d", i))
+ }
+}
+
+func (s *S) TestMarshalWholeDocumentWithGetter(c *C) {
+ obj := &typeWithGetter{result: sampleItems[0].obj}
+ data, err := bson.Marshal(obj)
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, sampleItems[0].data)
+}
+
+func (s *S) TestGetterErrors(c *C) {
+ e := errors.New("oops")
+
+ obj1 := &docWithGetterField{}
+ obj1.Field = &typeWithGetter{sampleItems[0].obj, e}
+ data, err := bson.Marshal(obj1)
+ c.Assert(err, ErrorMatches, "oops")
+ c.Assert(data, IsNil)
+
+ obj2 := &typeWithGetter{sampleItems[0].obj, e}
+ data, err = bson.Marshal(obj2)
+ c.Assert(err, ErrorMatches, "oops")
+ c.Assert(data, IsNil)
+}
+
+type intGetter int64
+
+func (t intGetter) GetBSON() (interface{}, error) {
+ return int64(t), nil
+}
+
+type typeWithIntGetter struct {
+ V intGetter ",minsize"
+}
+
+func (s *S) TestMarshalShortWithGetter(c *C) {
+ obj := typeWithIntGetter{42}
+ data, err := bson.Marshal(obj)
+ c.Assert(err, IsNil)
+ m := bson.M{}
+ err = bson.Unmarshal(data, m)
+ c.Assert(err, IsNil)
+ c.Assert(m["v"], Equals, 42)
+}
+
+func (s *S) TestMarshalWithGetterNil(c *C) {
+ obj := docWithGetterField{}
+ data, err := bson.Marshal(obj)
+ c.Assert(err, IsNil)
+ m := bson.M{}
+ err = bson.Unmarshal(data, m)
+ c.Assert(err, IsNil)
+ c.Assert(m, DeepEquals, bson.M{"_": "<value is nil>"})
+}
+
+// --------------------------------------------------------------------------
+// Cross-type conversion tests.
+
+type crossTypeItem struct {
+ obj1 interface{}
+ obj2 interface{}
+}
+
+type condStr struct {
+ V string ",omitempty"
+}
+type condStrNS struct {
+ V string `a:"A" bson:",omitempty" b:"B"`
+}
+type condBool struct {
+ V bool ",omitempty"
+}
+type condInt struct {
+ V int ",omitempty"
+}
+type condUInt struct {
+ V uint ",omitempty"
+}
+type condFloat struct {
+ V float64 ",omitempty"
+}
+type condIface struct {
+ V interface{} ",omitempty"
+}
+type condPtr struct {
+ V *bool ",omitempty"
+}
+type condSlice struct {
+ V []string ",omitempty"
+}
+type condMap struct {
+ V map[string]int ",omitempty"
+}
+type namedCondStr struct {
+ V string "myv,omitempty"
+}
+type condTime struct {
+ V time.Time ",omitempty"
+}
+type condStruct struct {
+ V struct{ A []int } ",omitempty"
+}
+type condRaw struct {
+ V bson.Raw ",omitempty"
+}
+
+type shortInt struct {
+ V int64 ",minsize"
+}
+type shortUint struct {
+ V uint64 ",minsize"
+}
+type shortIface struct {
+ V interface{} ",minsize"
+}
+type shortPtr struct {
+ V *int64 ",minsize"
+}
+type shortNonEmptyInt struct {
+ V int64 ",minsize,omitempty"
+}
+
+type inlineInt struct {
+ V struct{ A, B int } ",inline"
+}
+type inlineCantPtr struct {
+ V *struct{ A, B int } ",inline"
+}
+type inlineDupName struct {
+ A int
+ V struct{ A, B int } ",inline"
+}
+type inlineMap struct {
+ A int
+ M map[string]interface{} ",inline"
+}
+type inlineMapInt struct {
+ A int
+ M map[string]int ",inline"
+}
+type inlineMapMyM struct {
+ A int
+ M MyM ",inline"
+}
+type inlineDupMap struct {
+ M1 map[string]interface{} ",inline"
+ M2 map[string]interface{} ",inline"
+}
+type inlineBadKeyMap struct {
+ M map[int]int ",inline"
+}
+type inlineUnexported struct {
+ M map[string]interface{} ",inline"
+ unexported ",inline"
+}
+type unexported struct {
+ A int
+}
+
+type getterSetterD bson.D
+
+func (s getterSetterD) GetBSON() (interface{}, error) {
+ if len(s) == 0 {
+ return bson.D{}, nil
+ }
+ return bson.D(s[:len(s)-1]), nil
+}
+
+func (s *getterSetterD) SetBSON(raw bson.Raw) error {
+ var doc bson.D
+ err := raw.Unmarshal(&doc)
+ doc = append(doc, bson.DocElem{"suffix", true})
+ *s = getterSetterD(doc)
+ return err
+}
+
+type getterSetterInt int
+
+func (i getterSetterInt) GetBSON() (interface{}, error) {
+ return bson.D{{"a", int(i)}}, nil
+}
+
+func (i *getterSetterInt) SetBSON(raw bson.Raw) error {
+ var doc struct{ A int }
+ err := raw.Unmarshal(&doc)
+ *i = getterSetterInt(doc.A)
+ return err
+}
+
+type ifaceType interface {
+ Hello()
+}
+
+type ifaceSlice []ifaceType
+
+func (s *ifaceSlice) SetBSON(raw bson.Raw) error {
+ var ns []int
+ if err := raw.Unmarshal(&ns); err != nil {
+ return err
+ }
+ *s = make(ifaceSlice, ns[0])
+ return nil
+}
+
+func (s ifaceSlice) GetBSON() (interface{}, error) {
+ return []int{len(s)}, nil
+}
+
+type (
+ MyString string
+ MyBytes []byte
+ MyBool bool
+ MyD []bson.DocElem
+ MyRawD []bson.RawDocElem
+ MyM map[string]interface{}
+)
+
+var (
+ truevar = true
+ falsevar = false
+
+ int64var = int64(42)
+ int64ptr = &int64var
+ intvar = int(42)
+ intptr = &intvar
+
+ gsintvar = getterSetterInt(42)
+)
+
+func parseURL(s string) *url.URL {
+ u, err := url.Parse(s)
+ if err != nil {
+ panic(err)
+ }
+ return u
+}
+
+// That's a pretty fun test. It will dump the first item, generate a zero
+// value equivalent to the second one, load the dumped data onto it, and then
+// verify that the resulting value is deep-equal to the untouched second value.
+// Then, it will do the same in the *opposite* direction!
+var twoWayCrossItems = []crossTypeItem{
+ // int<=>int
+ {&struct{ I int }{42}, &struct{ I int8 }{42}},
+ {&struct{ I int }{42}, &struct{ I int32 }{42}},
+ {&struct{ I int }{42}, &struct{ I int64 }{42}},
+ {&struct{ I int8 }{42}, &struct{ I int32 }{42}},
+ {&struct{ I int8 }{42}, &struct{ I int64 }{42}},
+ {&struct{ I int32 }{42}, &struct{ I int64 }{42}},
+
+ // uint<=>uint
+ {&struct{ I uint }{42}, &struct{ I uint8 }{42}},
+ {&struct{ I uint }{42}, &struct{ I uint32 }{42}},
+ {&struct{ I uint }{42}, &struct{ I uint64 }{42}},
+ {&struct{ I uint8 }{42}, &struct{ I uint32 }{42}},
+ {&struct{ I uint8 }{42}, &struct{ I uint64 }{42}},
+ {&struct{ I uint32 }{42}, &struct{ I uint64 }{42}},
+
+ // float32<=>float64
+ {&struct{ I float32 }{42}, &struct{ I float64 }{42}},
+
+ // int<=>uint
+ {&struct{ I uint }{42}, &struct{ I int }{42}},
+ {&struct{ I uint }{42}, &struct{ I int8 }{42}},
+ {&struct{ I uint }{42}, &struct{ I int32 }{42}},
+ {&struct{ I uint }{42}, &struct{ I int64 }{42}},
+ {&struct{ I uint8 }{42}, &struct{ I int }{42}},
+ {&struct{ I uint8 }{42}, &struct{ I int8 }{42}},
+ {&struct{ I uint8 }{42}, &struct{ I int32 }{42}},
+ {&struct{ I uint8 }{42}, &struct{ I int64 }{42}},
+ {&struct{ I uint32 }{42}, &struct{ I int }{42}},
+ {&struct{ I uint32 }{42}, &struct{ I int8 }{42}},
+ {&struct{ I uint32 }{42}, &struct{ I int32 }{42}},
+ {&struct{ I uint32 }{42}, &struct{ I int64 }{42}},
+ {&struct{ I uint64 }{42}, &struct{ I int }{42}},
+ {&struct{ I uint64 }{42}, &struct{ I int8 }{42}},
+ {&struct{ I uint64 }{42}, &struct{ I int32 }{42}},
+ {&struct{ I uint64 }{42}, &struct{ I int64 }{42}},
+
+ // int <=> float
+ {&struct{ I int }{42}, &struct{ I float64 }{42}},
+
+ // int <=> bool
+ {&struct{ I int }{1}, &struct{ I bool }{true}},
+ {&struct{ I int }{0}, &struct{ I bool }{false}},
+
+ // uint <=> float64
+ {&struct{ I uint }{42}, &struct{ I float64 }{42}},
+
+ // uint <=> bool
+ {&struct{ I uint }{1}, &struct{ I bool }{true}},
+ {&struct{ I uint }{0}, &struct{ I bool }{false}},
+
+ // float64 <=> bool
+ {&struct{ I float64 }{1}, &struct{ I bool }{true}},
+ {&struct{ I float64 }{0}, &struct{ I bool }{false}},
+
+ // string <=> string and string <=> []byte
+ {&struct{ S []byte }{[]byte("abc")}, &struct{ S string }{"abc"}},
+ {&struct{ S []byte }{[]byte("def")}, &struct{ S bson.Symbol }{"def"}},
+ {&struct{ S string }{"ghi"}, &struct{ S bson.Symbol }{"ghi"}},
+
+ // map <=> struct
+ {&struct {
+ A struct {
+ B, C int
+ }
+ }{struct{ B, C int }{1, 2}},
+ map[string]map[string]int{"a": map[string]int{"b": 1, "c": 2}}},
+
+ {&struct{ A bson.Symbol }{"abc"}, map[string]string{"a": "abc"}},
+ {&struct{ A bson.Symbol }{"abc"}, map[string][]byte{"a": []byte("abc")}},
+ {&struct{ A []byte }{[]byte("abc")}, map[string]string{"a": "abc"}},
+ {&struct{ A uint }{42}, map[string]int{"a": 42}},
+ {&struct{ A uint }{42}, map[string]float64{"a": 42}},
+ {&struct{ A uint }{1}, map[string]bool{"a": true}},
+ {&struct{ A int }{42}, map[string]uint{"a": 42}},
+ {&struct{ A int }{42}, map[string]float64{"a": 42}},
+ {&struct{ A int }{1}, map[string]bool{"a": true}},
+ {&struct{ A float64 }{42}, map[string]float32{"a": 42}},
+ {&struct{ A float64 }{42}, map[string]int{"a": 42}},
+ {&struct{ A float64 }{42}, map[string]uint{"a": 42}},
+ {&struct{ A float64 }{1}, map[string]bool{"a": true}},
+ {&struct{ A bool }{true}, map[string]int{"a": 1}},
+ {&struct{ A bool }{true}, map[string]uint{"a": 1}},
+ {&struct{ A bool }{true}, map[string]float64{"a": 1}},
+ {&struct{ A **byte }{&byteptr}, map[string]byte{"a": 8}},
+
+ // url.URL <=> string
+ {&struct{ URL *url.URL }{parseURL("h://e.c/p")}, map[string]string{"url": "h://e.c/p"}},
+ {&struct{ URL url.URL }{*parseURL("h://e.c/p")}, map[string]string{"url": "h://e.c/p"}},
+
+ // Slices
+ {&struct{ S []int }{[]int{1, 2, 3}}, map[string][]int{"s": []int{1, 2, 3}}},
+ {&struct{ S *[]int }{&[]int{1, 2, 3}}, map[string][]int{"s": []int{1, 2, 3}}},
+
+ // Conditionals
+ {&condBool{true}, map[string]bool{"v": true}},
+ {&condBool{}, map[string]bool{}},
+ {&condInt{1}, map[string]int{"v": 1}},
+ {&condInt{}, map[string]int{}},
+ {&condUInt{1}, map[string]uint{"v": 1}},
+ {&condUInt{}, map[string]uint{}},
+ {&condFloat{}, map[string]int{}},
+ {&condStr{"yo"}, map[string]string{"v": "yo"}},
+ {&condStr{}, map[string]string{}},
+ {&condStrNS{"yo"}, map[string]string{"v": "yo"}},
+ {&condStrNS{}, map[string]string{}},
+ {&condSlice{[]string{"yo"}}, map[string][]string{"v": []string{"yo"}}},
+ {&condSlice{}, map[string][]string{}},
+ {&condMap{map[string]int{"k": 1}}, bson.M{"v": bson.M{"k": 1}}},
+ {&condMap{}, map[string][]string{}},
+ {&condIface{"yo"}, map[string]string{"v": "yo"}},
+ {&condIface{""}, map[string]string{"v": ""}},
+ {&condIface{}, map[string]string{}},
+ {&condPtr{&truevar}, map[string]bool{"v": true}},
+ {&condPtr{&falsevar}, map[string]bool{"v": false}},
+ {&condPtr{}, map[string]string{}},
+
+ {&condTime{time.Unix(123456789, 123e6)}, map[string]time.Time{"v": time.Unix(123456789, 123e6)}},
+ {&condTime{}, map[string]string{}},
+
+ {&condStruct{struct{ A []int }{[]int{1}}}, bson.M{"v": bson.M{"a": []interface{}{1}}}},
+ {&condStruct{struct{ A []int }{}}, bson.M{}},
+
+ {&condRaw{bson.Raw{Kind: 0x0A, Data: []byte{}}}, bson.M{"v": nil}},
+ {&condRaw{bson.Raw{Kind: 0x00}}, bson.M{}},
+
+ {&namedCondStr{"yo"}, map[string]string{"myv": "yo"}},
+ {&namedCondStr{}, map[string]string{}},
+
+ {&shortInt{1}, map[string]interface{}{"v": 1}},
+ {&shortInt{1 << 30}, map[string]interface{}{"v": 1 << 30}},
+ {&shortInt{1 << 31}, map[string]interface{}{"v": int64(1 << 31)}},
+ {&shortUint{1 << 30}, map[string]interface{}{"v": 1 << 30}},
+ {&shortUint{1 << 31}, map[string]interface{}{"v": int64(1 << 31)}},
+ {&shortIface{int64(1) << 31}, map[string]interface{}{"v": int64(1 << 31)}},
+ {&shortPtr{int64ptr}, map[string]interface{}{"v": intvar}},
+
+ {&shortNonEmptyInt{1}, map[string]interface{}{"v": 1}},
+ {&shortNonEmptyInt{1 << 31}, map[string]interface{}{"v": int64(1 << 31)}},
+ {&shortNonEmptyInt{}, map[string]interface{}{}},
+
+ {&inlineInt{struct{ A, B int }{1, 2}}, map[string]interface{}{"a": 1, "b": 2}},
+ {&inlineMap{A: 1, M: map[string]interface{}{"b": 2}}, map[string]interface{}{"a": 1, "b": 2}},
+ {&inlineMap{A: 1, M: nil}, map[string]interface{}{"a": 1}},
+ {&inlineMapInt{A: 1, M: map[string]int{"b": 2}}, map[string]int{"a": 1, "b": 2}},
+ {&inlineMapInt{A: 1, M: nil}, map[string]int{"a": 1}},
+ {&inlineMapMyM{A: 1, M: MyM{"b": MyM{"c": 3}}}, map[string]interface{}{"a": 1, "b": map[string]interface{}{"c": 3}}},
+ {&inlineUnexported{M: map[string]interface{}{"b": 1}, unexported: unexported{A: 2}}, map[string]interface{}{"b": 1, "a": 2}},
+
+ // []byte <=> Binary
+ {&struct{ B []byte }{[]byte("abc")}, map[string]bson.Binary{"b": bson.Binary{Data: []byte("abc")}}},
+
+ // []byte <=> MyBytes
+ {&struct{ B MyBytes }{[]byte("abc")}, map[string]string{"b": "abc"}},
+ {&struct{ B MyBytes }{[]byte{}}, map[string]string{"b": ""}},
+ {&struct{ B MyBytes }{}, map[string]bool{}},
+ {&struct{ B []byte }{[]byte("abc")}, map[string]MyBytes{"b": []byte("abc")}},
+
+ // bool <=> MyBool
+ {&struct{ B MyBool }{true}, map[string]bool{"b": true}},
+ {&struct{ B MyBool }{}, map[string]bool{"b": false}},
+ {&struct{ B MyBool }{}, map[string]string{}},
+ {&struct{ B bool }{}, map[string]MyBool{"b": false}},
+
+ // arrays
+ {&struct{ V [2]int }{[...]int{1, 2}}, map[string][2]int{"v": [2]int{1, 2}}},
+ {&struct{ V [2]byte }{[...]byte{1, 2}}, map[string][2]byte{"v": [2]byte{1, 2}}},
+
+ // zero time
+ {&struct{ V time.Time }{}, map[string]interface{}{"v": time.Time{}}},
+
+ // zero time + 1 second + 1 millisecond; overflows int64 as nanoseconds
+ {&struct{ V time.Time }{time.Unix(-62135596799, 1e6).Local()},
+ map[string]interface{}{"v": time.Unix(-62135596799, 1e6).Local()}},
+
+ // bson.D <=> []DocElem
+ {&bson.D{{"a", bson.D{{"b", 1}, {"c", 2}}}}, &bson.D{{"a", bson.D{{"b", 1}, {"c", 2}}}}},
+ {&bson.D{{"a", bson.D{{"b", 1}, {"c", 2}}}}, &MyD{{"a", MyD{{"b", 1}, {"c", 2}}}}},
+ {&struct{ V MyD }{MyD{{"a", 1}}}, &bson.D{{"v", bson.D{{"a", 1}}}}},
+
+ // bson.RawD <=> []RawDocElem
+ {&bson.RawD{{"a", bson.Raw{0x08, []byte{0x01}}}}, &bson.RawD{{"a", bson.Raw{0x08, []byte{0x01}}}}},
+ {&bson.RawD{{"a", bson.Raw{0x08, []byte{0x01}}}}, &MyRawD{{"a", bson.Raw{0x08, []byte{0x01}}}}},
+
+ // bson.M <=> map
+ {bson.M{"a": bson.M{"b": 1, "c": 2}}, MyM{"a": MyM{"b": 1, "c": 2}}},
+ {bson.M{"a": bson.M{"b": 1, "c": 2}}, map[string]interface{}{"a": map[string]interface{}{"b": 1, "c": 2}}},
+
+ // bson.M <=> map[MyString]
+ {bson.M{"a": bson.M{"b": 1, "c": 2}}, map[MyString]interface{}{"a": map[MyString]interface{}{"b": 1, "c": 2}}},
+
+ // json.Number <=> int64, float64
+ {&struct{ N json.Number }{"5"}, map[string]interface{}{"n": int64(5)}},
+ {&struct{ N json.Number }{"5.05"}, map[string]interface{}{"n": 5.05}},
+ {&struct{ N json.Number }{"9223372036854776000"}, map[string]interface{}{"n": float64(1 << 63)}},
+
+ // bson.D <=> non-struct getter/setter
+ {&bson.D{{"a", 1}}, &getterSetterD{{"a", 1}, {"suffix", true}}},
+ {&bson.D{{"a", 42}}, &gsintvar},
+
+ // Interface slice setter.
+ {&struct{ V ifaceSlice }{ifaceSlice{nil, nil, nil}}, bson.M{"v": []interface{}{3}}},
+}
+
+// Same thing, but only one way (obj1 => obj2).
+var oneWayCrossItems = []crossTypeItem{
+ // map <=> struct
+ {map[string]interface{}{"a": 1, "b": "2", "c": 3}, map[string]int{"a": 1, "c": 3}},
+
+ // inline map elides badly typed values
+ {map[string]interface{}{"a": 1, "b": "2", "c": 3}, &inlineMapInt{A: 1, M: map[string]int{"c": 3}}},
+
+ // Can't decode int into struct.
+ {bson.M{"a": bson.M{"b": 2}}, &struct{ A bool }{}},
+
+ // Would get decoded into an int32 too in the opposite direction.
+ {&shortIface{int64(1) << 30}, map[string]interface{}{"v": 1 << 30}},
+
+ // Ensure omitempty on struct with private fields works properly.
+ {&struct {
+ V struct{ v time.Time } ",omitempty"
+ }{}, map[string]interface{}{}},
+
+ // Attempt to marshal slice into RawD (issue #120).
+ {bson.M{"x": []int{1, 2, 3}}, &struct{ X bson.RawD }{}},
+}
+
+func testCrossPair(c *C, dump interface{}, load interface{}) {
+ c.Logf("Dump: %#v", dump)
+ c.Logf("Load: %#v", load)
+ zero := makeZeroDoc(load)
+ data, err := bson.Marshal(dump)
+ c.Assert(err, IsNil)
+ c.Logf("Dumped: %#v", string(data))
+ err = bson.Unmarshal(data, zero)
+ c.Assert(err, IsNil)
+ c.Logf("Loaded: %#v", zero)
+ c.Assert(zero, DeepEquals, load)
+}
+
+func (s *S) TestTwoWayCrossPairs(c *C) {
+ for _, item := range twoWayCrossItems {
+ testCrossPair(c, item.obj1, item.obj2)
+ testCrossPair(c, item.obj2, item.obj1)
+ }
+}
+
+func (s *S) TestOneWayCrossPairs(c *C) {
+ for _, item := range oneWayCrossItems {
+ testCrossPair(c, item.obj1, item.obj2)
+ }
+}
+
+// --------------------------------------------------------------------------
+// ObjectId hex representation test.
+
+func (s *S) TestObjectIdHex(c *C) {
+ id := bson.ObjectIdHex("4d88e15b60f486e428412dc9")
+ c.Assert(id.String(), Equals, `ObjectIdHex("4d88e15b60f486e428412dc9")`)
+ c.Assert(id.Hex(), Equals, "4d88e15b60f486e428412dc9")
+}
+
+func (s *S) TestIsObjectIdHex(c *C) {
+ test := []struct {
+ id string
+ valid bool
+ }{
+ {"4d88e15b60f486e428412dc9", true},
+ {"4d88e15b60f486e428412dc", false},
+ {"4d88e15b60f486e428412dc9e", false},
+ {"4d88e15b60f486e428412dcx", false},
+ }
+ for _, t := range test {
+ c.Assert(bson.IsObjectIdHex(t.id), Equals, t.valid)
+ }
+}
+
+// --------------------------------------------------------------------------
+// ObjectId parts extraction tests.
+
+type objectIdParts struct {
+ id bson.ObjectId
+ timestamp int64
+ machine []byte
+ pid uint16
+ counter int32
+}
+
+var objectIds = []objectIdParts{
+ objectIdParts{
+ bson.ObjectIdHex("4d88e15b60f486e428412dc9"),
+ 1300816219,
+ []byte{0x60, 0xf4, 0x86},
+ 0xe428,
+ 4271561,
+ },
+ objectIdParts{
+ bson.ObjectIdHex("000000000000000000000000"),
+ 0,
+ []byte{0x00, 0x00, 0x00},
+ 0x0000,
+ 0,
+ },
+ objectIdParts{
+ bson.ObjectIdHex("00000000aabbccddee000001"),
+ 0,
+ []byte{0xaa, 0xbb, 0xcc},
+ 0xddee,
+ 1,
+ },
+}
+
+func (s *S) TestObjectIdPartsExtraction(c *C) {
+ for i, v := range objectIds {
+ t := time.Unix(v.timestamp, 0)
+ c.Assert(v.id.Time(), Equals, t, Commentf("#%d Wrong timestamp value", i))
+ c.Assert(v.id.Machine(), DeepEquals, v.machine, Commentf("#%d Wrong machine id value", i))
+ c.Assert(v.id.Pid(), Equals, v.pid, Commentf("#%d Wrong pid value", i))
+ c.Assert(v.id.Counter(), Equals, v.counter, Commentf("#%d Wrong counter value", i))
+ }
+}
+
+func (s *S) TestNow(c *C) {
+ before := time.Now()
+ time.Sleep(1e6)
+ now := bson.Now()
+ time.Sleep(1e6)
+ after := time.Now()
+ c.Assert(now.After(before) && now.Before(after), Equals, true, Commentf("now=%s, before=%s, after=%s", now, before, after))
+}
+
+// --------------------------------------------------------------------------
+// ObjectId generation tests.
+
+func (s *S) TestNewObjectId(c *C) {
+ // Generate 10 ids
+ ids := make([]bson.ObjectId, 10)
+ for i := 0; i < 10; i++ {
+ ids[i] = bson.NewObjectId()
+ }
+ for i := 1; i < 10; i++ {
+ prevId := ids[i-1]
+ id := ids[i]
+ // Test for uniqueness among all other 9 generated ids
+ for j, tid := range ids {
+ if j != i {
+ c.Assert(id, Not(Equals), tid, Commentf("Generated ObjectId is not unique"))
+ }
+ }
+ // Check that timestamp was incremented and is within 30 seconds of the previous one
+ secs := id.Time().Sub(prevId.Time()).Seconds()
+ c.Assert((secs >= 0 && secs <= 30), Equals, true, Commentf("Wrong timestamp in generated ObjectId"))
+ // Check that machine ids are the same
+ c.Assert(id.Machine(), DeepEquals, prevId.Machine())
+ // Check that pids are the same
+ c.Assert(id.Pid(), Equals, prevId.Pid())
+ // Test for proper increment
+ delta := int(id.Counter() - prevId.Counter())
+ c.Assert(delta, Equals, 1, Commentf("Wrong increment in generated ObjectId"))
+ }
+}
+
+func (s *S) TestNewObjectIdWithTime(c *C) {
+ t := time.Unix(12345678, 0)
+ id := bson.NewObjectIdWithTime(t)
+ c.Assert(id.Time(), Equals, t)
+ c.Assert(id.Machine(), DeepEquals, []byte{0x00, 0x00, 0x00})
+ c.Assert(int(id.Pid()), Equals, 0)
+ c.Assert(int(id.Counter()), Equals, 0)
+}
+
+// --------------------------------------------------------------------------
+// ObjectId JSON marshalling.
+
+type jsonType struct {
+ Id bson.ObjectId
+}
+
+var jsonIdTests = []struct {
+ value jsonType
+ json string
+ marshal bool
+ unmarshal bool
+ error string
+}{{
+ value: jsonType{Id: bson.ObjectIdHex("4d88e15b60f486e428412dc9")},
+ json: `{"Id":"4d88e15b60f486e428412dc9"}`,
+ marshal: true,
+ unmarshal: true,
+}, {
+ value: jsonType{},
+ json: `{"Id":""}`,
+ marshal: true,
+ unmarshal: true,
+}, {
+ value: jsonType{},
+ json: `{"Id":null}`,
+ marshal: false,
+ unmarshal: true,
+}, {
+ json: `{"Id":"4d88e15b60f486e428412dc9A"}`,
+ error: `invalid ObjectId in JSON: "4d88e15b60f486e428412dc9A"`,
+ marshal: false,
+ unmarshal: true,
+}, {
+ json: `{"Id":"4d88e15b60f486e428412dcZ"}`,
+ error: `invalid ObjectId in JSON: "4d88e15b60f486e428412dcZ" .*`,
+ marshal: false,
+ unmarshal: true,
+}}
+
+func (s *S) TestObjectIdJSONMarshaling(c *C) {
+ for _, test := range jsonIdTests {
+ if test.marshal {
+ data, err := json.Marshal(&test.value)
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, test.json)
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+
+ if test.unmarshal {
+ var value jsonType
+ err := json.Unmarshal([]byte(test.json), &value)
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ c.Assert(value, DeepEquals, test.value)
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+ }
+}
+
+// --------------------------------------------------------------------------
+// Spec tests
+
+type specTest struct {
+ Description string
+ Documents []struct {
+ Decoded map[string]interface{}
+ Encoded string
+ DecodeOnly bool `yaml:"decodeOnly"`
+ Error interface{}
+ }
+}
+
+func (s *S) TestSpecTests(c *C) {
+ for _, data := range specTests {
+ var test specTest
+ err := yaml.Unmarshal([]byte(data), &test)
+ c.Assert(err, IsNil)
+
+ c.Logf("Running spec test set %q", test.Description)
+
+ for _, doc := range test.Documents {
+ if doc.Error != nil {
+ continue
+ }
+ c.Logf("Ensuring %q decodes as %v", doc.Encoded, doc.Decoded)
+ var decoded map[string]interface{}
+ encoded, err := hex.DecodeString(doc.Encoded)
+ c.Assert(err, IsNil)
+ err = bson.Unmarshal(encoded, &decoded)
+ c.Assert(err, IsNil)
+ c.Assert(decoded, DeepEquals, doc.Decoded)
+ }
+
+ for _, doc := range test.Documents {
+ if doc.DecodeOnly || doc.Error != nil {
+ continue
+ }
+ c.Logf("Ensuring %v encodes as %q", doc.Decoded, doc.Encoded)
+ encoded, err := bson.Marshal(doc.Decoded)
+ c.Assert(err, IsNil)
+ c.Assert(strings.ToUpper(hex.EncodeToString(encoded)), Equals, doc.Encoded)
+ }
+
+ for _, doc := range test.Documents {
+ if doc.Error == nil {
+ continue
+ }
+ c.Logf("Ensuring %q errors when decoded: %s", doc.Encoded, doc.Error)
+ var decoded map[string]interface{}
+ encoded, err := hex.DecodeString(doc.Encoded)
+ c.Assert(err, IsNil)
+ err = bson.Unmarshal(encoded, &decoded)
+ c.Assert(err, NotNil)
+ c.Logf("Failed with: %v", err)
+ }
+ }
+}
+
+// --------------------------------------------------------------------------
+// ObjectId text marshalling (encoding.TextMarshaler / encoding.TextUnmarshaler).
+
+var textIdTests = []struct {
+ value bson.ObjectId
+ text string
+ marshal bool
+ unmarshal bool
+ error string
+}{{
+ value: bson.ObjectIdHex("4d88e15b60f486e428412dc9"),
+ text: "4d88e15b60f486e428412dc9",
+ marshal: true,
+ unmarshal: true,
+}, {
+ text: "",
+ marshal: true,
+ unmarshal: true,
+}, {
+ text: "4d88e15b60f486e428412dc9A",
+ marshal: false,
+ unmarshal: true,
+ error: `invalid ObjectId: 4d88e15b60f486e428412dc9A`,
+}, {
+ text: "4d88e15b60f486e428412dcZ",
+ marshal: false,
+ unmarshal: true,
+ error: `invalid ObjectId: 4d88e15b60f486e428412dcZ .*`,
+}}
+
+func (s *S) TestObjectIdTextMarshaling(c *C) {
+ for _, test := range textIdTests {
+ if test.marshal {
+ data, err := test.value.MarshalText()
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, test.text)
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+
+ if test.unmarshal {
+ err := test.value.UnmarshalText([]byte(test.text))
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ if test.value != "" {
+ value := bson.ObjectIdHex(test.text)
+ c.Assert(value, DeepEquals, test.value)
+ }
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+ }
+}
+
+// --------------------------------------------------------------------------
+// ObjectId XML marshalling.
+
+type xmlType struct {
+ Id bson.ObjectId
+}
+
+var xmlIdTests = []struct {
+ value xmlType
+ xml string
+ marshal bool
+ unmarshal bool
+ error string
+}{{
+ value: xmlType{Id: bson.ObjectIdHex("4d88e15b60f486e428412dc9")},
+ xml: "<xmlType><Id>4d88e15b60f486e428412dc9</Id></xmlType>",
+ marshal: true,
+ unmarshal: true,
+}, {
+ value: xmlType{},
+ xml: "<xmlType><Id></Id></xmlType>",
+ marshal: true,
+ unmarshal: true,
+}, {
+ xml: "<xmlType><Id>4d88e15b60f486e428412dc9A</Id></xmlType>",
+ marshal: false,
+ unmarshal: true,
+ error: `invalid ObjectId: 4d88e15b60f486e428412dc9A`,
+}, {
+ xml: "<xmlType><Id>4d88e15b60f486e428412dcZ</Id></xmlType>",
+ marshal: false,
+ unmarshal: true,
+ error: `invalid ObjectId: 4d88e15b60f486e428412dcZ .*`,
+}}
+
+func (s *S) TestObjectIdXMLMarshaling(c *C) {
+ for _, test := range xmlIdTests {
+ if test.marshal {
+ data, err := xml.Marshal(&test.value)
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, test.xml)
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+
+ if test.unmarshal {
+ var value xmlType
+ err := xml.Unmarshal([]byte(test.xml), &value)
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ c.Assert(value, DeepEquals, test.value)
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+ }
+}
+
+// --------------------------------------------------------------------------
+// Some simple benchmarks.
+
+type BenchT struct {
+ A, B, C, D, E, F string
+}
+
+type BenchRawT struct {
+ A string
+ B int
+ C bson.M
+ D []float64
+}
+
+func (s *S) BenchmarkUnmarshalStruct(c *C) {
+ v := BenchT{A: "A", D: "D", E: "E"}
+ data, err := bson.Marshal(&v)
+ if err != nil {
+ panic(err)
+ }
+ c.ResetTimer()
+ for i := 0; i < c.N; i++ {
+ err = bson.Unmarshal(data, &v)
+ }
+ if err != nil {
+ panic(err)
+ }
+}
+
+func (s *S) BenchmarkUnmarshalMap(c *C) {
+ m := bson.M{"a": "a", "d": "d", "e": "e"}
+ data, err := bson.Marshal(&m)
+ if err != nil {
+ panic(err)
+ }
+ c.ResetTimer()
+ for i := 0; i < c.N; i++ {
+ err = bson.Unmarshal(data, &m)
+ }
+ if err != nil {
+ panic(err)
+ }
+}
+
+func (s *S) BenchmarkUnmarshalRaw(c *C) {
+ var err error
+ m := BenchRawT{
+ A: "test_string",
+ B: 123,
+ C: bson.M{
+ "subdoc_int": 12312,
+ "subdoc_doc": bson.M{"1": 1},
+ },
+ D: []float64{0.0, 1.3333, -99.9997, 3.1415},
+ }
+ data, err := bson.Marshal(&m)
+ if err != nil {
+ panic(err)
+ }
+ raw := bson.Raw{}
+ c.ResetTimer()
+ for i := 0; i < c.N; i++ {
+ err = bson.Unmarshal(data, &raw)
+ }
+ if err != nil {
+ panic(err)
+ }
+}
+
+func (s *S) BenchmarkNewObjectId(c *C) {
+ for i := 0; i < c.N; i++ {
+ bson.NewObjectId()
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decimal.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decimal.go
new file mode 100644
index 00000000000..3d2f7002037
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decimal.go
@@ -0,0 +1,310 @@
+// BSON library for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package bson
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// Decimal128 holds decimal128 BSON values.
+type Decimal128 struct {
+ h, l uint64
+}
+
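+// String returns the textual representation of d in the canonical form used
+// by the decimal128 spec tests, except that infinities are rendered as
+// "Inf"/"-Inf" rather than "Infinity".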
+func (d Decimal128) String() string {
+ var pos int // positive sign
+ var e int // exponent
+ var h, l uint64 // significand high/low
+
+ if d.h>>63&1 == 0 {
+ pos = 1
+ }
+
+ switch d.h >> 58 & (1<<5 - 1) {
+ case 0x1F:
+ return "NaN"
+ case 0x1E:
+ return "-Inf"[pos:]
+ }
+
+ l = d.l
+ if d.h>>61&3 == 3 {
+ // Bits: 1*sign 2*ignored 14*exponent 111*significand.
+ // Implicit 0b100 prefix in significand.
+ e = int(d.h>>47&(1<<14-1)) - 6176
+ //h = 4<<47 | d.h&(1<<47-1)
+ // Spec says all of these values are out of range.
+ h, l = 0, 0
+ } else {
+ // Bits: 1*sign 14*exponent 113*significand
+ e = int(d.h>>49&(1<<14-1)) - 6176
+ h = d.h & (1<<49 - 1)
+ }
+
+ // This case would be handled by the loop below, but it is trivial and common, so short-circuit it here.
+ if h == 0 && l == 0 && e == 0 {
+ return "-0"[pos:]
+ }
+
+ var repr [48]byte // Loop 5 times over 9 digits plus dot, negative sign, and leading zero.
+ var last = len(repr)
+ var i = len(repr)
+ var dot = len(repr) + e
+ var rem uint32
+Loop:
+ for d9 := 0; d9 < 5; d9++ {
+ h, l, rem = divmod(h, l, 1e9)
+ for d1 := 0; d1 < 9; d1++ {
+ // Handle "-0.0", "0.00123400", "-1.00E-6", "1.050E+3", etc.
+ if i < len(repr) && (dot == i || l == 0 && h == 0 && rem > 0 && rem < 10 && (dot < i-6 || e > 0)) {
+ e += len(repr) - i
+ i--
+ repr[i] = '.'
+ last = i - 1
+ dot = len(repr) // Unmark.
+ }
+ c := '0' + byte(rem%10)
+ rem /= 10
+ i--
+ repr[i] = c
+ // Handle "0E+3", "1E+3", etc.
+ if l == 0 && h == 0 && rem == 0 && i == len(repr)-1 && (dot < i-5 || e > 0) {
+ last = i
+ break Loop
+ }
+ if c != '0' {
+ last = i
+ }
+ // Break early once the significand is exhausted; the loop would finish correctly without this, but there is no point continuing.
+ if dot > i && l == 0 && h == 0 && rem == 0 {
+ break Loop
+ }
+ }
+ }
+ repr[last-1] = '-'
+ last--
+
+ if e > 0 {
+ return string(repr[last+pos:]) + "E+" + strconv.Itoa(e)
+ }
+ if e < 0 {
+ return string(repr[last+pos:]) + "E" + strconv.Itoa(e)
+ }
+ return string(repr[last+pos:])
+}
+
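+// divmod divides the 128-bit unsigned value formed by h (high 64 bits) and
+// l (low 64 bits) by div, using schoolbook long division in base 2^32, and
+// returns the 128-bit quotient as qh, ql along with the remainder.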
+func divmod(h, l uint64, div uint32) (qh, ql uint64, rem uint32) {
+ div64 := uint64(div)
+ a := h >> 32
+ aq := a / div64
+ ar := a % div64
+ b := ar<<32 + h&(1<<32-1)
+ bq := b / div64
+ br := b % div64
+ c := br<<32 + l>>32
+ cq := c / div64
+ cr := c % div64
+ d := cr<<32 + l&(1<<32-1)
+ dq := d / div64
+ dr := d % div64
+ return (aq<<32 | bq), (cq<<32 | dq), uint32(dr)
+}
+
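+// Bit patterns for the special decimal128 values NaN, +Infinity and -Infinity.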
+var dNaN = Decimal128{0x1F << 58, 0}
+var dPosInf = Decimal128{0x1E << 58, 0}
+var dNegInf = Decimal128{0x3E << 58, 0}
+
+func dErr(s string) (Decimal128, error) {
+ return dNaN, fmt.Errorf("cannot parse %q as a decimal128", s)
+}
+
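+// ParseDecimal128 parses s as a decimal128 value, accepting the forms
+// produced by Decimal128.String as well as spec spellings such as
+// "Infinity", "nan" and "+1E3"; it returns an error when s cannot be
+// represented as a decimal128. A minimal usage sketch, mirroring the spec
+// tests in decimal_test.go:
+//
+//	d, err := ParseDecimal128("1.050E+4")
+//	if err != nil {
+//		// handle invalid input
+//	}
+//	s := d.String() // "1.050E+4"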
+func ParseDecimal128(s string) (Decimal128, error) {
+ orig := s
+ if s == "" {
+ return dErr(orig)
+ }
+ neg := s[0] == '-'
+ if neg || s[0] == '+' {
+ s = s[1:]
+ }
+
+ if (len(s) == 3 || len(s) == 8) && (s[0] == 'N' || s[0] == 'n' || s[0] == 'I' || s[0] == 'i') {
+ if s == "NaN" || s == "nan" || strings.EqualFold(s, "nan") {
+ return dNaN, nil
+ }
+ if s == "Inf" || s == "inf" || strings.EqualFold(s, "inf") || strings.EqualFold(s, "infinity") {
+ if neg {
+ return dNegInf, nil
+ }
+ return dPosInf, nil
+ }
+ return dErr(orig)
+ }
+
+ var h, l uint64
+ var e int
+
+ var add, ovr uint32
+ var mul uint32 = 1
+ var dot = -1
+ var digits = 0
+ var i = 0
+ for i < len(s) {
+ c := s[i]
+ if mul == 1e9 {
+ h, l, ovr = muladd(h, l, mul, add)
+ mul, add = 1, 0
+ if ovr > 0 || h&((1<<15-1)<<49) > 0 {
+ return dErr(orig)
+ }
+ }
+ if c >= '0' && c <= '9' {
+ i++
+ if c > '0' || digits > 0 {
+ digits++
+ }
+ if digits > 34 {
+ if c == '0' {
+ // Exact rounding.
+ e++
+ continue
+ }
+ return dErr(orig)
+ }
+ mul *= 10
+ add *= 10
+ add += uint32(c - '0')
+ continue
+ }
+ if c == '.' {
+ i++
+ if dot >= 0 || i == 1 && len(s) == 1 {
+ return dErr(orig)
+ }
+ if i == len(s) {
+ break
+ }
+ if s[i] < '0' || s[i] > '9' || e > 0 {
+ return dErr(orig)
+ }
+ dot = i
+ continue
+ }
+ break
+ }
+ if i == 0 {
+ return dErr(orig)
+ }
+ if mul > 1 {
+ h, l, ovr = muladd(h, l, mul, add)
+ if ovr > 0 || h&((1<<15-1)<<49) > 0 {
+ return dErr(orig)
+ }
+ }
+ if dot >= 0 {
+ e += dot - i
+ }
+ if i+1 < len(s) && (s[i] == 'E' || s[i] == 'e') {
+ i++
+ eneg := s[i] == '-'
+ if eneg || s[i] == '+' {
+ i++
+ if i == len(s) {
+ return dErr(orig)
+ }
+ }
+ n := 0
+ for i < len(s) && n < 1e4 {
+ c := s[i]
+ i++
+ if c < '0' || c > '9' {
+ return dErr(orig)
+ }
+ n *= 10
+ n += int(c - '0')
+ }
+ if eneg {
+ n = -n
+ }
+ e += n
+ for e < -6176 {
+ // Subnormal.
+ var div uint32 = 1
+ for div < 1e9 && e < -6176 {
+ div *= 10
+ e++
+ }
+ var rem uint32
+ h, l, rem = divmod(h, l, div)
+ if rem > 0 {
+ return dErr(orig)
+ }
+ }
+ for e > 6111 {
+ // Clamped.
+ var mul uint32 = 1
+ for mul < 1e9 && e > 6111 {
+ mul *= 10
+ e--
+ }
+ h, l, ovr = muladd(h, l, mul, 0)
+ if ovr > 0 || h&((1<<15-1)<<49) > 0 {
+ return dErr(orig)
+ }
+ }
+ if e < -6176 || e > 6111 {
+ return dErr(orig)
+ }
+ }
+
+ if i < len(s) {
+ return dErr(orig)
+ }
+
+ h |= uint64(e+6176) & uint64(1<<14-1) << 49
+ if neg {
+ h |= 1 << 63
+ }
+ return Decimal128{h, l}, nil
+}
+
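+// muladd computes (h:l)*mul + add, where h and l are the high and low 64
+// bits of a 128-bit unsigned value; it returns the low 128 bits of the
+// result and, in overflow, any bits that did not fit above bit 127.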
+func muladd(h, l uint64, mul uint32, add uint32) (resh, resl uint64, overflow uint32) {
+ mul64 := uint64(mul)
+ a := mul64 * (l & (1<<32 - 1))
+ b := a>>32 + mul64*(l>>32)
+ c := b>>32 + mul64*(h&(1<<32-1))
+ d := c>>32 + mul64*(h>>32)
+
+ a = a&(1<<32-1) + uint64(add)
+ b = b&(1<<32-1) + a>>32
+ c = c&(1<<32-1) + b>>32
+ d = d&(1<<32-1) + c>>32
+
+ return (d<<32 | c&(1<<32-1)), (b<<32 | a&(1<<32-1)), uint32(d >> 32)
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decimal_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decimal_test.go
new file mode 100644
index 00000000000..a29728094ec
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decimal_test.go
@@ -0,0 +1,4109 @@
+// BSON library for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package bson_test
+
+import (
+ "encoding/hex"
+ "encoding/json"
+ "fmt"
+ "regexp"
+ "strings"
+
+ "gopkg.in/mgo.v2/bson"
+
+ . "gopkg.in/check.v1"
+)
+
+// --------------------------------------------------------------------------
+// Decimal tests
+
+type decimalTests struct {
+ Valid []struct {
+ Description string `json:"description"`
+ BSON string `json:"bson"`
+ CanonicalBSON string `json:"canonical_bson"`
+ ExtJSON string `json:"extjson"`
+ CanonicalExtJSON string `json:"canonical_extjson"`
+ Lossy bool `json:"lossy"`
+ } `json:"valid"`
+
+ ParseErrors []struct {
+ Description string `json:"description"`
+ String string `json:"string"`
+ } `json:"parseErrors"`
+}
+
+func extJSONRepr(s string) string {
+ var value struct {
+ D struct {
+ Repr string `json:"$numberDecimal"`
+ } `json:"d"`
+ }
+ err := json.Unmarshal([]byte(s), &value)
+ if err != nil {
+ panic(err)
+ }
+ return value.D.Repr
+}
+
+func (s *S) TestDecimalTests(c *C) {
+ // goStr maps the spec's "Infinity" spellings onto the "Inf" forms, which
+ // also conform to the spec and are what Go uses elsewhere
+ // (e.g. math/big won't parse "Infinity").
+ goStr := func(s string) string {
+ switch s {
+ case "Infinity":
+ return "Inf"
+ case "-Infinity":
+ return "-Inf"
+ }
+ return s
+ }
+
+ for _, testEntry := range decimalTestsJSON {
+ testFile := testEntry.file
+
+ var tests decimalTests
+ err := json.Unmarshal([]byte(testEntry.json), &tests)
+ c.Assert(err, IsNil)
+
+ for _, test := range tests.Valid {
+ c.Logf("Running %s test: %s", testFile, test.Description)
+
+ test.BSON = strings.ToLower(test.BSON)
+
+ // Unmarshal value from BSON data.
+ bsonData, err := hex.DecodeString(test.BSON)
+ c.Assert(err, IsNil)
+ var bsonValue struct{ D interface{} }
+ err = bson.Unmarshal(bsonData, &bsonValue)
+ c.Assert(err, IsNil)
+ dec128, ok := bsonValue.D.(bson.Decimal128)
+ c.Assert(ok, Equals, true)
+
+ // Extract ExtJSON representations (canonical and not).
+ extjRepr := extJSONRepr(test.ExtJSON)
+ cextjRepr := extjRepr
+ if test.CanonicalExtJSON != "" {
+ cextjRepr = extJSONRepr(test.CanonicalExtJSON)
+ }
+
+ wantRepr := goStr(cextjRepr)
+
+ // Generate canonical representation.
+ c.Assert(dec128.String(), Equals, wantRepr)
+
+ // Parse original canonical representation.
+ parsed, err := bson.ParseDecimal128(cextjRepr)
+ c.Assert(err, IsNil)
+ c.Assert(parsed.String(), Equals, wantRepr)
+
+ // Parse non-canonical representation.
+ parsed, err = bson.ParseDecimal128(extjRepr)
+ c.Assert(err, IsNil)
+ c.Assert(parsed.String(), Equals, wantRepr)
+
+ // Parse Go canonical representation (Inf vs. Infinity).
+ parsed, err = bson.ParseDecimal128(wantRepr)
+ c.Assert(err, IsNil)
+ c.Assert(parsed.String(), Equals, wantRepr)
+
+ // Marshal original value back into BSON data.
+ data, err := bson.Marshal(bsonValue)
+ c.Assert(err, IsNil)
+ c.Assert(hex.EncodeToString(data), Equals, test.BSON)
+
+ if test.Lossy {
+ continue
+ }
+
+ // Marshal the parsed canonical representation.
+ var parsedValue struct{ D interface{} }
+ parsedValue.D = parsed
+ data, err = bson.Marshal(parsedValue)
+ c.Assert(err, IsNil)
+ c.Assert(hex.EncodeToString(data), Equals, test.BSON)
+ }
+
+ for _, test := range tests.ParseErrors {
+ c.Logf("Running %s parse error test: %s (string %q)", testFile, test.Description, test.String)
+
+ _, err := bson.ParseDecimal128(test.String)
+ quoted := regexp.QuoteMeta(fmt.Sprintf("%q", test.String))
+ c.Assert(err, ErrorMatches, `cannot parse `+quoted+` as a decimal128`)
+ }
+ }
+}
+
+const decBenchNum = "9.999999999999999999999999999999999E+6144"
+
+func (s *S) BenchmarkDecimal128String(c *C) {
+ d, err := bson.ParseDecimal128(decBenchNum)
+ c.Assert(err, IsNil)
+ c.Assert(d.String(), Equals, decBenchNum)
+
+ c.ResetTimer()
+ for i := 0; i < c.N; i++ {
+ d.String()
+ }
+}
+
+func (s *S) BenchmarkDecimal128Parse(c *C) {
+ var err error
+ c.ResetTimer()
+ for i := 0; i < c.N; i++ {
+ _, err = bson.ParseDecimal128(decBenchNum)
+ }
+ if err != nil {
+ panic(err)
+ }
+}
+
+var decimalTestsJSON = []struct{ file, json string }{
+ {"decimal128-1.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "valid": [
+ {
+ "description": "Special - Canonical NaN",
+ "bson": "180000001364000000000000000000000000000000007C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}"
+ },
+ {
+ "description": "Special - Negative NaN",
+ "bson": "18000000136400000000000000000000000000000000FC00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Negative NaN",
+ "bson": "18000000136400000000000000000000000000000000FC00",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-NaN\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Canonical SNaN",
+ "bson": "180000001364000000000000000000000000000000007E00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Negative SNaN",
+ "bson": "18000000136400000000000000000000000000000000FE00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - NaN with a payload",
+ "bson": "180000001364001200000000000000000000000000007E00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Canonical Positive Infinity",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Special - Canonical Negative Infinity",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Special - Invalid representation treated as 0",
+ "bson": "180000001364000000000000000000000000000000106C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Invalid representation treated as -0",
+ "bson": "18000000136400DCBA9876543210DEADBEEF00000010EC00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Invalid representation treated as 0E3",
+ "bson": "18000000136400FFFFFFFFFFFFFFFFFFFFFFFFFFFF116C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+3\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Regular - Adjusted Exponent Limit",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3CF22F00",
+ "extjson": "{\"d\": { \"$numberDecimal\": \"0.000001234567890123456789012345678901234\" }}"
+ },
+ {
+ "description": "Regular - Smallest",
+ "bson": "18000000136400D204000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.001234\"}}"
+ },
+ {
+ "description": "Regular - Smallest with Trailing Zeros",
+ "bson": "1800000013640040EF5A07000000000000000000002A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00123400000\"}}"
+ },
+ {
+ "description": "Regular - 0.1",
+ "bson": "1800000013640001000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1\"}}"
+ },
+ {
+ "description": "Regular - 0.1234567890123456789012345678901234",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3CFC2F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1234567890123456789012345678901234\"}}"
+ },
+ {
+ "description": "Regular - 0",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "Regular - -0",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "Regular - -0.0",
+ "bson": "1800000013640000000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0\"}}"
+ },
+ {
+ "description": "Regular - 2",
+ "bson": "180000001364000200000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2\"}}"
+ },
+ {
+ "description": "Regular - 2.000",
+ "bson": "18000000136400D0070000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2.000\"}}"
+ },
+ {
+ "description": "Regular - Largest",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3C403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1234567890123456789012345678901234\"}}"
+ },
+ {
+ "description": "Scientific - Tiniest",
+ "bson": "18000000136400FFFFFFFF638E8D37C087ADBE09ED010000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"9.999999999999999999999999999999999E-6143\"}}"
+ },
+ {
+ "description": "Scientific - Tiny",
+ "bson": "180000001364000100000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6176\"}}"
+ },
+ {
+ "description": "Scientific - Negative Tiny",
+ "bson": "180000001364000100000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6176\"}}"
+ },
+ {
+ "description": "Scientific - Adjusted Exponent Limit",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3CF02F00",
+ "extjson": "{\"d\": { \"$numberDecimal\": \"1.234567890123456789012345678901234E-7\" }}"
+ },
+ {
+ "description": "Scientific - Fractional",
+ "bson": "1800000013640064000000000000000000000000002CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.00E-8\"}}"
+ },
+ {
+ "description": "Scientific - 0 with Exponent",
+ "bson": "180000001364000000000000000000000000000000205F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6000\"}}"
+ },
+ {
+ "description": "Scientific - 0 with Negative Exponent",
+ "bson": "1800000013640000000000000000000000000000007A2B00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-611\"}}"
+ },
+ {
+ "description": "Scientific - No Decimal with Signed Exponent",
+ "bson": "180000001364000100000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+3\"}}"
+ },
+ {
+ "description": "Scientific - Trailing Zero",
+ "bson": "180000001364001A04000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.050E+4\"}}"
+ },
+ {
+ "description": "Scientific - With Decimal",
+ "bson": "180000001364006900000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.05E+3\"}}"
+ },
+ {
+ "description": "Scientific - Full",
+ "bson": "18000000136400FFFFFFFFFFFFFFFFFFFFFFFFFFFF403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"5192296858534827628530496329220095\"}}"
+ },
+ {
+ "description": "Scientific - Large",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "Scientific - Largest",
+ "bson": "18000000136400FFFFFFFF638E8D37C087ADBE09EDFF5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"9.999999999999999999999999999999999E+6144\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Exponent Normalization",
+ "bson": "1800000013640064000000000000000000000000002CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-100E-10\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.00E-8\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Unsigned Positive Exponent",
+ "bson": "180000001364000100000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+3\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Lowercase Exponent Identifier",
+ "bson": "180000001364000100000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1e+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+3\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Long Significand with Exponent",
+ "bson": "1800000013640079D9E0F9763ADA429D0200000000583000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12345689012345789012345E+12\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.2345689012345789012345E+34\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Positive Sign",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3C403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+1234567890123456789012345678901234\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1234567890123456789012345678901234\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Long Decimal String",
+ "bson": "180000001364000100000000000000000000000000722800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \".000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-999\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - nan",
+ "bson": "180000001364000000000000000000000000000000007C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"nan\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - nAn",
+ "bson": "180000001364000000000000000000000000000000007C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"nAn\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - +infinity",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+infinity\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - infinity",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"infinity\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - infiniTY",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"infiniTY\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - inf",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"inf\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - inF",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"inF\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - -infinity",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-infinity\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - -infiniTy",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-infiniTy\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - -Inf",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - -inf",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-inf\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - -inF",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-inF\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Rounded Subnormal number",
+ "bson": "180000001364000100000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10E-6177\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6176\"}}"
+ },
+ {
+ "description": "Clamped",
+ "bson": "180000001364000a00000000000000000000000000fe5f00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E6112\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+6112\"}}"
+ },
+ {
+ "description": "Exact rounding",
+ "bson": "18000000136400000000000a5bc138938d44c64d31cc3700",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+999\"}}"
+ }
+ ]
+}
+`},
+
+ {"decimal128-2.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "valid": [
+ {
+ "description": "[decq021] Normality",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3C40B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1234567890123456789012345678901234\"}}"
+ },
+ {
+ "description": "[decq823] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400010000800000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-2147483649\"}}"
+ },
+ {
+ "description": "[decq822] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400000000800000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-2147483648\"}}"
+ },
+ {
+ "description": "[decq821] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FFFFFF7F0000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-2147483647\"}}"
+ },
+ {
+ "description": "[decq820] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FEFFFF7F0000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-2147483646\"}}"
+ },
+ {
+ "description": "[decq152] fold-downs (more below)",
+ "bson": "18000000136400393000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-12345\"}}"
+ },
+ {
+ "description": "[decq154] fold-downs (more below)",
+ "bson": "18000000136400D20400000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1234\"}}"
+ },
+ {
+ "description": "[decq006] derivative canonical plain strings",
+ "bson": "18000000136400EE0200000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-750\"}}"
+ },
+ {
+ "description": "[decq164] fold-downs (more below)",
+ "bson": "1800000013640039300000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-123.45\"}}"
+ },
+ {
+ "description": "[decq156] fold-downs (more below)",
+ "bson": "180000001364007B0000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-123\"}}"
+ },
+ {
+ "description": "[decq008] derivative canonical plain strings",
+ "bson": "18000000136400EE020000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-75.0\"}}"
+ },
+ {
+ "description": "[decq158] fold-downs (more below)",
+ "bson": "180000001364000C0000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-12\"}}"
+ },
+ {
+ "description": "[decq122] Nmax and similar",
+ "bson": "18000000136400FFFFFFFF638E8D37C087ADBE09EDFFDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.999999999999999999999999999999999E+6144\"}}"
+ },
+ {
+ "description": "[decq002] (mostly derived from the Strawman 4 document and examples)",
+ "bson": "18000000136400EE020000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-7.50\"}}"
+ },
+ {
+ "description": "[decq004] derivative canonical plain strings",
+ "bson": "18000000136400EE0200000000000000000000000042B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-7.50E+3\"}}"
+ },
+ {
+ "description": "[decq018] derivative canonical plain strings",
+ "bson": "18000000136400EE020000000000000000000000002EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-7.50E-7\"}}"
+ },
+ {
+ "description": "[decq125] Nmax and similar",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3CFEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.234567890123456789012345678901234E+6144\"}}"
+ },
+ {
+ "description": "[decq131] fold-downs (more below)",
+ "bson": "18000000136400000000807F1BCF85B27059C8A43CFEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.230000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq162] fold-downs (more below)",
+ "bson": "180000001364007B000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.23\"}}"
+ },
+ {
+ "description": "[decq176] Nmin and below",
+ "bson": "18000000136400010000000A5BC138938D44C64D31008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.000000000000000000000000000000001E-6143\"}}"
+ },
+ {
+ "description": "[decq174] Nmin and below",
+ "bson": "18000000136400000000000A5BC138938D44C64D31008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.000000000000000000000000000000000E-6143\"}}"
+ },
+ {
+ "description": "[decq133] fold-downs (more below)",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq160] fold-downs (more below)",
+ "bson": "18000000136400010000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1\"}}"
+ },
+ {
+ "description": "[decq172] Nmin and below",
+ "bson": "180000001364000100000000000000000000000000428000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6143\"}}"
+ },
+ {
+ "description": "[decq010] derivative canonical plain strings",
+ "bson": "18000000136400EE020000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.750\"}}"
+ },
+ {
+ "description": "[decq012] derivative canonical plain strings",
+ "bson": "18000000136400EE0200000000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0750\"}}"
+ },
+ {
+ "description": "[decq014] derivative canonical plain strings",
+ "bson": "18000000136400EE0200000000000000000000000034B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000750\"}}"
+ },
+ {
+ "description": "[decq016] derivative canonical plain strings",
+ "bson": "18000000136400EE0200000000000000000000000030B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000750\"}}"
+ },
+ {
+ "description": "[decq404] zeros",
+ "bson": "180000001364000000000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6176\"}}"
+ },
+ {
+ "description": "[decq424] negative zeros",
+ "bson": "180000001364000000000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6176\"}}"
+ },
+ {
+ "description": "[decq407] zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[decq427] negative zeros",
+ "bson": "1800000013640000000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00\"}}"
+ },
+ {
+ "description": "[decq409] zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[decq428] negative zeros",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "[decq700] Selected DPD codes",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[decq406] zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[decq426] negative zeros",
+ "bson": "1800000013640000000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00\"}}"
+ },
+ {
+ "description": "[decq410] zeros",
+ "bson": "180000001364000000000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+3\"}}"
+ },
+ {
+ "description": "[decq431] negative zeros",
+ "bson": "18000000136400000000000000000000000000000046B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+3\"}}"
+ },
+ {
+ "description": "[decq419] clamped zeros...",
+ "bson": "180000001364000000000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6111\"}}"
+ },
+ {
+ "description": "[decq432] negative zeros",
+ "bson": "180000001364000000000000000000000000000000FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6111\"}}"
+ },
+ {
+ "description": "[decq405] zeros",
+ "bson": "180000001364000000000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6176\"}}"
+ },
+ {
+ "description": "[decq425] negative zeros",
+ "bson": "180000001364000000000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6176\"}}"
+ },
+ {
+ "description": "[decq508] Specials",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "[decq528] Specials",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "[decq541] Specials",
+ "bson": "180000001364000000000000000000000000000000007C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}"
+ },
+ {
+ "description": "[decq074] Nmin and below",
+ "bson": "18000000136400000000000A5BC138938D44C64D31000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E-6143\"}}"
+ },
+ {
+ "description": "[decq602] fold-down full sequence",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq604] fold-down full sequence",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000000E+6143\"}}"
+ },
+ {
+ "description": "[decq606] fold-down full sequence",
+ "bson": "1800000013640000000080264B91C02220BE377E00FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000000000E+6142\"}}"
+ },
+ {
+ "description": "[decq608] fold-down full sequence",
+ "bson": "1800000013640000000040EAED7446D09C2C9F0C00FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000E+6141\"}}"
+ },
+ {
+ "description": "[decq610] fold-down full sequence",
+ "bson": "18000000136400000000A0CA17726DAE0F1E430100FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000E+6140\"}}"
+ },
+ {
+ "description": "[decq612] fold-down full sequence",
+ "bson": "18000000136400000000106102253E5ECE4F200000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000000E+6139\"}}"
+ },
+ {
+ "description": "[decq614] fold-down full sequence",
+ "bson": "18000000136400000000E83C80D09F3C2E3B030000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000E+6138\"}}"
+ },
+ {
+ "description": "[decq616] fold-down full sequence",
+ "bson": "18000000136400000000E4D20CC8DCD2B752000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000E+6137\"}}"
+ },
+ {
+ "description": "[decq618] fold-down full sequence",
+ "bson": "180000001364000000004A48011416954508000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000E+6136\"}}"
+ },
+ {
+ "description": "[decq620] fold-down full sequence",
+ "bson": "18000000136400000000A1EDCCCE1BC2D300000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000E+6135\"}}"
+ },
+ {
+ "description": "[decq622] fold-down full sequence",
+ "bson": "18000000136400000080F64AE1C7022D1500000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000E+6134\"}}"
+ },
+ {
+ "description": "[decq624] fold-down full sequence",
+ "bson": "18000000136400000040B2BAC9E0191E0200000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000E+6133\"}}"
+ },
+ {
+ "description": "[decq626] fold-down full sequence",
+ "bson": "180000001364000000A0DEC5ADC935360000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000E+6132\"}}"
+ },
+ {
+ "description": "[decq628] fold-down full sequence",
+ "bson": "18000000136400000010632D5EC76B050000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000E+6131\"}}"
+ },
+ {
+ "description": "[decq630] fold-down full sequence",
+ "bson": "180000001364000000E8890423C78A000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000E+6130\"}}"
+ },
+ {
+ "description": "[decq632] fold-down full sequence",
+ "bson": "18000000136400000064A7B3B6E00D000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000E+6129\"}}"
+ },
+ {
+ "description": "[decq634] fold-down full sequence",
+ "bson": "1800000013640000008A5D78456301000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000E+6128\"}}"
+ },
+ {
+ "description": "[decq636] fold-down full sequence",
+ "bson": "180000001364000000C16FF2862300000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000E+6127\"}}"
+ },
+ {
+ "description": "[decq638] fold-down full sequence",
+ "bson": "180000001364000080C6A47E8D0300000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000E+6126\"}}"
+ },
+ {
+ "description": "[decq640] fold-down full sequence",
+ "bson": "1800000013640000407A10F35A0000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000E+6125\"}}"
+ },
+ {
+ "description": "[decq642] fold-down full sequence",
+ "bson": "1800000013640000A0724E18090000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000E+6124\"}}"
+ },
+ {
+ "description": "[decq644] fold-down full sequence",
+ "bson": "180000001364000010A5D4E8000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000E+6123\"}}"
+ },
+ {
+ "description": "[decq646] fold-down full sequence",
+ "bson": "1800000013640000E8764817000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000E+6122\"}}"
+ },
+ {
+ "description": "[decq648] fold-down full sequence",
+ "bson": "1800000013640000E40B5402000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000E+6121\"}}"
+ },
+ {
+ "description": "[decq650] fold-down full sequence",
+ "bson": "1800000013640000CA9A3B00000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000E+6120\"}}"
+ },
+ {
+ "description": "[decq652] fold-down full sequence",
+ "bson": "1800000013640000E1F50500000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000E+6119\"}}"
+ },
+ {
+ "description": "[decq654] fold-down full sequence",
+ "bson": "180000001364008096980000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000E+6118\"}}"
+ },
+ {
+ "description": "[decq656] fold-down full sequence",
+ "bson": "1800000013640040420F0000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000E+6117\"}}"
+ },
+ {
+ "description": "[decq658] fold-down full sequence",
+ "bson": "18000000136400A086010000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000E+6116\"}}"
+ },
+ {
+ "description": "[decq660] fold-down full sequence",
+ "bson": "180000001364001027000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000E+6115\"}}"
+ },
+ {
+ "description": "[decq662] fold-down full sequence",
+ "bson": "18000000136400E803000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000E+6114\"}}"
+ },
+ {
+ "description": "[decq664] fold-down full sequence",
+ "bson": "180000001364006400000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+6113\"}}"
+ },
+ {
+ "description": "[decq666] fold-down full sequence",
+ "bson": "180000001364000A00000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+6112\"}}"
+ },
+ {
+ "description": "[decq060] fold-downs (more below)",
+ "bson": "180000001364000100000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1\"}}"
+ },
+ {
+ "description": "[decq670] fold-down full sequence",
+ "bson": "180000001364000100000000000000000000000000FC5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6110\"}}"
+ },
+ {
+ "description": "[decq668] fold-down full sequence",
+ "bson": "180000001364000100000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6111\"}}"
+ },
+ {
+ "description": "[decq072] Nmin and below",
+ "bson": "180000001364000100000000000000000000000000420000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6143\"}}"
+ },
+ {
+ "description": "[decq076] Nmin and below",
+ "bson": "18000000136400010000000A5BC138938D44C64D31000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000001E-6143\"}}"
+ },
+ {
+ "description": "[decq036] fold-downs (more below)",
+ "bson": "18000000136400000000807F1BCF85B27059C8A43CFE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.230000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq062] fold-downs (more below)",
+ "bson": "180000001364007B000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.23\"}}"
+ },
+ {
+ "description": "[decq034] Nmax and similar",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3CFE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.234567890123456789012345678901234E+6144\"}}"
+ },
+ {
+ "description": "[decq441] exponent lengths",
+ "bson": "180000001364000700000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7\"}}"
+ },
+ {
+ "description": "[decq449] exponent lengths",
+ "bson": "1800000013640007000000000000000000000000001E5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+5999\"}}"
+ },
+ {
+ "description": "[decq447] exponent lengths",
+ "bson": "1800000013640007000000000000000000000000000E3800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+999\"}}"
+ },
+ {
+ "description": "[decq445] exponent lengths",
+ "bson": "180000001364000700000000000000000000000000063100",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+99\"}}"
+ },
+ {
+ "description": "[decq443] exponent lengths",
+ "bson": "180000001364000700000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+9\"}}"
+ },
+ {
+ "description": "[decq842] VG testcase",
+ "bson": "180000001364000000FED83F4E7C9FE4E269E38A5BCD1700",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7.049000000000010795488000000000000E-3097\"}}"
+ },
+ {
+ "description": "[decq841] VG testcase",
+ "bson": "180000001364000000203B9DB5056F000000000000002400",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"8.000000000000000000E-1550\"}}"
+ },
+ {
+ "description": "[decq840] VG testcase",
+ "bson": "180000001364003C17258419D710C42F0000000000002400",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"8.81125000000001349436E-1548\"}}"
+ },
+ {
+ "description": "[decq701] Selected DPD codes",
+ "bson": "180000001364000900000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"9\"}}"
+ },
+ {
+ "description": "[decq032] Nmax and similar",
+ "bson": "18000000136400FFFFFFFF638E8D37C087ADBE09EDFF5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"9.999999999999999999999999999999999E+6144\"}}"
+ },
+ {
+ "description": "[decq702] Selected DPD codes",
+ "bson": "180000001364000A00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10\"}}"
+ },
+ {
+ "description": "[decq057] fold-downs (more below)",
+ "bson": "180000001364000C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12\"}}"
+ },
+ {
+ "description": "[decq703] Selected DPD codes",
+ "bson": "180000001364001300000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"19\"}}"
+ },
+ {
+ "description": "[decq704] Selected DPD codes",
+ "bson": "180000001364001400000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"20\"}}"
+ },
+ {
+ "description": "[decq705] Selected DPD codes",
+ "bson": "180000001364001D00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"29\"}}"
+ },
+ {
+ "description": "[decq706] Selected DPD codes",
+ "bson": "180000001364001E00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"30\"}}"
+ },
+ {
+ "description": "[decq707] Selected DPD codes",
+ "bson": "180000001364002700000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"39\"}}"
+ },
+ {
+ "description": "[decq708] Selected DPD codes",
+ "bson": "180000001364002800000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"40\"}}"
+ },
+ {
+ "description": "[decq709] Selected DPD codes",
+ "bson": "180000001364003100000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"49\"}}"
+ },
+ {
+ "description": "[decq710] Selected DPD codes",
+ "bson": "180000001364003200000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"50\"}}"
+ },
+ {
+ "description": "[decq711] Selected DPD codes",
+ "bson": "180000001364003B00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"59\"}}"
+ },
+ {
+ "description": "[decq712] Selected DPD codes",
+ "bson": "180000001364003C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"60\"}}"
+ },
+ {
+ "description": "[decq713] Selected DPD codes",
+ "bson": "180000001364004500000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"69\"}}"
+ },
+ {
+ "description": "[decq714] Selected DPD codes",
+ "bson": "180000001364004600000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"70\"}}"
+ },
+ {
+ "description": "[decq715] Selected DPD codes",
+ "bson": "180000001364004700000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"71\"}}"
+ },
+ {
+ "description": "[decq716] Selected DPD codes",
+ "bson": "180000001364004800000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"72\"}}"
+ },
+ {
+ "description": "[decq717] Selected DPD codes",
+ "bson": "180000001364004900000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"73\"}}"
+ },
+ {
+ "description": "[decq718] Selected DPD codes",
+ "bson": "180000001364004A00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"74\"}}"
+ },
+ {
+ "description": "[decq719] Selected DPD codes",
+ "bson": "180000001364004B00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"75\"}}"
+ },
+ {
+ "description": "[decq720] Selected DPD codes",
+ "bson": "180000001364004C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"76\"}}"
+ },
+ {
+ "description": "[decq721] Selected DPD codes",
+ "bson": "180000001364004D00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"77\"}}"
+ },
+ {
+ "description": "[decq722] Selected DPD codes",
+ "bson": "180000001364004E00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"78\"}}"
+ },
+ {
+ "description": "[decq723] Selected DPD codes",
+ "bson": "180000001364004F00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"79\"}}"
+ },
+ {
+ "description": "[decq056] fold-downs (more below)",
+ "bson": "180000001364007B00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"123\"}}"
+ },
+ {
+ "description": "[decq064] fold-downs (more below)",
+ "bson": "1800000013640039300000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"123.45\"}}"
+ },
+ {
+ "description": "[decq732] Selected DPD codes",
+ "bson": "180000001364000802000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"520\"}}"
+ },
+ {
+ "description": "[decq733] Selected DPD codes",
+ "bson": "180000001364000902000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"521\"}}"
+ },
+ {
+ "description": "[decq740] DPD: one of each of the huffman groups",
+ "bson": "180000001364000903000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"777\"}}"
+ },
+ {
+ "description": "[decq741] DPD: one of each of the huffman groups",
+ "bson": "180000001364000A03000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"778\"}}"
+ },
+ {
+ "description": "[decq742] DPD: one of each of the huffman groups",
+ "bson": "180000001364001303000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"787\"}}"
+ },
+ {
+ "description": "[decq746] DPD: one of each of the huffman groups",
+ "bson": "180000001364001F03000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"799\"}}"
+ },
+ {
+ "description": "[decq743] DPD: one of each of the huffman groups",
+ "bson": "180000001364006D03000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"877\"}}"
+ },
+ {
+ "description": "[decq753] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "180000001364007803000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"888\"}}"
+ },
+ {
+ "description": "[decq754] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "180000001364007903000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"889\"}}"
+ },
+ {
+ "description": "[decq760] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "180000001364008203000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"898\"}}"
+ },
+ {
+ "description": "[decq764] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "180000001364008303000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"899\"}}"
+ },
+ {
+ "description": "[decq745] DPD: one of each of the huffman groups",
+ "bson": "18000000136400D303000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"979\"}}"
+ },
+ {
+ "description": "[decq770] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "18000000136400DC03000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"988\"}}"
+ },
+ {
+ "description": "[decq774] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "18000000136400DD03000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"989\"}}"
+ },
+ {
+ "description": "[decq730] Selected DPD codes",
+ "bson": "18000000136400E203000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"994\"}}"
+ },
+ {
+ "description": "[decq731] Selected DPD codes",
+ "bson": "18000000136400E303000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"995\"}}"
+ },
+ {
+ "description": "[decq744] DPD: one of each of the huffman groups",
+ "bson": "18000000136400E503000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"997\"}}"
+ },
+ {
+ "description": "[decq780] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "18000000136400E603000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"998\"}}"
+ },
+ {
+ "description": "[decq787] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "18000000136400E703000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"999\"}}"
+ },
+ {
+ "description": "[decq053] fold-downs (more below)",
+ "bson": "18000000136400D204000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1234\"}}"
+ },
+ {
+ "description": "[decq052] fold-downs (more below)",
+ "bson": "180000001364003930000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12345\"}}"
+ },
+ {
+ "description": "[decq792] Miscellaneous (testers' queries, etc.)",
+ "bson": "180000001364003075000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"30000\"}}"
+ },
+ {
+ "description": "[decq793] Miscellaneous (testers' queries, etc.)",
+ "bson": "1800000013640090940D0000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"890000\"}}"
+ },
+ {
+ "description": "[decq824] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FEFFFF7F00000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2147483646\"}}"
+ },
+ {
+ "description": "[decq825] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FFFFFF7F00000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2147483647\"}}"
+ },
+ {
+ "description": "[decq826] values around [u]int32 edges (zeros done earlier)",
+ "bson": "180000001364000000008000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2147483648\"}}"
+ },
+ {
+ "description": "[decq827] values around [u]int32 edges (zeros done earlier)",
+ "bson": "180000001364000100008000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2147483649\"}}"
+ },
+ {
+ "description": "[decq828] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FEFFFFFF00000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"4294967294\"}}"
+ },
+ {
+ "description": "[decq829] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FFFFFFFF00000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"4294967295\"}}"
+ },
+ {
+ "description": "[decq830] values around [u]int32 edges (zeros done earlier)",
+ "bson": "180000001364000000000001000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"4294967296\"}}"
+ },
+ {
+ "description": "[decq831] values around [u]int32 edges (zeros done earlier)",
+ "bson": "180000001364000100000001000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"4294967297\"}}"
+ },
+ {
+ "description": "[decq022] Normality",
+ "bson": "18000000136400C7711CC7B548F377DC80A131C836403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1111111111111111111111111111111111\"}}"
+ },
+ {
+ "description": "[decq020] Normality",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3C403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1234567890123456789012345678901234\"}}"
+ },
+ {
+ "description": "[decq550] Specials",
+ "bson": "18000000136400FFFFFFFF638E8D37C087ADBE09ED413000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"9999999999999999999999999999999999\"}}"
+ }
+ ]
+}
+`},
+
+ {"decimal128-3.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "valid": [
+ {
+ "description": "[basx066] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE0000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-00345678.5432\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-345678.5432\"}}"
+ },
+ {
+ "description": "[basx065] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE0000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0345678.5432\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-345678.5432\"}}"
+ },
+ {
+ "description": "[basx064] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE0000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-345678.5432\"}}"
+ },
+ {
+ "description": "[basx041] strings without E cannot generate E in result",
+ "bson": "180000001364004C0000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-76\"}}"
+ },
+ {
+ "description": "[basx027] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000F270000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.999\"}}"
+ },
+ {
+ "description": "[basx026] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364009F230000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.119\"}}"
+ },
+ {
+ "description": "[basx025] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364008F030000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.11\"}}"
+ },
+ {
+ "description": "[basx024] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364005B000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.1\"}}"
+ },
+ {
+ "description": "[dqbsr531] negatives (Rounded)",
+ "bson": "1800000013640099761CC7B548F377DC80A131C836FEAF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.1111111111111111111111111111123450\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.111111111111111111111111111112345\"}}"
+ },
+ {
+ "description": "[basx022] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000A000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.0\"}}"
+ },
+ {
+ "description": "[basx021] conform to rules and exponent will be in permitted range).",
+ "bson": "18000000136400010000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1\"}}"
+ },
+ {
+ "description": "[basx601] Zeros",
+ "bson": "1800000013640000000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-9\"}}"
+ },
+ {
+ "description": "[basx622] Zeros",
+ "bson": "1800000013640000000000000000000000000000002EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-9\"}}"
+ },
+ {
+ "description": "[basx602] Zeros",
+ "bson": "180000001364000000000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-8\"}}"
+ },
+ {
+ "description": "[basx621] Zeros",
+ "bson": "18000000136400000000000000000000000000000030B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-8\"}}"
+ },
+ {
+ "description": "[basx603] Zeros",
+ "bson": "180000001364000000000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-7\"}}"
+ },
+ {
+ "description": "[basx620] Zeros",
+ "bson": "18000000136400000000000000000000000000000032B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-7\"}}"
+ },
+ {
+ "description": "[basx604] Zeros",
+ "bson": "180000001364000000000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000\"}}"
+ },
+ {
+ "description": "[basx619] Zeros",
+ "bson": "18000000136400000000000000000000000000000034B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000000\"}}"
+ },
+ {
+ "description": "[basx605] Zeros",
+ "bson": "180000001364000000000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000\"}}"
+ },
+ {
+ "description": "[basx618] Zeros",
+ "bson": "18000000136400000000000000000000000000000036B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000\"}}"
+ },
+ {
+ "description": "[basx680] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"000000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx606] Zeros",
+ "bson": "180000001364000000000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000\"}}"
+ },
+ {
+ "description": "[basx617] Zeros",
+ "bson": "18000000136400000000000000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0000\"}}"
+ },
+ {
+ "description": "[basx681] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"00000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx686] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+00000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx687] Zeros",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-00000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "[basx019] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640000000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-00.00\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00\"}}"
+ },
+ {
+ "description": "[basx607] Zeros",
+ "bson": "1800000013640000000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000\"}}"
+ },
+ {
+ "description": "[basx616] Zeros",
+ "bson": "1800000013640000000000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000\"}}"
+ },
+ {
+ "description": "[basx682] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx155] Numbers with E",
+ "bson": "1800000013640000000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000e+0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000\"}}"
+ },
+ {
+ "description": "[basx130] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000\"}}"
+ },
+ {
+ "description": "[basx290] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0000\"}}"
+ },
+ {
+ "description": "[basx131] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000\"}}"
+ },
+ {
+ "description": "[basx291] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000036B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000\"}}"
+ },
+ {
+ "description": "[basx132] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000\"}}"
+ },
+ {
+ "description": "[basx292] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000034B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000000\"}}"
+ },
+ {
+ "description": "[basx133] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-7\"}}"
+ },
+ {
+ "description": "[basx293] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000032B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-7\"}}"
+ },
+ {
+ "description": "[basx608] Zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[basx615] Zeros",
+ "bson": "1800000013640000000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00\"}}"
+ },
+ {
+ "description": "[basx683] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx630] Zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[basx670] Zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[basx631] Zeros",
+ "bson": "1800000013640000000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}"
+ },
+ {
+ "description": "[basx671] Zeros",
+ "bson": "1800000013640000000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000\"}}"
+ },
+ {
+ "description": "[basx134] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000\"}}"
+ },
+ {
+ "description": "[basx294] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0000\"}}"
+ },
+ {
+ "description": "[basx632] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx672] Zeros",
+ "bson": "180000001364000000000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000\"}}"
+ },
+ {
+ "description": "[basx135] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000\"}}"
+ },
+ {
+ "description": "[basx295] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000036B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000\"}}"
+ },
+ {
+ "description": "[basx633] Zeros",
+ "bson": "180000001364000000000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+1\"}}"
+ },
+ {
+ "description": "[basx673] Zeros",
+ "bson": "180000001364000000000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000\"}}"
+ },
+ {
+ "description": "[basx136] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000\"}}"
+ },
+ {
+ "description": "[basx674] Zeros",
+ "bson": "180000001364000000000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000\"}}"
+ },
+ {
+ "description": "[basx634] Zeros",
+ "bson": "180000001364000000000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+2\"}}"
+ },
+ {
+ "description": "[basx137] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-7\"}}"
+ },
+ {
+ "description": "[basx635] Zeros",
+ "bson": "180000001364000000000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+3\"}}"
+ },
+ {
+ "description": "[basx675] Zeros",
+ "bson": "180000001364000000000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-7\"}}"
+ },
+ {
+ "description": "[basx636] Zeros",
+ "bson": "180000001364000000000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+4\"}}"
+ },
+ {
+ "description": "[basx676] Zeros",
+ "bson": "180000001364000000000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-8\"}}"
+ },
+ {
+ "description": "[basx637] Zeros",
+ "bson": "1800000013640000000000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+5\"}}"
+ },
+ {
+ "description": "[basx677] Zeros",
+ "bson": "1800000013640000000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-9\"}}"
+ },
+ {
+ "description": "[basx638] Zeros",
+ "bson": "1800000013640000000000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6\"}}"
+ },
+ {
+ "description": "[basx678] Zeros",
+ "bson": "1800000013640000000000000000000000000000002C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-10\"}}"
+ },
+ {
+ "description": "[basx149] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"000E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx639] Zeros",
+ "bson": "1800000013640000000000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+7\"}}"
+ },
+ {
+ "description": "[basx679] Zeros",
+ "bson": "1800000013640000000000000000000000000000002A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-11\"}}"
+ },
+ {
+ "description": "[basx063] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE00000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+00345678.5432\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.5432\"}}"
+ },
+ {
+ "description": "[basx018] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640000000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0\"}}"
+ },
+ {
+ "description": "[basx609] Zeros",
+ "bson": "1800000013640000000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}"
+ },
+ {
+ "description": "[basx614] Zeros",
+ "bson": "1800000013640000000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0\"}}"
+ },
+ {
+ "description": "[basx684] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"00.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx640] Zeros",
+ "bson": "1800000013640000000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}"
+ },
+ {
+ "description": "[basx660] Zeros",
+ "bson": "1800000013640000000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}"
+ },
+ {
+ "description": "[basx641] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx661] Zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[basx296] some more negative zeros [systematic tests below]",
+ "bson": "1800000013640000000000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000\"}}"
+ },
+ {
+ "description": "[basx642] Zeros",
+ "bson": "180000001364000000000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+1\"}}"
+ },
+ {
+ "description": "[basx662] Zeros",
+ "bson": "1800000013640000000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000\"}}"
+ },
+ {
+ "description": "[basx297] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0000\"}}"
+ },
+ {
+ "description": "[basx643] Zeros",
+ "bson": "180000001364000000000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+2\"}}"
+ },
+ {
+ "description": "[basx663] Zeros",
+ "bson": "180000001364000000000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000\"}}"
+ },
+ {
+ "description": "[basx644] Zeros",
+ "bson": "180000001364000000000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+3\"}}"
+ },
+ {
+ "description": "[basx664] Zeros",
+ "bson": "180000001364000000000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000\"}}"
+ },
+ {
+ "description": "[basx645] Zeros",
+ "bson": "180000001364000000000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+4\"}}"
+ },
+ {
+ "description": "[basx665] Zeros",
+ "bson": "180000001364000000000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000\"}}"
+ },
+ {
+ "description": "[basx646] Zeros",
+ "bson": "1800000013640000000000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+5\"}}"
+ },
+ {
+ "description": "[basx666] Zeros",
+ "bson": "180000001364000000000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-7\"}}"
+ },
+ {
+ "description": "[basx647] Zeros",
+ "bson": "1800000013640000000000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6\"}}"
+ },
+ {
+ "description": "[basx667] Zeros",
+ "bson": "180000001364000000000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-8\"}}"
+ },
+ {
+ "description": "[basx648] Zeros",
+ "bson": "1800000013640000000000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+7\"}}"
+ },
+ {
+ "description": "[basx668] Zeros",
+ "bson": "1800000013640000000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-9\"}}"
+ },
+ {
+ "description": "[basx160] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"00E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx161] Numbers with E",
+ "bson": "1800000013640000000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"00E-9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-9\"}}"
+ },
+ {
+ "description": "[basx649] Zeros",
+ "bson": "180000001364000000000000000000000000000000503000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+8\"}}"
+ },
+ {
+ "description": "[basx669] Zeros",
+ "bson": "1800000013640000000000000000000000000000002C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-10\"}}"
+ },
+ {
+ "description": "[basx062] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE00000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+0345678.5432\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.5432\"}}"
+ },
+ {
+ "description": "[basx001] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx017] conform to rules and exponent will be in permitted range).",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "[basx611] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx613] Zeros",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "[basx685] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx688] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+0.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx689] Zeros",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "[basx650] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx651] Zeros",
+ "bson": "180000001364000000000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+1\"}}"
+ },
+ {
+ "description": "[basx298] some more negative zeros [systematic tests below]",
+ "bson": "1800000013640000000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00\"}}"
+ },
+ {
+ "description": "[basx652] Zeros",
+ "bson": "180000001364000000000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+2\"}}"
+ },
+ {
+ "description": "[basx299] some more negative zeros [systematic tests below]",
+ "bson": "1800000013640000000000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000\"}}"
+ },
+ {
+ "description": "[basx653] Zeros",
+ "bson": "180000001364000000000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+3\"}}"
+ },
+ {
+ "description": "[basx654] Zeros",
+ "bson": "180000001364000000000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+4\"}}"
+ },
+ {
+ "description": "[basx655] Zeros",
+ "bson": "1800000013640000000000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+5\"}}"
+ },
+ {
+ "description": "[basx656] Zeros",
+ "bson": "1800000013640000000000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6\"}}"
+ },
+ {
+ "description": "[basx657] Zeros",
+ "bson": "1800000013640000000000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+7\"}}"
+ },
+ {
+ "description": "[basx658] Zeros",
+ "bson": "180000001364000000000000000000000000000000503000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+8\"}}"
+ },
+ {
+ "description": "[basx138] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+0E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx139] Numbers with E",
+ "bson": "18000000136400000000000000000000000000000052B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+9\"}}"
+ },
+ {
+ "description": "[basx144] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx154] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx659] Zeros",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx042] strings without E cannot generate E in result",
+ "bson": "18000000136400FC040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+12.76\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.76\"}}"
+ },
+ {
+ "description": "[basx143] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+1E+009\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx061] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE00000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+345678.5432\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.5432\"}}"
+ },
+ {
+ "description": "[basx036] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640015CD5B0700000000000000000000203000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000000123456789\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.23456789E-8\"}}"
+ },
+ {
+ "description": "[basx035] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640015CD5B0700000000000000000000223000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000123456789\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.23456789E-7\"}}"
+ },
+ {
+ "description": "[basx034] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640015CD5B0700000000000000000000243000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000123456789\"}}"
+ },
+ {
+ "description": "[basx053] strings without E cannot generate E in result",
+ "bson": "180000001364003200000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000050\"}}"
+ },
+ {
+ "description": "[basx033] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640015CD5B0700000000000000000000263000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000123456789\"}}"
+ },
+ {
+ "description": "[basx016] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000C000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.012\"}}"
+ },
+ {
+ "description": "[basx015] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364007B000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.123\"}}"
+ },
+ {
+ "description": "[basx037] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640078DF0D8648700000000000000000223000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.123456789012344\"}}"
+ },
+ {
+ "description": "[basx038] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640079DF0D8648700000000000000000223000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.123456789012345\"}}"
+ },
+ {
+ "description": "[basx250] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx257] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx256] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.01265\"}}"
+ },
+ {
+ "description": "[basx258] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx251] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000103000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-21\"}}"
+ },
+ {
+ "description": "[basx263] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000603000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+19\"}}"
+ },
+ {
+ "description": "[basx255] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.001265\"}}"
+ },
+ {
+ "description": "[basx259] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx254] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0001265\"}}"
+ },
+ {
+ "description": "[basx260] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx253] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00001265\"}}"
+ },
+ {
+ "description": "[basx261] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx252] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000283000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-9\"}}"
+ },
+ {
+ "description": "[basx262] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+7\"}}"
+ },
+ {
+ "description": "[basx159] Numbers with E",
+ "bson": "1800000013640049000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.73e-7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7.3E-8\"}}"
+ },
+ {
+ "description": "[basx004] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640064000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00\"}}"
+ },
+ {
+ "description": "[basx003] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000A000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0\"}}"
+ },
+ {
+ "description": "[basx002] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000100000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1\"}}"
+ },
+ {
+ "description": "[basx148] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+009\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx153] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E009\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx141] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1e+09\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx146] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+09\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx151] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1e09\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx142] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000F43000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+90\"}}"
+ },
+ {
+ "description": "[basx147] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000F43000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1e+90\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+90\"}}"
+ },
+ {
+ "description": "[basx152] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000F43000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E90\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+90\"}}"
+ },
+ {
+ "description": "[basx140] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx150] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx014] conform to rules and exponent will be in permitted range).",
+ "bson": "18000000136400D2040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.234\"}}"
+ },
+ {
+ "description": "[basx170] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx177] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx176] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx178] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx171] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000123000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-20\"}}"
+ },
+ {
+ "description": "[basx183] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000623000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+20\"}}"
+ },
+ {
+ "description": "[basx175] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.01265\"}}"
+ },
+ {
+ "description": "[basx179] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx174] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.001265\"}}"
+ },
+ {
+ "description": "[basx180] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx173] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0001265\"}}"
+ },
+ {
+ "description": "[basx181] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+4\"}}"
+ },
+ {
+ "description": "[basx172] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000002A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-8\"}}"
+ },
+ {
+ "description": "[basx182] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+8\"}}"
+ },
+ {
+ "description": "[basx157] Numbers with E",
+ "bson": "180000001364000400000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"4E+9\"}}"
+ },
+ {
+ "description": "[basx067] examples",
+ "bson": "180000001364000500000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000005\"}}"
+ },
+ {
+ "description": "[basx069] examples",
+ "bson": "180000001364000500000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-7\"}}"
+ },
+ {
+ "description": "[basx385] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7\"}}"
+ },
+ {
+ "description": "[basx365] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000543000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E10\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+10\"}}"
+ },
+ {
+ "description": "[basx405] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000002C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-10\"}}"
+ },
+ {
+ "description": "[basx363] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000563000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E11\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+11\"}}"
+ },
+ {
+ "description": "[basx407] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000002A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-11\"}}"
+ },
+ {
+ "description": "[basx361] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000583000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E12\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+12\"}}"
+ },
+ {
+ "description": "[basx409] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000283000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-12\"}}"
+ },
+ {
+ "description": "[basx411] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000263000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-13\"}}"
+ },
+ {
+ "description": "[basx383] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+1\"}}"
+ },
+ {
+ "description": "[basx387] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.7\"}}"
+ },
+ {
+ "description": "[basx381] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+2\"}}"
+ },
+ {
+ "description": "[basx389] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.07\"}}"
+ },
+ {
+ "description": "[basx379] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+3\"}}"
+ },
+ {
+ "description": "[basx391] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.007\"}}"
+ },
+ {
+ "description": "[basx377] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+4\"}}"
+ },
+ {
+ "description": "[basx393] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0007\"}}"
+ },
+ {
+ "description": "[basx375] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+5\"}}"
+ },
+ {
+ "description": "[basx395] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00007\"}}"
+ },
+ {
+ "description": "[basx373] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+6\"}}"
+ },
+ {
+ "description": "[basx397] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000007\"}}"
+ },
+ {
+ "description": "[basx371] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+7\"}}"
+ },
+ {
+ "description": "[basx399] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-7\"}}"
+ },
+ {
+ "description": "[basx369] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000503000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+8\"}}"
+ },
+ {
+ "description": "[basx401] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-8\"}}"
+ },
+ {
+ "description": "[basx367] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+9\"}}"
+ },
+ {
+ "description": "[basx403] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-9\"}}"
+ },
+ {
+ "description": "[basx007] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640064000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.0\"}}"
+ },
+ {
+ "description": "[basx005] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000A00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10\"}}"
+ },
+ {
+ "description": "[basx165] Numbers with E",
+ "bson": "180000001364000A00000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10E+009\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+10\"}}"
+ },
+ {
+ "description": "[basx163] Numbers with E",
+ "bson": "180000001364000A00000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10E+09\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+10\"}}"
+ },
+ {
+ "description": "[basx325] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"10\"}}"
+ },
+ {
+ "description": "[basx305] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000543000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e10\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+11\"}}"
+ },
+ {
+ "description": "[basx345] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000002C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-10\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-9\"}}"
+ },
+ {
+ "description": "[basx303] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000563000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e11\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+12\"}}"
+ },
+ {
+ "description": "[basx347] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000002A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-11\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-10\"}}"
+ },
+ {
+ "description": "[basx301] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000583000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e12\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+13\"}}"
+ },
+ {
+ "description": "[basx349] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000283000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-12\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-11\"}}"
+ },
+ {
+ "description": "[basx351] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000263000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-13\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-12\"}}"
+ },
+ {
+ "description": "[basx323] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+2\"}}"
+ },
+ {
+ "description": "[basx327] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0\"}}"
+ },
+ {
+ "description": "[basx321] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+3\"}}"
+ },
+ {
+ "description": "[basx329] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.10\"}}"
+ },
+ {
+ "description": "[basx319] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+4\"}}"
+ },
+ {
+ "description": "[basx331] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.010\"}}"
+ },
+ {
+ "description": "[basx317] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+5\"}}"
+ },
+ {
+ "description": "[basx333] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0010\"}}"
+ },
+ {
+ "description": "[basx315] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+6\"}}"
+ },
+ {
+ "description": "[basx335] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00010\"}}"
+ },
+ {
+ "description": "[basx313] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+7\"}}"
+ },
+ {
+ "description": "[basx337] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000010\"}}"
+ },
+ {
+ "description": "[basx311] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+8\"}}"
+ },
+ {
+ "description": "[basx339] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000010\"}}"
+ },
+ {
+ "description": "[basx309] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000503000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+9\"}}"
+ },
+ {
+ "description": "[basx341] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-7\"}}"
+ },
+ {
+ "description": "[basx164] Numbers with E",
+ "bson": "180000001364000A00000000000000000000000000F43000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e+90\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+91\"}}"
+ },
+ {
+ "description": "[basx162] Numbers with E",
+ "bson": "180000001364000A00000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+10\"}}"
+ },
+ {
+ "description": "[basx307] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+10\"}}"
+ },
+ {
+ "description": "[basx343] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-8\"}}"
+ },
+ {
+ "description": "[basx008] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640065000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.1\"}}"
+ },
+ {
+ "description": "[basx009] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640068000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.4\"}}"
+ },
+ {
+ "description": "[basx010] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640069000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.5\"}}"
+ },
+ {
+ "description": "[basx011] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364006A000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.6\"}}"
+ },
+ {
+ "description": "[basx012] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364006D000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.9\"}}"
+ },
+ {
+ "description": "[basx013] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364006E000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"11.0\"}}"
+ },
+ {
+ "description": "[basx040] strings without E cannot generate E in result",
+ "bson": "180000001364000C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12\"}}"
+ },
+ {
+ "description": "[basx190] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx197] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx196] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx198] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx191] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000143000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-19\"}}"
+ },
+ {
+ "description": "[basx203] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000643000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+21\"}}"
+ },
+ {
+ "description": "[basx195] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx199] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx194] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.01265\"}}"
+ },
+ {
+ "description": "[basx200] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+4\"}}"
+ },
+ {
+ "description": "[basx193] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.001265\"}}"
+ },
+ {
+ "description": "[basx201] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+5\"}}"
+ },
+ {
+ "description": "[basx192] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000002C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-7\"}}"
+ },
+ {
+ "description": "[basx202] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+9\"}}"
+ },
+ {
+ "description": "[basx044] strings without E cannot generate E in result",
+ "bson": "18000000136400FC040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"012.76\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.76\"}}"
+ },
+ {
+ "description": "[basx042] strings without E cannot generate E in result",
+ "bson": "18000000136400FC040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.76\"}}"
+ },
+ {
+ "description": "[basx046] strings without E cannot generate E in result",
+ "bson": "180000001364001100000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"17.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"17\"}}"
+ },
+ {
+ "description": "[basx049] strings without E cannot generate E in result",
+ "bson": "180000001364002C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0044\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"44\"}}"
+ },
+ {
+ "description": "[basx048] strings without E cannot generate E in result",
+ "bson": "180000001364002C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"044\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"44\"}}"
+ },
+ {
+ "description": "[basx158] Numbers with E",
+ "bson": "180000001364002C00000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"44E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"4.4E+10\"}}"
+ },
+ {
+ "description": "[basx068] examples",
+ "bson": "180000001364003200000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"50E-7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000050\"}}"
+ },
+ {
+ "description": "[basx169] Numbers with E",
+ "bson": "180000001364006400000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"100e+009\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+11\"}}"
+ },
+ {
+ "description": "[basx167] Numbers with E",
+ "bson": "180000001364006400000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"100e+09\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+11\"}}"
+ },
+ {
+ "description": "[basx168] Numbers with E",
+ "bson": "180000001364006400000000000000000000000000F43000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"100E+90\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+92\"}}"
+ },
+ {
+ "description": "[basx166] Numbers with E",
+ "bson": "180000001364006400000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"100e+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+11\"}}"
+ },
+ {
+ "description": "[basx210] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx217] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx216] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx218] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx211] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000163000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-18\"}}"
+ },
+ {
+ "description": "[basx223] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000663000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+22\"}}"
+ },
+ {
+ "description": "[basx215] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx219] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+4\"}}"
+ },
+ {
+ "description": "[basx214] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx220] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+5\"}}"
+ },
+ {
+ "description": "[basx213] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.01265\"}}"
+ },
+ {
+ "description": "[basx221] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+6\"}}"
+ },
+ {
+ "description": "[basx212] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000001265\"}}"
+ },
+ {
+ "description": "[basx222] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+10\"}}"
+ },
+ {
+ "description": "[basx006] conform to rules and exponent will be in permitted range).",
+ "bson": "18000000136400E803000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1000\"}}"
+ },
+ {
+ "description": "[basx230] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx237] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx236] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx238] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+4\"}}"
+ },
+ {
+ "description": "[basx231] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000183000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-17\"}}"
+ },
+ {
+ "description": "[basx243] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000683000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+23\"}}"
+ },
+ {
+ "description": "[basx235] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx239] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+5\"}}"
+ },
+ {
+ "description": "[basx234] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx240] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+6\"}}"
+ },
+ {
+ "description": "[basx233] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx241] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+7\"}}"
+ },
+ {
+ "description": "[basx232] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00001265\"}}"
+ },
+ {
+ "description": "[basx242] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000503000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+11\"}}"
+ },
+ {
+ "description": "[basx060] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE00000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.5432\"}}"
+ },
+ {
+ "description": "[basx059] strings without E cannot generate E in result",
+ "bson": "18000000136400F198670C08000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0345678.54321\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.54321\"}}"
+ },
+ {
+ "description": "[basx058] strings without E cannot generate E in result",
+ "bson": "180000001364006AF90B7C50000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.543210\"}}"
+ },
+ {
+ "description": "[basx057] strings without E cannot generate E in result",
+ "bson": "180000001364006A19562522020000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2345678.543210\"}}"
+ },
+ {
+ "description": "[basx056] strings without E cannot generate E in result",
+ "bson": "180000001364006AB9C8733A0B0000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12345678.543210\"}}"
+ },
+ {
+ "description": "[basx031] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640040AF0D8648700000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"123456789.000000\"}}"
+ },
+ {
+ "description": "[basx030] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640080910F8648700000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"123456789.123456\"}}"
+ },
+ {
+ "description": "[basx032] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640080910F8648700000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"123456789123456\"}}"
+ }
+ ]
+}
+`},
+
+ {"decimal128-4.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "valid": [
+ {
+ "description": "[basx023] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640001000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.1\"}}"
+ },
+
+ {
+ "description": "[basx045] strings without E cannot generate E in result",
+ "bson": "1800000013640003000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+0.003\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.003\"}}"
+ },
+ {
+ "description": "[basx610] Zeros",
+ "bson": "1800000013640000000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \".0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}"
+ },
+ {
+ "description": "[basx612] Zeros",
+ "bson": "1800000013640000000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-.0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0\"}}"
+ },
+ {
+ "description": "[basx043] strings without E cannot generate E in result",
+ "bson": "18000000136400FC040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+12.76\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.76\"}}"
+ },
+ {
+ "description": "[basx055] strings without E cannot generate E in result",
+ "bson": "180000001364000500000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000005\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-8\"}}"
+ },
+ {
+ "description": "[basx054] strings without E cannot generate E in result",
+ "bson": "180000001364000500000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000005\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-7\"}}"
+ },
+ {
+ "description": "[basx052] strings without E cannot generate E in result",
+ "bson": "180000001364000500000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000005\"}}"
+ },
+ {
+ "description": "[basx051] strings without E cannot generate E in result",
+ "bson": "180000001364000500000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"00.00005\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00005\"}}"
+ },
+ {
+ "description": "[basx050] strings without E cannot generate E in result",
+ "bson": "180000001364000500000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0005\"}}"
+ },
+ {
+ "description": "[basx047] strings without E cannot generate E in result",
+ "bson": "1800000013640005000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \".5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.5\"}}"
+ },
+ {
+ "description": "[dqbsr431] check rounding modes heeded (Rounded)",
+ "bson": "1800000013640099761CC7B548F377DC80A131C836FE2F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.1111111111111111111111111111123450\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.111111111111111111111111111112345\"}}"
+ },
+ {
+ "description": "OK2",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FC2F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \".100000000000000000000000000000000000000000000000000000000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1000000000000000000000000000000000\"}}"
+ }
+ ],
+ "parseErrors": [
+ {
+ "description": "[basx564] Near-specials (Conversion_syntax)",
+ "string": "Infi"
+ },
+ {
+ "description": "[basx565] Near-specials (Conversion_syntax)",
+ "string": "Infin"
+ },
+ {
+ "description": "[basx566] Near-specials (Conversion_syntax)",
+ "string": "Infini"
+ },
+ {
+ "description": "[basx567] Near-specials (Conversion_syntax)",
+ "string": "Infinit"
+ },
+ {
+ "description": "[basx568] Near-specials (Conversion_syntax)",
+ "string": "-Infinit"
+ },
+ {
+ "description": "[basx590] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": ".Infinity"
+ },
+ {
+ "description": "[basx562] Near-specials (Conversion_syntax)",
+ "string": "NaNq"
+ },
+ {
+ "description": "[basx563] Near-specials (Conversion_syntax)",
+ "string": "NaNs"
+ },
+ {
+ "description": "[dqbas939] overflow results at different rounding modes (Overflow & Inexact & Rounded)",
+ "string": "-7e10000"
+ },
+ {
+ "description": "[dqbsr534] negatives (Rounded & Inexact)",
+ "string": "-1.11111111111111111111111111111234650"
+ },
+ {
+ "description": "[dqbsr535] negatives (Rounded & Inexact)",
+ "string": "-1.11111111111111111111111111111234551"
+ },
+ {
+ "description": "[dqbsr533] negatives (Rounded & Inexact)",
+ "string": "-1.11111111111111111111111111111234550"
+ },
+ {
+ "description": "[dqbsr532] negatives (Rounded & Inexact)",
+ "string": "-1.11111111111111111111111111111234549"
+ },
+ {
+ "description": "[dqbsr432] check rounding modes heeded (Rounded & Inexact)",
+ "string": "1.11111111111111111111111111111234549"
+ },
+ {
+ "description": "[dqbsr433] check rounding modes heeded (Rounded & Inexact)",
+ "string": "1.11111111111111111111111111111234550"
+ },
+ {
+ "description": "[dqbsr435] check rounding modes heeded (Rounded & Inexact)",
+ "string": "1.11111111111111111111111111111234551"
+ },
+ {
+ "description": "[dqbsr434] check rounding modes heeded (Rounded & Inexact)",
+ "string": "1.11111111111111111111111111111234650"
+ },
+ {
+ "description": "[dqbas938] overflow results at different rounding modes (Overflow & Inexact & Rounded)",
+ "string": "7e10000"
+ },
+ {
+ "description": "Inexact rounding#1",
+ "string": "100000000000000000000000000000000000000000000000000000000001"
+ },
+ {
+ "description": "Inexact rounding#2",
+ "string": "1E-6177"
+ }
+ ]
+}
+`},
+
+ {"decimal128-5.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "valid": [
+ {
+ "description": "[decq035] fold-downs (more below) (Clamped)",
+ "bson": "18000000136400000000807F1BCF85B27059C8A43CFE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.23E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.230000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq037] fold-downs (more below) (Clamped)",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq077] Nmin and below (Subnormal)",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.100000000000000000000000000000000E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000000E-6144\"}}"
+ },
+ {
+ "description": "[decq078] Nmin and below (Subnormal)",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000000E-6144\"}}"
+ },
+ {
+ "description": "[decq079] Nmin and below (Subnormal)",
+ "bson": "180000001364000A00000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000000000000000000000000000010E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-6175\"}}"
+ },
+ {
+ "description": "[decq080] Nmin and below (Subnormal)",
+ "bson": "180000001364000A00000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-6175\"}}"
+ },
+ {
+ "description": "[decq081] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000020000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000000000000000000000000000001E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6175\"}}"
+ },
+ {
+ "description": "[decq082] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000020000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6175\"}}"
+ },
+ {
+ "description": "[decq083] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000000000000000000000000000001E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6176\"}}"
+ },
+ {
+ "description": "[decq084] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6176\"}}"
+ },
+ {
+ "description": "[decq090] underflows cannot be tested for simple copies, check edge cases (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1e-6176\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6176\"}}"
+ },
+ {
+ "description": "[decq100] underflows cannot be tested for simple copies, check edge cases (Subnormal)",
+ "bson": "18000000136400FFFFFFFF095BC138938D44C64D31000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"999999999999999999999999999999999e-6176\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"9.99999999999999999999999999999999E-6144\"}}"
+ },
+ {
+ "description": "[decq130] fold-downs (more below) (Clamped)",
+ "bson": "18000000136400000000807F1BCF85B27059C8A43CFEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.23E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.230000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq132] fold-downs (more below) (Clamped)",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq177] Nmin and below (Subnormal)",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.100000000000000000000000000000000E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.00000000000000000000000000000000E-6144\"}}"
+ },
+ {
+ "description": "[decq178] Nmin and below (Subnormal)",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.00000000000000000000000000000000E-6144\"}}"
+ },
+ {
+ "description": "[decq179] Nmin and below (Subnormal)",
+ "bson": "180000001364000A00000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000000000000000000000000000000010E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.0E-6175\"}}"
+ },
+ {
+ "description": "[decq180] Nmin and below (Subnormal)",
+ "bson": "180000001364000A00000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.0E-6175\"}}"
+ },
+ {
+ "description": "[decq181] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000028000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000000000000000000000000000001E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6175\"}}"
+ },
+ {
+ "description": "[decq182] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000028000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6175\"}}"
+ },
+ {
+ "description": "[decq183] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000000000000000000000000000000001E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6176\"}}"
+ },
+ {
+ "description": "[decq184] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6176\"}}"
+ },
+ {
+ "description": "[decq190] underflow edge cases (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1e-6176\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6176\"}}"
+ },
+ {
+ "description": "[decq200] underflow edge cases (Subnormal)",
+ "bson": "18000000136400FFFFFFFF095BC138938D44C64D31008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-999999999999999999999999999999999e-6176\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.99999999999999999999999999999999E-6144\"}}"
+ },
+ {
+ "description": "[decq400] zeros (Clamped)",
+ "bson": "180000001364000000000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-8000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6176\"}}"
+ },
+ {
+ "description": "[decq401] zeros (Clamped)",
+ "bson": "180000001364000000000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6177\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6176\"}}"
+ },
+ {
+ "description": "[decq414] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6112\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6111\"}}"
+ },
+ {
+ "description": "[decq416] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6111\"}}"
+ },
+ {
+ "description": "[decq418] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+8000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6111\"}}"
+ },
+ {
+ "description": "[decq420] negative zeros (Clamped)",
+ "bson": "180000001364000000000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-8000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6176\"}}"
+ },
+ {
+ "description": "[decq421] negative zeros (Clamped)",
+ "bson": "180000001364000000000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6177\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6176\"}}"
+ },
+ {
+ "description": "[decq434] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6112\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6111\"}}"
+ },
+ {
+ "description": "[decq436] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6111\"}}"
+ },
+ {
+ "description": "[decq438] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+8000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6111\"}}"
+ },
+ {
+ "description": "[decq601] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq603] fold-down full sequence (Clamped)",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000000E+6143\"}}"
+ },
+ {
+ "description": "[decq605] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000000080264B91C02220BE377E00FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6142\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000000000E+6142\"}}"
+ },
+ {
+ "description": "[decq607] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000000040EAED7446D09C2C9F0C00FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6141\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000E+6141\"}}"
+ },
+ {
+ "description": "[decq609] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000A0CA17726DAE0F1E430100FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6140\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000E+6140\"}}"
+ },
+ {
+ "description": "[decq611] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000106102253E5ECE4F200000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6139\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000000E+6139\"}}"
+ },
+ {
+ "description": "[decq613] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000E83C80D09F3C2E3B030000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6138\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000E+6138\"}}"
+ },
+ {
+ "description": "[decq615] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000E4D20CC8DCD2B752000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6137\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000E+6137\"}}"
+ },
+ {
+ "description": "[decq617] fold-down full sequence (Clamped)",
+ "bson": "180000001364000000004A48011416954508000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6136\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000E+6136\"}}"
+ },
+ {
+ "description": "[decq619] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000A1EDCCCE1BC2D300000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6135\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000E+6135\"}}"
+ },
+ {
+ "description": "[decq621] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000080F64AE1C7022D1500000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6134\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000E+6134\"}}"
+ },
+ {
+ "description": "[decq623] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000040B2BAC9E0191E0200000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6133\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000E+6133\"}}"
+ },
+ {
+ "description": "[decq625] fold-down full sequence (Clamped)",
+ "bson": "180000001364000000A0DEC5ADC935360000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6132\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000E+6132\"}}"
+ },
+ {
+ "description": "[decq627] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000010632D5EC76B050000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6131\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000E+6131\"}}"
+ },
+ {
+ "description": "[decq629] fold-down full sequence (Clamped)",
+ "bson": "180000001364000000E8890423C78A000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6130\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000E+6130\"}}"
+ },
+ {
+ "description": "[decq631] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000064A7B3B6E00D000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6129\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000E+6129\"}}"
+ },
+ {
+ "description": "[decq633] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000008A5D78456301000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6128\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000E+6128\"}}"
+ },
+ {
+ "description": "[decq635] fold-down full sequence (Clamped)",
+ "bson": "180000001364000000C16FF2862300000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6127\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000E+6127\"}}"
+ },
+ {
+ "description": "[decq637] fold-down full sequence (Clamped)",
+ "bson": "180000001364000080C6A47E8D0300000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6126\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000E+6126\"}}"
+ },
+ {
+ "description": "[decq639] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000407A10F35A0000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6125\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000E+6125\"}}"
+ },
+ {
+ "description": "[decq641] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000A0724E18090000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6124\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000E+6124\"}}"
+ },
+ {
+ "description": "[decq643] fold-down full sequence (Clamped)",
+ "bson": "180000001364000010A5D4E8000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6123\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000E+6123\"}}"
+ },
+ {
+ "description": "[decq645] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000E8764817000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6122\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000E+6122\"}}"
+ },
+ {
+ "description": "[decq647] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000E40B5402000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6121\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000E+6121\"}}"
+ },
+ {
+ "description": "[decq649] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000CA9A3B00000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6120\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000E+6120\"}}"
+ },
+ {
+ "description": "[decq651] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000E1F50500000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6119\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000E+6119\"}}"
+ },
+ {
+ "description": "[decq653] fold-down full sequence (Clamped)",
+ "bson": "180000001364008096980000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6118\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000E+6118\"}}"
+ },
+ {
+ "description": "[decq655] fold-down full sequence (Clamped)",
+ "bson": "1800000013640040420F0000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6117\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000E+6117\"}}"
+ },
+ {
+ "description": "[decq657] fold-down full sequence (Clamped)",
+ "bson": "18000000136400A086010000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6116\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000E+6116\"}}"
+ },
+ {
+ "description": "[decq659] fold-down full sequence (Clamped)",
+ "bson": "180000001364001027000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6115\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000E+6115\"}}"
+ },
+ {
+ "description": "[decq661] fold-down full sequence (Clamped)",
+ "bson": "18000000136400E803000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6114\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000E+6114\"}}"
+ },
+ {
+ "description": "[decq663] fold-down full sequence (Clamped)",
+ "bson": "180000001364006400000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6113\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+6113\"}}"
+ },
+ {
+ "description": "[decq665] fold-down full sequence (Clamped)",
+ "bson": "180000001364000A00000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6112\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+6112\"}}"
+ }
+ ]
+}
+`},
+
+ {"decimal128-6.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "parseErrors": [
+ {
+ "description": "Incomplete Exponent",
+ "string": "1e"
+ },
+ {
+ "description": "Exponent at the beginning",
+ "string": "E01"
+ },
+ {
+ "description": "Just a decimal place",
+ "string": "."
+ },
+ {
+ "description": "2 decimal places",
+ "string": "..3"
+ },
+ {
+ "description": "2 decimal places",
+ "string": ".13.3"
+ },
+ {
+ "description": "2 decimal places",
+ "string": "1..3"
+ },
+ {
+ "description": "2 decimal places",
+ "string": "1.3.4"
+ },
+ {
+ "description": "2 decimal places",
+ "string": "1.34."
+ },
+ {
+ "description": "Decimal with no digits",
+ "string": ".e"
+ },
+ {
+ "description": "2 signs",
+ "string": "+-32.4"
+ },
+ {
+ "description": "2 signs",
+ "string": "-+32.4"
+ },
+ {
+ "description": "2 negative signs",
+ "string": "--32.4"
+ },
+ {
+ "description": "2 negative signs",
+ "string": "-32.-4"
+ },
+ {
+ "description": "End in negative sign",
+ "string": "32.0-"
+ },
+ {
+ "description": "2 negative signs",
+ "string": "32.4E--21"
+ },
+ {
+ "description": "2 negative signs",
+ "string": "32.4E-2-1"
+ },
+ {
+ "description": "2 signs",
+ "string": "32.4E+-21"
+ },
+ {
+ "description": "Empty string",
+ "string": ""
+ },
+ {
+ "description": "leading white space positive number",
+ "string": " 1"
+ },
+ {
+ "description": "leading white space negative number",
+ "string": " -1"
+ },
+ {
+ "description": "trailing white space",
+ "string": "1 "
+ },
+ {
+ "description": "Invalid",
+ "string": "E"
+ },
+ {
+ "description": "Invalid",
+ "string": "invalid"
+ },
+ {
+ "description": "Invalid",
+ "string": "i"
+ },
+ {
+ "description": "Invalid",
+ "string": "in"
+ },
+ {
+ "description": "Invalid",
+ "string": "-in"
+ },
+ {
+ "description": "Invalid",
+ "string": "Na"
+ },
+ {
+ "description": "Invalid",
+ "string": "-Na"
+ },
+ {
+ "description": "Invalid",
+ "string": "1.23abc"
+ },
+ {
+ "description": "Invalid",
+ "string": "1.23abcE+02"
+ },
+ {
+ "description": "Invalid",
+ "string": "1.23E+0aabs2"
+ }
+ ]
+}
+`},
+
+ {"decimal128-7.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "parseErrors": [
+ {
+ "description": "[basx572] Near-specials (Conversion_syntax)",
+ "string": "-9Inf"
+ },
+ {
+ "description": "[basx516] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "-1-"
+ },
+ {
+ "description": "[basx533] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "0000.."
+ },
+ {
+ "description": "[basx534] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": ".0000."
+ },
+ {
+ "description": "[basx535] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "00..00"
+ },
+ {
+ "description": "[basx569] Near-specials (Conversion_syntax)",
+ "string": "0Inf"
+ },
+ {
+ "description": "[basx571] Near-specials (Conversion_syntax)",
+ "string": "-0Inf"
+ },
+ {
+ "description": "[basx575] Near-specials (Conversion_syntax)",
+ "string": "0sNaN"
+ },
+ {
+ "description": "[basx503] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "++1"
+ },
+ {
+ "description": "[basx504] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "--1"
+ },
+ {
+ "description": "[basx505] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "-+1"
+ },
+ {
+ "description": "[basx506] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "+-1"
+ },
+ {
+ "description": "[basx510] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": " +1"
+ },
+ {
+ "description": "[basx513] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": " + 1"
+ },
+ {
+ "description": "[basx514] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": " - 1"
+ },
+ {
+ "description": "[basx501] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "."
+ },
+ {
+ "description": "[basx502] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": ".."
+ },
+ {
+ "description": "[basx519] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": ""
+ },
+ {
+ "description": "[basx525] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "e100"
+ },
+ {
+ "description": "[basx549] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "e+1"
+ },
+ {
+ "description": "[basx577] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": ".e+1"
+ },
+ {
+ "description": "[basx578] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "+.e+1"
+ },
+ {
+ "description": "[basx581] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "E+1"
+ },
+ {
+ "description": "[basx582] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": ".E+1"
+ },
+ {
+ "description": "[basx583] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "+.E+1"
+ },
+ {
+ "description": "[basx579] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "-.e+"
+ },
+ {
+ "description": "[basx580] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "-.e"
+ },
+ {
+ "description": "[basx584] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "-.E+"
+ },
+ {
+ "description": "[basx585] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "-.E"
+ },
+ {
+ "description": "[basx589] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "+.Inf"
+ },
+ {
+ "description": "[basx586] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": ".NaN"
+ },
+ {
+ "description": "[basx587] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "-.NaN"
+ },
+ {
+ "description": "[basx545] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "ONE"
+ },
+ {
+ "description": "[basx561] Near-specials (Conversion_syntax)",
+ "string": "qNaN"
+ },
+ {
+ "description": "[basx573] Near-specials (Conversion_syntax)",
+ "string": "-sNa"
+ },
+ {
+ "description": "[basx588] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "+.sNaN"
+ },
+ {
+ "description": "[basx544] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "ten"
+ },
+ {
+ "description": "[basx527] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "u0b65"
+ },
+ {
+ "description": "[basx526] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "u0e5a"
+ },
+ {
+ "description": "[basx515] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "x"
+ },
+ {
+ "description": "[basx574] Near-specials (Conversion_syntax)",
+ "string": "xNaN"
+ },
+ {
+ "description": "[basx530] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": ".123.5"
+ },
+ {
+ "description": "[basx500] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1..2"
+ },
+ {
+ "description": "[basx542] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1e1.0"
+ },
+ {
+ "description": "[basx553] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E+1.2.3"
+ },
+ {
+ "description": "[basx543] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1e123e"
+ },
+ {
+ "description": "[basx552] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E+1.2"
+ },
+ {
+ "description": "[basx546] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1e.1"
+ },
+ {
+ "description": "[basx547] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1e1."
+ },
+ {
+ "description": "[basx554] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E++1"
+ },
+ {
+ "description": "[basx555] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E--1"
+ },
+ {
+ "description": "[basx556] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E+-1"
+ },
+ {
+ "description": "[basx557] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E-+1"
+ },
+ {
+ "description": "[basx558] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E'1"
+ },
+ {
+ "description": "[basx559] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E\"1"
+ },
+ {
+ "description": "[basx520] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1e-"
+ },
+ {
+ "description": "[basx560] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E"
+ },
+ {
+ "description": "[basx548] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1ee"
+ },
+ {
+ "description": "[basx551] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1.2.1"
+ },
+ {
+ "description": "[basx550] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1.23.4"
+ },
+ {
+ "description": "[basx529] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1.34.5"
+ },
+ {
+ "description": "[basx531] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "01.35."
+ },
+ {
+ "description": "[basx532] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "01.35-"
+ },
+ {
+ "description": "[basx518] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "3+"
+ },
+ {
+ "description": "[basx521] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "7e99999a"
+ },
+ {
+ "description": "[basx570] Near-specials (Conversion_syntax)",
+ "string": "9Inf"
+ },
+ {
+ "description": "[basx512] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "12 "
+ },
+ {
+ "description": "[basx517] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "12-"
+ },
+ {
+ "description": "[basx507] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "12e"
+ },
+ {
+ "description": "[basx508] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "12e++"
+ },
+ {
+ "description": "[basx509] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "12f4"
+ },
+ {
+ "description": "[basx536] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111e*123"
+ },
+ {
+ "description": "[basx537] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111e123-"
+ },
+ {
+ "description": "[basx540] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111e1*23"
+ },
+ {
+ "description": "[basx538] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111e+12+"
+ },
+ {
+ "description": "[basx539] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111e1-3-"
+ },
+ {
+ "description": "[basx541] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111E1e+3"
+ },
+ {
+ "description": "[basx528] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "123,65"
+ },
+ {
+ "description": "[basx523] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "7e12356789012x"
+ },
+ {
+ "description": "[basx522] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "7e123567890x"
+ }
+ ]
+}
+`},
+}
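
The two corpora above (decimal128-6.json, decimal128-7.json) consist entirely of parseErrors cases: strings the Decimal128 parser must reject. As a rough, illustrative sketch of how such cases are exercised (assuming the ParseDecimal128 helper that accompanies the Decimal128 support in this vendored tree; the test name and the trimmed string list are invented for the example):

    package bson_test

    import (
        "testing"

        "gopkg.in/mgo.v2/bson"
    )

    // A handful of strings copied from the parseErrors cases above; every one
    // of them must be rejected by the Decimal128 parser.
    var badDecimal128Strings = []string{
        "1e", "E01", ".", "..3", "+-32.4", "32.4E--21", "", " 1", "1 ", "-9Inf", "1e1.0",
    }

    func TestDecimal128ParseErrors(t *testing.T) {
        for _, s := range badDecimal128Strings {
            if _, err := bson.ParseDecimal128(s); err == nil {
                t.Errorf("ParseDecimal128(%q) succeeded, want an error", s)
            }
        }
    }
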
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decode.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decode.go
new file mode 100644
index 00000000000..7c2d8416afe
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/decode.go
@@ -0,0 +1,849 @@
+// BSON library for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// gobson - BSON library for Go.
+
+package bson
+
+import (
+ "fmt"
+ "math"
+ "net/url"
+ "reflect"
+ "strconv"
+ "sync"
+ "time"
+)
+
+type decoder struct {
+ in []byte
+ i int
+ docType reflect.Type
+}
+
+var typeM = reflect.TypeOf(M{})
+
+func newDecoder(in []byte) *decoder {
+ return &decoder{in, 0, typeM}
+}
+
+// --------------------------------------------------------------------------
+// Some helper functions.
+
+func corrupted() {
+ panic("Document is corrupted")
+}
+
+func settableValueOf(i interface{}) reflect.Value {
+ v := reflect.ValueOf(i)
+ sv := reflect.New(v.Type()).Elem()
+ sv.Set(v)
+ return sv
+}
+
+// --------------------------------------------------------------------------
+// Unmarshaling of documents.
+
+const (
+ setterUnknown = iota
+ setterNone
+ setterType
+ setterAddr
+)
+
+var setterStyles map[reflect.Type]int
+var setterIface reflect.Type
+var setterMutex sync.RWMutex
+
+func init() {
+ var iface Setter
+ setterIface = reflect.TypeOf(&iface).Elem()
+ setterStyles = make(map[reflect.Type]int)
+}
+
+func setterStyle(outt reflect.Type) int {
+ setterMutex.RLock()
+ style := setterStyles[outt]
+ setterMutex.RUnlock()
+ if style == setterUnknown {
+ setterMutex.Lock()
+ defer setterMutex.Unlock()
+ if outt.Implements(setterIface) {
+ setterStyles[outt] = setterType
+ } else if reflect.PtrTo(outt).Implements(setterIface) {
+ setterStyles[outt] = setterAddr
+ } else {
+ setterStyles[outt] = setterNone
+ }
+ style = setterStyles[outt]
+ }
+ return style
+}
+
+func getSetter(outt reflect.Type, out reflect.Value) Setter {
+ style := setterStyle(outt)
+ if style == setterNone {
+ return nil
+ }
+ if style == setterAddr {
+ if !out.CanAddr() {
+ return nil
+ }
+ out = out.Addr()
+ } else if outt.Kind() == reflect.Ptr && out.IsNil() {
+ out.Set(reflect.New(outt.Elem()))
+ }
+ return out.Interface().(Setter)
+}
+
+func clearMap(m reflect.Value) {
+ var none reflect.Value
+ for _, k := range m.MapKeys() {
+ m.SetMapIndex(k, none)
+ }
+}
+
+func (d *decoder) readDocTo(out reflect.Value) {
+ var elemType reflect.Type
+ outt := out.Type()
+ outk := outt.Kind()
+
+ for {
+ if outk == reflect.Ptr && out.IsNil() {
+ out.Set(reflect.New(outt.Elem()))
+ }
+ if setter := getSetter(outt, out); setter != nil {
+ var raw Raw
+ d.readDocTo(reflect.ValueOf(&raw))
+ err := setter.SetBSON(raw)
+ if _, ok := err.(*TypeError); err != nil && !ok {
+ panic(err)
+ }
+ return
+ }
+ if outk == reflect.Ptr {
+ out = out.Elem()
+ outt = out.Type()
+ outk = out.Kind()
+ continue
+ }
+ break
+ }
+
+ var fieldsMap map[string]fieldInfo
+ var inlineMap reflect.Value
+ start := d.i
+
+ origout := out
+ if outk == reflect.Interface {
+ if d.docType.Kind() == reflect.Map {
+ mv := reflect.MakeMap(d.docType)
+ out.Set(mv)
+ out = mv
+ } else {
+ dv := reflect.New(d.docType).Elem()
+ out.Set(dv)
+ out = dv
+ }
+ outt = out.Type()
+ outk = outt.Kind()
+ }
+
+ docType := d.docType
+ keyType := typeString
+ convertKey := false
+ switch outk {
+ case reflect.Map:
+ keyType = outt.Key()
+ if keyType.Kind() != reflect.String {
+ panic("BSON map must have string keys. Got: " + outt.String())
+ }
+ if keyType != typeString {
+ convertKey = true
+ }
+ elemType = outt.Elem()
+ if elemType == typeIface {
+ d.docType = outt
+ }
+ if out.IsNil() {
+ out.Set(reflect.MakeMap(out.Type()))
+ } else if out.Len() > 0 {
+ clearMap(out)
+ }
+ case reflect.Struct:
+ if outt != typeRaw {
+ sinfo, err := getStructInfo(out.Type())
+ if err != nil {
+ panic(err)
+ }
+ fieldsMap = sinfo.FieldsMap
+ out.Set(sinfo.Zero)
+ if sinfo.InlineMap != -1 {
+ inlineMap = out.Field(sinfo.InlineMap)
+ if !inlineMap.IsNil() && inlineMap.Len() > 0 {
+ clearMap(inlineMap)
+ }
+ elemType = inlineMap.Type().Elem()
+ if elemType == typeIface {
+ d.docType = inlineMap.Type()
+ }
+ }
+ }
+ case reflect.Slice:
+ switch outt.Elem() {
+ case typeDocElem:
+ origout.Set(d.readDocElems(outt))
+ return
+ case typeRawDocElem:
+ origout.Set(d.readRawDocElems(outt))
+ return
+ }
+ fallthrough
+ default:
+ panic("Unsupported document type for unmarshalling: " + out.Type().String())
+ }
+
+ end := int(d.readInt32())
+ end += d.i - 4
+ if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' {
+ corrupted()
+ }
+ for d.in[d.i] != '\x00' {
+ kind := d.readByte()
+ name := d.readCStr()
+ if d.i >= end {
+ corrupted()
+ }
+
+ switch outk {
+ case reflect.Map:
+ e := reflect.New(elemType).Elem()
+ if d.readElemTo(e, kind) {
+ k := reflect.ValueOf(name)
+ if convertKey {
+ k = k.Convert(keyType)
+ }
+ out.SetMapIndex(k, e)
+ }
+ case reflect.Struct:
+ if outt == typeRaw {
+ d.dropElem(kind)
+ } else {
+ if info, ok := fieldsMap[name]; ok {
+ if info.Inline == nil {
+ d.readElemTo(out.Field(info.Num), kind)
+ } else {
+ d.readElemTo(out.FieldByIndex(info.Inline), kind)
+ }
+ } else if inlineMap.IsValid() {
+ if inlineMap.IsNil() {
+ inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
+ }
+ e := reflect.New(elemType).Elem()
+ if d.readElemTo(e, kind) {
+ inlineMap.SetMapIndex(reflect.ValueOf(name), e)
+ }
+ } else {
+ d.dropElem(kind)
+ }
+ }
+ case reflect.Slice:
+ }
+
+ if d.i >= end {
+ corrupted()
+ }
+ }
+ d.i++ // '\x00'
+ if d.i != end {
+ corrupted()
+ }
+ d.docType = docType
+
+ if outt == typeRaw {
+ out.Set(reflect.ValueOf(Raw{0x03, d.in[start:d.i]}))
+ }
+}
+
+func (d *decoder) readArrayDocTo(out reflect.Value) {
+ end := int(d.readInt32())
+ end += d.i - 4
+ if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' {
+ corrupted()
+ }
+ i := 0
+ l := out.Len()
+ for d.in[d.i] != '\x00' {
+ if i >= l {
+ panic("Length mismatch on array field")
+ }
+ kind := d.readByte()
+ for d.i < end && d.in[d.i] != '\x00' {
+ d.i++
+ }
+ if d.i >= end {
+ corrupted()
+ }
+ d.i++
+ d.readElemTo(out.Index(i), kind)
+ if d.i >= end {
+ corrupted()
+ }
+ i++
+ }
+ if i != l {
+ panic("Length mismatch on array field")
+ }
+ d.i++ // '\x00'
+ if d.i != end {
+ corrupted()
+ }
+}
+
+func (d *decoder) readSliceDoc(t reflect.Type) interface{} {
+ tmp := make([]reflect.Value, 0, 8)
+ elemType := t.Elem()
+ if elemType == typeRawDocElem {
+ d.dropElem(0x04)
+ return reflect.Zero(t).Interface()
+ }
+
+ end := int(d.readInt32())
+ end += d.i - 4
+ if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' {
+ corrupted()
+ }
+ for d.in[d.i] != '\x00' {
+ kind := d.readByte()
+ for d.i < end && d.in[d.i] != '\x00' {
+ d.i++
+ }
+ if d.i >= end {
+ corrupted()
+ }
+ d.i++
+ e := reflect.New(elemType).Elem()
+ if d.readElemTo(e, kind) {
+ tmp = append(tmp, e)
+ }
+ if d.i >= end {
+ corrupted()
+ }
+ }
+ d.i++ // '\x00'
+ if d.i != end {
+ corrupted()
+ }
+
+ n := len(tmp)
+ slice := reflect.MakeSlice(t, n, n)
+ for i := 0; i != n; i++ {
+ slice.Index(i).Set(tmp[i])
+ }
+ return slice.Interface()
+}
+
+var typeSlice = reflect.TypeOf([]interface{}{})
+var typeIface = typeSlice.Elem()
+
+func (d *decoder) readDocElems(typ reflect.Type) reflect.Value {
+ docType := d.docType
+ d.docType = typ
+ slice := make([]DocElem, 0, 8)
+ d.readDocWith(func(kind byte, name string) {
+ e := DocElem{Name: name}
+ v := reflect.ValueOf(&e.Value)
+ if d.readElemTo(v.Elem(), kind) {
+ slice = append(slice, e)
+ }
+ })
+ slicev := reflect.New(typ).Elem()
+ slicev.Set(reflect.ValueOf(slice))
+ d.docType = docType
+ return slicev
+}
+
+func (d *decoder) readRawDocElems(typ reflect.Type) reflect.Value {
+ docType := d.docType
+ d.docType = typ
+ slice := make([]RawDocElem, 0, 8)
+ d.readDocWith(func(kind byte, name string) {
+ e := RawDocElem{Name: name}
+ v := reflect.ValueOf(&e.Value)
+ if d.readElemTo(v.Elem(), kind) {
+ slice = append(slice, e)
+ }
+ })
+ slicev := reflect.New(typ).Elem()
+ slicev.Set(reflect.ValueOf(slice))
+ d.docType = docType
+ return slicev
+}
+
+func (d *decoder) readDocWith(f func(kind byte, name string)) {
+ end := int(d.readInt32())
+ end += d.i - 4
+ if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' {
+ corrupted()
+ }
+ for d.in[d.i] != '\x00' {
+ kind := d.readByte()
+ name := d.readCStr()
+ if d.i >= end {
+ corrupted()
+ }
+ f(kind, name)
+ if d.i >= end {
+ corrupted()
+ }
+ }
+ d.i++ // '\x00'
+ if d.i != end {
+ corrupted()
+ }
+}
+
+// --------------------------------------------------------------------------
+// Unmarshaling of individual elements within a document.
+
+var blackHole = settableValueOf(struct{}{})
+
+func (d *decoder) dropElem(kind byte) {
+ d.readElemTo(blackHole, kind)
+}
+
+// Attempt to decode an element from the document and put it into out.
+// If the types are not compatible, the returned ok value will be
+// false and out will be unchanged.
+func (d *decoder) readElemTo(out reflect.Value, kind byte) (good bool) {
+
+ start := d.i
+
+ if kind == 0x03 {
+ // Delegate unmarshaling of documents.
+ outt := out.Type()
+ outk := out.Kind()
+ switch outk {
+ case reflect.Interface, reflect.Ptr, reflect.Struct, reflect.Map:
+ d.readDocTo(out)
+ return true
+ }
+ if setterStyle(outt) != setterNone {
+ d.readDocTo(out)
+ return true
+ }
+ if outk == reflect.Slice {
+ switch outt.Elem() {
+ case typeDocElem:
+ out.Set(d.readDocElems(outt))
+ case typeRawDocElem:
+ out.Set(d.readRawDocElems(outt))
+ default:
+ d.readDocTo(blackHole)
+ }
+ return true
+ }
+ d.readDocTo(blackHole)
+ return true
+ }
+
+ var in interface{}
+
+ switch kind {
+ case 0x01: // Float64
+ in = d.readFloat64()
+ case 0x02: // UTF-8 string
+ in = d.readStr()
+ case 0x03: // Document
+ panic("Can't happen. Handled above.")
+ case 0x04: // Array
+ outt := out.Type()
+ if setterStyle(outt) != setterNone {
+ // Skip the value so its data is handed to the setter below.
+ d.dropElem(kind)
+ break
+ }
+ for outt.Kind() == reflect.Ptr {
+ outt = outt.Elem()
+ }
+ switch outt.Kind() {
+ case reflect.Array:
+ d.readArrayDocTo(out)
+ return true
+ case reflect.Slice:
+ in = d.readSliceDoc(outt)
+ default:
+ in = d.readSliceDoc(typeSlice)
+ }
+ case 0x05: // Binary
+ b := d.readBinary()
+ if b.Kind == 0x00 || b.Kind == 0x02 {
+ in = b.Data
+ } else {
+ in = b
+ }
+ case 0x06: // Undefined (obsolete, but still seen in the wild)
+ in = Undefined
+ case 0x07: // ObjectId
+ in = ObjectId(d.readBytes(12))
+ case 0x08: // Bool
+ in = d.readBool()
+	case 0x09: // UTC datetime
+		// MongoDB stores datetimes as milliseconds since the Unix epoch.
+ i := d.readInt64()
+ if i == -62135596800000 {
+ in = time.Time{} // In UTC for convenience.
+ } else {
+ in = time.Unix(i/1e3, i%1e3*1e6)
+ }
+ case 0x0A: // Nil
+ in = nil
+ case 0x0B: // RegEx
+ in = d.readRegEx()
+ case 0x0C:
+ in = DBPointer{Namespace: d.readStr(), Id: ObjectId(d.readBytes(12))}
+ case 0x0D: // JavaScript without scope
+ in = JavaScript{Code: d.readStr()}
+ case 0x0E: // Symbol
+ in = Symbol(d.readStr())
+ case 0x0F: // JavaScript with scope
+ d.i += 4 // Skip length
+ js := JavaScript{d.readStr(), make(M)}
+ d.readDocTo(reflect.ValueOf(js.Scope))
+ in = js
+ case 0x10: // Int32
+ in = int(d.readInt32())
+ case 0x11: // Mongo-specific timestamp
+ in = MongoTimestamp(d.readInt64())
+ case 0x12: // Int64
+ in = d.readInt64()
+ case 0x13: // Decimal128
+ in = Decimal128{
+ l: uint64(d.readInt64()),
+ h: uint64(d.readInt64()),
+ }
+ case 0x7F: // Max key
+ in = MaxKey
+ case 0xFF: // Min key
+ in = MinKey
+ default:
+ panic(fmt.Sprintf("Unknown element kind (0x%02X)", kind))
+ }
+
+ outt := out.Type()
+
+ if outt == typeRaw {
+ out.Set(reflect.ValueOf(Raw{kind, d.in[start:d.i]}))
+ return true
+ }
+
+ if setter := getSetter(outt, out); setter != nil {
+ err := setter.SetBSON(Raw{kind, d.in[start:d.i]})
+ if err == SetZero {
+ out.Set(reflect.Zero(outt))
+ return true
+ }
+ if err == nil {
+ return true
+ }
+ if _, ok := err.(*TypeError); !ok {
+ panic(err)
+ }
+ return false
+ }
+
+ if in == nil {
+ out.Set(reflect.Zero(outt))
+ return true
+ }
+
+ outk := outt.Kind()
+
+ // Dereference and initialize pointer if necessary.
+ first := true
+ for outk == reflect.Ptr {
+ if !out.IsNil() {
+ out = out.Elem()
+ } else {
+ elem := reflect.New(outt.Elem())
+ if first {
+ // Only set if value is compatible.
+ first = false
+ defer func(out, elem reflect.Value) {
+ if good {
+ out.Set(elem)
+ }
+ }(out, elem)
+ } else {
+ out.Set(elem)
+ }
+ out = elem
+ }
+ outt = out.Type()
+ outk = outt.Kind()
+ }
+
+ inv := reflect.ValueOf(in)
+ if outt == inv.Type() {
+ out.Set(inv)
+ return true
+ }
+
+ switch outk {
+ case reflect.Interface:
+ out.Set(inv)
+ return true
+ case reflect.String:
+ switch inv.Kind() {
+ case reflect.String:
+ out.SetString(inv.String())
+ return true
+ case reflect.Slice:
+ if b, ok := in.([]byte); ok {
+ out.SetString(string(b))
+ return true
+ }
+ case reflect.Int, reflect.Int64:
+ if outt == typeJSONNumber {
+ out.SetString(strconv.FormatInt(inv.Int(), 10))
+ return true
+ }
+ case reflect.Float64:
+ if outt == typeJSONNumber {
+ out.SetString(strconv.FormatFloat(inv.Float(), 'f', -1, 64))
+ return true
+ }
+ }
+ case reflect.Slice, reflect.Array:
+		// Remember, array (0x04) slices are built with the correct
+		// element type. If we are here, this must be a cross-BSON-kind
+		// conversion (e.g. a 0x05 binary element unmarshalled into a string).
+ if outt.Elem().Kind() != reflect.Uint8 {
+ break
+ }
+ switch inv.Kind() {
+ case reflect.String:
+ slice := []byte(inv.String())
+ out.Set(reflect.ValueOf(slice))
+ return true
+ case reflect.Slice:
+ switch outt.Kind() {
+ case reflect.Array:
+ reflect.Copy(out, inv)
+ case reflect.Slice:
+ out.SetBytes(inv.Bytes())
+ }
+ return true
+ }
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ switch inv.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ out.SetInt(inv.Int())
+ return true
+ case reflect.Float32, reflect.Float64:
+ out.SetInt(int64(inv.Float()))
+ return true
+ case reflect.Bool:
+ if inv.Bool() {
+ out.SetInt(1)
+ } else {
+ out.SetInt(0)
+ }
+ return true
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ panic("can't happen: no uint types in BSON (!?)")
+ }
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ switch inv.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ out.SetUint(uint64(inv.Int()))
+ return true
+ case reflect.Float32, reflect.Float64:
+ out.SetUint(uint64(inv.Float()))
+ return true
+ case reflect.Bool:
+ if inv.Bool() {
+ out.SetUint(1)
+ } else {
+ out.SetUint(0)
+ }
+ return true
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ panic("Can't happen. No uint types in BSON.")
+ }
+ case reflect.Float32, reflect.Float64:
+ switch inv.Kind() {
+ case reflect.Float32, reflect.Float64:
+ out.SetFloat(inv.Float())
+ return true
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ out.SetFloat(float64(inv.Int()))
+ return true
+ case reflect.Bool:
+ if inv.Bool() {
+ out.SetFloat(1)
+ } else {
+ out.SetFloat(0)
+ }
+ return true
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ panic("Can't happen. No uint types in BSON?")
+ }
+ case reflect.Bool:
+ switch inv.Kind() {
+ case reflect.Bool:
+ out.SetBool(inv.Bool())
+ return true
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ out.SetBool(inv.Int() != 0)
+ return true
+ case reflect.Float32, reflect.Float64:
+ out.SetBool(inv.Float() != 0)
+ return true
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ panic("Can't happen. No uint types in BSON?")
+ }
+ case reflect.Struct:
+ if outt == typeURL && inv.Kind() == reflect.String {
+ u, err := url.Parse(inv.String())
+ if err != nil {
+ panic(err)
+ }
+ out.Set(reflect.ValueOf(u).Elem())
+ return true
+ }
+ if outt == typeBinary {
+ if b, ok := in.([]byte); ok {
+ out.Set(reflect.ValueOf(Binary{Data: b}))
+ return true
+ }
+ }
+ }
+
+ return false
+}
+
+// --------------------------------------------------------------------------
+// Parsers of basic types.
+
+func (d *decoder) readRegEx() RegEx {
+ re := RegEx{}
+ re.Pattern = d.readCStr()
+ re.Options = d.readCStr()
+ return re
+}
+
+func (d *decoder) readBinary() Binary {
+ l := d.readInt32()
+ b := Binary{}
+ b.Kind = d.readByte()
+ b.Data = d.readBytes(l)
+ if b.Kind == 0x02 && len(b.Data) >= 4 {
+ // Weird obsolete format with redundant length.
+ b.Data = b.Data[4:]
+ }
+ return b
+}
+
+func (d *decoder) readStr() string {
+ l := d.readInt32()
+ b := d.readBytes(l - 1)
+ if d.readByte() != '\x00' {
+ corrupted()
+ }
+ return string(b)
+}
+
+func (d *decoder) readCStr() string {
+ start := d.i
+ end := start
+ l := len(d.in)
+ for ; end != l; end++ {
+ if d.in[end] == '\x00' {
+ break
+ }
+ }
+ d.i = end + 1
+ if d.i > l {
+ corrupted()
+ }
+ return string(d.in[start:end])
+}
+
+func (d *decoder) readBool() bool {
+ b := d.readByte()
+ if b == 0 {
+ return false
+ }
+ if b == 1 {
+ return true
+ }
+ panic(fmt.Sprintf("encoded boolean must be 1 or 0, found %d", b))
+}
+
+func (d *decoder) readFloat64() float64 {
+ return math.Float64frombits(uint64(d.readInt64()))
+}
+
+func (d *decoder) readInt32() int32 {
+ b := d.readBytes(4)
+ return int32((uint32(b[0]) << 0) |
+ (uint32(b[1]) << 8) |
+ (uint32(b[2]) << 16) |
+ (uint32(b[3]) << 24))
+}
+
+func (d *decoder) readInt64() int64 {
+ b := d.readBytes(8)
+ return int64((uint64(b[0]) << 0) |
+ (uint64(b[1]) << 8) |
+ (uint64(b[2]) << 16) |
+ (uint64(b[3]) << 24) |
+ (uint64(b[4]) << 32) |
+ (uint64(b[5]) << 40) |
+ (uint64(b[6]) << 48) |
+ (uint64(b[7]) << 56))
+}
+
+func (d *decoder) readByte() byte {
+ i := d.i
+ d.i++
+ if d.i > len(d.in) {
+ corrupted()
+ }
+ return d.in[i]
+}
+
+func (d *decoder) readBytes(length int32) []byte {
+ if length < 0 {
+ corrupted()
+ }
+ start := d.i
+ d.i += int(length)
+ if d.i < start || d.i > len(d.in) {
+ corrupted()
+ }
+ return d.in[start : start+int(length)]
+}
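
A minimal usage sketch for the decoder above (illustrative only, not part of the vendored source). It shows the three decoding paths the file implements: a map target (bson.M), a tagged struct, and a field whose type implements the Setter interface so the getSetter/SetBSON route is taken. The upperString type is made up for the example; bson.Marshal, bson.Unmarshal, bson.Raw.Unmarshal and bson.Setter are the package's public API.

    package main

    import (
        "fmt"
        "strings"

        "gopkg.in/mgo.v2/bson"
    )

    // upperString implements bson.Setter, so the decoder's getSetter path hands
    // it the raw element via SetBSON instead of decoding into it directly.
    type upperString string

    func (u *upperString) SetBSON(raw bson.Raw) error {
        var s string
        if err := raw.Unmarshal(&s); err != nil {
            return err
        }
        *u = upperString(strings.ToUpper(s))
        return nil
    }

    func main() {
        data, err := bson.Marshal(bson.M{"name": "gopher", "count": 3})
        if err != nil {
            panic(err)
        }

        // Decoding into bson.M exercises the map branch of readDocTo.
        var m bson.M
        if err := bson.Unmarshal(data, &m); err != nil {
            panic(err)
        }

        // Decoding into a struct goes through the fieldsMap handling; the
        // Setter field takes the getSetter/SetBSON route.
        var doc struct {
            Name  upperString `bson:"name"`
            Count int         `bson:"count"`
        }
        if err := bson.Unmarshal(data, &doc); err != nil {
            panic(err)
        }
        fmt.Println(m["count"], doc.Name, doc.Count) // 3 GOPHER 3
    }
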
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/encode.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/encode.go
new file mode 100644
index 00000000000..add39e865dd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/encode.go
@@ -0,0 +1,514 @@
+// BSON library for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// gobson - BSON library for Go.
+
+package bson
+
+import (
+ "encoding/json"
+ "fmt"
+ "math"
+ "net/url"
+ "reflect"
+ "strconv"
+ "time"
+)
+
+// --------------------------------------------------------------------------
+// Some internal infrastructure.
+
+var (
+ typeBinary = reflect.TypeOf(Binary{})
+ typeObjectId = reflect.TypeOf(ObjectId(""))
+ typeDBPointer = reflect.TypeOf(DBPointer{"", ObjectId("")})
+ typeSymbol = reflect.TypeOf(Symbol(""))
+ typeMongoTimestamp = reflect.TypeOf(MongoTimestamp(0))
+ typeOrderKey = reflect.TypeOf(MinKey)
+ typeDocElem = reflect.TypeOf(DocElem{})
+ typeRawDocElem = reflect.TypeOf(RawDocElem{})
+ typeRaw = reflect.TypeOf(Raw{})
+ typeURL = reflect.TypeOf(url.URL{})
+ typeTime = reflect.TypeOf(time.Time{})
+ typeString = reflect.TypeOf("")
+ typeJSONNumber = reflect.TypeOf(json.Number(""))
+)
+
+const itoaCacheSize = 32
+
+var itoaCache []string
+
+func init() {
+ itoaCache = make([]string, itoaCacheSize)
+ for i := 0; i != itoaCacheSize; i++ {
+ itoaCache[i] = strconv.Itoa(i)
+ }
+}
+
+func itoa(i int) string {
+ if i < itoaCacheSize {
+ return itoaCache[i]
+ }
+ return strconv.Itoa(i)
+}
+
+// --------------------------------------------------------------------------
+// Marshaling of the document value itself.
+
+type encoder struct {
+ out []byte
+}
+
+func (e *encoder) addDoc(v reflect.Value) {
+ for {
+ if vi, ok := v.Interface().(Getter); ok {
+ getv, err := vi.GetBSON()
+ if err != nil {
+ panic(err)
+ }
+ v = reflect.ValueOf(getv)
+ continue
+ }
+ if v.Kind() == reflect.Ptr {
+ v = v.Elem()
+ continue
+ }
+ break
+ }
+
+ if v.Type() == typeRaw {
+ raw := v.Interface().(Raw)
+ if raw.Kind != 0x03 && raw.Kind != 0x00 {
+ panic("Attempted to marshal Raw kind " + strconv.Itoa(int(raw.Kind)) + " as a document")
+ }
+ if len(raw.Data) == 0 {
+ panic("Attempted to marshal empty Raw document")
+ }
+ e.addBytes(raw.Data...)
+ return
+ }
+
+ start := e.reserveInt32()
+
+ switch v.Kind() {
+ case reflect.Map:
+ e.addMap(v)
+ case reflect.Struct:
+ e.addStruct(v)
+ case reflect.Array, reflect.Slice:
+ e.addSlice(v)
+ default:
+ panic("Can't marshal " + v.Type().String() + " as a BSON document")
+ }
+
+ e.addBytes(0)
+ e.setInt32(start, int32(len(e.out)-start))
+}
+
+func (e *encoder) addMap(v reflect.Value) {
+ for _, k := range v.MapKeys() {
+ e.addElem(k.String(), v.MapIndex(k), false)
+ }
+}
+
+func (e *encoder) addStruct(v reflect.Value) {
+ sinfo, err := getStructInfo(v.Type())
+ if err != nil {
+ panic(err)
+ }
+ var value reflect.Value
+ if sinfo.InlineMap >= 0 {
+ m := v.Field(sinfo.InlineMap)
+ if m.Len() > 0 {
+ for _, k := range m.MapKeys() {
+ ks := k.String()
+ if _, found := sinfo.FieldsMap[ks]; found {
+ panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", ks))
+ }
+ e.addElem(ks, m.MapIndex(k), false)
+ }
+ }
+ }
+ for _, info := range sinfo.FieldsList {
+ if info.Inline == nil {
+ value = v.Field(info.Num)
+ } else {
+ value = v.FieldByIndex(info.Inline)
+ }
+ if info.OmitEmpty && isZero(value) {
+ continue
+ }
+ e.addElem(info.Key, value, info.MinSize)
+ }
+}
+
+func isZero(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.String:
+ return len(v.String()) == 0
+ case reflect.Ptr, reflect.Interface:
+ return v.IsNil()
+ case reflect.Slice:
+ return v.Len() == 0
+ case reflect.Map:
+ return v.Len() == 0
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() == 0
+ case reflect.Bool:
+ return !v.Bool()
+ case reflect.Struct:
+ vt := v.Type()
+ if vt == typeTime {
+ return v.Interface().(time.Time).IsZero()
+ }
+ for i := 0; i < v.NumField(); i++ {
+ if vt.Field(i).PkgPath != "" && !vt.Field(i).Anonymous {
+ continue // Private field
+ }
+ if !isZero(v.Field(i)) {
+ return false
+ }
+ }
+ return true
+ }
+ return false
+}
+
+func (e *encoder) addSlice(v reflect.Value) {
+ vi := v.Interface()
+ if d, ok := vi.(D); ok {
+ for _, elem := range d {
+ e.addElem(elem.Name, reflect.ValueOf(elem.Value), false)
+ }
+ return
+ }
+ if d, ok := vi.(RawD); ok {
+ for _, elem := range d {
+ e.addElem(elem.Name, reflect.ValueOf(elem.Value), false)
+ }
+ return
+ }
+ l := v.Len()
+ et := v.Type().Elem()
+ if et == typeDocElem {
+ for i := 0; i < l; i++ {
+ elem := v.Index(i).Interface().(DocElem)
+ e.addElem(elem.Name, reflect.ValueOf(elem.Value), false)
+ }
+ return
+ }
+ if et == typeRawDocElem {
+ for i := 0; i < l; i++ {
+ elem := v.Index(i).Interface().(RawDocElem)
+ e.addElem(elem.Name, reflect.ValueOf(elem.Value), false)
+ }
+ return
+ }
+ for i := 0; i < l; i++ {
+ e.addElem(itoa(i), v.Index(i), false)
+ }
+}
+
+// --------------------------------------------------------------------------
+// Marshaling of elements in a document.
+
+func (e *encoder) addElemName(kind byte, name string) {
+ e.addBytes(kind)
+ e.addBytes([]byte(name)...)
+ e.addBytes(0)
+}
+
+func (e *encoder) addElem(name string, v reflect.Value, minSize bool) {
+
+ if !v.IsValid() {
+ e.addElemName(0x0A, name)
+ return
+ }
+
+ if getter, ok := v.Interface().(Getter); ok {
+ getv, err := getter.GetBSON()
+ if err != nil {
+ panic(err)
+ }
+ e.addElem(name, reflect.ValueOf(getv), minSize)
+ return
+ }
+
+ switch v.Kind() {
+
+ case reflect.Interface:
+ e.addElem(name, v.Elem(), minSize)
+
+ case reflect.Ptr:
+ e.addElem(name, v.Elem(), minSize)
+
+ case reflect.String:
+ s := v.String()
+ switch v.Type() {
+ case typeObjectId:
+ if len(s) != 12 {
+ panic("ObjectIDs must be exactly 12 bytes long (got " +
+ strconv.Itoa(len(s)) + ")")
+ }
+ e.addElemName(0x07, name)
+ e.addBytes([]byte(s)...)
+ case typeSymbol:
+ e.addElemName(0x0E, name)
+ e.addStr(s)
+ case typeJSONNumber:
+ n := v.Interface().(json.Number)
+ if i, err := n.Int64(); err == nil {
+ e.addElemName(0x12, name)
+ e.addInt64(i)
+ } else if f, err := n.Float64(); err == nil {
+ e.addElemName(0x01, name)
+ e.addFloat64(f)
+ } else {
+ panic("failed to convert json.Number to a number: " + s)
+ }
+ default:
+ e.addElemName(0x02, name)
+ e.addStr(s)
+ }
+
+ case reflect.Float32, reflect.Float64:
+ e.addElemName(0x01, name)
+ e.addFloat64(v.Float())
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ u := v.Uint()
+ if int64(u) < 0 {
+ panic("BSON has no uint64 type, and value is too large to fit correctly in an int64")
+ } else if u <= math.MaxInt32 && (minSize || v.Kind() <= reflect.Uint32) {
+ e.addElemName(0x10, name)
+ e.addInt32(int32(u))
+ } else {
+ e.addElemName(0x12, name)
+ e.addInt64(int64(u))
+ }
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ switch v.Type() {
+ case typeMongoTimestamp:
+ e.addElemName(0x11, name)
+ e.addInt64(v.Int())
+
+ case typeOrderKey:
+ if v.Int() == int64(MaxKey) {
+ e.addElemName(0x7F, name)
+ } else {
+ e.addElemName(0xFF, name)
+ }
+
+ default:
+ i := v.Int()
+ if (minSize || v.Type().Kind() != reflect.Int64) && i >= math.MinInt32 && i <= math.MaxInt32 {
+ // It fits into an int32, encode as such.
+ e.addElemName(0x10, name)
+ e.addInt32(int32(i))
+ } else {
+ e.addElemName(0x12, name)
+ e.addInt64(i)
+ }
+ }
+
+ case reflect.Bool:
+ e.addElemName(0x08, name)
+ if v.Bool() {
+ e.addBytes(1)
+ } else {
+ e.addBytes(0)
+ }
+
+ case reflect.Map:
+ e.addElemName(0x03, name)
+ e.addDoc(v)
+
+ case reflect.Slice:
+ vt := v.Type()
+ et := vt.Elem()
+ if et.Kind() == reflect.Uint8 {
+ e.addElemName(0x05, name)
+ e.addBinary(0x00, v.Bytes())
+ } else if et == typeDocElem || et == typeRawDocElem {
+ e.addElemName(0x03, name)
+ e.addDoc(v)
+ } else {
+ e.addElemName(0x04, name)
+ e.addDoc(v)
+ }
+
+ case reflect.Array:
+ et := v.Type().Elem()
+ if et.Kind() == reflect.Uint8 {
+ e.addElemName(0x05, name)
+ if v.CanAddr() {
+ e.addBinary(0x00, v.Slice(0, v.Len()).Interface().([]byte))
+ } else {
+ n := v.Len()
+ e.addInt32(int32(n))
+ e.addBytes(0x00)
+ for i := 0; i < n; i++ {
+ el := v.Index(i)
+ e.addBytes(byte(el.Uint()))
+ }
+ }
+ } else {
+ e.addElemName(0x04, name)
+ e.addDoc(v)
+ }
+
+ case reflect.Struct:
+ switch s := v.Interface().(type) {
+
+ case Raw:
+ kind := s.Kind
+ if kind == 0x00 {
+ kind = 0x03
+ }
+ if len(s.Data) == 0 && kind != 0x06 && kind != 0x0A && kind != 0xFF && kind != 0x7F {
+ panic("Attempted to marshal empty Raw document")
+ }
+ e.addElemName(kind, name)
+ e.addBytes(s.Data...)
+
+ case Binary:
+ e.addElemName(0x05, name)
+ e.addBinary(s.Kind, s.Data)
+
+ case Decimal128:
+ e.addElemName(0x13, name)
+ e.addInt64(int64(s.l))
+ e.addInt64(int64(s.h))
+
+ case DBPointer:
+ e.addElemName(0x0C, name)
+ e.addStr(s.Namespace)
+ if len(s.Id) != 12 {
+ panic("ObjectIDs must be exactly 12 bytes long (got " +
+ strconv.Itoa(len(s.Id)) + ")")
+ }
+ e.addBytes([]byte(s.Id)...)
+
+ case RegEx:
+ e.addElemName(0x0B, name)
+ e.addCStr(s.Pattern)
+ e.addCStr(s.Options)
+
+ case JavaScript:
+ if s.Scope == nil {
+ e.addElemName(0x0D, name)
+ e.addStr(s.Code)
+ } else {
+ e.addElemName(0x0F, name)
+ start := e.reserveInt32()
+ e.addStr(s.Code)
+ e.addDoc(reflect.ValueOf(s.Scope))
+ e.setInt32(start, int32(len(e.out)-start))
+ }
+
+ case time.Time:
+ // MongoDB handles timestamps as milliseconds.
+ e.addElemName(0x09, name)
+ e.addInt64(s.Unix()*1000 + int64(s.Nanosecond()/1e6))
+
+ case url.URL:
+ e.addElemName(0x02, name)
+ e.addStr(s.String())
+
+ case undefined:
+ e.addElemName(0x06, name)
+
+ default:
+ e.addElemName(0x03, name)
+ e.addDoc(v)
+ }
+
+ default:
+ panic("Can't marshal " + v.Type().String() + " in a BSON document")
+ }
+}
+
+// --------------------------------------------------------------------------
+// Marshaling of base types.
+
+func (e *encoder) addBinary(subtype byte, v []byte) {
+ if subtype == 0x02 {
+ // Wonder how that brilliant idea came to life. Obsolete, luckily.
+ e.addInt32(int32(len(v) + 4))
+ e.addBytes(subtype)
+ e.addInt32(int32(len(v)))
+ } else {
+ e.addInt32(int32(len(v)))
+ e.addBytes(subtype)
+ }
+ e.addBytes(v...)
+}
+
+func (e *encoder) addStr(v string) {
+ e.addInt32(int32(len(v) + 1))
+ e.addCStr(v)
+}
+
+func (e *encoder) addCStr(v string) {
+ e.addBytes([]byte(v)...)
+ e.addBytes(0)
+}
+
+func (e *encoder) reserveInt32() (pos int) {
+ pos = len(e.out)
+ e.addBytes(0, 0, 0, 0)
+ return pos
+}
+
+func (e *encoder) setInt32(pos int, v int32) {
+ e.out[pos+0] = byte(v)
+ e.out[pos+1] = byte(v >> 8)
+ e.out[pos+2] = byte(v >> 16)
+ e.out[pos+3] = byte(v >> 24)
+}
+
+func (e *encoder) addInt32(v int32) {
+ u := uint32(v)
+ e.addBytes(byte(u), byte(u>>8), byte(u>>16), byte(u>>24))
+}
+
+func (e *encoder) addInt64(v int64) {
+ u := uint64(v)
+ e.addBytes(byte(u), byte(u>>8), byte(u>>16), byte(u>>24),
+ byte(u>>32), byte(u>>40), byte(u>>48), byte(u>>56))
+}
+
+func (e *encoder) addFloat64(v float64) {
+ e.addInt64(int64(math.Float64bits(v)))
+}
+
+func (e *encoder) addBytes(v ...byte) {
+ e.out = append(e.out, v...)
+}
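
A minimal sketch of the encoder-side features handled above (illustrative only, not part of the vendored source): the omitempty, minsize and ,inline struct-tag options consumed by addStruct/addElem, and a value type implementing bson.Getter so GetBSON supplies the value to encode. The reading and celsius types are invented for the example.

    package main

    import (
        "fmt"

        "gopkg.in/mgo.v2/bson"
    )

    // celsius implements bson.Getter, so addElem asks GetBSON for the value to
    // encode instead of reflecting over the float directly.
    type celsius float64

    func (c celsius) GetBSON() (interface{}, error) {
        return fmt.Sprintf("%.1fC", float64(c)), nil
    }

    type reading struct {
        Station string                 `bson:"station"`
        Temp    celsius                `bson:"temp"`
        Note    string                 `bson:"note,omitempty"` // skipped when zero (isZero)
        Seq     int64                  `bson:"seq,minsize"`    // stored as int32 while it fits
        Extra   map[string]interface{} `bson:",inline"`        // keys merged into this document
    }

    func main() {
        data, err := bson.Marshal(reading{
            Station: "oslo",
            Temp:    21.5,
            Seq:     7,
            Extra:   map[string]interface{}{"source": "manual"},
        })
        if err != nil {
            panic(err)
        }

        var m bson.M
        if err := bson.Unmarshal(data, &m); err != nil {
            panic(err)
        }
        fmt.Println(m) // map[seq:7 source:manual station:oslo temp:21.5C]
    }
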
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/json.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/json.go
new file mode 100644
index 00000000000..09df8260a53
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/json.go
@@ -0,0 +1,380 @@
+package bson
+
+import (
+ "bytes"
+ "encoding/base64"
+ "fmt"
+ "gopkg.in/mgo.v2/internal/json"
+ "strconv"
+ "time"
+)
+
+// UnmarshalJSON unmarshals a JSON value that may hold non-standard
+// syntax as defined in BSON's extended JSON specification.
+func UnmarshalJSON(data []byte, value interface{}) error {
+ d := json.NewDecoder(bytes.NewBuffer(data))
+ d.Extend(&jsonExt)
+ return d.Decode(value)
+}
+
+// MarshalJSON marshals a JSON value that may hold non-standard
+// syntax as defined in BSON's extended JSON specification.
+func MarshalJSON(value interface{}) ([]byte, error) {
+ var buf bytes.Buffer
+ e := json.NewEncoder(&buf)
+ e.Extend(&jsonExt)
+ err := e.Encode(value)
+ if err != nil {
+ return nil, err
+ }
+ return buf.Bytes(), nil
+}
+
+// jdec is used internally by the JSON decoding functions
+// so they can still unmarshal the function-call forms without getting
+// into endless recursion due to keyed objects.
+func jdec(data []byte, value interface{}) error {
+ d := json.NewDecoder(bytes.NewBuffer(data))
+ d.Extend(&funcExt)
+ return d.Decode(value)
+}
+
+var jsonExt json.Extension
+var funcExt json.Extension
+
+// TODO
+// - Shell regular expressions ("/regexp/opts")
+
+func init() {
+ jsonExt.DecodeUnquotedKeys(true)
+ jsonExt.DecodeTrailingCommas(true)
+
+ funcExt.DecodeFunc("BinData", "$binaryFunc", "$type", "$binary")
+ jsonExt.DecodeKeyed("$binary", jdecBinary)
+ jsonExt.DecodeKeyed("$binaryFunc", jdecBinary)
+ jsonExt.EncodeType([]byte(nil), jencBinarySlice)
+ jsonExt.EncodeType(Binary{}, jencBinaryType)
+
+ funcExt.DecodeFunc("ISODate", "$dateFunc", "S")
+ funcExt.DecodeFunc("new Date", "$dateFunc", "S")
+ jsonExt.DecodeKeyed("$date", jdecDate)
+ jsonExt.DecodeKeyed("$dateFunc", jdecDate)
+ jsonExt.EncodeType(time.Time{}, jencDate)
+
+ funcExt.DecodeFunc("Timestamp", "$timestamp", "t", "i")
+ jsonExt.DecodeKeyed("$timestamp", jdecTimestamp)
+ jsonExt.EncodeType(MongoTimestamp(0), jencTimestamp)
+
+ funcExt.DecodeConst("undefined", Undefined)
+
+ jsonExt.DecodeKeyed("$regex", jdecRegEx)
+ jsonExt.EncodeType(RegEx{}, jencRegEx)
+
+ funcExt.DecodeFunc("ObjectId", "$oidFunc", "Id")
+ jsonExt.DecodeKeyed("$oid", jdecObjectId)
+ jsonExt.DecodeKeyed("$oidFunc", jdecObjectId)
+ jsonExt.EncodeType(ObjectId(""), jencObjectId)
+
+ funcExt.DecodeFunc("DBRef", "$dbrefFunc", "$ref", "$id")
+ jsonExt.DecodeKeyed("$dbrefFunc", jdecDBRef)
+
+ funcExt.DecodeFunc("NumberLong", "$numberLongFunc", "N")
+ jsonExt.DecodeKeyed("$numberLong", jdecNumberLong)
+ jsonExt.DecodeKeyed("$numberLongFunc", jdecNumberLong)
+ jsonExt.EncodeType(int64(0), jencNumberLong)
+ jsonExt.EncodeType(int(0), jencInt)
+
+ funcExt.DecodeConst("MinKey", MinKey)
+ funcExt.DecodeConst("MaxKey", MaxKey)
+ jsonExt.DecodeKeyed("$minKey", jdecMinKey)
+ jsonExt.DecodeKeyed("$maxKey", jdecMaxKey)
+ jsonExt.EncodeType(orderKey(0), jencMinMaxKey)
+
+ jsonExt.DecodeKeyed("$undefined", jdecUndefined)
+ jsonExt.EncodeType(Undefined, jencUndefined)
+
+ jsonExt.Extend(&funcExt)
+}
+
+func fbytes(format string, args ...interface{}) []byte {
+ var buf bytes.Buffer
+ fmt.Fprintf(&buf, format, args...)
+ return buf.Bytes()
+}
+
+func jdecBinary(data []byte) (interface{}, error) {
+ var v struct {
+ Binary []byte `json:"$binary"`
+ Type string `json:"$type"`
+ Func struct {
+ Binary []byte `json:"$binary"`
+ Type int64 `json:"$type"`
+ } `json:"$binaryFunc"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+
+ var binData []byte
+ var binKind int64
+ if v.Type == "" && v.Binary == nil {
+ binData = v.Func.Binary
+ binKind = v.Func.Type
+ } else if v.Type == "" {
+ return v.Binary, nil
+ } else {
+ binData = v.Binary
+ binKind, err = strconv.ParseInt(v.Type, 0, 64)
+ if err != nil {
+ binKind = -1
+ }
+ }
+
+ if binKind == 0 {
+ return binData, nil
+ }
+ if binKind < 0 || binKind > 255 {
+ return nil, fmt.Errorf("invalid type in binary object: %s", data)
+ }
+
+ return Binary{Kind: byte(binKind), Data: binData}, nil
+}
+
+func jencBinarySlice(v interface{}) ([]byte, error) {
+ in := v.([]byte)
+ out := make([]byte, base64.StdEncoding.EncodedLen(len(in)))
+ base64.StdEncoding.Encode(out, in)
+ return fbytes(`{"$binary":"%s","$type":"0x0"}`, out), nil
+}
+
+func jencBinaryType(v interface{}) ([]byte, error) {
+ in := v.(Binary)
+ out := make([]byte, base64.StdEncoding.EncodedLen(len(in.Data)))
+ base64.StdEncoding.Encode(out, in.Data)
+ return fbytes(`{"$binary":"%s","$type":"0x%x"}`, out, in.Kind), nil
+}
+
+const jdateFormat = "2006-01-02T15:04:05.999Z"
+
+func jdecDate(data []byte) (interface{}, error) {
+ var v struct {
+ S string `json:"$date"`
+ Func struct {
+ S string
+ } `json:"$dateFunc"`
+ }
+ _ = jdec(data, &v)
+ if v.S == "" {
+ v.S = v.Func.S
+ }
+ if v.S != "" {
+ for _, format := range []string{jdateFormat, "2006-01-02"} {
+ t, err := time.Parse(format, v.S)
+ if err == nil {
+ return t, nil
+ }
+ }
+ return nil, fmt.Errorf("cannot parse date: %q", v.S)
+ }
+
+ var vn struct {
+ Date struct {
+ N int64 `json:"$numberLong,string"`
+ } `json:"$date"`
+ Func struct {
+ S int64
+ } `json:"$dateFunc"`
+ }
+ err := jdec(data, &vn)
+ if err != nil {
+ return nil, fmt.Errorf("cannot parse date: %q", data)
+ }
+ n := vn.Date.N
+ if n == 0 {
+ n = vn.Func.S
+ }
+ return time.Unix(n/1000, n%1000*1e6).UTC(), nil
+}
+
+func jencDate(v interface{}) ([]byte, error) {
+ t := v.(time.Time)
+ return fbytes(`{"$date":%q}`, t.Format(jdateFormat)), nil
+}
+
+func jdecTimestamp(data []byte) (interface{}, error) {
+ var v struct {
+ Func struct {
+ T int32 `json:"t"`
+ I int32 `json:"i"`
+ } `json:"$timestamp"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ return MongoTimestamp(uint64(v.Func.T)<<32 | uint64(uint32(v.Func.I))), nil
+}
+
+func jencTimestamp(v interface{}) ([]byte, error) {
+ ts := uint64(v.(MongoTimestamp))
+ return fbytes(`{"$timestamp":{"t":%d,"i":%d}}`, ts>>32, uint32(ts)), nil
+}
+
+func jdecRegEx(data []byte) (interface{}, error) {
+ var v struct {
+ Regex string `json:"$regex"`
+ Options string `json:"$options"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ return RegEx{v.Regex, v.Options}, nil
+}
+
+func jencRegEx(v interface{}) ([]byte, error) {
+ re := v.(RegEx)
+ type regex struct {
+ Regex string `json:"$regex"`
+ Options string `json:"$options"`
+ }
+ return json.Marshal(regex{re.Pattern, re.Options})
+}
+
+func jdecObjectId(data []byte) (interface{}, error) {
+ var v struct {
+ Id string `json:"$oid"`
+ Func struct {
+ Id string
+ } `json:"$oidFunc"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ if v.Id == "" {
+ v.Id = v.Func.Id
+ }
+ return ObjectIdHex(v.Id), nil
+}
+
+func jencObjectId(v interface{}) ([]byte, error) {
+ return fbytes(`{"$oid":"%s"}`, v.(ObjectId).Hex()), nil
+}
+
+func jdecDBRef(data []byte) (interface{}, error) {
+ // TODO Support unmarshaling $ref and $id into the input value.
+ var v struct {
+ Obj map[string]interface{} `json:"$dbrefFunc"`
+ }
+ // TODO Fix this. Must not be required.
+ v.Obj = make(map[string]interface{})
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ return v.Obj, nil
+}
+
+func jdecNumberLong(data []byte) (interface{}, error) {
+ var v struct {
+ N int64 `json:"$numberLong,string"`
+ Func struct {
+ N int64 `json:",string"`
+ } `json:"$numberLongFunc"`
+ }
+ var vn struct {
+ N int64 `json:"$numberLong"`
+ Func struct {
+ N int64
+ } `json:"$numberLongFunc"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ err = jdec(data, &vn)
+ v.N = vn.N
+ v.Func.N = vn.Func.N
+ }
+ if err != nil {
+ return nil, err
+ }
+ if v.N != 0 {
+ return v.N, nil
+ }
+ return v.Func.N, nil
+}
+
+func jencNumberLong(v interface{}) ([]byte, error) {
+ n := v.(int64)
+ f := `{"$numberLong":"%d"}`
+ if n <= 1<<53 {
+ f = `{"$numberLong":%d}`
+ }
+ return fbytes(f, n), nil
+}
+
+func jencInt(v interface{}) ([]byte, error) {
+ n := v.(int)
+ f := `{"$numberLong":"%d"}`
+ if int64(n) <= 1<<53 {
+ f = `%d`
+ }
+ return fbytes(f, n), nil
+}
+
+func jdecMinKey(data []byte) (interface{}, error) {
+ var v struct {
+ N int64 `json:"$minKey"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ if v.N != 1 {
+ return nil, fmt.Errorf("invalid $minKey object: %s", data)
+ }
+ return MinKey, nil
+}
+
+func jdecMaxKey(data []byte) (interface{}, error) {
+ var v struct {
+ N int64 `json:"$maxKey"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ if v.N != 1 {
+ return nil, fmt.Errorf("invalid $maxKey object: %s", data)
+ }
+ return MaxKey, nil
+}
+
+func jencMinMaxKey(v interface{}) ([]byte, error) {
+ switch v.(orderKey) {
+ case MinKey:
+ return []byte(`{"$minKey":1}`), nil
+ case MaxKey:
+ return []byte(`{"$maxKey":1}`), nil
+ }
+ panic(fmt.Sprintf("invalid $minKey/$maxKey value: %d", v))
+}
+
+func jdecUndefined(data []byte) (interface{}, error) {
+ var v struct {
+ B bool `json:"$undefined"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ if !v.B {
+ return nil, fmt.Errorf("invalid $undefined object: %s", data)
+ }
+ return Undefined, nil
+}
+
+func jencUndefined(v interface{}) ([]byte, error) {
+ return []byte(`{"$undefined":true}`), nil
+}
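
A short, illustrative sketch (not part of the vendored source) of the two entry points defined in this file: MarshalJSON emitting the keyed extended-JSON forms registered in init(), and UnmarshalJSON accepting the function-call syntax, unquoted keys and trailing commas enabled there. The document values are arbitrary.

    package main

    import (
        "fmt"
        "time"

        "gopkg.in/mgo.v2/bson"
    )

    func main() {
        // Encoding: ObjectId and time.Time come out in the keyed extended-JSON
        // forms registered in init() ($oid, $date).
        out, err := bson.MarshalJSON(bson.M{
            "_id":  bson.ObjectIdHex("0123456789abcdef01234567"),
            "when": time.Date(2016, 5, 15, 1, 2, 3, 0, time.UTC),
        })
        if err != nil {
            panic(err)
        }
        fmt.Print(string(out))

        // Decoding: the function-call syntax, unquoted keys and trailing
        // commas handled by funcExt/jsonExt are all accepted.
        var doc bson.M
        in := `{_id: ObjectId("0123456789abcdef01234567"), n: NumberLong("9007199254740993"),}`
        if err := bson.UnmarshalJSON([]byte(in), &doc); err != nil {
            panic(err)
        }
        fmt.Printf("%T %v\n", doc["n"], doc["_id"])
    }
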
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/json_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/json_test.go
new file mode 100644
index 00000000000..866f51c34e3
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/json_test.go
@@ -0,0 +1,184 @@
+package bson_test
+
+import (
+ "gopkg.in/mgo.v2/bson"
+
+ . "gopkg.in/check.v1"
+ "reflect"
+ "strings"
+ "time"
+)
+
+type jsonTest struct {
+ a interface{} // value encoded into JSON (optional)
+ b string // JSON expected as output of <a>, and used as input to <c>
+ c interface{} // Value expected from decoding <b>, defaults to <a>
+ e string // error string, if decoding (b) should fail
+}
+
+var jsonTests = []jsonTest{
+ // $binary
+ {
+ a: []byte("foo"),
+ b: `{"$binary":"Zm9v","$type":"0x0"}`,
+ }, {
+ a: bson.Binary{Kind: 2, Data: []byte("foo")},
+ b: `{"$binary":"Zm9v","$type":"0x2"}`,
+ }, {
+ b: `BinData(2,"Zm9v")`,
+ c: bson.Binary{Kind: 2, Data: []byte("foo")},
+ },
+
+ // $date
+ {
+ a: time.Date(2016, 5, 15, 1, 2, 3, 4000000, time.UTC),
+ b: `{"$date":"2016-05-15T01:02:03.004Z"}`,
+ }, {
+ b: `{"$date": {"$numberLong": "1002"}}`,
+ c: time.Date(1970, 1, 1, 0, 0, 1, 2e6, time.UTC),
+ }, {
+ b: `ISODate("2016-05-15T01:02:03.004Z")`,
+ c: time.Date(2016, 5, 15, 1, 2, 3, 4000000, time.UTC),
+ }, {
+ b: `new Date(1000)`,
+ c: time.Date(1970, 1, 1, 0, 0, 1, 0, time.UTC),
+ }, {
+ b: `new Date("2016-05-15")`,
+ c: time.Date(2016, 5, 15, 0, 0, 0, 0, time.UTC),
+ },
+
+ // $timestamp
+ {
+ a: bson.MongoTimestamp(4294967298),
+ b: `{"$timestamp":{"t":1,"i":2}}`,
+ }, {
+ b: `Timestamp(1, 2)`,
+ c: bson.MongoTimestamp(4294967298),
+ },
+
+ // $regex
+ {
+ a: bson.RegEx{"pattern", "options"},
+ b: `{"$regex":"pattern","$options":"options"}`,
+ },
+
+ // $oid
+ {
+ a: bson.ObjectIdHex("0123456789abcdef01234567"),
+ b: `{"$oid":"0123456789abcdef01234567"}`,
+ }, {
+ b: `ObjectId("0123456789abcdef01234567")`,
+ c: bson.ObjectIdHex("0123456789abcdef01234567"),
+ },
+
+ // $ref (no special type)
+ {
+ b: `DBRef("name", "id")`,
+ c: map[string]interface{}{"$ref": "name", "$id": "id"},
+ },
+
+ // $numberLong
+ {
+ a: 123,
+ b: `123`,
+ }, {
+ a: int64(9007199254740992),
+ b: `{"$numberLong":9007199254740992}`,
+ }, {
+ a: int64(1<<53 + 1),
+ b: `{"$numberLong":"9007199254740993"}`,
+ }, {
+ a: 1<<53 + 1,
+ b: `{"$numberLong":"9007199254740993"}`,
+ c: int64(9007199254740993),
+ }, {
+ b: `NumberLong(9007199254740992)`,
+ c: int64(1 << 53),
+ }, {
+ b: `NumberLong("9007199254740993")`,
+ c: int64(1<<53 + 1),
+ },
+
+ // $minKey, $maxKey
+ {
+ a: bson.MinKey,
+ b: `{"$minKey":1}`,
+ }, {
+ a: bson.MaxKey,
+ b: `{"$maxKey":1}`,
+ }, {
+ b: `MinKey`,
+ c: bson.MinKey,
+ }, {
+ b: `MaxKey`,
+ c: bson.MaxKey,
+ }, {
+ b: `{"$minKey":0}`,
+ e: `invalid $minKey object: {"$minKey":0}`,
+ }, {
+ b: `{"$maxKey":0}`,
+ e: `invalid $maxKey object: {"$maxKey":0}`,
+ },
+
+ {
+ a: bson.Undefined,
+ b: `{"$undefined":true}`,
+ }, {
+ b: `undefined`,
+ c: bson.Undefined,
+ }, {
+ b: `{"v": undefined}`,
+ c: struct{ V interface{} }{bson.Undefined},
+ },
+
+ // Unquoted keys and trailing commas
+ {
+ b: `{$foo: ["bar",],}`,
+ c: map[string]interface{}{"$foo": []interface{}{"bar"}},
+ },
+}
+
+func (s *S) TestJSON(c *C) {
+ for i, item := range jsonTests {
+ c.Logf("------------ (#%d)", i)
+ c.Logf("A: %#v", item.a)
+ c.Logf("B: %#v", item.b)
+
+ if item.c == nil {
+ item.c = item.a
+ } else {
+ c.Logf("C: %#v", item.c)
+ }
+ if item.e != "" {
+ c.Logf("E: %s", item.e)
+ }
+
+ if item.a != nil {
+ data, err := bson.MarshalJSON(item.a)
+ c.Assert(err, IsNil)
+ c.Logf("Dumped: %#v", string(data))
+ c.Assert(strings.TrimSuffix(string(data), "\n"), Equals, item.b)
+ }
+
+ var zero interface{}
+ if item.c == nil {
+ zero = &struct{}{}
+ } else {
+ zero = reflect.New(reflect.TypeOf(item.c)).Interface()
+ }
+ err := bson.UnmarshalJSON([]byte(item.b), zero)
+ if item.e != "" {
+ c.Assert(err, NotNil)
+ c.Assert(err.Error(), Equals, item.e)
+ continue
+ }
+ c.Assert(err, IsNil)
+ zerov := reflect.ValueOf(zero)
+ value := zerov.Interface()
+ if zerov.Kind() == reflect.Ptr {
+ value = zerov.Elem().Interface()
+ }
+ c.Logf("Loaded: %#v", value)
+ c.Assert(value, DeepEquals, item.c)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/specdata/update.sh b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/specdata/update.sh
new file mode 100755
index 00000000000..1efd3d3b66d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/specdata/update.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+set -e
+
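+# Fetch the BSON spec corpus (if missing) and regenerate ../specdata_test.go,
+# embedding each spec YAML file as a raw Go string in the specTests slice.
+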
+if [ ! -d specifications ]; then
+ git clone -b bson git@github.com:jyemin/specifications
+fi
+
+TESTFILE="../specdata_test.go"
+
+cat <<END > $TESTFILE
+package bson_test
+
+var specTests = []string{
+END
+
+for file in specifications/source/bson/tests/*.yml; do
+ (
+ echo '`'
+ cat $file
+ echo -n '`,'
+ ) >> $TESTFILE
+done
+
+echo '}' >> $TESTFILE
+
+gofmt -w $TESTFILE
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/specdata_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/specdata_test.go
new file mode 100644
index 00000000000..513f9b209c7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bson/specdata_test.go
@@ -0,0 +1,241 @@
+package bson_test
+
+var specTests = []string{
+ `
+---
+description: "Array type"
+documents:
+ -
+ decoded:
+ a : []
+ encoded: 0D000000046100050000000000
+ -
+ decoded:
+ a: [10]
+ encoded: 140000000461000C0000001030000A0000000000
+ -
+ # Decode an array that uses an empty string as the key
+ decodeOnly : true
+ decoded:
+ a: [10]
+ encoded: 130000000461000B00000010000A0000000000
+ -
+ # Decode an array that uses a non-numeric string as the key
+ decodeOnly : true
+ decoded:
+ a: [10]
+ encoded: 150000000461000D000000106162000A0000000000
+
+
+`, `
+---
+description: "Boolean type"
+documents:
+ -
+ encoded: "090000000862000100"
+ decoded: { "b" : true }
+ -
+ encoded: "090000000862000000"
+ decoded: { "b" : false }
+
+
+ `, `
+---
+description: "Corrupted BSON"
+documents:
+ -
+ encoded: "09000000016600"
+ error: "truncated double"
+ -
+ encoded: "09000000026600"
+ error: "truncated string"
+ -
+ encoded: "09000000036600"
+ error: "truncated document"
+ -
+ encoded: "09000000046600"
+ error: "truncated array"
+ -
+ encoded: "09000000056600"
+ error: "truncated binary"
+ -
+ encoded: "09000000076600"
+ error: "truncated objectid"
+ -
+ encoded: "09000000086600"
+ error: "truncated boolean"
+ -
+ encoded: "09000000096600"
+ error: "truncated date"
+ -
+ encoded: "090000000b6600"
+ error: "truncated regex"
+ -
+ encoded: "090000000c6600"
+ error: "truncated db pointer"
+ -
+ encoded: "0C0000000d6600"
+ error: "truncated javascript"
+ -
+ encoded: "0C0000000e6600"
+ error: "truncated symbol"
+ -
+ encoded: "0C0000000f6600"
+ error: "truncated javascript with scope"
+ -
+ encoded: "0C000000106600"
+ error: "truncated int32"
+ -
+ encoded: "0C000000116600"
+ error: "truncated timestamp"
+ -
+ encoded: "0C000000126600"
+ error: "truncated int64"
+ -
+ encoded: "0400000000"
+ error: basic
+ -
+ encoded: "0500000001"
+ error: basic
+ -
+ encoded: "05000000"
+ error: basic
+ -
+ encoded: "0700000002610078563412"
+ error: basic
+ -
+ encoded: "090000001061000500"
+ error: basic
+ -
+ encoded: "00000000000000000000"
+ error: basic
+ -
+ encoded: "1300000002666f6f00040000006261720000"
+ error: "basic"
+ -
+ encoded: "1800000003666f6f000f0000001062617200ffffff7f0000"
+ error: basic
+ -
+ encoded: "1500000003666f6f000c0000000862617200010000"
+ error: basic
+ -
+ encoded: "1c00000003666f6f001200000002626172000500000062617a000000"
+ error: basic
+ -
+ encoded: "1000000002610004000000616263ff00"
+ error: string is not null-terminated
+ -
+ encoded: "0c0000000200000000000000"
+ error: bad_string_length
+ -
+ encoded: "120000000200ffffffff666f6f6261720000"
+ error: bad_string_length
+ -
+ encoded: "0c0000000e00000000000000"
+ error: bad_string_length
+ -
+ encoded: "120000000e00ffffffff666f6f6261720000"
+ error: bad_string_length
+ -
+ encoded: "180000000c00fa5bd841d6585d9900"
+ error: ""
+ -
+ encoded: "1e0000000c00ffffffff666f6f626172005259b56afa5bd841d6585d9900"
+ error: bad_string_length
+ -
+ encoded: "0c0000000d00000000000000"
+ error: bad_string_length
+ -
+ encoded: "0c0000000d00ffffffff0000"
+ error: bad_string_length
+ -
+ encoded: "1c0000000f001500000000000000000c000000020001000000000000"
+ error: bad_string_length
+ -
+ encoded: "1c0000000f0015000000ffffffff000c000000020001000000000000"
+ error: bad_string_length
+ -
+ encoded: "1c0000000f001500000001000000000c000000020000000000000000"
+ error: bad_string_length
+ -
+ encoded: "1c0000000f001500000001000000000c0000000200ffffffff000000"
+ error: bad_string_length
+ -
+ encoded: "0E00000008616263646566676869707172737475"
+ error: "Run-on CString"
+ -
+ encoded: "0100000000"
+ error: "An object size that's too small to even include the object size, but is correctly encoded, along with a correct EOO (and no data)"
+ -
+ encoded: "1a0000000e74657374000c00000068656c6c6f20776f726c6400000500000000"
+ error: "One object, but with object size listed smaller than it is in the data"
+ -
+ encoded: "05000000"
+ error: "One object, missing the EOO at the end"
+ -
+ encoded: "0500000001"
+ error: "One object, sized correctly, with a spot for an EOO, but the EOO is 0x01"
+ -
+ encoded: "05000000ff"
+ error: "One object, sized correctly, with a spot for an EOO, but the EOO is 0xff"
+ -
+ encoded: "0500000070"
+ error: "One object, sized correctly, with a spot for an EOO, but the EOO is 0x70"
+ -
+ encoded: "07000000000000"
+ error: "Invalid BSON type low range"
+ -
+ encoded: "07000000800000"
+ error: "Invalid BSON type high range"
+ -
+ encoded: "090000000862000200"
+ error: "Invalid boolean value of 2"
+ -
+ encoded: "09000000086200ff00"
+ error: "Invalid boolean value of -1"
+ `, `
+---
+description: "Int32 type"
+documents:
+ -
+ decoded:
+ i: -2147483648
+ encoded: 0C0000001069000000008000
+ -
+ decoded:
+ i: 2147483647
+ encoded: 0C000000106900FFFFFF7F00
+ -
+ decoded:
+ i: -1
+ encoded: 0C000000106900FFFFFFFF00
+ -
+ decoded:
+ i: 0
+ encoded: 0C0000001069000000000000
+ -
+ decoded:
+ i: 1
+ encoded: 0C0000001069000100000000
+
+`, `
+---
+description: "String type"
+documents:
+ -
+ decoded:
+ s : ""
+ encoded: 0D000000027300010000000000
+ -
+ decoded:
+ s: "a"
+ encoded: 0E00000002730002000000610000
+ -
+ decoded:
+ s: "This is a string"
+ encoded: 1D0000000273001100000054686973206973206120737472696E670000
+ -
+ decoded:
+ s: "κόσμε"
+ encoded: 180000000273000C000000CEBAE1BDB9CF83CEBCCEB50000
+`}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bulk.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bulk.go
new file mode 100644
index 00000000000..072a5206ac2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bulk.go
@@ -0,0 +1,351 @@
+package mgo
+
+import (
+ "bytes"
+ "sort"
+
+ "gopkg.in/mgo.v2/bson"
+)
+
+// Bulk represents an operation that can be prepared with several
+// orthogonal changes before being delivered to the server.
+//
+// MongoDB servers older than version 2.6 do not have proper support for bulk
+// operations, so the driver maps its API as closely as possible onto the
+// functionality those servers do provide. In particular, in those releases
+// updates and removals are sent individually, and inserts are sent in bulk
+// but have suboptimal error reporting compared to more recent server versions.
+// See the documentation of BulkErrorCase for details on that.
+//
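+// A minimal usage sketch; collection, doc1, doc2, selector and change below
+// are placeholder values rather than part of this package:
+//
+//	bulk := collection.Bulk()
+//	bulk.Unordered()
+//	bulk.Insert(doc1, doc2)
+//	bulk.Update(selector, change)
+//	if _, err := bulk.Run(); err != nil {
+//		// handle the aggregated *BulkError
+//	}
+//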
+// Relevant documentation:
+//
+// http://blog.mongodb.org/post/84922794768/mongodbs-new-bulk-api
+//
+type Bulk struct {
+ c *Collection
+ opcount int
+ actions []bulkAction
+ ordered bool
+}
+
+type bulkOp int
+
+const (
+ bulkInsert bulkOp = iota + 1
+ bulkUpdate
+ bulkUpdateAll
+ bulkRemove
+)
+
+type bulkAction struct {
+ op bulkOp
+ docs []interface{}
+ idxs []int
+}
+
+type bulkUpdateOp []interface{}
+type bulkDeleteOp []interface{}
+
+// BulkResult holds the results for a bulk operation.
+type BulkResult struct {
+ Matched int
+ Modified int // Available only for MongoDB 2.6+
+
+ // Be conservative while we understand exactly how to report these
+	// Be conservative until we understand exactly how to report these
+ // them with prior servers.
+ private bool
+}
+
+// BulkError holds an error returned from running a Bulk operation.
+// Individual errors may be obtained and inspected via the Cases method.
+type BulkError struct {
+ ecases []BulkErrorCase
+}
+
+func (e *BulkError) Error() string {
+ if len(e.ecases) == 0 {
+ return "invalid BulkError instance: no errors"
+ }
+ if len(e.ecases) == 1 {
+ return e.ecases[0].Err.Error()
+ }
+ msgs := make([]string, 0, len(e.ecases))
+ seen := make(map[string]bool)
+ for _, ecase := range e.ecases {
+ msg := ecase.Err.Error()
+ if !seen[msg] {
+ seen[msg] = true
+ msgs = append(msgs, msg)
+ }
+ }
+ if len(msgs) == 1 {
+ return msgs[0]
+ }
+ var buf bytes.Buffer
+ buf.WriteString("multiple errors in bulk operation:\n")
+ for _, msg := range msgs {
+ buf.WriteString(" - ")
+ buf.WriteString(msg)
+ buf.WriteByte('\n')
+ }
+ return buf.String()
+}
+
+type bulkErrorCases []BulkErrorCase
+
+func (slice bulkErrorCases) Len() int { return len(slice) }
+func (slice bulkErrorCases) Less(i, j int) bool { return slice[i].Index < slice[j].Index }
+func (slice bulkErrorCases) Swap(i, j int) { slice[i], slice[j] = slice[j], slice[i] }
+
+// BulkErrorCase holds an individual error found while attempting a single change
+// within a bulk operation, and the position in which it was enqueued.
+//
+// MongoDB servers older than version 2.6 do not have proper support for bulk
+// operations, so the driver maps its API as closely as possible onto the
+// functionality those servers do provide. In particular, only the last error
+// is reported for bulk inserts, without any positional information, so the Index
+// field is set to -1 in these cases.
+type BulkErrorCase struct {
+ Index int // Position of operation that failed, or -1 if unknown.
+ Err error
+}
+
+// Cases returns all individual errors found while attempting the requested changes.
+//
+// See the documentation of BulkErrorCase for limitations in older MongoDB releases.
+func (e *BulkError) Cases() []BulkErrorCase {
+ return e.ecases
+}
+
+// Bulk returns a value to prepare the execution of a bulk operation.
+func (c *Collection) Bulk() *Bulk {
+ return &Bulk{c: c, ordered: true}
+}
+
+// Unordered puts the bulk operation in unordered mode.
+//
+// In unordered mode the individual operations may be sent
+// out of order, which means later operations may proceed
+// even if prior ones have failed.
+func (b *Bulk) Unordered() {
+ b.ordered = false
+}
+
+func (b *Bulk) action(op bulkOp, opcount int) *bulkAction {
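+	// Reuse the most recent action when it is of the same kind; in
+	// unordered mode any earlier action of that kind may be extended
+	// instead, since operation order is not guaranteed anyway.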
+ var action *bulkAction
+ if len(b.actions) > 0 && b.actions[len(b.actions)-1].op == op {
+ action = &b.actions[len(b.actions)-1]
+ } else if !b.ordered {
+ for i := range b.actions {
+ if b.actions[i].op == op {
+ action = &b.actions[i]
+ break
+ }
+ }
+ }
+ if action == nil {
+ b.actions = append(b.actions, bulkAction{op: op})
+ action = &b.actions[len(b.actions)-1]
+ }
+ for i := 0; i < opcount; i++ {
+ action.idxs = append(action.idxs, b.opcount)
+ b.opcount++
+ }
+ return action
+}
+
+// Insert queues up the provided documents for insertion.
+func (b *Bulk) Insert(docs ...interface{}) {
+ action := b.action(bulkInsert, len(docs))
+ action.docs = append(action.docs, docs...)
+}
+
+// Remove queues up the provided selectors for removing matching documents.
+// Each selector will remove only a single matching document.
+func (b *Bulk) Remove(selectors ...interface{}) {
+ action := b.action(bulkRemove, len(selectors))
+ for _, selector := range selectors {
+ if selector == nil {
+ selector = bson.D{}
+ }
+ action.docs = append(action.docs, &deleteOp{
+ Collection: b.c.FullName,
+ Selector: selector,
+ Flags: 1,
+ Limit: 1,
+ })
+ }
+}
+
+// RemoveAll queues up the provided selectors for removing all matching documents.
+// Each selector will remove all matching documents.
+func (b *Bulk) RemoveAll(selectors ...interface{}) {
+ action := b.action(bulkRemove, len(selectors))
+ for _, selector := range selectors {
+ if selector == nil {
+ selector = bson.D{}
+ }
+ action.docs = append(action.docs, &deleteOp{
+ Collection: b.c.FullName,
+ Selector: selector,
+ Flags: 0,
+ Limit: 0,
+ })
+ }
+}
+
+// Update queues up the provided pairs of updating instructions.
+// The first element of each pair selects the documents to update,
+// and the second element defines how to update them.
+// Each pair updates at most one matching document.
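+//
+// For example, with placeholder selector and change documents:
+//
+//	bulk.Update(
+//		bson.M{"n": 1}, bson.M{"$set": bson.M{"n": 10}},
+//		bson.M{"n": 2}, bson.M{"$set": bson.M{"n": 20}},
+//	)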
+func (b *Bulk) Update(pairs ...interface{}) {
+ if len(pairs)%2 != 0 {
+ panic("Bulk.Update requires an even number of parameters")
+ }
+ action := b.action(bulkUpdate, len(pairs)/2)
+ for i := 0; i < len(pairs); i += 2 {
+ selector := pairs[i]
+ if selector == nil {
+ selector = bson.D{}
+ }
+ action.docs = append(action.docs, &updateOp{
+ Collection: b.c.FullName,
+ Selector: selector,
+ Update: pairs[i+1],
+ })
+ }
+}
+
+// UpdateAll queues up the provided pairs of updating instructions.
+// The first element of each pair selects the documents to update,
+// and the second element defines how to update them.
+// Each pair updates all documents matching the selector.
+func (b *Bulk) UpdateAll(pairs ...interface{}) {
+ if len(pairs)%2 != 0 {
+ panic("Bulk.UpdateAll requires an even number of parameters")
+ }
+ action := b.action(bulkUpdate, len(pairs)/2)
+ for i := 0; i < len(pairs); i += 2 {
+ selector := pairs[i]
+ if selector == nil {
+ selector = bson.D{}
+ }
+ action.docs = append(action.docs, &updateOp{
+ Collection: b.c.FullName,
+ Selector: selector,
+ Update: pairs[i+1],
+ Flags: 2,
+ Multi: true,
+ })
+ }
+}
+
+// Upsert queues up the provided pairs of upserting instructions.
+// The first element of each pair selects the documents to update,
+// and the second element defines how to update them.
+// Each pair updates at most one matching document, and inserts a
+// new one when no document matches the selector.
+func (b *Bulk) Upsert(pairs ...interface{}) {
+ if len(pairs)%2 != 0 {
+		panic("Bulk.Upsert requires an even number of parameters")
+ }
+ action := b.action(bulkUpdate, len(pairs)/2)
+ for i := 0; i < len(pairs); i += 2 {
+ selector := pairs[i]
+ if selector == nil {
+ selector = bson.D{}
+ }
+ action.docs = append(action.docs, &updateOp{
+ Collection: b.c.FullName,
+ Selector: selector,
+ Update: pairs[i+1],
+ Flags: 1,
+ Upsert: true,
+ })
+ }
+}
+
+// Run runs all the operations queued up.
+//
+// If an error is reported on an unordered bulk operation, the error value may
+// be an aggregation of all issues observed. As an exception to that, Insert
+// operations running on MongoDB versions prior to 2.6 will report the last
+// error only due to a limitation in the wire protocol.
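+//
+// A sketch of inspecting individual failures after a run; the printing is
+// illustrative only:
+//
+//	if _, err := bulk.Run(); err != nil {
+//		for _, ecase := range err.(*BulkError).Cases() {
+//			fmt.Printf("operation %d: %v\n", ecase.Index, ecase.Err)
+//		}
+//	}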
+func (b *Bulk) Run() (*BulkResult, error) {
+ var result BulkResult
+ var berr BulkError
+ var failed bool
+ for i := range b.actions {
+ action := &b.actions[i]
+ var ok bool
+ switch action.op {
+ case bulkInsert:
+ ok = b.runInsert(action, &result, &berr)
+ case bulkUpdate:
+ ok = b.runUpdate(action, &result, &berr)
+ case bulkRemove:
+ ok = b.runRemove(action, &result, &berr)
+ default:
+ panic("unknown bulk operation")
+ }
+ if !ok {
+ failed = true
+ if b.ordered {
+ break
+ }
+ }
+ }
+ if failed {
+ sort.Sort(bulkErrorCases(berr.ecases))
+ return nil, &berr
+ }
+ return &result, nil
+}
+
+func (b *Bulk) runInsert(action *bulkAction, result *BulkResult, berr *BulkError) bool {
+ op := &insertOp{b.c.FullName, action.docs, 0}
+ if !b.ordered {
+ op.flags = 1 // ContinueOnError
+ }
+ lerr, err := b.c.writeOp(op, b.ordered)
+ return b.checkSuccess(action, berr, lerr, err)
+}
+
+func (b *Bulk) runUpdate(action *bulkAction, result *BulkResult, berr *BulkError) bool {
+ lerr, err := b.c.writeOp(bulkUpdateOp(action.docs), b.ordered)
+ if lerr != nil {
+ result.Matched += lerr.N
+ result.Modified += lerr.modified
+ }
+ return b.checkSuccess(action, berr, lerr, err)
+}
+
+func (b *Bulk) runRemove(action *bulkAction, result *BulkResult, berr *BulkError) bool {
+ lerr, err := b.c.writeOp(bulkDeleteOp(action.docs), b.ordered)
+ if lerr != nil {
+ result.Matched += lerr.N
+ result.Modified += lerr.modified
+ }
+ return b.checkSuccess(action, berr, lerr, err)
+}
+
+func (b *Bulk) checkSuccess(action *bulkAction, berr *BulkError, lerr *LastError, err error) bool {
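+	// Failures surface either as per-operation cases inside lerr or as a
+	// single error covering the whole action; the latter is attributed to
+	// every index queued for this action.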
+ if lerr != nil && len(lerr.ecases) > 0 {
+ for i := 0; i < len(lerr.ecases); i++ {
+ // Map back from the local error index into the visible one.
+ ecase := lerr.ecases[i]
+ idx := ecase.Index
+ if idx >= 0 {
+ idx = action.idxs[idx]
+ }
+ berr.ecases = append(berr.ecases, BulkErrorCase{idx, ecase.Err})
+ }
+ return false
+ } else if err != nil {
+ for i := 0; i < len(action.idxs); i++ {
+ berr.ecases = append(berr.ecases, BulkErrorCase{action.idxs[i], err})
+ }
+ return false
+ }
+ return true
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bulk_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bulk_test.go
new file mode 100644
index 00000000000..cb280bbfa40
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/bulk_test.go
@@ -0,0 +1,504 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2015 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo_test
+
+import (
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
+)
+
+func (s *S) TestBulkInsert(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ bulk := coll.Bulk()
+ bulk.Insert(M{"n": 1})
+ bulk.Insert(M{"n": 2}, M{"n": 3})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r, FitsTypeOf, &mgo.BulkResult{})
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{1}, {2}, {3}})
+}
+
+func (s *S) TestBulkInsertError(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ bulk := coll.Bulk()
+ bulk.Insert(M{"_id": 1}, M{"_id": 2}, M{"_id": 2}, M{"_id": 3})
+ _, err = bulk.Run()
+ c.Assert(err, ErrorMatches, ".*duplicate key.*")
+ c.Assert(mgo.IsDup(err), Equals, true)
+
+ type doc struct {
+ N int `_id`
+ }
+ var res []doc
+ err = coll.Find(nil).Sort("_id").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{1}, {2}})
+}
+
+func (s *S) TestBulkInsertErrorUnordered(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ bulk := coll.Bulk()
+ bulk.Unordered()
+ bulk.Insert(M{"_id": 1}, M{"_id": 2}, M{"_id": 2}, M{"_id": 3})
+ _, err = bulk.Run()
+ c.Assert(err, ErrorMatches, ".*duplicate key.*")
+
+ type doc struct {
+ N int `_id`
+ }
+ var res []doc
+ err = coll.Find(nil).Sort("_id").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{1}, {2}, {3}})
+}
+
+func (s *S) TestBulkInsertErrorUnorderedSplitBatch(c *C) {
+ // The server has a batch limit of 1000 documents when using write commands.
+ // This artificial limit did not exist with the old wire protocol, so to
+	// avoid compatibility issues the implementation internally splits batches
+ // into the proper size and delivers them one by one. This test ensures that
+ // the behavior of unordered (that is, continue on error) remains correct
+ // when errors happen and there are batches left.
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ bulk := coll.Bulk()
+ bulk.Unordered()
+
+ const total = 4096
+ type doc struct {
+ Id int `_id`
+ }
+ docs := make([]interface{}, total)
+ for i := 0; i < total; i++ {
+ docs[i] = doc{i}
+ }
+ docs[1] = doc{0}
+ bulk.Insert(docs...)
+ _, err = bulk.Run()
+ c.Assert(err, ErrorMatches, ".*duplicate key.*")
+
+ n, err := coll.Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, total-1)
+
+ var res doc
+ err = coll.FindId(1500).One(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res.Id, Equals, 1500)
+}
+
+func (s *S) TestBulkErrorString(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ // If it's just the same string multiple times, join it into a single message.
+ bulk := coll.Bulk()
+ bulk.Unordered()
+ bulk.Insert(M{"_id": 1}, M{"_id": 2}, M{"_id": 2})
+ _, err = bulk.Run()
+ c.Assert(err, ErrorMatches, ".*duplicate key.*")
+ c.Assert(err, Not(ErrorMatches), ".*duplicate key.*duplicate key")
+ c.Assert(mgo.IsDup(err), Equals, true)
+
+ // With matching errors but different messages, present them all.
+ bulk = coll.Bulk()
+ bulk.Unordered()
+ bulk.Insert(M{"_id": "dupone"}, M{"_id": "dupone"}, M{"_id": "duptwo"}, M{"_id": "duptwo"})
+ _, err = bulk.Run()
+ if s.versionAtLeast(2, 6) {
+ c.Assert(err, ErrorMatches, "multiple errors in bulk operation:\n( - .*duplicate.*\n){2}$")
+ c.Assert(err, ErrorMatches, "(?s).*dupone.*")
+ c.Assert(err, ErrorMatches, "(?s).*duptwo.*")
+ } else {
+ // Wire protocol query doesn't return all errors.
+ c.Assert(err, ErrorMatches, ".*duplicate.*")
+ }
+ c.Assert(mgo.IsDup(err), Equals, true)
+
+ // With mixed errors, present them all.
+ bulk = coll.Bulk()
+ bulk.Unordered()
+ bulk.Insert(M{"_id": 1}, M{"_id": []int{2}})
+ _, err = bulk.Run()
+ if s.versionAtLeast(2, 6) {
+ c.Assert(err, ErrorMatches, "multiple errors in bulk operation:\n - .*duplicate.*\n - .*array.*\n$")
+ } else {
+ // Wire protocol query doesn't return all errors.
+ c.Assert(err, ErrorMatches, ".*array.*")
+ }
+ c.Assert(mgo.IsDup(err), Equals, false)
+}
+
+func (s *S) TestBulkErrorCases_2_6(c *C) {
+ if !s.versionAtLeast(2, 6) {
+ c.Skip("2.4- has poor bulk reporting")
+ }
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ bulk := coll.Bulk()
+ bulk.Unordered()
+
+ // There's a limit of 1000 operations per command, so
+ // this forces the more complex indexing logic to act.
+ for i := 0; i < 1010; i++ {
+ switch i {
+ case 3, 14:
+ bulk.Insert(M{"_id": "dupone"})
+ case 5, 106:
+ bulk.Update(M{"_id": i - 1}, M{"$set": M{"_id": 4}})
+ case 7, 1008:
+ bulk.Insert(M{"_id": "duptwo"})
+ default:
+ bulk.Insert(M{"_id": i})
+ }
+ }
+
+ _, err = bulk.Run()
+ ecases := err.(*mgo.BulkError).Cases()
+
+ c.Check(ecases[0].Err, ErrorMatches, ".*duplicate.*dupone.*")
+ c.Check(ecases[0].Index, Equals, 14)
+ c.Check(ecases[1].Err, ErrorMatches, ".*update.*_id.*")
+ c.Check(ecases[1].Index, Equals, 106)
+ c.Check(ecases[2].Err, ErrorMatches, ".*duplicate.*duptwo.*")
+ c.Check(ecases[2].Index, Equals, 1008)
+}
+
+func (s *S) TestBulkErrorCases_2_4(c *C) {
+ if s.versionAtLeast(2, 6) {
+ c.Skip("2.6+ has better reporting")
+ }
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ bulk := coll.Bulk()
+ bulk.Unordered()
+
+ // There's a limit of 1000 operations per command, so
+ // this forces the more complex indexing logic to act.
+ for i := 0; i < 1010; i++ {
+ switch i {
+ case 3, 14:
+ bulk.Insert(M{"_id": "dupone"})
+ case 5:
+ bulk.Update(M{"_id": i - 1}, M{"$set": M{"n": 4}})
+ case 106:
+ bulk.Update(M{"_id": i - 1}, M{"$bogus": M{"n": 4}})
+ case 7, 1008:
+ bulk.Insert(M{"_id": "duptwo"})
+ default:
+ bulk.Insert(M{"_id": i})
+ }
+ }
+
+ _, err = bulk.Run()
+ ecases := err.(*mgo.BulkError).Cases()
+
+ c.Check(ecases[0].Err, ErrorMatches, ".*duplicate.*duptwo.*")
+ c.Check(ecases[0].Index, Equals, -1)
+ c.Check(ecases[1].Err, ErrorMatches, `.*\$bogus.*`)
+ c.Check(ecases[1].Index, Equals, 106)
+}
+
+func (s *S) TestBulkErrorCasesOrdered(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ bulk := coll.Bulk()
+
+	// An ordered bulk stops at the first error, so even though two kinds
+	// of duplicates are queued only a single case is expected below.
+ for i := 0; i < 20; i++ {
+ switch i {
+ case 3, 14:
+ bulk.Insert(M{"_id": "dupone"})
+ case 7, 17:
+ bulk.Insert(M{"_id": "duptwo"})
+ default:
+ bulk.Insert(M{"_id": i})
+ }
+ }
+
+ _, err = bulk.Run()
+ ecases := err.(*mgo.BulkError).Cases()
+
+ c.Check(ecases[0].Err, ErrorMatches, ".*duplicate.*dupone.*")
+ if s.versionAtLeast(2, 6) {
+ c.Check(ecases[0].Index, Equals, 14)
+ } else {
+ c.Check(ecases[0].Index, Equals, -1)
+ }
+ c.Check(ecases, HasLen, 1)
+}
+
+func (s *S) TestBulkUpdate(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.Update(M{"n": 1}, M{"$set": M{"n": 1}})
+ bulk.Update(M{"n": 2}, M{"$set": M{"n": 20}})
+ bulk.Update(M{"n": 5}, M{"$set": M{"n": 50}}) // Won't match.
+ bulk.Update(M{"n": 1}, M{"$set": M{"n": 10}}, M{"n": 3}, M{"$set": M{"n": 30}})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r.Matched, Equals, 4)
+ if s.versionAtLeast(2, 6) {
+ c.Assert(r.Modified, Equals, 3)
+ }
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{10}, {20}, {30}})
+}
+
+func (s *S) TestBulkUpdateError(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.Update(
+ M{"n": 1}, M{"$set": M{"n": 10}},
+ M{"n": 2}, M{"$set": M{"n": 20, "_id": 20}},
+ M{"n": 3}, M{"$set": M{"n": 30}},
+ )
+ r, err := bulk.Run()
+ c.Assert(err, ErrorMatches, ".*_id.*")
+ c.Assert(r, FitsTypeOf, &mgo.BulkResult{})
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{2}, {3}, {10}})
+}
+
+func (s *S) TestBulkUpdateErrorUnordered(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.Unordered()
+ bulk.Update(
+ M{"n": 1}, M{"$set": M{"n": 10}},
+ M{"n": 2}, M{"$set": M{"n": 20, "_id": 20}},
+ M{"n": 3}, M{"$set": M{"n": 30}},
+ )
+ r, err := bulk.Run()
+ c.Assert(err, ErrorMatches, ".*_id.*")
+ c.Assert(r, FitsTypeOf, &mgo.BulkResult{})
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{2}, {10}, {30}})
+}
+
+func (s *S) TestBulkUpdateAll(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.UpdateAll(M{"n": 1}, M{"$set": M{"n": 10}})
+ bulk.UpdateAll(M{"n": 2}, M{"$set": M{"n": 2}}) // Won't change.
+ bulk.UpdateAll(M{"n": 5}, M{"$set": M{"n": 50}}) // Won't match.
+ bulk.UpdateAll(M{}, M{"$inc": M{"n": 1}}, M{"n": 11}, M{"$set": M{"n": 5}})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r.Matched, Equals, 6)
+ if s.versionAtLeast(2, 6) {
+ c.Assert(r.Modified, Equals, 5)
+ }
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{3}, {4}, {5}})
+}
+
+func (s *S) TestBulkMixedUnordered(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ // Abuse undefined behavior to ensure the desired implementation is in place.
+ bulk := coll.Bulk()
+ bulk.Unordered()
+ bulk.Insert(M{"n": 1})
+ bulk.Update(M{"n": 2}, M{"$inc": M{"n": 1}})
+ bulk.Insert(M{"n": 2})
+ bulk.Update(M{"n": 3}, M{"$inc": M{"n": 1}})
+ bulk.Update(M{"n": 1}, M{"$inc": M{"n": 1}})
+ bulk.Insert(M{"n": 3})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r.Matched, Equals, 3)
+ if s.versionAtLeast(2, 6) {
+ c.Assert(r.Modified, Equals, 3)
+ }
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{2}, {3}, {4}})
+}
+
+func (s *S) TestBulkUpsert(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.Upsert(M{"n": 2}, M{"$set": M{"n": 20}})
+ bulk.Upsert(M{"n": 4}, M{"$set": M{"n": 40}}, M{"n": 3}, M{"$set": M{"n": 30}})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r, FitsTypeOf, &mgo.BulkResult{})
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{1}, {20}, {30}, {40}})
+}
+
+func (s *S) TestBulkRemove(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3}, M{"n": 4}, M{"n": 4})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.Remove(M{"n": 1})
+ bulk.Remove(M{"n": 2}, M{"n": 4})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r.Matched, Equals, 3)
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{3}, {4}})
+}
+
+func (s *S) TestBulkRemoveAll(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3}, M{"n": 4}, M{"n": 4})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.RemoveAll(M{"n": 1})
+ bulk.RemoveAll(M{"n": 2}, M{"n": 4})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r.Matched, Equals, 4)
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{3}})
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/cluster.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/cluster.go
new file mode 100644
index 00000000000..c3bf8b01375
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/cluster.go
@@ -0,0 +1,682 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo
+
+import (
+ "errors"
+ "fmt"
+ "net"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "gopkg.in/mgo.v2/bson"
+)
+
+// ---------------------------------------------------------------------------
+// Mongo cluster encapsulation.
+//
+// A cluster enables the communication with one or more servers participating
+// in a mongo cluster. This works with individual servers, a replica set,
+// a replica pair, one or multiple mongos routers, etc.
+
+type mongoCluster struct {
+ sync.RWMutex
+ serverSynced sync.Cond
+ userSeeds []string
+ dynaSeeds []string
+ servers mongoServers
+ masters mongoServers
+ references int
+ syncing bool
+ direct bool
+ failFast bool
+ syncCount uint
+ setName string
+ cachedIndex map[string]bool
+ sync chan bool
+ dial dialer
+}
+
+func newCluster(userSeeds []string, direct, failFast bool, dial dialer, setName string) *mongoCluster {
+ cluster := &mongoCluster{
+ userSeeds: userSeeds,
+ references: 1,
+ direct: direct,
+ failFast: failFast,
+ dial: dial,
+ setName: setName,
+ }
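+	// Tie the condition to the read lock so goroutines waiting in
+	// AcquireSocket can block on Wait while holding RLock.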
+ cluster.serverSynced.L = cluster.RWMutex.RLocker()
+ cluster.sync = make(chan bool, 1)
+ stats.cluster(+1)
+ go cluster.syncServersLoop()
+ return cluster
+}
+
+// Acquire increases the reference count for the cluster.
+func (cluster *mongoCluster) Acquire() {
+ cluster.Lock()
+ cluster.references++
+ debugf("Cluster %p acquired (refs=%d)", cluster, cluster.references)
+ cluster.Unlock()
+}
+
+// Release decreases the reference count for the cluster. Once
+// it reaches zero, all servers will be closed.
+func (cluster *mongoCluster) Release() {
+ cluster.Lock()
+ if cluster.references == 0 {
+ panic("cluster.Release() with references == 0")
+ }
+ cluster.references--
+ debugf("Cluster %p released (refs=%d)", cluster, cluster.references)
+ if cluster.references == 0 {
+ for _, server := range cluster.servers.Slice() {
+ server.Close()
+ }
+ // Wake up the sync loop so it can die.
+ cluster.syncServers()
+ stats.cluster(-1)
+ }
+ cluster.Unlock()
+}
+
+func (cluster *mongoCluster) LiveServers() (servers []string) {
+ cluster.RLock()
+ for _, serv := range cluster.servers.Slice() {
+ servers = append(servers, serv.Addr)
+ }
+ cluster.RUnlock()
+ return servers
+}
+
+func (cluster *mongoCluster) removeServer(server *mongoServer) {
+ cluster.Lock()
+ cluster.masters.Remove(server)
+ other := cluster.servers.Remove(server)
+ cluster.Unlock()
+ if other != nil {
+ other.Close()
+ log("Removed server ", server.Addr, " from cluster.")
+ }
+ server.Close()
+}
+
+type isMasterResult struct {
+ IsMaster bool
+ Secondary bool
+ Primary string
+ Hosts []string
+ Passives []string
+ Tags bson.D
+ Msg string
+ SetName string `bson:"setName"`
+ MaxWireVersion int `bson:"maxWireVersion"`
+}
+
+func (cluster *mongoCluster) isMaster(socket *mongoSocket, result *isMasterResult) error {
+	// Monotonic lets it talk to a slave and still hold the socket.
+ session := newSession(Monotonic, cluster, 10*time.Second)
+ session.setSocket(socket)
+ err := session.Run("ismaster", result)
+ session.Close()
+ return err
+}
+
+type possibleTimeout interface {
+ Timeout() bool
+}
+
+var syncSocketTimeout = 5 * time.Second
+
+func (cluster *mongoCluster) syncServer(server *mongoServer) (info *mongoServerInfo, hosts []string, err error) {
+ var syncTimeout time.Duration
+ if raceDetector {
+ // This variable is only ever touched by tests.
+ globalMutex.Lock()
+ syncTimeout = syncSocketTimeout
+ globalMutex.Unlock()
+ } else {
+ syncTimeout = syncSocketTimeout
+ }
+
+ addr := server.Addr
+ log("SYNC Processing ", addr, "...")
+
+ // Retry a few times to avoid knocking a server down for a hiccup.
+ var result isMasterResult
+ var tryerr error
+ for retry := 0; ; retry++ {
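+		// Give up after three attempts, or after a single attempt when
+		// failFast is set.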
+ if retry == 3 || retry == 1 && cluster.failFast {
+ return nil, nil, tryerr
+ }
+ if retry > 0 {
+ // Don't abuse the server needlessly if there's something actually wrong.
+ if err, ok := tryerr.(possibleTimeout); ok && err.Timeout() {
+ // Give a chance for waiters to timeout as well.
+ cluster.serverSynced.Broadcast()
+ }
+ time.Sleep(syncShortDelay)
+ }
+
+ // It's not clear what would be a good timeout here. Is it
+ // better to wait longer or to retry?
+ socket, _, err := server.AcquireSocket(0, syncTimeout)
+ if err != nil {
+ tryerr = err
+ logf("SYNC Failed to get socket to %s: %v", addr, err)
+ continue
+ }
+ err = cluster.isMaster(socket, &result)
+ socket.Release()
+ if err != nil {
+ tryerr = err
+ logf("SYNC Command 'ismaster' to %s failed: %v", addr, err)
+ continue
+ }
+ debugf("SYNC Result of 'ismaster' from %s: %#v", addr, result)
+ break
+ }
+
+ if cluster.setName != "" && result.SetName != cluster.setName {
+ logf("SYNC Server %s is not a member of replica set %q", addr, cluster.setName)
+ return nil, nil, fmt.Errorf("server %s is not a member of replica set %q", addr, cluster.setName)
+ }
+
+ if result.IsMaster {
+ debugf("SYNC %s is a master.", addr)
+ if !server.info.Master {
+ // Made an incorrect assumption above, so fix stats.
+ stats.conn(-1, false)
+ stats.conn(+1, true)
+ }
+ } else if result.Secondary {
+ debugf("SYNC %s is a slave.", addr)
+ } else if cluster.direct {
+ logf("SYNC %s in unknown state. Pretending it's a slave due to direct connection.", addr)
+ } else {
+ logf("SYNC %s is neither a master nor a slave.", addr)
+ // Let stats track it as whatever was known before.
+		return nil, nil, errors.New(addr + " is neither a master nor a slave")
+ }
+
+ info = &mongoServerInfo{
+ Master: result.IsMaster,
+ Mongos: result.Msg == "isdbgrid",
+ Tags: result.Tags,
+ SetName: result.SetName,
+ MaxWireVersion: result.MaxWireVersion,
+ }
+
+ hosts = make([]string, 0, 1+len(result.Hosts)+len(result.Passives))
+ if result.Primary != "" {
+ // First in the list to speed up master discovery.
+ hosts = append(hosts, result.Primary)
+ }
+ hosts = append(hosts, result.Hosts...)
+ hosts = append(hosts, result.Passives...)
+
+ debugf("SYNC %s knows about the following peers: %#v", addr, hosts)
+ return info, hosts, nil
+}
+
+type syncKind bool
+
+const (
+ completeSync syncKind = true
+ partialSync syncKind = false
+)
+
+func (cluster *mongoCluster) addServer(server *mongoServer, info *mongoServerInfo, syncKind syncKind) {
+ cluster.Lock()
+ current := cluster.servers.Search(server.ResolvedAddr)
+ if current == nil {
+ if syncKind == partialSync {
+ cluster.Unlock()
+ server.Close()
+ log("SYNC Discarding unknown server ", server.Addr, " due to partial sync.")
+ return
+ }
+ cluster.servers.Add(server)
+ if info.Master {
+ cluster.masters.Add(server)
+ log("SYNC Adding ", server.Addr, " to cluster as a master.")
+ } else {
+ log("SYNC Adding ", server.Addr, " to cluster as a slave.")
+ }
+ } else {
+ if server != current {
+ panic("addServer attempting to add duplicated server")
+ }
+ if server.Info().Master != info.Master {
+ if info.Master {
+ log("SYNC Server ", server.Addr, " is now a master.")
+ cluster.masters.Add(server)
+ } else {
+ log("SYNC Server ", server.Addr, " is now a slave.")
+ cluster.masters.Remove(server)
+ }
+ }
+ }
+ server.SetInfo(info)
+ debugf("SYNC Broadcasting availability of server %s", server.Addr)
+ cluster.serverSynced.Broadcast()
+ cluster.Unlock()
+}
+
+func (cluster *mongoCluster) getKnownAddrs() []string {
+ cluster.RLock()
+ max := len(cluster.userSeeds) + len(cluster.dynaSeeds) + cluster.servers.Len()
+ seen := make(map[string]bool, max)
+ known := make([]string, 0, max)
+
+ add := func(addr string) {
+ if _, found := seen[addr]; !found {
+ seen[addr] = true
+ known = append(known, addr)
+ }
+ }
+
+ for _, addr := range cluster.userSeeds {
+ add(addr)
+ }
+ for _, addr := range cluster.dynaSeeds {
+ add(addr)
+ }
+ for _, serv := range cluster.servers.Slice() {
+ add(serv.Addr)
+ }
+ cluster.RUnlock()
+
+ return known
+}
+
+// syncServers injects a value into the cluster.sync channel to force
+// an iteration of the syncServersLoop function.
+func (cluster *mongoCluster) syncServers() {
+ select {
+ case cluster.sync <- true:
+ default:
+ }
+}
+
+// How long to wait for a checkup of the cluster topology if nothing
+// else kicks a synchronization before that.
+const syncServersDelay = 30 * time.Second
+const syncShortDelay = 500 * time.Millisecond
+
+// syncServersLoop loops while the cluster is alive to keep its idea of
+// the server topology up-to-date. It must be called just once from
+// newCluster. The loop iterates once syncServersDelay has passed, or
+// if somebody injects a value into the cluster.sync channel to force a
+// synchronization. A loop iteration will contact all servers in
+// parallel, ask them about known peers and their own role within the
+// cluster, and then attempt to do the same with all the peers
+// retrieved.
+func (cluster *mongoCluster) syncServersLoop() {
+ for {
+ debugf("SYNC Cluster %p is starting a sync loop iteration.", cluster)
+
+ cluster.Lock()
+ if cluster.references == 0 {
+ cluster.Unlock()
+ break
+ }
+ cluster.references++ // Keep alive while syncing.
+ direct := cluster.direct
+ cluster.Unlock()
+
+ cluster.syncServersIteration(direct)
+
+ // We just synchronized, so consume any outstanding requests.
+ select {
+ case <-cluster.sync:
+ default:
+ }
+
+ cluster.Release()
+
+ // Hold off before allowing another sync. No point in
+ // burning CPU looking for down servers.
+ if !cluster.failFast {
+ time.Sleep(syncShortDelay)
+ }
+
+ cluster.Lock()
+ if cluster.references == 0 {
+ cluster.Unlock()
+ break
+ }
+ cluster.syncCount++
+ // Poke all waiters so they have a chance to timeout or
+ // restart syncing if they wish to.
+ cluster.serverSynced.Broadcast()
+ // Check if we have to restart immediately either way.
+ restart := !direct && cluster.masters.Empty() || cluster.servers.Empty()
+ cluster.Unlock()
+
+ if restart {
+ log("SYNC No masters found. Will synchronize again.")
+ time.Sleep(syncShortDelay)
+ continue
+ }
+
+ debugf("SYNC Cluster %p waiting for next requested or scheduled sync.", cluster)
+
+ // Hold off until somebody explicitly requests a synchronization
+ // or it's time to check for a cluster topology change again.
+ select {
+ case <-cluster.sync:
+ case <-time.After(syncServersDelay):
+ }
+ }
+ debugf("SYNC Cluster %p is stopping its sync loop.", cluster)
+}
+
+func (cluster *mongoCluster) server(addr string, tcpaddr *net.TCPAddr) *mongoServer {
+ cluster.RLock()
+ server := cluster.servers.Search(tcpaddr.String())
+ cluster.RUnlock()
+ if server != nil {
+ return server
+ }
+ return newServer(addr, tcpaddr, cluster.sync, cluster.dial)
+}
+
+func resolveAddr(addr string) (*net.TCPAddr, error) {
+ // Simple cases that do not need actual resolution. Works with IPv4 and v6.
+ if host, port, err := net.SplitHostPort(addr); err == nil {
+ if port, _ := strconv.Atoi(port); port > 0 {
+ zone := ""
+ if i := strings.LastIndex(host, "%"); i >= 0 {
+ zone = host[i+1:]
+ host = host[:i]
+ }
+ ip := net.ParseIP(host)
+ if ip != nil {
+ return &net.TCPAddr{IP: ip, Port: port, Zone: zone}, nil
+ }
+ }
+ }
+
+ // Attempt to resolve IPv4 and v6 concurrently.
+ addrChan := make(chan *net.TCPAddr, 2)
+ for _, network := range []string{"udp4", "udp6"} {
+ network := network
+ go func() {
+ // The unfortunate UDP dialing hack allows having a timeout on address resolution.
+ conn, err := net.DialTimeout(network, addr, 10*time.Second)
+ if err != nil {
+ addrChan <- nil
+ } else {
+ addrChan <- (*net.TCPAddr)(conn.RemoteAddr().(*net.UDPAddr))
+ conn.Close()
+ }
+ }()
+ }
+
+ // Wait for the result of IPv4 and v6 resolution. Use IPv4 if available.
+ tcpaddr := <-addrChan
+ if tcpaddr == nil || len(tcpaddr.IP) != 4 {
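+		// The first reply either failed or is not a 4-byte IPv4 address;
+		// give the other lookup a brief window to deliver an IPv4 result.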
+ var timeout <-chan time.Time
+ if tcpaddr != nil {
+ // Don't wait too long if an IPv6 address is known.
+ timeout = time.After(50 * time.Millisecond)
+ }
+ select {
+ case <-timeout:
+ case tcpaddr2 := <-addrChan:
+ if tcpaddr == nil || tcpaddr2 != nil {
+ // It's an IPv4 address or the only known address. Use it.
+ tcpaddr = tcpaddr2
+ }
+ }
+ }
+
+ if tcpaddr == nil {
+ log("SYNC Failed to resolve server address: ", addr)
+ return nil, errors.New("failed to resolve server address: " + addr)
+ }
+ if tcpaddr.String() != addr {
+ debug("SYNC Address ", addr, " resolved as ", tcpaddr.String())
+ }
+ return tcpaddr, nil
+}
+
+type pendingAdd struct {
+ server *mongoServer
+ info *mongoServerInfo
+}
+
+func (cluster *mongoCluster) syncServersIteration(direct bool) {
+ log("SYNC Starting full topology synchronization...")
+
+ var wg sync.WaitGroup
+ var m sync.Mutex
+ notYetAdded := make(map[string]pendingAdd)
+ addIfFound := make(map[string]bool)
+ seen := make(map[string]bool)
+ syncKind := partialSync
+
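+	// notYetAdded holds servers that replied but have not yet been vouched
+	// for by a master; addIfFound marks addresses a master has listed, so
+	// they are added as soon as their own sync completes.
+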
+ var spawnSync func(addr string, byMaster bool)
+ spawnSync = func(addr string, byMaster bool) {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+
+ tcpaddr, err := resolveAddr(addr)
+ if err != nil {
+ log("SYNC Failed to start sync of ", addr, ": ", err.Error())
+ return
+ }
+ resolvedAddr := tcpaddr.String()
+
+ m.Lock()
+ if byMaster {
+ if pending, ok := notYetAdded[resolvedAddr]; ok {
+ delete(notYetAdded, resolvedAddr)
+ m.Unlock()
+ cluster.addServer(pending.server, pending.info, completeSync)
+ return
+ }
+ addIfFound[resolvedAddr] = true
+ }
+ if seen[resolvedAddr] {
+ m.Unlock()
+ return
+ }
+ seen[resolvedAddr] = true
+ m.Unlock()
+
+ server := cluster.server(addr, tcpaddr)
+ info, hosts, err := cluster.syncServer(server)
+ if err != nil {
+ cluster.removeServer(server)
+ return
+ }
+
+ m.Lock()
+ add := direct || info.Master || addIfFound[resolvedAddr]
+ if add {
+ syncKind = completeSync
+ } else {
+ notYetAdded[resolvedAddr] = pendingAdd{server, info}
+ }
+ m.Unlock()
+ if add {
+ cluster.addServer(server, info, completeSync)
+ }
+ if !direct {
+ for _, addr := range hosts {
+ spawnSync(addr, info.Master)
+ }
+ }
+ }()
+ }
+
+ knownAddrs := cluster.getKnownAddrs()
+ for _, addr := range knownAddrs {
+ spawnSync(addr, false)
+ }
+ wg.Wait()
+
+ if syncKind == completeSync {
+ logf("SYNC Synchronization was complete (got data from primary).")
+ for _, pending := range notYetAdded {
+ cluster.removeServer(pending.server)
+ }
+ } else {
+ logf("SYNC Synchronization was partial (cannot talk to primary).")
+ for _, pending := range notYetAdded {
+ cluster.addServer(pending.server, pending.info, partialSync)
+ }
+ }
+
+ cluster.Lock()
+ mastersLen := cluster.masters.Len()
+ logf("SYNC Synchronization completed: %d master(s) and %d slave(s) alive.", mastersLen, cluster.servers.Len()-mastersLen)
+
+ // Update dynamic seeds, but only if we have any good servers. Otherwise,
+ // leave them alone for better chances of a successful sync in the future.
+ if syncKind == completeSync {
+ dynaSeeds := make([]string, cluster.servers.Len())
+ for i, server := range cluster.servers.Slice() {
+ dynaSeeds[i] = server.Addr
+ }
+ cluster.dynaSeeds = dynaSeeds
+ debugf("SYNC New dynamic seeds: %#v\n", dynaSeeds)
+ }
+ cluster.Unlock()
+}
+
+// AcquireSocket returns a socket to a server in the cluster. If slaveOk is
+// true, it will attempt to return a socket to a slave server. If it is
+// false, the socket will necessarily be to a master server.
+func (cluster *mongoCluster) AcquireSocket(mode Mode, slaveOk bool, syncTimeout time.Duration, socketTimeout time.Duration, serverTags []bson.D, poolLimit int) (s *mongoSocket, err error) {
+ var started time.Time
+ var syncCount uint
+ warnedLimit := false
+ for {
+ cluster.RLock()
+ for {
+ mastersLen := cluster.masters.Len()
+ slavesLen := cluster.servers.Len() - mastersLen
+ debugf("Cluster has %d known masters and %d known slaves.", mastersLen, slavesLen)
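+			// Proceed once a suitable kind of server is known: a master
+			// unless the caller insists on a secondary, or a slave when
+			// slaveOk is set; mongos with Secondary mode is handled below.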
+ if mastersLen > 0 && !(slaveOk && mode == Secondary) || slavesLen > 0 && slaveOk {
+ break
+ }
+ if mastersLen > 0 && mode == Secondary && cluster.masters.HasMongos() {
+ break
+ }
+ if started.IsZero() {
+ // Initialize after fast path above.
+ started = time.Now()
+ syncCount = cluster.syncCount
+ } else if syncTimeout != 0 && started.Before(time.Now().Add(-syncTimeout)) || cluster.failFast && cluster.syncCount != syncCount {
+ cluster.RUnlock()
+ return nil, errors.New("no reachable servers")
+ }
+ log("Waiting for servers to synchronize...")
+ cluster.syncServers()
+
+ // Remember: this will release and reacquire the lock.
+ cluster.serverSynced.Wait()
+ }
+
+ var server *mongoServer
+ if slaveOk {
+ server = cluster.servers.BestFit(mode, serverTags)
+ } else {
+ server = cluster.masters.BestFit(mode, nil)
+ }
+ cluster.RUnlock()
+
+ if server == nil {
+ // Must have failed the requested tags. Sleep to avoid spinning.
+ time.Sleep(1e8)
+ continue
+ }
+
+ s, abended, err := server.AcquireSocket(poolLimit, socketTimeout)
+ if err == errPoolLimit {
+ if !warnedLimit {
+ warnedLimit = true
+ log("WARNING: Per-server connection limit reached.")
+ }
+ time.Sleep(100 * time.Millisecond)
+ continue
+ }
+ if err != nil {
+ cluster.removeServer(server)
+ cluster.syncServers()
+ continue
+ }
+ if abended && !slaveOk {
+ var result isMasterResult
+ err := cluster.isMaster(s, &result)
+ if err != nil || !result.IsMaster {
+ logf("Cannot confirm server %s as master (%v)", server.Addr, err)
+ s.Release()
+ cluster.syncServers()
+ time.Sleep(100 * time.Millisecond)
+ continue
+ }
+ }
+ return s, nil
+ }
+ panic("unreached")
+}
+
+func (cluster *mongoCluster) CacheIndex(cacheKey string, exists bool) {
+ cluster.Lock()
+ if cluster.cachedIndex == nil {
+ cluster.cachedIndex = make(map[string]bool)
+ }
+ if exists {
+ cluster.cachedIndex[cacheKey] = true
+ } else {
+ delete(cluster.cachedIndex, cacheKey)
+ }
+ cluster.Unlock()
+}
+
+func (cluster *mongoCluster) HasCachedIndex(cacheKey string) (result bool) {
+ cluster.RLock()
+ if cluster.cachedIndex != nil {
+ result = cluster.cachedIndex[cacheKey]
+ }
+ cluster.RUnlock()
+ return
+}
+
+func (cluster *mongoCluster) ResetIndexCache() {
+ cluster.Lock()
+ cluster.cachedIndex = make(map[string]bool)
+ cluster.Unlock()
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/cluster_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/cluster_test.go
new file mode 100644
index 00000000000..54ec8676226
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/cluster_test.go
@@ -0,0 +1,2090 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo_test
+
+import (
+ "fmt"
+ "io"
+ "net"
+ "strings"
+ "sync"
+ "time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/bson"
+)
+
+func (s *S) TestNewSession(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Do a dummy operation to wait for connection.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+
+ // Tweak safety and query settings to ensure other has copied those.
+ session.SetSafe(nil)
+ session.SetBatch(-1)
+ other := session.New()
+ defer other.Close()
+ session.SetSafe(&mgo.Safe{})
+
+ // Clone was copied while session was unsafe, so no errors.
+ otherColl := other.DB("mydb").C("mycoll")
+ err = otherColl.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+
+ // Original session was made safe again.
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, NotNil)
+
+ // With New(), each session has its own socket now.
+ stats := mgo.GetStats()
+ c.Assert(stats.MasterConns, Equals, 2)
+ c.Assert(stats.SocketsInUse, Equals, 2)
+
+ // Ensure query parameters were cloned.
+ err = otherColl.Insert(M{"_id": 2})
+ c.Assert(err, IsNil)
+
+ // Ping the database to ensure the nonce has been received already.
+ c.Assert(other.Ping(), IsNil)
+
+ mgo.ResetStats()
+
+ iter := otherColl.Find(M{}).Iter()
+ c.Assert(err, IsNil)
+
+ m := M{}
+ ok := iter.Next(m)
+ c.Assert(ok, Equals, true)
+ err = iter.Close()
+ c.Assert(err, IsNil)
+
+ // If Batch(-1) is in effect, a single document must have been received.
+ stats = mgo.GetStats()
+ c.Assert(stats.ReceivedDocs, Equals, 1)
+}
+
+func (s *S) TestCloneSession(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Do a dummy operation to wait for connection.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+
+ // Tweak safety and query settings to ensure clone is copying those.
+ session.SetSafe(nil)
+ session.SetBatch(-1)
+ clone := session.Clone()
+ defer clone.Close()
+ session.SetSafe(&mgo.Safe{})
+
+ // Clone was copied while session was unsafe, so no errors.
+ cloneColl := clone.DB("mydb").C("mycoll")
+ err = cloneColl.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+
+ // Original session was made safe again.
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, NotNil)
+
+ // With Clone(), same socket is shared between sessions now.
+ stats := mgo.GetStats()
+ c.Assert(stats.SocketsInUse, Equals, 1)
+ c.Assert(stats.SocketRefs, Equals, 2)
+
+ // Refreshing one of them should let the original socket go,
+ // while preserving the safety settings.
+ clone.Refresh()
+ err = cloneColl.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+
+ // Must have used another connection now.
+ stats = mgo.GetStats()
+ c.Assert(stats.SocketsInUse, Equals, 2)
+ c.Assert(stats.SocketRefs, Equals, 2)
+
+ // Ensure query parameters were cloned.
+ err = cloneColl.Insert(M{"_id": 2})
+ c.Assert(err, IsNil)
+
+ // Ping the database to ensure the nonce has been received already.
+ c.Assert(clone.Ping(), IsNil)
+
+ mgo.ResetStats()
+
+ iter := cloneColl.Find(M{}).Iter()
+ c.Assert(err, IsNil)
+
+ m := M{}
+ ok := iter.Next(m)
+ c.Assert(ok, Equals, true)
+ err = iter.Close()
+ c.Assert(err, IsNil)
+
+ // If Batch(-1) is in effect, a single document must have been received.
+ stats = mgo.GetStats()
+ c.Assert(stats.ReceivedDocs, Equals, 1)
+}
+
+func (s *S) TestModeStrong(c *C) {
+ session, err := mgo.Dial("localhost:40012")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Monotonic, false)
+ session.SetMode(mgo.Strong, false)
+
+ c.Assert(session.Mode(), Equals, mgo.Strong)
+
+ result := M{}
+ cmd := session.DB("admin").C("$cmd")
+ err = cmd.Find(M{"ismaster": 1}).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result["ismaster"], Equals, true)
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ // Wait since the sync also uses sockets.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for cluster sync to finish...")
+ time.Sleep(5e8)
+ }
+
+ stats := mgo.GetStats()
+ c.Assert(stats.MasterConns, Equals, 1)
+ c.Assert(stats.SlaveConns, Equals, 2)
+ c.Assert(stats.SocketsInUse, Equals, 1)
+
+ session.SetMode(mgo.Strong, true)
+
+ stats = mgo.GetStats()
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+func (s *S) TestModeMonotonic(c *C) {
+ // Must necessarily connect to a slave, otherwise the
+ // master connection will be available first.
+ session, err := mgo.Dial("localhost:40012")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Monotonic, false)
+
+ c.Assert(session.Mode(), Equals, mgo.Monotonic)
+
+ var result struct{ IsMaster bool }
+ cmd := session.DB("admin").C("$cmd")
+ err = cmd.Find(M{"ismaster": 1}).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.IsMaster, Equals, false)
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ err = cmd.Find(M{"ismaster": 1}).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.IsMaster, Equals, true)
+
+ // Wait since the sync also uses sockets.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for cluster sync to finish...")
+ time.Sleep(5e8)
+ }
+
+ stats := mgo.GetStats()
+ c.Assert(stats.MasterConns, Equals, 1)
+ c.Assert(stats.SlaveConns, Equals, 2)
+ c.Assert(stats.SocketsInUse, Equals, 2)
+
+ session.SetMode(mgo.Monotonic, true)
+
+ stats = mgo.GetStats()
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+func (s *S) TestModeMonotonicAfterStrong(c *C) {
+ // Test that a strong session shifting to a monotonic
+ // one preserves the socket untouched.
+
+ session, err := mgo.Dial("localhost:40012")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Insert something to force a connection to the master.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ session.SetMode(mgo.Monotonic, false)
+
+ // Wait since the sync also uses sockets.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for cluster sync to finish...")
+ time.Sleep(5e8)
+ }
+
+ // Master socket should still be reserved.
+ stats := mgo.GetStats()
+ c.Assert(stats.SocketsInUse, Equals, 1)
+
+ // Confirm it's the master even though it's Monotonic by now.
+ result := M{}
+ cmd := session.DB("admin").C("$cmd")
+ err = cmd.Find(M{"ismaster": 1}).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result["ismaster"], Equals, true)
+}
+
+func (s *S) TestModeStrongAfterMonotonic(c *C) {
+ // Test that shifting from Monotonic to Strong while
+ // using a slave socket will keep the socket reserved
+ // until the master socket is necessary, so that no
+ // switch over occurs unless it's actually necessary.
+
+ // Must necessarily connect to a slave, otherwise the
+ // master connection will be available first.
+ session, err := mgo.Dial("localhost:40012")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Monotonic, false)
+
+ // Ensure we're talking to a slave, and reserve the socket.
+ result := M{}
+ err = session.Run("ismaster", &result)
+ c.Assert(err, IsNil)
+ c.Assert(result["ismaster"], Equals, false)
+
+ // Switch to a Strong session.
+ session.SetMode(mgo.Strong, false)
+
+ // Wait since the sync also uses sockets.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for cluster sync to finish...")
+ time.Sleep(5e8)
+ }
+
+ // Slave socket should still be reserved.
+ stats := mgo.GetStats()
+ c.Assert(stats.SocketsInUse, Equals, 1)
+
+ // But any operation will switch it to the master.
+ result = M{}
+ err = session.Run("ismaster", &result)
+ c.Assert(err, IsNil)
+ c.Assert(result["ismaster"], Equals, true)
+}
+
+func (s *S) TestModeMonotonicWriteOnIteration(c *C) {
+ // Must necessarily connect to a slave, otherwise the
+ // master connection will be available first.
+ session, err := mgo.Dial("localhost:40012")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Monotonic, false)
+
+ c.Assert(session.Mode(), Equals, mgo.Monotonic)
+
+ coll1 := session.DB("mydb").C("mycoll1")
+ coll2 := session.DB("mydb").C("mycoll2")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err := coll1.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ // Release master so we can grab a slave again.
+ session.Refresh()
+
+ // Wait until synchronization is done.
+ for {
+ n, err := coll1.Count()
+ c.Assert(err, IsNil)
+ if n == len(ns) {
+ break
+ }
+ }
+
+ iter := coll1.Find(nil).Batch(2).Iter()
+ i := 0
+ m := M{}
+ for iter.Next(&m) {
+ i++
+ if i > 3 {
+ err := coll2.Insert(M{"n": 47 + i})
+ c.Assert(err, IsNil)
+ }
+ }
+ c.Assert(i, Equals, len(ns))
+}
+
+func (s *S) TestModeEventual(c *C) {
+ // Must necessarily connect to a slave, otherwise the
+ // master connection will be available first.
+ session, err := mgo.Dial("localhost:40012")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Eventual, false)
+
+ c.Assert(session.Mode(), Equals, mgo.Eventual)
+
+ result := M{}
+ err = session.Run("ismaster", &result)
+ c.Assert(err, IsNil)
+ c.Assert(result["ismaster"], Equals, false)
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ result = M{}
+ err = session.Run("ismaster", &result)
+ c.Assert(err, IsNil)
+ c.Assert(result["ismaster"], Equals, false)
+
+ // Wait since the sync also uses sockets.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for cluster sync to finish...")
+ time.Sleep(5e8)
+ }
+
+ stats := mgo.GetStats()
+ c.Assert(stats.MasterConns, Equals, 1)
+ c.Assert(stats.SlaveConns, Equals, 2)
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+func (s *S) TestModeEventualAfterStrong(c *C) {
+ // Test that a strong session shifting to an eventual
+ // one preserves the socket untouched.
+
+ session, err := mgo.Dial("localhost:40012")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Insert something to force a connection to the master.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ session.SetMode(mgo.Eventual, false)
+
+ // Wait since the sync also uses sockets.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for cluster sync to finish...")
+ time.Sleep(5e8)
+ }
+
+ // Master socket should still be reserved.
+ stats := mgo.GetStats()
+ c.Assert(stats.SocketsInUse, Equals, 1)
+
+ // Confirm it's the master even though it's Eventual by now.
+ result := M{}
+ cmd := session.DB("admin").C("$cmd")
+ err = cmd.Find(M{"ismaster": 1}).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result["ismaster"], Equals, true)
+
+ session.SetMode(mgo.Eventual, true)
+
+ stats = mgo.GetStats()
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+func (s *S) TestModeStrongFallover(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40021")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // With strong consistency, this will open a socket to the master.
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+
+ // Kill the master.
+ host := result.Host
+ s.Stop(host)
+
+ // This must fail, since the connection was broken.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, Equals, io.EOF)
+
+ // With strong consistency, it fails again until reset.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, Equals, io.EOF)
+
+ session.Refresh()
+
+ // Now we should be able to talk to the new master.
+ // Increase the timeout since this may take quite a while.
+ session.SetSyncTimeout(3 * time.Minute)
+
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Host, Not(Equals), host)
+
+ // Insert some data to confirm it's indeed a master.
+ err = session.DB("mydb").C("mycoll").Insert(M{"n": 42})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestModePrimaryHiccup(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40021")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // With strong consistency, this will open a socket to the master.
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+
+	// Establish a few extra sessions to create spare sockets to
+	// the master. This slightly increases the chances of getting
+	// an incorrect cached socket.
+ var sessions []*mgo.Session
+ for i := 0; i < 20; i++ {
+ sessions = append(sessions, session.Copy())
+ err = sessions[len(sessions)-1].Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ }
+ for i := range sessions {
+ sessions[i].Close()
+ }
+
+	// Kill the master, but bring it back immediately.
+ host := result.Host
+ s.Stop(host)
+ s.StartAll()
+
+ // This must fail, since the connection was broken.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, Equals, io.EOF)
+
+ // With strong consistency, it fails again until reset.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, Equals, io.EOF)
+
+ session.Refresh()
+
+ // Now we should be able to talk to the new master.
+ // Increase the timeout since this may take quite a while.
+ session.SetSyncTimeout(3 * time.Minute)
+
+ // Insert some data to confirm it's indeed a master.
+ err = session.DB("mydb").C("mycoll").Insert(M{"n": 42})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestModeMonotonicFallover(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40021")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Monotonic, true)
+
+ // Insert something to force a switch to the master.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ // Wait a bit for this to be synchronized to slaves.
+ time.Sleep(3 * time.Second)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+
+ // Kill the master.
+ host := result.Host
+ s.Stop(host)
+
+ // This must fail, since the connection was broken.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, Equals, io.EOF)
+
+ // With monotonic consistency, it fails again until reset.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, Equals, io.EOF)
+
+ session.Refresh()
+
+ // Now we should be able to talk to the new master.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Host, Not(Equals), host)
+}
+
+func (s *S) TestModeMonotonicWithSlaveFallover(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40021")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ ssresult := &struct{ Host string }{}
+ imresult := &struct{ IsMaster bool }{}
+
+ // Figure the master while still using the strong session.
+ err = session.Run("serverStatus", ssresult)
+ c.Assert(err, IsNil)
+ err = session.Run("isMaster", imresult)
+ c.Assert(err, IsNil)
+ master := ssresult.Host
+ c.Assert(imresult.IsMaster, Equals, true, Commentf("%s is not the master", master))
+
+ // Create new monotonic session with an explicit address to ensure
+ // a slave is synchronized before the master, otherwise a connection
+ // with the master may be used below for lack of other options.
+ var addr string
+ switch {
+ case strings.HasSuffix(ssresult.Host, ":40021"):
+ addr = "localhost:40022"
+ case strings.HasSuffix(ssresult.Host, ":40022"):
+ addr = "localhost:40021"
+ case strings.HasSuffix(ssresult.Host, ":40023"):
+ addr = "localhost:40021"
+ default:
+ c.Fatal("Unknown host: ", ssresult.Host)
+ }
+
+ session, err = mgo.Dial(addr)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Monotonic, true)
+
+ // Check the address of the socket associated with the monotonic session.
+ c.Log("Running serverStatus and isMaster with monotonic session")
+ err = session.Run("serverStatus", ssresult)
+ c.Assert(err, IsNil)
+ err = session.Run("isMaster", imresult)
+ c.Assert(err, IsNil)
+ slave := ssresult.Host
+ c.Assert(imresult.IsMaster, Equals, false, Commentf("%s is not a slave", slave))
+
+ c.Assert(master, Not(Equals), slave)
+
+ // Kill the master.
+ s.Stop(master)
+
+ // Session must still be good, since we were talking to a slave.
+ err = session.Run("serverStatus", ssresult)
+ c.Assert(err, IsNil)
+
+ c.Assert(ssresult.Host, Equals, slave,
+ Commentf("Monotonic session moved from %s to %s", slave, ssresult.Host))
+
+	// If we try to insert something, it will have to wait until the new
+	// master is available, move the connection over, and then work correctly.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ // Must now be talking to the new master.
+ err = session.Run("serverStatus", ssresult)
+ c.Assert(err, IsNil)
+ err = session.Run("isMaster", imresult)
+ c.Assert(err, IsNil)
+ c.Assert(imresult.IsMaster, Equals, true, Commentf("%s is not the master", master))
+
+ // ... which is not the old one, since it's still dead.
+ c.Assert(ssresult.Host, Not(Equals), master)
+}
+
+func (s *S) TestModeEventualFallover(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40021")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ master := result.Host
+
+ session.SetMode(mgo.Eventual, true)
+
+ // Should connect to the master when needed.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ // Wait a bit for this to be synchronized to slaves.
+ time.Sleep(3 * time.Second)
+
+ // Kill the master.
+ s.Stop(master)
+
+ // Should still work, with the new master now.
+ coll = session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Host, Not(Equals), master)
+}
+
+func (s *S) TestModeSecondaryJustPrimary(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Secondary, true)
+
+ err = session.Ping()
+ c.Assert(err, ErrorMatches, "no reachable servers")
+}
+
+func (s *S) TestModeSecondaryPreferredJustPrimary(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.SecondaryPreferred, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestModeSecondaryPreferredFallover(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Ensure secondaries are available for being picked up.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for cluster sync to finish...")
+ time.Sleep(5e8)
+ }
+
+ session.SetMode(mgo.SecondaryPreferred, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Not(Equals), "rs1a")
+ secondary := result.Host
+
+ // Should connect to the primary when needed.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ // Wait a bit for this to be synchronized to slaves.
+ time.Sleep(3 * time.Second)
+
+ // Kill the primary.
+ s.Stop("localhost:40011")
+
+ // It can still talk to the selected secondary.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Host, Equals, secondary)
+
+ // But cannot speak to the primary until reset.
+ coll = session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, Equals, io.EOF)
+
+ session.Refresh()
+
+ // Can still talk to a secondary.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Not(Equals), "rs1a")
+
+ s.StartAll()
+
+ // Should now be able to talk to the primary again.
+ coll = session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestModePrimaryPreferredFallover(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.PrimaryPreferred, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Equals, "rs1a")
+
+ // Kill the primary.
+ s.Stop("localhost:40011")
+
+ // Should now fail as there was a primary socket in use already.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, Equals, io.EOF)
+
+ // Refresh so the reserved primary socket goes away.
+ session.Refresh()
+
+ // Should be able to talk to the secondary.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+
+ s.StartAll()
+
+ // Should wait for the new primary to become available.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ // And should use the new primary in general, as it is preferred.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Equals, "rs1a")
+}
+
+func (s *S) TestModePrimaryFallover(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetSyncTimeout(3 * time.Second)
+
+ session.SetMode(mgo.Primary, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Equals, "rs1a")
+
+ // Kill the primary.
+ s.Stop("localhost:40011")
+
+ session.Refresh()
+
+ err = session.Ping()
+ c.Assert(err, ErrorMatches, "no reachable servers")
+}
+
+func (s *S) TestModeSecondary(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Secondary, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Not(Equals), "rs1a")
+ secondary := result.Host
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Host, Equals, secondary)
+}
+
+func (s *S) TestPreserveSocketCountOnSync(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ stats := mgo.GetStats()
+ for stats.SocketsAlive != 3 {
+ c.Logf("Waiting for all connections to be established (sockets alive currently %d)...", stats.SocketsAlive)
+ stats = mgo.GetStats()
+ time.Sleep(5e8)
+ }
+
+ c.Assert(stats.SocketsAlive, Equals, 3)
+
+ // Kill the master (with rs1, 'a' is always the master).
+ s.Stop("localhost:40011")
+
+ // Wait for the logic to run for a bit and bring it back.
+ startedAll := make(chan bool)
+ go func() {
+ time.Sleep(5e9)
+ s.StartAll()
+ startedAll <- true
+ }()
+
+ // Do not allow the test to return before the goroutine above is done.
+ defer func() {
+ <-startedAll
+ }()
+
+ // Do an action to kick the resync logic in, and also to
+ // wait until the cluster recognizes the server is back.
+ result := struct{ Ok bool }{}
+ err = session.Run("getLastError", &result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Ok, Equals, true)
+
+ for i := 0; i != 20; i++ {
+ stats = mgo.GetStats()
+ if stats.SocketsAlive == 3 {
+ break
+ }
+ c.Logf("Waiting for 3 sockets alive, have %d", stats.SocketsAlive)
+ time.Sleep(5e8)
+ }
+
+ // Ensure the number of sockets is preserved after syncing.
+ stats = mgo.GetStats()
+ c.Assert(stats.SocketsAlive, Equals, 3)
+ c.Assert(stats.SocketsInUse, Equals, 1)
+ c.Assert(stats.SocketRefs, Equals, 1)
+}
+
+// Connect to the master of a deployment with a single server,
+// run an insert, and then ensure the insert worked and that a
+// single connection was established.
+func (s *S) TestTopologySyncWithSingleMaster(c *C) {
+ // Use hostname here rather than IP, to make things trickier.
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1, "b": 2})
+ c.Assert(err, IsNil)
+
+ // One connection used for discovery. Master socket recycled for
+ // insert. Socket is reserved after insert.
+ stats := mgo.GetStats()
+ c.Assert(stats.MasterConns, Equals, 1)
+ c.Assert(stats.SlaveConns, Equals, 0)
+ c.Assert(stats.SocketsInUse, Equals, 1)
+
+ // Refresh session and socket must be released.
+ session.Refresh()
+ stats = mgo.GetStats()
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+func (s *S) TestTopologySyncWithSlaveSeed(c *C) {
+	// That's supposed to be a slave. Discovery must run
+	// and find the master for the insert to succeed.
+ session, err := mgo.Dial("localhost:40012")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ coll.Insert(M{"a": 1, "b": 2})
+
+ result := struct{ Ok bool }{}
+ err = session.Run("getLastError", &result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Ok, Equals, true)
+
+ // One connection to each during discovery. Master
+ // socket recycled for insert.
+ stats := mgo.GetStats()
+ c.Assert(stats.MasterConns, Equals, 1)
+ c.Assert(stats.SlaveConns, Equals, 2)
+
+ // Only one socket reference alive, in the master socket owned
+ // by the above session.
+ c.Assert(stats.SocketsInUse, Equals, 1)
+
+ // Refresh it, and it must be gone.
+ session.Refresh()
+ stats = mgo.GetStats()
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+func (s *S) TestSyncTimeout(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ s.Stop("localhost:40001")
+
+ timeout := 3 * time.Second
+ session.SetSyncTimeout(timeout)
+ started := time.Now()
+
+ // Do something.
+ result := struct{ Ok bool }{}
+ err = session.Run("getLastError", &result)
+ c.Assert(err, ErrorMatches, "no reachable servers")
+ c.Assert(started.Before(time.Now().Add(-timeout)), Equals, true)
+ c.Assert(started.After(time.Now().Add(-timeout*2)), Equals, true)
+}
+
+func (s *S) TestDialWithTimeout(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ timeout := 2 * time.Second
+ started := time.Now()
+
+ // 40009 isn't used by the test servers.
+ session, err := mgo.DialWithTimeout("localhost:40009", timeout)
+ if session != nil {
+ session.Close()
+ }
+ c.Assert(err, ErrorMatches, "no reachable servers")
+ c.Assert(session, IsNil)
+ c.Assert(started.Before(time.Now().Add(-timeout)), Equals, true)
+ c.Assert(started.After(time.Now().Add(-timeout*2)), Equals, true)
+}
+
+func (s *S) TestSocketTimeout(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ s.Freeze("localhost:40001")
+
+ timeout := 3 * time.Second
+ session.SetSocketTimeout(timeout)
+ started := time.Now()
+
+ // Do something.
+ result := struct{ Ok bool }{}
+ err = session.Run("getLastError", &result)
+ c.Assert(err, ErrorMatches, ".*: i/o timeout")
+ c.Assert(started.Before(time.Now().Add(-timeout)), Equals, true)
+ c.Assert(started.After(time.Now().Add(-timeout*2)), Equals, true)
+}
+
+func (s *S) TestSocketTimeoutOnDial(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ timeout := 1 * time.Second
+
+ defer mgo.HackSyncSocketTimeout(timeout)()
+
+ s.Freeze("localhost:40001")
+
+ started := time.Now()
+
+ session, err := mgo.DialWithTimeout("localhost:40001", timeout)
+ c.Assert(err, ErrorMatches, "no reachable servers")
+ c.Assert(session, IsNil)
+
+ c.Assert(started.Before(time.Now().Add(-timeout)), Equals, true)
+ c.Assert(started.After(time.Now().Add(-20*time.Second)), Equals, true)
+}
+
+func (s *S) TestSocketTimeoutOnInactiveSocket(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ timeout := 2 * time.Second
+ session.SetSocketTimeout(timeout)
+
+ // Do something that relies on the timeout and works.
+ c.Assert(session.Ping(), IsNil)
+
+ // Freeze and wait for the timeout to go by.
+ s.Freeze("localhost:40001")
+ time.Sleep(timeout + 500*time.Millisecond)
+ s.Thaw("localhost:40001")
+
+ // Do something again. The timeout above should not have killed
+ // the socket as there was nothing to be done.
+ c.Assert(session.Ping(), IsNil)
+}
+
+func (s *S) TestDialWithReplicaSetName(c *C) {
+ seedLists := [][]string{
+ // rs1 primary and rs2 primary
+ []string{"localhost:40011", "localhost:40021"},
+ // rs1 primary and rs2 secondary
+ []string{"localhost:40011", "localhost:40022"},
+ // rs1 secondary and rs2 primary
+ []string{"localhost:40012", "localhost:40021"},
+ // rs1 secondary and rs2 secondary
+ []string{"localhost:40012", "localhost:40022"},
+ }
+
+ rs2Members := []string{":40021", ":40022", ":40023"}
+
+ verifySyncedServers := func(session *mgo.Session, numServers int) {
+ // wait for the server(s) to be synced
+ for len(session.LiveServers()) != numServers {
+ c.Log("Waiting for cluster sync to finish...")
+ time.Sleep(5e8)
+ }
+
+ // ensure none of the rs2 set members are communicated with
+ for _, addr := range session.LiveServers() {
+ for _, rs2Member := range rs2Members {
+ c.Assert(strings.HasSuffix(addr, rs2Member), Equals, false)
+ }
+ }
+ }
+
+ // only communication with rs1 members is expected
+ for _, seedList := range seedLists {
+ info := mgo.DialInfo{
+ Addrs: seedList,
+ Timeout: 5 * time.Second,
+ ReplicaSetName: "rs1",
+ }
+
+ session, err := mgo.DialWithInfo(&info)
+ c.Assert(err, IsNil)
+ verifySyncedServers(session, 3)
+ session.Close()
+
+ info.Direct = true
+ session, err = mgo.DialWithInfo(&info)
+ c.Assert(err, IsNil)
+ verifySyncedServers(session, 1)
+ session.Close()
+
+ connectionUrl := fmt.Sprintf("mongodb://%v/?replicaSet=rs1", strings.Join(seedList, ","))
+ session, err = mgo.Dial(connectionUrl)
+ c.Assert(err, IsNil)
+ verifySyncedServers(session, 3)
+ session.Close()
+
+ connectionUrl += "&connect=direct"
+ session, err = mgo.Dial(connectionUrl)
+ c.Assert(err, IsNil)
+ verifySyncedServers(session, 1)
+ session.Close()
+ }
+
+}
+
+func (s *S) TestDirect(c *C) {
+ session, err := mgo.Dial("localhost:40012?connect=direct")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // We know that server is a slave.
+ session.SetMode(mgo.Monotonic, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(strings.HasSuffix(result.Host, ":40012"), Equals, true)
+
+ stats := mgo.GetStats()
+ c.Assert(stats.SocketsAlive, Equals, 1)
+ c.Assert(stats.SocketsInUse, Equals, 1)
+ c.Assert(stats.SocketRefs, Equals, 1)
+
+ // We've got no master, so it'll timeout.
+ session.SetSyncTimeout(5e8 * time.Nanosecond)
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"test": 1})
+ c.Assert(err, ErrorMatches, "no reachable servers")
+
+ // Writing to the local database is okay.
+ coll = session.DB("local").C("mycoll")
+ defer coll.RemoveAll(nil)
+ id := bson.NewObjectId()
+ err = coll.Insert(M{"_id": id})
+ c.Assert(err, IsNil)
+
+ // Data was stored in the right server.
+ n, err := coll.Find(M{"_id": id}).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 1)
+
+ // Server hasn't changed.
+ result.Host = ""
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(strings.HasSuffix(result.Host, ":40012"), Equals, true)
+}
+
+func (s *S) TestDirectToUnknownStateMember(c *C) {
+ session, err := mgo.Dial("localhost:40041?connect=direct")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Monotonic, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(strings.HasSuffix(result.Host, ":40041"), Equals, true)
+
+ // We've got no master, so it'll timeout.
+ session.SetSyncTimeout(5e8 * time.Nanosecond)
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"test": 1})
+ c.Assert(err, ErrorMatches, "no reachable servers")
+
+ // Slave is still reachable.
+ result.Host = ""
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(strings.HasSuffix(result.Host, ":40041"), Equals, true)
+}
+
+func (s *S) TestFailFast(c *C) {
+ info := mgo.DialInfo{
+ Addrs: []string{"localhost:99999"},
+ Timeout: 5 * time.Second,
+ FailFast: true,
+ }
+
+ started := time.Now()
+
+ _, err := mgo.DialWithInfo(&info)
+ c.Assert(err, ErrorMatches, "no reachable servers")
+
+ c.Assert(started.After(time.Now().Add(-time.Second)), Equals, true)
+}
+
+func (s *S) countQueries(c *C, server string) (n int) {
+ defer func() { c.Logf("Queries for %q: %d", server, n) }()
+ session, err := mgo.Dial(server + "?connect=direct")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ session.SetMode(mgo.Monotonic, true)
+ var result struct {
+ OpCounters struct {
+ Query int
+ }
+ Metrics struct {
+ Commands struct{ Find struct{ Total int } }
+ }
+ }
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ if s.versionAtLeast(3, 2) {
+ return result.Metrics.Commands.Find.Total
+ }
+ return result.OpCounters.Query
+}
+
+func (s *S) countCommands(c *C, server, commandName string) (n int) {
+	defer func() { c.Logf("Command %q count for %q: %d", commandName, server, n) }()
+ session, err := mgo.Dial(server + "?connect=direct")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ session.SetMode(mgo.Monotonic, true)
+ var result struct {
+ Metrics struct {
+ Commands map[string]struct{ Total int }
+ }
+ }
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ return result.Metrics.Commands[commandName].Total
+}
+
+func (s *S) TestMonotonicSlaveOkFlagWithMongos(c *C) {
+ session, err := mgo.Dial("localhost:40021")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ ssresult := &struct{ Host string }{}
+ imresult := &struct{ IsMaster bool }{}
+
+ // Figure the master while still using the strong session.
+ err = session.Run("serverStatus", ssresult)
+ c.Assert(err, IsNil)
+ err = session.Run("isMaster", imresult)
+ c.Assert(err, IsNil)
+ master := ssresult.Host
+ c.Assert(imresult.IsMaster, Equals, true, Commentf("%s is not the master", master))
+
+ // Ensure mongos is aware about the current topology.
+ s.Stop(":40201")
+ s.StartAll()
+
+ mongos, err := mgo.Dial("localhost:40202")
+ c.Assert(err, IsNil)
+ defer mongos.Close()
+
+ // Insert some data as otherwise 3.2+ doesn't seem to run the query at all.
+ err = mongos.DB("mydb").C("mycoll").Insert(bson.M{"n": 1})
+ c.Assert(err, IsNil)
+
+ // Wait until all servers see the data.
+ for _, addr := range []string{"localhost:40021", "localhost:40022", "localhost:40023"} {
+ session, err := mgo.Dial(addr + "?connect=direct")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ session.SetMode(mgo.Monotonic, true)
+ for i := 300; i >= 0; i-- {
+ n, err := session.DB("mydb").C("mycoll").Find(nil).Count()
+ c.Assert(err, IsNil)
+ if n == 1 {
+ break
+ }
+ if i == 0 {
+				c.Fatalf("Inserted data never reached %s", addr)
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+ }
+
+ // Collect op counters for everyone.
+ q21a := s.countQueries(c, "localhost:40021")
+ q22a := s.countQueries(c, "localhost:40022")
+ q23a := s.countQueries(c, "localhost:40023")
+
+ // Do a SlaveOk query through MongoS
+
+ mongos.SetMode(mgo.Monotonic, true)
+
+ coll := mongos.DB("mydb").C("mycoll")
+ var result struct{ N int }
+ for i := 0; i != 5; i++ {
+ err = coll.Find(nil).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 1)
+ }
+
+ // Collect op counters for everyone again.
+ q21b := s.countQueries(c, "localhost:40021")
+ q22b := s.countQueries(c, "localhost:40022")
+ q23b := s.countQueries(c, "localhost:40023")
+
+ var masterDelta, slaveDelta int
+ switch hostPort(master) {
+ case "40021":
+ masterDelta = q21b - q21a
+ slaveDelta = (q22b - q22a) + (q23b - q23a)
+ case "40022":
+ masterDelta = q22b - q22a
+ slaveDelta = (q21b - q21a) + (q23b - q23a)
+ case "40023":
+ masterDelta = q23b - q23a
+ slaveDelta = (q21b - q21a) + (q22b - q22a)
+ default:
+ c.Fatal("Uh?")
+ }
+
+ c.Check(masterDelta, Equals, 0) // Just the counting itself.
+ c.Check(slaveDelta, Equals, 5) // The counting for both, plus 5 queries above.
+}
+
+func (s *S) TestSecondaryModeWithMongos(c *C) {
+ session, err := mgo.Dial("localhost:40021")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ ssresult := &struct{ Host string }{}
+ imresult := &struct{ IsMaster bool }{}
+
+ // Figure the master while still using the strong session.
+ err = session.Run("serverStatus", ssresult)
+ c.Assert(err, IsNil)
+ err = session.Run("isMaster", imresult)
+ c.Assert(err, IsNil)
+ master := ssresult.Host
+ c.Assert(imresult.IsMaster, Equals, true, Commentf("%s is not the master", master))
+
+ // Ensure mongos is aware about the current topology.
+ s.Stop(":40201")
+ s.StartAll()
+
+ mongos, err := mgo.Dial("localhost:40202")
+ c.Assert(err, IsNil)
+ defer mongos.Close()
+
+ mongos.SetSyncTimeout(5 * time.Second)
+
+ // Insert some data as otherwise 3.2+ doesn't seem to run the query at all.
+ err = mongos.DB("mydb").C("mycoll").Insert(bson.M{"n": 1})
+ c.Assert(err, IsNil)
+
+ // Wait until all servers see the data.
+ for _, addr := range []string{"localhost:40021", "localhost:40022", "localhost:40023"} {
+ session, err := mgo.Dial(addr + "?connect=direct")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ session.SetMode(mgo.Monotonic, true)
+ for i := 300; i >= 0; i-- {
+ n, err := session.DB("mydb").C("mycoll").Find(nil).Count()
+ c.Assert(err, IsNil)
+ if n == 1 {
+ break
+ }
+ if i == 0 {
+				c.Fatalf("Inserted data never reached %s", addr)
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+ }
+
+ // Collect op counters for everyone.
+ q21a := s.countQueries(c, "localhost:40021")
+ q22a := s.countQueries(c, "localhost:40022")
+ q23a := s.countQueries(c, "localhost:40023")
+
+ // Do a Secondary query through MongoS
+
+ mongos.SetMode(mgo.Secondary, true)
+
+ coll := mongos.DB("mydb").C("mycoll")
+ var result struct{ N int }
+ for i := 0; i != 5; i++ {
+ err = coll.Find(nil).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 1)
+ }
+
+ // Collect op counters for everyone again.
+ q21b := s.countQueries(c, "localhost:40021")
+ q22b := s.countQueries(c, "localhost:40022")
+ q23b := s.countQueries(c, "localhost:40023")
+
+ var masterDelta, slaveDelta int
+ switch hostPort(master) {
+ case "40021":
+ masterDelta = q21b - q21a
+ slaveDelta = (q22b - q22a) + (q23b - q23a)
+ case "40022":
+ masterDelta = q22b - q22a
+ slaveDelta = (q21b - q21a) + (q23b - q23a)
+ case "40023":
+ masterDelta = q23b - q23a
+ slaveDelta = (q21b - q21a) + (q22b - q22a)
+ default:
+ c.Fatal("Uh?")
+ }
+
+ c.Check(masterDelta, Equals, 0) // Just the counting itself.
+ c.Check(slaveDelta, Equals, 5) // The counting for both, plus 5 queries above.
+}
+
+func (s *S) TestSecondaryModeWithMongosInsert(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40202")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Secondary, true)
+ session.SetSyncTimeout(4 * time.Second)
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ var result struct{ A int }
+ coll.Find(nil).One(&result)
+ c.Assert(result.A, Equals, 1)
+}
+
+func (s *S) TestRemovalOfClusterMember(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ master, err := mgo.Dial("localhost:40021")
+ c.Assert(err, IsNil)
+ defer master.Close()
+
+ // Wait for cluster to fully sync up.
+ for i := 0; i < 10; i++ {
+ if len(master.LiveServers()) == 3 {
+ break
+ }
+ time.Sleep(5e8)
+ }
+ if len(master.LiveServers()) != 3 {
+ c.Fatalf("Test started with bad cluster state: %v", master.LiveServers())
+ }
+
+ result := &struct {
+ IsMaster bool
+ Me string
+ }{}
+ slave := master.Copy()
+ slave.SetMode(mgo.Monotonic, true) // Monotonic can hold a non-master socket persistently.
+ err = slave.Run("isMaster", result)
+ c.Assert(err, IsNil)
+ c.Assert(result.IsMaster, Equals, false)
+ slaveAddr := result.Me
+
+ defer func() {
+ config := map[string]string{
+ "40021": `{_id: 1, host: "127.0.0.1:40021", priority: 1, tags: {rs2: "a"}}`,
+ "40022": `{_id: 2, host: "127.0.0.1:40022", priority: 0, tags: {rs2: "b"}}`,
+ "40023": `{_id: 3, host: "127.0.0.1:40023", priority: 0, tags: {rs2: "c"}}`,
+ }
+ master.Refresh()
+ master.Run(bson.D{{"$eval", `rs.add(` + config[hostPort(slaveAddr)] + `)`}}, nil)
+ master.Close()
+ slave.Close()
+
+ // Ensure suite syncs up with the changes before next test.
+ s.Stop(":40201")
+ s.StartAll()
+ time.Sleep(8 * time.Second)
+ // TODO Find a better way to find out when mongos is fully aware that all
+ // servers are up. Without that follow up tests that depend on mongos will
+ // break due to their expectation of things being in a working state.
+ }()
+
+ c.Logf("========== Removing slave: %s ==========", slaveAddr)
+
+ master.Run(bson.D{{"$eval", `rs.remove("` + slaveAddr + `")`}}, nil)
+
+ master.Refresh()
+
+ // Give the cluster a moment to catch up by doing a roundtrip to the master.
+ err = master.Ping()
+ c.Assert(err, IsNil)
+
+ time.Sleep(3e9)
+
+ // This must fail since the slave has been taken off the cluster.
+ err = slave.Ping()
+ c.Assert(err, NotNil)
+
+ for i := 0; i < 15; i++ {
+ if len(master.LiveServers()) == 2 {
+ break
+ }
+ time.Sleep(time.Second)
+ }
+ live := master.LiveServers()
+ if len(live) != 2 {
+		c.Errorf("Removed server still considered live: %v", live)
+ }
+
+ c.Log("========== Test succeeded. ==========")
+}
+
+func (s *S) TestPoolLimitSimple(c *C) {
+ for test := 0; test < 2; test++ {
+ var session *mgo.Session
+ var err error
+ if test == 0 {
+ session, err = mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ session.SetPoolLimit(1)
+ } else {
+ session, err = mgo.Dial("localhost:40001?maxPoolSize=1")
+ c.Assert(err, IsNil)
+ }
+ defer session.Close()
+
+ // Put one socket in use.
+ c.Assert(session.Ping(), IsNil)
+
+ done := make(chan time.Duration)
+
+ // Now block trying to get another one due to the pool limit.
+ go func() {
+ copy := session.Copy()
+ defer copy.Close()
+ started := time.Now()
+ c.Check(copy.Ping(), IsNil)
+ done <- time.Now().Sub(started)
+ }()
+
+ time.Sleep(300 * time.Millisecond)
+
+ // Put the one socket back in the pool, freeing it for the copy.
+ session.Refresh()
+ delay := <-done
+ c.Assert(delay > 300*time.Millisecond, Equals, true, Commentf("Delay: %s", delay))
+ }
+}
+
+func (s *S) TestPoolLimitMany(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ stats := mgo.GetStats()
+ for stats.SocketsAlive != 3 {
+ c.Logf("Waiting for all connections to be established (sockets alive currently %d)...", stats.SocketsAlive)
+ stats = mgo.GetStats()
+ time.Sleep(5e8)
+ }
+
+ const poolLimit = 64
+ session.SetPoolLimit(poolLimit)
+
+ // Consume the whole limit for the master.
+ var master []*mgo.Session
+ for i := 0; i < poolLimit; i++ {
+ s := session.Copy()
+ defer s.Close()
+ c.Assert(s.Ping(), IsNil)
+ master = append(master, s)
+ }
+
+ before := time.Now()
+ go func() {
+ time.Sleep(3e9)
+ master[0].Refresh()
+ }()
+
+ // Then, a single ping must block, since it would need another
+ // connection to the master, over the limit. Once the goroutine
+ // above releases its socket, it should move on.
+ session.Ping()
+ delay := time.Now().Sub(before)
+ c.Assert(delay > 3e9, Equals, true)
+ c.Assert(delay < 6e9, Equals, true)
+}
+
+func (s *S) TestSetModeEventualIterBug(c *C) {
+ session1, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session1.Close()
+
+ session1.SetMode(mgo.Eventual, false)
+
+ coll1 := session1.DB("mydb").C("mycoll")
+
+ const N = 100
+ for i := 0; i < N; i++ {
+ err = coll1.Insert(M{"_id": i})
+ c.Assert(err, IsNil)
+ }
+
+ c.Logf("Waiting until secondary syncs")
+ for {
+ n, err := coll1.Count()
+ c.Assert(err, IsNil)
+ if n == N {
+ c.Logf("Found all")
+ break
+ }
+ }
+
+ session2, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session2.Close()
+
+ session2.SetMode(mgo.Eventual, false)
+
+ coll2 := session2.DB("mydb").C("mycoll")
+
+ i := 0
+ iter := coll2.Find(nil).Batch(10).Iter()
+ var result struct{}
+ for iter.Next(&result) {
+ i++
+ }
+ c.Assert(iter.Close(), Equals, nil)
+ c.Assert(i, Equals, N)
+}
+
+func (s *S) TestCustomDialOld(c *C) {
+ dials := make(chan bool, 16)
+ dial := func(addr net.Addr) (net.Conn, error) {
+ tcpaddr, ok := addr.(*net.TCPAddr)
+ if !ok {
+ return nil, fmt.Errorf("unexpected address type: %T", addr)
+ }
+ dials <- true
+ return net.DialTCP("tcp", nil, tcpaddr)
+ }
+ info := mgo.DialInfo{
+ Addrs: []string{"localhost:40012"},
+ Dial: dial,
+ }
+
+ // Use hostname here rather than IP, to make things trickier.
+ session, err := mgo.DialWithInfo(&info)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ const N = 3
+ for i := 0; i < N; i++ {
+ select {
+ case <-dials:
+ case <-time.After(5 * time.Second):
+ c.Fatalf("expected %d dials, got %d", N, i)
+ }
+ }
+ select {
+ case <-dials:
+ c.Fatalf("got more dials than expected")
+ case <-time.After(100 * time.Millisecond):
+ }
+}
+
+func (s *S) TestCustomDialNew(c *C) {
+ dials := make(chan bool, 16)
+ dial := func(addr *mgo.ServerAddr) (net.Conn, error) {
+ dials <- true
+ if addr.TCPAddr().Port == 40012 {
+ c.Check(addr.String(), Equals, "localhost:40012")
+ }
+ return net.DialTCP("tcp", nil, addr.TCPAddr())
+ }
+ info := mgo.DialInfo{
+ Addrs: []string{"localhost:40012"},
+ DialServer: dial,
+ }
+
+ // Use hostname here rather than IP, to make things trickier.
+ session, err := mgo.DialWithInfo(&info)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ const N = 3
+ for i := 0; i < N; i++ {
+ select {
+ case <-dials:
+ case <-time.After(5 * time.Second):
+ c.Fatalf("expected %d dials, got %d", N, i)
+ }
+ }
+ select {
+ case <-dials:
+ c.Fatalf("got more dials than expected")
+ case <-time.After(100 * time.Millisecond):
+ }
+}
+
+func (s *S) TestPrimaryShutdownOnAuthShard(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ // Dial the shard.
+ session, err := mgo.Dial("localhost:40203")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Login and insert something to make it more realistic.
+ session.DB("admin").Login("root", "rapadura")
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(bson.M{"n": 1})
+ c.Assert(err, IsNil)
+
+ // Dial the replica set to figure the master out.
+ rs, err := mgo.Dial("root:rapadura@localhost:40031")
+ c.Assert(err, IsNil)
+ defer rs.Close()
+
+ // With strong consistency, this will open a socket to the master.
+ result := &struct{ Host string }{}
+ err = rs.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+
+ // Kill the master.
+ host := result.Host
+ s.Stop(host)
+
+ // This must fail, since the connection was broken.
+ err = rs.Run("serverStatus", result)
+ c.Assert(err, Equals, io.EOF)
+
+ // This won't work because the master just died.
+ err = coll.Insert(bson.M{"n": 2})
+ c.Assert(err, NotNil)
+
+ // Refresh session and wait for re-election.
+ session.Refresh()
+ for i := 0; i < 60; i++ {
+ err = coll.Insert(bson.M{"n": 3})
+ if err == nil {
+ break
+ }
+ c.Logf("Waiting for replica set to elect a new master. Last error: %v", err)
+ time.Sleep(500 * time.Millisecond)
+ }
+ c.Assert(err, IsNil)
+
+	count, err := coll.Count()
+	c.Assert(err, IsNil)
+	c.Assert(count > 1, Equals, true)
+}
+
+func (s *S) TestNearestSecondary(c *C) {
+ defer mgo.HackPingDelay(300 * time.Millisecond)()
+
+ rs1a := "127.0.0.1:40011"
+ rs1b := "127.0.0.1:40012"
+ rs1c := "127.0.0.1:40013"
+ s.Freeze(rs1b)
+
+ session, err := mgo.Dial(rs1a)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Wait for the sync up to run through the first couple of servers.
+ for len(session.LiveServers()) != 2 {
+ c.Log("Waiting for two servers to be alive...")
+ time.Sleep(100 * time.Millisecond)
+ }
+
+ // Extra delay to ensure the third server gets penalized.
+ time.Sleep(500 * time.Millisecond)
+
+ // Release third server.
+ s.Thaw(rs1b)
+
+ // Wait for it to come up.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for all servers to be alive...")
+ time.Sleep(100 * time.Millisecond)
+ }
+
+ session.SetMode(mgo.Monotonic, true)
+ var result struct{ Host string }
+
+ // See which slave picks the line, several times to avoid chance.
+ for i := 0; i < 10; i++ {
+ session.Refresh()
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ c.Assert(hostPort(result.Host), Equals, hostPort(rs1c))
+ }
+
+ if *fast {
+ // Don't hold back for several seconds.
+ return
+ }
+
+ // Now hold the other server for long enough to penalize it.
+ s.Freeze(rs1c)
+ time.Sleep(5 * time.Second)
+ s.Thaw(rs1c)
+
+ // Wait for the ping to be processed.
+ time.Sleep(500 * time.Millisecond)
+
+ // Repeating the test should now pick the former server consistently.
+ for i := 0; i < 10; i++ {
+ session.Refresh()
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ c.Assert(hostPort(result.Host), Equals, hostPort(rs1b))
+ }
+}
+
+func (s *S) TestNearestServer(c *C) {
+ defer mgo.HackPingDelay(300 * time.Millisecond)()
+
+ rs1a := "127.0.0.1:40011"
+ rs1b := "127.0.0.1:40012"
+ rs1c := "127.0.0.1:40013"
+
+ session, err := mgo.Dial(rs1a)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ s.Freeze(rs1a)
+ s.Freeze(rs1b)
+
+ // Extra delay to ensure the first two servers get penalized.
+ time.Sleep(500 * time.Millisecond)
+
+ // Release them.
+ s.Thaw(rs1a)
+ s.Thaw(rs1b)
+
+ // Wait for everyone to come up.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for all servers to be alive...")
+ time.Sleep(100 * time.Millisecond)
+ }
+
+ session.SetMode(mgo.Nearest, true)
+ var result struct{ Host string }
+
+ // See which server picks the line, several times to avoid chance.
+ for i := 0; i < 10; i++ {
+ session.Refresh()
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ c.Assert(hostPort(result.Host), Equals, hostPort(rs1c))
+ }
+
+ if *fast {
+ // Don't hold back for several seconds.
+ return
+ }
+
+ // Now hold the two secondaries for long enough to penalize them.
+ s.Freeze(rs1b)
+ s.Freeze(rs1c)
+ time.Sleep(5 * time.Second)
+ s.Thaw(rs1b)
+ s.Thaw(rs1c)
+
+ // Wait for the ping to be processed.
+ time.Sleep(500 * time.Millisecond)
+
+ // Repeating the test should now pick the primary server consistently.
+ for i := 0; i < 10; i++ {
+ session.Refresh()
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ c.Assert(hostPort(result.Host), Equals, hostPort(rs1a))
+ }
+}
+
+func (s *S) TestConnectCloseConcurrency(c *C) {
+ restore := mgo.HackPingDelay(500 * time.Millisecond)
+ defer restore()
+ var wg sync.WaitGroup
+ const n = 500
+ wg.Add(n)
+ for i := 0; i < n; i++ {
+ go func() {
+ defer wg.Done()
+ session, err := mgo.Dial("localhost:40001")
+ if err != nil {
+ c.Fatal(err)
+ }
+ time.Sleep(1)
+ session.Close()
+ }()
+ }
+ wg.Wait()
+}
+
+func (s *S) TestSelectServers(c *C) {
+ if !s.versionAtLeast(2, 2) {
+ c.Skip("read preferences introduced in 2.2")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Eventual, true)
+
+ var result struct{ Host string }
+
+ session.Refresh()
+ session.SelectServers(bson.D{{"rs1", "b"}})
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ c.Assert(hostPort(result.Host), Equals, "40012")
+
+ session.Refresh()
+ session.SelectServers(bson.D{{"rs1", "c"}})
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ c.Assert(hostPort(result.Host), Equals, "40013")
+}
+
+func (s *S) TestSelectServersWithMongos(c *C) {
+ if !s.versionAtLeast(2, 2) {
+ c.Skip("read preferences introduced in 2.2")
+ }
+
+ session, err := mgo.Dial("localhost:40021")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ ssresult := &struct{ Host string }{}
+ imresult := &struct{ IsMaster bool }{}
+
+ // Figure the master while still using the strong session.
+ err = session.Run("serverStatus", ssresult)
+ c.Assert(err, IsNil)
+ err = session.Run("isMaster", imresult)
+ c.Assert(err, IsNil)
+ master := ssresult.Host
+ c.Assert(imresult.IsMaster, Equals, true, Commentf("%s is not the master", master))
+
+ var slave1, slave2 string
+ switch hostPort(master) {
+ case "40021":
+ slave1, slave2 = "b", "c"
+ case "40022":
+ slave1, slave2 = "a", "c"
+ case "40023":
+ slave1, slave2 = "a", "b"
+ }
+
+ // Collect op counters for everyone.
+ q21a := s.countQueries(c, "localhost:40021")
+ q22a := s.countQueries(c, "localhost:40022")
+ q23a := s.countQueries(c, "localhost:40023")
+
+ // Do a SlaveOk query through MongoS
+ mongos, err := mgo.Dial("localhost:40202")
+ c.Assert(err, IsNil)
+ defer mongos.Close()
+
+ mongos.SetMode(mgo.Monotonic, true)
+
+ mongos.Refresh()
+ mongos.SelectServers(bson.D{{"rs2", slave1}})
+ coll := mongos.DB("mydb").C("mycoll")
+ result := &struct{}{}
+ for i := 0; i != 5; i++ {
+ err := coll.Find(nil).One(result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+ }
+
+ mongos.Refresh()
+ mongos.SelectServers(bson.D{{"rs2", slave2}})
+ coll = mongos.DB("mydb").C("mycoll")
+ for i := 0; i != 7; i++ {
+ err := coll.Find(nil).One(result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+ }
+
+ // Collect op counters for everyone again.
+ q21b := s.countQueries(c, "localhost:40021")
+ q22b := s.countQueries(c, "localhost:40022")
+ q23b := s.countQueries(c, "localhost:40023")
+
+ switch hostPort(master) {
+ case "40021":
+ c.Check(q21b-q21a, Equals, 0)
+ c.Check(q22b-q22a, Equals, 5)
+ c.Check(q23b-q23a, Equals, 7)
+ case "40022":
+ c.Check(q21b-q21a, Equals, 5)
+ c.Check(q22b-q22a, Equals, 0)
+ c.Check(q23b-q23a, Equals, 7)
+ case "40023":
+ c.Check(q21b-q21a, Equals, 5)
+ c.Check(q22b-q22a, Equals, 7)
+ c.Check(q23b-q23a, Equals, 0)
+ default:
+ c.Fatal("Uh?")
+ }
+}
+
+func (s *S) TestDoNotFallbackToMonotonic(c *C) {
+	// There was a bug at some point where some functions fell back
+	// to Monotonic mode. This test ensures all listIndexes commands
+	// go to the primary, as they should since the session is in
+	// Strong mode.
+ if !s.versionAtLeast(3, 0) {
+ c.Skip("command-counting logic depends on 3.0+")
+ }
+
+ session, err := mgo.Dial("localhost:40012")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ for i := 0; i < 15; i++ {
+ q11a := s.countCommands(c, "localhost:40011", "listIndexes")
+ q12a := s.countCommands(c, "localhost:40012", "listIndexes")
+ q13a := s.countCommands(c, "localhost:40013", "listIndexes")
+
+ _, err := session.DB("local").C("system.indexes").Indexes()
+ c.Assert(err, IsNil)
+
+ q11b := s.countCommands(c, "localhost:40011", "listIndexes")
+ q12b := s.countCommands(c, "localhost:40012", "listIndexes")
+ q13b := s.countCommands(c, "localhost:40013", "listIndexes")
+
+ c.Assert(q11b, Equals, q11a+1)
+ c.Assert(q12b, Equals, q12a)
+ c.Assert(q13b, Equals, q13a)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/dbserver.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/dbserver.go
new file mode 100644
index 00000000000..16b7b58417a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/dbserver.go
@@ -0,0 +1,196 @@
+package dbtest
+
+import (
+ "bytes"
+ "fmt"
+ "net"
+ "os"
+ "os/exec"
+ "strconv"
+ "time"
+
+ "gopkg.in/mgo.v2"
+ "gopkg.in/tomb.v2"
+)
+
+// DBServer controls a MongoDB server process to be used within test suites.
+//
+// The test server is started when Session is called the first time and should
+// remain running for the duration of all tests, with the Wipe method being
+// called between tests (before each of them) to clear stored data. After all tests
+// are done, the Stop method should be called to stop the test server.
+//
+// Before the DBServer is used the SetPath method must be called to define
+// the location for the database files to be stored.
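+//
+// A minimal, illustrative sketch of the intended flow (the directory path
+// and collection names below are placeholders, not part of this package):
+//
+//	var server DBServer
+//	server.SetPath("/tmp/dbtest-example") // directory must already exist
+//	session := server.Session()           // first call starts mongod
+//	defer server.Stop()                   // runs last: stops the process
+//	defer session.Close()                 // runs first: close sessions before Stop
+//	err := session.DB("mydb").C("mycoll").Insert(map[string]interface{}{"n": 1})
+//	if err != nil {
+//		panic(err)
+//	}
+//	server.Wipe() // between tests, drop the databases created so far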
+type DBServer struct {
+ session *mgo.Session
+ output bytes.Buffer
+ server *exec.Cmd
+ dbpath string
+ host string
+ tomb tomb.Tomb
+}
+
+// SetPath defines the path to the directory where the database files will be
+// stored if it is started. The directory path itself is not created or removed
+// by the test helper.
+func (dbs *DBServer) SetPath(dbpath string) {
+ dbs.dbpath = dbpath
+}
+
+func (dbs *DBServer) start() {
+ if dbs.server != nil {
+ panic("DBServer already started")
+ }
+ if dbs.dbpath == "" {
+ panic("DBServer.SetPath must be called before using the server")
+ }
+ mgo.SetStats(true)
+ l, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ panic("unable to listen on a local address: " + err.Error())
+ }
+ addr := l.Addr().(*net.TCPAddr)
+ l.Close()
+ dbs.host = addr.String()
+
+ args := []string{
+ "--dbpath", dbs.dbpath,
+ "--bind_ip", "127.0.0.1",
+ "--port", strconv.Itoa(addr.Port),
+ "--nssize", "1",
+ "--noprealloc",
+ "--smallfiles",
+ "--nojournal",
+ }
+ dbs.tomb = tomb.Tomb{}
+ dbs.server = exec.Command("mongod", args...)
+ dbs.server.Stdout = &dbs.output
+ dbs.server.Stderr = &dbs.output
+ err = dbs.server.Start()
+ if err != nil {
+ panic(err)
+ }
+ dbs.tomb.Go(dbs.monitor)
+ dbs.Wipe()
+}
+
+func (dbs *DBServer) monitor() error {
+ dbs.server.Process.Wait()
+ if dbs.tomb.Alive() {
+ // Present some debugging information.
+ fmt.Fprintf(os.Stderr, "---- mongod process died unexpectedly:\n")
+ fmt.Fprintf(os.Stderr, "%s", dbs.output.Bytes())
+ fmt.Fprintf(os.Stderr, "---- mongod processes running right now:\n")
+ cmd := exec.Command("/bin/sh", "-c", "ps auxw | grep mongod")
+ cmd.Stdout = os.Stderr
+ cmd.Stderr = os.Stderr
+ cmd.Run()
+ fmt.Fprintf(os.Stderr, "----------------------------------------\n")
+
+ panic("mongod process died unexpectedly")
+ }
+ return nil
+}
+
+// Stop stops the test server process, if it is running.
+//
+// It's okay to call Stop multiple times. After the test server is
+// stopped it cannot be restarted.
+//
+// All database sessions must be closed before or while the Stop method
+// is running. Otherwise Stop will panic after a timeout informing that
+// there is a session leak.
+func (dbs *DBServer) Stop() {
+ if dbs.session != nil {
+ dbs.checkSessions()
+ if dbs.session != nil {
+ dbs.session.Close()
+ dbs.session = nil
+ }
+ }
+ if dbs.server != nil {
+ dbs.tomb.Kill(nil)
+ dbs.server.Process.Signal(os.Interrupt)
+ select {
+ case <-dbs.tomb.Dead():
+ case <-time.After(5 * time.Second):
+ panic("timeout waiting for mongod process to die")
+ }
+ dbs.server = nil
+ }
+}
+
+// Session returns a new session to the server. The returned session
+// must be closed after the test is done with it.
+//
+// The first Session obtained from a DBServer will start it.
+func (dbs *DBServer) Session() *mgo.Session {
+ if dbs.server == nil {
+ dbs.start()
+ }
+ if dbs.session == nil {
+ mgo.ResetStats()
+ var err error
+ dbs.session, err = mgo.Dial(dbs.host + "/test")
+ if err != nil {
+ panic(err)
+ }
+ }
+ return dbs.session.Copy()
+}
+
+// checkSessions ensures all mgo sessions opened were properly closed.
+// For slightly faster tests, it may be disabled by setting the
+// environment variable CHECK_SESSIONS to 0.
+func (dbs *DBServer) checkSessions() {
+ if check := os.Getenv("CHECK_SESSIONS"); check == "0" || dbs.server == nil || dbs.session == nil {
+ return
+ }
+ dbs.session.Close()
+ dbs.session = nil
+ for i := 0; i < 100; i++ {
+ stats := mgo.GetStats()
+ if stats.SocketsInUse == 0 && stats.SocketsAlive == 0 {
+ return
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+ panic("There are mgo sessions still alive.")
+}
+
+// Wipe drops all created databases and their data.
+//
+// The MongoDB server remains running if it was previously running,
+// or stopped if it was previously stopped.
+//
+// All database sessions must be closed before or while the Wipe method
+// is running. Otherwise Wipe will panic after a timeout informing that
+// there is a session leak.
+func (dbs *DBServer) Wipe() {
+ if dbs.server == nil || dbs.session == nil {
+ return
+ }
+ dbs.checkSessions()
+ sessionUnset := dbs.session == nil
+ session := dbs.Session()
+ defer session.Close()
+ if sessionUnset {
+ dbs.session.Close()
+ dbs.session = nil
+ }
+ names, err := session.DatabaseNames()
+ if err != nil {
+ panic(err)
+ }
+ for _, name := range names {
+ switch name {
+ case "admin", "local", "config":
+ default:
+ err = session.DB(name).DropDatabase()
+ if err != nil {
+ panic(err)
+ }
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/dbserver_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/dbserver_test.go
new file mode 100644
index 00000000000..79812fde34f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/dbserver_test.go
@@ -0,0 +1,108 @@
+package dbtest_test
+
+import (
+ "os"
+ "testing"
+ "time"
+
+ . "gopkg.in/check.v1"
+
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/dbtest"
+)
+
+type M map[string]interface{}
+
+func TestAll(t *testing.T) {
+ TestingT(t)
+}
+
+type S struct {
+ oldCheckSessions string
+}
+
+var _ = Suite(&S{})
+
+func (s *S) SetUpTest(c *C) {
+ s.oldCheckSessions = os.Getenv("CHECK_SESSIONS")
+ os.Setenv("CHECK_SESSIONS", "")
+}
+
+func (s *S) TearDownTest(c *C) {
+ os.Setenv("CHECK_SESSIONS", s.oldCheckSessions)
+}
+
+func (s *S) TestWipeData(c *C) {
+ var server dbtest.DBServer
+ server.SetPath(c.MkDir())
+ defer server.Stop()
+
+ session := server.Session()
+ err := session.DB("mydb").C("mycoll").Insert(M{"a": 1})
+ session.Close()
+ c.Assert(err, IsNil)
+
+ server.Wipe()
+
+ session = server.Session()
+ names, err := session.DatabaseNames()
+ session.Close()
+ c.Assert(err, IsNil)
+ for _, name := range names {
+ if name != "local" && name != "admin" {
+ c.Fatalf("Wipe should have removed this database: %s", name)
+ }
+ }
+}
+
+func (s *S) TestStop(c *C) {
+ var server dbtest.DBServer
+ server.SetPath(c.MkDir())
+ defer server.Stop()
+
+ // Server should not be running.
+ process := server.ProcessTest()
+ c.Assert(process, IsNil)
+
+ session := server.Session()
+ addr := session.LiveServers()[0]
+ session.Close()
+
+ // Server should be running now.
+ process = server.ProcessTest()
+ p, err := os.FindProcess(process.Pid)
+ c.Assert(err, IsNil)
+ p.Release()
+
+ server.Stop()
+
+ // Server should not be running anymore.
+ session, err = mgo.DialWithTimeout(addr, 500*time.Millisecond)
+ if session != nil {
+ session.Close()
+ c.Fatalf("Stop did not stop the server")
+ }
+}
+
+func (s *S) TestCheckSessions(c *C) {
+ var server dbtest.DBServer
+ server.SetPath(c.MkDir())
+ defer server.Stop()
+
+ session := server.Session()
+ defer session.Close()
+ c.Assert(server.Wipe, PanicMatches, "There are mgo sessions still alive.")
+}
+
+func (s *S) TestCheckSessionsDisabled(c *C) {
+ var server dbtest.DBServer
+ server.SetPath(c.MkDir())
+ defer server.Stop()
+
+ os.Setenv("CHECK_SESSIONS", "0")
+
+ // Should not panic, although it looks to Wipe like this session will leak.
+ session := server.Session()
+ defer session.Close()
+ server.Wipe()
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/export_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/export_test.go
new file mode 100644
index 00000000000..65f1cb02388
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/dbtest/export_test.go
@@ -0,0 +1,12 @@
+package dbtest
+
+import (
+ "os"
+)
+
+func (dbs *DBServer) ProcessTest() *os.Process {
+ if dbs.server == nil {
+ return nil
+ }
+ return dbs.server.Process
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/doc.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/doc.go
new file mode 100644
index 00000000000..859fd9b8df9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/doc.go
@@ -0,0 +1,31 @@
+// Package mgo offers a rich MongoDB driver for Go.
+//
+// Details about the mgo project (pronounced as "mango") are found
+// on its web page:
+//
+// http://labix.org/mgo
+//
+// Usage of the driver revolves around the concept of sessions. To
+// get started, obtain a session using the Dial function:
+//
+// session, err := mgo.Dial(url)
+//
+// This will establish one or more connections with the cluster of
+// servers defined by the url parameter. From then on, the cluster
+// may be queried with multiple consistency rules (see SetMode) and
+// documents retrieved with statements such as:
+//
+// c := session.DB(database).C(collection)
+// err := c.Find(query).One(&result)
+//
+// New sessions are typically created by calling session.Copy on the
+// initial session obtained at dial time. These new sessions will share
+// the same cluster information and connection pool, and may be easily
+// handed into other methods and functions for organizing logic.
+// Every session created must have its Close method called at the end
+// of its lifetime, so its resources may be put back in the pool or
+// collected, depending on the case.
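+//
+// A brief sketch of that pattern (the database, collection, and doc names
+// are illustrative):
+//
+//     s := session.Copy()
+//     defer s.Close()
+//     err := s.DB(database).C(collection).Insert(doc)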
+//
+// For more details, see the documentation for the types and methods.
+//
+package mgo
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/export_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/export_test.go
new file mode 100644
index 00000000000..690f84d3835
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/export_test.go
@@ -0,0 +1,33 @@
+package mgo
+
+import (
+ "time"
+)
+
+func HackPingDelay(newDelay time.Duration) (restore func()) {
+ globalMutex.Lock()
+ defer globalMutex.Unlock()
+
+ oldDelay := pingDelay
+ restore = func() {
+ globalMutex.Lock()
+ pingDelay = oldDelay
+ globalMutex.Unlock()
+ }
+ pingDelay = newDelay
+ return
+}
+
+func HackSyncSocketTimeout(newTimeout time.Duration) (restore func()) {
+ globalMutex.Lock()
+ defer globalMutex.Unlock()
+
+ oldTimeout := syncSocketTimeout
+ restore = func() {
+ globalMutex.Lock()
+ syncSocketTimeout = oldTimeout
+ globalMutex.Unlock()
+ }
+ syncSocketTimeout = newTimeout
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/gridfs.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/gridfs.go
new file mode 100644
index 00000000000..421472095cf
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/gridfs.go
@@ -0,0 +1,761 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo
+
+import (
+ "crypto/md5"
+ "encoding/hex"
+ "errors"
+ "hash"
+ "io"
+ "os"
+ "sync"
+ "time"
+
+ "gopkg.in/mgo.v2/bson"
+)
+
+type GridFS struct {
+ Files *Collection
+ Chunks *Collection
+}
+
+type gfsFileMode int
+
+const (
+ gfsClosed gfsFileMode = 0
+ gfsReading gfsFileMode = 1
+ gfsWriting gfsFileMode = 2
+)
+
+type GridFile struct {
+ m sync.Mutex
+ c sync.Cond
+ gfs *GridFS
+ mode gfsFileMode
+ err error
+
+ chunk int
+ offset int64
+
+ wpending int
+ wbuf []byte
+ wsum hash.Hash
+
+ rbuf []byte
+ rcache *gfsCachedChunk
+
+ doc gfsFile
+}
+
+type gfsFile struct {
+ Id interface{} "_id"
+ ChunkSize int "chunkSize"
+ UploadDate time.Time "uploadDate"
+ Length int64 ",minsize"
+ MD5 string
+ Filename string ",omitempty"
+ ContentType string "contentType,omitempty"
+ Metadata *bson.Raw ",omitempty"
+}
+
+type gfsChunk struct {
+ Id interface{} "_id"
+ FilesId interface{} "files_id"
+ N int
+ Data []byte
+}
+
+type gfsCachedChunk struct {
+ wait sync.Mutex
+ n int
+ data []byte
+ err error
+}
+
+func newGridFS(db *Database, prefix string) *GridFS {
+ return &GridFS{db.C(prefix + ".files"), db.C(prefix + ".chunks")}
+}
+
+func (gfs *GridFS) newFile() *GridFile {
+ file := &GridFile{gfs: gfs}
+ file.c.L = &file.m
+ //runtime.SetFinalizer(file, finalizeFile)
+ return file
+}
+
+func finalizeFile(file *GridFile) {
+ file.Close()
+}
+
+// Create creates a new file with the provided name in the GridFS. If the file
+// name already exists, a new version will be inserted with an up-to-date
+// uploadDate that will cause it to be atomically visible to the Open and
+// OpenId methods. If the file name is not important, an empty name may be
+// provided and the file Id used instead.
+//
+// It's important to Close files whether they are being written to
+// or read from, and to check the err result to ensure the operation
+// completed successfully.
+//
+// A simple example inserting a new file:
+//
+// func check(err error) {
+// if err != nil {
+// panic(err)
+// }
+// }
+// file, err := db.GridFS("fs").Create("myfile.txt")
+// check(err)
+// n, err := file.Write([]byte("Hello world!"))
+// check(err)
+// err = file.Close()
+// check(err)
+// fmt.Printf("%d bytes written\n", n)
+//
+// The io.Writer interface is implemented by *GridFile and may be used to
+// help with the file creation. For example:
+//
+// file, err := db.GridFS("fs").Create("myfile.txt")
+// check(err)
+// messages, err := os.Open("/var/log/messages")
+// check(err)
+// defer messages.Close()
+// _, err = io.Copy(file, messages)
+// check(err)
+// err = file.Close()
+// check(err)
+//
+func (gfs *GridFS) Create(name string) (file *GridFile, err error) {
+ file = gfs.newFile()
+ file.mode = gfsWriting
+ file.wsum = md5.New()
+ file.doc = gfsFile{Id: bson.NewObjectId(), ChunkSize: 255 * 1024, Filename: name}
+ return
+}
+
+// OpenId returns the file with the provided id, for reading.
+// If the file isn't found, err will be set to mgo.ErrNotFound.
+//
+// It's important to Close files whether they are being written to
+// or read from, and to check the err result to ensure the operation
+// completed successfully.
+//
+// The following example will print the first 8192 bytes from the file:
+//
+// func check(err error) {
+// if err != nil {
+// panic(err)
+// }
+// }
+// file, err := db.GridFS("fs").OpenId(objid)
+// check(err)
+// b := make([]byte, 8192)
+// n, err := file.Read(b)
+// check(err)
+// fmt.Println(string(b))
+// check(err)
+// err = file.Close()
+// check(err)
+// fmt.Printf("%d bytes read\n", n)
+//
+// The io.Reader interface is implemented by *GridFile and may be used to
+// read from it. As an example, the following snippet will dump the whole
+// file into the standard output:
+//
+// file, err := db.GridFS("fs").OpenId(objid)
+// check(err)
+// _, err = io.Copy(os.Stdout, file)
+// check(err)
+// err = file.Close()
+// check(err)
+//
+func (gfs *GridFS) OpenId(id interface{}) (file *GridFile, err error) {
+ var doc gfsFile
+ err = gfs.Files.Find(bson.M{"_id": id}).One(&doc)
+ if err != nil {
+ return
+ }
+ file = gfs.newFile()
+ file.mode = gfsReading
+ file.doc = doc
+ return
+}
+
+// Open returns the most recently uploaded file with the provided
+// name, for reading. If the file isn't found, err will be set
+// to mgo.ErrNotFound.
+//
+// It's important to Close files whether they are being written to
+// or read from, and to check the err result to ensure the operation
+// completed successfully.
+//
+// The following example will print the first 8192 bytes from the file:
+//
+// file, err := db.GridFS("fs").Open("myfile.txt")
+// check(err)
+// b := make([]byte, 8192)
+// n, err := file.Read(b)
+// check(err)
+// fmt.Println(string(b))
+// check(err)
+// err = file.Close()
+// check(err)
+// fmt.Printf("%d bytes read\n", n)
+//
+// The io.Reader interface is implemented by *GridFile and may be used to
+// read from it. As an example, the following snippet will dump the whole
+// file into the standard output:
+//
+// file, err := db.GridFS("fs").Open("myfile.txt")
+// check(err)
+// _, err = io.Copy(os.Stdout, file)
+// check(err)
+// err = file.Close()
+// check(err)
+//
+func (gfs *GridFS) Open(name string) (file *GridFile, err error) {
+ var doc gfsFile
+ err = gfs.Files.Find(bson.M{"filename": name}).Sort("-uploadDate").One(&doc)
+ if err != nil {
+ return
+ }
+ file = gfs.newFile()
+ file.mode = gfsReading
+ file.doc = doc
+ return
+}
+
+// OpenNext opens the next file from iter for reading, sets *file to it,
+// and returns true on success. If no more documents are available
+// on iter or an error occurred, *file is set to nil and the result is false.
+// Errors will be available via iter.Err().
+//
+// The iter parameter must be an iterator on the GridFS files collection.
+// Using the GridFS.Find method is an easy way to obtain such an iterator,
+// but any iterator on the collection will work.
+//
+// If the provided *file is non-nil, OpenNext will close it before attempting
+// to iterate to the next element. This means that in a loop one only
+// has to worry about closing files when breaking out of the loop early
+// (break, return, or panic).
+//
+// For example:
+//
+// gfs := db.GridFS("fs")
+// query := gfs.Find(nil).Sort("filename")
+// iter := query.Iter()
+// var f *mgo.GridFile
+// for gfs.OpenNext(iter, &f) {
+// fmt.Printf("Filename: %s\n", f.Name())
+// }
+// if iter.Close() != nil {
+// panic(iter.Close())
+// }
+//
+func (gfs *GridFS) OpenNext(iter *Iter, file **GridFile) bool {
+ if *file != nil {
+ // Ignoring the error here shouldn't be a big deal
+ // as we're reading the file and the loop iteration
+ // for this file is finished.
+ _ = (*file).Close()
+ }
+ var doc gfsFile
+ if !iter.Next(&doc) {
+ *file = nil
+ return false
+ }
+ f := gfs.newFile()
+ f.mode = gfsReading
+ f.doc = doc
+ *file = f
+ return true
+}
+
+// Find runs query on GridFS's files collection and returns
+// the resulting Query.
+//
+// This logic:
+//
+// gfs := db.GridFS("fs")
+// iter := gfs.Find(nil).Iter()
+//
+// Is equivalent to:
+//
+// files := db.C("fs" + ".files")
+// iter := files.Find(nil).Iter()
+//
+func (gfs *GridFS) Find(query interface{}) *Query {
+ return gfs.Files.Find(query)
+}
+
+// RemoveId deletes the file with the provided id from the GridFS.
+func (gfs *GridFS) RemoveId(id interface{}) error {
+ err := gfs.Files.Remove(bson.M{"_id": id})
+ if err != nil {
+ return err
+ }
+ _, err = gfs.Chunks.RemoveAll(bson.D{{"files_id", id}})
+ return err
+}
+
+type gfsDocId struct {
+ Id interface{} "_id"
+}
+
+// Remove deletes all files with the provided name from the GridFS.
+func (gfs *GridFS) Remove(name string) (err error) {
+ iter := gfs.Files.Find(bson.M{"filename": name}).Select(bson.M{"_id": 1}).Iter()
+ var doc gfsDocId
+ for iter.Next(&doc) {
+ if e := gfs.RemoveId(doc.Id); e != nil {
+ err = e
+ }
+ }
+ if err == nil {
+ err = iter.Close()
+ }
+ return err
+}
+
+func (file *GridFile) assertMode(mode gfsFileMode) {
+ switch file.mode {
+ case mode:
+ return
+ case gfsWriting:
+ panic("GridFile is open for writing")
+ case gfsReading:
+ panic("GridFile is open for reading")
+ case gfsClosed:
+ panic("GridFile is closed")
+ default:
+ panic("internal error: missing GridFile mode")
+ }
+}
+
+// SetChunkSize sets the size of saved chunks. Once the file is written to, it
+// will be split into blocks of that size and each block saved into an
+// independent chunk document. The default chunk size is 255 KB.
+//
+// It is a runtime error to call this function once the file has started
+// being written to.
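+//
+// A short sketch (the file name is illustrative; check is the helper from
+// the Create example):
+//
+//     file, err := db.GridFS("fs").Create("big.bin")
+//     check(err)
+//     file.SetChunkSize(1024 * 1024) // 1 MB chunks instead of the default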
+func (file *GridFile) SetChunkSize(bytes int) {
+ file.assertMode(gfsWriting)
+ debugf("GridFile %p: setting chunk size to %d", file, bytes)
+ file.m.Lock()
+ file.doc.ChunkSize = bytes
+ file.m.Unlock()
+}
+
+// Id returns the current file Id.
+func (file *GridFile) Id() interface{} {
+ return file.doc.Id
+}
+
+// SetId changes the current file Id.
+//
+// It is a runtime error to call this function once the file has started
+// being written to, or when the file is not open for writing.
+func (file *GridFile) SetId(id interface{}) {
+ file.assertMode(gfsWriting)
+ file.m.Lock()
+ file.doc.Id = id
+ file.m.Unlock()
+}
+
+// Name returns the optional file name. An empty string will be returned
+// in case it is unset.
+func (file *GridFile) Name() string {
+ return file.doc.Filename
+}
+
+// SetName changes the optional file name. An empty string may be used to
+// unset it.
+//
+// It is a runtime error to call this function when the file is not open
+// for writing.
+func (file *GridFile) SetName(name string) {
+ file.assertMode(gfsWriting)
+ file.m.Lock()
+ file.doc.Filename = name
+ file.m.Unlock()
+}
+
+// ContentType returns the optional file content type. An empty string will be
+// returned in case it is unset.
+func (file *GridFile) ContentType() string {
+ return file.doc.ContentType
+}
+
+// SetContentType changes the optional file content type. An empty string may be
+// used to unset it.
+//
+// It is a runtime error to call this function when the file is not open
+// for writing.
+func (file *GridFile) SetContentType(ctype string) {
+ file.assertMode(gfsWriting)
+ file.m.Lock()
+ file.doc.ContentType = ctype
+ file.m.Unlock()
+}
+
+// GetMeta unmarshals the optional "metadata" field associated with the
+// file into the result parameter. The meaning of keys under that field
+// is user-defined. For example:
+//
+// result := struct{ INode int }{}
+// err = file.GetMeta(&result)
+// if err != nil {
+// panic(err)
+// }
+// fmt.Printf("inode: %d\n", result.INode)
+//
+func (file *GridFile) GetMeta(result interface{}) (err error) {
+ file.m.Lock()
+ if file.doc.Metadata != nil {
+ err = bson.Unmarshal(file.doc.Metadata.Data, result)
+ }
+ file.m.Unlock()
+ return
+}
+
+// SetMeta changes the optional "metadata" field associated with the
+// file. The meaning of keys under that field is user-defined.
+// For example:
+//
+// file.SetMeta(bson.M{"inode": inode})
+//
+// It is a runtime error to call this function when the file is not open
+// for writing.
+func (file *GridFile) SetMeta(metadata interface{}) {
+ file.assertMode(gfsWriting)
+ data, err := bson.Marshal(metadata)
+ file.m.Lock()
+ if err != nil && file.err == nil {
+ file.err = err
+ } else {
+ file.doc.Metadata = &bson.Raw{Data: data}
+ }
+ file.m.Unlock()
+}
+
+// Size returns the file size in bytes.
+func (file *GridFile) Size() (bytes int64) {
+ file.m.Lock()
+ bytes = file.doc.Length
+ file.m.Unlock()
+ return
+}
+
+// MD5 returns the file MD5 as a hex-encoded string.
+func (file *GridFile) MD5() (md5 string) {
+ return file.doc.MD5
+}
+
+// UploadDate returns the file upload time.
+func (file *GridFile) UploadDate() time.Time {
+ return file.doc.UploadDate
+}
+
+// SetUploadDate changes the file upload time.
+//
+// It is a runtime error to call this function when the file is not open
+// for writing.
+func (file *GridFile) SetUploadDate(t time.Time) {
+ file.assertMode(gfsWriting)
+ file.m.Lock()
+ file.doc.UploadDate = t
+ file.m.Unlock()
+}
+
+// Close flushes any pending changes in case the file is being written
+// to, waits for any background operations to finish, and closes the file.
+//
+// It's important to Close files whether they are being written to
+// or read from, and to check the err result to ensure the operation
+// completed successfully.
+func (file *GridFile) Close() (err error) {
+ file.m.Lock()
+ defer file.m.Unlock()
+ if file.mode == gfsWriting {
+ if len(file.wbuf) > 0 && file.err == nil {
+ file.insertChunk(file.wbuf)
+ file.wbuf = file.wbuf[0:0]
+ }
+ file.completeWrite()
+ } else if file.mode == gfsReading && file.rcache != nil {
+ file.rcache.wait.Lock()
+ file.rcache = nil
+ }
+ file.mode = gfsClosed
+ debugf("GridFile %p: closed", file)
+ return file.err
+}
+
+func (file *GridFile) completeWrite() {
+ for file.wpending > 0 {
+ debugf("GridFile %p: waiting for %d pending chunks to complete file write", file, file.wpending)
+ file.c.Wait()
+ }
+ if file.err == nil {
+ hexsum := hex.EncodeToString(file.wsum.Sum(nil))
+ if file.doc.UploadDate.IsZero() {
+ file.doc.UploadDate = bson.Now()
+ }
+ file.doc.MD5 = hexsum
+ file.err = file.gfs.Files.Insert(file.doc)
+ }
+ if file.err != nil {
+ file.gfs.Chunks.RemoveAll(bson.D{{"files_id", file.doc.Id}})
+ }
+ if file.err == nil {
+ index := Index{
+ Key: []string{"files_id", "n"},
+ Unique: true,
+ }
+ file.err = file.gfs.Chunks.EnsureIndex(index)
+ }
+}
+
+// Abort cancels an in-progress write, preventing the file from being
+// automatically created and ensuring previously written chunks are
+// removed when the file is closed.
+//
+// It is a runtime error to call Abort when the file was not opened
+// for writing.
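+//
+// A sketch mirroring the package tests: after Abort, Close reports the
+// aborted write and any chunks already inserted are removed.
+//
+//     file.Abort()
+//     err := file.Close() // err reports the aborted write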
+func (file *GridFile) Abort() {
+ if file.mode != gfsWriting {
+ panic("file.Abort must be called on file opened for writing")
+ }
+ file.err = errors.New("write aborted")
+}
+
+// Write writes the provided data to the file and returns the
+// number of bytes written and an error in case something
+// went wrong.
+//
+// The file will internally cache the data so that all but the last
+// chunk sent to the database have the size defined by SetChunkSize.
+// This also means that errors may be deferred until a future call
+// to Write or Close.
+//
+// The parameters and behavior of this function turn the file
+// into an io.Writer.
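+//
+// Because errors may be deferred, the error returned by Close must be
+// checked even when every Write call succeeded. A sketch:
+//
+//     if _, err := file.Write(data); err != nil {
+//         // handle the immediate error
+//     }
+//     if err := file.Close(); err != nil {
+//         // handle a possibly deferred error
+//     }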
+func (file *GridFile) Write(data []byte) (n int, err error) {
+ file.assertMode(gfsWriting)
+ file.m.Lock()
+ debugf("GridFile %p: writing %d bytes", file, len(data))
+ defer file.m.Unlock()
+
+ if file.err != nil {
+ return 0, file.err
+ }
+
+ n = len(data)
+ file.doc.Length += int64(n)
+ chunkSize := file.doc.ChunkSize
+
+ if len(file.wbuf)+len(data) < chunkSize {
+ file.wbuf = append(file.wbuf, data...)
+ return
+ }
+
+ // First, flush file.wbuf complementing with data.
+ if len(file.wbuf) > 0 {
+ missing := chunkSize - len(file.wbuf)
+ if missing > len(data) {
+ missing = len(data)
+ }
+ file.wbuf = append(file.wbuf, data[:missing]...)
+ data = data[missing:]
+ file.insertChunk(file.wbuf)
+ file.wbuf = file.wbuf[0:0]
+ }
+
+ // Then, flush all chunks from data without copying.
+ for len(data) > chunkSize {
+ size := chunkSize
+ if size > len(data) {
+ size = len(data)
+ }
+ file.insertChunk(data[:size])
+ data = data[size:]
+ }
+
+ // And append the rest for a future call.
+ file.wbuf = append(file.wbuf, data...)
+
+ return n, file.err
+}
+
+func (file *GridFile) insertChunk(data []byte) {
+ n := file.chunk
+ file.chunk++
+ debugf("GridFile %p: adding to checksum: %q", file, string(data))
+ file.wsum.Write(data)
+
+ for file.doc.ChunkSize*file.wpending >= 1024*1024 {
+ // Hold on.. we got a MB pending.
+ file.c.Wait()
+ if file.err != nil {
+ return
+ }
+ }
+
+ file.wpending++
+
+ debugf("GridFile %p: inserting chunk %d with %d bytes", file, n, len(data))
+
+ // We may not own the memory of data, so rather than
+ // simply copying it, we'll marshal the document ahead of time.
+ data, err := bson.Marshal(gfsChunk{bson.NewObjectId(), file.doc.Id, n, data})
+ if err != nil {
+ file.err = err
+ return
+ }
+
+ go func() {
+ err := file.gfs.Chunks.Insert(bson.Raw{Data: data})
+ file.m.Lock()
+ file.wpending--
+ if err != nil && file.err == nil {
+ file.err = err
+ }
+ file.c.Broadcast()
+ file.m.Unlock()
+ }()
+}
+
+// Seek sets the offset for the next Read or Write on file to
+// offset, interpreted according to whence: 0 means relative to
+// the origin of the file, 1 means relative to the current offset,
+// and 2 means relative to the end. It returns the new offset and
+// an error, if any.
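+//
+// For example, to read the last ten bytes of a file (mirroring the package
+// tests; check is the helper from the examples above, and buf is an
+// illustrative byte slice):
+//
+//     _, err := file.Seek(-10, os.SEEK_END)
+//     check(err)
+//     n, err := file.Read(buf)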
+func (file *GridFile) Seek(offset int64, whence int) (pos int64, err error) {
+ file.m.Lock()
+	debugf("GridFile %p: seeking to offset %d (whence=%d)", file, offset, whence)
+ defer file.m.Unlock()
+ switch whence {
+ case os.SEEK_SET:
+ case os.SEEK_CUR:
+ offset += file.offset
+ case os.SEEK_END:
+ offset += file.doc.Length
+ default:
+ panic("unsupported whence value")
+ }
+ if offset > file.doc.Length {
+ return file.offset, errors.New("seek past end of file")
+ }
+ if offset == file.doc.Length {
+ // If we're seeking to the end of the file,
+ // no need to read anything. This enables
+ // a client to find the size of the file using only the
+ // io.ReadSeeker interface with low overhead.
+ file.offset = offset
+ return file.offset, nil
+ }
+ chunk := int(offset / int64(file.doc.ChunkSize))
+ if chunk+1 == file.chunk && offset >= file.offset {
+ file.rbuf = file.rbuf[int(offset-file.offset):]
+ file.offset = offset
+ return file.offset, nil
+ }
+ file.offset = offset
+ file.chunk = chunk
+ file.rbuf = nil
+ file.rbuf, err = file.getChunk()
+ if err == nil {
+ file.rbuf = file.rbuf[int(file.offset-int64(chunk)*int64(file.doc.ChunkSize)):]
+ }
+ return file.offset, err
+}
+
+// Read reads into b the next available data from the file and
+// returns the number of bytes read and an error in case
+// something went wrong. At the end of the file, n will
+// be zero and err will be set to io.EOF.
+//
+// The parameters and behavior of this function turn the file
+// into an io.Reader.
+func (file *GridFile) Read(b []byte) (n int, err error) {
+ file.assertMode(gfsReading)
+ file.m.Lock()
+ debugf("GridFile %p: reading at offset %d into buffer of length %d", file, file.offset, len(b))
+ defer file.m.Unlock()
+ if file.offset == file.doc.Length {
+ return 0, io.EOF
+ }
+ for err == nil {
+ i := copy(b, file.rbuf)
+ n += i
+ file.offset += int64(i)
+ file.rbuf = file.rbuf[i:]
+ if i == len(b) || file.offset == file.doc.Length {
+ break
+ }
+ b = b[i:]
+ file.rbuf, err = file.getChunk()
+ }
+ return n, err
+}
+
+func (file *GridFile) getChunk() (data []byte, err error) {
+ cache := file.rcache
+ file.rcache = nil
+ if cache != nil && cache.n == file.chunk {
+ debugf("GridFile %p: Getting chunk %d from cache", file, file.chunk)
+ cache.wait.Lock()
+ data, err = cache.data, cache.err
+ } else {
+ debugf("GridFile %p: Fetching chunk %d", file, file.chunk)
+ var doc gfsChunk
+ err = file.gfs.Chunks.Find(bson.D{{"files_id", file.doc.Id}, {"n", file.chunk}}).One(&doc)
+ data = doc.Data
+ }
+ file.chunk++
+ if int64(file.chunk)*int64(file.doc.ChunkSize) < file.doc.Length {
+ // Read the next one in background.
+ cache = &gfsCachedChunk{n: file.chunk}
+ cache.wait.Lock()
+ debugf("GridFile %p: Scheduling chunk %d for background caching", file, file.chunk)
+ // Clone the session to avoid having it closed in between.
+ chunks := file.gfs.Chunks
+ session := chunks.Database.Session.Clone()
+ go func(id interface{}, n int) {
+ defer session.Close()
+ chunks = chunks.With(session)
+ var doc gfsChunk
+ cache.err = chunks.Find(bson.D{{"files_id", id}, {"n", n}}).One(&doc)
+ cache.data = doc.Data
+ cache.wait.Unlock()
+ }(file.doc.Id, file.chunk)
+ file.rcache = cache
+ }
+ debugf("Returning err: %#v", err)
+ return
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/gridfs_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/gridfs_test.go
new file mode 100644
index 00000000000..5a6ed555950
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/gridfs_test.go
@@ -0,0 +1,708 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo_test
+
+import (
+ "io"
+ "os"
+ "time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/bson"
+)
+
+func (s *S) TestGridFSCreate(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ before := bson.Now()
+
+ gfs := db.GridFS("fs")
+ file, err := gfs.Create("")
+ c.Assert(err, IsNil)
+
+ n, err := file.Write([]byte("some data"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 9)
+
+ err = file.Close()
+ c.Assert(err, IsNil)
+
+ after := bson.Now()
+
+ // Check the file information.
+ result := M{}
+ err = db.C("fs.files").Find(nil).One(result)
+ c.Assert(err, IsNil)
+
+ fileId, ok := result["_id"].(bson.ObjectId)
+ c.Assert(ok, Equals, true)
+ c.Assert(fileId.Valid(), Equals, true)
+ result["_id"] = "<id>"
+
+ ud, ok := result["uploadDate"].(time.Time)
+ c.Assert(ok, Equals, true)
+ c.Assert(ud.After(before) && ud.Before(after), Equals, true)
+ result["uploadDate"] = "<timestamp>"
+
+ expected := M{
+ "_id": "<id>",
+ "length": 9,
+ "chunkSize": 255 * 1024,
+ "uploadDate": "<timestamp>",
+ "md5": "1e50210a0202497fb79bc38b6ade6c34",
+ }
+ c.Assert(result, DeepEquals, expected)
+
+ // Check the chunk.
+ result = M{}
+ err = db.C("fs.chunks").Find(nil).One(result)
+ c.Assert(err, IsNil)
+
+ chunkId, ok := result["_id"].(bson.ObjectId)
+ c.Assert(ok, Equals, true)
+ c.Assert(chunkId.Valid(), Equals, true)
+ result["_id"] = "<id>"
+
+ expected = M{
+ "_id": "<id>",
+ "files_id": fileId,
+ "n": 0,
+ "data": []byte("some data"),
+ }
+ c.Assert(result, DeepEquals, expected)
+
+ // Check that an index was created.
+ indexes, err := db.C("fs.chunks").Indexes()
+ c.Assert(err, IsNil)
+ c.Assert(len(indexes), Equals, 2)
+ c.Assert(indexes[1].Key, DeepEquals, []string{"files_id", "n"})
+}
+
+func (s *S) TestGridFSFileDetails(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+
+ file, err := gfs.Create("myfile1.txt")
+ c.Assert(err, IsNil)
+
+ n, err := file.Write([]byte("some"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 4)
+
+ c.Assert(file.Size(), Equals, int64(4))
+
+ n, err = file.Write([]byte(" data"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 5)
+
+ c.Assert(file.Size(), Equals, int64(9))
+
+ id, _ := file.Id().(bson.ObjectId)
+ c.Assert(id.Valid(), Equals, true)
+ c.Assert(file.Name(), Equals, "myfile1.txt")
+ c.Assert(file.ContentType(), Equals, "")
+
+ var info interface{}
+ err = file.GetMeta(&info)
+ c.Assert(err, IsNil)
+ c.Assert(info, IsNil)
+
+ file.SetId("myid")
+ file.SetName("myfile2.txt")
+ file.SetContentType("text/plain")
+ file.SetMeta(M{"any": "thing"})
+
+ c.Assert(file.Id(), Equals, "myid")
+ c.Assert(file.Name(), Equals, "myfile2.txt")
+ c.Assert(file.ContentType(), Equals, "text/plain")
+
+ err = file.GetMeta(&info)
+ c.Assert(err, IsNil)
+ c.Assert(info, DeepEquals, bson.M{"any": "thing"})
+
+ err = file.Close()
+ c.Assert(err, IsNil)
+
+ c.Assert(file.MD5(), Equals, "1e50210a0202497fb79bc38b6ade6c34")
+
+ ud := file.UploadDate()
+ now := time.Now()
+ c.Assert(ud.Before(now), Equals, true)
+ c.Assert(ud.After(now.Add(-3*time.Second)), Equals, true)
+
+ result := M{}
+ err = db.C("fs.files").Find(nil).One(result)
+ c.Assert(err, IsNil)
+
+ result["uploadDate"] = "<timestamp>"
+
+ expected := M{
+ "_id": "myid",
+ "length": 9,
+ "chunkSize": 255 * 1024,
+ "uploadDate": "<timestamp>",
+ "md5": "1e50210a0202497fb79bc38b6ade6c34",
+ "filename": "myfile2.txt",
+ "contentType": "text/plain",
+ "metadata": M{"any": "thing"},
+ }
+ c.Assert(result, DeepEquals, expected)
+}
+
+func (s *S) TestGridFSSetUploadDate(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+ file, err := gfs.Create("")
+ c.Assert(err, IsNil)
+
+ t := time.Date(2014, 1, 1, 1, 1, 1, 0, time.Local)
+ file.SetUploadDate(t)
+
+ err = file.Close()
+ c.Assert(err, IsNil)
+
+ // Check the file information.
+ result := M{}
+ err = db.C("fs.files").Find(nil).One(result)
+ c.Assert(err, IsNil)
+
+ ud := result["uploadDate"].(time.Time)
+ if !ud.Equal(t) {
+ c.Fatalf("want upload date %s, got %s", t, ud)
+ }
+}
+
+func (s *S) TestGridFSCreateWithChunking(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+
+ file, err := gfs.Create("")
+ c.Assert(err, IsNil)
+
+ file.SetChunkSize(5)
+
+ // Smaller than the chunk size.
+ n, err := file.Write([]byte("abc"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 3)
+
+ // Boundary in the middle.
+ n, err = file.Write([]byte("defg"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 4)
+
+ // Boundary at the end.
+ n, err = file.Write([]byte("hij"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 3)
+
+ // Larger than the chunk size, with 3 chunks.
+ n, err = file.Write([]byte("klmnopqrstuv"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 12)
+
+ err = file.Close()
+ c.Assert(err, IsNil)
+
+ // Check the file information.
+ result := M{}
+ err = db.C("fs.files").Find(nil).One(result)
+ c.Assert(err, IsNil)
+
+ fileId, _ := result["_id"].(bson.ObjectId)
+ c.Assert(fileId.Valid(), Equals, true)
+ result["_id"] = "<id>"
+ result["uploadDate"] = "<timestamp>"
+
+ expected := M{
+ "_id": "<id>",
+ "length": 22,
+ "chunkSize": 5,
+ "uploadDate": "<timestamp>",
+ "md5": "44a66044834cbe55040089cabfc102d5",
+ }
+ c.Assert(result, DeepEquals, expected)
+
+ // Check the chunks.
+ iter := db.C("fs.chunks").Find(nil).Sort("n").Iter()
+ dataChunks := []string{"abcde", "fghij", "klmno", "pqrst", "uv"}
+ for i := 0; ; i++ {
+ result = M{}
+ if !iter.Next(result) {
+ if i != 5 {
+ c.Fatalf("Expected 5 chunks, got %d", i)
+ }
+ break
+ }
+ c.Assert(iter.Close(), IsNil)
+
+ result["_id"] = "<id>"
+
+ expected = M{
+ "_id": "<id>",
+ "files_id": fileId,
+ "n": i,
+ "data": []byte(dataChunks[i]),
+ }
+ c.Assert(result, DeepEquals, expected)
+ }
+}
+
+func (s *S) TestGridFSAbort(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+ file, err := gfs.Create("")
+ c.Assert(err, IsNil)
+
+ file.SetChunkSize(5)
+
+ n, err := file.Write([]byte("some data"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 9)
+
+ var count int
+ for i := 0; i < 10; i++ {
+ count, err = db.C("fs.chunks").Count()
+ if count > 0 || err != nil {
+ break
+ }
+ }
+ c.Assert(err, IsNil)
+ c.Assert(count, Equals, 1)
+
+ file.Abort()
+
+ err = file.Close()
+ c.Assert(err, ErrorMatches, "write aborted")
+
+ count, err = db.C("fs.chunks").Count()
+ c.Assert(err, IsNil)
+ c.Assert(count, Equals, 0)
+}
+
+func (s *S) TestGridFSCloseConflict(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ db.C("fs.files").EnsureIndex(mgo.Index{Key: []string{"filename"}, Unique: true})
+
+ // For a closing-time conflict
+ err = db.C("fs.files").Insert(M{"filename": "foo.txt"})
+ c.Assert(err, IsNil)
+
+ gfs := db.GridFS("fs")
+ file, err := gfs.Create("foo.txt")
+ c.Assert(err, IsNil)
+
+ _, err = file.Write([]byte("some data"))
+ c.Assert(err, IsNil)
+
+ err = file.Close()
+ c.Assert(mgo.IsDup(err), Equals, true)
+
+ count, err := db.C("fs.chunks").Count()
+ c.Assert(err, IsNil)
+ c.Assert(count, Equals, 0)
+}
+
+func (s *S) TestGridFSOpenNotFound(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+ file, err := gfs.OpenId("non-existent")
+ c.Assert(err == mgo.ErrNotFound, Equals, true)
+ c.Assert(file, IsNil)
+
+ file, err = gfs.Open("non-existent")
+ c.Assert(err == mgo.ErrNotFound, Equals, true)
+ c.Assert(file, IsNil)
+}
+
+func (s *S) TestGridFSReadAll(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+ file, err := gfs.Create("")
+ c.Assert(err, IsNil)
+ id := file.Id()
+
+ file.SetChunkSize(5)
+
+ n, err := file.Write([]byte("abcdefghijklmnopqrstuv"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 22)
+
+ err = file.Close()
+ c.Assert(err, IsNil)
+
+ file, err = gfs.OpenId(id)
+ c.Assert(err, IsNil)
+
+ b := make([]byte, 30)
+ n, err = file.Read(b)
+ c.Assert(n, Equals, 22)
+ c.Assert(err, IsNil)
+
+ n, err = file.Read(b)
+ c.Assert(n, Equals, 0)
+ c.Assert(err == io.EOF, Equals, true)
+
+ err = file.Close()
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestGridFSReadChunking(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+
+ file, err := gfs.Create("")
+ c.Assert(err, IsNil)
+
+ id := file.Id()
+
+ file.SetChunkSize(5)
+
+ n, err := file.Write([]byte("abcdefghijklmnopqrstuv"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 22)
+
+ err = file.Close()
+ c.Assert(err, IsNil)
+
+ file, err = gfs.OpenId(id)
+ c.Assert(err, IsNil)
+
+ b := make([]byte, 30)
+
+ // Smaller than the chunk size.
+ n, err = file.Read(b[:3])
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 3)
+ c.Assert(b[:3], DeepEquals, []byte("abc"))
+
+ // Boundary in the middle.
+ n, err = file.Read(b[:4])
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 4)
+ c.Assert(b[:4], DeepEquals, []byte("defg"))
+
+ // Boundary at the end.
+ n, err = file.Read(b[:3])
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 3)
+ c.Assert(b[:3], DeepEquals, []byte("hij"))
+
+ // Larger than the chunk size, with 3 chunks.
+ n, err = file.Read(b)
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 12)
+ c.Assert(b[:12], DeepEquals, []byte("klmnopqrstuv"))
+
+ n, err = file.Read(b)
+ c.Assert(n, Equals, 0)
+ c.Assert(err == io.EOF, Equals, true)
+
+ err = file.Close()
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestGridFSOpen(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+
+ file, err := gfs.Create("myfile.txt")
+ c.Assert(err, IsNil)
+ file.Write([]byte{'1'})
+ file.Close()
+
+ file, err = gfs.Create("myfile.txt")
+ c.Assert(err, IsNil)
+ file.Write([]byte{'2'})
+ file.Close()
+
+ file, err = gfs.Open("myfile.txt")
+ c.Assert(err, IsNil)
+ defer file.Close()
+
+ var b [1]byte
+
+ _, err = file.Read(b[:])
+ c.Assert(err, IsNil)
+ c.Assert(string(b[:]), Equals, "2")
+}
+
+func (s *S) TestGridFSSeek(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+ file, err := gfs.Create("")
+ c.Assert(err, IsNil)
+ id := file.Id()
+
+ file.SetChunkSize(5)
+
+ n, err := file.Write([]byte("abcdefghijklmnopqrstuv"))
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 22)
+
+ err = file.Close()
+ c.Assert(err, IsNil)
+
+ b := make([]byte, 5)
+
+ file, err = gfs.OpenId(id)
+ c.Assert(err, IsNil)
+
+ o, err := file.Seek(3, os.SEEK_SET)
+ c.Assert(err, IsNil)
+ c.Assert(o, Equals, int64(3))
+ _, err = file.Read(b)
+ c.Assert(err, IsNil)
+ c.Assert(b, DeepEquals, []byte("defgh"))
+
+ o, err = file.Seek(5, os.SEEK_CUR)
+ c.Assert(err, IsNil)
+ c.Assert(o, Equals, int64(13))
+ _, err = file.Read(b)
+ c.Assert(err, IsNil)
+ c.Assert(b, DeepEquals, []byte("nopqr"))
+
+ o, err = file.Seek(0, os.SEEK_END)
+ c.Assert(err, IsNil)
+ c.Assert(o, Equals, int64(22))
+ n, err = file.Read(b)
+ c.Assert(err, Equals, io.EOF)
+ c.Assert(n, Equals, 0)
+
+ o, err = file.Seek(-10, os.SEEK_END)
+ c.Assert(err, IsNil)
+ c.Assert(o, Equals, int64(12))
+ _, err = file.Read(b)
+ c.Assert(err, IsNil)
+ c.Assert(b, DeepEquals, []byte("mnopq"))
+
+ o, err = file.Seek(8, os.SEEK_SET)
+ c.Assert(err, IsNil)
+ c.Assert(o, Equals, int64(8))
+ _, err = file.Read(b)
+ c.Assert(err, IsNil)
+ c.Assert(b, DeepEquals, []byte("ijklm"))
+
+ // Trivial seek forward within same chunk. Already
+ // got the data, shouldn't touch the database.
+ sent := mgo.GetStats().SentOps
+ o, err = file.Seek(1, os.SEEK_CUR)
+ c.Assert(err, IsNil)
+ c.Assert(o, Equals, int64(14))
+ c.Assert(mgo.GetStats().SentOps, Equals, sent)
+ _, err = file.Read(b)
+ c.Assert(err, IsNil)
+ c.Assert(b, DeepEquals, []byte("opqrs"))
+
+ // Try seeking past end of file.
+ file.Seek(3, os.SEEK_SET)
+ o, err = file.Seek(23, os.SEEK_SET)
+ c.Assert(err, ErrorMatches, "seek past end of file")
+ c.Assert(o, Equals, int64(3))
+}
+
+func (s *S) TestGridFSRemoveId(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+
+ file, err := gfs.Create("myfile.txt")
+ c.Assert(err, IsNil)
+ file.Write([]byte{'1'})
+ file.Close()
+
+ file, err = gfs.Create("myfile.txt")
+ c.Assert(err, IsNil)
+ file.Write([]byte{'2'})
+ id := file.Id()
+ file.Close()
+
+ err = gfs.RemoveId(id)
+ c.Assert(err, IsNil)
+
+ file, err = gfs.Open("myfile.txt")
+ c.Assert(err, IsNil)
+ defer file.Close()
+
+ var b [1]byte
+
+ _, err = file.Read(b[:])
+ c.Assert(err, IsNil)
+ c.Assert(string(b[:]), Equals, "1")
+
+ n, err := db.C("fs.chunks").Find(M{"files_id": id}).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 0)
+}
+
+func (s *S) TestGridFSRemove(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+
+ file, err := gfs.Create("myfile.txt")
+ c.Assert(err, IsNil)
+ file.Write([]byte{'1'})
+ file.Close()
+
+ file, err = gfs.Create("myfile.txt")
+ c.Assert(err, IsNil)
+ file.Write([]byte{'2'})
+ file.Close()
+
+ err = gfs.Remove("myfile.txt")
+ c.Assert(err, IsNil)
+
+ _, err = gfs.Open("myfile.txt")
+ c.Assert(err == mgo.ErrNotFound, Equals, true)
+
+ n, err := db.C("fs.chunks").Find(nil).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 0)
+}
+
+func (s *S) TestGridFSOpenNext(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+
+ file, err := gfs.Create("myfile1.txt")
+ c.Assert(err, IsNil)
+ file.Write([]byte{'1'})
+ file.Close()
+
+ file, err = gfs.Create("myfile2.txt")
+ c.Assert(err, IsNil)
+ file.Write([]byte{'2'})
+ file.Close()
+
+ var f *mgo.GridFile
+ var b [1]byte
+
+ iter := gfs.Find(nil).Sort("-filename").Iter()
+
+ ok := gfs.OpenNext(iter, &f)
+ c.Assert(ok, Equals, true)
+ c.Check(f.Name(), Equals, "myfile2.txt")
+
+ _, err = f.Read(b[:])
+ c.Assert(err, IsNil)
+ c.Assert(string(b[:]), Equals, "2")
+
+ ok = gfs.OpenNext(iter, &f)
+ c.Assert(ok, Equals, true)
+ c.Check(f.Name(), Equals, "myfile1.txt")
+
+ _, err = f.Read(b[:])
+ c.Assert(err, IsNil)
+ c.Assert(string(b[:]), Equals, "1")
+
+ ok = gfs.OpenNext(iter, &f)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Close(), IsNil)
+ c.Assert(f, IsNil)
+
+ // Do it again with a more restrictive query to make sure
+ // it's actually taken into account.
+ iter = gfs.Find(bson.M{"filename": "myfile1.txt"}).Iter()
+
+ ok = gfs.OpenNext(iter, &f)
+ c.Assert(ok, Equals, true)
+ c.Check(f.Name(), Equals, "myfile1.txt")
+
+ ok = gfs.OpenNext(iter, &f)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Close(), IsNil)
+ c.Assert(f, IsNil)
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.crt b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.crt
new file mode 100644
index 00000000000..6143d925472
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.crt
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDLjCCAhYCAQcwDQYJKoZIhvcNAQELBQAwXDELMAkGA1UEBhMCR08xDDAKBgNV
+BAgMA01HTzEMMAoGA1UEBwwDTUdPMQwwCgYDVQQKDANNR08xDzANBgNVBAsMBlNl
+cnZlcjESMBAGA1UEAwwJbG9jYWxob3N0MCAXDTE1MDkyOTA4NDAzMFoYDzIxMTUw
+OTA1MDg0MDMwWjBcMQswCQYDVQQGEwJHTzEMMAoGA1UECAwDTUdPMQwwCgYDVQQH
+DANNR08xDDAKBgNVBAoMA01HTzEPMA0GA1UECwwGQ2xpZW50MRIwEAYDVQQDDAls
+b2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC0UiQhmT+H
+4IIqrn8SMESDzvcl3rwImwUoRIHlmXkovCIZCbvBCJ1nAu6X5zIN89EPPOjfNrgZ
+616wPgVV/YEQXp+D7+jTAsE5s8JepRXFdecResmvh/+0i2DSuI4QFsuyVAPM1O0I
+AQ5EKgr0weZZmsX6lhPD4uYehV4DxDE0i/8aTAlDoNgRCAJrYFMharRTDdY7bQzd
+7ZYab/pK/3DSmOKxl/AFJ8Enmcj9w1bsvy0fgAgoGEBnBru80PRFpFiqk72TJkXO
+Hx7zcYFpegtKPbAreTCModaCnjP//fskCp4XJrkfH5+01NeeX/r1OfEbjgE/wzzx
+l8NaWnPCmxNfAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAFwYpje3dCLDOIHYjd+5
+CpFOEb+bJsS4ryqm/NblTjIhCLo58hNpMsBqdJHRbHAFRCOE8fvY8yiWtdHeFZcW
+DgVRAXfHONLtN7faZaZQnhy/YzOhLfC/8dUMB0gQA8KXhBCPZqQmexE28AfkEO47
+PwICAxIWINfjm5VnFMkA3b7bDNLHon/pev2m7HqVQ3pRUJQNK3XgFOdDgRrnuXpR
+OKAfHORHVGTh1gf1DVwc0oM+0gnkSiJ1VG0n5pE3zhZ24fmZxu6JQ6X515W7APQI
+/nKVH+f1Fo+ustyTNLt8Bwxi1XmwT7IXwnkVSE9Ff6VejppXRF01V0aaWsa3kU3r
+z3A=
+-----END CERTIFICATE-----
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.key b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.key
new file mode 100644
index 00000000000..892db714f91
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEogIBAAKCAQEAtFIkIZk/h+CCKq5/EjBEg873Jd68CJsFKESB5Zl5KLwiGQm7
+wQidZwLul+cyDfPRDzzo3za4GetesD4FVf2BEF6fg+/o0wLBObPCXqUVxXXnEXrJ
+r4f/tItg0riOEBbLslQDzNTtCAEORCoK9MHmWZrF+pYTw+LmHoVeA8QxNIv/GkwJ
+Q6DYEQgCa2BTIWq0Uw3WO20M3e2WGm/6Sv9w0pjisZfwBSfBJ5nI/cNW7L8tH4AI
+KBhAZwa7vND0RaRYqpO9kyZFzh8e83GBaXoLSj2wK3kwjKHWgp4z//37JAqeFya5
+Hx+ftNTXnl/69TnxG44BP8M88ZfDWlpzwpsTXwIDAQABAoIBADzCjOAxZkHfuZyu
+La0wTHXpkEfXdJ6ltagq5WY7P6MlOYwcRoK152vlhgXzZl9jL6ely4YjRwec0swq
+KdwezpV4fOGVPmuTuw45bx47HEnr/49ZQ4p9FgF9EYQPofbz53FQc/NaMACJcogv
+bn+osniw+VMFrOVNmGLiZ5p3Smk8zfXE7GRHO8CL5hpWLWO/aK236yytbfWOjM2f
+Pr76ICb26TPRNzYaYUEThU6DtgdLU8pLnJ6QKKaDsjn+zqQzRa+Nvc0c0K8gvWwA
+Afq7t0325+uMSwfpLgCOFldcaZQ5uvteJ0CAVRq1MvStnSHBmMzPlgS+NzsDm6lp
+QH5+rIkCgYEA5j3jrWsv7TueTNbk8Hr/Zwywc+fA2Ex0pBURBHlHyc6ahSXWSCqo
+DtvRGX0GDoK1lCfaIf1qb/DLlGaoHpkEeqcNhXQ+hHs+bZAxfbfBY9+ikit5ZTtl
+QN1tIlhaiyLDnwhkpi/hMw1tiouxJUf84Io61z0sCL4hyZSPCpjn0H0CgYEAyH6F
+Mwl+bCD3VDL/Dr5WSoOr2B/M3bF5SfvdStwy2IPcDJ716je1Ud/2qFCnKGgqvWhJ
++HU15c7CjAWo7/pXq2/pEMD8fDKTYww4Hr4p6duEA7DpbOGkwcUX8u3eknxUWT9F
+jOSbTCvAxuDOC1K3AElyMxVVTNUrFFe8M84R9gsCgYBXmb6RkdG3WlKde7m5gaLB
+K4PLZabq5RQQBe/mmtpkfxYtiLrh1FEC7kG9h+MRDExX5V3KRugDVUOv3+shUSjy
+HbM4ToUm1NloyE78PTj4bfMl2CKlEJcyucy3H5S7kWuKi5/31wnA6d/+sa2huKUP
+Lai7kgu5+9VRJBPUfV7d5QKBgCnhk/13TDtWH5QtGu5/gBMMskbxTaA5xHZZ8H4E
+xXJJCRxx0Dje7jduK145itF8AQGT2W/XPC0HJciOHh4TE2EyfWMMjTF8dyFHmimB
+28uIGWmT+Q7Pi9UWUMxkOAwtgIksGGE4F+CvexOQPjpLSwL6VKqrGCh2lwsm0J+Z
+ulLFAoGAKlC93c6XEj1A31c1+usdEhUe9BrmTqtSYLYpDNpeMLdZ3VctrAZuOQPZ
+4A4gkkQkqqwZGBYYSEqwqiLU6MsBdHPPZ9u3JXLLOQuh1xGeaKylvHj7qx6iT0Xo
+I+FkJ6/3JeMgOina/+wlzD4oyQpqR4Mnh+TuLkDfQTgY+Lg0WPk=
+-----END RSA PRIVATE KEY-----
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.pem b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.pem
new file mode 100644
index 00000000000..93aed3556e9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.pem
@@ -0,0 +1,57 @@
+To regenerate the key:
+
+ openssl req -newkey rsa:2048 -new -x509 -days 36500 -nodes -out server.crt -keyout server.key
+ cat server.key server.crt > server.pem
+ openssl genrsa -out client.key 2048
+ openssl req -key client.key -new -out client.req
+ openssl x509 -req -in client.req -CA server.crt -CAkey server.key -days 36500 -CAserial file.srl -out client.crt
+ cat client.key client.crt > client.pem
+
+-----BEGIN RSA PRIVATE KEY-----
+MIIEogIBAAKCAQEAtFIkIZk/h+CCKq5/EjBEg873Jd68CJsFKESB5Zl5KLwiGQm7
+wQidZwLul+cyDfPRDzzo3za4GetesD4FVf2BEF6fg+/o0wLBObPCXqUVxXXnEXrJ
+r4f/tItg0riOEBbLslQDzNTtCAEORCoK9MHmWZrF+pYTw+LmHoVeA8QxNIv/GkwJ
+Q6DYEQgCa2BTIWq0Uw3WO20M3e2WGm/6Sv9w0pjisZfwBSfBJ5nI/cNW7L8tH4AI
+KBhAZwa7vND0RaRYqpO9kyZFzh8e83GBaXoLSj2wK3kwjKHWgp4z//37JAqeFya5
+Hx+ftNTXnl/69TnxG44BP8M88ZfDWlpzwpsTXwIDAQABAoIBADzCjOAxZkHfuZyu
+La0wTHXpkEfXdJ6ltagq5WY7P6MlOYwcRoK152vlhgXzZl9jL6ely4YjRwec0swq
+KdwezpV4fOGVPmuTuw45bx47HEnr/49ZQ4p9FgF9EYQPofbz53FQc/NaMACJcogv
+bn+osniw+VMFrOVNmGLiZ5p3Smk8zfXE7GRHO8CL5hpWLWO/aK236yytbfWOjM2f
+Pr76ICb26TPRNzYaYUEThU6DtgdLU8pLnJ6QKKaDsjn+zqQzRa+Nvc0c0K8gvWwA
+Afq7t0325+uMSwfpLgCOFldcaZQ5uvteJ0CAVRq1MvStnSHBmMzPlgS+NzsDm6lp
+QH5+rIkCgYEA5j3jrWsv7TueTNbk8Hr/Zwywc+fA2Ex0pBURBHlHyc6ahSXWSCqo
+DtvRGX0GDoK1lCfaIf1qb/DLlGaoHpkEeqcNhXQ+hHs+bZAxfbfBY9+ikit5ZTtl
+QN1tIlhaiyLDnwhkpi/hMw1tiouxJUf84Io61z0sCL4hyZSPCpjn0H0CgYEAyH6F
+Mwl+bCD3VDL/Dr5WSoOr2B/M3bF5SfvdStwy2IPcDJ716je1Ud/2qFCnKGgqvWhJ
++HU15c7CjAWo7/pXq2/pEMD8fDKTYww4Hr4p6duEA7DpbOGkwcUX8u3eknxUWT9F
+jOSbTCvAxuDOC1K3AElyMxVVTNUrFFe8M84R9gsCgYBXmb6RkdG3WlKde7m5gaLB
+K4PLZabq5RQQBe/mmtpkfxYtiLrh1FEC7kG9h+MRDExX5V3KRugDVUOv3+shUSjy
+HbM4ToUm1NloyE78PTj4bfMl2CKlEJcyucy3H5S7kWuKi5/31wnA6d/+sa2huKUP
+Lai7kgu5+9VRJBPUfV7d5QKBgCnhk/13TDtWH5QtGu5/gBMMskbxTaA5xHZZ8H4E
+xXJJCRxx0Dje7jduK145itF8AQGT2W/XPC0HJciOHh4TE2EyfWMMjTF8dyFHmimB
+28uIGWmT+Q7Pi9UWUMxkOAwtgIksGGE4F+CvexOQPjpLSwL6VKqrGCh2lwsm0J+Z
+ulLFAoGAKlC93c6XEj1A31c1+usdEhUe9BrmTqtSYLYpDNpeMLdZ3VctrAZuOQPZ
+4A4gkkQkqqwZGBYYSEqwqiLU6MsBdHPPZ9u3JXLLOQuh1xGeaKylvHj7qx6iT0Xo
+I+FkJ6/3JeMgOina/+wlzD4oyQpqR4Mnh+TuLkDfQTgY+Lg0WPk=
+-----END RSA PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDLjCCAhYCAQcwDQYJKoZIhvcNAQELBQAwXDELMAkGA1UEBhMCR08xDDAKBgNV
+BAgMA01HTzEMMAoGA1UEBwwDTUdPMQwwCgYDVQQKDANNR08xDzANBgNVBAsMBlNl
+cnZlcjESMBAGA1UEAwwJbG9jYWxob3N0MCAXDTE1MDkyOTA4NDAzMFoYDzIxMTUw
+OTA1MDg0MDMwWjBcMQswCQYDVQQGEwJHTzEMMAoGA1UECAwDTUdPMQwwCgYDVQQH
+DANNR08xDDAKBgNVBAoMA01HTzEPMA0GA1UECwwGQ2xpZW50MRIwEAYDVQQDDAls
+b2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC0UiQhmT+H
+4IIqrn8SMESDzvcl3rwImwUoRIHlmXkovCIZCbvBCJ1nAu6X5zIN89EPPOjfNrgZ
+616wPgVV/YEQXp+D7+jTAsE5s8JepRXFdecResmvh/+0i2DSuI4QFsuyVAPM1O0I
+AQ5EKgr0weZZmsX6lhPD4uYehV4DxDE0i/8aTAlDoNgRCAJrYFMharRTDdY7bQzd
+7ZYab/pK/3DSmOKxl/AFJ8Enmcj9w1bsvy0fgAgoGEBnBru80PRFpFiqk72TJkXO
+Hx7zcYFpegtKPbAreTCModaCnjP//fskCp4XJrkfH5+01NeeX/r1OfEbjgE/wzzx
+l8NaWnPCmxNfAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAFwYpje3dCLDOIHYjd+5
+CpFOEb+bJsS4ryqm/NblTjIhCLo58hNpMsBqdJHRbHAFRCOE8fvY8yiWtdHeFZcW
+DgVRAXfHONLtN7faZaZQnhy/YzOhLfC/8dUMB0gQA8KXhBCPZqQmexE28AfkEO47
+PwICAxIWINfjm5VnFMkA3b7bDNLHon/pev2m7HqVQ3pRUJQNK3XgFOdDgRrnuXpR
+OKAfHORHVGTh1gf1DVwc0oM+0gnkSiJ1VG0n5pE3zhZ24fmZxu6JQ6X515W7APQI
+/nKVH+f1Fo+ustyTNLt8Bwxi1XmwT7IXwnkVSE9Ff6VejppXRF01V0aaWsa3kU3r
+z3A=
+-----END CERTIFICATE-----
+
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.req b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.req
new file mode 100644
index 00000000000..e44feb4e867
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/client.req
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE REQUEST-----
+MIICoTCCAYkCAQAwXDELMAkGA1UEBhMCR08xDDAKBgNVBAgMA01HTzEMMAoGA1UE
+BwwDTUdPMQwwCgYDVQQKDANNR08xDzANBgNVBAsMBkNsaWVudDESMBAGA1UEAwwJ
+bG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtFIkIZk/
+h+CCKq5/EjBEg873Jd68CJsFKESB5Zl5KLwiGQm7wQidZwLul+cyDfPRDzzo3za4
+GetesD4FVf2BEF6fg+/o0wLBObPCXqUVxXXnEXrJr4f/tItg0riOEBbLslQDzNTt
+CAEORCoK9MHmWZrF+pYTw+LmHoVeA8QxNIv/GkwJQ6DYEQgCa2BTIWq0Uw3WO20M
+3e2WGm/6Sv9w0pjisZfwBSfBJ5nI/cNW7L8tH4AIKBhAZwa7vND0RaRYqpO9kyZF
+zh8e83GBaXoLSj2wK3kwjKHWgp4z//37JAqeFya5Hx+ftNTXnl/69TnxG44BP8M8
+8ZfDWlpzwpsTXwIDAQABoAAwDQYJKoZIhvcNAQELBQADggEBAKbOFblIscxlXalV
+sEGNm2oz380RN2QoLhN6nKtAiv0jWm6iKhdAhOIQIeaRPhUP3cyi8bcBvLdMeQ3d
+ZYIByB55/R0VSP1vs4qkXJCQegHcpMpyuIzsMV8p3Q4lxzGKyKtPA6Bb5c49p8Sk
+ncD+LL4ymrMEia4cBPsHL9hhFOm4gqDacbU8+ETLTpuoSvUZiw7OwngqhE2r+kMv
+KDweq5TOPeb+ftKzQKrrfB+XVdBoTKYw6CwARpogbc0/7mvottVcJ/0yAgC1fBbM
+vupkohkXwKfjxKl6nKNL3R2GkzHQOh91hglAx5zyybKQn2YMM328Vk4X6csBg+pg
+tb1s0MA=
+-----END CERTIFICATE REQUEST-----
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.crt b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.crt
new file mode 100644
index 00000000000..4515f559294
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.crt
@@ -0,0 +1,22 @@
+-----BEGIN CERTIFICATE-----
+MIIDjTCCAnWgAwIBAgIJAMW+wDfcdzC+MA0GCSqGSIb3DQEBCwUAMFwxCzAJBgNV
+BAYTAkdPMQwwCgYDVQQIDANNR08xDDAKBgNVBAcMA01HTzEMMAoGA1UECgwDTUdP
+MQ8wDQYDVQQLDAZTZXJ2ZXIxEjAQBgNVBAMMCWxvY2FsaG9zdDAgFw0xNTA5Mjkw
+ODM0MTBaGA8yMTE1MDkwNTA4MzQxMFowXDELMAkGA1UEBhMCR08xDDAKBgNVBAgM
+A01HTzEMMAoGA1UEBwwDTUdPMQwwCgYDVQQKDANNR08xDzANBgNVBAsMBlNlcnZl
+cjESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEA/T5W1vTsAF+2gTXP1JKygjM7T/2BXHiJc6DRKVjlshTtPYuC3rpTddDm
+6d86d17LWEo+T2bCT4MzZJhSGAun9peFvehdElRMr57xs7j5V1QYjwadMTBkLQuK
+IAg6cISN1KPUzpUTUKsWIsbx97sA0t0wiEPifROb7nfSMIVQsdz/c9LlY2UNYI+5
+GiU88iDGg2wrdsa3U+l2G2KSx/9uE3c5iFki6bdequLiWmBZ6rxfoaLe4gk1INji
+fKssNsn2i3uJ4i4Tmr3PUc4kxx0mMKuWK3HdlQsMqtpq++HQmHSvsPrbgcjl9HyP
+JiHDsoJ+4O5bbtcE51oQbLh1bZAhYwIDAQABo1AwTjAdBgNVHQ4EFgQUhku/u9Kd
+OAc1L0OR649vCCuQT+0wHwYDVR0jBBgwFoAUhku/u9KdOAc1L0OR649vCCuQT+0w
+DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAw7Bgw3hlWXWSZjLhnSOu
+2mW/UJ2Sj31unHngmgtXwW/04cyzoULb+qmzPe/Z06QMgGIsku1jFBcu0JabQtUG
+TyalpfW77tfnvz238CYdImYwE9ZcIGuZGfhs6ySFN9XpW43B8YM7R8wTNPvOcSPw
+nfjqU6kueN4TTspQg9cKhDss5DcMTIdgJgLbITXhIsrCu6GlKOgtX3HrdMGpQX7s
+UoMXtZVG8pK32vxKWGTZ6DPqESeKjjq74NbYnB3H5U/kDU2dt7LF90C/Umdr9y+C
+W2OJb1WBrf6RTcbt8D6d7P9kOfLPOtyn/cbaA/pfXBMQMHqr7XNXzjnaNU+jB7hL
+yQ==
+-----END CERTIFICATE-----
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.key b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.key
new file mode 100644
index 00000000000..082d093e922
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQD9PlbW9OwAX7aB
+Nc/UkrKCMztP/YFceIlzoNEpWOWyFO09i4LeulN10Obp3zp3XstYSj5PZsJPgzNk
+mFIYC6f2l4W96F0SVEyvnvGzuPlXVBiPBp0xMGQtC4ogCDpwhI3Uo9TOlRNQqxYi
+xvH3uwDS3TCIQ+J9E5vud9IwhVCx3P9z0uVjZQ1gj7kaJTzyIMaDbCt2xrdT6XYb
+YpLH/24TdzmIWSLpt16q4uJaYFnqvF+hot7iCTUg2OJ8qyw2yfaLe4niLhOavc9R
+ziTHHSYwq5Yrcd2VCwyq2mr74dCYdK+w+tuByOX0fI8mIcOygn7g7ltu1wTnWhBs
+uHVtkCFjAgMBAAECggEASRAfRc1L+Z+jrAu2doIMdnwJdL6S//bW0UFolyFKw+I9
+wC/sBg6D3c3zkS4SVDZJPKPO7mGbVg1oWnGH3eAfCYoV0ACmOY+QwGp/GXcYmRVu
+MHWcDIEFpelaZHt7QNM9iEfsMd3YwMFblZUIYozVZADk66uKQMPTjS2Muur7qRSi
+wuVfSmsVZ5afH3B1Tr96BbmPsHrXLjvNpjO44k2wrnnSPQjUL7+YiZPvtnNW8Fby
+yuo2uoAyjg3+68PYZftOvvNneMsv1uyGlUs6Bk+DVWaqofIztWFdFZyXbHnK2PTk
+eGQt5EsL+RwIck5eoqd5vSE+KyzhhydL0zcpngVQoQKBgQD/Yelvholbz5NQtSy3
+ZoiW1y7hL1BKzvVNHuAMKJ5WOnj5szhjhKxt/wZ+hk0qcAmlV9WAPbf4izbEwPRC
+tnMBQzf1uBxqqbLL6WZ4YAyGrcX3UrT7GXsGfVT4zJjz7oYSw8aPircecw5V4exB
+xa4NF+ki8IycXSkHwvW2R56fRwKBgQD92xpxXtte/rUnmENbQmr0aKg7JEfMoih6
+MdX+f6mfgjMmqj+L4jPTI8/ql8HEy13SQS1534aDSHO+nBqBK5aHUCRMIgSLnTP9
+Xyx9Ngg03SZIkPfykqxQmnZgWkTPMhYS+K1Ao9FGVs8W5jVi7veyAdhHptAcxhP3
+IuxvrxVTBQKBgQCluMPiu0snaOwP04HRAZhhSgIB3tIbuXE1OnPpb/JPwmH+p25Q
+Jig+uN9d+4jXoRyhTv4c2fAoOS6xPwVCxWKbzyLhMTg/fx+ncy4rryhxvRJaDDGl
+QEO1Ul9xlFMs9/vI8YJIY5uxBrimwpStmbn4hSukoLSeQ1X802bfglpMwQKBgD8z
+GTY4Y20XBIrDAaHquy32EEwJEEcF6AXj+l7N8bDgfVOW9xMgUb6zH8RL29Xeu5Do
+4SWCXL66fvZpbr/R1jwB28eIgJExpgvicfUKSqi+lhVi4hfmJDg8/FOopZDf61b1
+ykxZfHSCkDQnRAtJaylKBEpyYUWImtfgPfTgJfLxAoGAc8A/Tl2h/DsdTA+cA5d7
+1e0l64m13ObruSWRczyru4hy8Yq6E/K2rOFw8cYCcFpy24NqNlk+2iXPLRpWm2zt
+9R497zAPvhK/bfPXjvm0j/VjB44lvRTC9hby/RRMHy9UJk4o/UQaD+1IodxZovvk
+SruEA1+5bfBRMW0P+h7Qfe4=
+-----END PRIVATE KEY-----
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.pem b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.pem
new file mode 100644
index 00000000000..487b92d66b8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/certs/server.pem
@@ -0,0 +1,50 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQD9PlbW9OwAX7aB
+Nc/UkrKCMztP/YFceIlzoNEpWOWyFO09i4LeulN10Obp3zp3XstYSj5PZsJPgzNk
+mFIYC6f2l4W96F0SVEyvnvGzuPlXVBiPBp0xMGQtC4ogCDpwhI3Uo9TOlRNQqxYi
+xvH3uwDS3TCIQ+J9E5vud9IwhVCx3P9z0uVjZQ1gj7kaJTzyIMaDbCt2xrdT6XYb
+YpLH/24TdzmIWSLpt16q4uJaYFnqvF+hot7iCTUg2OJ8qyw2yfaLe4niLhOavc9R
+ziTHHSYwq5Yrcd2VCwyq2mr74dCYdK+w+tuByOX0fI8mIcOygn7g7ltu1wTnWhBs
+uHVtkCFjAgMBAAECggEASRAfRc1L+Z+jrAu2doIMdnwJdL6S//bW0UFolyFKw+I9
+wC/sBg6D3c3zkS4SVDZJPKPO7mGbVg1oWnGH3eAfCYoV0ACmOY+QwGp/GXcYmRVu
+MHWcDIEFpelaZHt7QNM9iEfsMd3YwMFblZUIYozVZADk66uKQMPTjS2Muur7qRSi
+wuVfSmsVZ5afH3B1Tr96BbmPsHrXLjvNpjO44k2wrnnSPQjUL7+YiZPvtnNW8Fby
+yuo2uoAyjg3+68PYZftOvvNneMsv1uyGlUs6Bk+DVWaqofIztWFdFZyXbHnK2PTk
+eGQt5EsL+RwIck5eoqd5vSE+KyzhhydL0zcpngVQoQKBgQD/Yelvholbz5NQtSy3
+ZoiW1y7hL1BKzvVNHuAMKJ5WOnj5szhjhKxt/wZ+hk0qcAmlV9WAPbf4izbEwPRC
+tnMBQzf1uBxqqbLL6WZ4YAyGrcX3UrT7GXsGfVT4zJjz7oYSw8aPircecw5V4exB
+xa4NF+ki8IycXSkHwvW2R56fRwKBgQD92xpxXtte/rUnmENbQmr0aKg7JEfMoih6
+MdX+f6mfgjMmqj+L4jPTI8/ql8HEy13SQS1534aDSHO+nBqBK5aHUCRMIgSLnTP9
+Xyx9Ngg03SZIkPfykqxQmnZgWkTPMhYS+K1Ao9FGVs8W5jVi7veyAdhHptAcxhP3
+IuxvrxVTBQKBgQCluMPiu0snaOwP04HRAZhhSgIB3tIbuXE1OnPpb/JPwmH+p25Q
+Jig+uN9d+4jXoRyhTv4c2fAoOS6xPwVCxWKbzyLhMTg/fx+ncy4rryhxvRJaDDGl
+QEO1Ul9xlFMs9/vI8YJIY5uxBrimwpStmbn4hSukoLSeQ1X802bfglpMwQKBgD8z
+GTY4Y20XBIrDAaHquy32EEwJEEcF6AXj+l7N8bDgfVOW9xMgUb6zH8RL29Xeu5Do
+4SWCXL66fvZpbr/R1jwB28eIgJExpgvicfUKSqi+lhVi4hfmJDg8/FOopZDf61b1
+ykxZfHSCkDQnRAtJaylKBEpyYUWImtfgPfTgJfLxAoGAc8A/Tl2h/DsdTA+cA5d7
+1e0l64m13ObruSWRczyru4hy8Yq6E/K2rOFw8cYCcFpy24NqNlk+2iXPLRpWm2zt
+9R497zAPvhK/bfPXjvm0j/VjB44lvRTC9hby/RRMHy9UJk4o/UQaD+1IodxZovvk
+SruEA1+5bfBRMW0P+h7Qfe4=
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDjTCCAnWgAwIBAgIJAMW+wDfcdzC+MA0GCSqGSIb3DQEBCwUAMFwxCzAJBgNV
+BAYTAkdPMQwwCgYDVQQIDANNR08xDDAKBgNVBAcMA01HTzEMMAoGA1UECgwDTUdP
+MQ8wDQYDVQQLDAZTZXJ2ZXIxEjAQBgNVBAMMCWxvY2FsaG9zdDAgFw0xNTA5Mjkw
+ODM0MTBaGA8yMTE1MDkwNTA4MzQxMFowXDELMAkGA1UEBhMCR08xDDAKBgNVBAgM
+A01HTzEMMAoGA1UEBwwDTUdPMQwwCgYDVQQKDANNR08xDzANBgNVBAsMBlNlcnZl
+cjESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEA/T5W1vTsAF+2gTXP1JKygjM7T/2BXHiJc6DRKVjlshTtPYuC3rpTddDm
+6d86d17LWEo+T2bCT4MzZJhSGAun9peFvehdElRMr57xs7j5V1QYjwadMTBkLQuK
+IAg6cISN1KPUzpUTUKsWIsbx97sA0t0wiEPifROb7nfSMIVQsdz/c9LlY2UNYI+5
+GiU88iDGg2wrdsa3U+l2G2KSx/9uE3c5iFki6bdequLiWmBZ6rxfoaLe4gk1INji
+fKssNsn2i3uJ4i4Tmr3PUc4kxx0mMKuWK3HdlQsMqtpq++HQmHSvsPrbgcjl9HyP
+JiHDsoJ+4O5bbtcE51oQbLh1bZAhYwIDAQABo1AwTjAdBgNVHQ4EFgQUhku/u9Kd
+OAc1L0OR649vCCuQT+0wHwYDVR0jBBgwFoAUhku/u9KdOAc1L0OR649vCCuQT+0w
+DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAw7Bgw3hlWXWSZjLhnSOu
+2mW/UJ2Sj31unHngmgtXwW/04cyzoULb+qmzPe/Z06QMgGIsku1jFBcu0JabQtUG
+TyalpfW77tfnvz238CYdImYwE9ZcIGuZGfhs6ySFN9XpW43B8YM7R8wTNPvOcSPw
+nfjqU6kueN4TTspQg9cKhDss5DcMTIdgJgLbITXhIsrCu6GlKOgtX3HrdMGpQX7s
+UoMXtZVG8pK32vxKWGTZ6DPqESeKjjq74NbYnB3H5U/kDU2dt7LF90C/Umdr9y+C
+W2OJb1WBrf6RTcbt8D6d7P9kOfLPOtyn/cbaA/pfXBMQMHqr7XNXzjnaNU+jB7hL
+yQ==
+-----END CERTIFICATE-----
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/.env b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/.env
new file mode 100644
index 00000000000..96ee89e947d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/.env
@@ -0,0 +1,57 @@
+
+set -e
+
+MONGOVERSION=$(mongod --version | sed -n 's/.*v\([0-9]\+\.[0-9]\+\)\..*/\1/p')
+MONGOMAJOR=$(echo $MONGOVERSION | sed 's/\([0-9]\+\)\..*/\1/')
+MONGOMINOR=$(echo $MONGOVERSION | sed 's/[0-9]\+\.\([0-9]\+\)/\1/')
+
+versionAtLeast() {
+ TESTMAJOR="$1"
+ TESTMINOR="$2"
+ if [ "$MONGOMAJOR" -gt "$TESTMAJOR" ]; then
+ return 0
+ fi
+ if [ "$MONGOMAJOR" -lt "$TESTMAJOR" ]; then
+ return 100
+ fi
+ if [ "$MONGOMINOR" -ge "$TESTMINOR" ]; then
+ return 0
+ fi
+ return 100
+}
+
+COMMONDOPTSNOIP="
+ --nohttpinterface
+ --noprealloc
+ --nojournal
+ --smallfiles
+ --nssize=1
+ --oplogSize=1
+ --dbpath ./db
+ "
+COMMONDOPTS="
+ $COMMONDOPTSNOIP
+ --bind_ip=127.0.0.1
+ "
+COMMONCOPTS="
+ $COMMONDOPTS
+ "
+COMMONSOPTS="
+ --chunkSize 1
+ --bind_ip=127.0.0.1
+ "
+
+if versionAtLeast 3 2; then
+ # 3.2 doesn't like --nojournal on config servers.
+ #COMMONCOPTS="$(echo "$COMMONCOPTS" | sed '/--nojournal/d')"
+ # Using a hacked version of MongoDB 3.2 for now.
+
+ # Go back to MMAPv1 so it's not super sluggish. :-(
+ COMMONDOPTSNOIP="--storageEngine=mmapv1 $COMMONDOPTSNOIP"
+ COMMONDOPTS="--storageEngine=mmapv1 $COMMONDOPTS"
+ COMMONCOPTS="--storageEngine=mmapv1 $COMMONCOPTS"
+fi
+
+if [ "$TRAVIS" = true ]; then
+ set -x
+fi
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/journal/tempLatencyTest b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/journal/tempLatencyTest
new file mode 100644
index 00000000000..52972ec9e05
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/journal/tempLatencyTest
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/mongod.lock b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/mongod.lock
new file mode 100755
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/db/mongod.lock
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/run
new file mode 100755
index 00000000000..ad6bddd040d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg1/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONCOPTS \
+ --port 40101 \
+ --configsvr
+
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/run
new file mode 100755
index 00000000000..07d159ef538
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg2/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONCOPTS \
+ --port 40102 \
+ --configsvr
+
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/run
new file mode 100755
index 00000000000..bd812fa3e39
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/cfg3/run
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONCOPTS \
+ --port 40103 \
+ --configsvr \
+ --auth \
+ --keyFile=../../certs/keyfile
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/run
new file mode 100755
index 00000000000..b6636d195ed
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db1/run
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+. ../.env
+
+if [ x$NOIPV6 = x1 ]; then
+ BINDIP="127.0.0.1"
+else
+ BINDIP="127.0.0.1,::1"
+fi
+
+exec mongod $COMMONDOPTSNOIP \
+ --shardsvr \
+ --bind_ip=$BINDIP \
+ --port 40001 \
+ --ipv6
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/run
new file mode 100755
index 00000000000..5c7b1aa502a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db2/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --port 40002 \
+ --auth
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/run
new file mode 100755
index 00000000000..539da5fb211
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/db3/run
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --port 40003 \
+ --auth \
+ --sslMode preferSSL \
+ --sslCAFile ../../certs/server.pem \
+ --sslPEMKeyFile ../../certs/server.pem
+
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/run
new file mode 100755
index 00000000000..9de773041b7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1a/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs1 \
+ --port 40011
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/run
new file mode 100755
index 00000000000..dae593e1231
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1b/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs1 \
+ --port 40012
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/run
new file mode 100755
index 00000000000..c28cdc35d89
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs1c/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs1 \
+ --port 40013
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/run
new file mode 100755
index 00000000000..2c77ab1ab04
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2a/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs2 \
+ --port 40021
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/run
new file mode 100755
index 00000000000..57bcfce1580
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2b/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs2 \
+ --port 40022
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/run
new file mode 100755
index 00000000000..a71222705ce
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs2c/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs2 \
+ --port 40023
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/run
new file mode 100755
index 00000000000..002fbaf8e3e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3a/run
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs3 \
+ --port 40031 \
+ --keyFile=../../certs/keyfile
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/run
new file mode 100755
index 00000000000..69825843ea1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3b/run
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs3 \
+ --port 40032 \
+ --keyFile=../../certs/keyfile
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/run
new file mode 100755
index 00000000000..97b32c92762
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs3c/run
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs3 \
+ --port 40033 \
+ --keyFile=../../certs/keyfile
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/db/.empty b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/db/.empty
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/db/.empty
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/run
new file mode 100755
index 00000000000..c2f2d556341
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/rs4a/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs4 \
+ --port 40041
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s1/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s1/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s1/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s1/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s1/run
new file mode 100755
index 00000000000..0e31d2c9486
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s1/run
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongos $COMMONSOPTS \
+ --port 40201 \
+ --configdb 127.0.0.1:40101
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s2/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s2/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s2/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s2/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s2/run
new file mode 100755
index 00000000000..3b5c67d5880
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s2/run
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongos $COMMONSOPTS \
+ --port 40202 \
+ --configdb 127.0.0.1:40102
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s3/log/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s3/log/run
new file mode 100755
index 00000000000..e9d4404ba4f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s3/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s3/run b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s3/run
new file mode 100755
index 00000000000..fde6e479ba2
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/daemons/s3/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongos $COMMONSOPTS \
+ --port 40203 \
+ --configdb 127.0.0.1:40103 \
+ --keyFile=../../certs/keyfile
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/dropall.js b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/dropall.js
new file mode 100644
index 00000000000..7fa39d112e1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/dropall.js
@@ -0,0 +1,66 @@
+
+var ports = [40001, 40002, 40011, 40012, 40013, 40021, 40022, 40023, 40041, 40101, 40102, 40103, 40201, 40202, 40203]
+var auth = [40002, 40103, 40203, 40031]
+var db1 = new Mongo("localhost:40001")
+
+if (db1.getDB("admin").serverBuildInfo().OpenSSLVersion) {
+ ports.push(40003)
+ auth.push(40003)
+}
+
+for (var i in ports) {
+ var port = ports[i]
+ var server = "localhost:" + port
+ var mongo = new Mongo("localhost:" + port)
+ var admin = mongo.getDB("admin")
+
+ for (var j in auth) {
+ if (auth[j] == port) {
+ admin.auth("root", "rapadura")
+ admin.system.users.find().forEach(function(u) {
+ if (u.user == "root" || u.user == "reader") {
+ return;
+ }
+ if (typeof admin.dropUser == "function") {
+ mongo.getDB(u.db).dropUser(u.user);
+ } else {
+ admin.removeUser(u.user);
+ }
+ })
+ break
+ }
+ }
+ var result = admin.runCommand({"listDatabases": 1})
+ for (var j = 0; j != 100; j++) {
+ if (typeof result.databases != "undefined" || notMaster(result)) {
+ break
+ }
+ result = admin.runCommand({"listDatabases": 1})
+ }
+ if (notMaster(result)) {
+ continue
+ }
+ if (typeof result.databases == "undefined") {
+ print("Could not list databases. Command result:")
+ print(JSON.stringify(result))
+ quit(12)
+ }
+ var dbs = result.databases
+ for (var j = 0; j != dbs.length; j++) {
+ var db = dbs[j]
+ switch (db.name) {
+ case "admin":
+ case "local":
+ case "config":
+ break
+ default:
+ mongo.getDB(db.name).dropDatabase()
+ }
+ }
+}
+
+function notMaster(result) {
+    return typeof result.errmsg != "undefined" && (result.errmsg.indexOf("not master") >= 0 || result.errmsg.indexOf("no master found") >= 0)
+}
+
+// vim:ts=4:sw=4:et
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/init.js b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/init.js
new file mode 100644
index 00000000000..ceb75a5e4a0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/init.js
@@ -0,0 +1,132 @@
+//var settings = {heartbeatSleep: 0.05, heartbeatTimeout: 0.5}
+var settings = {};
+
+// We know the master of the first set (pri=1), but not of the second.
+var rs1cfg = {_id: "rs1",
+ members: [{_id: 1, host: "127.0.0.1:40011", priority: 1, tags: {rs1: "a"}},
+ {_id: 2, host: "127.0.0.1:40012", priority: 0, tags: {rs1: "b"}},
+ {_id: 3, host: "127.0.0.1:40013", priority: 0, tags: {rs1: "c"}}],
+ settings: settings}
+var rs2cfg = {_id: "rs2",
+ members: [{_id: 1, host: "127.0.0.1:40021", priority: 1, tags: {rs2: "a"}},
+ {_id: 2, host: "127.0.0.1:40022", priority: 1, tags: {rs2: "b"}},
+ {_id: 3, host: "127.0.0.1:40023", priority: 1, tags: {rs2: "c"}}],
+ settings: settings}
+var rs3cfg = {_id: "rs3",
+ members: [{_id: 1, host: "127.0.0.1:40031", priority: 1, tags: {rs3: "a"}},
+ {_id: 2, host: "127.0.0.1:40032", priority: 1, tags: {rs3: "b"}},
+ {_id: 3, host: "127.0.0.1:40033", priority: 1, tags: {rs3: "c"}}],
+ settings: settings}
+
+for (var i = 0; i != 60; i++) {
+ try {
+ db1 = new Mongo("127.0.0.1:40001").getDB("admin")
+ db2 = new Mongo("127.0.0.1:40002").getDB("admin")
+ rs1a = new Mongo("127.0.0.1:40011").getDB("admin")
+ rs2a = new Mongo("127.0.0.1:40021").getDB("admin")
+ rs3a = new Mongo("127.0.0.1:40031").getDB("admin")
+ break
+ } catch(err) {
+ print("Can't connect yet...")
+ }
+ sleep(1000)
+}
+
+function hasSSL() {
+ return Boolean(db1.serverBuildInfo().OpenSSLVersion)
+}
+
+rs1a.runCommand({replSetInitiate: rs1cfg})
+rs2a.runCommand({replSetInitiate: rs2cfg})
+rs3a.runCommand({replSetInitiate: rs3cfg})
+
+function configShards() {
+ cfg1 = new Mongo("127.0.0.1:40201").getDB("admin")
+ cfg1.runCommand({addshard: "127.0.0.1:40001"})
+ cfg1.runCommand({addshard: "rs1/127.0.0.1:40011"})
+
+ cfg2 = new Mongo("127.0.0.1:40202").getDB("admin")
+ cfg2.runCommand({addshard: "rs2/127.0.0.1:40021"})
+
+ cfg3 = new Mongo("127.0.0.1:40203").getDB("admin")
+ cfg3.runCommand({addshard: "rs3/127.0.0.1:40031"})
+}
+
+function configAuth() {
+ var addrs = ["127.0.0.1:40002", "127.0.0.1:40203", "127.0.0.1:40031"]
+ if (hasSSL()) {
+ addrs.push("127.0.0.1:40003")
+ }
+ for (var i in addrs) {
+ print("Configuring auth for", addrs[i])
+ var db = new Mongo(addrs[i]).getDB("admin")
+ var v = db.serverBuildInfo().versionArray
+ var timedOut = false
+ if (v < [2, 5]) {
+ db.addUser("root", "rapadura")
+ } else {
+ try {
+ db.createUser({user: "root", pwd: "rapadura", roles: ["root"]})
+ } catch (err) {
+ // 3.2 consistently fails replication of creds on 40031 (config server)
+ print("createUser command returned an error: " + err)
+ if (String(err).indexOf("timed out") >= 0) {
+ timedOut = true;
+ }
+ }
+ }
+ for (var i = 0; i < 60; i++) {
+ var ok = db.auth("root", "rapadura")
+ if (ok || !timedOut) {
+ break
+ }
+ sleep(1000);
+ }
+ if (v >= [2, 6]) {
+ db.createUser({user: "reader", pwd: "rapadura", roles: ["readAnyDatabase"]})
+ } else if (v >= [2, 4]) {
+ db.addUser({user: "reader", pwd: "rapadura", roles: ["readAnyDatabase"]})
+ } else {
+ db.addUser("reader", "rapadura", true)
+ }
+ }
+}
+
+function countHealthy(rs) {
+ var status = rs.runCommand({replSetGetStatus: 1})
+ var count = 0
+ var primary = 0
+ if (typeof status.members != "undefined") {
+ for (var i = 0; i != status.members.length; i++) {
+ var m = status.members[i]
+ if (m.health == 1 && (m.state == 1 || m.state == 2)) {
+ count += 1
+ if (m.state == 1) {
+ primary = 1
+ }
+ }
+ }
+ }
+ if (primary == 0) {
+ count = 0
+ }
+ return count
+}
+
+var totalRSMembers = rs1cfg.members.length + rs2cfg.members.length + rs3cfg.members.length
+
+for (var i = 0; i != 60; i++) {
+ var count = countHealthy(rs1a) + countHealthy(rs2a) + countHealthy(rs3a)
+ print("Replica sets have", count, "healthy nodes.")
+ if (count == totalRSMembers) {
+ configShards()
+ configAuth()
+ quit(0)
+ }
+ sleep(1000)
+}
+
+print("Replica sets didn't sync up properly.")
+quit(12)
+
+// vim:ts=4:sw=4:et
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/wait.js b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/wait.js
new file mode 100644
index 00000000000..2735d0e56e5
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/mongojs/wait.js
@@ -0,0 +1,67 @@
+// We know the master of the first set (pri=1), but not of the second.
+var settings = {}
+var rs1cfg = {_id: "rs1",
+ members: [{_id: 1, host: "127.0.0.1:40011", priority: 1},
+ {_id: 2, host: "127.0.0.1:40012", priority: 0},
+ {_id: 3, host: "127.0.0.1:40013", priority: 0}]}
+var rs2cfg = {_id: "rs2",
+ members: [{_id: 1, host: "127.0.0.1:40021", priority: 1},
+ {_id: 2, host: "127.0.0.1:40022", priority: 1},
+ {_id: 3, host: "127.0.0.1:40023", priority: 0}]}
+var rs3cfg = {_id: "rs3",
+ members: [{_id: 1, host: "127.0.0.1:40031", priority: 1},
+ {_id: 2, host: "127.0.0.1:40032", priority: 1},
+ {_id: 3, host: "127.0.0.1:40033", priority: 1}],
+ settings: settings}
+
+for (var i = 0; i != 60; i++) {
+ try {
+ rs1a = new Mongo("127.0.0.1:40011").getDB("admin")
+ rs2a = new Mongo("127.0.0.1:40021").getDB("admin")
+ rs3a = new Mongo("127.0.0.1:40031").getDB("admin")
+ rs3a.auth("root", "rapadura")
+ db1 = new Mongo("127.0.0.1:40001").getDB("admin")
+ db2 = new Mongo("127.0.0.1:40002").getDB("admin")
+ break
+ } catch(err) {
+ print("Can't connect yet...")
+ }
+ sleep(1000)
+}
+
+function countHealthy(rs) {
+ var status = rs.runCommand({replSetGetStatus: 1})
+ var count = 0
+ var primary = 0
+ if (typeof status.members != "undefined") {
+ for (var i = 0; i != status.members.length; i++) {
+ var m = status.members[i]
+ if (m.health == 1 && (m.state == 1 || m.state == 2)) {
+ count += 1
+ if (m.state == 1) {
+ primary = 1
+ }
+ }
+ }
+ }
+ if (primary == 0) {
+ count = 0
+ }
+ return count
+}
+
+var totalRSMembers = rs1cfg.members.length + rs2cfg.members.length + rs3cfg.members.length
+
+for (var i = 0; i != 90; i++) {
+ var count = countHealthy(rs1a) + countHealthy(rs2a) + countHealthy(rs3a)
+ print("Replica sets have", count, "healthy nodes.")
+ if (count == totalRSMembers) {
+ quit(0)
+ }
+ sleep(1000)
+}
+
+print("Replica sets didn't sync up properly.")
+quit(12)
+
+// vim:ts=4:sw=4:et
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/setup.sh b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/setup.sh
new file mode 100755
index 00000000000..e5db78a783d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/harness/setup.sh
@@ -0,0 +1,96 @@
+#!/bin/sh -e
+
+LINE="---------------"
+
+start() {
+ if [ -d _harness ]; then
+ echo "Daemon setup already in place, stop it first."
+ exit 1
+ fi
+ mkdir -p _harness
+ cd _harness
+ cp -a ../harness/daemons .
+ cp -a ../harness/certs .
+ echo keyfile > certs/keyfile
+ chmod 600 certs/keyfile
+ if ! mongod --help | grep -q -- --ssl; then
+ rm -rf daemons/db3
+ fi
+ COUNT=$(ls daemons | wc -l)
+ echo "Running daemons..."
+ svscan daemons &
+ SVSCANPID=$!
+ echo $SVSCANPID > svscan.pid
+ if ! kill -0 $SVSCANPID; then
+ echo "Cannot execute svscan."
+ exit 1
+ fi
+ echo "Starting $COUNT processes..."
+ for i in $(seq 30); do
+ UP=$(svstat daemons/* | grep ' up ' | grep -v ' [0-3] seconds' | wc -l)
+ echo "$UP processes up..."
+ if [ x$COUNT = x$UP ]; then
+            echo "Running init.js with mongo..."
+ mongo --nodb ../harness/mongojs/init.js
+ exit 0
+ fi
+ sleep 1
+ done
+ echo "Failed to start processes. svstat _harness/daemons/* output:"
+ echo $LINE
+ svstat daemons/*
+ echo $LINE
+ for DAEMON in daemons/*; do
+ if $(svstat $DAEMON | grep ' up ' | grep ' [0-3] seconds' > /dev/null); then
+ echo "Logs for _harness/$DAEMON:"
+ echo $LINE
+ cat $DAEMON/log/log.txt
+ echo $LINE
+ fi
+ done
+ exit 1
+}
+
+stop() {
+ if [ -d _harness ]; then
+ cd _harness
+ if [ -f svscan.pid ]; then
+ kill -9 $(cat svscan.pid) 2> /dev/null || true
+ svc -dx daemons/* daemons/*/log > /dev/null 2>&1 || true
+ COUNT=$(ls daemons | wc -l)
+ echo "Shutting down $COUNT processes..."
+ while true; do
+ DOWN=$(svstat daemons/* | grep 'supervise not running' | wc -l)
+ echo "$DOWN processes down..."
+ if [ x$DOWN = x$COUNT ]; then
+ break
+ fi
+ sleep 1
+ done
+ rm svscan.pid
+ echo "Done."
+ fi
+ cd ..
+ rm -rf _harness
+ fi
+}
+
+
+if [ ! -f suite_test.go ]; then
+ echo "This script must be run from within the source directory."
+ exit 1
+fi
+
+case "$1" in
+
+ start)
+ start $2
+ ;;
+
+ stop)
+ stop $2
+ ;;
+
+esac
+
+# vim:ts=4:sw=4:et
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/LICENSE b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/LICENSE
new file mode 100644
index 00000000000..74487567632
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2012 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/bench_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/bench_test.go
new file mode 100644
index 00000000000..cd7380b1efb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/bench_test.go
@@ -0,0 +1,223 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Large data benchmark.
+// The JSON data is a summary of agl's changes in the
+// go, webkit, and chromium open source projects.
+// We benchmark converting between the JSON form
+// and in-memory data structures.
+
+package json
+
+import (
+ "bytes"
+ "compress/gzip"
+ "io/ioutil"
+ "os"
+ "strings"
+ "testing"
+)
+
+type codeResponse struct {
+ Tree *codeNode `json:"tree"`
+ Username string `json:"username"`
+}
+
+type codeNode struct {
+ Name string `json:"name"`
+ Kids []*codeNode `json:"kids"`
+ CLWeight float64 `json:"cl_weight"`
+ Touches int `json:"touches"`
+ MinT int64 `json:"min_t"`
+ MaxT int64 `json:"max_t"`
+ MeanT int64 `json:"mean_t"`
+}
+
+var codeJSON []byte
+var codeStruct codeResponse
+
+func codeInit() {
+ f, err := os.Open("testdata/code.json.gz")
+ if err != nil {
+ panic(err)
+ }
+ defer f.Close()
+ gz, err := gzip.NewReader(f)
+ if err != nil {
+ panic(err)
+ }
+ data, err := ioutil.ReadAll(gz)
+ if err != nil {
+ panic(err)
+ }
+
+ codeJSON = data
+
+ if err := Unmarshal(codeJSON, &codeStruct); err != nil {
+ panic("unmarshal code.json: " + err.Error())
+ }
+
+ if data, err = Marshal(&codeStruct); err != nil {
+ panic("marshal code.json: " + err.Error())
+ }
+
+ if !bytes.Equal(data, codeJSON) {
+ println("different lengths", len(data), len(codeJSON))
+ for i := 0; i < len(data) && i < len(codeJSON); i++ {
+ if data[i] != codeJSON[i] {
+ println("re-marshal: changed at byte", i)
+ println("orig: ", string(codeJSON[i-10:i+10]))
+ println("new: ", string(data[i-10:i+10]))
+ break
+ }
+ }
+ panic("re-marshal code.json: different result")
+ }
+}
+
+func BenchmarkCodeEncoder(b *testing.B) {
+ if codeJSON == nil {
+ b.StopTimer()
+ codeInit()
+ b.StartTimer()
+ }
+ enc := NewEncoder(ioutil.Discard)
+ for i := 0; i < b.N; i++ {
+ if err := enc.Encode(&codeStruct); err != nil {
+ b.Fatal("Encode:", err)
+ }
+ }
+ b.SetBytes(int64(len(codeJSON)))
+}
+
+func BenchmarkCodeMarshal(b *testing.B) {
+ if codeJSON == nil {
+ b.StopTimer()
+ codeInit()
+ b.StartTimer()
+ }
+ for i := 0; i < b.N; i++ {
+ if _, err := Marshal(&codeStruct); err != nil {
+ b.Fatal("Marshal:", err)
+ }
+ }
+ b.SetBytes(int64(len(codeJSON)))
+}
+
+func BenchmarkCodeDecoder(b *testing.B) {
+ if codeJSON == nil {
+ b.StopTimer()
+ codeInit()
+ b.StartTimer()
+ }
+ var buf bytes.Buffer
+ dec := NewDecoder(&buf)
+ var r codeResponse
+ for i := 0; i < b.N; i++ {
+ buf.Write(codeJSON)
+ // hide EOF
+ buf.WriteByte('\n')
+ buf.WriteByte('\n')
+ buf.WriteByte('\n')
+ if err := dec.Decode(&r); err != nil {
+ b.Fatal("Decode:", err)
+ }
+ }
+ b.SetBytes(int64(len(codeJSON)))
+}
+
+func BenchmarkDecoderStream(b *testing.B) {
+ b.StopTimer()
+ var buf bytes.Buffer
+ dec := NewDecoder(&buf)
+ buf.WriteString(`"` + strings.Repeat("x", 1000000) + `"` + "\n\n\n")
+ var x interface{}
+ if err := dec.Decode(&x); err != nil {
+ b.Fatal("Decode:", err)
+ }
+ ones := strings.Repeat(" 1\n", 300000) + "\n\n\n"
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if i%300000 == 0 {
+ buf.WriteString(ones)
+ }
+ x = nil
+ if err := dec.Decode(&x); err != nil || x != 1.0 {
+ b.Fatalf("Decode: %v after %d", err, i)
+ }
+ }
+}
+
+func BenchmarkCodeUnmarshal(b *testing.B) {
+ if codeJSON == nil {
+ b.StopTimer()
+ codeInit()
+ b.StartTimer()
+ }
+ for i := 0; i < b.N; i++ {
+ var r codeResponse
+ if err := Unmarshal(codeJSON, &r); err != nil {
+ b.Fatal("Unmarshal:", err)
+ }
+ }
+ b.SetBytes(int64(len(codeJSON)))
+}
+
+func BenchmarkCodeUnmarshalReuse(b *testing.B) {
+ if codeJSON == nil {
+ b.StopTimer()
+ codeInit()
+ b.StartTimer()
+ }
+ var r codeResponse
+ for i := 0; i < b.N; i++ {
+ if err := Unmarshal(codeJSON, &r); err != nil {
+ b.Fatal("Unmarshal:", err)
+ }
+ }
+}
+
+func BenchmarkUnmarshalString(b *testing.B) {
+ data := []byte(`"hello, world"`)
+ var s string
+
+ for i := 0; i < b.N; i++ {
+ if err := Unmarshal(data, &s); err != nil {
+ b.Fatal("Unmarshal:", err)
+ }
+ }
+}
+
+func BenchmarkUnmarshalFloat64(b *testing.B) {
+ var f float64
+ data := []byte(`3.14`)
+
+ for i := 0; i < b.N; i++ {
+ if err := Unmarshal(data, &f); err != nil {
+ b.Fatal("Unmarshal:", err)
+ }
+ }
+}
+
+func BenchmarkUnmarshalInt64(b *testing.B) {
+ var x int64
+ data := []byte(`3`)
+
+ for i := 0; i < b.N; i++ {
+ if err := Unmarshal(data, &x); err != nil {
+ b.Fatal("Unmarshal:", err)
+ }
+ }
+}
+
+func BenchmarkIssue10335(b *testing.B) {
+ b.ReportAllocs()
+ var s struct{}
+ j := []byte(`{"a":{ }}`)
+ for n := 0; n < b.N; n++ {
+ if err := Unmarshal(j, &s); err != nil {
+ b.Fatal(err)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/decode.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/decode.go
new file mode 100644
index 00000000000..ce7c7d2493d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/decode.go
@@ -0,0 +1,1685 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Represents JSON data structure using native Go types: booleans, floats,
+// strings, arrays, and maps.
+
+package json
+
+import (
+ "bytes"
+ "encoding"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "reflect"
+ "runtime"
+ "strconv"
+ "unicode"
+ "unicode/utf16"
+ "unicode/utf8"
+)
+
+// Unmarshal parses the JSON-encoded data and stores the result
+// in the value pointed to by v.
+//
+// Unmarshal uses the inverse of the encodings that
+// Marshal uses, allocating maps, slices, and pointers as necessary,
+// with the following additional rules:
+//
+// To unmarshal JSON into a pointer, Unmarshal first handles the case of
+// the JSON being the JSON literal null. In that case, Unmarshal sets
+// the pointer to nil. Otherwise, Unmarshal unmarshals the JSON into
+// the value pointed at by the pointer. If the pointer is nil, Unmarshal
+// allocates a new value for it to point to.
+//
+// To unmarshal JSON into a struct, Unmarshal matches incoming object
+// keys to the keys used by Marshal (either the struct field name or its tag),
+// preferring an exact match but also accepting a case-insensitive match.
+// Unmarshal will only set exported fields of the struct.
+//
+// To unmarshal JSON into an interface value,
+// Unmarshal stores one of these in the interface value:
+//
+// bool, for JSON booleans
+// float64, for JSON numbers
+// string, for JSON strings
+// []interface{}, for JSON arrays
+// map[string]interface{}, for JSON objects
+// nil for JSON null
+//
+// To unmarshal a JSON array into a slice, Unmarshal resets the slice length
+// to zero and then appends each element to the slice.
+// As a special case, to unmarshal an empty JSON array into a slice,
+// Unmarshal replaces the slice with a new empty slice.
+//
+// To unmarshal a JSON array into a Go array, Unmarshal decodes
+// JSON array elements into corresponding Go array elements.
+// If the Go array is smaller than the JSON array,
+// the additional JSON array elements are discarded.
+// If the JSON array is smaller than the Go array,
+// the additional Go array elements are set to zero values.
+//
+// To unmarshal a JSON object into a map, Unmarshal first establishes a map to
+// use. If the map is nil, Unmarshal allocates a new map. Otherwise Unmarshal
+// reuses the existing map, keeping existing entries. Unmarshal then stores key-
+// value pairs from the JSON object into the map. The map's key type must
+// either be a string or implement encoding.TextUnmarshaler.
+//
+// If a JSON value is not appropriate for a given target type,
+// or if a JSON number overflows the target type, Unmarshal
+// skips that field and completes the unmarshaling as best it can.
+// If no more serious errors are encountered, Unmarshal returns
+// an UnmarshalTypeError describing the earliest such error.
+//
+// The JSON null value unmarshals into an interface, map, pointer, or slice
+// by setting that Go value to nil. Because null is often used in JSON to mean
+// ``not present,'' unmarshaling a JSON null into any other Go type has no effect
+// on the value and produces no error.
+//
+// When unmarshaling quoted strings, invalid UTF-8 or
+// invalid UTF-16 surrogate pairs are not treated as an error.
+// Instead, they are replaced by the Unicode replacement
+// character U+FFFD.
+//
+func Unmarshal(data []byte, v interface{}) error {
+ // Check for well-formedness.
+ // Avoids filling out half a data structure
+ // before discovering a JSON syntax error.
+ var d decodeState
+ err := checkValid(data, &d.scan)
+ if err != nil {
+ return err
+ }
+
+ d.init(data)
+ return d.unmarshal(v)
+}
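The doc comment above spells out the decoding rules at length; the snippet below is a small, hypothetical example-test sketch (not part of this vendored commit, assumed to live in its own _test.go file in this package) showing Unmarshal against both a struct target and an untyped interface{} target.

package json

import "fmt"

// Illustrative sketch only; the behaviour shown follows the doc comment
// above (object keys matched to exported struct fields, generic decoding
// into map[string]interface{} and friends).
func ExampleUnmarshal() {
	data := []byte(`{"Name":"mgo","Tags":["bson","mongodb"]}`)

	// Struct target: keys are matched to exported fields.
	var pkg struct {
		Name string
		Tags []string
	}
	if err := Unmarshal(data, &pkg); err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(pkg.Name, pkg.Tags)

	// Interface target: a JSON object becomes map[string]interface{}.
	var generic interface{}
	if err := Unmarshal(data, &generic); err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("%T\n", generic)

	// Output:
	// mgo [bson mongodb]
	// map[string]interface {}
}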
+
+// Unmarshaler is the interface implemented by types
+// that can unmarshal a JSON description of themselves.
+// The input can be assumed to be a valid encoding of
+// a JSON value. UnmarshalJSON must copy the JSON data
+// if it wishes to retain the data after returning.
+type Unmarshaler interface {
+ UnmarshalJSON([]byte) error
+}
+
+// An UnmarshalTypeError describes a JSON value that was
+// not appropriate for a value of a specific Go type.
+type UnmarshalTypeError struct {
+ Value string // description of JSON value - "bool", "array", "number -5"
+ Type reflect.Type // type of Go value it could not be assigned to
+ Offset int64 // error occurred after reading Offset bytes
+}
+
+func (e *UnmarshalTypeError) Error() string {
+ return "json: cannot unmarshal " + e.Value + " into Go value of type " + e.Type.String()
+}
+
+// An UnmarshalFieldError describes a JSON object key that
+// led to an unexported (and therefore unwritable) struct field.
+// (No longer used; kept for compatibility.)
+type UnmarshalFieldError struct {
+ Key string
+ Type reflect.Type
+ Field reflect.StructField
+}
+
+func (e *UnmarshalFieldError) Error() string {
+ return "json: cannot unmarshal object key " + strconv.Quote(e.Key) + " into unexported field " + e.Field.Name + " of type " + e.Type.String()
+}
+
+// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal.
+// (The argument to Unmarshal must be a non-nil pointer.)
+type InvalidUnmarshalError struct {
+ Type reflect.Type
+}
+
+func (e *InvalidUnmarshalError) Error() string {
+ if e.Type == nil {
+ return "json: Unmarshal(nil)"
+ }
+
+ if e.Type.Kind() != reflect.Ptr {
+ return "json: Unmarshal(non-pointer " + e.Type.String() + ")"
+ }
+ return "json: Unmarshal(nil " + e.Type.String() + ")"
+}
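As a hedged illustration of how these error types surface to callers (again a hypothetical in-package test sketch, not part of the commit): a JSON value of the wrong kind yields an *UnmarshalTypeError, while a non-pointer target is rejected up front with an *InvalidUnmarshalError.

package json

import "fmt"

// Illustrative sketch; assumes the standard behaviour documented above.
func ExampleUnmarshal_errorTypes() {
	// A JSON string cannot be stored in an int field, so Unmarshal
	// finishes what it can and reports an *UnmarshalTypeError.
	var out struct{ Age int }
	err := Unmarshal([]byte(`{"Age":"eleven"}`), &out)
	if te, ok := err.(*UnmarshalTypeError); ok {
		fmt.Println("cannot decode JSON", te.Value, "into Go", te.Type)
	}

	// Anything other than a non-nil pointer is rejected immediately.
	err = Unmarshal([]byte(`{}`), out)
	_, invalid := err.(*InvalidUnmarshalError)
	fmt.Println("invalid target:", invalid)

	// Output:
	// cannot decode JSON string into Go int
	// invalid target: true
}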
+
+func (d *decodeState) unmarshal(v interface{}) (err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ if _, ok := r.(runtime.Error); ok {
+ panic(r)
+ }
+ err = r.(error)
+ }
+ }()
+
+ rv := reflect.ValueOf(v)
+ if rv.Kind() != reflect.Ptr || rv.IsNil() {
+ return &InvalidUnmarshalError{reflect.TypeOf(v)}
+ }
+
+ d.scan.reset()
+ // We decode rv not rv.Elem because the Unmarshaler interface
+ // test must be applied at the top level of the value.
+ d.value(rv)
+ return d.savedError
+}
+
+// A Number represents a JSON number literal.
+type Number string
+
+// String returns the literal text of the number.
+func (n Number) String() string { return string(n) }
+
+// Float64 returns the number as a float64.
+func (n Number) Float64() (float64, error) {
+ return strconv.ParseFloat(string(n), 64)
+}
+
+// Int64 returns the number as an int64.
+func (n Number) Int64() (int64, error) {
+ return strconv.ParseInt(string(n), 10, 64)
+}
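A brief, hypothetical sketch (not in the commit) of how Number preserves the literal digits, which matters for integers too large to round-trip exactly through a float64.

package json

import "fmt"

// Illustrative sketch of the Number type documented above.
func ExampleNumber() {
	// Decoding into a Number keeps the literal text, so the caller can
	// pick Int64 or Float64 later without losing precision.
	var doc struct{ Total Number }
	if err := Unmarshal([]byte(`{"Total": 184467440737095516}`), &doc); err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(doc.Total.String())

	if n, err := doc.Total.Int64(); err == nil {
		fmt.Println(n)
	}

	// Output:
	// 184467440737095516
	// 184467440737095516
}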
+
+// isValidNumber reports whether s is a valid JSON number literal.
+func isValidNumber(s string) bool {
+ // This function implements the JSON numbers grammar.
+ // See https://tools.ietf.org/html/rfc7159#section-6
+ // and http://json.org/number.gif
+
+ if s == "" {
+ return false
+ }
+
+ // Optional -
+ if s[0] == '-' {
+ s = s[1:]
+ if s == "" {
+ return false
+ }
+ }
+
+ // Digits
+ switch {
+ default:
+ return false
+
+ case s[0] == '0':
+ s = s[1:]
+
+ case '1' <= s[0] && s[0] <= '9':
+ s = s[1:]
+ for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
+ s = s[1:]
+ }
+ }
+
+ // . followed by 1 or more digits.
+ if len(s) >= 2 && s[0] == '.' && '0' <= s[1] && s[1] <= '9' {
+ s = s[2:]
+ for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
+ s = s[1:]
+ }
+ }
+
+ // e or E followed by an optional - or + and
+ // 1 or more digits.
+ if len(s) >= 2 && (s[0] == 'e' || s[0] == 'E') {
+ s = s[1:]
+ if s[0] == '+' || s[0] == '-' {
+ s = s[1:]
+ if s == "" {
+ return false
+ }
+ }
+ for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
+ s = s[1:]
+ }
+ }
+
+ // Make sure we are at the end.
+ return s == ""
+}
+
+// decodeState represents the state while decoding a JSON value.
+type decodeState struct {
+ data []byte
+ off int // read offset in data
+ scan scanner
+ nextscan scanner // for calls to nextValue
+ savedError error
+ useNumber bool
+ ext Extension
+}
+
+// errPhase is used for errors that should not happen unless
+// there is a bug in the JSON decoder or something is editing
+// the data slice while the decoder executes.
+var errPhase = errors.New("JSON decoder out of sync - data changing underfoot?")
+
+func (d *decodeState) init(data []byte) *decodeState {
+ d.data = data
+ d.off = 0
+ d.savedError = nil
+ return d
+}
+
+// error aborts the decoding by panicking with err.
+func (d *decodeState) error(err error) {
+ panic(err)
+}
+
+// saveError saves the first err it is called with,
+// for reporting at the end of the unmarshal.
+func (d *decodeState) saveError(err error) {
+ if d.savedError == nil {
+ d.savedError = err
+ }
+}
+
+// next cuts off and returns the next full JSON value in d.data[d.off:].
+// The next value is known to be an object or array, not a literal.
+func (d *decodeState) next() []byte {
+ c := d.data[d.off]
+ item, rest, err := nextValue(d.data[d.off:], &d.nextscan)
+ if err != nil {
+ d.error(err)
+ }
+ d.off = len(d.data) - len(rest)
+
+ // Our scanner has seen the opening brace/bracket
+ // and thinks we're still in the middle of the object.
+ // invent a closing brace/bracket to get it out.
+ if c == '{' {
+ d.scan.step(&d.scan, '}')
+ } else if c == '[' {
+ d.scan.step(&d.scan, ']')
+ } else {
+ // Was inside a function name. Get out of it.
+ d.scan.step(&d.scan, '(')
+ d.scan.step(&d.scan, ')')
+ }
+
+ return item
+}
+
+// scanWhile processes bytes in d.data[d.off:] until it
+// receives a scan code not equal to op.
+// It updates d.off and returns the new scan code.
+func (d *decodeState) scanWhile(op int) int {
+ var newOp int
+ for {
+ if d.off >= len(d.data) {
+ newOp = d.scan.eof()
+ d.off = len(d.data) + 1 // mark processed EOF with len+1
+ } else {
+ c := d.data[d.off]
+ d.off++
+ newOp = d.scan.step(&d.scan, c)
+ }
+ if newOp != op {
+ break
+ }
+ }
+ return newOp
+}
+
+// value decodes a JSON value from d.data[d.off:] into the value.
+// it updates d.off to point past the decoded value.
+func (d *decodeState) value(v reflect.Value) {
+ if !v.IsValid() {
+ _, rest, err := nextValue(d.data[d.off:], &d.nextscan)
+ if err != nil {
+ d.error(err)
+ }
+ d.off = len(d.data) - len(rest)
+
+ // d.scan thinks we're still at the beginning of the item.
+ // Feed in an empty string - the shortest, simplest value -
+ // so that it knows we got to the end of the value.
+ if d.scan.redo {
+ // rewind.
+ d.scan.redo = false
+ d.scan.step = stateBeginValue
+ }
+ d.scan.step(&d.scan, '"')
+ d.scan.step(&d.scan, '"')
+
+ n := len(d.scan.parseState)
+ if n > 0 && d.scan.parseState[n-1] == parseObjectKey {
+ // d.scan thinks we just read an object key; finish the object
+ d.scan.step(&d.scan, ':')
+ d.scan.step(&d.scan, '"')
+ d.scan.step(&d.scan, '"')
+ d.scan.step(&d.scan, '}')
+ }
+
+ return
+ }
+
+ switch op := d.scanWhile(scanSkipSpace); op {
+ default:
+ d.error(errPhase)
+
+ case scanBeginArray:
+ d.array(v)
+
+ case scanBeginObject:
+ d.object(v)
+
+ case scanBeginLiteral:
+ d.literal(v)
+
+ case scanBeginName:
+ d.name(v)
+ }
+}
+
+type unquotedValue struct{}
+
+// valueQuoted is like value but decodes a
+// quoted string literal or literal null into an interface value.
+// If it finds anything other than a quoted string literal or null,
+// valueQuoted returns unquotedValue{}.
+func (d *decodeState) valueQuoted() interface{} {
+ switch op := d.scanWhile(scanSkipSpace); op {
+ default:
+ d.error(errPhase)
+
+ case scanBeginArray:
+ d.array(reflect.Value{})
+
+ case scanBeginObject:
+ d.object(reflect.Value{})
+
+ case scanBeginName:
+ switch v := d.nameInterface().(type) {
+ case nil, string:
+ return v
+ }
+
+ case scanBeginLiteral:
+ switch v := d.literalInterface().(type) {
+ case nil, string:
+ return v
+ }
+ }
+ return unquotedValue{}
+}
+
+// indirect walks down v allocating pointers as needed,
+// until it gets to a non-pointer.
+// if it encounters an Unmarshaler, indirect stops and returns that.
+// if decodingNull is true, indirect stops at the last pointer so it can be set to nil.
+func (d *decodeState) indirect(v reflect.Value, decodingNull bool) (Unmarshaler, encoding.TextUnmarshaler, reflect.Value) {
+ // If v is a named type and is addressable,
+ // start with its address, so that if the type has pointer methods,
+ // we find them.
+ if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() {
+ v = v.Addr()
+ }
+ for {
+ // Load value from interface, but only if the result will be
+ // usefully addressable.
+ if v.Kind() == reflect.Interface && !v.IsNil() {
+ e := v.Elem()
+ if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) {
+ v = e
+ continue
+ }
+ }
+
+ if v.Kind() != reflect.Ptr {
+ break
+ }
+
+ if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() {
+ break
+ }
+ if v.IsNil() {
+ v.Set(reflect.New(v.Type().Elem()))
+ }
+ if v.Type().NumMethod() > 0 {
+ if u, ok := v.Interface().(Unmarshaler); ok {
+ return u, nil, v
+ }
+ if u, ok := v.Interface().(encoding.TextUnmarshaler); ok {
+ return nil, u, v
+ }
+ }
+ v = v.Elem()
+ }
+ return nil, nil, v
+}
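+
+// The sketch below is illustrative only (the helper name is hypothetical and not
+// part of the package): it shows how indirect allocates intermediate pointers
+// and hands back the final settable value when no Unmarshaler is involved.
+func exampleIndirectSketch() {
+ var d decodeState
+ var pp **int
+ // indirect walks **int -> *int -> int, allocating each nil pointer on the way.
+ _, _, ev := d.indirect(reflect.ValueOf(&pp).Elem(), false)
+ ev.SetInt(42)
+ fmt.Println(**pp) // 42
+}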
+
+// array consumes an array from d.data[d.off-1:], decoding into the value v.
+// The first byte of the array ('[') has been read already.
+func (d *decodeState) array(v reflect.Value) {
+ // Check for unmarshaler.
+ u, ut, pv := d.indirect(v, false)
+ if u != nil {
+ d.off--
+ err := u.UnmarshalJSON(d.next())
+ if err != nil {
+ d.error(err)
+ }
+ return
+ }
+ if ut != nil {
+ d.saveError(&UnmarshalTypeError{"array", v.Type(), int64(d.off)})
+ d.off--
+ d.next()
+ return
+ }
+
+ v = pv
+
+ // Check type of target.
+ switch v.Kind() {
+ case reflect.Interface:
+ if v.NumMethod() == 0 {
+ // Decoding into nil interface? Switch to non-reflect code.
+ v.Set(reflect.ValueOf(d.arrayInterface()))
+ return
+ }
+ // Otherwise it's invalid.
+ fallthrough
+ default:
+ d.saveError(&UnmarshalTypeError{"array", v.Type(), int64(d.off)})
+ d.off--
+ d.next()
+ return
+ case reflect.Array:
+ case reflect.Slice:
+ break
+ }
+
+ i := 0
+ for {
+ // Look ahead for ] - can only happen on first iteration.
+ op := d.scanWhile(scanSkipSpace)
+ if op == scanEndArray {
+ break
+ }
+
+ // Back up so d.value can have the byte we just read.
+ d.off--
+ d.scan.undo(op)
+
+ // Get element of array, growing if necessary.
+ if v.Kind() == reflect.Slice {
+ // Grow slice if necessary
+ if i >= v.Cap() {
+ newcap := v.Cap() + v.Cap()/2
+ if newcap < 4 {
+ newcap = 4
+ }
+ newv := reflect.MakeSlice(v.Type(), v.Len(), newcap)
+ reflect.Copy(newv, v)
+ v.Set(newv)
+ }
+ if i >= v.Len() {
+ v.SetLen(i + 1)
+ }
+ }
+
+ if i < v.Len() {
+ // Decode into element.
+ d.value(v.Index(i))
+ } else {
+ // Ran out of fixed array: skip.
+ d.value(reflect.Value{})
+ }
+ i++
+
+ // Next token must be , or ].
+ op = d.scanWhile(scanSkipSpace)
+ if op == scanEndArray {
+ break
+ }
+ if op != scanArrayValue {
+ d.error(errPhase)
+ }
+ }
+
+ if i < v.Len() {
+ if v.Kind() == reflect.Array {
+ // Array. Zero the rest.
+ z := reflect.Zero(v.Type().Elem())
+ for ; i < v.Len(); i++ {
+ v.Index(i).Set(z)
+ }
+ } else {
+ v.SetLen(i)
+ }
+ }
+ if i == 0 && v.Kind() == reflect.Slice {
+ v.Set(reflect.MakeSlice(v.Type(), 0, 0))
+ }
+}
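+
+// Minimal sketch (illustrative only; the helper name is hypothetical): when the
+// target is a fixed-size array, array zeroes the elements the input did not fill
+// and drops any extras, mirroring the cases in the decode tests.
+func exampleArrayDecodeSketch() {
+ var a [5]int
+ if err := Unmarshal([]byte(`[1, 2, 3]`), &a); err == nil {
+ fmt.Println(a) // [1 2 3 0 0]
+ }
+}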
+
+var nullLiteral = []byte("null")
+var textUnmarshalerType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem()
+
+// object consumes an object from d.data[d.off-1:], decoding into the value v.
+// The first byte ('{') of the object has been read already.
+func (d *decodeState) object(v reflect.Value) {
+ // Check for unmarshaler.
+ u, ut, pv := d.indirect(v, false)
+ if d.storeKeyed(pv) {
+ return
+ }
+ if u != nil {
+ d.off--
+ err := u.UnmarshalJSON(d.next())
+ if err != nil {
+ d.error(err)
+ }
+ return
+ }
+ if ut != nil {
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over { } in input
+ return
+ }
+ v = pv
+
+ // Decoding into nil interface? Switch to non-reflect code.
+ if v.Kind() == reflect.Interface && v.NumMethod() == 0 {
+ v.Set(reflect.ValueOf(d.objectInterface()))
+ return
+ }
+
+ // Check type of target:
+ // struct or
+ // map[string]T or map[encoding.TextUnmarshaler]T
+ switch v.Kind() {
+ case reflect.Map:
+ // Map key must either have string kind or be an encoding.TextUnmarshaler.
+ t := v.Type()
+ if t.Key().Kind() != reflect.String &&
+ !reflect.PtrTo(t.Key()).Implements(textUnmarshalerType) {
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over { } in input
+ return
+ }
+ if v.IsNil() {
+ v.Set(reflect.MakeMap(t))
+ }
+ case reflect.Struct:
+
+ default:
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over { } in input
+ return
+ }
+
+ var mapElem reflect.Value
+
+ empty := true
+ for {
+ // Read opening " of string key or closing }.
+ op := d.scanWhile(scanSkipSpace)
+ if op == scanEndObject {
+ if !empty && !d.ext.trailingCommas {
+ d.syntaxError("beginning of object key string")
+ }
+ break
+ }
+ empty = false
+ if op == scanBeginName {
+ if !d.ext.unquotedKeys {
+ d.syntaxError("beginning of object key string")
+ }
+ } else if op != scanBeginLiteral {
+ d.error(errPhase)
+ }
+ unquotedKey := op == scanBeginName
+
+ // Read key.
+ start := d.off - 1
+ op = d.scanWhile(scanContinue)
+ item := d.data[start : d.off-1]
+ var key []byte
+ if unquotedKey {
+ key = item
+ // TODO Fix code below to quote item when necessary.
+ } else {
+ var ok bool
+ key, ok = unquoteBytes(item)
+ if !ok {
+ d.error(errPhase)
+ }
+ }
+
+ // Figure out field corresponding to key.
+ var subv reflect.Value
+ destring := false // whether the value is wrapped in a string to be decoded first
+
+ if v.Kind() == reflect.Map {
+ elemType := v.Type().Elem()
+ if !mapElem.IsValid() {
+ mapElem = reflect.New(elemType).Elem()
+ } else {
+ mapElem.Set(reflect.Zero(elemType))
+ }
+ subv = mapElem
+ } else {
+ var f *field
+ fields := cachedTypeFields(v.Type())
+ for i := range fields {
+ ff := &fields[i]
+ if bytes.Equal(ff.nameBytes, key) {
+ f = ff
+ break
+ }
+ if f == nil && ff.equalFold(ff.nameBytes, key) {
+ f = ff
+ }
+ }
+ if f != nil {
+ subv = v
+ destring = f.quoted
+ for _, i := range f.index {
+ if subv.Kind() == reflect.Ptr {
+ if subv.IsNil() {
+ subv.Set(reflect.New(subv.Type().Elem()))
+ }
+ subv = subv.Elem()
+ }
+ subv = subv.Field(i)
+ }
+ }
+ }
+
+ // Read : before value.
+ if op == scanSkipSpace {
+ op = d.scanWhile(scanSkipSpace)
+ }
+ if op != scanObjectKey {
+ d.error(errPhase)
+ }
+
+ // Read value.
+ if destring {
+ switch qv := d.valueQuoted().(type) {
+ case nil:
+ d.literalStore(nullLiteral, subv, false)
+ case string:
+ d.literalStore([]byte(qv), subv, true)
+ default:
+ d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", subv.Type()))
+ }
+ } else {
+ d.value(subv)
+ }
+
+ // Write value back to map;
+ // if using struct, subv points into struct already.
+ if v.Kind() == reflect.Map {
+ kt := v.Type().Key()
+ var kv reflect.Value
+ switch {
+ case kt.Kind() == reflect.String:
+ kv = reflect.ValueOf(key).Convert(v.Type().Key())
+ case reflect.PtrTo(kt).Implements(textUnmarshalerType):
+ kv = reflect.New(v.Type().Key())
+ d.literalStore(item, kv, true)
+ kv = kv.Elem()
+ default:
+ panic("json: Unexpected key type") // should never occur
+ }
+ v.SetMapIndex(kv, subv)
+ }
+
+ // Next token must be , or }.
+ op = d.scanWhile(scanSkipSpace)
+ if op == scanEndObject {
+ break
+ }
+ if op != scanObjectValue {
+ d.error(errPhase)
+ }
+ }
+}
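+
+// Minimal sketch (illustrative only; the helper name is hypothetical): decoding
+// an object into a nil map allocates the map and stores each key/value pair via
+// SetMapIndex.
+func exampleObjectDecodeSketch() {
+ var m map[string]interface{}
+ if err := Unmarshal([]byte(`{"alpha": "abc"}`), &m); err == nil {
+ fmt.Println(m["alpha"]) // abc
+ }
+}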
+
+// isNull returns whether there's a null literal at the provided offset.
+func (d *decodeState) isNull(off int) bool {
+ if off+4 >= len(d.data) || d.data[off] != 'n' || d.data[off+1] != 'u' || d.data[off+2] != 'l' || d.data[off+3] != 'l' {
+ return false
+ }
+ d.nextscan.reset()
+ for i, c := range d.data[off:] {
+ if i > 4 {
+ return false
+ }
+ switch d.nextscan.step(&d.nextscan, c) {
+ case scanContinue, scanBeginName:
+ continue
+ }
+ break
+ }
+ return true
+}
+
+// name consumes a const or function from d.data[d.off-1:], decoding into the value v.
+// The first byte of the function name has been read already.
+func (d *decodeState) name(v reflect.Value) {
+ if d.isNull(d.off-1) {
+ d.literal(v)
+ return
+ }
+
+ // Check for unmarshaler.
+ u, ut, pv := d.indirect(v, false)
+ if d.storeKeyed(pv) {
+ return
+ }
+ if u != nil {
+ d.off--
+ err := u.UnmarshalJSON(d.next())
+ if err != nil {
+ d.error(err)
+ }
+ return
+ }
+ if ut != nil {
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over function in input
+ return
+ }
+ v = pv
+
+ // Decoding into nil interface? Switch to non-reflect code.
+ if v.Kind() == reflect.Interface && v.NumMethod() == 0 {
+ out := d.nameInterface()
+ if out == nil {
+ v.Set(reflect.Zero(v.Type()))
+ } else {
+ v.Set(reflect.ValueOf(out))
+ }
+ return
+ }
+
+ nameStart := d.off - 1
+
+ op := d.scanWhile(scanContinue)
+
+ name := d.data[nameStart : d.off-1]
+ if op != scanParam {
+ // Back up so the byte just read is consumed next.
+ d.off--
+ d.scan.undo(op)
+ if l, ok := d.convertLiteral(name); ok {
+ d.storeValue(v, l)
+ return
+ }
+ d.error(&SyntaxError{fmt.Sprintf("json: unknown constant %q", name), int64(d.off)})
+ }
+
+ funcName := string(name)
+ funcData := d.ext.funcs[funcName]
+ if funcData.key == "" {
+ d.error(fmt.Errorf("json: unknown function %q", funcName))
+ }
+
+ // Check type of target:
+ // struct or
+ // map[string]T or map[encoding.TextUnmarshaler]T
+ switch v.Kind() {
+ case reflect.Map:
+ // Map key must either have string kind or be an encoding.TextUnmarshaler.
+ t := v.Type()
+ if t.Key().Kind() != reflect.String &&
+ !reflect.PtrTo(t.Key()).Implements(textUnmarshalerType) {
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over { } in input
+ return
+ }
+ if v.IsNil() {
+ v.Set(reflect.MakeMap(t))
+ }
+ case reflect.Struct:
+
+ default:
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over { } in input
+ return
+ }
+
+ // TODO Fix case of func field as map.
+ //topv := v
+
+ // Figure out field corresponding to function.
+ key := []byte(funcData.key)
+ if v.Kind() == reflect.Map {
+ elemType := v.Type().Elem()
+ v = reflect.New(elemType).Elem()
+ } else {
+ var f *field
+ fields := cachedTypeFields(v.Type())
+ for i := range fields {
+ ff := &fields[i]
+ if bytes.Equal(ff.nameBytes, key) {
+ f = ff
+ break
+ }
+ if f == nil && ff.equalFold(ff.nameBytes, key) {
+ f = ff
+ }
+ }
+ if f != nil {
+ for _, i := range f.index {
+ if v.Kind() == reflect.Ptr {
+ if v.IsNil() {
+ v.Set(reflect.New(v.Type().Elem()))
+ }
+ v = v.Elem()
+ }
+ v = v.Field(i)
+ }
+ if v.Kind() == reflect.Ptr {
+ if v.IsNil() {
+ v.Set(reflect.New(v.Type().Elem()))
+ }
+ v = v.Elem()
+ }
+ }
+ }
+
+ // Check for unmarshaler on func field itself.
+ u, ut, pv = d.indirect(v, false)
+ if u != nil {
+ d.off = nameStart
+ err := u.UnmarshalJSON(d.next())
+ if err != nil {
+ d.error(err)
+ }
+ return
+ }
+
+ var mapElem reflect.Value
+
+ // Parse function arguments.
+ for i := 0; ; i++ {
+ // Look ahead for closing ) - can only happen on first iteration.
+ op := d.scanWhile(scanSkipSpace)
+ if op == scanEndParams {
+ break
+ }
+
+ // Back up so d.value can have the byte we just read.
+ d.off--
+ d.scan.undo(op)
+
+ if i >= len(funcData.args) {
+ d.error(fmt.Errorf("json: too many arguments for function %s", funcName))
+ }
+ key := []byte(funcData.args[i])
+
+ // Figure out field corresponding to key.
+ var subv reflect.Value
+ destring := false // whether the value is wrapped in a string to be decoded first
+
+ if v.Kind() == reflect.Map {
+ elemType := v.Type().Elem()
+ if !mapElem.IsValid() {
+ mapElem = reflect.New(elemType).Elem()
+ } else {
+ mapElem.Set(reflect.Zero(elemType))
+ }
+ subv = mapElem
+ } else {
+ var f *field
+ fields := cachedTypeFields(v.Type())
+ for i := range fields {
+ ff := &fields[i]
+ if bytes.Equal(ff.nameBytes, key) {
+ f = ff
+ break
+ }
+ if f == nil && ff.equalFold(ff.nameBytes, key) {
+ f = ff
+ }
+ }
+ if f != nil {
+ subv = v
+ destring = f.quoted
+ for _, i := range f.index {
+ if subv.Kind() == reflect.Ptr {
+ if subv.IsNil() {
+ subv.Set(reflect.New(subv.Type().Elem()))
+ }
+ subv = subv.Elem()
+ }
+ subv = subv.Field(i)
+ }
+ }
+ }
+
+ // Read value.
+ if destring {
+ switch qv := d.valueQuoted().(type) {
+ case nil:
+ d.literalStore(nullLiteral, subv, false)
+ case string:
+ d.literalStore([]byte(qv), subv, true)
+ default:
+ d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", subv.Type()))
+ }
+ } else {
+ d.value(subv)
+ }
+
+ // Write value back to map;
+ // if using struct, subv points into struct already.
+ if v.Kind() == reflect.Map {
+ kt := v.Type().Key()
+ var kv reflect.Value
+ switch {
+ case kt.Kind() == reflect.String:
+ kv = reflect.ValueOf(key).Convert(v.Type().Key())
+ case reflect.PtrTo(kt).Implements(textUnmarshalerType):
+ kv = reflect.New(v.Type().Key())
+ d.literalStore(key, kv, true)
+ kv = kv.Elem()
+ default:
+ panic("json: Unexpected key type") // should never occur
+ }
+ v.SetMapIndex(kv, subv)
+ }
+
+ // Next token must be , or ).
+ op = d.scanWhile(scanSkipSpace)
+ if op == scanEndParams {
+ break
+ }
+ if op != scanParam {
+ d.error(errPhase)
+ }
+ }
+}
+
+// keyed attempts to decode an object or function using a keyed doc extension,
+// and returns the value and true on success, or nil and false otherwise.
+func (d *decodeState) keyed() (interface{}, bool) {
+ if len(d.ext.keyed) == 0 {
+ return nil, false
+ }
+
+ unquote := false
+
+ // Look-ahead first key to check for a keyed document extension.
+ d.nextscan.reset()
+ var start, end int
+ for i, c := range d.data[d.off-1:] {
+ switch op := d.nextscan.step(&d.nextscan, c); op {
+ case scanSkipSpace, scanContinue, scanBeginObject:
+ continue
+ case scanBeginLiteral, scanBeginName:
+ unquote = op == scanBeginLiteral
+ start = i
+ continue
+ }
+ end = i
+ break
+ }
+
+ name := d.data[d.off-1+start : d.off-1+end]
+
+ var key []byte
+ var ok bool
+ if unquote {
+ key, ok = unquoteBytes(name)
+ if !ok {
+ d.error(errPhase)
+ }
+ } else {
+ funcData, ok := d.ext.funcs[string(name)]
+ if !ok {
+ return nil, false
+ }
+ key = []byte(funcData.key)
+ }
+
+ decode, ok := d.ext.keyed[string(key)]
+ if !ok {
+ return nil, false
+ }
+
+ d.off--
+ out, err := decode(d.next())
+ if err != nil {
+ d.error(err)
+ }
+ return out, true
+}
+
+func (d *decodeState) storeKeyed(v reflect.Value) bool {
+ keyed, ok := d.keyed()
+ if !ok {
+ return false
+ }
+ d.storeValue(v, keyed)
+ return true
+}
+
+var (
+ trueBytes = []byte("true")
+ falseBytes = []byte("false")
+ nullBytes = []byte("null")
+)
+
+func (d *decodeState) storeValue(v reflect.Value, from interface{}) {
+ switch from {
+ case nil:
+ d.literalStore(nullBytes, v, false)
+ return
+ case true:
+ d.literalStore(trueBytes, v, false)
+ return
+ case false:
+ d.literalStore(falseBytes, v, false)
+ return
+ }
+ fromv := reflect.ValueOf(from)
+ for fromv.Kind() == reflect.Ptr && !fromv.IsNil() {
+ fromv = fromv.Elem()
+ }
+ fromt := fromv.Type()
+ for v.Kind() == reflect.Ptr && !v.IsNil() {
+ v = v.Elem()
+ }
+ vt := v.Type()
+ if fromt.AssignableTo(vt) {
+ v.Set(fromv)
+ } else if fromt.ConvertibleTo(vt) {
+ v.Set(fromv.Convert(vt))
+ } else {
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ }
+}
+
+func (d *decodeState) convertLiteral(name []byte) (interface{}, bool) {
+ if len(name) == 0 {
+ return nil, false
+ }
+ switch name[0] {
+ case 't':
+ if bytes.Equal(name, trueBytes) {
+ return true, true
+ }
+ case 'f':
+ if bytes.Equal(name, falseBytes) {
+ return false, true
+ }
+ case 'n':
+ if bytes.Equal(name, nullBytes) {
+ return nil, true
+ }
+ }
+ if l, ok := d.ext.consts[string(name)]; ok {
+ return l, true
+ }
+ return nil, false
+}
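+
+// Minimal sketch (illustrative only; the helper name is hypothetical):
+// convertLiteral maps the standard literals, and any extension constants
+// registered in d.ext, to their Go values.
+func exampleConvertLiteralSketch() {
+ var d decodeState
+ if l, ok := d.convertLiteral([]byte("true")); ok {
+ fmt.Println(l) // true
+ }
+}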
+
+// literal consumes a literal from d.data[d.off-1:], decoding into the value v.
+// The first byte of the literal has been read already
+// (that's how the caller knows it's a literal).
+func (d *decodeState) literal(v reflect.Value) {
+ // All bytes inside literal return scanContinue op code.
+ start := d.off - 1
+ op := d.scanWhile(scanContinue)
+
+ // Scan read one byte too far; back up.
+ d.off--
+ d.scan.undo(op)
+
+ d.literalStore(d.data[start:d.off], v, false)
+}
+
+// convertNumber converts the number literal s to a float64 or a Number
+// depending on the setting of d.useNumber.
+func (d *decodeState) convertNumber(s string) (interface{}, error) {
+ if d.useNumber {
+ return Number(s), nil
+ }
+ f, err := strconv.ParseFloat(s, 64)
+ if err != nil {
+ return nil, &UnmarshalTypeError{"number " + s, reflect.TypeOf(0.0), int64(d.off)}
+ }
+ return f, nil
+}
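+
+// Minimal usage sketch (illustrative only; the helper name is hypothetical): with
+// Decoder.UseNumber, convertNumber keeps numeric literals as Number values rather
+// than float64, so the original text of large numbers survives decoding.
+func exampleUseNumberSketch() {
+ var v interface{}
+ dec := NewDecoder(bytes.NewReader([]byte(`{"n": 18446744073709551615}`)))
+ dec.UseNumber()
+ if err := dec.Decode(&v); err == nil {
+ n := v.(map[string]interface{})["n"].(Number)
+ fmt.Println(n.String()) // 18446744073709551615
+ }
+}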
+
+var numberType = reflect.TypeOf(Number(""))
+
+// literalStore decodes a literal stored in item into v.
+//
+// fromQuoted indicates whether this literal came from unwrapping a
+// string from the ",string" struct tag option. This is used only to
+// produce more helpful error messages.
+func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool) {
+ // Check for unmarshaler.
+ if len(item) == 0 {
+ // Empty string given.
+ d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
+ return
+ }
+ wantptr := item[0] == 'n' // null
+ u, ut, pv := d.indirect(v, wantptr)
+ if u != nil {
+ err := u.UnmarshalJSON(item)
+ if err != nil {
+ d.error(err)
+ }
+ return
+ }
+ if ut != nil {
+ if item[0] != '"' {
+ if fromQuoted {
+ d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
+ } else {
+ d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
+ }
+ return
+ }
+ s, ok := unquoteBytes(item)
+ if !ok {
+ if fromQuoted {
+ d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
+ } else {
+ d.error(errPhase)
+ }
+ }
+ err := ut.UnmarshalText(s)
+ if err != nil {
+ d.error(err)
+ }
+ return
+ }
+
+ v = pv
+
+ switch c := item[0]; c {
+ case 'n': // null
+ switch v.Kind() {
+ case reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice:
+ v.Set(reflect.Zero(v.Type()))
+ // otherwise, ignore null for primitives/string
+ }
+ case 't', 'f': // true, false
+ value := c == 't'
+ switch v.Kind() {
+ default:
+ if fromQuoted {
+ d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
+ } else {
+ d.saveError(&UnmarshalTypeError{"bool", v.Type(), int64(d.off)})
+ }
+ case reflect.Bool:
+ v.SetBool(value)
+ case reflect.Interface:
+ if v.NumMethod() == 0 {
+ v.Set(reflect.ValueOf(value))
+ } else {
+ d.saveError(&UnmarshalTypeError{"bool", v.Type(), int64(d.off)})
+ }
+ }
+
+ case '"': // string
+ s, ok := unquoteBytes(item)
+ if !ok {
+ if fromQuoted {
+ d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
+ } else {
+ d.error(errPhase)
+ }
+ }
+ switch v.Kind() {
+ default:
+ d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
+ case reflect.Slice:
+ if v.Type().Elem().Kind() != reflect.Uint8 {
+ d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
+ break
+ }
+ b := make([]byte, base64.StdEncoding.DecodedLen(len(s)))
+ n, err := base64.StdEncoding.Decode(b, s)
+ if err != nil {
+ d.saveError(err)
+ break
+ }
+ v.SetBytes(b[:n])
+ case reflect.String:
+ v.SetString(string(s))
+ case reflect.Interface:
+ if v.NumMethod() == 0 {
+ v.Set(reflect.ValueOf(string(s)))
+ } else {
+ d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
+ }
+ }
+
+ default: // number
+ if c != '-' && (c < '0' || c > '9') {
+ if fromQuoted {
+ d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
+ } else {
+ d.error(errPhase)
+ }
+ }
+ s := string(item)
+ switch v.Kind() {
+ default:
+ if v.Kind() == reflect.String && v.Type() == numberType {
+ v.SetString(s)
+ if !isValidNumber(s) {
+ d.error(fmt.Errorf("json: invalid number literal, trying to unmarshal %q into Number", item))
+ }
+ break
+ }
+ if fromQuoted {
+ d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
+ } else {
+ d.error(&UnmarshalTypeError{"number", v.Type(), int64(d.off)})
+ }
+ case reflect.Interface:
+ n, err := d.convertNumber(s)
+ if err != nil {
+ d.saveError(err)
+ break
+ }
+ if v.NumMethod() != 0 {
+ d.saveError(&UnmarshalTypeError{"number", v.Type(), int64(d.off)})
+ break
+ }
+ v.Set(reflect.ValueOf(n))
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ n, err := strconv.ParseInt(s, 10, 64)
+ if err != nil || v.OverflowInt(n) {
+ d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)})
+ break
+ }
+ v.SetInt(n)
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ n, err := strconv.ParseUint(s, 10, 64)
+ if err != nil || v.OverflowUint(n) {
+ d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)})
+ break
+ }
+ v.SetUint(n)
+
+ case reflect.Float32, reflect.Float64:
+ n, err := strconv.ParseFloat(s, v.Type().Bits())
+ if err != nil || v.OverflowFloat(n) {
+ d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)})
+ break
+ }
+ v.SetFloat(n)
+ }
+ }
+}
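+
+// Minimal sketch (illustrative only; the helper name is hypothetical): with the
+// ",string" struct tag option, the quoted literal is unwrapped first and
+// literalStore is then called with fromQuoted set, so "42" decodes into an int
+// field.
+func exampleStringTagSketch() {
+ var out struct {
+ N int `json:",string"`
+ }
+ if err := Unmarshal([]byte(`{"N": "42"}`), &out); err == nil {
+ fmt.Println(out.N) // 42
+ }
+}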
+
+// The xxxInterface routines build up a value to be stored
+// in an empty interface. They are not strictly necessary,
+// but they avoid the weight of reflection in this common case.
+
+// valueInterface is like value but returns an interface{}.
+func (d *decodeState) valueInterface() interface{} {
+ switch d.scanWhile(scanSkipSpace) {
+ default:
+ d.error(errPhase)
+ panic("unreachable")
+ case scanBeginArray:
+ return d.arrayInterface()
+ case scanBeginObject:
+ return d.objectInterface()
+ case scanBeginLiteral:
+ return d.literalInterface()
+ case scanBeginName:
+ return d.nameInterface()
+ }
+}
+
+func (d *decodeState) syntaxError(expected string) {
+ msg := fmt.Sprintf("invalid character '%c' looking for %s", d.data[d.off-1], expected)
+ d.error(&SyntaxError{msg, int64(d.off)})
+}
+
+// arrayInterface is like array but returns []interface{}.
+func (d *decodeState) arrayInterface() []interface{} {
+ var v = make([]interface{}, 0)
+ for {
+ // Look ahead for ] - can only happen on first iteration.
+ op := d.scanWhile(scanSkipSpace)
+ if op == scanEndArray {
+ if len(v) > 0 && !d.ext.trailingCommas {
+ d.syntaxError("beginning of value")
+ }
+ break
+ }
+
+ // Back up so d.value can have the byte we just read.
+ d.off--
+ d.scan.undo(op)
+
+ v = append(v, d.valueInterface())
+
+ // Next token must be , or ].
+ op = d.scanWhile(scanSkipSpace)
+ if op == scanEndArray {
+ break
+ }
+ if op != scanArrayValue {
+ d.error(errPhase)
+ }
+ }
+ return v
+}
+
+// objectInterface is like object but returns map[string]interface{}.
+func (d *decodeState) objectInterface() interface{} {
+ v, ok := d.keyed()
+ if ok {
+ return v
+ }
+
+ m := make(map[string]interface{})
+ for {
+ // Read opening " of string key or closing }.
+ op := d.scanWhile(scanSkipSpace)
+ if op == scanEndObject {
+ if len(m) > 0 && !d.ext.trailingCommas {
+ d.syntaxError("beginning of object key string")
+ }
+ break
+ }
+ if op == scanBeginName {
+ if !d.ext.unquotedKeys {
+ d.syntaxError("beginning of object key string")
+ }
+ } else if op != scanBeginLiteral {
+ d.error(errPhase)
+ }
+ unquotedKey := op == scanBeginName
+
+ // Read string key.
+ start := d.off - 1
+ op = d.scanWhile(scanContinue)
+ item := d.data[start : d.off-1]
+ var key string
+ if unquotedKey {
+ key = string(item)
+ } else {
+ var ok bool
+ key, ok = unquote(item)
+ if !ok {
+ d.error(errPhase)
+ }
+ }
+
+ // Read : before value.
+ if op == scanSkipSpace {
+ op = d.scanWhile(scanSkipSpace)
+ }
+ if op != scanObjectKey {
+ d.error(errPhase)
+ }
+
+ // Read value.
+ m[key] = d.valueInterface()
+
+ // Next token must be , or }.
+ op = d.scanWhile(scanSkipSpace)
+ if op == scanEndObject {
+ break
+ }
+ if op != scanObjectValue {
+ d.error(errPhase)
+ }
+ }
+ return m
+}
+
+// literalInterface is like literal but returns an interface value.
+func (d *decodeState) literalInterface() interface{} {
+ // All bytes inside literal return scanContinue op code.
+ start := d.off - 1
+ op := d.scanWhile(scanContinue)
+
+ // Scan read one byte too far; back up.
+ d.off--
+ d.scan.undo(op)
+ item := d.data[start:d.off]
+
+ switch c := item[0]; c {
+ case 'n': // null
+ return nil
+
+ case 't', 'f': // true, false
+ return c == 't'
+
+ case '"': // string
+ s, ok := unquote(item)
+ if !ok {
+ d.error(errPhase)
+ }
+ return s
+
+ default: // number
+ if c != '-' && (c < '0' || c > '9') {
+ d.error(errPhase)
+ }
+ n, err := d.convertNumber(string(item))
+ if err != nil {
+ d.saveError(err)
+ }
+ return n
+ }
+}
+
+// nameInterface is like name but returns the decoded result as an interface{}.
+func (d *decodeState) nameInterface() interface{} {
+ v, ok := d.keyed()
+ if ok {
+ return v
+ }
+
+ nameStart := d.off - 1
+
+ op := d.scanWhile(scanContinue)
+
+ name := d.data[nameStart : d.off-1]
+ if op != scanParam {
+ // Back up so the byte just read is consumed next.
+ d.off--
+ d.scan.undo(op)
+ if l, ok := d.convertLiteral(name); ok {
+ return l
+ }
+ d.error(&SyntaxError{fmt.Sprintf("json: unknown constant %q", name), int64(d.off)})
+ }
+
+ funcName := string(name)
+ funcData := d.ext.funcs[funcName]
+ if funcData.key == "" {
+ d.error(fmt.Errorf("json: unknown function %q", funcName))
+ }
+
+ m := make(map[string]interface{})
+ for i := 0; ; i++ {
+ // Look ahead for ) - can only happen on first iteration.
+ op := d.scanWhile(scanSkipSpace)
+ if op == scanEndParams {
+ break
+ }
+
+ // Back up so d.value can have the byte we just read.
+ d.off--
+ d.scan.undo(op)
+
+ if i >= len(funcData.args) {
+ d.error(fmt.Errorf("json: too many arguments for function %s", funcName))
+ }
+ m[funcData.args[i]] = d.valueInterface()
+
+ // Next token must be , or ).
+ op = d.scanWhile(scanSkipSpace)
+ if op == scanEndParams {
+ break
+ }
+ if op != scanParam {
+ d.error(errPhase)
+ }
+ }
+ return map[string]interface{}{funcData.key: m}
+}
+
+// getu4 decodes \uXXXX from the beginning of s, returning the hex value,
+// or it returns -1.
+func getu4(s []byte) rune {
+ if len(s) < 6 || s[0] != '\\' || s[1] != 'u' {
+ return -1
+ }
+ r, err := strconv.ParseUint(string(s[2:6]), 16, 64)
+ if err != nil {
+ return -1
+ }
+ return rune(r)
+}
+
+// unquote converts a quoted JSON string literal s into an actual string t.
+// The rules differ from Go string literals, so strconv.Unquote cannot be used.
+func unquote(s []byte) (t string, ok bool) {
+ s, ok = unquoteBytes(s)
+ t = string(s)
+ return
+}
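+
+// Minimal sketch (illustrative only; the helper name is hypothetical): unquote
+// decodes the escape sequences in a quoted JSON string, including UTF-16
+// surrogate pairs handled by getu4.
+func exampleUnquoteSketch() {
+ if s, ok := unquote([]byte(`"g-clef: \uD834\uDD1E"`)); ok {
+ fmt.Println(s) // g-clef: 𝄞 (U+1D11E)
+ }
+}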
+
+func unquoteBytes(s []byte) (t []byte, ok bool) {
+ if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' {
+ return
+ }
+ s = s[1 : len(s)-1]
+
+ // Check for unusual characters. If there are none,
+ // then no unquoting is needed, so return a slice of the
+ // original bytes.
+ r := 0
+ for r < len(s) {
+ c := s[r]
+ if c == '\\' || c == '"' || c < ' ' {
+ break
+ }
+ if c < utf8.RuneSelf {
+ r++
+ continue
+ }
+ rr, size := utf8.DecodeRune(s[r:])
+ if rr == utf8.RuneError && size == 1 {
+ break
+ }
+ r += size
+ }
+ if r == len(s) {
+ return s, true
+ }
+
+ b := make([]byte, len(s)+2*utf8.UTFMax)
+ w := copy(b, s[0:r])
+ for r < len(s) {
+ // Out of room? Can only happen if s is full of
+ // malformed UTF-8 and we're replacing each
+ // byte with RuneError.
+ if w >= len(b)-2*utf8.UTFMax {
+ nb := make([]byte, (len(b)+utf8.UTFMax)*2)
+ copy(nb, b[0:w])
+ b = nb
+ }
+ switch c := s[r]; {
+ case c == '\\':
+ r++
+ if r >= len(s) {
+ return
+ }
+ switch s[r] {
+ default:
+ return
+ case '"', '\\', '/', '\'':
+ b[w] = s[r]
+ r++
+ w++
+ case 'b':
+ b[w] = '\b'
+ r++
+ w++
+ case 'f':
+ b[w] = '\f'
+ r++
+ w++
+ case 'n':
+ b[w] = '\n'
+ r++
+ w++
+ case 'r':
+ b[w] = '\r'
+ r++
+ w++
+ case 't':
+ b[w] = '\t'
+ r++
+ w++
+ case 'u':
+ r--
+ rr := getu4(s[r:])
+ if rr < 0 {
+ return
+ }
+ r += 6
+ if utf16.IsSurrogate(rr) {
+ rr1 := getu4(s[r:])
+ if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar {
+ // A valid pair; consume.
+ r += 6
+ w += utf8.EncodeRune(b[w:], dec)
+ break
+ }
+ // Invalid surrogate; fall back to replacement rune.
+ rr = unicode.ReplacementChar
+ }
+ w += utf8.EncodeRune(b[w:], rr)
+ }
+
+ // Quote, control characters are invalid.
+ case c == '"', c < ' ':
+ return
+
+ // ASCII
+ case c < utf8.RuneSelf:
+ b[w] = c
+ r++
+ w++
+
+ // Coerce to well-formed UTF-8.
+ default:
+ rr, size := utf8.DecodeRune(s[r:])
+ r += size
+ w += utf8.EncodeRune(b[w:], rr)
+ }
+ }
+ return b[0:w], true
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/decode_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/decode_test.go
new file mode 100644
index 00000000000..30e46ca44f0
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/decode_test.go
@@ -0,0 +1,1512 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "bytes"
+ "encoding"
+ "errors"
+ "fmt"
+ "image"
+ "net"
+ "reflect"
+ "strings"
+ "testing"
+ "time"
+)
+
+type T struct {
+ X string
+ Y int
+ Z int `json:"-"`
+}
+
+type U struct {
+ Alphabet string `json:"alpha"`
+}
+
+type V struct {
+ F1 interface{}
+ F2 int32
+ F3 Number
+}
+
+// ifaceNumAsFloat64/ifaceNumAsNumber are used to test unmarshaling with and
+// without UseNumber.
+var ifaceNumAsFloat64 = map[string]interface{}{
+ "k1": float64(1),
+ "k2": "s",
+ "k3": []interface{}{float64(1), float64(2.0), float64(3e-3)},
+ "k4": map[string]interface{}{"kk1": "s", "kk2": float64(2)},
+}
+
+var ifaceNumAsNumber = map[string]interface{}{
+ "k1": Number("1"),
+ "k2": "s",
+ "k3": []interface{}{Number("1"), Number("2.0"), Number("3e-3")},
+ "k4": map[string]interface{}{"kk1": "s", "kk2": Number("2")},
+}
+
+type tx struct {
+ x int
+}
+
+// A type that can unmarshal itself.
+
+type unmarshaler struct {
+ T bool
+}
+
+func (u *unmarshaler) UnmarshalJSON(b []byte) error {
+ *u = unmarshaler{true} // All we need to see that UnmarshalJSON is called.
+ return nil
+}
+
+type ustruct struct {
+ M unmarshaler
+}
+
+type unmarshalerText struct {
+ A, B string
+}
+
+// Needed for re-marshaling tests.
+func (u unmarshalerText) MarshalText() ([]byte, error) {
+ return []byte(u.A + ":" + u.B), nil
+}
+
+func (u *unmarshalerText) UnmarshalText(b []byte) error {
+ pos := bytes.Index(b, []byte(":"))
+ if pos == -1 {
+ return errors.New("missing separator")
+ }
+ u.A, u.B = string(b[:pos]), string(b[pos+1:])
+ return nil
+}
+
+var _ encoding.TextUnmarshaler = (*unmarshalerText)(nil)
+
+type ustructText struct {
+ M unmarshalerText
+}
+
+var (
+ um0, um1 unmarshaler // target2 of unmarshaling
+ ump = &um1
+ umtrue = unmarshaler{true}
+ umslice = []unmarshaler{{true}}
+ umslicep = new([]unmarshaler)
+ umstruct = ustruct{unmarshaler{true}}
+
+ um0T, um1T unmarshalerText // target2 of unmarshaling
+ umpType = &um1T
+ umtrueXY = unmarshalerText{"x", "y"}
+ umsliceXY = []unmarshalerText{{"x", "y"}}
+ umslicepType = new([]unmarshalerText)
+ umstructType = new(ustructText)
+ umstructXY = ustructText{unmarshalerText{"x", "y"}}
+
+ ummapType = map[unmarshalerText]bool{}
+ ummapXY = map[unmarshalerText]bool{unmarshalerText{"x", "y"}: true}
+)
+
+// Test data structures for anonymous fields.
+
+type Point struct {
+ Z int
+}
+
+type Top struct {
+ Level0 int
+ Embed0
+ *Embed0a
+ *Embed0b `json:"e,omitempty"` // treated as named
+ Embed0c `json:"-"` // ignored
+ Loop
+ Embed0p // has Point with X, Y, used
+ Embed0q // has Point with Z, used
+ embed // contains exported field
+}
+
+type Embed0 struct {
+ Level1a int // overridden by Embed0a's Level1a with json tag
+ Level1b int // used because Embed0a's Level1b is renamed
+ Level1c int // used because Embed0a's Level1c is ignored
+ Level1d int // annihilated by Embed0a's Level1d
+ Level1e int `json:"x"` // annihilated by Embed0a.Level1e
+}
+
+type Embed0a struct {
+ Level1a int `json:"Level1a,omitempty"`
+ Level1b int `json:"LEVEL1B,omitempty"`
+ Level1c int `json:"-"`
+ Level1d int // annihilated by Embed0's Level1d
+ Level1f int `json:"x"` // annihilated by Embed0's Level1e
+}
+
+type Embed0b Embed0
+
+type Embed0c Embed0
+
+type Embed0p struct {
+ image.Point
+}
+
+type Embed0q struct {
+ Point
+}
+
+type embed struct {
+ Q int
+}
+
+type Loop struct {
+ Loop1 int `json:",omitempty"`
+ Loop2 int `json:",omitempty"`
+ *Loop
+}
+
+// From reflect test:
+// The X in S6 and S7 annihilate, but they also block the X in S8.S9.
+type S5 struct {
+ S6
+ S7
+ S8
+}
+
+type S6 struct {
+ X int
+}
+
+type S7 S6
+
+type S8 struct {
+ S9
+}
+
+type S9 struct {
+ X int
+ Y int
+}
+
+// From reflect test:
+// The X in S11.S6 and S12.S6 annihilate, but they also block the X in S13.S8.S9.
+type S10 struct {
+ S11
+ S12
+ S13
+}
+
+type S11 struct {
+ S6
+}
+
+type S12 struct {
+ S6
+}
+
+type S13 struct {
+ S8
+}
+
+type unmarshalTest struct {
+ in string
+ ptr interface{}
+ out interface{}
+ err error
+ useNumber bool
+}
+
+type Ambig struct {
+ // Given "hello", the first match should win.
+ First int `json:"HELLO"`
+ Second int `json:"Hello"`
+}
+
+type XYZ struct {
+ X interface{}
+ Y interface{}
+ Z interface{}
+}
+
+func sliceAddr(x []int) *[]int { return &x }
+func mapAddr(x map[string]int) *map[string]int { return &x }
+
+var unmarshalTests = []unmarshalTest{
+ // basic types
+ {in: `true`, ptr: new(bool), out: true},
+ {in: `1`, ptr: new(int), out: 1},
+ {in: `1.2`, ptr: new(float64), out: 1.2},
+ {in: `-5`, ptr: new(int16), out: int16(-5)},
+ {in: `2`, ptr: new(Number), out: Number("2"), useNumber: true},
+ {in: `2`, ptr: new(Number), out: Number("2")},
+ {in: `2`, ptr: new(interface{}), out: float64(2.0)},
+ {in: `2`, ptr: new(interface{}), out: Number("2"), useNumber: true},
+ {in: `"a\u1234"`, ptr: new(string), out: "a\u1234"},
+ {in: `"http:\/\/"`, ptr: new(string), out: "http://"},
+ {in: `"g-clef: \uD834\uDD1E"`, ptr: new(string), out: "g-clef: \U0001D11E"},
+ {in: `"invalid: \uD834x\uDD1E"`, ptr: new(string), out: "invalid: \uFFFDx\uFFFD"},
+ {in: "null", ptr: new(interface{}), out: nil},
+ {in: `{"X": [1,2,3], "Y": 4}`, ptr: new(T), out: T{Y: 4}, err: &UnmarshalTypeError{"array", reflect.TypeOf(""), 7}},
+ {in: `{"x": 1}`, ptr: new(tx), out: tx{}},
+ {in: `{"F1":1,"F2":2,"F3":3}`, ptr: new(V), out: V{F1: float64(1), F2: int32(2), F3: Number("3")}},
+ {in: `{"F1":1,"F2":2,"F3":3}`, ptr: new(V), out: V{F1: Number("1"), F2: int32(2), F3: Number("3")}, useNumber: true},
+ {in: `{"k1":1,"k2":"s","k3":[1,2.0,3e-3],"k4":{"kk1":"s","kk2":2}}`, ptr: new(interface{}), out: ifaceNumAsFloat64},
+ {in: `{"k1":1,"k2":"s","k3":[1,2.0,3e-3],"k4":{"kk1":"s","kk2":2}}`, ptr: new(interface{}), out: ifaceNumAsNumber, useNumber: true},
+
+ // raw values with whitespace
+ {in: "\n true ", ptr: new(bool), out: true},
+ {in: "\t 1 ", ptr: new(int), out: 1},
+ {in: "\r 1.2 ", ptr: new(float64), out: 1.2},
+ {in: "\t -5 \n", ptr: new(int16), out: int16(-5)},
+ {in: "\t \"a\\u1234\" \n", ptr: new(string), out: "a\u1234"},
+
+ // Z has a "-" tag.
+ {in: `{"Y": 1, "Z": 2}`, ptr: new(T), out: T{Y: 1}},
+
+ {in: `{"alpha": "abc", "alphabet": "xyz"}`, ptr: new(U), out: U{Alphabet: "abc"}},
+ {in: `{"alpha": "abc"}`, ptr: new(U), out: U{Alphabet: "abc"}},
+ {in: `{"alphabet": "xyz"}`, ptr: new(U), out: U{}},
+
+ // syntax errors
+ {in: `{"X": "foo", "Y"}`, err: &SyntaxError{"invalid character '}' after object key", 17}},
+ {in: `[1, 2, 3+]`, err: &SyntaxError{"invalid character '+' after array element", 9}},
+ {in: `{"X":12x}`, err: &SyntaxError{"invalid character 'x' after object key:value pair", 8}, useNumber: true},
+
+ // raw value errors
+ {in: "\x01 42", err: &SyntaxError{"invalid character '\\x01' looking for beginning of value", 1}},
+ {in: " 42 \x01", err: &SyntaxError{"invalid character '\\x01' after top-level value", 5}},
+ {in: "\x01 true", err: &SyntaxError{"invalid character '\\x01' looking for beginning of value", 1}},
+ {in: " false \x01", err: &SyntaxError{"invalid character '\\x01' after top-level value", 8}},
+ {in: "\x01 1.2", err: &SyntaxError{"invalid character '\\x01' looking for beginning of value", 1}},
+ {in: " 3.4 \x01", err: &SyntaxError{"invalid character '\\x01' after top-level value", 6}},
+ {in: "\x01 \"string\"", err: &SyntaxError{"invalid character '\\x01' looking for beginning of value", 1}},
+ {in: " \"string\" \x01", err: &SyntaxError{"invalid character '\\x01' after top-level value", 11}},
+
+ // array tests
+ {in: `[1, 2, 3]`, ptr: new([3]int), out: [3]int{1, 2, 3}},
+ {in: `[1, 2, 3]`, ptr: new([1]int), out: [1]int{1}},
+ {in: `[1, 2, 3]`, ptr: new([5]int), out: [5]int{1, 2, 3, 0, 0}},
+
+ // empty array to interface test
+ {in: `[]`, ptr: new([]interface{}), out: []interface{}{}},
+ {in: `null`, ptr: new([]interface{}), out: []interface{}(nil)},
+ {in: `{"T":[]}`, ptr: new(map[string]interface{}), out: map[string]interface{}{"T": []interface{}{}}},
+ {in: `{"T":null}`, ptr: new(map[string]interface{}), out: map[string]interface{}{"T": interface{}(nil)}},
+
+ // composite tests
+ {in: allValueIndent, ptr: new(All), out: allValue},
+ {in: allValueCompact, ptr: new(All), out: allValue},
+ {in: allValueIndent, ptr: new(*All), out: &allValue},
+ {in: allValueCompact, ptr: new(*All), out: &allValue},
+ {in: pallValueIndent, ptr: new(All), out: pallValue},
+ {in: pallValueCompact, ptr: new(All), out: pallValue},
+ {in: pallValueIndent, ptr: new(*All), out: &pallValue},
+ {in: pallValueCompact, ptr: new(*All), out: &pallValue},
+
+ // unmarshal interface test
+ {in: `{"T":false}`, ptr: &um0, out: umtrue}, // use "false" so test will fail if custom unmarshaler is not called
+ {in: `{"T":false}`, ptr: &ump, out: &umtrue},
+ {in: `[{"T":false}]`, ptr: &umslice, out: umslice},
+ {in: `[{"T":false}]`, ptr: &umslicep, out: &umslice},
+ {in: `{"M":{"T":"x:y"}}`, ptr: &umstruct, out: umstruct},
+
+ // UnmarshalText interface test
+ {in: `"x:y"`, ptr: &um0T, out: umtrueXY},
+ {in: `"x:y"`, ptr: &umpType, out: &umtrueXY},
+ {in: `["x:y"]`, ptr: &umsliceXY, out: umsliceXY},
+ {in: `["x:y"]`, ptr: &umslicepType, out: &umsliceXY},
+ {in: `{"M":"x:y"}`, ptr: umstructType, out: umstructXY},
+
+ // Map keys can be encoding.TextUnmarshalers
+ {in: `{"x:y":true}`, ptr: &ummapType, out: ummapXY},
+ // If multiple values for the same key exists, only the most recent value is used.
+ {in: `{"x:y":false,"x:y":true}`, ptr: &ummapType, out: ummapXY},
+
+ // Overwriting of data.
+ // This is different from package xml, but it's what we've always done.
+ // Now documented and tested.
+ {in: `[2]`, ptr: sliceAddr([]int{1}), out: []int{2}},
+ {in: `{"key": 2}`, ptr: mapAddr(map[string]int{"old": 0, "key": 1}), out: map[string]int{"key": 2}},
+
+ {
+ in: `{
+ "Level0": 1,
+ "Level1b": 2,
+ "Level1c": 3,
+ "x": 4,
+ "Level1a": 5,
+ "LEVEL1B": 6,
+ "e": {
+ "Level1a": 8,
+ "Level1b": 9,
+ "Level1c": 10,
+ "Level1d": 11,
+ "x": 12
+ },
+ "Loop1": 13,
+ "Loop2": 14,
+ "X": 15,
+ "Y": 16,
+ "Z": 17,
+ "Q": 18
+ }`,
+ ptr: new(Top),
+ out: Top{
+ Level0: 1,
+ Embed0: Embed0{
+ Level1b: 2,
+ Level1c: 3,
+ },
+ Embed0a: &Embed0a{
+ Level1a: 5,
+ Level1b: 6,
+ },
+ Embed0b: &Embed0b{
+ Level1a: 8,
+ Level1b: 9,
+ Level1c: 10,
+ Level1d: 11,
+ Level1e: 12,
+ },
+ Loop: Loop{
+ Loop1: 13,
+ Loop2: 14,
+ },
+ Embed0p: Embed0p{
+ Point: image.Point{X: 15, Y: 16},
+ },
+ Embed0q: Embed0q{
+ Point: Point{Z: 17},
+ },
+ embed: embed{
+ Q: 18,
+ },
+ },
+ },
+ {
+ in: `{"hello": 1}`,
+ ptr: new(Ambig),
+ out: Ambig{First: 1},
+ },
+
+ {
+ in: `{"X": 1,"Y":2}`,
+ ptr: new(S5),
+ out: S5{S8: S8{S9: S9{Y: 2}}},
+ },
+ {
+ in: `{"X": 1,"Y":2}`,
+ ptr: new(S10),
+ out: S10{S13: S13{S8: S8{S9: S9{Y: 2}}}},
+ },
+
+ // invalid UTF-8 is coerced to valid UTF-8.
+ {
+ in: "\"hello\xffworld\"",
+ ptr: new(string),
+ out: "hello\ufffdworld",
+ },
+ {
+ in: "\"hello\xc2\xc2world\"",
+ ptr: new(string),
+ out: "hello\ufffd\ufffdworld",
+ },
+ {
+ in: "\"hello\xc2\xffworld\"",
+ ptr: new(string),
+ out: "hello\ufffd\ufffdworld",
+ },
+ {
+ in: "\"hello\\ud800world\"",
+ ptr: new(string),
+ out: "hello\ufffdworld",
+ },
+ {
+ in: "\"hello\\ud800\\ud800world\"",
+ ptr: new(string),
+ out: "hello\ufffd\ufffdworld",
+ },
+ {
+ in: "\"hello\\ud800\\ud800world\"",
+ ptr: new(string),
+ out: "hello\ufffd\ufffdworld",
+ },
+ {
+ in: "\"hello\xed\xa0\x80\xed\xb0\x80world\"",
+ ptr: new(string),
+ out: "hello\ufffd\ufffd\ufffd\ufffd\ufffd\ufffdworld",
+ },
+
+ // Used to be issue 8305, but time.Time implements encoding.TextUnmarshaler so this works now.
+ {
+ in: `{"2009-11-10T23:00:00Z": "hello world"}`,
+ ptr: &map[time.Time]string{},
+ out: map[time.Time]string{time.Date(2009, 11, 10, 23, 0, 0, 0, time.UTC): "hello world"},
+ },
+
+ // issue 8305
+ {
+ in: `{"2009-11-10T23:00:00Z": "hello world"}`,
+ ptr: &map[Point]string{},
+ err: &UnmarshalTypeError{"object", reflect.TypeOf(map[Point]string{}), 1},
+ },
+ {
+ in: `{"asdf": "hello world"}`,
+ ptr: &map[unmarshaler]string{},
+ err: &UnmarshalTypeError{"object", reflect.TypeOf(map[unmarshaler]string{}), 1},
+ },
+}
+
+func TestMarshal(t *testing.T) {
+ b, err := Marshal(allValue)
+ if err != nil {
+ t.Fatalf("Marshal allValue: %v", err)
+ }
+ if string(b) != allValueCompact {
+ t.Errorf("Marshal allValueCompact")
+ diff(t, b, []byte(allValueCompact))
+ return
+ }
+
+ b, err = Marshal(pallValue)
+ if err != nil {
+ t.Fatalf("Marshal pallValue: %v", err)
+ }
+ if string(b) != pallValueCompact {
+ t.Errorf("Marshal pallValueCompact")
+ diff(t, b, []byte(pallValueCompact))
+ return
+ }
+}
+
+var badUTF8 = []struct {
+ in, out string
+}{
+ {"hello\xffworld", `"hello\ufffdworld"`},
+ {"", `""`},
+ {"\xff", `"\ufffd"`},
+ {"\xff\xff", `"\ufffd\ufffd"`},
+ {"a\xffb", `"a\ufffdb"`},
+ {"\xe6\x97\xa5\xe6\x9c\xac\xff\xaa\x9e", `"日本\ufffd\ufffd\ufffd"`},
+}
+
+func TestMarshalBadUTF8(t *testing.T) {
+ for _, tt := range badUTF8 {
+ b, err := Marshal(tt.in)
+ if string(b) != tt.out || err != nil {
+ t.Errorf("Marshal(%q) = %#q, %v, want %#q, nil", tt.in, b, err, tt.out)
+ }
+ }
+}
+
+func TestMarshalNumberZeroVal(t *testing.T) {
+ var n Number
+ out, err := Marshal(n)
+ if err != nil {
+ t.Fatal(err)
+ }
+ outStr := string(out)
+ if outStr != "0" {
+ t.Fatalf("Invalid zero val for Number: %q", outStr)
+ }
+}
+
+func TestMarshalEmbeds(t *testing.T) {
+ top := &Top{
+ Level0: 1,
+ Embed0: Embed0{
+ Level1b: 2,
+ Level1c: 3,
+ },
+ Embed0a: &Embed0a{
+ Level1a: 5,
+ Level1b: 6,
+ },
+ Embed0b: &Embed0b{
+ Level1a: 8,
+ Level1b: 9,
+ Level1c: 10,
+ Level1d: 11,
+ Level1e: 12,
+ },
+ Loop: Loop{
+ Loop1: 13,
+ Loop2: 14,
+ },
+ Embed0p: Embed0p{
+ Point: image.Point{X: 15, Y: 16},
+ },
+ Embed0q: Embed0q{
+ Point: Point{Z: 17},
+ },
+ embed: embed{
+ Q: 18,
+ },
+ }
+ b, err := Marshal(top)
+ if err != nil {
+ t.Fatal(err)
+ }
+ want := "{\"Level0\":1,\"Level1b\":2,\"Level1c\":3,\"Level1a\":5,\"LEVEL1B\":6,\"e\":{\"Level1a\":8,\"Level1b\":9,\"Level1c\":10,\"Level1d\":11,\"x\":12},\"Loop1\":13,\"Loop2\":14,\"X\":15,\"Y\":16,\"Z\":17,\"Q\":18}"
+ if string(b) != want {
+ t.Errorf("Wrong marshal result.\n got: %q\nwant: %q", b, want)
+ }
+}
+
+func TestUnmarshal(t *testing.T) {
+ for i, tt := range unmarshalTests {
+ var scan scanner
+ in := []byte(tt.in)
+ if err := checkValid(in, &scan); err != nil {
+ if !reflect.DeepEqual(err, tt.err) {
+ t.Errorf("#%d: checkValid: %#v", i, err)
+ continue
+ }
+ }
+ if tt.ptr == nil {
+ continue
+ }
+
+ // v = new(right-type)
+ v := reflect.New(reflect.TypeOf(tt.ptr).Elem())
+ dec := NewDecoder(bytes.NewReader(in))
+ if tt.useNumber {
+ dec.UseNumber()
+ }
+ if err := dec.Decode(v.Interface()); !reflect.DeepEqual(err, tt.err) {
+ t.Errorf("#%d: %v, want %v", i, err, tt.err)
+ continue
+ } else if err != nil {
+ continue
+ }
+ if !reflect.DeepEqual(v.Elem().Interface(), tt.out) {
+ t.Errorf("#%d: mismatch\nhave: %#+v\nwant: %#+v", i, v.Elem().Interface(), tt.out)
+ data, _ := Marshal(v.Elem().Interface())
+ println(string(data))
+ data, _ = Marshal(tt.out)
+ println(string(data))
+ continue
+ }
+
+ // Check round trip.
+ if tt.err == nil {
+ enc, err := Marshal(v.Interface())
+ if err != nil {
+ t.Errorf("#%d: error re-marshaling: %v", i, err)
+ continue
+ }
+ vv := reflect.New(reflect.TypeOf(tt.ptr).Elem())
+ dec = NewDecoder(bytes.NewReader(enc))
+ if tt.useNumber {
+ dec.UseNumber()
+ }
+ if err := dec.Decode(vv.Interface()); err != nil {
+ t.Errorf("#%d: error re-unmarshaling %#q: %v", i, enc, err)
+ continue
+ }
+ if !reflect.DeepEqual(v.Elem().Interface(), vv.Elem().Interface()) {
+ t.Errorf("#%d: mismatch\nhave: %#+v\nwant: %#+v", i, v.Elem().Interface(), vv.Elem().Interface())
+ t.Errorf(" In: %q", strings.Map(noSpace, string(in)))
+ t.Errorf("Marshal: %q", strings.Map(noSpace, string(enc)))
+ continue
+ }
+ }
+ }
+}
+
+func TestUnmarshalMarshal(t *testing.T) {
+ initBig()
+ var v interface{}
+ if err := Unmarshal(jsonBig, &v); err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ b, err := Marshal(v)
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+ if !bytes.Equal(jsonBig, b) {
+ t.Errorf("Marshal jsonBig")
+ diff(t, b, jsonBig)
+ return
+ }
+}
+
+var numberTests = []struct {
+ in string
+ i int64
+ intErr string
+ f float64
+ floatErr string
+}{
+ {in: "-1.23e1", intErr: "strconv.ParseInt: parsing \"-1.23e1\": invalid syntax", f: -1.23e1},
+ {in: "-12", i: -12, f: -12.0},
+ {in: "1e1000", intErr: "strconv.ParseInt: parsing \"1e1000\": invalid syntax", floatErr: "strconv.ParseFloat: parsing \"1e1000\": value out of range"},
+}
+
+// Independent of Decode, basic coverage of the accessors in Number
+func TestNumberAccessors(t *testing.T) {
+ for _, tt := range numberTests {
+ n := Number(tt.in)
+ if s := n.String(); s != tt.in {
+ t.Errorf("Number(%q).String() is %q", tt.in, s)
+ }
+ if i, err := n.Int64(); err == nil && tt.intErr == "" && i != tt.i {
+ t.Errorf("Number(%q).Int64() is %d", tt.in, i)
+ } else if (err == nil && tt.intErr != "") || (err != nil && err.Error() != tt.intErr) {
+ t.Errorf("Number(%q).Int64() wanted error %q but got: %v", tt.in, tt.intErr, err)
+ }
+ if f, err := n.Float64(); err == nil && tt.floatErr == "" && f != tt.f {
+ t.Errorf("Number(%q).Float64() is %g", tt.in, f)
+ } else if (err == nil && tt.floatErr != "") || (err != nil && err.Error() != tt.floatErr) {
+ t.Errorf("Number(%q).Float64() wanted error %q but got: %v", tt.in, tt.floatErr, err)
+ }
+ }
+}
+
+func TestLargeByteSlice(t *testing.T) {
+ s0 := make([]byte, 2000)
+ for i := range s0 {
+ s0[i] = byte(i)
+ }
+ b, err := Marshal(s0)
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+ var s1 []byte
+ if err := Unmarshal(b, &s1); err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if !bytes.Equal(s0, s1) {
+ t.Errorf("Marshal large byte slice")
+ diff(t, s0, s1)
+ }
+}
+
+type Xint struct {
+ X int
+}
+
+func TestUnmarshalInterface(t *testing.T) {
+ var xint Xint
+ var i interface{} = &xint
+ if err := Unmarshal([]byte(`{"X":1}`), &i); err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if xint.X != 1 {
+ t.Fatalf("Did not write to xint")
+ }
+}
+
+func TestUnmarshalPtrPtr(t *testing.T) {
+ var xint Xint
+ pxint := &xint
+ if err := Unmarshal([]byte(`{"X":1}`), &pxint); err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if xint.X != 1 {
+ t.Fatalf("Did not write to xint")
+ }
+}
+
+func TestEscape(t *testing.T) {
+ const input = `"foobar"<html>` + " [\u2028 \u2029]"
+ const expected = `"\"foobar\"\u003chtml\u003e [\u2028 \u2029]"`
+ b, err := Marshal(input)
+ if err != nil {
+ t.Fatalf("Marshal error: %v", err)
+ }
+ if s := string(b); s != expected {
+ t.Errorf("Encoding of [%s]:\n got [%s]\nwant [%s]", input, s, expected)
+ }
+}
+
+// WrongString is a struct that's misusing the ,string modifier.
+type WrongString struct {
+ Message string `json:"result,string"`
+}
+
+type wrongStringTest struct {
+ in, err string
+}
+
+var wrongStringTests = []wrongStringTest{
+ {`{"result":"x"}`, `json: invalid use of ,string struct tag, trying to unmarshal "x" into string`},
+ {`{"result":"foo"}`, `json: invalid use of ,string struct tag, trying to unmarshal "foo" into string`},
+ {`{"result":"123"}`, `json: invalid use of ,string struct tag, trying to unmarshal "123" into string`},
+ {`{"result":123}`, `json: invalid use of ,string struct tag, trying to unmarshal unquoted value into string`},
+}
+
+// If people misuse the ,string modifier, the error message should be
+// helpful, telling the user that they're doing it wrong.
+func TestErrorMessageFromMisusedString(t *testing.T) {
+ for n, tt := range wrongStringTests {
+ r := strings.NewReader(tt.in)
+ var s WrongString
+ err := NewDecoder(r).Decode(&s)
+ got := fmt.Sprintf("%v", err)
+ if got != tt.err {
+ t.Errorf("%d. got err = %q, want %q", n, got, tt.err)
+ }
+ }
+}
+
+func noSpace(c rune) rune {
+ if isSpace(byte(c)) { //only used for ascii
+ return -1
+ }
+ return c
+}
+
+type All struct {
+ Bool bool
+ Int int
+ Int8 int8
+ Int16 int16
+ Int32 int32
+ Int64 int64
+ Uint uint
+ Uint8 uint8
+ Uint16 uint16
+ Uint32 uint32
+ Uint64 uint64
+ Uintptr uintptr
+ Float32 float32
+ Float64 float64
+
+ Foo string `json:"bar"`
+ Foo2 string `json:"bar2,dummyopt"`
+
+ IntStr int64 `json:",string"`
+
+ PBool *bool
+ PInt *int
+ PInt8 *int8
+ PInt16 *int16
+ PInt32 *int32
+ PInt64 *int64
+ PUint *uint
+ PUint8 *uint8
+ PUint16 *uint16
+ PUint32 *uint32
+ PUint64 *uint64
+ PUintptr *uintptr
+ PFloat32 *float32
+ PFloat64 *float64
+
+ String string
+ PString *string
+
+ Map map[string]Small
+ MapP map[string]*Small
+ PMap *map[string]Small
+ PMapP *map[string]*Small
+
+ EmptyMap map[string]Small
+ NilMap map[string]Small
+
+ Slice []Small
+ SliceP []*Small
+ PSlice *[]Small
+ PSliceP *[]*Small
+
+ EmptySlice []Small
+ NilSlice []Small
+
+ StringSlice []string
+ ByteSlice []byte
+
+ Small Small
+ PSmall *Small
+ PPSmall **Small
+
+ Interface interface{}
+ PInterface *interface{}
+
+ unexported int
+}
+
+type Small struct {
+ Tag string
+}
+
+var allValue = All{
+ Bool: true,
+ Int: 2,
+ Int8: 3,
+ Int16: 4,
+ Int32: 5,
+ Int64: 6,
+ Uint: 7,
+ Uint8: 8,
+ Uint16: 9,
+ Uint32: 10,
+ Uint64: 11,
+ Uintptr: 12,
+ Float32: 14.1,
+ Float64: 15.1,
+ Foo: "foo",
+ Foo2: "foo2",
+ IntStr: 42,
+ String: "16",
+ Map: map[string]Small{
+ "17": {Tag: "tag17"},
+ "18": {Tag: "tag18"},
+ },
+ MapP: map[string]*Small{
+ "19": {Tag: "tag19"},
+ "20": nil,
+ },
+ EmptyMap: map[string]Small{},
+ Slice: []Small{{Tag: "tag20"}, {Tag: "tag21"}},
+ SliceP: []*Small{{Tag: "tag22"}, nil, {Tag: "tag23"}},
+ EmptySlice: []Small{},
+ StringSlice: []string{"str24", "str25", "str26"},
+ ByteSlice: []byte{27, 28, 29},
+ Small: Small{Tag: "tag30"},
+ PSmall: &Small{Tag: "tag31"},
+ Interface: 5.2,
+}
+
+var pallValue = All{
+ PBool: &allValue.Bool,
+ PInt: &allValue.Int,
+ PInt8: &allValue.Int8,
+ PInt16: &allValue.Int16,
+ PInt32: &allValue.Int32,
+ PInt64: &allValue.Int64,
+ PUint: &allValue.Uint,
+ PUint8: &allValue.Uint8,
+ PUint16: &allValue.Uint16,
+ PUint32: &allValue.Uint32,
+ PUint64: &allValue.Uint64,
+ PUintptr: &allValue.Uintptr,
+ PFloat32: &allValue.Float32,
+ PFloat64: &allValue.Float64,
+ PString: &allValue.String,
+ PMap: &allValue.Map,
+ PMapP: &allValue.MapP,
+ PSlice: &allValue.Slice,
+ PSliceP: &allValue.SliceP,
+ PPSmall: &allValue.PSmall,
+ PInterface: &allValue.Interface,
+}
+
+var allValueIndent = `{
+ "Bool": true,
+ "Int": 2,
+ "Int8": 3,
+ "Int16": 4,
+ "Int32": 5,
+ "Int64": 6,
+ "Uint": 7,
+ "Uint8": 8,
+ "Uint16": 9,
+ "Uint32": 10,
+ "Uint64": 11,
+ "Uintptr": 12,
+ "Float32": 14.1,
+ "Float64": 15.1,
+ "bar": "foo",
+ "bar2": "foo2",
+ "IntStr": "42",
+ "PBool": null,
+ "PInt": null,
+ "PInt8": null,
+ "PInt16": null,
+ "PInt32": null,
+ "PInt64": null,
+ "PUint": null,
+ "PUint8": null,
+ "PUint16": null,
+ "PUint32": null,
+ "PUint64": null,
+ "PUintptr": null,
+ "PFloat32": null,
+ "PFloat64": null,
+ "String": "16",
+ "PString": null,
+ "Map": {
+ "17": {
+ "Tag": "tag17"
+ },
+ "18": {
+ "Tag": "tag18"
+ }
+ },
+ "MapP": {
+ "19": {
+ "Tag": "tag19"
+ },
+ "20": null
+ },
+ "PMap": null,
+ "PMapP": null,
+ "EmptyMap": {},
+ "NilMap": null,
+ "Slice": [
+ {
+ "Tag": "tag20"
+ },
+ {
+ "Tag": "tag21"
+ }
+ ],
+ "SliceP": [
+ {
+ "Tag": "tag22"
+ },
+ null,
+ {
+ "Tag": "tag23"
+ }
+ ],
+ "PSlice": null,
+ "PSliceP": null,
+ "EmptySlice": [],
+ "NilSlice": null,
+ "StringSlice": [
+ "str24",
+ "str25",
+ "str26"
+ ],
+ "ByteSlice": "Gxwd",
+ "Small": {
+ "Tag": "tag30"
+ },
+ "PSmall": {
+ "Tag": "tag31"
+ },
+ "PPSmall": null,
+ "Interface": 5.2,
+ "PInterface": null
+}`
+
+var allValueCompact = strings.Map(noSpace, allValueIndent)
+
+var pallValueIndent = `{
+ "Bool": false,
+ "Int": 0,
+ "Int8": 0,
+ "Int16": 0,
+ "Int32": 0,
+ "Int64": 0,
+ "Uint": 0,
+ "Uint8": 0,
+ "Uint16": 0,
+ "Uint32": 0,
+ "Uint64": 0,
+ "Uintptr": 0,
+ "Float32": 0,
+ "Float64": 0,
+ "bar": "",
+ "bar2": "",
+ "IntStr": "0",
+ "PBool": true,
+ "PInt": 2,
+ "PInt8": 3,
+ "PInt16": 4,
+ "PInt32": 5,
+ "PInt64": 6,
+ "PUint": 7,
+ "PUint8": 8,
+ "PUint16": 9,
+ "PUint32": 10,
+ "PUint64": 11,
+ "PUintptr": 12,
+ "PFloat32": 14.1,
+ "PFloat64": 15.1,
+ "String": "",
+ "PString": "16",
+ "Map": null,
+ "MapP": null,
+ "PMap": {
+ "17": {
+ "Tag": "tag17"
+ },
+ "18": {
+ "Tag": "tag18"
+ }
+ },
+ "PMapP": {
+ "19": {
+ "Tag": "tag19"
+ },
+ "20": null
+ },
+ "EmptyMap": null,
+ "NilMap": null,
+ "Slice": null,
+ "SliceP": null,
+ "PSlice": [
+ {
+ "Tag": "tag20"
+ },
+ {
+ "Tag": "tag21"
+ }
+ ],
+ "PSliceP": [
+ {
+ "Tag": "tag22"
+ },
+ null,
+ {
+ "Tag": "tag23"
+ }
+ ],
+ "EmptySlice": null,
+ "NilSlice": null,
+ "StringSlice": null,
+ "ByteSlice": null,
+ "Small": {
+ "Tag": ""
+ },
+ "PSmall": null,
+ "PPSmall": {
+ "Tag": "tag31"
+ },
+ "Interface": null,
+ "PInterface": 5.2
+}`
+
+var pallValueCompact = strings.Map(noSpace, pallValueIndent)
+
+func TestRefUnmarshal(t *testing.T) {
+ type S struct {
+ // Ref is defined in encode_test.go.
+ R0 Ref
+ R1 *Ref
+ R2 RefText
+ R3 *RefText
+ }
+ want := S{
+ R0: 12,
+ R1: new(Ref),
+ R2: 13,
+ R3: new(RefText),
+ }
+ *want.R1 = 12
+ *want.R3 = 13
+
+ var got S
+ if err := Unmarshal([]byte(`{"R0":"ref","R1":"ref","R2":"ref","R3":"ref"}`), &got); err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if !reflect.DeepEqual(got, want) {
+ t.Errorf("got %+v, want %+v", got, want)
+ }
+}
+
+// Test that the empty string doesn't panic decoding when ,string is specified
+// Issue 3450
+func TestEmptyString(t *testing.T) {
+ type T2 struct {
+ Number1 int `json:",string"`
+ Number2 int `json:",string"`
+ }
+ data := `{"Number1":"1", "Number2":""}`
+ dec := NewDecoder(strings.NewReader(data))
+ var t2 T2
+ err := dec.Decode(&t2)
+ if err == nil {
+ t.Fatal("Decode: did not return error")
+ }
+ if t2.Number1 != 1 {
+ t.Fatal("Decode: did not set Number1")
+ }
+}
+
+// Test that a null for ,string is not replaced with the previous quoted string (issue 7046).
+// It should also not be an error (issue 2540, issue 8587).
+func TestNullString(t *testing.T) {
+ type T struct {
+ A int `json:",string"`
+ B int `json:",string"`
+ C *int `json:",string"`
+ }
+ data := []byte(`{"A": "1", "B": null, "C": null}`)
+ var s T
+ s.B = 1
+ s.C = new(int)
+ *s.C = 2
+ err := Unmarshal(data, &s)
+ if err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if s.B != 1 || s.C != nil {
+ t.Fatalf("after Unmarshal, s.B=%d, s.C=%p, want 1, nil", s.B, s.C)
+ }
+}
+
+func intp(x int) *int {
+ p := new(int)
+ *p = x
+ return p
+}
+
+func intpp(x *int) **int {
+ pp := new(*int)
+ *pp = x
+ return pp
+}
+
+var interfaceSetTests = []struct {
+ pre interface{}
+ json string
+ post interface{}
+}{
+ {"foo", `"bar"`, "bar"},
+ {"foo", `2`, 2.0},
+ {"foo", `true`, true},
+ {"foo", `null`, nil},
+
+ {nil, `null`, nil},
+ {new(int), `null`, nil},
+ {(*int)(nil), `null`, nil},
+ {new(*int), `null`, new(*int)},
+ {(**int)(nil), `null`, nil},
+ {intp(1), `null`, nil},
+ {intpp(nil), `null`, intpp(nil)},
+ {intpp(intp(1)), `null`, intpp(nil)},
+}
+
+func TestInterfaceSet(t *testing.T) {
+ for _, tt := range interfaceSetTests {
+ b := struct{ X interface{} }{tt.pre}
+ blob := `{"X":` + tt.json + `}`
+ if err := Unmarshal([]byte(blob), &b); err != nil {
+ t.Errorf("Unmarshal %#q: %v", blob, err)
+ continue
+ }
+ if !reflect.DeepEqual(b.X, tt.post) {
+ t.Errorf("Unmarshal %#q into %#v: X=%#v, want %#v", blob, tt.pre, b.X, tt.post)
+ }
+ }
+}
+
+// JSON null values should be ignored for primitives and string values instead of resulting in an error.
+// Issue 2540
+func TestUnmarshalNulls(t *testing.T) {
+ jsonData := []byte(`{
+ "Bool" : null,
+ "Int" : null,
+ "Int8" : null,
+ "Int16" : null,
+ "Int32" : null,
+ "Int64" : null,
+ "Uint" : null,
+ "Uint8" : null,
+ "Uint16" : null,
+ "Uint32" : null,
+ "Uint64" : null,
+ "Float32" : null,
+ "Float64" : null,
+ "String" : null}`)
+
+ nulls := All{
+ Bool: true,
+ Int: 2,
+ Int8: 3,
+ Int16: 4,
+ Int32: 5,
+ Int64: 6,
+ Uint: 7,
+ Uint8: 8,
+ Uint16: 9,
+ Uint32: 10,
+ Uint64: 11,
+ Float32: 12.1,
+ Float64: 13.1,
+ String: "14"}
+
+ err := Unmarshal(jsonData, &nulls)
+ if err != nil {
+ t.Errorf("Unmarshal of null values failed: %v", err)
+ }
+ if !nulls.Bool || nulls.Int != 2 || nulls.Int8 != 3 || nulls.Int16 != 4 || nulls.Int32 != 5 || nulls.Int64 != 6 ||
+ nulls.Uint != 7 || nulls.Uint8 != 8 || nulls.Uint16 != 9 || nulls.Uint32 != 10 || nulls.Uint64 != 11 ||
+ nulls.Float32 != 12.1 || nulls.Float64 != 13.1 || nulls.String != "14" {
+
+ t.Errorf("Unmarshal of null values affected primitives")
+ }
+}
+
+func TestStringKind(t *testing.T) {
+ type stringKind string
+
+ var m1, m2 map[stringKind]int
+ m1 = map[stringKind]int{
+ "foo": 42,
+ }
+
+ data, err := Marshal(m1)
+ if err != nil {
+ t.Errorf("Unexpected error marshaling: %v", err)
+ }
+
+ err = Unmarshal(data, &m2)
+ if err != nil {
+ t.Errorf("Unexpected error unmarshaling: %v", err)
+ }
+
+ if !reflect.DeepEqual(m1, m2) {
+ t.Error("Items should be equal after encoding and then decoding")
+ }
+}
+
+// Custom types with []byte as underlying type could not be marshalled
+// and then unmarshalled.
+// Issue 8962.
+func TestByteKind(t *testing.T) {
+ type byteKind []byte
+
+ a := byteKind("hello")
+
+ data, err := Marshal(a)
+ if err != nil {
+ t.Error(err)
+ }
+ var b byteKind
+ err = Unmarshal(data, &b)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !reflect.DeepEqual(a, b) {
+ t.Errorf("expected %v == %v", a, b)
+ }
+}
+
+// The fix for issue 8962 introduced a regression.
+// Issue 12921.
+func TestSliceOfCustomByte(t *testing.T) {
+ type Uint8 uint8
+
+ a := []Uint8("hello")
+
+ data, err := Marshal(a)
+ if err != nil {
+ t.Fatal(err)
+ }
+ var b []Uint8
+ err = Unmarshal(data, &b)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !reflect.DeepEqual(a, b) {
+ t.Fatalf("expected %v == %v", a, b)
+ }
+}
+
+var decodeTypeErrorTests = []struct {
+ dest interface{}
+ src string
+}{
+ {new(string), `{"user": "name"}`}, // issue 4628.
+ {new(error), `{}`}, // issue 4222
+ {new(error), `[]`},
+ {new(error), `""`},
+ {new(error), `123`},
+ {new(error), `true`},
+}
+
+func TestUnmarshalTypeError(t *testing.T) {
+ for _, item := range decodeTypeErrorTests {
+ err := Unmarshal([]byte(item.src), item.dest)
+ if _, ok := err.(*UnmarshalTypeError); !ok {
+ t.Errorf("expected type error for Unmarshal(%q, type %T): got %T",
+ item.src, item.dest, err)
+ }
+ }
+}
+
+var unmarshalSyntaxTests = []string{
+ "tru",
+ "fals",
+ "nul",
+ "123e",
+ `"hello`,
+ `[1,2,3`,
+ `{"key":1`,
+ `{"key":1,`,
+}
+
+func TestUnmarshalSyntax(t *testing.T) {
+ var x interface{}
+ for _, src := range unmarshalSyntaxTests {
+ err := Unmarshal([]byte(src), &x)
+ if _, ok := err.(*SyntaxError); !ok {
+ t.Errorf("expected syntax error for Unmarshal(%q): got %T", src, err)
+ }
+ }
+}
+
+// Test handling of unexported fields that should be ignored.
+// Issue 4660
+type unexportedFields struct {
+ Name string
+ m map[string]interface{} `json:"-"`
+ m2 map[string]interface{} `json:"abcd"`
+}
+
+func TestUnmarshalUnexported(t *testing.T) {
+ input := `{"Name": "Bob", "m": {"x": 123}, "m2": {"y": 456}, "abcd": {"z": 789}}`
+ want := &unexportedFields{Name: "Bob"}
+
+ out := &unexportedFields{}
+ err := Unmarshal([]byte(input), out)
+ if err != nil {
+ t.Errorf("got error %v, expected nil", err)
+ }
+ if !reflect.DeepEqual(out, want) {
+ t.Errorf("got %q, want %q", out, want)
+ }
+}
+
+// Time3339 is a time.Time which encodes to and from JSON
+// as an RFC 3339 time in UTC.
+type Time3339 time.Time
+
+func (t *Time3339) UnmarshalJSON(b []byte) error {
+ if len(b) < 2 || b[0] != '"' || b[len(b)-1] != '"' {
+ return fmt.Errorf("types: failed to unmarshal non-string value %q as an RFC 3339 time", b)
+ }
+ tm, err := time.Parse(time.RFC3339, string(b[1:len(b)-1]))
+ if err != nil {
+ return err
+ }
+ *t = Time3339(tm)
+ return nil
+}
+
+func TestUnmarshalJSONLiteralError(t *testing.T) {
+ var t3 Time3339
+ err := Unmarshal([]byte(`"0000-00-00T00:00:00Z"`), &t3)
+ if err == nil {
+ t.Fatalf("expected error; got time %v", time.Time(t3))
+ }
+ if !strings.Contains(err.Error(), "range") {
+ t.Errorf("got err = %v; want out of range error", err)
+ }
+}
+
+// Test that extra object elements in an array do not result in a
+// "data changing underfoot" error.
+// Issue 3717
+func TestSkipArrayObjects(t *testing.T) {
+ json := `[{}]`
+ var dest [0]interface{}
+
+ err := Unmarshal([]byte(json), &dest)
+ if err != nil {
+ t.Errorf("got error %q, want nil", err)
+ }
+}
+
+// Test semantics of pre-filled struct fields and pre-filled map fields.
+// Issue 4900.
+func TestPrefilled(t *testing.T) {
+ ptrToMap := func(m map[string]interface{}) *map[string]interface{} { return &m }
+
+ // Values here change, cannot reuse table across runs.
+ var prefillTests = []struct {
+ in string
+ ptr interface{}
+ out interface{}
+ }{
+ {
+ in: `{"X": 1, "Y": 2}`,
+ ptr: &XYZ{X: float32(3), Y: int16(4), Z: 1.5},
+ out: &XYZ{X: float64(1), Y: float64(2), Z: 1.5},
+ },
+ {
+ in: `{"X": 1, "Y": 2}`,
+ ptr: ptrToMap(map[string]interface{}{"X": float32(3), "Y": int16(4), "Z": 1.5}),
+ out: ptrToMap(map[string]interface{}{"X": float64(1), "Y": float64(2), "Z": 1.5}),
+ },
+ }
+
+ for _, tt := range prefillTests {
+ ptrstr := fmt.Sprintf("%v", tt.ptr)
+ err := Unmarshal([]byte(tt.in), tt.ptr) // tt.ptr edited here
+ if err != nil {
+ t.Errorf("Unmarshal: %v", err)
+ }
+ if !reflect.DeepEqual(tt.ptr, tt.out) {
+ t.Errorf("Unmarshal(%#q, %s): have %v, want %v", tt.in, ptrstr, tt.ptr, tt.out)
+ }
+ }
+}
+
+var invalidUnmarshalTests = []struct {
+ v interface{}
+ want string
+}{
+ {nil, "json: Unmarshal(nil)"},
+ {struct{}{}, "json: Unmarshal(non-pointer struct {})"},
+ {(*int)(nil), "json: Unmarshal(nil *int)"},
+}
+
+func TestInvalidUnmarshal(t *testing.T) {
+ buf := []byte(`{"a":"1"}`)
+ for _, tt := range invalidUnmarshalTests {
+ err := Unmarshal(buf, tt.v)
+ if err == nil {
+ t.Errorf("Unmarshal expecting error, got nil")
+ continue
+ }
+ if got := err.Error(); got != tt.want {
+ t.Errorf("Unmarshal = %q; want %q", got, tt.want)
+ }
+ }
+}
+
+var invalidUnmarshalTextTests = []struct {
+ v interface{}
+ want string
+}{
+ {nil, "json: Unmarshal(nil)"},
+ {struct{}{}, "json: Unmarshal(non-pointer struct {})"},
+ {(*int)(nil), "json: Unmarshal(nil *int)"},
+ {new(net.IP), "json: cannot unmarshal string into Go value of type *net.IP"},
+}
+
+func TestInvalidUnmarshalText(t *testing.T) {
+ buf := []byte(`123`)
+ for _, tt := range invalidUnmarshalTextTests {
+ err := Unmarshal(buf, tt.v)
+ if err == nil {
+ t.Errorf("Unmarshal expecting error, got nil")
+ continue
+ }
+ if got := err.Error(); got != tt.want {
+ t.Errorf("Unmarshal = %q; want %q", got, tt.want)
+ }
+ }
+}
+
+// Test that string option is ignored for invalid types.
+// Issue 9812.
+func TestInvalidStringOption(t *testing.T) {
+ num := 0
+ item := struct {
+ T time.Time `json:",string"`
+ M map[string]string `json:",string"`
+ S []string `json:",string"`
+ A [1]string `json:",string"`
+ I interface{} `json:",string"`
+ P *int `json:",string"`
+ }{M: make(map[string]string), S: make([]string, 0), I: num, P: &num}
+
+ data, err := Marshal(item)
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+
+ err = Unmarshal(data, &item)
+ if err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/encode.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/encode.go
new file mode 100644
index 00000000000..67a0f0062ba
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/encode.go
@@ -0,0 +1,1256 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package json implements encoding and decoding of JSON as defined in
+// RFC 4627. The mapping between JSON and Go values is described
+// in the documentation for the Marshal and Unmarshal functions.
+//
+// See "JSON and Go" for an introduction to this package:
+// https://golang.org/doc/articles/json_and_go.html
+package json
+
+import (
+ "bytes"
+ "encoding"
+ "encoding/base64"
+ "fmt"
+ "math"
+ "reflect"
+ "runtime"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+)
+
+// Marshal returns the JSON encoding of v.
+//
+// Marshal traverses the value v recursively.
+// If an encountered value implements the Marshaler interface
+// and is not a nil pointer, Marshal calls its MarshalJSON method
+// to produce JSON. If no MarshalJSON method is present but the
+// value implements encoding.TextMarshaler instead, Marshal calls
+// its MarshalText method.
+// The nil pointer exception is not strictly necessary
+// but mimics a similar, necessary exception in the behavior of
+// UnmarshalJSON.
+//
+// Otherwise, Marshal uses the following type-dependent default encodings:
+//
+// Boolean values encode as JSON booleans.
+//
+// Floating point, integer, and Number values encode as JSON numbers.
+//
+// String values encode as JSON strings coerced to valid UTF-8,
+// replacing invalid bytes with the Unicode replacement rune.
+// The angle brackets "<" and ">" are escaped to "\u003c" and "\u003e"
+// to keep some browsers from misinterpreting JSON output as HTML.
+// Ampersand "&" is also escaped to "\u0026" for the same reason.
+// This escaping can be disabled using an Encoder with DisableHTMLEscaping.
+//
+// Array and slice values encode as JSON arrays, except that
+// []byte encodes as a base64-encoded string, and a nil slice
+// encodes as the null JSON value.
+//
+// Struct values encode as JSON objects. Each exported struct field
+// becomes a member of the object unless
+// - the field's tag is "-", or
+// - the field is empty and its tag specifies the "omitempty" option.
+// The empty values are false, 0, any
+// nil pointer or interface value, and any array, slice, map, or string of
+// length zero. The object's default key string is the struct field name
+// but can be specified in the struct field's tag value. The "json" key in
+// the struct field's tag value is the key name, followed by an optional comma
+// and options. Examples:
+//
+// // Field is ignored by this package.
+// Field int `json:"-"`
+//
+// // Field appears in JSON as key "myName".
+// Field int `json:"myName"`
+//
+// // Field appears in JSON as key "myName" and
+// // the field is omitted from the object if its value is empty,
+// // as defined above.
+// Field int `json:"myName,omitempty"`
+//
+// // Field appears in JSON as key "Field" (the default), but
+// // the field is skipped if empty.
+// // Note the leading comma.
+// Field int `json:",omitempty"`
+//
+// The "string" option signals that a field is stored as JSON inside a
+// JSON-encoded string. It applies only to fields of string, floating point,
+// integer, or boolean types. This extra level of encoding is sometimes used
+// when communicating with JavaScript programs:
+//
+// Int64String int64 `json:",string"`
+//
+// The key name will be used if it's a non-empty string consisting of
+// only Unicode letters, digits, and ASCII punctuation except quotation
+// marks, backslash, and comma.
+//
+// Anonymous struct fields are usually marshaled as if their inner exported fields
+// were fields in the outer struct, subject to the usual Go visibility rules amended
+// as described in the next paragraph.
+// An anonymous struct field with a name given in its JSON tag is treated as
+// having that name, rather than being anonymous.
+// An anonymous struct field of interface type is treated the same as having
+// that type as its name, rather than being anonymous.
+//
+// The Go visibility rules for struct fields are amended for JSON when
+// deciding which field to marshal or unmarshal. If there are
+// multiple fields at the same level, and that level is the least
+// nested (and would therefore be the nesting level selected by the
+// usual Go rules), the following extra rules apply:
+//
+// 1) Of those fields, if any are JSON-tagged, only tagged fields are considered,
+// even if there are multiple untagged fields that would otherwise conflict.
+// 2) If there is exactly one field (tagged or not according to the first rule), that is selected.
+// 3) Otherwise there are multiple fields, and all are ignored; no error occurs.
+//
+// Handling of anonymous struct fields is new in Go 1.1.
+// Prior to Go 1.1, anonymous struct fields were ignored. To force ignoring of
+// an anonymous struct field in both current and earlier versions, give the field
+// a JSON tag of "-".
+//
+// Map values encode as JSON objects. The map's key type must either be a string
+// or implement encoding.TextMarshaler. The map keys are used as JSON object
+// keys, subject to the UTF-8 coercion described for string values above.
+//
+// Pointer values encode as the value pointed to.
+// A nil pointer encodes as the null JSON value.
+//
+// Interface values encode as the value contained in the interface.
+// A nil interface value encodes as the null JSON value.
+//
+// Channel, complex, and function values cannot be encoded in JSON.
+// Attempting to encode such a value causes Marshal to return
+// an UnsupportedTypeError.
+//
+// JSON cannot represent cyclic data structures and Marshal does not
+// handle them. Passing cyclic structures to Marshal will result in
+// an infinite recursion.
+//
+func Marshal(v interface{}) ([]byte, error) {
+ e := &encodeState{}
+ err := e.marshal(v, encOpts{escapeHTML: true})
+ if err != nil {
+ return nil, err
+ }
+ return e.Bytes(), nil
+}
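+
+// For illustration only (the Account type below is hypothetical and not
+// defined anywhere in this package), the tag rules above behave like:
+//
+//   type Account struct {
+//       Name    string  `json:"name"`
+//       Email   string  `json:"email,omitempty"`
+//       Balance float64 `json:"balance,string"`
+//       secret  string  // unexported, never encoded
+//   }
+//
+//   b, _ := Marshal(Account{Name: "a", Balance: 1.5})
+//   // string(b) == `{"name":"a","balance":"1.5"}`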
+
+// MarshalIndent is like Marshal but applies Indent to format the output.
+func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+ b, err := Marshal(v)
+ if err != nil {
+ return nil, err
+ }
+ var buf bytes.Buffer
+ err = Indent(&buf, b, prefix, indent)
+ if err != nil {
+ return nil, err
+ }
+ return buf.Bytes(), nil
+}
+
+// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
+// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
+// so that the JSON will be safe to embed inside HTML <script> tags.
+// For historical reasons, web browsers don't honor standard HTML
+// escaping within <script> tags, so an alternative JSON encoding must
+// be used.
+func HTMLEscape(dst *bytes.Buffer, src []byte) {
+ // The characters can only appear in string literals,
+ // so just scan the string one byte at a time.
+ start := 0
+ for i, c := range src {
+ if c == '<' || c == '>' || c == '&' {
+ if start < i {
+ dst.Write(src[start:i])
+ }
+ dst.WriteString(`\u00`)
+ dst.WriteByte(hex[c>>4])
+ dst.WriteByte(hex[c&0xF])
+ start = i + 1
+ }
+ // Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
+ if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
+ if start < i {
+ dst.Write(src[start:i])
+ }
+ dst.WriteString(`\u202`)
+ dst.WriteByte(hex[src[i+2]&0xF])
+ start = i + 3
+ }
+ }
+ if start < len(src) {
+ dst.Write(src[start:])
+ }
+}
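+
+// For illustration only (the input literal is invented), HTMLEscape rewrites
+// the characters described above as follows:
+//
+//   var buf bytes.Buffer
+//   HTMLEscape(&buf, []byte(`{"msg":"<b>hi</b> & bye"}`))
+//   // buf.String() == `{"msg":"\u003cb\u003ehi\u003c/b\u003e \u0026 bye"}`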
+
+// Marshaler is the interface implemented by types that
+// can marshal themselves into valid JSON.
+type Marshaler interface {
+ MarshalJSON() ([]byte, error)
+}
+
+// An UnsupportedTypeError is returned by Marshal when attempting
+// to encode an unsupported value type.
+type UnsupportedTypeError struct {
+ Type reflect.Type
+}
+
+func (e *UnsupportedTypeError) Error() string {
+ return "json: unsupported type: " + e.Type.String()
+}
+
+type UnsupportedValueError struct {
+ Value reflect.Value
+ Str string
+}
+
+func (e *UnsupportedValueError) Error() string {
+ return "json: unsupported value: " + e.Str
+}
+
+// Before Go 1.2, an InvalidUTF8Error was returned by Marshal when
+// attempting to encode a string value with invalid UTF-8 sequences.
+// As of Go 1.2, Marshal instead coerces the string to valid UTF-8 by
+// replacing invalid bytes with the Unicode replacement rune U+FFFD.
+// This error is no longer generated but is kept for backwards compatibility
+// with programs that might mention it.
+type InvalidUTF8Error struct {
+ S string // the whole string value that caused the error
+}
+
+func (e *InvalidUTF8Error) Error() string {
+ return "json: invalid UTF-8 in string: " + strconv.Quote(e.S)
+}
+
+type MarshalerError struct {
+ Type reflect.Type
+ Err error
+}
+
+func (e *MarshalerError) Error() string {
+ return "json: error calling MarshalJSON for type " + e.Type.String() + ": " + e.Err.Error()
+}
+
+var hex = "0123456789abcdef"
+
+// An encodeState encodes JSON into a bytes.Buffer.
+type encodeState struct {
+ bytes.Buffer // accumulated output
+ scratch [64]byte
+ ext Extension
+}
+
+var encodeStatePool sync.Pool
+
+func newEncodeState() *encodeState {
+ if v := encodeStatePool.Get(); v != nil {
+ e := v.(*encodeState)
+ e.Reset()
+ return e
+ }
+ return new(encodeState)
+}
+
+func (e *encodeState) marshal(v interface{}, opts encOpts) (err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ if _, ok := r.(runtime.Error); ok {
+ panic(r)
+ }
+ if s, ok := r.(string); ok {
+ panic(s)
+ }
+ err = r.(error)
+ }
+ }()
+ e.reflectValue(reflect.ValueOf(v), opts)
+ return nil
+}
+
+func (e *encodeState) error(err error) {
+ panic(err)
+}
+
+func isEmptyValue(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
+ return v.Len() == 0
+ case reflect.Bool:
+ return !v.Bool()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() == 0
+ case reflect.Interface, reflect.Ptr:
+ return v.IsNil()
+ }
+ return false
+}
+
+func (e *encodeState) reflectValue(v reflect.Value, opts encOpts) {
+ valueEncoder(v)(e, v, opts)
+}
+
+type encOpts struct {
+ // quoted causes primitive fields to be encoded inside JSON strings.
+ quoted bool
+ // escapeHTML causes '<', '>', and '&' to be escaped in JSON strings.
+ escapeHTML bool
+}
+
+type encoderFunc func(e *encodeState, v reflect.Value, opts encOpts)
+
+var encoderCache struct {
+ sync.RWMutex
+ m map[reflect.Type]encoderFunc
+}
+
+func valueEncoder(v reflect.Value) encoderFunc {
+ if !v.IsValid() {
+ return invalidValueEncoder
+ }
+ return typeEncoder(v.Type())
+}
+
+func typeEncoder(t reflect.Type) encoderFunc {
+ encoderCache.RLock()
+ f := encoderCache.m[t]
+ encoderCache.RUnlock()
+ if f != nil {
+ return f
+ }
+
+	// To deal with recursive types, populate the map with an
+	// indirect func before we build the real one. This indirect
+	// func waits on the real func (f) to be ready and then calls
+	// it, and is only used for recursive types.
+ encoderCache.Lock()
+ if encoderCache.m == nil {
+ encoderCache.m = make(map[reflect.Type]encoderFunc)
+ }
+ var wg sync.WaitGroup
+ wg.Add(1)
+ encoderCache.m[t] = func(e *encodeState, v reflect.Value, opts encOpts) {
+ wg.Wait()
+ f(e, v, opts)
+ }
+ encoderCache.Unlock()
+
+ // Compute fields without lock.
+ // Might duplicate effort but won't hold other computations back.
+ innerf := newTypeEncoder(t, true)
+ f = func(e *encodeState, v reflect.Value, opts encOpts) {
+ encode, ok := e.ext.encode[v.Type()]
+ if !ok {
+ innerf(e, v, opts)
+ return
+ }
+
+ b, err := encode(v.Interface())
+ if err == nil {
+ // copy JSON into buffer, checking validity.
+ err = compact(&e.Buffer, b, opts.escapeHTML)
+ }
+ if err != nil {
+ e.error(&MarshalerError{v.Type(), err})
+ }
+ }
+ wg.Done()
+ encoderCache.Lock()
+ encoderCache.m[t] = f
+ encoderCache.Unlock()
+ return f
+}
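+
+// For illustration only (the Node type is hypothetical), the placeholder
+// registered above is what lets a self-referential type such as
+//
+//   type Node struct {
+//       Value int
+//       Next  *Node
+//   }
+//
+// be encoded: while the encoder for Node is still being built, the call for
+// the Next field picks the placeholder out of the cache instead of recursing
+// into newTypeEncoder forever, and the placeholder blocks until the real
+// encoder f is ready.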
+
+var (
+ marshalerType = reflect.TypeOf(new(Marshaler)).Elem()
+ textMarshalerType = reflect.TypeOf(new(encoding.TextMarshaler)).Elem()
+)
+
+// newTypeEncoder constructs an encoderFunc for a type.
+// The returned encoder only checks CanAddr when allowAddr is true.
+func newTypeEncoder(t reflect.Type, allowAddr bool) encoderFunc {
+ if t.Implements(marshalerType) {
+ return marshalerEncoder
+ }
+ if t.Kind() != reflect.Ptr && allowAddr {
+ if reflect.PtrTo(t).Implements(marshalerType) {
+ return newCondAddrEncoder(addrMarshalerEncoder, newTypeEncoder(t, false))
+ }
+ }
+
+ if t.Implements(textMarshalerType) {
+ return textMarshalerEncoder
+ }
+ if t.Kind() != reflect.Ptr && allowAddr {
+ if reflect.PtrTo(t).Implements(textMarshalerType) {
+ return newCondAddrEncoder(addrTextMarshalerEncoder, newTypeEncoder(t, false))
+ }
+ }
+
+ switch t.Kind() {
+ case reflect.Bool:
+ return boolEncoder
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return intEncoder
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return uintEncoder
+ case reflect.Float32:
+ return float32Encoder
+ case reflect.Float64:
+ return float64Encoder
+ case reflect.String:
+ return stringEncoder
+ case reflect.Interface:
+ return interfaceEncoder
+ case reflect.Struct:
+ return newStructEncoder(t)
+ case reflect.Map:
+ return newMapEncoder(t)
+ case reflect.Slice:
+ return newSliceEncoder(t)
+ case reflect.Array:
+ return newArrayEncoder(t)
+ case reflect.Ptr:
+ return newPtrEncoder(t)
+ default:
+ return unsupportedTypeEncoder
+ }
+}
+
+func invalidValueEncoder(e *encodeState, v reflect.Value, _ encOpts) {
+ e.WriteString("null")
+}
+
+func marshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) {
+ if v.Kind() == reflect.Ptr && v.IsNil() {
+ e.WriteString("null")
+ return
+ }
+ m := v.Interface().(Marshaler)
+ b, err := m.MarshalJSON()
+ if err == nil {
+ // copy JSON into buffer, checking validity.
+ err = compact(&e.Buffer, b, opts.escapeHTML)
+ }
+ if err != nil {
+ e.error(&MarshalerError{v.Type(), err})
+ }
+}
+
+func addrMarshalerEncoder(e *encodeState, v reflect.Value, _ encOpts) {
+ va := v.Addr()
+ if va.IsNil() {
+ e.WriteString("null")
+ return
+ }
+ m := va.Interface().(Marshaler)
+ b, err := m.MarshalJSON()
+ if err == nil {
+ // copy JSON into buffer, checking validity.
+ err = compact(&e.Buffer, b, true)
+ }
+ if err != nil {
+ e.error(&MarshalerError{v.Type(), err})
+ }
+}
+
+func textMarshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) {
+ if v.Kind() == reflect.Ptr && v.IsNil() {
+ e.WriteString("null")
+ return
+ }
+ m := v.Interface().(encoding.TextMarshaler)
+ b, err := m.MarshalText()
+ if err != nil {
+ e.error(&MarshalerError{v.Type(), err})
+ }
+ e.stringBytes(b, opts.escapeHTML)
+}
+
+func addrTextMarshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) {
+ va := v.Addr()
+ if va.IsNil() {
+ e.WriteString("null")
+ return
+ }
+ m := va.Interface().(encoding.TextMarshaler)
+ b, err := m.MarshalText()
+ if err != nil {
+ e.error(&MarshalerError{v.Type(), err})
+ }
+ e.stringBytes(b, opts.escapeHTML)
+}
+
+func boolEncoder(e *encodeState, v reflect.Value, opts encOpts) {
+ if opts.quoted {
+ e.WriteByte('"')
+ }
+ if v.Bool() {
+ e.WriteString("true")
+ } else {
+ e.WriteString("false")
+ }
+ if opts.quoted {
+ e.WriteByte('"')
+ }
+}
+
+func intEncoder(e *encodeState, v reflect.Value, opts encOpts) {
+ b := strconv.AppendInt(e.scratch[:0], v.Int(), 10)
+ if opts.quoted {
+ e.WriteByte('"')
+ }
+ e.Write(b)
+ if opts.quoted {
+ e.WriteByte('"')
+ }
+}
+
+func uintEncoder(e *encodeState, v reflect.Value, opts encOpts) {
+ b := strconv.AppendUint(e.scratch[:0], v.Uint(), 10)
+ if opts.quoted {
+ e.WriteByte('"')
+ }
+ e.Write(b)
+ if opts.quoted {
+ e.WriteByte('"')
+ }
+}
+
+type floatEncoder int // number of bits
+
+func (bits floatEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
+ f := v.Float()
+ if math.IsInf(f, 0) || math.IsNaN(f) {
+ e.error(&UnsupportedValueError{v, strconv.FormatFloat(f, 'g', -1, int(bits))})
+ }
+ b := strconv.AppendFloat(e.scratch[:0], f, 'g', -1, int(bits))
+ if opts.quoted {
+ e.WriteByte('"')
+ }
+ e.Write(b)
+ if opts.quoted {
+ e.WriteByte('"')
+ }
+}
+
+var (
+ float32Encoder = (floatEncoder(32)).encode
+ float64Encoder = (floatEncoder(64)).encode
+)
+
+func stringEncoder(e *encodeState, v reflect.Value, opts encOpts) {
+ if v.Type() == numberType {
+ numStr := v.String()
+		// In Go 1.5 the empty string encodes to "0". While this is not a valid
+		// number literal, we keep compatibility, so check validity after this.
+ if numStr == "" {
+ numStr = "0" // Number's zero-val
+ }
+ if !isValidNumber(numStr) {
+ e.error(fmt.Errorf("json: invalid number literal %q", numStr))
+ }
+ e.WriteString(numStr)
+ return
+ }
+ if opts.quoted {
+ sb, err := Marshal(v.String())
+ if err != nil {
+ e.error(err)
+ }
+ e.string(string(sb), opts.escapeHTML)
+ } else {
+ e.string(v.String(), opts.escapeHTML)
+ }
+}
+
+func interfaceEncoder(e *encodeState, v reflect.Value, opts encOpts) {
+ if v.IsNil() {
+ e.WriteString("null")
+ return
+ }
+ e.reflectValue(v.Elem(), opts)
+}
+
+func unsupportedTypeEncoder(e *encodeState, v reflect.Value, _ encOpts) {
+ e.error(&UnsupportedTypeError{v.Type()})
+}
+
+type structEncoder struct {
+ fields []field
+ fieldEncs []encoderFunc
+}
+
+func (se *structEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
+ e.WriteByte('{')
+ first := true
+ for i, f := range se.fields {
+ fv := fieldByIndex(v, f.index)
+ if !fv.IsValid() || f.omitEmpty && isEmptyValue(fv) {
+ continue
+ }
+ if first {
+ first = false
+ } else {
+ e.WriteByte(',')
+ }
+ e.string(f.name, opts.escapeHTML)
+ e.WriteByte(':')
+ opts.quoted = f.quoted
+ se.fieldEncs[i](e, fv, opts)
+ }
+ e.WriteByte('}')
+}
+
+func newStructEncoder(t reflect.Type) encoderFunc {
+ fields := cachedTypeFields(t)
+ se := &structEncoder{
+ fields: fields,
+ fieldEncs: make([]encoderFunc, len(fields)),
+ }
+ for i, f := range fields {
+ se.fieldEncs[i] = typeEncoder(typeByIndex(t, f.index))
+ }
+ return se.encode
+}
+
+type mapEncoder struct {
+ elemEnc encoderFunc
+}
+
+func (me *mapEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
+ if v.IsNil() {
+ e.WriteString("null")
+ return
+ }
+ e.WriteByte('{')
+
+ // Extract and sort the keys.
+ keys := v.MapKeys()
+ sv := make([]reflectWithString, len(keys))
+ for i, v := range keys {
+ sv[i].v = v
+ if err := sv[i].resolve(); err != nil {
+ e.error(&MarshalerError{v.Type(), err})
+ }
+ }
+ sort.Sort(byString(sv))
+
+ for i, kv := range sv {
+ if i > 0 {
+ e.WriteByte(',')
+ }
+ e.string(kv.s, opts.escapeHTML)
+ e.WriteByte(':')
+ me.elemEnc(e, v.MapIndex(kv.v), opts)
+ }
+ e.WriteByte('}')
+}
+
+func newMapEncoder(t reflect.Type) encoderFunc {
+ if t.Key().Kind() != reflect.String && !t.Key().Implements(textMarshalerType) {
+ return unsupportedTypeEncoder
+ }
+ me := &mapEncoder{typeEncoder(t.Elem())}
+ return me.encode
+}
+
+func encodeByteSlice(e *encodeState, v reflect.Value, _ encOpts) {
+ if v.IsNil() {
+ e.WriteString("null")
+ return
+ }
+ s := v.Bytes()
+ e.WriteByte('"')
+ if len(s) < 1024 {
+ // for small buffers, using Encode directly is much faster.
+ dst := make([]byte, base64.StdEncoding.EncodedLen(len(s)))
+ base64.StdEncoding.Encode(dst, s)
+ e.Write(dst)
+ } else {
+ // for large buffers, avoid unnecessary extra temporary
+ // buffer space.
+ enc := base64.NewEncoder(base64.StdEncoding, e)
+ enc.Write(s)
+ enc.Close()
+ }
+ e.WriteByte('"')
+}
+
+// sliceEncoder just wraps an arrayEncoder, checking to make sure the value isn't nil.
+type sliceEncoder struct {
+ arrayEnc encoderFunc
+}
+
+func (se *sliceEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
+ if v.IsNil() {
+ e.WriteString("null")
+ return
+ }
+ se.arrayEnc(e, v, opts)
+}
+
+func newSliceEncoder(t reflect.Type) encoderFunc {
+ // Byte slices get special treatment; arrays don't.
+ if t.Elem().Kind() == reflect.Uint8 &&
+ !t.Elem().Implements(marshalerType) &&
+ !t.Elem().Implements(textMarshalerType) {
+ return encodeByteSlice
+ }
+ enc := &sliceEncoder{newArrayEncoder(t)}
+ return enc.encode
+}
+
+type arrayEncoder struct {
+ elemEnc encoderFunc
+}
+
+func (ae *arrayEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
+ e.WriteByte('[')
+ n := v.Len()
+ for i := 0; i < n; i++ {
+ if i > 0 {
+ e.WriteByte(',')
+ }
+ ae.elemEnc(e, v.Index(i), opts)
+ }
+ e.WriteByte(']')
+}
+
+func newArrayEncoder(t reflect.Type) encoderFunc {
+ enc := &arrayEncoder{typeEncoder(t.Elem())}
+ return enc.encode
+}
+
+type ptrEncoder struct {
+ elemEnc encoderFunc
+}
+
+func (pe *ptrEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
+ if v.IsNil() {
+ e.WriteString("null")
+ return
+ }
+ pe.elemEnc(e, v.Elem(), opts)
+}
+
+func newPtrEncoder(t reflect.Type) encoderFunc {
+ enc := &ptrEncoder{typeEncoder(t.Elem())}
+ return enc.encode
+}
+
+type condAddrEncoder struct {
+ canAddrEnc, elseEnc encoderFunc
+}
+
+func (ce *condAddrEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
+ if v.CanAddr() {
+ ce.canAddrEnc(e, v, opts)
+ } else {
+ ce.elseEnc(e, v, opts)
+ }
+}
+
+// newCondAddrEncoder returns an encoder that checks whether its value
+// CanAddr and delegates to canAddrEnc if so, else to elseEnc.
+func newCondAddrEncoder(canAddrEnc, elseEnc encoderFunc) encoderFunc {
+ enc := &condAddrEncoder{canAddrEnc: canAddrEnc, elseEnc: elseEnc}
+ return enc.encode
+}
+
+func isValidTag(s string) bool {
+ if s == "" {
+ return false
+ }
+ for _, c := range s {
+ switch {
+ case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
+ // Backslash and quote chars are reserved, but
+ // otherwise any punctuation chars are allowed
+ // in a tag name.
+ default:
+ if !unicode.IsLetter(c) && !unicode.IsDigit(c) {
+ return false
+ }
+ }
+ }
+ return true
+}
+
+func fieldByIndex(v reflect.Value, index []int) reflect.Value {
+ for _, i := range index {
+ if v.Kind() == reflect.Ptr {
+ if v.IsNil() {
+ return reflect.Value{}
+ }
+ v = v.Elem()
+ }
+ v = v.Field(i)
+ }
+ return v
+}
+
+func typeByIndex(t reflect.Type, index []int) reflect.Type {
+ for _, i := range index {
+ if t.Kind() == reflect.Ptr {
+ t = t.Elem()
+ }
+ t = t.Field(i).Type
+ }
+ return t
+}
+
+type reflectWithString struct {
+ v reflect.Value
+ s string
+}
+
+func (w *reflectWithString) resolve() error {
+ if w.v.Kind() == reflect.String {
+ w.s = w.v.String()
+ return nil
+ }
+ buf, err := w.v.Interface().(encoding.TextMarshaler).MarshalText()
+ w.s = string(buf)
+ return err
+}
+
+// byString is a slice of reflectWithString where the reflect.Value is either
+// a string or an encoding.TextMarshaler.
+// It implements the methods to sort by string.
+type byString []reflectWithString
+
+func (sv byString) Len() int { return len(sv) }
+func (sv byString) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
+func (sv byString) Less(i, j int) bool { return sv[i].s < sv[j].s }
+
+// NOTE: keep in sync with stringBytes below.
+func (e *encodeState) string(s string, escapeHTML bool) int {
+ len0 := e.Len()
+ e.WriteByte('"')
+ start := 0
+ for i := 0; i < len(s); {
+ if b := s[i]; b < utf8.RuneSelf {
+ if 0x20 <= b && b != '\\' && b != '"' &&
+ (!escapeHTML || b != '<' && b != '>' && b != '&') {
+ i++
+ continue
+ }
+ if start < i {
+ e.WriteString(s[start:i])
+ }
+ switch b {
+ case '\\', '"':
+ e.WriteByte('\\')
+ e.WriteByte(b)
+ case '\n':
+ e.WriteByte('\\')
+ e.WriteByte('n')
+ case '\r':
+ e.WriteByte('\\')
+ e.WriteByte('r')
+ case '\t':
+ e.WriteByte('\\')
+ e.WriteByte('t')
+ default:
+ // This encodes bytes < 0x20 except for \t, \n and \r.
+ // If escapeHTML is set, it also escapes <, >, and &
+ // because they can lead to security holes when
+ // user-controlled strings are rendered into JSON
+ // and served to some browsers.
+ e.WriteString(`\u00`)
+ e.WriteByte(hex[b>>4])
+ e.WriteByte(hex[b&0xF])
+ }
+ i++
+ start = i
+ continue
+ }
+ c, size := utf8.DecodeRuneInString(s[i:])
+ if c == utf8.RuneError && size == 1 {
+ if start < i {
+ e.WriteString(s[start:i])
+ }
+ e.WriteString(`\ufffd`)
+ i += size
+ start = i
+ continue
+ }
+ // U+2028 is LINE SEPARATOR.
+ // U+2029 is PARAGRAPH SEPARATOR.
+ // They are both technically valid characters in JSON strings,
+ // but don't work in JSONP, which has to be evaluated as JavaScript,
+ // and can lead to security holes there. It is valid JSON to
+ // escape them, so we do so unconditionally.
+ // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
+ if c == '\u2028' || c == '\u2029' {
+ if start < i {
+ e.WriteString(s[start:i])
+ }
+ e.WriteString(`\u202`)
+ e.WriteByte(hex[c&0xF])
+ i += size
+ start = i
+ continue
+ }
+ i += size
+ }
+ if start < len(s) {
+ e.WriteString(s[start:])
+ }
+ e.WriteByte('"')
+ return e.Len() - len0
+}
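+
+// For illustration only, the unconditional escaping above means:
+//
+//   b, _ := Marshal("a\u2028b")
+//   // string(b) == `"a\u2028b"` (a six-character escape, not the raw separator)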
+
+// NOTE: keep in sync with string above.
+func (e *encodeState) stringBytes(s []byte, escapeHTML bool) int {
+ len0 := e.Len()
+ e.WriteByte('"')
+ start := 0
+ for i := 0; i < len(s); {
+ if b := s[i]; b < utf8.RuneSelf {
+ if 0x20 <= b && b != '\\' && b != '"' &&
+ (!escapeHTML || b != '<' && b != '>' && b != '&') {
+ i++
+ continue
+ }
+ if start < i {
+ e.Write(s[start:i])
+ }
+ switch b {
+ case '\\', '"':
+ e.WriteByte('\\')
+ e.WriteByte(b)
+ case '\n':
+ e.WriteByte('\\')
+ e.WriteByte('n')
+ case '\r':
+ e.WriteByte('\\')
+ e.WriteByte('r')
+ case '\t':
+ e.WriteByte('\\')
+ e.WriteByte('t')
+ default:
+ // This encodes bytes < 0x20 except for \t, \n and \r.
+ // If escapeHTML is set, it also escapes <, >, and &
+ // because they can lead to security holes when
+ // user-controlled strings are rendered into JSON
+ // and served to some browsers.
+ e.WriteString(`\u00`)
+ e.WriteByte(hex[b>>4])
+ e.WriteByte(hex[b&0xF])
+ }
+ i++
+ start = i
+ continue
+ }
+ c, size := utf8.DecodeRune(s[i:])
+ if c == utf8.RuneError && size == 1 {
+ if start < i {
+ e.Write(s[start:i])
+ }
+ e.WriteString(`\ufffd`)
+ i += size
+ start = i
+ continue
+ }
+ // U+2028 is LINE SEPARATOR.
+ // U+2029 is PARAGRAPH SEPARATOR.
+ // They are both technically valid characters in JSON strings,
+ // but don't work in JSONP, which has to be evaluated as JavaScript,
+ // and can lead to security holes there. It is valid JSON to
+ // escape them, so we do so unconditionally.
+ // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
+ if c == '\u2028' || c == '\u2029' {
+ if start < i {
+ e.Write(s[start:i])
+ }
+ e.WriteString(`\u202`)
+ e.WriteByte(hex[c&0xF])
+ i += size
+ start = i
+ continue
+ }
+ i += size
+ }
+ if start < len(s) {
+ e.Write(s[start:])
+ }
+ e.WriteByte('"')
+ return e.Len() - len0
+}
+
+// A field represents a single field found in a struct.
+type field struct {
+ name string
+ nameBytes []byte // []byte(name)
+ equalFold func(s, t []byte) bool // bytes.EqualFold or equivalent
+
+ tag bool
+ index []int
+ typ reflect.Type
+ omitEmpty bool
+ quoted bool
+}
+
+func fillField(f field) field {
+ f.nameBytes = []byte(f.name)
+ f.equalFold = foldFunc(f.nameBytes)
+ return f
+}
+
+// byName sorts fields by name, breaking ties first by depth,
+// then by whether the name came from a JSON tag, then by
+// index sequence.
+type byName []field
+
+func (x byName) Len() int { return len(x) }
+
+func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+
+func (x byName) Less(i, j int) bool {
+ if x[i].name != x[j].name {
+ return x[i].name < x[j].name
+ }
+ if len(x[i].index) != len(x[j].index) {
+ return len(x[i].index) < len(x[j].index)
+ }
+ if x[i].tag != x[j].tag {
+ return x[i].tag
+ }
+ return byIndex(x).Less(i, j)
+}
+
+// byIndex sorts fields by index sequence.
+type byIndex []field
+
+func (x byIndex) Len() int { return len(x) }
+
+func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+
+func (x byIndex) Less(i, j int) bool {
+ for k, xik := range x[i].index {
+ if k >= len(x[j].index) {
+ return false
+ }
+ if xik != x[j].index[k] {
+ return xik < x[j].index[k]
+ }
+ }
+ return len(x[i].index) < len(x[j].index)
+}
+
+// typeFields returns a list of fields that JSON should recognize for the given type.
+// The algorithm is breadth-first search over the set of structs to include - the top struct
+// and then any reachable anonymous structs.
+func typeFields(t reflect.Type) []field {
+ // Anonymous fields to explore at the current level and the next.
+ current := []field{}
+ next := []field{{typ: t}}
+
+ // Count of queued names for current level and the next.
+ count := map[reflect.Type]int{}
+ nextCount := map[reflect.Type]int{}
+
+ // Types already visited at an earlier level.
+ visited := map[reflect.Type]bool{}
+
+ // Fields found.
+ var fields []field
+
+ for len(next) > 0 {
+ current, next = next, current[:0]
+ count, nextCount = nextCount, map[reflect.Type]int{}
+
+ for _, f := range current {
+ if visited[f.typ] {
+ continue
+ }
+ visited[f.typ] = true
+
+ // Scan f.typ for fields to include.
+ for i := 0; i < f.typ.NumField(); i++ {
+ sf := f.typ.Field(i)
+ if sf.PkgPath != "" && !sf.Anonymous { // unexported
+ continue
+ }
+ tag := sf.Tag.Get("json")
+ if tag == "-" {
+ continue
+ }
+ name, opts := parseTag(tag)
+ if !isValidTag(name) {
+ name = ""
+ }
+ index := make([]int, len(f.index)+1)
+ copy(index, f.index)
+ index[len(f.index)] = i
+
+ ft := sf.Type
+ if ft.Name() == "" && ft.Kind() == reflect.Ptr {
+ // Follow pointer.
+ ft = ft.Elem()
+ }
+
+ // Only strings, floats, integers, and booleans can be quoted.
+ quoted := false
+ if opts.Contains("string") {
+ switch ft.Kind() {
+ case reflect.Bool,
+ reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
+ reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
+ reflect.Float32, reflect.Float64,
+ reflect.String:
+ quoted = true
+ }
+ }
+
+ // Record found field and index sequence.
+ if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
+ tagged := name != ""
+ if name == "" {
+ name = sf.Name
+ }
+ fields = append(fields, fillField(field{
+ name: name,
+ tag: tagged,
+ index: index,
+ typ: ft,
+ omitEmpty: opts.Contains("omitempty"),
+ quoted: quoted,
+ }))
+ if count[f.typ] > 1 {
+ // If there were multiple instances, add a second,
+ // so that the annihilation code will see a duplicate.
+ // It only cares about the distinction between 1 or 2,
+ // so don't bother generating any more copies.
+ fields = append(fields, fields[len(fields)-1])
+ }
+ continue
+ }
+
+ // Record new anonymous struct to explore in next round.
+ nextCount[ft]++
+ if nextCount[ft] == 1 {
+ next = append(next, fillField(field{name: ft.Name(), index: index, typ: ft}))
+ }
+ }
+ }
+ }
+
+ sort.Sort(byName(fields))
+
+ // Delete all fields that are hidden by the Go rules for embedded fields,
+ // except that fields with JSON tags are promoted.
+
+ // The fields are sorted in primary order of name, secondary order
+ // of field index length. Loop over names; for each name, delete
+ // hidden fields by choosing the one dominant field that survives.
+ out := fields[:0]
+ for advance, i := 0, 0; i < len(fields); i += advance {
+ // One iteration per name.
+ // Find the sequence of fields with the name of this first field.
+ fi := fields[i]
+ name := fi.name
+ for advance = 1; i+advance < len(fields); advance++ {
+ fj := fields[i+advance]
+ if fj.name != name {
+ break
+ }
+ }
+ if advance == 1 { // Only one field with this name
+ out = append(out, fi)
+ continue
+ }
+ dominant, ok := dominantField(fields[i : i+advance])
+ if ok {
+ out = append(out, dominant)
+ }
+ }
+
+ fields = out
+ sort.Sort(byIndex(fields))
+
+ return fields
+}
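+
+// For illustration only (Base and Doc are hypothetical types), the
+// breadth-first walk above is what promotes embedded fields:
+//
+//   type Base struct{ ID int }
+//   type Doc struct {
+//       Base
+//       Name string
+//   }
+//
+//   b, _ := Marshal(Doc{Base{1}, "x"})
+//   // string(b) == `{"ID":1,"Name":"x"}`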
+
+// dominantField looks through the fields, all of which are known to
+// have the same name, to find the single field that dominates the
+// others using Go's embedding rules, modified by the presence of
+// JSON tags. If there are multiple top-level fields, the boolean
+// will be false: This condition is an error in Go and we skip all
+// the fields.
+func dominantField(fields []field) (field, bool) {
+ // The fields are sorted in increasing index-length order. The winner
+ // must therefore be one with the shortest index length. Drop all
+ // longer entries, which is easy: just truncate the slice.
+ length := len(fields[0].index)
+ tagged := -1 // Index of first tagged field.
+ for i, f := range fields {
+ if len(f.index) > length {
+ fields = fields[:i]
+ break
+ }
+ if f.tag {
+ if tagged >= 0 {
+ // Multiple tagged fields at the same level: conflict.
+ // Return no field.
+ return field{}, false
+ }
+ tagged = i
+ }
+ }
+ if tagged >= 0 {
+ return fields[tagged], true
+ }
+ // All remaining fields have the same length. If there's more than one,
+ // we have a conflict (two fields named "X" at the same level) and we
+ // return no field.
+ if len(fields) > 1 {
+ return field{}, false
+ }
+ return fields[0], true
+}
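+
+// For illustration only (Inner1, Inner2, and Outer are hypothetical types),
+// the dominance rule above resolves a name conflict like this:
+//
+//   type Inner1 struct{ X int }
+//   type Inner2 struct {
+//       X int `json:"X"`
+//   }
+//   type Outer struct {
+//       Inner1
+//       Inner2
+//   }
+//
+//   b, _ := Marshal(Outer{})
+//   // string(b) == `{"X":0}` (the tagged field from Inner2 dominates)
+//
+// With no tags at all, the two conflicting X fields would both be dropped and
+// the result would be `{}`.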
+
+var fieldCache struct {
+ sync.RWMutex
+ m map[reflect.Type][]field
+}
+
+// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
+func cachedTypeFields(t reflect.Type) []field {
+ fieldCache.RLock()
+ f := fieldCache.m[t]
+ fieldCache.RUnlock()
+ if f != nil {
+ return f
+ }
+
+ // Compute fields without lock.
+ // Might duplicate effort but won't hold other computations back.
+ f = typeFields(t)
+ if f == nil {
+ f = []field{}
+ }
+
+ fieldCache.Lock()
+ if fieldCache.m == nil {
+ fieldCache.m = map[reflect.Type][]field{}
+ }
+ fieldCache.m[t] = f
+ fieldCache.Unlock()
+ return f
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/encode_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/encode_test.go
new file mode 100644
index 00000000000..b484022a70e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/encode_test.go
@@ -0,0 +1,613 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "bytes"
+ "fmt"
+ "math"
+ "reflect"
+ "testing"
+ "unicode"
+)
+
+type Optionals struct {
+ Sr string `json:"sr"`
+ So string `json:"so,omitempty"`
+ Sw string `json:"-"`
+
+ Ir int `json:"omitempty"` // actually named omitempty, not an option
+ Io int `json:"io,omitempty"`
+
+ Slr []string `json:"slr,random"`
+ Slo []string `json:"slo,omitempty"`
+
+ Mr map[string]interface{} `json:"mr"`
+ Mo map[string]interface{} `json:",omitempty"`
+
+ Fr float64 `json:"fr"`
+ Fo float64 `json:"fo,omitempty"`
+
+ Br bool `json:"br"`
+ Bo bool `json:"bo,omitempty"`
+
+ Ur uint `json:"ur"`
+ Uo uint `json:"uo,omitempty"`
+
+ Str struct{} `json:"str"`
+ Sto struct{} `json:"sto,omitempty"`
+}
+
+var optionalsExpected = `{
+ "sr": "",
+ "omitempty": 0,
+ "slr": null,
+ "mr": {},
+ "fr": 0,
+ "br": false,
+ "ur": 0,
+ "str": {},
+ "sto": {}
+}`
+
+func TestOmitEmpty(t *testing.T) {
+ var o Optionals
+ o.Sw = "something"
+ o.Mr = map[string]interface{}{}
+ o.Mo = map[string]interface{}{}
+
+ got, err := MarshalIndent(&o, "", " ")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if got := string(got); got != optionalsExpected {
+ t.Errorf(" got: %s\nwant: %s\n", got, optionalsExpected)
+ }
+}
+
+type StringTag struct {
+ BoolStr bool `json:",string"`
+ IntStr int64 `json:",string"`
+ StrStr string `json:",string"`
+}
+
+var stringTagExpected = `{
+ "BoolStr": "true",
+ "IntStr": "42",
+ "StrStr": "\"xzbit\""
+}`
+
+func TestStringTag(t *testing.T) {
+ var s StringTag
+ s.BoolStr = true
+ s.IntStr = 42
+ s.StrStr = "xzbit"
+ got, err := MarshalIndent(&s, "", " ")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if got := string(got); got != stringTagExpected {
+ t.Fatalf(" got: %s\nwant: %s\n", got, stringTagExpected)
+ }
+
+ // Verify that it round-trips.
+ var s2 StringTag
+ err = NewDecoder(bytes.NewReader(got)).Decode(&s2)
+ if err != nil {
+ t.Fatalf("Decode: %v", err)
+ }
+ if !reflect.DeepEqual(s, s2) {
+ t.Fatalf("decode didn't match.\nsource: %#v\nEncoded as:\n%s\ndecode: %#v", s, string(got), s2)
+ }
+}
+
+// byte slices are special even if they're renamed types.
+type renamedByte byte
+type renamedByteSlice []byte
+type renamedRenamedByteSlice []renamedByte
+
+func TestEncodeRenamedByteSlice(t *testing.T) {
+ s := renamedByteSlice("abc")
+ result, err := Marshal(s)
+ if err != nil {
+ t.Fatal(err)
+ }
+ expect := `"YWJj"`
+ if string(result) != expect {
+ t.Errorf(" got %s want %s", result, expect)
+ }
+ r := renamedRenamedByteSlice("abc")
+ result, err = Marshal(r)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if string(result) != expect {
+ t.Errorf(" got %s want %s", result, expect)
+ }
+}
+
+var unsupportedValues = []interface{}{
+ math.NaN(),
+ math.Inf(-1),
+ math.Inf(1),
+}
+
+func TestUnsupportedValues(t *testing.T) {
+ for _, v := range unsupportedValues {
+ if _, err := Marshal(v); err != nil {
+ if _, ok := err.(*UnsupportedValueError); !ok {
+ t.Errorf("for %v, got %T want UnsupportedValueError", v, err)
+ }
+ } else {
+ t.Errorf("for %v, expected error", v)
+ }
+ }
+}
+
+// Ref has Marshaler and Unmarshaler methods with pointer receiver.
+type Ref int
+
+func (*Ref) MarshalJSON() ([]byte, error) {
+ return []byte(`"ref"`), nil
+}
+
+func (r *Ref) UnmarshalJSON([]byte) error {
+ *r = 12
+ return nil
+}
+
+// Val has Marshaler methods with value receiver.
+type Val int
+
+func (Val) MarshalJSON() ([]byte, error) {
+ return []byte(`"val"`), nil
+}
+
+// RefText has MarshalText and UnmarshalText methods with pointer receiver.
+type RefText int
+
+func (*RefText) MarshalText() ([]byte, error) {
+ return []byte(`"ref"`), nil
+}
+
+func (r *RefText) UnmarshalText([]byte) error {
+ *r = 13
+ return nil
+}
+
+// ValText has a MarshalText method with value receiver.
+type ValText int
+
+func (ValText) MarshalText() ([]byte, error) {
+ return []byte(`"val"`), nil
+}
+
+func TestRefValMarshal(t *testing.T) {
+ var s = struct {
+ R0 Ref
+ R1 *Ref
+ R2 RefText
+ R3 *RefText
+ V0 Val
+ V1 *Val
+ V2 ValText
+ V3 *ValText
+ }{
+ R0: 12,
+ R1: new(Ref),
+ R2: 14,
+ R3: new(RefText),
+ V0: 13,
+ V1: new(Val),
+ V2: 15,
+ V3: new(ValText),
+ }
+ const want = `{"R0":"ref","R1":"ref","R2":"\"ref\"","R3":"\"ref\"","V0":"val","V1":"val","V2":"\"val\"","V3":"\"val\""}`
+ b, err := Marshal(&s)
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+ if got := string(b); got != want {
+ t.Errorf("got %q, want %q", got, want)
+ }
+}
+
+// C implements Marshaler and returns unescaped JSON.
+type C int
+
+func (C) MarshalJSON() ([]byte, error) {
+ return []byte(`"<&>"`), nil
+}
+
+// CText implements Marshaler and returns unescaped text.
+type CText int
+
+func (CText) MarshalText() ([]byte, error) {
+ return []byte(`"<&>"`), nil
+}
+
+func TestMarshalerEscaping(t *testing.T) {
+ var c C
+ want := `"\u003c\u0026\u003e"`
+ b, err := Marshal(c)
+ if err != nil {
+ t.Fatalf("Marshal(c): %v", err)
+ }
+ if got := string(b); got != want {
+ t.Errorf("Marshal(c) = %#q, want %#q", got, want)
+ }
+
+ var ct CText
+ want = `"\"\u003c\u0026\u003e\""`
+ b, err = Marshal(ct)
+ if err != nil {
+ t.Fatalf("Marshal(ct): %v", err)
+ }
+ if got := string(b); got != want {
+ t.Errorf("Marshal(ct) = %#q, want %#q", got, want)
+ }
+}
+
+type IntType int
+
+type MyStruct struct {
+ IntType
+}
+
+func TestAnonymousNonstruct(t *testing.T) {
+ var i IntType = 11
+ a := MyStruct{i}
+ const want = `{"IntType":11}`
+
+ b, err := Marshal(a)
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+ if got := string(b); got != want {
+ t.Errorf("got %q, want %q", got, want)
+ }
+}
+
+type BugA struct {
+ S string
+}
+
+type BugB struct {
+ BugA
+ S string
+}
+
+type BugC struct {
+ S string
+}
+
+// Legal Go: We never use the repeated embedded field (S).
+type BugX struct {
+ A int
+ BugA
+ BugB
+}
+
+// Issue 5245.
+func TestEmbeddedBug(t *testing.T) {
+ v := BugB{
+ BugA{"A"},
+ "B",
+ }
+ b, err := Marshal(v)
+ if err != nil {
+ t.Fatal("Marshal:", err)
+ }
+ want := `{"S":"B"}`
+ got := string(b)
+ if got != want {
+ t.Fatalf("Marshal: got %s want %s", got, want)
+ }
+ // Now check that the duplicate field, S, does not appear.
+ x := BugX{
+ A: 23,
+ }
+ b, err = Marshal(x)
+ if err != nil {
+ t.Fatal("Marshal:", err)
+ }
+ want = `{"A":23}`
+ got = string(b)
+ if got != want {
+ t.Fatalf("Marshal: got %s want %s", got, want)
+ }
+}
+
+type BugD struct { // Same as BugA after tagging.
+ XXX string `json:"S"`
+}
+
+// BugD's tagged S field should dominate BugA's.
+type BugY struct {
+ BugA
+ BugD
+}
+
+// Test that a field with a tag dominates untagged fields.
+func TestTaggedFieldDominates(t *testing.T) {
+ v := BugY{
+ BugA{"BugA"},
+ BugD{"BugD"},
+ }
+ b, err := Marshal(v)
+ if err != nil {
+ t.Fatal("Marshal:", err)
+ }
+ want := `{"S":"BugD"}`
+ got := string(b)
+ if got != want {
+ t.Fatalf("Marshal: got %s want %s", got, want)
+ }
+}
+
+// There are no tags here, so S should not appear.
+type BugZ struct {
+ BugA
+ BugC
+ BugY // Contains a tagged S field through BugD; should not dominate.
+}
+
+func TestDuplicatedFieldDisappears(t *testing.T) {
+ v := BugZ{
+ BugA{"BugA"},
+ BugC{"BugC"},
+ BugY{
+ BugA{"nested BugA"},
+ BugD{"nested BugD"},
+ },
+ }
+ b, err := Marshal(v)
+ if err != nil {
+ t.Fatal("Marshal:", err)
+ }
+ want := `{}`
+ got := string(b)
+ if got != want {
+ t.Fatalf("Marshal: got %s want %s", got, want)
+ }
+}
+
+func TestStringBytes(t *testing.T) {
+ // Test that encodeState.stringBytes and encodeState.string use the same encoding.
+ var r []rune
+ for i := '\u0000'; i <= unicode.MaxRune; i++ {
+ r = append(r, i)
+ }
+ s := string(r) + "\xff\xff\xffhello" // some invalid UTF-8 too
+
+ for _, escapeHTML := range []bool{true, false} {
+ es := &encodeState{}
+ es.string(s, escapeHTML)
+
+ esBytes := &encodeState{}
+ esBytes.stringBytes([]byte(s), escapeHTML)
+
+ enc := es.Buffer.String()
+ encBytes := esBytes.Buffer.String()
+ if enc != encBytes {
+ i := 0
+ for i < len(enc) && i < len(encBytes) && enc[i] == encBytes[i] {
+ i++
+ }
+ enc = enc[i:]
+ encBytes = encBytes[i:]
+ i = 0
+ for i < len(enc) && i < len(encBytes) && enc[len(enc)-i-1] == encBytes[len(encBytes)-i-1] {
+ i++
+ }
+ enc = enc[:len(enc)-i]
+ encBytes = encBytes[:len(encBytes)-i]
+
+ if len(enc) > 20 {
+ enc = enc[:20] + "..."
+ }
+ if len(encBytes) > 20 {
+ encBytes = encBytes[:20] + "..."
+ }
+
+ t.Errorf("with escapeHTML=%t, encodings differ at %#q vs %#q",
+ escapeHTML, enc, encBytes)
+ }
+ }
+}
+
+func TestIssue6458(t *testing.T) {
+ type Foo struct {
+ M RawMessage
+ }
+ x := Foo{RawMessage(`"foo"`)}
+
+ b, err := Marshal(&x)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if want := `{"M":"foo"}`; string(b) != want {
+ t.Errorf("Marshal(&x) = %#q; want %#q", b, want)
+ }
+
+ b, err = Marshal(x)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if want := `{"M":"ImZvbyI="}`; string(b) != want {
+ t.Errorf("Marshal(x) = %#q; want %#q", b, want)
+ }
+}
+
+func TestIssue10281(t *testing.T) {
+ type Foo struct {
+ N Number
+ }
+ x := Foo{Number(`invalid`)}
+
+ b, err := Marshal(&x)
+ if err == nil {
+ t.Errorf("Marshal(&x) = %#q; want error", b)
+ }
+}
+
+func TestHTMLEscape(t *testing.T) {
+ var b, want bytes.Buffer
+ m := `{"M":"<html>foo &` + "\xe2\x80\xa8 \xe2\x80\xa9" + `</html>"}`
+ want.Write([]byte(`{"M":"\u003chtml\u003efoo \u0026\u2028 \u2029\u003c/html\u003e"}`))
+ HTMLEscape(&b, []byte(m))
+ if !bytes.Equal(b.Bytes(), want.Bytes()) {
+ t.Errorf("HTMLEscape(&b, []byte(m)) = %s; want %s", b.Bytes(), want.Bytes())
+ }
+}
+
+// golang.org/issue/8582
+func TestEncodePointerString(t *testing.T) {
+ type stringPointer struct {
+ N *int64 `json:"n,string"`
+ }
+ var n int64 = 42
+ b, err := Marshal(stringPointer{N: &n})
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+ if got, want := string(b), `{"n":"42"}`; got != want {
+ t.Errorf("Marshal = %s, want %s", got, want)
+ }
+ var back stringPointer
+ err = Unmarshal(b, &back)
+ if err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if back.N == nil {
+ t.Fatalf("Unmarshalled nil N field")
+ }
+ if *back.N != 42 {
+ t.Fatalf("*N = %d; want 42", *back.N)
+ }
+}
+
+var encodeStringTests = []struct {
+ in string
+ out string
+}{
+ {"\x00", `"\u0000"`},
+ {"\x01", `"\u0001"`},
+ {"\x02", `"\u0002"`},
+ {"\x03", `"\u0003"`},
+ {"\x04", `"\u0004"`},
+ {"\x05", `"\u0005"`},
+ {"\x06", `"\u0006"`},
+ {"\x07", `"\u0007"`},
+ {"\x08", `"\u0008"`},
+ {"\x09", `"\t"`},
+ {"\x0a", `"\n"`},
+ {"\x0b", `"\u000b"`},
+ {"\x0c", `"\u000c"`},
+ {"\x0d", `"\r"`},
+ {"\x0e", `"\u000e"`},
+ {"\x0f", `"\u000f"`},
+ {"\x10", `"\u0010"`},
+ {"\x11", `"\u0011"`},
+ {"\x12", `"\u0012"`},
+ {"\x13", `"\u0013"`},
+ {"\x14", `"\u0014"`},
+ {"\x15", `"\u0015"`},
+ {"\x16", `"\u0016"`},
+ {"\x17", `"\u0017"`},
+ {"\x18", `"\u0018"`},
+ {"\x19", `"\u0019"`},
+ {"\x1a", `"\u001a"`},
+ {"\x1b", `"\u001b"`},
+ {"\x1c", `"\u001c"`},
+ {"\x1d", `"\u001d"`},
+ {"\x1e", `"\u001e"`},
+ {"\x1f", `"\u001f"`},
+}
+
+func TestEncodeString(t *testing.T) {
+ for _, tt := range encodeStringTests {
+ b, err := Marshal(tt.in)
+ if err != nil {
+ t.Errorf("Marshal(%q): %v", tt.in, err)
+ continue
+ }
+ out := string(b)
+ if out != tt.out {
+ t.Errorf("Marshal(%q) = %#q, want %#q", tt.in, out, tt.out)
+ }
+ }
+}
+
+type jsonbyte byte
+
+func (b jsonbyte) MarshalJSON() ([]byte, error) { return tenc(`{"JB":%d}`, b) }
+
+type textbyte byte
+
+func (b textbyte) MarshalText() ([]byte, error) { return tenc(`TB:%d`, b) }
+
+type jsonint int
+
+func (i jsonint) MarshalJSON() ([]byte, error) { return tenc(`{"JI":%d}`, i) }
+
+type textint int
+
+func (i textint) MarshalText() ([]byte, error) { return tenc(`TI:%d`, i) }
+
+func tenc(format string, a ...interface{}) ([]byte, error) {
+ var buf bytes.Buffer
+ fmt.Fprintf(&buf, format, a...)
+ return buf.Bytes(), nil
+}
+
+// Issue 13783
+func TestEncodeBytekind(t *testing.T) {
+ testdata := []struct {
+ data interface{}
+ want string
+ }{
+ {byte(7), "7"},
+ {jsonbyte(7), `{"JB":7}`},
+ {textbyte(4), `"TB:4"`},
+ {jsonint(5), `{"JI":5}`},
+ {textint(1), `"TI:1"`},
+ {[]byte{0, 1}, `"AAE="`},
+ {[]jsonbyte{0, 1}, `[{"JB":0},{"JB":1}]`},
+ {[][]jsonbyte{{0, 1}, {3}}, `[[{"JB":0},{"JB":1}],[{"JB":3}]]`},
+ {[]textbyte{2, 3}, `["TB:2","TB:3"]`},
+ {[]jsonint{5, 4}, `[{"JI":5},{"JI":4}]`},
+ {[]textint{9, 3}, `["TI:9","TI:3"]`},
+ {[]int{9, 3}, `[9,3]`},
+ }
+ for _, d := range testdata {
+ js, err := Marshal(d.data)
+ if err != nil {
+ t.Error(err)
+ continue
+ }
+ got, want := string(js), d.want
+ if got != want {
+ t.Errorf("got %s, want %s", got, want)
+ }
+ }
+}
+
+func TestTextMarshalerMapKeysAreSorted(t *testing.T) {
+ b, err := Marshal(map[unmarshalerText]int{
+ {"x", "y"}: 1,
+ {"y", "x"}: 2,
+ {"a", "z"}: 3,
+ {"z", "a"}: 4,
+ })
+ if err != nil {
+ t.Fatalf("Failed to Marshal text.Marshaler: %v", err)
+ }
+ const want = `{"a:z":3,"x:y":1,"y:x":2,"z:a":4}`
+ if string(b) != want {
+ t.Errorf("Marshal map with text.Marshaler keys: got %#q, want %#q", b, want)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/example_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/example_test.go
new file mode 100644
index 00000000000..326bdc9540e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/example_test.go
@@ -0,0 +1,252 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json_test
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "strings"
+)
+
+func ExampleMarshal() {
+ type ColorGroup struct {
+ ID int
+ Name string
+ Colors []string
+ }
+ group := ColorGroup{
+ ID: 1,
+ Name: "Reds",
+ Colors: []string{"Crimson", "Red", "Ruby", "Maroon"},
+ }
+ b, err := json.Marshal(group)
+ if err != nil {
+ fmt.Println("error:", err)
+ }
+ os.Stdout.Write(b)
+ // Output:
+ // {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}
+}
+
+func ExampleUnmarshal() {
+ var jsonBlob = []byte(`[
+ {"Name": "Platypus", "Order": "Monotremata"},
+ {"Name": "Quoll", "Order": "Dasyuromorphia"}
+ ]`)
+ type Animal struct {
+ Name string
+ Order string
+ }
+ var animals []Animal
+ err := json.Unmarshal(jsonBlob, &animals)
+ if err != nil {
+ fmt.Println("error:", err)
+ }
+ fmt.Printf("%+v", animals)
+ // Output:
+ // [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}]
+}
+
+// This example uses a Decoder to decode a stream of distinct JSON values.
+func ExampleDecoder() {
+ const jsonStream = `
+ {"Name": "Ed", "Text": "Knock knock."}
+ {"Name": "Sam", "Text": "Who's there?"}
+ {"Name": "Ed", "Text": "Go fmt."}
+ {"Name": "Sam", "Text": "Go fmt who?"}
+ {"Name": "Ed", "Text": "Go fmt yourself!"}
+ `
+ type Message struct {
+ Name, Text string
+ }
+ dec := json.NewDecoder(strings.NewReader(jsonStream))
+ for {
+ var m Message
+ if err := dec.Decode(&m); err == io.EOF {
+ break
+ } else if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("%s: %s\n", m.Name, m.Text)
+ }
+ // Output:
+ // Ed: Knock knock.
+ // Sam: Who's there?
+ // Ed: Go fmt.
+ // Sam: Go fmt who?
+ // Ed: Go fmt yourself!
+}
+
+// This example uses a Decoder to decode a stream of distinct JSON values.
+func ExampleDecoder_Token() {
+ const jsonStream = `
+ {"Message": "Hello", "Array": [1, 2, 3], "Null": null, "Number": 1.234}
+ `
+ dec := json.NewDecoder(strings.NewReader(jsonStream))
+ for {
+ t, err := dec.Token()
+ if err == io.EOF {
+ break
+ }
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("%T: %v", t, t)
+ if dec.More() {
+ fmt.Printf(" (more)")
+ }
+ fmt.Printf("\n")
+ }
+ // Output:
+ // json.Delim: { (more)
+ // string: Message (more)
+ // string: Hello (more)
+ // string: Array (more)
+ // json.Delim: [ (more)
+ // float64: 1 (more)
+ // float64: 2 (more)
+ // float64: 3
+ // json.Delim: ] (more)
+ // string: Null (more)
+ // <nil>: <nil> (more)
+ // string: Number (more)
+ // float64: 1.234
+ // json.Delim: }
+}
+
+// This example uses a Decoder to decode a streaming array of JSON objects.
+func ExampleDecoder_Decode_stream() {
+ const jsonStream = `
+ [
+ {"Name": "Ed", "Text": "Knock knock."},
+ {"Name": "Sam", "Text": "Who's there?"},
+ {"Name": "Ed", "Text": "Go fmt."},
+ {"Name": "Sam", "Text": "Go fmt who?"},
+ {"Name": "Ed", "Text": "Go fmt yourself!"}
+ ]
+ `
+ type Message struct {
+ Name, Text string
+ }
+ dec := json.NewDecoder(strings.NewReader(jsonStream))
+
+ // read open bracket
+ t, err := dec.Token()
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("%T: %v\n", t, t)
+
+ var m Message
+ // while the array contains values
+ for dec.More() {
+
+ // decode an array value (Message)
+ err := dec.Decode(&m)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ fmt.Printf("%v: %v\n", m.Name, m.Text)
+ }
+
+ // read closing bracket
+ t, err = dec.Token()
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("%T: %v\n", t, t)
+
+ // Output:
+ // json.Delim: [
+ // Ed: Knock knock.
+ // Sam: Who's there?
+ // Ed: Go fmt.
+ // Sam: Go fmt who?
+ // Ed: Go fmt yourself!
+ // json.Delim: ]
+
+}
+
+// This example uses RawMessage to delay parsing part of a JSON message.
+func ExampleRawMessage() {
+ type Color struct {
+ Space string
+ Point json.RawMessage // delay parsing until we know the color space
+ }
+ type RGB struct {
+ R uint8
+ G uint8
+ B uint8
+ }
+ type YCbCr struct {
+ Y uint8
+ Cb int8
+ Cr int8
+ }
+
+ var j = []byte(`[
+ {"Space": "YCbCr", "Point": {"Y": 255, "Cb": 0, "Cr": -10}},
+ {"Space": "RGB", "Point": {"R": 98, "G": 218, "B": 255}}
+ ]`)
+ var colors []Color
+ err := json.Unmarshal(j, &colors)
+ if err != nil {
+ log.Fatalln("error:", err)
+ }
+
+ for _, c := range colors {
+ var dst interface{}
+ switch c.Space {
+ case "RGB":
+ dst = new(RGB)
+ case "YCbCr":
+ dst = new(YCbCr)
+ }
+ err := json.Unmarshal(c.Point, dst)
+ if err != nil {
+ log.Fatalln("error:", err)
+ }
+ fmt.Println(c.Space, dst)
+ }
+ // Output:
+ // YCbCr &{255 0 -10}
+ // RGB &{98 218 255}
+}
+
+func ExampleIndent() {
+ type Road struct {
+ Name string
+ Number int
+ }
+ roads := []Road{
+ {"Diamond Fork", 29},
+ {"Sheep Creek", 51},
+ }
+
+ b, err := json.Marshal(roads)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ var out bytes.Buffer
+ json.Indent(&out, b, "=", "\t")
+ out.WriteTo(os.Stdout)
+ // Output:
+ // [
+ // = {
+ // = "Name": "Diamond Fork",
+ // = "Number": 29
+ // = },
+ // = {
+ // = "Name": "Sheep Creek",
+ // = "Number": 51
+ // = }
+ // =]
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/extension.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/extension.go
new file mode 100644
index 00000000000..1c8fd459753
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/extension.go
@@ -0,0 +1,95 @@
+package json
+
+import (
+ "reflect"
+)
+
+// Extension holds a set of additional rules to be used when unmarshaling
+// strict JSON or JSON-like content.
+type Extension struct {
+ funcs map[string]funcExt
+ consts map[string]interface{}
+ keyed map[string]func([]byte) (interface{}, error)
+ encode map[reflect.Type]func(v interface{}) ([]byte, error)
+
+ unquotedKeys bool
+ trailingCommas bool
+}
+
+type funcExt struct {
+ key string
+ args []string
+}
+
+// Extend changes the decoder behavior to consider the provided extension.
+func (dec *Decoder) Extend(ext *Extension) { dec.d.ext = *ext }
+
+// Extend changes the encoder behavior to consider the provided extension.
+func (enc *Encoder) Extend(ext *Extension) { enc.ext = *ext }
+
+// Extend includes in e the extensions defined in ext.
+func (e *Extension) Extend(ext *Extension) {
+ for name, fext := range ext.funcs {
+ e.DecodeFunc(name, fext.key, fext.args...)
+ }
+ for name, value := range ext.consts {
+ e.DecodeConst(name, value)
+ }
+ for key, decode := range ext.keyed {
+ e.DecodeKeyed(key, decode)
+ }
+ for typ, encode := range ext.encode {
+ if e.encode == nil {
+ e.encode = make(map[reflect.Type]func(v interface{}) ([]byte, error))
+ }
+ e.encode[typ] = encode
+ }
+}
+
+// DecodeFunc defines a function call that may be observed inside JSON content.
+// A function with the provided name will be unmarshaled as the document
+// {key: {args[0]: ..., args[N]: ...}}.
+func (e *Extension) DecodeFunc(name string, key string, args ...string) {
+ if e.funcs == nil {
+ e.funcs = make(map[string]funcExt)
+ }
+ e.funcs[name] = funcExt{key, args}
+}
+
+// DecodeConst defines a constant name that may be observed inside JSON content
+// and will be decoded with the provided value.
+func (e *Extension) DecodeConst(name string, value interface{}) {
+ if e.consts == nil {
+ e.consts = make(map[string]interface{})
+ }
+ e.consts[name] = value
+}
+
+// DecodeKeyed defines a key that when observed as the first element inside a
+// JSON document triggers the decoding of that document via the provided
+// decode function.
+func (e *Extension) DecodeKeyed(key string, decode func(data []byte) (interface{}, error)) {
+ if e.keyed == nil {
+ e.keyed = make(map[string]func([]byte) (interface{}, error))
+ }
+ e.keyed[key] = decode
+}
+
+// DecodeUnquotedKeys defines whether to accept map keys that are unquoted strings.
+func (e *Extension) DecodeUnquotedKeys(accept bool) {
+ e.unquotedKeys = accept
+}
+
+// DecodeTrailingCommas defines whether to accept trailing commas in maps and arrays.
+func (e *Extension) DecodeTrailingCommas(accept bool) {
+ e.trailingCommas = accept
+}
+
+// EncodeType registers a function to encode values with the same type as the
+// provided sample.
+func (e *Extension) EncodeType(sample interface{}, encode func(v interface{}) ([]byte, error)) {
+ if e.encode == nil {
+ e.encode = make(map[reflect.Type]func(v interface{}) ([]byte, error))
+ }
+ e.encode[reflect.TypeOf(sample)] = encode
+}
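
A rough sketch of how a caller might wire up the Extension API defined above, combining a decode function with relaxed key and comma handling; the behavior mirrors the extension_test.go cases that follow. The function name, field names, and import path are hypothetical, and the package is internal to mgo.v2, so this is illustrative only.

    package main

    import (
        "fmt"
        "strings"

        json "gopkg.in/mgo.v2/internal/json" // illustrative: not importable from outside mgo.v2
    )

    func main() {
        var ext json.Extension
        // Decode Date(123) as the document {"$date": {"when": 123}}.
        ext.DecodeFunc("Date", "$date", "when")
        // Accept shell-style relaxed syntax.
        ext.DecodeUnquotedKeys(true)
        ext.DecodeTrailingCommas(true)

        dec := json.NewDecoder(strings.NewReader(`{created: Date(123),}`))
        dec.Extend(&ext)

        var v interface{}
        if err := dec.Decode(&v); err != nil {
            fmt.Println("error:", err)
            return
        }
        fmt.Println(v) // map[created:map[$date:map[when:123]]]
    }
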
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/extension_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/extension_test.go
new file mode 100644
index 00000000000..8c228189724
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/extension_test.go
@@ -0,0 +1,218 @@
+package json
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+ "strconv"
+ "testing"
+)
+
+type funcN struct {
+ Arg1 int `json:"arg1"`
+ Arg2 int `json:"arg2"`
+}
+
+type funcs struct {
+ Func2 *funcN `json:"$func2"`
+ Func1 *funcN `json:"$func1"`
+}
+
+type funcsText struct {
+ Func1 jsonText `json:"$func1"`
+ Func2 jsonText `json:"$func2"`
+}
+
+type jsonText struct {
+ json string
+}
+
+func (jt *jsonText) UnmarshalJSON(data []byte) error {
+ jt.json = string(data)
+ return nil
+}
+
+type nestedText struct {
+ F jsonText
+ B bool
+}
+
+type unquotedKey struct {
+ S string `json:"$k_1"`
+}
+
+var ext Extension
+
+type keyed string
+
+func decodeKeyed(data []byte) (interface{}, error) {
+ return keyed(data), nil
+}
+
+type keyedType struct {
+ K keyed
+ I int
+}
+
+type docint int
+
+type const1Type struct{}
+
+var const1 = new(const1Type)
+
+func init() {
+ ext.DecodeFunc("Func1", "$func1")
+ ext.DecodeFunc("Func2", "$func2", "arg1", "arg2")
+ ext.DecodeFunc("Func3", "$func3", "arg1")
+ ext.DecodeFunc("new Func4", "$func4", "arg1")
+
+ ext.DecodeConst("Const1", const1)
+
+ ext.DecodeKeyed("$key1", decodeKeyed)
+ ext.DecodeKeyed("$func3", decodeKeyed)
+
+ ext.EncodeType(docint(0), func(v interface{}) ([]byte, error) {
+ s := `{"$docint": ` + strconv.Itoa(int(v.(docint))) + `}`
+ return []byte(s), nil
+ })
+
+ ext.DecodeUnquotedKeys(true)
+ ext.DecodeTrailingCommas(true)
+}
+
+type extDecodeTest struct {
+ in string
+ ptr interface{}
+ out interface{}
+ err error
+
+ noext bool
+}
+
+var extDecodeTests = []extDecodeTest{
+ // Functions
+ {in: `Func1()`, ptr: new(interface{}), out: map[string]interface{}{
+ "$func1": map[string]interface{}{},
+ }},
+ {in: `{"v": Func1()}`, ptr: new(interface{}), out: map[string]interface{}{
+ "v": map[string]interface{}{"$func1": map[string]interface{}{}},
+ }},
+ {in: `Func2(1)`, ptr: new(interface{}), out: map[string]interface{}{
+ "$func2": map[string]interface{}{"arg1": float64(1)},
+ }},
+ {in: `Func2(1, 2)`, ptr: new(interface{}), out: map[string]interface{}{
+ "$func2": map[string]interface{}{"arg1": float64(1), "arg2": float64(2)},
+ }},
+ {in: `Func2(Func1())`, ptr: new(interface{}), out: map[string]interface{}{
+ "$func2": map[string]interface{}{"arg1": map[string]interface{}{"$func1": map[string]interface{}{}}},
+ }},
+ {in: `Func2(1, 2, 3)`, ptr: new(interface{}), err: fmt.Errorf("json: too many arguments for function Func2")},
+ {in: `BadFunc()`, ptr: new(interface{}), err: fmt.Errorf(`json: unknown function "BadFunc"`)},
+
+ {in: `Func1()`, ptr: new(funcs), out: funcs{Func1: &funcN{}}},
+ {in: `Func2(1)`, ptr: new(funcs), out: funcs{Func2: &funcN{Arg1: 1}}},
+ {in: `Func2(1, 2)`, ptr: new(funcs), out: funcs{Func2: &funcN{Arg1: 1, Arg2: 2}}},
+
+ {in: `Func2(1, 2, 3)`, ptr: new(funcs), err: fmt.Errorf("json: too many arguments for function Func2")},
+ {in: `BadFunc()`, ptr: new(funcs), err: fmt.Errorf(`json: unknown function "BadFunc"`)},
+
+ {in: `Func2(1)`, ptr: new(jsonText), out: jsonText{"Func2(1)"}},
+ {in: `Func2(1, 2)`, ptr: new(funcsText), out: funcsText{Func2: jsonText{"Func2(1, 2)"}}},
+ {in: `{"f": Func2(1, 2), "b": true}`, ptr: new(nestedText), out: nestedText{jsonText{"Func2(1, 2)"}, true}},
+
+ {in: `Func1()`, ptr: new(struct{}), out: struct{}{}},
+
+ // Functions with "new" prefix
+ {in: `new Func4(1)`, ptr: new(interface{}), out: map[string]interface{}{
+ "$func4": map[string]interface{}{"arg1": float64(1)},
+ }},
+
+ // Constants
+ {in: `Const1`, ptr: new(interface{}), out: const1},
+ {in: `{"c": Const1}`, ptr: new(struct{ C *const1Type }), out: struct{ C *const1Type }{const1}},
+
+ // Keyed documents
+ {in: `{"v": {"$key1": 1}}`, ptr: new(interface{}), out: map[string]interface{}{"v": keyed(`{"$key1": 1}`)}},
+ {in: `{"k": {"$key1": 1}}`, ptr: new(keyedType), out: keyedType{K: keyed(`{"$key1": 1}`)}},
+ {in: `{"i": {"$key1": 1}}`, ptr: new(keyedType), err: &UnmarshalTypeError{"object", reflect.TypeOf(0), 18}},
+
+ // Keyed function documents
+ {in: `{"v": Func3()}`, ptr: new(interface{}), out: map[string]interface{}{"v": keyed(`Func3()`)}},
+ {in: `{"k": Func3()}`, ptr: new(keyedType), out: keyedType{K: keyed(`Func3()`)}},
+ {in: `{"i": Func3()}`, ptr: new(keyedType), err: &UnmarshalTypeError{"object", reflect.TypeOf(0), 13}},
+
+ // Unquoted keys
+ {in: `{$k_1: "bar"}`, ptr: new(interface{}), out: map[string]interface{}{"$k_1": "bar"}},
+ {in: `{$k_1: "bar"}`, ptr: new(unquotedKey), out: unquotedKey{"bar"}},
+
+ {in: `{$k_1: "bar"}`, noext: true, ptr: new(interface{}),
+ err: &SyntaxError{"invalid character '$' looking for beginning of object key string", 2}},
+ {in: `{$k_1: "bar"}`, noext: true, ptr: new(unquotedKey),
+ err: &SyntaxError{"invalid character '$' looking for beginning of object key string", 2}},
+
+ // Trailing commas
+ {in: `{"k": "v",}`, ptr: new(interface{}), out: map[string]interface{}{"k": "v"}},
+ {in: `{"k": "v",}`, ptr: new(struct{}), out: struct{}{}},
+ {in: `["v",]`, ptr: new(interface{}), out: []interface{}{"v"}},
+
+ {in: `{"k": "v",}`, noext: true, ptr: new(interface{}),
+ err: &SyntaxError{"invalid character '}' looking for beginning of object key string", 11}},
+ {in: `{"k": "v",}`, noext: true, ptr: new(struct{}),
+ err: &SyntaxError{"invalid character '}' looking for beginning of object key string", 11}},
+ {in: `["a",]`, noext: true, ptr: new(interface{}),
+ err: &SyntaxError{"invalid character ']' looking for beginning of value", 6}},
+}
+
+type extEncodeTest struct {
+ in interface{}
+ out string
+ err error
+}
+
+var extEncodeTests = []extEncodeTest{
+ {in: docint(13), out: "{\"$docint\":13}\n"},
+}
+
+func TestExtensionDecode(t *testing.T) {
+ for i, tt := range extDecodeTests {
+ in := []byte(tt.in)
+
+ // v = new(right-type)
+ v := reflect.New(reflect.TypeOf(tt.ptr).Elem())
+ dec := NewDecoder(bytes.NewReader(in))
+ if !tt.noext {
+ dec.Extend(&ext)
+ }
+ if err := dec.Decode(v.Interface()); !reflect.DeepEqual(err, tt.err) {
+ t.Errorf("#%d: %v, want %v", i, err, tt.err)
+ continue
+ } else if err != nil {
+ continue
+ }
+ if !reflect.DeepEqual(v.Elem().Interface(), tt.out) {
+ t.Errorf("#%d: mismatch\nhave: %#+v\nwant: %#+v", i, v.Elem().Interface(), tt.out)
+ data, _ := Marshal(v.Elem().Interface())
+ t.Logf("%s", string(data))
+ data, _ = Marshal(tt.out)
+ t.Logf("%s", string(data))
+ continue
+ }
+ }
+}
+
+func TestExtensionEncode(t *testing.T) {
+ var buf bytes.Buffer
+ for i, tt := range extEncodeTests {
+ buf.Truncate(0)
+ enc := NewEncoder(&buf)
+ enc.Extend(&ext)
+ err := enc.Encode(tt.in)
+ if !reflect.DeepEqual(err, tt.err) {
+ t.Errorf("#%d: %v, want %v", i, err, tt.err)
+ continue
+ }
+ if buf.String() != tt.out {
+ t.Errorf("#%d: mismatch\nhave: %q\nwant: %q", i, buf.String(), tt.out)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/fold.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/fold.go
new file mode 100644
index 00000000000..9e170127dba
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/fold.go
@@ -0,0 +1,143 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "bytes"
+ "unicode/utf8"
+)
+
+const (
+ caseMask = ^byte(0x20) // Mask to ignore case in ASCII.
+ kelvin = '\u212a'
+ smallLongEss = '\u017f'
+)
+
+// foldFunc returns one of four different case folding equivalence
+// functions, from most general (and slow) to fastest:
+//
+// 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8
+// 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S')
+// 3) asciiEqualFold, no special, but includes non-letters (including _)
+// 4) simpleLetterEqualFold, no specials, no non-letters.
+//
+// The letters S and K are special because they map to 3 runes, not just 2:
+// * S maps to s and to U+017F 'ſ' Latin small letter long s
+// * k maps to K and to U+212A 'K' Kelvin sign
+// See https://play.golang.org/p/tTxjOc0OGo
+//
+// The returned function is specialized for matching against s and
+// should only be given s. It's not curried for performance reasons.
+func foldFunc(s []byte) func(s, t []byte) bool {
+ nonLetter := false
+ special := false // special letter
+ for _, b := range s {
+ if b >= utf8.RuneSelf {
+ return bytes.EqualFold
+ }
+ upper := b & caseMask
+ if upper < 'A' || upper > 'Z' {
+ nonLetter = true
+ } else if upper == 'K' || upper == 'S' {
+ // See above for why these letters are special.
+ special = true
+ }
+ }
+ if special {
+ return equalFoldRight
+ }
+ if nonLetter {
+ return asciiEqualFold
+ }
+ return simpleLetterEqualFold
+}
+
+// equalFoldRight is a specialization of bytes.EqualFold when s is
+// known to be all ASCII (including punctuation), but contains an 's',
+// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t.
+// See comments on foldFunc.
+func equalFoldRight(s, t []byte) bool {
+ for _, sb := range s {
+ if len(t) == 0 {
+ return false
+ }
+ tb := t[0]
+ if tb < utf8.RuneSelf {
+ if sb != tb {
+ sbUpper := sb & caseMask
+ if 'A' <= sbUpper && sbUpper <= 'Z' {
+ if sbUpper != tb&caseMask {
+ return false
+ }
+ } else {
+ return false
+ }
+ }
+ t = t[1:]
+ continue
+ }
+ // sb is ASCII and t is not. t must be either kelvin
+ // sign or long s; sb must be s, S, k, or K.
+ tr, size := utf8.DecodeRune(t)
+ switch sb {
+ case 's', 'S':
+ if tr != smallLongEss {
+ return false
+ }
+ case 'k', 'K':
+ if tr != kelvin {
+ return false
+ }
+ default:
+ return false
+ }
+ t = t[size:]
+
+ }
+ if len(t) > 0 {
+ return false
+ }
+ return true
+}
+
+// asciiEqualFold is a specialization of bytes.EqualFold for use when
+// s is all ASCII (but may contain non-letters) and contains no
+// special-folding letters.
+// See comments on foldFunc.
+func asciiEqualFold(s, t []byte) bool {
+ if len(s) != len(t) {
+ return false
+ }
+ for i, sb := range s {
+ tb := t[i]
+ if sb == tb {
+ continue
+ }
+ if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') {
+ if sb&caseMask != tb&caseMask {
+ return false
+ }
+ } else {
+ return false
+ }
+ }
+ return true
+}
+
+// simpleLetterEqualFold is a specialization of bytes.EqualFold for
+// use when s is all ASCII letters (no underscores, etc) and also
+// doesn't contain 'k', 'K', 's', or 'S'.
+// See comments on foldFunc.
+func simpleLetterEqualFold(s, t []byte) bool {
+ if len(s) != len(t) {
+ return false
+ }
+ for i, b := range s {
+ if b&caseMask != t[i]&caseMask {
+ return false
+ }
+ }
+ return true
+}
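
The fold helpers above are unexported, so any demonstration has to live inside the package. The hypothetical _test.go sketch below shows which matcher foldFunc selects for two kinds of keys, and why 'K' forces the slower Unicode-aware path.

    package json

    import "testing"

    func TestFoldFuncSelectionSketch(t *testing.T) {
        // "Kelvin" contains 'K', so foldFunc picks equalFoldRight, which knows
        // that 'K' also folds to U+212A (the Kelvin sign).
        fn := foldFunc([]byte("Kelvin"))
        if !fn([]byte("Kelvin"), []byte("\u212Aelvin")) {
            t.Fatal("expected 'K' to match the Kelvin sign U+212A")
        }

        // "tag_no" is ASCII with a non-letter and no 's'/'S'/'k'/'K', so
        // foldFunc picks the cheaper asciiEqualFold.
        fn = foldFunc([]byte("tag_no"))
        if !fn([]byte("tag_no"), []byte("TAG_NO")) {
            t.Fatal("expected ASCII case-insensitive match")
        }
    }
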
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/fold_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/fold_test.go
new file mode 100644
index 00000000000..9fb94646a85
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/fold_test.go
@@ -0,0 +1,116 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "bytes"
+ "strings"
+ "testing"
+ "unicode/utf8"
+)
+
+var foldTests = []struct {
+ fn func(s, t []byte) bool
+ s, t string
+ want bool
+}{
+ {equalFoldRight, "", "", true},
+ {equalFoldRight, "a", "a", true},
+ {equalFoldRight, "", "a", false},
+ {equalFoldRight, "a", "", false},
+ {equalFoldRight, "a", "A", true},
+ {equalFoldRight, "AB", "ab", true},
+ {equalFoldRight, "AB", "ac", false},
+ {equalFoldRight, "sbkKc", "ſbKKc", true},
+ {equalFoldRight, "SbKkc", "ſbKKc", true},
+ {equalFoldRight, "SbKkc", "ſbKK", false},
+ {equalFoldRight, "e", "é", false},
+ {equalFoldRight, "s", "S", true},
+
+ {simpleLetterEqualFold, "", "", true},
+ {simpleLetterEqualFold, "abc", "abc", true},
+ {simpleLetterEqualFold, "abc", "ABC", true},
+ {simpleLetterEqualFold, "abc", "ABCD", false},
+ {simpleLetterEqualFold, "abc", "xxx", false},
+
+ {asciiEqualFold, "a_B", "A_b", true},
+ {asciiEqualFold, "aa@", "aa`", false}, // verify 0x40 and 0x60 aren't case-equivalent
+}
+
+func TestFold(t *testing.T) {
+ for i, tt := range foldTests {
+ if got := tt.fn([]byte(tt.s), []byte(tt.t)); got != tt.want {
+ t.Errorf("%d. %q, %q = %v; want %v", i, tt.s, tt.t, got, tt.want)
+ }
+ truth := strings.EqualFold(tt.s, tt.t)
+ if truth != tt.want {
+ t.Errorf("strings.EqualFold doesn't agree with case %d", i)
+ }
+ }
+}
+
+func TestFoldAgainstUnicode(t *testing.T) {
+ const bufSize = 5
+ buf1 := make([]byte, 0, bufSize)
+ buf2 := make([]byte, 0, bufSize)
+ var runes []rune
+ for i := 0x20; i <= 0x7f; i++ {
+ runes = append(runes, rune(i))
+ }
+ runes = append(runes, kelvin, smallLongEss)
+
+ funcs := []struct {
+ name string
+ fold func(s, t []byte) bool
+ letter bool // must be ASCII letter
+ simple bool // must be simple ASCII letter (not 'S' or 'K')
+ }{
+ {
+ name: "equalFoldRight",
+ fold: equalFoldRight,
+ },
+ {
+ name: "asciiEqualFold",
+ fold: asciiEqualFold,
+ simple: true,
+ },
+ {
+ name: "simpleLetterEqualFold",
+ fold: simpleLetterEqualFold,
+ simple: true,
+ letter: true,
+ },
+ }
+
+ for _, ff := range funcs {
+ for _, r := range runes {
+ if r >= utf8.RuneSelf {
+ continue
+ }
+ if ff.letter && !isASCIILetter(byte(r)) {
+ continue
+ }
+ if ff.simple && (r == 's' || r == 'S' || r == 'k' || r == 'K') {
+ continue
+ }
+ for _, r2 := range runes {
+ buf1 := append(buf1[:0], 'x')
+ buf2 := append(buf2[:0], 'x')
+ buf1 = buf1[:1+utf8.EncodeRune(buf1[1:bufSize], r)]
+ buf2 = buf2[:1+utf8.EncodeRune(buf2[1:bufSize], r2)]
+ buf1 = append(buf1, 'x')
+ buf2 = append(buf2, 'x')
+ want := bytes.EqualFold(buf1, buf2)
+ if got := ff.fold(buf1, buf2); got != want {
+ t.Errorf("%s(%q, %q) = %v; want %v", ff.name, buf1, buf2, got, want)
+ }
+ }
+ }
+ }
+}
+
+func isASCIILetter(b byte) bool {
+ return ('A' <= b && b <= 'Z') || ('a' <= b && b <= 'z')
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/indent.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/indent.go
new file mode 100644
index 00000000000..fba19548c92
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/indent.go
@@ -0,0 +1,141 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import "bytes"
+
+// Compact appends to dst the JSON-encoded src with
+// insignificant space characters elided.
+func Compact(dst *bytes.Buffer, src []byte) error {
+ return compact(dst, src, false)
+}
+
+func compact(dst *bytes.Buffer, src []byte, escape bool) error {
+ origLen := dst.Len()
+ var scan scanner
+ scan.reset()
+ start := 0
+ for i, c := range src {
+ if escape && (c == '<' || c == '>' || c == '&') {
+ if start < i {
+ dst.Write(src[start:i])
+ }
+ dst.WriteString(`\u00`)
+ dst.WriteByte(hex[c>>4])
+ dst.WriteByte(hex[c&0xF])
+ start = i + 1
+ }
+ // Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
+ if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
+ if start < i {
+ dst.Write(src[start:i])
+ }
+ dst.WriteString(`\u202`)
+ dst.WriteByte(hex[src[i+2]&0xF])
+ start = i + 3
+ }
+ v := scan.step(&scan, c)
+ if v >= scanSkipSpace {
+ if v == scanError {
+ break
+ }
+ if start < i {
+ dst.Write(src[start:i])
+ }
+ start = i + 1
+ }
+ }
+ if scan.eof() == scanError {
+ dst.Truncate(origLen)
+ return scan.err
+ }
+ if start < len(src) {
+ dst.Write(src[start:])
+ }
+ return nil
+}
+
+func newline(dst *bytes.Buffer, prefix, indent string, depth int) {
+ dst.WriteByte('\n')
+ dst.WriteString(prefix)
+ for i := 0; i < depth; i++ {
+ dst.WriteString(indent)
+ }
+}
+
+// Indent appends to dst an indented form of the JSON-encoded src.
+// Each element in a JSON object or array begins on a new,
+// indented line beginning with prefix followed by one or more
+// copies of indent according to the indentation nesting.
+// The data appended to dst does not begin with the prefix nor
+// any indentation, to make it easier to embed inside other formatted JSON data.
+// Although leading space characters (space, tab, carriage return, newline)
+// at the beginning of src are dropped, trailing space characters
+// at the end of src are preserved and copied to dst.
+// For example, if src has no trailing spaces, neither will dst;
+// if src ends in a trailing newline, so will dst.
+func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
+ origLen := dst.Len()
+ var scan scanner
+ scan.reset()
+ needIndent := false
+ depth := 0
+ for _, c := range src {
+ scan.bytes++
+ v := scan.step(&scan, c)
+ if v == scanSkipSpace {
+ continue
+ }
+ if v == scanError {
+ break
+ }
+ if needIndent && v != scanEndObject && v != scanEndArray {
+ needIndent = false
+ depth++
+ newline(dst, prefix, indent, depth)
+ }
+
+ // Emit semantically uninteresting bytes
+ // (in particular, punctuation in strings) unmodified.
+ if v == scanContinue {
+ dst.WriteByte(c)
+ continue
+ }
+
+ // Add spacing around real punctuation.
+ switch c {
+ case '{', '[':
+ // delay indent so that empty object and array are formatted as {} and [].
+ needIndent = true
+ dst.WriteByte(c)
+
+ case ',':
+ dst.WriteByte(c)
+ newline(dst, prefix, indent, depth)
+
+ case ':':
+ dst.WriteByte(c)
+ dst.WriteByte(' ')
+
+ case '}', ']':
+ if needIndent {
+ // suppress indent in empty object/array
+ needIndent = false
+ } else {
+ depth--
+ newline(dst, prefix, indent, depth)
+ }
+ dst.WriteByte(c)
+
+ default:
+ dst.WriteByte(c)
+ }
+ }
+ if scan.eof() == scanError {
+ dst.Truncate(origLen)
+ return scan.err
+ }
+ return nil
+}
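
A short sketch of the two exported helpers above, with expected output in comments; as before the import path is only illustrative, because the package is internal to mgo.v2.

    package main

    import (
        "bytes"
        "fmt"

        json "gopkg.in/mgo.v2/internal/json" // illustrative: not importable from outside mgo.v2
    )

    func main() {
        src := []byte("{ \"a\": [1, 2],  \"b\": \"<ok>\" }")

        // Compact strips insignificant whitespace; the exported entry point
        // passes escape=false, so '<', '>' and '&' are left alone.
        var compacted bytes.Buffer
        if err := json.Compact(&compacted, src); err != nil {
            panic(err)
        }
        fmt.Println(compacted.String()) // {"a":[1,2],"b":"<ok>"}

        // Indent re-expands it, one element per line, two spaces per level.
        var indented bytes.Buffer
        if err := json.Indent(&indented, compacted.Bytes(), "", "  "); err != nil {
            panic(err)
        }
        fmt.Println(indented.String())
    }
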
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/number_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/number_test.go
new file mode 100644
index 00000000000..4b869996388
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/number_test.go
@@ -0,0 +1,133 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "regexp"
+ "testing"
+)
+
+func TestNumberIsValid(t *testing.T) {
+ // From: http://stackoverflow.com/a/13340826
+ var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`)
+
+ validTests := []string{
+ "0",
+ "-0",
+ "1",
+ "-1",
+ "0.1",
+ "-0.1",
+ "1234",
+ "-1234",
+ "12.34",
+ "-12.34",
+ "12E0",
+ "12E1",
+ "12e34",
+ "12E-0",
+ "12e+1",
+ "12e-34",
+ "-12E0",
+ "-12E1",
+ "-12e34",
+ "-12E-0",
+ "-12e+1",
+ "-12e-34",
+ "1.2E0",
+ "1.2E1",
+ "1.2e34",
+ "1.2E-0",
+ "1.2e+1",
+ "1.2e-34",
+ "-1.2E0",
+ "-1.2E1",
+ "-1.2e34",
+ "-1.2E-0",
+ "-1.2e+1",
+ "-1.2e-34",
+ "0E0",
+ "0E1",
+ "0e34",
+ "0E-0",
+ "0e+1",
+ "0e-34",
+ "-0E0",
+ "-0E1",
+ "-0e34",
+ "-0E-0",
+ "-0e+1",
+ "-0e-34",
+ }
+
+ for _, test := range validTests {
+ if !isValidNumber(test) {
+ t.Errorf("%s should be valid", test)
+ }
+
+ var f float64
+ if err := Unmarshal([]byte(test), &f); err != nil {
+ t.Errorf("%s should be valid but Unmarshal failed: %v", test, err)
+ }
+
+ if !jsonNumberRegexp.MatchString(test) {
+ t.Errorf("%s should be valid but regexp does not match", test)
+ }
+ }
+
+ invalidTests := []string{
+ "",
+ "invalid",
+ "1.0.1",
+ "1..1",
+ "-1-2",
+ "012a42",
+ "01.2",
+ "012",
+ "12E12.12",
+ "1e2e3",
+ "1e+-2",
+ "1e--23",
+ "1e",
+ "e1",
+ "1e+",
+ "1ea",
+ "1a",
+ "1.a",
+ "1.",
+ "01",
+ "1.e1",
+ }
+
+ for _, test := range invalidTests {
+ if isValidNumber(test) {
+ t.Errorf("%s should be invalid", test)
+ }
+
+ var f float64
+ if err := Unmarshal([]byte(test), &f); err == nil {
+ t.Errorf("%s should be invalid but unmarshal wrote %v", test, f)
+ }
+
+ if jsonNumberRegexp.MatchString(test) {
+ t.Errorf("%s should be invalid but matches regexp", test)
+ }
+ }
+}
+
+func BenchmarkNumberIsValid(b *testing.B) {
+ s := "-61657.61667E+61673"
+ for i := 0; i < b.N; i++ {
+ isValidNumber(s)
+ }
+}
+
+func BenchmarkNumberIsValidRegexp(b *testing.B) {
+ var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`)
+ s := "-61657.61667E+61673"
+ for i := 0; i < b.N; i++ {
+ jsonNumberRegexp.MatchString(s)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/scanner.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/scanner.go
new file mode 100644
index 00000000000..97080438873
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/scanner.go
@@ -0,0 +1,697 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+// JSON value parser state machine.
+// Just about at the limit of what is reasonable to write by hand.
+// Some parts are a bit tedious, but overall it nicely factors out the
+// otherwise common code from the multiple scanning functions
+// in this package (Compact, Indent, checkValid, nextValue, etc).
+//
+// This file starts with two simple examples using the scanner
+// before diving into the scanner itself.
+
+import "strconv"
+
+// checkValid verifies that data is valid JSON-encoded data.
+// scan is passed in for use by checkValid to avoid an allocation.
+func checkValid(data []byte, scan *scanner) error {
+ scan.reset()
+ for _, c := range data {
+ scan.bytes++
+ if scan.step(scan, c) == scanError {
+ return scan.err
+ }
+ }
+ if scan.eof() == scanError {
+ return scan.err
+ }
+ return nil
+}
+
+// nextValue splits data after the next whole JSON value,
+// returning that value and the bytes that follow it as separate slices.
+// scan is passed in for use by nextValue to avoid an allocation.
+func nextValue(data []byte, scan *scanner) (value, rest []byte, err error) {
+ scan.reset()
+ for i, c := range data {
+ v := scan.step(scan, c)
+ if v >= scanEndObject {
+ switch v {
+ // probe the scanner with a space to determine whether we will
+ // get scanEnd on the next character. Otherwise, if the next character
+ // is not a space, scanEndTop allocates a needless error.
+ case scanEndObject, scanEndArray, scanEndParams:
+ if scan.step(scan, ' ') == scanEnd {
+ return data[:i+1], data[i+1:], nil
+ }
+ case scanError:
+ return nil, nil, scan.err
+ case scanEnd:
+ return data[:i], data[i:], nil
+ }
+ }
+ }
+ if scan.eof() == scanError {
+ return nil, nil, scan.err
+ }
+ return data, nil, nil
+}
+
+// A SyntaxError is a description of a JSON syntax error.
+type SyntaxError struct {
+ msg string // description of error
+ Offset int64 // error occurred after reading Offset bytes
+}
+
+func (e *SyntaxError) Error() string { return e.msg }
+
+// A scanner is a JSON scanning state machine.
+// Callers call scan.reset() and then pass bytes in one at a time
+// by calling scan.step(&scan, c) for each byte.
+// The return value, referred to as an opcode, tells the
+// caller about significant parsing events like beginning
+// and ending literals, objects, and arrays, so that the
+// caller can follow along if it wishes.
+// The return value scanEnd indicates that a single top-level
+// JSON value has been completed, *before* the byte that
+// just got passed in. (The indication must be delayed in order
+// to recognize the end of numbers: is 123 a whole value or
+// the beginning of 12345e+6?).
+type scanner struct {
+ // The step is a func to be called to execute the next transition.
+ // Also tried using an integer constant and a single func
+ // with a switch, but using the func directly was 10% faster
+ // on a 64-bit Mac Mini, and it's nicer to read.
+ step func(*scanner, byte) int
+
+ // Reached end of top-level value.
+ endTop bool
+
+ // Stack of what we're in the middle of - array values, object keys, object values.
+ parseState []int
+
+ // Error that happened, if any.
+ err error
+
+ // 1-byte redo (see undo method)
+ redo bool
+ redoCode int
+ redoState func(*scanner, byte) int
+
+ // total bytes consumed, updated by decoder.Decode
+ bytes int64
+}
+
+// These values are returned by the state transition functions
+// assigned to scanner.state and the method scanner.eof.
+// They give details about the current state of the scan that
+// callers might be interested to know about.
+// It is okay to ignore the return value of any particular
+// call to scanner.state: if one call returns scanError,
+// every subsequent call will return scanError too.
+const (
+ // Continue.
+ scanContinue = iota // uninteresting byte
+ scanBeginLiteral // end implied by next result != scanContinue
+ scanBeginObject // begin object
+ scanObjectKey // just finished object key (string)
+ scanObjectValue // just finished non-last object value
+ scanEndObject // end object (implies scanObjectValue if possible)
+ scanBeginArray // begin array
+ scanArrayValue // just finished array value
+ scanEndArray // end array (implies scanArrayValue if possible)
+ scanBeginName // begin function call
+ scanParam // begin function argument
+ scanEndParams // end function call
+ scanSkipSpace // space byte; can skip; known to be last "continue" result
+
+ // Stop.
+ scanEnd // top-level value ended *before* this byte; known to be first "stop" result
+ scanError // hit an error, scanner.err.
+)
+
+// These values are stored in the parseState stack.
+// They give the current state of a composite value
+// being scanned. If the parser is inside a nested value
+// the parseState describes the nested state, outermost at entry 0.
+const (
+ parseObjectKey = iota // parsing object key (before colon)
+ parseObjectValue // parsing object value (after colon)
+ parseArrayValue // parsing array value
+ parseName // parsing unquoted name
+ parseParam // parsing function argument value
+)
+
+// reset prepares the scanner for use.
+// It must be called before calling s.step.
+func (s *scanner) reset() {
+ s.step = stateBeginValue
+ s.parseState = s.parseState[0:0]
+ s.err = nil
+ s.redo = false
+ s.endTop = false
+}
+
+// eof tells the scanner that the end of input has been reached.
+// It returns a scan status just as s.step does.
+func (s *scanner) eof() int {
+ if s.err != nil {
+ return scanError
+ }
+ if s.endTop {
+ return scanEnd
+ }
+ s.step(s, ' ')
+ if s.endTop {
+ return scanEnd
+ }
+ if s.err == nil {
+ s.err = &SyntaxError{"unexpected end of JSON input", s.bytes}
+ }
+ return scanError
+}
+
+// pushParseState pushes a new parse state p onto the parse stack.
+func (s *scanner) pushParseState(p int) {
+ s.parseState = append(s.parseState, p)
+}
+
+// popParseState pops a parse state (already obtained) off the stack
+// and updates s.step accordingly.
+func (s *scanner) popParseState() {
+ n := len(s.parseState) - 1
+ s.parseState = s.parseState[0:n]
+ s.redo = false
+ if n == 0 {
+ s.step = stateEndTop
+ s.endTop = true
+ } else {
+ s.step = stateEndValue
+ }
+}
+
+func isSpace(c byte) bool {
+ return c == ' ' || c == '\t' || c == '\r' || c == '\n'
+}
+
+// stateBeginValueOrEmpty is the state after reading `[`.
+func stateBeginValueOrEmpty(s *scanner, c byte) int {
+ if c <= ' ' && isSpace(c) {
+ return scanSkipSpace
+ }
+ if c == ']' {
+ return stateEndValue(s, c)
+ }
+ return stateBeginValue(s, c)
+}
+
+// stateBeginValue is the state at the beginning of the input.
+func stateBeginValue(s *scanner, c byte) int {
+ if c <= ' ' && isSpace(c) {
+ return scanSkipSpace
+ }
+ switch c {
+ case '{':
+ s.step = stateBeginStringOrEmpty
+ s.pushParseState(parseObjectKey)
+ return scanBeginObject
+ case '[':
+ s.step = stateBeginValueOrEmpty
+ s.pushParseState(parseArrayValue)
+ return scanBeginArray
+ case '"':
+ s.step = stateInString
+ return scanBeginLiteral
+ case '-':
+ s.step = stateNeg
+ return scanBeginLiteral
+ case '0': // beginning of 0.123
+ s.step = state0
+ return scanBeginLiteral
+ case 'n':
+ s.step = stateNew0
+ return scanBeginName
+ }
+ if '1' <= c && c <= '9' { // beginning of 1234.5
+ s.step = state1
+ return scanBeginLiteral
+ }
+ if isName(c) {
+ s.step = stateName
+ return scanBeginName
+ }
+ return s.error(c, "looking for beginning of value")
+}
+
+func isName(c byte) bool {
+ return c == '$' || c == '_' || 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9'
+}
+
+// stateBeginStringOrEmpty is the state after reading `{`.
+func stateBeginStringOrEmpty(s *scanner, c byte) int {
+ if c <= ' ' && isSpace(c) {
+ return scanSkipSpace
+ }
+ if c == '}' {
+ n := len(s.parseState)
+ s.parseState[n-1] = parseObjectValue
+ return stateEndValue(s, c)
+ }
+ return stateBeginString(s, c)
+}
+
+// stateBeginString is the state after reading `{"key": value,`.
+func stateBeginString(s *scanner, c byte) int {
+ if c <= ' ' && isSpace(c) {
+ return scanSkipSpace
+ }
+ if c == '"' {
+ s.step = stateInString
+ return scanBeginLiteral
+ }
+ if isName(c) {
+ s.step = stateName
+ return scanBeginName
+ }
+ return s.error(c, "looking for beginning of object key string")
+}
+
+// stateEndValue is the state after completing a value,
+// such as after reading `{}` or `true` or `["x"`.
+func stateEndValue(s *scanner, c byte) int {
+ n := len(s.parseState)
+ if n == 0 {
+ // Completed top-level before the current byte.
+ s.step = stateEndTop
+ s.endTop = true
+ return stateEndTop(s, c)
+ }
+ if c <= ' ' && isSpace(c) {
+ s.step = stateEndValue
+ return scanSkipSpace
+ }
+ ps := s.parseState[n-1]
+ switch ps {
+ case parseObjectKey:
+ if c == ':' {
+ s.parseState[n-1] = parseObjectValue
+ s.step = stateBeginValue
+ return scanObjectKey
+ }
+ return s.error(c, "after object key")
+ case parseObjectValue:
+ if c == ',' {
+ s.parseState[n-1] = parseObjectKey
+ s.step = stateBeginStringOrEmpty
+ return scanObjectValue
+ }
+ if c == '}' {
+ s.popParseState()
+ return scanEndObject
+ }
+ return s.error(c, "after object key:value pair")
+ case parseArrayValue:
+ if c == ',' {
+ s.step = stateBeginValueOrEmpty
+ return scanArrayValue
+ }
+ if c == ']' {
+ s.popParseState()
+ return scanEndArray
+ }
+ return s.error(c, "after array element")
+ case parseParam:
+ if c == ',' {
+ s.step = stateBeginValue
+ return scanParam
+ }
+ if c == ')' {
+ s.popParseState()
+ return scanEndParams
+ }
+ return s.error(c, "after array element")
+ }
+ return s.error(c, "")
+}
+
+// stateEndTop is the state after finishing the top-level value,
+// such as after reading `{}` or `[1,2,3]`.
+// Only space characters should be seen now.
+func stateEndTop(s *scanner, c byte) int {
+ if c != ' ' && c != '\t' && c != '\r' && c != '\n' {
+ // Complain about non-space byte on next call.
+ s.error(c, "after top-level value")
+ }
+ return scanEnd
+}
+
+// stateInString is the state after reading `"`.
+func stateInString(s *scanner, c byte) int {
+ if c == '"' {
+ s.step = stateEndValue
+ return scanContinue
+ }
+ if c == '\\' {
+ s.step = stateInStringEsc
+ return scanContinue
+ }
+ if c < 0x20 {
+ return s.error(c, "in string literal")
+ }
+ return scanContinue
+}
+
+// stateInStringEsc is the state after reading `"\` during a quoted string.
+func stateInStringEsc(s *scanner, c byte) int {
+ switch c {
+ case 'b', 'f', 'n', 'r', 't', '\\', '/', '"':
+ s.step = stateInString
+ return scanContinue
+ case 'u':
+ s.step = stateInStringEscU
+ return scanContinue
+ }
+ return s.error(c, "in string escape code")
+}
+
+// stateInStringEscU is the state after reading `"\u` during a quoted string.
+func stateInStringEscU(s *scanner, c byte) int {
+ if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
+ s.step = stateInStringEscU1
+ return scanContinue
+ }
+ // numbers
+ return s.error(c, "in \\u hexadecimal character escape")
+}
+
+// stateInStringEscU1 is the state after reading `"\u1` during a quoted string.
+func stateInStringEscU1(s *scanner, c byte) int {
+ if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
+ s.step = stateInStringEscU12
+ return scanContinue
+ }
+ // numbers
+ return s.error(c, "in \\u hexadecimal character escape")
+}
+
+// stateInStringEscU12 is the state after reading `"\u12` during a quoted string.
+func stateInStringEscU12(s *scanner, c byte) int {
+ if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
+ s.step = stateInStringEscU123
+ return scanContinue
+ }
+ // numbers
+ return s.error(c, "in \\u hexadecimal character escape")
+}
+
+// stateInStringEscU123 is the state after reading `"\u123` during a quoted string.
+func stateInStringEscU123(s *scanner, c byte) int {
+ if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
+ s.step = stateInString
+ return scanContinue
+ }
+ // numbers
+ return s.error(c, "in \\u hexadecimal character escape")
+}
+
+// stateNeg is the state after reading `-` during a number.
+func stateNeg(s *scanner, c byte) int {
+ if c == '0' {
+ s.step = state0
+ return scanContinue
+ }
+ if '1' <= c && c <= '9' {
+ s.step = state1
+ return scanContinue
+ }
+ return s.error(c, "in numeric literal")
+}
+
+// state1 is the state after reading a non-zero integer during a number,
+// such as after reading `1` or `100` but not `0`.
+func state1(s *scanner, c byte) int {
+ if '0' <= c && c <= '9' {
+ s.step = state1
+ return scanContinue
+ }
+ return state0(s, c)
+}
+
+// state0 is the state after reading `0` during a number.
+func state0(s *scanner, c byte) int {
+ if c == '.' {
+ s.step = stateDot
+ return scanContinue
+ }
+ if c == 'e' || c == 'E' {
+ s.step = stateE
+ return scanContinue
+ }
+ return stateEndValue(s, c)
+}
+
+// stateDot is the state after reading the integer and decimal point in a number,
+// such as after reading `1.`.
+func stateDot(s *scanner, c byte) int {
+ if '0' <= c && c <= '9' {
+ s.step = stateDot0
+ return scanContinue
+ }
+ return s.error(c, "after decimal point in numeric literal")
+}
+
+// stateDot0 is the state after reading the integer, decimal point, and subsequent
+// digits of a number, such as after reading `3.14`.
+func stateDot0(s *scanner, c byte) int {
+ if '0' <= c && c <= '9' {
+ return scanContinue
+ }
+ if c == 'e' || c == 'E' {
+ s.step = stateE
+ return scanContinue
+ }
+ return stateEndValue(s, c)
+}
+
+// stateE is the state after reading the mantissa and e in a number,
+// such as after reading `314e` or `0.314e`.
+func stateE(s *scanner, c byte) int {
+ if c == '+' || c == '-' {
+ s.step = stateESign
+ return scanContinue
+ }
+ return stateESign(s, c)
+}
+
+// stateESign is the state after reading the mantissa, e, and sign in a number,
+// such as after reading `314e-` or `0.314e+`.
+func stateESign(s *scanner, c byte) int {
+ if '0' <= c && c <= '9' {
+ s.step = stateE0
+ return scanContinue
+ }
+ return s.error(c, "in exponent of numeric literal")
+}
+
+// stateE0 is the state after reading the mantissa, e, optional sign,
+// and at least one digit of the exponent in a number,
+// such as after reading `314e-2` or `0.314e+1` or `3.14e0`.
+func stateE0(s *scanner, c byte) int {
+ if '0' <= c && c <= '9' {
+ return scanContinue
+ }
+ return stateEndValue(s, c)
+}
+
+// stateNew0 is the state after reading `n`.
+func stateNew0(s *scanner, c byte) int {
+ if c == 'e' {
+ s.step = stateNew1
+ return scanContinue
+ }
+ s.step = stateName
+ return stateName(s, c)
+}
+
+// stateNew1 is the state after reading `ne`.
+func stateNew1(s *scanner, c byte) int {
+ if c == 'w' {
+ s.step = stateNew2
+ return scanContinue
+ }
+ s.step = stateName
+ return stateName(s, c)
+}
+
+// stateNew2 is the state after reading `new`.
+func stateNew2(s *scanner, c byte) int {
+ s.step = stateName
+ if c == ' ' {
+ return scanContinue
+ }
+ return stateName(s, c)
+}
+
+// stateName is the state while reading an unquoted function name.
+func stateName(s *scanner, c byte) int {
+ if isName(c) {
+ return scanContinue
+ }
+ if c == '(' {
+ s.step = stateParamOrEmpty
+ s.pushParseState(parseParam)
+ return scanParam
+ }
+ return stateEndValue(s, c)
+}
+
+// stateParamOrEmpty is the state after reading `(`.
+func stateParamOrEmpty(s *scanner, c byte) int {
+ if c <= ' ' && isSpace(c) {
+ return scanSkipSpace
+ }
+ if c == ')' {
+ return stateEndValue(s, c)
+ }
+ return stateBeginValue(s, c)
+}
+
+// stateT is the state after reading `t`.
+func stateT(s *scanner, c byte) int {
+ if c == 'r' {
+ s.step = stateTr
+ return scanContinue
+ }
+ return s.error(c, "in literal true (expecting 'r')")
+}
+
+// stateTr is the state after reading `tr`.
+func stateTr(s *scanner, c byte) int {
+ if c == 'u' {
+ s.step = stateTru
+ return scanContinue
+ }
+ return s.error(c, "in literal true (expecting 'u')")
+}
+
+// stateTru is the state after reading `tru`.
+func stateTru(s *scanner, c byte) int {
+ if c == 'e' {
+ s.step = stateEndValue
+ return scanContinue
+ }
+ return s.error(c, "in literal true (expecting 'e')")
+}
+
+// stateF is the state after reading `f`.
+func stateF(s *scanner, c byte) int {
+ if c == 'a' {
+ s.step = stateFa
+ return scanContinue
+ }
+ return s.error(c, "in literal false (expecting 'a')")
+}
+
+// stateFa is the state after reading `fa`.
+func stateFa(s *scanner, c byte) int {
+ if c == 'l' {
+ s.step = stateFal
+ return scanContinue
+ }
+ return s.error(c, "in literal false (expecting 'l')")
+}
+
+// stateFal is the state after reading `fal`.
+func stateFal(s *scanner, c byte) int {
+ if c == 's' {
+ s.step = stateFals
+ return scanContinue
+ }
+ return s.error(c, "in literal false (expecting 's')")
+}
+
+// stateFals is the state after reading `fals`.
+func stateFals(s *scanner, c byte) int {
+ if c == 'e' {
+ s.step = stateEndValue
+ return scanContinue
+ }
+ return s.error(c, "in literal false (expecting 'e')")
+}
+
+// stateN is the state after reading `n`.
+func stateN(s *scanner, c byte) int {
+ if c == 'u' {
+ s.step = stateNu
+ return scanContinue
+ }
+ return s.error(c, "in literal null (expecting 'u')")
+}
+
+// stateNu is the state after reading `nu`.
+func stateNu(s *scanner, c byte) int {
+ if c == 'l' {
+ s.step = stateNul
+ return scanContinue
+ }
+ return s.error(c, "in literal null (expecting 'l')")
+}
+
+// stateNul is the state after reading `nul`.
+func stateNul(s *scanner, c byte) int {
+ if c == 'l' {
+ s.step = stateEndValue
+ return scanContinue
+ }
+ return s.error(c, "in literal null (expecting 'l')")
+}
+
+// stateError is the state after reaching a syntax error,
+// such as after reading `[1}` or `5.1.2`.
+func stateError(s *scanner, c byte) int {
+ return scanError
+}
+
+// error records an error and switches to the error state.
+func (s *scanner) error(c byte, context string) int {
+ s.step = stateError
+ s.err = &SyntaxError{"invalid character " + quoteChar(c) + " " + context, s.bytes}
+ return scanError
+}
+
+// quoteChar formats c as a quoted character literal
+func quoteChar(c byte) string {
+ // special cases - different from quoted strings
+ if c == '\'' {
+ return `'\''`
+ }
+ if c == '"' {
+ return `'"'`
+ }
+
+ // use quoted string with different quotation marks
+ s := strconv.Quote(string(c))
+ return "'" + s[1:len(s)-1] + "'"
+}
+
+// undo causes the scanner to return scanCode from the next state transition.
+// This gives callers a simple 1-byte undo mechanism.
+func (s *scanner) undo(scanCode int) {
+ if s.redo {
+ panic("json: invalid use of scanner")
+ }
+ s.redoCode = scanCode
+ s.redoState = s.step
+ s.step = stateRedo
+ s.redo = true
+}
+
+// stateRedo helps implement the scanner's 1-byte undo.
+func stateRedo(s *scanner, c byte) int {
+ s.redo = false
+ s.step = s.redoState
+ return s.redoCode
+}
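
Because the scanner is unexported, the sketch below is written as a hypothetical in-package _test.go. It drives the state machine byte by byte the same way checkValid does, and shows that the scanner itself already accepts the extended unquoted-name and function-call syntax (the Extension flags only gate what the decoder will accept).

    package json

    import "testing"

    // validLike reports whether data scans as a single value under this
    // package's extended grammar; a hypothetical helper mirroring checkValid.
    func validLike(data []byte) bool {
        var s scanner
        s.reset()
        for _, c := range data {
            s.bytes++
            if s.step(&s, c) == scanError {
                return false
            }
        }
        return s.eof() != scanError
    }

    func TestScannerSketch(t *testing.T) {
        if !validLike([]byte(`{"n": 1}`)) {
            t.Error("plain JSON should scan cleanly")
        }
        if !validLike([]byte(`{size: NumberLong(1)}`)) {
            t.Error("the scanner accepts unquoted names and calls; the decoder gates them")
        }
        if validLike([]byte(`[1}`)) {
            t.Error("mismatched delimiters should be a syntax error")
        }
    }
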
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/scanner_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/scanner_test.go
new file mode 100644
index 00000000000..70a28974f78
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/scanner_test.go
@@ -0,0 +1,316 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "bytes"
+ "math"
+ "math/rand"
+ "reflect"
+ "testing"
+)
+
+// Tests of simple examples.
+
+type example struct {
+ compact string
+ indent string
+}
+
+var examples = []example{
+ {`1`, `1`},
+ {`{}`, `{}`},
+ {`[]`, `[]`},
+ {`{"":2}`, "{\n\t\"\": 2\n}"},
+ {`[3]`, "[\n\t3\n]"},
+ {`[1,2,3]`, "[\n\t1,\n\t2,\n\t3\n]"},
+ {`{"x":1}`, "{\n\t\"x\": 1\n}"},
+ {ex1, ex1i},
+}
+
+var ex1 = `[true,false,null,"x",1,1.5,0,-5e+2]`
+
+var ex1i = `[
+ true,
+ false,
+ null,
+ "x",
+ 1,
+ 1.5,
+ 0,
+ -5e+2
+]`
+
+func TestCompact(t *testing.T) {
+ var buf bytes.Buffer
+ for _, tt := range examples {
+ buf.Reset()
+ if err := Compact(&buf, []byte(tt.compact)); err != nil {
+ t.Errorf("Compact(%#q): %v", tt.compact, err)
+ } else if s := buf.String(); s != tt.compact {
+ t.Errorf("Compact(%#q) = %#q, want original", tt.compact, s)
+ }
+
+ buf.Reset()
+ if err := Compact(&buf, []byte(tt.indent)); err != nil {
+ t.Errorf("Compact(%#q): %v", tt.indent, err)
+ continue
+ } else if s := buf.String(); s != tt.compact {
+ t.Errorf("Compact(%#q) = %#q, want %#q", tt.indent, s, tt.compact)
+ }
+ }
+}
+
+func TestCompactSeparators(t *testing.T) {
+ // U+2028 and U+2029 should be escaped inside strings.
+ // They should not appear outside strings.
+ tests := []struct {
+ in, compact string
+ }{
+ {"{\"\u2028\": 1}", `{"\u2028":1}`},
+ {"{\"\u2029\" :2}", `{"\u2029":2}`},
+ }
+ for _, tt := range tests {
+ var buf bytes.Buffer
+ if err := Compact(&buf, []byte(tt.in)); err != nil {
+ t.Errorf("Compact(%q): %v", tt.in, err)
+ } else if s := buf.String(); s != tt.compact {
+ t.Errorf("Compact(%q) = %q, want %q", tt.in, s, tt.compact)
+ }
+ }
+}
+
+func TestIndent(t *testing.T) {
+ var buf bytes.Buffer
+ for _, tt := range examples {
+ buf.Reset()
+ if err := Indent(&buf, []byte(tt.indent), "", "\t"); err != nil {
+ t.Errorf("Indent(%#q): %v", tt.indent, err)
+ } else if s := buf.String(); s != tt.indent {
+ t.Errorf("Indent(%#q) = %#q, want original", tt.indent, s)
+ }
+
+ buf.Reset()
+ if err := Indent(&buf, []byte(tt.compact), "", "\t"); err != nil {
+ t.Errorf("Indent(%#q): %v", tt.compact, err)
+ continue
+ } else if s := buf.String(); s != tt.indent {
+ t.Errorf("Indent(%#q) = %#q, want %#q", tt.compact, s, tt.indent)
+ }
+ }
+}
+
+// Tests of a large random structure.
+
+func TestCompactBig(t *testing.T) {
+ initBig()
+ var buf bytes.Buffer
+ if err := Compact(&buf, jsonBig); err != nil {
+ t.Fatalf("Compact: %v", err)
+ }
+ b := buf.Bytes()
+ if !bytes.Equal(b, jsonBig) {
+ t.Error("Compact(jsonBig) != jsonBig")
+ diff(t, b, jsonBig)
+ return
+ }
+}
+
+func TestIndentBig(t *testing.T) {
+ initBig()
+ var buf bytes.Buffer
+ if err := Indent(&buf, jsonBig, "", "\t"); err != nil {
+ t.Fatalf("Indent1: %v", err)
+ }
+ b := buf.Bytes()
+ if len(b) == len(jsonBig) {
+ // jsonBig is compact (no unnecessary spaces);
+ // indenting should make it bigger
+ t.Fatalf("Indent(jsonBig) did not get bigger")
+ }
+
+ // should be idempotent
+ var buf1 bytes.Buffer
+ if err := Indent(&buf1, b, "", "\t"); err != nil {
+ t.Fatalf("Indent2: %v", err)
+ }
+ b1 := buf1.Bytes()
+ if !bytes.Equal(b1, b) {
+ t.Error("Indent(Indent(jsonBig)) != Indent(jsonBig)")
+ diff(t, b1, b)
+ return
+ }
+
+ // should get back to original
+ buf1.Reset()
+ if err := Compact(&buf1, b); err != nil {
+ t.Fatalf("Compact: %v", err)
+ }
+ b1 = buf1.Bytes()
+ if !bytes.Equal(b1, jsonBig) {
+ t.Error("Compact(Indent(jsonBig)) != jsonBig")
+ diff(t, b1, jsonBig)
+ return
+ }
+}
+
+type indentErrorTest struct {
+ in string
+ err error
+}
+
+var indentErrorTests = []indentErrorTest{
+ {`{"X": "foo", "Y"}`, &SyntaxError{"invalid character '}' after object key", 17}},
+ {`{"X": "foo" "Y": "bar"}`, &SyntaxError{"invalid character '\"' after object key:value pair", 13}},
+}
+
+func TestIndentErrors(t *testing.T) {
+ for i, tt := range indentErrorTests {
+ slice := make([]uint8, 0)
+ buf := bytes.NewBuffer(slice)
+ if err := Indent(buf, []uint8(tt.in), "", ""); err != nil {
+ if !reflect.DeepEqual(err, tt.err) {
+ t.Errorf("#%d: Indent: %#v", i, err)
+ continue
+ }
+ }
+ }
+}
+
+func TestNextValueBig(t *testing.T) {
+ initBig()
+ var scan scanner
+ item, rest, err := nextValue(jsonBig, &scan)
+ if err != nil {
+ t.Fatalf("nextValue: %s", err)
+ }
+ if len(item) != len(jsonBig) || &item[0] != &jsonBig[0] {
+ t.Errorf("invalid item: %d %d", len(item), len(jsonBig))
+ }
+ if len(rest) != 0 {
+ t.Errorf("invalid rest: %d", len(rest))
+ }
+
+ item, rest, err = nextValue(append(jsonBig, "HELLO WORLD"...), &scan)
+ if err != nil {
+ t.Fatalf("nextValue extra: %s", err)
+ }
+ if len(item) != len(jsonBig) {
+ t.Errorf("invalid item: %d %d", len(item), len(jsonBig))
+ }
+ if string(rest) != "HELLO WORLD" {
+ t.Errorf("invalid rest: %d", len(rest))
+ }
+}
+
+var benchScan scanner
+
+func BenchmarkSkipValue(b *testing.B) {
+ initBig()
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ nextValue(jsonBig, &benchScan)
+ }
+ b.SetBytes(int64(len(jsonBig)))
+}
+
+func diff(t *testing.T, a, b []byte) {
+ for i := 0; ; i++ {
+ if i >= len(a) || i >= len(b) || a[i] != b[i] {
+ j := i - 10
+ if j < 0 {
+ j = 0
+ }
+ t.Errorf("diverge at %d: «%s» vs «%s»", i, trim(a[j:]), trim(b[j:]))
+ return
+ }
+ }
+}
+
+func trim(b []byte) []byte {
+ if len(b) > 20 {
+ return b[0:20]
+ }
+ return b
+}
+
+// Generate a random JSON object.
+
+var jsonBig []byte
+
+func initBig() {
+ n := 10000
+ if testing.Short() {
+ n = 100
+ }
+ b, err := Marshal(genValue(n))
+ if err != nil {
+ panic(err)
+ }
+ jsonBig = b
+}
+
+func genValue(n int) interface{} {
+ if n > 1 {
+ switch rand.Intn(2) {
+ case 0:
+ return genArray(n)
+ case 1:
+ return genMap(n)
+ }
+ }
+ switch rand.Intn(3) {
+ case 0:
+ return rand.Intn(2) == 0
+ case 1:
+ return rand.NormFloat64()
+ case 2:
+ return genString(30)
+ }
+ panic("unreachable")
+}
+
+func genString(stddev float64) string {
+ n := int(math.Abs(rand.NormFloat64()*stddev + stddev/2))
+ c := make([]rune, n)
+ for i := range c {
+ f := math.Abs(rand.NormFloat64()*64 + 32)
+ if f > 0x10ffff {
+ f = 0x10ffff
+ }
+ c[i] = rune(f)
+ }
+ return string(c)
+}
+
+func genArray(n int) []interface{} {
+ f := int(math.Abs(rand.NormFloat64()) * math.Min(10, float64(n/2)))
+ if f > n {
+ f = n
+ }
+ if f < 1 {
+ f = 1
+ }
+ x := make([]interface{}, f)
+ for i := range x {
+ x[i] = genValue(((i+1)*n)/f - (i*n)/f)
+ }
+ return x
+}
+
+func genMap(n int) map[string]interface{} {
+ f := int(math.Abs(rand.NormFloat64()) * math.Min(10, float64(n/2)))
+ if f > n {
+ f = n
+ }
+ if n > 0 && f == 0 {
+ f = 1
+ }
+ x := make(map[string]interface{})
+ for i := 0; i < f; i++ {
+ x[genString(10)] = genValue(((i+1)*n)/f - (i*n)/f)
+ }
+ return x
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/stream.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/stream.go
new file mode 100644
index 00000000000..e023702b571
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/stream.go
@@ -0,0 +1,510 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "bytes"
+ "errors"
+ "io"
+)
+
+// A Decoder reads and decodes JSON values from an input stream.
+type Decoder struct {
+ r io.Reader
+ buf []byte
+ d decodeState
+ scanp int // start of unread data in buf
+ scan scanner
+ err error
+
+ tokenState int
+ tokenStack []int
+}
+
+// NewDecoder returns a new decoder that reads from r.
+//
+// The decoder introduces its own buffering and may
+// read data from r beyond the JSON values requested.
+func NewDecoder(r io.Reader) *Decoder {
+ return &Decoder{r: r}
+}
+
+// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
+// Number instead of as a float64.
+func (dec *Decoder) UseNumber() { dec.d.useNumber = true }
+
+// Decode reads the next JSON-encoded value from its
+// input and stores it in the value pointed to by v.
+//
+// See the documentation for Unmarshal for details about
+// the conversion of JSON into a Go value.
+func (dec *Decoder) Decode(v interface{}) error {
+ if dec.err != nil {
+ return dec.err
+ }
+
+ if err := dec.tokenPrepareForDecode(); err != nil {
+ return err
+ }
+
+ if !dec.tokenValueAllowed() {
+ return &SyntaxError{msg: "not at beginning of value"}
+ }
+
+ // Read whole value into buffer.
+ n, err := dec.readValue()
+ if err != nil {
+ return err
+ }
+ dec.d.init(dec.buf[dec.scanp : dec.scanp+n])
+ dec.scanp += n
+
+ // Don't save err from unmarshal into dec.err:
+ // the connection is still usable since we read a complete JSON
+ // object from it before the error happened.
+ err = dec.d.unmarshal(v)
+
+ // fixup token streaming state
+ dec.tokenValueEnd()
+
+ return err
+}
+
+// Buffered returns a reader of the data remaining in the Decoder's
+// buffer. The reader is valid until the next call to Decode.
+func (dec *Decoder) Buffered() io.Reader {
+ return bytes.NewReader(dec.buf[dec.scanp:])
+}
+
+// readValue reads a JSON value into dec.buf.
+// It returns the length of the encoding.
+func (dec *Decoder) readValue() (int, error) {
+ dec.scan.reset()
+
+ scanp := dec.scanp
+ var err error
+Input:
+ for {
+ // Look in the buffer for a new value.
+ for i, c := range dec.buf[scanp:] {
+ dec.scan.bytes++
+ v := dec.scan.step(&dec.scan, c)
+ if v == scanEnd {
+ scanp += i
+ break Input
+ }
+ // scanEnd is delayed one byte.
+ // We might block trying to get that byte from src,
+ // so instead invent a space byte.
+ if (v == scanEndObject || v == scanEndArray) && dec.scan.step(&dec.scan, ' ') == scanEnd {
+ scanp += i + 1
+ break Input
+ }
+ if v == scanError {
+ dec.err = dec.scan.err
+ return 0, dec.scan.err
+ }
+ }
+ scanp = len(dec.buf)
+
+ // Did the last read have an error?
+ // Delayed until now to allow buffer scan.
+ if err != nil {
+ if err == io.EOF {
+ if dec.scan.step(&dec.scan, ' ') == scanEnd {
+ break Input
+ }
+ if nonSpace(dec.buf) {
+ err = io.ErrUnexpectedEOF
+ }
+ }
+ dec.err = err
+ return 0, err
+ }
+
+ n := scanp - dec.scanp
+ err = dec.refill()
+ scanp = dec.scanp + n
+ }
+ return scanp - dec.scanp, nil
+}
+
+func (dec *Decoder) refill() error {
+ // Make room to read more into the buffer.
+ // First slide down data already consumed.
+ if dec.scanp > 0 {
+ n := copy(dec.buf, dec.buf[dec.scanp:])
+ dec.buf = dec.buf[:n]
+ dec.scanp = 0
+ }
+
+ // Grow buffer if not large enough.
+ const minRead = 512
+ if cap(dec.buf)-len(dec.buf) < minRead {
+ newBuf := make([]byte, len(dec.buf), 2*cap(dec.buf)+minRead)
+ copy(newBuf, dec.buf)
+ dec.buf = newBuf
+ }
+
+ // Read. Delay error for next iteration (after scan).
+ n, err := dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
+ dec.buf = dec.buf[0 : len(dec.buf)+n]
+
+ return err
+}
+
+func nonSpace(b []byte) bool {
+ for _, c := range b {
+ if !isSpace(c) {
+ return true
+ }
+ }
+ return false
+}
+
+// An Encoder writes JSON values to an output stream.
+type Encoder struct {
+ w io.Writer
+ err error
+ escapeHTML bool
+
+ indentBuf *bytes.Buffer
+ indentPrefix string
+ indentValue string
+
+ ext Extension
+}
+
+// NewEncoder returns a new encoder that writes to w.
+func NewEncoder(w io.Writer) *Encoder {
+ return &Encoder{w: w, escapeHTML: true}
+}
+
+// Encode writes the JSON encoding of v to the stream,
+// followed by a newline character.
+//
+// See the documentation for Marshal for details about the
+// conversion of Go values to JSON.
+func (enc *Encoder) Encode(v interface{}) error {
+ if enc.err != nil {
+ return enc.err
+ }
+ e := newEncodeState()
+ e.ext = enc.ext
+ err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML})
+ if err != nil {
+ return err
+ }
+
+ // Terminate each value with a newline.
+ // This makes the output look a little nicer
+ // when debugging, and some kind of space
+ // is required if the encoded value was a number,
+ // so that the reader knows there aren't more
+ // digits coming.
+ e.WriteByte('\n')
+
+ b := e.Bytes()
+ if enc.indentBuf != nil {
+ enc.indentBuf.Reset()
+ err = Indent(enc.indentBuf, b, enc.indentPrefix, enc.indentValue)
+ if err != nil {
+ return err
+ }
+ b = enc.indentBuf.Bytes()
+ }
+ if _, err = enc.w.Write(b); err != nil {
+ enc.err = err
+ }
+ encodeStatePool.Put(e)
+ return err
+}
+
+// Indent sets the encoder to format each encoded value with Indent.
+func (enc *Encoder) Indent(prefix, indent string) {
+ enc.indentBuf = new(bytes.Buffer)
+ enc.indentPrefix = prefix
+ enc.indentValue = indent
+}
+
+// DisableHTMLEscaping causes the encoder not to escape angle brackets
+// ("<" and ">") or ampersands ("&") in JSON strings.
+func (enc *Encoder) DisableHTMLEscaping() {
+ enc.escapeHTML = false
+}
+
+// RawMessage is a raw encoded JSON value.
+// It implements Marshaler and Unmarshaler and can
+// be used to delay JSON decoding or precompute a JSON encoding.
+type RawMessage []byte
+
+// MarshalJSON returns *m as the JSON encoding of m.
+func (m *RawMessage) MarshalJSON() ([]byte, error) {
+ return *m, nil
+}
+
+// UnmarshalJSON sets *m to a copy of data.
+func (m *RawMessage) UnmarshalJSON(data []byte) error {
+ if m == nil {
+ return errors.New("json.RawMessage: UnmarshalJSON on nil pointer")
+ }
+ *m = append((*m)[0:0], data...)
+ return nil
+}
+
+var _ Marshaler = (*RawMessage)(nil)
+var _ Unmarshaler = (*RawMessage)(nil)
+
+// A Token holds a value of one of these types:
+//
+// Delim, for the four JSON delimiters [ ] { }
+// bool, for JSON booleans
+// float64, for JSON numbers
+// Number, for JSON numbers
+// string, for JSON string literals
+// nil, for JSON null
+//
+type Token interface{}
+
+const (
+ tokenTopValue = iota
+ tokenArrayStart
+ tokenArrayValue
+ tokenArrayComma
+ tokenObjectStart
+ tokenObjectKey
+ tokenObjectColon
+ tokenObjectValue
+ tokenObjectComma
+)
+
+// advance the token state from a separator state to a value state
+func (dec *Decoder) tokenPrepareForDecode() error {
+ // Note: Not calling peek before switch, to avoid
+ // putting peek into the standard Decode path.
+ // peek is only called when using the Token API.
+ switch dec.tokenState {
+ case tokenArrayComma:
+ c, err := dec.peek()
+ if err != nil {
+ return err
+ }
+ if c != ',' {
+ return &SyntaxError{"expected comma after array element", 0}
+ }
+ dec.scanp++
+ dec.tokenState = tokenArrayValue
+ case tokenObjectColon:
+ c, err := dec.peek()
+ if err != nil {
+ return err
+ }
+ if c != ':' {
+ return &SyntaxError{"expected colon after object key", 0}
+ }
+ dec.scanp++
+ dec.tokenState = tokenObjectValue
+ }
+ return nil
+}
+
+func (dec *Decoder) tokenValueAllowed() bool {
+ switch dec.tokenState {
+ case tokenTopValue, tokenArrayStart, tokenArrayValue, tokenObjectValue:
+ return true
+ }
+ return false
+}
+
+func (dec *Decoder) tokenValueEnd() {
+ switch dec.tokenState {
+ case tokenArrayStart, tokenArrayValue:
+ dec.tokenState = tokenArrayComma
+ case tokenObjectValue:
+ dec.tokenState = tokenObjectComma
+ }
+}
+
+// A Delim is a JSON array or object delimiter, one of [ ] { or }.
+type Delim rune
+
+func (d Delim) String() string {
+ return string(d)
+}
+
+// Token returns the next JSON token in the input stream.
+// At the end of the input stream, Token returns nil, io.EOF.
+//
+// Token guarantees that the delimiters [ ] { } it returns are
+// properly nested and matched: if Token encounters an unexpected
+// delimiter in the input, it will return an error.
+//
+// The input stream consists of basic JSON values—bool, string,
+// number, and null—along with delimiters [ ] { } of type Delim
+// to mark the start and end of arrays and objects.
+// Commas and colons are elided.
+func (dec *Decoder) Token() (Token, error) {
+ for {
+ c, err := dec.peek()
+ if err != nil {
+ return nil, err
+ }
+ switch c {
+ case '[':
+ if !dec.tokenValueAllowed() {
+ return dec.tokenError(c)
+ }
+ dec.scanp++
+ dec.tokenStack = append(dec.tokenStack, dec.tokenState)
+ dec.tokenState = tokenArrayStart
+ return Delim('['), nil
+
+ case ']':
+ if dec.tokenState != tokenArrayStart && dec.tokenState != tokenArrayComma {
+ return dec.tokenError(c)
+ }
+ dec.scanp++
+ dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
+ dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
+ dec.tokenValueEnd()
+ return Delim(']'), nil
+
+ case '{':
+ if !dec.tokenValueAllowed() {
+ return dec.tokenError(c)
+ }
+ dec.scanp++
+ dec.tokenStack = append(dec.tokenStack, dec.tokenState)
+ dec.tokenState = tokenObjectStart
+ return Delim('{'), nil
+
+ case '}':
+ if dec.tokenState != tokenObjectStart && dec.tokenState != tokenObjectComma {
+ return dec.tokenError(c)
+ }
+ dec.scanp++
+ dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
+ dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
+ dec.tokenValueEnd()
+ return Delim('}'), nil
+
+ case ':':
+ if dec.tokenState != tokenObjectColon {
+ return dec.tokenError(c)
+ }
+ dec.scanp++
+ dec.tokenState = tokenObjectValue
+ continue
+
+ case ',':
+ if dec.tokenState == tokenArrayComma {
+ dec.scanp++
+ dec.tokenState = tokenArrayValue
+ continue
+ }
+ if dec.tokenState == tokenObjectComma {
+ dec.scanp++
+ dec.tokenState = tokenObjectKey
+ continue
+ }
+ return dec.tokenError(c)
+
+ case '"':
+ if dec.tokenState == tokenObjectStart || dec.tokenState == tokenObjectKey {
+ var x string
+ old := dec.tokenState
+ dec.tokenState = tokenTopValue
+ err := dec.Decode(&x)
+ dec.tokenState = old
+ if err != nil {
+ clearOffset(err)
+ return nil, err
+ }
+ dec.tokenState = tokenObjectColon
+ return x, nil
+ }
+ fallthrough
+
+ default:
+ if !dec.tokenValueAllowed() {
+ return dec.tokenError(c)
+ }
+ var x interface{}
+ if err := dec.Decode(&x); err != nil {
+ clearOffset(err)
+ return nil, err
+ }
+ return x, nil
+ }
+ }
+}
+
+func clearOffset(err error) {
+ if s, ok := err.(*SyntaxError); ok {
+ s.Offset = 0
+ }
+}
+
+func (dec *Decoder) tokenError(c byte) (Token, error) {
+ var context string
+ switch dec.tokenState {
+ case tokenTopValue:
+ context = " looking for beginning of value"
+ case tokenArrayStart, tokenArrayValue, tokenObjectValue:
+ context = " looking for beginning of value"
+ case tokenArrayComma:
+ context = " after array element"
+ case tokenObjectKey:
+ context = " looking for beginning of object key string"
+ case tokenObjectColon:
+ context = " after object key"
+ case tokenObjectComma:
+ context = " after object key:value pair"
+ }
+ return nil, &SyntaxError{"invalid character " + quoteChar(c) + " " + context, 0}
+}
+
+// More reports whether there is another element in the
+// current array or object being parsed.
+func (dec *Decoder) More() bool {
+ c, err := dec.peek()
+ return err == nil && c != ']' && c != '}'
+}
+
+func (dec *Decoder) peek() (byte, error) {
+ var err error
+ for {
+ for i := dec.scanp; i < len(dec.buf); i++ {
+ c := dec.buf[i]
+ if isSpace(c) {
+ continue
+ }
+ dec.scanp = i
+ return c, nil
+ }
+ // buffer has been scanned, now report any error
+ if err != nil {
+ return 0, err
+ }
+ err = dec.refill()
+ }
+}
+
+/*
+TODO
+
+// EncodeToken writes the given JSON token to the stream.
+// It returns an error if the delimiters [ ] { } are not properly used.
+//
+// EncodeToken does not call Flush, because usually it is part of
+// a larger operation such as Encode, and those will call Flush when finished.
+// Callers that create an Encoder and then invoke EncodeToken directly,
+// without using Encode, need to call Flush when finished to ensure that
+// the JSON is written to the underlying writer.
+func (e *Encoder) EncodeToken(t Token) error {
+ ...
+}
+
+*/
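
Illustration only (not part of the diff): the stream API above is derived from the standard library's encoding/json, and the stdlib Decoder exposes the same NewDecoder, Token, More and Decode signatures, so the Token documentation above can be sketched against the stdlib import.

    package main

    import (
        "encoding/json"
        "fmt"
        "strings"
    )

    func main() {
        const input = `[{"Name":"a"},{"Name":"b"}]`
        dec := json.NewDecoder(strings.NewReader(input))

        // Opening '[' delimiter.
        if _, err := dec.Token(); err != nil {
            panic(err)
        }
        // Decode one element at a time while the array has more values.
        for dec.More() {
            var v struct{ Name string }
            if err := dec.Decode(&v); err != nil {
                panic(err)
            }
            fmt.Println(v.Name)
        }
        // Closing ']' delimiter.
        if _, err := dec.Token(); err != nil {
            panic(err)
        }
    }
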
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/stream_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/stream_test.go
new file mode 100644
index 00000000000..0abdf7b5654
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/stream_test.go
@@ -0,0 +1,418 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "bytes"
+ "io"
+ "io/ioutil"
+ "log"
+ "net"
+ "net/http"
+ "net/http/httptest"
+ "reflect"
+ "strings"
+ "testing"
+)
+
+// Test values for the stream test.
+// One of each JSON kind.
+var streamTest = []interface{}{
+ 0.1,
+ "hello",
+ nil,
+ true,
+ false,
+ []interface{}{"a", "b", "c"},
+ map[string]interface{}{"K": "Kelvin", "ß": "long s"},
+ 3.14, // another value to make sure something can follow map
+}
+
+var streamEncoded = `0.1
+"hello"
+null
+true
+false
+["a","b","c"]
+{"ß":"long s","K":"Kelvin"}
+3.14
+`
+
+func TestEncoder(t *testing.T) {
+ for i := 0; i <= len(streamTest); i++ {
+ var buf bytes.Buffer
+ enc := NewEncoder(&buf)
+ for j, v := range streamTest[0:i] {
+ if err := enc.Encode(v); err != nil {
+ t.Fatalf("encode #%d: %v", j, err)
+ }
+ }
+ if have, want := buf.String(), nlines(streamEncoded, i); have != want {
+ t.Errorf("encoding %d items: mismatch", i)
+ diff(t, []byte(have), []byte(want))
+ break
+ }
+ }
+}
+
+var streamEncodedIndent = `0.1
+"hello"
+null
+true
+false
+[
+>."a",
+>."b",
+>."c"
+>]
+{
+>."ß": "long s",
+>."K": "Kelvin"
+>}
+3.14
+`
+
+func TestEncoderIndent(t *testing.T) {
+ var buf bytes.Buffer
+ enc := NewEncoder(&buf)
+ enc.Indent(">", ".")
+ for _, v := range streamTest {
+ enc.Encode(v)
+ }
+ if have, want := buf.String(), streamEncodedIndent; have != want {
+ t.Error("indented encoding mismatch")
+ diff(t, []byte(have), []byte(want))
+ }
+}
+
+func TestEncoderDisableHTMLEscaping(t *testing.T) {
+ var c C
+ var ct CText
+ for _, tt := range []struct {
+ name string
+ v interface{}
+ wantEscape string
+ want string
+ }{
+ {"c", c, `"\u003c\u0026\u003e"`, `"<&>"`},
+ {"ct", ct, `"\"\u003c\u0026\u003e\""`, `"\"<&>\""`},
+ {`"<&>"`, "<&>", `"\u003c\u0026\u003e"`, `"<&>"`},
+ } {
+ var buf bytes.Buffer
+ enc := NewEncoder(&buf)
+ if err := enc.Encode(tt.v); err != nil {
+ t.Fatalf("Encode(%s): %s", tt.name, err)
+ }
+ if got := strings.TrimSpace(buf.String()); got != tt.wantEscape {
+ t.Errorf("Encode(%s) = %#q, want %#q", tt.name, got, tt.wantEscape)
+ }
+ buf.Reset()
+ enc.DisableHTMLEscaping()
+ if err := enc.Encode(tt.v); err != nil {
+ t.Fatalf("DisableHTMLEscaping Encode(%s): %s", tt.name, err)
+ }
+ if got := strings.TrimSpace(buf.String()); got != tt.want {
+ t.Errorf("DisableHTMLEscaping Encode(%s) = %#q, want %#q",
+ tt.name, got, tt.want)
+ }
+ }
+}
+
+func TestDecoder(t *testing.T) {
+ for i := 0; i <= len(streamTest); i++ {
+		// Unlike the upstream Go test, the newlines between the encoded
+		// values are kept as separators rather than stripped out:
+		// removing them would run adjacent literals together (for
+		// example "null" and "true" become "nulltrue"), which this
+		// decoder does not accept.
+		var buf bytes.Buffer
+		for _, c := range nlines(streamEncoded, i) {
+			buf.WriteRune(c)
+		}
+ out := make([]interface{}, i)
+ dec := NewDecoder(&buf)
+ for j := range out {
+ if err := dec.Decode(&out[j]); err != nil {
+ t.Fatalf("decode #%d/%d: %v", j, i, err)
+ }
+ }
+ if !reflect.DeepEqual(out, streamTest[0:i]) {
+ t.Errorf("decoding %d items: mismatch", i)
+ for j := range out {
+ if !reflect.DeepEqual(out[j], streamTest[j]) {
+ t.Errorf("#%d: have %v want %v", j, out[j], streamTest[j])
+ }
+ }
+ break
+ }
+ }
+}
+
+func TestDecoderBuffered(t *testing.T) {
+ r := strings.NewReader(`{"Name": "Gopher"} extra `)
+ var m struct {
+ Name string
+ }
+ d := NewDecoder(r)
+ err := d.Decode(&m)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if m.Name != "Gopher" {
+ t.Errorf("Name = %q; want Gopher", m.Name)
+ }
+ rest, err := ioutil.ReadAll(d.Buffered())
+ if err != nil {
+ t.Fatal(err)
+ }
+ if g, w := string(rest), " extra "; g != w {
+ t.Errorf("Remaining = %q; want %q", g, w)
+ }
+}
+
+func nlines(s string, n int) string {
+ if n <= 0 {
+ return ""
+ }
+ for i, c := range s {
+ if c == '\n' {
+ if n--; n == 0 {
+ return s[0 : i+1]
+ }
+ }
+ }
+ return s
+}
+
+func TestRawMessage(t *testing.T) {
+ // TODO(rsc): Should not need the * in *RawMessage
+ var data struct {
+ X float64
+ Id *RawMessage
+ Y float32
+ }
+ const raw = `["\u0056",null]`
+ const msg = `{"X":0.1,"Id":["\u0056",null],"Y":0.2}`
+ err := Unmarshal([]byte(msg), &data)
+ if err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if string([]byte(*data.Id)) != raw {
+ t.Fatalf("Raw mismatch: have %#q want %#q", []byte(*data.Id), raw)
+ }
+ b, err := Marshal(&data)
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+ if string(b) != msg {
+ t.Fatalf("Marshal: have %#q want %#q", b, msg)
+ }
+}
+
+func TestNullRawMessage(t *testing.T) {
+ // TODO(rsc): Should not need the * in *RawMessage
+ var data struct {
+ X float64
+ Id *RawMessage
+ Y float32
+ }
+ data.Id = new(RawMessage)
+ const msg = `{"X":0.1,"Id":null,"Y":0.2}`
+ err := Unmarshal([]byte(msg), &data)
+ if err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if data.Id != nil {
+ t.Fatalf("Raw mismatch: have non-nil, want nil")
+ }
+ b, err := Marshal(&data)
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+ if string(b) != msg {
+ t.Fatalf("Marshal: have %#q want %#q", b, msg)
+ }
+}
+
+var blockingTests = []string{
+ `{"x": 1}`,
+ `[1, 2, 3]`,
+}
+
+func TestBlocking(t *testing.T) {
+ for _, enc := range blockingTests {
+ r, w := net.Pipe()
+ go w.Write([]byte(enc))
+ var val interface{}
+
+ // If Decode reads beyond what w.Write writes above,
+ // it will block, and the test will deadlock.
+ if err := NewDecoder(r).Decode(&val); err != nil {
+ t.Errorf("decoding %s: %v", enc, err)
+ }
+ r.Close()
+ w.Close()
+ }
+}
+
+func BenchmarkEncoderEncode(b *testing.B) {
+ b.ReportAllocs()
+ type T struct {
+ X, Y string
+ }
+ v := &T{"foo", "bar"}
+ for i := 0; i < b.N; i++ {
+ if err := NewEncoder(ioutil.Discard).Encode(v); err != nil {
+ b.Fatal(err)
+ }
+ }
+}
+
+type tokenStreamCase struct {
+ json string
+ expTokens []interface{}
+}
+
+type decodeThis struct {
+ v interface{}
+}
+
+var tokenStreamCases []tokenStreamCase = []tokenStreamCase{
+ // streaming token cases
+ {json: `10`, expTokens: []interface{}{float64(10)}},
+ {json: ` [10] `, expTokens: []interface{}{
+ Delim('['), float64(10), Delim(']')}},
+ {json: ` [false,10,"b"] `, expTokens: []interface{}{
+ Delim('['), false, float64(10), "b", Delim(']')}},
+ {json: `{ "a": 1 }`, expTokens: []interface{}{
+ Delim('{'), "a", float64(1), Delim('}')}},
+ {json: `{"a": 1, "b":"3"}`, expTokens: []interface{}{
+ Delim('{'), "a", float64(1), "b", "3", Delim('}')}},
+ {json: ` [{"a": 1},{"a": 2}] `, expTokens: []interface{}{
+ Delim('['),
+ Delim('{'), "a", float64(1), Delim('}'),
+ Delim('{'), "a", float64(2), Delim('}'),
+ Delim(']')}},
+ {json: `{"obj": {"a": 1}}`, expTokens: []interface{}{
+ Delim('{'), "obj", Delim('{'), "a", float64(1), Delim('}'),
+ Delim('}')}},
+ {json: `{"obj": [{"a": 1}]}`, expTokens: []interface{}{
+ Delim('{'), "obj", Delim('['),
+ Delim('{'), "a", float64(1), Delim('}'),
+ Delim(']'), Delim('}')}},
+
+ // streaming tokens with intermittent Decode()
+ {json: `{ "a": 1 }`, expTokens: []interface{}{
+ Delim('{'), "a",
+ decodeThis{float64(1)},
+ Delim('}')}},
+ {json: ` [ { "a" : 1 } ] `, expTokens: []interface{}{
+ Delim('['),
+ decodeThis{map[string]interface{}{"a": float64(1)}},
+ Delim(']')}},
+ {json: ` [{"a": 1},{"a": 2}] `, expTokens: []interface{}{
+ Delim('['),
+ decodeThis{map[string]interface{}{"a": float64(1)}},
+ decodeThis{map[string]interface{}{"a": float64(2)}},
+ Delim(']')}},
+ {json: `{ "obj" : [ { "a" : 1 } ] }`, expTokens: []interface{}{
+ Delim('{'), "obj", Delim('['),
+ decodeThis{map[string]interface{}{"a": float64(1)}},
+ Delim(']'), Delim('}')}},
+
+ {json: `{"obj": {"a": 1}}`, expTokens: []interface{}{
+ Delim('{'), "obj",
+ decodeThis{map[string]interface{}{"a": float64(1)}},
+ Delim('}')}},
+ {json: `{"obj": [{"a": 1}]}`, expTokens: []interface{}{
+ Delim('{'), "obj",
+ decodeThis{[]interface{}{
+ map[string]interface{}{"a": float64(1)},
+ }},
+ Delim('}')}},
+ {json: ` [{"a": 1} {"a": 2}] `, expTokens: []interface{}{
+ Delim('['),
+ decodeThis{map[string]interface{}{"a": float64(1)}},
+ decodeThis{&SyntaxError{"expected comma after array element", 0}},
+ }},
+ {json: `{ "a" 1 }`, expTokens: []interface{}{
+ Delim('{'), "a",
+ decodeThis{&SyntaxError{"expected colon after object key", 0}},
+ }},
+}
+
+func TestDecodeInStream(t *testing.T) {
+
+ for ci, tcase := range tokenStreamCases {
+
+ dec := NewDecoder(strings.NewReader(tcase.json))
+ for i, etk := range tcase.expTokens {
+
+ var tk interface{}
+ var err error
+
+ if dt, ok := etk.(decodeThis); ok {
+ etk = dt.v
+ err = dec.Decode(&tk)
+ } else {
+ tk, err = dec.Token()
+ }
+ if experr, ok := etk.(error); ok {
+ if err == nil || err.Error() != experr.Error() {
+ t.Errorf("case %v: Expected error %v in %q, but was %v", ci, experr, tcase.json, err)
+ }
+ break
+ } else if err == io.EOF {
+ t.Errorf("case %v: Unexpected EOF in %q", ci, tcase.json)
+ break
+ } else if err != nil {
+ t.Errorf("case %v: Unexpected error '%v' in %q", ci, err, tcase.json)
+ break
+ }
+ if !reflect.DeepEqual(tk, etk) {
+ t.Errorf(`case %v: %q @ %v expected %T(%v) was %T(%v)`, ci, tcase.json, i, etk, etk, tk, tk)
+ break
+ }
+ }
+ }
+
+}
+
+// Test from golang.org/issue/11893
+func TestHTTPDecoding(t *testing.T) {
+ const raw = `{ "foo": "bar" }`
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.Write([]byte(raw))
+ }))
+ defer ts.Close()
+ res, err := http.Get(ts.URL)
+ if err != nil {
+ log.Fatalf("GET failed: %v", err)
+ }
+ defer res.Body.Close()
+
+ foo := struct {
+ Foo string
+ }{}
+
+ d := NewDecoder(res.Body)
+ err = d.Decode(&foo)
+ if err != nil {
+ t.Fatalf("Decode: %v", err)
+ }
+ if foo.Foo != "bar" {
+ t.Errorf("decoded %q; want \"bar\"", foo.Foo)
+ }
+
+ // make sure we get the EOF the second time
+ err = d.Decode(&foo)
+ if err != io.EOF {
+ t.Errorf("err = %v; want io.EOF", err)
+ }
+}
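
Illustration only (not part of the diff): RawMessage, added in stream.go above and exercised by TestRawMessage, is typically used to defer decoding of a field until its type is known. The standard library's json.RawMessage behaves the same way for unmarshaling, so the sketch uses the stdlib import.

    package main

    import (
        "encoding/json"
        "fmt"
    )

    func main() {
        // Decode the envelope first, keep the payload raw, and decode the
        // payload once its type is known.
        var envelope struct {
            Type    string
            Payload json.RawMessage
        }
        data := []byte(`{"Type":"point","Payload":{"X":1,"Y":2}}`)
        if err := json.Unmarshal(data, &envelope); err != nil {
            panic(err)
        }
        if envelope.Type == "point" {
            var p struct{ X, Y int }
            if err := json.Unmarshal(envelope.Payload, &p); err != nil {
                panic(err)
            }
            fmt.Println(p.X, p.Y) // 1 2
        }
    }
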
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tagkey_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tagkey_test.go
new file mode 100644
index 00000000000..c1739ea97f7
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tagkey_test.go
@@ -0,0 +1,115 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "testing"
+)
+
+type basicLatin2xTag struct {
+ V string `json:"$%-/"`
+}
+
+type basicLatin3xTag struct {
+ V string `json:"0123456789"`
+}
+
+type basicLatin4xTag struct {
+ V string `json:"ABCDEFGHIJKLMO"`
+}
+
+type basicLatin5xTag struct {
+ V string `json:"PQRSTUVWXYZ_"`
+}
+
+type basicLatin6xTag struct {
+ V string `json:"abcdefghijklmno"`
+}
+
+type basicLatin7xTag struct {
+ V string `json:"pqrstuvwxyz"`
+}
+
+type miscPlaneTag struct {
+ V string `json:"色は匂へど"`
+}
+
+type percentSlashTag struct {
+ V string `json:"text/html%"` // https://golang.org/issue/2718
+}
+
+type punctuationTag struct {
+ V string `json:"!#$%&()*+-./:<=>?@[]^_{|}~"` // https://golang.org/issue/3546
+}
+
+type emptyTag struct {
+ W string
+}
+
+type misnamedTag struct {
+ X string `jsom:"Misnamed"`
+}
+
+type badFormatTag struct {
+ Y string `:"BadFormat"`
+}
+
+type badCodeTag struct {
+ Z string `json:" !\"#&'()*+,."`
+}
+
+type spaceTag struct {
+ Q string `json:"With space"`
+}
+
+type unicodeTag struct {
+ W string `json:"Ελλάδα"`
+}
+
+var structTagObjectKeyTests = []struct {
+ raw interface{}
+ value string
+ key string
+}{
+ {basicLatin2xTag{"2x"}, "2x", "$%-/"},
+ {basicLatin3xTag{"3x"}, "3x", "0123456789"},
+ {basicLatin4xTag{"4x"}, "4x", "ABCDEFGHIJKLMO"},
+ {basicLatin5xTag{"5x"}, "5x", "PQRSTUVWXYZ_"},
+ {basicLatin6xTag{"6x"}, "6x", "abcdefghijklmno"},
+ {basicLatin7xTag{"7x"}, "7x", "pqrstuvwxyz"},
+ {miscPlaneTag{"いろはにほへと"}, "いろはにほへと", "色は匂へど"},
+ {emptyTag{"Pour Moi"}, "Pour Moi", "W"},
+ {misnamedTag{"Animal Kingdom"}, "Animal Kingdom", "X"},
+ {badFormatTag{"Orfevre"}, "Orfevre", "Y"},
+ {badCodeTag{"Reliable Man"}, "Reliable Man", "Z"},
+ {percentSlashTag{"brut"}, "brut", "text/html%"},
+ {punctuationTag{"Union Rags"}, "Union Rags", "!#$%&()*+-./:<=>?@[]^_{|}~"},
+ {spaceTag{"Perreddu"}, "Perreddu", "With space"},
+ {unicodeTag{"Loukanikos"}, "Loukanikos", "Ελλάδα"},
+}
+
+func TestStructTagObjectKey(t *testing.T) {
+ for _, tt := range structTagObjectKeyTests {
+ b, err := Marshal(tt.raw)
+ if err != nil {
+ t.Fatalf("Marshal(%#q) failed: %v", tt.raw, err)
+ }
+ var f interface{}
+ err = Unmarshal(b, &f)
+ if err != nil {
+ t.Fatalf("Unmarshal(%#q) failed: %v", b, err)
+ }
+ for i, v := range f.(map[string]interface{}) {
+ switch i {
+ case tt.key:
+ if s, ok := v.(string); !ok || s != tt.value {
+ t.Fatalf("Unexpected value: %#q, want %v", s, tt.value)
+ }
+ default:
+ t.Fatalf("Unexpected key: %#q, from %#q", i, b)
+ }
+ }
+ }
+}
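
Illustration only (not part of the diff): the table above checks that the name given in a `json:"..."` tag, including punctuation and non-ASCII names, is used verbatim as the object key. The same behavior can be seen with the standard library:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type price struct {
        Cents int    `json:"amount_cents"`
        Note  string `json:"text/html%"` // slashes and '%' are legal in tag names
    }

    func main() {
        b, err := json.Marshal(price{Cents: 1999, Note: "brut"})
        if err != nil {
            panic(err)
        }
        fmt.Println(string(b)) // {"amount_cents":1999,"text/html%":"brut"}
    }
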
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tags.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tags.go
new file mode 100644
index 00000000000..c38fd5102f6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tags.go
@@ -0,0 +1,44 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "strings"
+)
+
+// tagOptions is the string following a comma in a struct field's "json"
+// tag, or the empty string. It does not include the leading comma.
+type tagOptions string
+
+// parseTag splits a struct field's json tag into its name and
+// comma-separated options.
+func parseTag(tag string) (string, tagOptions) {
+ if idx := strings.Index(tag, ","); idx != -1 {
+ return tag[:idx], tagOptions(tag[idx+1:])
+ }
+ return tag, tagOptions("")
+}
+
+// Contains reports whether a comma-separated list of options
+// contains a particular option flag. optionName must be surrounded by a
+// string boundary or commas.
+func (o tagOptions) Contains(optionName string) bool {
+ if len(o) == 0 {
+ return false
+ }
+ s := string(o)
+ for s != "" {
+ var next string
+ i := strings.Index(s, ",")
+ if i >= 0 {
+ s, next = s[:i], s[i+1:]
+ }
+ if s == optionName {
+ return true
+ }
+ s = next
+ }
+ return false
+}
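
Since parseTag and tagOptions.Contains are unexported, the sketch below is written as if it lived inside this package (for example next to tags_test.go, which follows), and it only restates what the doc comments above say: the tag splits at the first comma, and Contains matches whole comma-separated options rather than substrings.

    package json

    import "fmt"

    // exampleParseTag is a sketch, not part of the diff.
    func exampleParseTag() {
        // A field tagged `json:"price,omitempty,string"` splits into the
        // key name and its options.
        name, opts := parseTag("price,omitempty,string")
        fmt.Println(name)                       // price
        fmt.Println(opts.Contains("omitempty")) // true
        fmt.Println(opts.Contains("string"))    // true
        fmt.Println(opts.Contains("omit"))      // false: whole options only
    }
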
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tags_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tags_test.go
new file mode 100644
index 00000000000..8ba8ddd5f80
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/tags_test.go
@@ -0,0 +1,28 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "testing"
+)
+
+func TestTagParsing(t *testing.T) {
+ name, opts := parseTag("field,foobar,foo")
+ if name != "field" {
+ t.Fatalf("name = %q, want field", name)
+ }
+ for _, tt := range []struct {
+ opt string
+ want bool
+ }{
+ {"foobar", true},
+ {"foo", true},
+ {"bar", false},
+ } {
+ if opts.Contains(tt.opt) != tt.want {
+ t.Errorf("Contains(%q) = %v", tt.opt, !tt.want)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/testdata/code.json.gz b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/testdata/code.json.gz
new file mode 100644
index 00000000000..1572a92bfbd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/json/testdata/code.json.gz
Binary files differ
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl.c b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl.c
new file mode 100644
index 00000000000..8be0bc45964
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl.c
@@ -0,0 +1,77 @@
+// +build !windows
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <sasl/sasl.h>
+
+static int mgo_sasl_simple(void *context, int id, const char **result, unsigned int *len)
+{
+ if (!result) {
+ return SASL_BADPARAM;
+ }
+ switch (id) {
+ case SASL_CB_USER:
+ *result = (char *)context;
+ break;
+ case SASL_CB_AUTHNAME:
+ *result = (char *)context;
+ break;
+ case SASL_CB_LANGUAGE:
+ *result = NULL;
+ break;
+ default:
+ return SASL_BADPARAM;
+ }
+ if (len) {
+ *len = *result ? strlen(*result) : 0;
+ }
+ return SASL_OK;
+}
+
+typedef int (*callback)(void);
+
+static int mgo_sasl_secret(sasl_conn_t *conn, void *context, int id, sasl_secret_t **result)
+{
+ if (!conn || !result || id != SASL_CB_PASS) {
+ return SASL_BADPARAM;
+ }
+ *result = (sasl_secret_t *)context;
+ return SASL_OK;
+}
+
+sasl_callback_t *mgo_sasl_callbacks(const char *username, const char *password)
+{
+ sasl_callback_t *cb = malloc(4 * sizeof(sasl_callback_t));
+ int n = 0;
+
+ size_t len = strlen(password);
+ sasl_secret_t *secret = (sasl_secret_t*)malloc(sizeof(sasl_secret_t) + len);
+ if (!secret) {
+ free(cb);
+ return NULL;
+ }
+ strcpy((char *)secret->data, password);
+ secret->len = len;
+
+ cb[n].id = SASL_CB_PASS;
+ cb[n].proc = (callback)&mgo_sasl_secret;
+ cb[n].context = secret;
+ n++;
+
+ cb[n].id = SASL_CB_USER;
+ cb[n].proc = (callback)&mgo_sasl_simple;
+ cb[n].context = (char*)username;
+ n++;
+
+ cb[n].id = SASL_CB_AUTHNAME;
+ cb[n].proc = (callback)&mgo_sasl_simple;
+ cb[n].context = (char*)username;
+ n++;
+
+ cb[n].id = SASL_CB_LIST_END;
+ cb[n].proc = NULL;
+ cb[n].context = NULL;
+
+ return cb;
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl.go
new file mode 100644
index 00000000000..8375dddf82a
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl.go
@@ -0,0 +1,138 @@
+// Package sasl is an implementation detail of the mgo package.
+//
+// This package is not meant to be used by itself.
+//
+
+// +build !windows
+
+package sasl
+
+// #cgo LDFLAGS: -lsasl2
+//
+// struct sasl_conn {};
+//
+// #include <stdlib.h>
+// #include <sasl/sasl.h>
+//
+// sasl_callback_t *mgo_sasl_callbacks(const char *username, const char *password);
+//
+import "C"
+
+import (
+ "fmt"
+ "strings"
+ "sync"
+ "unsafe"
+)
+
+type saslStepper interface {
+ Step(serverData []byte) (clientData []byte, done bool, err error)
+ Close()
+}
+
+type saslSession struct {
+ conn *C.sasl_conn_t
+ step int
+ mech string
+
+ cstrings []*C.char
+ callbacks *C.sasl_callback_t
+}
+
+var initError error
+var initOnce sync.Once
+
+func initSASL() {
+ rc := C.sasl_client_init(nil)
+ if rc != C.SASL_OK {
+ initError = saslError(rc, nil, "cannot initialize SASL library")
+ }
+}
+
+func New(username, password, mechanism, service, host string) (saslStepper, error) {
+ initOnce.Do(initSASL)
+ if initError != nil {
+ return nil, initError
+ }
+
+ ss := &saslSession{mech: mechanism}
+ if service == "" {
+ service = "mongodb"
+ }
+ if i := strings.Index(host, ":"); i >= 0 {
+ host = host[:i]
+ }
+ ss.callbacks = C.mgo_sasl_callbacks(ss.cstr(username), ss.cstr(password))
+ rc := C.sasl_client_new(ss.cstr(service), ss.cstr(host), nil, nil, ss.callbacks, 0, &ss.conn)
+ if rc != C.SASL_OK {
+ ss.Close()
+ return nil, saslError(rc, nil, "cannot create new SASL client")
+ }
+ return ss, nil
+}
+
+func (ss *saslSession) cstr(s string) *C.char {
+ cstr := C.CString(s)
+ ss.cstrings = append(ss.cstrings, cstr)
+ return cstr
+}
+
+func (ss *saslSession) Close() {
+ for _, cstr := range ss.cstrings {
+ C.free(unsafe.Pointer(cstr))
+ }
+ ss.cstrings = nil
+
+ if ss.callbacks != nil {
+ C.free(unsafe.Pointer(ss.callbacks))
+ }
+
+ // The documentation of SASL dispose makes it clear that this should only
+ // be done when the connection is done, not when the authentication phase
+ // is done, because an encryption layer may have been negotiated.
+ // Even then, we'll do this for now, because it's simpler and prevents
+ // keeping track of this state for every socket. If it breaks, we'll fix it.
+ C.sasl_dispose(&ss.conn)
+}
+
+func (ss *saslSession) Step(serverData []byte) (clientData []byte, done bool, err error) {
+ ss.step++
+ if ss.step > 10 {
+ return nil, false, fmt.Errorf("too many SASL steps without authentication")
+ }
+ var cclientData *C.char
+ var cclientDataLen C.uint
+ var rc C.int
+ if ss.step == 1 {
+ var mechanism *C.char // ignored - must match cred
+ rc = C.sasl_client_start(ss.conn, ss.cstr(ss.mech), nil, &cclientData, &cclientDataLen, &mechanism)
+ } else {
+ var cserverData *C.char
+ var cserverDataLen C.uint
+ if len(serverData) > 0 {
+ cserverData = (*C.char)(unsafe.Pointer(&serverData[0]))
+ cserverDataLen = C.uint(len(serverData))
+ }
+ rc = C.sasl_client_step(ss.conn, cserverData, cserverDataLen, nil, &cclientData, &cclientDataLen)
+ }
+ if cclientData != nil && cclientDataLen > 0 {
+ clientData = C.GoBytes(unsafe.Pointer(cclientData), C.int(cclientDataLen))
+ }
+ if rc == C.SASL_OK {
+ return clientData, true, nil
+ }
+ if rc == C.SASL_CONTINUE {
+ return clientData, false, nil
+ }
+ return nil, false, saslError(rc, ss.conn, "cannot establish SASL session")
+}
+
+func saslError(rc C.int, conn *C.sasl_conn_t, msg string) error {
+ var detail string
+ if conn == nil {
+ detail = C.GoString(C.sasl_errstring(rc, nil, nil))
+ } else {
+ detail = C.GoString(C.sasl_errdetail(conn))
+ }
+ return fmt.Errorf(msg + ": " + detail)
+}
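
This package is an internal implementation detail of mgo, so the following is only a structural sketch of how a caller inside the mgo tree might drive the stepper returned by New; the package name mgoauth and the sendToServer helper are hypothetical placeholders for the MongoDB saslStart/saslContinue round trips, not part of mgo's API.

    package mgoauth // hypothetical caller inside the mgo tree

    import "gopkg.in/mgo.v2/internal/sasl"

    // sendToServer is a hypothetical transport stub: it would forward the
    // client payload in a saslStart/saslContinue command and return the
    // server's SASL payload from the reply.
    func sendToServer(payload []byte) []byte { return nil }

    func authenticate(user, pass, host string) error {
        ss, err := sasl.New(user, pass, "GSSAPI", "mongodb", host)
        if err != nil {
            return err
        }
        defer ss.Close()

        var serverData []byte
        for {
            clientData, done, err := ss.Step(serverData)
            if err != nil {
                return err
            }
            serverData = sendToServer(clientData)
            if done {
                return nil
            }
        }
    }
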
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.c b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.c
new file mode 100644
index 00000000000..c359fd6edba
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.c
@@ -0,0 +1,122 @@
+#include "sasl_windows.h"
+
+static const LPSTR SSPI_PACKAGE_NAME = "kerberos";
+
+SECURITY_STATUS SEC_ENTRY sspi_acquire_credentials_handle(CredHandle *cred_handle, char *username, char *password, char *domain)
+{
+ SEC_WINNT_AUTH_IDENTITY auth_identity;
+ SECURITY_INTEGER ignored;
+
+ auth_identity.Flags = SEC_WINNT_AUTH_IDENTITY_ANSI;
+ auth_identity.User = (LPSTR) username;
+ auth_identity.UserLength = strlen(username);
+ auth_identity.Password = NULL;
+ auth_identity.PasswordLength = 0;
+ if(password){
+ auth_identity.Password = (LPSTR) password;
+ auth_identity.PasswordLength = strlen(password);
+ }
+ auth_identity.Domain = (LPSTR) domain;
+ auth_identity.DomainLength = strlen(domain);
+ return call_sspi_acquire_credentials_handle(NULL, SSPI_PACKAGE_NAME, SECPKG_CRED_OUTBOUND, NULL, &auth_identity, NULL, NULL, cred_handle, &ignored);
+}
+
+int sspi_step(CredHandle *cred_handle, int has_context, CtxtHandle *context, PVOID buffer, ULONG buffer_length, PVOID *out_buffer, ULONG *out_buffer_length, char *target)
+{
+ SecBufferDesc inbuf;
+ SecBuffer in_bufs[1];
+ SecBufferDesc outbuf;
+ SecBuffer out_bufs[1];
+
+ if (has_context > 0) {
+ // If we already have a context, we now have data to send.
+ // Put this data in an inbuf.
+ inbuf.ulVersion = SECBUFFER_VERSION;
+ inbuf.cBuffers = 1;
+ inbuf.pBuffers = in_bufs;
+ in_bufs[0].pvBuffer = buffer;
+ in_bufs[0].cbBuffer = buffer_length;
+ in_bufs[0].BufferType = SECBUFFER_TOKEN;
+ }
+
+ outbuf.ulVersion = SECBUFFER_VERSION;
+ outbuf.cBuffers = 1;
+ outbuf.pBuffers = out_bufs;
+ out_bufs[0].pvBuffer = NULL;
+ out_bufs[0].cbBuffer = 0;
+ out_bufs[0].BufferType = SECBUFFER_TOKEN;
+
+ ULONG context_attr = 0;
+
+ int ret = call_sspi_initialize_security_context(cred_handle,
+ has_context > 0 ? context : NULL,
+ (LPSTR) target,
+ ISC_REQ_ALLOCATE_MEMORY | ISC_REQ_MUTUAL_AUTH,
+ 0,
+ SECURITY_NETWORK_DREP,
+ has_context > 0 ? &inbuf : NULL,
+ 0,
+ context,
+ &outbuf,
+ &context_attr,
+ NULL);
+
+ *out_buffer = malloc(out_bufs[0].cbBuffer);
+ *out_buffer_length = out_bufs[0].cbBuffer;
+ memcpy(*out_buffer, out_bufs[0].pvBuffer, *out_buffer_length);
+
+ return ret;
+}
+
+int sspi_send_client_authz_id(CtxtHandle *context, PVOID *buffer, ULONG *buffer_length, char *user_plus_realm)
+{
+ SecPkgContext_Sizes sizes;
+ SECURITY_STATUS status = call_sspi_query_context_attributes(context, SECPKG_ATTR_SIZES, &sizes);
+
+ if (status != SEC_E_OK) {
+ return status;
+ }
+
+ size_t user_plus_realm_length = strlen(user_plus_realm);
+ int msgSize = 4 + user_plus_realm_length;
+ char *msg = malloc((sizes.cbSecurityTrailer + msgSize + sizes.cbBlockSize) * sizeof(char));
+ msg[sizes.cbSecurityTrailer + 0] = 1;
+ msg[sizes.cbSecurityTrailer + 1] = 0;
+ msg[sizes.cbSecurityTrailer + 2] = 0;
+ msg[sizes.cbSecurityTrailer + 3] = 0;
+ memcpy(&msg[sizes.cbSecurityTrailer + 4], user_plus_realm, user_plus_realm_length);
+
+ SecBuffer wrapBufs[3];
+ SecBufferDesc wrapBufDesc;
+ wrapBufDesc.cBuffers = 3;
+ wrapBufDesc.pBuffers = wrapBufs;
+ wrapBufDesc.ulVersion = SECBUFFER_VERSION;
+
+ wrapBufs[0].cbBuffer = sizes.cbSecurityTrailer;
+ wrapBufs[0].BufferType = SECBUFFER_TOKEN;
+ wrapBufs[0].pvBuffer = msg;
+
+ wrapBufs[1].cbBuffer = msgSize;
+ wrapBufs[1].BufferType = SECBUFFER_DATA;
+ wrapBufs[1].pvBuffer = msg + sizes.cbSecurityTrailer;
+
+ wrapBufs[2].cbBuffer = sizes.cbBlockSize;
+ wrapBufs[2].BufferType = SECBUFFER_PADDING;
+ wrapBufs[2].pvBuffer = msg + sizes.cbSecurityTrailer + msgSize;
+
+ status = call_sspi_encrypt_message(context, SECQOP_WRAP_NO_ENCRYPT, &wrapBufDesc, 0);
+ if (status != SEC_E_OK) {
+ free(msg);
+ return status;
+ }
+
+ *buffer_length = wrapBufs[0].cbBuffer + wrapBufs[1].cbBuffer + wrapBufs[2].cbBuffer;
+ *buffer = malloc(*buffer_length);
+
+ memcpy(*buffer, wrapBufs[0].pvBuffer, wrapBufs[0].cbBuffer);
+ memcpy(*buffer + wrapBufs[0].cbBuffer, wrapBufs[1].pvBuffer, wrapBufs[1].cbBuffer);
+ memcpy(*buffer + wrapBufs[0].cbBuffer + wrapBufs[1].cbBuffer, wrapBufs[2].pvBuffer, wrapBufs[2].cbBuffer);
+
+ free(msg);
+ return SEC_E_OK;
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.go
new file mode 100644
index 00000000000..d8ec0013709
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.go
@@ -0,0 +1,142 @@
+package sasl
+
+// #include "sasl_windows.h"
+import "C"
+
+import (
+ "fmt"
+ "strings"
+ "sync"
+ "unsafe"
+)
+
+type saslStepper interface {
+ Step(serverData []byte) (clientData []byte, done bool, err error)
+ Close()
+}
+
+type saslSession struct {
+ // Credentials
+ mech string
+ service string
+ host string
+ userPlusRealm string
+ target string
+ domain string
+
+ // Internal state
+ authComplete bool
+ errored bool
+ step int
+
+ // C internal state
+ credHandle C.CredHandle
+ context C.CtxtHandle
+ hasContext C.int
+
+ // Keep track of pointers we need to explicitly free
+ stringsToFree []*C.char
+}
+
+var initError error
+var initOnce sync.Once
+
+func initSSPI() {
+ rc := C.load_secur32_dll()
+ if rc != 0 {
+ initError = fmt.Errorf("Error loading libraries: %v", rc)
+ }
+}
+
+func New(username, password, mechanism, service, host string) (saslStepper, error) {
+ initOnce.Do(initSSPI)
+ ss := &saslSession{mech: mechanism, hasContext: 0, userPlusRealm: username}
+ if service == "" {
+ service = "mongodb"
+ }
+ if i := strings.Index(host, ":"); i >= 0 {
+ host = host[:i]
+ }
+ ss.service = service
+ ss.host = host
+
+ usernameComponents := strings.Split(username, "@")
+ if len(usernameComponents) < 2 {
+ return nil, fmt.Errorf("Username '%v' doesn't contain a realm!", username)
+ }
+ user := usernameComponents[0]
+ ss.domain = usernameComponents[1]
+ ss.target = fmt.Sprintf("%s/%s", ss.service, ss.host)
+
+ var status C.SECURITY_STATUS
+ // Step 0: call AcquireCredentialsHandle to get a nice SSPI CredHandle
+ if len(password) > 0 {
+ status = C.sspi_acquire_credentials_handle(&ss.credHandle, ss.cstr(user), ss.cstr(password), ss.cstr(ss.domain))
+ } else {
+ status = C.sspi_acquire_credentials_handle(&ss.credHandle, ss.cstr(user), nil, ss.cstr(ss.domain))
+ }
+ if status != C.SEC_E_OK {
+ ss.errored = true
+ return nil, fmt.Errorf("Couldn't create new SSPI client, error code %v", status)
+ }
+ return ss, nil
+}
+
+func (ss *saslSession) cstr(s string) *C.char {
+ cstr := C.CString(s)
+ ss.stringsToFree = append(ss.stringsToFree, cstr)
+ return cstr
+}
+
+func (ss *saslSession) Close() {
+ for _, cstr := range ss.stringsToFree {
+ C.free(unsafe.Pointer(cstr))
+ }
+}
+
+func (ss *saslSession) Step(serverData []byte) (clientData []byte, done bool, err error) {
+ ss.step++
+ if ss.step > 10 {
+ return nil, false, fmt.Errorf("too many SSPI steps without authentication")
+ }
+ var buffer C.PVOID
+ var bufferLength C.ULONG
+ var outBuffer C.PVOID
+ var outBufferLength C.ULONG
+ if len(serverData) > 0 {
+ buffer = (C.PVOID)(unsafe.Pointer(&serverData[0]))
+ bufferLength = C.ULONG(len(serverData))
+ }
+ var status C.int
+ if ss.authComplete {
+ // Step 3: last bit of magic to use the correct server credentials
+ status = C.sspi_send_client_authz_id(&ss.context, &outBuffer, &outBufferLength, ss.cstr(ss.userPlusRealm))
+ } else {
+ // Step 1 + Step 2: set up security context with the server and TGT
+ status = C.sspi_step(&ss.credHandle, ss.hasContext, &ss.context, buffer, bufferLength, &outBuffer, &outBufferLength, ss.cstr(ss.target))
+ }
+ if outBuffer != C.PVOID(nil) {
+ defer C.free(unsafe.Pointer(outBuffer))
+ }
+ if status != C.SEC_E_OK && status != C.SEC_I_CONTINUE_NEEDED {
+ ss.errored = true
+ return nil, false, ss.handleSSPIErrorCode(status)
+ }
+
+ clientData = C.GoBytes(unsafe.Pointer(outBuffer), C.int(outBufferLength))
+ if status == C.SEC_E_OK {
+ ss.authComplete = true
+ return clientData, true, nil
+ } else {
+ ss.hasContext = 1
+ return clientData, false, nil
+ }
+}
+
+func (ss *saslSession) handleSSPIErrorCode(code C.int) error {
+ switch {
+ case code == C.SEC_E_TARGET_UNKNOWN:
+ return fmt.Errorf("Target %v@%v not found", ss.target, ss.domain)
+ }
+ return fmt.Errorf("Unknown error doing step %v, error code %v", ss.step, code)
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.h b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.h
new file mode 100644
index 00000000000..a6b039567cf
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sasl_windows.h
@@ -0,0 +1,7 @@
+#include <windows.h>
+
+#include "sspi_windows.h"
+
+SECURITY_STATUS SEC_ENTRY sspi_acquire_credentials_handle(CredHandle* cred_handle, char* username, char* password, char* domain);
+int sspi_step(CredHandle* cred_handle, int has_context, CtxtHandle* context, PVOID buffer, ULONG buffer_length, PVOID* out_buffer, ULONG* out_buffer_length, char* target);
+int sspi_send_client_authz_id(CtxtHandle* context, PVOID* buffer, ULONG* buffer_length, char* user_plus_realm);
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sspi_windows.c b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sspi_windows.c
new file mode 100644
index 00000000000..63f9a6f8697
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sspi_windows.c
@@ -0,0 +1,96 @@
+// Code adapted from the NodeJS kerberos library:
+//
+// https://github.com/christkv/kerberos/tree/master/lib/win32/kerberos_sspi.c
+//
+// Under the terms of the Apache License, Version 2.0:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+#include <stdlib.h>
+
+#include "sspi_windows.h"
+
+static HINSTANCE sspi_secur32_dll = NULL;
+
+int load_secur32_dll()
+{
+ sspi_secur32_dll = LoadLibrary("secur32.dll");
+ if (sspi_secur32_dll == NULL) {
+ return GetLastError();
+ }
+ return 0;
+}
+
+SECURITY_STATUS SEC_ENTRY call_sspi_encrypt_message(PCtxtHandle phContext, unsigned long fQOP, PSecBufferDesc pMessage, unsigned long MessageSeqNo)
+{
+ if (sspi_secur32_dll == NULL) {
+ return -1;
+ }
+ encryptMessage_fn pfn_encryptMessage = (encryptMessage_fn) GetProcAddress(sspi_secur32_dll, "EncryptMessage");
+ if (!pfn_encryptMessage) {
+ return -2;
+ }
+ return (*pfn_encryptMessage)(phContext, fQOP, pMessage, MessageSeqNo);
+}
+
+SECURITY_STATUS SEC_ENTRY call_sspi_acquire_credentials_handle(
+ LPSTR pszPrincipal, LPSTR pszPackage, unsigned long fCredentialUse,
+ void *pvLogonId, void *pAuthData, SEC_GET_KEY_FN pGetKeyFn, void *pvGetKeyArgument,
+ PCredHandle phCredential, PTimeStamp ptsExpiry)
+{
+ if (sspi_secur32_dll == NULL) {
+ return -1;
+ }
+ acquireCredentialsHandle_fn pfn_acquireCredentialsHandle;
+#ifdef _UNICODE
+ pfn_acquireCredentialsHandle = (acquireCredentialsHandle_fn) GetProcAddress(sspi_secur32_dll, "AcquireCredentialsHandleW");
+#else
+ pfn_acquireCredentialsHandle = (acquireCredentialsHandle_fn) GetProcAddress(sspi_secur32_dll, "AcquireCredentialsHandleA");
+#endif
+ if (!pfn_acquireCredentialsHandle) {
+ return -2;
+ }
+ return (*pfn_acquireCredentialsHandle)(
+ pszPrincipal, pszPackage, fCredentialUse, pvLogonId, pAuthData,
+ pGetKeyFn, pvGetKeyArgument, phCredential, ptsExpiry);
+}
+
+SECURITY_STATUS SEC_ENTRY call_sspi_initialize_security_context(
+ PCredHandle phCredential, PCtxtHandle phContext, LPSTR pszTargetName,
+ unsigned long fContextReq, unsigned long Reserved1, unsigned long TargetDataRep,
+ PSecBufferDesc pInput, unsigned long Reserved2, PCtxtHandle phNewContext,
+ PSecBufferDesc pOutput, unsigned long *pfContextAttr, PTimeStamp ptsExpiry)
+{
+ if (sspi_secur32_dll == NULL) {
+ return -1;
+ }
+ initializeSecurityContext_fn pfn_initializeSecurityContext;
+#ifdef _UNICODE
+ pfn_initializeSecurityContext = (initializeSecurityContext_fn) GetProcAddress(sspi_secur32_dll, "InitializeSecurityContextW");
+#else
+ pfn_initializeSecurityContext = (initializeSecurityContext_fn) GetProcAddress(sspi_secur32_dll, "InitializeSecurityContextA");
+#endif
+ if (!pfn_initializeSecurityContext) {
+ return -2;
+ }
+ return (*pfn_initializeSecurityContext)(
+ phCredential, phContext, pszTargetName, fContextReq, Reserved1, TargetDataRep,
+ pInput, Reserved2, phNewContext, pOutput, pfContextAttr, ptsExpiry);
+}
+
+SECURITY_STATUS SEC_ENTRY call_sspi_query_context_attributes(PCtxtHandle phContext, unsigned long ulAttribute, void *pBuffer)
+{
+ if (sspi_secur32_dll == NULL) {
+ return -1;
+ }
+ queryContextAttributes_fn pfn_queryContextAttributes;
+#ifdef _UNICODE
+ pfn_queryContextAttributes = (queryContextAttributes_fn) GetProcAddress(sspi_secur32_dll, "QueryContextAttributesW");
+#else
+ pfn_queryContextAttributes = (queryContextAttributes_fn) GetProcAddress(sspi_secur32_dll, "QueryContextAttributesA");
+#endif
+ if (!pfn_queryContextAttributes) {
+ return -2;
+ }
+ return (*pfn_queryContextAttributes)(phContext, ulAttribute, pBuffer);
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sspi_windows.h b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sspi_windows.h
new file mode 100644
index 00000000000..d2832703171
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/sasl/sspi_windows.h
@@ -0,0 +1,70 @@
+// Code adapted from the NodeJS kerberos library:
+//
+// https://github.com/christkv/kerberos/tree/master/lib/win32/kerberos_sspi.h
+//
+// Under the terms of the Apache License, Version 2.0:
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+#ifndef SSPI_WINDOWS_H
+#define SSPI_WINDOWS_H
+
+#define SECURITY_WIN32 1
+
+#include <windows.h>
+#include <sspi.h>
+
+int load_secur32_dll();
+
+SECURITY_STATUS SEC_ENTRY call_sspi_encrypt_message(PCtxtHandle phContext, unsigned long fQOP, PSecBufferDesc pMessage, unsigned long MessageSeqNo);
+
+typedef DWORD (WINAPI *encryptMessage_fn)(PCtxtHandle phContext, ULONG fQOP, PSecBufferDesc pMessage, ULONG MessageSeqNo);
+
+SECURITY_STATUS SEC_ENTRY call_sspi_acquire_credentials_handle(
+ LPSTR pszPrincipal, // Name of principal
+ LPSTR pszPackage, // Name of package
+ unsigned long fCredentialUse, // Flags indicating use
+ void *pvLogonId, // Pointer to logon ID
+ void *pAuthData, // Package specific data
+ SEC_GET_KEY_FN pGetKeyFn, // Pointer to GetKey() func
+ void *pvGetKeyArgument, // Value to pass to GetKey()
+ PCredHandle phCredential, // (out) Cred Handle
+ PTimeStamp ptsExpiry // (out) Lifetime (optional)
+);
+
+typedef DWORD (WINAPI *acquireCredentialsHandle_fn)(
+ LPSTR pszPrincipal, LPSTR pszPackage, unsigned long fCredentialUse,
+ void *pvLogonId, void *pAuthData, SEC_GET_KEY_FN pGetKeyFn, void *pvGetKeyArgument,
+ PCredHandle phCredential, PTimeStamp ptsExpiry
+);
+
+SECURITY_STATUS SEC_ENTRY call_sspi_initialize_security_context(
+ PCredHandle phCredential, // Cred to base context
+ PCtxtHandle phContext, // Existing context (OPT)
+ LPSTR pszTargetName, // Name of target
+ unsigned long fContextReq, // Context Requirements
+ unsigned long Reserved1, // Reserved, MBZ
+ unsigned long TargetDataRep, // Data rep of target
+ PSecBufferDesc pInput, // Input Buffers
+ unsigned long Reserved2, // Reserved, MBZ
+ PCtxtHandle phNewContext, // (out) New Context handle
+ PSecBufferDesc pOutput, // (inout) Output Buffers
+ unsigned long *pfContextAttr, // (out) Context attrs
+ PTimeStamp ptsExpiry // (out) Life span (OPT)
+);
+
+typedef DWORD (WINAPI *initializeSecurityContext_fn)(
+ PCredHandle phCredential, PCtxtHandle phContext, LPSTR pszTargetName, unsigned long fContextReq,
+ unsigned long Reserved1, unsigned long TargetDataRep, PSecBufferDesc pInput, unsigned long Reserved2,
+ PCtxtHandle phNewContext, PSecBufferDesc pOutput, unsigned long *pfContextAttr, PTimeStamp ptsExpiry);
+
+SECURITY_STATUS SEC_ENTRY call_sspi_query_context_attributes(
+ PCtxtHandle phContext, // Context to query
+ unsigned long ulAttribute, // Attribute to query
+ void *pBuffer // Buffer for attributes
+);
+
+typedef DWORD (WINAPI *queryContextAttributes_fn)(
+ PCtxtHandle phContext, unsigned long ulAttribute, void *pBuffer);
+
+#endif // SSPI_WINDOWS_H
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/scram/scram.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/scram/scram.go
new file mode 100644
index 00000000000..80cda913526
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/scram/scram.go
@@ -0,0 +1,266 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2014 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Package scram implements a SCRAM-{SHA-1,etc} client per RFC 5802.
+//
+// http://tools.ietf.org/html/rfc5802
+//
+package scram
+
+import (
+ "bytes"
+ "crypto/hmac"
+ "crypto/rand"
+ "encoding/base64"
+ "fmt"
+ "hash"
+ "strconv"
+ "strings"
+)
+
+// Client implements a SCRAM-* client (SCRAM-SHA-1, SCRAM-SHA-256, etc).
+//
+// A Client may be used within a SASL conversation with logic resembling:
+//
+//    var in []byte
+//    var client = scram.NewClient(sha1.New, user, pass)
+//    for !client.Step(in) {
+//            out := client.Out()
+//            // send out to the server and read its reply into serverOut
+//            in = serverOut
+// }
+// if client.Err() != nil {
+// // auth failed
+// }
+//
+type Client struct {
+ newHash func() hash.Hash
+
+ user string
+ pass string
+ step int
+ out bytes.Buffer
+ err error
+
+ clientNonce []byte
+ serverNonce []byte
+ saltedPass []byte
+ authMsg bytes.Buffer
+}
+
+// NewClient returns a new SCRAM-* client with the provided hash algorithm.
+//
+// For SCRAM-SHA-1, for example, use:
+//
+// client := scram.NewClient(sha1.New, user, pass)
+//
+func NewClient(newHash func() hash.Hash, user, pass string) *Client {
+ c := &Client{
+ newHash: newHash,
+ user: user,
+ pass: pass,
+ }
+ c.out.Grow(256)
+ c.authMsg.Grow(256)
+ return c
+}
+
+// Out returns the data to be sent to the server in the current step.
+func (c *Client) Out() []byte {
+ if c.out.Len() == 0 {
+ return nil
+ }
+ return c.out.Bytes()
+}
+
+// Err returns the error that occurred, or nil if there were no errors.
+func (c *Client) Err() error {
+ return c.err
+}
+
+// SetNonce sets the client nonce to the provided value.
+// If not set, the nonce is generated automatically out of crypto/rand on the first step.
+func (c *Client) SetNonce(nonce []byte) {
+ c.clientNonce = nonce
+}
+
+var escaper = strings.NewReplacer("=", "=3D", ",", "=2C")
+
+// Step processes the incoming data from the server and makes the
+// next round of data for the server available via Client.Out.
+// Step returns false if there are no errors and more data is
+// still expected.
+func (c *Client) Step(in []byte) bool {
+ c.out.Reset()
+ if c.step > 2 || c.err != nil {
+ return false
+ }
+ c.step++
+ switch c.step {
+ case 1:
+ c.err = c.step1(in)
+ case 2:
+ c.err = c.step2(in)
+ case 3:
+ c.err = c.step3(in)
+ }
+ return c.step > 2 || c.err != nil
+}
+
+func (c *Client) step1(in []byte) error {
+ if len(c.clientNonce) == 0 {
+ const nonceLen = 6
+		buf := make([]byte, nonceLen+b64.EncodedLen(nonceLen))
+ if _, err := rand.Read(buf[:nonceLen]); err != nil {
+ return fmt.Errorf("cannot read random SCRAM-SHA-1 nonce from operating system: %v", err)
+ }
+ c.clientNonce = buf[nonceLen:]
+ b64.Encode(c.clientNonce, buf[:nonceLen])
+ }
+ c.authMsg.WriteString("n=")
+ escaper.WriteString(&c.authMsg, c.user)
+ c.authMsg.WriteString(",r=")
+ c.authMsg.Write(c.clientNonce)
+
+ c.out.WriteString("n,,")
+ c.out.Write(c.authMsg.Bytes())
+ return nil
+}
+
+var b64 = base64.StdEncoding
+
+func (c *Client) step2(in []byte) error {
+ c.authMsg.WriteByte(',')
+ c.authMsg.Write(in)
+
+ fields := bytes.Split(in, []byte(","))
+ if len(fields) != 3 {
+ return fmt.Errorf("expected 3 fields in first SCRAM-SHA-1 server message, got %d: %q", len(fields), in)
+ }
+ if !bytes.HasPrefix(fields[0], []byte("r=")) || len(fields[0]) < 2 {
+ return fmt.Errorf("server sent an invalid SCRAM-SHA-1 nonce: %q", fields[0])
+ }
+ if !bytes.HasPrefix(fields[1], []byte("s=")) || len(fields[1]) < 6 {
+ return fmt.Errorf("server sent an invalid SCRAM-SHA-1 salt: %q", fields[1])
+ }
+ if !bytes.HasPrefix(fields[2], []byte("i=")) || len(fields[2]) < 6 {
+ return fmt.Errorf("server sent an invalid SCRAM-SHA-1 iteration count: %q", fields[2])
+ }
+
+ c.serverNonce = fields[0][2:]
+ if !bytes.HasPrefix(c.serverNonce, c.clientNonce) {
+ return fmt.Errorf("server SCRAM-SHA-1 nonce is not prefixed by client nonce: got %q, want %q+\"...\"", c.serverNonce, c.clientNonce)
+ }
+
+ salt := make([]byte, b64.DecodedLen(len(fields[1][2:])))
+ n, err := b64.Decode(salt, fields[1][2:])
+ if err != nil {
+ return fmt.Errorf("cannot decode SCRAM-SHA-1 salt sent by server: %q", fields[1])
+ }
+ salt = salt[:n]
+ iterCount, err := strconv.Atoi(string(fields[2][2:]))
+ if err != nil {
+ return fmt.Errorf("server sent an invalid SCRAM-SHA-1 iteration count: %q", fields[2])
+ }
+ c.saltPassword(salt, iterCount)
+
+ c.authMsg.WriteString(",c=biws,r=")
+ c.authMsg.Write(c.serverNonce)
+
+ c.out.WriteString("c=biws,r=")
+ c.out.Write(c.serverNonce)
+ c.out.WriteString(",p=")
+ c.out.Write(c.clientProof())
+ return nil
+}
+
+func (c *Client) step3(in []byte) error {
+ var isv, ise bool
+ var fields = bytes.Split(in, []byte(","))
+ if len(fields) == 1 {
+ isv = bytes.HasPrefix(fields[0], []byte("v="))
+ ise = bytes.HasPrefix(fields[0], []byte("e="))
+ }
+ if ise {
+ return fmt.Errorf("SCRAM-SHA-1 authentication error: %s", fields[0][2:])
+ } else if !isv {
+ return fmt.Errorf("unsupported SCRAM-SHA-1 final message from server: %q", in)
+ }
+ if !bytes.Equal(c.serverSignature(), fields[0][2:]) {
+ return fmt.Errorf("cannot authenticate SCRAM-SHA-1 server signature: %q", fields[0][2:])
+ }
+ return nil
+}
+
+func (c *Client) saltPassword(salt []byte, iterCount int) {
+ mac := hmac.New(c.newHash, []byte(c.pass))
+ mac.Write(salt)
+ mac.Write([]byte{0, 0, 0, 1})
+ ui := mac.Sum(nil)
+ hi := make([]byte, len(ui))
+ copy(hi, ui)
+ for i := 1; i < iterCount; i++ {
+ mac.Reset()
+ mac.Write(ui)
+ mac.Sum(ui[:0])
+ for j, b := range ui {
+ hi[j] ^= b
+ }
+ }
+ c.saltedPass = hi
+}
+
+func (c *Client) clientProof() []byte {
+ mac := hmac.New(c.newHash, c.saltedPass)
+ mac.Write([]byte("Client Key"))
+ clientKey := mac.Sum(nil)
+ hash := c.newHash()
+ hash.Write(clientKey)
+ storedKey := hash.Sum(nil)
+ mac = hmac.New(c.newHash, storedKey)
+ mac.Write(c.authMsg.Bytes())
+ clientProof := mac.Sum(nil)
+ for i, b := range clientKey {
+ clientProof[i] ^= b
+ }
+ clientProof64 := make([]byte, b64.EncodedLen(len(clientProof)))
+ b64.Encode(clientProof64, clientProof)
+ return clientProof64
+}
+
+func (c *Client) serverSignature() []byte {
+ mac := hmac.New(c.newHash, c.saltedPass)
+ mac.Write([]byte("Server Key"))
+ serverKey := mac.Sum(nil)
+
+ mac = hmac.New(c.newHash, serverKey)
+ mac.Write(c.authMsg.Bytes())
+ serverSignature := mac.Sum(nil)
+
+ encoded := make([]byte, b64.EncodedLen(len(serverSignature)))
+ b64.Encode(encoded, serverSignature)
+ return encoded
+}
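
The sketch below drives the Client above through the RFC 5802 example exchange, using the same vectors as the first entry in the tests table of scram_test.go, which follows. It is written as an example function that could sit alongside that test file; the package is internal to mgo, so it cannot be imported from outside the tree.

    package scram_test

    import (
        "crypto/sha1"
        "fmt"

        "gopkg.in/mgo.v2/internal/scram"
    )

    func ExampleClient() {
        client := scram.NewClient(sha1.New, "user", "pencil")
        client.SetNonce([]byte("fyko+d2lbbFgONRv9qkxdawL"))

        client.Step(nil) // client-first message
        fmt.Println(string(client.Out()))

        // Server-first message in, client-final message (with proof) out.
        client.Step([]byte("r=fyko+d2lbbFgONRv9qkxdawL3rfcNHYJY1ZVvWVs7j,s=QSXCR+Q6sek8bf92,i=4096"))
        fmt.Println(string(client.Out()))

        // Server-final message carries the server signature to verify.
        client.Step([]byte("v=rmF9pqV8S7suAoZWja4dJRkFsKQ="))
        fmt.Println(client.Err())

        // Output:
        // n,,n=user,r=fyko+d2lbbFgONRv9qkxdawL
        // c=biws,r=fyko+d2lbbFgONRv9qkxdawL3rfcNHYJY1ZVvWVs7j,p=v0X8v3Bz2T0CJGbJQyF0X+HI4Ts=
        // <nil>
    }
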
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/scram/scram_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/scram/scram_test.go
new file mode 100644
index 00000000000..9c20fdfc488
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/internal/scram/scram_test.go
@@ -0,0 +1,67 @@
+package scram_test
+
+import (
+ "crypto/sha1"
+ "testing"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2/internal/scram"
+ "strings"
+)
+
+var _ = Suite(&S{})
+
+func Test(t *testing.T) { TestingT(t) }
+
+type S struct{}
+
+var tests = [][]string{{
+ "U: user pencil",
+ "N: fyko+d2lbbFgONRv9qkxdawL",
+ "C: n,,n=user,r=fyko+d2lbbFgONRv9qkxdawL",
+ "S: r=fyko+d2lbbFgONRv9qkxdawL3rfcNHYJY1ZVvWVs7j,s=QSXCR+Q6sek8bf92,i=4096",
+ "C: c=biws,r=fyko+d2lbbFgONRv9qkxdawL3rfcNHYJY1ZVvWVs7j,p=v0X8v3Bz2T0CJGbJQyF0X+HI4Ts=",
+ "S: v=rmF9pqV8S7suAoZWja4dJRkFsKQ=",
+}, {
+ "U: root fe8c89e308ec08763df36333cbf5d3a2",
+ "N: OTcxNDk5NjM2MzE5",
+ "C: n,,n=root,r=OTcxNDk5NjM2MzE5",
+ "S: r=OTcxNDk5NjM2MzE581Ra3provgG0iDsMkDiIAlrh4532dDLp,s=XRDkVrFC9JuL7/F4tG0acQ==,i=10000",
+ "C: c=biws,r=OTcxNDk5NjM2MzE581Ra3provgG0iDsMkDiIAlrh4532dDLp,p=6y1jp9R7ETyouTXS9fW9k5UHdBc=",
+ "S: v=LBnd9dUJRxdqZiEq91NKP3z/bHA=",
+}}
+
+func (s *S) TestExamples(c *C) {
+ for _, steps := range tests {
+ if len(steps) < 2 || len(steps[0]) < 3 || !strings.HasPrefix(steps[0], "U: ") {
+ c.Fatalf("Invalid test: %#v", steps)
+ }
+ auth := strings.Fields(steps[0][3:])
+ client := scram.NewClient(sha1.New, auth[0], auth[1])
+ first, done := true, false
+ c.Logf("-----")
+ c.Logf("%s", steps[0])
+ for _, step := range steps[1:] {
+ c.Logf("%s", step)
+ switch step[:3] {
+ case "N: ":
+ client.SetNonce([]byte(step[3:]))
+ case "C: ":
+ if first {
+ first = false
+ done = client.Step(nil)
+ }
+ c.Assert(done, Equals, false)
+ c.Assert(client.Err(), IsNil)
+ c.Assert(string(client.Out()), Equals, step[3:])
+ case "S: ":
+ first = false
+ done = client.Step([]byte(step[3:]))
+ default:
+ panic("invalid test line: " + step)
+ }
+ }
+ c.Assert(done, Equals, true)
+ c.Assert(client.Err(), IsNil)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/log.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/log.go
new file mode 100644
index 00000000000..53eb4237b89
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/log.go
@@ -0,0 +1,133 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo
+
+import (
+ "fmt"
+ "sync"
+)
+
+// ---------------------------------------------------------------------------
+// Logging integration.
+
+// Avoid importing the log type information unnecessarily. There's a small cost
+// associated with using an interface rather than the type. Depending on how
+// often the logger is plugged in, it would be worth using the type instead.
+type log_Logger interface {
+ Output(calldepth int, s string) error
+}
+
+var (
+ globalLogger log_Logger
+ globalDebug bool
+ globalMutex sync.Mutex
+)
+
+// RACE WARNING: There are known data races when logging, which are manually
+// silenced when the race detector is in use. These data races won't be
+// observed in typical use, because logging is supposed to be set up once when
+// the application starts. Because raceDetector is a constant, the compiler
+// should elide the locks altogether in normal (non-race) builds.
+
+// SetLogger specifies the *log.Logger object to which log messages should be sent.

+func SetLogger(logger log_Logger) {
+ if raceDetector {
+ globalMutex.Lock()
+ defer globalMutex.Unlock()
+ }
+ globalLogger = logger
+}
+
+// SetDebug enables the delivery of debug messages to the logger. It is only
+// meaningful if a logger has also been set.
+func SetDebug(debug bool) {
+ if raceDetector {
+ globalMutex.Lock()
+ defer globalMutex.Unlock()
+ }
+ globalDebug = debug
+}
+
+func log(v ...interface{}) {
+ if raceDetector {
+ globalMutex.Lock()
+ defer globalMutex.Unlock()
+ }
+ if globalLogger != nil {
+ globalLogger.Output(2, fmt.Sprint(v...))
+ }
+}
+
+func logln(v ...interface{}) {
+ if raceDetector {
+ globalMutex.Lock()
+ defer globalMutex.Unlock()
+ }
+ if globalLogger != nil {
+ globalLogger.Output(2, fmt.Sprintln(v...))
+ }
+}
+
+func logf(format string, v ...interface{}) {
+ if raceDetector {
+ globalMutex.Lock()
+ defer globalMutex.Unlock()
+ }
+ if globalLogger != nil {
+ globalLogger.Output(2, fmt.Sprintf(format, v...))
+ }
+}
+
+func debug(v ...interface{}) {
+ if raceDetector {
+ globalMutex.Lock()
+ defer globalMutex.Unlock()
+ }
+ if globalDebug && globalLogger != nil {
+ globalLogger.Output(2, fmt.Sprint(v...))
+ }
+}
+
+func debugln(v ...interface{}) {
+ if raceDetector {
+ globalMutex.Lock()
+ defer globalMutex.Unlock()
+ }
+ if globalDebug && globalLogger != nil {
+ globalLogger.Output(2, fmt.Sprintln(v...))
+ }
+}
+
+func debugf(format string, v ...interface{}) {
+ if raceDetector {
+ globalMutex.Lock()
+ defer globalMutex.Unlock()
+ }
+ if globalDebug && globalLogger != nil {
+ globalLogger.Output(2, fmt.Sprintf(format, v...))
+ }
+}
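
Because the standard library's *log.Logger already provides Output(calldepth int, s string) error, it satisfies the log_Logger interface directly. A minimal sketch of wiring it in from application code:

    package main

    import (
        "log"
        "os"

        "gopkg.in/mgo.v2"
    )

    func main() {
        // *log.Logger implements Output(calldepth, s), so it can be passed as-is.
        mgo.SetLogger(log.New(os.Stderr, "[mgo] ", log.LstdFlags))
        mgo.SetDebug(true) // also route debug/debugf/debugln output to the logger
    }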
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/queue.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/queue.go
new file mode 100644
index 00000000000..e9245de7001
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/queue.go
@@ -0,0 +1,91 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo
+
+type queue struct {
+ elems []interface{}
+ nelems, popi, pushi int
+}
+
+func (q *queue) Len() int {
+ return q.nelems
+}
+
+func (q *queue) Push(elem interface{}) {
+ //debugf("Pushing(pushi=%d popi=%d cap=%d): %#v\n",
+ // q.pushi, q.popi, len(q.elems), elem)
+ if q.nelems == len(q.elems) {
+ q.expand()
+ }
+ q.elems[q.pushi] = elem
+ q.nelems++
+ q.pushi = (q.pushi + 1) % len(q.elems)
+ //debugf(" Pushed(pushi=%d popi=%d cap=%d): %#v\n",
+ // q.pushi, q.popi, len(q.elems), elem)
+}
+
+func (q *queue) Pop() (elem interface{}) {
+ //debugf("Popping(pushi=%d popi=%d cap=%d)\n",
+ // q.pushi, q.popi, len(q.elems))
+ if q.nelems == 0 {
+ return nil
+ }
+ elem = q.elems[q.popi]
+ q.elems[q.popi] = nil // Help GC.
+ q.nelems--
+ q.popi = (q.popi + 1) % len(q.elems)
+ //debugf(" Popped(pushi=%d popi=%d cap=%d): %#v\n",
+ // q.pushi, q.popi, len(q.elems), elem)
+ return elem
+}
+
+func (q *queue) expand() {
+ curcap := len(q.elems)
+ var newcap int
+ if curcap == 0 {
+ newcap = 8
+ } else if curcap < 1024 {
+ newcap = curcap * 2
+ } else {
+ newcap = curcap + (curcap / 4)
+ }
+ elems := make([]interface{}, newcap)
+
+ if q.popi == 0 {
+ copy(elems, q.elems)
+ q.pushi = curcap
+ } else {
+ newpopi := newcap - (curcap - q.popi)
+ copy(elems, q.elems[:q.popi])
+ copy(elems[newpopi:], q.elems[q.popi:])
+ q.popi = newpopi
+ }
+ for i := range q.elems {
+ q.elems[i] = nil // Help GC.
+ }
+ q.elems = elems
+}
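
The queue type is unexported, so it can only be exercised from within package mgo (as the test file below does). A test-style sketch of the growth policy implemented by expand, assuming it lives in the same package:

    package mgo

    // capacityAfter is an illustrative helper: it pushes n elements and reports
    // the resulting backing-array capacity, which grows 0 -> 8, doubles up to
    // 1024, and then grows by 25% per expansion.
    func capacityAfter(n int) int {
        q := queue{}
        for i := 0; i < n; i++ {
            q.Push(i)
        }
        return len(q.elems)
    }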
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/queue_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/queue_test.go
new file mode 100644
index 00000000000..bd0ab550f97
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/queue_test.go
@@ -0,0 +1,101 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo
+
+import (
+ . "gopkg.in/check.v1"
+)
+
+type QS struct{}
+
+var _ = Suite(&QS{})
+
+func (s *QS) TestSequentialGrowth(c *C) {
+ q := queue{}
+ n := 2048
+ for i := 0; i != n; i++ {
+ q.Push(i)
+ }
+ for i := 0; i != n; i++ {
+ c.Assert(q.Pop(), Equals, i)
+ }
+}
+
+var queueTestLists = [][]int{
+ // {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}
+ {0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
+
+ // {8, 9, 10, 11, ... 2, 3, 4, 5, 6, 7}
+ {0, 1, 2, 3, 4, 5, 6, 7, -1, -1, 8, 9, 10, 11},
+
+ // {8, 9, 10, 11, ... 2, 3, 4, 5, 6, 7}
+ {0, 1, 2, 3, -1, -1, 4, 5, 6, 7, 8, 9, 10, 11},
+
+ // {0, 1, 2, 3, 4, 5, 6, 7, 8}
+ {0, 1, 2, 3, 4, 5, 6, 7, 8,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 0, 1, 2, 3, 4, 5, 6, 7, 8},
+}
+
+func (s *QS) TestQueueTestLists(c *C) {
+ test := []int{}
+ testi := 0
+ reset := func() {
+ test = test[0:0]
+ testi = 0
+ }
+ push := func(i int) {
+ test = append(test, i)
+ }
+ pop := func() (i int) {
+ if testi == len(test) {
+ return -1
+ }
+ i = test[testi]
+ testi++
+ return
+ }
+
+ for _, list := range queueTestLists {
+ reset()
+ q := queue{}
+ for _, n := range list {
+ if n == -1 {
+ c.Assert(q.Pop(), Equals, pop(), Commentf("With list %#v", list))
+ } else {
+ q.Push(n)
+ push(n)
+ }
+ }
+
+ for n := pop(); n != -1; n = pop() {
+ c.Assert(q.Pop(), Equals, n, Commentf("With list %#v", list))
+ }
+
+ c.Assert(q.Pop(), Equals, nil, Commentf("With list %#v", list))
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/raceoff.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/raceoff.go
new file mode 100644
index 00000000000..e60b141442e
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/raceoff.go
@@ -0,0 +1,5 @@
+// +build !race
+
+package mgo
+
+const raceDetector = false
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/raceon.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/raceon.go
new file mode 100644
index 00000000000..737b08eced8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/raceon.go
@@ -0,0 +1,5 @@
+// +build race
+
+package mgo
+
+const raceDetector = true
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/saslimpl.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/saslimpl.go
new file mode 100644
index 00000000000..0d25f25cbb6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/saslimpl.go
@@ -0,0 +1,11 @@
+//+build sasl
+
+package mgo
+
+import (
+ "gopkg.in/mgo.v2/internal/sasl"
+)
+
+func saslNew(cred Credential, host string) (saslStepper, error) {
+ return sasl.New(cred.Username, cred.Password, cred.Mechanism, cred.Service, host)
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/saslstub.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/saslstub.go
new file mode 100644
index 00000000000..6e9e30986dc
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/saslstub.go
@@ -0,0 +1,11 @@
+//+build !sasl
+
+package mgo
+
+import (
+ "fmt"
+)
+
+func saslNew(cred Credential, host string) (saslStepper, error) {
+ return nil, fmt.Errorf("SASL support not enabled during build (-tags sasl)")
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/server.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/server.go
new file mode 100644
index 00000000000..392598691f8
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/server.go
@@ -0,0 +1,463 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo
+
+import (
+ "errors"
+ "net"
+ "sort"
+ "sync"
+ "time"
+
+ "gopkg.in/mgo.v2/bson"
+)
+
+// ---------------------------------------------------------------------------
+// Mongo server encapsulation.
+
+type mongoServer struct {
+ sync.RWMutex
+ Addr string
+ ResolvedAddr string
+ tcpaddr *net.TCPAddr
+ unusedSockets []*mongoSocket
+ liveSockets []*mongoSocket
+ closed bool
+ abended bool
+ sync chan bool
+ dial dialer
+ pingValue time.Duration
+ pingIndex int
+ pingCount uint32
+ pingWindow [6]time.Duration
+ info *mongoServerInfo
+}
+
+type dialer struct {
+ old func(addr net.Addr) (net.Conn, error)
+ new func(addr *ServerAddr) (net.Conn, error)
+}
+
+func (dial dialer) isSet() bool {
+ return dial.old != nil || dial.new != nil
+}
+
+type mongoServerInfo struct {
+ Master bool
+ Mongos bool
+ Tags bson.D
+ MaxWireVersion int
+ SetName string
+}
+
+var defaultServerInfo mongoServerInfo
+
+func newServer(addr string, tcpaddr *net.TCPAddr, sync chan bool, dial dialer) *mongoServer {
+ server := &mongoServer{
+ Addr: addr,
+ ResolvedAddr: tcpaddr.String(),
+ tcpaddr: tcpaddr,
+ sync: sync,
+ dial: dial,
+ info: &defaultServerInfo,
+ pingValue: time.Hour, // Push it back before an actual ping.
+ }
+ go server.pinger(true)
+ return server
+}
+
+var errPoolLimit = errors.New("per-server connection limit reached")
+var errServerClosed = errors.New("server was closed")
+
+// AcquireSocket returns a socket for communicating with the server.
+// This will attempt to reuse an old connection, if one is available. Otherwise,
+// it will establish a new one. The returned socket is owned by the call site,
+// and will return to the cache when the socket has its Release method called
+// the same number of times as AcquireSocket + Acquire were called for it.
+// If the poolLimit argument is greater than zero and the number of sockets in
+// use in this server is greater than the provided limit, errPoolLimit is
+// returned.
+func (server *mongoServer) AcquireSocket(poolLimit int, timeout time.Duration) (socket *mongoSocket, abended bool, err error) {
+ for {
+ server.Lock()
+ abended = server.abended
+ if server.closed {
+ server.Unlock()
+ return nil, abended, errServerClosed
+ }
+ n := len(server.unusedSockets)
+ if poolLimit > 0 && len(server.liveSockets)-n >= poolLimit {
+ server.Unlock()
+ return nil, false, errPoolLimit
+ }
+ if n > 0 {
+ socket = server.unusedSockets[n-1]
+ server.unusedSockets[n-1] = nil // Help GC.
+ server.unusedSockets = server.unusedSockets[:n-1]
+ info := server.info
+ server.Unlock()
+ err = socket.InitialAcquire(info, timeout)
+ if err != nil {
+ continue
+ }
+ } else {
+ server.Unlock()
+ socket, err = server.Connect(timeout)
+ if err == nil {
+ server.Lock()
+				// We've waited for Connect; check whether the server
+				// was closed in the meantime.
+ if server.closed {
+ server.Unlock()
+ socket.Release()
+ socket.Close()
+ return nil, abended, errServerClosed
+ }
+ server.liveSockets = append(server.liveSockets, socket)
+ server.Unlock()
+ }
+ }
+ return
+ }
+ panic("unreachable")
+}
+
+// Connect establishes a new connection to the server. This should
+// generally be done through server.AcquireSocket().
+func (server *mongoServer) Connect(timeout time.Duration) (*mongoSocket, error) {
+ server.RLock()
+ master := server.info.Master
+ dial := server.dial
+ server.RUnlock()
+
+ logf("Establishing new connection to %s (timeout=%s)...", server.Addr, timeout)
+ var conn net.Conn
+ var err error
+ switch {
+ case !dial.isSet():
+ // Cannot do this because it lacks timeout support. :-(
+ //conn, err = net.DialTCP("tcp", nil, server.tcpaddr)
+ conn, err = net.DialTimeout("tcp", server.ResolvedAddr, timeout)
+ if tcpconn, ok := conn.(*net.TCPConn); ok {
+ tcpconn.SetKeepAlive(true)
+ } else if err == nil {
+ panic("internal error: obtained TCP connection is not a *net.TCPConn!?")
+ }
+ case dial.old != nil:
+ conn, err = dial.old(server.tcpaddr)
+ case dial.new != nil:
+ conn, err = dial.new(&ServerAddr{server.Addr, server.tcpaddr})
+ default:
+ panic("dialer is set, but both dial.old and dial.new are nil")
+ }
+ if err != nil {
+ logf("Connection to %s failed: %v", server.Addr, err.Error())
+ return nil, err
+ }
+ logf("Connection to %s established.", server.Addr)
+
+ stats.conn(+1, master)
+ return newSocket(server, conn, timeout), nil
+}
+
+// Close forces closing all sockets that are alive, whether
+// they're currently in use or not.
+func (server *mongoServer) Close() {
+ server.Lock()
+ server.closed = true
+ liveSockets := server.liveSockets
+ unusedSockets := server.unusedSockets
+ server.liveSockets = nil
+ server.unusedSockets = nil
+ server.Unlock()
+ logf("Connections to %s closing (%d live sockets).", server.Addr, len(liveSockets))
+ for i, s := range liveSockets {
+ s.Close()
+ liveSockets[i] = nil
+ }
+ for i := range unusedSockets {
+ unusedSockets[i] = nil
+ }
+}
+
+// RecycleSocket puts socket back into the unused cache.
+func (server *mongoServer) RecycleSocket(socket *mongoSocket) {
+ server.Lock()
+ if !server.closed {
+ server.unusedSockets = append(server.unusedSockets, socket)
+ }
+ server.Unlock()
+}
+
+func removeSocket(sockets []*mongoSocket, socket *mongoSocket) []*mongoSocket {
+ for i, s := range sockets {
+ if s == socket {
+ copy(sockets[i:], sockets[i+1:])
+ n := len(sockets) - 1
+ sockets[n] = nil
+ sockets = sockets[:n]
+ break
+ }
+ }
+ return sockets
+}
+
+// AbendSocket notifies the server that the given socket has terminated
+// abnormally, and thus should be discarded rather than cached.
+func (server *mongoServer) AbendSocket(socket *mongoSocket) {
+ server.Lock()
+ server.abended = true
+ if server.closed {
+ server.Unlock()
+ return
+ }
+ server.liveSockets = removeSocket(server.liveSockets, socket)
+ server.unusedSockets = removeSocket(server.unusedSockets, socket)
+ server.Unlock()
+ // Maybe just a timeout, but suggest a cluster sync up just in case.
+ select {
+ case server.sync <- true:
+ default:
+ }
+}
+
+func (server *mongoServer) SetInfo(info *mongoServerInfo) {
+ server.Lock()
+ server.info = info
+ server.Unlock()
+}
+
+func (server *mongoServer) Info() *mongoServerInfo {
+ server.Lock()
+ info := server.info
+ server.Unlock()
+ return info
+}
+
+func (server *mongoServer) hasTags(serverTags []bson.D) bool {
+NextTagSet:
+ for _, tags := range serverTags {
+ NextReqTag:
+ for _, req := range tags {
+ for _, has := range server.info.Tags {
+ if req.Name == has.Name {
+ if req.Value == has.Value {
+ continue NextReqTag
+ }
+ continue NextTagSet
+ }
+ }
+ continue NextTagSet
+ }
+ return true
+ }
+ return false
+}
+
+var pingDelay = 15 * time.Second
+
+func (server *mongoServer) pinger(loop bool) {
+ var delay time.Duration
+ if raceDetector {
+ // This variable is only ever touched by tests.
+ globalMutex.Lock()
+ delay = pingDelay
+ globalMutex.Unlock()
+ } else {
+ delay = pingDelay
+ }
+ op := queryOp{
+ collection: "admin.$cmd",
+ query: bson.D{{"ping", 1}},
+ flags: flagSlaveOk,
+ limit: -1,
+ }
+ for {
+ if loop {
+ time.Sleep(delay)
+ }
+ op := op
+ socket, _, err := server.AcquireSocket(0, delay)
+ if err == nil {
+ start := time.Now()
+ _, _ = socket.SimpleQuery(&op)
+ delay := time.Now().Sub(start)
+
+ server.pingWindow[server.pingIndex] = delay
+ server.pingIndex = (server.pingIndex + 1) % len(server.pingWindow)
+ server.pingCount++
+ var max time.Duration
+ for i := 0; i < len(server.pingWindow) && uint32(i) < server.pingCount; i++ {
+ if server.pingWindow[i] > max {
+ max = server.pingWindow[i]
+ }
+ }
+ socket.Release()
+ server.Lock()
+ if server.closed {
+ loop = false
+ }
+ server.pingValue = max
+ server.Unlock()
+ logf("Ping for %s is %d ms", server.Addr, max/time.Millisecond)
+ } else if err == errServerClosed {
+ return
+ }
+ if !loop {
+ return
+ }
+ }
+}
+
+type mongoServerSlice []*mongoServer
+
+func (s mongoServerSlice) Len() int {
+ return len(s)
+}
+
+func (s mongoServerSlice) Less(i, j int) bool {
+ return s[i].ResolvedAddr < s[j].ResolvedAddr
+}
+
+func (s mongoServerSlice) Swap(i, j int) {
+ s[i], s[j] = s[j], s[i]
+}
+
+func (s mongoServerSlice) Sort() {
+ sort.Sort(s)
+}
+
+func (s mongoServerSlice) Search(resolvedAddr string) (i int, ok bool) {
+ n := len(s)
+ i = sort.Search(n, func(i int) bool {
+ return s[i].ResolvedAddr >= resolvedAddr
+ })
+ return i, i != n && s[i].ResolvedAddr == resolvedAddr
+}
+
+type mongoServers struct {
+ slice mongoServerSlice
+}
+
+func (servers *mongoServers) Search(resolvedAddr string) (server *mongoServer) {
+ if i, ok := servers.slice.Search(resolvedAddr); ok {
+ return servers.slice[i]
+ }
+ return nil
+}
+
+func (servers *mongoServers) Add(server *mongoServer) {
+ servers.slice = append(servers.slice, server)
+ servers.slice.Sort()
+}
+
+func (servers *mongoServers) Remove(other *mongoServer) (server *mongoServer) {
+ if i, found := servers.slice.Search(other.ResolvedAddr); found {
+ server = servers.slice[i]
+ copy(servers.slice[i:], servers.slice[i+1:])
+ n := len(servers.slice) - 1
+ servers.slice[n] = nil // Help GC.
+ servers.slice = servers.slice[:n]
+ }
+ return
+}
+
+func (servers *mongoServers) Slice() []*mongoServer {
+ return ([]*mongoServer)(servers.slice)
+}
+
+func (servers *mongoServers) Get(i int) *mongoServer {
+ return servers.slice[i]
+}
+
+func (servers *mongoServers) Len() int {
+ return len(servers.slice)
+}
+
+func (servers *mongoServers) Empty() bool {
+ return len(servers.slice) == 0
+}
+
+func (servers *mongoServers) HasMongos() bool {
+ for _, s := range servers.slice {
+ if s.Info().Mongos {
+ return true
+ }
+ }
+ return false
+}
+
+// BestFit returns the best guess of what would be the most interesting
+// server to perform operations on at this point in time.
+func (servers *mongoServers) BestFit(mode Mode, serverTags []bson.D) *mongoServer {
+ var best *mongoServer
+ for _, next := range servers.slice {
+ if best == nil {
+ best = next
+ best.RLock()
+ if serverTags != nil && !next.info.Mongos && !best.hasTags(serverTags) {
+ best.RUnlock()
+ best = nil
+ }
+ continue
+ }
+ next.RLock()
+ swap := false
+ switch {
+ case serverTags != nil && !next.info.Mongos && !next.hasTags(serverTags):
+ // Must have requested tags.
+ case mode == Secondary && next.info.Master && !next.info.Mongos:
+ // Must be a secondary or mongos.
+ case next.info.Master != best.info.Master && mode != Nearest:
+ // Prefer slaves, unless the mode is PrimaryPreferred.
+ swap = (mode == PrimaryPreferred) != best.info.Master
+ case absDuration(next.pingValue-best.pingValue) > 15*time.Millisecond:
+ // Prefer nearest server.
+ swap = next.pingValue < best.pingValue
+ case len(next.liveSockets)-len(next.unusedSockets) < len(best.liveSockets)-len(best.unusedSockets):
+ // Prefer servers with less connections.
+ swap = true
+ }
+ if swap {
+ best.RUnlock()
+ best = next
+ } else {
+ next.RUnlock()
+ }
+ }
+ if best != nil {
+ best.RUnlock()
+ }
+ return best
+}
+
+func absDuration(d time.Duration) time.Duration {
+ if d < 0 {
+ return -d
+ }
+ return d
+}
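
The pinger above keeps only the last six round-trip samples and publishes their maximum as pingValue, so a single slow probe keeps a server de-prioritized until it rotates out of the window. A standalone sketch of that rolling-maximum idea (illustrative only, not the driver's own types):

    package main

    import (
        "fmt"
        "time"
    )

    // rollingMax mirrors the pingWindow bookkeeping: remember the most recent
    // len(window) samples and report the largest of them.
    type rollingMax struct {
        window [6]time.Duration
        index  int
        count  uint32
    }

    func (r *rollingMax) add(d time.Duration) time.Duration {
        r.window[r.index] = d
        r.index = (r.index + 1) % len(r.window)
        r.count++
        var max time.Duration
        for i := 0; i < len(r.window) && uint32(i) < r.count; i++ {
            if r.window[i] > max {
                max = r.window[i]
            }
        }
        return max
    }

    func main() {
        var r rollingMax
        for _, ms := range []int{3, 40, 5, 4, 6, 2, 3, 3} {
            fmt.Println(r.add(time.Duration(ms) * time.Millisecond))
        }
    }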
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/session.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/session.go
new file mode 100644
index 00000000000..12ca8f2ac37
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/session.go
@@ -0,0 +1,4826 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo
+
+import (
+ "crypto/md5"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "math"
+ "net"
+ "net/url"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "gopkg.in/mgo.v2/bson"
+)
+
+type Mode int
+
+const (
+ // Relevant documentation on read preference modes:
+ //
+ // http://docs.mongodb.org/manual/reference/read-preference/
+ //
+ Primary Mode = 2 // Default mode. All operations read from the current replica set primary.
+ PrimaryPreferred Mode = 3 // Read from the primary if available. Read from the secondary otherwise.
+ Secondary Mode = 4 // Read from one of the nearest secondary members of the replica set.
+ SecondaryPreferred Mode = 5 // Read from one of the nearest secondaries if available. Read from primary otherwise.
+ Nearest Mode = 6 // Read from one of the nearest members, irrespective of it being primary or secondary.
+
+ // Read preference modes are specific to mgo:
+ Eventual Mode = 0 // Same as Nearest, but may change servers between reads.
+ Monotonic Mode = 1 // Same as SecondaryPreferred before first write. Same as Primary after first write.
+ Strong Mode = 2 // Same as Primary.
+)
+
+// mgo.v3: Drop Strong mode, suffix all modes with "Mode".
+
+// When changing the Session type, check if newSession and copySession
+// need to be updated too.
+
+// Session represents a communication session with the database.
+//
+// All Session methods are concurrency-safe and may be called from multiple
+// goroutines. In all session modes but Eventual, using the session from
+// multiple goroutines will cause them to share the same underlying socket.
+// See the documentation on Session.SetMode for more details.
+type Session struct {
+ m sync.RWMutex
+ cluster_ *mongoCluster
+ slaveSocket *mongoSocket
+ masterSocket *mongoSocket
+ slaveOk bool
+ consistency Mode
+ queryConfig query
+ safeOp *queryOp
+ syncTimeout time.Duration
+ sockTimeout time.Duration
+ defaultdb string
+ sourcedb string
+ dialCred *Credential
+ creds []Credential
+ poolLimit int
+ bypassValidation bool
+}
+
+type Database struct {
+ Session *Session
+ Name string
+}
+
+type Collection struct {
+ Database *Database
+ Name string // "collection"
+ FullName string // "db.collection"
+}
+
+type Query struct {
+ m sync.Mutex
+ session *Session
+ query // Enables default settings in session.
+}
+
+type query struct {
+ op queryOp
+ prefetch float64
+ limit int32
+}
+
+type getLastError struct {
+ CmdName int "getLastError,omitempty"
+ W interface{} "w,omitempty"
+ WTimeout int "wtimeout,omitempty"
+ FSync bool "fsync,omitempty"
+ J bool "j,omitempty"
+}
+
+type Iter struct {
+ m sync.Mutex
+ gotReply sync.Cond
+ session *Session
+ server *mongoServer
+ docData queue
+ err error
+ op getMoreOp
+ prefetch float64
+ limit int32
+ docsToReceive int
+ docsBeforeMore int
+ timeout time.Duration
+ timedout bool
+ findCmd bool
+}
+
+var (
+ ErrNotFound = errors.New("not found")
+ ErrCursor = errors.New("invalid cursor")
+)
+
+const (
+ defaultPrefetch = 0.25
+ maxUpsertRetries = 5
+)
+
+// Dial establishes a new session to the cluster identified by the given seed
+// server(s). The session will enable communication with all of the servers in
+// the cluster, so the seed servers are used only to find out about the cluster
+// topology.
+//
+// Dial will timeout after 10 seconds if a server isn't reached. The returned
+// session will timeout operations after one minute by default if servers
+// aren't available. To customize the timeout, see DialWithTimeout,
+// SetSyncTimeout, and SetSocketTimeout.
+//
+// This method is generally called just once for a given cluster. Further
+// sessions to the same cluster are then established using the New or Copy
+// methods on the obtained session. This will make them share the underlying
+// cluster, and manage the pool of connections appropriately.
+//
+// Once the session is not useful anymore, Close must be called to release the
+// resources appropriately.
+//
+// The seed servers must be provided in the following format:
+//
+// [mongodb://][user:pass@]host1[:port1][,host2[:port2],...][/database][?options]
+//
+// For example, it may be as simple as:
+//
+// localhost
+//
+// Or more involved like:
+//
+// mongodb://myuser:mypass@localhost:40001,otherhost:40001/mydb
+//
+// If the port number is not provided for a server, it defaults to 27017.
+//
+// The username and password provided in the URL will be used to authenticate
+// into the database named after the slash at the end of the host names, or
+// into the "admin" database if none is provided. The authentication information
+// will persist in sessions obtained through the New method as well.
+//
+// The following connection options are supported after the question mark:
+//
+// connect=direct
+//
+// Disables the automatic replica set server discovery logic, and
+// forces the use of servers provided only (even if secondaries).
+// Note that to talk to a secondary the consistency requirements
+// must be relaxed to Monotonic or Eventual via SetMode.
+//
+//
+// connect=replicaSet
+//
+// Discover replica sets automatically. Default connection behavior.
+//
+//
+// replicaSet=<setname>
+//
+// If specified will prevent the obtained session from communicating
+// with any server which is not part of a replica set with the given name.
+// The default is to communicate with any server specified or discovered
+// via the servers contacted.
+//
+//
+// authSource=<db>
+//
+// Informs the database used to establish credentials and privileges
+// with a MongoDB server. Defaults to the database name provided via
+// the URL path, and "admin" if that's unset.
+//
+//
+// authMechanism=<mechanism>
+//
+// Defines the protocol for credential negotiation. Defaults to "MONGODB-CR",
+// which is the default username/password challenge-response mechanism.
+//
+//
+// gssapiServiceName=<name>
+//
+// Defines the service name to use when authenticating with the GSSAPI
+// mechanism. Defaults to "mongodb".
+//
+//
+// maxPoolSize=<limit>
+//
+// Defines the per-server socket pool limit. Defaults to 4096.
+// See Session.SetPoolLimit for details.
+//
+//
+// Relevant documentation:
+//
+// http://docs.mongodb.org/manual/reference/connection-string/
+//
+func Dial(url string) (*Session, error) {
+ session, err := DialWithTimeout(url, 10*time.Second)
+ if err == nil {
+ session.SetSyncTimeout(1 * time.Minute)
+ session.SetSocketTimeout(1 * time.Minute)
+ }
+ return session, err
+}
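
A usage sketch tying the URL format documented above to Dial; the addresses, credentials, and option values are placeholders:

    package main

    import (
        "log"

        "gopkg.in/mgo.v2"
    )

    func main() {
        // Seed servers, credentials, default database, and an option, as described above.
        session, err := mgo.Dial("mongodb://myuser:mypass@localhost:40001,otherhost:40001/mydb?maxPoolSize=128")
        if err != nil {
            log.Fatal(err)
        }
        defer session.Close() // release the cluster resources when done
    }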
+
+// DialWithTimeout works like Dial, but uses timeout as the amount of time to
+// wait for a server to respond when first connecting and also on follow up
+// operations in the session. If timeout is zero, the call may block
+// forever waiting for a connection to be made.
+//
+// See SetSyncTimeout for customizing the timeout for the session.
+func DialWithTimeout(url string, timeout time.Duration) (*Session, error) {
+ info, err := ParseURL(url)
+ if err != nil {
+ return nil, err
+ }
+ info.Timeout = timeout
+ return DialWithInfo(info)
+}
+
+// ParseURL parses a MongoDB URL as accepted by the Dial function and returns
+// a value suitable for providing into DialWithInfo.
+//
+// See Dial for more details on the format of url.
+func ParseURL(url string) (*DialInfo, error) {
+ uinfo, err := extractURL(url)
+ if err != nil {
+ return nil, err
+ }
+ direct := false
+ mechanism := ""
+ service := ""
+ source := ""
+ setName := ""
+ poolLimit := 0
+ for k, v := range uinfo.options {
+ switch k {
+ case "authSource":
+ source = v
+ case "authMechanism":
+ mechanism = v
+ case "gssapiServiceName":
+ service = v
+ case "replicaSet":
+ setName = v
+ case "maxPoolSize":
+ poolLimit, err = strconv.Atoi(v)
+ if err != nil {
+ return nil, errors.New("bad value for maxPoolSize: " + v)
+ }
+ case "connect":
+ if v == "direct" {
+ direct = true
+ break
+ }
+ if v == "replicaSet" {
+ break
+ }
+ fallthrough
+ default:
+ return nil, errors.New("unsupported connection URL option: " + k + "=" + v)
+ }
+ }
+ info := DialInfo{
+ Addrs: uinfo.addrs,
+ Direct: direct,
+ Database: uinfo.db,
+ Username: uinfo.user,
+ Password: uinfo.pass,
+ Mechanism: mechanism,
+ Service: service,
+ Source: source,
+ PoolLimit: poolLimit,
+ ReplicaSetName: setName,
+ }
+ return &info, nil
+}
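
ParseURL is useful when a URL needs to be combined with DialInfo fields that have no URL equivalent; a sketch, with placeholder values:

    package main

    import (
        "log"
        "time"

        "gopkg.in/mgo.v2"
    )

    func main() {
        info, err := mgo.ParseURL("mongodb://localhost:27017/mydb") // placeholder URL
        if err != nil {
            log.Fatal(err)
        }
        // Adjust fields that cannot be expressed in the URL before dialing.
        info.Timeout = 5 * time.Second
        info.FailFast = true
        session, err := mgo.DialWithInfo(info)
        if err != nil {
            log.Fatal(err)
        }
        defer session.Close()
    }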
+
+// DialInfo holds options for establishing a session with a MongoDB cluster.
+// To use a URL, see the Dial function.
+type DialInfo struct {
+ // Addrs holds the addresses for the seed servers.
+ Addrs []string
+
+ // Direct informs whether to establish connections only with the
+ // specified seed servers, or to obtain information for the whole
+ // cluster and establish connections with further servers too.
+ Direct bool
+
+ // Timeout is the amount of time to wait for a server to respond when
+ // first connecting and on follow up operations in the session. If
+ // timeout is zero, the call may block forever waiting for a connection
+ // to be established. Timeout does not affect logic in DialServer.
+ Timeout time.Duration
+
+ // FailFast will cause connection and query attempts to fail faster when
+ // the server is unavailable, instead of retrying until the configured
+ // timeout period. Note that an unavailable server may silently drop
+ // packets instead of rejecting them, in which case it's impossible to
+ // distinguish it from a slow server, so the timeout stays relevant.
+ FailFast bool
+
+ // Database is the default database name used when the Session.DB method
+ // is called with an empty name, and is also used during the initial
+ // authentication if Source is unset.
+ Database string
+
+ // ReplicaSetName, if specified, will prevent the obtained session from
+ // communicating with any server which is not part of a replica set
+ // with the given name. The default is to communicate with any server
+ // specified or discovered via the servers contacted.
+ ReplicaSetName string
+
+ // Source is the database used to establish credentials and privileges
+ // with a MongoDB server. Defaults to the value of Database, if that is
+ // set, or "admin" otherwise.
+ Source string
+
+ // Service defines the service name to use when authenticating with the GSSAPI
+ // mechanism. Defaults to "mongodb".
+ Service string
+
+ // ServiceHost defines which hostname to use when authenticating
+ // with the GSSAPI mechanism. If not specified, defaults to the MongoDB
+ // server's address.
+ ServiceHost string
+
+ // Mechanism defines the protocol for credential negotiation.
+ // Defaults to "MONGODB-CR".
+ Mechanism string
+
+ // Username and Password inform the credentials for the initial authentication
+ // done on the database defined by the Source field. See Session.Login.
+ Username string
+ Password string
+
+ // PoolLimit defines the per-server socket pool limit. Defaults to 4096.
+ // See Session.SetPoolLimit for details.
+ PoolLimit int
+
+ // DialServer optionally specifies the dial function for establishing
+ // connections with the MongoDB servers.
+ DialServer func(addr *ServerAddr) (net.Conn, error)
+
+ // WARNING: This field is obsolete. See DialServer above.
+ Dial func(addr net.Addr) (net.Conn, error)
+}
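
A common reason to supply DialServer is wrapping connections in TLS; a sketch under the assumption that tlsConfig is populated appropriately for the deployment:

    package main

    import (
        "crypto/tls"
        "log"
        "net"

        "gopkg.in/mgo.v2"
    )

    func main() {
        tlsConfig := &tls.Config{} // assumption: RootCAs etc. filled in for a real deployment
        info := &mgo.DialInfo{
            Addrs: []string{"db.example.com:27017"}, // placeholder address
            DialServer: func(addr *mgo.ServerAddr) (net.Conn, error) {
                return tls.Dial("tcp", addr.String(), tlsConfig)
            },
        }
        session, err := mgo.DialWithInfo(info)
        if err != nil {
            log.Fatal(err)
        }
        defer session.Close()
    }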
+
+// mgo.v3: Drop DialInfo.Dial.
+
+// ServerAddr represents the address for establishing a connection to an
+// individual MongoDB server.
+type ServerAddr struct {
+ str string
+ tcp *net.TCPAddr
+}
+
+// String returns the address that was provided for the server before resolution.
+func (addr *ServerAddr) String() string {
+ return addr.str
+}
+
+// TCPAddr returns the resolved TCP address for the server.
+func (addr *ServerAddr) TCPAddr() *net.TCPAddr {
+ return addr.tcp
+}
+
+// DialWithInfo establishes a new session to the cluster identified by info.
+func DialWithInfo(info *DialInfo) (*Session, error) {
+ addrs := make([]string, len(info.Addrs))
+ for i, addr := range info.Addrs {
+ p := strings.LastIndexAny(addr, "]:")
+ if p == -1 || addr[p] != ':' {
+ // XXX This is untested. The test suite doesn't use the standard port.
+ addr += ":27017"
+ }
+ addrs[i] = addr
+ }
+ cluster := newCluster(addrs, info.Direct, info.FailFast, dialer{info.Dial, info.DialServer}, info.ReplicaSetName)
+ session := newSession(Eventual, cluster, info.Timeout)
+ session.defaultdb = info.Database
+ if session.defaultdb == "" {
+ session.defaultdb = "test"
+ }
+ session.sourcedb = info.Source
+ if session.sourcedb == "" {
+ session.sourcedb = info.Database
+ if session.sourcedb == "" {
+ session.sourcedb = "admin"
+ }
+ }
+ if info.Username != "" {
+ source := session.sourcedb
+ if info.Source == "" &&
+ (info.Mechanism == "GSSAPI" || info.Mechanism == "PLAIN" || info.Mechanism == "MONGODB-X509") {
+ source = "$external"
+ }
+ session.dialCred = &Credential{
+ Username: info.Username,
+ Password: info.Password,
+ Mechanism: info.Mechanism,
+ Service: info.Service,
+ ServiceHost: info.ServiceHost,
+ Source: source,
+ }
+ session.creds = []Credential{*session.dialCred}
+ }
+ if info.PoolLimit > 0 {
+ session.poolLimit = info.PoolLimit
+ }
+ cluster.Release()
+
+ // People get confused when we return a session that is not actually
+ // established to any servers yet (e.g. what if url was wrong). So,
+ // ping the server to ensure there's someone there, and abort if it
+ // fails.
+ if err := session.Ping(); err != nil {
+ session.Close()
+ return nil, err
+ }
+ session.SetMode(Strong, true)
+ return session, nil
+}
+
+func isOptSep(c rune) bool {
+ return c == ';' || c == '&'
+}
+
+type urlInfo struct {
+ addrs []string
+ user string
+ pass string
+ db string
+ options map[string]string
+}
+
+func extractURL(s string) (*urlInfo, error) {
+ if strings.HasPrefix(s, "mongodb://") {
+ s = s[10:]
+ }
+ info := &urlInfo{options: make(map[string]string)}
+ if c := strings.Index(s, "?"); c != -1 {
+ for _, pair := range strings.FieldsFunc(s[c+1:], isOptSep) {
+ l := strings.SplitN(pair, "=", 2)
+ if len(l) != 2 || l[0] == "" || l[1] == "" {
+ return nil, errors.New("connection option must be key=value: " + pair)
+ }
+ info.options[l[0]] = l[1]
+ }
+ s = s[:c]
+ }
+ if c := strings.Index(s, "@"); c != -1 {
+ pair := strings.SplitN(s[:c], ":", 2)
+ if len(pair) > 2 || pair[0] == "" {
+ return nil, errors.New("credentials must be provided as user:pass@host")
+ }
+ var err error
+ info.user, err = url.QueryUnescape(pair[0])
+ if err != nil {
+ return nil, fmt.Errorf("cannot unescape username in URL: %q", pair[0])
+ }
+ if len(pair) > 1 {
+ info.pass, err = url.QueryUnescape(pair[1])
+ if err != nil {
+ return nil, fmt.Errorf("cannot unescape password in URL")
+ }
+ }
+ s = s[c+1:]
+ }
+ if c := strings.Index(s, "/"); c != -1 {
+ info.db = s[c+1:]
+ s = s[:c]
+ }
+ info.addrs = strings.Split(s, ",")
+ return info, nil
+}
+
+func newSession(consistency Mode, cluster *mongoCluster, timeout time.Duration) (session *Session) {
+ cluster.Acquire()
+ session = &Session{
+ cluster_: cluster,
+ syncTimeout: timeout,
+ sockTimeout: timeout,
+ poolLimit: 4096,
+ }
+ debugf("New session %p on cluster %p", session, cluster)
+ session.SetMode(consistency, true)
+ session.SetSafe(&Safe{})
+ session.queryConfig.prefetch = defaultPrefetch
+ return session
+}
+
+func copySession(session *Session, keepCreds bool) (s *Session) {
+ cluster := session.cluster()
+ cluster.Acquire()
+ if session.masterSocket != nil {
+ session.masterSocket.Acquire()
+ }
+ if session.slaveSocket != nil {
+ session.slaveSocket.Acquire()
+ }
+ var creds []Credential
+ if keepCreds {
+ creds = make([]Credential, len(session.creds))
+ copy(creds, session.creds)
+ } else if session.dialCred != nil {
+ creds = []Credential{*session.dialCred}
+ }
+ scopy := *session
+ scopy.m = sync.RWMutex{}
+ scopy.creds = creds
+ s = &scopy
+ debugf("New session %p on cluster %p (copy from %p)", s, cluster, session)
+ return s
+}
+
+// LiveServers returns a list of server addresses which are
+// currently known to be alive.
+func (s *Session) LiveServers() (addrs []string) {
+ s.m.RLock()
+ addrs = s.cluster().LiveServers()
+ s.m.RUnlock()
+ return addrs
+}
+
+// DB returns a value representing the named database. If name
+// is empty, the database name provided in the dialed URL is
+// used instead. If that is also empty, "test" is used as a
+// fallback in a way equivalent to the mongo shell.
+//
+// Creating this value is a very lightweight operation, and
+// involves no network communication.
+func (s *Session) DB(name string) *Database {
+ if name == "" {
+ name = s.defaultdb
+ }
+ return &Database{s, name}
+}
+
+// C returns a value representing the named collection.
+//
+// Creating this value is a very lightweight operation, and
+// involves no network communication.
+func (db *Database) C(name string) *Collection {
+ return &Collection{db, name, db.Name + "." + name}
+}
+
+// With returns a copy of db that uses session s.
+func (db *Database) With(s *Session) *Database {
+ newdb := *db
+ newdb.Session = s
+ return &newdb
+}
+
+// With returns a copy of c that uses session s.
+func (c *Collection) With(s *Session) *Collection {
+ newdb := *c.Database
+ newdb.Session = s
+ newc := *c
+ newc.Database = &newdb
+ return &newc
+}
+
+// GridFS returns a GridFS value representing collections in db that
+// follow the standard GridFS specification.
+// The provided prefix (sometimes known as root) will determine which
+// collections to use, and is usually set to "fs" when there is a
+// single GridFS in the database.
+//
+// See the GridFS Create, Open, and OpenId methods for more details.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/GridFS
+// http://www.mongodb.org/display/DOCS/GridFS+Tools
+// http://www.mongodb.org/display/DOCS/GridFS+Specification
+//
+func (db *Database) GridFS(prefix string) *GridFS {
+ return newGridFS(db, prefix)
+}
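
A short sketch of storing a file through the prefix-based collections, using the Create method mentioned in the comment above; the database name and file contents are placeholders:

    package main

    import (
        "log"

        "gopkg.in/mgo.v2"
    )

    func main() {
        session, err := mgo.Dial("localhost") // placeholder address
        if err != nil {
            log.Fatal(err)
        }
        defer session.Close()

        gfs := session.DB("mydb").GridFS("fs")
        file, err := gfs.Create("hello.txt")
        if err != nil {
            log.Fatal(err)
        }
        if _, err := file.Write([]byte("hello, gridfs")); err != nil {
            log.Fatal(err)
        }
        if err := file.Close(); err != nil {
            log.Fatal(err)
        }
    }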
+
+// Run issues the provided command on the db database and unmarshals
+// its result in the respective argument. The cmd argument may be either
+// a string with the command name itself, in which case an empty document of
+// the form bson.M{cmd: 1} will be used, or it may be a full command document.
+//
+// Note that MongoDB considers the first marshalled key as the command
+// name, so when providing a command with options, it's important to
+// use an ordering-preserving document, such as a struct value or an
+// instance of bson.D. For instance:
+//
+// db.Run(bson.D{{"create", "mycollection"}, {"size", 1024}})
+//
+// For privileged commands typically run on the "admin" database, see
+// the Run method in the Session type.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Commands
+// http://www.mongodb.org/display/DOCS/List+of+Database+CommandSkips
+//
+func (db *Database) Run(cmd interface{}, result interface{}) error {
+ socket, err := db.Session.acquireSocket(true)
+ if err != nil {
+ return err
+ }
+ defer socket.Release()
+
+ // This is an optimized form of db.C("$cmd").Find(cmd).One(result).
+ return db.run(socket, cmd, result)
+}
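
A sketch of running the ordered command from the comment above and decoding the reply into a bson.M; the dial address and database name are placeholders:

    package main

    import (
        "fmt"
        "log"

        "gopkg.in/mgo.v2"
        "gopkg.in/mgo.v2/bson"
    )

    func main() {
        session, err := mgo.Dial("localhost") // placeholder address
        if err != nil {
            log.Fatal(err)
        }
        defer session.Close()

        // bson.D preserves key order, so "create" is interpreted as the command name.
        var result bson.M
        err = session.DB("mydb").Run(bson.D{{"create", "mycollection"}, {"size", 1024}}, &result)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(result["ok"])
    }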
+
+// Credential holds details to authenticate with a MongoDB server.
+type Credential struct {
+ // Username and Password hold the basic details for authentication.
+ // Password is optional with some authentication mechanisms.
+ Username string
+ Password string
+
+ // Source is the database used to establish credentials and privileges
+ // with a MongoDB server. Defaults to the default database provided
+ // during dial, or "admin" if that was unset.
+ Source string
+
+ // Service defines the service name to use when authenticating with the GSSAPI
+ // mechanism. Defaults to "mongodb".
+ Service string
+
+ // ServiceHost defines which hostname to use when authenticating
+ // with the GSSAPI mechanism. If not specified, defaults to the MongoDB
+ // server's address.
+ ServiceHost string
+
+ // Mechanism defines the protocol for credential negotiation.
+ // Defaults to "MONGODB-CR".
+ Mechanism string
+}
+
+// Login authenticates with MongoDB using the provided credential. The
+// authentication is valid for the whole session and will stay valid until
+// Logout is explicitly called for the same database, or the session is
+// closed.
+func (db *Database) Login(user, pass string) error {
+ return db.Session.Login(&Credential{Username: user, Password: pass, Source: db.Name})
+}
+
+// Login authenticates with MongoDB using the provided credential. The
+// authentication is valid for the whole session and will stay valid until
+// Logout is explicitly called for the same database, or the session is
+// closed.
+func (s *Session) Login(cred *Credential) error {
+ socket, err := s.acquireSocket(true)
+ if err != nil {
+ return err
+ }
+ defer socket.Release()
+
+ credCopy := *cred
+ if cred.Source == "" {
+ if cred.Mechanism == "GSSAPI" {
+ credCopy.Source = "$external"
+ } else {
+ credCopy.Source = s.sourcedb
+ }
+ }
+ err = socket.Login(credCopy)
+ if err != nil {
+ return err
+ }
+
+ s.m.Lock()
+ s.creds = append(s.creds, credCopy)
+ s.m.Unlock()
+ return nil
+}
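
A sketch of authenticating an already-dialed session with Login; the credentials are placeholders, and Source is left empty so the fallback described above applies:

    package main

    import (
        "log"

        "gopkg.in/mgo.v2"
    )

    func main() {
        session, err := mgo.Dial("localhost") // placeholder; dialed without credentials
        if err != nil {
            log.Fatal(err)
        }
        defer session.Close()

        err = session.Login(&mgo.Credential{
            Username: "myuser", // placeholder credentials
            Password: "mypass",
        })
        if err != nil {
            log.Fatal(err)
        }
    }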
+
+func (s *Session) socketLogin(socket *mongoSocket) error {
+ for _, cred := range s.creds {
+ if err := socket.Login(cred); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// Logout removes any established authentication credentials for the database.
+func (db *Database) Logout() {
+ session := db.Session
+ dbname := db.Name
+ session.m.Lock()
+ found := false
+ for i, cred := range session.creds {
+ if cred.Source == dbname {
+ copy(session.creds[i:], session.creds[i+1:])
+ session.creds = session.creds[:len(session.creds)-1]
+ found = true
+ break
+ }
+ }
+ if found {
+ if session.masterSocket != nil {
+ session.masterSocket.Logout(dbname)
+ }
+ if session.slaveSocket != nil {
+ session.slaveSocket.Logout(dbname)
+ }
+ }
+ session.m.Unlock()
+}
+
+// LogoutAll removes all established authentication credentials for the session.
+func (s *Session) LogoutAll() {
+ s.m.Lock()
+ for _, cred := range s.creds {
+ if s.masterSocket != nil {
+ s.masterSocket.Logout(cred.Source)
+ }
+ if s.slaveSocket != nil {
+ s.slaveSocket.Logout(cred.Source)
+ }
+ }
+ s.creds = s.creds[0:0]
+ s.m.Unlock()
+}
+
+// User represents a MongoDB user.
+//
+// Relevant documentation:
+//
+// http://docs.mongodb.org/manual/reference/privilege-documents/
+// http://docs.mongodb.org/manual/reference/user-privileges/
+//
+type User struct {
+ // Username is how the user identifies itself to the system.
+ Username string `bson:"user"`
+
+ // Password is the plaintext password for the user. If set,
+ // the UpsertUser method will hash it into PasswordHash and
+ // unset it before the user is added to the database.
+ Password string `bson:",omitempty"`
+
+ // PasswordHash is the MD5 hash of Username+":mongo:"+Password.
+ PasswordHash string `bson:"pwd,omitempty"`
+
+ // CustomData holds arbitrary data admins decide to associate
+ // with this user, such as the full name or employee id.
+ CustomData interface{} `bson:"customData,omitempty"`
+
+ // Roles indicates the set of roles the user will be provided.
+ // See the Role constants.
+ Roles []Role `bson:"roles"`
+
+ // OtherDBRoles allows assigning roles in other databases from
+ // user documents inserted in the admin database. This field
+ // only works in the admin database.
+ OtherDBRoles map[string][]Role `bson:"otherDBRoles,omitempty"`
+
+ // UserSource indicates where to look for this user's credentials.
+ // It may be set to a database name, or to "$external" for
+ // consulting an external resource such as Kerberos. UserSource
+ // must not be set if Password or PasswordHash are present.
+ //
+ // WARNING: This setting was only ever supported in MongoDB 2.4,
+ // and is now obsolete.
+ UserSource string `bson:"userSource,omitempty"`
+}
+
+type Role string
+
+const (
+ // Relevant documentation:
+ //
+ // http://docs.mongodb.org/manual/reference/user-privileges/
+ //
+ RoleRoot Role = "root"
+ RoleRead Role = "read"
+ RoleReadAny Role = "readAnyDatabase"
+ RoleReadWrite Role = "readWrite"
+ RoleReadWriteAny Role = "readWriteAnyDatabase"
+ RoleDBAdmin Role = "dbAdmin"
+ RoleDBAdminAny Role = "dbAdminAnyDatabase"
+ RoleUserAdmin Role = "userAdmin"
+ RoleUserAdminAny Role = "userAdminAnyDatabase"
+ RoleClusterAdmin Role = "clusterAdmin"
+)
+
+// UpsertUser updates the authentication credentials and the roles for
+// a MongoDB user within the db database. If the named user doesn't exist
+// it will be created.
+//
+// This method should only be used from MongoDB 2.4 and on. For older
+// MongoDB releases, use the obsolete AddUser method instead.
+//
+// Relevant documentation:
+//
+// http://docs.mongodb.org/manual/reference/user-privileges/
+// http://docs.mongodb.org/manual/reference/privilege-documents/
+//
+func (db *Database) UpsertUser(user *User) error {
+ if user.Username == "" {
+ return fmt.Errorf("user has no Username")
+ }
+ if (user.Password != "" || user.PasswordHash != "") && user.UserSource != "" {
+ return fmt.Errorf("user has both Password/PasswordHash and UserSource set")
+ }
+ if len(user.OtherDBRoles) > 0 && db.Name != "admin" && db.Name != "$external" {
+ return fmt.Errorf("user with OtherDBRoles is only supported in the admin or $external databases")
+ }
+
+ // Attempt to run this using 2.6+ commands.
+ rundb := db
+ if user.UserSource != "" {
+ // Compatibility logic for the userSource field of MongoDB <= 2.4.X
+ rundb = db.Session.DB(user.UserSource)
+ }
+ err := rundb.runUserCmd("updateUser", user)
+	// Retry with createUser when an auth error occurs, in order to enable the "localhost exception".
+ if isNotFound(err) || isAuthError(err) {
+ return rundb.runUserCmd("createUser", user)
+ }
+ if !isNoCmd(err) {
+ return err
+ }
+
+ // Command does not exist. Fallback to pre-2.6 behavior.
+ var set, unset bson.D
+ if user.Password != "" {
+ psum := md5.New()
+ psum.Write([]byte(user.Username + ":mongo:" + user.Password))
+ set = append(set, bson.DocElem{"pwd", hex.EncodeToString(psum.Sum(nil))})
+ unset = append(unset, bson.DocElem{"userSource", 1})
+ } else if user.PasswordHash != "" {
+ set = append(set, bson.DocElem{"pwd", user.PasswordHash})
+ unset = append(unset, bson.DocElem{"userSource", 1})
+ }
+ if user.UserSource != "" {
+ set = append(set, bson.DocElem{"userSource", user.UserSource})
+ unset = append(unset, bson.DocElem{"pwd", 1})
+ }
+ if user.Roles != nil || user.OtherDBRoles != nil {
+ set = append(set, bson.DocElem{"roles", user.Roles})
+ if len(user.OtherDBRoles) > 0 {
+ set = append(set, bson.DocElem{"otherDBRoles", user.OtherDBRoles})
+ } else {
+ unset = append(unset, bson.DocElem{"otherDBRoles", 1})
+ }
+ }
+ users := db.C("system.users")
+ err = users.Update(bson.D{{"user", user.Username}}, bson.D{{"$unset", unset}, {"$set", set}})
+ if err == ErrNotFound {
+ set = append(set, bson.DocElem{"user", user.Username})
+ if user.Roles == nil && user.OtherDBRoles == nil {
+ // Roles must be sent, as it's the way MongoDB distinguishes
+ // old-style documents from new-style documents in pre-2.6.
+ set = append(set, bson.DocElem{"roles", user.Roles})
+ }
+ err = users.Insert(set)
+ }
+ return err
+}
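
A sketch of creating or updating a user with UpsertUser on MongoDB 2.4+; the names and password are placeholders:

    package main

    import (
        "log"

        "gopkg.in/mgo.v2"
    )

    func main() {
        session, err := mgo.Dial("localhost") // placeholder address
        if err != nil {
            log.Fatal(err)
        }
        defer session.Close()

        err = session.DB("mydb").UpsertUser(&mgo.User{
            Username: "myuser", // placeholder credentials
            Password: "mypass",
            Roles:    []mgo.Role{mgo.RoleReadWrite},
        })
        if err != nil {
            log.Fatal(err)
        }
    }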
+
+func isNoCmd(err error) bool {
+ e, ok := err.(*QueryError)
+ return ok && (e.Code == 59 || e.Code == 13390 || strings.HasPrefix(e.Message, "no such cmd:"))
+}
+
+func isNotFound(err error) bool {
+ e, ok := err.(*QueryError)
+ return ok && e.Code == 11
+}
+
+func isAuthError(err error) bool {
+ e, ok := err.(*QueryError)
+ return ok && e.Code == 13
+}
+
+func (db *Database) runUserCmd(cmdName string, user *User) error {
+ cmd := make(bson.D, 0, 16)
+ cmd = append(cmd, bson.DocElem{cmdName, user.Username})
+ if user.Password != "" {
+ cmd = append(cmd, bson.DocElem{"pwd", user.Password})
+ }
+ var roles []interface{}
+ for _, role := range user.Roles {
+ roles = append(roles, role)
+ }
+ for db, dbroles := range user.OtherDBRoles {
+ for _, role := range dbroles {
+ roles = append(roles, bson.D{{"role", role}, {"db", db}})
+ }
+ }
+ if roles != nil || user.Roles != nil || cmdName == "createUser" {
+ cmd = append(cmd, bson.DocElem{"roles", roles})
+ }
+ err := db.Run(cmd, nil)
+ if !isNoCmd(err) && user.UserSource != "" && (user.UserSource != "$external" || db.Name != "$external") {
+ return fmt.Errorf("MongoDB 2.6+ does not support the UserSource setting")
+ }
+ return err
+}
+
+// AddUser creates or updates the authentication credentials of user within
+// the db database.
+//
+// WARNING: This method is obsolete and should only be used with MongoDB 2.2
+// or earlier. For MongoDB 2.4 and on, use UpsertUser instead.
+func (db *Database) AddUser(username, password string, readOnly bool) error {
+ // Try to emulate the old behavior on 2.6+
+ user := &User{Username: username, Password: password}
+ if db.Name == "admin" {
+ if readOnly {
+ user.Roles = []Role{RoleReadAny}
+ } else {
+ user.Roles = []Role{RoleReadWriteAny}
+ }
+ } else {
+ if readOnly {
+ user.Roles = []Role{RoleRead}
+ } else {
+ user.Roles = []Role{RoleReadWrite}
+ }
+ }
+ err := db.runUserCmd("updateUser", user)
+ if isNotFound(err) {
+ return db.runUserCmd("createUser", user)
+ }
+ if !isNoCmd(err) {
+ return err
+ }
+
+ // Command doesn't exist. Fallback to pre-2.6 behavior.
+ psum := md5.New()
+ psum.Write([]byte(username + ":mongo:" + password))
+ digest := hex.EncodeToString(psum.Sum(nil))
+ c := db.C("system.users")
+ _, err = c.Upsert(bson.M{"user": username}, bson.M{"$set": bson.M{"user": username, "pwd": digest, "readOnly": readOnly}})
+ return err
+}
+
+// RemoveUser removes the authentication credentials of user from the database.
+func (db *Database) RemoveUser(user string) error {
+ err := db.Run(bson.D{{"dropUser", user}}, nil)
+ if isNoCmd(err) {
+ users := db.C("system.users")
+ return users.Remove(bson.M{"user": user})
+ }
+ if isNotFound(err) {
+ return ErrNotFound
+ }
+ return err
+}
+
+type indexSpec struct {
+ Name, NS string
+ Key bson.D
+ Unique bool ",omitempty"
+ DropDups bool "dropDups,omitempty"
+ Background bool ",omitempty"
+ Sparse bool ",omitempty"
+ Bits int ",omitempty"
+ Min, Max float64 ",omitempty"
+ BucketSize float64 "bucketSize,omitempty"
+ ExpireAfter int "expireAfterSeconds,omitempty"
+ Weights bson.D ",omitempty"
+ DefaultLanguage string "default_language,omitempty"
+ LanguageOverride string "language_override,omitempty"
+ TextIndexVersion int "textIndexVersion,omitempty"
+
+ Collation *Collation "collation,omitempty"
+}
+
+type Index struct {
+ Key []string // Index key fields; prefix name with dash (-) for descending order
+ Unique bool // Prevent two documents from having the same index key
+ DropDups bool // Drop documents with the same index key as a previously indexed one
+ Background bool // Build index in background and return immediately
+ Sparse bool // Only index documents containing the Key fields
+
+ // If ExpireAfter is defined the server will periodically delete
+ // documents with indexed time.Time older than the provided delta.
+ ExpireAfter time.Duration
+
+ // Name holds the stored index name. On creation if this field is unset it is
+ // computed by EnsureIndex based on the index key.
+ Name string
+
+ // Properties for spatial indexes.
+ //
+ // Min and Max were improperly typed as int when they should have been
+ // floats. To preserve backwards compatibility they are still typed as
+ // int and the following two fields enable reading and writing the same
+ // fields as float numbers. In mgo.v3, these fields will be dropped and
+ // Min/Max will become floats.
+ Min, Max int
+ Minf, Maxf float64
+ BucketSize float64
+ Bits int
+
+ // Properties for text indexes.
+ DefaultLanguage string
+ LanguageOverride string
+
+ // Weights defines the significance of provided fields relative to other
+ // fields in a text index. The score for a given word in a document is derived
+ // from the weighted sum of the frequency for each of the indexed fields in
+ // that document. The default field weight is 1.
+ Weights map[string]int
+
+ // Collation defines the collation to use for the index.
+ Collation *Collation
+}
+
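+// Collation holds the language-aware string comparison rules to apply, for
+// example when creating an index. A case-insensitive index might be requested
+// with a sketch like this (field name and locale are illustrative):
+//
+// index := mgo.Index{
+// Key: []string{"name"},
+// Collation: &mgo.Collation{Locale: "en", Strength: 2},
+// }
+// err := collection.EnsureIndex(index)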
+type Collation struct {
+
+ // Locale defines the collation locale.
+ Locale string `bson:"locale"`
+
+ // CaseLevel defines whether to turn case sensitivity on at strength 1 or 2.
+ CaseLevel bool `bson:"caseLevel,omitempty"`
+
+ // CaseFirst may be set to "upper" or "lower" to define whether
+ // to have uppercase or lowercase items first. Default is "off".
+ CaseFirst string `bson:"caseFirst,omitempty"`
+
+ // Strength defines the priority of comparison properties, as follows:
+ //
+ // 1 (primary) - Strongest level, denotes differences between base characters
+ // 2 (secondary) - Accents in characters are considered secondary differences
+ // 3 (tertiary) - Upper and lower case differences in characters are
+ // distinguished at the tertiary level
+ // 4 (quaternary) - When punctuation is ignored at level 1-3, an additional
+ // level can be used to distinguish words with and without
+ // punctuation. Should only be used if ignoring punctuation
+ // is required or when processing Japanese text.
+ // 5 (identical) - When all other levels are equal, the identical level is
+ // used as a tiebreaker. The Unicode code point values of
+ // the NFD form of each string are compared at this level,
+ // just in case there is no difference at levels 1-4
+ //
+ // Strength defaults to 3.
+ Strength int `bson:"strength,omitempty"`
+
+ // NumericOrdering defines whether to order numbers based on numerical
+ // order and not collation order.
+ NumericOrdering bool `bson:"numericOrdering,omitempty"`
+
+ // Alternate controls whether spaces and punctuation are considered base characters.
+ // May be set to "non-ignorable" (spaces and punctuation considered base characters)
+ // or "shifted" (spaces and punctuation not considered base characters, and only
+ // distinguished at strength > 3). Defaults to "non-ignorable".
+ Alternate string `bson:"alternate,omitempty"`
+
+ // Backwards defines whether to have secondary differences considered in reverse order,
+ // as done in the French language.
+ Backwards bool `bson:"backwards,omitempty"`
+}
+
+// mgo.v3: Drop Minf and Maxf and transform Min and Max to floats.
+// mgo.v3: Drop DropDups as it's unsupported past 2.8.
+
+type indexKeyInfo struct {
+ name string
+ key bson.D
+ weights bson.D
+}
+
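+// parseIndexKey translates key specifications such as "field", "-field" and
+// "$<kind>:field" into the server-side key document and the derived index
+// name. An illustrative sketch of the mapping implemented below:
+//
+// []string{"a", "-b"} => key {a: 1, b: -1}, name "a_1_b_-1"
+// []string{"$2d:loc"} => key {loc: "2d"}, name "loc_2d"
+// []string{"$text:summary"} => key {_fts: "text", _ftsx: 1}, weights {summary: 1}, name "summary_text"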
+func parseIndexKey(key []string) (*indexKeyInfo, error) {
+ var keyInfo indexKeyInfo
+ isText := false
+ var order interface{}
+ for _, field := range key {
+ raw := field
+ if keyInfo.name != "" {
+ keyInfo.name += "_"
+ }
+ var kind string
+ if field != "" {
+ if field[0] == '$' {
+ if c := strings.Index(field, ":"); c > 1 && c < len(field)-1 {
+ kind = field[1:c]
+ field = field[c+1:]
+ keyInfo.name += field + "_" + kind
+ } else {
+ field = "\x00"
+ }
+ }
+ switch field[0] {
+ case 0:
+ // Logic above failed. Reset and error.
+ field = ""
+ case '@':
+ order = "2d"
+ field = field[1:]
+ // The shell used to render this field as key_ instead of key_2d,
+ // and mgo followed suit. This has been fixed in recent server
+ // releases, and mgo followed as well.
+ keyInfo.name += field + "_2d"
+ case '-':
+ order = -1
+ field = field[1:]
+ keyInfo.name += field + "_-1"
+ case '+':
+ field = field[1:]
+ fallthrough
+ default:
+ if kind == "" {
+ order = 1
+ keyInfo.name += field + "_1"
+ } else {
+ order = kind
+ }
+ }
+ }
+ if field == "" || kind != "" && order != kind {
+ return nil, fmt.Errorf(`invalid index key: want "[$<kind>:][-]<field name>", got %q`, raw)
+ }
+ if kind == "text" {
+ if !isText {
+ keyInfo.key = append(keyInfo.key, bson.DocElem{"_fts", "text"}, bson.DocElem{"_ftsx", 1})
+ isText = true
+ }
+ keyInfo.weights = append(keyInfo.weights, bson.DocElem{field, 1})
+ } else {
+ keyInfo.key = append(keyInfo.key, bson.DocElem{field, order})
+ }
+ }
+ if keyInfo.name == "" {
+ return nil, errors.New("invalid index key: no fields provided")
+ }
+ return &keyInfo, nil
+}
+
+// EnsureIndexKey ensures an index with the given key exists, creating it
+// if necessary.
+//
+// This example:
+//
+// err := collection.EnsureIndexKey("a", "b")
+//
+// Is equivalent to:
+//
+// err := collection.EnsureIndex(mgo.Index{Key: []string{"a", "b"}})
+//
+// See the EnsureIndex method for more details.
+func (c *Collection) EnsureIndexKey(key ...string) error {
+ return c.EnsureIndex(Index{Key: key})
+}
+
+// EnsureIndex ensures an index with the given key exists, creating it with
+// the provided parameters if necessary. EnsureIndex does not modify a previously
+// existent index with a matching key. The old index must be dropped first instead.
+//
+// Once EnsureIndex returns successfully, following requests for the same index
+// will not contact the server unless Collection.DropIndex is used to drop the
+// same index, or Session.ResetIndexCache is called.
+//
+// For example:
+//
+// index := Index{
+// Key: []string{"lastname", "firstname"},
+// Unique: true,
+// DropDups: true,
+// Background: true, // See notes.
+// Sparse: true,
+// }
+// err := collection.EnsureIndex(index)
+//
+// The Key value determines which fields compose the index. The index ordering
+// will be ascending by default. To obtain an index with a descending order,
+// the field name should be prefixed by a dash (e.g. []string{"-time"}). It can
+// also be optionally prefixed by an index kind, as in "$text:summary" or
+// "$2d:-point". The key string format is:
+//
+// [$<kind>:][-]<field name>
+//
+// If the Unique field is true, the index must necessarily contain only a single
+// document per Key. With DropDups set to true, documents with the same key
+// as a previously indexed one will be dropped rather than an error returned.
+//
+// If Background is true, other connections will be allowed to proceed using
+// the collection without the index while it's being built. Note that the
+// session executing EnsureIndex will be blocked for as long as it takes for
+// the index to be built.
+//
+// If Sparse is true, only documents containing the provided Key fields will be
+// included in the index. When using a sparse index for sorting, only indexed
+// documents will be returned.
+//
+// If ExpireAfter is non-zero, the server will periodically scan the collection
+// and remove documents containing an indexed time.Time field with a value
+// older than ExpireAfter. See the documentation for details:
+//
+// http://docs.mongodb.org/manual/tutorial/expire-data
+//
+// Other kinds of indexes are also supported through that API. Here is an example:
+//
+// index := Index{
+// Key: []string{"$2d:loc"},
+// Bits: 26,
+// }
+// err := collection.EnsureIndex(index)
+//
+// The example above requests the creation of a "2d" index for the "loc" field.
+//
+// The 2D index bounds may be changed using the Min and Max attributes of the
+// Index value. The default bound setting of (-180, 180) is suitable for
+// latitude/longitude pairs.
+//
+// The Bits parameter sets the precision of the 2D geohash values. If not
+// provided, 26 bits are used, which is roughly equivalent to 1 foot of
+// precision for the default (-180, 180) index bounds.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Indexes
+// http://www.mongodb.org/display/DOCS/Indexing+Advice+and+FAQ
+// http://www.mongodb.org/display/DOCS/Indexing+as+a+Background+Operation
+// http://www.mongodb.org/display/DOCS/Geospatial+Indexing
+// http://www.mongodb.org/display/DOCS/Multikeys
+//
+func (c *Collection) EnsureIndex(index Index) error {
+ keyInfo, err := parseIndexKey(index.Key)
+ if err != nil {
+ return err
+ }
+
+ session := c.Database.Session
+ cacheKey := c.FullName + "\x00" + keyInfo.name
+ if session.cluster().HasCachedIndex(cacheKey) {
+ return nil
+ }
+
+ spec := indexSpec{
+ Name: keyInfo.name,
+ NS: c.FullName,
+ Key: keyInfo.key,
+ Unique: index.Unique,
+ DropDups: index.DropDups,
+ Background: index.Background,
+ Sparse: index.Sparse,
+ Bits: index.Bits,
+ Min: index.Minf,
+ Max: index.Maxf,
+ BucketSize: index.BucketSize,
+ ExpireAfter: int(index.ExpireAfter / time.Second),
+ Weights: keyInfo.weights,
+ DefaultLanguage: index.DefaultLanguage,
+ LanguageOverride: index.LanguageOverride,
+ Collation: index.Collation,
+ }
+
+ if spec.Min == 0 && spec.Max == 0 {
+ spec.Min = float64(index.Min)
+ spec.Max = float64(index.Max)
+ }
+
+ if index.Name != "" {
+ spec.Name = index.Name
+ }
+
+NextField:
+ for name, weight := range index.Weights {
+ for i, elem := range spec.Weights {
+ if elem.Name == name {
+ spec.Weights[i].Value = weight
+ continue NextField
+ }
+ }
+ panic("weight provided for field that is not part of index key: " + name)
+ }
+
+ cloned := session.Clone()
+ defer cloned.Close()
+ cloned.SetMode(Strong, false)
+ cloned.EnsureSafe(&Safe{})
+ db := c.Database.With(cloned)
+
+ // Try with a command first.
+ err = db.Run(bson.D{{"createIndexes", c.Name}, {"indexes", []indexSpec{spec}}}, nil)
+ if isNoCmd(err) {
+ // Command not yet supported. Insert into the indexes collection instead.
+ err = db.C("system.indexes").Insert(&spec)
+ }
+ if err == nil {
+ session.cluster().CacheIndex(cacheKey, true)
+ }
+ return err
+}
+
+// DropIndex drops the index with the provided key from the c collection.
+//
+// See EnsureIndex for details on the accepted key variants.
+//
+// For example:
+//
+// err := collection.DropIndex("firstField", "-secondField")
+//
+// To drop an index by its custom name instead, use DropIndexName.
+//
+func (c *Collection) DropIndex(key ...string) error {
+ keyInfo, err := parseIndexKey(key)
+ if err != nil {
+ return err
+ }
+
+ session := c.Database.Session
+ cacheKey := c.FullName + "\x00" + keyInfo.name
+ session.cluster().CacheIndex(cacheKey, false)
+
+ session = session.Clone()
+ defer session.Close()
+ session.SetMode(Strong, false)
+
+ db := c.Database.With(session)
+ result := struct {
+ ErrMsg string
+ Ok bool
+ }{}
+ err = db.Run(bson.D{{"dropIndexes", c.Name}, {"index", keyInfo.name}}, &result)
+ if err != nil {
+ return err
+ }
+ if !result.Ok {
+ return errors.New(result.ErrMsg)
+ }
+ return nil
+}
+
+// DropIndexName removes the index with the provided index name.
+//
+// For example:
+//
+// err := collection.DropIndexName("customIndexName")
+//
+func (c *Collection) DropIndexName(name string) error {
+ session := c.Database.Session
+
+ session = session.Clone()
+ defer session.Close()
+ session.SetMode(Strong, false)
+
+ c = c.With(session)
+
+ indexes, err := c.Indexes()
+ if err != nil {
+ return err
+ }
+
+ var index Index
+ for _, idx := range indexes {
+ if idx.Name == name {
+ index = idx
+ break
+ }
+ }
+
+ if index.Name != "" {
+ keyInfo, err := parseIndexKey(index.Key)
+ if err != nil {
+ return err
+ }
+
+ cacheKey := c.FullName + "\x00" + keyInfo.name
+ session.cluster().CacheIndex(cacheKey, false)
+ }
+
+ result := struct {
+ ErrMsg string
+ Ok bool
+ }{}
+ err = c.Database.Run(bson.D{{"dropIndexes", c.Name}, {"index", name}}, &result)
+ if err != nil {
+ return err
+ }
+ if !result.Ok {
+ return errors.New(result.ErrMsg)
+ }
+ return nil
+}
+
+// nonEventual returns a clone of session and ensures it is not Eventual.
+// This guarantees that the server used for the query can be safely reused
+// afterwards when iterating over a received cursor.
+func (session *Session) nonEventual() *Session {
+ cloned := session.Clone()
+ if cloned.consistency == Eventual {
+ cloned.SetMode(Monotonic, false)
+ }
+ return cloned
+}
+
+// Indexes returns a list of all indexes for the collection.
+//
+// For example, this snippet would drop all available indexes:
+//
+// indexes, err := collection.Indexes()
+// if err != nil {
+// return err
+// }
+// for _, index := range indexes {
+// err = collection.DropIndex(index.Key...)
+// if err != nil {
+// return err
+// }
+// }
+//
+// See the EnsureIndex method for more details on indexes.
+func (c *Collection) Indexes() (indexes []Index, err error) {
+ cloned := c.Database.Session.nonEventual()
+ defer cloned.Close()
+
+ batchSize := int(cloned.queryConfig.op.limit)
+
+ // Try with a command.
+ var result struct {
+ Indexes []bson.Raw
+ Cursor cursorData
+ }
+ var iter *Iter
+ err = c.Database.With(cloned).Run(bson.D{{"listIndexes", c.Name}, {"cursor", bson.D{{"batchSize", batchSize}}}}, &result)
+ if err == nil {
+ firstBatch := result.Indexes
+ if firstBatch == nil {
+ firstBatch = result.Cursor.FirstBatch
+ }
+ ns := strings.SplitN(result.Cursor.NS, ".", 2)
+ if len(ns) < 2 {
+ iter = c.With(cloned).NewIter(nil, firstBatch, result.Cursor.Id, nil)
+ } else {
+ iter = cloned.DB(ns[0]).C(ns[1]).NewIter(nil, firstBatch, result.Cursor.Id, nil)
+ }
+ } else if isNoCmd(err) {
+ // Command not yet supported. Query the database instead.
+ iter = c.Database.C("system.indexes").Find(bson.M{"ns": c.FullName}).Iter()
+ } else {
+ return nil, err
+ }
+
+ var spec indexSpec
+ for iter.Next(&spec) {
+ indexes = append(indexes, indexFromSpec(spec))
+ }
+ if err = iter.Close(); err != nil {
+ return nil, err
+ }
+ sort.Sort(indexSlice(indexes))
+ return indexes, nil
+}
+
+func indexFromSpec(spec indexSpec) Index {
+ index := Index{
+ Name: spec.Name,
+ Key: simpleIndexKey(spec.Key),
+ Unique: spec.Unique,
+ DropDups: spec.DropDups,
+ Background: spec.Background,
+ Sparse: spec.Sparse,
+ Minf: spec.Min,
+ Maxf: spec.Max,
+ Bits: spec.Bits,
+ BucketSize: spec.BucketSize,
+ DefaultLanguage: spec.DefaultLanguage,
+ LanguageOverride: spec.LanguageOverride,
+ ExpireAfter: time.Duration(spec.ExpireAfter) * time.Second,
+ Collation: spec.Collation,
+ }
+ if float64(int(spec.Min)) == spec.Min && float64(int(spec.Max)) == spec.Max {
+ index.Min = int(spec.Min)
+ index.Max = int(spec.Max)
+ }
+ if spec.TextIndexVersion > 0 {
+ index.Key = make([]string, len(spec.Weights))
+ index.Weights = make(map[string]int)
+ for i, elem := range spec.Weights {
+ index.Key[i] = "$text:" + elem.Name
+ if w, ok := elem.Value.(int); ok {
+ index.Weights[elem.Name] = w
+ }
+ }
+ }
+ return index
+}
+
+type indexSlice []Index
+
+func (idxs indexSlice) Len() int { return len(idxs) }
+func (idxs indexSlice) Less(i, j int) bool { return idxs[i].Name < idxs[j].Name }
+func (idxs indexSlice) Swap(i, j int) { idxs[i], idxs[j] = idxs[j], idxs[i] }
+
+func simpleIndexKey(realKey bson.D) (key []string) {
+ for i := range realKey {
+ field := realKey[i].Name
+ vi, ok := realKey[i].Value.(int)
+ if !ok {
+ vf, _ := realKey[i].Value.(float64)
+ vi = int(vf)
+ }
+ if vi == 1 {
+ key = append(key, field)
+ continue
+ }
+ if vi == -1 {
+ key = append(key, "-"+field)
+ continue
+ }
+ if vs, ok := realKey[i].Value.(string); ok {
+ key = append(key, "$"+vs+":"+field)
+ continue
+ }
+ panic("Got unknown index key type for field " + field)
+ }
+ return
+}
+
+// ResetIndexCache() clears the cache of previously ensured indexes.
+// Following requests to EnsureIndex will contact the server.
+func (s *Session) ResetIndexCache() {
+ s.cluster().ResetIndexCache()
+}
+
+// New creates a new session with the same parameters as the original
+// session, including consistency, batch size, prefetching, safety mode,
+// etc. The returned session will use sockets from the pool, so there's
+// a chance that writes just performed in another session may not yet
+// be visible.
+//
+// Login information from the original session will not be copied over
+// into the new session unless it was provided through the initial URL
+// for the Dial function.
+//
+// See the Copy and Clone methods.
+//
+func (s *Session) New() *Session {
+ s.m.Lock()
+ scopy := copySession(s, false)
+ s.m.Unlock()
+ scopy.Refresh()
+ return scopy
+}
+
+// Copy works just like New, but preserves the exact authentication
+// information from the original session.
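+//
+// A common pattern is to Copy the session per unit of work and Close the
+// copy when done, for example (database and collection names are
+// illustrative):
+//
+// worker := session.Copy()
+// defer worker.Close()
+// err := worker.DB("app").C("items").Insert(bson.M{"n": 1})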
+func (s *Session) Copy() *Session {
+ s.m.Lock()
+ scopy := copySession(s, true)
+ s.m.Unlock()
+ scopy.Refresh()
+ return scopy
+}
+
+// Clone works just like Copy, but also reuses the same socket as the original
+// session, in case it had already reserved one due to its consistency
+// guarantees. This behavior ensures that writes performed in the old session
+// are necessarily observed when using the new session, as long as it was a
+// strong or monotonic session. That said, it also means that long operations
+// may cause other goroutines using the original session to wait.
+func (s *Session) Clone() *Session {
+ s.m.Lock()
+ scopy := copySession(s, true)
+ s.m.Unlock()
+ return scopy
+}
+
+// Close terminates the session. It's a runtime error to use a session
+// after it has been closed.
+func (s *Session) Close() {
+ s.m.Lock()
+ if s.cluster_ != nil {
+ debugf("Closing session %p", s)
+ s.unsetSocket()
+ s.cluster_.Release()
+ s.cluster_ = nil
+ }
+ s.m.Unlock()
+}
+
+func (s *Session) cluster() *mongoCluster {
+ if s.cluster_ == nil {
+ panic("Session already closed")
+ }
+ return s.cluster_
+}
+
+// Refresh puts back any reserved sockets in use and restarts the consistency
+// guarantees according to the current consistency setting for the session.
+func (s *Session) Refresh() {
+ s.m.Lock()
+ s.slaveOk = s.consistency != Strong
+ s.unsetSocket()
+ s.m.Unlock()
+}
+
+// SetMode changes the consistency mode for the session.
+//
+// The default mode is Strong.
+//
+// In the Strong consistency mode reads and writes will always be made to
+// the primary server using a unique connection so that reads and writes are
+// fully consistent, ordered, and observing the most up-to-date data.
+// This offers the least benefits in terms of distributing load, but the
+// most guarantees. See also Monotonic and Eventual.
+//
+// In the Monotonic consistency mode reads may not be entirely up-to-date,
+// but they will always see the history of changes moving forward, the data
+// read will be consistent across sequential queries in the same session,
+// and modifications made within the session will be observed in following
+// queries (read-your-writes).
+//
+// In practice, the Monotonic mode is obtained by performing initial reads
+// on a unique connection to an arbitrary secondary, if one is available,
+// and once the first write happens, the session connection is switched over
+// to the primary server. This manages to distribute some of the reading
+// load with secondaries, while maintaining some useful guarantees.
+//
+// In the Eventual consistency mode reads will be made to any secondary in the
+// cluster, if one is available, and sequential reads will not necessarily
+// be made with the same connection. This means that data may be observed
+// out of order. Writes will of course be issued to the primary, but
+// independent writes in the same Eventual session may also be made with
+// independent connections, so there are also no guarantees in terms of
+// write ordering (no read-your-writes guarantees either).
+//
+// The Eventual mode is the fastest and most resource-friendly, but is
+// also the one offering the least guarantees about ordering of the data
+// read and written.
+//
+// If refresh is true, in addition to ensuring the session is in the given
+// consistency mode, the consistency guarantees will also be reset (e.g.
+// a Monotonic session will be allowed to read from secondaries again).
+// This is equivalent to calling the Refresh function.
+//
+// Shifting between Monotonic and Strong modes will keep a previously
+// reserved connection for the session unless refresh is true or the
+// connection is unsuitable (to a secondary server in a Strong session).
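+//
+// For example, a read-mostly session might be switched like this:
+//
+// session.SetMode(mgo.Monotonic, true)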
+func (s *Session) SetMode(consistency Mode, refresh bool) {
+ s.m.Lock()
+ debugf("Session %p: setting mode %d with refresh=%v (master=%p, slave=%p)", s, consistency, refresh, s.masterSocket, s.slaveSocket)
+ s.consistency = consistency
+ if refresh {
+ s.slaveOk = s.consistency != Strong
+ s.unsetSocket()
+ } else if s.consistency == Strong {
+ s.slaveOk = false
+ } else if s.masterSocket == nil {
+ s.slaveOk = true
+ }
+ s.m.Unlock()
+}
+
+// Mode returns the current consistency mode for the session.
+func (s *Session) Mode() Mode {
+ s.m.RLock()
+ mode := s.consistency
+ s.m.RUnlock()
+ return mode
+}
+
+// SetSyncTimeout sets the amount of time an operation with this session
+// will wait before returning an error in case a connection to a usable
+// server can't be established. Set it to zero to wait forever. The
+// default value is 7 seconds.
+func (s *Session) SetSyncTimeout(d time.Duration) {
+ s.m.Lock()
+ s.syncTimeout = d
+ s.m.Unlock()
+}
+
+// SetSocketTimeout sets the amount of time to wait for a non-responding
+// socket to the database before it is forcefully closed.
+//
+// The default timeout is 1 minute.
+func (s *Session) SetSocketTimeout(d time.Duration) {
+ s.m.Lock()
+ s.sockTimeout = d
+ if s.masterSocket != nil {
+ s.masterSocket.SetTimeout(d)
+ }
+ if s.slaveSocket != nil {
+ s.slaveSocket.SetTimeout(d)
+ }
+ s.m.Unlock()
+}
+
+// SetCursorTimeout changes the standard timeout period that the server
+// enforces on created cursors. The only supported value right now is
+// 0, which disables the timeout. The standard server timeout is 10 minutes.
+func (s *Session) SetCursorTimeout(d time.Duration) {
+ s.m.Lock()
+ if d == 0 {
+ s.queryConfig.op.flags |= flagNoCursorTimeout
+ } else {
+ panic("SetCursorTimeout: only 0 (disable timeout) supported for now")
+ }
+ s.m.Unlock()
+}
+
+// SetPoolLimit sets the maximum number of sockets in use in a single server
+// before this session will block waiting for a socket to be available.
+// The default limit is 4096.
+//
+// This limit must be set to cover more than any expected workload of the
+// application. It is a bad practice and an unsupported use case to use the
+// database driver to define the concurrency limit of an application. Prevent
+// such concurrency "at the door" instead, by properly restricting the amount
+// of used resources and number of goroutines before they are created.
+func (s *Session) SetPoolLimit(limit int) {
+ s.m.Lock()
+ s.poolLimit = limit
+ s.m.Unlock()
+}
+
+// SetBypassValidation sets whether the server should bypass the registered
+// validation expressions executed when documents are inserted or modified,
+// in the interest of preserving invariants in the collection being modified.
+// The default is to not bypass, and thus to perform the validation
+// expressions registered for modified collections.
+//
+// Document validation was introduced in MongoDB 3.2.
+//
+// Relevant documentation:
+//
+// https://docs.mongodb.org/manual/release-notes/3.2/#bypass-validation
+//
+func (s *Session) SetBypassValidation(bypass bool) {
+ s.m.Lock()
+ s.bypassValidation = bypass
+ s.m.Unlock()
+}
+
+// SetBatch sets the default batch size used when fetching documents from the
+// database. It's possible to change this setting on a per-query basis as
+// well, using the Query.Batch method.
+//
+// The default batch size is defined by the database itself. As of this
+// writing, MongoDB will use an initial size of min(100 docs, 4MB) on the
+// first batch, and 4MB on remaining ones.
+func (s *Session) SetBatch(n int) {
+ if n == 1 {
+ // Server interprets 1 as -1 and closes the cursor (!?)
+ n = 2
+ }
+ s.m.Lock()
+ s.queryConfig.op.limit = int32(n)
+ s.m.Unlock()
+}
+
+// SetPrefetch sets the default point at which the next batch of results will be
+// requested. When there are p*batch_size remaining documents cached in an
+// Iter, the next batch will be requested in the background. For instance, when
+// using this:
+//
+// session.SetBatch(200)
+// session.SetPrefetch(0.25)
+//
+// and there are only 50 documents cached in the Iter to be processed, the
+// next batch of 200 will be requested. It's possible to change this setting on
+// a per-query basis as well, using the Prefetch method of Query.
+//
+// The default prefetch value is 0.25.
+func (s *Session) SetPrefetch(p float64) {
+ s.m.Lock()
+ s.queryConfig.prefetch = p
+ s.m.Unlock()
+}
+
+// See SetSafe for details on the Safe type.
+type Safe struct {
+ W int // Min # of servers to ack before success
+ WMode string // Write mode for MongoDB 2.0+ (e.g. "majority")
+ WTimeout int // Milliseconds to wait for W before timing out
+ FSync bool // Sync via the journal if present, or via data files sync otherwise
+ J bool // Sync via the journal if present
+}
+
+// Safe returns the current safety mode for the session.
+func (s *Session) Safe() (safe *Safe) {
+ s.m.Lock()
+ defer s.m.Unlock()
+ if s.safeOp != nil {
+ cmd := s.safeOp.query.(*getLastError)
+ safe = &Safe{WTimeout: cmd.WTimeout, FSync: cmd.FSync, J: cmd.J}
+ switch w := cmd.W.(type) {
+ case string:
+ safe.WMode = w
+ case int:
+ safe.W = w
+ }
+ }
+ return
+}
+
+// SetSafe changes the session safety mode.
+//
+// If the safe parameter is nil, the session is put in unsafe mode, and writes
+// become fire-and-forget, without error checking. The unsafe mode is faster
+// since operations won't hold on waiting for a confirmation.
+//
+// If the safe parameter is not nil, any changing query (insert, update, ...)
+// will be followed by a getLastError command with the specified parameters,
+// to ensure the request was correctly processed.
+//
+// The default is &Safe{}, meaning check for errors and use the default
+// behavior for all fields.
+//
+// The safe.W parameter determines how many servers should confirm a write
+// before the operation is considered successful. If set to 0 or 1, the
+// command will return as soon as the primary is done with the request.
+// If safe.WTimeout is greater than zero, it determines how many milliseconds
+// to wait for the safe.W servers to respond before returning an error.
+//
+// Starting with MongoDB 2.0.0 the safe.WMode parameter can be used instead
+// of W to request for richer semantics. If set to "majority" the server will
+// wait for a majority of members from the replica set to respond before
+// returning. Custom modes may also be defined within the server to create
+// very detailed placement schemas. See the data awareness documentation in
+// the links below for more details (note that MongoDB internally reuses the
+// "w" field name for WMode).
+//
+// If safe.J is true, servers will block until write operations have been
+// committed to the journal. Cannot be used in combination with FSync. Prior
+// to MongoDB 2.6 this option was ignored if the server was running without
+// journaling. Starting with MongoDB 2.6 write operations will fail with an
+// exception if this option is used when the server is running without
+// journaling.
+//
+// If safe.FSync is true and the server is running without journaling, blocks
+// until the server has synced all data files to disk. If the server is running
+// with journaling, this acts the same as the J option, blocking until write
+// operations have been committed to the journal. Cannot be used in
+// combination with J.
+//
+// Since MongoDB 2.0.0, the safe.J option can also be used instead of FSync
+// to force the server to wait for a group commit in case journaling is
+// enabled. The option has no effect if the server has journaling disabled.
+//
+// For example, the following statement will make the session check for
+// errors, without imposing further constraints:
+//
+// session.SetSafe(&mgo.Safe{})
+//
+// The following statement will force the server to wait for a majority of
+// members of a replica set to return (MongoDB 2.0+ only):
+//
+// session.SetSafe(&mgo.Safe{WMode: "majority"})
+//
+// The following statement, on the other hand, ensures that at least two
+// servers have flushed the change to disk before confirming the success
+// of operations:
+//
+// session.EnsureSafe(&mgo.Safe{W: 2, FSync: true})
+//
+// The following statement, on the other hand, disables the verification
+// of errors entirely:
+//
+// session.SetSafe(nil)
+//
+// See also the EnsureSafe method.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/getLastError+Command
+// http://www.mongodb.org/display/DOCS/Verifying+Propagation+of+Writes+with+getLastError
+// http://www.mongodb.org/display/DOCS/Data+Center+Awareness
+//
+func (s *Session) SetSafe(safe *Safe) {
+ s.m.Lock()
+ s.safeOp = nil
+ s.ensureSafe(safe)
+ s.m.Unlock()
+}
+
+// EnsureSafe compares the provided safety parameters with the ones
+// currently in use by the session and picks the most conservative
+// choice for each setting.
+//
+// That is:
+//
+// - safe.WMode is always used if set.
+// - safe.W is used if larger than the current W and WMode is empty.
+// - safe.FSync is always used if true.
+// - safe.J is used if FSync is false.
+// - safe.WTimeout is used if set and smaller than the current WTimeout.
+//
+// For example, the following statement will ensure the session is
+// at least checking for errors, without enforcing further constraints.
+// If a more conservative SetSafe or EnsureSafe call was previously done,
+// the following call will be ignored.
+//
+// session.EnsureSafe(&mgo.Safe{})
+//
+// See also the SetSafe method for details on what each option means.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/getLastError+Command
+// http://www.mongodb.org/display/DOCS/Verifying+Propagation+of+Writes+with+getLastError
+// http://www.mongodb.org/display/DOCS/Data+Center+Awareness
+//
+func (s *Session) EnsureSafe(safe *Safe) {
+ s.m.Lock()
+ s.ensureSafe(safe)
+ s.m.Unlock()
+}
+
+func (s *Session) ensureSafe(safe *Safe) {
+ if safe == nil {
+ return
+ }
+
+ var w interface{}
+ if safe.WMode != "" {
+ w = safe.WMode
+ } else if safe.W > 0 {
+ w = safe.W
+ }
+
+ var cmd getLastError
+ if s.safeOp == nil {
+ cmd = getLastError{1, w, safe.WTimeout, safe.FSync, safe.J}
+ } else {
+ // Copy. We don't want to mutate the existing query.
+ cmd = *(s.safeOp.query.(*getLastError))
+ if cmd.W == nil {
+ cmd.W = w
+ } else if safe.WMode != "" {
+ cmd.W = safe.WMode
+ } else if i, ok := cmd.W.(int); ok && safe.W > i {
+ cmd.W = safe.W
+ }
+ if safe.WTimeout > 0 && safe.WTimeout < cmd.WTimeout {
+ cmd.WTimeout = safe.WTimeout
+ }
+ if safe.FSync {
+ cmd.FSync = true
+ cmd.J = false
+ } else if safe.J && !cmd.FSync {
+ cmd.J = true
+ }
+ }
+ s.safeOp = &queryOp{
+ query: &cmd,
+ collection: "admin.$cmd",
+ limit: -1,
+ }
+}
+
+// Run issues the provided command on the "admin" database and
+// unmarshals its result into the respective argument. The cmd
+// argument may be either a string with the command name itself, in
+// which case a document of the form bson.M{cmd: 1} will be used,
+// or it may be a full command document.
+//
+// Note that MongoDB considers the first marshalled key as the command
+// name, so when providing a command with options, it's important to
+// use an ordering-preserving document, such as a struct value or an
+// instance of bson.D. For instance:
+//
+// db.Run(bson.D{{"create", "mycollection"}, {"size", 1024}})
+//
+// For commands on arbitrary databases, see the Run method in
+// the Database type.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Commands
+// http://www.mongodb.org/display/DOCS/List+of+Database+Commands
+//
+func (s *Session) Run(cmd interface{}, result interface{}) error {
+ return s.DB("admin").Run(cmd, result)
+}
+
+// SelectServers restricts communication to servers configured with the
+// given tags. For example, the following statement restricts servers
+// used for reading operations to those with both tag "disk" set to
+// "ssd" and tag "rack" set to 1:
+//
+// session.SelectServers(bson.D{{"disk", "ssd"}, {"rack", 1}})
+//
+// Multiple sets of tags may be provided, in which case the used server
+// must match all tags within any one set.
+//
+// If a connection was previously assigned to the session due to the
+// current session mode (see Session.SetMode), the tag selection will
+// only be enforced after the session is refreshed.
+//
+// Relevant documentation:
+//
+// http://docs.mongodb.org/manual/tutorial/configure-replica-set-tag-sets
+//
+func (s *Session) SelectServers(tags ...bson.D) {
+ s.m.Lock()
+ s.queryConfig.op.serverTags = tags
+ s.m.Unlock()
+}
+
+// Ping runs a trivial ping command just to get in touch with the server.
+func (s *Session) Ping() error {
+ return s.Run("ping", nil)
+}
+
+// Fsync flushes in-memory writes to disk on the server the session
+// is established with. If async is true, the call returns immediately,
+// otherwise it returns after the flush has been made.
+func (s *Session) Fsync(async bool) error {
+ return s.Run(bson.D{{"fsync", 1}, {"async", async}}, nil)
+}
+
+// FsyncLock locks all writes in the specific server the session is
+// established with and returns. Any writes attempted to the server
+// after it is successfully locked will block until FsyncUnlock is
+// called for the same server.
+//
+// This method works on secondaries as well, preventing the oplog from
+// being flushed while the server is locked, but since only the server
+// connected to is locked, for locking specific secondaries it may be
+// necessary to establish a connection directly to the secondary (see
+// Dial's connect=direct option).
+//
+// As an important caveat, note that once a write is attempted and
+// blocks, follow up reads will block as well due to the way the
+// lock is internally implemented in the server. More details at:
+//
+// https://jira.mongodb.org/browse/SERVER-4243
+//
+// FsyncLock is often used for performing consistent backups of
+// the database files on disk.
+//
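+// A minimal backup-style sketch (error handling is simplified):
+//
+// if err := session.FsyncLock(); err == nil {
+// defer session.FsyncUnlock()
+// // copy the database files on disk here
+// }
+//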
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/fsync+Command
+// http://www.mongodb.org/display/DOCS/Backups
+//
+func (s *Session) FsyncLock() error {
+ return s.Run(bson.D{{"fsync", 1}, {"lock", true}}, nil)
+}
+
+// FsyncUnlock releases the server for writes. See FsyncLock for details.
+func (s *Session) FsyncUnlock() error {
+ err := s.Run(bson.D{{"fsyncUnlock", 1}}, nil)
+ if isNoCmd(err) {
+ err = s.DB("admin").C("$cmd.sys.unlock").Find(nil).One(nil) // older servers lack the fsyncUnlock command and unlock via this pseudo-collection instead
+ }
+ return err
+}
+
+// Find prepares a query using the provided document. The document may be a
+// map or a struct value capable of being marshalled with bson. The map
+// may be a generic one using interface{} for its key and/or values, such as
+// bson.M, or it may be a properly typed map. Providing nil as the document
+// is equivalent to providing an empty document such as bson.M{}.
+//
+// Further details of the query may be tweaked using the resulting Query value,
+// and then executed to retrieve results using methods such as One, For,
+// Iter, or Tail.
+//
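+// For example (the collection and field name are illustrative):
+//
+// var result struct{ Name string }
+// err := collection.Find(bson.M{"name": "Ada"}).One(&result)
+//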
+// In case the resulting document includes a field named $err or errmsg, which
+// are standard ways for MongoDB to return query errors, the returned err will
+// be set to a *QueryError value including the Err message and the Code. In
+// those cases, the result argument is still unmarshalled into with the
+// received document so that any other custom values may be obtained if
+// desired.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Querying
+// http://www.mongodb.org/display/DOCS/Advanced+Queries
+//
+func (c *Collection) Find(query interface{}) *Query {
+ session := c.Database.Session
+ session.m.RLock()
+ q := &Query{session: session, query: session.queryConfig}
+ session.m.RUnlock()
+ q.op.query = query
+ q.op.collection = c.FullName
+ return q
+}
+
+type repairCmd struct {
+ RepairCursor string `bson:"repairCursor"`
+ Cursor *repairCmdCursor ",omitempty"
+}
+
+type repairCmdCursor struct {
+ BatchSize int `bson:"batchSize,omitempty"`
+}
+
+// Repair returns an iterator that goes over all recovered documents in the
+// collection, in a best-effort manner. This is most useful when there are
+// damaged data files. Multiple copies of the same document may be returned
+// by the iterator.
+//
+// Repair is supported in MongoDB 2.7.8 and later.
+func (c *Collection) Repair() *Iter {
+ // Clone the session via nonEventual (ensuring it is not Eventual) so that
+ // the server used for the query may be safely obtained afterwards, if
+ // necessary for iteration when a cursor is received.
+ session := c.Database.Session
+ cloned := session.nonEventual()
+ defer cloned.Close()
+
+ batchSize := int(cloned.queryConfig.op.limit)
+
+ var result struct{ Cursor cursorData }
+
+ cmd := repairCmd{
+ RepairCursor: c.Name,
+ Cursor: &repairCmdCursor{batchSize},
+ }
+
+ clonedc := c.With(cloned)
+ err := clonedc.Database.Run(cmd, &result)
+ return clonedc.NewIter(session, result.Cursor.FirstBatch, result.Cursor.Id, err)
+}
+
+// FindId is a convenience helper equivalent to:
+//
+// query := collection.Find(bson.M{"_id": id})
+//
+// See the Find method for more details.
+func (c *Collection) FindId(id interface{}) *Query {
+ return c.Find(bson.D{{"_id", id}})
+}
+
+type Pipe struct {
+ session *Session
+ collection *Collection
+ pipeline interface{}
+ allowDisk bool
+ batchSize int
+}
+
+type pipeCmd struct {
+ Aggregate string
+ Pipeline interface{}
+ Cursor *pipeCmdCursor ",omitempty"
+ Explain bool ",omitempty"
+ AllowDisk bool "allowDiskUse,omitempty"
+}
+
+type pipeCmdCursor struct {
+ BatchSize int `bson:"batchSize,omitempty"`
+}
+
+// Pipe prepares a pipeline to aggregate. The pipeline document
+// must be a slice built in terms of the aggregation framework language.
+//
+// For example:
+//
+// pipe := collection.Pipe([]bson.M{{"$match": bson.M{"name": "Otavio"}}})
+// iter := pipe.Iter()
+//
+// Relevant documentation:
+//
+// http://docs.mongodb.org/manual/reference/aggregation
+// http://docs.mongodb.org/manual/applications/aggregation
+// http://docs.mongodb.org/manual/tutorial/aggregation-examples
+//
+func (c *Collection) Pipe(pipeline interface{}) *Pipe {
+ session := c.Database.Session
+ session.m.RLock()
+ batchSize := int(session.queryConfig.op.limit)
+ session.m.RUnlock()
+ return &Pipe{
+ session: session,
+ collection: c,
+ pipeline: pipeline,
+ batchSize: batchSize,
+ }
+}
+
+// Iter executes the pipeline and returns an iterator capable of going
+// over all the generated results.
+func (p *Pipe) Iter() *Iter {
+ // Clone the session via nonEventual (ensuring it is not Eventual) so that
+ // the server used for the query may be safely obtained afterwards, if
+ // necessary for iteration when a cursor is received.
+ cloned := p.session.nonEventual()
+ defer cloned.Close()
+ c := p.collection.With(cloned)
+
+ var result struct {
+ Result []bson.Raw // 2.4, no cursors.
+ Cursor cursorData // 2.6+, with cursors.
+ }
+
+ cmd := pipeCmd{
+ Aggregate: c.Name,
+ Pipeline: p.pipeline,
+ AllowDisk: p.allowDisk,
+ Cursor: &pipeCmdCursor{p.batchSize},
+ }
+ err := c.Database.Run(cmd, &result)
+ if e, ok := err.(*QueryError); ok && e.Message == `unrecognized field "cursor` {
+ cmd.Cursor = nil
+ cmd.AllowDisk = false
+ err = c.Database.Run(cmd, &result)
+ }
+ firstBatch := result.Result
+ if firstBatch == nil {
+ firstBatch = result.Cursor.FirstBatch
+ }
+ return c.NewIter(p.session, firstBatch, result.Cursor.Id, err)
+}
+
+// NewIter returns a newly created iterator with the provided parameters.
+// Using this method is not recommended unless the desired functionality
+// is not yet exposed via a more convenient interface (Find, Pipe, etc).
+//
+// The optional session parameter associates the lifetime of the returned
+// iterator to an arbitrary session. If nil, the iterator will be bound to
+// c's session.
+//
+// Documents in firstBatch will be individually provided by the returned
+// iterator before documents from cursorId are made available. If cursorId
+// is zero, only the documents in firstBatch are provided.
+//
+// If err is not nil, the iterator's Err method will report it after
+// exhausting documents in firstBatch.
+//
+// NewIter must be called right after the cursor id is obtained, and must not
+// be called on a collection in Eventual mode, because the cursor id is
+// associated with the specific server that returned it. The provided session
+// parameter may be in any mode or state, though.
+//
+func (c *Collection) NewIter(session *Session, firstBatch []bson.Raw, cursorId int64, err error) *Iter {
+ var server *mongoServer
+ csession := c.Database.Session
+ csession.m.RLock()
+ socket := csession.masterSocket
+ if socket == nil {
+ socket = csession.slaveSocket
+ }
+ if socket != nil {
+ server = socket.Server()
+ }
+ csession.m.RUnlock()
+
+ if server == nil {
+ if csession.Mode() == Eventual {
+ panic("Collection.NewIter called in Eventual mode")
+ }
+ if err == nil {
+ err = errors.New("server not available")
+ }
+ }
+
+ if session == nil {
+ session = csession
+ }
+
+ iter := &Iter{
+ session: session,
+ server: server,
+ timeout: -1,
+ err: err,
+ }
+ iter.gotReply.L = &iter.m
+ for _, doc := range firstBatch {
+ iter.docData.Push(doc.Data)
+ }
+ if cursorId != 0 {
+ iter.op.cursorId = cursorId
+ iter.op.collection = c.FullName
+ iter.op.replyFunc = iter.replyFunc()
+ }
+ return iter
+}
+
+// All works like Iter.All.
+func (p *Pipe) All(result interface{}) error {
+ return p.Iter().All(result)
+}
+
+// One executes the pipeline and unmarshals the first item from the
+// result set into the result parameter.
+// It returns ErrNotFound if no items are generated by the pipeline.
+func (p *Pipe) One(result interface{}) error {
+ iter := p.Iter()
+ if iter.Next(result) {
+ return nil
+ }
+ if err := iter.Err(); err != nil {
+ return err
+ }
+ return ErrNotFound
+}
+
+// Explain returns a number of details about how the MongoDB server would
+// execute the requested pipeline, such as the number of objects examined,
+// the number of times the read lock was yielded to allow writes to go in,
+// and so on.
+//
+// For example:
+//
+// var m bson.M
+// err := collection.Pipe(pipeline).Explain(&m)
+// if err == nil {
+// fmt.Printf("Explain: %#v\n", m)
+// }
+//
+func (p *Pipe) Explain(result interface{}) error {
+ c := p.collection
+ cmd := pipeCmd{
+ Aggregate: c.Name,
+ Pipeline: p.pipeline,
+ AllowDisk: p.allowDisk,
+ Explain: true,
+ }
+ return c.Database.Run(cmd, result)
+}
+
+// AllowDiskUse enables writing to the "<dbpath>/_tmp" server directory so
+// that aggregation pipelines do not have to be held entirely in memory.
+func (p *Pipe) AllowDiskUse() *Pipe {
+ p.allowDisk = true
+ return p
+}
+
+// Batch sets the batch size used when fetching documents from the database.
+// It's possible to change this setting on a per-session basis as well, using
+// the SetBatch method of Session.
+//
+// The default batch size is defined by the database server.
+func (p *Pipe) Batch(n int) *Pipe {
+ p.batchSize = n
+ return p
+}
+
+// mgo.v3: Use a single user-visible error type.
+
+type LastError struct {
+ Err string
+ Code, N, Waited int
+ FSyncFiles int `bson:"fsyncFiles"`
+ WTimeout bool
+ UpdatedExisting bool `bson:"updatedExisting"`
+ UpsertedId interface{} `bson:"upserted"`
+
+ modified int
+ ecases []BulkErrorCase
+}
+
+func (err *LastError) Error() string {
+ return err.Err
+}
+
+type queryError struct {
+ Err string "$err"
+ ErrMsg string
+ Assertion string
+ Code int
+ AssertionCode int "assertionCode"
+}
+
+type QueryError struct {
+ Code int
+ Message string
+ Assertion bool
+}
+
+func (err *QueryError) Error() string {
+ return err.Message
+}
+
+// IsDup returns whether err informs of a duplicate key error because
+// a primary key index or a secondary unique index already has an entry
+// with the given value.
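+//
+// For example, to tolerate re-inserting an already-stored document (the
+// document is illustrative):
+//
+// err := collection.Insert(bson.M{"_id": 1})
+// if mgo.IsDup(err) {
+// err = nil // the document was already there
+// }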
+func IsDup(err error) bool {
+ // Besides being handy, helps with MongoDB bugs SERVER-7164 and SERVER-11493.
+ // What follows makes me sad. Hopefully conventions will be more clear over time.
+ switch e := err.(type) {
+ case *LastError:
+ return e.Code == 11000 || e.Code == 11001 || e.Code == 12582 || e.Code == 16460 && strings.Contains(e.Err, " E11000 ")
+ case *QueryError:
+ return e.Code == 11000 || e.Code == 11001 || e.Code == 12582
+ case *BulkError:
+ for _, ecase := range e.ecases {
+ if !IsDup(ecase.Err) {
+ return false
+ }
+ }
+ return true
+ }
+ return false
+}
+
+// Insert inserts one or more documents in the respective collection. In
+// case the session is in safe mode (see the SetSafe method) and an error
+// happens while inserting the provided documents, the returned error will
+// be of type *LastError.
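+//
+// For example (field values are illustrative):
+//
+// err := collection.Insert(bson.M{"name": "Ada"}, bson.M{"name": "Grace"})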
+func (c *Collection) Insert(docs ...interface{}) error {
+ _, err := c.writeOp(&insertOp{c.FullName, docs, 0}, true)
+ return err
+}
+
+// Update finds a single document matching the provided selector document
+// and modifies it according to the update document.
+// If the session is in safe mode (see SetSafe) a ErrNotFound error is
+// returned if a document isn't found, or a value of type *LastError
+// when some other error is detected.
+//
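+// For example (field names are illustrative):
+//
+// err := collection.Update(bson.M{"name": "Ada"}, bson.M{"$set": bson.M{"age": 36}})
+//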
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Updating
+// http://www.mongodb.org/display/DOCS/Atomic+Operations
+//
+func (c *Collection) Update(selector interface{}, update interface{}) error {
+ if selector == nil {
+ selector = bson.D{}
+ }
+ op := updateOp{
+ Collection: c.FullName,
+ Selector: selector,
+ Update: update,
+ }
+ lerr, err := c.writeOp(&op, true)
+ if err == nil && lerr != nil && !lerr.UpdatedExisting {
+ return ErrNotFound
+ }
+ return err
+}
+
+// UpdateId is a convenience helper equivalent to:
+//
+// err := collection.Update(bson.M{"_id": id}, update)
+//
+// See the Update method for more details.
+func (c *Collection) UpdateId(id interface{}, update interface{}) error {
+ return c.Update(bson.D{{"_id", id}}, update)
+}
+
+// ChangeInfo holds details about the outcome of an update operation.
+type ChangeInfo struct {
+ // Updated reports the number of existing documents modified.
+ // Due to server limitations, this reports the same value as the Matched field when
+ // talking to MongoDB <= 2.4 and on Upsert and Apply (findAndModify) operations.
+ Updated int
+ Removed int // Number of documents removed
+ Matched int // Number of documents matched but not necessarily changed
+ UpsertedId interface{} // Upserted _id field, when not explicitly provided
+}
+
+// UpdateAll finds all documents matching the provided selector document
+// and modifies them according to the update document.
+// If the session is in safe mode (see SetSafe) details of the executed
+// operation are returned in info or an error of type *LastError when
+// some problem is detected. It is not an error for the update to not be
+// applied on any documents because the selector doesn't match.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Updating
+// http://www.mongodb.org/display/DOCS/Atomic+Operations
+//
+func (c *Collection) UpdateAll(selector interface{}, update interface{}) (info *ChangeInfo, err error) {
+ if selector == nil {
+ selector = bson.D{}
+ }
+ op := updateOp{
+ Collection: c.FullName,
+ Selector: selector,
+ Update: update,
+ Flags: 2,
+ Multi: true,
+ }
+ lerr, err := c.writeOp(&op, true)
+ if err == nil && lerr != nil {
+ info = &ChangeInfo{Updated: lerr.modified, Matched: lerr.N}
+ }
+ return info, err
+}
+
+// Upsert finds a single document matching the provided selector document
+// and modifies it according to the update document. If no document matching
+// the selector is found, the update document is applied to the selector
+// document and the result is inserted in the collection.
+// If the session is in safe mode (see SetSafe) details of the executed
+// operation are returned in info, or an error of type *LastError when
+// some problem is detected.
+//
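+// For example (field names are illustrative):
+//
+// info, err := collection.Upsert(bson.M{"name": "Ada"}, bson.M{"$set": bson.M{"lang": "go"}})
+//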
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Updating
+// http://www.mongodb.org/display/DOCS/Atomic+Operations
+//
+func (c *Collection) Upsert(selector interface{}, update interface{}) (info *ChangeInfo, err error) {
+ if selector == nil {
+ selector = bson.D{}
+ }
+ op := updateOp{
+ Collection: c.FullName,
+ Selector: selector,
+ Update: update,
+ Flags: 1,
+ Upsert: true,
+ }
+ var lerr *LastError
+ for i := 0; i < maxUpsertRetries; i++ {
+ lerr, err = c.writeOp(&op, true)
+ // Retry duplicate key errors on upserts.
+ // https://docs.mongodb.com/v3.2/reference/method/db.collection.update/#use-unique-indexes
+ if !IsDup(err) {
+ break
+ }
+ }
+ if err == nil && lerr != nil {
+ info = &ChangeInfo{}
+ if lerr.UpdatedExisting {
+ info.Matched = lerr.N
+ info.Updated = lerr.modified
+ } else {
+ info.UpsertedId = lerr.UpsertedId
+ }
+ }
+ return info, err
+}
+
+// UpsertId is a convenience helper equivalent to:
+//
+// info, err := collection.Upsert(bson.M{"_id": id}, update)
+//
+// See the Upsert method for more details.
+func (c *Collection) UpsertId(id interface{}, update interface{}) (info *ChangeInfo, err error) {
+ return c.Upsert(bson.D{{"_id", id}}, update)
+}
+
+// Remove finds a single document matching the provided selector document
+// and removes it from the database.
+// If the session is in safe mode (see SetSafe) a ErrNotFound error is
+// returned if a document isn't found, or a value of type *LastError
+// when some other error is detected.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Removing
+//
+func (c *Collection) Remove(selector interface{}) error {
+ if selector == nil {
+ selector = bson.D{}
+ }
+ lerr, err := c.writeOp(&deleteOp{c.FullName, selector, 1, 1}, true)
+ if err == nil && lerr != nil && lerr.N == 0 {
+ return ErrNotFound
+ }
+ return err
+}
+
+// RemoveId is a convenience helper equivalent to:
+//
+// err := collection.Remove(bson.M{"_id": id})
+//
+// See the Remove method for more details.
+func (c *Collection) RemoveId(id interface{}) error {
+ return c.Remove(bson.D{{"_id", id}})
+}
+
+// RemoveAll finds all documents matching the provided selector document
+// and removes them from the database. In case the session is in safe mode
+// (see the SetSafe method) and an error happens when attempting the change,
+// the returned error will be of type *LastError.
+//
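+// For example (the selector is illustrative):
+//
+// info, err := collection.RemoveAll(bson.M{"expired": true})
+//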
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Removing
+//
+func (c *Collection) RemoveAll(selector interface{}) (info *ChangeInfo, err error) {
+ if selector == nil {
+ selector = bson.D{}
+ }
+ lerr, err := c.writeOp(&deleteOp{c.FullName, selector, 0, 0}, true)
+ if err == nil && lerr != nil {
+ info = &ChangeInfo{Removed: lerr.N, Matched: lerr.N}
+ }
+ return info, err
+}
+
+// DropDatabase removes the entire database including all of its collections.
+func (db *Database) DropDatabase() error {
+ return db.Run(bson.D{{"dropDatabase", 1}}, nil)
+}
+
+// DropCollection removes the entire collection including all of its documents.
+func (c *Collection) DropCollection() error {
+ return c.Database.Run(bson.D{{"drop", c.Name}}, nil)
+}
+
+// The CollectionInfo type holds metadata about a collection.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/createCollection+Command
+// http://www.mongodb.org/display/DOCS/Capped+Collections
+//
+type CollectionInfo struct {
+ // DisableIdIndex prevents the automatic creation of the index
+ // on the _id field for the collection.
+ DisableIdIndex bool
+
+ // ForceIdIndex enforces the automatic creation of the index
+ // on the _id field for the collection. Capped collections,
+ // for example, do not have such an index by default.
+ ForceIdIndex bool
+
+ // If Capped is true new documents will replace old ones when
+ // the collection is full. MaxBytes must necessarily be set
+ // to define the size when the collection wraps around.
+ // MaxDocs optionally defines the number of documents when it
+ // wraps, but MaxBytes still needs to be set.
+ Capped bool
+ MaxBytes int
+ MaxDocs int
+
+ // Validator contains a validation expression that defines which
+ // documents should be considered valid for this collection.
+ Validator interface{}
+
+ // ValidationLevel may be set to "strict" (the default) to force
+ // MongoDB to validate all documents on inserts and updates, to
+ // "moderate" to apply the validation rules only to documents
+ // that already fulfill the validation criteria, or to "off" for
+ // disabling validation entirely.
+ ValidationLevel string
+
+ // ValidationAction determines how MongoDB handles documents that
+ // violate the validation rules. It may be set to "error" (the default)
+ // to reject inserts or updates that violate the rules, or to "warn"
+ // to log invalid operations but allow them to proceed.
+ ValidationAction string
+
+ // StorageEngine allows specifying collection options for the
+ // storage engine in use. The map keys must hold the storage engine
+ // name for which options are being specified.
+ StorageEngine interface{}
+}
+
+// Create explicitly creates the c collection with details of info.
+// MongoDB creates collections automatically on use, so this method
+// is only necessary when creating collection with non-default
+// characteristics, such as capped collections.
+//
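+// For example, a capped collection might be created like this (sizes are
+// illustrative):
+//
+// info := &mgo.CollectionInfo{Capped: true, MaxBytes: 1024 * 1024}
+// err := collection.Create(info)
+//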
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/createCollection+Command
+// http://www.mongodb.org/display/DOCS/Capped+Collections
+//
+func (c *Collection) Create(info *CollectionInfo) error {
+ cmd := make(bson.D, 0, 4)
+ cmd = append(cmd, bson.DocElem{"create", c.Name})
+ if info.Capped {
+ if info.MaxBytes < 1 {
+ return fmt.Errorf("Collection.Create: with Capped, MaxBytes must also be set")
+ }
+ cmd = append(cmd, bson.DocElem{"capped", true})
+ cmd = append(cmd, bson.DocElem{"size", info.MaxBytes})
+ if info.MaxDocs > 0 {
+ cmd = append(cmd, bson.DocElem{"max", info.MaxDocs})
+ }
+ }
+ if info.DisableIdIndex {
+ cmd = append(cmd, bson.DocElem{"autoIndexId", false})
+ }
+ if info.ForceIdIndex {
+ cmd = append(cmd, bson.DocElem{"autoIndexId", true})
+ }
+ if info.Validator != nil {
+ cmd = append(cmd, bson.DocElem{"validator", info.Validator})
+ }
+ if info.ValidationLevel != "" {
+ cmd = append(cmd, bson.DocElem{"validationLevel", info.ValidationLevel})
+ }
+ if info.ValidationAction != "" {
+ cmd = append(cmd, bson.DocElem{"validationAction", info.ValidationAction})
+ }
+ if info.StorageEngine != nil {
+ cmd = append(cmd, bson.DocElem{"storageEngine", info.StorageEngine})
+ }
+ return c.Database.Run(cmd, nil)
+}
+
+// Batch sets the batch size used when fetching documents from the database.
+// It's possible to change this setting on a per-session basis as well, using
+// the SetBatch method of Session.
+//
+// The default batch size is defined by the database itself. As of this
+// writing, MongoDB will use an initial size of min(100 docs, 4MB) on the
+// first batch, and 4MB on remaining ones.
+func (q *Query) Batch(n int) *Query {
+ if n == 1 {
+ // Server interprets 1 as -1 and closes the cursor (!?)
+ n = 2
+ }
+ q.m.Lock()
+ q.op.limit = int32(n)
+ q.m.Unlock()
+ return q
+}
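+
+// For instance, a sketch of combining Batch with iteration (collection and
+// result are assumed to exist, as in the other examples in this file):
+//
+//     iter := collection.Find(nil).Batch(500).Iter()
+//     for iter.Next(&result) {
+//         // process result
+//     }
+//     if err := iter.Close(); err != nil {
+//         return err
+//     }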
+
+// Prefetch sets the point at which the next batch of results will be requested.
+// When there are p*batch_size remaining documents cached in an Iter, the next
+// batch will be requested in background. For instance, when using this:
+//
+// query.Batch(200).Prefetch(0.25)
+//
+// and there are only 50 documents cached in the Iter to be processed, the
+// next batch of 200 will be requested. It's possible to change this setting on
+// a per-session basis as well, using the SetPrefetch method of Session.
+//
+// The default prefetch value is 0.25.
+func (q *Query) Prefetch(p float64) *Query {
+ q.m.Lock()
+ q.prefetch = p
+ q.m.Unlock()
+ return q
+}
+
+// Skip skips over the n initial documents from the query results. Note that
+// this only makes sense with capped collections where documents are naturally
+// ordered by insertion time, or with sorted results.
+func (q *Query) Skip(n int) *Query {
+ q.m.Lock()
+ q.op.skip = int32(n)
+ q.m.Unlock()
+ return q
+}
+
+// Limit restricts the maximum number of documents retrieved to n, and also
+// changes the batch size to the same value. Once n documents have been
+// returned by Next, the following call will return ErrNotFound.
+func (q *Query) Limit(n int) *Query {
+ q.m.Lock()
+ switch {
+ case n == 1:
+ q.limit = 1
+ q.op.limit = -1
+ case n == math.MinInt32: // negating MinInt32 overflows back to MinInt32
+ q.limit = math.MaxInt32
+ q.op.limit = math.MinInt32 + 1
+ case n < 0:
+ q.limit = int32(-n)
+ q.op.limit = int32(n)
+ default:
+ q.limit = int32(n)
+ q.op.limit = int32(n)
+ }
+ q.m.Unlock()
+ return q
+}
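+
+// A sketch of simple paging with Sort, Skip and Limit (page, perPage,
+// collection and results are assumed values used for illustration only):
+//
+//     err := collection.Find(nil).
+//         Sort("-timestamp").
+//         Skip((page - 1) * perPage).
+//         Limit(perPage).
+//         All(&results)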
+
+// Select enables selecting which fields should be retrieved for the results
+// found. For example, the following query would only retrieve the name field:
+//
+// err := collection.Find(nil).Select(bson.M{"name": 1}).One(&result)
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Retrieving+a+Subset+of+Fields
+//
+func (q *Query) Select(selector interface{}) *Query {
+ q.m.Lock()
+ q.op.selector = selector
+ q.m.Unlock()
+ return q
+}
+
+// Sort asks the database to order returned documents according to the
+// provided field names. A field name may be prefixed by - (minus) for
+// it to be sorted in reverse order.
+//
+// For example:
+//
+// query1 := collection.Find(nil).Sort("firstname", "lastname")
+// query2 := collection.Find(nil).Sort("-age")
+// query3 := collection.Find(nil).Sort("$natural")
+// query4 := collection.Find(nil).Select(bson.M{"score": bson.M{"$meta": "textScore"}}).Sort("$textScore:score")
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Sorting+and+Natural+Order
+//
+func (q *Query) Sort(fields ...string) *Query {
+ q.m.Lock()
+ var order bson.D
+ for _, field := range fields {
+ n := 1
+ var kind string
+ if field != "" {
+ if field[0] == '$' {
+ if c := strings.Index(field, ":"); c > 1 && c < len(field)-1 {
+ kind = field[1:c]
+ field = field[c+1:]
+ }
+ }
+ switch field[0] {
+ case '+':
+ field = field[1:]
+ case '-':
+ n = -1
+ field = field[1:]
+ }
+ }
+ if field == "" {
+ panic("Sort: empty field name")
+ }
+ if kind == "textScore" {
+ order = append(order, bson.DocElem{field, bson.M{"$meta": kind}})
+ } else {
+ order = append(order, bson.DocElem{field, n})
+ }
+ }
+ q.op.options.OrderBy = order
+ q.op.hasOptions = true
+ q.m.Unlock()
+ return q
+}
+
+// Explain returns a number of details about how the MongoDB server would
+// execute the requested query, such as the number of objects examined,
+// the number of times the read lock was yielded to allow writes to go in,
+// and so on.
+//
+// For example:
+//
+// m := bson.M{}
+// err := collection.Find(bson.M{"filename": name}).Explain(m)
+// if err == nil {
+// fmt.Printf("Explain: %#v\n", m)
+// }
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Optimization
+// http://www.mongodb.org/display/DOCS/Query+Optimizer
+//
+func (q *Query) Explain(result interface{}) error {
+ q.m.Lock()
+ clone := &Query{session: q.session, query: q.query}
+ q.m.Unlock()
+ clone.op.options.Explain = true
+ clone.op.hasOptions = true
+ if clone.op.limit > 0 {
+ clone.op.limit = -q.op.limit
+ }
+ iter := clone.Iter()
+ if iter.Next(result) {
+ return nil
+ }
+ return iter.Close()
+}
+
+// TODO: Add Collection.Explain. See https://goo.gl/1MDlvz.
+
+// Hint will include an explicit "hint" in the query to force the server
+// to use a specified index, potentially improving performance in some
+// situations. The provided parameters are the fields that compose the
+// key of the index to be used. For details on how the indexKey may be
+// built, see the EnsureIndex method.
+//
+// For example:
+//
+// query := collection.Find(bson.M{"firstname": "Joe", "lastname": "Winter"})
+// query.Hint("lastname", "firstname")
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Optimization
+// http://www.mongodb.org/display/DOCS/Query+Optimizer
+//
+func (q *Query) Hint(indexKey ...string) *Query {
+ q.m.Lock()
+ keyInfo, err := parseIndexKey(indexKey)
+ q.op.options.Hint = keyInfo.key
+ q.op.hasOptions = true
+ q.m.Unlock()
+ if err != nil {
+ panic(err)
+ }
+ return q
+}
+
+// SetMaxScan constrains the query to stop after scanning the specified
+// number of documents.
+//
+// This modifier is generally used to prevent potentially long running
+// queries from disrupting performance by scanning through too much data.
+func (q *Query) SetMaxScan(n int) *Query {
+ q.m.Lock()
+ q.op.options.MaxScan = n
+ q.op.hasOptions = true
+ q.m.Unlock()
+ return q
+}
+
+// SetMaxTime constrains the query to stop after running for the specified time.
+//
+// When the time limit is reached MongoDB automatically cancels the query.
+// This can be used to efficiently prevent and identify unexpectedly slow queries.
+//
+// A few important notes about the mechanism enforcing this limit:
+//
+// - Requests can block behind locking operations on the server, and that blocking
+// time is not accounted for. In other words, the timer starts ticking only after
+// the actual start of the query when it initially acquires the appropriate lock;
+//
+// - Operations are interrupted only at interrupt points where an operation can be
+// safely aborted – the total execution time may exceed the specified value;
+//
+// - The limit can be applied to both CRUD operations and commands, but not all
+// commands are interruptible;
+//
+// - While iterating over results, computing follow up batches is included in the
+// total time and the iteration continues until the allotted time is over, but
+// network roundtrips are not taken into account for the limit;
+//
+// - This limit does not override the inactive cursor timeout for idle cursors
+// (default is 10 min).
+//
+// This mechanism was introduced in MongoDB 2.6.
+//
+// Relevant documentation:
+//
+// http://blog.mongodb.org/post/83621787773/maxtimems-and-query-optimizer-introspection-in
+//
+func (q *Query) SetMaxTime(d time.Duration) *Query {
+ q.m.Lock()
+ q.op.options.MaxTimeMS = int(d / time.Millisecond)
+ q.op.hasOptions = true
+ q.m.Unlock()
+ return q
+}
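+
+// A sketch of bounding a query both by the number of scanned documents and
+// by wall-clock time (filter and results are assumed values for illustration):
+//
+//     err := collection.Find(filter).
+//         SetMaxScan(10000).
+//         SetMaxTime(2 * time.Second).
+//         All(&results)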
+
+// Snapshot will force the performed query to make use of an available
+// index on the _id field to prevent the same document from being returned
+// more than once in a single iteration. This might happen without this
+// setting in situations when the document changes in size and thus has to
+// be moved while the iteration is running.
+//
+// Because snapshot mode traverses the _id index, it may not be used with
+// sorting or explicit hints. It also cannot use any other index for the
+// query.
+//
+// Even with snapshot mode, items inserted or deleted during the query may
+// or may not be returned; that is, this mode is not a true point-in-time
+// snapshot.
+//
+// The same effect of Snapshot may be obtained by using any unique index on
+// field(s) that will not be modified (best to use Hint explicitly too).
+// A non-unique index (such as creation time) may be made unique by
+// appending _id to the index when creating it.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/How+to+do+Snapshotted+Queries+in+the+Mongo+Database
+//
+func (q *Query) Snapshot() *Query {
+ q.m.Lock()
+ q.op.options.Snapshot = true
+ q.op.hasOptions = true
+ q.m.Unlock()
+ return q
+}
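+
+// For instance, a sketch of a long-running full scan in snapshot mode
+// (collection and result are assumed, as in the other examples):
+//
+//     iter := collection.Find(nil).Snapshot().Iter()
+//     for iter.Next(&result) {
+//         // each document is seen at most once, even if it grows and moves
+//     }
+//     if err := iter.Close(); err != nil {
+//         return err
+//     }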
+
+// Comment adds a comment to the query to identify it in the database profiler output.
+//
+// Relevant documentation:
+//
+// http://docs.mongodb.org/manual/reference/operator/meta/comment
+// http://docs.mongodb.org/manual/reference/command/profile
+// http://docs.mongodb.org/manual/administration/analyzing-mongodb-performance/#database-profiling
+//
+func (q *Query) Comment(comment string) *Query {
+ q.m.Lock()
+ q.op.options.Comment = comment
+ q.op.hasOptions = true
+ q.m.Unlock()
+ return q
+}
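+
+// A sketch of tagging a query so it can be located later in the profiler
+// output (the comment string and filter are arbitrary illustrations):
+//
+//     err := collection.Find(bson.M{"user": userId}).
+//         Comment("dashboard-recent-activity").
+//         All(&results)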
+
+// LogReplay enables an option that optimizes queries that are typically
+// made on the MongoDB oplog for replaying it. This is an internal
+// implementation aspect and most likely uninteresting for other uses.
+// It has seen at least one use case, though, so it's exposed via the API.
+func (q *Query) LogReplay() *Query {
+ q.m.Lock()
+ q.op.flags |= flagLogReplay
+ q.m.Unlock()
+ return q
+}
+
+func checkQueryError(fullname string, d []byte) error {
+ l := len(d)
+ if l < 16 {
+ return nil
+ }
+ if d[5] == '$' && d[6] == 'e' && d[7] == 'r' && d[8] == 'r' && d[9] == '\x00' && d[4] == '\x02' {
+ goto Error
+ }
+ if len(fullname) < 5 || fullname[len(fullname)-5:] != ".$cmd" {
+ return nil
+ }
+ for i := 0; i+8 < l; i++ {
+ if d[i] == '\x02' && d[i+1] == 'e' && d[i+2] == 'r' && d[i+3] == 'r' && d[i+4] == 'm' && d[i+5] == 's' && d[i+6] == 'g' && d[i+7] == '\x00' {
+ goto Error
+ }
+ }
+ return nil
+
+Error:
+ result := &queryError{}
+ bson.Unmarshal(d, result)
+ if result.Err == "" && result.ErrMsg == "" {
+ return nil
+ }
+ if result.AssertionCode != 0 && result.Assertion != "" {
+ return &QueryError{Code: result.AssertionCode, Message: result.Assertion, Assertion: true}
+ }
+ if result.Err != "" {
+ return &QueryError{Code: result.Code, Message: result.Err}
+ }
+ return &QueryError{Code: result.Code, Message: result.ErrMsg}
+}
+
+// One executes the query and unmarshals the first obtained document into the
+// result argument. The result must be a struct or map value capable of being
+// unmarshalled into by gobson. This function blocks until either a result
+// is available or an error happens. For example:
+//
+// err := collection.Find(bson.M{"a": 1}).One(&result)
+//
+// In case the resulting document includes a field named $err or errmsg, which
+// are standard ways for MongoDB to return query errors, the returned err will
+// be set to a *QueryError value including the Err message and the Code. In
+// those cases, the result argument is still unmarshalled from the
+// received document, so that any other custom values may be obtained if
+// desired.
+//
+func (q *Query) One(result interface{}) (err error) {
+ q.m.Lock()
+ session := q.session
+ op := q.op // Copy.
+ q.m.Unlock()
+
+ socket, err := session.acquireSocket(true)
+ if err != nil {
+ return err
+ }
+ defer socket.Release()
+
+ op.limit = -1
+
+ session.prepareQuery(&op)
+
+ expectFindReply := prepareFindOp(socket, &op, 1)
+
+ data, err := socket.SimpleQuery(&op)
+ if err != nil {
+ return err
+ }
+ if data == nil {
+ return ErrNotFound
+ }
+ if expectFindReply {
+ var findReply struct {
+ Ok bool
+ Code int
+ Errmsg string
+ Cursor cursorData
+ }
+ err = bson.Unmarshal(data, &findReply)
+ if err != nil {
+ return err
+ }
+ if !findReply.Ok && findReply.Errmsg != "" {
+ return &QueryError{Code: findReply.Code, Message: findReply.Errmsg}
+ }
+ if len(findReply.Cursor.FirstBatch) == 0 {
+ return ErrNotFound
+ }
+ data = findReply.Cursor.FirstBatch[0].Data
+ }
+ if result != nil {
+ err = bson.Unmarshal(data, result)
+ if err == nil {
+ debugf("Query %p document unmarshaled: %#v", q, result)
+ } else {
+ debugf("Query %p document unmarshaling failed: %#v", q, err)
+ return err
+ }
+ }
+ return checkQueryError(op.collection, data)
+}
+
+// prepareFindOp translates op from being an old-style wire protocol query into
+// a new-style find command if that's supported by the MongoDB server (3.2+).
+// It returns whether to expect a find command result or not. Note op may be
+// translated into an explain command, in which case the function returns false.
+func prepareFindOp(socket *mongoSocket, op *queryOp, limit int32) bool {
+ if socket.ServerInfo().MaxWireVersion < 4 || op.collection == "admin.$cmd" {
+ return false
+ }
+
+ nameDot := strings.Index(op.collection, ".")
+ if nameDot < 0 {
+ panic("invalid query collection name: " + op.collection)
+ }
+
+ find := findCmd{
+ Collection: op.collection[nameDot+1:],
+ Filter: op.query,
+ Projection: op.selector,
+ Sort: op.options.OrderBy,
+ Skip: op.skip,
+ Limit: limit,
+ MaxTimeMS: op.options.MaxTimeMS,
+ MaxScan: op.options.MaxScan,
+ Hint: op.options.Hint,
+ Comment: op.options.Comment,
+ Snapshot: op.options.Snapshot,
+ OplogReplay: op.flags&flagLogReplay != 0,
+ }
+ if op.limit < 0 {
+ find.BatchSize = -op.limit
+ find.SingleBatch = true
+ } else {
+ find.BatchSize = op.limit
+ }
+
+ explain := op.options.Explain
+
+ op.collection = op.collection[:nameDot] + ".$cmd"
+ op.query = &find
+ op.skip = 0
+ op.limit = -1
+ op.options = queryWrapper{}
+ op.hasOptions = false
+
+ if explain {
+ op.query = bson.D{{"explain", op.query}}
+ return false
+ }
+ return true
+}
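+
+// To illustrate the translation above: against a 3.2+ server, a query built as
+//
+//     db.C("people").Find(bson.M{"name": "Ada"}).Limit(3)
+//
+// is issued against the "<db>.$cmd" namespace as roughly
+//
+//     {find: "people", filter: {name: "Ada"}, limit: 3, batchSize: 3}
+//
+// (names chosen for illustration only; the exact fields sent depend on which
+// query modifiers were set).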
+
+type cursorData struct {
+ FirstBatch []bson.Raw "firstBatch"
+ NextBatch []bson.Raw "nextBatch"
+ NS string
+ Id int64
+}
+
+// findCmd holds the command used for performing queries on MongoDB 3.2+.
+//
+// Relevant documentation:
+//
+// https://docs.mongodb.org/master/reference/command/find/#dbcmd.find
+//
+type findCmd struct {
+ Collection string `bson:"find"`
+ Filter interface{} `bson:"filter,omitempty"`
+ Sort interface{} `bson:"sort,omitempty"`
+ Projection interface{} `bson:"projection,omitempty"`
+ Hint interface{} `bson:"hint,omitempty"`
+ Skip interface{} `bson:"skip,omitempty"`
+ Limit int32 `bson:"limit,omitempty"`
+ BatchSize int32 `bson:"batchSize,omitempty"`
+ SingleBatch bool `bson:"singleBatch,omitempty"`
+ Comment string `bson:"comment,omitempty"`
+ MaxScan int `bson:"maxScan,omitempty"`
+ MaxTimeMS int `bson:"maxTimeMS,omitempty"`
+ ReadConcern interface{} `bson:"readConcern,omitempty"`
+ Max interface{} `bson:"max,omitempty"`
+ Min interface{} `bson:"min,omitempty"`
+ ReturnKey bool `bson:"returnKey,omitempty"`
+ ShowRecordId bool `bson:"showRecordId,omitempty"`
+ Snapshot bool `bson:"snapshot,omitempty"`
+ Tailable bool `bson:"tailable,omitempty"`
+ AwaitData bool `bson:"awaitData,omitempty"`
+ OplogReplay bool `bson:"oplogReplay,omitempty"`
+ NoCursorTimeout bool `bson:"noCursorTimeout,omitempty"`
+ AllowPartialResults bool `bson:"allowPartialResults,omitempty"`
+}
+
+// getMoreCmd holds the command used for requesting more query results on MongoDB 3.2+.
+//
+// Relevant documentation:
+//
+// https://docs.mongodb.org/master/reference/command/getMore/#dbcmd.getMore
+//
+type getMoreCmd struct {
+ CursorId int64 `bson:"getMore"`
+ Collection string `bson:"collection"`
+ BatchSize int32 `bson:"batchSize,omitempty"`
+ MaxTimeMS int64 `bson:"maxTimeMS,omitempty"`
+}
+
+// run duplicates the behavior of collection.Find(query).One(&result)
+// as performed by Database.Run, specializing the logic for running
+// database commands on a given socket.
+func (db *Database) run(socket *mongoSocket, cmd, result interface{}) (err error) {
+ // Database.Run:
+ if name, ok := cmd.(string); ok {
+ cmd = bson.D{{name, 1}}
+ }
+
+ // Collection.Find:
+ session := db.Session
+ session.m.RLock()
+ op := session.queryConfig.op // Copy.
+ session.m.RUnlock()
+ op.query = cmd
+ op.collection = db.Name + ".$cmd"
+
+ // Query.One:
+ session.prepareQuery(&op)
+ op.limit = -1
+
+ data, err := socket.SimpleQuery(&op)
+ if err != nil {
+ return err
+ }
+ if data == nil {
+ return ErrNotFound
+ }
+ if result != nil {
+ err = bson.Unmarshal(data, result)
+ if err != nil {
+ debugf("Run command unmarshaling failed: %#v", op, err)
+ return err
+ }
+ if globalDebug && globalLogger != nil {
+ var res bson.M
+ bson.Unmarshal(data, &res)
+ debugf("Run command unmarshaled: %#v, result: %#v", op, res)
+ }
+ }
+ return checkQueryError(op.collection, data)
+}
+
+// The DBRef type implements support for the database reference MongoDB
+// convention as supported by multiple drivers. This convention enables
+// cross-referencing documents between collections and databases using
+// a structure which includes a collection name, a document id, and
+// optionally a database name.
+//
+// See the FindRef methods on Session and on Database.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Database+References
+//
+type DBRef struct {
+ Collection string `bson:"$ref"`
+ Id interface{} `bson:"$id"`
+ Database string `bson:"$db,omitempty"`
+}
+
+// NOTE: Order of fields for DBRef above does matter, per documentation.
+
+// FindRef returns a query that looks for the document in the provided
+// reference. If the reference includes the DB field, the document will
+// be retrieved from the respective database.
+//
+// See also the DBRef type and the FindRef method on Session.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Database+References
+//
+func (db *Database) FindRef(ref *DBRef) *Query {
+ var c *Collection
+ if ref.Database == "" {
+ c = db.C(ref.Collection)
+ } else {
+ c = db.Session.DB(ref.Database).C(ref.Collection)
+ }
+ return c.FindId(ref.Id)
+}
+
+// FindRef returns a query that looks for the document in the provided
+// reference. For a DBRef to be resolved correctly at the session level
+// it must necessarily have the optional DB field defined.
+//
+// See also the DBRef type and the FindRef method on Database.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Database+References
+//
+func (s *Session) FindRef(ref *DBRef) *Query {
+ if ref.Database == "" {
+ panic(fmt.Errorf("Can't resolve database for %#v", ref))
+ }
+ c := s.DB(ref.Database).C(ref.Collection)
+ return c.FindId(ref.Id)
+}
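+
+// A sketch of resolving a reference at the session level (personId and
+// person are assumed values; the reference must carry the database name):
+//
+//     ref := &mgo.DBRef{Collection: "people", Id: personId, Database: "company"}
+//     err := session.FindRef(ref).One(&person)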
+
+// CollectionNames returns the collection names present in the db database.
+func (db *Database) CollectionNames() (names []string, err error) {
+ // Clone session and set it to Monotonic mode so that the server
+ // used for the query may be safely obtained afterwards, if
+ // necessary for iteration when a cursor is received.
+ cloned := db.Session.nonEventual()
+ defer cloned.Close()
+
+ batchSize := int(cloned.queryConfig.op.limit)
+
+ // Try with a command.
+ var result struct {
+ Collections []bson.Raw
+ Cursor cursorData
+ }
+ err = db.With(cloned).Run(bson.D{{"listCollections", 1}, {"cursor", bson.D{{"batchSize", batchSize}}}}, &result)
+ if err == nil {
+ firstBatch := result.Collections
+ if firstBatch == nil {
+ firstBatch = result.Cursor.FirstBatch
+ }
+ var iter *Iter
+ ns := strings.SplitN(result.Cursor.NS, ".", 2)
+ if len(ns) < 2 {
+ iter = db.With(cloned).C("").NewIter(nil, firstBatch, result.Cursor.Id, nil)
+ } else {
+ iter = cloned.DB(ns[0]).C(ns[1]).NewIter(nil, firstBatch, result.Cursor.Id, nil)
+ }
+ var coll struct{ Name string }
+ for iter.Next(&coll) {
+ names = append(names, coll.Name)
+ }
+ if err := iter.Close(); err != nil {
+ return nil, err
+ }
+ sort.Strings(names)
+ return names, err
+ }
+ if err != nil && !isNoCmd(err) {
+ return nil, err
+ }
+
+ // Command not yet supported. Query the database instead.
+ nameIndex := len(db.Name) + 1
+ iter := db.C("system.namespaces").Find(nil).Iter()
+ var coll struct{ Name string }
+ for iter.Next(&coll) {
+ if strings.Index(coll.Name, "$") < 0 || strings.Index(coll.Name, ".oplog.$") >= 0 {
+ names = append(names, coll.Name[nameIndex:])
+ }
+ }
+ if err := iter.Close(); err != nil {
+ return nil, err
+ }
+ sort.Strings(names)
+ return names, nil
+}
+
+type dbNames struct {
+ Databases []struct {
+ Name string
+ Empty bool
+ }
+}
+
+// DatabaseNames returns the names of non-empty databases present in the cluster.
+func (s *Session) DatabaseNames() (names []string, err error) {
+ var result dbNames
+ err = s.Run("listDatabases", &result)
+ if err != nil {
+ return nil, err
+ }
+ for _, db := range result.Databases {
+ if !db.Empty {
+ names = append(names, db.Name)
+ }
+ }
+ sort.Strings(names)
+ return names, nil
+}
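+
+// For instance, a sketch that lists every collection of every non-empty
+// database visible to the session (error handling elided for brevity):
+//
+//     dbs, _ := session.DatabaseNames()
+//     for _, name := range dbs {
+//         colls, _ := session.DB(name).CollectionNames()
+//         fmt.Println(name, colls)
+//     }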
+
+// Iter executes the query and returns an iterator capable of going over all
+// the results. Results will be returned in batches of configurable
+// size (see the Batch method) and more documents will be requested when a
+// configurable number of documents is iterated over (see the Prefetch method).
+func (q *Query) Iter() *Iter {
+ q.m.Lock()
+ session := q.session
+ op := q.op
+ prefetch := q.prefetch
+ limit := q.limit
+ q.m.Unlock()
+
+ iter := &Iter{
+ session: session,
+ prefetch: prefetch,
+ limit: limit,
+ timeout: -1,
+ }
+ iter.gotReply.L = &iter.m
+ iter.op.collection = op.collection
+ iter.op.limit = op.limit
+ iter.op.replyFunc = iter.replyFunc()
+ iter.docsToReceive++
+
+ socket, err := session.acquireSocket(true)
+ if err != nil {
+ iter.err = err
+ return iter
+ }
+ defer socket.Release()
+
+ session.prepareQuery(&op)
+ op.replyFunc = iter.op.replyFunc
+
+ if prepareFindOp(socket, &op, limit) {
+ iter.findCmd = true
+ }
+
+ iter.server = socket.Server()
+ err = socket.Query(&op)
+ if err != nil {
+ // Must lock as the query is already out and it may call replyFunc.
+ iter.m.Lock()
+ iter.err = err
+ iter.m.Unlock()
+ }
+
+ return iter
+}
+
+// Tail returns a tailable iterator. Unlike a normal iterator, a
+// tailable iterator may wait for new values to be inserted in the
+// collection once the end of the current result set is reached.
+// A tailable iterator may only be used with capped collections.
+//
+// The timeout parameter indicates how long Next will block waiting
+// for a result before timing out. If set to -1, Next will not
+// timeout, and will continue waiting for a result for as long as
+// the cursor is valid and the session is not closed. If set to 0,
+// Next times out as soon as it reaches the end of the result set.
+// Otherwise, Next will wait for at least the given duration for a
+// new document to be available before timing out.
+//
+// On timeouts, Next will unblock and return false, and the Timeout
+// method will return true if called. In these cases, Next may still
+// be called again on the same iterator to check if a new value is
+// available at the current cursor position, and again it will block
+// according to the specified timeout. If the cursor becomes
+// invalid, though, both Next and Timeout will return false and
+// the query must be restarted.
+//
+// The following example demonstrates timeout handling and query
+// restarting:
+//
+// iter := collection.Find(nil).Sort("$natural").Tail(5 * time.Second)
+// for {
+// for iter.Next(&result) {
+// fmt.Println(result.Id)
+// lastId = result.Id
+// }
+// if iter.Err() != nil {
+// return iter.Close()
+// }
+// if iter.Timeout() {
+// continue
+// }
+// query := collection.Find(bson.M{"_id": bson.M{"$gt": lastId}})
+// iter = query.Sort("$natural").Tail(5 * time.Second)
+// }
+// iter.Close()
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Tailable+Cursors
+// http://www.mongodb.org/display/DOCS/Capped+Collections
+// http://www.mongodb.org/display/DOCS/Sorting+and+Natural+Order
+//
+func (q *Query) Tail(timeout time.Duration) *Iter {
+ q.m.Lock()
+ session := q.session
+ op := q.op
+ prefetch := q.prefetch
+ q.m.Unlock()
+
+ iter := &Iter{session: session, prefetch: prefetch}
+ iter.gotReply.L = &iter.m
+ iter.timeout = timeout
+ iter.op.collection = op.collection
+ iter.op.limit = op.limit
+ iter.op.replyFunc = iter.replyFunc()
+ iter.docsToReceive++
+ session.prepareQuery(&op)
+ op.replyFunc = iter.op.replyFunc
+ op.flags |= flagTailable | flagAwaitData
+
+ socket, err := session.acquireSocket(true)
+ if err != nil {
+ iter.err = err
+ } else {
+ iter.server = socket.Server()
+ err = socket.Query(&op)
+ if err != nil {
+ // Must lock as the query is already out and it may call replyFunc.
+ iter.m.Lock()
+ iter.err = err
+ iter.m.Unlock()
+ }
+ socket.Release()
+ }
+ return iter
+}
+
+func (s *Session) prepareQuery(op *queryOp) {
+ s.m.RLock()
+ op.mode = s.consistency
+ if s.slaveOk {
+ op.flags |= flagSlaveOk
+ }
+ s.m.RUnlock()
+ return
+}
+
+// Err returns nil if no errors happened during iteration, or the actual
+// error otherwise.
+//
+// In case a resulting document included a field named $err or errmsg, which are
+// standard ways for MongoDB to report an improper query, the returned value has
+// a *QueryError type, and includes the Err message and the Code.
+func (iter *Iter) Err() error {
+ iter.m.Lock()
+ err := iter.err
+ iter.m.Unlock()
+ if err == ErrNotFound {
+ return nil
+ }
+ return err
+}
+
+// Close kills the server cursor used by the iterator, if any, and returns
+// nil if no errors happened during iteration, or the actual error otherwise.
+//
+// Server cursors are automatically closed at the end of an iteration, which
+// means Close will do nothing unless the iteration was interrupted before
+// the server finished sending results to the driver. If Close is not called
+// in such a situation, the cursor will remain available at the server until
+// the default cursor timeout period is reached. No further problems arise.
+//
+// Close is idempotent. That means it can be called repeatedly and will
+// return the same result every time.
+//
+// In case a resulting document included a field named $err or errmsg, which are
+// standard ways for MongoDB to report an improper query, the returned value has
+// a *QueryError type.
+func (iter *Iter) Close() error {
+ iter.m.Lock()
+ cursorId := iter.op.cursorId
+ iter.op.cursorId = 0
+ err := iter.err
+ iter.m.Unlock()
+ if cursorId == 0 {
+ if err == ErrNotFound {
+ return nil
+ }
+ return err
+ }
+ socket, err := iter.acquireSocket()
+ if err == nil {
+ // TODO Batch kills.
+ err = socket.Query(&killCursorsOp{[]int64{cursorId}})
+ socket.Release()
+ }
+
+ iter.m.Lock()
+ if err != nil && (iter.err == nil || iter.err == ErrNotFound) {
+ iter.err = err
+ } else if iter.err != ErrNotFound {
+ err = iter.err
+ }
+ iter.m.Unlock()
+ return err
+}
+
+// Done returns true only if a follow up Next call is guaranteed
+// to return false.
+//
+// For an iterator created with Tail, Done may return false for
+// an iterator that has no more data. Otherwise it's guaranteed
+// to return false only if there is data or an error happened.
+//
+// Done may block waiting for a pending query to verify whether
+// more data is actually available or not.
+func (iter *Iter) Done() bool {
+ iter.m.Lock()
+ defer iter.m.Unlock()
+
+ for {
+ if iter.docData.Len() > 0 {
+ return false
+ }
+ if iter.docsToReceive > 1 {
+ return true
+ }
+ if iter.docsToReceive > 0 {
+ iter.gotReply.Wait()
+ continue
+ }
+ return iter.op.cursorId == 0
+ }
+}
+
+// Timeout returns true if Next returned false due to a timeout of
+// a tailable cursor. In those cases, Next may be called again to continue
+// the iteration at the previous cursor position.
+func (iter *Iter) Timeout() bool {
+ iter.m.Lock()
+ result := iter.timedout
+ iter.m.Unlock()
+ return result
+}
+
+// Next retrieves the next document from the result set, blocking if necessary.
+// This method will also automatically retrieve another batch of documents from
+// the server when the current one is exhausted, or before that in background
+// if pre-fetching is enabled (see the Query.Prefetch and Session.SetPrefetch
+// methods).
+//
+// Next returns true if a document was successfully unmarshalled into result,
+// and false at the end of the result set or if an error happened.
+// When Next returns false, the Err method should be called to verify if
+// there was an error during iteration.
+//
+// For example:
+//
+// iter := collection.Find(nil).Iter()
+// for iter.Next(&result) {
+// fmt.Printf("Result: %v\n", result.Id)
+// }
+// if err := iter.Close(); err != nil {
+// return err
+// }
+//
+func (iter *Iter) Next(result interface{}) bool {
+ iter.m.Lock()
+ iter.timedout = false
+ timeout := time.Time{}
+ for iter.err == nil && iter.docData.Len() == 0 && (iter.docsToReceive > 0 || iter.op.cursorId != 0) {
+ if iter.docsToReceive == 0 {
+ if iter.timeout >= 0 {
+ if timeout.IsZero() {
+ timeout = time.Now().Add(iter.timeout)
+ }
+ if time.Now().After(timeout) {
+ iter.timedout = true
+ iter.m.Unlock()
+ return false
+ }
+ }
+ iter.getMore()
+ if iter.err != nil {
+ break
+ }
+ }
+ iter.gotReply.Wait()
+ }
+
+ // Exhaust available data before reporting any errors.
+ if docData, ok := iter.docData.Pop().([]byte); ok {
+ close := false
+ if iter.limit > 0 {
+ iter.limit--
+ if iter.limit == 0 {
+ if iter.docData.Len() > 0 {
+ iter.m.Unlock()
+ panic(fmt.Errorf("data remains after limit exhausted: %d", iter.docData.Len()))
+ }
+ iter.err = ErrNotFound
+ close = true
+ }
+ }
+ if iter.op.cursorId != 0 && iter.err == nil {
+ iter.docsBeforeMore--
+ if iter.docsBeforeMore == -1 {
+ iter.getMore()
+ }
+ }
+ iter.m.Unlock()
+
+ if close {
+ iter.Close()
+ }
+ err := bson.Unmarshal(docData, result)
+ if err != nil {
+ debugf("Iter %p document unmarshaling failed: %#v", iter, err)
+ iter.m.Lock()
+ if iter.err == nil {
+ iter.err = err
+ }
+ iter.m.Unlock()
+ return false
+ }
+ debugf("Iter %p document unmarshaled: %#v", iter, result)
+ // XXX Only have to check first document for a query error?
+ err = checkQueryError(iter.op.collection, docData)
+ if err != nil {
+ iter.m.Lock()
+ if iter.err == nil {
+ iter.err = err
+ }
+ iter.m.Unlock()
+ return false
+ }
+ return true
+ } else if iter.err != nil {
+ debugf("Iter %p returning false: %s", iter, iter.err)
+ iter.m.Unlock()
+ return false
+ } else if iter.op.cursorId == 0 {
+ iter.err = ErrNotFound
+ debugf("Iter %p exhausted with cursor=0", iter)
+ iter.m.Unlock()
+ return false
+ }
+
+ panic("unreachable")
+}
+
+// All retrieves all documents from the result set into the provided slice
+// and closes the iterator.
+//
+// The result argument must necessarily be the address for a slice. The slice
+// may be nil or previously allocated.
+//
+// WARNING: Obviously, All must not be used with result sets that may be
+// potentially large, since it may consume all memory until the system
+// crashes. Consider building the query with a Limit clause to ensure the
+// result size is bounded.
+//
+// For instance:
+//
+// var result []struct{ Value int }
+// iter := collection.Find(nil).Limit(100).Iter()
+// err := iter.All(&result)
+// if err != nil {
+// return err
+// }
+//
+func (iter *Iter) All(result interface{}) error {
+ resultv := reflect.ValueOf(result)
+ if resultv.Kind() != reflect.Ptr || resultv.Elem().Kind() != reflect.Slice {
+ panic("result argument must be a slice address")
+ }
+ slicev := resultv.Elem()
+ slicev = slicev.Slice(0, slicev.Cap())
+ elemt := slicev.Type().Elem()
+ i := 0
+ for {
+ if slicev.Len() == i {
+ elemp := reflect.New(elemt)
+ if !iter.Next(elemp.Interface()) {
+ break
+ }
+ slicev = reflect.Append(slicev, elemp.Elem())
+ slicev = slicev.Slice(0, slicev.Cap())
+ } else {
+ if !iter.Next(slicev.Index(i).Addr().Interface()) {
+ break
+ }
+ }
+ i++
+ }
+ resultv.Elem().Set(slicev.Slice(0, i))
+ return iter.Close()
+}
+
+// All works like Iter.All.
+func (q *Query) All(result interface{}) error {
+ return q.Iter().All(result)
+}
+
+// The For method is obsolete and will be removed in a future release.
+// See Iter as an elegant replacement.
+func (q *Query) For(result interface{}, f func() error) error {
+ return q.Iter().For(result, f)
+}
+
+// The For method is obsolete and will be removed in a future release.
+// See Iter as an elegant replacement.
+func (iter *Iter) For(result interface{}, f func() error) (err error) {
+ valid := false
+ v := reflect.ValueOf(result)
+ if v.Kind() == reflect.Ptr {
+ v = v.Elem()
+ switch v.Kind() {
+ case reflect.Map, reflect.Ptr, reflect.Interface, reflect.Slice:
+ valid = v.IsNil()
+ }
+ }
+ if !valid {
+ panic("For needs a pointer to nil reference value. See the documentation.")
+ }
+ zero := reflect.Zero(v.Type())
+ for {
+ v.Set(zero)
+ if !iter.Next(result) {
+ break
+ }
+ err = f()
+ if err != nil {
+ return err
+ }
+ }
+ return iter.Err()
+}
+
+// acquireSocket acquires a socket from the same server that the iterator
+// cursor was obtained from.
+//
+// WARNING: This method must not be called with iter.m locked. Acquiring the
+// socket depends on the cluster sync loop, and the cluster sync loop might
+// attempt actions which cause replyFunc to be called, inducing a deadlock.
+func (iter *Iter) acquireSocket() (*mongoSocket, error) {
+ socket, err := iter.session.acquireSocket(true)
+ if err != nil {
+ return nil, err
+ }
+ if socket.Server() != iter.server {
+ // Socket server changed during iteration. This may happen
+ // with Eventual sessions, if a Refresh is done, or if a
+ // monotonic session gets a write and shifts from secondary
+ // to primary. Our cursor is in a specific server, though.
+ iter.session.m.Lock()
+ sockTimeout := iter.session.sockTimeout
+ iter.session.m.Unlock()
+ socket.Release()
+ socket, _, err = iter.server.AcquireSocket(0, sockTimeout)
+ if err != nil {
+ return nil, err
+ }
+ err := iter.session.socketLogin(socket)
+ if err != nil {
+ socket.Release()
+ return nil, err
+ }
+ }
+ return socket, nil
+}
+
+func (iter *Iter) getMore() {
+ // Increment now so that unlocking the iterator won't cause a
+ // different goroutine to get here as well.
+ iter.docsToReceive++
+ iter.m.Unlock()
+ socket, err := iter.acquireSocket()
+ iter.m.Lock()
+ if err != nil {
+ iter.err = err
+ return
+ }
+ defer socket.Release()
+
+ debugf("Iter %p requesting more documents", iter)
+ if iter.limit > 0 {
+ // The -1 below accounts for the fact docsToReceive was incremented above.
+ limit := iter.limit - int32(iter.docsToReceive-1) - int32(iter.docData.Len())
+ if limit < iter.op.limit {
+ iter.op.limit = limit
+ }
+ }
+ var op interface{}
+ if iter.findCmd {
+ op = iter.getMoreCmd()
+ } else {
+ op = &iter.op
+ }
+ if err := socket.Query(op); err != nil {
+ iter.docsToReceive--
+ iter.err = err
+ }
+}
+
+func (iter *Iter) getMoreCmd() *queryOp {
+ // TODO: Define the query statically in the Iter type, next to getMoreOp.
+ nameDot := strings.Index(iter.op.collection, ".")
+ if nameDot < 0 {
+ panic("invalid query collection name: " + iter.op.collection)
+ }
+
+ getMore := getMoreCmd{
+ CursorId: iter.op.cursorId,
+ Collection: iter.op.collection[nameDot+1:],
+ BatchSize: iter.op.limit,
+ }
+
+ var op queryOp
+ op.collection = iter.op.collection[:nameDot] + ".$cmd"
+ op.query = &getMore
+ op.limit = -1
+ op.replyFunc = iter.op.replyFunc
+ return &op
+}
+
+type countCmd struct {
+ Count string
+ Query interface{}
+ Limit int32 ",omitempty"
+ Skip int32 ",omitempty"
+}
+
+// Count returns the total number of documents in the result set.
+func (q *Query) Count() (n int, err error) {
+ q.m.Lock()
+ session := q.session
+ op := q.op
+ limit := q.limit
+ q.m.Unlock()
+
+ c := strings.Index(op.collection, ".")
+ if c < 0 {
+ return 0, errors.New("Bad collection name: " + op.collection)
+ }
+
+ dbname := op.collection[:c]
+ cname := op.collection[c+1:]
+ query := op.query
+ if query == nil {
+ query = bson.D{}
+ }
+ result := struct{ N int }{}
+ err = session.DB(dbname).Run(countCmd{cname, query, limit, op.skip}, &result)
+ return result.N, err
+}
+
+// Count returns the total number of documents in the collection.
+func (c *Collection) Count() (n int, err error) {
+ return c.Find(nil).Count()
+}
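+
+// For example, a sketch counting only the documents matching a filter
+// (the field and value are illustrative):
+//
+//     n, err := collection.Find(bson.M{"status": "active"}).Count()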
+
+type distinctCmd struct {
+ Collection string "distinct"
+ Key string
+ Query interface{} ",omitempty"
+}
+
+// Distinct unmarshals into result the list of distinct values for the given key.
+//
+// For example:
+//
+// var result []int
+// err := collection.Find(bson.M{"gender": "F"}).Distinct("age", &result)
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/Aggregation
+//
+func (q *Query) Distinct(key string, result interface{}) error {
+ q.m.Lock()
+ session := q.session
+ op := q.op // Copy.
+ q.m.Unlock()
+
+ c := strings.Index(op.collection, ".")
+ if c < 0 {
+ return errors.New("Bad collection name: " + op.collection)
+ }
+
+ dbname := op.collection[:c]
+ cname := op.collection[c+1:]
+
+ var doc struct{ Values bson.Raw }
+ err := session.DB(dbname).Run(distinctCmd{cname, key, op.query}, &doc)
+ if err != nil {
+ return err
+ }
+ return doc.Values.Unmarshal(result)
+}
+
+type mapReduceCmd struct {
+ Collection string "mapreduce"
+ Map string ",omitempty"
+ Reduce string ",omitempty"
+ Finalize string ",omitempty"
+ Limit int32 ",omitempty"
+ Out interface{}
+ Query interface{} ",omitempty"
+ Sort interface{} ",omitempty"
+ Scope interface{} ",omitempty"
+ Verbose bool ",omitempty"
+}
+
+type mapReduceResult struct {
+ Results bson.Raw
+ Result bson.Raw
+ TimeMillis int64 "timeMillis"
+ Counts struct{ Input, Emit, Output int }
+ Ok bool
+ Err string
+ Timing *MapReduceTime
+}
+
+type MapReduce struct {
+ Map string // Map Javascript function code (required)
+ Reduce string // Reduce Javascript function code (required)
+ Finalize string // Finalize Javascript function code (optional)
+ Out interface{} // Output collection name or document. If nil, results are inlined into the result parameter.
+ Scope interface{} // Optional global scope for Javascript functions
+ Verbose bool
+}
+
+type MapReduceInfo struct {
+ InputCount int // Number of documents mapped
+ EmitCount int // Number of times reduce called emit
+ OutputCount int // Number of documents in resulting collection
+ Database string // Output database, if results are not inlined
+ Collection string // Output collection, if results are not inlined
+ Time int64 // Time to run the job, in nanoseconds
+ VerboseTime *MapReduceTime // Only defined if Verbose was true
+}
+
+type MapReduceTime struct {
+ Total int64 // Total time, in nanoseconds
+ Map int64 "mapTime" // Time within map function, in nanoseconds
+ EmitLoop int64 "emitLoop" // Time within the emit/map loop, in nanoseconds
+}
+
+// MapReduce executes a map/reduce job for documents covered by the query.
+// That kind of job is suitable for very flexible bulk aggregation of data
+// performed at the server side via Javascript functions.
+//
+// Results from the job may be returned as a result of the query itself
+// through the result parameter, provided they will certainly fit in memory
+// and in a single document. If there's the possibility that the amount
+// of data might be too large, results must be stored back in an alternative
+// collection or even a separate database, by setting the Out field of the
+// provided MapReduce job. In that case, provide nil as the result parameter.
+//
+// These are some of the ways to set Out:
+//
+// nil
+// Inline results into the result parameter.
+//
+// bson.M{"replace": "mycollection"}
+// The output will be inserted into a collection which replaces any
+// existing collection with the same name.
+//
+// bson.M{"merge": "mycollection"}
+// This option will merge new data into the old output collection. In
+// other words, if the same key exists in both the result set and the
+// old collection, the new key will overwrite the old one.
+//
+// bson.M{"reduce": "mycollection"}
+// If documents exist for a given key in the result set and in the old
+// collection, then a reduce operation (using the specified reduce
+// function) will be performed on the two values and the result will be
+// written to the output collection. If a finalize function was
+// provided, this will be run after the reduce as well.
+//
+// bson.M{...., "db": "mydb"}
+// Any of the above options can have the "db" key included for doing
+// the respective action in a separate database.
+//
+// The following is a trivial example which will count the number of
+// occurrences of a field named n on each document in a collection, and
+// will return results inline:
+//
+// job := &mgo.MapReduce{
+// Map: "function() { emit(this.n, 1) }",
+// Reduce: "function(key, values) { return Array.sum(values) }",
+// }
+// var result []struct { Id int "_id"; Value int }
+// _, err := collection.Find(nil).MapReduce(job, &result)
+// if err != nil {
+// return err
+// }
+// for _, item := range result {
+// fmt.Println(item.Value)
+// }
+//
+// This function is compatible with MongoDB 1.7.4+.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/MapReduce
+//
+func (q *Query) MapReduce(job *MapReduce, result interface{}) (info *MapReduceInfo, err error) {
+ q.m.Lock()
+ session := q.session
+ op := q.op // Copy.
+ limit := q.limit
+ q.m.Unlock()
+
+ c := strings.Index(op.collection, ".")
+ if c < 0 {
+ return nil, errors.New("Bad collection name: " + op.collection)
+ }
+
+ dbname := op.collection[:c]
+ cname := op.collection[c+1:]
+
+ cmd := mapReduceCmd{
+ Collection: cname,
+ Map: job.Map,
+ Reduce: job.Reduce,
+ Finalize: job.Finalize,
+ Out: fixMROut(job.Out),
+ Scope: job.Scope,
+ Verbose: job.Verbose,
+ Query: op.query,
+ Sort: op.options.OrderBy,
+ Limit: limit,
+ }
+
+ if cmd.Out == nil {
+ cmd.Out = bson.D{{"inline", 1}}
+ }
+
+ var doc mapReduceResult
+ err = session.DB(dbname).Run(&cmd, &doc)
+ if err != nil {
+ return nil, err
+ }
+ if doc.Err != "" {
+ return nil, errors.New(doc.Err)
+ }
+
+ info = &MapReduceInfo{
+ InputCount: doc.Counts.Input,
+ EmitCount: doc.Counts.Emit,
+ OutputCount: doc.Counts.Output,
+ Time: doc.TimeMillis * 1e6,
+ }
+
+ if doc.Result.Kind == 0x02 {
+ err = doc.Result.Unmarshal(&info.Collection)
+ info.Database = dbname
+ } else if doc.Result.Kind == 0x03 {
+ var v struct{ Collection, Db string }
+ err = doc.Result.Unmarshal(&v)
+ info.Collection = v.Collection
+ info.Database = v.Db
+ }
+
+ if doc.Timing != nil {
+ info.VerboseTime = doc.Timing
+ info.VerboseTime.Total *= 1e6
+ info.VerboseTime.Map *= 1e6
+ info.VerboseTime.EmitLoop *= 1e6
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ if result != nil {
+ return info, doc.Results.Unmarshal(result)
+ }
+ return info, nil
+}
+
+// The "out" option in the MapReduce command must be ordered. This was
+// found after the implementation was accepting maps for a long time,
+// so rather than breaking the API, we'll fix the order if necessary.
+// Details about the order requirement may be seen in MongoDB's code:
+//
+// http://goo.gl/L8jwJX
+//
+func fixMROut(out interface{}) interface{} {
+ outv := reflect.ValueOf(out)
+ if outv.Kind() != reflect.Map || outv.Type().Key() != reflect.TypeOf("") {
+ return out
+ }
+ outs := make(bson.D, outv.Len())
+
+ outTypeIndex := -1
+ for i, k := range outv.MapKeys() {
+ ks := k.String()
+ outs[i].Name = ks
+ outs[i].Value = outv.MapIndex(k).Interface()
+ switch ks {
+ case "normal", "replace", "merge", "reduce", "inline":
+ outTypeIndex = i
+ }
+ }
+ if outTypeIndex > 0 {
+ outs[0], outs[outTypeIndex] = outs[outTypeIndex], outs[0]
+ }
+ return outs
+}
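+
+// To illustrate the reordering above: an Out value provided as
+//
+//     bson.M{"db": "reports", "merge": "results"}
+//
+// is rewritten into an ordered document with the action key first, roughly
+//
+//     bson.D{{"merge", "results"}, {"db", "reports"}}
+//
+// (the relative order of the remaining keys follows map iteration order and
+// is therefore unspecified).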
+
+// Change holds fields for running a findAndModify MongoDB command via
+// the Query.Apply method.
+type Change struct {
+ Update interface{} // The update document
+ Upsert bool // Whether to insert in case the document isn't found
+ Remove bool // Whether to remove the document found rather than updating
+ ReturnNew bool // Should the modified document be returned rather than the old one
+}
+
+type findModifyCmd struct {
+ Collection string "findAndModify"
+ Query, Update, Sort, Fields interface{} ",omitempty"
+ Upsert, Remove, New bool ",omitempty"
+}
+
+type valueResult struct {
+ Value bson.Raw
+ LastError LastError "lastErrorObject"
+}
+
+// Apply runs the findAndModify MongoDB command, which allows updating, upserting
+// or removing a document matching a query and atomically returning either the old
+// version (the default) or the new version of the document (when ReturnNew is true).
+// If no objects are found Apply returns ErrNotFound.
+//
+// The Sort and Select query methods affect the result of Apply. In case
+// multiple documents match the query, Sort enables selecting which document to
+// act upon by ordering it first. Select enables retrieving only a selection
+// of fields of the new or old document.
+//
+// This simple example increments a counter and prints its new value:
+//
+// change := mgo.Change{
+// Update: bson.M{"$inc": bson.M{"n": 1}},
+// ReturnNew: true,
+// }
+// info, err = col.Find(bson.M{"_id": id}).Apply(change, &doc)
+// fmt.Println(doc.N)
+//
+// This method depends on MongoDB >= 2.0 to work properly.
+//
+// Relevant documentation:
+//
+// http://www.mongodb.org/display/DOCS/findAndModify+Command
+// http://www.mongodb.org/display/DOCS/Updating
+// http://www.mongodb.org/display/DOCS/Atomic+Operations
+//
+func (q *Query) Apply(change Change, result interface{}) (info *ChangeInfo, err error) {
+ q.m.Lock()
+ session := q.session
+ op := q.op // Copy.
+ q.m.Unlock()
+
+ c := strings.Index(op.collection, ".")
+ if c < 0 {
+ return nil, errors.New("bad collection name: " + op.collection)
+ }
+
+ dbname := op.collection[:c]
+ cname := op.collection[c+1:]
+
+ cmd := findModifyCmd{
+ Collection: cname,
+ Update: change.Update,
+ Upsert: change.Upsert,
+ Remove: change.Remove,
+ New: change.ReturnNew,
+ Query: op.query,
+ Sort: op.options.OrderBy,
+ Fields: op.selector,
+ }
+
+ session = session.Clone()
+ defer session.Close()
+ session.SetMode(Strong, false)
+
+ var doc valueResult
+ for i := 0; i < maxUpsertRetries; i++ {
+ err = session.DB(dbname).Run(&cmd, &doc)
+
+ if err == nil {
+ break
+ }
+ if change.Upsert && IsDup(err) {
+ // Retry duplicate key errors on upserts.
+ // https://docs.mongodb.com/v3.2/reference/method/db.collection.update/#use-unique-indexes
+ continue
+ }
+ if qerr, ok := err.(*QueryError); ok && qerr.Message == "No matching object found" {
+ return nil, ErrNotFound
+ }
+ return nil, err
+ }
+ if doc.LastError.N == 0 {
+ return nil, ErrNotFound
+ }
+ if doc.Value.Kind != 0x0A && result != nil {
+ err = doc.Value.Unmarshal(result)
+ if err != nil {
+ return nil, err
+ }
+ }
+ info = &ChangeInfo{}
+ lerr := &doc.LastError
+ if lerr.UpdatedExisting {
+ info.Updated = lerr.N
+ info.Matched = lerr.N
+ } else if change.Remove {
+ info.Removed = lerr.N
+ info.Matched = lerr.N
+ } else if change.Upsert {
+ info.UpsertedId = lerr.UpsertedId
+ }
+ return info, nil
+}
+
+// The BuildInfo type encapsulates details about the running MongoDB server.
+//
+// Note that the VersionArray field was introduced in MongoDB 2.0+, but it is
+// internally assembled from the Version information for previous versions.
+// In both cases, VersionArray is guaranteed to have at least 4 entries.
+type BuildInfo struct {
+ Version string
+ VersionArray []int `bson:"versionArray"` // On MongoDB 2.0+; assembled from Version otherwise
+ GitVersion string `bson:"gitVersion"`
+ OpenSSLVersion string `bson:"OpenSSLVersion"`
+ SysInfo string `bson:"sysInfo"` // Deprecated and empty on MongoDB 3.2+.
+ Bits int
+ Debug bool
+ MaxObjectSize int `bson:"maxBsonObjectSize"`
+}
+
+// VersionAtLeast returns whether the BuildInfo version is greater than or
+// equal to the provided version number. If more than one number is
+// provided, numbers will be considered as major, minor, and so on.
+func (bi *BuildInfo) VersionAtLeast(version ...int) bool {
+ for i, vi := range version {
+ if i == len(bi.VersionArray) {
+ return false
+ }
+ if bivi := bi.VersionArray[i]; bivi != vi {
+ return bivi >= vi
+ }
+ }
+ return true
+}
+
+// BuildInfo retrieves the version and other details about the
+// running MongoDB server.
+func (s *Session) BuildInfo() (info BuildInfo, err error) {
+ err = s.Run(bson.D{{"buildInfo", "1"}}, &info)
+ if len(info.VersionArray) == 0 {
+ for _, a := range strings.Split(info.Version, ".") {
+ i, err := strconv.Atoi(a)
+ if err != nil {
+ break
+ }
+ info.VersionArray = append(info.VersionArray, i)
+ }
+ }
+ for len(info.VersionArray) < 4 {
+ info.VersionArray = append(info.VersionArray, 0)
+ }
+ if i := strings.IndexByte(info.GitVersion, ' '); i >= 0 {
+ // Strip off the " modules: enterprise" suffix. This is a _git version_.
+ // That information may be moved to another field if people need it.
+ info.GitVersion = info.GitVersion[:i]
+ }
+ if info.SysInfo == "deprecated" {
+ info.SysInfo = ""
+ }
+ return
+}
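+
+// A sketch of feature-gating on the server version (assuming an established
+// session; the 3.2 threshold is only an example):
+//
+//     info, err := session.BuildInfo()
+//     if err == nil && info.VersionAtLeast(3, 2) {
+//         // server understands the find/getMore command protocol
+//     }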
+
+// ---------------------------------------------------------------------------
+// Internal session handling helpers.
+
+func (s *Session) acquireSocket(slaveOk bool) (*mongoSocket, error) {
+
+ // Read-only lock to check for previously reserved socket.
+ s.m.RLock()
+ // If there is a slave socket reserved and its use is acceptable, take it as long
+ // as there isn't a master socket which would be preferred by the read preference mode.
+ if s.slaveSocket != nil && s.slaveOk && slaveOk && (s.masterSocket == nil || s.consistency != PrimaryPreferred && s.consistency != Monotonic) {
+ socket := s.slaveSocket
+ socket.Acquire()
+ s.m.RUnlock()
+ return socket, nil
+ }
+ if s.masterSocket != nil {
+ socket := s.masterSocket
+ socket.Acquire()
+ s.m.RUnlock()
+ return socket, nil
+ }
+ s.m.RUnlock()
+
+ // No go. We may have to request a new socket and change the session,
+ // so try again but with an exclusive lock now.
+ s.m.Lock()
+ defer s.m.Unlock()
+
+ if s.slaveSocket != nil && s.slaveOk && slaveOk && (s.masterSocket == nil || s.consistency != PrimaryPreferred && s.consistency != Monotonic) {
+ s.slaveSocket.Acquire()
+ return s.slaveSocket, nil
+ }
+ if s.masterSocket != nil {
+ s.masterSocket.Acquire()
+ return s.masterSocket, nil
+ }
+
+ // Still not good. We need a new socket.
+ sock, err := s.cluster().AcquireSocket(s.consistency, slaveOk && s.slaveOk, s.syncTimeout, s.sockTimeout, s.queryConfig.op.serverTags, s.poolLimit)
+ if err != nil {
+ return nil, err
+ }
+
+ // Authenticate the new socket.
+ if err = s.socketLogin(sock); err != nil {
+ sock.Release()
+ return nil, err
+ }
+
+ // Keep track of the new socket, if necessary.
+ // Note that, as a special case, if the Eventual session was
+ // not refreshed (s.slaveSocket != nil), it means the developer
+ // asked to preserve an existing reserved socket, so we'll
+ // keep a master one around too before a Refresh happens.
+ if s.consistency != Eventual || s.slaveSocket != nil {
+ s.setSocket(sock)
+ }
+
+ // Switch over a Monotonic session to the master.
+ if !slaveOk && s.consistency == Monotonic {
+ s.slaveOk = false
+ }
+
+ return sock, nil
+}
+
+// setSocket binds socket to this session.
+func (s *Session) setSocket(socket *mongoSocket) {
+ info := socket.Acquire()
+ if info.Master {
+ if s.masterSocket != nil {
+ panic("setSocket(master) with existing master socket reserved")
+ }
+ s.masterSocket = socket
+ } else {
+ if s.slaveSocket != nil {
+ panic("setSocket(slave) with existing slave socket reserved")
+ }
+ s.slaveSocket = socket
+ }
+}
+
+// unsetSocket releases any slave and/or master sockets reserved.
+func (s *Session) unsetSocket() {
+ if s.masterSocket != nil {
+ s.masterSocket.Release()
+ }
+ if s.slaveSocket != nil {
+ s.slaveSocket.Release()
+ }
+ s.masterSocket = nil
+ s.slaveSocket = nil
+}
+
+func (iter *Iter) replyFunc() replyFunc {
+ return func(err error, op *replyOp, docNum int, docData []byte) {
+ iter.m.Lock()
+ iter.docsToReceive--
+ if err != nil {
+ iter.err = err
+ debugf("Iter %p received an error: %s", iter, err.Error())
+ } else if docNum == -1 {
+ debugf("Iter %p received no documents (cursor=%d).", iter, op.cursorId)
+ if op != nil && op.cursorId != 0 {
+ // It's a tailable cursor.
+ iter.op.cursorId = op.cursorId
+ } else if op != nil && op.cursorId == 0 && op.flags&1 == 1 {
+ // Cursor likely timed out.
+ iter.err = ErrCursor
+ } else {
+ iter.err = ErrNotFound
+ }
+ } else if iter.findCmd {
+ debugf("Iter %p received reply document %d/%d (cursor=%d)", iter, docNum+1, int(op.replyDocs), op.cursorId)
+ var findReply struct {
+ Ok bool
+ Code int
+ Errmsg string
+ Cursor cursorData
+ }
+ if err := bson.Unmarshal(docData, &findReply); err != nil {
+ iter.err = err
+ } else if !findReply.Ok && findReply.Errmsg != "" {
+ iter.err = &QueryError{Code: findReply.Code, Message: findReply.Errmsg}
+ } else if len(findReply.Cursor.FirstBatch) == 0 && len(findReply.Cursor.NextBatch) == 0 {
+ iter.err = ErrNotFound
+ } else {
+ batch := findReply.Cursor.FirstBatch
+ if len(batch) == 0 {
+ batch = findReply.Cursor.NextBatch
+ }
+ rdocs := len(batch)
+ for _, raw := range batch {
+ iter.docData.Push(raw.Data)
+ }
+ iter.docsToReceive = 0
+ docsToProcess := iter.docData.Len()
+ if iter.limit == 0 || int32(docsToProcess) < iter.limit {
+ iter.docsBeforeMore = docsToProcess - int(iter.prefetch*float64(rdocs))
+ } else {
+ iter.docsBeforeMore = -1
+ }
+ iter.op.cursorId = findReply.Cursor.Id
+ }
+ } else {
+ rdocs := int(op.replyDocs)
+ if docNum == 0 {
+ iter.docsToReceive += rdocs - 1
+ docsToProcess := iter.docData.Len() + rdocs
+ if iter.limit == 0 || int32(docsToProcess) < iter.limit {
+ iter.docsBeforeMore = docsToProcess - int(iter.prefetch*float64(rdocs))
+ } else {
+ iter.docsBeforeMore = -1
+ }
+ iter.op.cursorId = op.cursorId
+ }
+ debugf("Iter %p received reply document %d/%d (cursor=%d)", iter, docNum+1, rdocs, op.cursorId)
+ iter.docData.Push(docData)
+ }
+ iter.gotReply.Broadcast()
+ iter.m.Unlock()
+ }
+}
+
+type writeCmdResult struct {
+ Ok bool
+ N int
+ NModified int `bson:"nModified"`
+ Upserted []struct {
+ Index int
+ Id interface{} `_id`
+ }
+ ConcernError writeConcernError `bson:"writeConcernError"`
+ Errors []writeCmdError `bson:"writeErrors"`
+}
+
+type writeConcernError struct {
+ Code int
+ ErrMsg string
+}
+
+type writeCmdError struct {
+ Index int
+ Code int
+ ErrMsg string
+}
+
+func (r *writeCmdResult) BulkErrorCases() []BulkErrorCase {
+ ecases := make([]BulkErrorCase, len(r.Errors))
+ for i, err := range r.Errors {
+ ecases[i] = BulkErrorCase{err.Index, &QueryError{Code: err.Code, Message: err.ErrMsg}}
+ }
+ return ecases
+}
+
+// writeOp runs the given modifying operation, potentially followed up
+// by a getLastError command in case the session is in safe mode. The
+// LastError result is made available in lerr, and if lerr.Err is set it
+// will also be returned as err.
+func (c *Collection) writeOp(op interface{}, ordered bool) (lerr *LastError, err error) {
+ s := c.Database.Session
+ socket, err := s.acquireSocket(c.Database.Name == "local")
+ if err != nil {
+ return nil, err
+ }
+ defer socket.Release()
+
+ s.m.RLock()
+ safeOp := s.safeOp
+ bypassValidation := s.bypassValidation
+ s.m.RUnlock()
+
+ if socket.ServerInfo().MaxWireVersion >= 2 {
+ // Servers with a more recent write protocol benefit from write commands.
+ if op, ok := op.(*insertOp); ok && len(op.documents) > 1000 {
+ var lerr LastError
+
+ // Maximum batch size is 1000. Must split out in separate operations for compatibility.
+ all := op.documents
+ for i := 0; i < len(all); i += 1000 {
+ l := i + 1000
+ if l > len(all) {
+ l = len(all)
+ }
+ op.documents = all[i:l]
+ oplerr, err := c.writeOpCommand(socket, safeOp, op, ordered, bypassValidation)
+ lerr.N += oplerr.N
+ lerr.modified += oplerr.modified
+ if err != nil {
+ for ei := range oplerr.ecases {
+ oplerr.ecases[ei].Index += i
+ }
+ lerr.ecases = append(lerr.ecases, oplerr.ecases...)
+ if op.flags&1 == 0 {
+ return &lerr, err
+ }
+ }
+ }
+ if len(lerr.ecases) != 0 {
+ return &lerr, lerr.ecases[0].Err
+ }
+ return &lerr, nil
+ }
+ return c.writeOpCommand(socket, safeOp, op, ordered, bypassValidation)
+ } else if updateOps, ok := op.(bulkUpdateOp); ok {
+ var lerr LastError
+ for i, updateOp := range updateOps {
+ oplerr, err := c.writeOpQuery(socket, safeOp, updateOp, ordered)
+ lerr.N += oplerr.N
+ lerr.modified += oplerr.modified
+ if err != nil {
+ lerr.ecases = append(lerr.ecases, BulkErrorCase{i, err})
+ if ordered {
+ break
+ }
+ }
+ }
+ if len(lerr.ecases) != 0 {
+ return &lerr, lerr.ecases[0].Err
+ }
+ return &lerr, nil
+ } else if deleteOps, ok := op.(bulkDeleteOp); ok {
+ var lerr LastError
+ for i, deleteOp := range deleteOps {
+ oplerr, err := c.writeOpQuery(socket, safeOp, deleteOp, ordered)
+ lerr.N += oplerr.N
+ lerr.modified += oplerr.modified
+ if err != nil {
+ lerr.ecases = append(lerr.ecases, BulkErrorCase{i, err})
+ if ordered {
+ break
+ }
+ }
+ }
+ if len(lerr.ecases) != 0 {
+ return &lerr, lerr.ecases[0].Err
+ }
+ return &lerr, nil
+ }
+ return c.writeOpQuery(socket, safeOp, op, ordered)
+}
+
+func (c *Collection) writeOpQuery(socket *mongoSocket, safeOp *queryOp, op interface{}, ordered bool) (lerr *LastError, err error) {
+ if safeOp == nil {
+ return nil, socket.Query(op)
+ }
+
+ var mutex sync.Mutex
+ var replyData []byte
+ var replyErr error
+ mutex.Lock()
+ query := *safeOp // Copy the data.
+ query.collection = c.Database.Name + ".$cmd"
+ query.replyFunc = func(err error, reply *replyOp, docNum int, docData []byte) {
+ replyData = docData
+ replyErr = err
+ mutex.Unlock()
+ }
+ err = socket.Query(op, &query)
+ if err != nil {
+ return nil, err
+ }
+ mutex.Lock() // Wait.
+ if replyErr != nil {
+ return nil, replyErr // XXX TESTME
+ }
+ if hasErrMsg(replyData) {
+ // Looks like getLastError itself failed.
+ err = checkQueryError(query.collection, replyData)
+ if err != nil {
+ return nil, err
+ }
+ }
+ result := &LastError{}
+ bson.Unmarshal(replyData, &result)
+ debugf("Result from writing query: %#v", result)
+ if result.Err != "" {
+ result.ecases = []BulkErrorCase{{Index: 0, Err: result}}
+ if insert, ok := op.(*insertOp); ok && len(insert.documents) > 1 {
+ result.ecases[0].Index = -1
+ }
+ return result, result
+ }
+ // With MongoDB <2.6 we don't know how many actually changed, so make it the same as matched.
+ result.modified = result.N
+ return result, nil
+}
+
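+// writeOpCommand performs the modifying operation via an insert, update, or
+// delete write command, as supported by servers with wire version 2 or later
+// (MongoDB 2.6+), and converts the command result into a LastError value.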
+func (c *Collection) writeOpCommand(socket *mongoSocket, safeOp *queryOp, op interface{}, ordered, bypassValidation bool) (lerr *LastError, err error) {
+ var writeConcern interface{}
+ if safeOp == nil {
+ writeConcern = bson.D{{"w", 0}}
+ } else {
+ writeConcern = safeOp.query.(*getLastError)
+ }
+
+ var cmd bson.D
+ switch op := op.(type) {
+ case *insertOp:
+ // http://docs.mongodb.org/manual/reference/command/insert
+ cmd = bson.D{
+ {"insert", c.Name},
+ {"documents", op.documents},
+ {"writeConcern", writeConcern},
+ {"ordered", op.flags&1 == 0},
+ }
+ case *updateOp:
+ // http://docs.mongodb.org/manual/reference/command/update
+ cmd = bson.D{
+ {"update", c.Name},
+ {"updates", []interface{}{op}},
+ {"writeConcern", writeConcern},
+ {"ordered", ordered},
+ }
+ case bulkUpdateOp:
+ // http://docs.mongodb.org/manual/reference/command/update
+ cmd = bson.D{
+ {"update", c.Name},
+ {"updates", op},
+ {"writeConcern", writeConcern},
+ {"ordered", ordered},
+ }
+ case *deleteOp:
+ // http://docs.mongodb.org/manual/reference/command/delete
+ cmd = bson.D{
+ {"delete", c.Name},
+ {"deletes", []interface{}{op}},
+ {"writeConcern", writeConcern},
+ {"ordered", ordered},
+ }
+ case bulkDeleteOp:
+ // http://docs.mongodb.org/manual/reference/command/delete
+ cmd = bson.D{
+ {"delete", c.Name},
+ {"deletes", op},
+ {"writeConcern", writeConcern},
+ {"ordered", ordered},
+ }
+ }
+ if bypassValidation {
+ cmd = append(cmd, bson.DocElem{"bypassDocumentValidation", true})
+ }
+
+ var result writeCmdResult
+ err = c.Database.run(socket, cmd, &result)
+ debugf("Write command result: %#v (err=%v)", result, err)
+ ecases := result.BulkErrorCases()
+ lerr = &LastError{
+ UpdatedExisting: result.N > 0 && len(result.Upserted) == 0,
+ N: result.N,
+
+ modified: result.NModified,
+ ecases: ecases,
+ }
+ if len(result.Upserted) > 0 {
+ lerr.UpsertedId = result.Upserted[0].Id
+ }
+ if len(result.Errors) > 0 {
+ e := result.Errors[0]
+ lerr.Code = e.Code
+ lerr.Err = e.ErrMsg
+ err = lerr
+ } else if result.ConcernError.Code != 0 {
+ e := result.ConcernError
+ lerr.Code = e.Code
+ lerr.Err = e.ErrMsg
+ err = lerr
+ }
+
+ if err == nil && safeOp == nil {
+ return nil, nil
+ }
+ return lerr, err
+}
+
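+// hasErrMsg reports whether the raw BSON document in d contains a string
+// element named "errmsg".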
+func hasErrMsg(d []byte) bool {
+ l := len(d)
+ for i := 0; i+8 < l; i++ {
+ if d[i] == '\x02' && d[i+1] == 'e' && d[i+2] == 'r' && d[i+3] == 'r' && d[i+4] == 'm' && d[i+5] == 's' && d[i+6] == 'g' && d[i+7] == '\x00' {
+ return true
+ }
+ }
+ return false
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/session_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/session_test.go
new file mode 100644
index 00000000000..a89279d38b1
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/session_test.go
@@ -0,0 +1,4216 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo_test
+
+import (
+ "flag"
+ "fmt"
+ "math"
+ "os"
+ "runtime"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/bson"
+)
+
+func (s *S) TestRunString(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ result := struct{ Ok int }{}
+ err = session.Run("ping", &result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Ok, Equals, 1)
+}
+
+func (s *S) TestRunValue(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ result := struct{ Ok int }{}
+ err = session.Run(M{"ping": 1}, &result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Ok, Equals, 1)
+}
+
+func (s *S) TestPing(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Just ensure the nonce has been received.
+ result := struct{}{}
+ err = session.Run("ping", &result)
+
+ mgo.ResetStats()
+
+ err = session.Ping()
+ c.Assert(err, IsNil)
+
+ // Pretty boring.
+ stats := mgo.GetStats()
+ c.Assert(stats.SentOps, Equals, 1)
+ c.Assert(stats.ReceivedOps, Equals, 1)
+}
+
+func (s *S) TestDialIPAddress(c *C) {
+ session, err := mgo.Dial("127.0.0.1:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ if os.Getenv("NOIPV6") != "1" {
+ session, err = mgo.Dial("[::1%]:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ }
+}
+
+func (s *S) TestURLSingle(c *C) {
+ session, err := mgo.Dial("mongodb://localhost:40001/")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ result := struct{ Ok int }{}
+ err = session.Run("ping", &result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Ok, Equals, 1)
+}
+
+func (s *S) TestURLMany(c *C) {
+ session, err := mgo.Dial("mongodb://localhost:40011,localhost:40012/")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ result := struct{ Ok int }{}
+ err = session.Run("ping", &result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Ok, Equals, 1)
+}
+
+func (s *S) TestURLParsing(c *C) {
+ urls := []string{
+ "localhost:40001?foo=1&bar=2",
+ "localhost:40001?foo=1;bar=2",
+ }
+ for _, url := range urls {
+ session, err := mgo.Dial(url)
+ if session != nil {
+ session.Close()
+ }
+ c.Assert(err, ErrorMatches, "unsupported connection URL option: (foo=1|bar=2)")
+ }
+}
+
+func (s *S) TestInsertFindOne(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1, "b": 2})
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"a": 1, "b": 3})
+ c.Assert(err, IsNil)
+
+ result := struct{ A, B int }{}
+
+ err = coll.Find(M{"a": 1}).Sort("b").One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.A, Equals, 1)
+ c.Assert(result.B, Equals, 2)
+
+ err = coll.Find(M{"a": 1}).Sort("-b").One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.A, Equals, 1)
+ c.Assert(result.B, Equals, 3)
+}
+
+func (s *S) TestInsertFindOneNil(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Find(nil).One(nil)
+ c.Assert(err, ErrorMatches, "unauthorized.*|not authorized.*")
+}
+
+func (s *S) TestInsertFindOneMap(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1, "b": 2})
+ c.Assert(err, IsNil)
+ result := make(M)
+ err = coll.Find(M{"a": 1}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["a"], Equals, 1)
+ c.Assert(result["b"], Equals, 2)
+}
+
+func (s *S) TestInsertFindAll(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1, "b": 2})
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"a": 3, "b": 4})
+ c.Assert(err, IsNil)
+
+ type R struct{ A, B int }
+ var result []R
+
+ assertResult := func() {
+ c.Assert(len(result), Equals, 2)
+ c.Assert(result[0].A, Equals, 1)
+ c.Assert(result[0].B, Equals, 2)
+ c.Assert(result[1].A, Equals, 3)
+ c.Assert(result[1].B, Equals, 4)
+ }
+
+ // nil slice
+ err = coll.Find(nil).Sort("a").All(&result)
+ c.Assert(err, IsNil)
+ assertResult()
+
+ // Previously allocated slice
+ allocd := make([]R, 5)
+ result = allocd
+ err = coll.Find(nil).Sort("a").All(&result)
+ c.Assert(err, IsNil)
+ assertResult()
+
+ // Ensure result is backed by the originally allocated array
+ c.Assert(&result[0], Equals, &allocd[0])
+
+ // Non-pointer slice error
+ f := func() { coll.Find(nil).All(result) }
+ c.Assert(f, Panics, "result argument must be a slice address")
+
+ // Non-slice error
+ f = func() { coll.Find(nil).All(new(int)) }
+ c.Assert(f, Panics, "result argument must be a slice address")
+}
+
+func (s *S) TestFindRef(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db1 := session.DB("db1")
+ db1col1 := db1.C("col1")
+
+ db2 := session.DB("db2")
+ db2col1 := db2.C("col1")
+
+ err = db1col1.Insert(M{"_id": 1, "n": 1})
+ c.Assert(err, IsNil)
+ err = db1col1.Insert(M{"_id": 2, "n": 2})
+ c.Assert(err, IsNil)
+ err = db2col1.Insert(M{"_id": 2, "n": 3})
+ c.Assert(err, IsNil)
+
+ result := struct{ N int }{}
+
+ ref1 := &mgo.DBRef{Collection: "col1", Id: 1}
+ ref2 := &mgo.DBRef{Collection: "col1", Id: 2, Database: "db2"}
+
+ err = db1.FindRef(ref1).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 1)
+
+ err = db1.FindRef(ref2).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 3)
+
+ err = db2.FindRef(ref1).One(&result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = db2.FindRef(ref2).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 3)
+
+ err = session.FindRef(ref2).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 3)
+
+ f := func() { session.FindRef(ref1).One(&result) }
+ c.Assert(f, PanicMatches, "Can't resolve database for &mgo.DBRef{Collection:\"col1\", Id:1, Database:\"\"}")
+}
+
+func (s *S) TestDatabaseAndCollectionNames(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db1 := session.DB("db1")
+ db1col1 := db1.C("col1")
+ db1col2 := db1.C("col2")
+
+ db2 := session.DB("db2")
+ db2col1 := db2.C("col3")
+
+ err = db1col1.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+ err = db1col2.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+ err = db2col1.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+
+ names, err := session.DatabaseNames()
+ c.Assert(err, IsNil)
+ c.Assert(filterDBs(names), DeepEquals, []string{"db1", "db2"})
+
+ // Try to exercise cursor logic. 2.8.0-rc3 still ignores this.
+ session.SetBatch(2)
+
+ names, err = db1.CollectionNames()
+ c.Assert(err, IsNil)
+ c.Assert(names, DeepEquals, []string{"col1", "col2", "system.indexes"})
+
+ names, err = db2.CollectionNames()
+ c.Assert(err, IsNil)
+ c.Assert(names, DeepEquals, []string{"col3", "system.indexes"})
+}
+
+func (s *S) TestSelect(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ coll.Insert(M{"a": 1, "b": 2})
+
+ result := struct{ A, B int }{}
+
+ err = coll.Find(M{"a": 1}).Select(M{"b": 1}).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.A, Equals, 0)
+ c.Assert(result.B, Equals, 2)
+}
+
+func (s *S) TestInlineMap(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ var v, result1 struct {
+ A int
+ M map[string]int ",inline"
+ }
+
+ v.A = 1
+ v.M = map[string]int{"b": 2}
+ err = coll.Insert(v)
+ c.Assert(err, IsNil)
+
+ noId := M{"_id": 0}
+
+ err = coll.Find(nil).Select(noId).One(&result1)
+ c.Assert(err, IsNil)
+ c.Assert(result1.A, Equals, 1)
+ c.Assert(result1.M, DeepEquals, map[string]int{"b": 2})
+
+ var result2 M
+ err = coll.Find(nil).Select(noId).One(&result2)
+ c.Assert(err, IsNil)
+ c.Assert(result2, DeepEquals, M{"a": 1, "b": 2})
+}
+
+func (s *S) TestUpdate(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err := coll.Insert(M{"k": n, "n": n})
+ c.Assert(err, IsNil)
+ }
+
+ // No changes is a no-op and shouldn't return an error.
+ err = coll.Update(M{"k": 42}, M{"$set": M{"n": 42}})
+ c.Assert(err, IsNil)
+
+ err = coll.Update(M{"k": 42}, M{"$inc": M{"n": 1}})
+ c.Assert(err, IsNil)
+
+ result := make(M)
+ err = coll.Find(M{"k": 42}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 43)
+
+ err = coll.Update(M{"k": 47}, M{"k": 47, "n": 47})
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = coll.Find(M{"k": 47}).One(result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+}
+
+func (s *S) TestUpdateId(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err := coll.Insert(M{"_id": n, "n": n})
+ c.Assert(err, IsNil)
+ }
+
+ err = coll.UpdateId(42, M{"$inc": M{"n": 1}})
+ c.Assert(err, IsNil)
+
+ result := make(M)
+ err = coll.FindId(42).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 43)
+
+ err = coll.UpdateId(47, M{"k": 47, "n": 47})
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = coll.FindId(47).One(result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+}
+
+func (s *S) TestUpdateNil(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"k": 42, "n": 42})
+ c.Assert(err, IsNil)
+ err = coll.Update(nil, M{"$inc": M{"n": 1}})
+ c.Assert(err, IsNil)
+
+ result := make(M)
+ err = coll.Find(M{"k": 42}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 43)
+
+ err = coll.Insert(M{"k": 45, "n": 45})
+ c.Assert(err, IsNil)
+ _, err = coll.UpdateAll(nil, M{"$inc": M{"n": 1}})
+ c.Assert(err, IsNil)
+
+ err = coll.Find(M{"k": 42}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 44)
+ err = coll.Find(M{"k": 45}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 46)
+}
+
+func (s *S) TestUpsert(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err := coll.Insert(bson.D{{"k", n}, {"n", n}})
+ c.Assert(err, IsNil)
+ }
+
+ info, err := coll.Upsert(M{"k": 42}, bson.D{{"k", 42}, {"n", 24}})
+ c.Assert(err, IsNil)
+ c.Assert(info.Updated, Equals, 1)
+ c.Assert(info.Matched, Equals, 1)
+ c.Assert(info.UpsertedId, IsNil)
+
+ result := M{}
+ err = coll.Find(M{"k": 42}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 24)
+
+ // Match but do not change.
+ info, err = coll.Upsert(M{"k": 42}, bson.D{{"k", 42}, {"n", 24}})
+ c.Assert(err, IsNil)
+ c.Assert(info.Updated, Equals, 1) // On 2.6+ this feels like a server mistake.
+ c.Assert(info.Matched, Equals, 1)
+ c.Assert(info.UpsertedId, IsNil)
+
+ // Insert with internally created id.
+ info, err = coll.Upsert(M{"k": 47}, M{"k": 47, "n": 47})
+ c.Assert(err, IsNil)
+ c.Assert(info.Updated, Equals, 0)
+ c.Assert(info.Matched, Equals, 0)
+ c.Assert(info.UpsertedId, NotNil)
+
+ err = coll.Find(M{"k": 47}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 47)
+
+ result = M{}
+ err = coll.Find(M{"_id": info.UpsertedId}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 47)
+
+ // Insert with provided id.
+ info, err = coll.Upsert(M{"k": 48}, M{"k": 48, "n": 48, "_id": 48})
+ c.Assert(err, IsNil)
+ c.Assert(info.Updated, Equals, 0)
+ c.Assert(info.Matched, Equals, 0)
+ if s.versionAtLeast(2, 6) {
+ c.Assert(info.UpsertedId, Equals, 48)
+ } else {
+ c.Assert(info.UpsertedId, IsNil) // Unfortunate, but that's what Mongo gave us.
+ }
+
+ err = coll.Find(M{"k": 48}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 48)
+}
+
+func (s *S) TestUpsertId(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err := coll.Insert(M{"_id": n, "n": n})
+ c.Assert(err, IsNil)
+ }
+
+ info, err := coll.UpsertId(42, M{"n": 24})
+ c.Assert(err, IsNil)
+ c.Assert(info.Updated, Equals, 1)
+ c.Assert(info.UpsertedId, IsNil)
+
+ result := M{}
+ err = coll.FindId(42).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 24)
+
+ info, err = coll.UpsertId(47, M{"_id": 47, "n": 47})
+ c.Assert(err, IsNil)
+ c.Assert(info.Updated, Equals, 0)
+ if s.versionAtLeast(2, 6) {
+ c.Assert(info.UpsertedId, Equals, 47)
+ } else {
+ c.Assert(info.UpsertedId, IsNil)
+ }
+
+ err = coll.FindId(47).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 47)
+}
+
+func (s *S) TestUpdateAll(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err := coll.Insert(M{"k": n, "n": n})
+ c.Assert(err, IsNil)
+ }
+
+ info, err := coll.UpdateAll(M{"k": M{"$gt": 42}}, M{"$unset": M{"missing": 1}})
+ c.Assert(err, IsNil)
+ if s.versionAtLeast(2, 6) {
+ c.Assert(info.Updated, Equals, 0)
+ c.Assert(info.Matched, Equals, 4)
+ } else {
+ c.Assert(info.Updated, Equals, 4)
+ c.Assert(info.Matched, Equals, 4)
+ }
+
+ info, err = coll.UpdateAll(M{"k": M{"$gt": 42}}, M{"$inc": M{"n": 1}})
+ c.Assert(err, IsNil)
+ c.Assert(info.Updated, Equals, 4)
+ c.Assert(info.Matched, Equals, 4)
+
+ result := make(M)
+ err = coll.Find(M{"k": 42}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 42)
+
+ err = coll.Find(M{"k": 43}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 44)
+
+ err = coll.Find(M{"k": 44}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 45)
+
+ if !s.versionAtLeast(2, 6) {
+ // 2.6 made this invalid.
+ info, err = coll.UpdateAll(M{"k": 47}, M{"k": 47, "n": 47})
+ c.Assert(err, Equals, nil)
+ c.Assert(info.Updated, Equals, 0)
+ }
+}
+
+func (s *S) TestRemove(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ err = coll.Remove(M{"n": M{"$gt": 42}})
+ c.Assert(err, IsNil)
+
+ result := &struct{ N int }{}
+ err = coll.Find(M{"n": 42}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 42)
+
+ err = coll.Find(M{"n": 43}).One(result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = coll.Find(M{"n": 44}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 44)
+}
+
+func (s *S) TestRemoveId(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"_id": 40}, M{"_id": 41}, M{"_id": 42})
+ c.Assert(err, IsNil)
+
+ err = coll.RemoveId(41)
+ c.Assert(err, IsNil)
+
+ c.Assert(coll.FindId(40).One(nil), IsNil)
+ c.Assert(coll.FindId(41).One(nil), Equals, mgo.ErrNotFound)
+ c.Assert(coll.FindId(42).One(nil), IsNil)
+}
+
+func (s *S) TestRemoveUnsafe(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetSafe(nil)
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"_id": 40}, M{"_id": 41}, M{"_id": 42})
+ c.Assert(err, IsNil)
+
+ err = coll.RemoveId(41)
+ c.Assert(err, IsNil)
+
+ c.Assert(coll.FindId(40).One(nil), IsNil)
+ c.Assert(coll.FindId(41).One(nil), Equals, mgo.ErrNotFound)
+ c.Assert(coll.FindId(42).One(nil), IsNil)
+}
+
+func (s *S) TestRemoveAll(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ info, err := coll.RemoveAll(M{"n": M{"$gt": 42}})
+ c.Assert(err, IsNil)
+ c.Assert(info.Updated, Equals, 0)
+ c.Assert(info.Removed, Equals, 4)
+ c.Assert(info.Matched, Equals, 4)
+ c.Assert(info.UpsertedId, IsNil)
+
+ result := &struct{ N int }{}
+ err = coll.Find(M{"n": 42}).One(result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 42)
+
+ err = coll.Find(M{"n": 43}).One(result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = coll.Find(M{"n": 44}).One(result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ info, err = coll.RemoveAll(nil)
+ c.Assert(err, IsNil)
+ c.Assert(info.Updated, Equals, 0)
+ c.Assert(info.Removed, Equals, 3)
+ c.Assert(info.Matched, Equals, 3)
+ c.Assert(info.UpsertedId, IsNil)
+
+ n, err := coll.Find(nil).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 0)
+}
+
+func (s *S) TestDropDatabase(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db1 := session.DB("db1")
+ db1.C("col").Insert(M{"_id": 1})
+
+ db2 := session.DB("db2")
+ db2.C("col").Insert(M{"_id": 1})
+
+ err = db1.DropDatabase()
+ c.Assert(err, IsNil)
+
+ names, err := session.DatabaseNames()
+ c.Assert(err, IsNil)
+ c.Assert(filterDBs(names), DeepEquals, []string{"db2"})
+
+ err = db2.DropDatabase()
+ c.Assert(err, IsNil)
+
+ names, err = session.DatabaseNames()
+ c.Assert(err, IsNil)
+ c.Assert(filterDBs(names), DeepEquals, []string{})
+}
+
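+// filterDBs removes the admin and local databases from dbs in place and
+// returns the filtered slice.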
+func filterDBs(dbs []string) []string {
+ var i int
+ for _, name := range dbs {
+ switch name {
+ case "admin", "local":
+ default:
+ dbs[i] = name
+ i++
+ }
+ }
+ if len(dbs) == 0 {
+ return []string{}
+ }
+ return dbs[:i]
+}
+
+func (s *S) TestDropCollection(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("db1")
+ db.C("col1").Insert(M{"_id": 1})
+ db.C("col2").Insert(M{"_id": 1})
+
+ err = db.C("col1").DropCollection()
+ c.Assert(err, IsNil)
+
+ names, err := db.CollectionNames()
+ c.Assert(err, IsNil)
+ c.Assert(names, DeepEquals, []string{"col2", "system.indexes"})
+
+ err = db.C("col2").DropCollection()
+ c.Assert(err, IsNil)
+
+ names, err = db.CollectionNames()
+ c.Assert(err, IsNil)
+ c.Assert(names, DeepEquals, []string{"system.indexes"})
+}
+
+func (s *S) TestCreateCollectionCapped(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ info := &mgo.CollectionInfo{
+ Capped: true,
+ MaxBytes: 1024,
+ MaxDocs: 3,
+ }
+ err = coll.Create(info)
+ c.Assert(err, IsNil)
+
+ ns := []int{1, 2, 3, 4, 5}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ n, err := coll.Find(nil).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 3)
+}
+
+func (s *S) TestCreateCollectionNoIndex(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ info := &mgo.CollectionInfo{
+ DisableIdIndex: true,
+ }
+ err = coll.Create(info)
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+
+ indexes, err := coll.Indexes()
+ c.Assert(indexes, HasLen, 0)
+}
+
+func (s *S) TestCreateCollectionForceIndex(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ info := &mgo.CollectionInfo{
+ ForceIdIndex: true,
+ Capped: true,
+ MaxBytes: 1024,
+ }
+ err = coll.Create(info)
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+
+ indexes, err := coll.Indexes()
+ c.Assert(indexes, HasLen, 1)
+}
+
+func (s *S) TestCreateCollectionValidator(c *C) {
+ if !s.versionAtLeast(3, 2) {
+ c.Skip("validation depends on MongoDB 3.2+")
+ }
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+ coll := db.C("mycoll")
+
+ // Test Validator.
+ info := &mgo.CollectionInfo{
+ Validator: M{"b": M{"$exists": true}},
+ }
+ err = coll.Create(info)
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, ErrorMatches, "Document failed validation")
+ err = coll.DropCollection()
+ c.Assert(err, IsNil)
+
+ // Test ValidatorAction.
+ info = &mgo.CollectionInfo{
+ Validator: M{"b": M{"$exists": true}},
+ ValidationAction: "warn",
+ }
+ err = coll.Create(info)
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+ err = coll.DropCollection()
+ c.Assert(err, IsNil)
+
+ // Test ValidationLevel.
+ info = &mgo.CollectionInfo{
+ Validator: M{"a": M{"$exists": true}},
+ ValidationLevel: "moderate",
+ }
+ err = coll.Create(info)
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+ err = db.Run(bson.D{{"collMod", "mycoll"}, {"validator", M{"b": M{"$exists": true}}}}, nil)
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"a": 2})
+ c.Assert(err, ErrorMatches, "Document failed validation")
+ err = coll.Update(M{"a": 1}, M{"c": 1})
+ c.Assert(err, IsNil)
+ err = coll.DropCollection()
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestCreateCollectionStorageEngine(c *C) {
+ if !s.versionAtLeast(3, 0) {
+ c.Skip("storageEngine option depends on MongoDB 3.0+")
+ }
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+ coll := db.C("mycoll")
+
+ info := &mgo.CollectionInfo{
+ StorageEngine: M{"test": M{}},
+ }
+ err = coll.Create(info)
+ c.Assert(err, ErrorMatches, "test is not a registered storage engine for this server")
+}
+
+func (s *S) TestIsDupValues(c *C) {
+ c.Assert(mgo.IsDup(nil), Equals, false)
+ c.Assert(mgo.IsDup(&mgo.LastError{Code: 1}), Equals, false)
+ c.Assert(mgo.IsDup(&mgo.QueryError{Code: 1}), Equals, false)
+ c.Assert(mgo.IsDup(&mgo.LastError{Code: 11000}), Equals, true)
+ c.Assert(mgo.IsDup(&mgo.QueryError{Code: 11000}), Equals, true)
+ c.Assert(mgo.IsDup(&mgo.LastError{Code: 11001}), Equals, true)
+ c.Assert(mgo.IsDup(&mgo.QueryError{Code: 11001}), Equals, true)
+ c.Assert(mgo.IsDup(&mgo.LastError{Code: 12582}), Equals, true)
+ c.Assert(mgo.IsDup(&mgo.QueryError{Code: 12582}), Equals, true)
+ lerr := &mgo.LastError{Code: 16460, Err: "error inserting 1 documents to shard ... caused by :: E11000 duplicate key error index: ..."}
+ c.Assert(mgo.IsDup(lerr), Equals, true)
+}
+
+func (s *S) TestIsDupPrimary(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, ErrorMatches, ".*duplicate key error.*")
+ c.Assert(mgo.IsDup(err), Equals, true)
+}
+
+func (s *S) TestIsDupUnique(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ index := mgo.Index{
+ Key: []string{"a", "b"},
+ Unique: true,
+ }
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.EnsureIndex(index)
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"a": 1, "b": 1})
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"a": 1, "b": 1})
+ c.Assert(err, ErrorMatches, ".*duplicate key error.*")
+ c.Assert(mgo.IsDup(err), Equals, true)
+}
+
+func (s *S) TestIsDupCapped(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ info := &mgo.CollectionInfo{
+ ForceIdIndex: true,
+ Capped: true,
+ MaxBytes: 1024,
+ }
+ err = coll.Create(info)
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"_id": 1})
+ // The error was different for capped collections before 2.6.
+ c.Assert(err, ErrorMatches, ".*duplicate key.*")
+ // The issue is reduced by using IsDup.
+ c.Assert(mgo.IsDup(err), Equals, true)
+}
+
+func (s *S) TestIsDupFindAndModify(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.EnsureIndex(mgo.Index{Key: []string{"n"}, Unique: true})
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"n": 2})
+ c.Assert(err, IsNil)
+ _, err = coll.Find(M{"n": 1}).Apply(mgo.Change{Update: M{"$inc": M{"n": 1}}}, bson.M{})
+ c.Assert(err, ErrorMatches, ".*duplicate key error.*")
+ c.Assert(mgo.IsDup(err), Equals, true)
+}
+
+func (s *S) TestFindAndModify(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 42})
+
+ session.SetMode(mgo.Monotonic, true)
+
+ result := M{}
+ info, err := coll.Find(M{"n": 42}).Apply(mgo.Change{Update: M{"$inc": M{"n": 1}}}, result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 42)
+ c.Assert(info.Updated, Equals, 1)
+ c.Assert(info.Matched, Equals, 1)
+ c.Assert(info.Removed, Equals, 0)
+ c.Assert(info.UpsertedId, IsNil)
+
+ // A nil result parameter should be acceptable.
+ info, err = coll.Find(M{"n": 43}).Apply(mgo.Change{Update: M{"$unset": M{"missing": 1}}}, nil)
+ c.Assert(err, IsNil)
+ c.Assert(info.Updated, Equals, 1) // On 2.6+ this feels like a server mistake.
+ c.Assert(info.Matched, Equals, 1)
+ c.Assert(info.Removed, Equals, 0)
+ c.Assert(info.UpsertedId, IsNil)
+
+ result = M{}
+ info, err = coll.Find(M{"n": 43}).Apply(mgo.Change{Update: M{"$inc": M{"n": 1}}, ReturnNew: true}, result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 44)
+ c.Assert(info.Updated, Equals, 1)
+ c.Assert(info.Removed, Equals, 0)
+ c.Assert(info.UpsertedId, IsNil)
+
+ result = M{}
+ info, err = coll.Find(M{"n": 50}).Apply(mgo.Change{Upsert: true, Update: M{"n": 51, "o": 52}}, result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], IsNil)
+ c.Assert(info.Updated, Equals, 0)
+ c.Assert(info.Removed, Equals, 0)
+ c.Assert(info.UpsertedId, NotNil)
+
+ result = M{}
+ info, err = coll.Find(nil).Sort("-n").Apply(mgo.Change{Update: M{"$inc": M{"n": 1}}, ReturnNew: true}, result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], Equals, 52)
+ c.Assert(info.Updated, Equals, 1)
+ c.Assert(info.Removed, Equals, 0)
+ c.Assert(info.UpsertedId, IsNil)
+
+ result = M{}
+ info, err = coll.Find(M{"n": 52}).Select(M{"o": 1}).Apply(mgo.Change{Remove: true}, result)
+ c.Assert(err, IsNil)
+ c.Assert(result["n"], IsNil)
+ c.Assert(result["o"], Equals, 52)
+ c.Assert(info.Updated, Equals, 0)
+ c.Assert(info.Removed, Equals, 1)
+ c.Assert(info.UpsertedId, IsNil)
+
+ result = M{}
+ info, err = coll.Find(M{"n": 60}).Apply(mgo.Change{Remove: true}, result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+ c.Assert(len(result), Equals, 0)
+ c.Assert(info, IsNil)
+}
+
+func (s *S) TestFindAndModifyBug997828(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": "not-a-number"})
+
+ result := make(M)
+ _, err = coll.Find(M{"n": "not-a-number"}).Apply(mgo.Change{Update: M{"$inc": M{"n": 1}}}, result)
+ c.Assert(err, ErrorMatches, `(exception: )?Cannot apply \$inc .*`)
+ if s.versionAtLeast(2, 1) {
+ qerr, _ := err.(*mgo.QueryError)
+ c.Assert(qerr, NotNil, Commentf("err: %#v", err))
+ if s.versionAtLeast(2, 6) {
+ // Oh, the dance of error codes. :-(
+ c.Assert(qerr.Code, Equals, 16837)
+ } else {
+ c.Assert(qerr.Code, Equals, 10140)
+ }
+ } else {
+ lerr, _ := err.(*mgo.LastError)
+ c.Assert(lerr, NotNil, Commentf("err: %#v", err))
+ c.Assert(lerr.Code, Equals, 10140)
+ }
+}
+
+func (s *S) TestFindAndModifyErrmsgDoc(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"errmsg": "an error"})
+
+ var result M
+ _, err = coll.Find(M{}).Apply(mgo.Change{Update: M{"$set": M{"n": 1}}}, &result)
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestCountCollection(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ n, err := coll.Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 3)
+}
+
+func (s *S) TestCountQuery(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ n, err := coll.Find(M{"n": M{"$gt": 40}}).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 2)
+}
+
+func (s *S) TestCountQuerySorted(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ n, err := coll.Find(M{"n": M{"$gt": 40}}).Sort("n").Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 2)
+}
+
+func (s *S) TestCountSkipLimit(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ n, err := coll.Find(nil).Skip(1).Limit(3).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 3)
+
+ n, err = coll.Find(nil).Skip(1).Limit(5).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 4)
+}
+
+func (s *S) TestQueryExplain(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ m := M{}
+ query := coll.Find(nil).Limit(2)
+ err = query.Explain(m)
+ c.Assert(err, IsNil)
+ if m["queryPlanner"] != nil {
+ c.Assert(m["executionStats"].(M)["totalDocsExamined"], Equals, 2)
+ } else {
+ c.Assert(m["cursor"], Equals, "BasicCursor")
+ c.Assert(m["nscanned"], Equals, 2)
+ c.Assert(m["n"], Equals, 2)
+ }
+
+ n := 0
+ var result M
+ iter := query.Iter()
+ for iter.Next(&result) {
+ n++
+ }
+ c.Assert(iter.Close(), IsNil)
+ c.Assert(n, Equals, 2)
+}
+
+func (s *S) TestQuerySetMaxScan(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ query := coll.Find(nil).SetMaxScan(2)
+ var result []M
+ err = query.All(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result, HasLen, 2)
+}
+
+func (s *S) TestQuerySetMaxTime(c *C) {
+ if !s.versionAtLeast(2, 6) {
+ c.Skip("SetMaxTime only supported in 2.6+")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ coll := session.DB("mydb").C("mycoll")
+
+ for i := 0; i < 1000; i++ {
+ err := coll.Insert(M{"n": i})
+ c.Assert(err, IsNil)
+ }
+
+ query := coll.Find(nil)
+ query.SetMaxTime(1 * time.Millisecond)
+ query.Batch(2)
+ var result []M
+ err = query.All(&result)
+ c.Assert(err, ErrorMatches, "operation exceeded time limit")
+}
+
+func (s *S) TestQueryHint(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ coll.EnsureIndexKey("a")
+
+ m := M{}
+ err = coll.Find(nil).Hint("a").Explain(m)
+ c.Assert(err, IsNil)
+
+ if m["queryPlanner"] != nil {
+ m = m["queryPlanner"].(M)
+ m = m["winningPlan"].(M)
+ m = m["inputStage"].(M)
+ c.Assert(m["indexName"], Equals, "a_1")
+ } else {
+ c.Assert(m["indexBounds"], NotNil)
+ c.Assert(m["indexBounds"].(M)["a"], NotNil)
+ }
+}
+
+func (s *S) TestQueryComment(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+ coll := db.C("mycoll")
+
+ err = db.Run(bson.M{"profile": 2}, nil)
+ c.Assert(err, IsNil)
+
+ ns := []int{40, 41, 42}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ query := coll.Find(bson.M{"n": 41})
+ query.Comment("some comment")
+ err = query.One(nil)
+ c.Assert(err, IsNil)
+
+ query = coll.Find(bson.M{"n": 41})
+ query.Comment("another comment")
+ err = query.One(nil)
+ c.Assert(err, IsNil)
+
+ commentField := "query.$comment"
+ nField := "query.$query.n"
+ if s.versionAtLeast(3, 2) {
+ commentField = "query.comment"
+ nField = "query.filter.n"
+ }
+ n, err := session.DB("mydb").C("system.profile").Find(bson.M{nField: 41, commentField: "some comment"}).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 1)
+}
+
+func (s *S) TestFindOneNotFound(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ result := struct{ A, B int }{}
+ err = coll.Find(M{"a": 1}).One(&result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+ c.Assert(err, ErrorMatches, "not found")
+ c.Assert(err == mgo.ErrNotFound, Equals, true)
+}
+
+func (s *S) TestFindIterNotFound(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ result := struct{ A, B int }{}
+ iter := coll.Find(M{"a": 1}).Iter()
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Err(), IsNil)
+}
+
+func (s *S) TestFindNil(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+
+ result := struct{ N int }{}
+
+ err = coll.Find(nil).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 1)
+}
+
+func (s *S) TestFindId(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"_id": 41, "n": 41})
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"_id": 42, "n": 42})
+ c.Assert(err, IsNil)
+
+ result := struct{ N int }{}
+
+ err = coll.FindId(42).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 42)
+}
+
+func (s *S) TestFindIterAll(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ session.Refresh() // Release socket.
+
+ mgo.ResetStats()
+
+ iter := coll.Find(M{"n": M{"$gte": 42}}).Sort("$natural").Prefetch(0).Batch(2).Iter()
+ result := struct{ N int }{}
+ for i := 2; i < 7; i++ {
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true, Commentf("err=%v", err))
+ c.Assert(result.N, Equals, ns[i])
+ if i == 1 {
+ stats := mgo.GetStats()
+ c.Assert(stats.ReceivedDocs, Equals, 2)
+ }
+ }
+
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Close(), IsNil)
+
+ session.Refresh() // Release socket.
+
+ stats := mgo.GetStats()
+ c.Assert(stats.SentOps, Equals, 3) // 1*QUERY_OP + 2*GET_MORE_OP
+ c.Assert(stats.ReceivedOps, Equals, 3) // and their REPLY_OPs.
+ if s.versionAtLeast(3, 2) {
+ // In 3.2+ responses come in batches inside the op reply docs.
+ c.Assert(stats.ReceivedDocs, Equals, 3)
+ } else {
+ c.Assert(stats.ReceivedDocs, Equals, 5)
+ }
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+func (s *S) TestFindIterTwiceWithSameQuery(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for i := 40; i != 47; i++ {
+ err := coll.Insert(M{"n": i})
+ c.Assert(err, IsNil)
+ }
+
+ query := coll.Find(M{}).Sort("n")
+
+ iter1 := query.Skip(1).Iter()
+ iter2 := query.Skip(2).Iter()
+
+ var result struct{ N int }
+ ok := iter2.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(result.N, Equals, 42)
+ ok = iter1.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(result.N, Equals, 41)
+}
+
+func (s *S) TestFindIterWithoutResults(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ coll.Insert(M{"n": 42})
+
+ iter := coll.Find(M{"n": 0}).Iter()
+
+ result := struct{ N int }{}
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Close(), IsNil)
+ c.Assert(result.N, Equals, 0)
+}
+
+func (s *S) TestFindIterLimit(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ session.Refresh() // Release socket.
+
+ mgo.ResetStats()
+
+ query := coll.Find(M{"n": M{"$gte": 42}}).Sort("$natural").Limit(3)
+ iter := query.Iter()
+
+ result := struct{ N int }{}
+ for i := 2; i < 5; i++ {
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(result.N, Equals, ns[i])
+ }
+
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Close(), IsNil)
+
+ session.Refresh() // Release socket.
+
+ stats := mgo.GetStats()
+ if s.versionAtLeast(3, 2) {
+		// Limit works properly in 3.2+, and results are batched in a single doc.
+ c.Assert(stats.SentOps, Equals, 1) // 1*QUERY_OP
+ c.Assert(stats.ReceivedOps, Equals, 1) // and its REPLY_OP
+ c.Assert(stats.ReceivedDocs, Equals, 1)
+ } else {
+ c.Assert(stats.SentOps, Equals, 2) // 1*QUERY_OP + 1*KILL_CURSORS_OP
+ c.Assert(stats.ReceivedOps, Equals, 1) // and its REPLY_OP
+ c.Assert(stats.ReceivedDocs, Equals, 3)
+ }
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
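+// cursorTimeout gates TestFindIterCursorTimeout, which takes more than ten
+// minutes to run while waiting for the server to time out the cursor.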
+var cursorTimeout = flag.Bool("cursor-timeout", false, "Enable cursor timeout test")
+
+func (s *S) TestFindIterCursorTimeout(c *C) {
+ if !*cursorTimeout {
+ c.Skip("-cursor-timeout")
+ }
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ type Doc struct {
+ Id int "_id"
+ }
+
+ coll := session.DB("test").C("test")
+ coll.Remove(nil)
+ for i := 0; i < 100; i++ {
+ err = coll.Insert(Doc{i})
+ c.Assert(err, IsNil)
+ }
+
+ session.SetBatch(1)
+ iter := coll.Find(nil).Iter()
+ var doc Doc
+ if !iter.Next(&doc) {
+ c.Fatalf("iterator failed to return any documents")
+ }
+
+ for i := 10; i > 0; i-- {
+ c.Logf("Sleeping... %d minutes to go...", i)
+ time.Sleep(1*time.Minute + 2*time.Second)
+ }
+
+ // Drain any existing documents that were fetched.
+ if !iter.Next(&doc) {
+ c.Fatalf("iterator with timed out cursor failed to return previously cached document")
+ }
+ if iter.Next(&doc) {
+ c.Fatalf("timed out cursor returned document")
+ }
+
+ c.Assert(iter.Err(), Equals, mgo.ErrCursor)
+}
+
+func (s *S) TestTooManyItemsLimitBug(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(runtime.NumCPU()))
+
+ mgo.SetDebug(false)
+ coll := session.DB("mydb").C("mycoll")
+ words := strings.Split("foo bar baz", " ")
+ for i := 0; i < 5; i++ {
+ words = append(words, words...)
+ }
+ doc := bson.D{{"words", words}}
+ inserts := 10000
+ limit := 5000
+ iters := 0
+ c.Assert(inserts > limit, Equals, true)
+ for i := 0; i < inserts; i++ {
+ err := coll.Insert(&doc)
+ c.Assert(err, IsNil)
+ }
+ iter := coll.Find(nil).Limit(limit).Iter()
+ for iter.Next(&doc) {
+ if iters%100 == 0 {
+			c.Logf("Seen %d documents", iters)
+ }
+ iters++
+ }
+ c.Assert(iter.Close(), IsNil)
+ c.Assert(iters, Equals, limit)
+}
+
+func (s *S) TestBatchSizeZeroGetMore(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(runtime.NumCPU()))
+
+ mgo.SetDebug(false)
+ coll := session.DB("mydb").C("mycoll")
+ words := strings.Split("foo bar baz", " ")
+ for i := 0; i < 5; i++ {
+ words = append(words, words...)
+ }
+ doc := bson.D{{"words", words}}
+ inserts := 10000
+ iters := 0
+ for i := 0; i < inserts; i++ {
+ err := coll.Insert(&doc)
+ c.Assert(err, IsNil)
+ }
+ iter := coll.Find(nil).Iter()
+ for iter.Next(&doc) {
+ if iters%100 == 0 {
+			c.Logf("Seen %d documents", iters)
+ }
+ iters++
+ }
+ c.Assert(iter.Close(), IsNil)
+}
+
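+// serverCursorsOpen returns the number of cursors currently open on the
+// server, as reported by the serverStatus command.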
+func serverCursorsOpen(session *mgo.Session) int {
+ var result struct {
+ Cursors struct {
+ TotalOpen int `bson:"totalOpen"`
+ TimedOut int `bson:"timedOut"`
+ }
+ }
+ err := session.Run("serverStatus", &result)
+ if err != nil {
+ panic(err)
+ }
+ return result.Cursors.TotalOpen
+}
+
+func (s *S) TestFindIterLimitWithMore(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ // Insane amounts of logging otherwise due to the
+ // amount of data being shuffled.
+ mgo.SetDebug(false)
+ defer mgo.SetDebug(true)
+
+ // Should amount to more than 4MB bson payload,
+ // the default limit per result chunk.
+ const total = 4096
+ var d struct{ A [1024]byte }
+ docs := make([]interface{}, total)
+ for i := 0; i < total; i++ {
+ docs[i] = &d
+ }
+ err = coll.Insert(docs...)
+ c.Assert(err, IsNil)
+
+ n, err := coll.Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, total)
+
+ // First, try restricting to a single chunk with a negative limit.
+ nresults := 0
+ iter := coll.Find(nil).Limit(-total).Iter()
+ var discard struct{}
+ for iter.Next(&discard) {
+ nresults++
+ }
+ if nresults < total/2 || nresults >= total {
+ c.Fatalf("Bad result size with negative limit: %d", nresults)
+ }
+
+ cursorsOpen := serverCursorsOpen(session)
+
+ // Try again, with a positive limit. Should reach the end now,
+ // using multiple chunks.
+ nresults = 0
+ iter = coll.Find(nil).Limit(total).Iter()
+ for iter.Next(&discard) {
+ nresults++
+ }
+ c.Assert(nresults, Equals, total)
+
+ // Ensure the cursor used is properly killed.
+ c.Assert(serverCursorsOpen(session), Equals, cursorsOpen)
+
+	// Edge case: negating math.MinInt32 overflows, so -MinInt32 == MinInt32.
+ nresults = 0
+ iter = coll.Find(nil).Limit(math.MinInt32).Iter()
+ for iter.Next(&discard) {
+ nresults++
+ }
+ if nresults < total/2 || nresults >= total {
+ c.Fatalf("Bad result size with MinInt32 limit: %d", nresults)
+ }
+}
+
+func (s *S) TestFindIterLimitWithBatch(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ // Ping the database to ensure the nonce has been received already.
+ c.Assert(session.Ping(), IsNil)
+
+ session.Refresh() // Release socket.
+
+ mgo.ResetStats()
+
+ query := coll.Find(M{"n": M{"$gte": 42}}).Sort("$natural").Limit(3).Batch(2)
+ iter := query.Iter()
+ result := struct{ N int }{}
+ for i := 2; i < 5; i++ {
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(result.N, Equals, ns[i])
+ if i == 3 {
+ stats := mgo.GetStats()
+ if s.versionAtLeast(3, 2) {
+ // In 3.2+ responses come in batches inside the op reply docs.
+ c.Assert(stats.ReceivedDocs, Equals, 1)
+ } else {
+ c.Assert(stats.ReceivedDocs, Equals, 2)
+ }
+ }
+ }
+
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Close(), IsNil)
+
+ session.Refresh() // Release socket.
+
+ stats := mgo.GetStats()
+ if s.versionAtLeast(3, 2) {
+		// In 3.2+ limit works properly even with multiple batches.
+ c.Assert(stats.SentOps, Equals, 2) // 1*QUERY_OP + 1*GET_MORE_OP
+ c.Assert(stats.ReceivedOps, Equals, 2) // and its REPLY_OPs
+
+ // In 3.2+ responses come in batches inside the op reply docs.
+ c.Assert(stats.ReceivedDocs, Equals, 2)
+ } else {
+ c.Assert(stats.SentOps, Equals, 3) // 1*QUERY_OP + 1*GET_MORE_OP + 1*KILL_CURSORS_OP
+ c.Assert(stats.ReceivedOps, Equals, 2) // and its REPLY_OPs
+ c.Assert(stats.ReceivedDocs, Equals, 3)
+ }
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+func (s *S) TestFindIterSortWithBatch(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+	// Without this, the logic below breaks because Mongo refuses to
+	// return a cursor with an in-memory sort.
+ coll.EnsureIndexKey("n")
+
+ // Ping the database to ensure the nonce has been received already.
+ c.Assert(session.Ping(), IsNil)
+
+ session.Refresh() // Release socket.
+
+ mgo.ResetStats()
+
+ query := coll.Find(M{"n": M{"$lte": 44}}).Sort("-n").Batch(2)
+ iter := query.Iter()
+ ns = []int{46, 45, 44, 43, 42, 41, 40}
+ result := struct{ N int }{}
+ for i := 2; i < len(ns); i++ {
+ c.Logf("i=%d", i)
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(result.N, Equals, ns[i])
+ if i == 3 {
+ stats := mgo.GetStats()
+ if s.versionAtLeast(3, 2) {
+ // Find command in 3.2+ bundles batches in a single document.
+ c.Assert(stats.ReceivedDocs, Equals, 1)
+ } else {
+ c.Assert(stats.ReceivedDocs, Equals, 2)
+ }
+ }
+ }
+
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Close(), IsNil)
+
+ session.Refresh() // Release socket.
+
+ stats := mgo.GetStats()
+ c.Assert(stats.SentOps, Equals, 3) // 1*QUERY_OP + 2*GET_MORE_OP
+ c.Assert(stats.ReceivedOps, Equals, 3) // and its REPLY_OPs
+ if s.versionAtLeast(3, 2) {
+ // Find command in 3.2+ bundles batches in a single document.
+ c.Assert(stats.ReceivedDocs, Equals, 3)
+ } else {
+ c.Assert(stats.ReceivedDocs, Equals, 5)
+ }
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+// Test tailable cursors in a situation where Next has to sleep to
+// respect the timeout requested on Tail.
+func (s *S) TestFindTailTimeoutWithSleep(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ cresult := struct{ ErrMsg string }{}
+
+ db := session.DB("mydb")
+ err = db.Run(bson.D{{"create", "mycoll"}, {"capped", true}, {"size", 1024}}, &cresult)
+ c.Assert(err, IsNil)
+ c.Assert(cresult.ErrMsg, Equals, "")
+ coll := db.C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ session.Refresh() // Release socket.
+
+ mgo.ResetStats()
+
+ timeout := 5 * time.Second
+
+ query := coll.Find(M{"n": M{"$gte": 42}}).Sort("$natural").Prefetch(0).Batch(2)
+ iter := query.Tail(timeout)
+
+ n := len(ns)
+ result := struct{ N int }{}
+ for i := 2; i != n; i++ {
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(iter.Err(), IsNil)
+ c.Assert(iter.Timeout(), Equals, false)
+ c.Assert(result.N, Equals, ns[i])
+ if i == 3 { // The batch boundary.
+ stats := mgo.GetStats()
+ c.Assert(stats.ReceivedDocs, Equals, 2)
+ }
+ }
+
+ mgo.ResetStats()
+
+ // The following call to Next will block.
+ done := make(chan bool)
+ defer func() { <-done }()
+ go func() {
+ // The internal AwaitData timing of MongoDB is around 2 seconds,
+ // so this should force mgo to sleep at least once by itself to
+ // respect the requested timeout.
+ c.Logf("[GOROUTINE] Starting and sleeping...")
+ time.Sleep(timeout - 2*time.Second)
+ c.Logf("[GOROUTINE] Woke up...")
+ session := session.New()
+ c.Logf("[GOROUTINE] Session created and will insert...")
+ err := coll.Insert(M{"n": 47})
+ c.Logf("[GOROUTINE] Insert attempted, err=%v...", err)
+ session.Close()
+ c.Logf("[GOROUTINE] Session closed.")
+ c.Check(err, IsNil)
+ done <- true
+ }()
+
+ c.Log("Will wait for Next with N=47...")
+ ok := iter.Next(&result)
+ c.Log("Next unblocked...")
+ c.Assert(ok, Equals, true)
+
+ c.Assert(iter.Err(), IsNil)
+ c.Assert(iter.Timeout(), Equals, false)
+ c.Assert(result.N, Equals, 47)
+ c.Log("Got Next with N=47!")
+
+ c.Log("Will wait for a result which will never come...")
+
+ started := time.Now()
+ ok = iter.Next(&result)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Err(), IsNil)
+ c.Assert(iter.Timeout(), Equals, true)
+ c.Assert(started.Before(time.Now().Add(-timeout)), Equals, true)
+
+ c.Log("Will now reuse the timed out tail cursor...")
+
+ coll.Insert(M{"n": 48})
+ ok = iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(iter.Close(), IsNil)
+ c.Assert(iter.Timeout(), Equals, false)
+ c.Assert(result.N, Equals, 48)
+}
+
+// Test tailable cursors in a situation where Next never gets to sleep once
+// to respect the timeout requested on Tail.
+func (s *S) TestFindTailTimeoutNoSleep(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ cresult := struct{ ErrMsg string }{}
+
+ db := session.DB("mydb")
+ err = db.Run(bson.D{{"create", "mycoll"}, {"capped", true}, {"size", 1024}}, &cresult)
+ c.Assert(err, IsNil)
+ c.Assert(cresult.ErrMsg, Equals, "")
+ coll := db.C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ session.Refresh() // Release socket.
+
+ mgo.ResetStats()
+
+ timeout := 1 * time.Second
+
+ query := coll.Find(M{"n": M{"$gte": 42}}).Sort("$natural").Prefetch(0).Batch(2)
+ iter := query.Tail(timeout)
+
+ n := len(ns)
+ result := struct{ N int }{}
+ for i := 2; i != n; i++ {
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(iter.Err(), IsNil)
+ c.Assert(iter.Timeout(), Equals, false)
+ c.Assert(result.N, Equals, ns[i])
+ if i == 3 { // The batch boundary.
+ stats := mgo.GetStats()
+ c.Assert(stats.ReceivedDocs, Equals, 2)
+ }
+ }
+
+ // The following call to Next will block.
+ go func() {
+ // The internal AwaitData timing of MongoDB is around 2 seconds,
+ // so this item should arrive within the AwaitData threshold.
+ time.Sleep(500 * time.Millisecond)
+ session := session.New()
+ defer session.Close()
+ coll := session.DB("mydb").C("mycoll")
+ coll.Insert(M{"n": 47})
+ }()
+
+ c.Log("Will wait for Next with N=47...")
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(iter.Err(), IsNil)
+ c.Assert(iter.Timeout(), Equals, false)
+ c.Assert(result.N, Equals, 47)
+ c.Log("Got Next with N=47!")
+
+ c.Log("Will wait for a result which will never come...")
+
+ started := time.Now()
+ ok = iter.Next(&result)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Err(), IsNil)
+ c.Assert(iter.Timeout(), Equals, true)
+ c.Assert(started.Before(time.Now().Add(-timeout)), Equals, true)
+
+ c.Log("Will now reuse the timed out tail cursor...")
+
+ coll.Insert(M{"n": 48})
+ ok = iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(iter.Close(), IsNil)
+ c.Assert(iter.Timeout(), Equals, false)
+ c.Assert(result.N, Equals, 48)
+}
+
+// Test tailable cursors with no timeout (Tail(-1)), where Next blocks
+// until new data arrives or the session is closed.
+func (s *S) TestFindTailNoTimeout(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ cresult := struct{ ErrMsg string }{}
+
+ db := session.DB("mydb")
+ err = db.Run(bson.D{{"create", "mycoll"}, {"capped", true}, {"size", 1024}}, &cresult)
+ c.Assert(err, IsNil)
+ c.Assert(cresult.ErrMsg, Equals, "")
+ coll := db.C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ session.Refresh() // Release socket.
+
+ mgo.ResetStats()
+
+ query := coll.Find(M{"n": M{"$gte": 42}}).Sort("$natural").Prefetch(0).Batch(2)
+ iter := query.Tail(-1)
+
+ n := len(ns)
+ result := struct{ N int }{}
+ for i := 2; i != n; i++ {
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(result.N, Equals, ns[i])
+ if i == 3 { // The batch boundary.
+ stats := mgo.GetStats()
+ c.Assert(stats.ReceivedDocs, Equals, 2)
+ }
+ }
+
+ mgo.ResetStats()
+
+ // The following call to Next will block.
+ go func() {
+		time.Sleep(500 * time.Millisecond)
+ session := session.New()
+ defer session.Close()
+ coll := session.DB("mydb").C("mycoll")
+ coll.Insert(M{"n": 47})
+ }()
+
+ c.Log("Will wait for Next with N=47...")
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(iter.Err(), IsNil)
+ c.Assert(iter.Timeout(), Equals, false)
+ c.Assert(result.N, Equals, 47)
+ c.Log("Got Next with N=47!")
+
+ c.Log("Will wait for a result which will never come...")
+
+ gotNext := make(chan bool)
+ go func() {
+ ok := iter.Next(&result)
+ gotNext <- ok
+ }()
+
+ select {
+ case ok := <-gotNext:
+ c.Fatalf("Next returned: %v", ok)
+	case <-time.After(3 * time.Second):
+ // Good. Should still be sleeping at that point.
+ }
+
+ // Closing the session should cause Next to return.
+ session.Close()
+
+ select {
+ case ok := <-gotNext:
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Err(), ErrorMatches, "Closed explicitly")
+ c.Assert(iter.Timeout(), Equals, false)
+	case <-time.After(1 * time.Second):
+ c.Fatal("Closing the session did not unblock Next")
+ }
+}
+
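+// Next must reset the destination value between documents, so fields set by
+// an earlier document do not leak into later ones (structs, maps and
+// interface{} destinations alike).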
+func (s *S) TestIterNextResetsResult(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{1, 2, 3}
+ for _, n := range ns {
+ coll.Insert(M{"n" + strconv.Itoa(n): n})
+ }
+
+ query := coll.Find(nil).Sort("$natural")
+
+ i := 0
+ var sresult *struct{ N1, N2, N3 int }
+ iter := query.Iter()
+ for iter.Next(&sresult) {
+ switch i {
+ case 0:
+ c.Assert(sresult.N1, Equals, 1)
+ c.Assert(sresult.N2+sresult.N3, Equals, 0)
+ case 1:
+ c.Assert(sresult.N2, Equals, 2)
+ c.Assert(sresult.N1+sresult.N3, Equals, 0)
+ case 2:
+ c.Assert(sresult.N3, Equals, 3)
+ c.Assert(sresult.N1+sresult.N2, Equals, 0)
+ }
+ i++
+ }
+ c.Assert(iter.Close(), IsNil)
+
+ i = 0
+ var mresult M
+ iter = query.Iter()
+ for iter.Next(&mresult) {
+ delete(mresult, "_id")
+ switch i {
+ case 0:
+ c.Assert(mresult, DeepEquals, M{"n1": 1})
+ case 1:
+ c.Assert(mresult, DeepEquals, M{"n2": 2})
+ case 2:
+ c.Assert(mresult, DeepEquals, M{"n3": 3})
+ }
+ i++
+ }
+ c.Assert(iter.Close(), IsNil)
+
+ i = 0
+ var iresult interface{}
+ iter = query.Iter()
+ for iter.Next(&iresult) {
+ mresult, ok := iresult.(bson.M)
+ c.Assert(ok, Equals, true, Commentf("%#v", iresult))
+ delete(mresult, "_id")
+ switch i {
+ case 0:
+ c.Assert(mresult, DeepEquals, bson.M{"n1": 1})
+ case 1:
+ c.Assert(mresult, DeepEquals, bson.M{"n2": 2})
+ case 2:
+ c.Assert(mresult, DeepEquals, bson.M{"n3": 3})
+ }
+ i++
+ }
+ c.Assert(iter.Close(), IsNil)
+}
+
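+// For should invoke the callback once per matching document, resetting the
+// result value before each call, and abort with the callback's error if one
+// is returned.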
+func (s *S) TestFindForOnIter(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ session.Refresh() // Release socket.
+
+ mgo.ResetStats()
+
+ query := coll.Find(M{"n": M{"$gte": 42}}).Sort("$natural").Prefetch(0).Batch(2)
+ iter := query.Iter()
+
+ i := 2
+ var result *struct{ N int }
+ err = iter.For(&result, func() error {
+ c.Assert(i < 7, Equals, true)
+ c.Assert(result.N, Equals, ns[i])
+ if i == 1 {
+ stats := mgo.GetStats()
+ if s.versionAtLeast(3, 2) {
+ // Find command in 3.2+ bundles batches in a single document.
+ c.Assert(stats.ReceivedDocs, Equals, 1)
+ } else {
+ c.Assert(stats.ReceivedDocs, Equals, 2)
+ }
+ }
+ i++
+ return nil
+ })
+ c.Assert(err, IsNil)
+
+ session.Refresh() // Release socket.
+
+ stats := mgo.GetStats()
+ c.Assert(stats.SentOps, Equals, 3) // 1*QUERY_OP + 2*GET_MORE_OP
+ c.Assert(stats.ReceivedOps, Equals, 3) // and their REPLY_OPs.
+ if s.versionAtLeast(3, 2) {
+ // Find command in 3.2+ bundles batches in a single document.
+ c.Assert(stats.ReceivedDocs, Equals, 3)
+ } else {
+ c.Assert(stats.ReceivedDocs, Equals, 5)
+ }
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+func (s *S) TestFindFor(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ session.Refresh() // Release socket.
+
+ mgo.ResetStats()
+
+ query := coll.Find(M{"n": M{"$gte": 42}}).Sort("$natural").Prefetch(0).Batch(2)
+
+ i := 2
+ var result *struct{ N int }
+ err = query.For(&result, func() error {
+ c.Assert(i < 7, Equals, true)
+ c.Assert(result.N, Equals, ns[i])
+ if i == 1 {
+ stats := mgo.GetStats()
+ if s.versionAtLeast(3, 2) {
+ // Find command in 3.2+ bundles batches in a single document.
+ c.Assert(stats.ReceivedDocs, Equals, 1)
+ } else {
+ c.Assert(stats.ReceivedDocs, Equals, 2)
+ }
+ }
+ i++
+ return nil
+ })
+ c.Assert(err, IsNil)
+
+ session.Refresh() // Release socket.
+
+ stats := mgo.GetStats()
+ c.Assert(stats.SentOps, Equals, 3) // 1*QUERY_OP + 2*GET_MORE_OP
+ c.Assert(stats.ReceivedOps, Equals, 3) // and their REPLY_OPs.
+ if s.versionAtLeast(3, 2) {
+ // Find command in 3.2+ bundles batches in a single document.
+ c.Assert(stats.ReceivedDocs, Equals, 3)
+ } else {
+ c.Assert(stats.ReceivedDocs, Equals, 5)
+ }
+ c.Assert(stats.SocketsInUse, Equals, 0)
+}
+
+func (s *S) TestFindForStopOnError(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ query := coll.Find(M{"n": M{"$gte": 42}})
+ i := 2
+ var result *struct{ N int }
+ err = query.For(&result, func() error {
+ c.Assert(i < 4, Equals, true)
+ c.Assert(result.N, Equals, ns[i])
+ if i == 3 {
+ return fmt.Errorf("stop!")
+ }
+ i++
+ return nil
+ })
+ c.Assert(err, ErrorMatches, "stop!")
+}
+
+func (s *S) TestFindForResetsResult(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{1, 2, 3}
+ for _, n := range ns {
+ coll.Insert(M{"n" + strconv.Itoa(n): n})
+ }
+
+ query := coll.Find(nil).Sort("$natural")
+
+ i := 0
+ var sresult *struct{ N1, N2, N3 int }
+ err = query.For(&sresult, func() error {
+ switch i {
+ case 0:
+ c.Assert(sresult.N1, Equals, 1)
+ c.Assert(sresult.N2+sresult.N3, Equals, 0)
+ case 1:
+ c.Assert(sresult.N2, Equals, 2)
+ c.Assert(sresult.N1+sresult.N3, Equals, 0)
+ case 2:
+ c.Assert(sresult.N3, Equals, 3)
+ c.Assert(sresult.N1+sresult.N2, Equals, 0)
+ }
+ i++
+ return nil
+ })
+ c.Assert(err, IsNil)
+
+ i = 0
+ var mresult M
+ err = query.For(&mresult, func() error {
+ delete(mresult, "_id")
+ switch i {
+ case 0:
+ c.Assert(mresult, DeepEquals, M{"n1": 1})
+ case 1:
+ c.Assert(mresult, DeepEquals, M{"n2": 2})
+ case 2:
+ c.Assert(mresult, DeepEquals, M{"n3": 3})
+ }
+ i++
+ return nil
+ })
+ c.Assert(err, IsNil)
+
+ i = 0
+ var iresult interface{}
+ err = query.For(&iresult, func() error {
+ mresult, ok := iresult.(bson.M)
+ c.Assert(ok, Equals, true, Commentf("%#v", iresult))
+ delete(mresult, "_id")
+ switch i {
+ case 0:
+ c.Assert(mresult, DeepEquals, bson.M{"n1": 1})
+ case 1:
+ c.Assert(mresult, DeepEquals, bson.M{"n2": 2})
+ case 2:
+ c.Assert(mresult, DeepEquals, bson.M{"n3": 3})
+ }
+ i++
+ return nil
+ })
+ c.Assert(err, IsNil)
+}
+
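+// With $snapshot set, documents that grow and get moved on disk during the
+// iteration must not be returned twice.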
+func (s *S) TestFindIterSnapshot(c *C) {
+ if s.versionAtLeast(3, 2) {
+ c.Skip("Broken in 3.2: https://jira.mongodb.org/browse/SERVER-21403")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Insane amounts of logging otherwise due to the
+ // amount of data being shuffled.
+ mgo.SetDebug(false)
+ defer mgo.SetDebug(true)
+
+ coll := session.DB("mydb").C("mycoll")
+
+ var a [1024000]byte
+
+ for n := 0; n < 10; n++ {
+ err := coll.Insert(M{"_id": n, "n": n, "a1": &a})
+ c.Assert(err, IsNil)
+ }
+
+ query := coll.Find(M{"n": M{"$gt": -1}}).Batch(2).Prefetch(0)
+ query.Snapshot()
+ iter := query.Iter()
+
+ seen := map[int]bool{}
+ result := struct {
+ Id int "_id"
+ }{}
+ for iter.Next(&result) {
+ if len(seen) == 2 {
+ // Grow all entries so that they have to move.
+ // Backwards so that the order is inverted.
+ for n := 10; n >= 0; n-- {
+ _, err := coll.Upsert(M{"_id": n}, M{"$set": M{"a2": &a}})
+ c.Assert(err, IsNil)
+ }
+ }
+ if seen[result.Id] {
+ c.Fatalf("seen duplicated key: %d", result.Id)
+ }
+ seen[result.Id] = true
+ }
+ c.Assert(iter.Close(), IsNil)
+}
+
+func (s *S) TestSort(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ coll.Insert(M{"a": 1, "b": 1})
+ coll.Insert(M{"a": 2, "b": 2})
+ coll.Insert(M{"a": 2, "b": 1})
+ coll.Insert(M{"a": 0, "b": 1})
+ coll.Insert(M{"a": 2, "b": 0})
+ coll.Insert(M{"a": 0, "b": 2})
+ coll.Insert(M{"a": 1, "b": 2})
+ coll.Insert(M{"a": 0, "b": 0})
+ coll.Insert(M{"a": 1, "b": 0})
+
+ query := coll.Find(M{})
+ query.Sort("-a") // Should be ignored.
+ query.Sort("-b", "a")
+ iter := query.Iter()
+
+ l := make([]int, 18)
+ r := struct{ A, B int }{}
+ for i := 0; i != len(l); i += 2 {
+ ok := iter.Next(&r)
+ c.Assert(ok, Equals, true)
+		c.Assert(iter.Err(), IsNil)
+ l[i] = r.A
+ l[i+1] = r.B
+ }
+
+ c.Assert(l, DeepEquals, []int{0, 2, 1, 2, 2, 2, 0, 1, 1, 1, 2, 1, 0, 0, 1, 0, 2, 0})
+}
+
+func (s *S) TestSortWithBadArgs(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ f1 := func() { coll.Find(nil).Sort("") }
+ f2 := func() { coll.Find(nil).Sort("+") }
+ f3 := func() { coll.Find(nil).Sort("foo", "-") }
+
+ for _, f := range []func(){f1, f2, f3} {
+ c.Assert(f, PanicMatches, "Sort: empty field name")
+ }
+}
+
+func (s *S) TestSortScoreText(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ if !s.versionAtLeast(2, 4) {
+ c.Skip("Text search depends on 2.4+")
+ }
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.EnsureIndex(mgo.Index{
+ Key: []string{"$text:a", "$text:b"},
+ })
+ msg := "text search not enabled"
+ if err != nil && strings.Contains(err.Error(), msg) {
+ c.Skip(msg)
+ }
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{
+ "a": "none",
+ "b": "twice: foo foo",
+ })
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{
+ "a": "just once: foo",
+ "b": "none",
+ })
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{
+ "a": "many: foo foo foo",
+ "b": "none",
+ })
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{
+ "a": "none",
+ "b": "none",
+ "c": "ignore: foo",
+ })
+ c.Assert(err, IsNil)
+
+ query := coll.Find(M{"$text": M{"$search": "foo"}})
+ query.Select(M{"score": M{"$meta": "textScore"}})
+ query.Sort("$textScore:score")
+ iter := query.Iter()
+
+ var r struct{ A, B string }
+ var results []string
+ for iter.Next(&r) {
+ results = append(results, r.A, r.B)
+ }
+
+ c.Assert(results, DeepEquals, []string{
+ "many: foo foo foo", "none",
+ "none", "twice: foo foo",
+ "just once: foo", "none",
+ })
+}
+
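+// The prefetch ratio controls how much of the current batch may be consumed
+// before the next batch is requested from the server in the background.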
+func (s *S) TestPrefetching(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ const total = 600
+ const batch = 100
+ mgo.SetDebug(false)
+ docs := make([]interface{}, total)
+ for i := 0; i != total; i++ {
+ docs[i] = bson.D{{"n", i}}
+ }
+ err = coll.Insert(docs...)
+ c.Assert(err, IsNil)
+
+ for testi := 0; testi < 5; testi++ {
+ mgo.ResetStats()
+
+ var iter *mgo.Iter
+ var beforeMore int
+
+ switch testi {
+ case 0: // The default session value.
+ session.SetBatch(batch)
+ iter = coll.Find(M{}).Iter()
+ beforeMore = 75
+
+ case 2: // Changing the session value.
+ session.SetBatch(batch)
+ session.SetPrefetch(0.27)
+ iter = coll.Find(M{}).Iter()
+ beforeMore = 73
+
+ case 1: // Changing via query methods.
+ iter = coll.Find(M{}).Prefetch(0.27).Batch(batch).Iter()
+ beforeMore = 73
+
+ case 3: // With prefetch on first document.
+ iter = coll.Find(M{}).Prefetch(1.0).Batch(batch).Iter()
+ beforeMore = 0
+
+ case 4: // Without prefetch.
+ iter = coll.Find(M{}).Prefetch(0).Batch(batch).Iter()
+ beforeMore = 100
+ }
+
+ pings := 0
+ for batchi := 0; batchi < len(docs)/batch-1; batchi++ {
+ c.Logf("Iterating over %d documents on batch %d", beforeMore, batchi)
+ var result struct{ N int }
+ for i := 0; i < beforeMore; i++ {
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true, Commentf("iter.Err: %v", iter.Err()))
+ }
+ beforeMore = 99
+ c.Logf("Done iterating.")
+
+ session.Run("ping", nil) // Roundtrip to settle down.
+ pings++
+
+ stats := mgo.GetStats()
+ if s.versionAtLeast(3, 2) {
+ // Find command in 3.2+ bundles batches in a single document.
+ c.Assert(stats.ReceivedDocs, Equals, (batchi+1)+pings)
+ } else {
+ c.Assert(stats.ReceivedDocs, Equals, (batchi+1)*batch+pings)
+ }
+
+ c.Logf("Iterating over one more document on batch %d", batchi)
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true, Commentf("iter.Err: %v", iter.Err()))
+ c.Logf("Done iterating.")
+
+ session.Run("ping", nil) // Roundtrip to settle down.
+ pings++
+
+ stats = mgo.GetStats()
+ if s.versionAtLeast(3, 2) {
+ // Find command in 3.2+ bundles batches in a single document.
+ c.Assert(stats.ReceivedDocs, Equals, (batchi+2)+pings)
+ } else {
+ c.Assert(stats.ReceivedDocs, Equals, (batchi+2)*batch+pings)
+ }
+ }
+ }
+}
+
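+// SetSafe replaces the session's safety settings outright, while EnsureSafe
+// only upgrades them to more conservative values.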
+func (s *S) TestSafeSetting(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Check the default
+ safe := session.Safe()
+ c.Assert(safe.W, Equals, 0)
+ c.Assert(safe.WMode, Equals, "")
+ c.Assert(safe.WTimeout, Equals, 0)
+ c.Assert(safe.FSync, Equals, false)
+ c.Assert(safe.J, Equals, false)
+
+ // Tweak it
+ session.SetSafe(&mgo.Safe{W: 1, WTimeout: 2, FSync: true})
+ safe = session.Safe()
+ c.Assert(safe.W, Equals, 1)
+ c.Assert(safe.WMode, Equals, "")
+ c.Assert(safe.WTimeout, Equals, 2)
+ c.Assert(safe.FSync, Equals, true)
+ c.Assert(safe.J, Equals, false)
+
+ // Reset it again.
+ session.SetSafe(&mgo.Safe{})
+ safe = session.Safe()
+ c.Assert(safe.W, Equals, 0)
+ c.Assert(safe.WMode, Equals, "")
+ c.Assert(safe.WTimeout, Equals, 0)
+ c.Assert(safe.FSync, Equals, false)
+ c.Assert(safe.J, Equals, false)
+
+ // Ensure safety to something more conservative.
+ session.SetSafe(&mgo.Safe{W: 5, WTimeout: 6, J: true})
+ safe = session.Safe()
+ c.Assert(safe.W, Equals, 5)
+ c.Assert(safe.WMode, Equals, "")
+ c.Assert(safe.WTimeout, Equals, 6)
+ c.Assert(safe.FSync, Equals, false)
+ c.Assert(safe.J, Equals, true)
+
+ // Ensure safety to something less conservative won't change it.
+ session.EnsureSafe(&mgo.Safe{W: 4, WTimeout: 7})
+ safe = session.Safe()
+ c.Assert(safe.W, Equals, 5)
+ c.Assert(safe.WMode, Equals, "")
+ c.Assert(safe.WTimeout, Equals, 6)
+ c.Assert(safe.FSync, Equals, false)
+ c.Assert(safe.J, Equals, true)
+
+ // But to something more conservative will.
+ session.EnsureSafe(&mgo.Safe{W: 6, WTimeout: 4, FSync: true})
+ safe = session.Safe()
+ c.Assert(safe.W, Equals, 6)
+ c.Assert(safe.WMode, Equals, "")
+ c.Assert(safe.WTimeout, Equals, 4)
+ c.Assert(safe.FSync, Equals, true)
+ c.Assert(safe.J, Equals, false)
+
+ // Even more conservative.
+ session.EnsureSafe(&mgo.Safe{WMode: "majority", WTimeout: 2})
+ safe = session.Safe()
+ c.Assert(safe.W, Equals, 0)
+ c.Assert(safe.WMode, Equals, "majority")
+ c.Assert(safe.WTimeout, Equals, 2)
+ c.Assert(safe.FSync, Equals, true)
+ c.Assert(safe.J, Equals, false)
+
+ // WMode always overrides, whatever it is, but J doesn't.
+ session.EnsureSafe(&mgo.Safe{WMode: "something", J: true})
+ safe = session.Safe()
+ c.Assert(safe.W, Equals, 0)
+ c.Assert(safe.WMode, Equals, "something")
+ c.Assert(safe.WTimeout, Equals, 2)
+ c.Assert(safe.FSync, Equals, true)
+ c.Assert(safe.J, Equals, false)
+
+ // EnsureSafe with nil does nothing.
+ session.EnsureSafe(nil)
+ safe = session.Safe()
+ c.Assert(safe.W, Equals, 0)
+ c.Assert(safe.WMode, Equals, "something")
+ c.Assert(safe.WTimeout, Equals, 2)
+ c.Assert(safe.FSync, Equals, true)
+ c.Assert(safe.J, Equals, false)
+
+ // Changing the safety of a cloned session doesn't touch the original.
+ clone := session.Clone()
+ defer clone.Close()
+ clone.EnsureSafe(&mgo.Safe{WMode: "foo"})
+ safe = session.Safe()
+ c.Assert(safe.WMode, Equals, "something")
+}
+
+func (s *S) TestSafeInsert(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ // Insert an element with a predefined key.
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+
+ mgo.ResetStats()
+
+ // Session should be safe by default, so inserting it again must fail.
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, ErrorMatches, ".*E11000 duplicate.*")
+ c.Assert(err.(*mgo.LastError).Code, Equals, 11000)
+
+	// Pre-2.6 servers see two operations (INSERT_OP + getLastError QUERY_OP);
+	// 2.6+ servers use a single insert write command.
+ stats := mgo.GetStats()
+
+ if s.versionAtLeast(2, 6) {
+ c.Assert(stats.SentOps, Equals, 1)
+ } else {
+ c.Assert(stats.SentOps, Equals, 2)
+ }
+
+ mgo.ResetStats()
+
+ // If we disable safety, though, it won't complain.
+ session.SetSafe(nil)
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, IsNil)
+
+ // Must have sent a single operation this time (just the INSERT_OP)
+ stats = mgo.GetStats()
+ c.Assert(stats.SentOps, Equals, 1)
+}
+
+func (s *S) TestSafeParameters(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ // Tweak the safety parameters to something unachievable.
+ session.SetSafe(&mgo.Safe{W: 4, WTimeout: 100})
+ err = coll.Insert(M{"_id": 1})
+ c.Assert(err, ErrorMatches, "timeout|timed out waiting for slaves|Not enough data-bearing nodes|waiting for replication timed out") // :-(
+ if !s.versionAtLeast(2, 6) {
+ // 2.6 turned it into a query error.
+ c.Assert(err.(*mgo.LastError).WTimeout, Equals, true)
+ }
+}
+
+func (s *S) TestQueryErrorOne(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Find(M{"a": 1}).Select(M{"a": M{"b": 1}}).One(nil)
+ c.Assert(err, ErrorMatches, ".*Unsupported projection option:.*")
+ c.Assert(err.(*mgo.QueryError).Message, Matches, ".*Unsupported projection option:.*")
+ // Oh, the dance of error codes. :-(
+ if s.versionAtLeast(3, 2) {
+ c.Assert(err.(*mgo.QueryError).Code, Equals, 2)
+ } else if s.versionAtLeast(2, 6) {
+ c.Assert(err.(*mgo.QueryError).Code, Equals, 17287)
+ } else {
+ c.Assert(err.(*mgo.QueryError).Code, Equals, 13097)
+ }
+}
+
+func (s *S) TestQueryErrorNext(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ iter := coll.Find(M{"a": 1}).Select(M{"a": M{"b": 1}}).Iter()
+
+ var result struct{}
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, false)
+
+ err = iter.Close()
+ c.Assert(err, ErrorMatches, ".*Unsupported projection option:.*")
+ c.Assert(err.(*mgo.QueryError).Message, Matches, ".*Unsupported projection option:.*")
+ // Oh, the dance of error codes. :-(
+ if s.versionAtLeast(3, 2) {
+ c.Assert(err.(*mgo.QueryError).Code, Equals, 2)
+ } else if s.versionAtLeast(2, 6) {
+ c.Assert(err.(*mgo.QueryError).Code, Equals, 17287)
+ } else {
+ c.Assert(err.(*mgo.QueryError).Code, Equals, 13097)
+ }
+ c.Assert(iter.Err(), Equals, err)
+}
+
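+// indexTests pairs an index specification with the document that EnsureIndex
+// is expected to create for it in system.indexes.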
+var indexTests = []struct {
+ index mgo.Index
+ expected M
+}{{
+ mgo.Index{
+ Key: []string{"a"},
+ Background: true,
+ },
+ M{
+ "name": "a_1",
+ "key": M{"a": 1},
+ "ns": "mydb.mycoll",
+ "background": true,
+ },
+}, {
+ mgo.Index{
+ Key: []string{"a", "-b"},
+ Unique: true,
+ DropDups: true,
+ },
+ M{
+ "name": "a_1_b_-1",
+ "key": M{"a": 1, "b": -1},
+ "ns": "mydb.mycoll",
+ "unique": true,
+ "dropDups": true,
+ },
+}, {
+ mgo.Index{
+ Key: []string{"@loc_old"}, // Obsolete
+ Min: -500,
+ Max: 500,
+ Bits: 32,
+ },
+ M{
+ "name": "loc_old_2d",
+ "key": M{"loc_old": "2d"},
+ "ns": "mydb.mycoll",
+ "min": -500.0,
+ "max": 500.0,
+ "bits": 32,
+ },
+}, {
+ mgo.Index{
+ Key: []string{"$2d:loc"},
+ Min: -500,
+ Max: 500,
+ Bits: 32,
+ },
+ M{
+ "name": "loc_2d",
+ "key": M{"loc": "2d"},
+ "ns": "mydb.mycoll",
+ "min": -500.0,
+ "max": 500.0,
+ "bits": 32,
+ },
+}, {
+ mgo.Index{
+ Key: []string{"$2d:loc"},
+ Minf: -500.1,
+ Maxf: 500.1,
+ Min: 1, // Should be ignored
+ Max: 2,
+ Bits: 32,
+ },
+ M{
+ "name": "loc_2d",
+ "key": M{"loc": "2d"},
+ "ns": "mydb.mycoll",
+ "min": -500.1,
+ "max": 500.1,
+ "bits": 32,
+ },
+}, {
+ mgo.Index{
+ Key: []string{"$geoHaystack:loc", "type"},
+ BucketSize: 1,
+ },
+ M{
+ "name": "loc_geoHaystack_type_1",
+ "key": M{"loc": "geoHaystack", "type": 1},
+ "ns": "mydb.mycoll",
+ "bucketSize": 1.0,
+ },
+}, {
+ mgo.Index{
+ Key: []string{"$text:a", "$text:b"},
+ Weights: map[string]int{"b": 42},
+ },
+ M{
+ "name": "a_text_b_text",
+ "key": M{"_fts": "text", "_ftsx": 1},
+ "ns": "mydb.mycoll",
+ "weights": M{"a": 1, "b": 42},
+ "default_language": "english",
+ "language_override": "language",
+ "textIndexVersion": 2,
+ },
+}, {
+ mgo.Index{
+ Key: []string{"$text:a"},
+ DefaultLanguage: "portuguese",
+ LanguageOverride: "idioma",
+ },
+ M{
+ "name": "a_text",
+ "key": M{"_fts": "text", "_ftsx": 1},
+ "ns": "mydb.mycoll",
+ "weights": M{"a": 1},
+ "default_language": "portuguese",
+ "language_override": "idioma",
+ "textIndexVersion": 2,
+ },
+}, {
+ mgo.Index{
+ Key: []string{"$text:$**"},
+ },
+ M{
+ "name": "$**_text",
+ "key": M{"_fts": "text", "_ftsx": 1},
+ "ns": "mydb.mycoll",
+ "weights": M{"$**": 1},
+ "default_language": "english",
+ "language_override": "language",
+ "textIndexVersion": 2,
+ },
+}, {
+ mgo.Index{
+ Key: []string{"cn"},
+ Name: "CustomName",
+ },
+ M{
+ "name": "CustomName",
+ "key": M{"cn": 1},
+ "ns": "mydb.mycoll",
+ },
+}}
+
+func (s *S) TestEnsureIndex(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ idxs := session.DB("mydb").C("system.indexes")
+
+ for _, test := range indexTests {
+ if !s.versionAtLeast(2, 4) && test.expected["textIndexVersion"] != nil {
+ continue
+ }
+
+ err = coll.EnsureIndex(test.index)
+ msg := "text search not enabled"
+ if err != nil && strings.Contains(err.Error(), msg) {
+ continue
+ }
+ c.Assert(err, IsNil)
+
+ expectedName := test.index.Name
+ if expectedName == "" {
+ expectedName, _ = test.expected["name"].(string)
+ }
+
+ obtained := M{}
+ err = idxs.Find(M{"name": expectedName}).One(obtained)
+ c.Assert(err, IsNil)
+
+ delete(obtained, "v")
+
+ if s.versionAtLeast(2, 7) {
+ // Was deprecated in 2.6, and not being reported by 2.7+.
+ delete(test.expected, "dropDups")
+ test.index.DropDups = false
+ }
+ if s.versionAtLeast(3, 2) && test.expected["textIndexVersion"] != nil {
+ test.expected["textIndexVersion"] = 3
+ }
+
+ c.Assert(obtained, DeepEquals, test.expected)
+
+ // The result of Indexes must match closely what was used to create the index.
+ indexes, err := coll.Indexes()
+ c.Assert(err, IsNil)
+ c.Assert(indexes, HasLen, 2)
+ gotIndex := indexes[0]
+ if gotIndex.Name == "_id_" {
+ gotIndex = indexes[1]
+ }
+ wantIndex := test.index
+ if wantIndex.Name == "" {
+ wantIndex.Name = gotIndex.Name
+ }
+ if strings.HasPrefix(wantIndex.Key[0], "@") {
+ wantIndex.Key[0] = "$2d:" + wantIndex.Key[0][1:]
+ }
+ if wantIndex.Minf == 0 && wantIndex.Maxf == 0 {
+ wantIndex.Minf = float64(wantIndex.Min)
+ wantIndex.Maxf = float64(wantIndex.Max)
+ } else {
+ wantIndex.Min = gotIndex.Min
+ wantIndex.Max = gotIndex.Max
+ }
+ if wantIndex.DefaultLanguage == "" {
+ wantIndex.DefaultLanguage = gotIndex.DefaultLanguage
+ }
+ if wantIndex.LanguageOverride == "" {
+ wantIndex.LanguageOverride = gotIndex.LanguageOverride
+ }
+		for name := range gotIndex.Weights {
+ if _, ok := wantIndex.Weights[name]; !ok {
+ if wantIndex.Weights == nil {
+ wantIndex.Weights = make(map[string]int)
+ }
+ wantIndex.Weights[name] = 1
+ }
+ }
+ c.Assert(gotIndex, DeepEquals, wantIndex)
+
+ // Drop created index by key or by name if a custom name was used.
+ if test.index.Name == "" {
+ err = coll.DropIndex(test.index.Key...)
+ c.Assert(err, IsNil)
+ } else {
+ err = coll.DropIndexName(test.index.Name)
+ c.Assert(err, IsNil)
+ }
+ }
+}
+
+func (s *S) TestEnsureIndexWithBadInfo(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.EnsureIndex(mgo.Index{})
+ c.Assert(err, ErrorMatches, "invalid index key:.*")
+
+ err = coll.EnsureIndex(mgo.Index{Key: []string{""}})
+ c.Assert(err, ErrorMatches, "invalid index key:.*")
+}
+
+func (s *S) TestEnsureIndexWithUnsafeSession(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetSafe(nil)
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ // Should fail since there are duplicated entries.
+ index := mgo.Index{
+ Key: []string{"a"},
+ Unique: true,
+ }
+
+ err = coll.EnsureIndex(index)
+ c.Assert(err, ErrorMatches, ".*duplicate key error.*")
+}
+
+func (s *S) TestEnsureIndexKey(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.EnsureIndexKey("a")
+ c.Assert(err, IsNil)
+
+ err = coll.EnsureIndexKey("a", "-b")
+ c.Assert(err, IsNil)
+
+ sysidx := session.DB("mydb").C("system.indexes")
+
+ result1 := M{}
+ err = sysidx.Find(M{"name": "a_1"}).One(result1)
+ c.Assert(err, IsNil)
+
+ result2 := M{}
+ err = sysidx.Find(M{"name": "a_1_b_-1"}).One(result2)
+ c.Assert(err, IsNil)
+
+ delete(result1, "v")
+ expected1 := M{
+ "name": "a_1",
+ "key": M{"a": 1},
+ "ns": "mydb.mycoll",
+ }
+ c.Assert(result1, DeepEquals, expected1)
+
+ delete(result2, "v")
+ expected2 := M{
+ "name": "a_1_b_-1",
+ "key": M{"a": 1, "b": -1},
+ "ns": "mydb.mycoll",
+ }
+ c.Assert(result2, DeepEquals, expected2)
+}
+
+func (s *S) TestEnsureIndexDropIndex(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.EnsureIndexKey("a")
+ c.Assert(err, IsNil)
+
+ err = coll.EnsureIndexKey("-b")
+ c.Assert(err, IsNil)
+
+ err = coll.DropIndex("-b")
+ c.Assert(err, IsNil)
+
+ sysidx := session.DB("mydb").C("system.indexes")
+
+ err = sysidx.Find(M{"name": "a_1"}).One(nil)
+ c.Assert(err, IsNil)
+
+ err = sysidx.Find(M{"name": "b_1"}).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = coll.DropIndex("a")
+ c.Assert(err, IsNil)
+
+ err = sysidx.Find(M{"name": "a_1"}).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = coll.DropIndex("a")
+ c.Assert(err, ErrorMatches, "index not found.*")
+}
+
+func (s *S) TestEnsureIndexDropIndexName(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.EnsureIndexKey("a")
+ c.Assert(err, IsNil)
+
+ err = coll.EnsureIndex(mgo.Index{Key: []string{"b"}, Name: "a"})
+ c.Assert(err, IsNil)
+
+ err = coll.DropIndexName("a")
+ c.Assert(err, IsNil)
+
+ sysidx := session.DB("mydb").C("system.indexes")
+
+ err = sysidx.Find(M{"name": "a_1"}).One(nil)
+ c.Assert(err, IsNil)
+
+ err = sysidx.Find(M{"name": "a"}).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = coll.DropIndexName("a_1")
+ c.Assert(err, IsNil)
+
+ err = sysidx.Find(M{"name": "a_1"}).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = coll.DropIndexName("a_1")
+ c.Assert(err, ErrorMatches, "index not found.*")
+}
+
+func (s *S) TestEnsureIndexCaching(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.EnsureIndexKey("a")
+ c.Assert(err, IsNil)
+
+ mgo.ResetStats()
+
+ // Second EnsureIndex should be cached and do nothing.
+ err = coll.EnsureIndexKey("a")
+ c.Assert(err, IsNil)
+
+ stats := mgo.GetStats()
+ c.Assert(stats.SentOps, Equals, 0)
+
+ // Resetting the cache should make it contact the server again.
+ session.ResetIndexCache()
+
+ err = coll.EnsureIndexKey("a")
+ c.Assert(err, IsNil)
+
+ stats = mgo.GetStats()
+ c.Assert(stats.SentOps > 0, Equals, true)
+
+ // Dropping the index should also drop the cached index key.
+ err = coll.DropIndex("a")
+ c.Assert(err, IsNil)
+
+ mgo.ResetStats()
+
+ err = coll.EnsureIndexKey("a")
+ c.Assert(err, IsNil)
+
+ stats = mgo.GetStats()
+ c.Assert(stats.SentOps > 0, Equals, true)
+}
+
+func (s *S) TestEnsureIndexGetIndexes(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.EnsureIndexKey("-b")
+ c.Assert(err, IsNil)
+
+ err = coll.EnsureIndexKey("a")
+ c.Assert(err, IsNil)
+
+ // Obsolete.
+ err = coll.EnsureIndexKey("@c")
+ c.Assert(err, IsNil)
+
+ err = coll.EnsureIndexKey("$2d:d")
+ c.Assert(err, IsNil)
+
+ // Try to exercise cursor logic. 2.8.0-rc3 still ignores this.
+ session.SetBatch(2)
+
+ indexes, err := coll.Indexes()
+ c.Assert(err, IsNil)
+
+ c.Assert(indexes[0].Name, Equals, "_id_")
+ c.Assert(indexes[1].Name, Equals, "a_1")
+ c.Assert(indexes[1].Key, DeepEquals, []string{"a"})
+ c.Assert(indexes[2].Name, Equals, "b_-1")
+ c.Assert(indexes[2].Key, DeepEquals, []string{"-b"})
+ c.Assert(indexes[3].Name, Equals, "c_2d")
+ c.Assert(indexes[3].Key, DeepEquals, []string{"$2d:c"})
+ c.Assert(indexes[4].Name, Equals, "d_2d")
+ c.Assert(indexes[4].Key, DeepEquals, []string{"$2d:d"})
+}
+
+func (s *S) TestEnsureIndexNameCaching(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.EnsureIndex(mgo.Index{Key: []string{"a"}, Name: "custom"})
+ c.Assert(err, IsNil)
+
+ mgo.ResetStats()
+
+ // Second EnsureIndex should be cached and do nothing.
+ err = coll.EnsureIndexKey("a")
+ c.Assert(err, IsNil)
+
+ err = coll.EnsureIndex(mgo.Index{Key: []string{"a"}, Name: "custom"})
+ c.Assert(err, IsNil)
+
+ stats := mgo.GetStats()
+ c.Assert(stats.SentOps, Equals, 0)
+
+ // Resetting the cache should make it contact the server again.
+ session.ResetIndexCache()
+
+ err = coll.EnsureIndex(mgo.Index{Key: []string{"a"}, Name: "custom"})
+ c.Assert(err, IsNil)
+
+ stats = mgo.GetStats()
+ c.Assert(stats.SentOps > 0, Equals, true)
+
+ // Dropping the index should also drop the cached index key.
+ err = coll.DropIndexName("custom")
+ c.Assert(err, IsNil)
+
+ mgo.ResetStats()
+
+ err = coll.EnsureIndex(mgo.Index{Key: []string{"a"}, Name: "custom"})
+ c.Assert(err, IsNil)
+
+ stats = mgo.GetStats()
+ c.Assert(stats.SentOps > 0, Equals, true)
+}
+
+func (s *S) TestEnsureIndexEvalGetIndexes(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = session.Run(bson.D{{"eval", "db.getSiblingDB('mydb').mycoll.ensureIndex({b: -1})"}}, nil)
+ c.Assert(err, IsNil)
+ err = session.Run(bson.D{{"eval", "db.getSiblingDB('mydb').mycoll.ensureIndex({a: 1})"}}, nil)
+ c.Assert(err, IsNil)
+ err = session.Run(bson.D{{"eval", "db.getSiblingDB('mydb').mycoll.ensureIndex({c: -1, e: 1})"}}, nil)
+ c.Assert(err, IsNil)
+ err = session.Run(bson.D{{"eval", "db.getSiblingDB('mydb').mycoll.ensureIndex({d: '2d'})"}}, nil)
+ c.Assert(err, IsNil)
+
+ indexes, err := coll.Indexes()
+ c.Assert(err, IsNil)
+
+ c.Assert(indexes[0].Name, Equals, "_id_")
+ c.Assert(indexes[1].Name, Equals, "a_1")
+ c.Assert(indexes[1].Key, DeepEquals, []string{"a"})
+ c.Assert(indexes[2].Name, Equals, "b_-1")
+ c.Assert(indexes[2].Key, DeepEquals, []string{"-b"})
+ c.Assert(indexes[3].Name, Equals, "c_-1_e_1")
+ c.Assert(indexes[3].Key, DeepEquals, []string{"-c", "e"})
+ if s.versionAtLeast(2, 2) {
+ c.Assert(indexes[4].Name, Equals, "d_2d")
+ c.Assert(indexes[4].Key, DeepEquals, []string{"$2d:d"})
+ } else {
+ c.Assert(indexes[4].Name, Equals, "d_")
+ c.Assert(indexes[4].Key, DeepEquals, []string{"$2d:d"})
+ }
+}
+
+var testTTL = flag.Bool("test-ttl", false, "test TTL collections (may take 1 minute)")
+
+func (s *S) TestEnsureIndexExpireAfter(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetSafe(nil)
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1, "t": time.Now().Add(-120 * time.Second)})
+ c.Assert(err, IsNil)
+ err = coll.Insert(M{"n": 2, "t": time.Now()})
+ c.Assert(err, IsNil)
+
+	// Create a TTL index that expires documents one minute after their "t" value.
+ index := mgo.Index{
+ Key: []string{"t"},
+ ExpireAfter: 1 * time.Minute,
+ }
+
+ err = coll.EnsureIndex(index)
+ c.Assert(err, IsNil)
+
+ indexes, err := coll.Indexes()
+ c.Assert(err, IsNil)
+ c.Assert(indexes[1].Name, Equals, "t_1")
+ c.Assert(indexes[1].ExpireAfter, Equals, 1*time.Minute)
+
+ if *testTTL {
+ worked := false
+ stop := time.Now().Add(70 * time.Second)
+ for time.Now().Before(stop) {
+ n, err := coll.Count()
+ c.Assert(err, IsNil)
+ if n == 1 {
+ worked = true
+ break
+ }
+ c.Assert(n, Equals, 2)
+ c.Logf("Still has 2 entries...")
+ time.Sleep(1 * time.Second)
+ }
+ if !worked {
+ c.Fatalf("TTL index didn't work")
+ }
+ }
+}
+
+func (s *S) TestDistinct(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for _, i := range []int{1, 4, 6, 2, 2, 3, 4} {
+ coll.Insert(M{"n": i})
+ }
+
+ var result []int
+ err = coll.Find(M{"n": M{"$gt": 2}}).Sort("n").Distinct("n", &result)
+
+ sort.IntSlice(result).Sort()
+ c.Assert(result, DeepEquals, []int{3, 4, 6})
+}
+
+func (s *S) TestMapReduce(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for _, i := range []int{1, 4, 6, 2, 2, 3, 4} {
+ coll.Insert(M{"n": i})
+ }
+
+ job := &mgo.MapReduce{
+ Map: "function() { emit(this.n, 1); }",
+ Reduce: "function(key, values) { return Array.sum(values); }",
+ }
+ var result []struct {
+ Id int "_id"
+ Value int
+ }
+
+ info, err := coll.Find(M{"n": M{"$gt": 2}}).MapReduce(job, &result)
+ c.Assert(err, IsNil)
+ c.Assert(info.InputCount, Equals, 4)
+ c.Assert(info.EmitCount, Equals, 4)
+ c.Assert(info.OutputCount, Equals, 3)
+ c.Assert(info.VerboseTime, IsNil)
+
+ expected := map[int]int{3: 1, 4: 2, 6: 1}
+ for _, item := range result {
+ c.Logf("Item: %#v", &item)
+ c.Assert(item.Value, Equals, expected[item.Id])
+ expected[item.Id] = -1
+ }
+}
+
+func (s *S) TestMapReduceFinalize(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for _, i := range []int{1, 4, 6, 2, 2, 3, 4} {
+ coll.Insert(M{"n": i})
+ }
+
+ job := &mgo.MapReduce{
+ Map: "function() { emit(this.n, 1) }",
+ Reduce: "function(key, values) { return Array.sum(values) }",
+ Finalize: "function(key, count) { return {count: count} }",
+ }
+ var result []struct {
+ Id int "_id"
+ Value struct{ Count int }
+ }
+ _, err = coll.Find(nil).MapReduce(job, &result)
+ c.Assert(err, IsNil)
+
+ expected := map[int]int{1: 1, 2: 2, 3: 1, 4: 2, 6: 1}
+ for _, item := range result {
+ c.Logf("Item: %#v", &item)
+ c.Assert(item.Value.Count, Equals, expected[item.Id])
+ expected[item.Id] = -1
+ }
+}
+
+func (s *S) TestMapReduceToCollection(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for _, i := range []int{1, 4, 6, 2, 2, 3, 4} {
+ coll.Insert(M{"n": i})
+ }
+
+ job := &mgo.MapReduce{
+ Map: "function() { emit(this.n, 1); }",
+ Reduce: "function(key, values) { return Array.sum(values); }",
+ Out: "mr",
+ }
+
+ info, err := coll.Find(nil).MapReduce(job, nil)
+ c.Assert(err, IsNil)
+ c.Assert(info.InputCount, Equals, 7)
+ c.Assert(info.EmitCount, Equals, 7)
+ c.Assert(info.OutputCount, Equals, 5)
+ c.Assert(info.Collection, Equals, "mr")
+ c.Assert(info.Database, Equals, "mydb")
+
+ expected := map[int]int{1: 1, 2: 2, 3: 1, 4: 2, 6: 1}
+ var item *struct {
+ Id int "_id"
+ Value int
+ }
+ mr := session.DB("mydb").C("mr")
+ iter := mr.Find(nil).Iter()
+ for iter.Next(&item) {
+ c.Logf("Item: %#v", &item)
+ c.Assert(item.Value, Equals, expected[item.Id])
+ expected[item.Id] = -1
+ }
+ c.Assert(iter.Close(), IsNil)
+}
+
+func (s *S) TestMapReduceToOtherDb(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for _, i := range []int{1, 4, 6, 2, 2, 3, 4} {
+ coll.Insert(M{"n": i})
+ }
+
+ job := &mgo.MapReduce{
+ Map: "function() { emit(this.n, 1); }",
+ Reduce: "function(key, values) { return Array.sum(values); }",
+ Out: bson.D{{"replace", "mr"}, {"db", "otherdb"}},
+ }
+
+ info, err := coll.Find(nil).MapReduce(job, nil)
+ c.Assert(err, IsNil)
+ c.Assert(info.InputCount, Equals, 7)
+ c.Assert(info.EmitCount, Equals, 7)
+ c.Assert(info.OutputCount, Equals, 5)
+ c.Assert(info.Collection, Equals, "mr")
+ c.Assert(info.Database, Equals, "otherdb")
+
+ expected := map[int]int{1: 1, 2: 2, 3: 1, 4: 2, 6: 1}
+ var item *struct {
+ Id int "_id"
+ Value int
+ }
+ mr := session.DB("otherdb").C("mr")
+ iter := mr.Find(nil).Iter()
+ for iter.Next(&item) {
+ c.Logf("Item: %#v", &item)
+ c.Assert(item.Value, Equals, expected[item.Id])
+ expected[item.Id] = -1
+ }
+ c.Assert(iter.Close(), IsNil)
+}
+
+func (s *S) TestMapReduceOutOfOrder(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for _, i := range []int{1, 4, 6, 2, 2, 3, 4} {
+ coll.Insert(M{"n": i})
+ }
+
+ job := &mgo.MapReduce{
+ Map: "function() { emit(this.n, 1); }",
+ Reduce: "function(key, values) { return Array.sum(values); }",
+ Out: bson.M{"a": "a", "z": "z", "replace": "mr", "db": "otherdb", "b": "b", "y": "y"},
+ }
+
+ info, err := coll.Find(nil).MapReduce(job, nil)
+ c.Assert(err, IsNil)
+ c.Assert(info.Collection, Equals, "mr")
+ c.Assert(info.Database, Equals, "otherdb")
+}
+
+func (s *S) TestMapReduceScope(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ coll.Insert(M{"n": 1})
+
+ job := &mgo.MapReduce{
+ Map: "function() { emit(this.n, x); }",
+ Reduce: "function(key, values) { return Array.sum(values); }",
+ Scope: M{"x": 42},
+ }
+
+ var result []bson.M
+ _, err = coll.Find(nil).MapReduce(job, &result)
+ c.Assert(len(result), Equals, 1)
+ c.Assert(result[0]["value"], Equals, 42.0)
+}
+
+func (s *S) TestMapReduceVerbose(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for i := 0; i < 100; i++ {
+ err = coll.Insert(M{"n": i})
+ c.Assert(err, IsNil)
+ }
+
+ job := &mgo.MapReduce{
+ Map: "function() { emit(this.n, 1); }",
+ Reduce: "function(key, values) { return Array.sum(values); }",
+ Verbose: true,
+ }
+
+ info, err := coll.Find(nil).MapReduce(job, nil)
+ c.Assert(err, IsNil)
+ c.Assert(info.VerboseTime, NotNil)
+}
+
+func (s *S) TestMapReduceLimit(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for _, i := range []int{1, 4, 6, 2, 2, 3, 4} {
+ coll.Insert(M{"n": i})
+ }
+
+ job := &mgo.MapReduce{
+ Map: "function() { emit(this.n, 1); }",
+ Reduce: "function(key, values) { return Array.sum(values); }",
+ }
+
+ var result []bson.M
+ _, err = coll.Find(nil).Limit(3).MapReduce(job, &result)
+ c.Assert(err, IsNil)
+ c.Assert(len(result), Equals, 3)
+}
+
+func (s *S) TestBuildInfo(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ info, err := session.BuildInfo()
+ c.Assert(err, IsNil)
+
+ var v []int
+ for i, a := range strings.Split(info.Version, ".") {
+ for _, token := range []string{"-rc", "-pre"} {
+ if i == 2 && strings.Contains(a, token) {
+ a = a[:strings.Index(a, token)]
+ info.VersionArray[len(info.VersionArray)-1] = 0
+ }
+ }
+ n, err := strconv.Atoi(a)
+ c.Assert(err, IsNil)
+ v = append(v, n)
+ }
+ for len(v) < 4 {
+ v = append(v, 0)
+ }
+
+ c.Assert(info.VersionArray, DeepEquals, v)
+ c.Assert(info.GitVersion, Matches, "[a-z0-9]+")
+
+ if s.versionAtLeast(3, 2) {
+ // It was deprecated in 3.2.
+ c.Assert(info.SysInfo, Equals, "")
+ } else {
+ c.Assert(info.SysInfo, Matches, ".*[0-9:]+.*")
+ }
+ if info.Bits != 32 && info.Bits != 64 {
+ c.Fatalf("info.Bits is %d", info.Bits)
+ }
+ if info.MaxObjectSize < 8192 {
+ c.Fatalf("info.MaxObjectSize seems too small: %d", info.MaxObjectSize)
+ }
+}
+
+func (s *S) TestZeroTimeRoundtrip(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ var d struct{ T time.Time }
+ conn := session.DB("mydb").C("mycoll")
+ err = conn.Insert(d)
+ c.Assert(err, IsNil)
+
+ var result bson.M
+ err = conn.Find(nil).One(&result)
+ c.Assert(err, IsNil)
+ t, isTime := result["t"].(time.Time)
+ c.Assert(isTime, Equals, true)
+ c.Assert(t, Equals, time.Time{})
+}
+
+func (s *S) TestFsyncLock(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ clone := session.Clone()
+ defer clone.Close()
+
+ err = session.FsyncLock()
+ c.Assert(err, IsNil)
+
+ done := make(chan time.Time)
+ go func() {
+ time.Sleep(3 * time.Second)
+ now := time.Now()
+ err := session.FsyncUnlock()
+ c.Check(err, IsNil)
+ done <- now
+ }()
+
+ err = clone.DB("mydb").C("mycoll").Insert(bson.M{"n": 1})
+ unlocked := time.Now()
+ unlocking := <-done
+ c.Assert(err, IsNil)
+
+ c.Assert(unlocked.After(unlocking), Equals, true)
+}
+
+func (s *S) TestFsync(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Not much to do here. Just a smoke check.
+ err = session.Fsync(false)
+ c.Assert(err, IsNil)
+ err = session.Fsync(true)
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestRepairCursor(c *C) {
+ if !s.versionAtLeast(2, 7) {
+ c.Skip("RepairCursor only works on 2.7+")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ session.SetBatch(2)
+
+ coll := session.DB("mydb").C("mycoll3")
+ err = coll.DropCollection()
+
+ ns := []int{0, 10, 20, 30, 40, 50}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ repairIter := coll.Repair()
+
+ c.Assert(repairIter.Err(), IsNil)
+
+ result := struct{ N int }{}
+ resultCounts := map[int]int{}
+ for repairIter.Next(&result) {
+ resultCounts[result.N]++
+ }
+
+ c.Assert(repairIter.Next(&result), Equals, false)
+ c.Assert(repairIter.Err(), IsNil)
+ c.Assert(repairIter.Close(), IsNil)
+
+ // Verify that the results of the repair cursor are valid.
+ // The repair cursor can return multiple copies
+ // of the same document, so to check correctness we only
+ // need to verify that at least 1 of each document was returned.
+
+ for _, key := range ns {
+ c.Assert(resultCounts[key] > 0, Equals, true)
+ }
+}
+
+func (s *S) TestPipeIter(c *C) {
+ if !s.versionAtLeast(2, 1) {
+ c.Skip("Pipe only works on 2.1+")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ pipe := coll.Pipe([]M{{"$match": M{"n": M{"$gte": 42}}}})
+
+ // Ensure cursor logic is working by forcing a small batch.
+ pipe.Batch(2)
+
+ // Smoke test for AllowDiskUse.
+ pipe.AllowDiskUse()
+
+ iter := pipe.Iter()
+ result := struct{ N int }{}
+ for i := 2; i < 7; i++ {
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(result.N, Equals, ns[i])
+ }
+
+ c.Assert(iter.Next(&result), Equals, false)
+ c.Assert(iter.Close(), IsNil)
+}
+
+func (s *S) TestPipeAll(c *C) {
+ if !s.versionAtLeast(2, 1) {
+ c.Skip("Pipe only works on 2.1+")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err := coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ var result []struct{ N int }
+ err = coll.Pipe([]M{{"$match": M{"n": M{"$gte": 42}}}}).All(&result)
+ c.Assert(err, IsNil)
+ for i := 2; i < 7; i++ {
+ c.Assert(result[i-2].N, Equals, ns[i])
+ }
+}
+
+func (s *S) TestPipeOne(c *C) {
+ if !s.versionAtLeast(2, 1) {
+ c.Skip("Pipe only works on 2.1+")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ coll.Insert(M{"a": 1, "b": 2})
+
+ result := struct{ A, B int }{}
+
+ pipe := coll.Pipe([]M{{"$project": M{"a": 1, "b": M{"$add": []interface{}{"$b", 1}}}}})
+ err = pipe.One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.A, Equals, 1)
+ c.Assert(result.B, Equals, 3)
+
+ pipe = coll.Pipe([]M{{"$match": M{"a": 2}}})
+ err = pipe.One(&result)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+}
+
+func (s *S) TestPipeExplain(c *C) {
+ if !s.versionAtLeast(2, 1) {
+ c.Skip("Pipe only works on 2.1+")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ coll.Insert(M{"a": 1, "b": 2})
+
+ pipe := coll.Pipe([]M{{"$project": M{"a": 1, "b": M{"$add": []interface{}{"$b", 1}}}}})
+
+ // The explain command result changes across versions.
+ var result struct{ Ok int }
+ err = pipe.Explain(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Ok, Equals, 1)
+}
+
+func (s *S) TestBatch1Bug(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for i := 0; i < 3; i++ {
+ err := coll.Insert(M{"n": i})
+ c.Assert(err, IsNil)
+ }
+
+ var ns []struct{ N int }
+ err = coll.Find(nil).Batch(1).All(&ns)
+ c.Assert(err, IsNil)
+ c.Assert(len(ns), Equals, 3)
+
+ session.SetBatch(1)
+ err = coll.Find(nil).All(&ns)
+ c.Assert(err, IsNil)
+ c.Assert(len(ns), Equals, 3)
+}
+
+func (s *S) TestInterfaceIterBug(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ for i := 0; i < 3; i++ {
+ err := coll.Insert(M{"n": i})
+ c.Assert(err, IsNil)
+ }
+
+ var result interface{}
+
+ i := 0
+ iter := coll.Find(nil).Sort("n").Iter()
+ for iter.Next(&result) {
+ c.Assert(result.(bson.M)["n"], Equals, i)
+ i++
+ }
+ c.Assert(iter.Close(), IsNil)
+}
+
+func (s *S) TestFindIterCloseKillsCursor(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ cursors := serverCursorsOpen(session)
+
+ coll := session.DB("mydb").C("mycoll")
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ err = coll.Insert(M{"n": n})
+ c.Assert(err, IsNil)
+ }
+
+ iter := coll.Find(nil).Batch(2).Iter()
+ c.Assert(iter.Next(bson.M{}), Equals, true)
+
+ c.Assert(iter.Close(), IsNil)
+ c.Assert(serverCursorsOpen(session), Equals, cursors)
+}
+
+func (s *S) TestFindIterDoneWithBatches(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ ns := []int{40, 41, 42, 43, 44, 45, 46}
+ for _, n := range ns {
+ coll.Insert(M{"n": n})
+ }
+
+ iter := coll.Find(M{"n": M{"$gte": 42}}).Sort("$natural").Prefetch(0).Batch(2).Iter()
+ result := struct{ N int }{}
+ for i := 2; i < 7; i++ {
+		// The first check runs with a pending local record; the second
+		// runs with an open cursor ID but no local records.
+ c.Assert(iter.Done(), Equals, false)
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true, Commentf("err=%v", err))
+ }
+
+ c.Assert(iter.Done(), Equals, true)
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Close(), IsNil)
+}
+
+func (s *S) TestFindIterDoneErr(c *C) {
+ session, err := mgo.Dial("localhost:40002")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ iter := coll.Find(nil).Iter()
+
+ result := struct{}{}
+ ok := iter.Next(&result)
+ c.Assert(iter.Done(), Equals, true)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Err(), ErrorMatches, "unauthorized.*|not authorized.*")
+}
+
+func (s *S) TestFindIterDoneNotFound(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ result := struct{ A, B int }{}
+ iter := coll.Find(M{"a": 1}).Iter()
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, false)
+ c.Assert(iter.Done(), Equals, true)
+}
+
+func (s *S) TestLogReplay(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ for i := 0; i < 5; i++ {
+ err = coll.Insert(M{"ts": time.Now()})
+ c.Assert(err, IsNil)
+ }
+
+ iter := coll.Find(nil).LogReplay().Iter()
+ if s.versionAtLeast(2, 6) {
+ // This used to fail in 2.4. Now it's just a smoke test.
+ c.Assert(iter.Err(), IsNil)
+ } else {
+ c.Assert(iter.Next(bson.M{}), Equals, false)
+ c.Assert(iter.Err(), ErrorMatches, "no ts field in query")
+ }
+}
+
+func (s *S) TestSetCursorTimeout(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+	err = coll.Insert(M{"n": 42})
+	c.Assert(err, IsNil)
+
+ // This is just a smoke test. Won't wait 10 minutes for an actual timeout.
+
+ session.SetCursorTimeout(0)
+
+ var result struct{ N int }
+ iter := coll.Find(nil).Iter()
+ c.Assert(iter.Next(&result), Equals, true)
+ c.Assert(result.N, Equals, 42)
+ c.Assert(iter.Next(&result), Equals, false)
+}
+
+func (s *S) TestNewIterNoServer(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+	data, err := bson.Marshal(bson.M{"a": 1})
+	c.Assert(err, IsNil)
+
+ coll := session.DB("mydb").C("mycoll")
+ iter := coll.NewIter(nil, []bson.Raw{{3, data}}, 42, nil)
+
+ var result struct{ A int }
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(result.A, Equals, 1)
+
+ ok = iter.Next(&result)
+ c.Assert(ok, Equals, false)
+
+ c.Assert(iter.Err(), ErrorMatches, "server not available")
+}
+
+func (s *S) TestNewIterNoServerPresetErr(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+	data, err := bson.Marshal(bson.M{"a": 1})
+	c.Assert(err, IsNil)
+
+ coll := session.DB("mydb").C("mycoll")
+ iter := coll.NewIter(nil, []bson.Raw{{3, data}}, 42, fmt.Errorf("my error"))
+
+ var result struct{ A int }
+ ok := iter.Next(&result)
+ c.Assert(ok, Equals, true)
+ c.Assert(result.A, Equals, 1)
+
+ ok = iter.Next(&result)
+ c.Assert(ok, Equals, false)
+
+ c.Assert(iter.Err(), ErrorMatches, "my error")
+}
+
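+// SetBypassValidation should let inserts and updates skip the collection's
+// document validator, while removes are unaffected by it.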
+func (s *S) TestBypassValidation(c *C) {
+ if !s.versionAtLeast(3, 2) {
+ c.Skip("validation supported on 3.2+")
+ }
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"n": 1})
+ c.Assert(err, IsNil)
+
+ err = coll.Database.Run(bson.D{
+ {"collMod", "mycoll"},
+ {"validator", M{"s": M{"$type": "string"}}},
+ }, nil)
+ c.Assert(err, IsNil)
+
+ err = coll.Insert(M{"n": 2})
+ c.Assert(err, ErrorMatches, "Document failed validation")
+
+ err = coll.Update(M{"n": 1}, M{"n": 10})
+ c.Assert(err, ErrorMatches, "Document failed validation")
+
+ session.SetBypassValidation(true)
+
+ err = coll.Insert(M{"n": 3})
+ c.Assert(err, IsNil)
+
+ err = coll.Update(M{"n": 3}, M{"n": 4})
+ c.Assert(err, IsNil)
+
+ // Ensure this still works. Shouldn't be affected.
+ err = coll.Remove(M{"n": 1})
+ c.Assert(err, IsNil)
+
+ var result struct{ N int }
+ var ns []int
+ iter := coll.Find(nil).Iter()
+ for iter.Next(&result) {
+ ns = append(ns, result.N)
+ }
+ c.Assert(iter.Err(), IsNil)
+ sort.Ints(ns)
+ c.Assert(ns, DeepEquals, []int{4})
+}
+
+func (s *S) TestVersionAtLeast(c *C) {
+	tests := [][][]int{
+		{{3, 2, 1}, {3, 2, 0}},
+		{{3, 2, 1}, {3, 2}},
+		{{3, 2, 1}, {2, 5, 5, 5}},
+		{{3, 2, 1}, {2, 5, 5}},
+		{{3, 2, 1}, {2, 5}},
+	}
+ for _, pair := range tests {
+ bi := mgo.BuildInfo{VersionArray: pair[0]}
+ c.Assert(bi.VersionAtLeast(pair[1]...), Equals, true)
+
+ bi = mgo.BuildInfo{VersionArray: pair[0]}
+ c.Assert(bi.VersionAtLeast(pair[0]...), Equals, true)
+
+ bi = mgo.BuildInfo{VersionArray: pair[1]}
+ c.Assert(bi.VersionAtLeast(pair[1]...), Equals, true)
+
+ bi = mgo.BuildInfo{VersionArray: pair[1]}
+ c.Assert(bi.VersionAtLeast(pair[0]...), Equals, false)
+ }
+}
+
+// --------------------------------------------------------------------------
+// Some benchmarks that require a running database.
+
+func (s *S) BenchmarkFindIterRaw(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ doc := bson.D{
+ {"f2", "a short string"},
+ {"f3", bson.D{{"1", "one"}, {"2", 2.0}}},
+ {"f4", []string{"a", "b", "c", "d", "e", "f", "g"}},
+ }
+
+ for i := 0; i < c.N+1; i++ {
+ err := coll.Insert(doc)
+ c.Assert(err, IsNil)
+ }
+
+ session.SetBatch(c.N)
+
+ var raw bson.Raw
+ iter := coll.Find(nil).Iter()
+ iter.Next(&raw)
+ c.ResetTimer()
+ i := 0
+ for iter.Next(&raw) {
+ i++
+ }
+ c.StopTimer()
+ c.Assert(iter.Err(), IsNil)
+ c.Assert(i, Equals, c.N)
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/socket.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/socket.go
new file mode 100644
index 00000000000..8891dd5d734
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/socket.go
@@ -0,0 +1,707 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo
+
+import (
+ "errors"
+ "fmt"
+ "net"
+ "sync"
+ "time"
+
+ "gopkg.in/mgo.v2/bson"
+)
+
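+// replyFunc is the callback used by the socket's read loop to deliver the
+// outcome of an operation: the reply header and each returned document's raw
+// BSON data, or an error.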
+type replyFunc func(err error, reply *replyOp, docNum int, docData []byte)
+
+type mongoSocket struct {
+ sync.Mutex
+ server *mongoServer // nil when cached
+ conn net.Conn
+ timeout time.Duration
+ addr string // For debugging only.
+ nextRequestId uint32
+ replyFuncs map[uint32]replyFunc
+ references int
+ creds []Credential
+ logout []Credential
+ cachedNonce string
+ gotNonce sync.Cond
+ dead error
+ serverInfo *mongoServerInfo
+}
+
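+// queryOpFlags holds the flag bits of the legacy OP_QUERY wire message;
+// bit 0 is reserved, hence the blank first constant below.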
+type queryOpFlags uint32
+
+const (
+ _ queryOpFlags = 1 << iota
+ flagTailable
+ flagSlaveOk
+ flagLogReplay
+ flagNoCursorTimeout
+ flagAwaitData
+)
+
+type queryOp struct {
+ collection string
+ query interface{}
+ skip int32
+ limit int32
+ selector interface{}
+ flags queryOpFlags
+ replyFunc replyFunc
+
+ mode Mode
+ options queryWrapper
+ hasOptions bool
+ serverTags []bson.D
+}
+
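+// queryWrapper is the $query envelope used when modifiers such as $orderby,
+// $hint, $explain or $readPreference must travel along with the query itself.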
+type queryWrapper struct {
+ Query interface{} "$query"
+ OrderBy interface{} "$orderby,omitempty"
+ Hint interface{} "$hint,omitempty"
+ Explain bool "$explain,omitempty"
+ Snapshot bool "$snapshot,omitempty"
+ ReadPreference bson.D "$readPreference,omitempty"
+ MaxScan int "$maxScan,omitempty"
+ MaxTimeMS int "$maxTimeMS,omitempty"
+ Comment string "$comment,omitempty"
+}
+
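+// finalQuery returns the document to put on the wire, wrapping op.query with
+// the $-prefixed options when any apply. For illustration (editor's sketch),
+// a slaveOk query routed through a mongos in Secondary mode ends up shaped
+// like:
+//
+//	{"$query": <original query>, "$readPreference": {"mode": "secondary", "tags": [...]}}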
+func (op *queryOp) finalQuery(socket *mongoSocket) interface{} {
+ if op.flags&flagSlaveOk != 0 && socket.ServerInfo().Mongos {
+ var modeName string
+ switch op.mode {
+ case Strong:
+ modeName = "primary"
+ case Monotonic, Eventual:
+ modeName = "secondaryPreferred"
+ case PrimaryPreferred:
+ modeName = "primaryPreferred"
+ case Secondary:
+ modeName = "secondary"
+ case SecondaryPreferred:
+ modeName = "secondaryPreferred"
+ case Nearest:
+ modeName = "nearest"
+ default:
+ panic(fmt.Sprintf("unsupported read mode: %d", op.mode))
+ }
+ op.hasOptions = true
+ op.options.ReadPreference = make(bson.D, 0, 2)
+ op.options.ReadPreference = append(op.options.ReadPreference, bson.DocElem{"mode", modeName})
+ if len(op.serverTags) > 0 {
+ op.options.ReadPreference = append(op.options.ReadPreference, bson.DocElem{"tags", op.serverTags})
+ }
+ }
+ if op.hasOptions {
+ if op.query == nil {
+ var empty bson.D
+ op.options.Query = empty
+ } else {
+ op.options.Query = op.query
+ }
+ debugf("final query is %#v\n", &op.options)
+ return &op.options
+ }
+ return op.query
+}
+
+type getMoreOp struct {
+ collection string
+ limit int32
+ cursorId int64
+ replyFunc replyFunc
+}
+
+type replyOp struct {
+ flags uint32
+ cursorId int64
+ firstDoc int32
+ replyDocs int32
+}
+
+type insertOp struct {
+ collection string // "database.collection"
+ documents []interface{} // One or more documents to insert
+ flags uint32
+}
+
+type updateOp struct {
+ Collection string `bson:"-"` // "database.collection"
+ Selector interface{} `bson:"q"`
+ Update interface{} `bson:"u"`
+ Flags uint32 `bson:"-"`
+ Multi bool `bson:"multi,omitempty"`
+ Upsert bool `bson:"upsert,omitempty"`
+}
+
+type deleteOp struct {
+ Collection string `bson:"-"` // "database.collection"
+ Selector interface{} `bson:"q"`
+ Flags uint32 `bson:"-"`
+ Limit int `bson:"limit"`
+}
+
+type killCursorsOp struct {
+ cursorIds []int64
+}
+
+type requestInfo struct {
+ bufferPos int
+ replyFunc replyFunc
+}
+
+func newSocket(server *mongoServer, conn net.Conn, timeout time.Duration) *mongoSocket {
+ socket := &mongoSocket{
+ conn: conn,
+ addr: server.Addr,
+ server: server,
+ replyFuncs: make(map[uint32]replyFunc),
+ }
+ socket.gotNonce.L = &socket.Mutex
+ if err := socket.InitialAcquire(server.Info(), timeout); err != nil {
+ panic("newSocket: InitialAcquire returned error: " + err.Error())
+ }
+ stats.socketsAlive(+1)
+ debugf("Socket %p to %s: initialized", socket, socket.addr)
+ socket.resetNonce()
+ go socket.readLoop()
+ return socket
+}
+
+// Server returns the server that the socket is associated with.
+// It returns nil while the socket is cached in its respective server.
+func (socket *mongoSocket) Server() *mongoServer {
+ socket.Lock()
+ server := socket.server
+ socket.Unlock()
+ return server
+}
+
+// ServerInfo returns details for the server at the time the socket
+// was initially acquired.
+func (socket *mongoSocket) ServerInfo() *mongoServerInfo {
+ socket.Lock()
+ serverInfo := socket.serverInfo
+ socket.Unlock()
+ return serverInfo
+}
+
+// InitialAcquire obtains the first reference to the socket, either
+// right after the connection is made or once a recycled socket is
+// being put back in use.
+func (socket *mongoSocket) InitialAcquire(serverInfo *mongoServerInfo, timeout time.Duration) error {
+ socket.Lock()
+ if socket.references > 0 {
+ panic("Socket acquired out of cache with references")
+ }
+ if socket.dead != nil {
+ dead := socket.dead
+ socket.Unlock()
+ return dead
+ }
+ socket.references++
+ socket.serverInfo = serverInfo
+ socket.timeout = timeout
+ stats.socketsInUse(+1)
+ stats.socketRefs(+1)
+ socket.Unlock()
+ return nil
+}
+
+// Acquire obtains an additional reference to the socket.
+// The socket will only be recycled when it's released as many
+// times as it's been acquired.
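+//
+// A typical lifecycle, for illustration: newSocket performs the initial
+// acquire (references == 1), each extra Acquire bumps the count, and each
+// Release drops it; when it reaches zero the socket is logged out and
+// recycled into its server.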
+func (socket *mongoSocket) Acquire() (info *mongoServerInfo) {
+ socket.Lock()
+ if socket.references == 0 {
+ panic("Socket got non-initial acquire with references == 0")
+ }
+ // We'll track references to dead sockets as well.
+ // Caller is still supposed to release the socket.
+ socket.references++
+ stats.socketRefs(+1)
+ serverInfo := socket.serverInfo
+ socket.Unlock()
+ return serverInfo
+}
+
+// Release decrements a socket reference. The socket will be
+// recycled once it's released as many times as it's been acquired.
+func (socket *mongoSocket) Release() {
+ socket.Lock()
+ if socket.references == 0 {
+ panic("socket.Release() with references == 0")
+ }
+ socket.references--
+ stats.socketRefs(-1)
+ if socket.references == 0 {
+ stats.socketsInUse(-1)
+ server := socket.server
+ socket.Unlock()
+ socket.LogoutAll()
+		// If the socket is dead, server is nil.
+ if server != nil {
+ server.RecycleSocket(socket)
+ }
+ } else {
+ socket.Unlock()
+ }
+}
+
+// SetTimeout changes the timeout used on socket operations.
+func (socket *mongoSocket) SetTimeout(d time.Duration) {
+ socket.Lock()
+ socket.timeout = d
+ socket.Unlock()
+}
+
+type deadlineType int
+
+const (
+ readDeadline deadlineType = 1
+ writeDeadline deadlineType = 2
+)
+
+func (socket *mongoSocket) updateDeadline(which deadlineType) {
+ var when time.Time
+ if socket.timeout > 0 {
+ when = time.Now().Add(socket.timeout)
+ }
+ whichstr := ""
+ switch which {
+ case readDeadline | writeDeadline:
+ whichstr = "read/write"
+ socket.conn.SetDeadline(when)
+ case readDeadline:
+ whichstr = "read"
+ socket.conn.SetReadDeadline(when)
+ case writeDeadline:
+ whichstr = "write"
+ socket.conn.SetWriteDeadline(when)
+ default:
+ panic("invalid parameter to updateDeadline")
+ }
+ debugf("Socket %p to %s: updated %s deadline to %s ahead (%s)", socket, socket.addr, whichstr, socket.timeout, when)
+}
+
+// Close terminates the socket use.
+func (socket *mongoSocket) Close() {
+ socket.kill(errors.New("Closed explicitly"), false)
+}
+
+func (socket *mongoSocket) kill(err error, abend bool) {
+ socket.Lock()
+ if socket.dead != nil {
+ debugf("Socket %p to %s: killed again: %s (previously: %s)", socket, socket.addr, err.Error(), socket.dead.Error())
+ socket.Unlock()
+ return
+ }
+ logf("Socket %p to %s: closing: %s (abend=%v)", socket, socket.addr, err.Error(), abend)
+ socket.dead = err
+ socket.conn.Close()
+ stats.socketsAlive(-1)
+ replyFuncs := socket.replyFuncs
+ socket.replyFuncs = make(map[uint32]replyFunc)
+ server := socket.server
+ socket.server = nil
+ socket.gotNonce.Broadcast()
+ socket.Unlock()
+ for _, replyFunc := range replyFuncs {
+ logf("Socket %p to %s: notifying replyFunc of closed socket: %s", socket, socket.addr, err.Error())
+ replyFunc(err, nil, -1, nil)
+ }
+ if abend {
+ server.AbendSocket(socket)
+ }
+}
+
+func (socket *mongoSocket) SimpleQuery(op *queryOp) (data []byte, err error) {
+ var wait, change sync.Mutex
+ var replyDone bool
+ var replyData []byte
+ var replyErr error
+ wait.Lock()
+ op.replyFunc = func(err error, reply *replyOp, docNum int, docData []byte) {
+ change.Lock()
+ if !replyDone {
+ replyDone = true
+ replyErr = err
+ if err == nil {
+ replyData = docData
+ }
+ }
+ change.Unlock()
+ wait.Unlock()
+ }
+ err = socket.Query(op)
+ if err != nil {
+ return nil, err
+ }
+ wait.Lock()
+ change.Lock()
+ data = replyData
+ err = replyErr
+ change.Unlock()
+ return data, err
+}
+
+func (socket *mongoSocket) Query(ops ...interface{}) (err error) {
+
+ if lops := socket.flushLogout(); len(lops) > 0 {
+ ops = append(lops, ops...)
+ }
+
+ buf := make([]byte, 0, 256)
+
+ // Serialize operations synchronously to avoid interrupting
+ // other goroutines while we can't really be sending data.
+ // Also, record id positions so that we can compute request
+ // ids at once later with the lock already held.
+ requests := make([]requestInfo, len(ops))
+ requestCount := 0
+
+ for _, op := range ops {
+ debugf("Socket %p to %s: serializing op: %#v", socket, socket.addr, op)
+ if qop, ok := op.(*queryOp); ok {
+ if cmd, ok := qop.query.(*findCmd); ok {
+ debugf("Socket %p to %s: find command: %#v", socket, socket.addr, cmd)
+ }
+ }
+ start := len(buf)
+ var replyFunc replyFunc
+ switch op := op.(type) {
+
+ case *updateOp:
+ buf = addHeader(buf, 2001)
+ buf = addInt32(buf, 0) // Reserved
+ buf = addCString(buf, op.Collection)
+ buf = addInt32(buf, int32(op.Flags))
+ debugf("Socket %p to %s: serializing selector document: %#v", socket, socket.addr, op.Selector)
+ buf, err = addBSON(buf, op.Selector)
+ if err != nil {
+ return err
+ }
+ debugf("Socket %p to %s: serializing update document: %#v", socket, socket.addr, op.Update)
+ buf, err = addBSON(buf, op.Update)
+ if err != nil {
+ return err
+ }
+
+ case *insertOp:
+ buf = addHeader(buf, 2002)
+ buf = addInt32(buf, int32(op.flags))
+ buf = addCString(buf, op.collection)
+ for _, doc := range op.documents {
+ debugf("Socket %p to %s: serializing document for insertion: %#v", socket, socket.addr, doc)
+ buf, err = addBSON(buf, doc)
+ if err != nil {
+ return err
+ }
+ }
+
+ case *queryOp:
+ buf = addHeader(buf, 2004)
+ buf = addInt32(buf, int32(op.flags))
+ buf = addCString(buf, op.collection)
+ buf = addInt32(buf, op.skip)
+ buf = addInt32(buf, op.limit)
+ buf, err = addBSON(buf, op.finalQuery(socket))
+ if err != nil {
+ return err
+ }
+ if op.selector != nil {
+ buf, err = addBSON(buf, op.selector)
+ if err != nil {
+ return err
+ }
+ }
+ replyFunc = op.replyFunc
+
+ case *getMoreOp:
+ buf = addHeader(buf, 2005)
+ buf = addInt32(buf, 0) // Reserved
+ buf = addCString(buf, op.collection)
+ buf = addInt32(buf, op.limit)
+ buf = addInt64(buf, op.cursorId)
+ replyFunc = op.replyFunc
+
+ case *deleteOp:
+ buf = addHeader(buf, 2006)
+ buf = addInt32(buf, 0) // Reserved
+ buf = addCString(buf, op.Collection)
+ buf = addInt32(buf, int32(op.Flags))
+ debugf("Socket %p to %s: serializing selector document: %#v", socket, socket.addr, op.Selector)
+ buf, err = addBSON(buf, op.Selector)
+ if err != nil {
+ return err
+ }
+
+ case *killCursorsOp:
+ buf = addHeader(buf, 2007)
+ buf = addInt32(buf, 0) // Reserved
+ buf = addInt32(buf, int32(len(op.cursorIds)))
+ for _, cursorId := range op.cursorIds {
+ buf = addInt64(buf, cursorId)
+ }
+
+ default:
+ panic("internal error: unknown operation type")
+ }
+
+ setInt32(buf, start, int32(len(buf)-start))
+
+ if replyFunc != nil {
+ request := &requests[requestCount]
+ request.replyFunc = replyFunc
+ request.bufferPos = start
+ requestCount++
+ }
+ }
+
+ // Buffer is ready for the pipe. Lock, allocate ids, and enqueue.
+
+ socket.Lock()
+ if socket.dead != nil {
+ dead := socket.dead
+ socket.Unlock()
+		debugf("Socket %p to %s: failing query, already closed: %s", socket, socket.addr, dead.Error())
+ // XXX This seems necessary in case the session is closed concurrently
+ // with a query being performed, but it's not yet tested:
+ for i := 0; i != requestCount; i++ {
+ request := &requests[i]
+ if request.replyFunc != nil {
+ request.replyFunc(dead, nil, -1, nil)
+ }
+ }
+ return dead
+ }
+
+ wasWaiting := len(socket.replyFuncs) > 0
+
+ // Reserve id 0 for requests which should have no responses.
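+	// For illustration: with nextRequestId == 7 and two ops carrying reply
+	// functions, the ops get ids 8 and 9 and nextRequestId advances to 10.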
+ requestId := socket.nextRequestId + 1
+ if requestId == 0 {
+ requestId++
+ }
+ socket.nextRequestId = requestId + uint32(requestCount)
+ for i := 0; i != requestCount; i++ {
+ request := &requests[i]
+ setInt32(buf, request.bufferPos+4, int32(requestId))
+ socket.replyFuncs[requestId] = request.replyFunc
+ requestId++
+ }
+
+ debugf("Socket %p to %s: sending %d op(s) (%d bytes)", socket, socket.addr, len(ops), len(buf))
+ stats.sentOps(len(ops))
+
+ socket.updateDeadline(writeDeadline)
+ _, err = socket.conn.Write(buf)
+ if !wasWaiting && requestCount > 0 {
+ socket.updateDeadline(readDeadline)
+ }
+ socket.Unlock()
+ return err
+}
+
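+// fill reads from r until b is completely filled or a read fails, similar in
+// spirit to io.ReadFull.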
+func fill(r net.Conn, b []byte) error {
+ l := len(b)
+ n, err := r.Read(b)
+ for n != l && err == nil {
+ var ni int
+ ni, err = r.Read(b[n:])
+ n += ni
+ }
+ return err
+}
+
+// Estimated minimum cost per socket: 1 goroutine + memory for the largest
+// document ever seen.
+func (socket *mongoSocket) readLoop() {
+ p := make([]byte, 36) // 16 from header + 20 from OP_REPLY fixed fields
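+	// Layout of p, for reference: the standard header holds messageLength at
+	// byte 0, requestID at 4, responseTo at 8 and opCode at 12; the OP_REPLY
+	// fields follow with responseFlags at 16, cursorID at 20, startingFrom at
+	// 28 and numberReturned at 32.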
+ s := make([]byte, 4)
+ conn := socket.conn // No locking, conn never changes.
+ for {
+ err := fill(conn, p)
+ if err != nil {
+ socket.kill(err, true)
+ return
+ }
+
+ totalLen := getInt32(p, 0)
+ responseTo := getInt32(p, 8)
+ opCode := getInt32(p, 12)
+
+ // Don't use socket.server.Addr here. socket is not
+ // locked and socket.server may go away.
+ debugf("Socket %p to %s: got reply (%d bytes)", socket, socket.addr, totalLen)
+
+ _ = totalLen
+
+ if opCode != 1 {
+ socket.kill(errors.New("opcode != 1, corrupted data?"), true)
+ return
+ }
+
+ reply := replyOp{
+ flags: uint32(getInt32(p, 16)),
+ cursorId: getInt64(p, 20),
+ firstDoc: getInt32(p, 28),
+ replyDocs: getInt32(p, 32),
+ }
+
+ stats.receivedOps(+1)
+ stats.receivedDocs(int(reply.replyDocs))
+
+ socket.Lock()
+ replyFunc, ok := socket.replyFuncs[uint32(responseTo)]
+ if ok {
+ delete(socket.replyFuncs, uint32(responseTo))
+ }
+ socket.Unlock()
+
+ if replyFunc != nil && reply.replyDocs == 0 {
+ replyFunc(nil, &reply, -1, nil)
+ } else {
+ for i := 0; i != int(reply.replyDocs); i++ {
+ err := fill(conn, s)
+ if err != nil {
+ if replyFunc != nil {
+ replyFunc(err, nil, -1, nil)
+ }
+ socket.kill(err, true)
+ return
+ }
+
+ b := make([]byte, int(getInt32(s, 0)))
+
+				// Manual copy of the four length-prefix bytes from s into b
+				// (avoids a copy call for a fixed four bytes).
+ b[0] = s[0]
+ b[1] = s[1]
+ b[2] = s[2]
+ b[3] = s[3]
+
+ err = fill(conn, b[4:])
+ if err != nil {
+ if replyFunc != nil {
+ replyFunc(err, nil, -1, nil)
+ }
+ socket.kill(err, true)
+ return
+ }
+
+ if globalDebug && globalLogger != nil {
+ m := bson.M{}
+ if err := bson.Unmarshal(b, m); err == nil {
+ debugf("Socket %p to %s: received document: %#v", socket, socket.addr, m)
+ }
+ }
+
+ if replyFunc != nil {
+ replyFunc(nil, &reply, i, b)
+ }
+
+ // XXX Do bound checking against totalLen.
+ }
+ }
+
+ socket.Lock()
+ if len(socket.replyFuncs) == 0 {
+ // Nothing else to read for now. Disable deadline.
+ socket.conn.SetReadDeadline(time.Time{})
+ } else {
+ socket.updateDeadline(readDeadline)
+ }
+ socket.Unlock()
+
+ // XXX Do bound checking against totalLen.
+ }
+}
+
+var emptyHeader = []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
+
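+// addHeader appends a zeroed 16-byte message header and stores the opcode in
+// its low two bytes; the message length and request id slots are filled in
+// later via setInt32.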
+func addHeader(b []byte, opcode int) []byte {
+ i := len(b)
+ b = append(b, emptyHeader...)
+ // Enough for current opcodes.
+ b[i+12] = byte(opcode)
+ b[i+13] = byte(opcode >> 8)
+ return b
+}
+
+func addInt32(b []byte, i int32) []byte {
+ return append(b, byte(i), byte(i>>8), byte(i>>16), byte(i>>24))
+}
+
+func addInt64(b []byte, i int64) []byte {
+ return append(b, byte(i), byte(i>>8), byte(i>>16), byte(i>>24),
+ byte(i>>32), byte(i>>40), byte(i>>48), byte(i>>56))
+}
+
+func addCString(b []byte, s string) []byte {
+ b = append(b, []byte(s)...)
+ b = append(b, 0)
+ return b
+}
+
+func addBSON(b []byte, doc interface{}) ([]byte, error) {
+ if doc == nil {
+ return append(b, 5, 0, 0, 0, 0), nil
+ }
+ data, err := bson.Marshal(doc)
+ if err != nil {
+ return b, err
+ }
+ return append(b, data...), nil
+}
+
+func setInt32(b []byte, pos int, i int32) {
+ b[pos] = byte(i)
+ b[pos+1] = byte(i >> 8)
+ b[pos+2] = byte(i >> 16)
+ b[pos+3] = byte(i >> 24)
+}
+
+func getInt32(b []byte, pos int) int32 {
+ return (int32(b[pos+0])) |
+ (int32(b[pos+1]) << 8) |
+ (int32(b[pos+2]) << 16) |
+ (int32(b[pos+3]) << 24)
+}
+
+func getInt64(b []byte, pos int) int64 {
+ return (int64(b[pos+0])) |
+ (int64(b[pos+1]) << 8) |
+ (int64(b[pos+2]) << 16) |
+ (int64(b[pos+3]) << 24) |
+ (int64(b[pos+4]) << 32) |
+ (int64(b[pos+5]) << 40) |
+ (int64(b[pos+6]) << 48) |
+ (int64(b[pos+7]) << 56)
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/stats.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/stats.go
new file mode 100644
index 00000000000..59723e60c2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/stats.go
@@ -0,0 +1,147 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo
+
+import (
+ "sync"
+)
+
+var stats *Stats
+var statsMutex sync.Mutex
+
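+// SetStats enables or disables the collection of driver statistics.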
+func SetStats(enabled bool) {
+ statsMutex.Lock()
+ if enabled {
+ if stats == nil {
+ stats = &Stats{}
+ }
+ } else {
+ stats = nil
+ }
+ statsMutex.Unlock()
+}
+
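+// GetStats returns a snapshot of the current statistics. Collection must
+// have been enabled via SetStats beforehand.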
+func GetStats() (snapshot Stats) {
+ statsMutex.Lock()
+ snapshot = *stats
+ statsMutex.Unlock()
+ return
+}
+
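+// ResetStats zeroes the operation counters while preserving the absolute
+// gauges (Clusters, SocketsInUse, SocketsAlive, SocketRefs).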
+func ResetStats() {
+ statsMutex.Lock()
+ debug("Resetting stats")
+ old := stats
+ stats = &Stats{}
+ // These are absolute values:
+ stats.Clusters = old.Clusters
+ stats.SocketsInUse = old.SocketsInUse
+ stats.SocketsAlive = old.SocketsAlive
+ stats.SocketRefs = old.SocketRefs
+ statsMutex.Unlock()
+ return
+}
+
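+// Stats holds counters describing the driver's activity. A usage sketch
+// (illustrative):
+//
+//	mgo.SetStats(true)
+//	// ... run some operations ...
+//	snap := mgo.GetStats()
+//	log.Printf("sent=%d received=%d", snap.SentOps, snap.ReceivedOps)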
+type Stats struct {
+ Clusters int
+ MasterConns int
+ SlaveConns int
+ SentOps int
+ ReceivedOps int
+ ReceivedDocs int
+ SocketsAlive int
+ SocketsInUse int
+ SocketRefs int
+}
+
+func (stats *Stats) cluster(delta int) {
+ if stats != nil {
+ statsMutex.Lock()
+ stats.Clusters += delta
+ statsMutex.Unlock()
+ }
+}
+
+func (stats *Stats) conn(delta int, master bool) {
+ if stats != nil {
+ statsMutex.Lock()
+ if master {
+ stats.MasterConns += delta
+ } else {
+ stats.SlaveConns += delta
+ }
+ statsMutex.Unlock()
+ }
+}
+
+func (stats *Stats) sentOps(delta int) {
+ if stats != nil {
+ statsMutex.Lock()
+ stats.SentOps += delta
+ statsMutex.Unlock()
+ }
+}
+
+func (stats *Stats) receivedOps(delta int) {
+ if stats != nil {
+ statsMutex.Lock()
+ stats.ReceivedOps += delta
+ statsMutex.Unlock()
+ }
+}
+
+func (stats *Stats) receivedDocs(delta int) {
+ if stats != nil {
+ statsMutex.Lock()
+ stats.ReceivedDocs += delta
+ statsMutex.Unlock()
+ }
+}
+
+func (stats *Stats) socketsInUse(delta int) {
+ if stats != nil {
+ statsMutex.Lock()
+ stats.SocketsInUse += delta
+ statsMutex.Unlock()
+ }
+}
+
+func (stats *Stats) socketsAlive(delta int) {
+ if stats != nil {
+ statsMutex.Lock()
+ stats.SocketsAlive += delta
+ statsMutex.Unlock()
+ }
+}
+
+func (stats *Stats) socketRefs(delta int) {
+ if stats != nil {
+ statsMutex.Lock()
+ stats.SocketRefs += delta
+ statsMutex.Unlock()
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/suite_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/suite_test.go
new file mode 100644
index 00000000000..bac5d3f4a6f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/suite_test.go
@@ -0,0 +1,262 @@
+// mgo - MongoDB driver for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mgo_test
+
+import (
+ "bytes"
+ "errors"
+ "flag"
+ "fmt"
+ "net"
+ "os/exec"
+ "runtime"
+ "strconv"
+ "testing"
+ "time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/bson"
+)
+
+var fast = flag.Bool("fast", false, "Skip slow tests")
+
+type M bson.M
+
+type cLogger C
+
+func (c *cLogger) Output(calldepth int, s string) error {
+ ns := time.Now().UnixNano()
+ t := float64(ns%100e9) / 1e9
+ ((*C)(c)).Logf("[LOG] %.05f %s", t, s)
+ return nil
+}
+
+func TestAll(t *testing.T) {
+ TestingT(t)
+}
+
+type S struct {
+ session *mgo.Session
+ stopped bool
+ build mgo.BuildInfo
+ frozen []string
+}
+
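+// versionAtLeast reports whether the connected server's build version is at
+// least v, comparing version components from most to least significant.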
+func (s *S) versionAtLeast(v ...int) (result bool) {
+ for i := range v {
+ if i == len(s.build.VersionArray) {
+ return false
+ }
+ if s.build.VersionArray[i] != v[i] {
+ return s.build.VersionArray[i] >= v[i]
+ }
+ }
+ return true
+}
+
+var _ = Suite(&S{})
+
+func (s *S) SetUpSuite(c *C) {
+ mgo.SetDebug(true)
+ mgo.SetStats(true)
+ s.StartAll()
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ s.build, err = session.BuildInfo()
+ c.Check(err, IsNil)
+ session.Close()
+}
+
+func (s *S) SetUpTest(c *C) {
+ err := run("mongo --nodb harness/mongojs/dropall.js")
+ if err != nil {
+ panic(err.Error())
+ }
+ mgo.SetLogger((*cLogger)(c))
+ mgo.ResetStats()
+}
+
+func (s *S) TearDownTest(c *C) {
+ if s.stopped {
+ s.Stop(":40201")
+ s.Stop(":40202")
+ s.Stop(":40203")
+ s.StartAll()
+ }
+ for _, host := range s.frozen {
+ if host != "" {
+ s.Thaw(host)
+ }
+ }
+ var stats mgo.Stats
+ for i := 0; ; i++ {
+ stats = mgo.GetStats()
+ if stats.SocketsInUse == 0 && stats.SocketsAlive == 0 {
+ break
+ }
+ if i == 20 {
+ c.Fatal("Test left sockets in a dirty state")
+ }
+ c.Logf("Waiting for sockets to die: %d in use, %d alive", stats.SocketsInUse, stats.SocketsAlive)
+ time.Sleep(500 * time.Millisecond)
+ }
+ for i := 0; ; i++ {
+ stats = mgo.GetStats()
+ if stats.Clusters == 0 {
+ break
+ }
+ if i == 60 {
+ c.Fatal("Test left clusters alive")
+ }
+ c.Logf("Waiting for clusters to die: %d alive", stats.Clusters)
+ time.Sleep(1 * time.Second)
+ }
+}
+
+func (s *S) Stop(host string) {
+ // Give a moment for slaves to sync and avoid getting rollback issues.
+ panicOnWindows()
+ time.Sleep(2 * time.Second)
+ err := run("svc -d _harness/daemons/" + supvName(host))
+ if err != nil {
+ panic(err)
+ }
+ s.stopped = true
+}
+
+func (s *S) pid(host string) int {
+	// Note: recent releases of lsof force the 'f' (file descriptor) field to
+	// be present in the output, hence the trailing f in -Fpf.
+ cmd := exec.Command("lsof", "-iTCP:"+hostPort(host), "-sTCP:LISTEN", "-Fpf")
+ output, err := cmd.CombinedOutput()
+ if err != nil {
+ panic(err)
+ }
+ pidstr := string(bytes.Fields(output[1:])[0])
+ pid, err := strconv.Atoi(pidstr)
+ if err != nil {
+ panic(fmt.Errorf("cannot convert pid to int: %q, command line: %q", pidstr, cmd.Args))
+ }
+ return pid
+}
+
+func (s *S) Freeze(host string) {
+ err := stop(s.pid(host))
+ if err != nil {
+ panic(err)
+ }
+ s.frozen = append(s.frozen, host)
+}
+
+func (s *S) Thaw(host string) {
+ err := cont(s.pid(host))
+ if err != nil {
+ panic(err)
+ }
+ for i, frozen := range s.frozen {
+ if frozen == host {
+ s.frozen[i] = ""
+ }
+ }
+}
+
+func (s *S) StartAll() {
+ if s.stopped {
+ // Restart any stopped nodes.
+ run("svc -u _harness/daemons/*")
+ err := run("mongo --nodb harness/mongojs/wait.js")
+ if err != nil {
+ panic(err)
+ }
+ s.stopped = false
+ }
+}
+
+func run(command string) error {
+ var output []byte
+ var err error
+ if runtime.GOOS == "windows" {
+ output, err = exec.Command("cmd", "/C", command).CombinedOutput()
+ } else {
+ output, err = exec.Command("/bin/sh", "-c", command).CombinedOutput()
+ }
+
+ if err != nil {
+ msg := fmt.Sprintf("Failed to execute: %s: %s\n%s", command, err.Error(), string(output))
+ return errors.New(msg)
+ }
+ return nil
+}
+
+var supvNames = map[string]string{
+ "40001": "db1",
+ "40002": "db2",
+ "40011": "rs1a",
+ "40012": "rs1b",
+ "40013": "rs1c",
+ "40021": "rs2a",
+ "40022": "rs2b",
+ "40023": "rs2c",
+ "40031": "rs3a",
+ "40032": "rs3b",
+ "40033": "rs3c",
+ "40041": "rs4a",
+ "40101": "cfg1",
+ "40102": "cfg2",
+ "40103": "cfg3",
+ "40201": "s1",
+ "40202": "s2",
+ "40203": "s3",
+}
+
+// supvName returns the daemon name for the given host address.
+func supvName(host string) string {
+ host, port, err := net.SplitHostPort(host)
+ if err != nil {
+ panic(err)
+ }
+ name, ok := supvNames[port]
+ if !ok {
+		panic("Unknown host/port: " + host + ":" + port)
+ }
+ return name
+}
+
+func hostPort(host string) string {
+ _, port, err := net.SplitHostPort(host)
+ if err != nil {
+ panic(err)
+ }
+ return port
+}
+
+func panicOnWindows() {
+ if runtime.GOOS == "windows" {
+ panic("the test suite is not yet fully supported on Windows")
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/syscall_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/syscall_test.go
new file mode 100644
index 00000000000..b8bbd7b340f
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/syscall_test.go
@@ -0,0 +1,15 @@
+// +build !windows
+
+package mgo_test
+
+import (
+ "syscall"
+)
+
+func stop(pid int) (err error) {
+ return syscall.Kill(pid, syscall.SIGSTOP)
+}
+
+func cont(pid int) (err error) {
+ return syscall.Kill(pid, syscall.SIGCONT)
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/syscall_windows_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/syscall_windows_test.go
new file mode 100644
index 00000000000..f2deaca86ec
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/syscall_windows_test.go
@@ -0,0 +1,11 @@
+package mgo_test
+
+func stop(pid int) (err error) {
+	panicOnWindows() // Always panics.
+ return nil
+}
+
+func cont(pid int) (err error) {
+	panicOnWindows() // Always panics.
+ return nil
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/chaos.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/chaos.go
new file mode 100644
index 00000000000..c98adb91d22
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/chaos.go
@@ -0,0 +1,68 @@
+package txn
+
+import (
+ mrand "math/rand"
+ "time"
+)
+
+var chaosEnabled = false
+var chaosSetting Chaos
+
+// Chaos holds parameters for the failure injection mechanism.
+type Chaos struct {
+ // KillChance is the 0.0 to 1.0 chance that a given checkpoint
+ // within the algorithm will raise an interruption that will
+ // stop the procedure.
+ KillChance float64
+
+ // SlowdownChance is the 0.0 to 1.0 chance that a given checkpoint
+ // within the algorithm will be delayed by Slowdown before
+ // continuing.
+ SlowdownChance float64
+ Slowdown time.Duration
+
+ // If Breakpoint is set, the above settings will only affect the
+ // named breakpoint.
+ Breakpoint string
+}
+
+// SetChaos sets the failure injection parameters to c.
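+//
+// An illustrative call (editor's sketch), killing roughly 10% of the
+// checkpoints named "set-applying":
+//
+//	txn.SetChaos(txn.Chaos{KillChance: 0.1, Breakpoint: "set-applying"})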
+func SetChaos(c Chaos) {
+ chaosSetting = c
+ chaosEnabled = c.KillChance > 0 || c.SlowdownChance > 0
+}
+
+func chaos(bpname string) {
+ if !chaosEnabled {
+ return
+ }
+ switch chaosSetting.Breakpoint {
+ case "", bpname:
+ kc := chaosSetting.KillChance
+ if kc > 0 && mrand.Intn(1000) < int(kc*1000) {
+ panic(chaosError{})
+ }
+ if bpname == "insert" {
+ return
+ }
+ sc := chaosSetting.SlowdownChance
+ if sc > 0 && mrand.Intn(1000) < int(sc*1000) {
+ time.Sleep(chaosSetting.Slowdown)
+ }
+ }
+}
+
+type chaosError struct{}
+
+func (f *flusher) handleChaos(err *error) {
+ v := recover()
+ if v == nil {
+ return
+ }
+ if _, ok := v.(chaosError); ok {
+ f.debugf("Killed by chaos!")
+ *err = ErrChaos
+ return
+ }
+ panic(v)
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/debug.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/debug.go
new file mode 100644
index 00000000000..8224bb31387
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/debug.go
@@ -0,0 +1,109 @@
+package txn
+
+import (
+ "bytes"
+ "fmt"
+ "sort"
+ "sync/atomic"
+
+ "gopkg.in/mgo.v2/bson"
+)
+
+var (
+ debugEnabled bool
+ logger log_Logger
+)
+
+type log_Logger interface {
+ Output(calldepth int, s string) error
+}
+
+// SetLogger specifies the *log.Logger that logged messages should be sent to.
+func SetLogger(l log_Logger) {
+ logger = l
+}
+
+// SetDebug enables or disables debugging.
+func SetDebug(debug bool) {
+ debugEnabled = debug
+}
+
+var ErrChaos = fmt.Errorf("interrupted by chaos")
+
+var debugId uint32
+
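+// debugPrefix returns a short identifier derived from a global counter;
+// ids 0, 1 and 16 map to "a) ", "b) " and "ab) " respectively.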
+func debugPrefix() string {
+ d := atomic.AddUint32(&debugId, 1) - 1
+ s := make([]byte, 0, 10)
+ for i := uint(0); i < 8; i++ {
+ s = append(s, "abcdefghijklmnop"[(d>>(4*i))&0xf])
+ if d>>(4*(i+1)) == 0 {
+ break
+ }
+ }
+ s = append(s, ')', ' ')
+ return string(s)
+}
+
+func logf(format string, args ...interface{}) {
+ if logger != nil {
+ logger.Output(2, fmt.Sprintf(format, argsForLog(args)...))
+ }
+}
+
+func debugf(format string, args ...interface{}) {
+ if debugEnabled && logger != nil {
+ logger.Output(2, fmt.Sprintf(format, argsForLog(args)...))
+ }
+}
+
+func argsForLog(args []interface{}) []interface{} {
+ for i, arg := range args {
+ switch v := arg.(type) {
+ case bson.ObjectId:
+ args[i] = v.Hex()
+ case []bson.ObjectId:
+ lst := make([]string, len(v))
+ for j, id := range v {
+ lst[j] = id.Hex()
+ }
+ args[i] = lst
+ case map[docKey][]bson.ObjectId:
+ buf := &bytes.Buffer{}
+ var dkeys docKeys
+ for dkey := range v {
+ dkeys = append(dkeys, dkey)
+ }
+ sort.Sort(dkeys)
+ for i, dkey := range dkeys {
+ if i > 0 {
+ buf.WriteByte(' ')
+ }
+ buf.WriteString(fmt.Sprintf("%v: {", dkey))
+ for j, id := range v[dkey] {
+ if j > 0 {
+ buf.WriteByte(' ')
+ }
+ buf.WriteString(id.Hex())
+ }
+ buf.WriteByte('}')
+ }
+ args[i] = buf.String()
+ case map[docKey][]int64:
+ buf := &bytes.Buffer{}
+ var dkeys docKeys
+ for dkey := range v {
+ dkeys = append(dkeys, dkey)
+ }
+ sort.Sort(dkeys)
+ for i, dkey := range dkeys {
+ if i > 0 {
+ buf.WriteByte(' ')
+ }
+ buf.WriteString(fmt.Sprintf("%v: %v", dkey, v[dkey]))
+ }
+ args[i] = buf.String()
+ }
+ }
+ return args
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/dockey_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/dockey_test.go
new file mode 100644
index 00000000000..e8dee952cf4
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/dockey_test.go
@@ -0,0 +1,205 @@
+package txn
+
+import (
+ "sort"
+
+ . "gopkg.in/check.v1"
+)
+
+type DocKeySuite struct{}
+
+var _ = Suite(&DocKeySuite{})
+
+type T struct {
+ A int
+ B string
+}
+
+type T2 struct {
+ A int
+ B string
+}
+
+type T3 struct {
+ A int
+ B string
+}
+
+type T4 struct {
+ A int
+ B string
+}
+
+type T5 struct {
+ F int
+ Q string
+}
+
+type T6 struct {
+ A int
+ B string
+}
+
+type T7 struct {
+ A bool
+ B float64
+}
+
+type T8 struct {
+ A int
+ B string
+}
+
+type T9 struct {
+ A int
+ B string
+ C bool
+}
+
+type T10 struct {
+ C int `bson:"a"`
+ D string `bson:"b,omitempty"`
+}
+
+type T11 struct {
+ C int
+ D string
+}
+
+type T12 struct {
+ S string
+}
+
+type T13 struct {
+ p, q, r bool
+ S string
+}
+
+var docKeysTests = [][]docKeys{
+ {{
+ {"c", 1},
+ {"c", 5},
+ {"c", 2},
+ }, {
+ {"c", 1},
+ {"c", 2},
+ {"c", 5},
+ }}, {{
+ {"c", "foo"},
+ {"c", "bar"},
+ {"c", "bob"},
+ }, {
+ {"c", "bar"},
+ {"c", "bob"},
+ {"c", "foo"},
+ }}, {{
+ {"c", 0.2},
+ {"c", 0.07},
+ {"c", 0.9},
+ }, {
+ {"c", 0.07},
+ {"c", 0.2},
+ {"c", 0.9},
+ }}, {{
+ {"c", true},
+ {"c", false},
+ {"c", true},
+ }, {
+ {"c", false},
+ {"c", true},
+ {"c", true},
+ }}, {{
+ {"c", T{1, "b"}},
+ {"c", T{1, "a"}},
+ {"c", T{0, "b"}},
+ {"c", T{0, "a"}},
+ }, {
+ {"c", T{0, "a"}},
+ {"c", T{0, "b"}},
+ {"c", T{1, "a"}},
+ {"c", T{1, "b"}},
+ }}, {{
+ {"c", T{1, "a"}},
+ {"c", T{0, "a"}},
+ }, {
+ {"c", T{0, "a"}},
+ {"c", T{1, "a"}},
+ }}, {{
+ {"c", T3{0, "b"}},
+ {"c", T2{1, "b"}},
+ {"c", T3{1, "a"}},
+ {"c", T2{0, "a"}},
+ }, {
+ {"c", T2{0, "a"}},
+ {"c", T3{0, "b"}},
+ {"c", T3{1, "a"}},
+ {"c", T2{1, "b"}},
+ }}, {{
+ {"c", T5{1, "b"}},
+ {"c", T4{1, "b"}},
+ {"c", T5{0, "a"}},
+ {"c", T4{0, "a"}},
+ }, {
+ {"c", T4{0, "a"}},
+ {"c", T5{0, "a"}},
+ {"c", T4{1, "b"}},
+ {"c", T5{1, "b"}},
+ }}, {{
+ {"c", T6{1, "b"}},
+ {"c", T7{true, 0.2}},
+ {"c", T6{0, "a"}},
+ {"c", T7{false, 0.04}},
+ }, {
+ {"c", T6{0, "a"}},
+ {"c", T6{1, "b"}},
+ {"c", T7{false, 0.04}},
+ {"c", T7{true, 0.2}},
+ }}, {{
+ {"c", T9{1, "b", true}},
+ {"c", T8{1, "b"}},
+ {"c", T9{0, "a", false}},
+ {"c", T8{0, "a"}},
+ }, {
+ {"c", T9{0, "a", false}},
+ {"c", T8{0, "a"}},
+ {"c", T9{1, "b", true}},
+ {"c", T8{1, "b"}},
+ }}, {{
+ {"b", 2},
+ {"a", 5},
+ {"c", 2},
+ {"b", 1},
+ }, {
+ {"a", 5},
+ {"b", 1},
+ {"b", 2},
+ {"c", 2},
+ }}, {{
+ {"c", T11{1, "a"}},
+ {"c", T11{1, "a"}},
+ {"c", T10{1, "a"}},
+ }, {
+ {"c", T10{1, "a"}},
+ {"c", T11{1, "a"}},
+ {"c", T11{1, "a"}},
+ }}, {{
+ {"c", T12{"a"}},
+ {"c", T13{false, true, false, "a"}},
+ {"c", T12{"b"}},
+ {"c", T13{false, true, false, "b"}},
+ }, {
+ {"c", T12{"a"}},
+ {"c", T13{false, true, false, "a"}},
+ {"c", T12{"b"}},
+ {"c", T13{false, true, false, "b"}},
+ }},
+}
+
+func (s *DocKeySuite) TestSort(c *C) {
+ for _, test := range docKeysTests {
+ keys := test[0]
+ expected := test[1]
+ sort.Sort(keys)
+ c.Check(keys, DeepEquals, expected)
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/flusher.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/flusher.go
new file mode 100644
index 00000000000..f640a438084
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/flusher.go
@@ -0,0 +1,985 @@
+package txn
+
+import (
+ "fmt"
+
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/bson"
+)
+
+func flush(r *Runner, t *transaction) error {
+ f := &flusher{
+ Runner: r,
+ goal: t,
+ goalKeys: make(map[docKey]bool),
+ queue: make(map[docKey][]token),
+ debugId: debugPrefix(),
+ }
+ for _, dkey := range f.goal.docKeys() {
+ f.goalKeys[dkey] = true
+ }
+ return f.run()
+}
+
+type flusher struct {
+ *Runner
+ goal *transaction
+ goalKeys map[docKey]bool
+ queue map[docKey][]token
+ debugId string
+}
+
+func (f *flusher) run() (err error) {
+ if chaosEnabled {
+ defer f.handleChaos(&err)
+ }
+
+ f.debugf("Processing %s", f.goal)
+ seen := make(map[bson.ObjectId]*transaction)
+ if err := f.recurse(f.goal, seen); err != nil {
+ return err
+ }
+ if f.goal.done() {
+ return nil
+ }
+
+ // Sparse workloads will generally be managed entirely by recurse.
+ // Getting here means one or more transactions have dependencies
+ // and perhaps cycles.
+
+ // Build successors data for Tarjan's sort. Must consider
+ // that entries in txn-queue are not necessarily valid.
+ successors := make(map[bson.ObjectId][]bson.ObjectId)
+ ready := true
+ for _, dqueue := range f.queue {
+ NextPair:
+ for i := 0; i < len(dqueue); i++ {
+ pred := dqueue[i]
+ predid := pred.id()
+ predt := seen[predid]
+ if predt == nil || predt.Nonce != pred.nonce() {
+ continue
+ }
+ predsuccids, ok := successors[predid]
+ if !ok {
+ successors[predid] = nil
+ }
+
+ for j := i + 1; j < len(dqueue); j++ {
+ succ := dqueue[j]
+ succid := succ.id()
+ succt := seen[succid]
+ if succt == nil || succt.Nonce != succ.nonce() {
+ continue
+ }
+ if _, ok := successors[succid]; !ok {
+ successors[succid] = nil
+ }
+
+ // Found a valid pred/succ pair.
+ i = j - 1
+ for _, predsuccid := range predsuccids {
+ if predsuccid == succid {
+ continue NextPair
+ }
+ }
+ successors[predid] = append(predsuccids, succid)
+ if succid == f.goal.Id {
+ // There are still pre-requisites to handle.
+ ready = false
+ }
+ continue NextPair
+ }
+ }
+ }
+ f.debugf("Queues: %v", f.queue)
+ f.debugf("Successors: %v", successors)
+ if ready {
+ f.debugf("Goal %s has no real pre-requisites", f.goal)
+ return f.advance(f.goal, nil, true)
+ }
+
+ // Robert Tarjan's algorithm for detecting strongly-connected
+ // components is used for topological sorting and detecting
+	// cycles at once. The order in which transactions are applied
+	// to commonly affected documents must be agreed upon globally.
+ sorted := tarjanSort(successors)
+ if debugEnabled {
+ f.debugf("Tarjan output: %v", sorted)
+ }
+ pull := make(map[bson.ObjectId]*transaction)
+ for i := len(sorted) - 1; i >= 0; i-- {
+ scc := sorted[i]
+ f.debugf("Flushing %v", scc)
+ if len(scc) == 1 {
+ pull[scc[0]] = seen[scc[0]]
+ }
+ for _, id := range scc {
+ if err := f.advance(seen[id], pull, true); err != nil {
+ return err
+ }
+ }
+ if len(scc) > 1 {
+ for _, id := range scc {
+ pull[id] = seen[id]
+ }
+ }
+ }
+ return nil
+}
+
+func (f *flusher) recurse(t *transaction, seen map[bson.ObjectId]*transaction) error {
+ seen[t.Id] = t
+ err := f.advance(t, nil, false)
+ if err != errPreReqs {
+ return err
+ }
+ for _, dkey := range t.docKeys() {
+ for _, dtt := range f.queue[dkey] {
+ id := dtt.id()
+ if seen[id] != nil {
+ continue
+ }
+ qt, err := f.load(id)
+ if err != nil {
+ return err
+ }
+ err = f.recurse(qt, seen)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+func (f *flusher) advance(t *transaction, pull map[bson.ObjectId]*transaction, force bool) error {
+ for {
+ switch t.State {
+ case tpreparing, tprepared:
+ revnos, err := f.prepare(t, force)
+ if err != nil {
+ return err
+ }
+ if t.State != tprepared {
+ continue
+ }
+ if err = f.assert(t, revnos, pull); err != nil {
+ return err
+ }
+ if t.State != tprepared {
+ continue
+ }
+ if err = f.checkpoint(t, revnos); err != nil {
+ return err
+ }
+ case tapplying:
+ return f.apply(t, pull)
+ case taborting:
+ return f.abortOrReload(t, nil, pull)
+ case tapplied, taborted:
+ return nil
+ default:
+ panic(fmt.Errorf("transaction in unknown state: %q", t.State))
+ }
+ }
+ panic("unreachable")
+}
+
+type stash string
+
+const (
+ stashStable stash = ""
+ stashInsert stash = "insert"
+ stashRemove stash = "remove"
+)
+
+type txnInfo struct {
+ Queue []token `bson:"txn-queue"`
+ Revno int64 `bson:"txn-revno,omitempty"`
+ Insert bson.ObjectId `bson:"txn-insert,omitempty"`
+ Remove bson.ObjectId `bson:"txn-remove,omitempty"`
+}
+
+type stashState string
+
+const (
+ stashNew stashState = ""
+ stashInserting stashState = "inserting"
+)
+
+var txnFields = bson.D{{"txn-queue", 1}, {"txn-revno", 1}, {"txn-remove", 1}, {"txn-insert", 1}}
+
+var errPreReqs = fmt.Errorf("transaction has pre-requisites and force is false")
+
+// prepare injects t's id onto txn-queue for all affected documents
+// and collects the current txn-queue and txn-revno values during
+// the process. If the prepared txn-queue indicates that there are
+// pre-requisite transactions to be applied and the force parameter
+// is false, errPreReqs will be returned. Otherwise, the current
+// tip revision numbers for all the documents are returned.
+func (f *flusher) prepare(t *transaction, force bool) (revnos []int64, err error) {
+ if t.State != tpreparing {
+ return f.rescan(t, force)
+ }
+ f.debugf("Preparing %s", t)
+
+ // dkeys being sorted means stable iteration across all runners. This
+ // isn't strictly required, but reduces the chances of cycles.
+ dkeys := t.docKeys()
+
+ revno := make(map[docKey]int64)
+ info := txnInfo{}
+ tt := tokenFor(t)
+NextDoc:
+ for _, dkey := range dkeys {
+ change := mgo.Change{
+ Update: bson.D{{"$addToSet", bson.D{{"txn-queue", tt}}}},
+ ReturnNew: true,
+ }
+ c := f.tc.Database.C(dkey.C)
+ cquery := c.FindId(dkey.Id).Select(txnFields)
+
+ RetryDoc:
+ change.Upsert = false
+ chaos("")
+ if _, err := cquery.Apply(change, &info); err == nil {
+ if info.Remove == "" {
+ // Fast path, unless workload is insert/remove heavy.
+ revno[dkey] = info.Revno
+ f.queue[dkey] = info.Queue
+ f.debugf("[A] Prepared document %v with revno %d and queue: %v", dkey, info.Revno, info.Queue)
+ continue NextDoc
+ } else {
+ // Handle remove in progress before preparing it.
+ if err := f.loadAndApply(info.Remove); err != nil {
+ return nil, err
+ }
+ goto RetryDoc
+ }
+ } else if err != mgo.ErrNotFound {
+ return nil, err
+ }
+
+ // Document missing. Use stash collection.
+ change.Upsert = true
+ chaos("")
+ _, err := f.sc.FindId(dkey).Apply(change, &info)
+ if err != nil {
+ return nil, err
+ }
+ if info.Insert != "" {
+ // Handle insert in progress before preparing it.
+ if err := f.loadAndApply(info.Insert); err != nil {
+ return nil, err
+ }
+ goto RetryDoc
+ }
+
+ // Must confirm stash is still in use and is the same one
+ // prepared, since applying a remove overwrites the stash.
+ docFound := false
+ stashFound := false
+ if err = c.FindId(dkey.Id).Select(txnFields).One(&info); err == nil {
+ docFound = true
+ } else if err != mgo.ErrNotFound {
+ return nil, err
+ } else if err = f.sc.FindId(dkey).One(&info); err == nil {
+ stashFound = true
+ if info.Revno == 0 {
+ // Missing revno in the stash only happens when it
+ // has been upserted, in which case it defaults to -1.
+ // Txn-inserted documents get revno -1 while in the stash
+				// for the first time, and -revno+1 == 2 when they go live.
+ info.Revno = -1
+ }
+ } else if err != mgo.ErrNotFound {
+ return nil, err
+ }
+
+ if docFound && info.Remove == "" || stashFound && info.Insert == "" {
+ for _, dtt := range info.Queue {
+ if dtt != tt {
+ continue
+ }
+ // Found tt properly prepared.
+ if stashFound {
+ f.debugf("[B] Prepared document %v on stash with revno %d and queue: %v", dkey, info.Revno, info.Queue)
+ } else {
+ f.debugf("[B] Prepared document %v with revno %d and queue: %v", dkey, info.Revno, info.Queue)
+ }
+ revno[dkey] = info.Revno
+ f.queue[dkey] = info.Queue
+ continue NextDoc
+ }
+ }
+
+ // The stash wasn't valid and tt got overwritten. Try again.
+ f.unstashToken(tt, dkey)
+ goto RetryDoc
+ }
+
+ // Save the prepared nonce onto t.
+ nonce := tt.nonce()
+ qdoc := bson.D{{"_id", t.Id}, {"s", tpreparing}}
+ udoc := bson.D{{"$set", bson.D{{"s", tprepared}, {"n", nonce}}}}
+ chaos("set-prepared")
+ err = f.tc.Update(qdoc, udoc)
+ if err == nil {
+ t.State = tprepared
+ t.Nonce = nonce
+ } else if err == mgo.ErrNotFound {
+ f.debugf("Can't save nonce of %s: LOST RACE", tt)
+ if err := f.reload(t); err != nil {
+ return nil, err
+ } else if t.State == tpreparing {
+ panic("can't save nonce yet transaction is still preparing")
+ } else if t.State != tprepared {
+ return t.Revnos, nil
+ }
+ tt = t.token()
+ } else if err != nil {
+ return nil, err
+ }
+
+ prereqs, found := f.hasPreReqs(tt, dkeys)
+ if !found {
+ // Must only happen when reloading above.
+ return f.rescan(t, force)
+ } else if prereqs && !force {
+ f.debugf("Prepared queue with %s [has prereqs & not forced].", tt)
+ return nil, errPreReqs
+ }
+ revnos = assembledRevnos(t.Ops, revno)
+ if !prereqs {
+ f.debugf("Prepared queue with %s [no prereqs]. Revnos: %v", tt, revnos)
+ } else {
+ f.debugf("Prepared queue with %s [forced] Revnos: %v", tt, revnos)
+ }
+ return revnos, nil
+}
+
+func (f *flusher) unstashToken(tt token, dkey docKey) error {
+ qdoc := bson.D{{"_id", dkey}, {"txn-queue", tt}}
+ udoc := bson.D{{"$pull", bson.D{{"txn-queue", tt}}}}
+ chaos("")
+ if err := f.sc.Update(qdoc, udoc); err == nil {
+ chaos("")
+ err = f.sc.Remove(bson.D{{"_id", dkey}, {"txn-queue", bson.D{}}})
+ } else if err != mgo.ErrNotFound {
+ return err
+ }
+ return nil
+}
+
+func (f *flusher) rescan(t *transaction, force bool) (revnos []int64, err error) {
+ f.debugf("Rescanning %s", t)
+ if t.State != tprepared {
+ panic(fmt.Errorf("rescanning transaction in invalid state: %q", t.State))
+ }
+
+ // dkeys being sorted means stable iteration across all
+ // runners. This isn't strictly required, but reduces the chances
+ // of cycles.
+ dkeys := t.docKeys()
+
+ tt := t.token()
+ if !force {
+ prereqs, found := f.hasPreReqs(tt, dkeys)
+ if found && prereqs {
+ // Its state is already known.
+ return nil, errPreReqs
+ }
+ }
+
+ revno := make(map[docKey]int64)
+ info := txnInfo{}
+ for _, dkey := range dkeys {
+ const retries = 3
+ retry := -1
+
+ RetryDoc:
+ retry++
+ c := f.tc.Database.C(dkey.C)
+ if err := c.FindId(dkey.Id).Select(txnFields).One(&info); err == mgo.ErrNotFound {
+ // Document is missing. Look in stash.
+ chaos("")
+ if err := f.sc.FindId(dkey).One(&info); err == mgo.ErrNotFound {
+ // Stash also doesn't exist. Maybe someone applied it.
+ if err := f.reload(t); err != nil {
+ return nil, err
+ } else if t.State != tprepared {
+ return t.Revnos, err
+ }
+ // Not applying either.
+ if retry < retries {
+ // Retry since there might be an insert/remove race.
+ goto RetryDoc
+ }
+ // Neither the doc nor the stash seem to exist.
+ return nil, fmt.Errorf("cannot find document %v for applying transaction %s", dkey, t)
+ } else if err != nil {
+ return nil, err
+ }
+ // Stash found.
+ if info.Insert != "" {
+ // Handle insert in progress before assuming ordering is good.
+ if err := f.loadAndApply(info.Insert); err != nil {
+ return nil, err
+ }
+ goto RetryDoc
+ }
+ if info.Revno == 0 {
+ // Missing revno in the stash means -1.
+ info.Revno = -1
+ }
+ } else if err != nil {
+ return nil, err
+ } else if info.Remove != "" {
+ // Handle remove in progress before assuming ordering is good.
+ if err := f.loadAndApply(info.Remove); err != nil {
+ return nil, err
+ }
+ goto RetryDoc
+ }
+ revno[dkey] = info.Revno
+
+ found := false
+ for _, id := range info.Queue {
+ if id == tt {
+ found = true
+ break
+ }
+ }
+ f.queue[dkey] = info.Queue
+ if !found {
+ // Rescanned transaction id was not in the queue. This could mean one
+ // of three things:
+ // 1) The transaction was applied and popped by someone else. This is
+ // the common case.
+ // 2) We've read an out-of-date queue from the stash. This can happen
+ // when someone else was paused for a long while preparing another
+ // transaction for this document, and improperly upserted to the
+ // stash when unpaused (after someone else inserted the document).
+ // This is rare but possible.
+ // 3) There's an actual bug somewhere, or outside interference. Worst
+ // possible case.
+ f.debugf("Rescanned document %v misses %s in queue: %v", dkey, tt, info.Queue)
+ err := f.reload(t)
+ if t.State == tpreparing || t.State == tprepared {
+ if retry < retries {
+ // Case 2.
+ goto RetryDoc
+ }
+ // Case 3.
+ return nil, fmt.Errorf("cannot find transaction %s in queue for document %v", t, dkey)
+ }
+ // Case 1.
+ return t.Revnos, err
+ }
+ }
+
+ prereqs, found := f.hasPreReqs(tt, dkeys)
+ if !found {
+ panic("rescanning loop guarantees that this can't happen")
+ } else if prereqs && !force {
+ f.debugf("Rescanned queue with %s: has prereqs, not forced", tt)
+ return nil, errPreReqs
+ }
+ revnos = assembledRevnos(t.Ops, revno)
+ if !prereqs {
+ f.debugf("Rescanned queue with %s: no prereqs, revnos: %v", tt, revnos)
+ } else {
+ f.debugf("Rescanned queue with %s: has prereqs, forced, revnos: %v", tt, revnos)
+ }
+ return revnos, nil
+}
+
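+// assembledRevnos returns, for each op, the revno its document is expected to
+// be at when that op runs, advancing a per-document working value as the ops
+// are walked. Illustrative progression: a live document at revno 3 moves to 4
+// when updated and to -4 when removed, while a stashed document at revno -1
+// moves to 2 when inserted.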
+func assembledRevnos(ops []Op, revno map[docKey]int64) []int64 {
+ revnos := make([]int64, len(ops))
+ for i, op := range ops {
+ dkey := op.docKey()
+ revnos[i] = revno[dkey]
+ drevno := revno[dkey]
+ switch {
+ case op.Insert != nil && drevno < 0:
+ revno[dkey] = -drevno + 1
+ case op.Update != nil && drevno >= 0:
+ revno[dkey] = drevno + 1
+ case op.Remove && drevno >= 0:
+ revno[dkey] = -drevno - 1
+ }
+ }
+ return revnos
+}
+
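+// hasPreReqs reports whether any of the given documents has a token from
+// another transaction queued ahead of tt (prereqs), and whether tt itself was
+// found in every document's queue (found).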
+func (f *flusher) hasPreReqs(tt token, dkeys docKeys) (prereqs, found bool) {
+ found = true
+NextDoc:
+ for _, dkey := range dkeys {
+ for _, dtt := range f.queue[dkey] {
+ if dtt == tt {
+ continue NextDoc
+ } else if dtt.id() != tt.id() {
+ prereqs = true
+ }
+ }
+ found = false
+ }
+ return
+}
+
+func (f *flusher) reload(t *transaction) error {
+ var newt transaction
+ query := f.tc.FindId(t.Id)
+ query.Select(bson.D{{"s", 1}, {"n", 1}, {"r", 1}})
+ if err := query.One(&newt); err != nil {
+ return fmt.Errorf("failed to reload transaction: %v", err)
+ }
+ t.State = newt.State
+ t.Nonce = newt.Nonce
+ t.Revnos = newt.Revnos
+ f.debugf("Reloaded %s: %q", t, t.State)
+ return nil
+}
+
+func (f *flusher) loadAndApply(id bson.ObjectId) error {
+ t, err := f.load(id)
+ if err != nil {
+ return err
+ }
+ return f.advance(t, nil, true)
+}
+
+// assert verifies that all assertions in t match the content that t
+// will be applied upon. If an assertion fails, the transaction state
+// is changed to aborted.
+func (f *flusher) assert(t *transaction, revnos []int64, pull map[bson.ObjectId]*transaction) error {
+ f.debugf("Asserting %s with revnos %v", t, revnos)
+ if t.State != tprepared {
+ panic(fmt.Errorf("asserting transaction in invalid state: %q", t.State))
+ }
+ qdoc := make(bson.D, 3)
+ revno := make(map[docKey]int64)
+ for i, op := range t.Ops {
+ dkey := op.docKey()
+ if _, ok := revno[dkey]; !ok {
+ revno[dkey] = revnos[i]
+ }
+ if op.Assert == nil {
+ continue
+ }
+ if op.Assert == DocMissing {
+ if revnos[i] >= 0 {
+ return f.abortOrReload(t, revnos, pull)
+ }
+ continue
+ }
+ if op.Insert != nil {
+			return fmt.Errorf("Insert can only Assert txn.DocMissing, not %v", op.Assert)
+ }
+ // if revnos[i] < 0 { abort }?
+
+ qdoc = append(qdoc[:0], bson.DocElem{"_id", op.Id})
+ if op.Assert != DocMissing {
+ var revnoq interface{}
+ if n := revno[dkey]; n == 0 {
+ revnoq = bson.D{{"$exists", false}}
+ } else {
+ revnoq = n
+ }
+ // XXX Add tt to the query here, once we're sure it's all working.
+ // Not having it increases the chances of breaking on bad logic.
+ qdoc = append(qdoc, bson.DocElem{"txn-revno", revnoq})
+ if op.Assert != DocExists {
+ qdoc = append(qdoc, bson.DocElem{"$or", []interface{}{op.Assert}})
+ }
+ }
+
+ c := f.tc.Database.C(op.C)
+ if err := c.Find(qdoc).Select(bson.D{{"_id", 1}}).One(nil); err == mgo.ErrNotFound {
+ // Assertion failed or someone else started applying.
+ return f.abortOrReload(t, revnos, pull)
+ } else if err != nil {
+ return err
+ }
+ }
+ f.debugf("Asserting %s succeeded", t)
+ return nil
+}
+
+func (f *flusher) abortOrReload(t *transaction, revnos []int64, pull map[bson.ObjectId]*transaction) (err error) {
+ f.debugf("Aborting or reloading %s (was %q)", t, t.State)
+ if t.State == tprepared {
+ qdoc := bson.D{{"_id", t.Id}, {"s", tprepared}}
+ udoc := bson.D{{"$set", bson.D{{"s", taborting}}}}
+ chaos("set-aborting")
+ if err = f.tc.Update(qdoc, udoc); err == nil {
+ t.State = taborting
+ } else if err == mgo.ErrNotFound {
+ if err = f.reload(t); err != nil || t.State != taborting {
+ f.debugf("Won't abort %s. Reloaded state: %q", t, t.State)
+ return err
+ }
+ } else {
+ return err
+ }
+ } else if t.State != taborting {
+ panic(fmt.Errorf("aborting transaction in invalid state: %q", t.State))
+ }
+
+ if len(revnos) > 0 {
+ if pull == nil {
+ pull = map[bson.ObjectId]*transaction{t.Id: t}
+ }
+ seen := make(map[docKey]bool)
+ for i, op := range t.Ops {
+ dkey := op.docKey()
+ if seen[op.docKey()] {
+ continue
+ }
+ seen[dkey] = true
+
+ pullAll := tokensToPull(f.queue[dkey], pull, "")
+ if len(pullAll) == 0 {
+ continue
+ }
+ udoc := bson.D{{"$pullAll", bson.D{{"txn-queue", pullAll}}}}
+ chaos("")
+ if revnos[i] < 0 {
+ err = f.sc.UpdateId(dkey, udoc)
+ } else {
+ c := f.tc.Database.C(dkey.C)
+ err = c.UpdateId(dkey.Id, udoc)
+ }
+ if err != nil && err != mgo.ErrNotFound {
+ return err
+ }
+ }
+ }
+ udoc := bson.D{{"$set", bson.D{{"s", taborted}}}}
+ chaos("set-aborted")
+ if err := f.tc.UpdateId(t.Id, udoc); err != nil && err != mgo.ErrNotFound {
+ return err
+ }
+ t.State = taborted
+ f.debugf("Aborted %s", t)
+ return nil
+}
+
+func (f *flusher) checkpoint(t *transaction, revnos []int64) error {
+ var debugRevnos map[docKey][]int64
+ if debugEnabled {
+ debugRevnos = make(map[docKey][]int64)
+ for i, op := range t.Ops {
+ dkey := op.docKey()
+ debugRevnos[dkey] = append(debugRevnos[dkey], revnos[i])
+ }
+ f.debugf("Ready to apply %s. Saving revnos %v", t, debugRevnos)
+ }
+
+ // Save in t the txn-revno values the transaction must run on.
+ qdoc := bson.D{{"_id", t.Id}, {"s", tprepared}}
+ udoc := bson.D{{"$set", bson.D{{"s", tapplying}, {"r", revnos}}}}
+ chaos("set-applying")
+ err := f.tc.Update(qdoc, udoc)
+ if err == nil {
+ t.State = tapplying
+ t.Revnos = revnos
+ f.debugf("Ready to apply %s. Saving revnos %v: DONE", t, debugRevnos)
+ } else if err == mgo.ErrNotFound {
+ f.debugf("Ready to apply %s. Saving revnos %v: LOST RACE", t, debugRevnos)
+ return f.reload(t)
+ }
+ return nil
+}
+
+func (f *flusher) apply(t *transaction, pull map[bson.ObjectId]*transaction) error {
+ f.debugf("Applying transaction %s", t)
+ if t.State != tapplying {
+ panic(fmt.Errorf("applying transaction in invalid state: %q", t.State))
+ }
+ if pull == nil {
+ pull = map[bson.ObjectId]*transaction{t.Id: t}
+ }
+
+ logRevnos := append([]int64(nil), t.Revnos...)
+ logDoc := bson.D{{"_id", t.Id}}
+
+ tt := tokenFor(t)
+ for i := range t.Ops {
+ op := &t.Ops[i]
+ dkey := op.docKey()
+ dqueue := f.queue[dkey]
+ revno := t.Revnos[i]
+
+ var opName string
+ if debugEnabled {
+ opName = op.name()
+ f.debugf("Applying %s op %d (%s) on %v with txn-revno %d", t, i, opName, dkey, revno)
+ }
+
+ c := f.tc.Database.C(op.C)
+
+ qdoc := bson.D{{"_id", dkey.Id}, {"txn-revno", revno}, {"txn-queue", tt}}
+ if op.Insert != nil {
+ qdoc[0].Value = dkey
+ if revno == -1 {
+ qdoc[1].Value = bson.D{{"$exists", false}}
+ }
+ } else if revno == 0 {
+ // There's no document with revno 0. The only way to see it is
+			// when an existing document participates in a transaction for the
+			// first time. Txn-inserted documents get revno -1 while in the
+			// stash for the first time, and -revno+1 == 2 when they go live.
+ qdoc[1].Value = bson.D{{"$exists", false}}
+ }
+
+ pullAll := tokensToPull(dqueue, pull, tt)
+
+ var d bson.D
+ var outcome string
+ var err error
+ switch {
+ case op.Update != nil:
+ if revno < 0 {
+ err = mgo.ErrNotFound
+ f.debugf("Won't try to apply update op; negative revision means the document is missing or stashed")
+ } else {
+ newRevno := revno + 1
+ logRevnos[i] = newRevno
+ if d, err = objToDoc(op.Update); err != nil {
+ return err
+ }
+ if d, err = addToDoc(d, "$pullAll", bson.D{{"txn-queue", pullAll}}); err != nil {
+ return err
+ }
+ if d, err = addToDoc(d, "$set", bson.D{{"txn-revno", newRevno}}); err != nil {
+ return err
+ }
+ chaos("")
+ err = c.Update(qdoc, d)
+ }
+ case op.Remove:
+ if revno < 0 {
+ err = mgo.ErrNotFound
+ } else {
+ newRevno := -revno - 1
+ logRevnos[i] = newRevno
+ nonce := newNonce()
+ stash := txnInfo{}
+ change := mgo.Change{
+ Update: bson.D{{"$push", bson.D{{"n", nonce}}}},
+ Upsert: true,
+ ReturnNew: true,
+ }
+ if _, err = f.sc.FindId(dkey).Apply(change, &stash); err != nil {
+ return err
+ }
+ change = mgo.Change{
+ Update: bson.D{{"$set", bson.D{{"txn-remove", t.Id}}}},
+ ReturnNew: true,
+ }
+ var info txnInfo
+ if _, err = c.Find(qdoc).Apply(change, &info); err == nil {
+				// The document still exists, so the stash observed
+				// earlier was either out of date or must already have
+				// contained the token being applied.
+ f.debugf("Marked document %v to be removed on revno %d with queue: %v", dkey, info.Revno, info.Queue)
+ updated := false
+ if !hasToken(stash.Queue, tt) {
+ var set, unset bson.D
+ if revno == 0 {
+ // Missing revno in stash means -1.
+ set = bson.D{{"txn-queue", info.Queue}}
+ unset = bson.D{{"n", 1}, {"txn-revno", 1}}
+ } else {
+ set = bson.D{{"txn-queue", info.Queue}, {"txn-revno", newRevno}}
+ unset = bson.D{{"n", 1}}
+ }
+ qdoc := bson.D{{"_id", dkey}, {"n", nonce}}
+ udoc := bson.D{{"$set", set}, {"$unset", unset}}
+ if err = f.sc.Update(qdoc, udoc); err == nil {
+ updated = true
+ } else if err != mgo.ErrNotFound {
+ return err
+ }
+ }
+ if updated {
+ f.debugf("Updated stash for document %v with revno %d and queue: %v", dkey, newRevno, info.Queue)
+ } else {
+ f.debugf("Stash for document %v was up-to-date", dkey)
+ }
+ err = c.Remove(qdoc)
+ }
+ }
+ case op.Insert != nil:
+ if revno >= 0 {
+ err = mgo.ErrNotFound
+ } else {
+ newRevno := -revno + 1
+ logRevnos[i] = newRevno
+ if d, err = objToDoc(op.Insert); err != nil {
+ return err
+ }
+ change := mgo.Change{
+ Update: bson.D{{"$set", bson.D{{"txn-insert", t.Id}}}},
+ ReturnNew: true,
+ }
+ chaos("")
+ var info txnInfo
+ if _, err = f.sc.Find(qdoc).Apply(change, &info); err == nil {
+ f.debugf("Stash for document %v has revno %d and queue: %v", dkey, info.Revno, info.Queue)
+ d = setInDoc(d, bson.D{{"_id", op.Id}, {"txn-revno", newRevno}, {"txn-queue", info.Queue}})
+ // Unlikely yet unfortunate race in here if this gets seriously
+ // delayed. If someone inserts+removes meanwhile, this will
+ // reinsert, and there's no way to avoid that while keeping the
+ // collection clean or compromising sharding. applyOps can solve
+ // the former, but it can't shard (SERVER-1439).
+ chaos("insert")
+ err = c.Insert(d)
+ if err == nil || mgo.IsDup(err) {
+ if err == nil {
+ f.debugf("New document %v inserted with revno %d and queue: %v", dkey, info.Revno, info.Queue)
+ } else {
+ f.debugf("Document %v already existed", dkey)
+ }
+ chaos("")
+ if err = f.sc.Remove(qdoc); err == nil {
+ f.debugf("Stash for document %v removed", dkey)
+ }
+ }
+ }
+ }
+ case op.Assert != nil:
+ // Pure assertion. No changes to apply.
+ }
+ if err == nil {
+ outcome = "DONE"
+ } else if err == mgo.ErrNotFound || mgo.IsDup(err) {
+ outcome = "MISS"
+ err = nil
+ } else {
+ outcome = err.Error()
+ }
+ if debugEnabled {
+ f.debugf("Applying %s op %d (%s) on %v with txn-revno %d: %s", t, i, opName, dkey, revno, outcome)
+ }
+ if err != nil {
+ return err
+ }
+
+ if f.lc != nil && op.isChange() {
+ // Add change to the log document.
+ var dr bson.D
+ for li := range logDoc {
+ elem := &logDoc[li]
+ if elem.Name == op.C {
+ dr = elem.Value.(bson.D)
+ break
+ }
+ }
+ if dr == nil {
+ logDoc = append(logDoc, bson.DocElem{op.C, bson.D{{"d", []interface{}{}}, {"r", []int64{}}}})
+ dr = logDoc[len(logDoc)-1].Value.(bson.D)
+ }
+ dr[0].Value = append(dr[0].Value.([]interface{}), op.Id)
+ dr[1].Value = append(dr[1].Value.([]int64), logRevnos[i])
+ }
+ }
+ t.State = tapplied
+
+ if f.lc != nil {
+ // Insert log document into the changelog collection.
+ f.debugf("Inserting %s into change log", t)
+ err := f.lc.Insert(logDoc)
+ if err != nil && !mgo.IsDup(err) {
+ return err
+ }
+ }
+
+ // It's been applied, so errors are ignored here. It's fine for someone
+ // else to win the race and mark it as applied, and it's also fine for
+ // it to remain pending until a later point when someone will perceive
+	// it has been applied and mark it as such.
+ f.debugf("Marking %s as applied", t)
+ chaos("set-applied")
+ f.tc.Update(bson.D{{"_id", t.Id}, {"s", tapplying}}, bson.D{{"$set", bson.D{{"s", tapplied}}}})
+ return nil
+}
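+
+// nextRevno is an editor-added sketch, not used by the flusher above; it just
+// restates the revno transitions that apply implements: updates bump a live
+// revno by one, removes stash the document at -revno-1, and inserts bring a
+// stashed document live at -revno+1. For example, a txn-inserted document
+// starts in the stash at revno -1 and goes live at -(-1)+1 == 2; removing a
+// live document at revno 2 stashes it at -2-1 == -3; re-inserting it brings
+// it back live at -(-3)+1 == 4.
+func nextRevno(op *Op, revno int64) int64 {
+	switch {
+	case op.Update != nil:
+		return revno + 1
+	case op.Remove:
+		return -revno - 1
+	case op.Insert != nil:
+		return -revno + 1
+	}
+	return revno // pure assertions leave the revno untouched
+}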
+
+func tokensToPull(dqueue []token, pull map[bson.ObjectId]*transaction, dontPull token) []token {
+ var result []token
+ for j := len(dqueue) - 1; j >= 0; j-- {
+ dtt := dqueue[j]
+ if dtt == dontPull {
+ continue
+ }
+ if _, ok := pull[dtt.id()]; ok {
+ // It was handled before and this is a leftover invalid
+ // nonce in the queue. Cherry-pick it out.
+ result = append(result, dtt)
+ }
+ }
+ return result
+}
+
+func objToDoc(obj interface{}) (d bson.D, err error) {
+ data, err := bson.Marshal(obj)
+ if err != nil {
+ return nil, err
+ }
+ err = bson.Unmarshal(data, &d)
+ if err != nil {
+ return nil, err
+ }
+ return d, err
+}
+
+func addToDoc(doc bson.D, key string, add bson.D) (bson.D, error) {
+ for i := range doc {
+ elem := &doc[i]
+ if elem.Name != key {
+ continue
+ }
+ if old, ok := elem.Value.(bson.D); ok {
+ elem.Value = append(old, add...)
+ return doc, nil
+ } else {
+ return nil, fmt.Errorf("invalid %q value in change document: %#v", key, elem.Value)
+ }
+ }
+ return append(doc, bson.DocElem{key, add}), nil
+}
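+
+// Editor's note on addToDoc above: for example,
+// addToDoc(bson.D{{"$set", bson.D{{"a", 1}}}}, "$set", bson.D{{"b", 2}})
+// yields {"$set": {"a": 1, "b": 2}}, while adding under a key that isn't
+// present simply appends a new element to the change document.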
+
+func setInDoc(doc bson.D, set bson.D) bson.D {
+ dlen := len(doc)
+NextS:
+ for s := range set {
+ sname := set[s].Name
+ for d := 0; d < dlen; d++ {
+ if doc[d].Name == sname {
+ doc[d].Value = set[s].Value
+ continue NextS
+ }
+ }
+ doc = append(doc, set[s])
+ }
+ return doc
+}
+
+func hasToken(tokens []token, tt token) bool {
+ for _, ttt := range tokens {
+ if ttt == tt {
+ return true
+ }
+ }
+ return false
+}
+
+func (f *flusher) debugf(format string, args ...interface{}) {
+ if !debugEnabled {
+ return
+ }
+ debugf(f.debugId+format, args...)
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/output.txt b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/output.txt
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/output.txt
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/sim_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/sim_test.go
new file mode 100644
index 00000000000..a369ded7c31
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/sim_test.go
@@ -0,0 +1,388 @@
+package txn_test
+
+import (
+ "flag"
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/bson"
+ "gopkg.in/mgo.v2/dbtest"
+ "gopkg.in/mgo.v2/txn"
+ . "gopkg.in/check.v1"
+ "math/rand"
+ "time"
+)
+
+var (
+ duration = flag.Duration("duration", 200*time.Millisecond, "duration for each simulation")
+ seed = flag.Int64("seed", 0, "seed for rand")
+)
+
+type params struct {
+ killChance float64
+ slowdownChance float64
+ slowdown time.Duration
+
+ unsafe bool
+ workers int
+ accounts int
+ changeHalf bool
+ reinsertCopy bool
+ reinsertZeroed bool
+ changelog bool
+
+ changes int
+}
+
+func (s *S) TestSim1Worker(c *C) {
+ simulate(c, &s.server, params{
+ workers: 1,
+ accounts: 4,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ })
+}
+
+func (s *S) TestSim4WorkersDense(c *C) {
+ simulate(c, &s.server, params{
+ workers: 4,
+ accounts: 2,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ })
+}
+
+func (s *S) TestSim4WorkersSparse(c *C) {
+ simulate(c, &s.server, params{
+ workers: 4,
+ accounts: 10,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ })
+}
+
+func (s *S) TestSimHalf1Worker(c *C) {
+ simulate(c, &s.server, params{
+ workers: 1,
+ accounts: 4,
+ changeHalf: true,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ })
+}
+
+func (s *S) TestSimHalf4WorkersDense(c *C) {
+ simulate(c, &s.server, params{
+ workers: 4,
+ accounts: 2,
+ changeHalf: true,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ })
+}
+
+func (s *S) TestSimHalf4WorkersSparse(c *C) {
+ simulate(c, &s.server, params{
+ workers: 4,
+ accounts: 10,
+ changeHalf: true,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ })
+}
+
+func (s *S) TestSimReinsertCopy1Worker(c *C) {
+ simulate(c, &s.server, params{
+ workers: 1,
+ accounts: 10,
+ reinsertCopy: true,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ })
+}
+
+func (s *S) TestSimReinsertCopy4Workers(c *C) {
+ simulate(c, &s.server, params{
+ workers: 4,
+ accounts: 10,
+ reinsertCopy: true,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ })
+}
+
+func (s *S) TestSimReinsertZeroed1Worker(c *C) {
+ simulate(c, &s.server, params{
+ workers: 1,
+ accounts: 10,
+ reinsertZeroed: true,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ })
+}
+
+func (s *S) TestSimReinsertZeroed4Workers(c *C) {
+ simulate(c, &s.server, params{
+ workers: 4,
+ accounts: 10,
+ reinsertZeroed: true,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ })
+}
+
+func (s *S) TestSimChangeLog(c *C) {
+ simulate(c, &s.server, params{
+ workers: 4,
+ accounts: 10,
+ killChance: 0.01,
+ slowdownChance: 0.3,
+ slowdown: 100 * time.Millisecond,
+ changelog: true,
+ })
+}
+
+type balanceChange struct {
+ id bson.ObjectId
+ origin int
+ target int
+ amount int
+}
+
+func simulate(c *C, server *dbtest.DBServer, params params) {
+ seed := *seed
+ if seed == 0 {
+ seed = time.Now().UnixNano()
+ }
+ rand.Seed(seed)
+ c.Logf("Seed: %v", seed)
+
+ txn.SetChaos(txn.Chaos{
+ KillChance: params.killChance,
+ SlowdownChance: params.slowdownChance,
+ Slowdown: params.slowdown,
+ })
+ defer txn.SetChaos(txn.Chaos{})
+
+ session := server.Session()
+ defer session.Close()
+
+ db := session.DB("test")
+ tc := db.C("tc")
+
+ runner := txn.NewRunner(tc)
+
+ tclog := db.C("tc.log")
+ if params.changelog {
+ info := mgo.CollectionInfo{
+ Capped: true,
+ MaxBytes: 1000000,
+ }
+ err := tclog.Create(&info)
+ c.Assert(err, IsNil)
+ runner.ChangeLog(tclog)
+ }
+
+ accounts := db.C("accounts")
+ for i := 0; i < params.accounts; i++ {
+ err := accounts.Insert(M{"_id": i, "balance": 300})
+ c.Assert(err, IsNil)
+ }
+ var stop time.Time
+ if params.changes <= 0 {
+ stop = time.Now().Add(*duration)
+ }
+
+ max := params.accounts
+ if params.reinsertCopy || params.reinsertZeroed {
+ max = int(float64(params.accounts) * 1.5)
+ }
+
+ changes := make(chan balanceChange, 1024)
+
+ //session.SetMode(mgo.Eventual, true)
+ for i := 0; i < params.workers; i++ {
+ go func() {
+ n := 0
+ for {
+ if n > 0 && n == params.changes {
+ break
+ }
+ if !stop.IsZero() && time.Now().After(stop) {
+ break
+ }
+
+ change := balanceChange{
+ id: bson.NewObjectId(),
+ origin: rand.Intn(max),
+ target: rand.Intn(max),
+ amount: 100,
+ }
+
+ var old Account
+ var oldExists bool
+ if params.reinsertCopy || params.reinsertZeroed {
+ if err := accounts.FindId(change.origin).One(&old); err != mgo.ErrNotFound {
+ c.Check(err, IsNil)
+ change.amount = old.Balance
+ oldExists = true
+ }
+ }
+
+ var ops []txn.Op
+ switch {
+ case params.reinsertCopy && oldExists:
+ ops = []txn.Op{{
+ C: "accounts",
+ Id: change.origin,
+ Assert: M{"balance": change.amount},
+ Remove: true,
+ }, {
+ C: "accounts",
+ Id: change.target,
+ Assert: txn.DocMissing,
+ Insert: M{"balance": change.amount},
+ }}
+ case params.reinsertZeroed && oldExists:
+ ops = []txn.Op{{
+ C: "accounts",
+ Id: change.target,
+ Assert: txn.DocMissing,
+ Insert: M{"balance": 0},
+ }, {
+ C: "accounts",
+ Id: change.origin,
+ Assert: M{"balance": change.amount},
+ Remove: true,
+ }, {
+ C: "accounts",
+ Id: change.target,
+ Assert: txn.DocExists,
+ Update: M{"$inc": M{"balance": change.amount}},
+ }}
+ case params.changeHalf:
+ ops = []txn.Op{{
+ C: "accounts",
+ Id: change.origin,
+ Assert: M{"balance": M{"$gte": change.amount}},
+ Update: M{"$inc": M{"balance": -change.amount / 2}},
+ }, {
+ C: "accounts",
+ Id: change.target,
+ Assert: txn.DocExists,
+ Update: M{"$inc": M{"balance": change.amount / 2}},
+ }, {
+ C: "accounts",
+ Id: change.origin,
+ Update: M{"$inc": M{"balance": -change.amount / 2}},
+ }, {
+ C: "accounts",
+ Id: change.target,
+ Update: M{"$inc": M{"balance": change.amount / 2}},
+ }}
+ default:
+ ops = []txn.Op{{
+ C: "accounts",
+ Id: change.origin,
+ Assert: M{"balance": M{"$gte": change.amount}},
+ Update: M{"$inc": M{"balance": -change.amount}},
+ }, {
+ C: "accounts",
+ Id: change.target,
+ Assert: txn.DocExists,
+ Update: M{"$inc": M{"balance": change.amount}},
+ }}
+ }
+
+ err := runner.Run(ops, change.id, nil)
+ if err != nil && err != txn.ErrAborted && err != txn.ErrChaos {
+ c.Check(err, IsNil)
+ }
+ n++
+ changes <- change
+ }
+ changes <- balanceChange{}
+ }()
+ }
+
+ alive := params.workers
+ changeLog := make([]balanceChange, 0, 1024)
+ for alive > 0 {
+ change := <-changes
+ if change.id == "" {
+ alive--
+ } else {
+ changeLog = append(changeLog, change)
+ }
+ }
+ c.Check(len(changeLog), Not(Equals), 0, Commentf("No operations were even attempted."))
+
+ txn.SetChaos(txn.Chaos{})
+ err := runner.ResumeAll()
+ c.Assert(err, IsNil)
+
+ n, err := accounts.Count()
+ c.Check(err, IsNil)
+ c.Check(n, Equals, params.accounts, Commentf("Number of accounts has changed."))
+
+ n, err = accounts.Find(M{"balance": M{"$lt": 0}}).Count()
+ c.Check(err, IsNil)
+ c.Check(n, Equals, 0, Commentf("There are %d accounts with negative balance.", n))
+
+ globalBalance := 0
+ iter := accounts.Find(nil).Iter()
+ account := Account{}
+ for iter.Next(&account) {
+ globalBalance += account.Balance
+ }
+ c.Check(iter.Close(), IsNil)
+ c.Check(globalBalance, Equals, params.accounts*300, Commentf("Total amount of money should be constant."))
+
+ // Compute and verify the exact final state of all accounts.
+ balance := make(map[int]int)
+ for i := 0; i < params.accounts; i++ {
+ balance[i] += 300
+ }
+ var applied, aborted int
+ for _, change := range changeLog {
+ err := runner.Resume(change.id)
+ if err == txn.ErrAborted {
+ aborted++
+ continue
+ } else if err != nil {
+ c.Fatalf("resuming %s failed: %v", change.id, err)
+ }
+ balance[change.origin] -= change.amount
+ balance[change.target] += change.amount
+ applied++
+ }
+ iter = accounts.Find(nil).Iter()
+ for iter.Next(&account) {
+ c.Assert(account.Balance, Equals, balance[account.Id])
+ }
+ c.Check(iter.Close(), IsNil)
+ c.Logf("Total transactions: %d (%d applied, %d aborted)", len(changeLog), applied, aborted)
+
+ if params.changelog {
+ n, err := tclog.Count()
+ c.Assert(err, IsNil)
+ // Check if the capped collection is full.
+ dummy := make([]byte, 1024)
+ tclog.Insert(M{"_id": bson.NewObjectId(), "dummy": dummy})
+ m, err := tclog.Count()
+ c.Assert(err, IsNil)
+ if m == n+1 {
+ // Wasn't full, so it must have seen it all.
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, applied)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/tarjan.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/tarjan.go
new file mode 100644
index 00000000000..e56541c9b62
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/tarjan.go
@@ -0,0 +1,94 @@
+package txn
+
+import (
+ "gopkg.in/mgo.v2/bson"
+ "sort"
+)
+
+func tarjanSort(successors map[bson.ObjectId][]bson.ObjectId) [][]bson.ObjectId {
+ // http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
+ data := &tarjanData{
+ successors: successors,
+ nodes: make([]tarjanNode, 0, len(successors)),
+ index: make(map[bson.ObjectId]int, len(successors)),
+ }
+
+ for id := range successors {
+ id := bson.ObjectId(string(id))
+ if _, seen := data.index[id]; !seen {
+ data.strongConnect(id)
+ }
+ }
+
+ // Sort connected components to stabilize the algorithm.
+ for _, ids := range data.output {
+ if len(ids) > 1 {
+ sort.Sort(idList(ids))
+ }
+ }
+ return data.output
+}
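+
+// Editor's illustration of tarjanSort above: with successors
+//
+//	a: [b]     b: [a, c]     c: []
+//
+// the result is [[c], [a b]]; components are emitted in reverse topological
+// order, so a component only appears after every component it can reach.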
+
+type tarjanData struct {
+ successors map[bson.ObjectId][]bson.ObjectId
+ output [][]bson.ObjectId
+
+ nodes []tarjanNode
+ stack []bson.ObjectId
+ index map[bson.ObjectId]int
+}
+
+type tarjanNode struct {
+ lowlink int
+ stacked bool
+}
+
+type idList []bson.ObjectId
+
+func (l idList) Len() int { return len(l) }
+func (l idList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
+func (l idList) Less(i, j int) bool { return l[i] < l[j] }
+
+func (data *tarjanData) strongConnect(id bson.ObjectId) *tarjanNode {
+ index := len(data.nodes)
+ data.index[id] = index
+ data.stack = append(data.stack, id)
+ data.nodes = append(data.nodes, tarjanNode{index, true})
+ node := &data.nodes[index]
+
+ for _, succid := range data.successors[id] {
+ succindex, seen := data.index[succid]
+ if !seen {
+ succnode := data.strongConnect(succid)
+ if succnode.lowlink < node.lowlink {
+ node.lowlink = succnode.lowlink
+ }
+ } else if data.nodes[succindex].stacked {
+ // Part of the current strongly-connected component.
+ if succindex < node.lowlink {
+ node.lowlink = succindex
+ }
+ }
+ }
+
+ if node.lowlink == index {
+ // Root node; pop stack and output new
+ // strongly-connected component.
+ var scc []bson.ObjectId
+ i := len(data.stack) - 1
+ for {
+ stackid := data.stack[i]
+ stackindex := data.index[stackid]
+ data.nodes[stackindex].stacked = false
+ scc = append(scc, stackid)
+ if stackindex == index {
+ break
+ }
+ i--
+ }
+ data.stack = data.stack[:i]
+ data.output = append(data.output, scc)
+ }
+
+ return node
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/tarjan_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/tarjan_test.go
new file mode 100644
index 00000000000..79745c39be6
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/tarjan_test.go
@@ -0,0 +1,44 @@
+package txn
+
+import (
+ "fmt"
+ "gopkg.in/mgo.v2/bson"
+ . "gopkg.in/check.v1"
+)
+
+type TarjanSuite struct{}
+
+var _ = Suite(TarjanSuite{})
+
+func bid(n int) bson.ObjectId {
+ return bson.ObjectId(fmt.Sprintf("%024d", n))
+}
+
+func bids(ns ...int) (ids []bson.ObjectId) {
+ for _, n := range ns {
+ ids = append(ids, bid(n))
+ }
+ return
+}
+
+func (TarjanSuite) TestExample(c *C) {
+ successors := map[bson.ObjectId][]bson.ObjectId{
+ bid(1): bids(2, 3),
+ bid(2): bids(1, 5),
+ bid(3): bids(4),
+ bid(4): bids(3, 5),
+ bid(5): bids(6),
+ bid(6): bids(7),
+ bid(7): bids(8),
+ bid(8): bids(6, 9),
+ bid(9): bids(),
+ }
+
+ c.Assert(tarjanSort(successors), DeepEquals, [][]bson.ObjectId{
+ bids(9),
+ bids(6, 7, 8),
+ bids(5),
+ bids(3, 4),
+ bids(1, 2),
+ })
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/txn.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/txn.go
new file mode 100644
index 00000000000..204b3cf1d8d
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/txn.go
@@ -0,0 +1,611 @@
+// The txn package implements support for multi-document transactions.
+//
+// For details check the following blog post:
+//
+// http://blog.labix.org/2012/08/22/multi-doc-transactions-for-mongodb
+//
+package txn
+
+import (
+ "encoding/binary"
+ "fmt"
+ "reflect"
+ "sort"
+ "strings"
+ "sync"
+
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/bson"
+
+ crand "crypto/rand"
+ mrand "math/rand"
+)
+
+type state int
+
+const (
+ tpreparing state = 1 // One or more documents not prepared
+ tprepared state = 2 // Prepared but not yet ready to run
+ taborting state = 3 // Assertions failed, cleaning up
+ tapplying state = 4 // Changes are in progress
+ taborted state = 5 // Pre-conditions failed, nothing done
+ tapplied state = 6 // All changes applied
+)
+
+func (s state) String() string {
+ switch s {
+ case tpreparing:
+ return "preparing"
+ case tprepared:
+ return "prepared"
+ case taborting:
+ return "aborting"
+ case tapplying:
+ return "applying"
+ case taborted:
+ return "aborted"
+ case tapplied:
+ return "applied"
+ }
+ panic(fmt.Errorf("unknown state: %d", s))
+}
+
+var rand *mrand.Rand
+var randmu sync.Mutex
+
+func init() {
+ var seed int64
+ err := binary.Read(crand.Reader, binary.BigEndian, &seed)
+ if err != nil {
+ panic(err)
+ }
+ rand = mrand.New(mrand.NewSource(seed))
+}
+
+type transaction struct {
+ Id bson.ObjectId `bson:"_id"`
+ State state `bson:"s"`
+ Info interface{} `bson:"i,omitempty"`
+ Ops []Op `bson:"o"`
+ Nonce string `bson:"n,omitempty"`
+ Revnos []int64 `bson:"r,omitempty"`
+
+ docKeysCached docKeys
+}
+
+func (t *transaction) String() string {
+ if t.Nonce == "" {
+ return t.Id.Hex()
+ }
+ return string(t.token())
+}
+
+func (t *transaction) done() bool {
+ return t.State == tapplied || t.State == taborted
+}
+
+func (t *transaction) token() token {
+ if t.Nonce == "" {
+ panic("transaction has no nonce")
+ }
+ return tokenFor(t)
+}
+
+func (t *transaction) docKeys() docKeys {
+ if t.docKeysCached != nil {
+ return t.docKeysCached
+ }
+ dkeys := make(docKeys, 0, len(t.Ops))
+NextOp:
+ for _, op := range t.Ops {
+ dkey := op.docKey()
+ for i := range dkeys {
+ if dkey == dkeys[i] {
+ continue NextOp
+ }
+ }
+ dkeys = append(dkeys, dkey)
+ }
+ sort.Sort(dkeys)
+ t.docKeysCached = dkeys
+ return dkeys
+}
+
+// tokenFor returns a unique transaction token that
+// is composed of t's id and a nonce. If t already has
+// a nonce assigned to it, it will be used, otherwise
+// a new nonce will be generated.
+func tokenFor(t *transaction) token {
+ nonce := t.Nonce
+ if nonce == "" {
+ nonce = newNonce()
+ }
+ return token(t.Id.Hex() + "_" + nonce)
+}
+
+func newNonce() string {
+ randmu.Lock()
+ r := rand.Uint32()
+ randmu.Unlock()
+ n := make([]byte, 8)
+ for i := uint(0); i < 8; i++ {
+ n[i] = "0123456789abcdef"[(r>>(4*i))&0xf]
+ }
+ return string(n)
+}
+
+type token string
+
+func (tt token) id() bson.ObjectId { return bson.ObjectIdHex(string(tt[:24])) }
+func (tt token) nonce() string { return string(tt[25:]) }
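+
+// A token therefore looks like "4d65822107fcfd524d65822e_1b9ef05a" (editor's
+// illustration with arbitrary values): 24 hex characters of the transaction
+// id, an underscore, and the 8-character hex nonce.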
+
+// Op represents an operation to a single document that may be
+// applied as part of a transaction with other operations.
+type Op struct {
+ // C and Id identify the collection and document this operation
+ // refers to. Id is matched against the "_id" document field.
+ C string `bson:"c"`
+ Id interface{} `bson:"d"`
+
+ // Assert optionally holds a query document that is used to
+ // test the operation document at the time the transaction is
+ // going to be applied. The assertions for all operations in
+ // a transaction are tested before any changes take place,
+ // and the transaction is entirely aborted if any of them
+ // fails. This is also the only way to prevent a transaction
+	// from being applied (the transaction continues despite
+ // the outcome of Insert, Update, and Remove).
+ Assert interface{} `bson:"a,omitempty"`
+
+ // The Insert, Update and Remove fields describe the mutation
+ // intended by the operation. At most one of them may be set
+ // per operation. If none are set, Assert must be set and the
+ // operation becomes a read-only test.
+ //
+ // Insert holds the document to be inserted at the time the
+ // transaction is applied. The Id field will be inserted
+ // into the document automatically as its _id field. The
+ // transaction will continue even if the document already
+ // exists. Use Assert with txn.DocMissing if the insertion is
+ // required.
+ //
+ // Update holds the update document to be applied at the time
+ // the transaction is applied. The transaction will continue
+ // even if a document with Id is missing. Use Assert to
+ // test for the document presence or its contents.
+ //
+ // Remove indicates whether to remove the document with Id.
+ // The transaction continues even if the document doesn't yet
+ // exist at the time the transaction is applied. Use Assert
+ // with txn.DocExists to make sure it will be removed.
+ Insert interface{} `bson:"i,omitempty"`
+ Update interface{} `bson:"u,omitempty"`
+ Remove bool `bson:"r,omitempty"`
+}
+
+func (op *Op) isChange() bool {
+ return op.Update != nil || op.Insert != nil || op.Remove
+}
+
+func (op *Op) docKey() docKey {
+ return docKey{op.C, op.Id}
+}
+
+func (op *Op) name() string {
+ switch {
+ case op.Update != nil:
+ return "update"
+ case op.Insert != nil:
+ return "insert"
+ case op.Remove:
+ return "remove"
+ case op.Assert != nil:
+ return "assert"
+ }
+ return "none"
+}
+
+const (
+ // DocExists and DocMissing may be used on an operation's
+ // Assert value to assert that the document with the given
+ // Id exists or does not exist, respectively.
+ DocExists = "d+"
+ DocMissing = "d-"
+)
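+
+// exampleCreateOps is an editor-added illustration, not used by the package
+// itself, of an Op that requires the insert to actually take place: without
+// the DocMissing assertion the transaction would also succeed when the
+// document already exists. Collection and field names are hypothetical.
+var exampleCreateOps = []Op{{
+	C:      "accounts",
+	Id:     "alice",
+	Assert: DocMissing,
+	Insert: bson.D{{"balance", 300}},
+}}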
+
+// A Runner applies operations as part of a transaction onto any number
+// of collections within a database. See the Run method for details.
+type Runner struct {
+ tc *mgo.Collection // txns
+ sc *mgo.Collection // stash
+ lc *mgo.Collection // log
+}
+
+// NewRunner returns a new transaction runner that uses tc to hold its
+// transactions.
+//
+// Multiple transaction collections may exist in a single database, but
+// all collections that are touched by operations in a given transaction
+// collection must be handled exclusively by it.
+//
+// A second collection with the same name as tc but suffixed with ".stash"
+// will be used for implementing the transactional behavior of insert
+// and remove operations.
+func NewRunner(tc *mgo.Collection) *Runner {
+ return &Runner{tc, tc.Database.C(tc.Name + ".stash"), nil}
+}
+
+var ErrAborted = fmt.Errorf("transaction aborted")
+
+// Run creates a new transaction with ops and runs it immediately.
+// The id parameter specifies the transaction id, and may be written
+// down ahead of time to later verify the success of the change and
+// resume it, when the procedure is interrupted for any reason. If
+// empty, a random id will be generated.
+// The info parameter, if not nil, is included under the "i"
+// field of the transaction document.
+//
+// Operations across documents are not atomically applied, but are
+// guaranteed to be eventually all applied in the order provided or
+// all aborted, as long as the affected documents are only modified
+// through transactions. If documents are simultaneously modified
+// by transactions and out of transactions the behavior is undefined.
+//
+// If Run returns no errors, all operations were applied successfully.
+// If it returns ErrAborted, one or more operations can't be applied
+// and the transaction was entirely aborted with no changes performed.
+// Otherwise, if the transaction is interrupted while running for any
+// reason, it may be resumed explicitly or by attempting to apply
+// another transaction on any of the documents targeted by ops, as
+// long as the interruption was made after the transaction document
+// itself was inserted. Run Resume with the obtained transaction id
+// to confirm whether the transaction was applied or not.
+//
+// Any number of transactions may be run concurrently, with one
+// runner or many.
+func (r *Runner) Run(ops []Op, id bson.ObjectId, info interface{}) (err error) {
+ const efmt = "error in transaction op %d: %s"
+ for i := range ops {
+ op := &ops[i]
+ if op.C == "" || op.Id == nil {
+ return fmt.Errorf(efmt, i, "C or Id missing")
+ }
+ changes := 0
+ if op.Insert != nil {
+ changes++
+ }
+ if op.Update != nil {
+ changes++
+ }
+ if op.Remove {
+ changes++
+ }
+ if changes > 1 {
+ return fmt.Errorf(efmt, i, "more than one of Insert/Update/Remove set")
+ }
+ if changes == 0 && op.Assert == nil {
+ return fmt.Errorf(efmt, i, "none of Assert/Insert/Update/Remove set")
+ }
+ }
+ if id == "" {
+ id = bson.NewObjectId()
+ }
+
+ // Insert transaction sooner rather than later, to stay on the safer side.
+ t := transaction{
+ Id: id,
+ Ops: ops,
+ State: tpreparing,
+ Info: info,
+ }
+ if err = r.tc.Insert(&t); err != nil {
+ return err
+ }
+ if err = flush(r, &t); err != nil {
+ return err
+ }
+ if t.State == taborted {
+ return ErrAborted
+ } else if t.State != tapplied {
+ panic(fmt.Errorf("invalid state for %s after flush: %q", &t, t.State))
+ }
+ return nil
+}
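+
+// transferExample is an editor-added sketch, not part of the public API,
+// showing the call pattern described above: build the ops, pick an id that
+// can be written down for a later Resume, and treat ErrAborted as the clean
+// "assertions failed" outcome. Collection and field names are hypothetical.
+func transferExample(r *Runner, from, to interface{}, amount int) (bson.ObjectId, error) {
+	ops := []Op{{
+		C:      "accounts",
+		Id:     from,
+		Assert: bson.D{{"balance", bson.D{{"$gte", amount}}}},
+		Update: bson.D{{"$inc", bson.D{{"balance", -amount}}}},
+	}, {
+		C:      "accounts",
+		Id:     to,
+		Assert: DocExists,
+		Update: bson.D{{"$inc", bson.D{{"balance", amount}}}},
+	}}
+	id := bson.NewObjectId() // record this id to Resume after an interruption
+	return id, r.Run(ops, id, nil)
+}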
+
+// ResumeAll resumes all pending transactions. All ErrAborted errors
+// from individual transactions are ignored.
+func (r *Runner) ResumeAll() (err error) {
+ debugf("Resuming all unfinished transactions")
+ iter := r.tc.Find(bson.D{{"s", bson.D{{"$in", []state{tpreparing, tprepared, tapplying}}}}}).Iter()
+ var t transaction
+ for iter.Next(&t) {
+ if t.State == tapplied || t.State == taborted {
+ continue
+ }
+ debugf("Resuming %s from %q", t.Id, t.State)
+ if err := flush(r, &t); err != nil {
+ return err
+ }
+ if !t.done() {
+ panic(fmt.Errorf("invalid state for %s after flush: %q", &t, t.State))
+ }
+ }
+ return nil
+}
+
+// Resume resumes the transaction with id. It returns mgo.ErrNotFound
+// if the transaction is not found. Otherwise, it has the same semantics
+// as the Run method after the transaction is inserted.
+func (r *Runner) Resume(id bson.ObjectId) (err error) {
+ t, err := r.load(id)
+ if err != nil {
+ return err
+ }
+ if !t.done() {
+ debugf("Resuming %s from %q", t, t.State)
+ if err := flush(r, t); err != nil {
+ return err
+ }
+ }
+ if t.State == taborted {
+ return ErrAborted
+ } else if t.State != tapplied {
+ panic(fmt.Errorf("invalid state for %s after flush: %q", t, t.State))
+ }
+ return nil
+}
+
+// ChangeLog enables logging of changes to the given collection
+// every time a transaction that modifies content is done being
+// applied.
+//
+// Saved documents are in the format:
+//
+// {"_id": <txn id>, <collection>: {"d": [<doc id>, ...], "r": [<doc revno>, ...]}}
+//
+// The document revision is the value of the txn-revno field after
+// the change has been applied. Negative values indicate the document
+// was not present in the collection. Revisions will not change when
+// updates or removes are applied to missing documents or inserts are
+// attempted when the document isn't present.
+func (r *Runner) ChangeLog(logc *mgo.Collection) {
+ r.lc = logc
+}
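+
+// For instance (editor's illustration), a transaction that inserted two fresh
+// "accounts" documents and removed a "people" document that was at revno 2
+// would be logged roughly as:
+//
+//	{"_id": <txn id>,
+//	 "accounts": {"d": [0, 1], "r": [2, 2]},
+//	 "people":   {"d": ["joe"], "r": [-3]}}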
+
+// PurgeMissing removes from collections any state that refers to transaction
+// documents that for whatever reason have been lost from the system (removed
+// by accident or lost in a hard crash, for example).
+//
+// This method should very rarely be needed, if at all, and should never be
+// used during the normal operation of an application. Its purpose is to put
+// a system that has seen unavoidable corruption back in a working state.
+func (r *Runner) PurgeMissing(collections ...string) error {
+ type M map[string]interface{}
+ type S []interface{}
+
+ type TDoc struct {
+ Id interface{} "_id"
+ TxnQueue []string "txn-queue"
+ }
+
+ found := make(map[bson.ObjectId]bool)
+
+ sort.Strings(collections)
+ for _, collection := range collections {
+ c := r.tc.Database.C(collection)
+ iter := c.Find(nil).Select(bson.M{"_id": 1, "txn-queue": 1}).Iter()
+ var tdoc TDoc
+ for iter.Next(&tdoc) {
+ for _, txnToken := range tdoc.TxnQueue {
+ txnId := bson.ObjectIdHex(txnToken[:24])
+ if found[txnId] {
+ continue
+ }
+ if r.tc.FindId(txnId).One(nil) == nil {
+ found[txnId] = true
+ continue
+ }
+ logf("WARNING: purging from document %s/%v the missing transaction id %s", collection, tdoc.Id, txnId)
+ err := c.UpdateId(tdoc.Id, M{"$pull": M{"txn-queue": M{"$regex": "^" + txnId.Hex() + "_*"}}})
+ if err != nil {
+ return fmt.Errorf("error purging missing transaction %s: %v", txnId.Hex(), err)
+ }
+ }
+ }
+ if err := iter.Close(); err != nil {
+ return fmt.Errorf("transaction queue iteration error for %s: %v", collection, err)
+ }
+ }
+
+ type StashTDoc struct {
+ Id docKey "_id"
+ TxnQueue []string "txn-queue"
+ }
+
+ iter := r.sc.Find(nil).Select(bson.M{"_id": 1, "txn-queue": 1}).Iter()
+ var stdoc StashTDoc
+ for iter.Next(&stdoc) {
+ for _, txnToken := range stdoc.TxnQueue {
+ txnId := bson.ObjectIdHex(txnToken[:24])
+ if found[txnId] {
+ continue
+ }
+ if r.tc.FindId(txnId).One(nil) == nil {
+ found[txnId] = true
+ continue
+ }
+ logf("WARNING: purging from stash document %s/%v the missing transaction id %s", stdoc.Id.C, stdoc.Id.Id, txnId)
+ err := r.sc.UpdateId(stdoc.Id, M{"$pull": M{"txn-queue": M{"$regex": "^" + txnId.Hex() + "_*"}}})
+ if err != nil {
+ return fmt.Errorf("error purging missing transaction %s: %v", txnId.Hex(), err)
+ }
+ }
+ }
+ if err := iter.Close(); err != nil {
+ return fmt.Errorf("transaction stash iteration error: %v", err)
+ }
+
+ return nil
+}
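+
+// A typical invocation (editor's illustration) lists every collection the
+// runner's transactions have touched; the stash collection is always scanned
+// as well:
+//
+//	err := runner.PurgeMissing("accounts", "people")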
+
+func (r *Runner) load(id bson.ObjectId) (*transaction, error) {
+ var t transaction
+ err := r.tc.FindId(id).One(&t)
+ if err == mgo.ErrNotFound {
+ return nil, fmt.Errorf("cannot find transaction %s", id)
+ } else if err != nil {
+ return nil, err
+ }
+ return &t, nil
+}
+
+type typeNature int
+
+const (
+ // The order of these values matters. Transactions
+ // from applications using different ordering will
+ // be incompatible with each other.
+ _ typeNature = iota
+ natureString
+ natureInt
+ natureFloat
+ natureBool
+ natureStruct
+)
+
+func valueNature(v interface{}) (value interface{}, nature typeNature) {
+ rv := reflect.ValueOf(v)
+ switch rv.Kind() {
+ case reflect.String:
+ return rv.String(), natureString
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return rv.Int(), natureInt
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return int64(rv.Uint()), natureInt
+ case reflect.Float32, reflect.Float64:
+ return rv.Float(), natureFloat
+ case reflect.Bool:
+ return rv.Bool(), natureBool
+ case reflect.Struct:
+ return v, natureStruct
+ }
+ panic("document id type unsupported by txn: " + rv.Kind().String())
+}
+
+type docKey struct {
+ C string
+ Id interface{}
+}
+
+type docKeys []docKey
+
+func (ks docKeys) Len() int { return len(ks) }
+func (ks docKeys) Swap(i, j int) { ks[i], ks[j] = ks[j], ks[i] }
+func (ks docKeys) Less(i, j int) bool {
+ a, b := ks[i], ks[j]
+ if a.C != b.C {
+ return a.C < b.C
+ }
+ return valuecmp(a.Id, b.Id) == -1
+}
+
+func valuecmp(a, b interface{}) int {
+ av, an := valueNature(a)
+ bv, bn := valueNature(b)
+ if an < bn {
+ return -1
+ }
+ if an > bn {
+ return 1
+ }
+
+ if av == bv {
+ return 0
+ }
+ var less bool
+ switch an {
+ case natureString:
+ less = av.(string) < bv.(string)
+ case natureInt:
+ less = av.(int64) < bv.(int64)
+ case natureFloat:
+ less = av.(float64) < bv.(float64)
+ case natureBool:
+ less = !av.(bool) && bv.(bool)
+ case natureStruct:
+ less = structcmp(av, bv) == -1
+ default:
+ panic("unreachable")
+ }
+ if less {
+ return -1
+ }
+ return 1
+}
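+
+// For example (editor's note), document ids of different Go kinds never
+// interleave: valuecmp("joe", 0) is -1 because natureString sorts before
+// natureInt, regardless of the values themselves.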
+
+func structcmp(a, b interface{}) int {
+ av := reflect.ValueOf(a)
+ bv := reflect.ValueOf(b)
+
+ var ai, bi = 0, 0
+ var an, bn = av.NumField(), bv.NumField()
+ var avi, bvi interface{}
+ var af, bf reflect.StructField
+ for {
+ for ai < an {
+ af = av.Type().Field(ai)
+ if isExported(af.Name) {
+ avi = av.Field(ai).Interface()
+ ai++
+ break
+ }
+ ai++
+ }
+ for bi < bn {
+ bf = bv.Type().Field(bi)
+ if isExported(bf.Name) {
+ bvi = bv.Field(bi).Interface()
+ bi++
+ break
+ }
+ bi++
+ }
+ if n := valuecmp(avi, bvi); n != 0 {
+ return n
+ }
+ nameA := getFieldName(af)
+ nameB := getFieldName(bf)
+ if nameA < nameB {
+ return -1
+ }
+ if nameA > nameB {
+ return 1
+ }
+ if ai == an && bi == bn {
+ return 0
+ }
+ if ai == an || bi == bn {
+			if ai == an {
+ return -1
+ }
+ return 1
+ }
+ }
+ panic("unreachable")
+}
+
+func isExported(name string) bool {
+ a := name[0]
+ return a >= 'A' && a <= 'Z'
+}
+
+func getFieldName(f reflect.StructField) string {
+ name := f.Tag.Get("bson")
+ if i := strings.Index(name, ","); i >= 0 {
+ name = name[:i]
+ }
+ if name == "" {
+ name = strings.ToLower(f.Name)
+ }
+ return name
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/txn_test.go b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/txn_test.go
new file mode 100644
index 00000000000..12923ca1209
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/mgo.v2/txn/txn_test.go
@@ -0,0 +1,778 @@
+package txn_test
+
+import (
+ "flag"
+ "fmt"
+ "sync"
+ "testing"
+ "time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/bson"
+ "gopkg.in/mgo.v2/dbtest"
+ "gopkg.in/mgo.v2/txn"
+)
+
+func TestAll(t *testing.T) {
+ TestingT(t)
+}
+
+type S struct {
+ server dbtest.DBServer
+ session *mgo.Session
+ db *mgo.Database
+ tc, sc *mgo.Collection
+ accounts *mgo.Collection
+ runner *txn.Runner
+}
+
+var _ = Suite(&S{})
+
+type M map[string]interface{}
+
+func (s *S) SetUpSuite(c *C) {
+ s.server.SetPath(c.MkDir())
+}
+
+func (s *S) TearDownSuite(c *C) {
+ s.server.Stop()
+}
+
+func (s *S) SetUpTest(c *C) {
+ s.server.Wipe()
+
+ txn.SetChaos(txn.Chaos{})
+ txn.SetLogger(c)
+ txn.SetDebug(true)
+
+ s.session = s.server.Session()
+ s.db = s.session.DB("test")
+ s.tc = s.db.C("tc")
+ s.sc = s.db.C("tc.stash")
+ s.accounts = s.db.C("accounts")
+ s.runner = txn.NewRunner(s.tc)
+}
+
+func (s *S) TearDownTest(c *C) {
+ txn.SetLogger(nil)
+ txn.SetDebug(false)
+ s.session.Close()
+}
+
+type Account struct {
+ Id int `bson:"_id"`
+ Balance int
+}
+
+func (s *S) TestDocExists(c *C) {
+ err := s.accounts.Insert(M{"_id": 0, "balance": 300})
+ c.Assert(err, IsNil)
+
+ exists := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Assert: txn.DocExists,
+ }}
+ missing := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Assert: txn.DocMissing,
+ }}
+
+ err = s.runner.Run(exists, "", nil)
+ c.Assert(err, IsNil)
+ err = s.runner.Run(missing, "", nil)
+ c.Assert(err, Equals, txn.ErrAborted)
+
+ err = s.accounts.RemoveId(0)
+ c.Assert(err, IsNil)
+
+ err = s.runner.Run(exists, "", nil)
+ c.Assert(err, Equals, txn.ErrAborted)
+ err = s.runner.Run(missing, "", nil)
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestInsert(c *C) {
+ err := s.accounts.Insert(M{"_id": 0, "balance": 300})
+ c.Assert(err, IsNil)
+
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Insert: M{"balance": 200},
+ }}
+
+ err = s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ var account Account
+ err = s.accounts.FindId(0).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 300)
+
+ ops[0].Id = 1
+ err = s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ err = s.accounts.FindId(1).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 200)
+}
+
+func (s *S) TestInsertStructID(c *C) {
+ type id struct {
+ FirstName string
+ LastName string
+ }
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: id{FirstName: "John", LastName: "Jones"},
+ Assert: txn.DocMissing,
+ Insert: M{"balance": 200},
+ }, {
+ C: "accounts",
+ Id: id{FirstName: "Sally", LastName: "Smith"},
+ Assert: txn.DocMissing,
+ Insert: M{"balance": 800},
+ }}
+
+ err := s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ n, err := s.accounts.Find(nil).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 2)
+}
+
+func (s *S) TestRemove(c *C) {
+ err := s.accounts.Insert(M{"_id": 0, "balance": 300})
+ c.Assert(err, IsNil)
+
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Remove: true,
+ }}
+
+ err = s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ err = s.accounts.FindId(0).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+
+ err = s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestUpdate(c *C) {
+ var err error
+ err = s.accounts.Insert(M{"_id": 0, "balance": 200})
+ c.Assert(err, IsNil)
+ err = s.accounts.Insert(M{"_id": 1, "balance": 200})
+ c.Assert(err, IsNil)
+
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Update: M{"$inc": M{"balance": 100}},
+ }}
+
+ err = s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ var account Account
+ err = s.accounts.FindId(0).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 300)
+
+ ops[0].Id = 1
+
+ err = s.accounts.FindId(1).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 200)
+}
+
+func (s *S) TestInsertUpdate(c *C) {
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Insert: M{"_id": 0, "balance": 200},
+ }, {
+ C: "accounts",
+ Id: 0,
+ Update: M{"$inc": M{"balance": 100}},
+ }}
+
+ err := s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ var account Account
+ err = s.accounts.FindId(0).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 300)
+
+ err = s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ err = s.accounts.FindId(0).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 400)
+}
+
+func (s *S) TestUpdateInsert(c *C) {
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Update: M{"$inc": M{"balance": 100}},
+ }, {
+ C: "accounts",
+ Id: 0,
+ Insert: M{"_id": 0, "balance": 200},
+ }}
+
+ err := s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ var account Account
+ err = s.accounts.FindId(0).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 200)
+
+ err = s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ err = s.accounts.FindId(0).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 300)
+}
+
+func (s *S) TestInsertRemoveInsert(c *C) {
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Insert: M{"_id": 0, "balance": 200},
+ }, {
+ C: "accounts",
+ Id: 0,
+ Remove: true,
+ }, {
+ C: "accounts",
+ Id: 0,
+ Insert: M{"_id": 0, "balance": 300},
+ }}
+
+ err := s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ var account Account
+ err = s.accounts.FindId(0).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 300)
+}
+
+func (s *S) TestQueueStashing(c *C) {
+ txn.SetChaos(txn.Chaos{
+ KillChance: 1,
+ Breakpoint: "set-applying",
+ })
+
+ opses := [][]txn.Op{{{
+ C: "accounts",
+ Id: 0,
+ Insert: M{"balance": 100},
+ }}, {{
+ C: "accounts",
+ Id: 0,
+ Remove: true,
+ }}, {{
+ C: "accounts",
+ Id: 0,
+ Insert: M{"balance": 200},
+ }}, {{
+ C: "accounts",
+ Id: 0,
+ Update: M{"$inc": M{"balance": 100}},
+ }}}
+
+ var last bson.ObjectId
+ for _, ops := range opses {
+ last = bson.NewObjectId()
+ err := s.runner.Run(ops, last, nil)
+ c.Assert(err, Equals, txn.ErrChaos)
+ }
+
+ txn.SetChaos(txn.Chaos{})
+ err := s.runner.Resume(last)
+ c.Assert(err, IsNil)
+
+ var account Account
+ err = s.accounts.FindId(0).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 300)
+}
+
+func (s *S) TestInfo(c *C) {
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Assert: txn.DocMissing,
+ }}
+
+ id := bson.NewObjectId()
+ err := s.runner.Run(ops, id, M{"n": 42})
+ c.Assert(err, IsNil)
+
+ var t struct{ I struct{ N int } }
+ err = s.tc.FindId(id).One(&t)
+ c.Assert(err, IsNil)
+ c.Assert(t.I.N, Equals, 42)
+}
+
+func (s *S) TestErrors(c *C) {
+ doc := bson.M{"foo": 1}
+ tests := []txn.Op{{
+ C: "c",
+ Id: 0,
+ }, {
+ C: "c",
+ Id: 0,
+ Insert: doc,
+ Remove: true,
+ }, {
+ C: "c",
+ Id: 0,
+ Insert: doc,
+ Update: doc,
+ }, {
+ C: "c",
+ Id: 0,
+ Update: doc,
+ Remove: true,
+ }, {
+ C: "c",
+ Assert: doc,
+ }, {
+ Id: 0,
+ Assert: doc,
+ }}
+
+ txn.SetChaos(txn.Chaos{KillChance: 1.0})
+ for _, op := range tests {
+ c.Logf("op: %v", op)
+ err := s.runner.Run([]txn.Op{op}, "", nil)
+ c.Assert(err, ErrorMatches, "error in transaction op 0: .*")
+ }
+}
+
+func (s *S) TestAssertNestedOr(c *C) {
+ // Assert uses $or internally. Ensure nesting works.
+ err := s.accounts.Insert(M{"_id": 0, "balance": 300})
+ c.Assert(err, IsNil)
+
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Assert: bson.D{{"$or", []bson.D{{{"balance", 100}}, {{"balance", 300}}}}},
+ Update: bson.D{{"$inc", bson.D{{"balance", 100}}}},
+ }}
+
+ err = s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ var account Account
+ err = s.accounts.FindId(0).One(&account)
+ c.Assert(err, IsNil)
+ c.Assert(account.Balance, Equals, 400)
+}
+
+func (s *S) TestVerifyFieldOrdering(c *C) {
+	// Certain operations used to be built with a map, which meant
+	// the ordering of fields could get scrambled.
+ fields := bson.D{{"a", 1}, {"b", 2}, {"c", 3}}
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Insert: fields,
+ }}
+
+ err := s.runner.Run(ops, "", nil)
+ c.Assert(err, IsNil)
+
+ var d bson.D
+ err = s.accounts.FindId(0).One(&d)
+ c.Assert(err, IsNil)
+
+ var filtered bson.D
+ for _, e := range d {
+ switch e.Name {
+ case "a", "b", "c":
+ filtered = append(filtered, e)
+ }
+ }
+ c.Assert(filtered, DeepEquals, fields)
+}
+
+func (s *S) TestChangeLog(c *C) {
+ chglog := s.db.C("chglog")
+ s.runner.ChangeLog(chglog)
+
+ ops := []txn.Op{{
+ C: "debts",
+ Id: 0,
+ Assert: txn.DocMissing,
+ }, {
+ C: "accounts",
+ Id: 0,
+ Insert: M{"balance": 300},
+ }, {
+ C: "accounts",
+ Id: 1,
+ Insert: M{"balance": 300},
+ }, {
+ C: "people",
+ Id: "joe",
+ Insert: M{"accounts": []int64{0, 1}},
+ }}
+ id := bson.NewObjectId()
+ err := s.runner.Run(ops, id, nil)
+ c.Assert(err, IsNil)
+
+ type IdList []interface{}
+ type Log struct {
+ Docs IdList "d"
+ Revnos []int64 "r"
+ }
+ var m map[string]*Log
+ err = chglog.FindId(id).One(&m)
+ c.Assert(err, IsNil)
+
+ c.Assert(m["accounts"], DeepEquals, &Log{IdList{0, 1}, []int64{2, 2}})
+ c.Assert(m["people"], DeepEquals, &Log{IdList{"joe"}, []int64{2}})
+ c.Assert(m["debts"], IsNil)
+
+ ops = []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Update: M{"$inc": M{"balance": 100}},
+ }, {
+ C: "accounts",
+ Id: 1,
+ Update: M{"$inc": M{"balance": 100}},
+ }}
+ id = bson.NewObjectId()
+ err = s.runner.Run(ops, id, nil)
+ c.Assert(err, IsNil)
+
+ m = nil
+ err = chglog.FindId(id).One(&m)
+ c.Assert(err, IsNil)
+
+ c.Assert(m["accounts"], DeepEquals, &Log{IdList{0, 1}, []int64{3, 3}})
+ c.Assert(m["people"], IsNil)
+
+ ops = []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Remove: true,
+ }, {
+ C: "people",
+ Id: "joe",
+ Remove: true,
+ }}
+ id = bson.NewObjectId()
+ err = s.runner.Run(ops, id, nil)
+ c.Assert(err, IsNil)
+
+ m = nil
+ err = chglog.FindId(id).One(&m)
+ c.Assert(err, IsNil)
+
+ c.Assert(m["accounts"], DeepEquals, &Log{IdList{0}, []int64{-4}})
+ c.Assert(m["people"], DeepEquals, &Log{IdList{"joe"}, []int64{-3}})
+}
+
+func (s *S) TestPurgeMissing(c *C) {
+ txn.SetChaos(txn.Chaos{
+ KillChance: 1,
+ Breakpoint: "set-applying",
+ })
+
+ err := s.accounts.Insert(M{"_id": 0, "balance": 100})
+ c.Assert(err, IsNil)
+ err = s.accounts.Insert(M{"_id": 1, "balance": 100})
+ c.Assert(err, IsNil)
+
+ ops1 := []txn.Op{{
+ C: "accounts",
+ Id: 3,
+ Insert: M{"balance": 100},
+ }}
+
+ ops2 := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Remove: true,
+ }, {
+ C: "accounts",
+ Id: 1,
+ Update: M{"$inc": M{"balance": 100}},
+ }, {
+ C: "accounts",
+ Id: 2,
+ Insert: M{"balance": 100},
+ }}
+
+ first := bson.NewObjectId()
+ c.Logf("---- Running ops1 under transaction %q, to be canceled by chaos", first.Hex())
+ err = s.runner.Run(ops1, first, nil)
+ c.Assert(err, Equals, txn.ErrChaos)
+
+ last := bson.NewObjectId()
+ c.Logf("---- Running ops2 under transaction %q, to be canceled by chaos", last.Hex())
+ err = s.runner.Run(ops2, last, nil)
+ c.Assert(err, Equals, txn.ErrChaos)
+
+ c.Logf("---- Removing transaction %q", last.Hex())
+ err = s.tc.RemoveId(last)
+ c.Assert(err, IsNil)
+
+ c.Logf("---- Disabling chaos and attempting to resume all")
+ txn.SetChaos(txn.Chaos{})
+ err = s.runner.ResumeAll()
+ c.Assert(err, IsNil)
+
+ again := bson.NewObjectId()
+ c.Logf("---- Running ops2 again under transaction %q, to fail for missing transaction", again.Hex())
+ err = s.runner.Run(ops2, again, nil)
+ c.Assert(err, ErrorMatches, "cannot find transaction .*")
+
+ c.Logf("---- Purging missing transactions")
+ err = s.runner.PurgeMissing("accounts")
+ c.Assert(err, IsNil)
+
+ c.Logf("---- Resuming pending transactions")
+ err = s.runner.ResumeAll()
+ c.Assert(err, IsNil)
+
+ expect := []struct{ Id, Balance int }{
+ {0, -1},
+ {1, 200},
+ {2, 100},
+ {3, 100},
+ }
+ var got Account
+ for _, want := range expect {
+ err = s.accounts.FindId(want.Id).One(&got)
+ if want.Balance == -1 {
+ if err != mgo.ErrNotFound {
+				c.Errorf("Account %d should not exist, find got err=%#v", want.Id, err)
+ }
+ } else if err != nil {
+ c.Errorf("Account %d should have balance of %d, but wasn't found", want.Id, want.Balance)
+ } else if got.Balance != want.Balance {
+ c.Errorf("Account %d should have balance of %d, got %d", want.Id, want.Balance, got.Balance)
+ }
+ }
+}
+
+func (s *S) TestTxnQueueStashStressTest(c *C) {
+ txn.SetChaos(txn.Chaos{
+ SlowdownChance: 0.3,
+ Slowdown: 50 * time.Millisecond,
+ })
+ defer txn.SetChaos(txn.Chaos{})
+
+ // So we can run more iterations of the test in less time.
+ txn.SetDebug(false)
+
+ const runners = 10
+ const inserts = 10
+ const repeat = 100
+
+ for r := 0; r < repeat; r++ {
+ var wg sync.WaitGroup
+ wg.Add(runners)
+ for i := 0; i < runners; i++ {
+ go func(i, r int) {
+ defer wg.Done()
+
+ session := s.session.New()
+ defer session.Close()
+ runner := txn.NewRunner(s.tc.With(session))
+
+ for j := 0; j < inserts; j++ {
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: fmt.Sprintf("insert-%d-%d", r, j),
+ Insert: bson.M{
+ "added-by": i,
+ },
+ }}
+ err := runner.Run(ops, "", nil)
+ if err != txn.ErrAborted {
+ c.Check(err, IsNil)
+ }
+ }
+ }(i, r)
+ }
+ wg.Wait()
+ }
+}
+
+func (s *S) TestPurgeMissingPipelineSizeLimit(c *C) {
+ // This test ensures that PurgeMissing can handle very large
+ // txn-queue fields. Previous iterations of PurgeMissing would
+ // trigger a 16MB aggregation pipeline result size limit when run
+	// against documents or stashes with large numbers of txn-queue
+ // entries. PurgeMissing now no longer uses aggregation pipelines
+ // to work around this limit.
+
+ // The pipeline result size limitation was removed from MongoDB in 2.6 so
+	// this test is only run for older MongoDB versions.
+ build, err := s.session.BuildInfo()
+ c.Assert(err, IsNil)
+ if build.VersionAtLeast(2, 6) {
+ c.Skip("This tests a problem that can only happen with MongoDB < 2.6 ")
+ }
+
+ // Insert a single document to work with.
+ err = s.accounts.Insert(M{"_id": 0, "balance": 100})
+ c.Assert(err, IsNil)
+
+ ops := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Update: M{"$inc": M{"balance": 100}},
+ }}
+
+ // Generate one successful transaction.
+ good := bson.NewObjectId()
+ c.Logf("---- Running ops under transaction %q", good.Hex())
+ err = s.runner.Run(ops, good, nil)
+ c.Assert(err, IsNil)
+
+	// Generate another transaction which will go missing.
+ missing := bson.NewObjectId()
+ c.Logf("---- Running ops under transaction %q (which will go missing)", missing.Hex())
+ err = s.runner.Run(ops, missing, nil)
+ c.Assert(err, IsNil)
+
+ err = s.tc.RemoveId(missing)
+ c.Assert(err, IsNil)
+
+ // Generate a txn-queue on the test document that's large enough
+ // that it used to cause PurgeMissing to exceed MongoDB's pipeline
+ // result 16MB size limit (MongoDB 2.4 and older only).
+ //
+ // The contents of the txn-queue field doesn't matter, only that
+ // it's big enough to trigger the size limit. The required size
+ // can also be achieved by using multiple documents as long as the
+ // cumulative size of all the txn-queue fields exceeds the
+ // pipeline limit. A single document is easier to work with for
+ // this test however.
+ //
+	// The txn id of the successful transaction is used to fill the
+ // txn-queue because this takes advantage of a short circuit in
+ // PurgeMissing, dramatically speeding up the test run time.
+ const fakeQueueLen = 250000
+ fakeTxnQueue := make([]string, fakeQueueLen)
+ token := good.Hex() + "_12345678" // txn id + nonce
+ for i := 0; i < fakeQueueLen; i++ {
+ fakeTxnQueue[i] = token
+ }
+
+ err = s.accounts.UpdateId(0, bson.M{
+ "$set": bson.M{"txn-queue": fakeTxnQueue},
+ })
+ c.Assert(err, IsNil)
+
+ // PurgeMissing could hit the same pipeline result size limit when
+ // processing the txn-queue fields of stash documents so insert
+ // the large txn-queue there too to ensure that no longer happens.
+ err = s.sc.Insert(
+ bson.D{{"c", "accounts"}, {"id", 0}},
+ bson.M{"txn-queue": fakeTxnQueue},
+ )
+ c.Assert(err, IsNil)
+
+ c.Logf("---- Purging missing transactions")
+ err = s.runner.PurgeMissing("accounts")
+ c.Assert(err, IsNil)
+}
+
+var flaky = flag.Bool("flaky", false, "Include flaky tests")
+
+func (s *S) TestTxnQueueStressTest(c *C) {
+ // This fails about 20% of the time on Mongo 3.2 (I haven't tried
+ // other versions) with account balance being 3999 instead of
+ // 4000. That implies that some updates are being lost. This is
+ // bad and we'll need to chase it down in the near future - the
+ // only reason it's being skipped now is that it's already failing
+ // and it's better to have the txn tests running without this one
+ // than to have them not running at all.
+ if !*flaky {
+ c.Skip("Fails intermittently - disabling until fixed")
+ }
+ txn.SetChaos(txn.Chaos{
+ SlowdownChance: 0.3,
+ Slowdown: 50 * time.Millisecond,
+ })
+ defer txn.SetChaos(txn.Chaos{})
+
+ // So we can run more iterations of the test in less time.
+ txn.SetDebug(false)
+
+ err := s.accounts.Insert(M{"_id": 0, "balance": 0}, M{"_id": 1, "balance": 0})
+ c.Assert(err, IsNil)
+
+ // Run half of the operations changing account 0 and then 1,
+ // and the other half in the opposite order.
+ ops01 := []txn.Op{{
+ C: "accounts",
+ Id: 0,
+ Update: M{"$inc": M{"balance": 1}},
+ }, {
+ C: "accounts",
+ Id: 1,
+ Update: M{"$inc": M{"balance": 1}},
+ }}
+
+ ops10 := []txn.Op{{
+ C: "accounts",
+ Id: 1,
+ Update: M{"$inc": M{"balance": 1}},
+ }, {
+ C: "accounts",
+ Id: 0,
+ Update: M{"$inc": M{"balance": 1}},
+ }}
+
+ ops := [][]txn.Op{ops01, ops10}
+
+ const runners = 4
+ const changes = 1000
+
+ var wg sync.WaitGroup
+ wg.Add(runners)
+ for n := 0; n < runners; n++ {
+ n := n
+ go func() {
+ defer wg.Done()
+ for i := 0; i < changes; i++ {
+ err = s.runner.Run(ops[n%2], "", nil)
+ c.Assert(err, IsNil)
+ }
+ }()
+ }
+ wg.Wait()
+
+ for id := 0; id < 2; id++ {
+ var account Account
+ err = s.accounts.FindId(id).One(&account)
+ if account.Balance != runners*changes {
+ c.Errorf("Account should have balance of %d, got %d", runners*changes, account.Balance)
+ }
+ }
+}
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/LICENSE b/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/LICENSE
new file mode 100644
index 00000000000..a4249bb31dd
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/LICENSE
@@ -0,0 +1,29 @@
+tomb - support for clean goroutine termination in Go.
+
+Copyright (c) 2010-2011 - Gustavo Niemeyer <gustavo@niemeyer.net>
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/README.md b/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/README.md
new file mode 100644
index 00000000000..e7f282b5aa9
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/README.md
@@ -0,0 +1,4 @@
+Installation and usage
+----------------------
+
+See [gopkg.in/tomb.v2](https://gopkg.in/tomb.v2) for documentation and usage details.
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/tomb.go b/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/tomb.go
new file mode 100644
index 00000000000..28bc552b2cb
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/tomb.go
@@ -0,0 +1,223 @@
+// Copyright (c) 2011 - Gustavo Niemeyer <gustavo@niemeyer.net>
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+// * Neither the name of the copyright holder nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// The tomb package handles clean goroutine tracking and termination.
+//
+// The zero value of a Tomb is ready to handle the creation of a tracked
+// goroutine via its Go method, and then any tracked goroutine may call
+// the Go method again to create additional tracked goroutines at
+// any point.
+//
+// If any of the tracked goroutines returns a non-nil error, or the
+// Kill or Killf method is called by any goroutine in the system (tracked
+// or not), the tomb Err is set, Alive is set to false, and the Dying
+// channel is closed to flag that all tracked goroutines are supposed
+// to willingly terminate as soon as possible.
+//
+// Once all tracked goroutines terminate, the Dead channel is closed,
+// and Wait unblocks and returns the first non-nil error presented
+// to the tomb via a result or an explicit Kill or Killf method call,
+// or nil if there were no errors.
+//
+// It is okay to create further goroutines via the Go method while
+// the tomb is in a dying state. The final dead state is only reached
+// once all tracked goroutines terminate, at which point calling
+// the Go method again will cause a runtime panic.
+//
+// Tracked functions and methods that are still running while the tomb
+// is in a dying state may choose to return ErrDying as their error value.
+// This preserves the well-established non-nil error convention, but is
+// understood by the tomb as a clean termination. The Err and Wait
+// methods will still return nil if all observed errors were either
+// nil or ErrDying.
+//
+// For background and a detailed example, see the following blog post:
+//
+// http://blog.labix.org/2011/10/09/death-of-goroutines-under-control
+//
+package tomb
+
+import (
+ "errors"
+ "fmt"
+ "sync"
+)
+
+// A Tomb tracks the lifecycle of one or more goroutines as alive,
+// dying or dead, and the reason for their death.
+//
+// See the package documentation for details.
+type Tomb struct {
+ m sync.Mutex
+ alive int
+ dying chan struct{}
+ dead chan struct{}
+ reason error
+}
+
+var (
+ ErrStillAlive = errors.New("tomb: still alive")
+ ErrDying = errors.New("tomb: dying")
+)
+
+func (t *Tomb) init() {
+ t.m.Lock()
+ if t.dead == nil {
+ t.dead = make(chan struct{})
+ t.dying = make(chan struct{})
+ t.reason = ErrStillAlive
+ }
+ t.m.Unlock()
+}
+
+// Dead returns the channel that can be used to wait until
+// all goroutines have finished running.
+func (t *Tomb) Dead() <-chan struct{} {
+ t.init()
+ return t.dead
+}
+
+// Dying returns the channel that can be used to wait until
+// the tomb enters a dying state via Kill, Killf, or a goroutine error.
+func (t *Tomb) Dying() <-chan struct{} {
+ t.init()
+ return t.dying
+}
+
+// Wait blocks until all goroutines have finished running, and
+// then returns the reason for their death.
+func (t *Tomb) Wait() error {
+ t.init()
+ <-t.dead
+ t.m.Lock()
+ reason := t.reason
+ t.m.Unlock()
+ return reason
+}
+
+// Go runs f in a new goroutine and tracks its termination.
+//
+// If f returns a non-nil error, t.Kill is called with that
+// error as the death reason parameter.
+//
+// It is f's responsibility to monitor the tomb and return
+// appropriately once it is in a dying state.
+//
+// It is safe for the f function to call the Go method again
+// to create additional tracked goroutines. Once all tracked
+// goroutines return, the Dead channel is closed and the
+// Wait method unblocks and returns the death reason.
+//
+// Calling the Go method after all tracked goroutines return
+// causes a runtime panic. For that reason, calling the Go
+// method a second time from outside a tracked goroutine is unsafe.
+func (t *Tomb) Go(f func() error) {
+ t.init()
+ t.m.Lock()
+ defer t.m.Unlock()
+ select {
+ case <-t.dead:
+ panic("tomb.Go called after all goroutines terminated")
+ default:
+ }
+ t.alive++
+ go t.run(f)
+}
+
+func (t *Tomb) run(f func() error) {
+ err := f()
+ t.m.Lock()
+ defer t.m.Unlock()
+ t.alive--
+ if t.alive == 0 || err != nil {
+ t.kill(err)
+ if t.alive == 0 {
+ close(t.dead)
+ }
+ }
+}
+
+// Kill puts the tomb in a dying state for the given reason,
+// closes the Dying channel, and sets Alive to false.
+//
+// Although Kill may be called multiple times, only the first
+// non-nil error is recorded as the death reason.
+//
+// If reason is ErrDying, the previous reason isn't replaced
+// even if nil. It's a runtime error to call Kill with ErrDying
+// if t is not in a dying state.
+func (t *Tomb) Kill(reason error) {
+ t.init()
+ t.m.Lock()
+ defer t.m.Unlock()
+ t.kill(reason)
+}
+
+func (t *Tomb) kill(reason error) {
+ if reason == ErrStillAlive {
+ panic("tomb: Kill with ErrStillAlive")
+ }
+ if reason == ErrDying {
+ if t.reason == ErrStillAlive {
+ panic("tomb: Kill with ErrDying while still alive")
+ }
+ return
+ }
+ if t.reason == ErrStillAlive {
+ t.reason = reason
+ close(t.dying)
+ return
+ }
+ if t.reason == nil {
+ t.reason = reason
+ return
+ }
+}
+
+// Killf calls the Kill method with an error built by passing the received
+// parameters to fmt.Errorf. The generated error is also returned.
+func (t *Tomb) Killf(f string, a ...interface{}) error {
+ err := fmt.Errorf(f, a...)
+ t.Kill(err)
+ return err
+}
+
+// Err returns the death reason, or ErrStillAlive if the tomb
+// is not in a dying or dead state.
+func (t *Tomb) Err() (reason error) {
+ t.init()
+ t.m.Lock()
+ reason = t.reason
+ t.m.Unlock()
+ return
+}
+
+// Alive returns true if the tomb is not in a dying or dead state.
+func (t *Tomb) Alive() bool {
+ return t.Err() == ErrStillAlive
+}
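
The package comment above walks through the full lifecycle (Go, Dying, Kill, Wait, and the ErrDying convention). The sketch below is editorial and not part of the vendored diff; it assumes only the API shown in tomb.go and the gopkg.in/tomb.v2 import path, and shows a worker goroutine that shuts down cleanly once the tomb starts dying.

    // Minimal lifecycle sketch (editorial, not vendored code).
    package main

    import (
        "fmt"
        "time"

        "gopkg.in/tomb.v2"
    )

    func main() {
        var t tomb.Tomb // the zero value is ready to use

        t.Go(func() error {
            ticker := time.NewTicker(10 * time.Millisecond)
            defer ticker.Stop()
            for {
                select {
                case <-ticker.C:
                    // do one unit of work per tick
                case <-t.Dying():
                    // Kill was called; report a clean termination
                    return tomb.ErrDying
                }
            }
        })

        time.Sleep(50 * time.Millisecond)
        t.Kill(nil)                                    // request shutdown with no error
        fmt.Println("worker stopped, err =", t.Wait()) // err = <nil>
    }

Because the worker returns tomb.ErrDying after <-t.Dying(), the tomb records a clean termination and Wait returns nil, matching the convention described in the package comment.
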
diff --git a/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/tomb_test.go b/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/tomb_test.go
new file mode 100644
index 00000000000..a1064dffe65
--- /dev/null
+++ b/src/mongo/gotools/vendor/src/gopkg.in/tomb.v2/tomb_test.go
@@ -0,0 +1,183 @@
+package tomb_test
+
+import (
+ "errors"
+ "gopkg.in/tomb.v2"
+ "reflect"
+ "testing"
+)
+
+func nothing() error { return nil }
+
+func TestNewTomb(t *testing.T) {
+ tb := &tomb.Tomb{}
+ checkState(t, tb, false, false, tomb.ErrStillAlive)
+}
+
+func TestGo(t *testing.T) {
+ tb := &tomb.Tomb{}
+ alive := make(chan bool)
+ tb.Go(func() error {
+ alive <- true
+ tb.Go(func() error {
+ alive <- true
+ <-tb.Dying()
+ return nil
+ })
+ <-tb.Dying()
+ return nil
+ })
+ <-alive
+ <-alive
+ checkState(t, tb, false, false, tomb.ErrStillAlive)
+ tb.Kill(nil)
+ tb.Wait()
+ checkState(t, tb, true, true, nil)
+}
+
+func TestGoErr(t *testing.T) {
+ first := errors.New("first error")
+ second := errors.New("second error")
+ tb := &tomb.Tomb{}
+ alive := make(chan bool)
+ tb.Go(func() error {
+ alive <- true
+ tb.Go(func() error {
+ alive <- true
+ return first
+ })
+ <-tb.Dying()
+ return second
+ })
+ <-alive
+ <-alive
+ tb.Wait()
+ checkState(t, tb, true, true, first)
+}
+
+func TestGoPanic(t *testing.T) {
+ // Calling Go after all tracked goroutines have terminated must panic.
+ tb := &tomb.Tomb{}
+ tb.Go(nothing)
+ tb.Wait()
+ defer func() {
+ err := recover()
+ if err != "tomb.Go called after all goroutines terminated" {
+ t.Fatalf("Wrong panic on post-death tomb.Go call: %v", err)
+ }
+ checkState(t, tb, true, true, nil)
+ }()
+ tb.Go(nothing)
+}
+
+func TestKill(t *testing.T) {
+ // a nil reason puts the tomb in a dying state with no error recorded
+ tb := &tomb.Tomb{}
+ tb.Kill(nil)
+ checkState(t, tb, true, false, nil)
+
+ // a non-nil reason now replaces the nil reason
+ err := errors.New("some error")
+ tb.Kill(err)
+ checkState(t, tb, true, false, err)
+
+ // another non-nil reason won't replace the first one
+ tb.Kill(errors.New("ignore me"))
+ checkState(t, tb, true, false, err)
+
+ tb.Go(nothing)
+ tb.Wait()
+ checkState(t, tb, true, true, err)
+}
+
+func TestKillf(t *testing.T) {
+ tb := &tomb.Tomb{}
+
+ err := tb.Killf("BO%s", "OM")
+ if s := err.Error(); s != "BOOM" {
+ t.Fatalf(`Killf("BO%%s", "OM"): want "BOOM", got %q`, s)
+ }
+ checkState(t, tb, true, false, err)
+
+ // another non-nil reason won't replace the first one
+ tb.Killf("ignore me")
+ checkState(t, tb, true, false, err)
+
+ tb.Go(nothing)
+ tb.Wait()
+ checkState(t, tb, true, true, err)
+}
+
+func TestErrDying(t *testing.T) {
+ // ErrDying being used properly, after a clean death.
+ tb := &tomb.Tomb{}
+ tb.Kill(nil)
+ tb.Kill(tomb.ErrDying)
+ checkState(t, tb, true, false, nil)
+
+ // ErrDying being used properly, after an errorful death.
+ err := errors.New("some error")
+ tb.Kill(err)
+ tb.Kill(tomb.ErrDying)
+ checkState(t, tb, true, false, err)
+
+ // ErrDying being used badly, with an alive tomb.
+ tb = &tomb.Tomb{}
+ defer func() {
+ err := recover()
+ if err != "tomb: Kill with ErrDying while still alive" {
+ t.Fatalf("Wrong panic on Kill(ErrDying): %v", err)
+ }
+ checkState(t, tb, false, false, tomb.ErrStillAlive)
+ }()
+ tb.Kill(tomb.ErrDying)
+}
+
+func TestKillErrStillAlivePanic(t *testing.T) {
+ tb := &tomb.Tomb{}
+ defer func() {
+ err := recover()
+ if err != "tomb: Kill with ErrStillAlive" {
+ t.Fatalf("Wrong panic on Kill(ErrStillAlive): %v", err)
+ }
+ checkState(t, tb, false, false, tomb.ErrStillAlive)
+ }()
+ tb.Kill(tomb.ErrStillAlive)
+}
+
+func checkState(t *testing.T, tb *tomb.Tomb, wantDying, wantDead bool, wantErr error) {
+ select {
+ case <-tb.Dying():
+ if !wantDying {
+ t.Error("<-Dying: should block")
+ }
+ default:
+ if wantDying {
+ t.Error("<-Dying: should not block")
+ }
+ }
+ seemsDead := false
+ select {
+ case <-tb.Dead():
+ if !wantDead {
+ t.Error("<-Dead: should block")
+ }
+ seemsDead = true
+ default:
+ if wantDead {
+ t.Error("<-Dead: should not block")
+ }
+ }
+ if err := tb.Err(); err != wantErr {
+ t.Errorf("Err: want %#v, got %#v", wantErr, err)
+ }
+ if wantDead && seemsDead {
+ waitErr := tb.Wait()
+ switch {
+ case waitErr == tomb.ErrStillAlive:
+ t.Errorf("Wait should not return ErrStillAlive")
+ case !reflect.DeepEqual(waitErr, wantErr):
+ t.Errorf("Wait: want %#v, got %#v", wantErr, waitErr)
+ }
+ }
+}
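
TestKill, TestKillf, and TestErrDying above exercise the reason-precedence rules implemented in kill(). The sketch below is editorial and not part of the vendored diff; it assumes the gopkg.in/tomb.v2 import path and simply distills those rules: the first non-nil reason wins, later reasons are ignored, and ErrDying is accepted as a clean signal once the tomb is already dying.

    // Reason-precedence sketch (editorial, not vendored code).
    package main

    import (
        "errors"
        "fmt"

        "gopkg.in/tomb.v2"
    )

    func main() {
        var t tomb.Tomb

        t.Kill(nil)                // dying state, no error recorded yet
        t.Kill(errors.New("boom")) // first non-nil reason is recorded
        t.Killf("ignored: %d", 42) // later reasons do not replace it
        t.Kill(tomb.ErrDying)      // clean signal; allowed once already dying

        // No goroutines were tracked here, so Wait would block;
        // we only inspect Err and Alive.
        fmt.Println(t.Err())   // boom
        fmt.Println(t.Alive()) // false
    }
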