libgo: Update to weekly.2012-01-20.
From-SVN: r183540
Parent: df1304ee03
Commit: af92e38566
198 changed files with 5123 additions and 1951 deletions
@@ -1,4 +1,4 @@
354b17404643
9f2be4fbbf69

The first line of this file holds the Mercurial revision number of the
last merge done from the master library sources.
@ -197,6 +197,16 @@ toolexeclibgocryptox509dir = $(toolexeclibgocryptodir)/x509
|
|||
toolexeclibgocryptox509_DATA = \
|
||||
crypto/x509/pkix.gox
|
||||
|
||||
toolexeclibgodatabasedir = $(toolexeclibgodir)/database
|
||||
|
||||
toolexeclibgodatabase_DATA = \
|
||||
database/sql.gox
|
||||
|
||||
toolexeclibgodatabasesqldir = $(toolexeclibgodatabasedir)/sql
|
||||
|
||||
toolexeclibgodatabasesql_DATA = \
|
||||
database/sql/driver.gox
|
||||
|
||||
toolexeclibgodebugdir = $(toolexeclibgodir)/debug
|
||||
|
||||
toolexeclibgodebug_DATA = \
|
||||
|
@ -237,15 +247,10 @@ toolexeclibgoexp_DATA = \
|
|||
exp/norm.gox \
|
||||
exp/proxy.gox \
|
||||
exp/spdy.gox \
|
||||
exp/sql.gox \
|
||||
exp/ssh.gox \
|
||||
exp/terminal.gox \
|
||||
exp/types.gox
|
||||
|
||||
toolexeclibgoexpsqldir = $(toolexeclibgoexpdir)/sql
|
||||
|
||||
toolexeclibgoexpsql_DATA = \
|
||||
exp/sql/driver.gox
|
||||
exp/types.gox \
|
||||
exp/utf8string.gox
|
||||
|
||||
toolexeclibgogodir = $(toolexeclibgodir)/go
|
||||
|
||||
|
@ -717,6 +722,7 @@ go_net_files = \
|
|||
go/net/dnsclient_unix.go \
|
||||
go/net/dnsconfig.go \
|
||||
go/net/dnsmsg.go \
|
||||
go/net/doc.go \
|
||||
$(go_net_newpollserver_file) \
|
||||
go/net/fd.go \
|
||||
$(go_net_fd_os_file) \
|
||||
|
@ -793,6 +799,7 @@ endif
|
|||
go_os_files = \
|
||||
$(go_os_dir_file) \
|
||||
go/os/dir.go \
|
||||
go/os/doc.go \
|
||||
go/os/env.go \
|
||||
go/os/error.go \
|
||||
go/os/error_posix.go \
|
||||
|
@ -1005,7 +1012,8 @@ go_crypto_dsa_files = \
|
|||
go_crypto_ecdsa_files = \
|
||||
go/crypto/ecdsa/ecdsa.go
|
||||
go_crypto_elliptic_files = \
|
||||
go/crypto/elliptic/elliptic.go
|
||||
go/crypto/elliptic/elliptic.go \
|
||||
go/crypto/elliptic/p224.go
|
||||
go_crypto_hmac_files = \
|
||||
go/crypto/hmac/hmac.go
|
||||
go_crypto_md4_files = \
|
||||
|
@ -1094,6 +1102,14 @@ go_crypto_openpgp_s2k_files = \
|
|||
go_crypto_x509_pkix_files = \
|
||||
go/crypto/x509/pkix/pkix.go
|
||||
|
||||
go_database_sql_files = \
|
||||
go/database/sql/convert.go \
|
||||
go/database/sql/sql.go
|
||||
|
||||
go_database_sql_driver_files = \
|
||||
go/database/sql/driver/driver.go \
|
||||
go/database/sql/driver/types.go
|
||||
|
||||
go_debug_dwarf_files = \
|
||||
go/debug/dwarf/buf.go \
|
||||
go/debug/dwarf/const.go \
|
||||
|
@ -1179,9 +1195,6 @@ go_exp_spdy_files = \
|
|||
go/exp/spdy/read.go \
|
||||
go/exp/spdy/types.go \
|
||||
go/exp/spdy/write.go
|
||||
go_exp_sql_files = \
|
||||
go/exp/sql/convert.go \
|
||||
go/exp/sql/sql.go
|
||||
go_exp_ssh_files = \
|
||||
go/exp/ssh/channel.go \
|
||||
go/exp/ssh/cipher.go \
|
||||
|
@ -1205,10 +1218,8 @@ go_exp_types_files = \
|
|||
go/exp/types/gcimporter.go \
|
||||
go/exp/types/types.go \
|
||||
go/exp/types/universe.go
|
||||
|
||||
go_exp_sql_driver_files = \
|
||||
go/exp/sql/driver/driver.go \
|
||||
go/exp/sql/driver/types.go
|
||||
go_exp_utf8string_files = \
|
||||
go/exp/utf8string/string.go
|
||||
|
||||
go_go_ast_files = \
|
||||
go/go/ast/ast.go \
|
||||
|
@ -1467,7 +1478,6 @@ go_text_scanner_files = \
|
|||
go_unicode_utf16_files = \
|
||||
go/unicode/utf16/utf16.go
|
||||
go_unicode_utf8_files = \
|
||||
go/unicode/utf8/string.go \
|
||||
go/unicode/utf8/utf8.go
|
||||
|
||||
# Define Syscall and Syscall6.
|
||||
|
@ -1751,6 +1761,8 @@ libgo_go_objs = \
|
|||
crypto/openpgp/packet.lo \
|
||||
crypto/openpgp/s2k.lo \
|
||||
crypto/x509/pkix.lo \
|
||||
database/sql.lo \
|
||||
database/sql/driver.lo \
|
||||
debug/dwarf.lo \
|
||||
debug/elf.lo \
|
||||
debug/gosym.lo \
|
||||
|
@ -1772,11 +1784,10 @@ libgo_go_objs = \
|
|||
exp/norm.lo \
|
||||
exp/proxy.lo \
|
||||
exp/spdy.lo \
|
||||
exp/sql.lo \
|
||||
exp/ssh.lo \
|
||||
exp/terminal.lo \
|
||||
exp/types.lo \
|
||||
exp/sql/driver.lo \
|
||||
exp/utf8string.lo \
|
||||
html/template.lo \
|
||||
go/ast.lo \
|
||||
go/build.lo \
|
||||
|
@ -2646,6 +2657,26 @@ crypto/x509/pkix/check: $(CHECK_DEPS)
|
|||
@$(CHECK)
|
||||
.PHONY: crypto/x509/pkix/check
|
||||
|
||||
@go_include@ database/sql.lo.dep
|
||||
database/sql.lo.dep: $(go_database_sql_files)
|
||||
$(BUILDDEPS)
|
||||
database/sql.lo: $(go_database_sql_files)
|
||||
$(BUILDPACKAGE)
|
||||
database/sql/check: $(CHECK_DEPS)
|
||||
@$(MKDIR_P) database/sql
|
||||
@$(CHECK)
|
||||
.PHONY: database/sql/check
|
||||
|
||||
@go_include@ database/sql/driver.lo.dep
|
||||
database/sql/driver.lo.dep: $(go_database_sql_driver_files)
|
||||
$(BUILDDEPS)
|
||||
database/sql/driver.lo: $(go_database_sql_driver_files)
|
||||
$(BUILDPACKAGE)
|
||||
database/sql/driver/check: $(CHECK_DEPS)
|
||||
@$(MKDIR_P) database/sql/driver
|
||||
@$(CHECK)
|
||||
.PHONY: database/sql/driver/check
|
||||
|
||||
@go_include@ debug/dwarf.lo.dep
|
||||
debug/dwarf.lo.dep: $(go_debug_dwarf_files)
|
||||
$(BUILDDEPS)
|
||||
|
@ -2856,16 +2887,6 @@ exp/spdy/check: $(CHECK_DEPS)
|
|||
@$(CHECK)
|
||||
.PHONY: exp/spdy/check
|
||||
|
||||
@go_include@ exp/sql.lo.dep
|
||||
exp/sql.lo.dep: $(go_exp_sql_files)
|
||||
$(BUILDDEPS)
|
||||
exp/sql.lo: $(go_exp_sql_files)
|
||||
$(BUILDPACKAGE)
|
||||
exp/sql/check: $(CHECK_DEPS)
|
||||
@$(MKDIR_P) exp/sql
|
||||
@$(CHECK)
|
||||
.PHONY: exp/sql/check
|
||||
|
||||
@go_include@ exp/ssh.lo.dep
|
||||
exp/ssh.lo.dep: $(go_exp_ssh_files)
|
||||
$(BUILDDEPS)
|
||||
|
@ -2896,6 +2917,16 @@ exp/types/check: $(CHECK_DEPS)
|
|||
@$(CHECK)
|
||||
.PHONY: exp/types/check
|
||||
|
||||
@go_include@ exp/utf8string.lo.dep
|
||||
exp/utf8string.lo.dep: $(go_exp_utf8string_files)
|
||||
$(BUILDDEPS)
|
||||
exp/utf8string.lo: $(go_exp_utf8string_files)
|
||||
$(BUILDPACKAGE)
|
||||
exp/utf8string/check: $(CHECK_DEPS)
|
||||
@$(MKDIR_P) exp/utf8string
|
||||
@$(CHECK)
|
||||
.PHONY: exp/utf8string/check
|
||||
|
||||
@go_include@ exp/inotify.lo.dep
|
||||
exp/inotify.lo.dep: $(go_exp_inotify_files)
|
||||
$(BUILDDEPS)
|
||||
|
@ -2906,16 +2937,6 @@ exp/inotify/check: $(CHECK_DEPS)
|
|||
@$(CHECK)
|
||||
.PHONY: exp/inotify/check
|
||||
|
||||
@go_include@ exp/sql/driver.lo.dep
|
||||
exp/sql/driver.lo.dep: $(go_exp_sql_driver_files)
|
||||
$(BUILDDEPS)
|
||||
exp/sql/driver.lo: $(go_exp_sql_driver_files)
|
||||
$(BUILDPACKAGE)
|
||||
exp/sql/driver/check: $(CHECK_DEPS)
|
||||
@$(MKDIR_P) exp/sql/driver
|
||||
@$(CHECK)
|
||||
.PHONY: exp/sql/driver/check
|
||||
|
||||
@go_include@ html/template.lo.dep
|
||||
html/template.lo.dep: $(go_html_template_files)
|
||||
$(BUILDDEPS)
|
||||
|
@ -3670,6 +3691,12 @@ crypto/openpgp/s2k.gox: crypto/openpgp/s2k.lo
|
|||
crypto/x509/pkix.gox: crypto/x509/pkix.lo
|
||||
$(BUILDGOX)
|
||||
|
||||
database/sql.gox: database/sql.lo
|
||||
$(BUILDGOX)
|
||||
|
||||
database/sql/driver.gox: database/sql/driver.lo
|
||||
$(BUILDGOX)
|
||||
|
||||
debug/dwarf.gox: debug/dwarf.lo
|
||||
$(BUILDGOX)
|
||||
debug/elf.gox: debug/elf.lo
|
||||
|
@ -3716,16 +3743,13 @@ exp/proxy.gox: exp/proxy.lo
|
|||
$(BUILDGOX)
|
||||
exp/spdy.gox: exp/spdy.lo
|
||||
$(BUILDGOX)
|
||||
exp/sql.gox: exp/sql.lo
|
||||
$(BUILDGOX)
|
||||
exp/ssh.gox: exp/ssh.lo
|
||||
$(BUILDGOX)
|
||||
exp/terminal.gox: exp/terminal.lo
|
||||
$(BUILDGOX)
|
||||
exp/types.gox: exp/types.lo
|
||||
$(BUILDGOX)
|
||||
|
||||
exp/sql/driver.gox: exp/sql/driver.lo
|
||||
exp/utf8string.gox: exp/utf8string.lo
|
||||
$(BUILDGOX)
|
||||
|
||||
html/template.gox: html/template.lo
|
||||
|
@ -3941,6 +3965,8 @@ TEST_PACKAGES = \
|
|||
crypto/openpgp/elgamal/check \
|
||||
crypto/openpgp/packet/check \
|
||||
crypto/openpgp/s2k/check \
|
||||
database/sql/check \
|
||||
database/sql/driver/check \
|
||||
debug/dwarf/check \
|
||||
debug/elf/check \
|
||||
debug/macho/check \
|
||||
|
@ -3962,9 +3988,9 @@ TEST_PACKAGES = \
|
|||
exp/norm/check \
|
||||
exp/proxy/check \
|
||||
exp/spdy/check \
|
||||
exp/sql/check \
|
||||
exp/ssh/check \
|
||||
exp/terminal/check \
|
||||
exp/utf8string/check \
|
||||
html/template/check \
|
||||
go/ast/check \
|
||||
$(go_build_check_omitted_since_it_calls_6g) \
|
||||
|
|
|
@ -99,10 +99,11 @@ am__installdirs = "$(DESTDIR)$(toolexeclibdir)" \
|
|||
"$(DESTDIR)$(toolexeclibgocryptodir)" \
|
||||
"$(DESTDIR)$(toolexeclibgocryptoopenpgpdir)" \
|
||||
"$(DESTDIR)$(toolexeclibgocryptox509dir)" \
|
||||
"$(DESTDIR)$(toolexeclibgodatabasedir)" \
|
||||
"$(DESTDIR)$(toolexeclibgodatabasesqldir)" \
|
||||
"$(DESTDIR)$(toolexeclibgodebugdir)" \
|
||||
"$(DESTDIR)$(toolexeclibgoencodingdir)" \
|
||||
"$(DESTDIR)$(toolexeclibgoexpdir)" \
|
||||
"$(DESTDIR)$(toolexeclibgoexpsqldir)" \
|
||||
"$(DESTDIR)$(toolexeclibgogodir)" \
|
||||
"$(DESTDIR)$(toolexeclibgohashdir)" \
|
||||
"$(DESTDIR)$(toolexeclibgohtmldir)" \
|
||||
|
@ -154,33 +155,33 @@ am__DEPENDENCIES_2 = bufio/bufio.lo bytes/bytes.lo bytes/index.lo \
|
|||
crypto/tls.lo crypto/twofish.lo crypto/x509.lo crypto/xtea.lo \
|
||||
crypto/openpgp/armor.lo crypto/openpgp/elgamal.lo \
|
||||
crypto/openpgp/errors.lo crypto/openpgp/packet.lo \
|
||||
crypto/openpgp/s2k.lo crypto/x509/pkix.lo debug/dwarf.lo \
|
||||
debug/elf.lo debug/gosym.lo debug/macho.lo debug/pe.lo \
|
||||
encoding/ascii85.lo encoding/asn1.lo encoding/base32.lo \
|
||||
encoding/base64.lo encoding/binary.lo encoding/csv.lo \
|
||||
encoding/git85.lo encoding/gob.lo encoding/hex.lo \
|
||||
encoding/json.lo encoding/pem.lo encoding/xml.lo exp/ebnf.lo \
|
||||
exp/norm.lo exp/proxy.lo exp/spdy.lo exp/sql.lo exp/ssh.lo \
|
||||
exp/terminal.lo exp/types.lo exp/sql/driver.lo \
|
||||
html/template.lo go/ast.lo go/build.lo go/doc.lo go/parser.lo \
|
||||
go/printer.lo go/scanner.lo go/token.lo hash/adler32.lo \
|
||||
hash/crc32.lo hash/crc64.lo hash/fnv.lo net/http/cgi.lo \
|
||||
net/http/fcgi.lo net/http/httptest.lo net/http/httputil.lo \
|
||||
net/http/pprof.lo image/bmp.lo image/color.lo image/draw.lo \
|
||||
image/gif.lo image/jpeg.lo image/png.lo image/tiff.lo \
|
||||
index/suffixarray.lo io/ioutil.lo log/syslog.lo \
|
||||
log/syslog/syslog_c.lo math/big.lo math/cmplx.lo math/rand.lo \
|
||||
mime/mime.lo mime/multipart.lo net/dict.lo net/http.lo \
|
||||
net/mail.lo net/rpc.lo net/smtp.lo net/textproto.lo net/url.lo \
|
||||
old/netchan.lo old/regexp.lo old/template.lo \
|
||||
$(am__DEPENDENCIES_1) os/user.lo os/signal.lo path/filepath.lo \
|
||||
regexp/syntax.lo net/rpc/jsonrpc.lo runtime/debug.lo \
|
||||
runtime/pprof.lo sync/atomic.lo sync/atomic_c.lo \
|
||||
syscall/syscall.lo syscall/errno.lo syscall/wait.lo \
|
||||
text/scanner.lo text/tabwriter.lo text/template.lo \
|
||||
text/template/parse.lo testing/testing.lo testing/iotest.lo \
|
||||
testing/quick.lo testing/script.lo unicode/utf16.lo \
|
||||
unicode/utf8.lo
|
||||
crypto/openpgp/s2k.lo crypto/x509/pkix.lo database/sql.lo \
|
||||
database/sql/driver.lo debug/dwarf.lo debug/elf.lo \
|
||||
debug/gosym.lo debug/macho.lo debug/pe.lo encoding/ascii85.lo \
|
||||
encoding/asn1.lo encoding/base32.lo encoding/base64.lo \
|
||||
encoding/binary.lo encoding/csv.lo encoding/git85.lo \
|
||||
encoding/gob.lo encoding/hex.lo encoding/json.lo \
|
||||
encoding/pem.lo encoding/xml.lo exp/ebnf.lo exp/norm.lo \
|
||||
exp/proxy.lo exp/spdy.lo exp/ssh.lo exp/terminal.lo \
|
||||
exp/types.lo exp/utf8string.lo html/template.lo go/ast.lo \
|
||||
go/build.lo go/doc.lo go/parser.lo go/printer.lo go/scanner.lo \
|
||||
go/token.lo hash/adler32.lo hash/crc32.lo hash/crc64.lo \
|
||||
hash/fnv.lo net/http/cgi.lo net/http/fcgi.lo \
|
||||
net/http/httptest.lo net/http/httputil.lo net/http/pprof.lo \
|
||||
image/bmp.lo image/color.lo image/draw.lo image/gif.lo \
|
||||
image/jpeg.lo image/png.lo image/tiff.lo index/suffixarray.lo \
|
||||
io/ioutil.lo log/syslog.lo log/syslog/syslog_c.lo math/big.lo \
|
||||
math/cmplx.lo math/rand.lo mime/mime.lo mime/multipart.lo \
|
||||
net/dict.lo net/http.lo net/mail.lo net/rpc.lo net/smtp.lo \
|
||||
net/textproto.lo net/url.lo old/netchan.lo old/regexp.lo \
|
||||
old/template.lo $(am__DEPENDENCIES_1) os/user.lo os/signal.lo \
|
||||
path/filepath.lo regexp/syntax.lo net/rpc/jsonrpc.lo \
|
||||
runtime/debug.lo runtime/pprof.lo sync/atomic.lo \
|
||||
sync/atomic_c.lo syscall/syscall.lo syscall/errno.lo \
|
||||
syscall/wait.lo text/scanner.lo text/tabwriter.lo \
|
||||
text/template.lo text/template/parse.lo testing/testing.lo \
|
||||
testing/iotest.lo testing/quick.lo testing/script.lo \
|
||||
unicode/utf16.lo unicode/utf8.lo
|
||||
libgo_la_DEPENDENCIES = $(am__DEPENDENCIES_2) $(am__DEPENDENCIES_1) \
|
||||
$(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \
|
||||
$(am__DEPENDENCIES_1)
|
||||
|
@ -289,20 +290,20 @@ RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \
|
|||
DATA = $(toolexeclibgo_DATA) $(toolexeclibgoarchive_DATA) \
|
||||
$(toolexeclibgocompress_DATA) $(toolexeclibgocontainer_DATA) \
|
||||
$(toolexeclibgocrypto_DATA) $(toolexeclibgocryptoopenpgp_DATA) \
|
||||
$(toolexeclibgocryptox509_DATA) $(toolexeclibgodebug_DATA) \
|
||||
$(toolexeclibgocryptox509_DATA) $(toolexeclibgodatabase_DATA) \
|
||||
$(toolexeclibgodatabasesql_DATA) $(toolexeclibgodebug_DATA) \
|
||||
$(toolexeclibgoencoding_DATA) $(toolexeclibgoexp_DATA) \
|
||||
$(toolexeclibgoexpsql_DATA) $(toolexeclibgogo_DATA) \
|
||||
$(toolexeclibgohash_DATA) $(toolexeclibgohtml_DATA) \
|
||||
$(toolexeclibgoimage_DATA) $(toolexeclibgoindex_DATA) \
|
||||
$(toolexeclibgoio_DATA) $(toolexeclibgolog_DATA) \
|
||||
$(toolexeclibgomath_DATA) $(toolexeclibgomime_DATA) \
|
||||
$(toolexeclibgonet_DATA) $(toolexeclibgonethttp_DATA) \
|
||||
$(toolexeclibgonetrpc_DATA) $(toolexeclibgoold_DATA) \
|
||||
$(toolexeclibgoos_DATA) $(toolexeclibgopath_DATA) \
|
||||
$(toolexeclibgoregexp_DATA) $(toolexeclibgoruntime_DATA) \
|
||||
$(toolexeclibgosync_DATA) $(toolexeclibgotesting_DATA) \
|
||||
$(toolexeclibgotext_DATA) $(toolexeclibgotexttemplate_DATA) \
|
||||
$(toolexeclibgounicode_DATA)
|
||||
$(toolexeclibgogo_DATA) $(toolexeclibgohash_DATA) \
|
||||
$(toolexeclibgohtml_DATA) $(toolexeclibgoimage_DATA) \
|
||||
$(toolexeclibgoindex_DATA) $(toolexeclibgoio_DATA) \
|
||||
$(toolexeclibgolog_DATA) $(toolexeclibgomath_DATA) \
|
||||
$(toolexeclibgomime_DATA) $(toolexeclibgonet_DATA) \
|
||||
$(toolexeclibgonethttp_DATA) $(toolexeclibgonetrpc_DATA) \
|
||||
$(toolexeclibgoold_DATA) $(toolexeclibgoos_DATA) \
|
||||
$(toolexeclibgopath_DATA) $(toolexeclibgoregexp_DATA) \
|
||||
$(toolexeclibgoruntime_DATA) $(toolexeclibgosync_DATA) \
|
||||
$(toolexeclibgotesting_DATA) $(toolexeclibgotext_DATA) \
|
||||
$(toolexeclibgotexttemplate_DATA) $(toolexeclibgounicode_DATA)
|
||||
RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \
|
||||
distclean-recursive maintainer-clean-recursive
|
||||
AM_RECURSIVE_TARGETS = $(RECURSIVE_TARGETS:-recursive=) \
|
||||
|
@ -661,6 +662,14 @@ toolexeclibgocryptox509dir = $(toolexeclibgocryptodir)/x509
|
|||
toolexeclibgocryptox509_DATA = \
|
||||
crypto/x509/pkix.gox
|
||||
|
||||
toolexeclibgodatabasedir = $(toolexeclibgodir)/database
|
||||
toolexeclibgodatabase_DATA = \
|
||||
database/sql.gox
|
||||
|
||||
toolexeclibgodatabasesqldir = $(toolexeclibgodatabasedir)/sql
|
||||
toolexeclibgodatabasesql_DATA = \
|
||||
database/sql/driver.gox
|
||||
|
||||
toolexeclibgodebugdir = $(toolexeclibgodir)/debug
|
||||
toolexeclibgodebug_DATA = \
|
||||
debug/dwarf.gox \
|
||||
|
@ -695,14 +704,10 @@ toolexeclibgoexp_DATA = \
|
|||
exp/norm.gox \
|
||||
exp/proxy.gox \
|
||||
exp/spdy.gox \
|
||||
exp/sql.gox \
|
||||
exp/ssh.gox \
|
||||
exp/terminal.gox \
|
||||
exp/types.gox
|
||||
|
||||
toolexeclibgoexpsqldir = $(toolexeclibgoexpdir)/sql
|
||||
toolexeclibgoexpsql_DATA = \
|
||||
exp/sql/driver.gox
|
||||
exp/types.gox \
|
||||
exp/utf8string.gox
|
||||
|
||||
toolexeclibgogodir = $(toolexeclibgodir)/go
|
||||
toolexeclibgogo_DATA = \
|
||||
|
@ -1072,6 +1077,7 @@ go_net_files = \
|
|||
go/net/dnsclient_unix.go \
|
||||
go/net/dnsconfig.go \
|
||||
go/net/dnsmsg.go \
|
||||
go/net/doc.go \
|
||||
$(go_net_newpollserver_file) \
|
||||
go/net/fd.go \
|
||||
$(go_net_fd_os_file) \
|
||||
|
@ -1118,6 +1124,7 @@ go_net_files = \
|
|||
go_os_files = \
|
||||
$(go_os_dir_file) \
|
||||
go/os/dir.go \
|
||||
go/os/doc.go \
|
||||
go/os/env.go \
|
||||
go/os/error.go \
|
||||
go/os/error_posix.go \
|
||||
|
@ -1320,7 +1327,8 @@ go_crypto_ecdsa_files = \
|
|||
go/crypto/ecdsa/ecdsa.go
|
||||
|
||||
go_crypto_elliptic_files = \
|
||||
go/crypto/elliptic/elliptic.go
|
||||
go/crypto/elliptic/elliptic.go \
|
||||
go/crypto/elliptic/p224.go
|
||||
|
||||
go_crypto_hmac_files = \
|
||||
go/crypto/hmac/hmac.go
|
||||
|
@ -1430,6 +1438,14 @@ go_crypto_openpgp_s2k_files = \
|
|||
go_crypto_x509_pkix_files = \
|
||||
go/crypto/x509/pkix/pkix.go
|
||||
|
||||
go_database_sql_files = \
|
||||
go/database/sql/convert.go \
|
||||
go/database/sql/sql.go
|
||||
|
||||
go_database_sql_driver_files = \
|
||||
go/database/sql/driver/driver.go \
|
||||
go/database/sql/driver/types.go
|
||||
|
||||
go_debug_dwarf_files = \
|
||||
go/debug/dwarf/buf.go \
|
||||
go/debug/dwarf/const.go \
|
||||
|
@ -1535,10 +1551,6 @@ go_exp_spdy_files = \
|
|||
go/exp/spdy/types.go \
|
||||
go/exp/spdy/write.go
|
||||
|
||||
go_exp_sql_files = \
|
||||
go/exp/sql/convert.go \
|
||||
go/exp/sql/sql.go
|
||||
|
||||
go_exp_ssh_files = \
|
||||
go/exp/ssh/channel.go \
|
||||
go/exp/ssh/cipher.go \
|
||||
|
@ -1565,9 +1577,8 @@ go_exp_types_files = \
|
|||
go/exp/types/types.go \
|
||||
go/exp/types/universe.go
|
||||
|
||||
go_exp_sql_driver_files = \
|
||||
go/exp/sql/driver/driver.go \
|
||||
go/exp/sql/driver/types.go
|
||||
go_exp_utf8string_files = \
|
||||
go/exp/utf8string/string.go
|
||||
|
||||
go_go_ast_files = \
|
||||
go/go/ast/ast.go \
|
||||
|
@ -1854,7 +1865,6 @@ go_unicode_utf16_files = \
|
|||
go/unicode/utf16/utf16.go
|
||||
|
||||
go_unicode_utf8_files = \
|
||||
go/unicode/utf8/string.go \
|
||||
go/unicode/utf8/utf8.go
|
||||
|
||||
@LIBGO_IS_RTEMS_FALSE@syscall_syscall_file = go/syscall/syscall_unix.go
|
||||
|
@ -2025,6 +2035,8 @@ libgo_go_objs = \
|
|||
crypto/openpgp/packet.lo \
|
||||
crypto/openpgp/s2k.lo \
|
||||
crypto/x509/pkix.lo \
|
||||
database/sql.lo \
|
||||
database/sql/driver.lo \
|
||||
debug/dwarf.lo \
|
||||
debug/elf.lo \
|
||||
debug/gosym.lo \
|
||||
|
@ -2046,11 +2058,10 @@ libgo_go_objs = \
|
|||
exp/norm.lo \
|
||||
exp/proxy.lo \
|
||||
exp/spdy.lo \
|
||||
exp/sql.lo \
|
||||
exp/ssh.lo \
|
||||
exp/terminal.lo \
|
||||
exp/types.lo \
|
||||
exp/sql/driver.lo \
|
||||
exp/utf8string.lo \
|
||||
html/template.lo \
|
||||
go/ast.lo \
|
||||
go/build.lo \
|
||||
|
@ -2294,6 +2305,8 @@ TEST_PACKAGES = \
|
|||
crypto/openpgp/elgamal/check \
|
||||
crypto/openpgp/packet/check \
|
||||
crypto/openpgp/s2k/check \
|
||||
database/sql/check \
|
||||
database/sql/driver/check \
|
||||
debug/dwarf/check \
|
||||
debug/elf/check \
|
||||
debug/macho/check \
|
||||
|
@ -2315,9 +2328,9 @@ TEST_PACKAGES = \
|
|||
exp/norm/check \
|
||||
exp/proxy/check \
|
||||
exp/spdy/check \
|
||||
exp/sql/check \
|
||||
exp/ssh/check \
|
||||
exp/terminal/check \
|
||||
exp/utf8string/check \
|
||||
html/template/check \
|
||||
go/ast/check \
|
||||
$(go_build_check_omitted_since_it_calls_6g) \
|
||||
|
@ -3365,6 +3378,46 @@ uninstall-toolexeclibgocryptox509DATA:
|
|||
test -n "$$files" || exit 0; \
|
||||
echo " ( cd '$(DESTDIR)$(toolexeclibgocryptox509dir)' && rm -f" $$files ")"; \
|
||||
cd "$(DESTDIR)$(toolexeclibgocryptox509dir)" && rm -f $$files
|
||||
install-toolexeclibgodatabaseDATA: $(toolexeclibgodatabase_DATA)
|
||||
@$(NORMAL_INSTALL)
|
||||
test -z "$(toolexeclibgodatabasedir)" || $(MKDIR_P) "$(DESTDIR)$(toolexeclibgodatabasedir)"
|
||||
@list='$(toolexeclibgodatabase_DATA)'; test -n "$(toolexeclibgodatabasedir)" || list=; \
|
||||
for p in $$list; do \
|
||||
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
|
||||
echo "$$d$$p"; \
|
||||
done | $(am__base_list) | \
|
||||
while read files; do \
|
||||
echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(toolexeclibgodatabasedir)'"; \
|
||||
$(INSTALL_DATA) $$files "$(DESTDIR)$(toolexeclibgodatabasedir)" || exit $$?; \
|
||||
done
|
||||
|
||||
uninstall-toolexeclibgodatabaseDATA:
|
||||
@$(NORMAL_UNINSTALL)
|
||||
@list='$(toolexeclibgodatabase_DATA)'; test -n "$(toolexeclibgodatabasedir)" || list=; \
|
||||
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
|
||||
test -n "$$files" || exit 0; \
|
||||
echo " ( cd '$(DESTDIR)$(toolexeclibgodatabasedir)' && rm -f" $$files ")"; \
|
||||
cd "$(DESTDIR)$(toolexeclibgodatabasedir)" && rm -f $$files
|
||||
install-toolexeclibgodatabasesqlDATA: $(toolexeclibgodatabasesql_DATA)
|
||||
@$(NORMAL_INSTALL)
|
||||
test -z "$(toolexeclibgodatabasesqldir)" || $(MKDIR_P) "$(DESTDIR)$(toolexeclibgodatabasesqldir)"
|
||||
@list='$(toolexeclibgodatabasesql_DATA)'; test -n "$(toolexeclibgodatabasesqldir)" || list=; \
|
||||
for p in $$list; do \
|
||||
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
|
||||
echo "$$d$$p"; \
|
||||
done | $(am__base_list) | \
|
||||
while read files; do \
|
||||
echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(toolexeclibgodatabasesqldir)'"; \
|
||||
$(INSTALL_DATA) $$files "$(DESTDIR)$(toolexeclibgodatabasesqldir)" || exit $$?; \
|
||||
done
|
||||
|
||||
uninstall-toolexeclibgodatabasesqlDATA:
|
||||
@$(NORMAL_UNINSTALL)
|
||||
@list='$(toolexeclibgodatabasesql_DATA)'; test -n "$(toolexeclibgodatabasesqldir)" || list=; \
|
||||
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
|
||||
test -n "$$files" || exit 0; \
|
||||
echo " ( cd '$(DESTDIR)$(toolexeclibgodatabasesqldir)' && rm -f" $$files ")"; \
|
||||
cd "$(DESTDIR)$(toolexeclibgodatabasesqldir)" && rm -f $$files
|
||||
install-toolexeclibgodebugDATA: $(toolexeclibgodebug_DATA)
|
||||
@$(NORMAL_INSTALL)
|
||||
test -z "$(toolexeclibgodebugdir)" || $(MKDIR_P) "$(DESTDIR)$(toolexeclibgodebugdir)"
|
||||
|
@ -3425,26 +3478,6 @@ uninstall-toolexeclibgoexpDATA:
|
|||
test -n "$$files" || exit 0; \
|
||||
echo " ( cd '$(DESTDIR)$(toolexeclibgoexpdir)' && rm -f" $$files ")"; \
|
||||
cd "$(DESTDIR)$(toolexeclibgoexpdir)" && rm -f $$files
|
||||
install-toolexeclibgoexpsqlDATA: $(toolexeclibgoexpsql_DATA)
|
||||
@$(NORMAL_INSTALL)
|
||||
test -z "$(toolexeclibgoexpsqldir)" || $(MKDIR_P) "$(DESTDIR)$(toolexeclibgoexpsqldir)"
|
||||
@list='$(toolexeclibgoexpsql_DATA)'; test -n "$(toolexeclibgoexpsqldir)" || list=; \
|
||||
for p in $$list; do \
|
||||
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
|
||||
echo "$$d$$p"; \
|
||||
done | $(am__base_list) | \
|
||||
while read files; do \
|
||||
echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(toolexeclibgoexpsqldir)'"; \
|
||||
$(INSTALL_DATA) $$files "$(DESTDIR)$(toolexeclibgoexpsqldir)" || exit $$?; \
|
||||
done
|
||||
|
||||
uninstall-toolexeclibgoexpsqlDATA:
|
||||
@$(NORMAL_UNINSTALL)
|
||||
@list='$(toolexeclibgoexpsql_DATA)'; test -n "$(toolexeclibgoexpsqldir)" || list=; \
|
||||
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
|
||||
test -n "$$files" || exit 0; \
|
||||
echo " ( cd '$(DESTDIR)$(toolexeclibgoexpsqldir)' && rm -f" $$files ")"; \
|
||||
cd "$(DESTDIR)$(toolexeclibgoexpsqldir)" && rm -f $$files
|
||||
install-toolexeclibgogoDATA: $(toolexeclibgogo_DATA)
|
||||
@$(NORMAL_INSTALL)
|
||||
test -z "$(toolexeclibgogodir)" || $(MKDIR_P) "$(DESTDIR)$(toolexeclibgogodir)"
|
||||
|
@ -4202,7 +4235,7 @@ all-am: Makefile $(LIBRARIES) $(LTLIBRARIES) all-multi $(DATA) \
|
|||
config.h
|
||||
installdirs: installdirs-recursive
|
||||
installdirs-am:
|
||||
for dir in "$(DESTDIR)$(toolexeclibdir)" "$(DESTDIR)$(toolexeclibdir)" "$(DESTDIR)$(toolexeclibgodir)" "$(DESTDIR)$(toolexeclibgoarchivedir)" "$(DESTDIR)$(toolexeclibgocompressdir)" "$(DESTDIR)$(toolexeclibgocontainerdir)" "$(DESTDIR)$(toolexeclibgocryptodir)" "$(DESTDIR)$(toolexeclibgocryptoopenpgpdir)" "$(DESTDIR)$(toolexeclibgocryptox509dir)" "$(DESTDIR)$(toolexeclibgodebugdir)" "$(DESTDIR)$(toolexeclibgoencodingdir)" "$(DESTDIR)$(toolexeclibgoexpdir)" "$(DESTDIR)$(toolexeclibgoexpsqldir)" "$(DESTDIR)$(toolexeclibgogodir)" "$(DESTDIR)$(toolexeclibgohashdir)" "$(DESTDIR)$(toolexeclibgohtmldir)" "$(DESTDIR)$(toolexeclibgoimagedir)" "$(DESTDIR)$(toolexeclibgoindexdir)" "$(DESTDIR)$(toolexeclibgoiodir)" "$(DESTDIR)$(toolexeclibgologdir)" "$(DESTDIR)$(toolexeclibgomathdir)" "$(DESTDIR)$(toolexeclibgomimedir)" "$(DESTDIR)$(toolexeclibgonetdir)" "$(DESTDIR)$(toolexeclibgonethttpdir)" "$(DESTDIR)$(toolexeclibgonetrpcdir)" "$(DESTDIR)$(toolexeclibgoolddir)" "$(DESTDIR)$(toolexeclibgoosdir)" "$(DESTDIR)$(toolexeclibgopathdir)" "$(DESTDIR)$(toolexeclibgoregexpdir)" "$(DESTDIR)$(toolexeclibgoruntimedir)" "$(DESTDIR)$(toolexeclibgosyncdir)" "$(DESTDIR)$(toolexeclibgotestingdir)" "$(DESTDIR)$(toolexeclibgotextdir)" "$(DESTDIR)$(toolexeclibgotexttemplatedir)" "$(DESTDIR)$(toolexeclibgounicodedir)"; do \
|
||||
for dir in "$(DESTDIR)$(toolexeclibdir)" "$(DESTDIR)$(toolexeclibdir)" "$(DESTDIR)$(toolexeclibgodir)" "$(DESTDIR)$(toolexeclibgoarchivedir)" "$(DESTDIR)$(toolexeclibgocompressdir)" "$(DESTDIR)$(toolexeclibgocontainerdir)" "$(DESTDIR)$(toolexeclibgocryptodir)" "$(DESTDIR)$(toolexeclibgocryptoopenpgpdir)" "$(DESTDIR)$(toolexeclibgocryptox509dir)" "$(DESTDIR)$(toolexeclibgodatabasedir)" "$(DESTDIR)$(toolexeclibgodatabasesqldir)" "$(DESTDIR)$(toolexeclibgodebugdir)" "$(DESTDIR)$(toolexeclibgoencodingdir)" "$(DESTDIR)$(toolexeclibgoexpdir)" "$(DESTDIR)$(toolexeclibgogodir)" "$(DESTDIR)$(toolexeclibgohashdir)" "$(DESTDIR)$(toolexeclibgohtmldir)" "$(DESTDIR)$(toolexeclibgoimagedir)" "$(DESTDIR)$(toolexeclibgoindexdir)" "$(DESTDIR)$(toolexeclibgoiodir)" "$(DESTDIR)$(toolexeclibgologdir)" "$(DESTDIR)$(toolexeclibgomathdir)" "$(DESTDIR)$(toolexeclibgomimedir)" "$(DESTDIR)$(toolexeclibgonetdir)" "$(DESTDIR)$(toolexeclibgonethttpdir)" "$(DESTDIR)$(toolexeclibgonetrpcdir)" "$(DESTDIR)$(toolexeclibgoolddir)" "$(DESTDIR)$(toolexeclibgoosdir)" "$(DESTDIR)$(toolexeclibgopathdir)" "$(DESTDIR)$(toolexeclibgoregexpdir)" "$(DESTDIR)$(toolexeclibgoruntimedir)" "$(DESTDIR)$(toolexeclibgosyncdir)" "$(DESTDIR)$(toolexeclibgotestingdir)" "$(DESTDIR)$(toolexeclibgotextdir)" "$(DESTDIR)$(toolexeclibgotexttemplatedir)" "$(DESTDIR)$(toolexeclibgounicodedir)"; do \
|
||||
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
|
||||
done
|
||||
install: install-recursive
|
||||
|
@ -4270,14 +4303,16 @@ install-exec-am: install-multi install-toolexeclibLIBRARIES \
|
|||
install-toolexeclibgocryptoDATA \
|
||||
install-toolexeclibgocryptoopenpgpDATA \
|
||||
install-toolexeclibgocryptox509DATA \
|
||||
install-toolexeclibgodatabaseDATA \
|
||||
install-toolexeclibgodatabasesqlDATA \
|
||||
install-toolexeclibgodebugDATA \
|
||||
install-toolexeclibgoencodingDATA install-toolexeclibgoexpDATA \
|
||||
install-toolexeclibgoexpsqlDATA install-toolexeclibgogoDATA \
|
||||
install-toolexeclibgohashDATA install-toolexeclibgohtmlDATA \
|
||||
install-toolexeclibgoimageDATA install-toolexeclibgoindexDATA \
|
||||
install-toolexeclibgoioDATA install-toolexeclibgologDATA \
|
||||
install-toolexeclibgomathDATA install-toolexeclibgomimeDATA \
|
||||
install-toolexeclibgonetDATA install-toolexeclibgonethttpDATA \
|
||||
install-toolexeclibgogoDATA install-toolexeclibgohashDATA \
|
||||
install-toolexeclibgohtmlDATA install-toolexeclibgoimageDATA \
|
||||
install-toolexeclibgoindexDATA install-toolexeclibgoioDATA \
|
||||
install-toolexeclibgologDATA install-toolexeclibgomathDATA \
|
||||
install-toolexeclibgomimeDATA install-toolexeclibgonetDATA \
|
||||
install-toolexeclibgonethttpDATA \
|
||||
install-toolexeclibgonetrpcDATA install-toolexeclibgooldDATA \
|
||||
install-toolexeclibgoosDATA install-toolexeclibgopathDATA \
|
||||
install-toolexeclibgoregexpDATA \
|
||||
|
@ -4334,11 +4369,12 @@ uninstall-am: uninstall-toolexeclibLIBRARIES \
|
|||
uninstall-toolexeclibgocryptoDATA \
|
||||
uninstall-toolexeclibgocryptoopenpgpDATA \
|
||||
uninstall-toolexeclibgocryptox509DATA \
|
||||
uninstall-toolexeclibgodatabaseDATA \
|
||||
uninstall-toolexeclibgodatabasesqlDATA \
|
||||
uninstall-toolexeclibgodebugDATA \
|
||||
uninstall-toolexeclibgoencodingDATA \
|
||||
uninstall-toolexeclibgoexpDATA \
|
||||
uninstall-toolexeclibgoexpsqlDATA \
|
||||
uninstall-toolexeclibgogoDATA uninstall-toolexeclibgohashDATA \
|
||||
uninstall-toolexeclibgoexpDATA uninstall-toolexeclibgogoDATA \
|
||||
uninstall-toolexeclibgohashDATA \
|
||||
uninstall-toolexeclibgohtmlDATA \
|
||||
uninstall-toolexeclibgoimageDATA \
|
||||
uninstall-toolexeclibgoindexDATA uninstall-toolexeclibgoioDATA \
|
||||
|
@ -4382,14 +4418,16 @@ uninstall-am: uninstall-toolexeclibLIBRARIES \
|
|||
install-toolexeclibgocryptoDATA \
|
||||
install-toolexeclibgocryptoopenpgpDATA \
|
||||
install-toolexeclibgocryptox509DATA \
|
||||
install-toolexeclibgodatabaseDATA \
|
||||
install-toolexeclibgodatabasesqlDATA \
|
||||
install-toolexeclibgodebugDATA \
|
||||
install-toolexeclibgoencodingDATA install-toolexeclibgoexpDATA \
|
||||
install-toolexeclibgoexpsqlDATA install-toolexeclibgogoDATA \
|
||||
install-toolexeclibgohashDATA install-toolexeclibgohtmlDATA \
|
||||
install-toolexeclibgoimageDATA install-toolexeclibgoindexDATA \
|
||||
install-toolexeclibgoioDATA install-toolexeclibgologDATA \
|
||||
install-toolexeclibgomathDATA install-toolexeclibgomimeDATA \
|
||||
install-toolexeclibgonetDATA install-toolexeclibgonethttpDATA \
|
||||
install-toolexeclibgogoDATA install-toolexeclibgohashDATA \
|
||||
install-toolexeclibgohtmlDATA install-toolexeclibgoimageDATA \
|
||||
install-toolexeclibgoindexDATA install-toolexeclibgoioDATA \
|
||||
install-toolexeclibgologDATA install-toolexeclibgomathDATA \
|
||||
install-toolexeclibgomimeDATA install-toolexeclibgonetDATA \
|
||||
install-toolexeclibgonethttpDATA \
|
||||
install-toolexeclibgonetrpcDATA install-toolexeclibgooldDATA \
|
||||
install-toolexeclibgoosDATA install-toolexeclibgopathDATA \
|
||||
install-toolexeclibgoregexpDATA \
|
||||
|
@ -4410,11 +4448,12 @@ uninstall-am: uninstall-toolexeclibLIBRARIES \
|
|||
uninstall-toolexeclibgocryptoDATA \
|
||||
uninstall-toolexeclibgocryptoopenpgpDATA \
|
||||
uninstall-toolexeclibgocryptox509DATA \
|
||||
uninstall-toolexeclibgodatabaseDATA \
|
||||
uninstall-toolexeclibgodatabasesqlDATA \
|
||||
uninstall-toolexeclibgodebugDATA \
|
||||
uninstall-toolexeclibgoencodingDATA \
|
||||
uninstall-toolexeclibgoexpDATA \
|
||||
uninstall-toolexeclibgoexpsqlDATA \
|
||||
uninstall-toolexeclibgogoDATA uninstall-toolexeclibgohashDATA \
|
||||
uninstall-toolexeclibgoexpDATA uninstall-toolexeclibgogoDATA \
|
||||
uninstall-toolexeclibgohashDATA \
|
||||
uninstall-toolexeclibgohtmlDATA \
|
||||
uninstall-toolexeclibgoimageDATA \
|
||||
uninstall-toolexeclibgoindexDATA uninstall-toolexeclibgoioDATA \
|
||||
|
@ -5230,6 +5269,26 @@ crypto/x509/pkix/check: $(CHECK_DEPS)
|
|||
@$(CHECK)
|
||||
.PHONY: crypto/x509/pkix/check
|
||||
|
||||
@go_include@ database/sql.lo.dep
|
||||
database/sql.lo.dep: $(go_database_sql_files)
|
||||
$(BUILDDEPS)
|
||||
database/sql.lo: $(go_database_sql_files)
|
||||
$(BUILDPACKAGE)
|
||||
database/sql/check: $(CHECK_DEPS)
|
||||
@$(MKDIR_P) database/sql
|
||||
@$(CHECK)
|
||||
.PHONY: database/sql/check
|
||||
|
||||
@go_include@ database/sql/driver.lo.dep
|
||||
database/sql/driver.lo.dep: $(go_database_sql_driver_files)
|
||||
$(BUILDDEPS)
|
||||
database/sql/driver.lo: $(go_database_sql_driver_files)
|
||||
$(BUILDPACKAGE)
|
||||
database/sql/driver/check: $(CHECK_DEPS)
|
||||
@$(MKDIR_P) database/sql/driver
|
||||
@$(CHECK)
|
||||
.PHONY: database/sql/driver/check
|
||||
|
||||
@go_include@ debug/dwarf.lo.dep
|
||||
debug/dwarf.lo.dep: $(go_debug_dwarf_files)
|
||||
$(BUILDDEPS)
|
||||
|
@ -5440,16 +5499,6 @@ exp/spdy/check: $(CHECK_DEPS)
|
|||
@$(CHECK)
|
||||
.PHONY: exp/spdy/check
|
||||
|
||||
@go_include@ exp/sql.lo.dep
|
||||
exp/sql.lo.dep: $(go_exp_sql_files)
|
||||
$(BUILDDEPS)
|
||||
exp/sql.lo: $(go_exp_sql_files)
|
||||
$(BUILDPACKAGE)
|
||||
exp/sql/check: $(CHECK_DEPS)
|
||||
@$(MKDIR_P) exp/sql
|
||||
@$(CHECK)
|
||||
.PHONY: exp/sql/check
|
||||
|
||||
@go_include@ exp/ssh.lo.dep
|
||||
exp/ssh.lo.dep: $(go_exp_ssh_files)
|
||||
$(BUILDDEPS)
|
||||
|
@ -5480,6 +5529,16 @@ exp/types/check: $(CHECK_DEPS)
|
|||
@$(CHECK)
|
||||
.PHONY: exp/types/check
|
||||
|
||||
@go_include@ exp/utf8string.lo.dep
|
||||
exp/utf8string.lo.dep: $(go_exp_utf8string_files)
|
||||
$(BUILDDEPS)
|
||||
exp/utf8string.lo: $(go_exp_utf8string_files)
|
||||
$(BUILDPACKAGE)
|
||||
exp/utf8string/check: $(CHECK_DEPS)
|
||||
@$(MKDIR_P) exp/utf8string
|
||||
@$(CHECK)
|
||||
.PHONY: exp/utf8string/check
|
||||
|
||||
@go_include@ exp/inotify.lo.dep
|
||||
exp/inotify.lo.dep: $(go_exp_inotify_files)
|
||||
$(BUILDDEPS)
|
||||
|
@ -5490,16 +5549,6 @@ exp/inotify/check: $(CHECK_DEPS)
|
|||
@$(CHECK)
|
||||
.PHONY: exp/inotify/check
|
||||
|
||||
@go_include@ exp/sql/driver.lo.dep
|
||||
exp/sql/driver.lo.dep: $(go_exp_sql_driver_files)
|
||||
$(BUILDDEPS)
|
||||
exp/sql/driver.lo: $(go_exp_sql_driver_files)
|
||||
$(BUILDPACKAGE)
|
||||
exp/sql/driver/check: $(CHECK_DEPS)
|
||||
@$(MKDIR_P) exp/sql/driver
|
||||
@$(CHECK)
|
||||
.PHONY: exp/sql/driver/check
|
||||
|
||||
@go_include@ html/template.lo.dep
|
||||
html/template.lo.dep: $(go_html_template_files)
|
||||
$(BUILDDEPS)
|
||||
|
@ -6249,6 +6298,12 @@ crypto/openpgp/s2k.gox: crypto/openpgp/s2k.lo
|
|||
crypto/x509/pkix.gox: crypto/x509/pkix.lo
|
||||
$(BUILDGOX)
|
||||
|
||||
database/sql.gox: database/sql.lo
|
||||
$(BUILDGOX)
|
||||
|
||||
database/sql/driver.gox: database/sql/driver.lo
|
||||
$(BUILDGOX)
|
||||
|
||||
debug/dwarf.gox: debug/dwarf.lo
|
||||
$(BUILDGOX)
|
||||
debug/elf.gox: debug/elf.lo
|
||||
|
@ -6295,16 +6350,13 @@ exp/proxy.gox: exp/proxy.lo
|
|||
$(BUILDGOX)
|
||||
exp/spdy.gox: exp/spdy.lo
|
||||
$(BUILDGOX)
|
||||
exp/sql.gox: exp/sql.lo
|
||||
$(BUILDGOX)
|
||||
exp/ssh.gox: exp/ssh.lo
|
||||
$(BUILDGOX)
|
||||
exp/terminal.gox: exp/terminal.lo
|
||||
$(BUILDGOX)
|
||||
exp/types.gox: exp/types.lo
|
||||
$(BUILDGOX)
|
||||
|
||||
exp/sql/driver.gox: exp/sql/driver.lo
|
||||
exp/utf8string.gox: exp/utf8string.lo
|
||||
$(BUILDGOX)
|
||||
|
||||
html/template.gox: html/template.lo

@@ -1,3 +1,7 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package bytes_test

import (
@@ -3,7 +3,13 @@
// license that can be found in the LICENSE file.

// Package heap provides heap operations for any type that implements
// heap.Interface.
// heap.Interface. A heap is a tree with the property that each node is the
// highest-valued node in its subtree.
//
// A heap is a common way to implement a priority queue. To build a priority
// queue, implement the Heap interface with the (negative) priority as the
// ordering for the Less method, so Push adds items while Pop removes the
// highest-priority item from the queue.
//
package heap
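
To make the priority-queue guidance above concrete, here is a minimal, self-contained sketch (not part of this commit; the Item and PriorityQueue names are invented for the example) of a max-priority queue built on heap.Interface:

package main

import (
	"container/heap"
	"fmt"
)

// Item is a hypothetical queue entry.
type Item struct {
	value    string
	priority int
}

// PriorityQueue implements heap.Interface. Less orders by priority so that
// Pop returns the highest-priority item, as the package comment describes.
type PriorityQueue []*Item

func (pq PriorityQueue) Len() int            { return len(pq) }
func (pq PriorityQueue) Less(i, j int) bool  { return pq[i].priority > pq[j].priority }
func (pq PriorityQueue) Swap(i, j int)       { pq[i], pq[j] = pq[j], pq[i] }
func (pq *PriorityQueue) Push(x interface{}) { *pq = append(*pq, x.(*Item)) }
func (pq *PriorityQueue) Pop() interface{} {
	old := *pq
	n := len(old)
	item := old[n-1]
	*pq = old[:n-1]
	return item
}

func main() {
	pq := &PriorityQueue{}
	heap.Init(pq)
	heap.Push(pq, &Item{value: "low", priority: 1})
	heap.Push(pq, &Item{value: "high", priority: 9})
	fmt.Println(heap.Pop(pq).(*Item).value) // prints "high"
}
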
@@ -20,7 +20,7 @@ import (

// PublicKey represents an ECDSA public key.
type PublicKey struct {
*elliptic.Curve
elliptic.Curve
X, Y *big.Int
}
@@ -34,22 +34,23 @@ var one = new(big.Int).SetInt64(1)

// randFieldElement returns a random element of the field underlying the given
// curve using the procedure given in [NSA] A.2.1.
func randFieldElement(c *elliptic.Curve, rand io.Reader) (k *big.Int, err error) {
b := make([]byte, c.BitSize/8+8)
func randFieldElement(c elliptic.Curve, rand io.Reader) (k *big.Int, err error) {
params := c.Params()
b := make([]byte, params.BitSize/8+8)
_, err = io.ReadFull(rand, b)
if err != nil {
return
}

k = new(big.Int).SetBytes(b)
n := new(big.Int).Sub(c.N, one)
n := new(big.Int).Sub(params.N, one)
k.Mod(k, n)
k.Add(k, one)
return
}

// GenerateKey generates a public&private key pair.
func GenerateKey(c *elliptic.Curve, rand io.Reader) (priv *PrivateKey, err error) {
func GenerateKey(c elliptic.Curve, rand io.Reader) (priv *PrivateKey, err error) {
k, err := randFieldElement(c, rand)
if err != nil {
return
@@ -66,8 +67,8 @@ func GenerateKey(c *elliptic.Curve, rand io.Reader) (priv *PrivateKey, err error
// about how this is done. [NSA] suggests that this is done in the obvious
// manner, but [SECG] truncates the hash to the bit-length of the curve order
// first. We follow [SECG] because that's what OpenSSL does.
func hashToInt(hash []byte, c *elliptic.Curve) *big.Int {
orderBits := c.N.BitLen()
func hashToInt(hash []byte, c elliptic.Curve) *big.Int {
orderBits := c.Params().N.BitLen()
orderBytes := (orderBits + 7) / 8
if len(hash) > orderBytes {
hash = hash[:orderBytes]
@@ -88,6 +89,7 @@ func hashToInt(hash []byte, c *elliptic.Curve) *big.Int {
func Sign(rand io.Reader, priv *PrivateKey, hash []byte) (r, s *big.Int, err error) {
// See [NSA] 3.4.1
c := priv.PublicKey.Curve
N := c.Params().N

var k, kInv *big.Int
for {
@@ -98,9 +100,9 @@ func Sign(rand io.Reader, priv *PrivateKey, hash []byte) (r, s *big.Int, err err
return
}

kInv = new(big.Int).ModInverse(k, c.N)
kInv = new(big.Int).ModInverse(k, N)
r, _ = priv.Curve.ScalarBaseMult(k.Bytes())
r.Mod(r, priv.Curve.N)
r.Mod(r, N)
if r.Sign() != 0 {
break
}
@@ -110,7 +112,7 @@ func Sign(rand io.Reader, priv *PrivateKey, hash []byte) (r, s *big.Int, err err
s = new(big.Int).Mul(priv.D, r)
s.Add(s, e)
s.Mul(s, kInv)
s.Mod(s, priv.PublicKey.Curve.N)
s.Mod(s, N)
if s.Sign() != 0 {
break
}
@@ -124,15 +126,16 @@ func Sign(rand io.Reader, priv *PrivateKey, hash []byte) (r, s *big.Int, err err
func Verify(pub *PublicKey, hash []byte, r, s *big.Int) bool {
// See [NSA] 3.4.2
c := pub.Curve
N := c.Params().N

if r.Sign() == 0 || s.Sign() == 0 {
return false
}
if r.Cmp(c.N) >= 0 || s.Cmp(c.N) >= 0 {
if r.Cmp(N) >= 0 || s.Cmp(N) >= 0 {
return false
}
e := hashToInt(hash, c)
w := new(big.Int).ModInverse(s, c.N)
w := new(big.Int).ModInverse(s, N)

u1 := e.Mul(e, w)
u2 := w.Mul(r, w)
@@ -143,6 +146,6 @@ func Verify(pub *PublicKey, hash []byte, r, s *big.Int) bool {
return false
}
x, _ := c.Add(x1, y1, x2, y2)
x.Mod(x, c.N)
x.Mod(x, N)
return x.Cmp(r) == 0
}
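
To illustrate the API shift in this update (crypto/elliptic's Curve is now an interface and curve parameters are reached through Params()), here is a minimal, self-contained sketch of the resulting ecdsa call pattern. It is illustrative only, not part of the commit:

package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/sha256"
	"fmt"
)

func main() {
	// GenerateKey now takes an elliptic.Curve interface value directly.
	priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}

	h := sha256.New()
	h.Write([]byte("hello"))
	hashed := h.Sum(nil)

	r, s, err := ecdsa.Sign(rand.Reader, priv, hashed)
	if err != nil {
		panic(err)
	}

	// Curve parameters (N, P, BitSize, ...) are now reached via Params().
	fmt.Println("curve bits:", priv.PublicKey.Curve.Params().BitSize)
	fmt.Println("valid:", ecdsa.Verify(&priv.PublicKey, hashed, r, s))
}
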
@@ -13,7 +13,7 @@ import (
"testing"
)

func testKeyGeneration(t *testing.T, c *elliptic.Curve, tag string) {
func testKeyGeneration(t *testing.T, c elliptic.Curve, tag string) {
priv, err := GenerateKey(c, rand.Reader)
if err != nil {
t.Errorf("%s: error: %s", tag, err)
@@ -34,7 +34,7 @@ func TestKeyGeneration(t *testing.T) {
testKeyGeneration(t, elliptic.P521(), "p521")
}

func testSignAndVerify(t *testing.T, c *elliptic.Curve, tag string) {
func testSignAndVerify(t *testing.T, c elliptic.Curve, tag string) {
priv, _ := GenerateKey(c, rand.Reader)

hashed := []byte("testing")
@@ -21,7 +21,25 @@ import (

// A Curve represents a short-form Weierstrass curve with a=-3.
// See http://www.hyperelliptic.org/EFD/g1p/auto-shortw.html
type Curve struct {
type Curve interface {
// Params returns the parameters for the curve.
Params() *CurveParams
// IsOnCurve returns true if the given (x,y) lies on the curve.
IsOnCurve(x, y *big.Int) bool
// Add returns the sum of (x1,y1) and (x2,y2)
Add(x1, y1, x2, y2 *big.Int) (x, y *big.Int)
// Double returns 2*(x,y)
Double(x1, y1 *big.Int) (x, y *big.Int)
// ScalarMult returns k*(Bx,By) where k is a number in big-endian form.
ScalarMult(x1, y1 *big.Int, scalar []byte) (x, y *big.Int)
// ScalarBaseMult returns k*G, where G is the base point of the group and k
// is an integer in big-endian form.
ScalarBaseMult(scalar []byte) (x, y *big.Int)
}

// CurveParams contains the parameters of an elliptic curve and also provides
// a generic, non-constant time implementation of Curve.
type CurveParams struct {
P *big.Int // the order of the underlying field
N *big.Int // the order of the base point
B *big.Int // the constant of the curve equation
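
Because CurveParams itself implements every Curve method (with Params returning the receiver), a specialized curve can satisfy the new interface by embedding *CurveParams and overriding only its hot paths, which is the pattern the new p224Curve type uses below. A minimal sketch, not part of the commit (myCurve is an invented name):

package main

import (
	"crypto/elliptic"
	"math/big"
)

// myCurve gets IsOnCurve, Add, Double, ScalarMult, ScalarBaseMult and Params
// by method promotion from the embedded *CurveParams.
type myCurve struct {
	*elliptic.CurveParams
}

// Override just the operation that has an optimized implementation.
func (c myCurve) ScalarBaseMult(k []byte) (x, y *big.Int) {
	// A real curve would use a faster, constant-time routine here.
	return c.CurveParams.ScalarBaseMult(k)
}

var _ elliptic.Curve = myCurve{} // compile-time interface check
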
@ -29,8 +47,11 @@ type Curve struct {
|
|||
BitSize int // the size of the underlying field
|
||||
}
|
||||
|
||||
// IsOnCurve returns true if the given (x,y) lies on the curve.
|
||||
func (curve *Curve) IsOnCurve(x, y *big.Int) bool {
|
||||
func (curve *CurveParams) Params() *CurveParams {
|
||||
return curve
|
||||
}
|
||||
|
||||
func (curve *CurveParams) IsOnCurve(x, y *big.Int) bool {
|
||||
// y² = x³ - 3x + b
|
||||
y2 := new(big.Int).Mul(y, y)
|
||||
y2.Mod(y2, curve.P)
|
||||
|
@ -50,7 +71,7 @@ func (curve *Curve) IsOnCurve(x, y *big.Int) bool {
|
|||
|
||||
// affineFromJacobian reverses the Jacobian transform. See the comment at the
|
||||
// top of the file.
|
||||
func (curve *Curve) affineFromJacobian(x, y, z *big.Int) (xOut, yOut *big.Int) {
|
||||
func (curve *CurveParams) affineFromJacobian(x, y, z *big.Int) (xOut, yOut *big.Int) {
|
||||
zinv := new(big.Int).ModInverse(z, curve.P)
|
||||
zinvsq := new(big.Int).Mul(zinv, zinv)
|
||||
|
||||
|
@ -62,15 +83,14 @@ func (curve *Curve) affineFromJacobian(x, y, z *big.Int) (xOut, yOut *big.Int) {
|
|||
return
|
||||
}
|
||||
|
||||
// Add returns the sum of (x1,y1) and (x2,y2)
|
||||
func (curve *Curve) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) {
|
||||
func (curve *CurveParams) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) {
|
||||
z := new(big.Int).SetInt64(1)
|
||||
return curve.affineFromJacobian(curve.addJacobian(x1, y1, z, x2, y2, z))
|
||||
}
|
||||
|
||||
// addJacobian takes two points in Jacobian coordinates, (x1, y1, z1) and
|
||||
// (x2, y2, z2) and returns their sum, also in Jacobian form.
|
||||
func (curve *Curve) addJacobian(x1, y1, z1, x2, y2, z2 *big.Int) (*big.Int, *big.Int, *big.Int) {
|
||||
func (curve *CurveParams) addJacobian(x1, y1, z1, x2, y2, z2 *big.Int) (*big.Int, *big.Int, *big.Int) {
|
||||
// See http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-3.html#addition-add-2007-bl
|
||||
z1z1 := new(big.Int).Mul(z1, z1)
|
||||
z1z1.Mod(z1z1, curve.P)
|
||||
|
@ -133,15 +153,14 @@ func (curve *Curve) addJacobian(x1, y1, z1, x2, y2, z2 *big.Int) (*big.Int, *big
|
|||
return x3, y3, z3
|
||||
}
|
||||
|
||||
// Double returns 2*(x,y)
|
||||
func (curve *Curve) Double(x1, y1 *big.Int) (*big.Int, *big.Int) {
|
||||
func (curve *CurveParams) Double(x1, y1 *big.Int) (*big.Int, *big.Int) {
|
||||
z1 := new(big.Int).SetInt64(1)
|
||||
return curve.affineFromJacobian(curve.doubleJacobian(x1, y1, z1))
|
||||
}
|
||||
|
||||
// doubleJacobian takes a point in Jacobian coordinates, (x, y, z), and
|
||||
// returns its double, also in Jacobian form.
|
||||
func (curve *Curve) doubleJacobian(x, y, z *big.Int) (*big.Int, *big.Int, *big.Int) {
|
||||
func (curve *CurveParams) doubleJacobian(x, y, z *big.Int) (*big.Int, *big.Int, *big.Int) {
|
||||
// See http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-3.html#doubling-dbl-2001-b
|
||||
delta := new(big.Int).Mul(z, z)
|
||||
delta.Mod(delta, curve.P)
|
||||
|
@ -199,8 +218,7 @@ func (curve *Curve) doubleJacobian(x, y, z *big.Int) (*big.Int, *big.Int, *big.I
|
|||
return x3, y3, z3
|
||||
}
|
||||
|
||||
// ScalarMult returns k*(Bx,By) where k is a number in big-endian form.
|
||||
func (curve *Curve) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) {
|
||||
func (curve *CurveParams) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) {
|
||||
// We have a slight problem in that the identity of the group (the
|
||||
// point at infinity) cannot be represented in (x, y) form on a finite
|
||||
// machine. Thus the standard add/double algorithm has to be tweaked
|
||||
|
@ -238,18 +256,17 @@ func (curve *Curve) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) {
|
|||
return curve.affineFromJacobian(x, y, z)
|
||||
}
|
||||
|
||||
// ScalarBaseMult returns k*G, where G is the base point of the group and k is
|
||||
// an integer in big-endian form.
|
||||
func (curve *Curve) ScalarBaseMult(k []byte) (*big.Int, *big.Int) {
|
||||
func (curve *CurveParams) ScalarBaseMult(k []byte) (*big.Int, *big.Int) {
|
||||
return curve.ScalarMult(curve.Gx, curve.Gy, k)
|
||||
}
|
||||
|
||||
var mask = []byte{0xff, 0x1, 0x3, 0x7, 0xf, 0x1f, 0x3f, 0x7f}
|
||||
|
||||
// GenerateKey returns a public/private key pair. The private key is generated
|
||||
// using the given reader, which must return random data.
|
||||
func (curve *Curve) GenerateKey(rand io.Reader) (priv []byte, x, y *big.Int, err error) {
|
||||
byteLen := (curve.BitSize + 7) >> 3
|
||||
// GenerateKey returns a public/private key pair. The private key is
|
||||
// generated using the given reader, which must return random data.
|
||||
func GenerateKey(curve Curve, rand io.Reader) (priv []byte, x, y *big.Int, err error) {
|
||||
bitSize := curve.Params().BitSize
|
||||
byteLen := (bitSize + 7) >> 3
|
||||
priv = make([]byte, byteLen)
|
||||
|
||||
for x == nil {
|
||||
|
@ -259,7 +276,7 @@ func (curve *Curve) GenerateKey(rand io.Reader) (priv []byte, x, y *big.Int, err
|
|||
}
|
||||
// We have to mask off any excess bits in the case that the size of the
|
||||
// underlying field is not a whole number of bytes.
|
||||
priv[0] &= mask[curve.BitSize%8]
|
||||
priv[0] &= mask[bitSize%8]
|
||||
// This is because, in tests, rand will return all zeros and we don't
|
||||
// want to get the point at infinity and loop forever.
|
||||
priv[1] ^= 0x42
|
||||
|
@ -268,10 +285,9 @@ func (curve *Curve) GenerateKey(rand io.Reader) (priv []byte, x, y *big.Int, err
|
|||
return
|
||||
}
|
||||
|
||||
// Marshal converts a point into the form specified in section 4.3.6 of ANSI
|
||||
// X9.62.
|
||||
func (curve *Curve) Marshal(x, y *big.Int) []byte {
|
||||
byteLen := (curve.BitSize + 7) >> 3
|
||||
// Marshal converts a point into the form specified in section 4.3.6 of ANSI X9.62.
|
||||
func Marshal(curve Curve, x, y *big.Int) []byte {
|
||||
byteLen := (curve.Params().BitSize + 7) >> 3
|
||||
|
||||
ret := make([]byte, 1+2*byteLen)
|
||||
ret[0] = 4 // uncompressed point
|
||||
|
@ -283,10 +299,9 @@ func (curve *Curve) Marshal(x, y *big.Int) []byte {
|
|||
return ret
|
||||
}
|
||||
|
||||
// Unmarshal converts a point, serialized by Marshal, into an x, y pair. On
|
||||
// error, x = nil.
|
||||
func (curve *Curve) Unmarshal(data []byte) (x, y *big.Int) {
|
||||
byteLen := (curve.BitSize + 7) >> 3
|
||||
// Unmarshal converts a point, serialized by Marshal, into an x, y pair. On error, x = nil.
|
||||
func Unmarshal(curve Curve, data []byte) (x, y *big.Int) {
|
||||
byteLen := (curve.Params().BitSize + 7) >> 3
|
||||
if len(data) != 1+2*byteLen {
|
||||
return
|
||||
}
|
||||
|
@ -299,10 +314,9 @@ func (curve *Curve) Unmarshal(data []byte) (x, y *big.Int) {
|
|||
}
|
||||
|
||||
var initonce sync.Once
|
||||
var p224 *Curve
|
||||
var p256 *Curve
|
||||
var p384 *Curve
|
||||
var p521 *Curve
|
||||
var p256 *CurveParams
|
||||
var p384 *CurveParams
|
||||
var p521 *CurveParams
|
||||
|
||||
func initAll() {
|
||||
initP224()
|
||||
|
@ -311,20 +325,9 @@ func initAll() {
|
|||
initP521()
|
||||
}
|
||||
|
||||
func initP224() {
|
||||
// See FIPS 186-3, section D.2.2
|
||||
p224 = new(Curve)
|
||||
p224.P, _ = new(big.Int).SetString("26959946667150639794667015087019630673557916260026308143510066298881", 10)
|
||||
p224.N, _ = new(big.Int).SetString("26959946667150639794667015087019625940457807714424391721682722368061", 10)
|
||||
p224.B, _ = new(big.Int).SetString("b4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4", 16)
|
||||
p224.Gx, _ = new(big.Int).SetString("b70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21", 16)
|
||||
p224.Gy, _ = new(big.Int).SetString("bd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34", 16)
|
||||
p224.BitSize = 224
|
||||
}
|
||||
|
||||
func initP256() {
|
||||
// See FIPS 186-3, section D.2.3
|
||||
p256 = new(Curve)
|
||||
p256 = new(CurveParams)
|
||||
p256.P, _ = new(big.Int).SetString("115792089210356248762697446949407573530086143415290314195533631308867097853951", 10)
|
||||
p256.N, _ = new(big.Int).SetString("115792089210356248762697446949407573529996955224135760342422259061068512044369", 10)
|
||||
p256.B, _ = new(big.Int).SetString("5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b", 16)
|
||||
|
@ -335,7 +338,7 @@ func initP256() {
|
|||
|
||||
func initP384() {
|
||||
// See FIPS 186-3, section D.2.4
|
||||
p384 = new(Curve)
|
||||
p384 = new(CurveParams)
|
||||
p384.P, _ = new(big.Int).SetString("39402006196394479212279040100143613805079739270465446667948293404245721771496870329047266088258938001861606973112319", 10)
|
||||
p384.N, _ = new(big.Int).SetString("39402006196394479212279040100143613805079739270465446667946905279627659399113263569398956308152294913554433653942643", 10)
|
||||
p384.B, _ = new(big.Int).SetString("b3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef", 16)
|
||||
|
@ -346,7 +349,7 @@ func initP384() {
|
|||
|
||||
func initP521() {
|
||||
// See FIPS 186-3, section D.2.5
|
||||
p521 = new(Curve)
|
||||
p521 = new(CurveParams)
|
||||
p521.P, _ = new(big.Int).SetString("6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151", 10)
|
||||
p521.N, _ = new(big.Int).SetString("6864797660130609714981900799081393217269435300143305409394463459185543183397655394245057746333217197532963996371363321113864768612440380340372808892707005449", 10)
|
||||
p521.B, _ = new(big.Int).SetString("051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00", 16)
|
||||
|
@@ -355,26 +358,20 @@ func initP521() {
p521.BitSize = 521
}

// P224 returns a Curve which implements P-224 (see FIPS 186-3, section D.2.2)
func P224() *Curve {
initonce.Do(initAll)
return p224
}

// P256 returns a Curve which implements P-256 (see FIPS 186-3, section D.2.3)
func P256() *Curve {
func P256() Curve {
initonce.Do(initAll)
return p256
}

// P384 returns a Curve which implements P-384 (see FIPS 186-3, section D.2.4)
func P384() *Curve {
func P384() Curve {
initonce.Do(initAll)
return p384
}

// P521 returns a Curve which implements P-521 (see FIPS 186-3, section D.2.5)
func P521() *Curve {
func P521() Curve {
initonce.Do(initAll)
return p521
}
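
Marshal, Unmarshal and GenerateKey likewise move from methods on *Curve to package-level functions that take a Curve, so call sites change roughly as in this illustrative sketch (not part of the commit):

package main

import (
	"crypto/elliptic"
	"crypto/rand"
	"fmt"
)

func main() {
	curve := elliptic.P224() // now returns the Curve interface

	// Old: curve.GenerateKey(rand.Reader); new: a package-level function.
	_, x, y, err := elliptic.GenerateKey(curve, rand.Reader)
	if err != nil {
		panic(err)
	}

	// Old: curve.Marshal(x, y) and curve.Unmarshal(data).
	data := elliptic.Marshal(curve, x, y)
	xx, yy := elliptic.Unmarshal(curve, data)

	fmt.Println("round trip ok:", xx.Cmp(x) == 0 && yy.Cmp(y) == 0)
}
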
@ -13,7 +13,7 @@ import (
|
|||
|
||||
func TestOnCurve(t *testing.T) {
|
||||
p224 := P224()
|
||||
if !p224.IsOnCurve(p224.Gx, p224.Gy) {
|
||||
if !p224.IsOnCurve(p224.Params().Gx, p224.Params().Gy) {
|
||||
t.Errorf("FAIL")
|
||||
}
|
||||
}
|
||||
|
@ -295,7 +295,25 @@ func TestBaseMult(t *testing.T) {
|
|||
}
|
||||
x, y := p224.ScalarBaseMult(k.Bytes())
|
||||
if fmt.Sprintf("%x", x) != e.x || fmt.Sprintf("%x", y) != e.y {
|
||||
t.Errorf("%d: bad output for k=%s: got (%x, %s), want (%x, %s)", i, e.k, x, y, e.x, e.y)
|
||||
t.Errorf("%d: bad output for k=%s: got (%x, %x), want (%s, %s)", i, e.k, x, y, e.x, e.y)
|
||||
}
|
||||
if testing.Short() && i > 5 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestGenericBaseMult(t *testing.T) {
|
||||
// We use the P224 CurveParams directly in order to test the generic implementation.
|
||||
p224 := P224().Params()
|
||||
for i, e := range p224BaseMultTests {
|
||||
k, ok := new(big.Int).SetString(e.k, 10)
|
||||
if !ok {
|
||||
t.Errorf("%d: bad value for k: %s", i, e.k)
|
||||
}
|
||||
x, y := p224.ScalarBaseMult(k.Bytes())
|
||||
if fmt.Sprintf("%x", x) != e.x || fmt.Sprintf("%x", y) != e.y {
|
||||
t.Errorf("%d: bad output for k=%s: got (%x, %x), want (%s, %s)", i, e.k, x, y, e.x, e.y)
|
||||
}
|
||||
if testing.Short() && i > 5 {
|
||||
break
|
||||
|
@ -316,13 +334,13 @@ func BenchmarkBaseMult(b *testing.B) {
|
|||
|
||||
func TestMarshal(t *testing.T) {
|
||||
p224 := P224()
|
||||
_, x, y, err := p224.GenerateKey(rand.Reader)
|
||||
_, x, y, err := GenerateKey(p224, rand.Reader)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
serialized := p224.Marshal(x, y)
|
||||
xx, yy := p224.Unmarshal(serialized)
|
||||
serialized := Marshal(p224, x, y)
|
||||
xx, yy := Unmarshal(p224, serialized)
|
||||
if xx == nil {
|
||||
t.Error("failed to unmarshal")
|
||||
return

libgo/go/crypto/elliptic/p224.go (685 lines, new file)
@@ -0,0 +1,685 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package elliptic
|
||||
|
||||
// This is a constant-time, 32-bit implementation of P224. See FIPS 186-3,
|
||||
// section D.2.2.
|
||||
//
|
||||
// See http://www.imperialviolet.org/2010/12/04/ecc.html ([1]) for background.
|
||||
|
||||
import (
|
||||
"math/big"
|
||||
)
|
||||
|
||||
var p224 p224Curve
|
||||
|
||||
type p224Curve struct {
|
||||
*CurveParams
|
||||
gx, gy, b p224FieldElement
|
||||
}
|
||||
|
||||
func initP224() {
|
||||
// See FIPS 186-3, section D.2.2
|
||||
p224.CurveParams = new(CurveParams)
|
||||
p224.P, _ = new(big.Int).SetString("26959946667150639794667015087019630673557916260026308143510066298881", 10)
|
||||
p224.N, _ = new(big.Int).SetString("26959946667150639794667015087019625940457807714424391721682722368061", 10)
|
||||
p224.B, _ = new(big.Int).SetString("b4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4", 16)
|
||||
p224.Gx, _ = new(big.Int).SetString("b70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21", 16)
|
||||
p224.Gy, _ = new(big.Int).SetString("bd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34", 16)
|
||||
p224.BitSize = 224
|
||||
|
||||
p224FromBig(&p224.gx, p224.Gx)
|
||||
p224FromBig(&p224.gy, p224.Gy)
|
||||
p224FromBig(&p224.b, p224.B)
|
||||
}
|
||||
|
||||
// P224 returns a Curve which implements P-224 (see FIPS 186-3, section D.2.2)
|
||||
func P224() Curve {
|
||||
initonce.Do(initAll)
|
||||
return p224
|
||||
}
|
||||
|
||||
func (curve p224Curve) Params() *CurveParams {
|
||||
return curve.CurveParams
|
||||
}
|
||||
|
||||
func (curve p224Curve) IsOnCurve(bigX, bigY *big.Int) bool {
|
||||
var x, y p224FieldElement
|
||||
p224FromBig(&x, bigX)
|
||||
p224FromBig(&y, bigY)
|
||||
|
||||
// y² = x³ - 3x + b
|
||||
var tmp p224LargeFieldElement
|
||||
var x3 p224FieldElement
|
||||
p224Square(&x3, &x, &tmp)
|
||||
p224Mul(&x3, &x3, &x, &tmp)
|
||||
|
||||
for i := 0; i < 8; i++ {
|
||||
x[i] *= 3
|
||||
}
|
||||
p224Sub(&x3, &x3, &x)
|
||||
p224Reduce(&x3)
|
||||
p224Add(&x3, &x3, &curve.b)
|
||||
p224Contract(&x3, &x3)
|
||||
|
||||
p224Square(&y, &y, &tmp)
|
||||
p224Contract(&y, &y)
|
||||
|
||||
for i := 0; i < 8; i++ {
|
||||
if y[i] != x3[i] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (p224Curve) Add(bigX1, bigY1, bigX2, bigY2 *big.Int) (x, y *big.Int) {
|
||||
var x1, y1, z1, x2, y2, z2, x3, y3, z3 p224FieldElement
|
||||
|
||||
p224FromBig(&x1, bigX1)
|
||||
p224FromBig(&y1, bigY1)
|
||||
z1[0] = 1
|
||||
p224FromBig(&x2, bigX2)
|
||||
p224FromBig(&y2, bigY2)
|
||||
z2[0] = 1
|
||||
|
||||
p224AddJacobian(&x3, &y3, &z3, &x1, &y1, &z1, &x2, &y2, &z2)
|
||||
return p224ToAffine(&x3, &y3, &z3)
|
||||
}
|
||||
|
||||
func (p224Curve) Double(bigX1, bigY1 *big.Int) (x, y *big.Int) {
|
||||
var x1, y1, z1, x2, y2, z2 p224FieldElement
|
||||
|
||||
p224FromBig(&x1, bigX1)
|
||||
p224FromBig(&y1, bigY1)
|
||||
z1[0] = 1
|
||||
|
||||
p224DoubleJacobian(&x2, &y2, &z2, &x1, &y1, &z1)
|
||||
return p224ToAffine(&x2, &y2, &z2)
|
||||
}
|
||||
|
||||
func (p224Curve) ScalarMult(bigX1, bigY1 *big.Int, scalar []byte) (x, y *big.Int) {
|
||||
var x1, y1, z1, x2, y2, z2 p224FieldElement
|
||||
|
||||
p224FromBig(&x1, bigX1)
|
||||
p224FromBig(&y1, bigY1)
|
||||
z1[0] = 1
|
||||
|
||||
p224ScalarMult(&x2, &y2, &z2, &x1, &y1, &z1, scalar)
|
||||
return p224ToAffine(&x2, &y2, &z2)
|
||||
}
|
||||
|
||||
func (curve p224Curve) ScalarBaseMult(scalar []byte) (x, y *big.Int) {
|
||||
var z1, x2, y2, z2 p224FieldElement
|
||||
|
||||
z1[0] = 1
|
||||
p224ScalarMult(&x2, &y2, &z2, &curve.gx, &curve.gy, &z1, scalar)
|
||||
return p224ToAffine(&x2, &y2, &z2)
|
||||
}
|
||||
|
||||
// Field element functions.
|
||||
//
|
||||
// The field that we're dealing with is ℤ/pℤ where p = 2**224 - 2**96 + 1.
|
||||
//
|
||||
// Field elements are represented by a FieldElement, which is a typedef to an
|
||||
// array of 8 uint32's. The value of a FieldElement, a, is:
|
||||
// a[0] + 2**28·a[1] + 2**56·a[2] + ... + 2**196·a[7]
|
||||
//
|
||||
// Using 28-bit limbs means that there's only 4 bits of headroom, which is less
|
||||
// than we would really like. But it has the useful feature that we hit 2**224
|
||||
// exactly, making the reflections during a reduce much nicer.
|
||||
type p224FieldElement [8]uint32
|
||||
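As a side note, the limb layout described above can be checked with math/big; the helper below is a hypothetical sketch (not part of the patch) that splits a value into the eight 28-bit little-endian limbs a[0]..a[7].

package main

import (
	"fmt"
	"math/big"
)

// limbs28 splits n (assumed < 2**224) into eight 28-bit limbs,
// least-significant limb first, so n = a[0] + 2**28·a[1] + ... + 2**196·a[7].
func limbs28(n *big.Int) [8]uint32 {
	var a [8]uint32
	t := new(big.Int).Set(n)
	mask := big.NewInt(1<<28 - 1)
	for i := 0; i < 8; i++ {
		a[i] = uint32(new(big.Int).And(t, mask).Int64())
		t.Rsh(t, 28)
	}
	return a
}

func main() {
	// The P-224 base point x coordinate from initP224 above.
	gx, _ := new(big.Int).SetString("b70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21", 16)
	fmt.Println(limbs28(gx))
}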
|
||||
// p224Add computes *out = a+b
|
||||
//
|
||||
// a[i] + b[i] < 2**32
|
||||
func p224Add(out, a, b *p224FieldElement) {
|
||||
for i := 0; i < 8; i++ {
|
||||
out[i] = a[i] + b[i]
|
||||
}
|
||||
}
|
||||
|
||||
const two31p3 = 1<<31 + 1<<3
|
||||
const two31m3 = 1<<31 - 1<<3
|
||||
const two31m15m3 = 1<<31 - 1<<15 - 1<<3
|
||||
|
||||
// p224ZeroModP31 is 0 mod p where bit 31 is set in all limbs so that we can
|
||||
// subtract smaller amounts without underflow. See the section "Subtraction" in
|
||||
// [1] for reasoning.
|
||||
var p224ZeroModP31 = []uint32{two31p3, two31m3, two31m3, two31m15m3, two31m3, two31m3, two31m3, two31m3}
|
||||
|
||||
// p224Sub computes *out = a-b
|
||||
//
|
||||
// a[i], b[i] < 2**30
|
||||
// out[i] < 2**32
|
||||
func p224Sub(out, a, b *p224FieldElement) {
|
||||
for i := 0; i < 8; i++ {
|
||||
out[i] = a[i] + p224ZeroModP31[i] - b[i]
|
||||
}
|
||||
}
|
||||
|
||||
// LargeFieldElement also represents an element of the field. The limbs are
|
||||
// still spaced 28-bits apart and in little-endian order. So the limbs are at
|
||||
// 0, 28, 56, ..., 392 bits, each 64-bits wide.
|
||||
type p224LargeFieldElement [15]uint64
|
||||
|
||||
const two63p35 = 1<<63 + 1<<35
|
||||
const two63m35 = 1<<63 - 1<<35
|
||||
const two63m35m19 = 1<<63 - 1<<35 - 1<<19
|
||||
|
||||
// p224ZeroModP63 is 0 mod p where bit 63 is set in all limbs. See the section
|
||||
// "Subtraction" in [1] for why.
|
||||
var p224ZeroModP63 = [8]uint64{two63p35, two63m35, two63m35, two63m35, two63m35m19, two63m35, two63m35, two63m35}
|
||||
|
||||
const bottom12Bits = 0xfff
|
||||
const bottom28Bits = 0xfffffff
|
||||
|
||||
// p224Mul computes *out = a*b
|
||||
//
|
||||
// a[i] < 2**29, b[i] < 2**30 (or vice versa)
|
||||
// out[i] < 2**29
|
||||
func p224Mul(out, a, b *p224FieldElement, tmp *p224LargeFieldElement) {
|
||||
for i := 0; i < 15; i++ {
|
||||
tmp[i] = 0
|
||||
}
|
||||
|
||||
for i := 0; i < 8; i++ {
|
||||
for j := 0; j < 8; j++ {
|
||||
tmp[i+j] += uint64(a[i]) * uint64(b[j])
|
||||
}
|
||||
}
|
||||
|
||||
p224ReduceLarge(out, tmp)
|
||||
}
|
||||
|
||||
// Square computes *out = a*a
|
||||
//
|
||||
// a[i] < 2**29
|
||||
// out[i] < 2**29
|
||||
func p224Square(out, a *p224FieldElement, tmp *p224LargeFieldElement) {
|
||||
for i := 0; i < 15; i++ {
|
||||
tmp[i] = 0
|
||||
}
|
||||
|
||||
for i := 0; i < 8; i++ {
|
||||
for j := 0; j <= i; j++ {
|
||||
r := uint64(a[i]) * uint64(a[j])
|
||||
if i == j {
|
||||
tmp[i+j] += r
|
||||
} else {
|
||||
tmp[i+j] += r << 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
p224ReduceLarge(out, tmp)
|
||||
}
|
||||
|
||||
// ReduceLarge converts a p224LargeFieldElement to a p224FieldElement.
|
||||
//
|
||||
// in[i] < 2**62
|
||||
func p224ReduceLarge(out *p224FieldElement, in *p224LargeFieldElement) {
|
||||
for i := 0; i < 8; i++ {
|
||||
in[i] += p224ZeroModP63[i]
|
||||
}
|
||||
|
||||
// Eliminate the coefficients at 2**224 and greater.
|
||||
for i := 14; i >= 8; i-- {
|
||||
in[i-8] -= in[i]
|
||||
in[i-5] += (in[i] & 0xffff) << 12
|
||||
in[i-4] += in[i] >> 16
|
||||
}
|
||||
in[8] = 0
|
||||
// in[0..8] < 2**64
|
||||
|
||||
// As the values become small enough, we start to store them in |out|
|
||||
// and use 32-bit operations.
|
||||
for i := 1; i < 8; i++ {
|
||||
in[i+1] += in[i] >> 28
|
||||
out[i] = uint32(in[i] & bottom28Bits)
|
||||
}
|
||||
in[0] -= in[8]
|
||||
out[3] += uint32(in[8]&0xffff) << 12
|
||||
out[4] += uint32(in[8] >> 16)
|
||||
// in[0] < 2**64
|
||||
// out[3] < 2**29
|
||||
// out[4] < 2**29
|
||||
// out[1,2,5..7] < 2**28
|
||||
|
||||
out[0] = uint32(in[0] & bottom28Bits)
|
||||
out[1] += uint32((in[0] >> 28) & bottom28Bits)
|
||||
out[2] += uint32(in[0] >> 56)
|
||||
// out[0] < 2**28
|
||||
// out[1..4] < 2**29
|
||||
// out[5..7] < 2**28
|
||||
}
|
||||
|
||||
// Reduce reduces the coefficients of a to smaller bounds.
|
||||
//
|
||||
// On entry: a[i] < 2**31 + 2**30
|
||||
// On exit: a[i] < 2**29
|
||||
func p224Reduce(a *p224FieldElement) {
|
||||
for i := 0; i < 7; i++ {
|
||||
a[i+1] += a[i] >> 28
|
||||
a[i] &= bottom28Bits
|
||||
}
|
||||
top := a[7] >> 28
|
||||
a[7] &= bottom28Bits
|
||||
|
||||
// top < 2**4
|
||||
mask := top
|
||||
mask |= mask >> 2
|
||||
mask |= mask >> 1
|
||||
mask <<= 31
|
||||
mask = uint32(int32(mask) >> 31)
|
||||
// Mask is all ones if top != 0, all zero otherwise
|
||||
|
||||
a[0] -= top
|
||||
a[3] += top << 12
|
||||
|
||||
// We may have just made a[0] negative but, if we did, then we must
|
||||
// have added something to a[3], thus it's > 2**12. Therefore we can
|
||||
// carry down to a[0].
|
||||
a[3] -= 1 & mask
|
||||
a[2] += mask & (1<<28 - 1)
|
||||
a[1] += mask & (1<<28 - 1)
|
||||
a[0] += mask & (1 << 28)
|
||||
}
|
||||
|
||||
// p224Invert calculates *out = in**-1 by computing in**(2**224 - 2**96 - 1),
|
||||
// i.e. Fermat's little theorem.
|
||||
func p224Invert(out, in *p224FieldElement) {
|
||||
var f1, f2, f3, f4 p224FieldElement
|
||||
var c p224LargeFieldElement
|
||||
|
||||
p224Square(&f1, in, &c) // 2
|
||||
p224Mul(&f1, &f1, in, &c) // 2**2 - 1
|
||||
p224Square(&f1, &f1, &c) // 2**3 - 2
|
||||
p224Mul(&f1, &f1, in, &c) // 2**3 - 1
|
||||
p224Square(&f2, &f1, &c) // 2**4 - 2
|
||||
p224Square(&f2, &f2, &c) // 2**5 - 4
|
||||
p224Square(&f2, &f2, &c) // 2**6 - 8
|
||||
p224Mul(&f1, &f1, &f2, &c) // 2**6 - 1
|
||||
p224Square(&f2, &f1, &c) // 2**7 - 2
|
||||
for i := 0; i < 5; i++ { // 2**12 - 2**6
|
||||
p224Square(&f2, &f2, &c)
|
||||
}
|
||||
p224Mul(&f2, &f2, &f1, &c) // 2**12 - 1
|
||||
p224Square(&f3, &f2, &c) // 2**13 - 2
|
||||
for i := 0; i < 11; i++ { // 2**24 - 2**12
|
||||
p224Square(&f3, &f3, &c)
|
||||
}
|
||||
p224Mul(&f2, &f3, &f2, &c) // 2**24 - 1
|
||||
p224Square(&f3, &f2, &c) // 2**25 - 2
|
||||
for i := 0; i < 23; i++ { // 2**48 - 2**24
|
||||
p224Square(&f3, &f3, &c)
|
||||
}
|
||||
p224Mul(&f3, &f3, &f2, &c) // 2**48 - 1
|
||||
p224Square(&f4, &f3, &c) // 2**49 - 2
|
||||
for i := 0; i < 47; i++ { // 2**96 - 2**48
|
||||
p224Square(&f4, &f4, &c)
|
||||
}
|
||||
p224Mul(&f3, &f3, &f4, &c) // 2**96 - 1
|
||||
p224Square(&f4, &f3, &c) // 2**97 - 2
|
||||
for i := 0; i < 23; i++ { // 2**120 - 2**24
|
||||
p224Square(&f4, &f4, &c)
|
||||
}
|
||||
p224Mul(&f2, &f4, &f2, &c) // 2**120 - 1
|
||||
for i := 0; i < 6; i++ { // 2**126 - 2**6
|
||||
p224Square(&f2, &f2, &c)
|
||||
}
|
||||
p224Mul(&f1, &f1, &f2, &c) // 2**126 - 1
|
||||
p224Square(&f1, &f1, &c) // 2**127 - 2
|
||||
p224Mul(&f1, &f1, in, &c) // 2**127 - 1
|
||||
for i := 0; i < 97; i++ { // 2**224 - 2**97
|
||||
p224Square(&f1, &f1, &c)
|
||||
}
|
||||
p224Mul(out, &f1, &f3, &c) // 2**224 - 2**96 - 1
|
||||
}
|
||||
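For a sanity check on the exponent used by the addition chain above: since p = 2**224 - 2**96 + 1, the chain raises in to p-2, i.e. Fermat inversion. A minimal math/big sketch (assumed, not part of the patch) confirming that x**(p-2) mod p equals the modular inverse:

package main

import (
	"fmt"
	"math/big"
)

func main() {
	// p = 2**224 - 2**96 + 1, the P-224 field prime from initP224 above.
	p, _ := new(big.Int).SetString("26959946667150639794667015087019630673557916260026308143510066298881", 10)
	x := big.NewInt(123456789)

	// Fermat's little theorem: x**(p-2) ≡ x**-1 (mod p) for prime p.
	fermat := new(big.Int).Exp(x, new(big.Int).Sub(p, big.NewInt(2)), p)
	inverse := new(big.Int).ModInverse(x, p)
	fmt.Println(fermat.Cmp(inverse) == 0) // true
}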
|
||||
// p224Contract converts a FieldElement to its unique, minimal form.
|
||||
//
|
||||
// On entry, in[i] < 2**32
|
||||
// On exit, out[i] < 2**28
|
||||
func p224Contract(out, in *p224FieldElement) {
|
||||
copy(out[:], in[:])
|
||||
|
||||
for i := 0; i < 7; i++ {
|
||||
out[i+1] += out[i] >> 28
|
||||
out[i] &= bottom28Bits
|
||||
}
|
||||
top := out[7] >> 28
|
||||
out[7] &= bottom28Bits
|
||||
|
||||
out[0] -= top
|
||||
out[3] += top << 12
|
||||
|
||||
// We may just have made out[i] negative. So we carry down. If we made
|
||||
// out[0] negative then we know that out[3] is sufficiently positive
|
||||
// because we just added to it.
|
||||
for i := 0; i < 3; i++ {
|
||||
mask := uint32(int32(out[i]) >> 31)
|
||||
out[i] += (1 << 28) & mask
|
||||
out[i+1] -= 1 & mask
|
||||
}
|
||||
|
||||
// Now we see if the value is >= p and, if so, subtract p.
|
||||
|
||||
// First we build a mask from the top four limbs, which must all be
|
||||
// equal to bottom28Bits if the whole value is >= p. If top4AllOnes
|
||||
// ends up with any zero bits in the bottom 28 bits, then this wasn't
|
||||
// true.
|
||||
top4AllOnes := uint32(0xffffffff)
|
||||
for i := 4; i < 8; i++ {
|
||||
top4AllOnes &= (out[i] & bottom28Bits) - 1
|
||||
}
|
||||
top4AllOnes |= 0xf0000000
|
||||
// Now we replicate any zero bits to all the bits in top4AllOnes.
|
||||
top4AllOnes &= top4AllOnes >> 16
|
||||
top4AllOnes &= top4AllOnes >> 8
|
||||
top4AllOnes &= top4AllOnes >> 4
|
||||
top4AllOnes &= top4AllOnes >> 2
|
||||
top4AllOnes &= top4AllOnes >> 1
|
||||
top4AllOnes = uint32(int32(top4AllOnes<<31) >> 31)
|
||||
|
||||
// Now we test whether the bottom three limbs are non-zero.
|
||||
bottom3NonZero := out[0] | out[1] | out[2]
|
||||
bottom3NonZero |= bottom3NonZero >> 16
|
||||
bottom3NonZero |= bottom3NonZero >> 8
|
||||
bottom3NonZero |= bottom3NonZero >> 4
|
||||
bottom3NonZero |= bottom3NonZero >> 2
|
||||
bottom3NonZero |= bottom3NonZero >> 1
|
||||
bottom3NonZero = uint32(int32(bottom3NonZero<<31) >> 31)
|
||||
|
||||
// Everything depends on the value of out[3].
|
||||
// If it's > 0xffff000 and top4AllOnes != 0 then the whole value is >= p
|
||||
// If it's = 0xffff000 and top4AllOnes != 0 and bottom3NonZero != 0,
|
||||
// then the whole value is >= p
|
||||
// If it's < 0xffff000, then the whole value is < p
|
||||
n := out[3] - 0xffff000
|
||||
out3Equal := n
|
||||
out3Equal |= out3Equal >> 16
|
||||
out3Equal |= out3Equal >> 8
|
||||
out3Equal |= out3Equal >> 4
|
||||
out3Equal |= out3Equal >> 2
|
||||
out3Equal |= out3Equal >> 1
|
||||
out3Equal = ^uint32(int32(out3Equal<<31) >> 31)
|
||||
|
||||
// If out[3] > 0xffff000 then n's MSB will be zero.
|
||||
out3GT := ^uint32(int32(n<<31) >> 31)
|
||||
|
||||
mask := top4AllOnes & ((out3Equal & bottom3NonZero) | out3GT)
|
||||
out[0] -= 1 & mask
|
||||
out[3] -= 0xffff000 & mask
|
||||
out[4] -= 0xfffffff & mask
|
||||
out[5] -= 0xfffffff & mask
|
||||
out[6] -= 0xfffffff & mask
|
||||
out[7] -= 0xfffffff & mask
|
||||
}
|
||||
|
||||
// Group element functions.
|
||||
//
|
||||
// These functions deal with group elements. The group is an elliptic curve
|
||||
// group with a = -3 defined in FIPS 186-3, section D.2.2.
|
||||
|
||||
// p224AddJacobian computes *out = a+b where a != b.
|
||||
func p224AddJacobian(x3, y3, z3, x1, y1, z1, x2, y2, z2 *p224FieldElement) {
|
||||
// See http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-3.html#addition-p224Add-2007-bl
|
||||
var z1z1, z2z2, u1, u2, s1, s2, h, i, j, r, v p224FieldElement
|
||||
var c p224LargeFieldElement
|
||||
|
||||
// Z1Z1 = Z1²
|
||||
p224Square(&z1z1, z1, &c)
|
||||
// Z2Z2 = Z2²
|
||||
p224Square(&z2z2, z2, &c)
|
||||
// U1 = X1*Z2Z2
|
||||
p224Mul(&u1, x1, &z2z2, &c)
|
||||
// U2 = X2*Z1Z1
|
||||
p224Mul(&u2, x2, &z1z1, &c)
|
||||
// S1 = Y1*Z2*Z2Z2
|
||||
p224Mul(&s1, z2, &z2z2, &c)
|
||||
p224Mul(&s1, y1, &s1, &c)
|
||||
// S2 = Y2*Z1*Z1Z1
|
||||
p224Mul(&s2, z1, &z1z1, &c)
|
||||
p224Mul(&s2, y2, &s2, &c)
|
||||
// H = U2-U1
|
||||
p224Sub(&h, &u2, &u1)
|
||||
p224Reduce(&h)
|
||||
// I = (2*H)²
|
||||
for j := 0; j < 8; j++ {
|
||||
i[j] = h[j] << 1
|
||||
}
|
||||
p224Reduce(&i)
|
||||
p224Square(&i, &i, &c)
|
||||
// J = H*I
|
||||
p224Mul(&j, &h, &i, &c)
|
||||
// r = 2*(S2-S1)
|
||||
p224Sub(&r, &s2, &s1)
|
||||
p224Reduce(&r)
|
||||
for i := 0; i < 8; i++ {
|
||||
r[i] <<= 1
|
||||
}
|
||||
p224Reduce(&r)
|
||||
// V = U1*I
|
||||
p224Mul(&v, &u1, &i, &c)
|
||||
// Z3 = ((Z1+Z2)²-Z1Z1-Z2Z2)*H
|
||||
p224Add(&z1z1, &z1z1, &z2z2)
|
||||
p224Add(&z2z2, z1, z2)
|
||||
p224Reduce(&z2z2)
|
||||
p224Square(&z2z2, &z2z2, &c)
|
||||
p224Sub(z3, &z2z2, &z1z1)
|
||||
p224Reduce(z3)
|
||||
p224Mul(z3, z3, &h, &c)
|
||||
// X3 = r²-J-2*V
|
||||
for i := 0; i < 8; i++ {
|
||||
z1z1[i] = v[i] << 1
|
||||
}
|
||||
p224Add(&z1z1, &j, &z1z1)
|
||||
p224Reduce(&z1z1)
|
||||
p224Square(x3, &r, &c)
|
||||
p224Sub(x3, x3, &z1z1)
|
||||
p224Reduce(x3)
|
||||
// Y3 = r*(V-X3)-2*S1*J
|
||||
for i := 0; i < 8; i++ {
|
||||
s1[i] <<= 1
|
||||
}
|
||||
p224Mul(&s1, &s1, &j, &c)
|
||||
p224Sub(&z1z1, &v, x3)
|
||||
p224Reduce(&z1z1)
|
||||
p224Mul(&z1z1, &z1z1, &r, &c)
|
||||
p224Sub(y3, &z1z1, &s1)
|
||||
p224Reduce(y3)
|
||||
}
|
||||
|
||||
// p224DoubleJacobian computes *out = a+a.
|
||||
func p224DoubleJacobian(x3, y3, z3, x1, y1, z1 *p224FieldElement) {
|
||||
var delta, gamma, beta, alpha, t p224FieldElement
|
||||
var c p224LargeFieldElement
|
||||
|
||||
p224Square(&delta, z1, &c)
|
||||
p224Square(&gamma, y1, &c)
|
||||
p224Mul(&beta, x1, &gamma, &c)
|
||||
|
||||
// alpha = 3*(X1-delta)*(X1+delta)
|
||||
p224Add(&t, x1, &delta)
|
||||
for i := 0; i < 8; i++ {
|
||||
t[i] += t[i] << 1
|
||||
}
|
||||
p224Reduce(&t)
|
||||
p224Sub(&alpha, x1, &delta)
|
||||
p224Reduce(&alpha)
|
||||
p224Mul(&alpha, &alpha, &t, &c)
|
||||
|
||||
// Z3 = (Y1+Z1)²-gamma-delta
|
||||
p224Add(z3, y1, z1)
|
||||
p224Reduce(z3)
|
||||
p224Square(z3, z3, &c)
|
||||
p224Sub(z3, z3, &gamma)
|
||||
p224Reduce(z3)
|
||||
p224Sub(z3, z3, &delta)
|
||||
p224Reduce(z3)
|
||||
|
||||
// X3 = alpha²-8*beta
|
||||
for i := 0; i < 8; i++ {
|
||||
delta[i] = beta[i] << 3
|
||||
}
|
||||
p224Reduce(&delta)
|
||||
p224Square(x3, &alpha, &c)
|
||||
p224Sub(x3, x3, &delta)
|
||||
p224Reduce(x3)
|
||||
|
||||
// Y3 = alpha*(4*beta-X3)-8*gamma²
|
||||
for i := 0; i < 8; i++ {
|
||||
beta[i] <<= 2
|
||||
}
|
||||
p224Sub(&beta, &beta, x3)
|
||||
p224Reduce(&beta)
|
||||
p224Square(&gamma, &gamma, &c)
|
||||
for i := 0; i < 8; i++ {
|
||||
gamma[i] <<= 3
|
||||
}
|
||||
p224Reduce(&gamma)
|
||||
p224Mul(y3, &alpha, &beta, &c)
|
||||
p224Sub(y3, y3, &gamma)
|
||||
p224Reduce(y3)
|
||||
}
|
||||
|
||||
// p224CopyConditional sets *out = *in iff the least-significant-bit of control
|
||||
// is true, and it runs in constant time.
|
||||
func p224CopyConditional(out, in *p224FieldElement, control uint32) {
|
||||
control <<= 31
|
||||
control = uint32(int32(control) >> 31)
|
||||
|
||||
for i := 0; i < 8; i++ {
|
||||
out[i] ^= (out[i] ^ in[i]) & control
|
||||
}
|
||||
}
|
||||
|
||||
func p224ScalarMult(outX, outY, outZ, inX, inY, inZ *p224FieldElement, scalar []byte) {
|
||||
var xx, yy, zz p224FieldElement
|
||||
for i := 0; i < 8; i++ {
|
||||
outZ[i] = 0
|
||||
}
|
||||
|
||||
firstBit := uint32(1)
|
||||
for _, byte := range scalar {
|
||||
for bitNum := uint(0); bitNum < 8; bitNum++ {
|
||||
p224DoubleJacobian(outX, outY, outZ, outX, outY, outZ)
|
||||
bit := uint32((byte >> (7 - bitNum)) & 1)
|
||||
p224AddJacobian(&xx, &yy, &zz, inX, inY, inZ, outX, outY, outZ)
|
||||
p224CopyConditional(outX, inX, firstBit&bit)
|
||||
p224CopyConditional(outY, inY, firstBit&bit)
|
||||
p224CopyConditional(outZ, inZ, firstBit&bit)
|
||||
p224CopyConditional(outX, &xx, ^firstBit&bit)
|
||||
p224CopyConditional(outY, &yy, ^firstBit&bit)
|
||||
p224CopyConditional(outZ, &zz, ^firstBit&bit)
|
||||
firstBit = firstBit & ^bit
|
||||
}
|
||||
}
|
||||
}
|
||||
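The loop above is a constant-time double-and-add: each scalar bit doubles the accumulator and then conditionally mixes in either the input point or the freshly computed sum via p224CopyConditional. A hypothetical in-package consistency check (test-style sketch, not part of the patch) against the generic CurveParams arithmetic:

// p224CheckScalarBaseMult is a hypothetical helper: the specialised
// constant-time path and the generic Jacobian code in CurveParams
// must agree on k*G for any scalar k.
func p224CheckScalarBaseMult(k []byte) bool {
	x1, y1 := P224().ScalarBaseMult(k)
	x2, y2 := p224.CurveParams.ScalarBaseMult(k)
	return x1.Cmp(x2) == 0 && y1.Cmp(y2) == 0
}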
|
||||
// p224ToAffine converts from Jacobian to affine form.
|
||||
func p224ToAffine(x, y, z *p224FieldElement) (*big.Int, *big.Int) {
|
||||
var zinv, zinvsq, outx, outy p224FieldElement
|
||||
var tmp p224LargeFieldElement
|
||||
|
||||
isPointAtInfinity := true
|
||||
for i := 0; i < 8; i++ {
|
||||
if z[i] != 0 {
|
||||
isPointAtInfinity = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if isPointAtInfinity {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
p224Invert(&zinv, z)
|
||||
p224Square(&zinvsq, &zinv, &tmp)
|
||||
p224Mul(x, x, &zinvsq, &tmp)
|
||||
p224Mul(&zinvsq, &zinvsq, &zinv, &tmp)
|
||||
p224Mul(y, y, &zinvsq, &tmp)
|
||||
|
||||
p224Contract(&outx, x)
|
||||
p224Contract(&outy, y)
|
||||
return p224ToBig(&outx), p224ToBig(&outy)
|
||||
}
|
||||
|
||||
// get28BitsFromEnd returns the least-significant 28 bits from buf>>shift,
|
||||
// where buf is interpreted as a big-endian number.
|
||||
func get28BitsFromEnd(buf []byte, shift uint) (uint32, []byte) {
|
||||
var ret uint32
|
||||
|
||||
for i := uint(0); i < 4; i++ {
|
||||
var b byte
|
||||
if l := len(buf); l > 0 {
|
||||
b = buf[l-1]
|
||||
// We don't remove the byte if we're about to return and we're not
|
||||
// reading all of it.
|
||||
if i != 3 || shift == 4 {
|
||||
buf = buf[:l-1]
|
||||
}
|
||||
}
|
||||
ret |= uint32(b) << (8 * i) >> shift
|
||||
}
|
||||
ret &= bottom28Bits
|
||||
return ret, buf
|
||||
}
|
||||
|
||||
// p224FromBig sets *out = *in.
|
||||
func p224FromBig(out *p224FieldElement, in *big.Int) {
|
||||
bytes := in.Bytes()
|
||||
out[0], bytes = get28BitsFromEnd(bytes, 0)
|
||||
out[1], bytes = get28BitsFromEnd(bytes, 4)
|
||||
out[2], bytes = get28BitsFromEnd(bytes, 0)
|
||||
out[3], bytes = get28BitsFromEnd(bytes, 4)
|
||||
out[4], bytes = get28BitsFromEnd(bytes, 0)
|
||||
out[5], bytes = get28BitsFromEnd(bytes, 4)
|
||||
out[6], bytes = get28BitsFromEnd(bytes, 0)
|
||||
out[7], bytes = get28BitsFromEnd(bytes, 4)
|
||||
}
|
||||
|
||||
// p224ToBig returns in as a big.Int.
|
||||
func p224ToBig(in *p224FieldElement) *big.Int {
|
||||
var buf [28]byte
|
||||
buf[27] = byte(in[0])
|
||||
buf[26] = byte(in[0] >> 8)
|
||||
buf[25] = byte(in[0] >> 16)
|
||||
buf[24] = byte(((in[0] >> 24) & 0x0f) | (in[1]<<4)&0xf0)
|
||||
|
||||
buf[23] = byte(in[1] >> 4)
|
||||
buf[22] = byte(in[1] >> 12)
|
||||
buf[21] = byte(in[1] >> 20)
|
||||
|
||||
buf[20] = byte(in[2])
|
||||
buf[19] = byte(in[2] >> 8)
|
||||
buf[18] = byte(in[2] >> 16)
|
||||
buf[17] = byte(((in[2] >> 24) & 0x0f) | (in[3]<<4)&0xf0)
|
||||
|
||||
buf[16] = byte(in[3] >> 4)
|
||||
buf[15] = byte(in[3] >> 12)
|
||||
buf[14] = byte(in[3] >> 20)
|
||||
|
||||
buf[13] = byte(in[4])
|
||||
buf[12] = byte(in[4] >> 8)
|
||||
buf[11] = byte(in[4] >> 16)
|
||||
buf[10] = byte(((in[4] >> 24) & 0x0f) | (in[5]<<4)&0xf0)
|
||||
|
||||
buf[9] = byte(in[5] >> 4)
|
||||
buf[8] = byte(in[5] >> 12)
|
||||
buf[7] = byte(in[5] >> 20)
|
||||
|
||||
buf[6] = byte(in[6])
|
||||
buf[5] = byte(in[6] >> 8)
|
||||
buf[4] = byte(in[6] >> 16)
|
||||
buf[3] = byte(((in[6] >> 24) & 0x0f) | (in[7]<<4)&0xf0)
|
||||
|
||||
buf[2] = byte(in[7] >> 4)
|
||||
buf[1] = byte(in[7] >> 12)
|
||||
buf[0] = byte(in[7] >> 20)
|
||||
|
||||
return new(big.Int).SetBytes(buf[:])
|
||||
}
|
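With the file complete, here is how the new curve is reached from outside the package; a minimal ECDH-style sketch (assumed usage, not part of the patch) using the package-level GenerateKey, Marshal and Unmarshal helpers that this change also switches the TLS code to later in the diff:

package main

import (
	"crypto/elliptic"
	"crypto/rand"
	"fmt"
)

func main() {
	curve := elliptic.P224()

	// Each party generates an ephemeral key pair (errors elided for brevity).
	privA, xA, yA, _ := elliptic.GenerateKey(curve, rand.Reader)
	privB, xB, yB, _ := elliptic.GenerateKey(curve, rand.Reader)

	// Public points travel in the uncompressed point encoding.
	pubA := elliptic.Marshal(curve, xA, yA)
	xA2, yA2 := elliptic.Unmarshal(curve, pubA)

	// Both sides arrive at the same shared x coordinate.
	sAB, _ := curve.ScalarMult(xB, yB, privA)
	sBA, _ := curve.ScalarMult(xA2, yA2, privB)
	fmt.Println(xA.Cmp(xA2) == 0, sAB.Cmp(sBA) == 0)
}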
47
libgo/go/crypto/elliptic/p224_test.go
Normal file
|
@ -0,0 +1,47 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package elliptic
|
||||
|
||||
import (
|
||||
"math/big"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var toFromBigTests = []string{
|
||||
"0",
|
||||
"1",
|
||||
"23",
|
||||
"b70e0cb46bb4bf7f321390b94a03c1d356c01122343280d6105c1d21",
|
||||
"706a46d476dcb76798e6046d89474788d164c18032d268fd10704fa6",
|
||||
}
|
||||
|
||||
func p224AlternativeToBig(in *p224FieldElement) *big.Int {
|
||||
ret := new(big.Int)
|
||||
tmp := new(big.Int)
|
||||
|
||||
for i := uint(0); i < 8; i++ {
|
||||
tmp.SetInt64(int64(in[i]))
|
||||
tmp.Lsh(tmp, 28*i)
|
||||
ret.Add(ret, tmp)
|
||||
}
|
||||
ret.Mod(ret, p224.P)
|
||||
return ret
|
||||
}
|
||||
|
||||
func TestToFromBig(t *testing.T) {
|
||||
for i, test := range toFromBigTests {
|
||||
n, _ := new(big.Int).SetString(test, 16)
|
||||
var x p224FieldElement
|
||||
p224FromBig(&x, n)
|
||||
m := p224ToBig(&x)
|
||||
if n.Cmp(m) != 0 {
|
||||
t.Errorf("#%d: %x != %x", i, n, m)
|
||||
}
|
||||
q := p224AlternativeToBig(&x)
|
||||
if n.Cmp(q) != 0 {
|
||||
t.Errorf("#%d: %x != %x (alternative)", i, n, m)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -9,32 +9,20 @@
|
|||
package hmac
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"crypto/sha1"
|
||||
"crypto/sha256"
|
||||
"hash"
|
||||
)
|
||||
|
||||
// FIPS 198:
|
||||
// http://csrc.nist.gov/publications/fips/fips198/fips-198a.pdf
|
||||
|
||||
// key is zero padded to 64 bytes
|
||||
// ipad = 0x36 byte repeated to 64 bytes
|
||||
// opad = 0x5c byte repeated to 64 bytes
|
||||
// key is zero padded to the block size of the hash function
|
||||
// ipad = 0x36 byte repeated for key length
|
||||
// opad = 0x5c byte repeated for key length
|
||||
// hmac = H([key ^ opad] H([key ^ ipad] text))
|
||||
|
||||
const (
|
||||
// NOTE(rsc): This constant is actually the
|
||||
// underlying hash function's block size.
|
||||
// HMAC is only conventionally used with
|
||||
// MD5 and SHA1, and both use 64-byte blocks.
|
||||
// The hash.Hash interface doesn't provide a
|
||||
// way to find out the block size.
|
||||
padSize = 64
|
||||
)
|
||||
|
||||
type hmac struct {
|
||||
size int
|
||||
blocksize int
|
||||
key, tmp []byte
|
||||
outer, inner hash.Hash
|
||||
}
|
||||
|
@ -43,7 +31,7 @@ func (h *hmac) tmpPad(xor byte) {
|
|||
for i, k := range h.key {
|
||||
h.tmp[i] = xor ^ k
|
||||
}
|
||||
for i := len(h.key); i < padSize; i++ {
|
||||
for i := len(h.key); i < h.blocksize; i++ {
|
||||
h.tmp[i] = xor
|
||||
}
|
||||
}
|
||||
|
@ -52,7 +40,7 @@ func (h *hmac) Sum(in []byte) []byte {
|
|||
origLen := len(in)
|
||||
in = h.inner.Sum(in)
|
||||
h.tmpPad(0x5c)
|
||||
copy(h.tmp[padSize:], in[origLen:])
|
||||
copy(h.tmp[h.blocksize:], in[origLen:])
|
||||
h.outer.Reset()
|
||||
h.outer.Write(h.tmp)
|
||||
return h.outer.Sum(in[:origLen])
|
||||
|
@ -64,20 +52,23 @@ func (h *hmac) Write(p []byte) (n int, err error) {
|
|||
|
||||
func (h *hmac) Size() int { return h.size }
|
||||
|
||||
func (h *hmac) BlockSize() int { return h.blocksize }
|
||||
|
||||
func (h *hmac) Reset() {
|
||||
h.inner.Reset()
|
||||
h.tmpPad(0x36)
|
||||
h.inner.Write(h.tmp[0:padSize])
|
||||
h.inner.Write(h.tmp[0:h.blocksize])
|
||||
}
|
||||
|
||||
// New returns a new HMAC hash using the given hash generator and key.
|
||||
// New returns a new HMAC hash using the given hash.Hash type and key.
|
||||
func New(h func() hash.Hash, key []byte) hash.Hash {
|
||||
hm := new(hmac)
|
||||
hm.outer = h()
|
||||
hm.inner = h()
|
||||
hm.size = hm.inner.Size()
|
||||
hm.tmp = make([]byte, padSize+hm.size)
|
||||
if len(key) > padSize {
|
||||
hm.blocksize = hm.inner.BlockSize()
|
||||
hm.tmp = make([]byte, hm.blocksize+hm.size)
|
||||
if len(key) > hm.blocksize {
|
||||
// If key is too big, hash it.
|
||||
hm.outer.Write(key)
|
||||
key = hm.outer.Sum(nil)
|
||||
|
@ -87,12 +78,3 @@ func New(h func() hash.Hash, key []byte) hash.Hash {
|
|||
hm.Reset()
|
||||
return hm
|
||||
}
|
||||
|
||||
// NewMD5 returns a new HMAC-MD5 hash using the given key.
|
||||
func NewMD5(key []byte) hash.Hash { return New(md5.New, key) }
|
||||
|
||||
// NewSHA1 returns a new HMAC-SHA1 hash using the given key.
|
||||
func NewSHA1(key []byte) hash.Hash { return New(sha1.New, key) }
|
||||
|
||||
// NewSHA256 returns a new HMAC-SHA256 hash using the given key.
|
||||
func NewSHA256(key []byte) hash.Hash { return New(sha256.New, key) }
|
||||
|
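The constructor now takes any hash constructor in place of the removed NewMD5/NewSHA1/NewSHA256 wrappers, and the pad length follows the hash's own BlockSize. A minimal usage sketch (assumed, not part of the patch), reproducing an RFC 4231 vector that also appears in hmac_test.go below:

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"fmt"
)

func main() {
	mac := hmac.New(sha256.New, []byte("Jefe"))
	mac.Write([]byte("what do ya want for nothing?"))
	fmt.Printf("%x\n", mac.Sum(nil))
	// 5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843
}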
|
|
@ -5,13 +5,17 @@
|
|||
package hmac
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"crypto/sha1"
|
||||
"crypto/sha256"
|
||||
"crypto/sha512"
|
||||
"fmt"
|
||||
"hash"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type hmacTest struct {
|
||||
hash func([]byte) hash.Hash
|
||||
hash func() hash.Hash
|
||||
key []byte
|
||||
in []byte
|
||||
out string
|
||||
|
@ -21,7 +25,7 @@ var hmacTests = []hmacTest{
|
|||
// Tests from US FIPS 198
|
||||
// http://csrc.nist.gov/publications/fips/fips198/fips-198a.pdf
|
||||
{
|
||||
NewSHA1,
|
||||
sha1.New,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
|
@ -36,7 +40,7 @@ var hmacTests = []hmacTest{
|
|||
"4f4ca3d5d68ba7cc0a1208c9c61e9c5da0403c0a",
|
||||
},
|
||||
{
|
||||
NewSHA1,
|
||||
sha1.New,
|
||||
[]byte{
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
|
@ -46,7 +50,7 @@ var hmacTests = []hmacTest{
|
|||
"0922d3405faa3d194f82a45830737d5cc6c75d24",
|
||||
},
|
||||
{
|
||||
NewSHA1,
|
||||
sha1.New,
|
||||
[]byte{
|
||||
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
|
||||
0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
|
||||
|
@ -68,7 +72,7 @@ var hmacTests = []hmacTest{
|
|||
|
||||
// Test from Plan 9.
|
||||
{
|
||||
NewMD5,
|
||||
md5.New,
|
||||
[]byte("Jefe"),
|
||||
[]byte("what do ya want for nothing?"),
|
||||
"750c783e6ab0b503eaa86e310a5db738",
|
||||
|
@ -76,7 +80,7 @@ var hmacTests = []hmacTest{
|
|||
|
||||
// Tests from RFC 4231
|
||||
{
|
||||
NewSHA256,
|
||||
sha256.New,
|
||||
[]byte{
|
||||
0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
|
||||
0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
|
||||
|
@ -86,13 +90,13 @@ var hmacTests = []hmacTest{
|
|||
"b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7",
|
||||
},
|
||||
{
|
||||
NewSHA256,
|
||||
sha256.New,
|
||||
[]byte("Jefe"),
|
||||
[]byte("what do ya want for nothing?"),
|
||||
"5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843",
|
||||
},
|
||||
{
|
||||
NewSHA256,
|
||||
sha256.New,
|
||||
[]byte{
|
||||
0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
|
||||
0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
|
||||
|
@ -110,7 +114,7 @@ var hmacTests = []hmacTest{
|
|||
"773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe",
|
||||
},
|
||||
{
|
||||
NewSHA256,
|
||||
sha256.New,
|
||||
[]byte{
|
||||
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
|
||||
0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10,
|
||||
|
@ -129,7 +133,7 @@ var hmacTests = []hmacTest{
|
|||
"82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b",
|
||||
},
|
||||
{
|
||||
NewSHA256,
|
||||
sha256.New,
|
||||
[]byte{
|
||||
0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
|
||||
0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
|
||||
|
@ -153,7 +157,7 @@ var hmacTests = []hmacTest{
|
|||
"60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54",
|
||||
},
|
||||
{
|
||||
NewSHA256,
|
||||
sha256.New,
|
||||
[]byte{
|
||||
0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
|
||||
0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
|
||||
|
@ -178,11 +182,295 @@ var hmacTests = []hmacTest{
|
|||
"be hashed before being used by the HMAC algorithm."),
|
||||
"9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2",
|
||||
},
|
||||
|
||||
// Tests from http://csrc.nist.gov/groups/ST/toolkit/examples.html
|
||||
// (truncated tag tests are left out)
|
||||
{
|
||||
sha1.New,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
},
|
||||
[]byte("Sample message for keylen=blocklen"),
|
||||
"5fd596ee78d5553c8ff4e72d266dfd192366da29",
|
||||
},
|
||||
{
|
||||
sha1.New,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13,
|
||||
},
|
||||
[]byte("Sample message for keylen<blocklen"),
|
||||
"4c99ff0cb1b31bd33f8431dbaf4d17fcd356a807",
|
||||
},
|
||||
{
|
||||
sha1.New,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
|
||||
0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
|
||||
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
|
||||
0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
|
||||
0x60, 0x61, 0x62, 0x63,
|
||||
},
|
||||
[]byte("Sample message for keylen=blocklen"),
|
||||
"2d51b2f7750e410584662e38f133435f4c4fd42a",
|
||||
},
|
||||
{
|
||||
sha256.New224,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
},
|
||||
[]byte("Sample message for keylen=blocklen"),
|
||||
"c7405e3ae058e8cd30b08b4140248581ed174cb34e1224bcc1efc81b",
|
||||
},
|
||||
{
|
||||
sha256.New224,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b,
|
||||
},
|
||||
[]byte("Sample message for keylen<blocklen"),
|
||||
"e3d249a8cfb67ef8b7a169e9a0a599714a2cecba65999a51beb8fbbe",
|
||||
},
|
||||
{
|
||||
sha256.New224,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
|
||||
0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
|
||||
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
|
||||
0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
|
||||
0x60, 0x61, 0x62, 0x63,
|
||||
},
|
||||
[]byte("Sample message for keylen=blocklen"),
|
||||
"91c52509e5af8531601ae6230099d90bef88aaefb961f4080abc014d",
|
||||
},
|
||||
{
|
||||
sha256.New,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
},
|
||||
[]byte("Sample message for keylen=blocklen"),
|
||||
"8bb9a1db9806f20df7f77b82138c7914d174d59e13dc4d0169c9057b133e1d62",
|
||||
},
|
||||
{
|
||||
sha256.New,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
},
|
||||
[]byte("Sample message for keylen<blocklen"),
|
||||
"a28cf43130ee696a98f14a37678b56bcfcbdd9e5cf69717fecf5480f0ebdf790",
|
||||
},
|
||||
{
|
||||
sha256.New,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
|
||||
0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
|
||||
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
|
||||
0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
|
||||
0x60, 0x61, 0x62, 0x63,
|
||||
},
|
||||
[]byte("Sample message for keylen=blocklen"),
|
||||
"bdccb6c72ddeadb500ae768386cb38cc41c63dbb0878ddb9c7a38a431b78378d",
|
||||
},
|
||||
{
|
||||
sha512.New384,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
|
||||
0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
|
||||
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
|
||||
0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
|
||||
0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67,
|
||||
0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f,
|
||||
0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
|
||||
0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
|
||||
},
|
||||
[]byte("Sample message for keylen=blocklen"),
|
||||
"63c5daa5e651847ca897c95814ab830bededc7d25e83eef9195cd45857a37f448947858f5af50cc2b1b730ddf29671a9",
|
||||
},
|
||||
{
|
||||
sha512.New384,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
},
|
||||
[]byte("Sample message for keylen<blocklen"),
|
||||
"6eb242bdbb582ca17bebfa481b1e23211464d2b7f8c20b9ff2201637b93646af5ae9ac316e98db45d9cae773675eeed0",
|
||||
},
|
||||
{
|
||||
sha512.New384,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
|
||||
0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
|
||||
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
|
||||
0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
|
||||
0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67,
|
||||
0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f,
|
||||
0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
|
||||
0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
|
||||
0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
|
||||
0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
|
||||
0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
|
||||
0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
|
||||
0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
|
||||
0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
|
||||
0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
|
||||
0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
|
||||
0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
|
||||
},
|
||||
[]byte("Sample message for keylen=blocklen"),
|
||||
"5b664436df69b0ca22551231a3f0a3d5b4f97991713cfa84bff4d0792eff96c27dccbbb6f79b65d548b40e8564cef594",
|
||||
},
|
||||
{
|
||||
sha512.New,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
|
||||
0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
|
||||
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
|
||||
0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
|
||||
0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67,
|
||||
0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f,
|
||||
0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
|
||||
0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
|
||||
},
|
||||
[]byte("Sample message for keylen=blocklen"),
|
||||
"fc25e240658ca785b7a811a8d3f7b4ca" +
|
||||
"48cfa26a8a366bf2cd1f836b05fcb024bd36853081811d6c" +
|
||||
"ea4216ebad79da1cfcb95ea4586b8a0ce356596a55fb1347",
|
||||
},
|
||||
{
|
||||
sha512.New,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
},
|
||||
[]byte("Sample message for keylen<blocklen"),
|
||||
"fd44c18bda0bb0a6ce0e82b031bf2818" +
|
||||
"f6539bd56ec00bdc10a8a2d730b3634de2545d639b0f2cf7" +
|
||||
"10d0692c72a1896f1f211c2b922d1a96c392e07e7ea9fedc",
|
||||
},
|
||||
{
|
||||
sha512.New,
|
||||
[]byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
|
||||
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
|
||||
0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
|
||||
0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
|
||||
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
|
||||
0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
|
||||
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
|
||||
0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
|
||||
0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67,
|
||||
0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f,
|
||||
0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
|
||||
0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
|
||||
0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
|
||||
0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
|
||||
0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
|
||||
0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
|
||||
0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
|
||||
0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
|
||||
0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
|
||||
0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
|
||||
0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
|
||||
},
|
||||
[]byte("Sample message for keylen=blocklen"),
|
||||
"d93ec8d2de1ad2a9957cb9b83f14e76a" +
|
||||
"d6b5e0cce285079a127d3b14bccb7aa7286d4ac0d4ce6421" +
|
||||
"5f2bc9e6870b33d97438be4aaa20cda5c5a912b48b8e27f3",
|
||||
},
|
||||
}
|
||||
|
||||
func TestHMAC(t *testing.T) {
|
||||
for i, tt := range hmacTests {
|
||||
h := tt.hash(tt.key)
|
||||
h := New(tt.hash, tt.key)
|
||||
for j := 0; j < 2; j++ {
|
||||
n, err := h.Write(tt.in)
|
||||
if n != len(tt.in) || err != nil {
|
||||
|
|
|
@ -17,6 +17,9 @@ func init() {
|
|||
// The size of an MD4 checksum in bytes.
|
||||
const Size = 16
|
||||
|
||||
// The blocksize of MD4 in bytes.
|
||||
const BlockSize = 64
|
||||
|
||||
const (
|
||||
_Chunk = 64
|
||||
_Init0 = 0x67452301
|
||||
|
@ -51,6 +54,8 @@ func New() hash.Hash {
|
|||
|
||||
func (d *digest) Size() int { return Size }
|
||||
|
||||
func (d *digest) BlockSize() int { return BlockSize }
|
||||
|
||||
func (d *digest) Write(p []byte) (nn int, err error) {
|
||||
nn = len(p)
|
||||
d.len += uint64(nn)
|
||||
|
|
|
@ -17,6 +17,9 @@ func init() {
|
|||
// The size of an MD5 checksum in bytes.
|
||||
const Size = 16
|
||||
|
||||
// The blocksize of MD5 in bytes.
|
||||
const BlockSize = 64
|
||||
|
||||
const (
|
||||
_Chunk = 64
|
||||
_Init0 = 0x67452301
|
||||
|
@ -51,6 +54,8 @@ func New() hash.Hash {
|
|||
|
||||
func (d *digest) Size() int { return Size }
|
||||
|
||||
func (d *digest) BlockSize() int { return BlockSize }
|
||||
|
||||
func (d *digest) Write(p []byte) (nn int, err error) {
|
||||
nn = len(p)
|
||||
d.len += uint64(nn)
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ocsp
|
||||
|
||||
import (
|
||||
|
|
|
@ -53,3 +53,7 @@ func (cth *canonicalTextHash) Reset() {
|
|||
func (cth *canonicalTextHash) Size() int {
|
||||
return cth.h.Size()
|
||||
}
|
||||
|
||||
func (cth *canonicalTextHash) BlockSize() int {
|
||||
return cth.h.BlockSize()
|
||||
}
|
||||
|
|
|
@ -29,6 +29,10 @@ func (r recordingHash) Size() int {
|
|||
panic("shouldn't be called")
|
||||
}
|
||||
|
||||
func (r recordingHash) BlockSize() int {
|
||||
panic("shouldn't be called")
|
||||
}
|
||||
|
||||
func testCanonicalText(t *testing.T, input, expected string) {
|
||||
r := recordingHash{bytes.NewBuffer(nil)}
|
||||
c := NewCanonicalTextHash(r)
|
||||
|
|
|
@ -55,6 +55,8 @@ func New() hash.Hash {
|
|||
|
||||
func (d *digest) Size() int { return Size }
|
||||
|
||||
func (d *digest) BlockSize() int { return BlockSize }
|
||||
|
||||
func (d *digest) Write(p []byte) (nn int, err error) {
|
||||
nn = len(p)
|
||||
d.tc += uint64(nn)
|
||||
|
|
|
@ -17,6 +17,9 @@ func init() {
|
|||
// The size of a SHA1 checksum in bytes.
|
||||
const Size = 20
|
||||
|
||||
// The blocksize of SHA1 in bytes.
|
||||
const BlockSize = 64
|
||||
|
||||
const (
|
||||
_Chunk = 64
|
||||
_Init0 = 0x67452301
|
||||
|
@ -53,6 +56,8 @@ func New() hash.Hash {
|
|||
|
||||
func (d *digest) Size() int { return Size }
|
||||
|
||||
func (d *digest) BlockSize() int { return BlockSize }
|
||||
|
||||
func (d *digest) Write(p []byte) (nn int, err error) {
|
||||
nn = len(p)
|
||||
d.len += uint64(nn)
|
||||
|
|
|
@ -22,6 +22,9 @@ const Size = 32
|
|||
// The size of a SHA224 checksum in bytes.
|
||||
const Size224 = 28
|
||||
|
||||
// The blocksize of SHA256 and SHA224 in bytes.
|
||||
const BlockSize = 64
|
||||
|
||||
const (
|
||||
_Chunk = 64
|
||||
_Init0 = 0x6A09E667
|
||||
|
@ -97,6 +100,8 @@ func (d *digest) Size() int {
|
|||
return Size224
|
||||
}
|
||||
|
||||
func (d *digest) BlockSize() int { return BlockSize }
|
||||
|
||||
func (d *digest) Write(p []byte) (nn int, err error) {
|
||||
nn = len(p)
|
||||
d.len += uint64(nn)
|
||||
|
|
|
@ -22,6 +22,9 @@ const Size = 64
|
|||
// The size of a SHA384 checksum in bytes.
|
||||
const Size384 = 48
|
||||
|
||||
// The blocksize of SHA512 and SHA384 in bytes.
|
||||
const BlockSize = 128
|
||||
|
||||
const (
|
||||
_Chunk = 128
|
||||
_Init0 = 0x6a09e667f3bcc908
|
||||
|
@ -97,6 +100,8 @@ func (d *digest) Size() int {
|
|||
return Size384
|
||||
}
|
||||
|
||||
func (d *digest) BlockSize() int { return BlockSize }
|
||||
|
||||
func (d *digest) Write(p []byte) (nn int, err error) {
|
||||
nn = len(p)
|
||||
d.len += uint64(nn)
|
||||
|
|
|
@ -91,7 +91,7 @@ func macSHA1(version uint16, key []byte) macFunction {
|
|||
copy(mac.key, key)
|
||||
return mac
|
||||
}
|
||||
return tls10MAC{hmac.NewSHA1(key)}
|
||||
return tls10MAC{hmac.New(sha1.New, key)}
|
||||
}
|
||||
|
||||
type macFunction interface {
|
||||
|
|
|
@ -15,6 +15,7 @@ import (
|
|||
"io"
|
||||
"net"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// A Conn represents a secured connection.
|
||||
|
@ -86,24 +87,23 @@ func (c *Conn) RemoteAddr() net.Addr {
|
|||
return c.conn.RemoteAddr()
|
||||
}
|
||||
|
||||
// SetTimeout sets the read deadline associated with the connection.
|
||||
// SetDeadline sets the read deadline associated with the connection.
|
||||
// There is no write deadline.
|
||||
func (c *Conn) SetTimeout(nsec int64) error {
|
||||
return c.conn.SetTimeout(nsec)
|
||||
// A zero value for t means Read will not time out.
|
||||
func (c *Conn) SetDeadline(t time.Time) error {
|
||||
return c.conn.SetDeadline(t)
|
||||
}
|
||||
|
||||
// SetReadTimeout sets the time (in nanoseconds) that
|
||||
// Read will wait for data before returning a net.Error
|
||||
// with Timeout() == true.
|
||||
// Setting nsec == 0 (the default) disables the deadline.
|
||||
func (c *Conn) SetReadTimeout(nsec int64) error {
|
||||
return c.conn.SetReadTimeout(nsec)
|
||||
// SetReadDeadline sets the read deadline on the underlying connection.
|
||||
// A zero value for t means Read will not time out.
|
||||
func (c *Conn) SetReadDeadline(t time.Time) error {
|
||||
return c.conn.SetReadDeadline(t)
|
||||
}
|
||||
|
||||
// SetWriteTimeout exists to satisfy the net.Conn interface
|
||||
// SetWriteDeadline exists to satisfy the net.Conn interface
|
||||
// but is not implemented by TLS. It always returns an error.
|
||||
func (c *Conn) SetWriteTimeout(nsec int64) error {
|
||||
return errors.New("TLS does not support SetWriteTimeout")
|
||||
func (c *Conn) SetWriteDeadline(t time.Time) error {
|
||||
return errors.New("TLS does not support SetWriteDeadline")
|
||||
}
|
||||
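Since deadlines are now absolute time.Time values rather than nanosecond counts, a read timeout is expressed by adding a duration to the current time; a hypothetical helper (not part of the patch):

package tlsutil

import (
	"crypto/tls"
	"time"
)

// readWithTimeout is a hypothetical helper: it reads once from a TLS
// connection under the new absolute-deadline API.
func readWithTimeout(conn *tls.Conn, d time.Duration, buf []byte) (int, error) {
	if err := conn.SetReadDeadline(time.Now().Add(d)); err != nil {
		return 0, err
	}
	defer conn.SetReadDeadline(time.Time{}) // the zero time means Read will not time out
	return conn.Read(buf)
}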
|
||||
// A halfConn represents one direction of the record layer
|
||||
|
@ -744,7 +744,7 @@ func (c *Conn) Write(b []byte) (n int, err error) {
|
|||
}
|
||||
|
||||
// Read can be made to time out and return a net.Error with Timeout() == true
|
||||
// after a fixed time limit; see SetTimeout and SetReadTimeout.
|
||||
// after a fixed time limit; see SetDeadline and SetReadDeadline.
|
||||
func (c *Conn) Read(b []byte) (n int, err error) {
|
||||
if err = c.Handshake(); err != nil {
|
||||
return
|
||||
|
|
|
@ -105,7 +105,7 @@ func md5SHA1Hash(slices ...[]byte) []byte {
|
|||
// pre-master secret is then calculated using ECDH.
|
||||
type ecdheRSAKeyAgreement struct {
|
||||
privateKey []byte
|
||||
curve *elliptic.Curve
|
||||
curve elliptic.Curve
|
||||
x, y *big.Int
|
||||
}
|
||||
|
||||
|
@ -132,11 +132,11 @@ Curve:
|
|||
|
||||
var x, y *big.Int
|
||||
var err error
|
||||
ka.privateKey, x, y, err = ka.curve.GenerateKey(config.rand())
|
||||
ka.privateKey, x, y, err = elliptic.GenerateKey(ka.curve, config.rand())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ecdhePublic := ka.curve.Marshal(x, y)
|
||||
ecdhePublic := elliptic.Marshal(ka.curve, x, y)
|
||||
|
||||
// http://tools.ietf.org/html/rfc4492#section-5.4
|
||||
serverECDHParams := make([]byte, 1+2+1+len(ecdhePublic))
|
||||
|
@ -167,12 +167,12 @@ func (ka *ecdheRSAKeyAgreement) processClientKeyExchange(config *Config, ckx *cl
|
|||
if len(ckx.ciphertext) == 0 || int(ckx.ciphertext[0]) != len(ckx.ciphertext)-1 {
|
||||
return nil, errors.New("bad ClientKeyExchange")
|
||||
}
|
||||
x, y := ka.curve.Unmarshal(ckx.ciphertext[1:])
|
||||
x, y := elliptic.Unmarshal(ka.curve, ckx.ciphertext[1:])
|
||||
if x == nil {
|
||||
return nil, errors.New("bad ClientKeyExchange")
|
||||
}
|
||||
x, _ = ka.curve.ScalarMult(x, y, ka.privateKey)
|
||||
preMasterSecret := make([]byte, (ka.curve.BitSize+7)>>3)
|
||||
preMasterSecret := make([]byte, (ka.curve.Params().BitSize+7)>>3)
|
||||
xBytes := x.Bytes()
|
||||
copy(preMasterSecret[len(preMasterSecret)-len(xBytes):], xBytes)
|
||||
|
||||
|
@ -205,7 +205,7 @@ func (ka *ecdheRSAKeyAgreement) processServerKeyExchange(config *Config, clientH
|
|||
if publicLen+4 > len(skx.key) {
|
||||
return errServerKeyExchange
|
||||
}
|
||||
ka.x, ka.y = ka.curve.Unmarshal(skx.key[4 : 4+publicLen])
|
||||
ka.x, ka.y = elliptic.Unmarshal(ka.curve, skx.key[4:4+publicLen])
|
||||
if ka.x == nil {
|
||||
return errServerKeyExchange
|
||||
}
|
||||
|
@ -229,16 +229,16 @@ func (ka *ecdheRSAKeyAgreement) generateClientKeyExchange(config *Config, client
|
|||
if ka.curve == nil {
|
||||
return nil, nil, errors.New("missing ServerKeyExchange message")
|
||||
}
|
||||
priv, mx, my, err := ka.curve.GenerateKey(config.rand())
|
||||
priv, mx, my, err := elliptic.GenerateKey(ka.curve, config.rand())
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
x, _ := ka.curve.ScalarMult(ka.x, ka.y, priv)
|
||||
preMasterSecret := make([]byte, (ka.curve.BitSize+7)>>3)
|
||||
preMasterSecret := make([]byte, (ka.curve.Params().BitSize+7)>>3)
|
||||
xBytes := x.Bytes()
|
||||
copy(preMasterSecret[len(preMasterSecret)-len(xBytes):], xBytes)
|
||||
|
||||
serialized := ka.curve.Marshal(mx, my)
|
||||
serialized := elliptic.Marshal(ka.curve, mx, my)
|
||||
|
||||
ckx := new(clientKeyExchangeMsg)
|
||||
ckx.ciphertext = make([]byte, 1+len(serialized))
|
||||
|
|
|
@ -17,6 +17,7 @@ var certFiles = []string{
|
|||
"/etc/pki/tls/certs/ca-bundle.crt", // Fedora/RHEL
|
||||
"/etc/ssl/ca-bundle.pem", // OpenSUSE
|
||||
"/etc/ssl/cert.pem", // OpenBSD
|
||||
"/usr/local/share/certs/ca-root-nss.crt", // FreeBSD
|
||||
}
|
||||
|
||||
func initDefaultRoots() {
|
||||
|
|
|
@ -899,6 +899,14 @@ var (
|
|||
oidRSA = []int{1, 2, 840, 113549, 1, 1, 1}
|
||||
)
|
||||
|
||||
func subjectBytes(cert *Certificate) ([]byte, error) {
|
||||
if len(cert.RawSubject) > 0 {
|
||||
return cert.RawSubject, nil
|
||||
}
|
||||
|
||||
return asn1.Marshal(cert.Subject.ToRDNSequence())
|
||||
}
|
||||
|
||||
// CreateCertificate creates a new certificate based on a template. The
|
||||
// following members of template are used: SerialNumber, Subject, NotBefore,
|
||||
// NotAfter, KeyUsage, BasicConstraintsValid, IsCA, MaxPathLen, SubjectKeyId,
|
||||
|
@ -909,10 +917,23 @@ var (
|
|||
// signee and priv is the private key of the signer.
|
||||
//
|
||||
// The returned slice is the certificate in DER encoding.
|
||||
func CreateCertificate(rand io.Reader, template, parent *Certificate, pub *rsa.PublicKey, priv *rsa.PrivateKey) (cert []byte, err error) {
|
||||
//
|
||||
// The only supported key type is RSA (*rsa.PublicKey for pub, *rsa.PrivateKey
|
||||
// for priv).
|
||||
func CreateCertificate(rand io.Reader, template, parent *Certificate, pub interface{}, priv interface{}) (cert []byte, err error) {
|
||||
rsaPub, ok := pub.(*rsa.PublicKey)
|
||||
if !ok {
|
||||
return nil, errors.New("x509: non-RSA public keys not supported")
|
||||
}
|
||||
|
||||
rsaPriv, ok := priv.(*rsa.PrivateKey)
|
||||
if !ok {
|
||||
return nil, errors.New("x509: non-RSA private keys not supported")
|
||||
}
|
||||
|
||||
asn1PublicKey, err := asn1.Marshal(rsaPublicKey{
|
||||
N: pub.N,
|
||||
E: pub.E,
|
||||
N: rsaPub.N,
|
||||
E: rsaPub.E,
|
||||
})
|
||||
if err != nil {
|
||||
return
|
||||
|
@ -927,16 +948,12 @@ func CreateCertificate(rand io.Reader, template, parent *Certificate, pub *rsa.P
|
|||
return
|
||||
}
|
||||
|
||||
var asn1Issuer []byte
|
||||
if len(parent.RawSubject) > 0 {
|
||||
asn1Issuer = parent.RawSubject
|
||||
} else {
|
||||
if asn1Issuer, err = asn1.Marshal(parent.Subject.ToRDNSequence()); err != nil {
|
||||
asn1Issuer, err := subjectBytes(parent)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
asn1Subject, err := asn1.Marshal(template.Subject.ToRDNSequence())
|
||||
asn1Subject, err := subjectBytes(template)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
@ -964,7 +981,7 @@ func CreateCertificate(rand io.Reader, template, parent *Certificate, pub *rsa.P
|
|||
h.Write(tbsCertContents)
|
||||
digest := h.Sum(nil)
|
||||
|
||||
signature, err := rsa.SignPKCS1v15(rand, priv, crypto.SHA1, digest)
|
||||
signature, err := rsa.SignPKCS1v15(rand, rsaPriv, crypto.SHA1, digest)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
@ -1011,7 +1028,13 @@ func ParseDERCRL(derBytes []byte) (certList *pkix.CertificateList, err error) {
|
|||
|
||||
// CreateCRL returns a DER encoded CRL, signed by this Certificate, that
|
||||
// contains the given list of revoked certificates.
|
||||
func (c *Certificate) CreateCRL(rand io.Reader, priv *rsa.PrivateKey, revokedCerts []pkix.RevokedCertificate, now, expiry time.Time) (crlBytes []byte, err error) {
|
||||
//
|
||||
// The only supported key type is RSA (*rsa.PrivateKey for priv).
|
||||
func (c *Certificate) CreateCRL(rand io.Reader, priv interface{}, revokedCerts []pkix.RevokedCertificate, now, expiry time.Time) (crlBytes []byte, err error) {
|
||||
rsaPriv, ok := priv.(*rsa.PrivateKey)
|
||||
if !ok {
|
||||
return nil, errors.New("x509: non-RSA private keys not supported")
|
||||
}
|
||||
tbsCertList := pkix.TBSCertificateList{
|
||||
Version: 2,
|
||||
Signature: pkix.AlgorithmIdentifier{
|
||||
|
@ -1032,7 +1055,7 @@ func (c *Certificate) CreateCRL(rand io.Reader, priv *rsa.PrivateKey, revokedCer
|
|||
h.Write(tbsCertListContents)
|
||||
digest := h.Sum(nil)
|
||||
|
||||
signature, err := rsa.SignPKCS1v15(rand, priv, crypto.SHA1, digest)
|
||||
signature, err := rsa.SignPKCS1v15(rand, rsaPriv, crypto.SHA1, digest)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
|
|
@ -7,8 +7,8 @@
|
|||
package sql
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"errors"
|
||||
"exp/sql/driver"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
|
@ -5,6 +5,7 @@
|
|||
package sql
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
@ -154,8 +155,8 @@ func TestConversions(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestNullableString(t *testing.T) {
|
||||
var ns NullableString
|
||||
func TestNullString(t *testing.T) {
|
||||
var ns NullString
|
||||
convertAssign(&ns, []byte("foo"))
|
||||
if !ns.Valid {
|
||||
t.Errorf("expecting not null")
|
||||
|
@ -171,3 +172,35 @@ func TestNullableString(t *testing.T) {
|
|||
t.Errorf("expecting blank on nil; got %q", ns.String)
|
||||
}
|
||||
}
|
||||
|
||||
type valueConverterTest struct {
|
||||
c driver.ValueConverter
|
||||
in, out interface{}
|
||||
err string
|
||||
}
|
||||
|
||||
var valueConverterTests = []valueConverterTest{
|
||||
{driver.DefaultParameterConverter, NullString{"hi", true}, "hi", ""},
|
||||
{driver.DefaultParameterConverter, NullString{"", false}, nil, ""},
|
||||
}
|
||||
|
||||
func TestValueConverters(t *testing.T) {
|
||||
for i, tt := range valueConverterTests {
|
||||
out, err := tt.c.ConvertValue(tt.in)
|
||||
goterr := ""
|
||||
if err != nil {
|
||||
goterr = err.Error()
|
||||
}
|
||||
if goterr != tt.err {
|
||||
t.Errorf("test %d: %s(%T(%v)) error = %q; want error = %q",
|
||||
i, tt.c, tt.in, tt.in, goterr, tt.err)
|
||||
}
|
||||
if tt.err != "" {
|
||||
continue
|
||||
}
|
||||
if !reflect.DeepEqual(out, tt.out) {
|
||||
t.Errorf("test %d: %s(%T(%v)) = %v (%T); want %v (%T)",
|
||||
i, tt.c, tt.in, tt.in, out, out, tt.out, tt.out)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -32,6 +32,15 @@ type ValueConverter interface {
|
|||
ConvertValue(v interface{}) (interface{}, error)
|
||||
}
|
||||
|
||||
// SubsetValuer is the interface providing the SubsetValue method.
|
||||
//
|
||||
// Types implementing the SubsetValuer interface are able to convert
|
||||
// themselves to one of the driver's allowed subset values.
|
||||
type SubsetValuer interface {
|
||||
// SubsetValue returns a driver parameter subset value.
|
||||
SubsetValue() (interface{}, error)
|
||||
}
|
||||
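As an illustrative sketch (hypothetical type, not part of the patch), a value type can opt into this hook so that DefaultParameterConverter, shown further down, forwards its subset value unchanged:

// NullInt is a hypothetical nullable integer whose driver
// representation is either an int64 or nil.
type NullInt struct {
	Int64 int64
	Valid bool
}

// SubsetValue implements SubsetValuer.
func (n NullInt) SubsetValue() (interface{}, error) {
	if !n.Valid {
		return nil, nil
	}
	return n.Int64, nil // int64 is one of the allowed parameter subset types
}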
|
||||
// Bool is a ValueConverter that converts input values to bools.
|
||||
//
|
||||
// The conversion rules are:
|
||||
|
@ -136,6 +145,32 @@ func (stringType) ConvertValue(v interface{}) (interface{}, error) {
|
|||
return fmt.Sprintf("%v", v), nil
|
||||
}
|
||||
|
||||
// Null is a type that implements ValueConverter by allowing nil
|
||||
// values but otherwise delegating to another ValueConverter.
|
||||
type Null struct {
|
||||
Converter ValueConverter
|
||||
}
|
||||
|
||||
func (n Null) ConvertValue(v interface{}) (interface{}, error) {
|
||||
if v == nil {
|
||||
return nil, nil
|
||||
}
|
||||
return n.Converter.ConvertValue(v)
|
||||
}
|
||||
|
||||
// NotNull is a type that implements ValueConverter by disallowing nil
|
||||
// values but otherwise delegating to another ValueConverter.
|
||||
type NotNull struct {
|
||||
Converter ValueConverter
|
||||
}
|
||||
|
||||
func (n NotNull) ConvertValue(v interface{}) (interface{}, error) {
|
||||
if v == nil {
|
||||
return nil, fmt.Errorf("nil value not allowed")
|
||||
}
|
||||
return n.Converter.ConvertValue(v)
|
||||
}
|
||||
|
||||
// IsParameterSubsetType reports whether v is of a valid type for a
|
||||
// parameter. These types are:
|
||||
//
|
||||
|
@ -200,6 +235,17 @@ func (defaultConverter) ConvertValue(v interface{}) (interface{}, error) {
|
|||
return v, nil
|
||||
}
|
||||
|
||||
if svi, ok := v.(SubsetValuer); ok {
|
||||
sv, err := svi.SubsetValue()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !IsParameterSubsetType(sv) {
|
||||
return nil, fmt.Errorf("non-subset type %T returned from SubsetValue", sv)
|
||||
}
|
||||
return sv, nil
|
||||
}
|
||||
|
||||
rv := reflect.ValueOf(v)
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
|
@ -215,5 +261,5 @@ func (defaultConverter) ConvertValue(v interface{}) (interface{}, error) {
|
|||
case reflect.Float32, reflect.Float64:
|
||||
return rv.Float(), nil
|
||||
}
|
||||
return nil, fmt.Errorf("unsupported type %s", rv.Kind())
|
||||
return nil, fmt.Errorf("unsupported type %T, a %s", v, rv.Kind())
|
||||
}
|
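A minimal sketch of how the Null and NotNull wrappers added above delegate conversion; the values are illustrative and only the database/sql/driver API shown in this change is assumed.

package main

import (
	"database/sql/driver"
	"fmt"
)

func main() {
	// Null passes nil through untouched and delegates everything else.
	nullable := driver.Null{Converter: driver.String}
	v, err := nullable.ConvertValue(nil)
	fmt.Println(v, err) // <nil> <nil>

	// NotNull rejects nil before the wrapped converter ever sees it.
	strict := driver.NotNull{Converter: driver.String}
	_, err = strict.ConvertValue(nil)
	fmt.Println(err) // nil value not allowed
}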
|
@ -5,6 +5,7 @@
|
|||
package sql
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
|
@ -13,8 +14,6 @@ import (
|
|||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"exp/sql/driver"
|
||||
)
|
||||
|
||||
var _ = log.Printf
|
||||
|
@ -589,7 +588,9 @@ func converterForType(typ string) driver.ValueConverter {
|
|||
case "int32":
|
||||
return driver.Int32
|
||||
case "string":
|
||||
return driver.String
|
||||
return driver.NotNull{driver.String}
|
||||
case "nullstring":
|
||||
return driver.Null{driver.String}
|
||||
case "datetime":
|
||||
return driver.DefaultParameterConverter
|
||||
}
|
|
@ -7,12 +7,11 @@
|
|||
package sql
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"sync"
|
||||
|
||||
"exp/sql/driver"
|
||||
)
|
||||
|
||||
var drivers = make(map[string]driver.Driver)
|
||||
|
@ -30,11 +29,16 @@ func Register(name string, driver driver.Driver) {
|
|||
drivers[name] = driver
|
||||
}
|
||||
|
||||
// NullableString represents a string that may be null.
|
||||
// NullableString implements the ScannerInto interface so
|
||||
// RawBytes is a byte slice that holds a reference to memory owned by
|
||||
// the database itself. After a Scan into a RawBytes, the slice is only
|
||||
// valid until the next call to Next, Scan, or Close.
|
||||
type RawBytes []byte
|
||||
|
||||
// NullString represents a string that may be null.
|
||||
// NullString implements the ScannerInto interface so
|
||||
// it can be used as a scan destination:
|
||||
//
|
||||
// var s NullableString
|
||||
// var s NullString
|
||||
// err := db.QueryRow("SELECT name FROM foo WHERE id=?", id).Scan(&s)
|
||||
// ...
|
||||
// if s.Valid {
|
||||
|
@ -44,19 +48,27 @@ func Register(name string, driver driver.Driver) {
|
|||
// }
|
||||
//
|
||||
// TODO(bradfitz): add other types.
|
||||
type NullableString struct {
|
||||
type NullString struct {
|
||||
String string
|
||||
Valid bool // Valid is true if String is not NULL
|
||||
}
|
||||
|
||||
// ScanInto implements the ScannerInto interface.
|
||||
func (ms *NullableString) ScanInto(value interface{}) error {
|
||||
func (ns *NullString) ScanInto(value interface{}) error {
|
||||
if value == nil {
|
||||
ms.String, ms.Valid = "", false
|
||||
ns.String, ns.Valid = "", false
|
||||
return nil
|
||||
}
|
||||
ms.Valid = true
|
||||
return convertAssign(&ms.String, value)
|
||||
ns.Valid = true
|
||||
return convertAssign(&ns.String, value)
|
||||
}
|
||||
|
||||
// SubsetValue implements the driver SubsetValuer interface.
|
||||
func (ns NullString) SubsetValue() (interface{}, error) {
|
||||
if !ns.Valid {
|
||||
return nil, nil
|
||||
}
|
||||
return ns.String, nil
|
||||
}
|
||||
|
||||
// ScannerInto is an interface used by Scan.
|
||||
|
@ -525,6 +537,27 @@ func (s *Stmt) Exec(args ...interface{}) (Result, error) {
|
|||
// Convert args to subset types.
|
||||
if cc, ok := si.(driver.ColumnConverter); ok {
|
||||
for n, arg := range args {
|
||||
// First, see if the value itself knows how to convert
|
||||
// itself to a driver type. For example, a NullString
|
||||
// struct changing into a string or nil.
|
||||
if svi, ok := arg.(driver.SubsetValuer); ok {
|
||||
sv, err := svi.SubsetValue()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("sql: argument index %d from SubsetValue: %v", n, err)
|
||||
}
|
||||
if !driver.IsParameterSubsetType(sv) {
|
||||
return nil, fmt.Errorf("sql: argument index %d: non-subset type %T returned from SubsetValue", n, sv)
|
||||
}
|
||||
arg = sv
|
||||
}
|
||||
|
||||
// Second, ask the column to sanity check itself. For
|
||||
// example, drivers might use this to make sure that
|
||||
// an int64 values being inserted into a 16-bit
|
||||
// integer field is in range (before getting
|
||||
// truncated), or that a nil can't go into a NOT NULL
|
||||
// column before going across the network to get the
|
||||
// same error.
|
||||
args[n], err = cc.ColumnConverter(n).ConvertValue(arg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("sql: converting Exec argument #%d's type: %v", n, err)
|
||||
|
@ -760,9 +793,13 @@ func (rs *Rows) Columns() ([]string, error) {
|
|||
}
|
||||
|
||||
// Scan copies the columns in the current row into the values pointed
|
||||
// at by dest. If dest contains pointers to []byte, the slices should
|
||||
// not be modified and should only be considered valid until the next
|
||||
// call to Next or Scan.
|
||||
// at by dest.
|
||||
//
|
||||
// If an argument has type *[]byte, Scan saves in that argument a copy
|
||||
// of the corresponding data. The copy is owned by the caller and can
|
||||
// be modified and held indefinitely. The copy can be avoided by using
|
||||
// an argument of type *RawBytes instead; see the documentation for
|
||||
// RawBytes for restrictions on its use.
|
||||
func (rs *Rows) Scan(dest ...interface{}) error {
|
||||
if rs.closed {
|
||||
return errors.New("sql: Rows closed")
|
||||
|
@ -782,6 +819,18 @@ func (rs *Rows) Scan(dest ...interface{}) error {
|
|||
return fmt.Errorf("sql: Scan error on column index %d: %v", i, err)
|
||||
}
|
||||
}
|
||||
for _, dp := range dest {
|
||||
b, ok := dp.(*[]byte)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
if _, ok = dp.(*RawBytes); ok {
|
||||
continue
|
||||
}
|
||||
clone := make([]byte, len(*b))
|
||||
copy(clone, *b)
|
||||
*b = clone
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
|
@ -838,6 +887,9 @@ func (r *Row) Scan(dest ...interface{}) error {
|
|||
// they were obtained from the network anyway) But for now we
|
||||
// don't care.
|
||||
for _, dp := range dest {
|
||||
if _, ok := dp.(*RawBytes); ok {
|
||||
return errors.New("sql: RawBytes isn't allowed on Row.Scan")
|
||||
}
|
||||
b, ok := dp.(*[]byte)
|
||||
if !ok {
|
||||
continue
|
|
@ -76,7 +76,7 @@ func TestQuery(t *testing.T) {
|
|||
{age: 3, name: "Chris"},
|
||||
}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Logf(" got: %#v\nwant: %#v", got, want)
|
||||
t.Errorf("mismatch.\n got: %#v\nwant: %#v", got, want)
|
||||
}
|
||||
|
||||
// And verify that the final rows.Next() call, which hit EOF,
|
||||
|
@ -86,6 +86,43 @@ func TestQuery(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestByteOwnership(t *testing.T) {
|
||||
db := newTestDB(t, "people")
|
||||
defer closeDB(t, db)
|
||||
rows, err := db.Query("SELECT|people|name,photo|")
|
||||
if err != nil {
|
||||
t.Fatalf("Query: %v", err)
|
||||
}
|
||||
type row struct {
|
||||
name []byte
|
||||
photo RawBytes
|
||||
}
|
||||
got := []row{}
|
||||
for rows.Next() {
|
||||
var r row
|
||||
err = rows.Scan(&r.name, &r.photo)
|
||||
if err != nil {
|
||||
t.Fatalf("Scan: %v", err)
|
||||
}
|
||||
got = append(got, r)
|
||||
}
|
||||
corruptMemory := []byte("\xffPHOTO")
|
||||
want := []row{
|
||||
{name: []byte("Alice"), photo: corruptMemory},
|
||||
{name: []byte("Bob"), photo: corruptMemory},
|
||||
{name: []byte("Chris"), photo: corruptMemory},
|
||||
}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("mismatch.\n got: %#v\nwant: %#v", got, want)
|
||||
}
|
||||
|
||||
var photo RawBytes
|
||||
err = db.QueryRow("SELECT|people|photo|name=?", "Alice").Scan(&photo)
|
||||
if err == nil {
|
||||
t.Error("want error scanning into RawBytes from QueryRow")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRowsColumns(t *testing.T) {
|
||||
db := newTestDB(t, "people")
|
||||
defer closeDB(t, db)
|
||||
|
@ -300,6 +337,68 @@ func TestQueryRowClosingStmt(t *testing.T) {
|
|||
}
|
||||
fakeConn := db.freeConn[0].(*fakeConn)
|
||||
if made, closed := fakeConn.stmtsMade, fakeConn.stmtsClosed; made != closed {
|
||||
t.Logf("statement close mismatch: made %d, closed %d", made, closed)
|
||||
t.Errorf("statement close mismatch: made %d, closed %d", made, closed)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNullStringParam(t *testing.T) {
|
||||
db := newTestDB(t, "")
|
||||
defer closeDB(t, db)
|
||||
exec(t, db, "CREATE|t|id=int32,name=string,favcolor=nullstring")
|
||||
|
||||
// Inserts with db.Exec:
|
||||
exec(t, db, "INSERT|t|id=?,name=?,favcolor=?", 1, "alice", NullString{"aqua", true})
|
||||
exec(t, db, "INSERT|t|id=?,name=?,favcolor=?", 2, "bob", NullString{"brown", false})
|
||||
|
||||
_, err := db.Exec("INSERT|t|id=?,name=?,favcolor=?", 999, nil, nil)
|
||||
if err == nil {
|
||||
// TODO: this test fails, but it's just because
|
||||
// fakeConn implements the optional Execer interface,
|
||||
// so arguably this is the correct behavior. But
|
||||
// maybe I should flesh out the fakeConn.Exec
|
||||
// implementation so this properly fails.
|
||||
// t.Errorf("expected error inserting nil name with Exec")
|
||||
}
|
||||
|
||||
// Inserts with a prepared statement:
|
||||
stmt, err := db.Prepare("INSERT|t|id=?,name=?,favcolor=?")
|
||||
if err != nil {
|
||||
t.Fatalf("prepare: %v", err)
|
||||
}
|
||||
if _, err := stmt.Exec(3, "chris", "chartreuse"); err != nil {
|
||||
t.Errorf("exec insert chris: %v", err)
|
||||
}
|
||||
if _, err := stmt.Exec(4, "dave", NullString{"darkred", true}); err != nil {
|
||||
t.Errorf("exec insert dave: %v", err)
|
||||
}
|
||||
if _, err := stmt.Exec(5, "eleanor", NullString{"eel", false}); err != nil {
|
||||
t.Errorf("exec insert dave: %v", err)
|
||||
}
|
||||
|
||||
// Can't put null name into non-nullstring column,
|
||||
if _, err := stmt.Exec(5, NullString{"", false}, nil); err == nil {
|
||||
t.Errorf("expected error inserting nil name with prepared statement Exec")
|
||||
}
|
||||
|
||||
type nameColor struct {
|
||||
name string
|
||||
favColor NullString
|
||||
}
|
||||
|
||||
wantMap := map[int]nameColor{
|
||||
1: nameColor{"alice", NullString{"aqua", true}},
|
||||
2: nameColor{"bob", NullString{"", false}},
|
||||
3: nameColor{"chris", NullString{"chartreuse", true}},
|
||||
4: nameColor{"dave", NullString{"darkred", true}},
|
||||
5: nameColor{"eleanor", NullString{"", false}},
|
||||
}
|
||||
for id, want := range wantMap {
|
||||
var got nameColor
|
||||
if err := db.QueryRow("SELECT|t|name,favcolor|id=?", id).Scan(&got.name, &got.favColor); err != nil {
|
||||
t.Errorf("id=%d Scan: %v", id, err)
|
||||
}
|
||||
if got != want {
|
||||
t.Errorf("id=%d got %#v, want %#v", id, got, want)
|
||||
}
|
||||
}
|
||||
}
|
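A hedged usage sketch of the renamed NullString as both a scan destination and a statement parameter; the table, column, and helper names here are illustrative, not taken from the diff.

package example

import (
	"database/sql"
	"fmt"
	"log"
)

// lookupColor assumes an open *sql.DB and a column "favcolor" that may be NULL.
func lookupColor(db *sql.DB, id int) {
	var favColor sql.NullString
	if err := db.QueryRow("SELECT favcolor FROM t WHERE id=?", id).Scan(&favColor); err != nil {
		log.Fatal(err)
	}
	if favColor.Valid {
		fmt.Println("favcolor:", favColor.String)
	} else {
		fmt.Println("favcolor is NULL")
	}
	// As a parameter, an invalid NullString is sent as NULL (via SubsetValue).
	if _, err := db.Exec("UPDATE t SET favcolor=? WHERE id=?", sql.NullString{}, id); err != nil {
		log.Fatal(err)
	}
}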
|
@ -1,3 +1,7 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package gob
|
||||
|
||||
// This file is not normally included in the gob package. Used only for debugging the package itself.
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
// Need to compile package gob with debug.go to build this program.
|
||||
|
|
|
@ -39,6 +39,8 @@ import (
|
|||
//
|
||||
// String values encode as JSON strings, with each invalid UTF-8 sequence
|
||||
// replaced by the encoding of the Unicode replacement character U+FFFD.
|
||||
// The angle brackets "<" and ">" are escaped to "\u003c" and "\u003e"
|
||||
// to keep some browsers from misinterpreting JSON output as HTML.
|
||||
//
|
||||
// Array and slice values encode as JSON arrays, except that
|
||||
// []byte encodes as a base64-encoded string.
|
||||
|
@ -77,7 +79,8 @@ import (
|
|||
// Int64String int64 `json:",string"`
|
||||
//
|
||||
// The key name will be used if it's a non-empty string consisting of
|
||||
// only Unicode letters, digits, dollar signs, hyphens, and underscores.
|
||||
// only Unicode letters, digits, dollar signs, percent signs, hyphens,
|
||||
// underscores and slashes.
|
||||
//
|
||||
// Map values encode as JSON objects.
|
||||
// The map's key type must be string; the object keys are used directly
|
||||
|
@ -417,10 +420,15 @@ func isValidTag(s string) bool {
|
|||
return false
|
||||
}
|
||||
for _, c := range s {
|
||||
if c != '$' && c != '-' && c != '_' && !unicode.IsLetter(c) && !unicode.IsDigit(c) {
|
||||
switch c {
|
||||
case '$', '-', '_', '/', '%':
|
||||
// Acceptable
|
||||
default:
|
||||
if !unicode.IsLetter(c) && !unicode.IsDigit(c) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
|
|
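A short sketch of the relaxed tag rule exercised by the test change below: '/' and '%' are now accepted in a JSON key name (the struct here is illustrative; see golang.org/issue/2718).

package main

import (
	"encoding/json"
	"fmt"
)

type resource struct {
	// Before this change the tag was rejected and the field name was used instead.
	Kind string `json:"text/html%"`
}

func main() {
	b, _ := json.Marshal(resource{Kind: "page"})
	fmt.Println(string(b)) // {"text/html%":"page"}
}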
|
@ -9,7 +9,7 @@ import (
|
|||
)
|
||||
|
||||
type basicLatin2xTag struct {
|
||||
V string `json:"$-"`
|
||||
V string `json:"$%-/"`
|
||||
}
|
||||
|
||||
type basicLatin3xTag struct {
|
||||
|
@ -36,6 +36,10 @@ type miscPlaneTag struct {
|
|||
V string `json:"色は匂へど"`
|
||||
}
|
||||
|
||||
type percentSlashTag struct {
|
||||
V string `json:"text/html%"` // http://golang.org/issue/2718
|
||||
}
|
||||
|
||||
type emptyTag struct {
|
||||
W string
|
||||
}
|
||||
|
@ -49,7 +53,7 @@ type badFormatTag struct {
|
|||
}
|
||||
|
||||
type badCodeTag struct {
|
||||
Z string `json:" !\"#%&'()*+,./"`
|
||||
Z string `json:" !\"#&'()*+,."`
|
||||
}
|
||||
|
||||
var structTagObjectKeyTests = []struct {
|
||||
|
@ -57,7 +61,7 @@ var structTagObjectKeyTests = []struct {
|
|||
value string
|
||||
key string
|
||||
}{
|
||||
{basicLatin2xTag{"2x"}, "2x", "$-"},
|
||||
{basicLatin2xTag{"2x"}, "2x", "$%-/"},
|
||||
{basicLatin3xTag{"3x"}, "3x", "0123456789"},
|
||||
{basicLatin4xTag{"4x"}, "4x", "ABCDEFGHIJKLMO"},
|
||||
{basicLatin5xTag{"5x"}, "5x", "PQRSTUVWXYZ_"},
|
||||
|
@ -68,6 +72,7 @@ var structTagObjectKeyTests = []struct {
|
|||
{misnamedTag{"Animal Kingdom"}, "Animal Kingdom", "X"},
|
||||
{badFormatTag{"Orfevre"}, "Orfevre", "Y"},
|
||||
{badCodeTag{"Reliable Man"}, "Reliable Man", "Z"},
|
||||
{percentSlashTag{"brut"}, "brut", "text/html%"},
|
||||
}
|
||||
|
||||
func TestStructTagObjectKey(t *testing.T) {
|
||||
|
@ -88,7 +93,7 @@ func TestStructTagObjectKey(t *testing.T) {
|
|||
t.Fatalf("Unexpected value: %#q, want %v", s, tt.value)
|
||||
}
|
||||
default:
|
||||
t.Fatalf("Unexpected key: %#q", i)
|
||||
t.Fatalf("Unexpected key: %#q, from %#q", i, b)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -150,6 +150,10 @@ type XMLNameWithoutTag struct {
|
|||
Value string ",chardata"
|
||||
}
|
||||
|
||||
type NameInField struct {
|
||||
Foo Name `xml:"ns foo"`
|
||||
}
|
||||
|
||||
type AttrTest struct {
|
||||
Int int `xml:",attr"`
|
||||
Lower int `xml:"int,attr"`
|
||||
|
@ -483,6 +487,19 @@ var marshalTests = []struct {
|
|||
UnmarshalOnly: true,
|
||||
},
|
||||
|
||||
// xml.Name works in a plain field as well.
|
||||
{
|
||||
Value: &NameInField{Name{Space: "ns", Local: "foo"}},
|
||||
ExpectXML: `<NameInField><foo xmlns="ns"></foo></NameInField>`,
|
||||
},
|
||||
|
||||
// Marshaling zero xml.Name uses the tag or field name.
|
||||
{
|
||||
Value: &NameInField{},
|
||||
ExpectXML: `<NameInField><foo xmlns="ns"></foo></NameInField>`,
|
||||
MarshalOnly: true,
|
||||
},
|
||||
|
||||
// Test attributes
|
||||
{
|
||||
Value: &AttrTest{
|
||||
|
|
|
@ -271,6 +271,10 @@ func (p *Parser) unmarshal(val reflect.Value, start *StartElement) error {
|
|||
case reflect.Struct:
|
||||
sv = v
|
||||
typ := sv.Type()
|
||||
if typ == nameType {
|
||||
v.Set(reflect.ValueOf(start.Name))
|
||||
break
|
||||
}
|
||||
tinfo, err = getTypeInfo(typ)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
|
@ -46,6 +46,8 @@ const (
|
|||
var tinfoMap = make(map[reflect.Type]*typeInfo)
|
||||
var tinfoLock sync.RWMutex
|
||||
|
||||
var nameType = reflect.TypeOf(Name{})
|
||||
|
||||
// getTypeInfo returns the typeInfo structure with details necessary
|
||||
// for marshalling and unmarshalling typ.
|
||||
func getTypeInfo(typ reflect.Type) (*typeInfo, error) {
|
||||
|
@ -56,7 +58,7 @@ func getTypeInfo(typ reflect.Type) (*typeInfo, error) {
|
|||
return tinfo, nil
|
||||
}
|
||||
tinfo = &typeInfo{}
|
||||
if typ.Kind() == reflect.Struct {
|
||||
if typ.Kind() == reflect.Struct && typ != nameType {
|
||||
n := typ.NumField()
|
||||
for i := 0; i < n; i++ {
|
||||
f := typ.Field(i)
|
||||
|
|
libgo/go/exp/gotype/testdata/test1.go (vendored, 4 changes)
|
@ -1,3 +1,7 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package p
|
||||
|
||||
func _() {
|
||||
|
|
|
@ -83,14 +83,15 @@ func TestInotifyClose(t *testing.T) {
|
|||
watcher, _ := NewWatcher()
|
||||
watcher.Close()
|
||||
|
||||
done := false
|
||||
done := make(chan bool)
|
||||
go func() {
|
||||
watcher.Close()
|
||||
done = true
|
||||
done <- true
|
||||
}()
|
||||
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
if !done {
|
||||
select {
|
||||
case <-done:
|
||||
case <-time.After(50 * time.Millisecond):
|
||||
t.Fatal("double Close() test failed: second Close() call didn't return")
|
||||
}
|
||||
|
||||
|
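A generic sketch of the pattern the test switches to above: signal completion on a channel and select against a timeout instead of sleeping and polling a shared bool (which is racy). The helper name is made up.

package example

import (
	"testing"
	"time"
)

// waitOrFail runs f in a goroutine and fails the test if it does not return within d.
func waitOrFail(t *testing.T, f func(), d time.Duration) {
	done := make(chan bool)
	go func() {
		f()
		done <- true
	}()
	select {
	case <-done:
	case <-time.After(d):
		t.Fatal("call did not return in time")
	}
}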
|
|
@ -1,3 +1,7 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package norm
|
||||
|
||||
import (
|
||||
|
|
|
@ -11,7 +11,6 @@ import (
|
|||
"net"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A Dialer is a means to establish a connection.
|
||||
|
@ -70,14 +69,11 @@ func RegisterDialerType(scheme string, f func(*url.URL, Dialer) (Dialer, error))
|
|||
// Dialer for it to make network requests.
|
||||
func FromURL(u *url.URL, forward Dialer) (Dialer, error) {
|
||||
var auth *Auth
|
||||
if len(u.RawUserinfo) > 0 {
|
||||
if u.User != nil {
|
||||
auth = new(Auth)
|
||||
parts := strings.SplitN(u.RawUserinfo, ":", 1)
|
||||
if len(parts) == 1 {
|
||||
auth.User = parts[0]
|
||||
} else if len(parts) >= 2 {
|
||||
auth.User = parts[0]
|
||||
auth.Password = parts[1]
|
||||
auth.User = u.User.Username()
|
||||
if p, ok := u.User.Password(); ok {
|
||||
auth.Password = p
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -306,7 +306,6 @@ type clientChan struct {
|
|||
stdout *chanReader // receives the payload of channelData messages
|
||||
stderr *chanReader // receives the payload of channelExtendedData messages
|
||||
msg chan interface{} // incoming messages
|
||||
|
||||
theyClosed bool // indicates the close msg has been received from the remote side
|
||||
weClosed bool // indicates the close msg has been sent from our side
|
||||
}
|
||||
|
|
|
@ -484,6 +484,26 @@ func intLength(n *big.Int) int {
|
|||
return length
|
||||
}
|
||||
|
||||
func marshalUint32(to []byte, n uint32) []byte {
|
||||
to[0] = byte(n >> 24)
|
||||
to[1] = byte(n >> 16)
|
||||
to[2] = byte(n >> 8)
|
||||
to[3] = byte(n)
|
||||
return to[4:]
|
||||
}
|
||||
|
||||
func marshalUint64(to []byte, n uint64) []byte {
|
||||
to[0] = byte(n >> 56)
|
||||
to[1] = byte(n >> 48)
|
||||
to[2] = byte(n >> 40)
|
||||
to[3] = byte(n >> 32)
|
||||
to[4] = byte(n >> 24)
|
||||
to[5] = byte(n >> 16)
|
||||
to[6] = byte(n >> 8)
|
||||
to[7] = byte(n)
|
||||
return to[8:]
|
||||
}
|
||||
|
||||
func marshalInt(to []byte, n *big.Int) []byte {
|
||||
lengthBytes := to
|
||||
to = to[4:]
|
||||
|
|
|
@ -70,7 +70,7 @@ type Session struct {
|
|||
|
||||
started bool // true once Start, Run or Shell is invoked.
|
||||
copyFuncs []func() error
|
||||
errch chan error // one send per copyFunc
|
||||
errors chan error // one send per copyFunc
|
||||
|
||||
// true if pipe method is active
|
||||
stdinpipe, stdoutpipe, stderrpipe bool
|
||||
|
@ -244,10 +244,10 @@ func (s *Session) start() error {
|
|||
setupFd(s)
|
||||
}
|
||||
|
||||
s.errch = make(chan error, len(s.copyFuncs))
|
||||
s.errors = make(chan error, len(s.copyFuncs))
|
||||
for _, fn := range s.copyFuncs {
|
||||
go func(fn func() error) {
|
||||
s.errch <- fn()
|
||||
s.errors <- fn()
|
||||
}(fn)
|
||||
}
|
||||
return nil
|
||||
|
@ -270,7 +270,7 @@ func (s *Session) Wait() error {
|
|||
|
||||
var copyError error
|
||||
for _ = range s.copyFuncs {
|
||||
if err := <-s.errch; err != nil && copyError == nil {
|
||||
if err := <-s.errors; err != nil && copyError == nil {
|
||||
copyError = err
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,6 +9,7 @@ import (
|
|||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Dial initiates a connection to the addr from the remote host.
|
||||
|
@ -107,27 +108,25 @@ func (t *tcpchanconn) RemoteAddr() net.Addr {
|
|||
return t.raddr
|
||||
}
|
||||
|
||||
// SetTimeout sets the read and write deadlines associated
|
||||
// SetDeadline sets the read and write deadlines associated
|
||||
// with the connection.
|
||||
func (t *tcpchanconn) SetTimeout(nsec int64) error {
|
||||
if err := t.SetReadTimeout(nsec); err != nil {
|
||||
func (t *tcpchanconn) SetDeadline(deadline time.Time) error {
|
||||
if err := t.SetReadDeadline(deadline); err != nil {
|
||||
return err
|
||||
}
|
||||
return t.SetWriteTimeout(nsec)
|
||||
return t.SetWriteDeadline(deadline)
|
||||
}
|
||||
|
||||
// SetReadTimeout sets the time (in nanoseconds) that
|
||||
// Read will wait for data before returning an error with Timeout() == true.
|
||||
// Setting nsec == 0 (the default) disables the deadline.
|
||||
func (t *tcpchanconn) SetReadTimeout(nsec int64) error {
|
||||
return errors.New("ssh: tcpchan: timeout not supported")
|
||||
// SetReadDeadline sets the read deadline.
|
||||
// A zero value for t means Read will not time out.
|
||||
// After the deadline, the error from Read will implement net.Error
|
||||
// with Timeout() == true.
|
||||
func (t *tcpchanconn) SetReadDeadline(deadline time.Time) error {
|
||||
return errors.New("ssh: tcpchan: deadline not supported")
|
||||
}
|
||||
|
||||
// SetWriteTimeout sets the time (in nanoseconds) that
|
||||
// Write will wait to send its data before returning an error with Timeout() == true.
|
||||
// Setting nsec == 0 (the default) disables the deadline.
|
||||
// Even if write times out, it may return n > 0, indicating that
|
||||
// some of the data was successfully written.
|
||||
func (t *tcpchanconn) SetWriteTimeout(nsec int64) error {
|
||||
return errors.New("ssh: tcpchan: timeout not supported")
|
||||
// SetWriteDeadline exists to satisfy the net.Conn interface
|
||||
// but is not implemented by this type. It always returns an error.
|
||||
func (t *tcpchanconn) SetWriteDeadline(deadline time.Time) error {
|
||||
return errors.New("ssh: tcpchan: deadline not supported")
|
||||
}
|
||||
|
|
|
@ -9,6 +9,7 @@ import (
|
|||
"crypto"
|
||||
"crypto/cipher"
|
||||
"crypto/hmac"
|
||||
"crypto/sha1"
|
||||
"crypto/subtle"
|
||||
"errors"
|
||||
"hash"
|
||||
|
@ -266,7 +267,7 @@ func (c *common) setupKeys(d direction, K, H, sessionId []byte, hashFunc crypto.
|
|||
generateKeyMaterial(key, d.keyTag, K, H, sessionId, h)
|
||||
generateKeyMaterial(macKey, d.macKeyTag, K, H, sessionId, h)
|
||||
|
||||
c.mac = truncatingMAC{12, hmac.NewSHA1(macKey)}
|
||||
c.mac = truncatingMAC{12, hmac.New(sha1.New, macKey)}
|
||||
|
||||
cipher, err := cipherMode.createCipher(key, iv)
|
||||
if err != nil {
|
||||
|
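For reference, a standalone sketch of the crypto/hmac API the code moves to here: the hash constructor is passed explicitly, replacing the removed hmac.NewSHA1 helper (key and message are illustrative).

package main

import (
	"crypto/hmac"
	"crypto/sha1"
	"fmt"
)

func main() {
	key := []byte("example key")
	mac := hmac.New(sha1.New, key) // was: hmac.NewSHA1(key)
	mac.Write([]byte("message"))
	fmt.Printf("%x\n", mac.Sum(nil))
}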
@ -328,6 +329,8 @@ func (t truncatingMAC) Size() int {
|
|||
return t.length
|
||||
}
|
||||
|
||||
func (t truncatingMAC) BlockSize() int { return t.hmac.BlockSize() }
|
||||
|
||||
// maxVersionStringBytes is the maximum number of bytes that we'll accept as a
|
||||
// version string. In the event that the client is talking a different protocol
|
||||
// we need to set a limit otherwise we will keep using more and more memory
|
||||
|
@ -337,7 +340,7 @@ const maxVersionStringBytes = 1024
|
|||
// Read version string as specified by RFC 4253, section 4.2.
|
||||
func readVersion(r io.Reader) ([]byte, error) {
|
||||
versionString := make([]byte, 0, 64)
|
||||
var ok, seenCR bool
|
||||
var ok bool
|
||||
var buf [1]byte
|
||||
forEachByte:
|
||||
for len(versionString) < maxVersionStringBytes {
|
||||
|
@ -345,27 +348,22 @@ forEachByte:
|
|||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
b := buf[0]
|
||||
|
||||
if !seenCR {
|
||||
if b == '\r' {
|
||||
seenCR = true
|
||||
}
|
||||
} else {
|
||||
if b == '\n' {
|
||||
// The RFC says that the version should be terminated with \r\n
|
||||
// but several SSH servers actually only send a \n.
|
||||
if buf[0] == '\n' {
|
||||
ok = true
|
||||
break forEachByte
|
||||
} else {
|
||||
seenCR = false
|
||||
}
|
||||
}
|
||||
versionString = append(versionString, b)
|
||||
versionString = append(versionString, buf[0])
|
||||
}
|
||||
|
||||
if !ok {
|
||||
return nil, errors.New("failed to read version string")
|
||||
return nil, errors.New("ssh: failed to read version string")
|
||||
}
|
||||
|
||||
// We need to remove the CR from versionString
|
||||
return versionString[:len(versionString)-1], nil
|
||||
// There might be a '\r' on the end which we should remove.
|
||||
if len(versionString) > 0 && versionString[len(versionString)-1] == '\r' {
|
||||
versionString = versionString[:len(versionString)-1]
|
||||
}
|
||||
return versionString, nil
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ import (
|
|||
)
|
||||
|
||||
func TestReadVersion(t *testing.T) {
|
||||
buf := []byte(serverVersion)
|
||||
buf := serverVersion
|
||||
result, err := readVersion(bufio.NewReader(bytes.NewBuffer(buf)))
|
||||
if err != nil {
|
||||
t.Errorf("readVersion didn't read version correctly: %s", err)
|
||||
|
@ -21,6 +21,20 @@ func TestReadVersion(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestReadVersionWithJustLF(t *testing.T) {
|
||||
var buf []byte
|
||||
buf = append(buf, serverVersion...)
|
||||
buf = buf[:len(buf)-1]
|
||||
buf[len(buf)-1] = '\n'
|
||||
result, err := readVersion(bufio.NewReader(bytes.NewBuffer(buf)))
|
||||
if err != nil {
|
||||
t.Error("readVersion failed to handle just a \n")
|
||||
}
|
||||
if !bytes.Equal(buf[:len(buf)-1], result) {
|
||||
t.Errorf("version read did not match expected: got %x, want %x", result, buf[:len(buf)-1])
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadVersionTooLong(t *testing.T) {
|
||||
buf := make([]byte, maxVersionStringBytes+1)
|
||||
if _, err := readVersion(bufio.NewReader(bytes.NewBuffer(buf))); err == nil {
|
||||
|
@ -29,7 +43,7 @@ func TestReadVersionTooLong(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestReadVersionWithoutCRLF(t *testing.T) {
|
||||
buf := []byte(serverVersion)
|
||||
buf := serverVersion
|
||||
buf = buf[:len(buf)-1]
|
||||
if _, err := readVersion(bufio.NewReader(bytes.NewBuffer(buf))); err == nil {
|
||||
t.Error("readVersion did not notice \\n was missing")
|
||||
|
|
|
@ -47,17 +47,17 @@ var tests = []struct {
|
|||
|
||||
var fset = token.NewFileSet()
|
||||
|
||||
// TODO(gri) This functionality should be in token.Fileset.
|
||||
func getFile(filename string) *token.File {
|
||||
for f := range fset.Files() {
|
||||
func getFile(filename string) (file *token.File) {
|
||||
fset.Iterate(func(f *token.File) bool {
|
||||
if f.Name() == filename {
|
||||
return f
|
||||
file = f
|
||||
return false // end iteration
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return true
|
||||
})
|
||||
return file
|
||||
}
|
||||
|
||||
// TODO(gri) This functionality should be in token.Fileset.
|
||||
func getPos(filename string, offset int) token.Pos {
|
||||
if f := getFile(filename); f != nil {
|
||||
return f.Pos(offset)
|
||||
|
@ -65,8 +65,6 @@ func getPos(filename string, offset int) token.Pos {
|
|||
return token.NoPos
|
||||
}
|
||||
|
||||
// TODO(gri) Need to revisit parser interface. We should be able to use parser.ParseFiles
|
||||
// or a similar function instead.
|
||||
func parseFiles(t *testing.T, testname string, filenames []string) (map[string]*ast.File, error) {
|
||||
files := make(map[string]*ast.File)
|
||||
var errors scanner.ErrorList
|
||||
|
@ -145,8 +143,6 @@ func eliminate(t *testing.T, expected map[token.Pos]string, errors error) {
|
|||
for _, error := range errors.(scanner.ErrorList) {
|
||||
// error.Pos is a token.Position, but we want
|
||||
// a token.Pos so we can do a map lookup
|
||||
// TODO(gri) Need to move scanner.Errors over
|
||||
// to use token.Pos and file set info.
|
||||
pos := getPos(error.Pos.Filename, error.Pos.Offset)
|
||||
if msg, found := expected[pos]; found {
|
||||
// we expect a message at pos; check if it matches
|
||||
|
|
|
@ -460,29 +460,32 @@ func (p *gcParser) parseSignature() *Func {
|
|||
return &Func{Params: params, Results: results, IsVariadic: isVariadic}
|
||||
}
|
||||
|
||||
// MethodSpec = ( identifier | ExportedName ) Signature .
|
||||
// MethodOrEmbedSpec = Name [ Signature ] .
|
||||
//
|
||||
func (p *gcParser) parseMethodSpec() *ast.Object {
|
||||
if p.tok == scanner.Ident {
|
||||
p.expect(scanner.Ident)
|
||||
} else {
|
||||
p.parseExportedName()
|
||||
}
|
||||
func (p *gcParser) parseMethodOrEmbedSpec() *ast.Object {
|
||||
p.parseName()
|
||||
if p.tok == '(' {
|
||||
p.parseSignature()
|
||||
|
||||
// TODO(gri) compute method object
|
||||
return ast.NewObj(ast.Fun, "_")
|
||||
}
|
||||
// TODO lookup name and return that type
|
||||
return ast.NewObj(ast.Typ, "_")
|
||||
}
|
||||
|
||||
// InterfaceType = "interface" "{" [ MethodList ] "}" .
|
||||
// MethodList = MethodSpec { ";" MethodSpec } .
|
||||
// InterfaceType = "interface" "{" [ MethodOrEmbedList ] "}" .
|
||||
// MethodOrEmbedList = MethodOrEmbedSpec { ";" MethodOrEmbedSpec } .
|
||||
//
|
||||
func (p *gcParser) parseInterfaceType() Type {
|
||||
var methods ObjList
|
||||
|
||||
parseMethod := func() {
|
||||
meth := p.parseMethodSpec()
|
||||
methods = append(methods, meth)
|
||||
switch m := p.parseMethodOrEmbedSpec(); m.Kind {
|
||||
case ast.Typ:
|
||||
// TODO expand embedded methods
|
||||
case ast.Fun:
|
||||
methods = append(methods, m)
|
||||
}
|
||||
}
|
||||
|
||||
p.expectKeyword("interface")
|
||||
|
|
|
@ -2,9 +2,13 @@
|
|||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package utf8
|
||||
// Package utf8string provides an efficient way to index strings by rune rather than by byte.
|
||||
package utf8string
|
||||
|
||||
import "errors"
|
||||
import (
|
||||
"errors"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// String wraps a regular string with a small structure that provides more
|
||||
// efficient indexing by code point index, as opposed to byte index.
|
||||
|
@ -37,10 +41,10 @@ func (s *String) Init(contents string) *String {
|
|||
s.bytePos = 0
|
||||
s.runePos = 0
|
||||
for i := 0; i < len(contents); i++ {
|
||||
if contents[i] >= RuneSelf {
|
||||
if contents[i] >= utf8.RuneSelf {
|
||||
// Not ASCII.
|
||||
s.numRunes = RuneCountInString(contents)
|
||||
_, s.width = DecodeRuneInString(contents)
|
||||
s.numRunes = utf8.RuneCountInString(contents)
|
||||
_, s.width = utf8.DecodeRuneInString(contents)
|
||||
s.nonASCII = i
|
||||
return s
|
||||
}
|
||||
|
@ -121,7 +125,7 @@ func (s *String) At(i int) rune {
|
|||
switch {
|
||||
|
||||
case i == s.runePos-1: // backing up one rune
|
||||
r, s.width = DecodeLastRuneInString(s.str[0:s.bytePos])
|
||||
r, s.width = utf8.DecodeLastRuneInString(s.str[0:s.bytePos])
|
||||
s.runePos = i
|
||||
s.bytePos -= s.width
|
||||
return r
|
||||
|
@ -130,16 +134,16 @@ func (s *String) At(i int) rune {
|
|||
s.bytePos += s.width
|
||||
fallthrough
|
||||
case i == s.runePos:
|
||||
r, s.width = DecodeRuneInString(s.str[s.bytePos:])
|
||||
r, s.width = utf8.DecodeRuneInString(s.str[s.bytePos:])
|
||||
return r
|
||||
case i == 0: // start of string
|
||||
r, s.width = DecodeRuneInString(s.str)
|
||||
r, s.width = utf8.DecodeRuneInString(s.str)
|
||||
s.runePos = 0
|
||||
s.bytePos = 0
|
||||
return r
|
||||
|
||||
case i == s.numRunes-1: // last rune in string
|
||||
r, s.width = DecodeLastRuneInString(s.str)
|
||||
r, s.width = utf8.DecodeLastRuneInString(s.str)
|
||||
s.runePos = i
|
||||
s.bytePos = len(s.str) - s.width
|
||||
return r
|
||||
|
@ -175,7 +179,7 @@ func (s *String) At(i int) rune {
|
|||
if forward {
|
||||
// TODO: Is it much faster to use a range loop for this scan?
|
||||
for {
|
||||
r, s.width = DecodeRuneInString(s.str[s.bytePos:])
|
||||
r, s.width = utf8.DecodeRuneInString(s.str[s.bytePos:])
|
||||
if s.runePos == i {
|
||||
break
|
||||
}
|
||||
|
@ -184,7 +188,7 @@ func (s *String) At(i int) rune {
|
|||
}
|
||||
} else {
|
||||
for {
|
||||
r, s.width = DecodeLastRuneInString(s.str[0:s.bytePos])
|
||||
r, s.width = utf8.DecodeLastRuneInString(s.str[0:s.bytePos])
|
||||
s.runePos--
|
||||
s.bytePos -= s.width
|
||||
if s.runePos == i {
|
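A small usage sketch of the relocated package; it assumes the NewString constructor and RuneCount method carried over from the old utf8.String type, and uses the exp/utf8string import path introduced here.

package main

import (
	"fmt"

	"exp/utf8string"
)

func main() {
	// String indexes by rune, caching the last position so sequential access stays cheap.
	s := utf8string.NewString("日本語 text")
	fmt.Println(s.RuneCount())   // 8
	fmt.Println(string(s.At(2))) // 語
}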
|
@ -2,14 +2,23 @@
|
|||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package utf8_test
|
||||
package utf8string
|
||||
|
||||
import (
|
||||
"math/rand"
|
||||
"testing"
|
||||
. "unicode/utf8"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
var testStrings = []string{
|
||||
"",
|
||||
"abcd",
|
||||
"☺☻☹",
|
||||
"日a本b語ç日ð本Ê語þ日¥本¼語i日©",
|
||||
"日a本b語ç日ð本Ê語þ日¥本¼語i日©日a本b語ç日ð本Ê語þ日¥本¼語i日©日a本b語ç日ð本Ê語þ日¥本¼語i日©",
|
||||
"\x80\x80\x80\x80",
|
||||
}
|
||||
|
||||
func TestScanForwards(t *testing.T) {
|
||||
for _, s := range testStrings {
|
||||
runes := []rune(s)
|
||||
|
@ -106,7 +115,7 @@ func TestLimitSliceAccess(t *testing.T) {
|
|||
if str.Slice(0, 0) != "" {
|
||||
t.Error("failure with empty slice at beginning")
|
||||
}
|
||||
nr := RuneCountInString(s)
|
||||
nr := utf8.RuneCountInString(s)
|
||||
if str.Slice(nr, nr) != "" {
|
||||
t.Error("failure with empty slice at end")
|
||||
}
|
|
@ -508,27 +508,28 @@ func BenchmarkSprintfFloat(b *testing.B) {
|
|||
|
||||
var mallocBuf bytes.Buffer
|
||||
|
||||
// gccgo numbers are different because gccgo does not have escape
|
||||
// analysis yet.
|
||||
var mallocTest = []struct {
|
||||
count int
|
||||
desc string
|
||||
fn func()
|
||||
}{
|
||||
{0, `Sprintf("")`, func() { Sprintf("") }},
|
||||
{1, `Sprintf("xxx")`, func() { Sprintf("xxx") }},
|
||||
{1, `Sprintf("%x")`, func() { Sprintf("%x", 7) }},
|
||||
{2, `Sprintf("%s")`, func() { Sprintf("%s", "hello") }},
|
||||
{1, `Sprintf("%x %x")`, func() { Sprintf("%x %x", 7, 112) }},
|
||||
{1, `Sprintf("%g")`, func() { Sprintf("%g", 3.14159) }},
|
||||
{0, `Fprintf(buf, "%x %x %x")`, func() { mallocBuf.Reset(); Fprintf(&mallocBuf, "%x %x %x", 7, 8, 9) }},
|
||||
{1, `Fprintf(buf, "%s")`, func() { mallocBuf.Reset(); Fprintf(&mallocBuf, "%s", "hello") }},
|
||||
{5, `Sprintf("")`, func() { Sprintf("") }},
|
||||
{5, `Sprintf("xxx")`, func() { Sprintf("xxx") }},
|
||||
{5, `Sprintf("%x")`, func() { Sprintf("%x", 7) }},
|
||||
{5, `Sprintf("%s")`, func() { Sprintf("%s", "hello") }},
|
||||
{5, `Sprintf("%x %x")`, func() { Sprintf("%x %x", 7, 112) }},
|
||||
// For %g we use a float32, not float64, to guarantee passing the argument
|
||||
// does not need to allocate memory to store the result in a pointer-sized word.
|
||||
{20, `Sprintf("%g")`, func() { Sprintf("%g", float32(3.14159)) }},
|
||||
{5, `Fprintf(buf, "%x %x %x")`, func() { mallocBuf.Reset(); Fprintf(&mallocBuf, "%x %x %x", 7, 8, 9) }},
|
||||
{5, `Fprintf(buf, "%s")`, func() { mallocBuf.Reset(); Fprintf(&mallocBuf, "%s", "hello") }},
|
||||
}
|
||||
|
||||
var _ bytes.Buffer
|
||||
|
||||
func TestCountMallocs(t *testing.T) {
|
||||
if testing.Short() {
|
||||
return
|
||||
}
|
||||
for _, mt := range mallocTest {
|
||||
const N = 100
|
||||
runtime.UpdateMemStats()
|
||||
|
@ -538,7 +539,7 @@ func TestCountMallocs(t *testing.T) {
|
|||
}
|
||||
runtime.UpdateMemStats()
|
||||
mallocs += runtime.MemStats.Mallocs
|
||||
if mallocs/N != uint64(mt.count) {
|
||||
if mallocs/N > uint64(mt.count) {
|
||||
t.Errorf("%s: expected %d mallocs, got %d", mt.desc, mt.count, mallocs/N)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -105,14 +105,14 @@ func FindTree(path string) (tree *Tree, pkg string, err error) {
|
|||
continue
|
||||
}
|
||||
tree = t
|
||||
pkg = path[len(tpath):]
|
||||
pkg = filepath.ToSlash(path[len(tpath):])
|
||||
return
|
||||
}
|
||||
err = fmt.Errorf("path %q not inside a GOPATH", path)
|
||||
return
|
||||
}
|
||||
tree = defaultTree
|
||||
pkg = path
|
||||
pkg = filepath.ToSlash(path)
|
||||
for _, t := range Path {
|
||||
if t.HasSrc(pkg) {
|
||||
tree = t
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package pkgtest
|
||||
|
||||
import "fmt"
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package pkgtest_test
|
||||
|
||||
import "pkgtest"
|
||||
|
|
|
@ -68,7 +68,8 @@ var (
|
|||
html_endp = []byte("</p>\n")
|
||||
html_pre = []byte("<pre>")
|
||||
html_endpre = []byte("</pre>\n")
|
||||
html_h = []byte("<h3>")
|
||||
html_h = []byte(`<h3 id="`)
|
||||
html_hq = []byte(`">`)
|
||||
html_endh = []byte("</h3>\n")
|
||||
)
|
||||
|
||||
|
@ -225,6 +226,12 @@ type block struct {
|
|||
lines []string
|
||||
}
|
||||
|
||||
var nonAlphaNumRx = regexp.MustCompile(`[^a-zA-Z0-9]`)
|
||||
|
||||
func anchorID(line string) string {
|
||||
return nonAlphaNumRx.ReplaceAllString(line, "_")
|
||||
}
|
||||
|
||||
// ToHTML converts comment text to formatted HTML.
|
||||
// The comment was prepared by DocReader,
|
||||
// so it is known not to have leading, trailing blank lines
|
||||
|
@ -253,9 +260,18 @@ func ToHTML(w io.Writer, text string, words map[string]string) {
|
|||
w.Write(html_endp)
|
||||
case opHead:
|
||||
w.Write(html_h)
|
||||
id := ""
|
||||
for _, line := range b.lines {
|
||||
if id == "" {
|
||||
id = anchorID(line)
|
||||
w.Write([]byte(id))
|
||||
w.Write(html_hq)
|
||||
}
|
||||
commentEscape(w, line, true)
|
||||
}
|
||||
if id == "" {
|
||||
w.Write(html_hq)
|
||||
}
|
||||
w.Write(html_endh)
|
||||
case opPre:
|
||||
w.Write(html_pre)
|
||||
|
|
|
@ -15,7 +15,7 @@ type Package struct {
|
|||
Doc string
|
||||
Name string
|
||||
ImportPath string
|
||||
Imports []string // TODO(gri) this field is not computed at the moment
|
||||
Imports []string
|
||||
Filenames []string
|
||||
Consts []*Value
|
||||
Types []*Type
|
||||
|
@ -36,7 +36,7 @@ type Value struct {
|
|||
type Method struct {
|
||||
*Func
|
||||
// TODO(gri) The following fields are not set at the moment.
|
||||
Recv *Type // original receiver base type
|
||||
Origin *Type // original receiver base type
|
||||
Level int // embedding level; 0 means Func is not embedded
|
||||
}
|
||||
|
||||
|
|
|
@ -6,132 +6,110 @@ package doc
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"flag"
|
||||
"go/parser"
|
||||
"go/printer"
|
||||
"go/token"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
type sources map[string]string // filename -> file contents
|
||||
var update = flag.Bool("update", false, "update golden (.out) files")
|
||||
|
||||
type testCase struct {
|
||||
name string
|
||||
importPath string
|
||||
mode Mode
|
||||
srcs sources
|
||||
doc string
|
||||
const dataDir = "testdata"
|
||||
|
||||
var templateTxt = readTemplate("template.txt")
|
||||
|
||||
func readTemplate(filename string) *template.Template {
|
||||
t := template.New(filename)
|
||||
t.Funcs(template.FuncMap{
|
||||
"node": nodeFmt,
|
||||
"synopsis": synopsisFmt,
|
||||
})
|
||||
return template.Must(t.ParseFiles(filepath.Join(dataDir, filename)))
|
||||
}
|
||||
|
||||
var tests = make(map[string]*testCase)
|
||||
|
||||
// To register a new test case, use the pattern:
|
||||
//
|
||||
// var _ = register(&testCase{ ... })
|
||||
//
|
||||
// (The result value of register is always 0 and only present to enable the pattern.)
|
||||
//
|
||||
func register(test *testCase) int {
|
||||
if _, found := tests[test.name]; found {
|
||||
panic(fmt.Sprintf("registration failed: test case %q already exists", test.name))
|
||||
}
|
||||
tests[test.name] = test
|
||||
return 0
|
||||
func nodeFmt(node interface{}, fset *token.FileSet) string {
|
||||
var buf bytes.Buffer
|
||||
printer.Fprint(&buf, fset, node)
|
||||
return strings.Replace(strings.TrimSpace(buf.String()), "\n", "\n\t", -1)
|
||||
}
|
||||
|
||||
func runTest(t *testing.T, test *testCase) {
|
||||
// create AST
|
||||
fset := token.NewFileSet()
|
||||
var pkg ast.Package
|
||||
pkg.Files = make(map[string]*ast.File)
|
||||
for filename, src := range test.srcs {
|
||||
file, err := parser.ParseFile(fset, filename, src, parser.ParseComments)
|
||||
if err != nil {
|
||||
t.Errorf("test %s: %v", test.name, err)
|
||||
return
|
||||
func synopsisFmt(s string) string {
|
||||
const n = 64
|
||||
if len(s) > n {
|
||||
// cut off excess text and go back to a word boundary
|
||||
s = s[0:n]
|
||||
if i := strings.LastIndexAny(s, "\t\n "); i >= 0 {
|
||||
s = s[0:i]
|
||||
}
|
||||
switch {
|
||||
case pkg.Name == "":
|
||||
pkg.Name = file.Name.Name
|
||||
case pkg.Name != file.Name.Name:
|
||||
t.Errorf("test %s: different package names in test files", test.name)
|
||||
return
|
||||
s = strings.TrimSpace(s) + " ..."
|
||||
}
|
||||
pkg.Files[filename] = file
|
||||
return "// " + strings.Replace(s, "\n", " ", -1)
|
||||
}
|
||||
|
||||
doc := New(&pkg, test.importPath, test.mode).String()
|
||||
if doc != test.doc {
|
||||
//TODO(gri) Enable this once the sorting issue of comments is fixed
|
||||
//t.Errorf("test %s\n\tgot : %s\n\twant: %s", test.name, doc, test.doc)
|
||||
func isGoFile(fi os.FileInfo) bool {
|
||||
name := fi.Name()
|
||||
return !fi.IsDir() &&
|
||||
len(name) > 0 && name[0] != '.' && // ignore .files
|
||||
filepath.Ext(name) == ".go"
|
||||
}
|
||||
|
||||
type bundle struct {
|
||||
*Package
|
||||
FSet *token.FileSet
|
||||
}
|
||||
|
||||
func Test(t *testing.T) {
|
||||
for _, test := range tests {
|
||||
runTest(t, test)
|
||||
}
|
||||
// get all packages
|
||||
fset := token.NewFileSet()
|
||||
pkgs, err := parser.ParseDir(fset, dataDir, isGoFile, parser.ParseComments)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Printing support
|
||||
// test all packages
|
||||
for _, pkg := range pkgs {
|
||||
importpath := dataDir + "/" + pkg.Name
|
||||
doc := New(pkg, importpath, 0)
|
||||
|
||||
func (pkg *Package) String() string {
|
||||
// golden files always use / in filenames - canonicalize them
|
||||
for i, filename := range doc.Filenames {
|
||||
doc.Filenames[i] = filepath.ToSlash(filename)
|
||||
}
|
||||
|
||||
// print documentation
|
||||
var buf bytes.Buffer
|
||||
docText.Execute(&buf, pkg) // ignore error - test will fail w/ incorrect output
|
||||
return buf.String()
|
||||
if err := templateTxt.Execute(&buf, bundle{doc, fset}); err != nil {
|
||||
t.Error(err)
|
||||
continue
|
||||
}
|
||||
got := buf.Bytes()
|
||||
|
||||
// update golden file if necessary
|
||||
golden := filepath.Join(dataDir, pkg.Name+".out")
|
||||
if *update {
|
||||
err := ioutil.WriteFile(golden, got, 0644)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// TODO(gri) complete template
|
||||
var docText = template.Must(template.New("docText").Parse(
|
||||
`
|
||||
PACKAGE {{.Name}}
|
||||
DOC {{printf "%q" .Doc}}
|
||||
IMPORTPATH {{.ImportPath}}
|
||||
FILENAMES {{.Filenames}}
|
||||
`))
|
||||
// get golden file
|
||||
want, err := ioutil.ReadFile(golden)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
continue
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Test cases
|
||||
|
||||
// Test that all package comments and bugs are collected,
|
||||
// and that the importPath is correctly set.
|
||||
//
|
||||
var _ = register(&testCase{
|
||||
name: "p",
|
||||
importPath: "p",
|
||||
srcs: sources{
|
||||
"p1.go": "// comment 1\npackage p\n//BUG(uid): bug1",
|
||||
"p0.go": "// comment 0\npackage p\n// BUG(uid): bug0",
|
||||
},
|
||||
doc: `
|
||||
PACKAGE p
|
||||
DOC "comment 0\n\ncomment 1\n"
|
||||
IMPORTPATH p
|
||||
FILENAMES [p0.go p1.go]
|
||||
`,
|
||||
})
|
||||
|
||||
// Test basic functionality.
|
||||
//
|
||||
var _ = register(&testCase{
|
||||
name: "p1",
|
||||
importPath: "p",
|
||||
srcs: sources{
|
||||
"p.go": `
|
||||
package p
|
||||
import "a"
|
||||
const pi = 3.14 // pi
|
||||
type T struct{} // T
|
||||
var V T // v
|
||||
func F(x int) int {} // F
|
||||
`,
|
||||
},
|
||||
doc: `
|
||||
PACKAGE p
|
||||
DOC ""
|
||||
IMPORTPATH p
|
||||
FILENAMES [p.go]
|
||||
`,
|
||||
})
|
||||
// compare
|
||||
if bytes.Compare(got, want) != 0 {
|
||||
t.Errorf("package %s\n\tgot:\n%s\n\twant:\n%s", pkg.Name, got, want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -124,6 +124,9 @@ func (doc *docReader) filterType(tinfo *typeInfo, typ ast.Expr) bool {
|
|||
|
||||
func (doc *docReader) filterSpec(spec ast.Spec) bool {
|
||||
switch s := spec.(type) {
|
||||
case *ast.ImportSpec:
|
||||
// always keep imports so we can collect them
|
||||
return true
|
||||
case *ast.ValueSpec:
|
||||
s.Names = filterIdentList(s.Names)
|
||||
if len(s.Names) > 0 {
|
||||
|
|
|
@ -9,6 +9,7 @@ import (
|
|||
"go/token"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
@ -55,6 +56,7 @@ type docReader struct {
|
|||
doc *ast.CommentGroup // package documentation, if any
|
||||
pkgName string
|
||||
mode Mode
|
||||
imports map[string]int
|
||||
values []*ast.GenDecl // consts and vars
|
||||
types map[string]*typeInfo
|
||||
embedded map[string]*typeInfo // embedded types, possibly not exported
|
||||
|
@ -65,6 +67,7 @@ type docReader struct {
|
|||
func (doc *docReader) init(pkgName string, mode Mode) {
|
||||
doc.pkgName = pkgName
|
||||
doc.mode = mode
|
||||
doc.imports = make(map[string]int)
|
||||
doc.types = make(map[string]*typeInfo)
|
||||
doc.embedded = make(map[string]*typeInfo)
|
||||
doc.funcs = make(map[string]*ast.FuncDecl)
|
||||
|
@ -244,6 +247,13 @@ func (doc *docReader) addDecl(decl ast.Decl) {
|
|||
case *ast.GenDecl:
|
||||
if len(d.Specs) > 0 {
|
||||
switch d.Tok {
|
||||
case token.IMPORT:
|
||||
// imports are handled individually
|
||||
for _, spec := range d.Specs {
|
||||
if import_, err := strconv.Unquote(spec.(*ast.ImportSpec).Path.Value); err == nil {
|
||||
doc.imports[import_] = 1
|
||||
}
|
||||
}
|
||||
case token.CONST, token.VAR:
|
||||
// constants and variables are always handled as a group
|
||||
doc.addValue(d)
|
||||
|
@ -346,6 +356,17 @@ func (doc *docReader) addFile(src *ast.File) {
|
|||
// ----------------------------------------------------------------------------
|
||||
// Conversion to external representation
|
||||
|
||||
func (doc *docReader) makeImports() []string {
|
||||
list := make([]string, len(doc.imports))
|
||||
i := 0
|
||||
for import_ := range doc.imports {
|
||||
list[i] = import_
|
||||
i++
|
||||
}
|
||||
sort.Strings(list)
|
||||
return list
|
||||
}
|
||||
|
||||
type sortValue []*Value
|
||||
|
||||
func (p sortValue) Len() int { return len(p) }
|
||||
|
@ -661,6 +682,7 @@ func (doc *docReader) newDoc(importpath string, filenames []string) *Package {
|
|||
// doc.funcs and thus must be called before any other
|
||||
// function consuming those lists
|
||||
p.Types = doc.makeTypes(doc.types)
|
||||
p.Imports = doc.makeImports()
|
||||
p.Consts = makeValues(doc.values, token.CONST)
|
||||
p.Vars = makeValues(doc.values, token.VAR)
|
||||
p.Funcs = makeFuncs(doc.funcs)
|
||||
|
|
libgo/go/go/doc/testdata/a.out (new file, vendored, 13 lines)
|
@ -0,0 +1,13 @@
|
|||
// comment 0 comment 1
|
||||
PACKAGE a
|
||||
|
||||
IMPORTPATH
|
||||
testdata/a
|
||||
|
||||
FILENAMES
|
||||
testdata/a0.go
|
||||
testdata/a1.go
|
||||
|
||||
BUGS
|
||||
// bug0
|
||||
// bug1
|
libgo/go/go/doc/testdata/a0.go (new file, vendored, 8 lines)
|
@ -0,0 +1,8 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// comment 0
|
||||
package a
|
||||
|
||||
//BUG(uid): bug0
|
libgo/go/go/doc/testdata/a1.go (new file, vendored, 8 lines)
|
@ -0,0 +1,8 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// comment 1
|
||||
package a
|
||||
|
||||
//BUG(uid): bug1
|
libgo/go/go/doc/testdata/b.go (new file, vendored, 13 lines)
|
@ -0,0 +1,13 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package b
|
||||
|
||||
import "a"
|
||||
|
||||
const Pi = 3.14 // Pi
|
||||
var MaxInt int // MaxInt
|
||||
type T struct{} // T
|
||||
var V T // v
|
||||
func F(x int) int {} // F
|
libgo/go/go/doc/testdata/b.out (new file, vendored, 34 lines)
|
@ -0,0 +1,34 @@
|
|||
//
|
||||
PACKAGE b
|
||||
|
||||
IMPORTPATH
|
||||
testdata/b
|
||||
|
||||
IMPORTS
|
||||
a
|
||||
|
||||
FILENAMES
|
||||
testdata/b.go
|
||||
|
||||
CONSTANTS
|
||||
//
|
||||
const Pi = 3.14 // Pi
|
||||
|
||||
|
||||
VARIABLES
|
||||
//
|
||||
var MaxInt int // MaxInt
|
||||
|
||||
|
||||
FUNCTIONS
|
||||
//
|
||||
func F(x int) int
|
||||
|
||||
|
||||
TYPES
|
||||
//
|
||||
type T struct{} // T
|
||||
|
||||
//
|
||||
var V T // v
|
||||
|
libgo/go/go/doc/testdata/benchmark.go (new file, vendored, 293 lines)
|
@ -0,0 +1,293 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package testing
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime"
|
||||
"time"
|
||||
)
|
||||
|
||||
var matchBenchmarks = flag.String("test.bench", "", "regular expression to select benchmarks to run")
|
||||
var benchTime = flag.Float64("test.benchtime", 1, "approximate run time for each benchmark, in seconds")
|
||||
|
||||
// An internal type but exported because it is cross-package; part of the implementation
|
||||
// of gotest.
|
||||
type InternalBenchmark struct {
|
||||
Name string
|
||||
F func(b *B)
|
||||
}
|
||||
|
||||
// B is a type passed to Benchmark functions to manage benchmark
|
||||
// timing and to specify the number of iterations to run.
|
||||
type B struct {
|
||||
common
|
||||
N int
|
||||
benchmark InternalBenchmark
|
||||
bytes int64
|
||||
timerOn bool
|
||||
result BenchmarkResult
|
||||
}
|
||||
|
||||
// StartTimer starts timing a test. This function is called automatically
|
||||
// before a benchmark starts, but it can also be used to resume timing after
|
||||
// a call to StopTimer.
|
||||
func (b *B) StartTimer() {
|
||||
if !b.timerOn {
|
||||
b.start = time.Now()
|
||||
b.timerOn = true
|
||||
}
|
||||
}
|
||||
|
||||
// StopTimer stops timing a test. This can be used to pause the timer
|
||||
// while performing complex initialization that you don't
|
||||
// want to measure.
|
||||
func (b *B) StopTimer() {
|
||||
if b.timerOn {
|
||||
b.duration += time.Now().Sub(b.start)
|
||||
b.timerOn = false
|
||||
}
|
||||
}
|
||||
|
||||
// ResetTimer sets the elapsed benchmark time to zero.
|
||||
// It does not affect whether the timer is running.
|
||||
func (b *B) ResetTimer() {
|
||||
if b.timerOn {
|
||||
b.start = time.Now()
|
||||
}
|
||||
b.duration = 0
|
||||
}
|
||||
|
||||
// SetBytes records the number of bytes processed in a single operation.
|
||||
// If this is called, the benchmark will report ns/op and MB/s.
|
||||
func (b *B) SetBytes(n int64) { b.bytes = n }
|
||||
|
||||
func (b *B) nsPerOp() int64 {
|
||||
if b.N <= 0 {
|
||||
return 0
|
||||
}
|
||||
return b.duration.Nanoseconds() / int64(b.N)
|
||||
}
|
||||
|
||||
// runN runs a single benchmark for the specified number of iterations.
|
||||
func (b *B) runN(n int) {
|
||||
// Try to get a comparable environment for each run
|
||||
// by clearing garbage from previous runs.
|
||||
runtime.GC()
|
||||
b.N = n
|
||||
b.ResetTimer()
|
||||
b.StartTimer()
|
||||
b.benchmark.F(b)
|
||||
b.StopTimer()
|
||||
}
|
||||
|
||||
func min(x, y int) int {
|
||||
if x > y {
|
||||
return y
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
func max(x, y int) int {
|
||||
if x < y {
|
||||
return y
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
// roundDown10 rounds a number down to the nearest power of 10.
|
||||
func roundDown10(n int) int {
|
||||
var tens = 0
|
||||
// tens = floor(log_10(n))
|
||||
for n > 10 {
|
||||
n = n / 10
|
||||
tens++
|
||||
}
|
||||
// result = 10^tens
|
||||
result := 1
|
||||
for i := 0; i < tens; i++ {
|
||||
result *= 10
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// roundUp rounds x up to a number of the form [1eX, 2eX, 5eX].
|
||||
func roundUp(n int) int {
|
||||
base := roundDown10(n)
|
||||
if n < (2 * base) {
|
||||
return 2 * base
|
||||
}
|
||||
if n < (5 * base) {
|
||||
return 5 * base
|
||||
}
|
||||
return 10 * base
|
||||
}
|
||||
|
||||
// run times the benchmark function in a separate goroutine.
|
||||
func (b *B) run() BenchmarkResult {
|
||||
go b.launch()
|
||||
<-b.signal
|
||||
return b.result
|
||||
}
|
||||
|
||||
// launch launches the benchmark function. It gradually increases the number
|
||||
// of benchmark iterations until the benchmark runs for a second in order
|
||||
// to get a reasonable measurement. It prints timing information in this form
|
||||
// testing.BenchmarkHello 100000 19 ns/op
|
||||
// launch is run by the run function as a separate goroutine.
|
||||
func (b *B) launch() {
|
||||
// Run the benchmark for a single iteration in case it's expensive.
|
||||
n := 1
|
||||
|
||||
// Signal that we're done whether we return normally
|
||||
// or by FailNow's runtime.Goexit.
|
||||
defer func() {
|
||||
b.signal <- b
|
||||
}()
|
||||
|
||||
b.runN(n)
|
||||
// Run the benchmark for at least the specified amount of time.
|
||||
d := time.Duration(*benchTime * float64(time.Second))
|
||||
for !b.failed && b.duration < d && n < 1e9 {
|
||||
last := n
|
||||
// Predict iterations/sec.
|
||||
if b.nsPerOp() == 0 {
|
||||
n = 1e9
|
||||
} else {
|
||||
n = int(d.Nanoseconds() / b.nsPerOp())
|
||||
}
|
||||
// Run more iterations than we think we'll need for a second (1.5x).
|
||||
// Don't grow too fast in case we had timing errors previously.
|
||||
// Be sure to run at least one more than last time.
|
||||
n = max(min(n+n/2, 100*last), last+1)
|
||||
// Round up to something easy to read.
|
||||
n = roundUp(n)
|
||||
b.runN(n)
|
||||
}
|
||||
b.result = BenchmarkResult{b.N, b.duration, b.bytes}
|
||||
}
|
||||
|
||||
// The results of a benchmark run.
|
||||
type BenchmarkResult struct {
|
||||
N int // The number of iterations.
|
||||
T time.Duration // The total time taken.
|
||||
Bytes int64 // Bytes processed in one iteration.
|
||||
}
|
||||
|
||||
func (r BenchmarkResult) NsPerOp() int64 {
|
||||
if r.N <= 0 {
|
||||
return 0
|
||||
}
|
||||
return r.T.Nanoseconds() / int64(r.N)
|
||||
}
|
||||
|
||||
func (r BenchmarkResult) mbPerSec() float64 {
|
||||
if r.Bytes <= 0 || r.T <= 0 || r.N <= 0 {
|
||||
return 0
|
||||
}
|
||||
return (float64(r.Bytes) * float64(r.N) / 1e6) / r.T.Seconds()
|
||||
}
|
||||
|
||||
func (r BenchmarkResult) String() string {
|
||||
mbs := r.mbPerSec()
|
||||
mb := ""
|
||||
if mbs != 0 {
|
||||
mb = fmt.Sprintf("\t%7.2f MB/s", mbs)
|
||||
}
|
||||
nsop := r.NsPerOp()
|
||||
ns := fmt.Sprintf("%10d ns/op", nsop)
|
||||
if r.N > 0 && nsop < 100 {
|
||||
// The format specifiers here make sure that
|
||||
// the ones digits line up for all three possible formats.
|
||||
if nsop < 10 {
|
||||
ns = fmt.Sprintf("%13.2f ns/op", float64(r.T.Nanoseconds())/float64(r.N))
|
||||
} else {
|
||||
ns = fmt.Sprintf("%12.1f ns/op", float64(r.T.Nanoseconds())/float64(r.N))
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("%8d\t%s%s", r.N, ns, mb)
|
||||
}
|
||||
|
||||
// An internal function but exported because it is cross-package; part of the implementation
|
||||
// of gotest.
|
||||
func RunBenchmarks(matchString func(pat, str string) (bool, error), benchmarks []InternalBenchmark) {
|
||||
// If no flag was specified, don't run benchmarks.
|
||||
if len(*matchBenchmarks) == 0 {
|
||||
return
|
||||
}
|
||||
for _, Benchmark := range benchmarks {
|
||||
matched, err := matchString(*matchBenchmarks, Benchmark.Name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "testing: invalid regexp for -test.bench: %s\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
if !matched {
|
||||
continue
|
||||
}
|
||||
for _, procs := range cpuList {
|
||||
runtime.GOMAXPROCS(procs)
|
||||
b := &B{
|
||||
common: common{
|
||||
signal: make(chan interface{}),
|
||||
},
|
||||
benchmark: Benchmark,
|
||||
}
|
||||
benchName := Benchmark.Name
|
||||
if procs != 1 {
|
||||
benchName = fmt.Sprintf("%s-%d", Benchmark.Name, procs)
|
||||
}
|
||||
fmt.Printf("%s\t", benchName)
|
||||
r := b.run()
|
||||
if b.failed {
|
||||
// The output could be very long here, but probably isn't.
|
||||
// We print it all, regardless, because we don't want to trim the reason
|
||||
// the benchmark failed.
|
||||
fmt.Printf("--- FAIL: %s\n%s", benchName, b.output)
|
||||
continue
|
||||
}
|
||||
fmt.Printf("%v\n", r)
|
||||
// Unlike with tests, we ignore the -chatty flag and always print output for
|
||||
// benchmarks since the output generation time will skew the results.
|
||||
if len(b.output) > 0 {
|
||||
b.trimOutput()
|
||||
fmt.Printf("--- BENCH: %s\n%s", benchName, b.output)
|
||||
}
|
||||
if p := runtime.GOMAXPROCS(-1); p != procs {
|
||||
fmt.Fprintf(os.Stderr, "testing: %s left GOMAXPROCS set to %d\n", benchName, p)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// trimOutput shortens the output from a benchmark, which can be very long.
|
||||
func (b *B) trimOutput() {
|
||||
// The output is likely to appear multiple times because the benchmark
|
||||
// is run multiple times, but at least it will be seen. This is not a big deal
|
||||
// because benchmarks rarely print, but just in case, we trim it if it's too long.
|
||||
const maxNewlines = 10
|
||||
for nlCount, j := 0, 0; j < len(b.output); j++ {
|
||||
if b.output[j] == '\n' {
|
||||
nlCount++
|
||||
if nlCount >= maxNewlines {
|
||||
b.output = append(b.output[:j], "\n\t... [output truncated]\n"...)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Benchmark benchmarks a single function. Useful for creating
|
||||
// custom benchmarks that do not use gotest.
|
||||
func Benchmark(f func(b *B)) BenchmarkResult {
|
||||
b := &B{
|
||||
common: common{
|
||||
signal: make(chan interface{}),
|
||||
},
|
||||
benchmark: InternalBenchmark{"", f},
|
||||
}
|
||||
return b.run()
|
||||
}
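
Benchmark above mirrors the exported testing.Benchmark of this release; a hedged usage sketch in which the workload (strings.Repeat) is arbitrary and the printed numbers are machine-dependent.

package main

import (
	"fmt"
	"strings"
	"testing"
)

func main() {
	res := testing.Benchmark(func(b *testing.B) {
		for i := 0; i < b.N; i++ {
			strings.Repeat("x", 64)
		}
	})
	fmt.Println(res) // e.g. "  500000	      3210 ns/op"
}
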
|
libgo/go/go/doc/testdata/example.go (new file)
|
@ -0,0 +1,81 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package testing
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type InternalExample struct {
|
||||
Name string
|
||||
F func()
|
||||
Output string
|
||||
}
|
||||
|
||||
func RunExamples(examples []InternalExample) (ok bool) {
|
||||
ok = true
|
||||
|
||||
var eg InternalExample
|
||||
|
||||
stdout, stderr := os.Stdout, os.Stderr
|
||||
defer func() {
|
||||
os.Stdout, os.Stderr = stdout, stderr
|
||||
if e := recover(); e != nil {
|
||||
fmt.Printf("--- FAIL: %s\npanic: %v\n", eg.Name, e)
|
||||
os.Exit(1)
|
||||
}
|
||||
}()
|
||||
|
||||
for _, eg = range examples {
|
||||
if *chatty {
|
||||
fmt.Printf("=== RUN: %s\n", eg.Name)
|
||||
}
|
||||
|
||||
// capture stdout and stderr
|
||||
r, w, err := os.Pipe()
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
os.Stdout, os.Stderr = w, w
|
||||
outC := make(chan string)
|
||||
go func() {
|
||||
buf := new(bytes.Buffer)
|
||||
_, err := io.Copy(buf, r)
|
||||
if err != nil {
|
||||
fmt.Fprintf(stderr, "testing: copying pipe: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
outC <- buf.String()
|
||||
}()
|
||||
|
||||
// run example
|
||||
t0 := time.Now()
|
||||
eg.F()
|
||||
dt := time.Now().Sub(t0)
|
||||
|
||||
// close pipe, restore stdout/stderr, get output
|
||||
w.Close()
|
||||
os.Stdout, os.Stderr = stdout, stderr
|
||||
out := <-outC
|
||||
|
||||
// report any errors
|
||||
tstr := fmt.Sprintf("(%.2f seconds)", dt.Seconds())
|
||||
if g, e := strings.TrimSpace(out), strings.TrimSpace(eg.Output); g != e {
|
||||
fmt.Printf("--- FAIL: %s %s\ngot:\n%s\nwant:\n%s\n",
|
||||
eg.Name, tstr, g, e)
|
||||
ok = false
|
||||
} else if *chatty {
|
||||
fmt.Printf("--- PASS: %s %s\n", eg.Name, tstr)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
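
RunExamples captures example output with nothing more than os.Pipe and io.Copy; a self-contained sketch of the same capture technique, with an illustrative message.

package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
)

func main() {
	stdout := os.Stdout
	r, w, err := os.Pipe()
	if err != nil {
		fmt.Fprintln(stdout, err)
		return
	}
	os.Stdout = w
	outC := make(chan string)
	go func() {
		var buf bytes.Buffer
		io.Copy(&buf, r)
		outC <- buf.String()
	}()

	fmt.Println("hello from the example") // goes into the pipe

	w.Close()
	os.Stdout = stdout
	fmt.Printf("captured: %q\n", <-outC)
}
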
|
libgo/go/go/doc/testdata/template.txt (new file)
|
@ -0,0 +1,65 @@
|
|||
{{synopsis .Doc}}
|
||||
PACKAGE {{.Name}}
|
||||
|
||||
IMPORTPATH
|
||||
{{.ImportPath}}
|
||||
|
||||
{{with .Imports}}IMPORTS
|
||||
{{range .}} {{.}}
|
||||
{{end}}
|
||||
{{end}}{{/*
|
||||
|
||||
*/}}FILENAMES
|
||||
{{range .Filenames}} {{.}}
|
||||
{{end}}{{/*
|
||||
|
||||
*/}}{{with .Consts}}
|
||||
CONSTANTS
|
||||
{{range .}} {{synopsis .Doc}}
|
||||
{{node .Decl $.FSet}}
|
||||
|
||||
{{end}}{{end}}{{/*
|
||||
|
||||
*/}}{{with .Vars}}
|
||||
VARIABLES
|
||||
{{range .}} {{synopsis .Doc}}
|
||||
{{node .Decl $.FSet}}
|
||||
|
||||
{{end}}{{end}}{{/*
|
||||
|
||||
*/}}{{with .Funcs}}
|
||||
FUNCTIONS
|
||||
{{range .}} {{synopsis .Doc}}
|
||||
{{node .Decl $.FSet}}
|
||||
|
||||
{{end}}{{end}}{{/*
|
||||
|
||||
*/}}{{with .Types}}
|
||||
TYPES
|
||||
{{range .}} {{synopsis .Doc}}
|
||||
{{node .Decl $.FSet}}
|
||||
|
||||
{{range .Consts}} {{synopsis .Doc}}
|
||||
{{node .Decl $.FSet}}
|
||||
|
||||
{{end}}{{/*
|
||||
|
||||
*/}}{{range .Vars}} {{synopsis .Doc}}
|
||||
{{node .Decl $.FSet}}
|
||||
|
||||
{{end}}{{/*
|
||||
|
||||
*/}}{{range .Funcs}} {{synopsis .Doc}}
|
||||
{{node .Decl $.FSet}}
|
||||
|
||||
{{end}}{{/*
|
||||
|
||||
*/}}{{range .Methods}} {{synopsis .Doc}}
|
||||
{{node .Decl $.FSet}}
|
||||
|
||||
{{end}}{{end}}{{end}}{{/*
|
||||
|
||||
*/}}{{with .Bugs}}
|
||||
BUGS
|
||||
{{range .}} {{synopsis .}}
|
||||
{{end}}{{end}}
|
libgo/go/go/doc/testdata/testing.go (new file)
|
@ -0,0 +1,404 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package testing provides support for automated testing of Go packages.
|
||||
// It is intended to be used in concert with the ``gotest'' utility, which automates
|
||||
// execution of any function of the form
|
||||
// func TestXxx(*testing.T)
|
||||
// where Xxx can be any alphanumeric string (but the first letter must not be in
|
||||
// [a-z]) and serves to identify the test routine.
|
||||
// These TestXxx routines should be declared within the package they are testing.
|
||||
//
|
||||
// Functions of the form
|
||||
// func BenchmarkXxx(*testing.B)
|
||||
// are considered benchmarks, and are executed by gotest when the -test.bench
|
||||
// flag is provided.
|
||||
//
|
||||
// A sample benchmark function looks like this:
|
||||
// func BenchmarkHello(b *testing.B) {
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// fmt.Sprintf("hello")
|
||||
// }
|
||||
// }
|
||||
// The benchmark package will vary b.N until the benchmark function lasts
|
||||
// long enough to be timed reliably. The output
|
||||
// testing.BenchmarkHello 10000000 282 ns/op
|
||||
// means that the loop ran 10000000 times at a speed of 282 ns per loop.
|
||||
//
|
||||
// If a benchmark needs some expensive setup before running, the timer
|
||||
// may be stopped:
|
||||
// func BenchmarkBigLen(b *testing.B) {
|
||||
// b.StopTimer()
|
||||
// big := NewBig()
|
||||
// b.StartTimer()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// big.Len()
|
||||
// }
|
||||
// }
|
||||
package testing
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime"
|
||||
"runtime/pprof"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var (
|
||||
// The short flag requests that tests run more quickly, but its functionality
|
||||
// is provided by test writers themselves. The testing package is just its
|
||||
// home. The all.bash installation script sets it to make installation more
|
||||
// efficient, but by default the flag is off so a plain "gotest" will do a
|
||||
// full test of the package.
|
||||
short = flag.Bool("test.short", false, "run smaller test suite to save time")
|
||||
|
||||
// Report as tests are run; default is silent for success.
|
||||
chatty = flag.Bool("test.v", false, "verbose: print additional output")
|
||||
match = flag.String("test.run", "", "regular expression to select tests to run")
|
||||
memProfile = flag.String("test.memprofile", "", "write a memory profile to the named file after execution")
|
||||
memProfileRate = flag.Int("test.memprofilerate", 0, "if >=0, sets runtime.MemProfileRate")
|
||||
cpuProfile = flag.String("test.cpuprofile", "", "write a cpu profile to the named file during execution")
|
||||
timeout = flag.Duration("test.timeout", 0, "if positive, sets an aggregate time limit for all tests")
|
||||
cpuListStr = flag.String("test.cpu", "", "comma-separated list of number of CPUs to use for each test")
|
||||
parallel = flag.Int("test.parallel", runtime.GOMAXPROCS(0), "maximum test parallelism")
|
||||
|
||||
cpuList []int
|
||||
)
|
||||
|
||||
// common holds the elements common between T and B and
|
||||
// captures common methods such as Errorf.
|
||||
type common struct {
|
||||
output []byte // Output generated by test or benchmark.
|
||||
failed bool // Test or benchmark has failed.
|
||||
start time.Time // Time test or benchmark started
|
||||
duration time.Duration
|
||||
self interface{} // To be sent on signal channel when done.
|
||||
signal chan interface{} // Output for serial tests.
|
||||
}
|
||||
|
||||
// Short reports whether the -test.short flag is set.
|
||||
func Short() bool {
|
||||
return *short
|
||||
}
|
||||
|
||||
// decorate inserts the final newline if needed and indentation tabs for formatting.
|
||||
// If addFileLine is true, it also prefixes the string with the file and line of the call site.
|
||||
func decorate(s string, addFileLine bool) string {
|
||||
if addFileLine {
|
||||
_, file, line, ok := runtime.Caller(3) // decorate + log + public function.
|
||||
if ok {
|
||||
// Truncate file name at last file name separator.
|
||||
if index := strings.LastIndex(file, "/"); index >= 0 {
|
||||
file = file[index+1:]
|
||||
} else if index = strings.LastIndex(file, "\\"); index >= 0 {
|
||||
file = file[index+1:]
|
||||
}
|
||||
} else {
|
||||
file = "???"
|
||||
line = 1
|
||||
}
|
||||
s = fmt.Sprintf("%s:%d: %s", file, line, s)
|
||||
}
|
||||
s = "\t" + s // Every line is indented at least one tab.
|
||||
n := len(s)
|
||||
if n > 0 && s[n-1] != '\n' {
|
||||
s += "\n"
|
||||
n++
|
||||
}
|
||||
for i := 0; i < n-1; i++ { // -1 to avoid final newline
|
||||
if s[i] == '\n' {
|
||||
// Second and subsequent lines are indented an extra tab.
|
||||
return s[0:i+1] + "\t" + decorate(s[i+1:n], false)
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// T is a type passed to Test functions to manage test state and support formatted test logs.
|
||||
// Logs are accumulated during execution and dumped to standard error when done.
|
||||
type T struct {
|
||||
common
|
||||
name string // Name of test.
|
||||
startParallel chan bool // Parallel tests will wait on this.
|
||||
}
|
||||
|
||||
// Fail marks the function as having failed but continues execution.
|
||||
func (c *common) Fail() { c.failed = true }
|
||||
|
||||
// Failed returns whether the function has failed.
|
||||
func (c *common) Failed() bool { return c.failed }
|
||||
|
||||
// FailNow marks the function as having failed and stops its execution.
|
||||
// Execution will continue at the next Test.
|
||||
func (c *common) FailNow() {
|
||||
c.Fail()
|
||||
|
||||
// Calling runtime.Goexit will exit the goroutine, which
|
||||
// will run the deferred functions in this goroutine,
|
||||
// which will eventually run the deferred lines in tRunner,
|
||||
// which will signal to the test loop that this test is done.
|
||||
//
|
||||
// A previous version of this code said:
|
||||
//
|
||||
// c.duration = ...
|
||||
// c.signal <- c.self
|
||||
// runtime.Goexit()
|
||||
//
|
||||
// This previous version duplicated code (those lines are in
|
||||
// tRunner no matter what), but worse the goroutine teardown
|
||||
// implicit in runtime.Goexit was not guaranteed to complete
|
||||
// before the test exited. If a test deferred an important cleanup
|
||||
// function (like removing temporary files), there was no guarantee
|
||||
// it would run on a test failure. Because we send on c.signal during
|
||||
// a top-of-stack deferred function now, we know that the send
|
||||
// only happens after any other stacked defers have completed.
|
||||
runtime.Goexit()
|
||||
}
|
||||
|
||||
// log generates the output. It's always at the same stack depth.
|
||||
func (c *common) log(s string) {
|
||||
c.output = append(c.output, decorate(s, true)...)
|
||||
}
|
||||
|
||||
// Log formats its arguments using default formatting, analogous to Println(),
|
||||
// and records the text in the error log.
|
||||
func (c *common) Log(args ...interface{}) { c.log(fmt.Sprintln(args...)) }
|
||||
|
||||
// Logf formats its arguments according to the format, analogous to Printf(),
|
||||
// and records the text in the error log.
|
||||
func (c *common) Logf(format string, args ...interface{}) { c.log(fmt.Sprintf(format, args...)) }
|
||||
|
||||
// Error is equivalent to Log() followed by Fail().
|
||||
func (c *common) Error(args ...interface{}) {
|
||||
c.log(fmt.Sprintln(args...))
|
||||
c.Fail()
|
||||
}
|
||||
|
||||
// Errorf is equivalent to Logf() followed by Fail().
|
||||
func (c *common) Errorf(format string, args ...interface{}) {
|
||||
c.log(fmt.Sprintf(format, args...))
|
||||
c.Fail()
|
||||
}
|
||||
|
||||
// Fatal is equivalent to Log() followed by FailNow().
|
||||
func (c *common) Fatal(args ...interface{}) {
|
||||
c.log(fmt.Sprintln(args...))
|
||||
c.FailNow()
|
||||
}
|
||||
|
||||
// Fatalf is equivalent to Logf() followed by FailNow().
|
||||
func (c *common) Fatalf(format string, args ...interface{}) {
|
||||
c.log(fmt.Sprintf(format, args...))
|
||||
c.FailNow()
|
||||
}
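
Error/Errorf and Fatal/Fatalf are the usual entry points from test code; a small runnable sketch (it belongs in a _test.go file and exercises a standard-library function).

package demo

import (
	"strconv"
	"testing"
)

func TestAtoi(t *testing.T) {
	n, err := strconv.Atoi("42")
	if err != nil {
		t.Fatalf("Atoi returned error: %v", err) // stops this test immediately
	}
	if n != 42 {
		t.Errorf("Atoi(%q) = %d, want 42", "42", n) // fails but keeps running
	}
}
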
|
||||
|
||||
// Parallel signals that this test is to be run in parallel with (and only with)
|
||||
// other parallel tests in this CPU group.
|
||||
func (t *T) Parallel() {
|
||||
t.signal <- (*T)(nil) // Release main testing loop
|
||||
<-t.startParallel // Wait for serial tests to finish
|
||||
}
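
A hedged sketch of Parallel from the caller's side; both tests below release the serial loop and then run concurrently, subject to -test.parallel (again, in a _test.go file).

package demo

import (
	"testing"
	"time"
)

func TestSleepA(t *testing.T) {
	t.Parallel() // hand control back to the main loop
	time.Sleep(10 * time.Millisecond)
}

func TestSleepB(t *testing.T) {
	t.Parallel()
	time.Sleep(10 * time.Millisecond)
}
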
|
||||
|
||||
// An internal type but exported because it is cross-package; part of the implementation
|
||||
// of gotest.
|
||||
type InternalTest struct {
|
||||
Name string
|
||||
F func(*T)
|
||||
}
|
||||
|
||||
func tRunner(t *T, test *InternalTest) {
|
||||
t.start = time.Now()
|
||||
|
||||
// When this goroutine is done, either because test.F(t)
|
||||
// returned normally or because a test failure triggered
|
||||
// a call to runtime.Goexit, record the duration and send
|
||||
// a signal saying that the test is done.
|
||||
defer func() {
|
||||
t.duration = time.Now().Sub(t.start)
|
||||
t.signal <- t
|
||||
}()
|
||||
|
||||
test.F(t)
|
||||
}
|
||||
|
||||
// An internal function but exported because it is cross-package; part of the implementation
|
||||
// of gotest.
|
||||
func Main(matchString func(pat, str string) (bool, error), tests []InternalTest, benchmarks []InternalBenchmark, examples []InternalExample) {
|
||||
flag.Parse()
|
||||
parseCpuList()
|
||||
|
||||
before()
|
||||
startAlarm()
|
||||
testOk := RunTests(matchString, tests)
|
||||
exampleOk := RunExamples(examples)
|
||||
if !testOk || !exampleOk {
|
||||
fmt.Println("FAIL")
|
||||
os.Exit(1)
|
||||
}
|
||||
fmt.Println("PASS")
|
||||
stopAlarm()
|
||||
RunBenchmarks(matchString, benchmarks)
|
||||
after()
|
||||
}
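
Main is what a gotest-generated driver calls; the sketch below is a hypothetical hand-written driver (names are illustrative, not the generated file), matching the four-argument Main declared above.

package main

import (
	"regexp"
	"testing"
)

func TestSomething(t *testing.T) {}

func matchString(pat, str string) (bool, error) {
	return regexp.MatchString(pat, str)
}

func main() {
	tests := []testing.InternalTest{{"TestSomething", TestSomething}}
	benchmarks := []testing.InternalBenchmark{}
	examples := []testing.InternalExample{}
	testing.Main(matchString, tests, benchmarks, examples)
}
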
|
||||
|
||||
func (t *T) report() {
|
||||
tstr := fmt.Sprintf("(%.2f seconds)", t.duration.Seconds())
|
||||
format := "--- %s: %s %s\n%s"
|
||||
if t.failed {
|
||||
fmt.Printf(format, "FAIL", t.name, tstr, t.output)
|
||||
} else if *chatty {
|
||||
fmt.Printf(format, "PASS", t.name, tstr, t.output)
|
||||
}
|
||||
}
|
||||
|
||||
func RunTests(matchString func(pat, str string) (bool, error), tests []InternalTest) (ok bool) {
|
||||
ok = true
|
||||
if len(tests) == 0 {
|
||||
fmt.Fprintln(os.Stderr, "testing: warning: no tests to run")
|
||||
return
|
||||
}
|
||||
for _, procs := range cpuList {
|
||||
runtime.GOMAXPROCS(procs)
|
||||
// We build a new channel tree for each run of the loop.
|
||||
// collector merges in one channel all the upstream signals from parallel tests.
|
||||
// If all tests pump to the same channel, a bug can occur where a test
|
||||
// kicks off a goroutine that Fails, yet the test still delivers a completion signal,
|
||||
// which skews the counting.
|
||||
var collector = make(chan interface{})
|
||||
|
||||
numParallel := 0
|
||||
startParallel := make(chan bool)
|
||||
|
||||
for i := 0; i < len(tests); i++ {
|
||||
matched, err := matchString(*match, tests[i].Name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "testing: invalid regexp for -test.run: %s\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
if !matched {
|
||||
continue
|
||||
}
|
||||
testName := tests[i].Name
|
||||
if procs != 1 {
|
||||
testName = fmt.Sprintf("%s-%d", tests[i].Name, procs)
|
||||
}
|
||||
t := &T{
|
||||
common: common{
|
||||
signal: make(chan interface{}),
|
||||
},
|
||||
name: testName,
|
||||
startParallel: startParallel,
|
||||
}
|
||||
t.self = t
|
||||
if *chatty {
|
||||
fmt.Printf("=== RUN %s\n", t.name)
|
||||
}
|
||||
go tRunner(t, &tests[i])
|
||||
out := (<-t.signal).(*T)
|
||||
if out == nil { // Parallel run.
|
||||
go func() {
|
||||
collector <- <-t.signal
|
||||
}()
|
||||
numParallel++
|
||||
continue
|
||||
}
|
||||
t.report()
|
||||
ok = ok && !out.failed
|
||||
}
|
||||
|
||||
running := 0
|
||||
for numParallel+running > 0 {
|
||||
if running < *parallel && numParallel > 0 {
|
||||
startParallel <- true
|
||||
running++
|
||||
numParallel--
|
||||
continue
|
||||
}
|
||||
t := (<-collector).(*T)
|
||||
t.report()
|
||||
ok = ok && !t.failed
|
||||
running--
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// before runs before all testing.
|
||||
func before() {
|
||||
if *memProfileRate > 0 {
|
||||
runtime.MemProfileRate = *memProfileRate
|
||||
}
|
||||
if *cpuProfile != "" {
|
||||
f, err := os.Create(*cpuProfile)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "testing: %s", err)
|
||||
return
|
||||
}
|
||||
if err := pprof.StartCPUProfile(f); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "testing: can't start cpu profile: %s", err)
|
||||
f.Close()
|
||||
return
|
||||
}
|
||||
// Could save f so after can call f.Close; not worth the effort.
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// after runs after all testing.
|
||||
func after() {
|
||||
if *cpuProfile != "" {
|
||||
pprof.StopCPUProfile() // flushes profile to disk
|
||||
}
|
||||
if *memProfile != "" {
|
||||
f, err := os.Create(*memProfile)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "testing: %s", err)
|
||||
return
|
||||
}
|
||||
if err = pprof.WriteHeapProfile(f); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "testing: can't write %s: %s", *memProfile, err)
|
||||
}
|
||||
f.Close()
|
||||
}
|
||||
}
|
||||
|
||||
var timer *time.Timer
|
||||
|
||||
// startAlarm starts an alarm if requested.
|
||||
func startAlarm() {
|
||||
if *timeout > 0 {
|
||||
timer = time.AfterFunc(*timeout, alarm)
|
||||
}
|
||||
}
|
||||
|
||||
// stopAlarm turns off the alarm.
|
||||
func stopAlarm() {
|
||||
if *timeout > 0 {
|
||||
timer.Stop()
|
||||
}
|
||||
}
|
||||
|
||||
// alarm is called if the timeout expires.
|
||||
func alarm() {
|
||||
panic("test timed out")
|
||||
}
|
||||
|
||||
func parseCpuList() {
|
||||
if len(*cpuListStr) == 0 {
|
||||
cpuList = append(cpuList, runtime.GOMAXPROCS(-1))
|
||||
} else {
|
||||
for _, val := range strings.Split(*cpuListStr, ",") {
|
||||
cpu, err := strconv.Atoi(val)
|
||||
if err != nil || cpu <= 0 {
|
||||
fmt.Fprintf(os.Stderr, "testing: invalid value %q for -test.cpu", val)
|
||||
os.Exit(1)
|
||||
}
|
||||
cpuList = append(cpuList, cpu)
|
||||
}
|
||||
}
|
||||
}
|
libgo/go/go/doc/testdata/testing.out (new file)
|
@ -0,0 +1,156 @@
|
|||
// Package testing provides support for automated testing of Go ...
|
||||
PACKAGE testing
|
||||
|
||||
IMPORTPATH
|
||||
testdata/testing
|
||||
|
||||
IMPORTS
|
||||
bytes
|
||||
flag
|
||||
fmt
|
||||
io
|
||||
os
|
||||
runtime
|
||||
runtime/pprof
|
||||
strconv
|
||||
strings
|
||||
time
|
||||
|
||||
FILENAMES
|
||||
testdata/benchmark.go
|
||||
testdata/example.go
|
||||
testdata/testing.go
|
||||
|
||||
FUNCTIONS
|
||||
// An internal function but exported because it is cross-package; ...
|
||||
func Main(matchString func(pat, str string) (bool, error), tests []InternalTest, benchmarks []InternalBenchmark, examples []InternalExample)
|
||||
|
||||
// An internal function but exported because it is cross-package; ...
|
||||
func RunBenchmarks(matchString func(pat, str string) (bool, error), benchmarks []InternalBenchmark)
|
||||
|
||||
//
|
||||
func RunExamples(examples []InternalExample) (ok bool)
|
||||
|
||||
//
|
||||
func RunTests(matchString func(pat, str string) (bool, error), tests []InternalTest) (ok bool)
|
||||
|
||||
// Short reports whether the -test.short flag is set.
|
||||
func Short() bool
|
||||
|
||||
|
||||
TYPES
|
||||
// B is a type passed to Benchmark functions to manage benchmark ...
|
||||
type B struct {
|
||||
N int
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
// Error is equivalent to Log() followed by Fail().
|
||||
func (c *B) Error(args ...interface{})
|
||||
|
||||
// Errorf is equivalent to Logf() followed by Fail().
|
||||
func (c *B) Errorf(format string, args ...interface{})
|
||||
|
||||
// Fail marks the function as having failed but continues ...
|
||||
func (c *B) Fail()
|
||||
|
||||
// FailNow marks the function as having failed and stops its ...
|
||||
func (c *B) FailNow()
|
||||
|
||||
// Failed returns whether the function has failed.
|
||||
func (c *B) Failed() bool
|
||||
|
||||
// Fatal is equivalent to Log() followed by FailNow().
|
||||
func (c *B) Fatal(args ...interface{})
|
||||
|
||||
// Fatalf is equivalent to Logf() followed by FailNow().
|
||||
func (c *B) Fatalf(format string, args ...interface{})
|
||||
|
||||
// Log formats its arguments using default formatting, analogous ...
|
||||
func (c *B) Log(args ...interface{})
|
||||
|
||||
// Logf formats its arguments according to the format, analogous ...
|
||||
func (c *B) Logf(format string, args ...interface{})
|
||||
|
||||
// ResetTimer sets the elapsed benchmark time to zero. It does not ...
|
||||
func (b *B) ResetTimer()
|
||||
|
||||
// SetBytes records the number of bytes processed in a single ...
|
||||
func (b *B) SetBytes(n int64)
|
||||
|
||||
// StartTimer starts timing a test. This function is called ...
|
||||
func (b *B) StartTimer()
|
||||
|
||||
// StopTimer stops timing a test. This can be used to pause the ...
|
||||
func (b *B) StopTimer()
|
||||
|
||||
// The results of a benchmark run.
|
||||
type BenchmarkResult struct {
|
||||
N int // The number of iterations.
|
||||
T time.Duration // The total time taken.
|
||||
Bytes int64 // Bytes processed in one iteration.
|
||||
}
|
||||
|
||||
// Benchmark benchmarks a single function. Useful for creating ...
|
||||
func Benchmark(f func(b *B)) BenchmarkResult
|
||||
|
||||
//
|
||||
func (r BenchmarkResult) NsPerOp() int64
|
||||
|
||||
//
|
||||
func (r BenchmarkResult) String() string
|
||||
|
||||
// An internal type but exported because it is cross-package; part ...
|
||||
type InternalBenchmark struct {
|
||||
Name string
|
||||
F func(b *B)
|
||||
}
|
||||
|
||||
//
|
||||
type InternalExample struct {
|
||||
Name string
|
||||
F func()
|
||||
Output string
|
||||
}
|
||||
|
||||
// An internal type but exported because it is cross-package; part ...
|
||||
type InternalTest struct {
|
||||
Name string
|
||||
F func(*T)
|
||||
}
|
||||
|
||||
// T is a type passed to Test functions to manage test state and ...
|
||||
type T struct {
|
||||
// contains filtered or unexported fields
|
||||
}
|
||||
|
||||
// Error is equivalent to Log() followed by Fail().
|
||||
func (c *T) Error(args ...interface{})
|
||||
|
||||
// Errorf is equivalent to Logf() followed by Fail().
|
||||
func (c *T) Errorf(format string, args ...interface{})
|
||||
|
||||
// Fail marks the function as having failed but continues ...
|
||||
func (c *T) Fail()
|
||||
|
||||
// FailNow marks the function as having failed and stops its ...
|
||||
func (c *T) FailNow()
|
||||
|
||||
// Failed returns whether the function has failed.
|
||||
func (c *T) Failed() bool
|
||||
|
||||
// Fatal is equivalent to Log() followed by FailNow().
|
||||
func (c *T) Fatal(args ...interface{})
|
||||
|
||||
// Fatalf is equivalent to Logf() followed by FailNow().
|
||||
func (c *T) Fatalf(format string, args ...interface{})
|
||||
|
||||
// Log formats its arguments using default formatting, analogous ...
|
||||
func (c *T) Log(args ...interface{})
|
||||
|
||||
// Logf formats its arguments according to the format, analogous ...
|
||||
func (c *T) Logf(format string, args ...interface{})
|
||||
|
||||
// Parallel signals that this test is to be run in parallel with ...
|
||||
func (t *T) Parallel()
|
||||
|
|
@ -135,8 +135,10 @@ func ParseDir(fset *token.FileSet, path string, filter func(os.FileInfo) bool, m
|
|||
//
|
||||
func ParseExpr(x string) (ast.Expr, error) {
|
||||
// parse x within the context of a complete package for correct scopes;
|
||||
// use //line directive for correct positions in error messages
|
||||
file, err := ParseFile(token.NewFileSet(), "", "package p;func _(){_=\n//line :1\n"+x+";}", 0)
|
||||
// use //line directive for correct positions in error messages and put
|
||||
// x alone on a separate line (handles line comments), followed by a ';'
|
||||
// to force an error if the expression is incomplete
|
||||
file, err := ParseFile(token.NewFileSet(), "", "package p;func _(){_=\n//line :1\n"+x+"\n;}", 0)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
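
With x placed on its own line, ParseExpr now tolerates a trailing line comment in its argument; a hedged usage sketch.

package main

import (
	"fmt"
	"go/parser"
)

func main() {
	expr, err := parser.ParseExpr("a + b*c // scaled")
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("%T\n", expr) // *ast.BinaryExpr
}
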
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
// source which can then be tokenized through repeated calls to the Scan
|
||||
// function. Typical use:
|
||||
//
|
||||
// var s Scanner
|
||||
// var s scanner.Scanner
|
||||
// fset := token.NewFileSet() // position information is relative to fset
|
||||
// file := fset.AddFile(filename, fset.Base(), len(src)) // register file
|
||||
// s.Init(file, src, nil /* no error handler */, 0)
|
||||
|
|
|
@ -12,6 +12,9 @@ import (
|
|||
"sync"
|
||||
)
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Positions
|
||||
|
||||
// Position describes an arbitrary source position
|
||||
// including the file, line, and column location.
|
||||
// A Position is valid if the line number is > 0.
|
||||
|
@ -81,84 +84,8 @@ func (p Pos) IsValid() bool {
|
|||
return p != NoPos
|
||||
}
|
||||
|
||||
func searchFiles(a []*File, x int) int {
|
||||
return sort.Search(len(a), func(i int) bool { return a[i].base > x }) - 1
|
||||
}
|
||||
|
||||
func (s *FileSet) file(p Pos) *File {
|
||||
// common case: p is in last file touched
|
||||
if f := s.last; f != nil && f.base <= int(p) && int(p) <= f.base+f.size {
|
||||
return f
|
||||
}
|
||||
// p is not in last file touched - search all files
|
||||
if i := searchFiles(s.files, int(p)); i >= 0 {
|
||||
f := s.files[i]
|
||||
// f.base <= int(p) by definition of searchFiles
|
||||
if int(p) <= f.base+f.size {
|
||||
s.last = f
|
||||
return f
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// File returns the file which contains the position p.
|
||||
// If no such file is found (for instance for p == NoPos),
|
||||
// the result is nil.
|
||||
//
|
||||
func (s *FileSet) File(p Pos) (f *File) {
|
||||
if p != NoPos {
|
||||
s.mutex.RLock()
|
||||
f = s.file(p)
|
||||
s.mutex.RUnlock()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (f *File) position(p Pos) (pos Position) {
|
||||
offset := int(p) - f.base
|
||||
pos.Offset = offset
|
||||
pos.Filename, pos.Line, pos.Column = f.info(offset)
|
||||
return
|
||||
}
|
||||
|
||||
// Position converts a Pos in the fileset into a general Position.
|
||||
func (s *FileSet) Position(p Pos) (pos Position) {
|
||||
if p != NoPos {
|
||||
s.mutex.RLock()
|
||||
if f := s.file(p); f != nil {
|
||||
pos = f.position(p)
|
||||
}
|
||||
s.mutex.RUnlock()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// A lineInfo object describes alternative file and line number
|
||||
// information (such as provided via a //line comment in a .go
|
||||
// file) for a given file offset.
|
||||
type lineInfo struct {
|
||||
// fields are exported to make them accessible to gob
|
||||
Offset int
|
||||
Filename string
|
||||
Line int
|
||||
}
|
||||
|
||||
// AddLineInfo adds alternative file and line number information for
|
||||
// a given file offset. The offset must be larger than the offset for
|
||||
// the previously added alternative line info and smaller than the
|
||||
// file size; otherwise the information is ignored.
|
||||
//
|
||||
// AddLineInfo is typically used to register alternative position
|
||||
// information for //line filename:line comments in source files.
|
||||
//
|
||||
func (f *File) AddLineInfo(offset int, filename string, line int) {
|
||||
f.set.mutex.Lock()
|
||||
if i := len(f.infos); i == 0 || f.infos[i-1].Offset < offset && offset < f.size {
|
||||
f.infos = append(f.infos, lineInfo{offset, filename, line})
|
||||
}
|
||||
f.set.mutex.Unlock()
|
||||
}
|
||||
// -----------------------------------------------------------------------------
|
||||
// File
|
||||
|
||||
// A File is a handle for a file belonging to a FileSet.
|
||||
// A File has a name, size, and line offset table.
|
||||
|
@ -253,6 +180,32 @@ func (f *File) SetLinesForContent(content []byte) {
|
|||
f.set.mutex.Unlock()
|
||||
}
|
||||
|
||||
// A lineInfo object describes alternative file and line number
|
||||
// information (such as provided via a //line comment in a .go
|
||||
// file) for a given file offset.
|
||||
type lineInfo struct {
|
||||
// fields are exported to make them accessible to gob
|
||||
Offset int
|
||||
Filename string
|
||||
Line int
|
||||
}
|
||||
|
||||
// AddLineInfo adds alternative file and line number information for
|
||||
// a given file offset. The offset must be larger than the offset for
|
||||
// the previously added alternative line info and smaller than the
|
||||
// file size; otherwise the information is ignored.
|
||||
//
|
||||
// AddLineInfo is typically used to register alternative position
|
||||
// information for //line filename:line comments in source files.
|
||||
//
|
||||
func (f *File) AddLineInfo(offset int, filename string, line int) {
|
||||
f.set.mutex.Lock()
|
||||
if i := len(f.infos); i == 0 || f.infos[i-1].Offset < offset && offset < f.size {
|
||||
f.infos = append(f.infos, lineInfo{offset, filename, line})
|
||||
}
|
||||
f.set.mutex.Unlock()
|
||||
}
|
||||
|
||||
// Pos returns the Pos value for the given file offset;
|
||||
// the offset must be <= f.Size().
|
||||
// f.Pos(f.Offset(p)) == p.
|
||||
|
@ -283,41 +236,6 @@ func (f *File) Line(p Pos) int {
|
|||
return f.Position(p).Line
|
||||
}
|
||||
|
||||
// Position returns the Position value for the given file position p;
|
||||
// p must be a Pos value in that file or NoPos.
|
||||
//
|
||||
func (f *File) Position(p Pos) (pos Position) {
|
||||
if p != NoPos {
|
||||
if int(p) < f.base || int(p) > f.base+f.size {
|
||||
panic("illegal Pos value")
|
||||
}
|
||||
pos = f.position(p)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func searchInts(a []int, x int) int {
|
||||
// This function body is a manually inlined version of:
|
||||
//
|
||||
// return sort.Search(len(a), func(i int) bool { return a[i] > x }) - 1
|
||||
//
|
||||
// With better compiler optimizations, this may not be needed in the
|
||||
// future, but at the moment this change improves the go/printer
|
||||
// benchmark performance by ~30%. This has a direct impact on the
|
||||
// speed of gofmt and thus seems worthwhile (2011-04-29).
|
||||
i, j := 0, len(a)
|
||||
for i < j {
|
||||
h := i + (j-i)/2 // avoid overflow when computing h
|
||||
// i ≤ h < j
|
||||
if a[h] <= x {
|
||||
i = h + 1
|
||||
} else {
|
||||
j = h
|
||||
}
|
||||
}
|
||||
return i - 1
|
||||
}
|
||||
|
||||
func searchLineInfos(a []lineInfo, x int) int {
|
||||
return sort.Search(len(a), func(i int) bool { return a[i].Offset > x }) - 1
|
||||
}
|
||||
|
@ -341,6 +259,29 @@ func (f *File) info(offset int) (filename string, line, column int) {
|
|||
return
|
||||
}
|
||||
|
||||
func (f *File) position(p Pos) (pos Position) {
|
||||
offset := int(p) - f.base
|
||||
pos.Offset = offset
|
||||
pos.Filename, pos.Line, pos.Column = f.info(offset)
|
||||
return
|
||||
}
|
||||
|
||||
// Position returns the Position value for the given file position p;
|
||||
// p must be a Pos value in that file or NoPos.
|
||||
//
|
||||
func (f *File) Position(p Pos) (pos Position) {
|
||||
if p != NoPos {
|
||||
if int(p) < f.base || int(p) > f.base+f.size {
|
||||
panic("illegal Pos value")
|
||||
}
|
||||
pos = f.position(p)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// FileSet
|
||||
|
||||
// A FileSet represents a set of source files.
|
||||
// Methods of file sets are synchronized; multiple goroutines
|
||||
// may invoke them concurrently.
|
||||
|
@ -404,23 +345,91 @@ func (s *FileSet) AddFile(filename string, base, size int) *File {
|
|||
return f
|
||||
}
|
||||
|
||||
// Files returns the files added to the file set.
|
||||
func (s *FileSet) Files() <-chan *File {
|
||||
ch := make(chan *File)
|
||||
go func() {
|
||||
// Iterate calls f for the files in the file set in the order they were added
|
||||
// until f returns false.
|
||||
//
|
||||
func (s *FileSet) Iterate(f func(*File) bool) {
|
||||
for i := 0; ; i++ {
|
||||
var f *File
|
||||
var file *File
|
||||
s.mutex.RLock()
|
||||
if i < len(s.files) {
|
||||
f = s.files[i]
|
||||
file = s.files[i]
|
||||
}
|
||||
s.mutex.RUnlock()
|
||||
if f == nil {
|
||||
if file == nil || !f(file) {
|
||||
break
|
||||
}
|
||||
ch <- f
|
||||
}
|
||||
close(ch)
|
||||
}()
|
||||
return ch
|
||||
}
|
||||
|
||||
func searchFiles(a []*File, x int) int {
|
||||
return sort.Search(len(a), func(i int) bool { return a[i].base > x }) - 1
|
||||
}
|
||||
|
||||
func (s *FileSet) file(p Pos) *File {
|
||||
// common case: p is in last file
|
||||
if f := s.last; f != nil && f.base <= int(p) && int(p) <= f.base+f.size {
|
||||
return f
|
||||
}
|
||||
// p is not in last file - search all files
|
||||
if i := searchFiles(s.files, int(p)); i >= 0 {
|
||||
f := s.files[i]
|
||||
// f.base <= int(p) by definition of searchFiles
|
||||
if int(p) <= f.base+f.size {
|
||||
s.last = f
|
||||
return f
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// File returns the file that contains the position p.
|
||||
// If no such file is found (for instance for p == NoPos),
|
||||
// the result is nil.
|
||||
//
|
||||
func (s *FileSet) File(p Pos) (f *File) {
|
||||
if p != NoPos {
|
||||
s.mutex.RLock()
|
||||
f = s.file(p)
|
||||
s.mutex.RUnlock()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Position converts a Pos in the fileset into a general Position.
|
||||
func (s *FileSet) Position(p Pos) (pos Position) {
|
||||
if p != NoPos {
|
||||
s.mutex.RLock()
|
||||
if f := s.file(p); f != nil {
|
||||
pos = f.position(p)
|
||||
}
|
||||
s.mutex.RUnlock()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Helper functions
|
||||
|
||||
func searchInts(a []int, x int) int {
|
||||
// This function body is a manually inlined version of:
|
||||
//
|
||||
// return sort.Search(len(a), func(i int) bool { return a[i] > x }) - 1
|
||||
//
|
||||
// With better compiler optimizations, this may not be needed in the
|
||||
// future, but at the moment this change improves the go/printer
|
||||
// benchmark performance by ~30%. This has a direct impact on the
|
||||
// speed of gofmt and thus seems worthwhile (2011-04-29).
|
||||
// TODO(gri): Remove this when compilers have caught up.
|
||||
i, j := 0, len(a)
|
||||
for i < j {
|
||||
h := i + (j-i)/2 // avoid overflow when computing h
|
||||
// i ≤ h < j
|
||||
if a[h] <= x {
|
||||
i = h + 1
|
||||
} else {
|
||||
j = h
|
||||
}
|
||||
}
|
||||
return i - 1
|
||||
}
|
||||
|
|
|
@ -167,12 +167,13 @@ func TestFiles(t *testing.T) {
|
|||
for i, test := range tests {
|
||||
fset.AddFile(test.filename, fset.Base(), test.size)
|
||||
j := 0
|
||||
for g := range fset.Files() {
|
||||
if g.Name() != tests[j].filename {
|
||||
t.Errorf("expected filename = %s; got %s", tests[j].filename, g.Name())
|
||||
fset.Iterate(func(f *File) bool {
|
||||
if f.Name() != tests[j].filename {
|
||||
t.Errorf("expected filename = %s; got %s", tests[j].filename, f.Name())
|
||||
}
|
||||
j++
|
||||
}
|
||||
return true
|
||||
})
|
||||
if j != i+1 {
|
||||
t.Errorf("expected %d files; got %d", i+1, j)
|
||||
}
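
Outside the test, Iterate replaces the old Files channel the same way; a minimal sketch with hypothetical file names and sizes.

package main

import (
	"fmt"
	"go/token"
)

func main() {
	fset := token.NewFileSet()
	fset.AddFile("a.go", fset.Base(), 100)
	fset.AddFile("b.go", fset.Base(), 200)
	fset.Iterate(func(f *token.File) bool {
		fmt.Println(f.Name(), f.Size())
		return true // return false to stop early
	})
}
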
|
||||
|
|
|
@ -38,6 +38,8 @@ func New() hash.Hash32 {
|
|||
|
||||
func (d *digest) Size() int { return Size }
|
||||
|
||||
func (d *digest) BlockSize() int { return 1 }
|
||||
|
||||
// Add p to the running checksum a, b.
|
||||
func update(a, b uint32, p []byte) (aa, bb uint32) {
|
||||
for _, pi := range p {
|
||||
|
|
|
@ -94,6 +94,8 @@ func NewIEEE() hash.Hash32 { return New(IEEETable) }
|
|||
|
||||
func (d *digest) Size() int { return Size }
|
||||
|
||||
func (d *digest) BlockSize() int { return 1 }
|
||||
|
||||
func (d *digest) Reset() { d.crc = 0 }
|
||||
|
||||
func update(crc uint32, tab *Table, p []byte) uint32 {
|
||||
|
|
|
@ -53,6 +53,8 @@ func New(tab *Table) hash.Hash64 { return &digest{0, tab} }
|
|||
|
||||
func (d *digest) Size() int { return Size }
|
||||
|
||||
func (d *digest) BlockSize() int { return 1 }
|
||||
|
||||
func (d *digest) Reset() { d.crc = 0 }
|
||||
|
||||
func update(crc uint64, tab *Table, p []byte) uint64 {
|
||||
|
|
|
@ -104,6 +104,11 @@ func (s *sum32a) Size() int { return 4 }
|
|||
func (s *sum64) Size() int { return 8 }
|
||||
func (s *sum64a) Size() int { return 8 }
|
||||
|
||||
func (s *sum32) BlockSize() int { return 1 }
|
||||
func (s *sum32a) BlockSize() int { return 1 }
|
||||
func (s *sum64) BlockSize() int { return 1 }
|
||||
func (s *sum64a) BlockSize() int { return 1 }
|
||||
|
||||
func (s *sum32) Sum(in []byte) []byte {
|
||||
v := uint32(*s)
|
||||
in = append(in, byte(v>>24))
|
||||
|
|
|
@ -22,6 +22,12 @@ type Hash interface {
|
|||
|
||||
// Size returns the number of bytes Sum will return.
|
||||
Size() int
|
||||
|
||||
// BlockSize returns the hash's underlying block size.
|
||||
// The Write method must be able to accept any amount
|
||||
// of data, but it may operate more efficiently if all writes
|
||||
// are a multiple of the block size.
|
||||
BlockSize() int
|
||||
}
|
||||
|
||||
// Hash32 is the common interface implemented by all 32-bit hash functions.
|
||||
|
|
|
@ -1713,8 +1713,8 @@ func parseForeignContent(p *parser) bool {
|
|||
}
|
||||
if breakout[p.tok.Data] {
|
||||
for i := len(p.oe) - 1; i >= 0; i-- {
|
||||
// TODO: HTML, MathML integration points.
|
||||
if p.oe[i].Namespace == "" {
|
||||
// TODO: MathML integration points.
|
||||
if p.oe[i].Namespace == "" || htmlIntegrationPoint(p.oe[i]) {
|
||||
p.oe = p.oe[:i+1]
|
||||
break
|
||||
}
|
||||
|
|
|
@ -184,7 +184,7 @@ func TestParser(t *testing.T) {
|
|||
{"tests4.dat", -1},
|
||||
{"tests5.dat", -1},
|
||||
{"tests6.dat", -1},
|
||||
{"tests10.dat", 33},
|
||||
{"tests10.dat", 35},
|
||||
}
|
||||
for _, tf := range testFiles {
|
||||
f, err := os.Open("testdata/webkit/" + tf.filename)
|
||||
|
|
|
@ -31,8 +31,8 @@ Example
|
|||
|
||||
import "text/template"
|
||||
...
|
||||
t, err := (&template.Set{}).Parse(`{{define "T"}}Hello, {{.}}!{{end}}`)
|
||||
err = t.Execute(out, "T", "<script>alert('you have been pwned')</script>")
|
||||
t, err := template.New("foo").Parse(`{{define "T"}}Hello, {{.}}!{{end}}`)
|
||||
err = t.ExecuteTemplate(out, "T", "<script>alert('you have been pwned')</script>")
|
||||
|
||||
produces
|
||||
|
||||
|
@ -42,12 +42,12 @@ but with contextual autoescaping,
|
|||
|
||||
import "html/template"
|
||||
...
|
||||
t, err := (&template.Set{}).Parse(`{{define "T"}}Hello, {{.}}!{{end}}`)
|
||||
err = t.Execute(out, "T", "<script>alert('you have been pwned')</script>")
|
||||
t, err := template.New("foo").Parse(`{{define "T"}}Hello, {{.}}!{{end}}`)
|
||||
err = t.ExecuteTemplate(out, "T", "<script>alert('you have been pwned')</script>")
|
||||
|
||||
produces safe, escaped HTML output
|
||||
|
||||
Hello, <script>alert('you have been pwned')</script>!
|
||||
Hello, <script>alert('you have been pwned')</script>!
|
||||
|
||||
|
||||
Contexts
|
||||
|
@ -57,8 +57,8 @@ functions to each simple action pipeline, so given the excerpt
|
|||
|
||||
<a href="/search?q={{.}}">{{.}}</a>
|
||||
|
||||
At parse time each {{.}} is overwritten to add escaping functions as necessary,
|
||||
in this case,
|
||||
At parse time each {{.}} is overwritten to add escaping functions as necessary.
|
||||
In this case it becomes
|
||||
|
||||
<a href="/search?q={{. | urlquery}}">{{. | html}}</a>
|
||||
|
||||
|
|
|
@ -899,7 +899,7 @@ func TestErrors(t *testing.T) {
|
|||
},
|
||||
{
|
||||
`<a href="{{if .F}}/foo?a={{else}}/bar/{{end}}{{.H}}">`,
|
||||
"z:1: (action: [(command: [F=[H]])]) appears in an ambiguous URL context",
|
||||
"z:1: {{.H}} appears in an ambiguous URL context",
|
||||
},
|
||||
{
|
||||
`<a onclick="alert('Hello \`,
|
||||
|
@ -1490,62 +1490,62 @@ func TestEnsurePipelineContains(t *testing.T) {
|
|||
}{
|
||||
{
|
||||
"{{.X}}",
|
||||
"[(command: [F=[X]])]",
|
||||
".X",
|
||||
[]string{},
|
||||
},
|
||||
{
|
||||
"{{.X | html}}",
|
||||
"[(command: [F=[X]]) (command: [I=html])]",
|
||||
".X | html",
|
||||
[]string{},
|
||||
},
|
||||
{
|
||||
"{{.X}}",
|
||||
"[(command: [F=[X]]) (command: [I=html])]",
|
||||
".X | html",
|
||||
[]string{"html"},
|
||||
},
|
||||
{
|
||||
"{{.X | html}}",
|
||||
"[(command: [F=[X]]) (command: [I=html]) (command: [I=urlquery])]",
|
||||
".X | html | urlquery",
|
||||
[]string{"urlquery"},
|
||||
},
|
||||
{
|
||||
"{{.X | html | urlquery}}",
|
||||
"[(command: [F=[X]]) (command: [I=html]) (command: [I=urlquery])]",
|
||||
".X | html | urlquery",
|
||||
[]string{"urlquery"},
|
||||
},
|
||||
{
|
||||
"{{.X | html | urlquery}}",
|
||||
"[(command: [F=[X]]) (command: [I=html]) (command: [I=urlquery])]",
|
||||
".X | html | urlquery",
|
||||
[]string{"html", "urlquery"},
|
||||
},
|
||||
{
|
||||
"{{.X | html | urlquery}}",
|
||||
"[(command: [F=[X]]) (command: [I=html]) (command: [I=urlquery])]",
|
||||
".X | html | urlquery",
|
||||
[]string{"html"},
|
||||
},
|
||||
{
|
||||
"{{.X | urlquery}}",
|
||||
"[(command: [F=[X]]) (command: [I=html]) (command: [I=urlquery])]",
|
||||
".X | html | urlquery",
|
||||
[]string{"html", "urlquery"},
|
||||
},
|
||||
{
|
||||
"{{.X | html | print}}",
|
||||
"[(command: [F=[X]]) (command: [I=urlquery]) (command: [I=html]) (command: [I=print])]",
|
||||
".X | urlquery | html | print",
|
||||
[]string{"urlquery", "html"},
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
for i, test := range tests {
|
||||
tmpl := template.Must(template.New("test").Parse(test.input))
|
||||
action, ok := (tmpl.Tree.Root.Nodes[0].(*parse.ActionNode))
|
||||
if !ok {
|
||||
t.Errorf("First node is not an action: %s", test.input)
|
||||
t.Errorf("#%d: First node is not an action: %s", i, test.input)
|
||||
continue
|
||||
}
|
||||
pipe := action.Pipe
|
||||
ensurePipelineContains(pipe, test.ids)
|
||||
got := pipe.String()
|
||||
if got != test.output {
|
||||
t.Errorf("%s, %v: want\n\t%s\ngot\n\t%s", test.input, test.ids, test.output, got)
|
||||
t.Errorf("#%d: %s, %v: want\n\t%s\ngot\n\t%s", i, test.input, test.ids, test.output, got)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -152,26 +152,35 @@ func (m *modelFunc) Convert(c Color) Color {
|
|||
return m.f(c)
|
||||
}
|
||||
|
||||
// RGBAModel is the Model for RGBA colors.
|
||||
var RGBAModel Model = ModelFunc(func(c Color) Color {
|
||||
// Models for the standard color types.
|
||||
var (
|
||||
RGBAModel Model = ModelFunc(rgbaModel)
|
||||
RGBA64Model Model = ModelFunc(rgba64Model)
|
||||
NRGBAModel Model = ModelFunc(nrgbaModel)
|
||||
NRGBA64Model Model = ModelFunc(nrgba64Model)
|
||||
AlphaModel Model = ModelFunc(alphaModel)
|
||||
Alpha16Model Model = ModelFunc(alpha16Model)
|
||||
GrayModel Model = ModelFunc(grayModel)
|
||||
Gray16Model Model = ModelFunc(gray16Model)
|
||||
)
|
||||
|
||||
func rgbaModel(c Color) Color {
|
||||
if _, ok := c.(RGBA); ok {
|
||||
return c
|
||||
}
|
||||
r, g, b, a := c.RGBA()
|
||||
return RGBA{uint8(r >> 8), uint8(g >> 8), uint8(b >> 8), uint8(a >> 8)}
|
||||
})
|
||||
}
|
||||
|
||||
// RGBAModel is the Model for RGBA64 colors.
|
||||
var RGBA64Model Model = ModelFunc(func(c Color) Color {
|
||||
func rgba64Model(c Color) Color {
|
||||
if _, ok := c.(RGBA64); ok {
|
||||
return c
|
||||
}
|
||||
r, g, b, a := c.RGBA()
|
||||
return RGBA64{uint16(r), uint16(g), uint16(b), uint16(a)}
|
||||
})
|
||||
}
|
||||
|
||||
// NRGBAModel is the Model for NRGBA colors.
|
||||
var NRGBAModel Model = ModelFunc(func(c Color) Color {
|
||||
func nrgbaModel(c Color) Color {
|
||||
if _, ok := c.(NRGBA); ok {
|
||||
return c
|
||||
}
|
||||
|
@ -187,10 +196,9 @@ var NRGBAModel Model = ModelFunc(func(c Color) Color {
|
|||
g = (g * 0xffff) / a
|
||||
b = (b * 0xffff) / a
|
||||
return NRGBA{uint8(r >> 8), uint8(g >> 8), uint8(b >> 8), uint8(a >> 8)}
|
||||
})
|
||||
}
|
||||
|
||||
// NRGBAModel is the Model for NRGBA64 colors.
|
||||
var NRGBA64Model Model = ModelFunc(func(c Color) Color {
|
||||
func nrgba64Model(c Color) Color {
|
||||
if _, ok := c.(NRGBA64); ok {
|
||||
return c
|
||||
}
|
||||
|
@ -206,45 +214,41 @@ var NRGBA64Model Model = ModelFunc(func(c Color) Color {
|
|||
g = (g * 0xffff) / a
|
||||
b = (b * 0xffff) / a
|
||||
return NRGBA64{uint16(r), uint16(g), uint16(b), uint16(a)}
|
||||
})
|
||||
}
|
||||
|
||||
// AlphaModel is the Model for Alpha colors.
|
||||
var AlphaModel Model = ModelFunc(func(c Color) Color {
|
||||
func alphaModel(c Color) Color {
|
||||
if _, ok := c.(Alpha); ok {
|
||||
return c
|
||||
}
|
||||
_, _, _, a := c.RGBA()
|
||||
return Alpha{uint8(a >> 8)}
|
||||
})
|
||||
}
|
||||
|
||||
// Alpha16Model is the Model for Alpha16 colors.
|
||||
var Alpha16Model Model = ModelFunc(func(c Color) Color {
|
||||
func alpha16Model(c Color) Color {
|
||||
if _, ok := c.(Alpha16); ok {
|
||||
return c
|
||||
}
|
||||
_, _, _, a := c.RGBA()
|
||||
return Alpha16{uint16(a)}
|
||||
})
|
||||
}
|
||||
|
||||
// GrayModel is the Model for Gray colors.
|
||||
var GrayModel Model = ModelFunc(func(c Color) Color {
|
||||
func grayModel(c Color) Color {
|
||||
if _, ok := c.(Gray); ok {
|
||||
return c
|
||||
}
|
||||
r, g, b, _ := c.RGBA()
|
||||
y := (299*r + 587*g + 114*b + 500) / 1000
|
||||
return Gray{uint8(y >> 8)}
|
||||
})
|
||||
}
|
||||
|
||||
// Gray16Model is the Model for Gray16 colors.
|
||||
var Gray16Model Model = ModelFunc(func(c Color) Color {
|
||||
func gray16Model(c Color) Color {
|
||||
if _, ok := c.(Gray16); ok {
|
||||
return c
|
||||
}
|
||||
r, g, b, _ := c.RGBA()
|
||||
y := (299*r + 587*g + 114*b + 500) / 1000
|
||||
return Gray16{uint16(y)}
|
||||
})
|
||||
}
|
||||
|
||||
// Palette is a palette of colors.
|
||||
type Palette []Color
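
The ModelFunc-backed models above convert exactly as the old closures did; a brief sketch using GrayModel.

package main

import (
	"fmt"
	"image/color"
)

func main() {
	c := color.RGBA{R: 200, G: 100, B: 50, A: 255}
	g := color.GrayModel.Convert(c).(color.Gray)
	fmt.Println(g.Y) // luma computed by grayModel above
}
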
|
||||
|
@ -290,13 +294,10 @@ func (p Palette) Index(c Color) int {
|
|||
return ret
|
||||
}
|
||||
|
||||
// Standard colors.
|
||||
var (
|
||||
// Black is an opaque black Color.
|
||||
Black = Gray16{0}
|
||||
// White is an opaque white Color.
|
||||
White = Gray16{0xffff}
|
||||
// Transparent is a fully transparent Color.
|
||||
Transparent = Alpha16{0}
|
||||
// Opaque is a fully opaque Color.
|
||||
Opaque = Alpha16{0xffff}
|
||||
)
|
||||
|
|
|
@ -4,8 +4,7 @@
|
|||
|
||||
package color
|
||||
|
||||
// RGBToYCbCr converts an RGB triple to a Y'CbCr triple. All components lie
|
||||
// within the range [0, 255].
|
||||
// RGBToYCbCr converts an RGB triple to a Y'CbCr triple.
|
||||
func RGBToYCbCr(r, g, b uint8) (uint8, uint8, uint8) {
|
||||
// The JFIF specification says:
|
||||
// Y' = 0.2990*R + 0.5870*G + 0.1140*B
|
||||
|
@ -36,8 +35,7 @@ func RGBToYCbCr(r, g, b uint8) (uint8, uint8, uint8) {
|
|||
return uint8(yy), uint8(cb), uint8(cr)
|
||||
}
|
||||
|
||||
// YCbCrToRGB converts a Y'CbCr triple to an RGB triple. All components lie
|
||||
// within the range [0, 255].
|
||||
// YCbCrToRGB converts a Y'CbCr triple to an RGB triple.
|
||||
func YCbCrToRGB(y, cb, cr uint8) (uint8, uint8, uint8) {
|
||||
// The JFIF specification says:
|
||||
// R = Y' + 1.40200*(Cr-128)
|
||||
|
@ -89,11 +87,13 @@ func (c YCbCr) RGBA() (uint32, uint32, uint32, uint32) {
|
|||
}
|
||||
|
||||
// YCbCrModel is the Model for Y'CbCr colors.
|
||||
var YCbCrModel Model = ModelFunc(func(c Color) Color {
|
||||
var YCbCrModel Model = ModelFunc(yCbCrModel)
|
||||
|
||||
func yCbCrModel(c Color) Color {
|
||||
if _, ok := c.(YCbCr); ok {
|
||||
return c
|
||||
}
|
||||
r, g, b, _ := c.RGBA()
|
||||
y, u, v := RGBToYCbCr(uint8(r>>8), uint8(g>>8), uint8(b>>8))
|
||||
return YCbCr{y, u, v}
|
||||
})
|
||||
}
|
||||
|
|
|
@ -171,7 +171,7 @@ func drawFillOver(dst *image.RGBA, r image.Rectangle, src *image.Uniform) {
|
|||
sr, sg, sb, sa := src.RGBA()
|
||||
// The 0x101 is here for the same reason as in drawRGBA.
|
||||
a := (m - sa) * 0x101
|
||||
i0 := (r.Min.Y-dst.Rect.Min.Y)*dst.Stride + (r.Min.X-dst.Rect.Min.X)*4
|
||||
i0 := dst.PixOffset(r.Min.X, r.Min.Y)
|
||||
i1 := i0 + r.Dx()*4
|
||||
for y := r.Min.Y; y != r.Max.Y; y++ {
|
||||
for i := i0; i < i1; i += 4 {
|
||||
|
@ -195,7 +195,7 @@ func drawFillSrc(dst *image.RGBA, r image.Rectangle, src *image.Uniform) {
|
|||
// The built-in copy function is faster than a straightforward for loop to fill the destination with
|
||||
// the color, but copy requires a slice source. We therefore use a for loop to fill the first row, and
|
||||
// then use the first row as the slice source for the remaining rows.
|
||||
i0 := (r.Min.Y-dst.Rect.Min.Y)*dst.Stride + (r.Min.X-dst.Rect.Min.X)*4
|
||||
i0 := dst.PixOffset(r.Min.X, r.Min.Y)
|
||||
i1 := i0 + r.Dx()*4
|
||||
for i := i0; i < i1; i += 4 {
|
||||
dst.Pix[i+0] = uint8(sr >> 8)
|
||||
|
@ -213,8 +213,8 @@ func drawFillSrc(dst *image.RGBA, r image.Rectangle, src *image.Uniform) {
|
|||
|
||||
func drawCopyOver(dst *image.RGBA, r image.Rectangle, src *image.RGBA, sp image.Point) {
|
||||
dx, dy := r.Dx(), r.Dy()
|
||||
d0 := (r.Min.Y-dst.Rect.Min.Y)*dst.Stride + (r.Min.X-dst.Rect.Min.X)*4
|
||||
s0 := (sp.Y-src.Rect.Min.Y)*src.Stride + (sp.X-src.Rect.Min.X)*4
|
||||
d0 := dst.PixOffset(r.Min.X, r.Min.Y)
|
||||
s0 := src.PixOffset(sp.X, sp.Y)
|
||||
var (
|
||||
ddelta, sdelta int
|
||||
i0, i1, idelta int
|
||||
|
@ -261,8 +261,8 @@ func drawCopyOver(dst *image.RGBA, r image.Rectangle, src *image.RGBA, sp image.
|
|||
|
||||
func drawCopySrc(dst *image.RGBA, r image.Rectangle, src *image.RGBA, sp image.Point) {
|
||||
n, dy := 4*r.Dx(), r.Dy()
|
||||
d0 := (r.Min.Y-dst.Rect.Min.Y)*dst.Stride + (r.Min.X-dst.Rect.Min.X)*4
|
||||
s0 := (sp.Y-src.Rect.Min.Y)*src.Stride + (sp.X-src.Rect.Min.X)*4
|
||||
d0 := dst.PixOffset(r.Min.X, r.Min.Y)
|
||||
s0 := src.PixOffset(sp.X, sp.Y)
|
||||
var ddelta, sdelta int
|
||||
if r.Min.Y <= sp.Y {
|
||||
ddelta = dst.Stride
|
||||
|
@ -348,9 +348,6 @@ func drawNRGBASrc(dst *image.RGBA, r image.Rectangle, src *image.NRGBA, sp image
|
|||
func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *image.YCbCr, sp image.Point) {
|
||||
// An image.YCbCr is always fully opaque, and so if the mask is implicitly nil
|
||||
// (i.e. fully opaque) then the op is effectively always Src.
|
||||
var (
|
||||
yy, cb, cr uint8
|
||||
)
|
||||
x0 := (r.Min.X - dst.Rect.Min.X) * 4
|
||||
x1 := (r.Max.X - dst.Rect.Min.X) * 4
|
||||
y0 := r.Min.Y - dst.Rect.Min.Y
|
||||
|
@ -359,12 +356,11 @@ func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *image.YCbCr, sp image.Po
|
|||
case image.YCbCrSubsampleRatio422:
|
||||
for y, sy := y0, sp.Y; y != y1; y, sy = y+1, sy+1 {
|
||||
dpix := dst.Pix[y*dst.Stride:]
|
||||
for x, sx := x0, sp.X; x != x1; x, sx = x+4, sx+1 {
|
||||
i := sx / 2
|
||||
yy = src.Y[sy*src.YStride+sx]
|
||||
cb = src.Cb[sy*src.CStride+i]
|
||||
cr = src.Cr[sy*src.CStride+i]
|
||||
rr, gg, bb := color.YCbCrToRGB(yy, cb, cr)
|
||||
yi := (sy-src.Rect.Min.Y)*src.YStride + (sp.X - src.Rect.Min.X)
|
||||
ciBase := (sy-src.Rect.Min.Y)*src.CStride - src.Rect.Min.X/2
|
||||
for x, sx := x0, sp.X; x != x1; x, sx, yi = x+4, sx+1, yi+1 {
|
||||
ci := ciBase + sx/2
|
||||
rr, gg, bb := color.YCbCrToRGB(src.Y[yi], src.Cb[ci], src.Cr[ci])
|
||||
dpix[x+0] = rr
|
||||
dpix[x+1] = gg
|
||||
dpix[x+2] = bb
|
||||
|
@ -374,12 +370,11 @@ func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *image.YCbCr, sp image.Po
|
|||
case image.YCbCrSubsampleRatio420:
|
||||
for y, sy := y0, sp.Y; y != y1; y, sy = y+1, sy+1 {
|
||||
dpix := dst.Pix[y*dst.Stride:]
|
||||
for x, sx := x0, sp.X; x != x1; x, sx = x+4, sx+1 {
|
||||
i, j := sx/2, sy/2
|
||||
yy = src.Y[sy*src.YStride+sx]
|
||||
cb = src.Cb[j*src.CStride+i]
|
||||
cr = src.Cr[j*src.CStride+i]
|
||||
rr, gg, bb := color.YCbCrToRGB(yy, cb, cr)
|
||||
yi := (sy-src.Rect.Min.Y)*src.YStride + (sp.X - src.Rect.Min.X)
|
||||
ciBase := (sy/2-src.Rect.Min.Y/2)*src.CStride - src.Rect.Min.X/2
|
||||
for x, sx := x0, sp.X; x != x1; x, sx, yi = x+4, sx+1, yi+1 {
|
||||
ci := ciBase + sx/2
|
||||
rr, gg, bb := color.YCbCrToRGB(src.Y[yi], src.Cb[ci], src.Cr[ci])
|
||||
dpix[x+0] = rr
|
||||
dpix[x+1] = gg
|
||||
dpix[x+2] = bb
|
||||
|
@ -390,11 +385,10 @@ func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *image.YCbCr, sp image.Po
|
|||
// Default to 4:4:4 subsampling.
|
||||
for y, sy := y0, sp.Y; y != y1; y, sy = y+1, sy+1 {
|
||||
dpix := dst.Pix[y*dst.Stride:]
|
||||
for x, sx := x0, sp.X; x != x1; x, sx = x+4, sx+1 {
|
||||
yy = src.Y[sy*src.YStride+sx]
|
||||
cb = src.Cb[sy*src.CStride+sx]
|
||||
cr = src.Cr[sy*src.CStride+sx]
|
||||
rr, gg, bb := color.YCbCrToRGB(yy, cb, cr)
|
||||
yi := (sy-src.Rect.Min.Y)*src.YStride + (sp.X - src.Rect.Min.X)
|
||||
ci := (sy-src.Rect.Min.Y)*src.CStride + (sp.X - src.Rect.Min.X)
|
||||
for x := x0; x != x1; x, yi, ci = x+4, yi+1, ci+1 {
|
||||
rr, gg, bb := color.YCbCrToRGB(src.Y[yi], src.Cb[ci], src.Cr[ci])
|
||||
dpix[x+0] = rr
|
||||
dpix[x+1] = gg
|
||||
dpix[x+2] = bb
|
||||
|
@ -405,9 +399,9 @@ func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *image.YCbCr, sp image.Po
|
|||
}
|
||||
|
||||
func drawGlyphOver(dst *image.RGBA, r image.Rectangle, src *image.Uniform, mask *image.Alpha, mp image.Point) {
|
||||
i0 := (r.Min.Y-dst.Rect.Min.Y)*dst.Stride + (r.Min.X-dst.Rect.Min.X)*4
|
||||
i0 := dst.PixOffset(r.Min.X, r.Min.Y)
|
||||
i1 := i0 + r.Dx()*4
|
||||
mi0 := (mp.Y-mask.Rect.Min.Y)*mask.Stride + mp.X - mask.Rect.Min.X
|
||||
mi0 := mask.PixOffset(mp.X, mp.Y)
|
||||
sr, sg, sb, sa := src.RGBA()
|
||||
for y, my := r.Min.Y, mp.Y; y != r.Max.Y; y, my = y+1, my+1 {
|
||||
for i, mi := i0, mi0; i < i1; i, mi = i+4, mi+1 {
|
||||
|
@ -451,7 +445,7 @@ func drawRGBA(dst *image.RGBA, r image.Rectangle, src image.Image, sp image.Poin
|
|||
sx0 := sp.X + x0 - r.Min.X
|
||||
mx0 := mp.X + x0 - r.Min.X
|
||||
sx1 := sx0 + (x1 - x0)
|
||||
i0 := (y0-dst.Rect.Min.Y)*dst.Stride + (x0-dst.Rect.Min.X)*4
|
||||
i0 := dst.PixOffset(x0, y0)
|
||||
di := dx * 4
|
||||
for y := y0; y != y1; y, sy, my = y+dy, sy+dy, my+dy {
|
||||
for i, sx, mx := i0, sx0, mx0; sx != sx1; i, sx, mx = i+di, sx+dx, mx+dx {
|
||||
|
|
|
@ -112,7 +112,7 @@ func (r Rectangle) Add(p Point) Rectangle {
|
|||
}
|
||||
}
|
||||
|
||||
// Add returns the rectangle r translated by -p.
|
||||
// Sub returns the rectangle r translated by -p.
|
||||
func (r Rectangle) Sub(p Point) Rectangle {
|
||||
return Rectangle{
|
||||
Point{r.Min.X - p.X, r.Min.Y - p.Y},
|
||||
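This hunk only corrects the doc comment: Sub had been documented with Add's text. A tiny illustrative program (coordinates are arbitrary) showing the two translations:

package main

import (
	"fmt"
	"image"
)

func main() {
	r := image.Rect(1, 2, 5, 6)
	p := image.Pt(10, 20)
	fmt.Println(r.Add(p)) // (11,22)-(15,26): r translated by +p
	fmt.Println(r.Sub(p)) // (-9,-18)-(-5,-14): r translated by -p
}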
|
|
|
@ -61,15 +61,21 @@ func (p *RGBA) At(x, y int) color.Color {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return color.RGBA{}
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*4
|
||||
i := p.PixOffset(x, y)
|
||||
return color.RGBA{p.Pix[i+0], p.Pix[i+1], p.Pix[i+2], p.Pix[i+3]}
|
||||
}
|
||||
|
||||
// PixOffset returns the index of the first element of Pix that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *RGBA) PixOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*4
|
||||
}
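PixOffset is new in this release; it centralizes the Pix index arithmetic that At, Set and SubImage used to repeat inline, as the surrounding hunks show. A small usage sketch (the bounds and coordinates are arbitrary):

package main

import (
	"fmt"
	"image"
	"image/color"
)

func main() {
	// Bounds need not start at the origin; PixOffset accounts for Rect.Min.
	m := image.NewRGBA(image.Rect(10, 20, 30, 40))
	i := m.PixOffset(12, 21)
	m.Pix[i+0], m.Pix[i+1], m.Pix[i+2], m.Pix[i+3] = 0xff, 0x00, 0x00, 0xff
	fmt.Println(m.At(12, 21) == color.RGBA{0xff, 0x00, 0x00, 0xff}) // true
}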
|
||||
|
||||
func (p *RGBA) Set(x, y int, c color.Color) {
|
||||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*4
|
||||
i := p.PixOffset(x, y)
|
||||
c1 := color.RGBAModel.Convert(c).(color.RGBA)
|
||||
p.Pix[i+0] = c1.R
|
||||
p.Pix[i+1] = c1.G
|
||||
|
@ -81,7 +87,7 @@ func (p *RGBA) SetRGBA(x, y int, c color.RGBA) {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*4
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i+0] = c.R
|
||||
p.Pix[i+1] = c.G
|
||||
p.Pix[i+2] = c.B
|
||||
|
@ -98,7 +104,7 @@ func (p *RGBA) SubImage(r Rectangle) Image {
|
|||
if r.Empty() {
|
||||
return &RGBA{}
|
||||
}
|
||||
i := (r.Min.Y-p.Rect.Min.Y)*p.Stride + (r.Min.X-p.Rect.Min.X)*4
|
||||
i := p.PixOffset(r.Min.X, r.Min.Y)
|
||||
return &RGBA{
|
||||
Pix: p.Pix[i:],
|
||||
Stride: p.Stride,
|
||||
|
@ -124,7 +130,7 @@ func (p *RGBA) Opaque() bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// NewRGBA returns a new RGBA with the given width and height.
|
||||
// NewRGBA returns a new RGBA with the given bounds.
|
||||
func NewRGBA(r Rectangle) *RGBA {
|
||||
w, h := r.Dx(), r.Dy()
|
||||
buf := make([]uint8, 4*w*h)
|
||||
|
@ -150,7 +156,7 @@ func (p *RGBA64) At(x, y int) color.Color {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return color.RGBA64{}
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*8
|
||||
i := p.PixOffset(x, y)
|
||||
return color.RGBA64{
|
||||
uint16(p.Pix[i+0])<<8 | uint16(p.Pix[i+1]),
|
||||
uint16(p.Pix[i+2])<<8 | uint16(p.Pix[i+3]),
|
||||
|
@ -159,11 +165,17 @@ func (p *RGBA64) At(x, y int) color.Color {
|
|||
}
|
||||
}
|
||||
|
||||
// PixOffset returns the index of the first element of Pix that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *RGBA64) PixOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*8
|
||||
}
|
||||
|
||||
func (p *RGBA64) Set(x, y int, c color.Color) {
|
||||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*8
|
||||
i := p.PixOffset(x, y)
|
||||
c1 := color.RGBA64Model.Convert(c).(color.RGBA64)
|
||||
p.Pix[i+0] = uint8(c1.R >> 8)
|
||||
p.Pix[i+1] = uint8(c1.R)
|
||||
|
@ -179,7 +191,7 @@ func (p *RGBA64) SetRGBA64(x, y int, c color.RGBA64) {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*8
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i+0] = uint8(c.R >> 8)
|
||||
p.Pix[i+1] = uint8(c.R)
|
||||
p.Pix[i+2] = uint8(c.G >> 8)
|
||||
|
@ -200,7 +212,7 @@ func (p *RGBA64) SubImage(r Rectangle) Image {
|
|||
if r.Empty() {
|
||||
return &RGBA64{}
|
||||
}
|
||||
i := (r.Min.Y-p.Rect.Min.Y)*p.Stride + (r.Min.X-p.Rect.Min.X)*8
|
||||
i := p.PixOffset(r.Min.X, r.Min.Y)
|
||||
return &RGBA64{
|
||||
Pix: p.Pix[i:],
|
||||
Stride: p.Stride,
|
||||
|
@ -226,7 +238,7 @@ func (p *RGBA64) Opaque() bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// NewRGBA64 returns a new RGBA64 with the given width and height.
|
||||
// NewRGBA64 returns a new RGBA64 with the given bounds.
|
||||
func NewRGBA64(r Rectangle) *RGBA64 {
|
||||
w, h := r.Dx(), r.Dy()
|
||||
pix := make([]uint8, 8*w*h)
|
||||
|
@ -252,15 +264,21 @@ func (p *NRGBA) At(x, y int) color.Color {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return color.NRGBA{}
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*4
|
||||
i := p.PixOffset(x, y)
|
||||
return color.NRGBA{p.Pix[i+0], p.Pix[i+1], p.Pix[i+2], p.Pix[i+3]}
|
||||
}
|
||||
|
||||
// PixOffset returns the index of the first element of Pix that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *NRGBA) PixOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*4
|
||||
}
|
||||
|
||||
func (p *NRGBA) Set(x, y int, c color.Color) {
|
||||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*4
|
||||
i := p.PixOffset(x, y)
|
||||
c1 := color.NRGBAModel.Convert(c).(color.NRGBA)
|
||||
p.Pix[i+0] = c1.R
|
||||
p.Pix[i+1] = c1.G
|
||||
|
@ -272,7 +290,7 @@ func (p *NRGBA) SetNRGBA(x, y int, c color.NRGBA) {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*4
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i+0] = c.R
|
||||
p.Pix[i+1] = c.G
|
||||
p.Pix[i+2] = c.B
|
||||
|
@ -289,7 +307,7 @@ func (p *NRGBA) SubImage(r Rectangle) Image {
|
|||
if r.Empty() {
|
||||
return &NRGBA{}
|
||||
}
|
||||
i := (r.Min.Y-p.Rect.Min.Y)*p.Stride + (r.Min.X-p.Rect.Min.X)*4
|
||||
i := p.PixOffset(r.Min.X, r.Min.Y)
|
||||
return &NRGBA{
|
||||
Pix: p.Pix[i:],
|
||||
Stride: p.Stride,
|
||||
|
@ -315,7 +333,7 @@ func (p *NRGBA) Opaque() bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// NewNRGBA returns a new NRGBA with the given width and height.
|
||||
// NewNRGBA returns a new NRGBA with the given bounds.
|
||||
func NewNRGBA(r Rectangle) *NRGBA {
|
||||
w, h := r.Dx(), r.Dy()
|
||||
pix := make([]uint8, 4*w*h)
|
||||
|
@ -341,7 +359,7 @@ func (p *NRGBA64) At(x, y int) color.Color {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return color.NRGBA64{}
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*8
|
||||
i := p.PixOffset(x, y)
|
||||
return color.NRGBA64{
|
||||
uint16(p.Pix[i+0])<<8 | uint16(p.Pix[i+1]),
|
||||
uint16(p.Pix[i+2])<<8 | uint16(p.Pix[i+3]),
|
||||
|
@ -350,11 +368,17 @@ func (p *NRGBA64) At(x, y int) color.Color {
|
|||
}
|
||||
}
|
||||
|
||||
// PixOffset returns the index of the first element of Pix that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *NRGBA64) PixOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*8
|
||||
}
|
||||
|
||||
func (p *NRGBA64) Set(x, y int, c color.Color) {
|
||||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*8
|
||||
i := p.PixOffset(x, y)
|
||||
c1 := color.NRGBA64Model.Convert(c).(color.NRGBA64)
|
||||
p.Pix[i+0] = uint8(c1.R >> 8)
|
||||
p.Pix[i+1] = uint8(c1.R)
|
||||
|
@ -370,7 +394,7 @@ func (p *NRGBA64) SetNRGBA64(x, y int, c color.NRGBA64) {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*8
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i+0] = uint8(c.R >> 8)
|
||||
p.Pix[i+1] = uint8(c.R)
|
||||
p.Pix[i+2] = uint8(c.G >> 8)
|
||||
|
@ -391,7 +415,7 @@ func (p *NRGBA64) SubImage(r Rectangle) Image {
|
|||
if r.Empty() {
|
||||
return &NRGBA64{}
|
||||
}
|
||||
i := (r.Min.Y-p.Rect.Min.Y)*p.Stride + (r.Min.X-p.Rect.Min.X)*8
|
||||
i := p.PixOffset(r.Min.X, r.Min.Y)
|
||||
return &NRGBA64{
|
||||
Pix: p.Pix[i:],
|
||||
Stride: p.Stride,
|
||||
|
@ -417,7 +441,7 @@ func (p *NRGBA64) Opaque() bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// NewNRGBA64 returns a new NRGBA64 with the given width and height.
|
||||
// NewNRGBA64 returns a new NRGBA64 with the given bounds.
|
||||
func NewNRGBA64(r Rectangle) *NRGBA64 {
|
||||
w, h := r.Dx(), r.Dy()
|
||||
pix := make([]uint8, 8*w*h)
|
||||
|
@ -443,15 +467,21 @@ func (p *Alpha) At(x, y int) color.Color {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return color.Alpha{}
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x - p.Rect.Min.X)
|
||||
i := p.PixOffset(x, y)
|
||||
return color.Alpha{p.Pix[i]}
|
||||
}
|
||||
|
||||
// PixOffset returns the index of the first element of Pix that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *Alpha) PixOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*1
|
||||
}
|
||||
|
||||
func (p *Alpha) Set(x, y int, c color.Color) {
|
||||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x - p.Rect.Min.X)
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i] = color.AlphaModel.Convert(c).(color.Alpha).A
|
||||
}
|
||||
|
||||
|
@ -459,7 +489,7 @@ func (p *Alpha) SetAlpha(x, y int, c color.Alpha) {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x - p.Rect.Min.X)
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i] = c.A
|
||||
}
|
||||
|
||||
|
@ -473,7 +503,7 @@ func (p *Alpha) SubImage(r Rectangle) Image {
|
|||
if r.Empty() {
|
||||
return &Alpha{}
|
||||
}
|
||||
i := (r.Min.Y-p.Rect.Min.Y)*p.Stride + (r.Min.X-p.Rect.Min.X)*1
|
||||
i := p.PixOffset(r.Min.X, r.Min.Y)
|
||||
return &Alpha{
|
||||
Pix: p.Pix[i:],
|
||||
Stride: p.Stride,
|
||||
|
@ -499,7 +529,7 @@ func (p *Alpha) Opaque() bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// NewAlpha returns a new Alpha with the given width and height.
|
||||
// NewAlpha returns a new Alpha with the given bounds.
|
||||
func NewAlpha(r Rectangle) *Alpha {
|
||||
w, h := r.Dx(), r.Dy()
|
||||
pix := make([]uint8, 1*w*h)
|
||||
|
@ -525,15 +555,21 @@ func (p *Alpha16) At(x, y int) color.Color {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return color.Alpha16{}
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*2
|
||||
i := p.PixOffset(x, y)
|
||||
return color.Alpha16{uint16(p.Pix[i+0])<<8 | uint16(p.Pix[i+1])}
|
||||
}
|
||||
|
||||
// PixOffset returns the index of the first element of Pix that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *Alpha16) PixOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*2
|
||||
}
|
||||
|
||||
func (p *Alpha16) Set(x, y int, c color.Color) {
|
||||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*2
|
||||
i := p.PixOffset(x, y)
|
||||
c1 := color.Alpha16Model.Convert(c).(color.Alpha16)
|
||||
p.Pix[i+0] = uint8(c1.A >> 8)
|
||||
p.Pix[i+1] = uint8(c1.A)
|
||||
|
@ -543,7 +579,7 @@ func (p *Alpha16) SetAlpha16(x, y int, c color.Alpha16) {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*2
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i+0] = uint8(c.A >> 8)
|
||||
p.Pix[i+1] = uint8(c.A)
|
||||
}
|
||||
|
@ -558,7 +594,7 @@ func (p *Alpha16) SubImage(r Rectangle) Image {
|
|||
if r.Empty() {
|
||||
return &Alpha16{}
|
||||
}
|
||||
i := (r.Min.Y-p.Rect.Min.Y)*p.Stride + (r.Min.X-p.Rect.Min.X)*2
|
||||
i := p.PixOffset(r.Min.X, r.Min.Y)
|
||||
return &Alpha16{
|
||||
Pix: p.Pix[i:],
|
||||
Stride: p.Stride,
|
||||
|
@ -584,7 +620,7 @@ func (p *Alpha16) Opaque() bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// NewAlpha16 returns a new Alpha16 with the given width and height.
|
||||
// NewAlpha16 returns a new Alpha16 with the given bounds.
|
||||
func NewAlpha16(r Rectangle) *Alpha16 {
|
||||
w, h := r.Dx(), r.Dy()
|
||||
pix := make([]uint8, 2*w*h)
|
||||
|
@ -610,15 +646,21 @@ func (p *Gray) At(x, y int) color.Color {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return color.Gray{}
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x - p.Rect.Min.X)
|
||||
i := p.PixOffset(x, y)
|
||||
return color.Gray{p.Pix[i]}
|
||||
}
|
||||
|
||||
// PixOffset returns the index of the first element of Pix that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *Gray) PixOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*1
|
||||
}
|
||||
|
||||
func (p *Gray) Set(x, y int, c color.Color) {
|
||||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x - p.Rect.Min.X)
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i] = color.GrayModel.Convert(c).(color.Gray).Y
|
||||
}
|
||||
|
||||
|
@ -626,7 +668,7 @@ func (p *Gray) SetGray(x, y int, c color.Gray) {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x - p.Rect.Min.X)
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i] = c.Y
|
||||
}
|
||||
|
||||
|
@ -640,7 +682,7 @@ func (p *Gray) SubImage(r Rectangle) Image {
|
|||
if r.Empty() {
|
||||
return &Gray{}
|
||||
}
|
||||
i := (r.Min.Y-p.Rect.Min.Y)*p.Stride + (r.Min.X-p.Rect.Min.X)*1
|
||||
i := p.PixOffset(r.Min.X, r.Min.Y)
|
||||
return &Gray{
|
||||
Pix: p.Pix[i:],
|
||||
Stride: p.Stride,
|
||||
|
@ -653,7 +695,7 @@ func (p *Gray) Opaque() bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// NewGray returns a new Gray with the given width and height.
|
||||
// NewGray returns a new Gray with the given bounds.
|
||||
func NewGray(r Rectangle) *Gray {
|
||||
w, h := r.Dx(), r.Dy()
|
||||
pix := make([]uint8, 1*w*h)
|
||||
|
@ -679,15 +721,21 @@ func (p *Gray16) At(x, y int) color.Color {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return color.Gray16{}
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*2
|
||||
i := p.PixOffset(x, y)
|
||||
return color.Gray16{uint16(p.Pix[i+0])<<8 | uint16(p.Pix[i+1])}
|
||||
}
|
||||
|
||||
// PixOffset returns the index of the first element of Pix that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *Gray16) PixOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*2
|
||||
}
|
||||
|
||||
func (p *Gray16) Set(x, y int, c color.Color) {
|
||||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*2
|
||||
i := p.PixOffset(x, y)
|
||||
c1 := color.Gray16Model.Convert(c).(color.Gray16)
|
||||
p.Pix[i+0] = uint8(c1.Y >> 8)
|
||||
p.Pix[i+1] = uint8(c1.Y)
|
||||
|
@ -697,7 +745,7 @@ func (p *Gray16) SetGray16(x, y int, c color.Gray16) {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*2
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i+0] = uint8(c.Y >> 8)
|
||||
p.Pix[i+1] = uint8(c.Y)
|
||||
}
|
||||
|
@ -712,7 +760,7 @@ func (p *Gray16) SubImage(r Rectangle) Image {
|
|||
if r.Empty() {
|
||||
return &Gray16{}
|
||||
}
|
||||
i := (r.Min.Y-p.Rect.Min.Y)*p.Stride + (r.Min.X-p.Rect.Min.X)*2
|
||||
i := p.PixOffset(r.Min.X, r.Min.Y)
|
||||
return &Gray16{
|
||||
Pix: p.Pix[i:],
|
||||
Stride: p.Stride,
|
||||
|
@ -725,7 +773,7 @@ func (p *Gray16) Opaque() bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// NewGray16 returns a new Gray16 with the given width and height.
|
||||
// NewGray16 returns a new Gray16 with the given bounds.
|
||||
func NewGray16(r Rectangle) *Gray16 {
|
||||
w, h := r.Dx(), r.Dy()
|
||||
pix := make([]uint8, 2*w*h)
|
||||
|
@ -756,15 +804,21 @@ func (p *Paletted) At(x, y int) color.Color {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return p.Palette[0]
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x - p.Rect.Min.X)
|
||||
i := p.PixOffset(x, y)
|
||||
return p.Palette[p.Pix[i]]
|
||||
}
|
||||
|
||||
// PixOffset returns the index of the first element of Pix that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *Paletted) PixOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*1
|
||||
}
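The same PixOffset accessor is added for every pixel format in this file. For Paletted the Pix bytes are palette indices rather than channel values, so PixOffset addresses the single byte holding the pixel's index. A short sketch with a made-up two-entry palette:

package main

import (
	"fmt"
	"image"
	"image/color"
)

func main() {
	pal := color.Palette{color.Black, color.White}
	m := image.NewPaletted(image.Rect(0, 0, 4, 4), pal)
	m.Pix[m.PixOffset(2, 3)] = 1      // palette index 1, i.e. color.White
	fmt.Println(m.ColorIndexAt(2, 3)) // 1
}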
|
||||
|
||||
func (p *Paletted) Set(x, y int, c color.Color) {
|
||||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x - p.Rect.Min.X)
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i] = uint8(p.Palette.Index(c))
|
||||
}
|
||||
|
||||
|
@ -772,7 +826,7 @@ func (p *Paletted) ColorIndexAt(x, y int) uint8 {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return 0
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x - p.Rect.Min.X)
|
||||
i := p.PixOffset(x, y)
|
||||
return p.Pix[i]
|
||||
}
|
||||
|
||||
|
@ -780,7 +834,7 @@ func (p *Paletted) SetColorIndex(x, y int, index uint8) {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := (y-p.Rect.Min.Y)*p.Stride + (x - p.Rect.Min.X)
|
||||
i := p.PixOffset(x, y)
|
||||
p.Pix[i] = index
|
||||
}
|
||||
|
||||
|
@ -796,7 +850,7 @@ func (p *Paletted) SubImage(r Rectangle) Image {
|
|||
Palette: p.Palette,
|
||||
}
|
||||
}
|
||||
i := (r.Min.Y-p.Rect.Min.Y)*p.Stride + (r.Min.X-p.Rect.Min.X)*1
|
||||
i := p.PixOffset(r.Min.X, r.Min.Y)
|
||||
return &Paletted{
|
||||
Pix: p.Pix[i:],
|
||||
Stride: p.Stride,
|
||||
|
|
|
@ -203,8 +203,7 @@ func (d *decoder) makeImg(h0, v0, mxx, myy int) {
|
|||
return
|
||||
}
|
||||
var subsampleRatio image.YCbCrSubsampleRatio
|
||||
n := h0 * v0
|
||||
switch n {
|
||||
switch h0 * v0 {
|
||||
case 1:
|
||||
subsampleRatio = image.YCbCrSubsampleRatio444
|
||||
case 2:
|
||||
|
@ -214,16 +213,8 @@ func (d *decoder) makeImg(h0, v0, mxx, myy int) {
|
|||
default:
|
||||
panic("unreachable")
|
||||
}
|
||||
b := make([]byte, mxx*myy*(1*8*8*n+2*8*8))
|
||||
d.img3 = &image.YCbCr{
|
||||
Y: b[mxx*myy*(0*8*8*n+0*8*8) : mxx*myy*(1*8*8*n+0*8*8)],
|
||||
Cb: b[mxx*myy*(1*8*8*n+0*8*8) : mxx*myy*(1*8*8*n+1*8*8)],
|
||||
Cr: b[mxx*myy*(1*8*8*n+1*8*8) : mxx*myy*(1*8*8*n+2*8*8)],
|
||||
SubsampleRatio: subsampleRatio,
|
||||
YStride: mxx * 8 * h0,
|
||||
CStride: mxx * 8,
|
||||
Rect: image.Rect(0, 0, d.width, d.height),
|
||||
}
|
||||
m := image.NewYCbCr(image.Rect(0, 0, 8*h0*mxx, 8*v0*myy), subsampleRatio)
|
||||
d.img3 = m.SubImage(image.Rect(0, 0, d.width, d.height)).(*image.YCbCr)
|
||||
}
|
||||
|
||||
// Specified in section B.2.3.
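makeImg now allocates its frame with the new image.NewYCbCr constructor and crops it with SubImage rather than hand-slicing a single byte buffer. A standalone sketch of that pattern; h0, v0, mxx, myy and the frame size below are invented values, not ones taken from the decoder:

package main

import (
	"fmt"
	"image"
)

func main() {
	h0, v0, mxx, myy := 2, 2, 10, 8 // hypothetical sampling factors and MCU counts
	width, height := 153, 120       // hypothetical frame size
	// Allocate whole 8x8 MCU blocks, then crop to the real frame bounds.
	m := image.NewYCbCr(image.Rect(0, 0, 8*h0*mxx, 8*v0*myy), image.YCbCrSubsampleRatio420)
	img := m.SubImage(image.Rect(0, 0, width, height)).(*image.YCbCr)
	fmt.Println(img.Bounds()) // (0,0)-(153,120)
}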
|
||||
|
|
|
@ -223,8 +223,8 @@ func (d *decoder) decode(dst image.Image, ymin, ymax int) error {
|
|||
}
|
||||
case mRGB:
|
||||
img := dst.(*image.RGBA)
|
||||
min := (ymin-img.Rect.Min.Y)*img.Stride - img.Rect.Min.X*4
|
||||
max := (ymax-img.Rect.Min.Y)*img.Stride - img.Rect.Min.X*4
|
||||
min := img.PixOffset(0, ymin)
|
||||
max := img.PixOffset(0, ymax)
|
||||
var off int
|
||||
for i := min; i < max; i += 4 {
|
||||
img.Pix[i+0] = d.buf[off+0]
|
||||
|
@ -235,16 +235,16 @@ func (d *decoder) decode(dst image.Image, ymin, ymax int) error {
|
|||
}
|
||||
case mNRGBA:
|
||||
img := dst.(*image.NRGBA)
|
||||
min := (ymin-img.Rect.Min.Y)*img.Stride - img.Rect.Min.X*4
|
||||
max := (ymax-img.Rect.Min.Y)*img.Stride - img.Rect.Min.X*4
|
||||
min := img.PixOffset(0, ymin)
|
||||
max := img.PixOffset(0, ymax)
|
||||
if len(d.buf) != max-min {
|
||||
return FormatError("short data strip")
|
||||
}
|
||||
copy(img.Pix[min:max], d.buf)
|
||||
case mRGBA:
|
||||
img := dst.(*image.RGBA)
|
||||
min := (ymin-img.Rect.Min.Y)*img.Stride - img.Rect.Min.X*4
|
||||
max := (ymax-img.Rect.Min.Y)*img.Stride - img.Rect.Min.X*4
|
||||
min := img.PixOffset(0, ymin)
|
||||
max := img.PixOffset(0, ymax)
|
||||
if len(d.buf) != max-min {
|
||||
return FormatError("short data strip")
|
||||
}
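In the tiff decoder the strip bounds are now computed with img.PixOffset(0, y), which is precisely the index of the first Pix byte of row y. A quick standalone check of that equivalence on a throwaway RGBA image (sizes are arbitrary):

package main

import (
	"fmt"
	"image"
)

func main() {
	img := image.NewRGBA(image.Rect(0, 0, 64, 48))
	ymin, ymax := 8, 16
	min := img.PixOffset(0, ymin)
	max := img.PixOffset(0, ymax)
	// One row is img.Stride bytes, so the strip covers (ymax-ymin) rows.
	fmt.Println(min, max, max-min == (ymax-ymin)*img.Stride) // 2048 4096 true
}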
|
||||
|
|
|
@ -17,6 +17,18 @@ const (
|
|||
YCbCrSubsampleRatio420
|
||||
)
|
||||
|
||||
func (s YCbCrSubsampleRatio) String() string {
|
||||
switch s {
|
||||
case YCbCrSubsampleRatio444:
|
||||
return "YCbCrSubsampleRatio444"
|
||||
case YCbCrSubsampleRatio422:
|
||||
return "YCbCrSubsampleRatio422"
|
||||
case YCbCrSubsampleRatio420:
|
||||
return "YCbCrSubsampleRatio420"
|
||||
}
|
||||
return "YCbCrSubsampleRatioUnknown"
|
||||
}
|
||||
|
||||
// YCbCr is an in-memory image of Y'CbCr colors. There is one Y sample per
|
||||
// pixel, but each Cb and Cr sample can span one or more pixels.
|
||||
// YStride is the Y slice index delta between vertically adjacent pixels.
|
||||
|
@ -28,9 +40,7 @@ const (
|
|||
// For 4:2:2, CStride == YStride/2 && len(Cb) == len(Cr) == len(Y)/2.
|
||||
// For 4:2:0, CStride == YStride/2 && len(Cb) == len(Cr) == len(Y)/4.
|
||||
type YCbCr struct {
|
||||
Y []uint8
|
||||
Cb []uint8
|
||||
Cr []uint8
|
||||
Y, Cb, Cr []uint8
|
||||
YStride int
|
||||
CStride int
|
||||
SubsampleRatio YCbCrSubsampleRatio
|
||||
|
@ -49,39 +59,86 @@ func (p *YCbCr) At(x, y int) color.Color {
|
|||
if !(Point{x, y}.In(p.Rect)) {
|
||||
return color.YCbCr{}
|
||||
}
|
||||
yi := p.YOffset(x, y)
|
||||
ci := p.COffset(x, y)
|
||||
return color.YCbCr{
|
||||
p.Y[yi],
|
||||
p.Cb[ci],
|
||||
p.Cr[ci],
|
||||
}
|
||||
}
|
||||
|
||||
// YOffset returns the index of the first element of Y that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *YCbCr) YOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.YStride + (x - p.Rect.Min.X)
|
||||
}
|
||||
|
||||
// COffset returns the index of the first element of Cb or Cr that corresponds
|
||||
// to the pixel at (x, y).
|
||||
func (p *YCbCr) COffset(x, y int) int {
|
||||
switch p.SubsampleRatio {
|
||||
case YCbCrSubsampleRatio422:
|
||||
i := x / 2
|
||||
return color.YCbCr{
|
||||
p.Y[y*p.YStride+x],
|
||||
p.Cb[y*p.CStride+i],
|
||||
p.Cr[y*p.CStride+i],
|
||||
}
|
||||
return (y-p.Rect.Min.Y)*p.CStride + (x/2 - p.Rect.Min.X/2)
|
||||
case YCbCrSubsampleRatio420:
|
||||
i, j := x/2, y/2
|
||||
return color.YCbCr{
|
||||
p.Y[y*p.YStride+x],
|
||||
p.Cb[j*p.CStride+i],
|
||||
p.Cr[j*p.CStride+i],
|
||||
}
|
||||
return (y/2-p.Rect.Min.Y/2)*p.CStride + (x/2 - p.Rect.Min.X/2)
|
||||
}
|
||||
// Default to 4:4:4 subsampling.
|
||||
return color.YCbCr{
|
||||
p.Y[y*p.YStride+x],
|
||||
p.Cb[y*p.CStride+x],
|
||||
p.Cr[y*p.CStride+x],
|
||||
}
|
||||
return (y-p.Rect.Min.Y)*p.CStride + (x - p.Rect.Min.X)
|
||||
}
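YOffset and COffset expose the per-plane index arithmetic, including the Rect.Min adjustment and the chroma subsampling, that At previously inlined. A small sketch on a 4:2:2 image whose bounds start away from the origin; the sample values written below are arbitrary:

package main

import (
	"fmt"
	"image"
)

func main() {
	m := image.NewYCbCr(image.Rect(16, 16, 32, 32), image.YCbCrSubsampleRatio422)
	fmt.Println(m.SubsampleRatio) // "YCbCrSubsampleRatio422", via the new String method
	yi := m.YOffset(17, 18)
	ci := m.COffset(17, 18) // shared by the Cb and Cr planes
	m.Y[yi], m.Cb[ci], m.Cr[ci] = 0x80, 0x40, 0xc0
	fmt.Println(m.At(17, 18)) // {128 64 192}
}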
|
||||
|
||||
// SubImage returns an image representing the portion of the image p visible
|
||||
// through r. The returned value shares pixels with the original image.
|
||||
func (p *YCbCr) SubImage(r Rectangle) Image {
|
||||
q := new(YCbCr)
|
||||
*q = *p
|
||||
q.Rect = q.Rect.Intersect(r)
|
||||
return q
|
||||
r = r.Intersect(p.Rect)
|
||||
// If r1 and r2 are Rectangles, r1.Intersect(r2) is not guaranteed to be inside
|
||||
// either r1 or r2 if the intersection is empty. Without explicitly checking for
|
||||
// this, the Pix[i:] expression below can panic.
|
||||
if r.Empty() {
|
||||
return &YCbCr{
|
||||
SubsampleRatio: p.SubsampleRatio,
|
||||
}
|
||||
}
|
||||
yi := p.YOffset(r.Min.X, r.Min.Y)
|
||||
ci := p.COffset(r.Min.X, r.Min.Y)
|
||||
return &YCbCr{
|
||||
Y: p.Y[yi:],
|
||||
Cb: p.Cb[ci:],
|
||||
Cr: p.Cr[ci:],
|
||||
SubsampleRatio: p.SubsampleRatio,
|
||||
YStride: p.YStride,
|
||||
CStride: p.CStride,
|
||||
Rect: r,
|
||||
}
|
||||
}
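SubImage now re-slices the three planes at the offsets computed above, so a sub-image's minimum corner maps to index 0 of its own view, and it returns an empty image for an empty intersection instead of risking a slice-bounds panic. A minimal sketch:

package main

import (
	"fmt"
	"image"
)

func main() {
	m := image.NewYCbCr(image.Rect(0, 0, 16, 16), image.YCbCrSubsampleRatio420)
	sub := m.SubImage(image.Rect(4, 4, 12, 12)).(*image.YCbCr)
	fmt.Println(sub.Bounds())      // (4,4)-(12,12)
	fmt.Println(sub.YOffset(4, 4)) // 0: the planes were re-sliced
	// A rectangle that misses m entirely yields an empty image rather than a panic.
	empty := m.SubImage(image.Rect(100, 100, 200, 200)).(*image.YCbCr)
	fmt.Println(empty.Bounds().Empty()) // true
}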
|
||||
|
||||
func (p *YCbCr) Opaque() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// NewYCbCr returns a new YCbCr with the given bounds and subsample ratio.
|
||||
func NewYCbCr(r Rectangle, subsampleRatio YCbCrSubsampleRatio) *YCbCr {
|
||||
w, h, cw, ch := r.Dx(), r.Dy(), 0, 0
|
||||
switch subsampleRatio {
|
||||
case YCbCrSubsampleRatio422:
|
||||
cw = (r.Max.X+1)/2 - r.Min.X/2
|
||||
ch = h
|
||||
case YCbCrSubsampleRatio420:
|
||||
cw = (r.Max.X+1)/2 - r.Min.X/2
|
||||
ch = (r.Max.Y+1)/2 - r.Min.Y/2
|
||||
default:
|
||||
// Default to 4:4:4 subsampling.
|
||||
cw = w
|
||||
ch = h
|
||||
}
|
||||
b := make([]byte, w*h+2*cw*ch)
|
||||
return &YCbCr{
|
||||
Y: b[:w*h],
|
||||
Cb: b[w*h+0*cw*ch : w*h+1*cw*ch],
|
||||
Cr: b[w*h+1*cw*ch : w*h+2*cw*ch],
|
||||
SubsampleRatio: subsampleRatio,
|
||||
YStride: w,
|
||||
CStride: cw,
|
||||
Rect: r,
|
||||
}
|
||||
}
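NewYCbCr, used by the jpeg decoder earlier in this commit, sizes the chroma planes with (Max+1)/2 - Min/2 so that bounds with odd coordinates still get a Cb/Cr sample for every pixel. The lengths below follow directly from that formula:

package main

import (
	"fmt"
	"image"
)

func main() {
	m := image.NewYCbCr(image.Rect(1, 1, 16, 16), image.YCbCrSubsampleRatio420)
	// 15x15 luma samples; the chroma planes are 8x8 because (16+1)/2 - 1/2 = 8.
	fmt.Println(len(m.Y), len(m.Cb), len(m.Cr), m.YStride, m.CStride) // 225 64 64 15 8
}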
|
||||
|
|
104
libgo/go/image/ycbcr_test.go
Normal file
|
@ -0,0 +1,104 @@
|
|||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package image_test
|
||||
|
||||
import (
|
||||
. "image"
|
||||
"image/color"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestYCbCr(t *testing.T) {
|
||||
rects := []Rectangle{
|
||||
Rect(0, 0, 16, 16),
|
||||
Rect(1, 0, 16, 16),
|
||||
Rect(0, 1, 16, 16),
|
||||
Rect(1, 1, 16, 16),
|
||||
Rect(1, 1, 15, 16),
|
||||
Rect(1, 1, 16, 15),
|
||||
Rect(1, 1, 15, 15),
|
||||
Rect(2, 3, 14, 15),
|
||||
Rect(7, 0, 7, 16),
|
||||
Rect(0, 8, 16, 8),
|
||||
Rect(0, 0, 10, 11),
|
||||
Rect(5, 6, 16, 16),
|
||||
Rect(7, 7, 8, 8),
|
||||
Rect(7, 8, 8, 9),
|
||||
Rect(8, 7, 9, 8),
|
||||
Rect(8, 8, 9, 9),
|
||||
Rect(7, 7, 17, 17),
|
||||
Rect(8, 8, 17, 17),
|
||||
Rect(9, 9, 17, 17),
|
||||
Rect(10, 10, 17, 17),
|
||||
}
|
||||
subsampleRatios := []YCbCrSubsampleRatio{
|
||||
YCbCrSubsampleRatio444,
|
||||
YCbCrSubsampleRatio422,
|
||||
YCbCrSubsampleRatio420,
|
||||
}
|
||||
deltas := []Point{
|
||||
Pt(0, 0),
|
||||
Pt(1000, 1001),
|
||||
Pt(5001, -400),
|
||||
Pt(-701, -801),
|
||||
}
|
||||
for _, r := range rects {
|
||||
for _, subsampleRatio := range subsampleRatios {
|
||||
for _, delta := range deltas {
|
||||
testYCbCr(t, r, subsampleRatio, delta)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testYCbCr(t *testing.T, r Rectangle, subsampleRatio YCbCrSubsampleRatio, delta Point) {
|
||||
// Create a YCbCr image m, whose bounds are r translated by (delta.X, delta.Y).
|
||||
r1 := r.Add(delta)
|
||||
m := NewYCbCr(r1, subsampleRatio)
|
||||
|
||||
// Test that the image buffer is reasonably small even if (delta.X, delta.Y) is far from the origin.
|
||||
if len(m.Y) > 100*100 {
|
||||
t.Errorf("r=%v, subsampleRatio=%v, delta=%v: image buffer is too large",
|
||||
r, subsampleRatio, delta)
|
||||
return
|
||||
}
|
||||
|
||||
// Initialize m's pixels. For 422 and 420 subsampling, some of the Cb and Cr elements
|
||||
// will be set multiple times. That's OK. We just want to avoid a uniform image.
|
||||
for y := r1.Min.Y; y < r1.Max.Y; y++ {
|
||||
for x := r1.Min.X; x < r1.Max.X; x++ {
|
||||
yi := m.YOffset(x, y)
|
||||
ci := m.COffset(x, y)
|
||||
m.Y[yi] = uint8(16*y + x)
|
||||
m.Cb[ci] = uint8(y + 16*x)
|
||||
m.Cr[ci] = uint8(y + 16*x)
|
||||
}
|
||||
}
|
||||
|
||||
// Make various sub-images of m.
|
||||
for y0 := delta.Y + 3; y0 < delta.Y+7; y0++ {
|
||||
for y1 := delta.Y + 8; y1 < delta.Y+13; y1++ {
|
||||
for x0 := delta.X + 3; x0 < delta.X+7; x0++ {
|
||||
for x1 := delta.X + 8; x1 < delta.X+13; x1++ {
|
||||
subRect := Rect(x0, y0, x1, y1)
|
||||
sub := m.SubImage(subRect).(*YCbCr)
|
||||
|
||||
// For each point in the sub-image's bounds, check that m.At(x, y) equals sub.At(x, y).
|
||||
for y := sub.Rect.Min.Y; y < sub.Rect.Max.Y; y++ {
|
||||
for x := sub.Rect.Min.X; x < sub.Rect.Max.X; x++ {
|
||||
color0 := m.At(x, y).(color.YCbCr)
|
||||
color1 := sub.At(x, y).(color.YCbCr)
|
||||
if color0 != color1 {
|
||||
t.Errorf("r=%v, subsampleRatio=%v, delta=%v, x=%d, y=%d, color0=%v, color1=%v",
|
||||
r, subsampleRatio, delta, x, y, color0, color1)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -50,7 +50,7 @@ func ReadFile(filename string) ([]byte, error) {
|
|||
// WriteFile writes data to a file named by filename.
|
||||
// If the file does not exist, WriteFile creates it with permissions perm;
|
||||
// otherwise WriteFile truncates it before writing.
|
||||
func WriteFile(filename string, data []byte, perm uint32) error {
|
||||
func WriteFile(filename string, data []byte, perm os.FileMode) error {
|
||||
f, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, perm)
|
||||
if err != nil {
|
||||
return err
|
||||
|
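WriteFile's permission argument changes from a bare uint32 to os.FileMode, matching os.OpenFile. A minimal call site; the path and contents are placeholders:

package main

import (
	"io/ioutil"
	"log"
	"os"
)

func main() {
	data := []byte("hello, file\n")
	// perm is now an os.FileMode; untyped constants such as 0644 still work.
	if err := ioutil.WriteFile("/tmp/example.txt", data, os.FileMode(0644)); err != nil {
		log.Fatal(err)
	}
}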
|
|
@ -8,6 +8,7 @@ import (
|
|||
"log"
|
||||
"net"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
var serverAddr string
|
||||
|
@ -31,7 +32,7 @@ func startServer(done chan<- string) {
|
|||
log.Fatalf("net.ListenPacket failed udp :0 %v", e)
|
||||
}
|
||||
serverAddr = c.LocalAddr().String()
|
||||
c.SetReadTimeout(100e6) // 100ms
|
||||
c.SetReadDeadline(time.Now().Add(100 * time.Millisecond))
|
||||
go runSyslog(c, done)
|
||||
}
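The syslog test switches from the removed SetReadTimeout, which took a nanosecond count, to SetReadDeadline with an absolute time. A small standalone sketch of the deadline API on a throwaway UDP socket:

package main

import (
	"log"
	"net"
	"time"
)

func main() {
	c, err := net.ListenPacket("udp", "127.0.0.1:0")
	if err != nil {
		log.Fatal(err)
	}
	defer c.Close()
	// Deadlines are absolute times, not durations.
	c.SetReadDeadline(time.Now().Add(100 * time.Millisecond))
	buf := make([]byte, 1024)
	if _, _, err := c.ReadFrom(buf); err != nil {
		log.Println("read returned:", err) // a timeout error after ~100ms with no traffic
	}
}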
|
||||
|
||||
|
|
|
@ -107,7 +107,9 @@ func (r *Rand) Perm(n int) []int {
|
|||
|
||||
var globalRand = New(&lockedSource{src: NewSource(1)})
|
||||
|
||||
// Seed uses the provided seed value to initialize the generator to a deterministic state.
|
||||
// Seed uses the provided seed value to initialize the generator to a
|
||||
// deterministic state. If Seed is not called, the generator behaves as
|
||||
// if seeded by Seed(1).
|
||||
func Seed(seed int64) { globalRand.Seed(seed) }
|
||||
|
||||
// Int63 returns a non-negative pseudo-random 63-bit integer as an int64.
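The expanded comment documents that the package-level generator is deterministic by default: not calling Seed is the same as calling Seed(1). A quick illustration of that documented behavior as of this release:

package main

import (
	"fmt"
	"math/rand"
)

func main() {
	a := rand.Int63() // no Seed call yet: the generator acts as if Seed(1) had been called
	rand.Seed(1)
	b := rand.Int63()
	fmt.Println(a == b) // true
}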
|
||||
|
|
Some files were not shown because too many files have changed in this diff.