
Adds go.mod (#21)

* Reset Makefile after adding go.mod
* Remove ``build`` directory
* Simplifies Makefile
* shake: Make xorIn copyOut platform specific
Henry Case 2019-05-15 18:03:35 +01:00 committed by GitHub
parent 49bf0db8fd
commit 7298b650cc
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 107 additions and 144 deletions
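
The core shake change in this commit replaces the old dispatch scheme, in which sha3.go called xorInUnaligned/copyOutUnaligned directly and the tests swapped package-level function variables, with two mutually exclusive build-tagged files that each define plain xorIn and copyOut functions. A minimal sketch of that pattern (hypothetical file names, stub bodies, and the package's existing state type assumed; the real files appear in the diffs below):

// ---- xor_portable.go (hypothetical name): compiled wherever the fast file is excluded ----

// +build !amd64,!386,!ppc64le

package shake

// xorIn is the portable variant; its body XORs buf into the state one
// little-endian word at a time (see the generic file further down).
func xorIn(d *state, buf []byte) { /* portable implementation */ }

// ---- xor_fast.go (hypothetical name): compiled only on the listed platforms ----

// +build amd64 386 ppc64le
// +build !appengine

package shake

// xorIn is the unaligned variant; its body reinterprets buf as uint64
// words via unsafe and XORs them into the state directly.
func xorIn(d *state, buf []byte) { /* unsafe implementation */ }

Because exactly one of the two files is compiled for any given target, callers such as permute() and Write() can call xorIn and copyOut unconditionally, and the test-only helper testUnalignedAndGeneric that used to swap implementations at run time can be deleted.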

@@ -1,5 +1,9 @@
 sudo: required
 language: go
+go:
+  - 1.11.x
+  - 1.12.x
+  - master
 matrix:
   include:

@@ -2,8 +2,6 @@
 MK_FILE_PATH = $(lastword $(MAKEFILE_LIST))
 PRJ_DIR = $(abspath $(dir $(MK_FILE_PATH)))
 GO ?= go
-GOPATH_LOCAL = $(PRJ_DIR)/build/
-GOPATH_DIR = src/github.com/henrydcase/nobs
 VENDOR_DIR = tls_vendor
 OPTS ?= -v
 NOASM ?=
@@ -31,37 +29,19 @@ ifeq ($(DBG),1)
 OPTS_ENV+= GOTRACEBACK=crash # enable core dumps
 endif

-TARGETS ?= \
-	dh \
-	drbg \
-	ec \
-	hash \
-	kem \
-	utils
-
-prep-%:
-	mkdir -p $(GOPATH_LOCAL)/$(GOPATH_DIR)
-	cp -rf $* $(GOPATH_LOCAL)/$(GOPATH_DIR)/$*
-
-make_dirs:
-	mkdir -p $(GOPATH_LOCAL)/$(GOPATH_DIR)
-	cp -rf etc $(GOPATH_LOCAL)/$(GOPATH_DIR)
-
-test: clean make_dirs $(addprefix prep-,$(TARGETS))
-	cd $(GOPATH_LOCAL); $(OPTS_ENV) GOPATH=$(GOPATH_LOCAL) go test $(OPTS) $(TEST_PATH)
+test:
+	$(OPTS_ENV) $(GO) test $(OPTS) $(TEST_PATH)

 cover:
-	cd $(GOPATH_LOCAL); $(OPTS_ENV) GOPATH=$(GOPATH_LOCAL) go test \
-		-race -coverprofile=coverage_$(NOASM).txt -covermode=atomic $(OPTS) $(TEST_PATH)
-	cat $(GOPATH_LOCAL)/coverage_$(NOASM).txt >> coverage.txt
+	$(GO) test \
+		-coverprofile=coverage.txt -covermode=atomic $(OPTS) $(TEST_PATH)

-bench: clean $(addprefix prep-,$(TARGETS))
-	cd $(GOPATH_LOCAL); GOCACHE=$(GOCACHE) GOPATH=$(GOPATH_LOCAL) $(GO) test \
-		$(BENCH_OPTS) $(TEST_PATH)
+bench:
+	$(GO) test $(BENCH_OPTS) $(TEST_PATH)

 clean:
-	rm -rf $(GOPATH_LOCAL)
 	rm -rf $(VENDOR_DIR)
+	rm -rf coverage.txt

 vendor-sidh-for-tls: clean
 	mkdir -p $(VENDOR_DIR)/github_com/henrydcase/nobs/
@@ -69,7 +49,7 @@ vendor-sidh-for-tls: clean
 	find $(VENDOR_DIR) -type f -print0 -name "*.go" | xargs -0 sed -i 's/github\.com/github_com/g'

 pprof-cpu:
-	$(GO) tool pprof $(GOPATH_LOCAL)/cpu.out
+	$(GO) tool pprof cpu.out

 pprof-mem:
-	$(GO) tool pprof $(GOPATH_LOCAL)/mem0.out
+	$(GO) tool pprof mem0.out

go.mod (new file)

@@ -0,0 +1,3 @@
+module github.com/henrydcase/nobs
+
+go 1.12

@@ -83,7 +83,7 @@ func (d *state) permute() {
 	case spongeAbsorbing:
 		// If we're absorbing, we need to xor the input into the state
 		// before applying the permutation.
-		xorInUnaligned(d, d.buf)
+		xorIn(d, d.buf)
 		d.buf = d.storage[:0]
 		keccakF1600(&d.a)
 	case spongeSqueezing:
@@ -91,7 +91,7 @@ func (d *state) permute() {
 		// copying more output.
 		keccakF1600(&d.a)
 		d.buf = d.storage[:d.rate]
-		copyOutUnaligned(d, d.buf)
+		copyOut(d, d.buf)
 	}
 }
@@ -119,7 +119,7 @@ func (d *state) padAndPermute(dsbyte byte) {
 	d.permute()
 	d.state = spongeSqueezing
 	d.buf = d.storage[:d.rate]
-	copyOutUnaligned(d, d.buf)
+	copyOut(d, d.buf)
 }

 // Write absorbs more data into the hash's state. It produces an error
@@ -136,7 +136,7 @@ func (d *state) Write(p []byte) (written int, err error) {
 	for len(p) > 0 {
 		if len(d.buf) == 0 && len(p) >= d.rate {
 			// The fast path; absorb a full "rate" bytes of input and apply the permutation.
-			xorInUnaligned(d, p[:d.rate])
+			xorIn(d, p[:d.rate])
 			p = p[d.rate:]
 			keccakF1600(&d.a)
 		} else {

@@ -22,11 +22,6 @@ import (
 	"testing"
 )

-var (
-	xorIn   = xorInUnaligned
-	copyOut = copyOutUnaligned
-)
-
 const (
 	testString  = "brekeccakkeccak koax koax"
 	katFilename = "testdata/keccakKats.json.deflate"
@@ -68,22 +63,10 @@ type KeccakKats struct {
 	}
 }

-func testUnalignedAndGeneric(t *testing.T, testf func(impl string)) {
-	xorInOrig, copyOutOrig := xorIn, copyOut
-	xorIn, copyOut = xorInGeneric, copyOutGeneric
-	testf("generic")
-	if xorImplementationUnaligned != "generic" {
-		xorIn, copyOut = xorInGeneric, copyOutGeneric
-		testf("unaligned")
-	}
-	xorIn, copyOut = xorInOrig, copyOutOrig
-}
-
 // TestKeccakKats tests the SHA-3 and Shake implementations against all the
 // ShortMsgKATs from https://github.com/gvanas/KeccakCodePackage
 // (The testvectors are stored in keccakKats.json.deflate due to their length.)
 func TestKeccakKats(t *testing.T) {
-	testUnalignedAndGeneric(t, func(impl string) {
 	// Read the KATs.
 	deflated, err := os.Open(katFilename)
 	if err != nil {
@@ -119,15 +102,14 @@ func TestKeccakKats(t *testing.T) {
 			d.Read(out)
 			got := strings.ToUpper(hex.EncodeToString(out))
 			if got != kat.Digest {
-				t.Errorf("function=%s, implementation=%s, length=%d N:%s\n S:%s\nmessage:\n %s \ngot:\n %s\nwanted:\n %s",
-					algo, impl, kat.Length, kat.N, kat.S, kat.Message, got, kat.Digest)
+				t.Errorf("function=%s, length=%d N:%s\n S:%s\nmessage:\n %s \ngot:\n %s\nwanted:\n %s",
+					algo, kat.Length, kat.N, kat.S, kat.Message, got, kat.Digest)
 				t.Logf("wanted %+v", kat)
 				t.FailNow()
 			}
 			continue
 		}
 	}
-	})
 }

 // TestKeccak does a basic test of the non-standardized Keccak hash functions.
@@ -158,7 +140,6 @@ func TestKeccak(t *testing.T) {
 // TestUnalignedWrite tests that writing data in an arbitrary pattern with
 // small input buffers.
 func TestUnalignedWrite(t *testing.T) {
-	testUnalignedAndGeneric(t, func(impl string) {
 	buf := sequentialBytes(0x10000)

 	// Same for SHAKE
@@ -185,10 +166,9 @@ func TestUnalignedWrite(t *testing.T) {
 		}
 		d.Read(got)
 		if !bytes.Equal(got, want) {
-			t.Errorf("Unaligned writes, implementation=%s, alg=%s\ngot %q, want %q", impl, alg, got, want)
+			t.Errorf("Unaligned writes, alg=%s\ngot %q, want %q", alg, got, want)
 		}
 	}
-	})
 }

 /* TODO: To redesign those tests and unlock
@@ -231,7 +211,6 @@ func TestAppendNoRealloc(t *testing.T) {
 // TestSqueezing checks that squeezing the full output a single time produces
 // the same output as repeatedly squeezing the instance.
 func TestSqueezing(t *testing.T) {
-	testUnalignedAndGeneric(t, func(impl string) {
 	for algo, v := range testShakes {
 		d0 := v.constructor([]byte(v.defAlgoName), []byte(v.defCustomStr))
 		d0.Write([]byte(testString))
@@ -247,10 +226,9 @@ func TestSqueezing(t *testing.T) {
 			multiple = append(multiple, one...)
 		}
 		if !bytes.Equal(ref, multiple) {
-			t.Errorf("%s (%s): squeezing %d bytes one at a time failed", algo, impl, len(ref))
+			t.Errorf("%s : squeezing %d bytes one at a time failed", algo, len(ref))
 		}
 	}
-	})
 }

 // sequentialBytes produces a buffer of size consecutive bytes 0x00, 0x01, ..., used for testing.

@@ -2,6 +2,8 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.

+// +build !amd64,!386,!ppc64le
+
 package shake

 import "encoding/binary"
@@ -9,7 +11,7 @@ import "encoding/binary"
 // xorInGeneric xors the bytes in buf into the state; it
 // makes no non-portable assumptions about memory layout
 // or alignment.
-func xorInGeneric(d *state, buf []byte) {
+func xorIn(d *state, buf []byte) {
 	n := len(buf) / 8

 	for i := 0; i < n; i++ {
@@ -20,11 +22,9 @@ func xorInGeneric(d *state, buf []byte) {
 }

 // copyOutGeneric copies uint64s to a byte buffer.
-func copyOutGeneric(d *state, b []byte) {
+func copyOut(d *state, b []byte) {
 	for i := 0; len(b) >= 8; i++ {
 		binary.LittleEndian.PutUint64(b, d.a[i])
 		b = b[8:]
 	}
 }
-
-const xorImplementationGeneric = "generic"
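
As a side note, here is a standalone, runnable sketch of the generic absorb/squeeze helpers shown above, using its own minimal state type rather than the package's (the names and the main harness are illustrative only): XORing a buffer into an all-zero state and copying the lanes back out returns the original bytes.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// state mirrors the Keccak state: 25 lanes of 64 bits (1600 bits total).
type state struct{ a [25]uint64 }

// xorIn XORs buf into the state lanes, one little-endian uint64 at a time.
func xorIn(d *state, buf []byte) {
	n := len(buf) / 8
	for i := 0; i < n; i++ {
		d.a[i] ^= binary.LittleEndian.Uint64(buf[i*8:])
	}
}

// copyOut serializes the leading state lanes back into b.
func copyOut(d *state, b []byte) {
	for i := 0; len(b) >= 8; i++ {
		binary.LittleEndian.PutUint64(b, d.a[i])
		b = b[8:]
	}
}

func main() {
	var d state
	in := []byte("0123456789abcdef") // two full 8-byte lanes
	xorIn(&d, in)

	out := make([]byte, len(in))
	copyOut(&d, out)
	// XOR into an all-zero state is the identity, so out equals in.
	fmt.Println(bytes.Equal(in, out)) // true
}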

@@ -2,14 +2,14 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.

-// +build amd64 arm64 386 ppc64le
+// +build amd64 386 ppc64le
 // +build !appengine

 package shake

 import "unsafe"

-func xorInUnaligned(d *state, buf []byte) {
+func xorIn(d *state, buf []byte) {
 	bw := (*[maxRate / 8]uint64)(unsafe.Pointer(&buf[0]))
 	n := len(buf)
 	if n >= 72 {
@@ -45,9 +45,7 @@ func xorInUnaligned(d *state, buf []byte) {
 	}
 }

-func copyOutUnaligned(d *state, buf []byte) {
+func copyOut(d *state, buf []byte) {
 	ab := (*[maxRate]uint8)(unsafe.Pointer(&d.a[0]))
 	copy(buf, ab[:])
 }
-
-const xorImplementationUnaligned = "unaligned"
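
And a matching standalone sketch of the unsafe trick used by the platform-specific file above: the byte buffer is reinterpreted as an array of uint64 words through unsafe.Pointer, which is only valid on the little-endian, unaligned-load-tolerant targets that the build tags select (again with illustrative names and a local state type, not the package's).

package main

import (
	"fmt"
	"unsafe"
)

const maxRate = 168 // largest SHAKE rate in bytes, as in the package

type state struct{ a [25]uint64 }

// xorInUnsafe XORs buf into the state by viewing buf as uint64 words.
// It assumes a little-endian CPU that tolerates unaligned 64-bit loads,
// which is what the amd64/386/ppc64le build constraint guarantees.
func xorInUnsafe(d *state, buf []byte) {
	bw := (*[maxRate / 8]uint64)(unsafe.Pointer(&buf[0]))
	n := len(buf) / 8
	for i := 0; i < n; i++ {
		d.a[i] ^= bw[i]
	}
}

func main() {
	var d state
	buf := make([]byte, maxRate) // a full-rate block, as the real callers pass
	buf[0], buf[8] = 0x01, 0x02
	xorInUnsafe(&d, buf)
	fmt.Printf("%#x %#x\n", d.a[0], d.a[1]) // 0x1 0x2 on little-endian targets
}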