Merge remote-tracking branch 'upstream/master' into feature/alphanet
cce committed Jul 5, 2022
2 parents 42ed29b + 77cb332 · commit 15cdcea
Showing 3 changed files with 152 additions and 49 deletions.
119 changes: 84 additions & 35 deletions .circleci/config.yml
@@ -45,6 +45,17 @@ executors:
resource_class: large
environment:
HOMEBREW_NO_AUTO_UPDATE: "true"
mac_arm64: &executor-mac-arm64
machine: true
resource_class: algorand/macstadium-m1
environment:
HOMEBREW_NO_AUTO_UPDATE: "true"
# these are required b/c jobs explicitly assign sizes to the executors
# for `mac_arm64` there is only one size
mac_arm64_medium:
<<: *executor-mac-arm64
mac_arm64_large:
<<: *executor-mac-arm64

workflows:
version: 2
@@ -57,30 +68,37 @@ workflows:
matrix: &matrix-default
parameters:
platform: ["amd64", "arm64", "mac_amd64"]
filters: &filters-default
branches:
ignore:
- /rel\/.*/
- /hotfix\/.*/

- build_nightly:
name: << matrix.platform >>_build_nightly
matrix: &matrix-nightly
parameters:
platform: ["amd64", "arm64", "mac_amd64", "mac_arm64"]
filters: &filters-nightly
branches:
only:
- /rel\/.*/
- /hotfix\/.*/
context: slack-secrets

- test:
name: << matrix.platform >>_test
matrix:
<<: *matrix-default
requires:
- << matrix.platform >>_build
filters: &filters-default
branches:
ignore:
- /rel\/.*/
- /hotfix\/.*/

- test_nightly:
name: << matrix.platform >>_test_nightly
matrix:
<<: *matrix-default
<<: *matrix-nightly
requires:
- << matrix.platform >>_build
filters: &filters-nightly
branches:
only:
- /rel\/.*/
- /hotfix\/.*/
- << matrix.platform >>_build_nightly
context: slack-secrets

- integration:
@@ -89,17 +107,13 @@
<<: *matrix-default
requires:
- << matrix.platform >>_build
filters:
<<: *filters-default

- integration_nightly:
name: << matrix.platform >>_integration_nightly
matrix:
<<: *matrix-default
<<: *matrix-nightly
requires:
- << matrix.platform >>_build
filters:
<<: *filters-nightly
- << matrix.platform >>_build_nightly
context: slack-secrets

- e2e_expect:
@@ -108,17 +122,13 @@
<<: *matrix-default
requires:
- << matrix.platform >>_build
filters:
<<: *filters-default

- e2e_expect_nightly:
name: << matrix.platform >>_e2e_expect_nightly
matrix:
<<: *matrix-default
<<: *matrix-nightly
requires:
- << matrix.platform >>_build
filters:
<<: *filters-nightly
- << matrix.platform >>_build_nightly
context: slack-secrets

- e2e_subs:
@@ -127,17 +137,13 @@
<<: *matrix-default
requires:
- << matrix.platform >>_build
filters:
<<: *filters-default

- e2e_subs_nightly:
name: << matrix.platform >>_e2e_subs_nightly
matrix:
<<: *matrix-default
<<: *matrix-nightly
requires:
- << matrix.platform >>_build
filters:
<<: *filters-nightly
- << matrix.platform >>_build_nightly
context:
- slack-secrets
- aws-secrets
@@ -147,14 +153,24 @@
matrix:
parameters:
platform: ["amd64", "arm64", "mac_amd64"]
job_type: ["test", "test_nightly", "integration", "integration_nightly", "e2e_expect", "e2e_expect_nightly"]
job_type: ["test", "integration", "e2e_expect"]
requires:
- << matrix.platform >>_<< matrix.job_type >>

- tests_verification_job_nightly:
name: << matrix.platform >>_<< matrix.job_type >>_verification
matrix:
parameters:
platform: ["amd64", "arm64", "mac_amd64", "mac_arm64"]
job_type: ["test_nightly", "integration_nightly", "e2e_expect_nightly"]
requires:
- << matrix.platform >>_<< matrix.job_type >>
context: slack-secrets

- upload_binaries:
name: << matrix.platform >>_upload_binaries
matrix:
<<: *matrix-default
<<: *matrix-nightly
requires:
- << matrix.platform >>_test_nightly_verification
- << matrix.platform >>_integration_nightly_verification
@@ -168,6 +184,7 @@
context:
- slack-secrets
- aws-secrets

#- windows_x64_build

commands:
@@ -187,6 +204,7 @@ commands:
- run:
working_directory: /tmp
command: |
sudo rm -rf << parameters.build_dir >>
sudo mkdir -p << parameters.build_dir >>
sudo chown -R $USER:$GROUP << parameters.build_dir >>
@@ -483,6 +501,21 @@ jobs:
- prepare_go
- generic_build

build_nightly:
parameters:
platform:
type: string
executor: << parameters.platform >>_medium
working_directory: << pipeline.parameters.build_dir >>/project
steps:
- prepare_build_dir
- checkout
- prepare_go
- generic_build
- slack/notify: &slack-fail-event
event: fail
template: basic_fail_1

test:
parameters:
platform:
@@ -514,9 +547,8 @@
result_subdir: << parameters.platform >>_test_nightly
no_output_timeout: 45m
- upload_coverage
- slack/notify: &slack-fail-event
event: fail
template: basic_fail_1
- slack/notify:
<<: *slack-fail-event

integration:
parameters:
@@ -659,6 +691,23 @@
- tests_verification_command:
result_subdir: << parameters.platform >>_<< parameters.job_type >>

tests_verification_job_nightly:
docker:
- image: python:3.9.6-alpine
resource_class: small
working_directory: << pipeline.parameters.build_dir >>/project
parameters:
platform: # platform: ["amd64", "arm64", "mac_amd64"]
type: string
job_type: # job_type: ["test", "test_nightly", "integration", "integration_nightly", "e2e_expect", "e2e_expect_nightly"]
type: string
steps:
- checkout
- tests_verification_command:
result_subdir: << parameters.platform >>_<< parameters.job_type >>
- slack/notify:
<<: *slack-fail-event

upload_binaries:
working_directory: << pipeline.parameters.build_dir >>/project
parameters:
21 changes: 7 additions & 14 deletions data/basics/overflow.go
@@ -18,7 +18,7 @@ package basics

import (
"math"
"math/big"
"math/bits"
)

// OverflowTracker is used to track when an operation causes an overflow
@@ -200,17 +200,10 @@ func (t *OverflowTracker) ScalarMulA(a MicroAlgos, b uint64) MicroAlgos {
// Muldiv computes a*b/c. The overflow flag indicates that
// the result was 2^64 or greater.
func Muldiv(a uint64, b uint64, c uint64) (res uint64, overflow bool) {
var aa big.Int
aa.SetUint64(a)

var bb big.Int
bb.SetUint64(b)

var cc big.Int
cc.SetUint64(c)

aa.Mul(&aa, &bb)
aa.Div(&aa, &cc)

return aa.Uint64(), !aa.IsUint64()
hi, lo := bits.Mul64(a, b)
if c <= hi {
return 0, true
}
quo, _ := bits.Div64(hi, lo, c)
return quo, false
}
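
The core change in data/basics/overflow.go swaps the big.Int arithmetic for math/bits: bits.Mul64 returns the full 128-bit product of a and b as a (hi, lo) pair, and bits.Div64 divides that 128-bit value by c. Div64 panics unless hi < c, which is exactly the condition for the quotient to fit in 64 bits, so the single comparison c <= hi doubles as the overflow check. The standalone sketch below is not part of the commit; the muldiv helper and the sample values are illustrative only, showing one fitting and one overflowing case.

package main

import (
	"fmt"
	"math"
	"math/bits"
)

// muldiv mirrors the new Muldiv: compute the 128-bit product, then divide,
// reporting overflow when the quotient cannot fit in a uint64.
func muldiv(a, b, c uint64) (res uint64, overflow bool) {
	hi, lo := bits.Mul64(a, b) // full 128-bit product: hi*2^64 + lo
	if c <= hi {
		// bits.Div64 would panic here; the quotient is >= 2^64.
		return 0, true
	}
	quo, _ := bits.Div64(hi, lo, c) // remainder discarded, as in Muldiv
	return quo, false
}

func main() {
	fmt.Println(muldiv(math.MaxUint64, 3, 4)) // 13835058055282163711 false
	fmt.Println(muldiv(math.MaxUint64, 4, 3)) // 0 true (quotient >= 2^64)
}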
61 changes: 61 additions & 0 deletions data/basics/units_test.go
@@ -18,6 +18,7 @@ package basics

import (
"math"
"math/big"
"testing"

"github.com/algorand/go-algorand/test/partitiontest"
@@ -69,3 +70,63 @@ func TestRoundUpToMultipleOf(t *testing.T) {
}
}
}

func OldMuldiv(a uint64, b uint64, c uint64) (res uint64, overflow bool) {
var aa big.Int
aa.SetUint64(a)

var bb big.Int
bb.SetUint64(b)

var cc big.Int
cc.SetUint64(c)

aa.Mul(&aa, &bb)
aa.Div(&aa, &cc)

return aa.Uint64(), !aa.IsUint64()
}

func BenchmarkOldMuldiv(b *testing.B) {
for i := 0; i < b.N; i++ {
u64 := uint64(i + 1)
OldMuldiv(u64, u64, u64)
OldMuldiv(math.MaxUint64, u64, u64)
OldMuldiv(u64, math.MaxUint64, u64)
OldMuldiv(math.MaxInt64, math.MaxInt64, u64)
}
}

func BenchmarkNewMuldiv(b *testing.B) {
for i := 0; i < b.N; i++ {
u64 := uint64(i + 1)
Muldiv(u64, u64, u64)
Muldiv(math.MaxUint64, u64, u64)
Muldiv(u64, math.MaxUint64, u64)
Muldiv(math.MaxInt64, math.MaxInt64, u64)
}
}

func TestNewMuldiv(t *testing.T) {
partitiontest.PartitionTest(t)
t.Parallel()

test := func(a, b, c uint64) {
r1, o1 := OldMuldiv(a, b, c)
r2, o2 := Muldiv(a, b, c)
require.Equal(t, o1, o2)
// implementations differ in r1,r2 if overflow. old implementation is
// returning an unspecified value
if !o1 {
require.Equal(t, r1, r2)
}
}
test(1, 2, 3)
test(1000000000, 2000000000, 1)
test(math.MaxUint64, 3, 4)
test(math.MaxUint64, 4, 3)
test(3, math.MaxUint64, 4)
test(4, math.MaxUint64, 3)
test(math.MaxUint64, math.MaxUint64, math.MaxUint64)
test(math.MaxUint64, math.MaxUint64, 5)
}
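
The test compares the two results only when neither implementation reports overflow, because big.Int.Uint64 is documented to return an undefined result once the value no longer fits in 64 bits (in practice the truncated low-order word). A small standalone sketch, not part of the commit, reproducing the overflowing case from TestNewMuldiv:

package main

import (
	"fmt"
	"math"
	"math/big"
)

func main() {
	// MaxUint64 * MaxUint64 / 5: the quotient is far larger than 2^64.
	q := new(big.Int).SetUint64(math.MaxUint64)
	q.Mul(q, new(big.Int).SetUint64(math.MaxUint64))
	q.Div(q, big.NewInt(5))

	fmt.Println(q.IsUint64()) // false: !IsUint64() is the overflow flag OldMuldiv reports
	fmt.Println(q.Uint64())   // formally undefined; the truncated low 64 bits in practice
}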
