// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package bits_test

import (
	. "math/bits"
	"runtime"
	"testing"
	"unsafe"
)

func TestUintSize(t *testing.T) {
	var x uint
	if want := unsafe.Sizeof(x) * 8; UintSize != want {
		t.Fatalf("UintSize = %d; want %d", UintSize, want)
	}
}

func TestLeadingZeros(t *testing.T) {
	for i := 0; i < 256; i++ {
		nlz := tab[i].nlz
		for k := 0; k < 64-8; k++ {
			x := uint64(i) << uint(k)
			if x <= 1<<8-1 {
				got := LeadingZeros8(uint8(x))
				want := nlz - k + (8 - 8)
				if x == 0 {
					want = 8
				}
				if got != want {
					t.Fatalf("LeadingZeros8(%#02x) == %d; want %d", x, got, want)
				}
			}
			if x <= 1<<16-1 {
				got := LeadingZeros16(uint16(x))
				want := nlz - k + (16 - 8)
				if x == 0 {
					want = 16
				}
				if got != want {
					t.Fatalf("LeadingZeros16(%#04x) == %d; want %d", x, got, want)
				}
			}
			if x <= 1<<32-1 {
				got := LeadingZeros32(uint32(x))
				want := nlz - k + (32 - 8)
				if x == 0 {
					want = 32
				}
				if got != want {
					t.Fatalf("LeadingZeros32(%#08x) == %d; want %d", x, got, want)
				}
				if UintSize == 32 {
					got = LeadingZeros(uint(x))
					if got != want {
						t.Fatalf("LeadingZeros(%#08x) == %d; want %d", x, got, want)
					}
				}
			}
			if x <= 1<<64-1 {
				got := LeadingZeros64(uint64(x))
				want := nlz - k + (64 - 8)
				if x == 0 {
					want = 64
				}
				if got != want {
					t.Fatalf("LeadingZeros64(%#016x) == %d; want %d", x, got, want)
				}
				if UintSize == 64 {
					got = LeadingZeros(uint(x))
					if got != want {
						t.Fatalf("LeadingZeros(%#016x) == %d; want %d", x, got, want)
					}
				}
			}
		}
	}
}

// Exported (global) variable serving as input for some
// of the benchmarks to ensure side-effect free calls
// are not optimized away.
var Input uint64 = DeBruijn64

// Exported (global) variable to store function results
// during benchmarking to ensure side-effect free calls
// are not optimized away.
var Output int
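
// A typical invocation for exercising the benchmarks below (assuming the
// standard Go toolchain; the pattern passed to -bench is up to the caller):
//
//	go test -run=NONE -bench=. math/bits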

func BenchmarkLeadingZeros(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += LeadingZeros(uint(Input) >> (uint(i) % UintSize))
	}
	Output = s
}

func BenchmarkLeadingZeros8(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += LeadingZeros8(uint8(Input) >> (uint(i) % 8))
	}
	Output = s
}

func BenchmarkLeadingZeros16(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += LeadingZeros16(uint16(Input) >> (uint(i) % 16))
	}
	Output = s
}

func BenchmarkLeadingZeros32(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += LeadingZeros32(uint32(Input) >> (uint(i) % 32))
	}
	Output = s
}

func BenchmarkLeadingZeros64(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += LeadingZeros64(uint64(Input) >> (uint(i) % 64))
	}
	Output = s
}

func TestTrailingZeros(t *testing.T) {
	for i := 0; i < 256; i++ {
		ntz := tab[i].ntz
		for k := 0; k < 64-8; k++ {
			x := uint64(i) << uint(k)
			want := ntz + k
			if x <= 1<<8-1 {
				got := TrailingZeros8(uint8(x))
				if x == 0 {
					want = 8
				}
				if got != want {
					t.Fatalf("TrailingZeros8(%#02x) == %d; want %d", x, got, want)
				}
			}
			if x <= 1<<16-1 {
				got := TrailingZeros16(uint16(x))
				if x == 0 {
					want = 16
				}
				if got != want {
					t.Fatalf("TrailingZeros16(%#04x) == %d; want %d", x, got, want)
				}
			}
			if x <= 1<<32-1 {
				got := TrailingZeros32(uint32(x))
				if x == 0 {
					want = 32
				}
				if got != want {
					t.Fatalf("TrailingZeros32(%#08x) == %d; want %d", x, got, want)
				}
				if UintSize == 32 {
					got = TrailingZeros(uint(x))
					if got != want {
						t.Fatalf("TrailingZeros(%#08x) == %d; want %d", x, got, want)
					}
				}
			}
			if x <= 1<<64-1 {
				got := TrailingZeros64(uint64(x))
				if x == 0 {
					want = 64
				}
				if got != want {
					t.Fatalf("TrailingZeros64(%#016x) == %d; want %d", x, got, want)
				}
				if UintSize == 64 {
					got = TrailingZeros(uint(x))
					if got != want {
						t.Fatalf("TrailingZeros(%#016x) == %d; want %d", x, got, want)
					}
				}
			}
		}
	}
}

func BenchmarkTrailingZeros(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += TrailingZeros(uint(Input) << (uint(i) % UintSize))
	}
	Output = s
}

func BenchmarkTrailingZeros8(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += TrailingZeros8(uint8(Input) << (uint(i) % 8))
	}
	Output = s
}

func BenchmarkTrailingZeros16(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += TrailingZeros16(uint16(Input) << (uint(i) % 16))
	}
	Output = s
}

func BenchmarkTrailingZeros32(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += TrailingZeros32(uint32(Input) << (uint(i) % 32))
	}
	Output = s
}

func BenchmarkTrailingZeros64(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += TrailingZeros64(uint64(Input) << (uint(i) % 64))
	}
	Output = s
}

func TestOnesCount(t *testing.T) {
	var x uint64
	for i := 0; i <= 64; i++ {
		testOnesCount(t, x, i)
		x = x<<1 | 1
	}
	for i := 64; i >= 0; i-- {
		testOnesCount(t, x, i)
		x = x << 1
	}
	for i := 0; i < 256; i++ {
		for k := 0; k < 64-8; k++ {
			testOnesCount(t, uint64(i)<<uint(k), tab[i].pop)
		}
	}
}

func testOnesCount(t *testing.T, x uint64, want int) {
	if x <= 1<<8-1 {
		got := OnesCount8(uint8(x))
		if got != want {
			t.Fatalf("OnesCount8(%#02x) == %d; want %d", uint8(x), got, want)
		}
	}
	if x <= 1<<16-1 {
		got := OnesCount16(uint16(x))
		if got != want {
			t.Fatalf("OnesCount16(%#04x) == %d; want %d", uint16(x), got, want)
		}
	}
	if x <= 1<<32-1 {
		got := OnesCount32(uint32(x))
		if got != want {
			t.Fatalf("OnesCount32(%#08x) == %d; want %d", uint32(x), got, want)
		}
		if UintSize == 32 {
			got = OnesCount(uint(x))
			if got != want {
				t.Fatalf("OnesCount(%#08x) == %d; want %d", uint32(x), got, want)
			}
		}
	}
	if x <= 1<<64-1 {
		got := OnesCount64(uint64(x))
		if got != want {
			t.Fatalf("OnesCount64(%#016x) == %d; want %d", x, got, want)
		}
		if UintSize == 64 {
			got = OnesCount(uint(x))
			if got != want {
				t.Fatalf("OnesCount(%#016x) == %d; want %d", x, got, want)
			}
		}
	}
}

func BenchmarkOnesCount(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += OnesCount(uint(Input))
	}
	Output = s
}

func BenchmarkOnesCount8(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += OnesCount8(uint8(Input))
	}
	Output = s
}

func BenchmarkOnesCount16(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += OnesCount16(uint16(Input))
	}
	Output = s
}

func BenchmarkOnesCount32(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += OnesCount32(uint32(Input))
	}
	Output = s
}

func BenchmarkOnesCount64(b *testing.B) {
	var s int
	for i := 0; i < b.N; i++ {
		s += OnesCount64(uint64(Input))
	}
	Output = s
}

func TestRotateLeft(t *testing.T) {
	var m uint64 = DeBruijn64
	for k := uint(0); k < 128; k++ {
		x8 := uint8(m)
		got8 := RotateLeft8(x8, int(k))
		want8 := x8<<(k&0x7) | x8>>(8-k&0x7)
		if got8 != want8 {
			t.Fatalf("RotateLeft8(%#02x, %d) == %#02x; want %#02x", x8, k, got8, want8)
		}
		got8 = RotateLeft8(want8, -int(k))
		if got8 != x8 {
			t.Fatalf("RotateLeft8(%#02x, -%d) == %#02x; want %#02x", want8, k, got8, x8)
		}
		x16 := uint16(m)
		got16 := RotateLeft16(x16, int(k))
		want16 := x16<<(k&0xf) | x16>>(16-k&0xf)
		if got16 != want16 {
			t.Fatalf("RotateLeft16(%#04x, %d) == %#04x; want %#04x", x16, k, got16, want16)
		}
		got16 = RotateLeft16(want16, -int(k))
		if got16 != x16 {
			t.Fatalf("RotateLeft16(%#04x, -%d) == %#04x; want %#04x", want16, k, got16, x16)
		}
		x32 := uint32(m)
		got32 := RotateLeft32(x32, int(k))
		want32 := x32<<(k&0x1f) | x32>>(32-k&0x1f)
		if got32 != want32 {
			t.Fatalf("RotateLeft32(%#08x, %d) == %#08x; want %#08x", x32, k, got32, want32)
		}
		got32 = RotateLeft32(want32, -int(k))
		if got32 != x32 {
			t.Fatalf("RotateLeft32(%#08x, -%d) == %#08x; want %#08x", want32, k, got32, x32)
		}
		if UintSize == 32 {
			x := uint(m)
			got := RotateLeft(x, int(k))
			want := x<<(k&0x1f) | x>>(32-k&0x1f)
			if got != want {
				t.Fatalf("RotateLeft(%#08x, %d) == %#08x; want %#08x", x, k, got, want)
			}
			got = RotateLeft(want, -int(k))
			if got != x {
				t.Fatalf("RotateLeft(%#08x, -%d) == %#08x; want %#08x", want, k, got, x)
			}
		}
		x64 := uint64(m)
		got64 := RotateLeft64(x64, int(k))
		want64 := x64<<(k&0x3f) | x64>>(64-k&0x3f)
		if got64 != want64 {
			t.Fatalf("RotateLeft64(%#016x, %d) == %#016x; want %#016x", x64, k, got64, want64)
		}
		got64 = RotateLeft64(want64, -int(k))
		if got64 != x64 {
			t.Fatalf("RotateLeft64(%#016x, -%d) == %#016x; want %#016x", want64, k, got64, x64)
		}
		if UintSize == 64 {
			x := uint(m)
			got := RotateLeft(x, int(k))
			want := x<<(k&0x3f) | x>>(64-k&0x3f)
			if got != want {
				t.Fatalf("RotateLeft(%#016x, %d) == %#016x; want %#016x", x, k, got, want)
			}
			got = RotateLeft(want, -int(k))
			if got != x {
				t.Fatalf("RotateLeft(%#016x, -%d) == %#016x; want %#016x", want, k, got, x)
			}
		}
	}
}

func BenchmarkRotateLeft(b *testing.B) {
	var s uint
	for i := 0; i < b.N; i++ {
		s += RotateLeft(uint(Input), i)
	}
	Output = int(s)
}

func BenchmarkRotateLeft8(b *testing.B) {
	var s uint8
	for i := 0; i < b.N; i++ {
		s += RotateLeft8(uint8(Input), i)
	}
	Output = int(s)
}

func BenchmarkRotateLeft16(b *testing.B) {
	var s uint16
	for i := 0; i < b.N; i++ {
		s += RotateLeft16(uint16(Input), i)
	}
	Output = int(s)
}

func BenchmarkRotateLeft32(b *testing.B) {
	var s uint32
	for i := 0; i < b.N; i++ {
		s += RotateLeft32(uint32(Input), i)
	}
	Output = int(s)
}

func BenchmarkRotateLeft64(b *testing.B) {
	var s uint64
	for i := 0; i < b.N; i++ {
		s += RotateLeft64(uint64(Input), i)
	}
	Output = int(s)
}

func TestReverse(t *testing.T) {
	// test each bit
	for i := uint(0); i < 64; i++ {
		testReverse(t, uint64(1)<<i, uint64(1)<<(63-i))
	}
	// test a few patterns
	for _, test := range []struct {
		x, r uint64
	}{
		{0, 0},
		{0x1, 0x8 << 60},
		{0x2, 0x4 << 60},
		{0x3, 0xc << 60},
		{0x4, 0x2 << 60},
		{0x5, 0xa << 60},
		{0x6, 0x6 << 60},
		{0x7, 0xe << 60},
		{0x8, 0x1 << 60},
		{0x9, 0x9 << 60},
		{0xa, 0x5 << 60},
		{0xb, 0xd << 60},
		{0xc, 0x3 << 60},
		{0xd, 0xb << 60},
		{0xe, 0x7 << 60},
		{0xf, 0xf << 60},
		{0x5686487, 0xe12616a000000000},
		{0x0123456789abcdef, 0xf7b3d591e6a2c480},
	} {
		testReverse(t, test.x, test.r)
		testReverse(t, test.r, test.x)
	}
}

func testReverse(t *testing.T, x64, want64 uint64) {
	x8 := uint8(x64)
	got8 := Reverse8(x8)
	want8 := uint8(want64 >> (64 - 8))
	if got8 != want8 {
		t.Fatalf("Reverse8(%#02x) == %#02x; want %#02x", x8, got8, want8)
	}
	x16 := uint16(x64)
	got16 := Reverse16(x16)
	want16 := uint16(want64 >> (64 - 16))
	if got16 != want16 {
		t.Fatalf("Reverse16(%#04x) == %#04x; want %#04x", x16, got16, want16)
	}
	x32 := uint32(x64)
	got32 := Reverse32(x32)
	want32 := uint32(want64 >> (64 - 32))
	if got32 != want32 {
		t.Fatalf("Reverse32(%#08x) == %#08x; want %#08x", x32, got32, want32)
	}
	if UintSize == 32 {
		x := uint(x32)
		got := Reverse(x)
		want := uint(want32)
		if got != want {
			t.Fatalf("Reverse(%#08x) == %#08x; want %#08x", x, got, want)
		}
	}
	got64 := Reverse64(x64)
	if got64 != want64 {
		t.Fatalf("Reverse64(%#016x) == %#016x; want %#016x", x64, got64, want64)
	}
	if UintSize == 64 {
		x := uint(x64)
		got := Reverse(x)
		want := uint(want64)
		if got != want {
			t.Fatalf("Reverse(%#016x) == %#016x; want %#016x", x, got, want)
		}
	}
}

func BenchmarkReverse(b *testing.B) {
	var s uint
	for i := 0; i < b.N; i++ {
		s += Reverse(uint(i))
	}
	Output = int(s)
}

func BenchmarkReverse8(b *testing.B) {
	var s uint8
	for i := 0; i < b.N; i++ {
		s += Reverse8(uint8(i))
	}
	Output = int(s)
}

func BenchmarkReverse16(b *testing.B) {
	var s uint16
	for i := 0; i < b.N; i++ {
		s += Reverse16(uint16(i))
	}
	Output = int(s)
}

func BenchmarkReverse32(b *testing.B) {
	var s uint32
	for i := 0; i < b.N; i++ {
		s += Reverse32(uint32(i))
	}
	Output = int(s)
}

func BenchmarkReverse64(b *testing.B) {
	var s uint64
	for i := 0; i < b.N; i++ {
		s += Reverse64(uint64(i))
	}
	Output = int(s)
}

func TestReverseBytes(t *testing.T) {
	for _, test := range []struct {
		x, r uint64
	}{
		{0, 0},
		{0x01, 0x01 << 56},
		{0x0123, 0x2301 << 48},
		{0x012345, 0x452301 << 40},
		{0x01234567, 0x67452301 << 32},
		{0x0123456789, 0x8967452301 << 24},
		{0x0123456789ab, 0xab8967452301 << 16},
		{0x0123456789abcd, 0xcdab8967452301 << 8},
		{0x0123456789abcdef, 0xefcdab8967452301 << 0},
	} {
		testReverseBytes(t, test.x, test.r)
		testReverseBytes(t, test.r, test.x)
	}
}

func testReverseBytes(t *testing.T, x64, want64 uint64) {
	x16 := uint16(x64)
	got16 := ReverseBytes16(x16)
	want16 := uint16(want64 >> (64 - 16))
	if got16 != want16 {
		t.Fatalf("ReverseBytes16(%#04x) == %#04x; want %#04x", x16, got16, want16)
	}
	x32 := uint32(x64)
	got32 := ReverseBytes32(x32)
	want32 := uint32(want64 >> (64 - 32))
	if got32 != want32 {
		t.Fatalf("ReverseBytes32(%#08x) == %#08x; want %#08x", x32, got32, want32)
	}
	if UintSize == 32 {
		x := uint(x32)
		got := ReverseBytes(x)
		want := uint(want32)
		if got != want {
			t.Fatalf("ReverseBytes(%#08x) == %#08x; want %#08x", x, got, want)
		}
	}
	got64 := ReverseBytes64(x64)
	if got64 != want64 {
		t.Fatalf("ReverseBytes64(%#016x) == %#016x; want %#016x", x64, got64, want64)
	}
	if UintSize == 64 {
		x := uint(x64)
		got := ReverseBytes(x)
		want := uint(want64)
		if got != want {
			t.Fatalf("ReverseBytes(%#016x) == %#016x; want %#016x", x, got, want)
		}
	}
}

func BenchmarkReverseBytes(b *testing.B) {
	var s uint
	for i := 0; i < b.N; i++ {
		s += ReverseBytes(uint(i))
	}
	Output = int(s)
}

func BenchmarkReverseBytes16(b *testing.B) {
	var s uint16
	for i := 0; i < b.N; i++ {
		s += ReverseBytes16(uint16(i))
	}
	Output = int(s)
}

func BenchmarkReverseBytes32(b *testing.B) {
	var s uint32
	for i := 0; i < b.N; i++ {
		s += ReverseBytes32(uint32(i))
	}
	Output = int(s)
}

func BenchmarkReverseBytes64(b *testing.B) {
	var s uint64
	for i := 0; i < b.N; i++ {
		s += ReverseBytes64(uint64(i))
	}
	Output = int(s)
}

func TestLen(t *testing.T) {
	for i := 0; i < 256; i++ {
		len := 8 - tab[i].nlz
		for k := 0; k < 64-8; k++ {
			x := uint64(i) << uint(k)
			want := 0
			if x != 0 {
				want = len + k
			}
			if x <= 1<<8-1 {
				got := Len8(uint8(x))
				if got != want {
					t.Fatalf("Len8(%#02x) == %d; want %d", x, got, want)
				}
			}
			if x <= 1<<16-1 {
				got := Len16(uint16(x))
				if got != want {
					t.Fatalf("Len16(%#04x) == %d; want %d", x, got, want)
				}
			}
			if x <= 1<<32-1 {
				got := Len32(uint32(x))
				if got != want {
					t.Fatalf("Len32(%#08x) == %d; want %d", x, got, want)
				}
				if UintSize == 32 {
					got := Len(uint(x))
					if got != want {
						t.Fatalf("Len(%#08x) == %d; want %d", x, got, want)
					}
				}
			}
			if x <= 1<<64-1 {
				got := Len64(uint64(x))
				if got != want {
					t.Fatalf("Len64(%#016x) == %d; want %d", x, got, want)
				}
				if UintSize == 64 {
					got := Len(uint(x))
					if got != want {
						t.Fatalf("Len(%#016x) == %d; want %d", x, got, want)
					}
				}
			}
		}
	}
}

const (
	_M   = 1<<UintSize - 1
	_M32 = 1<<32 - 1
	_M64 = 1<<64 - 1
)
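
// A worked instance of the Add/Sub tables below (derivable from the carry
// semantics they encode): with x = _M, y = 1, and no carry in, Add(_M, 1, 0)
// wraps to 0 with a carry out of 1, and Sub(0, _M, 0) gives back 1 with a
// borrow of 1.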

func TestAddSubUint(t *testing.T) {
	test := func(msg string, f func(x, y, c uint) (z, cout uint), x, y, c, z, cout uint) {
		z1, cout1 := f(x, y, c)
		if z1 != z || cout1 != cout {
			t.Errorf("%s: got z:cout = %#x:%#x; want %#x:%#x", msg, z1, cout1, z, cout)
		}
	}
	for _, a := range []struct{ x, y, c, z, cout uint }{
		{0, 0, 0, 0, 0},
		{0, 1, 0, 1, 0},
		{0, 0, 1, 1, 0},
		{0, 1, 1, 2, 0},
		{12345, 67890, 0, 80235, 0},
		{12345, 67890, 1, 80236, 0},
		{_M, 1, 0, 0, 1},
		{_M, 0, 1, 0, 1},
		{_M, 1, 1, 1, 1},
		{_M, _M, 0, _M - 1, 1},
		{_M, _M, 1, _M, 1},
	} {
		test("Add", Add, a.x, a.y, a.c, a.z, a.cout)
		test("Add symmetric", Add, a.y, a.x, a.c, a.z, a.cout)
		test("Sub", Sub, a.z, a.x, a.c, a.y, a.cout)
		test("Sub symmetric", Sub, a.z, a.y, a.c, a.x, a.cout)
		// The above code can't test the intrinsic implementation, because the passed function is not called directly.
		// The following code uses a closure to test the intrinsic version in case the function is intrinsified.
		test("Add intrinsic", func(x, y, c uint) (uint, uint) { return Add(x, y, c) }, a.x, a.y, a.c, a.z, a.cout)
		test("Add intrinsic symmetric", func(x, y, c uint) (uint, uint) { return Add(x, y, c) }, a.y, a.x, a.c, a.z, a.cout)
		test("Sub intrinsic", func(x, y, c uint) (uint, uint) { return Sub(x, y, c) }, a.z, a.x, a.c, a.y, a.cout)
		test("Sub intrinsic symmetric", func(x, y, c uint) (uint, uint) { return Sub(x, y, c) }, a.z, a.y, a.c, a.x, a.cout)
	}
}

func TestAddSubUint32(t *testing.T) {
	test := func(msg string, f func(x, y, c uint32) (z, cout uint32), x, y, c, z, cout uint32) {
		z1, cout1 := f(x, y, c)
		if z1 != z || cout1 != cout {
			t.Errorf("%s: got z:cout = %#x:%#x; want %#x:%#x", msg, z1, cout1, z, cout)
		}
	}
	for _, a := range []struct{ x, y, c, z, cout uint32 }{
		{0, 0, 0, 0, 0},
		{0, 1, 0, 1, 0},
		{0, 0, 1, 1, 0},
		{0, 1, 1, 2, 0},
		{12345, 67890, 0, 80235, 0},
		{12345, 67890, 1, 80236, 0},
		{_M32, 1, 0, 0, 1},
		{_M32, 0, 1, 0, 1},
		{_M32, 1, 1, 1, 1},
		{_M32, _M32, 0, _M32 - 1, 1},
		{_M32, _M32, 1, _M32, 1},
	} {
		test("Add32", Add32, a.x, a.y, a.c, a.z, a.cout)
		test("Add32 symmetric", Add32, a.y, a.x, a.c, a.z, a.cout)
		test("Sub32", Sub32, a.z, a.x, a.c, a.y, a.cout)
		test("Sub32 symmetric", Sub32, a.z, a.y, a.c, a.x, a.cout)
	}
}

func TestAddSubUint64(t *testing.T) {
	test := func(msg string, f func(x, y, c uint64) (z, cout uint64), x, y, c, z, cout uint64) {
		z1, cout1 := f(x, y, c)
		if z1 != z || cout1 != cout {
			t.Errorf("%s: got z:cout = %#x:%#x; want %#x:%#x", msg, z1, cout1, z, cout)
		}
	}
	for _, a := range []struct{ x, y, c, z, cout uint64 }{
		{0, 0, 0, 0, 0},
		{0, 1, 0, 1, 0},
		{0, 0, 1, 1, 0},
		{0, 1, 1, 2, 0},
		{12345, 67890, 0, 80235, 0},
		{12345, 67890, 1, 80236, 0},
		{_M64, 1, 0, 0, 1},
		{_M64, 0, 1, 0, 1},
		{_M64, 1, 1, 1, 1},
		{_M64, _M64, 0, _M64 - 1, 1},
		{_M64, _M64, 1, _M64, 1},
	} {
		test("Add64", Add64, a.x, a.y, a.c, a.z, a.cout)
		test("Add64 symmetric", Add64, a.y, a.x, a.c, a.z, a.cout)
		test("Sub64", Sub64, a.z, a.x, a.c, a.y, a.cout)
		test("Sub64 symmetric", Sub64, a.z, a.y, a.c, a.x, a.cout)
		// The above code can't test the intrinsic implementation, because the passed function is not called directly.
		// The following code uses a closure to test the intrinsic version in case the function is intrinsified.
		test("Add64 intrinsic", func(x, y, c uint64) (uint64, uint64) { return Add64(x, y, c) }, a.x, a.y, a.c, a.z, a.cout)
		test("Add64 intrinsic symmetric", func(x, y, c uint64) (uint64, uint64) { return Add64(x, y, c) }, a.y, a.x, a.c, a.z, a.cout)
		test("Sub64 intrinsic", func(x, y, c uint64) (uint64, uint64) { return Sub64(x, y, c) }, a.z, a.x, a.c, a.y, a.cout)
		test("Sub64 intrinsic symmetric", func(x, y, c uint64) (uint64, uint64) { return Sub64(x, y, c) }, a.z, a.y, a.c, a.x, a.cout)
	}
}

func TestAdd64OverflowPanic(t *testing.T) {
	// Test that 64-bit overflow panics fire correctly.
	// These are designed to improve coverage of compiler intrinsics.
	tests := []func(uint64, uint64) uint64{
		func(a, b uint64) uint64 {
			x, c := Add64(a, b, 0)
			if c > 0 {
				panic("overflow")
			}
			return x
		},
		func(a, b uint64) uint64 {
			x, c := Add64(a, b, 0)
			if c != 0 {
				panic("overflow")
			}
			return x
		},
		func(a, b uint64) uint64 {
			x, c := Add64(a, b, 0)
			if c == 1 {
				panic("overflow")
			}
			return x
		},
		func(a, b uint64) uint64 {
			x, c := Add64(a, b, 0)
			if c != 1 {
				return x
			}
			panic("overflow")
		},
		func(a, b uint64) uint64 {
			x, c := Add64(a, b, 0)
			if c == 0 {
				return x
			}
			panic("overflow")
		},
	}
	for _, test := range tests {
		shouldPanic := func(f func()) {
			defer func() {
				if err := recover(); err == nil {
					t.Fatalf("expected panic")
				}
			}()
			f()
		}
		// overflow
		shouldPanic(func() { test(_M64, 1) })
		shouldPanic(func() { test(1, _M64) })
		shouldPanic(func() { test(_M64, _M64) })
		// no overflow
		test(_M64, 0)
		test(0, 0)
		test(1, 1)
	}
}

func TestSub64OverflowPanic(t *testing.T) {
	// Test that 64-bit overflow panics fire correctly.
	// These are designed to improve coverage of compiler intrinsics.
	tests := []func(uint64, uint64) uint64{
		func(a, b uint64) uint64 {
			x, c := Sub64(a, b, 0)
			if c > 0 {
				panic("overflow")
			}
			return x
		},
		func(a, b uint64) uint64 {
			x, c := Sub64(a, b, 0)
			if c != 0 {
				panic("overflow")
			}
			return x
		},
		func(a, b uint64) uint64 {
			x, c := Sub64(a, b, 0)
			if c == 1 {
				panic("overflow")
			}
			return x
		},
		func(a, b uint64) uint64 {
			x, c := Sub64(a, b, 0)
			if c != 1 {
				return x
			}
			panic("overflow")
		},
		func(a, b uint64) uint64 {
			x, c := Sub64(a, b, 0)
			if c == 0 {
				return x
			}
			panic("overflow")
		},
	}
	for _, test := range tests {
		shouldPanic := func(f func()) {
			defer func() {
				if err := recover(); err == nil {
					t.Fatalf("expected panic")
				}
			}()
			f()
		}
		// overflow
		shouldPanic(func() { test(0, 1) })
		shouldPanic(func() { test(1, _M64) })
		shouldPanic(func() { test(_M64-1, _M64) })
		// no overflow
		test(_M64, 0)
		test(0, 0)
		test(1, 1)
	}
}

func TestMulDiv(t *testing.T) {
	testMul := func(msg string, f func(x, y uint) (hi, lo uint), x, y, hi, lo uint) {
		hi1, lo1 := f(x, y)
		if hi1 != hi || lo1 != lo {
			t.Errorf("%s: got hi:lo = %#x:%#x; want %#x:%#x", msg, hi1, lo1, hi, lo)
		}
	}
	testDiv := func(msg string, f func(hi, lo, y uint) (q, r uint), hi, lo, y, q, r uint) {
		q1, r1 := f(hi, lo, y)
		if q1 != q || r1 != r {
			t.Errorf("%s: got q:r = %#x:%#x; want %#x:%#x", msg, q1, r1, q, r)
		}
	}
	for _, a := range []struct {
		x, y      uint
		hi, lo, r uint
	}{
		{1 << (UintSize - 1), 2, 1, 0, 1},
		{_M, _M, _M - 1, 1, 42},
	} {
		testMul("Mul", Mul, a.x, a.y, a.hi, a.lo)
		testMul("Mul symmetric", Mul, a.y, a.x, a.hi, a.lo)
		testDiv("Div", Div, a.hi, a.lo+a.r, a.y, a.x, a.r)
		testDiv("Div symmetric", Div, a.hi, a.lo+a.r, a.x, a.y, a.r)
		// The above code can't test the intrinsic implementation, because the passed function is not called directly.
		// The following code uses a closure to test the intrinsic version in case the function is intrinsified.
		testMul("Mul intrinsic", func(x, y uint) (uint, uint) { return Mul(x, y) }, a.x, a.y, a.hi, a.lo)
		testMul("Mul intrinsic symmetric", func(x, y uint) (uint, uint) { return Mul(x, y) }, a.y, a.x, a.hi, a.lo)
		testDiv("Div intrinsic", func(hi, lo, y uint) (uint, uint) { return Div(hi, lo, y) }, a.hi, a.lo+a.r, a.y, a.x, a.r)
		testDiv("Div intrinsic symmetric", func(hi, lo, y uint) (uint, uint) { return Div(hi, lo, y) }, a.hi, a.lo+a.r, a.x, a.y, a.r)
	}
}

func TestMulDiv32(t *testing.T) {
	testMul := func(msg string, f func(x, y uint32) (hi, lo uint32), x, y, hi, lo uint32) {
		hi1, lo1 := f(x, y)
		if hi1 != hi || lo1 != lo {
			t.Errorf("%s: got hi:lo = %#x:%#x; want %#x:%#x", msg, hi1, lo1, hi, lo)
		}
	}
	testDiv := func(msg string, f func(hi, lo, y uint32) (q, r uint32), hi, lo, y, q, r uint32) {
		q1, r1 := f(hi, lo, y)
		if q1 != q || r1 != r {
			t.Errorf("%s: got q:r = %#x:%#x; want %#x:%#x", msg, q1, r1, q, r)
		}
	}
	for _, a := range []struct {
		x, y      uint32
		hi, lo, r uint32
	}{
		{1 << 31, 2, 1, 0, 1},
		{0xc47dfa8c, 50911, 0x98a4, 0x998587f4, 13},
		{_M32, _M32, _M32 - 1, 1, 42},
	} {
		testMul("Mul32", Mul32, a.x, a.y, a.hi, a.lo)
		testMul("Mul32 symmetric", Mul32, a.y, a.x, a.hi, a.lo)
		testDiv("Div32", Div32, a.hi, a.lo+a.r, a.y, a.x, a.r)
		testDiv("Div32 symmetric", Div32, a.hi, a.lo+a.r, a.x, a.y, a.r)
	}
}

func TestMulDiv64(t *testing.T) {
	testMul := func(msg string, f func(x, y uint64) (hi, lo uint64), x, y, hi, lo uint64) {
		hi1, lo1 := f(x, y)
		if hi1 != hi || lo1 != lo {
			t.Errorf("%s: got hi:lo = %#x:%#x; want %#x:%#x", msg, hi1, lo1, hi, lo)
		}
	}
	testDiv := func(msg string, f func(hi, lo, y uint64) (q, r uint64), hi, lo, y, q, r uint64) {
		q1, r1 := f(hi, lo, y)
		if q1 != q || r1 != r {
			t.Errorf("%s: got q:r = %#x:%#x; want %#x:%#x", msg, q1, r1, q, r)
		}
	}
	for _, a := range []struct {
		x, y      uint64
		hi, lo, r uint64
	}{
		{1 << 63, 2, 1, 0, 1},
		{0x3626229738a3b9, 0xd8988a9f1cc4a61, 0x2dd0712657fe8, 0x9dd6a3364c358319, 13},
		{_M64, _M64, _M64 - 1, 1, 42},
	} {
		testMul("Mul64", Mul64, a.x, a.y, a.hi, a.lo)
		testMul("Mul64 symmetric", Mul64, a.y, a.x, a.hi, a.lo)
		testDiv("Div64", Div64, a.hi, a.lo+a.r, a.y, a.x, a.r)
		testDiv("Div64 symmetric", Div64, a.hi, a.lo+a.r, a.x, a.y, a.r)
		// The above code can't test the intrinsic implementation, because the passed function is not called directly.
		// The following code uses a closure to test the intrinsic version in case the function is intrinsified.
		testMul("Mul64 intrinsic", func(x, y uint64) (uint64, uint64) { return Mul64(x, y) }, a.x, a.y, a.hi, a.lo)
		testMul("Mul64 intrinsic symmetric", func(x, y uint64) (uint64, uint64) { return Mul64(x, y) }, a.y, a.x, a.hi, a.lo)
		testDiv("Div64 intrinsic", func(hi, lo, y uint64) (uint64, uint64) { return Div64(hi, lo, y) }, a.hi, a.lo+a.r, a.y, a.x, a.r)
		testDiv("Div64 intrinsic symmetric", func(hi, lo, y uint64) (uint64, uint64) { return Div64(hi, lo, y) }, a.hi, a.lo+a.r, a.x, a.y, a.r)
	}
}

const (
	divZeroError  = "runtime error: integer divide by zero"
	overflowError = "runtime error: integer overflow"
)

func TestDivPanicOverflow(t *testing.T) {
	// Expect a panic
	defer func() {
		if err := recover(); err == nil {
			t.Error("Div should have panicked when y<=hi")
		} else if e, ok := err.(runtime.Error); !ok || e.Error() != overflowError {
			t.Errorf("Div expected panic: %q, got: %q ", overflowError, e.Error())
		}
	}()
	q, r := Div(1, 0, 1)
	t.Errorf("undefined q, r = %v, %v calculated when Div should have panicked", q, r)
}

func TestDiv32PanicOverflow(t *testing.T) {
	// Expect a panic
	defer func() {
		if err := recover(); err == nil {
			t.Error("Div32 should have panicked when y<=hi")
		} else if e, ok := err.(runtime.Error); !ok || e.Error() != overflowError {
			t.Errorf("Div32 expected panic: %q, got: %q ", overflowError, e.Error())
		}
	}()
	q, r := Div32(1, 0, 1)
	t.Errorf("undefined q, r = %v, %v calculated when Div32 should have panicked", q, r)
}

func TestDiv64PanicOverflow(t *testing.T) {
	// Expect a panic
	defer func() {
		if err := recover(); err == nil {
			t.Error("Div64 should have panicked when y<=hi")
		} else if e, ok := err.(runtime.Error); !ok || e.Error() != overflowError {
			t.Errorf("Div64 expected panic: %q, got: %q ", overflowError, e.Error())
		}
	}()
	q, r := Div64(1, 0, 1)
	t.Errorf("undefined q, r = %v, %v calculated when Div64 should have panicked", q, r)
}

func TestDivPanicZero(t *testing.T) {
	// Expect a panic
	defer func() {
		if err := recover(); err == nil {
			t.Error("Div should have panicked when y==0")
		} else if e, ok := err.(runtime.Error); !ok || e.Error() != divZeroError {
			t.Errorf("Div expected panic: %q, got: %q ", divZeroError, e.Error())
		}
	}()
	q, r := Div(1, 1, 0)
	t.Errorf("undefined q, r = %v, %v calculated when Div should have panicked", q, r)
}

func TestDiv32PanicZero(t *testing.T) {
	// Expect a panic
	defer func() {
		if err := recover(); err == nil {
			t.Error("Div32 should have panicked when y==0")
		} else if e, ok := err.(runtime.Error); !ok || e.Error() != divZeroError {
			t.Errorf("Div32 expected panic: %q, got: %q ", divZeroError, e.Error())
		}
	}()
	q, r := Div32(1, 1, 0)
	t.Errorf("undefined q, r = %v, %v calculated when Div32 should have panicked", q, r)
}

func TestDiv64PanicZero(t *testing.T) {
	// Expect a panic
	defer func() {
		if err := recover(); err == nil {
			t.Error("Div64 should have panicked when y==0")
		} else if e, ok := err.(runtime.Error); !ok || e.Error() != divZeroError {
			t.Errorf("Div64 expected panic: %q, got: %q ", divZeroError, e.Error())
		}
	}()
	q, r := Div64(1, 1, 0)
	t.Errorf("undefined q, r = %v, %v calculated when Div64 should have panicked", q, r)
}

func TestRem32(t *testing.T) {
	// Sanity check: for non-overflowing dividends, the result is the
	// same as the rem returned by Div32.
	hi, lo, y := uint32(510510), uint32(9699690), uint32(510510+1) // ensure hi < y
	for i := 0; i < 1000; i++ {
		r := Rem32(hi, lo, y)
		_, r2 := Div32(hi, lo, y)
		if r != r2 {
			t.Errorf("Rem32(%v, %v, %v) returned %v, but Div32 returned rem %v", hi, lo, y, r, r2)
		}
		y += 13
	}
}

func TestRem32Overflow(t *testing.T) {
	// To trigger a quotient overflow, we need y <= hi
	hi, lo, y := uint32(510510), uint32(9699690), uint32(7)
	for i := 0; i < 1000; i++ {
		r := Rem32(hi, lo, y)
		_, r2 := Div64(0, uint64(hi)<<32|uint64(lo), uint64(y))
		if r != uint32(r2) {
			t.Errorf("Rem32(%v, %v, %v) returned %v, but Div64 returned rem %v", hi, lo, y, r, r2)
		}
		y += 13
	}
}

func TestRem64(t *testing.T) {
	// Sanity check: for non-overflowing dividends, the result is the
	// same as the rem returned by Div64.
	hi, lo, y := uint64(510510), uint64(9699690), uint64(510510+1) // ensure hi < y
	for i := 0; i < 1000; i++ {
		r := Rem64(hi, lo, y)
		_, r2 := Div64(hi, lo, y)
		if r != r2 {
			t.Errorf("Rem64(%v, %v, %v) returned %v, but Div64 returned rem %v", hi, lo, y, r, r2)
		}
		y += 13
	}
}

func TestRem64Overflow(t *testing.T) {
	Rem64Tests := []struct {
		hi, lo, y uint64
		rem       uint64
	}{
		// Testcases computed using Python 3, as:
		// >>> hi = 42; lo = 1119; y = 42
		// >>> ((hi<<64)+lo) % y
		{42, 1119, 42, 27},
		{42, 1119, 38, 9},
		{42, 1119, 26, 23},
		{469, 0, 467, 271},
		{469, 0, 113, 58},
		{111111, 111111, 1171, 803},
		{3968194946088682615, 3192705705065114702, 1000037, 56067},
	}
	for _, rt := range Rem64Tests {
		if rt.hi < rt.y {
			t.Fatalf("Rem64(%v, %v, %v) is not a test with quo overflow", rt.hi, rt.lo, rt.y)
		}
		rem := Rem64(rt.hi, rt.lo, rt.y)
		if rem != rt.rem {
			t.Errorf("Rem64(%v, %v, %v) returned %v, wanted %v",
				rt.hi, rt.lo, rt.y, rem, rt.rem)
		}
	}
}

func BenchmarkAdd(b *testing.B) {
	var z, c uint
	for i := 0; i < b.N; i++ {
		z, c = Add(uint(Input), uint(i), c)
	}
	Output = int(z + c)
}

func BenchmarkAdd32(b *testing.B) {
	var z, c uint32
	for i := 0; i < b.N; i++ {
		z, c = Add32(uint32(Input), uint32(i), c)
	}
	Output = int(z + c)
}

func BenchmarkAdd64(b *testing.B) {
	var z, c uint64
	for i := 0; i < b.N; i++ {
		z, c = Add64(uint64(Input), uint64(i), c)
	}
	Output = int(z + c)
}

func BenchmarkAdd64multiple(b *testing.B) {
	var z0 = uint64(Input)
	var z1 = uint64(Input)
	var z2 = uint64(Input)
	var z3 = uint64(Input)
	for i := 0; i < b.N; i++ {
		var c uint64
		z0, c = Add64(z0, uint64(i), c)
		z1, c = Add64(z1, uint64(i), c)
		z2, c = Add64(z2, uint64(i), c)
		z3, _ = Add64(z3, uint64(i), c)
	}
	Output = int(z0 + z1 + z2 + z3)
}

func BenchmarkSub(b *testing.B) {
	var z, c uint
	for i := 0; i < b.N; i++ {
		z, c = Sub(uint(Input), uint(i), c)
	}
	Output = int(z + c)
}

func BenchmarkSub32(b *testing.B) {
	var z, c uint32
	for i := 0; i < b.N; i++ {
		z, c = Sub32(uint32(Input), uint32(i), c)
	}
	Output = int(z + c)
}

func BenchmarkSub64(b *testing.B) {
	var z, c uint64
	for i := 0; i < b.N; i++ {
		z, c = Sub64(uint64(Input), uint64(i), c)
	}
	Output = int(z + c)
}

func BenchmarkSub64multiple(b *testing.B) {
	var z0 = uint64(Input)
	var z1 = uint64(Input)
	var z2 = uint64(Input)
	var z3 = uint64(Input)
	for i := 0; i < b.N; i++ {
		var c uint64
		z0, c = Sub64(z0, uint64(i), c)
		z1, c = Sub64(z1, uint64(i), c)
		z2, c = Sub64(z2, uint64(i), c)
		z3, _ = Sub64(z3, uint64(i), c)
	}
	Output = int(z0 + z1 + z2 + z3)
}

func BenchmarkMul(b *testing.B) {
	var hi, lo uint
	for i := 0; i < b.N; i++ {
		hi, lo = Mul(uint(Input), uint(i))
	}
	Output = int(hi + lo)
}

func BenchmarkMul32(b *testing.B) {
	var hi, lo uint32
	for i := 0; i < b.N; i++ {
		hi, lo = Mul32(uint32(Input), uint32(i))
	}
	Output = int(hi + lo)
}

func BenchmarkMul64(b *testing.B) {
	var hi, lo uint64
	for i := 0; i < b.N; i++ {
		hi, lo = Mul64(uint64(Input), uint64(i))
	}
	Output = int(hi + lo)
}

func BenchmarkDiv(b *testing.B) {
	var q, r uint
	for i := 0; i < b.N; i++ {
		q, r = Div(1, uint(i), uint(Input))
	}
	Output = int(q + r)
}

func BenchmarkDiv32(b *testing.B) {
	var q, r uint32
	for i := 0; i < b.N; i++ {
		q, r = Div32(1, uint32(i), uint32(Input))
	}
	Output = int(q + r)
}

func BenchmarkDiv64(b *testing.B) {
	var q, r uint64
	for i := 0; i < b.N; i++ {
		q, r = Div64(1, uint64(i), uint64(Input))
	}
	Output = int(q + r)
}

// ----------------------------------------------------------------------------
// Testing support

type entry = struct {
	nlz, ntz, pop int
}

// tab contains results for all uint8 values
var tab [256]entry

func init() {
	tab[0] = entry{8, 8, 0}
	for i := 1; i < len(tab); i++ {
		// nlz
		x := i // x != 0
		n := 0
		for x&0x80 == 0 {
			n++
			x <<= 1
		}
		tab[i].nlz = n

		// ntz
		x = i // x != 0
		n = 0
		for x&1 == 0 {
			n++
			x >>= 1
		}
		tab[i].ntz = n

		// pop
		x = i // x != 0
		n = 0
		for x != 0 {
			n += int(x & 1)
			x >>= 1
		}
		tab[i].pop = n
	}
}
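
// Worked example of the table contents, for reference: 0x0a is 0b00001010,
// so tab[0x0a] == entry{nlz: 4, ntz: 1, pop: 2}.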