// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Hashing algorithm inspired by
// xxhash: https://code.google.com/p/xxhash/
// cityhash: https://code.google.com/p/cityhash/

// +build amd64 arm64 mips64 mips64le ppc64 ppc64le riscv64 s390x wasm

package runtime

import "unsafe"

const (
	// Constants for multiplication: four random odd 64-bit numbers.
	m1 = 16877499708836156737
	m2 = 2820277070424839065
	m3 = 9497967016996688599
	m4 = 15839092249703872147
)

// memhashFallback hashes the s bytes of memory starting at p, mixed with
// seed, and returns the result as a uintptr. It is the portable software
// fallback used on 64-bit platforms (see the build tag above) when no
// hardware-accelerated hash is available.
//
// The length s is folded into the initial state together with hashkey[0]
// (hashkey is a per-process random key defined elsewhere in this package),
// so inputs of different lengths hash differently even when their bytes
// match. add, readUnaligned32, and readUnaligned64 are runtime helpers
// defined elsewhere; the unaligned readers make no alignment assumptions
// about p.
func memhashFallback(p unsafe.Pointer, seed, s uintptr) uintptr {
	h := uint64(seed + s*hashkey[0])
tail:
	switch {
	case s == 0:
		// Empty input: the length/seed mixing above plus the final
		// avalanche below is the whole hash.
	case s < 4:
		// 1-3 bytes: read the first, middle, and last byte. The reads
		// may overlap for s < 3; that is harmless and keeps this
		// branch free of further length tests.
		h ^= uint64(*(*byte)(p))
		h ^= uint64(*(*byte)(add(p, s>>1))) << 8
		h ^= uint64(*(*byte)(add(p, s-1))) << 16
		h = rotl_31(h*m1) * m2
	case s <= 8:
		// 4-8 bytes: two (possibly overlapping) 4-byte reads cover
		// the whole input.
		h ^= uint64(readUnaligned32(p))
		h ^= uint64(readUnaligned32(add(p, s-4))) << 32
		h = rotl_31(h*m1) * m2
	case s <= 16:
		// 9-16 bytes: two (possibly overlapping) 8-byte reads.
		h ^= readUnaligned64(p)
		h = rotl_31(h*m1) * m2
		h ^= readUnaligned64(add(p, s-8))
		h = rotl_31(h*m1) * m2
	case s <= 32:
		// 17-32 bytes: four 8-byte reads; the last two are anchored
		// to the end of the input and may overlap the first two.
		h ^= readUnaligned64(p)
		h = rotl_31(h*m1) * m2
		h ^= readUnaligned64(add(p, 8))
		h = rotl_31(h*m1) * m2
		h ^= readUnaligned64(add(p, s-16))
		h = rotl_31(h*m1) * m2
		h ^= readUnaligned64(add(p, s-8))
		h = rotl_31(h*m1) * m2
	default:
		// More than 32 bytes: run four independent lanes over the
		// bulk of the input, 32 bytes per iteration, then combine the
		// lanes and re-enter the switch (goto tail) to hash the
		// remaining 0-31 bytes with the short-input cases above.
		v1 := h
		v2 := uint64(seed * hashkey[1])
		v3 := uint64(seed * hashkey[2])
		v4 := uint64(seed * hashkey[3])
		for s >= 32 {
			v1 ^= readUnaligned64(p)
			v1 = rotl_31(v1*m1) * m2
			p = add(p, 8)
			v2 ^= readUnaligned64(p)
			v2 = rotl_31(v2*m2) * m3
			p = add(p, 8)
			v3 ^= readUnaligned64(p)
			v3 = rotl_31(v3*m3) * m4
			p = add(p, 8)
			v4 ^= readUnaligned64(p)
			v4 = rotl_31(v4*m4) * m1
			p = add(p, 8)
			s -= 32
		}
		h = v1 ^ v2 ^ v3 ^ v4
		goto tail
	}

	// Final avalanche: xor-shift / multiply / xor-shift so that every
	// input bit can affect every output bit.
	h ^= h >> 29
	h *= m3
	h ^= h >> 32
	return uintptr(h)
}

// memhash32Fallback is the software fallback hash for exactly 4 bytes at p.
// The 4-byte value is duplicated into both halves of the 64-bit state
// before mixing, mirroring memhashFallback's s <= 8 case for s == 4.
func memhash32Fallback(p unsafe.Pointer, seed uintptr) uintptr {
	h := uint64(seed + 4*hashkey[0])
	v := uint64(readUnaligned32(p))
	h ^= v
	h ^= v << 32
	h = rotl_31(h*m1) * m2
	h ^= h >> 29
	h *= m3
	h ^= h >> 32
	return uintptr(h)
}

// memhash64Fallback is the software fallback hash for exactly 8 bytes at p.
// The input is assembled from two 4-byte unaligned reads rather than one
// 8-byte read.
func memhash64Fallback(p unsafe.Pointer, seed uintptr) uintptr {
	h := uint64(seed + 8*hashkey[0])
	h ^= uint64(readUnaligned32(p)) | uint64(readUnaligned32(add(p, 4)))<<32
	h = rotl_31(h*m1) * m2
	h ^= h >> 29
	h *= m3
	h ^= h >> 32
	return uintptr(h)
}

// rotl_31 rotates x left by 31 bits.
//
// Note: in order to get the compiler to issue rotl instructions, we
// need to constant fold the shift amount by hand.
// TODO: convince the compiler to issue rotl instructions after inlining.
func rotl_31(x uint64) uint64 {
	return (x << 31) | (x >> (64 - 31))
}