github.com/benoitkugler/goacve@v0.0.0-20201217100549-151ce6e55dc8/server/core/datamodel/base_hash.go

package datamodel

import (
	"bytes"
	"compress/gzip"
	"encoding/binary"
	"encoding/json"
	"errors"
	"io"
	"io/ioutil"
	"math"
)

// BaseHash stores, for each table (links included), a hash of each item (identified by id)
type BaseHash map[string]map[int64]uint32

// NewBaseHash decompresses the hashes, received in the format returned
// by `Compress`.
func NewBaseHash(headerJSON []byte, compressed []byte) (BaseHash, error) {
	gr, err := gzip.NewReader(bytes.NewReader(compressed))
	if err != nil {
		return nil, err
	}
	decompressed, err := ioutil.ReadAll(gr)
	if err != nil {
		return nil, err
	}

	var header map[string][2]int

	err = json.Unmarshal(headerJSON, &header)
	if err != nil {
		return nil, err
	}

	out := make(BaseHash, len(header))
	for name, offsets := range header {
		// each table is stored at [offset, offset+length) in the decompressed buffer
		tmp, err := decodeCompressHashes(decompressed[offsets[0] : offsets[0]+offsets[1]])
		if err != nil {
			return nil, err
		}
		out[name] = tmp.decompress()
	}
	return out, nil
}

// HashDiff stores the items which have changed on the server:
//	- the new or modified items
//	- the deleted items
type HashDiff struct {
	Modified map[string][]int64
	Deleted  map[string][]int64
}

// Total returns the number of modifications and of deletions, accumulated
// over all the tables.
func (h HashDiff) Total() (int, int) {
	m, d := 0, 0
	for _, table := range h.Modified {
		m += len(table)
	}
	for _, table := range h.Deleted {
		d += len(table)
	}
	return m, d
}

// Compare compares the hashes sent by the server with the ones stored locally.
func (current BaseHash) Compare(incoming BaseHash) HashDiff {
	out := HashDiff{
		Modified: make(map[string][]int64, len(current)),
		Deleted:  make(map[string][]int64, len(current)),
	}

	for name := range incoming {
		currentTableHash, incomingTableHash := current[name], incoming[name]
		var diff []int64
		for id, h := range incomingTableHash {
			currentHash, has := currentTableHash[id]
			if !has { // the item is new
				diff = append(diff, id)
			} else if h != currentHash { // the item has changed
				diff = append(diff, id)
			}
		}
		out.Modified[name] = diff
	}

	for name := range current {
		currentTableHash, incomingTableHash := current[name], incoming[name]
		var toSupp []int64
		for id := range currentTableHash {
			if _, in := incomingTableHash[id]; !in { // the item no longer exists; it may be deleted
				toSupp = append(toSupp, id)
			}
		}
		out.Deleted[name] = toSupp
	}
	return out
}
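
// Usage sketch (illustration, not part of the original file): comparing a local
// snapshot with one received from the server. The table name "items" and the
// hash values are made up; slice order depends on map iteration and is not
// guaranteed.
//
//	local := BaseHash{"items": {1: 10, 2: 20}}
//	remote := BaseHash{"items": {1: 10, 2: 21, 3: 30}}
//	diff := local.Compare(remote)
//	// diff.Modified["items"] contains 2 (changed) and 3 (new)
//	// diff.Deleted["items"] is empty: every local id is still present remotely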

// Compress writes the hashes in a compact binary form. The returned header
// is needed to decode the binary data.
func (b BaseHash) Compress() (headerJSON []byte, bin []byte, err error) {
	header, compact := b.binary()

	var out bytes.Buffer
	gzOut := gzip.NewWriter(&out)
	_, err = io.Copy(gzOut, bytes.NewReader(compact))
	if err != nil {
		return nil, nil, err
	}
	err = gzOut.Close()
	if err != nil {
		return nil, nil, err
	}

	headerJSON, err = json.Marshal(header)
	return headerJSON, out.Bytes(), err
}
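
// Usage sketch (illustration, not part of the original file): a round trip
// through Compress and NewBaseHash. Error handling is elided and the sample
// values are made up.
//
//	base := BaseHash{"items": {1: 0xdeadbeef, 7: 42}}
//	headerJSON, bin, _ := base.Compress()
//	decoded, _ := NewBaseHash(headerJSON, bin)
//	// decoded is expected to equal base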

// compressHashes is a dense representation of the hashes of one table:
// ids are stored implicitly as First + index.
type compressHashes struct {
	First   int64    // smallest id of the table
	Hashes  []uint32 // hash of the item First+i (meaningful only if Encoded[i] == 1)
	Encoded []byte   // 1 if the id First+i is present, 0 otherwise
}

func decodeCompressHashes(buf []byte) (compressHashes, error) {
	if len(buf) < 8 {
		return compressHashes{}, errors.New("invalid binary format for the hashes")
	}
	first := int64(binary.BigEndian.Uint64(buf))
	buf = buf[8:]
	if len(buf)%5 != 0 {
		return compressHashes{}, errors.New("invalid binary format for the hashes")
	}
	L := len(buf) / 5
	out := compressHashes{
		First:   first,
		Hashes:  make([]uint32, L),
		Encoded: make([]byte, L),
	}
	for i := range out.Hashes {
		out.Encoded[i] = buf[5*i]
		out.Hashes[i] = binary.BigEndian.Uint32(buf[5*i+1:])
	}
	return out, nil
}

func newCompressHashes(hashes map[int64]uint32) compressHashes {
	if len(hashes) == 0 {
		return compressHashes{}
	}
	max, first := int64(0), int64(math.MaxInt64)
	for id := range hashes {
		if id > max {
			max = id
		}
		if id < first {
			first = id
		}
	}
	out := compressHashes{
		First:   first,
		Hashes:  make([]uint32, (max - first + 1)),
		Encoded: make([]byte, (max - first + 1)),
	}

	for id, hash := range hashes {
		out.Hashes[id-first] = hash
		out.Encoded[id-first] = 1
	}
	return out
}
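
// Example (illustration, not from the original file): newCompressHashes maps a
// sparse table onto dense slices indexed from the smallest id. For the input
// map[int64]uint32{5: 0xa, 9: 0xb}, the result has First == 5, slices of length 5,
// Encoded == [1 0 0 0 1], and Hashes holding 0xa at index 0 and 0xb at index 4.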

// decompress assumes len(c.Encoded) == len(c.Hashes)
func (c compressHashes) decompress() map[int64]uint32 {
	out := make(map[int64]uint32)
	for i, e := range c.Encoded {
		if e == 1 {
			out[c.First+int64(i)] = c.Hashes[i]
		}
	}
	return out
}

func (c compressHashes) binary() []byte {
	out := make([]byte, 5*len(c.Encoded)+8)
	binary.BigEndian.PutUint64(out, uint64(c.First))
	data := out[8:]
	for i, e := range c.Encoded {
		data[5*i] = e
		binary.BigEndian.PutUint32(data[5*i+1:], c.Hashes[i])
	}
	return out
}
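
// Binary layout note (summary, not in the original file): binary and
// decodeCompressHashes share the same format: 8 big-endian bytes for First,
// then one 5-byte record per slot, made of the Encoded byte followed by the
// 4-byte big-endian hash.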

func (b BaseHash) binary() (map[string][2]int, []byte) {
	var (
		header = make(map[string][2]int)
		out    []byte
	)
	currentOffset := 0
	for name, m := range b {
		data := newCompressHashes(m).binary()
		out = append(out, data...)
		header[name] = [2]int{currentOffset, len(data)}
		currentOffset += len(data)
	}
	return header, out
}
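
// Header format note (summary, not in the original file): each table name maps
// to an [offset, length] pair locating its compressHashes block inside the
// concatenated (and, after Compress, gzipped) byte slice; NewBaseHash uses
// these pairs to slice the decompressed buffer back into per-table blocks.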