gorgonia.org/tensor@v0.9.24/dense_io.go

// Code generated by genlib2. DO NOT EDIT.

package tensor

import (
	"bytes"
	"encoding/binary"
	"encoding/csv"
	"encoding/gob"
	"fmt"
	"io"
	"reflect"
	"regexp"
	"strconv"
	"strings"

	flatbuffers "github.com/google/flatbuffers/go"
	"github.com/pkg/errors"
	"gorgonia.org/tensor/internal/serialization/fb"
	"gorgonia.org/tensor/internal/serialization/pb"
)

/* GOB SERIALIZATION */

// GobEncode implements gob.GobEncoder
func (t *Dense) GobEncode() (p []byte, err error) {
	var buf bytes.Buffer
	encoder := gob.NewEncoder(&buf)

	if err = encoder.Encode(t.Shape()); err != nil {
		return
	}

	if err = encoder.Encode(t.Strides()); err != nil {
		return
	}

	if err = encoder.Encode(t.AP.o); err != nil {
		return
	}

	if err = encoder.Encode(t.AP.Δ); err != nil {
		return
	}

	if err = encoder.Encode(t.mask); err != nil {
		return
	}

	data := t.Data()
	if err = encoder.Encode(&data); err != nil {
		return
	}

	return buf.Bytes(), err
}

// GobDecode implements gob.GobDecoder
func (t *Dense) GobDecode(p []byte) (err error) {
	buf := bytes.NewBuffer(p)
	decoder := gob.NewDecoder(buf)

	var shape Shape
	if err = decoder.Decode(&shape); err != nil {
		return
	}

	var strides []int
	if err = decoder.Decode(&strides); err != nil {
		return
	}

	var o DataOrder
	var tr Triangle
	if err = decoder.Decode(&o); err == nil {
		if err = decoder.Decode(&tr); err != nil {
			return
		}
	}

	t.AP.Init(shape, strides)
	t.AP.o = o
	t.AP.Δ = tr

	var mask []bool
	if err = decoder.Decode(&mask); err != nil {
		return
	}

	var data interface{}
	if err = decoder.Decode(&data); err != nil {
		return
	}

	t.fromSlice(data)
	t.addMask(mask)
	t.fix()
	if t.e == nil {
		t.e = StdEng{}
	}
	return t.sanity()
}
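
// A minimal gob round-trip sketch (illustrative only, not part of the generated
// code; assumes a float64-backed tensor and elides error handling):
//
//	var buf bytes.Buffer
//	T := New(WithShape(2, 2), WithBacking([]float64{1, 2, 3, 4}))
//	if err := gob.NewEncoder(&buf).Encode(T); err != nil { /* handle error */ } // uses Dense.GobEncode
//	T2 := new(Dense)
//	if err := gob.NewDecoder(&buf).Decode(T2); err != nil { /* handle error */ } // uses Dense.GobDecode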

/* NPY SERIALIZATION */

var npyDescRE = regexp.MustCompile(`'descr':\s*'([^']*)'`)
var rowOrderRE = regexp.MustCompile(`'fortran_order':\s*(False|True)`)
var shapeRE = regexp.MustCompile(`'shape':\s*\(([^\(]*)\)`)

type binaryWriter struct {
	io.Writer
	err error
	seq int
}

func (w *binaryWriter) w(x interface{}) {
	if w.err != nil {
		return
	}

	w.err = binary.Write(w, binary.LittleEndian, x)
	w.seq++
}

func (w *binaryWriter) Err() error {
	if w.err == nil {
		return nil
	}
	return errors.Wrapf(w.err, "Sequence %d", w.seq)
}

type binaryReader struct {
	io.Reader
	err error
	seq int
}

func (r *binaryReader) Read(data interface{}) {
	if r.err != nil {
		return
	}
	r.err = binary.Read(r.Reader, binary.LittleEndian, data)
	r.seq++
}

func (r *binaryReader) Err() error {
	if r.err == nil {
		return nil
	}
	return errors.Wrapf(r.err, "Sequence %d", r.seq)
}

// WriteNpy writes the *Tensor as a numpy compatible serialized file.
//
// The format is very well documented here:
// http://docs.scipy.org/doc/numpy/neps/npy-format.html
//
// Gorgonia specifically uses Version 1.0, as 65535 bytes should be more than enough for the headers.
// The values are written in little endian order, because let's face it -
// 90% of the world's computers are running on x86+ processors.
//
// This method does not close the writer. Closing (if needed) is deferred to the caller.
// If tensor is masked, invalid values are replaced by the default fill value.
func (t *Dense) WriteNpy(w io.Writer) (err error) {
	var npdt string
	if npdt, err = t.t.numpyDtype(); err != nil {
		return
	}

	var header string
	if t.Dims() == 1 {
		// when t is a 1D vector, numpy expects "(N,)" instead of "(N)" which t.Shape() returns.
		header = "{'descr': '<%v', 'fortran_order': False, 'shape': (%d,)}"
		header = fmt.Sprintf(header, npdt, t.Shape()[0])
	} else {
		header = "{'descr': '<%v', 'fortran_order': False, 'shape': %v}"
		header = fmt.Sprintf(header, npdt, t.Shape())
	}
	padding := 16 - ((10 + len(header)) % 16)
	if padding > 0 {
		header = header + strings.Repeat(" ", padding)
	}
	bw := binaryWriter{Writer: w}
	bw.Write([]byte("\x93NUMPY")) // stupid magic
	bw.w(byte(1))                 // major version
	bw.w(byte(0))                 // minor version
	bw.w(uint16(len(header)))     // 2 bytes to denote header length
	if err = bw.Err(); err != nil {
		return err
	}
	bw.Write([]byte(header))

	bw.seq = 0
	if t.IsMasked() {
		fillval := t.FillValue()
		it := FlatMaskedIteratorFromDense(t)
		for i, err := it.Next(); err == nil; i, err = it.Next() {
			if t.mask[i] {
				bw.w(fillval)
			} else {
				bw.w(t.Get(i))
			}
		}
	} else {
		for i := 0; i < t.len(); i++ {
			bw.w(t.Get(i))
		}
	}

	return bw.Err()
}
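
// A sketch of writing an .npy file that numpy.load can open (illustrative only;
// the file name and error handling are placeholders):
//
//	f, err := os.Create("dense.npy")
//	if err != nil { /* handle error */ }
//	defer f.Close()
//	T := New(WithShape(2, 3), WithBacking([]float64{1, 2, 3, 4, 5, 6}))
//	if err := T.WriteNpy(f); err != nil { /* handle error */ }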

// ReadNpy reads NumPy formatted files into a *Dense
func (t *Dense) ReadNpy(r io.Reader) (err error) {
	br := binaryReader{Reader: r}
	var magic [6]byte
	if br.Read(magic[:]); string(magic[:]) != "\x93NUMPY" {
		return errors.Errorf("Not a numpy file. Got %q as the magic number instead", string(magic[:]))
	}

	var version, minor byte
	if br.Read(&version); version != 1 {
		return errors.New("Only version 1.0 of numpy's serialization format is currently supported (65535 bytes ought to be enough for a header)")
	}

	if br.Read(&minor); minor != 0 {
		return errors.New("Only version 1.0 of numpy's serialization format is currently supported (65535 bytes ought to be enough for a header)")
	}

	var headerLen uint16
	br.Read(&headerLen)
	header := make([]byte, int(headerLen))
	br.Read(header)
	if err = br.Err(); err != nil {
		return
	}

	// extract stuff from header
	var match [][]byte
	if match = npyDescRE.FindSubmatch(header); match == nil {
		return errors.New("No dtype information in npy file")
	}

	// TODO: check for endianness. For now we assume everything is little endian.
	if t.t, err = fromNumpyDtype(string(match[1][1:])); err != nil {
		return
	}

	if match = rowOrderRE.FindSubmatch(header); match == nil {
		return errors.New("No Row Order information found in the numpy file")
	}
	if string(match[1]) != "False" {
		return errors.New("Cannot yet read from Fortran Ordered Numpy files")
	}

	if match = shapeRE.FindSubmatch(header); match == nil {
		return errors.New("No shape information found in npy file")
	}
	sizesStr := strings.Split(string(match[1]), ",")

	var shape Shape
	for _, s := range sizesStr {
		s = strings.Trim(s, " ")
		if len(s) == 0 {
			break
		}
		var size int
		if size, err = strconv.Atoi(s); err != nil {
			return
		}
		shape = append(shape, size)
	}

	size := shape.TotalSize()
	if t.e == nil {
		t.e = StdEng{}
	}
	t.makeArray(size)

	switch t.t.Kind() {
	case reflect.Int:
		data := t.Ints()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Int8:
		data := t.Int8s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Int16:
		data := t.Int16s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Int32:
		data := t.Int32s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Int64:
		data := t.Int64s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Uint:
		data := t.Uints()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Uint8:
		data := t.Uint8s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Uint16:
		data := t.Uint16s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Uint32:
		data := t.Uint32s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Uint64:
		data := t.Uint64s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Float32:
		data := t.Float32s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Float64:
		data := t.Float64s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Complex64:
		data := t.Complex64s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	case reflect.Complex128:
		data := t.Complex128s()
		for i := 0; i < size; i++ {
			br.Read(&data[i])
		}
	}
	if err = br.Err(); err != nil {
		return err
	}

	t.AP.zeroWithDims(len(shape))
	t.setShape(shape...)
	t.fix()
	return t.sanity()
}
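
// A sketch of loading the file written above back into a fresh *Dense
// (illustrative only; error handling is elided):
//
//	f, err := os.Open("dense.npy")
//	if err != nil { /* handle error */ }
//	defer f.Close()
//	T := new(Dense)
//	if err := T.ReadNpy(f); err != nil { /* handle error */ }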

/* CSV SERIALIZATION */

// WriteCSV writes the *Dense to a CSV. It accepts an optional format string ("%v", "%f", etc...), which controls what is written to the CSV.
// If tensor is masked, invalid values are replaced by the default fill value.
func (t *Dense) WriteCSV(w io.Writer, formats ...string) (err error) {
	// checks:
	if !t.IsMatrix() {
		// error
		err = errors.Errorf("Cannot write *Dense to CSV. Expected number of dimensions: <=2, T has got %d dimensions (Shape: %v)", t.Dims(), t.Shape())
		return
	}
	format := "%v"
	if len(formats) > 0 {
		format = formats[0]
	}

	cw := csv.NewWriter(w)
	it := IteratorFromDense(t)
	coord := it.Coord()

	// rows := t.Shape()[0]
	cols := t.Shape()[1]
	record := make([]string, 0, cols)
	var i, k, lastCol int
	isMasked := t.IsMasked()
	fillval := t.FillValue()
	fillstr := fmt.Sprintf(format, fillval)
	for i, err = it.Next(); err == nil; i, err = it.Next() {
		record = append(record, fmt.Sprintf(format, t.Get(i)))
		if isMasked {
			if t.mask[i] {
				record[k] = fillstr
			}
			k++
		}
		if lastCol == cols-1 {
			if err = cw.Write(record); err != nil {
				// TODO: wrap errors
				return
			}
			cw.Flush()
			record = record[:0]
		}

		// cleanup
		switch {
		case t.IsRowVec():
			// lastRow = coord[len(coord)-2]
			lastCol = coord[len(coord)-1]
		case t.IsColVec():
			// lastRow = coord[len(coord)-1]
			lastCol = coord[len(coord)-2]
		case t.IsVector():
			lastCol = coord[len(coord)-1]
		default:
			// lastRow = coord[len(coord)-2]
			lastCol = coord[len(coord)-1]
		}
	}
	return nil
}
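
// A sketch of dumping a matrix to CSV (illustrative only; the buffer and format
// string are placeholders):
//
//	var buf bytes.Buffer
//	T := New(WithShape(2, 2), WithBacking([]float64{1, 2, 3, 4}))
//	if err := T.WriteCSV(&buf, "%f"); err != nil { /* handle error */ }
//	// buf now holds two comma-separated rows of formatted float64 values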

// convFromStrs converts a []string to a slice of the Dtype provided. It takes a provided backing slice.
// If into is nil, then a backing slice will be created.
func convFromStrs(to Dtype, record []string, into interface{}) (interface{}, error) {
	var err error
	switch to.Kind() {
	case reflect.Int:
		retVal := make([]int, len(record))
		var backing []int
		if into == nil {
			backing = make([]int, 0, len(record))
		} else {
			backing = into.([]int)
		}

		for i, v := range record {
			var i64 int64
			if i64, err = strconv.ParseInt(v, 10, 0); err != nil {
				return nil, err
			}
			retVal[i] = int(i64)
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Int8:
		retVal := make([]int8, len(record))
		var backing []int8
		if into == nil {
			backing = make([]int8, 0, len(record))
		} else {
			backing = into.([]int8)
		}

		for i, v := range record {
			var i64 int64
			if i64, err = strconv.ParseInt(v, 10, 8); err != nil {
				return nil, err
			}
			retVal[i] = int8(i64)
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Int16:
		retVal := make([]int16, len(record))
		var backing []int16
		if into == nil {
			backing = make([]int16, 0, len(record))
		} else {
			backing = into.([]int16)
		}

		for i, v := range record {
			var i64 int64
			if i64, err = strconv.ParseInt(v, 10, 16); err != nil {
				return nil, err
			}
			retVal[i] = int16(i64)
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Int32:
		retVal := make([]int32, len(record))
		var backing []int32
		if into == nil {
			backing = make([]int32, 0, len(record))
		} else {
			backing = into.([]int32)
		}

		for i, v := range record {
			var i64 int64
			if i64, err = strconv.ParseInt(v, 10, 32); err != nil {
				return nil, err
			}
			retVal[i] = int32(i64)
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Int64:
		retVal := make([]int64, len(record))
		var backing []int64
		if into == nil {
			backing = make([]int64, 0, len(record))
		} else {
			backing = into.([]int64)
		}

		for i, v := range record {
			var i64 int64
			if i64, err = strconv.ParseInt(v, 10, 64); err != nil {
				return nil, err
			}
			retVal[i] = i64
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Uint:
		retVal := make([]uint, len(record))
		var backing []uint
		if into == nil {
			backing = make([]uint, 0, len(record))
		} else {
			backing = into.([]uint)
		}

		for i, v := range record {
			var u uint64
			if u, err = strconv.ParseUint(v, 10, 0); err != nil {
				return nil, err
			}
			retVal[i] = uint(u)
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Uint8:
		retVal := make([]uint8, len(record))
		var backing []uint8
		if into == nil {
			backing = make([]uint8, 0, len(record))
		} else {
			backing = into.([]uint8)
		}

		for i, v := range record {
			var u uint64
			if u, err = strconv.ParseUint(v, 10, 8); err != nil {
				return nil, err
			}
			retVal[i] = uint8(u)
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Uint16:
		retVal := make([]uint16, len(record))
		var backing []uint16
		if into == nil {
			backing = make([]uint16, 0, len(record))
		} else {
			backing = into.([]uint16)
		}

		for i, v := range record {
			var u uint64
			if u, err = strconv.ParseUint(v, 10, 16); err != nil {
				return nil, err
			}
			retVal[i] = uint16(u)
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Uint32:
		retVal := make([]uint32, len(record))
		var backing []uint32
		if into == nil {
			backing = make([]uint32, 0, len(record))
		} else {
			backing = into.([]uint32)
		}

		for i, v := range record {
			var u uint64
			if u, err = strconv.ParseUint(v, 10, 32); err != nil {
				return nil, err
			}
			retVal[i] = uint32(u)
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Uint64:
		retVal := make([]uint64, len(record))
		var backing []uint64
		if into == nil {
			backing = make([]uint64, 0, len(record))
		} else {
			backing = into.([]uint64)
		}

		for i, v := range record {
			var u uint64
			if u, err = strconv.ParseUint(v, 10, 64); err != nil {
				return nil, err
			}
			retVal[i] = u
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Float32:
		retVal := make([]float32, len(record))
		var backing []float32
		if into == nil {
			backing = make([]float32, 0, len(record))
		} else {
			backing = into.([]float32)
		}

		for i, v := range record {
			var f float64
			if f, err = strconv.ParseFloat(v, 32); err != nil {
				return nil, err
			}
			retVal[i] = float32(f)
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.Float64:
		retVal := make([]float64, len(record))
		var backing []float64
		if into == nil {
			backing = make([]float64, 0, len(record))
		} else {
			backing = into.([]float64)
		}

		for i, v := range record {
			if retVal[i], err = strconv.ParseFloat(v, 64); err != nil {
				return nil, err
			}
		}
		backing = append(backing, retVal...)
		return backing, nil
	case reflect.String:
		var backing []string
		if into == nil {
			backing = make([]string, 0, len(record))
		} else {
			backing = into.([]string)
		}
		backing = append(backing, record...)
		return backing, nil
	default:
		return nil, errors.Errorf(methodNYI, "convFromStrs", to)
	}
}

// ReadCSV reads a CSV into a *Dense. It will overwrite the underlying data.
//
// BUG(chewxy): reading CSV doesn't handle CSVs with different columns per row yet.
func (t *Dense) ReadCSV(r io.Reader, opts ...FuncOpt) (err error) {
	fo := ParseFuncOpts(opts...)
	as := fo.As()
	if as.Type == nil {
		as = Float64
	}

	cr := csv.NewReader(r)

	var record []string
	var rows, cols int
	var backing interface{}
	for {
		record, err = cr.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			return
		}
		if backing, err = convFromStrs(as, record, backing); err != nil {
			return
		}
		cols = len(record)
		rows++
	}
	t.fromSlice(backing)
	t.AP.zero()
	t.AP.SetShape(rows, cols)
	return nil
}
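
// A sketch of reading a CSV back in, coercing the values to float64
// (illustrative only; assumes the package's As FuncOpt):
//
//	csvData := strings.NewReader("1,2,3\n4,5,6")
//	T := new(Dense)
//	if err := T.ReadCSV(csvData, As(Float64)); err != nil { /* handle error */ }
//	// T is now a 2×3 matrix of float64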

/* FB SERIALIZATION */

// FBEncode encodes to a byte slice using flatbuffers.
//
// Only natively accessible data can be encoded.
func (t *Dense) FBEncode() ([]byte, error) {
	builder := flatbuffers.NewBuilder(1024)

	fb.DenseStartShapeVector(builder, len(t.shape))
	for i := len(t.shape) - 1; i >= 0; i-- {
		builder.PrependInt32(int32(t.shape[i]))
	}
	shape := builder.EndVector(len(t.shape))

	fb.DenseStartStridesVector(builder, len(t.strides))
	for i := len(t.strides) - 1; i >= 0; i-- {
		builder.PrependInt32(int32(t.strides[i]))
	}
	strides := builder.EndVector(len(t.strides))

	var o uint32
	switch {
	case t.o.IsRowMajor() && t.o.IsContiguous():
		o = 0
	case t.o.IsRowMajor() && !t.o.IsContiguous():
		o = 1
	case t.o.IsColMajor() && t.o.IsContiguous():
		o = 2
	case t.o.IsColMajor() && !t.o.IsContiguous():
		o = 3
	}

	var triangle int32
	switch t.Δ {
	case NotTriangle:
		triangle = fb.TriangleNOT_TRIANGLE
	case Upper:
		triangle = fb.TriangleUPPER
	case Lower:
		triangle = fb.TriangleLOWER
	case Symmetric:
		triangle = fb.TriangleSYMMETRIC
	}

	dt := builder.CreateString(t.Dtype().String())
	data := t.byteSlice()

	fb.DenseStartDataVector(builder, len(data))
	for i := len(data) - 1; i >= 0; i-- {
		builder.PrependUint8(data[i])
	}
	databyte := builder.EndVector(len(data))

	fb.DenseStart(builder)
	fb.DenseAddShape(builder, shape)
	fb.DenseAddStrides(builder, strides)
	fb.DenseAddO(builder, o)
	fb.DenseAddT(builder, triangle)
	fb.DenseAddType(builder, dt)
	fb.DenseAddData(builder, databyte)
	serialized := fb.DenseEnd(builder)
	builder.Finish(serialized)

	return builder.FinishedBytes(), nil
}

// FBDecode decodes a byteslice from a flatbuffer table into a *Dense
func (t *Dense) FBDecode(buf []byte) error {
	serialized := fb.GetRootAsDense(buf, 0)

	o := serialized.O()
	switch o {
	case 0:
		t.o = 0
	case 1:
		t.o = MakeDataOrder(NonContiguous)
	case 2:
		t.o = MakeDataOrder(ColMajor)
	case 3:
		t.o = MakeDataOrder(ColMajor, NonContiguous)
	}

	tri := serialized.T()
	switch tri {
	case fb.TriangleNOT_TRIANGLE:
		t.Δ = NotTriangle
	case fb.TriangleUPPER:
		t.Δ = Upper
	case fb.TriangleLOWER:
		t.Δ = Lower
	case fb.TriangleSYMMETRIC:
		t.Δ = Symmetric
	}

	t.shape = Shape(BorrowInts(serialized.ShapeLength()))
	for i := 0; i < serialized.ShapeLength(); i++ {
		t.shape[i] = int(int32(serialized.Shape(i)))
	}

	t.strides = BorrowInts(serialized.StridesLength())
	for i := 0; i < serialized.StridesLength(); i++ {
		t.strides[i] = int(serialized.Strides(i))
	}
	typ := string(serialized.Type())
	for _, dt := range allTypes.set {
		if dt.String() == typ {
			t.t = dt
			break
		}
	}

	if t.e == nil {
		t.e = StdEng{}
	}
	t.makeArray(t.shape.TotalSize())

	// allocated data. Now time to actually copy over the data
	db := t.byteSlice()
	copy(db, serialized.DataBytes())
	t.fix()
	return t.sanity()
}
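
// A sketch of a flatbuffers round trip (illustrative only; error handling is
// elided):
//
//	T := New(WithShape(2, 2), WithBacking([]float64{1, 2, 3, 4}))
//	buf, err := T.FBEncode()
//	if err != nil { /* handle error */ }
//	T2 := new(Dense)
//	if err := T2.FBDecode(buf); err != nil { /* handle error */ }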

/* PB SERIALIZATION */

// PBEncode encodes the Dense into a protobuf byte slice.
func (t *Dense) PBEncode() ([]byte, error) {
	var toSerialize pb.Dense
	toSerialize.Shape = make([]int32, len(t.shape))
	for i, v := range t.shape {
		toSerialize.Shape[i] = int32(v)
	}
	toSerialize.Strides = make([]int32, len(t.strides))
	for i, v := range t.strides {
		toSerialize.Strides[i] = int32(v)
	}

	switch {
	case t.o.IsRowMajor() && t.o.IsContiguous():
		toSerialize.O = pb.RowMajorContiguous
	case t.o.IsRowMajor() && !t.o.IsContiguous():
		toSerialize.O = pb.RowMajorNonContiguous
	case t.o.IsColMajor() && t.o.IsContiguous():
		toSerialize.O = pb.ColMajorContiguous
	case t.o.IsColMajor() && !t.o.IsContiguous():
		toSerialize.O = pb.ColMajorNonContiguous
	}
	toSerialize.T = pb.Triangle(t.Δ)
	toSerialize.Type = t.t.String()
	data := t.byteSlice()
	toSerialize.Data = make([]byte, len(data))
	copy(toSerialize.Data, data)
	return toSerialize.Marshal()
}

// PBDecode unmarshals a protobuf byteslice into a *Dense.
func (t *Dense) PBDecode(buf []byte) error {
	var toSerialize pb.Dense
	if err := toSerialize.Unmarshal(buf); err != nil {
		return err
	}
	t.shape = make(Shape, len(toSerialize.Shape))
	for i, v := range toSerialize.Shape {
		t.shape[i] = int(v)
	}
	t.strides = make([]int, len(toSerialize.Strides))
	for i, v := range toSerialize.Strides {
		t.strides[i] = int(v)
	}

	switch toSerialize.O {
	case pb.RowMajorContiguous:
	case pb.RowMajorNonContiguous:
		t.o = MakeDataOrder(NonContiguous)
	case pb.ColMajorContiguous:
		t.o = MakeDataOrder(ColMajor)
	case pb.ColMajorNonContiguous:
		t.o = MakeDataOrder(ColMajor, NonContiguous)
	}
	t.Δ = Triangle(toSerialize.T)
	typ := string(toSerialize.Type)
	for _, dt := range allTypes.set {
		if dt.String() == typ {
			t.t = dt
			break
		}
	}

	if t.e == nil {
		t.e = StdEng{}
	}
	t.makeArray(t.shape.TotalSize())

	// allocated data. Now time to actually copy over the data
	db := t.byteSlice()
	copy(db, toSerialize.Data)
	return t.sanity()
}
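
// A sketch of a protobuf round trip, e.g. for bytes received over the wire
// (illustrative only; error handling is elided):
//
//	T := New(WithShape(2, 2), WithBacking([]float64{1, 2, 3, 4}))
//	buf, err := T.PBEncode()
//	if err != nil { /* handle error */ }
//	T2 := new(Dense)
//	if err := T2.PBDecode(buf); err != nil { /* handle error */ }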