github.com/eagleql/xray-core@v1.4.4/common/crypto/chunk.go

package crypto

import (
	"encoding/binary"
	"io"

	"github.com/eagleql/xray-core/common"
	"github.com/eagleql/xray-core/common/buf"
)

// ChunkSizeDecoder is a utility to decode the chunk size from bytes.
type ChunkSizeDecoder interface {
	SizeBytes() int32
	Decode([]byte) (uint16, error)
}

// ChunkSizeEncoder is a utility to encode the chunk size into bytes.
type ChunkSizeEncoder interface {
	SizeBytes() int32
	Encode(uint16, []byte) []byte
}

// PaddingLengthGenerator generates the padding length for each chunk.
type PaddingLengthGenerator interface {
	MaxPaddingLen() uint16
	NextPaddingLen() uint16
}

// PlainChunkSizeParser encodes and decodes the chunk size as a plain big-endian uint16.
type PlainChunkSizeParser struct{}

func (PlainChunkSizeParser) SizeBytes() int32 {
	return 2
}

func (PlainChunkSizeParser) Encode(size uint16, b []byte) []byte {
	binary.BigEndian.PutUint16(b, size)
	return b[:2]
}

func (PlainChunkSizeParser) Decode(b []byte) (uint16, error) {
	return binary.BigEndian.Uint16(b), nil
}

// AEADChunkSizeParser encodes and decodes the chunk size sealed by an AEAD authenticator.
// The encoded value excludes the AEAD overhead; Decode adds it back.
type AEADChunkSizeParser struct {
	Auth *AEADAuthenticator
}

func (p *AEADChunkSizeParser) SizeBytes() int32 {
	return 2 + int32(p.Auth.Overhead())
}

func (p *AEADChunkSizeParser) Encode(size uint16, b []byte) []byte {
	binary.BigEndian.PutUint16(b, size-uint16(p.Auth.Overhead()))
	b, err := p.Auth.Seal(b[:0], b[:2])
	common.Must(err)
	return b
}

func (p *AEADChunkSizeParser) Decode(b []byte) (uint16, error) {
	b, err := p.Auth.Open(b[:0], b)
	if err != nil {
		return 0, err
	}
	return binary.BigEndian.Uint16(b) + uint16(p.Auth.Overhead()), nil
}

// ChunkStreamReader reads a stream of size-prefixed chunks and returns their payload.
type ChunkStreamReader struct {
	sizeDecoder ChunkSizeDecoder
	reader      *buf.BufferedReader

	buffer       []byte
	leftOverSize int32
	maxNumChunk  uint32
	numChunk     uint32
}

// NewChunkStreamReader creates a ChunkStreamReader with no limit on the number of chunks.
func NewChunkStreamReader(sizeDecoder ChunkSizeDecoder, reader io.Reader) *ChunkStreamReader {
	return NewChunkStreamReaderWithChunkCount(sizeDecoder, reader, 0)
}

// NewChunkStreamReaderWithChunkCount creates a ChunkStreamReader that reports io.EOF
// after maxNumChunk chunks have been read. A maxNumChunk of 0 means no limit.
func NewChunkStreamReaderWithChunkCount(sizeDecoder ChunkSizeDecoder, reader io.Reader, maxNumChunk uint32) *ChunkStreamReader {
	r := &ChunkStreamReader{
		sizeDecoder: sizeDecoder,
		buffer:      make([]byte, sizeDecoder.SizeBytes()),
		maxNumChunk: maxNumChunk,
	}
	if breader, ok := reader.(*buf.BufferedReader); ok {
		r.reader = breader
	} else {
		r.reader = &buf.BufferedReader{Reader: buf.NewReader(reader)}
	}

	return r
}

func (r *ChunkStreamReader) readSize() (uint16, error) {
	if _, err := io.ReadFull(r.reader, r.buffer); err != nil {
		return 0, err
	}
	return r.sizeDecoder.Decode(r.buffer)
}

func (r *ChunkStreamReader) ReadMultiBuffer() (buf.MultiBuffer, error) {
	size := r.leftOverSize
	if size == 0 {
		r.numChunk++
		if r.maxNumChunk > 0 && r.numChunk > r.maxNumChunk {
			return nil, io.EOF
		}
		nextSize, err := r.readSize()
		if err != nil {
			return nil, err
		}
		if nextSize == 0 {
			return nil, io.EOF
		}
		size = int32(nextSize)
	}
	r.leftOverSize = size

	mb, err := r.reader.ReadAtMost(size)
	if !mb.IsEmpty() {
		r.leftOverSize -= mb.Len()
		return mb, nil
	}
	return nil, err
}

// ChunkStreamWriter writes data as a stream of size-prefixed chunks.
type ChunkStreamWriter struct {
	sizeEncoder ChunkSizeEncoder
	writer      buf.Writer
}

// NewChunkStreamWriter creates a ChunkStreamWriter that writes size-prefixed chunks to the given writer.
func NewChunkStreamWriter(sizeEncoder ChunkSizeEncoder, writer io.Writer) *ChunkStreamWriter {
	return &ChunkStreamWriter{
		sizeEncoder: sizeEncoder,
		writer:      buf.NewWriter(writer),
	}
}

func (w *ChunkStreamWriter) WriteMultiBuffer(mb buf.MultiBuffer) error {
	const sliceSize = 8192
	mbLen := mb.Len()
	mb2Write := make(buf.MultiBuffer, 0, mbLen/buf.Size+mbLen/sliceSize+2)

	for {
		// Take at most sliceSize bytes of payload for the next chunk.
		mb2, slice := buf.SplitSize(mb, sliceSize)
		mb = mb2

		// Prepend the encoded size of this chunk.
		b := buf.New()
		w.sizeEncoder.Encode(uint16(slice.Len()), b.Extend(w.sizeEncoder.SizeBytes()))
		mb2Write = append(mb2Write, b)
		mb2Write = append(mb2Write, slice...)

		if mb.IsEmpty() {
			break
		}
	}

	return w.writer.WriteMultiBuffer(mb2Write)
}
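
A minimal usage sketch of the types above (not part of chunk.go): it wires a ChunkStreamWriter and a ChunkStreamReader across an io.Pipe, with PlainChunkSizeParser acting as both size encoder and decoder. It assumes the buf.Buffer.WriteString, buf.ReleaseMulti, common.Must, and common.Must2 helpers behave as in upstream xray-core.

package main

import (
	"fmt"
	"io"

	"github.com/eagleql/xray-core/common"
	"github.com/eagleql/xray-core/common/buf"
	"github.com/eagleql/xray-core/common/crypto"
)

func main() {
	pr, pw := io.Pipe()

	// Writer side: frame the payload as size-prefixed chunks and push them into the pipe.
	go func() {
		defer pw.Close()
		w := crypto.NewChunkStreamWriter(crypto.PlainChunkSizeParser{}, pw)
		b := buf.New()
		common.Must2(b.WriteString("hello chunked stream"))
		common.Must(w.WriteMultiBuffer(buf.MultiBuffer{b}))
	}()

	// Reader side: decode the size prefix, then read the chunk payload.
	r := crypto.NewChunkStreamReader(crypto.PlainChunkSizeParser{}, pr)
	mb, err := r.ReadMultiBuffer()
	common.Must(err)
	for _, b := range mb {
		fmt.Print(b.String())
	}
	fmt.Println()
	buf.ReleaseMulti(mb)
}

With AEADChunkSizeParser instead of PlainChunkSizeParser, both ends would need matching AEADAuthenticator instances, since the size prefix is sealed and its wire length includes the AEAD overhead.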