package storage

import (
	"bytes"
	"encoding/csv"
	"fmt"
	"sort"
)

// init registers the CSV parser for reading and writing under the
// "csv" parser name and the ".csv" file extension.
func init() {
	registerReadParser([]string{"csv"}, []string{".csv"}, &CSVParser{})
	registerWriteParser([]string{"csv"}, []string{".csv"}, &CSVParser{})
}

// CSVParser is a Parser implementation to handle csv files.
type CSVParser struct {
}

// CSVDocument represents a CSV file.
// This is required to keep headers in the expected order.
type CSVDocument struct {
	originalRequired
	// Value holds one map per data row, keyed by header name.
	Value   []map[string]interface{}
	// Headers preserves the column order from the source document.
	Headers []string
}

// RealValue returns the real value that dasel should use when processing data.
func (d *CSVDocument) RealValue() interface{} {
	return d.Value
}

// Documents returns the documents that should be written to output.
// Each row map becomes its own document.
func (d *CSVDocument) Documents() []interface{} {
	res := make([]interface{}, len(d.Value))
	for i := range d.Value {
		res[i] = d.Value[i]
	}
	return res
}

// FromBytes returns some data that is represented by the given bytes.
42 func (p *CSVParser) FromBytes(byteData []byte) (interface{}, error) { 43 if byteData == nil { 44 return nil, fmt.Errorf("could not read csv file: no data") 45 } 46 47 reader := csv.NewReader(bytes.NewBuffer(byteData)) 48 res := make([]map[string]interface{}, 0) 49 records, err := reader.ReadAll() 50 if err != nil { 51 return nil, fmt.Errorf("could not read csv file: %w", err) 52 } 53 if len(records) == 0 { 54 return nil, nil 55 } 56 var headers []string 57 for i, row := range records { 58 if i == 0 { 59 headers = row 60 continue 61 } 62 rowRes := make(map[string]interface{}) 63 allEmpty := true 64 for index, val := range row { 65 if val != "" { 66 allEmpty = false 67 } 68 rowRes[headers[index]] = val 69 } 70 if !allEmpty { 71 res = append(res, rowRes) 72 } 73 } 74 return &CSVDocument{ 75 Value: res, 76 Headers: headers, 77 }, nil 78 } 79 80 func interfaceToCSVDocument(val interface{}) (*CSVDocument, error) { 81 switch v := val.(type) { 82 case map[string]interface{}: 83 headers := make([]string, 0) 84 for k := range v { 85 headers = append(headers, k) 86 } 87 sort.Strings(headers) 88 return &CSVDocument{ 89 Value: []map[string]interface{}{v}, 90 Headers: headers, 91 }, nil 92 93 case []interface{}: 94 mapVals := make([]map[string]interface{}, 0) 95 headers := make([]string, 0) 96 for _, val := range v { 97 if x, ok := val.(map[string]interface{}); ok { 98 mapVals = append(mapVals, x) 99 100 for objectKey := range x { 101 found := false 102 for _, existingHeader := range headers { 103 if existingHeader == objectKey { 104 found = true 105 break 106 } 107 } 108 if !found { 109 headers = append(headers, objectKey) 110 } 111 } 112 } 113 } 114 sort.Strings(headers) 115 return &CSVDocument{ 116 Value: mapVals, 117 Headers: headers, 118 }, nil 119 120 default: 121 return nil, fmt.Errorf("CSVParser.toBytes cannot handle type %T", val) 122 } 123 } 124 125 // ToBytes returns a slice of bytes that represents the given value. 
126 func (p *CSVParser) ToBytes(value interface{}, options ...ReadWriteOption) ([]byte, error) { 127 buffer := new(bytes.Buffer) 128 writer := csv.NewWriter(buffer) 129 130 // Allow for multi document output by just appending documents on the end of each other. 131 // This is really only supported so as we have nicer output when converting to CSV from 132 // other multi-document formats. 133 134 docs := make([]*CSVDocument, 0) 135 136 switch d := value.(type) { 137 case *CSVDocument: 138 docs = append(docs, d) 139 case SingleDocument: 140 doc, err := interfaceToCSVDocument(d.Document()) 141 if err != nil { 142 return nil, err 143 } 144 docs = append(docs, doc) 145 case MultiDocument: 146 for _, dd := range d.Documents() { 147 doc, err := interfaceToCSVDocument(dd) 148 if err != nil { 149 return nil, err 150 } 151 docs = append(docs, doc) 152 } 153 default: 154 return []byte(fmt.Sprintf("%v\n", value)), nil 155 } 156 157 for _, doc := range docs { 158 if err := p.toBytesHandleDoc(writer, doc); err != nil { 159 return nil, err 160 } 161 } 162 163 return append(buffer.Bytes()), nil 164 } 165 166 func (p *CSVParser) toBytesHandleDoc(writer *csv.Writer, doc *CSVDocument) error { 167 // Iterate through the rows and detect any new headers. 168 for _, r := range doc.Value { 169 for k := range r { 170 headerExists := false 171 for _, header := range doc.Headers { 172 if k == header { 173 headerExists = true 174 break 175 } 176 } 177 if !headerExists { 178 doc.Headers = append(doc.Headers, k) 179 } 180 } 181 } 182 183 // Iterate through the rows and write the output. 
184 for i, r := range doc.Value { 185 if i == 0 { 186 if err := writer.Write(doc.Headers); err != nil { 187 return fmt.Errorf("could not write headers: %w", err) 188 } 189 } 190 191 values := make([]string, 0) 192 for _, header := range doc.Headers { 193 val, ok := r[header] 194 if !ok { 195 val = "" 196 } 197 switch val.(type) { 198 case float32, float64: 199 values = append(values, fmt.Sprintf("%f", val)) 200 default: 201 values = append(values, fmt.Sprint(val)) 202 } 203 } 204 205 if err := writer.Write(values); err != nil { 206 return fmt.Errorf("could not write values: %w", err) 207 } 208 209 writer.Flush() 210 } 211 212 return nil 213 }