github.com/qri-io/qri@v0.10.1-0.20220104210721-c771715036cb/base/dsfs/dsfs_test.go

package dsfs

import (
	"context"
	"encoding/json"
	"fmt"
	"io/ioutil"

	"github.com/qri-io/dataset"
	"github.com/qri-io/qfs"
	testkeys "github.com/qri-io/qri/auth/key/test"
	"github.com/qri-io/qri/event"
)

var AirportCodes = &dataset.Dataset{
	Meta: &dataset.Meta{
		Title:   "Airport Codes",
		HomeURL: "http://www.ourairports.com/",
		License: &dataset.License{
			Type: "PDDL-1.0",
		},
		Citations: []*dataset.Citation{
			{
				Name: "Our Airports",
				URL:  "http://ourairports.com/data/",
			},
		},
	},
	// File:   "data/airport-codes.csv",
	// Readme: "readme.md",
	// Format: "text/csv",
}

var AirportCodesCommit = &dataset.Commit{
	Qri:     dataset.KindCommit.String(),
	Message: "initial commit",
}

var AirportCodesStructure = &dataset.Structure{
	Format: "csv",
	FormatConfig: map[string]interface{}{
		"headerRow": true,
	},
	Schema: map[string]interface{}{
		"type": "array",
		"items": map[string]interface{}{
			"type": "array",
			"items": []interface{}{
				map[string]interface{}{"title": "ident", "type": "string"},
				map[string]interface{}{"title": "type", "type": "string"},
				map[string]interface{}{"title": "name", "type": "string"},
				map[string]interface{}{"title": "latitude_deg", "type": "number"},
				map[string]interface{}{"title": "longitude_deg", "type": "number"},
				map[string]interface{}{"title": "elevation_ft", "type": "integer"},
				map[string]interface{}{"title": "continent", "type": "string"},
				map[string]interface{}{"title": "iso_country", "type": "string"},
				map[string]interface{}{"title": "iso_region", "type": "string"},
				map[string]interface{}{"title": "municipality", "type": "string"},
				map[string]interface{}{"title": "gps_code", "type": "string"},
				map[string]interface{}{"title": "iata_code", "type": "string"},
				map[string]interface{}{"title": "local_code", "type": "string"},
			},
		},
	},
}

var AirportCodesStructureAgebraic = &dataset.Structure{
	Format:       "csv",
	FormatConfig: map[string]interface{}{"headerRow": true},
	Schema: map[string]interface{}{
		"type": "array",
		"items": map[string]interface{}{
			"type": "array",
			"items": []interface{}{
				map[string]interface{}{"title": "col_0", "type": "string"},
				map[string]interface{}{"title": "col_1", "type": "string"},
				map[string]interface{}{"title": "col_2", "type": "string"},
				map[string]interface{}{"title": "col_3", "type": "number"},
				map[string]interface{}{"title": "col_4", "type": "number"},
				map[string]interface{}{"title": "col_5", "type": "integer"},
				map[string]interface{}{"title": "col_6", "type": "string"},
				map[string]interface{}{"title": "col_7", "type": "string"},
				map[string]interface{}{"title": "col_8", "type": "string"},
				map[string]interface{}{"title": "col_9", "type": "string"},
				map[string]interface{}{"title": "col_10", "type": "string"},
				map[string]interface{}{"title": "col_11", "type": "string"},
				map[string]interface{}{"title": "col_12", "type": "string"},
			},
		},
	},
}

var ContinentCodes = &dataset.Dataset{
	Qri: dataset.KindDataset.String(),
	Meta: &dataset.Meta{
		Qri:         dataset.KindMeta.String(),
		Title:       "Continent Codes",
		Description: "list of continents with corresponding two letter codes",
		License: &dataset.License{
			Type: "odc-pddl",
			URL:  "http://opendatacommons.org/licenses/pddl/",
		},
		Keywords: []string{
			"Continents",
			"Two letter code",
			"Continent codes",
			"Continent code list",
		},
	},
}

var ContinentCodesStructure = &dataset.Structure{
	Format: "csv",
	Schema: map[string]interface{}{
		"type": "array",
		"items": map[string]interface{}{
			"type": "array",
			"items": []interface{}{
				map[string]interface{}{"title": "code", "type": "string"},
				map[string]interface{}{"title": "name", "type": "string"},
			},
		},
	},
}

var Hours = &dataset.Dataset{
	Meta: &dataset.Meta{
		Title: "hours",
	},
	// Body: "/ipfs/QmS1dVa1xemo7gQzJgjimj1WwnVBF3TwRTGsyKa1uEBWbJ",
}

var HoursStructure = &dataset.Structure{
	Format: "csv",
	Schema: map[string]interface{}{
		"type": "array",
		"items": map[string]interface{}{
			"type": "array",
			"items": []interface{}{
				map[string]interface{}{"title": "field_1", "type": "string"},
				map[string]interface{}{"title": "field_2", "type": "number"},
				map[string]interface{}{"title": "field_3", "type": "string"},
				map[string]interface{}{"title": "field_4", "type": "string"},
			},
		},
	},
}

func makeFilestore() (map[string]string, qfs.Filesystem, error) {
	ctx := context.Background()
	fs := qfs.NewMemFS()

	// These tests are using hard-coded ids that require this exact peer's private key.
	pk := testkeys.GetKeyData(10).PrivKey

	datasets := map[string]string{
		"movies": "",
		"cities": "",
	}

	for k := range datasets {
		dsdata, err := ioutil.ReadFile(fmt.Sprintf("testdata/%s/input.dataset.json", k))
		if err != nil {
			return datasets, nil, err
		}

		ds := &dataset.Dataset{}
		if err := json.Unmarshal(dsdata, ds); err != nil {
			return datasets, nil, err
		}

		ds.Commit = &dataset.Commit{}
		dataPath := fmt.Sprintf("testdata/%s/%s", k, ds.Structure.BodyFilename())
		data, err := ioutil.ReadFile(dataPath)
		if err != nil {
			return datasets, nil, err
		}

		ds.SetBodyFile(qfs.NewMemfileBytes(fmt.Sprintf("/%s", ds.Structure.BodyFilename()), data))

		dskey, err := WriteDataset(ctx, fs, fs, nil, ds, event.NilBus, pk, SaveSwitches{Pin: true})
		if err != nil {
			return datasets, nil, fmt.Errorf("dataset: %s write error: %s", k, err.Error())
		}
		datasets[k] = dskey
	}

	return datasets, fs, nil
}