package pagination

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"reflect"
	"strings"

	"github.com/opentelekomcloud/gophertelekomcloud"
)

// Page must be satisfied by the result type of any resource collection.
// It allows clients to interact with the resource uniformly, regardless of whether or not or how it's paginated.
// Generally, rather than implementing this interface directly, implementors should embed one of the concrete PageBase structs,
// instead.
// Depending on the pagination strategy of a particular resource, there may be an additional subinterface that the result type
// will need to implement.
type Page interface {
	// NextPageURL generates the URL for the page of data that follows this collection.
	// Return "" if no such page exists.
	NextPageURL() (string, error)

	// IsEmpty returns true if this Page has no items in it.
	IsEmpty() (bool, error)

	// GetBody returns the Page Body. This is used in the `AllPages` method.
	GetBody() []byte
	// GetBodyAsSlice tries to convert page body to a slice.
	GetBodyAsSlice() ([]any, error)
	// GetBodyAsMap tries to convert page body to a map.
	GetBodyAsMap() (map[string]any, error)
}

// Pager knows how to advance through a specific resource collection, one page at a time.
//
// It carries two parallel sets of fields: the unexported ones drive the legacy
// Page-based methods (fetchNextPage/EachPage/AllPages), while the exported ones
// drive the NewPage-based methods (newFetchNextPage/NewEachPage/NewAllPages).
type Pager struct {
	// client issues requests for the legacy Page-based methods.
	client *golangsdk.ServiceClient
	// Client issues requests for the NewPage-based methods.
	Client *golangsdk.ServiceClient

	// initialURL is the first page's URL for the legacy Page-based methods.
	initialURL string
	// InitialURL is the first page's URL for the NewPage-based methods.
	InitialURL string

	// createPage wraps a PageResult into a concrete Page (legacy path).
	createPage func(r PageResult) Page
	// CreatePage wraps a NewPageResult into a concrete NewPage (new path).
	CreatePage func(r NewPageResult) NewPage

	// Err holds a deferred construction error; EachPage/NewEachPage return it
	// immediately instead of issuing any requests.
	Err error

	// Headers supplies additional HTTP headers to populate on each paged request.
	Headers map[string]string
}

// NewPager constructs a manually-configured pager.
// Supply the URL for the first page, a function that requests a specific page given a URL, and a function that counts a page.
func NewPager(client *golangsdk.ServiceClient, initialURL string, createPage func(r PageResult) Page) Pager {
	return Pager{
		client:     client,
		initialURL: initialURL,
		createPage: createPage,
	}
}

// fetchNextPage requests the page at url (with any configured extra Headers)
// and wraps the response into a Page via the pager's createPage callback.
func (p Pager) fetchNextPage(url string) (Page, error) {
	resp, err := Request(p.client, p.Headers, url)
	if err != nil {
		return nil, err
	}

	remembered, err := PageResultFrom(resp)
	if err != nil {
		return nil, err
	}

	return p.createPage(remembered), nil
}

// EachPage iterates over each page returned by a Pager, yielding one at a time to a handler function.
// Return "false" from the handler to prematurely stop iterating.
func (p Pager) EachPage(handler func(Page) (bool, error)) error {
	if p.Err != nil {
		return p.Err
	}
	currentURL := p.initialURL
	for {
		currentPage, err := p.fetchNextPage(currentURL)
		if err != nil {
			return err
		}

		empty, err := currentPage.IsEmpty()
		if err != nil {
			return err
		}
		// An empty page ends the iteration without invoking the handler.
		if empty {
			return nil
		}

		ok, err := handler(currentPage)
		if err != nil {
			return err
		}
		if !ok {
			return nil
		}

		// An empty next-page URL means this was the last page.
		currentURL, err = currentPage.NextPageURL()
		if err != nil {
			return err
		}
		if currentURL == "" {
			return nil
		}
	}
}

// AllPages returns all the pages from a `List` operation in a single page,
// allowing the user to retrieve all the pages at once.
func (p Pager) AllPages() (Page, error) {
	// body will contain the final concatenated Page body.
	var body []byte

	// Grab a test page to ascertain the page body type.
	// NOTE(review): this first page is fetched twice — once here and once again
	// by EachPage below, which restarts from p.initialURL.
	testPage, err := p.fetchNextPage(p.initialURL)
	if err != nil {
		return nil, err
	}
	// Store the page type, so we can use reflection to create a new mega-page of
	// that type.
	pageType := reflect.TypeOf(testPage)

	// if it's a single page, just return the testPage (first page)
	if _, found := pageType.FieldByName("SinglePageBase"); found {
		return testPage, nil
	}

	// Probe the body type: a JSON array body, then a JSON object body, then raw bytes.
	if _, err := testPage.GetBodyAsSlice(); err == nil {
		var pagesSlice []any

		// Iterate over the pages to concatenate the bodies.
		err = p.EachPage(func(page Page) (bool, error) {
			b, err := page.GetBodyAsSlice()
			if err != nil {
				return false, fmt.Errorf("error paginating page with slice body: %w", err)
			}
			pagesSlice = append(pagesSlice, b...)
			return true, nil
		})
		if err != nil {
			return nil, err
		}

		body, err = json.Marshal(pagesSlice)
		if err != nil {
			return nil, err
		}
	} else if _, err := testPage.GetBodyAsMap(); err == nil {
		var pagesSlice []any

		// key is the map key for the page body if the body type is `map[string]any`.
		var key string
		// Iterate over the pages to concatenate the bodies.
		err = p.EachPage(func(page Page) (bool, error) {
			b, err := page.GetBodyAsMap()
			if err != nil {
				return false, fmt.Errorf("error paginating page with map body: %w", err)
			}
			for k, v := range b {
				// If it's a linked page, we don't want the `links`, we want the other one.
				if !strings.HasSuffix(k, "links") {
					// check the field's type. we only want []any (which is really []map[string]interface{})
					switch vt := v.(type) {
					case []any:
						key = k
						pagesSlice = append(pagesSlice, vt...)
					}
				}
			}
			return true, nil
		})
		if err != nil {
			return nil, err
		}

		mapBody := map[string]any{
			key: pagesSlice,
		}

		body, err = json.Marshal(mapBody)
		if err != nil {
			return nil, err
		}
	} else {
		var pagesSlice [][]byte

		// Iterate over the pages to concatenate the bodies.
		err = p.EachPage(func(page Page) (bool, error) {
			b := page.GetBody()
			pagesSlice = append(pagesSlice, b)
			// separate pages with a newline (byte 10)
			pagesSlice = append(pagesSlice, []byte{10})
			return true, nil
		})
		if err != nil {
			return nil, err
		}
		if len(pagesSlice) > 0 {
			// Remove the trailing newline separator.
			pagesSlice = pagesSlice[:len(pagesSlice)-1]
		}
		var b []byte
		// Combine the slice of slices in to a single slice.
		for _, slice := range pagesSlice {
			b = append(b, slice...)
		}

		body = b
	}

	// Each `Extract*` function is expecting a specific type of page coming back,
	// otherwise the type assertion in those functions will fail. pageType is needed
	// to create a type in this method that has the same type that the `Extract*`
	// function is expecting and set the Body of that object to the concatenated
	// pages.
	page := reflect.New(pageType)
	// Set the page body to be the concatenated pages.
	page.Elem().FieldByName("Body").Set(reflect.ValueOf(body))
	// Set any additional headers that were passed along. The `objectstorage` package,
	// for example, passes a Content-Type header.
	h := make(http.Header)
	for k, v := range p.Headers {
		h.Add(k, v)
	}
	page.Elem().FieldByName("Header").Set(reflect.ValueOf(h))
	// Type assert the page to a Page interface so that the type assertion in the
	// `Extract*` methods will work.
	return page.Elem().Interface().(Page), err
}

// NewPage must be satisfied by the result type of any resource collection.
// It allows clients to interact with the resource uniformly, regardless of whether or not or how it's paginated.
// Generally, rather than implementing this interface directly, implementors should embed one of the concrete PageBase structs,
// instead.
243 // Depending on the pagination strategy of a particular resource, there may be an additional subinterface that the result type 244 // will need to implement. 245 type NewPage interface { 246 // NewNextPageURL generates the URL for the page of data that follows this collection. 247 // Return "" if no such page exists. 248 NewNextPageURL() (string, error) 249 250 // NewIsEmpty returns true if this Page has no items in it. 251 NewIsEmpty() (bool, error) 252 253 // NewGetBody returns the Page Body. This is used in the `AllPages` method. 254 NewGetBody() []byte 255 // NewGetBodyAsSlice tries to convert page body to a slice. 256 NewGetBodyAsSlice() ([]any, error) 257 // NewGetBodyAsMap tries to convert page body to a map. 258 NewGetBodyAsMap() (map[string]any, error) 259 } 260 261 func (p Pager) newFetchNextPage(url string) (NewPage, error) { 262 resp, err := Request(p.Client, p.Headers, url) 263 if err != nil { 264 return nil, err 265 } 266 267 defer resp.Body.Close() 268 rawBody, err := io.ReadAll(resp.Body) 269 if err != nil { 270 return nil, err 271 } 272 273 return p.CreatePage(NewPageResult{ 274 Body: rawBody, 275 Header: resp.Header, 276 URL: *resp.Request.URL, 277 }), nil 278 } 279 280 // NewEachPage iterates over each page returned by a Pager, yielding one at a time to a handler function. 281 // Return "false" from the handler to prematurely stop iterating. 
func (p Pager) NewEachPage(handler func(NewPage) (bool, error)) error {
	if p.Err != nil {
		return p.Err
	}
	currentURL := p.InitialURL
	for {
		currentPage, err := p.newFetchNextPage(currentURL)
		if err != nil {
			return err
		}

		empty, err := currentPage.NewIsEmpty()
		if err != nil {
			return err
		}
		// An empty page ends the iteration without invoking the handler.
		if empty {
			return nil
		}

		ok, err := handler(currentPage)
		if err != nil {
			return err
		}
		if !ok {
			return nil
		}

		// An empty next-page URL means this was the last page.
		currentURL, err = currentPage.NewNextPageURL()
		if err != nil {
			return err
		}
		if currentURL == "" {
			return nil
		}
	}
}

// NewAllPages returns all the pages from a `List` operation in a single page,
// allowing the user to retrieve all the pages at once.
func (p Pager) NewAllPages() (NewPage, error) {
	// body will contain the final concatenated Page body.
	var body []byte

	// Grab a test page to ascertain the page body type.
	// NOTE(review): this first page is fetched twice — once here and once again
	// by NewEachPage below, which restarts from p.InitialURL.
	testPage, err := p.newFetchNextPage(p.InitialURL)
	if err != nil {
		return nil, err
	}
	// Store the page type, so we can use reflection to create a new mega-page of
	// that type.
	pageType := reflect.TypeOf(testPage)

	// if it's a single page, just return the testPage (first page)
	if _, found := pageType.FieldByName("NewSinglePageBase"); found {
		return testPage, nil
	}

	// Probe the body type: a JSON array body, then a JSON object body, then raw bytes.
	if _, err := testPage.NewGetBodyAsSlice(); err == nil {
		var pagesSlice []any

		// Iterate over the pages to concatenate the bodies.
		err = p.NewEachPage(func(page NewPage) (bool, error) {
			b, err := page.NewGetBodyAsSlice()
			if err != nil {
				return false, fmt.Errorf("error paginating page with slice body: %w", err)
			}
			pagesSlice = append(pagesSlice, b...)
			return true, nil
		})
		if err != nil {
			return nil, err
		}

		body, err = json.Marshal(pagesSlice)
		if err != nil {
			return nil, err
		}
	} else if _, err := testPage.NewGetBodyAsMap(); err == nil {
		var pagesSlice []any

		// key is the map key for the page body if the body type is `map[string]any`.
		var key string
		// Iterate over the pages to concatenate the bodies.
		err = p.NewEachPage(func(page NewPage) (bool, error) {
			b, err := page.NewGetBodyAsMap()
			if err != nil {
				return false, fmt.Errorf("error paginating page with map body: %w", err)
			}
			for k, v := range b {
				// If it's a linked page, we don't want the `links`, we want the other one.
				if !strings.HasSuffix(k, "links") {
					// check the field's type. we only want []any (which is really []map[string]interface{})
					switch vt := v.(type) {
					case []any:
						key = k
						pagesSlice = append(pagesSlice, vt...)
					}
				}
			}
			return true, nil
		})
		if err != nil {
			return nil, err
		}

		mapBody := map[string]any{
			key: pagesSlice,
		}

		body, err = json.Marshal(mapBody)
		if err != nil {
			return nil, err
		}
	} else {
		var pagesSlice [][]byte

		// Iterate over the pages to concatenate the bodies.
		err = p.NewEachPage(func(page NewPage) (bool, error) {
			b := page.NewGetBody()
			pagesSlice = append(pagesSlice, b)
			// separate pages with a newline (byte 10)
			pagesSlice = append(pagesSlice, []byte{10})
			return true, nil
		})
		if err != nil {
			return nil, err
		}
		if len(pagesSlice) > 0 {
			// Remove the trailing newline separator.
			pagesSlice = pagesSlice[:len(pagesSlice)-1]
		}
		var b []byte
		// Combine the slice of slices in to a single slice.
		for _, slice := range pagesSlice {
			b = append(b, slice...)
		}

		body = b
	}

	// Each `Extract*` function is expecting a specific type of page coming back,
	// otherwise the type assertion in those functions will fail. pageType is needed
	// to create a type in this method that has the same type that the `Extract*`
	// function is expecting and set the Body of that object to the concatenated
	// pages.
	page := reflect.New(pageType)
	// Set the page body to be the concatenated pages.
	page.Elem().FieldByName("Body").Set(reflect.ValueOf(body))
	// Set any additional headers that were passed along. The `objectstorage` package,
	// for example, passes a Content-Type header.
	h := make(http.Header)
	for k, v := range p.Headers {
		h.Add(k, v)
	}
	page.Elem().FieldByName("Header").Set(reflect.ValueOf(h))
	// Type assert the page to a Page interface so that the type assertion in the
	// `Extract*` methods will work.
	return page.Elem().Interface().(NewPage), err
}