github.com/ravendb/ravendb-go-client@v0.0.0-20240229102137-4474ee7aa0fa/get_documents_command.go

package ravendb

import (
	"net/http"
	"strconv"
)

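// verify at compile time that GetDocumentsCommand implements the RavenCommand interface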
var (
	_ RavenCommand = &GetDocumentsCommand{}
)

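// GetDocumentsCommand describes a command for loading documents from the
// server, either by their ids or by an id prefix (startsWith), optionally
// returning only document metadata.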
type GetDocumentsCommand struct {
	RavenCommandBase

	_id string

	_ids      []string
	_includes []string

	_metadataOnly bool

	_startWith  string
	_matches    string
	_start      int
	_pageSize   int
	_exclude    string
	_startAfter string

	Result *GetDocumentsResult
}

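// NewGetDocumentsCommand creates a command that loads the documents with the
// given ids, optionally including related documents and/or returning only metadata.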
func NewGetDocumentsCommand(ids []string, includes []string, metadataOnly bool) (*GetDocumentsCommand, error) {
	if len(ids) == 0 {
		return nil, newIllegalArgumentError("Please supply at least one id")
	}

	cmd := &GetDocumentsCommand{
		RavenCommandBase: NewRavenCommandBase(),

		_includes:     includes,
		_metadataOnly: metadataOnly,
		_start:        -1,
		_pageSize:     -1,
	}

	if len(ids) == 1 {
		cmd._id = ids[0]
	} else {
		cmd._ids = ids
	}
	cmd.IsReadRequest = true
	return cmd, nil
}

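// NewGetDocumentsCommandFull creates a command that loads documents whose ids
// start with startWith, filtered by the optional matches and exclude patterns,
// resumed after startAfter and paged with start and pageSize.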
func NewGetDocumentsCommandFull(startWith string, startAfter string, matches string, exclude string, start int, pageSize int, metadataOnly bool) (*GetDocumentsCommand, error) {
	if startWith == "" {
		return nil, newIllegalArgumentError("startWith cannot be empty")
	}
	cmd := &GetDocumentsCommand{
		RavenCommandBase: NewRavenCommandBase(),

		_startWith:    startWith,
		_startAfter:   startAfter,
		_matches:      matches,
		_exclude:      exclude,
		_start:        start,
		_pageSize:     pageSize,
		_metadataOnly: metadataOnly,
	}
	// a startsWith query only reads data, same as loading by id
	cmd.IsReadRequest = true
	return cmd, nil
}

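// CreateRequest builds the HTTP request for this command against the given
// server node.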
func (c *GetDocumentsCommand) CreateRequest(node *ServerNode) (*http.Request, error) {
	url := node.URL + "/databases/" + node.Database + "/docs?"
	if c._start > 0 {
		url += "&start=" + strconv.Itoa(c._start)
	}

	if c._pageSize > 0 {
		url += "&pageSize=" + strconv.Itoa(c._pageSize)
	}

	if c._metadataOnly {
		url += "&metadataOnly=true"
	}

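	// startsWith mode: load documents whose id begins with the given prefix,
	// optionally narrowed by matches/exclude and resumed after startAfter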
	if c._startWith != "" {
		url += "&startsWith="
		url += urlUtilsEscapeDataString(c._startWith)

		if c._matches != "" {
			url += "&matches="
			url += c._matches
		}

		if c._exclude != "" {
			url += "&exclude="
			url += c._exclude
		}

		if c._startAfter != "" {
			url += "&startAfter="
			url += c._startAfter
		}
	}

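	// related documents to include alongside the results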
	for _, include := range c._includes {
		url += "&include="
		url += include
	}

	if c._id != "" {
		url += "&id="
		url += urlUtilsEscapeDataString(c._id)
	} else if len(c._ids) > 0 {
		return c.prepareRequestWithMultipleIds(url)
	}

	return newHttpGet(url)
}

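// prepareRequestWithMultipleIds builds the request for loading several ids at
// once: a GET with the ids on the query string when they are short enough,
// otherwise a POST with the ids in the request body.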
func (c *GetDocumentsCommand) prepareRequestWithMultipleIds(url string) (*http.Request, error) {
	uniqueIds := stringArrayCopy(c._ids)
	uniqueIds = stringArrayRemoveDuplicatesNoCase(uniqueIds)
	totalLen := 0
	for _, s := range uniqueIds {
		totalLen += len(s)
	}

	// if the ids are too long, we drop to POST (note that this means we can't use the HTTP cache any longer)
	// we are fine with that; requests whose ids add up to more than 1024 characters are going to be rare
	isGet := totalLen < 1024

	if isGet {
		for _, s := range uniqueIds {
			url += "&id=" + urlUtilsEscapeDataString(s)
		}
		return newHttpGet(url)
	}

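	// append a hash of the ids so the URL still identifies this particular set
	// of ids even though they are sent in the POST body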
	hash := c.calculateHash(uniqueIds)
	url += "&loadHash="
	url += hash

	m := map[string]interface{}{
		"Ids": uniqueIds,
	}
	d, err := jsonMarshal(m)
	panicIf(err != nil, "jsonMarshal() failed with %s", err)
	return NewHttpPost(url, d)
}

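// calculateHash computes a combined hash over the unique ids, used as the
// loadHash query parameter of the POST fallback.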
func (c *GetDocumentsCommand) calculateHash(uniqueIds []string) string {
	hasher := &HashCalculator{}
	for _, x := range uniqueIds {
		hasher.write(x)
	}
	return hasher.getHash()
}

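// SetResponse parses the server response into c.Result; an empty response
// leaves the result nil.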
func (c *GetDocumentsCommand) SetResponse(response []byte, fromCache bool) error {
	if len(response) == 0 {
		return nil
	}

	return jsonUnmarshal(response, &c.Result)
}