github.com/weaviate/weaviate@v1.24.6/modules/generative-openai/clients/openai_tokens_test.go

//                           _       _
// __      _____  __ ___   ___  __ _| |_ ___
// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
//
//  Copyright © 2016 - 2024 Weaviate B.V. All rights reserved.
//
//  CONTACT: hello@weaviate.io
//

package clients

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

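// Test_getTokensCount checks the token counts getTokensCount reports for a single
// user prompt across several OpenAI models, and for the multi-message example from
// the OpenAI cookbook as an external reference point. It also verifies the error
// returned for a model without a known encoding.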
func Test_getTokensCount(t *testing.T) {
	prompt := `
	Summarize the following in a tweet:

	As generative language models such as GPT-4 continue to push the boundaries of what AI can do,
	the excitement surrounding its potential is spreading quickly. Many applications and projects are
	built on top of GPT-4 to extend its capabilities and features. Additionally, many tools were created
	in order to interact with large language models, like LangChain as an example. Auto-GPT is one of the fastest
	rising open-source python projects harnessing the power of GPT-4!
	`
	messages := []message{
		{Role: "user", Content: prompt},
	}
	// Example messages from: https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
	// Added as a sanity check that getTokensCount computes token counts in line with the examples provided by OpenAI
	// (see the illustrative sketch after this test for the per-message arithmetic).
	exampleMessages := []message{
		{
			Role:    "system",
			Content: "You are a helpful, pattern-following assistant that translates corporate jargon into plain English.",
		},
		{
			Role:    "system",
			Name:    "example_user",
			Content: "New synergies will help drive top-line growth.",
		},
		{
			Role:    "system",
			Name:    "example_assistant",
			Content: "Things working well together will increase revenue.",
		},
		{
			Role:    "system",
			Name:    "example_user",
			Content: "Let's circle back when we have more bandwidth to touch base on opportunities for increased leverage.",
		},
		{
			Role:    "system",
			Name:    "example_assistant",
			Content: "Let's talk later when we're less busy about how to do better.",
		},
		{
			Role:    "user",
			Content: "This late pivot means we don't have time to boil the ocean for the client deliverable.",
		},
	}
	tests := []struct {
		name     string
		model    string
		messages []message
		want     int
		wantErr  string
	}{
		{
			name:     "text-davinci-002",
			model:    "text-davinci-002",
			messages: messages,
			want:     128,
		},
		{
			name:     "text-davinci-003",
			model:    "text-davinci-003",
			messages: messages,
			want:     128,
		},
		{
			name:     "gpt-3.5-turbo",
			model:    "gpt-3.5-turbo",
			messages: messages,
			want:     122,
		},
		{
			name:     "gpt-4",
			model:    "gpt-4",
			messages: messages,
			want:     121,
		},
		{
			name:     "gpt-4-32k",
			model:    "gpt-4-32k",
			messages: messages,
			want:     121,
		},
		{
			name:     "non-existent-model",
			model:    "non-existent-model",
			messages: messages,
			wantErr:  "encoding for model non-existent-model: no encoding for model non-existent-model",
		},
		{
			name:     "OpenAI cookbook example - gpt-3.5-turbo-0301",
			model:    "gpt-3.5-turbo-0301",
			messages: exampleMessages,
			want:     127,
		},
		{
			name:     "OpenAI cookbook example - gpt-4",
			model:    "gpt-4",
			messages: exampleMessages,
			want:     129,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := getTokensCount(tt.model, tt.messages)
			if tt.wantErr != "" {
				assert.EqualError(t, err, tt.wantErr)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tt.want, got)
			}
		})
	}
}
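
// The implementation of getTokensCount lives elsewhere in this package and is not
// shown here. As a rough reference for the expected values above, the sketch below
// reproduces the per-message arithmetic from the OpenAI cookbook notebook linked in
// the test: a fixed overhead per message, an adjustment when a name field is set,
// and 3 tokens that prime the assistant's reply. This is an illustrative assumption,
// not the module's actual code: countChatTokensSketch is a hypothetical helper, and
// encode stands in for any BPE tokenizer suited to the model (e.g. tiktoken's
// cl100k_base encoding), which is how the cookbook arrives at the 127 and 129
// totals asserted above.
func countChatTokensSketch(model string, messages []message, encode func(string) []int) int {
	// Per the cookbook: gpt-3.5-turbo-0301 uses 4 tokens per message and -1 per name;
	// later chat models (gpt-3.5-turbo, gpt-4, ...) use 3 and 1.
	tokensPerMessage, tokensPerName := 3, 1
	if model == "gpt-3.5-turbo-0301" {
		tokensPerMessage, tokensPerName = 4, -1
	}
	count := 0
	for _, m := range messages {
		count += tokensPerMessage
		count += len(encode(m.Role)) + len(encode(m.Content))
		if m.Name != "" {
			count += len(encode(m.Name)) + tokensPerName
		}
	}
	// Every reply is primed with <|start|>assistant<|message|>.
	return count + 3
}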