github.com/weaviate/weaviate@v1.24.6/adapters/repos/db/shard_dimension_tracking_test.go (about)

     1  //                           _       _
     2  // __      _____  __ ___   ___  __ _| |_ ___
     3  // \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
     4  //  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
     5  //   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
     6  //
     7  //  Copyright © 2016 - 2024 Weaviate B.V. All rights reserved.
     8  //
     9  //  CONTACT: hello@weaviate.io
    10  //
    11  
    12  //go:build integrationTest
    13  // +build integrationTest
    14  
    15  package db
    16  
    17  import (
    18  	"context"
    19  	"fmt"
    20  	"log"
    21  	"math/rand"
    22  	"testing"
    23  
    24  	"github.com/go-openapi/strfmt"
    25  	"github.com/google/uuid"
    26  	"github.com/prometheus/client_golang/prometheus/testutil"
    27  	"github.com/sirupsen/logrus"
    28  	"github.com/stretchr/testify/assert"
    29  	"github.com/stretchr/testify/require"
    30  	"github.com/weaviate/weaviate/entities/models"
    31  	"github.com/weaviate/weaviate/entities/schema"
    32  	enthnsw "github.com/weaviate/weaviate/entities/vectorindex/hnsw"
    33  	"github.com/weaviate/weaviate/usecases/monitoring"
    34  )
    35  
    36  func Benchmark_Migration(b *testing.B) {
    37  	fmt.Printf("Running benchmark %v times\n", b.N)
    38  	for i := 0; i < b.N; i++ {
    39  		func() {
    40  			r := getRandomSeed()
    41  			dirName := b.TempDir()
    42  
    43  			shardState := singleShardState()
    44  			logger := logrus.New()
    45  			schemaGetter := &fakeSchemaGetter{
    46  				schema:     schema.Schema{Objects: &models.Schema{Classes: nil}},
    47  				shardState: shardState,
    48  			}
    49  			repo, err := New(logger, Config{
    50  				RootPath:                  dirName,
    51  				QueryMaximumResults:       1000,
    52  				MaxImportGoroutinesFactor: 1,
    53  				TrackVectorDimensions:     true,
    54  			}, &fakeRemoteClient{}, &fakeNodeResolver{}, &fakeRemoteNodeClient{}, &fakeReplicationClient{}, nil)
    55  			require.Nil(b, err)
    56  			repo.SetSchemaGetter(schemaGetter)
    57  			require.Nil(b, repo.WaitForStartup(testCtx()))
    58  			defer repo.Shutdown(context.Background())
    59  
    60  			migrator := NewMigrator(repo, logger)
    61  
    62  			class := &models.Class{
    63  				Class:               "Test",
    64  				VectorIndexConfig:   enthnsw.NewDefaultUserConfig(),
    65  				InvertedIndexConfig: invertedConfig(),
    66  			}
    67  			schema := schema.Schema{
    68  				Objects: &models.Schema{
    69  					Classes: []*models.Class{class},
    70  				},
    71  			}
    72  
    73  			migrator.AddClass(context.Background(), class, schemaGetter.shardState)
    74  
    75  			schemaGetter.schema = schema
    76  
    77  			repo.config.TrackVectorDimensions = false
    78  
    79  			dim := 128
    80  			for i := 0; i < 100; i++ {
    81  				vec := make([]float32, dim)
    82  				for j := range vec {
    83  					vec[j] = r.Float32()
    84  				}
    85  
    86  				id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
    87  				obj := &models.Object{Class: "Test", ID: id}
    88  				err := repo.PutObject(context.Background(), obj, vec, nil, nil)
    89  				if err != nil {
    90  					b.Fatal(err)
    91  				}
    92  			}
    93  
    94  			fmt.Printf("Added vectors, now migrating\n")
    95  
    96  			repo.config.TrackVectorDimensions = true
    97  			migrator.RecalculateVectorDimensions(context.TODO())
    98  			fmt.Printf("Benchmark complete")
    99  		}()
   100  	}
   101  }
   102  
   103  // Rebuild dimensions at startup
   104  func Test_Migration(t *testing.T) {
   105  	r := getRandomSeed()
   106  	dirName := t.TempDir()
   107  
   108  	shardState := singleShardState()
   109  	logger := logrus.New()
   110  	schemaGetter := &fakeSchemaGetter{
   111  		schema:     schema.Schema{Objects: &models.Schema{Classes: nil}},
   112  		shardState: shardState,
   113  	}
   114  	repo, err := New(logger, Config{
   115  		RootPath:                  dirName,
   116  		QueryMaximumResults:       1000,
   117  		MaxImportGoroutinesFactor: 1,
   118  		TrackVectorDimensions:     true,
   119  	}, &fakeRemoteClient{}, &fakeNodeResolver{}, &fakeRemoteNodeClient{}, &fakeReplicationClient{}, nil)
   120  	require.Nil(t, err)
   121  	repo.SetSchemaGetter(schemaGetter)
   122  	require.Nil(t, repo.WaitForStartup(testCtx()))
   123  	defer repo.Shutdown(context.Background())
   124  
   125  	migrator := NewMigrator(repo, logger)
   126  
   127  	t.Run("set schema", func(t *testing.T) {
   128  		class := &models.Class{
   129  			Class:               "Test",
   130  			VectorIndexConfig:   enthnsw.NewDefaultUserConfig(),
   131  			InvertedIndexConfig: invertedConfig(),
   132  		}
   133  		schema := schema.Schema{
   134  			Objects: &models.Schema{
   135  				Classes: []*models.Class{class},
   136  			},
   137  		}
   138  
   139  		require.Nil(t,
   140  			migrator.AddClass(context.Background(), class, schemaGetter.shardState))
   141  
   142  		schemaGetter.schema = schema
   143  	})
   144  
   145  	repo.config.TrackVectorDimensions = false
   146  
   147  	t.Run("import objects with d=128", func(t *testing.T) {
   148  		dim := 128
   149  		for i := 0; i < 100; i++ {
   150  			vec := make([]float32, dim)
   151  			for j := range vec {
   152  				vec[j] = r.Float32()
   153  			}
   154  
   155  			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
   156  			obj := &models.Object{Class: "Test", ID: id}
   157  			err := repo.PutObject(context.Background(), obj, vec, nil, nil)
   158  			require.Nil(t, err)
   159  		}
   160  		dimAfter := GetDimensionsFromRepo(repo, "Test")
   161  		require.Equal(t, 0, dimAfter, "dimensions should not have been calculated")
   162  	})
   163  
   164  	dimBefore := GetDimensionsFromRepo(repo, "Test")
   165  	require.Equal(t, 0, dimBefore, "dimensions should not have been calculated")
   166  	repo.config.TrackVectorDimensions = true
   167  	migrator.RecalculateVectorDimensions(context.TODO())
   168  	dimAfter := GetDimensionsFromRepo(repo, "Test")
   169  	require.Equal(t, 12800, dimAfter, "dimensions should be counted now")
   170  }
   171  
   172  func Test_DimensionTracking(t *testing.T) {
   173  	r := getRandomSeed()
   174  	dirName := t.TempDir()
   175  
   176  	shardState := singleShardState()
   177  	logger := logrus.New()
   178  	schemaGetter := &fakeSchemaGetter{
   179  		schema:     schema.Schema{Objects: &models.Schema{Classes: nil}},
   180  		shardState: shardState,
   181  	}
   182  	repo, err := New(logger, Config{
   183  		RootPath:                  dirName,
   184  		QueryMaximumResults:       10000,
   185  		MaxImportGoroutinesFactor: 1,
   186  		TrackVectorDimensions:     true,
   187  	}, &fakeRemoteClient{}, &fakeNodeResolver{}, &fakeRemoteNodeClient{}, &fakeReplicationClient{}, nil)
   188  	require.Nil(t, err)
   189  	repo.SetSchemaGetter(schemaGetter)
   190  	require.Nil(t, repo.WaitForStartup(testCtx()))
   191  	defer repo.Shutdown(context.Background())
   192  
   193  	migrator := NewMigrator(repo, logger)
   194  
   195  	t.Run("set schema", func(t *testing.T) {
   196  		class := &models.Class{
   197  			Class:               "Test",
   198  			VectorIndexConfig:   enthnsw.NewDefaultUserConfig(),
   199  			InvertedIndexConfig: invertedConfig(),
   200  		}
   201  		schema := schema.Schema{
   202  			Objects: &models.Schema{
   203  				Classes: []*models.Class{class},
   204  			},
   205  		}
   206  
   207  		require.Nil(t,
   208  			migrator.AddClass(context.Background(), class, schemaGetter.shardState))
   209  
   210  		schemaGetter.schema = schema
   211  	})
   212  
   213  	t.Run("import objects with d=128", func(t *testing.T) {
   214  		dim := 128
   215  		for i := 0; i < 100; i++ {
   216  			vec := make([]float32, dim)
   217  			for j := range vec {
   218  				vec[j] = r.Float32()
   219  			}
   220  
   221  			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
   222  			obj := &models.Object{Class: "Test", ID: id}
   223  			err := repo.PutObject(context.Background(), obj, vec, nil, nil)
   224  			require.Nil(t, err)
   225  		}
   226  		dimAfter := GetDimensionsFromRepo(repo, "Test")
   227  		require.Equal(t, 12800, dimAfter, "dimensions should not have changed")
   228  		quantDimAfter := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   229  		require.Equal(t, 6400, quantDimAfter, "quantized dimensions should not have changed")
   230  	})
   231  
   232  	t.Run("import objects with d=0", func(t *testing.T) {
   233  		dimBefore := GetDimensionsFromRepo(repo, "Test")
   234  		quantDimBefore := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   235  		for i := 100; i < 200; i++ {
   236  			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
   237  			obj := &models.Object{Class: "Test", ID: id}
   238  			err := repo.PutObject(context.Background(), obj, nil, nil, nil)
   239  			require.Nil(t, err)
   240  		}
   241  		dimAfter := GetDimensionsFromRepo(repo, "Test")
   242  		require.Equal(t, dimBefore, dimAfter, "dimensions should not have changed")
   243  		quantDimAfter := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   244  		require.Equal(t, quantDimBefore, quantDimAfter, "quantized dimensions should not have changed")
   245  	})
   246  
   247  	t.Run("verify dimensions after initial import", func(t *testing.T) {
   248  		idx := repo.GetIndex("Test")
   249  		idx.ForEachShard(func(name string, shard ShardLike) error {
   250  			assert.Equal(t, 12800, shard.Dimensions())
   251  			assert.Equal(t, 6400, shard.QuantizedDimensions(64))
   252  			return nil
   253  		})
   254  	})
   255  
   256  	t.Run("delete 10 objects with d=128", func(t *testing.T) {
   257  		dimBefore := GetDimensionsFromRepo(repo, "Test")
   258  		quantDimBefore := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   259  		for i := 0; i < 10; i++ {
   260  			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
   261  			err := repo.DeleteObject(context.Background(), "Test", id, nil, "")
   262  			require.Nil(t, err)
   263  		}
   264  		dimAfter := GetDimensionsFromRepo(repo, "Test")
   265  		require.Equal(t, dimBefore, dimAfter+10*128, "dimensions should have decreased")
   266  		quantDimAfter := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   267  		require.Equal(t, quantDimBefore, quantDimAfter+10*64, "dimensions should have decreased")
   268  	})
   269  
   270  	t.Run("verify dimensions after delete", func(t *testing.T) {
   271  		idx := repo.GetIndex("Test")
   272  		idx.ForEachShard(func(name string, shard ShardLike) error {
   273  			assert.Equal(t, 11520, shard.Dimensions())
   274  			assert.Equal(t, 5760, shard.QuantizedDimensions(64))
   275  			return nil
   276  		})
   277  	})
   278  
   279  	t.Run("update some of the d=128 objects with a new vector", func(t *testing.T) {
   280  		dimBefore := GetDimensionsFromRepo(repo, "Test")
   281  		quantDimBefore := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   282  		dim := 128
   283  		for i := 0; i < 50; i++ {
   284  			vec := make([]float32, dim)
   285  			for j := range vec {
   286  				vec[j] = rand.Float32()
   287  			}
   288  
   289  			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
   290  			obj := &models.Object{Class: "Test", ID: id}
   291  			// Put is idempotent, but since the IDs exist now, this is an update
   292  			// under the hood and a "reinstert" for the already deleted ones
   293  			err := repo.PutObject(context.Background(), obj, vec, nil, nil)
   294  			require.Nil(t, err)
   295  		}
   296  		dimAfter := GetDimensionsFromRepo(repo, "Test")
   297  		quantDimAfter := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   298  		require.Equal(t, dimBefore+10*128, dimAfter, "dimensions should have been restored")
   299  		require.Equal(t, quantDimBefore+10*64, quantDimAfter, "dimensions should have been restored")
   300  	})
   301  
   302  	t.Run("update some of the d=128 objects with a nil vector", func(t *testing.T) {
   303  		dimBefore := GetDimensionsFromRepo(repo, "Test")
   304  		quantDimBefore := GetQuantizedDimensionsFromRepo(repo, "Test", 32)
   305  		for i := 50; i < 100; i++ {
   306  			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
   307  			obj := &models.Object{Class: "Test", ID: id}
   308  			// Put is idempotent, but since the IDs exist now, this is an update
   309  			// under the hood and a "reinsert" for the already deleted ones
   310  			err := repo.PutObject(context.Background(), obj, nil, nil, nil)
   311  			require.Nil(t, err)
   312  		}
   313  		dimAfter := GetDimensionsFromRepo(repo, "Test")
   314  		quantDimAfter := GetQuantizedDimensionsFromRepo(repo, "Test", 32)
   315  		require.Equal(t, dimBefore, dimAfter+50*128, "dimensions should decrease")
   316  		require.Equal(t, quantDimBefore, quantDimAfter+50*32, "dimensions should decrease")
   317  	})
   318  
   319  	t.Run("verify dimensions after first set of updates", func(t *testing.T) {
   320  		idx := repo.GetIndex("Test")
   321  		idx.ForEachShard(func(name string, shard ShardLike) error {
   322  			assert.Equal(t, 6400, shard.Dimensions())
   323  			assert.Equal(t, 3200, shard.QuantizedDimensions(64))
   324  			assert.Equal(t, 1600, shard.QuantizedDimensions(32))
   325  			return nil
   326  		})
   327  	})
   328  
   329  	t.Run("update some of the origin nil vector objects with a d=128 vector", func(t *testing.T) {
   330  		dimBefore := GetDimensionsFromRepo(repo, "Test")
   331  		quantDimBefore := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   332  		dim := 128
   333  		for i := 100; i < 150; i++ {
   334  			vec := make([]float32, dim)
   335  			for j := range vec {
   336  				vec[j] = rand.Float32()
   337  			}
   338  
   339  			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
   340  			obj := &models.Object{Class: "Test", ID: id}
   341  			// Put is idempotent, but since the IDs exist now, this is an update
   342  			// under the hood and a "reinsert" for the already deleted ones
   343  			err := repo.PutObject(context.Background(), obj, vec, nil, nil)
   344  			require.Nil(t, err)
   345  		}
   346  		dimAfter := GetDimensionsFromRepo(repo, "Test")
   347  		quantDimAfter := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   348  		require.Equal(t, dimBefore+50*128, dimAfter, "dimensions should increase")
   349  		require.Equal(t, quantDimBefore+50*64, quantDimAfter, "dimensions should increase")
   350  	})
   351  
   352  	t.Run("update some of the nil objects with another nil vector", func(t *testing.T) {
   353  		dimBefore := GetDimensionsFromRepo(repo, "Test")
   354  		quantDimBefore := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   355  		for i := 150; i < 200; i++ {
   356  			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
   357  			obj := &models.Object{Class: "Test", ID: id}
   358  			// Put is idempotent, but since the IDs exist now, this is an update
   359  			// under the hood and a "reinstert" for the already deleted ones
   360  			err := repo.PutObject(context.Background(), obj, nil, nil, nil)
   361  			require.Nil(t, err)
   362  		}
   363  		dimAfter := GetDimensionsFromRepo(repo, "Test")
   364  		quantDimAfter := GetQuantizedDimensionsFromRepo(repo, "Test", 64)
   365  		require.Equal(t, dimBefore, dimAfter, "dimensions should not have changed")
   366  		require.Equal(t, quantDimBefore, quantDimAfter, "dimensions should not have changed")
   367  	})
   368  
   369  	t.Run("verify dimensions after more updates", func(t *testing.T) {
   370  		idx := repo.GetIndex("Test")
   371  		idx.ForEachShard(func(name string, shard ShardLike) error {
   372  			assert.Equal(t, 12800, shard.Dimensions())
   373  			assert.Equal(t, 6400, shard.QuantizedDimensions(64))
   374  			assert.Equal(t, 12800, shard.QuantizedDimensions(0))
   375  			return nil
   376  		})
   377  	})
   378  }
   379  
   380  func publishDimensionMetricsFromRepo(repo *DB, className string) {
   381  	if !repo.config.TrackVectorDimensions {
   382  		log.Printf("Vector dimensions tracking is disabled, returning 0")
   383  		return
   384  	}
   385  	index := repo.GetIndex(schema.ClassName(className))
   386  	index.ForEachShard(func(name string, shard ShardLike) error {
   387  		shard.publishDimensionMetrics()
   388  		return nil
   389  	})
   390  }
   391  
   392  func getSingleShardNameFromRepo(repo *DB, className string) string {
   393  	shardName := ""
   394  	if !repo.config.TrackVectorDimensions {
   395  		log.Printf("Vector dimensions tracking is disabled, returning 0")
   396  		return shardName
   397  	}
   398  	index := repo.GetIndex(schema.ClassName(className))
   399  	index.ForEachShard(func(name string, shard ShardLike) error {
   400  		shardName = shard.Name()
   401  		return nil
   402  	})
   403  	return shardName
   404  }
   405  
// Test_DimensionTrackingMetrics verifies that the Prometheus gauges
// VectorDimensionsSum and VectorSegmentsSum are published correctly for a
// plain HNSW class, a BQ-compressed class, and a PQ-compressed class, and
// that both are reset to zero when the class is dropped.
//
// NOTE(review): the subtests share mutable state (shardName is set in each
// import subtest and reused by the matching delete subtest), so they must
// run in the order written.
func Test_DimensionTrackingMetrics(t *testing.T) {
	r := getRandomSeed()
	dirName := t.TempDir()
	// Set by each "import" subtest, read by the following "delete" subtest.
	var shardName string

	shardState := singleShardState()
	logger := logrus.New()
	schemaGetter := &fakeSchemaGetter{
		schema:     schema.Schema{Objects: &models.Schema{Classes: nil}},
		shardState: shardState,
	}
	// Unlike the other tests in this file, pass the metrics registry so the
	// shards publish into gauges we can read back below.
	metrics := monitoring.GetMetrics()
	repo, err := New(logger, Config{
		RootPath:                  dirName,
		QueryMaximumResults:       10000,
		MaxImportGoroutinesFactor: 1,
		TrackVectorDimensions:     true,
	}, &fakeRemoteClient{}, &fakeNodeResolver{}, &fakeRemoteNodeClient{}, &fakeReplicationClient{}, metrics)
	require.Nil(t, err)
	repo.SetSchemaGetter(schemaGetter)
	require.Nil(t, repo.WaitForStartup(testCtx()))
	defer repo.Shutdown(context.Background())

	migrator := NewMigrator(repo, logger)

	t.Run("set schema type=HNSW", func(t *testing.T) {
		class := &models.Class{
			Class:               "HNSW",
			VectorIndexConfig:   enthnsw.NewDefaultUserConfig(),
			InvertedIndexConfig: invertedConfig(),
		}
		schema := schema.Schema{
			Objects: &models.Schema{
				Classes: []*models.Class{class},
			},
		}

		require.Nil(t,
			migrator.AddClass(context.Background(), class, schemaGetter.shardState))

		schemaGetter.schema = schema
	})

	t.Run("import objects and validate metric", func(t *testing.T) {
		dim := 64
		for i := 0; i < 100; i++ {
			vec := make([]float32, dim)
			for j := range vec {
				vec[j] = r.Float32()
			}

			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
			obj := &models.Object{Class: "HNSW", ID: id}
			err := repo.PutObject(context.Background(), obj, vec, nil, nil)
			require.Nil(t, err)
		}

		publishDimensionMetricsFromRepo(repo, "HNSW")

		// Uncompressed HNSW reports raw dimensions: 100 objects * 64 dims.
		shardName = getSingleShardNameFromRepo(repo, "HNSW")
		metric, err := metrics.VectorDimensionsSum.GetMetricWithLabelValues("HNSW", shardName)
		require.Nil(t, err)
		metricValue := testutil.ToFloat64(metric)
		require.Equal(t, 6400.0, metricValue, "dimensions should not have changed")
	})

	t.Run("delete class", func(t *testing.T) {
		err := migrator.DropClass(context.Background(), "HNSW")
		require.Nil(t, err)
		// Dropping the class must zero out the gauge for its shard.
		metric, err := metrics.VectorDimensionsSum.GetMetricWithLabelValues("HNSW", shardName)
		require.Nil(t, err)
		metricValue := testutil.ToFloat64(metric)
		require.Equal(t, 0.0, metricValue, "metric should be reset")
	})

	t.Run("set schema type=BQ", func(t *testing.T) {
		vectorIndexConfig := enthnsw.NewDefaultUserConfig()
		vectorIndexConfig.BQ.Enabled = true

		class := &models.Class{
			Class:               "BQ",
			VectorIndexConfig:   vectorIndexConfig,
			InvertedIndexConfig: invertedConfig(),
		}
		schema := schema.Schema{
			Objects: &models.Schema{
				Classes: []*models.Class{class},
			},
		}

		require.Nil(t,
			migrator.AddClass(context.Background(), class, schemaGetter.shardState))

		schemaGetter.schema = schema
	})

	t.Run("import objects and validate metric type=BQ", func(t *testing.T) {
		dim := 64
		for i := 0; i < 100; i++ {
			vec := make([]float32, dim)
			for j := range vec {
				vec[j] = r.Float32()
			}

			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
			obj := &models.Object{Class: "BQ", ID: id}
			err := repo.PutObject(context.Background(), obj, vec, nil, nil)
			require.Nil(t, err)
		}

		publishDimensionMetricsFromRepo(repo, "BQ")

		// With BQ compression the dimensions gauge stays 0 and the segments
		// gauge is published instead: 100 objects * 64 dims / 8 bits = 800.
		shardName = getSingleShardNameFromRepo(repo, "BQ")
		metric, err := metrics.VectorDimensionsSum.GetMetricWithLabelValues("BQ", shardName)
		require.Nil(t, err)
		metricValue := testutil.ToFloat64(metric)
		require.Equal(t, 0.0, metricValue, "dimensions should not have changed")

		metric, err = metrics.VectorSegmentsSum.GetMetricWithLabelValues("BQ", shardName)
		require.Nil(t, err)
		metricValue = testutil.ToFloat64(metric)
		require.Equal(t, 800.0, metricValue, "segments should match")
	})

	t.Run("delete class type=BQ", func(t *testing.T) {
		err := migrator.DropClass(context.Background(), "BQ")
		require.Nil(t, err)
		metric, err := metrics.VectorDimensionsSum.GetMetricWithLabelValues("BQ", shardName)
		require.Nil(t, err)
		metricValue := testutil.ToFloat64(metric)
		require.Equal(t, 0.0, metricValue, "metric should be still zero")

		metric, err = metrics.VectorSegmentsSum.GetMetricWithLabelValues("BQ", shardName)
		require.Nil(t, err)
		metricValue = testutil.ToFloat64(metric)
		require.Equal(t, 0.0, metricValue, "metrics should be reset")
	})

	t.Run("set schema type=PQ", func(t *testing.T) {
		vectorIndexConfig := enthnsw.NewDefaultUserConfig()
		vectorIndexConfig.PQ.Enabled = true
		vectorIndexConfig.PQ.Segments = 10

		class := &models.Class{
			Class:               "PQ",
			VectorIndexConfig:   vectorIndexConfig,
			InvertedIndexConfig: invertedConfig(),
		}
		schema := schema.Schema{
			Objects: &models.Schema{
				Classes: []*models.Class{class},
			},
		}

		require.Nil(t,
			migrator.AddClass(context.Background(), class, schemaGetter.shardState))

		schemaGetter.schema = schema
	})

	t.Run("import objects and validate metric type=PQ", func(t *testing.T) {
		dim := 64
		for i := 0; i < 100; i++ {
			vec := make([]float32, dim)
			for j := range vec {
				vec[j] = r.Float32()
			}

			id := strfmt.UUID(uuid.MustParse(fmt.Sprintf("%032d", i)).String())
			obj := &models.Object{Class: "PQ", ID: id}
			err := repo.PutObject(context.Background(), obj, vec, nil, nil)
			require.Nil(t, err)
		}

		publishDimensionMetricsFromRepo(repo, "PQ")

		// With PQ the dimensions gauge stays 0 and the segments gauge is
		// published instead: 100 objects * 10 configured segments = 1000.
		shardName = getSingleShardNameFromRepo(repo, "PQ")
		metric, err := metrics.VectorDimensionsSum.GetMetricWithLabelValues("PQ", shardName)
		require.Nil(t, err)
		metricValue := testutil.ToFloat64(metric)
		require.Equal(t, 0.0, metricValue, "dimensions should not have changed")

		metric, err = metrics.VectorSegmentsSum.GetMetricWithLabelValues("PQ", shardName)
		require.Nil(t, err)
		metricValue = testutil.ToFloat64(metric)
		require.Equal(t, 1000.0, metricValue, "segments should match")
	})

	t.Run("delete class type=PQ", func(t *testing.T) {
		err := migrator.DropClass(context.Background(), "PQ")
		require.Nil(t, err)
		metric, err := metrics.VectorDimensionsSum.GetMetricWithLabelValues("PQ", shardName)
		require.Nil(t, err)
		metricValue := testutil.ToFloat64(metric)
		require.Equal(t, 0.0, metricValue, "metric should be still zero")

		metric, err = metrics.VectorSegmentsSum.GetMetricWithLabelValues("PQ", shardName)
		require.Nil(t, err)
		metricValue = testutil.ToFloat64(metric)
		require.Equal(t, 0.0, metricValue, "metrics should be reset")
	})
}