github.com/status-im/status-go@v1.1.0/appdatabase/database.go (about)

package appdatabase

import (
	"crypto/sha256"
	"database/sql"
	"encoding/json"
	"math/big"

	d_common "github.com/status-im/status-go/common"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/common/hexutil"
	"github.com/ethereum/go-ethereum/core/types"
	"github.com/ethereum/go-ethereum/crypto"
	"github.com/ethereum/go-ethereum/log"

	"github.com/status-im/status-go/appdatabase/migrations"
	migrationsprevnodecfg "github.com/status-im/status-go/appdatabase/migrationsprevnodecfg"
	"github.com/status-im/status-go/nodecfg"
	"github.com/status-im/status-go/services/wallet/bigint"
	w_common "github.com/status-im/status-go/services/wallet/common"
	"github.com/status-im/status-go/sqlite"

	e_types "github.com/status-im/status-go/eth-node/types"
)

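// nodeCfgMigrationDate is the migration version (a Unix timestamp) before which
// the legacy pre-nodecfg migrations and the node config migration still have to
// be applied (see doMigration).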
const nodeCfgMigrationDate = 1640111208

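// customSteps are migrations that cannot be expressed in plain SQL and run as
// Go callbacks tied to the listed schema versions.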
var customSteps = []*sqlite.PostStep{
	{Version: 1674136690, CustomMigration: migrateEnsUsernames},
	{Version: 1686048341, CustomMigration: migrateWalletJSONBlobs, RollBackVersion: 1686041510},
	{Version: 1687193315, CustomMigration: migrateWalletTransferFromToAddresses, RollBackVersion: 1686825075},
}

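// CurrentAppDBKeyUID is the key UID associated with the current app database;
// FixMissingKeyUIDForAccounts uses it to backfill accounts.key_uid for default
// and generated accounts.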
var CurrentAppDBKeyUID string

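// DbInitializer exposes Initialize, which opens the encrypted app database at
// the given path and applies all migrations via InitializeDB.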
type DbInitializer struct {
}

func (a DbInitializer) Initialize(path, password string, kdfIterationsNumber int) (*sql.DB, error) {
	return InitializeDB(path, password, kdfIterationsNumber)
}

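// doMigration runs the legacy pre-nodecfg migrations and the node config
// migration when needed, then applies the current SQL migrations together with
// the custom Go post-steps.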
func doMigration(db *sql.DB) error {
	lastMigration, migrationTableExists, err := sqlite.GetLastMigrationVersion(db)
	if err != nil {
		return err
	}

	if !migrationTableExists || (lastMigration > 0 && lastMigration < nodeCfgMigrationDate) {
		// If migrations have never been run, or the latest applied migration predates
		// the nodecfg table migration, run the legacy migrations first
		err = migrationsprevnodecfg.Migrate(db)
		if err != nil {
			return err
		}

		// NodeConfig migration cannot be done with SQL
		err = nodecfg.MigrateNodeConfig(db)
		if err != nil {
			return err
		}
	}

	postSteps := []*sqlite.PostStep{
		{Version: 1662365868, CustomMigration: FixMissingKeyUIDForAccounts},
		{Version: 1720606449, CustomMigration: OptimizeMobileWakuV2SettingsForMobileV1},
	}
	postSteps = append(postSteps, customSteps...)
	// Run all the new migrations
	err = migrations.Migrate(db, postSteps)
	if err != nil {
		return err
	}

	return nil
}

// InitializeDB creates the database file at the given path and applies all migrations.
func InitializeDB(path, password string, kdfIterationsNumber int) (*sql.DB, error) {
	db, err := sqlite.OpenDB(path, password, kdfIterationsNumber)
	if err != nil {
		return nil, err
	}

	err = doMigration(db)
	if err != nil {
		return nil, err
	}

	return db, nil
}

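// OptimizeMobileWakuV2SettingsForMobileV1 enables the waku v2 light client and
// disables store confirmations for sent messages when running on a mobile platform.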
func OptimizeMobileWakuV2SettingsForMobileV1(sqlTx *sql.Tx) error {
	if d_common.IsMobilePlatform() {
		_, err := sqlTx.Exec(`UPDATE wakuv2_config SET light_client = ?, enable_store_confirmation_for_messages_sent = ?`, true, false)
		if err != nil {
			log.Error("failed to enable light client and disable store confirmation for mobile v1", "err", err.Error())
			return err
		}
	}
	return nil
}

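// FixMissingKeyUIDForAccounts backfills accounts.key_uid: non-generated accounts
// get a key UID derived from the SHA-256 of their public key, while default and
// generated accounts get CurrentAppDBKeyUID and wallet_root_address as derived_from.
// It also resets the default wallet account's color and emoji.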
func FixMissingKeyUIDForAccounts(sqlTx *sql.Tx) error {
	rows, err := sqlTx.Query(`SELECT address,pubkey FROM accounts WHERE pubkey IS NOT NULL AND type != '' AND type != 'generated'`)
	if err != nil {
		log.Error("Migrating accounts: failed to query accounts", "err", err.Error())
		return err
	}
	defer rows.Close()
	for rows.Next() {
		var address e_types.Address
		var pubkey e_types.HexBytes
		err = rows.Scan(&address, &pubkey)
		if err != nil {
			log.Error("Migrating accounts: failed to scan records", "err", err.Error())
			return err
		}
		pk, err := crypto.UnmarshalPubkey(pubkey)
		if err != nil {
			log.Error("Migrating accounts: failed to unmarshal pubkey", "err", err.Error(), "pubkey", string(pubkey))
			return err
		}
		pkBytes := sha256.Sum256(crypto.FromECDSAPub(pk))
		keyUIDHex := hexutil.Encode(pkBytes[:])
		_, err = sqlTx.Exec(`UPDATE accounts SET key_uid = ? WHERE address = ?`, keyUIDHex, address)
		if err != nil {
			log.Error("Migrating accounts: failed to update key_uid for imported accounts", "err", err.Error())
			return err
		}
	}

	var walletRootAddress e_types.Address
	err = sqlTx.QueryRow(`SELECT wallet_root_address FROM settings WHERE synthetic_id='id'`).Scan(&walletRootAddress)
	if err == sql.ErrNoRows {
		// we shouldn't reach here; if we do, it most likely happened in a test
		log.Warn("Migrating accounts: no wallet_root_address found in settings")
		return nil
	}
	if err != nil {
		log.Error("Migrating accounts: failed to get wallet_root_address", "err", err.Error())
		return err
	}
	_, err = sqlTx.Exec(`UPDATE accounts SET key_uid = ?, derived_from = ? WHERE type = '' OR type = 'generated'`, CurrentAppDBKeyUID, walletRootAddress.Hex())
	if err != nil {
		log.Error("Migrating accounts: failed to update key_uid/derived_from", "err", err.Error())
		return err
	}
	// fix the default wallet account color issue https://github.com/status-im/status-mobile/issues/20476
	// colors of other account types don't matter here
	_, err = sqlTx.Exec(`UPDATE accounts SET color = 'blue',emoji='🐳' WHERE wallet = 1`)
	if err != nil {
		log.Error("Migrating accounts: failed to update default wallet account's color to blue", "err", err.Error())
		return err
	}
	return nil
}

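// migrateEnsUsernames copies ENS usernames stored as a JSON array in
// settings.usernames into the ens_usernames table, skipping names that are
// already present there.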
func migrateEnsUsernames(sqlTx *sql.Tx) error {

	// 1. Check if the ens_usernames table already exists

	// row := sqlTx.QueryRow("SELECT exists(SELECT name FROM sqlite_master WHERE type='table' AND name='ens_usernames')")
	// tableExists := false
	// err := row.Scan(&tableExists)

	// if err != nil && err != sql.ErrNoRows {
	// 	return err
	// }

	// if tableExists {
	// 	return nil
	// }

	// -- 1. Create new ens_usernames table

	// _, err = sqlTx.Exec(`CREATE TABLE IF NOT EXISTS ens_usernames (
	// 	"username" TEXT NOT NULL,
	// 	"chain_id" UNSIGNED BIGINT DEFAULT 1);`)

	// if err != nil {
	// 	log.Error("Migrating ens usernames: failed to create table", "err", err.Error())
	// 	return err
	// }

	// -- 2. Move current `settings.usernames` to the new table
	/*
		INSERT INTO ens_usernames (username)
			SELECT json_each.value FROM settings, json_each(usernames);
	*/

	rows, err := sqlTx.Query(`SELECT usernames FROM settings`)

	if err != nil {
		log.Error("Migrating ens usernames: failed to query 'settings.usernames'", "err", err.Error())
		return err
	}

	defer rows.Close()

	var usernames []string

	for rows.Next() {
		var usernamesJSON sql.NullString
		err := rows.Scan(&usernamesJSON)

		if err != nil {
			return err
		}

		if !usernamesJSON.Valid {
			continue
		}

		var list []string
		err = json.Unmarshal([]byte(usernamesJSON.String), &list)
		if err != nil {
			return err
		}

		usernames = append(usernames, list...)
	}

	defaultChainID := 1

	for _, username := range usernames {

		var usernameAlreadyMigrated bool

		row := sqlTx.QueryRow(`SELECT EXISTS(SELECT 1 FROM ens_usernames WHERE username=? AND chain_id=?)`, username, defaultChainID)
		err := row.Scan(&usernameAlreadyMigrated)

		if err != nil {
			return err
		}

		if usernameAlreadyMigrated {
			continue
		}

		_, err = sqlTx.Exec(`INSERT INTO ens_usernames (username, chain_id) VALUES (?, ?)`, username, defaultChainID)
		if err != nil {
			log.Error("Migrating ens usernames: failed to insert username into new database", "ensUsername", username, "err", err.Error())
		}
	}

	return nil
}

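// MigrateV3ToV4 migrates the app database at v3Path to the v4 format at v4Path,
// delegating the work to sqlite.MigrateV3ToV4.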
func MigrateV3ToV4(v3Path string, v4Path string, password string, kdfIterationsNumber int, onStart func(), onEnd func()) error {
	return sqlite.MigrateV3ToV4(v3Path, v4Path, password, kdfIterationsNumber, onStart, onEnd)
}

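// batchSize is the number of transfers rows processed per iteration by the
// wallet blob migrations below.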
const (
	batchSize = 1000
)

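// migrateWalletJSONBlobs adds typed columns to the transfers table and backfills
// them in batches from the JSON-encoded tx, receipt and log blobs already stored there.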
func migrateWalletJSONBlobs(sqlTx *sql.Tx) error {
	var batchEntries [][]interface{}

	// Extract useful information from the receipt blob and store it in SQL-queryable columns.
	//
	// Added tx_hash because the hash column in the transfers table is not (always) the transaction hash:
	// each entry in that table is either a native token (ETH) transfer or an ERC20/ERC721 token transfer.
	// Added block_hash because the block hash we already store is generated by us and used as the block entry ID.
	// Added receipt_type, since the existing type column only distinguishes chain (ETH) from token transfers.
	// Added log_index to identify which log entry the stored log data represents.
	//
	// Dropped storing postState because it was replaced by the status field after EIP-658.
	// Dropped duplicating logs until we have a more structured way to store them;
	// they can still be extracted from transfers.receipt.
	// Dropped the bloom filter because SQLite cannot use it efficiently.
	//
	// Extract useful information from the tx blob.
	//
	// Added tx_type, which might differ from the receipt type.
	//
	// Dropped access_list; it would need a separate table.
	// chain_id is already present.
	// Dropped v, r, s because storing them as BLOBs has no obvious use.
	// Big integer values are stored as clamped 64-bit INTs because 128-bit blobs/strings can't be used
	// for integer arithmetic; the _clamped64 suffix marks clamped 64-bit INT values that are useful
	// for queries (sorting, filtering, ...).
	// The amount is stored as a fixed-length 128-bit hex string so it can be sorted and filtered.
	newColumnsAndIndexSetup := `
		ALTER TABLE transfers ADD COLUMN status INT;
		ALTER TABLE transfers ADD COLUMN receipt_type INT;
		ALTER TABLE transfers ADD COLUMN tx_hash BLOB;
		ALTER TABLE transfers ADD COLUMN log_index INT;
		ALTER TABLE transfers ADD COLUMN block_hash BLOB;
		ALTER TABLE transfers ADD COLUMN cumulative_gas_used INT;
		ALTER TABLE transfers ADD COLUMN contract_address TEXT;
		ALTER TABLE transfers ADD COLUMN gas_used INT;
		ALTER TABLE transfers ADD COLUMN tx_index INT;

		ALTER TABLE transfers ADD COLUMN tx_type INT;
		ALTER TABLE transfers ADD COLUMN protected BOOLEAN;
		ALTER TABLE transfers ADD COLUMN gas_limit UNSIGNED INT;
		ALTER TABLE transfers ADD COLUMN gas_price_clamped64 INT;
		ALTER TABLE transfers ADD COLUMN gas_tip_cap_clamped64 INT;
		ALTER TABLE transfers ADD COLUMN gas_fee_cap_clamped64 INT;
		ALTER TABLE transfers ADD COLUMN amount_padded128hex CHAR(32);
		ALTER TABLE transfers ADD COLUMN account_nonce INT;
		ALTER TABLE transfers ADD COLUMN size INT;
		ALTER TABLE transfers ADD COLUMN token_address BLOB;
		ALTER TABLE transfers ADD COLUMN token_id BLOB;

		CREATE INDEX idx_transfers_filter ON transfers (status, token_address, token_id);`

	rowIndex := 0
	mightHaveRows := true

	_, err := sqlTx.Exec(newColumnsAndIndexSetup)
	if err != nil {
		return err
	}

	for mightHaveRows {
		var chainID uint64
		var hash common.Hash
		var address common.Address
		var entryType string

		rows, err := sqlTx.Query(`SELECT hash, address, network_id, tx, receipt, log, type FROM transfers WHERE tx IS NOT NULL OR receipt IS NOT NULL LIMIT ? OFFSET ?`, batchSize, rowIndex)
		if err != nil {
			return err
		}

		curProcessed := 0
		for rows.Next() {
			tx := &types.Transaction{}
			r := &types.Receipt{}
			l := &types.Log{}

			// Scan row data into the transaction, receipt and log objects
			nullableTx := sqlite.JSONBlob{Data: tx}
			nullableR := sqlite.JSONBlob{Data: r}
			nullableL := sqlite.JSONBlob{Data: l}
			err = rows.Scan(&hash, &address, &chainID, &nullableTx, &nullableR, &nullableL, &entryType)
			if err != nil {
				rows.Close()
				return err
			}
			var logIndex *uint
			if nullableL.Valid {
				logIndex = new(uint)
				*logIndex = l.Index
			}

			var currentRow []interface{}

			// Check that the receipt is not null before transferring the receipt data
			if nullableR.Valid {
				currentRow = append(currentRow, r.Status, r.Type, r.TxHash, logIndex, r.BlockHash, r.CumulativeGasUsed, r.ContractAddress, r.GasUsed, r.TransactionIndex)
			} else {
				for i := 0; i < 9; i++ {
					currentRow = append(currentRow, nil)
				}
			}

			if nullableTx.Valid {
				correctType, tokenID, value, tokenAddress := extractToken(entryType, tx, l, nullableL.Valid)

				gasPrice := sqlite.BigIntToClampedInt64(tx.GasPrice())
				gasTipCap := sqlite.BigIntToClampedInt64(tx.GasTipCap())
				gasFeeCap := sqlite.BigIntToClampedInt64(tx.GasFeeCap())
				valueStr := sqlite.BigIntToPadded128BitsStr(value)

				currentRow = append(currentRow, tx.Type(), tx.Protected(), tx.Gas(), gasPrice, gasTipCap, gasFeeCap, valueStr, tx.Nonce(), int64(tx.Size()), tokenAddress, (*bigint.SQLBigIntBytes)(tokenID), correctType)
			} else {
				for i := 0; i < 11; i++ {
					currentRow = append(currentRow, nil)
				}
				currentRow = append(currentRow, w_common.EthTransfer)
			}
			currentRow = append(currentRow, hash, address, chainID)
			batchEntries = append(batchEntries, currentRow)

			curProcessed++
		}
		rowIndex += curProcessed

		// Check if there was an error in the last rows.Next()
		rows.Close()
		if err = rows.Err(); err != nil {
			return err
		}
		mightHaveRows = (curProcessed == batchSize)

		// insert extracted data into the new columns
		if len(batchEntries) > 0 {
			var stmt *sql.Stmt
			stmt, err = sqlTx.Prepare(`UPDATE transfers SET status = ?, receipt_type = ?, tx_hash = ?, log_index = ?, block_hash = ?, cumulative_gas_used = ?, contract_address = ?, gas_used = ?, tx_index = ?,
				tx_type = ?, protected = ?, gas_limit = ?, gas_price_clamped64 = ?, gas_tip_cap_clamped64 = ?, gas_fee_cap_clamped64 = ?, amount_padded128hex = ?, account_nonce = ?, size = ?, token_address = ?, token_id = ?, type = ?
				WHERE hash = ? AND address = ? AND network_id = ?`)
			if err != nil {
				return err
			}

			for _, dataEntry := range batchEntries {
				_, err = stmt.Exec(dataEntry...)
				if err != nil {
					return err
				}
			}

			// Reset batchEntries for the next batch
			batchEntries = [][]interface{}{}
		}
	}

	return nil
}

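// extractToken derives the transfer type, token ID, value and token address for a
// transfers row: when a log is present the data is decoded from it, otherwise the
// row is treated as a plain ETH transfer and the value is taken from the transaction itself.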
func extractToken(entryType string, tx *types.Transaction, l *types.Log, logValid bool) (correctType w_common.Type, tokenID *big.Int, value *big.Int, tokenAddress *common.Address) {
	if logValid {
		correctType, tokenAddress, _, _ = w_common.ExtractTokenTransferData(w_common.Type(entryType), l, tx)
		_, _, _, tokenIDs, values, _ := w_common.ParseTransferLog(*l)
		if len(tokenIDs) > 0 {
			tokenID = tokenIDs[0]
		}
		if len(values) > 0 {
			value = values[0]
		}
	} else {
		correctType = w_common.Type(entryType)
		value = new(big.Int).Set(tx.Value())
	}
	return
}

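// migrateWalletTransferFromToAddresses adds tx_from_address and tx_to_address columns
// to the transfers table and backfills them (and token_address) from the stored tx and log blobs.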
func migrateWalletTransferFromToAddresses(sqlTx *sql.Tx) error {
	var batchEntries [][]interface{}

	// Extract transfer from/to addresses and add the information into the new columns
	// Re-extract the token address and store it as a blob instead of a string
	newColumnsAndIndexSetup := `
		ALTER TABLE transfers ADD COLUMN tx_from_address BLOB;
		ALTER TABLE transfers ADD COLUMN tx_to_address BLOB;`

	rowIndex := 0
	mightHaveRows := true

	_, err := sqlTx.Exec(newColumnsAndIndexSetup)
	if err != nil {
		return err
	}

	for mightHaveRows {
		var chainID uint64
		var hash common.Hash
		var address common.Address
		var sender common.Address
		var entryType string

		rows, err := sqlTx.Query(`SELECT hash, address, sender, network_id, tx, log, type FROM transfers WHERE tx IS NOT NULL OR receipt IS NOT NULL LIMIT ? OFFSET ?`, batchSize, rowIndex)
		if err != nil {
			return err
		}

		curProcessed := 0
		for rows.Next() {
			tx := &types.Transaction{}
			l := &types.Log{}

			// Scan row data into the transaction and log objects
			nullableTx := sqlite.JSONBlob{Data: tx}
			nullableL := sqlite.JSONBlob{Data: l}
			err = rows.Scan(&hash, &address, &sender, &chainID, &nullableTx, &nullableL, &entryType)
			if err != nil {
				rows.Close()
				return err
			}

			var currentRow []interface{}

			var tokenAddress *common.Address
			var txFrom *common.Address
			var txTo *common.Address

			if nullableTx.Valid {
				if nullableL.Valid {
					_, tokenAddress, txFrom, txTo = w_common.ExtractTokenTransferData(w_common.Type(entryType), l, tx)
				} else {
					txFrom = &sender
					txTo = tx.To()
				}
			}

			currentRow = append(currentRow, tokenAddress, txFrom, txTo)

			currentRow = append(currentRow, hash, address, chainID)
			batchEntries = append(batchEntries, currentRow)

			curProcessed++
		}
		rowIndex += curProcessed

		// Check if there was an error in the last rows.Next()
		rows.Close()
		if err = rows.Err(); err != nil {
			return err
		}
		mightHaveRows = (curProcessed == batchSize)

		// insert extracted data into the new columns
		if len(batchEntries) > 0 {
			var stmt *sql.Stmt
			stmt, err = sqlTx.Prepare(`UPDATE transfers SET token_address = ?, tx_from_address = ?, tx_to_address = ?
				WHERE hash = ? AND address = ? AND network_id = ?`)
			if err != nil {
				return err
			}

			for _, dataEntry := range batchEntries {
				_, err = stmt.Exec(dataEntry...)
				if err != nil {
					return err
				}
			}

			// Reset batchEntries for the next batch
			batchEntries = [][]interface{}{}
		}
	}

	return nil
}