214
vendor/github.com/status-im/status-go/services/wallet/history/balance.go
generated
vendored
Normal file
@@ -0,0 +1,214 @@
package history

import (
    "context"
    "errors"
    "fmt"
    "math/big"
    "time"

    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/common/hexutil"
    "github.com/ethereum/go-ethereum/log"
)

const genesisTimestamp = 1438269988

// Specific time intervals for which balance history can be fetched
type TimeInterval int

const (
    BalanceHistory7Days TimeInterval = iota + 1
    BalanceHistory1Month
    BalanceHistory6Months
    BalanceHistory1Year
    BalanceHistoryAllTime
)

const aDay = time.Duration(24) * time.Hour

var timeIntervalDuration = map[TimeInterval]time.Duration{
    BalanceHistory7Days:   time.Duration(7) * aDay,
    BalanceHistory1Month:  time.Duration(30) * aDay,
    BalanceHistory6Months: time.Duration(6*30) * aDay,
    BalanceHistory1Year:   time.Duration(365) * aDay,
}

func TimeIntervalDurationSecs(timeInterval TimeInterval) uint64 {
    return uint64(timeIntervalDuration[timeInterval].Seconds())
}

type DataPoint struct {
    Balance     *hexutil.Big
    Timestamp   uint64
    BlockNumber *hexutil.Big
}

// String returns a string representation of the data point
func (d *DataPoint) String() string {
    return fmt.Sprintf("timestamp: %d balance: %v block: %v", d.Timestamp, d.Balance.ToInt(), d.BlockNumber.ToInt())
}

type Balance struct {
    db *BalanceDB
}

func NewBalance(db *BalanceDB) *Balance {
    return &Balance{db}
}

// get returns the balance history for the given addresses from the given timestamp until now
func (b *Balance) get(ctx context.Context, chainID uint64, currency string, addresses []common.Address, fromTimestamp uint64) ([]*entry, error) {
    log.Debug("Getting balance history", "chainID", chainID, "currency", currency, "address", addresses, "fromTimestamp", fromTimestamp)

    cached, err := b.db.getNewerThan(&assetIdentity{chainID, addresses, currency}, fromTimestamp)
    if err != nil {
        return nil, err
    }

    return cached, nil
}

func (b *Balance) addEdgePoints(chainID uint64, currency string, addresses []common.Address, fromTimestamp, toTimestamp uint64, data []*entry) (res []*entry, err error) {
    log.Debug("Adding edge points", "chainID", chainID, "currency", currency, "address", addresses, "fromTimestamp", fromTimestamp)

    res = data

    for _, address := range addresses {
        var firstEntry *entry

        if len(data) > 0 {
            for _, entry := range data {
                if entry.address == address {
                    firstEntry = entry
                    break
                }
            }
        }
        if firstEntry == nil {
            firstEntry = &entry{
                chainID:     chainID,
                address:     address,
                tokenSymbol: currency,
                timestamp:   int64(fromTimestamp),
            }
        }

        previous, err := b.db.getEntryPreviousTo(firstEntry)
        if err != nil {
            return nil, err
        }

        firstTimestamp, lastTimestamp := timestampBoundaries(fromTimestamp, toTimestamp, address, data)

        if previous != nil {
            previous.timestamp = int64(firstTimestamp) // We might need to use another minimal offset respecting the time interval
            previous.block = nil
            res = append([]*entry{previous}, res...)
        } else {
            // Add a zero point at the beginning to draw a line from
            res = append([]*entry{
                {
                    chainID:     chainID,
                    address:     address,
                    tokenSymbol: currency,
                    timestamp:   int64(firstTimestamp),
                    balance:     big.NewInt(0),
                },
            }, res...)
        }

        if res[len(res)-1].timestamp < int64(lastTimestamp) {
            // Add a last point to draw a line to
            res = append(res, &entry{
                chainID:     chainID,
                address:     address,
                tokenSymbol: currency,
                timestamp:   int64(lastTimestamp),
                balance:     res[len(res)-1].balance,
            })
        }
    }

    return res, nil
}

func timestampBoundaries(fromTimestamp, toTimestamp uint64, address common.Address, data []*entry) (firstTimestamp, lastTimestamp uint64) {
    firstTimestamp = fromTimestamp
    if fromTimestamp == 0 {
        if len(data) > 0 {
            for _, entry := range data {
                if entry.address == address {
                    if entry.timestamp == 0 {
                        panic("data[0].timestamp must never be 0")
                    }
                    firstTimestamp = uint64(entry.timestamp) - 1
                    break
                }
            }
        }
        if firstTimestamp == fromTimestamp {
            firstTimestamp = genesisTimestamp
        }
    }

    if toTimestamp < firstTimestamp {
        panic("toTimestamp < fromTimestamp")
    }

    lastTimestamp = toTimestamp

    return firstTimestamp, lastTimestamp
}

func addPaddingPoints(currency string, addresses []common.Address, toTimestamp uint64, data []*entry, limit int) (res []*entry, err error) {
    log.Debug("addPaddingPoints start", "currency", currency, "address", addresses, "len(data)", len(data), "data", data, "limit", limit)

    if len(data) < 2 { // Edge points must be added separately during the previous step
        return nil, errors.New("slice is empty")
    }

    if limit <= len(data) {
        return data, nil
    }

    fromTimestamp := uint64(data[0].timestamp)
    delta := (toTimestamp - fromTimestamp) / uint64(limit-1)

    res = make([]*entry, len(data))
    copy(res, data)

    var address common.Address
    if len(addresses) > 0 {
        address = addresses[0]
    }

    for i, j, index := 1, 0, 0; len(res) < limit; index++ {
        // Add a last point to draw a line to. For some cases we might not need it,
        // but when merging with points from other chains, we might get a wrong balance if we don't have it.
        paddingTimestamp := int64(fromTimestamp + delta*uint64(i))

        if paddingTimestamp < data[j].timestamp {
            // make room for a new point
            res = append(res[:index+1], res[index:]...)
            // insert a new point
            entry := &entry{
                address:     address,
                tokenSymbol: currency,
                timestamp:   paddingTimestamp,
                balance:     data[j-1].balance, // take the previous balance
            }
            res[index] = entry

            log.Debug("Added padding point", "entry", entry, "timestamp", paddingTimestamp, "i", i, "j", j, "index", index)
            i++
        } else if paddingTimestamp >= data[j].timestamp {
            log.Debug("Kept real point", "entry", data[j], "timestamp", paddingTimestamp, "i", i, "j", j, "index", index)
            j++
        }
    }

    log.Debug("addPaddingPoints end", "len(res)", len(res))

    return res, nil
}
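The padding step above spaces its synthetic points evenly: with limit points spread over [fromTimestamp, toTimestamp], each padding timestamp lands delta = (toTimestamp - fromTimestamp) / (limit - 1) seconds after the previous one, and real points are kept wherever they already fall on that grid. A minimal standalone sketch of just that arithmetic (paddingTimestamps is an illustrative helper, not part of the vendored file):

package main

import "fmt"

// paddingTimestamps mirrors the spacing used by addPaddingPoints: limit points
// spread evenly between fromTimestamp and toTimestamp.
func paddingTimestamps(fromTimestamp, toTimestamp uint64, limit int) []uint64 {
    delta := (toTimestamp - fromTimestamp) / uint64(limit-1)
    out := make([]uint64, 0, limit)
    for i := 0; i < limit; i++ {
        out = append(out, fromTimestamp+delta*uint64(i))
    }
    return out
}

func main() {
    // 14 points over 7 days, matching minPointsForGraph in service.go (twice a day).
    week := uint64(7 * 24 * 60 * 60)
    fmt.Println(paddingTimestamps(0, week, 14))
}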
152
vendor/github.com/status-im/status-go/services/wallet/history/balance_db.go
generated
vendored
Normal file
@@ -0,0 +1,152 @@
package history

import (
    "database/sql"
    "encoding/hex"
    "fmt"
    "math/big"

    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/log"
    "github.com/status-im/status-go/services/wallet/bigint"
)

type BalanceDB struct {
    db *sql.DB
}

func NewBalanceDB(sqlDb *sql.DB) *BalanceDB {
    return &BalanceDB{
        db: sqlDb,
    }
}

// entry represents a single row in the balance_history table
type entry struct {
    chainID      uint64
    address      common.Address
    tokenSymbol  string
    tokenAddress common.Address
    block        *big.Int
    timestamp    int64
    balance      *big.Int
}

type assetIdentity struct {
    ChainID     uint64
    Addresses   []common.Address
    TokenSymbol string
}

func (a *assetIdentity) addressesToString() string {
    var addressesStr string
    for i, address := range a.Addresses {
        addressStr := hex.EncodeToString(address[:])
        if i == 0 {
            addressesStr = "X'" + addressStr + "'"
        } else {
            addressesStr += ", X'" + addressStr + "'"
        }
    }
    return addressesStr
}

func (e *entry) String() string {
    return fmt.Sprintf("chainID: %v, address: %v, tokenSymbol: %v, tokenAddress: %v, block: %v, timestamp: %v, balance: %v",
        e.chainID, e.address, e.tokenSymbol, e.tokenAddress, e.block, e.timestamp, e.balance)
}

func (b *BalanceDB) add(entry *entry) error {
    log.Debug("Adding entry to balance_history", "entry", entry)

    _, err := b.db.Exec("INSERT OR IGNORE INTO balance_history (chain_id, address, currency, block, timestamp, balance) VALUES (?, ?, ?, ?, ?, ?)", entry.chainID, entry.address, entry.tokenSymbol, (*bigint.SQLBigInt)(entry.block), entry.timestamp, (*bigint.SQLBigIntBytes)(entry.balance))
    return err
}

func (b *BalanceDB) getEntriesWithoutBalances(chainID uint64, address common.Address) (entries []*entry, err error) {
    rows, err := b.db.Query("SELECT blk_number, tr.timestamp, token_address from transfers tr LEFT JOIN balance_history bh ON bh.block = tr.blk_number WHERE tr.network_id = ? AND tr.address = ? AND tr.type != 'erc721' AND bh.block IS NULL",
        chainID, address)
    if err == sql.ErrNoRows {
        return nil, nil
    }

    if err != nil {
        return nil, err
    }
    defer rows.Close()

    entries = make([]*entry, 0)
    for rows.Next() {
        entry := &entry{
            chainID: chainID,
            address: address,
            block:   new(big.Int),
        }

        // tokenAddress can be NULL and cannot be scanned directly into common.Address
        tokenHexAddress := make([]byte, common.AddressLength)
        err := rows.Scan((*bigint.SQLBigInt)(entry.block), &entry.timestamp, &tokenHexAddress)
        if err != nil {
            return nil, err
        }

        tokenAddress := common.BytesToAddress(tokenHexAddress)
        if tokenAddress != (common.Address{}) {
            entry.tokenAddress = tokenAddress
        }
        entries = append(entries, entry)
    }
    return entries, nil
}

func (b *BalanceDB) getNewerThan(identity *assetIdentity, timestamp uint64) (entries []*entry, err error) {
    // DISTINCT removes duplicates that can happen when a block has multiple transfers of the same token
    rawQueryStr := "SELECT DISTINCT block, timestamp, balance, address FROM balance_history WHERE chain_id = ? AND address IN (%s) AND currency = ? AND timestamp > ? ORDER BY timestamp"
    queryString := fmt.Sprintf(rawQueryStr, identity.addressesToString())
    rows, err := b.db.Query(queryString, identity.ChainID, identity.TokenSymbol, timestamp)
    if err == sql.ErrNoRows {
        return nil, nil
    } else if err != nil {
        return nil, err
    }

    defer rows.Close()

    result := make([]*entry, 0)
    for rows.Next() {
        entry := &entry{
            chainID:     identity.ChainID,
            tokenSymbol: identity.TokenSymbol,
            block:       new(big.Int),
            balance:     new(big.Int),
        }
        err := rows.Scan((*bigint.SQLBigInt)(entry.block), &entry.timestamp, (*bigint.SQLBigIntBytes)(entry.balance), &entry.address)
        if err != nil {
            return nil, err
        }
        result = append(result, entry)
    }
    return result, nil
}

func (b *BalanceDB) getEntryPreviousTo(item *entry) (res *entry, err error) {
    res = &entry{
        chainID:     item.chainID,
        address:     item.address,
        block:       new(big.Int),
        balance:     new(big.Int),
        tokenSymbol: item.tokenSymbol,
    }

    queryStr := "SELECT block, timestamp, balance FROM balance_history WHERE chain_id = ? AND address = ? AND currency = ? AND timestamp < ? ORDER BY timestamp DESC LIMIT 1"
    row := b.db.QueryRow(queryStr, item.chainID, item.address, item.tokenSymbol, item.timestamp)

    err = row.Scan((*bigint.SQLBigInt)(res.block), &res.timestamp, (*bigint.SQLBigIntBytes)(res.balance))
    if err == sql.ErrNoRows {
        return nil, nil
    } else if err != nil {
        return nil, err
    }

    return res, nil
}
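getNewerThan cannot bind a variable-length address list as a single placeholder, so addressesToString renders each address as a SQLite blob literal (X'<hex>') and the list is spliced into the IN clause with fmt.Sprintf. A standalone sketch of that encoding, assuming plain 20-byte arrays in place of common.Address (blobList is an illustrative helper, not part of the vendored file):

package main

import (
    "encoding/hex"
    "fmt"
    "strings"
)

// blobList mirrors assetIdentity.addressesToString: every 20-byte address
// becomes a blob literal X'<hex>', joined with commas for use in an IN (...) clause.
func blobList(addresses [][20]byte) string {
    parts := make([]string, 0, len(addresses))
    for _, a := range addresses {
        parts = append(parts, "X'"+hex.EncodeToString(a[:])+"'")
    }
    return strings.Join(parts, ", ")
}

func main() {
    addr := [20]byte{0xde, 0xad, 0xbe, 0xef}
    query := fmt.Sprintf("SELECT block, timestamp, balance, address FROM balance_history WHERE address IN (%s)", blobList([][20]byte{addr}))
    fmt.Println(query)
}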
176
vendor/github.com/status-im/status-go/services/wallet/history/exchange.go
generated
vendored
Normal file
@@ -0,0 +1,176 @@
package history

import (
    "errors"
    "sync"
    "time"

    "github.com/status-im/status-go/services/wallet/market"
)

type tokenType = string
type currencyType = string
type yearType = int

type allTimeEntry struct {
    value          float32
    startTimestamp int64
    endTimestamp   int64
}

// Exchange caches conversion rates in memory on a daily basis
type Exchange struct {
    // cache keeps, for each year (key), a slice of daily values indexed by day of the year, from the first to the last available day
    cache map[tokenType]map[currencyType]map[yearType][]float32
    // special case for all-time information
    allTimeCache map[tokenType]map[currencyType][]allTimeEntry
    fetchMutex   sync.Mutex

    marketManager *market.Manager
}

func NewExchange(marketManager *market.Manager) *Exchange {
    return &Exchange{
        cache:         make(map[tokenType]map[currencyType]map[yearType][]float32),
        marketManager: marketManager,
    }
}

// GetExchangeRateForDay returns the exchange rate from token to currency on the day of the given date;
// if none exists, it returns a "missing <element>" error
func (e *Exchange) GetExchangeRateForDay(token tokenType, currency currencyType, date time.Time) (float32, error) {
    e.fetchMutex.Lock()
    defer e.fetchMutex.Unlock()

    currencyMap, found := e.cache[token]
    if !found {
        return 0, errors.New("missing token")
    }

    yearsMap, found := currencyMap[currency]
    if !found {
        return 0, errors.New("missing currency")
    }

    year := date.Year()
    valueForDays, found := yearsMap[year]
    if !found {
        // Search for the closest value in the all-time data
        allCurrencyMap, found := e.allTimeCache[token]
        if !found {
            return 0, errors.New("missing token in all time data")
        }

        allYearsMap, found := allCurrencyMap[currency]
        if !found {
            return 0, errors.New("missing currency in all time data")
        }
        for _, entry := range allYearsMap {
            if entry.startTimestamp <= date.Unix() && entry.endTimestamp > date.Unix() {
                return entry.value, nil
            }
        }
        return 0, errors.New("missing entry")
    }

    day := date.YearDay()
    if day >= len(valueForDays) {
        return 0, errors.New("missing day")
    }
    return valueForDays[day], nil
}

// FetchAndCacheMissingRates fetches exchange rates for the current and the previous year and caches them in memory
func (e *Exchange) FetchAndCacheMissingRates(token tokenType, currency currencyType) error {
    // Also protect the REST calls, to prevent fetching the same token/currency twice
    e.fetchMutex.Lock()
    defer e.fetchMutex.Unlock()

    // Allocate missing values
    currencyMap, found := e.cache[token]
    if !found {
        currencyMap = make(map[currencyType]map[yearType][]float32)
        e.cache[token] = currencyMap
    }

    yearsMap, found := currencyMap[currency]
    if !found {
        yearsMap = make(map[yearType][]float32)
        currencyMap[currency] = yearsMap
    }

    currentTime := time.Now().UTC()
    endOfPrevYearTime := time.Date(currentTime.Year()-1, 12, 31, 23, 0, 0, 0, time.UTC)

    daysToFetch := extendDaysSliceForYear(yearsMap, endOfPrevYearTime)

    curYearTime := time.Date(currentTime.Year(), currentTime.Month(), currentTime.Day(), 0, 0, 0, 0, time.UTC)
    daysToFetch += extendDaysSliceForYear(yearsMap, curYearTime)
    if daysToFetch == 0 {
        return nil
    }

    res, err := e.marketManager.FetchHistoricalDailyPrices(token, currency, daysToFetch, false, 1)
    if err != nil {
        return err
    }

    for i := 0; i < len(res); i++ {
        t := time.Unix(res[i].Timestamp, 0).UTC()
        yearDayIndex := t.YearDay() - 1
        yearValues, found := yearsMap[t.Year()]
        if found && yearDayIndex < len(yearValues) {
            yearValues[yearDayIndex] = float32(res[i].Value)
        }
    }

    // Fetch all-time data
    allTime, err := e.marketManager.FetchHistoricalDailyPrices(token, currency, 1, true, 30)
    if err != nil {
        return err
    }

    if e.allTimeCache == nil {
        e.allTimeCache = make(map[tokenType]map[currencyType][]allTimeEntry)
    }
    _, found = e.allTimeCache[token]
    if !found {
        e.allTimeCache[token] = make(map[currencyType][]allTimeEntry)
    }

    // There is no benefit in fetching intermediate values, so overwrite the historical data
    e.allTimeCache[token][currency] = make([]allTimeEntry, 0)

    for i := 0; i < len(allTime) && allTime[i].Timestamp < res[0].Timestamp; i++ {
        if allTime[i].Value > 0 {
            var endTimestamp int64
            if i+1 < len(allTime) {
                endTimestamp = allTime[i+1].Timestamp
            } else {
                endTimestamp = res[0].Timestamp
            }
            e.allTimeCache[token][currency] = append(e.allTimeCache[token][currency],
                allTimeEntry{
                    value:          float32(allTime[i].Value),
                    startTimestamp: allTime[i].Timestamp,
                    endTimestamp:   endTimestamp,
                })
        }
    }

    return nil
}

func extendDaysSliceForYear(yearsMap map[yearType][]float32, untilTime time.Time) (daysToFetch int) {
    year := untilTime.Year()
    _, found := yearsMap[year]
    if !found {
        yearsMap[year] = make([]float32, untilTime.YearDay())
        return untilTime.YearDay()
    }

    // Just extend the slice if needed
    missingDays := untilTime.YearDay() - len(yearsMap[year])
    yearsMap[year] = append(yearsMap[year], make([]float32, missingDays)...)
    return missingDays
}
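The daily cache is one float32 slice per year, indexed by day of the year, and extendDaysSliceForYear grows that slice up to untilTime.YearDay() while reporting how many newly appended days still need rates fetched. A standalone sketch of that accounting (missingDays and the example numbers are illustrative, not part of the vendored file):

package main

import (
    "fmt"
    "time"
)

// missingDays mirrors the accounting in extendDaysSliceForYear: the per-year slice
// is grown to cover untilTime.YearDay(), and the number of appended slots is how
// many daily rates still have to be fetched.
func missingDays(yearSlice []float32, untilTime time.Time) ([]float32, int) {
    missing := untilTime.YearDay() - len(yearSlice)
    if missing <= 0 {
        return yearSlice, 0
    }
    return append(yearSlice, make([]float32, missing)...), missing
}

func main() {
    // 40 cached daily rates, coverage requested up to Feb 19 (day 50 of the year).
    cached := make([]float32, 40)
    until := time.Date(2023, 2, 19, 0, 0, 0, 0, time.UTC)
    extended, toFetch := missingDays(cached, until)
    fmt.Println(len(extended), toFetch) // 50 10
}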
565
vendor/github.com/status-im/status-go/services/wallet/history/service.go
generated
vendored
Normal file
@@ -0,0 +1,565 @@
package history

import (
    "context"
    "database/sql"
    "errors"
    "math"
    "math/big"
    "reflect"
    "sort"
    "time"

    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/common/hexutil"
    "github.com/ethereum/go-ethereum/event"
    "github.com/ethereum/go-ethereum/log"

    statustypes "github.com/status-im/status-go/eth-node/types"
    "github.com/status-im/status-go/multiaccounts/accounts"
    "github.com/status-im/status-go/params"
    statusrpc "github.com/status-im/status-go/rpc"
    "github.com/status-im/status-go/rpc/chain"
    "github.com/status-im/status-go/rpc/network"

    "github.com/status-im/status-go/services/wallet/balance"
    "github.com/status-im/status-go/services/wallet/market"
    "github.com/status-im/status-go/services/wallet/token"
    "github.com/status-im/status-go/services/wallet/transfer"
    "github.com/status-im/status-go/services/wallet/walletevent"
)

const minPointsForGraph = 14 // for the minimal time frame (7 days), twice a day

// EventBalanceHistoryUpdateStarted and EventBalanceHistoryUpdateFinished are used to notify the UI that balance history is being updated
const (
    EventBalanceHistoryUpdateStarted           walletevent.EventType = "wallet-balance-history-update-started"
    EventBalanceHistoryUpdateFinished          walletevent.EventType = "wallet-balance-history-update-finished"
    EventBalanceHistoryUpdateFinishedWithError walletevent.EventType = "wallet-balance-history-update-finished-with-error"
)

type ValuePoint struct {
    Value     float64 `json:"value"`
    Timestamp uint64  `json:"time"`
}

type Service struct {
    balance         *Balance
    db              *sql.DB
    accountsDB      *accounts.Database
    eventFeed       *event.Feed
    rpcClient       *statusrpc.Client
    networkManager  *network.Manager
    tokenManager    *token.Manager
    serviceContext  context.Context
    cancelFn        context.CancelFunc
    transferWatcher *Watcher
    exchange        *Exchange
    balanceCache    balance.CacheIface
}

func NewService(db *sql.DB, accountsDB *accounts.Database, eventFeed *event.Feed, rpcClient *statusrpc.Client, tokenManager *token.Manager, marketManager *market.Manager, balanceCache balance.CacheIface) *Service {
    return &Service{
        balance:        NewBalance(NewBalanceDB(db)),
        db:             db,
        accountsDB:     accountsDB,
        eventFeed:      eventFeed,
        rpcClient:      rpcClient,
        networkManager: rpcClient.NetworkManager,
        tokenManager:   tokenManager,
        exchange:       NewExchange(marketManager),
        balanceCache:   balanceCache,
    }
}

func (s *Service) Stop() {
    if s.cancelFn != nil {
        s.cancelFn()
    }

    s.stopTransfersWatcher()
}

func (s *Service) triggerEvent(eventType walletevent.EventType, account statustypes.Address, message string) {
    s.eventFeed.Send(walletevent.Event{
        Type: eventType,
        Accounts: []common.Address{
            common.Address(account),
        },
        Message: message,
    })
}

func (s *Service) Start() {
    log.Debug("Starting balance history service")

    s.startTransfersWatcher()

    go func() {
        s.serviceContext, s.cancelFn = context.WithCancel(context.Background())

        err := s.updateBalanceHistory(s.serviceContext)
        if s.serviceContext.Err() != nil {
            s.triggerEvent(EventBalanceHistoryUpdateFinished, statustypes.Address{}, "Service canceled")
        }
        if err != nil {
            s.triggerEvent(EventBalanceHistoryUpdateFinishedWithError, statustypes.Address{}, err.Error())
        }
    }()
}

func (s *Service) mergeChainsBalances(chainIDs []uint64, addresses []common.Address, tokenSymbol string, fromTimestamp uint64, data map[uint64][]*entry) ([]*DataPoint, error) {
    log.Debug("Merging balances", "address", addresses, "tokenSymbol", tokenSymbol, "fromTimestamp", fromTimestamp, "len(data)", len(data))

    toTimestamp := uint64(time.Now().UTC().Unix())
    allData := make([]*entry, 0)

    // Add edge points per chain
    // Iterate over the chainIDs param, not the data keys, because data may not contain all the chains, but we need edge points for all of them
    for _, chainID := range chainIDs {
        // edge points are needed to properly calculate the total balance, as they contain the balance for the first and last timestamp
        chainData, err := s.balance.addEdgePoints(chainID, tokenSymbol, addresses, fromTimestamp, toTimestamp, data[chainID])
        if err != nil {
            return nil, err
        }
        allData = append(allData, chainData...)
    }

    // Sort by timestamp
    sort.Slice(allData, func(i, j int) bool {
        return allData[i].timestamp < allData[j].timestamp
    })

    log.Debug("Sorted balances", "len", len(allData))
    for _, entry := range allData {
        log.Debug("Sorted balances", "entry", entry)
    }

    // Add padding points to make the chart look nice
    if len(allData) < minPointsForGraph {
        allData, _ = addPaddingPoints(tokenSymbol, addresses, toTimestamp, allData, minPointsForGraph)
    }

    return entriesToDataPoints(allData)
}

// Expects sorted data
func entriesToDataPoints(data []*entry) ([]*DataPoint, error) {
    var resSlice []*DataPoint
    var groupedEntries []*entry // Entries with the same timestamp

    type AddressKey struct {
        Address common.Address
        ChainID uint64
    }

    sumBalances := func(balanceMap map[AddressKey]*big.Int) *big.Int {
        // Sum balances of all accounts and chains at the current timestamp
        sum := big.NewInt(0)
        for _, balance := range balanceMap {
            sum.Add(sum, balance)
        }
        return sum
    }

    updateBalanceMap := func(balanceMap map[AddressKey]*big.Int, entries []*entry) map[AddressKey]*big.Int {
        // Update balance map for this timestamp
        for _, entry := range entries {
            if entry.chainID == 0 {
                continue
            }
            key := AddressKey{
                Address: entry.address,
                ChainID: entry.chainID,
            }
            balanceMap[key] = entry.balance
        }
        return balanceMap
    }

    // The balance map always contains the current balance for each address at a specific timestamp.
    // It is required to sum up balances from the previous timestamp for accounts not present in the current one.
    balanceMap := make(map[AddressKey]*big.Int)

    for _, entry := range data {
        if len(groupedEntries) > 0 {
            if entry.timestamp == groupedEntries[0].timestamp {
                groupedEntries = append(groupedEntries, entry)
                continue
            } else {
                // Split grouped entries into addresses
                balanceMap = updateBalanceMap(balanceMap, groupedEntries)
                // Calculate balance for all the addresses
                cumulativeBalance := sumBalances(balanceMap)
                // Points in slice contain balances for all chains
                resSlice = appendPointToSlice(resSlice, &DataPoint{
                    Timestamp: uint64(groupedEntries[0].timestamp),
                    Balance:   (*hexutil.Big)(cumulativeBalance),
                })

                // Reset grouped entries
                groupedEntries = nil
                groupedEntries = append(groupedEntries, entry)
            }
        } else {
            groupedEntries = append(groupedEntries, entry)
        }
    }

    // If only edge points are present, groupedEntries will be non-empty
    if len(groupedEntries) > 0 {
        // Split grouped entries into addresses
        balanceMap = updateBalanceMap(balanceMap, groupedEntries)
        // Calculate balance for all the addresses
        cumulativeBalance := sumBalances(balanceMap)
        resSlice = appendPointToSlice(resSlice, &DataPoint{
            Timestamp: uint64(groupedEntries[0].timestamp),
            Balance:   (*hexutil.Big)(cumulativeBalance),
        })
    }

    return resSlice, nil
}

func appendPointToSlice(slice []*DataPoint, point *DataPoint) []*DataPoint {
    // Replace the last point in the slice if it has the same timestamp, or add a new one if different
    if len(slice) > 0 {
        if slice[len(slice)-1].Timestamp != point.Timestamp {
            // Timestamps are different, appending to slice
            slice = append(slice, point)
        } else {
            // Replace last item in slice because timestamps are the same
            slice[len(slice)-1] = point
        }
    } else {
        slice = append(slice, point)
    }

    return slice
}

// GetBalanceHistory returns the balance history of the given token, converted to the given currency
func (s *Service) GetBalanceHistory(ctx context.Context, chainIDs []uint64, addresses []common.Address, tokenSymbol string, currencySymbol string, fromTimestamp uint64) ([]*ValuePoint, error) {
    log.Debug("GetBalanceHistory", "chainIDs", chainIDs, "address", addresses, "tokenSymbol", tokenSymbol, "currencySymbol", currencySymbol, "fromTimestamp", fromTimestamp)

    chainDataMap := make(map[uint64][]*entry)
    for _, chainID := range chainIDs {
        chainData, err := s.balance.get(ctx, chainID, tokenSymbol, addresses, fromTimestamp) // TODO Make chainID a slice?
        if err != nil {
            return nil, err
        }

        if len(chainData) == 0 {
            continue
        }

        chainDataMap[chainID] = chainData
    }

    // Need to get the balance for all the chains for the first timestamp, otherwise total values will be incorrect
    data, err := s.mergeChainsBalances(chainIDs, addresses, tokenSymbol, fromTimestamp, chainDataMap)

    if err != nil {
        return nil, err
    } else if len(data) == 0 {
        return make([]*ValuePoint, 0), nil
    }

    return s.dataPointsToValuePoints(chainIDs, tokenSymbol, currencySymbol, data)
}

func (s *Service) dataPointsToValuePoints(chainIDs []uint64, tokenSymbol string, currencySymbol string, data []*DataPoint) ([]*ValuePoint, error) {
    if len(data) == 0 {
        return make([]*ValuePoint, 0), nil
    }

    // Check if the historical exchange rate for the data point is present and fetch the remaining rates if not
    lastDayTime := time.Unix(int64(data[len(data)-1].Timestamp), 0).UTC()
    currentTime := time.Now().UTC()
    currentDayStart := time.Date(currentTime.Year(), currentTime.Month(), currentTime.Day(), 0, 0, 0, 0, time.UTC)
    if lastDayTime.After(currentDayStart) {
        // Today's rate is not available yet, use the previous day's value for the last data point
        lastDayTime = lastDayTime.AddDate(0, 0, -1)
    }

    lastDayValue, err := s.exchange.GetExchangeRateForDay(tokenSymbol, currencySymbol, lastDayTime)
    if err != nil {
        err := s.exchange.FetchAndCacheMissingRates(tokenSymbol, currencySymbol)
        if err != nil {
            log.Error("Error fetching exchange rates", "tokenSymbol", tokenSymbol, "currencySymbol", currencySymbol, "err", err)
            return nil, err
        }

        lastDayValue, err = s.exchange.GetExchangeRateForDay(tokenSymbol, currencySymbol, lastDayTime)
        if err != nil {
            log.Error("Exchange rate missing for", "tokenSymbol", tokenSymbol, "currencySymbol", currencySymbol, "lastDayTime", lastDayTime, "err", err)
            return nil, err
        }
    }

    decimals, err := s.decimalsForToken(tokenSymbol, chainIDs[0])
    if err != nil {
        return nil, err
    }
    weisInOneMain := big.NewFloat(math.Pow(10, float64(decimals)))

    var res []*ValuePoint
    for _, d := range data {
        var dayValue float32
        dayTime := time.Unix(int64(d.Timestamp), 0).UTC()
        if dayTime.After(currentDayStart) {
            // Today's rate is not available yet, use the previous day's value for the last data point
            if lastDayValue > 0 {
                dayValue = lastDayValue
            } else {
                log.Warn("Exchange rate missing for", "dayTime", dayTime, "err", err)
                continue
            }
        } else {
            dayValue, err = s.exchange.GetExchangeRateForDay(tokenSymbol, currencySymbol, dayTime)
            if err != nil {
                log.Warn("Exchange rate missing for", "dayTime", dayTime, "err", err)
                continue
            }
        }

        // The big.Int values are discarded, hence copy the original values
        res = append(res, &ValuePoint{
            Timestamp: d.Timestamp,
            Value:     tokenToValue((*big.Int)(d.Balance), dayValue, weisInOneMain),
        })
    }

    return res, nil
}

func (s *Service) decimalsForToken(tokenSymbol string, chainID uint64) (int, error) {
    network := s.networkManager.Find(chainID)
    if network == nil {
        return 0, errors.New("network not found")
    }
    token := s.tokenManager.FindToken(network, tokenSymbol)
    if token == nil {
        return 0, errors.New("token not found")
    }
    return int(token.Decimals), nil
}

func tokenToValue(tokenCount *big.Int, mainDenominationValue float32, weisInOneMain *big.Float) float64 {
    weis := new(big.Float).SetInt(tokenCount)
    mainTokens := new(big.Float).Quo(weis, weisInOneMain)
    mainTokenValue := new(big.Float).SetFloat64(float64(mainDenominationValue))
    res, accuracy := new(big.Float).Mul(mainTokens, mainTokenValue).Float64()
    if res == 0 && accuracy == big.Below {
        return math.SmallestNonzeroFloat64
    } else if res == math.Inf(1) && accuracy == big.Above {
        return math.Inf(1)
    }

    return res
}

// updateBalanceHistory iterates over all networks depending on test/prod for the s.visibleTokenSymbol
// and updates the balance history for the given address
//
// expects ctx to have cancellation support and processing to be cancelled by the caller
func (s *Service) updateBalanceHistory(ctx context.Context) error {
    log.Debug("updateBalanceHistory started")

    addresses, err := s.accountsDB.GetWalletAddresses()
    if err != nil {
        return err
    }

    areTestNetworksEnabled, err := s.accountsDB.GetTestNetworksEnabled()
    if err != nil {
        return err
    }

    onlyEnabledNetworks := false
    networks, err := s.networkManager.Get(onlyEnabledNetworks)
    if err != nil {
        return err
    }

    for _, address := range addresses {
        s.triggerEvent(EventBalanceHistoryUpdateStarted, address, "")

        for _, network := range networks {
            if network.IsTest != areTestNetworksEnabled {
                continue
            }

            entries, err := s.balance.db.getEntriesWithoutBalances(network.ChainID, common.Address(address))
            if err != nil {
                log.Error("Error getting blocks without balances", "chainID", network.ChainID, "address", address.String(), "err", err)
                return err
            }

            log.Debug("Blocks without balances", "chainID", network.ChainID, "address", address.String(), "entries", entries)

            client, err := s.rpcClient.EthClient(network.ChainID)
            if err != nil {
                log.Error("Error getting client", "chainID", network.ChainID, "address", address.String(), "err", err)
                return err
            }

            err = s.addEntriesToDB(ctx, client, network, address, entries)
            if err != nil {
                return err
            }
        }
        s.triggerEvent(EventBalanceHistoryUpdateFinished, address, "")
    }

    log.Debug("updateBalanceHistory finished")
    return nil
}

func (s *Service) addEntriesToDB(ctx context.Context, client chain.ClientInterface, network *params.Network, address statustypes.Address, entries []*entry) (err error) {
    for _, entry := range entries {
        var balance *big.Int
        // tokenAddress is zero for the native currency
        if (entry.tokenAddress == common.Address{}) {
            // Check in cache
            balance = s.balanceCache.GetBalance(common.Address(address), network.ChainID, entry.block)
            log.Debug("Balance from cache", "chainID", network.ChainID, "address", address.String(), "block", entry.block, "balance", balance)

            if balance == nil {
                balance, err = client.BalanceAt(ctx, common.Address(address), entry.block)
                if balance == nil {
                    log.Error("Error getting balance", "chainID", network.ChainID, "address", address.String(), "err", err, "unwrapped", errors.Unwrap(err))
                    return err
                }
                time.Sleep(50 * time.Millisecond) // TODO Remove this sleep after fixing exceeding rate limit
            }
            entry.tokenSymbol = network.NativeCurrencySymbol
        } else {
            // Check first whether the token is supported
            token := s.tokenManager.FindTokenByAddress(network.ChainID, entry.tokenAddress)
            if token == nil {
                log.Warn("Token not found", "chainID", network.ChainID, "address", address.String(), "tokenAddress", entry.tokenAddress.String())
                // TODO Add "supported=false" flag to such tokens to avoid checking them again and again
                continue // Skip tokens that we don't have a symbol for. For example, we don't have tokens in the store for Goerli Optimism
            } else {
                entry.tokenSymbol = token.Symbol
            }

            // Check balance for token
            balance, err = s.tokenManager.GetTokenBalanceAt(ctx, client, common.Address(address), entry.tokenAddress, entry.block)
            log.Debug("Balance from token manager", "chainID", network.ChainID, "address", address.String(), "block", entry.block, "balance", balance)

            if err != nil {
                log.Error("Error getting token balance", "chainID", network.ChainID, "address", address.String(), "tokenAddress", entry.tokenAddress.String(), "err", err)
                return err
            }
        }

        entry.balance = balance
        err = s.balance.db.add(entry)
        if err != nil {
            log.Error("Error adding balance", "chainID", network.ChainID, "address", address.String(), "err", err)
            return err
        }
    }

    return nil
}

func (s *Service) startTransfersWatcher() {
    if s.transferWatcher != nil {
        return
    }

    transferLoadedCb := func(chainID uint64, addresses []common.Address, block *big.Int) {
        log.Debug("Balance history watcher: transfer loaded:", "chainID", chainID, "addresses", addresses, "block", block.Uint64())

        client, err := s.rpcClient.EthClient(chainID)
        if err != nil {
            log.Error("Error getting client", "chainID", chainID, "err", err)
            return
        }

        transferDB := transfer.NewDB(s.db)

        for _, address := range addresses {
            network := s.networkManager.Find(chainID)

            transfers, err := transferDB.GetTransfersByAddressAndBlock(chainID, address, block, 1500) // 1500 is quite arbitrary and far from real, but should be enough to cover all transfers in a block
            if err != nil {
                log.Error("Error getting transfers", "chainID", chainID, "address", address.String(), "err", err)
                continue
            }

            if len(transfers) == 0 {
                log.Debug("No transfers found", "chainID", chainID, "address", address.String(), "block", block.Uint64())
                continue
            }

            entries := transfersToEntries(address, block, transfers) // TODO Remove address and block after testing that they match
            unique := removeDuplicates(entries)
            log.Debug("Entries after filtering", "entries", entries, "unique", unique)

            err = s.addEntriesToDB(s.serviceContext, client, network, statustypes.Address(address), unique)
            if err != nil {
                log.Error("Error adding entries to DB", "chainID", chainID, "address", address.String(), "err", err)
                continue
            }

            // No event is triggered here, because no one cares about balance history updates yet
        }
    }

    s.transferWatcher = NewWatcher(s.eventFeed, transferLoadedCb)
    s.transferWatcher.Start()
}

func removeDuplicates(entries []*entry) []*entry {
    unique := make([]*entry, 0, len(entries))
    for _, entry := range entries {
        found := false
        for _, u := range unique {
            if reflect.DeepEqual(entry, u) {
                found = true
                break
            }
        }
        if !found {
            unique = append(unique, entry)
        }
    }

    return unique
}

func transfersToEntries(address common.Address, block *big.Int, transfers []transfer.Transfer) []*entry {
    entries := make([]*entry, 0)

    for _, transfer := range transfers {
        if transfer.Address != address {
            panic("Address mismatch") // coding error
        }

        if transfer.BlockNumber.Cmp(block) != 0 {
            panic("Block number mismatch") // coding error
        }
        entry := &entry{
            chainID:      transfer.NetworkID,
            address:      transfer.Address,
            tokenAddress: transfer.Receipt.ContractAddress,
            block:        transfer.BlockNumber,
            timestamp:    (int64)(transfer.Timestamp),
        }

        entries = append(entries, entry)
    }

    return entries
}

func (s *Service) stopTransfersWatcher() {
    if s.transferWatcher != nil {
        s.transferWatcher.Stop()
        s.transferWatcher = nil
    }
}
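tokenToValue converts a raw on-chain amount to a fiat value by dividing by 10^decimals and multiplying by that day's exchange rate, doing the work in big.Float so the raw wei amounts do not lose precision in float64 too early. A standalone sketch with hypothetical numbers (weiToFiat and the rate of 2000 are illustrative, not part of the vendored file):

package main

import (
    "fmt"
    "math"
    "math/big"
)

// weiToFiat mirrors the arithmetic in tokenToValue: raw units / 10^decimals * daily rate.
func weiToFiat(raw *big.Int, decimals int, rate float64) float64 {
    weisInOneMain := big.NewFloat(math.Pow(10, float64(decimals)))
    mainTokens := new(big.Float).Quo(new(big.Float).SetInt(raw), weisInOneMain)
    out, _ := new(big.Float).Mul(mainTokens, big.NewFloat(rate)).Float64()
    return out
}

func main() {
    // 1.5 ETH expressed in wei, at a hypothetical rate of 2000 per ETH, is worth 3000.
    raw, _ := new(big.Int).SetString("1500000000000000000", 10)
    fmt.Println(weiToFiat(raw, 18, 2000))
}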
75
vendor/github.com/status-im/status-go/services/wallet/history/transfers_watcher.go
generated
vendored
Normal file
@@ -0,0 +1,75 @@
package history

import (
    "context"
    "math/big"

    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/event"
    "github.com/ethereum/go-ethereum/log"
    "github.com/status-im/status-go/services/wallet/async"
    "github.com/status-im/status-go/services/wallet/transfer"
    "github.com/status-im/status-go/services/wallet/walletevent"
)

type TransfersLoadedCb func(chainID uint64, addresses []common.Address, block *big.Int)

// Watcher executes a given callback whenever new transfers are loaded for the watched accounts
type Watcher struct {
    feed     *event.Feed
    group    *async.Group
    callback TransfersLoadedCb
}

func NewWatcher(feed *event.Feed, callback TransfersLoadedCb) *Watcher {
    return &Watcher{
        feed:     feed,
        callback: callback,
    }
}

func (w *Watcher) Start() {
    if w.group != nil {
        return
    }

    w.group = async.NewGroup(context.Background())
    w.group.Add(func(ctx context.Context) error {
        return watch(ctx, w.feed, w.callback)
    })
}

func (w *Watcher) Stop() {
    if w.group != nil {
        w.group.Stop()
        w.group.Wait()
        w.group = nil
    }
}

func onTransfersLoaded(callback TransfersLoadedCb, chainID uint64, addresses []common.Address, blockNum *big.Int) {
    if callback != nil {
        callback(chainID, addresses, blockNum)
    }
}

func watch(ctx context.Context, feed *event.Feed, callback TransfersLoadedCb) error {
    ch := make(chan walletevent.Event, 100)
    sub := feed.Subscribe(ch)
    defer sub.Unsubscribe()

    for {
        select {
        case <-ctx.Done():
            return nil
        case err := <-sub.Err():
            if err != nil {
                log.Error("history: transfers watcher subscription failed", "error", err)
            }
        case ev := <-ch:
            if ev.Type == transfer.EventNewTransfers {
                onTransfersLoaded(callback, ev.ChainID, ev.Accounts, ev.BlockNumber)
            }
        }
    }
}
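The watch loop is a plain event.Feed subscription drained in a select until the context is cancelled; the service wires it up with NewWatcher(s.eventFeed, transferLoadedCb) followed by Start(). A self-contained sketch of the same subscribe/select pattern using a local event type (demoEvent, watchDemo and the sleeps are illustrative, not part of the vendored file):

package main

import (
    "context"
    "fmt"
    "time"

    "github.com/ethereum/go-ethereum/event"
)

type demoEvent struct{ Block uint64 }

// watchDemo mirrors the loop in watch(): it drains a feed until the context is
// cancelled and hands each event to a callback.
func watchDemo(ctx context.Context, feed *event.Feed, cb func(demoEvent)) {
    ch := make(chan demoEvent, 100)
    sub := feed.Subscribe(ch)
    defer sub.Unsubscribe()

    for {
        select {
        case <-ctx.Done():
            return
        case ev := <-ch:
            cb(ev)
        }
    }
}

func main() {
    var feed event.Feed
    ctx, cancel := context.WithCancel(context.Background())
    done := make(chan struct{})

    go func() {
        watchDemo(ctx, &feed, func(ev demoEvent) { fmt.Println("new transfers at block", ev.Block) })
        close(done)
    }()

    time.Sleep(10 * time.Millisecond) // crude way to let the subscription register before sending
    feed.Send(demoEvent{Block: 42})
    time.Sleep(10 * time.Millisecond)
    cancel()
    <-done
}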