Skip to content

Commit

Permalink
HIP-584 Historical: Adapt DB Accessors to work with timestamp filters (
Browse files Browse the repository at this point in the history
…hashgraph#7242)

Adapt DB Accessors to work with timestamp filters by using the ContractCallContext's record file timestamp if it is set. If it is not set, use the non-historical queries.

---------

Signed-off-by: Bilyana Gospodinova <[email protected]>
Signed-off-by: Ivan Ivanov <[email protected]>
Co-authored-by: Bilyana Gospodinova <[email protected]>
  • Loading branch information
0xivanov and bilyana-gospodinova authored Dec 13, 2023
1 parent ad75a33 commit a1b9c41
Show file tree
Hide file tree
Showing 37 changed files with 888 additions and 221 deletions.
2 changes: 1 addition & 1 deletion hedera-mirror-test/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ uses [Spring Boot](https://spring.io/projects/spring-boot) properties to configu
include:

| Name | Default | Description |
|----------------------------------------------------------------|----------------------------------------------|-----------------------------------------------------------------------------------------------------------|
| -------------------------------------------------------------- | -------------------------------------------- | --------------------------------------------------------------------------------------------------------- |
| `hedera.mirror.test.acceptance.backOffPeriod` | 5s | The amount of time the client will wait before retrying a retryable failure. |
| `hedera.mirror.test.acceptance.createOperatorAccount` | true | Whether to create a separate operator account to run the acceptance tests. |
| `hedera.mirror.test.acceptance.emitBackgroundMessages` | false | Whether background topic messages should be emitted. |
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,5 +35,4 @@
backoff = @Backoff(delayExpression = "#{@restPollingProperties.minBackoff.toMillis()}"),
maxAttemptsExpression = "#{@restPollingProperties.maxAttempts}")
@Target({ElementType.METHOD, ElementType.TYPE})
public @interface RetryAsserts {
}
public @interface RetryAsserts {}
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import com.hedera.mirror.web3.evm.store.CachingStateFrame;
import com.hedera.mirror.web3.evm.store.StackedStateFrames;
import java.util.EmptyStackException;
import java.util.Optional;
import lombok.Getter;
import lombok.Setter;

Expand All @@ -30,9 +31,7 @@ public class ContractCallContext implements AutoCloseable {
private static final ThreadLocal<ContractCallContext> THREAD_LOCAL = ThreadLocal.withInitial(() -> null);

/**
* Long value which stores the block timestamp used for filtering of historical data.
* A value of UNSET_TIMESTAMP indicates that the timestamp is unset or disabled for filtering.
* Any value other than UNSET_TIMESTAMP that is a valid timestamp should be considered for filtering operations.
* Record file which stores the block timestamp and other historical block details used for filtering of historical data.
*/
@Setter
private RecordFile recordFile;
Expand All @@ -58,11 +57,8 @@ public static ContractCallContext get() {
* (required!) from the RO-cache without touching the database again - if you cut back the stack between executions
* using this method.)
*/
public static ContractCallContext init(final StackedStateFrames stackedStateFrames) {
public static ContractCallContext init() {
var context = new ContractCallContext();
if (stackedStateFrames != null) {
context.stackBase = context.stack = stackedStateFrames.getInitializedStackBase();
}
THREAD_LOCAL.set(context);
return context;
}
Expand Down Expand Up @@ -95,6 +91,22 @@ public void updateStackFromUpstream() {
setStack(stack.getUpstream().orElseThrow(EmptyStackException::new));
}

/**
 * Initializes this context's stack of state frames from the given {@code stackedStateFrames}: the
 * base of the stack becomes a database-backed frame (optionally filtered by a historical
 * timestamp) with a read-only caching frame on top, and both {@code stackBase} and {@code stack}
 * point at it. A {@code null} argument leaves the stack untouched.
 *
 * NOTE(review): the previous Javadoc here was a copy of the stack-reset/`init` comment (see the
 * identical text above) and did not describe this method; replaced with an accurate description.
 */
public void initializeStackFrames(final StackedStateFrames stackedStateFrames) {
if (stackedStateFrames != null) {
// Use the record file's consensusEnd as the historical timestamp filter; empty Optional
// means no record file is set, i.e. non-historical (latest state) execution.
final var timestamp = Optional.ofNullable(recordFile).map(RecordFile::getConsensusEnd);
stackBase = stack = stackedStateFrames.getInitializedStackBase(timestamp);
}
}

/**
 * Returns {@code true} when a record file has been set on this context, i.e. the call should be
 * executed against historical data filtered by that record file's timestamp.
 */
public boolean useHistorical() {
return recordFile != null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@

package com.hedera.mirror.web3.evm.store;

import com.hedera.mirror.web3.evm.exception.EvmException;
import com.hedera.mirror.web3.evm.store.accessor.DatabaseAccessor;
import java.io.Serial;
import java.util.List;
import java.util.Map;
import java.util.Optional;
Expand All @@ -30,13 +32,17 @@ public class DatabaseBackedStateFrame<K> extends CachingStateFrame<K> {
@NonNull
final Map<Class<?>, DatabaseAccessor<K, ?>> databaseAccessors;

final Optional<Long> timestamp;

public DatabaseBackedStateFrame(
@NonNull final List<DatabaseAccessor<K, ?>> accessors, @NonNull final Class<?>[] valueClasses) {
@NonNull final List<DatabaseAccessor<K, ?>> accessors,
@NonNull final Class<?>[] valueClasses,
final Optional<Long> timestamp) {
super(
Optional.empty(),
valueClasses); // superclass of this frame will create/hold useless UpdatableReferenceCaches

databaseAccessors = accessors.stream().collect(Collectors.toMap(DatabaseAccessor::getValueClass, a -> a));
this.timestamp = timestamp;
}

@Override
Expand All @@ -47,7 +53,7 @@ public Optional<Object> getValue(
if (databaseAccessor == null) {
throw new NullPointerException("no available accessor for given klass");
}
return databaseAccessor.get(key).flatMap(o -> Optional.of(klass.cast(o)));
return databaseAccessor.get(key, timestamp).flatMap(o -> Optional.of(klass.cast(o)));
}

@Override
Expand All @@ -74,4 +80,16 @@ public void updatesFromDownstream(@NonNull final CachingStateFrame<K> childFrame
/**
 * Always throws: a database-backed state frame is a read-only source of truth, so there is
 * nothing to commit changes into.
 *
 * @throws UnsupportedOperationException always
 */
public void commit() {
throw new UnsupportedOperationException("Cannot commit to a database-backed StateFrame (oddly enough)");
}

/**
 * Signals that a type error occurred with the <em>key</em> type, i.e. an accessor was asked to
 * fetch by a key of a class it does not support.
 *
 * NOTE(review): the previous comment said "_value_ type", which contradicts the class name and
 * its usage (thrown on an unsupported key class); corrected to "key".
 */
@SuppressWarnings("java:S110")
public static class DatabaseAccessIncorrectKeyTypeException extends EvmException {

@Serial
private static final long serialVersionUID = 1163169205069277931L;

public DatabaseAccessIncorrectKeyTypeException(@NonNull final String message) {
super(message);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,8 @@ public StackedStateFrames(@NonNull final List<DatabaseAccessor<Object, ?>> acces
// so that you can commit specific changes from a nested transaction
}

public CachingStateFrame<Object> getInitializedStackBase() {
final var database = new DatabaseBackedStateFrame<>(accessors, valueClasses);
/**
 * Builds the base of a frame stack: a database-backed frame (resolving reads through the
 * registered accessors, optionally filtered by a historical timestamp) wrapped in a read-only
 * caching frame.
 *
 * @param timestamp optional block timestamp used to filter historical data; empty means latest state
 * @return the read-only caching frame forming the base of the stack
 */
public CachingStateFrame<Object> getInitializedStackBase(final Optional<Long> timestamp) {
    return new ROCachingStateFrame<>(
            Optional.of(new DatabaseBackedStateFrame<>(accessors, valueClasses, timestamp)), valueClasses);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.TokenID;
import java.util.List;
import java.util.Optional;
import org.hyperledger.besu.datatypes.Address;

/**
Expand Down Expand Up @@ -93,6 +94,8 @@ public interface Store {

boolean exists(Address accountID);

Optional<Long> getHistoricalTimestamp();

enum OnMissing {
THROW,
DONT_THROW
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
import jakarta.inject.Named;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import org.hyperledger.besu.datatypes.Address;

Expand Down Expand Up @@ -271,6 +272,22 @@ public boolean exists(final Address address) {
return account.isPresent();
}

/**
 * Returns the historical block timestamp this store is operating under, taken from the
 * database-backed frame at the bottom of the frame stack; empty when the bottom frame is not
 * database-backed or carries no timestamp (non-historical execution).
 */
@SuppressWarnings("unchecked")
@Override
public Optional<Long> getHistoricalTimestamp() {
return stackedStateFrames
.top()
.upstreamFrame
// walk one more level upstream (flattens the nested Optional of frames)
.flatMap(CachingStateFrame::getUpstream)
// only a DatabaseBackedStateFrame carries a timestamp
.filter(DatabaseBackedStateFrame.class::isInstance)
// raw-type cast — hence the @SuppressWarnings("unchecked") on this method
.map(DatabaseBackedStateFrame.class::cast)
// the frame's timestamp is itself an Optional, so flatMap rather than map
.flatMap(databaseBackedStateFrame -> databaseBackedStateFrame.timestamp);
}

/**
* Returns a {@link Token} model with loaded unique tokens
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import com.hedera.mirror.common.domain.entity.Entity;
import com.hedera.mirror.common.domain.entity.EntityId;
import com.hedera.mirror.web3.evm.exception.WrongTypeException;
import com.hedera.mirror.web3.evm.store.DatabaseBackedStateFrame.DatabaseAccessIncorrectKeyTypeException;
import com.hedera.mirror.web3.repository.CryptoAllowanceRepository;
import com.hedera.mirror.web3.repository.NftAllowanceRepository;
import com.hedera.mirror.web3.repository.NftRepository;
Expand All @@ -51,6 +52,7 @@
import java.util.stream.Collectors;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import org.hyperledger.besu.datatypes.Address;

@Named
@RequiredArgsConstructor
Expand All @@ -64,21 +66,24 @@ public class AccountDatabaseAccessor extends DatabaseAccessor<Object, Account> {
private final NftAllowanceRepository nftAllowanceRepository;
private final NftRepository nftRepository;
private final TokenAllowanceRepository tokenAllowanceRepository;

private final CryptoAllowanceRepository cryptoAllowanceRepository;
private final TokenAccountRepository tokenAccountRepository;

@Override
public @NonNull Optional<Account> get(@NonNull Object address) {
return entityDatabaseAccessor.get(address).map(this::accountFromEntity);
/**
 * Fetches the {@link Account} mapped to the given key, optionally as of a historical timestamp.
 *
 * @param key must be a besu {@link Address}; any other key class is a programming error
 * @param timestamp optional block timestamp used to filter historical data; empty means latest state
 * @return the mapped account, or empty when no matching entity exists
 * @throws DatabaseAccessIncorrectKeyTypeException when {@code key} is not an {@link Address}
 */
public @NonNull Optional<Account> get(@NonNull Object key, final Optional<Long> timestamp) {
    if (!(key instanceof Address address)) {
        throw new DatabaseAccessIncorrectKeyTypeException("Accessor for class %s failed to fetch by key of type %s"
                .formatted(Account.class.getTypeName(), key.getClass().getTypeName()));
    }
    return entityDatabaseAccessor.get(address, timestamp).map(entity -> accountFromEntity(entity, timestamp));
}

private Account accountFromEntity(Entity entity) {
private Account accountFromEntity(Entity entity, final Optional<Long> timestamp) {
if (!ACCOUNT.equals(entity.getType()) && !CONTRACT.equals(entity.getType())) {
throw new WrongTypeException("Trying to map an account/contract from a different type");
}

final var tokenAssociationsCounts = getNumberOfAllAndPositiveBalanceTokenAssociations(entity.getId());
final var tokenAssociationsCounts =
getNumberOfAllAndPositiveBalanceTokenAssociations(entity.getId(), timestamp);
return new Account(
entity.getEvmAddress() != null && entity.getEvmAddress().length > 0
? ByteString.copyFrom(entity.getEvmAddress())
Expand All @@ -88,13 +93,13 @@ private Account accountFromEntity(Entity entity) {
TimeUnit.SECONDS.convert(entity.getEffectiveExpiration(), TimeUnit.NANOSECONDS),
Optional.ofNullable(entity.getBalance()).orElse(0L),
Optional.ofNullable(entity.getDeleted()).orElse(false),
getOwnedNfts(entity.getId()),
getOwnedNfts(entity.getId(), timestamp),
Optional.ofNullable(entity.getAutoRenewPeriod()).orElse(DEFAULT_AUTO_RENEW_PERIOD),
idFromEntityId(entity.getProxyAccountId()),
Optional.ofNullable(entity.getMaxAutomaticTokenAssociations()).orElse(0),
getCryptoAllowances(entity.getId()),
getFungibleTokenAllowances(entity.getId()),
getApproveForAllNfts(entity.getId()),
getCryptoAllowances(entity.getId(), timestamp),
getFungibleTokenAllowances(entity.getId(), timestamp),
getApproveForAllNfts(entity.getId(), timestamp),
tokenAssociationsCounts.all(),
tokenAssociationsCounts.positive(),
0,
Expand All @@ -106,21 +111,30 @@ private Account accountFromEntity(Entity entity) {
: 0L);
}

private long getOwnedNfts(Long accountId) {
return nftRepository.countByAccountIdNotDeleted(accountId);
/**
 * Counts the non-deleted NFTs owned by the account, using the historical (timestamp-filtered)
 * query when a block timestamp is present and the latest-state query otherwise.
 */
private long getOwnedNfts(Long accountId, final Optional<Long> timestamp) {
    if (timestamp.isPresent()) {
        return nftRepository.countByAccountIdAndTimestampNotDeleted(accountId, timestamp.get());
    }
    return nftRepository.countByAccountIdNotDeleted(accountId);
}

private SortedMap<EntityNum, Long> getCryptoAllowances(Long ownerId) {
return cryptoAllowanceRepository.findByOwner(ownerId).stream()
/**
 * Loads the account's crypto allowances keyed by spender, sorted by spender number. Uses the
 * historical (timestamp-filtered) query when a block timestamp is present, otherwise the
 * latest-state query. Duplicate spenders are rejected by {@code NO_DUPLICATE_MERGE_FUNCTION}.
 */
private SortedMap<EntityNum, Long> getCryptoAllowances(Long ownerId, final Optional<Long> timestamp) {
    final var allowances = timestamp
            .map(t -> cryptoAllowanceRepository.findByOwnerAndTimestamp(ownerId, t))
            .orElseGet(() -> cryptoAllowanceRepository.findByOwner(ownerId));
    return allowances.stream()
            .collect(Collectors.toMap(
                    allowance -> entityNumFromId(EntityId.of(allowance.getSpender())),
                    CryptoAllowance::getAmount,
                    NO_DUPLICATE_MERGE_FUNCTION,
                    TreeMap::new));
}

private SortedMap<FcTokenAllowanceId, Long> getFungibleTokenAllowances(Long ownerId) {
return tokenAllowanceRepository.findByOwner(ownerId).stream()
private SortedMap<FcTokenAllowanceId, Long> getFungibleTokenAllowances(
Long ownerId, final Optional<Long> timestamp) {
return timestamp
.map(t -> tokenAllowanceRepository.findByOwnerAndTimestamp(ownerId, t))
.orElseGet(() -> tokenAllowanceRepository.findByOwner(ownerId))
.stream()
.collect(Collectors.toMap(
tokenAllowance -> new FcTokenAllowanceId(
entityNumFromId(EntityId.of(tokenAllowance.getTokenId())),
Expand All @@ -130,8 +144,11 @@ private SortedMap<FcTokenAllowanceId, Long> getFungibleTokenAllowances(Long owne
TreeMap::new));
}

private SortedSet<FcTokenAllowanceId> getApproveForAllNfts(Long ownerId) {
return nftAllowanceRepository.findByOwnerAndApprovedForAllIsTrue(ownerId).stream()
private SortedSet<FcTokenAllowanceId> getApproveForAllNfts(Long ownerId, final Optional<Long> timestamp) {
return timestamp
.map(t -> nftAllowanceRepository.findByOwnerAndTimestampAndApprovedForAllIsTrue(ownerId, t))
.orElseGet(() -> nftAllowanceRepository.findByOwnerAndApprovedForAllIsTrue(ownerId))
.stream()
.map(nftAllowance -> new FcTokenAllowanceId(
entityNumFromId(EntityId.of(nftAllowance.getTokenId())),
entityNumFromId(EntityId.of(nftAllowance.getSpender()))))
Expand All @@ -142,8 +159,13 @@ private EntityNum entityNumFromId(EntityId entityId) {
return EntityNum.fromLong(entityId.getNum());
}

private TokenAccountBalances getNumberOfAllAndPositiveBalanceTokenAssociations(long accountId) {
final var counts = tokenAccountRepository.countByAccountIdAndAssociatedGroupedByBalanceIsPositive(accountId);
private TokenAccountBalances getNumberOfAllAndPositiveBalanceTokenAssociations(
long accountId, final Optional<Long> timestamp) {
var counts = timestamp
.map(t -> tokenAccountRepository.countByAccountIdAndTimestampAndAssociatedGroupedByBalanceIsPositive(
accountId, t))
.orElseGet(() ->
tokenAccountRepository.countByAccountIdAndAssociatedGroupedByBalanceIsPositive(accountId));
int all = 0;
int positive = 0;

Expand All @@ -165,6 +187,5 @@ private JKey parseJkey(byte[] keyBytes) {
}
}

private record TokenAccountBalances(int all, int positive) {
}
private record TokenAccountBalances(int all, int positive) {}
}
Loading

0 comments on commit a1b9c41

Please sign in to comment.