Commit

Merge branch 'main' into add_type_annotation

areshand authored Dec 24, 2024
2 parents 8f3f680 + 6b09e63 commit ea0a381
Showing 136 changed files with 4,539 additions and 3,198 deletions.
17 changes: 5 additions & 12 deletions .github/workflows/forge-stable.yaml
@@ -49,10 +49,8 @@ on:
description: The number of test jobs to run in parallel. If not specified, defaults to 1
default: 1

# NOTE: to support testing different branches on different schedules, you need to specify the cron schedule in the 'determine-test-branch' step as well below
# Reference: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule
schedule:
- cron: "0 22 * * 0,2,4" # The main branch cadence. This runs every Sun,Tues,Thurs
- cron: "0 22 * * 1-5" # The main branch cadence. This runs every Mon-Fri
pull_request:
paths:
- ".github/workflows/forge-stable.yaml"
@@ -147,24 +145,19 @@ jobs:
id: determine-test-branch
# NOTE: the schedule cron MUST match the one in the 'on.schedule.cron' section above
run: |
BRANCH=""
if [[ "${{ github.event_name }}" == "schedule" ]]; then
if [[ "${{ github.event.schedule }}" == "0 22 * * 0,2,4" ]]; then
echo "Branch: main"
echo "BRANCH=main" >> $GITHUB_OUTPUT
else
echo "Unknown schedule: ${{ github.event.schedule }}"
exit 1
fi
echo "BRANCH=main" >> $GITHUB_OUTPUT
elif [[ "${{ github.event_name }}" == "push" ]]; then
echo "Branch: ${{ github.ref_name }}"
echo "BRANCH=${{ github.ref_name }}" >> $GITHUB_OUTPUT
echo "BRANCH=${{ github.ref_name }}" >> $GITHUB_OUTPUT
# on workflow_dispatch, this will simply use the inputs.GIT_SHA given (or the default)
elif [[ -n "${{ inputs.GIT_SHA }}" ]]; then
echo "BRANCH=${{ inputs.GIT_SHA }}" >> $GITHUB_OUTPUT
# if GIT_SHA not provided, use the branch where workflow runs on
else
echo "BRANCH=${{ github.head_ref }}" >> $GITHUB_OUTPUT
fi
echo "Branch: $(grep BRANCH= $GITHUB_OUTPUT)"
# Use the branch hash instead of the full branch name to stay under kubernetes namespace length limit
- name: Hash the branch
id: hash-branch
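The net effect of this hunk appears to be that the cron-string match inside the 'determine-test-branch' step is gone: scheduled runs resolve straight to main, pushes use the pushed ref, and manual dispatches prefer an explicit GIT_SHA before falling back to the head ref. Below is a purely illustrative Rust model of that decision order; the enum, field names, and resolve_branch function are assumptions made for the sketch, and the authoritative logic remains the bash run: block above.

```rust
// Purely illustrative model of the branch-resolution order implemented by the
// bash `run:` step above. All names here are assumptions for the sketch and
// are not part of the repository.

enum TriggerEvent<'a> {
    /// `schedule` trigger: every scheduled run targets `main`.
    Schedule,
    /// `push` trigger: use the pushed ref.
    Push { ref_name: &'a str },
    /// `workflow_dispatch`: prefer an explicit GIT_SHA, else the head ref.
    WorkflowDispatch {
        git_sha: Option<&'a str>,
        head_ref: &'a str,
    },
}

fn resolve_branch(event: TriggerEvent) -> String {
    match event {
        TriggerEvent::Schedule => "main".to_string(),
        TriggerEvent::Push { ref_name } => ref_name.to_string(),
        TriggerEvent::WorkflowDispatch { git_sha, head_ref } => {
            git_sha.unwrap_or(head_ref).to_string()
        }
    }
}

fn main() {
    assert_eq!(resolve_branch(TriggerEvent::Schedule), "main");
    assert_eq!(
        resolve_branch(TriggerEvent::Push { ref_name: "main" }),
        "main"
    );
    assert_eq!(
        resolve_branch(TriggerEvent::WorkflowDispatch {
            git_sha: None,
            head_ref: "add_type_annotation",
        }),
        "add_type_annotation"
    );
    println!("branch resolution model ok");
}
```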
42 changes: 21 additions & 21 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -875,6 +875,7 @@ move-prover = { path = "third_party/move/move-prover" }
move-prover-boogie-backend = { path = "third_party/move/move-prover/boogie-backend" }
move-prover-bytecode-pipeline = { path = "third_party/move/move-prover/bytecode-pipeline" }
move-prover-test-utils = { path = "third_party/move/move-prover/test-utils" }
move-prover-lab = { path = "third_party/move/move-prover/lab" }
aptos-move-stdlib = { path = "aptos-move/framework/move-stdlib" }
aptos-table-natives = { path = "aptos-move/framework/table-natives" }
move-resource-viewer = { path = "third_party/move/tools/move-resource-viewer" }
@@ -212,7 +212,7 @@ crate::gas_schedule::macros::define_gas_parameters!(
[
max_execution_gas_gov: InternalGas,
{ RELEASE_V1_13.. => "max_execution_gas.gov" },
4_300_000_000,
4_000_000_000,
],
[
max_io_gas: InternalGas,
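This hunk changes the max_execution_gas.gov parameter in the gas schedule; both the old and the new value (4_300_000_000 and 4_000_000_000) appear above. As a rough sketch of how such a per-category execution cap might be consulted, with stand-in types and an exceeds_cap helper that are assumptions for illustration rather than the aptos-gas-schedule API:

```rust
// Rough sketch of consulting a per-category execution gas cap such as
// `max_execution_gas.gov`. `InternalGas`, `ExecutionLimits`, and `exceeds_cap`
// are stand-ins for illustration, not the aptos-gas-schedule API.

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct InternalGas(u64);

struct ExecutionLimits {
    /// Cap on execution gas for governance proposals (the parameter this hunk adjusts).
    max_execution_gas_gov: InternalGas,
}

fn exceeds_cap(used: InternalGas, limits: &ExecutionLimits) -> bool {
    used > limits.max_execution_gas_gov
}

fn main() {
    // The two values shown in the hunk, used here only to exercise the comparison.
    let limits = ExecutionLimits {
        max_execution_gas_gov: InternalGas(4_000_000_000),
    };
    assert!(exceeds_cap(InternalGas(4_300_000_000), &limits));
    assert!(!exceeds_cap(InternalGas(3_999_999_999), &limits));
    println!("gas cap sketch ok");
}
```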
@@ -15,17 +15,20 @@ use move_binary_format::{
CompiledModule,
};
use move_core_types::{
account_address::AccountAddress, identifier::IdentStr, language_storage::ModuleId,
account_address::AccountAddress,
identifier::IdentStr,
language_storage::{ModuleId, TypeTag},
metadata::Metadata,
};
use move_vm_runtime::{
ambassador_impl_CodeStorage, ambassador_impl_ModuleStorage,
ambassador_impl_WithRuntimeEnvironment, AsUnsyncCodeStorage, BorrowedOrOwned, CodeStorage,
Module, ModuleStorage, RuntimeEnvironment, Script, UnsyncCodeStorage, UnsyncModuleStorage,
WithRuntimeEnvironment,
Function, Module, ModuleStorage, RuntimeEnvironment, Script, UnsyncCodeStorage,
UnsyncModuleStorage, WithRuntimeEnvironment,
};
use move_vm_types::{
code::{ModuleBytesStorage, ModuleCode},
loaded_data::runtime_types::{StructType, Type},
module_storage_error,
};
use std::{ops::Deref, sync::Arc};
18 changes: 6 additions & 12 deletions aptos-move/aptos-vm-types/src/resolver.rs
@@ -203,33 +203,27 @@ pub trait StateStorageView {
/// TODO: audit and reconsider the default implementation (e.g. should not
/// resolve AggregatorV2 via the state-view based default implementation, as it
/// doesn't provide a value exchange functionality).
pub trait TExecutorView<K, T, L, I, V>:
pub trait TExecutorView<K, T, L, V>:
TResourceView<Key = K, Layout = L>
+ TModuleView<Key = K>
+ TAggregatorV1View<Identifier = K>
+ TDelayedFieldView<Identifier = I, ResourceKey = K, ResourceGroupTag = T>
+ TDelayedFieldView<Identifier = DelayedFieldID, ResourceKey = K, ResourceGroupTag = T>
+ StateStorageView<Key = K>
{
}

impl<A, K, T, L, I, V> TExecutorView<K, T, L, I, V> for A where
impl<A, K, T, L, V> TExecutorView<K, T, L, V> for A where
A: TResourceView<Key = K, Layout = L>
+ TModuleView<Key = K>
+ TAggregatorV1View<Identifier = K>
+ TDelayedFieldView<Identifier = I, ResourceKey = K, ResourceGroupTag = T>
+ TDelayedFieldView<Identifier = DelayedFieldID, ResourceKey = K, ResourceGroupTag = T>
+ StateStorageView<Key = K>
{
}

pub trait ExecutorView:
TExecutorView<StateKey, StructTag, MoveTypeLayout, DelayedFieldID, WriteOp>
{
}
pub trait ExecutorView: TExecutorView<StateKey, StructTag, MoveTypeLayout, WriteOp> {}

impl<T> ExecutorView for T where
T: TExecutorView<StateKey, StructTag, MoveTypeLayout, DelayedFieldID, WriteOp>
{
}
impl<T> ExecutorView for T where T: TExecutorView<StateKey, StructTag, MoveTypeLayout, WriteOp> {}

pub trait ResourceGroupView:
TResourceGroupView<GroupKey = StateKey, ResourceTag = StructTag, Layout = MoveTypeLayout>
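The substance of this hunk is that the separate identifier generic I is dropped and the delayed-field identifier is pinned to DelayedFieldID in both the supertrait bound and the blanket impl, so downstream aliases such as ExecutorView no longer carry the extra parameter. A self-contained sketch of the same pattern, using stand-in types rather than the real aptos-vm-types definitions:

```rust
// Self-contained sketch of the pattern in this hunk: a supertrait bound that
// pins an associated type to a concrete ID, plus a blanket impl. All types
// here are stand-ins, not the real aptos-vm-types definitions.

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct DelayedFieldID(u64);

trait TDelayedFieldView {
    type Identifier;
}

// Before this change the identifier flowed through an extra generic parameter
// (`TExecutorView<K, T, L, I, V>`); afterwards it is fixed to DelayedFieldID.
trait TExecutorView<K, L>: TDelayedFieldView<Identifier = DelayedFieldID> {}

// Blanket impl mirroring the one in resolver.rs: anything that satisfies the
// bound with the pinned identifier is automatically a TExecutorView.
impl<A, K, L> TExecutorView<K, L> for A where
    A: TDelayedFieldView<Identifier = DelayedFieldID>
{
}

struct DummyStateView;

impl TDelayedFieldView for DummyStateView {
    type Identifier = DelayedFieldID;
}

fn assert_executor_view<K, L, V: TExecutorView<K, L>>(_view: &V) {}

fn main() {
    // DummyStateView picks up TExecutorView via the blanket impl alone.
    assert_executor_view::<(), (), _>(&DummyStateView);
    println!("pinned-associated-type sketch ok");
}
```

Pinning the associated type in the bound means every type that satisfies TDelayedFieldView with that concrete ID picks up the blanket impl automatically, without callers threading the ID type through their own signatures.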
2 changes: 1 addition & 1 deletion aptos-move/aptos-vm/src/block_executor/mod.rs
@@ -354,7 +354,7 @@ impl BlockExecutorTransactionOutput for AptosTransactionOutput {
.materialized_size()
}

fn get_write_summary(&self) -> HashSet<InputOutputKey<StateKey, StructTag, DelayedFieldID>> {
fn get_write_summary(&self) -> HashSet<InputOutputKey<StateKey, StructTag>> {
let vm_output = self.vm_output.lock();
let output = vm_output
.as_ref()
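In line with the resolver change, get_write_summary now returns keys typed as InputOutputKey<StateKey, StructTag>, with the delayed-field identifier fixed rather than generic. A small stand-in sketch of that shape (the variant names below are assumptions, not the real block-executor enum):

```rust
// Stand-in sketch of a write-summary key type whose delayed-field identifier
// is a fixed ID type instead of a third generic parameter. The variants are
// assumptions, not the real block-executor enum.

use std::collections::HashSet;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct DelayedFieldID(u64);

#[derive(Debug, PartialEq, Eq, Hash)]
enum InputOutputKey<K, T> {
    Resource(K),
    Group(K, T),
    // Fixed identifier type; previously this would have been a generic `I`.
    DelayedField(DelayedFieldID),
}

fn write_summary() -> HashSet<InputOutputKey<String, String>> {
    let mut summary = HashSet::new();
    summary.insert(InputOutputKey::Resource("0x1::coin::CoinStore".to_string()));
    summary.insert(InputOutputKey::Group(
        "0x1::object".to_string(),
        "0x1::fungible_asset::FungibleStore".to_string(),
    ));
    summary.insert(InputOutputKey::DelayedField(DelayedFieldID(7)));
    summary
}

fn main() {
    assert_eq!(write_summary().len(), 3);
    println!("write-summary sketch ok");
}
```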
19 changes: 12 additions & 7 deletions aptos-move/aptos-vm/src/move_vm_ext/session/mod.rs
@@ -38,10 +38,10 @@ use move_core_types::{
vm_status::StatusCode,
};
use move_vm_runtime::{
move_vm::MoveVM, native_extensions::NativeContextExtensions, session::Session, ModuleStorage,
VerifiedModuleBundle,
move_vm::MoveVM, native_extensions::NativeContextExtensions, session::Session,
AsFunctionValueExtension, ModuleStorage, VerifiedModuleBundle,
};
use move_vm_types::{value_serde::serialize_and_allow_delayed_values, values::Value};
use move_vm_types::{value_serde::ValueSerDeContext, values::Value};
use std::{
collections::BTreeMap,
ops::{Deref, DerefMut},
@@ -127,6 +127,7 @@ impl<'r, 'l> SessionExt<'r, 'l> {
module_storage: &impl ModuleStorage,
) -> VMResult<(VMChangeSet, ModuleWriteSet)> {
let move_vm = self.inner.get_move_vm();
let function_extension = module_storage.as_function_value_extension();

let resource_converter = |value: Value,
layout: MoveTypeLayout,
@@ -136,13 +137,17 @@ impl<'r, 'l> SessionExt<'r, 'l> {
// We allow serialization of native values here because we want to
// temporarily store native values (via encoding to ensure deterministic
// gas charging) in block storage.
serialize_and_allow_delayed_values(&value, &layout)?
ValueSerDeContext::new()
.with_delayed_fields_serde()
.with_func_args_deserialization(&function_extension)
.serialize(&value, &layout)?
.map(|bytes| (bytes.into(), Some(Arc::new(layout))))
} else {
// Otherwise, there should be no native values so ensure
// serialization fails here if there are any.
value
.simple_serialize(&layout)
ValueSerDeContext::new()
.with_func_args_deserialization(&function_extension)
.serialize(&value, &layout)?
.map(|bytes| (bytes.into(), None))
};
serialization_result.ok_or_else(|| {
Expand All @@ -165,7 +170,7 @@ impl<'r, 'l> SessionExt<'r, 'l> {

let table_context: NativeTableContext = extensions.remove();
let table_change_set = table_context
.into_change_set()
.into_change_set(&function_extension)
.map_err(|e| e.finish(Location::Undefined))?;

let aggregator_context: NativeAggregatorContext = extensions.remove();
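This hunk replaces the free serialize_and_allow_delayed_values and Value::simple_serialize calls with a builder-style ValueSerDeContext that also carries the module storage's function-value extension. The following is a minimal sketch of that builder shape only; the behavior and types are simplified assumptions, not the move-vm-types API.

```rust
// Minimal sketch of a builder-style serialization context like the one this
// hunk introduces. Behavior and types are simplified assumptions, not the
// move-vm-types API.

struct FunctionValueExtension; // stand-in for the module storage's extension

struct ValueSerDeContext<'a> {
    delayed_fields_allowed: bool,
    function_extension: Option<&'a FunctionValueExtension>,
}

impl<'a> ValueSerDeContext<'a> {
    fn new() -> Self {
        Self {
            delayed_fields_allowed: false,
            function_extension: None,
        }
    }

    /// Opt in to serializing delayed (native) values, as the change-set
    /// converter does when the layout contains delayed fields.
    fn with_delayed_fields_serde(mut self) -> Self {
        self.delayed_fields_allowed = true;
        self
    }

    /// Attach the extension needed when function values appear in the data.
    fn with_func_args_deserialization(mut self, ext: &'a FunctionValueExtension) -> Self {
        self.function_extension = Some(ext);
        self
    }

    /// Returns None when the value cannot be serialized under the configured
    /// rules; the real API takes a value and a layout, this toy takes a flag.
    fn serialize(&self, value_has_delayed_fields: bool) -> Option<Vec<u8>> {
        // The extension would be consulted for function values; unused here.
        let _ = self.function_extension;
        if value_has_delayed_fields && !self.delayed_fields_allowed {
            return None;
        }
        Some(vec![0u8; 8]) // placeholder bytes
    }
}

fn main() {
    let ext = FunctionValueExtension;
    let allowed = ValueSerDeContext::new()
        .with_delayed_fields_serde()
        .with_func_args_deserialization(&ext)
        .serialize(true);
    let rejected = ValueSerDeContext::new()
        .with_func_args_deserialization(&ext)
        .serialize(true);
    assert!(allowed.is_some() && rejected.is_none());
    println!("serde-context sketch ok");
}
```

The builder makes the two serialization modes explicit at the call site: the delayed-fields branch opts in via with_delayed_fields_serde, while the plain branch keeps the default and fails if a delayed value appears.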