Skip to content

Commit

Permalink
Tweak passing value types
Browse files Browse the repository at this point in the history
  • Loading branch information
yorickpeterse committed Dec 2, 2022
1 parent 2d879ba commit 42bbd57
Show file tree
Hide file tree
Showing 6 changed files with 80 additions and 142 deletions.
80 changes: 0 additions & 80 deletions compiler/src/codegen.rs
Original file line number Diff line number Diff line change
Expand Up @@ -907,83 +907,3 @@ impl<'a> LowerToBytecode<'a> {
*self.constant_indexes.entry(constant).or_insert(len)
}
}

// TODO: remove?
// /// A compiler pass that lowers MIR into C source code.
// pub(crate) struct LowerToC<'a> {
// db: &'a Database,
// mir: &'a Mir,
// module_index: usize,
// // class_info: &'a HashMap<ClassId, ClassInfo>,
// // method_info: &'a HashMap<MethodId, MethodInfo>,
// // constant_indexes: HashMap<Constant, u32>,
// }
//
// impl<'a> LowerToC<'a> {
// pub(crate) fn run_all(db: &'a Database, mir: &'a Mir) {
// for module_index in 0..mir.modules.len() {
// LowerToC { db, mir, module_index }.run();
// }
// }
//
// pub(crate) fn run(self) {
// // TODO: remove
// if !self.mir.modules[self.module_index]
// .id
// .file(self.db)
// .ends_with("Downloads/test.inko")
// {
// return;
// }
//
// let mut buffer = String::new();
//
// for &class in &self.mir.modules[self.module_index].classes {
// for &method in &self.mir.classes.get(&class).unwrap().methods {
// self.method(class, method, &mut buffer);
// }
// }
//
// println!("{}", buffer);
// }
//
// fn method(
// &self,
// class_id: ClassId,
// method_id: MethodId,
// buffer: &mut String,
// ) {
// // TODO: remove
// if method_id.name(self.db) != "main" {
// return;
// }
//
// println!("Generating C for {}", method_id.name(self.db));
//
// let name = format!("inko_{}", method_id.0);
// let method = self.mir.methods.get(&method_id).unwrap();
// let mut queue = VecDeque::new();
// let mut visited = HashSet::new();
//
// queue.push_back(method.body.start_id);
// visited.insert(method.body.start_id);
//
// while let Some(block_id) = queue.pop_front() {
// let mir_block = &method.body.blocks[block_id.0];
//
// for ins in &mir_block.instructions {
// self.instruction(method, ins);
// }
//
// for &child in &mir_block.successors {
// if visited.insert(child) {
// queue.push_back(child);
// }
// }
// }
// }
//
// fn instruction(&self, method: &Method, instruction: &mir::Instruction) {
// // TODO
// }
// }
30 changes: 28 additions & 2 deletions compiler/src/llvm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ const REFS_INDEX: usize = 2;

// The values used to represent the kind of a value/reference. These values
// must match the values used by `Kind` in the runtime library.
const REGULAR_KIND: u8 = 0;
const OWNED_KIND: u8 = 0;
const REF_KIND: u8 = 1;
const ATOMIC_KIND: u8 = 2;
const PERMANENT_KIND: u8 = 3;
Expand Down Expand Up @@ -1979,7 +1979,7 @@ impl<'a, 'b, 'ctx> LowerMethod<'a, 'b, 'ctx> {
let atomic_block = llvm_blocks[ins.blocks[2].0];
let perm_block = llvm_blocks[ins.blocks[3].0];
let cases = [
(builder.u8_literal(REGULAR_KIND), owned_block),
(builder.u8_literal(OWNED_KIND), owned_block),
(builder.u8_literal(REF_KIND), ref_block),
(builder.u8_literal(ATOMIC_KIND), atomic_block),
(builder.u8_literal(PERMANENT_KIND), perm_block),
Expand Down Expand Up @@ -2179,6 +2179,32 @@ impl<'a, 'b, 'ctx> LowerMethod<'a, 'b, 'ctx> {
// it's a bit redundant to again check the tag bit. We'd
// probably have to check if the type is a class or not,
// as in other cases we have a switch_kind() anyway.
//
// TODO: if the value is a value type, drop it instead
// of decrementing it.
if self
.method
.registers
.value_type(ins.register)
.is_int(self.db, self.method.id.self_type(self.db))
{
// let ptr = builder.load_pointer(var);
// let addr = builder.cast_pointer_to_int(ptr);
// let mask = builder.i64_literal(INT_MASK);
// let bits = builder.build_and(addr, mask, "");
// let cond = builder.build_int_compare(
// IntPredicate::EQ,
// bits,
// mask,
// "",
// );
//
// builder.build_conditional_branch(
// cond,
// after_block,
// decrement_block,
// );
}

                        // We're decrementing a reference, so the ref bit is set
                        // and must be masked off first.
Expand Down
1 change: 1 addition & 0 deletions compiler/src/mir/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1443,6 +1443,7 @@ pub(crate) struct Mir {
pub(crate) classes: HashMap<types::ClassId, Class>,
pub(crate) traits: HashMap<types::TraitId, Trait>,
pub(crate) methods: HashMap<types::MethodId, Method>,
// TODO: do we really need this?
pub(crate) closure_classes: HashMap<types::ClosureId, types::ClassId>,
locations: Vec<Location>,
}
Expand Down
92 changes: 39 additions & 53 deletions compiler/src/mir/passes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3564,11 +3564,29 @@ impl<'a> LowerMethod<'a> {
}
}

// Value types are always passed as a new value, whether the receiving
// argument is owned or a reference.
//
// This ensures that if we pass the value to generic code, it can freely
// add references to it (if the value is boxed), without this affecting
// our current code (i.e. by said reference outliving the input value).
if register_type.is_value_type(self.db())
&& !register_type
.use_atomic_reference_counting(self.db(), self.self_type())
{
return self.clone_value_type(
register,
register_type,
true,
location,
);
}

if register_type.is_owned_or_uni(self.db()) {
match expected {
// Owned values passed to references are implicitly passed as
// references.
Some(exp) if !exp.is_owned_or_uni(self.db()) => {
if let Some(exp) = expected {
// Regular owned values passed to references are implicitly
// passed as references.
if !exp.is_owned_or_uni(self.db()) {
let typ = register_type.cast_according_to(exp, self.db());
let reg = self.new_register(typ);

Expand All @@ -3577,7 +3595,6 @@ impl<'a> LowerMethod<'a> {

return reg;
}
_ => {}
}

self.check_field_move(register, location);
Expand All @@ -3602,25 +3619,6 @@ impl<'a> LowerMethod<'a> {
return register;
}

// References of value types passed to owned values should be cloned.
// This allows passing e.g. `ref Int` to something that expects `Int`,
// without the need for an explicit clone.
if register_type.is_value_type(self.db())
&& expected.map_or(false, |v| v.is_owned_or_uni(self.db()))
{
            // In this case we force cloning, so expressions such as
            // `foo(vals[0])` pass a clone instead of passing the returned ref
            // directly. If we were to pass the ref directly, `foo` might drop
            // it thinking it's an owned value, then fail because a ref still
            // exists.
return self.clone_value_type(
register,
register_type,
true,
location,
);
}

// For reference types we only need to increment if they originate from
// a variable or field, as regular registers can't be referred to more
// than once.
Expand Down Expand Up @@ -4498,38 +4496,27 @@ impl<'a> ExpandDrop<'a> {
let stype = self.method.id.self_type(self.db);

if value_type.use_atomic_reference_counting(self.db, stype) {
self.block_mut(before_id)
.decrement_atomic(value, after_id, after_id, location);

self.method.body.add_edge(before_id, after_id);
} else if value_type.is_type_parameter(self.db)
|| value_type.is_self_type(self.db)
{
let atomic_id = self.add_block();
let ref_id = self.add_block();

self.block_mut(before_id).switch_kind(
value,
vec![after_id, ref_id, atomic_id, after_id],
location,
);

self.block_mut(ref_id).decrement(value, location);
self.block_mut(ref_id).goto(after_id, location);

self.block_mut(atomic_id)
.decrement_atomic(value, after_id, after_id, location);

self.method.body.add_edge(ref_id, after_id);
self.method.body.add_edge(atomic_id, after_id);
self.method.body.add_edge(before_id, atomic_id);
self.method.body.add_edge(before_id, ref_id);
self.drop_atomic(before_id, after_id, value, location);
} else if value_type.is_value_type(self.db) {
self.block_mut(before_id).free(value, location);
self.block_mut(before_id).goto(after_id, location);
self.method.body.add_edge(before_id, after_id);
} else {
} else if value_type.class_id_with_self_type(self.db, stype).is_some() {
self.block_mut(before_id).decrement(value, location);
self.block_mut(before_id).goto(after_id, location);

self.method.body.add_edge(before_id, after_id);
} else {
// If the value is typed as a type parameter or trait, it may be
// passed a value type or a type that uses atomic reference
// counting. As such we fall back to a runtime check for such cases.
//
// Disabling the use of a dropper here ensures that _if_ the value
// is a value type (in which case it's treated as an owned value),
// we simply free it, as running its dropper is redundant in that
// case.
self.drop_with_runtime_check(
before_id, after_id, value, false, location,
);
}
}

Expand Down Expand Up @@ -4573,7 +4560,6 @@ impl<'a> ExpandDrop<'a> {
{
self.call_dropper(before_id, value, location);
} else {
self.block_mut(before_id).check_refs(value, location);
self.block_mut(before_id).free(value, location);
}

Expand Down
15 changes: 10 additions & 5 deletions vm/src/mem.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,14 +35,19 @@ const UNTAG_MASK: usize = (!0b11) as usize;
///
/// The values of the variants are specified explicitly to make it more
/// explicit that we depend on these exact values (e.g. in the compiler).
///
/// TODO: make this a compiler-only thing.
#[repr(u8)]
#[derive(Copy, Clone)]
pub enum Kind {
    /// The value is a regular heap allocated, owned value.
    Owned = 0,

    /// The value is a reference to a heap allocated value.
    Ref = 1,

    /// The value is an owned value that uses atomic reference counting.
    Atomic = 2,

    /// The value mustn't be dropped until the program stops.
    Permanent = 3,
}

Expand Down Expand Up @@ -140,7 +145,7 @@ pub struct Header {
impl Header {
pub(crate) fn init(&mut self, class: ClassPointer) {
self.class = class;
self.kind = Kind::Regular;
self.kind = Kind::Owned;
self.references = 0;
}

Expand Down Expand Up @@ -819,7 +824,7 @@ mod tests {

unsafe {
assert!(matches!(Kind::of(tagged as _), Kind::Permanent));
assert!(matches!(Kind::of(heap as _), Kind::Regular));
assert!(matches!(Kind::of(heap as _), Kind::Owned));
assert!(matches!(Kind::of(heap_ref as _), Kind::Ref));
assert!(matches!(Kind::of(string as _), Kind::Atomic));
assert!(matches!(Kind::of(perm_string as _), Kind::Permanent));
Expand Down
4 changes: 2 additions & 2 deletions vm/src/runtime/general.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ pub unsafe extern "system" fn inko_increment(pointer: *const u8) -> *const u8 {
header.increment_atomic();
pointer
}
Kind::Regular | Kind::Ref => {
Kind::Owned | Kind::Ref => {
header.increment();
as_ref(pointer)
}
Expand All @@ -50,7 +50,7 @@ pub unsafe extern "system" fn inko_decrement(pointer: *const u8) {
let header = header_of(without_tags(pointer));

match header.kind {
Kind::Regular | Kind::Ref => header.decrement(),
Kind::Owned | Kind::Ref => header.decrement(),
Kind::Atomic => {
header.decrement_atomic();
}
Expand Down

0 comments on commit 42bbd57

Please sign in to comment.