path: root/tools/standalone_miri/value.cpp
author     John Hodge <tpg@mutabah.net>   2018-02-16 20:51:04 +0800
committer  John Hodge <tpg@mutabah.net>   2018-02-16 20:51:04 +0800
commit     75413b30fcf5dd97ddf44daa90fe1e361098d9e0 (patch)
tree       18dbe1eb09bf0491194255813134b33814d82ac3 /tools/standalone_miri/value.cpp
parent     97156f41a6831d0bfb8870a3757a2b19cd4d2495 (diff)
download   mrust-75413b30fcf5dd97ddf44daa90fe1e361098d9e0.tar.gz
Standalone MIRI - Hacking along, hit a blocker that will need some refactor.
Diffstat (limited to 'tools/standalone_miri/value.cpp')
-rw-r--r--  tools/standalone_miri/value.cpp  165
1 file changed, 140 insertions(+), 25 deletions(-)
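For context before the diff: AllocationPtr is a tagged pointer. Allocations are word-aligned, so the two low bits of the stored pointer are free to encode what it points at (Ty::Allocation, Ty::Function, plus two unused variants), which is what the `(ptr_i & 3) == 0` assertion and the `+ static_cast<uintptr_t>(Ty::Function)` in the rewritten copy constructor below rely on. The following is a simplified sketch of that scheme, not the class from the mrustc sources; the concrete tag values and the Path stand-in are assumptions.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <string>
    #include <utility>

    struct Allocation { size_t refcount = 0; /* data, mask, relocations ... */ };
    using Path = std::string;   // stand-in for ::HIR::Path

    class TaggedPtr
    {
    public:
        // Assumed tag values: Allocation uses tag 0 so an allocation pointer can be
        // stored untouched; Function uses tag 1.
        enum class Ty : uintptr_t { Allocation = 0, Function = 1, Unused1 = 2, Unused2 = 3 };

        TaggedPtr() = default;
        explicit operator bool() const { return m_ptr != nullptr; }
        Ty get_ty() const { return static_cast<Ty>(bits() & 3); }
        Allocation& alloc() const { return *reinterpret_cast<Allocation*>(bits() & ~uintptr_t(3)); }
        Path&       fcn()   const { return *reinterpret_cast<Path*>(bits() & ~uintptr_t(3)); }

        // Mirrors AllocationPtr::new_fcn: heap-allocate the path and fold the tag
        // into the low bits of the pointer.
        static TaggedPtr new_fcn(Path p)
        {
            auto ptr_i = reinterpret_cast<uintptr_t>(new Path(::std::move(p)));
            assert( (ptr_i & 3) == 0 );     // `new` yields suitably aligned storage
            TaggedPtr rv;
            rv.m_ptr = reinterpret_cast<void*>(ptr_i | static_cast<uintptr_t>(Ty::Function));
            return rv;
        }

        // Copying behaves like the rewritten copy constructor in the diff: bump the
        // refcount for allocations, deep-copy the path for functions.
        TaggedPtr(const TaggedPtr& x): m_ptr(nullptr)
        {
            if( !x ) return;
            switch(x.get_ty())
            {
            case Ty::Allocation:
                m_ptr = x.m_ptr;
                alloc().refcount += 1;
                break;
            case Ty::Function: {
                auto ptr_i = reinterpret_cast<uintptr_t>(new Path(x.fcn()));
                assert( (ptr_i & 3) == 0 );
                m_ptr = reinterpret_cast<void*>(ptr_i | static_cast<uintptr_t>(Ty::Function));
                } break;
            default:
                assert(!"unused tag");
            }
        }
    private:
        uintptr_t bits() const { return reinterpret_cast<uintptr_t>(m_ptr); }
        void* m_ptr = nullptr;
    };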
diff --git a/tools/standalone_miri/value.cpp b/tools/standalone_miri/value.cpp
index db8ec085..e4a19196 100644
--- a/tools/standalone_miri/value.cpp
+++ b/tools/standalone_miri/value.cpp
@@ -7,7 +7,7 @@
#include <iostream>
#include <iomanip>
#include <algorithm>
-
+#include "debug.hpp"
AllocationPtr Allocation::new_alloc(size_t size)
{
@@ -15,6 +15,7 @@ AllocationPtr Allocation::new_alloc(size_t size)
rv->refcount = 1;
rv->data.resize( (size + 8-1) / 8 ); // QWORDS
rv->mask.resize( (size + 8-1) / 8 ); // bitmap bytes
+ //LOG_DEBUG(rv << " ALLOC");
return AllocationPtr(rv);
}
AllocationPtr AllocationPtr::new_fcn(::HIR::Path p)
@@ -25,11 +26,34 @@ AllocationPtr AllocationPtr::new_fcn(::HIR::Path p)
return rv;
}
AllocationPtr::AllocationPtr(const AllocationPtr& x):
- m_ptr(x.m_ptr)
+ m_ptr(nullptr)
{
- if( is_alloc() ) {
- assert(alloc().refcount != SIZE_MAX);
- alloc().refcount += 1;
+ if( x )
+ {
+ switch(x.get_ty())
+ {
+ case Ty::Allocation:
+ m_ptr = x.m_ptr;
+ assert(alloc().refcount != 0);
+ assert(alloc().refcount != SIZE_MAX);
+ alloc().refcount += 1;
+ //LOG_DEBUG(&alloc() << " REF++ " << alloc().refcount);
+ break;
+ case Ty::Function: {
+ auto ptr_i = reinterpret_cast<uintptr_t>(new ::HIR::Path(x.fcn()));
+ assert( (ptr_i & 3) == 0 );
+ m_ptr = reinterpret_cast<void*>( ptr_i + static_cast<uintptr_t>(Ty::Function) );
+ assert(get_ty() == Ty::Function);
+ } break;
+ case Ty::Unused1:
+ throw "BUG";
+ case Ty::Unused2:
+ throw "BUG";
+ }
+ }
+ else
+ {
+ m_ptr = nullptr;
}
}
AllocationPtr::~AllocationPtr()
@@ -41,8 +65,11 @@ AllocationPtr::~AllocationPtr()
case Ty::Allocation: {
auto* ptr = &alloc();
ptr->refcount -= 1;
+ //LOG_DEBUG(&alloc() << " REF-- " << ptr->refcount);
if(ptr->refcount == 0)
+ {
delete ptr;
+ }
} break;
case Ty::Function: {
auto* ptr = const_cast<::HIR::Path*>(&fcn());
@@ -56,6 +83,31 @@ AllocationPtr::~AllocationPtr()
}
}
+::std::ostream& operator<<(::std::ostream& os, const AllocationPtr& x)
+{
+ if( x )
+ {
+ switch(x.get_ty())
+ {
+ case AllocationPtr::Ty::Allocation:
+ os << &x.alloc();
+ break;
+ case AllocationPtr::Ty::Function:
+ os << *const_cast<::HIR::Path*>(&x.fcn());
+ break;
+ case AllocationPtr::Ty::Unused1:
+ break;
+ case AllocationPtr::Ty::Unused2:
+ break;
+ }
+ }
+ else
+ {
+ os << "null";
+ }
+ return os;
+}
+
Value::Value()
{
this->meta.direct_data.size = 0;
@@ -99,7 +151,7 @@ Value::Value(::HIR::TypeRef ty)
if( ! H::has_pointer(ty) )
{
// Will fit in a inline allocation, nice.
- ::std::cout << "Value::Value(): No pointers in " << ty << ", storing inline" << ::std::endl;
+ //LOG_TRACE("No pointers in " << ty << ", storing inline");
this->meta.direct_data.size = static_cast<uint8_t>(size);
this->meta.direct_data.mask[0] = 0;
this->meta.direct_data.mask[1] = 0;
@@ -109,7 +161,7 @@ Value::Value(::HIR::TypeRef ty)
#endif
// Fallback: Make a new allocation
- ::std::cout << "Value::Value(): Creating allocation for " << ty << ::std::endl;
+ //LOG_TRACE(" Creating allocation for " << ty);
this->allocation = Allocation::new_alloc(size);
this->meta.indirect_meta.offset = 0;
this->meta.indirect_meta.size = size;
@@ -174,7 +226,7 @@ void Value::mark_bytes_valid(size_t ofs, size_t size)
auto& alloc = this->allocation.alloc();
// TODO: Assert range.
ofs += this->meta.indirect_meta.offset;
- assert( (ofs+size+8-1) / 8 < alloc.mask.size() );
+ assert( ofs+size <= alloc.mask.size() * 8 );
for(size_t i = ofs; i < ofs + size; i++)
{
alloc.mask[i/8] |= (1 << i%8);
@@ -191,7 +243,8 @@ void Value::mark_bytes_valid(size_t ofs, size_t size)
Value Value::read_value(size_t ofs, size_t size) const
{
- ::std::cout << "Value::read_value(" << ofs << ", " << size << ")" << ::std::endl;
+ Value rv;
+ LOG_DEBUG("(" << ofs << ", " << size << ") - " << *this);
check_bytes_valid(ofs, size);
if( this->allocation )
{
@@ -205,28 +258,34 @@ Value Value::read_value(size_t ofs, size_t size) const
has_reloc = true;
}
}
- Value rv;
- if( has_reloc && size < sizeof(this->meta.direct_data.data) )
+ if( has_reloc || size > sizeof(this->meta.direct_data.data) )
{
rv.allocation = Allocation::new_alloc(size);
rv.meta.indirect_meta.offset = 0;
rv.meta.indirect_meta.size = size;
+
+ for(const auto& r : alloc.relocations)
+ {
+ if( this->meta.indirect_meta.offset+ofs <= r.slot_ofs && r.slot_ofs < this->meta.indirect_meta.offset + ofs + size )
+ {
+ rv.allocation.alloc().relocations.push_back({ r.slot_ofs - (this->meta.indirect_meta.offset+ofs), r.backing_alloc });
+ }
+ }
}
else
{
rv.meta.direct_data.size = static_cast<uint8_t>(size);
}
rv.write_bytes(0, this->data_ptr() + ofs, size);
- return rv;
}
else
{
// Inline can become inline.
- Value rv;
rv.meta.direct_data.size = static_cast<uint8_t>(size);
rv.write_bytes(0, this->meta.direct_data.data+ofs, size);
- return rv;
}
+ LOG_DEBUG("RETURN " << rv);
+ return rv;
}
void Value::read_bytes(size_t ofs, void* dst, size_t count) const
{
@@ -250,6 +309,23 @@ void Value::write_bytes(size_t ofs, const void* src, size_t count)
::std::cerr << "Value::write_bytes - Out of bounds write, " << ofs << "+" << count << " > size " << this->meta.indirect_meta.size << ::std::endl;
throw "ERROR";
}
+
+
+ // - Remove any relocations already within this region
+ auto& this_relocs = this->allocation.alloc().relocations;
+ for(auto it = this_relocs.begin(); it != this_relocs.end(); )
+ {
+ if( this->meta.indirect_meta.offset + ofs <= it->slot_ofs && it->slot_ofs < this->meta.indirect_meta.offset + ofs + count)
+ {
+ LOG_TRACE("Delete " << it->backing_alloc);
+ it = this_relocs.erase(it);
+ }
+ else
+ {
+ ++it;
+ }
+ }
+
}
else
{
@@ -267,21 +343,49 @@ void Value::write_value(size_t ofs, Value v)
{
if( v.allocation )
{
- v.check_bytes_valid(0, v.meta.indirect_meta.size);
+ size_t v_size = v.meta.indirect_meta.size;
+ v.check_bytes_valid(0, v_size);
const auto& src_alloc = v.allocation.alloc();
- write_bytes(ofs, v.data_ptr(), v.meta.indirect_meta.size);
+ write_bytes(ofs, v.data_ptr(), v_size);
// Find any relocations that apply and copy those in.
- // - Any relocations in the source within `v.meta.indirect_meta.offset` .. `v.meta.indirect_meta.offset + v.meta.indirect_meta.size`
+ // - Any relocations in the source within `v.meta.indirect_meta.offset` .. `v.meta.indirect_meta.offset + v_size`
+ ::std::vector<Relocation> new_relocs;
for(const auto& r : src_alloc.relocations)
{
// TODO: Negative offsets in destination?
- if( v.meta.indirect_meta.offset <= r.slot_ofs && r.slot_ofs < v.meta.indirect_meta.offset + v.meta.indirect_meta.size )
+ if( v.meta.indirect_meta.offset <= r.slot_ofs && r.slot_ofs < v.meta.indirect_meta.offset + v_size )
+ {
+ LOG_TRACE("Copy " << r.backing_alloc);
+ // Applicable, save for later
+ new_relocs.push_back( r );
+ }
+ }
+ if( !new_relocs.empty() )
+ {
+ if( !this->allocation ) {
+ throw ::std::runtime_error("TODO: Writing value with a relocation into a slot without a relocation");
+ }
+ // 1. Remove any relocations already within this region
+ auto& this_relocs = this->allocation.alloc().relocations;
+ for(auto it = this_relocs.begin(); it != this_relocs.end(); )
{
- // Applicable
- if( !this->allocation ) {
- throw ::std::runtime_error("TODO: Writing value with a relocation into a slot without a relocation");
+ if( this->meta.indirect_meta.offset + ofs <= it->slot_ofs && it->slot_ofs < this->meta.indirect_meta.offset + ofs + v_size)
+ {
+ LOG_TRACE("Delete " << it->backing_alloc);
+ it = this_relocs.erase(it);
+ }
+ else
+ {
+ ++it;
}
- this->allocation.alloc().relocations.push_back( r );
+ }
+ // 2. Move the new relocations into this allocation
+ for(auto& r : new_relocs)
+ {
+ LOG_TRACE("Insert " << r.backing_alloc);
+ r.slot_ofs -= v.meta.indirect_meta.offset;
+ r.slot_ofs += this->meta.indirect_meta.offset + ofs;
+ this_relocs.push_back( ::std::move(r) );
}
}
}
@@ -292,13 +396,14 @@ void Value::write_value(size_t ofs, Value v)
}
}
-size_t Value::as_usize() const
+uint64_t Value::read_usize(size_t ofs) const
{
- uint64_t v;
+ uint64_t v = 0;
+ // TODO: Handle different pointer sizes
this->read_bytes(0, &v, 8);
- // TODO: Handle endian and different architectures
return v;
}
+
::std::ostream& operator<<(::std::ostream& os, const Value& v)
{
auto flags = os.flags();
@@ -321,6 +426,16 @@ size_t Value::as_usize() const
os << "--";
}
}
+
+ os << " {";
+ for(const auto& r : alloc.relocations)
+ {
+ if( v.meta.indirect_meta.offset <= r.slot_ofs && r.slot_ofs < v.meta.indirect_meta.offset + v.meta.indirect_meta.size )
+ {
+ os << " @" << (r.slot_ofs - v.meta.indirect_meta.offset) << "=" << r.backing_alloc;
+ }
+ }
+ os << " }";
}
else
{