Commit 45a46ca

sstrickl authored and commit-bot@chromium.org committed
[vm] Add (S)LEB128 encoding/decoding to BaseWriteStream.
Unlike cfc8e6d, this does _not_ replace the default variable-length encoding for {Read,Write}Streams, but instead adds separate {Read,Write}{S,}LEB128 methods to the appropriate classes. If we later find the cause of the issues that led to the revert of cfc8e6d, it'll be easy to switch over then. Note that WriteLEB128 asserts that the value is non-negative if used with a signed type, since a negative value suggests that SLEB128 should be used instead for minimal encoding.

Also removes the various other encoding and decoding methods for (S)LEB128 across the codebase and changes those clients to use {Read,Write}Streams instead.

Other cleanups:

* Various constant-related cleanups in datastream.h.

* Adds DART_FORCE_INLINE to ReadStream::ReadByte and uses it in the default variable-length decoding methods for retrieving bytes from the stream, instead of managing current_ by hand.

* Creates a canonical empty CompressedStackMaps instance and uses it instead of the null CompressedStackMaps instance in most cases. The only remaining (expected) use of the null CompressedStackMaps instance is for the global table in the object store when no global table exists (e.g., in JIT mode before any snapshotting).

* Moves CompressedStackMapsIterator from code_descriptors.h to an Iterator class within CompressedStackMaps in object.h (similar to PcDescriptors::Iterator), both to limit friend declarations and because it conceptually makes more sense as part of CompressedStackMaps.

* Removes CompressedStackMaps::PayloadByte, since existing clients (CompressedStackMaps::Iterator, StackMapEntry in program_visitor.cc) are better served by operating on the payload buffer directly (with appropriate NoSafepointScopes).

* WriteStreams no longer allocate their initial space on construction, but rather on the first write, so constructing a never-used WriteStream performs no allocation.

Cq-Include-Trybots: luci.dart.try:vm-kernel-precomp-linux-debug-x64-try,vm-kernel-precomp-linux-debug-simarm_x64-try,vm-kernel-precomp-mac-release-simarm64-try,vm-kernel-mac-debug-x64-try,vm-kernel-win-debug-x64-try,vm-kernel-win-debug-ia32-try,vm-kernel-precomp-win-release-x64-try,vm-kernel-ubsan-linux-release-x64-try,vm-kernel-tsan-linux-release-x64-try,vm-kernel-precomp-ubsan-linux-release-x64-try,vm-kernel-precomp-tsan-linux-release-x64-try,vm-kernel-precomp-msan-linux-release-x64-try,vm-kernel-precomp-asan-linux-release-x64-try,vm-kernel-msan-linux-release-x64-try,vm-kernel-asan-linux-release-x64-try
Change-Id: Ice63321abaa79157fbe9f230a864c8bba0e6dea9
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/166421
Reviewed-by: Ryan Macnak <rmacnak@google.com>
Commit-Queue: Tess Strickland <sstrickl@google.com>
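As background for the encodings named above: LEB128 stores an unsigned integer seven bits per byte, low bits first, with the high bit of each byte as a continuation flag; SLEB128 does the same for signed integers, sign-extending from bit 6 of the final byte so small negative values stay short. A minimal standalone sketch of the two encoders (ordinary C++, not the VM's BaseWriteStream API; the deleted Utils::DecodeSLEB128 and CompressedStackMapsBuilder::EncodeLEB128 in the diffs below show the VM's previous in-tree versions):

#include <cstdint>
#include <vector>

std::vector<uint8_t> EncodeLEB128(uint64_t value) {
  std::vector<uint8_t> out;
  while (true) {
    uint8_t part = value & 0x7f;
    value >>= 7;
    if (value != 0) part |= 0x80;  // High bit set: more bytes follow.
    out.push_back(part);
    if (value == 0) return out;
  }
}

std::vector<uint8_t> EncodeSLEB128(int64_t value) {
  std::vector<uint8_t> out;
  while (true) {
    uint8_t part = value & 0x7f;
    value >>= 7;  // Arithmetic shift, so the sign is preserved.
    // Stop once the remaining value is pure sign bits and the sign bit of
    // this byte (0x40) already matches it; otherwise set the continuation
    // flag and keep going.
    const bool done = (value == 0 && (part & 0x40) == 0) ||
                      (value == -1 && (part & 0x40) != 0);
    if (!done) part |= 0x80;
    out.push_back(part);
    if (done) return out;
  }
}

For example, EncodeSLEB128(-2) yields the single byte 0x7e, while unsigned LEB128 would need ten bytes for the same 64-bit pattern reinterpreted as unsigned; this asymmetry is why WriteLEB128 asserts non-negative inputs for signed types.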
1 parent b2e33ee commit 45a46ca

20 files changed: +677 -680 lines changed

runtime/platform/utils.h

Lines changed: 0 additions & 23 deletions
@@ -387,29 +387,6 @@ class Utils {
     return ((mask >> position) & 1) != 0;
   }
 
-  // Decode integer in SLEB128 format from |data| and update |byte_index|.
-  template <typename ValueType>
-  static ValueType DecodeSLEB128(const uint8_t* data,
-                                 const intptr_t data_length,
-                                 intptr_t* byte_index) {
-    using Unsigned = typename std::make_unsigned<ValueType>::type;
-    ASSERT(*byte_index < data_length);
-    uword shift = 0;
-    Unsigned value = 0;
-    uint8_t part = 0;
-    do {
-      part = data[(*byte_index)++];
-      value |= static_cast<Unsigned>(part & 0x7f) << shift;
-      shift += 7;
-    } while ((part & 0x80) != 0);
-
-    if ((shift < (sizeof(ValueType) * CHAR_BIT)) && ((part & 0x40) != 0)) {
-      const Unsigned kMax = std::numeric_limits<Unsigned>::max();
-      value |= static_cast<Unsigned>(kMax << shift);
-    }
-    return static_cast<ValueType>(value);
-  }
-
   static char* StrError(int err, char* buffer, size_t bufsize);
 
   // Not all platforms support strndup.
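The (part & 0x40) test in the decoder deleted above is where the sign comes back: after the last group, if bit 6 of that byte is set, the remaining high bits are filled with ones. A standalone spot check (hypothetical helper name, mirroring the removed logic for a one-byte input):

#include <cassert>
#include <cstdint>

int64_t DecodeOneByteSLEB128(uint8_t byte) {
  assert((byte & 0x80) == 0);    // Continuation bit clear: last byte.
  uint64_t value = byte & 0x7f;  // The seven payload bits.
  if ((byte & 0x40) != 0) {
    value |= ~uint64_t{0} << 7;  // Sign-extend from bit 6.
  }
  return static_cast<int64_t>(value);
}

// DecodeOneByteSLEB128(0x7e) == -2, whereas the same byte read as
// unsigned LEB128 would be 126.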

runtime/vm/bitmap.cc

Lines changed: 11 additions & 10 deletions
@@ -77,7 +77,7 @@ void BitmapBuilder::Print() const {
   }
 }
 
-void BitmapBuilder::AppendAsBytesTo(GrowableArray<uint8_t>* bytes) const {
+void BitmapBuilder::AppendAsBytesTo(BaseWriteStream* stream) const {
   // Early return if there are no bits in the payload to copy.
   if (Length() == 0) return;
 
@@ -94,19 +94,20 @@ void BitmapBuilder::AppendAsBytesTo(GrowableArray<uint8_t>* bytes) const {
     payload_size = total_size;
     extra_size = 0;
   }
+#if defined(DEBUG)
+  // Make sure any bits in the payload beyond the bit length if we're not
+  // appending trailing zeroes are cleared to ensure deterministic snapshots.
+  if (extra_size == 0 && Length() % kBitsPerByte != 0) {
+    const int8_t mask = (1 << (Length() % kBitsPerByte)) - 1;
+    ASSERT_EQUAL(data_[payload_size - 1], (data_[payload_size - 1] & mask));
+  }
+#endif
   for (intptr_t i = 0; i < payload_size; i++) {
-    bytes->Add(data_[i]);
+    stream->WriteByte(data_[i]);
  }
   for (intptr_t i = 0; i < extra_size; i++) {
-    bytes->Add(0U);
+    stream->WriteByte(0U);
   }
-  // Make sure any bits in the payload beyond the bit length are cleared to
-  // ensure deterministic snapshots.
-#if defined(DEBUG)
-  if (Length() % kBitsPerByte == 0) return;
-  const int8_t mask = (1 << (Length() % kBitsPerByte)) - 1;
-  ASSERT(bytes->Last() == (bytes->Last() & mask));
-#endif
 }
 
 bool BitmapBuilder::GetBit(intptr_t bit_offset) const {
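The relocated DEBUG block above checks, before the payload is written, that any bits past Length() in the final byte are zero (previously the check ran on the output array afterwards). A concrete standalone illustration of the mask it builds:

#include <cassert>
#include <cstdint>

int main() {
  const int kBitsPerByte = 8;
  const intptr_t length = 11;  // An 11-bit map: bits 8..10 occupy the
                               // low three bits of the second byte.
  const uint8_t last_byte = 0b00000101;
  const uint8_t mask = (1 << (length % kBitsPerByte)) - 1;  // 0x07
  // Identical maps must serialize to identical bytes, so the unused
  // high bits of the last payload byte must already be clear.
  assert(last_byte == (last_byte & mask));
  return 0;
}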

runtime/vm/bitmap.h

Lines changed: 2 additions & 2 deletions
@@ -6,7 +6,7 @@
 #define RUNTIME_VM_BITMAP_H_
 
 #include "vm/allocation.h"
-#include "vm/growable_array.h"
+#include "vm/datastream.h"
 #include "vm/thread_state.h"
 #include "vm/zone.h"
 
@@ -44,7 +44,7 @@ class BitmapBuilder : public ZoneAllocated {
   void SetRange(intptr_t min, intptr_t max, bool value);
 
   void Print() const;
-  void AppendAsBytesTo(GrowableArray<uint8_t>* bytes) const;
+  void AppendAsBytesTo(BaseWriteStream* stream) const;
 
  private:
   static const intptr_t kInitialSizeInBytes = 16;

runtime/vm/bitmap_test.cc

Lines changed: 8 additions & 6 deletions
@@ -16,8 +16,8 @@ namespace dart {
 static const uint32_t kTestPcOffset = 0x4;
 static const intptr_t kTestSpillSlotBitCount = 0;
 
-static CompressedStackMapsPtr MapsFromBuilder(BitmapBuilder* bmap) {
-  CompressedStackMapsBuilder builder;
+static CompressedStackMapsPtr MapsFromBuilder(Zone* zone, BitmapBuilder* bmap) {
+  CompressedStackMapsBuilder builder(zone);
   builder.AddEntry(kTestPcOffset, bmap, kTestSpillSlotBitCount);
   return builder.Finalize();
 }
@@ -51,8 +51,9 @@ ISOLATE_UNIT_TEST_CASE(BitmapBuilder) {
   }
 
   // Create a CompressedStackMaps object and verify its contents.
-  const auto& maps1 = CompressedStackMaps::Handle(MapsFromBuilder(builder1));
-  CompressedStackMapsIterator it1(maps1);
+  const auto& maps1 = CompressedStackMaps::Handle(
+      thread->zone(), MapsFromBuilder(thread->zone(), builder1));
+  CompressedStackMaps::Iterator it1(thread, maps1);
   EXPECT(it1.MoveNext());
 
   EXPECT_EQ(kTestPcOffset, it1.pc_offset());
@@ -83,8 +84,9 @@ ISOLATE_UNIT_TEST_CASE(BitmapBuilder) {
     EXPECT(!builder1->Get(i));
   }
 
-  const auto& maps2 = CompressedStackMaps::Handle(MapsFromBuilder(builder1));
-  CompressedStackMapsIterator it2(maps2);
+  const auto& maps2 = CompressedStackMaps::Handle(
+      thread->zone(), MapsFromBuilder(thread->zone(), builder1));
+  CompressedStackMaps::Iterator it2(thread, maps2);
   EXPECT(it2.MoveNext());
 
   EXPECT_EQ(kTestPcOffset, it2.pc_offset());

runtime/vm/code_descriptors.cc

Lines changed: 18 additions & 187 deletions
@@ -35,37 +35,25 @@ void DescriptorList::AddDescriptor(PcDescriptorsLayout::Kind kind,
         PcDescriptorsLayout::KindAndMetadata::Encode(kind, try_index,
                                                      yield_index);
 
-    PcDescriptors::EncodeInteger(&encoded_data_, kind_and_metadata);
-    PcDescriptors::EncodeInteger(&encoded_data_, pc_offset - prev_pc_offset);
+    encoded_data_.WriteSLEB128(kind_and_metadata);
+    encoded_data_.WriteSLEB128(pc_offset - prev_pc_offset);
     prev_pc_offset = pc_offset;
 
     if (!FLAG_precompiled_mode) {
-      PcDescriptors::EncodeInteger(&encoded_data_, deopt_id - prev_deopt_id);
-      PcDescriptors::EncodeInteger(&encoded_data_,
-                                   token_pos.value() - prev_token_pos);
+      encoded_data_.WriteSLEB128(deopt_id - prev_deopt_id);
+      encoded_data_.WriteSLEB128(token_pos.value() - prev_token_pos);
       prev_deopt_id = deopt_id;
       prev_token_pos = token_pos.value();
     }
   }
 }
 
 PcDescriptorsPtr DescriptorList::FinalizePcDescriptors(uword entry_point) {
-  if (encoded_data_.length() == 0) {
+  if (encoded_data_.bytes_written() == 0) {
     return Object::empty_descriptors().raw();
   }
-  return PcDescriptors::New(&encoded_data_);
-}
-
-// Encode unsigned integer |value| in LEB128 format and store into |data|.
-void CompressedStackMapsBuilder::EncodeLEB128(GrowableArray<uint8_t>* data,
-                                              uintptr_t value) {
-  while (true) {
-    uint8_t part = value & 0x7f;
-    value >>= 7;
-    if (value != 0) part |= 0x80;
-    data->Add(part);
-    if (value == 0) break;
-  }
+  return PcDescriptors::New(encoded_data_.buffer(),
+                            encoded_data_.bytes_written());
 }
 
 void CompressedStackMapsBuilder::AddEntry(intptr_t pc_offset,
@@ -74,179 +62,22 @@ void CompressedStackMapsBuilder::AddEntry(intptr_t pc_offset,
   ASSERT(bitmap != nullptr);
   ASSERT(pc_offset > last_pc_offset_);
   ASSERT(spill_slot_bit_count >= 0 && spill_slot_bit_count <= bitmap->Length());
-  auto const pc_delta = pc_offset - last_pc_offset_;
-  auto const non_spill_slot_bit_count = bitmap->Length() - spill_slot_bit_count;
-  EncodeLEB128(&encoded_bytes_, pc_delta);
-  EncodeLEB128(&encoded_bytes_, spill_slot_bit_count);
-  EncodeLEB128(&encoded_bytes_, non_spill_slot_bit_count);
+  const uword pc_delta = pc_offset - last_pc_offset_;
+  const uword non_spill_slot_bit_count =
+      bitmap->Length() - spill_slot_bit_count;
+  encoded_bytes_.WriteLEB128(pc_delta);
+  encoded_bytes_.WriteLEB128(spill_slot_bit_count);
+  encoded_bytes_.WriteLEB128(non_spill_slot_bit_count);
   bitmap->AppendAsBytesTo(&encoded_bytes_);
   last_pc_offset_ = pc_offset;
 }
 
 CompressedStackMapsPtr CompressedStackMapsBuilder::Finalize() const {
-  if (encoded_bytes_.length() == 0) return CompressedStackMaps::null();
-  return CompressedStackMaps::NewInlined(encoded_bytes_);
-}
-
-CompressedStackMapsIterator::CompressedStackMapsIterator(
-    const CompressedStackMaps& maps,
-    const CompressedStackMaps& global_table)
-    : maps_(maps),
-      bits_container_(maps_.UsesGlobalTable() ? global_table : maps_) {
-  ASSERT(!maps_.IsGlobalTable());
-  ASSERT(!maps_.UsesGlobalTable() || bits_container_.IsGlobalTable());
-}
-
-CompressedStackMapsIterator::CompressedStackMapsIterator(
-    const CompressedStackMaps& maps)
-    : CompressedStackMapsIterator(
-          maps,
-          // Only look up the global table if the map will end up using it.
-          maps.UsesGlobalTable() ? CompressedStackMaps::Handle(
-                                       Thread::Current()
-                                           ->isolate()
-                                           ->object_store()
-                                           ->canonicalized_stack_map_entries())
-                                 : Object::null_compressed_stack_maps()) {}
-
-CompressedStackMapsIterator::CompressedStackMapsIterator(
-    const CompressedStackMapsIterator& it)
-    : maps_(it.maps_),
-      bits_container_(it.bits_container_),
-      next_offset_(it.next_offset_),
-      current_pc_offset_(it.current_pc_offset_),
-      current_global_table_offset_(it.current_global_table_offset_),
-      current_spill_slot_bit_count_(it.current_spill_slot_bit_count_),
-      current_non_spill_slot_bit_count_(it.current_spill_slot_bit_count_),
-      current_bits_offset_(it.current_bits_offset_) {}
-
-// Decode unsigned integer in LEB128 format from the payload of |maps| and
-// update |byte_index|.
-uintptr_t CompressedStackMapsIterator::DecodeLEB128(
-    const CompressedStackMaps& maps,
-    uintptr_t* byte_index) {
-  uword shift = 0;
-  uintptr_t value = 0;
-  uint8_t part = 0;
-  do {
-    ASSERT(*byte_index < maps.payload_size());
-    part = maps.PayloadByte((*byte_index)++);
-    value |= static_cast<uintptr_t>(part & 0x7f) << shift;
-    shift += 7;
-  } while ((part & 0x80) != 0);
-
-  return value;
-}
-
-bool CompressedStackMapsIterator::MoveNext() {
-  // Empty CompressedStackMaps are represented as null values.
-  if (maps_.IsNull() || next_offset_ >= maps_.payload_size()) return false;
-  uintptr_t offset = next_offset_;
-
-  auto const pc_delta = DecodeLEB128(maps_, &offset);
-  ASSERT(pc_delta <= (kMaxUint32 - current_pc_offset_));
-  current_pc_offset_ += pc_delta;
-
-  // Table-using CSMs have a table offset after the PC offset delta, whereas
-  // the post-delta part of inlined entries has the same information as
-  // global table entries.
-  if (maps_.UsesGlobalTable()) {
-    current_global_table_offset_ = DecodeLEB128(maps_, &offset);
-    ASSERT(current_global_table_offset_ < bits_container_.payload_size());
-
-    // Since generally we only use entries in the GC and the GC only needs
-    // the rest of the entry information if the PC offset matches, we lazily
-    // load and cache the information stored in the global object when it is
-    // actually requested.
-    current_spill_slot_bit_count_ = -1;
-    current_non_spill_slot_bit_count_ = -1;
-    current_bits_offset_ = -1;
-  } else {
-    current_spill_slot_bit_count_ = DecodeLEB128(maps_, &offset);
-    ASSERT(current_spill_slot_bit_count_ >= 0);
-
-    current_non_spill_slot_bit_count_ = DecodeLEB128(maps_, &offset);
-    ASSERT(current_non_spill_slot_bit_count_ >= 0);
-
-    const auto stackmap_bits =
-        current_spill_slot_bit_count_ + current_non_spill_slot_bit_count_;
-    const uintptr_t stackmap_size =
-        Utils::RoundUp(stackmap_bits, kBitsPerByte) >> kBitsPerByteLog2;
-    ASSERT(stackmap_size <= (maps_.payload_size() - offset));
-
-    current_bits_offset_ = offset;
-    offset += stackmap_size;
-  }
-
-  next_offset_ = offset;
-  return true;
-}
-
-intptr_t CompressedStackMapsIterator::Length() {
-  EnsureFullyLoadedEntry();
-  return current_spill_slot_bit_count_ + current_non_spill_slot_bit_count_;
-}
-intptr_t CompressedStackMapsIterator::SpillSlotBitCount() {
-  EnsureFullyLoadedEntry();
-  return current_spill_slot_bit_count_;
-}
-
-bool CompressedStackMapsIterator::IsObject(intptr_t bit_index) {
-  EnsureFullyLoadedEntry();
-  ASSERT(!bits_container_.IsNull());
-  ASSERT(bit_index >= 0 && bit_index < Length());
-  const intptr_t byte_index = bit_index >> kBitsPerByteLog2;
-  const intptr_t bit_remainder = bit_index & (kBitsPerByte - 1);
-  uint8_t byte_mask = 1U << bit_remainder;
-  const intptr_t byte_offset = current_bits_offset_ + byte_index;
-  return (bits_container_.PayloadByte(byte_offset) & byte_mask) != 0;
-}
-
-void CompressedStackMapsIterator::LazyLoadGlobalTableEntry() {
-  ASSERT(maps_.UsesGlobalTable() && bits_container_.IsGlobalTable());
-  ASSERT(HasLoadedEntry());
-  ASSERT(current_global_table_offset_ < bits_container_.payload_size());
-
-  uintptr_t offset = current_global_table_offset_;
-  current_spill_slot_bit_count_ = DecodeLEB128(bits_container_, &offset);
-  ASSERT(current_spill_slot_bit_count_ >= 0);
-
-  current_non_spill_slot_bit_count_ = DecodeLEB128(bits_container_, &offset);
-  ASSERT(current_non_spill_slot_bit_count_ >= 0);
-
-  const auto stackmap_bits = Length();
-  const uintptr_t stackmap_size =
-      Utils::RoundUp(stackmap_bits, kBitsPerByte) >> kBitsPerByteLog2;
-  ASSERT(stackmap_size <= (bits_container_.payload_size() - offset));
-
-  current_bits_offset_ = offset;
-}
-
-const char* CompressedStackMapsIterator::ToCString(Zone* zone) const {
-  ZoneTextBuffer b(zone, 100);
-  CompressedStackMapsIterator it(*this);
-  // If we haven't loaded an entry yet, do so (but don't skip the current
-  // one if we have!)
-  if (!it.HasLoadedEntry()) {
-    if (!it.MoveNext()) return b.buffer();
-  }
-  bool first_entry = true;
-  do {
-    if (first_entry) {
-      first_entry = false;
-    } else {
-      b.AddString("\n");
-    }
-    b.Printf("0x%08x: ", it.pc_offset());
-    for (intptr_t i = 0, n = it.Length(); i < n; i++) {
-      b.AddString(it.IsObject(i) ? "1" : "0");
-    }
-  } while (it.MoveNext());
-  return b.buffer();
-}
-
-const char* CompressedStackMapsIterator::ToCString() const {
-  return ToCString(Thread::Current()->zone());
+  if (encoded_bytes_.bytes_written() == 0) {
+    return Object::empty_compressed_stackmaps().raw();
+  }
+  return CompressedStackMaps::NewInlined(encoded_bytes_.buffer(),
+                                         encoded_bytes_.bytes_written());
 }
 
 ExceptionHandlersPtr ExceptionHandlerList::FinalizeExceptionHandlers(
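For context on the layout CompressedStackMapsBuilder::AddEntry emits above (and that the removed iterator consumed): each inlined entry is a PC delta, a spill slot bit count, and a non-spill bit count, each LEB128-encoded, followed by the stack map bitmap rounded up to whole bytes. A standalone sketch of walking one entry, mirroring the deleted MoveNext logic (hypothetical names, not the VM API):

#include <cassert>
#include <cstddef>
#include <cstdint>

// Reads one unsigned LEB128 value from |data|, advancing |*index|.
static uintptr_t DecodeLEB128(const uint8_t* data, size_t size, size_t* index) {
  uintptr_t value = 0;
  unsigned shift = 0;
  uint8_t part;
  do {
    assert(*index < size);
    part = data[(*index)++];
    value |= static_cast<uintptr_t>(part & 0x7f) << shift;
    shift += 7;
  } while ((part & 0x80) != 0);
  return value;
}

// One decoded inlined CompressedStackMaps entry.
struct Entry {
  uintptr_t pc_delta;
  uintptr_t spill_slot_bits;
  uintptr_t non_spill_slot_bits;
  size_t bits_offset;  // Where the bitmap payload starts.
};

static Entry DecodeEntry(const uint8_t* data, size_t size, size_t* index) {
  Entry e;
  e.pc_delta = DecodeLEB128(data, size, index);
  e.spill_slot_bits = DecodeLEB128(data, size, index);
  e.non_spill_slot_bits = DecodeLEB128(data, size, index);
  e.bits_offset = *index;
  const size_t total_bits = e.spill_slot_bits + e.non_spill_slot_bits;
  *index += (total_bits + 7) / 8;  // Skip past the bitmap bytes.
  assert(*index <= size);
  return e;
}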
