for (Event *evp = &part->events[0]; evp < end; evp++) {
Event *evp0 = evp;
if (!evp->is_access && !evp->is_func) {
- switch (evp->type) {
+ switch (evp->GetType()) {
case EventType::kTime: {
auto *ev = reinterpret_cast<EventTime *>(evp);
ev_sid = static_cast<Sid>(ev->sid);
[&](Sid ev_sid, Epoch ev_epoch, Event *evp) {
bool match = ev_sid == sid && ev_epoch == epoch;
if (evp->is_access) {
- if (evp->is_func == 0 && evp->type == EventType::kAccessExt &&
+ if (evp->is_func == 0 && evp->GetType() == EventType::kAccessExt &&
evp->_ == 0) // NopEvent
return;
auto *ev = reinterpret_cast<EventAccess *>(evp);
}
return;
}
- switch (evp->type) {
+ switch (evp->GetType()) {
case EventType::kAccessExt: {
auto *ev = reinterpret_cast<EventAccessExt *>(evp);
uptr ev_addr = RestoreAddr(ev->addr);
// Otherwise, the type field denotes the concrete event type.
u64 is_access : 1;
u64 is_func : 1;
- EventType type : 3;
+ u64 type : 3;
u64 _ : 59;
+
+ EventType GetType() const {
+ return static_cast<EventType>(type);
+ }
};
static_assert(sizeof(Event) == 8, "bad Event size");
// Nop event used as padding and does not affect state during replay.
-static constexpr Event NopEvent = {1, 0, EventType::kAccessExt, 0};
+static constexpr Event NopEvent = {1, 0, static_cast<u64>(EventType::kAccessExt), 0};
// Compressed memory access can represent only some events with PCs
// close enough to each other. Otherwise we fall back to EventAccessExt.