summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorOxore <oxore@protonmail.com>2023-05-27 18:46:34 +0300
committerOxore <oxore@protonmail.com>2023-05-27 18:46:34 +0300
commit6b8ec09807dde64ccd36a5f0d2ba13d6cdbc66a1 (patch)
tree27a3b4c67c6c8bc26e65123c814718f52e558a70
parenta4841d2a593f9efed1cb116137034c307c1d74bc (diff)
Change `offset` wording to `address` wording in almost all places
-rw-r--r--disasm.cpp136
-rw-r--r--disasm.h10
-rw-r--r--main.cpp106
3 files changed, 126 insertions, 126 deletions
diff --git a/disasm.cpp b/disasm.cpp
index 20e4ec8..3755e39 100644
--- a/disasm.cpp
+++ b/disasm.cpp
@@ -27,17 +27,17 @@ enum class ShiftKind: int {
kRotate = 3,
};
-constexpr Arg FetchImmediate(const uint32_t offset, const DataBuffer &code, const OpSize s)
+constexpr Arg FetchImmediate(const uint32_t address, const DataBuffer &code, const OpSize s)
{
if (s == OpSize::kInvalid) {
return Arg{};
} else if (s == OpSize::kLong) {
- if (offset + kInstructionSizeStepBytes < code.occupied_size) {
- const int32_t value = GetI32BE(code.buffer + offset);
+ if (address + kInstructionSizeStepBytes < code.occupied_size) {
+ const int32_t value = GetI32BE(code.buffer + address);
return Arg::Immediate(value);
}
- } else if (offset < code.occupied_size) {
- const int16_t value = GetI16BE(code.buffer + offset);
+ } else if (address < code.occupied_size) {
+ const int16_t value = GetI16BE(code.buffer + address);
if (s == OpSize::kByte) {
// Technically it is impossible to have value lower than -128 in 8
// bits signed integer, but the second byte being 0xff is actually
@@ -54,7 +54,7 @@ constexpr Arg FetchImmediate(const uint32_t offset, const DataBuffer &code, cons
}
constexpr Arg FetchArg(
- const uint32_t offset, const DataBuffer &code, const int m, const int xn, const OpSize s)
+ const uint32_t address, const DataBuffer &code, const int m, const int xn, const OpSize s)
{
switch (m) {
case 0: // Dn
@@ -68,14 +68,14 @@ constexpr Arg FetchArg(
case 4: // -(An)
return Arg::AnAddrDecr(xn);
case 5: // (d16, An), Additional Word
- if (offset < code.occupied_size) {
- const int16_t d16 = GetI16BE(code.buffer + offset);
+ if (address < code.occupied_size) {
+ const int16_t d16 = GetI16BE(code.buffer + address);
return Arg::D16AnAddr(xn, d16);
}
break;
case 6: // (d8, An, Xi), Brief Extension Word
- if (offset < code.occupied_size) {
- const uint16_t briefext = GetU16BE(code.buffer + offset);
+ if (address < code.occupied_size) {
+ const uint16_t briefext = GetU16BE(code.buffer + address);
if (briefext & 0x0700) {
// briefext must have zeros on 8, 9 and 10-th bits,
// i.e. xxxx_x000_xxxx_xxxx
@@ -91,26 +91,26 @@ constexpr Arg FetchArg(
case 7:
switch (xn) {
case 0: // (xxx).W, Additional Word
- if (offset < code.occupied_size) {
- const int32_t w = GetI16BE(code.buffer + offset);
+ if (address < code.occupied_size) {
+ const int32_t w = GetI16BE(code.buffer + address);
return Arg::Word(w);
}
break;
case 1: // (xxx).L, Additional Long
- if (offset + kInstructionSizeStepBytes < code.occupied_size) {
- const int32_t l = GetI32BE(code.buffer + offset);
+ if (address + kInstructionSizeStepBytes < code.occupied_size) {
+ const int32_t l = GetI32BE(code.buffer + address);
return Arg::Long(l);
}
break;
case 2: // (d16, PC), Additional Word
- if (offset < code.occupied_size) {
- const int16_t d16 = GetI16BE(code.buffer + offset);
+ if (address < code.occupied_size) {
+ const int16_t d16 = GetI16BE(code.buffer + address);
return Arg::D16PCAddr(d16);
}
break;
case 3: // (d8, PC, Xi), Brief Extension Word
- if (offset < code.occupied_size) {
- const uint16_t briefext = GetU16BE(code.buffer + offset);
+ if (address < code.occupied_size) {
+ const uint16_t briefext = GetU16BE(code.buffer + address);
if (briefext & 0x0700) {
// briefext must have zeros on 8, 9 and 10-th bits,
// i.e. xxxx_x000_xxxx_xxxx
@@ -124,7 +124,7 @@ constexpr Arg FetchArg(
}
break;
case 4: // #imm
- return FetchImmediate(offset, code, s);
+ return FetchImmediate(address, code, s);
case 5: // Does not exist
case 6: // Does not exist
case 7: // Does not exist
@@ -136,12 +136,12 @@ constexpr Arg FetchArg(
}
static Arg FetchArg(
- const uint32_t offset, const DataBuffer &code, const uint16_t instr, const OpSize s)
+ const uint32_t address, const DataBuffer &code, const uint16_t instr, const OpSize s)
{
const int addrmode = instr & 0x3f;
const int m = (addrmode >> 3) & 7;
const int xn = addrmode & 7;
- return FetchArg(offset, code, m, xn, s);
+ return FetchArg(address, code, m, xn, s);
}
static size_t disasm_verbatim(DisasmNode &node, const uint16_t instr)
@@ -154,7 +154,7 @@ static size_t disasm_jsr_jmp(
DisasmNode &node, const uint16_t instr, const DataBuffer &code)
{
const OpSize opsize = OpSize::kWord;
- const auto a = FetchArg(node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ const auto a = FetchArg(node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (a.mode) {
case AddrMode::kInvalid:
case AddrMode::kDn: // 4e80..4e87 / 4ec0..4ec7
@@ -191,7 +191,7 @@ static size_t disasm_jsr_jmp(
break;
case AddrMode::kD16PCAddr: // 4eba / 4efa
{
- const uint32_t ref_addr = node.offset + kInstructionSizeStepBytes +
+ const uint32_t ref_addr = node.address + kInstructionSizeStepBytes +
static_cast<uint32_t>(a.d16_pc.d16);
node.ref1_addr = ref_addr;
node.ref_kinds = kRef1RelMask;
@@ -227,17 +227,17 @@ static size_t disasm_ext_movem(
if (m == 0 && dir == MoveDirection::kRegisterToMemory) {
return disasm_ext(node, opsize, Arg::Dn(xn));
}
- if (node.offset + kInstructionSizeStepBytes >= code.occupied_size) {
+ if (node.address + kInstructionSizeStepBytes >= code.occupied_size) {
// Not enough space for regmask, but maybe it is just EXT?
return disasm_verbatim(node, instr);
}
- const unsigned regmask = GetU16BE(code.buffer + node.offset + kInstructionSizeStepBytes);
+ const unsigned regmask = GetU16BE(code.buffer + node.address + kInstructionSizeStepBytes);
if (regmask == 0) {
// This is just not representable: at least one register must be specified
return disasm_verbatim(node, instr);
}
const auto a = FetchArg(
- node.offset + kInstructionSizeStepBytes * 2, code, m, xn, opsize);
+ node.address + kInstructionSizeStepBytes * 2, code, m, xn, opsize);
switch (a.mode) {
case AddrMode::kInvalid:
case AddrMode::kDn: // 4880..4887 / 4c80..4c87 / 48c0..48c7 / 4cc0..4cc7
@@ -277,7 +277,7 @@ static size_t disasm_ext_movem(
// print label for PC relative referenced value of MOVEM. Alongside
// with *NOT* adding kInstructionSizeStepBytes to ref1_addr. Still
// figuring that out.
- node.ref1_addr = node.offset + kInstructionSizeStepBytes * 2 +
+ node.ref1_addr = node.address + kInstructionSizeStepBytes * 2 +
static_cast<uint32_t>(a.d16_pc.d16);
node.ref_kinds = kRef1RelMask | kRef1ReadMask | kRefPcRelFix2Bytes;
}
@@ -302,7 +302,7 @@ static size_t disasm_lea(
{
const OpSize opsize = OpSize::kLong;
const auto addr = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (addr.mode) {
case AddrMode::kInvalid:
case AddrMode::kDn:
@@ -322,7 +322,7 @@ static size_t disasm_lea(
node.ref_kinds = kRef1AbsMask | kRef1ReadMask;
break;
case AddrMode::kD16PCAddr:
- node.ref1_addr = node.offset + kInstructionSizeStepBytes +
+ node.ref1_addr = node.address + kInstructionSizeStepBytes +
static_cast<uint32_t>(addr.d16_pc.d16);
node.ref_kinds = kRef1RelMask | kRef1ReadMask;
break;
@@ -342,7 +342,7 @@ static size_t disasm_chk(
{
const OpSize opsize = OpSize::kWord;
const auto src = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (src.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -381,7 +381,7 @@ static size_t disasm_bra_bsr_bcc(
const auto opsize = dispmt0 ? OpSize::kShort : OpSize::kWord;
if (dispmt0 == 0) {
// Check the boundaries
- if (node.offset + kInstructionSizeStepBytes >= code.occupied_size) {
+ if (node.address + kInstructionSizeStepBytes >= code.occupied_size) {
return disasm_verbatim(node, instr);
}
node.size = kInstructionSizeStepBytes * 2;
@@ -389,8 +389,8 @@ static size_t disasm_bra_bsr_bcc(
node.size = kInstructionSizeStepBytes;
}
const int16_t dispmt = kInstructionSizeStepBytes + (dispmt0
- ? dispmt0 : GetI16BE(code.buffer + node.offset + kInstructionSizeStepBytes));
- const uint32_t ref_addr = static_cast<uint32_t>(node.offset + dispmt);
+ ? dispmt0 : GetI16BE(code.buffer + node.address + kInstructionSizeStepBytes));
+ const uint32_t ref_addr = static_cast<uint32_t>(node.address + dispmt);
Condition condition = static_cast<Condition>((instr >> 8) & 0xf);
// False condition Indicates BSR
node.ref1_addr = ref_addr;
@@ -419,7 +419,7 @@ static size_t disasm_movep(
const OpSize opsize = ((instr >> 6) & 1) ? OpSize::kLong : OpSize::kWord;
const auto dir = static_cast<MoveDirection>(!((instr >> 7) & 1));
const auto addr = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, 5, an, opsize);
+ node.address + kInstructionSizeStepBytes, code, 5, an, opsize);
if (addr.mode == AddrMode::kInvalid) {
// Boundary check failed, most likely
return disasm_verbatim(node, instr);
@@ -450,7 +450,7 @@ static size_t disasm_src_arg_bitops_movep(
// Fetch AddrMode::kDn if has_dn_src, otherwise fetch AddrMode::kImmediate
// byte
const auto src = FetchArg(
- node.offset + kInstructionSizeStepBytes,
+ node.address + kInstructionSizeStepBytes,
code,
(has_dn_src) ? 0 : 7,
dn,
@@ -465,7 +465,7 @@ static size_t disasm_src_arg_bitops_movep(
assert(src.mode == AddrMode::kImmediate);
}
const auto dst = FetchArg(
- node.offset + kInstructionSizeStepBytes + src.Size(opsize0), code, m, xn, opsize0);
+ node.address + kInstructionSizeStepBytes + src.Size(opsize0), code, m, xn, opsize0);
const unsigned opcode = (instr >> 6) & 3;
switch (dst.mode) {
case AddrMode::kInvalid:
@@ -559,7 +559,7 @@ static size_t disasm_bitops_movep(
return disasm_verbatim(node, instr);
}
}
- const auto src = FetchImmediate(node.offset + kInstructionSizeStepBytes, code, opsize);
+ const auto src = FetchImmediate(node.address + kInstructionSizeStepBytes, code, opsize);
if (src.mode == AddrMode::kInvalid) {
return disasm_verbatim(node, instr);
}
@@ -569,7 +569,7 @@ static size_t disasm_bitops_movep(
return disasm_logical_immediate_to(node, mnemonic, opsize, src);
}
const auto dst = FetchArg(
- node.offset + kInstructionSizeStepBytes + src.Size(opsize), code, m, xn, opsize);
+ node.address + kInstructionSizeStepBytes + src.Size(opsize), code, m, xn, opsize);
switch (dst.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -606,7 +606,7 @@ static size_t disasm_move_movea(
const OpSize opsize = (opsize_raw == 1)
? OpSize::kByte : (opsize_raw == 3 ? OpSize::kWord : OpSize::kLong);
const auto src = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (src.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -629,7 +629,7 @@ static size_t disasm_move_movea(
node.ref_kinds |= kRef1AbsMask | kRef1ReadMask;
break;
case AddrMode::kD16PCAddr:
- node.ref1_addr = node.offset + kInstructionSizeStepBytes +
+ node.ref1_addr = node.address + kInstructionSizeStepBytes +
static_cast<uint32_t>(src.d16_pc.d16);
node.ref_kinds |= kRef1RelMask | kRef1ReadMask;
break;
@@ -640,7 +640,7 @@ static size_t disasm_move_movea(
const int m = (instr >> 6) & 7;
const int xn = (instr >> 9) & 7;
const auto dst = FetchArg(
- node.offset + kInstructionSizeStepBytes + src.Size(opsize), code, m, xn, opsize);
+ node.address + kInstructionSizeStepBytes + src.Size(opsize), code, m, xn, opsize);
switch (dst.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -688,7 +688,7 @@ static size_t disasm_move_from_sr(
{
const auto opsize = OpSize::kWord;
const auto dst = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (dst.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -718,7 +718,7 @@ static size_t disasm_move_to(
{
const auto opsize = OpSize::kWord;
const auto src = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (src.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -774,7 +774,7 @@ static size_t disasm_move_negx_clr_neg_not(
return disasm_verbatim(node, instr);
}
const auto a = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (a.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -811,7 +811,7 @@ static size_t disasm_tas(
{
const auto opsize = OpSize::kByte;
const auto a = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (a.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -848,7 +848,7 @@ static size_t disasm_tst_tas_illegal(
}
return disasm_tas(node, instr, code);
}
- const auto a = FetchArg(node.offset + kInstructionSizeStepBytes, code, m, xn, opsize);
+ const auto a = FetchArg(node.address + kInstructionSizeStepBytes, code, m, xn, opsize);
switch (a.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -889,7 +889,7 @@ static size_t disasm_link_unlink(DisasmNode &node, const uint16_t instr, const D
return node.size = kInstructionSizeStepBytes;
}
const auto opsize = OpSize::kWord;
- const auto src = FetchImmediate(node.offset + kInstructionSizeStepBytes, code, opsize);
+ const auto src = FetchImmediate(node.address + kInstructionSizeStepBytes, code, opsize);
if (src.mode != AddrMode::kImmediate) {
return disasm_verbatim(node, instr);
}
@@ -916,7 +916,7 @@ static size_t disasm_nbcd_swap_pea(DisasmNode &node, const uint16_t instr, const
const bool is_nbcd = !((instr >> 6) & 1);
const OpSize opsize0 = OpSize::kWord;
const auto arg = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize0);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize0);
bool is_swap{};
switch (arg.mode) {
case AddrMode::kInvalid:
@@ -950,7 +950,7 @@ static size_t disasm_nbcd_swap_pea(DisasmNode &node, const uint16_t instr, const
return disasm_verbatim(node, instr);
}
if (arg.mode == AddrMode::kD16PCAddr) {
- node.ref1_addr = node.offset + kInstructionSizeStepBytes +
+ node.ref1_addr = node.address + kInstructionSizeStepBytes +
static_cast<uint32_t>(arg.d16_pc.d16);
node.ref_kinds = kRef1RelMask | kRef1ReadMask;
}
@@ -966,7 +966,7 @@ static size_t disasm_nbcd_swap_pea(DisasmNode &node, const uint16_t instr, const
static size_t disasm_stop(DisasmNode &node, const uint16_t instr, const DataBuffer &code)
{
- const auto a = FetchImmediate(node.offset + kInstructionSizeStepBytes, code, OpSize::kWord);
+ const auto a = FetchImmediate(node.address + kInstructionSizeStepBytes, code, OpSize::kWord);
if (a.mode != AddrMode::kImmediate) {
return disasm_verbatim(node, instr);
}
@@ -1020,7 +1020,7 @@ static size_t disasm_chunk_4(DisasmNode &node, const uint16_t instr, const DataB
static size_t disasm_addq_subq(
DisasmNode &node, const uint16_t instr, const DataBuffer &code, const OpSize opsize)
{
- const auto a = FetchArg(node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ const auto a = FetchArg(node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (a.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -1055,12 +1055,12 @@ static size_t disasm_addq_subq(
static size_t disasm_dbcc(DisasmNode &node, const uint16_t instr, const DataBuffer &code)
{
- if (node.offset + kInstructionSizeStepBytes >= code.occupied_size) {
+ if (node.address + kInstructionSizeStepBytes >= code.occupied_size) {
return disasm_verbatim(node, instr);
}
- const int16_t dispmt_raw = GetI16BE(code.buffer + node.offset + kInstructionSizeStepBytes);
+ const int16_t dispmt_raw = GetI16BE(code.buffer + node.address + kInstructionSizeStepBytes);
const int32_t dispmt = dispmt_raw + kInstructionSizeStepBytes;
- node.ref2_addr = static_cast<uint32_t>(node.offset + dispmt);
+ node.ref2_addr = static_cast<uint32_t>(node.address + dispmt);
node.ref_kinds = kRef2RelMask;
node.op = Op{
OpCode::kDBcc,
@@ -1076,7 +1076,7 @@ static size_t disasm_scc_dbcc(DisasmNode &node, const uint16_t instr, const Data
{
const OpSize opsize = OpSize::kWord;
const auto a = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (a.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -1133,7 +1133,7 @@ static size_t disasm_divu_divs_mulu_muls(
{
const auto opsize = OpSize::kWord;
const auto src = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (src.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -1187,7 +1187,7 @@ static size_t disasm_or_and(
{
const bool dir_to_addr = (instr >> 8) & 1;
const auto addr = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (addr.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -1251,7 +1251,7 @@ static size_t disasm_adda_suba_cmpa(
{
const OpSize opsize = static_cast<OpSize>(((instr >> 8) & 1) + 1);
const auto src = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (src.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -1284,7 +1284,7 @@ static size_t disasm_add_sub_cmp(
const bool dir_to_addr)
{
const auto addr = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (addr.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -1319,7 +1319,7 @@ static size_t disasm_add_sub_cmp(
return disasm_verbatim(node, instr);
}
if (addr.mode == AddrMode::kD16PCAddr) {
- node.ref1_addr = node.offset + kInstructionSizeStepBytes +
+ node.ref1_addr = node.address + kInstructionSizeStepBytes +
static_cast<uint32_t>(addr.d16_pc.d16);
node.ref_kinds = kRef1RelMask | kRef1ReadMask;
}
@@ -1360,7 +1360,7 @@ static size_t disasm_eor(DisasmNode &node, const uint16_t instr, const DataBuffe
{
const OpSize opsize = static_cast<OpSize>((instr >> 6) & 3);
const auto addr = FetchArg(
- node.offset + kInstructionSizeStepBytes, code, instr, opsize);
+ node.address + kInstructionSizeStepBytes, code, instr, opsize);
switch (addr.mode) {
case AddrMode::kInvalid:
return disasm_verbatim(node, instr);
@@ -1489,7 +1489,7 @@ static size_t disasm_shift_rotate(DisasmNode &node, const uint16_t instr, const
return disasm_verbatim(node, instr);
}
const auto dst = (opsize == OpSize::kInvalid)
- ? FetchArg(node.offset + kInstructionSizeStepBytes, code, instr, opsize)
+ ? FetchArg(node.address + kInstructionSizeStepBytes, code, instr, opsize)
: Arg::Dn(xn);
if (opsize == OpSize::kInvalid) {
switch (dst.mode) {
@@ -1572,7 +1572,7 @@ static size_t m68k_disasm(DisasmNode &n, uint16_t i, const DataBuffer &c)
size_t DisasmNode::Disasm(const DataBuffer &code)
{
// We assume that the machine has no MMU and ROM data always starts with 0
- assert(this->offset < code.occupied_size);
+ assert(this->address < code.occupied_size);
// It is possible to have multiple DisasmNode::Disasm() calls, and there is
// no point to disassemble it again if it already has opcode determined
if (this->op.opcode != OpCode::kNone) {
@@ -1582,7 +1582,7 @@ size_t DisasmNode::Disasm(const DataBuffer &code)
ref_kinds = 0;
ref1_addr = 0;
ref2_addr = 0;
- const uint16_t instr = GetU16BE(code.buffer + this->offset);
+ const uint16_t instr = GetU16BE(code.buffer + this->address);
if (this->type == TracedNodeType::kInstruction) {
return m68k_disasm(*this, instr, code);
} else {
@@ -1594,12 +1594,12 @@ size_t DisasmNode::Disasm(const DataBuffer &code)
size_t DisasmNode::DisasmAsRaw(const DataBuffer &code)
{
// We assume that machine have no MMU and ROM data always starts with 0
- assert(this->offset < code.occupied_size);
+ assert(this->address < code.occupied_size);
size = kInstructionSizeStepBytes;
ref_kinds = 0;
ref1_addr = 0;
ref2_addr = 0;
- const uint16_t instr = GetU16BE(code.buffer + this->offset);
+ const uint16_t instr = GetU16BE(code.buffer + this->address);
return disasm_verbatim(*this, instr);
}
@@ -1981,7 +1981,7 @@ int Op::FPrint(
}
}
-void DisasmNode::AddReferencedBy(const uint32_t offset, const ReferenceType type)
+void DisasmNode::AddReferencedBy(const uint32_t address, const ReferenceType type)
{
ReferenceNode *node{};
if (this->last_ref_by) {
@@ -1991,7 +1991,7 @@ void DisasmNode::AddReferencedBy(const uint32_t offset, const ReferenceType type
assert(node);
this->ref_by = this->last_ref_by = node;
}
- node->refs[node->refs_count] = ReferenceRecord{type, offset};
+ node->refs[node->refs_count] = ReferenceRecord{type, address};
node->refs_count++;
if (node->refs_count >= kRefsCountPerBuffer) {
ReferenceNode *new_node = new ReferenceNode{};
diff --git a/disasm.h b/disasm.h
index f138e5f..9228ffb 100644
--- a/disasm.h
+++ b/disasm.h
@@ -365,15 +365,15 @@ struct Op {
struct DisasmNode {
const TracedNodeType type{};
- /// Absolute offset of the instruction (PC value basically)
- const uint32_t offset{};
+ /// Address of the instruction (PC value basically)
+ const uint32_t address{};
/// Instruction size in bytes
size_t size{kInstructionSizeStepBytes};
/// Indicates whether `ref_addr` should be interpreted and how
RefKindMask ref_kinds{};
- /// Absolute address of reference
+ /// Address of first argument reference
uint32_t ref1_addr{};
- /// Absolute address of reference
+ /// Address of second argument reference
uint32_t ref2_addr{};
ReferenceNode *ref_by{};
ReferenceNode *last_ref_by{};
@@ -384,6 +384,6 @@ struct DisasmNode {
*/
size_t Disasm(const DataBuffer &code);
size_t DisasmAsRaw(const DataBuffer &code);
- void AddReferencedBy(uint32_t offset, ReferenceType);
+ void AddReferencedBy(uint32_t address, ReferenceType);
~DisasmNode();
};
diff --git a/main.cpp b/main.cpp
index 84abd3f..88eef0d 100644
--- a/main.cpp
+++ b/main.cpp
@@ -26,8 +26,8 @@ enum class DisasmMapType {
class DisasmMap {
const DisasmMapType _type;
DisasmNode *_map[kDisasmMapSizeElements]{};
- constexpr DisasmNode *findNodeByOffset(uint32_t offset) const;
- DisasmNode *insertTracedNode(uint32_t offset, TracedNodeType);
+ constexpr DisasmNode *findNodeByAddress(uint32_t address) const;
+ DisasmNode *insertTracedNode(uint32_t address, TracedNodeType);
void insertReferencedBy(
const uint32_t by_addr,
const uint32_t ref_addr,
@@ -36,37 +36,37 @@ class DisasmMap {
const ReferenceType ref_type);
bool canBeAllocated(const DisasmNode& node) const;
public:
- constexpr const DisasmNode *FindNodeByOffset(uint32_t offset) const
+ constexpr const DisasmNode *FindNodeByAddress(uint32_t address) const
{
- return findNodeByOffset(offset);
+ return findNodeByAddress(address);
};
// Returns true if node inserted, false if node already exists and has not
// been changed
- bool InsertTracedNode(uint32_t offset, TracedNodeType type)
+ bool InsertTracedNode(uint32_t address, TracedNodeType type)
{
assert(_type == DisasmMapType::kTraced);
- return nullptr != insertTracedNode(offset, type);
+ return nullptr != insertTracedNode(address, type);
}
void Disasm(const DataBuffer &code, const Settings &);
DisasmMap(DisasmMapType type): _type(type) {}
~DisasmMap();
};
-constexpr DisasmNode *DisasmMap::findNodeByOffset(uint32_t offset) const
+constexpr DisasmNode *DisasmMap::findNodeByAddress(uint32_t address) const
{
- if (offset < kRomSizeBytes)
- return _map[offset / kInstructionSizeStepBytes];
+ if (address < kRomSizeBytes)
+ return _map[address / kInstructionSizeStepBytes];
return nullptr;
}
-static uint32_t AlignInstructionAddress(const uint32_t offset)
+static uint32_t AlignInstructionAddress(const uint32_t address)
{
- return offset & ~1UL;
+ return address & ~1UL;
}
-DisasmNode *DisasmMap::insertTracedNode(const uint32_t offset, const TracedNodeType type)
+DisasmNode *DisasmMap::insertTracedNode(const uint32_t address, const TracedNodeType type)
{
- auto *node = findNodeByOffset(offset);
+ auto *node = findNodeByAddress(address);
if (node) {
// Instruction nodes take precedence over data nodes. If a node that
// was previously accessed only as data now turns out to be an
@@ -78,9 +78,9 @@ DisasmNode *DisasmMap::insertTracedNode(const uint32_t offset, const TracedNodeT
}
return node;
}
- node = new DisasmNode(DisasmNode{type, AlignInstructionAddress(offset)});
+ node = new DisasmNode(DisasmNode{type, AlignInstructionAddress(address)});
assert(node);
- _map[offset / kInstructionSizeStepBytes] = node;
+ _map[address / kInstructionSizeStepBytes] = node;
return node;
}
@@ -97,7 +97,7 @@ void DisasmMap::insertReferencedBy(
if (canBeAllocated(*ref_node)) {
// Spread across the size
for (size_t o = kInstructionSizeStepBytes; o < size; o++) {
- _map[(ref_node->offset + o) / kInstructionSizeStepBytes] = ref_node;
+ _map[(ref_node->address + o) / kInstructionSizeStepBytes] = ref_node;
}
} else {
ref_node->DisasmAsRaw(code);
@@ -108,9 +108,9 @@ void DisasmMap::insertReferencedBy(
bool DisasmMap::canBeAllocated(const DisasmNode& node) const
{
const auto size = node.size / kInstructionSizeStepBytes;
- const auto *const node_real = findNodeByOffset(node.offset);
+ const auto *const node_real = findNodeByAddress(node.address);
for (size_t i = 1; i < size; i++) {
- const auto *const ptr = _map[node.offset / kInstructionSizeStepBytes + i];
+ const auto *const ptr = _map[node.address / kInstructionSizeStepBytes + i];
if (ptr != nullptr && ptr != node_real) {
return false;
}
@@ -158,7 +158,7 @@ void DisasmMap::Disasm(const DataBuffer &code, const Settings &)
if (canBeAllocated(*node)) {
// Spread across the size
for (size_t o = kInstructionSizeStepBytes; o < size; o++) {
- _map[(node->offset + o) / kInstructionSizeStepBytes] = node;
+ _map[(node->address + o) / kInstructionSizeStepBytes] = node;
}
} else {
node->DisasmAsRaw(code);
@@ -170,7 +170,7 @@ void DisasmMap::Disasm(const DataBuffer &code, const Settings &)
const TracedNodeType type = (node->ref_kinds & (kRef1ReadMask | kRef1WriteMask))
? TracedNodeType::kData : TracedNodeType::kInstruction;
const auto ref_type = ReferenceTypeFromRefKindMask1(node->ref_kinds);
- insertReferencedBy(node->offset, node->ref1_addr, type, code, ref_type);
+ insertReferencedBy(node->address, node->ref1_addr, type, code, ref_type);
}
const bool has_code_ref2 =
((node->ref_kinds & kRef2Mask) && node->ref2_addr < code.occupied_size);
@@ -178,7 +178,7 @@ void DisasmMap::Disasm(const DataBuffer &code, const Settings &)
const TracedNodeType type = (node->ref_kinds & (kRef2ReadMask | kRef2WriteMask))
? TracedNodeType::kData : TracedNodeType::kInstruction;
const auto ref_type = ReferenceTypeFromRefKindMask2(node->ref_kinds);
- insertReferencedBy(node->offset, node->ref2_addr, type, code, ref_type);
+ insertReferencedBy(node->address, node->ref2_addr, type, code, ref_type);
}
i += node->size;
}
@@ -202,7 +202,7 @@ DisasmMap::~DisasmMap()
}
static size_t RenderRawDataComment(
- char *out, size_t out_sz, uint32_t offset, size_t instr_sz, const DataBuffer &code)
+ char *out, size_t out_sz, uint32_t address, size_t instr_sz, const DataBuffer &code)
{
size_t overall_sz{};
for (size_t i = 0; i < instr_sz; i += kInstructionSizeStepBytes)
@@ -213,11 +213,11 @@ static size_t RenderRawDataComment(
out + overall_sz,
out_sz - overall_sz,
" %04x",
- GetU16BE(code.buffer + offset + i)));
+ GetU16BE(code.buffer + address + i)));
}
overall_sz += Min(
out_sz - overall_sz,
- snprintf(out + overall_sz, out_sz - overall_sz, " @%08x", offset));
+ snprintf(out + overall_sz, out_sz - overall_sz, " @%08x", address));
return overall_sz;
}
@@ -260,7 +260,7 @@ static constexpr bool HasCallReference(const DisasmNode &node)
static constexpr size_t GetNodeSizeByAddress(const DisasmMap &disasm_map, const uint32_t address)
{
- const auto *node = disasm_map.FindNodeByOffset(address);
+ const auto *node = disasm_map.FindNodeByAddress(address);
if (node == nullptr) {
return kInstructionSizeStepBytes;
}
@@ -276,22 +276,22 @@ static constexpr bool IsLocalLocation(const DisasmMap &disasm_map, const DisasmN
// Locals are definitely not made for calls
return false;
}
- const bool forward = ref_rec.address < node.offset;
- const size_t min_addr = forward ? ref_rec.address : node.offset;
+ const bool forward = ref_rec.address < node.address;
+ const size_t min_addr = forward ? ref_rec.address : node.address;
const size_t start = min_addr + GetNodeSizeByAddress(disasm_map, min_addr);
- const size_t max_addr = forward ? node.offset : ref_rec.address;
+ const size_t max_addr = forward ? node.address : ref_rec.address;
const size_t end = max_addr + (forward ? 0 : GetNodeSizeByAddress(disasm_map, min_addr));
- for (size_t o = start; o < end;) {
- const auto *intermediate_node = disasm_map.FindNodeByOffset(o);
+ for (size_t addr = start; addr < end;) {
+ const auto *intermediate_node = disasm_map.FindNodeByAddress(addr);
if (intermediate_node) {
if (intermediate_node->ref_by) {
// Another labeled node detected on the jump path, hence
// current node's location cannot be considered local
return false;
}
- o += intermediate_node->size;
+ addr += intermediate_node->size;
} else {
- o += kInstructionSizeStepBytes;
+ addr += kInstructionSizeStepBytes;
}
}
}
@@ -330,9 +330,9 @@ static void RenderNodeDisassembly(
(s.export_labels && node.ref_by && (node.ref_by->refs_count > 1)) ||
export_this_function;
if (export_this_label) {
- fprintf(output, "\n%s.globl\tL%08x\n", s.indent, node.offset);
+ fprintf(output, "\n%s.globl\tL%08x\n", s.indent, node.address);
if (export_this_function) {
- fprintf(output, "%s.type\tL%08x, @function\n", s.indent, node.offset);
+ fprintf(output, "%s.type\tL%08x, @function\n", s.indent, node.address);
}
}
}
@@ -354,18 +354,18 @@ static void RenderNodeDisassembly(
if (s.short_ref_local_labels && is_local) {
fprintf(output, "1:%s", StringWihoutFristNChars(s.indent, (sizeof "1:") - 1));
} else {
- fprintf(output, "L%08x:\n", node.offset);
+ fprintf(output, "L%08x:\n", node.address);
}
}
}
assert(node.op.opcode != OpCode::kNone);
if (ShouldPrintAsRaw(node.op)) {
- auto raw = Op::Raw(GetU16BE(code.buffer + node.offset));
+ auto raw = Op::Raw(GetU16BE(code.buffer + node.address));
raw.FPrint(output, s.indent);
uint32_t i = kInstructionSizeStepBytes;
for (; i < node.size; i += kInstructionSizeStepBytes) {
char arg_str[kArgsBufferSize]{};
- const auto arg = Arg::Raw(GetU16BE(code.buffer + node.offset + i));
+ const auto arg = Arg::Raw(GetU16BE(code.buffer + node.address + i));
arg.SNPrint(arg_str, kArgsBufferSize);
fprintf(output, ", %s", arg_str);
}
@@ -373,11 +373,11 @@ static void RenderNodeDisassembly(
} else {
const bool with_ref = node.ref_kinds && s.labels && (s.abs_labels || s.rel_labels);
const auto *ref1 = (node.ref_kinds & kRef1Mask)
- ? disasm_map.FindNodeByOffset(node.ref1_addr) : nullptr;
+ ? disasm_map.FindNodeByAddress(node.ref1_addr) : nullptr;
const auto *ref2 = (node.ref_kinds & kRef2Mask)
- ? disasm_map.FindNodeByOffset(node.ref2_addr) : nullptr;
- const uint32_t ref1_addr = (with_ref && ref1) ? ref1->offset : 0;
- const uint32_t ref2_addr = (with_ref && ref2) ? ref2->offset : 0;
+ ? disasm_map.FindNodeByAddress(node.ref2_addr) : nullptr;
+ const uint32_t ref1_addr = (with_ref && ref1) ? ref1->address : 0;
+ const uint32_t ref2_addr = (with_ref && ref2) ? ref2->address : 0;
if (with_ref && (ref1 || ref2)) {
const RefKindMask ref_kinds =
(s.abs_labels
@@ -394,7 +394,7 @@ static void RenderNodeDisassembly(
char ref1_label[32]{};
if (ref1) {
if (s.short_ref_local_labels && ref1_is_local) {
- const char dir = ref1_addr <= node.offset ? 'b' : 'f';
+ const char dir = ref1_addr <= node.address ? 'b' : 'f';
snprintf(ref1_label, (sizeof ref1_label), "1%c", dir);
} else {
snprintf(ref1_label, (sizeof ref1_label), "L%08x", ref1_addr);
@@ -404,7 +404,7 @@ static void RenderNodeDisassembly(
char ref2_label[32]{};
if (ref2) {
if (s.short_ref_local_labels && ref2_is_local) {
- const char dir = ref2_addr <= node.offset ? 'b' : 'f';
+ const char dir = ref2_addr <= node.address ? 'b' : 'f';
snprintf(ref2_label, (sizeof ref2_label), "1%c", dir);
} else {
snprintf(ref2_label, (sizeof ref2_label), "L%08x", ref2_addr);
@@ -416,7 +416,7 @@ static void RenderNodeDisassembly(
ref_kinds,
ref1_label,
ref2_label,
- node.offset,
+ node.address,
ref1_addr,
ref2_addr);
if (s.xrefs_to && !(s.short_ref_local_labels && ref1_is_local)) {
@@ -434,7 +434,7 @@ static void RenderNodeDisassembly(
RenderRawDataComment(
raw_data_comment,
(sizeof raw_data_comment) - 1,
- node.offset,
+ node.address,
node.size, code);
fprintf(output, " |%s", raw_data_comment);
}
@@ -445,7 +445,7 @@ static void RenderDisassembly(
FILE *const output, const DisasmMap &disasm_map, const DataBuffer &code, const Settings &s)
{
for (size_t i = 0; i < code.occupied_size;) {
- const DisasmNode *node = disasm_map.FindNodeByOffset(i);
+ const DisasmNode *node = disasm_map.FindNodeByAddress(i);
if (node) {
RenderNodeDisassembly(output, disasm_map, code, s, *node);
i += node->size;
@@ -470,20 +470,20 @@ static void ParseTraceData(DisasmMap &disasm_map, const DataBuffer &trace_data)
errno = 0;
char *startptr = reinterpret_cast<char *>(trace_data.buffer + i);
char *endptr = startptr;
- const long offset = strtol(startptr, &endptr, 10);
- if ((offset == LONG_MAX || offset == LONG_MIN) && errno == ERANGE) {
+ const long address = strtol(startptr, &endptr, 10);
+ if ((address == LONG_MAX || address == LONG_MIN) && errno == ERANGE) {
// Parsing error, just skip
} else if (startptr == endptr) {
// Parsing error, just skip
- } else if (offset % 2) {
- fprintf(stderr, "Error: Uneven PC values are not supported (got PC=0x%08lx), exiting\n", offset);
+ } else if (address % 2) {
+ fprintf(stderr, "Error: Uneven PC values are not supported (got PC=0x%08lx), exiting\n", address);
exit(1);
- } else if (static_cast<unsigned long>(offset) > kRomSizeBytes) {
- fprintf(stderr, "Error: PC values > 4MiB are not supported (got PC=0x%08lx), exiting\n", offset);
+ } else if (static_cast<unsigned long>(address) > kRomSizeBytes) {
+ fprintf(stderr, "Error: PC values > 4MiB are not supported (got PC=0x%08lx), exiting\n", address);
exit(1);
} else {
// Valid value
- disasm_map.InsertTracedNode(offset, TracedNodeType::kInstruction);
+ disasm_map.InsertTracedNode(address, TracedNodeType::kInstruction);
}
if (startptr != endptr) {
i += endptr - startptr - 1;