author    Oxore <oxore@protonmail.com>  2025-01-08 00:46:44 +0300
committer Oxore <oxore@protonmail.com>  2025-02-01 18:26:18 +0300
commit    6769fca1dd90f4e34e1fd6b2256c3795bbcaf658 (patch)
tree      80899430ea776d80b98be4e198591a61f8384d16 /src/disasm.cpp
parent    8340b1f42288e0143bca8a254600fb34025ec803 (diff)
WIP
Diffstat (limited to 'src/disasm.cpp')
-rw-r--r--  src/disasm.cpp  |  86
1 file changed, 53 insertions(+), 33 deletions(-)
diff --git a/src/disasm.cpp b/src/disasm.cpp
index 35c2351..3d1ac4a 100644
--- a/src/disasm.cpp
+++ b/src/disasm.cpp
@@ -3,6 +3,7 @@
#include "disasm.h"
#include "m68k.h"
+#include "debug.h"
#include <cassert>
#include <cerrno>
@@ -12,11 +13,11 @@
void DisasmNode::AddReferencedBy(const uint32_t address_from, const ReferenceType ref_type)
{
ReferenceRecord *node = new ReferenceRecord{nullptr, ref_type, address_from};
- assert(node);
+ ASSERT(node);
if (this->last_ref_by) {
this->last_ref_by->next = node;
} else {
- assert(nullptr == this->ref_by);
+ ASSERT(nullptr == this->ref_by);
this->ref_by = node;
}
this->last_ref_by = node;
@@ -30,30 +31,45 @@ DisasmNode::~DisasmNode()
ref = ref->next;
delete prev;
}
-}
-
-static constexpr uint32_t AlignInstructionAddress(const uint32_t address)
-{
- return address & ~1UL;
+ ref_by = nullptr;
+ last_ref_by = nullptr;
}
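
For reference, the removed AlignInstructionAddress helper clears the low bit so that addresses stay 2-byte aligned, as M68k opcodes are word-aligned; it is still called below, so presumably it moved into a header. The removed body, verbatim:

    static constexpr uint32_t AlignInstructionAddress(const uint32_t address)
    {
        // Clear the least significant bit: 0x101 becomes 0x100,
        // while 0x100 stays 0x100.
        return address & ~1UL;
    }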
DisasmNode &DisasmMap::insertNode(uint32_t address, NodeType type)
{
+ ASSERT(address < _code_size);
+ if (IsInstruction(type)) {
+ address = AlignInstructionAddress(address);
+ }
auto *node = findNodeByAddress(address);
if (node) {
// Instruction nodes take precedence over data nodes. If a node that
// was previously accessed only as data now turns out to be an
// instruction, then it must become an instruction node.
+ // XXX: Traced data must not be classified as an instruction, but
+ // support for traced data is yet to come.
if (IsInstruction(type) && !IsInstruction(node->type)) {
- *const_cast<NodeType*>(&node->type) = type;
- // Make sure it is OpCode::kNone so it will be properly disassembled
- node->op = Op{};
+ if (0 == (node->size & 1) && 0 == (node->address & 1)) {
+ *const_cast<NodeType*>(&node->type) = type;
+ // Make sure it is OpCode::kNone so it will be properly disassembled
+ node->op = Op{};
+ }
}
return *node;
}
- node = new DisasmNode(DisasmNode{type, AlignInstructionAddress(address)});
- assert(node);
- _map[address / kInstructionSizeStepBytes] = node;
+ if (IsInstruction(type) && _map[address + 1]) {
+ // Sorry, can't do an instruction here: only 1 byte of data can fit.
+ node = new DisasmNode(DisasmNode::DataRaw8(address, 0));
+ ASSERT(node->size == 1);
+ } else {
+ node = new DisasmNode(DisasmNode::Simple(type, address));
+ }
+ ASSERT(node);
+ // Spread the node across every byte-slot its size covers
+ for (size_t o = 0; o < node->size; o++) {
+ ASSERT(_map[address + o] == nullptr || _map[address + o] == node);
+ _map[address + o] = node;
+ }
return *node;
}
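
With _map now indexed per byte (one slot per address instead of one per 2-byte instruction step), a node of size N owns N consecutive slots, so lookup presumably reduces to a direct index. A minimal sketch, assuming findNodeByAddress keeps its current signature and _code_size guards the bounds:

    DisasmNode *DisasmMap::findNodeByAddress(const uint32_t address) const
    {
        // Every byte covered by a node points at that node, so there is no
        // scaling by kInstructionSizeStepBytes and no backward scanning.
        return (address < _code_size) ? _map[address] : nullptr;
    }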
@@ -70,7 +86,7 @@ DisasmNode &DisasmMap::insertReferencedBy(
void DisasmMap::InsertNode(uint32_t address, NodeType type)
{
- assert(_type == DisasmMapType::kTraced);
+ ASSERT(_type == DisasmMapType::kTraced);
insertNode(address, type);
}
@@ -114,8 +130,7 @@ bool DisasmMap::ApplySymbolsFromElf(const ELF::Image &elf)
if (null_symbol.name != nullptr && *null_symbol.name != '\0') {
const size_t ret = fwrite(
&null_symbol, sizeof null_symbol, 1, symtab_stream);
- (void) ret;
- assert(ret == 1);
+ ASSERT(ret == 1), (void)ret;
}
const size_t nentries = symtab.size/symtab.entsize;
for (size_t i = 0; i < nentries; i++) {
@@ -131,8 +146,7 @@ bool DisasmMap::ApplySymbolsFromElf(const ELF::Image &elf)
const auto symbol = Symbol{elfsym.value, type, elfsym.name, elfsym.size};
if (symbol.name != nullptr && *symbol.name != '\0') {
const size_t ret = fwrite(&symbol, sizeof symbol, 1, symtab_stream);
- (void) ret;
- assert(ret == 1);
+ ASSERT(ret == 1), (void)ret;
}
}
}
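
The standard assert calls are replaced with an ASSERT macro from the newly included debug.h. Its definition is not part of this patch; a minimal sketch of what such a macro commonly looks like, purely as an assumption (the trailing ", (void)ret" keeps ret formally used even if ASSERT compiles away to nothing):

    // Assumption: a self-contained ASSERT that reports and aborts.
    #include <cstdio>
    #include <cstdlib>

    #define ASSERT(expr) \
        do { \
            if (!(expr)) { \
                fprintf(stderr, "%s:%d: ASSERT(%s) failed\n", \
                        __FILE__, __LINE__, #expr); \
                abort(); \
            } \
        } while (0)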
@@ -173,6 +187,7 @@ static constexpr bool IsNextLikelyAnInstruction(const Op &op)
{
return (op.opcode != OpCode::kNone &&
op.opcode != OpCode::kRaw &&
+ op.opcode != OpCode::kRaw8 &&
!IsBRA(op) &&
op.opcode != OpCode::kJMP &&
op.opcode != OpCode::kRTS &&
@@ -183,13 +198,16 @@ static constexpr bool IsNextLikelyAnInstruction(const Op &op)
void DisasmMap::Disasm(
const DataView &code, const Settings &s, size_t at, bool nested)
{
+ at = AlignInstructionAddress(at);
+ _code_size = code.size;
+ ASSERT(_code_size <= kRomSizeBytes);
// Some of logic of this function is covered by integration tests in
// `test_walk_and_follow_jumps.bash`.
bool inside_code_span = nested;
- while (at < Min(kRomSizeBytes, code.size)) {
+ while (at < code.size) {
DisasmNode *node;
if (_type == DisasmMapType::kTraced) {
- node = _map[at / kInstructionSizeStepBytes];
+ node = _map[at];
if (!node) {
if (inside_code_span) {
node = &insertNode(at, NodeType::kTracedInstruction);
@@ -198,19 +216,19 @@ void DisasmMap::Disasm(
continue;
}
}
+ ASSERT(node->address == at);
} else {
node = &insertNode(at, NodeType::kTracedInstruction);
}
- const bool perform_disasm = node->op.opcode == OpCode::kNone ||
- (_type == DisasmMapType::kRaw && node->op.opcode == OpCode::kRaw) ||
- inside_code_span;
+ const bool perform_disasm = node->IsYetToBeHandled(_type) || inside_code_span;
if (perform_disasm) {
const auto size = node->Disasm(code, s);
- assert(size >= kInstructionSizeStepBytes);
if (canBeAllocated(*node)) {
// Spread across the size
- for (size_t o = kInstructionSizeStepBytes; o < size; o++) {
- _map[(node->address + o) / kInstructionSizeStepBytes] = node;
+ const size_t address = node->address;
+ for (size_t o = 0; o < size; o++) {
+ ASSERT(_map[address + o] == nullptr || _map[address + o] == node);
+ _map[address + o] = node;
}
} else {
node->DisasmAsRaw(code);
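
DisasmNode::IsYetToBeHandled is new in this patch and its body is not shown; the removed condition above suggests it is a direct extraction of that logic into the node. A sketch under that assumption:

    bool DisasmNode::IsYetToBeHandled(const DisasmMapType type) const
    {
        // Presumably equivalent to the inline condition this patch removes:
        // either not disassembled yet, or still raw data in a raw-mode map.
        return op.opcode == OpCode::kNone ||
            (type == DisasmMapType::kRaw && op.opcode == OpCode::kRaw);
    }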
@@ -227,11 +245,11 @@ void DisasmMap::Disasm(
const bool has_code_ref1 = node->ref1_addr < code.size && has_ref1;
if (has_code_ref1) {
const NodeType type = (node->ref_kinds & (kRef1ReadMask | kRef1WriteMask))
- ? NodeType::kData : NodeType::kRefInstruction;
+ ? NodeType::kRefData : NodeType::kRefInstruction;
const auto ref_type = ReferenceTypeFromRefKindMask1(node->ref_kinds);
auto &ref_node = insertReferencedBy(
node->address, node->ref1_addr, type, ref_type);
- if (ref_node.op.opcode == OpCode::kNone) {
+ if (ref_node.IsYetToBeHandled(_type)) {
if (s.follow_jumps) {
Disasm(code, s, ref_node.address, true);
} else {
@@ -243,11 +261,11 @@ void DisasmMap::Disasm(
const bool has_code_ref2 = (has_ref2 && node->ref2_addr < code.size);
if (has_code_ref2) {
const NodeType type = (node->ref_kinds & (kRef2ReadMask | kRef2WriteMask))
- ? NodeType::kData : NodeType::kRefInstruction;
+ ? NodeType::kRefData : NodeType::kRefInstruction;
const auto ref_type = ReferenceTypeFromRefKindMask2(node->ref_kinds);
auto &ref_node = insertReferencedBy(
node->address, node->ref2_addr, type, ref_type);
- if (ref_node.op.opcode == OpCode::kNone) {
+ if (ref_node.IsYetToBeHandled(_type)) {
if (s.follow_jumps) {
Disasm(code, s, ref_node.address, true);
} else {
@@ -263,19 +281,21 @@ void DisasmMap::Disasm(
DisasmMap::~DisasmMap()
{
- for (size_t i = 0; i < kDisasmMapSizeElements; i++) {
+ ASSERT(_map != nullptr);
+ for (size_t i = 0; i < kRomSizeBytes; i++) {
auto *const node = _map[i];
if (!node) {
continue;
}
- const auto size = node->size / kInstructionSizeStepBytes;
+ const auto size = node->size;
for (size_t o = 0; o < size; o++) {
- assert(_map[i + o] == node);
+ ASSERT(_map[i + o] == node);
_map[i + o] = nullptr;
}
delete node;
i += size - 1;
}
+ free(_map);
if (_symtab != nullptr) {
free(_symtab);
}