black'ed source tree

This commit is contained in:
Christian Vogelgsang 2020-03-10 20:19:31 +01:00
parent 54479250d5
commit 21d2e192c8
390 changed files with 42070 additions and 39826 deletions

View File

@ -1,53 +1,53 @@
from .elf.BinFmtELF import BinFmtELF
from .hunk.BinFmtHunk import BinFmtHunk
class BinFmt:
def __init__(self):
self.formats = [BinFmtHunk(), BinFmtELF()]
def __init__(self):
self.formats = [BinFmtHunk(), BinFmtELF()]
def get_format(self, path):
"""get instance of BinFmt loader or None"""
with open(path, "rb") as f:
return self.get_format_fobj(f)
def get_format(self, path):
"""get instance of BinFmt loader or None"""
with open(path, "rb") as f:
return self.get_format_fobj(f)
def get_format_fobj(self, fobj):
"""get instance of BinFmt loader or None"""
for f in self.formats:
if f.is_image_fobj(fobj):
return f
return None
def get_format_fobj(self, fobj):
"""get instance of BinFmt loader or None"""
for f in self.formats:
if f.is_image_fobj(fobj):
return f
return None
def is_image(self, path):
"""check if a given file is a supported binary file"""
with open(path, "rb") as f:
return self.is_image_fobj(f)
def is_image(self, path):
"""check if a given file is a supported binary file"""
with open(path, "rb") as f:
return self.is_image_fobj(f)
def is_image_fobj(self, fobj):
"""check if a given file is a supported binary file"""
f = self.get_format_fobj(fobj)
return f is not None
def is_image_fobj(self, fobj):
"""check if a given file is a supported binary file"""
f = self.get_format_fobj(fobj)
return f is not None
def load_image(self, path):
"""load a binary file and return a BinImage. unknown format returns None"""
with open(path, "rb") as f:
return self.load_image_fobj(f)
def load_image(self, path):
"""load a binary file and return a BinImage. unknown format returns None"""
with open(path, "rb") as f:
return self.load_image_fobj(f)
def load_image_fobj(self, fobj):
"""load a binary file and return a BinImage. unknown format returns None"""
f = self.get_format_fobj(fobj)
if f is not None:
return f.load_image_fobj(fobj)
else:
return None
def load_image_fobj(self, fobj):
"""load a binary file and return a BinImage. unknown format returns None"""
f = self.get_format_fobj(fobj)
if f is not None:
return f.load_image_fobj(fobj)
else:
return None
# mini test
if __name__ == '__main__':
import sys
bf = BinFmt()
for a in sys.argv[1:]:
ok = bf.is_image(a)
bi = bf.load_image(a)
print(a, ok, str(bi))
if __name__ == "__main__":
import sys
bf = BinFmt()
for a in sys.argv[1:]:
ok = bf.is_image(a)
bi = bf.load_image(a)
print(a, ok, str(bi))

View File

@ -1,279 +1,282 @@
SEGMENT_TYPE_CODE = 0
SEGMENT_TYPE_DATA = 1
SEGMENT_TYPE_BSS = 2
SEGMENT_FLAG_READ_ONLY = 1
segment_type_names = [
"CODE", "DATA", "BSS"
]
segment_type_names = ["CODE", "DATA", "BSS"]
BIN_IMAGE_TYPE_HUNK = 0
BIN_IMAGE_TYPE_ELF = 1
bin_image_type_names = [
"hunk", "elf"
]
bin_image_type_names = ["hunk", "elf"]
class Reloc:
def __init__(self, offset, width=2, addend=0):
self.offset = offset
self.width = width
self.addend = addend
def __init__(self, offset, width=2, addend=0):
self.offset = offset
self.width = width
self.addend = addend
def get_offset(self):
return self.offset
def get_offset(self):
return self.offset
def get_width(self):
return self.width
def get_width(self):
return self.width
def get_addend(self):
return self.addend
def get_addend(self):
return self.addend
class Relocations:
def __init__(self, to_seg):
self.to_seg = to_seg
self.entries = []
def __init__(self, to_seg):
self.to_seg = to_seg
self.entries = []
def add_reloc(self, reloc):
self.entries.append(reloc)
def add_reloc(self, reloc):
self.entries.append(reloc)
def get_relocs(self):
return self.entries
def get_relocs(self):
return self.entries
class Symbol:
def __init__(self, offset, name, file_name=None):
self.offset = offset
self.name = name
self.file_name = file_name
def __init__(self, offset, name, file_name=None):
self.offset = offset
self.name = name
self.file_name = file_name
def get_offset(self):
return self.offset
def get_offset(self):
return self.offset
def get_name(self):
return self.name
def get_name(self):
return self.name
def get_file_name(self):
return self.file_name
def get_file_name(self):
return self.file_name
class SymbolTable:
def __init__(self):
self.symbols = []
def __init__(self):
self.symbols = []
def add_symbol(self, symbol):
self.symbols.append(symbol)
def add_symbol(self, symbol):
self.symbols.append(symbol)
def get_symbols(self):
return self.symbols
def get_symbols(self):
return self.symbols
class DebugLineEntry:
def __init__(self, offset, src_line, flags=0):
self.offset = offset
self.src_line = src_line
self.flags = flags
self.file_ = None
def __init__(self, offset, src_line, flags=0):
self.offset = offset
self.src_line = src_line
self.flags = flags
self.file_ = None
def get_offset(self):
return self.offset
def get_offset(self):
return self.offset
def get_src_line(self):
return self.src_line
def get_src_line(self):
return self.src_line
def get_flags(self):
return self.flags
def get_flags(self):
return self.flags
def get_file(self):
return self.file_
def get_file(self):
return self.file_
class DebugLineFile:
def __init__(self, src_file, dir_name=None, base_offset=0):
self.src_file = src_file
self.dir_name = dir_name
self.base_offset = base_offset
self.entries = []
def __init__(self, src_file, dir_name=None, base_offset=0):
self.src_file = src_file
self.dir_name = dir_name
self.base_offset = base_offset
self.entries = []
def get_src_file(self):
return self.src_file
def get_src_file(self):
return self.src_file
def get_dir_name(self):
return self.dir_name
def get_dir_name(self):
return self.dir_name
def get_entries(self):
return self.entries
def get_entries(self):
return self.entries
def get_base_offset(self):
return self.base_offset
def get_base_offset(self):
return self.base_offset
def add_entry(self, e):
self.entries.append(e)
e.file_ = self
def add_entry(self, e):
self.entries.append(e)
e.file_ = self
class DebugLine:
def __init__(self):
self.files = []
def __init__(self):
self.files = []
def add_file(self, src_file):
self.files.append(src_file)
def add_file(self, src_file):
self.files.append(src_file)
def get_files(self):
return self.files
def get_files(self):
return self.files
class Segment:
def __init__(self, seg_type, size, data=None, flags=0):
self.seg_type = seg_type
self.size = size
self.data = data
self.flags = flags
self.relocs = {}
self.symtab = None
self.id = None
self.file_data = None
self.debug_line = None
def __init__(self, seg_type, size, data=None, flags=0):
self.seg_type = seg_type
self.size = size
self.data = data
self.flags = flags
self.relocs = {}
self.symtab = None
self.id = None
self.file_data = None
self.debug_line = None
def __str__(self):
# relocs
relocs = []
for to_seg in self.relocs:
r = self.relocs[to_seg]
relocs.append("(#%d:size=%d)" % (to_seg.id, len(r.entries)))
# symtab
if self.symtab is not None:
symtab = "symtab=#%d" % len(self.symtab.symbols)
else:
symtab = ""
# debug_line
if self.debug_line is not None:
dl_files = self.debug_line.get_files()
file_info = []
for dl_file in dl_files:
n = len(dl_file.entries)
file_info.append("(%s:#%d)" % (dl_file.src_file, n))
debug_line = "debug_line=" + ",".join(file_info)
else:
debug_line = ""
# summary
return "[#%d:%s:size=%d,flags=%d,%s,%s,%s]" % (self.id,
segment_type_names[self.seg_type], self.size, self.flags,
",".join(relocs), symtab, debug_line)
def __str__(self):
# relocs
relocs = []
for to_seg in self.relocs:
r = self.relocs[to_seg]
relocs.append("(#%d:size=%d)" % (to_seg.id, len(r.entries)))
# symtab
if self.symtab is not None:
symtab = "symtab=#%d" % len(self.symtab.symbols)
else:
symtab = ""
# debug_line
if self.debug_line is not None:
dl_files = self.debug_line.get_files()
file_info = []
for dl_file in dl_files:
n = len(dl_file.entries)
file_info.append("(%s:#%d)" % (dl_file.src_file, n))
debug_line = "debug_line=" + ",".join(file_info)
else:
debug_line = ""
# summary
return "[#%d:%s:size=%d,flags=%d,%s,%s,%s]" % (
self.id,
segment_type_names[self.seg_type],
self.size,
self.flags,
",".join(relocs),
symtab,
debug_line,
)
def get_type(self):
return self.seg_type
def get_type(self):
return self.seg_type
def get_type_name(self):
return segment_type_names[self.seg_type]
def get_type_name(self):
return segment_type_names[self.seg_type]
def get_size(self):
return self.size
def get_size(self):
return self.size
def get_data(self):
return self.data
def get_data(self):
return self.data
def add_reloc(self, to_seg, relocs):
self.relocs[to_seg] = relocs
def add_reloc(self, to_seg, relocs):
self.relocs[to_seg] = relocs
def get_reloc_to_segs(self):
keys = list(self.relocs.keys())
return sorted(keys, key=lambda x: x.id)
def get_reloc_to_segs(self):
keys = list(self.relocs.keys())
return sorted(keys, key=lambda x: x.id)
def get_reloc(self, to_seg):
if to_seg in self.relocs:
return self.relocs[to_seg]
else:
return None
def get_reloc(self, to_seg):
if to_seg in self.relocs:
return self.relocs[to_seg]
else:
return None
def set_symtab(self, symtab):
self.symtab = symtab
def set_symtab(self, symtab):
self.symtab = symtab
def get_symtab(self):
return self.symtab
def get_symtab(self):
return self.symtab
def set_debug_line(self, debug_line):
self.debug_line = debug_line
def set_debug_line(self, debug_line):
self.debug_line = debug_line
def get_debug_line(self):
return self.debug_line
def get_debug_line(self):
return self.debug_line
def set_file_data(self, file_data):
"""set associated loaded binary file"""
self.file_data = file_data
def set_file_data(self, file_data):
"""set associated loaded binary file"""
self.file_data = file_data
def get_file_data(self):
"""get associated loaded binary file"""
return self.file_data
def get_file_data(self):
"""get associated loaded binary file"""
return self.file_data
def find_symbol(self, offset):
symtab = self.get_symtab()
if symtab is None:
return None
for symbol in symtab.get_symbols():
off = symbol.get_offset()
if off == offset:
return symbol.get_name()
return None
def find_symbol(self, offset):
symtab = self.get_symtab()
if symtab is None:
return None
for symbol in symtab.get_symbols():
off = symbol.get_offset()
if off == offset:
return symbol.get_name()
return None
def find_reloc(self, offset, size):
to_segs = self.get_reloc_to_segs()
for to_seg in to_segs:
reloc = self.get_reloc(to_seg)
for r in reloc.get_relocs():
off = r.get_offset()
if off >= offset and off <= (offset + size):
return r,to_seg,off
return None
def find_reloc(self, offset, size):
to_segs = self.get_reloc_to_segs()
for to_seg in to_segs:
reloc = self.get_reloc(to_seg)
for r in reloc.get_relocs():
off = r.get_offset()
if off >= offset and off <= (offset + size):
return r, to_seg, off
return None
def find_debug_line(self, offset):
debug_line = self.debug_line
if debug_line is None:
return None
for df in debug_line.get_files():
for e in df.get_entries():
if e.get_offset() == offset:
return e
return None
def find_debug_line(self, offset):
debug_line = self.debug_line
if debug_line is None:
return None
for df in debug_line.get_files():
for e in df.get_entries():
if e.get_offset() == offset:
return e
return None
class BinImage:
"""A binary image contains all the segments of a program's binary image.
"""A binary image contains all the segments of a program's binary image.
"""
def __init__(self, file_type):
self.segments = []
self.file_data = None
self.file_type = file_type
def __str__(self):
return "<%s>" % ",".join(map(str,self.segments))
def __init__(self, file_type):
self.segments = []
self.file_data = None
self.file_type = file_type
def get_size(self):
total_size = 0
for seg in self.segments:
total_size += seg.get_size()
return total_size
def __str__(self):
return "<%s>" % ",".join(map(str, self.segments))
def add_segment(self, seg):
seg.id = len(self.segments)
self.segments.append(seg)
def get_size(self):
total_size = 0
for seg in self.segments:
total_size += seg.get_size()
return total_size
def get_segments(self):
return self.segments
def add_segment(self, seg):
seg.id = len(self.segments)
self.segments.append(seg)
def set_file_data(self, file_data):
"""set associated loaded binary file"""
self.file_data = file_data
def get_segments(self):
return self.segments
def get_file_data(self):
"""get associated loaded binary file"""
return self.file_data
def set_file_data(self, file_data):
"""set associated loaded binary file"""
self.file_data = file_data
def get_segment_names(self):
names = []
for seg in self.segments:
names.append(seg.get_type_name())
return names
def get_file_data(self):
"""get associated loaded binary file"""
return self.file_data
def get_segment_names(self):
names = []
for seg in self.segments:
names.append(seg.get_type_name())
return names

View File

@ -1,77 +1,86 @@
from amitools.vamos.machine import DisAsm
from .BinImage import *
class Disassemble:
"""allows to disassemble code segments of a BinImage"""
def __init__(self, cpu='68000'):
self.disasm = DisAsm.create(cpu)
"""allows to disassemble code segments of a BinImage"""
def _get_line_info(self, segment, addr, size):
infos = []
# info about src line
d = segment.find_debug_line(addr)
if d is not None:
f = d.get_file()
infos.append("src %10s:%d [%s]" % (f.get_src_file(),
d.get_src_line(),
f.get_dir_name()))
# info about relocation
r = segment.find_reloc(addr, size)
if r is not None:
delta = r[2] - addr
infos.append("reloc +%02d: (#%02d + %08x)" % (delta, r[1].id, r[0].addend))
return infos
def __init__(self, cpu="68000"):
self.disasm = DisAsm.create(cpu)
def disassemble(self, segment, bin_img):
# make sure its a code segment
if segment.seg_type != SEGMENT_TYPE_CODE:
return None
def _get_line_info(self, segment, addr, size):
infos = []
# info about src line
d = segment.find_debug_line(addr)
if d is not None:
f = d.get_file()
infos.append(
"src %10s:%d [%s]"
% (f.get_src_file(), d.get_src_line(), f.get_dir_name())
)
# info about relocation
r = segment.find_reloc(addr, size)
if r is not None:
delta = r[2] - addr
infos.append("reloc +%02d: (#%02d + %08x)" % (delta, r[1].id, r[0].addend))
return infos
# generate raw assembly
data = segment.data
lines = self.disasm.disassemble_block(data)
def disassemble(self, segment, bin_img):
# make sure its a code segment
if segment.seg_type != SEGMENT_TYPE_CODE:
return None
# process lines
result = []
for l in lines:
addr = l[0]
word = l[1]
code = l[2]
# generate raw assembly
data = segment.data
lines = self.disasm.disassemble_block(data)
# try to find a symbol for this addr
symbol = segment.find_symbol(addr)
if symbol is not None:
line = "\t\t\t\t%s:" % symbol
result.append(line)
# process lines
result = []
for l in lines:
addr = l[0]
word = l[1]
code = l[2]
# create final line
line = "%08x\t%-20s\t%-30s " % (addr," ".join(["%04x" %x for x in word]),code)
# try to find a symbol for this addr
symbol = segment.find_symbol(addr)
if symbol is not None:
line = "\t\t\t\t%s:" % symbol
result.append(line)
# create line info
size = len(word) * 2
info = self._get_line_info(segment, addr, size)
if info is None or len(info) == 0:
result.append(line)
else:
result.append(line + "; " + info[0])
spc = " " * len(line)
for i in info[1:]:
result.append(spc + "; " + i)
# create final line
line = "%08x\t%-20s\t%-30s " % (
addr,
" ".join(["%04x" % x for x in word]),
code,
)
# create line info
size = len(word) * 2
info = self._get_line_info(segment, addr, size)
if info is None or len(info) == 0:
result.append(line)
else:
result.append(line + "; " + info[0])
spc = " " * len(line)
for i in info[1:]:
result.append(spc + "; " + i)
return result
return result
# mini test
if __name__ == '__main__':
import sys
from .BinFmt import BinFmt
bf = BinFmt()
for a in sys.argv[1:]:
bi = bf.load_image(a)
if bi is not None:
print(a)
d = Disassemble()
for seg in bi.get_segments():
if seg.seg_type == SEGMENT_TYPE_CODE:
lines = d.disassemble(seg, bi)
for l in lines:
print(l)
if __name__ == "__main__":
import sys
from .BinFmt import BinFmt
bf = BinFmt()
for a in sys.argv[1:]:
bi = bf.load_image(a)
if bi is not None:
print(a)
d = Disassemble()
for seg in bi.get_segments():
if seg.seg_type == SEGMENT_TYPE_CODE:
lines = d.disassemble(seg, bi)
for l in lines:
print(l)

View File

@ -1,59 +1,68 @@
from .BinImage import *
import amitools.util.HexDump as HexDump
class Dumper:
def __init__(self, bin_img):
self.bin_img = bin_img
def dump(self, hex_dump=False, show_reloc=False, show_symbols=False, show_debug_line=False):
for seg in self.bin_img.get_segments():
seg_type = seg.seg_type
seg_type_name = segment_type_names[seg_type]
size = seg.size
print("#%02d %04s %08x/%10d" % (seg.id, seg_type_name, size, size))
# show hex dump?
data = seg.data
if data is not None and hex_dump:
HexDump.print_hex(data, 4)
# show reloc
if show_reloc:
to_segs = seg.get_reloc_to_segs()
for to_seg in to_segs:
print(" RELOC to #%02d" % (to_seg.id))
reloc = seg.get_reloc(to_seg)
for r in reloc.get_relocs():
off = r.get_offset()
add = r.get_addend()
print(" %08x/%10d +%08x/%10d" % (off, off, add, add))
# show symbols
if show_symbols:
symtab = seg.get_symtab()
if symtab is not None:
print(" SYMBOLS")
for sym in symtab.get_symbols():
off = sym.get_offset()
name = sym.get_name()
print(" %08x/%10d %s" % (off, off, name))
# show debug info
if show_debug_line:
debug_line = seg.get_debug_line()
if debug_line is not None:
print(" DEBUG LINE")
for f in debug_line.get_files():
print(" FILE: [%s] %s" % (f.get_dir_name(), f.get_src_file()))
for e in f.get_entries():
print(" %08x %d" % (e.get_offset(), e.get_src_line()))
class Dumper:
def __init__(self, bin_img):
self.bin_img = bin_img
def dump(
self,
hex_dump=False,
show_reloc=False,
show_symbols=False,
show_debug_line=False,
):
for seg in self.bin_img.get_segments():
seg_type = seg.seg_type
seg_type_name = segment_type_names[seg_type]
size = seg.size
print("#%02d %04s %08x/%10d" % (seg.id, seg_type_name, size, size))
# show hex dump?
data = seg.data
if data is not None and hex_dump:
HexDump.print_hex(data, 4)
# show reloc
if show_reloc:
to_segs = seg.get_reloc_to_segs()
for to_seg in to_segs:
print(" RELOC to #%02d" % (to_seg.id))
reloc = seg.get_reloc(to_seg)
for r in reloc.get_relocs():
off = r.get_offset()
add = r.get_addend()
print(" %08x/%10d +%08x/%10d" % (off, off, add, add))
# show symbols
if show_symbols:
symtab = seg.get_symtab()
if symtab is not None:
print(" SYMBOLS")
for sym in symtab.get_symbols():
off = sym.get_offset()
name = sym.get_name()
print(" %08x/%10d %s" % (off, off, name))
# show debug info
if show_debug_line:
debug_line = seg.get_debug_line()
if debug_line is not None:
print(" DEBUG LINE")
for f in debug_line.get_files():
print(
" FILE: [%s] %s" % (f.get_dir_name(), f.get_src_file())
)
for e in f.get_entries():
print(" %08x %d" % (e.get_offset(), e.get_src_line()))
# mini test
if __name__ == '__main__':
import sys
from .BinFmt import BinFmt
bf = BinFmt()
for a in sys.argv[1:]:
bi = bf.load_image(a)
if bi is not None:
print(a)
d = Dumper(bi)
d.dump(True, True, True, True)
if __name__ == "__main__":
import sys
from .BinFmt import BinFmt
bf = BinFmt()
for a in sys.argv[1:]:
bi = bf.load_image(a)
if bi is not None:
print(a)
d = Dumper(bi)
d.dump(True, True, True, True)

View File

@ -1,116 +1,120 @@
import struct
class Relocate:
"""Relocate a BinImage to given addresses"""
def __init__(self, bin_img, verbose=False):
self.bin_img = bin_img
self.verbose = verbose
"""Relocate a BinImage to given addresses"""
def get_sizes(self):
"""return a list of the required sizes for all sections"""
sizes = []
for segment in self.bin_img.get_segments():
size = segment.size
sizes.append(size)
return sizes
def __init__(self, bin_img, verbose=False):
self.bin_img = bin_img
self.verbose = verbose
def get_total_size(self, padding=0):
"""return the total size of all segments appended. useful for one large blob"""
sizes = self.get_sizes()
total = 0
for s in sizes:
total += s + padding
return total
def get_sizes(self):
"""return a list of the required sizes for all sections"""
sizes = []
for segment in self.bin_img.get_segments():
size = segment.size
sizes.append(size)
return sizes
def get_seq_addrs(self, base_addr, padding=0):
"""generate a sequence of addresses for continous segments in one blob"""
sizes = self.get_sizes()
addrs = []
addr = base_addr
for s in sizes:
addrs.append(addr)
addr += s + padding
return addrs
def get_total_size(self, padding=0):
"""return the total size of all segments appended. useful for one large blob"""
sizes = self.get_sizes()
total = 0
for s in sizes:
total += s + padding
return total
def relocate_one_block(self, base_addr, padding=0):
total_size = self.get_total_size(padding)
data = bytearray(total_size)
addrs = self.get_seq_addrs(base_addr, padding)
offset = 0
segs = self.bin_img.get_segments()
for segment in segs:
self._copy_data(data, segment, offset)
self._reloc_data(data, segment, addrs, offset)
offset += segment.size + padding
return data
def get_seq_addrs(self, base_addr, padding=0):
"""generate a sequence of addresses for continous segments in one blob"""
sizes = self.get_sizes()
addrs = []
addr = base_addr
for s in sizes:
addrs.append(addr)
addr += s + padding
return addrs
def relocate(self, addrs, in_data=None):
"""perform relocations on segments and return relocated data"""
segs = self.bin_img.get_segments()
if len(segs) != len(addrs):
raise ValueError("addrs != segments")
datas = []
for segment in segs:
# allocate new buffer
data = bytearray(segment.size)
self._copy_data(data, segment)
self._reloc_data(data, segment, addrs)
datas.append(data)
return datas
def relocate_one_block(self, base_addr, padding=0):
total_size = self.get_total_size(padding)
data = bytearray(total_size)
addrs = self.get_seq_addrs(base_addr, padding)
offset = 0
segs = self.bin_img.get_segments()
for segment in segs:
self._copy_data(data, segment, offset)
self._reloc_data(data, segment, addrs, offset)
offset += segment.size + padding
return data
def _copy_data(self, data, segment, offset=0):
# allocate segment data
size = segment.size
src_data = segment.data
if src_data is not None:
src_len = len(src_data)
data[offset:src_len+offset] = src_data
def relocate(self, addrs, in_data=None):
"""perform relocations on segments and return relocated data"""
segs = self.bin_img.get_segments()
if len(segs) != len(addrs):
raise ValueError("addrs != segments")
datas = []
for segment in segs:
# allocate new buffer
data = bytearray(segment.size)
self._copy_data(data, segment)
self._reloc_data(data, segment, addrs)
datas.append(data)
return datas
if self.verbose:
print("#%02d @%06x +%06x" % (segment.id, addrs[segment.id], size))
def _copy_data(self, data, segment, offset=0):
# allocate segment data
size = segment.size
src_data = segment.data
if src_data is not None:
src_len = len(src_data)
data[offset : src_len + offset] = src_data
def _reloc_data(self, data, segment, addrs, offset=0):
# find relocations
to_segs = segment.get_reloc_to_segs()
for to_seg in to_segs:
# get target segment's address
to_id = to_seg.id
to_addr = addrs[to_id]
# get relocations
reloc = segment.get_reloc(to_seg)
for r in reloc.get_relocs():
self._reloc(segment.id, data, r, to_addr, to_id, offset)
if self.verbose:
print("#%02d @%06x +%06x" % (segment.id, addrs[segment.id], size))
def _reloc(self, my_id, data, reloc, to_addr, to_id, extra_offset):
"""relocate one entry"""
offset = reloc.get_offset() + extra_offset
delta = self._read_long(data, offset) + reloc.addend
addr = to_addr + delta
self._write_long(data, offset, addr)
if self.verbose:
print("#%02d + %06x: %06x (delta) + @%06x (#%02d) -> %06x" %
(my_id, offset, delta, to_addr, to_id, addr))
def _reloc_data(self, data, segment, addrs, offset=0):
# find relocations
to_segs = segment.get_reloc_to_segs()
for to_seg in to_segs:
# get target segment's address
to_id = to_seg.id
to_addr = addrs[to_id]
# get relocations
reloc = segment.get_reloc(to_seg)
for r in reloc.get_relocs():
self._reloc(segment.id, data, r, to_addr, to_id, offset)
def _read_long(self, data, offset):
d = data[offset:offset+4]
return struct.unpack(">i",d)[0]
def _reloc(self, my_id, data, reloc, to_addr, to_id, extra_offset):
"""relocate one entry"""
offset = reloc.get_offset() + extra_offset
delta = self._read_long(data, offset) + reloc.addend
addr = to_addr + delta
self._write_long(data, offset, addr)
if self.verbose:
print(
"#%02d + %06x: %06x (delta) + @%06x (#%02d) -> %06x"
% (my_id, offset, delta, to_addr, to_id, addr)
)
def _write_long(self, data, offset, value):
d = struct.pack(">i",value)
data[offset:offset+4] = d
def _read_long(self, data, offset):
d = data[offset : offset + 4]
return struct.unpack(">i", d)[0]
def _write_long(self, data, offset, value):
d = struct.pack(">i", value)
data[offset : offset + 4] = d
# mini test
if __name__ == '__main__':
import sys
from .BinFmt import BinFmt
bf = BinFmt()
for a in sys.argv[1:]:
bi = bf.load_image(a)
if bi is not None:
print(a)
r = Relocate(bi, True)
addrs = r.get_seq_addrs(0)
datas = r.relocate(addrs)
data = r.relocate_one_block(0)
if __name__ == "__main__":
import sys
from .BinFmt import BinFmt
bf = BinFmt()
for a in sys.argv[1:]:
bi = bf.load_image(a)
if bi is not None:
print(a)
r = Relocate(bi, True)
addrs = r.get_seq_addrs(0)
datas = r.relocate(addrs)
data = r.relocate_one_block(0)

View File

@ -1,5 +1,3 @@
from amitools.binfmt.BinImage import *
from .ELFFile import *
from .ELF import *
@ -8,176 +6,176 @@ from .DwarfDebugLine import DwarfDebugLine
class BinFmtELF:
"""Handle Amiga m68k binaries in ELF format (usually from AROS)"""
"""Handle Amiga m68k binaries in ELF format (usually from AROS)"""
def is_image(self, path):
"""check if a given file is a supported ELF file"""
with open(path, "rb") as f:
return self.is_image_fobj(f)
def is_image(self, path):
"""check if a given file is a supported ELF file"""
with open(path, "rb") as f:
return self.is_image_fobj(f)
def is_image_fobj(self, fobj):
"""check if a given fobj is a supported ELF file"""
try:
pos = fobj.tell()
def is_image_fobj(self, fobj):
"""check if a given fobj is a supported ELF file"""
try:
pos = fobj.tell()
# read identifier
ident = ELFIdentifier()
ident_data = fobj.read(16)
ident.parse(ident_data)
# read identifier
ident = ELFIdentifier()
ident_data = fobj.read(16)
ident.parse(ident_data)
# read header
hdr = ELFHeader()
hdr_data = fobj.read(36)
hdr.parse(hdr_data)
# read header
hdr = ELFHeader()
hdr_data = fobj.read(36)
hdr.parse(hdr_data)
# seek back
fobj.seek(pos,0)
# seek back
fobj.seek(pos, 0)
# check header
return self.is_supported_elf(ident, hdr)
except ELFParseError:
return False
# check header
return self.is_supported_elf(ident, hdr)
except ELFParseError:
return False
def is_supported_elf(self, ident, hdr):
"""check ELF header if its a m68k binary"""
if hdr.machine != EM_68K:
return False
if ident.osabi not in (ELFOSABI_SYSV, ELFOSABI_AROS):
return False
return True
def is_supported_elf(self, ident, hdr):
"""check ELF header if its a m68k binary"""
if hdr.machine != EM_68K:
return False
if ident.osabi not in (ELFOSABI_SYSV, ELFOSABI_AROS):
return False
return True
def load_image(self, path):
"""load a BinImage from an ELF file given via path"""
with open(path, "rb") as f:
return self.load_image_fobj(f)
def load_image(self, path):
"""load a BinImage from an ELF file given via path"""
with open(path, "rb") as f:
return self.load_image_fobj(f)
def load_image_fobj(self, fobj):
"""load a BinImage from an ELF file given via file object"""
# read elf file
reader = ELFReader()
elf = reader.load(fobj)
# create bin image and assign elf file
bi = BinImage(BIN_IMAGE_TYPE_ELF)
bi.set_file_data(elf)
# walk through elf sections
sect_to_seg = {}
for sect in elf.sections:
# determine segment type
seg_type = None
name = sect.name_str
flags = 0
if name == b'.text':
seg_type = SEGMENT_TYPE_CODE
elif name == b'.data':
seg_type = SEGMENT_TYPE_DATA
elif name == b'.rodata':
seg_type = SEGMENT_TYPE_DATA
flags = SEGMENT_FLAG_READ_ONLY
elif name == b'.bss':
seg_type = SEGMENT_TYPE_BSS
# we got a segment
if seg_type is not None:
size = sect.header.size
data = sect.data
seg = Segment(seg_type, size, data, flags)
bi.add_segment(seg)
# assign section to segment
seg.set_file_data(sect)
sect_to_seg[sect] = seg
def load_image_fobj(self, fobj):
"""load a BinImage from an ELF file given via file object"""
# read elf file
reader = ELFReader()
elf = reader.load(fobj)
# create bin image and assign elf file
bi = BinImage(BIN_IMAGE_TYPE_ELF)
bi.set_file_data(elf)
# walk through elf sections
sect_to_seg = {}
for sect in elf.sections:
# determine segment type
seg_type = None
name = sect.name_str
flags = 0
if name == b".text":
seg_type = SEGMENT_TYPE_CODE
elif name == b".data":
seg_type = SEGMENT_TYPE_DATA
elif name == b".rodata":
seg_type = SEGMENT_TYPE_DATA
flags = SEGMENT_FLAG_READ_ONLY
elif name == b".bss":
seg_type = SEGMENT_TYPE_BSS
# we got a segment
if seg_type is not None:
size = sect.header.size
data = sect.data
seg = Segment(seg_type, size, data, flags)
bi.add_segment(seg)
# assign section to segment
seg.set_file_data(sect)
sect_to_seg[sect] = seg
# now run through segments to add relocations
bi_segs = bi.get_segments()
for seg in bi_segs:
# retrieve associated ELF section
sect = seg.get_file_data()
# now run through segments to add relocations
bi_segs = bi.get_segments()
for seg in bi_segs:
# retrieve associated ELF section
sect = seg.get_file_data()
# any relocations?
rela = sect.get_rela()
num_rela = len(rela)
if num_rela > 0:
self.add_elf_rela(sect, seg, sect_to_seg)
# any relocations?
rela = sect.get_rela()
num_rela = len(rela)
if num_rela > 0:
self.add_elf_rela(sect, seg, sect_to_seg)
# any symbols?
symbols = sect.get_symbols()
num_syms = len(symbols)
if num_syms > 0:
self.add_elf_symbols(symbols, seg)
# any symbols?
symbols = sect.get_symbols()
num_syms = len(symbols)
if num_syms > 0:
self.add_elf_symbols(symbols, seg)
# try to add debug info
ddl = DwarfDebugLine()
got = ddl.decode(elf)
if got:
self.add_debug_line(ddl, bi, sect_to_seg)
# try to add debug info
ddl = DwarfDebugLine()
got = ddl.decode(elf)
if got:
self.add_debug_line(ddl, bi, sect_to_seg)
return bi
return bi
def add_elf_rela(self, sect, seg, sect_to_seg):
for tgt_sect in sect.get_rela_sections():
# is this a relocation to a used section?
if tgt_sect in sect_to_seg:
to_seg = sect_to_seg[tgt_sect]
rl = Relocations(to_seg)
seg.add_reloc(to_seg, rl)
# add relocations
for rel in sect.get_rela_by_section(tgt_sect):
r = Reloc(rel.offset, addend=rel.section_addend)
rl.add_reloc(r)
def add_elf_rela(self, sect, seg, sect_to_seg):
for tgt_sect in sect.get_rela_sections():
# is this a relocation to a used section?
if tgt_sect in sect_to_seg:
to_seg = sect_to_seg[tgt_sect]
rl = Relocations(to_seg)
seg.add_reloc(to_seg, rl)
# add relocations
for rel in sect.get_rela_by_section(tgt_sect):
r = Reloc(rel.offset, addend=rel.section_addend)
rl.add_reloc(r)
def add_elf_symbols(self, symbols, seg):
symtab = SymbolTable()
seg.set_symtab(symtab)
for sym in symbols:
# add entry
off = sym.value
name = sym.name_str
file_sym = sym.file_sym
if file_sym is not None:
file_name = file_sym.name_str
else:
file_name = None
symbol = Symbol(off, name, file_name)
symtab.add_symbol(symbol)
def add_elf_symbols(self, symbols, seg):
symtab = SymbolTable()
seg.set_symtab(symtab)
for sym in symbols:
# add entry
off = sym.value
name = sym.name_str
file_sym = sym.file_sym
if file_sym is not None:
file_name = file_sym.name_str
else:
file_name = None
symbol = Symbol(off, name, file_name)
symtab.add_symbol(symbol)
def add_debug_line(self, ddl, bi, sect_to_seg):
seg_to_dl = {}
matrix = ddl.get_matrix()
for row in matrix:
sect = row.section
if sect in sect_to_seg:
segment = sect_to_seg[sect]
def add_debug_line(self, ddl, bi, sect_to_seg):
seg_to_dl = {}
matrix = ddl.get_matrix()
for row in matrix:
sect = row.section
if sect in sect_to_seg:
segment = sect_to_seg[sect]
# fetch debug info
if segment in seg_to_dl:
dl, file_to_df = seg_to_dl[segment]
else:
dl = DebugLine()
file_to_df = {}
segment.set_debug_line(dl)
seg_to_dl[segment] = (dl, file_to_df)
# fetch debug info
if segment in seg_to_dl:
dl, file_to_df = seg_to_dl[segment]
else:
dl = DebugLine()
file_to_df = {}
segment.set_debug_line(dl)
seg_to_dl[segment] = (dl, file_to_df)
# fetch file instance
fid = row.file
if fid in file_to_df:
df = file_to_df[fid]
else:
df = DebugLineFile(ddl.get_file_name(fid),
ddl.get_file_dir(fid))
dl.add_file(df)
file_to_df[fid] = df
# fetch file instance
fid = row.file
if fid in file_to_df:
df = file_to_df[fid]
else:
df = DebugLineFile(ddl.get_file_name(fid), ddl.get_file_dir(fid))
dl.add_file(df)
file_to_df[fid] = df
# add entry
e = DebugLineEntry(row.address, row.line)
df.add_entry(e)
# add entry
e = DebugLineEntry(row.address, row.line)
df.add_entry(e)
# mini test
if __name__ == '__main__':
import sys
bf = BinFmtELF()
for a in sys.argv[1:]:
if bf.is_image(a):
print("loading", a)
bi = bf.load_image(a)
print(bi)
else:
print("NO ELF:", a)
if __name__ == "__main__":
import sys
bf = BinFmtELF()
for a in sys.argv[1:]:
if bf.is_image(a):
print("loading", a)
bi = bf.load_image(a)
print(bi)
else:
print("NO ELF:", a)

View File

@ -1,324 +1,351 @@
import io
import struct
class LineState:
def __init__(self, is_stmt=False):
self.address = 0
self.file = 1
self.line = 1
self.column = 0
self.is_stmt = is_stmt
self.basic_block = False
self.end_sequence = False
self.section = None
def __init__(self, is_stmt=False):
self.address = 0
self.file = 1
self.line = 1
self.column = 0
self.is_stmt = is_stmt
self.basic_block = False
self.end_sequence = False
self.section = None
def clone(self):
state = LineState()
state.address = self.address
state.file = self.file
state.line = self.line
state.column = self.column
state.is_stmt = self.is_stmt
state.basic_block = self.basic_block
state.end_sequence = self.end_sequence
state.section = self.section
return state
def clone(self):
state = LineState()
state.address = self.address
state.file = self.file
state.line = self.line
state.column = self.column
state.is_stmt = self.is_stmt
state.basic_block = self.basic_block
state.end_sequence = self.end_sequence
state.section = self.section
return state
def __str__(self):
return "[address=%08x file=%d line=%d column=%d is_stmt=%s basic_block=%s end_sequence=%d]" % \
(self.address, self.file, self.line, self.column, self.is_stmt,
self.basic_block, self.end_sequence)
def __str__(self):
return (
"[address=%08x file=%d line=%d column=%d is_stmt=%s basic_block=%s end_sequence=%d]"
% (
self.address,
self.file,
self.line,
self.column,
self.is_stmt,
self.basic_block,
self.end_sequence,
)
)
class DwarfDebugLine:
"""decode .debug_line Dwarf line debug sections"""
def __init__(self, verbose=False):
self.input = None
self.error = None
self.verbose = verbose
self.matrix = None
"""decode .debug_line Dwarf line debug sections"""
def _log(self, *args):
if self.verbose:
print(*args)
def __init__(self, verbose=False):
self.input = None
self.error = None
self.verbose = verbose
self.matrix = None
def decode(self, elf_file):
# get section with debug info
debug_line = elf_file.get_section_by_name(".debug_line")
if debug_line is None:
self.error = "No .debug_line section found! No debug info?"
return False
# get (optional) relocations
rela = elf_file.get_section_by_name(".rela.debug_line")
# start parsing
self.input = io.StringIO(debug_line.data)
# decode header
if not self.decode_header():
return False
if self.verbose:
self.dump_header()
# decode line program
matrix = []
state = LineState(self.default_is_stmt)
log = self._log
while True:
# read opcode
opc_ch = self.input.read(1)
if len(opc_ch) == 0:
break
opc = ord(opc_ch)
log("opcode=", opc)
# 0 = extended opcode
if opc == 0:
opc_size = self.read_leb128()
sub_opc = ord(self.input.read(1))
log(" sub_opcode=", sub_opc)
# 1: DW_LNE_end_sequence
if sub_opc == 1:
state.end_sequence = True
line = state.clone()
matrix.append(line)
state.__init__()
log("DW_LNE_end_sequence:", line)
# 2: DW_LNE_set_address
elif sub_opc == 2:
pos = self.input.tell()
addr = self.read_long()
addend, sect = self.find_rela(rela, pos)
state.address = addr + addend
state.section = sect
log("DW_LNE_set_address: %08x sect=%s" % (state.address, sect))
# 3: DW_LNE_set_file
elif sub_opc == 3:
tup = self.decode_file()
self.files.append(tup)
log("DW_LNE_set_file", tup)
# other (unknown) ext opc
def _log(self, *args):
if self.verbose:
print(*args)
def decode(self, elf_file):
# get section with debug info
debug_line = elf_file.get_section_by_name(".debug_line")
if debug_line is None:
self.error = "No .debug_line section found! No debug info?"
return False
# get (optional) relocations
rela = elf_file.get_section_by_name(".rela.debug_line")
# start parsing
self.input = io.StringIO(debug_line.data)
# decode header
if not self.decode_header():
return False
if self.verbose:
self.dump_header()
# decode line program
matrix = []
state = LineState(self.default_is_stmt)
log = self._log
while True:
# read opcode
opc_ch = self.input.read(1)
if len(opc_ch) == 0:
break
opc = ord(opc_ch)
log("opcode=", opc)
# 0 = extended opcode
if opc == 0:
opc_size = self.read_leb128()
sub_opc = ord(self.input.read(1))
log(" sub_opcode=", sub_opc)
# 1: DW_LNE_end_sequence
if sub_opc == 1:
state.end_sequence = True
line = state.clone()
matrix.append(line)
state.__init__()
log("DW_LNE_end_sequence:", line)
# 2: DW_LNE_set_address
elif sub_opc == 2:
pos = self.input.tell()
addr = self.read_long()
addend, sect = self.find_rela(rela, pos)
state.address = addr + addend
state.section = sect
log("DW_LNE_set_address: %08x sect=%s" % (state.address, sect))
# 3: DW_LNE_set_file
elif sub_opc == 3:
tup = self.decode_file()
self.files.append(tup)
log("DW_LNE_set_file", tup)
# other (unknown) ext opc
else:
log("unknown sub opcode!")
self.input.seek(opc_size - 1, 1)
# standard opcodes
elif opc < self.opc_base:
# 1: DW_LNS_copy
if opc == 1:
line = state.clone()
matrix.append(line)
log("DW_LNS_copy:", line)
state.basic_block = False
# 2: DW_LNS_advance_pc
elif opc == 2:
offset = self.read_leb128() * self.min_instr_len
state.address += offset
log("DW_LNS_advance_pc: +%d -> %08x" % (offset, state.address))
# 3: DW_LNS_advance_line
elif opc == 3:
offset = self.read_sleb128()
state.line += offset
log("DW_LNS_advance_line: +%d -> %d" % (offset, state.line))
# 4: DW_LNS_set_file
elif opc == 4:
state.file = self.read_leb128()
log("DW_LNS_set_file", state.file)
# 5: DW_LNS_set_column
elif opc == 5:
state.column = self.read_leb128()
log("DW_LNS_set_column", state.column)
# 6: DW_LNS_negate_stmt
elif opc == 6:
state.is_stmt = not state.is_stmt
log("DW_LNS_negate_stmt", state.is_stmt)
# 7: DW_LNS_set_basic_block
elif opc == 7:
state.basic_block = True
log("DW_LNS_set_basic_block")
# 8: DW_LNS_const_add_pc
elif opc == 8:
(addr_addend, _) = self.decode_special_opcode(255)
state.address += addr_addend
log(
"DW_LNS_const_add_pc: +%d -> %08x"
% (addr_addend, state.address)
)
# 9: DW_LNS_fixed_advance_pc
elif opc == 9:
offset = self.read_word()
state.address += offset
log("DW_LNS_fixed_advance_pc: %+08x" % offset)
# other (unknown) opc
else:
num_args = self.std_opc_lens[opc]
log("skip unknown: num_args=", num_args)
for i in range(num_args):
self.read_leb128()
# special opcodes:
else:
(addr_addend, line_addend) = self.decode_special_opcode(opc)
state.address += addr_addend
state.line += line_addend
state.basic_block = False
line = state.clone()
matrix.append(line)
log("special", (opc - self.opc_base), line)
# done
self.matrix = matrix
return True
def get_matrix(self):
return self.matrix
def get_file_dir(self, idx):
f = self.files[idx - 1]
dir_idx = f[1]
if dir_idx > 0:
dir_name = self.inc_dirs[dir_idx - 1]
else:
log("unknown sub opcode!")
self.input.seek(opc_size-1,1)
# standard opcodes
elif opc < self.opc_base:
# 1: DW_LNS_copy
if opc == 1:
line = state.clone()
matrix.append(line)
log("DW_LNS_copy:", line)
state.basic_block = False
# 2: DW_LNS_advance_pc
elif opc == 2:
offset = self.read_leb128() * self.min_instr_len
state.address += offset
log("DW_LNS_advance_pc: +%d -> %08x" % (offset, state.address))
# 3: DW_LNS_advance_line
elif opc == 3:
offset = self.read_sleb128()
state.line += offset
log("DW_LNS_advance_line: +%d -> %d" % (offset, state.line))
# 4: DW_LNS_set_file
elif opc == 4:
state.file = self.read_leb128()
log("DW_LNS_set_file", state.file)
# 5: DW_LNS_set_column
elif opc == 5:
state.column = self.read_leb128()
log("DW_LNS_set_column", state.column)
# 6: DW_LNS_negate_stmt
elif opc == 6:
state.is_stmt = not state.is_stmt
log("DW_LNS_negate_stmt", state.is_stmt)
# 7: DW_LNS_set_basic_block
elif opc == 7:
state.basic_block = True
log("DW_LNS_set_basic_block")
# 8: DW_LNS_const_add_pc
elif opc == 8:
(addr_addend,_) = self.decode_special_opcode(255)
state.address += addr_addend
log("DW_LNS_const_add_pc: +%d -> %08x" % (addr_addend, state.address))
# 9: DW_LNS_fixed_advance_pc
elif opc == 9:
offset = self.read_word()
state.address += offset
log("DW_LNS_fixed_advance_pc: %+08x" % offset)
# other (unknown) opc
else:
num_args = self.std_opc_lens[opc]
log("skip unknown: num_args=", num_args)
for i in range(num_args):
self.read_leb128()
# special opcodes:
else:
(addr_addend, line_addend) = self.decode_special_opcode(opc)
state.address += addr_addend
state.line += line_addend
state.basic_block = False
line = state.clone()
matrix.append(line)
log("special", (opc - self.opc_base), line)
# done
self.matrix = matrix
return True
dir_name = ""
return dir_name
def get_matrix(self):
return self.matrix
def get_file_name(self, idx):
return self.files[idx - 1][0]
def get_file_dir(self, idx):
f = self.files[idx-1]
dir_idx = f[1]
if dir_idx > 0:
dir_name = self.inc_dirs[dir_idx-1]
else:
dir_name = ""
return dir_name
def find_rela(self, rela_section, pos):
if rela_section is not None:
for rela in rela_section.rela:
if rela.offset == pos:
return rela.addend, rela.section
return 0, None
def get_file_name(self, idx):
return self.files[idx-1][0]
def decode_special_opcode(self, opc):
adj_opc = opc - self.opc_base
addr_addend = (adj_opc // self.line_range) * self.min_instr_len
line_addend = self.line_base + (adj_opc % self.line_range)
return (addr_addend, line_addend)
def find_rela(self, rela_section, pos):
if rela_section is not None:
for rela in rela_section.rela:
if rela.offset == pos:
return rela.addend, rela.section
return 0,None
def decode_header(self):
# header
self.unit_length = self.read_long()
self.version = self.read_word()
if self.version != 2:
self.error = "Can only decode DWARF 2 debug info"
return False
self.header_length = self.read_long()
self.min_instr_len = self.read_byte()
self.default_is_stmt = self.read_byte()
self.line_base = self.read_sbyte()
self.line_range = self.read_byte()
self.opc_base = self.read_byte()
# 9 standard opcode lengths
self.std_opc_lens = []
n = self.opc_base
if n > 0:
for i in range(n - 1):
l = self.read_byte()
self.std_opc_lens.append(l)
# 10 include dirs
self.inc_dirs = []
while True:
inc_dir = self.read_string()
if inc_dir == "":
break
self.inc_dirs.append(inc_dir)
# 11 file names
self.files = []
while True:
tup = self.decode_file()
if tup is None:
break
self.files.append(tup)
# end header: check header size
pos = self.input.tell()
hdr_len = pos - 10
if hdr_len != self.header_length:
self.error = "Error size mismatch: %d != %d" % (hdr_len, self.header_length)
return False
return True
def decode_special_opcode(self, opc):
adj_opc = opc - self.opc_base
addr_addend = (adj_opc // self.line_range) * self.min_instr_len
line_addend = self.line_base + (adj_opc % self.line_range)
return (addr_addend, line_addend)
def decode_file(self):
file_name = self.read_string()
if file_name == "":
return None
dir_idx = self.read_leb128()
last_mod = self.read_leb128()
file_size = self.read_leb128()
return (file_name, dir_idx, last_mod, file_size)
def decode_header(self):
# header
self.unit_length = self.read_long()
self.version = self.read_word()
if self.version != 2:
self.error = "Can only decode DWARF 2 debug info"
return False
self.header_length = self.read_long()
self.min_instr_len = self.read_byte()
self.default_is_stmt = self.read_byte()
self.line_base = self.read_sbyte()
self.line_range = self.read_byte()
self.opc_base = self.read_byte()
# 9 standard opcode lengths
self.std_opc_lens = []
n = self.opc_base
if n > 0:
for i in range(n-1):
l = self.read_byte()
self.std_opc_lens.append(l)
# 10 include dirs
self.inc_dirs = []
while True:
inc_dir = self.read_string()
if inc_dir == "":
break
self.inc_dirs.append(inc_dir)
# 11 file names
self.files = []
while True:
tup = self.decode_file()
if tup is None:
break
self.files.append(tup)
# end header: check header size
pos = self.input.tell()
hdr_len = pos - 10
if hdr_len != self.header_length:
self.error = "Error size mismatch: %d != %d" % (hdr_len, self.header_length)
return False
return True
def dump_header(self):
print(
"unit_length=%x version=%d header_length=%x max_instr_len=%d "
"default_is_stmt=%d line_base=%d line_range=%d opc_base=%d"
% (
self.unit_length,
self.version,
self.header_length,
self.min_instr_len,
self.default_is_stmt,
self.line_base,
self.line_range,
self.opc_base,
)
)
print("std_opc_lens:", ",".join(map(str, self.std_opc_lens)))
print("inc_dirs")
for d in self.inc_dirs:
print(d)
print("files")
for f in self.files:
print(f)
def decode_file(self):
file_name = self.read_string()
if file_name == "":
return None
dir_idx = self.read_leb128()
last_mod = self.read_leb128()
file_size = self.read_leb128()
return (file_name, dir_idx, last_mod, file_size)
def read_string(self):
result = []
while True:
ch = self.input.read(1)
if ord(ch) == 0:
break
result.append(ch)
return "".join(result)
def dump_header(self):
print("unit_length=%x version=%d header_length=%x max_instr_len=%d "
"default_is_stmt=%d line_base=%d line_range=%d opc_base=%d" %
(self.unit_length, self.version, self.header_length,
self.min_instr_len, self.default_is_stmt, self.line_base,
self.line_range, self.opc_base))
print("std_opc_lens:",",".join(map(str,self.std_opc_lens)))
print("inc_dirs")
for d in self.inc_dirs:
print(d)
print("files")
for f in self.files:
print(f)
def read_leb128(self):
result = 0
shift = 0
while True:
byte = self.read_byte()
result |= (byte & 0x7F) << shift
if byte & 0x80 == 0:
break
shift += 7
return result
def read_string(self):
result = []
while True:
ch = self.input.read(1)
if ord(ch) == 0:
break
result.append(ch)
return "".join(result)
def read_sleb128(self):
result = 0
shift = 0
while True:
byte = self.read_byte()
result |= (byte & 0x7F) << shift
shift += 7
if byte & 0x80 == 0:
break
# negative?
if byte & 0x40 == 0x40:
mask = 1 << shift
result |= -mask
return result
def read_leb128(self):
result = 0
shift = 0
while True:
byte = self.read_byte()
result |= (byte & 0x7f) << shift
if byte & 0x80 == 0:
break
shift += 7
return result
def read_long(self):
data = self.input.read(4)
return struct.unpack(">I", data)[0]
def read_sleb128(self):
result = 0
shift = 0
while True:
byte = self.read_byte()
result |= (byte & 0x7f) << shift
shift += 7
if byte & 0x80 == 0:
break
# negative?
if byte & 0x40 == 0x40:
mask = 1 << shift
result |= - mask
return result
def read_word(self):
data = self.input.read(2)
return struct.unpack(">H", data)[0]
def read_long(self):
data = self.input.read(4)
return struct.unpack(">I", data)[0]
def read_byte(self):
data = self.input.read(1)
return struct.unpack(">B", data)[0]
def read_word(self):
data = self.input.read(2)
return struct.unpack(">H", data)[0]
def read_sbyte(self):
data = self.input.read(1)
return struct.unpack(">b", data)[0]
def read_byte(self):
data = self.input.read(1)
return struct.unpack(">B", data)[0]
def read_sbyte(self):
data = self.input.read(1)
return struct.unpack(">b", data)[0]
# mini test
if __name__ == '__main__':
import sys
from .ELFReader import ELFReader
reader = ELFReader()
for a in sys.argv[1:]:
f = open(a, "rb")
ef = reader.load(f)
ddl = DwarfDebugLine(verbose=True)
ok = ddl.decode(ef)
if ok:
print("--- line matrix ---")
for row in ddl.get_matrix():
name = ddl.get_file_name(row.file)
fdir = ddl.get_file_dir(row.file)
sect_name = row.section.name_str
print("%08x: %s [%s] %s:%d" % (row.address, sect_name, fdir, name, row.line))
if __name__ == "__main__":
import sys
from .ELFReader import ELFReader
reader = ELFReader()
for a in sys.argv[1:]:
f = open(a, "rb")
ef = reader.load(f)
ddl = DwarfDebugLine(verbose=True)
ok = ddl.decode(ef)
if ok:
print("--- line matrix ---")
for row in ddl.get_matrix():
name = ddl.get_file_name(row.file)
fdir = ddl.get_file_dir(row.file)
sect_name = row.section.name_str
print(
"%08x: %s [%s] %s:%d"
% (row.address, sect_name, fdir, name, row.line)
)

View File

@ -7,13 +7,7 @@ ELFOSABI_AROS = 15
EM_68K = 4
ET_values = {
0: "NONE",
1: "REL",
2: "EXEC",
3: "DYN",
4: "CORE"
}
ET_values = {0: "NONE", 1: "REL", 2: "EXEC", 3: "DYN", 4: "CORE"}
SHN_UNDEF = 0
SHT_SYMTAB = 2
@ -22,69 +16,53 @@ SHT_RELA = 4
SHT_NOBITS = 8
SHT_values = {
0: "NULL",
1: "PROGBITS",
2: "SYMTAB",
3: "STRTAB",
4: "RELA",
5: "HASH",
6: "DYNAMIC",
7: "NOTE",
8: "NOBITS",
9: "REL",
10: "SHLIB",
11: "DYNSYM",
14: "INIT_ARRAY",
15: "FINI_ARRAY",
16: "PREINIT_ARRAY",
17: "GROUP",
18: "SYMTAB_SHNDX"
0: "NULL",
1: "PROGBITS",
2: "SYMTAB",
3: "STRTAB",
4: "RELA",
5: "HASH",
6: "DYNAMIC",
7: "NOTE",
8: "NOBITS",
9: "REL",
10: "SHLIB",
11: "DYNSYM",
14: "INIT_ARRAY",
15: "FINI_ARRAY",
16: "PREINIT_ARRAY",
17: "GROUP",
18: "SYMTAB_SHNDX",
}
SHT_flags = {
1: "WRITE",
2: "ALLOC",
4: "EXECINSTR",
8: "MERGE",
16: "STRINGS",
32: "INFO_LINK",
64: "LINK_ORDER",
128: "OS_NONCONFORMING",
256: "GROUP",
512: "TLS"
1: "WRITE",
2: "ALLOC",
4: "EXECINSTR",
8: "MERGE",
16: "STRINGS",
32: "INFO_LINK",
64: "LINK_ORDER",
128: "OS_NONCONFORMING",
256: "GROUP",
512: "TLS",
}
SHN_values = {
0: "UND",
0xfff1: "ABS"
}
SHN_values = {0: "UND", 0xFFF1: "ABS"}
STB_values = {
0: "LOCAL",
1: "GLOBAL",
2: "WEAK",
3: "NUM"
}
STB_values = {0: "LOCAL", 1: "GLOBAL", 2: "WEAK", 3: "NUM"}
STT_values = {
0: "NOTYPE",
1: "OBJECT",
2: "FUNC",
3: "SECTION",
4: "FILE",
5: "COMMON",
6: "TLS",
7: "NUM"
0: "NOTYPE",
1: "OBJECT",
2: "FUNC",
3: "SECTION",
4: "FILE",
5: "COMMON",
6: "TLS",
7: "NUM",
}
STV_values = {
0: "DEFAULT",
1: "INTERNAL",
2: "HIDDEN",
3: "PROTECTED"
}
STV_values = {0: "DEFAULT", 1: "INTERNAL", 2: "HIDDEN", 3: "PROTECTED"}
R_68K_values = {
0: "68K_NONE",
1: "68K_32"
}
R_68K_values = {0: "68K_NONE", 1: "68K_32"}

View File

@ -1,114 +1,145 @@
from .ELF import *
class ELFDumper:
def __init__(self, elf_file):
self.elf = elf_file
def __init__(self, elf_file):
self.elf = elf_file
def _dump_rela_entry(self, rel, prefix="\t\t\t"):
rel_sect = rel.section
sect_txt = "%s (%d) + %d" % (
rel_sect.name_str,
rel_sect.idx,
rel.section_addend,
)
rel_symbol = rel.symbol
if rel_symbol is not None:
sym_txt = "%s (%d) + %d" % (rel_symbol.name_str, rel_symbol.idx, rel.addend)
else:
sym_txt = ""
print(
"%s%08x %-10s %-20s %s"
% (prefix, rel.offset, rel.type_str, sect_txt, sym_txt)
)
def _dump_rela_entry(self, rel, prefix="\t\t\t"):
rel_sect = rel.section
sect_txt = "%s (%d) + %d" % (rel_sect.name_str, rel_sect.idx, rel.section_addend)
rel_symbol = rel.symbol
if rel_symbol is not None:
sym_txt = "%s (%d) + %d" % (rel_symbol.name_str, rel_symbol.idx, rel.addend)
else:
sym_txt = ""
print("%s%08x %-10s %-20s %s" % (prefix, rel.offset, rel.type_str, sect_txt, sym_txt))
def _dump_symbol(self, sym):
print(
"\t\t\t%08x %6d %-8s %-8s %-16s"
% (sym.value, sym.size, sym.type_str, sym.bind_str, sym.name_str)
)
def _dump_symbol(self, sym):
print("\t\t\t%08x %6d %-8s %-8s %-16s" % (sym.value, sym.size, sym.type_str, sym.bind_str, sym.name_str))
def dump_sections(self, show_relocs=False, show_debug=False):
print("ELF Sections")
print("id name size rela syms type flags")
for sect in self.elf.sections:
def dump_sections(self, show_relocs=False, show_debug=False):
print("ELF Sections")
print("id name size rela syms type flags")
for sect in self.elf.sections:
# determine number of relocations
rela = sect.get_rela()
num_rela = len(rela)
# determine number of relocations
rela = sect.get_rela()
num_rela = len(rela)
# determine number of symbols
symbols = sect.get_symbols()
num_syms = len(symbols)
# determine number of symbols
symbols = sect.get_symbols()
num_syms = len(symbols)
print(
"%2d %-16s %08x %4d %4d %-10s %s"
% (
sect.idx,
sect.name_str,
sect.header.size,
num_rela,
num_syms,
sect.header.type_str,
",".join(sect.header.flags_dec),
)
)
print("%2d %-16s %08x %4d %4d %-10s %s" % \
(sect.idx, sect.name_str, sect.header.size, num_rela, num_syms,
sect.header.type_str, ",".join(sect.header.flags_dec)))
# show relas
if show_relocs and num_rela > 0:
print("\t\tRelocations:")
for rel in rela:
self._dump_rela_entry(rel)
# show relas
if show_relocs and num_rela > 0:
print("\t\tRelocations:")
for rel in rela:
self._dump_rela_entry(rel)
# per segment relocations
for tgt_sect in sect.get_rela_sections():
print("\t\tTo Section #%d:" % tgt_sect.idx)
for rel in sect.get_rela_by_section(tgt_sect):
self._dump_rela_entry(rel)
# per segment relocations
for tgt_sect in sect.get_rela_sections():
print("\t\tTo Section #%d:" % tgt_sect.idx)
for rel in sect.get_rela_by_section(tgt_sect):
self._dump_rela_entry(rel)
# show symbols
if show_debug and num_syms > 0:
print("\t\tSymbols:")
for sym in symbols:
self._dump_symbol(sym)
# show symbols
if show_debug and num_syms > 0:
print("\t\tSymbols:")
for sym in symbols:
self._dump_symbol(sym)
def dump_symbols(self):
print("ELF Symbol Table")
symtabs = self.elf.symtabs
if len(symtabs) == 0:
print("no symbols")
return
def dump_symbols(self):
print("ELF Symbol Table")
symtabs = self.elf.symtabs
if len(symtabs) == 0:
print("no symbols")
return
print(
"idx value size type bind visible ndx name"
)
for symtab in symtabs:
for sym in symtab.get_table_symbols():
txt = sym.shndx_str
if txt is None:
txt = sym.section.name_str
print(
"%4d %08x %6d %-8s %-8s %-8s %-16s %s"
% (
sym.idx,
sym.value,
sym.size,
sym.type_str,
sym.bind_str,
sym.visibility_str,
txt,
sym.name_str,
)
)
print("idx value size type bind visible ndx name")
for symtab in symtabs:
for sym in symtab.get_table_symbols():
txt = sym.shndx_str
if txt is None:
txt =sym.section.name_str
print("%4d %08x %6d %-8s %-8s %-8s %-16s %s" % \
(sym.idx, sym.value, sym.size, sym.type_str,
sym.bind_str, sym.visibility_str,
txt, sym.name_str))
def dump_relas(self):
print("ELF Relocations")
rela_sects = self.elf.relas
if len(rela_sects) == 0:
print("no relocations")
return
def dump_relas(self):
print("ELF Relocations")
rela_sects = self.elf.relas
if len(rela_sects) == 0:
print("no relocations")
return
for rela_sect in rela_sects:
print(rela_sect.name_str, "linked to", rela_sect.reloc_section.name_str)
print(" offset type segment + addend symbol + addend")
num = 0
for rela in rela_sect.get_relocations():
self._dump_rela_entry(rela, prefix="%4d " % num)
num += 1
for rela_sect in rela_sects:
print(rela_sect.name_str, "linked to", rela_sect.reloc_section.name_str)
print(" offset type segment + addend symbol + addend")
num = 0
for rela in rela_sect.get_relocations():
self._dump_rela_entry(rela, prefix="%4d " % num)
num += 1
def dump_relas_by_sect(self):
print("ELF Relocations (by sections)")
for sect in self.elf.sections:
to_sects = sect.get_rela_sections()
if len(to_sects) > 0:
print(" section", sect.idx)
for to_sect in to_sects:
print(" -> section", to_sect.idx)
num = 0
for rela in sect.get_rela_by_section(to_sect):
self._dump_rela_entry(rela, prefix=" %4d " % num)
num += 1
def dump_relas_by_sect(self):
print("ELF Relocations (by sections)")
for sect in self.elf.sections:
to_sects = sect.get_rela_sections()
if len(to_sects) > 0:
print(" section", sect.idx)
for to_sect in to_sects:
print(" -> section", to_sect.idx)
num = 0
for rela in sect.get_rela_by_section(to_sect):
self._dump_rela_entry(rela, prefix=" %4d " % num)
num += 1
if __name__ == '__main__':
from .ELFReader import ELFReader
import sys
reader = ELFReader()
for a in sys.argv[1:]:
f = open(a, "rb")
ef = reader.load(f)
dumper = ELFDumper(ef)
dumper.dump_sections(True, True)
dumper.dump_symbols()
dumper.dump_relas()
dumper.dump_relas_by_sect()
if __name__ == "__main__":
from .ELFReader import ELFReader
import sys
reader = ELFReader()
for a in sys.argv[1:]:
f = open(a, "rb")
ef = reader.load(f)
dumper = ELFDumper(ef)
dumper.dump_sections(True, True)
dumper.dump_symbols()
dumper.dump_relas()
dumper.dump_relas_by_sect()

View File

@ -3,266 +3,294 @@ from .ELF import *
class ELFParseError(Exception):
def __init__(self, msg):
self.msg = msg
def __init__(self, msg):
self.msg = msg
class ELFPart:
"""base class for parts of ELF file"""
_names = []
"""base class for parts of ELF file"""
def __init__(self):
for name in self._names:
setattr(self, name, None)
_names = []
def _parse_data(self, fmt, data):
flen = len(fmt)
nlen = len(self._names)
if flen != nlen:
raise ValueError("_parse_data size mismatch")
decoded = struct.unpack(">"+fmt, data)
if len(decoded) != nlen:
raise ELFParseError("data decode error")
for i in range(nlen):
setattr(self, self._names[i], decoded[i])
def __init__(self):
for name in self._names:
setattr(self, name, None)
def _decode_flags(self, value, names):
result = []
for mask in names:
if mask & value == mask:
result.append(names[mask])
return result
def _parse_data(self, fmt, data):
flen = len(fmt)
nlen = len(self._names)
if flen != nlen:
raise ValueError("_parse_data size mismatch")
decoded = struct.unpack(">" + fmt, data)
if len(decoded) != nlen:
raise ELFParseError("data decode error")
for i in range(nlen):
setattr(self, self._names[i], decoded[i])
def _decode_value(self, value, names):
if value in names:
return names[value]
else:
return None
def _decode_flags(self, value, names):
result = []
for mask in names:
if mask & value == mask:
result.append(names[mask])
return result
def _decode_value(self, value, names):
if value in names:
return names[value]
else:
return None
class ELFIdentifier(ELFPart):
_names = ["class_", "data", "version", "osabi", "abiversion"]
_names = ["class_", "data", "version", "osabi", "abiversion"]
def __init__(self):
ELFPart.__init__(self)
def __init__(self):
ELFPart.__init__(self)
def parse(self, ident_data):
# magic
magic = ident_data[0:4]
if magic != b"\177ELF":
raise ELFParseError("No ELF Magic found!")
self.class_ = ident_data[4]
self.data = ident_data[5]
self.version = ident_data[6]
self.osabi = ident_data[7]
self.abiversion = ident_data[8]
def parse(self, ident_data):
# magic
magic = ident_data[0:4]
if magic != b"\177ELF":
raise ELFParseError("No ELF Magic found!")
self.class_ = ident_data[4]
self.data = ident_data[5]
self.version = ident_data[6]
self.osabi = ident_data[7]
self.abiversion = ident_data[8]
class ELFHeader(ELFPart):
_names = ['type_','machine','version','entry','phoff','shoff','flags','ehsize','phentsize','phnum','shentsize','shnum','shstrndx']
_names = [
"type_",
"machine",
"version",
"entry",
"phoff",
"shoff",
"flags",
"ehsize",
"phentsize",
"phnum",
"shentsize",
"shnum",
"shstrndx",
]
def __init__(self):
ELFPart.__init__(self)
def __init__(self):
ELFPart.__init__(self)
def parse(self, data):
fmt = "HHIIIIIHHHHHH"
self._parse_data(fmt,data)
self.type_str = self._decode_value(self.type_,ET_values)
def parse(self, data):
fmt = "HHIIIIIHHHHHH"
self._parse_data(fmt, data)
self.type_str = self._decode_value(self.type_, ET_values)
class ELFSectionHeader(ELFPart):
_names = ['name','type_','flags','addr','offset','size','link','info','addralign','entsize']
_names = [
"name",
"type_",
"flags",
"addr",
"offset",
"size",
"link",
"info",
"addralign",
"entsize",
]
def __init__(self):
ELFPart.__init__(self)
def __init__(self):
ELFPart.__init__(self)
def parse(self, data):
fmt = "IIIIIIIIII"
self._parse_data(fmt, data)
self.flags_dec = self._decode_flags(self.flags, SHT_flags)
self.type_str = self._decode_value(self.type_, SHT_values)
def parse(self, data):
fmt = "IIIIIIIIII"
self._parse_data(fmt,data)
self.flags_dec = self._decode_flags(self.flags, SHT_flags)
self.type_str = self._decode_value(self.type_, SHT_values)
# ----- Sections -----
class ELFSection:
def __init__(self, header, idx):
self.header = header
self.idx = idx
self.data = None
# resolved data
self.name_str = None
self.symbols = []
self.relocations = None
self.reloc_by_sect = {}
def __init__(self, header, idx):
self.header = header
self.idx = idx
self.data = None
# resolved data
self.name_str = None
self.symbols = []
self.relocations = None
self.reloc_by_sect = {}
def get_rela(self):
"""return a list with all relocations"""
if self.relocations is not None:
return self.relocations.rela
else:
return []
def get_rela(self):
"""return a list with all relocations"""
if self.relocations is not None:
return self.relocations.rela
else:
return []
def get_rela_by_section(self, sect):
"""return a list of relocations from the given section"""
if sect in self.reloc_by_sect:
return self.reloc_by_sect[sect]
else:
return []
def get_rela_by_section(self, sect):
"""return a list of relocations from the given section"""
if sect in self.reloc_by_sect:
return self.reloc_by_sect[sect]
else:
return []
def get_rela_sections(self):
return sorted(list(self.reloc_by_sect.keys()), key=lambda x : x.idx)
def get_rela_sections(self):
return sorted(list(self.reloc_by_sect.keys()), key=lambda x: x.idx)
def get_symbols(self):
return self.symbols
def get_symbols(self):
return self.symbols
class ELFSectionWithData(ELFSection):
def __init__(self, header, index, data):
ELFSection.__init__(self, header, index)
self.data = data
def __init__(self, header, index, data):
ELFSection.__init__(self, header, index)
self.data = data
class ELFSectionStringTable(ELFSectionWithData):
def __init__(self, header, index, data):
ELFSectionWithData.__init__(self, header, index, data)
self.strtab = None
def __init__(self, header, index, data):
ELFSectionWithData.__init__(self, header, index, data)
self.strtab = None
def decode(self):
l = len(self.data)
o = 0
strtab = []
while o < l:
n = self.data.find(b'\0',o)
if n == -1:
raise ELFParseError("Invalid strtab!")
if n > 0:
s = self.data[o:n]
else:
s = ""
strtab.append((o,s))
o = n+1
self.strtab = strtab
def decode(self):
l = len(self.data)
o = 0
strtab = []
while o < l:
n = self.data.find(b"\0", o)
if n == -1:
raise ELFParseError("Invalid strtab!")
if n > 0:
s = self.data[o:n]
else:
s = ""
strtab.append((o, s))
o = n + 1
self.strtab = strtab
def get_string(self, off):
old = (0,"")
for e in self.strtab:
if off < e[0]:
delta = off - old[0]
return old[1][delta:]
old = e
delta = off - self.strtab[-1][0]
return self.strtab[-1][1][delta:]
def get_string(self, off):
old = (0, "")
for e in self.strtab:
if off < e[0]:
delta = off - old[0]
return old[1][delta:]
old = e
delta = off - self.strtab[-1][0]
return self.strtab[-1][1][delta:]
class ELFSymbol(ELFPart):
_names = ['name','value','size','info','other','shndx']
_names = ["name", "value", "size", "info", "other", "shndx"]
def __init__(self, idx):
ELFPart.__init__(self)
self.idx = idx
self.bind = None
self.type_ = None
self.visibility = None
# will be resolved
self.name_str = None
self.section = None
def __init__(self, idx):
ELFPart.__init__(self)
self.idx = idx
self.bind = None
self.type_ = None
self.visibility = None
# will be resolved
self.name_str = None
self.section = None
def parse(self, data):
fmt = "IIIBBH"
self._parse_data(fmt, data)
# decode sub values
self.bind = self.info >> 4
self.type_ = self.info & 0xf
self.visibility = self.other & 3
# string values
self.bind_str = self._decode_value(self.bind, STB_values)
self.type_str = self._decode_value(self.type_, STT_values)
self.visibility_str = self._decode_value(self.visibility, STV_values)
self.shndx_str = self._decode_value(self.shndx, SHN_values)
def parse(self, data):
fmt = "IIIBBH"
self._parse_data(fmt, data)
# decode sub values
self.bind = self.info >> 4
self.type_ = self.info & 0xF
self.visibility = self.other & 3
# string values
self.bind_str = self._decode_value(self.bind, STB_values)
self.type_str = self._decode_value(self.type_, STT_values)
self.visibility_str = self._decode_value(self.visibility, STV_values)
self.shndx_str = self._decode_value(self.shndx, SHN_values)
class ELFSectionSymbolTable(ELFSectionWithData):
def __init__(self, header, index, data):
ELFSectionWithData.__init__(self, header, index, data)
self.symtab = []
def __init__(self, header, index, data):
ELFSectionWithData.__init__(self, header, index, data)
self.symtab = []
def decode(self):
entsize = self.header.entsize
num = self.header.size // entsize
symtab = []
self.symtab = symtab
off = 0
idx = 0
for n in range(num):
entry = ELFSymbol(idx)
entry_data = self.data[off:off+entsize]
entry.parse(entry_data)
symtab.append(entry)
off += entsize
idx += 1
return True
def decode(self):
entsize = self.header.entsize
num = self.header.size // entsize
symtab = []
self.symtab = symtab
off = 0
idx = 0
for n in range(num):
entry = ELFSymbol(idx)
entry_data = self.data[off : off + entsize]
entry.parse(entry_data)
symtab.append(entry)
off += entsize
idx += 1
return True
def get_symbol(self, idx):
return self.symtab[idx]
def get_symbol(self, idx):
return self.symtab[idx]
def get_table_symbols(self):
return self.symtab
def get_table_symbols(self):
return self.symtab
class ELFRelocationWithAddend(ELFPart):
_names = ['offset','info','addend']
_names = ["offset", "info", "addend"]
def __init__(self):
ELFPart.__init__(self)
self.sym = None
self.type_ = None
self.type_str = None
self.symbol = None
def __init__(self):
ELFPart.__init__(self)
self.sym = None
self.type_ = None
self.type_str = None
self.symbol = None
def parse(self, data):
fmt = "IIi"
self._parse_data(fmt, data)
# decode sym and type
self.sym = self.info >> 8
self.type_ = self.info & 0xff
self.type_str = self._decode_value(self.type_, R_68K_values)
def parse(self, data):
fmt = "IIi"
self._parse_data(fmt, data)
# decode sym and type
self.sym = self.info >> 8
self.type_ = self.info & 0xFF
self.type_str = self._decode_value(self.type_, R_68K_values)
class ELFSectionRelocationsWithAddend(ELFSectionWithData):
def __init__(self, header, index, data):
ELFSectionWithData.__init__(self, header, index, data)
self.rela = []
self.symtab = None
self.reloc_section = None
def __init__(self, header, index, data):
ELFSectionWithData.__init__(self, header, index, data)
self.rela = []
self.symtab = None
self.reloc_section = None
def decode(self):
entsize = self.header.entsize
num = self.header.size // entsize
rela = []
self.rela = rela
off = 0
for n in range(num):
entry = ELFRelocationWithAddend()
entry_data = self.data[off:off+entsize]
entry.parse(entry_data)
rela.append(entry)
off += entsize
def decode(self):
entsize = self.header.entsize
num = self.header.size // entsize
rela = []
self.rela = rela
off = 0
for n in range(num):
entry = ELFRelocationWithAddend()
entry_data = self.data[off : off + entsize]
entry.parse(entry_data)
rela.append(entry)
off += entsize
def get_relocations(self):
return self.rela
def get_relocations(self):
return self.rela
class ELFFile:
def __init__(self):
self.identifier = None
self.header = None
self.section_hdrs = []
self.sections = []
self.symtabs = []
self.relas = []
def __init__(self):
self.identifier = None
self.header = None
self.section_hdrs = []
self.sections = []
self.symtabs = []
self.relas = []
def get_section_by_name(self, name):
for sect in self.sections:
if sect.name_str == name:
return sect
return None
def get_section_by_name(self, name):
for sect in self.sections:
if sect.name_str == name:
return sect
return None

View File

@ -7,214 +7,213 @@ from .ELFFile import *
class ELFReader:
def _load_section_headers(self, f, ef):
shoff = ef.header.shoff
shentsize = ef.header.shentsize
f.seek(shoff, os.SEEK_SET)
shnum = ef.header.shnum
for i in range(shnum):
sh = ELFSectionHeader()
sh_data = f.read(shentsize)
sh.parse(sh_data)
ef.section_hdrs.append(sh)
def _load_section_headers(self, f, ef):
shoff = ef.header.shoff
shentsize = ef.header.shentsize
f.seek(shoff, os.SEEK_SET)
shnum = ef.header.shnum
for i in range(shnum):
sh = ELFSectionHeader()
sh_data = f.read(shentsize)
sh.parse(sh_data)
ef.section_hdrs.append(sh)
def _load_sections(self, f, ef):
sect_hdrs = ef.section_hdrs
idx = 0
for sect_hdr in sect_hdrs:
idx += 1
sect = self._load_section(f, sect_hdr, idx)
ef.sections.append(sect)
def _load_sections(self, f, ef):
sect_hdrs = ef.section_hdrs
idx = 0
for sect_hdr in sect_hdrs:
idx += 1
sect = self._load_section(f, sect_hdr, idx)
ef.sections.append(sect)
def _load_section(self, f, sect_hdr, idx):
t = sect_hdr.type_
size = sect_hdr.size
if t == SHT_NOBITS or size == 0:
sect = ELFSection(sect_hdr, idx)
else:
# read data
offset = sect_hdr.offset
f.seek(offset, os.SEEK_SET)
data = f.read(size)
# decode?
if t == SHT_STRTAB:
sect = ELFSectionStringTable(sect_hdr, idx, data)
sect.decode()
elif t == SHT_SYMTAB:
sect = ELFSectionSymbolTable(sect_hdr, idx, data)
sect.decode()
elif t == SHT_RELA:
sect = ELFSectionRelocationsWithAddend(sect_hdr, idx, data)
sect.decode()
else:
sect = ELFSectionWithData(sect_hdr, idx, data)
return sect
def _name_section(self, section, strtab):
off = section.header.name
section.name_str = strtab.get_string(off)
def _resolve_symtab_names(self, sect, sections):
# get linked string table
strtab_seg_num = sect.header.link
if strtab_seg_num < 1 or strtab_seg_num >= len(sections):
raise ELFParseError("Invalid strtab for symtab: "+strtab_seg_num)
strtab = sections[strtab_seg_num]
if strtab.__class__ != ELFSectionStringTable:
raise ELFParserError("Invalid strtab segment for symtab")
# resolve all symbol names
for sym in sect.symtab:
sym.name_str = strtab.get_string(sym.name)
def _resolve_symtab_indices(self, sect, sections):
for sym in sect.symtab:
if sym.shndx_str == None:
# refers a valid section
idx = sym.shndx
sym.section = sections[idx]
def _assign_symbols_to_sections(self, sect):
src_file_sym = None
all_symbols = []
for sym in sect.symtab:
sym_type = sym.type_str
if sym_type == 'FILE':
# store file symbol for following symbols
src_file_sym = sym
elif sym_type in ('OBJECT','FUNC','NOTYPE'):
# add containing file symbol and its name
if src_file_sym != None:
sym.file_sym = src_file_sym
def _load_section(self, f, sect_hdr, idx):
t = sect_hdr.type_
size = sect_hdr.size
if t == SHT_NOBITS or size == 0:
sect = ELFSection(sect_hdr, idx)
else:
sym.file_sym = None
# add symbol to segment
sym_sect = sym.section
if sym_sect is not None:
sym_sect.symbols.append(sym)
# list of all symbols assigned
all_symbols.append(sym_sect.symbols)
# now sort all symbol lists
for symbols in all_symbols:
symbols.sort(key=lambda x : x.value)
# read data
offset = sect_hdr.offset
f.seek(offset, os.SEEK_SET)
data = f.read(size)
# decode?
if t == SHT_STRTAB:
sect = ELFSectionStringTable(sect_hdr, idx, data)
sect.decode()
elif t == SHT_SYMTAB:
sect = ELFSectionSymbolTable(sect_hdr, idx, data)
sect.decode()
elif t == SHT_RELA:
sect = ELFSectionRelocationsWithAddend(sect_hdr, idx, data)
sect.decode()
else:
sect = ELFSectionWithData(sect_hdr, idx, data)
return sect
def _resolve_rela_links(self, sect, sections):
link = sect.header.link
info = sect.header.info
num_sects = len(sections)
if link == 0 or link >= num_sects:
raise ELFParseError("Invalid rela link!")
if info == 0 or info >= num_sects:
raise ELFParseError("Invalid rela info!")
def _name_section(self, section, strtab):
off = section.header.name
section.name_str = strtab.get_string(off)
# info_seg -> src segment we will apply rela on
src_sect = sections[info]
sect.reloc_section = src_sect
def _resolve_symtab_names(self, sect, sections):
# get linked string table
strtab_seg_num = sect.header.link
if strtab_seg_num < 1 or strtab_seg_num >= len(sections):
raise ELFParseError("Invalid strtab for symtab: " + strtab_seg_num)
strtab = sections[strtab_seg_num]
if strtab.__class__ != ELFSectionStringTable:
raise ELFParserError("Invalid strtab segment for symtab")
# resolve all symbol names
for sym in sect.symtab:
sym.name_str = strtab.get_string(sym.name)
# link_seg -> symbol table
sect.symtab = sections[link]
def _resolve_symtab_indices(self, sect, sections):
for sym in sect.symtab:
if sym.shndx_str == None:
# refers a valid section
idx = sym.shndx
sym.section = sections[idx]
# store link in segment for this relocation
src_sect.relocations = sect
def _assign_symbols_to_sections(self, sect):
src_file_sym = None
all_symbols = []
for sym in sect.symtab:
sym_type = sym.type_str
if sym_type == "FILE":
# store file symbol for following symbols
src_file_sym = sym
elif sym_type in ("OBJECT", "FUNC", "NOTYPE"):
# add containing file symbol and its name
if src_file_sym != None:
sym.file_sym = src_file_sym
else:
sym.file_sym = None
# add symbol to segment
sym_sect = sym.section
if sym_sect is not None:
sym_sect.symbols.append(sym)
# list of all symbols assigned
all_symbols.append(sym_sect.symbols)
# now sort all symbol lists
for symbols in all_symbols:
symbols.sort(key=lambda x: x.value)
# a map for rela by tgt segment
by_sect = {}
src_sect.reloc_by_sect = by_sect
def _resolve_rela_links(self, sect, sections):
link = sect.header.link
info = sect.header.info
num_sects = len(sections)
if link == 0 or link >= num_sects:
raise ELFParseError("Invalid rela link!")
if info == 0 or info >= num_sects:
raise ELFParseError("Invalid rela info!")
# now process all rela entries
symtab = sect.symtab
for entry in sect.rela:
# look up symbol of rela entry
sym_idx = entry.sym
sym = symtab.get_symbol(sym_idx)
entry.symbol = sym
# copy section we relocate from
entry.section = sym.section
# calc addend in segment
entry.section_addend = entry.addend + sym.value
# info_seg -> src segment we will apply rela on
src_sect = sections[info]
sect.reloc_section = src_sect
# clear symbol if its empty
if sym.name_str == "":
entry.symbol = None
# link_seg -> symbol table
sect.symtab = sections[link]
# add entry to section list
tgt_sect = entry.section
if tgt_sect in by_sect:
by_sect_list = by_sect[tgt_sect]
else:
by_sect_list = []
by_sect[tgt_sect] = by_sect_list
by_sect_list.append(entry)
# store link in segment for this relocation
src_sect.relocations = sect
# sort all by_seg entries
for sect in by_sect:
by_sect_list = by_sect[sect]
by_sect_list.sort(key=lambda x : x.offset)
# a map for rela by tgt segment
by_sect = {}
src_sect.reloc_by_sect = by_sect
def load(self, f):
"""load an ELF file from the given file object f
# now process all rela entries
symtab = sect.symtab
for entry in sect.rela:
# look up symbol of rela entry
sym_idx = entry.sym
sym = symtab.get_symbol(sym_idx)
entry.symbol = sym
# copy section we relocate from
entry.section = sym.section
# calc addend in segment
entry.section_addend = entry.addend + sym.value
# clear symbol if its empty
if sym.name_str == "":
entry.symbol = None
# add entry to section list
tgt_sect = entry.section
if tgt_sect in by_sect:
by_sect_list = by_sect[tgt_sect]
else:
by_sect_list = []
by_sect[tgt_sect] = by_sect_list
by_sect_list.append(entry)
# sort all by_seg entries
for sect in by_sect:
by_sect_list = by_sect[sect]
by_sect_list.sort(key=lambda x: x.offset)
def load(self, f):
"""load an ELF file from the given file object f
and return an ELFFile instance or None if loading failed"""
ef = ELFFile()
ef = ELFFile()
# read identifier
ident = ELFIdentifier()
ident_data = f.read(16)
ident.parse(ident_data)
ef.identifier = ident
# read identifier
ident = ELFIdentifier()
ident_data = f.read(16)
ident.parse(ident_data)
ef.identifier = ident
# read header
hdr = ELFHeader()
hdr_data = f.read(36)
hdr.parse(hdr_data)
ef.header = hdr
# read header
hdr = ELFHeader()
hdr_data = f.read(36)
hdr.parse(hdr_data)
ef.header = hdr
# expect a non-empty section header
if hdr.shnum == 0:
raise ELFParseError("No segment header defined!")
# expect a non-empty section header
if hdr.shnum == 0:
raise ELFParseError("No segment header defined!")
# load all section headers
self._load_section_headers(f, ef)
# load all section headers
self._load_section_headers(f, ef)
# load and decode sections
self._load_sections(f, ef)
# load and decode sections
self._load_sections(f, ef)
# get string table with segment names
strtab_idx = ef.header.shstrndx
strtab = ef.sections[strtab_idx]
if strtab.__class__ != ELFSectionStringTable:
raise ELFParseError("No strtab for segment header found! ")
# get string table with segment names
strtab_idx = ef.header.shstrndx
strtab = ef.sections[strtab_idx]
if strtab.__class__ != ELFSectionStringTable:
raise ELFParseError("No strtab for segment header found! ")
# process sections
for sect in ef.sections:
# name all sections by using the string table
self._name_section(sect, strtab)
# resolve symbol table names
if sect.header.type_ == SHT_SYMTAB:
# store in file symtabs
ef.symtabs.append(sect)
# get names in symtab
self._resolve_symtab_names(sect, ef.sections)
# link sections to symbols
self._resolve_symtab_indices(sect, ef.sections)
# assign symbols to sections
self._assign_symbols_to_sections(sect)
# process sections
for sect in ef.sections:
# name all sections by using the string table
self._name_section(sect, strtab)
# resolve symbol table names
if sect.header.type_ == SHT_SYMTAB:
# store in file symtabs
ef.symtabs.append(sect)
# get names in symtab
self._resolve_symtab_names(sect, ef.sections)
# link sections to symbols
self._resolve_symtab_indices(sect, ef.sections)
# assign symbols to sections
self._assign_symbols_to_sections(sect)
# resolve rela links and symbols
for sect in ef.sections:
if sect.header.type_ == SHT_RELA:
self._resolve_rela_links(sect, ef.sections)
ef.relas.append(sect)
# resolve rela links and symbols
for sect in ef.sections:
if sect.header.type_ == SHT_RELA:
self._resolve_rela_links(sect, ef.sections)
ef.relas.append(sect)
return ef
return ef
# mini test
if __name__ == '__main__':
import sys
reader = ELFReader()
for a in sys.argv[1:]:
f = open(a, "rb")
ef = reader.load(f)
if __name__ == "__main__":
import sys
reader = ELFReader()
for a in sys.argv[1:]:
f = open(a, "rb")
ef = reader.load(f)

View File

@ -1,223 +1,223 @@
from amitools.binfmt.BinImage import *
from .HunkBlockFile import HunkBlockFile, HunkParseError
from .HunkLoadSegFile import HunkLoadSegFile, HunkSegment
from .HunkDebug import *
from . import Hunk
class BinFmtHunk:
"""Handle Amiga's native Hunk file format"""
"""Handle Amiga's native Hunk file format"""
def is_image(self, path):
"""check if a given file is a hunk LoadSeg file"""
with open(path, "rb") as f:
return self.is_image_fobj(f)
def is_image(self, path):
"""check if a given file is a hunk LoadSeg file"""
with open(path, "rb") as f:
return self.is_image_fobj(f)
def is_image_fobj(self, fobj):
"""check if a given fobj is a hunk LoadSeg file"""
bf = HunkBlockFile()
bf_type = bf.peek_type(fobj)
return bf_type == Hunk.TYPE_LOADSEG
def is_image_fobj(self, fobj):
"""check if a given fobj is a hunk LoadSeg file"""
bf = HunkBlockFile()
bf_type = bf.peek_type(fobj)
return bf_type == Hunk.TYPE_LOADSEG
def load_image(self, path):
"""load a BinImage from a hunk file given via path"""
with open(path, "rb") as f:
return self.load_image_fobj(f)
def load_image(self, path):
"""load a BinImage from a hunk file given via path"""
with open(path, "rb") as f:
return self.load_image_fobj(f)
def load_image_fobj(self, fobj):
"""load a BinImage from a hunk file given via file obj"""
# read the hunk blocks
bf = HunkBlockFile()
bf.read(fobj, isLoadSeg=True)
# derive load seg file
lsf = HunkLoadSegFile()
lsf.parse_block_file(bf)
# convert load seg file
return self.create_image_from_load_seg_file(lsf)
def load_image_fobj(self, fobj):
"""load a BinImage from a hunk file given via file obj"""
# read the hunk blocks
bf = HunkBlockFile()
bf.read(fobj, isLoadSeg=True)
# derive load seg file
lsf = HunkLoadSegFile()
lsf.parse_block_file(bf)
# convert load seg file
return self.create_image_from_load_seg_file(lsf)
def save_image(self, path, bin_img):
"""save a BinImage to a hunk file given via path"""
with open(path, "wb") as f:
self.save_image_fobj(f, bin_img)
def save_image(self, path, bin_img):
"""save a BinImage to a hunk file given via path"""
with open(path, "wb") as f:
self.save_image_fobj(f, bin_img)
def save_image_fobj(self, fobj, bin_img):
"""save a BinImage to a hunk file given via file obj"""
lsf = self.create_load_seg_file_from_image(bin_img)
bf = lsf.create_block_file()
bf.write(fobj, isLoadSeg=True)
def save_image_fobj(self, fobj, bin_img):
"""save a BinImage to a hunk file given via file obj"""
lsf = self.create_load_seg_file_from_image(bin_img)
bf = lsf.create_block_file()
bf.write(fobj, isLoadSeg=True)
def create_load_seg_file_from_image(self, bin_img):
"""create a HunkLodSegFile from a BinImage"""
lsf = HunkLoadSegFile()
for seg in bin_img.segments:
seg_type = seg.get_type()
# create HunkSegment
lseg = HunkSegment()
lsf.add_segment(lseg)
if seg_type == SEGMENT_TYPE_CODE:
lseg.setup_code(seg.data)
elif seg_type == SEGMENT_TYPE_DATA:
lseg.setup_data(seg.data)
elif seg_type == SEGMENT_TYPE_BSS:
lseg.setup_bss(seg.size)
else:
raise HunkParseError("Unknown Segment Type in BinImage: %d" % seg_type)
# add relocs
self._add_bin_img_relocs(lseg, seg)
# add symbols
self._add_bin_img_symbols(lseg, seg)
# add debug info
self._add_bin_img_debug_info(lseg, seg)
return lsf
def create_load_seg_file_from_image(self, bin_img):
"""create a HunkLodSegFile from a BinImage"""
lsf = HunkLoadSegFile()
for seg in bin_img.segments:
seg_type = seg.get_type()
# create HunkSegment
lseg = HunkSegment()
lsf.add_segment(lseg)
if seg_type == SEGMENT_TYPE_CODE:
lseg.setup_code(seg.data)
elif seg_type == SEGMENT_TYPE_DATA:
lseg.setup_data(seg.data)
elif seg_type == SEGMENT_TYPE_BSS:
lseg.setup_bss(seg.size)
else:
raise HunkParseError("Unknown Segment Type in BinImage: %d" % seg_type)
# add relocs
self._add_bin_img_relocs(lseg, seg)
# add symbols
self._add_bin_img_symbols(lseg, seg)
# add debug info
self._add_bin_img_debug_info(lseg, seg)
return lsf
def _add_bin_img_relocs(self, hunk_seg, seg):
reloc_segs = seg.get_reloc_to_segs()
hunk_relocs = []
for reloc_seg in reloc_segs:
seg_id = reloc_seg.id
reloc = seg.get_reloc(reloc_seg)
relocs = reloc.get_relocs()
offsets = []
for r in relocs:
if r.get_width() != 2 or r.get_addend() != 0:
raise HunkParseError("Invalid reloc: " + r)
offsets.append(r.get_offset())
hunk_relocs.append((seg_id, offsets))
if len(hunk_relocs) > 0:
hunk_seg.setup_relocs(hunk_relocs)
def _add_bin_img_relocs(self, hunk_seg, seg):
reloc_segs = seg.get_reloc_to_segs()
hunk_relocs = []
for reloc_seg in reloc_segs:
seg_id = reloc_seg.id
reloc = seg.get_reloc(reloc_seg)
relocs = reloc.get_relocs()
offsets = []
for r in relocs:
if r.get_width() != 2 or r.get_addend() != 0:
raise HunkParseError("Invalid reloc: " + r)
offsets.append(r.get_offset())
hunk_relocs.append((seg_id, offsets))
if len(hunk_relocs) > 0:
hunk_seg.setup_relocs(hunk_relocs)
def _add_bin_img_symbols(self, hunk_seg, seg):
sym_tab = seg.get_symtab()
if sym_tab is not None:
hunk_sym_list = []
for sym in sym_tab.get_symbols():
hunk_sym_list.append((sym.get_name(), sym.get_offset()))
hunk_seg.setup_symbols(hunk_sym_list)
def _add_bin_img_symbols(self, hunk_seg, seg):
sym_tab = seg.get_symtab()
if sym_tab is not None:
hunk_sym_list = []
for sym in sym_tab.get_symbols():
hunk_sym_list.append((sym.get_name(), sym.get_offset()))
hunk_seg.setup_symbols(hunk_sym_list)
def _add_bin_img_debug_info(self, hunk_seg, seg):
debug_line = seg.get_debug_line()
if debug_line is not None:
for file in debug_line.get_files():
src_file = file.get_src_file()
base_offset = file.get_base_offset()
dl = HunkDebugLine(src_file, base_offset)
for e in file.get_entries():
offset = e.get_offset()
src_line = e.get_src_line()
flags = e.get_flags()
hunk_src_line = src_line | (flags << 24)
dl.add_entry(offset, hunk_src_line)
hunk_seg.setup_debug(dl)
def _add_bin_img_debug_info(self, hunk_seg, seg):
debug_line = seg.get_debug_line()
if debug_line is not None:
for file in debug_line.get_files():
src_file = file.get_src_file()
base_offset = file.get_base_offset()
dl = HunkDebugLine(src_file, base_offset)
for e in file.get_entries():
offset = e.get_offset()
src_line = e.get_src_line()
flags = e.get_flags()
hunk_src_line = src_line | (flags << 24)
dl.add_entry(offset, hunk_src_line)
hunk_seg.setup_debug(dl)
def create_image_from_load_seg_file(self, lsf):
"""create a BinImage from a HunkLoadSegFile object"""
bi = BinImage(BIN_IMAGE_TYPE_HUNK)
bi.set_file_data(lsf)
segs = lsf.get_segments()
for seg in segs:
# what type of segment to we have?
blk_id = seg.seg_blk.blk_id
size = seg.size_longs * 4
data = seg.seg_blk.data
if blk_id == Hunk.HUNK_CODE:
seg_type = SEGMENT_TYPE_CODE
elif blk_id == Hunk.HUNK_DATA:
seg_type = SEGMENT_TYPE_DATA
elif blk_id == Hunk.HUNK_BSS:
seg_type = SEGMENT_TYPE_BSS
else:
raise HunkParseError("Unknown Segment Type for BinImage: %d" % blk_id)
# create seg
bs = Segment(seg_type, size, data)
bs.set_file_data(seg)
bi.add_segment(bs)
# add relocations if any
bi_segs = bi.get_segments()
for seg in bi_segs:
# add relocations?
hseg = seg.file_data
reloc_blks = hseg.reloc_blks
if reloc_blks is not None:
self._add_hunk_relocs(reloc_blks, seg, bi_segs)
# add symbol table
symbol_blk = hseg.symbol_blk
if symbol_blk is not None:
self._add_hunk_symbols(symbol_blk, seg)
# add debug infos
debug_infos = hseg.debug_infos
if debug_infos is not None:
self._add_debug_infos(debug_infos, seg)
def create_image_from_load_seg_file(self, lsf):
"""create a BinImage from a HunkLoadSegFile object"""
bi = BinImage(BIN_IMAGE_TYPE_HUNK)
bi.set_file_data(lsf)
segs = lsf.get_segments()
for seg in segs:
# what type of segment to we have?
blk_id = seg.seg_blk.blk_id
size = seg.size_longs * 4
data = seg.seg_blk.data
if blk_id == Hunk.HUNK_CODE:
seg_type = SEGMENT_TYPE_CODE
elif blk_id == Hunk.HUNK_DATA:
seg_type = SEGMENT_TYPE_DATA
elif blk_id == Hunk.HUNK_BSS:
seg_type = SEGMENT_TYPE_BSS
else:
raise HunkParseError("Unknown Segment Type for BinImage: %d" % blk_id)
# create seg
bs = Segment(seg_type, size, data)
bs.set_file_data(seg)
bi.add_segment(bs)
# add relocations if any
bi_segs = bi.get_segments()
for seg in bi_segs:
# add relocations?
hseg = seg.file_data
reloc_blks = hseg.reloc_blks
if reloc_blks is not None:
self._add_hunk_relocs(reloc_blks, seg, bi_segs)
# add symbol table
symbol_blk = hseg.symbol_blk
if symbol_blk is not None:
self._add_hunk_symbols(symbol_blk, seg)
# add debug infos
debug_infos = hseg.debug_infos
if debug_infos is not None:
self._add_debug_infos(debug_infos, seg)
return bi
return bi
def _add_hunk_relocs(self, blks, seg, all_segs):
"""add relocations to a segment"""
for blk in blks:
if blk.blk_id not in (Hunk.HUNK_ABSRELOC32, Hunk.HUNK_RELOC32SHORT):
raise HunkParseError("Invalid Relocations for BinImage: %d" % blk_id)
relocs = blk.relocs
for r in relocs:
hunk_num = r[0]
offsets = r[1]
to_seg = all_segs[hunk_num]
# create reloc for target segment or reuse one.
rl = seg.get_reloc(to_seg)
if rl == None:
rl = Relocations(to_seg)
# add offsets
for o in offsets:
r = Reloc(o)
rl.add_reloc(r)
seg.add_reloc(to_seg, rl)
def _add_hunk_relocs(self, blks, seg, all_segs):
"""add relocations to a segment"""
for blk in blks:
if blk.blk_id not in (Hunk.HUNK_ABSRELOC32, Hunk.HUNK_RELOC32SHORT):
raise HunkParseError("Invalid Relocations for BinImage: %d" % blk_id)
relocs = blk.relocs
for r in relocs:
hunk_num = r[0]
offsets = r[1]
to_seg = all_segs[hunk_num]
# create reloc for target segment or reuse one.
rl = seg.get_reloc(to_seg)
if rl == None:
rl = Relocations(to_seg)
# add offsets
for o in offsets:
r = Reloc(o)
rl.add_reloc(r)
seg.add_reloc(to_seg, rl)
def _add_hunk_symbols(self, blk, seg):
"""add symbols to segment"""
syms = blk.symbols
if len(syms) == 0:
return
st = SymbolTable()
seg.set_symtab(st)
for sym in syms:
name = sym[0]
offset = sym[1]
symbol = Symbol(offset, name)
st.add_symbol(symbol)
def _add_hunk_symbols(self, blk, seg):
"""add symbols to segment"""
syms = blk.symbols
if len(syms) == 0:
return
st = SymbolTable()
seg.set_symtab(st)
for sym in syms:
name = sym[0]
offset = sym[1]
symbol = Symbol(offset, name)
st.add_symbol(symbol)
def _add_debug_infos(self, debug_infos, seg):
dl = DebugLine()
seg.set_debug_line(dl)
for debug_info in debug_infos:
# add source line infos
if isinstance(debug_info, HunkDebugLine):
src_file = debug_info.src_file
# abs path?
pos = src_file.rfind('/')
if pos != -1:
dir_name = src_file[:pos]
src_file = src_file[pos+1:]
else:
dir_name = ""
base_offset = debug_info.base_offset
df = DebugLineFile(src_file, dir_name, base_offset)
dl.add_file(df)
for entry in debug_info.get_entries():
off = entry.offset
src_line = entry.src_line & 0xffffff
flags = (entry.src_line & 0xff000000) >> 24
e = DebugLineEntry(off, src_line, flags)
df.add_entry(e)
def _add_debug_infos(self, debug_infos, seg):
dl = DebugLine()
seg.set_debug_line(dl)
for debug_info in debug_infos:
# add source line infos
if isinstance(debug_info, HunkDebugLine):
src_file = debug_info.src_file
# abs path?
pos = src_file.rfind("/")
if pos != -1:
dir_name = src_file[:pos]
src_file = src_file[pos + 1 :]
else:
dir_name = ""
base_offset = debug_info.base_offset
df = DebugLineFile(src_file, dir_name, base_offset)
dl.add_file(df)
for entry in debug_info.get_entries():
off = entry.offset
src_line = entry.src_line & 0xFFFFFF
flags = (entry.src_line & 0xFF000000) >> 24
e = DebugLineEntry(off, src_line, flags)
df.add_entry(e)
# mini test
if __name__ == '__main__':
import sys
bf = BinFmtHunk()
for a in sys.argv[1:]:
if bf.is_image(a):
print("loading", a)
bi = bf.load_image(a)
print(bi)
bf.save_image("a.out", bi)
else:
print("NO HUNK:", a)
if __name__ == "__main__":
import sys
bf = BinFmtHunk()
for a in sys.argv[1:]:
if bf.is_image(a):
print("loading", a)
bi = bf.load_image(a)
print(bi)
bf.save_image("a.out", bi)
else:
print("NO HUNK:", a)

View File

@ -1,149 +1,139 @@
"""General definitions for the AmigaOS Hunk format"""
HUNK_UNIT = 999
HUNK_NAME = 1000
HUNK_CODE = 1001
HUNK_DATA = 1002
HUNK_BSS = 1003
HUNK_UNIT = 999
HUNK_NAME = 1000
HUNK_CODE = 1001
HUNK_DATA = 1002
HUNK_BSS = 1003
HUNK_ABSRELOC32 = 1004
HUNK_RELRELOC16 = 1005
HUNK_RELRELOC8 = 1006
HUNK_EXT = 1007
HUNK_SYMBOL = 1008
HUNK_DEBUG = 1009
HUNK_END = 1010
HUNK_HEADER = 1011
HUNK_RELRELOC8 = 1006
HUNK_EXT = 1007
HUNK_SYMBOL = 1008
HUNK_DEBUG = 1009
HUNK_END = 1010
HUNK_HEADER = 1011
HUNK_OVERLAY = 1013
HUNK_BREAK = 1014
HUNK_DREL32 = 1015
HUNK_DREL16 = 1016
HUNK_DREL8 = 1017
HUNK_LIB = 1018
HUNK_INDEX = 1019
HUNK_OVERLAY = 1013
HUNK_BREAK = 1014
HUNK_DREL32 = 1015
HUNK_DREL16 = 1016
HUNK_DREL8 = 1017
HUNK_LIB = 1018
HUNK_INDEX = 1019
HUNK_RELOC32SHORT = 1020
HUNK_RELRELOC32 = 1021
HUNK_ABSRELOC16 = 1022
HUNK_PPC_CODE = 1257
HUNK_PPC_CODE = 1257
HUNK_RELRELOC26 = 1260
hunk_names = {
HUNK_UNIT : "HUNK_UNIT",
HUNK_NAME : "HUNK_NAME",
HUNK_CODE : "HUNK_CODE",
HUNK_DATA : "HUNK_DATA",
HUNK_BSS : "HUNK_BSS",
HUNK_ABSRELOC32 : "HUNK_ABSRELOC32",
HUNK_RELRELOC16 : "HUNK_RELRELOC16",
HUNK_RELRELOC8 : "HUNK_RELRELOC8",
HUNK_EXT : "HUNK_EXT",
HUNK_SYMBOL : "HUNK_SYMBOL",
HUNK_DEBUG : "HUNK_DEBUG",
HUNK_END : "HUNK_END",
HUNK_HEADER : "HUNK_HEADER",
HUNK_OVERLAY : "HUNK_OVERLAY",
HUNK_BREAK : "HUNK_BREAK",
HUNK_DREL32 : "HUNK_DREL32",
HUNK_DREL16 : "HUNK_DREL16",
HUNK_DREL8 : "HUNK_DREL8",
HUNK_LIB : "HUNK_LIB",
HUNK_INDEX : "HUNK_INDEX",
HUNK_RELOC32SHORT : "HUNK_RELOC32SHORT",
HUNK_RELRELOC32 : "HUNK_RELRELOC32",
HUNK_ABSRELOC16 : "HUNK_ABSRELOC16",
HUNK_PPC_CODE : "HUNK_PPC_CODE",
HUNK_RELRELOC26 : "HUNK_RELRELOC26",
HUNK_UNIT: "HUNK_UNIT",
HUNK_NAME: "HUNK_NAME",
HUNK_CODE: "HUNK_CODE",
HUNK_DATA: "HUNK_DATA",
HUNK_BSS: "HUNK_BSS",
HUNK_ABSRELOC32: "HUNK_ABSRELOC32",
HUNK_RELRELOC16: "HUNK_RELRELOC16",
HUNK_RELRELOC8: "HUNK_RELRELOC8",
HUNK_EXT: "HUNK_EXT",
HUNK_SYMBOL: "HUNK_SYMBOL",
HUNK_DEBUG: "HUNK_DEBUG",
HUNK_END: "HUNK_END",
HUNK_HEADER: "HUNK_HEADER",
HUNK_OVERLAY: "HUNK_OVERLAY",
HUNK_BREAK: "HUNK_BREAK",
HUNK_DREL32: "HUNK_DREL32",
HUNK_DREL16: "HUNK_DREL16",
HUNK_DREL8: "HUNK_DREL8",
HUNK_LIB: "HUNK_LIB",
HUNK_INDEX: "HUNK_INDEX",
HUNK_RELOC32SHORT: "HUNK_RELOC32SHORT",
HUNK_RELRELOC32: "HUNK_RELRELOC32",
HUNK_ABSRELOC16: "HUNK_ABSRELOC16",
HUNK_PPC_CODE: "HUNK_PPC_CODE",
HUNK_RELRELOC26: "HUNK_RELRELOC26",
}
loadseg_valid_begin_hunks = [
HUNK_CODE,
HUNK_DATA,
HUNK_BSS,
HUNK_PPC_CODE
]
loadseg_valid_begin_hunks = [HUNK_CODE, HUNK_DATA, HUNK_BSS, HUNK_PPC_CODE]
loadseg_valid_extra_hunks = [
HUNK_ABSRELOC32,
HUNK_RELOC32SHORT,
HUNK_DEBUG,
HUNK_SYMBOL,
HUNK_NAME
HUNK_ABSRELOC32,
HUNK_RELOC32SHORT,
HUNK_DEBUG,
HUNK_SYMBOL,
HUNK_NAME,
]
unit_valid_main_hunks = [
HUNK_CODE,
HUNK_DATA,
HUNK_BSS,
HUNK_PPC_CODE
]
unit_valid_main_hunks = [HUNK_CODE, HUNK_DATA, HUNK_BSS, HUNK_PPC_CODE]
unit_valid_extra_hunks = [
HUNK_DEBUG,
HUNK_SYMBOL,
HUNK_NAME,
HUNK_EXT,
HUNK_ABSRELOC32,
HUNK_RELRELOC16,
HUNK_RELRELOC8,
HUNK_DREL32,
HUNK_DREL16,
HUNK_DREL8,
HUNK_RELOC32SHORT,
HUNK_RELRELOC32,
HUNK_ABSRELOC16,
HUNK_RELRELOC26,
HUNK_DEBUG,
HUNK_SYMBOL,
HUNK_NAME,
HUNK_EXT,
HUNK_ABSRELOC32,
HUNK_RELRELOC16,
HUNK_RELRELOC8,
HUNK_DREL32,
HUNK_DREL16,
HUNK_DREL8,
HUNK_RELOC32SHORT,
HUNK_RELRELOC32,
HUNK_ABSRELOC16,
HUNK_RELRELOC26,
]
reloc_hunks = [
HUNK_ABSRELOC32,
HUNK_RELRELOC16,
HUNK_RELRELOC8,
HUNK_DREL32,
HUNK_DREL16,
HUNK_DREL8,
HUNK_RELOC32SHORT,
HUNK_RELRELOC32,
HUNK_ABSRELOC16,
HUNK_RELRELOC26,
HUNK_ABSRELOC32,
HUNK_RELRELOC16,
HUNK_RELRELOC8,
HUNK_DREL32,
HUNK_DREL16,
HUNK_DREL8,
HUNK_RELOC32SHORT,
HUNK_RELRELOC32,
HUNK_ABSRELOC16,
HUNK_RELRELOC26,
]
EXT_SYMB = 0
EXT_DEF = 1
EXT_ABS = 2
EXT_RES = 3
EXT_ABSREF32 = 129
EXT_ABSCOMMON = 130
EXT_RELREF16 = 131
EXT_RELREF8 = 132
EXT_DEXT32 = 133
EXT_DEXT16 = 134
EXT_DEXT8 = 135
EXT_RELREF32 = 136
EXT_RELCOMMON = 137
EXT_ABSREF16 = 138
EXT_ABSREF8 = 139
EXT_RELREF26 = 229
EXT_SYMB = 0
EXT_DEF = 1
EXT_ABS = 2
EXT_RES = 3
EXT_ABSREF32 = 129
EXT_ABSCOMMON = 130
EXT_RELREF16 = 131
EXT_RELREF8 = 132
EXT_DEXT32 = 133
EXT_DEXT16 = 134
EXT_DEXT8 = 135
EXT_RELREF32 = 136
EXT_RELCOMMON = 137
EXT_ABSREF16 = 138
EXT_ABSREF8 = 139
EXT_RELREF26 = 229
ext_names = {
EXT_SYMB : 'EXT_SYMB',
EXT_DEF : 'EXT_DEF',
EXT_ABS : 'EXT_ABS',
EXT_RES : 'EXT_RES',
EXT_ABSREF32 : 'EXT_ABSREF32',
EXT_ABSCOMMON : 'EXT_ABSCOMMON',
EXT_RELREF16 : 'EXT_RELREF16',
EXT_RELREF8 : 'EXT_RELREF8',
EXT_DEXT32 : 'EXT_DEXT32',
EXT_DEXT16 : 'EXT_DEXT16',
EXT_DEXT8 : 'EXT_DEXT8',
EXT_RELREF32 : 'EXT_RELREF32',
EXT_RELCOMMON : 'EXT_RELCOMMON',
EXT_ABSREF16 : 'EXT_ABSREF16',
EXT_ABSREF8 : 'EXT_ABSREF8',
EXT_RELREF26 : 'EXT_RELREF26'
EXT_SYMB: "EXT_SYMB",
EXT_DEF: "EXT_DEF",
EXT_ABS: "EXT_ABS",
EXT_RES: "EXT_RES",
EXT_ABSREF32: "EXT_ABSREF32",
EXT_ABSCOMMON: "EXT_ABSCOMMON",
EXT_RELREF16: "EXT_RELREF16",
EXT_RELREF8: "EXT_RELREF8",
EXT_DEXT32: "EXT_DEXT32",
EXT_DEXT16: "EXT_DEXT16",
EXT_DEXT8: "EXT_DEXT8",
EXT_RELREF32: "EXT_RELREF32",
EXT_RELCOMMON: "EXT_RELCOMMON",
EXT_ABSREF16: "EXT_ABSREF16",
EXT_ABSREF8: "EXT_ABSREF8",
EXT_RELREF26: "EXT_RELREF26",
}
EXT_TYPE_SHIFT = 24
EXT_TYPE_SIZE_MASK = 0xffffff
EXT_TYPE_SIZE_MASK = 0xFFFFFF
RESULT_OK = 0
RESULT_NO_HUNK_FILE = 1
@ -151,28 +141,28 @@ RESULT_INVALID_HUNK_FILE = 2
RESULT_UNSUPPORTED_HUNKS = 3
result_names = {
RESULT_OK : "RESULT_OK",
RESULT_NO_HUNK_FILE : "RESULT_NO_HUNK_FILE",
RESULT_INVALID_HUNK_FILE : "RESULT_INVALID_HUNK_FILE",
RESULT_UNSUPPORTED_HUNKS : "RESULT_UNSUPPORTED_HUNKS"
RESULT_OK: "RESULT_OK",
RESULT_NO_HUNK_FILE: "RESULT_NO_HUNK_FILE",
RESULT_INVALID_HUNK_FILE: "RESULT_INVALID_HUNK_FILE",
RESULT_UNSUPPORTED_HUNKS: "RESULT_UNSUPPORTED_HUNKS",
}
HUNKF_ADVISORY = 1<<29
HUNKF_CHIP = 1<<30
HUNKF_FAST = 1<<31
HUNKF_ALL = (HUNKF_ADVISORY | HUNKF_CHIP | HUNKF_FAST)
HUNKF_ADVISORY = 1 << 29
HUNKF_CHIP = 1 << 30
HUNKF_FAST = 1 << 31
HUNKF_ALL = HUNKF_ADVISORY | HUNKF_CHIP | HUNKF_FAST
HUNK_TYPE_MASK = 0xffff
HUNK_FLAGS_MASK = 0xffff0000
HUNK_TYPE_MASK = 0xFFFF
HUNK_FLAGS_MASK = 0xFFFF0000
TYPE_UNKNOWN = 0
TYPE_LOADSEG = 1
TYPE_UNIT = 2
TYPE_LIB = 3
TYPE_UNKNOWN = 0
TYPE_LOADSEG = 1
TYPE_UNIT = 2
TYPE_LIB = 3
type_names = {
TYPE_UNKNOWN: 'TYPE_UNKNOWN',
TYPE_LOADSEG: 'TYPE_LOADSEG',
TYPE_UNIT: 'TYPE_UNIT',
TYPE_LIB: 'TYPE_LIB'
TYPE_UNKNOWN: "TYPE_UNKNOWN",
TYPE_LOADSEG: "TYPE_LOADSEG",
TYPE_UNIT: "TYPE_UNIT",
TYPE_LIB: "TYPE_LIB",
}

File diff suppressed because it is too large Load Diff

View File

@ -1,156 +1,157 @@
import struct
import io
class HunkDebugLineEntry:
def __init__(self, offset, src_line):
self.offset = offset
self.src_line = src_line
def __init__(self, offset, src_line):
self.offset = offset
self.src_line = src_line
def __str__(self):
return "[+%08x: %d]" % (self.offset, self.src_line)
def __str__(self):
return "[+%08x: %d]" % (self.offset, self.src_line)
def get_offset(self):
return self.offset
def get_offset(self):
return self.offset
def get_src_line(self):
return self.src_line
def get_src_line(self):
return self.src_line
class HunkDebugLine:
"""structure to hold source line info"""
def __init__(self, src_file, base_offset):
self.tag = 'LINE'
self.src_file = src_file
self.base_offset = base_offset
self.entries = []
"""structure to hold source line info"""
def add_entry(self, offset, src_line):
self.entries.append(HunkDebugLineEntry(offset, src_line))
def __init__(self, src_file, base_offset):
self.tag = "LINE"
self.src_file = src_file
self.base_offset = base_offset
self.entries = []
def __str__(self):
prefix = "{%s,%s,@%08x:" % (self.tag, self.src_file, self.base_offset)
return prefix + ",".join(map(str,self.entries)) + "}"
def add_entry(self, offset, src_line):
self.entries.append(HunkDebugLineEntry(offset, src_line))
def get_src_file(self):
return self.src_file
def __str__(self):
prefix = "{%s,%s,@%08x:" % (self.tag, self.src_file, self.base_offset)
return prefix + ",".join(map(str, self.entries)) + "}"
def get_base_offset(self):
return self.base_offset
def get_src_file(self):
return self.src_file
def get_entries(self):
return self.entries
def get_base_offset(self):
return self.base_offset
def get_entries(self):
return self.entries
class HunkDebugAny:
def __init__(self, tag, data, base_offset):
self.tag = tag
self.data = data
self.base_offset = base_offset
def __init__(self, tag, data, base_offset):
self.tag = tag
self.data = data
self.base_offset = base_offset
def __str__(self):
return "{%s,%d,%s}" % (self.tag, self.base_offset, self.data)
def __str__(self):
return "{%s,%d,%s}" % (self.tag, self.base_offset, self.data)
class HunkDebug:
def encode(self, debug_info):
"""encode a debug info and return a debug_data chunk"""
out = io.StringIO()
# +0: base offset
self._write_long(out, debug_info.base_offset)
# +4: type tag
tag = debug_info.tag
out.write(tag)
if tag == 'LINE':
# file name
self._write_string(out, debug_info.src_file)
# entries
for e in debug_info.entries:
self._write_long(out, e.src_line)
self._write_long(out, e.offset)
elif tag == 'HEAD':
out.write("DBGV01\0\0")
out.write(debug_info.data)
else: # any
out.write(debug_info.data)
# retrieve result
res = out.getvalue()
out.close()
return res
def encode(self, debug_info):
"""encode a debug info and return a debug_data chunk"""
out = io.StringIO()
# +0: base offset
self._write_long(out, debug_info.base_offset)
# +4: type tag
tag = debug_info.tag
out.write(tag)
if tag == "LINE":
# file name
self._write_string(out, debug_info.src_file)
# entries
for e in debug_info.entries:
self._write_long(out, e.src_line)
self._write_long(out, e.offset)
elif tag == "HEAD":
out.write("DBGV01\0\0")
out.write(debug_info.data)
else: # any
out.write(debug_info.data)
# retrieve result
res = out.getvalue()
out.close()
return res
def decode(self, debug_data):
"""decode a data block from a debug hunk"""
if len(debug_data) < 12:
return None
# +0: base_offset for file
base_offset = self._read_long(debug_data, 0)
# +4: tag
tag = debug_data[4:8]
if tag == 'LINE': # SAS/C source line info
# +8: string file name
src_file, src_size = self._read_string(debug_data, 8)
dl = HunkDebugLine(src_file, base_offset)
off = 12 + src_size
num = (len(debug_data) - off) // 8
for i in range(num):
src_line = self._read_long(debug_data, off)
offset = self._read_long(debug_data, off+4)
off += 8
dl.add_entry(offset, src_line)
return dl
elif tag == 'HEAD':
tag2 = debug_data[8:16]
assert tag2 == "DBGV01\0\0"
data = debug_data[16:]
return HunkDebugAny(tag, data, base_offset)
else:
data = debug_data[8:]
return HunkDebugAny(tag, data, base_offset)
def decode(self, debug_data):
"""decode a data block from a debug hunk"""
if len(debug_data) < 12:
return None
# +0: base_offset for file
base_offset = self._read_long(debug_data, 0)
# +4: tag
tag = debug_data[4:8]
if tag == "LINE": # SAS/C source line info
# +8: string file name
src_file, src_size = self._read_string(debug_data, 8)
dl = HunkDebugLine(src_file, base_offset)
off = 12 + src_size
num = (len(debug_data) - off) // 8
for i in range(num):
src_line = self._read_long(debug_data, off)
offset = self._read_long(debug_data, off + 4)
off += 8
dl.add_entry(offset, src_line)
return dl
elif tag == "HEAD":
tag2 = debug_data[8:16]
assert tag2 == "DBGV01\0\0"
data = debug_data[16:]
return HunkDebugAny(tag, data, base_offset)
else:
data = debug_data[8:]
return HunkDebugAny(tag, data, base_offset)
def _read_string(self, buf, pos):
size = self._read_long(buf,pos) * 4
off = pos + 4
data = buf[off:off+size]
pos = data.find('\0')
if pos == 0:
return "", size
elif pos != -1:
return data[:pos], size
else:
return data, size
def _read_string(self, buf, pos):
size = self._read_long(buf, pos) * 4
off = pos + 4
data = buf[off : off + size]
pos = data.find("\0")
if pos == 0:
return "", size
elif pos != -1:
return data[:pos], size
else:
return data, size
def _write_string(self, f, s):
n = len(s)
num_longs = int((n + 3)/4)
self._write_long(f, num_longs)
add = num_longs * 4 - n
if add > 0:
s += '\0' * add
f.write(s)
def _write_string(self, f, s):
n = len(s)
num_longs = int((n + 3) / 4)
self._write_long(f, num_longs)
add = num_longs * 4 - n
if add > 0:
s += "\0" * add
f.write(s)
def _read_long(self, buf, pos):
return struct.unpack_from(">I",buf,pos)[0]
def _read_long(self, buf, pos):
return struct.unpack_from(">I", buf, pos)[0]
def _write_long(self, f, v):
data = struct.pack(">I",v)
f.write(data)
def _write_long(self, f, v):
data = struct.pack(">I", v)
f.write(data)
# ----- mini test -----
if __name__ == '__main__':
import sys
from .HunkBlockFile import HunkBlockFile, HunkDebugBlock
hd = HunkDebug()
for a in sys.argv[1:]:
hbf = HunkBlockFile()
hbf.read_path(a)
for blk in hbf.get_blocks():
if isinstance(blk, HunkDebugBlock):
# decode debug data
dd = hd.decode(blk.debug_data)
print(a,"->",dd.tag)
# now encode again
new_debug_data = hd.encode(dd)
# compare!
assert new_debug_data == blk.debug_data
if __name__ == "__main__":
import sys
from .HunkBlockFile import HunkBlockFile, HunkDebugBlock
hd = HunkDebug()
for a in sys.argv[1:]:
hbf = HunkBlockFile()
hbf.read_path(a)
for blk in hbf.get_blocks():
if isinstance(blk, HunkDebugBlock):
# decode debug data
dd = hd.decode(blk.debug_data)
print(a, "->", dd.tag)
# now encode again
new_debug_data = hd.encode(dd)
# compare!
assert new_debug_data == blk.debug_data

View File

@ -1,209 +1,218 @@
from amitools.vamos.machine import DisAsm
from . import Hunk
class HunkDisassembler:
def __init__(self, cpu='68000'):
self.disasm = DisAsm.create(cpu)
def get_symtab(self, hunk):
for h in hunk[1:]:
if h['type'] == Hunk.HUNK_SYMBOL:
return h['symbols']
return None
def find_symbol(self, hunk, offset, always):
symtab = self.get_symtab(hunk)
if symtab == None:
if always:
return "%08x" % offset
else:
class HunkDisassembler:
def __init__(self, cpu="68000"):
self.disasm = DisAsm.create(cpu)
def get_symtab(self, hunk):
for h in hunk[1:]:
if h["type"] == Hunk.HUNK_SYMBOL:
return h["symbols"]
return None
symmap = {}
for s in symtab:
symmap[s[1]] = s[0]
offs = sorted(symmap.keys());
last = None
last_offset = 0
for o in offs:
if o == offset:
return symmap[o]
def find_symbol(self, hunk, offset, always):
symtab = self.get_symtab(hunk)
if symtab == None:
if always:
return "%08x" % offset
else:
return None
if always:
if o < offset:
# approximate to last symbol
if last != None:
return last + " + %08x" % (o - last_offset)
else:
symmap = {}
for s in symtab:
symmap[s[1]] = s[0]
offs = sorted(symmap.keys())
last = None
last_offset = 0
for o in offs:
if o == offset:
return symmap[o]
if always:
if o < offset:
# approximate to last symbol
if last != None:
return last + " + %08x" % (o - last_offset)
else:
return "%08x" % offset
last = symmap[o]
last_offset = o
if always:
return "%08x" % offset
last = symmap[o]
last_offset = o
if always:
return "%08x" % offset
else:
return None
def find_src_line(self, hunk, addr):
for h in hunk[1:]:
if h['type'] == Hunk.HUNK_DEBUG and h['debug_type'] == 'LINE':
src_map = h['src_map']
for e in src_map:
src_line = e[0]
src_addr = e[1] + h['debug_offset']
if src_addr == addr:
return (h['src_file'],src_line)
return None
# map reloc type to number of words to be relocated
map_reloc_to_num_words = {
Hunk.HUNK_ABSRELOC32 : 2,
Hunk.HUNK_DREL16 : 1,
Hunk.HUNK_DREL32 : 2
}
# find_reloc
# return
# 0 - rel_offset to addr reloc begin (in words)
# 1 - size of reloc (in words)
# 2 - hunk number reloc references
# 3 - relative offset in hunk (in bytes)
# 4 - reloc hunk
def find_reloc(self, hunk, addr, word):
end_addr = addr + len(word) * 2
for h in hunk[1:]:
valid = h['type'] in self.map_reloc_to_num_words
if valid:
num_words = self.map_reloc_to_num_words[h['type']]
reloc = h['reloc']
for hunk_num in reloc:
offsets = reloc[hunk_num]
for off in offsets:
if off >= addr and off + num_words * 2 <= end_addr:
word_offset = (off - addr) // 2 # in words
# calc offset
addr = 0
for i in range(num_words):
addr = addr * 0x10000 + word[word_offset+i]
reloc_type_name = h['type_name'].replace("HUNK_","").lower()
return (word_offset, num_words, hunk_num, addr, reloc_type_name)
return None
map_ext_ref_to_num_words = {
Hunk.EXT_ABSREF32 : 2,
Hunk.EXT_RELREF16: 1,
Hunk.EXT_DEXT16: 1
}
# find_ext_ref
# return
# 0 - word offset to word begin (in words)
# 1 - size of reloc (in words)
# 2 - name of external symbol
# 3 - type name of ext ref
def find_ext_ref(self, hunk, addr, word):
end_addr = addr + len(word) * 2
for h in hunk[1:]:
if h['type'] == Hunk.HUNK_EXT:
for ext in h['ext_ref']:
refs = ext['refs']
valid = ext['type'] in self.map_ext_ref_to_num_words
if valid:
num_words = self.map_ext_ref_to_num_words[ext['type']]
for ref in refs:
if ref >= addr and ref < end_addr:
word_offset = (ref - addr) // 2
type_name = ext['type_name'].replace("EXT_","").lower()
return (word_offset, num_words, ext['name'], type_name)
return None
# search the HUNK_EXT for a defintion
def find_ext_def(self, hunk, addr):
for h in hunk[1:]:
if h['type'] == Hunk.HUNK_EXT:
for ext in h['ext_def']:
if addr == ext['def']:
return ext['name']
return None
# search the index of a lib for a definition
def find_index_def(self, hunk, addr):
main = hunk[0]
if 'index_hunk' in main:
info = main['index_hunk']
if 'defs' in info:
for d in info['defs']:
if d['value'] == addr:
return d['name']
return None
def find_symbol_or_def(self, hunk, addr, always):
symbol = self.find_symbol(hunk, addr, False)
if symbol == None:
symbol = self.find_ext_def(hunk, addr)
if symbol == None:
symbol = self.find_index_def(hunk, addr)
if symbol == None and always:
return "%08x" % addr
return symbol
# ----- show disassembly -----
def show_disassembly(self, hunk, seg_list, start):
main = hunk[0]
lines = self.disasm.disassemble_block(main['data'],start)
# show line by line
for l in lines:
addr = l[0]
word = l[1]
code = l[2]
# try to find a symbol for this addr
symbol = self.find_symbol_or_def(hunk, addr, False)
# create line info
info = []
# find source line info
line = self.find_src_line(hunk,addr)
if line != None:
(src_file, src_line) = line
info.append( "src: %s:%d" % (src_file,src_line))
# find an extref
ext_ref = self.find_ext_ref(hunk,addr,word)
if ext_ref != None:
ref_symbol = ext_ref[2]
ref_type = ext_ref[3]
info.append( "%s: %s" % (ref_type, ref_symbol) )
# find a relocation
reloc = self.find_reloc(hunk,addr,word)
if reloc != None:
hunk_num = reloc[2]
offset = reloc[3]
reloc_type_name = reloc[4]
# a self reference
reloc_symbol = self.find_symbol_or_def(seg_list[hunk_num],offset,True)
if hunk_num == main['hunk_no']:
src = "self"
else:
src = "#%03d %s" % (hunk_num, seg_list[hunk_num][0]['type_name'])
info.append( "%s: %s: %s" % (reloc_type_name, src, reloc_symbol) )
# build comment from all infos
if len(info) > 0:
comment = "; " + ", ".join(info)
else:
comment = ""
# create final line
if symbol != None:
print("\t\t\t\t%s:" % symbol)
print("%08x\t%-20s\t%-30s %s" % (addr," ".join(["%04x" %x for x in word]),code,comment))
return None
def find_src_line(self, hunk, addr):
for h in hunk[1:]:
if h["type"] == Hunk.HUNK_DEBUG and h["debug_type"] == "LINE":
src_map = h["src_map"]
for e in src_map:
src_line = e[0]
src_addr = e[1] + h["debug_offset"]
if src_addr == addr:
return (h["src_file"], src_line)
return None
# map reloc type to number of words to be relocated
map_reloc_to_num_words = {
Hunk.HUNK_ABSRELOC32: 2,
Hunk.HUNK_DREL16: 1,
Hunk.HUNK_DREL32: 2,
}
# find_reloc
# return
# 0 - rel_offset to addr reloc begin (in words)
# 1 - size of reloc (in words)
# 2 - hunk number reloc references
# 3 - relative offset in hunk (in bytes)
# 4 - reloc hunk
def find_reloc(self, hunk, addr, word):
end_addr = addr + len(word) * 2
for h in hunk[1:]:
valid = h["type"] in self.map_reloc_to_num_words
if valid:
num_words = self.map_reloc_to_num_words[h["type"]]
reloc = h["reloc"]
for hunk_num in reloc:
offsets = reloc[hunk_num]
for off in offsets:
if off >= addr and off + num_words * 2 <= end_addr:
word_offset = (off - addr) // 2 # in words
# calc offset
addr = 0
for i in range(num_words):
addr = addr * 0x10000 + word[word_offset + i]
reloc_type_name = (
h["type_name"].replace("HUNK_", "").lower()
)
return (
word_offset,
num_words,
hunk_num,
addr,
reloc_type_name,
)
return None
map_ext_ref_to_num_words = {
Hunk.EXT_ABSREF32: 2,
Hunk.EXT_RELREF16: 1,
Hunk.EXT_DEXT16: 1,
}
# find_ext_ref
# return
# 0 - word offset to word begin (in words)
# 1 - size of reloc (in words)
# 2 - name of external symbol
# 3 - type name of ext ref
def find_ext_ref(self, hunk, addr, word):
end_addr = addr + len(word) * 2
for h in hunk[1:]:
if h["type"] == Hunk.HUNK_EXT:
for ext in h["ext_ref"]:
refs = ext["refs"]
valid = ext["type"] in self.map_ext_ref_to_num_words
if valid:
num_words = self.map_ext_ref_to_num_words[ext["type"]]
for ref in refs:
if ref >= addr and ref < end_addr:
word_offset = (ref - addr) // 2
type_name = ext["type_name"].replace("EXT_", "").lower()
return (word_offset, num_words, ext["name"], type_name)
return None
# search the HUNK_EXT for a defintion
def find_ext_def(self, hunk, addr):
for h in hunk[1:]:
if h["type"] == Hunk.HUNK_EXT:
for ext in h["ext_def"]:
if addr == ext["def"]:
return ext["name"]
return None
# search the index of a lib for a definition
def find_index_def(self, hunk, addr):
main = hunk[0]
if "index_hunk" in main:
info = main["index_hunk"]
if "defs" in info:
for d in info["defs"]:
if d["value"] == addr:
return d["name"]
return None
def find_symbol_or_def(self, hunk, addr, always):
symbol = self.find_symbol(hunk, addr, False)
if symbol == None:
symbol = self.find_ext_def(hunk, addr)
if symbol == None:
symbol = self.find_index_def(hunk, addr)
if symbol == None and always:
return "%08x" % addr
return symbol
# ----- show disassembly -----
def show_disassembly(self, hunk, seg_list, start):
main = hunk[0]
lines = self.disasm.disassemble_block(main["data"], start)
# show line by line
for l in lines:
addr = l[0]
word = l[1]
code = l[2]
# try to find a symbol for this addr
symbol = self.find_symbol_or_def(hunk, addr, False)
# create line info
info = []
# find source line info
line = self.find_src_line(hunk, addr)
if line != None:
(src_file, src_line) = line
info.append("src: %s:%d" % (src_file, src_line))
# find an extref
ext_ref = self.find_ext_ref(hunk, addr, word)
if ext_ref != None:
ref_symbol = ext_ref[2]
ref_type = ext_ref[3]
info.append("%s: %s" % (ref_type, ref_symbol))
# find a relocation
reloc = self.find_reloc(hunk, addr, word)
if reloc != None:
hunk_num = reloc[2]
offset = reloc[3]
reloc_type_name = reloc[4]
# a self reference
reloc_symbol = self.find_symbol_or_def(seg_list[hunk_num], offset, True)
if hunk_num == main["hunk_no"]:
src = "self"
else:
src = "#%03d %s" % (hunk_num, seg_list[hunk_num][0]["type_name"])
info.append("%s: %s: %s" % (reloc_type_name, src, reloc_symbol))
# build comment from all infos
if len(info) > 0:
comment = "; " + ", ".join(info)
else:
comment = ""
# create final line
if symbol != None:
print("\t\t\t\t%s:" % symbol)
print(
"%08x\t%-20s\t%-30s %s"
% (addr, " ".join(["%04x" % x for x in word]), code, comment)
)

View File

@ -1,275 +1,278 @@
from .HunkBlockFile import *
from .HunkDebug import HunkDebug
class HunkSegment:
"""holds a code, data, or bss hunk/segment"""
def __init__(self):
self.blocks = None
self.seg_blk = None
self.symbol_blk = None
self.reloc_blks = None
self.debug_blks = None
self.debug_infos = None
"""holds a code, data, or bss hunk/segment"""
def __repr__(self):
return "[seg=%s,symbol=%s,reloc=%s,debug=%s,debug_info=%s]" % \
(self._blk_str(self.seg_blk),
self._blk_str(self.symbol_blk),
self._blk_str_list(self.reloc_blks),
self._blk_str_list(self.debug_blks),
self._debug_infos_str())
def __init__(self):
self.blocks = None
self.seg_blk = None
self.symbol_blk = None
self.reloc_blks = None
self.debug_blks = None
self.debug_infos = None
def setup_code(self, data):
data, size_longs = self._pad_data(data)
self.seg_blk = HunkSegmentBlock(HUNK_CODE, data, size_longs)
def __repr__(self):
return "[seg=%s,symbol=%s,reloc=%s,debug=%s,debug_info=%s]" % (
self._blk_str(self.seg_blk),
self._blk_str(self.symbol_blk),
self._blk_str_list(self.reloc_blks),
self._blk_str_list(self.debug_blks),
self._debug_infos_str(),
)
def setup_data(self, data):
data, size_longs = self._pad_data(data)
self.seg_blk = HunkSegmentBlock(HUNK_DATA, data, size_longs)
def setup_code(self, data):
data, size_longs = self._pad_data(data)
self.seg_blk = HunkSegmentBlock(HUNK_CODE, data, size_longs)
def _pad_data(self, data):
size_bytes = len(data)
bytes_mod = size_bytes % 4
if bytes_mod > 0:
add = 4 - bytes_mod
data = data + '\0' * add
size_long = int((size_bytes + 3)/4)
return data, size_long
def setup_data(self, data):
data, size_longs = self._pad_data(data)
self.seg_blk = HunkSegmentBlock(HUNK_DATA, data, size_longs)
def setup_bss(self, size_bytes):
size_longs = int((size_bytes + 3)/4)
self.seg_blk = HunkSegmentBlock(HUNK_BSS, None, size_longs)
def _pad_data(self, data):
size_bytes = len(data)
bytes_mod = size_bytes % 4
if bytes_mod > 0:
add = 4 - bytes_mod
data = data + "\0" * add
size_long = int((size_bytes + 3) / 4)
return data, size_long
def setup_relocs(self, relocs, force_long=False):
"""relocs: ((hunk_num, (off1, off2, ...)), ...)"""
if force_long:
use_short = False
else:
use_short = self._are_relocs_short(relocs)
if use_short:
self.reloc_blks = [HunkRelocWordBlock(HUNK_RELOC32SHORT, relocs)]
else:
self.reloc_blks = [HunkRelocLongBlock(HUNK_ABSRELOC32, relocs)]
def setup_bss(self, size_bytes):
size_longs = int((size_bytes + 3) / 4)
self.seg_blk = HunkSegmentBlock(HUNK_BSS, None, size_longs)
def setup_symbols(self, symbols):
"""symbols: ((name, off), ...)"""
self.symbol_blk = HunkSymbolBlock(symbols)
def setup_debug(self, debug_info):
if self.debug_infos is None:
self.debug_infos = []
self.debug_infos.append(debug_info)
hd = HunkDebug()
debug_data = hd.encode(debug_info)
blk = HunkDebugBlock(debug_data)
if self.debug_blks is None:
self.debug_blks = []
self.debug_blks.append(blk)
def _are_relocs_short(self, relocs):
for hunk_num, offsets in relocs:
for off in offsets:
if off > 65535:
return False
return True
def _debug_infos_str(self):
if self.debug_infos is None:
return "n/a"
else:
return ",".join(map(str, self.debug_infos))
def _blk_str(self, blk):
if blk is None:
return "n/a"
else:
return hunk_names[blk.blk_id]
def _blk_str_list(self, blk_list):
res = []
if blk_list is None:
return "n/a"
for blk in blk_list:
res.append(hunk_names[blk.blk_id])
return ",".join(res)
def parse(self, blocks):
hd = HunkDebug()
self.blocks = blocks
for blk in blocks:
blk_id = blk.blk_id
if blk_id in loadseg_valid_begin_hunks:
self.seg_blk = blk
elif blk_id == HUNK_SYMBOL:
if self.symbol_blk is None:
self.symbol_blk = blk
def setup_relocs(self, relocs, force_long=False):
"""relocs: ((hunk_num, (off1, off2, ...)), ...)"""
if force_long:
use_short = False
else:
raise HunkParserError("duplicate symbols in hunk")
elif blk_id == HUNK_DEBUG:
if self.debug_blks is None:
self.debug_blks = []
self.debug_blks.append(blk)
# decode hunk debug info
debug_info = hd.decode(blk.debug_data)
if debug_info is not None:
if self.debug_infos is None:
self.debug_infos = []
self.debug_infos.append(debug_info)
elif blk_id in (HUNK_ABSRELOC32, HUNK_RELOC32SHORT):
if self.reloc_blks is None:
self.reloc_blks = []
self.reloc_blks.append(blk)
else:
raise HunkParseError("invalid hunk block")
use_short = self._are_relocs_short(relocs)
if use_short:
self.reloc_blks = [HunkRelocWordBlock(HUNK_RELOC32SHORT, relocs)]
else:
self.reloc_blks = [HunkRelocLongBlock(HUNK_ABSRELOC32, relocs)]
def create(self, blocks):
# already has blocks?
if self.blocks is not None:
blocks += self.blocks
return self.seg_blk.size_longs
# start with segment block
if self.seg_blk is None:
raise HunkParseError("no segment block!")
self.blocks = [self.seg_blk]
# has relocations
if self.reloc_blks is not None:
self.blocks += self.reloc_blks
# has debug?
if self.debug_blks is not None:
self.blocks += self.debug_blks
# has symbols?
if self.symbol_blk is not None:
self.blocks.append(self.symbol_blk)
# store blocks
blocks += self.blocks
# return size of segment
return self.seg_blk.size_longs
def setup_symbols(self, symbols):
"""symbols: ((name, off), ...)"""
self.symbol_blk = HunkSymbolBlock(symbols)
def setup_debug(self, debug_info):
if self.debug_infos is None:
self.debug_infos = []
self.debug_infos.append(debug_info)
hd = HunkDebug()
debug_data = hd.encode(debug_info)
blk = HunkDebugBlock(debug_data)
if self.debug_blks is None:
self.debug_blks = []
self.debug_blks.append(blk)
def _are_relocs_short(self, relocs):
for hunk_num, offsets in relocs:
for off in offsets:
if off > 65535:
return False
return True
def _debug_infos_str(self):
if self.debug_infos is None:
return "n/a"
else:
return ",".join(map(str, self.debug_infos))
def _blk_str(self, blk):
if blk is None:
return "n/a"
else:
return hunk_names[blk.blk_id]
def _blk_str_list(self, blk_list):
res = []
if blk_list is None:
return "n/a"
for blk in blk_list:
res.append(hunk_names[blk.blk_id])
return ",".join(res)
def parse(self, blocks):
hd = HunkDebug()
self.blocks = blocks
for blk in blocks:
blk_id = blk.blk_id
if blk_id in loadseg_valid_begin_hunks:
self.seg_blk = blk
elif blk_id == HUNK_SYMBOL:
if self.symbol_blk is None:
self.symbol_blk = blk
else:
raise HunkParserError("duplicate symbols in hunk")
elif blk_id == HUNK_DEBUG:
if self.debug_blks is None:
self.debug_blks = []
self.debug_blks.append(blk)
# decode hunk debug info
debug_info = hd.decode(blk.debug_data)
if debug_info is not None:
if self.debug_infos is None:
self.debug_infos = []
self.debug_infos.append(debug_info)
elif blk_id in (HUNK_ABSRELOC32, HUNK_RELOC32SHORT):
if self.reloc_blks is None:
self.reloc_blks = []
self.reloc_blks.append(blk)
else:
raise HunkParseError("invalid hunk block")
def create(self, blocks):
# already has blocks?
if self.blocks is not None:
blocks += self.blocks
return self.seg_blk.size_longs
# start with segment block
if self.seg_blk is None:
raise HunkParseError("no segment block!")
self.blocks = [self.seg_blk]
# has relocations
if self.reloc_blks is not None:
self.blocks += self.reloc_blks
# has debug?
if self.debug_blks is not None:
self.blocks += self.debug_blks
# has symbols?
if self.symbol_blk is not None:
self.blocks.append(self.symbol_blk)
# store blocks
blocks += self.blocks
# return size of segment
return self.seg_blk.size_longs
class HunkLoadSegFile:
"""manage a LoadSeg() hunk file starting with HUNK_HEADER"""
def __init__(self):
self.hdr_blk = None
self.segments = []
"""manage a LoadSeg() hunk file starting with HUNK_HEADER"""
def get_segments(self):
return self.segments
def __init__(self):
self.hdr_blk = None
self.segments = []
def add_segment(self, seg):
self.segments.append(seg)
def get_segments(self):
return self.segments
def parse_block_file(self, bf):
"""assign hunk blocks into segments"""
# get file blocks
blks = bf.get_blocks()
if blks is None or len(blks) == 0:
raise HunkParseError("no hunk blocks found!")
# ensure its a HUNK_HEADER
hdr_blk = blks[0]
if hdr_blk.blk_id != HUNK_HEADER:
raise HunkParseError("no HEADER block found!")
self.hdr_blk = hdr_blk
# first round: split block list into sections seperated by END
first = []
cur = None
for blk in blks[1:]:
blk_id = blk.blk_id
# split by END block
if blk_id == HUNK_END:
def add_segment(self, seg):
self.segments.append(seg)
def parse_block_file(self, bf):
"""assign hunk blocks into segments"""
# get file blocks
blks = bf.get_blocks()
if blks is None or len(blks) == 0:
raise HunkParseError("no hunk blocks found!")
# ensure its a HUNK_HEADER
hdr_blk = blks[0]
if hdr_blk.blk_id != HUNK_HEADER:
raise HunkParseError("no HEADER block found!")
self.hdr_blk = hdr_blk
# first round: split block list into sections seperated by END
first = []
cur = None
# add non end block to list
else:
# check validity of block
if blk_id not in loadseg_valid_begin_hunks and \
blk_id not in loadseg_valid_extra_hunks:
raise HunkParseError("invalid block found: %d" % blk_id)
if cur is None:
cur = []
first.append(cur)
cur.append(blk)
# second round: split list if two segments are found in a single list
# this is only necessary for broken files that lack END blocks
second = []
for l in first:
pos_seg = []
off = 0
for blk in l:
if blk.blk_id in loadseg_valid_begin_hunks:
pos_seg.append(off)
off+=1
n = len(pos_seg)
if n == 1:
# list is ok
second.append(l)
elif n > 1:
# list needs split
# we can only split if no extra block is before next segment block
new_list = None
for blk in l:
if blk.blk_id in loadseg_valid_begin_hunks:
new_list = [blk]
second.append(new_list)
elif new_list is not None:
new_list.append(blk)
else:
raise HunkParseError("can't split block list")
# check size of hunk table
if len(hdr_blk.hunk_table) != len(second):
raise HunkParseError("can't match hunks to header")
# convert block lists into segments
for l in second:
seg = HunkSegment()
seg.parse(l)
self.segments.append(seg)
# set size in segments
n = len(second)
for i in range(n):
self.segments[i].size_longs = hdr_blk.hunk_table[i]
self.segments[i].size = self.segments[i].size_longs * 4
for blk in blks[1:]:
blk_id = blk.blk_id
# split by END block
if blk_id == HUNK_END:
cur = None
# add non end block to list
else:
# check validity of block
if (
blk_id not in loadseg_valid_begin_hunks
and blk_id not in loadseg_valid_extra_hunks
):
raise HunkParseError("invalid block found: %d" % blk_id)
if cur is None:
cur = []
first.append(cur)
cur.append(blk)
# second round: split list if two segments are found in a single list
# this is only necessary for broken files that lack END blocks
second = []
for l in first:
pos_seg = []
off = 0
for blk in l:
if blk.blk_id in loadseg_valid_begin_hunks:
pos_seg.append(off)
off += 1
n = len(pos_seg)
if n == 1:
# list is ok
second.append(l)
elif n > 1:
# list needs split
# we can only split if no extra block is before next segment block
new_list = None
for blk in l:
if blk.blk_id in loadseg_valid_begin_hunks:
new_list = [blk]
second.append(new_list)
elif new_list is not None:
new_list.append(blk)
else:
raise HunkParseError("can't split block list")
# check size of hunk table
if len(hdr_blk.hunk_table) != len(second):
raise HunkParseError("can't match hunks to header")
# convert block lists into segments
for l in second:
seg = HunkSegment()
seg.parse(l)
self.segments.append(seg)
# set size in segments
n = len(second)
for i in range(n):
self.segments[i].size_longs = hdr_blk.hunk_table[i]
self.segments[i].size = self.segments[i].size_longs * 4
def create_block_file(self):
"""create a HunkBlockFile from the segments given"""
# setup header block
self.hdr_blk = HunkHeaderBlock()
blks = [self.hdr_blk]
sizes = []
for seg in self.segments:
size = seg.create(blks)
sizes.append(size)
# add HUNK_END
blks.append(HunkEndBlock())
# finally setup header
self.hdr_blk.setup(sizes)
# create HunkBlockFile
return HunkBlockFile(blks)
def create_block_file(self):
"""create a HunkBlockFile from the segments given"""
# setup header block
self.hdr_blk = HunkHeaderBlock()
blks = [self.hdr_blk]
sizes = []
for seg in self.segments:
size = seg.create(blks)
sizes.append(size)
# add HUNK_END
blks.append(HunkEndBlock())
# finally setup header
self.hdr_blk.setup(sizes)
# create HunkBlockFile
return HunkBlockFile(blks)
# mini test
if __name__ == '__main__':
import sys
for a in sys.argv[1:]:
bf = HunkBlockFile()
bf.read_path(a, isLoadSeg=True)
print(bf.get_block_type_names())
lsf = HunkLoadSegFile()
lsf.parse_block_file(bf)
print(lsf.get_segments())
# write back
new_bf = lsf.create_block_file()
new_bf.write_path("a.out")
# compare read and written stream
with open(a, "rb") as fh:
data = fh.read()
with open("a.out", "rb") as fh:
new_data = fh.read()
if len(data) != len(new_data):
print("MISMATCH", len(data), len(new_data))
else:
for i in range(len(data)):
if data[i] != new_data[i]:
print("MISMATCH @%x" % i)
print("OK")
if __name__ == "__main__":
import sys
for a in sys.argv[1:]:
bf = HunkBlockFile()
bf.read_path(a, isLoadSeg=True)
print(bf.get_block_type_names())
lsf = HunkLoadSegFile()
lsf.parse_block_file(bf)
print(lsf.get_segments())
# write back
new_bf = lsf.create_block_file()
new_bf.write_path("a.out")
# compare read and written stream
with open(a, "rb") as fh:
data = fh.read()
with open("a.out", "rb") as fh:
new_data = fh.read()
if len(data) != len(new_data):
print("MISMATCH", len(data), len(new_data))
else:
for i in range(len(data)):
if data[i] != new_data[i]:
print("MISMATCH @%x" % i)
print("OK")

File diff suppressed because it is too large Load Diff

View File

@ -2,89 +2,95 @@ import ctypes
import struct
from . import Hunk
class HunkRelocate:
def __init__(self, hunk_file, verbose=False):
self.hunk_file = hunk_file
self.verbose = verbose
def get_sizes(self):
sizes = []
for segment in self.hunk_file.segments:
main_hunk = segment[0]
size = main_hunk['alloc_size']
sizes.append(size)
return sizes
def __init__(self, hunk_file, verbose=False):
self.hunk_file = hunk_file
self.verbose = verbose
def get_total_size(self):
sizes = self.get_sizes()
total = 0
for s in sizes:
total += s
return total
def get_sizes(self):
sizes = []
for segment in self.hunk_file.segments:
main_hunk = segment[0]
size = main_hunk["alloc_size"]
sizes.append(size)
return sizes
def get_type_names(self):
names = []
for segment in self.hunk_file.segments:
main_hunk = segment[0]
name = main_hunk['type_name']
names.append(name)
return names
def get_total_size(self):
sizes = self.get_sizes()
total = 0
for s in sizes:
total += s
return total
# generate a sequence of addresses suitable for relocation
# in a single block
def get_seq_addrs(self, base_addr, padding=0):
sizes = self.get_sizes()
addrs = []
addr = base_addr
for s in sizes:
addrs.append(addr)
addr += s + padding
return addrs
def relocate(self, addr):
datas = []
for segment in self.hunk_file.segments:
main_hunk = segment[0]
hunk_no = main_hunk['hunk_no']
alloc_size = main_hunk['alloc_size']
size = main_hunk['size']
data = ctypes.create_string_buffer(alloc_size)
# fill in segment data
if 'data' in main_hunk:
data.value = main_hunk['data']
if self.verbose:
print("#%02d @ %06x" % (hunk_no, addr[hunk_no]))
# find relocation hunks
for hunk in segment[1:]:
# abs reloc 32 or
# HUNK_DREL32 is a buggy V37 HUNK_RELOC32SHORT...
if hunk['type'] == Hunk.HUNK_ABSRELOC32 or hunk['type'] == Hunk.HUNK_DREL32:
reloc = hunk['reloc']
for hunk_num in reloc:
# get address of other hunk
hunk_addr = addr[hunk_num]
offsets = reloc[hunk_num]
for offset in offsets:
self.relocate32(hunk_no,data,offset,hunk_addr)
datas.append(data.raw)
return datas
def get_type_names(self):
names = []
for segment in self.hunk_file.segments:
main_hunk = segment[0]
name = main_hunk["type_name"]
names.append(name)
return names
def relocate32(self, hunk_no, data, offset, hunk_addr):
delta = self.read_long(data, offset)
addr = hunk_addr + delta
self.write_long(data, offset, addr)
if self.verbose:
print("#%02d + %06x: %06x (delta) + %06x (hunk_addr) -> %06x" % (hunk_no, offset, delta, hunk_addr, addr))
def read_long(self, data, offset):
bytes = data[offset:offset+4]
return struct.unpack(">i",bytes)[0]
def write_long(self, data, offset, value):
bytes = struct.pack(">i",value)
data[offset:offset+4] = bytes
    # generate a sequence of addresses suitable for relocation
    # in a single block
    def get_seq_addrs(self, base_addr, padding=0):
        """Lay out all segments contiguously starting at base_addr.

        Returns the start address of each segment; 'padding' bytes are
        inserted between consecutive segments.
        """
        sizes = self.get_sizes()
        addrs = []
        addr = base_addr
        for s in sizes:
            addrs.append(addr)
            addr += s + padding
        return addrs
def relocate(self, addr):
datas = []
for segment in self.hunk_file.segments:
main_hunk = segment[0]
hunk_no = main_hunk["hunk_no"]
alloc_size = main_hunk["alloc_size"]
size = main_hunk["size"]
data = ctypes.create_string_buffer(alloc_size)
# fill in segment data
if "data" in main_hunk:
data.value = main_hunk["data"]
if self.verbose:
print("#%02d @ %06x" % (hunk_no, addr[hunk_no]))
# find relocation hunks
for hunk in segment[1:]:
# abs reloc 32 or
# HUNK_DREL32 is a buggy V37 HUNK_RELOC32SHORT...
if (
hunk["type"] == Hunk.HUNK_ABSRELOC32
or hunk["type"] == Hunk.HUNK_DREL32
):
reloc = hunk["reloc"]
for hunk_num in reloc:
# get address of other hunk
hunk_addr = addr[hunk_num]
offsets = reloc[hunk_num]
for offset in offsets:
self.relocate32(hunk_no, data, offset, hunk_addr)
datas.append(data.raw)
return datas
    def relocate32(self, hunk_no, data, offset, hunk_addr):
        """Add hunk_addr to the 32 bit value stored at 'offset' in data."""
        delta = self.read_long(data, offset)
        addr = hunk_addr + delta
        self.write_long(data, offset, addr)
        if self.verbose:
            print(
                "#%02d + %06x: %06x (delta) + %06x (hunk_addr) -> %06x"
                % (hunk_no, offset, delta, hunk_addr, addr)
            )
    def read_long(self, data, offset):
        """Read the big-endian signed 32 bit value at 'offset' from data."""
        # NOTE: 'bytes' shadows the builtin of the same name
        bytes = data[offset : offset + 4]
        return struct.unpack(">i", bytes)[0]
    def write_long(self, data, offset, value):
        """Store 'value' as a big-endian signed 32 bit number at 'offset'."""
        # NOTE: 'bytes' shadows the builtin of the same name
        bytes = struct.pack(">i", value)
        data[offset : offset + 4] = bytes

View File

@ -2,223 +2,260 @@ from . import Hunk
from . import HunkDisassembler
from amitools.util.HexDump import *
class HunkShow:
def __init__(
self,
hunk_file,
show_relocs=False,
show_debug=False,
disassemble=False,
disassemble_start=0,
hexdump=False,
brief=False,
cpu="68000",
):
self.hunk_file = hunk_file
def __init__(self, hunk_file, show_relocs=False, show_debug=False, \
disassemble=False, disassemble_start=0, hexdump=False, brief=False, \
cpu='68000'):
self.hunk_file = hunk_file
# clone file refs
self.header = hunk_file.header
self.segments = hunk_file.segments
self.overlay = hunk_file.overlay
self.overlay_headers = hunk_file.overlay_headers
self.overlay_segments = hunk_file.overlay_segments
self.libs = hunk_file.libs
self.units = hunk_file.units
# clone file refs
self.header = hunk_file.header
self.segments = hunk_file.segments
self.overlay = hunk_file.overlay
self.overlay_headers = hunk_file.overlay_headers
self.overlay_segments = hunk_file.overlay_segments
self.libs = hunk_file.libs
self.units = hunk_file.units
self.show_relocs = show_relocs
self.show_debug = show_debug
self.disassemble = disassemble
self.disassemble_start = disassemble_start
self.cpu = cpu
self.hexdump = hexdump
self.brief = brief
self.show_relocs=show_relocs
self.show_debug=show_debug
self.disassemble=disassemble
self.disassemble_start=disassemble_start
self.cpu = cpu
self.hexdump=hexdump
self.brief=brief
def show_segments(self):
hunk_type = self.hunk_file.type
if hunk_type == Hunk.TYPE_LOADSEG:
self.show_loadseg_segments()
elif hunk_type == Hunk.TYPE_UNIT:
self.show_unit_segments()
elif hunk_type == Hunk.TYPE_LIB:
self.show_lib_segments()
    def show_segments(self):
        """Dispatch to the dump routine matching the hunk file type."""
        hunk_type = self.hunk_file.type
        if hunk_type == Hunk.TYPE_LOADSEG:
            self.show_loadseg_segments()
        elif hunk_type == Hunk.TYPE_UNIT:
            self.show_unit_segments()
        elif hunk_type == Hunk.TYPE_LIB:
            self.show_lib_segments()
    def show_lib_segments(self):
        """Print every library with all of its units and segments."""
        for lib in self.libs:
            print("Library #%d" % lib["lib_no"])
            for unit in lib["units"]:
                self.print_unit(unit["unit_no"], unit["name"])
                for segment in unit["segments"]:
                    self.show_segment(segment, unit["segments"])
  def show_lib_segments(self):
    """Print every library with all of its units and segments."""
    for lib in self.libs:
      print("Library #%d" % lib['lib_no'])
      for unit in lib['units']:
        self.print_unit(unit['unit_no'], unit['name'])
        for segment in unit['segments']:
          self.show_segment(segment, unit['segments'])
    def show_unit_segments(self):
        """Print every unit with all of its segments."""
        for unit in self.units:
            self.print_unit(unit["unit_no"], unit["name"])
            for segment in unit["segments"]:
                self.show_segment(segment, unit["segments"])
  def show_unit_segments(self):
    """Print every unit with all of its segments."""
    for unit in self.units:
      self.print_unit(unit['unit_no'], unit['name'])
      for segment in unit['segments']:
        self.show_segment(segment, unit['segments'])
def show_loadseg_segments(self):
# header + segments
if not self.brief:
self.print_header(self.header)
for segment in self.segments:
self.show_segment(segment, self.segments)
# overlay
if self.overlay != None:
print("Overlay")
num_ov = len(self.overlay_headers)
for o in range(num_ov):
def show_loadseg_segments(self):
# header + segments
if not self.brief:
self.print_header(self.overlay_headers[o])
for segment in self.overlay_segments[o]:
self.show_segment(segment, self.overlay_segments[o])
self.print_header(self.header)
for segment in self.segments:
self.show_segment(segment, self.segments)
def show_segment(self, hunk, seg_list):
main = hunk[0]
# overlay
if self.overlay != None:
print("Overlay")
num_ov = len(self.overlay_headers)
for o in range(num_ov):
if not self.brief:
self.print_header(self.overlay_headers[o])
for segment in self.overlay_segments[o]:
self.show_segment(segment, self.overlay_segments[o])
# unit hunks are named
name = ""
if 'name' in hunk[0]:
name = "'%s'" % main['name']
def show_segment(self, hunk, seg_list):
main = hunk[0]
type_name = main['type_name'].replace("HUNK_","")
size = main['size']
hunk_no = main['hunk_no']
if 'data_file_offset' in main:
data_file_offset = main['data_file_offset']
else:
data_file_offset = None
hunk_file_offset = main['hunk_file_offset']
if 'alloc_size' in main:
alloc_size = main['alloc_size']
else:
alloc_size = None
# unit hunks are named
name = ""
if "name" in hunk[0]:
name = "'%s'" % main["name"]
self.print_segment_header(hunk_no, type_name, size, name, data_file_offset, hunk_file_offset, alloc_size)
if self.hexdump and 'data' in main:
print_hex(main['data'],indent=8)
type_name = main["type_name"].replace("HUNK_", "")
size = main["size"]
hunk_no = main["hunk_no"]
if "data_file_offset" in main:
data_file_offset = main["data_file_offset"]
else:
data_file_offset = None
hunk_file_offset = main["hunk_file_offset"]
if "alloc_size" in main:
alloc_size = main["alloc_size"]
else:
alloc_size = None
for extra in hunk[1:]:
self.show_extra_hunk(extra)
self.print_segment_header(
hunk_no,
type_name,
size,
name,
data_file_offset,
hunk_file_offset,
alloc_size,
)
if self.hexdump and "data" in main:
print_hex(main["data"], indent=8)
# index hunk info is embedded if its in a lib
if 'index_hunk' in main:
self.show_index_info(main['index_hunk'])
for extra in hunk[1:]:
self.show_extra_hunk(extra)
if main['type'] == Hunk.HUNK_CODE and self.disassemble and len(main['data'])>0:
disas = HunkDisassembler.HunkDisassembler(cpu = self.cpu)
print()
disas.show_disassembly(hunk, seg_list, self.disassemble_start)
print()
# index hunk info is embedded if its in a lib
if "index_hunk" in main:
self.show_index_info(main["index_hunk"])
def show_index_info(self, info):
# references from index
if 'refs' in info:
self.print_extra("refs","#%d" % len(info['refs']))
if not self.brief:
for ref in info['refs']:
self.print_symbol(-1,ref['name'],"(%d bits)" % ref['bits'])
# defines from index
if 'defs' in info:
self.print_extra("defs","#%d" % len(info['defs']))
if not self.brief:
for d in info['defs']:
self.print_symbol(d['value'],d['name'],"(type %d)" % d['type'])
if (
main["type"] == Hunk.HUNK_CODE
and self.disassemble
and len(main["data"]) > 0
):
disas = HunkDisassembler.HunkDisassembler(cpu=self.cpu)
print()
disas.show_disassembly(hunk, seg_list, self.disassemble_start)
print()
def show_extra_hunk(self, hunk):
hunk_type = hunk['type']
if hunk_type in Hunk.reloc_hunks:
type_name = hunk['type_name'].replace("HUNK_","").lower()
self.print_extra("reloc","%s #%d" % (type_name, len(hunk['reloc'])))
if not self.brief:
self.show_reloc_hunk(hunk)
    def show_index_info(self, info):
        """Print the refs/defs of a library index hunk entry."""
        # references from index
        if "refs" in info:
            self.print_extra("refs", "#%d" % len(info["refs"]))
            if not self.brief:
                for ref in info["refs"]:
                    # -1 renders as an unknown ('xxxxxxxx') address
                    self.print_symbol(-1, ref["name"], "(%d bits)" % ref["bits"])
        # defines from index
        if "defs" in info:
            self.print_extra("defs", "#%d" % len(info["defs"]))
            if not self.brief:
                for d in info["defs"]:
                    self.print_symbol(d["value"], d["name"], "(type %d)" % d["type"])
elif hunk_type == Hunk.HUNK_DEBUG:
self.print_extra("debug","%s offset=%08x" % (hunk['debug_type'], hunk['debug_offset']))
if not self.brief:
self.show_debug_hunk(hunk)
def show_extra_hunk(self, hunk):
hunk_type = hunk["type"]
if hunk_type in Hunk.reloc_hunks:
type_name = hunk["type_name"].replace("HUNK_", "").lower()
self.print_extra("reloc", "%s #%d" % (type_name, len(hunk["reloc"])))
if not self.brief:
self.show_reloc_hunk(hunk)
elif hunk_type == Hunk.HUNK_SYMBOL:
self.print_extra("symbol","#%d" % (len(hunk['symbols'])))
if not self.brief:
self.show_symbol_hunk(hunk)
elif hunk_type == Hunk.HUNK_DEBUG:
self.print_extra(
"debug", "%s offset=%08x" % (hunk["debug_type"], hunk["debug_offset"])
)
if not self.brief:
self.show_debug_hunk(hunk)
elif hunk_type == Hunk.HUNK_EXT:
self.print_extra("ext","def #%d ref #%d common #%d" % (len(hunk['ext_def']),len(hunk['ext_ref']),len(hunk['ext_common'])))
if not self.brief:
self.show_ext_hunk(hunk)
elif hunk_type == Hunk.HUNK_SYMBOL:
self.print_extra("symbol", "#%d" % (len(hunk["symbols"])))
if not self.brief:
self.show_symbol_hunk(hunk)
else:
self.print_extra("extra","%s" % hunk['type_name'])
elif hunk_type == Hunk.HUNK_EXT:
self.print_extra(
"ext",
"def #%d ref #%d common #%d"
% (len(hunk["ext_def"]), len(hunk["ext_ref"]), len(hunk["ext_common"])),
)
if not self.brief:
self.show_ext_hunk(hunk)
def show_reloc_hunk(self, hunk):
reloc = hunk['reloc']
for hunk_num in reloc:
offsets = reloc[hunk_num]
if self.show_relocs:
for offset in offsets:
self.print_symbol(offset,"Segment #%d" % hunk_num,"")
else:
self.print_extra_sub("To Segment #%d: %4d entries" % (hunk_num, len(offsets)))
else:
self.print_extra("extra", "%s" % hunk["type_name"])
def show_debug_hunk(self, hunk):
debug_type = hunk['debug_type']
if debug_type == 'LINE':
self.print_extra_sub("line for '%s'" % hunk['src_file'])
if self.show_debug:
for src_off in hunk['src_map']:
addr = src_off[1]
line = src_off[0]
self.print_symbol(addr,"line %d" % line,"")
else:
if self.show_debug:
print_hex(hunk['data'],indent=8)
    def show_reloc_hunk(self, hunk):
        """Print a relocation hunk: every offset, or a per-target summary."""
        reloc = hunk["reloc"]
        for hunk_num in reloc:
            offsets = reloc[hunk_num]
            if self.show_relocs:
                for offset in offsets:
                    self.print_symbol(offset, "Segment #%d" % hunk_num, "")
            else:
                self.print_extra_sub(
                    "To Segment #%d: %4d entries" % (hunk_num, len(offsets))
                )
def show_symbol_hunk(self, hunk):
for symbol in hunk['symbols']:
self.print_symbol(symbol[1],symbol[0],"")
    def show_debug_hunk(self, hunk):
        """Print a HUNK_DEBUG hunk: LINE debug as addr/line pairs,
        any other debug type as a raw hex dump (when show_debug is set)."""
        debug_type = hunk["debug_type"]
        if debug_type == "LINE":
            self.print_extra_sub("line for '%s'" % hunk["src_file"])
            if self.show_debug:
                for src_off in hunk["src_map"]:
                    # src_map entries are (line, address) pairs
                    addr = src_off[1]
                    line = src_off[0]
                    self.print_symbol(addr, "line %d" % line, "")
        else:
            if self.show_debug:
                print_hex(hunk["data"], indent=8)
def show_ext_hunk(self, hunk):
# definition
for ext in hunk['ext_def']:
tname = ext['type_name'].replace("EXT_","").lower()
self.print_symbol(ext['def'],ext['name'],tname)
# references
for ext in hunk['ext_ref']:
refs = ext['refs']
tname = ext['type_name'].replace("EXT_","").lower()
for ref in refs:
self.print_symbol(ref,ext['name'],tname)
    def show_symbol_hunk(self, hunk):
        """Print every (name, offset) pair of a HUNK_SYMBOL hunk."""
        for symbol in hunk["symbols"]:
            self.print_symbol(symbol[1], symbol[0], "")
# common_base
for ext in hunk['ext_common']:
tname = ext['type_name'].replace("EXT_","").lower()
self.print_symbol(ext['common_size'],ext['name'],tname)
def show_ext_hunk(self, hunk):
# definition
for ext in hunk["ext_def"]:
tname = ext["type_name"].replace("EXT_", "").lower()
self.print_symbol(ext["def"], ext["name"], tname)
# references
for ext in hunk["ext_ref"]:
refs = ext["refs"]
tname = ext["type_name"].replace("EXT_", "").lower()
for ref in refs:
self.print_symbol(ref, ext["name"], tname)
# ----- printing -----
# common_base
for ext in hunk["ext_common"]:
tname = ext["type_name"].replace("EXT_", "").lower()
self.print_symbol(ext["common_size"], ext["name"], tname)
def print_header(self, hdr):
print("\t header (segments: first=%d, last=%d, table size=%d)" % (hdr['first_hunk'], hdr['last_hunk'], hdr['table_size']))
# ----- printing -----
def print_extra(self, type_name, info):
print("\t\t%8s %s" % (type_name, info))
def print_extra_sub(self, text):
print("\t\t\t%s" % text)
def print_segment_header(self, hunk_no, type_name, size, name, data_file_offset, hunk_file_offset, alloc_size):
extra = ""
if alloc_size != None:
extra += "alloc size %08x " % alloc_size
extra += "file header @%08x" % hunk_file_offset
if data_file_offset != None:
extra += " data @%08x" % data_file_offset
print("\t#%03d %-5s size %08x %s %s" % (hunk_no, type_name, size, extra, name))
def print_symbol(self,addr,name,extra):
if addr == -1:
a = "xxxxxxxx"
else:
a = "%08x" % addr
print("\t\t\t%s %-32s %s" % (a,name,extra))
def print_unit(self, no, name):
print(" #%03d UNIT %s" % (no, name))
    def print_header(self, hdr):
        """Print the summary line of a (overlay) header hunk."""
        print(
            "\t header (segments: first=%d, last=%d, table size=%d)"
            % (hdr["first_hunk"], hdr["last_hunk"], hdr["table_size"])
        )
def print_extra(self, type_name, info):
print("\t\t%8s %s" % (type_name, info))
def print_extra_sub(self, text):
print("\t\t\t%s" % text)
    def print_segment_header(
        self,
        hunk_no,
        type_name,
        size,
        name,
        data_file_offset,
        hunk_file_offset,
        alloc_size,
    ):
        """Print the one-line summary of a segment's main hunk.

        alloc_size/data_file_offset may be None; the corresponding
        fields are then omitted from the 'extra' part.
        """
        extra = ""
        if alloc_size != None:
            extra += "alloc size %08x " % alloc_size
        extra += "file header @%08x" % hunk_file_offset
        if data_file_offset != None:
            extra += " data @%08x" % data_file_offset
        print(
            "\t#%03d %-5s size %08x %s %s" % (hunk_no, type_name, size, extra, name)
        )
    def print_symbol(self, addr, name, extra):
        """Print one symbol line; addr == -1 renders as 'xxxxxxxx'."""
        if addr == -1:
            a = "xxxxxxxx"
        else:
            a = "%08x" % addr
        print("\t\t\t%s %-32s %s" % (a, name, extra))
    def print_unit(self, no, name):
        """Print the headline of a unit."""
        print("  #%03d UNIT %s" % (no, name))

View File

@ -4,153 +4,158 @@ from .FuncTable import FuncTable
from .FuncDef import FuncDef
from amitools.util.DataDir import get_data_sub_dir
def get_fd_name(lib_name):
    """Return the FD file name for a given library/device name.

    Raises ValueError if the name has neither a '.library' nor a
    '.device' suffix.
    """
    for suffix in (".device", ".library"):
        if lib_name.endswith(suffix):
            return lib_name.replace(suffix, "_lib.fd")
    raise ValueError("can't find fd name for '%s'" % lib_name)
"""return the name associated for a given library/device name"""
if lib_name.endswith(".device"):
fd_name = lib_name.replace(".device", "_lib.fd")
elif lib_name.endswith(".library"):
fd_name = lib_name.replace(".library", "_lib.fd")
else:
raise ValueError("can't find fd name for '%s'" % lib_name)
return fd_name
def get_base_name(lib_name):
    """Return the base symbol (e.g. '_DosBase') for a library/device name.

    Raises ValueError if the name has neither a '.library' nor a
    '.device' suffix.
    """
    for suffix in (".device", ".library"):
        if lib_name.endswith(suffix):
            stem = lib_name.replace(suffix, "Base")
            return "_" + stem[0].upper() + stem[1:]
    raise ValueError("can't find base name for '%s'" % lib_name)
if lib_name.endswith(".device"):
base_name = lib_name.replace(".device", "Base")
elif lib_name.endswith(".library"):
base_name = lib_name.replace(".library", "Base")
else:
raise ValueError("can't find base name for '%s'" % lib_name)
return "_" + base_name[0].upper() + base_name[1:]
def is_device(lib_name):
    """Return True when the given name denotes a device (not a library)."""
    return lib_name.endswith(".device")
"""return true if given name is associated with a device"""
return lib_name.endswith(".device")
def read_lib_fd(lib_name, fd_dir=None, add_std_calls=True):
    """Locate and parse the FD file for a library/device name.

    Returns the parsed fd table (with its is_device flag set) or None
    when no FD file exists.  fd_dir defaults to the data dir's 'fd'
    sub directory.
    """
    if fd_dir is None:
        fd_dir = get_data_sub_dir("fd")
    # derive the FD file path from the library name
    fd_path = os.path.join(fd_dir, get_fd_name(lib_name))
    if not os.path.isfile(fd_path):
        return None
    # parse the FD file
    fd = read_fd(fd_path)
    fd.is_device = is_device(lib_name)
    if add_std_calls:
        fd.add_std_calls()
    return fd
# get default path if none is given
if fd_dir is None:
fd_dir = get_data_sub_dir("fd")
# get fd path
fd_name = get_fd_name(lib_name)
fd_path = os.path.join(fd_dir, fd_name)
if not os.path.isfile(fd_path):
return None
# try to read fd
fd = read_fd(fd_path)
fd.is_device = is_device(lib_name)
if add_std_calls:
fd.add_std_calls()
return fd
def generate_fd(lib_name, num_calls=0, add_std_calls=True):
    """Create a synthetic function table for a library/device name.

    Fills the table with fake entries named FakeFunc_<offset>, biased
    in steps of 6 bytes, up to num_calls entries total.
    """
    func_table = FuncTable(get_base_name(lib_name))
    func_table.is_device = is_device(lib_name)
    # standard calls occupy the first slots; fakes start after them
    offset = func_table.get_num_std_calls() + 1
    bias = offset * 6
    while offset <= num_calls:
        func_table.add_func(FuncDef("FakeFunc_%d" % offset, bias))
        offset += 1
        bias += 6
    if add_std_calls:
        func_table.add_std_calls()
    return func_table
base = get_base_name(lib_name)
func_table = FuncTable(base)
func_table.is_device = is_device(lib_name)
offset = func_table.get_num_std_calls() + 1
bias = offset * 6
while offset <= num_calls:
n = "FakeFunc_%d" % offset
f = FuncDef(n, bias)
func_table.add_func(f)
offset += 1
bias += 6
if add_std_calls:
func_table.add_std_calls()
return func_table
def read_fd(fname):
    """Read an .fd function-definition file and build a FuncTable.

    Returns the FuncTable, or None if an unknown ##command is found.
    Raises IOError on malformed function lines or arg/reg mismatches.

    Fixes: the file is now opened via 'with', so the handle is closed
    on the early 'return None' and on exceptions (the old code leaked
    it); the duplicate '0' in the regex character class was removed;
    '== None' comparisons use 'is'.
    """
    func_pat = r"([A-Za-z][_A-Za-z0-9]+)\((.*)\)\((.*)\)"
    func_table = None
    bias = 0
    private = True
    # parse file; 'with' guarantees the handle is closed on every exit path
    with open(fname, "r") as f:
        for line in f:
            l = line.strip()
            # skip empty lines and '*' comment lines
            if len(l) > 1 and l[0] != '*':
                # a '##' command line
                if l[0] == '#' and l[1] == '#':
                    cmdline = l[2:]
                    cmda = cmdline.split(" ")
                    cmd = cmda[0]
                    if cmd == "base":
                        base = cmda[1]
                        func_table = FuncTable(base)
                    elif cmd == "bias":
                        bias = int(cmda[1])
                    elif cmd == "private":
                        private = True
                    elif cmd == "public":
                        private = False
                    elif cmd == "end":
                        break
                    else:
                        print("Invalid command:", cmda)
                        return None
                # a function line
                else:
                    m = re.match(func_pat, l)
                    if m is None:
                        raise IOError("Invalid FD Format")
                    name = m.group(1)
                    # create a function definition
                    func_def = FuncDef(name, bias, private)
                    if func_table is not None:
                        func_table.add_func(func_def)
                    # check args
                    args = m.group(2)
                    regs = m.group(3)
                    arg = args.replace(',', '/').split('/')
                    reg = regs.replace(',', '/').split('/')
                    if len(arg) != len(reg):
                        # hack for double reg args found in mathieeedoub* libs
                        if len(arg) * 2 == len(reg):
                            arg_hi = [x + "_hi" for x in arg]
                            arg_lo = [x + "_lo" for x in arg]
                            arg = [x for pair in zip(arg_hi, arg_lo) for x in pair]
                        else:
                            raise IOError("Reg and Arg name mismatch in FD File")
                    if arg[0] != '':
                        num_args = len(arg)
                        for i in range(num_args):
                            func_def.add_arg(arg[i], reg[i])
                    bias += 6
    return func_table
func_pat = r"([A-Za-z][_A-Za-z00-9]+)\((.*)\)\((.*)\)"
func_table = None
bias = 0
private = True
# parse file
f = open(fname, "r")
for line in f:
l = line.strip()
if len(l) > 1 and l[0] != "*":
# a command
if l[0] == "#" and l[1] == "#":
cmdline = l[2:]
cmda = cmdline.split(" ")
cmd = cmda[0]
if cmd == "base":
base = cmda[1]
func_table = FuncTable(base)
elif cmd == "bias":
bias = int(cmda[1])
elif cmd == "private":
private = True
elif cmd == "public":
private = False
elif cmd == "end":
break
else:
print("Invalid command:", cmda)
return None
# a function
else:
m = re.match(func_pat, l)
if m == None:
raise IOError("Invalid FD Format")
else:
name = m.group(1)
# create a function definition
func_def = FuncDef(name, bias, private)
if func_table != None:
func_table.add_func(func_def)
# check args
args = m.group(2)
regs = m.group(3)
arg = args.replace(",", "/").split("/")
reg = regs.replace(",", "/").split("/")
if len(arg) != len(reg):
# hack for double reg args found in mathieeedoub* libs
if len(arg) * 2 == len(reg):
arg_hi = [x + "_hi" for x in arg]
arg_lo = [x + "_lo" for x in arg]
arg = [x for pair in zip(arg_hi, arg_lo) for x in pair]
else:
raise IOError("Reg and Arg name mismatch in FD File")
if arg[0] != "":
num_args = len(arg)
for i in range(num_args):
func_def.add_arg(arg[i], reg[i])
bias += 6
f.close()
return func_table
def write_fd(fname, fd, add_private):
    """Write the function table 'fd' to an FD file at 'fname'.

    Private functions are emitted only when add_private is True.
    ##public/##private and ##bias commands are written only when the
    state differs from the running default (bias advances by 6).

    Fixes: the file is now written via 'with' so it is closed even if
    a write raises; 'args == None' uses 'is'.
    """
    with open(fname, "w") as fo:
        fo.write("##base %s\n" % (fd.get_base_name()))
        last_bias = 0
        last_mode = None
        funcs = fd.get_funcs()
        for f in funcs:
            if not f.is_private() or add_private:
                # emit ##private/##public only on a mode change
                if f.is_private():
                    new_mode = "private"
                else:
                    new_mode = "public"
                if last_mode != new_mode:
                    fo.write("##%s\n" % new_mode)
                    last_mode = new_mode
                # emit ##bias only when the bias does not follow on
                new_bias = f.get_bias()
                if last_bias + 6 != new_bias:
                    fo.write("##bias %d\n" % new_bias)
                last_bias = new_bias
                # build the function line: Name(args)(regs)
                line = f.get_name()
                args = f.get_args()
                if args is None:
                    line += "()()"
                else:
                    line += "(" + ",".join([x[0] for x in args]) + ")"
                    line += "(" + "/".join([x[1] for x in args]) + ")"
                fo.write("%s\n" % line)
        fo.write("##end\n")
fo = open(fname, "w")
fo.write("##base %s\n" % (fd.get_base_name()))
last_bias = 0
last_mode = None
funcs = fd.get_funcs()
for f in funcs:
if not f.is_private() or add_private:
# check new mode
if f.is_private():
new_mode = "private"
else:
new_mode = "public"
if last_mode != new_mode:
fo.write("##%s\n" % new_mode)
last_mode = new_mode
# check new bias
new_bias = f.get_bias()
if last_bias + 6 != new_bias:
fo.write("##bias %d\n" % new_bias)
last_bias = new_bias
# build func
line = f.get_name()
args = f.get_args()
if args == None:
line += "()()"
else:
line += "(" + ",".join([x[0] for x in args]) + ")"
line += "(" + "/".join([x[1] for x in args]) + ")"
fo.write("%s\n" % line)
fo.write("##end\n")
fo.close()

View File

@ -1,36 +1,48 @@
class FuncDef:
    """A single library function definition: name, bias and arguments."""

    def __init__(self, name, bias, private=False, is_std=False):
        self.name = name
        self.bias = bias
        # bias 6 is index 0, bias 12 is index 1, ...
        self.index = (bias - 6) // 6
        self.private = private
        self.std = is_std
        self.args = []

    def __str__(self):
        return self.get_str()

    def get_name(self):
        return self.name

    def get_bias(self):
        return self.bias

    def get_index(self):
        return self.index

    def is_private(self):
        return self.private

    def is_std(self):
        return self.std

    def get_args(self):
        return self.args

    def add_arg(self, name, reg):
        """Append an (arg_name, register) pair."""
        self.args.append((name, reg))

    def dump(self):
        print((self.name, self.bias, self.private, self.args))

    def get_arg_str(self, with_reg=True):
        """Render the argument list, optionally with register names."""
        if not self.args:
            return "()"
        if with_reg:
            parts = ["%s/%s" % (arg, reg) for arg, reg in self.args]
        else:
            parts = ["%s" % arg for arg, reg in self.args]
        return "( " + ", ".join(parts) + " )"

    def get_str(self, with_reg=True):
        """Render name plus argument list."""
        return self.name + self.get_arg_str(with_reg)
"""A function definition"""
def __init__(self, name, bias, private=False, is_std=False):
self.name = name
self.bias = bias
self.index = (bias - 6) // 6
self.private = private
self.std = is_std
self.args = []
def __str__(self):
return self.get_str()
def get_name(self):
return self.name
def get_bias(self):
return self.bias
def get_index(self):
return self.index
def is_private(self):
return self.private
def is_std(self):
return self.std
def get_args(self):
return self.args
def add_arg(self, name, reg):
self.args.append((name, reg))
def dump(self):
print((self.name, self.bias, self.private, self.args))
def get_arg_str(self, with_reg=True):
if len(self.args) == 0:
return "()"
elif with_reg:
return "( " + ", ".join(["%s/%s" % (x[0], x[1]) for x in self.args]) + " )"
else:
return "( " + ", ".join(["%s" % x[0] for x in self.args]) + " )"
def get_str(self, with_reg=True):
return self.name + self.get_arg_str(with_reg)

View File

@ -1,111 +1,113 @@
from .FuncDef import FuncDef
class FuncTable:
"""Store a function table"""
def __init__(self, base_name, is_device=False):
self.funcs = []
self.base_name = base_name
self.bias_map = {}
self.name_map = {}
self.index_tab = []
self.max_bias = 0
self.is_device = is_device
"""Store a function table"""
def get_base_name(self):
return self.base_name
def __init__(self, base_name, is_device=False):
self.funcs = []
self.base_name = base_name
self.bias_map = {}
self.name_map = {}
self.index_tab = []
self.max_bias = 0
self.is_device = is_device
def get_funcs(self):
return self.funcs
def get_base_name(self):
return self.base_name
def get_func_by_bias(self, bias):
if bias in self.bias_map:
return self.bias_map[bias]
else:
return None
def get_funcs(self):
return self.funcs
def get_max_bias(self):
return self.max_bias
def get_func_by_bias(self, bias):
if bias in self.bias_map:
return self.bias_map[bias]
else:
return None
def get_neg_size(self):
return self.max_bias + 6
def get_max_bias(self):
return self.max_bias
def get_num_indices(self):
return self.max_bias // 6
def get_neg_size(self):
return self.max_bias + 6
def get_all_func_names():
return list(self.name_map.keys())
def get_num_indices(self):
return self.max_bias // 6
def has_func(self, name):
return name in self.name_map
def get_all_func_names():
return list(self.name_map.keys())
def get_func_by_name(self, name):
if name in self.name_map:
return self.name_map[name]
else:
return None
def has_func(self, name):
return name in self.name_map
def get_num_funcs(self):
return len(self.funcs)
def get_func_by_name(self, name):
if name in self.name_map:
return self.name_map[name]
else:
return None
def get_index_table(self):
return self.index_tab
def get_num_funcs(self):
return len(self.funcs)
def get_func_by_index(self, idx):
return self.index_tab[idx]
def get_index_table(self):
return self.index_tab
def add_func(self, f):
# add to list
self.funcs.append(f)
# store by bias
bias = f.get_bias()
if bias in self.bias_map:
raise ValueError("bias %d already added!" % bias)
self.bias_map[bias] = f
# store by name
name = f.get_name()
self.name_map[name] = f
# adjust max bias
if bias > self.max_bias:
self.max_bias = bias
# update index table
tab_len = bias // 6
while len(self.index_tab) < tab_len:
self.index_tab.append(None)
index = tab_len - 1
self.index_tab[index] = f
def get_func_by_index(self, idx):
return self.index_tab[idx]
def add_call(self,name,bias,arg,reg,is_std=False):
if len(arg) != len(reg):
raise IOError("Reg and Arg name mismatch in function definition")
else:
func_def = FuncDef(name, bias, False, is_std)
self.add_func(func_def)
if arg and len(arg) > 0:
num_args = len(arg)
for i in range(num_args):
func_def.add_arg(arg[i],reg[i])
def add_func(self, f):
# add to list
self.funcs.append(f)
# store by bias
bias = f.get_bias()
if bias in self.bias_map:
raise ValueError("bias %d already added!" % bias)
self.bias_map[bias] = f
# store by name
name = f.get_name()
self.name_map[name] = f
# adjust max bias
if bias > self.max_bias:
self.max_bias = bias
# update index table
tab_len = bias // 6
while len(self.index_tab) < tab_len:
self.index_tab.append(None)
index = tab_len - 1
self.index_tab[index] = f
def dump(self):
print(("FuncTable:",self.base_name))
for f in self.funcs:
f.dump()
def add_call(self, name, bias, arg, reg, is_std=False):
if len(arg) != len(reg):
raise IOError("Reg and Arg name mismatch in function definition")
else:
func_def = FuncDef(name, bias, False, is_std)
self.add_func(func_def)
if arg and len(arg) > 0:
num_args = len(arg)
for i in range(num_args):
func_def.add_arg(arg[i], reg[i])
def get_num_std_calls(self):
if self.is_device:
return 6
else:
return 4
def dump(self):
print(("FuncTable:", self.base_name))
for f in self.funcs:
f.dump()
def add_std_calls(self):
if self.is_device:
self.add_call("_OpenDev",6,["IORequest","Unit"],["a1","d0"],True)
self.add_call("_CloseDev",12,["IORequest"],["a1"],True)
self.add_call("_ExpungeDev",18,["MyDev"],["a6"],True)
self.add_call("_Empty",24,[],[],True)
self.add_call("BeginIO",30,["IORequest"],["a1"],True)
self.add_call("AbortIO",36,["IORequest"],["a1"],True)
else:
self.add_call("_OpenLib",6,["MyLib"],["a6"],True)
self.add_call("_CloseLib",12,["MyLib"],["a6"],True)
self.add_call("_ExpungeLib",18,["MyLib"],["a6"],True)
self.add_call("_Empty",24,[],[],True)
def get_num_std_calls(self):
if self.is_device:
return 6
else:
return 4
def add_std_calls(self):
if self.is_device:
self.add_call("_OpenDev", 6, ["IORequest", "Unit"], ["a1", "d0"], True)
self.add_call("_CloseDev", 12, ["IORequest"], ["a1"], True)
self.add_call("_ExpungeDev", 18, ["MyDev"], ["a6"], True)
self.add_call("_Empty", 24, [], [], True)
self.add_call("BeginIO", 30, ["IORequest"], ["a1"], True)
self.add_call("AbortIO", 36, ["IORequest"], ["a1"], True)
else:
self.add_call("_OpenLib", 6, ["MyLib"], ["a6"], True)
self.add_call("_CloseLib", 12, ["MyLib"], ["a6"], True)
self.add_call("_ExpungeLib", 18, ["MyLib"], ["a6"], True)
self.add_call("_Empty", 24, [], [], True)

View File

@ -1,6 +1,3 @@
import struct
import ctypes
@ -9,343 +6,356 @@ from .block.BitmapExtBlock import BitmapExtBlock
from .DosType import *
from .FSError import *
class ADFSBitmap:
def __init__(self, root_blk):
self.root_blk = root_blk
self.blkdev = self.root_blk.blkdev
# state
self.ext_blks = []
self.bitmap_blks = []
self.bitmap_data = None
self.valid = False
# bitmap block entries
self.bitmap_blk_bytes = root_blk.blkdev.block_bytes - 4
self.bitmap_blk_longs = root_blk.blkdev.block_longs - 1
# calc size of bitmap
self.bitmap_bits = self.blkdev.num_blocks - self.blkdev.reserved
self.bitmap_longs = (self.bitmap_bits + 31) // 32
self.bitmap_bytes = (self.bitmap_bits + 7) // 8
# number of blocks required for bitmap (and bytes consumed there)
self.bitmap_num_blks = (self.bitmap_longs + self.bitmap_blk_longs - 1) // self.bitmap_blk_longs
self.bitmap_all_blk_bytes = self.bitmap_num_blks * self.bitmap_blk_bytes
# blocks stored in root and in every ext block
self.num_blks_in_root = len(self.root_blk.bitmap_ptrs)
self.num_blks_in_ext = self.blkdev.block_longs - 1
# number of ext blocks required
self.num_ext = (self.bitmap_num_blks - self.num_blks_in_root + self.num_blks_in_ext - 1) // (self.num_blks_in_ext)
# start a root block
self.find_start = root_blk.blk_num
# was bitmap modified?
self.dirty = False
# for DOS6/7 track used blocks
self.num_used = 0
    def __init__(self, root_blk):
        """Prepare the bitmap geometry for the volume of 'root_blk'.

        Only computes sizes/counts; no blocks are read or written here.
        """
        self.root_blk = root_blk
        self.blkdev = self.root_blk.blkdev
        # state
        self.ext_blks = []
        self.bitmap_blks = []
        self.bitmap_data = None
        self.valid = False
        # bitmap block entries (first long of each block is its checksum)
        self.bitmap_blk_bytes = root_blk.blkdev.block_bytes - 4
        self.bitmap_blk_longs = root_blk.blkdev.block_longs - 1
        # calc size of bitmap: one bit per non-reserved block
        self.bitmap_bits = self.blkdev.num_blocks - self.blkdev.reserved
        self.bitmap_longs = (self.bitmap_bits + 31) // 32
        self.bitmap_bytes = (self.bitmap_bits + 7) // 8
        # number of blocks required for bitmap (and bytes consumed there)
        self.bitmap_num_blks = (
            self.bitmap_longs + self.bitmap_blk_longs - 1
        ) // self.bitmap_blk_longs
        self.bitmap_all_blk_bytes = self.bitmap_num_blks * self.bitmap_blk_bytes
        # blocks stored in root and in every ext block
        self.num_blks_in_root = len(self.root_blk.bitmap_ptrs)
        self.num_blks_in_ext = self.blkdev.block_longs - 1
        # number of ext blocks required
        self.num_ext = (
            self.bitmap_num_blks - self.num_blks_in_root + self.num_blks_in_ext - 1
        ) // (self.num_blks_in_ext)
        # start a root block
        self.find_start = root_blk.blk_num
        # was bitmap modified?
        self.dirty = False
        # for DOS6/7 track used blocks
        self.num_used = 0
def create(self):
    """allocate and initialize a fresh bitmap for the volume.

    Marks every block free, then reserves the root block, the bitmap
    extension blocks and the bitmap blocks themselves, and finally
    wires up all block pointers in the root/ext blocks.
    """
    # clear local count
    self.num_used = 0
    # create data and preset with 0xff: all blocks free
    self.bitmap_data = ctypes.create_string_buffer(self.bitmap_all_blk_bytes)
    for i in range(self.bitmap_all_blk_bytes):
        self.bitmap_data[i] = 0xFF
    # clear bit for root block
    blk_pos = self.root_blk.blk_num
    self.clr_bit(blk_pos)
    blk_pos += 1
    # create ext blocks directly after the root block
    for i in range(self.num_ext):
        bm_ext = BitmapExtBlock(self.blkdev, blk_pos)
        bm_ext.create()
        self.clr_bit(blk_pos)
        blk_pos += 1
        self.ext_blks.append(bm_ext)
    # create bitmap blocks
    for i in range(self.bitmap_num_blks):
        bm = BitmapBlock(self.blkdev, blk_pos)
        bm.create()
        self.clr_bit(blk_pos)
        blk_pos += 1
        self.bitmap_blks.append(bm)
    # chain the ext blocks: root -> first ext -> next ext ...
    if self.num_ext > 0:
        self.root_blk.bitmap_ext_blk = self.ext_blks[0].blk_num
        for i in range(self.num_ext - 1):
            bm_ext = self.ext_blks[i]
            bm_ext_next = self.ext_blks[i + 1]
            bm_ext.bitmap_ext_blk = bm_ext_next.blk_num
    # set pointers to bitmap blocks: first in root, overflow in ext blocks
    cur_ext_index = 0
    cur_ext_pos = 0
    for i in range(self.bitmap_num_blks):
        blk_num = self.bitmap_blks[i].blk_num
        if i < self.num_blks_in_root:
            # pointers in root block
            self.root_blk.bitmap_ptrs[i] = blk_num
        else:
            # pointers in ext block
            self.ext_blks[cur_ext_index].bitmap_ptrs[cur_ext_pos] = blk_num
            cur_ext_pos += 1
            if cur_ext_pos == self.num_blks_in_ext:
                cur_ext_pos = 0
                cur_ext_index += 1
    self.valid = True
    self.dirty = True
def write(self):
    """write the bitmap back to disk if it was modified.

    Always rewrites the root block (it carries the bitmap pointers);
    on DOS6/7 the cached used-block count is stored there as well.
    """
    if self.dirty:
        self.dirty = False
        # update bitmap
        self._write_ext_blks()
        self._write_bitmap_blks()
        # in DOS6/DOS7 update root block stats
        if rootblock_tracks_used_blocks(self.root_blk.fstype):
            self.root_blk.blocks_used = self.num_used
        # always write root (bitmap pointers)
        self.root_blk.write()
def _write_ext_blks(self):
# write ext blocks
for ext_blk in self.ext_blks:
ext_blk.write()
def _write_ext_blks(self):
# write ext blocks
for ext_blk in self.ext_blks:
ext_blk.write()
def _write_bitmap_blks(self):
# write bitmap blocks
off = 0
for blk in self.bitmap_blks:
blk.set_bitmap_data(self.bitmap_data[off:off+self.bitmap_blk_bytes])
blk.write()
off += self.bitmap_blk_bytes
def _write_bitmap_blks(self):
# write bitmap blocks
off = 0
for blk in self.bitmap_blks:
blk.set_bitmap_data(self.bitmap_data[off : off + self.bitmap_blk_bytes])
blk.write()
off += self.bitmap_blk_bytes
def read(self):
    """read the bitmap from disk and validate the block counts.

    Collects bitmap blocks referenced by the root block and by the
    chain of extension blocks.  Raises FSError when a bitmap block is
    invalid or when the resulting block/byte counts do not match the
    geometry calculated in __init__.
    """
    self.bitmap_blks = []
    bitmap_data = bytearray()
    # DOS6/7: update num used
    if rootblock_tracks_used_blocks(self.root_blk.fstype):
        self.num_used = self.root_blk.blocks_used
    # get bitmap blocks from root block
    blocks = self.root_blk.bitmap_ptrs
    for blk in blocks:
        if blk == 0:
            break
        bm = BitmapBlock(self.blkdev, blk)
        bm.read()
        if not bm.valid:
            raise FSError(INVALID_BITMAP_BLOCK, block=bm)
        self.bitmap_blks.append(bm)
        bitmap_data += bm.get_bitmap_data()
    # now check extended bitmap blocks
    ext_blk = self.root_blk.bitmap_ext_blk
    while ext_blk != 0:
        bm_ext = BitmapExtBlock(self.blkdev, ext_blk)
        bm_ext.read()
        self.ext_blks.append(bm_ext)
        blocks = bm_ext.bitmap_ptrs
        for blk in blocks:
            if blk == 0:
                break
            bm = BitmapBlock(self.blkdev, blk)
            bm.read()
            if not bm.valid:
                raise FSError(INVALID_BITMAP_BLOCK, block=bm)
            bitmap_data += bm.get_bitmap_data()
            self.bitmap_blks.append(bm)
        ext_blk = bm_ext.bitmap_ext_blk
    # check bitmap data
    num_bm_blks = len(self.bitmap_blks)
    num_bytes = self.bitmap_blk_bytes * num_bm_blks
    if num_bytes != len(bitmap_data):
        raise FSError(
            BITMAP_SIZE_MISMATCH,
            node=self,
            extra="got=%d want=%d" % (len(bitmap_data), num_bytes),
        )
    if num_bm_blks != self.bitmap_num_blks:
        raise FSError(
            BITMAP_BLOCK_COUNT_MISMATCH,
            node=self,
            extra="got=%d want=%d" % (self.bitmap_num_blks, num_bm_blks),
        )
    # create a modifiable bitmap
    self.bitmap_data = ctypes.create_string_buffer(len(bitmap_data))
    self.bitmap_data[:] = bitmap_data
    self.valid = True
def find_free(self, start=None):
    """find the next free block and return its number (or None).

    Search starts at *start* (default: the cached find_start) and
    wraps around, scanning every bitmap bit at most once.
    """
    # given start of search
    if start is None:
        pos = self.find_start
    else:
        pos = start
    # at most scan all bits
    num = self.bitmap_bits
    while num > 0:
        # a free bit?
        found = self.get_bit(pos)
        old_pos = pos
        pos += 1
        # wrap around at the end of the bitmap
        if pos == self.bitmap_bits + self.blkdev.reserved:
            pos = self.blkdev.reserved
        if found:
            # remember position for the next search
            self.find_start = pos
            return old_pos
        num -= 1
    return None
def find_n_free(self, num, start=None):
    """find *num* distinct free blocks and return their numbers.

    Returns None if not enough distinct free blocks are available.
    """
    first_blk = self.find_free(start)
    if first_blk is None:
        return None
    if num == 1:
        return [first_blk]
    result = [first_blk]
    for i in range(num - 1):
        blk_num = self.find_free()
        if blk_num is None:
            return None
        # wrapped around to an already collected block -> not enough room
        if blk_num in result:
            return None
        result.append(blk_num)
    return result
def get_num_free(self):
    """count the free blocks by scanning all bitmap bits."""
    num = 0
    res = self.blkdev.reserved
    for i in range(self.bitmap_bits):
        if self.get_bit(i + res):
            num += 1
    return num
def get_num_used(self):
    """count the used blocks by scanning all bitmap bits."""
    num = 0
    res = self.blkdev.reserved
    for i in range(self.bitmap_bits):
        if not self.get_bit(i + res):
            num += 1
    return num
def alloc_n(self, num, start=None):
    """allocate *num* free blocks and mark them used.

    Returns the list of allocated block numbers or None if not
    enough free blocks are available.
    """
    free_blks = self.find_n_free(num, start)
    if free_blks is None:
        return None
    for b in free_blks:
        self.clr_bit(b)
    return free_blks
def dealloc_n(self, blks):
    """mark the given blocks as free again."""
    for b in blks:
        self.set_bit(b)
def get_bit(self, off):
    """return True if block *off* is free, False if it is used.

    Returns None if *off* lies outside the bitmap range
    (reserved blocks and anything past num_blocks).
    """
    if off < self.blkdev.reserved or off >= self.blkdev.num_blocks:
        return None
    off = off - self.blkdev.reserved
    long_off = off // 32
    bit_off = off % 32
    # bitmap is stored as big-endian longs, one bit per block
    val = struct.unpack_from(">I", self.bitmap_data, long_off * 4)[0]
    mask = 1 << bit_off
    return (val & mask) == mask
# mark as free
def set_bit(self, off):
    """mark block *off* as free by setting its bitmap bit.

    Returns False if *off* is outside the bitmap range; otherwise the
    bit is set (if not already), dirty is flagged and the used-block
    counter is decremented.
    """
    if off < self.blkdev.reserved or off >= self.blkdev.num_blocks:
        return False
    off = off - self.blkdev.reserved
    long_off = off // 32
    bit_off = off % 32
    val = struct.unpack_from(">I", self.bitmap_data, long_off * 4)[0]
    mask = 1 << bit_off
    if val & mask == 0:
        val = val | mask
        struct.pack_into(">I", self.bitmap_data, long_off * 4, val)
        self.dirty = True
        self.num_used -= 1
# mark as used
def clr_bit(self, off):
    """mark block *off* as used by clearing its bitmap bit.

    Returns False if *off* is outside the bitmap range; otherwise the
    bit is cleared (if not already), dirty is flagged and the
    used-block counter is incremented.
    """
    if off < self.blkdev.reserved or off >= self.blkdev.num_blocks:
        return False
    off = off - self.blkdev.reserved
    long_off = off // 32
    bit_off = off % 32
    val = struct.unpack_from(">I", self.bitmap_data, long_off * 4)[0]
    mask = 1 << bit_off
    if val & mask == mask:
        val = val & ~mask
        struct.pack_into(">I", self.bitmap_data, long_off * 4, val)
        self.dirty = True
        self.num_used += 1
def dump(self):
    """print a short debug summary of the bitmap state."""
    print("Bitmap:")
    print(" ext: ", self.ext_blks)
    print(" blks:", len(self.bitmap_blks))
    print(" bits:", len(self.bitmap_data) * 8, self.blkdev.num_blocks)
def print_info(self):
    """print free/used statistics (plus the root-cached count on DOS6/7)."""
    num_free = self.get_num_free()
    num_used = self.get_num_used()
    print("num free:", num_free)
    print("num used:", num_used)
    if rootblock_tracks_used_blocks(self.root_blk.fstype):
        print("num used:", self.num_used, "(cached in root)")
    print("sum: ", num_free + num_used)
    print("total: ", self.bitmap_bits)
def create_draw_bitmap(self):
    """return a zero-filled per-block draw buffer for the volume.

    The ctypes buffer is already zero-initialized; the former per-byte
    ``bm[i] = chr(0)`` loop was a Python-2 leftover that raises
    TypeError under Python 3 and was redundant anyway.
    """
    return ctypes.create_string_buffer(self.blkdev.num_blocks)
def print_free(self, brief=False):
    """print a block map marking every free block with 'F'."""
    bm = self.create_draw_bitmap()
    res = self.blkdev.reserved
    for i in range(self.blkdev.num_blocks):
        if i >= res and self.get_bit(i):
            bm[i] = "F"
    self.print_draw_bitmap(bm, brief)
def print_used(self, brief=False):
    """print a block map marking every used block with '#'."""
    bm = self.create_draw_bitmap()
    res = self.blkdev.reserved
    for i in range(self.blkdev.num_blocks):
        if i >= res and not self.get_bit(i):
            bm[i] = "#"
    self.print_draw_bitmap(bm, brief)
def draw_on_bitmap(self, bm):
    """mark reserved ('x'), root ('R'), bitmap ('b') and ext ('B') blocks
    in the draw buffer *bm*."""
    # show reserved blocks
    res = self.blkdev.reserved
    bm[0:res] = "x" * res
    # root block
    bm[self.root_blk.blk_num] = "R"
    # bitmap blocks
    for bm_blk in self.bitmap_blks:
        bm[bm_blk.blk_num] = "b"
    # bitmap ext blocks
    for ext_blk in self.ext_blks:
        bm[ext_blk.blk_num] = "B"
def print_draw_bitmap(self, bm, brief=False):
    """render the draw buffer *bm* cylinder by cylinder.

    Cells whose character code is 0 print as '.'; any other marker
    prints verbatim.  With brief=True only cylinders that contain at
    least one marker are printed.
    """
    blk_cyl = self.blkdev.sectors * self.blkdev.heads
    line = ""
    blk = 0
    found = False
    for i in range(self.blkdev.num_blocks):
        raw = bm[i]
        if ord(raw):
            found = True
            line += raw
        else:
            line += "."
        # separate the sectors of each track with a space
        if i % self.blkdev.sectors == self.blkdev.sectors - 1:
            line += " "
        # end of a cylinder: emit the accumulated line
        if i % blk_cyl == blk_cyl - 1:
            if not brief or found:
                print("%8d: %s" % (blk, line))
            blk += blk_cyl
            line = ""
            found = False

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,3 @@
from .block.EntryBlock import EntryBlock
from .block.FileHeaderBlock import FileHeaderBlock
from .block.FileListBlock import FileListBlock
@ -8,282 +5,313 @@ from .block.FileDataBlock import FileDataBlock
from .ADFSNode import ADFSNode
from .FSError import *
class ADFSFile(ADFSNode):
    """a file node of an Amiga DOS file system.

    Manages the file header block, the optional file list (extension)
    blocks and the data blocks (raw blocks in FFS, FileDataBlock in OFS).
    """

    def __init__(self, volume, parent):
        ADFSNode.__init__(self, volume, parent)
        # state
        self.ext_blk_nums = []
        self.ext_blks = []
        self.data_blk_nums = []
        self.data_blks = []
        self.valid = False
        self.data = None
        self.data_size = 0
        self.total_blks = 0

    def __repr__(self):
        return "[File(%d)'%s':%d]" % (
            self.block.blk_num,
            self.block.name,
            self.block.byte_size,
        )

    def blocks_create_old(self, anon_blk):
        """interpret an existing block as a file header and scan its blocks.

        Validates the header, collects data/ext block numbers and cross
        checks their counts against the file size.  Returns the header
        block; raises FSError on any inconsistency.
        """
        # create file header block
        fhb = FileHeaderBlock(self.blkdev, anon_blk.blk_num, self.volume.is_longname)
        fhb.set(anon_blk.data)
        if not fhb.valid:
            raise FSError(INVALID_FILE_HEADER_BLOCK, block=anon_blk)
        self.set_block(fhb)
        # retrieve data blocks and size from header
        self.data_blk_nums = fhb.data_blocks[:]
        self.data_size = fhb.byte_size
        # scan for extension blocks
        next_ext = self.block.extension
        while next_ext != 0:
            ext_blk = FileListBlock(self.block.blkdev, next_ext)
            ext_blk.read()
            if not ext_blk.valid:
                raise FSError(INVALID_FILE_LIST_BLOCK, block=ext_blk)
            self.ext_blk_nums.append(next_ext)
            self.ext_blks.append(ext_blk)
            self.data_blk_nums += ext_blk.data_blocks
            next_ext = ext_blk.extension
        # now check number of ext blocks
        self.num_ext_blks = self.calc_number_of_list_blks()
        my_num_ext_blks = len(self.ext_blks)
        if my_num_ext_blks != self.num_ext_blks:
            raise FSError(
                FILE_LIST_BLOCK_COUNT_MISMATCH,
                node=self,
                extra="got=%d want=%d" % (my_num_ext_blks, self.num_ext_blks),
            )
        # now check number of data blocks
        self.num_data_blks = self.calc_number_of_data_blks()
        my_num_data_blks = len(self.data_blk_nums)
        if my_num_data_blks != self.num_data_blks:
            raise FSError(
                FILE_DATA_BLOCK_COUNT_MISMATCH,
                node=self,
                extra="got=%d want=%d" % (my_num_data_blks, self.num_data_blks),
            )
        # calc number of total blocks occupied by this file
        self.total_blks = 1 + my_num_ext_blks + my_num_data_blks
        if self.block.comment_block_id != 0:
            self.total_blks += 1
        return fhb

    def read(self):
        """read data blocks"""
        self.data_blks = []
        want_seq_num = 1
        total_size = 0
        is_ffs = self.volume.is_ffs
        byte_size = self.block.byte_size
        data = bytearray()
        for blk in self.data_blk_nums:
            if is_ffs:
                # ffs has raw data blocks
                dat_blk = self.volume.blkdev.read_block(blk)
                total_size += len(dat_blk)
                # shrink last read if necessary
                if total_size > byte_size:
                    shrink = total_size - byte_size
                    dat_blk = dat_blk[:-shrink]
                    total_size = byte_size
                data += dat_blk
            else:
                # ofs
                dat_blk = FileDataBlock(self.block.blkdev, blk)
                dat_blk.read()
                if not dat_blk.valid:
                    raise FSError(INVALID_FILE_DATA_BLOCK, block=dat_blk, node=self)
                # check sequence number
                if dat_blk.seq_num != want_seq_num:
                    raise FSError(
                        INVALID_SEQ_NUM,
                        block=dat_blk,
                        node=self,
                        extra="got=%d wanted=%d" % (dat_blk.seq_num, want_seq_num),
                    )
                # store data blocks
                self.data_blks.append(dat_blk)
                total_size += dat_blk.data_size
                data += dat_blk.get_block_data()
                want_seq_num += 1
        # store full contents of file
        self.data = data
        # make sure all went well
        got_size = len(data)
        want_size = self.block.byte_size
        if got_size != want_size:
            raise FSError(
                INTERNAL_ERROR,
                block=self.block,
                node=self,
                extra="file size mismatch: got=%d want=%d" % (got_size, want_size),
            )

    def get_file_data(self):
        """return the file contents, reading them on demand."""
        if self.data is not None:
            return self.data
        self.read()
        return self.data

    def flush(self):
        """drop the cached file contents."""
        self.data = None
        self.data_blks = None

    def ensure_data(self):
        """make sure the file contents are loaded."""
        if self.data is None:
            self.read()

    def set_file_data(self, data):
        """set new file contents and recalc the block geometry."""
        self.data = data
        self.data_size = len(data)
        self.num_data_blks = self.calc_number_of_data_blks()
        self.num_ext_blks = self.calc_number_of_list_blks()

    def get_data_block_contents_bytes(self):
        """how many bytes of file data can be stored in a block?"""
        bb = self.volume.blkdev.block_bytes
        if self.volume.is_ffs:
            return bb
        else:
            # OFS data blocks carry a 24 byte header before the payload
            return bb - 24

    def calc_number_of_data_blks(self):
        """given the file size: how many data blocks do we need to store the file?"""
        bb = self.get_data_block_contents_bytes()
        ds = self.data_size
        return (ds + bb - 1) // bb

    def calc_number_of_list_blks(self):
        """given the file size: how many list blocks do we need to store the data blk ptrs?"""
        db = self.calc_number_of_data_blks()
        # ptr per block
        ppb = self.volume.blkdev.block_longs - 56
        # fits in header block?
        if db <= ppb:
            return 0
        else:
            db -= ppb
            return (db + ppb - 1) // ppb

    def blocks_get_create_num(self):
        # determine number of blocks to create
        return 1 + self.num_data_blks + self.num_ext_blks

    def blocks_create_new(self, free_blks, name, hash_chain_blk, parent_blk, meta_info):
        """create header/ext/data blocks for a new file in *free_blks*.

        Returns the block number of the new file header block.
        """
        # assign block numbers
        fhb_num = free_blks[0]
        # ... for ext
        self.ext_blk_nums = []
        for i in range(self.num_ext_blks):
            self.ext_blk_nums.append(free_blks[1 + i])
        # ... for data
        off = 1 + self.num_ext_blks
        self.data_blk_nums = []
        for i in range(self.num_data_blks):
            self.data_blk_nums.append(free_blks[off])
            off += 1
        ppb = self.volume.blkdev.block_longs - 56  # data pointer per block
        # create file header block
        fhb = FileHeaderBlock(self.blkdev, fhb_num, self.volume.is_longname)
        byte_size = len(self.data)
        if self.num_data_blks > ppb:
            hdr_blks = self.data_blk_nums[0:ppb]
            hdr_ext = self.ext_blk_nums[0]
        else:
            hdr_blks = self.data_blk_nums
            hdr_ext = 0
        fhb.create(
            parent_blk,
            name,
            hdr_blks,
            hdr_ext,
            byte_size,
            meta_info.get_protect(),
            meta_info.get_comment(),
            meta_info.get_mod_ts(),
            hash_chain_blk,
        )
        fhb.write()
        self.set_block(fhb)
        # create file list (=ext) blocks
        ext_off = ppb
        for i in range(self.num_ext_blks):
            flb = FileListBlock(self.blkdev, self.ext_blk_nums[i])
            if i == self.num_ext_blks - 1:
                ext_blk = 0
                blks = self.data_blk_nums[ext_off:]
            else:
                ext_blk = self.ext_blk_nums[i + 1]
                blks = self.data_blk_nums[ext_off : ext_off + ppb]
            flb.create(fhb_num, blks, ext_blk)
            flb.write()
            self.ext_blks.append(flb)
            ext_off += ppb
        # write data blocks
        self.write()
        self.valid = True
        return fhb_num

    def write(self):
        """write the cached file contents to the assigned data blocks."""
        self.data_blks = []
        off = 0
        left = self.data_size
        blk_idx = 0
        bs = self.get_data_block_contents_bytes()
        is_ffs = self.volume.is_ffs
        while off < self.data_size:
            # number of data block
            blk_num = self.data_blk_nums[blk_idx]
            # extract file data
            size = left
            if size > bs:
                size = bs
            d = self.data[off : off + size]
            if is_ffs:
                # pad block
                if size < bs:
                    d += b"\0" * (bs - size)
                # write raw block data in FFS
                self.blkdev.write_block(blk_num, d)
            else:
                # old FS: create and write data block
                fdb = FileDataBlock(self.blkdev, blk_num)
                if blk_idx == self.num_data_blks - 1:
                    next_data = 0
                else:
                    next_data = self.data_blk_nums[blk_idx + 1]
                fdb.create(self.block.blk_num, blk_idx + 1, d, next_data)
                fdb.write()
                self.data_blks.append(fdb)
            blk_idx += 1
            off += bs
            left -= bs

    def draw_on_bitmap(self, bm, show_all=False, first=False):
        # header 'H', ext blocks 'E', data blocks 'd'
        bm[self.block.blk_num] = "H"
        for b in self.ext_blk_nums:
            bm[b] = "E"
        for b in self.data_blk_nums:
            bm[b] = "d"

    def get_block_nums(self):
        """return all block numbers occupied by this file."""
        result = [self.block.blk_num]
        result += self.ext_blk_nums
        result += self.data_blk_nums
        return result

    def get_blocks(self, with_data=True):
        """return all block objects of this file (data blocks on demand)."""
        result = [self.block]
        result += self.ext_blks
        if with_data:
            self.ensure_data()
            result += self.data_blks
        return result

    def can_delete(self):
        return True

    def get_size(self):
        return self.data_size

    def get_size_str(self):
        return "%8d" % self.data_size

    def get_detail_str(self):
        return "data=%d ext=%d" % (len(self.data_blk_nums), len(self.ext_blk_nums))

    def get_block_usage(self, all=False, first=True):
        # (data blocks, header + ext blocks)
        return (len(self.data_blk_nums), len(self.ext_blk_nums) + 1)

    def get_file_bytes(self, all=False, first=True):
        return self.data_size

    def is_file(self):
        return True

View File

@ -1,6 +1,3 @@
from .block.CommentBlock import CommentBlock
from .block.EntryBlock import EntryBlock
from .FileName import FileName
@ -11,206 +8,228 @@ from .FSError import *
from .FSString import FSString
import amitools.util.ByteSize as ByteSize
class ADFSNode:
def __init__(self, volume, parent):
self.volume = volume
self.blkdev = volume.blkdev
self.parent = parent
self.block_bytes = self.blkdev.block_bytes
self.block = None
self.name = None
self.valid = False
self.meta_info = None
def __str__(self):
return "%s:'%s'(@%d)" % (self.__class__.__name__, self.get_node_path_name(), self.block.blk_num)
def set_block(self, block):
self.block = block
self.name = FileName(self.block.name, is_intl=self.volume.is_intl,is_longname=self.volume.is_longname)
self.valid = True
self.create_meta_info()
def create_meta_info(self):
if self.block.comment_block_id != 0:
comment_block = CommentBlock(self.blkdev, self.block.comment_block_id)
comment_block.read()
comment = comment_block.comment
else:
comment = self.block.comment
self.meta_info = MetaInfo(self.block.protect, self.block.mod_ts, comment)
def __init__(self, volume, parent):
    """base state of a file system node on *volume* below *parent*."""
    self.volume = volume
    self.blkdev = volume.blkdev
    self.parent = parent
    self.block_bytes = self.blkdev.block_bytes
    # filled in later by set_block()
    self.block = None
    self.name = None
    self.valid = False
    self.meta_info = None
def get_file_name(self):
    """return the node's FileName object (set by set_block())."""
    return self.name
def __str__(self):
    """human readable id: class name, node path and block number."""
    return "%s:'%s'(@%d)" % (
        self.__class__.__name__,
        self.get_node_path_name(),
        self.block.blk_num,
    )
def delete(self, wipe=False, all=False, update_ts=True):
    """remove this node from its parent directory.

    all=True deletes children first; wipe and update_ts are passed
    on to the parent's _delete().
    """
    if all:
        self.delete_children(wipe, all, update_ts)
    self.parent._delete(self, wipe, update_ts)
def set_block(self, block):
    """attach an entry block and derive the file name and meta info."""
    self.block = block
    self.name = FileName(
        self.block.name,
        is_intl=self.volume.is_intl,
        is_longname=self.volume.is_longname,
    )
    self.valid = True
    self.create_meta_info()
def delete_children(self, wipe, all, update_ts):
    # no-op hook: plain nodes have no children; subclasses override
    pass
def get_meta_info(self):
return self.meta_info
def change_meta_info(self, meta_info):
dirty = False
# dircache?
rebuild_dircache = False
if self.volume.is_dircache and self.parent:
record = self.parent.get_dircache_record(self.name.get_name())
if not record:
raise FSError(INTERNAL_ERROR, node=self, extra="dc not found!")
else:
record = None
# alter protect flags
protect = meta_info.get_protect()
if protect and hasattr(self.block, 'protect'):
self.block.protect = protect
self.meta_info.set_protect(protect)
dirty = True
if record:
record.protect = protect
# alter mod time
mod_ts = meta_info.get_mod_ts()
if mod_ts:
self.block.mod_ts = mod_ts
self.meta_info.set_mod_ts(mod_ts)
dirty = True
if record:
record.mod_ts = mod_ts
# alter comment
comment = meta_info.get_comment()
if comment and hasattr(self.block, "comment"):
if EntryBlock.needs_extra_comment_block(self.name, comment):
if self.block.comment_block_id == 0:
# Allocate and initialize extra block for comment
blks = self.volume.bitmap.alloc_n(1)
if blks is not None:
cblk = CommentBlock(self.blkdev, blks[0])
cblk.create(self.block.blk_num)
self.block.comment_block_id = cblk.blk_num
else:
raise FSError(NO_FREE_BLOCKS, node=self)
else:
cblk = CommentBlock(self.blkdev, self.block.comment_block_id)
cblk.read()
cblk.comment = comment
cblk.write()
else:
self.block.comment = comment
def create_meta_info(self):
if self.block.comment_block_id != 0:
self.volume.bitmap.dealloc_n([self.block.comment_block_id])
self.block.comment_block_id = 0
comment_block = CommentBlock(self.blkdev, self.block.comment_block_id)
comment_block.read()
comment = comment_block.comment
else:
comment = self.block.comment
self.meta_info = MetaInfo(self.block.protect, self.block.mod_ts, comment)
self.meta_info.set_comment(comment)
dirty = True
if record:
rebuild_dircache = len(record.comment) < comment
record.comment = comment
# really need update?
if dirty:
self.block.write()
# dirache update
if record:
self.parent.update_dircache_record(record, rebuild_dircache)
def change_comment(self, comment):
self.change_meta_info(MetaInfo(comment=comment))
def change_protect(self, protect):
self.change_meta_info(MetaInfo(protect=protect))
def change_protect_by_string(self, pr_str):
p = ProtectFlags()
p.parse(pr_str)
self.change_protect(p.mask)
def change_mod_ts(self, mod_ts):
self.change_meta_info(MetaInfo(mod_ts=mod_ts))
def change_mod_ts_by_string(self, tm_str):
t = TimeStamp()
t.parse(tm_str)
self.change_meta_info(MetaInfo(mod_ts=t))
def get_file_name(self):
return self.name
def get_list_str(self, indent=0, all=False, detail=False):
istr = ' ' * indent
if detail:
extra = self.get_detail_str()
else:
extra = self.meta_info.get_str_line()
return '%-40s %8s %s' % (istr + self.name.get_unicode_name(), self.get_size_str(), extra)
def list(self, indent=0, all=False, detail=False, encoding="UTF-8"):
print(self.get_list_str(indent=indent, all=all, detail=detail))
def delete(self, wipe=False, all=False, update_ts=True):
if all:
self.delete_children(wipe, all, update_ts)
self.parent._delete(self, wipe, update_ts)
def get_size_str(self):
# re-implemented in derived classes!
return ""
def get_blocks(self, with_data=False):
# re-implemented in derived classes!
return 0
def delete_children(self, wipe, all, update_ts):
pass
def get_file_data(self):
return None
def get_meta_info(self):
return self.meta_info
def dump_blocks(self, with_data=False):
blks = self.get_blocks(with_data)
for b in blks:
b.dump()
def change_meta_info(self, meta_info):
dirty = False
def get_node_path(self, with_vol=False):
if self.parent != None:
if not with_vol and self.parent.parent == None:
r = []
else:
r = self.parent.get_node_path()
else:
if not with_vol:
return []
r = []
r.append(self.name.get_unicode_name())
return r
# dircache?
rebuild_dircache = False
if self.volume.is_dircache and self.parent:
record = self.parent.get_dircache_record(self.name.get_name())
if not record:
raise FSError(INTERNAL_ERROR, node=self, extra="dc not found!")
else:
record = None
def get_node_path_name(self, with_vol=False):
r = self.get_node_path()
return FSString("/".join(r))
# alter protect flags
protect = meta_info.get_protect()
if protect and hasattr(self.block, "protect"):
self.block.protect = protect
self.meta_info.set_protect(protect)
dirty = True
if record:
record.protect = protect
def get_detail_str(self):
return ""
def get_block_usage(self, all=False, first=True):
return (0,0)
# alter mod time
mod_ts = meta_info.get_mod_ts()
if mod_ts:
self.block.mod_ts = mod_ts
self.meta_info.set_mod_ts(mod_ts)
dirty = True
if record:
record.mod_ts = mod_ts
def get_file_bytes(self, all=False, first=True):
return (0,0)
# alter comment
comment = meta_info.get_comment()
if comment and hasattr(self.block, "comment"):
if EntryBlock.needs_extra_comment_block(self.name, comment):
if self.block.comment_block_id == 0:
# Allocate and initialize extra block for comment
blks = self.volume.bitmap.alloc_n(1)
if blks is not None:
cblk = CommentBlock(self.blkdev, blks[0])
cblk.create(self.block.blk_num)
self.block.comment_block_id = cblk.blk_num
else:
raise FSError(NO_FREE_BLOCKS, node=self)
else:
cblk = CommentBlock(self.blkdev, self.block.comment_block_id)
cblk.read()
cblk.comment = comment
cblk.write()
else:
self.block.comment = comment
if self.block.comment_block_id != 0:
self.volume.bitmap.dealloc_n([self.block.comment_block_id])
self.block.comment_block_id = 0
def is_file(self):
return False
def is_dir(self):
return False
self.meta_info.set_comment(comment)
dirty = True
if record:
rebuild_dircache = len(record.comment) < comment
record.comment = comment
def get_info(self, all=False):
# block usage: data + fs blocks
(data,fs) = self.get_block_usage(all=all)
total = data + fs
bb = self.blkdev.block_bytes
btotal = total * bb
bdata = data * bb
bfs = fs * bb
prc_data = 10000 * data / total
prc_fs = 10000 - prc_data
res = []
res.append("sum: %10d %s %12d" % (total, ByteSize.to_byte_size_str(btotal), btotal))
res.append("data: %10d %s %12d %5.2f%%" % (data, ByteSize.to_byte_size_str(bdata), bdata, prc_data / 100.0))
res.append("fs: %10d %s %12d %5.2f%%" % (fs, ByteSize.to_byte_size_str(bfs), bfs, prc_fs / 100.0))
return res
# really need update?
if dirty:
self.block.write()
# dirache update
if record:
self.parent.update_dircache_record(record, rebuild_dircache)
def change_comment(self, comment):
self.change_meta_info(MetaInfo(comment=comment))
def change_protect(self, protect):
self.change_meta_info(MetaInfo(protect=protect))
def change_protect_by_string(self, pr_str):
p = ProtectFlags()
p.parse(pr_str)
self.change_protect(p.mask)
def change_mod_ts(self, mod_ts):
self.change_meta_info(MetaInfo(mod_ts=mod_ts))
def change_mod_ts_by_string(self, tm_str):
t = TimeStamp()
t.parse(tm_str)
self.change_meta_info(MetaInfo(mod_ts=t))
def get_list_str(self, indent=0, all=False, detail=False):
istr = " " * indent
if detail:
extra = self.get_detail_str()
else:
extra = self.meta_info.get_str_line()
return "%-40s %8s %s" % (
istr + self.name.get_unicode_name(),
self.get_size_str(),
extra,
)
def list(self, indent=0, all=False, detail=False, encoding="UTF-8"):
print(self.get_list_str(indent=indent, all=all, detail=detail))
def get_size_str(self):
# re-implemented in derived classes!
return ""
def get_blocks(self, with_data=False):
# re-implemented in derived classes!
return 0
def get_file_data(self):
return None
def dump_blocks(self, with_data=False):
blks = self.get_blocks(with_data)
for b in blks:
b.dump()
def get_node_path(self, with_vol=False):
if self.parent != None:
if not with_vol and self.parent.parent == None:
r = []
else:
r = self.parent.get_node_path()
else:
if not with_vol:
return []
r = []
r.append(self.name.get_unicode_name())
return r
def get_node_path_name(self, with_vol=False):
r = self.get_node_path()
return FSString("/".join(r))
def get_detail_str(self):
return ""
def get_block_usage(self, all=False, first=True):
return (0, 0)
def get_file_bytes(self, all=False, first=True):
return (0, 0)
def is_file(self):
return False
def is_dir(self):
return False
def get_info(self, all=False):
# block usage: data + fs blocks
(data, fs) = self.get_block_usage(all=all)
total = data + fs
bb = self.blkdev.block_bytes
btotal = total * bb
bdata = data * bb
bfs = fs * bb
prc_data = 10000 * data / total
prc_fs = 10000 - prc_data
res = []
res.append(
"sum: %10d %s %12d"
% (total, ByteSize.to_byte_size_str(btotal), btotal)
)
res.append(
"data: %10d %s %12d %5.2f%%"
% (data, ByteSize.to_byte_size_str(bdata), bdata, prc_data / 100.0)
)
res.append(
"fs: %10d %s %12d %5.2f%%"
% (fs, ByteSize.to_byte_size_str(bfs), bfs, prc_fs / 100.0)
)
return res

View File

@ -1,39 +1,40 @@
from .ADFSDir import ADFSDir
from .MetaInfo import MetaInfo
from . import DosType
class ADFSVolDir(ADFSDir):
def __init__(self, volume, root_block):
ADFSDir.__init__(self, volume, None)
self.set_block(root_block)
self._init_name_hash()
def __repr__(self):
return "[VolDir(%d)'%s':%s]" % (self.block.blk_num, self.block.name, self.entries)
def draw_on_bitmap(self, bm, show_all=False, first=True):
blk_num = self.block.blk_num
bm[blk_num] = 'V'
if show_all or first:
self.ensure_entries()
for e in self.entries:
e.draw_on_bitmap(bm, show_all, False)
def __init__(self, volume, root_block):
ADFSDir.__init__(self, volume, None)
self.set_block(root_block)
self._init_name_hash()
def get_size_str(self):
return "VOLUME"
def __repr__(self):
return "[VolDir(%d)'%s':%s]" % (
self.block.blk_num,
self.block.name,
self.entries,
)
def create_meta_info(self):
self.meta_info = MetaInfo(mod_ts=self.block.mod_ts)
def can_delete(self):
return False
def draw_on_bitmap(self, bm, show_all=False, first=True):
blk_num = self.block.blk_num
bm[blk_num] = "V"
if show_all or first:
self.ensure_entries()
for e in self.entries:
e.draw_on_bitmap(bm, show_all, False)
def get_list_str(self, indent=0, all=False, detail=False):
a = ADFSDir.get_list_str(self, indent=indent, all=all, detail=detail)
a += DosType.get_dos_type_str(self.volume.get_dos_type())
a += " #%d" % self.block_bytes
return a
def get_size_str(self):
return "VOLUME"
def create_meta_info(self):
self.meta_info = MetaInfo(mod_ts=self.block.mod_ts)
def can_delete(self):
return False
def get_list_str(self, indent=0, all=False, detail=False):
a = ADFSDir.get_list_str(self, indent=indent, all=all, detail=detail)
a += DosType.get_dos_type_str(self.volume.get_dos_type())
a += " #%d" % self.block_bytes
return a

View File

@ -1,6 +1,3 @@
from .block.BootBlock import BootBlock
from .block.RootBlock import RootBlock
from .ADFSVolDir import ADFSVolDir
@ -13,342 +10,371 @@ from .TimeStamp import TimeStamp
from . import DosType
import amitools.util.ByteSize as ByteSize
class ADFSVolume:
def __init__(self, blkdev):
self.blkdev = blkdev
self.boot = None
self.root = None
self.root_dir = None
self.bitmap = None
self.valid = False
self.is_ffs = None
self.is_intl = None
self.is_dircache = None
self.is_longname = None
self.name = None
self.meta_info = None
def open(self):
# read boot block
self.boot = BootBlock(self.blkdev)
self.boot.read()
# valid root block?
if self.boot.valid:
# get fs flags
dos_type = self.boot.dos_type
self.is_ffs = DosType.is_ffs(dos_type)
self.is_intl = DosType.is_intl(dos_type)
self.is_dircache = DosType.is_dircache(dos_type)
self.is_longname = DosType.is_longname(dos_type)
# read root
self.root = RootBlock(self.blkdev, self.boot.calc_root_blk)
self.root.read()
if self.root.valid:
self.name = self.root.name
# build meta info
self.meta_info = RootMetaInfo( self.root.create_ts, self.root.disk_ts, self.root.mod_ts )
# create root dir
self.root_dir = ADFSVolDir(self, self.root)
self.root_dir.read()
def __init__(self, blkdev):
self.blkdev = blkdev
self.boot = None
self.root = None
self.root_dir = None
self.bitmap = None
self.valid = False
self.is_ffs = None
self.is_intl = None
self.is_dircache = None
self.is_longname = None
self.name = None
self.meta_info = None
def open(self):
# read boot block
self.boot = BootBlock(self.blkdev)
self.boot.read()
# valid root block?
if self.boot.valid:
# get fs flags
dos_type = self.boot.dos_type
self.is_ffs = DosType.is_ffs(dos_type)
self.is_intl = DosType.is_intl(dos_type)
self.is_dircache = DosType.is_dircache(dos_type)
self.is_longname = DosType.is_longname(dos_type)
# read root
self.root = RootBlock(self.blkdev, self.boot.calc_root_blk)
self.root.read()
if self.root.valid:
self.name = self.root.name
# build meta info
self.meta_info = RootMetaInfo(
self.root.create_ts, self.root.disk_ts, self.root.mod_ts
)
# create root dir
self.root_dir = ADFSVolDir(self, self.root)
self.root_dir.read()
# create bitmap
self.bitmap = ADFSBitmap(self.root)
self.bitmap.read()
self.valid = True
else:
raise FSError(INVALID_ROOT_BLOCK, block=self.root)
else:
raise FSError(INVALID_BOOT_BLOCK, block=self.boot)
def create(
self,
name,
meta_info=None,
dos_type=None,
boot_code=None,
is_ffs=False,
is_intl=False,
is_dircache=False,
is_longname=False,
):
# determine dos_type
if dos_type == None:
dos_type = DosType.DOS0
if is_longname:
dos_type = DosType.DOS6
elif is_dircache:
dos_type |= DosType.DOS_MASK_DIRCACHE
elif is_intl:
dos_type |= DosType.DOS_MASK_INTL
if is_ffs:
dos_type |= DosType.DOS_MASK_FFS
# update flags
self.is_ffs = DosType.is_ffs(dos_type)
self.is_intl = DosType.is_intl(dos_type)
self.is_dircache = DosType.is_dircache(dos_type)
self.is_longname = DosType.is_longname(dos_type)
# convert and check volume name
if not isinstance(name, FSString):
raise ValueError("create's name must be a FSString")
fn = FileName(
name, is_intl=self.is_intl, is_longname=False
) # Volumes don't support long names
if not fn.is_valid():
raise FSError(INVALID_VOLUME_NAME, file_name=name, node=self)
# create a boot block
self.boot = BootBlock(self.blkdev)
self.boot.create(dos_type=dos_type, boot_code=boot_code)
self.boot.write()
# create a root block
self.root = RootBlock(self.blkdev, self.boot.calc_root_blk)
if meta_info == None:
meta_info = RootMetaInfo()
meta_info.set_current_as_create_time()
meta_info.set_current_as_mod_time()
meta_info.set_current_as_disk_time()
create_ts = meta_info.get_create_ts()
disk_ts = meta_info.get_disk_ts()
mod_ts = meta_info.get_mod_ts()
self.meta_info = meta_info
self.root.create(fn.get_name(), create_ts, disk_ts, mod_ts, fstype=dos_type)
self.name = name
# create bitmap
self.bitmap = ADFSBitmap(self.root)
self.bitmap.read()
self.bitmap.create()
self.bitmap.write() # writes root block, too
# create empty root dir
self.root_dir = ADFSVolDir(self, self.root)
self.root_dir.read()
# all ok
self.valid = True
else:
raise FSError(INVALID_ROOT_BLOCK, block=self.root)
else:
raise FSError(INVALID_BOOT_BLOCK, block=self.boot)
def create(self, name, meta_info=None, dos_type=None, boot_code=None, is_ffs=False, is_intl=False, is_dircache=False, is_longname=False):
# determine dos_type
if dos_type == None:
dos_type = DosType.DOS0
if is_longname:
dos_type = DosType.DOS6
elif is_dircache:
dos_type |= DosType.DOS_MASK_DIRCACHE
elif is_intl:
dos_type |= DosType.DOS_MASK_INTL
if is_ffs:
dos_type |= DosType.DOS_MASK_FFS
# update flags
self.is_ffs = DosType.is_ffs(dos_type)
self.is_intl = DosType.is_intl(dos_type)
self.is_dircache = DosType.is_dircache(dos_type)
self.is_longname = DosType.is_longname(dos_type)
# convert and check volume name
if not isinstance(name, FSString):
raise ValueError("create's name must be a FSString")
fn = FileName(name, is_intl=self.is_intl, is_longname=False) # Volumes don't support long names
if not fn.is_valid():
raise FSError(INVALID_VOLUME_NAME, file_name=name, node=self)
# create a boot block
self.boot = BootBlock(self.blkdev)
self.boot.create(dos_type=dos_type, boot_code=boot_code)
self.boot.write()
# create a root block
self.root = RootBlock(self.blkdev, self.boot.calc_root_blk)
if meta_info == None:
meta_info = RootMetaInfo()
meta_info.set_current_as_create_time()
meta_info.set_current_as_mod_time()
meta_info.set_current_as_disk_time()
create_ts = meta_info.get_create_ts()
disk_ts = meta_info.get_disk_ts()
mod_ts = meta_info.get_mod_ts()
self.meta_info = meta_info
self.root.create(fn.get_name(), create_ts, disk_ts, mod_ts, fstype=dos_type)
self.name = name
# create bitmap
self.bitmap = ADFSBitmap(self.root)
self.bitmap.create()
self.bitmap.write() # writes root block, too
# create empty root dir
self.root_dir = ADFSVolDir(self, self.root)
self.root_dir.read()
# all ok
self.valid = True
def close(self):
# flush bitmap state (if it was dirty)
if self.bitmap:
self.bitmap.write()
def get_info(self):
"""return an array of strings with information on the volume"""
res = []
total = self.get_total_blocks()
free = self.get_free_blocks()
used = total - free
bb = self.blkdev.block_bytes
btotal = total * bb
bfree = free * bb
bused = used * bb
prc_free = 10000 * free / total
prc_used = 10000 - prc_free
res.append("total: %10d %s %12d" % (total, ByteSize.to_byte_size_str(btotal), btotal))
res.append("used: %10d %s %12d %5.2f%%" % (used, ByteSize.to_byte_size_str(bused), bused, prc_used / 100.0))
res.append("free: %10d %s %12d %5.2f%%" % (free, ByteSize.to_byte_size_str(bfree), bfree, prc_free / 100.0))
return res
def close(self):
# flush bitmap state (if it was dirty)
if self.bitmap:
self.bitmap.write()
# ----- Path Queries -----
def get_info(self):
"""return an array of strings with information on the volume"""
res = []
total = self.get_total_blocks()
free = self.get_free_blocks()
used = total - free
bb = self.blkdev.block_bytes
btotal = total * bb
bfree = free * bb
bused = used * bb
prc_free = 10000 * free / total
prc_used = 10000 - prc_free
res.append(
"total: %10d %s %12d"
% (total, ByteSize.to_byte_size_str(btotal), btotal)
)
res.append(
"used: %10d %s %12d %5.2f%%"
% (used, ByteSize.to_byte_size_str(bused), bused, prc_used / 100.0)
)
res.append(
"free: %10d %s %12d %5.2f%%"
% (free, ByteSize.to_byte_size_str(bfree), bfree, prc_free / 100.0)
)
return res
def get_path_name(self, path_name, allow_file=True, allow_dir=True):
"""get node for given path"""
# make sure path name is a FSString
if not isinstance(path_name, FSString):
raise ValueError("get_path_name's path must be a FSString")
# create and check file name
fn = FileName(path_name, is_intl=self.is_intl, is_longname=self.is_longname)
if not fn.is_valid():
raise FSError(INVALID_FILE_NAME, file_name=path_name, node=self)
# find node
if fn.is_root_path_alias():
# its the root node
return self.root_dir
else:
# find a sub node
path = fn.split_path()
return self.root_dir.get_path(path, allow_file, allow_dir)
# ----- Path Queries -----
def get_dir_path_name(self, path_name):
"""get node for given path and ensure its a directory"""
return self.get_path_name(path_name, allow_file=False)
def get_file_path_name(self, path_name):
"""get node for given path and ensure its a file"""
return self.get_path_name(path_name, allow_dir=False)
def get_create_path_name(self, path_name, suggest_name=None):
"""get a parent node and path name for creation
def get_path_name(self, path_name, allow_file=True, allow_dir=True):
"""get node for given path"""
# make sure path name is a FSString
if not isinstance(path_name, FSString):
raise ValueError("get_path_name's path must be a FSString")
# create and check file name
fn = FileName(path_name, is_intl=self.is_intl, is_longname=self.is_longname)
if not fn.is_valid():
raise FSError(INVALID_FILE_NAME, file_name=path_name, node=self)
# find node
if fn.is_root_path_alias():
# its the root node
return self.root_dir
else:
# find a sub node
path = fn.split_path()
return self.root_dir.get_path(path, allow_file, allow_dir)
def get_dir_path_name(self, path_name):
"""get node for given path and ensure its a directory"""
return self.get_path_name(path_name, allow_file=False)
def get_file_path_name(self, path_name):
"""get node for given path and ensure its a file"""
return self.get_path_name(path_name, allow_dir=False)
def get_create_path_name(self, path_name, suggest_name=None):
"""get a parent node and path name for creation
return: parent_node_or_none, file_name_or_none
"""
# make sure input is correct
if not isinstance(path_name, FSString):
raise ValueError("get_create_path_name's path_name must be a FSString")
if suggest_name != None and not isinstance(suggest_name, FSString):
raise ValueError("get_create_path_name's suggest_name must be a FSString")
# is root path?
fn = FileName(path_name, is_intl=self.is_intl, is_longname=self.is_longname)
if not fn.is_valid():
raise FSError(INVALID_FILE_NAME, file_name=path_name, node=self)
# find node
if fn.is_root_path_alias():
return self.root_dir, suggest_name
else:
# try to get path_name as a directory
node = self.get_dir_path_name(path_name)
if node != None:
return node, suggest_name
else:
# split into dir and file name
dn, fn = fn.get_dir_and_base_name()
if dn != None:
# has a directory -> try to fetch it
node = self.get_dir_path_name(dn)
# make sure input is correct
if not isinstance(path_name, FSString):
raise ValueError("get_create_path_name's path_name must be a FSString")
if suggest_name != None and not isinstance(suggest_name, FSString):
raise ValueError("get_create_path_name's suggest_name must be a FSString")
# is root path?
fn = FileName(path_name, is_intl=self.is_intl, is_longname=self.is_longname)
if not fn.is_valid():
raise FSError(INVALID_FILE_NAME, file_name=path_name, node=self)
# find node
if fn.is_root_path_alias():
return self.root_dir, suggest_name
else:
# no dir -> assume root dir
node = self.root_dir
if fn != None:
# take given name
return node, fn
else:
# use suggested name
return node, suggest_name
# ----- convenience API -----
def get_volume_name(self):
return self.name
def get_root_dir(self):
return self.root_dir
def get_dos_type(self):
return self.boot.dos_type
def get_boot_code(self):
return self.boot.boot_code
def get_free_blocks(self):
return self.bitmap.get_num_free()
def get_used_blocks(self):
free = self.bitmap.get_num_free()
total = self.blkdev.num_blocks
return total - free
def get_total_blocks(self):
return self.blkdev.num_blocks
def get_meta_info(self):
return self.meta_info
# try to get path_name as a directory
node = self.get_dir_path_name(path_name)
if node != None:
return node, suggest_name
else:
# split into dir and file name
dn, fn = fn.get_dir_and_base_name()
if dn != None:
# has a directory -> try to fetch it
node = self.get_dir_path_name(dn)
else:
# no dir -> assume root dir
node = self.root_dir
if fn != None:
# take given name
return node, fn
else:
# use suggested name
return node, suggest_name
def update_disk_time(self):
mi = RootMetaInfo()
mi.set_current_as_disk_time()
self.change_meta_info(mi)
def change_meta_info(self, meta_info):
if self.root != None and self.root.valid:
dirty = False
# update create_ts
create_ts = meta_info.get_create_ts()
if create_ts != None:
self.root.create_ts = meta_info.get_create_ts()
dirty = True
# update disk_ts
disk_ts = meta_info.get_disk_ts()
if disk_ts != None:
self.root.disk_ts = disk_ts
dirty = True
# update mod_ts
mod_ts = meta_info.get_mod_ts()
if mod_ts != None:
self.root.mod_ts = mod_ts
dirty = True
# update if something changed
if dirty:
# ----- convenience API -----
def get_volume_name(self):
return self.name
def get_root_dir(self):
return self.root_dir
def get_dos_type(self):
return self.boot.dos_type
def get_boot_code(self):
return self.boot.boot_code
def get_free_blocks(self):
return self.bitmap.get_num_free()
def get_used_blocks(self):
free = self.bitmap.get_num_free()
total = self.blkdev.num_blocks
return total - free
def get_total_blocks(self):
return self.blkdev.num_blocks
def get_meta_info(self):
return self.meta_info
def update_disk_time(self):
mi = RootMetaInfo()
mi.set_current_as_disk_time()
self.change_meta_info(mi)
def change_meta_info(self, meta_info):
if self.root != None and self.root.valid:
dirty = False
# update create_ts
create_ts = meta_info.get_create_ts()
if create_ts != None:
self.root.create_ts = meta_info.get_create_ts()
dirty = True
# update disk_ts
disk_ts = meta_info.get_disk_ts()
if disk_ts != None:
self.root.disk_ts = disk_ts
dirty = True
# update mod_ts
mod_ts = meta_info.get_mod_ts()
if mod_ts != None:
self.root.mod_ts = mod_ts
dirty = True
# update if something changed
if dirty:
self.root.write()
self.meta_info = RootMetaInfo(
self.root.create_ts, self.root.disk_ts, self.root.mod_ts
)
return True
else:
return False
def change_create_ts(self, create_ts):
return self.change_meta_info(RootMetaInfo(create_ts=create_ts))
def change_disk_ts(self, disk_ts):
return self.change_meta_info(RootMetaInfo(disk_ts=disk_ts))
def change_mod_ts(self, mod_ts):
return self.change_meta_info(RootMetaInfo(mod_ts=mod_ts))
def change_create_ts_by_string(self, create_ts_str):
t = TimeStamp()
t.parse(create_ts_str)
return self.change_meta_info(RootMetaInfo(create_ts=t))
def change_disk_ts_by_string(self, disk_ts_str):
t = TimeStamp()
t.parse(disk_ts_str)
return self.change_meta_info(RootMetaInfo(disk_ts=t))
def change_mod_ts_by_string(self, mod_ts_str):
t = TimeStamp()
t.parse(mod_ts_str)
return self.change_meta_info(RootMetaInfo(mod_ts=t))
def relabel(self, name):
"""Relabel the volume"""
# make sure its a FSString
if not isinstance(name, FSString):
raise ValueError("relabel's name must be a FSString")
# validate file name
fn = FileName(name, is_intl=self.is_intl, is_longname=False)
if not fn.is_valid():
raise FSError(INVALID_VOLUME_NAME, file_name=name, node=self)
# update root block
ami_name = name.get_ami_str()
self.root.name = ami_name
self.root.write()
self.meta_info = RootMetaInfo( self.root.create_ts, self.root.disk_ts, self.root.mod_ts )
return True
else:
return False
def change_create_ts(self, create_ts):
return self.change_meta_info(RootMetaInfo(create_ts=create_ts))
def change_disk_ts(self, disk_ts):
return self.change_meta_info(RootMetaInfo(disk_ts=disk_ts))
def change_mod_ts(self, mod_ts):
return self.change_meta_info(RootMetaInfo(mod_ts=mod_ts))
def change_create_ts_by_string(self, create_ts_str):
t = TimeStamp()
t.parse(create_ts_str)
return self.change_meta_info(RootMetaInfo(create_ts=t))
# store internally
self.name = name
self.root_dir.name = name
def change_disk_ts_by_string(self, disk_ts_str):
t = TimeStamp()
t.parse(disk_ts_str)
return self.change_meta_info(RootMetaInfo(disk_ts=t))
def change_mod_ts_by_string(self, mod_ts_str):
t = TimeStamp()
t.parse(mod_ts_str)
return self.change_meta_info(RootMetaInfo(mod_ts=t))
def create_dir(self, ami_path):
"""Create a new directory"""
# make sure its a FSString
if not isinstance(ami_path, FSString):
raise ValueError("create_dir's ami_path must be a FSString")
# check file path
fn = FileName(ami_path, is_intl=self.is_intl, is_longname=self.is_longname)
if not fn.is_valid():
raise FSError(INVALID_FILE_NAME, file_name=ami_path)
# split into dir and base name
dir_name, base_name = fn.get_dir_and_base_name()
if base_name == None:
raise FSError(INVALID_FILE_NAME, file_name=ami_path)
# find parent of dir
if dir_name == None:
node = self.root_dir
else:
# no parent dir found
node = self.get_dir_path_name(dir_name)
if node == None:
raise FSError(
INVALID_PARENT_DIRECTORY,
file_name=ami_path,
extra="not found: " + dir_name,
)
node.create_dir(base_name)
def relabel(self, name):
"""Relabel the volume"""
# make sure its a FSString
if not isinstance(name, FSString):
raise ValueError("relabel's name must be a FSString")
# validate file name
fn = FileName(name, is_intl=self.is_intl, is_longname=False)
if not fn.is_valid():
raise FSError(INVALID_VOLUME_NAME, file_name=name, node=self)
# update root block
ami_name = name.get_ami_str()
self.root.name = ami_name
self.root.write()
# store internally
self.name = name
self.root_dir.name = name
def write_file(self, data, ami_path, suggest_name=None, cache=False):
"""Write given data as a file"""
# get parent node and file_name
parent_node, file_name = self.get_create_path_name(ami_path, suggest_name)
if parent_node == None:
raise FSError(INVALID_PARENT_DIRECTORY, file_name=ami_path)
if file_name == None:
raise FSError(INVALID_FILE_NAME, file_name=file_name)
# create file
node = parent_node.create_file(file_name, data)
if not cache:
node.flush()
def create_dir(self, ami_path):
"""Create a new directory"""
# make sure its a FSString
if not isinstance(ami_path, FSString):
raise ValueError("create_dir's ami_path must be a FSString")
# check file path
fn = FileName(ami_path, is_intl=self.is_intl, is_longname=self.is_longname)
if not fn.is_valid():
raise FSError(INVALID_FILE_NAME, file_name=ami_path)
# split into dir and base name
dir_name, base_name = fn.get_dir_and_base_name()
if base_name == None:
raise FSError(INVALID_FILE_NAME, file_name=ami_path)
# find parent of dir
if dir_name == None:
node = self.root_dir
else:
# no parent dir found
node = self.get_dir_path_name(dir_name)
if node == None:
raise FSError(INVALID_PARENT_DIRECTORY, file_name=ami_path, extra="not found: "+dir_name)
node.create_dir(base_name)
def write_file(self, data, ami_path, suggest_name=None, cache=False):
"""Write given data as a file"""
# get parent node and file_name
parent_node, file_name = self.get_create_path_name(ami_path, suggest_name)
if parent_node == None:
raise FSError(INVALID_PARENT_DIRECTORY, file_name=ami_path)
if file_name == None:
raise FSError(INVALID_FILE_NAME, file_name=file_name)
# create file
node = parent_node.create_file(file_name, data)
if not cache:
node.flush()
def read_file(self, ami_path, cache=False):
"""Read a file and return data"""
# get node of file
node = self.get_file_path_name(ami_path)
if node == None:
raise FSError(FILE_NOT_FOUND, file_name=ami_path)
data = node.get_file_data()
if not cache:
node.flush()
return data
def delete(self, ami_path, wipe=False, all=False):
"""Delete a file or directory at given path"""
node = self.get_path_name(ami_path)
if node == None:
raise FSError(FILE_NOT_FOUND, file_name=ami_path)
node.delete(wipe=wipe, all=all)
def read_file(self, ami_path, cache=False):
"""Read a file and return data"""
# get node of file
node = self.get_file_path_name(ami_path)
if node == None:
raise FSError(FILE_NOT_FOUND, file_name=ami_path)
data = node.get_file_data()
if not cache:
node.flush()
return data
def delete(self, ami_path, wipe=False, all=False):
"""Delete a file or directory at given path"""
node = self.get_path_name(ami_path)
if node == None:
raise FSError(FILE_NOT_FOUND, file_name=ami_path)
node.delete(wipe=wipe, all=all)

View File

@ -1,14 +1,14 @@
"""Helper functions and constants useable with DosTypes"""
# raw dos types
DOS0 = 0x444f5300
DOS1 = 0x444f5301
DOS2 = 0x444f5302
DOS3 = 0x444f5303
DOS4 = 0x444f5304
DOS5 = 0x444f5305
DOS6 = 0x444f5306
DOS7 = 0x444f5307
DOS0 = 0x444F5300
DOS1 = 0x444F5301
DOS2 = 0x444F5302
DOS3 = 0x444F5303
DOS4 = 0x444F5304
DOS5 = 0x444F5305
DOS6 = 0x444F5306
DOS7 = 0x444F5307
# more convenient dos type
DOS_OFS = DOS0
@ -22,14 +22,14 @@ DOS_FFS_INTL_LONGNAME = DOS7
# string names for dos types
dos_type_names = [
'DOS0:ofs',
'DOS1:ffs',
'DOS2:ofs+intl',
'DOS3:ffs+intl',
'DOS4:ofs+intl+dircache',
'DOS5:ffs+intl+dircache',
'DOS6:ofs+intl+longname',
'DOS7:ffs+intl+longname'
"DOS0:ofs",
"DOS1:ffs",
"DOS2:ofs+intl",
"DOS3:ffs+intl",
"DOS4:ofs+intl+dircache",
"DOS5:ffs+intl+dircache",
"DOS6:ofs+intl+longname",
"DOS7:ffs+intl+longname",
]
# masks for modes
@ -37,103 +37,117 @@ DOS_MASK_FFS = 1
DOS_MASK_INTL = 2
DOS_MASK_DIRCACHE = 4
def parse_dos_type_str(string):
"""parse a dos type string
"""parse a dos type string
return None if its invalid or dostype value
"""
comp = string.split("+")
if "ffs" in comp:
if "dc" in comp or "dircache" in comp:
return DOS_FFS_INTL_DIRCACHE
elif "ln" in comp or "longname" in comp:
return DOS_FFS_INTL_LONGNAME
elif "intl" in comp:
return DOS_FFS_INTL
comp = string.split("+")
if "ffs" in comp:
if "dc" in comp or "dircache" in comp:
return DOS_FFS_INTL_DIRCACHE
elif "ln" in comp or "longname" in comp:
return DOS_FFS_INTL_LONGNAME
elif "intl" in comp:
return DOS_FFS_INTL
else:
return DOS_FFS
elif "ofs" in comp:
if "dc" in comp or "dircache" in comp:
return DOS_OFS_INTL_DIRCACHE
elif "ln" in comp or "longname" in comp:
return DOS_OFS_INTL_LONGNAME
elif "intl" in comp:
return DOS_OFS_INTL
else:
return DOS_OFS
else:
return DOS_FFS
elif "ofs" in comp:
if "dc" in comp or "dircache" in comp:
return DOS_OFS_INTL_DIRCACHE
elif "ln" in comp or "longname" in comp:
return DOS_OFS_INTL_LONGNAME
elif "intl" in comp:
return DOS_OFS_INTL
else:
return DOS_OFS
else:
n = len(string)
# use 'DOS0' .. 'DOS7'
if n == 4 and string[0:3] == 'DOS':
off = ord(string[3]) - ord('0')
if off >= 0 and off <= 7:
return DOS0 + off
else:
return None
# other tag?
elif string[0].isalpha() and n==4:
return tag_str_to_num(string)
# use '0x01234567' hex value
elif string[0:2] == '0x':
try:
return int(string[2:],16)
except ValueError:
return None
# try number
else:
try:
return int(string)
except ValueError:
return None
n = len(string)
# use 'DOS0' .. 'DOS7'
if n == 4 and string[0:3] == "DOS":
off = ord(string[3]) - ord("0")
if off >= 0 and off <= 7:
return DOS0 + off
else:
return None
# other tag?
elif string[0].isalpha() and n == 4:
return tag_str_to_num(string)
# use '0x01234567' hex value
elif string[0:2] == "0x":
try:
return int(string[2:], 16)
except ValueError:
return None
# try number
else:
try:
return int(string)
except ValueError:
return None
def tag_str_to_num(s):
"""Convert the DosType in a 4 letter tag string to 32 bit value"""
if len(s) != 4:
return 0
a = ord(s[0]) << 24
b = ord(s[1]) << 16
c = ord(s[2]) << 8
l = s[3]
d = ord(l)
if l.isdigit():
d = d - ord('0')
return a+b+c+d
"""Convert the DosType in a 4 letter tag string to 32 bit value"""
if len(s) != 4:
return 0
a = ord(s[0]) << 24
b = ord(s[1]) << 16
c = ord(s[2]) << 8
l = s[3]
d = ord(l)
if l.isdigit():
d = d - ord("0")
return a + b + c + d
def num_to_tag_str(l):
"""Convert the DosType in a 32 bit value to its 4 letter tag string"""
a = chr((l >> 24) & 0xff)
b = chr((l >> 16) & 0xff)
c = chr((l >> 8) & 0xff)
last = (l & 0xff)
if last < 32:
last = chr(last + 48)
else:
last = chr(last)
return a+b+c+last
"""Convert the DosType in a 32 bit value to its 4 letter tag string"""
a = chr((l >> 24) & 0xFF)
b = chr((l >> 16) & 0xFF)
c = chr((l >> 8) & 0xFF)
last = l & 0xFF
if last < 32:
last = chr(last + 48)
else:
last = chr(last)
return a + b + c + last
def get_dos_type_str(dos_type):
"""return description of dos type as a string"""
return dos_type_names[dos_type & 0x7]
"""return description of dos type as a string"""
return dos_type_names[dos_type & 0x7]
def is_valid(dos_type):
"""check if its a valid dos type"""
return (dos_type >= DOS0) and (dos_type <= DOS7)
"""check if its a valid dos type"""
return (dos_type >= DOS0) and (dos_type <= DOS7)
def is_ffs(dos_type):
"""check if its a fast file system dostype"""
return (dos_type & DOS_MASK_FFS) == DOS_MASK_FFS
"""check if its a fast file system dostype"""
return (dos_type & DOS_MASK_FFS) == DOS_MASK_FFS
def is_intl(dos_type):
"""check if international mode is enabled in dostype"""
return is_dircache(dos_type) or is_longname(dos_type) or (dos_type & DOS_MASK_INTL) == DOS_MASK_INTL
"""check if international mode is enabled in dostype"""
return (
is_dircache(dos_type)
or is_longname(dos_type)
or (dos_type & DOS_MASK_INTL) == DOS_MASK_INTL
)
def is_dircache(dos_type):
"""check if dir cache mode is enabled in dostype"""
return (dos_type == DOS4) or (dos_type == DOS5)
"""check if dir cache mode is enabled in dostype"""
return (dos_type == DOS4) or (dos_type == DOS5)
def is_longname(dos_type):
"""check if long filename mode is enabled in dostype"""
return (dos_type == DOS6) or (dos_type == DOS7)
"""check if long filename mode is enabled in dostype"""
return (dos_type == DOS6) or (dos_type == DOS7)
def rootblock_tracks_used_blocks(dos_type):
"""checks if the number of used blocks is stored within the rootblock"""
return (dos_type == DOS6) or (dos_type == DOS7)
"""checks if the number of used blocks is stored within the rootblock"""
return (dos_type == DOS6) or (dos_type == DOS7)

View File

@ -22,52 +22,51 @@ FILE_NOT_FOUND = 21
INVALID_VOLUME_NAME = 22
error_names = {
INVALID_BOOT_BLOCK : "Invalid Boot Block",
INVALID_ROOT_BLOCK : "Invalid Root Block",
INVALID_USER_DIR_BLOCK : "Invalid UserDir Block",
INVALID_FILE_HEADER_BLOCK : "Invalid FileHeader Block",
INVALID_FILE_LIST_BLOCK : "Invalid FileList Block",
INVALID_FILE_DATA_BLOCK : "Invalid FileData Block",
NO_FREE_BLOCKS : "No Free Blocks",
UNSUPPORTED_DIR_BLOCK : "Unsupported Dir Block",
INVALID_FILE_NAME : "Invalid File Name",
NAME_ALREADY_EXISTS : "Name already exists",
INVALID_SEQ_NUM : "Invalid Sequence Number",
FILE_LIST_BLOCK_COUNT_MISMATCH : "FileList Block Count Mismatch",
FILE_DATA_BLOCK_COUNT_MISMATCH : "FileData Block Count Mismatch",
INVALID_BITMAP_BLOCK : "Invalid Bitmap Block",
BITMAP_BLOCK_COUNT_MISMATCH : "Bitmap Block Count Mismatch",
BITMAP_SIZE_MISMATCH : "Bitmap Size Mismatch",
DELETE_NOT_ALLOWED : "Delete Not Allowed",
INTERNAL_ERROR : "Internal Error",
INVALID_PROTECT_FORMAT : "Invalid Protect Format",
INVALID_PARENT_DIRECTORY : "Invalid Parent Directory",
FILE_NOT_FOUND : "File not found",
INVALID_VOLUME_NAME : "Invalid volume name"
INVALID_BOOT_BLOCK: "Invalid Boot Block",
INVALID_ROOT_BLOCK: "Invalid Root Block",
INVALID_USER_DIR_BLOCK: "Invalid UserDir Block",
INVALID_FILE_HEADER_BLOCK: "Invalid FileHeader Block",
INVALID_FILE_LIST_BLOCK: "Invalid FileList Block",
INVALID_FILE_DATA_BLOCK: "Invalid FileData Block",
NO_FREE_BLOCKS: "No Free Blocks",
UNSUPPORTED_DIR_BLOCK: "Unsupported Dir Block",
INVALID_FILE_NAME: "Invalid File Name",
NAME_ALREADY_EXISTS: "Name already exists",
INVALID_SEQ_NUM: "Invalid Sequence Number",
FILE_LIST_BLOCK_COUNT_MISMATCH: "FileList Block Count Mismatch",
FILE_DATA_BLOCK_COUNT_MISMATCH: "FileData Block Count Mismatch",
INVALID_BITMAP_BLOCK: "Invalid Bitmap Block",
BITMAP_BLOCK_COUNT_MISMATCH: "Bitmap Block Count Mismatch",
BITMAP_SIZE_MISMATCH: "Bitmap Size Mismatch",
DELETE_NOT_ALLOWED: "Delete Not Allowed",
INTERNAL_ERROR: "Internal Error",
INVALID_PROTECT_FORMAT: "Invalid Protect Format",
INVALID_PARENT_DIRECTORY: "Invalid Parent Directory",
FILE_NOT_FOUND: "File not found",
INVALID_VOLUME_NAME: "Invalid volume name",
}
class FSError(Exception):
def __init__(self, code, node=None, block=None, file_name=None, extra=None):
self.code = code
self.node = node
self.block = block
self.file_name = file_name
self.extra = extra
def __str__(self):
if self.code in error_names:
code_str = str(error_names[self.code])
else:
code_str = "?"
srcs = []
if self.node != None:
srcs.append("node=" + str(self.node))
if self.block != None:
srcs.append("block=" + str(self.block))
if self.file_name != None:
srcs.append("file_name=" + self.file_name.get_unicode())
if self.extra != None:
srcs.append(str(self.extra))
return "%s(%d):%s" % (code_str, self.code, ",".join(srcs))
class FSError(Exception):
def __init__(self, code, node=None, block=None, file_name=None, extra=None):
self.code = code
self.node = node
self.block = block
self.file_name = file_name
self.extra = extra
def __str__(self):
if self.code in error_names:
code_str = str(error_names[self.code])
else:
code_str = "?"
srcs = []
if self.node != None:
srcs.append("node=" + str(self.node))
if self.block != None:
srcs.append("block=" + str(self.block))
if self.file_name != None:
srcs.append("file_name=" + self.file_name.get_unicode())
if self.extra != None:
srcs.append(str(self.extra))
return "%s(%d):%s" % (code_str, self.code, ",".join(srcs))

View File

@ -3,44 +3,43 @@ import unicodedata
class FSString:
"""Simple string class that allows to manage strings encoded in Latin-1 used for the Amiga FS.
"""Simple string class that allows to manage strings encoded in Latin-1 used for the Amiga FS.
It stores the string internally as a python UTF-8 string but allows to convert to Amiga format.
"""
def __init__(self, txt="", encoding="Latin-1"):
"""Init the string. Either with a string or with bytes.
def __init__(self, txt="", encoding="Latin-1"):
"""Init the string. Either with a string or with bytes.
If the latter is given then the "encoding" flag determines the encoding.
"""
if type(txt) is str:
self.txt = txt
elif type(txt) is bytes:
self.txt = txt.decode(encoding)
else:
raise ValueError("FSString must be str or bytes!")
def __repr__(self):
return "FSString({})".format(self.txt)
def __str__(self):
return self.txt
def __eq__(self, other):
if isinstance(other, FSString):
return self.txt == other.txt
else:
return False
if type(txt) is str:
self.txt = txt
elif type(txt) is bytes:
self.txt = txt.decode(encoding)
else:
raise ValueError("FSString must be str or bytes!")
def __ne__(self, other):
if isinstance(other, FSString):
return self.txt != other.txt
else:
return True
def __repr__(self):
return "FSString({})".format(self.txt)
def get_unicode(self):
return self.txt
def get_ami_str(self):
# make sure to normalize utf-8
nrm = unicodedata.normalize("NFKC", self.txt)
return nrm.encode("Latin-1")
def __str__(self):
return self.txt
def __eq__(self, other):
if isinstance(other, FSString):
return self.txt == other.txt
else:
return False
def __ne__(self, other):
if isinstance(other, FSString):
return self.txt != other.txt
else:
return True
def get_unicode(self):
return self.txt
def get_ami_str(self):
# make sure to normalize utf-8
nrm = unicodedata.normalize("NFKC", self.txt)
return nrm.encode("Latin-1")

View File

@ -1,111 +1,113 @@
from .FSString import FSString
class FileName:
root_path_aliases = ('', '/', ':')
def __init__(self, name, is_intl=False, is_longname=False):
# check that name is a FSString
if not isinstance(name, FSString):
raise ValueError("FileName's name must be a FSString")
self.name = name
self.is_intl = is_intl
self.is_longname = is_longname
def __str__(self):
return self.name
def __repr__(self):
return self.name
def is_root_path_alias(self):
return self.name.get_unicode() in self.root_path_aliases
def has_dir_prefix(self):
return self.name.get_unicode().find("/") != -1
def split_path(self):
pc = self.name.get_unicode().split("/")
p = []
for path in pc:
p.append(FileName(FSString(path), is_intl=self.is_intl, is_longname=self.is_longname))
return p
def get_dir_and_base_name(self):
"""Return portion after last slash '/' or the full name in unicode"""
s = self.name.get_unicode()
pos = s.rfind('/')
if pos != -1:
dir_name = s[:pos]
file_name = s[pos+1:]
if len(file_name) == 0:
return FSString(dir_name), None
else:
return FSString(dir_name), FSString(file_name)
else:
return None, self.name
def get_upper_ami_str(self):
result = self.name.get_ami_str().upper()
if self.is_intl:
r = bytearray()
for i in range(len(result)):
o = result[i]
if o >= 224 and o <= 254 and o != 247:
r.append(o - (ord('a')-ord('A')))
class FileName:
root_path_aliases = ("", "/", ":")
def __init__(self, name, is_intl=False, is_longname=False):
# check that name is a FSString
if not isinstance(name, FSString):
raise ValueError("FileName's name must be a FSString")
self.name = name
self.is_intl = is_intl
self.is_longname = is_longname
def __str__(self):
return self.name
def __repr__(self):
return self.name
def is_root_path_alias(self):
return self.name.get_unicode() in self.root_path_aliases
def has_dir_prefix(self):
return self.name.get_unicode().find("/") != -1
def split_path(self):
pc = self.name.get_unicode().split("/")
p = []
for path in pc:
p.append(
FileName(
FSString(path), is_intl=self.is_intl, is_longname=self.is_longname
)
)
return p
def get_dir_and_base_name(self):
"""Return portion after last slash '/' or the full name in unicode"""
s = self.name.get_unicode()
pos = s.rfind("/")
if pos != -1:
dir_name = s[:pos]
file_name = s[pos + 1 :]
if len(file_name) == 0:
return FSString(dir_name), None
else:
return FSString(dir_name), FSString(file_name)
else:
r.append(o)
return r
else:
return result
def is_valid(self):
# check if path contains dir prefix components
if self.has_dir_prefix():
e = self.split_path()
# empty path?
if len(e) == 0:
return False
for p in e:
if not p.is_valid():
return None, self.name
def get_upper_ami_str(self):
result = self.name.get_ami_str().upper()
if self.is_intl:
r = bytearray()
for i in range(len(result)):
o = result[i]
if o >= 224 and o <= 254 and o != 247:
r.append(o - (ord("a") - ord("A")))
else:
r.append(o)
return r
else:
return result
def is_valid(self):
# check if path contains dir prefix components
if self.has_dir_prefix():
e = self.split_path()
# empty path?
if len(e) == 0:
return False
for p in e:
if not p.is_valid():
return False
return True
else:
# single file name
s = self.name.get_ami_str()
# check for invalid chars
for c in s:
# o = ord(c)
# if o == ':' or o == '/':
# FIXME: FS
if c == ":" or c == "/":
return False
# check max size
if self.is_longname:
if len(s) > 110:
return False
elif len(s) > 30:
return False
return True
else:
# single file name
s = self.name.get_ami_str()
# check for invalid chars
for c in s:
# o = ord(c)
# if o == ':' or o == '/':
# FIXME: FS
if c == ':' or c == '/':
return False
# check max size
if self.is_longname:
if len(s) > 110:
return False
elif len(s) > 30:
return False
return True
def hash(self, hash_size=72):
up = self.get_upper_ami_str()
h = len(up)
for c in up:
h = h * 13;
h += c
h &= 0x7ff
h = h % hash_size
return h
def get_name(self):
"""Return file name string as a FSString."""
return self.name
def hash(self, hash_size=72):
up = self.get_upper_ami_str()
h = len(up)
for c in up:
h = h * 13
h += c
h &= 0x7FF
h = h % hash_size
return h
def get_ami_str_name(self):
return self.name.get_ami_str()
def get_unicode_name(self):
return self.name.get_unicode()
def get_name(self):
"""Return file name string as a FSString."""
return self.name
def get_ami_str_name(self):
return self.name.get_ami_str()
def get_unicode_name(self):
return self.name.get_unicode()

View File

@ -1,6 +1,3 @@
import os
import os.path
import sys
@ -18,229 +15,232 @@ import amitools.util.KeyValue as KeyValue
from .FSString import FSString
from .MetaInfoFSUAE import MetaInfoFSUAE
class Imager:
META_MODE_NONE = 0
META_MODE_DB = 1
META_MODE_FSUAE = 2
META_MODE_NONE = 0
META_MODE_DB = 1
META_MODE_FSUAE = 2
def __init__(self, path_encoding=None, meta_mode=META_MODE_DB):
self.meta_mode = meta_mode
self.meta_db = None
self.meta_fsuae = MetaInfoFSUAE()
self.total_bytes = 0
self.path_encoding = path_encoding
# get path name encoding for host file system
if self.path_encoding == None:
self.path_encoding = sys.getfilesystemencoding()
def __init__(self, path_encoding=None, meta_mode=META_MODE_DB):
self.meta_mode = meta_mode
self.meta_db = None
self.meta_fsuae = MetaInfoFSUAE()
self.total_bytes = 0
self.path_encoding = path_encoding
# get path name encoding for host file system
if self.path_encoding == None:
self.path_encoding = sys.getfilesystemencoding()
def get_total_bytes(self):
return self.total_bytes
def get_total_bytes(self):
return self.total_bytes
# ----- unpack -----
# ----- unpack -----
def unpack(self, volume, out_path):
# check for volume path
vol_name = volume.name.get_unicode()
if not os.path.exists(out_path):
vol_path = out_path
else:
path = os.path.abspath(out_path)
vol_path = os.path.join(path, vol_name)
if os.path.exists(vol_path):
raise IOError("Unpack directory already exists: "+vol_path)
# check for meta file
meta_path = vol_path + ".xdfmeta"
if os.path.exists(meta_path):
raise IOError("Unpack meta file already exists:"+meta_path)
# check for block dev file
blkdev_path = vol_path + ".blkdev"
if os.path.exists(blkdev_path):
raise IOError("Unpack blkdev file aready exists:"+blkdev_path)
# create volume path
if self.meta_mode != self.META_MODE_NONE:
self.meta_db = MetaDB()
self.unpack_root(volume, vol_path)
# save meta db
if self.meta_db:
self.meta_db.set_volume_name(volume.name.get_unicode())
self.meta_db.set_root_meta_info(volume.get_meta_info())
self.meta_db.set_dos_type(volume.boot.dos_type)
self.meta_db.save(meta_path)
# save boot code
if volume.boot.boot_code != None:
boot_code_path = vol_path + ".bootcode"
f = open(boot_code_path,"wb")
f.write(volume.boot.boot_code)
f.close()
# save blkdev: geo and block size
f = open(blkdev_path,"wb")
msg = "%s\n%s\n" % (volume.blkdev.get_chs_str(),
volume.blkdev.get_block_size_str())
f.write(msg.encode('UTF-8'))
f.close()
def unpack_root(self, volume, vol_path):
self.unpack_dir(volume.get_root_dir(), vol_path)
def unpack_dir(self, dir, path):
if not os.path.exists(path):
os.mkdir(path)
for e in dir.get_entries():
self.unpack_node(e, path)
def unpack_node(self, node, path):
name = node.name.get_unicode_name()
file_path = os.path.join(path, name)
# store meta info
if self.meta_mode == self.META_MODE_DB:
# get path as FSString
node_path = node.get_node_path_name()
self.meta_db.set_meta_info(node_path.get_unicode(), node.meta_info)
# store meta in .uaem file
elif self.meta_mode == self.META_MODE_FSUAE:
uaem_path = file_path + self.meta_fsuae.get_suffix()
self.meta_fsuae.save_meta(uaem_path, node.meta_info)
# sub dir
if node.is_dir():
sub_dir = file_path
os.mkdir(sub_dir)
for sub_node in node.get_entries():
self.unpack_node(sub_node, sub_dir)
node.flush()
# file
elif node.is_file():
data = node.get_file_data()
node.flush()
fh = open(file_path, "wb")
fh.write(data)
fh.close()
self.total_bytes += len(data)
# ----- pack -----
def pack(self, in_path, image_file, force=True, options=None, dos_type=None):
self.pack_begin(in_path)
blkdev = self.pack_create_blkdev(in_path, image_file, force, options)
if blkdev == None:
raise IOError("Can't create block device for image: "+in_path)
volume = self.pack_create_volume(in_path, blkdev, dos_type)
if not volume.valid:
raise IOError("Can't create volume for image: "+in_path)
self.pack_root(in_path, volume)
self.pack_end(in_path, volume)
def pack_begin(self, in_path):
# remove trailing slash
if in_path[-1] == '/':
in_path = in_path[:-1]
meta_path = in_path + ".xdfmeta"
if os.path.exists(meta_path):
self.meta_db = MetaDB()
self.meta_db.load(meta_path)
def pack_end(self, in_path, volume):
boot_code_path = in_path + ".bootcode"
if os.path.exists(boot_code_path):
# read boot code
f = open(boot_code_path, "rb")
data = f.read()
f.close()
# write boot code
bb = volume.boot
ok = bb.set_boot_code(data)
if ok:
bb.write()
else:
raise IOError("Invalid Boot Code")
def pack_create_blkdev(self, in_path, image_file, force=True, options=None):
factory = BlkDevFactory()
blkdev = None
if not force:
# try to open an existing image or return None
blkdev = factory.open(image_file, none_if_missing=True)
if not blkdev:
# try to read options from blkdev file
if options == None or len(options) == 0:
blkdev_path = in_path + ".blkdev"
def unpack(self, volume, out_path):
# check for volume path
vol_name = volume.name.get_unicode()
if not os.path.exists(out_path):
vol_path = out_path
else:
path = os.path.abspath(out_path)
vol_path = os.path.join(path, vol_name)
if os.path.exists(vol_path):
raise IOError("Unpack directory already exists: " + vol_path)
# check for meta file
meta_path = vol_path + ".xdfmeta"
if os.path.exists(meta_path):
raise IOError("Unpack meta file already exists:" + meta_path)
# check for block dev file
blkdev_path = vol_path + ".blkdev"
if os.path.exists(blkdev_path):
f = open(blkdev_path, "rb")
options = {}
for line in f:
KeyValue.parse_key_value_string(line, options)
f.close()
# create a new blkdev
blkdev = factory.create(image_file, force=force, options=options)
return blkdev
raise IOError("Unpack blkdev file aready exists:" + blkdev_path)
# create volume path
if self.meta_mode != self.META_MODE_NONE:
self.meta_db = MetaDB()
self.unpack_root(volume, vol_path)
# save meta db
if self.meta_db:
self.meta_db.set_volume_name(volume.name.get_unicode())
self.meta_db.set_root_meta_info(volume.get_meta_info())
self.meta_db.set_dos_type(volume.boot.dos_type)
self.meta_db.save(meta_path)
# save boot code
if volume.boot.boot_code != None:
boot_code_path = vol_path + ".bootcode"
f = open(boot_code_path, "wb")
f.write(volume.boot.boot_code)
f.close()
# save blkdev: geo and block size
f = open(blkdev_path, "wb")
msg = "%s\n%s\n" % (
volume.blkdev.get_chs_str(),
volume.blkdev.get_block_size_str(),
)
f.write(msg.encode("UTF-8"))
f.close()
def pack_create_volume(self, in_path, blkdev, dos_type=None):
if self.meta_db != None:
name = self.meta_db.get_volume_name()
meta_info = self.meta_db.get_root_meta_info()
if dos_type is None:
dos_type = self.meta_db.get_dos_type()
else:
# try to derive volume name from image name
if in_path == None or in_path == "":
raise IOError("Invalid pack input path!")
# remove trailing slash
if in_path[-1] == '/':
in_path = in_path[:-1]
name = os.path.basename(in_path)
meta_info = None
if dos_type is None:
dos_type = DosType.DOS0
volume = ADFSVolume(blkdev)
volume.create(FSString(name), meta_info, dos_type=dos_type)
return volume
def unpack_root(self, volume, vol_path):
self.unpack_dir(volume.get_root_dir(), vol_path)
def pack_root(self, in_path, volume):
self.pack_dir(in_path, volume.get_root_dir())
def unpack_dir(self, dir, path):
if not os.path.exists(path):
os.mkdir(path)
for e in dir.get_entries():
self.unpack_node(e, path)
def pack_dir(self, in_path, parent_node):
path = os.path.abspath(in_path)
if not os.path.exists(path):
raise IOError("Pack directory does not exist: "+path)
for name in os.listdir(in_path):
sub_path = os.path.join(in_path, name)
self.pack_entry(sub_path, parent_node)
def unpack_node(self, node, path):
name = node.name.get_unicode_name()
file_path = os.path.join(path, name)
# store meta info
if self.meta_mode == self.META_MODE_DB:
# get path as FSString
node_path = node.get_node_path_name()
self.meta_db.set_meta_info(node_path.get_unicode(), node.meta_info)
# store meta in .uaem file
elif self.meta_mode == self.META_MODE_FSUAE:
uaem_path = file_path + self.meta_fsuae.get_suffix()
self.meta_fsuae.save_meta(uaem_path, node.meta_info)
# sub dir
if node.is_dir():
sub_dir = file_path
os.mkdir(sub_dir)
for sub_node in node.get_entries():
self.unpack_node(sub_node, sub_dir)
node.flush()
# file
elif node.is_file():
data = node.get_file_data()
node.flush()
fh = open(file_path, "wb")
fh.write(data)
fh.close()
self.total_bytes += len(data)
def pack_entry(self, in_path, parent_node):
# skip .uaem files
if self.meta_fsuae.is_meta_file(in_path):
return
# convert amiga name
ami_name = FSString(os.path.basename(in_path)).get_ami_str()
# check for meta file
meta_path = in_path + self.meta_fsuae.get_suffix()
if os.path.isfile(meta_path):
meta_info = self.meta_fsuae.load_meta(meta_path)
# retrieve meta info for path from DB
elif self.meta_db != None:
ami_path = parent_node.get_node_path_name().get_unicode()
if ami_path != "":
ami_path += "/" + ami_name
else:
ami_path = ami_name
meta_info = self.meta_db.get_meta_info(ami_path)
else:
meta_info = None
# ----- pack -----
# pack directory
if os.path.isdir(in_path):
node = parent_node.create_dir(FSString(ami_name), meta_info, False)
for name in os.listdir(in_path):
sub_path = os.path.join(in_path, name)
self.pack_entry(sub_path, node)
node.flush()
# pack file
elif os.path.isfile(in_path):
# read file
fh = open(in_path, "rb")
data = fh.read()
fh.close()
node = parent_node.create_file(FSString(ami_name), data, meta_info, False)
node.flush()
self.total_bytes += len(data)
def pack(self, in_path, image_file, force=True, options=None, dos_type=None):
self.pack_begin(in_path)
blkdev = self.pack_create_blkdev(in_path, image_file, force, options)
if blkdev == None:
raise IOError("Can't create block device for image: " + in_path)
volume = self.pack_create_volume(in_path, blkdev, dos_type)
if not volume.valid:
raise IOError("Can't create volume for image: " + in_path)
self.pack_root(in_path, volume)
self.pack_end(in_path, volume)
def pack_begin(self, in_path):
# remove trailing slash
if in_path[-1] == "/":
in_path = in_path[:-1]
meta_path = in_path + ".xdfmeta"
if os.path.exists(meta_path):
self.meta_db = MetaDB()
self.meta_db.load(meta_path)
def pack_end(self, in_path, volume):
boot_code_path = in_path + ".bootcode"
if os.path.exists(boot_code_path):
# read boot code
f = open(boot_code_path, "rb")
data = f.read()
f.close()
# write boot code
bb = volume.boot
ok = bb.set_boot_code(data)
if ok:
bb.write()
else:
raise IOError("Invalid Boot Code")
def pack_create_blkdev(self, in_path, image_file, force=True, options=None):
factory = BlkDevFactory()
blkdev = None
if not force:
# try to open an existing image or return None
blkdev = factory.open(image_file, none_if_missing=True)
if not blkdev:
# try to read options from blkdev file
if options == None or len(options) == 0:
blkdev_path = in_path + ".blkdev"
if os.path.exists(blkdev_path):
f = open(blkdev_path, "rb")
options = {}
for line in f:
KeyValue.parse_key_value_string(line, options)
f.close()
# create a new blkdev
blkdev = factory.create(image_file, force=force, options=options)
return blkdev
def pack_create_volume(self, in_path, blkdev, dos_type=None):
if self.meta_db != None:
name = self.meta_db.get_volume_name()
meta_info = self.meta_db.get_root_meta_info()
if dos_type is None:
dos_type = self.meta_db.get_dos_type()
else:
# try to derive volume name from image name
if in_path == None or in_path == "":
raise IOError("Invalid pack input path!")
# remove trailing slash
if in_path[-1] == "/":
in_path = in_path[:-1]
name = os.path.basename(in_path)
meta_info = None
if dos_type is None:
dos_type = DosType.DOS0
volume = ADFSVolume(blkdev)
volume.create(FSString(name), meta_info, dos_type=dos_type)
return volume
def pack_root(self, in_path, volume):
self.pack_dir(in_path, volume.get_root_dir())
def pack_dir(self, in_path, parent_node):
path = os.path.abspath(in_path)
if not os.path.exists(path):
raise IOError("Pack directory does not exist: " + path)
for name in os.listdir(in_path):
sub_path = os.path.join(in_path, name)
self.pack_entry(sub_path, parent_node)
def pack_entry(self, in_path, parent_node):
# skip .uaem files
if self.meta_fsuae.is_meta_file(in_path):
return
# convert amiga name
ami_name = FSString(os.path.basename(in_path)).get_ami_str()
# check for meta file
meta_path = in_path + self.meta_fsuae.get_suffix()
if os.path.isfile(meta_path):
meta_info = self.meta_fsuae.load_meta(meta_path)
# retrieve meta info for path from DB
elif self.meta_db != None:
ami_path = parent_node.get_node_path_name().get_unicode()
if ami_path != "":
ami_path += "/" + ami_name
else:
ami_path = ami_name
meta_info = self.meta_db.get_meta_info(ami_path)
else:
meta_info = None
# pack directory
if os.path.isdir(in_path):
node = parent_node.create_dir(FSString(ami_name), meta_info, False)
for name in os.listdir(in_path):
sub_path = os.path.join(in_path, name)
self.pack_entry(sub_path, node)
node.flush()
# pack file
elif os.path.isfile(in_path):
# read file
fh = open(in_path, "rb")
data = fh.read()
fh.close()
node = parent_node.create_file(FSString(ami_name), data, meta_info, False)
node.flush()
self.total_bytes += len(data)

View File

@ -1,6 +1,3 @@
from .MetaInfo import MetaInfo
from .RootMetaInfo import RootMetaInfo
from .ProtectFlags import ProtectFlags
@ -9,141 +6,148 @@ from amitools.fs.block.BootBlock import BootBlock
from . import DosType
from .FSString import FSString
class MetaDB:
def __init__(self):
self.metas = {}
self.vol_name = None
self.vol_meta = None
self.dos_type = DosType.DOS0
def set_root_meta_info(self, meta):
self.vol_meta = meta
def get_root_meta_info(self):
return self.vol_meta
def set_volume_name(self, name):
if type(name) != str:
raise ValueError("set_volume_name must be unicode")
self.vol_name = name
def get_volume_name(self):
return self.vol_name
def set_dos_type(self, dos_type):
self.dos_type = dos_type
def get_dos_type(self):
return self.dos_type
def set_meta_info(self, path, meta_info):
if type(path) != str:
raise ValueError("set_meta_info: path must be unicode")
self.metas[path] = meta_info
def get_meta_info(self, path):
if path in self.metas:
return self.metas[path]
else:
return None
def dump(self):
print(self.vol_name, self.vol_meta, self.dos_type)
for m in self.metas:
print(m)
# ----- load -----
def load(self, file_path):
self.metas = {}
f = open(file_path, "r")
first = True
for line in f:
if first:
self.load_header(line)
first = False
else:
self.load_entry(line)
f.close()
def load_header(self, line):
pos = line.find(':')
if pos == -1:
raise IOError("Invalid xdfmeta header! (no colon in line)")
# first extract volume name
vol_name = line[:pos]
self.vol_name = vol_name.decode("UTF-8")
line = line[pos+1:]
# now get parameters
comp = line.split(',')
if len(comp) != 4:
raise IOError("Invalid xdfmeta header! (wrong number of parameters found)")
# first dos type
dos_type_str = comp[0]
if len(dos_type_str) != 4:
raise IOError("Invalid xdfmeta dostype string")
num = ord(dos_type_str[3]) - ord('0')
if num < 0 or num > 7:
raise IOError("Invalid xdfmeta dostype number")
self.dos_type = DosType.DOS0 + num
# then time stamps
create_ts = TimeStamp()
ok1 = create_ts.parse(comp[1])
disk_ts = TimeStamp()
ok2 = disk_ts.parse(comp[2])
mod_ts = TimeStamp()
ok3 = mod_ts.parse(comp[3])
if not ok1 or not ok2 or not ok3:
raise IOError("Invalid xdfmeta header! (invalid timestamp found)")
self.vol_meta = RootMetaInfo(create_ts, disk_ts, mod_ts)
def load_entry(self, line):
line = line.strip()
# path
pos = line.find(':')
if pos == -1:
raise IOError("Invalid xdfmeta file! (no colon in line)")
path = line[:pos].decode("UTF-8")
# prot
line = line[pos+1:]
pos = line.find(',')
if pos == -1:
raise IOError("Invalid xdfmeta file! (no first comma)")
prot_str = line[:pos]
prot = ProtectFlags()
prot.parse(prot_str)
# time
line = line[pos+1:]
pos = line.find(',')
if pos == -1:
raise IOError("Invalid xdfmeta file! (no second comma)")
time_str = line[:pos]
time = TimeStamp()
time.parse(time_str)
# comment
comment = FSString(line[pos+1:].decode("UTF-8"))
# meta info
mi = MetaInfo(protect_flags=prot, mod_ts=time, comment=comment)
self.set_meta_info(path, mi)
# ----- save -----
def save(self, file_path):
f = open(file_path, "w")
# header
mi = self.vol_meta
num = self.dos_type - DosType.DOS0 + ord('0')
dos_type_str = "DOS%c" % num
vol_name = self.vol_name.encode("UTF-8")
line = "%s:%s,%s,%s,%s\n" % (vol_name, dos_type_str, mi.get_create_ts(), mi.get_disk_ts(), mi.get_mod_ts())
f.write(line)
# entries
for path in sorted(self.metas):
meta_info = self.metas[path]
protect = meta_info.get_protect_short_str()
mod_time = meta_info.get_mod_time_str()
comment = meta_info.get_comment_unicode_str().encode("UTF-8")
path_name = path.encode("UTF-8")
line = "%s:%s,%s,%s\n" % (path_name, protect, mod_time, comment)
f.write(line)
f.close()
def __init__(self):
self.metas = {}
self.vol_name = None
self.vol_meta = None
self.dos_type = DosType.DOS0
def set_root_meta_info(self, meta):
self.vol_meta = meta
def get_root_meta_info(self):
return self.vol_meta
def set_volume_name(self, name):
if type(name) != str:
raise ValueError("set_volume_name must be unicode")
self.vol_name = name
def get_volume_name(self):
return self.vol_name
def set_dos_type(self, dos_type):
self.dos_type = dos_type
def get_dos_type(self):
return self.dos_type
def set_meta_info(self, path, meta_info):
if type(path) != str:
raise ValueError("set_meta_info: path must be unicode")
self.metas[path] = meta_info
def get_meta_info(self, path):
if path in self.metas:
return self.metas[path]
else:
return None
def dump(self):
print(self.vol_name, self.vol_meta, self.dos_type)
for m in self.metas:
print(m)
# ----- load -----
def load(self, file_path):
self.metas = {}
f = open(file_path, "r")
first = True
for line in f:
if first:
self.load_header(line)
first = False
else:
self.load_entry(line)
f.close()
def load_header(self, line):
pos = line.find(":")
if pos == -1:
raise IOError("Invalid xdfmeta header! (no colon in line)")
# first extract volume name
vol_name = line[:pos]
self.vol_name = vol_name.decode("UTF-8")
line = line[pos + 1 :]
# now get parameters
comp = line.split(",")
if len(comp) != 4:
raise IOError("Invalid xdfmeta header! (wrong number of parameters found)")
# first dos type
dos_type_str = comp[0]
if len(dos_type_str) != 4:
raise IOError("Invalid xdfmeta dostype string")
num = ord(dos_type_str[3]) - ord("0")
if num < 0 or num > 7:
raise IOError("Invalid xdfmeta dostype number")
self.dos_type = DosType.DOS0 + num
# then time stamps
create_ts = TimeStamp()
ok1 = create_ts.parse(comp[1])
disk_ts = TimeStamp()
ok2 = disk_ts.parse(comp[2])
mod_ts = TimeStamp()
ok3 = mod_ts.parse(comp[3])
if not ok1 or not ok2 or not ok3:
raise IOError("Invalid xdfmeta header! (invalid timestamp found)")
self.vol_meta = RootMetaInfo(create_ts, disk_ts, mod_ts)
def load_entry(self, line):
line = line.strip()
# path
pos = line.find(":")
if pos == -1:
raise IOError("Invalid xdfmeta file! (no colon in line)")
path = line[:pos].decode("UTF-8")
# prot
line = line[pos + 1 :]
pos = line.find(",")
if pos == -1:
raise IOError("Invalid xdfmeta file! (no first comma)")
prot_str = line[:pos]
prot = ProtectFlags()
prot.parse(prot_str)
# time
line = line[pos + 1 :]
pos = line.find(",")
if pos == -1:
raise IOError("Invalid xdfmeta file! (no second comma)")
time_str = line[:pos]
time = TimeStamp()
time.parse(time_str)
# comment
comment = FSString(line[pos + 1 :].decode("UTF-8"))
# meta info
mi = MetaInfo(protect_flags=prot, mod_ts=time, comment=comment)
self.set_meta_info(path, mi)
# ----- save -----
def save(self, file_path):
f = open(file_path, "w")
# header
mi = self.vol_meta
num = self.dos_type - DosType.DOS0 + ord("0")
dos_type_str = "DOS%c" % num
vol_name = self.vol_name.encode("UTF-8")
line = "%s:%s,%s,%s,%s\n" % (
vol_name,
dos_type_str,
mi.get_create_ts(),
mi.get_disk_ts(),
mi.get_mod_ts(),
)
f.write(line)
# entries
for path in sorted(self.metas):
meta_info = self.metas[path]
protect = meta_info.get_protect_short_str()
mod_time = meta_info.get_mod_time_str()
comment = meta_info.get_comment_unicode_str().encode("UTF-8")
path_name = path.encode("UTF-8")
line = "%s:%s,%s,%s\n" % (path_name, protect, mod_time, comment)
f.write(line)
f.close()

View File

@ -1,112 +1,110 @@
from .ProtectFlags import *
from .TimeStamp import *
from .FSString import FSString
class MetaInfo:
def __init__(self, protect=None, mod_ts=None, comment=None, protect_flags=None):
if protect_flags != None:
self.set_protect_flags(protect_flags)
else:
self.set_protect(protect)
self.set_mod_ts(mod_ts)
self.set_comment(comment)
def get_str_line(self):
"""Return a unicode string with protect flags, mod time and (optional) comment"""
res = []
res.append(self.get_protect_str())
res.append(self.get_mod_time_str())
comment = self.get_comment()
if comment == None:
res.append('')
else:
res.append(self.get_comment().get_unicode())
return ' '.join(res)
def get_mod_time_str(self):
if self.mod_ts != None:
return str(self.mod_ts)
else:
return ts_empty_string
def get_protect_str(self):
if self.protect_flags != None:
return str(self.protect_flags)
else:
return ProtectFlags.empty_string
def get_protect_short_str(self):
if self.protect_flags != None:
return self.protect_flags.short_str()
else:
return ""
def set_protect(self, protect):
self.protect = protect
if self.protect != None:
self.protect_flags = ProtectFlags(protect)
else:
self.protect_flags = None
def set_protect_flags(self, pf):
self.protect_flags = pf
self.protect = pf.mask
def set_default_protect(self):
self.protect = 0
self.protect_flags = ProtectFlags(self.protect)
def set_current_as_mod_time(self):
mod_time = time.mktime(time.localtime())
self.set_mod_time(mod_time)
def set_mod_time(self, mod_time):
self.mod_time = mod_time
if self.mod_time != None:
self.mod_ts = TimeStamp()
self.mod_ts.from_secs(mod_time)
else:
self.mod_ts = None
def set_mod_ts(self, mod_ts):
self.mod_ts = mod_ts
if self.mod_ts != None:
self.mod_time = self.mod_ts.get_secsf()
else:
self.mod_time = None
def set_comment(self, comment):
"""Set comment as a FSString"""
if comment != None and not isinstance(comment, FSString):
raise ValueError("Comment must be a FSString")
self.comment = comment
def get_protect(self):
return self.protect
def get_protect_flags(self):
return self.protect_flags
def get_mod_time(self):
return self.mod_time
def __init__(self, protect=None, mod_ts=None, comment=None, protect_flags=None):
if protect_flags != None:
self.set_protect_flags(protect_flags)
else:
self.set_protect(protect)
self.set_mod_ts(mod_ts)
self.set_comment(comment)
def get_mod_ts(self):
return self.mod_ts
def get_str_line(self):
"""Return a unicode string with protect flags, mod time and (optional) comment"""
res = []
res.append(self.get_protect_str())
res.append(self.get_mod_time_str())
comment = self.get_comment()
if comment == None:
res.append("")
else:
res.append(self.get_comment().get_unicode())
return " ".join(res)
def get_comment(self):
return self.comment
def get_mod_time_str(self):
if self.mod_ts != None:
return str(self.mod_ts)
else:
return ts_empty_string
def get_comment_ami_str(self):
if self.comment != None:
return self.comment.get_ami_str()
else:
return ""
def get_comment_unicode_str(self):
if self.comment != None:
return self.comment.get_unicode()
else:
return ""
def get_protect_str(self):
if self.protect_flags != None:
return str(self.protect_flags)
else:
return ProtectFlags.empty_string
def get_protect_short_str(self):
if self.protect_flags != None:
return self.protect_flags.short_str()
else:
return ""
def set_protect(self, protect):
self.protect = protect
if self.protect != None:
self.protect_flags = ProtectFlags(protect)
else:
self.protect_flags = None
def set_protect_flags(self, pf):
self.protect_flags = pf
self.protect = pf.mask
def set_default_protect(self):
self.protect = 0
self.protect_flags = ProtectFlags(self.protect)
def set_current_as_mod_time(self):
mod_time = time.mktime(time.localtime())
self.set_mod_time(mod_time)
def set_mod_time(self, mod_time):
self.mod_time = mod_time
if self.mod_time != None:
self.mod_ts = TimeStamp()
self.mod_ts.from_secs(mod_time)
else:
self.mod_ts = None
def set_mod_ts(self, mod_ts):
self.mod_ts = mod_ts
if self.mod_ts != None:
self.mod_time = self.mod_ts.get_secsf()
else:
self.mod_time = None
def set_comment(self, comment):
"""Set comment as a FSString"""
if comment != None and not isinstance(comment, FSString):
raise ValueError("Comment must be a FSString")
self.comment = comment
def get_protect(self):
return self.protect
def get_protect_flags(self):
return self.protect_flags
def get_mod_time(self):
return self.mod_time
def get_mod_ts(self):
return self.mod_ts
def get_comment(self):
return self.comment
def get_comment_ami_str(self):
if self.comment != None:
return self.comment.get_ami_str()
else:
return ""
def get_comment_unicode_str(self):
if self.comment != None:
return self.comment.get_unicode()
else:
return ""

View File

@ -11,20 +11,19 @@ TS_FORMAT = "%Y-%m-%d %H:%M:%S"
class MetaInfoFSUAE:
@staticmethod
def is_meta_file(path):
return path.lower().endswith(".uaem")
@staticmethod
def get_suffix():
return ".uaem"
def load_meta(self, path):
with open(path, "rb") as fh:
data = fh.read().decode('utf-8')
data = fh.read().decode("utf-8")
return self.parse_data(data)
def parse_data(self, data):
if data.endswith("\n"):
data = data[:-1]
@ -68,4 +67,4 @@ class MetaInfoFSUAE:
def save_meta(self, path, meta_info):
with open(path, "wb") as fh:
txt = self.generate_data(meta_info)
fh.write(txt.encode('utf-8'))
fh.write(txt.encode("utf-8"))

View File

@ -1,143 +1,145 @@
from .FSError import *
class ProtectFlags:
FIBF_DELETE = 1
FIBF_EXECUTE = 2
FIBF_WRITE = 4
FIBF_READ = 8
FIBF_ARCHIVE = 16
FIBF_PURE = 32
FIBF_SCRIPT = 64
flag_txt = "HSPArwed"
flag_num = len(flag_txt)
flag_none = 0xf # --------
empty_string = "-" * flag_num
def __init__(self, mask=0):
self.mask = mask
FIBF_DELETE = 1
FIBF_EXECUTE = 2
FIBF_WRITE = 4
FIBF_READ = 8
FIBF_ARCHIVE = 16
FIBF_PURE = 32
FIBF_SCRIPT = 64
def get_mask(self):
return self.mask
flag_txt = "HSPArwed"
flag_num = len(flag_txt)
flag_none = 0xF # --------
empty_string = "-" * flag_num
def __str__(self):
txt = ""
pos = self.flag_num - 1
m = 1 << pos
for i in range(self.flag_num):
bit = self.mask & m == m
show = '-'
flg = self.flag_txt[i]
flg_low = flg.lower()
if bit:
if flg_low != flg:
show = flg_low
else:
if flg_low == flg:
show = flg_low
txt += show
m >>= 1
pos -= 1
return txt
def bin_str(self):
res = ""
m = 1 << (self.flag_num - 1)
for i in range(self.flag_num):
if m & self.mask == m:
res += "1"
else:
res += "0"
m >>= 1
return res
def __init__(self, mask=0):
self.mask = mask
def short_str(self):
return str(self).replace("-","")
def get_mask(self):
return self.mask
def parse_full(self, s):
"""parse a string with all flags"""
n = len(self.flag_txt)
if len(s) != n:
raise ValueError("full string size mismatch!")
mask = 0
for i in range(n):
val = s[i]
ref = self.flag_txt[i]
ref_lo = ref.lower()
if val not in (ref, ref_lo, '-'):
raise ValueError("invalid protect char: " + val)
is_lo = ref == ref_lo
is_blank = val == '-'
if is_lo:
do_set = is_blank
else:
do_set = not is_blank
if do_set:
bit_pos = n - i - 1
bit_mask = 1 << bit_pos
mask |= bit_mask
self.mask = mask
def parse(self, s):
if len(s) == 0:
return
# allow to add with '+' or sub with '-'
n = self.flag_txt
mode = '+'
self.mask = self.flag_none
for a in s.lower():
if a in '+-':
mode = a
else:
mask = None
is_low = None
def __str__(self):
txt = ""
pos = self.flag_num - 1
m = 1 << pos
for i in range(self.flag_num):
flg = self.flag_txt[i]
flg_low = flg.lower()
if flg_low == a:
mask = 1<<(self.flag_num - 1 - i)
is_low = flg_low == flg
break
if mask == None:
raise FSError(INVALID_PROTECT_FORMAT,extra="char: "+a)
# apply mask
if mode == '+':
if is_low:
self.mask &= ~mask
else:
self.mask |= mask
else:
if is_low:
self.mask |= mask
else:
self.mask &= ~mask
def is_set(self, mask):
return self.mask & mask == 0 # LO active
def set(self, mask):
self.mask &= ~mask
def clr(self, mask):
self.mask |= mask
bit = self.mask & m == m
show = "-"
flg = self.flag_txt[i]
flg_low = flg.lower()
if bit:
if flg_low != flg:
show = flg_low
else:
if flg_low == flg:
show = flg_low
txt += show
m >>= 1
pos -= 1
return txt
def is_d(self):
return self.is_set(self.FIBF_DELETE)
def is_e(self):
return self.is_set(self.FIBF_EXECUTE)
def is_w(self):
return self.is_set(self.FIBF_WRITE)
def is_r(self):
return self.is_set(self.FIBF_READ)
def bin_str(self):
res = ""
m = 1 << (self.flag_num - 1)
for i in range(self.flag_num):
if m & self.mask == m:
res += "1"
else:
res += "0"
m >>= 1
return res
if __name__ == '__main__':
inp = ["h","s","p","a","r","w","e","d"]
for i in inp:
p = ProtectFlags()
p.parse(i)
s = str(p)
if not i in s:
print(s)
def short_str(self):
return str(self).replace("-", "")
def parse_full(self, s):
"""parse a string with all flags"""
n = len(self.flag_txt)
if len(s) != n:
raise ValueError("full string size mismatch!")
mask = 0
for i in range(n):
val = s[i]
ref = self.flag_txt[i]
ref_lo = ref.lower()
if val not in (ref, ref_lo, "-"):
raise ValueError("invalid protect char: " + val)
is_lo = ref == ref_lo
is_blank = val == "-"
if is_lo:
do_set = is_blank
else:
do_set = not is_blank
if do_set:
bit_pos = n - i - 1
bit_mask = 1 << bit_pos
mask |= bit_mask
self.mask = mask
def parse(self, s):
if len(s) == 0:
return
# allow to add with '+' or sub with '-'
n = self.flag_txt
mode = "+"
self.mask = self.flag_none
for a in s.lower():
if a in "+-":
mode = a
else:
mask = None
is_low = None
for i in range(self.flag_num):
flg = self.flag_txt[i]
flg_low = flg.lower()
if flg_low == a:
mask = 1 << (self.flag_num - 1 - i)
is_low = flg_low == flg
break
if mask == None:
raise FSError(INVALID_PROTECT_FORMAT, extra="char: " + a)
# apply mask
if mode == "+":
if is_low:
self.mask &= ~mask
else:
self.mask |= mask
else:
if is_low:
self.mask |= mask
else:
self.mask &= ~mask
def is_set(self, mask):
return self.mask & mask == 0 # LO active
def set(self, mask):
self.mask &= ~mask
def clr(self, mask):
self.mask |= mask
def is_d(self):
return self.is_set(self.FIBF_DELETE)
def is_e(self):
return self.is_set(self.FIBF_EXECUTE)
def is_w(self):
return self.is_set(self.FIBF_WRITE)
def is_r(self):
return self.is_set(self.FIBF_READ)
if __name__ == "__main__":
inp = ["h", "s", "p", "a", "r", "w", "e", "d"]
for i in inp:
p = ProtectFlags()
p.parse(i)
s = str(p)
if not i in s:
print(s)

View File

@ -1,84 +1,88 @@
from .ADFSVolume import ADFSVolume
from amitools.fs.blkdev.BlkDevFactory import BlkDevFactory
class Repacker:
def __init__(self, in_image_file, in_options=None):
self.in_image_file = in_image_file
self.in_options = in_options
self.in_blkdev = None
self.out_blkdev = None
self.in_volume = None
self.out_volume = None
def create_in_blkdev(self):
f = BlkDevFactory()
self.in_blkdev = f.open(self.in_image_file, read_only=True, options=self.in_options)
return self.in_blkdev
def create_in_volume(self):
if self.in_blkdev == None:
return None
self.in_volume = ADFSVolume(self.in_blkdev)
self.in_volume.open()
return self.in_volume
def create_in(self):
if self.create_in_blkdev() == None:
return False
if self.create_in_volume() == None:
return False
return True
def create_out_blkdev(self, image_file, force=True, options=None):
if self.in_blkdev == None:
return None
# clone geo from input
if options == None:
options = self.in_blkdev.get_options()
f = BlkDevFactory()
self.out_blkdev = f.create(image_file, force=force, options=options)
return self.out_blkdev
def create_out_volume(self, blkdev=None):
if blkdev != None:
self.out_blkdev = blkdev
if self.out_blkdev == None:
return None
if self.in_volume == None:
return None
# clone input volume
iv = self.in_volume
name = iv.get_volume_name()
dos_type = iv.get_dos_type()
meta_info = iv.get_meta_info()
boot_code = iv.get_boot_code()
self.out_volume = ADFSVolume(self.out_blkdev)
self.out_volume.create(name, meta_info=meta_info, dos_type=dos_type, boot_code=boot_code)
return self.out_volume
def repack(self):
self.repack_node_dir(self.in_volume.get_root_dir(), self.out_volume.get_root_dir())
def repack_node_dir(self, in_root, out_root):
entries = in_root.get_entries()
for e in entries:
self.repack_node(e, out_root)
def repack_node(self, in_node, out_dir):
name = in_node.get_file_name().get_name()
meta_info = in_node.get_meta_info()
# sub dir
if in_node.is_dir():
sub_dir = out_dir.create_dir(name, meta_info, False)
for child in in_node.get_entries():
self.repack_node(child, sub_dir)
sub_dir.flush()
# file
elif in_node.is_file():
data = in_node.get_file_data()
out_file = out_dir.create_file(name, data, meta_info, False)
out_file.flush()
in_node.flush()
def __init__(self, in_image_file, in_options=None):
self.in_image_file = in_image_file
self.in_options = in_options
self.in_blkdev = None
self.out_blkdev = None
self.in_volume = None
self.out_volume = None
def create_in_blkdev(self):
f = BlkDevFactory()
self.in_blkdev = f.open(
self.in_image_file, read_only=True, options=self.in_options
)
return self.in_blkdev
def create_in_volume(self):
if self.in_blkdev == None:
return None
self.in_volume = ADFSVolume(self.in_blkdev)
self.in_volume.open()
return self.in_volume
def create_in(self):
if self.create_in_blkdev() == None:
return False
if self.create_in_volume() == None:
return False
return True
def create_out_blkdev(self, image_file, force=True, options=None):
if self.in_blkdev == None:
return None
# clone geo from input
if options == None:
options = self.in_blkdev.get_options()
f = BlkDevFactory()
self.out_blkdev = f.create(image_file, force=force, options=options)
return self.out_blkdev
def create_out_volume(self, blkdev=None):
if blkdev != None:
self.out_blkdev = blkdev
if self.out_blkdev == None:
return None
if self.in_volume == None:
return None
# clone input volume
iv = self.in_volume
name = iv.get_volume_name()
dos_type = iv.get_dos_type()
meta_info = iv.get_meta_info()
boot_code = iv.get_boot_code()
self.out_volume = ADFSVolume(self.out_blkdev)
self.out_volume.create(
name, meta_info=meta_info, dos_type=dos_type, boot_code=boot_code
)
return self.out_volume
def repack(self):
self.repack_node_dir(
self.in_volume.get_root_dir(), self.out_volume.get_root_dir()
)
def repack_node_dir(self, in_root, out_root):
entries = in_root.get_entries()
for e in entries:
self.repack_node(e, out_root)
def repack_node(self, in_node, out_dir):
name = in_node.get_file_name().get_name()
meta_info = in_node.get_meta_info()
# sub dir
if in_node.is_dir():
sub_dir = out_dir.create_dir(name, meta_info, False)
for child in in_node.get_entries():
self.repack_node(child, sub_dir)
sub_dir.flush()
# file
elif in_node.is_file():
data = in_node.get_file_data()
out_file = out_dir.create_file(name, data, meta_info, False)
out_file.flush()
in_node.flush()

View File

@ -1,116 +1,112 @@
import time
from .TimeStamp import *
class RootMetaInfo:
def __init__(self, create_ts=None, disk_ts=None, mod_ts=None):
self.set_create_ts(create_ts)
self.set_disk_ts(disk_ts)
self.set_mod_ts(mod_ts)
def __str__(self):
res = []
res.append(self.get_create_time_str())
res.append(self.get_disk_time_str())
res.append(self.get_mod_time_str())
return " ".join(res)
# create_ts
def set_create_time(self, create_time):
self.create_time = create_time
if self.create_time != None:
self.create_ts = TimeStamp()
self.create_ts.from_secs(create_time)
else:
self.create_ts = None
def set_create_ts(self, create_ts):
self.create_ts = create_ts
if self.create_ts != None:
self.create_time = self.create_ts.get_secsf()
else:
self.create_time = None
def get_create_time(self):
return self.create_time
def get_create_ts(self):
return self.create_ts
def __init__(self, create_ts=None, disk_ts=None, mod_ts=None):
self.set_create_ts(create_ts)
self.set_disk_ts(disk_ts)
self.set_mod_ts(mod_ts)
def get_create_time_str(self):
if self.create_ts != None:
return str(self.create_ts)
else:
return ts_empty_string
def __str__(self):
res = []
res.append(self.get_create_time_str())
res.append(self.get_disk_time_str())
res.append(self.get_mod_time_str())
return " ".join(res)
def set_current_as_create_time(self):
create_time = time.mktime(time.localtime())
self.set_create_time(create_time)
# create_ts
def set_create_time(self, create_time):
self.create_time = create_time
if self.create_time != None:
self.create_ts = TimeStamp()
self.create_ts.from_secs(create_time)
else:
self.create_ts = None
# disk_ts
def set_disk_time(self, disk_time):
self.disk_time = disk_time
if self.disk_time != None:
self.disk_ts = TimeStamp()
self.disk_ts.from_secs(disk_time)
else:
self.disk_ts = None
def set_disk_ts(self, disk_ts):
self.disk_ts = disk_ts
if self.disk_ts != None:
self.disk_time = self.disk_ts.get_secsf()
else:
self.disk_time = None
def get_disk_time(self):
return self.disk_time
def get_disk_ts(self):
return self.disk_ts
def set_create_ts(self, create_ts):
self.create_ts = create_ts
if self.create_ts != None:
self.create_time = self.create_ts.get_secsf()
else:
self.create_time = None
def get_disk_time_str(self):
if self.disk_ts != None:
return str(self.disk_ts)
else:
return ts_empty_string
def get_create_time(self):
return self.create_time
def set_current_as_disk_time(self):
disk_time = time.mktime(time.localtime())
self.set_disk_time(disk_time)
def get_create_ts(self):
return self.create_ts
# mod_ts
def set_mod_time(self, mod_time):
self.mod_time = mod_time
if self.mod_time != None:
self.mod_ts = TimeStamp()
self.mod_ts.from_secs(mod_time)
else:
self.mod_ts = None
def set_mod_ts(self, mod_ts):
self.mod_ts = mod_ts
if self.mod_ts != None:
self.mod_time = self.mod_ts.get_secsf()
else:
self.mod_time = None
def get_mod_time(self):
return self.mod_time
def get_mod_ts(self):
return self.mod_ts
def get_mod_time_str(self):
if self.mod_ts != None:
return str(self.mod_ts)
else:
return ts_empty_string
def set_current_as_mod_time(self):
mod_time = time.mktime(time.localtime())
self.set_mod_time(mod_time)
def get_create_time_str(self):
if self.create_ts != None:
return str(self.create_ts)
else:
return ts_empty_string
def set_current_as_create_time(self):
create_time = time.mktime(time.localtime())
self.set_create_time(create_time)
# disk_ts
def set_disk_time(self, disk_time):
self.disk_time = disk_time
if self.disk_time != None:
self.disk_ts = TimeStamp()
self.disk_ts.from_secs(disk_time)
else:
self.disk_ts = None
def set_disk_ts(self, disk_ts):
self.disk_ts = disk_ts
if self.disk_ts != None:
self.disk_time = self.disk_ts.get_secsf()
else:
self.disk_time = None
def get_disk_time(self):
return self.disk_time
def get_disk_ts(self):
return self.disk_ts
def get_disk_time_str(self):
if self.disk_ts != None:
return str(self.disk_ts)
else:
return ts_empty_string
def set_current_as_disk_time(self):
disk_time = time.mktime(time.localtime())
self.set_disk_time(disk_time)
# mod_ts
def set_mod_time(self, mod_time):
self.mod_time = mod_time
if self.mod_time != None:
self.mod_ts = TimeStamp()
self.mod_ts.from_secs(mod_time)
else:
self.mod_ts = None
def set_mod_ts(self, mod_ts):
self.mod_ts = mod_ts
if self.mod_ts != None:
self.mod_time = self.mod_ts.get_secsf()
else:
self.mod_time = None
def get_mod_time(self):
return self.mod_time
def get_mod_ts(self):
return self.mod_ts
def get_mod_time_str(self):
if self.mod_ts != None:
return str(self.mod_ts)
else:
return ts_empty_string
def set_current_as_mod_time(self):
mod_time = time.mktime(time.localtime())
self.set_mod_time(mod_time)

View File

@ -1,6 +1,3 @@
import time
ts_empty_string = "--.--.---- --:--:--.--"
@ -10,78 +7,80 @@ ts_format = "%d.%m.%Y %H:%M:%S"
# which is 1970, but Amiga specs say that 1978 is the base year.
amiga_epoch = time.mktime(time.strptime("01.01.1978 00:00:00", ts_format))
class TimeStamp:
def __init__(self, days=0, mins=0, ticks=0):
self.days = days
self.mins = mins
self.ticks = ticks
self.secs = days * 24 * 60 * 60 + mins * 60 + (ticks // 50)
self.sub_secs = (ticks % 50)
def __str__(self):
t = time.localtime(self.secs + amiga_epoch)
ts = time.strftime(ts_format, t)
return "%s.%02d" % (ts, self.sub_secs)
def format(self, my_format):
t = time.localtime(self.secs + amiga_epoch)
return time.strftime(my_format, t)
def __init__(self, days=0, mins=0, ticks=0):
self.days = days
self.mins = mins
self.ticks = ticks
self.secs = days * 24 * 60 * 60 + mins * 60 + (ticks // 50)
self.sub_secs = ticks % 50
def get_secsf(self):
return self.secs + self.sub_secs / 50.0
def get_secs(self):
return self.secs
def __str__(self):
t = time.localtime(self.secs + amiga_epoch)
ts = time.strftime(ts_format, t)
return "%s.%02d" % (ts, self.sub_secs)
def get_sub_secs(self):
return self.sub_secs
def from_secs(self, secs, sub_secs=0):
secs = int(secs - amiga_epoch)
ticks = secs * 50
mins = ticks // (50 * 60)
self.ticks = ticks % (50 * 60)
self.days = mins // (60 * 24)
self.mins = mins % (60 * 24)
self.secs = secs
self.sub_secs = sub_secs
def parse(self, s):
# check for ticks
s = s.strip()
ticks = 0
if len(s) > 3:
# ticks
t = s[-3:]
# old notation ' t00'
if t[0] == 't' and t[1:].isdigit():
ticks = int(t[1:])
s = s[:-4]
# new notation '.00'
elif t[0] == '.' and t[1:].isdigit():
ticks = int(t[1:])
s = s[:-3]
# parse normal time
try:
ts = time.strptime(s, ts_format)
secs = int(time.mktime(ts))
self.from_secs(secs)
self.sub_secs = ticks
self.ticks += ticks
return True
except ValueError:
return False
if __name__ == '__main__':
ts = TimeStamp()
ts.from_secs(123)
ts2 = TimeStamp(days=ts.days, mins=ts.mins, ticks=ts.ticks)
if ts2.get_secs() != 123:
print("FAIL")
ts = TimeStamp()
s = "05.01.2012 21:47:34 t40"
ts.parse(s)
txt = str(ts)
if s != txt:
print("FAIL")
def format(self, my_format):
t = time.localtime(self.secs + amiga_epoch)
return time.strftime(my_format, t)
def get_secsf(self):
return self.secs + self.sub_secs / 50.0
def get_secs(self):
return self.secs
def get_sub_secs(self):
return self.sub_secs
def from_secs(self, secs, sub_secs=0):
secs = int(secs - amiga_epoch)
ticks = secs * 50
mins = ticks // (50 * 60)
self.ticks = ticks % (50 * 60)
self.days = mins // (60 * 24)
self.mins = mins % (60 * 24)
self.secs = secs
self.sub_secs = sub_secs
def parse(self, s):
# check for ticks
s = s.strip()
ticks = 0
if len(s) > 3:
# ticks
t = s[-3:]
# old notation ' t00'
if t[0] == "t" and t[1:].isdigit():
ticks = int(t[1:])
s = s[:-4]
# new notation '.00'
elif t[0] == "." and t[1:].isdigit():
ticks = int(t[1:])
s = s[:-3]
# parse normal time
try:
ts = time.strptime(s, ts_format)
secs = int(time.mktime(ts))
self.from_secs(secs)
self.sub_secs = ticks
self.ticks += ticks
return True
except ValueError:
return False
if __name__ == "__main__":
ts = TimeStamp()
ts.from_secs(123)
ts2 = TimeStamp(days=ts.days, mins=ts.mins, ticks=ts.ticks)
if ts2.get_secs() != 123:
print("FAIL")
ts = TimeStamp()
s = "05.01.2012 21:47:34 t40"
ts.parse(s)
txt = str(ts)
if s != txt:
print("FAIL")

View File

@ -1,122 +1,132 @@
from .BlockDevice import BlockDevice
import ctypes
import gzip
import io
class ADFBlockDevice(BlockDevice):
def __init__(self, adf_file, read_only=False, fobj=None):
self.adf_file = adf_file
self.read_only = read_only
self.fobj = fobj
self.dirty = False
lo = adf_file.lower()
self.gzipped = lo.endswith('.adz') or lo.endswith('.adf.gz')
def __init__(self, adf_file, read_only=False, fobj=None):
self.adf_file = adf_file
self.read_only = read_only
self.fobj = fobj
self.dirty = False
lo = adf_file.lower()
self.gzipped = lo.endswith(".adz") or lo.endswith(".adf.gz")
def create(self):
if self.read_only:
raise IOError("ADF creation not allowed in read-only mode!")
self._set_geometry() # set default geometry
# allocate image in memory
self.data = ctypes.create_string_buffer(self.num_bytes)
self.dirty = True
def create(self):
if self.read_only:
raise IOError("ADF creation not allowed in read-only mode!")
self._set_geometry() # set default geometry
# allocate image in memory
self.data = ctypes.create_string_buffer(self.num_bytes)
self.dirty = True
def open(self):
self._set_geometry() # set default geometry
close = True
# open adf file via fobj
if self.fobj is not None:
if self.gzipped:
fh = gzip.GzipFile(self.adf_file, "rb", fileobj=self.fobj)
else:
fh = self.fobj
close = False
# open adf file
else:
if self.gzipped:
fh = gzip.open(self.adf_file,"rb")
else:
fh = io.open(self.adf_file, "rb")
# read image
data = fh.read(self.num_bytes)
# close input file
if close:
fh.close()
# check size
if len(data) != self.num_bytes:
raise IOError("Invalid ADF Size: got %d but expected %d" % (len(data), self.num_bytes))
# create modifyable data
if self.read_only:
self.data = data
else:
self.data = ctypes.create_string_buffer(self.num_bytes)
self.data[:] = data
def flush(self):
# write dirty adf
if self.dirty and not self.read_only:
close = True
if self.fobj is not None:
# seek fobj to beginning
self.fobj.seek(0,0)
if self.gzipped:
fh = gzip.GzipFile(self.adf_file, "wb", fileobj=self.fobj)
def open(self):
self._set_geometry() # set default geometry
close = True
# open adf file via fobj
if self.fobj is not None:
if self.gzipped:
fh = gzip.GzipFile(self.adf_file, "rb", fileobj=self.fobj)
else:
fh = self.fobj
close = False
# open adf file
else:
fh = self.fobj
close = False
else:
if self.gzipped:
fh = gzip.open(self.adf_file,"wb")
if self.gzipped:
fh = gzip.open(self.adf_file, "rb")
else:
fh = io.open(self.adf_file, "rb")
# read image
data = fh.read(self.num_bytes)
# close input file
if close:
fh.close()
# check size
if len(data) != self.num_bytes:
raise IOError(
"Invalid ADF Size: got %d but expected %d" % (len(data), self.num_bytes)
)
# create modifyable data
if self.read_only:
self.data = data
else:
fh = io.open(self.adf_file, "wb")
# write image
fh.write(self.data)
# close file
if close:
fh.close()
self.dirty = False
self.data = ctypes.create_string_buffer(self.num_bytes)
self.data[:] = data
def close(self):
self.flush()
self.data = None
# now close fobj
if self.fobj is not None:
self.fobj.close()
def flush(self):
# write dirty adf
if self.dirty and not self.read_only:
close = True
if self.fobj is not None:
# seek fobj to beginning
self.fobj.seek(0, 0)
if self.gzipped:
fh = gzip.GzipFile(self.adf_file, "wb", fileobj=self.fobj)
else:
fh = self.fobj
close = False
else:
if self.gzipped:
fh = gzip.open(self.adf_file, "wb")
else:
fh = io.open(self.adf_file, "wb")
# write image
fh.write(self.data)
# close file
if close:
fh.close()
self.dirty = False
def read_block(self, blk_num):
if blk_num >= self.num_blocks:
raise ValueError("Invalid ADF block num: got %d but max is %d" % (blk_num, self.num_blocks))
off = self._blk_to_offset(blk_num)
return self.data[off:off+self.block_bytes]
def close(self):
self.flush()
self.data = None
# now close fobj
if self.fobj is not None:
self.fobj.close()
def write_block(self, blk_num, data):
if self.read_only:
raise IOError("ADF File is read-only!")
if blk_num >= self.num_blocks:
raise ValueError("Invalid ADF block num: got %d but max is %d" % (blk_num, self.num_blocks))
if len(data) != self.block_bytes:
raise ValueError("Invalid ADF block size written: got %d but size is %d" % (len(data), self.block_bytes))
off = self._blk_to_offset(blk_num)
self.data[off:off+self.block_bytes] = data
self.dirty = True
def read_block(self, blk_num):
if blk_num >= self.num_blocks:
raise ValueError(
"Invalid ADF block num: got %d but max is %d"
% (blk_num, self.num_blocks)
)
off = self._blk_to_offset(blk_num)
return self.data[off : off + self.block_bytes]
def write_block(self, blk_num, data):
if self.read_only:
raise IOError("ADF File is read-only!")
if blk_num >= self.num_blocks:
raise ValueError(
"Invalid ADF block num: got %d but max is %d"
% (blk_num, self.num_blocks)
)
if len(data) != self.block_bytes:
raise ValueError(
"Invalid ADF block size written: got %d but size is %d"
% (len(data), self.block_bytes)
)
off = self._blk_to_offset(blk_num)
self.data[off : off + self.block_bytes] = data
self.dirty = True
# --- mini test ---
if __name__ == '__main__':
import sys
for a in sys.argv[1:]:
# write to file device
adf = ADFBlockDevice(a)
adf.open()
d = adf.read_block(0)
adf.write_block(0, d)
adf.close()
# write via fobj
fobj = open(a, "rb")
adf = ADFBlockDevice(a, fobj=fobj)
adf.open()
d = adf.read_block(0)
adf.write_block(0, d)
adf.close()
if __name__ == "__main__":
import sys
for a in sys.argv[1:]:
# write to file device
adf = ADFBlockDevice(a)
adf.open()
d = adf.read_block(0)
adf.write_block(0, d)
adf.close()
# write via fobj
fobj = open(a, "rb")
adf = ADFBlockDevice(a, fobj=fobj)
adf.open()
d = adf.read_block(0)
adf.write_block(0, d)
adf.close()

View File

@ -1,6 +1,3 @@
import os
import os.path
import stat
@ -12,197 +9,200 @@ from .DiskGeometry import DiskGeometry
from amitools.fs.rdb.RDisk import RDisk
import amitools.util.BlkDevTools as BlkDevTools
class BlkDevFactory:
"""the block device factory opens or creates image files suitable as a block device for file system access."""
"""the block device factory opens or creates image files suitable as a block device for file system access."""
valid_extensions = ('.adf','.adz','.adf.gz','.hdf','.rdisk')
valid_extensions = (".adf", ".adz", ".adf.gz", ".hdf", ".rdisk")
TYPE_ADF = 1
TYPE_HDF = 2
TYPE_RDISK = 3
TYPE_ADF = 1
TYPE_HDF = 2
TYPE_RDISK = 3
def detect_type(self, img_file, fobj, options=None):
"""try to detect the type of a given img_file name"""
# 1. take type from options
t = self.type_from_options(options)
if t == None:
# 2. look in file
t = self.type_from_contents(img_file, fobj)
if t == None:
# 3. from extension
t = self.type_from_extension(img_file)
return t
def detect_type(self, img_file, fobj, options=None):
"""try to detect the type of a given img_file name"""
# 1. take type from options
t = self.type_from_options(options)
if t == None:
# 2. look in file
t = self.type_from_contents(img_file, fobj)
if t == None:
# 3. from extension
t = self.type_from_extension(img_file)
return t
def type_from_options(self, options):
"""look in options for type"""
if options != None:
if 'type' in options:
t = options['type'].lower()
if t in ('adf','adz'):
return self.TYPE_ADF
elif t == 'hdf':
return self.TYPE_HDF
elif t == 'rdisk':
return self.TYPE_RDISK
return None
def type_from_contents(self, img_file, fobj):
"""look in first 4 bytes for type of image"""
# load 4 bytes
if fobj is None:
# make sure file exists
if not os.path.exists(img_file):
def type_from_options(self, options):
"""look in options for type"""
if options != None:
if "type" in options:
t = options["type"].lower()
if t in ("adf", "adz"):
return self.TYPE_ADF
elif t == "hdf":
return self.TYPE_HDF
elif t == "rdisk":
return self.TYPE_RDISK
return None
f = open(img_file, "rb")
hdr = f.read(4)
f.close()
else:
hdr = fobj.read(4)
fobj.seek(0,0)
# check for 'RDISK':
if hdr == b'RDSK':
return self.TYPE_RDISK
return None
def type_from_extension(self, img_file):
"""look at file extension for type of image"""
ext = img_file.lower()
if ext.endswith('.adf') or ext.endswith('.adz') or ext.endswith('.adf.gz'):
return self.TYPE_ADF
elif ext.endswith(".hdf"):
return self.TYPE_HDF
elif ext.endswith(".rdsk"):
return self.TYPE_RDISK
else:
return None
def type_from_contents(self, img_file, fobj):
"""look in first 4 bytes for type of image"""
# load 4 bytes
if fobj is None:
# make sure file exists
if not os.path.exists(img_file):
return None
f = open(img_file, "rb")
hdr = f.read(4)
f.close()
else:
hdr = fobj.read(4)
fobj.seek(0, 0)
# check for 'RDISK':
if hdr == b"RDSK":
return self.TYPE_RDISK
return None
def _get_block_size(self, options):
if options and 'bs' in options:
bs = int(options['bs'])
if bs % 512 != 0 and bs < 512:
raise ValueError("invalid block size given: %d" % bs)
return bs
else:
return 512
def type_from_extension(self, img_file):
"""look at file extension for type of image"""
ext = img_file.lower()
if ext.endswith(".adf") or ext.endswith(".adz") or ext.endswith(".adf.gz"):
return self.TYPE_ADF
elif ext.endswith(".hdf"):
return self.TYPE_HDF
elif ext.endswith(".rdsk"):
return self.TYPE_RDISK
else:
return None
def open(self, img_file, read_only=False, options=None, fobj=None,
none_if_missing=False):
"""open an existing image file"""
# file base check
if fobj is None:
# make sure image file exists
if not os.path.exists(img_file):
if none_if_missing:
return None
raise IOError("image file not found")
# is readable?
if not os.access(img_file, os.R_OK):
raise IOError("can't read from image file")
# is writeable? -> no: enforce read_only
if not os.access(img_file, os.W_OK):
read_only = True
# check size
st = os.stat(img_file)
mode = st.st_mode
if stat.S_ISBLK(mode) or stat.S_ISCHR(mode):
size = BlkDevTools.getblkdevsize(img_file)
else:
size = os.path.getsize(img_file)
if size == 0:
raise IOError("image file is empty")
# fobj
else:
fobj.seek(0,2)
size = fobj.tell()
fobj.seek(0,0)
# detect type
t = self.detect_type(img_file, fobj, options)
if t == None:
raise IOError("can't detect type of image file")
# get block size
bs = self._get_block_size(options)
# create blkdev
if t == self.TYPE_ADF:
blkdev = ADFBlockDevice(img_file, read_only, fobj=fobj)
blkdev.open()
elif t == self.TYPE_HDF:
# detect geometry
geo = DiskGeometry(block_bytes=bs)
if not geo.detect(size, options):
raise IOError("can't detect geometry of HDF image file")
blkdev = HDFBlockDevice(img_file, read_only, fobj=fobj, block_size=bs)
blkdev.open(geo)
else:
rawdev = RawBlockDevice(img_file, read_only, fobj=fobj, block_bytes=bs)
rawdev.open()
# check block size stored in rdb
rdisk = RDisk(rawdev)
rdb_bs = rdisk.peek_block_size()
if rdb_bs != bs:
# adjust block size and re-open
rawdev.close()
bs = rdb_bs
rawdev = RawBlockDevice(img_file, read_only, fobj=fobj, block_bytes=bs)
rawdev.open()
rdisk = RDisk(rawdev)
if not rdisk.open():
raise IOError("can't open rdisk of image file")
# determine partition
p = "0"
if options != None and 'part' in options:
p = str(options['part'])
part = rdisk.find_partition_by_string(p)
if part == None:
raise IOError("can't find partition in image file")
blkdev = part.create_blkdev(True) # auto_close rdisk
blkdev.open()
return blkdev
def _get_block_size(self, options):
if options and "bs" in options:
bs = int(options["bs"])
if bs % 512 != 0 and bs < 512:
raise ValueError("invalid block size given: %d" % bs)
return bs
else:
return 512
def create(self, img_file, force=True, options=None, fobj=None):
if fobj is None:
# make sure we are allowed to overwrite existing file
if os.path.exists(img_file):
if not force:
raise IOError("can't overwrite existing image file")
# not writeable?
if not os.access(img_file, os.W_OK):
raise IOError("can't write image file")
# detect type
t = self.detect_type(img_file, fobj, options)
if t == None:
raise IOError("can't detect type of image file")
if t == self.TYPE_RDISK:
raise IOError("can't create rdisk. use rdbtool first")
# get block size
bs = self._get_block_size(options)
# create blkdev
if t == self.TYPE_ADF:
blkdev = ADFBlockDevice(img_file, fobj=fobj)
blkdev.create()
else:
# determine geometry from size or chs
geo = DiskGeometry()
if not geo.setup(options):
raise IOError("can't determine geometry of HDF image file")
blkdev = HDFBlockDevice(img_file, fobj=fobj, block_size=bs)
blkdev.create(geo)
return blkdev
def open(
self, img_file, read_only=False, options=None, fobj=None, none_if_missing=False
):
"""open an existing image file"""
# file base check
if fobj is None:
# make sure image file exists
if not os.path.exists(img_file):
if none_if_missing:
return None
raise IOError("image file not found")
# is readable?
if not os.access(img_file, os.R_OK):
raise IOError("can't read from image file")
# is writeable? -> no: enforce read_only
if not os.access(img_file, os.W_OK):
read_only = True
# check size
st = os.stat(img_file)
mode = st.st_mode
if stat.S_ISBLK(mode) or stat.S_ISCHR(mode):
size = BlkDevTools.getblkdevsize(img_file)
else:
size = os.path.getsize(img_file)
if size == 0:
raise IOError("image file is empty")
# fobj
else:
fobj.seek(0, 2)
size = fobj.tell()
fobj.seek(0, 0)
# detect type
t = self.detect_type(img_file, fobj, options)
if t == None:
raise IOError("can't detect type of image file")
# get block size
bs = self._get_block_size(options)
# create blkdev
if t == self.TYPE_ADF:
blkdev = ADFBlockDevice(img_file, read_only, fobj=fobj)
blkdev.open()
elif t == self.TYPE_HDF:
# detect geometry
geo = DiskGeometry(block_bytes=bs)
if not geo.detect(size, options):
raise IOError("can't detect geometry of HDF image file")
blkdev = HDFBlockDevice(img_file, read_only, fobj=fobj, block_size=bs)
blkdev.open(geo)
else:
rawdev = RawBlockDevice(img_file, read_only, fobj=fobj, block_bytes=bs)
rawdev.open()
# check block size stored in rdb
rdisk = RDisk(rawdev)
rdb_bs = rdisk.peek_block_size()
if rdb_bs != bs:
# adjust block size and re-open
rawdev.close()
bs = rdb_bs
rawdev = RawBlockDevice(img_file, read_only, fobj=fobj, block_bytes=bs)
rawdev.open()
rdisk = RDisk(rawdev)
if not rdisk.open():
raise IOError("can't open rdisk of image file")
# determine partition
p = "0"
if options != None and "part" in options:
p = str(options["part"])
part = rdisk.find_partition_by_string(p)
if part == None:
raise IOError("can't find partition in image file")
blkdev = part.create_blkdev(True) # auto_close rdisk
blkdev.open()
return blkdev
def create(self, img_file, force=True, options=None, fobj=None):
if fobj is None:
# make sure we are allowed to overwrite existing file
if os.path.exists(img_file):
if not force:
raise IOError("can't overwrite existing image file")
# not writeable?
if not os.access(img_file, os.W_OK):
raise IOError("can't write image file")
# detect type
t = self.detect_type(img_file, fobj, options)
if t == None:
raise IOError("can't detect type of image file")
if t == self.TYPE_RDISK:
raise IOError("can't create rdisk. use rdbtool first")
# get block size
bs = self._get_block_size(options)
# create blkdev
if t == self.TYPE_ADF:
blkdev = ADFBlockDevice(img_file, fobj=fobj)
blkdev.create()
else:
# determine geometry from size or chs
geo = DiskGeometry()
if not geo.setup(options):
raise IOError("can't determine geometry of HDF image file")
blkdev = HDFBlockDevice(img_file, fobj=fobj, block_size=bs)
blkdev.create(geo)
return blkdev
# --- mini test ---
if __name__ == '__main__':
import sys
import io
bdf = BlkDevFactory()
for a in sys.argv[1:]:
# open by file
blkdev = bdf.open(a)
print(a, blkdev.__class__.__name__)
blkdev.close()
# open via fobj
fobj = open(a,"rb")
data = fobj.read()
nobj = io.StringIO(data)
blkdev = bdf.open("bluna"+a, fobj=nobj)
print(a, blkdev.__class__.__name__)
blkdev.close()
if __name__ == "__main__":
import sys
import io
bdf = BlkDevFactory()
for a in sys.argv[1:]:
# open by file
blkdev = bdf.open(a)
print(a, blkdev.__class__.__name__)
blkdev.close()
# open via fobj
fobj = open(a, "rb")
data = fobj.read()
nobj = io.StringIO(data)
blkdev = bdf.open("bluna" + a, fobj=nobj)
print(a, blkdev.__class__.__name__)
blkdev.close()

View File

@ -1,54 +1,65 @@
# a block device defines a set of blocks used by a file system
from .DiskGeometry import DiskGeometry
class BlockDevice:
def _set_geometry(self, cyls=80, heads=2, sectors=11, block_bytes=512, reserved=2, bootblocks=2):
self.cyls = cyls
self.heads = heads
self.sectors = sectors
self.block_bytes = block_bytes
self.reserved = reserved
self.bootblocks = bootblocks
# derived values
self.num_tracks = self.cyls * self.heads
self.num_blocks = self.num_tracks * self.sectors
self.num_bytes = self.num_blocks * self.block_bytes
self.block_longs = self.block_bytes // 4
self.num_longs = self.num_blocks * self.block_longs
def dump(self):
print("cylinders: ", self.cyls)
print("heads: ", self.heads)
print("sectors: ", self.sectors)
print("block_bytes:", self.block_bytes)
print("reserved: ", self.reserved)
print("bootblocks: ", self.bootblocks)
def _blk_to_offset(self, blk_num):
return self.block_bytes * blk_num
# ----- API -----
def create(self, **args):
pass
def open(self):
pass
def close(self):
pass
def flush(self):
pass
def read_block(self, blk_num):
pass
def write_block(self, blk_num, data):
pass
def get_geometry(self):
return DiskGeometry(self.cyls, self.heads, self.sectors)
def get_chs_str(self):
return "chs=%d,%d,%d" % (self.cyls, self.heads, self.sectors)
def get_options(self):
return { 'chs' : "%d,%d,%d" % (self.cyls, self.heads, self.sectors),
'bs' : self.block_bytes }
def get_block_size_str(self):
return "bs=%d" % self.block_bytes
class BlockDevice:
def _set_geometry(
self, cyls=80, heads=2, sectors=11, block_bytes=512, reserved=2, bootblocks=2
):
self.cyls = cyls
self.heads = heads
self.sectors = sectors
self.block_bytes = block_bytes
self.reserved = reserved
self.bootblocks = bootblocks
# derived values
self.num_tracks = self.cyls * self.heads
self.num_blocks = self.num_tracks * self.sectors
self.num_bytes = self.num_blocks * self.block_bytes
self.block_longs = self.block_bytes // 4
self.num_longs = self.num_blocks * self.block_longs
def dump(self):
print("cylinders: ", self.cyls)
print("heads: ", self.heads)
print("sectors: ", self.sectors)
print("block_bytes:", self.block_bytes)
print("reserved: ", self.reserved)
print("bootblocks: ", self.bootblocks)
def _blk_to_offset(self, blk_num):
return self.block_bytes * blk_num
# ----- API -----
def create(self, **args):
pass
def open(self):
pass
def close(self):
pass
def flush(self):
pass
def read_block(self, blk_num):
pass
def write_block(self, blk_num, data):
pass
def get_geometry(self):
return DiskGeometry(self.cyls, self.heads, self.sectors)
def get_chs_str(self):
return "chs=%d,%d,%d" % (self.cyls, self.heads, self.sectors)
def get_options(self):
return {
"chs": "%d,%d,%d" % (self.cyls, self.heads, self.sectors),
"bs": self.block_bytes,
}
def get_block_size_str(self):
return "bs=%d" % self.block_bytes

View File

@ -1,182 +1,189 @@
import amitools.util.ByteSize as ByteSize
class DiskGeometry:
def __init__(self, cyls=0, heads=0, secs=0, block_bytes=512):
self.cyls = cyls
self.heads = heads
self.secs = secs
self.block_bytes = block_bytes
def __str__(self):
size = self.get_num_bytes()
return "chs=%d,%d,%d bs=%d size=%d/%s" % (self.cyls, self.heads, self.secs,
self.block_bytes, size, ByteSize.to_byte_size_str(size))
def get_num_blocks(self):
"""return the number of block allocated by geometry"""
return self.cyls * self.heads * self.secs
def get_num_bytes(self):
"""return the number of bytes allocated by geometry"""
return self.get_num_blocks() * self.block_bytes
def _update_block_size(self, options):
if options and 'bs' in options:
bs = int(options['bs'])
if bs % 512 != 0 or bs < 512:
raise ValueError("invalid block size given: %d" % bs)
self.block_bytes = bs
def detect(self, byte_size, options=None):
"""detect a geometry from a given image size and optional options.
class DiskGeometry:
def __init__(self, cyls=0, heads=0, secs=0, block_bytes=512):
self.cyls = cyls
self.heads = heads
self.secs = secs
self.block_bytes = block_bytes
def __str__(self):
size = self.get_num_bytes()
return "chs=%d,%d,%d bs=%d size=%d/%s" % (
self.cyls,
self.heads,
self.secs,
self.block_bytes,
size,
ByteSize.to_byte_size_str(size),
)
def get_num_blocks(self):
"""return the number of block allocated by geometry"""
return self.cyls * self.heads * self.secs
def get_num_bytes(self):
"""return the number of bytes allocated by geometry"""
return self.get_num_blocks() * self.block_bytes
def _update_block_size(self, options):
if options and "bs" in options:
bs = int(options["bs"])
if bs % 512 != 0 or bs < 512:
raise ValueError("invalid block size given: %d" % bs)
self.block_bytes = bs
def detect(self, byte_size, options=None):
"""detect a geometry from a given image size and optional options.
return bytes required by geometry or None if geomtry is invalid
"""
c = None
h = None
s = None
self._update_block_size(options)
num_blocks = byte_size // self.block_bytes
algo = None
if options != None:
(c, h, s) = self._parse_chs(options)
if 'algo' in options:
algo = int(options['algo'])
# chs if fully specified then take this one
if c != None and h != None and s != None:
self.cyls = c
self.heads = h
self.secs = s
size = self.get_num_bytes()
if size == byte_size:
return size
else:
return None
else:
return self._guess_for_size(byte_size, algo=algo, secs=s, heads=h)
def setup(self, options):
"""setup a new geometry by giving options
c = None
h = None
s = None
self._update_block_size(options)
num_blocks = byte_size // self.block_bytes
algo = None
if options != None:
(c, h, s) = self._parse_chs(options)
if "algo" in options:
algo = int(options["algo"])
# chs if fully specified then take this one
if c != None and h != None and s != None:
self.cyls = c
self.heads = h
self.secs = s
size = self.get_num_bytes()
if size == byte_size:
return size
else:
return None
else:
return self._guess_for_size(byte_size, algo=algo, secs=s, heads=h)
def setup(self, options):
"""setup a new geometry by giving options
return bytes required by geometry or None if geometry is invalid
"""
if options == None:
return False
c = None
h = None
s = None
(c, h, s) = self._parse_chs(options)
self._update_block_size(options)
# chs is fully specified
if c != None and h != None and s != None:
self.cyls = c
self.heads = h
self.secs = s
return self.get_num_bytes()
else:
# we require a size
if 'size' not in options:
return None
# parse size
size = options['size']
if type(size) != int:
size = ByteSize.parse_byte_size_str(size)
if size == None:
return None
# select guess algo
algo = None
if 'algo' in options:
algo = int(options['algo'])
# guess size
return self._guess_for_size(size, approx=True, algo=algo, secs=s, heads=h)
def _parse_chs(self, options):
c = None
h = None
s = None
# chs=<n>,<n>,<n>
if 'chs' in options:
comp = options['chs'].split(',')
if len(comp) == 3:
return [int(x) for x in comp]
else:
if 's' in options:
s = int(options['s'])
if 'h' in options:
h = int(options['h'])
if 'c' in options:
c = int(options['c'])
return (c,h,s)
def _guess_for_size1(self, size, approx=True, secs=None, heads=None):
mb = size // 1024
if secs == None:
secs = 63
if heads == None:
if mb <= 504 * 1024:
heads = 16
elif mb <= 1008 * 1024:
heads = 32
elif mb <= 2016 * 1024:
heads = 64
elif mb <= 4032 * 1024:
heads = 128
else:
heads = 256
cyls = (size // self.block_bytes) // (secs * heads)
geo_size = cyls * secs * heads * self.block_bytes
# keep approx values or match
if approx or geo_size == size:
self.cyls = cyls
self.heads = heads
self.secs = secs
return geo_size
else:
return None
def _guess_for_size2(self, size, approx=True, secs=None, heads=None):
if heads == None:
heads = 1
if secs == None:
secs = 32
cyls = (size // self.block_bytes) // (secs * heads)
# keep cyls low
while cyls > 65535:
cyls //= 2
heads *= 2
# keep approx values or match
geo_size = cyls * secs * heads * self.block_bytes
if approx or geo_size == size:
self.cyls = cyls
self.heads = heads
self.secs = secs
return geo_size
else:
return None
def _guess_for_size(self, size, approx=True, algo=None, secs=None, heads=None):
if algo == 1:
return self._guess_for_size1(size, approx, secs, heads)
elif algo == 2:
return self._guess_for_size2(size, approx, secs, heads)
else:
algos = [self._guess_for_size1, self._guess_for_size2]
if approx:
# find min diff to real size
min_diff = size
min_algo = None
for a in algos:
s = a(size, True, secs, heads)
if s != None:
delta = abs(size - s)
if delta < min_diff:
min_diff = delta
min_algo = a
if min_algo != None:
return min_algo(size, True, secs, heads)
if options == None:
return False
c = None
h = None
s = None
(c, h, s) = self._parse_chs(options)
self._update_block_size(options)
# chs is fully specified
if c != None and h != None and s != None:
self.cyls = c
self.heads = h
self.secs = s
return self.get_num_bytes()
else:
return None
else: # exact match
for a in algos:
s = a(size, True, secs, heads)
if s == size:
return size
return None
# we require a size
if "size" not in options:
return None
# parse size
size = options["size"]
if type(size) != int:
size = ByteSize.parse_byte_size_str(size)
if size == None:
return None
# select guess algo
algo = None
if "algo" in options:
algo = int(options["algo"])
# guess size
return self._guess_for_size(size, approx=True, algo=algo, secs=s, heads=h)
def _parse_chs(self, options):
c = None
h = None
s = None
# chs=<n>,<n>,<n>
if "chs" in options:
comp = options["chs"].split(",")
if len(comp) == 3:
return [int(x) for x in comp]
else:
if "s" in options:
s = int(options["s"])
if "h" in options:
h = int(options["h"])
if "c" in options:
c = int(options["c"])
return (c, h, s)
def _guess_for_size1(self, size, approx=True, secs=None, heads=None):
mb = size // 1024
if secs == None:
secs = 63
if heads == None:
if mb <= 504 * 1024:
heads = 16
elif mb <= 1008 * 1024:
heads = 32
elif mb <= 2016 * 1024:
heads = 64
elif mb <= 4032 * 1024:
heads = 128
else:
heads = 256
cyls = (size // self.block_bytes) // (secs * heads)
geo_size = cyls * secs * heads * self.block_bytes
# keep approx values or match
if approx or geo_size == size:
self.cyls = cyls
self.heads = heads
self.secs = secs
return geo_size
else:
return None
def _guess_for_size2(self, size, approx=True, secs=None, heads=None):
if heads == None:
heads = 1
if secs == None:
secs = 32
cyls = (size // self.block_bytes) // (secs * heads)
# keep cyls low
while cyls > 65535:
cyls //= 2
heads *= 2
# keep approx values or match
geo_size = cyls * secs * heads * self.block_bytes
if approx or geo_size == size:
self.cyls = cyls
self.heads = heads
self.secs = secs
return geo_size
else:
return None
def _guess_for_size(self, size, approx=True, algo=None, secs=None, heads=None):
if algo == 1:
return self._guess_for_size1(size, approx, secs, heads)
elif algo == 2:
return self._guess_for_size2(size, approx, secs, heads)
else:
algos = [self._guess_for_size1, self._guess_for_size2]
if approx:
# find min diff to real size
min_diff = size
min_algo = None
for a in algos:
s = a(size, True, secs, heads)
if s != None:
delta = abs(size - s)
if delta < min_diff:
min_diff = delta
min_algo = a
if min_algo != None:
return min_algo(size, True, secs, heads)
else:
return None
else: # exact match
for a in algos:
s = a(size, True, secs, heads)
if s == size:
return size
return None

View File

@ -1,35 +1,43 @@
from .BlockDevice import BlockDevice
from .DiskGeometry import DiskGeometry
from .ImageFile import ImageFile
import os.path
import os
class HDFBlockDevice(BlockDevice):
def __init__(self, hdf_file, read_only=False, block_size=512, fobj=None):
self.img_file = ImageFile(hdf_file, read_only, block_size, fobj)
def __init__(self, hdf_file, read_only=False, block_size=512, fobj=None):
self.img_file = ImageFile(hdf_file, read_only, block_size, fobj)
def create(self, geo, reserved=2):
self._set_geometry(geo.cyls, geo.heads, geo.secs, reserved=reserved,
block_bytes=self.img_file.block_bytes)
self.img_file.create(geo.get_num_blocks())
self.img_file.open()
def create(self, geo, reserved=2):
self._set_geometry(
geo.cyls,
geo.heads,
geo.secs,
reserved=reserved,
block_bytes=self.img_file.block_bytes,
)
self.img_file.create(geo.get_num_blocks())
self.img_file.open()
def open(self, geo, reserved=2):
self._set_geometry(geo.cyls, geo.heads, geo.secs, reserved=reserved,
block_bytes=self.img_file.block_bytes)
self.img_file.open()
def open(self, geo, reserved=2):
self._set_geometry(
geo.cyls,
geo.heads,
geo.secs,
reserved=reserved,
block_bytes=self.img_file.block_bytes,
)
self.img_file.open()
def flush(self):
pass
def flush(self):
pass
def close(self):
self.img_file.close()
def close(self):
self.img_file.close()
def read_block(self, blk_num):
return self.img_file.read_blk(blk_num)
def read_block(self, blk_num):
return self.img_file.read_blk(blk_num)
def write_block(self, blk_num, data):
return self.img_file.write_blk(blk_num, data)
def write_block(self, blk_num, data):
return self.img_file.write_blk(blk_num, data)

View File

@ -4,109 +4,121 @@ import amitools.util.BlkDevTools as BlkDevTools
import zlib
import io
class ImageFile:
def __init__(self, file_name, read_only=False, block_bytes=512, fobj=None):
self.file_name = file_name
self.read_only = read_only
self.block_bytes = block_bytes
self.fobj = fobj
self.fh = None
self.size = 0
self.num_blocks = 0
def __init__(self, file_name, read_only=False, block_bytes=512, fobj=None):
self.file_name = file_name
self.read_only = read_only
self.block_bytes = block_bytes
self.fobj = fobj
self.fh = None
self.size = 0
self.num_blocks = 0
def open(self):
# file obj?
if self.fobj is not None:
self.fh = self.fobj
# get size via seek
self.fobj.seek(0,2) # end of file
self.size = self.fobj.tell()
self.fobj.seek(0,0) # return to begin
self.num_blocks = self.size // self.block_bytes
# file name given
else:
# is readable?
if not os.access(self.file_name, os.R_OK):
raise IOError("Can't read from image file")
# is writeable?
if not os.access(self.file_name, os.W_OK):
self.read_only = True
# is it a block/char device?
st = os.stat(self.file_name)
mode = st.st_mode
if stat.S_ISBLK(mode) or stat.S_ISCHR(mode):
self.size = BlkDevTools.getblkdevsize(self.file_name)
else:
# get size and make sure its not empty
self.size = os.path.getsize(self.file_name)
if self.size == 0:
raise IOError("Empty image file detected!")
self.num_blocks = self.size // self.block_bytes
# open raw file
if self.read_only:
flags = "rb"
else:
flags = "r+b"
self.fh = io.open(self.file_name, flags)
def open(self):
# file obj?
if self.fobj is not None:
self.fh = self.fobj
# get size via seek
self.fobj.seek(0, 2) # end of file
self.size = self.fobj.tell()
self.fobj.seek(0, 0) # return to begin
self.num_blocks = self.size // self.block_bytes
# file name given
else:
# is readable?
if not os.access(self.file_name, os.R_OK):
raise IOError("Can't read from image file")
# is writeable?
if not os.access(self.file_name, os.W_OK):
self.read_only = True
# is it a block/char device?
st = os.stat(self.file_name)
mode = st.st_mode
if stat.S_ISBLK(mode) or stat.S_ISCHR(mode):
self.size = BlkDevTools.getblkdevsize(self.file_name)
else:
# get size and make sure its not empty
self.size = os.path.getsize(self.file_name)
if self.size == 0:
raise IOError("Empty image file detected!")
self.num_blocks = self.size // self.block_bytes
# open raw file
if self.read_only:
flags = "rb"
else:
flags = "r+b"
self.fh = io.open(self.file_name, flags)
def read_blk(self, blk_num, num_blks=1):
if blk_num >= self.num_blocks:
raise IOError("Invalid image file block num: got %d but max is %d" % (blk_num, self.num_blocks))
off = blk_num * self.block_bytes
if off != self.fh.tell():
self.fh.seek(off, os.SEEK_SET)
num = self.block_bytes * num_blks
data = self.fh.read(num)
return data
def read_blk(self, blk_num, num_blks=1):
if blk_num >= self.num_blocks:
raise IOError(
"Invalid image file block num: got %d but max is %d"
% (blk_num, self.num_blocks)
)
off = blk_num * self.block_bytes
if off != self.fh.tell():
self.fh.seek(off, os.SEEK_SET)
num = self.block_bytes * num_blks
data = self.fh.read(num)
return data
def write_blk(self, blk_num, data, num_blks=1):
if self.read_only:
raise IOError("Can't write block: image file is read-only")
if blk_num >= self.num_blocks:
raise IOError("Invalid image file block num: got %d but max is %d" % (blk_num, self.num_blocks))
if len(data) != (self.block_bytes * num_blks):
raise IOError("Invalid block size written: got %d but size is %d" % (len(data), self.block_bytes))
off = blk_num * self.block_bytes
if off != self.fh.tell():
self.fh.seek(off, os.SEEK_SET)
self.fh.write(data)
def write_blk(self, blk_num, data, num_blks=1):
if self.read_only:
raise IOError("Can't write block: image file is read-only")
if blk_num >= self.num_blocks:
raise IOError(
"Invalid image file block num: got %d but max is %d"
% (blk_num, self.num_blocks)
)
if len(data) != (self.block_bytes * num_blks):
raise IOError(
"Invalid block size written: got %d but size is %d"
% (len(data), self.block_bytes)
)
off = blk_num * self.block_bytes
if off != self.fh.tell():
self.fh.seek(off, os.SEEK_SET)
self.fh.write(data)
def flush(self):
self.fh.flush()
def flush(self):
self.fh.flush()
def close(self):
if self.fh != None:
self.fh.close()
self.fh = None
def close(self):
if self.fh != None:
self.fh.close()
self.fh = None
def create(self, num_blocks):
if self.read_only:
raise IOError("Can't create image file in read only mode")
block = b"\0" * self.block_bytes
if self.fobj is not None:
for i in range(num_blocks):
self.fobj.write(block)
self.fobj.seek(0, 0)
else:
fh = open(self.file_name, "wb")
for i in range(num_blocks):
fh.write(block)
fh.close()
def create(self, num_blocks):
if self.read_only:
raise IOError("Can't create image file in read only mode")
block = b'\0' * self.block_bytes
if self.fobj is not None:
for i in range(num_blocks):
self.fobj.write(block)
self.fobj.seek(0,0)
else:
fh = open(self.file_name, "wb")
for i in range(num_blocks):
fh.write(block)
fh.close()
# --- mini test ---
if __name__ == '__main__':
import sys
for a in sys.argv[1:]:
# read image
im = ImageFile(a)
im.open()
d = im.read_blk(0)
im.write_blk(0,d)
im.close()
# read fobj
fobj = open(a,"r+b")
im = ImageFile(a,fobj=fobj)
im.open()
d = im.read_blk(0)
im.write_blk(0,d)
im.close()
if __name__ == "__main__":
import sys
for a in sys.argv[1:]:
# read image
im = ImageFile(a)
im.open()
d = im.read_blk(0)
im.write_blk(0, d)
im.close()
# read fobj
fobj = open(a, "r+b")
im = ImageFile(a, fobj=fobj)
im.open()
d = im.read_blk(0)
im.write_blk(0, d)
im.close()

View File

@ -1,12 +1,9 @@
from .BlockDevice import BlockDevice
import os.path
import os
class PartBlockDevice(BlockDevice):
def __init__(self, raw_blkdev, part_blk, auto_close=False):
self.raw_blkdev = raw_blkdev
self.part_blk = part_blk
@ -36,8 +33,7 @@ class PartBlockDevice(BlockDevice):
boot_blocks = dos_env.boot_blocks
if boot_blocks == 0:
boot_blocks = 2
self._set_geometry(cyls, heads, secs, block_bytes,
reserved, boot_blocks)
self._set_geometry(cyls, heads, secs, block_bytes, reserved, boot_blocks)
return True
def flush(self):
@ -50,19 +46,25 @@ class PartBlockDevice(BlockDevice):
def read_block(self, blk_num):
if blk_num >= self.num_blocks:
raise ValueError("Invalid Part block num: got %d but max is %d" % (
blk_num, self.num_blocks))
raise ValueError(
"Invalid Part block num: got %d but max is %d"
% (blk_num, self.num_blocks)
)
num_blks = self.sec_per_blk
off = self.blk_off + (blk_num * num_blks)
return self.raw_blkdev.read_block(off, num_blks=num_blks)
def write_block(self, blk_num, data):
if blk_num >= self.num_blocks:
raise ValueError("Invalid Part block num: got %d but max is %d" % (
blk_num, self.num_blocks))
raise ValueError(
"Invalid Part block num: got %d but max is %d"
% (blk_num, self.num_blocks)
)
if len(data) != self.block_bytes:
raise ValueError("Invalid Part block size written: got %d but size is %d" % (
len(data), self.block_bytes))
raise ValueError(
"Invalid Part block size written: got %d but size is %d"
% (len(data), self.block_bytes)
)
num_blks = self.sec_per_blk
off = self.blk_off + (blk_num * num_blks)
self.raw_blkdev.write_block(off, data, num_blks=num_blks)

View File

@ -1,36 +1,34 @@
from .BlockDevice import BlockDevice
import os.path
import os
from .ImageFile import ImageFile
class RawBlockDevice(BlockDevice):
def __init__(self, raw_file, read_only=False, block_bytes=512, fobj=None):
self.img_file = ImageFile(raw_file, read_only, block_bytes, fobj)
def __init__(self, raw_file, read_only=False, block_bytes=512, fobj=None):
self.img_file = ImageFile(raw_file, read_only, block_bytes, fobj)
def create(self, num_blocks):
self.img_file.create(num_blocks)
self.open()
self.num_blocks = num_blocks
def create(self, num_blocks):
self.img_file.create(num_blocks)
self.open()
self.num_blocks = num_blocks
def open(self):
self.img_file.open()
# calc block longs
self.block_bytes = self.img_file.block_bytes
self.block_longs = self.block_bytes // 4
self.num_blocks = self.img_file.num_blocks
def open(self):
self.img_file.open()
# calc block longs
self.block_bytes = self.img_file.block_bytes
self.block_longs = self.block_bytes // 4
self.num_blocks = self.img_file.num_blocks
def flush(self):
self.img_file.flush()
def flush(self):
self.img_file.flush()
def close(self):
self.img_file.close()
def close(self):
self.img_file.close()
def read_block(self, blk_num, num_blks=1):
return self.img_file.read_blk(blk_num, num_blks)
def read_block(self, blk_num, num_blks=1):
return self.img_file.read_blk(blk_num, num_blks)
def write_block(self, blk_num, data, num_blks=1):
self.img_file.write_blk(blk_num, data, num_blks)
def write_block(self, blk_num, data, num_blks=1):
self.img_file.write_blk(blk_num, data, num_blks)

View File

@ -1,35 +1,33 @@
from .Block import Block
class BitmapBlock(Block):
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num, chk_loc=0)
def set(self, data):
self._set_data(data)
self._read()
def create(self):
self._create_data()
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
self.valid = True
return True
def get_bitmap_data(self):
return self.data[4:]
def set_bitmap_data(self, data):
self.data[4:] = data
def dump(self):
Block.dump(self,"Bitmap")
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num, chk_loc=0)
def set(self, data):
self._set_data(data)
self._read()
def create(self):
self._create_data()
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
self.valid = True
return True
def get_bitmap_data(self):
return self.data[4:]
def set_bitmap_data(self, data):
self.data[4:] = data
def dump(self):
Block.dump(self, "Bitmap")

View File

@ -1,50 +1,47 @@
from .Block import Block
from amitools.util.HexDump import *
class BitmapExtBlock(Block):
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num)
def set(self, data):
self._set_data(data)
self._read()
def read(self):
self._read_data()
self._read()
def _read(self):
# read bitmap blk ptrs
self.bitmap_ptrs = []
for i in range(self.blkdev.block_longs-1):
bm_blk = self._get_long(i)
self.bitmap_ptrs.append(bm_blk)
self.bitmap_ext_blk = self._get_long(-1)
self.valid = True
return True
def create(self):
self.bitmap_ptrs = []
for i in range(self.blkdev.block_longs-1):
self.bitmap_ptrs.append(0)
self.bitmap_ext_blk = 0
self.valid = True
return True
def write(self):
self._create_data()
for i in range(self.blkdev.block_longs-1):
self._put_long(i, self.bitmap_ptrs[i])
self._put_long(-1, self.bitmap_ext_blk)
self._write_data()
def dump(self):
Block.dump(self, "BitmapExtBlock", False)
print(" bmp ptrs: %s" % self.bitmap_ptrs)
print(" bmp ext: %d" % self.bitmap_ext_blk)
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num)
def set(self, data):
self._set_data(data)
self._read()
def read(self):
self._read_data()
self._read()
    def _read(self):
        """Decode a bitmap extension block.

        All longs except the last hold bitmap block pointers; the final
        long links to the next bitmap extension block in the chain.
        """
        # read bitmap blk ptrs
        self.bitmap_ptrs = []
        for i in range(self.blkdev.block_longs - 1):
            bm_blk = self._get_long(i)
            self.bitmap_ptrs.append(bm_blk)
        # last long: pointer to the next extension block
        self.bitmap_ext_blk = self._get_long(-1)
        self.valid = True
        return True
def create(self):
self.bitmap_ptrs = []
for i in range(self.blkdev.block_longs - 1):
self.bitmap_ptrs.append(0)
self.bitmap_ext_blk = 0
self.valid = True
return True
def write(self):
self._create_data()
for i in range(self.blkdev.block_longs - 1):
self._put_long(i, self.bitmap_ptrs[i])
self._put_long(-1, self.bitmap_ext_blk)
self._write_data()
def dump(self):
Block.dump(self, "BitmapExtBlock", False)
print(" bmp ptrs: %s" % self.bitmap_ptrs)
print(" bmp ext: %d" % self.bitmap_ext_blk)

View File

@@ -1,6 +1,3 @@
import struct
import ctypes
from ..TimeStamp import TimeStamp
@@ -8,240 +5,248 @@ from ..FSString import FSString
class Block:
# mark end of block list
no_blk = 0xffffffff
# special blocks
RDSK = 0x5244534b # Rigid Disk Block
BADB = 0x42414442 # Bad Blocks Block
PART = 0x50415254 # Partition Block
FSHD = 0x46534844 # FileSystem Header Block
LSEG = 0x4c534547 # LoadSeg Block
# block types
T_SHORT = 2
T_DATA = 8
T_LIST = 16
T_DIR_CACHE = 33
T_COMMENT = 64
# block sub types
ST_ROOT = 1
ST_USERDIR = 2
ST_FILE = -3 & 0xffffffff
def __init__(self, blkdev, blk_num, is_type=0, is_sub_type=0, chk_loc=5):
self.valid = False
self.blkdev = blkdev
self.blk_num = blk_num
self.block_longs = blkdev.block_longs
self.type = 0
self.sub_type = 0
self.data = None
self.is_type = is_type
self.is_sub_type = is_sub_type
self.chk_loc = chk_loc
def __str__(self):
return "%s:@%d" % (self.__class__.__name__, self.blk_num)
def create(self):
self.type = self.is_type
self.sub_type = self.is_sub_type
def is_root_block(self):
return self.type == Block.T_SHORT and self.sub_type == Block.ST_ROOT
def is_user_dir_block(self):
return self.type == Block.T_SHORT and self.sub_type == Block.ST_USERDIR
def is_file_header_block(self):
return self.type == Block.T_SHORT and self.sub_type == Block.ST_FILE
def is_file_list_block(self):
return self.type == Block.T_LIST and self.sub_type == Block.ST_FILE
def is_file_data_block(self):
return self.type == Block.T_DATA
def is_comment_block(self):
return self.type == Block.T_COMMENT
def read(self):
if self.data == None:
self._read_data()
self._get_types()
self._get_chksum()
self.valid = self.valid_types and self.valid_chksum
def write(self):
if self.data == None:
self._create_data()
self._put_types()
self._put_chksum()
self._write_data()
def _set_data(self, data):
self.data = data
def _read_data(self):
data = self.blkdev.read_block(self.blk_num)
if len(data) != self.blkdev.block_bytes:
raise ValueError("Invalid Block Data: size=%d but expected %d" % (len(data), self.blkdev.block_bytes))
self._create_data()
self.data[:] = data
def _write_data(self):
if self.data != None:
self.blkdev.write_block(self.blk_num, self.data)
def _free_data(self):
self.data = None
def _create_data(self):
num_bytes = self.blkdev.block_bytes
self.data = ctypes.create_string_buffer(num_bytes)
def _put_long(self, num, val):
if num < 0:
num = self.block_longs + num
struct.pack_into(">I",self.data,num*4,val)
def _get_long(self, num):
if num < 0:
num = self.block_longs + num
return struct.unpack_from(">I",self.data,num*4)[0]
# mark end of block list
no_blk = 0xFFFFFFFF
def _put_slong(self, num, val):
if num < 0:
num = self.block_longs + num
struct.pack_into(">i",self.data,num*4,val)
def _get_slong(self, num):
if num < 0:
num = self.block_longs + num
return struct.unpack_from(">i",self.data,num*4)[0]
# special blocks
RDSK = 0x5244534B # Rigid Disk Block
BADB = 0x42414442 # Bad Blocks Block
PART = 0x50415254 # Partition Block
FSHD = 0x46534844 # FileSystem Header Block
LSEG = 0x4C534547 # LoadSeg Block
def _get_types(self):
self.type = self._get_long(0)
self.sub_type = self._get_long(-1)
self.valid_types = True
if self.is_type != 0:
if self.type != self.is_type:
self.valid_types = False
if self.is_sub_type != 0:
if self.sub_type != self.is_sub_type:
self.valid_types = False
def _put_types(self):
if self.is_type != 0:
self._put_long(0, self.is_type)
if self.is_sub_type != 0:
self._put_long(-1, self.is_sub_type)
def _get_chksum(self):
self.got_chksum = self._get_long(self.chk_loc)
self.calc_chksum = self._calc_chksum()
self.valid_chksum = self.got_chksum == self.calc_chksum
def _put_chksum(self):
self.calc_chksum = self._calc_chksum()
self.got_chksum = self.calc_chksum
self.valid_chksum = True
self._put_long(self.chk_loc, self.calc_chksum)
def _calc_chksum(self):
chksum = 0
for i in range(self.block_longs):
if i != self.chk_loc:
chksum += self._get_long(i)
return (-chksum) & 0xffffffff
def _get_timestamp(self, loc):
days = self._get_long(loc)
mins = self._get_long(loc+1)
ticks = self._get_long(loc+2)
return TimeStamp(days, mins, ticks)
def _put_timestamp(self, loc, ts):
if ts == None:
ts = TimeStamp()
self._put_long(loc, ts.days)
self._put_long(loc+1, ts.mins)
self._put_long(loc+2, ts.ticks)
def _get_bytes(self, loc, size):
if loc < 0:
loc = self.block_longs + loc
loc = loc * 4
return self.data[loc:loc+size]
# block types
T_SHORT = 2
T_DATA = 8
T_LIST = 16
T_DIR_CACHE = 33
T_COMMENT = 64
# block sub types
ST_ROOT = 1
ST_USERDIR = 2
ST_FILE = -3 & 0xFFFFFFFF
def _put_bytes(self, loc, data):
if loc < 0:
loc = self.block_longs + loc
loc = loc * 4
size = len(data)
self.data[loc:loc+size] = data
def __init__(self, blkdev, blk_num, is_type=0, is_sub_type=0, chk_loc=5):
self.valid = False
self.blkdev = blkdev
self.blk_num = blk_num
self.block_longs = blkdev.block_longs
self.type = 0
self.sub_type = 0
self.data = None
self.is_type = is_type
self.is_sub_type = is_sub_type
self.chk_loc = chk_loc
def _get_bstr(self, loc, max_size):
if loc < 0:
loc = self.block_longs + loc
loc = loc * 4
size = ord(self.data[loc])
if size > max_size:
return None
if size == 0:
return FSString()
name = self.data[loc+1:loc+1+size]
return FSString(name)
def __str__(self):
return "%s:@%d" % (self.__class__.__name__, self.blk_num)
def _put_bstr(self, loc, max_size, fs_str):
if fs_str is None:
fs_str = FSString()
assert isinstance(fs_str, FSString)
bstr = fs_str.get_ami_str()
assert len(bstr) < 256
n = len(bstr)
if n > max_size:
bstr = bstr[:max_size]
if loc < 0:
loc = self.block_longs + loc
loc = loc * 4
self.data[loc] = len(bstr)
if len(bstr) > 0:
self.data[loc+1:loc+1+len(bstr)] = bstr
def _get_cstr(self, loc, max_size):
n = 0
s = b""
loc = loc * 4
while n < max_size:
c = self.data[loc+n]
if ord(c) == 0:
break
s += c
n += 1
return FSString(s)
def _put_cstr(self, loc, max_size, fs_str):
if fs_str is None:
fs_str = FSString()
assert isinstance(fs_str, FSString)
cstr = fs_str.get_ami_str()
n = min(max_size, len(cstr))
loc = loc * 4
if n > 0:
self.data[loc:loc+n] = cstr
def _dump_ptr(self, ptr):
if ptr == self.no_blk:
return "none"
else:
return "%d" % ptr
def dump(self, name, details=True):
print("%sBlock(%d):" % (name, self.blk_num))
if details:
print(" types: %x/%x (valid: %x/%x)" % (self.type, self.sub_type, self.is_type, self.is_sub_type))
print(" chksum: 0x%08x (got) 0x%08x (calc)" % (self.got_chksum, self.calc_chksum))
print(" valid: %s" % self.valid)
def create(self):
self.type = self.is_type
self.sub_type = self.is_sub_type
def is_root_block(self):
return self.type == Block.T_SHORT and self.sub_type == Block.ST_ROOT
def is_user_dir_block(self):
return self.type == Block.T_SHORT and self.sub_type == Block.ST_USERDIR
def is_file_header_block(self):
return self.type == Block.T_SHORT and self.sub_type == Block.ST_FILE
def is_file_list_block(self):
return self.type == Block.T_LIST and self.sub_type == Block.ST_FILE
def is_file_data_block(self):
return self.type == Block.T_DATA
def is_comment_block(self):
return self.type == Block.T_COMMENT
def read(self):
if self.data == None:
self._read_data()
self._get_types()
self._get_chksum()
self.valid = self.valid_types and self.valid_chksum
def write(self):
if self.data == None:
self._create_data()
self._put_types()
self._put_chksum()
self._write_data()
def _set_data(self, data):
self.data = data
def _read_data(self):
data = self.blkdev.read_block(self.blk_num)
if len(data) != self.blkdev.block_bytes:
raise ValueError(
"Invalid Block Data: size=%d but expected %d"
% (len(data), self.blkdev.block_bytes)
)
self._create_data()
self.data[:] = data
def _write_data(self):
if self.data != None:
self.blkdev.write_block(self.blk_num, self.data)
def _free_data(self):
self.data = None
def _create_data(self):
num_bytes = self.blkdev.block_bytes
self.data = ctypes.create_string_buffer(num_bytes)
def _put_long(self, num, val):
if num < 0:
num = self.block_longs + num
struct.pack_into(">I", self.data, num * 4, val)
def _get_long(self, num):
if num < 0:
num = self.block_longs + num
return struct.unpack_from(">I", self.data, num * 4)[0]
def _put_slong(self, num, val):
if num < 0:
num = self.block_longs + num
struct.pack_into(">i", self.data, num * 4, val)
def _get_slong(self, num):
if num < 0:
num = self.block_longs + num
return struct.unpack_from(">i", self.data, num * 4)[0]
def _get_types(self):
self.type = self._get_long(0)
self.sub_type = self._get_long(-1)
self.valid_types = True
if self.is_type != 0:
if self.type != self.is_type:
self.valid_types = False
if self.is_sub_type != 0:
if self.sub_type != self.is_sub_type:
self.valid_types = False
def _put_types(self):
if self.is_type != 0:
self._put_long(0, self.is_type)
if self.is_sub_type != 0:
self._put_long(-1, self.is_sub_type)
def _get_chksum(self):
self.got_chksum = self._get_long(self.chk_loc)
self.calc_chksum = self._calc_chksum()
self.valid_chksum = self.got_chksum == self.calc_chksum
def _put_chksum(self):
self.calc_chksum = self._calc_chksum()
self.got_chksum = self.calc_chksum
self.valid_chksum = True
self._put_long(self.chk_loc, self.calc_chksum)
def _calc_chksum(self):
chksum = 0
for i in range(self.block_longs):
if i != self.chk_loc:
chksum += self._get_long(i)
return (-chksum) & 0xFFFFFFFF
def _get_timestamp(self, loc):
days = self._get_long(loc)
mins = self._get_long(loc + 1)
ticks = self._get_long(loc + 2)
return TimeStamp(days, mins, ticks)
def _put_timestamp(self, loc, ts):
if ts == None:
ts = TimeStamp()
self._put_long(loc, ts.days)
self._put_long(loc + 1, ts.mins)
self._put_long(loc + 2, ts.ticks)
def _get_bytes(self, loc, size):
if loc < 0:
loc = self.block_longs + loc
loc = loc * 4
return self.data[loc : loc + size]
def _put_bytes(self, loc, data):
if loc < 0:
loc = self.block_longs + loc
loc = loc * 4
size = len(data)
self.data[loc : loc + size] = data
def _get_bstr(self, loc, max_size):
if loc < 0:
loc = self.block_longs + loc
loc = loc * 4
size = ord(self.data[loc])
if size > max_size:
return None
if size == 0:
return FSString()
name = self.data[loc + 1 : loc + 1 + size]
return FSString(name)
def _put_bstr(self, loc, max_size, fs_str):
if fs_str is None:
fs_str = FSString()
assert isinstance(fs_str, FSString)
bstr = fs_str.get_ami_str()
assert len(bstr) < 256
n = len(bstr)
if n > max_size:
bstr = bstr[:max_size]
if loc < 0:
loc = self.block_longs + loc
loc = loc * 4
self.data[loc] = len(bstr)
if len(bstr) > 0:
self.data[loc + 1 : loc + 1 + len(bstr)] = bstr
def _get_cstr(self, loc, max_size):
n = 0
s = b""
loc = loc * 4
while n < max_size:
c = self.data[loc + n]
if ord(c) == 0:
break
s += c
n += 1
return FSString(s)
def _put_cstr(self, loc, max_size, fs_str):
if fs_str is None:
fs_str = FSString()
assert isinstance(fs_str, FSString)
cstr = fs_str.get_ami_str()
n = min(max_size, len(cstr))
loc = loc * 4
if n > 0:
self.data[loc : loc + n] = cstr
def _dump_ptr(self, ptr):
if ptr == self.no_blk:
return "none"
else:
return "%d" % ptr
def dump(self, name, details=True):
print("%sBlock(%d):" % (name, self.blk_num))
if details:
print(
" types: %x/%x (valid: %x/%x)"
% (self.type, self.sub_type, self.is_type, self.is_sub_type)
)
print(
" chksum: 0x%08x (got) 0x%08x (calc)"
% (self.got_chksum, self.calc_chksum)
)
print(" valid: %s" % self.valid)

View File

@@ -1,175 +1,178 @@
import os.path
from .Block import Block
import amitools.fs.DosType as DosType
class BootBlock(Block):
def __init__(self, blkdev, blk_num=0):
Block.__init__(self, blkdev, blk_num)
self.dos_type = None
self.got_root_blk = None
self.got_chksum = 0
self.calc_chksum = 0
self.boot_code = None
self.num_extra = self.blkdev.bootblocks - 1
self.max_boot_code = self.blkdev.bootblocks * self.blkdev.block_bytes - 12
self.extra_blks = []
def create(self, dos_type=DosType.DOS0, root_blk=None, boot_code=None):
Block.create(self)
self._create_data()
self.dos_type = dos_type
self.valid_dos_type = True
# root blk
self.calc_root_blk = int(self.blkdev.num_blocks // 2)
if root_blk != None:
self.got_root_blk = root_blk
else:
self.got_root_blk = self.calc_root_blk
# create extra blks
self.extra_blks = []
for i in range(self.num_extra):
b = Block(self.blkdev, self.blk_num + 1 + i)
b._create_data()
self.extra_blks.append(b)
# setup boot code
return self.set_boot_code(boot_code)
def set_boot_code(self, boot_code):
if boot_code != None:
if len(boot_code) <= self.max_boot_code:
self.boot_code = boot_code
self.valid = True
else:
self.valid = False
else:
self.boot_code = None
self.valid = True
return self.valid
def _calc_chksum(self):
all_blks = [self] + self.extra_blks
n = self.blkdev.block_longs
chksum = 0
for blk in all_blks:
for i in range(n):
if i != 1: # skip chksum
chksum += blk._get_long(i)
if chksum > 0xffffffff:
chksum += 1
chksum &= 0xffffffff
return (~chksum) & 0xffffffff
def read(self):
self._read_data()
# read extra boot blocks
self.extra_blks = []
for i in range(self.num_extra):
b = Block(self.blkdev, self.blk_num + 1 + i)
b._read_data()
self.extra_blks.append(b)
self.dos_type = self._get_long(0)
self.got_chksum = self._get_long(1)
self.got_root_blk = self._get_long(2)
self.calc_chksum = self._calc_chksum()
# calc position of root block
self.calc_root_blk = int(self.blkdev.num_blocks // 2)
# check validity
self.valid_chksum = self.got_chksum == self.calc_chksum
self.valid_dos_type = DosType.is_valid(self.dos_type)
self.valid = self.valid_dos_type
class BootBlock(Block):
def __init__(self, blkdev, blk_num=0):
Block.__init__(self, blkdev, blk_num)
self.dos_type = None
self.got_root_blk = None
self.got_chksum = 0
self.calc_chksum = 0
self.boot_code = None
self.num_extra = self.blkdev.bootblocks - 1
self.max_boot_code = self.blkdev.bootblocks * self.blkdev.block_bytes - 12
self.extra_blks = []
# look for boot_code
if self.valid:
self.read_boot_code()
return self.valid
def read_boot_code(self):
boot_code = self.data[12:]
for blk in self.extra_blks:
boot_code += blk.data.raw
# remove nulls at end
pos = len(boot_code) - 4
while pos > 0:
tag = boot_code[pos:pos+3]
if tag != 'DOS' and boot_code[pos] !=0:
pos += 4
break
pos -= 4
# something left
if pos > 0:
boot_code = boot_code[:pos]
self.boot_code = boot_code
def write(self):
self._create_data()
self._put_long(0, self.dos_type)
self._put_long(2, self.got_root_blk)
def create(self, dos_type=DosType.DOS0, root_blk=None, boot_code=None):
Block.create(self)
self._create_data()
self.dos_type = dos_type
self.valid_dos_type = True
# root blk
self.calc_root_blk = int(self.blkdev.num_blocks // 2)
if root_blk != None:
self.got_root_blk = root_blk
else:
self.got_root_blk = self.calc_root_blk
# create extra blks
self.extra_blks = []
for i in range(self.num_extra):
b = Block(self.blkdev, self.blk_num + 1 + i)
b._create_data()
self.extra_blks.append(b)
# setup boot code
return self.set_boot_code(boot_code)
if self.boot_code != None:
self.write_boot_code()
self.calc_chksum = self._calc_chksum()
self._put_long(1, self.calc_chksum)
self.valid_chksum = True
else:
self.calc_chksum = 0
self.valid_chksum = False
def set_boot_code(self, boot_code):
if boot_code != None:
if len(boot_code) <= self.max_boot_code:
self.boot_code = boot_code
self.valid = True
else:
self.valid = False
else:
self.boot_code = None
self.valid = True
return self.valid
self._write_data()
def write_boot_code(self):
n = len(self.boot_code)
bb = self.blkdev.block_bytes
first_size = bb - 12
boot_code = self.boot_code
# spans more blocks
if n > first_size:
extra = boot_code[first_size:]
boot_code = boot_code[:first_size]
# write extra blocks
pos = 0
off = 0
n -= first_size
while n > 0:
num = n
if num > bb:
num = bb
self.extra_blks[pos].data[:num] = extra[off:off+num]
self.extra_blks[pos]._write_data()
off += num
pos += 1
n -= num
# use this for first block
n = first_size
# embed boot code in boot block
self.data[12:12+n] = boot_code
def dump(self):
print("BootBlock(%d):" % self.blk_num)
print(" dos_type: 0x%08x %s (valid: %s) is_ffs=%s is_intl=%s is_dircache=%s" \
% (self.dos_type, DosType.num_to_tag_str(self.dos_type),
self.valid_dos_type,
DosType.is_ffs(self.dos_type),
DosType.is_intl(self.dos_type),
DosType.is_dircache(self.dos_type)))
print(" root_blk: %d (got %d)" % (self.calc_root_blk, self.got_root_blk))
print(" chksum: 0x%08x (got) 0x%08x (calc) -> bootable: %s" % (self.got_chksum, self.calc_chksum, self.valid_chksum))
print(" valid: %s" % self.valid)
if self.boot_code != None:
print(" boot_code: %d bytes" % len(self.boot_code))
    def _calc_chksum(self):
        """Boot block checksum over all boot blocks of the device.

        Sums every long except the checksum slot (long #1) of each block,
        folding the carry back in after an overflow, and returns the one's
        complement of the 32-bit result.
        """
        all_blks = [self] + self.extra_blks
        n = self.blkdev.block_longs
        chksum = 0
        for blk in all_blks:
            for i in range(n):
                if i != 1:  # skip chksum
                    chksum += blk._get_long(i)
                    # fold the carry back into the 32-bit accumulator
                    if chksum > 0xFFFFFFFF:
                        chksum += 1
                        chksum &= 0xFFFFFFFF
        return (~chksum) & 0xFFFFFFFF
def get_boot_code_dir(self):
my_dir = os.path.dirname(__file__)
bc_dir = os.path.join(my_dir, "bootcode")
if os.path.exists(bc_dir):
return bc_dir
else:
return None
def read(self):
self._read_data()
# read extra boot blocks
self.extra_blks = []
for i in range(self.num_extra):
b = Block(self.blkdev, self.blk_num + 1 + i)
b._read_data()
self.extra_blks.append(b)
self.dos_type = self._get_long(0)
self.got_chksum = self._get_long(1)
self.got_root_blk = self._get_long(2)
self.calc_chksum = self._calc_chksum()
# calc position of root block
self.calc_root_blk = int(self.blkdev.num_blocks // 2)
# check validity
self.valid_chksum = self.got_chksum == self.calc_chksum
self.valid_dos_type = DosType.is_valid(self.dos_type)
self.valid = self.valid_dos_type
# look for boot_code
if self.valid:
self.read_boot_code()
return self.valid
def read_boot_code(self):
boot_code = self.data[12:]
for blk in self.extra_blks:
boot_code += blk.data.raw
# remove nulls at end
pos = len(boot_code) - 4
while pos > 0:
tag = boot_code[pos : pos + 3]
if tag != "DOS" and boot_code[pos] != 0:
pos += 4
break
pos -= 4
# something left
if pos > 0:
boot_code = boot_code[:pos]
self.boot_code = boot_code
def write(self):
self._create_data()
self._put_long(0, self.dos_type)
self._put_long(2, self.got_root_blk)
if self.boot_code != None:
self.write_boot_code()
self.calc_chksum = self._calc_chksum()
self._put_long(1, self.calc_chksum)
self.valid_chksum = True
else:
self.calc_chksum = 0
self.valid_chksum = False
self._write_data()
def write_boot_code(self):
n = len(self.boot_code)
bb = self.blkdev.block_bytes
first_size = bb - 12
boot_code = self.boot_code
# spans more blocks
if n > first_size:
extra = boot_code[first_size:]
boot_code = boot_code[:first_size]
# write extra blocks
pos = 0
off = 0
n -= first_size
while n > 0:
num = n
if num > bb:
num = bb
self.extra_blks[pos].data[:num] = extra[off : off + num]
self.extra_blks[pos]._write_data()
off += num
pos += 1
n -= num
# use this for first block
n = first_size
# embed boot code in boot block
self.data[12 : 12 + n] = boot_code
def dump(self):
print("BootBlock(%d):" % self.blk_num)
print(
" dos_type: 0x%08x %s (valid: %s) is_ffs=%s is_intl=%s is_dircache=%s"
% (
self.dos_type,
DosType.num_to_tag_str(self.dos_type),
self.valid_dos_type,
DosType.is_ffs(self.dos_type),
DosType.is_intl(self.dos_type),
DosType.is_dircache(self.dos_type),
)
)
print(" root_blk: %d (got %d)" % (self.calc_root_blk, self.got_root_blk))
print(
" chksum: 0x%08x (got) 0x%08x (calc) -> bootable: %s"
% (self.got_chksum, self.calc_chksum, self.valid_chksum)
)
print(" valid: %s" % self.valid)
if self.boot_code != None:
print(" boot_code: %d bytes" % len(self.boot_code))
def get_boot_code_dir(self):
my_dir = os.path.dirname(__file__)
bc_dir = os.path.join(my_dir, "bootcode")
if os.path.exists(bc_dir):
return bc_dir
else:
return None

View File

@@ -1,50 +1,48 @@
import time
from .Block import Block
class CommentBlock(Block):
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num, is_type=Block.T_COMMENT)
self.comment = ""
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num, is_type=Block.T_COMMENT)
self.comment = ""
def create(self, header_key, comment=""):
Block.create(self)
self.own_key = self.blk_num
self.header_key = header_key
self.comment = comment
def create(self, header_key, comment=""):
Block.create(self)
self.own_key = self.blk_num
self.header_key = header_key
self.comment = comment
def set(self, data):
self._set_data(data)
self._read()
def set(self, data):
self._set_data(data)
self._read()
def read(self):
self._read_data()
self._read()
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
def _read(self):
Block.read(self)
if not self.valid:
return False
# Comment fields
self.own_key = self._get_long(1)
self.header_key = self._get_long(2)
self.checksum = self._get_long(5)
self.comment = self._get_bstr(6, 79)
self.valid = (self.own_key == self.blk_num)
return self.valid
# Comment fields
self.own_key = self._get_long(1)
self.header_key = self._get_long(2)
self.checksum = self._get_long(5)
self.comment = self._get_bstr(6, 79)
self.valid = self.own_key == self.blk_num
return self.valid
def write(self):
Block._create_data(self)
self._put_long(1, self.own_key)
self._put_long(2, self.header_key)
self._put_bstr(6, 79, self.comment)
Block.write(self)
def write(self):
Block._create_data(self)
self._put_long(1, self.own_key)
self._put_long(2, self.header_key)
self._put_bstr(6, 79, self.comment)
Block.write(self)
def dump(self):
Block.dump(self,"Comment")
print(" own_key: %d" % (self.own_key))
print(" header_key: %d" % (self.header_key))
print(" comment: '%s'" % self.comment)
def dump(self):
Block.dump(self, "Comment")
print(" own_key: %d" % (self.own_key))
print(" header_key: %d" % (self.header_key))
print(" comment: '%s'" % self.comment)

View File

@@ -1,6 +1,3 @@
import time
import struct
@@ -9,173 +6,189 @@ from ..ProtectFlags import ProtectFlags
from ..TimeStamp import TimeStamp
from ..FSString import FSString
class DirCacheRecord:
def __init__(self, entry=0, size=0, protect=0, mod_ts=None, sub_type=0, name='', comment=None):
self.entry = entry
self.size = size
self.protect = protect
self.mod_ts = mod_ts
self.sub_type = sub_type
self.name = name
if comment is None:
self.comment = FSString()
else:
self.comment = comment
self.offset = None
def get_size(self):
total_len = 25 + len(self.name.get_ami_str()) + len(self.comment.get_ami_str())
align_len = (total_len + 1) & ~1
return align_len
def get(self, data, off):
self.offset = off
# header
d = struct.unpack_from(">IIIHHHHH",data,offset=off)
self.entry = d[0]
self.size = d[1]
self.protect = d[2]
self.mod_ts = TimeStamp(d[5],d[6],d[7])
self.type = ord(data[off + 22])
# name
name_len = ord(data[off + 23])
name_off = off + 24
self.name = FSString(data[name_off : name_off + name_len])
# comment
comment_len = ord(data[off + name_len + 24])
comment_off = off + 25 + name_len
self.comment = FSString(data[comment_off : comment_off + comment_len])
return off + self.get_size()
def put(self, data, off):
self.offset = off
# header
ts = self.mod_ts
struct.pack_into(">IIIHHHHH",data,off,self.entry,self.size,self.protect,0,0,ts.days,ts.mins,ts.ticks)
# name
name = self.name.get_ami_str()
name_len = len(name)
data[off + 23] = name_len
name_off = off + 24
data[name_off : name_off + name_len] = name
# comment
comment = self.comment.get_ami_str()
comment_len = len(comment)
data[off + 24 + name_len] = comment_len
comment_off = off + 25 + name_len
data[comment_off : comment_off + comment_len] = comment
return off + self.get_size()
def dump(self):
print("DirCacheRecord(%s)(size=%d)" % (self.offset, self.get_size()))
print("\tentry: %s" % self.entry)
print("\tsize: %s" % self.size)
pf = ProtectFlags(self.protect)
print("\tprotect: 0x%x 0b%s %s" % (self.protect, pf.bin_str(), pf))
print("\tmod_ts: %s" % self.mod_ts)
print("\tsub_type: 0x%x" % self.sub_type)
print("\tname: %s" % self.name)
print("\tcomment: %s" % self.comment)
def __init__(
self, entry=0, size=0, protect=0, mod_ts=None, sub_type=0, name="", comment=None
):
self.entry = entry
self.size = size
self.protect = protect
self.mod_ts = mod_ts
self.sub_type = sub_type
self.name = name
if comment is None:
self.comment = FSString()
else:
self.comment = comment
self.offset = None
    def get_size(self):
        """Return the on-disk size of this record in bytes.

        25 fixed bytes (header plus length fields) plus the name and comment
        strings, rounded up to an even length.
        """
        total_len = 25 + len(self.name.get_ami_str()) + len(self.comment.get_ami_str())
        # round up to an even number of bytes
        align_len = (total_len + 1) & ~1
        return align_len
    def get(self, data, off):
        """Parse one dir-cache record from data at byte offset off.

        Returns the offset just past this record.
        """
        self.offset = off
        # header: entry, size, protect, two unused halfwords, days/mins/ticks
        d = struct.unpack_from(">IIIHHHHH", data, offset=off)
        self.entry = d[0]
        self.size = d[1]
        self.protect = d[2]
        self.mod_ts = TimeStamp(d[5], d[6], d[7])
        # NOTE(review): stores into self.type, but __init__/dump use
        # self.sub_type — confirm which attribute is intended.
        self.type = ord(data[off + 22])
        # name
        name_len = ord(data[off + 23])
        name_off = off + 24
        self.name = FSString(data[name_off : name_off + name_len])
        # comment
        comment_len = ord(data[off + name_len + 24])
        comment_off = off + 25 + name_len
        self.comment = FSString(data[comment_off : comment_off + comment_len])
        return off + self.get_size()
def put(self, data, off):
self.offset = off
# header
ts = self.mod_ts
struct.pack_into(
">IIIHHHHH",
data,
off,
self.entry,
self.size,
self.protect,
0,
0,
ts.days,
ts.mins,
ts.ticks,
)
# name
name = self.name.get_ami_str()
name_len = len(name)
data[off + 23] = name_len
name_off = off + 24
data[name_off : name_off + name_len] = name
# comment
comment = self.comment.get_ami_str()
comment_len = len(comment)
data[off + 24 + name_len] = comment_len
comment_off = off + 25 + name_len
data[comment_off : comment_off + comment_len] = comment
return off + self.get_size()
def dump(self):
print("DirCacheRecord(%s)(size=%d)" % (self.offset, self.get_size()))
print("\tentry: %s" % self.entry)
print("\tsize: %s" % self.size)
pf = ProtectFlags(self.protect)
print("\tprotect: 0x%x 0b%s %s" % (self.protect, pf.bin_str(), pf))
print("\tmod_ts: %s" % self.mod_ts)
print("\tsub_type: 0x%x" % self.sub_type)
print("\tname: %s" % self.name)
print("\tcomment: %s" % self.comment)
class DirCacheBlock(Block):
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num, is_type=Block.T_DIR_CACHE)
def set(self, data):
self._set_data(data)
self._read()
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
# fields
self.own_key = self._get_long(1)
self.parent = self._get_long(2)
self.num_records = self._get_long(3)
self.next_cache = self._get_long(4)
self.records = []
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num, is_type=Block.T_DIR_CACHE)
# get records
off = 24
for i in range(self.num_records):
r = DirCacheRecord()
off = r.get(self.data, off)
if off == -1:
return False
self.records.append(r)
self.valid = True
return True
def get_total_record_size(self):
size = 0
for r in self.records:
size += r.get_size()
return size
def get_free_record_size(self):
return self.blkdev.block_bytes - 24 - self.get_total_record_size()
def set(self, data):
self._set_data(data)
self._read()
def create(self, parent, records=None, next_cache=0):
Block.create(self)
self.own_key = self.blk_num
self.parent = parent
self.next_cache = next_cache
if records == None:
self.num_records = 0
self.records = []
else:
self.num_records = len(records)
self.records = records
self.valid = True
return True
def add_record(self, record):
self.records.append(record)
self.num_records = len(self.records)
def get_record_by_name(self, name):
for r in self.records:
if r.name == name:
return r
return None
def remove_record(self, record):
self.records.remove(record)
self.num_records = len(self.records)
def has_record(self, record):
return record in self.records
def is_empty(self):
return self.num_records == 0
def write(self):
Block._create_data(self)
self._put_long(1, self.own_key)
self._put_long(2, self.parent)
self._put_long(3, self.num_records)
self._put_long(4, self.next_cache)
# put records
off = 24
for r in self.records:
off = r.put(self.data, off)
Block.write(self)
def dump(self):
Block.dump(self,"DirCache")
print(" own_key: %d" % (self.own_key))
print(" parent: %d" % (self.parent))
print(" num_records:%d" % (self.num_records))
print(" next_cache: %d" % (self.next_cache))
print(" num records:%d" % len(self.records))
for r in self.records:
r.dump()
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
# fields
self.own_key = self._get_long(1)
self.parent = self._get_long(2)
self.num_records = self._get_long(3)
self.next_cache = self._get_long(4)
self.records = []
# get records
off = 24
for i in range(self.num_records):
r = DirCacheRecord()
off = r.get(self.data, off)
if off == -1:
return False
self.records.append(r)
self.valid = True
return True
def get_total_record_size(self):
size = 0
for r in self.records:
size += r.get_size()
return size
def get_free_record_size(self):
return self.blkdev.block_bytes - 24 - self.get_total_record_size()
def create(self, parent, records=None, next_cache=0):
Block.create(self)
self.own_key = self.blk_num
self.parent = parent
self.next_cache = next_cache
if records == None:
self.num_records = 0
self.records = []
else:
self.num_records = len(records)
self.records = records
self.valid = True
return True
def add_record(self, record):
self.records.append(record)
self.num_records = len(self.records)
def get_record_by_name(self, name):
for r in self.records:
if r.name == name:
return r
return None
def remove_record(self, record):
self.records.remove(record)
self.num_records = len(self.records)
def has_record(self, record):
return record in self.records
def is_empty(self):
return self.num_records == 0
def write(self):
Block._create_data(self)
self._put_long(1, self.own_key)
self._put_long(2, self.parent)
self._put_long(3, self.num_records)
self._put_long(4, self.next_cache)
# put records
off = 24
for r in self.records:
off = r.put(self.data, off)
Block.write(self)
def dump(self):
Block.dump(self, "DirCache")
print(" own_key: %d" % (self.own_key))
print(" parent: %d" % (self.parent))
print(" num_records:%d" % (self.num_records))
print(" next_cache: %d" % (self.next_cache))
print(" num records:%d" % len(self.records))
for r in self.records:
r.dump()

View File

@@ -1,63 +1,62 @@
from .Block import Block
from .CommentBlock import CommentBlock
from ..FSString import FSString
class EntryBlock(Block):
"""Base class for all block types that describe entries within a directory"""
def __init__(self, blkdev, blk_num, is_type, is_sub_type, is_longname=False):
Block.__init__(self, blkdev, blk_num, is_type, is_sub_type)
self.is_longname = is_longname
self.comment_block_id = 0
"""Base class for all block types that describe entries within a directory"""
def _read_nac_modts(self):
    """Read the name, comment, and modification timestamp."""
    if self.is_longname:
        # In long filename mode, we have a combined field that contains
        # the filename and the comment as consecutive BSTR. If the comment does
        # not fit in, it is stored in an extra block
        nac = self._get_bytes(-46, 112)
        # first byte is the BSTR length of the name
        name_len = nac[0]
        self.name = FSString(nac[1 : name_len + 1])
        # the comment BSTR follows directly after the name
        comment_len = nac[name_len + 1]
        if comment_len > 0:
            self.comment = FSString(nac[name_len + 2 : name_len + 2 + comment_len])
        else:
            # Comment is located in an extra block
            self.comment_block_id = self._get_long(-18)
            self.comment = FSString()
        self.mod_ts = self._get_timestamp(-15)
    else:
        # classic layout: separate comment and name BSTR fields
        self.comment = self._get_bstr(-46, 79)
        self.name = self._get_bstr(-20, 30)
        self.mod_ts = self._get_timestamp(-23)
def __init__(self, blkdev, blk_num, is_type, is_sub_type, is_longname=False):
    """Set up an entry block; is_longname selects the long filename layout."""
    Block.__init__(self, blkdev, blk_num, is_type, is_sub_type)
    self.is_longname = is_longname
    # block number of an external comment block (0 = comment stored inline)
    self.comment_block_id = 0
def _write_nac_modts(self):
"""Writes the name, comment, and modifcation timestamp"""
if self.is_longname:
nac = bytearray()
name = self.name.get_ami_str()
name_len = len(name)
nac.append(name_len)
nac += name
if self.comment_block_id != 0:
nac.append(0)
else:
comment = self.name.get_ami_str()
comment_len = len(comment)
nac.append(comment_len)
nac += comment
self._put_bytes(-46, nac)
self._put_long(-18, self.comment_block_id)
self._put_timestamp(-15, self.mod_ts)
else:
self._put_bstr(-46, 79, self.comment)
self._put_timestamp(-23, self.mod_ts)
self._put_bstr(-20, 30, self.name)
@staticmethod
def needs_extra_comment_block(name, comment):
"""Returns whether the given name/comment pair requires an extra comment block"""
return len(name) + len(comment) > 110
def _read_nac_modts(self):
    """Read the name, comment, and modification timestamp."""
    if self.is_longname:
        # In long filename mode, we have a combined field that contains
        # the filename and the comment as consecutive BSTR. If the comment does
        # not fit in, it is stored in an extra block
        nac = self._get_bytes(-46, 112)
        # first byte is the BSTR length of the name
        name_len = nac[0]
        self.name = FSString(nac[1 : name_len + 1])
        # the comment BSTR follows directly after the name
        comment_len = nac[name_len + 1]
        if comment_len > 0:
            self.comment = FSString(nac[name_len + 2 : name_len + 2 + comment_len])
        else:
            # Comment is located in an extra block
            self.comment_block_id = self._get_long(-18)
            self.comment = FSString()
        self.mod_ts = self._get_timestamp(-15)
    else:
        # classic layout: separate comment and name BSTR fields
        self.comment = self._get_bstr(-46, 79)
        self.name = self._get_bstr(-20, 30)
        self.mod_ts = self._get_timestamp(-23)
def _write_nac_modts(self):
"""Writes the name, comment, and modifcation timestamp"""
if self.is_longname:
nac = bytearray()
name = self.name.get_ami_str()
name_len = len(name)
nac.append(name_len)
nac += name
if self.comment_block_id != 0:
nac.append(0)
else:
comment = self.name.get_ami_str()
comment_len = len(comment)
nac.append(comment_len)
nac += comment
self._put_bytes(-46, nac)
self._put_long(-18, self.comment_block_id)
self._put_timestamp(-15, self.mod_ts)
else:
self._put_bstr(-46, 79, self.comment)
self._put_timestamp(-23, self.mod_ts)
self._put_bstr(-20, 30, self.name)
@staticmethod
def needs_extra_comment_block(name, comment):
"""Returns whether the given name/comment pair requires an extra comment block"""
return len(name) + len(comment) > 110

View File

@ -1,59 +1,56 @@
from .Block import Block
class FileDataBlock(Block):
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num, is_type=Block.T_DATA)
def set(self, data):
self._set_data(data)
self._read()
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
# FileData fields
self.hdr_key = self._get_long(1)
self.seq_num = self._get_long(2)
self.data_size = self._get_long(3)
self.next_data = self._get_long(4)
self.valid = True
return self.valid
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num, is_type=Block.T_DATA)
def create(self, hdr_key, seq_num, data, next_data):
Block.create(self)
self.hdr_key = hdr_key
self.seq_num = seq_num
self.data_size = len(data)
self.next_data = next_data
self.contents = data
def write(self):
    """Serialize header fields and the payload into the block data."""
    Block._create_data(self)
    self._put_long(1, self.hdr_key)
    self._put_long(2, self.seq_num)
    self._put_long(3, self.data_size)
    self._put_long(4, self.next_data)
    if self.contents is not None:  # PEP 8: identity test, was '!= None'
        self.data[24 : 24 + self.data_size] = self.contents
    Block.write(self)
def set(self, data):
self._set_data(data)
self._read()
def get_block_data(self):
return self.data[24:24+self.data_size]
def dump(self):
Block.dump(self,"FileData")
print(" hdr_key: %d" % self.hdr_key)
print(" seq_num: %d" % self.seq_num)
print(" data size: %d" % self.data_size)
print(" next_data: %d" % self.next_data)
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
# FileData fields
self.hdr_key = self._get_long(1)
self.seq_num = self._get_long(2)
self.data_size = self._get_long(3)
self.next_data = self._get_long(4)
self.valid = True
return self.valid
def create(self, hdr_key, seq_num, data, next_data):
    """Initialize a fresh file data block holding the given payload."""
    Block.create(self)
    self.hdr_key = hdr_key
    self.seq_num = seq_num
    self.next_data = next_data
    self.contents = data
    self.data_size = len(data)
def write(self):
    """Serialize header fields and the payload into the block data."""
    Block._create_data(self)
    self._put_long(1, self.hdr_key)
    self._put_long(2, self.seq_num)
    self._put_long(3, self.data_size)
    self._put_long(4, self.next_data)
    if self.contents is not None:  # PEP 8: identity test, was '!= None'
        self.data[24 : 24 + self.data_size] = self.contents
    Block.write(self)
def get_block_data(self):
    """Return the payload bytes stored after the 24-byte block header."""
    start = 24
    return self.data[start : start + self.data_size]
def dump(self):
Block.dump(self, "FileData")
print(" hdr_key: %d" % self.hdr_key)
print(" seq_num: %d" % self.seq_num)
print(" data size: %d" % self.data_size)
print(" next_data: %d" % self.next_data)

View File

@ -1,6 +1,3 @@
import time
from .Block import Block
from .EntryBlock import EntryBlock
@ -9,110 +6,128 @@ from ..ProtectFlags import ProtectFlags
from ..TimeStamp import *
from ..FSString import FSString
class FileHeaderBlock(EntryBlock):
def __init__(self, blkdev, blk_num, is_longname):
EntryBlock.__init__(self, blkdev, blk_num, is_type=Block.T_SHORT, is_sub_type=Block.ST_FILE, is_longname=is_longname)
def set(self, data):
self._set_data(data)
self._read()
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
# FileHeader fields
self.own_key = self._get_long(1)
self.block_count = self._get_long(2)
self.first_data = self._get_long(4)
# read (limited) data blocks table
bc = self.block_count
mbc = self.blkdev.block_longs - 56
if bc > mbc:
bc = mbc
self.data_blocks = []
for i in range(bc):
self.data_blocks.append(self._get_long(-51-i))
self.protect = self._get_long(-48)
self.protect_flags = ProtectFlags(self.protect)
self.byte_size = self._get_long(-47)
self._read_nac_modts()
self.hash_chain = self._get_long(-4)
self.parent = self._get_long(-3)
self.extension = self._get_long(-2)
def __init__(self, blkdev, blk_num, is_longname):
EntryBlock.__init__(
self,
blkdev,
blk_num,
is_type=Block.T_SHORT,
is_sub_type=Block.ST_FILE,
is_longname=is_longname,
)
self.valid = (self.own_key == self.blk_num)
return self.valid
def write(self):
Block._create_data(self)
self._put_long(1, self.own_key)
self._put_long(2, self.block_count)
self._put_long(4, self.first_data)
# data blocks
for i in range(len(self.data_blocks)):
self._put_long(-51-i, self.data_blocks[i])
self._put_long(-48, self.protect)
self._put_long(-47, self.byte_size)
def set(self, data):
self._set_data(data)
self._read()
self._write_nac_modts()
def read(self):
self._read_data()
self._read()
self._put_long(-4, self.hash_chain)
self._put_long(-3, self.parent)
self._put_long(-2, self.extension)
Block.write(self)
def create(self, parent, name, data_blocks, extension, byte_size=0, protect=0, comment=None, mod_ts=None, hash_chain=0):
Block.create(self)
self.own_key = self.blk_num
n = len(data_blocks)
self.block_count = n
if n == 0:
self.first_data = 0
else:
self.first_data = data_blocks[0]
self.data_blocks = data_blocks
self.protect = protect
self.protect_flags = ProtectFlags(self.protect)
self.byte_size = byte_size
if comment is None:
self.comment = FSString()
else:
assert isinstance(comment, FSString)
self.comment = comment
self.mod_ts = mod_ts
assert isinstance(name, FSString)
self.name = name
self.hash_chain = hash_chain
self.parent = parent
self.extension = extension
self.valid = True
return True
def dump(self):
Block.dump(self,"FileHeader")
print(" own_key: %d" % self.own_key)
print(" blk_cnt: %d" % self.block_count)
print(" first_data: %d" % self.first_data)
if self.data_blocks != None:
print(" data blks: %s #%d" % (self.data_blocks, len(self.data_blocks)))
pf = ProtectFlags(self.protect)
print(" protect: 0x%x 0b%s %s" % (self.protect, pf.bin_str(), pf))
print(" byte_size: %d" % self.byte_size)
print(" comment: '%s'" % self.comment)
print(" mod_ts: %s" % self.mod_ts)
print(" name: '%s'" % self.name)
print(" hash_chain: %d" % self.hash_chain)
print(" parent: %d" % self.parent)
print(" extension: %d" % self.extension)
def _read(self):
    """Parse FileHeader fields from the raw block data; returns validity."""
    Block.read(self)
    if not self.valid:
        return False
    # FileHeader fields
    self.own_key = self._get_long(1)
    self.block_count = self._get_long(2)
    self.first_data = self._get_long(4)
    # read (limited) data blocks table
    # never read more entries than fit into this block
    bc = self.block_count
    mbc = self.blkdev.block_longs - 56
    if bc > mbc:
        bc = mbc
    self.data_blocks = []
    for i in range(bc):
        self.data_blocks.append(self._get_long(-51 - i))
    self.protect = self._get_long(-48)
    self.protect_flags = ProtectFlags(self.protect)
    self.byte_size = self._get_long(-47)
    self._read_nac_modts()
    self.hash_chain = self._get_long(-4)
    self.parent = self._get_long(-3)
    self.extension = self._get_long(-2)
    # a valid header references its own block number
    self.valid = self.own_key == self.blk_num
    return self.valid
def write(self):
    """Serialize file header fields and write the block."""
    Block._create_data(self)
    self._put_long(1, self.own_key)
    self._put_long(2, self.block_count)
    self._put_long(4, self.first_data)
    # data blocks
    # table grows downward from long offset -51
    for i in range(len(self.data_blocks)):
        self._put_long(-51 - i, self.data_blocks[i])
    self._put_long(-48, self.protect)
    self._put_long(-47, self.byte_size)
    self._write_nac_modts()
    self._put_long(-4, self.hash_chain)
    self._put_long(-3, self.parent)
    self._put_long(-2, self.extension)
    Block.write(self)
def create(
    self,
    parent,
    name,
    data_blocks,
    extension,
    byte_size=0,
    protect=0,
    comment=None,
    mod_ts=None,
    hash_chain=0,
):
    """Initialize a fresh file header block.

    parent: block number of the parent directory
    name: file name (FSString)
    data_blocks: list of data block numbers belonging to the file
    extension: block number of the first file list (extension) block
    Returns True and marks the block valid.
    """
    Block.create(self)
    self.own_key = self.blk_num
    n = len(data_blocks)
    self.block_count = n
    if n == 0:
        self.first_data = 0
    else:
        # first_data points at the file's first data block
        self.first_data = data_blocks[0]
    self.data_blocks = data_blocks
    self.protect = protect
    self.protect_flags = ProtectFlags(self.protect)
    self.byte_size = byte_size
    if comment is None:
        self.comment = FSString()
    else:
        assert isinstance(comment, FSString)
        self.comment = comment
    self.mod_ts = mod_ts
    assert isinstance(name, FSString)
    self.name = name
    self.hash_chain = hash_chain
    self.parent = parent
    self.extension = extension
    self.valid = True
    return True
def dump(self):
    """Print a human-readable summary of the file header block."""
    Block.dump(self, "FileHeader")
    print(" own_key: %d" % self.own_key)
    print(" blk_cnt: %d" % self.block_count)
    print(" first_data: %d" % self.first_data)
    if self.data_blocks is not None:  # PEP 8: identity test, was '!= None'
        print(" data blks: %s #%d" % (self.data_blocks, len(self.data_blocks)))
    pf = ProtectFlags(self.protect)
    print(" protect: 0x%x 0b%s %s" % (self.protect, pf.bin_str(), pf))
    print(" byte_size: %d" % self.byte_size)
    print(" comment: '%s'" % self.comment)
    print(" mod_ts: %s" % self.mod_ts)
    print(" name: '%s'" % self.name)
    print(" hash_chain: %d" % self.hash_chain)
    print(" parent: %d" % self.parent)
    print(" extension: %d" % self.extension)

View File

@ -1,72 +1,71 @@
from .Block import Block
class FileListBlock(Block):
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num, is_type=Block.T_LIST, is_sub_type=Block.ST_FILE)
def set(self, data):
self._set_data(data)
self._read()
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
# FileList fields
self.own_key = self._get_long(1)
self.block_count = self._get_long(2)
# read (limited) data blocks
bc = self.block_count
mbc = self.blkdev.block_longs - 56
if bc > mbc:
bc = mbc
self.data_blocks = []
for i in range(bc):
self.data_blocks.append(self._get_long(-51-i))
self.parent = self._get_long(-3)
self.extension = self._get_long(-2)
self.valid = (self.own_key == self.blk_num)
return self.valid
def create(self, parent, data_blocks, extension):
Block.create(self)
self.own_key = self.blk_num
self.block_count = len(data_blocks)
self.data_blocks = data_blocks
self.parent = parent
self.extension = extension
self.valid = True
return True
def write(self):
Block._create_data(self)
self._put_long(1, self.own_key)
self._put_long(2, self.block_count)
# data blocks
for i in range(len(self.data_blocks)):
self._put_long(-51-i, self.data_blocks[i])
self._put_long(-3, self.parent)
self._put_long(-2, self.extension)
Block.write(self)
def dump(self):
Block.dump(self,"FileList")
print(" own_key: %d" % self.own_key)
print(" blk_cnt: %d" % self.block_count)
print(" data blks: %s" % self.data_blocks)
print(" parent: %d" % self.parent)
print(" extension: %d" % self.extension)
class FileListBlock(Block):
    """File list (extension) block: continues a file's data block table."""

    def __init__(self, blkdev, blk_num):
        """Set up a file list block (T_LIST / ST_FILE)."""
        Block.__init__(
            self, blkdev, blk_num, is_type=Block.T_LIST, is_sub_type=Block.ST_FILE
        )

    def set(self, data):
        """Attach raw block data and parse it."""
        self._set_data(data)
        self._read()

    def read(self):
        """Read raw data from the block device and parse it."""
        self._read_data()
        self._read()

    def _read(self):
        """Parse the file list fields; returns validity flag."""
        Block.read(self)
        if not self.valid:
            return False
        # FileList fields
        self.own_key = self._get_long(1)
        self.block_count = self._get_long(2)
        # read (limited) data blocks
        # never read more entries than fit into this block
        bc = self.block_count
        mbc = self.blkdev.block_longs - 56
        if bc > mbc:
            bc = mbc
        self.data_blocks = []
        for i in range(bc):
            self.data_blocks.append(self._get_long(-51 - i))
        self.parent = self._get_long(-3)
        self.extension = self._get_long(-2)
        # a valid list block references its own block number
        self.valid = self.own_key == self.blk_num
        return self.valid

    def create(self, parent, data_blocks, extension):
        """Initialize a fresh list block referencing the given data blocks."""
        Block.create(self)
        self.own_key = self.blk_num
        self.block_count = len(data_blocks)
        self.data_blocks = data_blocks
        self.parent = parent
        self.extension = extension
        self.valid = True
        return True

    def write(self):
        """Serialize the fields and write the block."""
        Block._create_data(self)
        self._put_long(1, self.own_key)
        self._put_long(2, self.block_count)
        # data blocks
        # table grows downward from long offset -51
        for i in range(len(self.data_blocks)):
            self._put_long(-51 - i, self.data_blocks[i])
        self._put_long(-3, self.parent)
        self._put_long(-2, self.extension)
        Block.write(self)

    def dump(self):
        """Print a human-readable summary."""
        Block.dump(self, "FileList")
        print(" own_key: %d" % self.own_key)
        print(" blk_cnt: %d" % self.block_count)
        print(" data blks: %s" % self.data_blocks)
        print(" parent: %d" % self.parent)
        print(" extension: %d" % self.extension)

View File

@ -1,6 +1,3 @@
import time
from .Block import Block
@ -8,130 +5,132 @@ from ..TimeStamp import *
class RootBlock(Block):
def __init__(self, blkdev, blk_num):
Block.__init__(self, blkdev, blk_num, is_type=Block.T_SHORT, is_sub_type=Block.ST_ROOT)
def create(self, name, create_ts=None, disk_ts=None, mod_ts=None, extension=0, fstype=0):
Block.create(self)
# init fresh hash table
self.hash_size = self.blkdev.block_longs - 56
self.hash_table = []
for i in range(self.hash_size):
self.hash_table.append(0)
# timestamps
self.mod_ts = mod_ts
self.disk_ts = disk_ts
self.create_ts = create_ts
# name
self.name = name
# bitmap: blank
self.bitmap_flag = 0xffffffff
self.bitmap_ptrs = []
for i in range(25):
self.bitmap_ptrs.append(0)
self.bitmap_ext_blk = 0
# new stuff for DOS6 and DOS7
self.fstype = fstype
self.blocks_used = 0
self.extension = extension
def write(self):
self._create_data()
# hash table
self._put_long(3, self.hash_size)
for i in range(self.hash_size):
self._put_long(6+i, self.hash_table[i])
# bitmap
self._put_long(-50, self.bitmap_flag)
for i in range(25):
self._put_long(-49+i, self.bitmap_ptrs[i])
self._put_long(-24, self.bitmap_ext_blk)
# timestamps
self._put_timestamp(-23, self.mod_ts)
self._put_timestamp(-10, self.disk_ts)
self._put_timestamp(-7, self.create_ts)
# name
self._put_bstr(-20, 30, self.name)
self._put_long(-2, self.extension)
# DOS6 and DOS7 stuff
self._put_long(-11, self.blocks_used)
self._put_long(-4, self.fstype)
def __init__(self, blkdev, blk_num):
Block.__init__(
self, blkdev, blk_num, is_type=Block.T_SHORT, is_sub_type=Block.ST_ROOT
)
Block.write(self)
def set(self, data):
self._set_data(data)
self._read()
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
# name hash (limit to max size)
self.hash_size = self._get_long(3)
# read (limited) hash
hs = self.hash_size
mhs = self.blkdev.block_longs - 56
if hs > mhs:
hs = mhs
self.hash_table = []
for i in range(hs):
self.hash_table.append(self._get_long(6+i))
# bitmap
self.bitmap_flag = self._get_long(-50)
self.bitmap_ptrs = []
for i in range(25):
bm_blk = self._get_long(-49+i)
self.bitmap_ptrs.append(bm_blk)
self.bitmap_ext_blk = self._get_long(-24)
# timestamps
self.mod_ts = self._get_timestamp(-23)
self.disk_ts = self._get_timestamp(-10)
self.create_ts = self._get_timestamp(-7)
# name
self.name = self._get_bstr(-20, 30)
self.extension = self._get_long(-2)
def create(
self, name, create_ts=None, disk_ts=None, mod_ts=None, extension=0, fstype=0
):
Block.create(self)
# init fresh hash table
self.hash_size = self.blkdev.block_longs - 56
self.hash_table = []
for i in range(self.hash_size):
self.hash_table.append(0)
# Number of used blocks (new in DOS6 and DOS7)
self.blocks_used = self._get_long(-11)
# filesystem type (new in DOS6 and DOS7, 0 in others)
self.fstype = self._get_long(-4)
# timestamps
self.mod_ts = mod_ts
self.disk_ts = disk_ts
self.create_ts = create_ts
# check validity
self.valid = True
#self.valid = (self.bitmap_flag == 0xffffffff)
return self.valid
def dump(self):
Block.dump(self, "Root")
print(" hash size: %d" % self.hash_size)
print(" hash table:%s" % self.hash_table)
print(" bmp flag: 0x%08x" % self.bitmap_flag)
print(" bmp ptrs: %s" % self.bitmap_ptrs)
print(" bmp ext: %d" % self.bitmap_ext_blk)
print(" mod_ts: %s" % self.mod_ts)
print(" disk_ts: %s" % self.disk_ts)
print(" create_ts: %s" % self.create_ts)
print(" disk name: %s" % self.name)
print(" extension: %s" % self.extension)
# name
self.name = name
# bitmap: blank
self.bitmap_flag = 0xFFFFFFFF
self.bitmap_ptrs = []
for i in range(25):
self.bitmap_ptrs.append(0)
self.bitmap_ext_blk = 0
# new stuff for DOS6 and DOS7
self.fstype = fstype
self.blocks_used = 0
self.extension = extension
def write(self):
    """Serialize all root block fields and write the block."""
    self._create_data()
    # hash table
    self._put_long(3, self.hash_size)
    for i in range(self.hash_size):
        self._put_long(6 + i, self.hash_table[i])
    # bitmap
    self._put_long(-50, self.bitmap_flag)
    for i in range(25):
        self._put_long(-49 + i, self.bitmap_ptrs[i])
    self._put_long(-24, self.bitmap_ext_blk)
    # timestamps
    self._put_timestamp(-23, self.mod_ts)
    self._put_timestamp(-10, self.disk_ts)
    self._put_timestamp(-7, self.create_ts)
    # name
    self._put_bstr(-20, 30, self.name)
    self._put_long(-2, self.extension)
    # DOS6 and DOS7 stuff
    self._put_long(-11, self.blocks_used)
    self._put_long(-4, self.fstype)
    Block.write(self)
def set(self, data):
self._set_data(data)
self._read()
def read(self):
self._read_data()
self._read()
def _read(self):
    """Parse root block fields from the raw block data; returns validity."""
    Block.read(self)
    if not self.valid:
        return False
    # name hash (limit to max size)
    self.hash_size = self._get_long(3)
    # read (limited) hash
    # never read more entries than fit into this block
    hs = self.hash_size
    mhs = self.blkdev.block_longs - 56
    if hs > mhs:
        hs = mhs
    self.hash_table = []
    for i in range(hs):
        self.hash_table.append(self._get_long(6 + i))
    # bitmap
    self.bitmap_flag = self._get_long(-50)
    self.bitmap_ptrs = []
    for i in range(25):
        bm_blk = self._get_long(-49 + i)
        self.bitmap_ptrs.append(bm_blk)
    self.bitmap_ext_blk = self._get_long(-24)
    # timestamps
    self.mod_ts = self._get_timestamp(-23)
    self.disk_ts = self._get_timestamp(-10)
    self.create_ts = self._get_timestamp(-7)
    # name
    self.name = self._get_bstr(-20, 30)
    self.extension = self._get_long(-2)
    # Number of used blocks (new in DOS6 and DOS7)
    self.blocks_used = self._get_long(-11)
    # filesystem type (new in DOS6 and DOS7, 0 in others)
    self.fstype = self._get_long(-4)
    # check validity
    # NOTE: the bitmap flag check is intentionally disabled below
    self.valid = True
    # self.valid = (self.bitmap_flag == 0xffffffff)
    return self.valid
def dump(self):
    """Print a human-readable summary of the root block."""
    Block.dump(self, "Root")
    print(" hash size: %d" % self.hash_size)
    print(" hash table:%s" % self.hash_table)
    print(" bmp flag: 0x%08x" % self.bitmap_flag)
    print(" bmp ptrs: %s" % self.bitmap_ptrs)
    print(" bmp ext: %d" % self.bitmap_ext_blk)
    print(" mod_ts: %s" % self.mod_ts)
    print(" disk_ts: %s" % self.disk_ts)
    print(" create_ts: %s" % self.create_ts)
    print(" disk name: %s" % self.name)
    print(" extension: %s" % self.extension)

View File

@ -1,90 +1,103 @@
import time
from .Block import Block
from .EntryBlock import EntryBlock
from ..ProtectFlags import ProtectFlags
from ..FSString import FSString
class UserDirBlock(EntryBlock):
def __init__(self, blkdev, blk_num, is_longname):
EntryBlock.__init__(self, blkdev, blk_num, is_type=Block.T_SHORT, is_sub_type=Block.ST_USERDIR,is_longname=is_longname)
def set(self, data):
self._set_data(data)
self._read()
def read(self):
self._read_data()
self._read()
def _read(self):
Block.read(self)
if not self.valid:
return False
def __init__(self, blkdev, blk_num, is_longname):
EntryBlock.__init__(
self,
blkdev,
blk_num,
is_type=Block.T_SHORT,
is_sub_type=Block.ST_USERDIR,
is_longname=is_longname,
)
# UserDir fields
self.own_key = self._get_long(1)
self.protect = self._get_long(-48)
self._read_nac_modts()
self.hash_chain = self._get_long(-4)
self.parent = self._get_long(-3)
self.extension = self._get_long(-2)
def set(self, data):
self._set_data(data)
self._read()
# hash table of entries
self.hash_table = []
self.hash_size = self.blkdev.block_longs - 56
for i in range(self.hash_size):
self.hash_table.append(self._get_long(6+i))
self.valid = (self.own_key == self.blk_num)
return self.valid
def read(self):
self._read_data()
self._read()
def create(self, parent, name, protect=0, comment=None, mod_ts=None, hash_chain=0, extension=0):
Block.create(self)
self.own_key = self.blk_num
self.protect = protect
if comment is None:
self.comment = FSString()
else:
self.comment = comment
# timestamps
self.mod_ts = mod_ts
self.name = name
self.hash_chain = hash_chain
self.parent = parent
self.extension = extension
# empty hash table
self.hash_table = []
self.hash_size = self.blkdev.block_longs - 56
for i in range(self.hash_size):
self.hash_table.append(0)
self.valid = True
return True
def write(self):
Block._create_data(self)
self._put_long(1, self.own_key)
self._put_long(-48, self.protect)
self._write_nac_modts()
self._put_long(-4, self.hash_chain)
self._put_long(-3, self.parent)
self._put_long(-2, self.extension)
# hash table
for i in range(self.hash_size):
self._put_long(6+i, self.hash_table[i])
Block.write(self)
def dump(self):
Block.dump(self,"UserDir")
print(" own_key: %d" % (self.own_key))
pf = ProtectFlags(self.protect)
print(" protect: 0x%x 0b%s %s" % (self.protect, pf.bin_str(), pf))
print(" comment: '%s'" % self.comment)
print(" mod_ts: %s" % self.mod_ts)
print(" name: '%s'" % self.name)
print(" hash_chain: %d" % self.hash_chain)
print(" parent: %d" % self.parent)
print(" extension: %s" % self.extension)
def _read(self):
    """Parse user directory fields from raw block data; returns validity."""
    Block.read(self)
    if not self.valid:
        return False
    # UserDir fields
    self.own_key = self._get_long(1)
    self.protect = self._get_long(-48)
    self._read_nac_modts()
    self.hash_chain = self._get_long(-4)
    self.parent = self._get_long(-3)
    self.extension = self._get_long(-2)
    # hash table of entries
    self.hash_table = []
    self.hash_size = self.blkdev.block_longs - 56
    for i in range(self.hash_size):
        self.hash_table.append(self._get_long(6 + i))
    # a valid directory block references its own block number
    self.valid = self.own_key == self.blk_num
    return self.valid
def create(
    self,
    parent,
    name,
    protect=0,
    comment=None,
    mod_ts=None,
    hash_chain=0,
    extension=0,
):
    """Initialize a fresh user directory block.

    parent: block number of the parent directory
    name: directory name
    Returns True and marks the block valid.
    """
    Block.create(self)
    self.own_key = self.blk_num
    self.protect = protect
    if comment is None:
        self.comment = FSString()
    else:
        self.comment = comment
    # timestamps
    self.mod_ts = mod_ts
    self.name = name
    self.hash_chain = hash_chain
    self.parent = parent
    self.extension = extension
    # empty hash table
    self.hash_table = []
    self.hash_size = self.blkdev.block_longs - 56
    for i in range(self.hash_size):
        self.hash_table.append(0)
    self.valid = True
    return True
def write(self):
    """Serialize user directory fields and write the block."""
    Block._create_data(self)
    self._put_long(1, self.own_key)
    self._put_long(-48, self.protect)
    self._write_nac_modts()
    self._put_long(-4, self.hash_chain)
    self._put_long(-3, self.parent)
    self._put_long(-2, self.extension)
    # hash table
    for i in range(self.hash_size):
        self._put_long(6 + i, self.hash_table[i])
    Block.write(self)
def dump(self):
    """Print a human-readable summary of the user directory block."""
    Block.dump(self, "UserDir")
    print(" own_key: %d" % (self.own_key))
    pf = ProtectFlags(self.protect)
    print(" protect: 0x%x 0b%s %s" % (self.protect, pf.bin_str(), pf))
    print(" comment: '%s'" % self.comment)
    print(" mod_ts: %s" % self.mod_ts)
    print(" name: '%s'" % self.name)
    print(" hash_chain: %d" % self.hash_chain)
    print(" parent: %d" % self.parent)
    print(" extension: %s" % self.extension)

View File

@ -1,61 +1,59 @@
from ..Block import Block
class BadBlockBlock(Block):
def __init__(self, blkdev, blk_num=0):
Block.__init__(self, blkdev, blk_num, chk_loc=2, is_type=Block.BADB)
def create(self, block_pairs, host_id, size=128, next=0):
Block.create(self)
self.size = size
self.host_id = host_id
self.next = next
self.block_pairs = block_pairs
def write(self):
self._create_data()
self._put_long(1, self.size)
self._put_long(3, self.host_id)
self._put_long(4, self.next)
# block_pairs: bad, good, bad, good ...
off = 6
for b in self.block_pairs:
self._put_long(off, b)
off += 1
Block.write(self)
def read(self):
    """Parse the bad block list; returns validity flag.

    The pair table (bad, good, bad, good, ...) starts at long offset 6
    and ends at a 0 / 0xFFFFFFFF entry or the end of the block.
    """
    Block.read(self)
    if not self.valid:
        return False
    self.size = self._get_long(1)
    self.host_id = self._get_long(3)
    self.next = self._get_long(4)
    self.block_pairs = []
    off = 6
    while off < self.block_longs:
        b = self._get_long(off)
        if b == 0 or b == 0xFFFFFFFF:
            break
        self.block_pairs.append(b)
        off += 1  # BUGFIX: offset was never advanced -> endless loop
    return self.valid
def dump(self):
Block.dump(self, "RDBlock")
print(" size: %d" % self.size)
print(" host_id: %d" % self.host_id)
print(" next: %s" % self._dump_ptr(self.next))
n = len(self.block_pairs) // 2
o = 0
for i in range(n):
print(" bad=%d good=%d" % (self.block_pairs[o], self.block_pairs[o+1]))
o += 2
def __init__(self, blkdev, blk_num=0):
Block.__init__(self, blkdev, blk_num, chk_loc=2, is_type=Block.BADB)
def create(self, block_pairs, host_id, size=128, next=0):
Block.create(self)
self.size = size
self.host_id = host_id
self.next = next
self.block_pairs = block_pairs
def write(self):
    """Serialize the header and bad/good block pairs, then write the block."""
    self._create_data()
    self._put_long(1, self.size)
    self._put_long(3, self.host_id)
    self._put_long(4, self.next)
    # block_pairs: bad, good, bad, good ...
    off = 6
    for b in self.block_pairs:
        self._put_long(off, b)
        off += 1
    Block.write(self)
def read(self):
    """Parse the bad block list; returns validity flag.

    The pair table (bad, good, bad, good, ...) starts at long offset 6
    and ends at a 0 / 0xFFFFFFFF entry or the end of the block.
    """
    Block.read(self)
    if not self.valid:
        return False
    self.size = self._get_long(1)
    self.host_id = self._get_long(3)
    self.next = self._get_long(4)
    self.block_pairs = []
    off = 6
    while off < self.block_longs:
        b = self._get_long(off)
        if b == 0 or b == 0xFFFFFFFF:
            break
        self.block_pairs.append(b)
        off += 1  # BUGFIX: offset was never advanced -> endless loop
    return self.valid
def dump(self):
    """Print the header fields and the bad/good block pairs.

    NOTE(review): the header label passed to Block.dump is "RDBlock"
    although this is the bad block list - looks like a copied label;
    confirm before changing the output.
    """
    Block.dump(self, "RDBlock")
    print(" size: %d" % self.size)
    print(" host_id: %d" % self.host_id)
    print(" next: %s" % self._dump_ptr(self.next))
    # pairs are stored flat: even index = bad block, odd index = good block
    n = len(self.block_pairs) // 2
    o = 0
    for i in range(n):
        print(" bad=%d good=%d" % (self.block_pairs[o], self.block_pairs[o + 1]))
        o += 2

View File

@ -1,202 +1,244 @@
from amitools.fs.block.Block import *
import amitools.fs.DosType as DosType
class FSHeaderDeviceNode:
valid_flags = ('type', 'task', 'lock', 'handler', 'stack_size',
'priority', 'startup', 'seg_list_blk', 'global_vec')
valid_flags = (
"type",
"task",
"lock",
"handler",
"stack_size",
"priority",
"startup",
"seg_list_blk",
"global_vec",
)
def __init__(self, type=0, task=0, lock=0, handler=0, stack_size=0, priority=0,
startup=0, seg_list_blk=0, global_vec=0):
self.type = type
self.task = task
self.lock = lock
self.handler = handler
self.stack_size = stack_size
self.priority = priority
self.startup = startup
self.seg_list_blk = seg_list_blk
self.global_vec = global_vec
def __init__(
    self,
    type=0,
    task=0,
    lock=0,
    handler=0,
    stack_size=0,
    priority=0,
    startup=0,
    seg_list_blk=0,
    global_vec=0,
):
    """Hold the nine device node longs of an FSHeader block.

    All values default to 0; see read()/write() for the block positions
    (longs 11..19) they map to.
    """
    self.type = type
    self.task = task
    self.lock = lock
    self.handler = handler
    self.stack_size = stack_size
    self.priority = priority
    self.startup = startup
    self.seg_list_blk = seg_list_blk
    self.global_vec = global_vec
def dump(self):
print("DeviceNode")
print(" type: 0x%08x" % self.type)
print(" task: 0x%08x" % self.task)
print(" lock: 0x%08x" % self.lock)
print(" handler: 0x%08x" % self.handler)
print(" stack_size: 0x%08x" % self.stack_size)
print(" seg_list_blk: 0x%08x" % self.seg_list_blk)
print(" global_vec: 0x%08x" % self.global_vec)
def dump(self):
    """Print the device node fields.

    NOTE(review): priority and startup are stored on the node but not
    printed here - confirm whether that is intentional.
    """
    print("DeviceNode")
    print(" type: 0x%08x" % self.type)
    print(" task: 0x%08x" % self.task)
    print(" lock: 0x%08x" % self.lock)
    print(" handler: 0x%08x" % self.handler)
    print(" stack_size: 0x%08x" % self.stack_size)
    print(" seg_list_blk: 0x%08x" % self.seg_list_blk)
    print(" global_vec: 0x%08x" % self.global_vec)
def get_flags(self, patch_flags = 0x1ff):
res = []
if patch_flags & 0x01 == 0x01:
res.append(('type',self.type))
if patch_flags & 0x02 == 0x02:
res.append(('task',self.task))
if patch_flags & 0x04 == 0x04:
res.append(('lock',self.lock))
if patch_flags & 0x08 == 0x08:
res.append(('handler',self.handler))
if patch_flags & 0x10 == 0x10:
res.append(('stack_size',self.stack_size))
if patch_flags & 0x20 == 0x20:
res.append(('priority',self.priority))
if patch_flags & 0x40 == 0x40:
res.append(('startup',self.startup))
if patch_flags & 0x80 == 0x80:
res.append(('seg_list_blk',self.seg_list_blk))
if patch_flags & 0x100 == 0x100:
res.append(('global_vec',self.global_vec))
return res
def get_flags(self, patch_flags=0x1FF):
    """Return (name, value) pairs for every field whose bit is set in patch_flags."""
    table = (
        (0x01, "type", self.type),
        (0x02, "task", self.task),
        (0x04, "lock", self.lock),
        (0x08, "handler", self.handler),
        (0x10, "stack_size", self.stack_size),
        (0x20, "priority", self.priority),
        (0x40, "startup", self.startup),
        (0x80, "seg_list_blk", self.seg_list_blk),
        (0x100, "global_vec", self.global_vec),
    )
    return [(name, value) for bit, name, value in table if patch_flags & bit]
def set_flags(self, flags):
mask = 0
for f in flags:
key = f[0]
val = int(f[1])
if key == 'type':
self.type = val
mask |= 0x01
elif key == 'task':
self.task = val
mask |= 0x02
elif key == 'lock':
self.lock = val
mask |= 0x04
elif key == 'handler':
self.handler = val
mask |= 0x08
elif key == 'stack_size':
self.stack_size = val
mask |= 0x10
elif key == 'priority':
self.priority = val
mask |= 0x20
elif key == 'startup':
self.startup = val
mask |= 0x40
elif key == 'seg_list_blk':
self.seg_list_blk = val
mask |= 0x80
elif key == 'global_vec':
self.global_vec = val
mask |= 0x100
else:
raise ValueError("Invalid flag: "+key)
return mask
def set_flags(self, flags):
    """Apply (name, value) pairs to the node and return the patch bit mask.

    Raises ValueError for an unknown flag name (and propagates ValueError
    from int() for a non-numeric value).
    """
    bit_of = {
        "type": 0x01,
        "task": 0x02,
        "lock": 0x04,
        "handler": 0x08,
        "stack_size": 0x10,
        "priority": 0x20,
        "startup": 0x40,
        "seg_list_blk": 0x80,
        "global_vec": 0x100,
    }
    mask = 0
    for f in flags:
        key = f[0]
        val = int(f[1])
        if key not in bit_of:
            raise ValueError("Invalid flag: " + key)
        setattr(self, key, val)
        mask |= bit_of[key]
    return mask
def get_valid_flag_names(self):
return ('type', 'task', 'lock', 'handler', 'stack_size', 'priority', 'startup', 'seg_list_blk', 'global_vec')
def get_valid_flag_names(self):
    """Return the tuple of flag names accepted by set_flags()."""
    names = (
        "type", "task", "lock", "handler", "stack_size",
        "priority", "startup", "seg_list_blk", "global_vec",
    )
    return names
# NOTE(review): duplicated pre-/post-black copies of the same method.
def read(self, blk):
    """Load the device-node fields from longwords 11..19 of blk."""
    self.type = blk._get_long(11)
    self.task = blk._get_long(12)
    self.lock = blk._get_long(13)
    self.handler = blk._get_long(14)
    self.stack_size = blk._get_long(15)
    self.priority = blk._get_long(16)
    self.startup = blk._get_long(17)
    self.seg_list_blk = blk._get_long(18)
    self.global_vec = blk._get_long(19)

def read(self, blk):
    """Load the device-node fields from longwords 11..19 of blk."""
    self.type = blk._get_long(11)
    self.task = blk._get_long(12)
    self.lock = blk._get_long(13)
    self.handler = blk._get_long(14)
    self.stack_size = blk._get_long(15)
    self.priority = blk._get_long(16)
    self.startup = blk._get_long(17)
    self.seg_list_blk = blk._get_long(18)
    self.global_vec = blk._get_long(19)
# NOTE(review): duplicated pre-/post-black copies of the same method.
def write(self, blk):
    """Store the device-node fields into longwords 11..19 of blk."""
    blk._put_long(11, self.type)
    blk._put_long(12, self.task)
    blk._put_long(13, self.lock)
    blk._put_long(14, self.handler)
    blk._put_long(15, self.stack_size)
    blk._put_long(16, self.priority)
    blk._put_long(17, self.startup)
    blk._put_long(18, self.seg_list_blk)
    blk._put_long(19, self.global_vec)

def write(self, blk):
    """Store the device-node fields into longwords 11..19 of blk."""
    blk._put_long(11, self.type)
    blk._put_long(12, self.task)
    blk._put_long(13, self.lock)
    blk._put_long(14, self.handler)
    blk._put_long(15, self.stack_size)
    blk._put_long(16, self.priority)
    blk._put_long(17, self.startup)
    blk._put_long(18, self.seg_list_blk)
    blk._put_long(19, self.global_vec)
class FSHeaderBlock(Block):
    """RDB FileSystemHeaderBlock (FSHD); checksum stored at longword 2."""

    # NOTE(review): __init__ appears twice in this diff render
    # (pre- and post-black); both copies are token-identical.
    def __init__(self, blkdev, blk_num):
        Block.__init__(self, blkdev, blk_num, chk_loc=2, is_type=Block.FSHD)

    def __init__(self, blkdev, blk_num):
        Block.__init__(self, blkdev, blk_num, chk_loc=2, is_type=Block.FSHD)
def create(self, host_id=0, next=Block.no_blk, flags=0, dos_type=0, version=0, patch_flags=0,
size=64, dev_node=None):
Block.create(self)
self.size = size
self.host_id = host_id
self.next = next
self.flags = flags
def create(
self,
host_id=0,
next=Block.no_blk,
flags=0,
dos_type=0,
version=0,
patch_flags=0,
size=64,
dev_node=None,
):
Block.create(self)
self.size = size
self.host_id = host_id
self.next = next
self.flags = flags
self.dos_type = dos_type
self.version = version
self.patch_flags = patch_flags
self.dos_type = dos_type
self.version = version
self.patch_flags = patch_flags
if dev_node == None:
dev_node = FSHeaderDeviceNode()
self.dev_node = dev_node
if dev_node == None:
dev_node = FSHeaderDeviceNode()
self.dev_node = dev_node
def write(self):
self._create_data()
def write(self):
self._create_data()
self._put_long(1, self.size)
self._put_long(3, self.host_id)
self._put_long(4, self.next)
self._put_long(5, self.flags)
self._put_long(1, self.size)
self._put_long(3, self.host_id)
self._put_long(4, self.next)
self._put_long(5, self.flags)
self._put_long(8, self.dos_type)
self._put_long(9, self.version)
self._put_long(10, self.patch_flags)
self._put_long(8, self.dos_type)
self._put_long(9, self.version)
self._put_long(10, self.patch_flags)
self.dev_node.write(self)
self.dev_node.write(self)
Block.write(self)
Block.write(self)
def read(self):
Block.read(self)
if not self.valid:
return False
def read(self):
Block.read(self)
if not self.valid:
return False
self.size = self._get_long(1)
self.host_id = self._get_long(3)
self.next = self._get_long(4)
self.flags = self._get_long(5)
self.size = self._get_long(1)
self.host_id = self._get_long(3)
self.next = self._get_long(4)
self.flags = self._get_long(5)
self.dos_type = self._get_long(8)
self.version = self._get_long(9)
self.patch_flags = self._get_long(10)
self.dos_type = self._get_long(8)
self.version = self._get_long(9)
self.patch_flags = self._get_long(10)
self.dev_node = FSHeaderDeviceNode()
self.dev_node.read(self)
self.dev_node = FSHeaderDeviceNode()
self.dev_node.read(self)
return self.valid
return self.valid
# NOTE(review): each accessor below appears twice (pre-/post-black copies).
def get_version_tuple(self):
    """Split the 32-bit version field into (major, minor)."""
    return ((self.version >> 16),(self.version & 0xffff))

def get_version_tuple(self):
    return ((self.version >> 16), (self.version & 0xFFFF))

def get_version_string(self):
    """Format the version field as "major.minor"."""
    return "%d.%d" % self.get_version_tuple()

def get_version_string(self):
    return "%d.%d" % self.get_version_tuple()

def get_flags(self):
    """Return (name, value) pairs for fields enabled in patch_flags."""
    return self.dev_node.get_flags(self.patch_flags)

def get_flags(self):
    return self.dev_node.get_flags(self.patch_flags)

def get_valid_flag_names(self):
    """Delegate to the embedded device node's list of flag names."""
    return self.dev_node.get_valid_flag_names()

def get_valid_flag_names(self):
    return self.dev_node.get_valid_flag_names()
def dump(self):
Block.dump(self, "FSHeader")
def dump(self):
Block.dump(self, "FSHeader")
print(" size: %d" % self.size)
print(" host_id: %d" % self.host_id)
print(" next: %s" % self._dump_ptr(self.next))
print(" flags: 0x%08x" % self.flags)
print(" dos_type: 0x%08x = %s" % (self.dos_type, DosType.num_to_tag_str(self.dos_type)))
print(" version: 0x%08x = %s" % (self.version, self.get_version_string()))
print(" patch_flags: 0x%08x" % self.patch_flags)
print(" size: %d" % self.size)
print(" host_id: %d" % self.host_id)
print(" next: %s" % self._dump_ptr(self.next))
print(" flags: 0x%08x" % self.flags)
print(
" dos_type: 0x%08x = %s"
% (self.dos_type, DosType.num_to_tag_str(self.dos_type))
)
print(
" version: 0x%08x = %s" % (self.version, self.get_version_string())
)
print(" patch_flags: 0x%08x" % self.patch_flags)
self.dev_node.dump()
self.dev_node.dump()
# NOTE(review): duplicated pre-/post-black copies of both methods.
def set_flags(self, flags):
    """Apply (key, value) pairs via the device node; OR bits into patch_flags."""
    mask = self.dev_node.set_flags(flags)
    self.patch_flags |= mask

def set_flags(self, flags):
    mask = self.dev_node.set_flags(flags)
    self.patch_flags |= mask

def set_flag(self, key, value):
    """Apply a single flag; ORs its bit into patch_flags."""
    mask = self.dev_node.set_flags([(key,value)])
    self.patch_flags |= mask

def set_flag(self, key, value):
    mask = self.dev_node.set_flags([(key, value)])
    self.patch_flags |= mask

View File

@ -1,51 +1,49 @@
from ..Block import Block
class LoadSegBlock(Block):
    """RDB LoadSeg block (LSEG): carries a chunk of filesystem loadseg data.

    Payload starts at byte offset 20; 'size' counts longwords including
    the 5-longword header.
    """

    def __init__(self, blkdev, blk_num=0):
        Block.__init__(self, blkdev, blk_num, chk_loc=2, is_type=Block.LSEG)

    def create(self, host_id=0, next=Block.no_blk, size=128):
        # 'next' intentionally shadows the builtin to mirror the on-disk field
        Block.create(self)
        self.size = size
        self.host_id = host_id
        self.next = next

    def write(self):
        """Write the three header longwords, then flush via Block.write."""
        if self.data == None:
            self._create_data()
        self._put_long(1, self.size)
        self._put_long(3, self.host_id)
        self._put_long(4, self.next)
        Block.write(self)

    def set_data(self, data):
        """Copy payload bytes in at offset 20 and recompute the size field."""
        if self.data == None:
            self._create_data()
        self.data[20:20+len(data)] = data
        self.size = (20 + len(data)) // 4

    def get_data(self):
        """Return the payload: (size - 5) longwords after the 20-byte header."""
        return self.data[20:20+(self.size-5)*4]

    def read(self):
        """Read the header longwords; returns the block's validity flag."""
        Block.read(self)
        if not self.valid:
            return False
        self.size = self._get_long(1)
        self.host_id = self._get_long(3)
        self.next = self._get_long(4)
        return self.valid

    def dump(self):
        # NOTE(review): label says "RDBlock" although this is a LoadSeg block
        Block.dump(self, "RDBlock")
        print(" size: %d" % self.size)
        print(" host_id: %d" % self.host_id)
        print(" next: %s" % self._dump_ptr(self.next))
# NOTE(review): post-black copies of the LoadSegBlock methods above.
def __init__(self, blkdev, blk_num=0):
    Block.__init__(self, blkdev, blk_num, chk_loc=2, is_type=Block.LSEG)

def create(self, host_id=0, next=Block.no_blk, size=128):
    """Initialize a fresh LSEG block header."""
    Block.create(self)
    self.size = size
    self.host_id = host_id
    self.next = next

def write(self):
    """Write the three header longwords, then flush via Block.write."""
    if self.data == None:
        self._create_data()
    self._put_long(1, self.size)
    self._put_long(3, self.host_id)
    self._put_long(4, self.next)
    Block.write(self)

def set_data(self, data):
    """Copy payload bytes in at offset 20 and recompute 'size' (longwords)."""
    if self.data == None:
        self._create_data()
    self.data[20 : 20 + len(data)] = data
    self.size = (20 + len(data)) // 4

def get_data(self):
    """Return the payload: (size - 5) longwords after the 20-byte header."""
    return self.data[20 : 20 + (self.size - 5) * 4]

def read(self):
    """Read the header longwords; returns the block's validity flag."""
    Block.read(self)
    if not self.valid:
        return False
    self.size = self._get_long(1)
    self.host_id = self._get_long(3)
    self.next = self._get_long(4)
    return self.valid

def dump(self):
    # NOTE(review): label says "RDBlock" although this is a LoadSeg block
    Block.dump(self, "RDBlock")
    print(" size: %d" % self.size)
    print(" host_id: %d" % self.host_id)
    print(" next: %s" % self._dump_ptr(self.next))

View File

@ -1,170 +1,207 @@
from amitools.fs.block.Block import *
import amitools.fs.DosType as DosType
from amitools.fs.FSString import FSString
class PartitionDosEnv:
    """DosEnvec portion of an RDB partition block (longwords 32..51)."""

    # flag names accepted by the set/get flag helpers
    # NOTE(review): duplicated pre-/post-black copies of the same tuple.
    valid_keys = ('max_transfer', 'mask', 'num_buffer', 'reserved', 'boot_pri', 'pre_alloc', 'boot_blocks')

    valid_keys = (
        "max_transfer",
        "mask",
        "num_buffer",
        "reserved",
        "boot_pri",
        "pre_alloc",
        "boot_blocks",
    )
# NOTE(review): duplicated pre-/post-black copies of __init__.
def __init__(self, size=16, block_size=128, sec_org=0, surfaces=0, sec_per_blk=1, blk_per_trk=0,
             reserved=2, pre_alloc=0, interleave=0, low_cyl=0, high_cyl=0, num_buffer=30,
             buf_mem_type=0, max_transfer=0xffffff, mask=0x7ffffffe, boot_pri=0, dos_type=DosType.DOS0,
             baud=0, control=0, boot_blocks=0):
    """Hold the 20 DosEnvec longwords; defaults follow common AmigaOS values."""
    self.size = size
    self.block_size = block_size
    self.sec_org = sec_org
    self.surfaces = surfaces
    self.sec_per_blk = sec_per_blk
    self.blk_per_trk = blk_per_trk
    self.reserved = reserved
    self.pre_alloc = pre_alloc
    self.interleave = interleave
    self.low_cyl = low_cyl
    self.high_cyl = high_cyl
    self.num_buffer = num_buffer
    self.buf_mem_type = buf_mem_type
    self.max_transfer = max_transfer
    self.mask = mask
    self.boot_pri = boot_pri
    self.dos_type = dos_type
    self.baud = baud
    self.control = control
    self.boot_blocks = boot_blocks

def __init__(
    self,
    size=16,
    block_size=128,
    sec_org=0,
    surfaces=0,
    sec_per_blk=1,
    blk_per_trk=0,
    reserved=2,
    pre_alloc=0,
    interleave=0,
    low_cyl=0,
    high_cyl=0,
    num_buffer=30,
    buf_mem_type=0,
    max_transfer=0xFFFFFF,
    mask=0x7FFFFFFE,
    boot_pri=0,
    dos_type=DosType.DOS0,
    baud=0,
    control=0,
    boot_blocks=0,
):
    """Hold the 20 DosEnvec longwords; defaults follow common AmigaOS values."""
    self.size = size
    self.block_size = block_size
    self.sec_org = sec_org
    self.surfaces = surfaces
    self.sec_per_blk = sec_per_blk
    self.blk_per_trk = blk_per_trk
    self.reserved = reserved
    self.pre_alloc = pre_alloc
    self.interleave = interleave
    self.low_cyl = low_cyl
    self.high_cyl = high_cyl
    self.num_buffer = num_buffer
    self.buf_mem_type = buf_mem_type
    self.max_transfer = max_transfer
    self.mask = mask
    self.boot_pri = boot_pri
    self.dos_type = dos_type
    self.baud = baud
    self.control = control
    self.boot_blocks = boot_blocks
# NOTE(review): duplicated pre-/post-black copies of dump().
def dump(self):
    """Print every DosEnvec field to stdout for debugging."""
    print("DosEnv")
    print(" size: %d" % self.size)
    print(" block_size: %d" % self.block_size)
    print(" sec_org: %d" % self.sec_org)
    print(" surfaces: %d" % self.surfaces)
    print(" sec_per_blk: %d" % self.sec_per_blk)
    print(" blk_per_trk: %d" % self.blk_per_trk)
    print(" reserved: %d" % self.reserved)
    print(" pre_alloc: %d" % self.pre_alloc)
    print(" interleave: %d" % self.interleave)
    print(" low_cyl: %d" % self.low_cyl)
    print(" high_cyl: %d" % self.high_cyl)
    print(" num_buffer: %d" % self.num_buffer)
    print(" buf_mem_type: 0x%08x" % self.buf_mem_type)
    print(" max_transfer: 0x%08x" % self.max_transfer)
    print(" mask: 0x%08x" % self.mask)
    print(" boot_pri: %d" % self.boot_pri)
    print(" dos_type: 0x%08x = %s" % (self.dos_type, DosType.num_to_tag_str(self.dos_type)))
    print(" baud: %d" % self.baud)
    print(" control: %d" % self.control)
    print(" boot_blocks: %d" % self.boot_blocks)

def dump(self):
    """Print every DosEnvec field to stdout for debugging."""
    print("DosEnv")
    print(" size: %d" % self.size)
    print(" block_size: %d" % self.block_size)
    print(" sec_org: %d" % self.sec_org)
    print(" surfaces: %d" % self.surfaces)
    print(" sec_per_blk: %d" % self.sec_per_blk)
    print(" blk_per_trk: %d" % self.blk_per_trk)
    print(" reserved: %d" % self.reserved)
    print(" pre_alloc: %d" % self.pre_alloc)
    print(" interleave: %d" % self.interleave)
    print(" low_cyl: %d" % self.low_cyl)
    print(" high_cyl: %d" % self.high_cyl)
    print(" num_buffer: %d" % self.num_buffer)
    print(" buf_mem_type: 0x%08x" % self.buf_mem_type)
    print(" max_transfer: 0x%08x" % self.max_transfer)
    print(" mask: 0x%08x" % self.mask)
    print(" boot_pri: %d" % self.boot_pri)
    print(
        " dos_type: 0x%08x = %s"
        % (self.dos_type, DosType.num_to_tag_str(self.dos_type))
    )
    print(" baud: %d" % self.baud)
    print(" control: %d" % self.control)
    print(" boot_blocks: %d" % self.boot_blocks)
def read(self, blk):
    """Load the DosEnvec fields from longwords 32..51 of blk."""
    unsigned_fields = (
        "size", "block_size", "sec_org", "surfaces", "sec_per_blk",
        "blk_per_trk", "reserved", "pre_alloc", "interleave", "low_cyl",
        "high_cyl", "num_buffer", "buf_mem_type", "max_transfer", "mask",
    )
    for offset, name in enumerate(unsigned_fields, 32):
        setattr(self, name, blk._get_long(offset))
    # boot priority is the only signed field
    self.boot_pri = blk._get_slong(47)
    for offset, name in enumerate(("dos_type", "baud", "control", "boot_blocks"), 48):
        setattr(self, name, blk._get_long(offset))
def read(self, blk):
    """Load the DosEnvec fields from longwords 32..51 of blk."""
    self.size = blk._get_long(32)
    self.block_size = blk._get_long(33)
    self.sec_org = blk._get_long(34)
    self.surfaces = blk._get_long(35)
    self.sec_per_blk = blk._get_long(36)
    self.blk_per_trk = blk._get_long(37)
    self.reserved = blk._get_long(38)
    self.pre_alloc = blk._get_long(39)
    self.interleave = blk._get_long(40)
    self.low_cyl = blk._get_long(41)
    self.high_cyl = blk._get_long(42)
    self.num_buffer = blk._get_long(43)
    self.buf_mem_type = blk._get_long(44)
    self.max_transfer = blk._get_long(45)
    self.mask = blk._get_long(46)
    # boot priority is the only signed field
    self.boot_pri = blk._get_slong(47)
    self.dos_type = blk._get_long(48)
    self.baud = blk._get_long(49)
    self.control = blk._get_long(50)
    self.boot_blocks = blk._get_long(51)
def write(self, blk):
    """Store the DosEnvec fields into longwords 32..51 of blk."""
    unsigned_values = (
        self.size, self.block_size, self.sec_org, self.surfaces,
        self.sec_per_blk, self.blk_per_trk, self.reserved, self.pre_alloc,
        self.interleave, self.low_cyl, self.high_cyl, self.num_buffer,
        self.buf_mem_type, self.max_transfer, self.mask,
    )
    for offset, value in enumerate(unsigned_values, 32):
        blk._put_long(offset, value)
    # boot priority is the only signed field
    blk._put_slong(47, self.boot_pri)
    tail = (self.dos_type, self.baud, self.control, self.boot_blocks)
    for offset, value in enumerate(tail, 48):
        blk._put_long(offset, value)
def write(self, blk):
    """Store the DosEnvec fields into longwords 32..51 of blk."""
    blk._put_long(32, self.size)
    blk._put_long(33, self.block_size)
    blk._put_long(34, self.sec_org)
    blk._put_long(35, self.surfaces)
    blk._put_long(36, self.sec_per_blk)
    blk._put_long(37, self.blk_per_trk)
    blk._put_long(38, self.reserved)
    blk._put_long(39, self.pre_alloc)
    blk._put_long(40, self.interleave)
    blk._put_long(41, self.low_cyl)
    blk._put_long(42, self.high_cyl)
    blk._put_long(43, self.num_buffer)
    blk._put_long(44, self.buf_mem_type)
    blk._put_long(45, self.max_transfer)
    blk._put_long(46, self.mask)
    # boot priority is the only signed field
    blk._put_slong(47, self.boot_pri)
    blk._put_long(48, self.dos_type)
    blk._put_long(49, self.baud)
    blk._put_long(50, self.control)
    blk._put_long(51, self.boot_blocks)
class PartitionBlock(Block):
    """RDB Partition block (PART); checksum stored at longword 2."""

    # partition flag bits
    # NOTE(review): the constants and __init__ below each appear twice in
    # this diff render (pre-/post-black copies are token-identical).
    FLAG_BOOTABLE = 1
    FLAG_NO_AUTOMOUNT = 2

    FLAG_BOOTABLE = 1
    FLAG_NO_AUTOMOUNT = 2

    def __init__(self, blkdev, blk_num):
        Block.__init__(self, blkdev, blk_num, chk_loc=2, is_type=Block.PART)

    def __init__(self, blkdev, blk_num):
        Block.__init__(self, blkdev, blk_num, chk_loc=2, is_type=Block.PART)
def create(self, drv_name, dos_env, host_id=7, next=Block.no_blk, flags=0, dev_flags=0,
size=64):
Block.create(self)
self.size = size
self.host_id = host_id
self.next = next
self.flags = flags
def create(
self,
drv_name,
dos_env,
host_id=7,
next=Block.no_blk,
flags=0,
dev_flags=0,
size=64,
):
Block.create(self)
self.size = size
self.host_id = host_id
self.next = next
self.flags = flags
self.dev_flags = dev_flags
assert isinstance(drv_name, FSString)
self.drv_name = drv_name
self.dev_flags = dev_flags
assert isinstance(drv_name, FSString)
self.drv_name = drv_name
if dos_env == None:
dos_env = PartitionDosEnv()
self.dos_env = dos_env
self.valid = True
if dos_env == None:
dos_env = PartitionDosEnv()
self.dos_env = dos_env
self.valid = True
def write(self):
self._create_data()
def write(self):
self._create_data()
self._put_long(1, self.size)
self._put_long(3, self.host_id)
self._put_long(4, self.next)
self._put_long(5, self.flags)
self._put_long(1, self.size)
self._put_long(3, self.host_id)
self._put_long(4, self.next)
self._put_long(5, self.flags)
self._put_long(8, self.dev_flags)
self._put_bstr(9, 31, self.drv_name)
self._put_long(8, self.dev_flags)
self._put_bstr(9, 31, self.drv_name)
self.dos_env.write(self)
self.dos_env.write(self)
Block.write(self)
Block.write(self)
def read(self):
Block.read(self)
if not self.valid:
return False
def read(self):
Block.read(self)
if not self.valid:
return False
self.size = self._get_long(1)
self.host_id = self._get_long(3)
self.next = self._get_long(4)
self.flags = self._get_long(5)
self.size = self._get_long(1)
self.host_id = self._get_long(3)
self.next = self._get_long(4)
self.flags = self._get_long(5)
self.dev_flags = self._get_long(8)
self.drv_name = self._get_bstr(9, 31)
self.dev_flags = self._get_long(8)
self.drv_name = self._get_bstr(9, 31)
self.dos_env = PartitionDosEnv()
self.dos_env.read(self)
self.dos_env = PartitionDosEnv()
self.dos_env.read(self)
return self.valid
return self.valid
def dump(self):
Block.dump(self, "Partition")
def dump(self):
Block.dump(self, "Partition")
print(" size: %d" % self.size)
print(" host_id: %d" % self.host_id)
print(" next: %s" % self._dump_ptr(self.next))
print(" flags: 0x%08x" % self.flags)
print(" dev_flags: 0x%08x" % self.dev_flags)
print(" drv_name: '%s'" % self.drv_name)
print(" size: %d" % self.size)
print(" host_id: %d" % self.host_id)
print(" next: %s" % self._dump_ptr(self.next))
print(" flags: 0x%08x" % self.flags)
print(" dev_flags: 0x%08x" % self.dev_flags)
print(" drv_name: '%s'" % self.drv_name)
self.dos_env.dump()
self.dos_env.dump()

View File

@ -1,217 +1,249 @@
from ..Block import Block
from amitools.fs.FSString import FSString
class RDBPhysicalDrive:
    """Physical drive geometry stored in the RigidDisk block (longwords 16..26)."""

    def __init__(self, cyls=0, heads=0, secs=0,
                 interleave=1, parking_zone=-1, write_pre_comp=-1, reduced_write=-1, step_rate=3):
        # -1 means "default to the cylinder count"
        if parking_zone == -1:
            parking_zone = cyls
        if write_pre_comp == -1:
            write_pre_comp = cyls
        if reduced_write == -1:
            reduced_write = cyls
        self.cyls = cyls
        self.heads = heads
        self.secs = secs
        self.interleave = interleave
        self.parking_zone = parking_zone
        self.write_pre_comp = write_pre_comp
        self.reduced_write = reduced_write
        self.step_rate = step_rate

    def dump(self):
        """Print the geometry fields to stdout for debugging."""
        print("PhysicalDrive")
        print(" cyls: %d" % self.cyls)
        print(" heads: %d" % self.heads)
        print(" secs: %d" % self.secs)
        print(" interleave: %d" % self.interleave)
        print(" parking_zone: %d" % self.parking_zone)
        print(" write_pre_comp: %d" % self.write_pre_comp)
        print(" reduced_write: %d" % self.reduced_write)
        print(" step_rate: %d" % self.step_rate)

    def read(self, blk):
        """Load geometry from blk; on-disk order is cyls, secs, heads."""
        self.cyls = blk._get_long(16)
        self.secs = blk._get_long(17)
        self.heads = blk._get_long(18)
        self.interleave = blk._get_long(19)
        self.parking_zone = blk._get_long(20)
        self.write_pre_comp = blk._get_long(24)
        self.reduced_write = blk._get_long(25)
        self.step_rate = blk._get_long(26)

    def write(self, blk):
        """Store geometry into blk, mirroring read()'s field order."""
        blk._put_long(16, self.cyls)
        blk._put_long(17, self.secs)
        blk._put_long(18, self.heads)
        blk._put_long(19, self.interleave)
        blk._put_long(20, self.parking_zone)
        blk._put_long(24, self.write_pre_comp)
        blk._put_long(25, self.reduced_write)
        blk._put_long(26, self.step_rate)
# NOTE(review): post-black copies of the RDBPhysicalDrive methods above.
def __init__(
    self,
    cyls=0,
    heads=0,
    secs=0,
    interleave=1,
    parking_zone=-1,
    write_pre_comp=-1,
    reduced_write=-1,
    step_rate=3,
):
    # -1 means "default to the cylinder count"
    if parking_zone == -1:
        parking_zone = cyls
    if write_pre_comp == -1:
        write_pre_comp = cyls
    if reduced_write == -1:
        reduced_write = cyls
    self.cyls = cyls
    self.heads = heads
    self.secs = secs
    self.interleave = interleave
    self.parking_zone = parking_zone
    self.write_pre_comp = write_pre_comp
    self.reduced_write = reduced_write
    self.step_rate = step_rate

def dump(self):
    """Print the geometry fields to stdout for debugging."""
    print("PhysicalDrive")
    print(" cyls: %d" % self.cyls)
    print(" heads: %d" % self.heads)
    print(" secs: %d" % self.secs)
    print(" interleave: %d" % self.interleave)
    print(" parking_zone: %d" % self.parking_zone)
    print(" write_pre_comp: %d" % self.write_pre_comp)
    print(" reduced_write: %d" % self.reduced_write)
    print(" step_rate: %d" % self.step_rate)

def read(self, blk):
    """Load geometry from blk; on-disk order is cyls, secs, heads."""
    self.cyls = blk._get_long(16)
    self.secs = blk._get_long(17)
    self.heads = blk._get_long(18)
    self.interleave = blk._get_long(19)
    self.parking_zone = blk._get_long(20)
    self.write_pre_comp = blk._get_long(24)
    self.reduced_write = blk._get_long(25)
    self.step_rate = blk._get_long(26)

def write(self, blk):
    """Store geometry into blk, mirroring read()'s field order."""
    blk._put_long(16, self.cyls)
    blk._put_long(17, self.secs)
    blk._put_long(18, self.heads)
    blk._put_long(19, self.interleave)
    blk._put_long(20, self.parking_zone)
    blk._put_long(24, self.write_pre_comp)
    blk._put_long(25, self.reduced_write)
    blk._put_long(26, self.step_rate)
class RDBLogicalDrive:
    """Logical drive info stored in the RigidDisk block (longwords 32..38).

    NOTE(review): every method below appears twice in this diff render
    (pre- and post-black copies are token-identical).
    """

    def __init__(self, rdb_blk_lo=0, rdb_blk_hi=0, lo_cyl=0, hi_cyl=0,
                 cyl_blks=0, high_rdsk_blk=0, auto_park_secs=0):
        self.rdb_blk_lo = rdb_blk_lo
        self.rdb_blk_hi = rdb_blk_hi
        self.lo_cyl = lo_cyl
        self.hi_cyl = hi_cyl
        self.cyl_blks = cyl_blks
        self.high_rdsk_blk = high_rdsk_blk
        self.auto_park_secs = auto_park_secs

    def __init__(
        self,
        rdb_blk_lo=0,
        rdb_blk_hi=0,
        lo_cyl=0,
        hi_cyl=0,
        cyl_blks=0,
        high_rdsk_blk=0,
        auto_park_secs=0,
    ):
        self.rdb_blk_lo = rdb_blk_lo
        self.rdb_blk_hi = rdb_blk_hi
        self.lo_cyl = lo_cyl
        self.hi_cyl = hi_cyl
        self.cyl_blks = cyl_blks
        self.high_rdsk_blk = high_rdsk_blk
        self.auto_park_secs = auto_park_secs

    def dump(self):
        """Print the logical-drive fields to stdout for debugging."""
        print("LogicalDrive")
        print(" rdb_blk_lo: %d" % self.rdb_blk_lo)
        print(" rdb_blk_hi: %d" % self.rdb_blk_hi)
        print(" lo_cyl: %d" % self.lo_cyl)
        print(" hi_cyl: %d" % self.hi_cyl)
        print(" cyl_blks: %d" % self.cyl_blks)
        print(" high_rdsk_blk: %d" % self.high_rdsk_blk)
        print(" auto_park_secs: %d" % self.auto_park_secs)

    def dump(self):
        print("LogicalDrive")
        print(" rdb_blk_lo: %d" % self.rdb_blk_lo)
        print(" rdb_blk_hi: %d" % self.rdb_blk_hi)
        print(" lo_cyl: %d" % self.lo_cyl)
        print(" hi_cyl: %d" % self.hi_cyl)
        print(" cyl_blks: %d" % self.cyl_blks)
        print(" high_rdsk_blk: %d" % self.high_rdsk_blk)
        print(" auto_park_secs: %d" % self.auto_park_secs)

    def read(self, blk):
        # on-disk order: auto_park_secs at long 37, high_rdsk_blk at long 38
        self.rdb_blk_lo = blk._get_long(32)
        self.rdb_blk_hi = blk._get_long(33)
        self.lo_cyl = blk._get_long(34)
        self.hi_cyl = blk._get_long(35)
        self.cyl_blks = blk._get_long(36)
        self.auto_park_secs = blk._get_long(37)
        self.high_rdsk_blk = blk._get_long(38)

    def read(self, blk):
        self.rdb_blk_lo = blk._get_long(32)
        self.rdb_blk_hi = blk._get_long(33)
        self.lo_cyl = blk._get_long(34)
        self.hi_cyl = blk._get_long(35)
        self.cyl_blks = blk._get_long(36)
        self.auto_park_secs = blk._get_long(37)
        self.high_rdsk_blk = blk._get_long(38)

    def write(self, blk):
        """Store the fields, mirroring read()'s longword layout."""
        blk._put_long(32, self.rdb_blk_lo)
        blk._put_long(33, self.rdb_blk_hi)
        blk._put_long(34, self.lo_cyl)
        blk._put_long(35, self.hi_cyl)
        blk._put_long(36, self.cyl_blks)
        blk._put_long(37, self.auto_park_secs)
        blk._put_long(38, self.high_rdsk_blk)

    def write(self, blk):
        blk._put_long(32, self.rdb_blk_lo)
        blk._put_long(33, self.rdb_blk_hi)
        blk._put_long(34, self.lo_cyl)
        blk._put_long(35, self.hi_cyl)
        blk._put_long(36, self.cyl_blks)
        blk._put_long(37, self.auto_park_secs)
        blk._put_long(38, self.high_rdsk_blk)
class RDBDriveID:
    """Vendor/product/revision identification strings of the RigidDisk block.

    NOTE(review): the diff render interleaves pre- and post-black copies of
    the methods below; the displaced ordering (old write after new __init__)
    is an artifact of the hunk layout, not of the original file.
    """

    def __init__(self, disk_vendor="", disk_product="", disk_revision="",
                 ctrl_vendor="", ctrl_product="", ctrl_revision=""):
        self.disk_vendor = FSString(disk_vendor)
        self.disk_product = FSString(disk_product)
        self.disk_revision = FSString(disk_revision)
        self.ctrl_vendor = FSString(ctrl_vendor)
        self.ctrl_product = FSString(ctrl_product)
        self.ctrl_revision = FSString(ctrl_revision)

    def dump(self):
        """Print the identification strings to stdout for debugging."""
        print("DriveID")
        print(" disk_vendor: '%s'" % self.disk_vendor)
        print(" disk_product: '%s'" % self.disk_product)
        print(" disk_revision: '%s'" % self.disk_revision)
        print(" ctrl_vendor: '%s'" % self.ctrl_vendor)
        print(" ctrl_product: '%s'" % self.ctrl_product)
        print(" ctrl_revision: '%s'" % self.ctrl_revision)

    def read(self, blk):
        """Load the strings from fixed character-array slots of blk."""
        self.disk_vendor = blk._get_cstr(40, 8)
        self.disk_product = blk._get_cstr(42, 16)
        self.disk_revision = blk._get_cstr(46, 4)
        self.ctrl_vendor = blk._get_cstr(47, 8)
        self.ctrl_product = blk._get_cstr(49, 16)
        self.ctrl_revision = blk._get_cstr(53, 4)

    def __init__(
        self,
        disk_vendor="",
        disk_product="",
        disk_revision="",
        ctrl_vendor="",
        ctrl_product="",
        ctrl_revision="",
    ):
        self.disk_vendor = FSString(disk_vendor)
        self.disk_product = FSString(disk_product)
        self.disk_revision = FSString(disk_revision)
        self.ctrl_vendor = FSString(ctrl_vendor)
        self.ctrl_product = FSString(ctrl_product)
        self.ctrl_revision = FSString(ctrl_revision)

    def write(self, blk):
        """Store the strings, mirroring read()'s slot layout."""
        blk._put_cstr(40, 8, self.disk_vendor)
        blk._put_cstr(42, 16, self.disk_product)
        blk._put_cstr(46, 4, self.disk_revision)
        blk._put_cstr(47, 8, self.ctrl_vendor)
        blk._put_cstr(49, 16, self.ctrl_product)
        blk._put_cstr(53, 4, self.ctrl_revision)

    def dump(self):
        print("DriveID")
        print(" disk_vendor: '%s'" % self.disk_vendor)
        print(" disk_product: '%s'" % self.disk_product)
        print(" disk_revision: '%s'" % self.disk_revision)
        print(" ctrl_vendor: '%s'" % self.ctrl_vendor)
        print(" ctrl_product: '%s'" % self.ctrl_product)
        print(" ctrl_revision: '%s'" % self.ctrl_revision)

    def read(self, blk):
        self.disk_vendor = blk._get_cstr(40, 8)
        self.disk_product = blk._get_cstr(42, 16)
        self.disk_revision = blk._get_cstr(46, 4)
        self.ctrl_vendor = blk._get_cstr(47, 8)
        self.ctrl_product = blk._get_cstr(49, 16)
        self.ctrl_revision = blk._get_cstr(53, 4)

    def write(self, blk):
        blk._put_cstr(40, 8, self.disk_vendor)
        blk._put_cstr(42, 16, self.disk_product)
        blk._put_cstr(46, 4, self.disk_revision)
        blk._put_cstr(47, 8, self.ctrl_vendor)
        blk._put_cstr(49, 16, self.ctrl_product)
        blk._put_cstr(53, 4, self.ctrl_revision)
class RDBlock(Block):
    """RDB RigidDisk block (RDSK); checksum stored at longword 2."""

    def __init__(self, blkdev, blk_num=0):
        Block.__init__(self, blkdev, blk_num, chk_loc=2, is_type=Block.RDSK)

    # NOTE(review): the diff render tears create() apart here — its tail
    # (phy_drv/log_drv/drv_id/valid) appears below, displaced after a
    # duplicated __init__; the token stream is preserved verbatim.
    def create(self, phy_drv, log_drv, drv_id,
               host_id=7, block_size=512, flags=0x17,
               badblk_list=Block.no_blk, part_list=Block.no_blk, fs_list=Block.no_blk, init_code=Block.no_blk,
               size=64):
        Block.create(self)
        self.size = size
        self.host_id = host_id
        self.block_size = block_size
        self.flags = flags
        self.badblk_list = badblk_list
        self.part_list = part_list
        self.fs_list = fs_list
        self.init_code = init_code

    def __init__(self, blkdev, blk_num=0):
        Block.__init__(self, blkdev, blk_num, chk_loc=2, is_type=Block.RDSK)

        # displaced tail of create() (see NOTE above)
        self.phy_drv = phy_drv
        self.log_drv = log_drv
        self.drv_id = drv_id
        self.valid = True
# NOTE(review): pre-black copies of RDBlock write/read/dump.
def write(self):
    """Serialize header, sub-structures and flush via Block.write."""
    self._create_data()
    self._put_long(1, self.size)
    self._put_long(3, self.host_id)
    self._put_long(4, self.block_size)
    self._put_long(5, self.flags)
    self._put_long(6, self.badblk_list)
    self._put_long(7, self.part_list)
    self._put_long(8, self.fs_list)
    self._put_long(9, self.init_code)
    self.phy_drv.write(self)
    self.log_drv.write(self)
    self.drv_id.write(self)
    Block.write(self)

def read(self):
    """Read header and the three sub-structures; returns validity flag."""
    Block.read(self)
    if not self.valid:
        return False
    self.size = self._get_long(1)
    self.host_id = self._get_long(3)
    self.block_size = self._get_long(4)
    self.flags = self._get_long(5)
    self.badblk_list = self._get_long(6)
    self.part_list = self._get_long(7)
    self.fs_list = self._get_long(8)
    self.init_code = self._get_long(9)
    self.phy_drv = RDBPhysicalDrive()
    self.phy_drv.read(self)
    self.log_drv = RDBLogicalDrive()
    self.log_drv.read(self)
    self.drv_id = RDBDriveID()
    self.drv_id.read(self)
    return self.valid

def dump(self):
    """Print header fields and delegate to the sub-structure dumps."""
    Block.dump(self, "RigidDisk")
    print(" size: %d" % self.size)
    print(" host_id: %d" % self.host_id)
    print(" block_size: %d" % self.block_size)
    print(" flags: 0x%08x" % self.flags)
    print(" badblk_list: %s" % self._dump_ptr(self.badblk_list))
    print(" part_list: %s" % self._dump_ptr(self.part_list))
    print(" fs_list: %s" % self._dump_ptr(self.fs_list))
    print(" init_code: %s" % self._dump_ptr(self.init_code))
    self.phy_drv.dump()
    self.log_drv.dump()
    self.drv_id.dump()
# NOTE(review): post-black copies of RDBlock create/write/read/dump.
def create(
    self,
    phy_drv,
    log_drv,
    drv_id,
    host_id=7,
    block_size=512,
    flags=0x17,
    badblk_list=Block.no_blk,
    part_list=Block.no_blk,
    fs_list=Block.no_blk,
    init_code=Block.no_blk,
    size=64,
):
    """Initialize a fresh RigidDisk block from the given sub-structures."""
    Block.create(self)
    self.size = size
    self.host_id = host_id
    self.block_size = block_size
    self.flags = flags
    self.badblk_list = badblk_list
    self.part_list = part_list
    self.fs_list = fs_list
    self.init_code = init_code
    self.phy_drv = phy_drv
    self.log_drv = log_drv
    self.drv_id = drv_id
    self.valid = True

def write(self):
    """Serialize header, sub-structures and flush via Block.write."""
    self._create_data()
    self._put_long(1, self.size)
    self._put_long(3, self.host_id)
    self._put_long(4, self.block_size)
    self._put_long(5, self.flags)
    self._put_long(6, self.badblk_list)
    self._put_long(7, self.part_list)
    self._put_long(8, self.fs_list)
    self._put_long(9, self.init_code)
    self.phy_drv.write(self)
    self.log_drv.write(self)
    self.drv_id.write(self)
    Block.write(self)

def read(self):
    """Read header and the three sub-structures; returns validity flag."""
    Block.read(self)
    if not self.valid:
        return False
    self.size = self._get_long(1)
    self.host_id = self._get_long(3)
    self.block_size = self._get_long(4)
    self.flags = self._get_long(5)
    self.badblk_list = self._get_long(6)
    self.part_list = self._get_long(7)
    self.fs_list = self._get_long(8)
    self.init_code = self._get_long(9)
    self.phy_drv = RDBPhysicalDrive()
    self.phy_drv.read(self)
    self.log_drv = RDBLogicalDrive()
    self.log_drv.read(self)
    self.drv_id = RDBDriveID()
    self.drv_id.read(self)
    return self.valid

def dump(self):
    """Print header fields and delegate to the sub-structure dumps."""
    Block.dump(self, "RigidDisk")
    print(" size: %d" % self.size)
    print(" host_id: %d" % self.host_id)
    print(" block_size: %d" % self.block_size)
    print(" flags: 0x%08x" % self.flags)
    print(" badblk_list: %s" % self._dump_ptr(self.badblk_list))
    print(" part_list: %s" % self._dump_ptr(self.part_list))
    print(" fs_list: %s" % self._dump_ptr(self.fs_list))
    print(" init_code: %s" % self._dump_ptr(self.init_code))
    self.phy_drv.dump()
    self.log_drv.dump()
    self.drv_id.dump()

View File

@ -1,149 +1,153 @@
from amitools.fs.block.rdb.FSHeaderBlock import *
from amitools.fs.block.rdb.LoadSegBlock import *
from amitools.util.HexDump import *
import amitools.fs.DosType as DosType
class FileSystem:
    """A filesystem stored in an RDB: one FSHD block plus a chain of LSEG blocks."""

    def __init__(self, blkdev, blk_num, num):
        self.blkdev = blkdev
        # block number of the FSHeaderBlock
        self.blk_num = blk_num
        # ordinal of this filesystem within the RDB
        self.num = num
        self.fshd = None
        self.valid = False
        self.lsegs = []
        self.data = None

    def get_next_fs_blk(self):
        """Return the next FSHD block number, or the 0xffffffff end marker."""
        if self.fshd != None:
            return self.fshd.next
        else:
            return 0xffffffff

    def get_blk_nums(self):
        """Return all block numbers used: the FSHD plus every LSEG."""
        res = [self.blk_num]
        for ls in self.lsegs:
            res.append(ls.blk_num)
        return res
def read(self):
# read fs header
self.fshd = FSHeaderBlock(self.blkdev, self.blk_num)
if not self.fshd.read():
self.valid = False
return False
# read lseg blocks
lseg_blk = self.fshd.dev_node.seg_list_blk
self.lsegs = []
data = b""
while lseg_blk != 0xffffffff:
ls = LoadSegBlock(self.blkdev, lseg_blk)
if not ls.read():
def __init__(self, blkdev, blk_num, num):
self.blkdev = blkdev
self.blk_num = blk_num
self.num = num
self.fshd = None
self.valid = False
return False
lseg_blk = ls.next
data += ls.get_data()
self.lsegs.append(ls)
self.data = data
return True
self.lsegs = []
self.data = None
def get_data(self):
    """Return the concatenated loadseg payload read from the LSEG chain."""
    return self.data

# NOTE(review): post-black copies follow; the stray section banner between
# them is displaced by the diff render.
def get_next_fs_blk(self):
    """Return the next FSHD block number, or the 0xFFFFFFFF end marker."""
    if self.fshd != None:
        return self.fshd.next
    else:
        return 0xFFFFFFFF

# ----- create ------

def get_blk_nums(self):
    """Return all block numbers used: the FSHD plus every LSEG."""
    res = [self.blk_num]
    for ls in self.lsegs:
        res.append(ls.blk_num)
    return res
def get_total_blocks(self, data):
    """Return the number of blocks needed to store 'data' in an RDB.

    That is one FSHD block plus ceil(len(data) / payload) LSEG blocks,
    where each LSEG carries block_bytes - 20 payload bytes.
    """
    size = len(data)
    lseg_size = self.blkdev.block_bytes - 20
    # integer ceil-div: the original int((a + b - 1) / b) went through
    # float division, which can round wrong for very large sizes
    num_lseg = (size + lseg_size - 1) // lseg_size
    return num_lseg + 1
def read(self):
    """Read the FSHD block and its LSEG chain; payload collects in self.data.

    Returns False (and sets valid False) on any block read failure.
    NOTE(review): self.valid is not set True on the success path here.
    """
    # read fs header
    self.fshd = FSHeaderBlock(self.blkdev, self.blk_num)
    if not self.fshd.read():
        self.valid = False
        return False
    # read lseg blocks
    lseg_blk = self.fshd.dev_node.seg_list_blk
    self.lsegs = []
    data = b""
    while lseg_blk != 0xFFFFFFFF:
        ls = LoadSegBlock(self.blkdev, lseg_blk)
        if not ls.read():
            self.valid = False
            return False
        lseg_blk = ls.next
        data += ls.get_data()
        self.lsegs.append(ls)
    self.data = data
    return True
def create(self, blks, data, version, dos_type, dev_flags=None):
    """Build the FSHD block plus LSEG chain for 'data' in block numbers 'blks'.

    blks[0] receives the first LSEG; dev_flags is an optional sequence of
    extra (key, value) pairs applied to the FSHD device node.
    """
    self.data = data
    # create fs header
    self.fshd = FSHeaderBlock(self.blkdev, self.blk_num)
    self.fshd.create(version=version, dos_type=dos_type)
    # store begin of seg list
    self.fshd.set_flag('seg_list_blk',blks[0])
    self.fshd.set_flag('global_vec', 0xffffffff)
    # add custom flags
    if dev_flags is not None:
        for p in dev_flags:
            self.fshd.set_flag(p[0], p[1])
    # create lseg blocks
    self.lsegs = []
    # each LSEG carries block_bytes - 20 payload bytes
    lseg_size = self.blkdev.block_bytes - 20
    off = 0
    size = len(data)
    blk_off = 0
    while(off < size):
        blk_len = size - off
        if blk_len > lseg_size:
            blk_len = lseg_size
        blk_data = data[off:off+blk_len]
        # create new lseg block
        ls = LoadSegBlock(self.blkdev, blks[blk_off])
        # get next block
        if blk_off == len(blks)-1:
            next = Block.no_blk
        else:
            next = blks[blk_off+1]
        ls.create(next=next)
        ls.set_data(blk_data)
        self.lsegs.append(ls)
        # next round
        off += blk_len
        blk_off += 1
def get_data(self):
    """Return the concatenated loadseg payload read from the LSEG chain."""
    return self.data

def write(self, only_fshd=False):
    """Write the FSHD block; also the LSEG chain unless only_fshd is set."""
    self.fshd.write()
    if not only_fshd:
        for lseg in self.lsegs:
            lseg.write()

# ----- create ------
# ----- query -----
def get_total_blocks(self, data):
size = len(data)
lseg_size = self.blkdev.block_bytes - 20
num_lseg = int((size + lseg_size - 1) / lseg_size)
return num_lseg + 1
def dump(self, hex_dump=False):
if self.fshd != None:
self.fshd.dump()
# only dump ids of lseg blocks
print("LoadSegBlocks:")
ids = []
for ls in self.lsegs:
ids.append(str(ls.blk_num))
print(" lseg blks: %s" % ",".join(ids))
print(" data size: %d" % len(self.data))
if hex_dump:
print_hex(self.data)
def create(self, blks, data, version, dos_type, dev_flags=None):
self.data = data
# create fs header
self.fshd = FSHeaderBlock(self.blkdev, self.blk_num)
self.fshd.create(version=version, dos_type=dos_type)
# store begin of seg list
self.fshd.set_flag("seg_list_blk", blks[0])
self.fshd.set_flag("global_vec", 0xFFFFFFFF)
# add custom flags
if dev_flags is not None:
for p in dev_flags:
self.fshd.set_flag(p[0], p[1])
# create lseg blocks
self.lsegs = []
lseg_size = self.blkdev.block_bytes - 20
off = 0
size = len(data)
blk_off = 0
while off < size:
blk_len = size - off
if blk_len > lseg_size:
blk_len = lseg_size
blk_data = data[off : off + blk_len]
# create new lseg block
ls = LoadSegBlock(self.blkdev, blks[blk_off])
# get next block
if blk_off == len(blks) - 1:
next = Block.no_blk
else:
next = blks[blk_off + 1]
ls.create(next=next)
ls.set_data(blk_data)
self.lsegs.append(ls)
# next round
off += blk_len
blk_off += 1
def get_flags_info(self):
flags = self.fshd.get_flags()
res = []
for f in flags:
res.append("%s=0x%x" % f)
return " ".join(res)
def write(self, only_fshd=False):
self.fshd.write()
if not only_fshd:
for lseg in self.lsegs:
lseg.write()
def get_valid_flag_names(self):
return self.fshd.get_valid_flag_names()
# ----- query -----
def get_info(self):
flags = self.get_flags_info()
dt = self.fshd.dos_type
dt_str = DosType.num_to_tag_str(dt)
return "FileSystem #%d %s/0x%04x version=%s size=%d %s" % (self.num, dt_str, dt, self.fshd.get_version_string(), len(self.data), flags)
def dump(self, hex_dump=False):
if self.fshd != None:
self.fshd.dump()
# only dump ids of lseg blocks
print("LoadSegBlocks:")
ids = []
for ls in self.lsegs:
ids.append(str(ls.blk_num))
print(" lseg blks: %s" % ",".join(ids))
print(" data size: %d" % len(self.data))
if hex_dump:
print_hex(self.data)
# ----- edit -----
def get_flags_info(self):
flags = self.fshd.get_flags()
res = []
for f in flags:
res.append("%s=0x%x" % f)
return " ".join(res)
def clear_flags(self):
self.fshd.patch_flags = 0
self.fshd.write()
return True
def get_valid_flag_names(self):
return self.fshd.get_valid_flag_names()
def set_flags(self, flags, clear=False):
if clear:
self.fshd.patch_flags = 0
self.fshd.set_flags(flags)
self.fshd.write()
def get_info(self):
flags = self.get_flags_info()
dt = self.fshd.dos_type
dt_str = DosType.num_to_tag_str(dt)
return "FileSystem #%d %s/0x%04x version=%s size=%d %s" % (
self.num,
dt_str,
dt,
self.fshd.get_version_string(),
len(self.data),
flags,
)
# ----- edit -----
def clear_flags(self):
self.fshd.patch_flags = 0
self.fshd.write()
return True
def set_flags(self, flags, clear=False):
if clear:
self.fshd.patch_flags = 0
self.fshd.set_flags(flags)
self.fshd.write()

View File

@ -3,112 +3,122 @@ from amitools.fs.blkdev.PartBlockDevice import PartBlockDevice
import amitools.util.ByteSize as ByteSize
import amitools.fs.DosType as DosType
class Partition:
def __init__(self, blkdev, blk_num, num, cyl_blks, rdisk):
self.blkdev = blkdev
self.blk_num = blk_num
self.num = num
self.cyl_blks = cyl_blks
self.rdisk = rdisk
self.block_bytes = rdisk.block_bytes
self.part_blk = None
def __init__(self, blkdev, blk_num, num, cyl_blks, rdisk):
self.blkdev = blkdev
self.blk_num = blk_num
self.num = num
self.cyl_blks = cyl_blks
self.rdisk = rdisk
self.block_bytes = rdisk.block_bytes
self.part_blk = None
def get_next_partition_blk(self):
if self.part_blk != None:
return self.part_blk.next
else:
return 0xffffffff
def get_next_partition_blk(self):
if self.part_blk != None:
return self.part_blk.next
else:
return 0xFFFFFFFF
def get_blk_num(self):
"""return the block number of the partition block"""
return self.blk_num
def get_blk_num(self):
"""return the block number of the partition block"""
return self.blk_num
def read(self):
# read fs header
self.part_blk = PartitionBlock(self.blkdev, self.blk_num)
if not self.part_blk.read():
self.valid = False
return False
self.valid = True
return True
def read(self):
# read fs header
self.part_blk = PartitionBlock(self.blkdev, self.blk_num)
if not self.part_blk.read():
self.valid = False
return False
self.valid = True
return True
def create_blkdev(self, auto_close_rdb_blkdev=False):
"""create a block device for accessing this partition"""
return PartBlockDevice(self.blkdev, self.part_blk, auto_close_rdb_blkdev)
def create_blkdev(self, auto_close_rdb_blkdev=False):
"""create a block device for accessing this partition"""
return PartBlockDevice(self.blkdev, self.part_blk, auto_close_rdb_blkdev)
def write(self):
self.part_blk.write()
def write(self):
self.part_blk.write()
# ----- Query -----
# ----- Query -----
def dump(self):
self.part_blk.dump()
def dump(self):
self.part_blk.dump()
def get_num_cyls(self):
p = self.part_blk
return p.dos_env.high_cyl - p.dos_env.low_cyl + 1
def get_num_cyls(self):
p = self.part_blk
return p.dos_env.high_cyl - p.dos_env.low_cyl + 1
def get_num_blocks(self):
"""return total number of blocks in this partition"""
return self.get_num_cyls() * self.cyl_blks
def get_num_blocks(self):
"""return total number of blocks in this partition"""
return self.get_num_cyls() * self.cyl_blks
def get_num_bytes(self):
return self.get_num_blocks() * self.block_bytes
def get_num_bytes(self):
return self.get_num_blocks() * self.block_bytes
def get_drive_name(self):
return self.part_blk.drv_name
def get_drive_name(self):
return self.part_blk.drv_name
def get_flags(self):
return self.part_blk.flags
def get_flags(self):
return self.part_blk.flags
def get_index(self):
return self.num
def get_index(self):
return self.num
def get_cyl_range(self):
de = self.part_blk.dos_env
if de == None:
return None
else:
return (de.low_cyl, de.high_cyl)
def get_cyl_range(self):
de = self.part_blk.dos_env
if de == None:
return None
else:
return (de.low_cyl, de.high_cyl)
def get_info(self, total_blks=0):
"""return a string line with typical info about this partition"""
p = self.part_blk
de = p.dos_env
name = "'%s'" % p.drv_name
part_blks = self.get_num_blocks()
part_bytes = self.get_num_bytes()
extra = ""
if total_blks != 0:
ratio = 100.0 * part_blks / total_blks
extra += "%6.2f%% " % ratio
# add dos type
dos_type = de.dos_type
extra += DosType.num_to_tag_str(dos_type)
extra += "/0x%04x" % dos_type
return "Partition: #%d %-06s %8d %8d %10d %s %s" \
% (self.num, name, de.low_cyl, de.high_cyl, part_blks, ByteSize.to_byte_size_str(part_bytes), extra)
def get_info(self, total_blks=0):
"""return a string line with typical info about this partition"""
p = self.part_blk
de = p.dos_env
name = "'%s'" % p.drv_name
part_blks = self.get_num_blocks()
part_bytes = self.get_num_bytes()
extra = ""
if total_blks != 0:
ratio = 100.0 * part_blks / total_blks
extra += "%6.2f%% " % ratio
# add dos type
dos_type = de.dos_type
extra += DosType.num_to_tag_str(dos_type)
extra += "/0x%04x" % dos_type
return "Partition: #%d %-06s %8d %8d %10d %s %s" % (
self.num,
name,
de.low_cyl,
de.high_cyl,
part_blks,
ByteSize.to_byte_size_str(part_bytes),
extra,
)
def get_extra_infos(self):
result = []
p = self.part_blk
de = p.dos_env
# layout
result.append("blk_longs=%d, sec/blk=%d, surf=%d, blk/trk=%d" % \
(de.block_size, de.sec_per_blk, de.surfaces, de.blk_per_trk))
result.append("fs_block_size=%d" % (de.block_size * 4 * de.sec_per_blk))
# max transfer
result.append("max_transfer=0x%x" % de.max_transfer)
result.append("mask=0x%x" % de.mask)
result.append("num_buffer=%d" % de.num_buffer)
# add flags
flags = p.flags
if flags & PartitionBlock.FLAG_BOOTABLE == PartitionBlock.FLAG_BOOTABLE:
result.append("bootable=1 pri=%d" % de.boot_pri)
else:
result.append("bootable=0")
if flags & PartitionBlock.FLAG_NO_AUTOMOUNT == PartitionBlock.FLAG_NO_AUTOMOUNT:
result.append("automount=0")
else:
result.append("automount=1")
return result
def get_extra_infos(self):
result = []
p = self.part_blk
de = p.dos_env
# layout
result.append(
"blk_longs=%d, sec/blk=%d, surf=%d, blk/trk=%d"
% (de.block_size, de.sec_per_blk, de.surfaces, de.blk_per_trk)
)
result.append("fs_block_size=%d" % (de.block_size * 4 * de.sec_per_blk))
# max transfer
result.append("max_transfer=0x%x" % de.max_transfer)
result.append("mask=0x%x" % de.mask)
result.append("num_buffer=%d" % de.num_buffer)
# add flags
flags = p.flags
if flags & PartitionBlock.FLAG_BOOTABLE == PartitionBlock.FLAG_BOOTABLE:
result.append("bootable=1 pri=%d" % de.boot_pri)
else:
result.append("bootable=0")
if flags & PartitionBlock.FLAG_NO_AUTOMOUNT == PartitionBlock.FLAG_NO_AUTOMOUNT:
result.append("automount=0")
else:
result.append("automount=1")
return result

File diff suppressed because it is too large Load Diff

View File

@ -1,134 +1,176 @@
from amitools.fs.validate.Log import Log
import struct
class BitmapScan:
"""Validate the bitmap of a file system"""
def __init__(self, block_scan, log):
self.block_scan = block_scan
self.log = log
self.bm_blocks = None
def scan_bitmap(self, root):
"""scan the file system bitmap"""
# first check bitmap flag
bm_flag = root.bitmap_flag
if bm_flag != 0xffffffff:
self.log.msg(Log.WARN,"Root bitmap flag not valid (-1)",root.blk_num)
# now calculate the size of the bitmap
num_blks = self.block_scan.blkdev.num_blocks - self.block_scan.blkdev.reserved
block_longs = self.block_scan.blkdev.block_longs - 1 # all longs are available for bitmap
self.num_bm_lwords = int((num_blks + 31) // 32) # 32 blocks fit in a long word
self.num_bm_blocks = int((self.num_bm_lwords + block_longs - 1) // block_longs)
self.log.msg(Log.DEBUG,"Total Bitmap DWORDs: %d (block %d)" % (self.num_bm_lwords, block_longs))
self.log.msg(Log.DEBUG,"Number of Bitmap Blocks: %d" % self.num_bm_blocks)
# calc the bitmask in the last word
last_filled_bits = self.num_bm_lwords * 32 - num_blks
if last_filled_bits == 32:
self.last_mask = 0xffffffff
else:
self.last_mask = (1 << last_filled_bits) - 1
self.log.msg(Log.DEBUG,"Last DWORD mask: %08x" % self.last_mask)
# now scan bitmap blocks and build list of all bitmap blocks
self.read_bitmap_ptrs_and_blocks(root)
found_blocks = len(self.bm_blocks)
self.log.msg(Log.DEBUG,"Found Bitmap Blocks: %d" % found_blocks)
# check number of blocks
if found_blocks != self.num_bm_blocks:
self.log.msg(Log.ERROR,"Invalid number of Bitmap Blocks: found=%d expected=%d" % (found_blocks, self.num_bm_blocks), root.blk_num)
else:
# check bits in bitmap
self.check_bits()
def check_bits(self):
"""calculate allocation bits and verify with stored ones"""
# block range
blkdev = self.block_scan.blkdev
# first bitmap data
cur_pos = 0
bm_blk = 0
cur_data = self.bm_blocks[0].bitmap
blk_size = len(cur_data)
# loop throug all bitmap longwords
lw = 0
blk_num = blkdev.reserved
max_lw = self.num_bm_lwords - 1
while lw < max_lw:
got = struct.unpack_from(">I",cur_data,cur_pos)[0]
expect = self.calc_lword(blk_num)
if got != expect:
self.log.msg(Log.ERROR,"Invalid bitmap allocation (@%d: #%d+%d) blks [%d...%d] got=%08x expect=%08x" \
% (lw, bm_blk, cur_pos/4, blk_num, blk_num+31, got, expect))
lw += 1
blk_num += 32
# fetch next bitmap data block
cur_pos += 4
if cur_pos == blk_size:
bm_blk += 1
cur_data = self.bm_blocks[bm_blk].bitmap
"""Validate the bitmap of a file system"""
def __init__(self, block_scan, log):
self.block_scan = block_scan
self.log = log
self.bm_blocks = None
def scan_bitmap(self, root):
"""scan the file system bitmap"""
# first check bitmap flag
bm_flag = root.bitmap_flag
if bm_flag != 0xFFFFFFFF:
self.log.msg(Log.WARN, "Root bitmap flag not valid (-1)", root.blk_num)
# now calculate the size of the bitmap
num_blks = self.block_scan.blkdev.num_blocks - self.block_scan.blkdev.reserved
block_longs = (
self.block_scan.blkdev.block_longs - 1
) # all longs are available for bitmap
self.num_bm_lwords = int((num_blks + 31) // 32) # 32 blocks fit in a long word
self.num_bm_blocks = int((self.num_bm_lwords + block_longs - 1) // block_longs)
self.log.msg(
Log.DEBUG,
"Total Bitmap DWORDs: %d (block %d)" % (self.num_bm_lwords, block_longs),
)
self.log.msg(Log.DEBUG, "Number of Bitmap Blocks: %d" % self.num_bm_blocks)
# calc the bitmask in the last word
last_filled_bits = self.num_bm_lwords * 32 - num_blks
if last_filled_bits == 32:
self.last_mask = 0xFFFFFFFF
else:
self.last_mask = (1 << last_filled_bits) - 1
self.log.msg(Log.DEBUG, "Last DWORD mask: %08x" % self.last_mask)
# now scan bitmap blocks and build list of all bitmap blocks
self.read_bitmap_ptrs_and_blocks(root)
found_blocks = len(self.bm_blocks)
self.log.msg(Log.DEBUG, "Found Bitmap Blocks: %d" % found_blocks)
# check number of blocks
if found_blocks != self.num_bm_blocks:
self.log.msg(
Log.ERROR,
"Invalid number of Bitmap Blocks: found=%d expected=%d"
% (found_blocks, self.num_bm_blocks),
root.blk_num,
)
else:
# check bits in bitmap
self.check_bits()
def check_bits(self):
"""calculate allocation bits and verify with stored ones"""
# block range
blkdev = self.block_scan.blkdev
# first bitmap data
cur_pos = 0
# the last long word
got = struct.unpack_from(">I",cur_data,cur_pos)[0] & self.last_mask
expect = self.calc_lword(blk_num) & self.last_mask
if got != expect:
self.log.msg(Log.ERROR,"Invalid bitmap allocation (last) (@%d: #%d+%d) blks [%d...%d] got=%08x expect=%08x" \
% (lw, bm_blk, cur_pos/4, blk_num, blkdev.num_blocks-1, got, expect))
def calc_lword(self, blk_num):
"""calcuate the bitmap lword"""
value = 0
for i in range(32):
# set bit in lword if block is available
if not self.block_scan.is_block_available(blk_num + i):
mask = 1 << i
value |= mask
return value
def read_bitmap_ptrs_and_blocks(self, root):
"""build the list of all file system bitmap blocks"""
self.bm_blocks = []
# scan list embedded in root block
self.read_bm_list(root.bitmap_ptrs, root.blk_num)
# now follow bitmap extension blocks
cur_blk_num = root.blk_num
bm_ext = root.bitmap_ext_blk
while bm_ext != 0:
# check ext block
if self.block_scan.is_block_available(bm_ext):
self.log.msg(Log.ERROR,"Bitmap ext block @%d already used?" % bm_block, cur_blk_num)
else:
bi = self.block_scan.read_block(bm_ext, is_bm_ext=True)
if bi == None:
self.log.msg(Log.ERROR,"Error reading bitmap ext block @%d" % bm_ext, cur_blk_num)
break
else:
self.read_bm_list(bi.bitmap_ptrs, bm_ext)
cur_blk_num = bm_ext
bm_ext = bi.next_blk
def read_bm_list(self, ptrs, blk_num):
list_end = False
for bm_block in ptrs:
# still inside the pointer list
if list_end == False:
# add a normal block
if bm_block != 0:
# make sure bitmap block was not used already
if self.block_scan.is_block_available(bm_block):
self.log.msg(Log.ERROR,"Bitmap block @%d already used?" % bm_block, blk_num)
else:
# read bitmap block
bi = self.block_scan.read_block(bm_block, is_bm=True)
if bi == None:
self.log.msg(Log.ERROR,"Error reading bitmap block @%d" % bm_block, blk_num)
bm_blk = 0
cur_data = self.bm_blocks[0].bitmap
blk_size = len(cur_data)
# loop throug all bitmap longwords
lw = 0
blk_num = blkdev.reserved
max_lw = self.num_bm_lwords - 1
while lw < max_lw:
got = struct.unpack_from(">I", cur_data, cur_pos)[0]
expect = self.calc_lword(blk_num)
if got != expect:
self.log.msg(
Log.ERROR,
"Invalid bitmap allocation (@%d: #%d+%d) blks [%d...%d] got=%08x expect=%08x"
% (lw, bm_blk, cur_pos / 4, blk_num, blk_num + 31, got, expect),
)
lw += 1
blk_num += 32
# fetch next bitmap data block
cur_pos += 4
if cur_pos == blk_size:
bm_blk += 1
cur_data = self.bm_blocks[bm_blk].bitmap
cur_pos = 0
# the last long word
got = struct.unpack_from(">I", cur_data, cur_pos)[0] & self.last_mask
expect = self.calc_lword(blk_num) & self.last_mask
if got != expect:
self.log.msg(
Log.ERROR,
"Invalid bitmap allocation (last) (@%d: #%d+%d) blks [%d...%d] got=%08x expect=%08x"
% (
lw,
bm_blk,
cur_pos / 4,
blk_num,
blkdev.num_blocks - 1,
got,
expect,
),
)
def calc_lword(self, blk_num):
"""calcuate the bitmap lword"""
value = 0
for i in range(32):
# set bit in lword if block is available
if not self.block_scan.is_block_available(blk_num + i):
mask = 1 << i
value |= mask
return value
def read_bitmap_ptrs_and_blocks(self, root):
"""build the list of all file system bitmap blocks"""
self.bm_blocks = []
# scan list embedded in root block
self.read_bm_list(root.bitmap_ptrs, root.blk_num)
# now follow bitmap extension blocks
cur_blk_num = root.blk_num
bm_ext = root.bitmap_ext_blk
while bm_ext != 0:
# check ext block
if self.block_scan.is_block_available(bm_ext):
self.log.msg(
Log.ERROR,
"Bitmap ext block @%d already used?" % bm_block,
cur_blk_num,
)
else:
self.bm_blocks.append(bi)
else:
list_end = True
else:
# make sure no blocks are referenced
if bm_block != 0:
self.log.msg(Log.ERROR,"Referenced bitmap block @%d beyond end of list" % bm_block, blk_num)
bi = self.block_scan.read_block(bm_ext, is_bm_ext=True)
if bi == None:
self.log.msg(
Log.ERROR,
"Error reading bitmap ext block @%d" % bm_ext,
cur_blk_num,
)
break
else:
self.read_bm_list(bi.bitmap_ptrs, bm_ext)
cur_blk_num = bm_ext
bm_ext = bi.next_blk
def read_bm_list(self, ptrs, blk_num):
list_end = False
for bm_block in ptrs:
# still inside the pointer list
if list_end == False:
# add a normal block
if bm_block != 0:
# make sure bitmap block was not used already
if self.block_scan.is_block_available(bm_block):
self.log.msg(
Log.ERROR,
"Bitmap block @%d already used?" % bm_block,
blk_num,
)
else:
# read bitmap block
bi = self.block_scan.read_block(bm_block, is_bm=True)
if bi == None:
self.log.msg(
Log.ERROR,
"Error reading bitmap block @%d" % bm_block,
blk_num,
)
else:
self.bm_blocks.append(bi)
else:
list_end = True
else:
# make sure no blocks are referenced
if bm_block != 0:
self.log.msg(
Log.ERROR,
"Referenced bitmap block @%d beyond end of list" % bm_block,
blk_num,
)

View File

@ -1,6 +1,3 @@
import time
from amitools.fs.block.Block import Block
@ -17,232 +14,256 @@ import amitools.fs.DosType as DosType
from amitools.fs.validate.Log import Log
class BlockInfo:
"""Store essential info of a block"""
def __init__(self, blk_num):
self.blk_num = blk_num
self.blk_status = BlockScan.BS_UNKNOWN
self.blk_type = BlockScan.BT_UNKNOWN
self.used = False
self.own_key = None
def __str__(self):
return str(self.__dict__)
"""Store essential info of a block"""
def __init__(self, blk_num):
self.blk_num = blk_num
self.blk_status = BlockScan.BS_UNKNOWN
self.blk_type = BlockScan.BT_UNKNOWN
self.used = False
self.own_key = None
def __str__(self):
return str(self.__dict__)
class BlockScan:
"""Scan a full volume and classify the blocks"""
# block status
BS_UNKNOWN = 0 # undecided or unchecked
BS_READ_ERROR = 1 # error reading block
BS_INVALID = 2 # not a detected AmigaDOS block
BS_VALID = 3 # is a AmigaDOS block structure but type was not detected
BS_TYPE = 4 # detected block type
NUM_BS = 5
# block type
BT_UNKNOWN = 0
BT_ROOT = 1
BT_DIR = 2
BT_FILE_HDR = 3
BT_FILE_LIST = 4
BT_FILE_DATA = 5
BT_BITMAP = 6
BT_BITMAP_EXT = 7
BT_COMMENT = 8
NUM_BT = 9
def __init__(self, blkdev, log, dos_type):
self.blkdev = blkdev
self.log = log
self.dos_type = dos_type
"""Scan a full volume and classify the blocks"""
self.map_status = None
self.map_type = None
self.block_map = [None] * self.blkdev.num_blocks
self.map_status = [[] for i in range(self.NUM_BS)]
self.map_type = [[] for i in range(self.NUM_BT)]
# block status
BS_UNKNOWN = 0 # undecided or unchecked
BS_READ_ERROR = 1 # error reading block
BS_INVALID = 2 # not a detected AmigaDOS block
BS_VALID = 3 # is a AmigaDOS block structure but type was not detected
BS_TYPE = 4 # detected block type
NUM_BS = 5
def scan_all(self, progress=lambda x : x):
"""Scan all blocks of the given block device
# block type
BT_UNKNOWN = 0
BT_ROOT = 1
BT_DIR = 2
BT_FILE_HDR = 3
BT_FILE_LIST = 4
BT_FILE_DATA = 5
BT_BITMAP = 6
BT_BITMAP_EXT = 7
BT_COMMENT = 8
NUM_BT = 9
def __init__(self, blkdev, log, dos_type):
self.blkdev = blkdev
self.log = log
self.dos_type = dos_type
self.map_status = None
self.map_type = None
self.block_map = [None] * self.blkdev.num_blocks
self.map_status = [[] for i in range(self.NUM_BS)]
self.map_type = [[] for i in range(self.NUM_BT)]
def scan_all(self, progress=lambda x: x):
"""Scan all blocks of the given block device
Return True if there is a chance that a file system will be found there
"""
# range to scan
begin_blk = self.blkdev.reserved
num_blk = self.blkdev.num_blocks - self.blkdev.reserved
self.log.msg(Log.DEBUG,"block: checking range: +%d num=%d" % (begin_blk, num_blk))
# range to scan
begin_blk = self.blkdev.reserved
num_blk = self.blkdev.num_blocks - self.blkdev.reserved
self.log.msg(
Log.DEBUG, "block: checking range: +%d num=%d" % (begin_blk, num_blk)
)
# scan all blocks
for n in range(num_blk):
blk_num = n + begin_blk
# read/get block
bi = self.get_block(blk_num)
# own key ok?
if bi != None:
if bi.blk_status == self.BS_TYPE:
if bi.own_key != None and bi.own_key != blk_num:
self.log.msg(Log.ERROR, "Own key is invalid: %d type: %d" % (bi.own_key, bi.blk_type), blk_num)
# first summary after block scan
num_error_blocks = len(self.map_status[self.BS_READ_ERROR])
if num_error_blocks > 0:
self.log.msg(Log.ERROR, "%d unreadable error blocks found" % num_error_blocks)
num_valid_blocks = len(self.map_status[self.BS_VALID])
if num_valid_blocks > 0:
self.log.msg(Log.INFO, "%d valid but unknown blocks found" % num_valid_blocks)
num_invalid_blocks = len(self.map_status[self.BS_INVALID])
if num_invalid_blocks > 0:
self.log.msg(Log.INFO, "%d invalid blocks found" % num_invalid_blocks)
def read_block(self, blk_num, is_bm=False, is_bm_ext=False):
"""read block from device, decode it, and return block info instance"""
try:
# read block from device
if is_bm:
blk = BitmapBlock(self.blkdev, blk_num)
elif is_bm_ext:
blk = BitmapExtBlock(self.blkdev, blk_num)
else:
blk = Block(self.blkdev, blk_num)
blk.read()
data = blk.data
# create block info
bi = BlockInfo(blk_num)
# --- classify block ---
if blk.valid:
# block is valid AmigaDOS
bi.blk_status = self.BS_VALID
# --- bitmap block ---
if is_bm:
bi.blk_type = self.BT_BITMAP
bi.blk_status = self.BS_TYPE
bi.bitmap = blk.get_bitmap_data()
# --- bitmap ext block ---
elif is_bm_ext:
bi.blk_type = self.BT_BITMAP_EXT
bi.blk_status = self.BS_TYPE
bi.bitmap_ptrs = blk.bitmap_ptrs
bi.next_blk = blk.bitmap_ext_blk
# --- root block ---
elif blk.is_root_block():
bi.blk_type = self.BT_ROOT
bi.blk_status = self.BS_TYPE
root = RootBlock(self.blkdev, blk_num)
root.set(data)
bi.name = root.name
bi.hash_table = root.hash_table
bi.parent_blk = 0
self.log.msg(Log.DEBUG, "Found Root: '%s'" % bi.name, blk_num)
# chech hash size
nht = len(root.hash_table)
if root.hash_size != nht:
self.log.msg(Log.ERROR, "Root block hash table size mismatch", blk_num)
eht = self.blkdev.block_longs - 56
if nht != eht:
self.log.msg(Log.WARN, "Root block does not have normal hash size: %d != %d" % (nht, eht), blk_num)
# --- user dir block ---
elif blk.is_user_dir_block():
bi.blk_type = self.BT_DIR
bi.blk_status = self.BS_TYPE
user = UserDirBlock(self.blkdev, blk_num, DosType.is_longname(self.dos_type))
user.set(data)
bi.name = user.name
bi.parent_blk = user.parent
bi.next_blk = user.hash_chain
bi.hash_table = user.hash_table
bi.own_key = user.own_key
self.log.msg(Log.DEBUG, "Found Dir : '%s'" % bi.name, blk_num)
# --- filter header block ---
elif blk.is_file_header_block():
bi.blk_type = self.BT_FILE_HDR
bi.blk_status = self.BS_TYPE
fh = FileHeaderBlock(self.blkdev, blk_num, DosType.is_longname(self.dos_type))
fh.set(data)
bi.name = fh.name
bi.parent_blk = fh.parent
bi.next_blk = fh.hash_chain
bi.own_key = fh.own_key
bi.byte_size = fh.byte_size
bi.data_blocks = fh.data_blocks
bi.extension = fh.extension
self.log.msg(Log.DEBUG, "Found File: '%s'" % bi.name, blk_num)
# --- file list block ---
elif blk.is_file_list_block():
bi.blk_type = self.BT_FILE_LIST
fl = FileListBlock(self.blkdev, blk_num)
fl.set(data)
bi.blk_status = self.BS_TYPE
bi.ext_blk = fl.extension
bi.blk_list = fl.data_blocks
bi.own_key = fl.own_key
bi.data_blocks = fl.data_blocks
bi.extension = fl.extension
bi.parent_blk = fl.parent
# --- file data block (OFS) ---
elif blk.is_file_data_block():
bi.blk_type = self.BT_FILE_DATA
bi.blk_status = self.BS_TYPE
fd = FileDataBlock(self.blkdev, blk_num)
fd.set(data)
bi.data_size = fd.data_size
bi.hdr_key = fd.hdr_key
bi.seq_num = fd.seq_num
elif blk.is_comment_block():
bi.blk_type = self.BT_COMMENT
bi.blk_status = self.BS_TYPE
cblk = CommentBlock(self.blkdev, blk_num)
bi.hdr_key = cblk.hdr_key
bi.own_key = cblk.own_key
except IOError as e:
self.log.msg(Log.ERROR, "Can't read block", blk_num)
bi = BlockInfo(blk_num)
bi.blk_status = BS_READ_ERROR
# sort block info into map and arrays assigned by status/type
self.block_map[blk_num] = bi
self.map_status[bi.blk_status].append(bi)
self.map_type[bi.blk_type].append(bi)
return bi
def any_chance_of_fs(self):
"""is there any chance to find a FS on this block device?"""
num_dirs = len(self.map_type[self.BT_DIR])
num_files = len(self.map_type[self.BT_FILE_HDR])
num_roots = len(self.map_type[self.BT_ROOT])
return (num_files > 0) or ((num_roots + num_dirs) > 0)
def get_blocks_of_type(self, t):
return self.map_type[t]
# scan all blocks
for n in range(num_blk):
blk_num = n + begin_blk
def get_blocks_with_key_value(self, key, value):
res = []
for bi in self.block_map:
if hasattr(bi, key):
v = getattr(bi, key)
if v == value:
res.append(bi)
return res
# read/get block
bi = self.get_block(blk_num)
def is_block_available(self, num):
if num >= 0 and num < len(self.block_map):
return self.block_map[num] != None
else:
return False
# own key ok?
if bi != None:
if bi.blk_status == self.BS_TYPE:
if bi.own_key != None and bi.own_key != blk_num:
self.log.msg(
Log.ERROR,
"Own key is invalid: %d type: %d"
% (bi.own_key, bi.blk_type),
blk_num,
)
def get_block(self, num):
if num >= 0 and num < len(self.block_map):
bi = self.block_map[num]
if bi == None:
bi = self.read_block(num)
return bi
else:
return None
# first summary after block scan
num_error_blocks = len(self.map_status[self.BS_READ_ERROR])
if num_error_blocks > 0:
self.log.msg(
Log.ERROR, "%d unreadable error blocks found" % num_error_blocks
)
num_valid_blocks = len(self.map_status[self.BS_VALID])
if num_valid_blocks > 0:
self.log.msg(
Log.INFO, "%d valid but unknown blocks found" % num_valid_blocks
)
num_invalid_blocks = len(self.map_status[self.BS_INVALID])
if num_invalid_blocks > 0:
self.log.msg(Log.INFO, "%d invalid blocks found" % num_invalid_blocks)
def dump(self):
for b in self.block_map:
if b != None:
print(b)
def read_block(self, blk_num, is_bm=False, is_bm_ext=False):
"""read block from device, decode it, and return block info instance"""
try:
# read block from device
if is_bm:
blk = BitmapBlock(self.blkdev, blk_num)
elif is_bm_ext:
blk = BitmapExtBlock(self.blkdev, blk_num)
else:
blk = Block(self.blkdev, blk_num)
blk.read()
data = blk.data
# create block info
bi = BlockInfo(blk_num)
# --- classify block ---
if blk.valid:
# block is valid AmigaDOS
bi.blk_status = self.BS_VALID
# --- bitmap block ---
if is_bm:
bi.blk_type = self.BT_BITMAP
bi.blk_status = self.BS_TYPE
bi.bitmap = blk.get_bitmap_data()
# --- bitmap ext block ---
elif is_bm_ext:
bi.blk_type = self.BT_BITMAP_EXT
bi.blk_status = self.BS_TYPE
bi.bitmap_ptrs = blk.bitmap_ptrs
bi.next_blk = blk.bitmap_ext_blk
# --- root block ---
elif blk.is_root_block():
bi.blk_type = self.BT_ROOT
bi.blk_status = self.BS_TYPE
root = RootBlock(self.blkdev, blk_num)
root.set(data)
bi.name = root.name
bi.hash_table = root.hash_table
bi.parent_blk = 0
self.log.msg(Log.DEBUG, "Found Root: '%s'" % bi.name, blk_num)
# chech hash size
nht = len(root.hash_table)
if root.hash_size != nht:
self.log.msg(
Log.ERROR, "Root block hash table size mismatch", blk_num
)
eht = self.blkdev.block_longs - 56
if nht != eht:
self.log.msg(
Log.WARN,
"Root block does not have normal hash size: %d != %d"
% (nht, eht),
blk_num,
)
# --- user dir block ---
elif blk.is_user_dir_block():
bi.blk_type = self.BT_DIR
bi.blk_status = self.BS_TYPE
user = UserDirBlock(
self.blkdev, blk_num, DosType.is_longname(self.dos_type)
)
user.set(data)
bi.name = user.name
bi.parent_blk = user.parent
bi.next_blk = user.hash_chain
bi.hash_table = user.hash_table
bi.own_key = user.own_key
self.log.msg(Log.DEBUG, "Found Dir : '%s'" % bi.name, blk_num)
# --- filter header block ---
elif blk.is_file_header_block():
bi.blk_type = self.BT_FILE_HDR
bi.blk_status = self.BS_TYPE
fh = FileHeaderBlock(
self.blkdev, blk_num, DosType.is_longname(self.dos_type)
)
fh.set(data)
bi.name = fh.name
bi.parent_blk = fh.parent
bi.next_blk = fh.hash_chain
bi.own_key = fh.own_key
bi.byte_size = fh.byte_size
bi.data_blocks = fh.data_blocks
bi.extension = fh.extension
self.log.msg(Log.DEBUG, "Found File: '%s'" % bi.name, blk_num)
# --- file list block ---
elif blk.is_file_list_block():
bi.blk_type = self.BT_FILE_LIST
fl = FileListBlock(self.blkdev, blk_num)
fl.set(data)
bi.blk_status = self.BS_TYPE
bi.ext_blk = fl.extension
bi.blk_list = fl.data_blocks
bi.own_key = fl.own_key
bi.data_blocks = fl.data_blocks
bi.extension = fl.extension
bi.parent_blk = fl.parent
# --- file data block (OFS) ---
elif blk.is_file_data_block():
bi.blk_type = self.BT_FILE_DATA
bi.blk_status = self.BS_TYPE
fd = FileDataBlock(self.blkdev, blk_num)
fd.set(data)
bi.data_size = fd.data_size
bi.hdr_key = fd.hdr_key
bi.seq_num = fd.seq_num
elif blk.is_comment_block():
bi.blk_type = self.BT_COMMENT
bi.blk_status = self.BS_TYPE
cblk = CommentBlock(self.blkdev, blk_num)
bi.hdr_key = cblk.hdr_key
bi.own_key = cblk.own_key
except IOError as e:
self.log.msg(Log.ERROR, "Can't read block", blk_num)
bi = BlockInfo(blk_num)
bi.blk_status = BS_READ_ERROR
# sort block info into map and arrays assigned by status/type
self.block_map[blk_num] = bi
self.map_status[bi.blk_status].append(bi)
self.map_type[bi.blk_type].append(bi)
return bi
def any_chance_of_fs(self):
"""is there any chance to find a FS on this block device?"""
num_dirs = len(self.map_type[self.BT_DIR])
num_files = len(self.map_type[self.BT_FILE_HDR])
num_roots = len(self.map_type[self.BT_ROOT])
return (num_files > 0) or ((num_roots + num_dirs) > 0)
def get_blocks_of_type(self, t):
return self.map_type[t]
def get_blocks_with_key_value(self, key, value):
res = []
for bi in self.block_map:
if hasattr(bi, key):
v = getattr(bi, key)
if v == value:
res.append(bi)
return res
def is_block_available(self, num):
if num >= 0 and num < len(self.block_map):
return self.block_map[num] != None
else:
return False
def get_block(self, num):
if num >= 0 and num < len(self.block_map):
bi = self.block_map[num]
if bi == None:
bi = self.read_block(num)
return bi
else:
return None
def dump(self):
for b in self.block_map:
if b != None:
print(b)

View File

@ -1,237 +1,286 @@
from .BlockScan import BlockScan
from amitools.fs.FSString import FSString
from amitools.fs.FileName import FileName
from amitools.fs.validate.Log import Log
import amitools.fs.DosType as DosType
class DirChainEntry:
"""entry of the hash chain"""
def __init__(self, blk_info):
self.blk_info = blk_info
self.parent_ok = False
self.fn_hash_ok = False
self.valid = False
self.end = False
self.orphaned = False
self.sub = None
def __str__(self):
l = []
if self.parent_ok:
l.append("parent_ok")
if self.fn_hash_ok:
l.append("fn_hash_ok")
if self.valid:
l.append("valid")
if self.end:
l.append("end")
if self.orphaned:
l.append("orphaned")
return "[DCE @%d '%s': %s]" % \
(self.blk_info.blk_num, self.blk_info.name, " ".join(l))
"""entry of the hash chain"""
def __init__(self, blk_info):
self.blk_info = blk_info
self.parent_ok = False
self.fn_hash_ok = False
self.valid = False
self.end = False
self.orphaned = False
self.sub = None
def __str__(self):
l = []
if self.parent_ok:
l.append("parent_ok")
if self.fn_hash_ok:
l.append("fn_hash_ok")
if self.valid:
l.append("valid")
if self.end:
l.append("end")
if self.orphaned:
l.append("orphaned")
return "[DCE @%d '%s': %s]" % (
self.blk_info.blk_num,
self.blk_info.name,
" ".join(l),
)
class DirChain:
"""representing a chain of the hashtable in a directory"""
def __init__(self, hash_val):
self.hash_val = hash_val
self.chain = []
def add(self, dce):
self.chain.append(dce)
def get_entries(self):
return self.chain
def __str__(self):
return "{DirChain +%d: #%d}" % (self.hash_val, len(self.chain))
"""representing a chain of the hashtable in a directory"""
def __init__(self, hash_val):
self.hash_val = hash_val
self.chain = []
def add(self, dce):
self.chain.append(dce)
def get_entries(self):
return self.chain
def __str__(self):
return "{DirChain +%d: #%d}" % (self.hash_val, len(self.chain))
class DirInfo:
"""information structure on a directory"""
def __init__(self, blk_info):
self.blk_info = blk_info
self.chains = {}
self.children = []
"""information structure on a directory"""
def add(self, dc):
self.chains[dc.hash_val] = dc
def add_child(self, c):
self.children.append(c)
def get(self, hash_val):
if hash_val in self.chains:
return self.chains[hash_val]
else:
return None
def get_chains(self):
return self.chains
def __str__(self):
bi = self.blk_info
blk_num = bi.blk_num
name = bi.name
parent_blk = bi.parent_blk
return "<DirInfo @%d '%s' #%d parent:%d child:#%d>" % (blk_num, name, len(self.chains), parent_blk, len(self.children))
def __init__(self, blk_info):
self.blk_info = blk_info
self.chains = {}
self.children = []
def add(self, dc):
self.chains[dc.hash_val] = dc
def add_child(self, c):
self.children.append(c)
def get(self, hash_val):
if hash_val in self.chains:
return self.chains[hash_val]
else:
return None
def get_chains(self):
return self.chains
def __str__(self):
bi = self.blk_info
blk_num = bi.blk_num
name = bi.name
parent_blk = bi.parent_blk
return "<DirInfo @%d '%s' #%d parent:%d child:#%d>" % (
blk_num,
name,
len(self.chains),
parent_blk,
len(self.children),
)
class DirScan:
"""directory tree scanner"""
def __init__(self, block_scan, log):
self.log = log
self.block_scan = block_scan
self.root_di = None
self.intl = DosType.is_intl(block_scan.dos_type)
self.files = []
self.dirs = []
def scan_tree(self, root_blk_num, progress=None):
"""scan the root tree"""
# get root block info
root_bi = self.block_scan.get_block(root_blk_num)
if root_bi == None:
self.log.msg(Log.ERROR,"Root block not found?!",root_blk_num)
return None
# do tree scan
if progress != None:
progress.begin("dir")
self.root_di = self.scan_dir(root_bi, progress)
if progress != None:
progress.end()
return self.root_di
def scan_dir(self, dir_bi, progress):
"""check a directory by scanning through the hash table entries and follow the chains
"""directory tree scanner"""
def __init__(self, block_scan, log):
self.log = log
self.block_scan = block_scan
self.root_di = None
self.intl = DosType.is_intl(block_scan.dos_type)
self.files = []
self.dirs = []
def scan_tree(self, root_blk_num, progress=None):
"""scan the root tree"""
# get root block info
root_bi = self.block_scan.get_block(root_blk_num)
if root_bi == None:
self.log.msg(Log.ERROR, "Root block not found?!", root_blk_num)
return None
# do tree scan
if progress != None:
progress.begin("dir")
self.root_di = self.scan_dir(root_bi, progress)
if progress != None:
progress.end()
return self.root_di
def scan_dir(self, dir_bi, progress):
"""check a directory by scanning through the hash table entries and follow the chains
Returns (all_chains_ok, dir_obj)
"""
# create new dir info
di = DirInfo(dir_bi)
self.dirs.append(di)
# run through hash_table of directory and build chains
chains = {}
hash_val = 0
for blk_num in dir_bi.hash_table:
if blk_num != 0:
# build chain
chain = DirChain(hash_val)
self.build_chain(chain, dir_bi, blk_num, progress)
di.add(chain)
hash_val += 1
return di
def build_chain(self, chain, dir_blk_info, blk_num, progress):
"""build a block chain"""
dir_blk_num = dir_blk_info.blk_num
dir_name = dir_blk_info.name
hash_val = chain.hash_val
# make sure entry block is first used
block_used = self.block_scan.is_block_available(blk_num)
# get entry block
blk_info = self.block_scan.read_block(blk_num)
# create new dir info
di = DirInfo(dir_bi)
self.dirs.append(di)
# create dir chain entry
dce = DirChainEntry(blk_info)
chain.add(dce)
# run through hash_table of directory and build chains
chains = {}
hash_val = 0
for blk_num in dir_bi.hash_table:
if blk_num != 0:
# build chain
chain = DirChain(hash_val)
self.build_chain(chain, dir_bi, blk_num, progress)
di.add(chain)
hash_val += 1
# account
if progress != None:
progress.add()
return di
# block already used?
if block_used:
self.log.msg(Log.ERROR, "dir block already used in chain #%d of dir '%s (%d)" % (hash_val, dir_name, dir_blk_num), blk_num)
dce.end = True
return
def build_chain(self, chain, dir_blk_info, blk_num, progress):
"""build a block chain"""
dir_blk_num = dir_blk_info.blk_num
dir_name = dir_blk_info.name
hash_val = chain.hash_val
# self reference?
if blk_num == dir_blk_num:
self.log.msg(Log.ERROR, "dir block in its own chain #%d of dir '%s' (%d)" % (hash_val, dir_name, dir_blk_num), blk_num)
dce.end = True
return
# not a block in range
if blk_info == None:
self.log.msg(Log.ERROR, "out-of-range block terminates chain #%d of dir '%s' (%d)" % (hash_val, dir_name, dir_blk_num), blk_num)
dce.end = True
return
# check type of entry block
b_type = blk_info.blk_type
if b_type not in (BlockScan.BT_DIR, BlockScan.BT_FILE_HDR):
self.log.msg(Log.ERROR, "invalid block terminates chain #%d of dir '%s' (%d)" % (hash_val, dir_name, dir_blk_num), blk_num)
dce.end = True
return
# check referenceed block type in chain
blk_type = blk_info.blk_type
if blk_type in (BlockScan.BT_ROOT, BlockScan.BT_FILE_LIST, BlockScan.BT_FILE_DATA):
self.log.msg(Log.ERROR, "invalid block type %d terminates chain #%d of dir '%s' (%d)" % (blk_type, hash_val, dir_name, dir_blk_num), blk_num)
dce.end = True
return
# make sure entry block is first used
block_used = self.block_scan.is_block_available(blk_num)
# all following are ok
dce.valid = True
# check parent of block
name = blk_info.name
dce.parent_ok = (blk_info.parent_blk == dir_blk_num)
if not dce.parent_ok:
self.log.msg(Log.ERROR, "invalid parent in '%s' chain #%d of dir '%s' (%d)" % (name, hash_val, dir_name, dir_blk_num), blk_num)
# get entry block
blk_info = self.block_scan.read_block(blk_num)
# check name hash
fn = FileName(name, self.intl)
fn_hash = fn.hash()
dce.fn_hash_ok = (fn_hash == hash_val)
if not dce.fn_hash_ok:
self.log.msg(Log.ERROR, "invalid name hash in '%s' chain #%d of dir '%s' (%d)" % (name, hash_val, dir_name, dir_blk_num), blk_num)
# create dir chain entry
dce = DirChainEntry(blk_info)
chain.add(dce)
# recurse into dir?
if blk_type == BlockScan.BT_DIR:
dce.sub = self.scan_dir(blk_info, progress)
elif blk_type == BlockScan.BT_FILE_HDR:
self.files.append(dce)
# account
if progress != None:
progress.add()
# check next block in chain
next_blk = blk_info.next_blk
if next_blk != 0:
self.build_chain(chain, dir_blk_info, next_blk, progress)
else:
dce.end = True
def get_all_file_hdr_blk_infos(self):
"""return all file chain entries"""
result = []
for f in self.files:
result.append(f.blk_info)
return result
def get_all_dir_infos(self):
"""return all dir infos"""
return self.dirs
def dump(self):
"""dump whole dir info structure"""
self.dump_dir_info(self.root_di, 0)
def dump_dir_info(self, di, indent):
"""dump a single dir info structure and its sub dirs"""
istr = " " * indent
print(istr, di)
for hash_value in sorted(di.get_chains().keys()):
dc = di.get(hash_value)
print(istr, " ", dc)
for dce in dc.get_entries():
print(istr, " ", dce)
sub = dce.sub
if sub != None and dce.blk_info.blk_type == BlockScan.BT_DIR:
self.dump_dir_info(sub, indent+1)
# block already used?
if block_used:
self.log.msg(
Log.ERROR,
"dir block already used in chain #%d of dir '%s (%d)"
% (hash_val, dir_name, dir_blk_num),
blk_num,
)
dce.end = True
return
# self reference?
if blk_num == dir_blk_num:
self.log.msg(
Log.ERROR,
"dir block in its own chain #%d of dir '%s' (%d)"
% (hash_val, dir_name, dir_blk_num),
blk_num,
)
dce.end = True
return
# not a block in range
if blk_info == None:
self.log.msg(
Log.ERROR,
"out-of-range block terminates chain #%d of dir '%s' (%d)"
% (hash_val, dir_name, dir_blk_num),
blk_num,
)
dce.end = True
return
# check type of entry block
b_type = blk_info.blk_type
if b_type not in (BlockScan.BT_DIR, BlockScan.BT_FILE_HDR):
self.log.msg(
Log.ERROR,
"invalid block terminates chain #%d of dir '%s' (%d)"
% (hash_val, dir_name, dir_blk_num),
blk_num,
)
dce.end = True
return
# check referenceed block type in chain
blk_type = blk_info.blk_type
if blk_type in (
BlockScan.BT_ROOT,
BlockScan.BT_FILE_LIST,
BlockScan.BT_FILE_DATA,
):
self.log.msg(
Log.ERROR,
"invalid block type %d terminates chain #%d of dir '%s' (%d)"
% (blk_type, hash_val, dir_name, dir_blk_num),
blk_num,
)
dce.end = True
return
# all following are ok
dce.valid = True
# check parent of block
name = blk_info.name
dce.parent_ok = blk_info.parent_blk == dir_blk_num
if not dce.parent_ok:
self.log.msg(
Log.ERROR,
"invalid parent in '%s' chain #%d of dir '%s' (%d)"
% (name, hash_val, dir_name, dir_blk_num),
blk_num,
)
# check name hash
fn = FileName(name, self.intl)
fn_hash = fn.hash()
dce.fn_hash_ok = fn_hash == hash_val
if not dce.fn_hash_ok:
self.log.msg(
Log.ERROR,
"invalid name hash in '%s' chain #%d of dir '%s' (%d)"
% (name, hash_val, dir_name, dir_blk_num),
blk_num,
)
# recurse into dir?
if blk_type == BlockScan.BT_DIR:
dce.sub = self.scan_dir(blk_info, progress)
elif blk_type == BlockScan.BT_FILE_HDR:
self.files.append(dce)
# check next block in chain
next_blk = blk_info.next_blk
if next_blk != 0:
self.build_chain(chain, dir_blk_info, next_blk, progress)
else:
dce.end = True
def get_all_file_hdr_blk_infos(self):
"""return all file chain entries"""
result = []
for f in self.files:
result.append(f.blk_info)
return result
def get_all_dir_infos(self):
"""return all dir infos"""
return self.dirs
def dump(self):
"""dump whole dir info structure"""
self.dump_dir_info(self.root_di, 0)
def dump_dir_info(self, di, indent):
"""dump a single dir info structure and its sub dirs"""
istr = " " * indent
print(istr, di)
for hash_value in sorted(di.get_chains().keys()):
dc = di.get(hash_value)
print(istr, " ", dc)
for dce in dc.get_entries():
print(istr, " ", dce)
sub = dce.sub
if sub != None and dce.blk_info.blk_type == BlockScan.BT_DIR:
self.dump_dir_info(sub, indent + 1)

View File

@ -1,124 +1,168 @@
from .BlockScan import BlockScan
from amitools.fs.FSString import FSString
from amitools.fs.FileName import FileName
from amitools.fs.validate.Log import Log
import amitools.fs.DosType as DosType
class FileData:
def __init__(self, bi):
self.bi = bi
def __init__(self, bi):
self.bi = bi
class FileInfo:
def __init__(self, bi):
self.bi = bi
def __init__(self, bi):
self.bi = bi
class FileScan:
def __init__(self, block_scan, log, dos_type):
self.block_scan = block_scan
self.log = log
self.dos_type = dos_type
self.ffs = DosType.is_ffs(self.dos_type)
self.infos = []
def scan_all_files(self, all_file_hdr_block_infos, progress=None):
"""scan through all files"""
if progress != None:
progress.begin("file")
for bi in all_file_hdr_block_infos:
self.scan_file(bi)
progress.add()
progress.end()
else:
for bi in all_file_hdr_block_infos:
self.scan_file(bi)
def scan_file(self, bi):
"""scan a file header block info and create a FileInfo instance"""
fi = FileInfo(bi)
self.infos.append(fi)
info = "'%s' (@%d)" % (bi.name, bi.blk_num)
# scan for data blocks
linked_data_blocks = bi.data_blocks
blk_num = bi.blk_num
# run through file list blocks linked by extension
sbi = bi
aborted = False
num = 0
while sbi.extension != 0 and sbi.extension < self.block_scan.blkdev.num_blocks:
# check usage of block
if self.block_scan.is_block_available(sbi.extension):
self.log.msg(Log.ERROR, "File ext block #%d of %s already used" % (num, info), sbi.extension)
aborted = True
break
# get block
ebi = self.block_scan.get_block(sbi.extension)
if ebi == None:
aborted = True
break
# check block type
if ebi.blk_type != BlockScan.BT_FILE_LIST:
self.log.msg(Log.ERROR, "File ext block #%d of %s is no ext block" % (num, info), ebi.blk_num)
aborted = True
break
# check for parent link
if ebi.parent_blk != blk_num:
self.log.msg(Log.ERROR, "File ext block #%d of %s has invalid parent: got %d != expect %d" % (num, info, ebi.parent_blk, blk_num), ebi.blk_num)
# warn if data blocks is not full
ndb = len(ebi.data_blocks)
if ebi.extension != 0 and ndb != self.block_scan.blkdev.block_longs - 56:
self.log.msg(Log.WARN, "File ext block #%d of %s has incomplete data refs: got %d" % (num, info, ndb), ebi.blk_num)
# add data blocks
linked_data_blocks += ebi.data_blocks
sbi = ebi
num += 1
# transform the data block numbers to file data
file_datas = []
seq_num = 1
for data_blk in linked_data_blocks:
# get block
block_used = self.block_scan.is_block_available(data_blk)
dbi = self.block_scan.get_block(data_blk)
fd = FileData(dbi)
file_datas.append(fd)
# check usage of block
# is block available
if dbi == None:
self.log.msg(Log.ERROR, "File data block #%d of %s not found" % (seq_num,info), data_blk)
if block_used:
self.log.msg(Log.ERROR, "File data block #%d of %s already used" % (seq_num,info), data_blk)
fd.bi = None
# in ofs check data blocks
if not self.ffs:
# check block type
if dbi.blk_type != BlockScan.BT_FILE_DATA:
self.log.msg(Log.ERROR, "File data block #%d of %s is no data block" % (seq_num,info), data_blk)
fd.bi = None
def __init__(self, block_scan, log, dos_type):
self.block_scan = block_scan
self.log = log
self.dos_type = dos_type
self.ffs = DosType.is_ffs(self.dos_type)
self.infos = []
def scan_all_files(self, all_file_hdr_block_infos, progress=None):
"""scan through all files"""
if progress != None:
progress.begin("file")
for bi in all_file_hdr_block_infos:
self.scan_file(bi)
progress.add()
progress.end()
else:
# check header ref: must point to file header
if dbi.hdr_key != blk_num:
self.log.msg(Log.ERROR, "File data block #%d of %s does not ref header: got %d != expect %d" % (seq_num,info,dbi.hdr_key, blk_num), data_blk)
# check sequence number
if dbi.seq_num != seq_num:
self.log.msg(Log.ERROR, "File data block #%d of %s seq num mismatch: got %d" % (seq_num,info,dbi.seq_num), data_blk)
seq_num += 1
# check size of file in bytes
block_data_bytes = self.block_scan.blkdev.block_bytes
if not self.ffs:
block_data_bytes -= 24
file_est_blocks = (bi.byte_size + block_data_bytes - 1) // block_data_bytes
num_data_blocks = len(linked_data_blocks)
if file_est_blocks != num_data_blocks:
self.log.msg(Log.ERROR, "File %s with %d bytes has wrong number of data blocks: got %d != expect %d" % (info, bi.byte_size, num_data_blocks, file_est_blocks), bi.blk_num)
return fi
def dump(self):
pass
for bi in all_file_hdr_block_infos:
self.scan_file(bi)
def scan_file(self, bi):
"""scan a file header block info and create a FileInfo instance"""
fi = FileInfo(bi)
self.infos.append(fi)
info = "'%s' (@%d)" % (bi.name, bi.blk_num)
# scan for data blocks
linked_data_blocks = bi.data_blocks
blk_num = bi.blk_num
# run through file list blocks linked by extension
sbi = bi
aborted = False
num = 0
while sbi.extension != 0 and sbi.extension < self.block_scan.blkdev.num_blocks:
# check usage of block
if self.block_scan.is_block_available(sbi.extension):
self.log.msg(
Log.ERROR,
"File ext block #%d of %s already used" % (num, info),
sbi.extension,
)
aborted = True
break
# get block
ebi = self.block_scan.get_block(sbi.extension)
if ebi == None:
aborted = True
break
# check block type
if ebi.blk_type != BlockScan.BT_FILE_LIST:
self.log.msg(
Log.ERROR,
"File ext block #%d of %s is no ext block" % (num, info),
ebi.blk_num,
)
aborted = True
break
# check for parent link
if ebi.parent_blk != blk_num:
self.log.msg(
Log.ERROR,
"File ext block #%d of %s has invalid parent: got %d != expect %d"
% (num, info, ebi.parent_blk, blk_num),
ebi.blk_num,
)
# warn if data blocks is not full
ndb = len(ebi.data_blocks)
if ebi.extension != 0 and ndb != self.block_scan.blkdev.block_longs - 56:
self.log.msg(
Log.WARN,
"File ext block #%d of %s has incomplete data refs: got %d"
% (num, info, ndb),
ebi.blk_num,
)
# add data blocks
linked_data_blocks += ebi.data_blocks
sbi = ebi
num += 1
# transform the data block numbers to file data
file_datas = []
seq_num = 1
for data_blk in linked_data_blocks:
# get block
block_used = self.block_scan.is_block_available(data_blk)
dbi = self.block_scan.get_block(data_blk)
fd = FileData(dbi)
file_datas.append(fd)
# check usage of block
# is block available
if dbi == None:
self.log.msg(
Log.ERROR,
"File data block #%d of %s not found" % (seq_num, info),
data_blk,
)
if block_used:
self.log.msg(
Log.ERROR,
"File data block #%d of %s already used" % (seq_num, info),
data_blk,
)
fd.bi = None
# in ofs check data blocks
if not self.ffs:
# check block type
if dbi.blk_type != BlockScan.BT_FILE_DATA:
self.log.msg(
Log.ERROR,
"File data block #%d of %s is no data block" % (seq_num, info),
data_blk,
)
fd.bi = None
else:
# check header ref: must point to file header
if dbi.hdr_key != blk_num:
self.log.msg(
Log.ERROR,
"File data block #%d of %s does not ref header: got %d != expect %d"
% (seq_num, info, dbi.hdr_key, blk_num),
data_blk,
)
# check sequence number
if dbi.seq_num != seq_num:
self.log.msg(
Log.ERROR,
"File data block #%d of %s seq num mismatch: got %d"
% (seq_num, info, dbi.seq_num),
data_blk,
)
seq_num += 1
# check size of file in bytes
block_data_bytes = self.block_scan.blkdev.block_bytes
if not self.ffs:
block_data_bytes -= 24
file_est_blocks = (bi.byte_size + block_data_bytes - 1) // block_data_bytes
num_data_blocks = len(linked_data_blocks)
if file_est_blocks != num_data_blocks:
self.log.msg(
Log.ERROR,
"File %s with %d bytes has wrong number of data blocks: got %d != expect %d"
% (info, bi.byte_size, num_data_blocks, file_est_blocks),
bi.blk_num,
)
return fi
def dump(self):
pass

View File

@ -1,45 +1,45 @@
class LogEntry:
"""A class for a log entry"""
names = ('debug','info ','WARN ','ERROR')
def __init__(self, level, msg, blk_num=-1):
self.blk_num = blk_num
self.level = level
self.msg = msg
def __str__(self):
if self.blk_num == -1:
return "%s%s:%s" % (" "*8, self.names[self.level], self.msg)
else:
return "@%06d:%s:%s" % (self.blk_num, self.names[self.level], self.msg)
"""A class for a log entry"""
names = ("debug", "info ", "WARN ", "ERROR")
def __init__(self, level, msg, blk_num=-1):
self.blk_num = blk_num
self.level = level
self.msg = msg
def __str__(self):
if self.blk_num == -1:
return "%s%s:%s" % (" " * 8, self.names[self.level], self.msg)
else:
return "@%06d:%s:%s" % (self.blk_num, self.names[self.level], self.msg)
class Log:
"""Store a log of entries"""
"""Store a log of entries"""
DEBUG = 0
INFO = 1
WARN = 2
ERROR = 3
DEBUG = 0
INFO = 1
WARN = 2
ERROR = 3
def __init__(self, min_level):
self.entries = []
self.min_level = min_level
def msg(self, level, msg, blk_num = -1):
if level < self.min_level:
return
e = LogEntry(level, msg, blk_num)
self.entries.append(e)
def dump(self):
for e in self.entries:
print(e)
def get_num_level(self, level):
num = 0
for e in self.entries:
if e.level == level:
num += 1
return num
def __init__(self, min_level):
self.entries = []
self.min_level = min_level
def msg(self, level, msg, blk_num=-1):
if level < self.min_level:
return
e = LogEntry(level, msg, blk_num)
self.entries.append(e)
def dump(self):
for e in self.entries:
print(e)
def get_num_level(self, level):
num = 0
for e in self.entries:
if e.level == level:
num += 1
return num

View File

@ -1,16 +1,17 @@
import sys
class Progress:
def __init__(self):
self.num = 0
self.msg = None
def begin(self, msg):
self.num = 0
self.msg = msg
def end(self):
pass
def add(self):
self.num += 1
def __init__(self):
self.num = 0
self.msg = None
def begin(self, msg):
self.num = 0
self.msg = msg
def end(self):
pass
def add(self):
self.num += 1

View File

@ -1,6 +1,3 @@
from amitools.fs.block.BootBlock import BootBlock
from amitools.fs.block.RootBlock import RootBlock
@ -11,99 +8,118 @@ from amitools.fs.validate.FileScan import FileScan
from amitools.fs.validate.BitmapScan import BitmapScan
import amitools.fs.DosType as DosType
class Validator:
"""Validate an AmigaDOS file system"""
def __init__(self, blkdev, min_level, debug=False, progress=None):
self.log = Log(min_level)
self.debug = debug
self.blkdev = blkdev
self.dos_type = None
self.boot = None
self.root = None
self.block_scan = None
self.progress = progress
def scan_boot(self):
"""Step 1: scan boot block.
class Validator:
"""Validate an AmigaDOS file system"""
def __init__(self, blkdev, min_level, debug=False, progress=None):
self.log = Log(min_level)
self.debug = debug
self.blkdev = blkdev
self.dos_type = None
self.boot = None
self.root = None
self.block_scan = None
self.progress = progress
def scan_boot(self):
"""Step 1: scan boot block.
Returns (True, x) if boot block has a valid dos type.
Returns (x, True) if boot block is bootable
Invalid checksum of the block is tolerated but remarked.
"""
# check boot block
boot = BootBlock(self.blkdev)
boot.read()
if boot.valid:
# dos type is valid
self.boot = boot
self.dos_type = boot.dos_type
# give a warning if checksum is not correct
if not boot.valid_chksum:
self.log.msg(Log.INFO,"invalid boot block checksum",0)
self.log.msg(Log.INFO,"dos type is '%s'" % DosType.get_dos_type_str(self.dos_type))
return (True, boot.valid_chksum)
else:
self.log.msg(Log.ERROR,"invalid boot block dos type",0)
return (False, False)
def scan_root(self):
"""Step 2: scan root block.
# check boot block
boot = BootBlock(self.blkdev)
boot.read()
if boot.valid:
# dos type is valid
self.boot = boot
self.dos_type = boot.dos_type
# give a warning if checksum is not correct
if not boot.valid_chksum:
self.log.msg(Log.INFO, "invalid boot block checksum", 0)
self.log.msg(
Log.INFO, "dos type is '%s'" % DosType.get_dos_type_str(self.dos_type)
)
return (True, boot.valid_chksum)
else:
self.log.msg(Log.ERROR, "invalid boot block dos type", 0)
return (False, False)
def scan_root(self):
"""Step 2: scan root block.
Try to determine root block from boot block or guess number.
Returns True if the root block could be decoded.
"""
if self.boot != None:
# retrieve root block number from boot block
root_blk_num = self.boot.got_root_blk
# check root block number
if root_blk_num == 0:
new_root = self.blkdev.num_blocks // 2
self.log.msg(Log.INFO,"Boot contains not Root blk. Using default: %d" % new_root,root_blk_num)
root_blk_num = new_root
elif root_blk_num < self.blkdev.reserved or root_blk_num > self.blkdev.num_blocks:
new_root = self.blkdev.num_blocks // 2
self.log.msg(Log.INFO,"Invalid root block number: given %d using guess %d" % (root_blk_num, new_root),root_blk_num)
root_blk_num = new_root
else:
# guess root block number
root_blk_num = self.blkdev.num_blocks // 2
self.log.msg(Log.INFO,"Guessed root block number",root_blk_num)
# read root block
root = RootBlock(self.blkdev, root_blk_num)
root.read()
if not root.valid:
self.log.msg(Log.INFO,"Root block is not valid -> No file system",root_blk_num)
self.root = None # mode without root
return False
else:
self.root = root
return True
def scan_dir_tree(self):
"""Step 3: scan directory structure
Return false if structure is not healthy"""
self.block_scan = BlockScan(self.blkdev, self.log, self.dos_type)
self.dir_scan = DirScan(self.block_scan, self.log)
ok = self.dir_scan.scan_tree(self.root.blk_num, progress=self.progress)
self.log.msg(Log.INFO,"Scanned %d directories" % len(self.dir_scan.get_all_dir_infos()))
if self.debug:
self.dir_scan.dump()
def scan_files(self):
"""Step 4: scan through all found files"""
self.file_scan = FileScan(self.block_scan, self.log, self.dos_type)
all_files = self.dir_scan.get_all_file_hdr_blk_infos()
self.log.msg(Log.INFO,"Scanning %d files" % len(all_files))
self.file_scan.scan_all_files(all_files, progress=self.progress)
if self.debug:
self.file_scan.dump()
def scan_bitmap(self):
"""Step 5: validate block bitmap"""
self.bitmap_scan = BitmapScan(self.block_scan, self.log)
self.bitmap_scan.scan_bitmap(self.root)
if self.boot != None:
# retrieve root block number from boot block
root_blk_num = self.boot.got_root_blk
# check root block number
if root_blk_num == 0:
new_root = self.blkdev.num_blocks // 2
self.log.msg(
Log.INFO,
"Boot contains not Root blk. Using default: %d" % new_root,
root_blk_num,
)
root_blk_num = new_root
elif (
root_blk_num < self.blkdev.reserved
or root_blk_num > self.blkdev.num_blocks
):
new_root = self.blkdev.num_blocks // 2
self.log.msg(
Log.INFO,
"Invalid root block number: given %d using guess %d"
% (root_blk_num, new_root),
root_blk_num,
)
root_blk_num = new_root
else:
# guess root block number
root_blk_num = self.blkdev.num_blocks // 2
self.log.msg(Log.INFO, "Guessed root block number", root_blk_num)
# read root block
root = RootBlock(self.blkdev, root_blk_num)
root.read()
if not root.valid:
self.log.msg(
Log.INFO, "Root block is not valid -> No file system", root_blk_num
)
self.root = None # mode without root
return False
else:
self.root = root
return True
def get_summary(self):
"""Return (errors, warnings) of log"""
num_errors = self.log.get_num_level(Log.ERROR)
num_warns = self.log.get_num_level(Log.WARN)
return (num_errors, num_warns)
def scan_dir_tree(self):
"""Step 3: scan directory structure
Return false if structure is not healthy"""
self.block_scan = BlockScan(self.blkdev, self.log, self.dos_type)
self.dir_scan = DirScan(self.block_scan, self.log)
ok = self.dir_scan.scan_tree(self.root.blk_num, progress=self.progress)
self.log.msg(
Log.INFO, "Scanned %d directories" % len(self.dir_scan.get_all_dir_infos())
)
if self.debug:
self.dir_scan.dump()
def scan_files(self):
"""Step 4: scan through all found files"""
self.file_scan = FileScan(self.block_scan, self.log, self.dos_type)
all_files = self.dir_scan.get_all_file_hdr_blk_infos()
self.log.msg(Log.INFO, "Scanning %d files" % len(all_files))
self.file_scan.scan_all_files(all_files, progress=self.progress)
if self.debug:
self.file_scan.dump()
def scan_bitmap(self):
"""Step 5: validate block bitmap"""
self.bitmap_scan = BitmapScan(self.block_scan, self.log)
self.bitmap_scan.scan_bitmap(self.root)
def get_summary(self):
"""Return (errors, warnings) of log"""
num_errors = self.log.get_num_level(Log.ERROR)
num_warns = self.log.get_num_level(Log.WARN)
return (num_errors, num_warns)

View File

@ -4,8 +4,8 @@ import struct
import amitools.binfmt.BinImage as BinImage
BKMODULE_ID = 0x707A4E75
BK_MODULE_ID = 0x4afc
BK_PATCH_ID = 0x4e71
BK_MODULE_ID = 0x4AFC
BK_PATCH_ID = 0x4E71
class BlizKickModule:
@ -75,14 +75,15 @@ class BlizKickModule:
# check if we can remove last data segment (contains only version info)
if len(segs) == 2 and segs[1].get_type() == BinImage.SEGMENT_TYPE_DATA:
data = segs[1].get_data()
if data[:5] == '$VER:':
if data[:5] == "$VER:":
self.bin_img.segments = [seg]
# test
if __name__ == '__main__':
if __name__ == "__main__":
import sys
from amitools.binfmt.BinFmt import BinFmt
bfmt = BinFmt()
for f in sys.argv[1:]:
if bfmt.is_image(f):

View File

@ -12,9 +12,9 @@ class KickRomAccess(RomAccess):
EXT_HEADER_SIZE = 0x10
FOOTER_SIZE = 0x18
ROMHDR_SIZE = 8
ROMHDR_256K = 0x11114ef9
ROMHDR_512K = 0x11144ef9
ROMHDR_EXT = 0x11144ef9
ROMHDR_256K = 0x11114EF9
ROMHDR_512K = 0x11144EF9
ROMHDR_EXT = 0x11144EF9
def __init__(self, rom_data):
RomAccess.__init__(self, rom_data)
@ -81,14 +81,14 @@ class KickRomAccess(RomAccess):
return self.read_rom_size_field() == self.size
def check_magic_reset(self):
return self.read_word(0xd0) == 0x4e70
return self.read_word(0xD0) == 0x4E70
def calc_check_sum(self, skip_off=None):
"""Check internal kickstart checksum and return True if is correct"""
chk_sum = 0
num_longs = self.size // 4
off = 0
max_u32 = 0xffffffff
max_u32 = 0xFFFFFFFF
for i in range(num_longs):
val = struct.unpack_from(">I", self.rom_data, off)[0]
if off != skip_off:
@ -132,12 +132,12 @@ class KickRomAccess(RomAccess):
offset = 0
hdr = self.ROMHDR_512K
self.write_long(offset, hdr)
self.write_long(offset+4, jump_addr)
self.write_long(offset + 4, jump_addr)
def write_ext_header(self, jump_addr, rom_rev):
self.write_header(jump_addr)
self.write_word(8, 0)
self.write_word(10, 0xffff)
self.write_word(10, 0xFFFF)
self.write_word(12, rom_rev[0])
self.write_word(14, rom_rev[1])
@ -156,8 +156,7 @@ class KickRomAccess(RomAccess):
def write_rom_ver_rev(self, rom_rev):
"""get (ver, rev) version info from ROM"""
return struct.pack_into(">HH", self.rom_data, 12,
rom_rev[0], rom_rev[1])
return struct.pack_into(">HH", self.rom_data, 12, rom_rev[0], rom_rev[1])
def read_boot_pc(self):
"""return PC for booting the ROM"""
@ -177,11 +176,12 @@ class KickRomAccess(RomAccess):
return self.read_long(off)
def get_base_addr(self):
return self.read_boot_pc() & ~0xffff
return self.read_boot_pc() & ~0xFFFF
class Loader(object):
"""Load kick rom images in different formats"""
@classmethod
def load(cls, kick_file, rom_key_file=None):
raw_img = None
@ -191,7 +191,7 @@ class Loader(object):
raw_img = fh.read()
# coded rom?
need_key = False
if raw_img[:11] == b'AMIROMTYPE1':
if raw_img[:11] == b"AMIROMTYPE1":
rom_img = raw_img[11:]
need_key = True
else:
@ -218,16 +218,17 @@ class Loader(object):
# tiny test
if __name__ == '__main__':
if __name__ == "__main__":
import sys
args = sys.argv
n = len(args)
if n > 1:
ks_file = args[1]
else:
ks_file = 'amiga-os-310-a500.rom'
ks_file = "amiga-os-310-a500.rom"
print(ks_file)
ks = Loader.load(ks_file, 'rom.key')
ks = Loader.load(ks_file, "rom.key")
kh = KickRomAccess(ks)
print("is_kick_rom", kh.is_kick_rom())
print("detect_kick_rom", kh.detect_kick_rom())

View File

@ -3,8 +3,7 @@ import struct
class RemusRom(object):
def __init__(self, sum_off, chk_sum, size, base_addr, name, short_name,
flags):
def __init__(self, sum_off, chk_sum, size, base_addr, name, short_name, flags):
self.sum_off = sum_off
self.chk_sum = chk_sum
self.size = size
@ -15,17 +14,35 @@ class RemusRom(object):
self.modules = []
def __repr__(self):
return "RemusRom(sum_off=%08x,chk_sum=%08x,size=%08x,base_addr=%08x," \
"name=%s,short_name=%s,flags=%x)" % \
(self.sum_off, self.chk_sum, self.size, self.base_addr,
self.name, self.short_name, self.flags)
return (
"RemusRom(sum_off=%08x,chk_sum=%08x,size=%08x,base_addr=%08x,"
"name=%s,short_name=%s,flags=%x)"
% (
self.sum_off,
self.chk_sum,
self.size,
self.base_addr,
self.name,
self.short_name,
self.flags,
)
)
def dump(self):
print("(%04x) #%04x @%08x +%08x =%08x %08x: %08x %-24s %s" %
(self.flags, len(self.modules), self.base_addr, self.size,
self.base_addr + self.size,
self.sum_off, self.chk_sum,
self.short_name, self.name))
print(
"(%04x) #%04x @%08x +%08x =%08x %08x: %08x %-24s %s"
% (
self.flags,
len(self.modules),
self.base_addr,
self.size,
self.base_addr + self.size,
self.sum_off,
self.chk_sum,
self.short_name,
self.name,
)
)
for m in self.modules:
m.dump()
@ -40,22 +57,21 @@ class RemusRomModuleExtra(object):
self.fixes = fixes
def __repr__(self):
return "RemusRomModuleExtra(relocs=%r,patches=%r,chk_sum=%08x," \
"brelocs=%r,fixes=%r)" % \
(self.relocs, self.patches, self.chk_sum, self.brelocs,
self.fixes)
return (
"RemusRomModuleExtra(relocs=%r,patches=%r,chk_sum=%08x,"
"brelocs=%r,fixes=%r)"
% (self.relocs, self.patches, self.chk_sum, self.brelocs, self.fixes)
)
def dump(self):
if len(self.relocs) > 0:
print(" relocs: ", ",".join(["%08x" % x for x in self.relocs]))
if len(self.patches) > 0:
print(" patches:", ",".join(
["%08x:%08x" % x for x in self.patches]))
print(" patches:", ",".join(["%08x:%08x" % x for x in self.patches]))
if len(self.brelocs) > 0:
print(" brelocs:", ",".join(["%08x" % x for x in self.brelocs]))
if len(self.fixes) > 0:
print(" fixes: ", ",".join(
["%08x:%08x" % x for x in self.fixes]))
print(" fixes: ", ",".join(["%08x:%08x" % x for x in self.fixes]))
if self.chk_sum:
print(" chk_sum: %08x" % self.chk_sum)
@ -69,17 +85,22 @@ class RemusRomModule(object):
self.extra = None
def __repr__(self):
return "RemusRomModule(name=%s,offset=%08x,size=%08x,extra_off=%08x)" \
% (self.name, self.offset, self.size, self.extra_off)
return "RemusRomModule(name=%s,offset=%08x,size=%08x,extra_off=%08x)" % (
self.name,
self.offset,
self.size,
self.extra_off,
)
def dump(self):
if self.extra:
flags = self.extra.flags
else:
flags = 0
print(" @%08x +%08x =%08x %s (%02x)" %
(self.offset, self.size, self.offset + self.size, self.name,
flags))
print(
" @%08x +%08x =%08x %s (%02x)"
% (self.offset, self.size, self.offset + self.size, self.name, flags)
)
if self.extra:
self.extra.dump()
@ -98,8 +119,7 @@ class RemusFile(object):
# check header
len_hdr = self._read_long()
if len_hdr != self.header:
raise IOError("Wrong header! %08x != %08x" %
(self.header, len_hdr))
raise IOError("Wrong header! %08x != %08x" % (self.header, len_hdr))
# read version
self.version = self._read_long()
self.path = path
@ -126,14 +146,14 @@ class RemusFile(object):
break
res.append(self.data[pos])
pos += 1
return bytes(res).decode('latin-1')
return bytes(res).decode("latin-1")
class RemusSplitFile(RemusFile):
u32_max = 0xffffffff
u32_max = 0xFFFFFFFF
def __init__(self):
RemusFile.__init__(self, 0x524d5346)
RemusFile.__init__(self, 0x524D5346)
self.roms = []
def load(self, path):
@ -172,8 +192,7 @@ class RemusSplitFile(RemusFile):
flags = 0
name = self._read_string(name_off)
short_name = self._read_string(short_name_off)
rom = RemusRom(sum_off, chk_sum, size, base_addr,
name, short_name, flags)
rom = RemusRom(sum_off, chk_sum, size, base_addr, name, short_name, flags)
# store
string_offs.add(name_off)
string_offs.add(short_name_off)
@ -209,7 +228,7 @@ class RemusSplitFile(RemusFile):
FLAG_SHORT_BCPL_RELOCS = 0x10
FLAG_CHK_SUM = 0x40
FLAG_FIXES = 0x80
FLAG_MASK = 0xdf
FLAG_MASK = 0xDF
# parse extras
extra_map = {}
for extra_off in extra_offs:
@ -269,8 +288,7 @@ class RemusSplitFile(RemusFile):
if flags & FLAG_CHK_SUM:
chk_sum = self._read_long()
# create extra
e = RemusRomModuleExtra(
flags, relocs, patches, chk_sum, brelocs, fixes)
e = RemusRomModuleExtra(flags, relocs, patches, chk_sum, brelocs, fixes)
extra_map[extra_off] = e
# check end of record
# if self.offset not in extra_offs and self.offset != end_off:
@ -306,18 +324,22 @@ class RemusIdEntry(object):
self.name = name
def __repr__(self):
return "RemusIdEntry(count=%x,bogus=%08x,chk_sum=%08x,name=%s" % \
(self.count, self.bogus, self.chk_sum, self.name)
return "RemusIdEntry(count=%x,bogus=%08x,chk_sum=%08x,name=%s" % (
self.count,
self.bogus,
self.chk_sum,
self.name,
)
class RemusIdFile(RemusFile):
def __init__(self):
RemusFile.__init__(self, 0x524d4944)
RemusFile.__init__(self, 0x524D4944)
self.entries = []
def load(self, path):
RemusFile.load(self, path)
u16_max = 0xffff
u16_max = 0xFFFF
# loop: new rom
while True:
# parse rom entry
@ -333,8 +355,7 @@ class RemusIdFile(RemusFile):
def dump(self):
for e in self.entries:
print("%04x %08x %08x %s" %
(e.count, e.bogus, e.chk_sum, e.name))
print("%04x %08x %08x %s" % (e.count, e.bogus, e.chk_sum, e.name))
class RemusFileSet(object):
@ -377,7 +398,7 @@ class RemusFileSet(object):
return roms
if __name__ == '__main__':
if __name__ == "__main__":
import sys
from .kickrom import Loader, KickRomAccess

View File

@ -3,16 +3,16 @@ from .romaccess import RomAccess
RTC_MATCHWORD = 0x4AFC
RTF_AUTOINIT = (1 << 7)
RTF_AFTERDOS = (1 << 2)
RTF_SINGLETASK = (1 << 1)
RTF_COLDSTART = (1 << 0)
RTF_AUTOINIT = 1 << 7
RTF_AFTERDOS = 1 << 2
RTF_SINGLETASK = 1 << 1
RTF_COLDSTART = 1 << 0
flag_names = {
RTF_AUTOINIT: "RTF_AUTOINIT",
RTF_AFTERDOS: "RTF_AFTERDOS",
RTF_SINGLETASK: "RTF_SINGLETASK",
RTF_COLDSTART: "RTF_COLDSTART"
RTF_COLDSTART: "RTF_COLDSTART",
}
NT_UNKNOWN = 0
@ -26,13 +26,14 @@ nt_names = {
NT_TASK: "NT_TASK",
NT_DEVICE: "NT_DEVICE",
NT_RESOURCE: "NT_RESOURCE",
NT_LIBRARY: "NT_LIBRARY"
NT_LIBRARY: "NT_LIBRARY",
}
class Resident:
def __init__(self, off, flags, version, node_type, pri, name, id_string,
init_off, skip_off):
def __init__(
self, off, flags, version, node_type, pri, name, id_string, init_off, skip_off
):
self.off = off
self.flags = flags
self.version = version
@ -44,10 +45,21 @@ class Resident:
self.skip_off = skip_off
def __repr__(self):
return "Resident(@off=%08x,flags=%02x,version=%d,node_type=%d," \
"pri=%d,name=%s,id_string=%s,init_off=%08x,skip_off=%08x)" % \
(self.off, self.flags, self.version, self.node_type, self.pri,
self.name, self.id_string, self.init_off, self.skip_off)
return (
"Resident(@off=%08x,flags=%02x,version=%d,node_type=%d,"
"pri=%d,name=%s,id_string=%s,init_off=%08x,skip_off=%08x)"
% (
self.off,
self.flags,
self.version,
self.node_type,
self.pri,
self.name,
self.id_string,
self.init_off,
self.skip_off,
)
)
def get_flags_strings(self):
f = self.flags
@ -71,26 +83,27 @@ class Resident:
if mw != RTC_MATCHWORD:
raise ValueError("No RTC_MATCHWORD at resident offset!")
# +2 RT_MATCHTAG
tag_ptr = access.read_long(off+2)
tag_ptr = access.read_long(off + 2)
if tag_ptr != base_addr + off:
raise ValueError("Wrong MatchTag pointer in resident!")
# +6 RT_ENDSKIP
end_skip_ptr = access.read_long(off+6)
end_skip_ptr = access.read_long(off + 6)
end_skip_off = end_skip_ptr - base_addr
# +10..13 RT_FLAGS, RT_VERSION, RT_TYPE, RT_PRI
flags = access.read_byte(off+10)
version = access.read_byte(off+11)
rtype = access.read_byte(off+12)
pri = access.read_sbyte(off+13)
flags = access.read_byte(off + 10)
version = access.read_byte(off + 11)
rtype = access.read_byte(off + 12)
pri = access.read_sbyte(off + 13)
# +14: RT_NAME
name = cls._parse_cstr(access, off+14, base_addr)
name = cls._parse_cstr(access, off + 14, base_addr)
# +18: RT_IDSTRING
id_string = cls._parse_cstr(access, off+18, base_addr)
id_string = cls._parse_cstr(access, off + 18, base_addr)
# +22: RT_INIT
init_ptr = access.read_long(off+22)
init_ptr = access.read_long(off + 22)
init_off = init_ptr - base_addr
return Resident(off, flags, version, rtype, pri, name, id_string,
init_off, end_skip_off)
return Resident(
off, flags, version, rtype, pri, name, id_string, init_off, end_skip_off
)
@classmethod
def _parse_cstr(cls, access, off, base_addr):
@ -106,11 +119,10 @@ class Resident:
break
res.append(c)
str_off += 1
return bytes(res).decode('latin-1')
return bytes(res).decode("latin-1")
class ResidentScan:
def __init__(self, rom_data, base_addr=0):
self.access = RomAccess(rom_data)
self.base_addr = base_addr
@ -136,10 +148,10 @@ class ResidentScan:
return None
base_map = {}
for off in offs:
tag_ptr = self.access.read_long(off+2)
tag_off = tag_ptr & 0xffff
tag_ptr = self.access.read_long(off + 2)
tag_off = tag_ptr & 0xFFFF
if tag_off == off:
base_addr = tag_ptr & ~0xffff
base_addr = tag_ptr & ~0xFFFF
if base_addr not in base_map:
base_map[base_addr] = 1
else:
@ -158,7 +170,7 @@ class ResidentScan:
res = []
for off in offs:
# check tag ptr
tag_ptr = self.access.read_long(off+2)
tag_ptr = self.access.read_long(off + 2)
if tag_ptr == self.base_addr + off:
res.append(off)
return res
@ -167,7 +179,7 @@ class ResidentScan:
mw = self.access.read_word(off)
if mw != RTC_MATCHWORD:
return False
tag_ptr = self.access.read_long(off+2)
tag_ptr = self.access.read_long(off + 2)
return tag_ptr == self.base_addr + off
def get_resident(self, off):

View File

@ -52,8 +52,8 @@ class RomEntryRomHdr:
return self.skip + 8
def get_data(self, addr):
data = chr(0xff) * self.skip
hdr = struct.pack(">II", 0x11114ef9, self.jmp_addr)
data = chr(0xFF) * self.skip
hdr = struct.pack(">II", 0x11114EF9, self.jmp_addr)
return data + hdr
@ -70,7 +70,7 @@ class RomEntryPadding:
class RomBuilder:
def __init__(self, size=512, base_addr=0xf80000, fill_byte=0xff):
def __init__(self, size=512, base_addr=0xF80000, fill_byte=0xFF):
self.size = size # in KiB
self.base_addr = base_addr
self.fill_byte = fill_byte
@ -114,7 +114,7 @@ class RomBuilder:
files = []
for mod in names:
# is an index file?
if mod.endswith('.txt'):
if mod.endswith(".txt"):
base_path = os.path.dirname(mod)
with open(mod, "r") as fh:
for line in fh:
@ -145,7 +145,7 @@ class RomBuilder:
off = self.rom_off
for mod in self.modules:
n = mod.get_size()
rom_data[off: off+n] = mod.get_data(addr)
rom_data[off : off + n] = mod.get_data(addr)
off += n
addr += n
# fill empty space
@ -157,8 +157,7 @@ class RomBuilder:
class KickRomBuilder(RomBuilder):
def __init__(self, size, kickety_split=True, rom_ver=None,
**kw_args):
def __init__(self, size, kickety_split=True, rom_ver=None, **kw_args):
RomBuilder.__init__(self, size, **kw_args)
self.rom_ver = rom_ver
# do we need a rom header at 256k border? (the original ROMs do this)
@ -180,8 +179,7 @@ class KickRomBuilder(RomBuilder):
def cross_kickety_split(self, num_bytes):
if self.kickety_split:
new_off = self.data_off + num_bytes
return self.data_off < self.split_offset and \
new_off > self.split_offset
return self.data_off < self.split_offset and new_off > self.split_offset
else:
return False
@ -208,8 +206,9 @@ class KickRomBuilder(RomBuilder):
class ExtRomBuilder(RomBuilder):
def __init__(self, size, rom_ver=None, add_footer=False,
kick_addr=0xf80000, **kw_args):
def __init__(
self, size, rom_ver=None, add_footer=False, kick_addr=0xF80000, **kw_args
):
RomBuilder.__init__(self, size, **kw_args)
# kick addr for jump
self.kick_addr = kick_addr
@ -230,7 +229,7 @@ class ExtRomBuilder(RomBuilder):
rom_data = RomBuilder.build_rom(self)
# write a header
kh = KickRomAccess(rom_data)
kh.write_ext_header(self.kick_addr+2, self.rom_ver)
kh.write_ext_header(self.kick_addr + 2, self.rom_ver)
# write footer
if self.add_footer:
kh.write_ext_footer()

View File

@ -21,30 +21,35 @@ class RomPatch:
class OneMegRomPatch(RomPatch):
def __init__(self):
RomPatch.__init__(
self, "1mb_rom", "Patch Kickstart to support ext ROM with 512 KiB")
self, "1mb_rom", "Patch Kickstart to support ext ROM with 512 KiB"
)
def apply_patch(self, access, args=None):
off = 8
while off < 0x400:
v = access.read_long(off)
if v == 0xf80000:
v4 = access.read_long(off+4)
v8 = access.read_long(off+8)
vc = access.read_long(off+0xc)
v10 = access.read_long(off+0x10)
if v4 == 0x1000000 and v8 == 0xf00000 and \
vc == 0xf80000 and v10 == 0xffffffff:
vp8 = access.read_long(off-8)
if vp8 == 0xf80000:
access.write_long(off-4, 0x1000000)
access.write_long(off, 0xe00000)
access.write_long(off+4, 0xe80000)
if v == 0xF80000:
v4 = access.read_long(off + 4)
v8 = access.read_long(off + 8)
vc = access.read_long(off + 0xC)
v10 = access.read_long(off + 0x10)
if (
v4 == 0x1000000
and v8 == 0xF00000
and vc == 0xF80000
and v10 == 0xFFFFFFFF
):
vp8 = access.read_long(off - 8)
if vp8 == 0xF80000:
access.write_long(off - 4, 0x1000000)
access.write_long(off, 0xE00000)
access.write_long(off + 4, 0xE80000)
logging.info("@%08x Variant A", off)
return True
else:
access.write_long(off, 0xf00000)
access.write_long(off+8, 0xe00000)
access.write_long(off+0xc, 0xe80000)
access.write_long(off, 0xF00000)
access.write_long(off + 8, 0xE00000)
access.write_long(off + 0xC, 0xE80000)
logging.info("@%08x Variant B", off)
return True
off += 2
@ -54,9 +59,12 @@ class OneMegRomPatch(RomPatch):
class BootConRomPatch(RomPatch):
def __init__(self):
RomPatch.__init__(self, "boot_con", "Set the boot console",
{"name": "name of the new console,"
" e.g. 'CON:MyConsole'"})
RomPatch.__init__(
self,
"boot_con",
"Set the boot console",
{"name": "name of the new console," " e.g. 'CON:MyConsole'"},
)
def apply_patch(self, access, args):
# search CON:
@ -66,7 +74,7 @@ class BootConRomPatch(RomPatch):
logging.error("console not found!")
return False
# find terminator
pos = data.find(b'\0', off)
pos = data.find(b"\0", off)
if pos == -1:
logging.error("no console end found!")
return False
@ -75,17 +83,16 @@ class BootConRomPatch(RomPatch):
con_old = data[off:pos]
logging.info("@%08x: +%08x old='%s'" % (off, con_old_len, con_old))
# check new string
if 'name' in args:
con_new = args['name'].encode('latin-1')
if "name" in args:
con_new = args["name"].encode("latin-1")
con_new_len = len(con_new)
if con_new_len > con_old_len:
logging.error("new console name is too long (>%d)!",
con_old_len)
logging.error("new console name is too long (>%d)!", con_old_len)
return False
# pad and write to rom
pad_len = con_old_len - con_new_len + 1
con_new += b'\0' * pad_len
data[off:pos+1] = con_new
con_new += b"\0" * pad_len
data[off : pos + 1] = con_new
logging.info("new='%s'" % (con_new_len))
return True
@ -93,10 +100,7 @@ class BootConRomPatch(RomPatch):
class RomPatcher:
# list of all available patch classes
patches = [
OneMegRomPatch(),
BootConRomPatch()
]
patches = [OneMegRomPatch(), BootConRomPatch()]
def __init__(self, rom):
self.access = RomAccess(rom)

View File

@ -7,10 +7,10 @@ import amitools.util.DataDir as DataDir
from amitools.binfmt.BinImage import (
BinImage,
Segment,
Relocations,
Relocations,
Reloc,
BIN_IMAGE_TYPE_HUNK,
SEGMENT_TYPE_CODE
SEGMENT_TYPE_CODE,
)
@ -50,20 +50,28 @@ class RomSplitter:
def print_rom(self, out, show_entries=False):
rom = self.remus_rom
out("rom @%06x +%06x sum=%08x@%08x %s" %
(rom.base_addr, rom.size, rom.chk_sum, rom.sum_off, rom.name))
out(
"rom @%06x +%06x sum=%08x@%08x %s"
% (rom.base_addr, rom.size, rom.chk_sum, rom.sum_off, rom.name)
)
if show_entries:
for module in rom.modules:
self.print_entry(out, module)
def print_entry(self, out, entry):
out(" @%06x +%06x =%06x (r:%5d,f:%2d,p:%2d) sum=%08x %s" %
(entry.offset, entry.size, entry.offset+entry.size,
len(entry.extra.relocs),
len(entry.extra.fixes),
len(entry.extra.patches),
entry.extra.chk_sum,
entry.name))
out(
" @%06x +%06x =%06x (r:%5d,f:%2d,p:%2d) sum=%08x %s"
% (
entry.offset,
entry.size,
entry.offset + entry.size,
len(entry.extra.relocs),
len(entry.extra.fixes),
len(entry.extra.patches),
entry.extra.chk_sum,
entry.name,
)
)
def print_entries(self, out, entries):
for e in entries:
@ -81,7 +89,7 @@ class RomSplitter:
def extract_entry(self, entry, fixes=True, patches=False):
"""return data, relocs"""
data = self.rom_data[entry.offset:entry.offset+entry.size]
data = self.rom_data[entry.offset : entry.offset + entry.size]
extra = entry.extra
relocs = extra.relocs
entry_addr = self.remus_rom.base_addr + entry.offset
@ -102,8 +110,9 @@ class RomSplitter:
for off in relocs:
addr = struct.unpack_from(">I", data, off)[0]
if addr < base_addr:
raise ValueError("Invalid relocatable address: %08x base=%08x"
% (addr, base_addr))
raise ValueError(
"Invalid relocatable address: %08x base=%08x" % (addr, base_addr)
)
addr -= base_addr
struct.pack_into(">I", data, off, addr)
return data

View File

@ -5,74 +5,84 @@ import io
from amitools.fs.blkdev.BlkDevFactory import BlkDevFactory
from amitools.fs.ADFSVolume import ADFSVolume
class ADFSScanner:
def __init__(self):
self.factory = BlkDevFactory()
def __init__(self):
self.factory = BlkDevFactory()
def can_handle(self, scan_file):
base_name = scan_file.get_basename().lower()
for ext in self.factory.valid_extensions:
if base_name.endswith(ext):
return True
return False
def can_handle(self, scan_file):
base_name = scan_file.get_basename().lower()
for ext in self.factory.valid_extensions:
if base_name.endswith(ext):
return True
return False
def handle(self, scan_file, scanner):
if scan_file.is_seekable():
sf = scan_file
else:
sf = scanner.promote_scan_file(scan_file, seekable=True)
# create blkdev
blkdev = self.factory.open(sf.get_local_path(), fobj=sf.get_fobj())
# create volume
volume = ADFSVolume(blkdev)
volume.open()
# scan volume
node = volume.get_root_dir()
ok = self._scan_node(sf, scanner, node)
# done
volume.close()
blkdev.close()
return ok
def handle(self, scan_file, scanner):
if scan_file.is_seekable():
sf = scan_file
else:
sf = scanner.promote_scan_file(scan_file, seekable=True)
# create blkdev
blkdev = self.factory.open(sf.get_local_path(), fobj=sf.get_fobj())
# create volume
volume = ADFSVolume(blkdev)
volume.open()
# scan volume
node = volume.get_root_dir()
ok = self._scan_node(sf, scanner, node)
# done
volume.close()
blkdev.close()
return ok
def _scan_node(self, scan_file, scanner, node):
if node.is_dir():
# recurse into dir
entries = node.get_entries()
for e in entries:
ok = self._scan_node(scan_file, scanner, e)
if not ok:
return False
return True
elif node.is_file():
# read file in ram fobj
data = node.get_file_data()
node.flush()
size = len(data)
path = node.get_node_path_name().get_unicode()
fobj = io.StringIO(data)
sf = scan_file.create_sub_path(path, fobj, size, True, False)
ok = scanner.scan_obj(sf)
sf.close()
return True
def _scan_node(self, scan_file, scanner, node):
if node.is_dir():
# recurse into dir
entries = node.get_entries()
for e in entries:
ok = self._scan_node(scan_file, scanner, e)
if not ok:
return False
return True
elif node.is_file():
# read file in ram fobj
data = node.get_file_data()
node.flush()
size = len(data)
path = node.get_node_path_name().get_unicode()
fobj = io.StringIO(data)
sf = scan_file.create_sub_path(path, fobj, size, True, False)
ok = scanner.scan_obj(sf)
sf.close()
return True
# mini test
if __name__ == '__main__':
import sys
from .FileScanner import FileScanner
if __name__ == "__main__":
import sys
from .FileScanner import FileScanner
ifs = ['*.txt']
def handler(scan_file):
print(scan_file)
return True
def skip_handler(scan_file):
print(("SKIP:", scan_file))
return True
def error_handler(scan_file, error):
print(("FAILED:", scan_file, error))
raise error
scanners = [ADFSScanner()]
fs = FileScanner(handler, ignore_filters=ifs, error_handler=error_handler,
scanners=scanners, skip_handler=skip_handler)
for a in sys.argv[1:]:
fs.scan(a)
ifs = ["*.txt"]
def handler(scan_file):
print(scan_file)
return True
def skip_handler(scan_file):
print(("SKIP:", scan_file))
return True
def error_handler(scan_file, error):
print(("FAILED:", scan_file, error))
raise error
scanners = [ADFSScanner()]
fs = FileScanner(
handler,
ignore_filters=ifs,
error_handler=error_handler,
scanners=scanners,
skip_handler=skip_handler,
)
for a in sys.argv[1:]:
fs.scan(a)

View File

@ -1,116 +1,123 @@
import zipfile
import io
# optional lhafile
try:
import lhafile
import lhafile
except ImportError:
lhafile = None
lhafile = None
class ArchiveScanner:
"""Scan archives and visit all files"""
"""Scan archives and visit all files"""
exts = [] # valid file extensions
exts = [] # valid file extensions
def _create_archive_obj(self, fobj, scanner):
pass
def _create_archive_obj(self, fobj, scanner):
pass
def _create_entry_scan_file(self, arc, info, sf):
pass
def _create_entry_scan_file(self, arc, info, sf):
pass
def can_handle(self, scan_file):
base_name = scan_file.get_basename().lower()
for ext in self.exts:
if base_name.endswith(ext):
def can_handle(self, scan_file):
base_name = scan_file.get_basename().lower()
for ext in self.exts:
if base_name.endswith(ext):
return True
return False
def handle(self, scan_file, scanner):
"""scan a given archive file"""
# ensure a seekable fobj
if not scan_file.is_seekable():
sf = scanner.promote_scan_file(scan_file, seekable=True)
else:
sf = scan_file
# create archive obj
arc = self._create_archive_obj(sf, scanner)
if arc is None:
return True # simply ignore
# get infos
infos = arc.infolist()
for info in infos:
if info.file_size > 0:
sf = self._create_entry_scan_file(arc, info, scan_file)
ok = scanner.scan_obj(sf)
sf.close()
if not ok:
return False
return True
return False
def handle(self, scan_file, scanner):
"""scan a given archive file"""
# ensure a seekable fobj
if not scan_file.is_seekable():
sf = scanner.promote_scan_file(scan_file, seekable=True)
else:
sf = scan_file
# create archive obj
arc = self._create_archive_obj(sf, scanner)
if arc is None:
return True # simply ignore
# get infos
infos = arc.infolist()
for info in infos:
if info.file_size > 0:
sf = self._create_entry_scan_file(arc, info, scan_file)
ok = scanner.scan_obj(sf)
sf.close()
if not ok:
return False
return True
class ZipScanner(ArchiveScanner):
"""Scan .zip Archives"""
"""Scan .zip Archives"""
exts = [".zip"]
exts = [".zip"]
def _create_archive_obj(self, sf, scanner):
try:
fobj = sf.get_fobj()
return zipfile.ZipFile(fobj, "r")
except Exception as e:
scanner.warning(sf, "error reading archive: %s" % e)
def _create_archive_obj(self, sf, scanner):
try:
fobj = sf.get_fobj()
return zipfile.ZipFile(fobj, "r")
except Exception as e:
scanner.warning(sf, "error reading archive: %s" % e)
def _create_entry_scan_file(self, arc, info, scan_file):
name = info.filename
fobj = arc.open(info)
size = info.file_size
# its a non-seekable file
return scan_file.create_sub_path(name, fobj, size, False, False)
def _create_entry_scan_file(self, arc, info, scan_file):
name = info.filename
fobj = arc.open(info)
size = info.file_size
# its a non-seekable file
return scan_file.create_sub_path(name, fobj, size, False, False)
class LhaScanner(ArchiveScanner):
"""Scan .lha/.lzh Archives"""
"""Scan .lha/.lzh Archives"""
exts = [".lha", ".lzh"]
exts = [".lha", ".lzh"]
def _create_archive_obj(self, sf, scanner):
if lhafile:
try:
fobj = sf.get_fobj()
return lhafile.LhaFile(fobj, "r")
except Exception as e:
scanner.warning(sf, "error reading archive: %s" % e)
else:
scanner.warning(sf, "can't handle archive. missing 'lhafile' module.")
def _create_archive_obj(self, sf, scanner):
if lhafile:
try:
fobj = sf.get_fobj()
return lhafile.LhaFile(fobj, "r")
except Exception as e:
scanner.warning(sf, "error reading archive: %s" % e)
else:
scanner.warning(sf, "can't handle archive. missing 'lhafile' module.")
def _create_entry_scan_file(self, arc, info, scan_file):
data = arc.read(info.filename)
fobj = io.StringIO(data)
size = info.file_size
name = info.filename
return scan_file.create_sub_path(name, fobj, size, True, False)
def _create_entry_scan_file(self, arc, info, scan_file):
data = arc.read(info.filename)
fobj = io.StringIO(data)
size = info.file_size
name = info.filename
return scan_file.create_sub_path(name, fobj, size, True, False)
# mini test
if __name__ == '__main__':
import sys
from .FileScanner import FileScanner
if __name__ == "__main__":
import sys
from .FileScanner import FileScanner
ifs = ['*.txt']
def handler(scan_file):
print(scan_file)
return True
def skip_handler(scan_file):
print("SKIP:", scan_file)
return True
def error_handler(scan_file, error):
print("FAILED:", scan_file, error)
raise error
scanners = [LhaScanner(), ZipScanner()]
fs = FileScanner(handler, ignore_filters=ifs, error_handler=error_handler,
scanners=scanners, skip_handler=skip_handler)
for a in sys.argv[1:]:
fs.scan(a)
ifs = ["*.txt"]
def handler(scan_file):
print(scan_file)
return True
def skip_handler(scan_file):
print("SKIP:", scan_file)
return True
def error_handler(scan_file, error):
print("FAILED:", scan_file, error)
raise error
scanners = [LhaScanner(), ZipScanner()]
fs = FileScanner(
handler,
ignore_filters=ifs,
error_handler=error_handler,
scanners=scanners,
skip_handler=skip_handler,
)
for a in sys.argv[1:]:
fs.scan(a)

View File

@ -1,132 +1,142 @@
# scan a set of file
import os
import fnmatch
import tempfile
from .ScanFile import ScanFile
class FileScanner:
def __init__(self, handler=None, ignore_filters=None, scanners=None,
error_handler=None, ram_bytes=10 * 1024 * 1024,
skip_handler=None, warning_handler=None):
"""the handler will be called with all the scanned files.
class FileScanner:
def __init__(
self,
handler=None,
ignore_filters=None,
scanners=None,
error_handler=None,
ram_bytes=10 * 1024 * 1024,
skip_handler=None,
warning_handler=None,
):
"""the handler will be called with all the scanned files.
the optional ignore_filters contains a list of glob pattern to
ignore file names"""
self.handler = handler
self.error_handler = error_handler
self.warning_handler = warning_handler
self.skip_handler = skip_handler
self.ignore_filters = ignore_filters
self.scanners = scanners
self.ram_bytes = ram_bytes
self.handler = handler
self.error_handler = error_handler
self.warning_handler = warning_handler
self.skip_handler = skip_handler
self.ignore_filters = ignore_filters
self.scanners = scanners
self.ram_bytes = ram_bytes
def scan(self, path):
"""start scanning a path. either a file or directory"""
if os.path.isdir(path):
return self._scan_dir(path)
elif os.path.isfile(path):
return self._scan_file(path)
else:
return True
def scan(self, path):
"""start scanning a path. either a file or directory"""
if os.path.isdir(path):
return self._scan_dir(path)
elif os.path.isfile(path):
return self._scan_file(path)
else:
return True
def scan_obj(self, scan_file, check_ignore=True):
"""pass a ScanFile to check"""
if check_ignore and self._is_ignored(scan_file.get_local_path()):
return False
# does a scanner match?
sf = scan_file
sc = self.scanners
if sc is not None:
for s in sc:
if s.can_handle(sf):
ok = s.handle(sf, self)
sf.close()
return ok
# no match call user's handler
ok = self._call_handler(sf)
sf.close()
return ok
def scan_obj(self, scan_file, check_ignore=True):
"""pass a ScanFile to check"""
if check_ignore and self._is_ignored(scan_file.get_local_path()):
return False
# does a scanner match?
sf = scan_file
sc = self.scanners
if sc is not None:
for s in sc:
if s.can_handle(sf):
ok = s.handle(sf, self)
sf.close()
return ok
# no match call user's handler
ok = self._call_handler(sf)
sf.close()
return ok
def _scan_dir(self, path):
if self._is_ignored(path):
return True
for root, dirs, files in os.walk(path):
for name in files:
if not self._scan_file(os.path.join(root,name)):
return False
for name in dirs:
if not self._scan_dir(os.path.join(root,name)):
return False
return True
def _scan_file(self, path):
if self._is_ignored(path):
return True
# build a scan file
try:
size = os.path.getsize(path)
with open(path, "rb") as fobj:
sf = ScanFile(path, fobj, size, True, True)
return self.scan_obj(sf, False)
except IOError as e:
eh = self.error_handler
if eh is not None:
sf = ScanFile(path, None, 0)
return eh(sf, e)
else:
# ignore error
def _scan_dir(self, path):
if self._is_ignored(path):
return True
for root, dirs, files in os.walk(path):
for name in files:
if not self._scan_file(os.path.join(root, name)):
return False
for name in dirs:
if not self._scan_dir(os.path.join(root, name)):
return False
return True
def _is_ignored(self, path):
if self.ignore_filters is not None:
base = os.path.basename(path)
for f in self.ignore_filters:
if fnmatch.fnmatch(base, f):
return True
return False
def _scan_file(self, path):
if self._is_ignored(path):
return True
# build a scan file
try:
size = os.path.getsize(path)
with open(path, "rb") as fobj:
sf = ScanFile(path, fobj, size, True, True)
return self.scan_obj(sf, False)
except IOError as e:
eh = self.error_handler
if eh is not None:
sf = ScanFile(path, None, 0)
return eh(sf, e)
else:
# ignore error
return True
def _call_handler(self, scan_file):
if self.handler is not None:
return self.handler(scan_file)
else:
return True
def _is_ignored(self, path):
if self.ignore_filters is not None:
base = os.path.basename(path)
for f in self.ignore_filters:
if fnmatch.fnmatch(base, f):
return True
return False
def _call_skip_handler(self, scan_file):
if self.skip_handler is not None:
return self.skip_handler(scan_file)
else:
return True
def _call_handler(self, scan_file):
if self.handler is not None:
return self.handler(scan_file)
else:
return True
def promote_scan_file(self, scan_file, seekable=False, file_based=False):
if not seekable and not file_base:
return scan_file
fb = file_based
if not fb and seekable and scan_file.size > self.ram_bytes:
fb = True
sf = scan_file.create_clone(seekable, fb)
scan_file.close()
return sf
def _call_skip_handler(self, scan_file):
if self.skip_handler is not None:
return self.skip_handler(scan_file)
else:
return True
def warn(self, scan_file, msg):
wh = self.warning_handler
if wh is not None:
wh(scan_file, msg)
def promote_scan_file(self, scan_file, seekable=False, file_based=False):
if not seekable and not file_base:
return scan_file
fb = file_based
if not fb and seekable and scan_file.size > self.ram_bytes:
fb = True
sf = scan_file.create_clone(seekable, fb)
scan_file.close()
return sf
def warn(self, scan_file, msg):
wh = self.warning_handler
if wh is not None:
wh(scan_file, msg)
# mini test
if __name__ == '__main__':
import sys
ifs = ['*.txt']
def handler(scan_file):
print(scan_file)
return True
def error_handler(scan_file, error):
print("FAILED:", scan_file, error)
raise error
fs = FileScanner(handler, ignore_filters=ifs, error_handler=error_handler)
for a in sys.argv[1:]:
fs.scan(a)
if __name__ == "__main__":
import sys
ifs = ["*.txt"]
def handler(scan_file):
print(scan_file)
return True
def error_handler(scan_file, error):
print("FAILED:", scan_file, error)
raise error
fs = FileScanner(handler, ignore_filters=ifs, error_handler=error_handler)
for a in sys.argv[1:]:
fs.scan(a)

View File

@ -1,76 +1,79 @@
import os
import io
class ScanFile:
"""a file that is currently scanned"""
"""a file that is currently scanned"""
def __init__(self, path, fobj, size, seekable=True, file_based=True):
"""create a scan file from a host file object"""
if type(path) is list:
self.paths = path
else:
self.paths = [path]
self.fobj = fobj
self.size = size
self.seekable = seekable
self.file_based = file_based
def __init__(self, path, fobj, size, seekable=True, file_based=True):
"""create a scan file from a host file object"""
if type(path) is list:
self.paths = path
else:
self.paths = [path]
self.fobj = fobj
self.size = size
self.seekable = seekable
self.file_based = file_based
def __str__(self):
return "[%s:%d, seekable=%s, file_based=%s, fobj=%s]" % \
(self.get_path(), self.size, self.seekable, self.file_based,
self.fobj.__class__.__name__)
def __str__(self):
return "[%s:%d, seekable=%s, file_based=%s, fobj=%s]" % (
self.get_path(),
self.size,
self.seekable,
self.file_based,
self.fobj.__class__.__name__,
)
def __repr__(self):
return self.__str__()
def __repr__(self):
return self.__str__()
def is_seekable(self):
return self.seekable
def is_seekable(self):
return self.seekable
def is_file_based(self):
return self.file_based
def is_file_based(self):
return self.file_based
def get_path(self):
return ";".join(self.paths)
def get_path(self):
return ";".join(self.paths)
def get_local_path(self):
return self.paths[-1]
def get_local_path(self):
return self.paths[-1]
def get_basename(self):
return os.path.basename(self.paths[-1])
def get_basename(self):
return os.path.basename(self.paths[-1])
def get_fobj(self):
return self.fobj
def get_fobj(self):
return self.fobj
def is_host_path(self):
return len(self.paths) == 1
def is_host_path(self):
return len(self.paths) == 1
def close(self):
self.fobj.close()
def close(self):
self.fobj.close()
def create_sub_path(self, sub_path, fobj, size, seekable, file_based):
paths = self.paths[:]
paths.append(sub_path)
return ScanFile(paths, fobj, size, seekable, file_based)
def create_sub_path(self, sub_path, fobj, size, seekable, file_based):
paths = self.paths[:]
paths.append(sub_path)
return ScanFile(paths, fobj, size, seekable, file_based)
def create_clone(self, seekable, file_based):
src_fobj = self.fobj
# create a temp file
if file_based:
fobj = tempfile.TemporaryFile()
# copy original file
blk_size = 4096
while True:
buf = src_fobj.read(blk_size)
if len(buf) == 0:
break
fobj.write(buf)
# create a string buffer
else:
data = src_fobj.read()
fobj = io.StringIO(data)
# close old scan file
src_fobj.close()
# create promoted file
return ScanFile(self.paths, fobj, self.size, seekable, file_based)
def create_clone(self, seekable, file_based):
src_fobj = self.fobj
# create a temp file
if file_based:
fobj = tempfile.TemporaryFile()
# copy original file
blk_size = 4096
while True:
buf = src_fobj.read(blk_size)
if len(buf) == 0:
break
fobj.write(buf)
# create a string buffer
else:
data = src_fobj.read()
fobj = io.StringIO(data)
# close old scan file
src_fobj.close()
# create promoted file
return ScanFile(self.paths, fobj, self.size, seekable, file_based)

View File

@ -10,78 +10,114 @@ import amitools.fd.FDFormat as FDFormat
# ----- dump -----
def dump(fname, fd, add_private):
print(fname)
print((" base: %s" % fd.get_base_name()))
funcs = fd.get_funcs()
num = 1
for f in funcs:
if add_private or not f.is_private():
bias = f.get_bias()
print((" #%04d %5d 0x%04x %30s %s" % (num,bias,bias,f.get_name(),f.get_arg_str())))
num += 1
print(fname)
print((" base: %s" % fd.get_base_name()))
funcs = fd.get_funcs()
num = 1
for f in funcs:
if add_private or not f.is_private():
bias = f.get_bias()
print(
(
" #%04d %5d 0x%04x %30s %s"
% (num, bias, bias, f.get_name(), f.get_arg_str())
)
)
num += 1
# ----- generate -----
def generate_python_code(fd, add_private):
funcs = fd.get_funcs()
for f in funcs:
if add_private or not f.is_private():
args = f.get_args()
if len(args)>0:
args = tuple(args)
else:
args = None
print(" (%d, '%s', %s)," % (f.get_bias(),f.get_name(),args))
funcs = fd.get_funcs()
for f in funcs:
if add_private or not f.is_private():
args = f.get_args()
if len(args) > 0:
args = tuple(args)
else:
args = None
print(" (%d, '%s', %s)," % (f.get_bias(), f.get_name(), args))
def generate_sasc_code(fname, fd, add_private, prefix=""):
funcs = fd.get_funcs()
fo = open(fname, "w")
for f in funcs:
if add_private or not f.is_private():
line = "__asm __saveds int %s%s(" % (prefix, f.get_name())
args = f.get_args()
if args != None:
for a in args:
line += "register __%s int %s" % (a[1],a[0])
if a != args[-1]:
line += ", "
else:
line += " void "
line += " )"
fo.write(line)
fo.write("{\n return 0;\n}\n\n")
fo.close()
funcs = fd.get_funcs()
fo = open(fname, "w")
for f in funcs:
if add_private or not f.is_private():
line = "__asm __saveds int %s%s(" % (prefix, f.get_name())
args = f.get_args()
if args != None:
for a in args:
line += "register __%s int %s" % (a[1], a[0])
if a != args[-1]:
line += ", "
else:
line += " void "
line += " )"
fo.write(line)
fo.write("{\n return 0;\n}\n\n")
fo.close()
# ----- main -----
def main():
# parse args
parser = argparse.ArgumentParser()
parser.add_argument('files', nargs='+')
parser.add_argument('-P', '--add-private', action='store_true', default=False, help="add private functions")
parser.add_argument('-p', '--gen-python', action='store_true', default=False, help="generate python code for vamos")
parser.add_argument('-f', '--gen-fd', action='store', default=None, help="generate a new fd file")
parser.add_argument('-c', '--gen-sasc', action='store', default=None, help="generate SAS C code file")
parser.add_argument('-E', '--prefix', action='store', default='', help="add prefix to functions in C")
args = parser.parse_args()
# parse args
parser = argparse.ArgumentParser()
parser.add_argument("files", nargs="+")
parser.add_argument(
"-P",
"--add-private",
action="store_true",
default=False,
help="add private functions",
)
parser.add_argument(
"-p",
"--gen-python",
action="store_true",
default=False,
help="generate python code for vamos",
)
parser.add_argument(
"-f", "--gen-fd", action="store", default=None, help="generate a new fd file"
)
parser.add_argument(
"-c",
"--gen-sasc",
action="store",
default=None,
help="generate SAS C code file",
)
parser.add_argument(
"-E",
"--prefix",
action="store",
default="",
help="add prefix to functions in C",
)
args = parser.parse_args()
# main loop
files = args.files
for fname in files:
fd = FDFormat.read_fd(fname)
code_gen = False
if args.gen_python:
generate_python_code(fd, args.add_private)
code_gen = True
if args.gen_sasc:
generate_sasc_code(args.gen_sasc, fd, args.add_private, args.prefix)
code_gen = True
if args.gen_fd != None:
FDFormat.write_fd(args.gen_fd, fd, args.add_private)
code_gen = True
if not code_gen:
dump(fname, fd, args.add_private)
# main loop
files = args.files
for fname in files:
fd = FDFormat.read_fd(fname)
code_gen = False
if args.gen_python:
generate_python_code(fd, args.add_private)
code_gen = True
if args.gen_sasc:
generate_sasc_code(args.gen_sasc, fd, args.add_private, args.prefix)
code_gen = True
if args.gen_fd != None:
FDFormat.write_fd(args.gen_fd, fd, args.add_private)
code_gen = True
if not code_gen:
dump(fname, fd, args.add_private)
if __name__ == '__main__':
main()
if __name__ == "__main__":
main()

View File

@ -1,8 +1,6 @@
#!/usr/bin/env python3
import sys
import os.path
import amitools.util.KeyValue as KeyValue
@ -10,80 +8,86 @@ import amitools.util.ByteSize as ByteSize
from amitools.fs.blkdev.DiskGeometry import DiskGeometry
from amitools.fs.blkdev.BlkDevFactory import BlkDevFactory
def main():
a = sys.argv
n = len(a)
if n < 3:
print("Usage: (detect <size|file> [options] | setup <options> | open <file> [options] | create <file> <options>)")
print("""Options:
a = sys.argv
n = len(a)
if n < 3:
print(
"Usage: (detect <size|file> [options] | setup <options> | open <file> [options] | create <file> <options>)"
)
print(
"""Options:
size=<size>
chs=<n>,<n>,<n>
c=<n> h=<n> s=<n>
algo=1|2
""")
return 1
else:
cmd = a[1]
# detect disk geometry from given image file
if cmd == 'detect':
if os.path.exists(a[2]):
# its a file
size = os.path.getsize(a[2])
else:
# parse size string
size = ByteSize.parse_byte_size_str(a[2])
if size == None:
print("Invalid size!")
else:
d = DiskGeometry()
opts = None
if n > 3:
opts = KeyValue.parse_key_value_strings(a[3:])
print("size: ",size)
print("opts: ",opts)
size = d.detect(size, opts)
if size != None:
print("geo: ", d)
else:
print("FAILED")
# setup a new disk geometry from options
elif cmd == 'setup' :
d = DiskGeometry()
opts = KeyValue.parse_key_value_strings(a[2:])
print("opts: ", opts)
size = d.setup(opts)
if size != None:
print("setup: ", size, ByteSize.to_byte_size_str(size))
print("geo: ", d)
else:
print("FAILED")
# open a blkdev and detect geometry
elif cmd == 'open':
opts = None
if n > 3:
opts = KeyValue.parse_key_value_strings(a[3:])
print("opts: ", opts)
f = BlkDevFactory()
blkdev = f.open(a[2], options=opts)
if blkdev != None:
print("blkdev: ", blkdev.__class__.__name__)
print("geo: ", blkdev.get_geometry())
blkdev.close()
else:
print("FAILED")
# create a new blkdev with setup geometry
elif cmd == 'create':
opts = KeyValue.parse_key_value_strings(a[3:])
print("opts: ", opts)
f = BlkDevFactory()
blkdev = f.create(a[2], options=opts)
if blkdev != None:
print("blkdev: ",blkdev.__class__.__name__)
print("geo: ",blkdev.get_geometry())
blkdev.close()
else:
print("FAILED")
return 0
"""
)
return 1
else:
cmd = a[1]
# detect disk geometry from given image file
if cmd == "detect":
if os.path.exists(a[2]):
# its a file
size = os.path.getsize(a[2])
else:
# parse size string
size = ByteSize.parse_byte_size_str(a[2])
if size == None:
print("Invalid size!")
else:
d = DiskGeometry()
opts = None
if n > 3:
opts = KeyValue.parse_key_value_strings(a[3:])
print("size: ", size)
print("opts: ", opts)
size = d.detect(size, opts)
if size != None:
print("geo: ", d)
else:
print("FAILED")
# setup a new disk geometry from options
elif cmd == "setup":
d = DiskGeometry()
opts = KeyValue.parse_key_value_strings(a[2:])
print("opts: ", opts)
size = d.setup(opts)
if size != None:
print("setup: ", size, ByteSize.to_byte_size_str(size))
print("geo: ", d)
else:
print("FAILED")
# open a blkdev and detect geometry
elif cmd == "open":
opts = None
if n > 3:
opts = KeyValue.parse_key_value_strings(a[3:])
print("opts: ", opts)
f = BlkDevFactory()
blkdev = f.open(a[2], options=opts)
if blkdev != None:
print("blkdev: ", blkdev.__class__.__name__)
print("geo: ", blkdev.get_geometry())
blkdev.close()
else:
print("FAILED")
# create a new blkdev with setup geometry
elif cmd == "create":
opts = KeyValue.parse_key_value_strings(a[3:])
print("opts: ", opts)
f = BlkDevFactory()
blkdev = f.create(a[2], options=opts)
if blkdev != None:
print("blkdev: ", blkdev.__class__.__name__)
print("geo: ", blkdev.get_geometry())
blkdev.close()
else:
print("FAILED")
return 0
if __name__ == '__main__':
sys.exit(main())
if __name__ == "__main__":
sys.exit(main())

View File

@ -21,221 +21,300 @@ from amitools.binfmt.hunk import HunkRelocate
import amitools.binfmt.elf
from amitools.util.HexDump import *
def print_pretty(data):
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(data)
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(data)
# ----- commands -------------------------------------------------------------
class HunkCommand:
def __init__(self, args):
self.counts = {}
self.args = args
self.failed_files = []
def __init__(self, args):
self.counts = {}
self.args = args
self.failed_files = []
def handle_file(self, path, hunk_file, error_code, delta):
if error_code not in self.counts:
self.counts[error_code] = 0
self.counts[error_code] += 1
def handle_file(self, path, hunk_file, error_code, delta):
if error_code not in self.counts:
self.counts[error_code] = 0
self.counts[error_code] += 1
print("%s (%.4fs)" % (path, delta), end=' ')
print("%s (%.4fs)" % (path, delta), end=" ")
# abort if hunk parser failed!
if error_code != Hunk.RESULT_OK:
print(Hunk.result_names[error_code], hunk_file.error_string)
if self.args.dump:
print_pretty(hunk_file.hunks)
self.failed_files.append( (path, "READ: " + hunk_file.error_string) )
return not self.args.stop
# abort if hunk parser failed!
if error_code != Hunk.RESULT_OK:
print(Hunk.result_names[error_code], hunk_file.error_string)
if self.args.dump:
print_pretty(hunk_file.hunks)
self.failed_files.append((path, "READ: " + hunk_file.error_string))
return not self.args.stop
# if verbose then print block structure
if self.args.verbose:
print()
print(" hunks: ",hunk_file.get_hunk_summary())
if self.args.dump:
print_pretty(hunk_file.hunks)
print(" type: ", end=' ')
# if verbose then print block structure
if self.args.verbose:
print()
print(" hunks: ", hunk_file.get_hunk_summary())
if self.args.dump:
print_pretty(hunk_file.hunks)
print(" type: ", end=" ")
# build segments from hunks
ok = hunk_file.build_segments()
if not ok:
print("BUILD SEGMENTS FAILED: %s" % (hunk_file.error_string))
self.failed_files.append( (path, "BUILD: " + hunk_file.error_string) )
return not self.args.stop
# build segments from hunks
ok = hunk_file.build_segments()
if not ok:
print("BUILD SEGMENTS FAILED: %s" % (hunk_file.error_string))
self.failed_files.append((path, "BUILD: " + hunk_file.error_string))
return not self.args.stop
# print recognized file type name
print(Hunk.type_names[hunk_file.type], end=' ')
# print recognized file type name
print(Hunk.type_names[hunk_file.type], end=" ")
# if verbose then print hunk structure
if self.args.verbose:
print()
print(" segments: ",hunk_file.get_segment_summary())
print(" overlays: ",hunk_file.get_overlay_segment_summary())
print(" libs: ",hunk_file.get_libs_summary())
print(" units: ",hunk_file.get_units_summary())
if self.args.dump:
print_pretty(hunk_file.hunks)
else:
print()
# if verbose then print hunk structure
if self.args.verbose:
print()
print(" segments: ", hunk_file.get_segment_summary())
print(" overlays: ", hunk_file.get_overlay_segment_summary())
print(" libs: ", hunk_file.get_libs_summary())
print(" units: ", hunk_file.get_units_summary())
if self.args.dump:
print_pretty(hunk_file.hunks)
else:
print()
# do special processing on hunk file for command
ok = self.handle_hunk_file(path, hunk_file)
return ok
# do special processing on hunk file for command
ok = self.handle_hunk_file(path, hunk_file)
return ok
def result(self):
for code in list(self.counts.keys()):
print(Hunk.result_names[code],":",self.counts[code])
for failed in self.failed_files:
print(failed[0],failed[1])
return 0
def result(self):
for code in list(self.counts.keys()):
print(Hunk.result_names[code], ":", self.counts[code])
for failed in self.failed_files:
print(failed[0], failed[1])
return 0
def process_file(self, scan_file):
path = scan_file.get_path()
fobj = scan_file.get_fobj()
hunk_file = HunkReader.HunkReader()
start = time.perf_counter()
result = hunk_file.read_file_obj(path,fobj)
end = time.perf_counter()
delta = end - start
# ignore non hunk files
if result == Hunk.RESULT_NO_HUNK_FILE:
return True
return self.handle_file(path, hunk_file, result, delta)
def process_file(self, scan_file):
path = scan_file.get_path()
fobj = scan_file.get_fobj()
hunk_file = HunkReader.HunkReader()
start = time.perf_counter()
result = hunk_file.read_file_obj(path, fobj)
end = time.perf_counter()
delta = end - start
# ignore non hunk files
if result == Hunk.RESULT_NO_HUNK_FILE:
return True
return self.handle_file(path, hunk_file, result, delta)
def run(self):
# setup error handler
def error_handler(sf, e):
print("FAILED", sf.get_path(), e)
return not self.args.stop
def warning_handler(sf, msg):
print("WARNING", sf.get_path(), msg)
# setup scanners
scanners = [ADFSScanner(), ZipScanner(), LhaScanner()]
scanner = FileScanner(
self.process_file,
error_handler=error_handler,
warning_handler=warning_handler,
scanners=scanners,
)
for path in self.args.files:
ok = scanner.scan(path)
if not ok:
print("ABORTED")
return False
return True
def run(self):
# setup error handler
def error_handler(sf, e):
print("FAILED", sf.get_path(), e)
return not self.args.stop
def warning_handler(sf, msg):
print("WARNING", sf.get_path(), msg)
# setup scanners
scanners = [ADFSScanner(), ZipScanner(), LhaScanner()]
scanner = FileScanner(self.process_file,
error_handler=error_handler,
warning_handler=warning_handler,
scanners=scanners)
for path in self.args.files:
ok = scanner.scan(path)
if not ok:
print("ABORTED")
return False
return True
# ----- Validator -----
class Validator(HunkCommand):
def handle_hunk_file(self, path, hunk_file):
# do nothing extra
return True
class Validator(HunkCommand):
def handle_hunk_file(self, path, hunk_file):
# do nothing extra
return True
# ----- Info -----
class Info(HunkCommand):
def handle_hunk_file(self, path, hunk_file):
args = self.args
# verbose all hunk
hs = HunkShow.HunkShow(hunk_file, \
show_relocs=args.show_relocs, show_debug=args.show_debug, \
disassemble=args.disassemble, disassemble_start=args.disassemble_start, \
cpu=args.cpu, \
hexdump=args.hexdump, \
brief=args.brief)
hs.show_segments()
return True
class Info(HunkCommand):
def handle_hunk_file(self, path, hunk_file):
args = self.args
# verbose all hunk
hs = HunkShow.HunkShow(
hunk_file,
show_relocs=args.show_relocs,
show_debug=args.show_debug,
disassemble=args.disassemble,
disassemble_start=args.disassemble_start,
cpu=args.cpu,
hexdump=args.hexdump,
brief=args.brief,
)
hs.show_segments()
return True
# ----- Relocate -----
class Relocate(HunkCommand):
def handle_hunk_file(self, path, hunk_file):
if hunk_file.type != Hunk.TYPE_LOADSEG:
print("ERROR: can only relocate LoadSeg()able files:", path)
return False
def handle_hunk_file(self, path, hunk_file):
if hunk_file.type != Hunk.TYPE_LOADSEG:
print("ERROR: can only relocate LoadSeg()able files:",path);
return False
rel = HunkRelocate.HunkRelocate(hunk_file, verbose=self.args.verbose)
# get sizes of all segments
sizes = rel.get_sizes()
# calc begin addrs for all segments
base_addr = self.args.base_address
addrs = rel.get_seq_addrs(base_addr)
# relocate and return data of segments
datas = rel.relocate(addrs)
if datas == None:
print("ERROR: relocation failed:", path)
return False
else:
print("Relocate to base address", base_addr)
print("Bases: ", " ".join(["%06x" % (x) for x in addrs]))
print("Sizes: ", " ".join(["%06x" % (x) for x in sizes]))
print("Data: ", " ".join(["%06x" % (len(x)) for x in datas]))
print("Total: ", "%06x" % (rel.get_total_size()))
if args.hexdump:
for d in datas:
print_hex(d)
return True
rel = HunkRelocate.HunkRelocate(hunk_file,verbose=self.args.verbose)
# get sizes of all segments
sizes = rel.get_sizes()
# calc begin addrs for all segments
base_addr = self.args.base_address
addrs = rel.get_seq_addrs(base_addr)
# relocate and return data of segments
datas = rel.relocate(addrs)
if datas == None:
print("ERROR: relocation failed:",path)
return False
else:
print("Relocate to base address",base_addr)
print("Bases: "," ".join(["%06x"%(x) for x in addrs]))
print("Sizes: "," ".join(["%06x"%(x) for x in sizes]))
print("Data: "," ".join(["%06x"%(len(x)) for x in datas]))
print("Total: ","%06x"%(rel.get_total_size()))
if args.hexdump:
for d in datas:
print_hex(d)
return True
# ----- Elf2Hunk -----
class ElfInfo:
def __init__(self,args):
self.args = args
def run(self):
for f in args.files:
reader = amitools.binfmt.elf.ELFReader()
elf = reader.load(open(f, "rb"))
if elf is None:
print("ERROR loading ELF:",elf.error_string)
return 1
dumper = amitools.binfmt.elf.ELFDumper(elf)
dumper.dump_sections(show_relocs=args.show_relocs, show_debug=args.show_debug)
dumper.dump_symbols()
dumper.dump_relas()
return 0
class ElfInfo:
def __init__(self, args):
self.args = args
def run(self):
for f in args.files:
reader = amitools.binfmt.elf.ELFReader()
elf = reader.load(open(f, "rb"))
if elf is None:
print("ERROR loading ELF:", elf.error_string)
return 1
dumper = amitools.binfmt.elf.ELFDumper(elf)
dumper.dump_sections(
show_relocs=args.show_relocs, show_debug=args.show_debug
)
dumper.dump_symbols()
dumper.dump_relas()
return 0
# ----- main -----
def main():
# call scanner and process all files with selected command
cmd_map = {
"validate" : Validator,
"info" : Info,
"elfinfo" : ElfInfo,
"relocate" : Relocate
}
# call scanner and process all files with selected command
cmd_map = {
"validate": Validator,
"info": Info,
"elfinfo": ElfInfo,
"relocate": Relocate,
}
parser = argparse.ArgumentParser()
parser.add_argument('command', help="command: "+",".join(list(cmd_map.keys())))
parser.add_argument('files', nargs='+')
parser.add_argument('-d', '--dump', action='store_true', default=False, help="dump the hunk structure")
parser.add_argument('-v', '--verbose', action='store_true', default=False, help="be more verbos")
parser.add_argument('-s', '--stop', action='store_true', default=False, help="stop on error")
parser.add_argument('-R', '--show-relocs', action='store_true', default=False, help="show relocation entries")
parser.add_argument('-D', '--show-debug', action='store_true', default=False, help="show debug info entries")
parser.add_argument('-A', '--disassemble', action='store_true', default=False, help="disassemble code segments")
parser.add_argument('-S', '--disassemble-start', action='store', type=int, default=0, help="start address for dissassembly")
parser.add_argument('-x', '--hexdump', action='store_true', default=False, help="dump segments in hex")
parser.add_argument('-b', '--brief', action='store_true', default=False, help="show only brief information")
parser.add_argument('-B', '--base-address', action='store', type=int, default=0, help="base address for relocation")
parser.add_argument('-c', '--cpu', action='store', default='68000', help="disassemble for given cpu (objdump only)")
args = parser.parse_args()
parser = argparse.ArgumentParser()
parser.add_argument("command", help="command: " + ",".join(list(cmd_map.keys())))
parser.add_argument("files", nargs="+")
parser.add_argument(
"-d",
"--dump",
action="store_true",
default=False,
help="dump the hunk structure",
)
parser.add_argument(
"-v", "--verbose", action="store_true", default=False, help="be more verbos"
)
parser.add_argument(
"-s", "--stop", action="store_true", default=False, help="stop on error"
)
parser.add_argument(
"-R",
"--show-relocs",
action="store_true",
default=False,
help="show relocation entries",
)
parser.add_argument(
"-D",
"--show-debug",
action="store_true",
default=False,
help="show debug info entries",
)
parser.add_argument(
"-A",
"--disassemble",
action="store_true",
default=False,
help="disassemble code segments",
)
parser.add_argument(
"-S",
"--disassemble-start",
action="store",
type=int,
default=0,
help="start address for dissassembly",
)
parser.add_argument(
"-x",
"--hexdump",
action="store_true",
default=False,
help="dump segments in hex",
)
parser.add_argument(
"-b",
"--brief",
action="store_true",
default=False,
help="show only brief information",
)
parser.add_argument(
"-B",
"--base-address",
action="store",
type=int,
default=0,
help="base address for relocation",
)
parser.add_argument(
"-c",
"--cpu",
action="store",
default="68000",
help="disassemble for given cpu (objdump only)",
)
args = parser.parse_args()
cmd = args.command
if cmd not in cmd_map:
print("INVALID COMMAND:",cmd)
print("valid commands are:")
for a in cmd_map:
print(" ",a)
return 1
cmd_cls = cmd_map[cmd]
cmd = args.command
if cmd not in cmd_map:
print("INVALID COMMAND:", cmd)
print("valid commands are:")
for a in cmd_map:
print(" ", a)
return 1
cmd_cls = cmd_map[cmd]
# execute command
cmd = cmd_cls(args)
res = cmd.run()
return res
# execute command
cmd = cmd_cls(args)
res = cmd.run()
return res
if __name__ == '__main__':
sys.exit(main())
if __name__ == "__main__":
sys.exit(main())

File diff suppressed because it is too large Load Diff

View File

@ -72,8 +72,7 @@ def do_split_cmd(args):
logging.info("writing index to '%s'", idx_path)
rs.write_index_file(idx_path)
# extract entries
logging.debug("extract module: fixes=%s, patches=%s",
args.fixes, args.patches)
logging.debug("extract module: fixes=%s, patches=%s", args.fixes, args.patches)
bfh = BinFmtHunk()
for e in entries:
rs.print_entry(logging.info, e)
@ -97,18 +96,31 @@ def do_build_cmd(args):
rom_rev = list(map(int, rom_rev.split(".")))
add_footer = args.add_footer
# select rom builder
if rom_type == 'kick':
if rom_type == "kick":
logging.info("building %d KiB Kick ROM @%08x", rom_size, kick_addr)
rb = rom.KickRomBuilder(rom_size,
base_addr=kick_addr, fill_byte=fill_byte,
kickety_split=kickety_split, rom_ver=rom_rev)
elif rom_type == 'ext':
logging.info("building %d KiB Ext ROM @%08x Rev %r for Kick @%08x",
rom_size, ext_addr, rom_rev, kick_addr)
rb = rom.ExtRomBuilder(rom_size,
base_addr=ext_addr, fill_byte=fill_byte,
add_footer=add_footer, rom_ver=rom_rev,
kick_addr=kick_addr)
rb = rom.KickRomBuilder(
rom_size,
base_addr=kick_addr,
fill_byte=fill_byte,
kickety_split=kickety_split,
rom_ver=rom_rev,
)
elif rom_type == "ext":
logging.info(
"building %d KiB Ext ROM @%08x Rev %r for Kick @%08x",
rom_size,
ext_addr,
rom_rev,
kick_addr,
)
rb = rom.ExtRomBuilder(
rom_size,
base_addr=ext_addr,
fill_byte=fill_byte,
add_footer=add_footer,
rom_ver=rom_rev,
kick_addr=kick_addr,
)
else:
logging.error("Unknown rom_type=%s", rom_type)
return 1
@ -130,8 +142,7 @@ def do_build_cmd(args):
if bkm.get_type() == "module":
bkm.fix_module()
elif bkm.get_type() == "patch":
logging.error("BlizKick Patches are not supported, yet: %s",
name)
logging.error("BlizKick Patches are not supported, yet: %s", name)
return 5
# get image params
size = bin_img.get_size()
@ -160,16 +171,18 @@ def do_build_cmd(args):
logging.info("@%08x: adding module '%s'", off, f)
e = rb.add_bin_img(name, bin_img)
if e is None:
logging.error("@%08x: can't add module '%s': %s",
off, f, rb.get_error())
logging.error(
"@%08x: can't add module '%s': %s", off, f, rb.get_error()
)
return 3
# add data
else:
logging.info("@%08x: adding raw data '%s'", off, f)
e = rb.add_module(name, data)
if e is None:
logging.error("@%08x: can't add raw data '%s': %s",
off, f, rb.get_error())
logging.error(
"@%08x: can't add raw data '%s': %s", off, f, rb.get_error()
)
return 3
# add long word padding?
@ -178,14 +191,14 @@ def do_build_cmd(args):
logging.info("@%08x: adding padding: +%d" % (off, padding))
e = rb.add_padding(padding)
if e is None:
logging.error("@%08x: can't add padding: %s",
off, rb.get_error())
logging.error("@%08x: can't add padding: %s", off, rb.get_error())
return 3
# build rom
off = rb.get_rom_offset()
logging.info("@%08x: padding %d bytes with %02x",
off, rb.get_bytes_left(), fill_byte)
logging.info(
"@%08x: padding %d bytes with %02x", off, rb.get_bytes_left(), fill_byte
)
rom_data = rb.build_rom()
if rom_data is None:
logging.error("building ROM failed: %s", rb.get_error())
@ -212,8 +225,7 @@ def do_diff_cmd(args):
size_a = len(rom_a)
size_b = len(rom_b)
if not args.force and size_a != size_b:
logging.error(
"ROM differ in size (%08x != %08x). Aborting", size_a, size_b)
logging.error("ROM differ in size (%08x != %08x). Aborting", size_a, size_b)
return 2
# do diff
base_addr = 0
@ -226,8 +238,9 @@ def do_diff_cmd(args):
else:
logging.error("Not a KickROM! Can't detect base address.")
return 3
print_hex_diff(rom_a, rom_b, num=args.columns, show_same=args.same,
base_addr=base_addr)
print_hex_diff(
rom_a, rom_b, num=args.columns, show_same=args.same, base_addr=base_addr
)
def do_dump_cmd(args):
@ -266,24 +279,24 @@ def do_info_cmd(args):
rom_img = rom.Loader.load(img)
kh = rom.KickRomAccess(rom_img)
checks = [
('size', kh.check_size()),
('header', kh.check_header()),
('footer', kh.check_footer()),
('size_field', kh.check_size()),
('chk_sum', kh.verify_check_sum()),
('kickety_split', kh.check_kickety_split()),
('magic_reset', kh.check_magic_reset()),
('is_kick', kh.is_kick_rom())
("size", kh.check_size()),
("header", kh.check_header()),
("footer", kh.check_footer()),
("size_field", kh.check_size()),
("chk_sum", kh.verify_check_sum()),
("kickety_split", kh.check_kickety_split()),
("magic_reset", kh.check_magic_reset()),
("is_kick", kh.is_kick_rom()),
]
c = ["%-20s %s" % (x[0], "ok" if x[1] else "NOK") for x in checks]
for i in c:
print(i)
values = [
('check_sum', '%08x', kh.read_check_sum()),
('base_addr', '%08x', kh.get_base_addr()),
('boot_pc', '%08x', kh.read_boot_pc()),
('rom_rev', '%d.%d', kh.read_rom_ver_rev()),
('exec_rev', '%d.%d', kh.read_exec_ver_rev())
("check_sum", "%08x", kh.read_check_sum()),
("base_addr", "%08x", kh.get_base_addr()),
("boot_pc", "%08x", kh.read_boot_pc()),
("rom_rev", "%d.%d", kh.read_rom_ver_rev()),
("exec_rev", "%d.%d", kh.read_exec_ver_rev()),
]
v = ["%-20s %s" % (x[0], x[1] % x[2]) for x in values]
for i in v:
@ -354,7 +367,7 @@ def do_combine_cmd(args):
if ka.get_size_kib() != 512:
logging.error("Not a 512 MiB Kick ROM image!")
return 2
if ka.get_base_addr() != 0xf80000:
if ka.get_base_addr() != 0xF80000:
logging.error("Kick ROM base address is not 0xf80000!")
return 3
# check ext
@ -423,112 +436,176 @@ def do_scan_cmd(args):
print(spc, "init off: %08x" % r.init_off)
print(spc, "skip off: %08x" % r.skip_off)
else:
print("@%08x +%08x %-12s %+4d %s %s" %
(off, r.skip_off, nt, r.pri, name, id_string))
print(
"@%08x +%08x %-12s %+4d %s %s"
% (off, r.skip_off, nt, r.pri, name, id_string)
)
def setup_list_parser(parser):
parser.add_argument('-r', '--rom', default=None,
help='query rom name by wildcard')
parser.add_argument('-m', '--modules', default=False, action='store_true',
help="show entries of ROMs")
parser.add_argument("-r", "--rom", default=None, help="query rom name by wildcard")
parser.add_argument(
"-m",
"--modules",
default=False,
action="store_true",
help="show entries of ROMs",
)
parser.set_defaults(cmd=do_list_cmd)
def setup_query_parser(parser):
parser.add_argument('rom_image',
help='rom image to be checked')
parser.add_argument('-m', '--modules', default=None,
help='query module by wildcard')
parser.add_argument("rom_image", help="rom image to be checked")
parser.add_argument(
"-m", "--modules", default=None, help="query module by wildcard"
)
parser.set_defaults(cmd=do_query_cmd)
def setup_split_parser(parser):
parser.add_argument('rom_image',
help='rom image file to be split')
parser.add_argument('-o', '--output-dir',
help='store modules in this base dir')
parser.add_argument('-m', '--modules', default=None,
help='query module by wildcard')
parser.add_argument('--no-version-dir', default=False, action='store_true',
help="do not create sub directory with version name")
parser.add_argument('--no-index', default=False, action='store_true',
help="do not create an 'index.txt' in output path")
parser.add_argument('-p', '--patches', default=False, action='store_true',
help='apply optional patches to modules')
parser.add_argument('-f', '--no-fixes', dest='fixes', default=True,
action='store_false',
help='do not apply available fixes to modules')
parser.add_argument("rom_image", help="rom image file to be split")
parser.add_argument("-o", "--output-dir", help="store modules in this base dir")
parser.add_argument(
"-m", "--modules", default=None, help="query module by wildcard"
)
parser.add_argument(
"--no-version-dir",
default=False,
action="store_true",
help="do not create sub directory with version name",
)
parser.add_argument(
"--no-index",
default=False,
action="store_true",
help="do not create an 'index.txt' in output path",
)
parser.add_argument(
"-p",
"--patches",
default=False,
action="store_true",
help="apply optional patches to modules",
)
parser.add_argument(
"-f",
"--no-fixes",
dest="fixes",
default=True,
action="store_false",
help="do not apply available fixes to modules",
)
parser.set_defaults(cmd=do_split_cmd)
def setup_build_parser(parser):
parser.add_argument('modules', default=None, nargs='+',
help='modules or index.txt files to be added')
parser.add_argument('-o', '--output',
help='rom image file to be built')
parser.add_argument('-t', '--rom-type', default='kick',
help="what type of ROM to build (kick, ext)")
parser.add_argument('-s', '--rom-size', default=512, type=int,
help="size of ROM in KiB")
parser.add_argument('-a', '--kick-addr', default="f80000",
help="base address of Kick ROM in hex")
parser.add_argument('-e', '--ext-addr', default="e00000",
help="base address of Ext ROM in hex")
parser.add_argument('-f', '--add-footer', default=False,
action='store_true',
help="add footer with check sum to Ext ROM")
parser.add_argument('-r', '--rom-rev', default=None,
help="set ROM revision, e.g. 45.10")
parser.add_argument('-k', '--kickety_split', default=False,
action='store_true',
help="add 'kickety split' romhdr at center of ROM")
parser.add_argument('-b', '--fill-byte', default='ff',
help="fill byte in hex for empty ranges")
parser.add_argument(
"modules",
default=None,
nargs="+",
help="modules or index.txt files to be added",
)
parser.add_argument("-o", "--output", help="rom image file to be built")
parser.add_argument(
"-t", "--rom-type", default="kick", help="what type of ROM to build (kick, ext)"
)
parser.add_argument(
"-s", "--rom-size", default=512, type=int, help="size of ROM in KiB"
)
parser.add_argument(
"-a", "--kick-addr", default="f80000", help="base address of Kick ROM in hex"
)
parser.add_argument(
"-e", "--ext-addr", default="e00000", help="base address of Ext ROM in hex"
)
parser.add_argument(
"-f",
"--add-footer",
default=False,
action="store_true",
help="add footer with check sum to Ext ROM",
)
parser.add_argument(
"-r", "--rom-rev", default=None, help="set ROM revision, e.g. 45.10"
)
parser.add_argument(
"-k",
"--kickety_split",
default=False,
action="store_true",
help="add 'kickety split' romhdr at center of ROM",
)
parser.add_argument(
"-b", "--fill-byte", default="ff", help="fill byte in hex for empty ranges"
)
parser.set_defaults(cmd=do_build_cmd)
def setup_diff_parser(parser):
parser.add_argument('image_a', help='rom image a')
parser.add_argument('image_b', help='rom image b')
parser.add_argument('-s', '--same', default=False, action='store_true',
help="show same lines of ROMs")
parser.add_argument('-a', '--show-address', default=False,
action='store_true',
help="show KickROM address (otherwise image offset)")
parser.add_argument('-b', '--rom-addr', default=None,
help="use hex base address for output")
parser.add_argument('-f', '--force', default=False, action='store_true',
help="diff ROMs even if size differs")
parser.add_argument('-c', '--columns', default=8, type=int,
help="number of bytes shown per line")
parser.add_argument("image_a", help="rom image a")
parser.add_argument("image_b", help="rom image b")
parser.add_argument(
"-s",
"--same",
default=False,
action="store_true",
help="show same lines of ROMs",
)
parser.add_argument(
"-a",
"--show-address",
default=False,
action="store_true",
help="show KickROM address (otherwise image offset)",
)
parser.add_argument(
"-b", "--rom-addr", default=None, help="use hex base address for output"
)
parser.add_argument(
"-f",
"--force",
default=False,
action="store_true",
help="diff ROMs even if size differs",
)
parser.add_argument(
"-c", "--columns", default=8, type=int, help="number of bytes shown per line"
)
parser.set_defaults(cmd=do_diff_cmd)
def setup_dump_parser(parser):
parser.add_argument('image', help='rom image to be dumped')
parser.add_argument('-a', '--show-address', default=False,
action='store_true',
help="show KickROM address (otherwise image offset)")
parser.add_argument('-b', '--rom-addr', default=None,
help="use hex base address for output")
parser.add_argument('-c', '--columns', default=16, type=int,
help="number of bytes shown per line")
parser.add_argument("image", help="rom image to be dumped")
parser.add_argument(
"-a",
"--show-address",
default=False,
action="store_true",
help="show KickROM address (otherwise image offset)",
)
parser.add_argument(
"-b", "--rom-addr", default=None, help="use hex base address for output"
)
parser.add_argument(
"-c", "--columns", default=16, type=int, help="number of bytes shown per line"
)
parser.set_defaults(cmd=do_dump_cmd)
def setup_info_parser(parser):
parser.add_argument('image', help='rom image to be analyzed')
parser.add_argument("image", help="rom image to be analyzed")
parser.set_defaults(cmd=do_info_cmd)
def setup_patch_parser(parser):
parser.add_argument('image',
help='rom image to be patched')
parser.add_argument('patches', default=None, nargs='+',
help='patches to be applied: name[:arg1[=val1],...]')
parser.add_argument('-o', '--output',
help='rom image file to be built')
parser.add_argument("image", help="rom image to be patched")
parser.add_argument(
"patches",
default=None,
nargs="+",
help="patches to be applied: name[:arg1[=val1],...]",
)
parser.add_argument("-o", "--output", help="rom image file to be built")
parser.set_defaults(cmd=do_patch_cmd)
@ -537,28 +614,40 @@ def setup_patches_parser(parser):
def setup_combine_parser(parser):
parser.add_argument('kick_rom', help='kick rom to be combined')
parser.add_argument('ext_rom', help='ext rom to be combined')
parser.add_argument("kick_rom", help="kick rom to be combined")
parser.add_argument("ext_rom", help="ext rom to be combined")
parser.set_defaults(cmd=do_combine_cmd)
parser.add_argument('-o', '--output',
help='rom image file to be built')
parser.add_argument("-o", "--output", help="rom image file to be built")
def setup_scan_parser(parser):
parser.add_argument('image', help='rom image to be scanned')
parser.add_argument('-b', '--rom-addr', default=None,
help="use this base address for ROM. otherwise guess.")
parser.add_argument('-i', '--show-info', default=False,
action='store_true',
help="show more details on resident")
parser.add_argument("image", help="rom image to be scanned")
parser.add_argument(
"-b",
"--rom-addr",
default=None,
help="use this base address for ROM. otherwise guess.",
)
parser.add_argument(
"-i",
"--show-info",
default=False,
action="store_true",
help="show more details on resident",
)
parser.set_defaults(cmd=do_scan_cmd)
def setup_copy_parser(parser):
parser.add_argument('in_image', help='rom image to read')
parser.add_argument('out_image', help='rom image to be written')
parser.add_argument('-c', '--fix-checksum', default=False, action='store_true',
help="fix checksum on written image")
parser.add_argument("in_image", help="rom image to read")
parser.add_argument("out_image", help="rom image to be written")
parser.add_argument(
"-c",
"--fix-checksum",
default=False,
action="store_true",
help="fix checksum on written image",
)
parser.set_defaults(cmd=do_copy_cmd)
@ -567,63 +656,63 @@ def parse_args():
parser = argparse.ArgumentParser(description=DESC)
# global options
parser.add_argument('-k', '--rom-key', default='rom.key',
help='the path of a rom.key file if you want to '
'process crypted ROMs')
parser.add_argument(
"-k",
"--rom-key",
default="rom.key",
help="the path of a rom.key file if you want to " "process crypted ROMs",
)
add_logging_options(parser)
# sub parsers
sub_parsers = parser.add_subparsers(help="sub commands")
# build
build_parser = sub_parsers.add_parser(
'build', help='build a ROM from modules')
build_parser = sub_parsers.add_parser("build", help="build a ROM from modules")
setup_build_parser(build_parser)
# combine
combine_parser = sub_parsers.add_parser(
'combine', help='combine a kick and an ext ROM to a 1 MiB ROM')
"combine", help="combine a kick and an ext ROM to a 1 MiB ROM"
)
setup_combine_parser(combine_parser)
# diff
diff_parser = sub_parsers.add_parser(
'diff', help='show differences in two ROM images')
"diff", help="show differences in two ROM images"
)
setup_diff_parser(diff_parser)
# dump
dump_parser = sub_parsers.add_parser('dump', help='dump a ROM image')
dump_parser = sub_parsers.add_parser("dump", help="dump a ROM image")
setup_dump_parser(dump_parser)
# info
info_parser = sub_parsers.add_parser(
'info', help='print infos on a ROM image')
info_parser = sub_parsers.add_parser("info", help="print infos on a ROM image")
setup_info_parser(info_parser)
# list
list_parser = sub_parsers.add_parser(
'list', help='list ROMs in split data')
list_parser = sub_parsers.add_parser("list", help="list ROMs in split data")
setup_list_parser(list_parser)
# patch
patch_parser = sub_parsers.add_parser('patch', help='patch a ROM image')
patch_parser = sub_parsers.add_parser("patch", help="patch a ROM image")
setup_patch_parser(patch_parser)
# patches
patches_parser = sub_parsers.add_parser(
'patches', help='show available patches')
patches_parser = sub_parsers.add_parser("patches", help="show available patches")
setup_patches_parser(patches_parser)
# query
query_parser = sub_parsers.add_parser(
'query', help='query if ROM is in split data')
query_parser = sub_parsers.add_parser("query", help="query if ROM is in split data")
setup_query_parser(query_parser)
# scan
scan_parser = sub_parsers.add_parser('scan', help='scan ROM for residents')
scan_parser = sub_parsers.add_parser("scan", help="scan ROM for residents")
setup_scan_parser(scan_parser)
# split
split_parser = sub_parsers.add_parser(
'split', help='split a ROM into modules')
split_parser = sub_parsers.add_parser("split", help="split a ROM into modules")
setup_split_parser(split_parser)
# copy
copy_parser = sub_parsers.add_parser(
'copy', help='copy ROM image and fix on the fly')
"copy", help="copy ROM image and fix on the fly"
)
setup_copy_parser(copy_parser)
# parse
args = parser.parse_args()
if 'cmd' not in args:
if "cmd" not in args:
parser.print_help()
sys.exit(1)
return args
@ -645,5 +734,5 @@ def main():
# ----- entry point -----
if __name__ == '__main__':
if __name__ == "__main__":
sys.exit(main())

View File

@ -12,15 +12,15 @@ from amitools.vamos.tools import tools_main, TypeTool
def main():
cfg_files = (
# first look in current dir
os.path.join(os.getcwd(), ".vamosrc"),
# then in home dir
os.path.expanduser("~/.vamosrc"),
)
tools = [TypeTool()]
sys.exit(tools_main(tools, cfg_files))
cfg_files = (
# first look in current dir
os.path.join(os.getcwd(), ".vamosrc"),
# then in home dir
os.path.expanduser("~/.vamosrc"),
)
tools = [TypeTool()]
sys.exit(tools_main(tools, cfg_files))
if __name__ == '__main__':
sys.exit(main())
if __name__ == "__main__":
sys.exit(main())

View File

@ -13,27 +13,25 @@ from amitools.vamos.main import main_profile
def main():
cfg_files = (
# first look in current dir
os.path.join(os.getcwd(), ".vamosrc"),
# then in home dir
os.path.expanduser("~/.vamosrc"),
)
# profile run?
if 'VAMOS_PROFILE' in os.environ:
vamos_profile = os.environ['VAMOS_PROFILE']
if vamos_profile == 'dump':
profile_file = None
cfg_files = (
# first look in current dir
os.path.join(os.getcwd(), ".vamosrc"),
# then in home dir
os.path.expanduser("~/.vamosrc"),
)
# profile run?
if "VAMOS_PROFILE" in os.environ:
vamos_profile = os.environ["VAMOS_PROFILE"]
if vamos_profile == "dump":
profile_file = None
else:
profile_file = vamos_profile
ret_code = main_profile(cfg_files, profile_file=profile_file, dump_profile=True)
# regular run
else:
profile_file = vamos_profile
ret_code = main_profile(cfg_files,
profile_file=profile_file,
dump_profile=True)
# regular run
else:
ret_code = vmain(cfg_files)
sys.exit(ret_code)
ret_code = vmain(cfg_files)
sys.exit(ret_code)
if __name__ == '__main__':
main()
if __name__ == "__main__":
main()

View File

@ -12,15 +12,15 @@ from amitools.vamos.tools import tools_main, PathTool
def main():
cfg_files = (
# first look in current dir
os.path.join(os.getcwd(), ".vamosrc"),
# then in home dir
os.path.expanduser("~/.vamosrc"),
)
tools = [PathTool()]
sys.exit(tools_main(tools, cfg_files))
cfg_files = (
# first look in current dir
os.path.join(os.getcwd(), ".vamosrc"),
# then in home dir
os.path.expanduser("~/.vamosrc"),
)
tools = [PathTool()]
sys.exit(tools_main(tools, cfg_files))
if __name__ == '__main__':
sys.exit(main())
if __name__ == "__main__":
sys.exit(main())

View File

@ -11,15 +11,15 @@ from amitools.vamos.tools import *
def main():
cfg_files = (
# first look in current dir
os.path.join(os.getcwd(), ".vamosrc"),
# then in home dir
os.path.expanduser("~/.vamosrc"),
)
tools = [PathTool(), TypeTool(), LibProfilerTool()]
sys.exit(tools_main(tools, cfg_files))
cfg_files = (
# first look in current dir
os.path.join(os.getcwd(), ".vamosrc"),
# then in home dir
os.path.expanduser("~/.vamosrc"),
)
tools = [PathTool(), TypeTool(), LibProfilerTool()]
sys.exit(tools_main(tools, cfg_files))
if __name__ == '__main__':
sys.exit(main())
if __name__ == "__main__":
sys.exit(main())

View File

@ -3,8 +3,6 @@
# quickly scan large sets of Amiga disk image files
import sys
import argparse
import os.path
@ -16,161 +14,203 @@ from amitools.fs.validate.Progress import Progress
# ----- logging -----
class MyProgress(Progress):
def __init__(self):
Progress.__init__(self)
self.clk = int(time.perf_counter() * 1000)
def begin(self, msg):
Progress.begin(self, msg)
def add(self):
Progress.add(self)
clk = int(time.perf_counter() * 1000)
delta = clk - self.clk
# update display every 250ms
if delta > 250:
self.clk = clk
print("%s: %d\r" % (self.msg, self.num)),
sys.stdout.flush()
def __init__(self):
Progress.__init__(self)
self.clk = int(time.perf_counter() * 1000)
def begin(self, msg):
Progress.begin(self, msg)
def add(self):
Progress.add(self)
clk = int(time.perf_counter() * 1000)
delta = clk - self.clk
# update display every 250ms
if delta > 250:
self.clk = clk
print("%s: %d\r" % (self.msg, self.num)),
sys.stdout.flush()
def pre_log_path(path, msg):
print("%20s %s \r" % (msg, path)),
sys.stdout.flush()
print("%20s %s \r" % (msg, path)),
sys.stdout.flush()
def log_path(path, msg):
print("%20s %s " % (msg, path))
print("%20s %s " % (msg, path))
def print_block(percent):
print("%3.1f%%\r" % (percent / 10.0)),
sys.stdout.flush()
print("%3.1f%%\r" % (percent / 10.0)),
sys.stdout.flush()
# ----- scanner -----
factory = BlkDevFactory()
def scan(path, args):
if not os.path.exists(path):
log_path(path, "DOES NOT EXIST")
return 1
if os.path.isdir(path):
return scan_dir(path, args)
elif os.path.isfile(path):
return scan_file(path, args)
if not os.path.exists(path):
log_path(path, "DOES NOT EXIST")
return 1
if os.path.isdir(path):
return scan_dir(path, args)
elif os.path.isfile(path):
return scan_file(path, args)
def scan_dir(path, args):
for name in sorted(os.listdir(path)):
epath = os.path.join(path, name)
result = scan(epath, args)
if result != 0:
return result
return 0
for name in sorted(os.listdir(path)):
epath = os.path.join(path, name)
result = scan(epath, args)
if result != 0:
return result
return 0
def check_extension(path, args):
ext = []
if not args.skip_disks:
ext += ['.adf','.adz','.adf.gz']
if not args.skip_hds:
ext += ['.hdf']
for a in ext:
if path.endswith(a):
return True
return False
ext = []
if not args.skip_disks:
ext += [".adf", ".adz", ".adf.gz"]
if not args.skip_hds:
ext += [".hdf"]
for a in ext:
if path.endswith(a):
return True
return False
def scan_file(path, args):
if not check_extension(path, args):
return 0
try:
pre_log_path(path,"scan")
ret_code = 0
ret_str = ""
stay = True
if not check_extension(path, args):
return 0
try:
pre_log_path(path, "scan")
ret_code = 0
ret_str = ""
stay = True
# create a block device for image file
blkdev = factory.open(path, read_only=True)
# create a block device for image file
blkdev = factory.open(path, read_only=True)
# create validator
progress = MyProgress()
v = Validator(blkdev, min_level=args.level, debug=args.debug, progress=progress)
# create validator
progress = MyProgress()
v = Validator(blkdev, min_level=args.level, debug=args.debug, progress=progress)
# 1. check boot block
res = []
boot_dos, bootable = v.scan_boot()
if boot_dos:
# 2. check root block
root = v.scan_root()
if not root:
# disk is bootable
if bootable:
res.append("boot")
# 1. check boot block
res = []
boot_dos, bootable = v.scan_boot()
if boot_dos:
# 2. check root block
root = v.scan_root()
if not root:
# disk is bootable
if bootable:
res.append("boot")
else:
res.append(" ")
# invalid root
res.append("nofs")
else:
# 3. scan tree
v.scan_dir_tree()
# 4. scan files
v.scan_files()
# 5. scan_bitmap
v.scan_bitmap()
# summary
e, w = v.get_summary()
if w > 0:
res.append("w%03d" % w)
if e > 0:
res.append("E%03d" % e)
else:
res.append(" ")
# disk is bootable
if bootable:
res.append("boot")
else:
res.append(" ")
if e == 0 and w == 0:
res.append(" ok ")
else:
res.append("NOK ")
else:
res.append(" ")
# invalid root
res.append("nofs")
else:
# 3. scan tree
v.scan_dir_tree()
# 4. scan files
v.scan_files()
# 5. scan_bitmap
v.scan_bitmap()
# boot block is not dos
res.append("NDOS")
# report result
if len(res) == 0:
res.append("done")
log_path(path, " ".join(res))
# summary
e, w = v.get_summary()
if w > 0:
res.append("w%03d" % w)
if e > 0:
res.append("E%03d" % e)
else:
res.append(" ")
# disk is bootable
if bootable:
res.append("boot")
else:
res.append(" ")
if e == 0 and w == 0:
res.append(" ok ")
else:
res.append("NOK ")
else:
# boot block is not dos
res.append("NDOS")
if args.verbose:
v.log.dump()
return ret_code
except IOError as e:
log_path(path, "BLKDEV?")
if args.verbose:
print(e)
return 0
# report result
if len(res) == 0:
res.append("done")
log_path(path," ".join(res))
# summary
if args.verbose:
v.log.dump()
return ret_code
except IOError as e:
log_path(path,"BLKDEV?")
if args.verbose:
print(e)
return 0
# ----- main -----
def main():
parser = argparse.ArgumentParser()
parser.add_argument('input', nargs='+', help="input image file or directory (to scan tree)")
parser.add_argument('-v', '--verbose', action='store_true', default=False, help="be more verbos")
parser.add_argument('-d', '--debug', action='store_true', default=False, help="show debug info")
parser.add_argument('-q', '--quick', action='store_true', default=False, help="quick mode. faster: skip image if root is invalid")
parser.add_argument('-l', '--level', default=2, help="show only level or above (0=debug, 1=info, 2=warn, 3=error)", type=int)
parser.add_argument('-D', '--skip-disks', action='store_true', default=False, help="do not scan disk images")
parser.add_argument('-H', '--skip-hds', action='store_true', default=False, help="do not scan hard disk images")
args = parser.parse_args()
parser = argparse.ArgumentParser()
parser.add_argument(
"input", nargs="+", help="input image file or directory (to scan tree)"
)
parser.add_argument(
"-v", "--verbose", action="store_true", default=False, help="be more verbos"
)
parser.add_argument(
"-d", "--debug", action="store_true", default=False, help="show debug info"
)
parser.add_argument(
"-q",
"--quick",
action="store_true",
default=False,
help="quick mode. faster: skip image if root is invalid",
)
parser.add_argument(
"-l",
"--level",
default=2,
help="show only level or above (0=debug, 1=info, 2=warn, 3=error)",
type=int,
)
parser.add_argument(
"-D",
"--skip-disks",
action="store_true",
default=False,
help="do not scan disk images",
)
parser.add_argument(
"-H",
"--skip-hds",
action="store_true",
default=False,
help="do not scan hard disk images",
)
args = parser.parse_args()
# main scan loop
ret = 0
for i in args.input:
ret = scan(i, args)
if ret != 0:
break
return ret
# main scan loop
ret = 0
for i in args.input:
ret = scan(i, args)
if ret != 0:
break
return ret
if __name__ == '__main__':
try:
sys.exit(main())
except KeyboardInterrupt as e:
print("aborting...")
if __name__ == "__main__":
try:
sys.exit(main())
except KeyboardInterrupt as e:
print("aborting...")

File diff suppressed because it is too large Load Diff

View File

@ -3,70 +3,75 @@
# https://github.com/gopher/iops
import sys
import array
import struct
import os
def getblkdevsize(dev):
"""report the size for a block device"""
size = 0
if sys.platform == 'darwin':
if sys.platform == "darwin":
# mac os x ioctl from sys/disk.h
import fcntl
DKIOCGETBLOCKSIZE = 0x40046418 # _IOR('d', 24, uint32_t)
DKIOCGETBLOCKCOUNT = 0x40086419 # _IOR('d', 25, uint64_t)
DKIOCGETBLOCKCOUNT = 0x40086419 # _IOR('d', 25, uint64_t)
fh = open(dev, 'r')
buf = array.array('B', list(range(0,4))) # uint32
fh = open(dev, "r")
buf = array.array("B", list(range(0, 4))) # uint32
r = fcntl.ioctl(fh.fileno(), DKIOCGETBLOCKSIZE, buf, 1)
blocksize = struct.unpack('I', buf)[0]
buf = array.array('B', list(range(0,8))) # uint64
blocksize = struct.unpack("I", buf)[0]
buf = array.array("B", list(range(0, 8))) # uint64
r = fcntl.ioctl(fh.fileno(), DKIOCGETBLOCKCOUNT, buf, 1)
blockcount = struct.unpack('Q', buf)[0]
blockcount = struct.unpack("Q", buf)[0]
fh.close()
size = blocksize*blockcount
size = blocksize * blockcount
elif sys.platform.startswith('freebsd'):
elif sys.platform.startswith("freebsd"):
# freebsd ioctl from sys/disk.h
import fcntl
DIOCGMEDIASIZE = 0x40086481 # _IOR('d', 129, uint64_t)
fh = open(dev, 'r')
buf = array.array('B', list(range(0,8))) # off_t / int64
DIOCGMEDIASIZE = 0x40086481 # _IOR('d', 129, uint64_t)
fh = open(dev, "r")
buf = array.array("B", list(range(0, 8))) # off_t / int64
r = fcntl.ioctl(fh.fileno(), DIOCGMEDIASIZE, buf, 1)
size = struct.unpack('q', buf)[0]
size = struct.unpack("q", buf)[0]
fh.close()
elif sys.platform == 'win32':
elif sys.platform == "win32":
# win32 ioctl from winioctl.h, requires pywin32
try:
import win32file
except ImportError:
raise SystemExit("Package pywin32 not found, see http://sf.net/projects/pywin32/")
raise SystemExit(
"Package pywin32 not found, see http://sf.net/projects/pywin32/"
)
IOCTL_DISK_GET_DRIVE_GEOMETRY = 0x00070000
dh = win32file.CreateFile(dev, 0, win32file.FILE_SHARE_READ, None, win32file.OPEN_EXISTING, 0, None)
info = win32file.DeviceIoControl(dh, IOCTL_DISK_GET_DRIVE_GEOMETRY, '', 24)
dh = win32file.CreateFile(
dev, 0, win32file.FILE_SHARE_READ, None, win32file.OPEN_EXISTING, 0, None
)
info = win32file.DeviceIoControl(dh, IOCTL_DISK_GET_DRIVE_GEOMETRY, "", 24)
win32file.CloseHandle(dh)
(cyl_lo, cyl_hi, media_type, tps, spt, bps) = struct.unpack('6L', info)
(cyl_lo, cyl_hi, media_type, tps, spt, bps) = struct.unpack("6L", info)
size = ((cyl_hi << 32) + cyl_lo) * tps * spt * bps
else: # linux or compat
else: # linux or compat
# linux 2.6 lseek from fcntl.h
fh = open(dev, 'r')
fh.seek(0,os.SEEK_END)
fh = open(dev, "r")
fh.seek(0, os.SEEK_END)
size = fh.tell()
fh.close()
if not size:
raise Exception("getblkdevsize: Unsupported platform")
return size
# test
if __name__ == '__main__':
for a in sys.argv[1:]:
print(a, getblkdevsize(a))
# test
if __name__ == "__main__":
for a in sys.argv[1:]:
print(a, getblkdevsize(a))

View File

@ -3,80 +3,80 @@
KIB_UNIT = 1024
# map unit extension to power of 1000/KIB_UNIT
scale_map = {
'k' : 1,
'm' : 2,
'g' : 3,
't' : 4
}
scale_map = {"k": 1, "m": 2, "g": 3, "t": 4}
def to_byte_size_str(size, kibi_units=True):
"""convert a byte value into a 5 letter string"""
if kibi_units:
unit = KIB_UNIT
marker = 'i'
else:
unit = 1000
marker = ''
if size < 1000:
return "%3dB%s" % (size, marker)
else:
# run through scales
for scale in "KMGT":
next = size // unit
if next < 10:
frac = float(size) / unit
if frac > 9.9:
frac = 9.9
return "%3.1f%s%s" % (frac, scale, marker)
elif next < 1000:
return "%3d%s%s" % (next, scale, marker)
size = next
return "NaNB%s" % marker
"""convert a byte value into a 5 letter string"""
if kibi_units:
unit = KIB_UNIT
marker = "i"
else:
unit = 1000
marker = ""
if size < 1000:
return "%3dB%s" % (size, marker)
else:
# run through scales
for scale in "KMGT":
next = size // unit
if next < 10:
frac = float(size) / unit
if frac > 9.9:
frac = 9.9
return "%3.1f%s%s" % (frac, scale, marker)
elif next < 1000:
return "%3d%s%s" % (next, scale, marker)
size = next
return "NaNB%s" % marker
def parse_byte_size_str(s):
"""parse a string to derive a byte value.
"""parse a string to derive a byte value.
can read e.g. 10Ki, 2.1k or 2048.
returns None if the string is invalid or a byte value
"""
s = s.lower()
n = len(s)
if n == 0:
return None
# 'i' - use Kibi units
if s[-1] == 'i':
unit = KIB_UNIT
if n == 1:
return None
s = s[:-1]
n -= 1
# else Si units
else:
unit = 1000
# check for scale
scale = s[-1]
if scale in list(scale_map.keys()):
factor = unit ** scale_map[scale]
if n == 1:
return None
s = s[:-1]
n -= 1
else:
factor = 1
# get number
try:
v = int(float(s) * factor)
return v
except ValueError:
return None
s = s.lower()
n = len(s)
if n == 0:
return None
# 'i' - use Kibi units
if s[-1] == "i":
unit = KIB_UNIT
if n == 1:
return None
s = s[:-1]
n -= 1
# else Si units
else:
unit = 1000
# check for scale
scale = s[-1]
if scale in list(scale_map.keys()):
factor = unit ** scale_map[scale]
if n == 1:
return None
s = s[:-1]
n -= 1
else:
factor = 1
# get number
try:
v = int(float(s) * factor)
return v
except ValueError:
return None
# a small test
if __name__ == '__main__':
import sys
for a in sys.argv[1:]:
v = parse_byte_size_str(a)
if v != None:
print((a, ":", v, "=", to_byte_size_str(v), "=", to_byte_size_str(v, False)))
else:
print((a, ":", v))
if __name__ == "__main__":
import sys
for a in sys.argv[1:]:
v = parse_byte_size_str(a)
if v != None:
print(
(a, ":", v, "=", to_byte_size_str(v), "=", to_byte_size_str(v, False))
)
else:
print((a, ":", v))

Some files were not shown because too many files have changed in this diff Show More