Code files added

This commit is contained in:
prashantpackt
2017-09-27 13:47:30 +05:30
parent e4d59d6aa5
commit f4fb994c08
127 changed files with 26904 additions and 0 deletions

BIN
Chapter10/.DS_Store vendored Normal file

Binary file not shown.

162
Chapter10/evt_explorer.py Normal file
View File

@@ -0,0 +1,162 @@
from __future__ import print_function
import argparse
import unicodecsv as csv
import os
import pytsk3
import pyewf
import pyevt
import pyevtx
import sys
from utility.pytskutil import TSKUtil
"""
MIT License
Copyright (c) 2017 Chapin Bryce, Preston Miller
Please share comments and questions at:
https://github.com/PythonForensics/PythonForensicsCookbook
or email pyforcookbook@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Script metadata, surfaced in the argparse epilog below.
__authors__ = ["Chapin Bryce", "Preston Miller"]
__date__ = 20170815
__description__ = "Script to handle event logs"
def main(evidence, image_type, log, win_event, fuzzy):
    """Locate and parse Windows event logs (.evt / .evtx) from an image.

    Args:
        evidence: Path to the evidence container (raw or EWF image).
        image_type: "raw" or "ewf".
        log: Event log file name to look for (e.g. SecEvent.Evt).
        win_event: Directory inside the image to search.
        fuzzy: When True, match ``log`` as a substring of file names
            instead of requiring an exact match.

    Exits with status 2 when the directory is missing and 3 when no
    matching log is found; otherwise writes a CSV via write_output().
    """
    # Create TSK object and query event log directory for Windows XP
    tsk_util = TSKUtil(evidence, image_type)
    event_dir = tsk_util.query_directory(win_event)
    if event_dir is not None:
        if fuzzy is True:
            event_log = tsk_util.recurse_files(log, path=win_event)
        else:
            event_log = tsk_util.recurse_files(
                log, path=win_event, logic="equal")
        if event_log is not None:
            event_data = []
            for hit in event_log:
                event_file = hit[2]
                # Dump the file to the cwd so the libyal parsers can open it.
                temp_evt = write_file(event_file)
                if pyevt.check_file_signature(temp_evt):
                    # Legacy .evt (Windows XP / 2003) log.
                    evt_log = pyevt.open(temp_evt)
                    print("[+] Identified {} records in {}".format(
                        evt_log.number_of_records, temp_evt))
                    for i, record in enumerate(evt_log.records):
                        strings = ""
                        for s in record.strings:
                            if s is not None:
                                strings += s + "\n"
                        event_data.append([
                            i, hit[0], record.computer_name,
                            record.user_security_identifier,
                            record.creation_time, record.written_time,
                            record.event_category, record.source_name,
                            record.event_identifier, record.event_type,
                            strings, "",
                            os.path.join(win_event, hit[1].lstrip("//"))
                        ])
                    # BUGFIX: previously only invalid files were deleted,
                    # so every successfully parsed log leaked a temp copy
                    # into the cwd. Clean up here as index_parser.py does.
                    os.remove(temp_evt)
                elif pyevtx.check_file_signature(temp_evt):
                    # Modern .evtx (Vista+) log.
                    evtx_log = pyevtx.open(temp_evt)
                    print("[+] Identified {} records in {}".format(
                        evtx_log.number_of_records, temp_evt))
                    for i, record in enumerate(evtx_log.records):
                        strings = ""
                        for s in record.strings:
                            if s is not None:
                                strings += s + "\n"
                        event_data.append([
                            i, hit[0], record.computer_name,
                            record.user_security_identifier, "",
                            record.written_time, record.event_level,
                            record.source_name, record.event_identifier,
                            "", strings, record.xml_string,
                            os.path.join(win_event, hit[1].lstrip("//"))
                        ])
                    # BUGFIX: see note above — remove the temp copy on
                    # success as well.
                    os.remove(temp_evt)
                else:
                    print("[-] {} not a valid event log. Removing temp "
                          "file...".format(temp_evt))
                    os.remove(temp_evt)
                    continue
            write_output(event_data)
        else:
            print("[-] {} Event log not found in {} directory".format(
                log, win_event))
            sys.exit(3)
    else:
        print("[-] Win XP Event Log Directory {} not found".format(
            win_event))
        sys.exit(2)
def write_file(event_file):
    """Copy a TSK file object into the cwd under its own name.

    Returns the local file name so callers can hand it to the parsers.
    """
    local_name = event_file.info.name.name
    content = event_file.read_random(0, event_file.info.meta.size)
    with open(local_name, "w") as outfile:
        outfile.write(content)
    return local_name
def write_output(data):
    """Write parsed event records to parsed_event_logs.csv in the cwd."""
    output_name = "parsed_event_logs.csv"
    print("[+] Writing {} to current working directory: {}".format(
        output_name, os.getcwd()))
    header = [
        "Index", "File name", "Computer Name", "SID",
        "Event Create Date", "Event Written Date",
        "Event Category/Level", "Event Source", "Event ID",
        "Event Type", "Data", "XML Data", "File Path"
    ]
    with open(output_name, "wb") as outfile:
        writer = csv.writer(outfile)
        writer.writerow(header)
        writer.writerows(data)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__description__,
epilog="Developed by {} on {}".format(
", ".join(__authors__), __date__)
)
parser.add_argument("EVIDENCE_FILE", help="Evidence file path")
parser.add_argument("TYPE", help="Type of Evidence",
choices=("raw", "ewf"))
parser.add_argument("LOG_NAME",
help="Event Log Name (SecEvent.Evt, SysEvent.Evt, "
"etc.)")
parser.add_argument("-d", help="Event log directory to scan",
default="/WINDOWS/SYSTEM32/WINEVT")
parser.add_argument("-f", help="Enable fuzzy search for either evt or"
" evtx extension", action="store_true")
args = parser.parse_args()
if os.path.exists(args.EVIDENCE_FILE) and \
os.path.isfile(args.EVIDENCE_FILE):
main(args.EVIDENCE_FILE, args.TYPE, args.LOG_NAME, args.d, args.f)
else:
print("[-] Supplied input file {} does not exist or is not a "
"file".format(args.EVIDENCE_FILE))
sys.exit(1)

151
Chapter10/index_parser.py Normal file
View File

@@ -0,0 +1,151 @@
from __future__ import print_function
import argparse
from datetime import datetime, timedelta
import os
import pytsk3
import pyewf
import pymsiecf
import sys
import unicodecsv as csv
from utility.pytskutil import TSKUtil
"""
MIT License
Copyright (c) 2017 Chapin Bryce, Preston Miller
Please share comments and questions at:
https://github.com/PythonForensics/PythonForensicsCookbook
or email pyforcookbook@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Script metadata, surfaced in the argparse epilog below.
__authors__ = ["Chapin Bryce", "Preston Miller"]
__date__ = 20170815
__description__ = "Script to parse index.dat files"
def main(evidence, image_type, path):
    """Find and parse Internet Explorer index.dat files in an image.

    Args:
        evidence: Path to the evidence container (raw or EWF image).
        image_type: "raw" or "ewf".
        path: Directory inside the image to search (e.g. /USERS).

    Exits with status 2 when the directory is missing and 3 when no
    index.dat is found; otherwise writes a CSV via write_output().
    """
    # Create TSK object and query for Internet Explorer index.dat files
    tsk_util = TSKUtil(evidence, image_type)
    index_dir = tsk_util.query_directory(path)
    if index_dir is not None:
        index_files = tsk_util.recurse_files("index.dat", path=path,
                                             logic="equal")
        if index_files is not None:
            print("[+] Identified {} potential index.dat files".format(
                len(index_files)))
            index_data = []
            for hit in index_files:
                index_file = hit[2]
                # Dump the file locally so pymsiecf can open it.
                temp_index = write_file(index_file)
                if pymsiecf.check_file_signature(temp_index):
                    index_dat = pymsiecf.open(temp_index)
                    print("[+] Identified {} records in {}".format(
                        index_dat.number_of_items, temp_index))
                    for i, record in enumerate(index_dat.items):
                        try:
                            data = record.data
                            if data is not None:
                                data = data.rstrip("\x00")
                        except AttributeError:
                            # Redirected / leak records carry no .data;
                            # store what they do expose, then move on.
                            if isinstance(record, pymsiecf.redirected):
                                index_data.append([
                                    i, temp_index, "", "", "", "", "",
                                    record.location, "", "", record.offset,
                                    os.path.join(path, hit[1].lstrip("//"))
                                ])
                            elif isinstance(record, pymsiecf.leak):
                                index_data.append([
                                    i, temp_index, record.filename, "",
                                    "", "", "", "", "", "", record.offset,
                                    os.path.join(path, hit[1].lstrip("//"))
                                ])
                            continue
                        index_data.append([
                            i, temp_index, record.filename,
                            record.type, record.primary_time,
                            record.secondary_time,
                            record.last_checked_time, record.location,
                            record.number_of_hits, data, record.offset,
                            os.path.join(path, hit[1].lstrip("//"))
                        ])
                else:
                    print("[-] {} not a valid index.dat file. Removing "
                          "temp file..".format(temp_index))
                    # BUGFIX: remove the actual temp file instead of the
                    # hard-coded name "index.dat".
                    os.remove(temp_index)
                    continue
                os.remove(temp_index)
            write_output(index_data)
        else:
            print("[-] Index.dat files not found in {} directory".format(
                path))
            sys.exit(3)
    else:
        # BUGFIX: this branch referenced the undefined name "win_event"
        # (copy-paste from evt_explorer.py) and raised a NameError.
        print("[-] Directory {} not found".format(path))
        sys.exit(2)
def write_file(index_file):
    """Copy a TSK file object into the cwd under its own name.

    Returns the local file name for the parser to open.
    """
    local_name = index_file.info.name.name
    content = index_file.read_random(0, index_file.info.meta.size)
    with open(local_name, "w") as outfile:
        outfile.write(content)
    return local_name
def write_output(data):
    """Write parsed index.dat records to a summary CSV in the cwd."""
    output_name = "Internet_Indexdat_Summary_Report.csv"
    print("[+] Writing {} with {} parsed index.dat files to current "
          "working directory: {}".format(output_name, len(data),
                                         os.getcwd()))
    header = ["Index", "File Name", "Record Name",
              "Record Type", "Primary Date", "Secondary Date",
              "Last Checked Date", "Location", "No. of Hits",
              "Record Data", "Record Offset", "File Path"]
    with open(output_name, "wb") as outfile:
        writer = csv.writer(outfile)
        writer.writerow(header)
        writer.writerows(data)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__description__,
epilog="Developed by {} on {}".format(
", ".join(__authors__), __date__)
)
parser.add_argument("EVIDENCE_FILE", help="Evidence file path")
parser.add_argument("TYPE", help="Type of Evidence",
choices=("raw", "ewf"))
parser.add_argument("-d", help="Index.dat directory to scan",
default="/USERS")
args = parser.parse_args()
if os.path.exists(args.EVIDENCE_FILE) and os.path.isfile(
args.EVIDENCE_FILE):
main(args.EVIDENCE_FILE, args.TYPE, args.d)
else:
print("[-] Supplied input file {} does not exist or is not a "
"file".format(args.EVIDENCE_FILE))
sys.exit(1)

183
Chapter10/pf_parser.py Normal file
View File

@@ -0,0 +1,183 @@
from __future__ import print_function
import argparse
from datetime import datetime, timedelta
import os
import pytsk3
import pyewf
import struct
import sys
import unicodecsv as csv
from utility.pytskutil import TSKUtil
"""
MIT License
Copyright (c) 2017 Chapin Bryce, Preston Miller
Please share comments and questions at:
https://github.com/PythonForensics/PythonForensicsCookbook
or email pyforcookbook@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Script metadata, surfaced in the argparse epilog below.
__authors__ = ["Chapin Bryce", "Preston Miller"]
__date__ = 20170815
__description__ = " Read information from prefetch files"
def main(evidence, image_type, output_csv, path):
    """Find prefetch (.pf) files in an image and parse supported ones.

    Only version 17 (Windows XP / 2003) files are parsed; newer versions
    are reported as unsupported. Results go to ``output_csv``.
    """
    # Create TSK object and query path for prefetch files
    tsk_util = TSKUtil(evidence, image_type)
    prefetch_dir = tsk_util.query_directory(path)
    prefetch_files = None
    if prefetch_dir is not None:
        prefetch_files = tsk_util.recurse_files(
            ".pf", path=path, logic="endswith")
    if prefetch_files is None:
        print("[-] No .pf files found")
        sys.exit(2)
    print("[+] Identified {} potential prefetch files".format(
        len(prefetch_files)))
    # Recognised-but-unparsed prefetch versions and their messages.
    unsupported = {
        23: "[-] Windows Vista / 7 PF file {} -- unsupported",
        26: "[-] Windows 8 PF file {} -- unsupported",
        30: "[-] Windows 10 PF file {} -- unsupported",
    }
    prefetch_data = []
    for hit in prefetch_files:
        prefetch_file = hit[2]
        pf_version = check_signature(prefetch_file)
        if pf_version is None:
            # Not an SCCA-signed file; skip silently.
            continue
        pf_name = hit[0]
        if pf_version == 17:
            parsed_data = parse_pf_17(prefetch_file, pf_name)
            parsed_data.append(os.path.join(path, hit[1].lstrip("//")))
            prefetch_data.append(parsed_data)
        elif pf_version in unsupported:
            print(unsupported[pf_version].format(pf_name))
        else:
            print("[-] Signature mismatch - Name: {}\nPath: {}".format(
                hit[0], hit[1]))
    write_output(prefetch_data, output_csv)
def parse_pf_17(prefetch_file, pf_name):
    """Parse a version-17 (Windows XP / 2003) prefetch file.

    Args:
        prefetch_file: TSK file object positioned on the .pf file.
        pf_name: File name of the prefetch hit (for the report row).

    Returns:
        A list: [pf_name, exe name, size, fs create, fs modify,
        last execution, run count, volume name, volume create,
        volume serial].
    """
    # File-system timestamps of the .pf file itself.
    create = convert_unix(prefetch_file.info.meta.crtime)
    modify = convert_unix(prefetch_file.info.meta.mtime)
    # Header fields starting at offset 12: size, UTF-16 exe name,
    # volume-info offset/entries/size, FILETIME of last run, run count.
    pf_size, name, vol_info, vol_entries, vol_size, filetime, \
        count = struct.unpack("<i60s32x3iq16xi",
                              prefetch_file.read_random(12, 136))
    # BUGFIX: the original stripped/split on the four-character literal
    # "/x00" instead of the NUL byte "\x00", so embedded NULs survived
    # in the decoded name.
    name = name.decode("utf-16", "ignore").strip("\x00").split("\x00")[0]
    vol_name_offset, vol_name_length, vol_create, \
        vol_serial = struct.unpack("<2iqi",
                                   prefetch_file.read_random(vol_info, 20))
    # Render the serial number in the familiar XXXX-XXXX form.
    vol_serial = hex(vol_serial).lstrip("0x")
    vol_serial = vol_serial[:4] + "-" + vol_serial[4:]
    vol_name = struct.unpack(
        "<{}s".format(2 * vol_name_length),
        prefetch_file.read_random(vol_info + vol_name_offset,
                                  vol_name_length * 2)
    )[0]
    # Same NUL-stripping fix as for the exe name above.
    vol_name = vol_name.decode("utf-16", "ignore").strip("\x00").split(
        "\x00")[0]
    return [
        pf_name, name, pf_size, create,
        modify, convert_filetime(filetime), count, vol_name,
        convert_filetime(vol_create), vol_serial
    ]
def convert_unix(ts):
    """Convert a Unix epoch timestamp to a UTC datetime ("" when zero)."""
    return "" if int(ts) == 0 else datetime.utcfromtimestamp(ts)
def convert_filetime(ts):
    """Convert a Windows FILETIME (100 ns ticks since 1601) to datetime.

    Returns "" for an unset (zero) timestamp.
    """
    ticks = int(ts)
    if not ticks:
        return ""
    # FILETIME counts 100 ns intervals; dividing by 10 gives microseconds.
    return datetime(1601, 1, 1) + timedelta(microseconds=ts / 10)
def check_signature(prefetch_file):
    """Return the prefetch version when the file carries the SCCA magic.

    Reads the first 8 bytes (version, signature); returns None when the
    signature does not match.
    """
    version, signature = struct.unpack(
        "<2i", prefetch_file.read_random(0, 8))
    # 1094927187 == 0x41434353 == "SCCA" read little-endian.
    return version if signature == 1094927187 else None
def write_output(data, output_csv):
    """Write the parsed prefetch rows to *output_csv*."""
    print("[+] Writing csv report")
    header = [
        "File Name", "Prefetch Name", "File Size (bytes)",
        "File Create Date (UTC)", "File Modify Date (UTC)",
        "Prefetch Last Execution Date (UTC)",
        "Prefetch Execution Count", "Volume", "Volume Create Date",
        "Volume Serial", "File Path"
    ]
    with open(output_csv, "wb") as outfile:
        writer = csv.writer(outfile)
        writer.writerow(header)
        writer.writerows(data)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__description__,
epilog="Developed by {} on {}".format(
", ".join(__authors__), __date__)
)
parser.add_argument("EVIDENCE_FILE", help="Evidence file path")
parser.add_argument("TYPE", help="Type of Evidence",
choices=("raw", "ewf"))
parser.add_argument("OUTPUT_CSV", help="Path to write output csv")
parser.add_argument("-d", help="Prefetch directory to scan",
default="/WINDOWS/PREFETCH")
args = parser.parse_args()
if os.path.exists(args.EVIDENCE_FILE) and \
os.path.isfile(args.EVIDENCE_FILE):
main(args.EVIDENCE_FILE, args.TYPE, args.OUTPUT_CSV, args.d)
else:
print("[-] Supplied input file {} does not exist or is not a "
"file".format(args.EVIDENCE_FILE))
sys.exit(1)

218
Chapter10/srum_parser.py Normal file
View File

@@ -0,0 +1,218 @@
from __future__ import print_function
import argparse
from datetime import datetime, timedelta
import os
import pytsk3
import pyewf
import pyesedb
import struct
import sys
import unicodecsv as csv
from utility.pytskutil import TSKUtil
"""
MIT License
Copyright (c) 2017 Chapin Bryce, Preston Miller
Please share comments and questions at:
https://github.com/PythonForensics/PythonForensicsCookbook
or email pyforcookbook@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Script metadata, surfaced in the argparse epilog below.
__authors__ = ["Chapin Bryce", "Preston Miller"]
__date__ = 20170815
__description__ = "Extract information from the SRUM database"

# Maps SRUM ESE table GUIDs to friendly report names; tables absent
# from this map are still exported under their raw GUID name.
TABLE_LOOKUP = {
    "{973F5D5C-1D90-4944-BE8E-24B94231A174}": "Network Data Usage",
    "{D10CA2FE-6FCF-4F6D-848E-B2E99266FA86}": "Push Notifications",
    "{D10CA2FE-6FCF-4F6D-848E-B2E99266FA89}": "Application Resource Usage",
    "{DD6636C4-8929-4683-974E-22C046A43763}": "Network Connectivity Usage",
    "{FEE4E14F-02A9-4550-B5CE-5FA2DA202E37}": "Energy Usage"}

# Populated at runtime from the SruDbIdMapTable: app id -> app name.
APP_ID_LOOKUP = {}
def main(evidence, image_type):
    """Locate the SRUM database in an image and export its tables.

    Args:
        evidence: Path to the evidence container (raw or EWF image).
        image_type: "raw" or "ewf".

    Exits with status 2 when the sru directory is missing and 3 when no
    SRUDB.dat is found; otherwise writes one CSV per non-empty table.
    """
    # Create TSK object and query for the SRUM database location.
    # (Previous comment wrongly said "index.dat" — copy-paste artifact.)
    tsk_util = TSKUtil(evidence, image_type)
    path = "/Windows/System32/sru"
    srum_dir = tsk_util.query_directory(path)
    if srum_dir is not None:
        srum_files = tsk_util.recurse_files("SRUDB.dat", path=path,
                                            logic="equal")
        if srum_files is not None:
            print("[+] Identified {} potential SRUDB.dat file(s)".format(
                len(srum_files)))
            for hit in srum_files:
                srum_file = hit[2]
                srum_tables = {}
                # Dump the file locally so pyesedb can open it.
                temp_srum = write_file(srum_file)
                if pyesedb.check_file_signature(temp_srum):
                    srum_dat = pyesedb.open(temp_srum)
                    print("[+] Process {} tables within database".format(
                        srum_dat.number_of_tables))
                    # First pass: build the app-id -> name map used by
                    # convert_data() for AppId columns.
                    for table in srum_dat.tables:
                        if table.name != "SruDbIdMapTable":
                            continue
                        global APP_ID_LOOKUP
                        for entry in table.records:
                            app_id = entry.get_value_data_as_integer(1)
                            try:
                                app = entry.get_value_data(2).replace(
                                    "\x00", "")
                            except AttributeError:
                                app = ""
                            APP_ID_LOOKUP[app_id] = app
                    # Second pass: decode and export every table.
                    for table in srum_dat.tables:
                        t_name = table.name
                        print("[+] Processing {} table with {} records"
                              .format(t_name, table.number_of_records))
                        srum_tables[t_name] = {"columns": [], "data": []}
                        columns = [x.name for x in table.columns]
                        srum_tables[t_name]["columns"] = columns
                        for entry in table.records:
                            data = []
                            for x in range(entry.number_of_values):
                                data.append(convert_data(
                                    entry.get_value_data(x), columns[x],
                                    entry.get_column_type(x))
                                )
                            srum_tables[t_name]["data"].append(data)
                        write_output(t_name, srum_tables)
                    # BUGFIX: previously only invalid files were removed,
                    # leaking the temp SRUDB.dat copy into the cwd on
                    # success (index_parser.py shows the intended cleanup).
                    os.remove(temp_srum)
                else:
                    print("[-] {} not a valid SRUDB.dat file. Removing "
                          "temp file...".format(temp_srum))
                    os.remove(temp_srum)
                    continue
        else:
            print("[-] SRUDB.dat files not found in {} "
                  "directory".format(path))
            sys.exit(3)
    else:
        print("[-] Directory {} not found".format(path))
        sys.exit(2)
def convert_data(data, column, col_type):
    """Decode a raw ESE column value according to its column type code.

    Args:
        data: Raw column value from pyesedb (may be None).
        column: Column name; drives special handling for AppId and
            timestamp columns.
        col_type: pyesedb column type code.

    Returns:
        A decoded Python value for CSV output; types this function does
        not decode are passed through unchanged.
    """
    if data is None:
        return ""
    elif column == "AppId":
        # AppId values index into SruDbIdMapTable; main() resolved that
        # table into APP_ID_LOOKUP beforehand.
        return APP_ID_LOOKUP[struct.unpack("<i", data)[0]]
    elif col_type == 0:
        # Empty/NULL column type.
        return ""
    elif col_type == 1:
        # Treated as a boolean flag.
        # NOTE(review): py2-style comparison — under py3, data is
        # presumably bytes and would never equal the str "*"; confirm.
        if data == "*":
            return True
        else:
            return False
    elif col_type == 2:
        # Unsigned 8-bit integer.
        return struct.unpack("<B", data)[0]
    elif col_type == 3:
        # Signed 16-bit integer.
        return struct.unpack("<h", data)[0]
    elif col_type == 4:
        # Signed 32-bit integer.
        return struct.unpack("<i", data)[0]
    elif col_type == 6:
        # 32-bit float.
        return struct.unpack("<f", data)[0]
    elif col_type == 7:
        # 64-bit double.
        return struct.unpack("<d", data)[0]
    elif col_type == 8:
        # 64-bit value interpreted as an OLE automation date.
        return convert_ole(struct.unpack("<q", data)[0])
    elif col_type in [5, 9, 10, 12, 13, 16]:
        # Passed through raw (binary/text style types).
        return data
    elif col_type == 11:
        # Text: drop embedded NULs.
        return data.replace("\x00", "")
    elif col_type == 14:
        # Unsigned 32-bit integer.
        return struct.unpack("<I", data)[0]
    elif col_type == 15:
        # Signed 64-bit integer; the named columns hold FILETIME stamps.
        if column in ["EventTimestamp", "ConnectStartTime"]:
            return convert_filetime(struct.unpack("<q", data)[0])
        else:
            return struct.unpack("<q", data)[0]
    elif col_type == 17:
        # Unsigned 16-bit integer.
        return struct.unpack("<H", data)[0]
    else:
        # Unknown type code — return the raw value untouched.
        return data
def write_file(srum_file):
    """Copy a TSK file object into the cwd under its own name.

    Returns the local file name for pyesedb to open.
    """
    local_name = srum_file.info.name.name
    content = srum_file.read_random(0, srum_file.info.meta.size)
    with open(local_name, "w") as outfile:
        outfile.write(content)
    return local_name
def convert_filetime(ts):
    """Convert a Windows FILETIME (100 ns ticks since 1601) to datetime.

    Returns "" for an unset (zero) value and the raw integer when it is
    too large to represent as a datetime.
    """
    if str(ts) == "0":
        return ""
    try:
        return datetime(1601, 1, 1) + timedelta(microseconds=ts / 10)
    except OverflowError:
        # Out of datetime range — hand back the raw value instead.
        return ts
def convert_ole(ts):
    """Convert an OLE automation date stored as a 64-bit int to datetime.

    Returns the raw value when the resulting date is out of range.
    """
    # Reinterpret the integer's bytes as a big-endian double (days since
    # the OLE epoch, 1899-12-30).
    ole = struct.unpack(">d", struct.pack(">Q", ts))[0]
    try:
        return datetime(1899, 12, 30, 0, 0, 0) + timedelta(days=ole)
    except OverflowError:
        # Out of datetime range — hand back the raw value instead.
        return ts
def write_output(table, data):
    """Write one parsed SRUM table to a CSV in the cwd.

    Empty tables are skipped; known GUIDs get friendly file names via
    TABLE_LOOKUP, everything else falls back to the raw table name.
    """
    if len(data[table]["data"]) == 0:
        return
    if table in TABLE_LOOKUP:
        output_name = TABLE_LOOKUP[table] + ".csv"
    else:
        output_name = "SRUM_Table_{}.csv".format(table)
    print("[+] Writing {} to current working directory: {}".format(
        output_name, os.getcwd()))
    rows = data[table]
    with open(output_name, "wb") as outfile:
        writer = csv.writer(outfile)
        writer.writerow(rows["columns"])
        writer.writerows(rows["data"])
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__description__,
epilog="Developed by {} on {}".format(
", ".join(__authors__), __date__)
)
parser.add_argument("EVIDENCE_FILE", help="Evidence file path")
parser.add_argument("TYPE", help="Type of Evidence",
choices=("raw", "ewf"))
args = parser.parse_args()
if os.path.exists(args.EVIDENCE_FILE) and os.path.isfile(
args.EVIDENCE_FILE):
main(args.EVIDENCE_FILE, args.TYPE)
else:
print("[-] Supplied input file {} does not exist or is not a "
"file".format(args.EVIDENCE_FILE))
sys.exit(1)

View File

@@ -0,0 +1 @@
import pytskutil

View File

@@ -0,0 +1,311 @@
from __future__ import print_function
import os
import pytsk3
import sys
import pyewf
from datetime import datetime
"""
MIT License
Copyright (c) 2017 Chapin Bryce, Preston Miller
Please share comments and questions at:
https://github.com/PythonForensics/PythonForensicsCookbook
or email pyforcookbook@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
class EWFImgInfo(pytsk3.Img_Info):
    """EWF Image Format helper class.

    Adapts a pyewf handle to the pytsk3.Img_Info interface so TSK can
    read Expert Witness (E01) containers transparently.
    """
    def __init__(self, ewf_handle):
        # Keep the open pyewf handle; the methods below delegate to it.
        self._ewf_handle = ewf_handle
        super(EWFImgInfo, self).__init__(url="", type=pytsk3.TSK_IMG_TYPE_EXTERNAL)

    def close(self):
        """Close the underlying EWF handle."""
        self._ewf_handle.close()

    def read(self, offset, size):
        """Read *size* bytes starting at *offset* from the EWF image."""
        self._ewf_handle.seek(offset)
        return self._ewf_handle.read(size)

    def get_size(self):
        """Return the media size of the EWF image in bytes."""
        return self._ewf_handle.get_media_size()
class TSKUtil(object):
    """Wrapper around pytsk3 for raw and EWF evidence images.

    On construction it opens the image, reads the partition table (when
    present), and opens every usable file system; recurse_files() and
    query_directory() then search across all of them.
    """
    def __init__(self, evidence, image_type):
        # evidence: path to the image; image_type: "raw" or "ewf".
        self.evidence = evidence
        self.image_type = image_type
        # Assigned parameters
        self.vol = None
        self.image_handle = None
        self.fs = []
        # Prep volume and fs objects
        self.run()

    def run(self):
        """Open the volume, then every file system it contains."""
        self.open_vol()
        self.open_FS()

    def return_vol(self):
        """Open the image and return its Volume_Info (None on failure).

        NOTE(review): duplicates open_vol() except for the return value;
        appears to be a legacy variant — confirm callers before removing.
        """
        sys.stderr.write("[+] Opening {}\n".format(self.evidence))
        # Handle EWF/Raw Images
        if self.image_type == "ewf":
            try:
                filenames = pyewf.glob(self.evidence)
            except IOError:
                _, e, _ = sys.exc_info()
                sys.stderr.write("[-] Invalid EWF format:\n {}\n".format(e))
                raise IOError
            ewf_handle = pyewf.handle()
            ewf_handle.open(filenames)
            # Open PYTSK3 handle on EWF Image
            self.image_handle = EWFImgInfo(ewf_handle)
        else:
            self.image_handle = pytsk3.Img_Info(self.evidence)
        # Open volume from image
        try:
            self.vol = pytsk3.Volume_Info(self.image_handle)
        except IOError:
            return None
        return self.vol

    def open_vol(self):
        """Open the image handle and its partition table (if any)."""
        sys.stderr.write("[+] Opening {}\n".format(self.evidence))
        # Handle EWF/Raw Images
        if self.image_type == "ewf":
            try:
                filenames = pyewf.glob(self.evidence)
            except IOError:
                _, e, _ = sys.exc_info()
                sys.stderr.write("[-] Invalid EWF format:\n {}\n".format(e))
                raise IOError
            ewf_handle = pyewf.handle()
            ewf_handle.open(filenames)
            # Open PYTSK3 handle on EWF Image
            self.image_handle = EWFImgInfo(ewf_handle)
        else:
            self.image_handle = pytsk3.Img_Info(self.evidence)
        # Open volume from image
        try:
            self.vol = pytsk3.Volume_Info(self.image_handle)
        except IOError:
            # No partition table — treated as a logical image downstream.
            _, e, _ = sys.exc_info()
            sys.stderr.write("[-] Unable to read partition table. Possible logical image:\n {}\n".format(e))

    def open_FS(self):
        """Open a FS_Info for each plausible partition (or the whole image)."""
        # Open FS and Recurse
        if self.vol is not None:
            for partition in self.vol:
                # Skip tiny, unallocated, extended and table partitions.
                if partition.len > 2048 and "Unallocated" not in partition.desc and "Extended" not in partition.desc and "Primary Table" not in partition.desc:
                    try:
                        self.fs.append(pytsk3.FS_Info(
                            self.image_handle,
                            offset=partition.start * self.vol.info.block_size))
                    except IOError:
                        _, e, _ = sys.exc_info()
                        sys.stderr.write("[-] Unable to open FS:\n {}\n".format(e))
        else:
            # Logical image: open the file system at offset zero.
            try:
                self.fs.append(pytsk3.FS_Info(self.image_handle))
            except IOError:
                _, e, _ = sys.exc_info()
                sys.stderr.write("[-] Unable to open FS:\n {}\n".format(e))

    def detect_ntfs(self, vol, partition):
        """Return True when the given partition holds an NTFS file system."""
        try:
            block_size = vol.info.block_size
            fs_object = pytsk3.FS_Info(self.image_handle, offset=(partition.start * block_size))
        except Exception:
            sys.stderr.write("[-] Unable to open FS\n")
            return False
        if fs_object.info.ftype == pytsk3.TSK_FS_TYPE_NTFS_DETECT:
            return True
        else:
            return False

    def recurse_files(self, substring, path="/", logic="contains", case=False):
        """Search all open file systems for files matching *substring*.

        Args:
            substring: Name fragment (or full name) to match.
            path: Directory to start from within each file system.
            logic: "contains", "startswith", "endswith" or "equal".
            case: When False, matching is case-insensitive.

        Returns:
            A list of (name, path, file_object, partition_index) tuples,
            or None when nothing matched.
        """
        files = []
        for i, fs in enumerate(self.fs):
            try:
                root_dir = fs.open_dir(path)
            except IOError:
                # This FS lacks the start directory — try the next one.
                continue
            files += self.recurse_dirs(i, fs, root_dir, [], [], [""], substring, logic, case)
        if files == []:
            return None
        else:
            return files

    def query_directory(self, path):
        """Return (fs_index, dir_object) pairs for *path*, or None."""
        dirs = []
        for i, fs in enumerate(self.fs):
            try:
                dirs.append((i, fs.open_dir(path)))
            except IOError:
                continue
        if dirs == []:
            return None
        else:
            return dirs

    def recurse_dirs(self, part, fs, root_dir, dirs, data, parent, substring, logic, case):
        """Depth-first walk collecting files whose names match the query.

        ``dirs`` (visited inode list) and ``parent`` (path stack) are
        shared mutable accumulators across the recursion; ``data`` holds
        the (name, path, file_object, partition) result tuples.
        """
        # Remember this directory's inode so we never re-enter it.
        dirs.append(root_dir.info.fs_file.meta.addr)
        for fs_object in root_dir:
            # Skip ".", ".." or directory entries without a name.
            if not hasattr(fs_object, "info") or not hasattr(fs_object.info, "name") or not hasattr(fs_object.info.name, "name") or fs_object.info.name.name in [".", ".."]:
                continue
            try:
                file_name = fs_object.info.name.name
                file_path = "{}/{}".format("/".join(parent), fs_object.info.name.name)
                try:
                    if fs_object.info.meta.type == pytsk3.TSK_FS_META_TYPE_DIR:
                        f_type = "DIR"
                        # NOTE(review): file_ext is assigned but never used here.
                        file_ext = ""
                    else:
                        f_type = "FILE"
                except AttributeError:
                    continue  # Which object has the AttributeError?
                if f_type == "FILE":
                    # Apply the requested matching logic; "case" toggles
                    # case-insensitive comparison.
                    if logic.lower() == 'contains':
                        if case is False:
                            if substring.lower() in file_name.lower():
                                data.append((file_name, file_path, fs_object, part))
                        else:
                            if substring in file_name:
                                data.append((file_name, file_path, fs_object, part))
                    elif logic.lower() == 'startswith':
                        if case is False:
                            if file_name.lower().startswith(substring.lower()):
                                data.append((file_name, file_path, fs_object, part))
                        else:
                            if file_name.startswith(substring):
                                data.append((file_name, file_path, fs_object, part))
                    elif logic.lower() == 'endswith':
                        if case is False:
                            if file_name.lower().endswith(substring.lower()):
                                data.append((file_name, file_path, fs_object, part))
                        else:
                            if file_name.endswith(substring):
                                data.append((file_name, file_path, fs_object, part))
                    elif logic.lower() == 'equal':
                        if case is False:
                            if substring.lower() == file_name.lower():
                                data.append((file_name, file_path, fs_object, part))
                        else:
                            if substring == file_name:
                                data.append((file_name, file_path, fs_object, part))
                    else:
                        sys.stderr.write("[-] Warning invalid logic {} provided\n".format(logic))
                        sys.exit()
                elif f_type == "DIR":
                    parent.append(fs_object.info.name.name)
                    sub_directory = fs_object.as_directory()
                    inode = fs_object.info.meta.addr
                    # This ensures that we don't recurse into a directory
                    # above the current level and thus avoid circular loops.
                    if inode not in dirs:
                        self.recurse_dirs(part, fs, sub_directory, dirs, data, parent, substring, logic, case)
                    parent.pop(-1)
            except IOError:
                pass
        dirs.pop(-1)
        return data
def openVSSFS(img, count):
    """Open the file system inside one VSS store and list its contents.

    Args:
        img: pytsk3.Img_Info-compatible object for a shadow-copy store.
        count: Shadow-copy index, used to label the output rows.

    Returns:
        The list of metadata rows from recurseFiles(), or None when the
        file system cannot be opened.
    """
    # Open FS and Recurse
    try:
        fs = pytsk3.FS_Info(img)
    except IOError:
        _, e, _ = sys.exc_info()
        sys.stderr.write("[-] Unable to open FS: {}".format(e))
        # BUGFIX: execution previously fell through here and crashed with
        # a NameError on the unbound "fs"; bail out explicitly instead.
        return None
    root = fs.open_dir(path="/")
    data = recurseFiles(count, fs, root, [], [], [""])
    return data
def recurseFiles(count, fs, root_dir, dirs, data, parent):
    """Walk a VSS file system, recording metadata for every entry.

    Args:
        count: Shadow-copy index used to label each row ("VSS <n>").
        fs: The open pytsk3.FS_Info (unused directly; kept for recursion).
        root_dir: Directory object to walk.
        dirs: Visited directory inode list (cycle guard), shared across
            the recursion.
        data: Accumulator list of metadata rows.
        parent: Path-component stack, shared across the recursion.

    Returns:
        The accumulated ``data`` list.
    """
    # Remember this directory's inode so we never re-enter it.
    dirs.append(root_dir.info.fs_file.meta.addr)
    for fs_object in root_dir:
        # Skip ".", ".." or directory entries without a name.
        if not hasattr(fs_object, "info") or not hasattr(fs_object.info, "name") or not hasattr(fs_object.info.name, "name") or fs_object.info.name.name in [".", ".."]:
            continue
        try:
            file_name = fs_object.info.name.name
            file_path = "{}/{}".format("/".join(parent), fs_object.info.name.name)
            try:
                if fs_object.info.meta.type == pytsk3.TSK_FS_META_TYPE_DIR:
                    f_type = "DIR"
                    file_ext = ""
                else:
                    f_type = "FILE"
                    # Extension taken from the last dot, lower-cased.
                    if "." in file_name:
                        file_ext = file_name.rsplit(".")[-1].lower()
                    else:
                        file_ext = ""
            except AttributeError:
                # Entry without meta information — nothing to report.
                continue
            size = fs_object.info.meta.size
            create = convertTime(fs_object.info.meta.crtime)
            change = convertTime(fs_object.info.meta.ctime)
            modify = convertTime(fs_object.info.meta.mtime)
            data.append(["VSS {}".format(count), file_name, file_ext, f_type, create, change, modify, size, file_path])
            if f_type == "DIR":
                parent.append(fs_object.info.name.name)
                sub_directory = fs_object.as_directory()
                inode = fs_object.info.meta.addr
                # This ensures that we don't recurse into a directory
                # above the current level and thus avoid circular loops.
                if inode not in dirs:
                    recurseFiles(count, fs, sub_directory, dirs, data, parent)
                parent.pop(-1)
        except IOError:
            pass
    dirs.pop(-1)
    return data
def convertTime(ts):
    """Render a Unix timestamp as a UTC datetime; zero/unset becomes ""."""
    if str(ts) != "0":
        return datetime.utcfromtimestamp(ts)
    return ""

166
Chapter10/utility/vss.py Normal file
View File

@@ -0,0 +1,166 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#This file was copied from https://code.google.com/p/plaso/source/browse/plaso/pvfs/vss.py
"""This file contains a simple library to read files stored in VSS."""
import logging
import os
import pytsk3
import pyvshadow
class VShadowImgInfo(pytsk3.Img_Info):
    """Extending the TSK Img_Info to allow VSS images to be read in."""
    def __init__(self, store):
        # store: an open pyvshadow store (one shadow copy) to read from.
        self._store = store
        super(VShadowImgInfo, self).__init__()

    # Implementing an interface
    def read(self, offset, size):
        """Read *size* bytes starting at *offset* from the VSS store."""
        self._store.seek(offset)
        return self._store.read(size)

    # Implementing an interface
    def get_size(self):
        """Return the size of the shadow-copy store in bytes."""
        return self._store.get_size()
class VShadowVolume(object):
    """Disk file implementation faking volume file.

    pyvshadow does not support disk images, only volume based ones.
    In order for us to be able to use disk images we need to provide
    an interface that exposes volumes inside of a disk image.
    """

    def __init__(self, file_path, offset=0, sector_size=512):
        """Provide a file like object of a volume inside a disk image.

        Args:
            file_path: String, denoting the file path to the disk image.
            offset: An offset in bytes to the volume within the disk.
            sector_size: The size in bytes of a single sector, defaults
                to 512.
        """
        self._block_size = 0
        self._offset_start = 0
        self._orig_offset = offset
        # Convert the byte offset into a sector offset for TSK.
        ofs = int(offset / sector_size)
        self._block_size, self._image_size = GetImageSize(file_path, ofs)
        self._fh = open(file_path, 'rb')
        self._fh.seek(0, os.SEEK_END)
        self._fh_size = self._fh.tell()
        self._image_offset = ofs
        if self._block_size:
            # Position the handle at the start of the requested volume.
            self._offset_start = self._image_offset * self._block_size
            self._fh.seek(self._offset_start, 0)

    def read(self, size=None):
        """Return read bytes from volume as denoted by the size parameter."""
        if not self._orig_offset:
            return self._fh.read(size)
        # Bug fix: size=None previously raised TypeError on this bounded
        # path; treat it as "read to the end of the volume".
        if size is None:
            size = self._image_size - self.tell()
        # Check upper bounds, we need to return empty values for above
        # bounds. Bug fix: tell() is relative to the volume start, so the
        # bound is the volume size itself; the old check added
        # _offset_start and could read past the end of the volume.
        if size + self.tell() > self._image_size:
            size = self._image_size - self.tell()
        if size < 1:
            # Bug fix: the image is opened in binary mode, so EOF must be
            # b'' rather than '' under Python 3 ('' == b'' on Python 2).
            return b''
        return self._fh.read(size)

    def get_size(self):
        """Return the size in bytes of the volume."""
        if self._block_size:
            return self._block_size * self._image_size
        return self._fh_size

    def close(self):
        """Close the underlying disk image file handle."""
        self._fh.close()

    def seek(self, offset, whence=os.SEEK_SET):
        """Seek into the volume.

        Raises:
            RuntimeError: On an unsupported whence value.
            IOError: When seeking before the start of the volume.
        """
        if not self._block_size:
            self._fh.seek(offset, whence)
            return
        ofs = 0
        abs_ofs = 0
        if whence == os.SEEK_SET:
            ofs = offset + self._offset_start
            abs_ofs = ofs
        elif whence == os.SEEK_CUR:
            ofs = offset
            abs_ofs = self.tell() + ofs
        elif whence == os.SEEK_END:
            # Compensate for image bytes trailing the volume (e.g. later
            # partitions) so SEEK_END refers to the volume's end, not the
            # image's end.
            size_diff = self._fh_size - (
                self._offset_start + self._image_size)
            ofs = offset - size_diff
            abs_ofs = self._image_size + self._offset_start + offset
        else:
            raise RuntimeError('Illegal whence value %s' % whence)
        # check boundary
        if abs_ofs < self._offset_start:
            raise IOError('Invalid seek, out of bounds. Seek before start.')
        self._fh.seek(ofs, whence)

    def tell(self):
        """Return the current offset relative to the volume start."""
        if not self._block_size:
            return self._fh.tell()
        return self._fh.tell() - self._offset_start

    def get_offset(self):
        """Alias of tell() for the pyvshadow file-object interface."""
        return self.tell()
def GetVssStoreCount(image, offset=0):
    """Return the number of VSS stores available in an image.

    Returns 0 (after logging a warning) when the volume cannot be read.
    """
    vss_volume = pyvshadow.volume()
    volume_file = VShadowVolume(image, offset)
    try:
        vss_volume.open_file_object(volume_file)
        return vss_volume.number_of_stores
    except IOError as vss_error:
        logging.warning(
            'Error while trying to read VSS information: %s', vss_error)
    return 0
def GetImageSize(file_path, offset):
    """Read the partition information to gather volume size.

    Args:
        file_path: String path to the disk image.
        offset: Sector offset of the volume within the image.

    Returns:
        Tuple of (block size, volume size in bytes); (0, 0) when the
        offset is zero or no partition table can be read.
    """
    if not offset:
        return 0, 0

    img = pytsk3.Img_Info(file_path)
    try:
        volume = pytsk3.Volume_Info(img)
    except IOError:
        return 0, 0

    block_size = volume.info.block_size
    # Locate the partition that starts at the requested sector offset;
    # its length is reported in sectors, so scale by the block size.
    sectors = next(
        (part.len for part in volume if part.start == offset), 0)
    return block_size, sectors * block_size

118
Chapter10/vss_explorer.py Normal file
View File

@@ -0,0 +1,118 @@
from __future__ import print_function
import argparse
from datetime import datetime, timedelta
import os
import pytsk3
import pyewf
import pyvshadow
import sys
import unicodecsv as csv
from utility import vss
from utility.pytskutil import TSKUtil
from utility import pytskutil
"""
MIT License
Copyright (c) 2017 Chapin Bryce, Preston Miller
Please share comments and questions at:
https://github.com/PythonForensics/PythonForensicsCookbook
or email pyforcookbook@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
__authors__ = ["Chapin Bryce", "Preston Miller"]
__date__ = 20170815
__description__ = "Utility to explore VSS on NTFS volumes"
def main(evidence, output):
    """Scan a raw evidence file for NTFS partitions and explore their VSCs.

    Args:
        evidence: Path to the raw (physical) evidence file.
        output: Path of the CSV report to produce.
    """
    tsk_util = TSKUtil(evidence, "raw")
    img_vol = tsk_util.return_vol()
    # Only physical preservations expose a partition table we can walk.
    if img_vol is None:
        print("[-] Must be a physical preservation to be compatible "
              "with this script")
        sys.exit(2)
    for part in img_vol:
        if tsk_util.detect_ntfs(img_vol, part):
            print("Exploring NTFS Partition for VSS")
            explore_vss(
                evidence, part.start * img_vol.info.block_size, output)
def explore_vss(evidence, part_offset, output):
    """Enumerate Volume Shadow Copies on an NTFS partition and report them.

    Args:
        evidence: Path to the raw evidence file.
        part_offset: Byte offset of the NTFS partition within the image.
        output: Path of the CSV report to write.
    """
    vss_volume = pyvshadow.volume()
    vss_handle = vss.VShadowVolume(evidence, part_offset)
    vss_count = vss.GetVssStoreCount(evidence, part_offset)
    if vss_count > 0:
        vss_volume.open_file_object(vss_handle)
        vss_data = []
        for x in range(vss_count):
            # Bug fix: report 1-based progress; previously the first store
            # printed as "VSC 0 of N". The store index passed to
            # get_store()/openVSSFS stays 0-based.
            print("Gathering data for VSC {} of {}".format(
                x + 1, vss_count))
            vss_store = vss_volume.get_store(x)
            image = vss.VShadowImgInfo(vss_store)
            vss_data.append(pytskutil.openVSSFS(image, x))
        write_csv(vss_data, output)
def write_csv(data, output):
    """Write VSS file-listing results to a CSV report.

    Appends to an existing report (without repeating the header row) or
    creates a new report with a header row.

    Args:
        data: List of per-store result lists (rows of file metadata).
        output: Path of the CSV file to create or append to.

    Raises:
        SystemExit: With code 3 when there are no results to write.
    """
    if data == []:
        print("[-] No output results to write")
        sys.exit(3)
    print("[+] Writing output to {}".format(output))
    # Bug fix: "append" was only assigned when the output file already
    # existed, raising NameError on the first run; initialize it first.
    append = False
    if os.path.exists(output):
        append = True
    with open(output, "ab") as csvfile:
        csv_writer = csv.writer(csvfile)
        headers = ["VSS", "File", "File Ext", "File Type", "Create Date",
                   "Modify Date", "Change Date", "Size", "File Path"]
        if not append:
            csv_writer.writerow(headers)
        for result_list in data:
            csv_writer.writerows(result_list)
if __name__ == "__main__":
    # Command-line entry point: evidence file in, CSV listing out.
    arg_parser = argparse.ArgumentParser(
        description=__description__,
        epilog="Developed by {} on {}".format(
            ", ".join(__authors__), __date__)
    )
    arg_parser.add_argument("EVIDENCE_FILE", help="Evidence file path")
    arg_parser.add_argument("OUTPUT_CSV",
                            help="Output CSV with VSS file listing")
    parsed_args = arg_parser.parse_args()

    # Create the report's parent directory when one was given and is missing.
    report_dir = os.path.dirname(parsed_args.OUTPUT_CSV)
    if report_dir != "" and not os.path.exists(report_dir):
        os.makedirs(report_dir)

    evidence_ok = os.path.exists(parsed_args.EVIDENCE_FILE) and \
        os.path.isfile(parsed_args.EVIDENCE_FILE)
    if evidence_ok:
        main(parsed_args.EVIDENCE_FILE, parsed_args.OUTPUT_CSV)
    else:
        print("[-] Supplied input file {} does not exist or is not a "
              "file".format(parsed_args.EVIDENCE_FILE))
        sys.exit(1)