template IDA plugin & development scripts

This commit is contained in:
gaasedelen
2017-02-10 19:19:12 -08:00
parent d1993328d2
commit 57f842fa3f
7 changed files with 208 additions and 0 deletions
+10
View File
@@ -0,0 +1,10 @@
REM - Clean up an existing or past 'test session'
REM   Force-kill both the 32-bit and 64-bit IDA instances so the testcase
REM   database files below are no longer locked by a running process.
taskkill /F /IM "idaq.exe"
taskkill /F /IM "idaq64.exe"
REM   Give the killed processes a moment to release their file handles.
timeout 1
REM   Purge IDA's intermediate database components for the testcase
REM   (.id0/.id1/.id2/.nam/.til are the unpacked IDB parts, .$$$ is temp).
del "..\..\testcase\*.id0"
del "..\..\testcase\*.id1"
del "..\..\testcase\*.id2"
del "..\..\testcase\*.nam"
del "..\..\testcase\*.til"
del "..\..\testcase\*.$$$"
+16
View File
@@ -0,0 +1,16 @@
REM - Close any running instances of IDA
call close_IDA.bat
REM - Purge old lighthouse log files
del /F /Q "C:\Users\user\AppData\Roaming\Hex-Rays\IDA Pro\lighthouse_logs\*"
REM - Delete the old plugin bits
del /F /Q "C:\tools\disassemblers\IDA 6.8\plugins\lighthouse_plugin.py"
rmdir "C:\tools\disassemblers\IDA 6.8\plugins\lighthouse" /s /q
REM - Copy over the new plugin bits
xcopy /s/y "..\plugin\*" "C:\tools\disassemblers\IDA 6.8\plugins\"
REM - Relaunch two IDA sessions
REM   NOTE(review): only one 'start' (idaq64 + testcase) is visible here --
REM   the second session appears to be launched by a sibling script; verify.
start "" "C:\tools\disassemblers\IDA 6.8\idaq64.exe" "..\..\testcase\boombox.i64"
+3
View File
@@ -0,0 +1,3 @@
start "" "C:\tools\disassemblers\IDA 6.8\idaq.exe"
View File
+1
View File
@@ -0,0 +1 @@
from log import *
+129
View File
@@ -0,0 +1,129 @@
import os
import sys
import logging
import idaapi
#------------------------------------------------------------------------------
# Log / Print helpers
#------------------------------------------------------------------------------
def lmsg(message):
    """
    Print a message to the IDA output window, prefixed with [Lighthouse].

    Falls back to the module logger when the IDA output window is not
    yet available (eg, during early plugin load).
    """

    # prefix the message so it is recognizable in the IDA output window
    prefix_message = "[Lighthouse] %s" % message

    # only print to IDA if the output window is alive
    if idaapi.is_msg_inited():
        # bugfix: use the call form of print so this file is valid on
        # both Python 2 and Python 3 (the statement form is py2-only)
        print(prefix_message)
    else:
        # NOTE(review): assumes the global 'logger' has already been
        # bound by start_logging() before this path is hit -- confirm
        logger.info(message)
def get_log_dir():
    """
    Return the Lighthouse log directory (under the user's IDA directory).
    """
    user_dir = idaapi.get_user_idadir()
    return os.path.join(user_dir, "lighthouse_logs")
#------------------------------------------------------------------------------
# Logger Proxy
#------------------------------------------------------------------------------
class LoggerProxy(object):
    """
    Fake file-like stream object that redirects writes to a logger instance.

    Each write is mirrored to both the given logger (one log record per
    line) and the original underlying stream, so console output is still
    visible while also being captured in the log file.
    """

    def __init__(self, logger, stream, log_level=logging.INFO):
        self._logger = logger        # destination logger for mirrored writes
        self._log_level = log_level  # level used for every mirrored record
        self._stream = stream        # original stream (eg, sys.stdout)

    def write(self, buf):
        """
        Log each line of the buffer, then forward the raw buffer on.
        """
        for line in buf.rstrip().splitlines():
            self._logger.log(self._log_level, line.rstrip())
        self._stream.write(buf)

    def flush(self):
        """
        Flush the underlying stream.
        """
        # bugfix: this was a no-op, which could leave buffered console
        # output unwritten (eg, at interpreter exit)
        self._stream.flush()

    def isatty(self):
        """
        A log proxy is never an interactive terminal.
        """
        # bugfix: previously returned None implicitly; False is the
        # conventional (and equally falsy) answer for file-like objects
        return False
#------------------------------------------------------------------------------
# Initialize Logging
#------------------------------------------------------------------------------
# maximum number of log files to retain in the log directory
MAX_LOGS = 15

# module level logger; rebound (to the same named logger) by start_logging().
# binding it here avoids a NameError if this module logs before start_logging()
logger = logging.getLogger("Lighthouse")

def cleanup_log_directory(log_directory):
    """
    Delete all but the newest MAX_LOGS log files from the given directory.

    Best-effort: a failure to delete an individual log is logged and
    skipped rather than raised.
    """

    # build a list of (last modified time, filepath) for every file present.
    # bugfix: the original keyed a dict on mtime, so two logs sharing a
    # timestamp silently collapsed into one entry
    entries = []
    for log_name in os.listdir(log_directory):
        filepath = os.path.join(log_directory, log_name)
        if os.path.isfile(filepath):
            entries.append((os.path.getmtime(filepath), filepath))

    # nothing to do while we are below the retention limit
    if len(entries) < MAX_LOGS:
        return
    logger.debug("Cleaning logs directory")

    # sort newest-first, keep the newest MAX_LOGS, and target the rest.
    # (sorted list rather than dict.keys().sort(), which breaks on py3 views)
    entries.sort(reverse=True)
    stale_entries = entries[MAX_LOGS:]

    # loop through the stale (older) logs, and delete them
    for _, filepath in stale_entries:
        try:
            os.remove(filepath)
        except Exception as e:
            logger.error("Failed to delete log %s" % filepath)
            logger.error(e)
def start_logging():
    """
    Initialize the Lighthouse logging infrastructure and return its logger.

    Side effects: configures the logging module to write a per-PID log
    file, and replaces sys.stdout / sys.stderr with logging proxies.
    """
    global logger

    # create a directory for lighthouse logs if it does not exist.
    # bugfix: EAFP instead of exists()+makedirs(), which races when two
    # IDA sessions (as launched by the dev scripts) start simultaneously
    log_dir = get_log_dir()
    try:
        os.makedirs(log_dir)
    except OSError:
        if not os.path.isdir(log_dir):
            raise

    # construct the full log path, unique per-process
    log_path = os.path.join(log_dir, "lighthouse.%s.log" % os.getpid())

    # config the logger
    logging.basicConfig(
        filename=log_path,
        format='%(asctime)s | %(name)20s | %(levelname)7s: %(message)s',
        datefmt='%m-%d-%Y %H:%M:%S',
        level=logging.DEBUG
    )

    # create the Lighthouse logger
    logger = logging.getLogger("Lighthouse")

    # proxy STDOUT/STDERR to the log files too
    stdout_logger = logging.getLogger('Lighthouse.STDOUT')
    stderr_logger = logging.getLogger('Lighthouse.STDERR')
    sys.stdout = LoggerProxy(stdout_logger, sys.stdout, logging.INFO)
    sys.stderr = LoggerProxy(stderr_logger, sys.stderr, logging.ERROR)

    # limit the number of logs we keep
    cleanup_log_directory(log_dir)

    return logger
#------------------------------------------------------------------------------
# Log Helpers
#------------------------------------------------------------------------------
def log_config_warning(self, logger, section, field):
    """
    Log a warning about a missing config field via the given logger.

    NOTE(review): 'self' is unused -- this reads like a method hoisted to
    module scope; kept in the signature for caller compatibility.
    """
    # bugfix: the original format string was missing the closing quote
    # around the section name ("... in section '%s")
    logger.warning("Config missing field '%s' in section '%s'", field, section)
+49
View File
@@ -0,0 +1,49 @@
import idaapi
from idaapi import plugin_t

from lighthouse.util import start_logging, lmsg
logger = start_logging()
#------------------------------------------------------------------------------
# IDA Plugin
#------------------------------------------------------------------------------
def PLUGIN_ENTRY():
    """
    Required plugin entry point for IDAPython Plugins.

    IDA calls this once at plugin load time and expects a plugin_t
    instance to be returned.
    """
    return lighthouse_t()
class lighthouse_t(plugin_t):
    """
    The IDA Plugin for Lighthouse.
    """

    # PLUGIN_FIX keeps the plugin resident for the database lifetime;
    # PLUGIN_MOD marks it as a plugin that may modify the database
    flags = idaapi.PLUGIN_FIX | idaapi.PLUGIN_MOD
    comment = "Code Coverage Visualization"
    help = ""
    wanted_name = "Lighthouse"
    wanted_hotkey = ""

    def init(self):
        """
        This is called by IDA when it is loading the plugin.
        """
        lmsg("Hello World")
        logger.info("Successfully initialized")

        # keep the plugin loaded rather than unloading it after init
        return idaapi.PLUGIN_KEEP

    def run(self, arg):
        """
        This is called by IDA when this file is loaded as a script.

        Lighthouse should never be run as a script.
        """
        # bugfix: the original called an undefined 'msg' (NameError at
        # runtime); use the imported lmsg helper instead
        lmsg("Lighthouse cannot be loaded as a script")

    def term(self):
        """
        This is called by IDA when it is unloading the plugin.
        """
        logger.info("-"*70)
        logger.info("Lighthouse has terminated")