MOD: more cleanup of print statements, better exception handling, changed switch from "-dd" to "-ad", changed switch from "-pull" to "-android_pull"
parent 2c3ef39e24
commit 98720f214d

dbfp.py | 67
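The renamed switches can be exercised roughly as follows (a usage sketch only, assuming dbfp.py is invoked directly; paths are placeholders, and the authoritative behavior is what parseArgs() does in the diff below):

    python dbfp.py -ad /path/to/android/data    # was -dd: directory with android folder structure sqlite files
    python dbfp.py -android_pull                # was -pull: automated pull of apps from a physical android phone
    python dbfp.py -db some.db -v               # unchanged switch; -v now logs at INFO, the new -vv at DEBUG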
@@ -26,14 +26,12 @@ def compareFingerprint(file_in, file_json):
     print "Percent match: {}".format(str(percent))
 
 #
-def createFingerprint(file_in, verbose, app_name, app_ver, notes):
+def createFingerprint(file_in, app_name, app_ver, notes):
     print "Reading database file: {}".format(file_in)
     db = FingerprintDB()
     retVal = db.scanDBFile(file_in)
 
     if (retVal > 0):
-        if verbose:
-            db.debugFingerprint()
         if app_name:
             db.setAppName(app_name)
         if app_ver:
@@ -74,6 +72,8 @@ def compareFingerprintDir(file_in, fp_dir):
 
 #
 def androidPull():
+    fin_count = 0
+    print "Android pull started..."
     ap = android.AndroidAppPull()
     isRoot = ap.isADBRoot();
     if (not isRoot):
@@ -89,15 +89,21 @@ def androidPull():
 
     dir_names = ap.getAppsDir()
     for dir_name in dir_names:
         print "Processing directory [{}]".format(dir_name)
         ap.pullApp(BASE_DIR, dir_name)
         fq_dir = BASE_DIR + os.path.sep + dir_name
-        __createFingerprint(fq_dir, FP_BASE_DIR, dir_name)
+        count = __createFingerprint(fq_dir, FP_BASE_DIR, dir_name)
+        fin_count += count
+        print "Fingerprints created: {}".format(str(count))
+
+    print "\nTotal Fingerprints created: {}\n".format(str(fin_count))
+
 #
 def androidData(data_dir):
     dir_names = []
     try:
         dirs = os.listdir(data_dir)
+        print "Opening directory: [{}], [{} folders found]".format(data_dir, len(dirs))
     except Exception, ex:
         print "ERROR opening Android Data Directory (-dd): {}\n{}".format(data_dir, ex)
         return
@@ -107,29 +113,40 @@ def androidData(data_dir):
         print "ERROR creating directory: {}".format(FP_BASE_DIR)
         return
 
+    fin_count = 0
     for ddir in dirs:
         in_dir = data_dir + os.path.sep + ddir
-        __createFingerprint(in_dir, out_dir, ddir)
+        count = __createFingerprint(in_dir, out_dir, ddir)
+        fin_count += count
+
+    print "COMPLETED: created {} fingerprints\n".format(str(fin_count))
 
 # in_dir: fully qualified directory path to find sqlite files
 def __createFingerprint(in_dir, out_dir, dir_name):
+    fin_count = 0
     try:
         db_dir = in_dir + os.path.sep + "databases"
-        logging.info("in_dir=={}".format(db_dir))
+        #logging.info("in_dir=={}".format(db_dir))
         files = os.listdir(db_dir)
     except:
         # not finding a databases folder is normal, not all apps use sqlite
-        return
+        return fin_count
     for filein in files:
-        db = FingerprintDB()
-        ddir = db_dir + os.path.sep + filein
-        logging.info('Parsing file "{}"'.format(ddir))
-        retVal = db.scanDBFile(ddir)
-        if (retVal > 0):
-            fname = dir_name + "__" + filein + "__dbfp" + ".json"
-            fq_name = out_dir + os.path.sep + fname
-            db.setAppName(dir_name)
-            db.writeFingerprintFile(fq_name)
+        try:
+            db = FingerprintDB()
+            ddir = db_dir + os.path.sep + filein
+            logging.info('Parsing file "{}"'.format(ddir))
+            retVal = db.scanDBFile(ddir)
+            if (retVal > 0):
+                fname = dir_name + "__" + filein + "__dbfp" + ".json"
+                fq_name = out_dir + os.path.sep + fname
+                db.setAppName(dir_name)
+                db.writeFingerprintFile(fq_name)
+                fin_count += 1
+        except Exception as ex:
+            # log error, but move on in hopes of writing more fingerprints
+            logging.error(ex)
+    return fin_count
 
 #
 def __getFileName():
@@ -155,13 +172,15 @@ def parseArgs():
     parser.add_argument('-db', '--database', required=False, help="path to file to be fingerprinted")
     parser.add_argument('-fd', '--fpdir', required=False, help="path to directory of fingerprint files")
     parser.add_argument('-fp', '--fingerprint', required=False, help="fingerprint file to use in comparison")
-    parser.add_argument('-dd', '--data_dir', required=False, help="path to a directory with sqlite files")
+    parser.add_argument('-ad', '--android_dir', required=False, help="path to a directory with android folder structure sqlite files")
+    parser.add_argument('-dd', '--data_dir', required=False, help="path to a directory to search for sqlite files")
     parser.add_argument('-idx', '--index_fingerprints', required=False, help="path to a directory with sqlite files")
    parser.add_argument('-an', '--app_name', required=False)
    parser.add_argument('-av', '--app_version', required=False)
    parser.add_argument('-n', '--notes', required=False)
-    parser.add_argument('-pull', required=False, action='store_true', help="automated pull of applications from a physical android phone")
-    parser.add_argument('-v', '--verbose', action='store_true', help="will set logging level to DEBUG")
+    parser.add_argument('-android_pull', required=False, action='store_true', help="automated pull of applications from a physical android phone")
+    parser.add_argument('-v', '--verbose', action='store_true', help="will set logging level to INFO")
+    parser.add_argument('-vv', '--vverbose', action='store_true', help="will set logging level to DEBUG")
     parser.add_argument('-l', '--logging', action='store_true', help="will supercede the -v option and send all logging to a file, logging.DEBUG")
     # parser.add_argument('-t', '--title', required=False)
     args = parser.parse_args()
@@ -170,6 +189,8 @@ def parseArgs():
         logging.basicConfig(filename='dbfp.log', level=logging.DEBUG)
 
     if (args.verbose):
+        logging.basicConfig(level=logging.INFO)
+    elif (args.vverbose):
         logging.basicConfig(level=logging.DEBUG)
     else:
         logging.basicConfig(level=logging.CRITICAL)
@@ -178,14 +199,14 @@ def parseArgs():
         compareFingerprint(args.database, args.fingerprint)
     elif (args.database and args.fpdir):
         compareFingerprintDir(args.database, args.fpdir)
-    elif (args.data_dir):
-        androidData(args.data_dir)
+    elif (args.android_dir):
+        androidData(args.android_dir)
     elif (args.index_fingerprints):
         indexFingerprints(args.index_fingerprints)
-    elif (args.pull):
+    elif (args.android_pull):
         androidPull()
     elif (args.database):
-        createFingerprint(args.database, args.verbose, args.app_name, args.app_version, args.notes)
+        createFingerprint(args.database, args.app_name, args.app_version, args.notes)
     else:
         parser.print_help()
 
@@ -61,7 +61,7 @@ class FingerprintDB:
         try:
             (self.conn, self.cur) = self.__openDB(filein)
         except Exception, ex:
-            logging.error(ex)
+            logging.info(ex)
             return -2
 
         try:
@@ -193,7 +193,7 @@ class FingerprintDB:
         all_total = 0
         for tableName in fp.tables.keys():
             try:
-                print "[[ Comparing Table: " + tableName + " ]]"
+                logging.info("[[ Comparing Table: " + tableName + " ]]")
                 table = self.tables[tableName]
                 if (table):
                     if not (fp.tables[tableName].hash() == table.hash()):
@@ -212,7 +212,7 @@ class FingerprintDB:
                 diff_total += 10
                 logging.info("Table {} not EXISTS (to json fingerprint)".format(tableName))
 
-        print "diff_total=={}, all_total=={}".format(diff_total, all_total)
+        logging.info("diff_total=={}, all_total=={}".format(diff_total, all_total))
 
         if (diff_total > 0):
             if (diff_total == all_total):
@@ -395,7 +395,7 @@ class TableSchema:
             m.update(self.sqlStr)
             self.sqlStrHash = m.hexdigest()
         except:
-            print 'WARN: problem hashing sql string: "{}"'.format(self.sqlStr)
+            logging.warn('WARN: problem hashing sql string: "{}"'.format(self.sqlStr))
         # parse the create string into a structured hash table
         results = re.search(self.tableschemaregex, sqlStr)
         if results:
@@ -538,10 +538,10 @@ class TableSchema:
                 return newField
             return False
 
-            print 'WARN: field definition not recognized: "{}"'.format(sqltext)
+            logging.warn('WARN: field definition not recognized: "{}"'.format(sqltext))
         except Exception, e:
-            print 'WARN: problem parsing sql create text: "{}"'.format(sqltext)
-            print 'Exception: \n{}'.format(e)
+            logging.warn('WARN: problem parsing sql create text: "{}"'.format(sqltext))
+            logging.warn('Exception: \n{}'.format(e))
             return None
 
         return None
@@ -133,7 +133,7 @@ class FingerprintIndex:
             if not re.search(r'.*\.json', file):
                 naCount = naCount+1
                 pass
-            print file
+            #print file
             fq_file = fp_dir + os.path.sep + file
             db.importJson(fq_file)
             self.__insertMod_md5_all(db.db_hash, db.table_hashes.values(), file)