diff --git a/scripts/artifacts/waze.py b/scripts/artifacts/waze.py index 4193231a..0ddd6839 100644 --- a/scripts/artifacts/waze.py +++ b/scripts/artifacts/waze.py @@ -1,31 +1,129 @@ +#'*/mobile/Containers/Data/Application/*/.com.apple.mobile_container_manager.metadata.plist' unused __artifacts_v2__ = { - "waze": { - "name": "Waze", - "description": "Get account, session, searched locations, recent locations, favorite locations, " - "share locations, text-to-speech navigation and track GPS quality.", + "get_waze_account": { + "name": "Waze - Account", + "description": "Get Waze account information.", "author": "Django Faiola (djangofaiola.blogspot.com @DjangoFaiola)", - "version": "0.1.2", - "date": "2024-02-02", + "creation_date": "2024-02-02", + "last_update_date": "2025-11-20", "requirements": "none", "category": "Waze", "notes": "", - "paths": ('*/mobile/Containers/Data/Application/*/Documents/user.db*', - '*/mobile/Containers/Data/Application/*/.com.apple.mobile_container_manager.metadata.plist'), - "function": "get_waze" + "paths": ('*/mobile/Containers/Data/Application/*/Documents/user',), + "output_types": "standard", + "artifact_icon": "user" + }, + "get_waze_session": { + "name": "Waze - Session", + "description": "Get Waze session information.", + "author": "Django Faiola (djangofaiola.blogspot.com @DjangoFaiola)", + "creation_date": "2024-02-02", + "last_update_date": "2025-11-20", + "requirements": "none", + "category": "Waze", + "notes": "", + "paths": ('*/mobile/Containers/Data/Application/*/Documents/session',), + "output_types": "standard", + "artifact_icon": "globe" + }, + "get_waze_tts": { + "name": "Waze - TTS Navigation", + "description": "Get Waze text-to-speech navigation entries.", + "author": "Django Faiola (djangofaiola.blogspot.com @DjangoFaiola)", + "creation_date": "2024-02-02", + "last_update_date": "2025-11-20", + "requirements": "none", + "category": "Waze", + "notes": "", + "paths": 
('*/mobile/Containers/Data/Application/*/Library/Caches/tts/tts.db*',), + "output_types": "standard", + "artifact_icon": "volume-2" + }, + "get_waze_gps_quality": { + "name": "Waze - GPS Quality", + "description": "Get Waze GPS quality track logs.", + "author": "Django Faiola (djangofaiola.blogspot.com @DjangoFaiola)", + "creation_date": "2024-02-02", + "last_update_date": "2025-11-20", + "requirements": "none", + "category": "Waze", + "notes": "", + "paths": ( + '*/mobile/Containers/Data/Application/*/Documents/spdlog*.logdata', + '**/Documents/spdlog*.logdata', + ), + "output_types": "standard", + "artifact_icon": "map" + }, + "get_waze_searched_locations": { + "name": "Waze - Searched Locations", + "description": "Get Waze searched locations from user.db.", + "author": "Django Faiola (djangofaiola.blogspot.com @DjangoFaiola)", + "creation_date": "2024-02-02", + "last_update_date": "2025-11-20", + "requirements": "none", + "category": "Waze", + "notes": "", + "paths": ('*/mobile/Containers/Data/Application/*/Documents/user.db*',), + "output_types": "standard", + "artifact_icon": "search" + }, + "get_waze_recent_locations": { + "name": "Waze - Recent Locations", + "description": "Get Waze recent locations from user.db.", + "author": "Django Faiola (djangofaiola.blogspot.com @DjangoFaiola)", + "creation_date": "2024-02-02", + "last_update_date": "2025-11-20", + "requirements": "none", + "category": "Waze", + "notes": "", + "paths": ('*/mobile/Containers/Data/Application/*/Documents/user.db*',), + "output_types": "standard", + "artifact_icon": "clock" + }, + "get_waze_favorite_locations": { + "name": "Waze - Favorite Locations", + "description": "Get Waze favorite locations from user.db.", + "author": "Django Faiola (djangofaiola.blogspot.com @DjangoFaiola)", + "creation_date": "2024-02-02", + "last_update_date": "2025-11-20", + "requirements": "none", + "category": "Waze", + "notes": "", + "paths": ('*/mobile/Containers/Data/Application/*/Documents/user.db*',), + 
"output_types": "standard", + "artifact_icon": "star" + }, + "get_waze_shared_locations": { + "name": "Waze - Shared Locations", + "description": "Get Waze shared locations from user.db.", + "author": "Django Faiola (djangofaiola.blogspot.com @DjangoFaiola)", + "creation_date": "2024-02-02", + "last_update_date": "2025-11-20", + "requirements": "none", + "category": "Waze", + "notes": "", + "paths": ('*/mobile/Containers/Data/Application/*/Documents/user.db*',), + "output_types": "standard", + "artifact_icon": "search-2" } } import os import re -import plistlib import pathlib -import shutil import sqlite3 -import textwrap -import datetime -from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, tsv, timeline, kmlgen, open_sqlite_db_readonly, convert_ts_int_to_utc, convert_utc_human_to_timezone + +from scripts.ilapfuncs import ( + logfunc, + open_sqlite_db_readonly, + convert_unix_ts_to_utc, + artifact_processor, + get_file_path, + get_sqlite_db_records + ) + # format location def FormatLocation(location, value, tableName, key): @@ -44,570 +142,477 @@ def FormatLocation(location, value, tableName, key): return location + newLocation -def FormatTimestamp(utc, timezone_offset): - if not bool(utc) or (utc == None): - return '' - else: - timestamp = convert_ts_int_to_utc(int(float(utc))) - return convert_utc_human_to_timezone(timestamp, timezone_offset) - - -# account -def get_account(file_found, report_folder, timezone_offset): +@artifact_processor +def get_waze_account(context): + files_found = context.get_files_found() data_list = [] - - f = open(file_found, "r", encoding="utf-8") + source_path = get_file_path(files_found, 'user') + if not source_path: + logfunc('No waze "user" file found') + return (), [], '' + data_headers = ( + 'First name', + 'Last name', + 'User name', + 'Nickname', + ('First launched', 'datetime') + ) + row = [None] * 5 + patternFirstName = 'Realtime.FirstName:' + patternLastName = 'Realtime.LastName:' + 
patternUserName = 'Realtime.Name:' + patternNickname = 'Realtime.Nickname:' + patternFirstLaunched = 'General.Last upgrade time:' + sep = ': ' try: - row = [ None ] * 5 - patternFirstName = 'Realtime.FirstName:' - patternLastName = 'Realtime.LastName:' - patternUserName = 'Realtime.Name:' - patternNickname = 'Realtime.Nickname:' - patternFirstLaunched = 'General.Last upgrade time:' - sep = ': ' - - data = f.readlines() - for line in data: - root = line.split('.', 1)[0] - if not root in ( 'Realtime', 'General' ): - continue - - # first name - if line.startswith(patternFirstName): - row[0] = line.split(sep, 1)[1] - # last name - elif line.startswith(patternLastName): - row[1] = line.split(sep, 1)[1] - # user name - elif line.startswith(patternUserName): - row[2] = line.split(sep, 1)[1] - # nickname - elif line.startswith(patternNickname): - row[3] = line.split(sep, 1)[1] - # first launched - elif line.startswith(patternFirstLaunched): - timestamp = line.split(sep, 1)[1] - row[4] = FormatTimestamp(timestamp, timezone_offset) - + with open(source_path, "r", encoding="utf-8") as f: + for line in f: + root = line.split('.', 1)[0] + if not root in ('Realtime', 'General'): + continue + # first name + if line.startswith(patternFirstName): + row[0] = line.split(sep, 1)[1] + # last name + elif line.startswith(patternLastName): + row[1] = line.split(sep, 1)[1] + # user name + elif line.startswith(patternUserName): + row[2] = line.split(sep, 1)[1] + # nickname + elif line.startswith(patternNickname): + row[3] = line.split(sep, 1)[1] + # first launched + elif line.startswith(patternFirstLaunched): + timestamp = line.split(sep, 1)[1] + row[4] = convert_unix_ts_to_utc(int(float(timestamp))) # row if row.count(None) != len(row): data_list.append((row[0], row[1], row[2], row[3], row[4])) - - finally: - f.close() - - if len(data_list) > 0: - report = ArtifactHtmlReport('Waze Account') - report.start_artifact_report(report_folder, 'Waze Account') - report.add_script() - data_headers = 
('First name', 'Last name', 'User name', 'Nickname', 'First launched') - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Waze Account' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Waze Account' - timeline(report_folder, tlactivity, data_list, data_headers) - else: + except Exception as e: + logfunc(f'Error reading Waze "user" file: {e}') + if not data_list: logfunc('No Waze Account data available') + return data_headers, data_list, source_path -# session -def get_session(file_found, report_folder, timezone_offset): +@artifact_processor +def get_waze_session(context): + files_found = context.get_files_found() data_list = [] - - f = open(file_found, "r", encoding="utf-8") + source_path = get_file_path(files_found, 'session') + if not source_path: + logfunc('No Waze "session" file found.') + return (), [], '' + data_headers = ( + ('Last synced', 'datetime'), + 'Last position', + 'Last navigation coordinates', + 'Last navigation destination', + 'State', + 'City', + 'Street', + 'House' + ) + row = [None] * 8 + patternLastSynced = 'Config.Last synced:' + patternGPSPosition = 'GPS.Position:' + patternLastPosition = 'Navigation.Last position:' + patternLastDestName = 'Navigation.Last dest name:' + patternLastDestState = 'Navigation.Last dest state:' + patternLastDestCity = 'Navigation.Last dest city:' + patternLastDestStreet = 'Navigation.Last dest street:' + patternLastDestHouse = 'Navigation.Last dest number:' + sep = ': ' try: - row = [ None ] * 8 - patternLastSynced = 'Config.Last synced:' - patternGPSPosition = 'GPS.Position:' - patternLastPosition = 'Navigation.Last position:' - patternLastDestName = 'Navigation.Last dest name:' - patternLastDestState = 'Navigation.Last dest state:' - patternLastDestCity = 'Navigation.Last dest city:' - patternLastDestStreet = 'Navigation.Last dest street:' - patternLastDestHouse = 'Navigation.Last dest number:' - sep = ': ' - - data = 
f.readlines() - for line in data: - root = line.split('.', 1)[0] - if not root in ( 'Config', 'GPS', 'Navigation' ): - continue - - # Last synced (ms) - if line.startswith(patternLastSynced): - timestamp = int(float(line.split(sep, 1)[1]) / 1000) - row[0] = FormatTimestamp(timestamp, timezone_offset) - # last position - elif line.startswith(patternGPSPosition): - coordinates = line.split(sep, 1)[1].split(',') # lon,lat - row[1] = f'{float(coordinates[1]) / 1000000},{float(coordinates[0]) / 1000000}' - # last navigation coordinates - elif line.startswith(patternLastPosition): - coordinates = line.split(sep, 1)[1].split(',') # lon,lat - row[2] = f'{float(coordinates[1]) / 1000000},{float(coordinates[0]) / 1000000}' - # last navigation destination - elif line.startswith(patternLastDestName): - row[3] = line.split(sep, 1)[1] - # state - elif line.startswith(patternLastDestState): - row[4] = line.split(sep, 1)[1] - # city - elif line.startswith(patternLastDestCity): - row[5] = line.split(sep, 1)[1] - # street - elif line.startswith(patternLastDestStreet): - row[6] = line.split(sep, 1)[1] - # house - elif line.startswith(patternLastDestHouse): - row[7] = line.split(sep, 1)[1] - - # row - if row.count(None) != len(row): - data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7])) - - finally: - f.close() - - if len(data_list) > 0: - report = ArtifactHtmlReport('Waze Session info') - report.start_artifact_report(report_folder, 'Waze Session info') - report.add_script() - data_headers = ('Last synced', 'Last position', 'Last navigation coordinates', 'Last navigation destination', 'State', 'City', 'Street', 'House') - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Waze Session info' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Waze Session info' - timeline(report_folder, tlactivity, data_list, data_headers) + with open(source_path, "r", 
encoding="utf-8") as f: +            for line in f: +                root = line.split('.', 1)[0] +                if not root in ('Config', 'GPS', 'Navigation'): +                    continue +                # Last synced (ms) +                if line.startswith(patternLastSynced): +                    timestamp = int(float(line.split(sep, 1)[1]) / 1000) +                    row[0] = convert_unix_ts_to_utc(timestamp) +                # last position +                elif line.startswith(patternGPSPosition): +                    coordinates = line.split(sep, 1)[1].split(',')    # lon,lat +                    row[1] = f'{float(coordinates[1]) / 1000000},{float(coordinates[0]) / 1000000}' +                # last navigation coordinates +                elif line.startswith(patternLastPosition): +                    coordinates = line.split(sep, 1)[1].split(',')    # lon,lat +                    row[2] = f'{float(coordinates[1]) / 1000000},{float(coordinates[0]) / 1000000}' +                # last navigation destination +                elif line.startswith(patternLastDestName): +                    row[3] = line.split(sep, 1)[1] +                # state +                elif line.startswith(patternLastDestState): +                    row[4] = line.split(sep, 1)[1] +                # city +                elif line.startswith(patternLastDestCity): +                    row[5] = line.split(sep, 1)[1] +                # street +                elif line.startswith(patternLastDestStreet): +                    row[6] = line.split(sep, 1)[1] +                # house +                elif line.startswith(patternLastDestHouse): +                    row[7] = line.split(sep, 1)[1] +                # row +                if row.count(None) != len(row): +                    data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7])) +                    row = [None] * 8 +    except Exception as e: +        logfunc(f'Error reading Waze "session" file: {e}') +    if not data_list: +        logfunc('No Waze Session info data available') +    return data_headers, data_list, source_path + + +@artifact_processor +def get_waze_tts(context): +    files_found = context.get_files_found() +    data_list = [] +    source_path = get_file_path(files_found, 'tts.db') +    if not source_path: +        logfunc('Waze tts.db not found') +        return (), [], '' +    data_headers = ( +        ('Timestamp', 'datetime'), +        'Text', +        'Location' +    ) +    try: +        db = open_sqlite_db_readonly(source_path) +        # list tables +        cursor = db.execute(f"SELECT name FROM sqlite_master WHERE type='table'") +        all_tables = 
cursor.fetchall() + if len(all_tables) == 0: + logfunc('No Waze Text-To-Speech navigation data available') + return data_headers, [], source_path + for table in all_tables: + table_name = table[0] + cursor = db.cursor() + query = f''' + SELECT + rowid, + update_time, + text + FROM "{table_name}" + ''' + cursor.execute(query) + all_rows = cursor.fetchall() + for row in all_rows: + location = FormatLocation('', str(row[0]), table_name, 'rowid') + timestamp = convert_unix_ts_to_utc(int(float(row[1]))) + data_list.append((timestamp, row[2], location)) + db.close() + except sqlite3.Error as e: + logfunc(f"Error in Waze TTS database: {e}") + if not data_list: + logfunc('No Waze text-to-speech navigation data available') + return data_headers, data_list, source_path + + +@artifact_processor +def get_waze_gps_quality(context): + files_found = context.get_files_found() + data_list = [] + source_files = [] + + data_headers = ( + ('Timestamp', 'datetime'), + 'Latitude', + 'Longitude', + 'Sample count (bad)', + 'Average accuracy (min-max)', + 'Provider', + 'Location' + ) + + if not files_found: + logfunc('No Waze GPS logs found with the provided paths.') + return (), [], '' + + for file_found in files_found: + file_found = str(file_found) + file_name = os.path.basename(file_found) + + if not (file_name.startswith('spdlog') and file_name.endswith('.logdata')): + continue + + try: + with open(file_found, "r", encoding="utf-8", errors="ignore") as f: + row = [None] * 6 + hit_count = 0 + line_count = 0 + + line_filter = re.compile(r'STAT\(buffer#[\d]{1,2}\)\sGPS_QUALITY\s') + values_filter = re.compile(r'(?<=\{)(.*?)(?=\})') + + for line in f: + line_count += 1 + + if not line_filter.search(line): + continue + + hit_count += 1 + location = FormatLocation('', str(line_count), file_name, 'row') + values_iter = values_filter.finditer(line) + + row = [None] * 6 + + for kv in values_iter: + kv_split = kv.group().split('=', 1) + if len(kv_split) < 2: continue + + key = kv_split[0] + val 
= kv_split[1] + +                        if key == 'TIMESTAMP': +                            try: +                                row[0] = convert_unix_ts_to_utc(int(float(val))) +                            except ValueError: +                                row[0] = val +                        elif key == 'LAT': +                            try: row[1] = float(val) / 1000000 +                            except: row[1] = val +                        elif key == 'LON': +                            try: row[2] = float(val) / 1000000 +                            except: row[2] = val +                        elif key == 'SAMPLE_COUNT': +                            row[3] = val +                        elif key == 'BAD_SAMPLE_COUNT': +                            if row[3]: row[3] += f' ({val})' +                            else: row[3] = f'({val})' +                        elif key == 'ACC_AVG': +                            row[4] = val +                        elif key == 'ACC_MIN': +                            if row[4]: row[4] += f' ({val}-' +                            else: row[4] = f'({val}-' +                        elif key == 'ACC_MAX': +                            if row[4]: row[4] += f'{val})' +                            else: row[4] = f'(??-{val})' +                        elif key == 'PROVIDER': +                            row[5] = val + +                    if row[0] is not None: +                        data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], location)) + +                if hit_count > 0: +                    source_files.append(file_found) + +        except Exception as e: +            logfunc(f'Error reading Waze GPS log {file_name}: {e}') + +    if not data_list: +        logfunc('No Waze Track GPS quality data available in the processed files.') +        return data_headers, [], '' + +    return data_headers, data_list, ', '.join(source_files) + + +@artifact_processor +def get_waze_searched_locations(context): +    files_found = context.get_files_found() +    source_path = get_file_path(files_found, 'user.db') +    if not source_path: +        logfunc('Waze user.db not found') +        return (), [], '' +    query = ''' +    SELECT +        P.id, +        P.created_time, +        P.name, +        P.street, +        P.house, +        P.state, +        P.city, +        P.country, +        CAST((CAST(P.latitude AS REAL) / 1000000) AS TEXT) || "," || CAST((CAST(P.longitude AS REAL) / 1000000) AS TEXT) AS "coordinates" +    FROM PLACES AS "P" +    ''' + +    all_rows = get_sqlite_db_records(source_path, query) +    data_list = [] +    data_headers = ( +        ('Created', 'datetime'), +        'Name', +        'Street', +        'House', +        'State', +        'City', +        'Country', +        'Coordinates', +        'Location' +    ) +    if all_rows: +        for row in all_rows: +            # P.id +            location = FormatLocation('', str(row[0]), 'PLACES', 'id') +            # created +            created = 
convert_unix_ts_to_utc(int(float(row[1]))) + # row + data_list.append(( + created, + row[2], + row[3], + row[4], + row[5], + row[6], + row[7], + row[8], + location + )) else: - logfunc('No Waze Session info data available') + logfunc('No Waze Searched locations data available') + return data_headers, data_list, source_path -# recent locations -def get_recent_locations(file_found, report_folder, database, timezone_offset): - cursor = database.cursor() - cursor.execute(''' - SELECT +@artifact_processor +def get_waze_recent_locations(context): + files_found = context.get_files_found() + data_list = [] + source_path = get_file_path(files_found, 'user.db') + if not source_path: + logfunc('Waze user.db not found') + return (), [], '' + query = ''' + SELECT R.id, P.id, R.access_time, R.name AS "name", CAST((CAST(P.latitude AS REAL) / 1000000) AS TEXT) || "," || CAST((CAST(P.longitude AS REAL) / 1000000) AS TEXT) AS "coordinates", - R.created_time + R.created_time FROM RECENTS AS "R" LEFT JOIN PLACES AS "P" ON (R.place_id = P.id) - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Waze Recent locations') - report.start_artifact_report(report_folder, 'Waze Recent locations') - report.add_script() - data_headers = ('Last access', 'Name', 'Coordinates', 'Created', 'Location') - data_list = [] + ''' + all_rows = get_sqlite_db_records(source_path, query) + data_headers = ( + ('Last access', 'datetime'), + 'Name', + 'Coordinates', + ('Created', 'datetime'), + 'Location' + ) + if all_rows: for row in all_rows: # R.id location = FormatLocation('', str(row[0]), 'RECENTS', 'id') - # P.id location = FormatLocation(location, str(row[1]), 'PLACES', 'id') - # last access - lastAccess = FormatTimestamp(row[2], timezone_offset) - + lastAccess = convert_unix_ts_to_utc(row[2]) # created - created = FormatTimestamp(row[5], timezone_offset) - + created = convert_unix_ts_to_utc(row[5]) # row data_list.append((lastAccess, 
row[3], row[4], created, location)) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Waze Recent locations' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Waze Recent locations' - timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc('No Waze Recent locations data available') + return data_headers, data_list, source_path -# favorite locations -def get_favorite_locations(file_found, report_folder, database, timezone_offset): - cursor = database.cursor() - cursor.execute(''' - SELECT - F.id, - P.id, +@artifact_processor +def get_waze_favorite_locations(context): + files_found = context.get_files_found() + data_list = [] + source_path = get_file_path(files_found, 'user.db') + if not source_path: + logfunc('Waze user.db not found') + return (), [], '' + query = ''' + SELECT + F.id, + P.id, F.access_time, - F.name AS "name", - CAST((CAST(P.latitude AS REAL) / 1000000) AS TEXT) || "," || CAST((CAST(P.longitude AS REAL) / 1000000) AS TEXT) AS "coordinates", - F.created_time, - F.modified_time + F.name AS "name", + CAST((CAST(P.latitude AS REAL) / 1000000) AS TEXT) || "," || CAST((CAST(P.longitude AS REAL) / 1000000) AS TEXT) AS "coordinates", + F.created_time, + F.modified_time FROM FAVORITES AS "F" LEFT JOIN PLACES AS "P" ON (F.place_id = P.id) - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Waze Favorite locations') - report.start_artifact_report(report_folder, 'Waze Favorite locations') - report.add_script() - data_headers = ('Last access', 'Name', 'Coordinates', 'Created', 'Modified', 'Location') - data_list = [] + ''' + all_rows = get_sqlite_db_records(source_path, query) + data_headers = ( + ('Last access', 'datetime'), + 'Name', + 'Coordinates', + ('Created', 'datetime'), + ('Modified', 'datetime'), + 'Location' + ) + if all_rows: for row in all_rows: # F.id location = 
FormatLocation('', str(row[0]), 'FAVORITES', 'id') - # P.id location = FormatLocation(location, str(row[1]), 'PLACES', 'id') - # last access - lastAccess = FormatTimestamp(row[2], timezone_offset) - - # created - created = FormatTimestamp(row[5], timezone_offset) - + lastAccess = convert_unix_ts_to_utc(row[2]) + created = convert_unix_ts_to_utc(row[5]) # modified - modified = FormatTimestamp(row[6], timezone_offset) - + modified = convert_unix_ts_to_utc(row[6]) # row data_list.append((lastAccess, row[3], row[4], created, modified, location)) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Waze Favorite locations' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Waze Favorite locations' - timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc('No Waze Favorite locations data available') + return data_headers, data_list, source_path -# shared locations -def get_shared_locations(file_found, report_folder, database, timezone_offset): - cursor = database.cursor() - cursor.execute(''' - SELECT - SP.id, - P.id, +@artifact_processor +def get_waze_shared_locations(context): + files_found = context.get_files_found() + data_list = [] + source_path = get_file_path(files_found, 'user.db') + if not source_path: + logfunc('Waze user.db not found') + return (), [], '' + query = ''' + SELECT + SP.id, + P.id, SP.share_time, - SP.name AS "name", - CAST((CAST(P.latitude AS REAL) / 1000000) AS TEXT) || "," || CAST((CAST(P.longitude AS REAL) / 1000000) AS TEXT) AS "coordinates", - SP.created_time, - SP.modified_time, + SP.name AS "name", + CAST((CAST(P.latitude AS REAL) / 1000000) AS TEXT) || "," || CAST((CAST(P.longitude AS REAL) / 1000000) AS TEXT) AS "coordinates", + SP.created_time, + SP.modified_time, SP.access_time FROM SHARED_PLACES AS "SP" - LEFT JOIN PLACES AS "P" ON (SP.place_id = P.id) - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if 
usageentries > 0: - report = ArtifactHtmlReport('Waze Shared locations') - report.start_artifact_report(report_folder, 'Waze Shared locations') - report.add_script() - data_headers = ('Shared', 'Name', 'Coordinates', 'Created', 'Modified', 'Last access', 'Location') - data_list = [] + LEFT JOIN PLACES AS "P" ON (SP.place_id = P.id) + ''' + all_rows = get_sqlite_db_records(source_path, query) + data_headers = ( + ('Shared', 'datetime'), + 'Name', + 'Coordinates', + ('Created', 'datetime'), + ('Modified', 'datetime'), + ('Last access', 'datetime'), + 'Location' + ) + if all_rows: for row in all_rows: # SP.id location = FormatLocation('', str(row[0]), 'SHARED_PLACES', 'id') - # P.id location = FormatLocation(location, str(row[1]), 'PLACES', 'id') - # shared - shared = FormatTimestamp(row[2], timezone_offset) - - # created - created = FormatTimestamp(row[5], timezone_offset) - + shared = convert_unix_ts_to_utc(row[2]) + created = convert_unix_ts_to_utc(row[5]) # modified - modified = FormatTimestamp(row[6], timezone_offset) - + modified = convert_unix_ts_to_utc(row[6]) # last access - lastAccess = FormatTimestamp(row[7], timezone_offset) - + lastAccess = convert_unix_ts_to_utc(row[7]) # row - data_list.append((shared, row[3], row[4], created, modified, lastAccess, location)) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Waze Shared locations' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Waze Shared locations' - timeline(report_folder, tlactivity, data_list, data_headers) + data_list.append(( + shared, + row[3], + row[4], + created, + modified, + lastAccess, + location + )) else: logfunc('No Waze Shared locations data available') - - -# searched locations -def get_searched_locations(file_found, report_folder, database, timezone_offset): - cursor = database.cursor() - cursor.execute(''' - SELECT - P.id, - P.created_time, - P.name, - P.street, - P.house, - P.state, - 
P.city, - P.country, - CAST((CAST(P.latitude AS REAL) / 1000000) AS TEXT) || "," || CAST((CAST(P.longitude AS REAL) / 1000000) AS TEXT) AS "coordinates" - FROM PLACES AS "P" - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Waze Searched locations') - report.start_artifact_report(report_folder, 'Waze Searched locations') - report.add_script() - data_headers = ('Created', 'Name', 'Street', 'House', 'State', 'City', 'Country', 'Coordinates', 'Location') - data_list = [] - for row in all_rows: - # P.id - location = FormatLocation('', str(row[0]), 'PLACES', 'id') - - # created - created = FormatTimestamp(row[1], timezone_offset) - - # row - data_list.append((created, row[2], row[3], row[4], row[5], row[6], row[7], row[8], location)) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Waze Searched locations' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Waze Searched locations' - timeline(report_folder, tlactivity, data_list, data_headers) - else: - logfunc('No Waze Searched locations data available') - - -# text-to-speech navigation -def get_tts(file_found, report_folder, timezone_offset): - db = open_sqlite_db_readonly(file_found) - try: - # list tables - cursor = db.execute(f"SELECT name FROM sqlite_master WHERE type='table'") - all_tables = cursor.fetchall() - if len(all_tables) == 0: - logfunc('No Waze Text-To-Speech navigation data available') - return - - for table in all_tables: - table_name = table[0] - cursor = db.cursor() - cursor.execute(''' - SELECT - rowid, - update_time, - text - FROM {0} - '''.format(table_name)) - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Waze Text-To-Speech navigation') - report.start_artifact_report(report_folder, 'Waze Text-To-Speech navigation') - report.add_script() - data_headers = 
('Timestamp', 'Text', 'Location') - data_list = [] - for row in all_rows: - # rowid - location = FormatLocation('', str(row[0]), table_name, 'rowid') - - # timestamp - timestamp = FormatTimestamp(row[1], timezone_offset) - - # row - data_list.append((timestamp, row[2], location)) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Waze Text-To-Speech navigation' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Waze Text-To-Speech navigation' - timeline(report_folder, tlactivity, data_list, data_headers) - else: - logfunc('No Waze Text-To-Speech navigation data available') - finally: - db.close() - - -# track gps quality -def get_gps_quality(files_found, report_folder, timezone_offset): - data_list = [] - source_files = [] - - for file_found in files_found: - file_found = str(file_found) - file_name = pathlib.Path(file_found).name - - if not (file_name.startswith('spdlog') and file_name.endswith('.logdata')): - continue - - f = open(file_found, "r", encoding="utf-8") - try: - row = [ None ] * 6 - hit_count = 0 - line_count = 0 - line_filter = re.compile(r'STAT\(buffer#[\d]{1,2}\)\sGPS_QUALITY\s') - values_filter = re.compile(r'(?<=\{)(.*?)(?=\})') - - data = f.readlines() - for line in data: - line_count += 1 - - # gps quality - if not re.search(line_filter, line): - continue - - hit_count += 1 - location = FormatLocation('', str(line_count), file_name, 'row') - - values_iter = re.finditer(values_filter, line) - for kv in values_iter: - kv_split = kv.group().split('=', 1) - - # timestamp - if kv_split[0] == 'TIMESTAMP': - row[0] = FormatTimestamp(kv_split[1], timezone_offset) - - # latitude - elif kv_split[0] == 'LAT': - row[1] = float(kv_split[1]) / 1000000 - - # longitude - elif kv_split[0] == 'LON': - row[2] = float(kv_split[1]) / 1000000 - - # sample count - elif kv_split[0] == 'SAMPLE_COUNT': - row[3] = kv_split[1] - - # bad sample count - elif kv_split[0] == 
'BAD_SAMPLE_COUNT': - row[3] += ' (' + kv_split[1] + ')' - - # accuracy "avg (min-max)" - elif kv_split[0] == 'ACC_AVG': - row[4] = kv_split[1] - - # accuracy "avg (min-max)" - elif kv_split[0] == 'ACC_MIN': - row[4] += ' (' + kv_split[1] + '-' - - # accuracy "avg (min-max)" - elif kv_split[0] == 'ACC_MAX': - row[4] += kv_split[1] + ')' - - # provider - elif kv_split[0] == 'PROVIDER': - row[5] = kv_split[1] - - # row - if row.count(None) != len(row): - data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], location)) - - if hit_count > 0: - if file_found.startswith('\\\\?\\'): - source_files.append(file_found[4:]) - else: - source_files.append(file_found) - finally: - f.close() - - if len(data_list) > 0: - report = ArtifactHtmlReport('Waze Track GPS quality') - report.start_artifact_report(report_folder, 'Waze Track GPS quality') - report.add_script() - data_headers = ('Timestamp', 'Latitude', 'Longitude', 'Sample count (bad)', 'Average accuracy (min-max)', 'Provider', 'Location') - - report.write_artifact_data_table(data_headers, data_list, ', '.join(source_files)) - report.end_artifact_report() - - tsvname = f'Waze Track GPS quality' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Waze Track GPS quality' - timeline(report_folder, tlactivity, data_list, data_headers) - - kmlactivity = 'Waze Track GPS quality' - kmlgen(report_folder, kmlactivity, data_list, data_headers) - else: - logfunc('No Waze Track GPS quality data available') - - -# waze -def get_waze(files_found, report_folder, seeker, wrap_text, timezone_offset): - #datos = seeker.search('**/*com.apple.mobile_container_manager.metadata.plist') - for file_foundm in files_found: - if file_foundm.endswith('.com.apple.mobile_container_manager.metadata.plist'): - with open(file_foundm, 'rb') as f: - pl = plistlib.load(f) - if pl['MCMMetadataIdentifier'] == 'com.waze.iphone': - fulldir = (os.path.dirname(file_foundm)) - identifier = (os.path.basename(fulldir)) - - # user - 
path_list = seeker.search(f'*/{identifier}/Documents/user', True) - if len(path_list) > 0: - get_account(path_list, report_folder, timezone_offset) - - # session - path_list = seeker.search(f'*/{identifier}/Documents/session', True) - if len(path_list) > 0: - get_session(path_list, report_folder, timezone_offset) - - # tts.db - path_list = seeker.search(f'*/{identifier}/Library/Caches/tts/tts.db', True) - if len(path_list) > 0: - get_tts(path_list, report_folder, timezone_offset) - - # spdlog.*logdata - path_list = seeker.search(f'*/{identifier}/Documents/spdlog.*logdata') - if len(path_list) > 0: - get_gps_quality(path_list, report_folder, timezone_offset) - - break - - for file_found in files_found: - # user.db - if file_found.endswith('user.db'): - db = open_sqlite_db_readonly(file_found) - try: - # searched locations - get_searched_locations(file_found, report_folder, db, timezone_offset) - - # recent locations - get_recent_locations(file_found, report_folder, db, timezone_offset) - - # favorite locations - get_favorite_locations(file_found, report_folder, db, timezone_offset) - - # shared locations - get_shared_locations(file_found, report_folder, db, timezone_offset) - finally: - db.close() + return data_headers, data_list, source_path diff --git a/scripts/artifacts/weatherAppLocations.py b/scripts/artifacts/weatherAppLocations.py index c9d00087..a92d6bbe 100644 --- a/scripts/artifacts/weatherAppLocations.py +++ b/scripts/artifacts/weatherAppLocations.py @@ -1,94 +1,111 @@ -import plistlib -import time -import datetime +__artifacts_v2__ = { + "get_weatherAppLocations": { + "name": "Weather App - Location", + "description": "", + "author": "@Anna-Mariya Mateyna", + "creation_date": "2021-01-29", + "last_update_date": "2025-11-20", + "requirements": "none", + "category": "Location", + "notes": "", + "paths": ('*/mobile/Containers/Shared/AppGroup/*/Library/Preferences/group.com.apple.weather.plist',), + "output_types": "standard", + "artifact_icon": "sun" + } +} 
+from scripts.ilapfuncs import ( + logfunc, + artifact_processor, + get_plist_file_content, + get_file_path, + convert_unix_ts_to_utc, + convert_plist_date_to_utc + ) -from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, tsv, kmlgen, timeline, is_platform_windows - -def get_weatherAppLocations(files_found, report_folder, seeker, wrap_text, timezone_offset): +@artifact_processor +def get_weatherAppLocations(context): + files_found = context.get_files_found() data_list = [] - data_list_two_one = [] - for file_found in files_found: - file_found = str(file_found) - with open(file_found, "rb") as plist_file: - plist_content = plistlib.load(plist_file) - - if plist_content.get('PrefsVersion') == '2.1': - lastupdated = (plist_content['LastUpdated']) - - if plist_content.get('Cities', '0') == '0': - logfunc('No cities available') - return - - for x in plist_content['Cities']: - lon = x.get('Lon','') - lat = x.get('Lat','') - name = x.get('Name','') - country = x.get('Country','') - timezone = x.get('TimeZone','') - cityupdate = x.get('CityTimeZoneUpdateDateKey','') - - data_list_two_one.append((lastupdated, name, country, timezone, cityupdate, lat, lon)) - else: - if plist_content.get('Cities', '0') == '0': - logfunc('No cities available') - return - - for city in plist_content['Cities']: - update_time = city.get('UpateTime','') - update_time_formatted = update_time.strftime('%Y-%m-%d %H:%M:%S') - - data_list.append((update_time_formatted, 'Added from User', '', city['Lat'], - city['Lon'], city['Name'], city['Country'], city['SecondsFromGMT'])) - - local_weather = plist_content['LocalWeather'] - local_update_time = local_weather['UpateTime'] - local_update_time_formatted = local_update_time.strftime('%Y-%m-%d %H:%M:%S') - last_location_update = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(plist_content.get('LastLocationUpdateTime'))) - - data_list.append((local_update_time_formatted, 'Local', last_location_update, 
local_weather['Lat'], - local_weather['Lon'], local_weather['Name'], local_weather['Country'], local_weather['SecondsFromGMT'])) - - if len(data_list) > 0: - report = ArtifactHtmlReport('Weather App Locations') - report.start_artifact_report(report_folder, 'Weather App Locations') - report.add_script() - data_headers = ("Update Time", "Type", "Last Location Update", "Latitude", "Longitude", "City", "Country", "Seconds from GMT") - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = 'Weather App Locations' - tsv(report_folder, data_headers, data_list, tsvname) + source_path = get_file_path(files_found, 'group.com.apple.weather.plist') + if not source_path: + logfunc('No weather app location plist found') + return (), [], '' + plist_content = get_plist_file_content(source_path) + if plist_content.get('PrefsVersion') == '2.1': + data_headers = ( + ('Update Time', 'datetime'), + 'Name', + 'Country', + 'TimeZone', + ('City Timezone Update Key', 'datetime'), + 'Latitude', + 'Longitude', + ) - tlactivity = 'Weather App Locations' - timeline(report_folder, tlactivity, data_list, data_headers) - - kmlactivity = 'Weather App Locations' - kmlgen(report_folder, kmlactivity, data_list, data_headers) + lastupdated = convert_plist_date_to_utc(plist_content.get('LastUpdated')) + if plist_content.get('Cities', '0') == '0': + logfunc('No cities available') + return - elif len(data_list_two_one) > 0: - report = ArtifactHtmlReport('Weather App Locations') - report.start_artifact_report(report_folder, 'Weather App Locations') - report.add_script() - data_headers = ("Update Time", "Name", "Country", "TimeZone", "City Timezone Update Key","Latitude", "Longitude") - report.write_artifact_data_table(data_headers, data_list_two_one, file_found) - report.end_artifact_report() - - tsvname = 'Weather App Locations' - tsv(report_folder, data_headers, data_list_two_one, tsvname) - - tlactivity = 'Weather App Locations' - 
timeline(report_folder, tlactivity, data_list_two_one, data_headers) - - kmlactivity = 'Weather App Locations' - kmlgen(report_folder, kmlactivity, data_list_two_one, data_headers) + for x in plist_content['Cities']: + lon = x.get('Lon', '') + lat = x.get('Lat', '') + name = x.get('Name', '') + country = x.get('Country', '') + timezone = x.get('TimeZone', '') + cityupdate = convert_unix_ts_to_utc(x.get('CityTimeZoneUpdateDateKey', '')) + data_list.append(( + lastupdated, + name, + country, + timezone, + cityupdate, + lat, + lon + )) else: - logfunc('No data available for Weather App Locations') + data_headers = ( + ('Update Time', 'datetime'), + 'Type', + ('Last Location Update', 'datetime'), + 'Latitude', + 'Longitude', + 'City', + 'Country', + 'Seconds from GMT', + ) + if plist_content.get('Cities', '0') == '0': + logfunc('No cities available') + return + for city in plist_content['Cities']: + update_time = convert_plist_date_to_utc(city.get('UpateTime', '')) + data_list.append(( + update_time, + 'Added from User', + '', + city['Lat'], + city['Lon'], + city['Name'], + city['Country'], + city['SecondsFromGMT'], + source_path + )) + local_weather = plist_content.get('LocalWeather', {}) + local_update_time = convert_plist_date_to_utc(local_weather.get('UpateTime', '')) + last_location_update = convert_unix_ts_to_utc(plist_content.get('LastLocationUpdateTime')) + data_list.append(( + local_update_time, + 'Local', + last_location_update, + local_weather['Lat'], + local_weather['Lon'], + local_weather['Name'], + local_weather['Country'], + local_weather['SecondsFromGMT'], + )) -__artifacts__ = { - "weatherAppLocations": ( - "Location", - ('*/mobile/Containers/Shared/AppGroup/*/Library/Preferences/group.com.apple.weather.plist'), - get_weatherAppLocations) -} \ No newline at end of file + if not data_list: + logfunc('No weather app location data available') + return (), [], source_path + return data_headers, data_list, source_path diff --git 
a/scripts/artifacts/webClips.py b/scripts/artifacts/webClips.py index 1f9bba16..0624f57e 100644 --- a/scripts/artifacts/webClips.py +++ b/scripts/artifacts/webClips.py @@ -1,21 +1,44 @@ +__artifacts_v2__ = { + "get_webClips": { + "name": "iOS Screens", + "description": "", + "author": "@AlexisBrignoni", + "creation_date": "2020-04-20", + "last_update_date": "2025-11-20", + "requirements": "none", + "category": "Home Screen", + "notes": "", + "paths": ('*WebClips/*.webclip/*',), + "output_types": "standard", + "artifact_icon": "bookmark" + } +} import os -import plistlib -import base64 +from scripts.ilapfuncs import ( + logfunc, + artifact_processor, + check_in_media, + get_plist_file_content, + ) -from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, logdevinfo, tsv, is_platform_windows - -def get_webClips(files_found, report_folder, seeker, wrap_text, timezone_offset): +@artifact_processor +def get_webClips(context): + files_found = context.get_files_found() webclip_data = {} data_list = [] + source_path = '' for path_val in files_found: # Extract the unique identifier pathstr = str(path_val).replace("\\", "/") - - unique_id = pathstr.split("/WebClips/")[1].split(".webclip/")[0] - if unique_id.endswith('.webclip'): - unique_id = unique_id[:-8] + if not source_path: + source_path = pathstr + try: + unique_id = pathstr.split("/WebClips/")[1].split(".webclip/")[0] + if unique_id.endswith('.webclip'): + unique_id = unique_id[:-8] + except IndexError: + continue if unique_id != "" and unique_id not in webclip_data: webclip_data[unique_id] = { "Info": "", @@ -31,45 +54,30 @@ def get_webClips(files_found, report_folder, seeker, wrap_text, timezone_offset) # Is this the path to the icon? 
if "icon.png" in pathstr: webclip_data[unique_id]["Icon_path"] = path_val - logfunc(f"Webclips found: {len(webclip_data)} ") for unique_id, data in webclip_data.items(): # Info plist information - #logfunc(str(data)) - info_plist_raw = open(data["Info"], "rb") - info_plist = plistlib.load(info_plist_raw) - webclip_data[unique_id]["Title"] = info_plist["Title"] - webclip_data[unique_id]["URL"] = info_plist["URL"] - info_plist_raw.close() - - # Open and convert icon into b64 for serialisation in report - icon_data_raw = open(data["Icon_path"], "rb") - icon_data = base64.b64encode(icon_data_raw.read()).decode("utf-8") - webclip_data[unique_id]["Icon_data"] = icon_data - icon_data_raw.close() - - # Create the report - for unique_id, data in webclip_data.items(): - htmlstring = (f'') - htmlstring = htmlstring +('') - htmlstring = htmlstring +(f'') - htmlstring = htmlstring +(f'') - htmlstring = htmlstring +('') - htmlstring = htmlstring +('
UID:{unique_id}
Title: {data["Title"]}
URL: {data["URL"]}
') - data_list.append((htmlstring,)) - - - report = ArtifactHtmlReport(f'WebClips') - report.start_artifact_report(report_folder, f'WebClips') - report.add_script() - data_headers = ((f'WebClips',)) - report.write_artifact_data_table(data_headers, data_list, files_found[0], html_escape=False) - report.end_artifact_report() - -__artifacts__ = { - "webClips": ( - "iOS Screens", - ('*WebClips/*.webclip/*'), - get_webClips) -} \ No newline at end of file + # logfunc(str(data)) + title = "" + url = "" + icon_ref = "" + if data["Info"]: + info_plist = get_plist_file_content(data["Info"]) + if info_plist: + title = info_plist.get("Title", "") + url = info_plist.get("URL", "") + if data["Icon_path"]: + icon_ref = check_in_media( + data["Icon_path"], + f"{unique_id}_icon.png" + ) + data_list.append((icon_ref, title, url, unique_id, data["Info"])) + data_headers = ( + ('Icon', 'media'), + 'Title', + 'URL', + 'Unique Identifier', + 'Source File' + ) + return data_headers, data_list, '' diff --git a/scripts/artifacts/webkit.py b/scripts/artifacts/webkit.py index 002f2b08..ee0b0f11 100644 --- a/scripts/artifacts/webkit.py +++ b/scripts/artifacts/webkit.py @@ -3,140 +3,157 @@ "name": "WebKit Cache Records", "description": "Extracts detailed information from WebKit Network Cache record files", "author": "@JamesHabben", - "version": "1.0", - "date": "2024-10-24", + "creation_date": "2024-10-24", + "last_update_date": "2025-11-20", "requirements": "none", "category": "Browser", "notes": "", "paths": ('*/Library/Caches/WebKit/NetworkCache/Version*/Records/*/Resource/*',), "output_types": "standard", - "research_mode": False # Set to True to include all fields + "artifact_icon": "globe", + "research_mode": False } } import os import struct import json -from scripts.ilapfuncs import logfunc, artifact_processor -from datetime import datetime, timezone from collections import OrderedDict - +from scripts.ilapfuncs import ( + logfunc, + artifact_processor, + convert_unix_ts_to_utc + ) def 
read_vf(file): try: length_bytes = file.read(4) if len(length_bytes) < 4: - return None, None # Not enough data to read length + return None, None length = struct.unpack('= 2: + file_data['Response Code'] = struct.unpack('