-
Notifications
You must be signed in to change notification settings - Fork 119
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #395 from snoop168/master
Add PodcastAddict Artifact
- Loading branch information
Showing
4 changed files
with
217 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,72 @@ | ||
# Module Description: Parses Podcast Addict Episode Database | ||
# Author: John Hyla | ||
# Date: 2023-07-07 | ||
# Artifact version: 0.0.1 | ||
# Requirements: none | ||
|
||
import os | ||
import sqlite3 | ||
import datetime | ||
|
||
from scripts.artifact_report import ArtifactHtmlReport | ||
from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly, does_column_exist_in_db | ||
|
||
def get_podcasts(files_found, report_folder, seeker, wrap_text):
    """Parse the Podcast Addict episodes database and report episode metadata.

    Parameters follow the framework's artifact-parser convention:
    files_found   -- iterable of paths matched by the artifact search pattern
    report_folder -- destination folder for the generated HTML/TSV report
    seeker        -- file seeker supplied by the framework (unused here)
    wrap_text     -- text-wrapping flag supplied by the framework (unused here)
    """
    source_file = ''  # source column passed to tsv(); intentionally blank here
    for file_found in files_found:
        file_name = str(file_found)

        db = open_sqlite_db_readonly(file_name)
        cursor = db.cursor()
        try:
            # Timestamps are stored as Java-style milliseconds since epoch;
            # divide by 1000 for SQLite's UNIXEPOCH conversion.
            cursor.execute('''
            SELECT datetime(publication_date/1000, "UNIXEPOCH") as publication_date,
            datetime(playbackDate/1000, "UNIXEPOCH") as playbackDate,
            name,
            duration,
            size,
            datetime(downloaded_date/1000, "UNIXEPOCH") as downloaded_date,
            playing_status,
            position_to_resume,
            download_url
            FROM episodes
            ''')
            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except Exception as e:
            # Route the failure through the framework log instead of a bare
            # print() so it is captured in the run log.
            logfunc(f'Podcast Addict episodes query failed: {e}')
            usageentries = 0

        if usageentries > 0:
            report = ArtifactHtmlReport('Podcast Addict')
            report.start_artifact_report(report_folder, 'Podcast Addict')
            report.add_script()
            data_headers = ('publication_date', 'playback_date', 'name', 'duration',
                            'size', 'downloaded_date', 'playing_status',
                            'position_to_resume', 'download_url')
            # Each result row already matches the header order; copy as tuples.
            data_list = [tuple(row) for row in all_rows]

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Podcast Addict'
            tsv(report_folder, data_headers, data_list, tsvname, source_file)
        else:
            logfunc('No Podcast Episodes found')

        db.close()

    return
|
||
|
||
# Registry entry consumed by the framework's plugin loader.
# Maps an artifact key to (category name, search pattern(s), parser function).
# NOTE(review): ('*/...') is a plain string in parentheses, not a 1-tuple —
# confirm the loader accepts a bare string pattern; if it requires a tuple,
# a trailing comma is needed.
__artifacts__ = {
    "Podcast Addict": (
        "Podcast Addict",
        ('*/com.bambuna.podcastaddict/databases/podcastAddict.db'),
        get_podcasts)
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,71 @@ | ||
# Module Description: Parses Verizon RDD Analytics Battery History | ||
# Author: John Hyla | ||
# Date: 2023-07-07 | ||
# Artifact version: 0.0.1 | ||
# Requirements: none | ||
|
||
import os | ||
import sqlite3 | ||
import datetime | ||
|
||
from scripts.artifact_report import ArtifactHtmlReport | ||
from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly, does_column_exist_in_db | ||
|
||
def get_rdd_analytics(files_found, report_folder, seeker, wrap_text):
    """Parse the Verizon RDD Analytics battery-history database.

    Parameters follow the framework's artifact-parser convention:
    files_found   -- iterable of paths matched by the artifact search pattern
    report_folder -- destination folder for the generated HTML/TSV report
    seeker        -- file seeker supplied by the framework (unused here)
    wrap_text     -- text-wrapping flag supplied by the framework (unused here)
    """
    source_file = ''  # source column passed to tsv(); intentionally blank here
    for file_found in files_found:
        file_name = str(file_found)

        db = open_sqlite_db_readonly(file_name)
        cursor = db.cursor()
        try:
            # ActualTime is stored as milliseconds since epoch; divide by 1000
            # for SQLite's UNIXEPOCH conversion.
            cursor.execute('''
            SELECT datetime(ActualTime/1000, "UNIXEPOCH") as actual_time,
            FormattedTime,
            BatteryLevel,
            GPS,
            Charging,
            ScreenOn,
            Brightness,
            BatteryTemp
            FROM TableBatteryHistory
            ''')
            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except Exception as e:
            # Route the failure through the framework log instead of a bare
            # print() so it is captured in the run log.
            logfunc(f'Verizon RDD battery-history query failed: {e}')
            usageentries = 0

        if usageentries > 0:
            report = ArtifactHtmlReport('Verizon RDD - Battery History')
            report.start_artifact_report(report_folder, 'Verizon RDD - Battery History')
            report.add_script()
            data_headers = ('ActualTime', 'FormattedTime', 'BatteryLevel', 'GPS',
                            'Charging', 'ScreenOn', 'Brightness', 'BatteryTemp')
            # Each result row already matches the header order; copy as tuples.
            data_list = [tuple(row) for row in all_rows]

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Verizon RDD - Battery History'
            tsv(report_folder, data_headers, data_list, tsvname, source_file)
        else:
            logfunc('No Battery History found')

        db.close()

    return
|
||
|
||
# Registry entry consumed by the framework's plugin loader.
# Maps an artifact key to (category name, search pattern(s), parser function).
# NOTE(review): ('*/...') is a plain string in parentheses, not a 1-tuple —
# confirm the loader accepts a bare string pattern; if it requires a tuple,
# a trailing comma is needed.
__artifacts__ = {
    "VerizonRDD-Battery": (
        "Verizon RDD Analytics",
        ('*/com.verizon.mips.services/databases/RDD_ANALYTICS_DATABASE'),
        get_rdd_analytics)
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,70 @@ | ||
# Module Description: Parses Verizon RDD Wifi Data | ||
# Author: John Hyla | ||
# Date: 2023-07-07 | ||
# Artifact version: 0.0.1 | ||
# Requirements: none | ||
|
||
import os | ||
import sqlite3 | ||
import datetime | ||
import json | ||
|
||
from scripts.artifact_report import ArtifactHtmlReport | ||
from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly, does_column_exist_in_db | ||
|
||
def get_rdd_wifi(files_found, report_folder, seeker, wrap_text):
    """Parse the Verizon RDD wifi-event database (JSON payload per row).

    Parameters follow the framework's artifact-parser convention:
    files_found   -- iterable of paths matched by the artifact search pattern
    report_folder -- destination folder for the generated HTML/TSV report
    seeker        -- file seeker supplied by the framework (unused here)
    wrap_text     -- text-wrapping flag supplied by the framework (unused here)
    """
    source_file = ''  # source column passed to tsv(); intentionally blank here
    for file_found in files_found:
        file_name = str(file_found)

        db = open_sqlite_db_readonly(file_name)
        cursor = db.cursor()
        try:
            # timestamp is stored as milliseconds since epoch; divide by 1000
            # for SQLite's UNIXEPOCH conversion.
            cursor.execute('''
            SELECT datetime(timestamp/1000, "UNIXEPOCH") as timestamp,
            eventid,
            event,
            data
            FROM TABLERDDWIFIDATA
            ''')
            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except Exception as e:
            # Route the failure through the framework log instead of a bare
            # print() so it is captured in the run log.
            logfunc(f'Verizon RDD wifi-data query failed: {e}')
            usageentries = 0

        if usageentries > 0:
            report = ArtifactHtmlReport('Verizon RDD - WIFI Data')
            report.start_artifact_report(report_folder, 'Verizon RDD - WIFI Data')
            report.add_script()
            data_headers = ('Timestamp', 'Event ID', 'Event', 'BSSID', 'SSID', 'IP',
                            'SessionTime', 'DataTx', 'DataRx', 'Cell ID')
            data_list = []

            for row in all_rows:
                # The data column holds a JSON blob. Guard against a NULL or
                # malformed blob, and against a missing "wifiinfo" object —
                # previously .get('wifiinfo').get(...) raised AttributeError
                # and aborted the whole artifact on the first bad record.
                try:
                    json_data = json.loads(row[3]) if row[3] else {}
                except (TypeError, ValueError):
                    json_data = {}
                wifi_info = json_data.get('wifiinfo') or {}
                data_list.append((row[0], row[1], row[2],
                                  wifi_info.get('bssid'),
                                  wifi_info.get('ssid'),
                                  json_data.get('returnedIP'),
                                  json_data.get('totalSessionTime'),
                                  json_data.get('sessionWifiTx'),
                                  json_data.get('sessionWifiRx'),
                                  json_data.get('cellId')))

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Verizon RDD - WIFI Data'
            tsv(report_folder, data_headers, data_list, tsvname, source_file)
        else:
            logfunc('No WIFI Data found')

        db.close()

    return
|
||
|
||
# Registry entry consumed by the framework's plugin loader.
# Maps an artifact key to (category name, search pattern(s), parser function).
# NOTE(review): ('*/...') is a plain string in parentheses, not a 1-tuple —
# confirm the loader accepts a bare string pattern; if it requires a tuple,
# a trailing comma is needed.
__artifacts__ = {
    "VerizonRDD-WIFI": (
        "Verizon RDD Analytics",
        ('*/com.verizon.mips.services/databases/RDD_WIFI_DATA_DATABASE'),
        get_rdd_wifi)
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters