3131import uuid
3232import pandas as pd
3333import warnings
34+ import threading
3435from ode .renderers .json import print_json
3536from ode .renderers .csv_file import print_csv
3637from ode .renderers .html import print_html
5051 )
5152
__author__ = "Brian Maloney"
__version__ = "2024.11.12 "
__email__ = "bmmaloney97@gmail.com"

# Accumulator for deleted-item (recycle bin) records gathered while parsing.
rbin = []
# Parser singletons shared by the whole module.
DATParser = dat_parser.DATParser()
OneDriveParser = onedrive_parser.OneDriveParser()
SQLiteParser = sqlite_parser.SQLiteParser()
# Cross-thread handoff slot: worker threads store their return tuple here
# and the main thread unpacks it after the matching Event is set.
result = None
# Completion signals for each background stage (SQLite parse, OneDrive
# tree build, output writing) so the main thread can run a spinner while
# it waits.
parsing_complete = threading.Event()
onedrive_complete = threading.Event()
output_complete = threading.Event()
5964
6065
6166def spinning_cursor ():
@@ -75,10 +80,36 @@ def guid():
7580 break
7681
7782
def parse_sql_thread(sqlFolder):
    """Worker: parse the OneDrive SQLite store found in *sqlFolder*.

    Stores the tuple returned by ``SQLiteParser.parse_sql()`` in the
    module-level ``result`` and signals ``parsing_complete`` so the main
    thread's spinner loop stops waiting.
    """
    global result
    try:
        result = SQLiteParser.parse_sql(sqlFolder)
    finally:
        # Signal even if parse_sql() raises; otherwise the main thread's
        # `while not parsing_complete.is_set()` loop would spin forever.
        parsing_complete.set()
88+
def parse_onedrive_thread(df, df_scope, df_GraphMetadata_Records, scopeID, file, rbin_df, account, reghive, RECYCLE_BIN, localHashAlgorithm):
    """Worker: build the OneDrive folder tree from the parsed dataframes.

    Stores the ``(cache, rbin_df)`` tuple returned by
    ``OneDriveParser.parse_onedrive()`` in the module-level ``result`` and
    signals ``onedrive_complete`` so the main thread's spinner loop stops
    waiting.
    """
    global result
    # NOTE: the previous `result = None` pre-clear was redundant — the
    # main thread only reads `result` after the event fires.
    try:
        result = OneDriveParser.parse_onedrive(df, df_scope, df_GraphMetadata_Records, scopeID, file, rbin_df, account, reghive, RECYCLE_BIN, localHashAlgorithm)
    finally:
        # Signal even on failure so the main thread cannot hang in its
        # `while not onedrive_complete.is_set()` loop.
        onedrive_complete.set()
95+
7896def main ():
7997 df_GraphMetadata_Records = pd .DataFrame (columns = ['fileName' , 'resourceID' , 'graphMetadataJSON' , 'spoCompositeID' ,
8098 'createdBy' , 'modifiedBy' , 'filePolicies' , 'fileExtension' , 'lastWriteCount' ])
8199
100+ def output_thread ():
101+ delay = time .time ()
102+
103+ threading .Thread (target = output ,
104+ daemon = True ).start ()
105+
106+ while not output_complete .is_set ():
107+ if (time .time () - delay ) > 0.1 :
108+ sys .stdout .write (f'Saving OneDrive data. Please wait.... { next (spinner )} \r ' )
109+ sys .stdout .flush ()
110+ delay = time .time ()
111+ time .sleep (0.2 )
112+
82113 def output ():
83114 if args .csv :
84115 print_csv (df , rbin_df , df_GraphMetadata_Records , name , args .csv , args .csvf )
@@ -91,6 +122,8 @@ def output():
91122 args .json = '.'
92123 print_json (cache , name , args .pretty , args .json )
93124
125+ output_complete .set () # Signal that parsing is complete
126+
94127 try :
95128 file_count = df .Type .value_counts ()['File' ]
96129 except KeyError :
@@ -149,6 +182,8 @@ def output():
149182
150183 args = parser .parse_args ()
151184
185+ spinner = spinning_cursor ()
186+
152187 if args .sync :
153188 update_from_repo (args .gui )
154189 sys .exit ()
@@ -205,16 +240,44 @@ def output():
205240 name = f'{ sql_find [0 ][0 ]} _{ sql_find [0 ][1 ]} '
206241 except Exception :
207242 name = 'SQLite_DB'
208- df , rbin_df , df_scope , df_GraphMetadata_Records , scopeID , account , localHashAlgorithm = SQLiteParser .parse_sql (args .sql )
243+
244+ threading .Thread (target = parse_sql_thread ,
245+ args = (args .sql ,),
246+ daemon = True ).start ()
247+
248+ delay = time .time ()
249+ while not parsing_complete .is_set ():
250+ if (time .time () - delay ) > 0.1 :
251+ sys .stdout .write (f'Parsing SQLite. Please wait.... { next (spinner )} \r ' )
252+ sys .stdout .flush ()
253+ delay = time .time ()
254+ time .sleep (0.2 )
255+
256+ df , rbin_df , df_scope , df_GraphMetadata_Records , scopeID , account , localHashAlgorithm = result
209257
210258 if not df .empty :
211- cache , rbin_df = OneDriveParser .parse_onedrive (df , df_scope , df_GraphMetadata_Records , scopeID , args .sql , rbin_df , account , args .reghive , args .RECYCLE_BIN , localHashAlgorithm )
259+ threading .Thread (target = parse_onedrive_thread ,
260+ args = (df , df_scope , df_GraphMetadata_Records ,
261+ scopeID , args .sql , rbin_df , account ,
262+ args .reghive , args .RECYCLE_BIN ,
263+ localHashAlgorithm ,),
264+ daemon = True ).start ()
265+
266+ delay = time .time ()
267+ while not onedrive_complete .is_set ():
268+ if (time .time () - delay ) > 0.1 :
269+ sys .stdout .write (f'Building folder list. Please wait.... { next (spinner )} \r ' )
270+ sys .stdout .flush ()
271+ delay = time .time ()
272+ time .sleep (0.2 )
273+
274+ cache , rbin_df = result
212275
213276 if df .empty :
214277 print (f'Unable to parse { name } sqlite database.' )
215278 logging .warning (f'Unable to parse { name } sqlite database.' )
216279 else :
217- output ()
280+ output_thread ()
218281
219282 rootDir = args .logs
220283 if rootDir is None :
@@ -241,7 +304,7 @@ def output():
241304 print (f'Unable to parse { filename } .' )
242305 logging .warning (f'Unable to parse { filename } .' )
243306 else :
244- output ()
307+ output_thread ()
245308 rootDir = args .logs
246309 if rootDir is None :
247310 sys .exit ()
@@ -261,7 +324,7 @@ def output():
261324 sql_dir = re .compile (r'\\Users\\(?P<user>.*?)\\AppData\\Local\\Microsoft\\OneDrive\\settings\\(?P<account>Personal|Business[0-9])$' )
262325 log_dir = re .compile (r'\\Users\\(?P<user>.*)?\\AppData\\Local\\Microsoft\\OneDrive\\logs$' )
263326 rootDir = args .dir
264- spinner = spinning_cursor ()
327+ # spinner = spinning_cursor()
265328 delay = time .time ()
266329 for path , subdirs , files in os .walk (rootDir ):
267330 if (time .time () - delay ) > 0.1 :
@@ -323,7 +386,7 @@ def output():
323386 print (f'Unable to parse { filename } .' )
324387 logging .warning (f'Unable to parse { filename } .' )
325388 else :
326- output ()
389+ output_thread ()
327390
328391 if k == 'sql' :
329392 print (f'\n \n Parsing { key } OneDrive\n ' )
@@ -339,7 +402,7 @@ def output():
339402 print (f'Unable to parse { name } sqlite database.' )
340403 logging .warning (f'Unable to parse { name } sqlite database.' )
341404 else :
342- output ()
405+ output_thread ()
343406
344407 if args .logs :
345408 load_cparser (args .cstructs )
0 commit comments