@@ -16,7 +16,7 @@ class gowitness(BaseModule):
1616 flags = ["active" , "safe" , "web-screenshots" ]
1717 meta = {"description" : "Take screenshots of webpages" , "created_date" : "2022-07-08" , "author" : "@TheTechromancer" }
1818 options = {
19- "version" : "2.4.2" ,
19+ "version" : "3.0.5" ,
2020 "threads" : 0 ,
2121 "timeout" : 10 ,
2222 "resolution_x" : 1440 ,
@@ -161,6 +161,7 @@ async def handle_batch(self, *events):
161161 key = e .data ["url" ]
162162 event_dict [key ] = e
163163 stdin = "\n" .join (list (event_dict ))
164+ self .hugeinfo (f"Gowitness input: { stdin } " )
164165
165166 try :
166167 async for line in self .run_process_live (self .command , input = stdin , idle_timeout = self .idle_timeout ):
@@ -174,12 +175,14 @@ async def handle_batch(self, *events):
174175 new_screenshots = await self .get_new_screenshots ()
175176 for filename , screenshot in new_screenshots .items ():
176177 url = screenshot ["url" ]
178+ url = self .helpers .clean_url (url ).geturl ()
177179 final_url = screenshot ["final_url" ]
178180 filename = self .screenshot_path / screenshot ["filename" ]
179181 filename = filename .relative_to (self .scan .home )
180182 # NOTE: this prevents long filenames from causing problems in BBOT, but gowitness will still fail to save it.
181183 filename = self .helpers .truncate_filename (filename )
182184 webscreenshot_data = {"path" : str (filename ), "url" : final_url }
185+ self .hugewarning (event_dict )
183186 parent_event = event_dict [url ]
184187 await self .emit_event (
185188 webscreenshot_data ,
@@ -191,11 +194,11 @@ async def handle_batch(self, *events):
191194 # emit URLs
192195 new_network_logs = await self .get_new_network_logs ()
193196 for url , row in new_network_logs .items ():
194- ip = row ["ip" ]
197+ ip = row ["remote_ip" ]
195198 status_code = row ["status_code" ]
196199 tags = [f"status-{ status_code } " , f"ip-{ ip } " , "spider-danger" ]
197200
198- _id = row ["url_id" ]
201+ _id = row ["result_id" ]
199202 parent_url = self .screenshots_taken [_id ]
200203 parent_event = event_dict [parent_url ]
201204 if url and url .startswith ("http" ):
@@ -210,7 +213,7 @@ async def handle_batch(self, *events):
210213 # emit technologies
211214 new_technologies = await self .get_new_technologies ()
212215 for row in new_technologies .values ():
213- parent_id = row ["url_id" ]
216+ parent_id = row ["result_id" ]
214217 parent_url = self .screenshots_taken [parent_id ]
215218 parent_event = event_dict [parent_url ]
216219 technology = row ["value" ]
@@ -224,28 +227,29 @@ async def handle_batch(self, *events):
224227
225228 def construct_command (self ):
226229 # base executable
227- command = ["gowitness" ]
230+ command = ["gowitness" , "scan" ]
228231 # chrome path
229232 if self .chrome_path is not None :
230233 command += ["--chrome-path" , str (self .chrome_path )]
231234 # db path
232- command += ["--db-path" , str (self .db_path )]
235+ command += ["--write-db" ]
236+ command += ["--write-db-uri" , f"sqlite://{ self .db_path } " ]
233237 # screenshot path
234238 command += ["--screenshot-path" , str (self .screenshot_path )]
235239 # user agent
236- command += ["--user-agent" , f"{ self .scan .useragent } " ]
240+ command += ["--chrome-user-agent" , f"{ self .scan .useragent } " ]
237241 # proxy
238242 if self .proxy :
239- command += ["--proxy" , str (self .proxy )]
243+ command += ["--chrome-proxy" , str (self .proxy )]
240244 # resolution
241- command += ["--resolution-x" , str (self .resolution_x )]
242- command += ["--resolution-y" , str (self .resolution_y )]
243- # input
244- command += ["file" , "-f" , "-" ]
245+ command += ["--chrome-window-x" , str (self .resolution_x )]
246+ command += ["--chrome-window-y" , str (self .resolution_y )]
245247 # threads
246248 command += ["--threads" , str (self .threads )]
247249 # timeout
248250 command += ["--timeout" , str (self .timeout )]
251+ # input
252+ command += ["file" , "-f" , "-" ]
249253 return command
250254
251255 async def get_new_screenshots (self ):
@@ -254,8 +258,10 @@ async def get_new_screenshots(self):
254258 async with aiosqlite .connect (str (self .db_path )) as con :
255259 con .row_factory = aiosqlite .Row
256260 con .text_factory = self .helpers .smart_decode
257- async with con .execute ("SELECT * FROM urls" ) as cur :
261+ async with con .execute ("SELECT * FROM results" ) as cur :
262+ self .critical (f"CUR: { cur } " )
258263 async for row in cur :
264+ self .critical (f"SCREENSHOT: { row } " )
259265 row = dict (row )
260266 _id = row ["id" ]
261267 if _id not in self .screenshots_taken :
@@ -270,8 +276,9 @@ async def get_new_network_logs(self):
270276 con .row_factory = aiosqlite .Row
271277 async with con .execute ("SELECT * FROM network_logs" ) as cur :
272278 async for row in cur :
279+ self .critical (f"NETWORK LOG: { row } " )
273280 row = dict (row )
274- url = row ["final_url" ]
281+ url = row ["url" ]
275282 if url not in self .connections_logged :
276283 self .connections_logged .add (url )
277284 network_logs [url ] = row
@@ -284,6 +291,7 @@ async def get_new_technologies(self):
284291 con .row_factory = aiosqlite .Row
285292 async with con .execute ("SELECT * FROM technologies" ) as cur :
286293 async for row in cur :
294+ self .critical (f"TECHNOLOGY: { row } " )
287295 _id = row ["id" ]
288296 if _id not in self .technologies_found :
289297 self .technologies_found .add (_id )
0 commit comments