@@ -26,7 +26,7 @@ def run(gx_context, gx_output, gh_api):
     gx_output.r_log(f'{reponame_msg}', 'profiling')
 
     stargazers_message = f"Stars count: [{repository.get('stargazers_count')}]"
-    if repository.get('stargazers_count') > 0:
+    if repository.get('stargazers_count', 0) > 0:
         stargazers_message += f" List at: {repository.get('stargazers_url')}"
     gx_output.r_log(stargazers_message, rtype="profiling")
 
@@ -43,7 +43,7 @@ def run(gx_context, gx_output, gh_api):
     # https://api.github.com/repos/infobyte/faraday/issues/comments - and won't allow filtering in a helpful (to us) way
     gx_output.stdout(f"\rGetting all repository comments on commits.." + " " * 40, end="")
     commit_comments = gh_api.fetch_repository_commit_comments(repository)
-    if len(commit_comments) > 0:
+    if isinstance(commit_comments, list) and len(commit_comments) > 0:
         total_comments = defaultdict(int)
         positive_reactions = defaultdict(int)
         negative_reactions = defaultdict(int)
@@ -86,7 +86,7 @@ def run(gx_context, gx_output, gh_api):
 
     gx_output.stdout(f"\rGetting all repository comments on issues.." + " " * 30, end="")
     issues_comments = gh_api.fetch_repository_issues_comments(repository)
-    if issues_comments != None and len(issues_comments) > 0:
+    if isinstance(issues_comments, list) and len(issues_comments) > 0:
         total_comments = defaultdict(int)
         positive_reactions = defaultdict(int)
         negative_reactions = defaultdict(int)
@@ -127,7 +127,7 @@ def run(gx_context, gx_output, gh_api):
 
     gx_output.stdout(f"\rGetting all repository comments on pull requests.." + " " * 30, end="")
     pulls_comments = gh_api.fetch_repository_pulls_comments(repository)
-    if pulls_comments != None and len(pulls_comments) > 0:
+    if isinstance(pulls_comments, list) and len(pulls_comments) > 0:
         total_comments = defaultdict(int)
         positive_reactions = defaultdict(int)
         negative_reactions = defaultdict(int)
@@ -184,12 +184,12 @@ def run(gx_context, gx_output, gh_api):
         #print(gh_api.fetch_environment_protection_rules(repository, environment.get('name')))
 
     gx_output.stdout(f"\rChecking for repository forks.." + " " * 30, end="")
-    if repository.get('forks_count') > 0:
+    if repository.get('forks_count', 0) > 0:
         gx_output.r_log(f"Repository has {repository.get('forks_count')} forks: {repository.get('forks_url')}", rtype="profiling")
 
     gx_output.stdout(f"\rInspecting repository branches.." + " " * 40, end="")
     branches = gh_api.fetch_repository_branches(repository)
-    if branches != None and len(branches) > 0:
+    if isinstance(branches, list) and len(branches) > 0:
         gx_output.r_log(f"{len(branches)} Branches available at: [{repository.get('html_url')}/branches]", rtype="branches")
         unprotected_branches = []
         protected_branches = []
@@ -204,15 +204,18 @@ def run(gx_context, gx_output, gh_api):
 
     gx_output.stdout(f"\rInspecting repository labels.." + " " * 40, end="")
     labels = gh_api.fetch_repository_labels(repository)
-    if labels != None and len(labels) > 0:
+    if isinstance(labels, list) and len(labels) > 0:
         gx_output.r_log(f"{len(labels)} Labels available at: [{repository.get('html_url')}/labels]", rtype="labels")
         non_default_labels = [label.get('name') for label in labels if label.get('default') == False]
         if len(non_default_labels) > 0:
             gx_output.r_log(f"{len(non_default_labels)} Non-default Labels: {non_default_labels} available at: [{repository.get('html_url')}/labels]", rtype="labels")
 
     gx_output.stdout(f"\rInspecting repository tags.." + " " * 40, end="")
     tags = gh_api.fetch_repository_tags(repository)
-    if tags != None and len(tags) > 0: gx_output.r_log(f"{len(tags)} Tags available at: [{repository.get('html_url')}/tags]", rtype="tags")
+    if isinstance(tags, list) and len(tags) > 0:
+        gx_output.r_log(f"{len(tags)} Tags available at: [{repository.get('html_url')}/tags]", rtype="tags")
+    else:
+        tags = []
     tag_taggers = defaultdict(int)
 
     """ A bit shameful here because we can't really get too much data out of tags because of the way the GH API is implemented.
@@ -236,7 +239,8 @@ def run(gx_context, gx_output, gh_api):
 
     gx_output.stdout(f"\rInspecting repository releases.." + " " * 40, end="")
     releases = gh_api.fetch_repository_releases(repository)
-    if len(releases) > 0: gx_output.r_log(f"{len(releases)} Releases available at: [{repository.get('html_url')}/releases]", rtype="releases")
+    if isinstance(releases, list) and len(releases) > 0:
+        gx_output.r_log(f"{len(releases)} Releases available at: [{repository.get('html_url')}/releases]", rtype="releases")
 
     release_authors = defaultdict(int)
     asset_uploaders = defaultdict(int)
@@ -345,11 +349,11 @@ def run(gx_context, gx_output, gh_api):
345349 """
346350
347351 watchers_message = f"Watchers count: [{ repository .get ('subscribers_count' )} ]"
348- if repository .get ('subscribers_count' ) > 0 :
352+ if repository .get ('subscribers_count' , 0 ) > 0 :
349353 watchers_message += f" List at: { repository .get ('subscribers_url' )} "
350354 gx_output .r_log (watchers_message , rtype = "profiling" )
351355
352- if repository .get ('open_issues_count' ) > 0 :
356+ if repository .get ('open_issues_count' , 0 ) > 0 :
353357 gx_output .r_log (f"Repository has { repository .get ('open_issues_count' )} Open Issues: { repository .get ('html_url' )} /issues" , rtype = "profiling" )
354358
355359 if repository .get ('description' ):
@@ -358,7 +362,7 @@ def run(gx_context, gx_output, gh_api):
     if repository.get('topics'):
         gx_output.r_log(f"Topics: {str(repository.get('topics'))}", rtype="profiling")
 
-    if repository.get('fork') != False:
+    if repository.get('fork') != False and repository.get('fork') != None:
         parent = repository.get('parent').get('full_name')
         source = repository.get('source').get('full_name')
         gx_output.stdout(f"\rRepository is a FORK of a parent named: {repository.get('parent').get('full_name')}: {repository.get('parent')['html_url']}")
@@ -369,14 +373,14 @@ def run(gx_context, gx_output, gh_api):
         gx_output.r_log(f"The parent of this fork comes from SOURCE repo: {repository.get('source')['html_url']}", rtype="fork")
 
 
-    days = (datetime.now(timezone.utc) - gh_time.parse_date(repository.get('created_at'))).days
+    days = (datetime.now(timezone.utc) - gh_time.parse_date(repository.get('created_at', datetime.utcnow().isoformat()))).days
     message = f"{days} days old"
     if days > 365:
         years = "{:.2f}".format(days / 365)
         message = f"{years} years old"
     gx_output.r_log(f"Repository created: {repository.get('created_at')}, is {message}.", rtype="profiling")
 
-    days = (datetime.now(timezone.utc) - gh_time.parse_date(repository.get('updated_at'))).days
+    days = (datetime.now(timezone.utc) - gh_time.parse_date(repository.get('updated_at', datetime.utcnow().isoformat()))).days
     message = f"{days} days ago"
     if days > 365:
         years = "{:.2f}".format(days / 365)
@@ -393,7 +397,7 @@ def run(gx_context, gx_output, gh_api):
         gx_output.r_log(f"Repository's visibility is set to [private]", rtype="profiling")
 
     public_events = gh_api.fetch_repository_public_events(repository)
-    if len(public_events) > 0:
+    if isinstance(public_events, list) and len(public_events) > 0:
         gh_public_events.log_events(public_events, gx_output, for_repository=True)
 
     if repository.get('organization'):
@@ -409,66 +413,67 @@ def run(gx_context, gx_output, gh_api):
     submitter_contrib_counts = defaultdict(lambda: {'submitted': 0, 'accepted': 0, 'open': 0, 'rejected': 0})
     submitter_notcontrib_counts = defaultdict(lambda: {'submitted': 0, 'accepted': 0, 'open': 0, 'rejected': 0})
     clogins = [c.get('login') for c in contributors]
-    for pr in prs:
-        try: # quick ugly patch instead of checking all types are dict and keys exist.
-            submitter = pr['user']['login']
-        except:
-            continue
-        is_merged = pr['merged_at'] is not None
-        if submitter not in clogins:
-            submitter_counts = submitter_notcontrib_counts
-        else:
-            submitter_counts = submitter_contrib_counts
-
-        submitter_counts[submitter]['submitted'] += 1
+    if isinstance(prs, list) and len(prs) > 0:
+        for pr in prs:
+            try: # quick ugly patch instead of checking all types are dict and keys exist.
+                submitter = pr['user']['login']
+            except:
+                continue
+            is_merged = pr['merged_at'] is not None
+            if submitter not in clogins:
+                submitter_counts = submitter_notcontrib_counts
+            else:
+                submitter_counts = submitter_contrib_counts
+
+            submitter_counts[submitter]['submitted'] += 1
 
-        if is_merged:
-            submitter_counts[submitter]['accepted'] += 1
-        elif pr['state'] == 'closed':
-            submitter_counts[submitter]['rejected'] += 1
-        else:
-            submitter_counts[submitter]['open'] += 1
-
-    for submitter_counts in [submitter_contrib_counts, submitter_notcontrib_counts]:
-        for user, details in submitter_counts.items():
-            if details['submitted'] > 0:
-                # Only add a link to the URL of PRs if it belongs to a user account
-                if user in clogins:
-                    gx_output.c_log(f"{details['submitted']} Pull Requests by [{user}] at: {repository.get('html_url')}/pulls?q=author%3a{user}", rtype="prs", contributor=user)
-                details['rejected_percent'] = (details['rejected'] / details['submitted']) * 100
+            if is_merged:
+                submitter_counts[submitter]['accepted'] += 1
+            elif pr['state'] == 'closed':
+                submitter_counts[submitter]['rejected'] += 1
             else:
-                details['rejected_percent'] = 0
-
-            # Used GPT for this, we're automathgically weighting amount AND percentage, and it appears to be working.
-            details['rejected_score'] = details['rejected_percent'] * math.log1p(details['rejected'])
-
-    sorted_submitters_contrib_rejected = sorted(submitter_contrib_counts.items(), key=lambda x: (-x[1]['rejected_score'], -x[1]['submitted']))
-    sorted_submitters_notcontrib_rejected = sorted(submitter_notcontrib_counts.items(), key=lambda x: (-x[1]['rejected_score'], -x[1]['submitted']))
-
-    # First loop on top 3 to log in Repository output
-    message = []
-    for user, details in sorted_submitters_contrib_rejected[:3]:
-        if details['rejected'] > 0:
-            message.append(f"[{user} {details['rejected']} rejected out of {details['submitted']}]")
-    if len(message) > 0:
-        gx_output.r_log(f"Top repository contributors with rejected PRs: " + " | ".join(message), rtype="contributors")
-
-    # Now for NON contributors
-    message = []
-    for user, details in sorted_submitters_notcontrib_rejected[:3]:
-        if details['rejected'] > 0:
-            message.append(f"[{user} {details['rejected']} rejected out of {details['submitted']}]")
-    if len(message) > 0:
-        gx_output.r_log(f"Top non-contributor GitHub users with rejected PRs: " + " | ".join(message), rtype="contributors")
-
-    # And now loop on all to log under each user account.
-    for user, details in submitter_contrib_counts.items():
-        if details['rejected'] > 0:
-            gx_output.c_log(f"The user submitted {details['submitted']} Pull Requests out of which {details['rejected']} were rejected.", rtype="profiling", contributor=user)
-        if details['accepted'] > 0:
-            gx_output.c_log(f"The user submitted {details['submitted']} Pull Requests out of which {details['accepted']} were merged.", rtype="profiling", contributor=user)
-        if details['open'] > 0:
-            gx_output.c_log(f"The user submitted {details['submitted']} Pull Requests out of which {details['open']} remain open.", rtype="profiling", contributor=user)
+                submitter_counts[submitter]['open'] += 1
+
+        for submitter_counts in [submitter_contrib_counts, submitter_notcontrib_counts]:
+            for user, details in submitter_counts.items():
+                if details['submitted'] > 0:
+                    # Only add a link to the URL of PRs if it belongs to a user account
+                    if user in clogins:
+                        gx_output.c_log(f"{details['submitted']} Pull Requests by [{user}] at: {repository.get('html_url')}/pulls?q=author%3a{user}", rtype="prs", contributor=user)
+                    details['rejected_percent'] = (details['rejected'] / details['submitted']) * 100
+                else:
+                    details['rejected_percent'] = 0
+
+                # Used GPT for this, we're automathgically weighting amount AND percentage, and it appears to be working.
+                details['rejected_score'] = details['rejected_percent'] * math.log1p(details['rejected'])
+
+        sorted_submitters_contrib_rejected = sorted(submitter_contrib_counts.items(), key=lambda x: (-x[1]['rejected_score'], -x[1]['submitted']))
+        sorted_submitters_notcontrib_rejected = sorted(submitter_notcontrib_counts.items(), key=lambda x: (-x[1]['rejected_score'], -x[1]['submitted']))
+
+        # First loop on top 3 to log in Repository output
+        message = []
+        for user, details in sorted_submitters_contrib_rejected[:3]:
+            if details['rejected'] > 0:
+                message.append(f"[{user} {details['rejected']} rejected out of {details['submitted']}]")
+        if len(message) > 0:
+            gx_output.r_log(f"Top repository contributors with rejected PRs: " + " | ".join(message), rtype="contributors")
+
+        # Now for NON contributors
+        message = []
+        for user, details in sorted_submitters_notcontrib_rejected[:3]:
+            if details['rejected'] > 0:
+                message.append(f"[{user} {details['rejected']} rejected out of {details['submitted']}]")
+        if len(message) > 0:
+            gx_output.r_log(f"Top non-contributor GitHub users with rejected PRs: " + " | ".join(message), rtype="contributors")
+
+        # And now loop on all to log under each user account.
+        for user, details in submitter_contrib_counts.items():
+            if details['rejected'] > 0:
+                gx_output.c_log(f"The user submitted {details['submitted']} Pull Requests out of which {details['rejected']} were rejected.", rtype="profiling", contributor=user)
+            if details['accepted'] > 0:
+                gx_output.c_log(f"The user submitted {details['submitted']} Pull Requests out of which {details['accepted']} were merged.", rtype="profiling", contributor=user)
+            if details['open'] > 0:
+                gx_output.c_log(f"The user submitted {details['submitted']} Pull Requests out of which {details['open']} remain open.", rtype="profiling", contributor=user)
 
     # Check if there were any users with mismatches in commits dates in the repository.
     for user, dates_mismatch_commits in gx_context.getIdentifierValues("DATE_MISMATCH_COMMITS").items():