11 changes: 11 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,16 @@
# Changelog

## [v0.4.5](https://github.com/astrochun/github-stats-pages/tree/v0.4.5) (2021-08-28)

**Implemented enhancements:**
- Add more verbose messaging for completeness, troubleshooting
[#69](https://github.com/astrochun/github-stats-pages/pull/69)

**Closed issues:**
- Add more verbose messaging for completeness, troubleshooting
[#67](https://github.com/astrochun/github-stats-pages/issues/67)


## [v0.4.4](https://github.com/astrochun/github-stats-pages/tree/v0.4.4) (2021-08-12)

**Implemented enhancements:**
2 changes: 1 addition & 1 deletion github_stats_pages/__init__.py
@@ -1 +1 @@
__version__ = "0.4.4"
__version__ = "0.4.5"
2 changes: 2 additions & 0 deletions github_stats_pages/gts_run.py
@@ -35,6 +35,8 @@ def get_top_paths(username: str, token: str, reponame: str,
df.to_csv(path, index=False, header=True)
else:
df.to_csv(path, mode='a', index=False, header=False)
else:
print(f"Empty top paths for {reponame}")


def pandas_write_buffer(df, columns, reponame):
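
This hunk adds an `else` branch so repositories that return no top-path traffic data are reported instead of silently skipped. Below is a minimal sketch of the write-or-append pattern it extends; the branch condition on file existence and the wrapper name are assumptions, since only the `to_csv` calls and the new `else` appear in the diff.

```python
from pathlib import Path
import pandas as pd


def save_top_paths(df: pd.DataFrame, path: str, reponame: str) -> None:
    """Hypothetical wrapper around the write-or-append logic in get_top_paths."""
    if not df.empty:
        if not Path(path).exists():
            # First write for this output file: include the header row.
            df.to_csv(path, index=False, header=True)
        else:
            # Later writes: append rows without repeating the header.
            df.to_csv(path, mode='a', index=False, header=False)
    else:
        # New in this PR: report repositories with no top-path data.
        print(f"Empty top paths for {reponame}")
```
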
4 changes: 3 additions & 1 deletion github_stats_pages/repo_list.py
@@ -19,6 +19,8 @@ def get_repo_list(user: str) -> Tuple[list, pd.DataFrame]:
:return repository_df: DataFrame containing public repositories
"""

print("get_repo_list - Retrieving repository list ...")

endpoint = f"https://api.github.com/users/{user}/repos"
params = {'per_page': 100}
response = requests.get(endpoint, params=params)
@@ -42,5 +44,5 @@ def construct_csv(repository_df: pd.DataFrame, csv_outfile: str):

reduced_df = repository_df[SHORTEN_COLUMNS]

print(f"Writing: {csv_outfile}")
print(f"construct_csv - Writing: {csv_outfile}")
reduced_df.to_csv(csv_outfile, index=False)
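
The new messages here prefix each line with the function name so log output is easier to trace. For context, a hedged sketch of the request `get_repo_list` makes, based only on the endpoint and `per_page` parameter shown in this diff; the error handling and the helper name are illustrative assumptions.

```python
import pandas as pd
import requests


def fetch_public_repos(user: str) -> pd.DataFrame:
    """Illustrative version of the GitHub REST call made in get_repo_list."""
    print("get_repo_list - Retrieving repository list ...")
    endpoint = f"https://api.github.com/users/{user}/repos"
    params = {"per_page": 100}  # up to 100 repositories per page
    response = requests.get(endpoint, params=params)
    response.raise_for_status()  # assumption: not shown in the diff
    return pd.DataFrame(response.json())
```
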
17 changes: 15 additions & 2 deletions github_stats_pages/stats_plots.py
@@ -197,6 +197,8 @@ def make_plots(username: str, data_dir: str, out_dir: str, csv_file: str,
)

repository_df = pd.read_csv(csv_file, converters={'description': str})
repository_df = repository_df.loc[(repository_df['fork'] == False) &
(repository_df['archived'] == False)]

dict_df = load_data(data_dir)

@@ -206,9 +208,20 @@
p_repos.mkdir(parents=True)

# Get unique repository names
repo_names = set()
repo_names0 = set()
for key, df in dict_df.items():
repo_names.update(set(df[columns[0]].unique()))
repo_names0.update(set(df[columns[0]].unique()))

repo_names = set(repository_df['name']) & repo_names0

# Additional cleaning up:
clean_up = repo_names0 - set(repository_df['name'])
if len(clean_up) != 0:
for clean in clean_up:
p_exclude = Path(p_repos / f"{clean}.html")
print(f"Deleting: {p_exclude}")
if p_exclude.exists():
p_exclude.unlink()

final_repo_names = get_final_repo_names(p_repos, repo_names,
include_repos=include_repos,
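
The new logic in `make_plots` keeps only repositories that are neither forks nor archived, intersects those names with the repositories present in the traffic data, and deletes any previously generated HTML page for repositories that fall outside that set. A condensed, hedged sketch of that selection and cleanup step; the variable names follow the diff, while the function wrapper is illustrative and `dict_df`, `columns`, and `p_repos` are assumed to come from the surrounding code.

```python
from pathlib import Path
import pandas as pd


def select_and_clean(repository_df: pd.DataFrame, dict_df: dict,
                     columns: list, p_repos: Path) -> set:
    """Condensed sketch of the new selection and cleanup logic in make_plots."""
    # Keep only repositories that are neither forks nor archived.
    repository_df = repository_df.loc[(repository_df['fork'] == False) &
                                      (repository_df['archived'] == False)]

    # Union of repository names present in the traffic data.
    repo_names0 = set()
    for _, df in dict_df.items():
        repo_names0.update(set(df[columns[0]].unique()))

    # Plot only names that appear in both the CSV and the traffic data.
    repo_names = set(repository_df['name']) & repo_names0

    # Delete previously generated pages for repositories now excluded.
    for clean in repo_names0 - set(repository_df['name']):
        p_exclude = p_repos / f"{clean}.html"
        if p_exclude.exists():
            print(f"Deleting: {p_exclude}")
            p_exclude.unlink()

    return repo_names
```
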
4 changes: 3 additions & 1 deletion scripts/get_repo_list
@@ -2,7 +2,7 @@

import argparse

from github_stats_pages import repo_list
from github_stats_pages import repo_list, __version__

if __name__ == "__main__":
parser = argparse.ArgumentParser(
@@ -11,6 +11,8 @@ if __name__ == "__main__":
help='user or organization name')
args = parser.parse_args()

print(f'Version: {__version__}')
print("Running get_repo_list script ...")
repository_list, repository_df = repo_list.get_repo_list(args.user)

# Write CSV files
11 changes: 10 additions & 1 deletion scripts/gts_run_all_repos
@@ -24,19 +24,28 @@ if __name__ == '__main__':
help='Flag to quickly run a few repositories')
args = parser.parse_args()

print("Running gts_run_all_repos script ...")

df = read_csv(args.csv_file)

# Exclude forks and archived repos
n_forks = df['fork'].values.sum()
n_archived = df['archived'].values.sum()
if n_forks > 0:
print(f"gts_run_all_repos - Excluding forks: {n_forks}")
if n_archived > 0:
print(f"gts_run_all_repos - Excluding archived: {n_archived}")
new_df = df.loc[(df['fork'] == False) &
(df['archived'] == False)]
print(f"Number of repositories: {len(new_df)}")
print(f"gts_run_all_repos : Number of repositories: {len(new_df)}")

if args.test:
repo_list = new_df['name'][0:5]
else:
repo_list = new_df['name']

for repo_name in repo_list:
print(f"Working on : {repo_name} ...")
gts_run.run_each_repo(args.user, args.token, repo_name,
save_csv=True)
gts_run.get_top_paths(args.user, args.token, repo_name,
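
The new messages here are driven by the same fork/archived counts used for filtering. The key detail is that summing a boolean column counts its `True` values, which is what yields the excluded-repository counts. A short hedged check of that behaviour; the helper name is hypothetical, and the CSV layout matches tests_data/repository.csv.

```python
from pandas import read_csv


def count_excluded(csv_file: str) -> None:
    """Illustrative check of the new exclusion messaging (helper name is hypothetical)."""
    df = read_csv(csv_file)

    # Summing a boolean column counts its True entries, so these are the
    # numbers of forked and archived repositories in the CSV.
    n_forks = df['fork'].values.sum()
    n_archived = df['archived'].values.sum()

    if n_forks > 0:
        print(f"gts_run_all_repos - Excluding forks: {n_forks}")
    if n_archived > 0:
        print(f"gts_run_all_repos - Excluding archived: {n_archived}")
```
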
2 changes: 2 additions & 0 deletions scripts/make_stats_plots
@@ -34,6 +34,8 @@ if __name__ == '__main__':
args = parser.parse_args()
vargs = vars(args)

print("Running make_stats_plots script ...")

if args.include_repos and args.exclude_repos:
raise ValueError(
"Cannot provide include_repos and exclude_repos simultaneously!"
2 changes: 1 addition & 1 deletion setup.py
@@ -8,7 +8,7 @@

setup(
name='github-stats-pages',
version='0.4.4',
version='0.4.5',
packages=['github_stats_pages'],
scripts=['scripts/get_repo_list',
'scripts/gts_run_all_repos',
4 changes: 2 additions & 2 deletions tests_data/repository.csv
@@ -1,2 +1,2 @@
id,name,html_url,description,language,fork,stargazers_count,watchers_count,has_issues,has_downloads,has_wiki,has_pages,forks_count,disabled,open_issues_count,license,forks,open_issues,watchers,default_branch
330507480,github-stats-pages,https://github.com/astrochun/github-stats-pages,Retrieve statistics for a user's repositories and populate the information onto a GitHub static page,Python,False,1,1,True,True,True,False,0,False,2,"{'key': 'mit', 'name': 'MIT License', 'spdx_id': 'MIT', 'url': 'https://api.github.com/licenses/mit', 'node_id': 'MDc6TGljZW5zZTEz'}",0,2,1,main
id,name,html_url,description,language,fork,archived,stargazers_count,watchers_count,has_issues,has_downloads,has_wiki,has_pages,forks_count,disabled,open_issues_count,license,forks,open_issues,watchers,default_branch
330507480,github-stats-pages,https://github.com/astrochun/github-stats-pages,Retrieve statistics for a user's repositories and populate the information onto a GitHub static page,Python,False,False,1,1,True,True,True,False,0,False,2,"{'key': 'mit', 'name': 'MIT License', 'spdx_id': 'MIT', 'url': 'https://api.github.com/licenses/mit', 'node_id': 'MDc6TGljZW5zZTEz'}",0,2,1,main