-
Notifications
You must be signed in to change notification settings - Fork 297
/
Copy pathfetch_contributors.py
executable file
·135 lines (105 loc) · 3.85 KB
/
fetch_contributors.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
#!/usr/bin/env python3
#
# SPDX-License-Identifier: Apache-2.0
# Copyright Contributors to the OpenTimelineIO project
import argparse
import json
import urllib.request
import os
CONTRIBUTORS_FILE = "CONTRIBUTORS.md"
def parse_args():
    """Build the command-line interface and parse ``sys.argv``.

    Returns:
        argparse.Namespace: has ``repo`` (required string, the
        "owner/name" of a GitHub repository) and ``token`` (optional
        personal access token string, ``None`` when omitted).
    """
    cli = argparse.ArgumentParser(
        description='Fetch a list of contributors for a given GitHub repo.'
    )
    # The repo is mandatory; the token may instead come from the
    # environment (handled by the caller).
    cli.add_argument(
        '--repo',
        required=True,
        help=(
            'GitHub Project/Repo name.'
            ' (e.g. "AcademySoftwareFoundation/OpenTimelineIO")'
        )
    )
    cli.add_argument(
        '--token',
        required=False,
        default=None,
        help=(
            'GitHub personal access token, used for authorization.'
            ' Get one here: https://github.com/settings/tokens/new'
        )
    )
    return cli.parse_args()
def main():
    """Sync CONTRIBUTORS.md with the GitHub contributor stats for a repo.

    Reads the existing CONTRIBUTORS_FILE, fetches the repository's
    contributor statistics from the GitHub API, prints any contributor
    missing from the file, and rewrites the file with the merged,
    case-insensitively sorted bullet list.

    Raises:
        RuntimeError: if no GitHub token was supplied via --token or the
            OTIO_RELEASE_GITHUB_TOKEN environment variable.
    """
    args = parse_args()

    token = args.token or os.environ.get("OTIO_RELEASE_GITHUB_TOKEN")
    if not token:
        raise RuntimeError(
            "Error: a github token is required to run {}. Either pass it in "
            "via --token or set $OTIO_RELEASE_GITHUB_TOKEN".format(__file__)
        )

    # Note: un-authenticated requests have a strict rate limit.
    # We avoid this by using authentication for all our requests,
    # even the ones that don't need it.  Current limits can be checked
    # via https://api.github.com/rate_limit with the same auth header.

    with open(CONTRIBUTORS_FILE) as fi:
        input_contributors = fi.read()

    # Bug fix: the Authorization header previously used args.token, which
    # is None when the token comes from the environment variable; always
    # authenticate with the resolved token instead.
    contributors = _get_json(
        f"https://api.github.com/repos/{args.repo}/stats/contributors",
        token,
    )

    output_lines = []

    if not contributors:
        print("No contributors found, something likely went wrong.")

    for contributor in contributors:
        login = contributor['author']['login']
        url = contributor['author']['html_url']
        total = contributor['total']

        # Stats only carry the login; a second lookup gets the full name.
        user = _get_json(f"https://api.github.com/users/{login}", token)
        name = user['name'] or "?"

        # Treat a contributor as present if either their login or their
        # full name already appears anywhere in the file; skip users with
        # no public name ("?") since we cannot attribute them properly.
        if (
            login not in input_contributors
            and name not in input_contributors
            and "?" not in name
        ):
            print(f"Missing: {name} [{login}] # commits: {total}")
            # Markdown bullet matching the file's existing list format.
            output_lines.append(f"* {name} ([{login}]({url}))")

    if output_lines:
        header, body = _split_contributors(input_contributors)
        body.extend(output_lines)
        body.sort(key=lambda v: v.lower())
        result = '\n'.join(header + body)
        with open(CONTRIBUTORS_FILE, 'w') as fo:
            fo.write(result)
    else:
        print(f"All contributors present in {CONTRIBUTORS_FILE}")


def _get_json(api_url, token):
    """GET ``api_url`` with token authentication and return parsed JSON."""
    request = urllib.request.Request(
        api_url,
        headers={"Authorization": f"token {token}"}
    )
    response = urllib.request.urlopen(request).read().decode('utf-8')
    return json.loads(response)


def _split_contributors(text):
    """Split CONTRIBUTORS.md text into (header_lines, bullet_lines).

    The header is every line before the first "* " bullet; the body is
    the non-blank lines from the first bullet onward.
    """
    header = []
    body = []
    in_body = False
    for ln in text.split('\n'):
        if not in_body and ln.startswith("* "):
            in_body = True
        if not in_body:
            header.append(ln)
            continue
        if ln.strip():
            body.append(ln)
    return header, body
# Script entry point: run the sync only when executed directly,
# not when this module is imported.
if __name__ == '__main__':
    main()