[sysadmin-bin] Fix up the way we define how many pages we should recurse against
- From: Andrea Veri <averi@src.gnome.org>
- To: gnome-sysadmin@gnome.org, commits-list@gnome.org
- Subject: [sysadmin-bin] Fix up the way we define how many pages we should recurse against
- Date: Wed, 8 Aug 2018 10:14:14 +0000 (UTC)
commit f8d8aa217430d4fda4cdcb7c623dd2783e774386
Author: Andrea Veri <averi@redhat.com>
Date: Wed Aug 8 12:13:44 2018 +0200
Fix up the way we define how many pages we should recurse against
close_pull_requests.py | 35 +++++++++++++++++++----------------
1 file changed, 19 insertions(+), 16 deletions(-)
---
diff --git a/close_pull_requests.py b/close_pull_requests.py
index 1411460..6f2832c 100755
--- a/close_pull_requests.py
+++ b/close_pull_requests.py
@@ -48,11 +48,11 @@ MESSAGE = """Thank you for contributing to %(project)s!
If you have never contributed to GNOME before make sure you have read the
getting started documentation:
-http://www.gnome.org/get-involved
+https://www.gnome.org/get-involved
Otherwise please visit
https://wiki.gnome.org/Newcomers
-and follow the instructions there to upload your change to Bugzilla.
+and follow the instructions there to upload your change to https://gitlab.gnome.org.
"""
TEMPLATE = string.Template("""
@@ -78,24 +78,28 @@ def fetch_repositories(url):
    req = Request(url)
    req.add_header('Authorization', 'token %s' % auth_token)
    response = urlopen(req)
-    content = response.read()
-    parsed_json = json.loads(content)
+    link = response.info().getheader('link')
+    response.close()

-    the_page = response.info().getheader('link')
-    next_url = the_page.split(';')[0].replace('<','').replace('>','')
-    is_last = the_page.split(';')[1].split(',')[0].replace('rel=','').replace('"','').replace(' ','')
+    links = link.split(' ')
+    last_page = links[-2].replace('<','').replace('>','').replace(';','').split('?')[1].split('&')[1].split('=')[1]

-    for repository in parsed_json:
-        repo_name = repository['name']
-        if options.verbose:
-            print 'Appending %s to the repositories list' % repo_name
+    for i in range(1, int(last_page) + 1):
+        _url = github_url + '&page=%i' % i
+        r = Request(_url)
+        r.add_header('Authorization', 'token %s' % auth_token)
+        resp = urlopen(r)

-        repositories.append(repo_name)
+        content = resp.read()
+        parsed_json = json.loads(content)
+        resp.close()

-    if is_last == 'next':
-        url = next_url
+        for repository in parsed_json:
+            repo_name = repository['name']
+            if options.verbose:
+                print 'Appending %s to the repositories list' % repo_name

-        fetch_repositories(url)
+            repositories.append(repo_name)

    with open('%s' % projects_yaml, 'w') as repo_list:
        for repo in repositories:
@@ -108,7 +112,6 @@ def fetch_repositories(url):
            has_pull_requests='has-pull-requests'
            repo_list.write(TEMPLATE.substitute(project_name = repo,
                                                has_pull_requests=has_pull_requests))
-

def close_pull_requests():
    pull_request_text = MESSAGE
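
For reference, here is a minimal, self-contained sketch of the pagination technique this commit switches to: request the listing once, read GitHub's 'Link' response header to learn the last page number, then fetch every page explicitly instead of recursing on rel="next". This is not the script itself: it assumes Python 3, and GITHUB_URL, GITHUB_TOKEN and last_page_number() are hypothetical placeholders standing in for the script's own github_url, auth_token and inline header parsing.

import json
from urllib.request import Request, urlopen

# Hypothetical placeholders -- the real script builds github_url and reads
# auth_token from its own configuration.
GITHUB_URL = 'https://api.github.com/orgs/GNOME/repos?per_page=100'
GITHUB_TOKEN = 'xxxxxxxxxxxxxxxx'


def last_page_number(link_header):
    # The Link header looks like:
    #   <...?per_page=100&page=2>; rel="next", <...?per_page=100&page=5>; rel="last"
    # Pick the rel="last" entry and pull the page number out of its query string.
    for part in link_header.split(','):
        url_part, rel_part = part.split(';', 1)
        if 'rel="last"' in rel_part:
            query = url_part.strip().strip('<>').split('?')[1]
            params = dict(p.split('=') for p in query.split('&'))
            return int(params['page'])
    return 1


def fetch_repositories():
    repositories = []

    # One initial request just to read the Link header and learn how many
    # pages the listing spans.
    req = Request(GITHUB_URL)
    req.add_header('Authorization', 'token %s' % GITHUB_TOKEN)
    with urlopen(req) as response:
        link = response.headers.get('Link')
    pages = last_page_number(link) if link else 1

    # Then fetch each page explicitly instead of recursing on rel="next".
    for page in range(1, pages + 1):
        req = Request(GITHUB_URL + '&page=%i' % page)
        req.add_header('Authorization', 'token %s' % GITHUB_TOKEN)
        with urlopen(req) as response:
            parsed_json = json.loads(response.read())
        repositories.extend(repo['name'] for repo in parsed_json)

    return repositories

Computing the page count up front keeps the request loop bounded, which is essentially what the diff above does with last_page and the range() loop in place of the earlier recursive calls.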