Merge pull request #48 from albertyw/python3

Support Python 3
Jose Diaz-Gonzalez
2016-10-25 17:38:05 -06:00
committed by GitHub


@@ -16,11 +16,25 @@ import select
 import subprocess
 import sys
 import time
-import urlparse
-import urllib
-import urllib2
+try:
+    # python 3
+    from urllib.parse import urlparse
+    from urllib.parse import quote as urlquote
+    from urllib.parse import urlencode
+    from urllib.error import HTTPError, URLError
+    from urllib.request import urlopen
+    from urllib.request import Request
+except ImportError:
+    # python 2
+    from urlparse import urlparse
+    from urllib import quote as urlquote
+    from urllib import urlencode
+    from urllib2 import HTTPError, URLError
+    from urllib2 import urlopen
+    from urllib2 import Request
 
-from github_backup import __version__
+__version__='asdf'
+# from github_backup import __version__
 
 FNULL = open(os.devnull, 'w')
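
This try/except pattern is the usual way to run one codebase on both Python 2 and 3 without a compatibility dependency: try the Python 3 module locations first and fall back to the old names on ImportError. (The hard-coded __version__='asdf' in the same hunk, replacing the package import, looks like a leftover from local testing rather than part of the port.) A minimal standalone sketch of the pattern, using only names that appear in this diff; the URL and the quoted string are illustrative:

try:
    # python 3
    from urllib.parse import urlparse, quote as urlquote
    from urllib.error import HTTPError, URLError
    from urllib.request import urlopen, Request
except ImportError:
    # python 2
    from urlparse import urlparse
    from urllib import quote as urlquote
    from urllib2 import HTTPError, URLError, urlopen, Request

# The same calls work under either interpreter once the names are normalised.
print(urlparse('https://api.github.com/user/repos').netloc)  # api.github.com
print(urlquote('p@ss word'))                                  # p%40ss%20word

Libraries such as six solve the same problem, but the explicit fallback keeps the script free of extra dependencies.
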
@@ -80,8 +94,8 @@ def logging_subprocess(popenargs,
     rc = child.wait()
     if rc != 0:
-        print(u'{} returned {}:'.format(popenargs[0], rc), file=sys.stderr)
-        print('\t', u' '.join(popenargs), file=sys.stderr)
+        print('{} returned {}:'.format(popenargs[0], rc), file=sys.stderr)
+        print('\t', ' '.join(popenargs), file=sys.stderr)
     return rc
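
The only change in this hunk is dropping the u'' prefixes: every str literal is already unicode on Python 3, and u'' literals were a syntax error on Python 3.0 through 3.2 (PEP 414 re-allowed them in 3.3). A sketch of the resulting calls, assuming the module keeps its existing from __future__ import print_function (which the file=sys.stderr keyword already requires on Python 2); the command and return code are made up:

from __future__ import print_function  # no-op on Python 3

import sys

popenargs = ['git', 'clone', 'https://example.com/repo.git']  # illustrative
rc = 128                                                      # illustrative
print('{} returned {}:'.format(popenargs[0], rc), file=sys.stderr)
print('\t', ' '.join(popenargs), file=sys.stderr)
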
@@ -96,8 +110,9 @@ def mkdir_p(*args):
             else:
                 raise
+
 def mask_password(url, secret='*****'):
-    parsed = urlparse.urlparse(url)
+    parsed = urlparse(url)
     if not parsed.password:
         return url
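
With the import shim in place, urlparse is bound to the function rather than the module, so the module-qualified urlparse.urlparse() call goes away. The masking logic itself is unchanged; a self-contained sketch, with an illustrative URL:

try:
    from urllib.parse import urlparse   # python 3
except ImportError:
    from urlparse import urlparse       # python 2


def mask_password(url, secret='*****'):
    # Replace the password component of a URL, if present, with a placeholder.
    parsed = urlparse(url)
    if not parsed.password:
        return url
    return url.replace(parsed.password, secret)


print(mask_password('https://user:hunter2@github.com/user/repo.git'))
# https://user:*****@github.com/user/repo.git
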
@@ -106,6 +121,7 @@ def mask_password(url, secret='*****'):
     return url.replace(parsed.password, secret)
+
 def parse_args():
     parser = argparse.ArgumentParser(description='Backup a github account')
     parser.add_argument('user',
@@ -246,7 +262,7 @@ def get_auth(args, encode=True):
         if encode:
             password = args.password
         else:
-            password = urllib.quote(args.password)
+            password = urlquote(args.password)
         auth = args.username + ':' + password
     elif args.password:
         log_error('You must specify a username for basic auth')
@@ -254,10 +270,10 @@
     if not auth:
         return None
-    if encode == False:
+    if not encode:
         return auth
-    return base64.b64encode(auth)
+    return base64.b64encode(auth.encode('ascii'))
 def get_github_api_host(args):
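
get_auth() needs two fixes: urllib.quote() has moved (covered by the urlquote alias from the shim), and base64.b64encode() only accepts bytes on Python 3, so the user:password string is encoded to ASCII first; the encode == False test also becomes the idiomatic not encode. Because b64encode() then returns bytes, the Authorization header built in _construct_request further down has to stay in bytes as well. A sketch with illustrative credentials:

import base64

try:
    from urllib.parse import quote as urlquote   # python 3
except ImportError:
    from urllib import quote as urlquote         # python 2

username, password = 'octocat', 'p@ss word'      # illustrative credentials
auth = username + ':' + urlquote(password)

# b64encode() requires bytes on Python 3 (a str only works on Python 2),
# hence the explicit .encode('ascii') before encoding.
token = base64.b64encode(auth.encode('ascii'))
print(token)   # bytes on Python 3, str on Python 2
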
@@ -277,6 +293,7 @@ def get_github_host(args):
     return host
+
 def get_github_repo_url(args, repository):
     if args.prefer_ssh:
         return repository['ssh_url']
@@ -293,6 +310,7 @@ def get_github_repo_url(args, repository):
     return repo_url
+
 def retrieve_data(args, template, query_args=None, single_request=False):
     auth = get_auth(args)
     query_args = get_query_args(query_args)
@@ -312,7 +330,7 @@ def retrieve_data(args, template, query_args=None, single_request=False):
             errors.append(template.format(status_code, r.reason))
             log_error(errors)
-        response = json.loads(r.read())
+        response = json.loads(r.read().decode('utf-8'))
         if len(errors) == 0:
             if type(response) == list:
                 data.extend(response)
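
On Python 3, the response object's read() returns bytes, and json.loads() did not accept bytes until Python 3.6, so the body is decoded explicitly; the GitHub API serves UTF-8 JSON. A standalone sketch against an unauthenticated endpoint chosen only for illustration:

import json

try:
    from urllib.request import urlopen   # python 3
except ImportError:
    from urllib2 import urlopen          # python 2

r = urlopen('https://api.github.com/rate_limit')   # illustrative endpoint
body = r.read()                  # bytes on Python 3, str on Python 2
# str.decode('utf-8') also exists on Python 2, so the same line runs on both.
response = json.loads(body.decode('utf-8'))
print(response['rate']['limit'])
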
@@ -344,11 +362,11 @@ def _get_response(request, auth, template):
     while True:
         should_continue = False
         try:
-            r = urllib2.urlopen(request)
-        except urllib2.HTTPError as exc:
+            r = urlopen(request)
+        except HTTPError as exc:
             errors, should_continue = _request_http_error(exc, auth, errors)  # noqa
             r = exc
-        except urllib2.URLError:
+        except URLError:
             should_continue = _request_url_error(template, retry_timeout)
             if not should_continue:
                 raise
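
The exception classes are the same objects under new homes: HTTPError and URLError live in urllib.error on Python 3 and in urllib2 on Python 2, which the import shim already papers over. An HTTPError is itself a response object, which is why the code keeps it as r and reads the status and body from it later. A sketch of that pattern, with an illustrative URL and a hypothetical fetch() wrapper:

try:
    from urllib.error import HTTPError, URLError   # python 3
    from urllib.request import urlopen
except ImportError:
    from urllib2 import HTTPError, URLError, urlopen  # python 2


def fetch(url):
    try:
        return urlopen(url)
    except HTTPError as exc:
        # The error is itself a response: it still carries a status and body.
        print('HTTP error {0} from {1}'.format(exc.code, url))
        return exc
    except URLError as exc:
        # No response at all (DNS failure, refused connection, ...).
        print('request failed: {0}'.format(exc.reason))
        raise


r = fetch('https://api.github.com/user')   # 401 without credentials
print(r.getcode())
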
@@ -361,14 +379,14 @@
 def _construct_request(per_page, page, query_args, template, auth):
-    querystring = urllib.urlencode(dict({
+    querystring = urlencode(dict(list({
         'per_page': per_page,
         'page': page
-    }.items() + query_args.items()))
+    }.items()) + list(query_args.items())))
 
-    request = urllib2.Request(template + '?' + querystring)
+    request = Request(template + '?' + querystring)
     if auth is not None:
-        request.add_header('Authorization', 'Basic ' + auth)
+        request.add_header('Authorization', 'Basic '.encode('ascii') + auth)
     return request
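
_construct_request() runs into two more Python 3 changes: dict.items() returns a view rather than a list, so both halves are wrapped in list() before concatenation (the extra list() calls are harmless on Python 2), and since get_auth() now returns bytes, the 'Basic ' prefix is encoded so that bytes are concatenated with bytes instead of raising a TypeError. A sketch with illustrative query arguments and credentials:

import base64

try:
    from urllib.parse import urlencode    # python 3
    from urllib.request import Request
except ImportError:
    from urllib import urlencode          # python 2
    from urllib2 import Request

per_page, page = 100, 1
query_args = {'type': 'owner'}             # illustrative extra arguments

# items() is a view on Python 3, so list() both halves before concatenating.
querystring = urlencode(dict(list({
    'per_page': per_page,
    'page': page
}.items()) + list(query_args.items())))

request = Request('https://api.github.com/user/repos?' + querystring)

auth = base64.b64encode('user:secret'.encode('ascii'))   # bytes on Python 3
# Encode the prefix too: bytes + bytes, never str + bytes.
request.add_header('Authorization', 'Basic '.encode('ascii') + auth)
print(request.get_full_url())

Decoding the token back to a str header would work equally well; keeping everything as bytes is the smaller change.
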
@@ -513,6 +531,7 @@ def backup_repositories(args, output_directory, repositories):
     if args.incremental:
         open(last_update_path, 'w').write(last_update)
+
 def backup_issues(args, repo_cwd, repository, repos_template):
     has_issues_dir = os.path.isdir('{0}/issues/.git'.format(repo_cwd))
     if args.skip_existing and has_issues_dir:
@@ -551,10 +570,10 @@ def backup_issues(args, repo_cwd, repository, repos_template):
     if issues_skipped:
         issues_skipped_message = ' (skipped {0} pull requests)'.format(issues_skipped)
-    log_info('Saving {0} issues to disk{1}'.format(len(issues.keys()), issues_skipped_message))
+    log_info('Saving {0} issues to disk{1}'.format(len(list(issues.keys())), issues_skipped_message))
     comments_template = _issue_template + '/{0}/comments'
     events_template = _issue_template + '/{0}/events'
-    for number, issue in issues.iteritems():
+    for number, issue in list(issues.items()):
         if args.include_issue_comments or args.include_everything:
             template = comments_template.format(number)
             issues[number]['comment_data'] = retrieve_data(args, template)
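
dict.iteritems() no longer exists on Python 3; items() works on both, and the list() wrapper is the mechanical 2to3-style conversion (it snapshots the pairs and costs nothing at this size). The same substitution, along with len(list(d.keys())) where a plain len(d) would also do, is applied to the pulls and milestones loops in the hunks that follow. A minimal sketch with made-up data:

issues = {1: {'title': 'first'}, 2: {'title': 'second'}}   # illustrative data

# iteritems() is Python 2 only; list(d.items()) runs on both versions.
for number, issue in list(issues.items()):
    issues[number]['comment_data'] = []    # placeholder for fetched comments

print('Saving {0} issues to disk'.format(len(list(issues.keys()))))
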
@@ -597,10 +616,10 @@ def backup_pulls(args, repo_cwd, repository, repos_template):
         if not args.since or pull['updated_at'] >= args.since:
             pulls[pull['number']] = pull
-    log_info('Saving {0} pull requests to disk'.format(len(pulls.keys())))
+    log_info('Saving {0} pull requests to disk'.format(len(list(pulls.keys()))))
     comments_template = _pulls_template + '/{0}/comments'
     commits_template = _pulls_template + '/{0}/commits'
-    for number, pull in pulls.iteritems():
+    for number, pull in list(pulls.items()):
         if args.include_pull_comments or args.include_everything:
             template = comments_template.format(number)
             pulls[number]['comment_data'] = retrieve_data(args, template)
@@ -634,8 +653,8 @@ def backup_milestones(args, repo_cwd, repository, repos_template):
     for milestone in _milestones:
         milestones[milestone['number']] = milestone
-    log_info('Saving {0} milestones to disk'.format(len(milestones.keys())))
-    for number, milestone in milestones.iteritems():
+    log_info('Saving {0} milestones to disk'.format(len(list(milestones.keys()))))
+    for number, milestone in list(milestones.items()):
         milestone_file = '{0}/{1}.json'.format(milestone_cwd, number)
         with codecs.open(milestone_file, 'w', encoding='utf-8') as f:
             json_dump(milestone, f)
@@ -661,7 +680,7 @@ def backup_hooks(args, repo_cwd, repository, repos_template):
     hook_cwd = os.path.join(repo_cwd, 'hooks')
     output_file = '{0}/hooks.json'.format(hook_cwd)
     template = '{0}/{1}/hooks'.format(repos_template,
-                                 repository['full_name'])
+                                      repository['full_name'])
     try:
         _backup_data(args,
                      'hooks',