chore: reformat file to fix lint issues

Jose Diaz-Gonzalez
2025-01-03 20:07:40 -05:00
parent eb545c1c2f
commit 0e0197149e


@@ -19,12 +19,12 @@ import ssl
 import subprocess
 import sys
 import time
+from datetime import datetime
 from http.client import IncompleteRead
 from urllib.error import HTTPError, URLError
 from urllib.parse import quote as urlquote
 from urllib.parse import urlencode, urlparse
 from urllib.request import HTTPRedirectHandler, Request, build_opener, urlopen
-from datetime import datetime
 
 try:
     from . import __version__
@@ -40,16 +40,21 @@ logger = logging.getLogger(__name__)
 https_ctx = ssl.create_default_context()
 if not https_ctx.get_ca_certs():
     import warnings
-    warnings.warn('\n\nYOUR DEFAULT CA CERTS ARE EMPTY.\n' +
-                  'PLEASE POPULATE ANY OF:' +
-                  ''.join([
-                      '\n - ' + x
-                      for x in ssl.get_default_verify_paths()
-                      if type(x) is str
-                  ]) + '\n', stacklevel=2)
+
+    warnings.warn(
+        "\n\nYOUR DEFAULT CA CERTS ARE EMPTY.\n"
+        "PLEASE POPULATE ANY OF:"
+        + "".join(
+            ["\n - " + x for x in ssl.get_default_verify_paths() if type(x) is str]
+        )
+        + "\n",
+        stacklevel=2,
+    )
     import certifi
+
     https_ctx = ssl.create_default_context(cafile=certifi.where())
 
 
 def logging_subprocess(
     popenargs, stdout_log_level=logging.DEBUG, stderr_log_level=logging.ERROR, **kwargs
 ):
@@ -524,7 +529,7 @@ def get_github_host(args):
 def read_file_contents(file_uri):
-    return open(file_uri[len(FILE_URI_PREFIX):], "rt").readline().strip()
+    return open(file_uri[len(FILE_URI_PREFIX) :], "rt").readline().strip()
 
 
 def get_github_repo_url(args, repository):
@@ -795,13 +800,15 @@ def download_file(url, path, auth, as_app=False, fine=False):
     if os.path.exists(path):
         return
 
-    request = _construct_request(per_page=100,
-                                 page=1,
-                                 query_args={},
-                                 template=url,
-                                 auth=auth,
-                                 as_app=as_app,
-                                 fine=fine)
+    request = _construct_request(
+        per_page=100,
+        page=1,
+        query_args={},
+        template=url,
+        auth=auth,
+        as_app=as_app,
+        fine=fine,
+    )
     request.add_header("Accept", "application/octet-stream")
 
     opener = build_opener(S3HTTPRedirectHandler)
@@ -944,11 +951,15 @@ def filter_repositories(args, unfiltered_repositories):
if r.get("language") and r.get("language").lower() in languages if r.get("language") and r.get("language").lower() in languages
] # noqa ] # noqa
if name_regex: if name_regex:
repositories = [r for r in repositories if "name" not in r or name_regex.match(r["name"])] repositories = [
r for r in repositories if "name" not in r or name_regex.match(r["name"])
]
if args.skip_archived: if args.skip_archived:
repositories = [r for r in repositories if not r.get("archived")] repositories = [r for r in repositories if not r.get("archived")]
if args.exclude: if args.exclude:
repositories = [r for r in repositories if "name" not in r or r["name"] not in args.exclude] repositories = [
r for r in repositories if "name" not in r or r["name"] not in args.exclude
]
return repositories return repositories
@@ -1244,10 +1255,16 @@ def backup_releases(args, repo_cwd, repository, repos_template, include_assets=F
     if args.skip_prerelease:
         releases = [r for r in releases if not r["prerelease"] and not r["draft"]]
 
-    if args.number_of_latest_releases and args.number_of_latest_releases < len(releases):
-        releases.sort(key=lambda item: datetime.strptime(item["created_at"], "%Y-%m-%dT%H:%M:%SZ"),
-                      reverse=True)
-        releases = releases[:args.number_of_latest_releases]
+    if args.number_of_latest_releases and args.number_of_latest_releases < len(
+        releases
+    ):
+        releases.sort(
+            key=lambda item: datetime.strptime(
+                item["created_at"], "%Y-%m-%dT%H:%M:%SZ"
+            ),
+            reverse=True,
+        )
+        releases = releases[: args.number_of_latest_releases]
         logger.info("Saving the latest {0} releases to disk".format(len(releases)))
     else:
         logger.info("Saving {0} releases to disk".format(len(releases)))
@@ -1274,7 +1291,7 @@ def backup_releases(args, repo_cwd, repository, repos_template, include_assets=F
                     os.path.join(release_assets_cwd, asset["name"]),
                     get_auth(args, encode=not args.as_app),
                     as_app=args.as_app,
-                    fine=True if args.token_fine is not None else False
+                    fine=True if args.token_fine is not None else False,
                 )