mirror of
https://github.com/josegonzalez/python-github-backup.git
synced 2025-12-05 16:18:02 +01:00
Compare commits
437 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5f07157c9b | ||
|
|
87f5b76c52 | ||
|
|
27eb009e34 | ||
|
|
82c1fc3086 | ||
|
|
a4f15b06d9 | ||
|
|
aa217774ff | ||
|
|
d820dd994d | ||
|
|
1bad563e3f | ||
|
|
175ac19be6 | ||
|
|
773ccecb8c | ||
|
|
e27b5a8ee3 | ||
|
|
fb8945fc09 | ||
|
|
7333458ee4 | ||
|
|
cf8b4c6b45 | ||
|
|
cabf8a770a | ||
|
|
7e0f7d1930 | ||
|
|
a9bdd6feb7 | ||
|
|
fe16d2421c | ||
|
|
16b5b304e7 | ||
|
|
8f58ef6229 | ||
|
|
51cf429dc2 | ||
|
|
53714612d4 | ||
|
|
f6e241833d | ||
|
|
17dc265385 | ||
|
|
704d31cbf7 | ||
|
|
db69f5a5e8 | ||
|
|
ba367a927c | ||
|
|
e8bf4257da | ||
|
|
8eab8d02ce | ||
|
|
e4bd19acea | ||
|
|
176cadfcc4 | ||
|
|
b49544270e | ||
|
|
27fdd358fb | ||
|
|
abe6192ee9 | ||
|
|
0a2d6ed2ca | ||
|
|
1a8eb7a906 | ||
|
|
40e6e34908 | ||
|
|
2885fc6822 | ||
|
|
434b4bf4a0 | ||
|
|
677f3d3287 | ||
|
|
9164f088b8 | ||
|
|
c1f9ea7b9b | ||
|
|
6d51d199c5 | ||
|
|
2b555dc964 | ||
|
|
b818e9b95f | ||
|
|
4157cab89f | ||
|
|
07fd47a596 | ||
|
|
5530a1badd | ||
|
|
90ac4999ea | ||
|
|
f4dfc57ba2 | ||
|
|
3d354beb24 | ||
|
|
552c1051e3 | ||
|
|
c92f5ef0f2 | ||
|
|
095b712a77 | ||
|
|
3a4aebbcfe | ||
|
|
e75021db80 | ||
|
|
0f34ecb77d | ||
|
|
20e4d385a5 | ||
|
|
a49322cf7d | ||
|
|
332c9b586a | ||
|
|
09bf9275d1 | ||
|
|
fcf21f7a2e | ||
|
|
36812a332b | ||
|
|
0e0197149e | ||
|
|
eb545c1c2f | ||
|
|
2e72797984 | ||
|
|
68fe29d1e1 | ||
|
|
3dc3691770 | ||
|
|
5b0608ce14 | ||
|
|
1ce8455860 | ||
|
|
dcb89a5c33 | ||
|
|
b0bfffde1a | ||
|
|
0f3aaa6fc2 | ||
|
|
c39ec9c549 | ||
|
|
e981ce3ff9 | ||
|
|
22d8f8e649 | ||
|
|
aaefac1a66 | ||
|
|
cb66375e1e | ||
|
|
24d7aa83df | ||
|
|
c8c71239c7 | ||
|
|
6ca8030648 | ||
|
|
53f6650f61 | ||
|
|
548a2ec405 | ||
|
|
871d69b99a | ||
|
|
ca3c4fa64b | ||
|
|
0846e7d8e5 | ||
|
|
503444359d | ||
|
|
04c70ce277 | ||
|
|
e774c70275 | ||
|
|
ba46cb87e8 | ||
|
|
883407f8ca | ||
|
|
aacb252e57 | ||
|
|
2623167110 | ||
|
|
f6ad296730 | ||
|
|
c8eef58d76 | ||
|
|
8eb154a540 | ||
|
|
2e9db92b68 | ||
|
|
09bbcfc7b1 | ||
|
|
4e14f5a2c6 | ||
|
|
b474e1654f | ||
|
|
71d70265cc | ||
|
|
2309b0cb76 | ||
|
|
1e14a4eecd | ||
|
|
56d3fd75bf | ||
|
|
c3e470b34e | ||
|
|
4948178a63 | ||
|
|
88de80c480 | ||
|
|
15eeff7879 | ||
|
|
4bb71db468 | ||
|
|
17af2cbc28 | ||
|
|
e0d66daadb | ||
|
|
1971c97b5d | ||
|
|
b1b3df692d | ||
|
|
8d7311efbf | ||
|
|
8449d6352d | ||
|
|
d8c228c83e | ||
|
|
4a134ae2ec | ||
|
|
5cb7c6ad2e | ||
|
|
75382afeae | ||
|
|
f325daa875 | ||
|
|
2cc34de2a3 | ||
|
|
dea87873f9 | ||
|
|
0288b5f553 | ||
|
|
02a07d3f0d | ||
|
|
24a7b1f885 | ||
|
|
22fa2eb97e | ||
|
|
cb147cf6d0 | ||
|
|
298724acfc | ||
|
|
65d541f577 | ||
|
|
8b08685678 | ||
|
|
b18ba6de28 | ||
|
|
358d1e3d3e | ||
|
|
1cd04281e9 | ||
|
|
6630b2b82e | ||
|
|
391f2ba305 | ||
|
|
1f0bf50381 | ||
|
|
eb44c735eb | ||
|
|
caff40e65b | ||
|
|
bba39fb4c8 | ||
|
|
093db93994 | ||
|
|
d835d47c17 | ||
|
|
2cd9061c46 | ||
|
|
0cc50bc4cb | ||
|
|
436e8df0ac | ||
|
|
9812988a4a | ||
|
|
1eccebcb83 | ||
|
|
122eb56aa1 | ||
|
|
a0fdae3314 | ||
|
|
80fa92664c | ||
|
|
7b69394488 | ||
|
|
d1d3d84d95 | ||
|
|
fff2aa4075 | ||
|
|
8eba46d8a7 | ||
|
|
9dc3458dba | ||
|
|
e9d7692123 | ||
|
|
a1ef61f87c | ||
|
|
6b62973997 | ||
|
|
b25af67898 | ||
|
|
0380fb8e35 | ||
|
|
f62fe5e6c9 | ||
|
|
c97598c914 | ||
|
|
c488b0adf9 | ||
|
|
888815c271 | ||
|
|
66e11aa532 | ||
|
|
d1874c0bd9 | ||
|
|
4c07bd1310 | ||
|
|
fd2d398025 | ||
|
|
53d2ceec10 | ||
|
|
421a7ec62b | ||
|
|
ec43649bcd | ||
|
|
e869844dba | ||
|
|
0857a37440 | ||
|
|
585af4c4e3 | ||
|
|
41ec01d5cb | ||
|
|
7dc22358df | ||
|
|
b855bcabf6 | ||
|
|
3c3262ed69 | ||
|
|
42b836f623 | ||
|
|
09f4168db6 | ||
|
|
3e9a4fa0d8 | ||
|
|
ab18e96ea8 | ||
|
|
eb88def888 | ||
|
|
7fe6541291 | ||
|
|
c8b8b270f6 | ||
|
|
a97f15b519 | ||
|
|
500c97c60e | ||
|
|
31a6e52a5e | ||
|
|
4c5187bcff | ||
|
|
2de69beffa | ||
|
|
96592295e1 | ||
|
|
bd65c3d5d6 | ||
|
|
aaf45022cc | ||
|
|
7cdf428e3a | ||
|
|
cfb1f1368b | ||
|
|
4700a26d90 | ||
|
|
f53f7d9b71 | ||
|
|
3b6aa060ba | ||
|
|
76ff7f3b0d | ||
|
|
2615cab114 | ||
|
|
fda71b0467 | ||
|
|
a9f82faa1c | ||
|
|
f17bf19776 | ||
|
|
54c81de3d7 | ||
|
|
f2b4f566a1 | ||
|
|
2724f02b0a | ||
|
|
e0bf80a6aa | ||
|
|
b60034a9d7 | ||
|
|
878713a4e0 | ||
|
|
3b0c08cdc1 | ||
|
|
b52d9bfdc8 | ||
|
|
336b8b746f | ||
|
|
4e7d6f7497 | ||
|
|
7d07cbbe4f | ||
|
|
b80af2a4ca | ||
|
|
5dd0744ce0 | ||
|
|
81876a2bb3 | ||
|
|
a2b13c8109 | ||
|
|
f63be3be24 | ||
|
|
9cf85b087f | ||
|
|
f449d8bbe3 | ||
|
|
7d03e4c9bb | ||
|
|
4406ba7f07 | ||
|
|
febf380c57 | ||
|
|
f9b627c1e4 | ||
|
|
f998943171 | ||
|
|
2bf8898545 | ||
|
|
dbc1619106 | ||
|
|
ec210166f7 | ||
|
|
ea74aa5094 | ||
|
|
7437e3abb1 | ||
|
|
6f3be3d0e8 | ||
|
|
d7ba57075e | ||
|
|
b277baa6ea | ||
|
|
15de769d67 | ||
|
|
a9d35c0fd5 | ||
|
|
20f5fd7a86 | ||
|
|
f12b877509 | ||
|
|
96e6f58159 | ||
|
|
d163cd66a4 | ||
|
|
a8a583bed1 | ||
|
|
68e718010f | ||
|
|
a06c3e9fd3 | ||
|
|
fe07d5ad09 | ||
|
|
12799bb72c | ||
|
|
f1cf4cd315 | ||
|
|
f3340cd9eb | ||
|
|
0ebaffd102 | ||
|
|
2730fc3e5a | ||
|
|
0b2330c2c4 | ||
|
|
82e35fb1cf | ||
|
|
e8f027469e | ||
|
|
37ef0222e1 | ||
|
|
96a73b3fe8 | ||
|
|
8b1bfd433c | ||
|
|
cca8a851ad | ||
|
|
b5d749ec46 | ||
|
|
00e5c019db | ||
|
|
61275c61b2 | ||
|
|
60cb484a19 | ||
|
|
fbb977acf4 | ||
|
|
07e32b186c | ||
|
|
dcc90b747a | ||
|
|
f414fac108 | ||
|
|
38692bc836 | ||
|
|
81362e5596 | ||
|
|
753a26d0d6 | ||
|
|
b629a865f4 | ||
|
|
75ec773a6f | ||
|
|
f8a16ee0f8 | ||
|
|
3d5eb359e2 | ||
|
|
125cfca05e | ||
|
|
63441ebfbc | ||
|
|
7ad324225e | ||
|
|
885e94a102 | ||
|
|
9e1800f56e | ||
|
|
d057ee0d04 | ||
|
|
64562f2460 | ||
|
|
f7f9ffd017 | ||
|
|
048ef04e2a | ||
|
|
b1acfed83a | ||
|
|
18e78a4d66 | ||
|
|
1ed5427043 | ||
|
|
c2e3665ed8 | ||
|
|
0a30a92fe4 | ||
|
|
cc52587f52 | ||
|
|
853b7c46a1 | ||
|
|
e23d12d490 | ||
|
|
f8e1151111 | ||
|
|
664c2a765e | ||
|
|
fa7148d38f | ||
|
|
480ce3ce2a | ||
|
|
943e84e3d9 | ||
|
|
0c924c3158 | ||
|
|
f62c4eaf8b | ||
|
|
a53d7f6849 | ||
|
|
4e571d0735 | ||
|
|
5a71bc5e5a | ||
|
|
794ccf3996 | ||
|
|
977424c153 | ||
|
|
613576dd25 | ||
|
|
638bf7a77e | ||
|
|
725f2c3b8f | ||
|
|
41ece08152 | ||
|
|
3a5ef5158d | ||
|
|
cb1b0b6c6b | ||
|
|
d7f0747432 | ||
|
|
d411e20580 | ||
|
|
d7b85264cd | ||
|
|
031a984434 | ||
|
|
9e16f39e3e | ||
|
|
2de96390be | ||
|
|
78cff47a91 | ||
|
|
fa27988c1c | ||
|
|
bb2e2b8c6f | ||
|
|
8fd0f2b64f | ||
|
|
753a551961 | ||
|
|
607b6ca69b | ||
|
|
ef71655b01 | ||
|
|
d8bcbfa644 | ||
|
|
751b0d6e82 | ||
|
|
ea633ca2bb | ||
|
|
a2115ce3e5 | ||
|
|
8a00bb1903 | ||
|
|
e53f8d4724 | ||
|
|
356f5f674b | ||
|
|
13128635cb | ||
|
|
6e6842b025 | ||
|
|
272177c395 | ||
|
|
70f711ea68 | ||
|
|
3fc9957aac | ||
|
|
78098aae23 | ||
|
|
fb7cc5ed53 | ||
|
|
c0679b9cc3 | ||
|
|
03b9d1b2d8 | ||
|
|
5025f69878 | ||
|
|
a351cdc103 | ||
|
|
85e4399408 | ||
|
|
c8171b692a | ||
|
|
523c811cc6 | ||
|
|
857ad0afab | ||
|
|
3f65eadee1 | ||
|
|
a8e8841b26 | ||
|
|
8e542fd6b6 | ||
|
|
1865941b14 | ||
|
|
03c68561a5 | ||
|
|
196acd0aca | ||
|
|
679ac841f6 | ||
|
|
498d9eba32 | ||
|
|
0f82b1717c | ||
|
|
4d5126f303 | ||
|
|
b864218b44 | ||
|
|
98919c82c9 | ||
|
|
045eacbf18 | ||
|
|
7a234ba7ed | ||
|
|
e8a255b450 | ||
|
|
81a2f762da | ||
|
|
cb0293cbe5 | ||
|
|
252c25461f | ||
|
|
e8ed03fd06 | ||
|
|
38010d7c39 | ||
|
|
71b4288e6b | ||
|
|
ba4fa9fa2d | ||
|
|
869f761c90 | ||
|
|
195e700128 | ||
|
|
27441b71b6 | ||
|
|
cfeaee7309 | ||
|
|
fac8e4274f | ||
|
|
17fee66f31 | ||
|
|
a56d27dd8b | ||
|
|
e57873b6dd | ||
|
|
2658b039a1 | ||
|
|
fd684a71fb | ||
|
|
bacd77030b | ||
|
|
b73079daf2 | ||
|
|
eca8a70666 | ||
|
|
e74765ba7f | ||
|
|
6db5bd731b | ||
|
|
7305871c20 | ||
|
|
baf7b1a9b4 | ||
|
|
121fa68294 | ||
|
|
44dfc79edc | ||
|
|
89f59cc7a2 | ||
|
|
ad8c5b8768 | ||
|
|
921aab3729 | ||
|
|
ea4c3d0f6f | ||
|
|
9b6400932d | ||
|
|
de0c3f46c6 | ||
|
|
73b069f872 | ||
|
|
3d3f512074 | ||
|
|
1c3078992d | ||
|
|
4b40ae94d7 | ||
|
|
a18fda9faf | ||
|
|
41130fc8b0 | ||
|
|
2340a02fc6 | ||
|
|
cafff4ae80 | ||
|
|
3193d120e5 | ||
|
|
da4b29a2d6 | ||
|
|
d05c96ecef | ||
|
|
c86163bfe6 | ||
|
|
eff6e36974 | ||
|
|
63e458bafb | ||
|
|
57ab5ce1a2 | ||
|
|
d148f9b900 | ||
|
|
89ee22c2be | ||
|
|
9e472b74e6 | ||
|
|
4b459f9af8 | ||
|
|
b70ea87db7 | ||
|
|
f8be34562b | ||
|
|
ec05204aa9 | ||
|
|
628f2cbf73 | ||
|
|
38bf438d2f | ||
|
|
899cf42b57 | ||
|
|
b5972aaaf0 | ||
|
|
d860f369e9 | ||
|
|
77ab1bda15 | ||
|
|
4a4a317331 | ||
|
|
5a8e1ac275 | ||
|
|
0de341eab4 | ||
|
|
b0130fdf94 | ||
|
|
b49f399037 | ||
|
|
321414d352 | ||
|
|
413d4381cc | ||
|
|
0110ea40ed | ||
|
|
8d2ef2f528 | ||
|
|
1a79f755a5 | ||
|
|
abf45d5b54 | ||
|
|
fd33037b1c | ||
|
|
87dab293ed | ||
|
|
0244af4e05 | ||
|
|
eca9f0f7df | ||
|
|
afa2a6d587 | ||
|
|
b77ea48d74 | ||
|
|
f378254188 | ||
|
|
83128e986a | ||
|
|
17e4f9a125 | ||
|
|
ef88248c41 | ||
|
|
0a4decfb3b |
117
.gitchangelog.rc
Normal file
117
.gitchangelog.rc
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
#
|
||||||
|
# Format
|
||||||
|
#
|
||||||
|
# ACTION: [AUDIENCE:] COMMIT_MSG [@TAG ...]
|
||||||
|
#
|
||||||
|
# Description
|
||||||
|
#
|
||||||
|
# ACTION is one of 'chg', 'fix', 'new'
|
||||||
|
#
|
||||||
|
# Is WHAT the change is about.
|
||||||
|
#
|
||||||
|
# 'chg' is for refactor, small improvement, cosmetic changes...
|
||||||
|
# 'fix' is for bug fixes
|
||||||
|
# 'new' is for new features, big improvement
|
||||||
|
#
|
||||||
|
# SUBJECT is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc'
|
||||||
|
#
|
||||||
|
# Is WHO is concerned by the change.
|
||||||
|
#
|
||||||
|
# 'dev' is for developpers (API changes, refactors...)
|
||||||
|
# 'usr' is for final users (UI changes)
|
||||||
|
# 'pkg' is for packagers (packaging changes)
|
||||||
|
# 'test' is for testers (test only related changes)
|
||||||
|
# 'doc' is for doc guys (doc only changes)
|
||||||
|
#
|
||||||
|
# COMMIT_MSG is ... well ... the commit message itself.
|
||||||
|
#
|
||||||
|
# TAGs are additionnal adjective as 'refactor' 'minor' 'cosmetic'
|
||||||
|
#
|
||||||
|
# 'refactor' is obviously for refactoring code only
|
||||||
|
# 'minor' is for a very meaningless change (a typo, adding a comment)
|
||||||
|
# 'cosmetic' is for cosmetic driven change (re-indentation, 80-col...)
|
||||||
|
#
|
||||||
|
# Example:
|
||||||
|
#
|
||||||
|
# new: usr: support of bazaar implemented
|
||||||
|
# chg: re-indentend some lines @cosmetic
|
||||||
|
# new: dev: updated code to be compatible with last version of killer lib.
|
||||||
|
# fix: pkg: updated year of licence coverage.
|
||||||
|
# new: test: added a bunch of test around user usability of feature X.
|
||||||
|
# fix: typo in spelling my name in comment. @minor
|
||||||
|
#
|
||||||
|
# Please note that multi-line commit message are supported, and only the
|
||||||
|
# first line will be considered as the "summary" of the commit message. So
|
||||||
|
# tags, and other rules only applies to the summary. The body of the commit
|
||||||
|
# message will be displayed in the changelog with minor reformating.
|
||||||
|
|
||||||
|
#
|
||||||
|
# ``ignore_regexps`` is a line of regexps
|
||||||
|
#
|
||||||
|
# Any commit having its full commit message matching any regexp listed here
|
||||||
|
# will be ignored and won't be reported in the changelog.
|
||||||
|
#
|
||||||
|
ignore_regexps = [
|
||||||
|
r'(?i)^(Merge pull request|Merge branch|Release|Update)',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
#
|
||||||
|
# ``replace_regexps`` is a dict associating a regexp pattern and its replacement
|
||||||
|
#
|
||||||
|
# It will be applied to get the summary line from the full commit message.
|
||||||
|
#
|
||||||
|
# Note that you can provide multiple replacement patterns, they will be all
|
||||||
|
# tried. If None matches, the summary line will be the full commit message.
|
||||||
|
#
|
||||||
|
replace_regexps = {
|
||||||
|
# current format (ie: 'chg: dev: my commit msg @tag1 @tag2')
|
||||||
|
|
||||||
|
r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$':
|
||||||
|
r'\4',
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# ``section_regexps`` is a list of 2-tuples associating a string label and a
|
||||||
|
# list of regexp
|
||||||
|
#
|
||||||
|
# Commit messages will be classified in sections thanks to this. Section
|
||||||
|
# titles are the label, and a commit is classified under this section if any
|
||||||
|
# of the regexps associated is matching.
|
||||||
|
#
|
||||||
|
section_regexps = [
|
||||||
|
('New', [
|
||||||
|
r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
|
||||||
|
]),
|
||||||
|
('Changes', [
|
||||||
|
r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
|
||||||
|
]),
|
||||||
|
('Fix', [
|
||||||
|
r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
|
||||||
|
]),
|
||||||
|
('Other', None # Match all lines
|
||||||
|
),
|
||||||
|
|
||||||
|
]
|
||||||
|
|
||||||
|
# ``body_split_regexp`` is a regexp
|
||||||
|
#
|
||||||
|
# Commit message body (not the summary) if existing will be split
|
||||||
|
# (new line) on this regexp
|
||||||
|
#
|
||||||
|
body_split_regexp = r'[\n-]'
|
||||||
|
|
||||||
|
|
||||||
|
# ``tag_filter_regexp`` is a regexp
|
||||||
|
#
|
||||||
|
# Tags that will be used for the changelog must match this regexp.
|
||||||
|
#
|
||||||
|
# tag_filter_regexp = r'^[0-9]+$'
|
||||||
|
tag_filter_regexp = r'^(?:[vV])?[0-9\.]+$'
|
||||||
|
|
||||||
|
|
||||||
|
# ``unreleased_version_label`` is a string
|
||||||
|
#
|
||||||
|
# This label will be used as the changelog Title of the last set of changes
|
||||||
|
# between last valid tag and HEAD if any.
|
||||||
|
unreleased_version_label = "%%version%% (unreleased)"
|
||||||
15
.github/dependabot.yml
vendored
Normal file
15
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: pip
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: daily
|
||||||
|
time: "13:00"
|
||||||
|
groups:
|
||||||
|
python-packages:
|
||||||
|
patterns:
|
||||||
|
- "*"
|
||||||
|
- package-ecosystem: "github-actions"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
41
.github/workflows/automatic-release.yml
vendored
Normal file
41
.github/workflows/automatic-release.yml
vendored
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
name: automatic-release
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
release_type:
|
||||||
|
description: Release type
|
||||||
|
required: true
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- patch
|
||||||
|
- minor
|
||||||
|
- major
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
release:
|
||||||
|
name: Release
|
||||||
|
runs-on: ubuntu-24.04
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
ssh-key: ${{ secrets.DEPLOY_PRIVATE_KEY }}
|
||||||
|
- name: Setup Git
|
||||||
|
run: |
|
||||||
|
git config --local user.email "action@github.com"
|
||||||
|
git config --local user.name "GitHub Action"
|
||||||
|
- name: Setup Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.12'
|
||||||
|
- name: Install prerequisites
|
||||||
|
run: pip install -r release-requirements.txt
|
||||||
|
- name: Execute release
|
||||||
|
env:
|
||||||
|
SEMVER_BUMP: ${{ github.event.inputs.release_type }}
|
||||||
|
TWINE_REPOSITORY: ${{ vars.TWINE_REPOSITORY }}
|
||||||
|
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
|
||||||
|
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
|
||||||
|
run: ./release $SEMVER_BUMP
|
||||||
77
.github/workflows/docker.yml
vendored
Normal file
77
.github/workflows/docker.yml
vendored
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
# This workflow uses actions that are not certified by GitHub.
|
||||||
|
# They are provided by a third-party and are governed by
|
||||||
|
# separate terms of service, privacy policy, and support
|
||||||
|
# documentation.
|
||||||
|
|
||||||
|
name: Create and publish a Docker image
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- 'master'
|
||||||
|
- 'main'
|
||||||
|
- 'dev'
|
||||||
|
|
||||||
|
tags:
|
||||||
|
- 'v*'
|
||||||
|
- 'v*.*'
|
||||||
|
- 'v*.*.*'
|
||||||
|
- '*'
|
||||||
|
- '*.*'
|
||||||
|
- '*.*.*'
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- 'main'
|
||||||
|
- 'dev'
|
||||||
|
|
||||||
|
|
||||||
|
env:
|
||||||
|
REGISTRY: ghcr.io
|
||||||
|
IMAGE_NAME: ${{ github.repository }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-push-image:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Log in to the Container registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ env.REGISTRY }}
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Extract metadata (tags, labels) for Docker
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||||
|
tags: |
|
||||||
|
type=semver,pattern={{version}}
|
||||||
|
type=semver,pattern={{major}}.{{minor}}
|
||||||
|
type=semver,pattern={{major}}
|
||||||
|
type=sha
|
||||||
|
type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}
|
||||||
|
|
||||||
|
- name: Build and push Docker image
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
push: true
|
||||||
|
platforms: linux/amd64,linux/arm64
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
33
.github/workflows/lint.yml
vendored
Normal file
33
.github/workflows/lint.yml
vendored
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
---
|
||||||
|
name: "lint"
|
||||||
|
|
||||||
|
# yamllint disable-line rule:truthy
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- "*"
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- "main"
|
||||||
|
- "master"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint:
|
||||||
|
name: lint
|
||||||
|
runs-on: ubuntu-24.04
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Setup Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "3.12"
|
||||||
|
cache: "pip"
|
||||||
|
- run: pip install -r release-requirements.txt && pip install wheel
|
||||||
|
- run: flake8 --ignore=E501,E203,W503
|
||||||
|
- run: black .
|
||||||
|
- run: rst-lint README.rst
|
||||||
|
- run: python setup.py sdist bdist_wheel && twine check dist/*
|
||||||
19
.github/workflows/tagged-release.yml
vendored
Normal file
19
.github/workflows/tagged-release.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
---
|
||||||
|
name: "tagged-release"
|
||||||
|
|
||||||
|
# yamllint disable-line rule:truthy
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- '*'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
tagged-release:
|
||||||
|
name: tagged-release
|
||||||
|
runs-on: ubuntu-24.04
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: "marvinpinto/action-automatic-releases@v1.2.1"
|
||||||
|
with:
|
||||||
|
repo_token: "${{ secrets.GITHUB_TOKEN }}"
|
||||||
|
prerelease: false
|
||||||
21
.gitignore
vendored
21
.gitignore
vendored
@@ -18,10 +18,27 @@ pkg
|
|||||||
|
|
||||||
# Debian Files
|
# Debian Files
|
||||||
debian/files
|
debian/files
|
||||||
debian/python-aws-hostname*
|
debian/python-github-backup*
|
||||||
|
|
||||||
# Sphinx build
|
# Sphinx build
|
||||||
doc/_build
|
doc/_build
|
||||||
|
|
||||||
# Generated man page
|
# Generated man page
|
||||||
doc/aws_hostname.1
|
doc/github_backup.1
|
||||||
|
|
||||||
|
# Annoying macOS files
|
||||||
|
.DS_Store
|
||||||
|
._*
|
||||||
|
|
||||||
|
# IDE configuration files
|
||||||
|
.vscode
|
||||||
|
.atom
|
||||||
|
|
||||||
|
README
|
||||||
|
|
||||||
|
# RSA
|
||||||
|
id_rsa
|
||||||
|
id_rsa.pub
|
||||||
|
|
||||||
|
# Virtual env
|
||||||
|
venv
|
||||||
|
|||||||
2538
CHANGES.rst
2538
CHANGES.rst
File diff suppressed because it is too large
Load Diff
16
Dockerfile
Normal file
16
Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
FROM python:3.9.18-slim
|
||||||
|
|
||||||
|
RUN --mount=type=cache,target=/var/cache/apt \
|
||||||
|
apt-get update && apt-get install -y git git-lfs
|
||||||
|
|
||||||
|
WORKDIR /usr/src/app
|
||||||
|
|
||||||
|
COPY release-requirements.txt .
|
||||||
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
|
pip install -r release-requirements.txt
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
|
pip install .
|
||||||
|
|
||||||
|
ENTRYPOINT [ "github-backup" ]
|
||||||
13
ISSUE_TEMPLATE.md
Normal file
13
ISSUE_TEMPLATE.md
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Important notice regarding filed issues
|
||||||
|
|
||||||
|
This project already fills my needs, and as such I have no real reason to continue it's development. This project is otherwise provided as is, and no support is given.
|
||||||
|
|
||||||
|
If pull requests implementing bug fixes or enhancements are pushed, I am happy to review and merge them (time permitting).
|
||||||
|
|
||||||
|
If you wish to have a bug fixed, you have a few options:
|
||||||
|
|
||||||
|
- Fix it yourself and file a pull request.
|
||||||
|
- File a bug and hope someone else fixes it for you.
|
||||||
|
- Pay me to fix it (my rate is $200 an hour, minimum 1 hour, contact me via my [github email address](https://github.com/josegonzalez) if you want to go this route).
|
||||||
|
|
||||||
|
In all cases, feel free to file an issue, they may be of help to others in the future.
|
||||||
7
PULL_REQUEST.md
Normal file
7
PULL_REQUEST.md
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Important notice regarding filed pull requests
|
||||||
|
|
||||||
|
This project already fills my needs, and as such I have no real reason to continue it's development. This project is otherwise provided as is, and no support is given.
|
||||||
|
|
||||||
|
I will attempt to review pull requests at _my_ earliest convenience. If I am unable to get to your pull request in a timely fashion, it is what it is. This repository does not pay any bills, and I am not required to merge any pull request from any individual.
|
||||||
|
|
||||||
|
If you wish to jump my personal priority queue, you may pay me for my time to review. My rate is $200 an hour - minimum 1 hour - feel free contact me via my github email address if you want to go this route.
|
||||||
269
README.rst
269
README.rst
@@ -4,12 +4,13 @@ github-backup
|
|||||||
|
|
||||||
|PyPI| |Python Versions|
|
|PyPI| |Python Versions|
|
||||||
|
|
||||||
backup a github user or organization
|
The package can be used to backup an *entire* `Github <https://github.com/>`_ organization, repository or user account, including starred repos, issues and wikis in the most appropriate format (clones for wikis, json files for issues).
|
||||||
|
|
||||||
Requirements
|
Requirements
|
||||||
============
|
============
|
||||||
|
|
||||||
- GIT 1.9+
|
- GIT 1.9+
|
||||||
|
- Python
|
||||||
|
|
||||||
Installation
|
Installation
|
||||||
============
|
============
|
||||||
@@ -18,26 +19,40 @@ Using PIP via PyPI::
|
|||||||
|
|
||||||
pip install github-backup
|
pip install github-backup
|
||||||
|
|
||||||
Using PIP via Github::
|
Using PIP via Github (more likely the latest version)::
|
||||||
|
|
||||||
pip install git+https://github.com/josegonzalez/python-github-backup.git#egg=github-backup
|
pip install git+https://github.com/josegonzalez/python-github-backup.git#egg=github-backup
|
||||||
|
|
||||||
|
*Install note for python newcomers:*
|
||||||
|
|
||||||
Usage
|
Python scripts are unlikely to be included in your ``$PATH`` by default, this means it cannot be run directly in terminal with ``$ github-backup ...``, you can either add python's install path to your environments ``$PATH`` or call the script directly e.g. using ``$ ~/.local/bin/github-backup``.*
|
||||||
=====
|
|
||||||
|
|
||||||
CLI Usage is as follows::
|
Basic Help
|
||||||
|
==========
|
||||||
|
|
||||||
github-backup [-h] [-u USERNAME] [-p PASSWORD] [-t TOKEN]
|
Show the CLI help output::
|
||||||
[-o OUTPUT_DIRECTORY] [-i] [--starred] [--watched]
|
|
||||||
[--all] [--issues] [--issue-comments] [--issue-events]
|
github-backup -h
|
||||||
[--pulls] [--pull-comments] [--pull-commits] [--labels]
|
|
||||||
[--hooks] [--milestones] [--repositories] [--bare] [--lfs]
|
CLI Help output::
|
||||||
[--wikis] [--skip-existing] [--all-starred]
|
|
||||||
[-L [LANGUAGES [LANGUAGES ...]]] [-N NAME_REGEX]
|
github-backup [-h] [-u USERNAME] [-p PASSWORD] [-t TOKEN_CLASSIC]
|
||||||
[-H GITHUB_HOST] [-O] [-R REPOSITORY] [-P] [-F]
|
[-f TOKEN_FINE] [--as-app] [-o OUTPUT_DIRECTORY]
|
||||||
[--prefer-ssh] [-v]
|
[-l LOG_LEVEL] [-i] [--starred] [--all-starred]
|
||||||
|
[--watched] [--followers] [--following] [--all] [--issues]
|
||||||
|
[--issue-comments] [--issue-events] [--pulls]
|
||||||
|
[--pull-comments] [--pull-commits] [--pull-details]
|
||||||
|
[--labels] [--hooks] [--milestones] [--repositories]
|
||||||
|
[--bare] [--lfs] [--wikis] [--gists] [--starred-gists]
|
||||||
|
[--skip-archived] [--skip-existing] [-L [LANGUAGES ...]]
|
||||||
|
[-N NAME_REGEX] [-H GITHUB_HOST] [-O] [-R REPOSITORY]
|
||||||
|
[-P] [-F] [--prefer-ssh] [-v]
|
||||||
[--keychain-name OSX_KEYCHAIN_ITEM_NAME]
|
[--keychain-name OSX_KEYCHAIN_ITEM_NAME]
|
||||||
[--keychain-account OSX_KEYCHAIN_ITEM_ACCOUNT]
|
[--keychain-account OSX_KEYCHAIN_ITEM_ACCOUNT]
|
||||||
|
[--releases] [--latest-releases NUMBER_OF_LATEST_RELEASES]
|
||||||
|
[--skip-prerelease] [--assets]
|
||||||
|
[--exclude [REPOSITORY [REPOSITORY ...]]
|
||||||
|
[--throttle-limit THROTTLE_LIMIT] [--throttle-pause THROTTLE_PAUSE]
|
||||||
USER
|
USER
|
||||||
|
|
||||||
Backup a github account
|
Backup a github account
|
||||||
@@ -52,30 +67,45 @@ CLI Usage is as follows::
|
|||||||
-p PASSWORD, --password PASSWORD
|
-p PASSWORD, --password PASSWORD
|
||||||
password for basic auth. If a username is given but
|
password for basic auth. If a username is given but
|
||||||
not a password, the password will be prompted for.
|
not a password, the password will be prompted for.
|
||||||
-t TOKEN, --token TOKEN
|
-f TOKEN_FINE, --token-fine TOKEN_FINE
|
||||||
personal access or OAuth token
|
fine-grained personal access token or path to token
|
||||||
|
(file://...)
|
||||||
|
-t TOKEN_CLASSIC, --token TOKEN_CLASSIC
|
||||||
|
personal access, OAuth, or JSON Web token, or path to
|
||||||
|
token (file://...)
|
||||||
|
--as-app authenticate as github app instead of as a user.
|
||||||
-o OUTPUT_DIRECTORY, --output-directory OUTPUT_DIRECTORY
|
-o OUTPUT_DIRECTORY, --output-directory OUTPUT_DIRECTORY
|
||||||
directory at which to backup the repositories
|
directory at which to backup the repositories
|
||||||
|
-l LOG_LEVEL, --log-level LOG_LEVEL
|
||||||
|
log level to use (default: info, possible levels:
|
||||||
|
debug, info, warning, error, critical)
|
||||||
-i, --incremental incremental backup
|
-i, --incremental incremental backup
|
||||||
|
--incremental-by-files incremental backup using modified time of files
|
||||||
--starred include JSON output of starred repositories in backup
|
--starred include JSON output of starred repositories in backup
|
||||||
--watched include watched repositories in backup
|
--all-starred include starred repositories in backup [*]
|
||||||
--all include everything in backup
|
--watched include JSON output of watched repositories in backup
|
||||||
|
--followers include JSON output of followers in backup
|
||||||
|
--following include JSON output of following users in backup
|
||||||
|
--all include everything in backup (not including [*])
|
||||||
--issues include issues in backup
|
--issues include issues in backup
|
||||||
--issue-comments include issue comments in backup
|
--issue-comments include issue comments in backup
|
||||||
--issue-events include issue events in backup
|
--issue-events include issue events in backup
|
||||||
--pulls include pull requests in backup
|
--pulls include pull requests in backup
|
||||||
--pull-comments include pull request review comments in backup
|
--pull-comments include pull request review comments in backup
|
||||||
--pull-commits include pull request commits in backup
|
--pull-commits include pull request commits in backup
|
||||||
|
--pull-details include more pull request details in backup [*]
|
||||||
--labels include labels in backup
|
--labels include labels in backup
|
||||||
--hooks include hooks in backup (works only when
|
--hooks include hooks in backup (works only when
|
||||||
authenticated)
|
authenticated)
|
||||||
--milestones include milestones in backup
|
--milestones include milestones in backup
|
||||||
--repositories include repository clone in backup
|
--repositories include repository clone in backup
|
||||||
--bare clone bare repositories
|
--bare clone bare repositories
|
||||||
--lfs clone LFS repositories (requires Git LFS to be installed, https://git-lfs.github.com)
|
--lfs clone LFS repositories (requires Git LFS to be
|
||||||
|
installed, https://git-lfs.github.com) [*]
|
||||||
--wikis include wiki clone in backup
|
--wikis include wiki clone in backup
|
||||||
|
--gists include gists in backup [*]
|
||||||
|
--starred-gists include starred gists in backup [*]
|
||||||
--skip-existing skip project if a backup directory exists
|
--skip-existing skip project if a backup directory exists
|
||||||
--all-starred include starred repositories in backup
|
|
||||||
-L [LANGUAGES [LANGUAGES ...]], --languages [LANGUAGES [LANGUAGES ...]]
|
-L [LANGUAGES [LANGUAGES ...]], --languages [LANGUAGES [LANGUAGES ...]]
|
||||||
only allow these languages
|
only allow these languages
|
||||||
-N NAME_REGEX, --name-regex NAME_REGEX
|
-N NAME_REGEX, --name-regex NAME_REGEX
|
||||||
@@ -85,8 +115,8 @@ CLI Usage is as follows::
|
|||||||
-O, --organization whether or not this is an organization user
|
-O, --organization whether or not this is an organization user
|
||||||
-R REPOSITORY, --repository REPOSITORY
|
-R REPOSITORY, --repository REPOSITORY
|
||||||
name of repository to limit backup to
|
name of repository to limit backup to
|
||||||
-P, --private include private repositories
|
-P, --private include private repositories [*]
|
||||||
-F, --fork include forked repositories
|
-F, --fork include forked repositories [*]
|
||||||
--prefer-ssh Clone repositories using SSH instead of HTTPS
|
--prefer-ssh Clone repositories using SSH instead of HTTPS
|
||||||
-v, --version show program's version number and exit
|
-v, --version show program's version number and exit
|
||||||
--keychain-name OSX_KEYCHAIN_ITEM_NAME
|
--keychain-name OSX_KEYCHAIN_ITEM_NAME
|
||||||
@@ -95,17 +125,62 @@ CLI Usage is as follows::
|
|||||||
--keychain-account OSX_KEYCHAIN_ITEM_ACCOUNT
|
--keychain-account OSX_KEYCHAIN_ITEM_ACCOUNT
|
||||||
OSX ONLY: account field of password item in OSX
|
OSX ONLY: account field of password item in OSX
|
||||||
keychain that holds the personal access or OAuth token
|
keychain that holds the personal access or OAuth token
|
||||||
|
--releases include release information, not including assets or
|
||||||
|
binaries
|
||||||
|
--latest-releases NUMBER_OF_LATEST_RELEASES
|
||||||
|
include certain number of the latest releases;
|
||||||
|
only applies if including releases
|
||||||
|
--skip-prerelease skip prerelease and draft versions; only applies if including releases
|
||||||
|
--assets include assets alongside release information; only
|
||||||
|
applies if including releases
|
||||||
|
--exclude [REPOSITORY [REPOSITORY ...]]
|
||||||
|
names of repositories to exclude from backup.
|
||||||
|
--throttle-limit THROTTLE_LIMIT
|
||||||
|
start throttling of GitHub API requests after this
|
||||||
|
amount of API requests remain
|
||||||
|
--throttle-pause THROTTLE_PAUSE
|
||||||
|
wait this amount of seconds when API request
|
||||||
|
throttling is active (default: 30.0, requires
|
||||||
|
--throttle-limit to be set)
|
||||||
|
|
||||||
|
|
||||||
The package can be used to backup an *entire* organization or repository, including issues and wikis in the most appropriate format (clones for wikis, json files for issues).
|
Usage Details
|
||||||
|
=============
|
||||||
|
|
||||||
Authentication
|
Authentication
|
||||||
==============
|
--------------
|
||||||
|
|
||||||
|
**Password-based authentication** will fail if you have two-factor authentication enabled, and will `be deprecated <https://github.blog/2023-03-09-raising-the-bar-for-software-security-github-2fa-begins-march-13/>`_ by 2023 EOY.
|
||||||
|
|
||||||
|
``--username`` is used for basic password authentication and separate from the positional argument ``USER``, which specifies the user account you wish to back up.
|
||||||
|
|
||||||
|
**Classic tokens** are `slightly less secure <https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#personal-access-tokens-classic>`_ as they provide very coarse-grained permissions.
|
||||||
|
|
||||||
|
If you need authentication for long-running backups (e.g. for a cron job) it is recommended to use **fine-grained personal access token** ``-f TOKEN_FINE``.
|
||||||
|
|
||||||
|
|
||||||
|
Fine Tokens
|
||||||
|
~~~~~~~~~~~
|
||||||
|
|
||||||
|
You can "generate new token", choosing the repository scope by selecting specific repos or all repos. On Github this is under *Settings -> Developer Settings -> Personal access tokens -> Fine-grained Tokens*
|
||||||
|
|
||||||
|
Customise the permissions for your use case, but for a personal account full backup you'll need to enable the following permissions:
|
||||||
|
|
||||||
|
**User permissions**: Read access to followers, starring, and watching.
|
||||||
|
|
||||||
|
**Repository permissions**: Read access to contents, issues, metadata, pull requests, and webhooks.
|
||||||
|
|
||||||
|
|
||||||
|
Prefer SSH
|
||||||
|
~~~~~~~~~~
|
||||||
|
|
||||||
|
If cloning repos is enabled with ``--repositories``, ``--all-starred``, ``--wikis``, ``--gists``, ``--starred-gists`` using the ``--prefer-ssh`` argument will use ssh for cloning the git repos, but all other connections will still use their own protocol, e.g. API requests for issues uses HTTPS.
|
||||||
|
|
||||||
|
To clone with SSH, you'll need SSH authentication setup `as usual with Github <https://docs.github.com/en/authentication/connecting-to-github-with-ssh>`_, e.g. via SSH public and private keys.
|
||||||
|
|
||||||
Note: Password-based authentication will fail if you have two-factor authentication enabled.
|
|
||||||
|
|
||||||
Using the Keychain on Mac OSX
|
Using the Keychain on Mac OSX
|
||||||
=============================
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
Note: On Mac OSX the token can be stored securely in the user's keychain. To do this:
|
Note: On Mac OSX the token can be stored securely in the user's keychain. To do this:
|
||||||
|
|
||||||
1. Open Keychain from "Applications -> Utilities -> Keychain Access"
|
1. Open Keychain from "Applications -> Utilities -> Keychain Access"
|
||||||
@@ -119,30 +194,150 @@ Note: When you run github-backup, you will be asked whether you want to allow "
|
|||||||
1. **Allow:** In this case you will need to click "Allow" each time you run `github-backup`
|
1. **Allow:** In this case you will need to click "Allow" each time you run `github-backup`
|
||||||
2. **Always Allow:** In this case, you will not be asked for permission when you run `github-backup` in future. This is less secure, but is required if you want to schedule `github-backup` to run automatically
|
2. **Always Allow:** In this case, you will not be asked for permission when you run `github-backup` in future. This is less secure, but is required if you want to schedule `github-backup` to run automatically
|
||||||
|
|
||||||
About Git LFS
|
|
||||||
=============
|
|
||||||
|
|
||||||
When you use the "--lfs" option, you will need to make sure you have Git LFS installed.
|
Github Rate-limit and Throttling
|
||||||
|
--------------------------------
|
||||||
|
|
||||||
|
"github-backup" will automatically throttle itself based on feedback from the Github API.
|
||||||
|
|
||||||
|
Their API is usually rate-limited to 5000 calls per hour. The API will ask github-backup to pause until a specific time when the limit is reset again (at the start of the next hour). This continues until the backup is complete.
|
||||||
|
|
||||||
|
During a large backup, such as ``--all-starred``, and on a fast connection this can result in (~20 min) pauses with bursts of API calls periodically maxing out the API limit. If this is not suitable `it has been observed <https://github.com/josegonzalez/python-github-backup/issues/76#issuecomment-636158717>`_ under real-world conditions that overriding the throttle with ``--throttle-limit 5000 --throttle-pause 0.6`` provides a smooth rate across the hour, although a ``--throttle-pause 0.72`` (3600 seconds [1 hour] / 5000 limit) is theoretically safer to prevent large rate-limit pauses.
|
||||||
|
|
||||||
|
|
||||||
|
About Git LFS
|
||||||
|
-------------
|
||||||
|
|
||||||
|
When you use the ``--lfs`` option, you will need to make sure you have Git LFS installed.
|
||||||
|
|
||||||
Instructions on how to do this can be found on https://git-lfs.github.com.
|
Instructions on how to do this can be found on https://git-lfs.github.com.
|
||||||
|
|
||||||
Examples
|
|
||||||
========
|
|
||||||
|
|
||||||
Backup all repositories::
|
Run in Docker container
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
To run the tool in a Docker container use the following command:
|
||||||
|
|
||||||
|
sudo docker run --rm -v /path/to/backup:/data --name github-backup ghcr.io/josegonzalez/python-github-backup -o /data $OPTIONS $USER
|
||||||
|
|
||||||
|
Gotchas / Known-issues
|
||||||
|
======================
|
||||||
|
|
||||||
|
All is not everything
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
The ``--all`` argument does not include: cloning private repos (``-P, --private``), cloning forks (``-F, --fork``), cloning starred repositories (``--all-starred``), ``--pull-details``, cloning LFS repositories (``--lfs``), cloning gists (``--gists``) or cloning starred gist repos (``--starred-gists``). See examples for more.
|
||||||
|
|
||||||
|
Cloning all starred size
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
Using the ``--all-starred`` argument to clone all starred repositories may use a large amount of storage space, especially if ``--all`` or more arguments are used. e.g. commonly starred repos can have tens of thousands of issues, many large assets and the repo itself etc. Consider just storing links to starred repos in JSON format with ``--starred``.
|
||||||
|
|
||||||
|
Incremental Backup
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Using (``-i, --incremental``) will only request new data from the API **since the last run (successful or not)**. e.g. only request issues from the API since the last run.
|
||||||
|
|
||||||
|
This means any blocking errors on previous runs can cause a large amount of missing data in backups.
|
||||||
|
|
||||||
|
Using (``--incremental-by-files``) will request new data from the API **based on when the file was modified on filesystem**. e.g. if you modify the file yourself you may miss something.
|
||||||
|
|
||||||
|
Still saver than the previous version.
|
||||||
|
|
||||||
|
Specifically, issues and pull requests are handled like this.
|
||||||
|
|
||||||
|
Known blocking errors
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
Some errors will block the backup run by exiting the script. e.g. receiving a 403 Forbidden error from the Github API.
|
||||||
|
|
||||||
|
If the incremental argument is used, this will result in the next backup only requesting API data since the last blocked/failed run. Potentially causing unexpected large amounts of missing data.
|
||||||
|
|
||||||
|
It's therefore recommended to only use the incremental argument if the output/result is being actively monitored, or complimented with periodic full non-incremental runs, to avoid unexpected missing data in a regular backup runs.
|
||||||
|
|
||||||
|
1. **Starred public repo hooks blocking**
|
||||||
|
|
||||||
|
Since the ``--all`` argument includes ``--hooks``, if you use ``--all`` and ``--all-starred`` together to clone a users starred public repositories, the backup will likely error and block the backup continuing.
|
||||||
|
|
||||||
|
This is due to needing the correct permission for ``--hooks`` on public repos.
|
||||||
|
|
||||||
|
|
||||||
|
"bare" is actually "mirror"
|
||||||
|
---------------------------
|
||||||
|
|
||||||
|
Using the bare clone argument (``--bare``) will actually call git's ``clone --mirror`` command. There's a subtle difference between `bare <https://www.git-scm.com/docs/git-clone#Documentation/git-clone.txt---bare>`_ and `mirror <https://www.git-scm.com/docs/git-clone#Documentation/git-clone.txt---mirror>`_ clone.
|
||||||
|
|
||||||
|
*From git docs "Compared to --bare, --mirror not only maps local branches of the source to local branches of the target, it maps all refs (including remote-tracking branches, notes etc.) and sets up a refspec configuration such that all these refs are overwritten by a git remote update in the target repository."*
|
||||||
|
|
||||||
|
|
||||||
|
Starred gists vs starred repo behaviour
|
||||||
|
---------------------------------------
|
||||||
|
|
||||||
|
The starred normal repo cloning (``--all-starred``) argument stores starred repos separately to the users own repositories. However, using ``--starred-gists`` will store starred gists within the same directory as the users own gists ``--gists``. Also, all gist repo directory names are IDs not the gist's name.
|
||||||
|
|
||||||
|
|
||||||
|
Skip existing on incomplete backups
|
||||||
|
-----------------------------------
|
||||||
|
|
||||||
|
The ``--skip-existing`` argument will skip a backup if the directory already exists, even if the backup in that directory failed (perhaps due to a blocking error). This may result in unexpected missing data in a regular backup.
|
||||||
|
|
||||||
|
|
||||||
|
Github Backup Examples
|
||||||
|
======================
|
||||||
|
|
||||||
|
Backup all repositories, including private ones using a classic token::
|
||||||
|
|
||||||
export ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
export ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||||
github-backup WhiteHouse --token $ACCESS_TOKEN --organization --output-directory /tmp/white-house --repositories
|
github-backup WhiteHouse --token $ACCESS_TOKEN --organization --output-directory /tmp/white-house --repositories --private
|
||||||
|
|
||||||
Backup a single organization repository with everything else (wiki, pull requests, comments, issues etc)::
|
Use a fine-grained access token to backup a single organization repository with everything else (wiki, pull requests, comments, issues etc)::
|
||||||
|
|
||||||
export ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
export FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||||
ORGANIZATION=docker
|
ORGANIZATION=docker
|
||||||
REPO=cli
|
REPO=cli
|
||||||
# e.g. git@github.com:docker/cli.git
|
# e.g. git@github.com:docker/cli.git
|
||||||
github-backup $ORGANIZATION -P -t $ACCESS_TOKEN -o . --all -O -R $REPO
|
github-backup $ORGANIZATION -P -f $FINE_ACCESS_TOKEN -o . --all -O -R $REPO
|
||||||
|
|
||||||
|
Quietly and incrementally backup useful Github user data (public and private repos with SSH) including; all issues, pulls, all public starred repos and gists (omitting "hooks", "releases" and therefore "assets" to prevent blocking). *Great for a cron job.* ::
|
||||||
|
|
||||||
|
export FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||||
|
GH_USER=YOUR-GITHUB-USER
|
||||||
|
|
||||||
|
github-backup -f $FINE_ACCESS_TOKEN --prefer-ssh -o ~/github-backup/ -l error -P -i --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-commits --labels --milestones --repositories --wikis --releases --assets --pull-details --gists --starred-gists $GH_USER
|
||||||
|
|
||||||
|
Debug an error/block or incomplete backup into a temporary directory. Omit "incremental" to fill a previous incomplete backup. ::
|
||||||
|
|
||||||
|
export FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||||
|
GH_USER=YOUR-GITHUB-USER
|
||||||
|
|
||||||
|
github-backup -f $FINE_ACCESS_TOKEN -o /tmp/github-backup/ -l debug -P --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-commits --labels --milestones --repositories --wikis --releases --assets --pull-details --gists --starred-gists $GH_USER
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Development
|
||||||
|
===========
|
||||||
|
|
||||||
|
This project is considered feature complete for the primary maintainer @josegonzalez. If you would like a bugfix or enhancement, pull requests are welcome. Feel free to contact the maintainer for consulting estimates if you'd like to sponsor the work instead.
|
||||||
|
|
||||||
|
Contibuters
|
||||||
|
-----------
|
||||||
|
|
||||||
|
A huge thanks to all the contibuters!
|
||||||
|
|
||||||
|
.. image:: https://contrib.rocks/image?repo=josegonzalez/python-github-backup
|
||||||
|
:target: https://github.com/josegonzalez/python-github-backup/graphs/contributors
|
||||||
|
:alt: contributors
|
||||||
|
|
||||||
|
Testing
|
||||||
|
-------
|
||||||
|
|
||||||
|
This project currently contains no unit tests. To run linting::
|
||||||
|
|
||||||
|
pip install flake8
|
||||||
|
flake8 --ignore=E501
|
||||||
|
|
||||||
|
|
||||||
.. |PyPI| image:: https://img.shields.io/pypi/v/github-backup.svg
|
.. |PyPI| image:: https://img.shields.io/pypi/v/github-backup.svg
|
||||||
:target: https://pypi.python.org/pypi/github-backup/
|
:target: https://pypi.python.org/pypi/github-backup/
|
||||||
.. |Python Versions| image:: https://img.shields.io/pypi/pyversions/github-backup.svg
|
.. |Python Versions| image:: https://img.shields.io/pypi/pyversions/github-backup.svg
|
||||||
:target: https://github.com/albertyw/github-backup
|
:target: https://github.com/josegonzalez/python-github-backup
|
||||||
|
|||||||
@@ -1,913 +1,62 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import base64
|
|
||||||
import calendar
|
|
||||||
import codecs
|
|
||||||
import errno
|
|
||||||
import getpass
|
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
|
||||||
import select
|
|
||||||
import subprocess
|
|
||||||
import sys
|
import sys
|
||||||
import time
|
|
||||||
import platform
|
|
||||||
try:
|
|
||||||
# python 3
|
|
||||||
from urllib.parse import urlparse
|
|
||||||
from urllib.parse import quote as urlquote
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
from urllib.error import HTTPError, URLError
|
|
||||||
from urllib.request import urlopen
|
|
||||||
from urllib.request import Request
|
|
||||||
except ImportError:
|
|
||||||
# python 2
|
|
||||||
from urlparse import urlparse
|
|
||||||
from urllib import quote as urlquote
|
|
||||||
from urllib import urlencode
|
|
||||||
from urllib2 import HTTPError, URLError
|
|
||||||
from urllib2 import urlopen
|
|
||||||
from urllib2 import Request
|
|
||||||
|
|
||||||
from github_backup import __version__
|
from github_backup.github_backup import (
|
||||||
|
backup_account,
|
||||||
FNULL = open(os.devnull, 'w')
|
backup_repositories,
|
||||||
|
check_git_lfs_install,
|
||||||
|
filter_repositories,
|
||||||
def log_error(message):
|
get_authenticated_user,
|
||||||
if type(message) == str:
|
logger,
|
||||||
message = [message]
|
mkdir_p,
|
||||||
|
parse_args,
|
||||||
for msg in message:
|
retrieve_repositories,
|
||||||
sys.stderr.write("{0}\n".format(msg))
|
)
|
||||||
|
|
||||||
sys.exit(1)
|
logging.basicConfig(
|
||||||
|
format="%(asctime)s.%(msecs)03d: %(message)s",
|
||||||
|
datefmt="%Y-%m-%dT%H:%M:%S",
|
||||||
def log_info(message):
|
level=logging.INFO,
|
||||||
if type(message) == str:
|
)
|
||||||
message = [message]
|
|
||||||
|
|
||||||
for msg in message:
|
|
||||||
sys.stdout.write("{0}\n".format(msg))
|
|
||||||
|
|
||||||
|
|
||||||
def logging_subprocess(popenargs,
|
|
||||||
logger,
|
|
||||||
stdout_log_level=logging.DEBUG,
|
|
||||||
stderr_log_level=logging.ERROR,
|
|
||||||
**kwargs):
|
|
||||||
"""
|
|
||||||
Variant of subprocess.call that accepts a logger instead of stdout/stderr,
|
|
||||||
and logs stdout messages via logger.debug and stderr messages via
|
|
||||||
logger.error.
|
|
||||||
"""
|
|
||||||
child = subprocess.Popen(popenargs, stdout=subprocess.PIPE,
|
|
||||||
stderr=subprocess.PIPE, **kwargs)
|
|
||||||
if sys.platform == 'win32':
|
|
||||||
log_info("Windows operating system detected - no subprocess logging will be returned")
|
|
||||||
|
|
||||||
log_level = {child.stdout: stdout_log_level,
|
|
||||||
child.stderr: stderr_log_level}
|
|
||||||
|
|
||||||
def check_io():
|
|
||||||
if sys.platform == 'win32':
|
|
||||||
return
|
|
||||||
ready_to_read = select.select([child.stdout, child.stderr],
|
|
||||||
[],
|
|
||||||
[],
|
|
||||||
1000)[0]
|
|
||||||
for io in ready_to_read:
|
|
||||||
line = io.readline()
|
|
||||||
if not logger:
|
|
||||||
continue
|
|
||||||
if not (io == child.stderr and not line):
|
|
||||||
logger.log(log_level[io], line[:-1])
|
|
||||||
|
|
||||||
# keep checking stdout/stderr until the child exits
|
|
||||||
while child.poll() is None:
|
|
||||||
check_io()
|
|
||||||
|
|
||||||
check_io() # check again to catch anything after the process exits
|
|
||||||
|
|
||||||
rc = child.wait()
|
|
||||||
|
|
||||||
if rc != 0:
|
|
||||||
print('{} returned {}:'.format(popenargs[0], rc), file=sys.stderr)
|
|
||||||
print('\t', ' '.join(popenargs), file=sys.stderr)
|
|
||||||
|
|
||||||
return rc
|
|
||||||
|
|
||||||
|
|
||||||
def mkdir_p(*args):
|
|
||||||
for path in args:
|
|
||||||
try:
|
|
||||||
os.makedirs(path)
|
|
||||||
except OSError as exc: # Python >2.5
|
|
||||||
if exc.errno == errno.EEXIST and os.path.isdir(path):
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def mask_password(url, secret='*****'):
|
|
||||||
parsed = urlparse(url)
|
|
||||||
|
|
||||||
if not parsed.password:
|
|
||||||
return url
|
|
||||||
elif parsed.password == 'x-oauth-basic':
|
|
||||||
return url.replace(parsed.username, secret)
|
|
||||||
|
|
||||||
return url.replace(parsed.password, secret)
|
|
||||||
|
|
||||||
|
|
||||||
def parse_args():
|
|
||||||
parser = argparse.ArgumentParser(description='Backup a github account')
|
|
||||||
parser.add_argument('user',
|
|
||||||
metavar='USER',
|
|
||||||
type=str,
|
|
||||||
help='github username')
|
|
||||||
parser.add_argument('-u',
|
|
||||||
'--username',
|
|
||||||
dest='username',
|
|
||||||
help='username for basic auth')
|
|
||||||
parser.add_argument('-p',
|
|
||||||
'--password',
|
|
||||||
dest='password',
|
|
||||||
help='password for basic auth. '
|
|
||||||
'If a username is given but not a password, the '
|
|
||||||
'password will be prompted for.')
|
|
||||||
parser.add_argument('-t',
|
|
||||||
'--token',
|
|
||||||
dest='token',
|
|
||||||
help='personal access or OAuth token, or path to token (file://...)') # noqa
|
|
||||||
parser.add_argument('-o',
|
|
||||||
'--output-directory',
|
|
||||||
default='.',
|
|
||||||
dest='output_directory',
|
|
||||||
help='directory at which to backup the repositories')
|
|
||||||
parser.add_argument('-i',
|
|
||||||
'--incremental',
|
|
||||||
action='store_true',
|
|
||||||
dest='incremental',
|
|
||||||
help='incremental backup')
|
|
||||||
parser.add_argument('--starred',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_starred',
|
|
||||||
help='include JSON output of starred repositories in backup')
|
|
||||||
parser.add_argument('--all-starred',
|
|
||||||
action='store_true',
|
|
||||||
dest='all_starred',
|
|
||||||
help='include starred repositories in backup')
|
|
||||||
parser.add_argument('--watched',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_watched',
|
|
||||||
help='include watched repositories in backup')
|
|
||||||
parser.add_argument('--all',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_everything',
|
|
||||||
help='include everything in backup')
|
|
||||||
parser.add_argument('--issues',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_issues',
|
|
||||||
help='include issues in backup')
|
|
||||||
parser.add_argument('--issue-comments',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_issue_comments',
|
|
||||||
help='include issue comments in backup')
|
|
||||||
parser.add_argument('--issue-events',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_issue_events',
|
|
||||||
help='include issue events in backup')
|
|
||||||
parser.add_argument('--pulls',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_pulls',
|
|
||||||
help='include pull requests in backup')
|
|
||||||
parser.add_argument('--pull-comments',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_pull_comments',
|
|
||||||
help='include pull request review comments in backup')
|
|
||||||
parser.add_argument('--pull-commits',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_pull_commits',
|
|
||||||
help='include pull request commits in backup')
|
|
||||||
parser.add_argument('--labels',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_labels',
|
|
||||||
help='include labels in backup')
|
|
||||||
parser.add_argument('--hooks',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_hooks',
|
|
||||||
help='include hooks in backup (works only when authenticated)') # noqa
|
|
||||||
parser.add_argument('--milestones',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_milestones',
|
|
||||||
help='include milestones in backup')
|
|
||||||
parser.add_argument('--repositories',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_repository',
|
|
||||||
help='include repository clone in backup')
|
|
||||||
parser.add_argument('--bare',
|
|
||||||
action='store_true',
|
|
||||||
dest='bare_clone',
|
|
||||||
help='clone bare repositories')
|
|
||||||
parser.add_argument('--lfs',
|
|
||||||
action='store_true',
|
|
||||||
dest='lfs_clone',
|
|
||||||
help='clone LFS repositories (requires Git LFS to be installed, https://git-lfs.github.com)')
|
|
||||||
parser.add_argument('--wikis',
|
|
||||||
action='store_true',
|
|
||||||
dest='include_wiki',
|
|
||||||
help='include wiki clone in backup')
|
|
||||||
parser.add_argument('--skip-existing',
|
|
||||||
action='store_true',
|
|
||||||
dest='skip_existing',
|
|
||||||
help='skip project if a backup directory exists')
|
|
||||||
parser.add_argument('-L',
|
|
||||||
'--languages',
|
|
||||||
dest='languages',
|
|
||||||
help='only allow these languages',
|
|
||||||
nargs='*')
|
|
||||||
parser.add_argument('-N',
|
|
||||||
'--name-regex',
|
|
||||||
dest='name_regex',
|
|
||||||
help='python regex to match names against')
|
|
||||||
parser.add_argument('-H',
|
|
||||||
'--github-host',
|
|
||||||
dest='github_host',
|
|
||||||
help='GitHub Enterprise hostname')
|
|
||||||
parser.add_argument('-O',
|
|
||||||
'--organization',
|
|
||||||
action='store_true',
|
|
||||||
dest='organization',
|
|
||||||
help='whether or not this is an organization user')
|
|
||||||
parser.add_argument('-R',
|
|
||||||
'--repository',
|
|
||||||
dest='repository',
|
|
||||||
help='name of repository to limit backup to')
|
|
||||||
parser.add_argument('-P', '--private',
|
|
||||||
action='store_true',
|
|
||||||
dest='private',
|
|
||||||
help='include private repositories')
|
|
||||||
parser.add_argument('-F', '--fork',
|
|
||||||
action='store_true',
|
|
||||||
dest='fork',
|
|
||||||
help='include forked repositories')
|
|
||||||
parser.add_argument('--prefer-ssh',
|
|
||||||
action='store_true',
|
|
||||||
help='Clone repositories using SSH instead of HTTPS')
|
|
||||||
parser.add_argument('-v', '--version',
|
|
||||||
action='version',
|
|
||||||
version='%(prog)s ' + __version__)
|
|
||||||
parser.add_argument('--keychain-name',
|
|
||||||
dest='osx_keychain_item_name',
|
|
||||||
help='OSX ONLY: name field of password item in OSX keychain that holds the personal access or OAuth token')
|
|
||||||
parser.add_argument('--keychain-account',
|
|
||||||
dest='osx_keychain_item_account',
|
|
||||||
help='OSX ONLY: account field of password item in OSX keychain that holds the personal access or OAuth token')
|
|
||||||
return parser.parse_args()
|
|
||||||
|
|
||||||
|
|
||||||
def get_auth(args, encode=True):
|
|
||||||
auth = None
|
|
||||||
|
|
||||||
if args.osx_keychain_item_name:
|
|
||||||
if not args.osx_keychain_item_account:
|
|
||||||
log_error('You must specify both name and account fields for osx keychain password items')
|
|
||||||
else:
|
|
||||||
if platform.system() != 'Darwin':
|
|
||||||
log_error("Keychain arguments are only supported on Mac OSX")
|
|
||||||
try:
|
|
||||||
with open(os.devnull,'w') as devnull:
|
|
||||||
token = (subprocess.check_output([
|
|
||||||
'security','find-generic-password',
|
|
||||||
'-s',args.osx_keychain_item_name,
|
|
||||||
'-a',args.osx_keychain_item_account,
|
|
||||||
'-w' ], stderr=devnull).strip())
|
|
||||||
auth = token + ':' + 'x-oauth-basic'
|
|
||||||
except:
|
|
||||||
log_error('No password item matching the provided name and account could be found in the osx keychain.')
|
|
||||||
elif args.osx_keychain_item_account:
|
|
||||||
log_error('You must specify both name and account fields for osx keychain password items')
|
|
||||||
elif args.token:
|
|
||||||
_path_specifier = 'file://'
|
|
||||||
if args.token.startswith(_path_specifier):
|
|
||||||
args.token = open(args.token[len(_path_specifier):],
|
|
||||||
'rt').readline().strip()
|
|
||||||
auth = args.token + ':' + 'x-oauth-basic'
|
|
||||||
elif args.username:
|
|
||||||
if not args.password:
|
|
||||||
args.password = getpass.getpass()
|
|
||||||
if encode:
|
|
||||||
password = args.password
|
|
||||||
else:
|
|
||||||
password = urlquote(args.password)
|
|
||||||
auth = args.username + ':' + password
|
|
||||||
elif args.password:
|
|
||||||
log_error('You must specify a username for basic auth')
|
|
||||||
|
|
||||||
if not auth:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if not encode:
|
|
||||||
return auth
|
|
||||||
|
|
||||||
return base64.b64encode(auth.encode('ascii'))
|
|
||||||
|
|
||||||
|
|
||||||
def get_github_api_host(args):
|
|
||||||
if args.github_host:
|
|
||||||
host = args.github_host + '/api/v3'
|
|
||||||
else:
|
|
||||||
host = 'api.github.com'
|
|
||||||
|
|
||||||
return host
|
|
||||||
|
|
||||||
|
|
||||||
def get_github_host(args):
|
|
||||||
if args.github_host:
|
|
||||||
host = args.github_host
|
|
||||||
else:
|
|
||||||
host = 'github.com'
|
|
||||||
|
|
||||||
return host
|
|
||||||
|
|
||||||
|
|
||||||
def get_github_repo_url(args, repository):
|
|
||||||
if args.prefer_ssh:
|
|
||||||
return repository['ssh_url']
|
|
||||||
|
|
||||||
auth = get_auth(args, False)
|
|
||||||
if auth:
|
|
||||||
repo_url = 'https://{0}@{1}/{2}/{3}.git'.format(
|
|
||||||
auth,
|
|
||||||
get_github_host(args),
|
|
||||||
repository['owner']['login'],
|
|
||||||
repository['name'])
|
|
||||||
else:
|
|
||||||
repo_url = repository['clone_url']
|
|
||||||
|
|
||||||
return repo_url
|
|
||||||
|
|
||||||
|
|
||||||
def retrieve_data(args, template, query_args=None, single_request=False):
|
|
||||||
auth = get_auth(args)
|
|
||||||
query_args = get_query_args(query_args)
|
|
||||||
per_page = 100
|
|
||||||
page = 0
|
|
||||||
data = []
|
|
||||||
|
|
||||||
while True:
|
|
||||||
page = page + 1
|
|
||||||
request = _construct_request(per_page, page, query_args, template, auth) # noqa
|
|
||||||
r, errors = _get_response(request, auth, template)
|
|
||||||
|
|
||||||
status_code = int(r.getcode())
|
|
||||||
|
|
||||||
if status_code != 200:
|
|
||||||
template = 'API request returned HTTP {0}: {1}'
|
|
||||||
errors.append(template.format(status_code, r.reason))
|
|
||||||
log_error(errors)
|
|
||||||
|
|
||||||
response = json.loads(r.read().decode('utf-8'))
|
|
||||||
if len(errors) == 0:
|
|
||||||
if type(response) == list:
|
|
||||||
data.extend(response)
|
|
||||||
if len(response) < per_page:
|
|
||||||
break
|
|
||||||
elif type(response) == dict and single_request:
|
|
||||||
data.append(response)
|
|
||||||
|
|
||||||
if len(errors) > 0:
|
|
||||||
log_error(errors)
|
|
||||||
|
|
||||||
if single_request:
|
|
||||||
break
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
def get_query_args(query_args=None):
|
|
||||||
if not query_args:
|
|
||||||
query_args = {}
|
|
||||||
return query_args
|
|
||||||
|
|
||||||
|
|
||||||
def _get_response(request, auth, template):
|
|
||||||
retry_timeout = 3
|
|
||||||
errors = []
|
|
||||||
# We'll make requests in a loop so we can
|
|
||||||
# delay and retry in the case of rate-limiting
|
|
||||||
while True:
|
|
||||||
should_continue = False
|
|
||||||
try:
|
|
||||||
r = urlopen(request)
|
|
||||||
except HTTPError as exc:
|
|
||||||
errors, should_continue = _request_http_error(exc, auth, errors) # noqa
|
|
||||||
r = exc
|
|
||||||
except URLError:
|
|
||||||
should_continue = _request_url_error(template, retry_timeout)
|
|
||||||
if not should_continue:
|
|
||||||
raise
|
|
||||||
|
|
||||||
if should_continue:
|
|
||||||
continue
|
|
||||||
|
|
||||||
break
|
|
||||||
return r, errors
|
|
||||||
|
|
||||||
|
|
||||||
def _construct_request(per_page, page, query_args, template, auth):
|
|
||||||
querystring = urlencode(dict(list({
|
|
||||||
'per_page': per_page,
|
|
||||||
'page': page
|
|
||||||
}.items()) + list(query_args.items())))
|
|
||||||
|
|
||||||
request = Request(template + '?' + querystring)
|
|
||||||
if auth is not None:
|
|
||||||
request.add_header('Authorization', 'Basic '.encode('ascii') + auth)
|
|
||||||
return request
|
|
||||||
|
|
||||||
|
|
||||||
def _request_http_error(exc, auth, errors):
|
|
||||||
# HTTPError behaves like a Response so we can
|
|
||||||
# check the status code and headers to see exactly
|
|
||||||
# what failed.
|
|
||||||
|
|
||||||
should_continue = False
|
|
||||||
headers = exc.headers
|
|
||||||
limit_remaining = int(headers.get('x-ratelimit-remaining', 0))
|
|
||||||
|
|
||||||
if exc.code == 403 and limit_remaining < 1:
|
|
||||||
# The X-RateLimit-Reset header includes a
|
|
||||||
# timestamp telling us when the limit will reset
|
|
||||||
# so we can calculate how long to wait rather
|
|
||||||
# than inefficiently polling:
|
|
||||||
gm_now = calendar.timegm(time.gmtime())
|
|
||||||
reset = int(headers.get('x-ratelimit-reset', 0)) or gm_now
|
|
||||||
# We'll never sleep for less than 10 seconds:
|
|
||||||
delta = max(10, reset - gm_now)
|
|
||||||
|
|
||||||
limit = headers.get('x-ratelimit-limit')
|
|
||||||
print('Exceeded rate limit of {} requests; waiting {} seconds to reset'.format(limit, delta), # noqa
|
|
||||||
file=sys.stderr)
|
|
||||||
|
|
||||||
if auth is None:
|
|
||||||
print('Hint: Authenticate to raise your GitHub rate limit',
|
|
||||||
file=sys.stderr)
|
|
||||||
|
|
||||||
time.sleep(delta)
|
|
||||||
should_continue = True
|
|
||||||
return errors, should_continue
|
|
||||||
|
|
||||||
|
|
||||||
def _request_url_error(template, retry_timeout):
    """Handle a connection timeout for *template*.

    Returns ``True`` when the caller should retry, ``False`` when the retry
    budget is exhausted and the URL should be skipped.

    NOTE: *retry_timeout* is an int, so the decrement below only affects this
    call's local copy — the caller owns the real counter.
    """
    # In case of a connection timing out, we can retry a few times,
    # but we won't crash and will still back up the rest.
    log_info('{} timed out'.format(template))
    retry_timeout -= 1

    if retry_timeout >= 0:
        return True

    # Bug fix: the message previously lacked .format(template), so a literal
    # '{}' placeholder was logged (also fixed the "to much" typo).
    log_error('{} timed out too much, skipping!'.format(template))
    return False
|
|
||||||
|
|
||||||
|
|
||||||
def check_git_lfs_install():
    """Exit with an error unless the git-lfs extension is runnable."""
    if subprocess.call(['git', 'lfs', 'version']) != 0:
        log_error('The argument --lfs requires you to have Git LFS installed.\nYou can get it from https://git-lfs.github.com.')
        sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def retrieve_repositories(args):
    """Fetch repository metadata for the user/org, plus starred repos if asked.

    Returns a list of repository dicts; starred entries carry an extra
    ``is_starred`` flag so later stages can tell them apart.
    """
    log_info('Retrieving repositories')
    api_host = get_github_api_host(args)

    single_request = False
    template = 'https://{0}/user/repos'.format(api_host)
    if args.organization:
        template = 'https://{0}/orgs/{1}/repos'.format(api_host, args.user)

    if args.repository:
        # A single named repository comes from a non-paginated endpoint.
        single_request = True
        template = 'https://{0}/repos/{1}/{2}'.format(
            api_host, args.user, args.repository)

    repos = retrieve_data(args, template, single_request=single_request)

    if args.all_starred:
        starred_template = 'https://{0}/user/starred'.format(api_host)
        starred_repos = retrieve_data(args, starred_template, single_request=False)
        # we need to be able to determine this repo was retrieved as a starred repo
        # later, so add a flag to each item
        for starred in starred_repos:
            starred['is_starred'] = True
        repos.extend(starred_repos)

    return repos
|
|
||||||
|
|
||||||
|
|
||||||
def filter_repositories(args, unfiltered_repositories):
    """Apply ownership, fork/private, language and name-regex filters."""
    log_info('Filtering repositories')

    # Keep repos owned by the requested user, plus anything flagged starred.
    repositories = [
        repo for repo in unfiltered_repositories
        if repo['owner']['login'] == args.user or repo.get('is_starred')
    ]

    name_regex = re.compile(args.name_regex) if args.name_regex else None
    languages = [lang.lower() for lang in args.languages] if args.languages else None

    if not args.fork:
        repositories = [repo for repo in repositories if not repo['fork']]
    if not args.private:
        repositories = [repo for repo in repositories if not repo['private']]
    if languages:
        repositories = [repo for repo in repositories
                        if repo['language'] and repo['language'].lower() in languages]  # noqa
    if name_regex:
        repositories = [repo for repo in repositories
                        if name_regex.match(repo['name'])]

    return repositories
|
|
||||||
|
|
||||||
|
|
||||||
def backup_repositories(args, output_directory, repositories):
    """Back up each repository: git data plus any requested metadata.

    In incremental mode, reads/writes a ``last_update`` file inside
    *output_directory* and sets ``args.since`` accordingly so downstream
    fetchers can filter by date.
    """
    log_info('Backing up repositories')
    repos_template = 'https://{0}/repos'.format(get_github_api_host(args))

    if args.incremental:
        # Newest update timestamp across all repos; fall back to "now" when
        # the repository list is empty.
        last_update = max(list(repository['updated_at'] for repository in repositories) or [time.strftime('%Y-%m-%dT%H:%M:%SZ', time.localtime())])  # noqa
        last_update_path = os.path.join(output_directory, 'last_update')
        if os.path.exists(last_update_path):
            # Bug fix: use a context manager instead of leaking the handle.
            with open(last_update_path) as f:
                args.since = f.read().strip()
        else:
            args.since = None
    else:
        args.since = None

    for repository in repositories:
        backup_cwd = os.path.join(output_directory, 'repositories')
        repo_cwd = os.path.join(backup_cwd, repository['name'])

        # put starred repos in -o/starred/${owner}/${repo} to prevent collision of
        # any repositories with the same name
        if repository.get('is_starred'):
            backup_cwd = os.path.join(output_directory, 'starred')
            repo_cwd = os.path.join(backup_cwd, repository['owner']['login'],
                                    repository['name'])

        repo_dir = os.path.join(repo_cwd, 'repository')
        repo_url = get_github_repo_url(args, repository)

        if args.include_repository or args.include_everything:
            fetch_repository(repository['name'],
                             repo_url,
                             repo_dir,
                             skip_existing=args.skip_existing,
                             bare_clone=args.bare_clone,
                             lfs_clone=args.lfs_clone)

        download_wiki = (args.include_wiki or args.include_everything)
        if repository['has_wiki'] and download_wiki:
            fetch_repository(repository['name'],
                             repo_url.replace('.git', '.wiki.git'),
                             os.path.join(repo_cwd, 'wiki'),
                             skip_existing=args.skip_existing,
                             bare_clone=args.bare_clone,
                             lfs_clone=args.lfs_clone)

        if args.include_issues or args.include_everything:
            backup_issues(args, repo_cwd, repository, repos_template)

        if args.include_pulls or args.include_everything:
            backup_pulls(args, repo_cwd, repository, repos_template)

        if args.include_milestones or args.include_everything:
            backup_milestones(args, repo_cwd, repository, repos_template)

        if args.include_labels or args.include_everything:
            backup_labels(args, repo_cwd, repository, repos_template)

        if args.include_hooks or args.include_everything:
            backup_hooks(args, repo_cwd, repository, repos_template)

    if args.incremental:
        # Bug fix: write via a context manager so the timestamp is flushed
        # and the descriptor is closed deterministically.
        with open(last_update_path, 'w') as f:
            f.write(last_update)
|
|
||||||
|
|
||||||
|
|
||||||
def backup_issues(args, repo_cwd, repository, repos_template):
    """Download a repository's issues as ``issues/<number>.json`` files.

    Fetches both open and closed issues; optionally attaches each issue's
    comments and events. Skipped entirely when --skip-existing is set and
    an issues checkout already exists.
    """
    has_issues_dir = os.path.isdir('{0}/issues/.git'.format(repo_cwd))
    if args.skip_existing and has_issues_dir:
        return

    log_info('Retrieving {0} issues'.format(repository['full_name']))
    issue_cwd = os.path.join(repo_cwd, 'issues')
    mkdir_p(repo_cwd, issue_cwd)

    # Keyed by issue number so open/closed passes merge without duplicates.
    issues = {}
    issues_skipped = 0
    issues_skipped_message = ''
    _issue_template = '{0}/{1}/issues'.format(repos_template,
                                              repository['full_name'])

    should_include_pulls = args.include_pulls or args.include_everything
    issue_states = ['open', 'closed']
    for issue_state in issue_states:
        query_args = {
            'filter': 'all',
            'state': issue_state
        }
        if args.since:
            query_args['since'] = args.since

        _issues = retrieve_data(args,
                                _issue_template,
                                query_args=query_args)
        for issue in _issues:
            # skip pull requests which are also returned as issues
            # if retrieving pull requests is requested as well
            if 'pull_request' in issue and should_include_pulls:
                issues_skipped += 1
                continue

            issues[issue['number']] = issue

    if issues_skipped:
        issues_skipped_message = ' (skipped {0} pull requests)'.format(
            issues_skipped)

    log_info('Saving {0} issues to disk{1}'.format(
        len(list(issues.keys())), issues_skipped_message))
    comments_template = _issue_template + '/{0}/comments'
    events_template = _issue_template + '/{0}/events'
    for number, issue in list(issues.items()):
        # Optionally enrich each issue with its comment and event streams
        # before writing it out.
        if args.include_issue_comments or args.include_everything:
            template = comments_template.format(number)
            issues[number]['comment_data'] = retrieve_data(args, template)
        if args.include_issue_events or args.include_everything:
            template = events_template.format(number)
            issues[number]['event_data'] = retrieve_data(args, template)

        issue_file = '{0}/{1}.json'.format(issue_cwd, number)
        with codecs.open(issue_file, 'w', encoding='utf-8') as f:
            json_dump(issue, f)
|
|
||||||
|
|
||||||
|
|
||||||
def backup_pulls(args, repo_cwd, repository, repos_template):
    """Download a repository's pull requests as ``pulls/<number>.json`` files.

    Fetches both open and closed PRs; optionally attaches each PR's comments
    and commit list. Skipped entirely when --skip-existing is set and a pulls
    checkout already exists.
    """
    has_pulls_dir = os.path.isdir('{0}/pulls/.git'.format(repo_cwd))
    if args.skip_existing and has_pulls_dir:
        return

    log_info('Retrieving {0} pull requests'.format(repository['full_name']))  # noqa
    pulls_cwd = os.path.join(repo_cwd, 'pulls')
    mkdir_p(repo_cwd, pulls_cwd)

    # Keyed by PR number so open/closed passes merge without duplicates.
    pulls = {}
    _pulls_template = '{0}/{1}/pulls'.format(repos_template,
                                             repository['full_name'])

    pull_states = ['open', 'closed']
    for pull_state in pull_states:
        query_args = {
            'filter': 'all',
            'state': pull_state,
            'sort': 'updated',
            'direction': 'desc',
        }

        # It'd be nice to be able to apply the args.since filter here...
        _pulls = retrieve_data(args,
                               _pulls_template,
                               query_args=query_args)
        for pull in _pulls:
            # ...so --since is applied client-side on updated_at instead.
            if not args.since or pull['updated_at'] >= args.since:
                pulls[pull['number']] = pull

    log_info('Saving {0} pull requests to disk'.format(
        len(list(pulls.keys()))))
    comments_template = _pulls_template + '/{0}/comments'
    commits_template = _pulls_template + '/{0}/commits'
    for number, pull in list(pulls.items()):
        # Optionally enrich each PR with comments and commits before saving.
        if args.include_pull_comments or args.include_everything:
            template = comments_template.format(number)
            pulls[number]['comment_data'] = retrieve_data(args, template)
        if args.include_pull_commits or args.include_everything:
            template = commits_template.format(number)
            pulls[number]['commit_data'] = retrieve_data(args, template)

        pull_file = '{0}/{1}.json'.format(pulls_cwd, number)
        with codecs.open(pull_file, 'w', encoding='utf-8') as f:
            json_dump(pull, f)
|
|
||||||
|
|
||||||
|
|
||||||
def backup_milestones(args, repo_cwd, repository, repos_template):
    """Save every milestone of *repository* as ``milestones/<number>.json``."""
    milestone_cwd = os.path.join(repo_cwd, 'milestones')
    if args.skip_existing and os.path.isdir(milestone_cwd):
        return

    log_info('Retrieving {0} milestones'.format(repository['full_name']))
    mkdir_p(repo_cwd, milestone_cwd)

    template = '{0}/{1}/milestones'.format(repos_template,
                                           repository['full_name'])

    # 'state': 'all' fetches both open and closed milestones in one pass.
    retrieved = retrieve_data(args, template, query_args={'state': 'all'})
    milestones = {item['number']: item for item in retrieved}

    log_info('Saving {0} milestones to disk'.format(
        len(list(milestones.keys()))))
    for number, milestone in list(milestones.items()):
        milestone_file = '{0}/{1}.json'.format(milestone_cwd, number)
        with codecs.open(milestone_file, 'w', encoding='utf-8') as f:
            json_dump(milestone, f)
|
|
||||||
|
|
||||||
|
|
||||||
def backup_labels(args, repo_cwd, repository, repos_template):
    """Save all labels of *repository* to ``labels/labels.json``."""
    label_cwd = os.path.join(repo_cwd, 'labels')
    template = '{0}/{1}/labels'.format(repos_template,
                                       repository['full_name'])
    _backup_data(args,
                 'labels',
                 template,
                 '{0}/labels.json'.format(label_cwd),
                 label_cwd)
|
|
||||||
|
|
||||||
|
|
||||||
def backup_hooks(args, repo_cwd, repository, repos_template):
    """Save repository webhooks to ``hooks/hooks.json`` (requires auth)."""
    if not get_auth(args):
        log_info("Skipping hooks since no authentication provided")
        return

    hook_cwd = os.path.join(repo_cwd, 'hooks')
    output_file = '{0}/hooks.json'.format(hook_cwd)
    template = '{0}/{1}/hooks'.format(repos_template,
                                      repository['full_name'])
    try:
        _backup_data(args,
                     'hooks',
                     template,
                     output_file,
                     hook_cwd)
    except SystemExit:
        # NOTE(review): _backup_data can raise SystemExit (presumably from
        # API error handling upstream); treat that as unreadable hooks
        # rather than aborting the whole backup.
        log_info("Unable to read hooks, skipping")
|
|
||||||
|
|
||||||
|
|
||||||
def fetch_repository(name,
                     remote_url,
                     local_dir,
                     skip_existing=False,
                     bare_clone=False,
                     lfs_clone=False):
    """Clone *remote_url* into *local_dir*, or update an existing clone.

    :param name: repository name, used for log messages only
    :param remote_url: git remote URL (may embed credentials; masked in logs)
    :param local_dir: destination working/bare directory
    :param skip_existing: do nothing when a clone is already present
    :param bare_clone: mirror-clone instead of a working checkout
    :param lfs_clone: run fetch/clone through ``git lfs``
    """
    if bare_clone:
        if os.path.exists(local_dir):
            # Bug fix: check_output returns bytes on Python 3, so the old
            # comparison against the str "true\n" was always False and an
            # existing bare clone was never detected (forcing a re-clone).
            output = subprocess.check_output(['git',
                                              'rev-parse',
                                              '--is-bare-repository'],
                                             cwd=local_dir)
            clone_exists = output.decode('utf-8').strip() == 'true'
        else:
            clone_exists = False
    else:
        clone_exists = os.path.exists(os.path.join(local_dir, '.git'))

    if clone_exists and skip_existing:
        return

    masked_remote_url = mask_password(remote_url)

    # Probe the remote; exit code 128 is treated as "not initialized".
    initialized = subprocess.call('git ls-remote ' + remote_url,
                                  stdout=FNULL,
                                  stderr=FNULL,
                                  shell=True)
    if initialized == 128:
        log_info("Skipping {0} ({1}) since it's not initialized".format(
            name, masked_remote_url))
        return

    if clone_exists:
        log_info('Updating {0} in {1}'.format(name, local_dir))

        remotes = subprocess.check_output(['git', 'remote', 'show'],
                                          cwd=local_dir)
        remotes = [i.strip() for i in remotes.decode('utf-8').splitlines()]

        # Ensure 'origin' points at the requested URL before fetching.
        if 'origin' not in remotes:
            git_command = ['git', 'remote', 'rm', 'origin']
            logging_subprocess(git_command, None, cwd=local_dir)
            git_command = ['git', 'remote', 'add', 'origin', remote_url]
            logging_subprocess(git_command, None, cwd=local_dir)
        else:
            git_command = ['git', 'remote', 'set-url', 'origin', remote_url]
            logging_subprocess(git_command, None, cwd=local_dir)

        if lfs_clone:
            git_command = ['git', 'lfs', 'fetch', '--all', '--force', '--tags', '--prune']
        else:
            git_command = ['git', 'fetch', '--all', '--force', '--tags', '--prune']
        logging_subprocess(git_command, None, cwd=local_dir)
    else:
        log_info('Cloning {0} repository from {1} to {2}'.format(
            name,
            masked_remote_url,
            local_dir))
        if bare_clone:
            if lfs_clone:
                git_command = ['git', 'lfs', 'clone', '--mirror', remote_url, local_dir]
            else:
                git_command = ['git', 'clone', '--mirror', remote_url, local_dir]
        else:
            if lfs_clone:
                git_command = ['git', 'lfs', 'clone', remote_url, local_dir]
            else:
                git_command = ['git', 'clone', remote_url, local_dir]
        logging_subprocess(git_command, None)
|
|
||||||
|
|
||||||
|
|
||||||
def backup_account(args, output_directory):
    """Back up account-level data (starred and watched repositories)."""
    account_cwd = os.path.join(output_directory, 'account')
    api_host = get_github_api_host(args)

    if args.include_starred or args.include_everything:
        template = "https://{0}/users/{1}/starred".format(api_host, args.user)
        _backup_data(args,
                     'starred repositories',
                     template,
                     '{0}/starred.json'.format(account_cwd),
                     account_cwd)

    if args.include_watched or args.include_everything:
        template = "https://{0}/users/{1}/subscriptions".format(api_host, args.user)
        _backup_data(args,
                     'watched repositories',
                     template,
                     '{0}/watched.json'.format(account_cwd),
                     account_cwd)
|
|
||||||
|
|
||||||
|
|
||||||
def _backup_data(args, name, template, output_file, output_directory):
    """Retrieve *name* data from *template* and write it to *output_file*.

    Honors --skip-existing: nothing is fetched when the output file already
    exists.
    """
    if args.skip_existing and os.path.exists(output_file):
        return

    log_info('Retrieving {0} {1}'.format(args.user, name))
    mkdir_p(output_directory)
    data = retrieve_data(args, template)

    log_info('Writing {0} {1} to disk'.format(len(data), name))
    with codecs.open(output_file, 'w', encoding='utf-8') as f:
        json_dump(data, f)
|
|
||||||
|
|
||||||
|
|
||||||
def json_dump(data, output_file):
    """Serialize *data* to *output_file* as pretty, key-sorted, UTF-8 JSON."""
    json.dump(
        data,
        output_file,
        ensure_ascii=False,
        sort_keys=True,
        indent=4,
        separators=(',', ': '),
    )
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
args = parse_args()
|
args = parse_args()
|
||||||
|
|
||||||
|
if args.quiet:
|
||||||
|
logger.setLevel(logging.WARNING)
|
||||||
|
|
||||||
output_directory = os.path.realpath(args.output_directory)
|
output_directory = os.path.realpath(args.output_directory)
|
||||||
if not os.path.isdir(output_directory):
|
if not os.path.isdir(output_directory):
|
||||||
log_info('Create output directory {0}'.format(output_directory))
|
logger.info("Create output directory {0}".format(output_directory))
|
||||||
mkdir_p(output_directory)
|
mkdir_p(output_directory)
|
||||||
|
|
||||||
if args.lfs_clone:
|
if args.lfs_clone:
|
||||||
check_git_lfs_install()
|
check_git_lfs_install()
|
||||||
|
|
||||||
log_info('Backing up user {0} to {1}'.format(args.user, output_directory))
|
if args.log_level:
|
||||||
|
log_level = logging.getLevelName(args.log_level.upper())
|
||||||
|
if isinstance(log_level, int):
|
||||||
|
logger.root.setLevel(log_level)
|
||||||
|
|
||||||
repositories = retrieve_repositories(args)
|
if not args.as_app:
|
||||||
|
logger.info("Backing up user {0} to {1}".format(args.user, output_directory))
|
||||||
|
authenticated_user = get_authenticated_user(args)
|
||||||
|
else:
|
||||||
|
authenticated_user = {"login": None}
|
||||||
|
|
||||||
|
repositories = retrieve_repositories(args, authenticated_user)
|
||||||
repositories = filter_repositories(args, repositories)
|
repositories = filter_repositories(args, repositories)
|
||||||
backup_repositories(args, output_directory, repositories)
|
backup_repositories(args, output_directory, repositories)
|
||||||
backup_account(args, output_directory)
|
backup_account(args, output_directory)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == "__main__":
|
||||||
main()
|
try:
|
||||||
|
main()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(str(e))
|
||||||
|
sys.exit(1)
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
__version__ = '0.16.0'
|
__version__ = "0.50.3"
|
||||||
|
|||||||
1478
github_backup/github_backup.py
Normal file
1478
github_backup/github_backup.py
Normal file
File diff suppressed because it is too large
Load Diff
7
python-github-backup.code-workspace
Executable file
7
python-github-backup.code-workspace
Executable file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"folders": [
|
||||||
|
{
|
||||||
|
"path": "."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
89
release
89
release
@@ -1,31 +1,42 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
set -eo pipefail; [[ $RELEASE_TRACE ]] && set -x
|
set -eo pipefail
|
||||||
|
[[ $RELEASE_TRACE ]] && set -x
|
||||||
|
|
||||||
PACKAGE_NAME='github-backup'
|
if [[ ! -f setup.py ]]; then
|
||||||
INIT_PACKAGE_NAME='github_backup'
|
echo -e "${RED}WARNING: Missing setup.py${COLOR_OFF}\n"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
PACKAGE_NAME="$(cat setup.py | grep 'name="' | head | cut -d '"' -f2)"
|
||||||
|
INIT_PACKAGE_NAME="$(echo "${PACKAGE_NAME//-/_}")"
|
||||||
PUBLIC="true"
|
PUBLIC="true"
|
||||||
|
|
||||||
# Colors
|
# Colors
|
||||||
COLOR_OFF="\033[0m" # unsets color to term fg color
|
COLOR_OFF="\033[0m" # unsets color to term fg color
|
||||||
RED="\033[0;31m" # red
|
RED="\033[0;31m" # red
|
||||||
GREEN="\033[0;32m" # green
|
GREEN="\033[0;32m" # green
|
||||||
YELLOW="\033[0;33m" # yellow
|
YELLOW="\033[0;33m" # yellow
|
||||||
MAGENTA="\033[0;35m" # magenta
|
MAGENTA="\033[0;35m" # magenta
|
||||||
CYAN="\033[0;36m" # cyan
|
CYAN="\033[0;36m" # cyan
|
||||||
|
|
||||||
# ensure wheel is available
|
# ensure wheel is available
|
||||||
pip install wheel > /dev/null
|
pip install wheel >/dev/null
|
||||||
|
|
||||||
command -v gitchangelog >/dev/null 2>&1 || {
|
command -v gitchangelog >/dev/null 2>&1 || {
|
||||||
echo -e "${RED}WARNING: Missing gitchangelog binary, please run: pip install gitchangelog==2.2.0${COLOR_OFF}\n"
|
echo -e "${RED}WARNING: Missing gitchangelog binary, please run: pip install gitchangelog==3.0.4${COLOR_OFF}\n"
|
||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
command -v rst-lint > /dev/null || {
|
command -v rst-lint >/dev/null || {
|
||||||
echo -e "${RED}WARNING: Missing rst-lint binary, please run: pip install restructuredtext_lint${COLOR_OFF}\n"
|
echo -e "${RED}WARNING: Missing rst-lint binary, please run: pip install restructuredtext_lint${COLOR_OFF}\n"
|
||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
|
command -v twine >/dev/null || {
|
||||||
|
echo -e "${RED}WARNING: Missing twine binary, please run: pip install twine==3.2.0${COLOR_OFF}\n"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
if [[ "$@" != "major" ]] && [[ "$@" != "minor" ]] && [[ "$@" != "patch" ]]; then
|
if [[ "$@" != "major" ]] && [[ "$@" != "minor" ]] && [[ "$@" != "patch" ]]; then
|
||||||
echo -e "${RED}WARNING: Invalid release type, must specify 'major', 'minor', or 'patch'${COLOR_OFF}\n"
|
echo -e "${RED}WARNING: Invalid release type, must specify 'major', 'minor', or 'patch'${COLOR_OFF}\n"
|
||||||
exit 1
|
exit 1
|
||||||
@@ -33,41 +44,41 @@ fi
|
|||||||
|
|
||||||
echo -e "\n${GREEN}STARTING RELEASE PROCESS${COLOR_OFF}\n"
|
echo -e "\n${GREEN}STARTING RELEASE PROCESS${COLOR_OFF}\n"
|
||||||
|
|
||||||
set +e;
|
set +e
|
||||||
git status | grep -Eo "working (directory|tree) clean" &> /dev/null
|
git status | grep -Eo "working (directory|tree) clean" &>/dev/null
|
||||||
if [ ! $? -eq 0 ]; then # working directory is NOT clean
|
if [ ! $? -eq 0 ]; then # working directory is NOT clean
|
||||||
echo -e "${RED}WARNING: You have uncomitted changes, you may have forgotten something${COLOR_OFF}\n"
|
echo -e "${RED}WARNING: You have uncomitted changes, you may have forgotten something${COLOR_OFF}\n"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
set -e;
|
set -e
|
||||||
|
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Updating local copy"
|
echo -e "${YELLOW}--->${COLOR_OFF} Updating local copy"
|
||||||
git pull -q origin master
|
git pull -q origin master
|
||||||
|
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Retrieving release versions"
|
echo -e "${YELLOW}--->${COLOR_OFF} Retrieving release versions"
|
||||||
|
|
||||||
current_version=$(cat ${INIT_PACKAGE_NAME}/__init__.py |grep '__version__ ='|sed 's/[^0-9.]//g')
|
current_version=$(cat ${INIT_PACKAGE_NAME}/__init__.py | grep '__version__ =' | sed 's/[^0-9.]//g')
|
||||||
major=$(echo $current_version | awk '{split($0,a,"."); print a[1]}')
|
major=$(echo $current_version | awk '{split($0,a,"."); print a[1]}')
|
||||||
minor=$(echo $current_version | awk '{split($0,a,"."); print a[2]}')
|
minor=$(echo $current_version | awk '{split($0,a,"."); print a[2]}')
|
||||||
patch=$(echo $current_version | awk '{split($0,a,"."); print a[3]}')
|
patch=$(echo $current_version | awk '{split($0,a,"."); print a[3]}')
|
||||||
|
|
||||||
if [[ "$@" == "major" ]]; then
|
if [[ "$@" == "major" ]]; then
|
||||||
major=$(($major + 1));
|
major=$(($major + 1))
|
||||||
minor="0"
|
minor="0"
|
||||||
patch="0"
|
patch="0"
|
||||||
elif [[ "$@" == "minor" ]]; then
|
elif [[ "$@" == "minor" ]]; then
|
||||||
minor=$(($minor + 1));
|
minor=$(($minor + 1))
|
||||||
patch="0"
|
patch="0"
|
||||||
elif [[ "$@" == "patch" ]]; then
|
elif [[ "$@" == "patch" ]]; then
|
||||||
patch=$(($patch + 1));
|
patch=$(($patch + 1))
|
||||||
fi
|
fi
|
||||||
|
|
||||||
next_version="${major}.${minor}.${patch}"
|
next_version="${major}.${minor}.${patch}"
|
||||||
|
|
||||||
echo -e "${YELLOW} >${COLOR_OFF} ${MAGENTA}${current_version}${COLOR_OFF} -> ${MAGENTA}${next_version}${COLOR_OFF}"
|
echo -e "${YELLOW} >${COLOR_OFF} ${MAGENTA}${current_version}${COLOR_OFF} -> ${MAGENTA}${next_version}${COLOR_OFF}"
|
||||||
|
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Ensuring readme passes lint checks (if this fails, run rst-lint)"
|
echo -e "${YELLOW}--->${COLOR_OFF} Ensuring readme passes lint checks (if this fails, run rst-lint)"
|
||||||
rst-lint README.rst > /dev/null
|
rst-lint README.rst || exit 1
|
||||||
|
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Creating necessary temp file"
|
echo -e "${YELLOW}--->${COLOR_OFF} Creating necessary temp file"
|
||||||
tempfoo=$(basename $0)
|
tempfoo=$(basename $0)
|
||||||
@@ -76,33 +87,29 @@ TMPFILE=$(mktemp /tmp/${tempfoo}.XXXXXX) || {
|
|||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
find_this="__version__ = '$current_version'"
|
find_this="__version__ = \"$current_version\""
|
||||||
replace_with="__version__ = '$next_version'"
|
replace_with="__version__ = \"$next_version\""
|
||||||
|
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Updating ${INIT_PACKAGE_NAME}/__init__.py"
|
echo -e "${YELLOW}--->${COLOR_OFF} Updating ${INIT_PACKAGE_NAME}/__init__.py"
|
||||||
sed "s/$find_this/$replace_with/" ${INIT_PACKAGE_NAME}/__init__.py > $TMPFILE && mv $TMPFILE ${INIT_PACKAGE_NAME}/__init__.py
|
sed "s/$find_this/$replace_with/" ${INIT_PACKAGE_NAME}/__init__.py >$TMPFILE && mv $TMPFILE ${INIT_PACKAGE_NAME}/__init__.py
|
||||||
|
|
||||||
find_this="${PACKAGE_NAME}.git@$current_version"
|
|
||||||
replace_with="${PACKAGE_NAME}.git@$next_version"
|
|
||||||
|
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Updating README.rst"
|
|
||||||
sed "s/$find_this/$replace_with/" README.rst > $TMPFILE && mv $TMPFILE README.rst
|
|
||||||
|
|
||||||
if [ -f docs/conf.py ]; then
|
if [ -f docs/conf.py ]; then
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Updating docs"
|
echo -e "${YELLOW}--->${COLOR_OFF} Updating docs"
|
||||||
find_this="version = '${current_version}'"
|
find_this="version = '${current_version}'"
|
||||||
replace_with="version = '${next_version}'"
|
replace_with="version = '${next_version}'"
|
||||||
sed "s/$find_this/$replace_with/" docs/conf.py > $TMPFILE && mv $TMPFILE docs/conf.py
|
sed "s/$find_this/$replace_with/" docs/conf.py >$TMPFILE && mv $TMPFILE docs/conf.py
|
||||||
|
|
||||||
find_this="version = '${current_version}'"
|
find_this="version = '${current_version}'"
|
||||||
replace_with="release = '${next_version}'"
|
replace_with="release = '${next_version}'"
|
||||||
sed "s/$find_this/$replace_with/" docs/conf.py > $TMPFILE && mv $TMPFILE docs/conf.py
|
sed "s/$find_this/$replace_with/" docs/conf.py >$TMPFILE && mv $TMPFILE docs/conf.py
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Updating CHANGES.rst for new release"
|
echo -e "${YELLOW}--->${COLOR_OFF} Updating CHANGES.rst for new release"
|
||||||
version_header="$next_version ($(date +%F))"
|
version_header="$next_version ($(date +%F))"
|
||||||
set +e; dashes=$(yes '-'|head -n ${#version_header}|tr -d '\n') ; set -e
|
set +e
|
||||||
gitchangelog |sed "4s/.*/$version_header/"|sed "5s/.*/$dashes/" > $TMPFILE && mv $TMPFILE CHANGES.rst
|
dashes=$(yes '-' | head -n ${#version_header} | tr -d '\n')
|
||||||
|
set -e
|
||||||
|
gitchangelog | sed "4s/.*/$version_header/" | sed "5s/.*/$dashes/" >$TMPFILE && mv $TMPFILE CHANGES.rst
|
||||||
|
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Adding changed files to git"
|
echo -e "${YELLOW}--->${COLOR_OFF} Adding changed files to git"
|
||||||
git add CHANGES.rst README.rst ${INIT_PACKAGE_NAME}/__init__.py
|
git add CHANGES.rst README.rst ${INIT_PACKAGE_NAME}/__init__.py
|
||||||
@@ -111,6 +118,15 @@ if [ -f docs/conf.py ]; then git add docs/conf.py; fi
|
|||||||
echo -e "${YELLOW}--->${COLOR_OFF} Creating release"
|
echo -e "${YELLOW}--->${COLOR_OFF} Creating release"
|
||||||
git commit -q -m "Release version $next_version"
|
git commit -q -m "Release version $next_version"
|
||||||
|
|
||||||
|
if [[ "$PUBLIC" == "true" ]]; then
|
||||||
|
echo -e "${YELLOW}--->${COLOR_OFF} Creating python release files"
|
||||||
|
cp README.rst README
|
||||||
|
python setup.py sdist bdist_wheel >/dev/null
|
||||||
|
|
||||||
|
echo -e "${YELLOW}--->${COLOR_OFF} Validating long_description"
|
||||||
|
twine check dist/*
|
||||||
|
fi
|
||||||
|
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Tagging release"
|
echo -e "${YELLOW}--->${COLOR_OFF} Tagging release"
|
||||||
git tag -a $next_version -m "Release version $next_version"
|
git tag -a $next_version -m "Release version $next_version"
|
||||||
|
|
||||||
@@ -118,9 +134,8 @@ echo -e "${YELLOW}--->${COLOR_OFF} Pushing release and tags to github"
|
|||||||
git push -q origin master && git push -q --tags
|
git push -q origin master && git push -q --tags
|
||||||
|
|
||||||
if [[ "$PUBLIC" == "true" ]]; then
|
if [[ "$PUBLIC" == "true" ]]; then
|
||||||
echo -e "${YELLOW}--->${COLOR_OFF} Creating python release"
|
echo -e "${YELLOW}--->${COLOR_OFF} Uploading python release"
|
||||||
cp README.rst README
|
twine upload dist/*
|
||||||
python setup.py sdist bdist_wheel upload > /dev/null
|
|
||||||
rm README
|
rm README
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
|||||||
39
release-requirements.txt
Normal file
39
release-requirements.txt
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
autopep8==2.3.2
|
||||||
|
black==25.1.0
|
||||||
|
bleach==6.2.0
|
||||||
|
certifi==2025.7.14
|
||||||
|
charset-normalizer==3.4.2
|
||||||
|
click==8.1.8
|
||||||
|
colorama==0.4.6
|
||||||
|
docutils==0.22
|
||||||
|
flake8==7.3.0
|
||||||
|
gitchangelog==3.0.4
|
||||||
|
idna==3.10
|
||||||
|
importlib-metadata==8.7.0
|
||||||
|
jaraco.classes==3.4.0
|
||||||
|
keyring==25.6.0
|
||||||
|
markdown-it-py==3.0.0
|
||||||
|
mccabe==0.7.0
|
||||||
|
mdurl==0.1.2
|
||||||
|
more-itertools==10.7.0
|
||||||
|
mypy-extensions==1.1.0
|
||||||
|
packaging==25.0
|
||||||
|
pathspec==0.12.1
|
||||||
|
pkginfo==1.12.1.2
|
||||||
|
platformdirs==4.3.8
|
||||||
|
pycodestyle==2.14.0
|
||||||
|
pyflakes==3.4.0
|
||||||
|
Pygments==2.19.2
|
||||||
|
readme-renderer==44.0
|
||||||
|
requests==2.32.4
|
||||||
|
requests-toolbelt==1.0.0
|
||||||
|
restructuredtext-lint==1.4.0
|
||||||
|
rfc3986==2.0.0
|
||||||
|
rich==14.1.0
|
||||||
|
setuptools==80.9.0
|
||||||
|
six==1.17.0
|
||||||
|
tqdm==4.67.1
|
||||||
|
twine==6.1.0
|
||||||
|
urllib3==2.5.0
|
||||||
|
webencodings==0.5.1
|
||||||
|
zipp==3.23.0
|
||||||
39
setup.py
39
setup.py
@@ -1,10 +1,12 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from github_backup import __version__
|
from github_backup import __version__
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from setuptools import setup
|
from setuptools import setup
|
||||||
|
|
||||||
setup # workaround for pyflakes issue #13
|
setup # workaround for pyflakes issue #13
|
||||||
except ImportError:
|
except ImportError:
|
||||||
from distutils.core import setup
|
from distutils.core import setup
|
||||||
@@ -15,6 +17,7 @@ except ImportError:
|
|||||||
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
|
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
|
||||||
try:
|
try:
|
||||||
import multiprocessing
|
import multiprocessing
|
||||||
|
|
||||||
multiprocessing
|
multiprocessing
|
||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
@@ -25,25 +28,27 @@ def open_file(fname):
|
|||||||
|
|
||||||
|
|
||||||
setup(
|
setup(
|
||||||
name='github-backup',
|
name="github-backup",
|
||||||
version=__version__,
|
version=__version__,
|
||||||
author='Jose Diaz-Gonzalez',
|
author="Jose Diaz-Gonzalez",
|
||||||
author_email='github-backup@josediazgonzalez.com',
|
author_email="github-backup@josediazgonzalez.com",
|
||||||
packages=['github_backup'],
|
packages=["github_backup"],
|
||||||
scripts=['bin/github-backup'],
|
scripts=["bin/github-backup"],
|
||||||
url='http://github.com/josegonzalez/python-github-backup',
|
url="http://github.com/josegonzalez/python-github-backup",
|
||||||
license=open('LICENSE.txt').read(),
|
license="MIT",
|
||||||
classifiers=[
|
classifiers=[
|
||||||
'Development Status :: 5 - Production/Stable',
|
"Development Status :: 5 - Production/Stable",
|
||||||
'Topic :: System :: Archiving :: Backup',
|
"Topic :: System :: Archiving :: Backup",
|
||||||
'License :: OSI Approved :: MIT License',
|
"License :: OSI Approved :: MIT License",
|
||||||
'Programming Language :: Python :: 2.6',
|
"Programming Language :: Python :: 3.8",
|
||||||
'Programming Language :: Python :: 2.7',
|
"Programming Language :: Python :: 3.9",
|
||||||
'Programming Language :: Python :: 3.5',
|
"Programming Language :: Python :: 3.10",
|
||||||
'Programming Language :: Python :: 3.6',
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
],
|
],
|
||||||
description='backup a github user or organization',
|
description="backup a github user or organization",
|
||||||
long_description=open_file('README.rst').read(),
|
long_description=open_file("README.rst").read(),
|
||||||
install_requires=open_file('requirements.txt').readlines(),
|
long_description_content_type="text/x-rst",
|
||||||
|
install_requires=open_file("requirements.txt").readlines(),
|
||||||
zip_safe=True,
|
zip_safe=True,
|
||||||
)
|
)
|
||||||
|
|||||||
Reference in New Issue
Block a user