Mirror of https://github.com/josegonzalez/python-github-backup.git (synced 2025-12-05 16:18:02 +01:00)

Compare commits (254 commits)
The capture preserved only the SHA1 column of the commit table; the author, date, and message columns were lost. The 254 commits in this compare range, in the order listed:

```
e7880bb056 18e3bd574a 1ed3d66777 a194fa48ce 8f859be355 80e00d31d9 32202656ba 875e31819a
73dc75ab95 cd23dd1a16 d244de1952 4dae43c58e b018a91fb4 759ec58beb b43c998b65 38b4a2c106
6210ec3845 90396d2bdf aa35e883b0 963ed3e6f6 b710547fdc 64b5667a16 b0c8cfe059 5bedaf825f
9d28d9c2b0 eb756d665c 3d5f61aa22 d6bf031bf7 85ab54e514 df4d751be2 03c660724d 39848e650c
12ac519e9c 9e25473151 d3079bfb74 3b9ff1ac14 268a989b09 45a3b87892 1c465f4d35 3ad9b02b26
8bfad9b5b7 985d79c1bc 7d1b7f20ef d3b67f884a 65749bfde4 aeeb0eb9d7 f027760ac5 a9e48f8c4e
338d5a956b 5f07157c9b 87f5b76c52 27eb009e34 82c1fc3086 a4f15b06d9 aa217774ff d820dd994d
1bad563e3f 175ac19be6 773ccecb8c e27b5a8ee3 fb8945fc09 7333458ee4 cf8b4c6b45 cabf8a770a
7e0f7d1930 a9bdd6feb7 fe16d2421c 16b5b304e7 8f58ef6229 51cf429dc2 53714612d4 f6e241833d
17dc265385 704d31cbf7 db69f5a5e8 ba367a927c e8bf4257da 8eab8d02ce e4bd19acea 176cadfcc4
b49544270e 27fdd358fb abe6192ee9 0a2d6ed2ca 1a8eb7a906 40e6e34908 2885fc6822 434b4bf4a0
677f3d3287 9164f088b8 c1f9ea7b9b 6d51d199c5 2b555dc964 b818e9b95f 4157cab89f 07fd47a596
5530a1badd 90ac4999ea f4dfc57ba2 3d354beb24 552c1051e3 c92f5ef0f2 095b712a77 3a4aebbcfe
e75021db80 0f34ecb77d 20e4d385a5 a49322cf7d 332c9b586a 09bf9275d1 fcf21f7a2e 36812a332b
0e0197149e eb545c1c2f 2e72797984 68fe29d1e1 3dc3691770 5b0608ce14 1ce8455860 dcb89a5c33
b0bfffde1a 0f3aaa6fc2 c39ec9c549 e981ce3ff9 22d8f8e649 aaefac1a66 cb66375e1e 24d7aa83df
c8c71239c7 6ca8030648 53f6650f61 548a2ec405 871d69b99a ca3c4fa64b 0846e7d8e5 503444359d
04c70ce277 e774c70275 ba46cb87e8 883407f8ca aacb252e57 2623167110 f6ad296730 c8eef58d76
8eb154a540 2e9db92b68 09bbcfc7b1 4e14f5a2c6 b474e1654f 71d70265cc 2309b0cb76 1e14a4eecd
56d3fd75bf c3e470b34e 4948178a63 88de80c480 15eeff7879 4bb71db468 17af2cbc28 e0d66daadb
1971c97b5d b1b3df692d 8d7311efbf 8449d6352d d8c228c83e 4a134ae2ec 5cb7c6ad2e 75382afeae
f325daa875 2cc34de2a3 dea87873f9 0288b5f553 02a07d3f0d 24a7b1f885 22fa2eb97e cb147cf6d0
298724acfc 65d541f577 8b08685678 b18ba6de28 358d1e3d3e 1cd04281e9 6630b2b82e 391f2ba305
1f0bf50381 eb44c735eb caff40e65b bba39fb4c8 093db93994 d835d47c17 2cd9061c46 0cc50bc4cb
436e8df0ac 9812988a4a 1eccebcb83 122eb56aa1 a0fdae3314 80fa92664c 7b69394488 d1d3d84d95
fff2aa4075 8eba46d8a7 9dc3458dba e9d7692123 a1ef61f87c 6b62973997 b25af67898 0380fb8e35
f62fe5e6c9 c97598c914 c488b0adf9 888815c271 66e11aa532 d1874c0bd9 4c07bd1310 fd2d398025
53d2ceec10 421a7ec62b ec43649bcd e869844dba 0857a37440 585af4c4e3 41ec01d5cb 7dc22358df
b855bcabf6 3c3262ed69 42b836f623 09f4168db6 3e9a4fa0d8 ab18e96ea8 eb88def888 7fe6541291
c8b8b270f6 a97f15b519 500c97c60e 31a6e52a5e 4c5187bcff 2de69beffa 96592295e1 bd65c3d5d6
aaf45022cc 7cdf428e3a cfb1f1368b 4700a26d90 f53f7d9b71 3b6aa060ba 76ff7f3b0d 2615cab114
fda71b0467 a9f82faa1c f17bf19776 54c81de3d7 f2b4f566a1 2724f02b0a
```
**Deleted file** (the filename was lost in this capture; the content is the project's CircleCI configuration, conventionally `.circleci/config.yml`). All 23 lines removed (`@@ -1,23 +0,0 @@`); indentation reconstructed:

```yaml
version: 2.1

orbs:
  python: circleci/python@0.3.2

jobs:
  build-and-test:
    executor: python/default
    steps:
      - checkout
      - python/load-cache
      - run:
          command: pip install flake8
          name: Install dependencies
      - python/save-cache
      - run:
          command: flake8 --ignore=E501
          name: Lint

workflows:
  main:
    jobs:
      - build-and-test
```
**.dockerignore** (new file, 75 lines):

```
# Docker ignore file to reduce build context size

# Temp files
*~
~*
.*~
\#*
.#*
*#
dist

# Build files
build
dist
pkg
*.egg
*.egg-info

# Debian Files
debian/files
debian/python-github-backup*

# Sphinx build
doc/_build

# Generated man page
doc/github_backup.1

# Annoying macOS files
.DS_Store
._*

# IDE configuration files
.vscode
.atom
.idea
*.code-workspace

# RSA
id_rsa
id_rsa.pub

# Virtual env
venv
.venv

# Git
.git
.gitignore
.gitchangelog.rc
.github

# Documentation
*.md
!README.md

# Environment variables files
.env
.env.*
!.env.example
*.log

# Cache files
**/__pycache__/
*.py[cod]

# Docker files
docker-compose.yml
Dockerfile*

# Other files
release
*.tar
*.zip
*.gzip
```
**.github/ISSUE_TEMPLATE/bug.yaml** (new file, 28 lines; indentation reconstructed):

```yaml
---
name: Bug Report
description: File a bug report.
body:
  - type: markdown
    attributes:
      value: |
        # Important notice regarding filed issues

        This project already fills my needs, and as such I have no real reason to continue it's development. This project is otherwise provided as is, and no support is given.

        If pull requests implementing bug fixes or enhancements are pushed, I am happy to review and merge them (time permitting).

        If you wish to have a bug fixed, you have a few options:

        - Fix it yourself and file a pull request.
        - File a bug and hope someone else fixes it for you.
        - Pay me to fix it (my rate is $200 an hour, minimum 1 hour, contact me via my [github email address](https://github.com/josegonzalez) if you want to go this route).

        In all cases, feel free to file an issue, they may be of help to others in the future.
  - type: textarea
    id: what-happened
    attributes:
      label: What happened?
      description: Also tell us, what did you expect to happen?
      placeholder: Tell us what you see!
    validations:
      required: true
```
**.github/ISSUE_TEMPLATE/feature.yaml** (new file, 27 lines; indentation reconstructed):

```yaml
---
name: Feature Request
description: File a feature request.
body:
  - type: markdown
    attributes:
      value: |
        # Important notice regarding filed issues

        This project already fills my needs, and as such I have no real reason to continue it's development. This project is otherwise provided as is, and no support is given.

        If pull requests implementing bug fixes or enhancements are pushed, I am happy to review and merge them (time permitting).

        If you wish to have a feature implemented, you have a few options:

        - Implement it yourself and file a pull request.
        - File an issue and hope someone else implements it for you.
        - Pay me to implement it (my rate is $200 an hour, minimum 1 hour, contact me via my [github email address](https://github.com/josegonzalez) if you want to go this route).

        In all cases, feel free to file an issue, they may be of help to others in the future.
  - type: textarea
    id: what-would-you-like-to-happen
    attributes:
      label: What would you like to happen?
      description: Please describe in detail how the new functionality should work as well as any issues with existing functionality.
    validations:
      required: true
```
**.github/dependabot.yml** (new file, 15 lines):

```yaml
version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily
      time: "13:00"
    groups:
      python-packages:
        patterns:
          - "*"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
```
**.github/workflows/automatic-release.yml** (9 lines changed; the capture flattened the +/- markers, reconstructed here from the old/new line pairs):

```diff
@@ -15,20 +15,21 @@ on:
 jobs:
   release:
     name: Release
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
         with:
           fetch-depth: 0
           ssh-key: ${{ secrets.DEPLOY_PRIVATE_KEY }}
       - name: Setup Git
         run: |
           git config --local user.email "action@github.com"
           git config --local user.name "GitHub Action"
       - name: Setup Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v6
         with:
-          python-version: '3.8'
+          python-version: '3.12'
       - name: Install prerequisites
         run: pip install -r release-requirements.txt
       - name: Execute release
```
**.github/workflows/docker.yml** (new file, 77 lines; indentation reconstructed):

```yaml
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Create and publish a Docker image

on:
  push:
    branches:
      - 'master'
      - 'main'
      - 'dev'

    tags:
      - 'v*'
      - 'v*.*'
      - 'v*.*.*'
      - '*'
      - '*.*'
      - '*.*.*'
  pull_request:
    branches:
      - 'main'
      - 'dev'


env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        with:
          persist-credentials: false

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}
            type=sha
            type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
```
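As a worked example of the tag rules above (my illustration, not from the workflow itself): pushing a git tag such as `v1.2.3` would publish the image under the tags `1.2.3`, `1.2`, `1`, and `sha-<short-sha>`, while a push to the `main` branch would additionally move the `latest` tag.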
**.github/workflows/lint.yml** (new file, 36 lines; indentation reconstructed):

```yaml
---
name: "lint"

# yamllint disable-line rule:truthy
on:
  pull_request:
    branches:
      - "*"
  push:
    branches:
      - "main"
      - "master"

jobs:
  lint:
    name: lint
    runs-on: ubuntu-24.04
    strategy:
      matrix:
        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 0
      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
      - run: pip install -r release-requirements.txt && pip install wheel
      - run: flake8 --ignore=E501,E203,W503
      - run: black .
      - run: rst-lint README.rst
      - run: python setup.py sdist bdist_wheel && twine check dist/*
```
**.github/workflows/tagged-release.yml** (2 lines changed):

```diff
@@ -10,7 +10,7 @@ on:
 jobs:
   tagged-release:
     name: tagged-release
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
 
     steps:
       - uses: "marvinpinto/action-automatic-releases@v1.2.1"
```
**.gitignore** (15 lines changed):

```diff
@@ -1,4 +1,4 @@
-*.py[oc]
+*.py[cod]
 
 # Temp files
 *~
@@ -18,13 +18,13 @@ pkg
 
 # Debian Files
 debian/files
-debian/python-aws-hostname*
+debian/python-github-backup*
 
 # Sphinx build
 doc/_build
 
 # Generated man page
-doc/aws_hostname.1
+doc/github_backup.1
 
 # Annoying macOS files
 .DS_Store
@@ -33,5 +33,14 @@ doc/aws_hostname.1
 # IDE configuration files
 .vscode
 .atom
 .idea
 
+README
+
+# RSA
+id_rsa
+id_rsa.pub
+
+# Virtual env
+venv
+.venv
```
**CHANGES.rst** (2,373 lines changed; file diff suppressed because it is too large).
**Dockerfile** (42 lines changed; converted from a single-stage Debian image to a two-stage Alpine build using uv):

```diff
@@ -1,16 +1,38 @@
-FROM python:3.9.18-slim
+FROM python:3.12-alpine3.22 AS builder
 
-RUN --mount=type=cache,target=/var/cache/apt \
-    apt-get update && apt-get install -y git git-lfs
+RUN pip install --no-cache-dir --upgrade pip \
+    && pip install --no-cache-dir uv
 
-WORKDIR /usr/src/app
+WORKDIR /app
 
-COPY release-requirements.txt .
-RUN --mount=type=cache,target=/root/.cache/pip \
-    pip install -r release-requirements.txt
+RUN --mount=type=cache,target=/root/.cache/uv \
+    --mount=type=bind,source=requirements.txt,target=requirements.txt \
+    --mount=type=bind,source=release-requirements.txt,target=release-requirements.txt \
+    uv venv \
+    && uv pip install -r release-requirements.txt
 
 COPY . .
-RUN --mount=type=cache,target=/root/.cache/pip \
-    pip install .
-
-ENTRYPOINT [ "github-backup" ]
+RUN --mount=type=cache,target=/root/.cache/uv \
+    uv pip install .
+
+
+FROM python:3.12-alpine3.22
+ENV PYTHONUNBUFFERED=1
+
+RUN apk add --no-cache \
+    ca-certificates \
+    git \
+    git-lfs \
+    && addgroup -g 1000 appuser \
+    && adduser -D -u 1000 -G appuser appuser
+
+COPY --from=builder --chown=appuser:appuser /app /app
+
+WORKDIR /app
+
+USER appuser
+
+ENV PATH="/app/.venv/bin:$PATH"
+
+ENTRYPOINT ["github-backup"]
```
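A usage note on the new multi-stage Dockerfile (my sketch, not from the repo docs): the uv-built virtualenv is copied wholesale into the runtime stage and exposed via `ENV PATH="/app/.venv/bin:$PATH"`, so the final image carries no pip or uv at all. Something like `docker build -t github-backup .` followed by `docker run --rm github-backup --help` should exercise the `github-backup` entrypoint.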
**Deleted file** (filename lost in this capture; the content matches the repository's old issue-template notice, likely `ISSUE_TEMPLATE.md`, now superseded by the YAML templates above). All 13 lines removed (`@@ -1,13 +0,0 @@`):

```markdown
# Important notice regarding filed issues

This project already fills my needs, and as such I have no real reason to continue it's development. This project is otherwise provided as is, and no support is given.

If pull requests implementing bug fixes or enhancements are pushed, I am happy to review and merge them (time permitting).

If you wish to have a bug fixed, you have a few options:

- Fix it yourself and file a pull request.
- File a bug and hope someone else fixes it for you.
- Pay me to fix it (my rate is $200 an hour, minimum 1 hour, contact me via my [github email address](https://github.com/josegonzalez) if you want to go this route).

In all cases, feel free to file an issue, they may be of help to others in the future.
```
**README.rst** (62 lines changed; +/- markers reconstructed from the old/new line pairs):

```diff
@@ -9,8 +9,8 @@ The package can be used to backup an *entire* `Github <https://github.com/>`_ or
 Requirements
 ============
 
+- Python 3.10 or higher
 - GIT 1.9+
-- Python
 
 Installation
 ============
@@ -49,7 +49,9 @@ CLI Help output::
                      [-P] [-F] [--prefer-ssh] [-v]
                      [--keychain-name OSX_KEYCHAIN_ITEM_NAME]
                      [--keychain-account OSX_KEYCHAIN_ITEM_ACCOUNT]
-                     [--releases] [--assets] [--exclude [REPOSITORY [REPOSITORY ...]]
+                     [--releases] [--latest-releases NUMBER_OF_LATEST_RELEASES]
+                     [--skip-prerelease] [--assets] [--attachments]
+                     [--exclude [REPOSITORY [REPOSITORY ...]]
                      [--throttle-limit THROTTLE_LIMIT] [--throttle-pause THROTTLE_PAUSE]
                      USER
 
@@ -78,6 +80,7 @@ CLI Help output::
                           log level to use (default: info, possible levels:
                           debug, info, warning, error, critical)
     -i, --incremental     incremental backup
+    --incremental-by-files incremental backup using modified time of files
     --starred             include JSON output of starred repositories in backup
     --all-starred         include starred repositories in backup [*]
     --watched             include JSON output of watched repositories in backup
@@ -124,8 +127,15 @@ CLI Help output::
                           keychain that holds the personal access or OAuth token
     --releases            include release information, not including assets or
                           binaries
+    --latest-releases NUMBER_OF_LATEST_RELEASES
+                          include certain number of the latest releases;
+                          only applies if including releases
+    --skip-prerelease     skip prerelease and draft versions; only applies if including releases
     --assets              include assets alongside release information; only
                           applies if including releases
+    --attachments         download user-attachments from issues and pull requests
+                          to issues/attachments/{issue_number}/ and
+                          pulls/attachments/{pull_number}/ directories
     --exclude [REPOSITORY [REPOSITORY ...]]
                           names of repositories to exclude from backup.
     --throttle-limit THROTTLE_LIMIT
@@ -161,7 +171,7 @@ Customise the permissions for your use case, but for a personal account full bac
 
 **User permissions**: Read access to followers, starring, and watching.
 
-**Repository permissions**: Read access to code, commit statuses, issues, metadata, pages, pull requests, and repository hooks.
+**Repository permissions**: Read access to contents, issues, metadata, pull requests, and webhooks.
 
 
 Prefer SSH
@@ -206,13 +216,43 @@ When you use the ``--lfs`` option, you will need to make sure you have Git LFS i
 Instructions on how to do this can be found on https://git-lfs.github.com.
 
 
+About Attachments
+-----------------
+
+When you use the ``--attachments`` option with ``--issues`` or ``--pulls``, the tool will download user-uploaded attachments (images, videos, documents, etc.) from issue and pull request descriptions and comments. In some circumstances attachments contain valuable data related to the topic, and without their backup important information or context might be lost inadvertently.
+
+Attachments are saved to ``issues/attachments/{issue_number}/`` and ``pulls/attachments/{pull_number}/`` directories, where ``{issue_number}`` is the GitHub issue number (e.g., issue #123 saves to ``issues/attachments/123/``). Each attachment directory contains:
+
+- The downloaded attachment files (named by their GitHub identifier with appropriate file extensions)
+- If multiple attachments have the same filename, conflicts are resolved with numeric suffixes (e.g., ``report.pdf``, ``report_1.pdf``, ``report_2.pdf``)
+- A ``manifest.json`` file documenting all downloads, including URLs, file metadata, and download status
+
+The tool automatically extracts file extensions from HTTP headers to ensure files can be more easily opened by your operating system.
+
+**Supported URL formats:**
+
+- Modern: ``github.com/user-attachments/{assets,files}/*``
+- Legacy: ``user-images.githubusercontent.com/*`` and ``private-user-images.githubusercontent.com/*``
+- Repo files: ``github.com/{owner}/{repo}/files/*`` (filtered to current repository)
+- Repo assets: ``github.com/{owner}/{repo}/assets/*`` (filtered to current repository)
+
+**Repository filtering** for repo files/assets handles renamed and transferred repositories gracefully. URLs are included if they either match the current repository name directly, or redirect to it (e.g., ``willmcgugan/rich`` redirects to ``Textualize/rich`` after transfer).
+
+
 Run in Docker container
 -----------------------
 
 To run the tool in a Docker container use the following command:
 
     sudo docker run --rm -v /path/to/backup:/data --name github-backup ghcr.io/josegonzalez/python-github-backup -o /data $OPTIONS $USER
 
 Gotchas / Known-issues
 ======================
 
 All is not everything
 ---------------------
 
-The ``--all`` argument does not include; cloning private repos (``-P, --private``), cloning forks (``-F, --fork``) cloning starred repositories (``--all-starred``), ``--pull-details``, cloning LFS repositories (``--lfs``), cloning gists (``--starred-gists``) or cloning starred gist repos (``--starred-gists``). See examples for more.
+The ``--all`` argument does not include: cloning private repos (``-P, --private``), cloning forks (``-F, --fork``), cloning starred repositories (``--all-starred``), ``--pull-details``, cloning LFS repositories (``--lfs``), cloning gists (``--gists``) or cloning starred gist repos (``--starred-gists``). See examples for more.
 
 Cloning all starred size
 ------------------------
@@ -226,6 +266,12 @@ Using (``-i, --incremental``) will only request new data from the API **since th
 
 This means any blocking errors on previous runs can cause a large amount of missing data in backups.
 
+Using (``--incremental-by-files``) will request new data from the API **based on when the file was modified on filesystem**. e.g. if you modify the file yourself you may miss something.
+
+Still saver than the previous version.
+
+Specifically, issues and pull requests are handled like this.
+
 Known blocking errors
 ---------------------
 
@@ -241,12 +287,6 @@ It's therefore recommended to only use the incremental argument if the output/re
 
 This is due to needing the correct permission for ``--hooks`` on public repos.
 
-2. **Releases blocking**
-
-   A known ``--releases`` (required for ``--assets``) error will sometimes block the backup.
-
-   If you're backing up a lot of repositories with releases e.g. an organisation or ``--all-starred``. You may need to remove ``--releases`` (and therefore ``--assets``) to complete a backup. Documented in `issue 209 <https://github.com/josegonzalez/python-github-backup/issues/209>`_.
-
 "bare" is actually "mirror"
 ---------------------------
@@ -289,7 +329,7 @@ Quietly and incrementally backup useful Github user data (public and private rep
     export FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
     GH_USER=YOUR-GITHUB-USER
 
-    github-backup -f $FINE_ACCESS_TOKEN --prefer-ssh -o ~/github-backup/ -l error -P -i --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-commits --labels --milestones --repositories --wikis --releases --assets --pull-details --gists --starred-gists $GH_USER
+    github-backup -f $FINE_ACCESS_TOKEN --prefer-ssh -o ~/github-backup/ -l error -P -i --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-commits --labels --milestones --repositories --wikis --releases --assets --attachments --pull-details --gists --starred-gists $GH_USER
 
 Debug an error/block or incomplete backup into a temporary directory. Omit "incremental" to fill a previous incomplete backup. ::
```
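The README hunk above describes `manifest.json` without showing one. The sketch below is illustrative only: the entry shape is inferred from the metadata dict built in `download_attachment_file()` further down in this diff, and every value is invented.

```python
# Illustrative manifest.json content, expressed as the Python dict that
# download_attachments() would serialize; all values are made up.
example_manifest = {
    "issue_number": 123,
    "issue_type": "issue",
    "repository": "josegonzalez/python-github-backup",
    "manifest_updated_at": "2025-12-05T15:18:02+00:00",
    "attachments": [
        {
            "url": "https://github.com/user-attachments/assets/0b8f1c2d",  # invented id
            "success": True,
            "http_status": 200,
            "content_type": "image/png",
            "original_filename": "screenshot.png",
            "size_bytes": 48213,
            "downloaded_at": "2025-12-05T15:18:01+00:00",
            "error": None,
            "saved_as": "0b8f1c2d.png",
        }
    ],
}
```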
**Modified file** (filename lost in this capture; the content matches the CLI entry point, likely `bin/github-backup`):

```diff
@@ -1,12 +1,8 @@
 #!/usr/bin/env python
 
-import os, sys, logging
-
-logging.basicConfig(
-    format='%(asctime)s.%(msecs)03d: %(message)s',
-    datefmt='%Y-%m-%dT%H:%M:%S',
-    level=logging.INFO
-)
+import logging
+import os
+import sys
 
 from github_backup.github_backup import (
     backup_account,
@@ -20,6 +16,12 @@ from github_backup.github_backup import (
     retrieve_repositories,
 )
 
+logging.basicConfig(
+    format="%(asctime)s.%(msecs)03d: %(message)s",
+    datefmt="%Y-%m-%dT%H:%M:%S",
+    level=logging.INFO,
+)
+
 
 def main():
     args = parse_args()
@@ -29,7 +31,7 @@ def main():
 
     output_directory = os.path.realpath(args.output_directory)
     if not os.path.isdir(output_directory):
-        logger.info('Create output directory {0}'.format(output_directory))
+        logger.info("Create output directory {0}".format(output_directory))
         mkdir_p(output_directory)
 
     if args.lfs_clone:
@@ -41,10 +43,10 @@ def main():
     logger.root.setLevel(log_level)
 
     if not args.as_app:
-        logger.info('Backing up user {0} to {1}'.format(args.user, output_directory))
+        logger.info("Backing up user {0} to {1}".format(args.user, output_directory))
        authenticated_user = get_authenticated_user(args)
     else:
-        authenticated_user = {'login': None}
+        authenticated_user = {"login": None}
 
     repositories = retrieve_repositories(args, authenticated_user)
     repositories = filter_repositories(args, repositories)
@@ -52,7 +54,7 @@ def main():
         backup_account(args, output_directory)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     try:
         main()
     except Exception as e:
```
**Modified file** (filename lost; this is the package version string, conventionally `github_backup/__init__.py`):

```diff
@@ -1 +1 @@
-__version__ = "0.44.0"
+__version__ = "0.51.0"
```
**Modified file** (filename lost; the remaining hunks all belong to the main module, `github_backup/github_backup.py`). The +/- markers were flattened by the capture and are reconstructed from the old/new line pairs:

```diff
@@ -1,7 +1,6 @@
 #!/usr/bin/env python
 
-from __future__ import print_function
-import socket
 
 import argparse
 import base64
@@ -10,23 +9,22 @@ import codecs
 import errno
 import getpass
 import json
+import logging
 import os
+import platform
 import re
 import select
+import socket
 import ssl
 import subprocess
 import sys
-import logging
 import time
-import platform
-from urllib.parse import urlparse
-from urllib.parse import quote as urlquote
-from urllib.parse import urlencode
-from urllib.error import HTTPError, URLError
-from urllib.request import urlopen
-from urllib.request import Request
-from urllib.request import HTTPRedirectHandler
-from urllib.request import build_opener
 from datetime import datetime
 from http.client import IncompleteRead
+from urllib.error import HTTPError, URLError
+from urllib.parse import quote as urlquote
+from urllib.parse import urlencode, urlparse
+from urllib.request import HTTPRedirectHandler, Request, build_opener, urlopen
 
 try:
     from . import __version__
```
```diff
@@ -39,12 +37,26 @@ FNULL = open(os.devnull, "w")
 FILE_URI_PREFIX = "file://"
 logger = logging.getLogger(__name__)
 
+https_ctx = ssl.create_default_context()
+if not https_ctx.get_ca_certs():
+    import warnings
+
+    warnings.warn(
+        "\n\nYOUR DEFAULT CA CERTS ARE EMPTY.\n"
+        + "PLEASE POPULATE ANY OF:"
+        + "".join(
+            ["\n - " + x for x in ssl.get_default_verify_paths() if type(x) is str]
+        )
+        + "\n",
+        stacklevel=2,
+    )
+    import certifi
+
+    https_ctx = ssl.create_default_context(cafile=certifi.where())
+
 
 def logging_subprocess(
-    popenargs,
-    stdout_log_level=logging.DEBUG,
-    stderr_log_level=logging.ERROR,
-    **kwargs
+    popenargs, stdout_log_level=logging.DEBUG, stderr_log_level=logging.ERROR, **kwargs
 ):
     """
     Variant of subprocess.call that accepts a logger instead of stdout/stderr,
```
```diff
@@ -169,6 +181,12 @@ def parse_args(args=None):
         dest="incremental",
         help="incremental backup",
     )
+    parser.add_argument(
+        "--incremental-by-files",
+        action="store_true",
+        dest="incremental_by_files",
+        help="incremental backup based on modification date of files",
+    )
     parser.add_argument(
         "--starred",
         action="store_true",
```
```diff
@@ -383,12 +401,31 @@ def parse_args(args=None):
         dest="include_releases",
         help="include release information, not including assets or binaries",
     )
+    parser.add_argument(
+        "--latest-releases",
+        type=int,
+        default=0,
+        dest="number_of_latest_releases",
+        help="include certain number of the latest releases; only applies if including releases",
+    )
+    parser.add_argument(
+        "--skip-prerelease",
+        action="store_true",
+        dest="skip_prerelease",
+        help="skip prerelease and draft versions; only applies if including releases",
+    )
     parser.add_argument(
         "--assets",
         action="store_true",
         dest="include_assets",
         help="include assets alongside release information; only applies if including releases",
     )
+    parser.add_argument(
+        "--attachments",
+        action="store_true",
+        dest="include_attachments",
+        help="download user-attachments from issues and pull requests",
+    )
     parser.add_argument(
         "--throttle-limit",
         dest="throttle_limit",
```
```diff
@@ -547,10 +584,15 @@ def retrieve_data_gen(args, template, query_args=None, single_request=False):
     page = 0
 
     while True:
-        page = page + 1
+        if single_request:
+            request_page, request_per_page = None, None
+        else:
+            page = page + 1
+            request_page, request_per_page = page, per_page
+
         request = _construct_request(
-            per_page,
-            page,
+            request_per_page,
+            request_page,
             query_args,
             template,
             auth,
```
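To make the pagination fix above concrete, here is a minimal standalone sketch (mine, with invented names): the page counter only advances in the paginated branch, so single requests carry no paging parameters at all.

```python
import itertools

# Minimal sketch of the fixed pagination loop; the names echo the hunk above.
def paged_params(single_request, per_page=100):
    """Yield (page, per_page) pairs: (None, None) for single requests,
    an incrementing page number otherwise."""
    page = 0
    while True:
        if single_request:
            yield None, None  # no paging params appended to the query string
        else:
            page += 1
            yield page, per_page

assert next(paged_params(True)) == (None, None)
assert list(itertools.islice(paged_params(False), 3)) == [(1, 100), (2, 100), (3, 100)]
```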
```diff
@@ -626,12 +668,12 @@ def retrieve_data_gen(args, template, query_args=None, single_request=False):
         raise Exception(", ".join(errors))
 
     if len(errors) == 0:
-        if type(response) == list:
+        if type(response) is list:
             for resp in response:
                 yield resp
             if len(response) < per_page:
                 break
-        elif type(response) == dict and single_request:
+        elif type(response) is dict and single_request:
             yield response
 
     if len(errors) > 0:
```
```diff
@@ -659,7 +701,7 @@ def _get_response(request, auth, template):
     while True:
         should_continue = False
         try:
-            r = urlopen(request)
+            r = urlopen(request, context=https_ctx)
         except HTTPError as exc:
             errors, should_continue = _request_http_error(exc, auth, errors)  # noqa
             r = exc
```
```diff
@@ -684,14 +726,22 @@
 def _construct_request(
     per_page, page, query_args, template, auth, as_app=None, fine=False
 ):
-    querystring = urlencode(
-        dict(
-            list({"per_page": per_page, "page": page}.items())
-            + list(query_args.items())
-        )
-    )
+    all_query_args = {}
+    if per_page:
+        all_query_args["per_page"] = per_page
+    if page:
+        all_query_args["page"] = page
+    if query_args:
+        all_query_args.update(query_args)
 
-    request = Request(template + "?" + querystring)
+    request_url = template
+    if all_query_args:
+        querystring = urlencode(all_query_args)
+        request_url = template + "?" + querystring
+    else:
+        querystring = ""
+
+    request = Request(request_url)
     if auth is not None:
         if not as_app:
             if fine:
```
```diff
@@ -704,7 +754,11 @@
             request.add_header(
                 "Accept", "application/vnd.github.machine-man-preview+json"
             )
-    logger.info("Requesting {}?{}".format(template, querystring))
+
+    log_url = template
+    if querystring:
+        log_url += "?" + querystring
+    logger.info("Requesting {}".format(log_url))
     return request
```
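The two hunks above change `_construct_request()` so that a query string is only appended when at least one parameter is actually set. A hedged standalone sketch of the same assembly (the helper name is invented):

```python
from urllib.parse import urlencode

def build_url(template, per_page=None, page=None, **query_args):
    # Mirrors the hunk: collect only the parameters that are set,
    # and fall back to the bare URL when there are none.
    params = {}
    if per_page:
        params["per_page"] = per_page
    if page:
        params["page"] = page
    params.update(query_args)
    return template + "?" + urlencode(params) if params else template

assert build_url("https://api.github.com/user") == "https://api.github.com/user"
assert build_url("https://api.github.com/user/repos", 100, 2).endswith("?per_page=100&page=2")
```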
```diff
@@ -766,18 +820,27 @@ class S3HTTPRedirectHandler(HTTPRedirectHandler):
         request = super(S3HTTPRedirectHandler, self).redirect_request(
             req, fp, code, msg, headers, newurl
         )
-        del request.headers["Authorization"]
+        # Only delete Authorization header if it exists (attachments may not have it)
+        if "Authorization" in request.headers:
+            del request.headers["Authorization"]
         return request
 
 
-def download_file(url, path, auth):
+def download_file(url, path, auth, as_app=False, fine=False):
     # Skip downloading release assets if they already exist on disk so we don't redownload on every sync
     if os.path.exists(path):
         return
 
-    request = Request(url)
+    request = _construct_request(
+        per_page=100,
+        page=1,
+        query_args={},
+        template=url,
+        auth=auth,
+        as_app=as_app,
+        fine=fine,
+    )
     request.add_header("Accept", "application/octet-stream")
-    request.add_header("Authorization", "Basic ".encode("ascii") + auth)
     opener = build_opener(S3HTTPRedirectHandler)
 
     try:
```
@@ -812,6 +875,594 @@ def download_file(url, path, auth):
|
||||
)
|
||||
|
||||
|
||||
def download_attachment_file(url, path, auth, as_app=False, fine=False):
|
||||
"""Download attachment file directly (not via GitHub API).
|
||||
|
||||
Similar to download_file() but for direct file URLs, not API endpoints.
|
||||
Attachment URLs (user-images, user-attachments) are direct downloads,
|
||||
not API endpoints, so we skip _construct_request() which adds API params.
|
||||
|
||||
URL Format Support & Authentication Requirements:
|
||||
|
||||
| URL Format | Auth Required | Notes |
|
||||
|----------------------------------------------|---------------|--------------------------|
|
||||
| github.com/user-attachments/assets/* | Private only | Modern format (2024+) |
|
||||
| github.com/user-attachments/files/* | Private only | Modern format (2024+) |
|
||||
| user-images.githubusercontent.com/* | No (public) | Legacy CDN, all eras |
|
||||
| private-user-images.githubusercontent.com/* | JWT in URL | Legacy private (5min) |
|
||||
| github.com/{owner}/{repo}/files/* | Repo filter | Old repo files |
|
||||
|
||||
- Modern user-attachments: Requires GitHub token auth for private repos
|
||||
- Legacy public CDN: No auth needed/accepted (returns 400 with auth header)
|
||||
- Legacy private CDN: Uses JWT token embedded in URL, no GitHub token needed
|
||||
- Repo files: Filtered to current repository only during extraction
|
||||
|
||||
Returns dict with metadata:
|
||||
- success: bool
|
||||
- http_status: int (200, 404, etc.)
|
||||
- content_type: str or None
|
||||
- original_filename: str or None (from Content-Disposition)
|
||||
- size_bytes: int or None
|
||||
- error: str or None
|
||||
"""
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
|
||||
metadata = {
|
||||
"url": url,
|
||||
"success": False,
|
||||
"http_status": None,
|
||||
"content_type": None,
|
||||
"original_filename": None,
|
||||
"size_bytes": None,
|
||||
"downloaded_at": datetime.now(timezone.utc).isoformat(),
|
||||
"error": None,
|
||||
}
|
||||
|
||||
if os.path.exists(path):
|
||||
metadata["success"] = True
|
||||
metadata["http_status"] = 200 # Assume success if already exists
|
||||
metadata["size_bytes"] = os.path.getsize(path)
|
||||
return metadata
|
||||
|
||||
# Create simple request (no API query params)
|
||||
request = Request(url)
|
||||
request.add_header("Accept", "application/octet-stream")
|
||||
|
||||
# Add authentication header only for modern github.com/user-attachments URLs
|
||||
# Legacy CDN URLs (user-images.githubusercontent.com) are public and don't need/accept auth
|
||||
# Private CDN URLs (private-user-images) use JWT tokens embedded in the URL
|
||||
if auth is not None and "github.com/user-attachments/" in url:
|
||||
if not as_app:
|
||||
if fine:
|
||||
# Fine-grained token: plain token with "token " prefix
|
||||
request.add_header("Authorization", "token " + auth)
|
||||
else:
|
||||
# Classic token: base64-encoded with "Basic " prefix
|
||||
request.add_header("Authorization", "Basic ".encode("ascii") + auth)
|
||||
else:
|
||||
# App authentication
|
||||
auth = auth.encode("ascii")
|
||||
request.add_header("Authorization", "token ".encode("ascii") + auth)
|
||||
|
||||
# Reuse S3HTTPRedirectHandler from download_file()
|
||||
opener = build_opener(S3HTTPRedirectHandler)
|
||||
|
||||
temp_path = path + ".temp"
|
||||
|
||||
try:
|
||||
response = opener.open(request)
|
||||
metadata["http_status"] = response.getcode()
|
||||
|
||||
# Extract Content-Type
|
||||
content_type = response.headers.get("Content-Type", "").split(";")[0].strip()
|
||||
if content_type:
|
||||
metadata["content_type"] = content_type
|
||||
|
||||
# Extract original filename from Content-Disposition header
|
||||
# Format: attachment; filename=example.mov or attachment;filename="example.mov"
|
||||
content_disposition = response.headers.get("Content-Disposition", "")
|
||||
if content_disposition:
|
||||
# Match: filename=something or filename="something" or filename*=UTF-8''something
|
||||
match = re.search(r'filename\*?=["\']?([^"\';\r\n]+)', content_disposition)
|
||||
if match:
|
||||
original_filename = match.group(1).strip()
|
||||
# Handle RFC 5987 encoding: filename*=UTF-8''example.mov
|
||||
if "UTF-8''" in original_filename:
|
||||
original_filename = original_filename.split("UTF-8''")[1]
|
||||
metadata["original_filename"] = original_filename
|
||||
|
||||
# Fallback: Extract filename from final URL after redirects
|
||||
# This handles user-attachments/assets URLs which redirect to S3 with filename.ext
|
||||
if not metadata["original_filename"]:
|
||||
from urllib.parse import urlparse, unquote
|
||||
|
||||
final_url = response.geturl()
|
||||
parsed = urlparse(final_url)
|
||||
# Get filename from path (last component before query string)
|
||||
path_parts = parsed.path.split("/")
|
||||
if path_parts:
|
||||
# URL might be encoded, decode it
|
||||
filename_from_url = unquote(path_parts[-1])
|
||||
# Only use if it has an extension
|
||||
if "." in filename_from_url:
|
||||
metadata["original_filename"] = filename_from_url
|
||||
|
||||
# Download file to temporary location
|
||||
chunk_size = 16 * 1024
|
||||
bytes_downloaded = 0
|
||||
with open(temp_path, "wb") as f:
|
||||
while True:
|
||||
chunk = response.read(chunk_size)
|
||||
if not chunk:
|
||||
break
|
||||
f.write(chunk)
|
||||
bytes_downloaded += len(chunk)
|
||||
|
||||
# Atomic rename to final location
|
||||
os.rename(temp_path, path)
|
||||
|
||||
metadata["size_bytes"] = bytes_downloaded
|
||||
metadata["success"] = True
|
||||
|
||||
except HTTPError as exc:
|
||||
metadata["http_status"] = exc.code
|
||||
metadata["error"] = str(exc.reason)
|
||||
logger.warning(
|
||||
"Skipping download of attachment {0} due to HTTPError: {1}".format(
|
||||
url, exc.reason
|
||||
)
|
||||
)
|
||||
except URLError as e:
|
||||
metadata["error"] = str(e.reason)
|
||||
logger.warning(
|
||||
"Skipping download of attachment {0} due to URLError: {1}".format(
|
||||
url, e.reason
|
||||
)
|
||||
)
|
||||
except socket.error as e:
|
||||
metadata["error"] = str(e.strerror) if hasattr(e, "strerror") else str(e)
|
||||
logger.warning(
|
||||
"Skipping download of attachment {0} due to socket error: {1}".format(
|
||||
url, e.strerror if hasattr(e, "strerror") else str(e)
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
metadata["error"] = str(e)
|
||||
logger.warning(
|
||||
"Skipping download of attachment {0} due to error: {1}".format(url, str(e))
|
||||
)
|
||||
# Clean up temp file if it was partially created
|
||||
if os.path.exists(temp_path):
|
||||
try:
|
||||
os.remove(temp_path)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return metadata
|
||||
|
||||
|
||||
def extract_attachment_urls(item_data, issue_number=None, repository_full_name=None):
|
||||
"""Extract GitHub-hosted attachment URLs from issue/PR body and comments.
|
||||
|
||||
What qualifies as an attachment?
|
||||
There is no "attachment" concept in the GitHub API - it's a user behavior pattern
|
||||
we've identified through analysis of real-world repositories. We define attachments as:
|
||||
|
||||
- User-uploaded files hosted on GitHub's CDN domains
|
||||
- Found outside of code blocks (not examples/documentation)
|
||||
- Matches known GitHub attachment URL patterns
|
||||
|
||||
This intentionally captures bare URLs pasted by users, not just markdown/HTML syntax.
|
||||
Some false positives (example URLs in documentation) may occur - these fail gracefully
|
||||
with HTTP 404 and are logged in the manifest.
|
||||
|
||||
Supported URL formats:
|
||||
- Modern: github.com/user-attachments/{assets,files}/*
|
||||
- Legacy: user-images.githubusercontent.com/* (including private-user-images)
|
||||
- Repo files: github.com/{owner}/{repo}/files/* (filtered to current repo)
|
||||
- Repo assets: github.com/{owner}/{repo}/assets/* (filtered to current repo)
|
||||
|
||||
Repository filtering (repo files/assets only):
|
||||
- Direct match: URL is for current repository → included
|
||||
- Redirect match: URL redirects to current repository → included (handles renames/transfers)
|
||||
- Different repo: URL is for different repository → excluded
|
||||
|
||||
Code block filtering:
|
||||
- Removes fenced code blocks (```) and inline code (`) before extraction
|
||||
- Prevents extracting URLs from code examples and documentation snippets
|
||||
|
||||
Args:
|
||||
item_data: Issue or PR data dict
|
||||
issue_number: Issue/PR number for logging
|
||||
repository_full_name: Full repository name (owner/repo) for filtering repo-scoped URLs
|
||||
"""
|
||||
import re
|
||||
|
||||
urls = []
|
||||
|
||||
# Define all GitHub attachment patterns
|
||||
# Stop at markdown punctuation: whitespace, ), `, ", >, <
|
||||
# Trailing sentence punctuation (. ! ? , ; : ' ") is stripped in post-processing
|
||||
patterns = [
|
||||
r'https://github\.com/user-attachments/(?:assets|files)/[^\s\)`"<>]+', # Modern
|
||||
r'https://(?:private-)?user-images\.githubusercontent\.com/[^\s\)`"<>]+', # Legacy CDN
|
||||
]
|
||||
|
||||
# Add repo-scoped patterns (will be filtered by repository later)
|
||||
# These patterns match ANY repo, then we filter to current repo with redirect checking
|
||||
repo_files_pattern = r'https://github\.com/[^/]+/[^/]+/files/\d+/[^\s\)`"<>]+'
|
||||
repo_assets_pattern = r'https://github\.com/[^/]+/[^/]+/assets/\d+/[^\s\)`"<>]+'
|
||||
patterns.append(repo_files_pattern)
|
||||
patterns.append(repo_assets_pattern)
|
||||
|
||||
def clean_url(url):
|
||||
"""Remove trailing sentence and markdown punctuation that's not part of the URL."""
|
||||
return url.rstrip(".!?,;:'\")")
|
||||
|
||||
def remove_code_blocks(text):
|
||||
"""Remove markdown code blocks (fenced and inline) from text.
|
||||
|
||||
This prevents extracting URLs from code examples like:
|
||||
- Fenced code blocks: ```code```
|
||||
- Inline code: `code`
|
||||
"""
|
||||
# Remove fenced code blocks first (```...```)
|
||||
# DOTALL flag makes . match newlines
|
||||
text = re.sub(r"```.*?```", "", text, flags=re.DOTALL)
|
||||
|
||||
# Remove inline code (`...`)
|
||||
# Non-greedy match between backticks
|
||||
text = re.sub(r"`[^`]*`", "", text)
|
||||
|
||||
return text
|
||||
|
||||
def is_repo_scoped_url(url):
|
||||
"""Check if URL is a repo-scoped attachment (files or assets)."""
|
||||
return bool(
|
||||
re.match(r"https://github\.com/[^/]+/[^/]+/(?:files|assets)/\d+/", url)
|
||||
)
|
||||
|
||||
def check_redirect_to_current_repo(url, current_repo):
|
||||
"""Check if URL redirects to current repository.
|
||||
|
||||
Returns True if:
|
||||
- URL is already for current repo
|
||||
- URL redirects (301/302) to current repo (handles renames/transfers)
|
||||
|
||||
Returns False otherwise (URL is for a different repo).
|
||||
"""
|
||||
# Extract owner/repo from URL
|
||||
match = re.match(r"https://github\.com/([^/]+)/([^/]+)/", url)
|
||||
if not match:
|
||||
return False
|
||||
|
||||
url_owner, url_repo = match.groups()
|
||||
url_repo_full = f"{url_owner}/{url_repo}"
|
||||
|
||||
# Direct match - no need to check redirect
|
||||
if url_repo_full.lower() == current_repo.lower():
|
||||
return True
|
||||
|
||||
# Different repo - check if it redirects to current repo
|
||||
# This handles repository transfers and renames
|
||||
try:
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
|
||||
# Make HEAD request with redirect following disabled
|
||||
# We need to manually handle redirects to see the Location header
|
||||
request = urllib.request.Request(url, method="HEAD")
|
||||
request.add_header("User-Agent", "python-github-backup")
|
||||
|
||||
# Create opener that does NOT follow redirects
|
||||
class NoRedirectHandler(urllib.request.HTTPRedirectHandler):
|
||||
def redirect_request(self, req, fp, code, msg, headers, newurl):
|
||||
return None # Don't follow redirects
|
||||
|
||||
opener = urllib.request.build_opener(NoRedirectHandler)
|
||||
|
||||
try:
|
||||
_ = opener.open(request, timeout=10)
|
||||
# Got 200 - URL works as-is but for different repo
|
||||
return False
|
||||
except urllib.error.HTTPError as e:
|
||||
# Check if it's a redirect (301, 302, 307, 308)
|
||||
if e.code in (301, 302, 307, 308):
|
||||
location = e.headers.get("Location", "")
|
||||
# Check if redirect points to current repo
|
||||
if location:
|
||||
redirect_match = re.match(
|
||||
r"https://github\.com/([^/]+)/([^/]+)/", location
|
||||
)
|
||||
if redirect_match:
|
||||
redirect_owner, redirect_repo = redirect_match.groups()
|
||||
redirect_repo_full = f"{redirect_owner}/{redirect_repo}"
|
||||
return redirect_repo_full.lower() == current_repo.lower()
|
||||
return False
|
||||
except Exception:
|
||||
# On any error (timeout, network issue, etc.), be conservative
|
||||
# and exclude the URL to avoid downloading from wrong repos
|
||||
return False
|
||||
|
||||
# Extract from body
|
||||
body = item_data.get("body") or ""
|
||||
# Remove code blocks before searching for URLs
|
||||
body_cleaned = remove_code_blocks(body)
|
||||
for pattern in patterns:
|
||||
found_urls = re.findall(pattern, body_cleaned)
|
||||
urls.extend([clean_url(url) for url in found_urls])
|
||||
|
||||
# Extract from issue comments
|
||||
if "comment_data" in item_data:
|
||||
for comment in item_data["comment_data"]:
|
||||
comment_body = comment.get("body") or ""
|
||||
# Remove code blocks before searching for URLs
|
||||
comment_cleaned = remove_code_blocks(comment_body)
|
||||
for pattern in patterns:
|
||||
found_urls = re.findall(pattern, comment_cleaned)
|
||||
urls.extend([clean_url(url) for url in found_urls])
|
||||
|
||||
# Extract from PR regular comments
|
||||
if "comment_regular_data" in item_data:
|
||||
for comment in item_data["comment_regular_data"]:
|
||||
comment_body = comment.get("body") or ""
|
||||
# Remove code blocks before searching for URLs
|
||||
comment_cleaned = remove_code_blocks(comment_body)
|
||||
for pattern in patterns:
|
||||
found_urls = re.findall(pattern, comment_cleaned)
|
||||
urls.extend([clean_url(url) for url in found_urls])
|
||||
|
||||
regex_urls = list(set(urls)) # dedupe
|
||||
|
||||
# Filter repo-scoped URLs to current repository only
|
||||
# This handles repository transfers/renames via redirect checking
|
||||
if repository_full_name:
|
||||
filtered_urls = []
|
||||
for url in regex_urls:
|
||||
if is_repo_scoped_url(url):
|
||||
# Check if URL belongs to current repo (or redirects to it)
|
||||
if check_redirect_to_current_repo(url, repository_full_name):
|
||||
filtered_urls.append(url)
|
||||
# else: skip URLs from other repositories
|
||||
else:
|
||||
# Non-repo-scoped URLs (user-attachments, CDN) - always include
|
||||
filtered_urls.append(url)
|
||||
regex_urls = filtered_urls
|
||||
|
||||
return regex_urls
|
||||
|
||||
|
||||
def get_attachment_filename(url):
|
||||
"""Get filename from attachment URL, handling all GitHub formats.
|
||||
|
||||
Formats:
|
||||
- github.com/user-attachments/assets/{uuid} → uuid (add extension later)
|
||||
- github.com/user-attachments/files/{id}/{filename} → filename
|
||||
- github.com/{owner}/{repo}/files/{id}/{filename} → filename
|
||||
- user-images.githubusercontent.com/{user}/{hash}.{ext} → hash.ext
|
||||
- private-user-images.githubusercontent.com/...?jwt=... → extract from path
|
||||
"""
|
||||
from urllib.parse import urlparse
|
||||
|
||||
parsed = urlparse(url)
|
||||
path_parts = parsed.path.split("/")
|
||||
|
||||
# Modern: /user-attachments/files/{id}/{filename}
|
||||
if "user-attachments/files" in parsed.path:
|
||||
return path_parts[-1]
|
||||
|
||||
# Modern: /user-attachments/assets/{uuid}
|
||||
elif "user-attachments/assets" in parsed.path:
|
||||
return path_parts[-1] # extension added later via detect_and_add_extension
|
||||
|
||||
# Repo files: /{owner}/{repo}/files/{id}/{filename}
|
||||
elif "/files/" in parsed.path and len(path_parts) >= 2:
|
||||
return path_parts[-1]
|
||||
|
||||
# Legacy: user-images.githubusercontent.com/{user}/{hash-with-ext}
|
||||
elif "githubusercontent.com" in parsed.netloc:
|
||||
return path_parts[-1] # Already has extension usually
|
||||
|
||||
# Fallback: use last path component
|
||||
return path_parts[-1] if path_parts[-1] else "unknown_attachment"
|
||||
|
||||
|
||||
def resolve_filename_collision(filepath):
|
||||
"""Resolve filename collisions using counter suffix pattern.
|
||||
|
||||
If filepath exists, returns a new filepath with counter suffix.
|
||||
Pattern: report.pdf → report_1.pdf → report_2.pdf
|
||||
|
||||
Also protects against manifest.json collisions by treating it as reserved.
|
||||
|
||||
Args:
|
||||
filepath: Full path to file that might exist
|
||||
|
||||
Returns:
|
||||
filepath that doesn't collide (may be same as input if no collision)
|
||||
"""
|
||||
directory = os.path.dirname(filepath)
|
||||
filename = os.path.basename(filepath)
|
||||
|
||||
# Protect manifest.json - it's a reserved filename
|
||||
if filename == "manifest.json":
|
||||
name, ext = os.path.splitext(filename)
|
||||
counter = 1
|
||||
while True:
|
||||
new_filename = f"{name}_{counter}{ext}"
|
||||
new_filepath = os.path.join(directory, new_filename)
|
||||
if not os.path.exists(new_filepath):
|
||||
return new_filepath
|
||||
counter += 1
|
||||
|
||||
if not os.path.exists(filepath):
|
||||
return filepath
|
||||
|
||||
name, ext = os.path.splitext(filename)
|
||||
|
||||
counter = 1
|
||||
while True:
|
||||
new_filename = f"{name}_{counter}{ext}"
|
||||
new_filepath = os.path.join(directory, new_filename)
|
||||
if not os.path.exists(new_filepath):
|
||||
return new_filepath
|
||||
counter += 1
|
||||
|
||||
|
||||
def download_attachments(
    args, item_cwd, item_data, number, repository, item_type="issue"
):
    """Download user-attachments from issue/PR body and comments with manifest.

    Args:
        args: Command line arguments
        item_cwd: Working directory (issue_cwd or pulls_cwd)
        item_data: Issue or PR data dict
        number: Issue or PR number
        repository: Repository dict
        item_type: "issue" or "pull" for logging/manifest
    """
    import json
    from datetime import datetime, timezone

    item_type_display = "issue" if item_type == "issue" else "pull request"

    urls = extract_attachment_urls(
        item_data, issue_number=number, repository_full_name=repository["full_name"]
    )
    if not urls:
        return

    attachments_dir = os.path.join(item_cwd, "attachments", str(number))
    manifest_path = os.path.join(attachments_dir, "manifest.json")

    # Load existing manifest if skip_existing is enabled
    existing_urls = set()
    existing_metadata = []
    if args.skip_existing and os.path.exists(manifest_path):
        try:
            with open(manifest_path, "r") as f:
                existing_manifest = json.load(f)
            all_metadata = existing_manifest.get("attachments", [])
            # Only skip URLs that were successfully downloaded OR failed with permanent errors
            # Retry transient failures (5xx, timeouts, network errors)
            for item in all_metadata:
                if item.get("success"):
                    existing_urls.add(item["url"])
                else:
                    # Check if this is a permanent failure (don't retry) or transient (retry)
                    http_status = item.get("http_status")
                    if http_status in [404, 410, 451]:
                        # Permanent failures - don't retry
                        existing_urls.add(item["url"])
                    # Transient failures (5xx, auth errors, timeouts) will be retried
            existing_metadata = all_metadata
        except (json.JSONDecodeError, IOError):
            # If manifest is corrupted, re-download everything
            logger.warning(
                "Corrupted manifest for {0} #{1}, will re-download".format(
                    item_type_display, number
                )
            )
            existing_urls = set()
            existing_metadata = []

    # Filter to only new URLs
    new_urls = [url for url in urls if url not in existing_urls]

    if not new_urls and existing_urls:
        logger.debug(
            "Skipping attachments for {0} #{1} (all {2} already downloaded)".format(
                item_type_display, number, len(urls)
            )
        )
        return

    if new_urls:
        logger.info(
            "Downloading {0} new attachment(s) for {1} #{2}".format(
                len(new_urls), item_type_display, number
            )
        )

    mkdir_p(item_cwd, attachments_dir)

    # Collect metadata for manifest (start with existing)
    attachment_metadata_list = existing_metadata[:]

    for url in new_urls:
        filename = get_attachment_filename(url)
        filepath = os.path.join(attachments_dir, filename)

        # Check for collision BEFORE downloading
        filepath = resolve_filename_collision(filepath)

        # Download and get metadata
        metadata = download_attachment_file(
            url,
            filepath,
            get_auth(args, encode=not args.as_app),
            as_app=args.as_app,
            fine=args.token_fine is not None,
        )

        # If download succeeded but we got an extension from Content-Disposition,
        # we may need to rename the file to add the extension
        if metadata["success"] and metadata.get("original_filename"):
            original_ext = os.path.splitext(metadata["original_filename"])[1]
            current_ext = os.path.splitext(filepath)[1]

            # Add extension if not present
            if original_ext and current_ext != original_ext:
                final_filepath = filepath + original_ext
                # Check for collision again with new extension
                final_filepath = resolve_filename_collision(final_filepath)
                logger.debug(
                    "Adding extension {0} to {1}".format(original_ext, filepath)
                )

                # Rename to add extension (already atomic from download)
                try:
                    os.rename(filepath, final_filepath)
                    metadata["saved_as"] = os.path.basename(final_filepath)
                except Exception as e:
                    logger.warning(
                        "Could not add extension to {0}: {1}".format(filepath, str(e))
                    )
                    metadata["saved_as"] = os.path.basename(filepath)
            else:
                metadata["saved_as"] = os.path.basename(filepath)
        elif metadata["success"]:
            metadata["saved_as"] = os.path.basename(filepath)
        else:
            metadata["saved_as"] = None

        attachment_metadata_list.append(metadata)

    # Write manifest
    if attachment_metadata_list:
        manifest = {
            "issue_number": number,
            "issue_type": item_type,
            "repository": f"{args.user}/{args.repository}"
            if hasattr(args, "repository") and args.repository
            else args.user,
            "manifest_updated_at": datetime.now(timezone.utc).isoformat(),
            "attachments": attachment_metadata_list,
        }

        manifest_path = os.path.join(attachments_dir, "manifest.json")
        with open(manifest_path + ".temp", "w") as f:
            json.dump(manifest, f, indent=2)
        os.rename(manifest_path + ".temp", manifest_path)  # Atomic write
        logger.debug(
            "Wrote manifest for {0} #{1}: {2} attachments".format(
                item_type_display, number, len(attachment_metadata_list)
            )
        )
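
A minimal sketch (not part of the diff) of consuming the manifest written above, e.g. to see which attachments will be retried on the next --skip-existing run; the field names match those read and written by download_attachments, the path is hypothetical:

import json

with open("backup/repo/issues/attachments/42/manifest.json") as f:
    manifest = json.load(f)

for item in manifest["attachments"]:
    if item.get("success"):
        continue
    status = item.get("http_status")
    kind = "permanent, kept as-is" if status in (404, 410, 451) else "transient, will retry"
    print("{0}: {1}".format(item["url"], kind))
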

def get_authenticated_user(args):
    template = "https://{0}/user".format(get_github_api_host(args))
    data = retrieve_data(args, template, single_request=True)

@@ -847,9 +1498,13 @@ def retrieve_repositories(args, authenticated_user):
         )

     if args.repository:
+        if "/" in args.repository:
+            repo_path = args.repository
+        else:
+            repo_path = "{0}/{1}".format(args.user, args.repository)
         single_request = True
-        template = "https://{0}/repos/{1}/{2}".format(
-            get_github_api_host(args), args.user, args.repository
+        template = "https://{0}/repos/{1}".format(
+            get_github_api_host(args), repo_path
         )

     repos = retrieve_data(args, template, single_request=single_request)
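
The effect of the new OWNER/REPO branch above, sketched in isolation (plain strings stand in for args):

def resolve_repo_path(user, repository):
    # an OWNER/REPO argument is taken verbatim; a bare REPO is
    # qualified with the configured user, as in the diff above
    return repository if "/" in repository else "{0}/{1}".format(user, repository)

assert resolve_repo_path("octocat", "hello-world") == "octocat/hello-world"
assert resolve_repo_path("octocat", "someorg/hello-world") == "someorg/hello-world"
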
@@ -890,6 +1545,8 @@ def retrieve_repositories(args, authenticated_user):


 def filter_repositories(args, unfiltered_repositories):
+    if args.repository:
+        return unfiltered_repositories
     logger.info("Filtering repositories")

     repositories = []
@@ -919,11 +1576,15 @@ def filter_repositories(args, unfiltered_repositories):
             if r.get("language") and r.get("language").lower() in languages
         ]  # noqa
     if name_regex:
-        repositories = [r for r in repositories if name_regex.match(r["name"])]
+        repositories = [
+            r for r in repositories if "name" not in r or name_regex.match(r["name"])
+        ]
     if args.skip_archived:
         repositories = [r for r in repositories if not r.get("archived")]
     if args.exclude:
-        repositories = [r for r in repositories if r["name"] not in args.exclude]
+        repositories = [
+            r for r in repositories if "name" not in r or r["name"] not in args.exclude
+        ]

     return repositories
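
Why the new '"name" not in r' guard matters, as a standalone sketch (hypothetical data; some list entries, such as gists, may carry no "name" key):

import re

repositories = [{"name": "backup-tools"}, {"id": "abc123"}]  # second entry has no name
name_regex = re.compile(r"^backup-.*$")

kept = [r for r in repositories if "name" not in r or name_regex.match(r["name"])]
assert kept == repositories  # nameless entries pass through instead of raising KeyError
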
@@ -1078,16 +1739,28 @@ def backup_issues(args, repo_cwd, repository, repos_template):
     comments_template = _issue_template + "/{0}/comments"
     events_template = _issue_template + "/{0}/events"
     for number, issue in list(issues.items()):
+        issue_file = "{0}/{1}.json".format(issue_cwd, number)
+        if args.incremental_by_files and os.path.isfile(issue_file):
+            modified = os.path.getmtime(issue_file)
+            modified = datetime.fromtimestamp(modified).strftime("%Y-%m-%dT%H:%M:%SZ")
+            if modified > issue["updated_at"]:
+                logger.info("Skipping issue {0} because it wasn't modified since last backup".format(number))
+                continue

         if args.include_issue_comments or args.include_everything:
             template = comments_template.format(number)
             issues[number]["comment_data"] = retrieve_data(args, template)
         if args.include_issue_events or args.include_everything:
             template = events_template.format(number)
             issues[number]["event_data"] = retrieve_data(args, template)
+        if args.include_attachments:
+            download_attachments(
+                args, issue_cwd, issues[number], number, repository, item_type="issue"
+            )

         issue_file = "{0}/{1}.json".format(issue_cwd, number)
-        with codecs.open(issue_file, "w", encoding="utf-8") as f:
+        with codecs.open(issue_file + ".temp", "w", encoding="utf-8") as f:
             json_dump(issue, f)
+        os.rename(issue_file + ".temp", issue_file)  # Unlike json_dump, this is atomic


 def backup_pulls(args, repo_cwd, repository, repos_template):
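
The incremental skip above compares two ISO-8601 strings, so ">" is a lexicographic comparison that matches chronological order when both sides use the same format and zone. A sketch of the same check (the sketch normalizes the file mtime to UTC, whereas the diff formats the local timestamp):

import os
from datetime import datetime, timezone

def is_unchanged(json_path, updated_at):
    # updated_at is GitHub's "YYYY-MM-DDTHH:MM:SSZ" UTC timestamp
    if not os.path.isfile(json_path):
        return False
    modified = datetime.fromtimestamp(os.path.getmtime(json_path), tz=timezone.utc)
    return modified.strftime("%Y-%m-%dT%H:%M:%SZ") > updated_at
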
@@ -1140,6 +1813,13 @@ def backup_pulls(args, repo_cwd, repository, repos_template):
     comments_template = _pulls_template + "/{0}/comments"
     commits_template = _pulls_template + "/{0}/commits"
     for number, pull in list(pulls.items()):
+        pull_file = "{0}/{1}.json".format(pulls_cwd, number)
+        if args.incremental_by_files and os.path.isfile(pull_file):
+            modified = os.path.getmtime(pull_file)
+            modified = datetime.fromtimestamp(modified).strftime("%Y-%m-%dT%H:%M:%SZ")
+            if modified > pull["updated_at"]:
+                logger.info("Skipping pull request {0} because it wasn't modified since last backup".format(number))
+                continue
         if args.include_pull_comments or args.include_everything:
             template = comments_regular_template.format(number)
             pulls[number]["comment_regular_data"] = retrieve_data(args, template)
@@ -1148,10 +1828,14 @@ def backup_pulls(args, repo_cwd, repository, repos_template):
         if args.include_pull_commits or args.include_everything:
             template = commits_template.format(number)
             pulls[number]["commit_data"] = retrieve_data(args, template)
+        if args.include_attachments:
+            download_attachments(
+                args, pulls_cwd, pulls[number], number, repository, item_type="pull"
+            )

         pull_file = "{0}/{1}.json".format(pulls_cwd, number)
-        with codecs.open(pull_file, "w", encoding="utf-8") as f:
+        with codecs.open(pull_file + ".temp", "w", encoding="utf-8") as f:
             json_dump(pull, f)
+        os.rename(pull_file + ".temp", pull_file)  # Unlike json_dump, this is atomic


 def backup_milestones(args, repo_cwd, repository, repos_template):
@@ -1196,8 +1880,11 @@ def backup_hooks(args, repo_cwd, repository, repos_template):
     template = "{0}/{1}/hooks".format(repos_template, repository["full_name"])
     try:
         _backup_data(args, "hooks", template, output_file, hook_cwd)
-    except SystemExit:
-        logger.info("Unable to read hooks, skipping")
+    except Exception as e:
+        if "404" in str(e):
+            logger.info("Unable to read hooks, skipping")
+        else:
+            raise e


 def backup_releases(args, repo_cwd, repository, repos_template, include_assets=False):
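
The hooks error handling above, reduced to its pattern (a sketch; it assumes, as the diff does, that the underlying request raises with the HTTP status embedded in the message):

import logging

logger = logging.getLogger(__name__)

def backup_optional(fetch, what):
    try:
        return fetch()
    except Exception as e:
        if "404" in str(e):
            # resource not visible to this token: log and move on
            logger.info("Unable to read {0}, skipping".format(what))
            return None
        raise  # anything else is unexpected and should propagate
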
@@ -1213,8 +1900,24 @@ def backup_releases(args, repo_cwd, repository, repos_template, include_assets=False):
     release_template = "{0}/{1}/releases".format(repos_template, repository_fullname)
     releases = retrieve_data(args, release_template, query_args=query_args)

+    if args.skip_prerelease:
+        releases = [r for r in releases if not r["prerelease"] and not r["draft"]]
+
+    if args.number_of_latest_releases and args.number_of_latest_releases < len(
+        releases
+    ):
+        releases.sort(
+            key=lambda item: datetime.strptime(
+                item["created_at"], "%Y-%m-%dT%H:%M:%SZ"
+            ),
+            reverse=True,
+        )
+        releases = releases[: args.number_of_latest_releases]
+        logger.info("Saving the latest {0} releases to disk".format(len(releases)))
+    else:
+        logger.info("Saving {0} releases to disk".format(len(releases)))
+
     # for each release, store it
-    logger.info("Saving {0} releases to disk".format(len(releases)))
     for release in releases:
         release_name = release["tag_name"]
         release_name_safe = release_name.replace("/", "__")
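
The latest-N selection above, exercised with fake data (a sketch; created_at uses GitHub's timestamp format):

from datetime import datetime

releases = [
    {"tag_name": "v1", "created_at": "2023-01-01T00:00:00Z"},
    {"tag_name": "v3", "created_at": "2025-06-01T00:00:00Z"},
    {"tag_name": "v2", "created_at": "2024-03-15T00:00:00Z"},
]
releases.sort(
    key=lambda item: datetime.strptime(item["created_at"], "%Y-%m-%dT%H:%M:%SZ"),
    reverse=True,
)
assert [r["tag_name"] for r in releases[:2]] == ["v3", "v2"]  # newest first
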
@@ -1234,7 +1937,9 @@ def backup_releases(args, repo_cwd, repository, repos_template, include_assets=False):
                 download_file(
                     asset["url"],
                     os.path.join(release_assets_cwd, asset["name"]),
-                    get_auth(args),
+                    get_auth(args, encode=not args.as_app),
+                    as_app=args.as_app,
+                    fine=True if args.token_fine is not None else False,
                 )
@@ -1291,13 +1996,15 @@ def fetch_repository(
         git_command = ["git", "remote", "set-url", "origin", remote_url]
         logging_subprocess(git_command, cwd=local_dir)

-        if lfs_clone:
-            git_command = ["git", "lfs", "fetch", "--all", "--prune"]
-        else:
-            git_command = ["git", "fetch", "--all", "--force", "--tags", "--prune"]
+        git_command = ["git", "fetch", "--all", "--force", "--tags", "--prune"]
         if no_prune:
             git_command.pop()
         logging_subprocess(git_command, cwd=local_dir)
+        if lfs_clone:
+            git_command = ["git", "lfs", "fetch", "--all", "--prune"]
+            if no_prune:
+                git_command.pop()
+            logging_subprocess(git_command, cwd=local_dir)
     else:
         logger.info(
             "Cloning {0} repository from {1} to {2}".format(
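
What the restructured fetch runs, as a standalone sketch (hypothetical helper; the behavioral change is that "git lfs fetch" now runs in addition to the regular fetch rather than replacing it):

def build_fetch_commands(lfs_clone, no_prune):
    commands = [["git", "fetch", "--all", "--force", "--tags", "--prune"]]
    if lfs_clone:
        commands.append(["git", "lfs", "fetch", "--all", "--prune"])
    if no_prune:
        for command in commands:
            command.pop()  # drop the trailing "--prune"
    return commands

assert build_fetch_commands(True, True) == [
    ["git", "fetch", "--all", "--force", "--tags"],
    ["git", "lfs", "fetch", "--all"],
]
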
python-github-backup.code-workspace (new executable file, 7 lines)
@@ -0,0 +1,7 @@
+{
+    "folders": [
+        {
+            "path": "."
+        }
+    ]
+}
(pinned requirements file)
@@ -1,31 +1,39 @@
-bleach==6.0.0
-certifi==2023.7.22
-charset-normalizer==3.1.0
+autopep8==2.3.2
+black==25.9.0
+bleach==6.3.0
+certifi==2025.10.5
+charset-normalizer==3.4.4
+click==8.3.0
 colorama==0.4.6
-docutils==0.20.1
-flake8==6.0.0
+docutils==0.22.2
+flake8==7.3.0
 gitchangelog==3.0.4
-idna==3.4
-importlib-metadata==6.6.0
-jaraco.classes==3.2.3
-keyring==23.13.1
-markdown-it-py==2.2.0
+idna==3.11
+importlib-metadata==8.7.0
+jaraco.classes==3.4.0
+keyring==25.6.0
+markdown-it-py==4.0.0
 mccabe==0.7.0
 mdurl==0.1.2
-more-itertools==9.1.0
-pkginfo==1.9.6
-pycodestyle==2.10.0
-pyflakes==3.0.1
-Pygments==2.15.1
-readme-renderer==37.3
-requests==2.31.0
+more-itertools==10.8.0
+mypy-extensions==1.1.0
+packaging==25.0
+pathspec==0.12.1
+pkginfo==1.12.1.2
+platformdirs==4.5.0
+pycodestyle==2.14.0
+pyflakes==3.4.0
+Pygments==2.19.2
+readme-renderer==44.0
+requests==2.32.5
 requests-toolbelt==1.0.0
 restructuredtext-lint==1.4.0
 rfc3986==2.0.0
-rich==13.3.5
-six==1.16.0
-tqdm==4.65.0
-twine==4.0.2
-urllib3==2.0.7
+rich==14.2.0
+setuptools==80.9.0
+six==1.17.0
+tqdm==4.67.1
+twine==6.2.0
+urllib3==2.5.0
 webencodings==0.5.1
-zipp==3.15.0
+zipp==3.23.0
setup.py (11 lines changed)
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+import os

 from github_backup import __version__

 try:
@@ -39,14 +40,16 @@ setup(
         "Development Status :: 5 - Production/Stable",
         "Topic :: System :: Archiving :: Backup",
         "License :: OSI Approved :: MIT License",
-        "Programming Language :: Python :: 3.5",
-        "Programming Language :: Python :: 3.6",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
+        "Programming Language :: Python :: 3.13",
+        "Programming Language :: Python :: 3.14",
     ],
     description="backup a github user or organization",
     long_description=open_file("README.rst").read(),
+    long_description_content_type="text/x-rst",
     install_requires=open_file("requirements.txt").readlines(),
+    python_requires=">=3.10",
     zip_safe=True,
 )
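
Note on the packaging change above: the classifier list is advisory metadata, while python_requires=">=3.10" is what pip actually enforces; on an older interpreter, installation now fails at resolution time with a message along the lines of "requires a different Python: 3.8.10 not in '>=3.10'" instead of breaking at runtime.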