Mirror of https://github.com/josegonzalez/python-github-backup.git, synced 2025-12-05 16:18:02 +01:00
Compare commits
207 Commits
Commits in this comparison, by abbreviated SHA1:

8f9cf7ff89, 899ab5fdc2, 2a9d86a6bf, 4fd3ea9e3c, 041dc013f9, 12802103c4, bf28b46954, ff2681e196,
745b05a63f, 83ff0ae1dd, 6ad1959d43, 5739ac0745, 8b7512c8d8, 995b7ede6c, 7840528fe2, 6fb0d86977,
9f6b401171, bf638f7aea, c3855a94f1, c3f4bfde0d, d3edef0622, 9ef496efad, 42bfe6f79d, 5af522a348,
6dfba7a783, 7551829677, 72d35a9b94, 3eae9d78ed, 90ba839c7d, 1ec0820936, ca463e5cd4, 1750d0eff1,
e4d1c78993, 7a9455db88, a98ff7f23d, 7b78f06a68, 56db3ff0e8, 5c9c20f6ee, c8c585cbb5, e7880bb056,
18e3bd574a, 1ed3d66777, a194fa48ce, 8f859be355, 80e00d31d9, 32202656ba, 875e31819a, 73dc75ab95,
cd23dd1a16, d244de1952, 4dae43c58e, b018a91fb4, 759ec58beb, b43c998b65, 38b4a2c106, 6210ec3845,
90396d2bdf, aa35e883b0, 963ed3e6f6, b710547fdc, 64b5667a16, b0c8cfe059, 5bedaf825f, 9d28d9c2b0,
eb756d665c, 3d5f61aa22, d6bf031bf7, 85ab54e514, df4d751be2, 03c660724d, 39848e650c, 12ac519e9c,
9e25473151, d3079bfb74, 3b9ff1ac14, 268a989b09, 45a3b87892, 1c465f4d35, 3ad9b02b26, 8bfad9b5b7,
985d79c1bc, 7d1b7f20ef, d3b67f884a, 65749bfde4, aeeb0eb9d7, f027760ac5, a9e48f8c4e, 338d5a956b,
5f07157c9b, 87f5b76c52, 27eb009e34, 82c1fc3086, a4f15b06d9, aa217774ff, d820dd994d, 1bad563e3f,
175ac19be6, 773ccecb8c, e27b5a8ee3, fb8945fc09, 7333458ee4, cf8b4c6b45, cabf8a770a, 7e0f7d1930,
a9bdd6feb7, fe16d2421c, 16b5b304e7, 8f58ef6229, 51cf429dc2, 53714612d4, f6e241833d, 17dc265385,
704d31cbf7, db69f5a5e8, ba367a927c, e8bf4257da, 8eab8d02ce, e4bd19acea, 176cadfcc4, b49544270e,
27fdd358fb, abe6192ee9, 0a2d6ed2ca, 1a8eb7a906, 40e6e34908, 2885fc6822, 434b4bf4a0, 677f3d3287,
9164f088b8, c1f9ea7b9b, 6d51d199c5, 2b555dc964, b818e9b95f, 4157cab89f, 07fd47a596, 5530a1badd,
90ac4999ea, f4dfc57ba2, 3d354beb24, 552c1051e3, c92f5ef0f2, 095b712a77, 3a4aebbcfe, e75021db80,
0f34ecb77d, 20e4d385a5, a49322cf7d, 332c9b586a, 09bf9275d1, fcf21f7a2e, 36812a332b, 0e0197149e,
eb545c1c2f, 2e72797984, 68fe29d1e1, 3dc3691770, 5b0608ce14, 1ce8455860, dcb89a5c33, b0bfffde1a,
0f3aaa6fc2, c39ec9c549, e981ce3ff9, 22d8f8e649, aaefac1a66, cb66375e1e, 24d7aa83df, c8c71239c7,
6ca8030648, 53f6650f61, 548a2ec405, 871d69b99a, ca3c4fa64b, 0846e7d8e5, 503444359d, 04c70ce277,
e774c70275, ba46cb87e8, 883407f8ca, aacb252e57, 2623167110, f6ad296730, c8eef58d76, 8eb154a540,
2e9db92b68, 09bbcfc7b1, 4e14f5a2c6, b474e1654f, 71d70265cc, 2309b0cb76, 1e14a4eecd, 56d3fd75bf,
c3e470b34e, 4948178a63, 88de80c480, 15eeff7879, 4bb71db468, 17af2cbc28, e0d66daadb, 1971c97b5d,
b1b3df692d, 8d7311efbf, 8449d6352d, d8c228c83e, 4a134ae2ec, 5cb7c6ad2e, f325daa875
.dockerignore (new file, 75 lines)
@@ -0,0 +1,75 @@
# Docker ignore file to reduce build context size

# Temp files
*~
~*
.*~
\#*
.#*
*#
dist

# Build files
build
dist
pkg
*.egg
*.egg-info

# Debian Files
debian/files
debian/python-github-backup*

# Sphinx build
doc/_build

# Generated man page
doc/github_backup.1

# Annoying macOS files
.DS_Store
._*

# IDE configuration files
.vscode
.atom
.idea
*.code-workspace

# RSA
id_rsa
id_rsa.pub

# Virtual env
venv
.venv

# Git
.git
.gitignore
.gitchangelog.rc
.github

# Documentation
*.md
!README.md

# Environment variables files
.env
.env.*
!.env.example
*.log

# Cache files
**/__pycache__/
*.py[cod]

# Docker files
docker-compose.yml
Dockerfile*

# Other files
release
*.tar
*.zip
*.gzip
.github/ISSUE_TEMPLATE/bug.yaml (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
---
name: Bug Report
description: File a bug report.
body:
  - type: markdown
    attributes:
      value: |
        # Important notice regarding filed issues

        This project already fills my needs, and as such I have no real reason to continue it's development. This project is otherwise provided as is, and no support is given.

        If pull requests implementing bug fixes or enhancements are pushed, I am happy to review and merge them (time permitting).

        If you wish to have a bug fixed, you have a few options:

        - Fix it yourself and file a pull request.
        - File a bug and hope someone else fixes it for you.
        - Pay me to fix it (my rate is $200 an hour, minimum 1 hour, contact me via my [github email address](https://github.com/josegonzalez) if you want to go this route).

        In all cases, feel free to file an issue, they may be of help to others in the future.
  - type: textarea
    id: what-happened
    attributes:
      label: What happened?
      description: Also tell us, what did you expect to happen?
      placeholder: Tell us what you see!
    validations:
      required: true
.github/ISSUE_TEMPLATE/feature.yaml (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
---
name: Feature Request
description: File a feature request.
body:
  - type: markdown
    attributes:
      value: |
        # Important notice regarding filed issues

        This project already fills my needs, and as such I have no real reason to continue it's development. This project is otherwise provided as is, and no support is given.

        If pull requests implementing bug fixes or enhancements are pushed, I am happy to review and merge them (time permitting).

        If you wish to have a feature implemented, you have a few options:

        - Implement it yourself and file a pull request.
        - File an issue and hope someone else implements it for you.
        - Pay me to implement it (my rate is $200 an hour, minimum 1 hour, contact me via my [github email address](https://github.com/josegonzalez) if you want to go this route).

        In all cases, feel free to file an issue, they may be of help to others in the future.
  - type: textarea
    id: what-would-you-like-to-happen
    attributes:
      label: What would you like to happen?
      description: Please describe in detail how the new functionality should work as well as any issues with existing functionality.
    validations:
      required: true
.github/workflows/automatic-release.yml (vendored, 8 lines changed)
@@ -15,10 +15,10 @@ on:
jobs:
  release:
    name: Release
    runs-on: ubuntu-22.04
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
          ssh-key: ${{ secrets.DEPLOY_PRIVATE_KEY }}
@@ -27,9 +27,9 @@ jobs:
          git config --local user.email "action@github.com"
          git config --local user.name "GitHub Action"
      - name: Setup Python
        uses: actions/setup-python@v5
        uses: actions/setup-python@v6
        with:
          python-version: '3.8'
          python-version: '3.12'
      - name: Install prerequisites
        run: pip install -r release-requirements.txt
      - name: Execute release
.github/workflows/docker.yml (vendored, 4 lines changed)
@@ -38,7 +38,7 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        uses: actions/checkout@v6
        with:
          persist-credentials: false

@@ -68,7 +68,7 @@ jobs:
            type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
.github/workflows/lint.yml (vendored, 11 lines changed)
@@ -14,17 +14,20 @@ on:
jobs:
  lint:
    name: lint
    runs-on: ubuntu-22.04
    runs-on: ubuntu-24.04
    strategy:
      matrix:
        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Setup Python
        uses: actions/setup-python@v5
        uses: actions/setup-python@v6
        with:
          python-version: "3.8"
          python-version: ${{ matrix.python-version }}
          cache: "pip"
      - run: pip install -r release-requirements.txt && pip install wheel
      - run: flake8 --ignore=E501,E203,W503
.github/workflows/tagged-release.yml (vendored, 2 lines changed)
@@ -10,7 +10,7 @@ on:
jobs:
  tagged-release:
    name: tagged-release
    runs-on: ubuntu-20.04
    runs-on: ubuntu-24.04

    steps:
      - uses: "marvinpinto/action-automatic-releases@v1.2.1"
.github/workflows/test.yml (vendored, new file, 33 lines)
@@ -0,0 +1,33 @@
---
name: "test"

# yamllint disable-line rule:truthy
on:
  pull_request:
    branches:
      - "*"
  push:
    branches:
      - "main"
      - "master"

jobs:
  test:
    name: test
    runs-on: ubuntu-24.04
    strategy:
      matrix:
        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
      - run: pip install -r release-requirements.txt
      - run: pytest tests/ -v
.gitignore (vendored, 4 lines changed)
@@ -1,4 +1,4 @@
*.py[oc]
*.py[cod]

# Temp files
*~
@@ -33,6 +33,7 @@ doc/github_backup.1
# IDE configuration files
.vscode
.atom
.idea

README
@@ -42,3 +43,4 @@ id_rsa.pub

# Virtual env
venv
.venv
CHANGES.rst (1912 lines changed; file diff suppressed because it is too large)
Dockerfile (42 lines changed)
@@ -1,16 +1,38 @@
FROM python:3.9.18-slim
FROM python:3.12-alpine3.22 AS builder

RUN --mount=type=cache,target=/var/cache/apt \
    apt-get update && apt-get install -y git git-lfs
RUN pip install --no-cache-dir --upgrade pip \
    && pip install --no-cache-dir uv

WORKDIR /usr/src/app
WORKDIR /app

COPY release-requirements.txt .
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install -r release-requirements.txt
RUN --mount=type=cache,target=/root/.cache/uv \
    --mount=type=bind,source=requirements.txt,target=requirements.txt \
    --mount=type=bind,source=release-requirements.txt,target=release-requirements.txt \
    uv venv \
    && uv pip install -r release-requirements.txt

COPY . .
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install .

ENTRYPOINT [ "github-backup" ]
RUN --mount=type=cache,target=/root/.cache/uv \
    uv pip install .


FROM python:3.12-alpine3.22
ENV PYTHONUNBUFFERED=1

RUN apk add --no-cache \
    ca-certificates \
    git \
    git-lfs \
    && addgroup -g 1000 appuser \
    && adduser -D -u 1000 -G appuser appuser

COPY --from=builder --chown=appuser:appuser /app /app

WORKDIR /app

USER appuser

ENV PATH="/app/.venv/bin:$PATH"

ENTRYPOINT ["github-backup"]
(deleted file, 13 lines)
@@ -1,13 +0,0 @@
# Important notice regarding filed issues

This project already fills my needs, and as such I have no real reason to continue it's development. This project is otherwise provided as is, and no support is given.

If pull requests implementing bug fixes or enhancements are pushed, I am happy to review and merge them (time permitting).

If you wish to have a bug fixed, you have a few options:

- Fix it yourself and file a pull request.
- File a bug and hope someone else fixes it for you.
- Pay me to fix it (my rate is $200 an hour, minimum 1 hour, contact me via my [github email address](https://github.com/josegonzalez) if you want to go this route).

In all cases, feel free to file an issue, they may be of help to others in the future.
README.rst (77 lines changed)
@@ -9,8 +9,8 @@ The package can be used to backup an *entire* `Github <https://github.com/>`_ or
Requirements
============

- Python 3.10 or higher
- GIT 1.9+
- Python

Installation
============
@@ -50,7 +50,7 @@ CLI Help output::
                      [--keychain-name OSX_KEYCHAIN_ITEM_NAME]
                      [--keychain-account OSX_KEYCHAIN_ITEM_ACCOUNT]
                      [--releases] [--latest-releases NUMBER_OF_LATEST_RELEASES]
                      [--skip-prerelease] [--assets]
                      [--skip-prerelease] [--assets] [--attachments]
                      [--exclude [REPOSITORY [REPOSITORY ...]]
                      [--throttle-limit THROTTLE_LIMIT] [--throttle-pause THROTTLE_PAUSE]
                      USER
@@ -80,6 +80,7 @@ CLI Help output::
                        log level to use (default: info, possible levels:
                        debug, info, warning, error, critical)
  -i, --incremental     incremental backup
  --incremental-by-files incremental backup using modified time of files
  --starred             include JSON output of starred repositories in backup
  --all-starred         include starred repositories in backup [*]
  --watched             include JSON output of watched repositories in backup
@@ -132,6 +133,9 @@ CLI Help output::
  --skip-prerelease     skip prerelease and draft versions; only applies if including releases
  --assets              include assets alongside release information; only
                        applies if including releases
  --attachments         download user-attachments from issues and pull requests
                        to issues/attachments/{issue_number}/ and
                        pulls/attachments/{pull_number}/ directories
  --exclude [REPOSITORY [REPOSITORY ...]]
                        names of repositories to exclude from backup.
  --throttle-limit THROTTLE_LIMIT
@@ -167,7 +171,7 @@ Customise the permissions for your use case, but for a personal account full bac

**User permissions**: Read access to followers, starring, and watching.

**Repository permissions**: Read access to code, commit statuses, issues, metadata, pages, pull requests, and repository hooks.
**Repository permissions**: Read access to contents, issues, metadata, pull requests, and webhooks.


Prefer SSH
@@ -212,6 +216,29 @@ When you use the ``--lfs`` option, you will need to make sure you have Git LFS i
Instructions on how to do this can be found on https://git-lfs.github.com.


About Attachments
-----------------

When you use the ``--attachments`` option with ``--issues`` or ``--pulls``, the tool will download user-uploaded attachments (images, videos, documents, etc.) from issue and pull request descriptions and comments. In some circumstances attachments contain valuable data related to the topic, and without their backup important information or context might be lost inadvertently.

Attachments are saved to ``issues/attachments/{issue_number}/`` and ``pulls/attachments/{pull_number}/`` directories, where ``{issue_number}`` is the GitHub issue number (e.g., issue #123 saves to ``issues/attachments/123/``). Each attachment directory contains:

- The downloaded attachment files (named by their GitHub identifier with appropriate file extensions)
- If multiple attachments have the same filename, conflicts are resolved with numeric suffixes (e.g., ``report.pdf``, ``report_1.pdf``, ``report_2.pdf``)
- A ``manifest.json`` file documenting all downloads, including URLs, file metadata, and download status

The tool automatically extracts file extensions from HTTP headers to ensure files can be more easily opened by your operating system.

**Supported URL formats:**

- Modern: ``github.com/user-attachments/{assets,files}/*``
- Legacy: ``user-images.githubusercontent.com/*`` and ``private-user-images.githubusercontent.com/*``
- Repo files: ``github.com/{owner}/{repo}/files/*`` (filtered to current repository)
- Repo assets: ``github.com/{owner}/{repo}/assets/*`` (filtered to current repository)

**Repository filtering** for repo files/assets handles renamed and transferred repositories gracefully. URLs are included if they either match the current repository name directly, or redirect to it (e.g., ``willmcgugan/rich`` redirects to ``Textualize/rich`` after transfer).
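The manifest described above can be inspected with a few lines of Python. This is only an illustrative sketch based on the fields the new test suite exercises (``url``, ``success``, ``saved_as``, ``http_status``); it is not part of the tool itself, and the path shown is just an example issue directory::

    import json

    # Assumed example path: the manifest for issue #123 of a backed-up repository.
    with open("issues/attachments/123/manifest.json", encoding="utf-8") as f:
        manifest = json.load(f)

    for entry in manifest.get("attachments", []):
        if entry.get("success"):
            print("saved", entry["saved_as"], "from", entry["url"])
        else:
            print("failed", entry["url"], "HTTP status", entry.get("http_status"))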
Run in Docker container
-----------------------

@@ -225,7 +252,7 @@ Gotchas / Known-issues
All is not everything
---------------------

The ``--all`` argument does not include; cloning private repos (``-P, --private``), cloning forks (``-F, --fork``) cloning starred repositories (``--all-starred``), ``--pull-details``, cloning LFS repositories (``--lfs``), cloning gists (``--starred-gists``) or cloning starred gist repos (``--starred-gists``). See examples for more.
The ``--all`` argument does not include: cloning private repos (``-P, --private``), cloning forks (``-F, --fork``), cloning starred repositories (``--all-starred``), ``--pull-details``, cloning LFS repositories (``--lfs``), cloning gists (``--gists``) or cloning starred gist repos (``--starred-gists``). See examples for more.

Cloning all starred size
------------------------
@@ -239,6 +266,12 @@ Using (``-i, --incremental``) will only request new data from the API **since th

This means any blocking errors on previous runs can cause a large amount of missing data in backups.

Using (``--incremental-by-files``) will request new data from the API **based on when the file was modified on filesystem**. e.g. if you modify the file yourself you may miss something.

Still saver than the previous version.

Specifically, issues and pull requests are handled like this.
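A minimal sketch of the idea behind ``--incremental-by-files``: derive a timestamp from the backup file's modification time and use it as the API's ``since`` filter (the GitHub issues endpoint accepts such a parameter). The function and file names below are illustrative only and are not the project's actual code::

    import os
    from datetime import datetime, timezone

    def since_param(path):
        """Turn a backup file's mtime into an ISO-8601 `since` value (sketch)."""
        if not os.path.exists(path):
            return None  # no previous backup for this file: fetch everything
        mtime = os.path.getmtime(path)
        return datetime.fromtimestamp(mtime, tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")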
Known blocking errors
---------------------

@@ -254,12 +287,6 @@ It's therefore recommended to only use the incremental argument if the output/re

This is due to needing the correct permission for ``--hooks`` on public repos.

2. **Releases blocking**

A known ``--releases`` (required for ``--assets``) error will sometimes block the backup.

If you're backing up a lot of repositories with releases e.g. an organisation or ``--all-starred``. You may need to remove ``--releases`` (and therefore ``--assets``) to complete a backup. Documented in `issue 209 <https://github.com/josegonzalez/python-github-backup/issues/209>`_.


"bare" is actually "mirror"
---------------------------
@@ -274,6 +301,8 @@ Starred gists vs starred repo behaviour

The starred normal repo cloning (``--all-starred``) argument stores starred repos separately to the users own repositories. However, using ``--starred-gists`` will store starred gists within the same directory as the users own gists ``--gists``. Also, all gist repo directory names are IDs not the gist's name.

Note: ``--starred-gists`` only retrieves starred gists for the authenticated user, not the target user, due to a GitHub API limitation.


Skip existing on incomplete backups
-----------------------------------
@@ -281,6 +310,25 @@ Skip existing on incomplete backups

The ``--skip-existing`` argument will skip a backup if the directory already exists, even if the backup in that directory failed (perhaps due to a blocking error). This may result in unexpected missing data in a regular backup.


Updates use fetch, not pull
---------------------------

When updating an existing repository backup, ``github-backup`` uses ``git fetch`` rather than ``git pull``. This is intentional - a backup tool should reliably download data without risk of failure. Using ``git pull`` would require handling merge conflicts, which adds complexity and could cause backups to fail unexpectedly.

With fetch, **all branches and commits are downloaded** safely into remote-tracking branches. The working directory files won't change, but your backup is complete.

If you look at files directly (e.g., ``cat README.md``), you'll see the old content. The new data is in the remote-tracking branches (confusingly named "remote" but stored locally). To view or use the latest files::

    git show origin/main:README.md  # view a file
    git merge origin/main           # update working directory

All branches are backed up as remote refs (``origin/main``, ``origin/feature-branch``, etc.).

If you want to browse files directly without merging, consider using ``--bare`` which skips the working directory entirely - the backup is just the git data.

See `#269 <https://github.com/josegonzalez/python-github-backup/issues/269>`_ for more discussion.


Github Backup Examples
======================

@@ -302,7 +350,7 @@ Quietly and incrementally backup useful Github user data (public and private rep

    export FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
    GH_USER=YOUR-GITHUB-USER

    github-backup -f $FINE_ACCESS_TOKEN --prefer-ssh -o ~/github-backup/ -l error -P -i --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-commits --labels --milestones --repositories --wikis --releases --assets --pull-details --gists --starred-gists $GH_USER
    github-backup -f $FINE_ACCESS_TOKEN --prefer-ssh -o ~/github-backup/ -l error -P -i --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-commits --labels --milestones --repositories --wikis --releases --assets --attachments --pull-details --gists --starred-gists $GH_USER

Debug an error/block or incomplete backup into a temporary directory. Omit "incremental" to fill a previous incomplete backup. ::

@@ -330,7 +378,12 @@ A huge thanks to all the contibuters!

Testing
-------

This project currently contains no unit tests. To run linting::
To run the test suite::

    pip install pytest
    pytest

To run linting::

    pip install flake8
    flake8 --ignore=E501
@@ -16,12 +16,23 @@ from github_backup.github_backup import (
    retrieve_repositories,
)

logging.basicConfig(
    format="%(asctime)s.%(msecs)03d: %(message)s",
# INFO and DEBUG go to stdout, WARNING and above go to stderr
log_format = logging.Formatter(
    fmt="%(asctime)s.%(msecs)03d: %(message)s",
    datefmt="%Y-%m-%dT%H:%M:%S",
    level=logging.INFO,
)

stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setLevel(logging.DEBUG)
stdout_handler.addFilter(lambda r: r.levelno < logging.WARNING)
stdout_handler.setFormatter(log_format)

stderr_handler = logging.StreamHandler(sys.stderr)
stderr_handler.setLevel(logging.WARNING)
stderr_handler.setFormatter(log_format)

logging.basicConfig(level=logging.INFO, handlers=[stdout_handler, stderr_handler])


def main():
    args = parse_args()
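With the handler setup shown in this hunk, records below WARNING pass the stdout handler's filter and records at WARNING or above are handled by the stderr handler. A quick, purely illustrative way to confirm the split after the configuration has run::

    import logging

    logging.info("this line goes to stdout")      # levelno < WARNING, passes the filter
    logging.warning("this line goes to stderr")   # handled only by the stderr handler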
@@ -1 +1 @@
__version__ = "0.45.2"
__version__ = "0.54.0"
(file diff suppressed because it is too large)
pytest.ini (new file, 6 lines)
@@ -0,0 +1,6 @@
[pytest]
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
addopts = -v
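The discovery settings above mean a test only has to follow the ``test_*.py`` / ``Test*`` / ``test_*`` naming pattern under ``tests/`` to be collected with ``-v`` output. A minimal illustrative module (not part of the actual suite)::

    # tests/test_example.py
    class TestExample:
        def test_truth(self):
            assert True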
@@ -1,38 +1,40 @@
autopep8==2.1.0
black==24.3.0
bleach==6.1.0
certifi==2024.2.2
charset-normalizer==3.3.2
click==8.1.7
autopep8==2.3.2
black==25.11.0
bleach==6.3.0
certifi==2025.11.12
charset-normalizer==3.4.4
click==8.3.1
colorama==0.4.6
docutils==0.20.1
flake8==7.0.0
docutils==0.22.3
flake8==7.3.0
gitchangelog==3.0.4
idna==3.7
importlib-metadata==7.1.0
pytest==9.0.1
idna==3.11
importlib-metadata==8.7.0
jaraco.classes==3.4.0
keyring==25.1.0
markdown-it-py==3.0.0
keyring==25.7.0
markdown-it-py==4.0.0
mccabe==0.7.0
mdurl==0.1.2
more-itertools==10.2.0
mypy-extensions==1.0.0
packaging==24.0
more-itertools==10.8.0
mypy-extensions==1.1.0
packaging==25.0
pathspec==0.12.1
pkginfo==1.10.0
platformdirs==4.2.0
pycodestyle==2.11.1
pyflakes==3.2.0
Pygments==2.17.2
readme-renderer==43.0
requests==2.31.0
pkginfo==1.12.1.2
platformdirs==4.5.0
pycodestyle==2.14.0
pyflakes==3.4.0
Pygments==2.19.2
readme-renderer==44.0
requests==2.32.5
requests-toolbelt==1.0.0
restructuredtext-lint==1.4.0
restructuredtext-lint==2.0.2
rfc3986==2.0.0
rich==13.7.1
six==1.16.0
tqdm==4.66.2
twine==5.0.0
urllib3==2.2.1
rich==14.2.0
setuptools==80.9.0
six==1.17.0
tqdm==4.67.1
twine==6.2.0
urllib3==2.5.0
webencodings==0.5.1
zipp==3.18.1
zipp==3.23.0
@@ -1 +0,0 @@
setup.py (10 lines changed)
@@ -40,14 +40,16 @@ setup(
        "Development Status :: 5 - Production/Stable",
        "Topic :: System :: Archiving :: Backup",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: 3.13",
        "Programming Language :: Python :: 3.14",
    ],
    description="backup a github user or organization",
    long_description=open_file("README.rst").read(),
    long_description_content_type="text/x-rst",
    install_requires=open_file("requirements.txt").readlines(),
    python_requires=">=3.10",
    zip_safe=True,
)
tests/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
"""Tests for python-github-backup."""
tests/test_attachments.py (new file, 353 lines)
@@ -0,0 +1,353 @@
|
||||
"""Behavioral tests for attachment functionality."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from unittest.mock import Mock
|
||||
|
||||
import pytest
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def attachment_test_setup(tmp_path):
|
||||
"""Fixture providing setup and helper for attachment download tests."""
|
||||
from unittest.mock import patch
|
||||
|
||||
issue_cwd = tmp_path / "issues"
|
||||
issue_cwd.mkdir()
|
||||
|
||||
# Mock args
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = None
|
||||
args.username = None
|
||||
args.password = None
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.user = "testuser"
|
||||
args.repository = "testrepo"
|
||||
|
||||
repository = {"full_name": "testuser/testrepo"}
|
||||
|
||||
def call_download(issue_data, issue_number=123):
|
||||
"""Call download_attachments with mocked HTTP downloads.
|
||||
|
||||
Returns list of URLs that were actually downloaded.
|
||||
"""
|
||||
downloaded_urls = []
|
||||
|
||||
def mock_download(url, path, auth, as_app, fine):
|
||||
downloaded_urls.append(url)
|
||||
return {
|
||||
"success": True,
|
||||
"saved_as": os.path.basename(path),
|
||||
"url": url,
|
||||
}
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.download_attachment_file",
|
||||
side_effect=mock_download,
|
||||
):
|
||||
github_backup.download_attachments(
|
||||
args, str(issue_cwd), issue_data, issue_number, repository
|
||||
)
|
||||
|
||||
return downloaded_urls
|
||||
|
||||
return {
|
||||
"issue_cwd": str(issue_cwd),
|
||||
"args": args,
|
||||
"repository": repository,
|
||||
"call_download": call_download,
|
||||
}
|
||||
|
||||
|
||||
class TestURLExtraction:
|
||||
"""Test URL extraction with realistic issue content."""
|
||||
|
||||
def test_mixed_urls(self):
|
||||
issue_data = {
|
||||
"body": """
|
||||
## Bug Report
|
||||
|
||||
When uploading files, I see this error. Here's a screenshot:
|
||||
https://github.com/user-attachments/assets/abc123def456
|
||||
|
||||
The logs show: https://github.com/user-attachments/files/789/error-log.txt
|
||||
|
||||
This is similar to https://github.com/someorg/somerepo/issues/42 but different.
|
||||
|
||||
You can also see the video at https://user-images.githubusercontent.com/12345/video-demo.mov
|
||||
|
||||
Here's how to reproduce:
|
||||
```bash
|
||||
# Don't extract this example URL:
|
||||
curl https://github.com/user-attachments/assets/example999
|
||||
```
|
||||
|
||||
More info at https://docs.example.com/guide
|
||||
|
||||
Also see this inline code `https://github.com/user-attachments/files/111/inline.pdf` should not extract.
|
||||
|
||||
Final attachment: https://github.com/user-attachments/files/222/report.pdf.
|
||||
""",
|
||||
"comment_data": [
|
||||
{
|
||||
"body": "Here's another attachment: https://private-user-images.githubusercontent.com/98765/secret.png?jwt=token123"
|
||||
},
|
||||
{
|
||||
"body": """
|
||||
Example code:
|
||||
```python
|
||||
url = "https://github.com/user-attachments/assets/code-example"
|
||||
```
|
||||
But this is real: https://github.com/user-attachments/files/333/actual.zip
|
||||
"""
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
# Extract URLs
|
||||
urls = github_backup.extract_attachment_urls(issue_data)
|
||||
|
||||
expected_urls = [
|
||||
"https://github.com/user-attachments/assets/abc123def456",
|
||||
"https://github.com/user-attachments/files/789/error-log.txt",
|
||||
"https://user-images.githubusercontent.com/12345/video-demo.mov",
|
||||
"https://github.com/user-attachments/files/222/report.pdf",
|
||||
"https://private-user-images.githubusercontent.com/98765/secret.png?jwt=token123",
|
||||
"https://github.com/user-attachments/files/333/actual.zip",
|
||||
]
|
||||
|
||||
assert set(urls) == set(expected_urls)
|
||||
|
||||
def test_trailing_punctuation_stripped(self):
|
||||
"""URLs with trailing punctuation should have punctuation stripped."""
|
||||
issue_data = {
|
||||
"body": """
|
||||
See this file: https://github.com/user-attachments/files/1/doc.pdf.
|
||||
And this one (https://github.com/user-attachments/files/2/image.png).
|
||||
Check it out! https://github.com/user-attachments/files/3/data.csv!
|
||||
"""
|
||||
}
|
||||
|
||||
urls = github_backup.extract_attachment_urls(issue_data)
|
||||
|
||||
expected = [
|
||||
"https://github.com/user-attachments/files/1/doc.pdf",
|
||||
"https://github.com/user-attachments/files/2/image.png",
|
||||
"https://github.com/user-attachments/files/3/data.csv",
|
||||
]
|
||||
assert set(urls) == set(expected)
|
||||
|
||||
def test_deduplication_across_body_and_comments(self):
|
||||
"""Same URL in body and comments should only appear once."""
|
||||
duplicate_url = "https://github.com/user-attachments/assets/abc123"
|
||||
|
||||
issue_data = {
|
||||
"body": f"First mention: {duplicate_url}",
|
||||
"comment_data": [
|
||||
{"body": f"Second mention: {duplicate_url}"},
|
||||
{"body": f"Third mention: {duplicate_url}"},
|
||||
],
|
||||
}
|
||||
|
||||
urls = github_backup.extract_attachment_urls(issue_data)
|
||||
|
||||
assert set(urls) == {duplicate_url}
|
||||
|
||||
|
||||
class TestFilenameExtraction:
|
||||
"""Test filename extraction from different URL types."""
|
||||
|
||||
def test_modern_assets_url(self):
|
||||
"""Modern assets URL returns UUID."""
|
||||
url = "https://github.com/user-attachments/assets/abc123def456"
|
||||
filename = github_backup.get_attachment_filename(url)
|
||||
assert filename == "abc123def456"
|
||||
|
||||
def test_modern_files_url(self):
|
||||
"""Modern files URL returns filename."""
|
||||
url = "https://github.com/user-attachments/files/12345/report.pdf"
|
||||
filename = github_backup.get_attachment_filename(url)
|
||||
assert filename == "report.pdf"
|
||||
|
||||
def test_legacy_cdn_url(self):
|
||||
"""Legacy CDN URL returns filename with extension."""
|
||||
url = "https://user-images.githubusercontent.com/123456/abc-def.png"
|
||||
filename = github_backup.get_attachment_filename(url)
|
||||
assert filename == "abc-def.png"
|
||||
|
||||
def test_private_cdn_url(self):
|
||||
"""Private CDN URL returns filename."""
|
||||
url = "https://private-user-images.githubusercontent.com/98765/secret.png?jwt=token123"
|
||||
filename = github_backup.get_attachment_filename(url)
|
||||
assert filename == "secret.png"
|
||||
|
||||
def test_repo_files_url(self):
|
||||
"""Repo-scoped files URL returns filename."""
|
||||
url = "https://github.com/owner/repo/files/789/document.txt"
|
||||
filename = github_backup.get_attachment_filename(url)
|
||||
assert filename == "document.txt"
|
||||
|
||||
|
||||
class TestFilenameCollision:
|
||||
"""Test filename collision resolution."""
|
||||
|
||||
def test_collision_behavior(self):
|
||||
"""Test filename collision resolution with real files."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
# No collision - file doesn't exist
|
||||
result = github_backup.resolve_filename_collision(
|
||||
os.path.join(tmpdir, "report.pdf")
|
||||
)
|
||||
assert result == os.path.join(tmpdir, "report.pdf")
|
||||
|
||||
# Create the file, now collision exists
|
||||
Path(os.path.join(tmpdir, "report.pdf")).touch()
|
||||
result = github_backup.resolve_filename_collision(
|
||||
os.path.join(tmpdir, "report.pdf")
|
||||
)
|
||||
assert result == os.path.join(tmpdir, "report_1.pdf")
|
||||
|
||||
# Create report_1.pdf too
|
||||
Path(os.path.join(tmpdir, "report_1.pdf")).touch()
|
||||
result = github_backup.resolve_filename_collision(
|
||||
os.path.join(tmpdir, "report.pdf")
|
||||
)
|
||||
assert result == os.path.join(tmpdir, "report_2.pdf")
|
||||
|
||||
def test_manifest_reserved(self):
|
||||
"""manifest.json is always treated as reserved."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
# Even if manifest.json doesn't exist, should get manifest_1.json
|
||||
result = github_backup.resolve_filename_collision(
|
||||
os.path.join(tmpdir, "manifest.json")
|
||||
)
|
||||
assert result == os.path.join(tmpdir, "manifest_1.json")
|
||||
|
||||
|
||||
class TestManifestDuplicatePrevention:
|
||||
"""Test that manifest prevents duplicate downloads (the bug fix)."""
|
||||
|
||||
def test_manifest_filters_existing_urls(self, attachment_test_setup):
|
||||
"""URLs in manifest are not re-downloaded."""
|
||||
setup = attachment_test_setup
|
||||
|
||||
# Create manifest with existing URLs
|
||||
attachments_dir = os.path.join(setup["issue_cwd"], "attachments", "123")
|
||||
os.makedirs(attachments_dir)
|
||||
manifest_path = os.path.join(attachments_dir, "manifest.json")
|
||||
|
||||
manifest = {
|
||||
"attachments": [
|
||||
{
|
||||
"url": "https://github.com/user-attachments/assets/old1",
|
||||
"success": True,
|
||||
"saved_as": "old1.pdf",
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/user-attachments/assets/old2",
|
||||
"success": True,
|
||||
"saved_as": "old2.pdf",
|
||||
},
|
||||
]
|
||||
}
|
||||
with open(manifest_path, "w") as f:
|
||||
json.dump(manifest, f)
|
||||
|
||||
# Issue data with 2 old URLs and 1 new URL
|
||||
issue_data = {
|
||||
"body": """
|
||||
Old: https://github.com/user-attachments/assets/old1
|
||||
Old: https://github.com/user-attachments/assets/old2
|
||||
New: https://github.com/user-attachments/assets/new1
|
||||
"""
|
||||
}
|
||||
|
||||
downloaded_urls = setup["call_download"](issue_data)
|
||||
|
||||
# Should only download the NEW URL (old ones filtered by manifest)
|
||||
assert len(downloaded_urls) == 1
|
||||
assert downloaded_urls[0] == "https://github.com/user-attachments/assets/new1"
|
||||
|
||||
def test_no_manifest_downloads_all(self, attachment_test_setup):
|
||||
"""Without manifest, all URLs should be downloaded."""
|
||||
setup = attachment_test_setup
|
||||
|
||||
# Issue data with 2 URLs
|
||||
issue_data = {
|
||||
"body": """
|
||||
https://github.com/user-attachments/assets/url1
|
||||
https://github.com/user-attachments/assets/url2
|
||||
"""
|
||||
}
|
||||
|
||||
downloaded_urls = setup["call_download"](issue_data)
|
||||
|
||||
# Should download ALL URLs (no manifest to filter)
|
||||
assert len(downloaded_urls) == 2
|
||||
assert set(downloaded_urls) == {
|
||||
"https://github.com/user-attachments/assets/url1",
|
||||
"https://github.com/user-attachments/assets/url2",
|
||||
}
|
||||
|
||||
def test_manifest_skips_permanent_failures(self, attachment_test_setup):
|
||||
"""Manifest skips permanent failures (404, 410) but retries transient (503)."""
|
||||
setup = attachment_test_setup
|
||||
|
||||
# Create manifest with different failure types
|
||||
attachments_dir = os.path.join(setup["issue_cwd"], "attachments", "123")
|
||||
os.makedirs(attachments_dir)
|
||||
manifest_path = os.path.join(attachments_dir, "manifest.json")
|
||||
|
||||
manifest = {
|
||||
"attachments": [
|
||||
{
|
||||
"url": "https://github.com/user-attachments/assets/success",
|
||||
"success": True,
|
||||
"saved_as": "success.pdf",
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/user-attachments/assets/notfound",
|
||||
"success": False,
|
||||
"http_status": 404,
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/user-attachments/assets/gone",
|
||||
"success": False,
|
||||
"http_status": 410,
|
||||
},
|
||||
{
|
||||
"url": "https://github.com/user-attachments/assets/unavailable",
|
||||
"success": False,
|
||||
"http_status": 503,
|
||||
},
|
||||
]
|
||||
}
|
||||
with open(manifest_path, "w") as f:
|
||||
json.dump(manifest, f)
|
||||
|
||||
# Issue data has all 4 URLs
|
||||
issue_data = {
|
||||
"body": """
|
||||
https://github.com/user-attachments/assets/success
|
||||
https://github.com/user-attachments/assets/notfound
|
||||
https://github.com/user-attachments/assets/gone
|
||||
https://github.com/user-attachments/assets/unavailable
|
||||
"""
|
||||
}
|
||||
|
||||
downloaded_urls = setup["call_download"](issue_data)
|
||||
|
||||
# Should only retry 503 (transient failure)
|
||||
# Success, 404, and 410 should be skipped
|
||||
assert len(downloaded_urls) == 1
|
||||
assert (
|
||||
downloaded_urls[0]
|
||||
== "https://github.com/user-attachments/assets/unavailable"
|
||||
)
|
||||
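The collision behaviour asserted above (``report.pdf`` becomes ``report_1.pdf``, then ``report_2.pdf``, and ``manifest.json`` is always reserved for the manifest itself) can be summarised in a small sketch. It is written to match what these tests assert, not copied from the project's implementation, and the function name is illustrative::

    import os

    def resolve_collision_sketch(path):
        """Return a path that does not clash with existing files or manifest.json (sketch)."""
        directory, name = os.path.split(path)
        base, ext = os.path.splitext(name)
        candidate = path
        counter = 0
        # manifest.json is reserved, so even a non-existent manifest.json gets a suffix.
        while os.path.exists(candidate) or os.path.basename(candidate) == "manifest.json":
            counter += 1
            candidate = os.path.join(directory, f"{base}_{counter}{ext}")
        return candidate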
tests/test_http_451.py (new file, 143 lines)
@@ -0,0 +1,143 @@
|
||||
"""Tests for HTTP 451 (DMCA takedown) handling."""
|
||||
|
||||
import json
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
class TestHTTP451Exception:
|
||||
"""Test suite for HTTP 451 DMCA takedown exception handling."""
|
||||
|
||||
def test_repository_unavailable_error_raised(self):
|
||||
"""HTTP 451 should raise RepositoryUnavailableError with DMCA URL."""
|
||||
# Create mock args
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = None
|
||||
args.username = None
|
||||
args.password = None
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
|
||||
# Mock HTTPError 451 response
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 451
|
||||
|
||||
dmca_data = {
|
||||
"message": "Repository access blocked",
|
||||
"block": {
|
||||
"reason": "dmca",
|
||||
"created_at": "2024-11-12T14:38:04Z",
|
||||
"html_url": "https://github.com/github/dmca/blob/master/2024/11/2024-11-04-source-code.md"
|
||||
}
|
||||
}
|
||||
mock_response.read.return_value = json.dumps(dmca_data).encode("utf-8")
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
mock_response.reason = "Unavailable For Legal Reasons"
|
||||
|
||||
def mock_get_response(request, auth, template):
|
||||
return mock_response, []
|
||||
|
||||
with patch("github_backup.github_backup._get_response", side_effect=mock_get_response):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError) as exc_info:
|
||||
list(github_backup.retrieve_data_gen(args, "https://api.github.com/repos/test/dmca/issues"))
|
||||
|
||||
# Check exception has DMCA URL
|
||||
assert exc_info.value.dmca_url == "https://github.com/github/dmca/blob/master/2024/11/2024-11-04-source-code.md"
|
||||
assert "451" in str(exc_info.value)
|
||||
|
||||
def test_repository_unavailable_error_without_dmca_url(self):
|
||||
"""HTTP 451 without DMCA details should still raise exception."""
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = None
|
||||
args.username = None
|
||||
args.password = None
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 451
|
||||
mock_response.read.return_value = b'{"message": "Blocked"}'
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
mock_response.reason = "Unavailable For Legal Reasons"
|
||||
|
||||
def mock_get_response(request, auth, template):
|
||||
return mock_response, []
|
||||
|
||||
with patch("github_backup.github_backup._get_response", side_effect=mock_get_response):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError) as exc_info:
|
||||
list(github_backup.retrieve_data_gen(args, "https://api.github.com/repos/test/dmca/issues"))
|
||||
|
||||
# Exception raised even without DMCA URL
|
||||
assert exc_info.value.dmca_url is None
|
||||
assert "451" in str(exc_info.value)
|
||||
|
||||
def test_repository_unavailable_error_with_malformed_json(self):
|
||||
"""HTTP 451 with malformed JSON should still raise exception."""
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = None
|
||||
args.username = None
|
||||
args.password = None
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 451
|
||||
mock_response.read.return_value = b"invalid json {"
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
mock_response.reason = "Unavailable For Legal Reasons"
|
||||
|
||||
def mock_get_response(request, auth, template):
|
||||
return mock_response, []
|
||||
|
||||
with patch("github_backup.github_backup._get_response", side_effect=mock_get_response):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError):
|
||||
list(github_backup.retrieve_data_gen(args, "https://api.github.com/repos/test/dmca/issues"))
|
||||
|
||||
def test_other_http_errors_unchanged(self):
|
||||
"""Other HTTP errors should still raise generic Exception."""
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = None
|
||||
args.username = None
|
||||
args.password = None
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 404
|
||||
mock_response.read.return_value = b'{"message": "Not Found"}'
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
mock_response.reason = "Not Found"
|
||||
|
||||
def mock_get_response(request, auth, template):
|
||||
return mock_response, []
|
||||
|
||||
with patch("github_backup.github_backup._get_response", side_effect=mock_get_response):
|
||||
# Should raise generic Exception, not RepositoryUnavailableError
|
||||
with pytest.raises(Exception) as exc_info:
|
||||
list(github_backup.retrieve_data_gen(args, "https://api.github.com/repos/test/notfound/issues"))
|
||||
|
||||
assert not isinstance(exc_info.value, github_backup.RepositoryUnavailableError)
|
||||
assert "404" in str(exc_info.value)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main([__file__, "-v"])
|
||||
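Consistent with these tests, the exception raised for HTTP 451 only needs to carry the status in its message plus an optional DMCA notice URL. A minimal sketch of such a class (the real one lives in ``github_backup.github_backup``; this is not its actual definition)::

    class RepositoryUnavailableErrorSketch(Exception):
        """Raised when the API answers HTTP 451 (content blocked, e.g. a DMCA takedown)."""

        def __init__(self, message, dmca_url=None):
            super().__init__(message)
            self.dmca_url = dmca_url  # None when the response carries no block details

    # Example: raise RepositoryUnavailableErrorSketch("API returned HTTP 451", dmca_url=block_url)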
tests/test_json_dump_if_changed.py (new file, 198 lines)
@@ -0,0 +1,198 @@
|
||||
"""Tests for json_dump_if_changed functionality."""
|
||||
|
||||
import codecs
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import pytest
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
class TestJsonDumpIfChanged:
|
||||
"""Test suite for json_dump_if_changed function."""
|
||||
|
||||
def test_writes_new_file(self):
|
||||
"""Should write file when it doesn't exist."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
output_file = os.path.join(tmpdir, "test.json")
|
||||
test_data = {"key": "value", "number": 42}
|
||||
|
||||
result = github_backup.json_dump_if_changed(test_data, output_file)
|
||||
|
||||
assert result is True
|
||||
assert os.path.exists(output_file)
|
||||
|
||||
# Verify content matches expected format
|
||||
with codecs.open(output_file, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
loaded = json.loads(content)
|
||||
assert loaded == test_data
|
||||
|
||||
def test_skips_unchanged_file(self):
|
||||
"""Should skip write when content is identical."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
output_file = os.path.join(tmpdir, "test.json")
|
||||
test_data = {"key": "value", "number": 42}
|
||||
|
||||
# First write
|
||||
result1 = github_backup.json_dump_if_changed(test_data, output_file)
|
||||
assert result1 is True
|
||||
|
||||
# Get the initial mtime
|
||||
mtime1 = os.path.getmtime(output_file)
|
||||
|
||||
# Second write with same data
|
||||
result2 = github_backup.json_dump_if_changed(test_data, output_file)
|
||||
assert result2 is False
|
||||
|
||||
# File should not have been modified
|
||||
mtime2 = os.path.getmtime(output_file)
|
||||
assert mtime1 == mtime2
|
||||
|
||||
def test_writes_when_content_changed(self):
|
||||
"""Should write file when content has changed."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
output_file = os.path.join(tmpdir, "test.json")
|
||||
test_data1 = {"key": "value1"}
|
||||
test_data2 = {"key": "value2"}
|
||||
|
||||
# First write
|
||||
result1 = github_backup.json_dump_if_changed(test_data1, output_file)
|
||||
assert result1 is True
|
||||
|
||||
# Second write with different data
|
||||
result2 = github_backup.json_dump_if_changed(test_data2, output_file)
|
||||
assert result2 is True
|
||||
|
||||
# Verify new content
|
||||
with codecs.open(output_file, "r", encoding="utf-8") as f:
|
||||
loaded = json.load(f)
|
||||
assert loaded == test_data2
|
||||
|
||||
def test_uses_consistent_formatting(self):
|
||||
"""Should use same JSON formatting as json_dump."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
output_file = os.path.join(tmpdir, "test.json")
|
||||
test_data = {"z": "last", "a": "first", "m": "middle"}
|
||||
|
||||
github_backup.json_dump_if_changed(test_data, output_file)
|
||||
|
||||
with codecs.open(output_file, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
|
||||
# Check for consistent formatting:
|
||||
# - sorted keys
|
||||
# - 4-space indent
|
||||
# - comma-colon-space separator
|
||||
expected = json.dumps(
|
||||
test_data,
|
||||
ensure_ascii=False,
|
||||
sort_keys=True,
|
||||
indent=4,
|
||||
separators=(",", ": "),
|
||||
)
|
||||
assert content == expected
|
||||
|
||||
def test_atomic_write_always_used(self):
|
||||
"""Should always use temp file and rename for atomic writes."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
output_file = os.path.join(tmpdir, "test.json")
|
||||
test_data = {"key": "value"}
|
||||
|
||||
result = github_backup.json_dump_if_changed(test_data, output_file)
|
||||
|
||||
assert result is True
|
||||
assert os.path.exists(output_file)
|
||||
|
||||
# Temp file should not exist after atomic write
|
||||
temp_file = output_file + ".temp"
|
||||
assert not os.path.exists(temp_file)
|
||||
|
||||
# Verify content
|
||||
with codecs.open(output_file, "r", encoding="utf-8") as f:
|
||||
loaded = json.load(f)
|
||||
assert loaded == test_data
|
||||
|
||||
def test_handles_unicode_content(self):
|
||||
"""Should correctly handle Unicode content."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
output_file = os.path.join(tmpdir, "test.json")
|
||||
test_data = {
|
||||
"emoji": "🚀",
|
||||
"chinese": "你好",
|
||||
"arabic": "مرحبا",
|
||||
"cyrillic": "Привет",
|
||||
}
|
||||
|
||||
result = github_backup.json_dump_if_changed(test_data, output_file)
|
||||
assert result is True
|
||||
|
||||
# Verify Unicode is preserved
|
||||
with codecs.open(output_file, "r", encoding="utf-8") as f:
|
||||
loaded = json.load(f)
|
||||
assert loaded == test_data
|
||||
|
||||
# Second write should skip
|
||||
result2 = github_backup.json_dump_if_changed(test_data, output_file)
|
||||
assert result2 is False
|
||||
|
||||
def test_handles_complex_nested_data(self):
|
||||
"""Should handle complex nested data structures."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
output_file = os.path.join(tmpdir, "test.json")
|
||||
test_data = {
|
||||
"users": [
|
||||
{"id": 1, "name": "Alice", "tags": ["admin", "user"]},
|
||||
{"id": 2, "name": "Bob", "tags": ["user"]},
|
||||
],
|
||||
"metadata": {"version": "1.0", "nested": {"deep": {"value": 42}}},
|
||||
}
|
||||
|
||||
result = github_backup.json_dump_if_changed(test_data, output_file)
|
||||
assert result is True
|
||||
|
||||
# Verify structure is preserved
|
||||
with codecs.open(output_file, "r", encoding="utf-8") as f:
|
||||
loaded = json.load(f)
|
||||
assert loaded == test_data
|
||||
|
||||
def test_overwrites_on_unicode_decode_error(self):
|
||||
"""Should overwrite if existing file has invalid UTF-8."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
output_file = os.path.join(tmpdir, "test.json")
|
||||
test_data = {"key": "value"}
|
||||
|
||||
# Write invalid UTF-8 bytes
|
||||
with open(output_file, "wb") as f:
|
||||
f.write(b"\xff\xfe invalid utf-8")
|
||||
|
||||
# Should catch UnicodeDecodeError and overwrite
|
||||
result = github_backup.json_dump_if_changed(test_data, output_file)
|
||||
assert result is True
|
||||
|
||||
# Verify new content was written
|
||||
with codecs.open(output_file, "r", encoding="utf-8") as f:
|
||||
loaded = json.load(f)
|
||||
assert loaded == test_data
|
||||
|
||||
def test_key_order_independence(self):
|
||||
"""Should treat differently-ordered dicts as same if keys/values match."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
output_file = os.path.join(tmpdir, "test.json")
|
||||
|
||||
# Write first dict
|
||||
data1 = {"z": 1, "a": 2, "m": 3}
|
||||
github_backup.json_dump_if_changed(data1, output_file)
|
||||
|
||||
# Try to write same data but different order
|
||||
data2 = {"a": 2, "m": 3, "z": 1}
|
||||
result = github_backup.json_dump_if_changed(data2, output_file)
|
||||
|
||||
# Should skip because content is the same (keys are sorted)
|
||||
assert result is False
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main([__file__, "-v"])
|
||||
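A sketch of the behaviour these tests pin down: serialise with sorted keys, a 4-space indent and ``ensure_ascii=False``, skip the write when the file already holds identical text, and otherwise write atomically through a ``.temp`` file. It is derived from the assertions above, not copied from the implementation::

    import json
    import os

    def json_dump_if_changed_sketch(data, output_file):
        new_content = json.dumps(
            data, ensure_ascii=False, sort_keys=True, indent=4, separators=(",", ": ")
        )
        try:
            with open(output_file, encoding="utf-8") as f:
                if f.read() == new_content:
                    return False  # identical content: leave the file (and its mtime) alone
        except (FileNotFoundError, UnicodeDecodeError):
            pass  # missing or unreadable file: fall through and (re)write it
        temp_file = output_file + ".temp"
        with open(temp_file, "w", encoding="utf-8") as f:
            f.write(new_content)
        os.replace(temp_file, output_file)  # atomic rename, temp file never lingers
        return True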
tests/test_pagination.py (new file, 153 lines)
@@ -0,0 +1,153 @@
|
||||
"""Tests for Link header pagination handling."""
|
||||
|
||||
import json
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
class MockHTTPResponse:
|
||||
"""Mock HTTP response for paginated API calls."""
|
||||
|
||||
def __init__(self, data, link_header=None):
|
||||
self._content = json.dumps(data).encode("utf-8")
|
||||
self._link_header = link_header
|
||||
self._read = False
|
||||
self.reason = "OK"
|
||||
|
||||
def getcode(self):
|
||||
return 200
|
||||
|
||||
def read(self):
|
||||
if self._read:
|
||||
return b""
|
||||
self._read = True
|
||||
return self._content
|
||||
|
||||
def get_header(self, name, default=None):
|
||||
"""Mock method for headers.get()."""
|
||||
return self.headers.get(name, default)
|
||||
|
||||
@property
|
||||
def headers(self):
|
||||
headers = {"x-ratelimit-remaining": "5000"}
|
||||
if self._link_header:
|
||||
headers["Link"] = self._link_header
|
||||
return headers
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_args():
|
||||
"""Mock args for retrieve_data_gen."""
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = "fake_token"
|
||||
args.username = None
|
||||
args.password = None
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
return args
|
||||
|
||||
|
||||
def test_cursor_based_pagination(mock_args):
|
||||
"""Link header with 'after' cursor parameter works correctly."""
|
||||
|
||||
# Simulate issues endpoint behavior: returns cursor in Link header
|
||||
responses = [
|
||||
# Issues endpoint returns 'after' cursor parameter (not 'page')
|
||||
MockHTTPResponse(
|
||||
data=[{"issue": i} for i in range(1, 101)], # Page 1 contents
|
||||
link_header='<https://api.github.com/repos/owner/repo/issues?per_page=100&after=ABC123&page=2>; rel="next"',
|
||||
),
|
||||
MockHTTPResponse(
|
||||
data=[{"issue": i} for i in range(101, 151)], # Page 2 contents
|
||||
link_header=None, # No Link header - signals end of pagination
|
||||
),
|
||||
]
|
||||
requests_made = []
|
||||
|
||||
def mock_urlopen(request, *args, **kwargs):
|
||||
url = request.get_full_url()
|
||||
requests_made.append(url)
|
||||
return responses[len(requests_made) - 1]
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
results = list(
|
||||
github_backup.retrieve_data_gen(
|
||||
mock_args, "https://api.github.com/repos/owner/repo/issues"
|
||||
)
|
||||
)
|
||||
|
||||
# Verify all items retrieved and cursor was used in second request
|
||||
assert len(results) == 150
|
||||
assert len(requests_made) == 2
|
||||
assert "after=ABC123" in requests_made[1]
|
||||
|
||||
|
||||
def test_page_based_pagination(mock_args):
|
||||
"""Link header with 'page' parameter works correctly."""
|
||||
|
||||
# Simulate pulls/repos endpoint behavior: returns page numbers in Link header
|
||||
responses = [
|
||||
# Pulls endpoint uses traditional 'page' parameter (not cursor)
|
||||
MockHTTPResponse(
|
||||
data=[{"pull": i} for i in range(1, 101)], # Page 1 contents
|
||||
link_header='<https://api.github.com/repos/owner/repo/pulls?per_page=100&page=2>; rel="next"',
|
||||
),
|
||||
MockHTTPResponse(
|
||||
data=[{"pull": i} for i in range(101, 181)], # Page 2 contents
|
||||
link_header=None, # No Link header - signals end of pagination
|
||||
),
|
||||
]
|
||||
requests_made = []
|
||||
|
||||
def mock_urlopen(request, *args, **kwargs):
|
||||
url = request.get_full_url()
|
||||
requests_made.append(url)
|
||||
return responses[len(requests_made) - 1]
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
results = list(
|
||||
github_backup.retrieve_data_gen(
|
||||
mock_args, "https://api.github.com/repos/owner/repo/pulls"
|
||||
)
|
||||
)
|
||||
|
||||
# Verify all items retrieved and page parameter was used (not cursor)
|
||||
assert len(results) == 180
|
||||
assert len(requests_made) == 2
|
||||
assert "page=2" in requests_made[1]
|
||||
assert "after" not in requests_made[1]
|
||||
|
||||
|
||||
def test_no_link_header_stops_pagination(mock_args):
|
||||
"""Pagination stops when Link header is absent."""
|
||||
|
||||
# Simulate endpoint with results that fit in a single page
|
||||
responses = [
|
||||
MockHTTPResponse(
|
||||
data=[{"label": i} for i in range(1, 51)], # Page contents
|
||||
link_header=None, # No Link header - signals end of pagination
|
||||
)
|
||||
]
|
||||
requests_made = []
|
||||
|
||||
def mock_urlopen(request, *args, **kwargs):
|
||||
requests_made.append(request.get_full_url())
|
||||
return responses[len(requests_made) - 1]
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
results = list(
|
||||
github_backup.retrieve_data_gen(
|
||||
mock_args, "https://api.github.com/repos/owner/repo/labels"
|
||||
)
|
||||
)
|
||||
|
||||
# Verify pagination stopped after first request
|
||||
assert len(results) == 50
|
||||
assert len(requests_made) == 1
|
||||
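The pagination behaviour exercised here boils down to following ``rel="next"`` links until the ``Link`` header disappears, regardless of whether the link uses a ``page`` number or an ``after`` cursor. A minimal standard-library sketch of that loop (illustrative only, not the project's ``retrieve_data_gen``; it omits authentication and throttling)::

    import json
    import re
    from urllib.request import Request, urlopen

    def paginate_sketch(url):
        """Yield items from every page of a paginated GitHub-style endpoint (sketch)."""
        while url:
            with urlopen(Request(url)) as response:
                items = json.loads(response.read().decode("utf-8"))
                link = response.headers.get("Link", "")
            yield from items
            # Works for both page-based (?page=2) and cursor-based (?after=...) links.
            match = re.search(r'<([^>]+)>;\s*rel="next"', link)
            url = match.group(1) if match else None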