mirror of
https://github.com/josegonzalez/python-github-backup.git
synced 2026-04-29 20:15:36 +02:00
Compare commits
52 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f8cdf55050 | ||
|
|
b59f719f10 | ||
|
|
014eff395a | ||
|
|
9d0cfdb61d | ||
|
|
6cd0ab3633 | ||
|
|
02e833e40a | ||
|
|
b3a8241c9a | ||
|
|
d19e2ad9c5 | ||
|
|
24b3fdb4f3 | ||
|
|
013b27208e | ||
|
|
4d022d94d0 | ||
|
|
ed29a917ca | ||
|
|
f4117990b2 | ||
|
|
4c1f21a306 | ||
|
|
9fde6ed1ff | ||
|
|
9a9b069e14 | ||
|
|
f85c759e5d | ||
|
|
26a6e1df1b | ||
|
|
3d961d1118 | ||
|
|
20f9542063 | ||
|
|
bbf76e70eb | ||
|
|
ca70725449 | ||
|
|
653ceb1e12 | ||
|
|
ba1575538b | ||
|
|
d5be07ec80 | ||
|
|
5758e489e8 | ||
|
|
cceef92346 | ||
|
|
7f1807aaf8 | ||
|
|
8a0553a5b1 | ||
|
|
68af1d406a | ||
|
|
b112b43a08 | ||
|
|
f54a5458f6 | ||
|
|
60067650b0 | ||
|
|
655886fa80 | ||
|
|
0162f7ed46 | ||
|
|
8c1a13475a | ||
|
|
6268a4c5c6 | ||
|
|
4b2295db0d | ||
|
|
be900d1f3f | ||
|
|
9be6282719 | ||
|
|
1102990af0 | ||
|
|
311ffb40cd | ||
|
|
2f5e7c2dcf | ||
|
|
0d8a504b02 | ||
|
|
712d22d124 | ||
|
|
e0c9d65225 | ||
|
|
52d996f784 | ||
|
|
e6283f9384 | ||
|
|
1181f811b7 | ||
|
|
856ad5db41 | ||
|
|
c6fa8c7695 | ||
|
|
93e505c07d |
10
.github/workflows/docker.yml
vendored
10
.github/workflows/docker.yml
vendored
@@ -43,13 +43,13 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@v4
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v4
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
uses: docker/metadata-action@v6
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
@@ -68,7 +68,7 @@ jobs:
|
||||
type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@v7
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
|
||||
291
CHANGES.rst
291
CHANGES.rst
@@ -1,9 +1,298 @@
|
||||
Changelog
|
||||
=========
|
||||
|
||||
0.61.1 (2026-01-13)
|
||||
0.62.0 (2026-04-29)
|
||||
-------------------
|
||||
------------------------
|
||||
- Skip checkpoint-equal incremental items. [Duncan Ogilvie]
|
||||
- Avoid redundant release asset list requests. [Duncan Ogilvie]
|
||||
- Reduce unnecessary pull requests with incremental fetching. [Duncan
|
||||
Ogilvie]
|
||||
- Implement per-resource last_update timestamps. [Duncan Ogilvie]
|
||||
|
||||
Closes #62
|
||||
- Add support for pull request reviews. [Duncan Ogilvie]
|
||||
|
||||
Closes #124
|
||||
- Add support for discussions. [Duncan Ogilvie]
|
||||
|
||||
Closes #290
|
||||
- Add --token-from-gh authentication option. [Duncan Ogilvie]
|
||||
- Chore(deps): bump pytest in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [pytest](https://github.com/pytest-dev/pytest).
|
||||
|
||||
|
||||
Updates `pytest` from 9.0.2 to 9.0.3
|
||||
- [Release notes](https://github.com/pytest-dev/pytest/releases)
|
||||
- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst)
|
||||
- [Commits](https://github.com/pytest-dev/pytest/compare/9.0.2...9.0.3)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: pytest
|
||||
dependency-version: 9.0.3
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-patch
|
||||
dependency-group: python-packages
|
||||
...
|
||||
- Chore(deps): bump black in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [black](https://github.com/psf/black).
|
||||
|
||||
|
||||
Updates `black` from 26.3.0 to 26.3.1
|
||||
- [Release notes](https://github.com/psf/black/releases)
|
||||
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
|
||||
- [Commits](https://github.com/psf/black/compare/26.3.0...26.3.1)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: black
|
||||
dependency-version: 26.3.1
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-patch
|
||||
dependency-group: python-packages
|
||||
...
|
||||
- Chore(deps): bump docker/login-action from 3 to 4. [dependabot[bot]]
|
||||
|
||||
Bumps [docker/login-action](https://github.com/docker/login-action) from 3 to 4.
|
||||
- [Release notes](https://github.com/docker/login-action/releases)
|
||||
- [Commits](https://github.com/docker/login-action/compare/v3...v4)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: docker/login-action
|
||||
dependency-version: '4'
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
...
|
||||
- Chore(deps): bump docker/setup-qemu-action from 3 to 4.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3 to 4.
|
||||
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
|
||||
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3...v4)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: docker/setup-qemu-action
|
||||
dependency-version: '4'
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
...
|
||||
- Chore(deps): bump docker/build-push-action from 6 to 7.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6 to 7.
|
||||
- [Release notes](https://github.com/docker/build-push-action/releases)
|
||||
- [Commits](https://github.com/docker/build-push-action/compare/v6...v7)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: docker/build-push-action
|
||||
dependency-version: '7'
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
...
|
||||
- Chore(deps): bump docker/setup-buildx-action from 3 to 4.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3 to 4.
|
||||
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
|
||||
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3...v4)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: docker/setup-buildx-action
|
||||
dependency-version: '4'
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
...
|
||||
- Chore(deps): bump docker/metadata-action from 5 to 6.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps [docker/metadata-action](https://github.com/docker/metadata-action) from 5 to 6.
|
||||
- [Release notes](https://github.com/docker/metadata-action/releases)
|
||||
- [Commits](https://github.com/docker/metadata-action/compare/v5...v6)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: docker/metadata-action
|
||||
dependency-version: '6'
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
...
|
||||
- Chore(deps): bump the python-packages group with 2 updates.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 2 updates: [black](https://github.com/psf/black) and [setuptools](https://github.com/pypa/setuptools).
|
||||
|
||||
|
||||
Updates `black` from 26.1.0 to 26.3.0
|
||||
- [Release notes](https://github.com/psf/black/releases)
|
||||
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
|
||||
- [Commits](https://github.com/psf/black/compare/26.1.0...26.3.0)
|
||||
|
||||
Updates `setuptools` from 82.0.0 to 82.0.1
|
||||
- [Release notes](https://github.com/pypa/setuptools/releases)
|
||||
- [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst)
|
||||
- [Commits](https://github.com/pypa/setuptools/compare/v82.0.0...v82.0.1)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: black
|
||||
dependency-version: 26.3.0
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-minor
|
||||
dependency-group: python-packages
|
||||
- dependency-name: setuptools
|
||||
dependency-version: 82.0.1
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-patch
|
||||
dependency-group: python-packages
|
||||
...
|
||||
|
||||
|
||||
0.61.5 (2026-02-18)
|
||||
-------------------
|
||||
- Fix empty repository crash due to None timestamp comparison (#489)
|
||||
[Rodos]
|
||||
|
||||
Empty repositories have None for pushed_at/updated_at, causing a
|
||||
TypeError when compared to the last_update string. Use .get() with
|
||||
truthiness check to skip None timestamps in incremental tracking.
|
||||
|
||||
|
||||
0.61.4 (2026-02-16)
|
||||
-------------------
|
||||
- Fix HTTP 451 DMCA and 403 TOS handling regression (#487) [Rodos]
|
||||
|
||||
The DMCA handling added in PR #454 had a bug: make_request_with_retry()
|
||||
raises HTTPError before retrieve_data() could check the status code via
|
||||
getcode(), making the case 451 handler dead code. This also affected
|
||||
HTTP 403 TOS violations (e.g. jumoog/MagiskOnWSA).
|
||||
|
||||
Fix by catching HTTPError in retrieve_data() and converting 451 and
|
||||
blocked 403 responses (identified by "block" key in response body) to
|
||||
RepositoryUnavailableError. Non-block 403s (permissions, scopes) still
|
||||
propagate as HTTPError. Also handle RepositoryUnavailableError in
|
||||
retrieve_repositories() for the --repository case.
|
||||
|
||||
Rewrote tests to mock urlopen (not make_request_with_retry) to exercise
|
||||
the real code path that was previously untested.
|
||||
|
||||
Closes #487
|
||||
- Chore(deps): bump setuptools in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [setuptools](https://github.com/pypa/setuptools).
|
||||
|
||||
|
||||
Updates `setuptools` from 80.10.2 to 82.0.0
|
||||
- [Release notes](https://github.com/pypa/setuptools/releases)
|
||||
- [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst)
|
||||
- [Commits](https://github.com/pypa/setuptools/compare/v80.10.2...v82.0.0)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: setuptools
|
||||
dependency-version: 82.0.0
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
dependency-group: python-packages
|
||||
...
|
||||
- Chore(deps): bump setuptools in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [setuptools](https://github.com/pypa/setuptools).
|
||||
|
||||
|
||||
Updates `setuptools` from 80.10.1 to 80.10.2
|
||||
- [Release notes](https://github.com/pypa/setuptools/releases)
|
||||
- [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst)
|
||||
- [Commits](https://github.com/pypa/setuptools/compare/v80.10.1...v80.10.2)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: setuptools
|
||||
dependency-version: 80.10.2
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-patch
|
||||
dependency-group: python-packages
|
||||
...
|
||||
|
||||
|
||||
0.61.3 (2026-01-24)
|
||||
-------------------
|
||||
- Fix KeyError: 'Private' when using --all flag (#481) [Rodos]
|
||||
|
||||
The repository dictionary uses lowercase "private" key. Use .get() with
|
||||
the correct case to match the pattern used elsewhere in the codebase.
|
||||
|
||||
The bug only affects --all users since --security-advisories short-circuits
|
||||
before the key access.
|
||||
- Chore(deps): bump setuptools in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [setuptools](https://github.com/pypa/setuptools).
|
||||
|
||||
|
||||
Updates `setuptools` from 80.9.0 to 80.10.1
|
||||
- [Release notes](https://github.com/pypa/setuptools/releases)
|
||||
- [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst)
|
||||
- [Commits](https://github.com/pypa/setuptools/compare/v80.9.0...v80.10.1)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: setuptools
|
||||
dependency-version: 80.10.1
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-minor
|
||||
dependency-group: python-packages
|
||||
...
|
||||
|
||||
|
||||
0.61.2 (2026-01-19)
|
||||
-------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Skip security advisories for private repos unless explicitly
|
||||
requested. [Lukas Bestle]
|
||||
- Handle 404 errors on security advisories. [Lukas Bestle]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
- Chore(deps): bump black in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [black](https://github.com/psf/black).
|
||||
|
||||
|
||||
Updates `black` from 25.12.0 to 26.1.0
|
||||
- [Release notes](https://github.com/psf/black/releases)
|
||||
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
|
||||
- [Commits](https://github.com/psf/black/compare/25.12.0...26.1.0)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: black
|
||||
dependency-version: 26.1.0
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
dependency-group: python-packages
|
||||
...
|
||||
- Docs: Explain security advisories in README. [Lukas Bestle]
|
||||
- Feat: Only make security advisory dir if successful. [Lukas Bestle]
|
||||
|
||||
Avoids empty directories for private repos
|
||||
|
||||
|
||||
0.61.1 (2026-01-13)
|
||||
-------------------
|
||||
- Refactor test fixtures to use shared create_args helper. [Rodos]
|
||||
|
||||
Uses the real parse_args() function to get CLI defaults, so when
|
||||
|
||||
74
README.rst
74
README.rst
@@ -4,7 +4,7 @@ github-backup
|
||||
|
||||
|PyPI| |Python Versions|
|
||||
|
||||
The package can be used to backup an *entire* `Github <https://github.com/>`_ organization, repository or user account, including starred repos, issues and wikis in the most appropriate format (clones for wikis, json files for issues).
|
||||
The package can be used to backup an *entire* `Github <https://github.com/>`_ organization, repository or user account, including starred repos, issues, discussions and wikis in the most appropriate format (clones for wikis, json files for issues and discussions).
|
||||
|
||||
Requirements
|
||||
============
|
||||
@@ -36,16 +36,18 @@ Show the CLI help output::
|
||||
|
||||
CLI Help output::
|
||||
|
||||
github-backup [-h] [-t TOKEN_CLASSIC] [-f TOKEN_FINE] [-q] [--as-app]
|
||||
[-o OUTPUT_DIRECTORY] [-l LOG_LEVEL] [-i]
|
||||
github-backup [-h] [-t TOKEN_CLASSIC] [-f TOKEN_FINE] [--token-from-gh]
|
||||
[-q] [--as-app] [-o OUTPUT_DIRECTORY] [-l LOG_LEVEL] [-i]
|
||||
[--incremental-by-files]
|
||||
[--starred] [--all-starred] [--starred-skip-size-over MB]
|
||||
[--watched] [--followers] [--following] [--all]
|
||||
[--issues] [--issue-comments] [--issue-events] [--pulls]
|
||||
[--pull-comments] [--pull-commits] [--pull-details]
|
||||
[--pull-comments] [--pull-reviews] [--pull-commits]
|
||||
[--pull-details]
|
||||
[--labels] [--hooks] [--milestones] [--security-advisories]
|
||||
[--repositories] [--bare] [--no-prune] [--lfs] [--wikis]
|
||||
[--gists] [--starred-gists] [--skip-archived] [--skip-existing]
|
||||
[--discussions] [--repositories] [--bare] [--no-prune]
|
||||
[--lfs] [--wikis] [--gists] [--starred-gists]
|
||||
[--skip-archived] [--skip-existing]
|
||||
[-L [LANGUAGES ...]] [-N NAME_REGEX] [-H GITHUB_HOST]
|
||||
[-O] [-R REPOSITORY] [-P] [-F] [--prefer-ssh] [-v]
|
||||
[--keychain-name OSX_KEYCHAIN_ITEM_NAME]
|
||||
@@ -71,6 +73,7 @@ CLI Help output::
|
||||
-f, --token-fine TOKEN_FINE
|
||||
fine-grained personal access token (github_pat_....),
|
||||
or path to token (file://...)
|
||||
--token-from-gh read token from GitHub CLI (gh auth token)
|
||||
-q, --quiet supress log messages less severe than warning, e.g.
|
||||
info
|
||||
--as-app authenticate as github app instead of as a user.
|
||||
@@ -95,6 +98,7 @@ CLI Help output::
|
||||
--issue-events include issue events in backup
|
||||
--pulls include pull requests in backup
|
||||
--pull-comments include pull request review comments in backup
|
||||
--pull-reviews include pull request reviews in backup
|
||||
--pull-commits include pull request commits in backup
|
||||
--pull-details include more pull request details in backup [*]
|
||||
--labels include labels in backup
|
||||
@@ -103,6 +107,7 @@ CLI Help output::
|
||||
--milestones include milestones in backup
|
||||
--security-advisories
|
||||
include security advisories in backup
|
||||
--discussions include discussions in backup
|
||||
--repositories include repository clone in backup
|
||||
--bare clone bare repositories
|
||||
--no-prune disable prune option for git fetch
|
||||
@@ -143,8 +148,8 @@ CLI Help output::
|
||||
applies if including releases
|
||||
--skip-assets-on [SKIP_ASSETS_ON ...]
|
||||
skip asset downloads for these repositories
|
||||
--attachments download user-attachments from issues and pull
|
||||
requests
|
||||
--attachments download user-attachments from issues, pull requests,
|
||||
and discussions
|
||||
--throttle-limit THROTTLE_LIMIT
|
||||
start throttling of GitHub API requests after this
|
||||
amount of API requests remain
|
||||
@@ -171,6 +176,8 @@ The positional argument ``USER`` specifies the user or organization account you
|
||||
|
||||
**Classic tokens** (``-t TOKEN``) are `slightly less secure <https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#personal-access-tokens-classic>`_ as they provide very coarse-grained permissions.
|
||||
|
||||
If you already authenticate with the `GitHub CLI <https://cli.github.com/>`_, you can use ``--token-from-gh`` to read the token with ``gh auth token`` instead of passing a token directly. This avoids placing the token in shell history or process arguments. When ``--github-host`` is set, the token is read with ``gh auth token --hostname HOST``.
|
||||
|
||||
|
||||
Fine Tokens
|
||||
~~~~~~~~~~~
|
||||
@@ -181,7 +188,7 @@ Customise the permissions for your use case, but for a personal account full bac
|
||||
|
||||
**User permissions**: Read access to followers, starring, and watching.
|
||||
|
||||
**Repository permissions**: Read access to contents, issues, metadata, pull requests, and webhooks.
|
||||
**Repository permissions**: Read access to contents, discussions, issues, metadata, pull requests, and webhooks.
|
||||
|
||||
|
||||
GitHub Apps
|
||||
@@ -262,9 +269,9 @@ LFS objects are fetched for all refs, not just the current checkout, ensuring a
|
||||
About Attachments
|
||||
-----------------
|
||||
|
||||
When you use the ``--attachments`` option with ``--issues`` or ``--pulls``, the tool will download user-uploaded attachments (images, videos, documents, etc.) from issue and pull request descriptions and comments. In some circumstances attachments contain valuable data related to the topic, and without their backup important information or context might be lost inadvertently.
|
||||
When you use the ``--attachments`` option with ``--issues``, ``--pulls`` or ``--discussions``, the tool will download user-uploaded attachments (images, videos, documents, etc.) from issue, pull request and discussion descriptions and comments. In some circumstances attachments contain valuable data related to the topic, and without their backup important information or context might be lost inadvertently.
|
||||
|
||||
Attachments are saved to ``issues/attachments/{issue_number}/`` and ``pulls/attachments/{pull_number}/`` directories, where ``{issue_number}`` is the GitHub issue number (e.g., issue #123 saves to ``issues/attachments/123/``). Each attachment directory contains:
|
||||
Attachments are saved to ``issues/attachments/{issue_number}/``, ``pulls/attachments/{pull_number}/`` and ``discussions/attachments/{discussion_number}/`` directories, where ``{issue_number}`` is the GitHub issue number (e.g., issue #123 saves to ``issues/attachments/123/``). Each attachment directory contains:
|
||||
|
||||
- The downloaded attachment files (named by their GitHub identifier with appropriate file extensions)
|
||||
- If multiple attachments have the same filename, conflicts are resolved with numeric suffixes (e.g., ``report.pdf``, ``report_1.pdf``, ``report_2.pdf``)
|
||||
@@ -284,6 +291,27 @@ The tool automatically extracts file extensions from HTTP headers to ensure file
|
||||
**Fine-grained token limitation:** Due to a GitHub platform limitation, fine-grained personal access tokens (``github_pat_...``) cannot download attachments from private repositories directly. This affects both ``/assets/`` (images) and ``/files/`` (documents) URLs. The tool implements a workaround for image attachments using GitHub's Markdown API, which converts URLs to temporary JWT-signed URLs that can be downloaded. However, this workaround only works for images - document attachments (PDFs, text files, etc.) will fail with 404 errors when using fine-grained tokens on private repos. For full attachment support on private repositories, use a classic token (``-t``) instead of a fine-grained token (``-f``). See `#477 <https://github.com/josegonzalez/python-github-backup/issues/477>`_ for details.
|
||||
|
||||
|
||||
About Discussions
|
||||
-----------------
|
||||
|
||||
GitHub Discussions are backed up with GitHub's GraphQL API because the REST API does not expose discussions. Use ``--discussions`` to save each discussion as JSON under ``repositories/{repo}/discussions/{number}.json``. Discussion backups include the discussion body and metadata, category information, comments, and comment replies.
|
||||
|
||||
``--discussions`` is included in ``--all``. Unlike most REST API-backed resources, discussions require authentication because GitHub's GraphQL API requires a token. Fine-grained personal access tokens and GitHub Apps need read access to the repository's Discussions permission.
|
||||
|
||||
Incremental backups use a per-repository checkpoint at ``repositories/{repo}/discussions/last_update`` based on discussion ``updatedAt`` timestamps. This is separate from the repository-level ``last_update`` file so discussion activity is not missed if the repository's own update timestamp does not change. If you enable ``--discussions`` on an existing incremental backup, the first run performs a full discussions backup for each repository and creates the discussions checkpoint for future runs.
|
||||
|
||||
|
||||
About security advisories
|
||||
-------------------------
|
||||
|
||||
GitHub security advisories are only available in public repositories. GitHub does not provide the respective API endpoint for private repositories.
|
||||
|
||||
Therefore the logic is implemented as follows:
|
||||
- Security advisories are included in the `--all` option.
|
||||
- If only the `--all` option was provided, backups of security advisories are skipped for private repositories.
|
||||
- If the `--security-advisories` option is provided (on its own or in addition to `--all`), a backup of security advisories is attempted for all repositories, with graceful handling if the GitHub API doesn't return any.
|
||||
|
||||
|
||||
Run in Docker container
|
||||
-----------------------
|
||||
|
||||
@@ -314,12 +342,24 @@ For finer control, avoid using ``--assets`` with starred repos, or use ``--skip-
|
||||
|
||||
Alternatively, consider just storing links to starred repos in JSON format with ``--starred``.
|
||||
|
||||
About pull request reviews
|
||||
--------------------------
|
||||
|
||||
Use ``--pull-reviews`` with ``--pulls`` to include GitHub pull request review metadata under each pull request's ``review_data`` key. Reviews are separate from review comments: ``--pull-comments`` backs up inline review comments via ``comment_data`` and regular PR conversation comments via ``comment_regular_data``, while ``--pull-reviews`` backs up review state, submitted time, commit ID, and the top-level review body.
|
||||
|
||||
``--pull-reviews`` is included in ``--all``. Incremental backups use a per-repository checkpoint at ``repositories/{repo}/pulls/reviews_last_update``. If ``--pull-reviews`` is enabled on an existing incremental backup, the first run performs a one-time backfill for pull request reviews so older PRs are not skipped by the existing pull request checkpoint. Existing ``comment_data``, ``comment_regular_data`` and ``commit_data`` fields are preserved when only review data is being added.
|
||||
|
||||
|
||||
Incremental Backup
|
||||
------------------
|
||||
|
||||
Using (``-i, --incremental``) will only request new data from the API **since the last run (successful or not)**. e.g. only request issues from the API since the last run.
|
||||
Using (``-i, --incremental``) will only request new data from the API **since the last successful resource backup**. e.g. only request issues from the API since the last issue backup for that repository.
|
||||
|
||||
This means any blocking errors on previous runs can cause a large amount of missing data in backups.
|
||||
Incremental checkpoints for issue and pull request API backups are stored per resource in that repository's backup directory (for example ``repositories/{repo}/issues/last_update``, ``repositories/{repo}/pulls/last_update`` or ``starred/{owner}/{repo}/pulls/last_update``). Older versions stored a single global ``last_update`` file in the output directory root. During migration, the legacy global checkpoint is used as a fallback only for resource directories that already contain backup data but do not yet have their own checkpoint. New repositories or newly enabled resources with no existing data get a full backup instead of inheriting an unrelated global checkpoint.
|
||||
|
||||
After all existing issue and pull request resource directories have per-resource checkpoints, the legacy global ``last_update`` file is removed automatically.
|
||||
|
||||
This means any blocking errors on previous runs can cause missing data in backups for the affected repository resource.
|
||||
|
||||
Using (``--incremental-by-files``) will request new data from the API **based on when the file was modified on filesystem**. e.g. if you modify the file yourself you may miss something.
|
||||
|
||||
@@ -332,7 +372,7 @@ Known blocking errors
|
||||
|
||||
Some errors will block the backup run by exiting the script. e.g. receiving a 403 Forbidden error from the Github API.
|
||||
|
||||
If the incremental argument is used, this will result in the next backup only requesting API data since the last blocked/failed run. Potentially causing unexpected large amounts of missing data.
|
||||
If the incremental argument is used, per-resource checkpoints are only advanced after that resource's backup work completes. A blocking error can still abort the overall run, but repositories and resources that were not processed will keep their previous checkpoints.
|
||||
|
||||
It's therefore recommended to only use the incremental argument if the output/result is being actively monitored, or complimented with periodic full non-incremental runs, to avoid unexpected missing data in a regular backup runs.
|
||||
|
||||
@@ -405,14 +445,14 @@ Quietly and incrementally backup useful Github user data (public and private rep
|
||||
export FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||
GH_USER=YOUR-GITHUB-USER
|
||||
|
||||
github-backup -f $FINE_ACCESS_TOKEN --prefer-ssh -o ~/github-backup/ -l error -P -i --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-commits --labels --milestones --security-advisories --repositories --wikis --releases --assets --attachments --pull-details --gists --starred-gists $GH_USER
|
||||
github-backup -f $FINE_ACCESS_TOKEN --prefer-ssh -o ~/github-backup/ -l error -P -i --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-reviews --pull-commits --labels --milestones --security-advisories --discussions --repositories --wikis --releases --assets --attachments --pull-details --gists --starred-gists $GH_USER
|
||||
|
||||
Debug an error/block or incomplete backup into a temporary directory. Omit "incremental" to fill a previous incomplete backup. ::
|
||||
|
||||
export FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||
GH_USER=YOUR-GITHUB-USER
|
||||
|
||||
github-backup -f $FINE_ACCESS_TOKEN -o /tmp/github-backup/ -l debug -P --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-commits --labels --milestones --repositories --wikis --releases --assets --pull-details --gists --starred-gists $GH_USER
|
||||
github-backup -f $FINE_ACCESS_TOKEN -o /tmp/github-backup/ -l debug -P --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-reviews --pull-commits --labels --milestones --discussions --repositories --wikis --releases --assets --pull-details --gists --starred-gists $GH_USER
|
||||
|
||||
Pipe a token from stdin to avoid storing it in environment variables or command history (Unix-like systems only)::
|
||||
|
||||
@@ -428,7 +468,7 @@ This tool creates backups only, there is no inbuilt restore command.
|
||||
cd /tmp/white-house/repositories/petitions/repository
|
||||
git push --mirror git@github.com:WhiteHouse/petitions.git
|
||||
|
||||
**Issues, pull requests, comments, and other metadata** are saved as JSON files for archival purposes. The GitHub API does not support recreating this data faithfully, creating issues via the API has limitations:
|
||||
**Issues, pull requests, discussions, comments, and other metadata** are saved as JSON files for archival purposes. The GitHub API does not support recreating this data faithfully, creating issues via the API has limitations:
|
||||
|
||||
- New issue/PR numbers are assigned (original numbers cannot be set)
|
||||
- Timestamps reflect creation time (original dates cannot be set)
|
||||
|
||||
@@ -1 +1 @@
|
||||
__version__ = "0.61.1"
|
||||
__version__ = "0.62.0"
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
292
github_backup/graphql_queries.py
Normal file
292
github_backup/graphql_queries.py
Normal file
@@ -0,0 +1,292 @@
|
||||
"""GraphQL query templates used by github-backup."""
|
||||
|
||||
DISCUSSION_PAGE_SIZE = 100
|
||||
|
||||
DISCUSSION_LIST_QUERY = """
|
||||
query($owner: String!, $name: String!, $after: String, $pageSize: Int!) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
hasDiscussionsEnabled
|
||||
discussions(
|
||||
first: $pageSize,
|
||||
after: $after,
|
||||
orderBy: {field: UPDATED_AT, direction: DESC}
|
||||
) {
|
||||
totalCount
|
||||
nodes {
|
||||
id
|
||||
number
|
||||
title
|
||||
updatedAt
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
DISCUSSION_DETAIL_QUERY = """
|
||||
query(
|
||||
$owner: String!,
|
||||
$name: String!,
|
||||
$number: Int!,
|
||||
$commentsCursor: String,
|
||||
$pageSize: Int!
|
||||
) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
discussion(number: $number) {
|
||||
activeLockReason
|
||||
answer {
|
||||
id
|
||||
databaseId
|
||||
url
|
||||
}
|
||||
answerChosenAt
|
||||
answerChosenBy {
|
||||
...ActorFields
|
||||
}
|
||||
author {
|
||||
...ActorFields
|
||||
}
|
||||
authorAssociation
|
||||
body
|
||||
bodyHTML
|
||||
bodyText
|
||||
category {
|
||||
createdAt
|
||||
description
|
||||
emoji
|
||||
emojiHTML
|
||||
id
|
||||
isAnswerable
|
||||
name
|
||||
slug
|
||||
updatedAt
|
||||
}
|
||||
closed
|
||||
closedAt
|
||||
createdAt
|
||||
createdViaEmail
|
||||
databaseId
|
||||
editor {
|
||||
...ActorFields
|
||||
}
|
||||
id
|
||||
includesCreatedEdit
|
||||
isAnswered
|
||||
labels(first: 100) {
|
||||
totalCount
|
||||
nodes {
|
||||
id
|
||||
name
|
||||
color
|
||||
description
|
||||
}
|
||||
}
|
||||
lastEditedAt
|
||||
locked
|
||||
number
|
||||
poll {
|
||||
id
|
||||
question
|
||||
totalVoteCount
|
||||
options(first: 100) {
|
||||
totalCount
|
||||
nodes {
|
||||
id
|
||||
option
|
||||
totalVoteCount
|
||||
}
|
||||
}
|
||||
}
|
||||
publishedAt
|
||||
reactionGroups {
|
||||
...ReactionGroupFields
|
||||
}
|
||||
resourcePath
|
||||
stateReason
|
||||
title
|
||||
updatedAt
|
||||
upvoteCount
|
||||
url
|
||||
comments(first: $pageSize, after: $commentsCursor) {
|
||||
totalCount
|
||||
nodes {
|
||||
...DiscussionCommentFields
|
||||
replies(first: $pageSize) {
|
||||
totalCount
|
||||
nodes {
|
||||
...DiscussionReplyFields
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fragment ActorFields on Actor {
|
||||
avatarUrl
|
||||
login
|
||||
resourcePath
|
||||
url
|
||||
}
|
||||
|
||||
fragment ReactionGroupFields on ReactionGroup {
|
||||
content
|
||||
reactors {
|
||||
totalCount
|
||||
}
|
||||
}
|
||||
|
||||
fragment DiscussionCommentFields on DiscussionComment {
|
||||
author {
|
||||
...ActorFields
|
||||
}
|
||||
authorAssociation
|
||||
body
|
||||
bodyHTML
|
||||
bodyText
|
||||
createdAt
|
||||
createdViaEmail
|
||||
databaseId
|
||||
deletedAt
|
||||
editor {
|
||||
...ActorFields
|
||||
}
|
||||
id
|
||||
includesCreatedEdit
|
||||
isAnswer
|
||||
isMinimized
|
||||
lastEditedAt
|
||||
minimizedReason
|
||||
publishedAt
|
||||
reactionGroups {
|
||||
...ReactionGroupFields
|
||||
}
|
||||
replyTo {
|
||||
id
|
||||
databaseId
|
||||
url
|
||||
}
|
||||
resourcePath
|
||||
updatedAt
|
||||
upvoteCount
|
||||
url
|
||||
}
|
||||
|
||||
fragment DiscussionReplyFields on DiscussionComment {
|
||||
author {
|
||||
...ActorFields
|
||||
}
|
||||
authorAssociation
|
||||
body
|
||||
bodyHTML
|
||||
bodyText
|
||||
createdAt
|
||||
createdViaEmail
|
||||
databaseId
|
||||
deletedAt
|
||||
editor {
|
||||
...ActorFields
|
||||
}
|
||||
id
|
||||
includesCreatedEdit
|
||||
isAnswer
|
||||
isMinimized
|
||||
lastEditedAt
|
||||
minimizedReason
|
||||
publishedAt
|
||||
reactionGroups {
|
||||
...ReactionGroupFields
|
||||
}
|
||||
replyTo {
|
||||
id
|
||||
databaseId
|
||||
url
|
||||
}
|
||||
resourcePath
|
||||
updatedAt
|
||||
upvoteCount
|
||||
url
|
||||
}
|
||||
"""
|
||||
|
||||
DISCUSSION_REPLIES_QUERY = """
|
||||
query($commentId: ID!, $repliesCursor: String, $pageSize: Int!) {
|
||||
node(id: $commentId) {
|
||||
... on DiscussionComment {
|
||||
replies(first: $pageSize, after: $repliesCursor) {
|
||||
totalCount
|
||||
nodes {
|
||||
...DiscussionReplyFields
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fragment ActorFields on Actor {
|
||||
avatarUrl
|
||||
login
|
||||
resourcePath
|
||||
url
|
||||
}
|
||||
|
||||
fragment ReactionGroupFields on ReactionGroup {
|
||||
content
|
||||
reactors {
|
||||
totalCount
|
||||
}
|
||||
}
|
||||
|
||||
fragment DiscussionReplyFields on DiscussionComment {
|
||||
author {
|
||||
...ActorFields
|
||||
}
|
||||
authorAssociation
|
||||
body
|
||||
bodyHTML
|
||||
bodyText
|
||||
createdAt
|
||||
createdViaEmail
|
||||
databaseId
|
||||
deletedAt
|
||||
editor {
|
||||
...ActorFields
|
||||
}
|
||||
id
|
||||
includesCreatedEdit
|
||||
isAnswer
|
||||
isMinimized
|
||||
lastEditedAt
|
||||
minimizedReason
|
||||
publishedAt
|
||||
reactionGroups {
|
||||
...ReactionGroupFields
|
||||
}
|
||||
replyTo {
|
||||
id
|
||||
databaseId
|
||||
url
|
||||
}
|
||||
resourcePath
|
||||
updatedAt
|
||||
upvoteCount
|
||||
url
|
||||
}
|
||||
"""
|
||||
@@ -1,15 +1,15 @@
|
||||
# Linting & Formatting
|
||||
autopep8==2.3.2
|
||||
black==25.12.0
|
||||
black==26.3.1
|
||||
flake8==7.3.0
|
||||
|
||||
# Testing
|
||||
pytest==9.0.2
|
||||
pytest==9.0.3
|
||||
|
||||
# Release & Publishing
|
||||
twine==6.2.0
|
||||
gitchangelog==3.0.4
|
||||
setuptools==80.9.0
|
||||
setuptools==82.0.1
|
||||
|
||||
# Documentation
|
||||
restructuredtext-lint==2.0.2
|
||||
|
||||
75
tests/test_auth.py
Normal file
75
tests/test_auth.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""Tests for authentication helpers."""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def test_token_from_gh_flag_parses():
|
||||
args = github_backup.parse_args(["--token-from-gh", "testuser"])
|
||||
assert args.token_from_gh is True
|
||||
|
||||
|
||||
def test_get_auth_reads_token_from_gh_cli(create_args):
|
||||
args = create_args(token_from_gh=True)
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.subprocess.check_output",
|
||||
return_value=b"gho_test_token\n",
|
||||
) as mock_check_output:
|
||||
auth = github_backup.get_auth(args, encode=False)
|
||||
|
||||
assert auth == "gho_test_token:x-oauth-basic"
|
||||
mock_check_output.assert_called_once_with(
|
||||
["gh", "auth", "token"], stderr=github_backup.subprocess.PIPE
|
||||
)
|
||||
|
||||
|
||||
def test_get_auth_reads_token_from_gh_cli_for_enterprise_host(create_args):
|
||||
args = create_args(token_from_gh=True, github_host="ghe.example.com")
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.subprocess.check_output",
|
||||
return_value=b"gho_enterprise_token\n",
|
||||
) as mock_check_output:
|
||||
auth = github_backup.get_auth(args, encode=False)
|
||||
|
||||
assert auth == "gho_enterprise_token:x-oauth-basic"
|
||||
mock_check_output.assert_called_once_with(
|
||||
["gh", "auth", "token", "--hostname", "ghe.example.com"],
|
||||
stderr=github_backup.subprocess.PIPE,
|
||||
)
|
||||
|
||||
|
||||
def test_token_from_gh_is_cached(create_args):
|
||||
args = create_args(token_from_gh=True)
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.subprocess.check_output",
|
||||
return_value=b"gho_cached_token\n",
|
||||
) as mock_check_output:
|
||||
assert github_backup.get_auth(args, encode=False) == "gho_cached_token:x-oauth-basic"
|
||||
assert github_backup.get_auth(args, encode=False) == "gho_cached_token:x-oauth-basic"
|
||||
|
||||
mock_check_output.assert_called_once()
|
||||
|
||||
|
||||
def test_graphql_auth_strips_basic_auth_suffix_for_gh_cli_token(create_args):
|
||||
args = create_args(token_from_gh=True)
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.subprocess.check_output",
|
||||
return_value=b"gho_graphql_token\n",
|
||||
):
|
||||
assert github_backup.get_graphql_auth(args) == "gho_graphql_token"
|
||||
|
||||
|
||||
def test_token_from_gh_rejects_as_app(create_args):
|
||||
args = create_args(token_from_gh=True, as_app=True)
|
||||
|
||||
with pytest.raises(Exception) as exc_info:
|
||||
github_backup.get_auth(args, encode=False)
|
||||
|
||||
assert "--token-from-gh cannot be used with --as-app" in str(exc_info.value)
|
||||
257
tests/test_discussions.py
Normal file
257
tests/test_discussions.py
Normal file
@@ -0,0 +1,257 @@
|
||||
"""Tests for GitHub Discussions backup support."""
|
||||
|
||||
import json
|
||||
import os
|
||||
from unittest.mock import patch
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def test_parse_args_discussions_flag():
|
||||
args = github_backup.parse_args(["--discussions", "testuser"])
|
||||
assert args.include_discussions is True
|
||||
|
||||
|
||||
def test_retrieve_discussion_summaries_stops_at_incremental_since(create_args):
|
||||
args = create_args()
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
page = {
|
||||
"repository": {
|
||||
"hasDiscussionsEnabled": True,
|
||||
"discussions": {
|
||||
"totalCount": 3,
|
||||
"nodes": [
|
||||
{"number": 3, "title": "new", "updatedAt": "2026-02-01T00:00:00Z"},
|
||||
{"number": 2, "title": "also new", "updatedAt": "2026-01-10T00:00:00Z"},
|
||||
{"number": 1, "title": "old", "updatedAt": "2025-12-01T00:00:00Z"},
|
||||
],
|
||||
"pageInfo": {"hasNextPage": True, "endCursor": "NEXT"},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_graphql_data", return_value=page
|
||||
) as mock_retrieve:
|
||||
summaries, newest, enabled, total = github_backup.retrieve_discussion_summaries(
|
||||
args, repository, since="2026-01-01T00:00:00Z"
|
||||
)
|
||||
|
||||
assert enabled is True
|
||||
assert total == 3
|
||||
assert newest == "2026-02-01T00:00:00Z"
|
||||
assert [item["number"] for item in summaries] == [3, 2]
|
||||
# The old discussion stops pagination, so the next page is not requested.
|
||||
assert mock_retrieve.call_count == 1
|
||||
assert (
|
||||
mock_retrieve.call_args.kwargs["log_context"]
|
||||
== "discussion summaries owner/repo page 1"
|
||||
)
|
||||
|
||||
|
||||
def test_retrieve_discussion_summaries_excludes_checkpoint_timestamp(create_args):
|
||||
args = create_args()
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
page = {
|
||||
"repository": {
|
||||
"hasDiscussionsEnabled": True,
|
||||
"discussions": {
|
||||
"totalCount": 1,
|
||||
"nodes": [
|
||||
{
|
||||
"number": 1,
|
||||
"title": "already backed up",
|
||||
"updatedAt": "2026-01-01T00:00:00Z",
|
||||
},
|
||||
],
|
||||
"pageInfo": {"hasNextPage": True, "endCursor": "NEXT"},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_graphql_data", return_value=page
|
||||
) as mock_retrieve:
|
||||
summaries, newest, enabled, total = github_backup.retrieve_discussion_summaries(
|
||||
args, repository, since="2026-01-01T00:00:00Z"
|
||||
)
|
||||
|
||||
assert enabled is True
|
||||
assert total == 1
|
||||
assert newest == "2026-01-01T00:00:00Z"
|
||||
assert summaries == []
|
||||
assert mock_retrieve.call_count == 1
|
||||
|
||||
|
||||
def test_retrieve_discussion_summaries_disabled_discussions(create_args):
|
||||
args = create_args()
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_graphql_data",
|
||||
return_value={"repository": {"hasDiscussionsEnabled": False}},
|
||||
):
|
||||
summaries, newest, enabled, total = github_backup.retrieve_discussion_summaries(
|
||||
args, repository
|
||||
)
|
||||
|
||||
assert summaries == []
|
||||
assert newest is None
|
||||
assert enabled is False
|
||||
assert total == 0
|
||||
|
||||
|
||||
def _comment(comment_id, body, replies=None, replies_has_next=False):
|
||||
replies = replies or []
|
||||
return {
|
||||
"id": comment_id,
|
||||
"body": body,
|
||||
"replies": {
|
||||
"totalCount": len(replies) + (1 if replies_has_next else 0),
|
||||
"nodes": replies,
|
||||
"pageInfo": {
|
||||
"hasNextPage": replies_has_next,
|
||||
"endCursor": "REPLIES2" if replies_has_next else None,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def _discussion_page(comment_nodes, has_next=False):
|
||||
return {
|
||||
"repository": {
|
||||
"discussion": {
|
||||
"number": 42,
|
||||
"title": "Discussion title",
|
||||
"updatedAt": "2026-02-01T00:00:00Z",
|
||||
"comments": {
|
||||
"totalCount": 2,
|
||||
"nodes": comment_nodes,
|
||||
"pageInfo": {
|
||||
"hasNextPage": has_next,
|
||||
"endCursor": "COMMENTS2" if has_next else None,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_retrieve_discussion_paginates_comments_and_replies(create_args):
|
||||
args = create_args()
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
reply_1 = {"id": "reply-1", "body": "first reply"}
|
||||
reply_2 = {"id": "reply-2", "body": "second reply"}
|
||||
comment_1 = _comment("comment-1", "first comment", [reply_1], replies_has_next=True)
|
||||
comment_2 = _comment("comment-2", "second comment")
|
||||
|
||||
responses = [
|
||||
_discussion_page([comment_1], has_next=True),
|
||||
{
|
||||
"node": {
|
||||
"replies": {
|
||||
"totalCount": 2,
|
||||
"nodes": [reply_2],
|
||||
"pageInfo": {"hasNextPage": False, "endCursor": None},
|
||||
}
|
||||
}
|
||||
},
|
||||
_discussion_page([comment_2], has_next=False),
|
||||
]
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_graphql_data", side_effect=responses
|
||||
) as mock_retrieve:
|
||||
discussion = github_backup.retrieve_discussion(args, repository, 42)
|
||||
|
||||
assert discussion["number"] == 42
|
||||
assert discussion["comment_count"] == 2
|
||||
assert len(discussion["comment_data"]) == 2
|
||||
assert discussion["comment_data"][0]["body"] == "first comment"
|
||||
assert discussion["comment_data"][0]["reply_count"] == 2
|
||||
assert [r["body"] for r in discussion["comment_data"][0]["reply_data"]] == [
|
||||
"first reply",
|
||||
"second reply",
|
||||
]
|
||||
assert discussion["comment_data"][1]["body"] == "second comment"
|
||||
assert mock_retrieve.call_count == 3
|
||||
assert [
|
||||
call.kwargs["log_context"] for call in mock_retrieve.call_args_list
|
||||
] == [
|
||||
"discussion owner/repo#42 details/comments page 1",
|
||||
"discussion owner/repo#42 comment comment-1 replies page 2",
|
||||
"discussion owner/repo#42 details/comments page 2",
|
||||
]
|
||||
|
||||
|
||||
def test_backup_discussions_uses_incremental_checkpoint(create_args, tmp_path):
|
||||
args = create_args(token_classic="fake_token", include_discussions=True, incremental=True)
|
||||
repository = {"full_name": "owner/repo"}
|
||||
discussions_dir = tmp_path / "discussions"
|
||||
discussions_dir.mkdir()
|
||||
(discussions_dir / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
|
||||
def fake_summaries(passed_args, passed_repository, since=None):
|
||||
assert passed_args is args
|
||||
assert passed_repository == repository
|
||||
assert since == "2026-01-01T00:00:00Z"
|
||||
return (
|
||||
[{"number": 7, "title": "updated", "updatedAt": "2026-02-01T00:00:00Z"}],
|
||||
"2026-02-01T00:00:00Z",
|
||||
True,
|
||||
1,
|
||||
)
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_discussion_summaries",
|
||||
side_effect=fake_summaries,
|
||||
), patch(
|
||||
"github_backup.github_backup.retrieve_discussion",
|
||||
return_value={"number": 7, "title": "updated"},
|
||||
):
|
||||
github_backup.backup_discussions(args, tmp_path, repository)
|
||||
|
||||
with open(discussions_dir / "7.json", encoding="utf-8") as f:
|
||||
assert json.load(f) == {"number": 7, "title": "updated"}
|
||||
assert (discussions_dir / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
|
||||
|
||||
def test_backup_discussions_does_not_advance_checkpoint_on_discussion_error(
|
||||
create_args, tmp_path
|
||||
):
|
||||
args = create_args(token_classic="fake_token", include_discussions=True, incremental=True)
|
||||
repository = {"full_name": "owner/repo"}
|
||||
discussions_dir = tmp_path / "discussions"
|
||||
discussions_dir.mkdir()
|
||||
(discussions_dir / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_discussion_summaries",
|
||||
return_value=(
|
||||
[{"number": 7, "title": "updated", "updatedAt": "2026-02-01T00:00:00Z"}],
|
||||
"2026-02-01T00:00:00Z",
|
||||
True,
|
||||
1,
|
||||
),
|
||||
), patch(
|
||||
"github_backup.github_backup.retrieve_discussion",
|
||||
side_effect=Exception("temporary GraphQL error"),
|
||||
):
|
||||
github_backup.backup_discussions(args, tmp_path, repository)
|
||||
|
||||
assert (discussions_dir / "last_update").read_text() == "2026-01-01T00:00:00Z"
|
||||
assert not os.path.exists(discussions_dir / "7.json")
|
||||
|
||||
|
||||
def test_backup_discussions_skips_without_auth(create_args, tmp_path):
|
||||
args = create_args(include_discussions=True)
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
with patch("github_backup.github_backup.retrieve_discussion_summaries") as mock_retrieve:
|
||||
github_backup.backup_discussions(args, tmp_path, repository)
|
||||
|
||||
assert not mock_retrieve.called
|
||||
assert not os.path.exists(tmp_path / "discussions")
|
||||
@@ -1,13 +1,28 @@
|
||||
"""Tests for HTTP 451 (DMCA takedown) handling."""
|
||||
"""Tests for HTTP 451 (DMCA takedown) and HTTP 403 (TOS) handling."""
|
||||
|
||||
import io
|
||||
import json
|
||||
from unittest.mock import Mock, patch
|
||||
from unittest.mock import patch
|
||||
from urllib.error import HTTPError
|
||||
|
||||
import pytest
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def _make_http_error(code, body_bytes, msg="Error", headers=None):
|
||||
"""Create an HTTPError with a readable body (like a real urllib response)."""
|
||||
if headers is None:
|
||||
headers = {"x-ratelimit-remaining": "5000"}
|
||||
return HTTPError(
|
||||
url="https://api.github.com/repos/test/repo",
|
||||
code=code,
|
||||
msg=msg,
|
||||
hdrs=headers,
|
||||
fp=io.BytesIO(body_bytes),
|
||||
)
|
||||
|
||||
|
||||
class TestHTTP451Exception:
|
||||
"""Test suite for HTTP 451 DMCA takedown exception handling."""
|
||||
|
||||
@@ -15,9 +30,6 @@ class TestHTTP451Exception:
|
||||
"""HTTP 451 should raise RepositoryUnavailableError with DMCA URL."""
|
||||
args = create_args()
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 451
|
||||
|
||||
dmca_data = {
|
||||
"message": "Repository access blocked",
|
||||
"block": {
|
||||
@@ -26,66 +38,166 @@ class TestHTTP451Exception:
|
||||
"html_url": "https://github.com/github/dmca/blob/master/2024/11/2024-11-04-source-code.md",
|
||||
},
|
||||
}
|
||||
mock_response.read.return_value = json.dumps(dmca_data).encode("utf-8")
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
mock_response.reason = "Unavailable For Legal Reasons"
|
||||
body = json.dumps(dmca_data).encode("utf-8")
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.make_request_with_retry",
|
||||
return_value=mock_response,
|
||||
):
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(451, body, msg="Unavailable For Legal Reasons")
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/dmca/issues"
|
||||
)
|
||||
|
||||
assert (
|
||||
exc_info.value.dmca_url
|
||||
exc_info.value.legal_url
|
||||
== "https://github.com/github/dmca/blob/master/2024/11/2024-11-04-source-code.md"
|
||||
)
|
||||
assert "451" in str(exc_info.value)
|
||||
|
||||
def test_repository_unavailable_error_without_dmca_url(self, create_args):
|
||||
def test_repository_unavailable_error_without_legal_url(self, create_args):
|
||||
"""HTTP 451 without DMCA details should still raise exception."""
|
||||
args = create_args()
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 451
|
||||
mock_response.read.return_value = b'{"message": "Blocked"}'
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
mock_response.reason = "Unavailable For Legal Reasons"
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(451, b'{"message": "Blocked"}')
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.make_request_with_retry",
|
||||
return_value=mock_response,
|
||||
):
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/dmca/issues"
|
||||
)
|
||||
|
||||
assert exc_info.value.dmca_url is None
|
||||
assert exc_info.value.legal_url is None
|
||||
assert "451" in str(exc_info.value)
|
||||
|
||||
def test_repository_unavailable_error_with_malformed_json(self, create_args):
|
||||
"""HTTP 451 with malformed JSON should still raise exception."""
|
||||
args = create_args()
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 451
|
||||
mock_response.read.return_value = b"invalid json {"
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
mock_response.reason = "Unavailable For Legal Reasons"
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(451, b"invalid json {")
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.make_request_with_retry",
|
||||
return_value=mock_response,
|
||||
):
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError):
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/dmca/issues"
|
||||
)
|
||||
|
||||
|
||||
class TestHTTP403TOS:
|
||||
"""Test suite for HTTP 403 TOS violation handling."""
|
||||
|
||||
def test_403_tos_raises_repository_unavailable(self, create_args):
|
||||
"""HTTP 403 (non-rate-limit) should raise RepositoryUnavailableError."""
|
||||
args = create_args()
|
||||
|
||||
tos_data = {
|
||||
"message": "Repository access blocked",
|
||||
"block": {
|
||||
"reason": "tos",
|
||||
"html_url": "https://github.com/contact/tos-violation",
|
||||
},
|
||||
}
|
||||
body = json.dumps(tos_data).encode("utf-8")
|
||||
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(
|
||||
403,
|
||||
body,
|
||||
msg="Forbidden",
|
||||
headers={"x-ratelimit-remaining": "5000"},
|
||||
)
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/blocked/issues"
|
||||
)
|
||||
|
||||
assert (
|
||||
exc_info.value.legal_url == "https://github.com/contact/tos-violation"
|
||||
)
|
||||
assert "403" in str(exc_info.value)
|
||||
|
||||
def test_403_permission_denied_not_converted(self, create_args):
|
||||
"""HTTP 403 without 'block' in body should propagate as HTTPError, not RepositoryUnavailableError."""
|
||||
args = create_args()
|
||||
|
||||
body = json.dumps({"message": "Must have admin rights to Repository."}).encode(
|
||||
"utf-8"
|
||||
)
|
||||
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(
|
||||
403,
|
||||
body,
|
||||
msg="Forbidden",
|
||||
headers={"x-ratelimit-remaining": "5000"},
|
||||
)
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(HTTPError) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/private/issues"
|
||||
)
|
||||
|
||||
assert exc_info.value.code == 403
|
||||
|
||||
def test_403_rate_limit_not_converted(self, create_args):
|
||||
"""HTTP 403 with rate limit exhausted should NOT become RepositoryUnavailableError."""
|
||||
args = create_args()
|
||||
|
||||
call_count = 0
|
||||
|
||||
def mock_urlopen(*a, **kw):
|
||||
nonlocal call_count
|
||||
call_count += 1
|
||||
raise _make_http_error(
|
||||
403,
|
||||
b'{"message": "rate limit"}',
|
||||
msg="Forbidden",
|
||||
headers={"x-ratelimit-remaining": "0"},
|
||||
)
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with patch(
|
||||
"github_backup.github_backup.calculate_retry_delay", return_value=0
|
||||
):
|
||||
with pytest.raises(HTTPError) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/ratelimit/issues"
|
||||
)
|
||||
|
||||
assert exc_info.value.code == 403
|
||||
# Should have retried (not raised immediately as RepositoryUnavailableError)
|
||||
assert call_count > 1
|
||||
|
||||
|
||||
class TestRetrieveRepositoriesUnavailable:
|
||||
"""Test that retrieve_repositories handles RepositoryUnavailableError gracefully."""
|
||||
|
||||
def test_unavailable_repo_returns_empty_list(self, create_args):
|
||||
"""retrieve_repositories should return [] when the repo is unavailable."""
|
||||
args = create_args(repository="blocked-repo")
|
||||
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(
|
||||
451,
|
||||
json.dumps(
|
||||
{
|
||||
"message": "Blocked",
|
||||
"block": {"html_url": "https://example.com/dmca"},
|
||||
}
|
||||
).encode("utf-8"),
|
||||
msg="Unavailable For Legal Reasons",
|
||||
)
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
repos = github_backup.retrieve_repositories(args, {"login": None})
|
||||
|
||||
assert repos == []
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main([__file__, "-v"])
|
||||
|
||||
189
tests/test_incremental_per_repository.py
Normal file
189
tests/test_incremental_per_repository.py
Normal file
@@ -0,0 +1,189 @@
|
||||
"""Tests for per-resource incremental checkpoints."""
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def _repo(name, updated_at, pushed_at=None):
|
||||
return {
|
||||
"name": name,
|
||||
"full_name": "owner/{0}".format(name),
|
||||
"owner": {"login": "owner"},
|
||||
"clone_url": "https://github.com/owner/{0}.git".format(name),
|
||||
"private": False,
|
||||
"fork": False,
|
||||
"has_wiki": False,
|
||||
"updated_at": updated_at,
|
||||
"pushed_at": pushed_at,
|
||||
}
|
||||
|
||||
|
||||
def test_incremental_uses_per_resource_last_update(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(incremental=True, include_issues=True)
|
||||
repositories = [
|
||||
_repo("repo-one", "2026-02-01T00:00:00Z"),
|
||||
_repo("repo-two", "2026-03-01T00:00:00Z"),
|
||||
]
|
||||
repo_one_issues = tmp_path / "repositories" / "repo-one" / "issues"
|
||||
repo_two_issues = tmp_path / "repositories" / "repo-two" / "issues"
|
||||
repo_one_issues.mkdir(parents=True)
|
||||
repo_two_issues.mkdir(parents=True)
|
||||
(repo_one_issues / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
(repo_two_issues / "last_update").write_text("2025-01-01T00:00:00Z")
|
||||
|
||||
seen_since = []
|
||||
|
||||
def fake_backup_issues(passed_args, repo_cwd, repository, repos_template):
|
||||
seen_since.append((repository["name"], passed_args.since))
|
||||
|
||||
monkeypatch.setattr(github_backup, "backup_issues", fake_backup_issues)
|
||||
|
||||
github_backup.backup_repositories(args, tmp_path, repositories)
|
||||
|
||||
assert seen_since == [
|
||||
("repo-one", "2026-01-01T00:00:00Z"),
|
||||
("repo-two", "2025-01-01T00:00:00Z"),
|
||||
]
|
||||
assert (repo_one_issues / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
assert (repo_two_issues / "last_update").read_text() == "2026-03-01T00:00:00Z"
|
||||
assert not os.path.exists(tmp_path / "last_update")
|
||||
|
||||
|
||||
def test_incremental_uses_independent_issue_and_pull_checkpoints(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(incremental=True, include_issues=True, include_pulls=True)
|
||||
repository = _repo("repo-one", "2026-02-01T00:00:00Z")
|
||||
repo_dir = tmp_path / "repositories" / "repo-one"
|
||||
issues_dir = repo_dir / "issues"
|
||||
pulls_dir = repo_dir / "pulls"
|
||||
issues_dir.mkdir(parents=True)
|
||||
pulls_dir.mkdir(parents=True)
|
||||
(issues_dir / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
(pulls_dir / "last_update").write_text("2025-01-01T00:00:00Z")
|
||||
|
||||
seen_since = []
|
||||
|
||||
def fake_backup_issues(passed_args, repo_cwd, repository, repos_template):
|
||||
seen_since.append(("issues", passed_args.since))
|
||||
|
||||
def fake_backup_pulls(passed_args, repo_cwd, repository, repos_template):
|
||||
seen_since.append(("pulls", passed_args.since))
|
||||
|
||||
monkeypatch.setattr(github_backup, "backup_issues", fake_backup_issues)
|
||||
monkeypatch.setattr(github_backup, "backup_pulls", fake_backup_pulls)
|
||||
|
||||
github_backup.backup_repositories(args, tmp_path, [repository])
|
||||
|
||||
assert seen_since == [
|
||||
("issues", "2026-01-01T00:00:00Z"),
|
||||
("pulls", "2025-01-01T00:00:00Z"),
|
||||
]
|
||||
assert (issues_dir / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
assert (pulls_dir / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
|
||||
|
||||
def test_incremental_uses_legacy_global_last_update_for_existing_resource_backup(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(incremental=True, include_issues=True)
|
||||
repository = _repo("repo-one", "2026-02-01T00:00:00Z")
|
||||
(tmp_path / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
issues_dir = tmp_path / "repositories" / "repo-one" / "issues"
|
||||
issues_dir.mkdir(parents=True)
|
||||
with open(issues_dir / "1.json", "w", encoding="utf-8") as f:
|
||||
json.dump({"number": 1}, f)
|
||||
|
||||
seen_since = []
|
||||
|
||||
def fake_backup_issues(passed_args, repo_cwd, repository, repos_template):
|
||||
seen_since.append(passed_args.since)
|
||||
|
||||
monkeypatch.setattr(github_backup, "backup_issues", fake_backup_issues)
|
||||
|
||||
github_backup.backup_repositories(args, tmp_path, [repository])
|
||||
|
||||
assert seen_since == ["2026-01-01T00:00:00Z"]
|
||||
assert (issues_dir / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
assert not os.path.exists(tmp_path / "last_update")
|
||||
|
||||
|
||||
def test_incremental_does_not_use_legacy_global_last_update_for_new_resource_backup(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(incremental=True, include_issues=True)
|
||||
repository = _repo("repo-one", "2026-02-01T00:00:00Z")
|
||||
(tmp_path / "last_update").write_text("2099-01-01T00:00:00Z")
|
||||
|
||||
seen_since = []
|
||||
|
||||
def fake_backup_issues(passed_args, repo_cwd, repository, repos_template):
|
||||
seen_since.append(passed_args.since)
|
||||
|
||||
monkeypatch.setattr(github_backup, "backup_issues", fake_backup_issues)
|
||||
|
||||
github_backup.backup_repositories(args, tmp_path, [repository])
|
||||
|
||||
assert seen_since == [None]
|
||||
assert (
|
||||
tmp_path / "repositories" / "repo-one" / "issues" / "last_update"
|
||||
).read_text() == "2026-02-01T00:00:00Z"
|
||||
assert not os.path.exists(tmp_path / "last_update")
|
||||
|
||||
|
||||
def test_incremental_keeps_legacy_global_last_update_until_all_existing_resources_migrated(
    create_args, tmp_path, monkeypatch
):
    """The legacy global checkpoint survives while any existing resource is unmigrated."""
    args = create_args(incremental=True, include_issues=True)
    repository = _repo("repo-one", "2026-02-01T00:00:00Z")
    (tmp_path / "last_update").write_text("2026-01-01T00:00:00Z")

    # Two repos with prior issue data, but only repo-one is backed up this run.
    repo_one_issues = tmp_path / "repositories" / "repo-one" / "issues"
    repo_two_issues = tmp_path / "repositories" / "repo-two" / "issues"
    repo_one_issues.mkdir(parents=True)
    repo_two_issues.mkdir(parents=True)
    (repo_one_issues / "1.json").write_text(json.dumps({"number": 1}), encoding="utf-8")
    (repo_two_issues / "2.json").write_text(json.dumps({"number": 2}), encoding="utf-8")

    def fake_backup_issues(passed_args, repo_cwd, repository, repos_template):
        pass

    monkeypatch.setattr(github_backup, "backup_issues", fake_backup_issues)

    github_backup.backup_repositories(args, tmp_path, [repository])

    # repo-one migrated to a per-resource checkpoint; repo-two did not.
    assert (repo_one_issues / "last_update").read_text() == "2026-02-01T00:00:00Z"
    assert not (repo_two_issues / "last_update").exists()
    # The legacy checkpoint must remain until every existing resource migrates.
    assert (tmp_path / "last_update").read_text() == "2026-01-01T00:00:00Z"
|
||||
|
||||
|
||||
def test_incremental_does_not_remove_legacy_checkpoint_without_resource_work(
    create_args, tmp_path
):
    """A run that performs no resource backups leaves the legacy checkpoint alone."""
    args = create_args(incremental=True, include_repository=True)
    repository = _repo("repo-one", "2026-02-01T00:00:00Z")
    (tmp_path / "last_update").write_text("2026-01-01T00:00:00Z")

    github_backup.backup_repositories(args, tmp_path, [repository])

    # Legacy checkpoint untouched, no per-resource checkpoint created.
    assert (tmp_path / "last_update").read_text() == "2026-01-01T00:00:00Z"
    expected_absent = tmp_path / "repositories" / "repo-one" / "issues" / "last_update"
    assert not expected_absent.exists()
|
||||
|
||||
|
||||
def test_repository_checkpoint_time_uses_newest_available_repo_timestamp():
    """With both timestamps set, the newer pushed_at wins over updated_at."""
    repository = _repo(
        "repo-one",
        updated_at="2026-02-01T00:00:00Z",
        pushed_at="2026-03-01T00:00:00Z",
    )

    checkpoint = github_backup.get_repository_checkpoint_time(repository)
    assert checkpoint == "2026-03-01T00:00:00Z"
|
||||
131
tests/test_pull_incremental_pagination.py
Normal file
131
tests/test_pull_incremental_pagination.py
Normal file
@@ -0,0 +1,131 @@
|
||||
"""Tests for incremental pull request pagination."""
|
||||
|
||||
import json
|
||||
import os
|
||||
from unittest.mock import patch
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
class MockHTTPResponse:
    """Minimal stand-in for an HTTP response object.

    Serves a fixed JSON payload exactly once via read(), always reports
    status 200, and exposes a headers dict that optionally carries a
    pagination Link header.
    """

    def __init__(self, data, link_header=None):
        # Payload is serialized up front; read() drains it on first call.
        self._content = json.dumps(data).encode("utf-8")
        self._link_header = link_header
        self._read = False
        self.reason = "OK"

    def getcode(self):
        """Always report HTTP 200."""
        return 200

    def read(self):
        """Return the payload on the first call, empty bytes afterwards."""
        if not self._read:
            self._read = True
            return self._content
        return b""

    @property
    def headers(self):
        """Rate-limit header plus an optional pagination Link header."""
        result = {"x-ratelimit-remaining": "5000"}
        if self._link_header:
            result["Link"] = self._link_header
        return result
|
||||
|
||||
|
||||
def test_backup_pulls_incremental_excludes_checkpoint_timestamp(create_args, tmp_path):
    """A pull updated exactly at the checkpoint is skipped and stops pagination."""
    args = create_args(include_pulls=True, incremental=True)
    args.since = "2026-04-26T08:13:46Z"
    repository = {"full_name": "owner/repo"}

    responses = [
        # open pulls: none
        MockHTTPResponse([]),
        # closed pulls page 1: one pull updated exactly at the checkpoint
        MockHTTPResponse(
            [
                {
                    "number": 1,
                    "title": "already backed up",
                    "updated_at": "2026-04-26T08:13:46Z",
                },
            ],
            link_header='<https://api.github.com/repos/owner/repo/pulls?per_page=100&state=closed&page=2>; rel="next"',
        ),
        # closed pulls page 2: must never be requested
        MockHTTPResponse(
            [
                {
                    "number": 0,
                    "title": "older pull on page 2",
                    "updated_at": "2026-04-25T07:00:00Z",
                }
            ]
        ),
    ]
    seen_urls = []

    def mock_urlopen(request, *args, **kwargs):
        seen_urls.append(request.get_full_url())
        return responses[len(seen_urls) - 1]

    with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
        github_backup.backup_pulls(
            args, tmp_path, repository, "https://api.github.com/repos"
        )

    # Only the open and closed first pages are fetched; page 2 is never hit.
    assert len(seen_urls) == 2
    assert "state=open" in seen_urls[0]
    assert "state=closed" in seen_urls[1]
    assert all("page=2" not in url for url in seen_urls)
    # Neither the checkpoint-equal pull nor the older one is written to disk.
    assert not (tmp_path / "pulls" / "1.json").exists()
    assert not (tmp_path / "pulls" / "0.json").exists()
|
||||
|
||||
|
||||
def test_backup_pulls_incremental_stops_before_fetching_old_pages(
    create_args, tmp_path
):
    """Once a page contains a pull older than the checkpoint, pagination stops."""
    args = create_args(include_pulls=True, incremental=True)
    args.since = "2026-04-26T08:13:46Z"
    repository = {"full_name": "owner/repo"}

    responses = [
        # open pulls: none
        MockHTTPResponse([]),
        # closed pulls page 1: one new pull, one older than the checkpoint
        MockHTTPResponse(
            [
                {
                    "number": 2,
                    "title": "new pull",
                    "updated_at": "2026-04-26T09:00:00Z",
                },
                {
                    "number": 1,
                    "title": "old pull",
                    "updated_at": "2026-04-26T07:00:00Z",
                },
            ],
            link_header='<https://api.github.com/repos/owner/repo/pulls?per_page=100&state=closed&page=2>; rel="next"',
        ),
        # closed pulls page 2: must never be requested
        MockHTTPResponse(
            [
                {
                    "number": 0,
                    "title": "older pull on page 2",
                    "updated_at": "2026-04-25T07:00:00Z",
                }
            ]
        ),
    ]
    seen_urls = []

    def mock_urlopen(request, *args, **kwargs):
        seen_urls.append(request.get_full_url())
        return responses[len(seen_urls) - 1]

    with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
        github_backup.backup_pulls(
            args, tmp_path, repository, "https://api.github.com/repos"
        )

    # Page 2 is never fetched because page 1 already crossed the checkpoint.
    assert len(seen_urls) == 2
    assert "state=open" in seen_urls[0]
    assert "state=closed" in seen_urls[1]
    assert all("page=2" not in url for url in seen_urls)
    # Only the pull newer than the checkpoint is written to disk.
    assert (tmp_path / "pulls" / "2.json").exists()
    assert not (tmp_path / "pulls" / "1.json").exists()
    assert not (tmp_path / "pulls" / "0.json").exists()
|
||||
237
tests/test_pull_reviews.py
Normal file
237
tests/test_pull_reviews.py
Normal file
@@ -0,0 +1,237 @@
|
||||
"""Tests for pull request review backups."""
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def test_parse_args_pull_reviews_flag():
    """The --pull-reviews CLI flag enables include_pull_reviews."""
    parsed = github_backup.parse_args(["--pull-reviews", "testuser"])
    assert parsed.include_pull_reviews is True
|
||||
|
||||
|
||||
def test_backup_pulls_includes_review_data(create_args, tmp_path, monkeypatch):
    """With --pull-reviews, review data is fetched and embedded in the pull JSON."""
    args = create_args(include_pulls=True, include_pull_reviews=True)
    repository = {"full_name": "owner/repo"}
    recorded_calls = []

    def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
        recorded_calls.append((template, query_args))
        if template == "https://api.github.com/repos/owner/repo/pulls":
            # One open pull; the closed listing is empty.
            if query_args["state"] == "open":
                return [
                    {
                        "number": 1,
                        "updated_at": "2026-02-01T00:00:00Z",
                        "title": "Add feature",
                    }
                ]
            return []
        if template == "https://api.github.com/repos/owner/repo/pulls/1/reviews":
            return [
                {
                    "id": 123,
                    "state": "APPROVED",
                    "body": "Looks good",
                    "submitted_at": "2026-02-01T00:00:00Z",
                }
            ]
        raise AssertionError("Unexpected template: {0}".format(template))

    monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)

    github_backup.backup_pulls(
        args, tmp_path, repository, "https://api.github.com/repos"
    )

    pull = json.loads((tmp_path / "pulls" / "1.json").read_text(encoding="utf-8"))

    assert pull["review_data"] == [
        {
            "body": "Looks good",
            "id": 123,
            "state": "APPROVED",
            "submitted_at": "2026-02-01T00:00:00Z",
        }
    ]
    # The reviews endpoint was actually requested.
    assert (
        "https://api.github.com/repos/owner/repo/pulls/1/reviews",
        None,
    ) in recorded_calls
|
||||
|
||||
|
||||
def test_pull_reviews_backfill_ignores_repository_checkpoint(
    create_args, tmp_path, monkeypatch
):
    """First review backfill processes pulls older than the repository checkpoint."""
    args = create_args(
        include_pulls=True,
        include_pull_reviews=True,
        incremental=True,
    )
    args.since = "2026-01-01T00:00:00Z"
    repository = {"full_name": "owner/repo"}

    def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
        if template == "https://api.github.com/repos/owner/repo/pulls":
            # A pull older than args.since — backfill must still pick it up.
            if query_args["state"] == "open":
                return [
                    {
                        "number": 1,
                        "updated_at": "2025-01-01T00:00:00Z",
                        "title": "Old pull request",
                    }
                ]
            return []
        if template == "https://api.github.com/repos/owner/repo/pulls/1/reviews":
            return [{"id": 123, "state": "APPROVED"}]
        raise AssertionError("Unexpected template: {0}".format(template))

    monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)

    github_backup.backup_pulls(
        args, tmp_path, repository, "https://api.github.com/repos"
    )

    pull = json.loads((tmp_path / "pulls" / "1.json").read_text(encoding="utf-8"))

    assert pull["review_data"] == [{"id": 123, "state": "APPROVED"}]
    # The review checkpoint advances to the pull's own timestamp.
    assert (tmp_path / "pulls" / "reviews_last_update").read_text() == (
        "2025-01-01T00:00:00Z"
    )
|
||||
|
||||
|
||||
def test_pull_reviews_uses_review_checkpoint_when_older_than_repository_checkpoint(
    create_args, tmp_path, monkeypatch
):
    """The older review checkpoint governs which pulls get re-fetched."""
    args = create_args(
        include_pulls=True,
        include_pull_reviews=True,
        incremental=True,
    )
    args.since = "2026-01-01T00:00:00Z"
    repository = {"full_name": "owner/repo"}
    pulls_dir = tmp_path / "pulls"
    pulls_dir.mkdir()
    # Review checkpoint is older than the repository checkpoint (args.since).
    (pulls_dir / "reviews_last_update").write_text("2025-01-01T00:00:00Z")

    def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
        if template == "https://api.github.com/repos/owner/repo/pulls":
            if query_args["state"] == "open":
                return [
                    {
                        "number": 1,
                        "updated_at": "2025-06-01T00:00:00Z",
                        "title": "Review changed while feature was disabled",
                    },
                    {
                        "number": 2,
                        "updated_at": "2024-12-01T00:00:00Z",
                        "title": "Too old",
                    },
                ]
            return []
        if template == "https://api.github.com/repos/owner/repo/pulls/1/reviews":
            return [{"id": 123, "state": "COMMENTED"}]
        raise AssertionError("Unexpected template: {0}".format(template))

    monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)

    github_backup.backup_pulls(
        args, tmp_path, repository, "https://api.github.com/repos"
    )

    # Pull #1 falls between the review and repo checkpoints -> fetched;
    # pull #2 predates the review checkpoint -> skipped.
    assert (pulls_dir / "1.json").exists()
    assert not (pulls_dir / "2.json").exists()
    assert (pulls_dir / "reviews_last_update").read_text() == (
        "2025-06-01T00:00:00Z"
    )
|
||||
|
||||
|
||||
def test_pull_reviews_preserves_existing_optional_pull_data(
    create_args, tmp_path, monkeypatch
):
    """Re-writing a pull with review data keeps previously saved optional fields."""
    args = create_args(include_pulls=True, include_pull_reviews=True)
    repository = {"full_name": "owner/repo"}
    pulls_dir = tmp_path / "pulls"
    pulls_dir.mkdir()
    # Existing on-disk pull already carries comment and commit data.
    existing_pull = {
        "number": 1,
        "updated_at": "2026-01-01T00:00:00Z",
        "comment_data": [{"id": 10, "body": "inline comment"}],
        "comment_regular_data": [{"id": 11, "body": "regular comment"}],
        "commit_data": [{"sha": "abc"}],
    }
    (pulls_dir / "1.json").write_text(json.dumps(existing_pull), encoding="utf-8")

    def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
        if template == "https://api.github.com/repos/owner/repo/pulls":
            if query_args["state"] == "open":
                return [
                    {
                        "number": 1,
                        "updated_at": "2026-02-01T00:00:00Z",
                        "title": "Add reviews",
                    }
                ]
            return []
        if template == "https://api.github.com/repos/owner/repo/pulls/1/reviews":
            return [{"id": 123, "state": "APPROVED"}]
        raise AssertionError("Unexpected template: {0}".format(template))

    monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)

    github_backup.backup_pulls(
        args, tmp_path, repository, "https://api.github.com/repos"
    )

    pull = json.loads((pulls_dir / "1.json").read_text(encoding="utf-8"))

    # New review data added; previously saved optional data untouched.
    assert pull["review_data"] == [{"id": 123, "state": "APPROVED"}]
    assert pull["comment_data"] == [{"id": 10, "body": "inline comment"}]
    assert pull["comment_regular_data"] == [{"id": 11, "body": "regular comment"}]
    assert pull["commit_data"] == [{"sha": "abc"}]
|
||||
|
||||
|
||||
def test_pull_reviews_does_not_advance_checkpoint_on_review_error(
    create_args, tmp_path, monkeypatch
):
    """A failed review fetch leaves the review checkpoint untouched."""
    args = create_args(
        include_pulls=True,
        include_pull_reviews=True,
        incremental=True,
    )
    args.since = "2026-01-01T00:00:00Z"
    repository = {"full_name": "owner/repo"}
    pulls_dir = tmp_path / "pulls"
    pulls_dir.mkdir()
    (pulls_dir / "reviews_last_update").write_text("2025-01-01T00:00:00Z")

    def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
        if template == "https://api.github.com/repos/owner/repo/pulls":
            if query_args["state"] == "open":
                return [
                    {
                        "number": 1,
                        "updated_at": "2025-06-01T00:00:00Z",
                        "title": "Review retrieval fails",
                    }
                ]
            return []
        if template == "https://api.github.com/repos/owner/repo/pulls/1/reviews":
            # Simulate a transient API failure during review retrieval.
            raise Exception("temporary API failure")
        raise AssertionError("Unexpected template: {0}".format(template))

    monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)

    github_backup.backup_pulls(
        args, tmp_path, repository, "https://api.github.com/repos"
    )

    # Checkpoint must stay put so the failed pull is retried next run.
    assert (pulls_dir / "reviews_last_update").read_text() == "2025-01-01T00:00:00Z"
|
||||
95
tests/test_releases.py
Normal file
95
tests/test_releases.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""Tests for release backup behavior."""
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def test_backup_releases_uses_embedded_assets_without_extra_asset_list_request(
    create_args, tmp_path, monkeypatch
):
    """When a release embeds its assets, no extra assets_url request is made."""
    args = create_args(include_releases=True, include_assets=True)
    repository = {"full_name": "owner/repo", "name": "repo"}
    requested_urls = []
    downloaded = []

    def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
        requested_urls.append(template)
        if template == "https://api.github.com/repos/owner/repo/releases":
            return [
                {
                    "tag_name": "v1.0.0",
                    "created_at": "2026-01-01T00:00:00Z",
                    "updated_at": "2026-01-01T00:00:00Z",
                    "prerelease": False,
                    "draft": False,
                    "assets_url": "https://api.github.com/repos/owner/repo/releases/1/assets",
                    # Embedded assets should be used directly.
                    "assets": [
                        {
                            "name": "artifact.zip",
                            "url": "https://api.github.com/repos/owner/repo/releases/assets/1",
                        }
                    ],
                }
            ]
        raise AssertionError("Unexpected API request: {0}".format(template))

    def fake_download_file(url, path, auth, as_app=False, fine=False):
        downloaded.append((url, path))

    monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)
    monkeypatch.setattr(github_backup, "download_file", fake_download_file)

    github_backup.backup_releases(
        args,
        tmp_path,
        repository,
        "https://api.github.com/repos",
        include_assets=True,
    )

    # Only the releases listing was requested — never the assets_url.
    assert requested_urls == ["https://api.github.com/repos/owner/repo/releases"]
    assert downloaded == [
        (
            "https://api.github.com/repos/owner/repo/releases/assets/1",
            str(tmp_path / "releases" / "v1.0.0" / "artifact.zip"),
        )
    ]
|
||||
|
||||
|
||||
def test_backup_releases_falls_back_to_assets_url_when_assets_missing(
    create_args, tmp_path, monkeypatch
):
    """Without an embedded assets list, the release's assets_url is queried."""
    args = create_args(include_releases=True, include_assets=True)
    repository = {"full_name": "owner/repo", "name": "repo"}
    requested_urls = []

    def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
        requested_urls.append(template)
        if template == "https://api.github.com/repos/owner/repo/releases":
            # Release payload lacks an "assets" key entirely.
            return [
                {
                    "tag_name": "v1.0.0",
                    "created_at": "2026-01-01T00:00:00Z",
                    "updated_at": "2026-01-01T00:00:00Z",
                    "prerelease": False,
                    "draft": False,
                    "assets_url": "https://api.github.com/repos/owner/repo/releases/1/assets",
                }
            ]
        if template == "https://api.github.com/repos/owner/repo/releases/1/assets":
            return []
        raise AssertionError("Unexpected API request: {0}".format(template))

    monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)

    github_backup.backup_releases(
        args,
        tmp_path,
        repository,
        "https://api.github.com/repos",
        include_assets=True,
    )

    # Both the releases listing and the fallback assets_url are requested.
    assert requested_urls == [
        "https://api.github.com/repos/owner/repo/releases",
        "https://api.github.com/repos/owner/repo/releases/1/assets",
    ]
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Tests for retrieve_data function."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import socket
|
||||
from unittest.mock import Mock, patch
|
||||
from urllib.error import HTTPError, URLError
|
||||
@@ -288,6 +289,28 @@ class TestMakeRequestWithRetry:
|
||||
assert exc_info.value.code == 403
|
||||
assert call_count == 1 # No retries
|
||||
|
||||
def test_451_error_not_retried(self):
|
||||
"""HTTP 451 should not be retried - raise immediately."""
|
||||
call_count = 0
|
||||
|
||||
def mock_urlopen(*args, **kwargs):
|
||||
nonlocal call_count
|
||||
call_count += 1
|
||||
raise HTTPError(
|
||||
url="https://api.github.com/test",
|
||||
code=451,
|
||||
msg="Unavailable For Legal Reasons",
|
||||
hdrs={"x-ratelimit-remaining": "5000"},
|
||||
fp=None,
|
||||
)
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(HTTPError) as exc_info:
|
||||
make_request_with_retry(Mock(), None)
|
||||
|
||||
assert exc_info.value.code == 451
|
||||
assert call_count == 1 # No retries
|
||||
|
||||
def test_connection_error_retries_and_succeeds(self):
|
||||
"""URLError (connection error) should retry and succeed if subsequent request works."""
|
||||
good_response = Mock()
|
||||
@@ -333,6 +356,33 @@ class TestMakeRequestWithRetry:
|
||||
) # 1 initial + 5 retries = 6 attempts
|
||||
|
||||
|
||||
class TestRetrieveGraphqlDataLogging:
    """Tests for GraphQL request logging."""

    def test_logs_graphql_context(self, create_args, caplog):
        """The log line for a GraphQL request includes the supplied context."""
        args = create_args(token_classic="fake_token")

        # Minimal successful GraphQL response.
        fake_response = Mock()
        fake_response.getcode.return_value = 200
        fake_response.read.return_value = json.dumps({"data": {}}).encode("utf-8")
        fake_response.headers = {"x-ratelimit-remaining": "5000"}

        caplog.set_level(logging.INFO, logger="github_backup.github_backup")
        with patch(
            "github_backup.github_backup.make_request_with_retry",
            return_value=fake_response,
        ):
            github_backup.retrieve_graphql_data(
                args,
                "query { viewer { login } }",
                log_context="discussion owner/repo#1",
            )

        expected = (
            "Requesting https://api.github.com/graphql (discussion owner/repo#1)"
        )
        assert expected in caplog.text
|
||||
|
||||
|
||||
class TestRetrieveDataThrottling:
|
||||
"""Tests for throttling behavior in retrieve_data."""
|
||||
|
||||
|
||||
Reference in New Issue
Block a user