mirror of
https://github.com/josegonzalez/python-github-backup.git
synced 2026-05-01 04:55:34 +02:00
Compare commits
77 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bd6eea02d5 | ||
|
|
72fa5d3dcd | ||
|
|
c5d11c000a | ||
|
|
8e76089565 | ||
|
|
d30d9bfe60 | ||
|
|
a2391a550e | ||
|
|
9340aa3aaa | ||
|
|
543d76f24b | ||
|
|
3cda5a01fd | ||
|
|
17b79fcbef | ||
|
|
f1fca0f9b7 | ||
|
|
ccc27b95f7 | ||
|
|
f3eabf0bfe | ||
|
|
b92aee6f11 | ||
|
|
4d1772319f | ||
|
|
2c7fdab54e | ||
|
|
334c6c6546 | ||
|
|
2f130ecd66 | ||
|
|
ddf7f82e65 | ||
|
|
ddf82f1115 | ||
|
|
0638666bc7 | ||
|
|
f8cdf55050 | ||
|
|
b59f719f10 | ||
|
|
014eff395a | ||
|
|
9d0cfdb61d | ||
|
|
6cd0ab3633 | ||
|
|
02e833e40a | ||
|
|
b3a8241c9a | ||
|
|
d19e2ad9c5 | ||
|
|
24b3fdb4f3 | ||
|
|
013b27208e | ||
|
|
4d022d94d0 | ||
|
|
ed29a917ca | ||
|
|
f4117990b2 | ||
|
|
4c1f21a306 | ||
|
|
9fde6ed1ff | ||
|
|
9a9b069e14 | ||
|
|
f85c759e5d | ||
|
|
26a6e1df1b | ||
|
|
3d961d1118 | ||
|
|
20f9542063 | ||
|
|
bbf76e70eb | ||
|
|
ca70725449 | ||
|
|
653ceb1e12 | ||
|
|
ba1575538b | ||
|
|
d5be07ec80 | ||
|
|
5758e489e8 | ||
|
|
cceef92346 | ||
|
|
7f1807aaf8 | ||
|
|
8a0553a5b1 | ||
|
|
68af1d406a | ||
|
|
b112b43a08 | ||
|
|
f54a5458f6 | ||
|
|
60067650b0 | ||
|
|
655886fa80 | ||
|
|
0162f7ed46 | ||
|
|
8c1a13475a | ||
|
|
6268a4c5c6 | ||
|
|
4b2295db0d | ||
|
|
be900d1f3f | ||
|
|
9be6282719 | ||
|
|
1102990af0 | ||
|
|
311ffb40cd | ||
|
|
2f5e7c2dcf | ||
|
|
0d8a504b02 | ||
|
|
712d22d124 | ||
|
|
e0c9d65225 | ||
|
|
52d996f784 | ||
|
|
e6283f9384 | ||
|
|
1181f811b7 | ||
|
|
856ad5db41 | ||
|
|
c6fa8c7695 | ||
|
|
93e505c07d | ||
|
|
6780d3ad6c | ||
|
|
65bacc27f0 | ||
|
|
ab0eebb175 | ||
|
|
fce4abb74a |
10
.github/workflows/docker.yml
vendored
10
.github/workflows/docker.yml
vendored
@@ -43,13 +43,13 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@v4
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v4
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
uses: docker/metadata-action@v6
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
@@ -68,7 +68,7 @@ jobs:
|
||||
type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@v7
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
|
||||
382
CHANGES.rst
382
CHANGES.rst
@@ -1,9 +1,389 @@
|
||||
Changelog
|
||||
=========
|
||||
|
||||
0.61.0 (2026-01-12)
|
||||
0.62.1 (2026-04-30)
|
||||
-------------------
|
||||
------------------------
|
||||
- Document that nothing is saved by default. [Changaco]
|
||||
- Eliminate trailing spaces. [Changaco]
|
||||
- Remove pointless and unsafe `export`s in examples. [Changaco]
|
||||
- Try to clarify what `--incremental` actually does. [Changaco]
|
||||
- Fix a typo in the README. [Changaco]
|
||||
- Document that `--all` doesn't imply `--attachments` [Changaco]
|
||||
- Rename a function to match what it actually does. [Changaco]
|
||||
- Don't leave files open. [Changaco]
|
||||
- Remove legacy code in `mkdir_p` function. [Changaco]
|
||||
- Don't pass stdin when doing so can't do any good. [Changaco]
|
||||
|
||||
When the child process doesn't inherit stderr, it can't ask the user for input, so it shouldn't inherit stdin either.
|
||||
- Use `subprocess.DEVNULL` instead of emulating it. [Changaco]
|
||||
- Remove bad invocation of the system shell. [Changaco]
|
||||
- Add missing `context` argument to `urlopen` call. [Changaco]
|
||||
- Suppress output of call to `git lfs version` [Changaco]
|
||||
- Handle more network errors. [Changaco]
|
||||
|
||||
```python-traceback
|
||||
Traceback (most recent call last):
|
||||
File ".local/bin/github-backup", line 6, in <module>
|
||||
sys.exit(main())
|
||||
~~~~^^
|
||||
File ".local/share/pipx/venvs/github-backup/lib/python3.14/site-packages/github_backup/cli.py", line 83, in main
|
||||
backup_repositories(args, output_directory, repositories)
|
||||
~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
File ".local/share/pipx/venvs/github-backup/lib/python3.14/site-packages/github_backup/github_backup.py", line 1845, in backup_repositories
|
||||
backup_pulls(args, repo_cwd, repository, repos_template)
|
||||
~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
File ".local/share/pipx/venvs/github-backup/lib/python3.14/site-packages/github_backup/github_backup.py", line 2019, in backup_pulls
|
||||
pulls[number]["commit_data"] = retrieve_data(args, template)
|
||||
~~~~~~~~~~~~~^^^^^^^^^^^^^^^^
|
||||
File ".local/share/pipx/venvs/github-backup/lib/python3.14/site-packages/github_backup/github_backup.py", line 766, in retrieve_data
|
||||
return list(fetch_all())
|
||||
File ".local/share/pipx/venvs/github-backup/lib/python3.14/site-packages/github_backup/github_backup.py", line 717, in fetch_all
|
||||
response = json.loads(http_response.read().decode("utf-8"))
|
||||
~~~~~~~~~~~~~~~~~~^^
|
||||
File "/usr/lib/python3.14/http/client.py", line 500, in read
|
||||
s = self._safe_read(self.length)
|
||||
File "/usr/lib/python3.14/http/client.py", line 648, in _safe_read
|
||||
data = self.fp.read(cursize)
|
||||
File "/usr/lib/python3.14/socket.py", line 725, in readinto
|
||||
return self._sock.recv_into(b)
|
||||
~~~~~~~~~~~~~~~~~~~~^^^
|
||||
File "/usr/lib/python3.14/ssl.py", line 1304, in recv_into
|
||||
return self.read(nbytes, buffer)
|
||||
~~~~~~~~~^^^^^^^^^^^^^^^^
|
||||
File "/usr/lib/python3.14/ssl.py", line 1138, in read
|
||||
return self._sslobj.read(len, buffer)
|
||||
~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^
|
||||
ConnectionResetError: [Errno 104] Connection reset by peer
|
||||
```
|
||||
|
||||
|
||||
0.62.0 (2026-04-29)
|
||||
-------------------
|
||||
- Skip checkpoint-equal incremental items. [Duncan Ogilvie]
|
||||
- Avoid redundant release asset list requests. [Duncan Ogilvie]
|
||||
- Reduce unnecessary pull requests with incremental fetching. [Duncan
|
||||
Ogilvie]
|
||||
- Implement per-resource last_update timestamps. [Duncan Ogilvie]
|
||||
|
||||
Closes #62
|
||||
- Add support for pull request reviews. [Duncan Ogilvie]
|
||||
|
||||
Closes #124
|
||||
- Add support for discussions. [Duncan Ogilvie]
|
||||
|
||||
Closes #290
|
||||
- Add --token-from-gh authentication option. [Duncan Ogilvie]
|
||||
- Chore(deps): bump pytest in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [pytest](https://github.com/pytest-dev/pytest).
|
||||
|
||||
|
||||
Updates `pytest` from 9.0.2 to 9.0.3
|
||||
- [Release notes](https://github.com/pytest-dev/pytest/releases)
|
||||
- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst)
|
||||
- [Commits](https://github.com/pytest-dev/pytest/compare/9.0.2...9.0.3)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: pytest
|
||||
dependency-version: 9.0.3
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-patch
|
||||
dependency-group: python-packages
|
||||
...
|
||||
- Chore(deps): bump black in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [black](https://github.com/psf/black).
|
||||
|
||||
|
||||
Updates `black` from 26.3.0 to 26.3.1
|
||||
- [Release notes](https://github.com/psf/black/releases)
|
||||
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
|
||||
- [Commits](https://github.com/psf/black/compare/26.3.0...26.3.1)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: black
|
||||
dependency-version: 26.3.1
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-patch
|
||||
dependency-group: python-packages
|
||||
...
|
||||
- Chore(deps): bump docker/login-action from 3 to 4. [dependabot[bot]]
|
||||
|
||||
Bumps [docker/login-action](https://github.com/docker/login-action) from 3 to 4.
|
||||
- [Release notes](https://github.com/docker/login-action/releases)
|
||||
- [Commits](https://github.com/docker/login-action/compare/v3...v4)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: docker/login-action
|
||||
dependency-version: '4'
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
...
|
||||
- Chore(deps): bump docker/setup-qemu-action from 3 to 4.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3 to 4.
|
||||
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
|
||||
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3...v4)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: docker/setup-qemu-action
|
||||
dependency-version: '4'
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
...
|
||||
- Chore(deps): bump docker/build-push-action from 6 to 7.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6 to 7.
|
||||
- [Release notes](https://github.com/docker/build-push-action/releases)
|
||||
- [Commits](https://github.com/docker/build-push-action/compare/v6...v7)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: docker/build-push-action
|
||||
dependency-version: '7'
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
...
|
||||
- Chore(deps): bump docker/setup-buildx-action from 3 to 4.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3 to 4.
|
||||
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
|
||||
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3...v4)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: docker/setup-buildx-action
|
||||
dependency-version: '4'
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
...
|
||||
- Chore(deps): bump docker/metadata-action from 5 to 6.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps [docker/metadata-action](https://github.com/docker/metadata-action) from 5 to 6.
|
||||
- [Release notes](https://github.com/docker/metadata-action/releases)
|
||||
- [Commits](https://github.com/docker/metadata-action/compare/v5...v6)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: docker/metadata-action
|
||||
dependency-version: '6'
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
...
|
||||
- Chore(deps): bump the python-packages group with 2 updates.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 2 updates: [black](https://github.com/psf/black) and [setuptools](https://github.com/pypa/setuptools).
|
||||
|
||||
|
||||
Updates `black` from 26.1.0 to 26.3.0
|
||||
- [Release notes](https://github.com/psf/black/releases)
|
||||
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
|
||||
- [Commits](https://github.com/psf/black/compare/26.1.0...26.3.0)
|
||||
|
||||
Updates `setuptools` from 82.0.0 to 82.0.1
|
||||
- [Release notes](https://github.com/pypa/setuptools/releases)
|
||||
- [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst)
|
||||
- [Commits](https://github.com/pypa/setuptools/compare/v82.0.0...v82.0.1)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: black
|
||||
dependency-version: 26.3.0
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-minor
|
||||
dependency-group: python-packages
|
||||
- dependency-name: setuptools
|
||||
dependency-version: 82.0.1
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-patch
|
||||
dependency-group: python-packages
|
||||
...
|
||||
|
||||
|
||||
0.61.5 (2026-02-18)
|
||||
-------------------
|
||||
- Fix empty repository crash due to None timestamp comparison (#489)
|
||||
[Rodos]
|
||||
|
||||
Empty repositories have None for pushed_at/updated_at, causing a
|
||||
TypeError when compared to the last_update string. Use .get() with
|
||||
truthiness check to skip None timestamps in incremental tracking.
|
||||
|
||||
|
||||
0.61.4 (2026-02-16)
|
||||
-------------------
|
||||
- Fix HTTP 451 DMCA and 403 TOS handling regression (#487) [Rodos]
|
||||
|
||||
The DMCA handling added in PR #454 had a bug: make_request_with_retry()
|
||||
raises HTTPError before retrieve_data() could check the status code via
|
||||
getcode(), making the case 451 handler dead code. This also affected
|
||||
HTTP 403 TOS violations (e.g. jumoog/MagiskOnWSA).
|
||||
|
||||
Fix by catching HTTPError in retrieve_data() and converting 451 and
|
||||
blocked 403 responses (identified by "block" key in response body) to
|
||||
RepositoryUnavailableError. Non-block 403s (permissions, scopes) still
|
||||
propagate as HTTPError. Also handle RepositoryUnavailableError in
|
||||
retrieve_repositories() for the --repository case.
|
||||
|
||||
Rewrote tests to mock urlopen (not make_request_with_retry) to exercise
|
||||
the real code path that was previously untested.
|
||||
|
||||
Closes #487
|
||||
- Chore(deps): bump setuptools in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [setuptools](https://github.com/pypa/setuptools).
|
||||
|
||||
|
||||
Updates `setuptools` from 80.10.2 to 82.0.0
|
||||
- [Release notes](https://github.com/pypa/setuptools/releases)
|
||||
- [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst)
|
||||
- [Commits](https://github.com/pypa/setuptools/compare/v80.10.2...v82.0.0)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: setuptools
|
||||
dependency-version: 82.0.0
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
dependency-group: python-packages
|
||||
...
|
||||
- Chore(deps): bump setuptools in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [setuptools](https://github.com/pypa/setuptools).
|
||||
|
||||
|
||||
Updates `setuptools` from 80.10.1 to 80.10.2
|
||||
- [Release notes](https://github.com/pypa/setuptools/releases)
|
||||
- [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst)
|
||||
- [Commits](https://github.com/pypa/setuptools/compare/v80.10.1...v80.10.2)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: setuptools
|
||||
dependency-version: 80.10.2
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-patch
|
||||
dependency-group: python-packages
|
||||
...
|
||||
|
||||
|
||||
0.61.3 (2026-01-24)
|
||||
-------------------
|
||||
- Fix KeyError: 'Private' when using --all flag (#481) [Rodos]
|
||||
|
||||
The repository dictionary uses lowercase "private" key. Use .get() with
|
||||
the correct case to match the pattern used elsewhere in the codebase.
|
||||
|
||||
The bug only affects --all users since --security-advisories short-circuits
|
||||
before the key access.
|
||||
- Chore(deps): bump setuptools in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [setuptools](https://github.com/pypa/setuptools).
|
||||
|
||||
|
||||
Updates `setuptools` from 80.9.0 to 80.10.1
|
||||
- [Release notes](https://github.com/pypa/setuptools/releases)
|
||||
- [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst)
|
||||
- [Commits](https://github.com/pypa/setuptools/compare/v80.9.0...v80.10.1)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: setuptools
|
||||
dependency-version: 80.10.1
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-minor
|
||||
dependency-group: python-packages
|
||||
...
|
||||
|
||||
|
||||
0.61.2 (2026-01-19)
|
||||
-------------------
|
||||
|
||||
Fix
|
||||
~~~
|
||||
- Skip security advisories for private repos unless explicitly
|
||||
requested. [Lukas Bestle]
|
||||
- Handle 404 errors on security advisories. [Lukas Bestle]
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
- Chore(deps): bump black in the python-packages group.
|
||||
[dependabot[bot]]
|
||||
|
||||
Bumps the python-packages group with 1 update: [black](https://github.com/psf/black).
|
||||
|
||||
|
||||
Updates `black` from 25.12.0 to 26.1.0
|
||||
- [Release notes](https://github.com/psf/black/releases)
|
||||
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
|
||||
- [Commits](https://github.com/psf/black/compare/25.12.0...26.1.0)
|
||||
|
||||
---
|
||||
updated-dependencies:
|
||||
- dependency-name: black
|
||||
dependency-version: 26.1.0
|
||||
dependency-type: direct:production
|
||||
update-type: version-update:semver-major
|
||||
dependency-group: python-packages
|
||||
...
|
||||
- Docs: Explain security advisories in README. [Lukas Bestle]
|
||||
- Feat: Only make security advisory dir if successful. [Lukas Bestle]
|
||||
|
||||
Avoids empty directories for private repos
|
||||
|
||||
|
||||
0.61.1 (2026-01-13)
|
||||
-------------------
|
||||
- Refactor test fixtures to use shared create_args helper. [Rodos]
|
||||
|
||||
Uses the real parse_args() function to get CLI defaults, so when
|
||||
new arguments are added they're automatically available to all tests.
|
||||
|
||||
Changes:
|
||||
- Add tests/conftest.py with create_args fixture
|
||||
- Update 8 test files to use shared fixture
|
||||
- Remove duplicate _create_mock_args methods
|
||||
- Remove redundant @pytest.fixture mock_args definitions
|
||||
|
||||
This eliminates the need to update multiple test files when
|
||||
adding new CLI arguments.
|
||||
- Fix fine-grained PAT attachment downloads for private repos (#477)
|
||||
[Rodos]
|
||||
|
||||
Fine-grained personal access tokens cannot download attachments from
|
||||
private repositories directly due to a GitHub platform limitation.
|
||||
|
||||
This adds a workaround for image attachments (/assets/ URLs) using
|
||||
GitHub's Markdown API to convert URLs to JWT-signed URLs that can be
|
||||
downloaded without authentication.
|
||||
|
||||
Changes:
|
||||
- Add get_jwt_signed_url_via_markdown_api() function
|
||||
- Detect fine-grained token + private repo + /assets/ URL upfront
|
||||
- Use JWT workaround for those cases, mark success with jwt_workaround flag
|
||||
- Skip download with skipped_at when workaround fails
|
||||
- Add startup warning when using --attachments with fine-grained tokens
|
||||
- Document limitation in README (file attachments still fail)
|
||||
- Add 6 unit tests for JWT workaround logic
|
||||
|
||||
|
||||
0.61.0 (2026-01-12)
|
||||
-------------------
|
||||
- Docs: Add missing `--retries` argument to README. [Lukas Bestle]
|
||||
- Test: Adapt tests to new argument. [Lukas Bestle]
|
||||
- Feat: Backup of repository security advisories. [Lukas Bestle]
|
||||
|
||||
101
README.rst
101
README.rst
@@ -4,7 +4,7 @@ github-backup
|
||||
|
||||
|PyPI| |Python Versions|
|
||||
|
||||
The package can be used to backup an *entire* `Github <https://github.com/>`_ organization, repository or user account, including starred repos, issues and wikis in the most appropriate format (clones for wikis, json files for issues).
|
||||
The package can be used to backup an *entire* `Github <https://github.com/>`_ organization, repository or user account, including starred repos, issues, discussions and wikis in the most appropriate format (clones for wikis, json files for issues and discussions).
|
||||
|
||||
Requirements
|
||||
============
|
||||
@@ -22,7 +22,7 @@ Using PIP via PyPI::
|
||||
Using PIP via Github (more likely the latest version)::
|
||||
|
||||
pip install git+https://github.com/josegonzalez/python-github-backup.git#egg=github-backup
|
||||
|
||||
|
||||
*Install note for python newcomers:*
|
||||
|
||||
Python scripts are unlikely to be included in your ``$PATH`` by default, this means it cannot be run directly in terminal with ``$ github-backup ...``, you can either add python's install path to your environments ``$PATH`` or call the script directly e.g. using ``$ ~/.local/bin/github-backup``.*
|
||||
@@ -36,16 +36,18 @@ Show the CLI help output::
|
||||
|
||||
CLI Help output::
|
||||
|
||||
github-backup [-h] [-t TOKEN_CLASSIC] [-f TOKEN_FINE] [-q] [--as-app]
|
||||
[-o OUTPUT_DIRECTORY] [-l LOG_LEVEL] [-i]
|
||||
github-backup [-h] [-t TOKEN_CLASSIC] [-f TOKEN_FINE] [--token-from-gh]
|
||||
[-q] [--as-app] [-o OUTPUT_DIRECTORY] [-l LOG_LEVEL] [-i]
|
||||
[--incremental-by-files]
|
||||
[--starred] [--all-starred] [--starred-skip-size-over MB]
|
||||
[--watched] [--followers] [--following] [--all]
|
||||
[--issues] [--issue-comments] [--issue-events] [--pulls]
|
||||
[--pull-comments] [--pull-commits] [--pull-details]
|
||||
[--pull-comments] [--pull-reviews] [--pull-commits]
|
||||
[--pull-details]
|
||||
[--labels] [--hooks] [--milestones] [--security-advisories]
|
||||
[--repositories] [--bare] [--no-prune] [--lfs] [--wikis]
|
||||
[--gists] [--starred-gists] [--skip-archived] [--skip-existing]
|
||||
[--discussions] [--repositories] [--bare] [--no-prune]
|
||||
[--lfs] [--wikis] [--gists] [--starred-gists]
|
||||
[--skip-archived] [--skip-existing]
|
||||
[-L [LANGUAGES ...]] [-N NAME_REGEX] [-H GITHUB_HOST]
|
||||
[-O] [-R REPOSITORY] [-P] [-F] [--prefer-ssh] [-v]
|
||||
[--keychain-name OSX_KEYCHAIN_ITEM_NAME]
|
||||
@@ -71,6 +73,7 @@ CLI Help output::
|
||||
-f, --token-fine TOKEN_FINE
|
||||
fine-grained personal access token (github_pat_....),
|
||||
or path to token (file://...)
|
||||
--token-from-gh read token from GitHub CLI (gh auth token)
|
||||
-q, --quiet suppress log messages less severe than warning, e.g.
|
||||
info
|
||||
--as-app authenticate as github app instead of as a user.
|
||||
@@ -95,6 +98,7 @@ CLI Help output::
|
||||
--issue-events include issue events in backup
|
||||
--pulls include pull requests in backup
|
||||
--pull-comments include pull request review comments in backup
|
||||
--pull-reviews include pull request reviews in backup
|
||||
--pull-commits include pull request commits in backup
|
||||
--pull-details include more pull request details in backup [*]
|
||||
--labels include labels in backup
|
||||
@@ -103,6 +107,7 @@ CLI Help output::
|
||||
--milestones include milestones in backup
|
||||
--security-advisories
|
||||
include security advisories in backup
|
||||
--discussions include discussions in backup
|
||||
--repositories include repository clone in backup
|
||||
--bare clone bare repositories
|
||||
--no-prune disable prune option for git fetch
|
||||
@@ -143,8 +148,8 @@ CLI Help output::
|
||||
applies if including releases
|
||||
--skip-assets-on [SKIP_ASSETS_ON ...]
|
||||
skip asset downloads for these repositories
|
||||
--attachments download user-attachments from issues and pull
|
||||
requests
|
||||
--attachments download user-attachments from issues, pull requests,
|
||||
and discussions
|
||||
--throttle-limit THROTTLE_LIMIT
|
||||
start throttling of GitHub API requests after this
|
||||
amount of API requests remain
|
||||
@@ -171,6 +176,8 @@ The positional argument ``USER`` specifies the user or organization account you
|
||||
|
||||
**Classic tokens** (``-t TOKEN``) are `slightly less secure <https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#personal-access-tokens-classic>`_ as they provide very coarse-grained permissions.
|
||||
|
||||
If you already authenticate with the `GitHub CLI <https://cli.github.com/>`_, you can use ``--token-from-gh`` to read the token with ``gh auth token`` instead of passing a token directly. This avoids placing the token in shell history or process arguments. When ``--github-host`` is set, the token is read with ``gh auth token --hostname HOST``.
|
||||
|
||||
|
||||
Fine Tokens
|
||||
~~~~~~~~~~~
|
||||
@@ -181,7 +188,7 @@ Customise the permissions for your use case, but for a personal account full bac
|
||||
|
||||
**User permissions**: Read access to followers, starring, and watching.
|
||||
|
||||
**Repository permissions**: Read access to contents, issues, metadata, pull requests, and webhooks.
|
||||
**Repository permissions**: Read access to contents, discussions, issues, metadata, pull requests, and webhooks.
|
||||
|
||||
|
||||
GitHub Apps
|
||||
@@ -242,7 +249,7 @@ Note: When you run github-backup, you will be asked whether you want to allow "
|
||||
Github Rate-limit and Throttling
|
||||
--------------------------------
|
||||
|
||||
"github-backup" will automatically throttle itself based on feedback from the Github API.
|
||||
"github-backup" will automatically throttle itself based on feedback from the Github API.
|
||||
|
||||
Their API is usually rate-limited to 5000 calls per hour. The API will ask github-backup to pause until a specific time when the limit is reset again (at the start of the next hour). This continues until the backup is complete.
|
||||
|
||||
@@ -262,9 +269,9 @@ LFS objects are fetched for all refs, not just the current checkout, ensuring a
|
||||
About Attachments
|
||||
-----------------
|
||||
|
||||
When you use the ``--attachments`` option with ``--issues`` or ``--pulls``, the tool will download user-uploaded attachments (images, videos, documents, etc.) from issue and pull request descriptions and comments. In some circumstances attachments contain valuable data related to the topic, and without their backup important information or context might be lost inadvertently.
|
||||
When you use the ``--attachments`` option with ``--issues``, ``--pulls`` or ``--discussions``, the tool will download user-uploaded attachments (images, videos, documents, etc.) from issue, pull request and discussion descriptions and comments. In some circumstances attachments contain valuable data related to the topic, and without their backup important information or context might be lost inadvertently.
|
||||
|
||||
Attachments are saved to ``issues/attachments/{issue_number}/`` and ``pulls/attachments/{pull_number}/`` directories, where ``{issue_number}`` is the GitHub issue number (e.g., issue #123 saves to ``issues/attachments/123/``). Each attachment directory contains:
|
||||
Attachments are saved to ``issues/attachments/{issue_number}/``, ``pulls/attachments/{pull_number}/`` and ``discussions/attachments/{discussion_number}/`` directories, where ``{issue_number}`` is the GitHub issue number (e.g., issue #123 saves to ``issues/attachments/123/``). Each attachment directory contains:
|
||||
|
||||
- The downloaded attachment files (named by their GitHub identifier with appropriate file extensions)
|
||||
- If multiple attachments have the same filename, conflicts are resolved with numeric suffixes (e.g., ``report.pdf``, ``report_1.pdf``, ``report_2.pdf``)
|
||||
@@ -281,6 +288,29 @@ The tool automatically extracts file extensions from HTTP headers to ensure file
|
||||
|
||||
**Repository filtering** for repo files/assets handles renamed and transferred repositories gracefully. URLs are included if they either match the current repository name directly, or redirect to it (e.g., ``willmcgugan/rich`` redirects to ``Textualize/rich`` after transfer).
|
||||
|
||||
**Fine-grained token limitation:** Due to a GitHub platform limitation, fine-grained personal access tokens (``github_pat_...``) cannot download attachments from private repositories directly. This affects both ``/assets/`` (images) and ``/files/`` (documents) URLs. The tool implements a workaround for image attachments using GitHub's Markdown API, which converts URLs to temporary JWT-signed URLs that can be downloaded. However, this workaround only works for images - document attachments (PDFs, text files, etc.) will fail with 404 errors when using fine-grained tokens on private repos. For full attachment support on private repositories, use a classic token (``-t``) instead of a fine-grained token (``-f``). See `#477 <https://github.com/josegonzalez/python-github-backup/issues/477>`_ for details.
|
||||
|
||||
|
||||
About Discussions
|
||||
-----------------
|
||||
|
||||
GitHub Discussions are backed up with GitHub's GraphQL API because the REST API does not expose discussions. Use ``--discussions`` to save each discussion as JSON under ``repositories/{repo}/discussions/{number}.json``. Discussion backups include the discussion body and metadata, category information, comments, and comment replies.
|
||||
|
||||
``--discussions`` is included in ``--all``. Unlike most REST API-backed resources, discussions require authentication because GitHub's GraphQL API requires a token. Fine-grained personal access tokens and GitHub Apps need read access to the repository's Discussions permission.
|
||||
|
||||
Incremental backups use a per-repository checkpoint at ``repositories/{repo}/discussions/last_update`` based on discussion ``updatedAt`` timestamps. This is separate from the repository-level ``last_update`` file so discussion activity is not missed if the repository's own update timestamp does not change. If you enable ``--discussions`` on an existing incremental backup, the first run performs a full discussions backup for each repository and creates the discussions checkpoint for future runs.
|
||||
|
||||
|
||||
About security advisories
|
||||
-------------------------
|
||||
|
||||
GitHub security advisories are only available in public repositories. GitHub does not provide the respective API endpoint for private repositories.
|
||||
|
||||
Therefore the logic is implemented as follows:
|
||||
- Security advisories are included in the `--all` option.
|
||||
- If only the `--all` option was provided, backups of security advisories are skipped for private repositories.
|
||||
- If the `--security-advisories` option is provided (on its own or in addition to `--all`), a backup of security advisories is attempted for all repositories, with graceful handling if the GitHub API doesn't return any.
|
||||
|
||||
|
||||
Run in Docker container
|
||||
-----------------------
|
||||
@@ -295,7 +325,12 @@ Gotchas / Known-issues
|
||||
All is not everything
|
||||
---------------------
|
||||
|
||||
The ``--all`` argument does not include: cloning private repos (``-P, --private``), cloning forks (``-F, --fork``), cloning starred repositories (``--all-starred``), ``--pull-details``, cloning LFS repositories (``--lfs``), cloning gists (``--gists``) or cloning starred gist repos (``--starred-gists``). See examples for more.
|
||||
The ``--all`` argument does not include: downloading attachments from issue and pull request comments (``--attachments``), cloning private repos (``-P, --private``), cloning forks (``-F, --fork``), cloning starred repositories (``--all-starred``), ``--pull-details``, cloning LFS repositories (``--lfs``), cloning gists (``--gists``) or cloning starred gist repos (``--starred-gists``). See examples for more.
|
||||
|
||||
Saves nothing if no arguments are passed
|
||||
----------------------------------------
|
||||
|
||||
At least one argument like ``--all`` or ``--repositories`` is needed for github-backup to actually save data. Without relevant arguments, github-backup fetches some data from GitHub but doesn't put any of it into files.
|
||||
|
||||
Starred repository size
|
||||
-----------------------
|
||||
@@ -312,25 +347,37 @@ For finer control, avoid using ``--assets`` with starred repos, or use ``--skip-
|
||||
|
||||
Alternatively, consider just storing links to starred repos in JSON format with ``--starred``.
|
||||
|
||||
About pull request reviews
|
||||
--------------------------
|
||||
|
||||
Use ``--pull-reviews`` with ``--pulls`` to include GitHub pull request review metadata under each pull request's ``review_data`` key. Reviews are separate from review comments: ``--pull-comments`` backs up inline review comments via ``comment_data`` and regular PR conversation comments via ``comment_regular_data``, while ``--pull-reviews`` backs up review state, submitted time, commit ID, and the top-level review body.
|
||||
|
||||
``--pull-reviews`` is included in ``--all``. Incremental backups use a per-repository checkpoint at ``repositories/{repo}/pulls/reviews_last_update``. If ``--pull-reviews`` is enabled on an existing incremental backup, the first run performs a one-time backfill for pull request reviews so older PRs are not skipped by the existing pull request checkpoint. Existing ``comment_data``, ``comment_regular_data`` and ``commit_data`` fields are preserved when only review data is being added.
|
||||
|
||||
|
||||
Incremental Backup
|
||||
------------------
|
||||
|
||||
Using (``-i, --incremental``) will only request new data from the API **since the last run (successful or not)**. e.g. only request issues from the API since the last run.
|
||||
Using (``-i, --incremental``) will only request new data from the API **since the last successful resource backup**. e.g. only request issues from the API since the last issue backup for that repository.
|
||||
|
||||
This means any blocking errors on previous runs can cause a large amount of missing data in backups.
|
||||
Incremental checkpoints for issue and pull request API backups are stored per resource in that repository's backup directory (for example ``repositories/{repo}/issues/last_update``, ``repositories/{repo}/pulls/last_update`` or ``starred/{owner}/{repo}/pulls/last_update``). Older versions stored a single global ``last_update`` file in the output directory root. During migration, the legacy global checkpoint is used as a fallback only for resource directories that already contain backup data but do not yet have their own checkpoint. New repositories or newly enabled resources with no existing data get a full backup instead of inheriting an unrelated global checkpoint.
|
||||
|
||||
After all existing issue and pull request resource directories have per-resource checkpoints, the legacy global ``last_update`` file is removed automatically.
|
||||
|
||||
This means any blocking errors on previous runs can cause missing data in backups for the affected repository resource.
|
||||
|
||||
Using (``--incremental-by-files``) will request new data from the API **based on when the file was modified on filesystem**. e.g. if you modify the file yourself you may miss something.
|
||||
|
||||
Still saver than the previous version.
|
||||
Still safer than the previous version.
|
||||
|
||||
Specifically, issues and pull requests are handled like this.
|
||||
Incremental backup only changes how issue and pull request data is fetched.
|
||||
|
||||
Known blocking errors
|
||||
---------------------
|
||||
|
||||
Some errors will block the backup run by exiting the script. e.g. receiving a 403 Forbidden error from the Github API.
|
||||
|
||||
If the incremental argument is used, this will result in the next backup only requesting API data since the last blocked/failed run. Potentially causing unexpected large amounts of missing data.
|
||||
If the incremental argument is used, per-resource checkpoints are only advanced after that resource's backup work completes. A blocking error can still abort the overall run, but repositories and resources that were not processed will keep their previous checkpoints.
|
||||
|
||||
It's therefore recommended to only use the incremental argument if the output/result is being actively monitored, or complimented with periodic full non-incremental runs, to avoid unexpected missing data in a regular backup runs.
|
||||
|
||||
@@ -387,12 +434,12 @@ Github Backup Examples
|
||||
|
||||
Backup all repositories, including private ones using a classic token::
|
||||
|
||||
export ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||
ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||
github-backup WhiteHouse --token $ACCESS_TOKEN --organization --output-directory /tmp/white-house --repositories --private
|
||||
|
||||
Use a fine-grained access token to backup a single organization repository with everything else (wiki, pull requests, comments, issues etc)::
|
||||
|
||||
export FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||
FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||
ORGANIZATION=docker
|
||||
REPO=cli
|
||||
# e.g. git@github.com:docker/cli.git
|
||||
@@ -400,17 +447,17 @@ Use a fine-grained access token to backup a single organization repository with
|
||||
|
||||
Quietly and incrementally backup useful Github user data (public and private repos with SSH) including; all issues, pulls, all public starred repos and gists (omitting "hooks", "releases" and therefore "assets" to prevent blocking). *Great for a cron job.* ::
|
||||
|
||||
export FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||
FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||
GH_USER=YOUR-GITHUB-USER
|
||||
|
||||
github-backup -f $FINE_ACCESS_TOKEN --prefer-ssh -o ~/github-backup/ -l error -P -i --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-commits --labels --milestones --security-advisories --repositories --wikis --releases --assets --attachments --pull-details --gists --starred-gists $GH_USER
|
||||
|
||||
github-backup -f $FINE_ACCESS_TOKEN --prefer-ssh -o ~/github-backup/ -l error -P -i --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-reviews --pull-commits --labels --milestones --security-advisories --discussions --repositories --wikis --releases --assets --attachments --pull-details --gists --starred-gists $GH_USER
|
||||
|
||||
Debug an error/block or incomplete backup into a temporary directory. Omit "incremental" to fill a previous incomplete backup. ::
|
||||
|
||||
export FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||
FINE_ACCESS_TOKEN=SOME-GITHUB-TOKEN
|
||||
GH_USER=YOUR-GITHUB-USER
|
||||
|
||||
github-backup -f $FINE_ACCESS_TOKEN -o /tmp/github-backup/ -l debug -P --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-commits --labels --milestones --repositories --wikis --releases --assets --pull-details --gists --starred-gists $GH_USER
|
||||
github-backup -f $FINE_ACCESS_TOKEN -o /tmp/github-backup/ -l debug -P --all-starred --starred --watched --followers --following --issues --issue-comments --issue-events --pulls --pull-comments --pull-reviews --pull-commits --labels --milestones --discussions --repositories --wikis --releases --assets --pull-details --gists --starred-gists $GH_USER
|
||||
|
||||
Pipe a token from stdin to avoid storing it in environment variables or command history (Unix-like systems only)::
|
||||
|
||||
@@ -426,7 +473,7 @@ This tool creates backups only, there is no inbuilt restore command.
|
||||
cd /tmp/white-house/repositories/petitions/repository
|
||||
git push --mirror git@github.com:WhiteHouse/petitions.git
|
||||
|
||||
**Issues, pull requests, comments, and other metadata** are saved as JSON files for archival purposes. The GitHub API does not support recreating this data faithfully, creating issues via the API has limitations:
|
||||
**Issues, pull requests, discussions, comments, and other metadata** are saved as JSON files for archival purposes. The GitHub API does not support recreating this data faithfully, creating issues via the API has limitations:
|
||||
|
||||
- New issue/PR numbers are assigned (original numbers cannot be set)
|
||||
- Timestamps reflect creation time (original dates cannot be set)
|
||||
|
||||
@@ -1 +1 @@
|
||||
__version__ = "0.61.0"
|
||||
__version__ = "0.62.1"
|
||||
|
||||
@@ -46,6 +46,16 @@ def main():
|
||||
"Use -t/--token or -f/--token-fine to authenticate."
|
||||
)
|
||||
|
||||
# Issue #477: Fine-grained PATs cannot download all attachment types from
|
||||
# private repos. Image attachments will be retried via Markdown API workaround.
|
||||
if args.include_attachments and args.token_fine:
|
||||
logger.warning(
|
||||
"Using --attachments with fine-grained token. Due to GitHub platform "
|
||||
"limitations, file attachments (PDFs, etc.) from private repos may fail. "
|
||||
"Image attachments will be retried via workaround. For full attachment "
|
||||
"support, use --token-classic instead."
|
||||
)
|
||||
|
||||
if args.quiet:
|
||||
logger.setLevel(logging.WARNING)
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
292
github_backup/graphql_queries.py
Normal file
292
github_backup/graphql_queries.py
Normal file
@@ -0,0 +1,292 @@
|
||||
"""GraphQL query templates used by github-backup."""
|
||||
|
||||
DISCUSSION_PAGE_SIZE = 100
|
||||
|
||||
DISCUSSION_LIST_QUERY = """
|
||||
query($owner: String!, $name: String!, $after: String, $pageSize: Int!) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
hasDiscussionsEnabled
|
||||
discussions(
|
||||
first: $pageSize,
|
||||
after: $after,
|
||||
orderBy: {field: UPDATED_AT, direction: DESC}
|
||||
) {
|
||||
totalCount
|
||||
nodes {
|
||||
id
|
||||
number
|
||||
title
|
||||
updatedAt
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
DISCUSSION_DETAIL_QUERY = """
|
||||
query(
|
||||
$owner: String!,
|
||||
$name: String!,
|
||||
$number: Int!,
|
||||
$commentsCursor: String,
|
||||
$pageSize: Int!
|
||||
) {
|
||||
repository(owner: $owner, name: $name) {
|
||||
discussion(number: $number) {
|
||||
activeLockReason
|
||||
answer {
|
||||
id
|
||||
databaseId
|
||||
url
|
||||
}
|
||||
answerChosenAt
|
||||
answerChosenBy {
|
||||
...ActorFields
|
||||
}
|
||||
author {
|
||||
...ActorFields
|
||||
}
|
||||
authorAssociation
|
||||
body
|
||||
bodyHTML
|
||||
bodyText
|
||||
category {
|
||||
createdAt
|
||||
description
|
||||
emoji
|
||||
emojiHTML
|
||||
id
|
||||
isAnswerable
|
||||
name
|
||||
slug
|
||||
updatedAt
|
||||
}
|
||||
closed
|
||||
closedAt
|
||||
createdAt
|
||||
createdViaEmail
|
||||
databaseId
|
||||
editor {
|
||||
...ActorFields
|
||||
}
|
||||
id
|
||||
includesCreatedEdit
|
||||
isAnswered
|
||||
labels(first: 100) {
|
||||
totalCount
|
||||
nodes {
|
||||
id
|
||||
name
|
||||
color
|
||||
description
|
||||
}
|
||||
}
|
||||
lastEditedAt
|
||||
locked
|
||||
number
|
||||
poll {
|
||||
id
|
||||
question
|
||||
totalVoteCount
|
||||
options(first: 100) {
|
||||
totalCount
|
||||
nodes {
|
||||
id
|
||||
option
|
||||
totalVoteCount
|
||||
}
|
||||
}
|
||||
}
|
||||
publishedAt
|
||||
reactionGroups {
|
||||
...ReactionGroupFields
|
||||
}
|
||||
resourcePath
|
||||
stateReason
|
||||
title
|
||||
updatedAt
|
||||
upvoteCount
|
||||
url
|
||||
comments(first: $pageSize, after: $commentsCursor) {
|
||||
totalCount
|
||||
nodes {
|
||||
...DiscussionCommentFields
|
||||
replies(first: $pageSize) {
|
||||
totalCount
|
||||
nodes {
|
||||
...DiscussionReplyFields
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fragment ActorFields on Actor {
|
||||
avatarUrl
|
||||
login
|
||||
resourcePath
|
||||
url
|
||||
}
|
||||
|
||||
fragment ReactionGroupFields on ReactionGroup {
|
||||
content
|
||||
reactors {
|
||||
totalCount
|
||||
}
|
||||
}
|
||||
|
||||
fragment DiscussionCommentFields on DiscussionComment {
|
||||
author {
|
||||
...ActorFields
|
||||
}
|
||||
authorAssociation
|
||||
body
|
||||
bodyHTML
|
||||
bodyText
|
||||
createdAt
|
||||
createdViaEmail
|
||||
databaseId
|
||||
deletedAt
|
||||
editor {
|
||||
...ActorFields
|
||||
}
|
||||
id
|
||||
includesCreatedEdit
|
||||
isAnswer
|
||||
isMinimized
|
||||
lastEditedAt
|
||||
minimizedReason
|
||||
publishedAt
|
||||
reactionGroups {
|
||||
...ReactionGroupFields
|
||||
}
|
||||
replyTo {
|
||||
id
|
||||
databaseId
|
||||
url
|
||||
}
|
||||
resourcePath
|
||||
updatedAt
|
||||
upvoteCount
|
||||
url
|
||||
}
|
||||
|
||||
fragment DiscussionReplyFields on DiscussionComment {
|
||||
author {
|
||||
...ActorFields
|
||||
}
|
||||
authorAssociation
|
||||
body
|
||||
bodyHTML
|
||||
bodyText
|
||||
createdAt
|
||||
createdViaEmail
|
||||
databaseId
|
||||
deletedAt
|
||||
editor {
|
||||
...ActorFields
|
||||
}
|
||||
id
|
||||
includesCreatedEdit
|
||||
isAnswer
|
||||
isMinimized
|
||||
lastEditedAt
|
||||
minimizedReason
|
||||
publishedAt
|
||||
reactionGroups {
|
||||
...ReactionGroupFields
|
||||
}
|
||||
replyTo {
|
||||
id
|
||||
databaseId
|
||||
url
|
||||
}
|
||||
resourcePath
|
||||
updatedAt
|
||||
upvoteCount
|
||||
url
|
||||
}
|
||||
"""
|
||||
|
||||
DISCUSSION_REPLIES_QUERY = """
|
||||
query($commentId: ID!, $repliesCursor: String, $pageSize: Int!) {
|
||||
node(id: $commentId) {
|
||||
... on DiscussionComment {
|
||||
replies(first: $pageSize, after: $repliesCursor) {
|
||||
totalCount
|
||||
nodes {
|
||||
...DiscussionReplyFields
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fragment ActorFields on Actor {
|
||||
avatarUrl
|
||||
login
|
||||
resourcePath
|
||||
url
|
||||
}
|
||||
|
||||
fragment ReactionGroupFields on ReactionGroup {
|
||||
content
|
||||
reactors {
|
||||
totalCount
|
||||
}
|
||||
}
|
||||
|
||||
fragment DiscussionReplyFields on DiscussionComment {
|
||||
author {
|
||||
...ActorFields
|
||||
}
|
||||
authorAssociation
|
||||
body
|
||||
bodyHTML
|
||||
bodyText
|
||||
createdAt
|
||||
createdViaEmail
|
||||
databaseId
|
||||
deletedAt
|
||||
editor {
|
||||
...ActorFields
|
||||
}
|
||||
id
|
||||
includesCreatedEdit
|
||||
isAnswer
|
||||
isMinimized
|
||||
lastEditedAt
|
||||
minimizedReason
|
||||
publishedAt
|
||||
reactionGroups {
|
||||
...ReactionGroupFields
|
||||
}
|
||||
replyTo {
|
||||
id
|
||||
databaseId
|
||||
url
|
||||
}
|
||||
resourcePath
|
||||
updatedAt
|
||||
upvoteCount
|
||||
url
|
||||
}
|
||||
"""
|
||||
@@ -1,15 +1,15 @@
|
||||
# Linting & Formatting
|
||||
autopep8==2.3.2
|
||||
black==25.12.0
|
||||
black==26.3.1
|
||||
flake8==7.3.0
|
||||
|
||||
# Testing
|
||||
pytest==9.0.2
|
||||
pytest==9.0.3
|
||||
|
||||
# Release & Publishing
|
||||
twine==6.2.0
|
||||
gitchangelog==3.0.4
|
||||
setuptools==80.9.0
|
||||
setuptools==82.0.1
|
||||
|
||||
# Documentation
|
||||
restructuredtext-lint==2.0.2
|
||||
|
||||
25
tests/conftest.py
Normal file
25
tests/conftest.py
Normal file
@@ -0,0 +1,25 @@
|
||||
"""Shared pytest fixtures for github-backup tests."""
|
||||
|
||||
import pytest
|
||||
|
||||
from github_backup.github_backup import parse_args
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def create_args():
|
||||
"""Factory fixture that creates args with real CLI defaults.
|
||||
|
||||
Uses the actual argument parser so new CLI args are automatically
|
||||
available with their defaults - no test updates needed.
|
||||
|
||||
Usage:
|
||||
def test_something(self, create_args):
|
||||
args = create_args(include_releases=True, user="myuser")
|
||||
"""
|
||||
def _create(**overrides):
|
||||
# Use real parser to get actual defaults
|
||||
args = parse_args(["testuser"])
|
||||
for key, value in overrides.items():
|
||||
setattr(args, key, value)
|
||||
return args
|
||||
return _create
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Tests for --all-starred flag behavior (issue #225)."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import Mock, patch
|
||||
from unittest.mock import patch
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
@@ -12,58 +12,14 @@ class TestAllStarredCloning:
|
||||
Issue #225: --all-starred should clone starred repos without requiring --repositories.
|
||||
"""
|
||||
|
||||
def _create_mock_args(self, **overrides):
|
||||
"""Create a mock args object with sensible defaults."""
|
||||
args = Mock()
|
||||
args.user = "testuser"
|
||||
args.output_directory = "/tmp/backup"
|
||||
args.include_repository = False
|
||||
args.include_everything = False
|
||||
args.include_gists = False
|
||||
args.include_starred_gists = False
|
||||
args.all_starred = False
|
||||
args.skip_existing = False
|
||||
args.bare_clone = False
|
||||
args.lfs_clone = False
|
||||
args.no_prune = False
|
||||
args.include_wiki = False
|
||||
args.include_issues = False
|
||||
args.include_issue_comments = False
|
||||
args.include_issue_events = False
|
||||
args.include_pulls = False
|
||||
args.include_pull_comments = False
|
||||
args.include_pull_commits = False
|
||||
args.include_pull_details = False
|
||||
args.include_labels = False
|
||||
args.include_hooks = False
|
||||
args.include_milestones = False
|
||||
args.include_security_advisories = False
|
||||
args.include_releases = False
|
||||
args.include_assets = False
|
||||
args.include_attachments = False
|
||||
args.incremental = False
|
||||
args.incremental_by_files = False
|
||||
args.github_host = None
|
||||
args.prefer_ssh = False
|
||||
args.token_classic = None
|
||||
args.token_fine = None
|
||||
args.as_app = False
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
|
||||
for key, value in overrides.items():
|
||||
setattr(args, key, value)
|
||||
|
||||
return args
|
||||
|
||||
@patch('github_backup.github_backup.fetch_repository')
|
||||
@patch('github_backup.github_backup.get_github_repo_url')
|
||||
def test_all_starred_clones_without_repositories_flag(self, mock_get_url, mock_fetch):
|
||||
def test_all_starred_clones_without_repositories_flag(self, mock_get_url, mock_fetch, create_args):
|
||||
"""--all-starred should clone starred repos without --repositories flag.
|
||||
|
||||
This is the core fix for issue #225.
|
||||
"""
|
||||
args = self._create_mock_args(all_starred=True)
|
||||
args = create_args(all_starred=True)
|
||||
mock_get_url.return_value = "https://github.com/otheruser/awesome-project.git"
|
||||
|
||||
# A starred repository (is_starred flag set by retrieve_repositories)
|
||||
@@ -88,9 +44,9 @@ class TestAllStarredCloning:
|
||||
|
||||
@patch('github_backup.github_backup.fetch_repository')
|
||||
@patch('github_backup.github_backup.get_github_repo_url')
|
||||
def test_starred_repo_not_cloned_without_all_starred_flag(self, mock_get_url, mock_fetch):
|
||||
def test_starred_repo_not_cloned_without_all_starred_flag(self, mock_get_url, mock_fetch, create_args):
|
||||
"""Starred repos should NOT be cloned if --all-starred is not set."""
|
||||
args = self._create_mock_args(all_starred=False)
|
||||
args = create_args(all_starred=False)
|
||||
mock_get_url.return_value = "https://github.com/otheruser/awesome-project.git"
|
||||
|
||||
starred_repo = {
|
||||
@@ -111,9 +67,9 @@ class TestAllStarredCloning:
|
||||
|
||||
@patch('github_backup.github_backup.fetch_repository')
|
||||
@patch('github_backup.github_backup.get_github_repo_url')
|
||||
def test_non_starred_repo_not_cloned_with_only_all_starred(self, mock_get_url, mock_fetch):
|
||||
def test_non_starred_repo_not_cloned_with_only_all_starred(self, mock_get_url, mock_fetch, create_args):
|
||||
"""Non-starred repos should NOT be cloned when only --all-starred is set."""
|
||||
args = self._create_mock_args(all_starred=True)
|
||||
args = create_args(all_starred=True)
|
||||
mock_get_url.return_value = "https://github.com/testuser/my-project.git"
|
||||
|
||||
# A regular (non-starred) repository
|
||||
@@ -135,9 +91,9 @@ class TestAllStarredCloning:
|
||||
|
||||
@patch('github_backup.github_backup.fetch_repository')
|
||||
@patch('github_backup.github_backup.get_github_repo_url')
|
||||
def test_repositories_flag_still_works(self, mock_get_url, mock_fetch):
|
||||
def test_repositories_flag_still_works(self, mock_get_url, mock_fetch, create_args):
|
||||
"""--repositories flag should still clone repos as before."""
|
||||
args = self._create_mock_args(include_repository=True)
|
||||
args = create_args(include_repository=True)
|
||||
mock_get_url.return_value = "https://github.com/testuser/my-project.git"
|
||||
|
||||
regular_repo = {
|
||||
|
||||
@@ -4,7 +4,7 @@ import json
|
||||
import os
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from unittest.mock import Mock
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -12,22 +12,13 @@ from github_backup import github_backup
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def attachment_test_setup(tmp_path):
|
||||
def attachment_test_setup(tmp_path, create_args):
|
||||
"""Fixture providing setup and helper for attachment download tests."""
|
||||
from unittest.mock import patch
|
||||
|
||||
issue_cwd = tmp_path / "issues"
|
||||
issue_cwd.mkdir()
|
||||
|
||||
# Mock args
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = None
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.user = "testuser"
|
||||
args.repository = "testrepo"
|
||||
# Create args using shared fixture
|
||||
args = create_args(user="testuser", repository="testrepo")
|
||||
|
||||
repository = {"full_name": "testuser/testrepo"}
|
||||
|
||||
@@ -349,3 +340,146 @@ class TestManifestDuplicatePrevention:
|
||||
downloaded_urls[0]
|
||||
== "https://github.com/user-attachments/assets/unavailable"
|
||||
)
|
||||
|
||||
|
||||
class TestJWTWorkaround:
|
||||
"""Test JWT workaround for fine-grained tokens on private repos (issue #477)."""
|
||||
|
||||
def test_markdown_api_extracts_jwt_url(self):
|
||||
"""Markdown API response with JWT URL is extracted correctly."""
|
||||
html_response = (
|
||||
'<p><a href="https://private-user-images.githubusercontent.com'
|
||||
'/123/abc.png?jwt=eyJhbGciOiJ"><img src="https://private-user-'
|
||||
'images.githubusercontent.com/123/abc.png?jwt=eyJhbGciOiJ" '
|
||||
'alt="img"></a></p>'
|
||||
)
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.read.return_value = html_response.encode("utf-8")
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", return_value=mock_response):
|
||||
result = github_backup.get_jwt_signed_url_via_markdown_api(
|
||||
"https://github.com/user-attachments/assets/abc123",
|
||||
"github_pat_token",
|
||||
"owner/repo"
|
||||
)
|
||||
|
||||
expected = (
|
||||
"https://private-user-images.githubusercontent.com"
|
||||
"/123/abc.png?jwt=eyJhbGciOiJ"
|
||||
)
|
||||
assert result == expected
|
||||
|
||||
def test_markdown_api_returns_none_on_http_error(self):
|
||||
"""HTTP errors return None."""
|
||||
from urllib.error import HTTPError
|
||||
|
||||
error = HTTPError("http://test", 403, "Forbidden", {}, None)
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=error):
|
||||
result = github_backup.get_jwt_signed_url_via_markdown_api(
|
||||
"https://github.com/user-attachments/assets/abc123",
|
||||
"github_pat_token",
|
||||
"owner/repo"
|
||||
)
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_markdown_api_returns_none_when_no_jwt_url(self):
|
||||
"""Response without JWT URL returns None."""
|
||||
mock_response = Mock()
|
||||
mock_response.read.return_value = b"<p>No image here</p>"
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", return_value=mock_response):
|
||||
result = github_backup.get_jwt_signed_url_via_markdown_api(
|
||||
"https://github.com/user-attachments/assets/abc123",
|
||||
"github_pat_token",
|
||||
"owner/repo"
|
||||
)
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_needs_jwt_only_for_fine_grained_private_assets(self):
|
||||
"""needs_jwt is True only for fine-grained + private + /assets/ URL."""
|
||||
assets_url = "https://github.com/user-attachments/assets/abc123"
|
||||
files_url = "https://github.com/user-attachments/files/123/doc.pdf"
|
||||
token_fine = "github_pat_test"
|
||||
private = True
|
||||
public = False
|
||||
|
||||
# Fine-grained + private + assets = True
|
||||
needs_jwt = (
|
||||
token_fine is not None
|
||||
and private
|
||||
and "github.com/user-attachments/assets/" in assets_url
|
||||
)
|
||||
assert needs_jwt is True
|
||||
|
||||
# Fine-grained + private + files = False
|
||||
needs_jwt = (
|
||||
token_fine is not None
|
||||
and private
|
||||
and "github.com/user-attachments/assets/" in files_url
|
||||
)
|
||||
assert needs_jwt is False
|
||||
|
||||
# Fine-grained + public + assets = False
|
||||
needs_jwt = (
|
||||
token_fine is not None
|
||||
and public
|
||||
and "github.com/user-attachments/assets/" in assets_url
|
||||
)
|
||||
assert needs_jwt is False
|
||||
|
||||
def test_jwt_workaround_sets_manifest_flag(self, attachment_test_setup):
|
||||
"""Successful JWT workaround sets jwt_workaround flag in manifest."""
|
||||
setup = attachment_test_setup
|
||||
setup["args"].token_fine = "github_pat_test"
|
||||
setup["repository"]["private"] = True
|
||||
|
||||
issue_data = {"body": "https://github.com/user-attachments/assets/abc123"}
|
||||
|
||||
jwt_url = "https://private-user-images.githubusercontent.com/123/abc.png?jwt=token"
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.get_jwt_signed_url_via_markdown_api",
|
||||
return_value=jwt_url
|
||||
), patch(
|
||||
"github_backup.github_backup.download_attachment_file",
|
||||
return_value={"success": True, "http_status": 200, "url": jwt_url}
|
||||
):
|
||||
github_backup.download_attachments(
|
||||
setup["args"], setup["issue_cwd"], issue_data, 123, setup["repository"]
|
||||
)
|
||||
|
||||
manifest_path = os.path.join(setup["issue_cwd"], "attachments", "123", "manifest.json")
|
||||
with open(manifest_path) as f:
|
||||
manifest = json.load(f)
|
||||
|
||||
assert manifest["attachments"][0]["jwt_workaround"] is True
|
||||
assert manifest["attachments"][0]["url"] == "https://github.com/user-attachments/assets/abc123"
|
||||
|
||||
def test_jwt_workaround_failure_uses_skipped_at(self, attachment_test_setup):
|
||||
"""Failed JWT workaround uses skipped_at instead of downloaded_at."""
|
||||
setup = attachment_test_setup
|
||||
setup["args"].token_fine = "github_pat_test"
|
||||
setup["repository"]["private"] = True
|
||||
|
||||
issue_data = {"body": "https://github.com/user-attachments/assets/abc123"}
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.get_jwt_signed_url_via_markdown_api",
|
||||
return_value=None # Markdown API failed
|
||||
):
|
||||
github_backup.download_attachments(
|
||||
setup["args"], setup["issue_cwd"], issue_data, 123, setup["repository"]
|
||||
)
|
||||
|
||||
manifest_path = os.path.join(setup["issue_cwd"], "attachments", "123", "manifest.json")
|
||||
with open(manifest_path) as f:
|
||||
manifest = json.load(f)
|
||||
|
||||
attachment = manifest["attachments"][0]
|
||||
assert attachment["success"] is False
|
||||
assert "skipped_at" in attachment
|
||||
assert "downloaded_at" not in attachment
|
||||
assert "Use --token-classic" in attachment["error"]
|
||||
|
||||
75
tests/test_auth.py
Normal file
75
tests/test_auth.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""Tests for authentication helpers."""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def test_token_from_gh_flag_parses():
|
||||
args = github_backup.parse_args(["--token-from-gh", "testuser"])
|
||||
assert args.token_from_gh is True
|
||||
|
||||
|
||||
def test_get_auth_reads_token_from_gh_cli(create_args):
|
||||
args = create_args(token_from_gh=True)
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.subprocess.check_output",
|
||||
return_value=b"gho_test_token\n",
|
||||
) as mock_check_output:
|
||||
auth = github_backup.get_auth(args, encode=False)
|
||||
|
||||
assert auth == "gho_test_token:x-oauth-basic"
|
||||
mock_check_output.assert_called_once_with(
|
||||
["gh", "auth", "token"], stderr=github_backup.subprocess.PIPE
|
||||
)
|
||||
|
||||
|
||||
def test_get_auth_reads_token_from_gh_cli_for_enterprise_host(create_args):
|
||||
args = create_args(token_from_gh=True, github_host="ghe.example.com")
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.subprocess.check_output",
|
||||
return_value=b"gho_enterprise_token\n",
|
||||
) as mock_check_output:
|
||||
auth = github_backup.get_auth(args, encode=False)
|
||||
|
||||
assert auth == "gho_enterprise_token:x-oauth-basic"
|
||||
mock_check_output.assert_called_once_with(
|
||||
["gh", "auth", "token", "--hostname", "ghe.example.com"],
|
||||
stderr=github_backup.subprocess.PIPE,
|
||||
)
|
||||
|
||||
|
||||
def test_token_from_gh_is_cached(create_args):
|
||||
args = create_args(token_from_gh=True)
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.subprocess.check_output",
|
||||
return_value=b"gho_cached_token\n",
|
||||
) as mock_check_output:
|
||||
assert github_backup.get_auth(args, encode=False) == "gho_cached_token:x-oauth-basic"
|
||||
assert github_backup.get_auth(args, encode=False) == "gho_cached_token:x-oauth-basic"
|
||||
|
||||
mock_check_output.assert_called_once()
|
||||
|
||||
|
||||
def test_graphql_auth_strips_basic_auth_suffix_for_gh_cli_token(create_args):
|
||||
args = create_args(token_from_gh=True)
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.subprocess.check_output",
|
||||
return_value=b"gho_graphql_token\n",
|
||||
):
|
||||
assert github_backup.get_graphql_auth(args) == "gho_graphql_token"
|
||||
|
||||
|
||||
def test_token_from_gh_rejects_as_app(create_args):
|
||||
args = create_args(token_from_gh=True, as_app=True)
|
||||
|
||||
with pytest.raises(Exception) as exc_info:
|
||||
github_backup.get_auth(args, encode=False)
|
||||
|
||||
assert "--token-from-gh cannot be used with --as-app" in str(exc_info.value)
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Tests for case-insensitive username/organization filtering."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import Mock
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
@@ -9,25 +8,14 @@ from github_backup import github_backup
|
||||
class TestCaseSensitivity:
|
||||
"""Test suite for case-insensitive username matching in filter_repositories."""
|
||||
|
||||
def test_filter_repositories_case_insensitive_user(self):
|
||||
def test_filter_repositories_case_insensitive_user(self, create_args):
|
||||
"""Should filter repositories case-insensitively for usernames.
|
||||
|
||||
Reproduces issue #198 where typing 'iamrodos' fails to match
|
||||
repositories with owner.login='Iamrodos' (the canonical case from GitHub API).
|
||||
"""
|
||||
# Simulate user typing lowercase username
|
||||
args = Mock()
|
||||
args.user = "iamrodos" # lowercase (what user typed)
|
||||
args.repository = None
|
||||
args.name_regex = None
|
||||
args.languages = None
|
||||
args.exclude = None
|
||||
args.fork = False
|
||||
args.private = False
|
||||
args.public = False
|
||||
args.all = True
|
||||
args.skip_archived = False
|
||||
args.starred_skip_size_over = None
|
||||
args = create_args(user="iamrodos")
|
||||
|
||||
# Simulate GitHub API returning canonical case
|
||||
repos = [
|
||||
@@ -52,23 +40,12 @@ class TestCaseSensitivity:
|
||||
assert filtered[0]["name"] == "repo1"
|
||||
assert filtered[1]["name"] == "repo2"
|
||||
|
||||
def test_filter_repositories_case_insensitive_org(self):
|
||||
def test_filter_repositories_case_insensitive_org(self, create_args):
|
||||
"""Should filter repositories case-insensitively for organizations.
|
||||
|
||||
Tests the example from issue #198 where 'prai-org' doesn't match 'PRAI-Org'.
|
||||
"""
|
||||
args = Mock()
|
||||
args.user = "prai-org" # lowercase (what user typed)
|
||||
args.repository = None
|
||||
args.name_regex = None
|
||||
args.languages = None
|
||||
args.exclude = None
|
||||
args.fork = False
|
||||
args.private = False
|
||||
args.public = False
|
||||
args.all = True
|
||||
args.skip_archived = False
|
||||
args.starred_skip_size_over = None
|
||||
args = create_args(user="prai-org")
|
||||
|
||||
repos = [
|
||||
{
|
||||
@@ -85,20 +62,9 @@ class TestCaseSensitivity:
|
||||
assert len(filtered) == 1
|
||||
assert filtered[0]["name"] == "repo1"
|
||||
|
||||
def test_filter_repositories_case_variations(self):
|
||||
def test_filter_repositories_case_variations(self, create_args):
|
||||
"""Should handle various case combinations correctly."""
|
||||
args = Mock()
|
||||
args.user = "TeSt-UsEr" # Mixed case
|
||||
args.repository = None
|
||||
args.name_regex = None
|
||||
args.languages = None
|
||||
args.exclude = None
|
||||
args.fork = False
|
||||
args.private = False
|
||||
args.public = False
|
||||
args.all = True
|
||||
args.skip_archived = False
|
||||
args.starred_skip_size_over = None
|
||||
args = create_args(user="TeSt-UsEr")
|
||||
|
||||
repos = [
|
||||
{"name": "repo1", "owner": {"login": "test-user"}, "private": False, "fork": False},
|
||||
|
||||
257
tests/test_discussions.py
Normal file
257
tests/test_discussions.py
Normal file
@@ -0,0 +1,257 @@
|
||||
"""Tests for GitHub Discussions backup support."""
|
||||
|
||||
import json
|
||||
import os
|
||||
from unittest.mock import patch
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def test_parse_args_discussions_flag():
|
||||
args = github_backup.parse_args(["--discussions", "testuser"])
|
||||
assert args.include_discussions is True
|
||||
|
||||
|
||||
def test_retrieve_discussion_summaries_stops_at_incremental_since(create_args):
|
||||
args = create_args()
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
page = {
|
||||
"repository": {
|
||||
"hasDiscussionsEnabled": True,
|
||||
"discussions": {
|
||||
"totalCount": 3,
|
||||
"nodes": [
|
||||
{"number": 3, "title": "new", "updatedAt": "2026-02-01T00:00:00Z"},
|
||||
{"number": 2, "title": "also new", "updatedAt": "2026-01-10T00:00:00Z"},
|
||||
{"number": 1, "title": "old", "updatedAt": "2025-12-01T00:00:00Z"},
|
||||
],
|
||||
"pageInfo": {"hasNextPage": True, "endCursor": "NEXT"},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_graphql_data", return_value=page
|
||||
) as mock_retrieve:
|
||||
summaries, newest, enabled, total = github_backup.retrieve_discussion_summaries(
|
||||
args, repository, since="2026-01-01T00:00:00Z"
|
||||
)
|
||||
|
||||
assert enabled is True
|
||||
assert total == 3
|
||||
assert newest == "2026-02-01T00:00:00Z"
|
||||
assert [item["number"] for item in summaries] == [3, 2]
|
||||
# The old discussion stops pagination, so the next page is not requested.
|
||||
assert mock_retrieve.call_count == 1
|
||||
assert (
|
||||
mock_retrieve.call_args.kwargs["log_context"]
|
||||
== "discussion summaries owner/repo page 1"
|
||||
)
|
||||
|
||||
|
||||
def test_retrieve_discussion_summaries_excludes_checkpoint_timestamp(create_args):
|
||||
args = create_args()
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
page = {
|
||||
"repository": {
|
||||
"hasDiscussionsEnabled": True,
|
||||
"discussions": {
|
||||
"totalCount": 1,
|
||||
"nodes": [
|
||||
{
|
||||
"number": 1,
|
||||
"title": "already backed up",
|
||||
"updatedAt": "2026-01-01T00:00:00Z",
|
||||
},
|
||||
],
|
||||
"pageInfo": {"hasNextPage": True, "endCursor": "NEXT"},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_graphql_data", return_value=page
|
||||
) as mock_retrieve:
|
||||
summaries, newest, enabled, total = github_backup.retrieve_discussion_summaries(
|
||||
args, repository, since="2026-01-01T00:00:00Z"
|
||||
)
|
||||
|
||||
assert enabled is True
|
||||
assert total == 1
|
||||
assert newest == "2026-01-01T00:00:00Z"
|
||||
assert summaries == []
|
||||
assert mock_retrieve.call_count == 1
|
||||
|
||||
|
||||
def test_retrieve_discussion_summaries_disabled_discussions(create_args):
|
||||
args = create_args()
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_graphql_data",
|
||||
return_value={"repository": {"hasDiscussionsEnabled": False}},
|
||||
):
|
||||
summaries, newest, enabled, total = github_backup.retrieve_discussion_summaries(
|
||||
args, repository
|
||||
)
|
||||
|
||||
assert summaries == []
|
||||
assert newest is None
|
||||
assert enabled is False
|
||||
assert total == 0
|
||||
|
||||
|
||||
def _comment(comment_id, body, replies=None, replies_has_next=False):
|
||||
replies = replies or []
|
||||
return {
|
||||
"id": comment_id,
|
||||
"body": body,
|
||||
"replies": {
|
||||
"totalCount": len(replies) + (1 if replies_has_next else 0),
|
||||
"nodes": replies,
|
||||
"pageInfo": {
|
||||
"hasNextPage": replies_has_next,
|
||||
"endCursor": "REPLIES2" if replies_has_next else None,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def _discussion_page(comment_nodes, has_next=False):
|
||||
return {
|
||||
"repository": {
|
||||
"discussion": {
|
||||
"number": 42,
|
||||
"title": "Discussion title",
|
||||
"updatedAt": "2026-02-01T00:00:00Z",
|
||||
"comments": {
|
||||
"totalCount": 2,
|
||||
"nodes": comment_nodes,
|
||||
"pageInfo": {
|
||||
"hasNextPage": has_next,
|
||||
"endCursor": "COMMENTS2" if has_next else None,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_retrieve_discussion_paginates_comments_and_replies(create_args):
|
||||
args = create_args()
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
reply_1 = {"id": "reply-1", "body": "first reply"}
|
||||
reply_2 = {"id": "reply-2", "body": "second reply"}
|
||||
comment_1 = _comment("comment-1", "first comment", [reply_1], replies_has_next=True)
|
||||
comment_2 = _comment("comment-2", "second comment")
|
||||
|
||||
responses = [
|
||||
_discussion_page([comment_1], has_next=True),
|
||||
{
|
||||
"node": {
|
||||
"replies": {
|
||||
"totalCount": 2,
|
||||
"nodes": [reply_2],
|
||||
"pageInfo": {"hasNextPage": False, "endCursor": None},
|
||||
}
|
||||
}
|
||||
},
|
||||
_discussion_page([comment_2], has_next=False),
|
||||
]
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_graphql_data", side_effect=responses
|
||||
) as mock_retrieve:
|
||||
discussion = github_backup.retrieve_discussion(args, repository, 42)
|
||||
|
||||
assert discussion["number"] == 42
|
||||
assert discussion["comment_count"] == 2
|
||||
assert len(discussion["comment_data"]) == 2
|
||||
assert discussion["comment_data"][0]["body"] == "first comment"
|
||||
assert discussion["comment_data"][0]["reply_count"] == 2
|
||||
assert [r["body"] for r in discussion["comment_data"][0]["reply_data"]] == [
|
||||
"first reply",
|
||||
"second reply",
|
||||
]
|
||||
assert discussion["comment_data"][1]["body"] == "second comment"
|
||||
assert mock_retrieve.call_count == 3
|
||||
assert [
|
||||
call.kwargs["log_context"] for call in mock_retrieve.call_args_list
|
||||
] == [
|
||||
"discussion owner/repo#42 details/comments page 1",
|
||||
"discussion owner/repo#42 comment comment-1 replies page 2",
|
||||
"discussion owner/repo#42 details/comments page 2",
|
||||
]
|
||||
|
||||
|
||||
def test_backup_discussions_uses_incremental_checkpoint(create_args, tmp_path):
|
||||
args = create_args(token_classic="fake_token", include_discussions=True, incremental=True)
|
||||
repository = {"full_name": "owner/repo"}
|
||||
discussions_dir = tmp_path / "discussions"
|
||||
discussions_dir.mkdir()
|
||||
(discussions_dir / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
|
||||
def fake_summaries(passed_args, passed_repository, since=None):
|
||||
assert passed_args is args
|
||||
assert passed_repository == repository
|
||||
assert since == "2026-01-01T00:00:00Z"
|
||||
return (
|
||||
[{"number": 7, "title": "updated", "updatedAt": "2026-02-01T00:00:00Z"}],
|
||||
"2026-02-01T00:00:00Z",
|
||||
True,
|
||||
1,
|
||||
)
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_discussion_summaries",
|
||||
side_effect=fake_summaries,
|
||||
), patch(
|
||||
"github_backup.github_backup.retrieve_discussion",
|
||||
return_value={"number": 7, "title": "updated"},
|
||||
):
|
||||
github_backup.backup_discussions(args, tmp_path, repository)
|
||||
|
||||
with open(discussions_dir / "7.json", encoding="utf-8") as f:
|
||||
assert json.load(f) == {"number": 7, "title": "updated"}
|
||||
assert (discussions_dir / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
|
||||
|
||||
def test_backup_discussions_does_not_advance_checkpoint_on_discussion_error(
|
||||
create_args, tmp_path
|
||||
):
|
||||
args = create_args(token_classic="fake_token", include_discussions=True, incremental=True)
|
||||
repository = {"full_name": "owner/repo"}
|
||||
discussions_dir = tmp_path / "discussions"
|
||||
discussions_dir.mkdir()
|
||||
(discussions_dir / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.retrieve_discussion_summaries",
|
||||
return_value=(
|
||||
[{"number": 7, "title": "updated", "updatedAt": "2026-02-01T00:00:00Z"}],
|
||||
"2026-02-01T00:00:00Z",
|
||||
True,
|
||||
1,
|
||||
),
|
||||
), patch(
|
||||
"github_backup.github_backup.retrieve_discussion",
|
||||
side_effect=Exception("temporary GraphQL error"),
|
||||
):
|
||||
github_backup.backup_discussions(args, tmp_path, repository)
|
||||
|
||||
assert (discussions_dir / "last_update").read_text() == "2026-01-01T00:00:00Z"
|
||||
assert not os.path.exists(discussions_dir / "7.json")
|
||||
|
||||
|
||||
def test_backup_discussions_skips_without_auth(create_args, tmp_path):
|
||||
args = create_args(include_discussions=True)
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
with patch("github_backup.github_backup.retrieve_discussion_summaries") as mock_retrieve:
|
||||
github_backup.backup_discussions(args, tmp_path, repository)
|
||||
|
||||
assert not mock_retrieve.called
|
||||
assert not os.path.exists(tmp_path / "discussions")
|
||||
@@ -1,30 +1,34 @@
|
||||
"""Tests for HTTP 451 (DMCA takedown) handling."""
|
||||
"""Tests for HTTP 451 (DMCA takedown) and HTTP 403 (TOS) handling."""
|
||||
|
||||
import io
|
||||
import json
|
||||
from unittest.mock import Mock, patch
|
||||
from unittest.mock import patch
|
||||
from urllib.error import HTTPError
|
||||
|
||||
import pytest
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def _make_http_error(code, body_bytes, msg="Error", headers=None):
|
||||
"""Create an HTTPError with a readable body (like a real urllib response)."""
|
||||
if headers is None:
|
||||
headers = {"x-ratelimit-remaining": "5000"}
|
||||
return HTTPError(
|
||||
url="https://api.github.com/repos/test/repo",
|
||||
code=code,
|
||||
msg=msg,
|
||||
hdrs=headers,
|
||||
fp=io.BytesIO(body_bytes),
|
||||
)
|
||||
|
||||
|
||||
class TestHTTP451Exception:
|
||||
"""Test suite for HTTP 451 DMCA takedown exception handling."""
|
||||
|
||||
def test_repository_unavailable_error_raised(self):
|
||||
def test_repository_unavailable_error_raised(self, create_args):
|
||||
"""HTTP 451 should raise RepositoryUnavailableError with DMCA URL."""
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = None
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
args.max_retries = 5
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 451
|
||||
args = create_args()
|
||||
|
||||
dmca_data = {
|
||||
"message": "Repository access blocked",
|
||||
@@ -34,82 +38,166 @@ class TestHTTP451Exception:
|
||||
"html_url": "https://github.com/github/dmca/blob/master/2024/11/2024-11-04-source-code.md",
|
||||
},
|
||||
}
|
||||
mock_response.read.return_value = json.dumps(dmca_data).encode("utf-8")
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
mock_response.reason = "Unavailable For Legal Reasons"
|
||||
body = json.dumps(dmca_data).encode("utf-8")
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.make_request_with_retry",
|
||||
return_value=mock_response,
|
||||
):
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(451, body, msg="Unavailable For Legal Reasons")
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/dmca/issues"
|
||||
)
|
||||
|
||||
assert (
|
||||
exc_info.value.dmca_url
|
||||
exc_info.value.legal_url
|
||||
== "https://github.com/github/dmca/blob/master/2024/11/2024-11-04-source-code.md"
|
||||
)
|
||||
assert "451" in str(exc_info.value)
|
||||
|
||||
def test_repository_unavailable_error_without_dmca_url(self):
|
||||
def test_repository_unavailable_error_without_legal_url(self, create_args):
|
||||
"""HTTP 451 without DMCA details should still raise exception."""
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = None
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
args.max_retries = 5
|
||||
args = create_args()
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 451
|
||||
mock_response.read.return_value = b'{"message": "Blocked"}'
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
mock_response.reason = "Unavailable For Legal Reasons"
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(451, b'{"message": "Blocked"}')
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.make_request_with_retry",
|
||||
return_value=mock_response,
|
||||
):
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/dmca/issues"
|
||||
)
|
||||
|
||||
assert exc_info.value.dmca_url is None
|
||||
assert exc_info.value.legal_url is None
|
||||
assert "451" in str(exc_info.value)
|
||||
|
||||
def test_repository_unavailable_error_with_malformed_json(self):
|
||||
def test_repository_unavailable_error_with_malformed_json(self, create_args):
|
||||
"""HTTP 451 with malformed JSON should still raise exception."""
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = None
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
args.max_retries = 5
|
||||
args = create_args()
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 451
|
||||
mock_response.read.return_value = b"invalid json {"
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
mock_response.reason = "Unavailable For Legal Reasons"
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(451, b"invalid json {")
|
||||
|
||||
with patch(
|
||||
"github_backup.github_backup.make_request_with_retry",
|
||||
return_value=mock_response,
|
||||
):
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError):
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/dmca/issues"
|
||||
)
|
||||
|
||||
|
||||
class TestHTTP403TOS:
|
||||
"""Test suite for HTTP 403 TOS violation handling."""
|
||||
|
||||
def test_403_tos_raises_repository_unavailable(self, create_args):
|
||||
"""HTTP 403 (non-rate-limit) should raise RepositoryUnavailableError."""
|
||||
args = create_args()
|
||||
|
||||
tos_data = {
|
||||
"message": "Repository access blocked",
|
||||
"block": {
|
||||
"reason": "tos",
|
||||
"html_url": "https://github.com/contact/tos-violation",
|
||||
},
|
||||
}
|
||||
body = json.dumps(tos_data).encode("utf-8")
|
||||
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(
|
||||
403,
|
||||
body,
|
||||
msg="Forbidden",
|
||||
headers={"x-ratelimit-remaining": "5000"},
|
||||
)
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(github_backup.RepositoryUnavailableError) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/blocked/issues"
|
||||
)
|
||||
|
||||
assert (
|
||||
exc_info.value.legal_url == "https://github.com/contact/tos-violation"
|
||||
)
|
||||
assert "403" in str(exc_info.value)
|
||||
|
||||
def test_403_permission_denied_not_converted(self, create_args):
|
||||
"""HTTP 403 without 'block' in body should propagate as HTTPError, not RepositoryUnavailableError."""
|
||||
args = create_args()
|
||||
|
||||
body = json.dumps({"message": "Must have admin rights to Repository."}).encode(
|
||||
"utf-8"
|
||||
)
|
||||
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(
|
||||
403,
|
||||
body,
|
||||
msg="Forbidden",
|
||||
headers={"x-ratelimit-remaining": "5000"},
|
||||
)
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(HTTPError) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/private/issues"
|
||||
)
|
||||
|
||||
assert exc_info.value.code == 403
|
||||
|
||||
def test_403_rate_limit_not_converted(self, create_args):
|
||||
"""HTTP 403 with rate limit exhausted should NOT become RepositoryUnavailableError."""
|
||||
args = create_args()
|
||||
|
||||
call_count = 0
|
||||
|
||||
def mock_urlopen(*a, **kw):
|
||||
nonlocal call_count
|
||||
call_count += 1
|
||||
raise _make_http_error(
|
||||
403,
|
||||
b'{"message": "rate limit"}',
|
||||
msg="Forbidden",
|
||||
headers={"x-ratelimit-remaining": "0"},
|
||||
)
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with patch(
|
||||
"github_backup.github_backup.calculate_retry_delay", return_value=0
|
||||
):
|
||||
with pytest.raises(HTTPError) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
args, "https://api.github.com/repos/test/ratelimit/issues"
|
||||
)
|
||||
|
||||
assert exc_info.value.code == 403
|
||||
# Should have retried (not raised immediately as RepositoryUnavailableError)
|
||||
assert call_count > 1
|
||||
|
||||
|
||||
class TestRetrieveRepositoriesUnavailable:
|
||||
"""Test that retrieve_repositories handles RepositoryUnavailableError gracefully."""
|
||||
|
||||
def test_unavailable_repo_returns_empty_list(self, create_args):
|
||||
"""retrieve_repositories should return [] when the repo is unavailable."""
|
||||
args = create_args(repository="blocked-repo")
|
||||
|
||||
def mock_urlopen(*a, **kw):
|
||||
raise _make_http_error(
|
||||
451,
|
||||
json.dumps(
|
||||
{
|
||||
"message": "Blocked",
|
||||
"block": {"html_url": "https://example.com/dmca"},
|
||||
}
|
||||
).encode("utf-8"),
|
||||
msg="Unavailable For Legal Reasons",
|
||||
)
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
repos = github_backup.retrieve_repositories(args, {"login": None})
|
||||
|
||||
assert repos == []
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main([__file__, "-v"])
|
||||
|
||||
189
tests/test_incremental_per_repository.py
Normal file
189
tests/test_incremental_per_repository.py
Normal file
@@ -0,0 +1,189 @@
|
||||
"""Tests for per-resource incremental checkpoints."""
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def _repo(name, updated_at, pushed_at=None):
|
||||
return {
|
||||
"name": name,
|
||||
"full_name": "owner/{0}".format(name),
|
||||
"owner": {"login": "owner"},
|
||||
"clone_url": "https://github.com/owner/{0}.git".format(name),
|
||||
"private": False,
|
||||
"fork": False,
|
||||
"has_wiki": False,
|
||||
"updated_at": updated_at,
|
||||
"pushed_at": pushed_at,
|
||||
}
|
||||
|
||||
|
||||
def test_incremental_uses_per_resource_last_update(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(incremental=True, include_issues=True)
|
||||
repositories = [
|
||||
_repo("repo-one", "2026-02-01T00:00:00Z"),
|
||||
_repo("repo-two", "2026-03-01T00:00:00Z"),
|
||||
]
|
||||
repo_one_issues = tmp_path / "repositories" / "repo-one" / "issues"
|
||||
repo_two_issues = tmp_path / "repositories" / "repo-two" / "issues"
|
||||
repo_one_issues.mkdir(parents=True)
|
||||
repo_two_issues.mkdir(parents=True)
|
||||
(repo_one_issues / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
(repo_two_issues / "last_update").write_text("2025-01-01T00:00:00Z")
|
||||
|
||||
seen_since = []
|
||||
|
||||
def fake_backup_issues(passed_args, repo_cwd, repository, repos_template):
|
||||
seen_since.append((repository["name"], passed_args.since))
|
||||
|
||||
monkeypatch.setattr(github_backup, "backup_issues", fake_backup_issues)
|
||||
|
||||
github_backup.backup_repositories(args, tmp_path, repositories)
|
||||
|
||||
assert seen_since == [
|
||||
("repo-one", "2026-01-01T00:00:00Z"),
|
||||
("repo-two", "2025-01-01T00:00:00Z"),
|
||||
]
|
||||
assert (repo_one_issues / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
assert (repo_two_issues / "last_update").read_text() == "2026-03-01T00:00:00Z"
|
||||
assert not os.path.exists(tmp_path / "last_update")
|
||||
|
||||
|
||||
def test_incremental_uses_independent_issue_and_pull_checkpoints(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(incremental=True, include_issues=True, include_pulls=True)
|
||||
repository = _repo("repo-one", "2026-02-01T00:00:00Z")
|
||||
repo_dir = tmp_path / "repositories" / "repo-one"
|
||||
issues_dir = repo_dir / "issues"
|
||||
pulls_dir = repo_dir / "pulls"
|
||||
issues_dir.mkdir(parents=True)
|
||||
pulls_dir.mkdir(parents=True)
|
||||
(issues_dir / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
(pulls_dir / "last_update").write_text("2025-01-01T00:00:00Z")
|
||||
|
||||
seen_since = []
|
||||
|
||||
def fake_backup_issues(passed_args, repo_cwd, repository, repos_template):
|
||||
seen_since.append(("issues", passed_args.since))
|
||||
|
||||
def fake_backup_pulls(passed_args, repo_cwd, repository, repos_template):
|
||||
seen_since.append(("pulls", passed_args.since))
|
||||
|
||||
monkeypatch.setattr(github_backup, "backup_issues", fake_backup_issues)
|
||||
monkeypatch.setattr(github_backup, "backup_pulls", fake_backup_pulls)
|
||||
|
||||
github_backup.backup_repositories(args, tmp_path, [repository])
|
||||
|
||||
assert seen_since == [
|
||||
("issues", "2026-01-01T00:00:00Z"),
|
||||
("pulls", "2025-01-01T00:00:00Z"),
|
||||
]
|
||||
assert (issues_dir / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
assert (pulls_dir / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
|
||||
|
||||
def test_incremental_uses_legacy_global_last_update_for_existing_resource_backup(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(incremental=True, include_issues=True)
|
||||
repository = _repo("repo-one", "2026-02-01T00:00:00Z")
|
||||
(tmp_path / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
issues_dir = tmp_path / "repositories" / "repo-one" / "issues"
|
||||
issues_dir.mkdir(parents=True)
|
||||
with open(issues_dir / "1.json", "w", encoding="utf-8") as f:
|
||||
json.dump({"number": 1}, f)
|
||||
|
||||
seen_since = []
|
||||
|
||||
def fake_backup_issues(passed_args, repo_cwd, repository, repos_template):
|
||||
seen_since.append(passed_args.since)
|
||||
|
||||
monkeypatch.setattr(github_backup, "backup_issues", fake_backup_issues)
|
||||
|
||||
github_backup.backup_repositories(args, tmp_path, [repository])
|
||||
|
||||
assert seen_since == ["2026-01-01T00:00:00Z"]
|
||||
assert (issues_dir / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
assert not os.path.exists(tmp_path / "last_update")
|
||||
|
||||
|
||||
def test_incremental_does_not_use_legacy_global_last_update_for_new_resource_backup(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(incremental=True, include_issues=True)
|
||||
repository = _repo("repo-one", "2026-02-01T00:00:00Z")
|
||||
(tmp_path / "last_update").write_text("2099-01-01T00:00:00Z")
|
||||
|
||||
seen_since = []
|
||||
|
||||
def fake_backup_issues(passed_args, repo_cwd, repository, repos_template):
|
||||
seen_since.append(passed_args.since)
|
||||
|
||||
monkeypatch.setattr(github_backup, "backup_issues", fake_backup_issues)
|
||||
|
||||
github_backup.backup_repositories(args, tmp_path, [repository])
|
||||
|
||||
assert seen_since == [None]
|
||||
assert (
|
||||
tmp_path / "repositories" / "repo-one" / "issues" / "last_update"
|
||||
).read_text() == "2026-02-01T00:00:00Z"
|
||||
assert not os.path.exists(tmp_path / "last_update")
|
||||
|
||||
|
||||
def test_incremental_keeps_legacy_global_last_update_until_all_existing_resources_migrated(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(incremental=True, include_issues=True)
|
||||
repository = _repo("repo-one", "2026-02-01T00:00:00Z")
|
||||
(tmp_path / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
repo_one_issues = tmp_path / "repositories" / "repo-one" / "issues"
|
||||
repo_two_issues = tmp_path / "repositories" / "repo-two" / "issues"
|
||||
repo_one_issues.mkdir(parents=True)
|
||||
repo_two_issues.mkdir(parents=True)
|
||||
with open(repo_one_issues / "1.json", "w", encoding="utf-8") as f:
|
||||
json.dump({"number": 1}, f)
|
||||
with open(repo_two_issues / "2.json", "w", encoding="utf-8") as f:
|
||||
json.dump({"number": 2}, f)
|
||||
|
||||
def fake_backup_issues(passed_args, repo_cwd, repository, repos_template):
|
||||
pass
|
||||
|
||||
monkeypatch.setattr(github_backup, "backup_issues", fake_backup_issues)
|
||||
|
||||
github_backup.backup_repositories(args, tmp_path, [repository])
|
||||
|
||||
assert (repo_one_issues / "last_update").read_text() == "2026-02-01T00:00:00Z"
|
||||
assert not os.path.exists(repo_two_issues / "last_update")
|
||||
assert (tmp_path / "last_update").read_text() == "2026-01-01T00:00:00Z"
|
||||
|
||||
|
||||
def test_incremental_does_not_remove_legacy_checkpoint_without_resource_work(
|
||||
create_args, tmp_path
|
||||
):
|
||||
args = create_args(incremental=True, include_repository=True)
|
||||
repository = _repo("repo-one", "2026-02-01T00:00:00Z")
|
||||
(tmp_path / "last_update").write_text("2026-01-01T00:00:00Z")
|
||||
|
||||
github_backup.backup_repositories(args, tmp_path, [repository])
|
||||
|
||||
assert (tmp_path / "last_update").read_text() == "2026-01-01T00:00:00Z"
|
||||
assert not os.path.exists(
|
||||
tmp_path / "repositories" / "repo-one" / "issues" / "last_update"
|
||||
)
|
||||
|
||||
|
||||
def test_repository_checkpoint_time_uses_newest_available_repo_timestamp():
|
||||
repository = _repo(
|
||||
"repo-one",
|
||||
updated_at="2026-02-01T00:00:00Z",
|
||||
pushed_at="2026-03-01T00:00:00Z",
|
||||
)
|
||||
|
||||
assert github_backup.get_repository_checkpoint_time(repository) == (
|
||||
"2026-03-01T00:00:00Z"
|
||||
)
|
||||
@@ -1,9 +1,7 @@
|
||||
"""Tests for Link header pagination handling."""
|
||||
|
||||
import json
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
@@ -38,23 +36,9 @@ class MockHTTPResponse:
|
||||
return headers
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_args():
|
||||
"""Mock args for retrieve_data."""
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = "fake_token"
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
args.max_retries = 5
|
||||
return args
|
||||
|
||||
|
||||
def test_cursor_based_pagination(mock_args):
|
||||
def test_cursor_based_pagination(create_args):
|
||||
"""Link header with 'after' cursor parameter works correctly."""
|
||||
args = create_args(token_classic="fake_token")
|
||||
|
||||
# Simulate issues endpoint behavior: returns cursor in Link header
|
||||
responses = [
|
||||
@@ -77,7 +61,7 @@ def test_cursor_based_pagination(mock_args):
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
results = github_backup.retrieve_data(
|
||||
mock_args, "https://api.github.com/repos/owner/repo/issues"
|
||||
args, "https://api.github.com/repos/owner/repo/issues"
|
||||
)
|
||||
|
||||
# Verify all items retrieved and cursor was used in second request
|
||||
@@ -86,8 +70,9 @@ def test_cursor_based_pagination(mock_args):
|
||||
assert "after=ABC123" in requests_made[1]
|
||||
|
||||
|
||||
def test_page_based_pagination(mock_args):
|
||||
def test_page_based_pagination(create_args):
|
||||
"""Link header with 'page' parameter works correctly."""
|
||||
args = create_args(token_classic="fake_token")
|
||||
|
||||
# Simulate pulls/repos endpoint behavior: returns page numbers in Link header
|
||||
responses = [
|
||||
@@ -110,7 +95,7 @@ def test_page_based_pagination(mock_args):
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
results = github_backup.retrieve_data(
|
||||
mock_args, "https://api.github.com/repos/owner/repo/pulls"
|
||||
args, "https://api.github.com/repos/owner/repo/pulls"
|
||||
)
|
||||
|
||||
# Verify all items retrieved and page parameter was used (not cursor)
|
||||
@@ -120,8 +105,9 @@ def test_page_based_pagination(mock_args):
|
||||
assert "after" not in requests_made[1]
|
||||
|
||||
|
||||
def test_no_link_header_stops_pagination(mock_args):
|
||||
def test_no_link_header_stops_pagination(create_args):
|
||||
"""Pagination stops when Link header is absent."""
|
||||
args = create_args(token_classic="fake_token")
|
||||
|
||||
# Simulate endpoint with results that fit in a single page
|
||||
responses = [
|
||||
@@ -138,7 +124,7 @@ def test_no_link_header_stops_pagination(mock_args):
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
results = github_backup.retrieve_data(
|
||||
mock_args, "https://api.github.com/repos/owner/repo/labels"
|
||||
args, "https://api.github.com/repos/owner/repo/labels"
|
||||
)
|
||||
|
||||
# Verify pagination stopped after first request
|
||||
|
||||
131
tests/test_pull_incremental_pagination.py
Normal file
131
tests/test_pull_incremental_pagination.py
Normal file
@@ -0,0 +1,131 @@
|
||||
"""Tests for incremental pull request pagination."""
|
||||
|
||||
import json
|
||||
import os
|
||||
from unittest.mock import patch
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
class MockHTTPResponse:
|
||||
def __init__(self, data, link_header=None):
|
||||
self._content = json.dumps(data).encode("utf-8")
|
||||
self._link_header = link_header
|
||||
self._read = False
|
||||
self.reason = "OK"
|
||||
|
||||
def getcode(self):
|
||||
return 200
|
||||
|
||||
def read(self):
|
||||
if self._read:
|
||||
return b""
|
||||
self._read = True
|
||||
return self._content
|
||||
|
||||
@property
|
||||
def headers(self):
|
||||
headers = {"x-ratelimit-remaining": "5000"}
|
||||
if self._link_header:
|
||||
headers["Link"] = self._link_header
|
||||
return headers
|
||||
|
||||
|
||||
def test_backup_pulls_incremental_excludes_checkpoint_timestamp(create_args, tmp_path):
|
||||
args = create_args(include_pulls=True, incremental=True)
|
||||
args.since = "2026-04-26T08:13:46Z"
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
responses = [
|
||||
MockHTTPResponse([]),
|
||||
MockHTTPResponse(
|
||||
[
|
||||
{
|
||||
"number": 1,
|
||||
"title": "already backed up",
|
||||
"updated_at": "2026-04-26T08:13:46Z",
|
||||
},
|
||||
],
|
||||
link_header='<https://api.github.com/repos/owner/repo/pulls?per_page=100&state=closed&page=2>; rel="next"',
|
||||
),
|
||||
MockHTTPResponse(
|
||||
[
|
||||
{
|
||||
"number": 0,
|
||||
"title": "older pull on page 2",
|
||||
"updated_at": "2026-04-25T07:00:00Z",
|
||||
}
|
||||
]
|
||||
),
|
||||
]
|
||||
requests_made = []
|
||||
|
||||
def mock_urlopen(request, *args, **kwargs):
|
||||
requests_made.append(request.get_full_url())
|
||||
return responses[len(requests_made) - 1]
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
github_backup.backup_pulls(
|
||||
args, tmp_path, repository, "https://api.github.com/repos"
|
||||
)
|
||||
|
||||
assert len(requests_made) == 2
|
||||
assert "state=open" in requests_made[0]
|
||||
assert "state=closed" in requests_made[1]
|
||||
assert all("page=2" not in url for url in requests_made)
|
||||
assert not os.path.exists(tmp_path / "pulls" / "1.json")
|
||||
assert not os.path.exists(tmp_path / "pulls" / "0.json")
|
||||
|
||||
|
||||
def test_backup_pulls_incremental_stops_before_fetching_old_pages(
|
||||
create_args, tmp_path
|
||||
):
|
||||
args = create_args(include_pulls=True, incremental=True)
|
||||
args.since = "2026-04-26T08:13:46Z"
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
responses = [
|
||||
MockHTTPResponse([]),
|
||||
MockHTTPResponse(
|
||||
[
|
||||
{
|
||||
"number": 2,
|
||||
"title": "new pull",
|
||||
"updated_at": "2026-04-26T09:00:00Z",
|
||||
},
|
||||
{
|
||||
"number": 1,
|
||||
"title": "old pull",
|
||||
"updated_at": "2026-04-26T07:00:00Z",
|
||||
},
|
||||
],
|
||||
link_header='<https://api.github.com/repos/owner/repo/pulls?per_page=100&state=closed&page=2>; rel="next"',
|
||||
),
|
||||
MockHTTPResponse(
|
||||
[
|
||||
{
|
||||
"number": 0,
|
||||
"title": "older pull on page 2",
|
||||
"updated_at": "2026-04-25T07:00:00Z",
|
||||
}
|
||||
]
|
||||
),
|
||||
]
|
||||
requests_made = []
|
||||
|
||||
def mock_urlopen(request, *args, **kwargs):
|
||||
requests_made.append(request.get_full_url())
|
||||
return responses[len(requests_made) - 1]
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
github_backup.backup_pulls(
|
||||
args, tmp_path, repository, "https://api.github.com/repos"
|
||||
)
|
||||
|
||||
assert len(requests_made) == 2
|
||||
assert "state=open" in requests_made[0]
|
||||
assert "state=closed" in requests_made[1]
|
||||
assert all("page=2" not in url for url in requests_made)
|
||||
assert os.path.exists(tmp_path / "pulls" / "2.json")
|
||||
assert not os.path.exists(tmp_path / "pulls" / "1.json")
|
||||
assert not os.path.exists(tmp_path / "pulls" / "0.json")
|
||||
237
tests/test_pull_reviews.py
Normal file
237
tests/test_pull_reviews.py
Normal file
@@ -0,0 +1,237 @@
|
||||
"""Tests for pull request review backups."""
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def test_parse_args_pull_reviews_flag():
|
||||
args = github_backup.parse_args(["--pull-reviews", "testuser"])
|
||||
assert args.include_pull_reviews is True
|
||||
|
||||
|
||||
def test_backup_pulls_includes_review_data(create_args, tmp_path, monkeypatch):
|
||||
args = create_args(include_pulls=True, include_pull_reviews=True)
|
||||
repository = {"full_name": "owner/repo"}
|
||||
calls = []
|
||||
|
||||
def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
|
||||
calls.append((template, query_args))
|
||||
if template == "https://api.github.com/repos/owner/repo/pulls":
|
||||
if query_args["state"] == "open":
|
||||
return [
|
||||
{
|
||||
"number": 1,
|
||||
"updated_at": "2026-02-01T00:00:00Z",
|
||||
"title": "Add feature",
|
||||
}
|
||||
]
|
||||
return []
|
||||
if template == "https://api.github.com/repos/owner/repo/pulls/1/reviews":
|
||||
return [
|
||||
{
|
||||
"id": 123,
|
||||
"state": "APPROVED",
|
||||
"body": "Looks good",
|
||||
"submitted_at": "2026-02-01T00:00:00Z",
|
||||
}
|
||||
]
|
||||
raise AssertionError("Unexpected template: {0}".format(template))
|
||||
|
||||
monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)
|
||||
|
||||
github_backup.backup_pulls(
|
||||
args, tmp_path, repository, "https://api.github.com/repos"
|
||||
)
|
||||
|
||||
with open(tmp_path / "pulls" / "1.json", encoding="utf-8") as f:
|
||||
pull = json.load(f)
|
||||
|
||||
assert pull["review_data"] == [
|
||||
{
|
||||
"body": "Looks good",
|
||||
"id": 123,
|
||||
"state": "APPROVED",
|
||||
"submitted_at": "2026-02-01T00:00:00Z",
|
||||
}
|
||||
]
|
||||
assert (
|
||||
"https://api.github.com/repos/owner/repo/pulls/1/reviews",
|
||||
None,
|
||||
) in calls
|
||||
|
||||
|
||||
def test_pull_reviews_backfill_ignores_repository_checkpoint(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(
|
||||
include_pulls=True,
|
||||
include_pull_reviews=True,
|
||||
incremental=True,
|
||||
)
|
||||
args.since = "2026-01-01T00:00:00Z"
|
||||
repository = {"full_name": "owner/repo"}
|
||||
|
||||
def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
|
||||
if template == "https://api.github.com/repos/owner/repo/pulls":
|
||||
if query_args["state"] == "open":
|
||||
return [
|
||||
{
|
||||
"number": 1,
|
||||
"updated_at": "2025-01-01T00:00:00Z",
|
||||
"title": "Old pull request",
|
||||
}
|
||||
]
|
||||
return []
|
||||
if template == "https://api.github.com/repos/owner/repo/pulls/1/reviews":
|
||||
return [{"id": 123, "state": "APPROVED"}]
|
||||
raise AssertionError("Unexpected template: {0}".format(template))
|
||||
|
||||
monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)
|
||||
|
||||
github_backup.backup_pulls(
|
||||
args, tmp_path, repository, "https://api.github.com/repos"
|
||||
)
|
||||
|
||||
with open(tmp_path / "pulls" / "1.json", encoding="utf-8") as f:
|
||||
pull = json.load(f)
|
||||
|
||||
assert pull["review_data"] == [{"id": 123, "state": "APPROVED"}]
|
||||
assert (tmp_path / "pulls" / "reviews_last_update").read_text() == (
|
||||
"2025-01-01T00:00:00Z"
|
||||
)
|
||||
|
||||
|
||||
def test_pull_reviews_uses_review_checkpoint_when_older_than_repository_checkpoint(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(
|
||||
include_pulls=True,
|
||||
include_pull_reviews=True,
|
||||
incremental=True,
|
||||
)
|
||||
args.since = "2026-01-01T00:00:00Z"
|
||||
repository = {"full_name": "owner/repo"}
|
||||
pulls_dir = tmp_path / "pulls"
|
||||
pulls_dir.mkdir()
|
||||
(pulls_dir / "reviews_last_update").write_text("2025-01-01T00:00:00Z")
|
||||
|
||||
def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
|
||||
if template == "https://api.github.com/repos/owner/repo/pulls":
|
||||
if query_args["state"] == "open":
|
||||
return [
|
||||
{
|
||||
"number": 1,
|
||||
"updated_at": "2025-06-01T00:00:00Z",
|
||||
"title": "Review changed while feature was disabled",
|
||||
},
|
||||
{
|
||||
"number": 2,
|
||||
"updated_at": "2024-12-01T00:00:00Z",
|
||||
"title": "Too old",
|
||||
},
|
||||
]
|
||||
return []
|
||||
if template == "https://api.github.com/repos/owner/repo/pulls/1/reviews":
|
||||
return [{"id": 123, "state": "COMMENTED"}]
|
||||
raise AssertionError("Unexpected template: {0}".format(template))
|
||||
|
||||
monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)
|
||||
|
||||
github_backup.backup_pulls(
|
||||
args, tmp_path, repository, "https://api.github.com/repos"
|
||||
)
|
||||
|
||||
assert os.path.exists(tmp_path / "pulls" / "1.json")
|
||||
assert not os.path.exists(tmp_path / "pulls" / "2.json")
|
||||
assert (tmp_path / "pulls" / "reviews_last_update").read_text() == (
|
||||
"2025-06-01T00:00:00Z"
|
||||
)
|
||||
|
||||
|
||||
def test_pull_reviews_preserves_existing_optional_pull_data(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(include_pulls=True, include_pull_reviews=True)
|
||||
repository = {"full_name": "owner/repo"}
|
||||
pulls_dir = tmp_path / "pulls"
|
||||
pulls_dir.mkdir()
|
||||
with open(pulls_dir / "1.json", "w", encoding="utf-8") as f:
|
||||
json.dump(
|
||||
{
|
||||
"number": 1,
|
||||
"updated_at": "2026-01-01T00:00:00Z",
|
||||
"comment_data": [{"id": 10, "body": "inline comment"}],
|
||||
"comment_regular_data": [{"id": 11, "body": "regular comment"}],
|
||||
"commit_data": [{"sha": "abc"}],
|
||||
},
|
||||
f,
|
||||
)
|
||||
|
||||
def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
|
||||
if template == "https://api.github.com/repos/owner/repo/pulls":
|
||||
if query_args["state"] == "open":
|
||||
return [
|
||||
{
|
||||
"number": 1,
|
||||
"updated_at": "2026-02-01T00:00:00Z",
|
||||
"title": "Add reviews",
|
||||
}
|
||||
]
|
||||
return []
|
||||
if template == "https://api.github.com/repos/owner/repo/pulls/1/reviews":
|
||||
return [{"id": 123, "state": "APPROVED"}]
|
||||
raise AssertionError("Unexpected template: {0}".format(template))
|
||||
|
||||
monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)
|
||||
|
||||
github_backup.backup_pulls(
|
||||
args, tmp_path, repository, "https://api.github.com/repos"
|
||||
)
|
||||
|
||||
with open(pulls_dir / "1.json", encoding="utf-8") as f:
|
||||
pull = json.load(f)
|
||||
|
||||
assert pull["review_data"] == [{"id": 123, "state": "APPROVED"}]
|
||||
assert pull["comment_data"] == [{"id": 10, "body": "inline comment"}]
|
||||
assert pull["comment_regular_data"] == [{"id": 11, "body": "regular comment"}]
|
||||
assert pull["commit_data"] == [{"sha": "abc"}]
|
||||
|
||||
|
||||
def test_pull_reviews_does_not_advance_checkpoint_on_review_error(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(
|
||||
include_pulls=True,
|
||||
include_pull_reviews=True,
|
||||
incremental=True,
|
||||
)
|
||||
args.since = "2026-01-01T00:00:00Z"
|
||||
repository = {"full_name": "owner/repo"}
|
||||
pulls_dir = tmp_path / "pulls"
|
||||
pulls_dir.mkdir()
|
||||
(pulls_dir / "reviews_last_update").write_text("2025-01-01T00:00:00Z")
|
||||
|
||||
def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
|
||||
if template == "https://api.github.com/repos/owner/repo/pulls":
|
||||
if query_args["state"] == "open":
|
||||
return [
|
||||
{
|
||||
"number": 1,
|
||||
"updated_at": "2025-06-01T00:00:00Z",
|
||||
"title": "Review retrieval fails",
|
||||
}
|
||||
]
|
||||
return []
|
||||
if template == "https://api.github.com/repos/owner/repo/pulls/1/reviews":
|
||||
raise Exception("temporary API failure")
|
||||
raise AssertionError("Unexpected template: {0}".format(template))
|
||||
|
||||
monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)
|
||||
|
||||
github_backup.backup_pulls(
|
||||
args, tmp_path, repository, "https://api.github.com/repos"
|
||||
)
|
||||
|
||||
assert (pulls_dir / "reviews_last_update").read_text() == "2025-01-01T00:00:00Z"
|
||||
95
tests/test_releases.py
Normal file
95
tests/test_releases.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""Tests for release backup behavior."""
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
def test_backup_releases_uses_embedded_assets_without_extra_asset_list_request(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(include_releases=True, include_assets=True)
|
||||
repository = {"full_name": "owner/repo", "name": "repo"}
|
||||
calls = []
|
||||
downloads = []
|
||||
|
||||
def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
|
||||
calls.append(template)
|
||||
if template == "https://api.github.com/repos/owner/repo/releases":
|
||||
return [
|
||||
{
|
||||
"tag_name": "v1.0.0",
|
||||
"created_at": "2026-01-01T00:00:00Z",
|
||||
"updated_at": "2026-01-01T00:00:00Z",
|
||||
"prerelease": False,
|
||||
"draft": False,
|
||||
"assets_url": "https://api.github.com/repos/owner/repo/releases/1/assets",
|
||||
"assets": [
|
||||
{
|
||||
"name": "artifact.zip",
|
||||
"url": "https://api.github.com/repos/owner/repo/releases/assets/1",
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
raise AssertionError("Unexpected API request: {0}".format(template))
|
||||
|
||||
def fake_download_file(url, path, auth, as_app=False, fine=False):
|
||||
downloads.append((url, path))
|
||||
|
||||
monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)
|
||||
monkeypatch.setattr(github_backup, "download_file", fake_download_file)
|
||||
|
||||
github_backup.backup_releases(
|
||||
args,
|
||||
tmp_path,
|
||||
repository,
|
||||
"https://api.github.com/repos",
|
||||
include_assets=True,
|
||||
)
|
||||
|
||||
assert calls == ["https://api.github.com/repos/owner/repo/releases"]
|
||||
assert downloads == [
|
||||
(
|
||||
"https://api.github.com/repos/owner/repo/releases/assets/1",
|
||||
str(tmp_path / "releases" / "v1.0.0" / "artifact.zip"),
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def test_backup_releases_falls_back_to_assets_url_when_assets_missing(
|
||||
create_args, tmp_path, monkeypatch
|
||||
):
|
||||
args = create_args(include_releases=True, include_assets=True)
|
||||
repository = {"full_name": "owner/repo", "name": "repo"}
|
||||
calls = []
|
||||
|
||||
def fake_retrieve_data(passed_args, template, query_args=None, paginated=True, **kwargs):
|
||||
calls.append(template)
|
||||
if template == "https://api.github.com/repos/owner/repo/releases":
|
||||
return [
|
||||
{
|
||||
"tag_name": "v1.0.0",
|
||||
"created_at": "2026-01-01T00:00:00Z",
|
||||
"updated_at": "2026-01-01T00:00:00Z",
|
||||
"prerelease": False,
|
||||
"draft": False,
|
||||
"assets_url": "https://api.github.com/repos/owner/repo/releases/1/assets",
|
||||
}
|
||||
]
|
||||
if template == "https://api.github.com/repos/owner/repo/releases/1/assets":
|
||||
return []
|
||||
raise AssertionError("Unexpected API request: {0}".format(template))
|
||||
|
||||
monkeypatch.setattr(github_backup, "retrieve_data", fake_retrieve_data)
|
||||
|
||||
github_backup.backup_releases(
|
||||
args,
|
||||
tmp_path,
|
||||
repository,
|
||||
"https://api.github.com/repos",
|
||||
include_assets=True,
|
||||
)
|
||||
|
||||
assert calls == [
|
||||
"https://api.github.com/repos/owner/repo/releases",
|
||||
"https://api.github.com/repos/owner/repo/releases/1/assets",
|
||||
]
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Tests for retrieve_data function."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import socket
|
||||
from unittest.mock import Mock, patch
|
||||
from urllib.error import HTTPError, URLError
|
||||
@@ -63,21 +64,9 @@ class TestCalculateRetryDelay:
|
||||
class TestRetrieveDataRetry:
|
||||
"""Tests for retry behavior in retrieve_data."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_args(self):
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = "fake_token"
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
args.max_retries = DEFAULT_MAX_RETRIES
|
||||
return args
|
||||
|
||||
def test_json_parse_error_retries_and_fails(self, mock_args):
|
||||
def test_json_parse_error_retries_and_fails(self, create_args):
|
||||
"""HTTP 200 with invalid JSON should retry and eventually fail."""
|
||||
args = create_args(token_classic="fake_token")
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 200
|
||||
mock_response.read.return_value = b"not valid json {"
|
||||
@@ -85,7 +74,7 @@ class TestRetrieveDataRetry:
|
||||
|
||||
call_count = 0
|
||||
|
||||
def mock_make_request(*args, **kwargs):
|
||||
def mock_make_request(*a, **kw):
|
||||
nonlocal call_count
|
||||
call_count += 1
|
||||
return mock_response
|
||||
@@ -99,7 +88,7 @@ class TestRetrieveDataRetry:
|
||||
): # No delay in tests
|
||||
with pytest.raises(Exception) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
mock_args, "https://api.github.com/repos/test/repo/issues"
|
||||
args, "https://api.github.com/repos/test/repo/issues"
|
||||
)
|
||||
|
||||
assert "Failed to read response after" in str(exc_info.value)
|
||||
@@ -107,8 +96,9 @@ class TestRetrieveDataRetry:
|
||||
call_count == DEFAULT_MAX_RETRIES + 1
|
||||
) # 1 initial + 5 retries = 6 attempts
|
||||
|
||||
def test_json_parse_error_recovers_on_retry(self, mock_args):
|
||||
def test_json_parse_error_recovers_on_retry(self, create_args):
|
||||
"""HTTP 200 with invalid JSON should succeed if retry returns valid JSON."""
|
||||
args = create_args(token_classic="fake_token")
|
||||
bad_response = Mock()
|
||||
bad_response.getcode.return_value = 200
|
||||
bad_response.read.return_value = b"not valid json {"
|
||||
@@ -122,7 +112,7 @@ class TestRetrieveDataRetry:
|
||||
responses = [bad_response, bad_response, good_response]
|
||||
call_count = 0
|
||||
|
||||
def mock_make_request(*args, **kwargs):
|
||||
def mock_make_request(*a, **kw):
|
||||
nonlocal call_count
|
||||
result = responses[call_count]
|
||||
call_count += 1
|
||||
@@ -136,14 +126,15 @@ class TestRetrieveDataRetry:
|
||||
"github_backup.github_backup.calculate_retry_delay", return_value=0
|
||||
):
|
||||
result = github_backup.retrieve_data(
|
||||
mock_args, "https://api.github.com/repos/test/repo/issues"
|
||||
args, "https://api.github.com/repos/test/repo/issues"
|
||||
)
|
||||
|
||||
assert result == [{"id": 1}]
|
||||
assert call_count == 3 # Failed twice, succeeded on third
|
||||
|
||||
def test_http_error_raises_exception(self, mock_args):
|
||||
def test_http_error_raises_exception(self, create_args):
|
||||
"""Non-success HTTP status codes should raise Exception."""
|
||||
args = create_args(token_classic="fake_token")
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 404
|
||||
mock_response.read.return_value = b'{"message": "Not Found"}'
|
||||
@@ -156,7 +147,7 @@ class TestRetrieveDataRetry:
|
||||
):
|
||||
with pytest.raises(Exception) as exc_info:
|
||||
github_backup.retrieve_data(
|
||||
mock_args, "https://api.github.com/repos/test/notfound/issues"
|
||||
args, "https://api.github.com/repos/test/notfound/issues"
|
||||
)
|
||||
|
||||
assert not isinstance(
|
||||
@@ -298,6 +289,28 @@ class TestMakeRequestWithRetry:
|
||||
assert exc_info.value.code == 403
|
||||
assert call_count == 1 # No retries
|
||||
|
||||
def test_451_error_not_retried(self):
|
||||
"""HTTP 451 should not be retried - raise immediately."""
|
||||
call_count = 0
|
||||
|
||||
def mock_urlopen(*args, **kwargs):
|
||||
nonlocal call_count
|
||||
call_count += 1
|
||||
raise HTTPError(
|
||||
url="https://api.github.com/test",
|
||||
code=451,
|
||||
msg="Unavailable For Legal Reasons",
|
||||
hdrs={"x-ratelimit-remaining": "5000"},
|
||||
fp=None,
|
||||
)
|
||||
|
||||
with patch("github_backup.github_backup.urlopen", side_effect=mock_urlopen):
|
||||
with pytest.raises(HTTPError) as exc_info:
|
||||
make_request_with_retry(Mock(), None)
|
||||
|
||||
assert exc_info.value.code == 451
|
||||
assert call_count == 1 # No retries
|
||||
|
||||
def test_connection_error_retries_and_succeeds(self):
|
||||
"""URLError (connection error) should retry and succeed if subsequent request works."""
|
||||
good_response = Mock()
|
||||
@@ -343,24 +356,43 @@ class TestMakeRequestWithRetry:
|
||||
) # 1 initial + 5 retries = 6 attempts
|
||||
|
||||
|
||||
class TestRetrieveGraphqlDataLogging:
|
||||
"""Tests for GraphQL request logging."""
|
||||
|
||||
def test_logs_graphql_context(self, create_args, caplog):
|
||||
args = create_args(token_classic="fake_token")
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 200
|
||||
mock_response.read.return_value = json.dumps({"data": {}}).encode("utf-8")
|
||||
mock_response.headers = {"x-ratelimit-remaining": "5000"}
|
||||
|
||||
caplog.set_level(logging.INFO, logger="github_backup.github_backup")
|
||||
with patch(
|
||||
"github_backup.github_backup.make_request_with_retry",
|
||||
return_value=mock_response,
|
||||
):
|
||||
github_backup.retrieve_graphql_data(
|
||||
args,
|
||||
"query { viewer { login } }",
|
||||
log_context="discussion owner/repo#1",
|
||||
)
|
||||
|
||||
assert (
|
||||
"Requesting https://api.github.com/graphql (discussion owner/repo#1)"
|
||||
in caplog.text
|
||||
)
|
||||
|
||||
|
||||
class TestRetrieveDataThrottling:
|
||||
"""Tests for throttling behavior in retrieve_data."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_args(self):
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = "fake_token"
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = 10 # Throttle when remaining <= 10
|
||||
args.throttle_pause = 5 # Pause 5 seconds
|
||||
args.max_retries = DEFAULT_MAX_RETRIES
|
||||
return args
|
||||
|
||||
def test_throttling_pauses_when_rate_limit_low(self, mock_args):
|
||||
def test_throttling_pauses_when_rate_limit_low(self, create_args):
|
||||
"""Should pause when x-ratelimit-remaining is at or below throttle_limit."""
|
||||
args = create_args(
|
||||
token_classic="fake_token",
|
||||
throttle_limit=10,
|
||||
throttle_pause=5,
|
||||
)
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 200
|
||||
mock_response.read.return_value = json.dumps([{"id": 1}]).encode("utf-8")
|
||||
@@ -375,7 +407,7 @@ class TestRetrieveDataThrottling:
|
||||
):
|
||||
with patch("github_backup.github_backup.time.sleep") as mock_sleep:
|
||||
github_backup.retrieve_data(
|
||||
mock_args, "https://api.github.com/repos/test/repo/issues"
|
||||
args, "https://api.github.com/repos/test/repo/issues"
|
||||
)
|
||||
|
||||
mock_sleep.assert_called_once_with(5) # throttle_pause value
|
||||
@@ -384,21 +416,9 @@ class TestRetrieveDataThrottling:
|
||||
class TestRetrieveDataSingleItem:
|
||||
"""Tests for single item (dict) responses in retrieve_data."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_args(self):
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = "fake_token"
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
args.max_retries = DEFAULT_MAX_RETRIES
|
||||
return args
|
||||
|
||||
def test_dict_response_returned_as_list(self, mock_args):
|
||||
def test_dict_response_returned_as_list(self, create_args):
|
||||
"""Single dict response should be returned as a list with one item."""
|
||||
args = create_args(token_classic="fake_token")
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 200
|
||||
mock_response.read.return_value = json.dumps(
|
||||
@@ -411,7 +431,7 @@ class TestRetrieveDataSingleItem:
|
||||
return_value=mock_response,
|
||||
):
|
||||
result = github_backup.retrieve_data(
|
||||
mock_args, "https://api.github.com/user"
|
||||
args, "https://api.github.com/user"
|
||||
)
|
||||
|
||||
assert result == [{"login": "testuser", "id": 123}]
|
||||
@@ -474,17 +494,12 @@ class TestRetriesCliArgument:
|
||||
assert result == good_response
|
||||
assert call_count == 2 # 1 initial + 1 retry = 2 attempts
|
||||
|
||||
def test_custom_retry_count_limits_attempts(self):
|
||||
def test_custom_retry_count_limits_attempts(self, create_args):
|
||||
"""Custom --retries value should limit actual retry attempts."""
|
||||
args = Mock()
|
||||
args.as_app = False
|
||||
args.token_fine = None
|
||||
args.token_classic = "fake_token"
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.throttle_limit = None
|
||||
args.throttle_pause = 0
|
||||
args.max_retries = 2 # 2 retries = 3 total attempts (1 initial + 2 retries)
|
||||
args = create_args(
|
||||
token_classic="fake_token",
|
||||
max_retries=2, # 2 retries = 3 total attempts (1 initial + 2 retries)
|
||||
)
|
||||
|
||||
mock_response = Mock()
|
||||
mock_response.getcode.return_value = 200
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Tests for --skip-assets-on flag behavior (issue #135)."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import Mock, patch
|
||||
from unittest.mock import patch
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
@@ -13,52 +13,6 @@ class TestSkipAssetsOn:
|
||||
while still backing up release metadata.
|
||||
"""
|
||||
|
||||
def _create_mock_args(self, **overrides):
|
||||
"""Create a mock args object with sensible defaults."""
|
||||
args = Mock()
|
||||
args.user = "testuser"
|
||||
args.output_directory = "/tmp/backup"
|
||||
args.include_repository = False
|
||||
args.include_everything = False
|
||||
args.include_gists = False
|
||||
args.include_starred_gists = False
|
||||
args.all_starred = False
|
||||
args.skip_existing = False
|
||||
args.bare_clone = False
|
||||
args.lfs_clone = False
|
||||
args.no_prune = False
|
||||
args.include_wiki = False
|
||||
args.include_issues = False
|
||||
args.include_issue_comments = False
|
||||
args.include_issue_events = False
|
||||
args.include_pulls = False
|
||||
args.include_pull_comments = False
|
||||
args.include_pull_commits = False
|
||||
args.include_pull_details = False
|
||||
args.include_labels = False
|
||||
args.include_hooks = False
|
||||
args.include_milestones = False
|
||||
args.include_releases = True
|
||||
args.include_assets = True
|
||||
args.skip_assets_on = []
|
||||
args.include_attachments = False
|
||||
args.incremental = False
|
||||
args.incremental_by_files = False
|
||||
args.github_host = None
|
||||
args.prefer_ssh = False
|
||||
args.token_classic = "test-token"
|
||||
args.token_fine = None
|
||||
args.as_app = False
|
||||
args.osx_keychain_item_name = None
|
||||
args.osx_keychain_item_account = None
|
||||
args.skip_prerelease = False
|
||||
args.number_of_latest_releases = None
|
||||
|
||||
for key, value in overrides.items():
|
||||
setattr(args, key, value)
|
||||
|
||||
return args
|
||||
|
||||
def _create_mock_repository(self, name="test-repo", owner="testuser"):
|
||||
"""Create a mock repository object."""
|
||||
return {
|
||||
@@ -123,10 +77,10 @@ class TestSkipAssetsOnBehavior(TestSkipAssetsOn):
|
||||
@patch("github_backup.github_backup.mkdir_p")
|
||||
@patch("github_backup.github_backup.json_dump_if_changed")
|
||||
def test_assets_downloaded_when_not_skipped(
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download, create_args
|
||||
):
|
||||
"""Assets should be downloaded when repo is not in skip list."""
|
||||
args = self._create_mock_args(skip_assets_on=[])
|
||||
args = create_args(skip_assets_on=[])
|
||||
repository = self._create_mock_repository(name="normal-repo")
|
||||
release = self._create_mock_release()
|
||||
asset = self._create_mock_asset()
|
||||
@@ -154,10 +108,10 @@ class TestSkipAssetsOnBehavior(TestSkipAssetsOn):
|
||||
@patch("github_backup.github_backup.mkdir_p")
|
||||
@patch("github_backup.github_backup.json_dump_if_changed")
|
||||
def test_assets_skipped_when_repo_name_matches(
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download, create_args
|
||||
):
|
||||
"""Assets should be skipped when repo name is in skip list."""
|
||||
args = self._create_mock_args(skip_assets_on=["big-repo"])
|
||||
args = create_args(skip_assets_on=["big-repo"])
|
||||
repository = self._create_mock_repository(name="big-repo")
|
||||
release = self._create_mock_release()
|
||||
|
||||
@@ -180,10 +134,10 @@ class TestSkipAssetsOnBehavior(TestSkipAssetsOn):
|
||||
@patch("github_backup.github_backup.mkdir_p")
|
||||
@patch("github_backup.github_backup.json_dump_if_changed")
|
||||
def test_assets_skipped_when_full_name_matches(
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download, create_args
|
||||
):
|
||||
"""Assets should be skipped when owner/repo format matches."""
|
||||
args = self._create_mock_args(skip_assets_on=["otheruser/big-repo"])
|
||||
args = create_args(skip_assets_on=["otheruser/big-repo"])
|
||||
repository = self._create_mock_repository(name="big-repo", owner="otheruser")
|
||||
release = self._create_mock_release()
|
||||
|
||||
@@ -206,11 +160,11 @@ class TestSkipAssetsOnBehavior(TestSkipAssetsOn):
|
||||
@patch("github_backup.github_backup.mkdir_p")
|
||||
@patch("github_backup.github_backup.json_dump_if_changed")
|
||||
def test_case_insensitive_matching(
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download, create_args
|
||||
):
|
||||
"""Skip matching should be case-insensitive."""
|
||||
# User types uppercase, repo name is lowercase
|
||||
args = self._create_mock_args(skip_assets_on=["BIG-REPO"])
|
||||
args = create_args(skip_assets_on=["BIG-REPO"])
|
||||
repository = self._create_mock_repository(name="big-repo")
|
||||
release = self._create_mock_release()
|
||||
|
||||
@@ -233,10 +187,10 @@ class TestSkipAssetsOnBehavior(TestSkipAssetsOn):
|
||||
@patch("github_backup.github_backup.mkdir_p")
|
||||
@patch("github_backup.github_backup.json_dump_if_changed")
|
||||
def test_multiple_skip_repos(
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download, create_args
|
||||
):
|
||||
"""Multiple repos in skip list should all be skipped."""
|
||||
args = self._create_mock_args(skip_assets_on=["repo1", "repo2", "repo3"])
|
||||
args = create_args(skip_assets_on=["repo1", "repo2", "repo3"])
|
||||
repository = self._create_mock_repository(name="repo2")
|
||||
release = self._create_mock_release()
|
||||
|
||||
@@ -259,10 +213,10 @@ class TestSkipAssetsOnBehavior(TestSkipAssetsOn):
|
||||
@patch("github_backup.github_backup.mkdir_p")
|
||||
@patch("github_backup.github_backup.json_dump_if_changed")
|
||||
def test_release_metadata_still_saved_when_assets_skipped(
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download, create_args
|
||||
):
|
||||
"""Release JSON should still be saved even when assets are skipped."""
|
||||
args = self._create_mock_args(skip_assets_on=["big-repo"])
|
||||
args = create_args(skip_assets_on=["big-repo"])
|
||||
repository = self._create_mock_repository(name="big-repo")
|
||||
release = self._create_mock_release()
|
||||
|
||||
@@ -287,10 +241,10 @@ class TestSkipAssetsOnBehavior(TestSkipAssetsOn):
|
||||
@patch("github_backup.github_backup.mkdir_p")
|
||||
@patch("github_backup.github_backup.json_dump_if_changed")
|
||||
def test_non_matching_repo_still_downloads_assets(
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download
|
||||
self, mock_json_dump, mock_mkdir, mock_retrieve, mock_download, create_args
|
||||
):
|
||||
"""Repos not in skip list should still download assets."""
|
||||
args = self._create_mock_args(skip_assets_on=["other-repo"])
|
||||
args = create_args(skip_assets_on=["other-repo"])
|
||||
repository = self._create_mock_repository(name="normal-repo")
|
||||
release = self._create_mock_release()
|
||||
asset = self._create_mock_asset()
|
||||
|
||||
@@ -1,39 +1,11 @@
|
||||
"""Tests for --starred-skip-size-over flag behavior (issue #108)."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import Mock
|
||||
|
||||
from github_backup import github_backup
|
||||
|
||||
|
||||
class TestStarredSkipSizeOver:
|
||||
"""Test suite for --starred-skip-size-over flag.
|
||||
|
||||
Issue #108: Allow restricting size of starred repositories before cloning.
|
||||
The size is based on the GitHub API's 'size' field (in KB), but the CLI
|
||||
argument accepts MB for user convenience.
|
||||
"""
|
||||
|
||||
def _create_mock_args(self, **overrides):
|
||||
"""Create a mock args object with sensible defaults."""
|
||||
args = Mock()
|
||||
args.user = "testuser"
|
||||
args.repository = None
|
||||
args.name_regex = None
|
||||
args.languages = None
|
||||
args.fork = False
|
||||
args.private = False
|
||||
args.skip_archived = False
|
||||
args.starred_skip_size_over = None
|
||||
args.exclude = None
|
||||
|
||||
for key, value in overrides.items():
|
||||
setattr(args, key, value)
|
||||
|
||||
return args
|
||||
|
||||
|
||||
class TestStarredSkipSizeOverArgumentParsing(TestStarredSkipSizeOver):
|
||||
class TestStarredSkipSizeOverArgumentParsing:
|
||||
"""Tests for --starred-skip-size-over argument parsing."""
|
||||
|
||||
def test_starred_skip_size_over_not_set_defaults_to_none(self):
|
||||
@@ -52,12 +24,17 @@ class TestStarredSkipSizeOverArgumentParsing(TestStarredSkipSizeOver):
|
||||
github_backup.parse_args(["testuser", "--starred-skip-size-over", "abc"])
|
||||
|
||||
|
||||
class TestStarredSkipSizeOverFiltering(TestStarredSkipSizeOver):
|
||||
"""Tests for --starred-skip-size-over filtering behavior."""
|
||||
class TestStarredSkipSizeOverFiltering:
|
||||
"""Tests for --starred-skip-size-over filtering behavior.
|
||||
|
||||
def test_starred_repo_under_limit_is_kept(self):
|
||||
Issue #108: Allow restricting size of starred repositories before cloning.
|
||||
The size is based on the GitHub API's 'size' field (in KB), but the CLI
|
||||
argument accepts MB for user convenience.
|
||||
"""
|
||||
|
||||
def test_starred_repo_under_limit_is_kept(self, create_args):
|
||||
"""Starred repos under the size limit should be kept."""
|
||||
args = self._create_mock_args(starred_skip_size_over=500)
|
||||
args = create_args(starred_skip_size_over=500)
|
||||
|
||||
repos = [
|
||||
{
|
||||
@@ -72,9 +49,9 @@ class TestStarredSkipSizeOverFiltering(TestStarredSkipSizeOver):
|
||||
assert len(result) == 1
|
||||
assert result[0]["name"] == "small-repo"
|
||||
|
||||
def test_starred_repo_over_limit_is_filtered(self):
|
||||
def test_starred_repo_over_limit_is_filtered(self, create_args):
|
||||
"""Starred repos over the size limit should be filtered out."""
|
||||
args = self._create_mock_args(starred_skip_size_over=500)
|
||||
args = create_args(starred_skip_size_over=500)
|
||||
|
||||
repos = [
|
||||
{
|
||||
@@ -88,9 +65,9 @@ class TestStarredSkipSizeOverFiltering(TestStarredSkipSizeOver):
|
||||
result = github_backup.filter_repositories(args, repos)
|
||||
assert len(result) == 0
|
||||
|
||||
def test_own_repo_over_limit_is_kept(self):
|
||||
def test_own_repo_over_limit_is_kept(self, create_args):
|
||||
"""User's own repos should not be affected by the size limit."""
|
||||
args = self._create_mock_args(starred_skip_size_over=500)
|
||||
args = create_args(starred_skip_size_over=500)
|
||||
|
||||
repos = [
|
||||
{
|
||||
@@ -105,9 +82,9 @@ class TestStarredSkipSizeOverFiltering(TestStarredSkipSizeOver):
|
||||
assert len(result) == 1
|
||||
assert result[0]["name"] == "my-huge-repo"
|
||||
|
||||
def test_starred_repo_at_exact_limit_is_kept(self):
|
||||
def test_starred_repo_at_exact_limit_is_kept(self, create_args):
|
||||
"""Starred repos at exactly the size limit should be kept."""
|
||||
args = self._create_mock_args(starred_skip_size_over=500)
|
||||
args = create_args(starred_skip_size_over=500)
|
||||
|
||||
repos = [
|
||||
{
|
||||
@@ -122,9 +99,9 @@ class TestStarredSkipSizeOverFiltering(TestStarredSkipSizeOver):
|
||||
assert len(result) == 1
|
||||
assert result[0]["name"] == "exact-limit-repo"
|
||||
|
||||
def test_mixed_repos_filtered_correctly(self):
|
||||
def test_mixed_repos_filtered_correctly(self, create_args):
|
||||
"""Mix of own and starred repos should be filtered correctly."""
|
||||
args = self._create_mock_args(starred_skip_size_over=500)
|
||||
args = create_args(starred_skip_size_over=500)
|
||||
|
||||
repos = [
|
||||
{
|
||||
@@ -153,9 +130,9 @@ class TestStarredSkipSizeOverFiltering(TestStarredSkipSizeOver):
|
||||
assert "starred-small" in names
|
||||
assert "starred-huge" not in names
|
||||
|
||||
def test_no_size_limit_keeps_all_starred(self):
|
||||
def test_no_size_limit_keeps_all_starred(self, create_args):
|
||||
"""When no size limit is set, all starred repos should be kept."""
|
||||
args = self._create_mock_args(starred_skip_size_over=None)
|
||||
args = create_args(starred_skip_size_over=None)
|
||||
|
||||
repos = [
|
||||
{
|
||||
@@ -169,9 +146,9 @@ class TestStarredSkipSizeOverFiltering(TestStarredSkipSizeOver):
|
||||
result = github_backup.filter_repositories(args, repos)
|
||||
assert len(result) == 1
|
||||
|
||||
def test_repo_without_size_field_is_kept(self):
|
||||
def test_repo_without_size_field_is_kept(self, create_args):
|
||||
"""Repos without a size field should be kept (size defaults to 0)."""
|
||||
args = self._create_mock_args(starred_skip_size_over=500)
|
||||
args = create_args(starred_skip_size_over=500)
|
||||
|
||||
repos = [
|
||||
{
|
||||
@@ -185,9 +162,9 @@ class TestStarredSkipSizeOverFiltering(TestStarredSkipSizeOver):
|
||||
result = github_backup.filter_repositories(args, repos)
|
||||
assert len(result) == 1
|
||||
|
||||
def test_zero_value_warns_and_is_ignored(self, caplog):
|
||||
def test_zero_value_warns_and_is_ignored(self, create_args, caplog):
|
||||
"""Zero value should warn and keep all repos."""
|
||||
args = self._create_mock_args(starred_skip_size_over=0)
|
||||
args = create_args(starred_skip_size_over=0)
|
||||
|
||||
repos = [
|
||||
{
|
||||
@@ -202,9 +179,9 @@ class TestStarredSkipSizeOverFiltering(TestStarredSkipSizeOver):
|
||||
assert len(result) == 1
|
||||
assert "must be greater than 0" in caplog.text
|
||||
|
||||
def test_negative_value_warns_and_is_ignored(self, caplog):
|
||||
def test_negative_value_warns_and_is_ignored(self, create_args, caplog):
|
||||
"""Negative value should warn and keep all repos."""
|
||||
args = self._create_mock_args(starred_skip_size_over=-5)
|
||||
args = create_args(starred_skip_size_over=-5)
|
||||
|
||||
repos = [
|
||||
{
|
||||
|
||||
Reference in New Issue
Block a user