Mirror of https://github.com/FlareSolverr/FlareSolverr.git (synced 2025-12-05 17:18:19 +01:00)

Compare commits: v3.0.0 ... v3.2.2-hot (56 commits)

Commits in this comparison (SHA1):
c920bea4ca, a785f83034, b42c22f5b1, 9c62410a8b, b8768ae17d, 9b2c602a1f, 8316350b98, 33307ce461,
cedb7bc54e, 6ecaf2362c, 3c97c9603a, efaa5f31b6, 4db85a2d0f, 66b9db21e5, ab0fe58d4a, f68ddb7573,
ac77110578, a9d1a2de2d, ab5f14d6c3, e0bf02fb8b, 82a1cd835a, 7017715e21, ae18559db1, 2680521008,
2297bab185, 8d9bac9dd4, 30ccf18e85, a15d041a0c, c6c74e7c9d, 49fd1aacfc, f6879c70de, 24f59a39cb,
4d16105176, 5957b7b3bc, 8de16058d0, 5fc4f966a5, b903a5dd84, 7e9d5f424f, fc6d2d9095, aef9b2d4d6,
6dc279a9d3, 96fcd21174, 3a6e8e0f92, 2d97f88276, ac5c64319e, c93834e2f0, e3b4200d94, 0941861f80,
8a10eb27a6, e9c08c84ef, 2aa1744476, a89679a52d, 410ee7981f, e163019f28, 7d84f1b663, 4807e9dbe2

.github/ISSUE_TEMPLATE/bug_report.yml (10 lines changed)

@@ -8,6 +8,13 @@ body:
       options:
         - label: I have checked the README
           required: true
+  - type: checkboxes
+    attributes:
+      label: Have you followed our Troubleshooting?
+      description: Please follow our <a href="https://github.com/FlareSolverr/FlareSolverr/wiki/Troubleshooting">Troubleshooting</a>.
+      options:
+        - label: I have followed your Troubleshooting
+          required: true
   - type: checkboxes
     attributes:
       label: Is there already an issue for your problem?
@@ -32,7 +39,8 @@ body:
         - Operating system:
         - Are you using Docker: [yes/no]
         - FlareSolverr User-Agent (see log traces or / endpoint):
-        - Are you using a proxy or VPN: [yes/no]
+        - Are you using a VPN: [yes/no]
+        - Are you using a Proxy: [yes/no]
         - Are you using Captcha Solver: [yes/no]
        - If using captcha solver, which one:
        - URL to test this issue:

.github/workflows/autotag.yml (4 lines changed)

@@ -6,12 +6,12 @@ on:
       - "master"
 
 jobs:
-  build:
+  tag-release:
     runs-on: ubuntu-latest
     steps:
       -
        name: Checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
      -
        name: Auto Tag
        uses: Klemensas/action-autotag@stable

.github/workflows/release-docker.yml (18 lines changed)

@@ -6,44 +6,44 @@ on:
       - 'v*.*.*'
 
 jobs:
-  build:
-    runs-on: ubuntu-latest
+  build-docker-images:
+    runs-on: ubuntu-22.04
     steps:
       -
        name: Checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
      -
        name: Downcase repo
        run: echo REPOSITORY=$(echo ${{ github.repository }} | tr '[:upper:]' '[:lower:]') >> $GITHUB_ENV
      -
        name: Docker meta
        id: docker_meta
-       uses: crazy-max/ghaction-docker-meta@v1
+       uses: crazy-max/ghaction-docker-meta@v3
        with:
          images: ${{ env.REPOSITORY }},ghcr.io/${{ env.REPOSITORY }}
          tag-sha: false
      -
        name: Set up QEMU
-       uses: docker/setup-qemu-action@v1.0.1
+       uses: docker/setup-qemu-action@v2
      -
        name: Set up Docker Buildx
-       uses: docker/setup-buildx-action@v1
+       uses: docker/setup-buildx-action@v2
      -
        name: Login to DockerHub
-       uses: docker/login-action@v1
+       uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Login to GitHub Container Registry
-       uses: docker/login-action@v1
+       uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GH_PAT }}
      -
        name: Build and push
-       uses: docker/build-push-action@v2
+       uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile

.github/workflows/release.yml (72 lines changed)

@@ -6,26 +6,15 @@ on:
       - 'v*.*.*'
 
 jobs:
-  build:
+  create-release:
     name: Create release
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           fetch-depth: 0 # get all commits, branches and tags (required for the changelog)
 
-      - name: Setup Node
-        uses: actions/setup-node@v2
-        with:
-          node-version: '16'
-
-      - name: Build artifacts
-        run: |
-          npm install
-          npm run build
-          npm run package
-
       - name: Build changelog
         id: github_changelog
         run: |
@@ -47,9 +36,60 @@ jobs:
           draft: false
           prerelease: false
 
+  build-linux-package:
+    name: Build Linux binary
+    needs: create-release
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 0 # get all commits, branches and tags (required for the changelog)
+
+      - name: Setup Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+
+      - name: Build artifacts
+        run: |
+          python -m pip install -r requirements.txt
+          python -m pip install pyinstaller==5.9.0
+          cd src
+          python build_package.py
+
       - name: Upload release artifacts
-        uses: alexellis/upload-assets@0.2.2
+        uses: alexellis/upload-assets@0.4.0
         env:
           GITHUB_TOKEN: ${{ secrets.GH_PAT }}
         with:
-          asset_paths: '["./bin/*.zip"]'
+          asset_paths: '["./dist/flaresolverr_*"]'
+
+  build-windows-package:
+    name: Build Windows binary
+    needs: create-release
+    runs-on: windows-2022
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 0 # get all commits, branches and tags (required for the changelog)
+
+      - name: Setup Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+
+      - name: Build artifacts
+        run: |
+          python -m pip install -r requirements.txt
+          python -m pip install pyinstaller==5.9.0
+          cd src
+          python build_package.py
+
+      - name: Upload release artifacts
+        uses: alexellis/upload-assets@0.4.0
+        env:
+          GITHUB_TOKEN: ${{ secrets.GH_PAT }}
+        with:
+          asset_paths: '["./dist/flaresolverr_*"]'

.gitignore (4 lines changed)

@@ -25,6 +25,7 @@ __pycache__/
 build/
 develop-eggs/
 dist/
+dist_chrome/
 downloads/
 eggs/
 .eggs/
@@ -123,3 +124,6 @@ venv.bak/
 .mypy_cache/
 .dmypy.json
 dmypy.json
+
+# node
+node_modules/

CHANGELOG.md (62 lines changed)

@@ -1,5 +1,67 @@
 # Changelog
 
+## v3.2.2 (2023/07/16)
+
+* Workaround for updated 'verify your are human' check
+
+## v3.2.1 (2023/06/10)
+
+* Kill dead Chrome processes in Windows
+* Fix Chrome GL erros in ASUSTOR NAS
+
+## v3.2.0 (2023/05/23)
+
+* Support "proxy" param in requests and sessions
+* Support "cookies" param in requests
+* Fix Chromium exec permissions in Linux package
+* Update Python dependencies
+
+## v3.1.2 (2023/04/02)
+
+* Fix headless mode in macOS
+* Remove redundant artifact from Windows binary package
+* Bump Selenium dependency
+
+## v3.1.1 (2023/03/25)
+
+* Distribute binary executables in compressed package
+* Add icon for binary executable
+* Include information about supported architectures in the readme
+* Check Python version on start
+
+## v3.1.0 (2023/03/20)
+
+* Build binaries for Linux x64 and Windows x64
+* Sessions with auto-creation on fetch request and TTL
+* Fix error trace: Crash Reports/pending No such file or directory
+* Fix Waitress server error with asyncore_use_poll=true
+* Attempt to fix Docker ARM32 build
+* Print platform information on start up
+* Add Fairlane challenge selector
+* Update DDOS-GUARD title
+* Update dependencies
+
+## v3.0.4 (2023/03/07)
+
+* Click on the Cloudflare's 'Verify you are human' button if necessary
+
+## v3.0.3 (2023/03/06)
+
+* Update undetected_chromedriver version to 3.4.6
+
+## v3.0.2 (2023/01/08)
+
+* Detect Cloudflare blocked access
+* Check Chrome / Chromium web browser is installed correctly
+
+## v3.0.1 (2023/01/06)
+
+* Kill Chromium processes properly to avoid defunct/zombie processes
+* Update undetected-chromedriver
+* Disable Zygote sandbox in Chromium browser
+* Add more selectors to detect blocked access
+* Include procps (ps), curl and vim packages in the Docker image
+
 ## v3.0.0 (2023/01/04)
 
 * This is the first release of FlareSolverr v3. There are some breaking changes

Dockerfile (20 lines changed)

@@ -29,7 +29,8 @@ RUN dpkg -i /libgl1-mesa-dri.deb \
     && dpkg -i /adwaita-icon-theme.deb \
     # Install dependencies
     && apt-get update \
-    && apt-get install -y --no-install-recommends chromium chromium-common chromium-driver xvfb \
+    && apt-get install -y --no-install-recommends chromium chromium-common chromium-driver xvfb dumb-init \
+       procps curl vim xauth \
     # Remove temporary files and hardware decoding libraries
     && rm -rf /var/lib/apt/lists/* \
     && rm -f /usr/lib/x86_64-linux-gnu/libmfxhw* \
@@ -47,19 +48,30 @@ RUN pip install -r requirements.txt \
 
 USER flaresolverr
 
+RUN mkdir -p "/app/.config/chromium/Crash Reports/pending"
+
 COPY src .
 COPY package.json ../
 
 EXPOSE 8191
 
+# dumb-init avoids zombie chromium processes
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
 CMD ["/usr/local/bin/python", "-u", "/app/flaresolverr.py"]
 
 # Local build
-# docker build -t ngosang/flaresolverr:3.0.0 .
-# docker run -p 8191:8191 ngosang/flaresolverr:3.0.0
+# docker build -t ngosang/flaresolverr:3.2.0 .
+# docker run -p 8191:8191 ngosang/flaresolverr:3.2.0
 
 # Multi-arch build
 # docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
 # docker buildx create --use
-# docker buildx build -t ngosang/flaresolverr:3.0.0 --platform linux/386,linux/amd64,linux/arm/v7,linux/arm64/v8 .
+# docker buildx build -t ngosang/flaresolverr:3.2.0 --platform linux/386,linux/amd64,linux/arm/v7,linux/arm64/v8 .
 # add --push to publish in DockerHub
+
+# Test multi-arch build
+# docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
+# docker buildx create --use
+# docker buildx build -t ngosang/flaresolverr:3.2.0 --platform linux/arm/v7 --load .
+# docker run -p 8191:8191 --platform linux/arm/v7 ngosang/flaresolverr:3.2.0

README.md (39 lines changed)

@@ -64,16 +64,20 @@ Remember to restart the Docker daemon and the container after the update.
 
 ### Precompiled binaries
 
+> **Warning**
+> Precompiled binaries are only available for x64 architecture. For other architectures see Docker images.
+
 This is the recommended way for Windows users.
-* Download the [FlareSolverr zip](https://github.com/FlareSolverr/FlareSolverr/releases) from the release's assets. It is available for Windows and Linux.
-* Extract the zip file. FlareSolverr executable and firefox folder must be in the same directory.
+* Download the [FlareSolverr executable](https://github.com/FlareSolverr/FlareSolverr/releases) from the release's page. It is available for Windows x64 and Linux x64.
 * Execute FlareSolverr binary. In the environment variables section you can find how to change the configuration.
 
 ### From source code
 
-This is the recommended way for macOS users and for developers.
-* Install [Python 3.10](https://www.python.org/downloads/).
-* Install [Chrome](https://www.google.com/intl/en_us/chrome/) or [Chromium](https://www.chromium.org/getting-involved/download-chromium/) web browser.
+> **Warning**
+> Installing from source code only works for x64 architecture. For other architectures see Docker images.
+
+* Install [Python 3.11](https://www.python.org/downloads/).
+* Install [Chrome](https://www.google.com/intl/en_us/chrome/) (all OS) or [Chromium](https://www.chromium.org/getting-involved/download-chromium/) (just Linux, it doesn't work in Windows) web browser.
 * (Only in Linux / macOS) Install [Xvfb](https://en.wikipedia.org/wiki/Xvfb) package.
 * Clone this repository and open a shell in that path.
 * Run `pip install -r requirements.txt` command to install FlareSolverr dependencies.
@@ -140,16 +144,18 @@ session. When you no longer need to use a session you should make sure to close
 
 #### + `request.get`
 
 | Parameter | Notes |
-|-------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------|
+|---------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------|
 | url | Mandatory |
 | session | Optional. Will send the request from and existing browser instance. If one is not sent it will create a temporary instance that will be destroyed immediately after the request is completed. |
-| maxTimeout | Optional, default value 60000. Max timeout to solve the challenge in milliseconds. |
-| cookies | Optional. Will be used by the headless browser. Follow [this](https://github.com/puppeteer/puppeteer/blob/v3.3.0/docs/api.md#pagesetcookiecookies) format. |
-| returnOnlyCookies | Optional, default false. Only returns the cookies. Response data, headers and other parts of the response are removed. |
-| proxy | Optional, default disabled. Eg: `"proxy": {"url": "http://127.0.0.1:8888"}`. You must include the proxy schema in the URL: `http://`, `socks4://` or `socks5://`. Authorization (username/password) is not supported. (When the `session` parameter is set, the proxy is ignored; a session specific proxy can be set in `sessions.create`.) |
+| session_ttl_minutes | Optional. FlareSolverr will automatically rotate expired sessions based on the TTL provided in minutes. |
+| maxTimeout | Optional, default value 60000. Max timeout to solve the challenge in milliseconds. |
+| cookies | Optional. Will be used by the headless browser. Eg: `"cookies": [{"name": "cookie1", "value": "value1"}, {"name": "cookie2", "value": "value2"}]`. |
+| returnOnlyCookies | Optional, default false. Only returns the cookies. Response data, headers and other parts of the response are removed. |
+| proxy | Optional, default disabled. Eg: `"proxy": {"url": "http://127.0.0.1:8888"}`. You must include the proxy schema in the URL: `http://`, `socks4://` or `socks5://`. Authorization (username/password) is not supported. (When the `session` parameter is set, the proxy is ignored; a session specific proxy can be set in `sessions.create`.) |
 
-:warning: If you want to use Cloudflare clearance cookie in your scripts, make sure you use the FlareSolverr User-Agent too. If they don't match you will see the challenge.
+> **Warning**
+> If you want to use Cloudflare clearance cookie in your scripts, make sure you use the FlareSolverr User-Agent too. If they don't match you will see the challenge.
 
 Example response from running the `curl` above:
 
@@ -239,13 +245,14 @@ Environment variables are set differently depending on the operating system. Som
 
 ## Captcha Solvers
 
-:warning: At this time none of the captcha solvers work. You can check the status in the open issues. Any help is welcome.
+> **Warning**
+> At this time none of the captcha solvers work. You can check the status in the open issues. Any help is welcome.
 
 Sometimes CloudFlare not only gives mathematical computations and browser tests, sometimes they also require the user to
 solve a captcha.
 If this is the case, FlareSolverr will return the error `Captcha detected but no automatic solver is configured.`
 
-FlareSolverr can be customized to solve the captchas automatically by setting the environment variable `CAPTCHA_SOLVER`
+FlareSolverr can be customized to solve the CAPTCHA automatically by setting the environment variable `CAPTCHA_SOLVER`
 to the file name of one of the adapters inside the [/captcha](src/captcha) directory.
 
 ## Related projects

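As an illustration of the `request.get` parameters documented in the README changes above, here is a minimal client sketch. It assumes a FlareSolverr instance listening on the default localhost:8191; the target URL, cookie values and proxy address are placeholders, not part of the diff.

import requests

# Build a request.get payload using the parameters from the table above.
payload = {
    "cmd": "request.get",
    "url": "https://example.com",                 # placeholder target site
    "maxTimeout": 60000,                          # milliseconds
    "cookies": [
        {"name": "cookie1", "value": "value1"},   # placeholder cookies
        {"name": "cookie2", "value": "value2"},
    ],
    "proxy": {"url": "http://127.0.0.1:8888"},    # placeholder proxy; ignored when "session" is set
}

# POST it to the local FlareSolverr /v1 endpoint.
resp = requests.post("http://localhost:8191/v1", json=payload, timeout=120)
data = resp.json()
print(data["status"], data["message"])

# Reuse the clearance cookies together with the matching User-Agent,
# as the warning above requires.
print(data["solution"]["cookies"])
print(data["solution"]["userAgent"])
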
package.json

@@ -1,7 +1,7 @@
 {
   "name": "flaresolverr",
-  "version": "3.0.0",
+  "version": "3.2.2",
   "description": "Proxy server to bypass Cloudflare protection",
   "author": "Diego Heras (ngosang / ngosang@hotmail.es)",
   "license": "MIT"
 }

requirements.txt

@@ -1,9 +1,12 @@
-bottle==0.12.23
+bottle==0.12.25
 waitress==2.1.2
-selenium==4.4.3
+selenium==4.9.1
 func-timeout==4.3.5
 # required by undetected_chromedriver
-requests==2.28.1
-websockets==10.3
+requests==2.31.0
+certifi==2023.5.7
+websockets==11.0.3
 # only required for linux
 xvfbwrapper==0.2.9
+# only required for windows
+pefile==2023.2.7

resources/flaresolverr_logo.ico (new binary file, 8.8 KiB)

src/build_package.py (new file, 108 lines)

import os
import platform
import shutil
import subprocess
import sys
import zipfile

import requests


def clean_files():
    try:
        shutil.rmtree(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, 'build'))
    except Exception:
        pass
    try:
        shutil.rmtree(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, 'dist'))
    except Exception:
        pass
    try:
        shutil.rmtree(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, 'dist_chrome'))
    except Exception:
        pass


def download_chromium():
    # https://commondatastorage.googleapis.com/chromium-browser-snapshots/index.html?prefix=Linux_x64/
    revision = "1090006" if os.name == 'nt' else '1090007'
    arch = 'Win' if os.name == 'nt' else 'Linux_x64'
    dl_file = 'chrome-win' if os.name == 'nt' else 'chrome-linux'
    dl_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, 'dist_chrome')
    dl_path_folder = os.path.join(dl_path, dl_file)
    dl_path_zip = dl_path_folder + '.zip'

    # response = requests.get(
    #     f'https://commondatastorage.googleapis.com/chromium-browser-snapshots/{arch}/LAST_CHANGE',
    #     timeout=30)
    # revision = response.text.strip()
    print("Downloading revision: " + revision)

    os.mkdir(dl_path)
    with requests.get(
            f'https://commondatastorage.googleapis.com/chromium-browser-snapshots/{arch}/{revision}/{dl_file}.zip',
            stream=True) as r:
        r.raise_for_status()
        with open(dl_path_zip, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)
    print("File downloaded: " + dl_path_zip)
    with zipfile.ZipFile(dl_path_zip, 'r') as zip_ref:
        zip_ref.extractall(dl_path)
    os.remove(dl_path_zip)

    chrome_path = os.path.join(dl_path, "chrome")
    shutil.move(dl_path_folder, chrome_path)
    print("Extracted in: " + chrome_path)

    if os.name != 'nt':
        # Give executable permissions for *nix
        # file * | grep executable | cut -d: -f1
        print("Giving executable permissions...")
        execs = ['chrome', 'chrome_crashpad_handler', 'chrome_sandbox', 'chrome-wrapper', 'nacl_helper',
                 'nacl_helper_bootstrap', 'nacl_irt_x86_64.nexe', 'xdg-mime', 'xdg-settings']
        for exec_file in execs:
            exec_path = os.path.join(chrome_path, exec_file)
            os.chmod(exec_path, 0o755)


def run_pyinstaller():
    sep = ';' if os.name == 'nt' else ':'
    subprocess.check_call([sys.executable, "-m", "PyInstaller",
                           "--icon", "resources/flaresolverr_logo.ico",
                           "--add-data", f"package.json{sep}.",
                           "--add-data", f"{os.path.join('dist_chrome', 'chrome')}{sep}chrome",
                           os.path.join("src", "flaresolverr.py")],
                          cwd=os.pardir)


def compress_package():
    dist_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, 'dist')
    package_folder = os.path.join(dist_folder, 'package')
    shutil.move(os.path.join(dist_folder, 'flaresolverr'), os.path.join(package_folder, 'flaresolverr'))
    print("Package folder: " + package_folder)

    compr_format = 'zip' if os.name == 'nt' else 'gztar'
    compr_file_name = 'flaresolverr_windows_x64' if os.name == 'nt' else 'flaresolverr_linux_x64'
    compr_file_path = os.path.join(dist_folder, compr_file_name)
    shutil.make_archive(compr_file_path, compr_format, package_folder)
    print("Compressed file path: " + compr_file_path)


if __name__ == "__main__":
    print("Building package...")
    print("Platform: " + platform.platform())

    print("Cleaning previous build...")
    clean_files()

    print("Downloading Chromium...")
    download_chromium()

    print("Building pyinstaller executable... ")
    run_pyinstaller()

    print("Compressing package... ")
    compress_package()

# NOTE: python -m pip install pyinstaller

src/dtos.py

@@ -33,6 +33,7 @@ class V1RequestBase(object):
     maxTimeout: int = None
     proxy: dict = None
     session: str = None
+    session_ttl_minutes: int = None
     headers: list = None  # deprecated v2.0.0, not used
     userAgent: str = None  # deprecated v2.0.0, not used
 
@@ -51,6 +52,8 @@ class V1ResponseBase(object):
     # V1ResponseBase
     status: str = None
     message: str = None
+    session: str = None
+    sessions: list[str] = None
     startTimestamp: int = None
     endTimestamp: int = None
     version: str = None

src/flaresolverr.py

@@ -3,11 +3,12 @@ import logging
 import os
 import sys
 
-from bottle import run, response, Bottle, request
+import certifi
+from bottle import run, response, Bottle, request, ServerAdapter
 
 from bottle_plugins.error_plugin import error_plugin
 from bottle_plugins.logger_plugin import logger_plugin
-from dtos import IndexResponse, V1RequestBase
+from dtos import V1RequestBase
 import flaresolverr_service
 import utils
 
@@ -60,6 +61,16 @@ def controller_v1():
 
 
 if __name__ == "__main__":
+    # check python version
+    if sys.version_info < (3, 9):
+        raise Exception("The Python version is less than 3.9, a version equal to or higher is required.")
+
+    # fix ssl certificates for compiled binaries
+    # https://github.com/pyinstaller/pyinstaller/issues/7229
+    # https://stackoverflow.com/questions/55736855/how-to-change-the-cafile-argument-in-the-ssl-module-in-python3
+    os.environ["REQUESTS_CA_BUNDLE"] = certifi.where()
+    os.environ["SSL_CERT_FILE"] = certifi.where()
+
     # validate configuration
     log_level = os.environ.get('LOG_LEVEL', 'info').upper()
     log_html = utils.get_config_log_html()
@@ -92,4 +103,10 @@ if __name__ == "__main__":
 
     # start webserver
     # default server 'wsgiref' does not support concurrent requests
-    run(app, host=server_host, port=server_port, quiet=True, server='waitress')
+    # https://github.com/FlareSolverr/FlareSolverr/issues/680
+    # https://github.com/Pylons/waitress/issues/31
+    class WaitressServerPoll(ServerAdapter):
+        def run(self, handler):
+            from waitress import serve
+            serve(handler, host=self.host, port=self.port, asyncore_use_poll=True)
+    run(app, host=server_host, port=server_port, quiet=True, server=WaitressServerPoll)

src/flaresolverr_service.py

@@ -1,42 +1,77 @@
 import logging
+import platform
+import sys
 import time
+from datetime import timedelta
 from urllib.parse import unquote
 
-from func_timeout import func_timeout, FunctionTimedOut
+from func_timeout import FunctionTimedOut, func_timeout
 from selenium.common import TimeoutException
 from selenium.webdriver.chrome.webdriver import WebDriver
 from selenium.webdriver.common.by import By
+from selenium.webdriver.support.expected_conditions import (
+    presence_of_element_located, staleness_of, title_is)
+from selenium.webdriver.common.action_chains import ActionChains
 from selenium.webdriver.support.wait import WebDriverWait
-from selenium.webdriver.support.expected_conditions import presence_of_element_located, staleness_of, title_is
 
-from dtos import V1RequestBase, V1ResponseBase, ChallengeResolutionT, ChallengeResolutionResultT, IndexResponse, \
-    HealthResponse, STATUS_OK, STATUS_ERROR
 import utils
+from dtos import (STATUS_ERROR, STATUS_OK, ChallengeResolutionResultT,
+                  ChallengeResolutionT, HealthResponse, IndexResponse,
+                  V1RequestBase, V1ResponseBase)
+from sessions import SessionsStorage
 
+ACCESS_DENIED_TITLES = [
+    # Cloudflare
+    'Access denied',
+    # Cloudflare http://bitturk.net/ Firefox
+    'Attention Required! | Cloudflare'
+]
 ACCESS_DENIED_SELECTORS = [
     # Cloudflare
-    'div.cf-error-title span.cf-code-label span'
+    'div.cf-error-title span.cf-code-label span',
+    # Cloudflare http://bitturk.net/ Firefox
+    '#cf-error-details div.cf-error-overview h1'
 ]
-CHALLENGE_TITLE = [
+CHALLENGE_TITLES = [
     # Cloudflare
     'Just a moment...',
     # DDoS-GUARD
-    'DDOS-GUARD',
+    'DDoS-Guard'
 ]
 CHALLENGE_SELECTORS = [
     # Cloudflare
     '#cf-challenge-running', '.ray_id', '.attack-box', '#cf-please-wait', '#challenge-spinner', '#trk_jschal_js',
     # Custom CloudFlare for EbookParadijs, Film-Paleis, MuziekFabriek and Puur-Hollands
-    'td.info #js_info'
+    'td.info #js_info',
+    # Fairlane / pararius.com
+    'div.vc div.text-box h2'
 ]
 SHORT_TIMEOUT = 10
+SESSIONS_STORAGE = SessionsStorage()
 
 
 def test_browser_installation():
     logging.info("Testing web browser installation...")
+    logging.info("Platform: " + platform.platform())
+
+    chrome_exe_path = utils.get_chrome_exe_path()
+    if chrome_exe_path is None:
+        logging.error("Chrome / Chromium web browser not installed!")
+        sys.exit(1)
+    else:
+        logging.info("Chrome / Chromium path: " + chrome_exe_path)
+
+    chrome_major_version = utils.get_chrome_major_version()
+    if chrome_major_version == '':
+        logging.error("Chrome / Chromium version not detected!")
+        sys.exit(1)
+    else:
+        logging.info("Chrome / Chromium major version: " + chrome_major_version)
+
+    logging.info("Launching web browser...")
     user_agent = utils.get_user_agent()
     logging.info("FlareSolverr User-Agent: " + user_agent)
-    logging.info("Test successful")
+    logging.info("Test successful!")
 
 
 def index_endpoint() -> IndexResponse:
@@ -90,11 +125,11 @@ def _controller_v1_handler(req: V1RequestBase) -> V1ResponseBase:
     # execute the command
     res: V1ResponseBase
     if req.cmd == 'sessions.create':
-        raise Exception("Not implemented yet.")
+        res = _cmd_sessions_create(req)
     elif req.cmd == 'sessions.list':
-        raise Exception("Not implemented yet.")
+        res = _cmd_sessions_list(req)
     elif req.cmd == 'sessions.destroy':
-        raise Exception("Not implemented yet.")
+        res = _cmd_sessions_destroy(req)
     elif req.cmd == 'request.get':
         res = _cmd_request_get(req)
     elif req.cmd == 'request.post':
@@ -141,19 +176,116 @@ def _cmd_request_post(req: V1RequestBase) -> V1ResponseBase:
     return res
 
 
+def _cmd_sessions_create(req: V1RequestBase) -> V1ResponseBase:
+    logging.debug("Creating new session...")
+
+    session, fresh = SESSIONS_STORAGE.create(session_id=req.session, proxy=req.proxy)
+    session_id = session.session_id
+
+    if not fresh:
+        return V1ResponseBase({
+            "status": STATUS_OK,
+            "message": "Session already exists.",
+            "session": session_id
+        })
+
+    return V1ResponseBase({
+        "status": STATUS_OK,
+        "message": "Session created successfully.",
+        "session": session_id
+    })
+
+
+def _cmd_sessions_list(req: V1RequestBase) -> V1ResponseBase:
+    session_ids = SESSIONS_STORAGE.session_ids()
+
+    return V1ResponseBase({
+        "status": STATUS_OK,
+        "message": "",
+        "sessions": session_ids
+    })
+
+
+def _cmd_sessions_destroy(req: V1RequestBase) -> V1ResponseBase:
+    session_id = req.session
+    existed = SESSIONS_STORAGE.destroy(session_id)
+
+    if not existed:
+        raise Exception("The session doesn't exist.")
+
+    return V1ResponseBase({
+        "status": STATUS_OK,
+        "message": "The session has been removed."
+    })
+
+
 def _resolve_challenge(req: V1RequestBase, method: str) -> ChallengeResolutionT:
     timeout = req.maxTimeout / 1000
     driver = None
     try:
-        driver = utils.get_webdriver()
+        if req.session:
+            session_id = req.session
+            ttl = timedelta(minutes=req.session_ttl_minutes) if req.session_ttl_minutes else None
+            session, fresh = SESSIONS_STORAGE.get(session_id, ttl)
+
+            if fresh:
+                logging.debug(f"new session created to perform the request (session_id={session_id})")
+            else:
+                logging.debug(f"existing session is used to perform the request (session_id={session_id}, "
+                              f"lifetime={str(session.lifetime())}, ttl={str(ttl)})")
+
+            driver = session.driver
+        else:
+            driver = utils.get_webdriver(req.proxy)
+            logging.debug('New instance of webdriver has been created to perform the request')
         return func_timeout(timeout, _evil_logic, (req, driver, method))
     except FunctionTimedOut:
         raise Exception(f'Error solving the challenge. Timeout after {timeout} seconds.')
     except Exception as e:
-        raise Exception('Error solving the challenge. ' + str(e))
+        raise Exception('Error solving the challenge. ' + str(e).replace('\n', '\\n'))
     finally:
-        if driver is not None:
+        if not req.session and driver is not None:
             driver.quit()
+            logging.debug('A used instance of webdriver has been destroyed')
 
 
+def click_verify(driver: WebDriver):
+    try:
+        logging.debug("Try to find the Cloudflare verify checkbox")
+        iframe = driver.find_element(By.XPATH, "//iframe[@title='Widget containing a Cloudflare security challenge']")
+        driver.switch_to.frame(iframe)
+        checkbox = driver.find_element(
+            by=By.XPATH,
+            value='//*[@id="cf-stage"]//label[@class="ctp-checkbox-label"]/input',
+        )
+        if checkbox:
+            actions = ActionChains(driver)
+            actions.move_to_element_with_offset(checkbox, 5, 7)
+            actions.click(checkbox)
+            actions.perform()
+            logging.debug("Cloudflare verify checkbox found and clicked")
+    except Exception:
+        logging.debug("Cloudflare verify checkbox not found on the page")
+    finally:
+        driver.switch_to.default_content()
+
+    try:
+        logging.debug("Try to find the Cloudflare 'Verify you are human' button")
+        button = driver.find_element(
+            by=By.XPATH,
+            value="//input[@type='button' and @value='Verify you are human']",
+        )
+        if button:
+            actions = ActionChains(driver)
+            actions.move_to_element_with_offset(button, 5, 7)
+            actions.click(button)
+            actions.perform()
+            logging.debug("The Cloudflare 'Verify you are human' button found and clicked")
+    except Exception as e:
+        logging.debug("The Cloudflare 'Verify you are human' button not found on the page")
+        # print(e)
+
+    time.sleep(2)
+
+
 def _evil_logic(req: V1RequestBase, driver: WebDriver, method: str) -> ChallengeResolutionT:
@@ -167,12 +299,30 @@ def _evil_logic(req: V1RequestBase, driver: WebDriver, method: str) -> Challenge
         _post_request(req, driver)
     else:
         driver.get(req.url)
-    if utils.get_config_log_html():
-        logging.debug(f"Response HTML:\n{driver.page_source}")
+
+    # set cookies if required
+    if req.cookies is not None and len(req.cookies) > 0:
+        logging.debug(f'Setting cookies...')
+        for cookie in req.cookies:
+            driver.delete_cookie(cookie['name'])
+            driver.add_cookie(cookie)
+        # reload the page
+        if method == 'POST':
+            _post_request(req, driver)
+        else:
+            driver.get(req.url)
 
     # wait for the page
+    if utils.get_config_log_html():
+        logging.debug(f"Response HTML:\n{driver.page_source}")
     html_element = driver.find_element(By.TAG_NAME, "html")
+    page_title = driver.title
 
+    # find access denied titles
+    for title in ACCESS_DENIED_TITLES:
+        if title == page_title:
+            raise Exception('Cloudflare has blocked this request. '
+                            'Probably your IP is banned for this site, check in your web browser.')
     # find access denied selectors
     for selector in ACCESS_DENIED_SELECTORS:
         found_elements = driver.find_elements(By.CSS_SELECTOR, selector)
@@ -182,11 +332,10 @@ def _evil_logic(req: V1RequestBase, driver: WebDriver, method: str) -> Challenge
 
     # find challenge by title
     challenge_found = False
-    page_title = driver.title
-    for title in CHALLENGE_TITLE:
-        if title == page_title:
+    for title in CHALLENGE_TITLES:
+        if title.lower() == page_title.lower():
             challenge_found = True
-            logging.info("Challenge detected. Title found: " + title)
+            logging.info("Challenge detected. Title found: " + page_title)
             break
     if not challenge_found:
         # find challenge by selectors
@@ -197,17 +346,19 @@ def _evil_logic(req: V1RequestBase, driver: WebDriver, method: str) -> Challenge
                 logging.info("Challenge detected. Selector found: " + selector)
                 break
 
+    attempt = 0
     if challenge_found:
         while True:
             try:
-                # wait until the title change
-                for title in CHALLENGE_TITLE:
-                    logging.debug("Waiting for title: " + title)
+                attempt = attempt + 1
+                # wait until the title changes
+                for title in CHALLENGE_TITLES:
+                    logging.debug("Waiting for title (attempt " + str(attempt) + "): " + title)
                     WebDriverWait(driver, SHORT_TIMEOUT).until_not(title_is(title))
 
                 # then wait until all the selectors disappear
                 for selector in CHALLENGE_SELECTORS:
-                    logging.debug("Waiting for selector: " + selector)
+                    logging.debug("Waiting for selector (attempt " + str(attempt) + "): " + selector)
                     WebDriverWait(driver, SHORT_TIMEOUT).until_not(
                         presence_of_element_located((By.CSS_SELECTOR, selector)))
 
@@ -216,6 +367,9 @@ def _evil_logic(req: V1RequestBase, driver: WebDriver, method: str) -> Challenge
 
             except TimeoutException:
                 logging.debug("Timeout waiting for selector")
+
+                click_verify(driver)
+
             # update the html (cloudflare reloads the page every 5 s)
             html_element = driver.find_element(By.TAG_NAME, "html")
 

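The three session commands implemented above (sessions.create, sessions.list, sessions.destroy) work together with the "session" and "session_ttl_minutes" request parameters. The following is a rough usage sketch over HTTP, assuming a local FlareSolverr instance on port 8191; the session id and target URL are made up for the example.

import requests

API = "http://localhost:8191/v1"

# 1. Create (or reuse) a named session; the response echoes the session id.
r = requests.post(API, json={"cmd": "sessions.create", "session": "my-session"})
print(r.json()["session"])

# 2. Requests that pass the same "session" reuse that browser instance;
#    "session_ttl_minutes" lets FlareSolverr rotate the session once it expires.
r = requests.post(API, json={
    "cmd": "request.get",
    "url": "https://example.com",      # placeholder target
    "session": "my-session",
    "session_ttl_minutes": 10,
})
print(r.json()["status"])

# 3. List the active sessions and destroy the one created above.
print(requests.post(API, json={"cmd": "sessions.list"}).json()["sessions"])
requests.post(API, json={"cmd": "sessions.destroy", "session": "my-session"})
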
src/sessions.py (new file, 82 lines)

import logging
from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Optional, Tuple
from uuid import uuid1

from selenium.webdriver.chrome.webdriver import WebDriver

import utils


@dataclass
class Session:
    session_id: str
    driver: WebDriver
    created_at: datetime

    def lifetime(self) -> timedelta:
        return datetime.now() - self.created_at


class SessionsStorage:
    """SessionsStorage creates, stores and process all the sessions"""

    def __init__(self):
        self.sessions = {}

    def create(self, session_id: Optional[str] = None, proxy: Optional[dict] = None,
               force_new: Optional[bool] = False) -> Tuple[Session, bool]:
        """create creates new instance of WebDriver if necessary,
        assign defined (or newly generated) session_id to the instance
        and returns the session object. If a new session has been created
        second argument is set to True.

        Note: The function is idempotent, so in case if session_id
        already exists in the storage a new instance of WebDriver won't be created
        and existing session will be returned. Second argument defines if
        new session has been created (True) or an existing one was used (False).
        """
        session_id = session_id or str(uuid1())

        if force_new:
            self.destroy(session_id)

        if self.exists(session_id):
            return self.sessions[session_id], False

        driver = utils.get_webdriver(proxy)
        created_at = datetime.now()
        session = Session(session_id, driver, created_at)

        self.sessions[session_id] = session

        return session, True

    def exists(self, session_id: str) -> bool:
        return session_id in self.sessions

    def destroy(self, session_id: str) -> bool:
        """destroy closes the driver instance and removes session from the storage.
        The function is noop if session_id doesn't exist.
        The function returns True if session was found and destroyed,
        and False if session_id wasn't found.
        """
        if not self.exists(session_id):
            return False

        session = self.sessions.pop(session_id)
        session.driver.quit()
        return True

    def get(self, session_id: str, ttl: Optional[timedelta] = None) -> Tuple[Session, bool]:
        session, fresh = self.create(session_id)

        if ttl is not None and not fresh and session.lifetime() > ttl:
            logging.debug(f'session\'s lifetime has expired, so the session is recreated (session_id={session_id})')
            session, fresh = self.create(session_id, force_new=True)

        return session, fresh

    def session_ids(self) -> list[str]:
        return list(self.sessions.keys())

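A small sketch of how the new SessionsStorage class is driven (this mirrors what flaresolverr_service.py does above). It assumes the src directory is on the Python path and that utils.get_webdriver(), which create() calls internally, can launch a browser on the current machine.

from datetime import timedelta

from sessions import SessionsStorage

storage = SessionsStorage()

# First call creates a new WebDriver and reports fresh=True.
session, fresh = storage.create(session_id="example")
assert fresh

# get() is create() plus TTL handling: an expired session is recreated transparently.
same, fresh = storage.get("example", ttl=timedelta(minutes=10))
assert not fresh and same.session_id == "example"

print(storage.session_ids())   # ['example']
storage.destroy("example")     # quits the underlying WebDriver
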
269
src/tests.py
269
src/tests.py
@@ -1,4 +1,5 @@
|
|||||||
import unittest
|
import unittest
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from webtest import TestApp
|
from webtest import TestApp
|
||||||
|
|
||||||
@@ -7,7 +8,7 @@ import flaresolverr
|
|||||||
import utils
|
import utils
|
||||||
|
|
||||||
|
|
||||||
def _find_obj_by_key(key: str, value: str, _list: list) -> dict | None:
|
def _find_obj_by_key(key: str, value: str, _list: list) -> Optional[dict]:
|
||||||
for obj in _list:
|
for obj in _list:
|
||||||
if obj[key] == value:
|
if obj[key] == value:
|
||||||
return obj
|
return obj
|
||||||
@@ -23,10 +24,13 @@ class TestFlareSolverr(unittest.TestCase):
|
|||||||
cloudflare_url = "https://nowsecure.nl"
|
cloudflare_url = "https://nowsecure.nl"
|
||||||
cloudflare_url_2 = "https://idope.se/torrent-list/harry/"
|
cloudflare_url_2 = "https://idope.se/torrent-list/harry/"
|
||||||
ddos_guard_url = "https://anidex.info/"
|
ddos_guard_url = "https://anidex.info/"
|
||||||
|
fairlane_url = "https://www.pararius.com/apartments/amsterdam"
|
||||||
custom_cloudflare_url = "https://www.muziekfabriek.org"
|
custom_cloudflare_url = "https://www.muziekfabriek.org"
|
||||||
cloudflare_blocked_url = "https://cpasbiens3.fr/index.php?do=search&subaction=search"
|
cloudflare_blocked_url = "https://cpasbiens3.fr/index.php?do=search&subaction=search"
|
||||||
|
|
||||||
app = TestApp(flaresolverr.app)
|
app = TestApp(flaresolverr.app)
|
||||||
|
# wait until the server is ready
|
||||||
|
app.get('/')
|
||||||
|
|
||||||
def test_wrong_endpoint(self):
|
def test_wrong_endpoint(self):
|
||||||
res = self.app.get('/wrong', status=404)
|
res = self.app.get('/wrong', status=404)
|
||||||
@@ -64,7 +68,7 @@ class TestFlareSolverr(unittest.TestCase):
|
|||||||
self.assertEqual("Error: Request parameter 'cmd' = 'request.bad' is invalid.", body.message)
|
self.assertEqual("Error: Request parameter 'cmd' = 'request.bad' is invalid.", body.message)
|
||||||
self.assertGreater(body.startTimestamp, 10000)
|
self.assertGreater(body.startTimestamp, 10000)
|
||||||
self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
|
self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
|
||||||
self.assertEqual(utils.get_flaresolverr_version(), body.version)
|
self.assertEqual(utils.get_flaresolverr_version(), body.version)
|
||||||
|
|
||||||
def test_v1_endpoint_request_get_no_cloudflare(self):
|
def test_v1_endpoint_request_get_no_cloudflare(self):
|
||||||
res = self.app.post_json('/v1', {
|
res = self.app.post_json('/v1', {
|
||||||
@@ -78,7 +82,7 @@ class TestFlareSolverr(unittest.TestCase):
|
|||||||
self.assertEqual("Challenge not detected!", body.message)
|
self.assertEqual("Challenge not detected!", body.message)
|
||||||
self.assertGreater(body.startTimestamp, 10000)
|
self.assertGreater(body.startTimestamp, 10000)
|
||||||
self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
|
self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
|
||||||
self.assertEqual(utils.get_flaresolverr_version(), body.version)
|
self.assertEqual(utils.get_flaresolverr_version(), body.version)
|
||||||
|
|
||||||
solution = body.solution
|
solution = body.solution
|
||||||
self.assertIn(self.google_url, solution.url)
|
self.assertIn(self.google_url, solution.url)
|
||||||
@@ -100,7 +104,7 @@ class TestFlareSolverr(unittest.TestCase):
|
|||||||
self.assertEqual("Challenge solved!", body.message)
|
self.assertEqual("Challenge solved!", body.message)
|
||||||
self.assertGreater(body.startTimestamp, 10000)
|
self.assertGreater(body.startTimestamp, 10000)
|
||||||
self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
|
self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
|
||||||
self.assertEqual(utils.get_flaresolverr_version(), body.version)
|
self.assertEqual(utils.get_flaresolverr_version(), body.version)
|
||||||
|
|
||||||
solution = body.solution
|
solution = body.solution
|
||||||
self.assertIn(self.cloudflare_url, solution.url)
|
self.assertIn(self.cloudflare_url, solution.url)
|
||||||
@@ -126,7 +130,7 @@ class TestFlareSolverr(unittest.TestCase):
|
|||||||
self.assertEqual("Challenge solved!", body.message)
|
self.assertEqual("Challenge solved!", body.message)
|
||||||
self.assertGreater(body.startTimestamp, 10000)
|
self.assertGreater(body.startTimestamp, 10000)
|
||||||
self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
|
self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
|
||||||
self.assertEqual(utils.get_flaresolverr_version(), body.version)
|
self.assertEqual(utils.get_flaresolverr_version(), body.version)
|
||||||
|
|
||||||
solution = body.solution
|
solution = body.solution
|
||||||
self.assertIn(self.cloudflare_url_2, solution.url)
|
self.assertIn(self.cloudflare_url_2, solution.url)
|
||||||
@@ -152,7 +156,7 @@ class TestFlareSolverr(unittest.TestCase):
|
|||||||
self.assertEqual("Challenge solved!", body.message)
|
self.assertEqual("Challenge solved!", body.message)
|
||||||
self.assertGreater(body.startTimestamp, 10000)
|
self.assertGreater(body.startTimestamp, 10000)
|
||||||
self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
|
self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
|
||||||
self.assertEqual(utils.get_flaresolverr_version(), body.version)
|
self.assertEqual(utils.get_flaresolverr_version(), body.version)
|
||||||
|
|
||||||
solution = body.solution
|
solution = body.solution
|
||||||
self.assertIn(self.ddos_guard_url, solution.url)
|
self.assertIn(self.ddos_guard_url, solution.url)
|
||||||
@@ -166,6 +170,32 @@ class TestFlareSolverr(unittest.TestCase):
        self.assertIsNotNone(cf_cookie, "DDOS-Guard cookie not found")
        self.assertGreater(len(cf_cookie["value"]), 10)

+    def test_v1_endpoint_request_get_fairlane_js(self):
+        res = self.app.post_json('/v1', {
+            "cmd": "request.get",
+            "url": self.fairlane_url
+        })
+        self.assertEqual(res.status_code, 200)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_OK, body.status)
+        self.assertEqual("Challenge solved!", body.message)
+        self.assertGreater(body.startTimestamp, 10000)
+        self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
+        self.assertEqual(utils.get_flaresolverr_version(), body.version)
+
+        solution = body.solution
+        self.assertIn(self.fairlane_url, solution.url)
+        self.assertEqual(solution.status, 200)
+        self.assertIs(len(solution.headers), 0)
+        self.assertIn("<title>Rental Apartments Amsterdam</title>", solution.response)
+        self.assertGreater(len(solution.cookies), 0)
+        self.assertIn("Chrome/", solution.userAgent)
+
+        cf_cookie = _find_obj_by_key("name", "fl_pass_v2_b", solution.cookies)
+        self.assertIsNotNone(cf_cookie, "Fairlane cookie not found")
+        self.assertGreater(len(cf_cookie["value"]), 50)
+
    def test_v1_endpoint_request_get_custom_cloudflare_js(self):
        res = self.app.post_json('/v1', {
            "cmd": "request.get",
@@ -178,7 +208,7 @@ class TestFlareSolverr(unittest.TestCase):
        self.assertEqual("Challenge solved!", body.message)
        self.assertGreater(body.startTimestamp, 10000)
        self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
        self.assertEqual(utils.get_flaresolverr_version(), body.version)

        solution = body.solution
        self.assertIn(self.custom_cloudflare_url, solution.url)
@@ -209,7 +239,45 @@ class TestFlareSolverr(unittest.TestCase):
        self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
        self.assertEqual(utils.get_flaresolverr_version(), body.version)

-    # todo: test Cmd 'request.get' should return OK with 'cookies' param
+    def test_v1_endpoint_request_get_cookies_param(self):
+        res = self.app.post_json('/v1', {
+            "cmd": "request.get",
+            "url": self.google_url,
+            "cookies": [
+                {
+                    "name": "testcookie1",
+                    "value": "testvalue1"
+                },
+                {
+                    "name": "testcookie2",
+                    "value": "testvalue2"
+                }
+            ]
+        })
+        self.assertEqual(res.status_code, 200)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_OK, body.status)
+        self.assertEqual("Challenge not detected!", body.message)
+        self.assertGreater(body.startTimestamp, 10000)
+        self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
+        self.assertEqual(utils.get_flaresolverr_version(), body.version)
+
+        solution = body.solution
+        self.assertIn(self.google_url, solution.url)
+        self.assertEqual(solution.status, 200)
+        self.assertIs(len(solution.headers), 0)
+        self.assertIn("<title>Google</title>", solution.response)
+        self.assertGreater(len(solution.cookies), 1)
+        self.assertIn("Chrome/", solution.userAgent)
+
+        user_cookie1 = _find_obj_by_key("name", "testcookie1", solution.cookies)
+        self.assertIsNotNone(user_cookie1, "User cookie 1 not found")
+        self.assertEqual("testvalue1", user_cookie1["value"])
+
+        user_cookie2 = _find_obj_by_key("name", "testcookie2", solution.cookies)
+        self.assertIsNotNone(user_cookie2, "User cookie 2 not found")
+        self.assertEqual("testvalue2", user_cookie2["value"])

    def test_v1_endpoint_request_get_returnOnlyCookies_param(self):
        res = self.app.post_json('/v1', {
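Note (illustration only, not part of this changeset): the new cookies test above also documents the request shape a client sends. A minimal sketch of the same call from outside the test suite, assuming a FlareSolverr instance on the default http://localhost:8191 and using the requests library:

import requests

# cookies are passed as a list of {"name", "value"} objects; they come back in
# solution["cookies"] together with whatever cookies the challenge produced
payload = {
    "cmd": "request.get",
    "url": "https://www.google.com",
    "cookies": [
        {"name": "testcookie1", "value": "testvalue1"},
        {"name": "testcookie2", "value": "testvalue2"},
    ],
}
resp = requests.post("http://localhost:8191/v1", json=payload, timeout=120)
print(resp.json()["solution"]["cookies"])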
@@ -234,10 +302,88 @@ class TestFlareSolverr(unittest.TestCase):
        self.assertGreater(len(solution.cookies), 0)
        self.assertIn("Chrome/", solution.userAgent)

-    # todo: test Cmd 'request.get' should return OK with HTTP 'proxy' param
-    # todo: test Cmd 'request.get' should return OK with HTTP 'proxy' param with credentials
-    # todo: test Cmd 'request.get' should return OK with SOCKSv5 'proxy' param
-    # todo: test Cmd 'request.get' should fail with wrong 'proxy' param
+    def test_v1_endpoint_request_get_proxy_http_param(self):
+        """
+        To configure TinyProxy in local:
+        * sudo vim /etc/tinyproxy/tinyproxy.conf
+        * edit => LogFile "/tmp/tinyproxy.log"
+        * edit => Syslog Off
+        * sudo tinyproxy -d
+        * sudo tail -f /tmp/tinyproxy.log
+        """
+        res = self.app.post_json('/v1', {
+            "cmd": "request.get",
+            "url": self.google_url,
+            "proxy": {
+                "url": self.proxy_url
+            }
+        })
+        self.assertEqual(res.status_code, 200)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_OK, body.status)
+        self.assertEqual("Challenge not detected!", body.message)
+        self.assertGreater(body.startTimestamp, 10000)
+        self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
+        self.assertEqual(utils.get_flaresolverr_version(), body.version)
+
+        solution = body.solution
+        self.assertIn(self.google_url, solution.url)
+        self.assertEqual(solution.status, 200)
+        self.assertIs(len(solution.headers), 0)
+        self.assertIn("<title>Google</title>", solution.response)
+        self.assertGreater(len(solution.cookies), 0)
+        self.assertIn("Chrome/", solution.userAgent)
+
+    def test_v1_endpoint_request_get_proxy_socks_param(self):
+        """
+        To configure Dante in local:
+        * https://linuxhint.com/set-up-a-socks5-proxy-on-ubuntu-with-dante/
+        * sudo vim /etc/sockd.conf
+        * sudo systemctl restart sockd.service
+        * curl --socks5 socks5://127.0.0.1:1080 https://www.google.com
+        """
+        res = self.app.post_json('/v1', {
+            "cmd": "request.get",
+            "url": self.google_url,
+            "proxy": {
+                "url": self.proxy_socks_url
+            }
+        })
+        self.assertEqual(res.status_code, 200)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_OK, body.status)
+        self.assertEqual("Challenge not detected!", body.message)
+        self.assertGreater(body.startTimestamp, 10000)
+        self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
+        self.assertEqual(utils.get_flaresolverr_version(), body.version)
+
+        solution = body.solution
+        self.assertIn(self.google_url, solution.url)
+        self.assertEqual(solution.status, 200)
+        self.assertIs(len(solution.headers), 0)
+        self.assertIn("<title>Google</title>", solution.response)
+        self.assertGreater(len(solution.cookies), 0)
+        self.assertIn("Chrome/", solution.userAgent)
+
+    def test_v1_endpoint_request_get_proxy_wrong_param(self):
+        res = self.app.post_json('/v1', {
+            "cmd": "request.get",
+            "url": self.google_url,
+            "proxy": {
+                "url": "http://127.0.0.1:43210"
+            }
+        }, status=500)
+        self.assertEqual(res.status_code, 500)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_ERROR, body.status)
+        self.assertIn("Error: Error solving the challenge. Message: unknown error: net::ERR_PROXY_CONNECTION_FAILED",
+                      body.message)
+        self.assertGreater(body.startTimestamp, 10000)
+        self.assertGreaterEqual(body.endTimestamp, body.startTimestamp)
+        self.assertEqual(utils.get_flaresolverr_version(), body.version)

    def test_v1_endpoint_request_get_fail_timeout(self):
        res = self.app.post_json('/v1', {
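Note (illustration only): the proxy tests above rely on a locally configured TinyProxy (HTTP) and Dante (SOCKSv5) instance, set up as described in their docstrings. A hedged sketch of the client-side request shape, assuming those daemons listen on their usual ports (8888 and 1080) and FlareSolverr on localhost:8191:

import requests

# "proxy" takes a single URL; the scheme selects HTTP vs SOCKSv5
for proxy_url in ("http://127.0.0.1:8888", "socks5://127.0.0.1:1080"):
    r = requests.post("http://localhost:8191/v1", json={
        "cmd": "request.get",
        "url": "https://www.google.com",
        "proxy": {"url": proxy_url},
    }, timeout=120)
    print(proxy_url, "->", r.json()["status"])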
@@ -351,12 +497,99 @@ class TestFlareSolverr(unittest.TestCase):
        self.assertEqual(STATUS_OK, body.status)
        self.assertEqual("Challenge not detected!", body.message)

-    # todo: test Cmd 'sessions.create' should return OK
-    # todo: test Cmd 'sessions.create' should return OK with session
-    # todo: test Cmd 'sessions.list' should return OK
-    # todo: test Cmd 'sessions.destroy' should return OK
-    # todo: test Cmd 'sessions.destroy' should fail
-    # todo: test Cmd 'request.get' should use session
+    def test_v1_endpoint_sessions_create_without_session(self):
+        res = self.app.post_json('/v1', {
+            "cmd": "sessions.create"
+        })
+        self.assertEqual(res.status_code, 200)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_OK, body.status)
+        self.assertEqual("Session created successfully.", body.message)
+        self.assertIsNotNone(body.session)
+
+    def test_v1_endpoint_sessions_create_with_session(self):
+        res = self.app.post_json('/v1', {
+            "cmd": "sessions.create",
+            "session": "test_create_session"
+        })
+        self.assertEqual(res.status_code, 200)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_OK, body.status)
+        self.assertEqual("Session created successfully.", body.message)
+        self.assertEqual(body.session, "test_create_session")
+
+    def test_v1_endpoint_sessions_create_with_proxy(self):
+        res = self.app.post_json('/v1', {
+            "cmd": "sessions.create",
+            "proxy": {
+                "url": self.proxy_url
+            }
+        })
+        self.assertEqual(res.status_code, 200)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_OK, body.status)
+        self.assertEqual("Session created successfully.", body.message)
+        self.assertIsNotNone(body.session)
+
+    def test_v1_endpoint_sessions_list(self):
+        self.app.post_json('/v1', {
+            "cmd": "sessions.create",
+            "session": "test_list_sessions"
+        })
+        res = self.app.post_json('/v1', {
+            "cmd": "sessions.list"
+        })
+        self.assertEqual(res.status_code, 200)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_OK, body.status)
+        self.assertEqual("", body.message)
+        self.assertGreaterEqual(len(body.sessions), 1)
+        self.assertIn("test_list_sessions", body.sessions)
+
+    def test_v1_endpoint_sessions_destroy_existing_session(self):
+        self.app.post_json('/v1', {
+            "cmd": "sessions.create",
+            "session": "test_destroy_sessions"
+        })
+        res = self.app.post_json('/v1', {
+            "cmd": "sessions.destroy",
+            "session": "test_destroy_sessions"
+        })
+        self.assertEqual(res.status_code, 200)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_OK, body.status)
+        self.assertEqual("The session has been removed.", body.message)
+
+    def test_v1_endpoint_sessions_destroy_non_existing_session(self):
+        res = self.app.post_json('/v1', {
+            "cmd": "sessions.destroy",
+            "session": "non_existing_session_name"
+        }, status=500)
+        self.assertEqual(res.status_code, 500)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_ERROR, body.status)
+        self.assertEqual("Error: The session doesn't exist.", body.message)
+
+    def test_v1_endpoint_request_get_with_session(self):
+        self.app.post_json('/v1', {
+            "cmd": "sessions.create",
+            "session": "test_request_sessions"
+        })
+        res = self.app.post_json('/v1', {
+            "cmd": "request.get",
+            "session": "test_request_sessions",
+            "url": self.google_url
+        })
+        self.assertEqual(res.status_code, 200)
+
+        body = V1ResponseBase(res.json)
+        self.assertEqual(STATUS_OK, body.status)


if __name__ == '__main__':
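Note (illustration only): taken together, the new session tests pin down the sessions API surface: sessions.create (optionally named, optionally with a proxy), sessions.list, sessions.destroy, and request.get with a "session" field to reuse the same browser. A short end-to-end sketch against a hypothetical local instance:

import requests

API = "http://localhost:8191/v1"   # assumed default port

requests.post(API, json={"cmd": "sessions.create", "session": "demo"})
r = requests.post(API, json={"cmd": "request.get",
                             "session": "demo",
                             "url": "https://www.google.com"}, timeout=120)
print(r.json()["status"])
requests.post(API, json={"cmd": "sessions.destroy", "session": "demo"})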
@@ -39,6 +39,8 @@ def asset_cloudflare_solution(self, res, site_url, site_text):

class TestFlareSolverr(unittest.TestCase):
    app = TestApp(flaresolverr.app)
+    # wait until the server is ready
+    app.get('/')

    def test_v1_endpoint_request_get_cloudflare(self):
        sites_get = [
File diff suppressed because it is too large
@@ -1,259 +0,0 @@
(entire file deleted; previous contents follow)

#!/usr/bin/env python3
# this module is part of undetected_chromedriver

"""
        [ASCII-art "undetected chromedriver" banner]

        by UltrafunkAmsterdam (https://github.com/ultrafunkamsterdam)
"""

import io
import logging
import os
import random
import re
import string
import sys
import zipfile
from distutils.version import LooseVersion
from urllib.request import urlopen, urlretrieve

from selenium.webdriver import Chrome as _Chrome, ChromeOptions as _ChromeOptions

TARGET_VERSION = 0
logger = logging.getLogger("uc")


class Chrome:
    def __new__(cls, *args, emulate_touch=False, **kwargs):

        if not ChromeDriverManager.installed:
            ChromeDriverManager(*args, **kwargs).install()
        if not ChromeDriverManager.selenium_patched:
            ChromeDriverManager(*args, **kwargs).patch_selenium_webdriver()
        if not kwargs.get("executable_path"):
            kwargs["executable_path"] = "./{}".format(
                ChromeDriverManager(*args, **kwargs).executable_path
            )
        if not kwargs.get("options"):
            kwargs["options"] = ChromeOptions()
        instance = object.__new__(_Chrome)
        instance.__init__(*args, **kwargs)

        instance._orig_get = instance.get

        def _get_wrapped(*args, **kwargs):
            if instance.execute_script("return navigator.webdriver"):
                instance.execute_cdp_cmd(
                    "Page.addScriptToEvaluateOnNewDocument",
                    {
                        "source": """
                            Object.defineProperty(window, 'navigator', {
                                value: new Proxy(navigator, {
                                    has: (target, key) => (key === 'webdriver' ? false : key in target),
                                    get: (target, key) =>
                                        key === 'webdriver'
                                            ? undefined
                                            : typeof target[key] === 'function'
                                            ? target[key].bind(target)
                                            : target[key]
                                })
                            });
                        """
                    },
                )
            return instance._orig_get(*args, **kwargs)

        instance.get = _get_wrapped

        original_user_agent_string = instance.execute_script(
            "return navigator.userAgent"
        )
        instance.execute_cdp_cmd(
            "Network.setUserAgentOverride",
            {
                "userAgent": original_user_agent_string.replace("Headless", ""),
            },
        )
        if emulate_touch:
            instance.execute_cdp_cmd(
                "Page.addScriptToEvaluateOnNewDocument",
                {
                    "source": """
                        Object.defineProperty(navigator, 'maxTouchPoints', {
                            get: () => 1
                        })"""
                },
            )
        logger.info(f"starting undetected_chromedriver.Chrome({args}, {kwargs})")
        return instance


class ChromeOptions:
    def __new__(cls, *args, **kwargs):
        if not ChromeDriverManager.installed:
            ChromeDriverManager(*args, **kwargs).install()
        if not ChromeDriverManager.selenium_patched:
            ChromeDriverManager(*args, **kwargs).patch_selenium_webdriver()

        instance = object.__new__(_ChromeOptions)
        instance.__init__()
        instance.add_argument("start-maximized")
        instance.add_experimental_option("excludeSwitches", ["enable-automation"])
        instance.add_argument("--disable-blink-features=AutomationControlled")
        return instance


class ChromeDriverManager(object):
    installed = False
    selenium_patched = False
    target_version = None

    DL_BASE = "https://chromedriver.storage.googleapis.com/"

    def __init__(self, executable_path=None, target_version=None, *args, **kwargs):

        _platform = sys.platform

        if TARGET_VERSION:
            # use global if set
            self.target_version = TARGET_VERSION

        if target_version:
            # use explicitly passed target
            self.target_version = target_version  # user override

        if not self.target_version:
            # none of the above (default) and just get current version
            self.target_version = self.get_release_version_number().version[
                0
            ]  # only major version int

        self._base = base_ = "chromedriver{}"

        exe_name = self._base
        if _platform in ("win32",):
            exe_name = base_.format(".exe")
        if _platform in ("linux",):
            _platform += "64"
            exe_name = exe_name.format("")
        if _platform in ("darwin",):
            _platform = "mac64"
            exe_name = exe_name.format("")
        self.platform = _platform
        self.executable_path = executable_path or exe_name
        self._exe_name = exe_name

    def patch_selenium_webdriver(self_):
        """
        Patches selenium package Chrome, ChromeOptions classes for current session

        :return:
        """
        import selenium.webdriver.chrome.service
        import selenium.webdriver

        selenium.webdriver.Chrome = Chrome
        selenium.webdriver.ChromeOptions = ChromeOptions
        logger.info("Selenium patched. Safe to import Chrome / ChromeOptions")
        self_.__class__.selenium_patched = True

    def install(self, patch_selenium=True):
        """
        Initialize the patch

        This will:
        download chromedriver if not present
        patch the downloaded chromedriver
        patch selenium package if <patch_selenium> is True (default)

        :param patch_selenium: patch selenium webdriver classes for Chrome and ChromeDriver (for current python session)
        :return:
        """
        if not os.path.exists(self.executable_path):
            self.fetch_chromedriver()
        if not self.__class__.installed:
            if self.patch_binary():
                self.__class__.installed = True

        if patch_selenium:
            self.patch_selenium_webdriver()

    def get_release_version_number(self):
        """
        Gets the latest major version available, or the latest major version of self.target_version if set explicitly.

        :return: version string
        """
        path = (
            "LATEST_RELEASE"
            if not self.target_version
            else f"LATEST_RELEASE_{self.target_version}"
        )
        return LooseVersion(urlopen(self.__class__.DL_BASE + path).read().decode())

    def fetch_chromedriver(self):
        """
        Downloads ChromeDriver from source and unpacks the executable

        :return: on success, name of the unpacked executable
        """
        base_ = self._base
        zip_name = base_.format(".zip")
        ver = self.get_release_version_number().vstring
        if os.path.exists(self.executable_path):
            return self.executable_path
        urlretrieve(
            f"{self.__class__.DL_BASE}{ver}/{base_.format(f'_{self.platform}')}.zip",
            filename=zip_name,
        )
        with zipfile.ZipFile(zip_name) as zf:
            zf.extract(self._exe_name)
        os.remove(zip_name)
        if sys.platform != "win32":
            os.chmod(self._exe_name, 0o755)
        return self._exe_name

    @staticmethod
    def random_cdc():
        cdc = random.choices(string.ascii_lowercase, k=26)
        cdc[-6:-4] = map(str.upper, cdc[-6:-4])
        cdc[2] = cdc[0]
        cdc[3] = "_"
        return "".join(cdc).encode()

    def patch_binary(self):
        """
        Patches the ChromeDriver binary

        :return: False on failure, binary name on success
        """
        linect = 0
        replacement = self.random_cdc()
        with io.open(self.executable_path, "r+b") as fh:
            for line in iter(lambda: fh.readline(), b""):
                if b"cdc_" in line:
                    fh.seek(-len(line), 1)
                    newline = re.sub(b"cdc_.{22}", replacement, line)
                    fh.write(newline)
                    linect += 1
            return linect


def install(executable_path=None, target_version=None, *args, **kwargs):
    ChromeDriverManager(executable_path, target_version, *args, **kwargs).install()
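For context (not part of the diff): the removed module was the old single-file v1 take on undetected_chromedriver, while the files touched below belong to the newer split-module version the project keeps vendored. Roughly how the v1 API above was meant to be consumed (a hedged sketch, not current usage):

import undetected_chromedriver as uc   # the removed v1 module shown above

driver = uc.Chrome()                   # downloads, patches and starts chromedriver
driver.get("https://nowsecure.nl")     # get() is wrapped to hide navigator.webdriver
print(driver.title)
driver.quit()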
@@ -1,112 +1,112 @@
 #!/usr/bin/env python3
 # this module is part of undetected_chromedriver

 import json
 import logging
-from collections.abc import Mapping, Sequence

 import requests
 import websockets

 log = logging.getLogger(__name__)

 ... (the rest of cdp.py is unchanged on both sides of the diff: the CDPObject and PageElement dict wrappers, and the CDP class with its json/protocol/list/new/activate/close endpoint table, the tab_activate/tab_list/tab_new/tab_close_last_opened helpers, the async send() over websockets, the get()/post() HTTP helpers and the last_json property)
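Aside (illustration only): cdp.py talks to Chrome's DevTools endpoints directly over HTTP and a websocket. A hedged sketch of driving it by hand, assuming a Chrome started with --remote-debugging-port=9222 and a minimal stand-in for the options object it expects:

import asyncio

class _Opts:
    # only the attribute CDP.__init__ reads; normally this is a ChromeOptions instance
    debugger_address = "127.0.0.1:9222"

cdp = CDP(_Opts())                        # fetches /json and picks the first target
print([tab.url for tab in cdp.tab_list()])
asyncio.run(cdp.send("Browser.getVersion", {}))
print(cdp.last_json)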
@@ -1,191 +1,190 @@
 import asyncio
-import logging
-import time
-import traceback
-from collections.abc import Mapping
-from collections.abc import Sequence
-from typing import Any
-from typing import Awaitable
-from typing import Callable
-from typing import List
-from typing import Optional
-from contextlib import ExitStack
-import threading
-from functools import wraps, partial
+from collections.abc import Mapping
+from collections.abc import Sequence
+from functools import wraps
+import logging
+import threading
+import time
+import traceback
+from typing import Any
+from typing import Awaitable
+from typing import Callable
+from typing import List
+from typing import Optional

 ... (the rest of the file is unchanged apart from this import reorder: the Structure dict-like base class, the timeout(seconds, on_timeout) decorator built on threading.Timer, and the test() harness that wires a devtools log collector onto uc.Chrome(version_main=96) and drives https://nowsecure.nl)
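Aside (illustration only): the timeout(seconds, on_timeout) decorator in this module arms a threading.Timer around the wrapped call; when the deadline passes it invokes on_timeout (or raises from the timer thread) while the call itself keeps running. A small hedged sketch, assuming the decorator is importable from the vendored module above:

import time

# assumed: `timeout` imported from the vendored helper module shown above
@timeout(seconds=1, on_timeout=lambda fn: print(fn.__name__, "exceeded 1 s"))
def slow_call():
    time.sleep(2)      # longer than the 1 s budget

slow_call()            # the watchdog message fires after ~1 s; the call still completes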
@@ -1,75 +1,76 @@
-import multiprocessing
-import os
-import platform
-import sys
-from subprocess import PIPE
-from subprocess import Popen
-import atexit
-import traceback
-import logging
-import signal
+import atexit
+import logging
+import multiprocessing
+import os
+import platform
+import signal
+from subprocess import PIPE
+from subprocess import Popen
+import sys

 CREATE_NEW_PROCESS_GROUP = 0x00000200
 DETACHED_PROCESS = 0x00000008

 REGISTERED = []


 def start_detached(executable, *args):
     """
     Starts a fully independent subprocess (with no parent)
     :param executable: executable
     :param args: arguments to the executable, eg: ['--param1_key=param1_val', '-vvv' ...]
     :return: pid of the grandchild process
     """

     # create pipe
     reader, writer = multiprocessing.Pipe(False)

     # do not keep reference
-    multiprocessing.Process(
+    process = multiprocessing.Process(
         target=_start_detached,
         args=(executable, *args),
         kwargs={"writer": writer},
         daemon=True,
-    ).start()
+    )
+    process.start()
+    process.join()
     # receive pid from pipe
     pid = reader.recv()
     REGISTERED.append(pid)
     # close pipes
     writer.close()
     reader.close()

     return pid

 ... (_start_detached(), _cleanup() and the atexit.register(_cleanup) hook follow unchanged)
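Aside (illustration only): start_detached() launches the browser through a throwaway daemon multiprocessing.Process so the grandchild ends up with no parent, and reports the grandchild pid back over a Pipe; the new process.join() waits for that intermediate process to finish before the pid is read. A hedged usage sketch with a made-up executable path:

from multiprocessing import freeze_support

if __name__ == "__main__":
    freeze_support()   # no-op on POSIX, needed for frozen Windows builds
    # path is hypothetical; FlareSolverr points this at its bundled Chrome binary
    pid = start_detached("/usr/bin/google-chrome", "--headless=new", "about:blank")
    print("detached browser pid:", pid)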
@@ -1,70 +1,85 @@
 #!/usr/bin/env python3
 # this module is part of undetected_chromedriver

 import json
 import os

 from selenium.webdriver.chromium.options import ChromiumOptions as _ChromiumOptions


 class ChromeOptions(_ChromiumOptions):
     _session = None
     _user_data_dir = None

 ... (the user_data_dir property and its setter are unchanged)

     @staticmethod
     def _undot_key(key, value):
         """turn a (dotted key, value) into a proper nested dict"""
         if "." in key:
             key, rest = key.split(".", 1)
             value = ChromeOptions._undot_key(rest, value)
         return {key: value}

+    @staticmethod
+    def _merge_nested(a, b):
+        """
+        merges b into a
+        leaf values in a are overwritten with values from b
+        """
+        for key in b:
+            if key in a:
+                if isinstance(a[key], dict) and isinstance(b[key], dict):
+                    ChromeOptions._merge_nested(a[key], b[key])
+                    continue
+            a[key] = b[key]
+        return a
+
     def handle_prefs(self, user_data_dir):
         prefs = self.experimental_options.get("prefs")
         if prefs:
             user_data_dir = user_data_dir or self._user_data_dir
             default_path = os.path.join(user_data_dir, "Default")
             os.makedirs(default_path, exist_ok=True)

             # undot prefs dict keys
             undot_prefs = {}
             for key, value in prefs.items():
-                undot_prefs.update(self._undot_key(key, value))
+                undot_prefs = self._merge_nested(
+                    undot_prefs, self._undot_key(key, value)
+                )

             prefs_file = os.path.join(default_path, "Preferences")
             if os.path.exists(prefs_file):
                 with open(prefs_file, encoding="latin1", mode="r") as f:
-                    undot_prefs.update(json.load(f))
+                    undot_prefs = self._merge_nested(json.load(f), undot_prefs)

             with open(prefs_file, encoding="latin1", mode="w") as f:
                 json.dump(undot_prefs, f)

             # remove the experimental_options to avoid an error
             del self._experimental_options["prefs"]

     @classmethod
     def from_options(cls, options):
         o = cls()
         o.__dict__.update(options.__dict__)
         return o
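Aside (illustration only): the switch from dict.update() to _merge_nested() means dotted experimental prefs that share a prefix no longer overwrite each other, and values already in the profile's Preferences file are merged rather than clobbered. A small hedged example of the two helpers above:

# two dotted keys under the same "profile" prefix
prefs = {
    "profile.default_content_setting_values.images": 2,
    "profile.password_manager_enabled": False,
}

merged = {}
for key, value in prefs.items():
    # with plain dict.update() the second key would replace the whole "profile" subtree
    merged = ChromeOptions._merge_nested(merged, ChromeOptions._undot_key(key, value))

# merged == {"profile": {"default_content_setting_values": {"images": 2},
#                        "password_manager_enabled": False}}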
@@ -1,276 +1,275 @@
 #!/usr/bin/env python3
 # this module is part of undetected_chromedriver

-import io
-import logging
-import os
-import random
-import re
-import string
-import sys
-import time
-import zipfile
-from distutils.version import LooseVersion
-from urllib.request import urlopen, urlretrieve
-import secrets
+from distutils.version import LooseVersion
+import io
+import logging
+import os
+import random
+import re
+import string
+import sys
+import time
+from urllib.request import urlopen
+from urllib.request import urlretrieve
+import zipfile

 logger = logging.getLogger(__name__)

-IS_POSIX = sys.platform.startswith(("darwin", "cygwin", "linux"))
+IS_POSIX = sys.platform.startswith(("darwin", "cygwin", "linux", "linux2"))


 class Patcher(object):
     url_repo = "https://chromedriver.storage.googleapis.com"
     zip_name = "chromedriver_%s.zip"
     exe_name = "chromedriver%s"

     platform = sys.platform
     if platform.endswith("win32"):
         zip_name %= "win32"
         exe_name %= ".exe"
-    if platform.endswith("linux"):
+    if platform.endswith(("linux", "linux2")):
         zip_name %= "linux64"
         exe_name %= ""
     if platform.endswith("darwin"):
         zip_name %= "mac64"
         exe_name %= ""

     if platform.endswith("win32"):
         d = "~/appdata/roaming/undetected_chromedriver"
-    elif platform.startswith("linux"):
+    elif "LAMBDA_TASK_ROOT" in os.environ:
+        d = "/tmp/undetected_chromedriver"
+    elif platform.startswith(("linux", "linux2")):
         d = "~/.local/share/undetected_chromedriver"
     elif platform.endswith("darwin"):
         d = "~/Library/Application Support/undetected_chromedriver"
     else:
         d = "~/.undetected_chromedriver"
     data_path = os.path.abspath(os.path.expanduser(d))

     def __init__(self, executable_path=None, force=False, version_main: int = 0):
         """
         Args:
             executable_path: None = automatic
                 a full file path to the chromedriver executable
             force: False
                 terminate processes which are holding lock
             version_main: 0 = auto
                 specify main chrome version (rounded, ex: 82)
         """
         self.force = force
-        self.executable_path = None
-        prefix = secrets.token_hex(8)
+        self._custom_exe_path = False
+        prefix = "undetected"

         if not os.path.exists(self.data_path):
             os.makedirs(self.data_path, exist_ok=True)

         if not executable_path:
             self.executable_path = os.path.join(
                 self.data_path, "_".join([prefix, self.exe_name])
             )

         if not IS_POSIX:
             if executable_path:
                 if not executable_path[-4:] == ".exe":
                     executable_path += ".exe"

         self.zip_path = os.path.join(self.data_path, prefix)

         if not executable_path:
             self.executable_path = os.path.abspath(
                 os.path.join(".", self.executable_path)
             )

-        self._custom_exe_path = False
-
         if executable_path:
             self._custom_exe_path = True
             self.executable_path = executable_path
         self.version_main = version_main
         self.version_full = None

     def auto(self, executable_path=None, force=False, version_main=None):
-        """"""
         if executable_path:
             self.executable_path = executable_path
             self._custom_exe_path = True

         if self._custom_exe_path:
             ispatched = self.is_binary_patched(self.executable_path)
             if not ispatched:
                 return self.patch_exe()
             else:
                 return

         if version_main:
             self.version_main = version_main
         if force is True:
             self.force = force

         try:
             os.unlink(self.executable_path)
         except PermissionError:
             if self.force:
                 self.force_kill_instances(self.executable_path)
                 return self.auto(force=not self.force)
             try:
                 if self.is_binary_patched():
                     # assumes already running AND patched
                     return True
             except PermissionError:
                 pass
             # return False
         except FileNotFoundError:
             pass

         release = self.fetch_release_number()
         self.version_main = release.version[0]
         self.version_full = release
         self.unzip_package(self.fetch_package())
         return self.patch()

 ... (patch(), fetch_release_number(), parse_exe_version(), fetch_package(), unzip_package() and the start of force_kill_instances() follow unchanged in the portion shown; the compare view is cut off inside the force_kill_instances() docstring, at ":param: executable name to kill, may be a path as well")
|
||||||
"""
|
|
||||||
kills running instances.
|
:return: True on success else False
|
||||||
:param: executable name to kill, may be a path as well
|
"""
|
||||||
|
exe_name = os.path.basename(exe_name)
|
||||||
:return: True on success else False
|
if IS_POSIX:
|
||||||
"""
|
r = os.system("kill -f -9 $(pidof %s)" % exe_name)
|
||||||
exe_name = os.path.basename(exe_name)
|
else:
|
||||||
if IS_POSIX:
|
r = os.system("taskkill /f /im %s" % exe_name)
|
||||||
r = os.system("kill -f -9 $(pidof %s)" % exe_name)
|
return not r
|
||||||
else:
|
|
||||||
r = os.system("taskkill /f /im %s" % exe_name)
|
@staticmethod
|
||||||
return not r
|
def gen_random_cdc():
|
||||||
|
cdc = random.choices(string.ascii_letters, k=27)
|
||||||
@staticmethod
|
return "".join(cdc).encode()
|
||||||
def gen_random_cdc():
|
|
||||||
cdc = random.choices(string.ascii_lowercase, k=26)
|
def is_binary_patched(self, executable_path=None):
|
||||||
cdc[-6:-4] = map(str.upper, cdc[-6:-4])
|
executable_path = executable_path or self.executable_path
|
||||||
cdc[2] = cdc[0]
|
try:
|
||||||
cdc[3] = "_"
|
with io.open(executable_path, "rb") as fh:
|
||||||
return "".join(cdc).encode()
|
return fh.read().find(b"undetected chromedriver") != -1
|
||||||
|
except FileNotFoundError:
|
||||||
def is_binary_patched(self, executable_path=None):
|
return False
|
||||||
"""simple check if executable is patched.
|
|
||||||
|
def patch_exe(self):
|
||||||
:return: False if not patched, else True
|
start = time.perf_counter()
|
||||||
"""
|
logger.info("patching driver executable %s" % self.executable_path)
|
||||||
executable_path = executable_path or self.executable_path
|
with io.open(self.executable_path, "r+b") as fh:
|
||||||
with io.open(executable_path, "rb") as fh:
|
content = fh.read()
|
||||||
for line in iter(lambda: fh.readline(), b""):
|
# match_injected_codeblock = re.search(rb"{window.*;}", content)
|
||||||
if b"cdc_" in line:
|
match_injected_codeblock = re.search(rb"\{window\.cdc.*?;\}", content)
|
||||||
return False
|
if match_injected_codeblock:
|
||||||
else:
|
target_bytes = match_injected_codeblock[0]
|
||||||
return True
|
new_target_bytes = (
|
||||||
|
b'{console.log("undetected chromedriver 1337!")}'.ljust(
|
||||||
def patch_exe(self):
|
len(target_bytes), b" "
|
||||||
"""
|
)
|
||||||
Patches the ChromeDriver binary
|
)
|
||||||
|
new_content = content.replace(target_bytes, new_target_bytes)
|
||||||
:return: False on failure, binary name on success
|
if new_content == content:
|
||||||
"""
|
logger.warning(
|
||||||
logger.info("patching driver executable %s" % self.executable_path)
|
"something went wrong patching the driver binary. could not find injection code block"
|
||||||
|
)
|
||||||
linect = 0
|
else:
|
||||||
replacement = self.gen_random_cdc()
|
logger.debug(
|
||||||
with io.open(self.executable_path, "r+b") as fh:
|
"found block:\n%s\nreplacing with:\n%s"
|
||||||
for line in iter(lambda: fh.readline(), b""):
|
% (target_bytes, new_target_bytes)
|
||||||
if b"cdc_" in line:
|
)
|
||||||
fh.seek(-len(line), 1)
|
fh.seek(0)
|
||||||
newline = re.sub(b"cdc_.{22}", replacement, line)
|
fh.write(new_content)
|
||||||
fh.write(newline)
|
logger.debug(
|
||||||
linect += 1
|
"patching took us {:.2f} seconds".format(time.perf_counter() - start)
|
||||||
return linect
|
)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "{0:s}({1:s})".format(
|
return "{0:s}({1:s})".format(
|
||||||
self.__class__.__name__,
|
self.__class__.__name__,
|
||||||
self.executable_path,
|
self.executable_path,
|
||||||
)
|
)
|
||||||
|
|
||||||
def __del__(self):
|
def __del__(self):
|
||||||
|
if self._custom_exe_path:
|
||||||
if self._custom_exe_path:
|
# if the driver binary is specified by user
|
||||||
# if the driver binary is specified by user
|
# we assume it is important enough to not delete it
|
||||||
# we assume it is important enough to not delete it
|
return
|
||||||
return
|
else:
|
||||||
else:
|
timeout = 3 # stop trying after this many seconds
|
||||||
timeout = 3 # stop trying after this many seconds
|
t = time.monotonic()
|
||||||
t = time.monotonic()
|
while True:
|
||||||
while True:
|
now = time.monotonic()
|
||||||
now = time.monotonic()
|
if now - t > timeout:
|
||||||
if now - t > timeout:
|
# we don't want to wait until the end of time
|
||||||
# we don't want to wait until the end of time
|
logger.debug(
|
||||||
logger.debug(
|
"could not unlink %s in time (%d seconds)"
|
||||||
"could not unlink %s in time (%d seconds)"
|
% (self.executable_path, timeout)
|
||||||
% (self.executable_path, timeout)
|
)
|
||||||
)
|
break
|
||||||
break
|
try:
|
||||||
try:
|
os.unlink(self.executable_path)
|
||||||
os.unlink(self.executable_path)
|
logger.debug("successfully unlinked %s" % self.executable_path)
|
||||||
logger.debug("successfully unlinked %s" % self.executable_path)
|
break
|
||||||
break
|
except (OSError, RuntimeError, PermissionError):
|
||||||
except (OSError, RuntimeError, PermissionError):
|
time.sleep(0.1)
|
||||||
time.sleep(0.1)
|
continue
|
||||||
continue
|
except FileNotFoundError:
|
||||||
except FileNotFoundError:
|
break
|
||||||
break
|
|
||||||
|
|||||||
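The patch_exe() rewrite above stops renaming cdc_ variables and instead blanks the whole injected codeblock in the chromedriver binary. A minimal standalone sketch of that approach, assuming a hypothetical driver path (not part of the diff):

import io
import re

def patch_driver_binary(driver_path: str) -> bool:
    # look for the injected '{window.cdc...;}' block and overwrite it in place,
    # padding with spaces so the binary keeps exactly the same size
    with io.open(driver_path, "r+b") as fh:
        content = fh.read()
        match = re.search(rb"\{window\.cdc.*?;\}", content)
        if not match:
            return False
        target = match[0]
        replacement = b'{console.log("patched")}'.ljust(len(target), b" ")
        fh.seek(0)
        fh.write(content.replace(target, replacement))
    return True

# patch_driver_binary("/tmp/chromedriver")  # hypothetical path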
@@ -1,102 +1,99 @@
#!/usr/bin/env python3
# this module is part of undetected_chromedriver

import asyncio
import json
import logging
import threading

logger = logging.getLogger(__name__)


class Reactor(threading.Thread):
    def __init__(self, driver: "Chrome"):
        super().__init__()

        self.driver = driver
        self.loop = asyncio.new_event_loop()

        self.lock = threading.Lock()
        self.event = threading.Event()
        self.daemon = True
        self.handlers = {}

    def add_event_handler(self, method_name, callback: callable):
        """

        Parameters
        ----------
        event_name: str
            example "Network.responseReceived"

        callback: callable
            callable which accepts 1 parameter: the message object dictionary

        Returns
        -------

        """
        with self.lock:
            self.handlers[method_name.lower()] = callback

    @property
    def running(self):
        return not self.event.is_set()

    def run(self):
        try:
            asyncio.set_event_loop(self.loop)
            self.loop.run_until_complete(self.listen())
        except Exception as e:
            logger.warning("Reactor.run() => %s", e)

    async def _wait_service_started(self):
        while True:
            with self.lock:
                if (
                    getattr(self.driver, "service", None)
                    and getattr(self.driver.service, "process", None)
                    and self.driver.service.process.poll()
                ):
                    await asyncio.sleep(self.driver._delay or 0.25)
                else:
                    break

    async def listen(self):
        while self.running:
            await self._wait_service_started()
            await asyncio.sleep(1)

            try:
                with self.lock:
                    log_entries = self.driver.get_log("performance")

                for entry in log_entries:
                    try:
                        obj_serialized: str = entry.get("message")
                        obj = json.loads(obj_serialized)
                        message = obj.get("message")
                        method = message.get("method")

                        if "*" in self.handlers:
                            await self.loop.run_in_executor(
                                None, self.handlers["*"], message
                            )
                        elif method.lower() in self.handlers:
                            await self.loop.run_in_executor(
                                None, self.handlers[method.lower()], message
                            )

                        # print(type(message), message)
                    except Exception as e:
                        raise e from None

            except Exception as e:
                if "invalid session id" in str(e):
                    pass
                else:
                    logging.debug("exception ignored :", e)
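For context, a hedged sketch of how the Reactor above is meant to be wired up: it polls the driver's "performance" log and dispatches CDP messages to registered callbacks. The logging capability setup and the URL below are assumptions for illustration, not part of the diff:

import undetected_chromedriver as uc

def on_response(message: dict):
    # called with the raw CDP message dictionary
    params = message.get("params", {})
    print(params.get("response", {}).get("url"))

options = uc.ChromeOptions()
# assumed prerequisite so driver.get_log("performance") returns entries
options.set_capability("goog:loggingPrefs", {"performance": "ALL"})
driver = uc.Chrome(options=options)

reactor = Reactor(driver)  # class shown in the hunk above
reactor.add_event_handler("Network.responseReceived", on_response)
reactor.start()            # daemon thread; runs listen() in its own event loop
driver.get("https://www.example.com")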
@@ -1,4 +0,0 @@
-# for backward compatibility
-import sys
-
-sys.modules[__name__] = sys.modules[__package__]
@@ -1,37 +1,86 @@
+from typing import List
+
+from selenium.webdriver.common.by import By
import selenium.webdriver.remote.webelement


class WebElement(selenium.webdriver.remote.webelement.WebElement):
-    """
-    Custom WebElement class which makes it easier to view elements when
-    working in an interactive environment.
-
-    standard webelement repr:
-    <selenium.webdriver.remote.webelement.WebElement (session="85ff0f671512fa535630e71ee951b1f2", element="6357cb55-92c3-4c0f-9416-b174f9c1b8c4")>
-
-    using this WebElement class:
-    <WebElement(<a class="mobile-show-inline-block mc-update-infos init-ok" href="#" id="main-cat-switcher-mobile">)>
-
-    """
-
-    @property
-    def attrs(self):
-        if not hasattr(self, "_attrs"):
-            self._attrs = self._parent.execute_script(
-                """
-                var items = {};
-                for (index = 0; index < arguments[0].attributes.length; ++index)
-                {
-                 items[arguments[0].attributes[index].name] = arguments[0].attributes[index].value
-                };
-                return items;
-                """,
-                self,
-            )
-        return self._attrs
-
-    def __repr__(self):
-        strattrs = " ".join([f'{k}="{v}"' for k, v in self.attrs.items()])
-        if strattrs:
-            strattrs = " " + strattrs
-        return f"{self.__class__.__name__} <{self.tag_name}{strattrs}>"
+    def click_safe(self):
+        super().click()
+        self._parent.reconnect(0.1)
+
+    def children(
+        self, tag=None, recursive=False
+    ) -> List[selenium.webdriver.remote.webelement.WebElement]:
+        """
+        returns direct child elements of current element
+        :param tag: str, if supplied, returns <tag> nodes only
+        """
+        script = "return [... arguments[0].children]"
+        if tag:
+            script += ".filter( node => node.tagName === '%s')" % tag.upper()
+        if recursive:
+            return list(_recursive_children(self, tag))
+        return list(self._parent.execute_script(script, self))
+
+
+class UCWebElement(WebElement):
+    """
+    Custom WebElement class which makes it easier to view elements when
+    working in an interactive environment.
+
+    standard webelement repr:
+    <selenium.webdriver.remote.webelement.WebElement (session="85ff0f671512fa535630e71ee951b1f2", element="6357cb55-92c3-4c0f-9416-b174f9c1b8c4")>
+
+    using this WebElement class:
+    <WebElement(<a class="mobile-show-inline-block mc-update-infos init-ok" href="#" id="main-cat-switcher-mobile">)>
+
+    """
+
+    def __init__(self, parent, id_):
+        super().__init__(parent, id_)
+        self._attrs = None
+
+    @property
+    def attrs(self):
+        if not self._attrs:
+            self._attrs = self._parent.execute_script(
+                """
+                var items = {};
+                for (index = 0; index < arguments[0].attributes.length; ++index)
+                {
+                 items[arguments[0].attributes[index].name] = arguments[0].attributes[index].value
+                };
+                return items;
+                """,
+                self,
+            )
+        return self._attrs
+
+    def __repr__(self):
+        strattrs = " ".join([f'{k}="{v}"' for k, v in self.attrs.items()])
+        if strattrs:
+            strattrs = " " + strattrs
+        return f"{self.__class__.__name__} <{self.tag_name}{strattrs}>"
+
+
+def _recursive_children(element, tag: str = None, _results=None):
+    """
+    returns all children of <element> recursively
+
+    :param element: `WebElement` object.
+        find children below this <element>
+
+    :param tag: str = None.
+        if provided, return only <tag> elements. example: 'a', or 'img'
+
+    :param _results: do not use!
+    """
+    results = _results or set()
+    for element in element.children():
+        if tag:
+            if element.tag_name == tag:
+                results.add(element)
+        else:
+            results.add(element)
+        results |= _recursive_children(element, tag, results)
+    return results
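A short usage sketch for the new children() helper added above, assuming the vendored driver returns elements of this WebElement class (the target page is made up):

import undetected_chromedriver as uc
from selenium.webdriver.common.by import By

driver = uc.Chrome()
driver.get("https://www.example.com")           # made-up target
body = driver.find_element(By.TAG_NAME, "body")
divs = body.children(tag="div")                 # direct <div> children only
links = body.children(tag="a", recursive=True)  # every <a> below <body>, via _recursive_children()
for a in links:
    print(a.get_attribute("href"))
driver.quit()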
111
src/utils.py
@@ -8,6 +8,7 @@ from selenium.webdriver.chrome.webdriver import WebDriver
import undetected_chromedriver as uc

FLARESOLVERR_VERSION = None
+CHROME_EXE_PATH = None
CHROME_MAJOR_VERSION = None
USER_AGENT = None
XVFB_DISPLAY = None
@@ -28,12 +29,14 @@ def get_flaresolverr_version() -> str:
        return FLARESOLVERR_VERSION

    package_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, 'package.json')
+    if not os.path.isfile(package_path):
+        package_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'package.json')
    with open(package_path) as f:
        FLARESOLVERR_VERSION = json.loads(f.read())['version']
    return FLARESOLVERR_VERSION


-def get_webdriver() -> WebDriver:
+def get_webdriver(proxy: dict = None) -> WebDriver:
    global PATCHED_DRIVER_PATH
    logging.debug('Launching web browser...')
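The new optional proxy parameter above is consumed in the next hunk, where it is turned into a --proxy-server argument. A hypothetical call, with a made-up proxy address:

driver = get_webdriver(proxy={"url": "http://127.0.0.1:8888"})
try:
    driver.get("https://www.example.com")
finally:
    driver.quit()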
@@ -44,6 +47,27 @@ def get_webdriver() -> WebDriver:
    # todo: this param shows a warning in chrome head-full
    options.add_argument('--disable-setuid-sandbox')
    options.add_argument('--disable-dev-shm-usage')
+    # this option removes the zygote sandbox (it seems that the resolution is a bit faster)
+    options.add_argument('--no-zygote')
+    # attempt to fix Docker ARM32 build
+    options.add_argument('--disable-gpu-sandbox')
+    options.add_argument('--disable-software-rasterizer')
+    options.add_argument('--ignore-certificate-errors')
+    options.add_argument('--ignore-ssl-errors')
+    # fix GL errors in ASUSTOR NAS
+    # https://github.com/FlareSolverr/FlareSolverr/issues/782
+    # https://github.com/microsoft/vscode/issues/127800#issuecomment-873342069
+    # https://peter.sh/experiments/chromium-command-line-switches/#use-gl
+    options.add_argument('--use-gl=swiftshader')
+    # workaround for updated 'verify your are human' check
+    # https://github.com/FlareSolverr/FlareSolverr/issues/811
+    options.add_argument('--auto-open-devtools-for-tabs')
+    options.add_argument('--headless=true')
+
+    if proxy and 'url' in proxy:
+        proxy_url = proxy['url']
+        logging.debug("Using webdriver proxy: %s", proxy_url)
+        options.add_argument('--proxy-server=%s' % proxy_url)

    # note: headless mode is detected (options.headless = True)
    # we launch the browser in head-full mode with the window hidden
@@ -65,15 +89,26 @@ def get_webdriver() -> WebDriver:
    if PATCHED_DRIVER_PATH is not None:
        driver_exe_path = PATCHED_DRIVER_PATH

+    # detect chrome path
+    browser_executable_path = get_chrome_exe_path()
+
    # downloads and patches the chromedriver
    # if we don't set driver_executable_path it downloads, patches, and deletes the driver each time
-    driver = uc.Chrome(options=options, driver_executable_path=driver_exe_path, version_main=version_main,
+    driver = uc.Chrome(options=options, browser_executable_path=browser_executable_path,
+                       driver_executable_path=driver_exe_path, version_main=version_main,
                       windows_headless=windows_headless)

    # save the patched driver to avoid re-downloads
    if driver_exe_path is None:
        PATCHED_DRIVER_PATH = os.path.join(driver.patcher.data_path, driver.patcher.exe_name)
        shutil.copy(driver.patcher.executable_path, PATCHED_DRIVER_PATH)

+    # workaround for updated 'verify your are human' check
+    # https://github.com/FlareSolverr/FlareSolverr/issues/811
+    driver.execute_script('''window.open("","_blank");''')
+    driver.switch_to.window(window_name=driver.window_handles[0])
+    driver.close()
+    driver.switch_to.window(window_name=driver.window_handles[0])

    # selenium vanilla
    # options = webdriver.ChromeOptions()
@@ -86,23 +121,45 @@ def get_webdriver() -> WebDriver:
    return driver


+def get_chrome_exe_path() -> str:
+    global CHROME_EXE_PATH
+    if CHROME_EXE_PATH is not None:
+        return CHROME_EXE_PATH
+    # linux pyinstaller bundle
+    chrome_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'chrome', "chrome")
+    if os.path.exists(chrome_path):
+        if not os.access(chrome_path, os.X_OK):
+            raise Exception(f'Chrome binary "{chrome_path}" is not executable. '
+                            f'Please, extract the archive with "tar xzf <file.tar.gz>".')
+        CHROME_EXE_PATH = chrome_path
+        return CHROME_EXE_PATH
+    # windows pyinstaller bundle
+    chrome_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'chrome', "chrome.exe")
+    if os.path.exists(chrome_path):
+        CHROME_EXE_PATH = chrome_path
+        return CHROME_EXE_PATH
+    # system
+    CHROME_EXE_PATH = uc.find_chrome_executable()
+    return CHROME_EXE_PATH
+
+
def get_chrome_major_version() -> str:
    global CHROME_MAJOR_VERSION
    if CHROME_MAJOR_VERSION is not None:
        return CHROME_MAJOR_VERSION

    if os.name == 'nt':
+        # Example: '104.0.5112.79'
        try:
-            stream = os.popen(
-                'reg query "HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\Google Chrome"')
-            output = stream.read()
-            # Example: '104.0.5112.79'
-            complete_version = extract_version_registry(output)
+            complete_version = extract_version_nt_executable(get_chrome_exe_path())
        except Exception:
-            # Example: '104.0.5112.79'
-            complete_version = extract_version_folder()
+            try:
+                complete_version = extract_version_nt_registry()
+            except Exception:
+                # Example: '104.0.5112.79'
+                complete_version = extract_version_nt_folder()
    else:
-        chrome_path = uc.find_chrome_executable()
+        chrome_path = get_chrome_exe_path()
        process = os.popen(f'"{chrome_path}" --version')
        # Example 1: 'Chromium 104.0.5112.79 Arch Linux\n'
        # Example 2: 'Google Chrome 104.0.5112.79 Arch Linux\n'
@@ -110,24 +167,32 @@ def get_chrome_major_version() -> str:
    process.close()

    CHROME_MAJOR_VERSION = complete_version.split('.')[0].split(' ')[-1]
-    logging.info(f"Chrome major version: {CHROME_MAJOR_VERSION}")
    return CHROME_MAJOR_VERSION


-def extract_version_registry(output) -> str:
-    try:
-        google_version = ''
-        for letter in output[output.rindex('DisplayVersion REG_SZ') + 24:]:
-            if letter != '\n':
-                google_version += letter
-            else:
-                break
-        return google_version.strip()
-    except TypeError:
-        return ''
+def extract_version_nt_executable(exe_path: str) -> str:
+    import pefile
+    pe = pefile.PE(exe_path, fast_load=True)
+    pe.parse_data_directories(
+        directories=[pefile.DIRECTORY_ENTRY["IMAGE_DIRECTORY_ENTRY_RESOURCE"]]
+    )
+    return pe.FileInfo[0][0].StringTable[0].entries[b"FileVersion"].decode('utf-8')


-def extract_version_folder() -> str:
+def extract_version_nt_registry() -> str:
+    stream = os.popen(
+        'reg query "HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\Google Chrome"')
+    output = stream.read()
+    google_version = ''
+    for letter in output[output.rindex('DisplayVersion REG_SZ') + 24:]:
+        if letter != '\n':
+            google_version += letter
+        else:
+            break
+    return google_version.strip()
+
+
+def extract_version_nt_folder() -> str:
    # Check if the Chrome folder exists in the x32 or x64 Program Files folders.
    for i in range(2):
        path = 'C:\\Program Files' + (' (x86)' if i else '') + '\\Google\\Chrome\\Application'
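extract_version_nt_executable() above reads the FileVersion resource straight from chrome.exe with pefile instead of shelling out to reg. A standalone sketch of the same lookup; the path is an assumption for illustration:

import pefile

def chrome_file_version(exe_path: str) -> str:
    # load only the headers, then parse just the resource directory
    pe = pefile.PE(exe_path, fast_load=True)
    pe.parse_data_directories(
        directories=[pefile.DIRECTORY_ENTRY["IMAGE_DIRECTORY_ENTRY_RESOURCE"]]
    )
    return pe.FileInfo[0][0].StringTable[0].entries[b"FileVersion"].decode("utf-8")

# e.g. '104.0.5112.79'
print(chrome_file_version(r"C:\Program Files\Google\Chrome\Application\chrome.exe"))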