[fix] pexels: circumvent botdetection by passing referer header
As a side effect, Cloudscraper is no longer needed. It probably only ever worked by setting the correct request headers in the first place, so we don't need it: we can set the right request headers and cipher suites ourselves.
This commit is contained in:
@@ -17,4 +17,3 @@ typer==0.24.1
|
||||
isodate==0.7.2
|
||||
whitenoise==6.12.0
|
||||
typing-extensions==4.15.0
|
||||
cloudscraper==1.2.71
|
||||
|
||||
@@ -6,10 +6,8 @@ import re
|
||||
from urllib.parse import urlencode
|
||||
from lxml import html
|
||||
|
||||
import cloudscraper
|
||||
|
||||
from searx.result_types import EngineResults
|
||||
from searx.utils import eval_xpath_list
|
||||
from searx.utils import eval_xpath_list, gen_useragent
|
||||
from searx.enginelib import EngineCache
|
||||
from searx.exceptions import SearxEngineAPIException
|
||||
from searx.network import get
|
||||
@@ -40,6 +38,8 @@ SECRET_KEY_DB_KEY = "secret-key"
|
||||
CACHE: EngineCache
|
||||
"""Cache to store the secret API key for the engine."""
|
||||
|
||||
enable_http2 = False
|
||||
|
||||
|
||||
def init(engine_settings):
|
||||
global CACHE # pylint: disable=global-statement
|
||||
@@ -47,8 +47,15 @@ def init(engine_settings):
|
||||
|
||||
|
||||
def _get_secret_key():
|
||||
scraper = cloudscraper.create_scraper()
|
||||
resp = scraper.get(base_url)
|
||||
resp = get(
|
||||
base_url,
|
||||
headers={
|
||||
# circumvents Cloudflare bot protections
|
||||
"User-Agent": gen_useragent(),
|
||||
"Referer": base_url,
|
||||
},
|
||||
)
|
||||
|
||||
if resp.status_code != 200:
|
||||
raise SearxEngineAPIException("failed to obtain secret key")
|
||||
|
||||
|
||||
Reference in New Issue
Block a user