author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 07:54:10 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 07:54:10 +0000
commit     51b156e61c2761f69bb4ca39e115077705083406 (patch)
tree       25bcc4265216ca429a2275c577fc631098a181c0 /yt_dlp/networking
parent     Releasing progress-linux version 2024.03.10-1~progress7.99u1. (diff)
Merging upstream version 2024.04.09.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'yt_dlp/networking')
-rw-r--r--  yt_dlp/networking/__init__.py    |   7
-rw-r--r--  yt_dlp/networking/_curlcffi.py   | 221
-rw-r--r--  yt_dlp/networking/_helper.py     |   4
-rw-r--r--  yt_dlp/networking/_requests.py   |   7
-rw-r--r--  yt_dlp/networking/_urllib.py     |   6
-rw-r--r--  yt_dlp/networking/_websockets.py |  38
-rw-r--r--  yt_dlp/networking/common.py      |  14
-rw-r--r--  yt_dlp/networking/impersonate.py | 141
8 files changed, 417 insertions(+), 21 deletions(-)
diff --git a/yt_dlp/networking/__init__.py b/yt_dlp/networking/__init__.py
index acadc01..356712c 100644
--- a/yt_dlp/networking/__init__.py
+++ b/yt_dlp/networking/__init__.py
@@ -28,3 +28,10 @@ except ImportError:
pass
except Exception as e:
warnings.warn(f'Failed to import "websockets" request handler: {e}' + bug_reports_message())
+
+try:
+ from . import _curlcffi # noqa: F401
+except ImportError:
+ pass
+except Exception as e:
+ warnings.warn(f'Failed to import "curl_cffi" request handler: {e}' + bug_reports_message())
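The two-stage guard above follows a convention the new _curlcffi module (below) also relies on: an optional handler module raises ImportError itself when its dependency is missing or unusable, so the first except clause skips it silently, while any other failure surfaces as a warning. A minimal sketch of that contract, with a hypothetical handler module and dependency name:

# _myhandler.py -- hypothetical optional request handler module
from ..dependencies import some_dependency  # yt-dlp resolves missing optional deps to None

if some_dependency is None:
    # Raising ImportError makes the guarded import in __init__.py skip
    # this handler silently rather than warn the user.
    raise ImportError('some_dependency is not installed')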
diff --git a/yt_dlp/networking/_curlcffi.py b/yt_dlp/networking/_curlcffi.py
new file mode 100644
index 0000000..39d1f70
--- /dev/null
+++ b/yt_dlp/networking/_curlcffi.py
@@ -0,0 +1,221 @@
+from __future__ import annotations
+
+import io
+import math
+import urllib.parse
+
+from ._helper import InstanceStoreMixin, select_proxy
+from .common import (
+ Features,
+ Request,
+ Response,
+ register_preference,
+ register_rh,
+)
+from .exceptions import (
+ CertificateVerifyError,
+ HTTPError,
+ IncompleteRead,
+ ProxyError,
+ SSLError,
+ TransportError,
+)
+from .impersonate import ImpersonateRequestHandler, ImpersonateTarget
+from ..dependencies import curl_cffi
+from ..utils import int_or_none
+
+if curl_cffi is None:
+ raise ImportError('curl_cffi is not installed')
+
+curl_cffi_version = tuple(int_or_none(x, default=0) for x in curl_cffi.__version__.split('.'))
+
+if curl_cffi_version != (0, 5, 10):
+ curl_cffi._yt_dlp__version = f'{curl_cffi.__version__} (unsupported)'
+ raise ImportError('Only curl_cffi 0.5.10 is supported')
+
+import curl_cffi.requests
+from curl_cffi.const import CurlECode, CurlOpt
+
+
+class CurlCFFIResponseReader(io.IOBase):
+ def __init__(self, response: curl_cffi.requests.Response):
+ self._response = response
+ self._iterator = response.iter_content()
+ self._buffer = b''
+ self.bytes_read = 0
+
+ def readable(self):
+ return True
+
+ def read(self, size=None):
+ exception_raised = True
+ try:
+ while self._iterator and (size is None or len(self._buffer) < size):
+ chunk = next(self._iterator, None)
+ if chunk is None:
+ self._iterator = None
+ break
+ self._buffer += chunk
+ self.bytes_read += len(chunk)
+
+ if size is None:
+ size = len(self._buffer)
+ data = self._buffer[:size]
+ self._buffer = self._buffer[size:]
+
+ # "free" the curl instance if the response is fully read.
+ # curl_cffi doesn't do this automatically and only allows one open response per thread
+ if not self._iterator and not self._buffer:
+ self.close()
+ exception_raised = False
+ return data
+ finally:
+ if exception_raised:
+ self.close()
+
+ def close(self):
+ if not self.closed:
+ self._response.close()
+ self._buffer = b''
+ super().close()
+
+
+class CurlCFFIResponseAdapter(Response):
+ fp: CurlCFFIResponseReader
+
+ def __init__(self, response: curl_cffi.requests.Response):
+ super().__init__(
+ fp=CurlCFFIResponseReader(response),
+ headers=response.headers,
+ url=response.url,
+ status=response.status_code)
+
+ def read(self, amt=None):
+ try:
+ return self.fp.read(amt)
+ except curl_cffi.requests.errors.RequestsError as e:
+ if e.code == CurlECode.PARTIAL_FILE:
+ content_length = int_or_none(e.response.headers.get('Content-Length'))
+ raise IncompleteRead(
+ partial=self.fp.bytes_read,
+ expected=content_length - self.fp.bytes_read if content_length is not None else None,
+ cause=e) from e
+ raise TransportError(cause=e) from e
+
+
+@register_rh
+class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
+ RH_NAME = 'curl_cffi'
+ _SUPPORTED_URL_SCHEMES = ('http', 'https')
+ _SUPPORTED_FEATURES = (Features.NO_PROXY, Features.ALL_PROXY)
+ _SUPPORTED_PROXY_SCHEMES = ('http', 'https', 'socks4', 'socks4a', 'socks5', 'socks5h')
+ _SUPPORTED_IMPERSONATE_TARGET_MAP = {
+ ImpersonateTarget('chrome', '110', 'windows', '10'): curl_cffi.requests.BrowserType.chrome110,
+ ImpersonateTarget('chrome', '107', 'windows', '10'): curl_cffi.requests.BrowserType.chrome107,
+ ImpersonateTarget('chrome', '104', 'windows', '10'): curl_cffi.requests.BrowserType.chrome104,
+ ImpersonateTarget('chrome', '101', 'windows', '10'): curl_cffi.requests.BrowserType.chrome101,
+ ImpersonateTarget('chrome', '100', 'windows', '10'): curl_cffi.requests.BrowserType.chrome100,
+ ImpersonateTarget('chrome', '99', 'windows', '10'): curl_cffi.requests.BrowserType.chrome99,
+ ImpersonateTarget('edge', '101', 'windows', '10'): curl_cffi.requests.BrowserType.edge101,
+ ImpersonateTarget('edge', '99', 'windows', '10'): curl_cffi.requests.BrowserType.edge99,
+ ImpersonateTarget('safari', '15.5', 'macos', '12'): curl_cffi.requests.BrowserType.safari15_5,
+ ImpersonateTarget('safari', '15.3', 'macos', '11'): curl_cffi.requests.BrowserType.safari15_3,
+ ImpersonateTarget('chrome', '99', 'android', '12'): curl_cffi.requests.BrowserType.chrome99_android,
+ }
+
+ def _create_instance(self, cookiejar=None):
+ return curl_cffi.requests.Session(cookies=cookiejar)
+
+ def _check_extensions(self, extensions):
+ super()._check_extensions(extensions)
+ extensions.pop('impersonate', None)
+ extensions.pop('cookiejar', None)
+ extensions.pop('timeout', None)
+
+ def _send(self, request: Request):
+ max_redirects_exceeded = False
+ session: curl_cffi.requests.Session = self._get_instance(
+ cookiejar=self._get_cookiejar(request) if 'cookie' not in request.headers else None)
+
+ if self.verbose:
+ session.curl.setopt(CurlOpt.VERBOSE, 1)
+
+ proxies = self._get_proxies(request)
+ if 'no' in proxies:
+ session.curl.setopt(CurlOpt.NOPROXY, proxies['no'])
+ proxies.pop('no', None)
+
+        # curl doesn't support per-protocol proxies, so we select the one that matches the request protocol
+ proxy = select_proxy(request.url, proxies=proxies)
+ if proxy:
+ session.curl.setopt(CurlOpt.PROXY, proxy)
+ scheme = urllib.parse.urlparse(request.url).scheme.lower()
+ if scheme != 'http':
+                # Enable HTTP CONNECT tunnelling for HTTPS URLs.
+                # Don't use CONNECT for plain HTTP, to stay compatible with urllib's behaviour.
+ # See: https://curl.se/libcurl/c/CURLOPT_HTTPPROXYTUNNEL.html
+ session.curl.setopt(CurlOpt.HTTPPROXYTUNNEL, 1)
+
+ headers = self._get_impersonate_headers(request)
+
+ if self._client_cert:
+ session.curl.setopt(CurlOpt.SSLCERT, self._client_cert['client_certificate'])
+ client_certificate_key = self._client_cert.get('client_certificate_key')
+ client_certificate_password = self._client_cert.get('client_certificate_password')
+ if client_certificate_key:
+ session.curl.setopt(CurlOpt.SSLKEY, client_certificate_key)
+ if client_certificate_password:
+ session.curl.setopt(CurlOpt.KEYPASSWD, client_certificate_password)
+
+ timeout = self._calculate_timeout(request)
+
+ # set CURLOPT_LOW_SPEED_LIMIT and CURLOPT_LOW_SPEED_TIME to act as a read timeout. [1]
+ # curl_cffi does not currently do this. [2]
+ # Note: CURLOPT_LOW_SPEED_TIME is in seconds, so we need to round up to the nearest second. [3]
+ # [1] https://unix.stackexchange.com/a/305311
+ # [2] https://github.com/yifeikong/curl_cffi/issues/156
+ # [3] https://curl.se/libcurl/c/CURLOPT_LOW_SPEED_TIME.html
+ session.curl.setopt(CurlOpt.LOW_SPEED_LIMIT, 1) # 1 byte per second
+ session.curl.setopt(CurlOpt.LOW_SPEED_TIME, math.ceil(timeout))
+
+ try:
+ curl_response = session.request(
+ method=request.method,
+ url=request.url,
+ headers=headers,
+ data=request.data,
+ verify=self.verify,
+ max_redirects=5,
+ timeout=timeout,
+ impersonate=self._SUPPORTED_IMPERSONATE_TARGET_MAP.get(
+ self._get_request_target(request)),
+ interface=self.source_address,
+ stream=True
+ )
+ except curl_cffi.requests.errors.RequestsError as e:
+ if e.code == CurlECode.PEER_FAILED_VERIFICATION:
+ raise CertificateVerifyError(cause=e) from e
+
+ elif e.code == CurlECode.SSL_CONNECT_ERROR:
+ raise SSLError(cause=e) from e
+
+ elif e.code == CurlECode.TOO_MANY_REDIRECTS:
+ max_redirects_exceeded = True
+ curl_response = e.response
+
+ elif e.code == CurlECode.PROXY:
+ raise ProxyError(cause=e) from e
+ else:
+ raise TransportError(cause=e) from e
+
+ response = CurlCFFIResponseAdapter(curl_response)
+
+ if not 200 <= response.status < 300:
+ raise HTTPError(response, redirect_loop=max_redirects_exceeded)
+
+ return response
+
+
+@register_preference(CurlCFFIRH)
+def curl_cffi_preference(rh, request):
+ return -100
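The low-speed options set in _send are the whole of the read-timeout emulation described in the comment above. A standalone sketch of the same idea, using only the curl_cffi 0.5.10 API this module already imports (the URL and timeout value are placeholders):

import math

import curl_cffi.requests
from curl_cffi.const import CurlOpt

timeout = 20.0  # seconds, as _calculate_timeout(request) would return

session = curl_cffi.requests.Session()
# Abort the transfer if throughput stays below LOW_SPEED_LIMIT bytes/s
# for LOW_SPEED_TIME seconds; a 1 byte/s floor makes this behave like a
# read timeout. LOW_SPEED_TIME takes whole seconds, hence the ceil.
session.curl.setopt(CurlOpt.LOW_SPEED_LIMIT, 1)
session.curl.setopt(CurlOpt.LOW_SPEED_TIME, math.ceil(timeout))

response = session.request('GET', 'https://example.com', stream=True)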
diff --git a/yt_dlp/networking/_helper.py b/yt_dlp/networking/_helper.py
index d79dd79..8e678b2 100644
--- a/yt_dlp/networking/_helper.py
+++ b/yt_dlp/networking/_helper.py
@@ -2,6 +2,7 @@ from __future__ import annotations
import contextlib
import functools
+import os
import socket
import ssl
import sys
@@ -121,6 +122,9 @@ def make_ssl_context(
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.check_hostname = verify
context.verify_mode = ssl.CERT_REQUIRED if verify else ssl.CERT_NONE
+    # Keylog file support requires OpenSSL 1.1.1+ and Python 3.8+
+ if hasattr(context, 'keylog_filename'):
+ context.keylog_filename = os.environ.get('SSLKEYLOGFILE') or None
# Some servers may reject requests if ALPN extension is not sent. See:
# https://github.com/python/cpython/issues/85140
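The keylog hook added to make_ssl_context mirrors the SSLKEYLOGFILE convention browsers follow. A minimal sketch of the effect in isolation:

import os
import ssl

context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
# Python 3.8+ built against OpenSSL 1.1.1+ exposes keylog_filename; each
# TLS handshake then appends its session secrets to the named file.
if hasattr(context, 'keylog_filename'):
    context.keylog_filename = os.environ.get('SSLKEYLOGFILE') or None

Running, say, SSLKEYLOGFILE=/tmp/keys.log yt-dlp <URL> (path is an example) lets a tool such as Wireshark decrypt the captured TLS traffic using that file.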
diff --git a/yt_dlp/networking/_requests.py b/yt_dlp/networking/_requests.py
index 6545028..e3edc77 100644
--- a/yt_dlp/networking/_requests.py
+++ b/yt_dlp/networking/_requests.py
@@ -307,8 +307,7 @@ class RequestsRH(RequestHandler, InstanceStoreMixin):
max_redirects_exceeded = False
- session = self._get_instance(
- cookiejar=request.extensions.get('cookiejar') or self.cookiejar)
+ session = self._get_instance(cookiejar=self._get_cookiejar(request))
try:
requests_res = session.request(
@@ -316,8 +315,8 @@ class RequestsRH(RequestHandler, InstanceStoreMixin):
url=request.url,
data=request.data,
headers=headers,
- timeout=float(request.extensions.get('timeout') or self.timeout),
- proxies=request.proxies or self.proxies,
+ timeout=self._calculate_timeout(request),
+ proxies=self._get_proxies(request),
allow_redirects=True,
stream=True
)
diff --git a/yt_dlp/networking/_urllib.py b/yt_dlp/networking/_urllib.py
index cb4dae3..ff110dc 100644
--- a/yt_dlp/networking/_urllib.py
+++ b/yt_dlp/networking/_urllib.py
@@ -389,11 +389,11 @@ class UrllibRH(RequestHandler, InstanceStoreMixin):
)
opener = self._get_instance(
- proxies=request.proxies or self.proxies,
- cookiejar=request.extensions.get('cookiejar') or self.cookiejar
+ proxies=self._get_proxies(request),
+ cookiejar=self._get_cookiejar(request)
)
try:
- res = opener.open(urllib_req, timeout=float(request.extensions.get('timeout') or self.timeout))
+ res = opener.open(urllib_req, timeout=self._calculate_timeout(request))
except urllib.error.HTTPError as e:
if isinstance(e.fp, (http.client.HTTPResponse, urllib.response.addinfourl)):
# Prevent file object from being closed when urllib.error.HTTPError is destroyed.
diff --git a/yt_dlp/networking/_websockets.py b/yt_dlp/networking/_websockets.py
index 1597932..6e235b0 100644
--- a/yt_dlp/networking/_websockets.py
+++ b/yt_dlp/networking/_websockets.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import contextlib
import io
import logging
import ssl
@@ -38,27 +39,40 @@ if websockets_version < (12, 0):
import websockets.sync.client
from websockets.uri import parse_uri
+# In websockets Connection, recv_exc and recv_events_exc are defined
+# after the recv events handler thread is started [1].
+# On our CI using PyPy, in some cases a race condition may occur
+# where the recv events handler thread tries to use these attributes before they are defined [2].
+# 1: https://github.com/python-websockets/websockets/blame/de768cf65e7e2b1a3b67854fb9e08816a5ff7050/src/websockets/sync/connection.py#L93
+# 2: "AttributeError: 'ClientConnection' object has no attribute 'recv_events_exc'. Did you mean: 'recv_events'?"
+import websockets.sync.connection # isort: split
+with contextlib.suppress(Exception):
+ # > 12.0
+ websockets.sync.connection.Connection.recv_exc = None
+ # 12.0
+ websockets.sync.connection.Connection.recv_events_exc = None
+
class WebsocketsResponseAdapter(WebSocketResponse):
- def __init__(self, wsw: websockets.sync.client.ClientConnection, url):
+ def __init__(self, ws: websockets.sync.client.ClientConnection, url):
super().__init__(
- fp=io.BytesIO(wsw.response.body or b''),
+ fp=io.BytesIO(ws.response.body or b''),
url=url,
- headers=wsw.response.headers,
- status=wsw.response.status_code,
- reason=wsw.response.reason_phrase,
+ headers=ws.response.headers,
+ status=ws.response.status_code,
+ reason=ws.response.reason_phrase,
)
- self.wsw = wsw
+ self._ws = ws
def close(self):
- self.wsw.close()
+ self._ws.close()
super().close()
def send(self, message):
# https://websockets.readthedocs.io/en/stable/reference/sync/client.html#websockets.sync.client.ClientConnection.send
try:
- return self.wsw.send(message)
+ return self._ws.send(message)
except (websockets.exceptions.WebSocketException, RuntimeError, TimeoutError) as e:
raise TransportError(cause=e) from e
except SocksProxyError as e:
@@ -69,7 +83,7 @@ class WebsocketsResponseAdapter(WebSocketResponse):
def recv(self):
# https://websockets.readthedocs.io/en/stable/reference/sync/client.html#websockets.sync.client.ClientConnection.recv
try:
- return self.wsw.recv()
+ return self._ws.recv()
except SocksProxyError as e:
raise ProxyError(cause=e) from e
except (websockets.exceptions.WebSocketException, RuntimeError, TimeoutError) as e:
@@ -112,10 +126,10 @@ class WebsocketsRH(WebSocketRequestHandler):
logging.getLogger(name).removeHandler(handler)
def _send(self, request):
- timeout = float(request.extensions.get('timeout') or self.timeout)
+ timeout = self._calculate_timeout(request)
headers = self._merge_headers(request.headers)
if 'cookie' not in headers:
- cookiejar = request.extensions.get('cookiejar') or self.cookiejar
+ cookiejar = self._get_cookiejar(request)
cookie_header = cookiejar.get_cookie_header(request.url)
if cookie_header:
headers['cookie'] = cookie_header
@@ -125,7 +139,7 @@ class WebsocketsRH(WebSocketRequestHandler):
'source_address': (self.source_address, 0) if self.source_address else None,
'timeout': timeout
}
- proxy = select_proxy(request.url, request.proxies or self.proxies or {})
+ proxy = select_proxy(request.url, self._get_proxies(request))
try:
if proxy:
socks_proxy_options = make_socks_proxy_opts(proxy)
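The class-attribute monkeypatch above works because Python falls back to the class when an instance attribute has not yet been set. A toy illustration of the pattern, with invented names:

import threading
import time


class Connection:
    recv_exc = None  # class-level fallback, readable before __init__ finishes

    def __init__(self):
        # The background thread may read self.recv_exc before the instance
        # assignment below runs; the class fallback turns a potential
        # AttributeError into a harmless None.
        threading.Thread(target=self._recv_events).start()
        time.sleep(0.01)  # widen the race window for demonstration
        self.recv_exc = None

    def _recv_events(self):
        print('current exception:', self.recv_exc)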
diff --git a/yt_dlp/networking/common.py b/yt_dlp/networking/common.py
index 39442ba..4c66ba6 100644
--- a/yt_dlp/networking/common.py
+++ b/yt_dlp/networking/common.py
@@ -256,6 +256,15 @@ class RequestHandler(abc.ABC):
def _merge_headers(self, request_headers):
return HTTPHeaderDict(self.headers, request_headers)
+ def _calculate_timeout(self, request):
+ return float(request.extensions.get('timeout') or self.timeout)
+
+ def _get_cookiejar(self, request):
+ return request.extensions.get('cookiejar') or self.cookiejar
+
+ def _get_proxies(self, request):
+ return (request.proxies or self.proxies).copy()
+
def _check_url_scheme(self, request: Request):
scheme = urllib.parse.urlparse(request.url).scheme.lower()
if self._SUPPORTED_URL_SCHEMES is not None and scheme not in self._SUPPORTED_URL_SCHEMES:
@@ -454,9 +463,10 @@ class Request:
else:
raise TypeError('headers must be a mapping')
- def update(self, url=None, data=None, headers=None, query=None):
+ def update(self, url=None, data=None, headers=None, query=None, extensions=None):
self.data = data if data is not None else self.data
self.headers.update(headers or {})
+ self.extensions.update(extensions or {})
self.url = update_url_query(url or self.url, query or {})
def copy(self):
@@ -491,7 +501,7 @@ class Response(io.IOBase):
def __init__(
self,
- fp: typing.IO,
+ fp: io.IOBase,
url: str,
headers: Mapping[str, str],
status: int = 200,
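With extensions now accepted by Request.update(), callers can adjust per-request options in place alongside the URL, headers and query. A hypothetical usage sketch (URL and values are illustrative):

from yt_dlp.networking.common import Request

req = Request('https://example.com/videos', headers={'X-Token': 'abc'})
# extensions= is merged into req.extensions, just as headers= is merged
req.update(query={'page': '2'}, extensions={'timeout': 5})

assert req.extensions['timeout'] == 5
assert 'page=2' in req.url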
diff --git a/yt_dlp/networking/impersonate.py b/yt_dlp/networking/impersonate.py
new file mode 100644
index 0000000..ca66180
--- /dev/null
+++ b/yt_dlp/networking/impersonate.py
@@ -0,0 +1,141 @@
+from __future__ import annotations
+
+import re
+from abc import ABC
+from dataclasses import dataclass
+from typing import Any
+
+from .common import RequestHandler, register_preference
+from .exceptions import UnsupportedRequest
+from ..compat.types import NoneType
+from ..utils import classproperty, join_nonempty
+from ..utils.networking import std_headers
+
+
+@dataclass(order=True, frozen=True)
+class ImpersonateTarget:
+ """
+ A target for browser impersonation.
+
+ Parameters:
+ @param client: the client to impersonate
+ @param version: the client version to impersonate
+ @param os: the client OS to impersonate
+ @param os_version: the client OS version to impersonate
+
+    Note: None means any value will match.
+
+ """
+ client: str | None = None
+ version: str | None = None
+ os: str | None = None
+ os_version: str | None = None
+
+ def __post_init__(self):
+ if self.version and not self.client:
+ raise ValueError('client is required if version is set')
+ if self.os_version and not self.os:
+ raise ValueError('os is required if os_version is set')
+
+ def __contains__(self, target: ImpersonateTarget):
+ if not isinstance(target, ImpersonateTarget):
+ return False
+ return (
+ (self.client is None or target.client is None or self.client == target.client)
+ and (self.version is None or target.version is None or self.version == target.version)
+ and (self.os is None or target.os is None or self.os == target.os)
+ and (self.os_version is None or target.os_version is None or self.os_version == target.os_version)
+ )
+
+ def __str__(self):
+ return f'{join_nonempty(self.client, self.version)}:{join_nonempty(self.os, self.os_version)}'.rstrip(':')
+
+ @classmethod
+ def from_str(cls, target: str):
+ mobj = re.fullmatch(r'(?:(?P<client>[^:-]+)(?:-(?P<version>[^:-]+))?)?(?::(?:(?P<os>[^:-]+)(?:-(?P<os_version>[^:-]+))?)?)?', target)
+ if not mobj:
+ raise ValueError(f'Invalid impersonate target "{target}"')
+ return cls(**mobj.groupdict())
+
+
+class ImpersonateRequestHandler(RequestHandler, ABC):
+ """
+ Base class for request handlers that support browser impersonation.
+
+ This provides a method for checking the validity of the impersonate extension,
+ which can be used in _check_extensions.
+
+    Impersonate targets consist of a client, version, os and os_version.
+ See the ImpersonateTarget class for more details.
+
+ The following may be defined:
+    - `_SUPPORTED_IMPERSONATE_TARGET_MAP`: a dict mapping supported targets to a custom object.
+      Any Request with an impersonate target not in this list will raise an UnsupportedRequest.
+      Set to None to disable this check.
+      Note: Entries are in order of preference.
+
+ Parameters:
+ @param impersonate: the default impersonate target to use for requests.
+ Set to None to disable impersonation.
+ """
+ _SUPPORTED_IMPERSONATE_TARGET_MAP: dict[ImpersonateTarget, Any] = {}
+
+ def __init__(self, *, impersonate: ImpersonateTarget = None, **kwargs):
+ super().__init__(**kwargs)
+ self.impersonate = impersonate
+
+ def _check_impersonate_target(self, target: ImpersonateTarget):
+ assert isinstance(target, (ImpersonateTarget, NoneType))
+ if target is None or not self.supported_targets:
+ return
+ if not self.is_supported_target(target):
+ raise UnsupportedRequest(f'Unsupported impersonate target: {target}')
+
+ def _check_extensions(self, extensions):
+ super()._check_extensions(extensions)
+ if 'impersonate' in extensions:
+ self._check_impersonate_target(extensions.get('impersonate'))
+
+ def _validate(self, request):
+ super()._validate(request)
+ self._check_impersonate_target(self.impersonate)
+
+ def _resolve_target(self, target: ImpersonateTarget | None):
+ """Resolve a target to a supported target."""
+ if target is None:
+ return
+ for supported_target in self.supported_targets:
+ if target in supported_target:
+ if self.verbose:
+ self._logger.stdout(
+ f'{self.RH_NAME}: resolved impersonate target {target} to {supported_target}')
+ return supported_target
+
+ @classproperty
+ def supported_targets(self) -> tuple[ImpersonateTarget, ...]:
+ return tuple(self._SUPPORTED_IMPERSONATE_TARGET_MAP.keys())
+
+ def is_supported_target(self, target: ImpersonateTarget):
+ assert isinstance(target, ImpersonateTarget)
+ return self._resolve_target(target) is not None
+
+ def _get_request_target(self, request):
+ """Get the requested target for the request"""
+ return self._resolve_target(request.extensions.get('impersonate') or self.impersonate)
+
+ def _get_impersonate_headers(self, request):
+ headers = self._merge_headers(request.headers)
+ if self._get_request_target(request) is not None:
+ # remove all headers present in std_headers
+ # todo: change this to not depend on std_headers
+ for k, v in std_headers.items():
+ if headers.get(k) == v:
+ headers.pop(k)
+ return headers
+
+
+@register_preference(ImpersonateRequestHandler)
+def impersonate_preference(rh, request):
+ if request.extensions.get('impersonate') or rh.impersonate:
+ return 1000
+ return 0
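Taken together, from_str, __contains__ and _resolve_target define how a requested target is matched against a handler's supported list: None fields act as wildcards on either side, and the first compatible entry wins (hence the preference ordering noted in the docstring). A short illustration of those semantics, based directly on the code above:

from yt_dlp.networking.impersonate import ImpersonateTarget

target = ImpersonateTarget.from_str('chrome-110:windows-10')
assert str(target) == 'chrome-110:windows-10'

# `in` means "is compatible with"; None fields match anything, so a bare
# client target accepts every version/OS combination of that client.
assert target in ImpersonateTarget('chrome')
assert ImpersonateTarget('chrome') in target
assert target not in ImpersonateTarget('safari')

In the merged upstream release this machinery backs the new --impersonate option.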