dependency: remove pycurl package (#30771)

* Update qcomgpsd.py

* Update url_file.py

* remove pycurl

* requests -> urllib3

* unused

* redundant

* fix import

* Revert "requests -> urllib3"

This reverts commit 7ca39e6189.

* headless

* fix trail

* use requests.exceptions.RequestException

* use fp.tell

* fix indents

* reorder imports

* change timeout

* fix debug timing

* remove exception

* add timeout

* missing headers

* move to constructor

* move import

* unused import

* fix debug

* try

* no retries
old-commit-hash: 70624ffc81
chrysler-long2
royjr 1 year ago committed by GitHub
parent feb03f3625
commit d41f215df8
  1. 4
      poetry.lock
  2. 2
      pyproject.toml
  3. 31
      system/qcomgpsd/qcomgpsd.py
  4. 70
      tools/lib/url_file.py

4
poetry.lock generated

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8fe4d2310328e34ec37f396db589ef55ad34a3d27c7add141abd9235fd38eb11
+oid sha256:004ffae57f0d3e8cdfea59b0aa549a2ce089adeb8e80fb5a64671828eaaf2f9a
-size 434974
+size 434276

@@ -110,7 +110,6 @@ polyline = "*"
 # these should be removed
 markdown-it-py = "*"
 timezonefinder = "*"
-pycurl = "*"
 setproctitle = "*"
@@ -153,7 +152,6 @@ sphinx-rtd-theme = "*"
 sphinx-sitemap = "*"
 tabulate = "*"
 tenacity = "*"
-types-pycurl = "*"
 types-requests = "*"
 types-tabulate = "*"
 tqdm = "*"

@@ -5,7 +5,7 @@ import signal
 import itertools
 import math
 import time
-import pycurl
+import requests
 import shutil
 import subprocess
 import datetime
@@ -102,27 +102,18 @@ def gps_enabled() -> bool:
 def download_assistance():
   try:
-    c = pycurl.Curl()
-    c.setopt(pycurl.URL, ASSISTANCE_URL)
-    c.setopt(pycurl.NOBODY, 1)
-    c.setopt(pycurl.CONNECTTIMEOUT, 2)
-    c.perform()
-    bytes_n = c.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD)
-    c.close()
-    if bytes_n > 1e5:
-      cloudlog.error("Qcom assistance data larger than expected")
-      return
+    response = requests.get(ASSISTANCE_URL, timeout=5, stream=True)
     with open(ASSIST_DATA_FILE_DOWNLOAD, 'wb') as fp:
-      c = pycurl.Curl()
-      c.setopt(pycurl.URL, ASSISTANCE_URL)
-      c.setopt(pycurl.CONNECTTIMEOUT, 5)
-      c.setopt(pycurl.WRITEDATA, fp)
-      c.perform()
-      c.close()
-    os.rename(ASSIST_DATA_FILE_DOWNLOAD, ASSIST_DATA_FILE)
+      for chunk in response.iter_content(chunk_size=8192):
+        fp.write(chunk)
+        if fp.tell() > 1e5:
+          cloudlog.error("Qcom assistance data larger than expected")
+          return
+
+    os.rename(ASSIST_DATA_FILE_DOWNLOAD, ASSIST_DATA_FILE)
-  except pycurl.error:
+  except requests.exceptions.RequestException:
     cloudlog.exception("Failed to download assistance file")
     return

@@ -1,10 +1,11 @@
 import os
 import time
 import threading
-import pycurl
 from hashlib import sha256
-from io import BytesIO
+from urllib3 import PoolManager, Retry
+from urllib3.util import Timeout
 from tenacity import retry, wait_random_exponential, stop_after_attempt
 from openpilot.common.file_helpers import atomic_write_in_dir
 from openpilot.system.hardware.hw import Paths

 # Cache chunk size
# Cache chunk size # Cache chunk size
@@ -35,13 +36,14 @@ class URLFile:
     if cache is not None:
       self._force_download = not cache
-    try:
-      self._curl = self._tlocal.curl
-    except AttributeError:
-      self._curl = self._tlocal.curl = pycurl.Curl()
     if not self._force_download:
       os.makedirs(Paths.download_cache_root(), exist_ok=True)
+    try:
+      self._http_client = URLFile._tlocal.http_client
+    except AttributeError:
+      self._http_client = URLFile._tlocal.http_client = PoolManager()

   def __enter__(self):
     return self
@@ -53,17 +55,10 @@ class URLFile:
   @retry(wait=wait_random_exponential(multiplier=1, max=5), stop=stop_after_attempt(3), reraise=True)
   def get_length_online(self):
-    c = self._curl
-    c.reset()
-    c.setopt(pycurl.NOSIGNAL, 1)
-    c.setopt(pycurl.TIMEOUT_MS, 500000)
-    c.setopt(pycurl.FOLLOWLOCATION, True)
-    c.setopt(pycurl.URL, self._url)
-    c.setopt(c.NOBODY, 1)
-    c.perform()
-    length = int(c.getinfo(c.CONTENT_LENGTH_DOWNLOAD))
-    c.reset()
-    return length
+    timeout = Timeout(connect=50.0, read=500.0)
+    response = self._http_client.request('HEAD', self._url, timeout=timeout, preload_content=False)
+    length = response.headers.get('content-length', 0)
+    return int(length)

   def get_length(self):
     if self._length is not None:
@@ -117,7 +112,7 @@ class URLFile:
   @retry(wait=wait_random_exponential(multiplier=1, max=5), stop=stop_after_attempt(3), reraise=True)
   def read_aux(self, ll=None):
     download_range = False
-    headers = ["Connection: keep-alive"]
+    headers = {'Connection': 'keep-alive'}
     if self._pos != 0 or ll is not None:
       if ll is None:
         end = self.get_length() - 1
@@ -125,50 +120,29 @@ class URLFile:
         end = min(self._pos + ll, self.get_length()) - 1
       if self._pos >= end:
         return b""
-      headers.append(f"Range: bytes={self._pos}-{end}")
+      headers['Range'] = f"bytes={self._pos}-{end}"
       download_range = True

-    dats = BytesIO()
-    c = self._curl
-    c.setopt(pycurl.URL, self._url)
-    c.setopt(pycurl.WRITEDATA, dats)
-    c.setopt(pycurl.NOSIGNAL, 1)
-    c.setopt(pycurl.TIMEOUT_MS, 500000)
-    c.setopt(pycurl.HTTPHEADER, headers)
-    c.setopt(pycurl.FOLLOWLOCATION, True)
-
     if self._debug:
-      print("downloading", self._url)
-
-      def header(x):
-        if b'MISS' in x:
-          print(x.strip())
-
-      c.setopt(pycurl.HEADERFUNCTION, header)
-
-      def test(debug_type, debug_msg):
-        print(" debug(%d): %s" % (debug_type, debug_msg.strip()))
-
-      c.setopt(pycurl.VERBOSE, 1)
-      c.setopt(pycurl.DEBUGFUNCTION, test)
       t1 = time.time()

-    c.perform()
+    timeout = Timeout(connect=50.0, read=500.0)
+    response = self._http_client.request('GET', self._url, timeout=timeout, preload_content=False, headers=headers)
+    ret = response.data

     if self._debug:
       t2 = time.time()
       if t2 - t1 > 0.1:
-        print(f"get {self._url} {headers!r} {t2 - t1:.f} slow")
+        print(f"get {self._url} {headers!r} {t2 - t1:.3f} slow")

-    response_code = c.getinfo(pycurl.RESPONSE_CODE)
+    response_code = response.status
     if response_code == 416:  # Requested Range Not Satisfiable
-      raise URLFileException(f"Error, range out of bounds {response_code} {headers} ({self._url}): {repr(dats.getvalue())[:500]}")
+      raise URLFileException(f"Error, range out of bounds {response_code} {headers} ({self._url}): {repr(ret)[:500]}")
     if download_range and response_code != 206:  # Partial Content
-      raise URLFileException(f"Error, requested range but got unexpected response {response_code} {headers} ({self._url}): {repr(dats.getvalue())[:500]}")
+      raise URLFileException(f"Error, requested range but got unexpected response {response_code} {headers} ({self._url}): {repr(ret)[:500]}")
     if (not download_range) and response_code != 200:  # OK
-      raise URLFileException(f"Error {response_code} {headers} ({self._url}): {repr(dats.getvalue())[:500]}")
+      raise URLFileException(f"Error {response_code} {headers} ({self._url}): {repr(ret)[:500]}")

-    ret = dats.getvalue()
     self._pos += len(ret)
     return ret

Loading…
Cancel
Save