System: use paths for download folders too (#29818)

* use paths for download folders too

* trailing slash

* reset that
pull/29800/head
Justin Newberry 2 years ago committed by GitHub
parent bfe990b112
commit 73eda51a11
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 10
      selfdrive/locationd/laikad.py
  2. 43
      selfdrive/test/helpers.py
  3. 7
      system/hardware/hw.h
  4. 8
      system/hardware/hw.py
  5. 4
      system/loggerd/bootlog.cc
  6. 2
      system/loggerd/logger.h
  7. 2
      system/loggerd/loggerd.cc
  8. 3
      system/loggerd/tests/test_loggerd.py
  9. 7
      system/ubloxd/tests/test_ublox_processing.py
  10. 4
      tools/gpstest/rpc_server.py
  11. 3
      tools/lib/tests/test_caching.py
  12. 9
      tools/lib/url_file.py
  13. 3
      tools/replay/filereader.cc

@ -24,11 +24,11 @@ from laika.opt import calc_pos_fix, get_posfix_sympy_fun, calc_vel_fix, get_velf
from openpilot.selfdrive.locationd.models.constants import GENERATED_DIR, ObservationKind from openpilot.selfdrive.locationd.models.constants import GENERATED_DIR, ObservationKind
from openpilot.selfdrive.locationd.models.gnss_kf import GNSSKalman from openpilot.selfdrive.locationd.models.gnss_kf import GNSSKalman
from openpilot.selfdrive.locationd.models.gnss_kf import States as GStates from openpilot.selfdrive.locationd.models.gnss_kf import States as GStates
from openpilot.system.hardware.hw import Paths
from openpilot.system.swaglog import cloudlog from openpilot.system.swaglog import cloudlog
MAX_TIME_GAP = 10 MAX_TIME_GAP = 10
EPHEMERIS_CACHE = 'LaikadEphemerisV3' EPHEMERIS_CACHE = 'LaikadEphemerisV3'
DOWNLOADS_CACHE_FOLDER = "/tmp/comma_download_cache/"
CACHE_VERSION = 0.2 CACHE_VERSION = 0.2
POS_FIX_RESIDUAL_THRESHOLD = 100.0 POS_FIX_RESIDUAL_THRESHOLD = 100.0
@ -83,7 +83,7 @@ class Laikad:
save_ephemeris: If true saves and loads nav and orbit ephemeris to cache. save_ephemeris: If true saves and loads nav and orbit ephemeris to cache.
""" """
self.astro_dog = AstroDog(valid_const=valid_const, auto_update=auto_update, valid_ephem_types=valid_ephem_types, self.astro_dog = AstroDog(valid_const=valid_const, auto_update=auto_update, valid_ephem_types=valid_ephem_types,
clear_old_ephemeris=True, cache_dir=DOWNLOADS_CACHE_FOLDER) clear_old_ephemeris=True, cache_dir=Paths.download_cache_root())
self.gnss_kf = GNSSKalman(GENERATED_DIR, cython=True, erratic_clock=use_qcom) self.gnss_kf = GNSSKalman(GENERATED_DIR, cython=True, erratic_clock=use_qcom)
self.auto_fetch_navs = auto_fetch_navs self.auto_fetch_navs = auto_fetch_navs
@ -435,9 +435,9 @@ def kf_add_observations(gnss_kf: GNSSKalman, t: float, measurements: List[GNSSMe
def clear_tmp_cache(): def clear_tmp_cache():
if os.path.exists(DOWNLOADS_CACHE_FOLDER): if os.path.exists(Paths.download_cache_root()):
shutil.rmtree(DOWNLOADS_CACHE_FOLDER) shutil.rmtree(Paths.download_cache_root())
os.mkdir(DOWNLOADS_CACHE_FOLDER) os.mkdir(Paths.download_cache_root())
def main(sm=None, pm=None): def main(sm=None, pm=None):

@ -1,9 +1,6 @@
import os import os
import time import time
import tempfile
from typing import List, Union
from unittest import mock
from functools import wraps from functools import wraps
import cereal.messaging as messaging import cereal.messaging as messaging
@ -72,43 +69,3 @@ def with_processes(processes, init_time=0, ignore_stopped=None):
return wrap return wrap
return wrapper return wrapper
def temporary_mock_dir(mock_paths_in: Union[List[str], str], kwarg: Union[str, None] = None, generator=tempfile.TemporaryDirectory):
  """Decorator factory: run a test with path variables patched to a temporary directory.

  mock_paths_in: string or string list with the fully-qualified names of the
                 variables to patch (as accepted by unittest.mock.patch).
  kwarg: name of the keyword argument used to pass the temporary directory into
         the test function, or None if the test does not need direct access.
  generator: zero-argument callable returning the context manager that yields
             the temporary directory (defaults to tempfile.TemporaryDirectory).
  """
  def wrapper(func):
    @wraps(func)
    def wrap(*args, **kwargs):
      mock_paths = mock_paths_in if isinstance(mock_paths_in, list) else [mock_paths_in]
      with generator() as temp_dir:
        # NOTE: use `patcher`, not `mock`, as the loop variable so the imported
        # unittest.mock module is not shadowed; plain loops instead of
        # side-effect list comprehensions.
        patchers = [mock.patch(path, str(temp_dir)) for path in mock_paths]
        for patcher in patchers:
          patcher.start()
        try:
          if kwarg is not None:
            kwargs[kwarg] = temp_dir
          func(*args, **kwargs)
        finally:
          # always undo every patch, even if the test raised
          for patcher in patchers:
            patcher.stop()
    return wrap
  return wrapper
def string_context(context):
  """Return a context-manager class whose instances yield ``context`` unchanged on entry."""
  class StringContext:
    def __enter__(self):
      return context

    def __exit__(self, exc_type, exc_value, traceback):
      # do not suppress exceptions
      return None
  return StringContext
# Ready-made decorators built on temporary_mock_dir:
# - temporary_dir patches nothing and just hands the test a fresh temporary
#   directory via the `temp_dir` kwarg.
# - temporary_cache_dir / temporary_laikad_downloads_dir point the named
#   module-level path constants at a temporary directory for the test's duration.
temporary_dir = temporary_mock_dir([], "temp_dir")
temporary_cache_dir = temporary_mock_dir("openpilot.tools.lib.url_file.CACHE_DIR")
temporary_laikad_downloads_dir = temporary_mock_dir("openpilot.selfdrive.locationd.laikad.DOWNLOADS_CACHE_FOLDER")

@ -40,4 +40,11 @@ namespace Path {
inline std::string swaglog_ipc() { inline std::string swaglog_ipc() {
return "ipc:///tmp/logmessage" + Path::openpilot_prefix(); return "ipc:///tmp/logmessage" + Path::openpilot_prefix();
} }
// Root directory for cached downloads; a set COMMA_CACHE environment variable
// overrides the default per-prefix /tmp location.
inline std::string download_cache_root() {
  const char *override_dir = getenv("COMMA_CACHE");
  if (override_dir != nullptr) {
    return override_dir;
  }
  return "/tmp/comma_download_cache" + Path::openpilot_prefix() + "/";
}
} // namespace Path } // namespace Path

@ -26,4 +26,10 @@ class Paths:
@staticmethod @staticmethod
def swaglog_ipc() -> str: def swaglog_ipc() -> str:
return "ipc:///tmp/logmessage" + os.environ.get("OPENPILOT_PREFIX", "") return "ipc:///tmp/logmessage" + os.environ.get("OPENPILOT_PREFIX", "")
@staticmethod
def download_cache_root() -> str:
if os.environ.get('COMMA_CACHE', False):
return os.environ['COMMA_CACHE']
return "/tmp/comma_download_cache" + os.environ.get("OPENPILOT_PREFIX", "") + "/"

@ -50,11 +50,11 @@ static kj::Array<capnp::word> build_boot_log() {
int main(int argc, char** argv) { int main(int argc, char** argv) {
const std::string timestr = logger_get_route_name(); const std::string timestr = logger_get_route_name();
const std::string path = LOG_ROOT + "/boot/" + timestr; const std::string path = Path::log_root() + "/boot/" + timestr;
LOGW("bootlog to %s", path.c_str()); LOGW("bootlog to %s", path.c_str());
// Open bootlog // Open bootlog
bool r = util::create_directories(LOG_ROOT + "/boot/", 0775); bool r = util::create_directories(Path::log_root() + "/boot/", 0775);
assert(r); assert(r);
RawFile file(path.c_str()); RawFile file(path.c_str());

@ -16,8 +16,6 @@
#include "common/swaglog.h" #include "common/swaglog.h"
#include "system/hardware/hw.h" #include "system/hardware/hw.h"
const std::string LOG_ROOT = Path::log_root();
#define LOGGER_MAX_HANDLES 16 #define LOGGER_MAX_HANDLES 16
class RawFile { class RawFile {

@ -24,7 +24,7 @@ struct LoggerdState {
void logger_rotate(LoggerdState *s) { void logger_rotate(LoggerdState *s) {
int segment = -1; int segment = -1;
int err = logger_next(&s->logger, LOG_ROOT.c_str(), s->segment_path, sizeof(s->segment_path), &segment); int err = logger_next(&s->logger, Path::log_root().c_str(), s->segment_path, sizeof(s->segment_path), &segment);
assert(err == 0); assert(err == 0);
s->rotate_segment = segment; s->rotate_segment = segment;
s->ready_to_rotate = 0; s->ready_to_rotate = 0;

@ -32,6 +32,9 @@ CEREAL_SERVICES = [f for f in log.Event.schema.union_fields if f in service_list
class TestLoggerd(unittest.TestCase): class TestLoggerd(unittest.TestCase):
def setUp(self):
  """Ensure a leftover LOG_ROOT override cannot leak into these tests."""
  if "LOG_ROOT" in os.environ:
    del os.environ["LOG_ROOT"]
def _get_latest_log_dir(self): def _get_latest_log_dir(self):
log_dirs = sorted(Path(Paths.log_root()).iterdir(), key=lambda f: f.stat().st_mtime) log_dirs = sorted(Path(Paths.log_root()).iterdir(), key=lambda f: f.stat().st_mtime)
return log_dirs[-1] return log_dirs[-1]

@ -8,9 +8,9 @@ from laika.helpers import ConstellationId
from laika.raw_gnss import correct_measurements, process_measurements, read_raw_ublox from laika.raw_gnss import correct_measurements, process_measurements, read_raw_ublox
from laika.opt import calc_pos_fix from laika.opt import calc_pos_fix
from openpilot.selfdrive.test.openpilotci import get_url from openpilot.selfdrive.test.openpilotci import get_url
from openpilot.system.hardware.hw import Paths
from openpilot.tools.lib.logreader import LogReader from openpilot.tools.lib.logreader import LogReader
from openpilot.selfdrive.test.helpers import with_processes from openpilot.selfdrive.test.helpers import with_processes
from openpilot.selfdrive.test.helpers import temporary_dir
import cereal.messaging as messaging import cereal.messaging as messaging
def get_gnss_measurements(log_reader): def get_gnss_measurements(log_reader):
@ -56,9 +56,8 @@ class TestUbloxProcessing(unittest.TestCase):
self.assertEqual(count_gps, 5036) self.assertEqual(count_gps, 5036)
self.assertEqual(count_glonass, 3651) self.assertEqual(count_glonass, 3651)
@temporary_dir def test_get_fix(self):
def test_get_fix(self, temp_dir): dog = AstroDog(cache_dir=Paths.download_cache_root())
dog = AstroDog(cache_dir=temp_dir)
position_fix_found = 0 position_fix_found = 0
count_processed_measurements = 0 count_processed_measurements = 0
count_corrected_measurements = 0 count_corrected_measurements = 0

@ -3,6 +3,7 @@ import time
import shutil import shutil
from datetime import datetime from datetime import datetime
from collections import defaultdict from collections import defaultdict
from openpilot.system.hardware.hw import Paths
import rpyc import rpyc
from rpyc.utils.server import ThreadedServer from rpyc.utils.server import ThreadedServer
@ -18,7 +19,6 @@ MATCH_NUM = 10
REPORT_STATS = 10 REPORT_STATS = 10
EPHEM_CACHE = "/data/params/d/LaikadEphemerisV3" EPHEM_CACHE = "/data/params/d/LaikadEphemerisV3"
DOWNLOAD_CACHE = "/tmp/comma_download_cache"
SERVER_LOG_FILE = "/tmp/fuzzy_server.log" SERVER_LOG_FILE = "/tmp/fuzzy_server.log"
server_log = open(SERVER_LOG_FILE, "w+") server_log = open(SERVER_LOG_FILE, "w+")
@ -162,7 +162,7 @@ class RemoteCheckerService(rpyc.Service):
if os.path.exists(EPHEM_CACHE): if os.path.exists(EPHEM_CACHE):
os.remove(EPHEM_CACHE) os.remove(EPHEM_CACHE)
shutil.rmtree(DOWNLOAD_CACHE, ignore_errors=True) shutil.rmtree(Paths.download_cache_root(), ignore_errors=True)
ret = self.run_checker(slat, slon, salt, sockets, procs, timeout) ret = self.run_checker(slat, slon, salt, sockets, procs, timeout)
kill_procs(procs) kill_procs(procs)

@ -2,7 +2,6 @@
import os import os
import unittest import unittest
from openpilot.tools.lib.url_file import URLFile from openpilot.tools.lib.url_file import URLFile
from openpilot.selfdrive.test.helpers import temporary_cache_dir
class TestFileDownload(unittest.TestCase): class TestFileDownload(unittest.TestCase):
@ -31,7 +30,6 @@ class TestFileDownload(unittest.TestCase):
self.assertEqual(file_cached.get_length(), file_downloaded.get_length()) self.assertEqual(file_cached.get_length(), file_downloaded.get_length())
self.assertEqual(response_cached, response_downloaded) self.assertEqual(response_cached, response_downloaded)
@temporary_cache_dir
def test_small_file(self): def test_small_file(self):
# Make sure we don't force cache # Make sure we don't force cache
os.environ["FILEREADER_CACHE"] = "0" os.environ["FILEREADER_CACHE"] = "0"
@ -52,7 +50,6 @@ class TestFileDownload(unittest.TestCase):
for i in range(length // 100): for i in range(length // 100):
self.compare_loads(small_file_url, 100 * i, 100) self.compare_loads(small_file_url, 100 * i, 100)
@temporary_cache_dir
def test_large_file(self): def test_large_file(self):
large_file_url = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/qlog.bz2" large_file_url = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/qlog.bz2"
# Load the end 100 bytes of both files # Load the end 100 bytes of both files

@ -8,12 +8,11 @@ from hashlib import sha256
from io import BytesIO from io import BytesIO
from tenacity import retry, wait_random_exponential, stop_after_attempt from tenacity import retry, wait_random_exponential, stop_after_attempt
from openpilot.common.file_helpers import mkdirs_exists_ok, atomic_write_in_dir from openpilot.common.file_helpers import mkdirs_exists_ok, atomic_write_in_dir
from openpilot.system.hardware.hw import Paths
# Cache chunk size # Cache chunk size
K = 1000 K = 1000
CHUNK_SIZE = 1000 * K CHUNK_SIZE = 1000 * K
CACHE_DIR = os.environ.get("COMMA_CACHE", "/tmp/comma_download_cache/")
def hash_256(link): def hash_256(link):
hsh = str(sha256((link.split("?")[0]).encode('utf-8')).hexdigest()) hsh = str(sha256((link.split("?")[0]).encode('utf-8')).hexdigest())
@ -38,7 +37,7 @@ class URLFile:
self._curl = self._tlocal.curl self._curl = self._tlocal.curl
except AttributeError: except AttributeError:
self._curl = self._tlocal.curl = pycurl.Curl() self._curl = self._tlocal.curl = pycurl.Curl()
mkdirs_exists_ok(CACHE_DIR) mkdirs_exists_ok(Paths.download_cache_root())
def __enter__(self): def __enter__(self):
return self return self
@ -66,7 +65,7 @@ class URLFile:
def get_length(self): def get_length(self):
if self._length is not None: if self._length is not None:
return self._length return self._length
file_length_path = os.path.join(CACHE_DIR, hash_256(self._url) + "_length") file_length_path = os.path.join(Paths.download_cache_root(), hash_256(self._url) + "_length")
if os.path.exists(file_length_path) and not self._force_download: if os.path.exists(file_length_path) and not self._force_download:
with open(file_length_path) as file_length: with open(file_length_path) as file_length:
content = file_length.read() content = file_length.read()
@ -93,7 +92,7 @@ class URLFile:
self._pos = position self._pos = position
chunk_number = self._pos / CHUNK_SIZE chunk_number = self._pos / CHUNK_SIZE
file_name = hash_256(self._url) + "_" + str(chunk_number) file_name = hash_256(self._url) + "_" + str(chunk_number)
full_path = os.path.join(CACHE_DIR, str(file_name)) full_path = os.path.join(Paths.download_cache_root(), str(file_name))
data = None data = None
# If we don't have a file, download it # If we don't have a file, download it
if not os.path.exists(full_path): if not os.path.exists(full_path):

@ -3,11 +3,12 @@
#include <fstream> #include <fstream>
#include "common/util.h" #include "common/util.h"
#include "system/hardware/hw.h"
#include "tools/replay/util.h" #include "tools/replay/util.h"
std::string cacheFilePath(const std::string &url) { std::string cacheFilePath(const std::string &url) {
static std::string cache_path = [] { static std::string cache_path = [] {
const std::string comma_cache = util::getenv("COMMA_CACHE", "/tmp/comma_download_cache/"); const std::string comma_cache = Path::download_cache_root();
util::create_directories(comma_cache, 0755); util::create_directories(comma_cache, 0755);
return comma_cache.back() == '/' ? comma_cache : comma_cache + "/"; return comma_cache.back() == '/' ? comma_cache : comma_cache + "/";
}(); }();

Loading…
Cancel
Save