mirror of https://github.com/sunnypilot/sunnypilot.git, synced 2026-02-18 21:14:01 +08:00
replace common.file_helpers.mkdirs_exists_ok with python os.makedirs function (#30618)
replace common.file_helpers.mkdirs_exists_ok with python os.makedirs function
old-commit-hash: db35dcd0b5
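
The pattern applied at every call site in the diff below is mechanical; here is a minimal sketch of the before/after, assuming only the standard library (the path is illustrative and not taken from the diff; the real call sites use values such as Paths.download_cache_root() or os.path.join(...)):

    import os

    path = "/tmp/example_dir"  # illustrative path, not from the diff

    # before: project helper imported from openpilot.common.file_helpers
    # mkdirs_exists_ok(path)

    # after: standard-library equivalent, idempotent when the directory already exists
    os.makedirs(path, exist_ok=True)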
@@ -4,16 +4,6 @@ import tempfile
 from atomicwrites import AtomicWriter
 
 
-def mkdirs_exists_ok(path):
-  if path.startswith(('http://', 'https://')):
-    raise ValueError('URL path')
-  try:
-    os.makedirs(path)
-  except OSError:
-    if not os.path.isdir(path):
-      raise
-
-
 def rm_not_exists_ok(path):
   try:
     os.remove(path)
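Note (editorial, not part of the diff): the removed helper above also rejected URL-like paths before creating anything, a guard that os.makedirs(..., exist_ok=True) does not replicate. A hedged sketch of that edge case, using an illustrative URL:

    import os

    url = 'https://example.com/segment'  # illustrative; not taken from the diff
    # mkdirs_exists_ok(url) raised ValueError('URL path'); the plain replacement
    # would instead attempt to create local directories named 'https:',
    # 'example.com' and 'segment' relative to the working directory.
    os.makedirs(url, exist_ok=True)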
@@ -9,7 +9,6 @@ import time
 import glob
 from typing import NoReturn
 
-from openpilot.common.file_helpers import mkdirs_exists_ok
 import openpilot.selfdrive.sentry as sentry
 from openpilot.system.hardware.hw import Paths
 from openpilot.system.swaglog import cloudlog
@@ -128,7 +127,7 @@ def report_tombstone_apport(fn):
   new_fn = f"{date}_{get_commit(default='nocommit')[:8]}_{safe_fn(clean_path)}"[:MAX_TOMBSTONE_FN_LEN]
 
   crashlog_dir = os.path.join(Paths.log_root(), "crash")
-  mkdirs_exists_ok(crashlog_dir)
+  os.makedirs(crashlog_dir, exist_ok=True)
 
   # Files could be on different filesystems, copy, then delete
   shutil.copy(fn, os.path.join(crashlog_dir, new_fn))
@@ -4,7 +4,6 @@ import os
 
 from tqdm import tqdm
 
-from openpilot.common.file_helpers import mkdirs_exists_ok
 from openpilot.tools.lib.logreader import LogReader
 from openpilot.tools.lib.route import Route
 
@@ -17,7 +16,7 @@ if __name__ == "__main__":
   args = parser.parse_args()
 
   out_path = os.path.join("jpegs", f"{args.route.replace('|', '_')}_{args.segment}")
-  mkdirs_exists_ok(out_path)
+  os.makedirs(out_path, exist_ok=True)
 
   r = Route(args.route)
   path = r.log_paths()[args.segment] or r.qlog_paths()[args.segment]
@@ -1,6 +1,5 @@
 import json
 import os
-from openpilot.common.file_helpers import mkdirs_exists_ok
 from openpilot.system.hardware.hw import Paths
 
 
@@ -18,7 +17,7 @@ def get_token():
 
 
 def set_token(token):
-  mkdirs_exists_ok(Paths.config_root())
+  os.makedirs(Paths.config_root(), exist_ok=True)
   with open(os.path.join(Paths.config_root(), 'auth.json'), 'w') as f:
     json.dump({'access_token': token}, f)
 
@@ -1,12 +1,11 @@
 import os
 import urllib.parse
-from openpilot.common.file_helpers import mkdirs_exists_ok
 
 DEFAULT_CACHE_DIR = os.getenv("CACHE_ROOT", os.path.expanduser("~/.commacache"))
 
 def cache_path_for_file_path(fn, cache_dir=DEFAULT_CACHE_DIR):
   dir_ = os.path.join(cache_dir, "local")
-  mkdirs_exists_ok(dir_)
+  os.makedirs(dir_, exist_ok=True)
   fn_parsed = urllib.parse.urlparse(fn)
   if fn_parsed.scheme == '':
     cache_fn = os.path.abspath(fn).replace("/", "_")
@@ -5,7 +5,7 @@ import pycurl
 from hashlib import sha256
 from io import BytesIO
 from tenacity import retry, wait_random_exponential, stop_after_attempt
-from openpilot.common.file_helpers import mkdirs_exists_ok, atomic_write_in_dir
+from openpilot.common.file_helpers import atomic_write_in_dir
 from openpilot.system.hardware.hw import Paths
 # Cache chunk size
 K = 1000
@@ -40,7 +40,7 @@ class URLFile:
     except AttributeError:
       self._curl = self._tlocal.curl = pycurl.Curl()
     if not self._force_download:
-      mkdirs_exists_ok(Paths.download_cache_root())
+      os.makedirs(Paths.download_cache_root(), exist_ok=True)
 
   def __enter__(self):
     return self