7 changes: 1 addition & 6 deletions Dockerfile.openpilot
@@ -8,11 +8,6 @@ ENV PYTHONPATH ${OPENPILOT_PATH}:${PYTHONPATH}
RUN mkdir -p ${OPENPILOT_PATH}
WORKDIR ${OPENPILOT_PATH}

COPY Pipfile Pipfile.lock $OPENPILOT_PATH
RUN pip install --no-cache-dir pipenv==2021.5.29 pip==21.3.1 && \
pipenv install --system --deploy --dev --clear && \
pip uninstall -y pipenv

COPY SConstruct ${OPENPILOT_PATH}

COPY ./pyextra ${OPENPILOT_PATH}/pyextra
@@ -30,4 +25,4 @@ COPY ./panda ${OPENPILOT_PATH}/panda
COPY ./selfdrive ${OPENPILOT_PATH}/selfdrive
COPY ./system ${OPENPILOT_PATH}/system

RUN scons -j$(nproc)
RUN scons --cache-readonly -j$(nproc)
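
The Dockerfile change drops the pipenv bootstrap (the Python dependencies are presumably provided by the base image now) and switches the build to scons --cache-readonly, so a pre-populated compile cache is consumed but never written to during the image build. As a rough sketch of that flag in isolation (not the openpilot SConstruct; the cache path and target below are hypothetical), an SConstruct can point at a shared cache directory and then be driven with --cache-readonly:

# Illustrative SConstruct sketch: reuse a shared build cache without mutating it.
# The cache path and target names are hypothetical, not taken from openpilot.
import os

env = Environment()

# Derived files are looked up in this directory; with --cache-readonly on the
# command line, cache hits are fetched but no new entries are written back.
CacheDir(os.environ.get("SCONS_CACHE_DIR", "/tmp/scons_cache"))

env.Program(target="hello", source=["hello.c"])

Running scons --cache-readonly -j$(nproc) against such a tree keeps a cache baked into an earlier Docker layer from growing as a side effect of the image build.
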
6 changes: 4 additions & 2 deletions Jenkinsfile
@@ -89,8 +89,10 @@ pipeline {
sh "git config --global --add safe.directory ${WORKSPACE}"
sh "git lfs pull"
sh "${WORKSPACE}/tools/sim/build_container.sh"
sh "DETACH=1 ${WORKSPACE}/tools/sim/start_carla.sh"
sh "${WORKSPACE}/tools/sim/start_openpilot_docker.sh"
lock(resource: "", label: "simulator", inversePrecedence: true, quantity: 1) {
sh "DETACH=1 ${WORKSPACE}/tools/sim/start_carla.sh"
sh "${WORKSPACE}/tools/sim/start_openpilot_docker.sh"
}
}

post {
2 changes: 1 addition & 1 deletion SConstruct
@@ -359,14 +359,14 @@ Export('cereal', 'messaging', 'visionipc')
rednose_config = {
'generated_folder': '#selfdrive/locationd/models/generated',
'to_build': {
'gnss': ('#selfdrive/locationd/models/gnss_kf.py', True, []),
'live': ('#selfdrive/locationd/models/live_kf.py', True, ['live_kf_constants.h']),
'car': ('#selfdrive/locationd/models/car_kf.py', True, []),
},
}

if arch != "larch64":
rednose_config['to_build'].update({
'gnss': ('#selfdrive/locationd/models/gnss_kf.py', True, []),
'loc_4': ('#selfdrive/locationd/models/loc_kf.py', True, []),
'pos_computer_4': ('#rednose/helpers/lst_sq_computer.py', False, []),
'pos_computer_5': ('#rednose/helpers/lst_sq_computer.py', False, []),
2 changes: 1 addition & 1 deletion cereal
2 changes: 1 addition & 1 deletion laika_repo
1 change: 0 additions & 1 deletion release/files_common
@@ -473,7 +473,6 @@ opendbc/gm_global_a_powertrain_generated.dbc
opendbc/gm_global_a_object.dbc
opendbc/gm_global_a_chassis.dbc

opendbc/ford_fusion_2018_adas.dbc
opendbc/ford_lincoln_base_pt.dbc

opendbc/honda_accord_2018_can_generated.dbc
7 changes: 2 additions & 5 deletions selfdrive/camerad/cameras/camera_qcom2.cc
@@ -837,7 +837,6 @@ void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_i
s->road_cam.camera_init(s, v, CAMERA_ID_AR0231, 1, 20, device_id, ctx, VISION_STREAM_RGB_ROAD, VISION_STREAM_ROAD, !env_disable_road);
s->wide_road_cam.camera_init(s, v, CAMERA_ID_AR0231, 0, 20, device_id, ctx, VISION_STREAM_RGB_WIDE_ROAD, VISION_STREAM_WIDE_ROAD, !env_disable_wide_road);

s->sm = new SubMaster({"driverState"});
s->pm = new PubMaster({"roadCameraState", "driverCameraState", "wideRoadCameraState", "thumbnail"});
}

@@ -948,7 +947,6 @@ void cameras_close(MultiCameraState *s) {
s->road_cam.camera_close();
s->wide_road_cam.camera_close();

delete s->sm;
delete s->pm;
}

@@ -1221,16 +1219,15 @@ static void ar0231_process_registers(MultiCameraState *s, CameraState *c, cereal
framed.setTemperaturesC({temp_0, temp_1});
}

static void driver_cam_auto_exposure(CameraState *c, SubMaster &sm) {
static void driver_cam_auto_exposure(CameraState *c) {
struct ExpRect {int x1, x2, x_skip, y1, y2, y_skip;};
const CameraBuf *b = &c->buf;
static ExpRect rect = {96, 1832, 2, 242, 1148, 4};
camera_autoexposure(c, set_exposure_target(b, rect.x1, rect.x2, rect.x_skip, rect.y1, rect.y2, rect.y_skip));
}

static void process_driver_camera(MultiCameraState *s, CameraState *c, int cnt) {
s->sm->update(0);
driver_cam_auto_exposure(c, *(s->sm));
driver_cam_auto_exposure(c);

MessageBuilder msg;
auto framed = msg.initEvent().initDriverCameraState();
1 change: 0 additions & 1 deletion selfdrive/camerad/cameras/camera_qcom2.h
@@ -106,6 +106,5 @@ typedef struct MultiCameraState {
CameraState wide_road_cam;
CameraState driver_cam;

SubMaster *sm;
PubMaster *pm;
} MultiCameraState;
1 change: 0 additions & 1 deletion selfdrive/camerad/test/camera_test.h
@@ -19,7 +19,6 @@ typedef struct MultiCameraState {
CameraState road_cam;
CameraState driver_cam;

SubMaster *sm = nullptr;
PubMaster *pm = nullptr;
} MultiCameraState;

127 changes: 71 additions & 56 deletions selfdrive/locationd/laikad.py
@@ -1,14 +1,16 @@
#!/usr/bin/env python3
import time
from multiprocessing import Process, Queue
from concurrent.futures import Future, ProcessPoolExecutor
from typing import List, Optional

import numpy as np
from collections import defaultdict

from numpy.linalg import linalg

from cereal import log, messaging
from laika import AstroDog
from laika.constants import SECS_IN_MIN
from laika.constants import SECS_IN_HR, SECS_IN_MIN
from laika.ephemeris import EphemerisType, convert_ublox_ephem
from laika.gps_time import GPSTime
from laika.helpers import ConstellationId
@@ -27,8 +29,9 @@ class Laikad:
def __init__(self, valid_const=("GPS", "GLONASS"), auto_update=False, valid_ephem_types=(EphemerisType.ULTRA_RAPID_ORBIT, EphemerisType.NAV)):
self.astro_dog = AstroDog(valid_const=valid_const, auto_update=auto_update, valid_ephem_types=valid_ephem_types)
self.gnss_kf = GNSSKalman(GENERATED_DIR)
self.orbit_p: Optional[Process] = None
self.orbit_q = Queue()
self.orbit_fetch_executor = ProcessPoolExecutor()
self.orbit_fetch_future: Optional[Future] = None
self.last_fetch_orbits_t = None

def process_ublox_msg(self, ublox_msg, ublox_mono_time: int, block=False):
if ublox_msg.which == 'measurementReport':
@@ -37,68 +40,83 @@ def process_ublox_msg(self, ublox_msg, ublox_mono_time: int, block=False):
latest_msg_t = GPSTime(report.gpsWeek, report.rcvTow)
self.fetch_orbits(latest_msg_t + SECS_IN_MIN, block)
new_meas = read_raw_ublox(report)
processed_measurements = process_measurements(new_meas, self.astro_dog)
pos_fix = calc_pos_fix(processed_measurements, min_measurements=4)

measurements = process_measurements(new_meas, self.astro_dog)
pos_fix = calc_pos_fix(measurements, min_measurements=4)
# To get a position fix, a minimum of 5 measurements is needed.
# Each report can contain fewer, and some measurements can't be processed.
t = ublox_mono_time * 1e-9
kf_pos_std = None
if all(self.kf_valid(t)):
self.gnss_kf.predict(t)
kf_pos_std = np.sqrt(abs(self.gnss_kf.P[GStates.ECEF_POS].diagonal()))
# If localizer is valid use its position to correct measurements
if kf_pos_std is not None and linalg.norm(kf_pos_std) < 100:
est_pos = self.gnss_kf.x[GStates.ECEF_POS]
elif len(pos_fix) > 0 and abs(np.array(pos_fix[1])).mean() < 1000:
est_pos = pos_fix[0][:3]
else:
est_pos = None
corrected_measurements = []
if len(pos_fix) > 0 and abs(np.array(pos_fix[1])).mean() < 1000:
corrected_measurements = correct_measurements(measurements, pos_fix[0][:3], self.astro_dog)
if est_pos is not None:
corrected_measurements = correct_measurements(processed_measurements, est_pos, self.astro_dog)

t = ublox_mono_time * 1e-9
self.update_localizer(pos_fix, t, corrected_measurements)
localizer_valid = self.localizer_valid(t)
kf_valid = all(self.kf_valid(t))

ecef_pos = self.gnss_kf.x[GStates.ECEF_POS].tolist()
ecef_vel = self.gnss_kf.x[GStates.ECEF_VELOCITY].tolist()

pos_std = self.gnss_kf.P[GStates.ECEF_POS].flatten().tolist()
vel_std = self.gnss_kf.P[GStates.ECEF_VELOCITY].flatten().tolist()
pos_std = np.sqrt(abs(self.gnss_kf.P[GStates.ECEF_POS].diagonal())).tolist()
vel_std = np.sqrt(abs(self.gnss_kf.P[GStates.ECEF_VELOCITY].diagonal())).tolist()

bearing_deg, bearing_std = get_bearing_from_gnss(ecef_pos, ecef_vel, vel_std)

meas_msgs = [create_measurement_msg(m) for m in corrected_measurements]
dat = messaging.new_message("gnssMeasurements")
measurement_msg = log.LiveLocationKalman.Measurement.new_message
dat.gnssMeasurements = {
"positionECEF": measurement_msg(value=ecef_pos, std=pos_std, valid=localizer_valid),
"velocityECEF": measurement_msg(value=ecef_vel, std=vel_std, valid=localizer_valid),
"bearingDeg": measurement_msg(value=[bearing_deg], std=[bearing_std], valid=localizer_valid),
"positionECEF": measurement_msg(value=ecef_pos, std=pos_std, valid=kf_valid),
"velocityECEF": measurement_msg(value=ecef_vel, std=vel_std, valid=kf_valid),
"bearingDeg": measurement_msg(value=[bearing_deg], std=[bearing_std], valid=kf_valid),
"ubloxMonoTime": ublox_mono_time,
"correctedMeasurements": meas_msgs
}
return dat
elif ublox_msg.which == 'ephemeris':
ephem = convert_ublox_ephem(ublox_msg.ephemeris)
self.astro_dog.add_ephems([ephem], self.astro_dog.nav)
self.astro_dog.add_navs([ephem])
# elif ublox_msg.which == 'ionoData':
# TODO: add this. Needed to better correct messages offline. First fix ublox_msg.cc to send them.

def update_localizer(self, pos_fix, t: float, measurements: List[GNSSMeasurement]):
# Check time and outputs are valid
if not self.localizer_valid(t):
# A position fix is needed when resetting the kalman filter.
if len(pos_fix) == 0:
return
post_est = pos_fix[0][:3].tolist()
filter_time = self.gnss_kf.filter.filter_time
if filter_time is None:
valid = self.kf_valid(t)
if not all(valid):
if not valid[0]:
cloudlog.info("Init gnss kalman filter")
elif abs(t - filter_time) > MAX_TIME_GAP:
elif not valid[1]:
cloudlog.error("Time gap of over 10s detected, gnss kalman reset")
else:
elif not valid[2]:
cloudlog.error("Gnss kalman filter state is nan")
else:
cloudlog.error("Gnss kalman std too far")

if len(pos_fix) == 0:
cloudlog.warning("Position fix not available when resetting kalman filter")
return
post_est = pos_fix[0][:3].tolist()
self.init_gnss_localizer(post_est)
if len(measurements) > 0:
kf_add_observations(self.gnss_kf, t, measurements)
else:
# Ensure gnss filter is updated even with no new measurements
self.gnss_kf.predict(t)

def localizer_valid(self, t: float):
def kf_valid(self, t: float):
filter_time = self.gnss_kf.filter.filter_time
return filter_time is not None and (t - filter_time) < MAX_TIME_GAP and all(np.isfinite(self.gnss_kf.x[GStates.ECEF_POS]))
return [filter_time is not None,
filter_time is not None and abs(t - filter_time) < MAX_TIME_GAP,
all(np.isfinite(self.gnss_kf.x[GStates.ECEF_POS])),
linalg.norm(self.gnss_kf.P[GStates.ECEF_POS]) < 1e5]

def init_gnss_localizer(self, est_pos):
x_initial, p_initial_diag = np.copy(GNSSKalman.x_initial), np.copy(np.diagonal(GNSSKalman.P_initial))
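
The single localizer_valid() boolean above is split into kf_valid(), which returns one entry per condition (filter initialized, no large time gap, finite state, bounded position covariance) so update_localizer can log exactly which condition triggered a reset. A minimal sketch of the same list-of-checks pattern, using hypothetical names and the thresholds visible in the diff rather than the real GNSSKalman internals:

# Sketch of the per-condition validity checks mirrored by kf_valid()/update_localizer.
# Function and argument names here are illustrative, not the laikad ones.
import numpy as np

MAX_TIME_GAP = 10.0  # seconds, matching the "over 10s" log message above

def validity_checks(filter_time, ecef_pos, pos_cov, t):
  return [
    filter_time is not None,                                           # filter initialized
    filter_time is not None and abs(t - filter_time) < MAX_TIME_GAP,   # no large time gap
    bool(np.all(np.isfinite(ecef_pos))),                               # state is not nan
    float(np.linalg.norm(pos_cov)) < 1e5,                              # covariance still bounded
  ]

def reset_reason(checks):
  # Same ordering as the cloudlog messages in update_localizer.
  reasons = ["filter not initialized", "time gap too large", "state is nan", "std too large"]
  for ok, reason in zip(checks, reasons):
    if not ok:
      return reason
  return None
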
@@ -107,36 +125,33 @@ def init_gnss_localizer(self, est_pos):

self.gnss_kf.init_state(x_initial, covs_diag=p_initial_diag)

def get_orbit_data(self, t: GPSTime, queue):
cloudlog.info(f"Start to download/parse orbits for time {t.as_datetime()}")
start_time = time.monotonic()
try:
self.astro_dog.get_orbit_data(t, only_predictions=True)
except RuntimeError as e:
cloudlog.info(f"No orbit data found. {e}")
return
cloudlog.info(f"Done parsing orbits. Took {time.monotonic() - start_time:.2f}s")
if queue is not None:
queue.put((self.astro_dog.orbits, self.astro_dog.orbit_fetched_times))

def fetch_orbits(self, t: GPSTime, block):
if t not in self.astro_dog.orbit_fetched_times:
if block:
self.get_orbit_data(t, None)
return
if self.orbit_p is None:
self.orbit_p = Process(target=self.get_orbit_data, args=(t, self.orbit_q))
self.orbit_p.start()
if not self.orbit_q.empty():
ret = self.orbit_q.get()
if t not in self.astro_dog.orbit_fetched_times and (self.last_fetch_orbits_t is None or t - self.last_fetch_orbits_t > SECS_IN_HR):
astro_dog_vars = self.astro_dog.valid_const, self.astro_dog.auto_update, self.astro_dog.valid_ephem_types
if self.orbit_fetch_future is None:
self.orbit_fetch_future = self.orbit_fetch_executor.submit(get_orbit_data, t, *astro_dog_vars)
if block:
self.orbit_fetch_future.result()
if self.orbit_fetch_future.done():
ret = self.orbit_fetch_future.result()
if ret:
self.astro_dog.orbits, self.astro_dog.orbit_fetched_times = ret
self.orbit_p.join()
self.orbit_p = None

def __del__(self):
if self.orbit_p is not None:
self.orbit_p.kill()
self.orbit_fetch_future = None
self.last_fetch_orbits_t = t


def get_orbit_data(t: GPSTime, valid_const, auto_update, valid_ephem_types):
astro_dog = AstroDog(valid_const=valid_const, auto_update=auto_update, valid_ephem_types=valid_ephem_types)
cloudlog.info(f"Start to download/parse orbits for time {t.as_datetime()}")
start_time = time.monotonic()
data = None
try:
astro_dog.get_orbit_data(t, only_predictions=True)
data = (astro_dog.orbits, astro_dog.orbit_fetched_times)
except RuntimeError as e:
cloudlog.info(f"No orbit data found. {e}")
cloudlog.info(f"Done parsing orbits. Took {time.monotonic() - start_time:.1f}s")
return data


def create_measurement_msg(meas: GNSSMeasurement):
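
laikad's orbit download now runs through a ProcessPoolExecutor future instead of a hand-managed multiprocessing.Process and Queue: a fetch is submitted at most once per SECS_IN_HR, the future is polled on every measurement report, and .result() is only awaited when block=True. A stripped-down sketch of that submit/poll pattern (the fetch function and refetch interval below are placeholders, not the laikad ones):

# Minimal sketch of the non-blocking submit/poll pattern used for orbit fetching.
# fetch_orbits_remote and REFETCH_INTERVAL are illustrative placeholders.
import time
from concurrent.futures import Future, ProcessPoolExecutor
from typing import Optional

REFETCH_INTERVAL = 3600  # seconds; stand-in for SECS_IN_HR

def fetch_orbits_remote(t: float):
  # Must be a module-level function so the worker process can unpickle it.
  time.sleep(1)  # pretend to download and parse orbit files
  return {"fetched_at": t}

class OrbitFetcher:
  def __init__(self):
    self.executor = ProcessPoolExecutor()
    self.future: Optional[Future] = None
    self.last_fetch_t: Optional[float] = None

  def update(self, t: float, block: bool = False):
    # Submit a new fetch at most once per REFETCH_INTERVAL, without blocking the caller.
    if self.future is None and (self.last_fetch_t is None or t - self.last_fetch_t > REFETCH_INTERVAL):
      self.future = self.executor.submit(fetch_orbits_remote, t)
    if self.future is not None and (block or self.future.done()):
      result = self.future.result()  # only blocks when block=True and the fetch is still running
      self.future = None
      self.last_fetch_t = t
      return result
    return None

The submitted callable and its arguments are pickled and sent to the worker process, which is presumably why the real change makes get_orbit_data a module-level function that rebuilds an AstroDog from its constructor arguments rather than shipping the existing instance across.
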
33 changes: 25 additions & 8 deletions selfdrive/locationd/test/test_laikad.py
@@ -69,29 +69,46 @@ def test_laika_offline(self, downloader_mock):
self.assertEqual(256, len(correct_msgs))
self.assertEqual(256, len([m for m in correct_msgs if m.gnssMeasurements.positionECEF.valid]))

def test_laika_get_orbits(self):
laikad = Laikad(auto_update=False)
first_gps_time = None
def get_first_gps_time(self):
for m in self.logs:
if m.ubloxGnss.which == 'measurementReport':
new_meas = read_raw_ublox(m.ubloxGnss.measurementReport)
if len(new_meas) != 0:
first_gps_time = new_meas[0].recv_time
break
return new_meas[0].recv_time

def test_laika_get_orbits(self):
laikad = Laikad(auto_update=False)
first_gps_time = self.get_first_gps_time()
# Pretend the process has loaded the orbits on startup by using the time of the first gps message.
laikad.fetch_orbits(first_gps_time, block=True)
self.assertEqual(29, len(laikad.astro_dog.orbits.keys()))
self.assertEqual(29, len(laikad.astro_dog.orbits.values()))
self.assertGreater(min([len(v) for v in laikad.astro_dog.orbits.values()]), 0)

@unittest.skip("Use to debug live data")
def test_laika_get_orbits_now(self):
laikad = Laikad(auto_update=False)
laikad.fetch_orbits(GPSTime.from_datetime(datetime.utcnow()), block=True)
prn = "G01"
self.assertLess(0, len(laikad.astro_dog.orbits[prn]))
self.assertGreater(len(laikad.astro_dog.orbits[prn]), 0)
prn = "R01"
self.assertLess(0, len(laikad.astro_dog.orbits[prn]))
self.assertGreater(len(laikad.astro_dog.orbits[prn]), 0)
print(min(laikad.astro_dog.orbits[prn], key=lambda e: e.epoch).epoch.as_datetime())

def test_get_orbits_in_process(self):
laikad = Laikad(auto_update=False)
has_orbits = False
for m in self.logs:
laikad.process_ublox_msg(m.ubloxGnss, m.logMonoTime, block=False)
if laikad.orbit_fetch_future is not None:
laikad.orbit_fetch_future.result()
vals = laikad.astro_dog.orbits.values()
has_orbits = len(vals) > 0 and max([len(v) for v in vals]) > 0
if has_orbits:
break
self.assertTrue(has_orbits)
self.assertGreater(len(laikad.astro_dog.orbit_fetched_times._ranges), 0)
self.assertEqual(None, laikad.orbit_fetch_future)


if __name__ == "__main__":
unittest.main()
8 changes: 2 additions & 6 deletions selfdrive/manager/test/test_manager.py
@@ -5,14 +5,13 @@
import unittest

import selfdrive.manager.manager as manager
from system.hardware import AGNOS, HARDWARE
from selfdrive.manager.process import DaemonProcess
from selfdrive.manager.process_config import managed_processes
from system.hardware import AGNOS, HARDWARE

os.environ['FAKEUPLOAD'] = "1"

# TODO: make eon fast
MAX_STARTUP_TIME = 15
MAX_STARTUP_TIME = 3
ALL_PROCESSES = [p.name for p in managed_processes.values() if (type(p) is not DaemonProcess) and p.enabled and (p.name not in ['updated', 'pandad'])]


@@ -54,9 +53,6 @@ def test_clean_exit(self):
# TODO: make Qt UI exit gracefully
continue

# Make sure the process is actually dead
managed_processes[p].stop()

# TODO: interrupted blocking read exits with 1 in cereal. use a more unique return code
exit_codes = [0, 1]
if managed_processes[p].sigkill: