From 48c86d4ad56c1996860d159f6517268531a09023 Mon Sep 17 00:00:00 2001 From: Andrew Sidhu Date: Sat, 18 Apr 2020 02:38:14 -0400 Subject: [PATCH] Fix Memory leaks from merge issues. update scons Update SConstruct Update Scons, Fix runner, fix args in rgb_to_yuv.c more scons fixes Fix Memory leaks from merge issues. update scons --- README.md | 112 ++++++++--------- RELEASES.md | 5 +- SConstruct | 118 ++++++++++-------- common/android.py | 4 +- common/realtime.py | 4 +- selfdrive/boardd/boardd_setup.py | 2 +- selfdrive/camerad/SConscript | 12 +- selfdrive/camerad/transforms/rgb_to_yuv.c | 2 +- selfdrive/car/tesla/readconfig.sh | 6 +- selfdrive/common/SConscript | 4 +- selfdrive/controls/controlsd.py | 5 +- selfdrive/controls/lib/alerts.py | 21 +++- selfdrive/controls/lib/pathplanner.py | 6 +- selfdrive/loggerd/SConscript | 9 +- selfdrive/modeld/SConscript | 4 +- selfdrive/modeld/dmonitoringmodeld.cc | 1 + selfdrive/modeld/modeld.cc | 2 +- selfdrive/modeld/runners/keras_runner.py | 3 +- selfdrive/test/process_replay/ref_commit | 2 +- selfdrive/ui/SConscript | 8 +- selfdrive/ui/paint.cc | 140 +++++++++++++--------- selfdrive/updated.py | 42 ++----- tools/webcam/README.md | 4 +- tools/webcam/front_mount_helper.py | 6 +- tools/webcam/jetson_test_cam.py | 17 +-- tools/webcam/warp_vis.py | 19 ++- 26 files changed, 284 insertions(+), 274 deletions(-) diff --git a/README.md b/README.md index 575fa77b2a431b..3916ac4c529b0f 100644 --- a/README.md +++ b/README.md @@ -63,75 +63,67 @@ openpilot should preserve all other vehicle's stock features, including, but are Supported Hardware ------ -At the moment, openpilot supports the [EON DevKit](https://comma.ai/shop/products/eon-dashcam-devkit) and the [comma two](https://comma.ai/shop/products/comma-two-devkit). A [car harness](https://comma.ai/shop/products/car-harness) is recommended to connect the EON or comma two to the car. In the future, we'd like to support other platforms as well, like gaming PCs. +At the moment, openpilot supports the [EON DevKit](https://comma.ai/shop/products/eon-dashcam-devkit) and the [comma two](https://comma.ai/shop/products/comma-two-devkit). A [car harness](https://comma.ai/shop/products/car-harness) is recommended to connect the EON or comma two to the car. For experimental purposes, openpilot can also run on an Ubuntu computer with external [webcams](https://github.com/commaai/openpilot/tree/master/tools/webcam). 
Supported Cars ------ | Make | Model (US Market Reference) | Supported Package | ACC | No ACC accel below | No ALC below | | ----------| ------------------------------| ------------------| -----------------| -------------------| ------------------| -| Acura | ILX 2016-18 | AcuraWatch Plus | openpilot | 25mph6 | 25mph | -| Acura | RDX 2016-18 | AcuraWatch Plus | openpilot | 25mph6 | 12mph | -| Chrysler | Pacifica 2017-18 | Adaptive Cruise | Stock | 0mph | 9mph | -| Chrysler | Pacifica Hybrid 2017-18 | Adaptive Cruise | Stock | 0mph | 9mph | -| Chrysler | Pacifica Hybrid 2019-20 | Adaptive Cruise | Stock | 0mph | 39mph | +| Acura | ILX 2016-18 | AcuraWatch Plus | openpilot | 25mph1 | 25mph | +| Acura | RDX 2016-18 | AcuraWatch Plus | openpilot | 25mph1 | 12mph | | Honda | Accord 2018-19 | All | Stock | 0mph | 3mph | | Honda | Accord Hybrid 2018-19 | All | Stock | 0mph | 3mph | | Honda | Civic Hatchback 2017-19 | Honda Sensing | Stock | 0mph | 12mph | | Honda | Civic Sedan/Coupe 2016-18 | Honda Sensing | openpilot | 0mph | 12mph | -| Honda | Civic Sedan/Coupe 2019 | Honda Sensing | Stock | 0mph | 2mph4 | -| Honda | CR-V 2015-16 | Touring | openpilot | 25mph6 | 12mph | +| Honda | Civic Sedan/Coupe 2019-20 | Honda Sensing | Stock | 0mph | 2mph2 | +| Honda | CR-V 2015-16 | Touring | openpilot | 25mph1 | 12mph | | Honda | CR-V 2017-19 | Honda Sensing | Stock | 0mph | 12mph | | Honda | CR-V Hybrid 2017-2019 | Honda Sensing | Stock | 0mph | 12mph | -| Honda | Fit 2018-19 | Honda Sensing | openpilot | 25mph6 | 12mph | +| Honda | Fit 2018-19 | Honda Sensing | openpilot | 25mph1 | 12mph | | Honda | Insight 2019 | Honda Sensing | Stock | 0mph | 3mph | | Honda | Odyssey 2018-20 | Honda Sensing | openpilot | 25mph1 | 0mph | | Honda | Passport 2019 | All | openpilot | 25mph1 | 12mph | | Honda | Pilot 2016-18 | Honda Sensing | openpilot | 25mph1 | 12mph | | Honda | Pilot 2019 | All | openpilot | 25mph1 | 12mph | | Honda | Ridgeline 2017-20 | Honda Sensing | openpilot | 25mph1 | 12mph | -| Hyundai | Sonata 2020 | All | Stock | 0mph | 0mph | -| Hyundai | Palisade 2020 | All | Stock | 0mph | 0mph | | Lexus | CT Hybrid 2017-18 | All | Stock3| 0mph | 0mph | | Lexus | ES 2019 | All | openpilot | 0mph | 0mph | | Lexus | ES Hybrid 2019 | All | openpilot | 0mph | 0mph | | Lexus | IS 2017-2019 | All | Stock | 22mph | 0mph | | Lexus | IS Hybrid 2017 | All | Stock | 0mph | 0mph | -| Lexus | NX Hybrid 2018 | All | Stock5| 0mph | 0mph | -| Lexus | RX 2016-17 | All | Stock5| 0mph | 0mph | +| Lexus | NX Hybrid 2018 | All | Stock3| 0mph | 0mph | +| Lexus | RX 2016-17 | All | Stock3| 0mph | 0mph | | Lexus | RX 2020 | All | openpilot | 0mph | 0mph | -| Lexus | RX Hybrid 2016-19 | All | Stock5| 0mph | 0mph | -| Subaru | Crosstrek 2018-19 | EyeSight | Stock | 0mph | 0mph | -| Subaru | Impreza 2019-20 | EyeSight | Stock | 0mph | 0mph | -| Toyota | Avalon 2016 | TSS-P | Stock5| 20mph6 | 0mph | -| Toyota | Avalon 2017-18 | All | Stock5| 20mph6 | 0mph | -| Toyota | Camry 2018-19 | All | Stock | 0mph2 | 0mph | -| Toyota | Camry Hybrid 2018-19 | All | Stock | 0mph2 | 0mph | +| Lexus | RX Hybrid 2016-19 | All | Stock3| 0mph | 0mph | +| Toyota | Avalon 2016 | TSS-P | Stock3| 20mph1 | 0mph | +| Toyota | Avalon 2017-18 | All | Stock3| 20mph1 | 0mph | +| Toyota | Camry 2018-20 | All | Stock | 0mph4 | 0mph | +| Toyota | Camry Hybrid 2018-19 | All | Stock | 0mph4 | 0mph | | Toyota | C-HR 2017-19 | All | Stock | 0mph | 0mph | | Toyota | C-HR Hybrid 2017-19 | All | Stock | 0mph | 0mph | -| Toyota | Corolla 2017-19 | All | Stock5| 20mph6 | 
0mph | +| Toyota | Corolla 2017-19 | All | Stock3| 20mph1 | 0mph | | Toyota | Corolla 2020 | All | openpilot | 0mph | 0mph | | Toyota | Corolla Hatchback 2019-20 | All | openpilot | 0mph | 0mph | | Toyota | Corolla Hybrid 2020 | All | openpilot | 0mph | 0mph | -| Toyota | Highlander 2017-19 | All | Stock5| 0mph | 0mph | -| Toyota | Highlander Hybrid 2017-19 | All | Stock5| 0mph | 0mph | +| Toyota | Highlander 2017-19 | All | Stock3| 0mph | 0mph | +| Toyota | Highlander Hybrid 2017-19 | All | Stock3| 0mph | 0mph | | Toyota | Highlander 2020 | All | openpilot | 0mph | 0mph | -| Toyota | Prius 2016 | TSS-P | Stock5| 0mph | 0mph | -| Toyota | Prius 2017-19 | All | Stock5| 0mph | 0mph | -| Toyota | Prius Prime 2017-20 | All | Stock5| 0mph | 0mph | -| Toyota | Rav4 2016 | TSS-P | Stock5| 20mph6 | 0mph | -| Toyota | Rav4 2017-18 | All | Stock5| 20mph6 | 0mph | -| Toyota | Rav4 2019 | All | openpilot | 0mph | 0mph | -| Toyota | Rav4 Hybrid 2016 | TSS-P | Stock5| 0mph | 0mph | -| Toyota | Rav4 Hybrid 2017-18 | All | Stock5| 0mph | 0mph | +| Toyota | Prius 2016 | TSS-P | Stock3| 0mph | 0mph | +| Toyota | Prius 2017-19 | All | Stock3| 0mph | 0mph | +| Toyota | Prius Prime 2017-20 | All | Stock3| 0mph | 0mph | +| Toyota | Rav4 2016 | TSS-P | Stock3| 20mph1 | 0mph | +| Toyota | Rav4 2017-18 | All | Stock3| 20mph1 | 0mph | +| Toyota | Rav4 2019-20 | All | openpilot | 0mph | 0mph | +| Toyota | Rav4 Hybrid 2016 | TSS-P | Stock3| 0mph | 0mph | +| Toyota | Rav4 Hybrid 2017-18 | All | Stock3| 0mph | 0mph | | Toyota | Rav4 Hybrid 2019-20 | All | openpilot | 0mph | 0mph | -| Toyota | Sienna 2018 | All | Stock5| 0mph | 0mph | -| Volkswagen| Golf 2016-193 | Driver Assistance | Stock | 0mph | 0mph | +| Toyota | Sienna 2018 | All | Stock3| 0mph | 0mph | -1Requires a [panda](https://comma.ai/shop/products/panda-obd-ii-dongle) and open sourced [Hyundai giraffe](https://github.com/commaai/neo/tree/master/giraffe/hyundai), designed for the 2019 Sante Fe; pinout may differ for other Hyundai and Kia models.
-<sup>2</sup>28mph for Camry 4CYL L, 4CYL LE and 4CYL SE which don't have Full-Speed Range Dynamic Radar Cruise Control.
-<sup>3</sup>Requires a [custom connector](https://community.comma.ai/wiki/index.php/Volkswagen#Integration_at_R242_Camera) for the [car harness](https://comma.ai/shop/products/car-harness)
-<sup>4</sup>2019 Honda Civic 1.6L Diesel Sedan does not have ALC below 12mph.
+<sup>1</sup>[Comma Pedal](https://community.comma.ai/wiki/index.php/Comma_Pedal) is used to provide stop-and-go capability to some of the openpilot-supported cars that don't currently support stop-and-go. Here is how to [build a Comma Pedal](https://medium.com/@jfrux/comma-pedal-building-with-macrofab-6328bea791e8). ***NOTE: The Comma Pedal is not officially supported by [comma](https://comma.ai).***
+<sup>2</sup>2019 Honda Civic 1.6L Diesel Sedan does not have ALC below 12mph.
+<sup>3</sup>When disconnecting the Driver Support Unit (DSU), openpilot ACC will replace stock ACC. For DSU locations, see [Toyota Wiki page](https://community.comma.ai/wiki/index.php/Toyota). ***NOTE: disconnecting the DSU disables Automatic Emergency Braking (AEB).***
+<sup>4</sup>28mph for Camry 4CYL L, 4CYL LE and 4CYL SE which don't have Full-Speed Range Dynamic Radar Cruise Control.
Community Maintained Cars and Features ------ @@ -146,23 +138,15 @@ Community Maintained Cars and Features | Chrysler | Pacifica 2020 | Adaptive Cruise | Stock | 0mph | 39mph | | Chrysler | Pacifica Hybrid 2017-18 | Adaptive Cruise | Stock | 0mph | 9mph | | Chrysler | Pacifica Hybrid 2019-20 | Adaptive Cruise | Stock | 0mph | 39mph | -| Genesis | G80 20182 | All | Stock | 0mph | 0mph | -| Genesis | G90 20182 | All | Stock | 0mph | 0mph | | GMC | Acadia Denali 20183| Adaptive Cruise | openpilot | 0mph | 7mph | | Holden | Astra 20171 | Adaptive Cruise | openpilot | 0mph | 7mph | | Hyundai | Elantra 2017-192 | SCC + LKAS | Stock | 19mph | 34mph | -| Hyundai | Genesis 2015-162 | SCC + LKAS | Stock | 19mph | 37mph | -| Hyundai | Ioniq 20172 | SCC + LKAS | Stock | 0mph | 32mph | -| Hyundai | Ioniq 2019 EV2 | SCC + LKAS | Stock | 0mph | 32mph | -| Hyundai | Kona 2017-192 | SCC + LKAS | Stock | 22mph | 0mph | -| Hyundai | Kona 2019 EV2 | SCC + LKAS | Stock | 0mph | 0mph | +| Hyundai | Genesis 20182 | All | Stock | 19mph | 34mph | | Hyundai | Santa Fe 20192 | All | Stock | 0mph | 0mph | | Jeep | Grand Cherokee 2016-18 | Adaptive Cruise | Stock | 0mph | 9mph | | Jeep | Grand Cherokee 2019 | Adaptive Cruise | Stock | 0mph | 39mph | -| Kia | Forte 20182 | SCC + LKAS | Stock | 0mph | 0mph | -| Kia | Optima 20172 | SCC + LKAS/LDWS | Stock | 0mph | 32mph | | Kia | Optima 20192 | SCC + LKAS | Stock | 0mph | 0mph | -| Kia | Sorento 20182 | SCC + LKAS | Stock | 0mph | 0mph | +| Kia | Sorento 20182 | All | Stock | 0mph | 0mph | | Kia | Stinger 20182 | SCC + LKAS | Stock | 0mph | 0mph | | Nissan | Leaf 2019 | Propilot | Stock | 0mph | 0mph | | Nissan | X-Trail 2018 | Propilot | Stock | 0mph | 0mph | @@ -170,18 +154,22 @@ Community Maintained Cars and Features | Subaru | Impreza 2019-20 | EyeSight | Stock | 0mph | 0mph | | Volkswagen| Golf 2016-193 | Driver Assistance | Stock | 0mph | 0mph | -5When disconnecting the Driver Support Unit (DSU), openpilot ACC will replace stock ACC. For DSU locations, see [Toyota Wiki page](https://community.comma.ai/wiki/index.php/Toyota). ***NOTE: disconnecting the DSU disables Automatic Emergency Braking (AEB).***
-<sup>6</sup>[Comma Pedal](https://community.comma.ai/wiki/index.php/Comma_Pedal) is used to provide stop-and-go capability to some of the openpilot-supported cars that don't currently support stop-and-go. Here is how to [build a Comma Pedal](https://medium.com/@jfrux/comma-pedal-building-with-macrofab-6328bea791e8). ***NOTE: The Comma Pedal is not officially supported by [comma](https://comma.ai).***
-<sup>7</sup>Requires a [panda](https://comma.ai/shop/products/panda-obd-ii-dongle) and [community built giraffe](https://zoneos.com/volt/). ***NOTE: disconnecting the ASCM disables Automatic Emergency Braking (AEB).***
+<sup>1</sup>Requires a [panda](https://comma.ai/shop/products/panda-obd-ii-dongle) and [community built giraffe](https://zoneos.com/volt/). ***NOTE: disconnecting the ASCM disables Automatic Emergency Braking (AEB).***
+<sup>2</sup>Requires a [panda](https://comma.ai/shop/products/panda-obd-ii-dongle) and open sourced [Hyundai giraffe](https://github.com/commaai/neo/tree/master/giraffe/hyundai), designed for the 2019 Santa Fe; pinout may differ for other Hyundai and Kia models.
+<sup>3</sup>Requires a [custom connector](https://community.comma.ai/wiki/index.php/Volkswagen#Integration_at_R242_Camera) for the [car harness](https://comma.ai/shop/products/car-harness).
+ +Although it's not upstream, there's a community of people getting openpilot to run on Tesla's [here](https://tinkla.us/) Community Maintained Cars and Features are not verified by comma to meet our [safety model](SAFETY.md). Be extra cautious using them. They are only available after enabling the toggle in `Settings->Developer->Enable Community Features`. +To promote a car from community maintained, it must meet a few requirements. We must own one from the brand, we must sell the harness for it, has full ISO26262 in both panda and openpilot, there must be a path forward for longitudinal control, it must have AEB still enabled, and it must support fingerprinting 2.0 + Installation Instructions ------ -Install openpilot on a EON by entering ``https://openpilot.comma.ai`` during the installer setup. +Install openpilot on an EON or comma two by entering ``https://openpilot.comma.ai`` during the installer setup. -Follow this [video instructions](https://youtu.be/3nlkomHathI) to properly mount the EON on the windshield. Note: openpilot features an automatic pose calibration routine and openpilot performance should not be affected by small pitch and yaw misalignments caused by imprecise EON mounting. +Follow these [video instructions](https://youtu.be/3nlkomHathI) to properly mount the device on the windshield. Note: openpilot features an automatic pose calibration routine and openpilot performance should not be affected by small pitch and yaw misalignments caused by imprecise device mounting. Before placing the device on your windshield, check the state and local laws and ordinances where you drive. Some state laws prohibit or restrict the placement of objects on the windshield of a motor vehicle. @@ -199,7 +187,7 @@ Many factors can impact the performance of openpilot ALC and openpilot LDW, caus * Poor visibility (heavy rain, snow, fog, etc.) or weather conditions that may interfere with sensor operation. * The road facing camera is obstructed, covered or damaged by mud, ice, snow, etc. * Obstruction caused by applying excessive paint or adhesive products (such as wraps, stickers, rubber coating, etc.) onto the vehicle. -* The EON is mounted incorrectly. +* The device is mounted incorrectly. * When in sharp curves, like on-off ramps, intersections etc...; openpilot is designed to be limited in the amount of steering torque it can produce. * In the presence of restricted lanes or construction zones. * When driving on highly banked roads or in presence of strong cross-wind. @@ -219,7 +207,7 @@ Many factors can impact the performance of openpilot ACC and openpilot FCW, caus * Poor visibility (heavy rain, snow, fog, etc.) or weather conditions that may interfere with sensor operation. * The road facing camera or radar are obstructed, covered, or damaged by mud, ice, snow, etc. * Obstruction caused by applying excessive paint or adhesive products (such as wraps, stickers, rubber coating, etc.) onto the vehicle. -* The EON is mounted incorrectly. +* The device is mounted incorrectly. * Approaching a toll booth, a bridge or a large metal plate. * When driving on roads with pedestrians, cyclists, etc... * In presence of traffic signs or stop lights, which are not detected by openpilot at this time. @@ -237,13 +225,13 @@ The list above does not represent an exhaustive list of situations that may inte Limitations of openpilot DM ------ -openpilot DM should not be considered an exact measurements of the status of alertness of the driver. 
+openpilot DM should not be considered an exact measurement of the alertness of the driver. Many factors can impact the performance of openpilot DM, causing it to be unable to function as intended. These include, but are not limited to: * Low light conditions, such as driving at night or in dark tunnels. * Bright light (due to oncoming headlights, direct sunlight, etc.). -* The driver face is partially or completely outside field of view of the driver facing camera. +* The driver's face is partially or completely outside field of view of the driver facing camera. * Right hand driving vehicles. * The driver facing camera is obstructed, covered, or damaged. @@ -265,7 +253,7 @@ Safety and Testing ---- * openpilot observes ISO26262 guidelines, see [SAFETY.md](SAFETY.md) for more detail. -* openpilot has software in the loop [tests](run_docker_tests.sh) that run on every commit. +* openpilot has software in the loop [tests](.github/workflows/test.yaml) that run on every commit. * The safety model code lives in panda and is written in C, see [code rigor](https://github.com/commaai/panda#code-rigor) for more details. * panda has software in the loop [safety tests](https://github.com/commaai/panda/tree/master/tests/safety). * Internally, we have a hardware in the loop Jenkins test suite that builds and unit tests the various processes. @@ -292,15 +280,15 @@ Directory Structure ------ . ├── apk # The apk files used for the UI - ├── cereal # The messaging spec and libs used for all logs on EON + ├── cereal # The messaging spec and libs used for all logs ├── common # Library like functionality we've developed here ├── installer/updater # Manages auto-updates of openpilot ├── opendbc # Files showing how to interpret data from cars ├── panda # Code used to communicate on CAN - ├── phonelibs # Libraries used on EON - ├── pyextra # Libraries used on EON + ├── phonelibs # Libraries used on NEOS devices + ├── pyextra # Libraries used on NEOS devices └── selfdrive # Code needed to drive the car - ├── assets # Fonts and images for UI + ├── assets # Fonts, images, and sounds for UI ├── athena # Allows communication with the app ├── boardd # Daemon to talk to the board ├── camerad # Driver to capture images from the camera sensors @@ -314,7 +302,7 @@ Directory Structure ├── modeld # Driving and monitoring model runners ├── proclogd # Logs information from proc ├── sensord # IMU / GPS interface code - ├── tests # Unit tests, system tests and a car simulator + ├── test # Unit tests, system tests and a car simulator └── ui # The UI To understand how the services interact, see `cereal/service_list.yaml`. diff --git a/RELEASES.md b/RELEASES.md index 7cbbe4a3208212..7c1b817ef6a856 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,11 +1,10 @@ -Version 0.7.5 (2020-04-23) +Version 0.7.5 (2020-xx-xx) ======================== * Right-Hand Drive support for both driving and driver monitoring! +* New driving model: * New driver monitoring model: overall improvement on comma two * Driver camera preview in settings to improve mounting position -* Added support for many Hyundai, Kia, Genesis models thanks to xx979xx! * 2019 Nissan X-Trail and 2018 Nissan Leaf support thanks to avolmensky! 
-* Improved lateral tuning for 2020 Toyota Rav 4 (hybrid) Version 0.7.4 (2020-03-20) ======================== diff --git a/SConstruct b/SConstruct index 06cb21eed807e9..a6d5930c0aab06 100644 --- a/SConstruct +++ b/SConstruct @@ -13,33 +13,28 @@ AddOption('--asan', arch = subprocess.check_output(["uname", "-m"], encoding='utf8').rstrip() is_tbp = os.path.isfile('/data/tinkla_buddy_pro') +if arch == "aarch64" and is_tbp: + arch = "jarch64" if platform.system() == "Darwin": arch = "Darwin" -if arch == "aarch64" and (not os.path.isdir("/system")) and not is_tbp: +if arch == "aarch64" and (not os.path.isdir("/system")): arch = "larch64" webcam = bool(ARGUMENTS.get("use_webcam", 0)) -if arch == "aarch64": - if is_tbp: - webcam=True - lenv = { - "LD_LIBRARY_PATH": '/usr/lib:/usr/local/lib/:/usr/lib/aarch64-linux-gnu', - "PATH": os.environ['PATH'], - "ANDROID_DATA": "/data", - "ANDROID_ROOT": "/", - } - else: - lenv = { - "LD_LIBRARY_PATH": '/data/data/com.termux/files/usr/lib', - "PATH": os.environ['PATH'], - "ANDROID_DATA": os.environ['ANDROID_DATA'], - "ANDROID_ROOT": os.environ['ANDROID_ROOT'], - } +if arch == "aarch64" or arch == "larch64": + lenv = { + "LD_LIBRARY_PATH": '/data/data/com.termux/files/usr/lib', + "PATH": os.environ['PATH'], + } + + if arch == "aarch64": + # android + lenv["ANDROID_DATA"] = os.environ['ANDROID_DATA'] + lenv["ANDROID_ROOT"] = os.environ['ANDROID_ROOT'] cpppath = [ "#phonelibs/opencl/include", - "#phonelibs/snpe/include", ] libpath = [ @@ -48,43 +43,57 @@ if arch == "aarch64": "/system/vendor/lib64", "/system/comma/usr/lib", "#phonelibs/nanovg", - "#phonelibs/snpe/aarch64", - "#phonelibs/libyuv/lib", ] - - if is_tbp: - cflags = [] - cxxflags = [] - cpppath = [ - "/usr/local/include", - "/usr/local/include/opencv4", - "/usr/include/khronos-api", - "/usr/include", - "#phonelibs/snpe/include", - "/usr/include/aarch64-linux-gnu", - ] - libpath = [ - "/usr/local/lib", - "/usr/lib/aarch64-linux-gnu", - "/usr/lib", - "/data/op_rk3399_setup/external/snpe/lib/lib", - "/data/data/com.termux/files/usr/lib", - "#phonelibs/nanovg", - "/data/op_rk3399_setup/support_files/lib", - ] - rpath = ["/system/vendor/lib64", - "/usr/local/lib", - "/usr/lib/aarch64-linux-gnu", - "/usr/lib", - "/data/op_rk3399_setup/external/snpe/lib/lib", - "/data/op_rk3399_setup/support_files/lib", - "cereal", - "selfdrive/common", - ] + + if arch == "larch64": + cpppath += ["#phonelibs/capnp-cpp/include", "#phonelibs/capnp-c/include"] + libpath += ["#phonelibs/snpe/larch64"] + libpath += ["#phonelibs/libyuv/larch64/lib"] + libpath += ["#external/capnparm/lib", "/usr/lib/aarch64-linux-gnu"] + cflags = ["-DQCOM2", "-mcpu=cortex-a57"] + cxxflags = ["-DQCOM2", "-mcpu=cortex-a57"] + rpath = ["/usr/local/lib"] else: + libpath += ["#phonelibs/snpe/aarch64"] + libpath += ["#phonelibs/libyuv/lib"] cflags = ["-DQCOM", "-mcpu=cortex-a57"] cxxflags = ["-DQCOM", "-mcpu=cortex-a57"] rpath = ["/system/vendor/lib64"] +elif arch == "jarch64": + webcam=True + lenv = { + "LD_LIBRARY_PATH": '/usr/lib:/usr/local/lib/:/usr/lib/aarch64-linux-gnu', + "PATH": os.environ['PATH'], + "ANDROID_DATA": "/data", + "ANDROID_ROOT": "/", + } + cflags = [] + cxxflags = [] + cpppath = [ + "/usr/local/include", + "/usr/local/include/opencv4", + "/usr/include/khronos-api", + "/usr/include", + "#phonelibs/snpe/include", + "/usr/include/aarch64-linux-gnu", + ] + libpath = [ + "/usr/local/lib", + "/usr/lib/aarch64-linux-gnu", + "/usr/lib", + "/data/op_rk3399_setup/external/snpe/lib/lib", + "/data/data/com.termux/files/usr/lib", + 
"#phonelibs/nanovg", + "/data/op_rk3399_setup/support_files/lib", + ] + rpath = ["/usr/local/lib", + "/usr/lib/aarch64-linux-gnu", + "/usr/lib", + "/data/op_rk3399_setup/external/snpe/lib/lib", + "/data/op_rk3399_setup/support_files/lib", + "cereal", + "selfdrive/common", + ] else: lenv = { "PATH": "#external/bin:" + os.environ['PATH'], @@ -221,10 +230,13 @@ def abspath(x): return x[0].path.rsplit("/", 1)[1][:-3] # still needed for apks -zmq = FindFile("libzmq.a", libpath) +if arch == 'larch64': + zmq = 'zmq' +else: + zmq = FindFile("libzmq.a", libpath) if is_tbp: zmq = FindFile("libzmq.so", libpath) -Export('env', 'arch', 'zmq', 'SHARED', 'webcam', 'is_tbp') +Export('env', 'arch', 'zmq', 'SHARED', 'webcam') # cereal and messaging are shared with the system SConscript(['cereal/SConscript']) @@ -269,7 +281,7 @@ SConscript(['selfdrive/proclogd/SConscript']) SConscript(['selfdrive/ui/SConscript']) SConscript(['selfdrive/loggerd/SConscript']) -if arch == "aarch64" and not is_tbp: +if arch == "aarch64": SConscript(['selfdrive/logcatd/SConscript']) SConscript(['selfdrive/sensord/SConscript']) SConscript(['selfdrive/clocksd/SConscript']) diff --git a/common/android.py b/common/android.py index 16e60bf2497634..5e26924f2a50ef 100644 --- a/common/android.py +++ b/common/android.py @@ -232,7 +232,7 @@ def get_cdma_level(cdmadbm, cdmaecio): if network_type == NetworkType.none: return network_strength if network_type == NetworkType.wifi: - out = subprocess.check_output('dumpsys connectivity', shell=True).decode('utf-8') + out = subprocess.check_output('dumpsys connectivity', shell=True).decode('ascii') network_strength = NetworkStrength.unknown for line in out.split('\n'): signal_str = "SignalStrength: " @@ -251,7 +251,7 @@ def get_cdma_level(cdmadbm, cdmaecio): return network_strength else: # check cell strength - out = subprocess.check_output('dumpsys telephony.registry', shell=True).decode('utf-8') + out = subprocess.check_output('dumpsys telephony.registry', shell=True).decode('ascii') for line in out.split('\n'): if "mSignalStrength" in line: arr = line.split(' ') diff --git a/common/realtime.py b/common/realtime.py index d97eb88b28f677..c21222e88c2285 100644 --- a/common/realtime.py +++ b/common/realtime.py @@ -55,12 +55,10 @@ def remaining(self): return self._remaining # Maintain loop rate by calling this at the end of each loop - def keep_time(self, offset=0.): + def keep_time(self): lagged = self.monitor_time() if self._remaining > 0: time.sleep(self._remaining) - elif not offset == 0.: - self._next_frame_time += offset return lagged # this only monitor the cumulative lag, but does not enforce a rate diff --git a/selfdrive/boardd/boardd_setup.py b/selfdrive/boardd/boardd_setup.py index e19f99dbf82989..b8997f85427905 100644 --- a/selfdrive/boardd/boardd_setup.py +++ b/selfdrive/boardd/boardd_setup.py @@ -21,7 +21,7 @@ ARCH_DIR = 'x64' else: libraries = [':libcan_list_to_can_capnp.a', 'capnp', 'kj'] - if os.path.isdir("/system") or os.path.isdir('/data/oprun'): + if os.path.isdir("/system"): ARCH_DIR = 'aarch64' else: ARCH_DIR = 'larch64' diff --git a/selfdrive/camerad/SConscript b/selfdrive/camerad/SConscript index 891bc4944adf5c..e30a04fd337788 100644 --- a/selfdrive/camerad/SConscript +++ b/selfdrive/camerad/SConscript @@ -1,18 +1,14 @@ -Import('env', 'arch', 'messaging', 'is_tbp', 'common', 'gpucommon', 'visionipc', 'cereal', 'webcam') +Import('env', 'arch', 'messaging', 'common', 'gpucommon', 'visionipc', 'cereal', 'webcam') libs = ['m', 'pthread', common, 'jpeg', 'json', cereal, 'OpenCL', 
messaging, 'czmq', 'zmq', 'capnp', 'kj', 'capnp_c', visionipc, gpucommon] -if is_tbp: - larch = "aarch64_TBP" -else: - larch = arch -if larch == "aarch64": +if arch == "aarch64": libs += ['gsl', 'CB', 'adreno_utils', 'EGL', 'GLESv3', 'cutils', 'ui'] cameras = ['cameras/camera_qcom.c'] -elif larch == "larch64": +elif arch == "larch64": libs += [] cameras = ['cameras/camera_qcom2.c'] -elif larch == "aarch64_TBP": +elif arch == "jarch64": libs += ['opencv_core', 'opencv_highgui', 'opencv_video', 'opencv_imgproc', 'opencv_videoio'] cameras = ['cameras/camera_webcam.cc'] env = env.Clone() diff --git a/selfdrive/camerad/transforms/rgb_to_yuv.c b/selfdrive/camerad/transforms/rgb_to_yuv.c index 88cbeca9261bf2..5bea8088b70ebd 100644 --- a/selfdrive/camerad/transforms/rgb_to_yuv.c +++ b/selfdrive/camerad/transforms/rgb_to_yuv.c @@ -13,7 +13,7 @@ void rgb_to_yuv_init(RGBToYUVState* s, cl_context ctx, cl_device_id device_id, i assert(height % 2 == 0); s->width = width; s->height = height; - char args[8024]; + char args[1024]; printf("snprintf"); snprintf(args, sizeof(args), "-cl-fast-relaxed-math -cl-denorms-are-zero " diff --git a/selfdrive/car/tesla/readconfig.sh b/selfdrive/car/tesla/readconfig.sh index 3c3184d09dbcd1..4f03db0ded194b 100755 --- a/selfdrive/car/tesla/readconfig.sh +++ b/selfdrive/car/tesla/readconfig.sh @@ -1,3 +1,7 @@ CFG_FILE=/data/bb_openpilot.cfg -CFG_CONTENT=$(cat $CFG_FILE | sed -r '/[^=]+=[^=]+/!d' | sed -r 's/\s+=\s/=/g') +CFG_CONTENT=$(cat $CFG_FILE | sed -r "s/'/SINGLE_Q/" | sed -r '/[^=]+=[^=]+/!d' | sed -r 's/\s+=\s/=/g' | sed -e 's/[[:space:]]*\=[[:space:]]*/=/g' \ + -e 's/#.*$//' \ + -e 's/[[:space:]]*$//' \ + -e 's/^[[:space:]]*//' \ + -e "s/^\(.*\)=\([^\"']*\)$/\1=\"\2\"/" | sed -r "s/SINGLE_Q/'/" ) eval "export $CFG_CONTENT" diff --git a/selfdrive/common/SConscript b/selfdrive/common/SConscript index 680843aecd975c..c6c9f978464949 100644 --- a/selfdrive/common/SConscript +++ b/selfdrive/common/SConscript @@ -1,4 +1,4 @@ -Import('env', 'arch', 'SHARED','is_tbp') +Import('env', 'arch', 'SHARED') if SHARED: fxn = env.SharedLibrary @@ -17,7 +17,7 @@ files = [ ] -if is_tbp: +if arch == "jarch64": defines = {} #{"CLU_NO_CACHE": None} files += [ 'visionbuf_cl.c', diff --git a/selfdrive/controls/controlsd.py b/selfdrive/controls/controlsd.py index 8709c7929e546d..f82d6bf740675d 100755 --- a/selfdrive/controls/controlsd.py +++ b/selfdrive/controls/controlsd.py @@ -30,7 +30,7 @@ LANE_DEPARTURE_THRESHOLD = 0.1 -EER_ANGLE_SATURATION_TIMEOUT = 1.0 / DT_CTRL +STEER_ANGLE_SATURATION_TIMEOUT = 1.0 / DT_CTRL STEER_ANGLE_SATURATION_THRESHOLD = 250 # Degrees ThermalStatus = log.ThermalData.ThermalStatus @@ -119,7 +119,7 @@ def data_sample(CI, CC, sm, can_sock, state, mismatch_counter, can_error_counter else: events.append(create_event('calibrationInvalid', [ET.NO_ENTRY, ET.SOFT_DISABLE])) -if CS.vEgo > 150 * CV.MPH_TO_MS: + if CS.vEgo > 150 * CV.MPH_TO_MS: events.append(create_event('speedTooHigh', [ET.NO_ENTRY, ET.SOFT_DISABLE])) # When the panda and controlsd do not agree on controls_allowed @@ -544,7 +544,6 @@ def controlsd_thread(sm=None, pm=None, can_sock=None): # controlsd is driven by can recv, expected at 100Hz rk = Ratekeeper(100, print_delay_threshold=None) - internet_needed = params.get("Offroad_ConnectivityNeeded", encoding='utf8') is not None prof = Profiler(False) # off by default diff --git a/selfdrive/controls/lib/alerts.py b/selfdrive/controls/lib/alerts.py index 4bd125abc142a0..95385173b3dda8 100644 --- a/selfdrive/controls/lib/alerts.py +++ 
b/selfdrive/controls/lib/alerts.py @@ -110,7 +110,7 @@ def __gt__(self, alert2): Alert( "preDriverDistracted", - "KEEP EYES ON ROAD: Driver Appears Distracted", + "KEEP EYES ON ROAD: Driver Distracted", "", AlertStatus.normal, AlertSize.small, Priority.LOW, VisualAlert.steerRequired, AudibleAlert.none, .0, .1, .1, alert_rate=0.75), @@ -272,7 +272,7 @@ def __gt__(self, alert2): Alert( "dataNeededNoEntry", "openpilot Unavailable", - "Data Needed for Calibration. Upload Drive, Try Again", + "Calibration Needs Data. Upload Drive, Try Again", AlertStatus.normal, AlertSize.mid, Priority.LOW, VisualAlert.none, AudibleAlert.chimeError, .4, 0., 3.), @@ -526,6 +526,13 @@ def __gt__(self, alert2): AlertStatus.normal, AlertSize.mid, Priority.HIGH, VisualAlert.none, AudibleAlert.chimeDisengage, .4, 2., 3.), + Alert( + "speedTooHigh", + "Speed Too High", + "Slow down to resume operation", + AlertStatus.normal, AlertSize.mid, + Priority.HIGH, VisualAlert.none, AudibleAlert.chimeDisengage, .4, 2., 3.), + # Cancellation alerts causing non-entry Alert( "overheatNoEntry", @@ -544,7 +551,7 @@ def __gt__(self, alert2): Alert( "calibrationInvalidNoEntry", "openpilot Unavailable", - "Calibration Invalid: Reposition Device and Recalibrate", + "Calibration Invalid: Reposition Device & Recalibrate", AlertStatus.normal, AlertSize.mid, Priority.LOW, VisualAlert.none, AudibleAlert.chimeError, .4, 2., 3.), @@ -745,6 +752,13 @@ def __gt__(self, alert2): AlertStatus.normal, AlertSize.mid, Priority.LOWER, VisualAlert.none, AudibleAlert.none, 0., 0., .2), + Alert( + "invalidLkasSettingPermanent", + "Stock LKAS is turned on", + "Turn off stock LKAS to engage", + AlertStatus.normal, AlertSize.mid, + Priority.LOWER, VisualAlert.none, AudibleAlert.none, 0., 0., .2), + Alert( "internetConnectivityNeededPermanent", "Please connect to Internet", @@ -801,6 +815,7 @@ def __gt__(self, alert2): AlertStatus.normal, AlertSize.small, Priority.LOWEST, VisualAlert.steerRequired, AudibleAlert.none, .0, .0, .1), + # offroad alerts Alert( "ldwPermanent", "TAKE CONTROL", diff --git a/selfdrive/controls/lib/pathplanner.py b/selfdrive/controls/lib/pathplanner.py index 3107928be4f77c..4c54eb88e503c6 100644 --- a/selfdrive/controls/lib/pathplanner.py +++ b/selfdrive/controls/lib/pathplanner.py @@ -129,8 +129,8 @@ def update(self, sm, pm, CP, VM): # starting elif self.lane_change_state == LaneChangeState.laneChangeStarting: - # fade out lanelines over .5s - self.lane_change_ll_prob = max(self.lane_change_ll_prob - 2*DT_MDL, 0.0) + # fade out lanelines over 1s + self.lane_change_ll_prob = max(self.lane_change_ll_prob - DT_MDL, 0.0) # 98% certainty if lane_change_prob < 0.02 and self.lane_change_ll_prob < 0.01: self.lane_change_state = LaneChangeState.laneChangeFinishing @@ -157,7 +157,7 @@ def update(self, sm, pm, CP, VM): if desire == log.PathPlan.Desire.laneChangeRight or desire == log.PathPlan.Desire.laneChangeLeft: self.LP.l_prob *= self.lane_change_ll_prob self.LP.r_prob *= self.lane_change_ll_prob - self.libmpc.init_weights(MPC_COST_LAT.PATH / 3.0, MPC_COST_LAT.LANE, MPC_COST_LAT.HEADING, self.steer_rate_cost) + self.libmpc.init_weights(MPC_COST_LAT.PATH / 10.0, MPC_COST_LAT.LANE, MPC_COST_LAT.HEADING, self.steer_rate_cost) else: self.libmpc.init_weights(MPC_COST_LAT.PATH, MPC_COST_LAT.LANE, MPC_COST_LAT.HEADING, self.steer_rate_cost) diff --git a/selfdrive/loggerd/SConscript b/selfdrive/loggerd/SConscript index d45ee20f9a84d7..48403214627b48 100644 --- a/selfdrive/loggerd/SConscript +++ b/selfdrive/loggerd/SConscript @@ -1,4 +1,4 @@ 
-Import('env', 'arch', 'is_tbp', 'messaging', 'common', 'visionipc') +Import('env', 'arch', 'messaging', 'common', 'visionipc') src = ['loggerd.cc', 'logger.cc'] libs = ['zmq', 'czmq', 'capnp', 'kj', 'z', @@ -6,12 +6,13 @@ libs = ['zmq', 'czmq', 'capnp', 'kj', 'z', 'yuv', 'bz2', common, 'json', messaging, visionipc] if arch == "aarch64": - if not is_tbp: - src += ['encoder.c', 'raw_logger.cc'] - libs += ['OmxVenc', 'OmxCore', 'cutils'] + src += ['encoder.c', 'raw_logger.cc'] + libs += ['OmxVenc', 'OmxCore', 'cutils'] elif arch == "larch64": src += ['encoder.c', 'raw_logger.cc'] libs += ['OmxVenc', 'OmxCore', 'pthread'] +elif arch == "jarch64": + src += [] else: libs += ['pthread'] diff --git a/selfdrive/modeld/SConscript b/selfdrive/modeld/SConscript index 84b30d5b45e2ea..b4ba483e428c38 100644 --- a/selfdrive/modeld/SConscript +++ b/selfdrive/modeld/SConscript @@ -1,4 +1,4 @@ -Import('env', 'arch','is_tbp', 'messaging', 'common', 'gpucommon', 'visionipc') +Import('env', 'arch', 'messaging', 'common', 'gpucommon', 'visionipc') lenv = env.Clone() libs = [messaging, common, 'OpenCL', 'capnp', 'zmq', 'kj', 'yuv', gpucommon, visionipc] @@ -9,7 +9,7 @@ common_src = [ "transforms/loadyuv.c", "transforms/transform.c"] -if is_tbp: +if arch == "jarch64": libs += ['pthread'] # for tensorflow support common_src = [ diff --git a/selfdrive/modeld/dmonitoringmodeld.cc b/selfdrive/modeld/dmonitoringmodeld.cc index 0bed9cc156ff54..8cabfc60461680 100644 --- a/selfdrive/modeld/dmonitoringmodeld.cc +++ b/selfdrive/modeld/dmonitoringmodeld.cc @@ -94,6 +94,7 @@ int main(int argc, char **argv) { visionstream_destroy(&stream); delete dmonitoring_sock; + delete dmonstate_sock; delete msg_context; dmonitoring_free(&dmonitoringmodel); diff --git a/selfdrive/modeld/modeld.cc b/selfdrive/modeld/modeld.cc index 0bc2ecbcdb37d3..7fa8a634a79e80 100644 --- a/selfdrive/modeld/modeld.cc +++ b/selfdrive/modeld/modeld.cc @@ -85,7 +85,7 @@ void* live_thread(void *arg) { } } - + delete live_calibration_sock; delete poller; delete c; diff --git a/selfdrive/modeld/runners/keras_runner.py b/selfdrive/modeld/runners/keras_runner.py index d4b2a4a0104d16..6c2398b5ec203b 100755 --- a/selfdrive/modeld/runners/keras_runner.py +++ b/selfdrive/modeld/runners/keras_runner.py @@ -1,6 +1,5 @@ #!/usr/bin/env python # TODO: why are the keras models saved with python 2? 
-from __future__ import print_function import tensorflow as tf import os @@ -8,7 +7,7 @@ import tensorflow.keras as keras import numpy as np from tensorflow.keras.models import Model -from tensorflow.keras.models import model_from_json +from tensorflow.keras.models import model_from_json, loadmodel def read(sz): dd = [] gt = 0 diff --git a/selfdrive/test/process_replay/ref_commit b/selfdrive/test/process_replay/ref_commit index 9ddc64f2c61de8..c6b5354ad2d908 100644 --- a/selfdrive/test/process_replay/ref_commit +++ b/selfdrive/test/process_replay/ref_commit @@ -1 +1 @@ -d39595cf10a7813e55139320daa252278dd7567d \ No newline at end of file +3523742130b9e0554bab4ac5bc5ab535f1342e90 \ No newline at end of file diff --git a/selfdrive/ui/SConscript b/selfdrive/ui/SConscript index 5f2773437daf22..336859e5be3161 100644 --- a/selfdrive/ui/SConscript +++ b/selfdrive/ui/SConscript @@ -1,13 +1,9 @@ -Import('env', 'arch','is_tbp', 'common', 'messaging', 'gpucommon', 'visionipc', 'cereal') +Import('env', 'arch', 'common', 'messaging', 'gpucommon', 'visionipc', 'cereal') src = ['ui.cc', 'paint.cc', 'sidebar.cc', '#phonelibs/nanovg/nanovg.c'] libs = [common, 'zmq', 'czmq', 'capnp', 'capnp_c', 'm', cereal, messaging, gpucommon, visionipc] -if is_tbp: - larch = "aarch64_TBP" -else: - larch = arch -if larch == "aarch64": +if arch == "aarch64": src += ['sound.cc', 'slplay.c'] libs += ['EGL', 'GLESv3', 'gnustl_shared', 'log', 'utils', 'gui', 'hardware', 'ui', 'CB', 'gsl', 'adreno_utils', 'OpenSLES', 'cutils', 'uuid', 'OpenCL'] linkflags = ['-Wl,-rpath=/system/lib64,-rpath=/system/comma/usr/lib'] diff --git a/selfdrive/ui/paint.cc b/selfdrive/ui/paint.cc index a354d6a8b1572e..0e0911d230eb63 100644 --- a/selfdrive/ui/paint.cc +++ b/selfdrive/ui/paint.cc @@ -38,75 +38,94 @@ static float lerp(float v0, float v1, float t) { static void draw_chevron(UIState *s, float x_in, float y_in, float sz, NVGcolor fillColor, NVGcolor glowColor) { + const UIScene *scene = &s->scene; + + nvgSave(s->vg); + + nvgTranslate(s->vg, 240.0f, 0.0); + nvgTranslate(s->vg, -1440.0f / 2, -1080.0f / 2); + nvgScale(s->vg, 2.0, 2.0); + nvgScale(s->vg, 1440.0f / s->rgb_width, 1080.0f / s->rgb_height); + const vec4 p_car_space = (vec4){{x_in, y_in, 0., 1.}}; const vec3 p_full_frame = car_space_to_full_frame(s, p_car_space); - float x = p_full_frame.v[0]; - float y = p_full_frame.v[1]; - if (x < 0 || y < 0.){ - return; - } - sz *= 30; sz /= (x_in / 3 + 30); if (sz > 30) sz = 30; if (sz < 15) sz = 15; - + + float x = p_full_frame.v[0]; + float y = p_full_frame.v[1]; + // glow + nvgBeginPath(s->vg); float g_xo = sz/5; float g_yo = sz/10; - nvgBeginPath(s->vg); - nvgMoveTo(s->vg, x+(sz*1.35)+g_xo, y+sz+g_yo); - nvgLineTo(s->vg, x, y-g_xo); - nvgLineTo(s->vg, x-(sz*1.35)-g_xo, y+sz+g_yo); - nvgClosePath(s->vg); + if (x >= 0 && y >= 0.) { + nvgMoveTo(s->vg, x+(sz*1.35)+g_xo, y+sz+g_yo); + nvgLineTo(s->vg, x, y-g_xo); + nvgLineTo(s->vg, x-(sz*1.35)-g_xo, y+sz+g_yo); + nvgClosePath(s->vg); + } nvgFillColor(s->vg, glowColor); nvgFill(s->vg); // chevron nvgBeginPath(s->vg); - nvgMoveTo(s->vg, x+(sz*1.25), y+sz); - nvgLineTo(s->vg, x, y); - nvgLineTo(s->vg, x-(sz*1.25), y+sz); - nvgClosePath(s->vg); + if (x >= 0 && y >= 0.) 
{ + nvgMoveTo(s->vg, x+(sz*1.25), y+sz); + nvgLineTo(s->vg, x, y); + nvgLineTo(s->vg, x-(sz*1.25), y+sz); + nvgClosePath(s->vg); + } nvgFillColor(s->vg, fillColor); nvgFill(s->vg); + + nvgRestore(s->vg); } static void draw_lead(UIState *s, float d_rel, float v_rel, float y_rel){ - // Draw lead car indicator - float fillAlpha = 0; - float speedBuff = 10.; - float leadBuff = 40.; - if (d_rel < leadBuff) { - fillAlpha = 255*(1.0-(d_rel/leadBuff)); - if (v_rel < 0) { - fillAlpha += 255*(-1*(v_rel/speedBuff)); + // Draw lead car indicator + float fillAlpha = 0; + float speedBuff = 10.; + float leadBuff = 40.; + if (d_rel < leadBuff) { + fillAlpha = 255*(1.0-(d_rel/leadBuff)); + if (v_rel < 0) { + fillAlpha += 255*(-1*(v_rel/speedBuff)); + } + fillAlpha = (int)(fmin(fillAlpha, 255)); } - fillAlpha = (int)(fmin(fillAlpha, 255)); - } - draw_chevron(s, d_rel, y_rel, 25, nvgRGBA(201, 34, 49, fillAlpha), COLOR_YELLOW); + draw_chevron(s, d_rel, y_rel, 25, + nvgRGBA(201, 34, 49, fillAlpha), COLOR_YELLOW); } static void ui_draw_lane_line(UIState *s, const model_path_vertices_data *pvd, NVGcolor color) { + nvgSave(s->vg); + nvgTranslate(s->vg, 240.0f, 0.0); // rgb-box space + nvgTranslate(s->vg, -1440.0f / 2, -1080.0f / 2); // zoom 2x + nvgScale(s->vg, 2.0, 2.0); + nvgScale(s->vg, 1440.0f / s->rgb_width, 1080.0f / s->rgb_height); nvgBeginPath(s->vg); + bool started = false; for (int i=0; icnt; i++) { - float x = pvd->v[i].x; - float y = pvd->v[i].y; - if (x < 0 || y < 0.) { + if (pvd->v[i].x < 0 || pvd->v[i].y < 0.) { continue; } if (!started) { - nvgMoveTo(s->vg, x, y); + nvgMoveTo(s->vg, pvd->v[i].x, pvd->v[i].y); started = true; } else { - nvgLineTo(s->vg, x, y); + nvgLineTo(s->vg, pvd->v[i].x, pvd->v[i].y); } } + nvgClosePath(s->vg); nvgFillColor(s->vg, color); nvgFill(s->vg); + nvgRestore(s->vg); } static void update_track_data(UIState *s, bool is_mpc, track_vertices_data *pvd) { @@ -176,21 +195,37 @@ static void update_all_track_data(UIState *s) { static void ui_draw_track(UIState *s, bool is_mpc, track_vertices_data *pvd) { + const UIScene *scene = &s->scene; + const PathData path = scene->model.path; + const float *mpc_x_coords = &scene->mpc_x[0]; + const float *mpc_y_coords = &scene->mpc_y[0]; + + nvgSave(s->vg); + nvgTranslate(s->vg, 240.0f, 0.0); // rgb-box space + nvgTranslate(s->vg, -1440.0f / 2, -1080.0f / 2); // zoom 2x + nvgScale(s->vg, 2.0, 2.0); + nvgScale(s->vg, 1440.0f / s->rgb_width, 1080.0f / s->rgb_height); nvgBeginPath(s->vg); + bool started = false; + float off = is_mpc?0.3:0.5; + float lead_d = scene->lead_d_rel*2.; + float path_height = is_mpc?(lead_d>5.)?fmin(lead_d, 25.)-fmin(lead_d*0.35, 10.):20. 
+ :(lead_d>0.)?fmin(lead_d, 50.)-fmin(lead_d*0.35, 10.):49.; + int vi = 0; for(int i = 0;i < pvd->cnt;i++) { - float x = pvd->v[i].x; - float y = pvd->v[i].y; - if (x < 0 || y < 0) { + if (pvd->v[i].x < 0 || pvd->v[i].y < 0) { continue; } + if (!started) { - nvgMoveTo(s->vg, x, y); + nvgMoveTo(s->vg, pvd->v[i].x, pvd->v[i].y); started = true; } else { - nvgLineTo(s->vg, x, y); + nvgLineTo(s->vg, pvd->v[i].x, pvd->v[i].y); } } + nvgClosePath(s->vg); NVGpaint track_bg; @@ -204,8 +239,10 @@ static void ui_draw_track(UIState *s, bool is_mpc, track_vertices_data *pvd) { track_bg = nvgLinearGradient(s->vg, vwp_w, vwp_h, vwp_w, vwp_h*.4, COLOR_WHITE, COLOR_WHITE_ALPHA(0)); } + nvgFillPaint(s->vg, track_bg); nvgFill(s->vg); + nvgRestore(s->vg); } static void draw_steering(UIState *s, float curvature) { @@ -339,21 +376,6 @@ static void ui_draw_world(UIState *s) { return; } - const int inner_height = viz_w*9/16; - const int ui_viz_rx = scene->ui_viz_rx; - const int ui_viz_rw = scene->ui_viz_rw; - const int ui_viz_ro = scene->ui_viz_ro; - - nvgSave(s->vg); - nvgScissor(s->vg, ui_viz_rx, box_y, ui_viz_rw, box_h); - - nvgTranslate(s->vg, ui_viz_rx+ui_viz_ro, box_y + (box_h-inner_height)/2.0); - nvgScale(s->vg, (float)viz_w / s->fb_w, (float)inner_height / s->fb_h); - nvgTranslate(s->vg, 240.0f, 0.0); - nvgTranslate(s->vg, -1440.0f / 2, -1080.0f / 2); - nvgScale(s->vg, 2.0, 2.0); - nvgScale(s->vg, 1440.0f / s->rgb_width, 1080.0f / s->rgb_height); - // Draw lane edges and vision/mpc tracks ui_draw_vision_lanes(s); @@ -363,7 +385,6 @@ static void ui_draw_world(UIState *s) { if ((scene->lead_status2) && (fabs(scene->lead_d_rel - scene->lead_d_rel2) > 3.0)) { draw_lead(s, scene->lead_d_rel2, scene->lead_v_rel2, scene->lead_y_rel2); } - nvgRestore(s->vg); } static void ui_draw_vision_maxspeed(UIState *s) { @@ -880,10 +901,16 @@ static void ui_draw_vision(UIState *s) { nvgSave(s->vg); // Draw augmented elements + const int inner_height = viz_w*9/16; + nvgScissor(s->vg, ui_viz_rx, box_y, ui_viz_rw, box_h); + nvgTranslate(s->vg, ui_viz_rx+ui_viz_ro, box_y + (box_h-inner_height)/2.0); + nvgScale(s->vg, (float)viz_w / s->fb_w, (float)inner_height / s->fb_h); if (!scene->frontview && !scene->fullview) { ui_draw_world(s); } + nvgRestore(s->vg); + // Set Speed, Current Speed, Status/Events if (!scene->frontview) { ui_draw_vision_header(s); @@ -891,14 +918,15 @@ static void ui_draw_vision(UIState *s) { ui_draw_driver_view(s); } - if (scene->alert_size != ALERTSIZE_NONE) { + if (s->scene.alert_size != ALERTSIZE_NONE) { // Controls Alerts - ui_draw_vision_alert(s, scene->alert_size, s->status, - scene->alert_text1, scene->alert_text2); + ui_draw_vision_alert(s, s->scene.alert_size, s->status, + s->scene.alert_text1, s->scene.alert_text2); } else { if (!scene->frontview){ui_draw_vision_footer(s);} } + nvgEndFrame(s->vg); glDisable(GL_BLEND); } diff --git a/selfdrive/updated.py b/selfdrive/updated.py index ca6dd3f2dd426a..5f82c89e6d61ee 100755 --- a/selfdrive/updated.py +++ b/selfdrive/updated.py @@ -133,35 +133,6 @@ def dismount_ovfs(): run(["umount", "-l", OVERLAY_MERGED]) -def setup_git_options(cwd): - # We sync FS object atimes (which EON doesn't use) and mtimes, but ctimes - # are outside user control. Make sure Git is set up to ignore system ctimes, - # because they change when we make hard links during finalize. Otherwise, - # there is a lot of unnecessary churn. 
This appears to be a common need on - # OSX as well: https://www.git-tower.com/blog/make-git-rebase-safe-on-osx/ - try: - trustctime = run(["git", "config", "--get", "core.trustctime"], cwd) - trustctime_set = (trustctime.strip() == "false") - except subprocess.CalledProcessError: - trustctime_set = False - - if not trustctime_set: - cloudlog.info("Setting core.trustctime false") - run(["git", "config", "core.trustctime", "false"], cwd) - - # We are temporarily using copytree to copy the directory, which also changes - # inode numbers. Ignore those changes too. - try: - checkstat = run(["git", "config", "--get", "core.checkStat"], cwd) - checkstat_set = (checkstat.strip() == "minimal") - except subprocess.CalledProcessError: - checkstat_set = False - - if not checkstat_set: - cloudlog.info("Setting core.checkState minimal") - run(["git", "config", "core.checkStat", "minimal"], cwd) - - def init_ovfs(): cloudlog.info("preparing new safe staging area") Params().put("UpdateAvailable", "0") @@ -181,6 +152,17 @@ def init_ovfs(): if os.path.isfile(os.path.join(BASEDIR, ".overlay_consistent")): os.remove(os.path.join(BASEDIR, ".overlay_consistent")) + # We sync FS object atimes (which EON doesn't use) and mtimes, but ctimes + # are outside user control. Make sure Git is set up to ignore system ctimes, + # because they change when we make hard links during finalize. Otherwise, + # there is a lot of unnecessary churn. This appears to be a common need on + # OSX as well: https://www.git-tower.com/blog/make-git-rebase-safe-on-osx/ + run(["git", "config", "core.trustctime", "false"], BASEDIR) + + # We are temporarily using copytree to copy the directory, which also changes + # inode numbers. Ignore those changes too. + run(["git", "config", "core.checkStat", "minimal"], BASEDIR) + # Leave a timestamped canary in BASEDIR to check at startup. The EON clock # should be correct by the time we get here. 
If the init file disappears, or # critical mtimes in BASEDIR are newer than .overlay_init, continue.sh can @@ -272,8 +254,6 @@ def finalize_from_ovfs_copy(): def attempt_update(): cloudlog.info("attempting git update inside staging overlay") - setup_git_options(OVERLAY_MERGED) - git_fetch_output = run(NICE_LOW_PRIORITY + ["git", "fetch"], OVERLAY_MERGED) cloudlog.info("git fetch success: %s", git_fetch_output) diff --git a/tools/webcam/README.md b/tools/webcam/README.md index 14574c16c63ac1..89f1ac3a625ea0 100644 --- a/tools/webcam/README.md +++ b/tools/webcam/README.md @@ -17,10 +17,10 @@ git clone https://github.com/commaai/openpilot.git ``` - Follow [this readme](https://github.com/commaai/openpilot/tree/master/tools) to install the requirements - Add line "export PYTHONPATH=$HOME/openpilot" to your ~/.bashrc -- You also need to install tensorflow-gpu 2.1.0 and nvidia drivers: nvidia-xxx/cuda10.0/cudnn7.6.5 +- You also need to install tensorflow-gpu 2.1.0 (if not working, try 2.0.0) and nvidia drivers: nvidia-xxx/cuda10.0/cudnn7.6.5 - Install [OpenCL Driver](http://registrationcenter-download.intel.com/akdlm/irc_nas/12556/opencl_runtime_16.1.2_x64_rh_6.4.0.37.tgz) - (Note: the code assumes cl platforms order to be 0.GPU/1.CPU when running clinfo; if reverse, change the -1 to -2 in selfdrive/modeld/modeld.cc#L130; helping us refactor this mess is encouraged) -- Install [OpenCV4](https://www.pyimagesearch.com/2018/08/15/how-to-install-opencv-4-on-ubuntu/) +- Install [OpenCV4](https://www.pyimagesearch.com/2018/08/15/how-to-install-opencv-4-on-ubuntu/) (ignore the Python part) ## Build openpilot for webcam ``` diff --git a/tools/webcam/front_mount_helper.py b/tools/webcam/front_mount_helper.py index 0ac32acc25e0ae..1fdca3225aef0b 100755 --- a/tools/webcam/front_mount_helper.py +++ b/tools/webcam/front_mount_helper.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3.6 +#!/usr/bin/env python import numpy as np # copied from common.transformations/camera.py @@ -15,7 +15,7 @@ [ 0., webcam_focal_length, 720/2.], [ 0., 0., 1.]]) -cam_id = 1 +cam_id = 2 if __name__ == "__main__": import cv2 @@ -32,4 +32,4 @@ img = cv2.warpPerspective(img, trans_webcam_to_eon_front, (1152,864), borderMode=cv2.BORDER_CONSTANT, borderValue=0) img = img[:,-864//2:,:] cv2.imshow('preview', img) - cv2.waitKey(10) + cv2.waitKey(10) \ No newline at end of file diff --git a/tools/webcam/jetson_test_cam.py b/tools/webcam/jetson_test_cam.py index 89bda70e0c9fcb..d07d5f4905c659 100755 --- a/tools/webcam/jetson_test_cam.py +++ b/tools/webcam/jetson_test_cam.py @@ -11,8 +11,7 @@ " nvvidconv ! video/x-raw(memory:NVMM),format=I420 !" " nvvidconv ! video/x-raw,format=BGRx !" " videoconvert ! video/x-raw,format=BGR !" - " videoscale ! video/x-raw,width=%d,height=%d ! %s" - " videobox autocrop=true ! video/x-raw,width=%d,height=%d !" + " videoscale ! video/x-raw,width=%d,height=%d !" " appsink ") cs = CarSettings() if len(sys.argv) != 2: @@ -22,15 +21,11 @@ if cam == "road" or cam == "driver": print ("Processing camera [%s]\n" % cam) if cam == "road": - flip_command = "" - if cs.roadCameraFlip == 1: - flip_command = "videoflip method=rotate-180 ! " - strm = strm_template % (cs.roadCameraID, 800, 600, 20, round(1164*cs.roadCameraFx*1.5),round(874*cs.roadCameraFx*1.5),flip_command,1164,874) + strm = strm_template % (cs.roadCameraID, 800, 600, 20, 1164,874) + dx = round(1164/4) + dy = round(874/4) else: - flip_command = "" - if cs.driverCameraFlip == 1: - flip_command = "videoflip method=180 ! 
" - strm = strm_template % (cs.driverCameraID, 640, 480, 10, round(1152*cs.driverCameraFx),round(864*cs.driverCameraFx),flip_command,1152,864) + strm = strm_template % (cs.driverCameraID, 640, 480, 10, 1152,864) dx = round(1152/4) dy = round(864/4) print("Capturing with stream [%s}\n" % strm) @@ -40,7 +35,7 @@ while True: ret, frame = cap.read() if cam == "road": - img = frame + img = frame[ dy:3*dy, dx:3*dx] else: img = frame[:,-864//2:,:] if ret: diff --git a/tools/webcam/warp_vis.py b/tools/webcam/warp_vis.py index fb838b873a30e9..547f1be99ada23 100755 --- a/tools/webcam/warp_vis.py +++ b/tools/webcam/warp_vis.py @@ -5,7 +5,7 @@ eon_focal_length = 910.0 # pixels eon_dcam_focal_length = 860.0 # pixels -webcam_focal_length = 1408.0/1.5 # pixels +webcam_focal_length = -908.0/1.5 # pixels eon_intrinsics = np.array([ [eon_focal_length, 0., 1164/2.], @@ -18,8 +18,8 @@ [ 0, 0, 1]]) webcam_intrinsics = np.array([ - [webcam_focal_length, 0., 960/2/1.5], - [ 0., webcam_focal_length, 544/2/1.5], + [webcam_focal_length, 0., 1280/2/1.5], + [ 0., webcam_focal_length, 720/2/1.5], [ 0., 0., 1.]]) if __name__ == "__main__": @@ -29,18 +29,17 @@ print("trans_webcam_to_eon_rear:\n", trans_webcam_to_eon_rear) print("trans_webcam_to_eon_front:\n", trans_webcam_to_eon_front) - cap = cv2.VideoCapture(2) - cap.set(cv2.CAP_PROP_FRAME_WIDTH, 960) - cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 544) + cap = cv2.VideoCapture(1) + cap.set(cv2.CAP_PROP_FRAME_WIDTH, 853) + cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480) while (True): - ret, img = cap.read(1) + ret, img = cap.read() if ret: - #img = cv2.warpPerspective(img, trans_webcam_to_eon_rear, (1164,874), borderMode=cv2.BORDER_CONSTANT, borderValue=0) - img = cv2.warpPerspective(img, trans_webcam_to_eon_front, (1164,874), borderMode=cv2.BORDER_CONSTANT, borderValue=100) + # img = cv2.warpPerspective(img, trans_webcam_to_eon_rear, (1164,874), borderMode=cv2.BORDER_CONSTANT, borderValue=0) + img = cv2.warpPerspective(img, trans_webcam_to_eon_front, (1164,874), borderMode=cv2.BORDER_CONSTANT, borderValue=0) print(img.shape, end='\r') cv2.imshow('preview', img) cv2.waitKey(10) -