parent
7a15aae6dc
commit
81dd5a50cf
2160 changed files with 0 additions and 165267 deletions
@ -1,25 +0,0 @@ |
|||||||
--- |
|
||||||
name: Bug report |
|
||||||
about: Create a report to help us improve openpilot |
|
||||||
title: '' |
|
||||||
labels: 'bug' |
|
||||||
assignees: '' |
|
||||||
|
|
||||||
--- |
|
||||||
|
|
||||||
**Describe the bug** |
|
||||||
A clear and concise description of what the bug is. |
|
||||||
|
|
||||||
**How to reproduce or log data** |
|
||||||
Steps to reproduce the behavior, or a explorer/cabana link to the exact drive and timestamp of when the bug occurred. |
|
||||||
|
|
||||||
**Expected behavior** |
|
||||||
A clear and concise description of what you expected to happen. |
|
||||||
|
|
||||||
**Device/Version information (please complete the following information):** |
|
||||||
- Device: [e.g. EON/EON Gold] |
|
||||||
- Version: [e.g. 0.6.4], or commit hash when on devel |
|
||||||
- Car make/model [e.g. Toyota Prius 2016] |
|
||||||
|
|
||||||
**Additional context** |
|
||||||
Add any other context about the problem here. |
|
@ -1,21 +0,0 @@ |
|||||||
Choose one of the templates below: |
|
||||||
|
|
||||||
# Fingerprint |
|
||||||
This pull requests adds a fingerprint for <Make - Model - Year - Trim>. |
|
||||||
|
|
||||||
This is an explorer link to a drive with the stock system enabled: ... |
|
||||||
|
|
||||||
# Car support |
|
||||||
This pull requests adds support for <Make - Model - Year - Trim>. |
|
||||||
|
|
||||||
This is an explorer link to a drive with the stock system enabled: ... |
|
||||||
This is an explorer link to a drive with openpilot system enabled: ... |
|
||||||
|
|
||||||
# Feature |
|
||||||
This pull requests adds feature X |
|
||||||
|
|
||||||
## Description |
|
||||||
Explain what the feature does |
|
||||||
|
|
||||||
## Testing |
|
||||||
Explain how the feature was tested. Either by the added unit tests, or what tests were performed while driving. |
|
@ -1,59 +0,0 @@ |
|||||||
venv/ |
|
||||||
.DS_Store |
|
||||||
.tags |
|
||||||
.ipynb_checkpoints |
|
||||||
.idea |
|
||||||
.overlay_init |
|
||||||
.overlay_consistent |
|
||||||
.sconsign.dblite |
|
||||||
.vscode |
|
||||||
model2.png |
|
||||||
a.out |
|
||||||
|
|
||||||
*.dylib |
|
||||||
*.DSYM |
|
||||||
*.d |
|
||||||
*.pyc |
|
||||||
*.pyo |
|
||||||
.*.swp |
|
||||||
.*.swo |
|
||||||
.*.un~ |
|
||||||
*.tmp |
|
||||||
*.o |
|
||||||
*.os |
|
||||||
*.so |
|
||||||
*.a |
|
||||||
*.clb |
|
||||||
*.class |
|
||||||
*.pyxbldc |
|
||||||
*.vcd |
|
||||||
config.json |
|
||||||
clcache |
|
||||||
|
|
||||||
persist |
|
||||||
board/obj/ |
|
||||||
selfdrive/boardd/boardd |
|
||||||
selfdrive/logcatd/logcatd |
|
||||||
selfdrive/mapd/default_speeds_by_region.json |
|
||||||
selfdrive/proclogd/proclogd |
|
||||||
selfdrive/ui/_ui |
|
||||||
selfdrive/test/longitudinal_maneuvers/out |
|
||||||
selfdrive/visiond/visiond |
|
||||||
selfdrive/loggerd/loggerd |
|
||||||
selfdrive/sensord/_gpsd |
|
||||||
selfdrive/sensord/_sensord |
|
||||||
selfdrive/camerad/camerad |
|
||||||
selfdrive/modeld/_modeld |
|
||||||
selfdrive/modeld/_monitoringd |
|
||||||
/src/ |
|
||||||
|
|
||||||
one |
|
||||||
openpilot |
|
||||||
notebooks |
|
||||||
xx |
|
||||||
panda_jungle |
|
||||||
|
|
||||||
.coverage* |
|
||||||
htmlcov |
|
||||||
pandaextra |
|
||||||
|
|
@ -1,585 +0,0 @@ |
|||||||
[MASTER] |
|
||||||
|
|
||||||
# A comma-separated list of package or module names from where C extensions may |
|
||||||
# be loaded. Extensions are loading into the active Python interpreter and may |
|
||||||
# run arbitrary code |
|
||||||
extension-pkg-whitelist=scipy |
|
||||||
|
|
||||||
# Add files or directories to the blacklist. They should be base names, not |
|
||||||
# paths. |
|
||||||
ignore=CVS |
|
||||||
|
|
||||||
# Add files or directories matching the regex patterns to the blacklist. The |
|
||||||
# regex matches against base names, not paths. |
|
||||||
ignore-patterns= |
|
||||||
|
|
||||||
# Python code to execute, usually for sys.path manipulation such as |
|
||||||
# pygtk.require(). |
|
||||||
#init-hook= |
|
||||||
|
|
||||||
# Use multiple processes to speed up Pylint. |
|
||||||
jobs=4 |
|
||||||
|
|
||||||
# List of plugins (as comma separated values of python modules names) to load, |
|
||||||
# usually to register additional checkers. |
|
||||||
load-plugins= |
|
||||||
|
|
||||||
# Pickle collected data for later comparisons. |
|
||||||
persistent=yes |
|
||||||
|
|
||||||
# Specify a configuration file. |
|
||||||
#rcfile= |
|
||||||
|
|
||||||
# When enabled, pylint would attempt to guess common misconfiguration and emit |
|
||||||
# user-friendly hints instead of false-positive error messages |
|
||||||
suggestion-mode=yes |
|
||||||
|
|
||||||
# Allow loading of arbitrary C extensions. Extensions are imported into the |
|
||||||
# active Python interpreter and may run arbitrary code. |
|
||||||
unsafe-load-any-extension=no |
|
||||||
|
|
||||||
|
|
||||||
[MESSAGES CONTROL] |
|
||||||
|
|
||||||
# Only show warnings with the listed confidence levels. Leave empty to show |
|
||||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED |
|
||||||
confidence= |
|
||||||
|
|
||||||
# Disable the message, report, category or checker with the given id(s). You |
|
||||||
# can either give multiple identifiers separated by comma (,) or put this |
|
||||||
# option multiple times (only on the command line, not in the configuration |
|
||||||
# file where it should appear only once).You can also use "--disable=all" to |
|
||||||
# disable everything first and then reenable specific checks. For example, if |
|
||||||
# you want to run only the similarities checker, you can use "--disable=all |
|
||||||
# --enable=similarities". If you want to run only the classes checker, but have |
|
||||||
# no Warning level messages displayed, use"--disable=all --enable=classes |
|
||||||
# --disable=W" |
|
||||||
disable=print-statement, |
|
||||||
parameter-unpacking, |
|
||||||
unpacking-in-except, |
|
||||||
old-raise-syntax, |
|
||||||
backtick, |
|
||||||
long-suffix, |
|
||||||
old-ne-operator, |
|
||||||
old-octal-literal, |
|
||||||
import-star-module-level, |
|
||||||
non-ascii-bytes-literal, |
|
||||||
raw-checker-failed, |
|
||||||
bad-inline-option, |
|
||||||
locally-disabled, |
|
||||||
locally-enabled, |
|
||||||
file-ignored, |
|
||||||
suppressed-message, |
|
||||||
useless-suppression, |
|
||||||
deprecated-pragma, |
|
||||||
apply-builtin, |
|
||||||
basestring-builtin, |
|
||||||
buffer-builtin, |
|
||||||
cmp-builtin, |
|
||||||
coerce-builtin, |
|
||||||
execfile-builtin, |
|
||||||
file-builtin, |
|
||||||
long-builtin, |
|
||||||
raw_input-builtin, |
|
||||||
reduce-builtin, |
|
||||||
standarderror-builtin, |
|
||||||
unicode-builtin, |
|
||||||
xrange-builtin, |
|
||||||
coerce-method, |
|
||||||
delslice-method, |
|
||||||
getslice-method, |
|
||||||
setslice-method, |
|
||||||
no-absolute-import, |
|
||||||
old-division, |
|
||||||
dict-iter-method, |
|
||||||
dict-view-method, |
|
||||||
next-method-called, |
|
||||||
metaclass-assignment, |
|
||||||
indexing-exception, |
|
||||||
raising-string, |
|
||||||
reload-builtin, |
|
||||||
oct-method, |
|
||||||
hex-method, |
|
||||||
nonzero-method, |
|
||||||
cmp-method, |
|
||||||
input-builtin, |
|
||||||
round-builtin, |
|
||||||
intern-builtin, |
|
||||||
unichr-builtin, |
|
||||||
map-builtin-not-iterating, |
|
||||||
zip-builtin-not-iterating, |
|
||||||
range-builtin-not-iterating, |
|
||||||
filter-builtin-not-iterating, |
|
||||||
using-cmp-argument, |
|
||||||
eq-without-hash, |
|
||||||
div-method, |
|
||||||
idiv-method, |
|
||||||
rdiv-method, |
|
||||||
exception-message-attribute, |
|
||||||
invalid-str-codec, |
|
||||||
sys-max-int, |
|
||||||
bad-python3-import, |
|
||||||
deprecated-string-function, |
|
||||||
deprecated-str-translate-call, |
|
||||||
deprecated-itertools-function, |
|
||||||
deprecated-types-field, |
|
||||||
next-method-defined, |
|
||||||
dict-items-not-iterating, |
|
||||||
dict-keys-not-iterating, |
|
||||||
dict-values-not-iterating, |
|
||||||
bad-indentation, |
|
||||||
line-too-long, |
|
||||||
missing-docstring, |
|
||||||
multiple-statements, |
|
||||||
bad-continuation, |
|
||||||
invalid-name, |
|
||||||
too-many-arguments, |
|
||||||
too-many-locals, |
|
||||||
superfluous-parens, |
|
||||||
bad-whitespace, |
|
||||||
too-many-instance-attributes, |
|
||||||
wrong-import-position, |
|
||||||
ungrouped-imports, |
|
||||||
wrong-import-order, |
|
||||||
protected-access, |
|
||||||
trailing-whitespace, |
|
||||||
too-many-branches, |
|
||||||
too-few-public-methods, |
|
||||||
too-many-statements, |
|
||||||
trailing-newlines, |
|
||||||
attribute-defined-outside-init, |
|
||||||
too-many-return-statements, |
|
||||||
too-many-public-methods, |
|
||||||
unused-argument, |
|
||||||
old-style-class, |
|
||||||
no-init, |
|
||||||
len-as-condition, |
|
||||||
unneeded-not, |
|
||||||
no-self-use, |
|
||||||
multiple-imports, |
|
||||||
no-else-return, |
|
||||||
logging-not-lazy, |
|
||||||
fixme, |
|
||||||
redefined-outer-name, |
|
||||||
unused-variable, |
|
||||||
unsubscriptable-object, |
|
||||||
expression-not-assigned, |
|
||||||
too-many-boolean-expressions, |
|
||||||
consider-using-ternary, |
|
||||||
invalid-unary-operand-type, |
|
||||||
relative-import, |
|
||||||
deprecated-lambda |
|
||||||
|
|
||||||
|
|
||||||
# Enable the message, report, category or checker with the given id(s). You can |
|
||||||
# either give multiple identifier separated by comma (,) or put this option |
|
||||||
# multiple time (only on the command line, not in the configuration file where |
|
||||||
# it should appear only once). See also the "--disable" option for examples. |
|
||||||
enable=c-extension-no-member |
|
||||||
|
|
||||||
|
|
||||||
[REPORTS] |
|
||||||
|
|
||||||
# Python expression which should return a note less than 10 (10 is the highest |
|
||||||
# note). You have access to the variables errors warning, statement which |
|
||||||
# respectively contain the number of errors / warnings messages and the total |
|
||||||
# number of statements analyzed. This is used by the global evaluation report |
|
||||||
# (RP0004). |
|
||||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) |
|
||||||
|
|
||||||
# Template used to display messages. This is a python new-style format string |
|
||||||
# used to format the message information. See doc for all details |
|
||||||
#msg-template= |
|
||||||
|
|
||||||
# Set the output format. Available formats are text, parseable, colorized, json |
|
||||||
# and msvs (visual studio).You can also give a reporter class, eg |
|
||||||
# mypackage.mymodule.MyReporterClass. |
|
||||||
output-format=text |
|
||||||
|
|
||||||
# Tells whether to display a full report or only the messages |
|
||||||
reports=no |
|
||||||
|
|
||||||
# Activate the evaluation score. |
|
||||||
score=yes |
|
||||||
|
|
||||||
|
|
||||||
[REFACTORING] |
|
||||||
|
|
||||||
# Maximum number of nested blocks for function / method body |
|
||||||
max-nested-blocks=5 |
|
||||||
|
|
||||||
# Complete name of functions that never returns. When checking for |
|
||||||
# inconsistent-return-statements if a never returning function is called then |
|
||||||
# it will be considered as an explicit return statement and no message will be |
|
||||||
# printed. |
|
||||||
never-returning-functions=optparse.Values,sys.exit |
|
||||||
|
|
||||||
|
|
||||||
[LOGGING] |
|
||||||
|
|
||||||
# Logging modules to check that the string format arguments are in logging |
|
||||||
# function parameter format |
|
||||||
logging-modules=logging |
|
||||||
|
|
||||||
|
|
||||||
[SPELLING] |
|
||||||
|
|
||||||
# Limits count of emitted suggestions for spelling mistakes |
|
||||||
max-spelling-suggestions=4 |
|
||||||
|
|
||||||
# Spelling dictionary name. Available dictionaries: none. To make it working |
|
||||||
# install python-enchant package. |
|
||||||
spelling-dict= |
|
||||||
|
|
||||||
# List of comma separated words that should not be checked. |
|
||||||
spelling-ignore-words= |
|
||||||
|
|
||||||
# A path to a file that contains private dictionary; one word per line. |
|
||||||
spelling-private-dict-file= |
|
||||||
|
|
||||||
# Tells whether to store unknown words to indicated private dictionary in |
|
||||||
# --spelling-private-dict-file option instead of raising a message. |
|
||||||
spelling-store-unknown-words=no |
|
||||||
|
|
||||||
|
|
||||||
[MISCELLANEOUS] |
|
||||||
|
|
||||||
# List of note tags to take in consideration, separated by a comma. |
|
||||||
notes=FIXME, |
|
||||||
XXX, |
|
||||||
TODO |
|
||||||
|
|
||||||
|
|
||||||
[SIMILARITIES] |
|
||||||
|
|
||||||
# Ignore comments when computing similarities. |
|
||||||
ignore-comments=yes |
|
||||||
|
|
||||||
# Ignore docstrings when computing similarities. |
|
||||||
ignore-docstrings=yes |
|
||||||
|
|
||||||
# Ignore imports when computing similarities. |
|
||||||
ignore-imports=no |
|
||||||
|
|
||||||
# Minimum lines number of a similarity. |
|
||||||
min-similarity-lines=4 |
|
||||||
|
|
||||||
|
|
||||||
[TYPECHECK] |
|
||||||
|
|
||||||
# List of decorators that produce context managers, such as |
|
||||||
# contextlib.contextmanager. Add to this list to register other decorators that |
|
||||||
# produce valid context managers. |
|
||||||
contextmanager-decorators=contextlib.contextmanager |
|
||||||
|
|
||||||
# List of members which are set dynamically and missed by pylint inference |
|
||||||
# system, and so shouldn't trigger E1101 when accessed. Python regular |
|
||||||
# expressions are accepted. |
|
||||||
generated-members=capnp.* cereal.* pygame.* zmq.* setproctitle.* smbus2.* usb1.* serial.* cv2.* |
|
||||||
|
|
||||||
# Tells whether missing members accessed in mixin class should be ignored. A |
|
||||||
# mixin class is detected if its name ends with "mixin" (case insensitive). |
|
||||||
ignore-mixin-members=yes |
|
||||||
|
|
||||||
# This flag controls whether pylint should warn about no-member and similar |
|
||||||
# checks whenever an opaque object is returned when inferring. The inference |
|
||||||
# can return multiple potential results while evaluating a Python object, but |
|
||||||
# some branches might not be evaluated, which results in partial inference. In |
|
||||||
# that case, it might be useful to still emit no-member and other checks for |
|
||||||
# the rest of the inferred objects. |
|
||||||
ignore-on-opaque-inference=yes |
|
||||||
|
|
||||||
# List of class names for which member attributes should not be checked (useful |
|
||||||
# for classes with dynamically set attributes). This supports the use of |
|
||||||
# qualified names. |
|
||||||
ignored-classes=optparse.Values,thread._local,_thread._local |
|
||||||
|
|
||||||
# List of module names for which member attributes should not be checked |
|
||||||
# (useful for modules/projects where namespaces are manipulated during runtime |
|
||||||
# and thus existing member attributes cannot be deduced by static analysis. It |
|
||||||
# supports qualified module names, as well as Unix pattern matching. |
|
||||||
ignored-modules=flask setproctitle usb1 flask.ext.socketio smbus2 usb1.* |
|
||||||
|
|
||||||
# Show a hint with possible names when a member name was not found. The aspect |
|
||||||
# of finding the hint is based on edit distance. |
|
||||||
missing-member-hint=yes |
|
||||||
|
|
||||||
# The minimum edit distance a name should have in order to be considered a |
|
||||||
# similar match for a missing member name. |
|
||||||
missing-member-hint-distance=1 |
|
||||||
|
|
||||||
# The total number of similar names that should be taken in consideration when |
|
||||||
# showing a hint for a missing member. |
|
||||||
missing-member-max-choices=1 |
|
||||||
|
|
||||||
|
|
||||||
[VARIABLES] |
|
||||||
|
|
||||||
# List of additional names supposed to be defined in builtins. Remember that |
|
||||||
# you should avoid to define new builtins when possible. |
|
||||||
additional-builtins= |
|
||||||
|
|
||||||
# Tells whether unused global variables should be treated as a violation. |
|
||||||
allow-global-unused-variables=yes |
|
||||||
|
|
||||||
# List of strings which can identify a callback function by name. A callback |
|
||||||
# name must start or end with one of those strings. |
|
||||||
callbacks=cb_, |
|
||||||
_cb |
|
||||||
|
|
||||||
# A regular expression matching the name of dummy variables (i.e. expectedly |
|
||||||
# not used). |
|
||||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ |
|
||||||
|
|
||||||
# Argument names that match this expression will be ignored. Default to name |
|
||||||
# with leading underscore |
|
||||||
ignored-argument-names=_.*|^ignored_|^unused_ |
|
||||||
|
|
||||||
# Tells whether we should check for unused import in __init__ files. |
|
||||||
init-import=no |
|
||||||
|
|
||||||
# List of qualified module names which can have objects that can redefine |
|
||||||
# builtins. |
|
||||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins |
|
||||||
|
|
||||||
|
|
||||||
[FORMAT] |
|
||||||
|
|
||||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. |
|
||||||
expected-line-ending-format= |
|
||||||
|
|
||||||
# Regexp for a line that is allowed to be longer than the limit. |
|
||||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$ |
|
||||||
|
|
||||||
# Number of spaces of indent required inside a hanging or continued line. |
|
||||||
indent-after-paren=4 |
|
||||||
|
|
||||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 |
|
||||||
# tab). |
|
||||||
indent-string=' ' |
|
||||||
|
|
||||||
# Maximum number of characters on a single line. |
|
||||||
max-line-length=100 |
|
||||||
|
|
||||||
# Maximum number of lines in a module |
|
||||||
max-module-lines=1000 |
|
||||||
|
|
||||||
# List of optional constructs for which whitespace checking is disabled. `dict- |
|
||||||
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. |
|
||||||
# `trailing-comma` allows a space between comma and closing bracket: (a, ). |
|
||||||
# `empty-line` allows space-only lines. |
|
||||||
no-space-check=trailing-comma, |
|
||||||
dict-separator |
|
||||||
|
|
||||||
# Allow the body of a class to be on the same line as the declaration if body |
|
||||||
# contains single statement. |
|
||||||
single-line-class-stmt=no |
|
||||||
|
|
||||||
# Allow the body of an if to be on the same line as the test if there is no |
|
||||||
# else. |
|
||||||
single-line-if-stmt=no |
|
||||||
|
|
||||||
|
|
||||||
[BASIC] |
|
||||||
|
|
||||||
# Naming style matching correct argument names |
|
||||||
argument-naming-style=snake_case |
|
||||||
|
|
||||||
# Regular expression matching correct argument names. Overrides argument- |
|
||||||
# naming-style |
|
||||||
#argument-rgx= |
|
||||||
|
|
||||||
# Naming style matching correct attribute names |
|
||||||
attr-naming-style=snake_case |
|
||||||
|
|
||||||
# Regular expression matching correct attribute names. Overrides attr-naming- |
|
||||||
# style |
|
||||||
#attr-rgx= |
|
||||||
|
|
||||||
# Bad variable names which should always be refused, separated by a comma |
|
||||||
bad-names=foo, |
|
||||||
bar, |
|
||||||
baz, |
|
||||||
toto, |
|
||||||
tutu, |
|
||||||
tata |
|
||||||
|
|
||||||
# Naming style matching correct class attribute names |
|
||||||
class-attribute-naming-style=any |
|
||||||
|
|
||||||
# Regular expression matching correct class attribute names. Overrides class- |
|
||||||
# attribute-naming-style |
|
||||||
#class-attribute-rgx= |
|
||||||
|
|
||||||
# Naming style matching correct class names |
|
||||||
class-naming-style=PascalCase |
|
||||||
|
|
||||||
# Regular expression matching correct class names. Overrides class-naming-style |
|
||||||
#class-rgx= |
|
||||||
|
|
||||||
# Naming style matching correct constant names |
|
||||||
const-naming-style=UPPER_CASE |
|
||||||
|
|
||||||
# Regular expression matching correct constant names. Overrides const-naming- |
|
||||||
# style |
|
||||||
#const-rgx= |
|
||||||
|
|
||||||
# Minimum line length for functions/classes that require docstrings, shorter |
|
||||||
# ones are exempt. |
|
||||||
docstring-min-length=-1 |
|
||||||
|
|
||||||
# Naming style matching correct function names |
|
||||||
function-naming-style=snake_case |
|
||||||
|
|
||||||
# Regular expression matching correct function names. Overrides function- |
|
||||||
# naming-style |
|
||||||
#function-rgx= |
|
||||||
|
|
||||||
# Good variable names which should always be accepted, separated by a comma |
|
||||||
good-names=i, |
|
||||||
j, |
|
||||||
k, |
|
||||||
ex, |
|
||||||
Run, |
|
||||||
_ |
|
||||||
|
|
||||||
# Include a hint for the correct naming format with invalid-name |
|
||||||
include-naming-hint=no |
|
||||||
|
|
||||||
# Naming style matching correct inline iteration names |
|
||||||
inlinevar-naming-style=any |
|
||||||
|
|
||||||
# Regular expression matching correct inline iteration names. Overrides |
|
||||||
# inlinevar-naming-style |
|
||||||
#inlinevar-rgx= |
|
||||||
|
|
||||||
# Naming style matching correct method names |
|
||||||
method-naming-style=snake_case |
|
||||||
|
|
||||||
# Regular expression matching correct method names. Overrides method-naming- |
|
||||||
# style |
|
||||||
#method-rgx= |
|
||||||
|
|
||||||
# Naming style matching correct module names |
|
||||||
module-naming-style=snake_case |
|
||||||
|
|
||||||
# Regular expression matching correct module names. Overrides module-naming- |
|
||||||
# style |
|
||||||
#module-rgx= |
|
||||||
|
|
||||||
# Colon-delimited sets of names that determine each other's naming style when |
|
||||||
# the name regexes allow several styles. |
|
||||||
name-group= |
|
||||||
|
|
||||||
# Regular expression which should only match function or class names that do |
|
||||||
# not require a docstring. |
|
||||||
no-docstring-rgx=^_ |
|
||||||
|
|
||||||
# List of decorators that produce properties, such as abc.abstractproperty. Add |
|
||||||
# to this list to register other decorators that produce valid properties. |
|
||||||
property-classes=abc.abstractproperty |
|
||||||
|
|
||||||
# Naming style matching correct variable names |
|
||||||
variable-naming-style=snake_case |
|
||||||
|
|
||||||
# Regular expression matching correct variable names. Overrides variable- |
|
||||||
# naming-style |
|
||||||
#variable-rgx= |
|
||||||
|
|
||||||
|
|
||||||
[DESIGN] |
|
||||||
|
|
||||||
# Maximum number of arguments for function / method |
|
||||||
max-args=5 |
|
||||||
|
|
||||||
# Maximum number of attributes for a class (see R0902). |
|
||||||
max-attributes=7 |
|
||||||
|
|
||||||
# Maximum number of boolean expressions in a if statement |
|
||||||
max-bool-expr=5 |
|
||||||
|
|
||||||
# Maximum number of branch for function / method body |
|
||||||
max-branches=12 |
|
||||||
|
|
||||||
# Maximum number of locals for function / method body |
|
||||||
max-locals=15 |
|
||||||
|
|
||||||
# Maximum number of parents for a class (see R0901). |
|
||||||
max-parents=7 |
|
||||||
|
|
||||||
# Maximum number of public methods for a class (see R0904). |
|
||||||
max-public-methods=20 |
|
||||||
|
|
||||||
# Maximum number of return / yield for function / method body |
|
||||||
max-returns=6 |
|
||||||
|
|
||||||
# Maximum number of statements in function / method body |
|
||||||
max-statements=50 |
|
||||||
|
|
||||||
# Minimum number of public methods for a class (see R0903). |
|
||||||
min-public-methods=2 |
|
||||||
|
|
||||||
|
|
||||||
[CLASSES] |
|
||||||
|
|
||||||
# List of method names used to declare (i.e. assign) instance attributes. |
|
||||||
defining-attr-methods=__init__, |
|
||||||
__new__, |
|
||||||
setUp |
|
||||||
|
|
||||||
# List of member names, which should be excluded from the protected access |
|
||||||
# warning. |
|
||||||
exclude-protected=_asdict, |
|
||||||
_fields, |
|
||||||
_replace, |
|
||||||
_source, |
|
||||||
_make |
|
||||||
|
|
||||||
# List of valid names for the first argument in a class method. |
|
||||||
valid-classmethod-first-arg=cls |
|
||||||
|
|
||||||
# List of valid names for the first argument in a metaclass class method. |
|
||||||
valid-metaclass-classmethod-first-arg=mcs |
|
||||||
|
|
||||||
|
|
||||||
[IMPORTS] |
|
||||||
|
|
||||||
# Allow wildcard imports from modules that define __all__. |
|
||||||
allow-wildcard-with-all=no |
|
||||||
|
|
||||||
# Analyse import fallback blocks. This can be used to support both Python 2 and |
|
||||||
# 3 compatible code, which means that the block might have code that exists |
|
||||||
# only in one or another interpreter, leading to false positives when analysed. |
|
||||||
analyse-fallback-blocks=no |
|
||||||
|
|
||||||
# Deprecated modules which should not be used, separated by a comma |
|
||||||
deprecated-modules=regsub, |
|
||||||
TERMIOS, |
|
||||||
Bastion, |
|
||||||
rexec |
|
||||||
|
|
||||||
# Create a graph of external dependencies in the given file (report RP0402 must |
|
||||||
# not be disabled) |
|
||||||
ext-import-graph= |
|
||||||
|
|
||||||
# Create a graph of every (i.e. internal and external) dependencies in the |
|
||||||
# given file (report RP0402 must not be disabled) |
|
||||||
import-graph= |
|
||||||
|
|
||||||
# Create a graph of internal dependencies in the given file (report RP0402 must |
|
||||||
# not be disabled) |
|
||||||
int-import-graph= |
|
||||||
|
|
||||||
# Force import order to recognize a module as part of the standard |
|
||||||
# compatibility libraries. |
|
||||||
known-standard-library= |
|
||||||
|
|
||||||
# Force import order to recognize a module as part of a third party library. |
|
||||||
known-third-party=enchant |
|
||||||
|
|
||||||
|
|
||||||
[EXCEPTIONS] |
|
||||||
|
|
||||||
# Exceptions that will emit a warning when being caught. Defaults to |
|
||||||
# "Exception" |
|
||||||
overgeneral-exceptions=Exception |
|
@ -1,7 +0,0 @@ |
|||||||
sudo: required |
|
||||||
|
|
||||||
services: |
|
||||||
- docker |
|
||||||
|
|
||||||
script: |
|
||||||
- ./run_docker_tests.sh |
|
@ -1,31 +0,0 @@ |
|||||||
# How to contribute |
|
||||||
|
|
||||||
Our software is open source so you can solve your own problems without needing help from others. And if you solve a problem and are so kind, you can upstream it for the rest of the world to use. |
|
||||||
|
|
||||||
Most open source development activity is coordinated through our [Discord](https://discord.comma.ai). A lot of documentation is available on our [medium](https://medium.com/@comma_ai/) |
|
||||||
|
|
||||||
## Getting Started |
|
||||||
|
|
||||||
* Join our [Discord](https://discord.comma.ai) |
|
||||||
* Make sure you have a [GitHub account](https://github.com/signup/free) |
|
||||||
* Fork [our repositories](https://github.com/commaai) on GitHub |
|
||||||
|
|
||||||
## Testing |
|
||||||
|
|
||||||
### Local Testing |
|
||||||
|
|
||||||
You can test your changes on your machine by running `run_docker_tests.sh`. This will run some automated tests in docker against your code. |
|
||||||
|
|
||||||
### Automated Testing |
|
||||||
|
|
||||||
All PRs are automatically checked by travis. Check out `.travis.yml` for what travis runs. Any new tests sould be added to travis. |
|
||||||
|
|
||||||
### Code Style and Linting |
|
||||||
|
|
||||||
Code is automatically check for style by travis as part of the automated tests. You can also run these yourself by running `check_code_quality.sh`. |
|
||||||
|
|
||||||
## Car Ports (openpilot) |
|
||||||
|
|
||||||
We've released a [Model Port guide](https://medium.com/@comma_ai/openpilot-port-guide-for-toyota-models-e5467f4b5fe6) for porting to Toyota/Lexus models. |
|
||||||
|
|
||||||
If you port openpilot to a substantially new car brand, see this more generic [Brand Port guide](https://medium.com/@comma_ai/how-to-write-a-car-port-for-openpilot-7ce0785eda84). You might also be eligible for a bounty. See our bounties at [comma.ai/bounties.html](https://comma.ai/bounties.html) |
|
@ -1,84 +0,0 @@ |
|||||||
FROM ubuntu:16.04 |
|
||||||
ENV PYTHONUNBUFFERED 1 |
|
||||||
|
|
||||||
RUN apt-get update && apt-get install -y \ |
|
||||||
autoconf \ |
|
||||||
build-essential \ |
|
||||||
bzip2 \ |
|
||||||
clang \ |
|
||||||
cmake \ |
|
||||||
curl \ |
|
||||||
ffmpeg \ |
|
||||||
git \ |
|
||||||
libarchive-dev \ |
|
||||||
libbz2-dev \ |
|
||||||
libcurl4-openssl-dev \ |
|
||||||
libeigen3-dev \ |
|
||||||
libffi-dev \ |
|
||||||
libglew-dev \ |
|
||||||
libgles2-mesa-dev \ |
|
||||||
libglfw3-dev \ |
|
||||||
libglib2.0-0 \ |
|
||||||
liblzma-dev \ |
|
||||||
libmysqlclient-dev \ |
|
||||||
libomp-dev \ |
|
||||||
libopencv-dev \ |
|
||||||
libssl-dev \ |
|
||||||
libsqlite3-dev \ |
|
||||||
libtool \ |
|
||||||
libusb-1.0-0-dev \ |
|
||||||
libzmq5-dev \ |
|
||||||
locales \ |
|
||||||
ocl-icd-libopencl1 \ |
|
||||||
ocl-icd-opencl-dev \ |
|
||||||
opencl-headers \ |
|
||||||
python-dev \ |
|
||||||
python-pip \ |
|
||||||
screen \ |
|
||||||
sudo \ |
|
||||||
vim \ |
|
||||||
wget |
|
||||||
|
|
||||||
|
|
||||||
RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen |
|
||||||
ENV LANG en_US.UTF-8 |
|
||||||
ENV LANGUAGE en_US:en |
|
||||||
ENV LC_ALL en_US.UTF-8 |
|
||||||
|
|
||||||
RUN curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash |
|
||||||
|
|
||||||
ENV PATH="/root/.pyenv/bin:/root/.pyenv/shims:${PATH}" |
|
||||||
RUN pyenv install 3.7.3 |
|
||||||
RUN pyenv global 3.7.3 |
|
||||||
RUN pyenv rehash |
|
||||||
|
|
||||||
RUN pip install pipenv==2018.11.26 |
|
||||||
|
|
||||||
COPY Pipfile /tmp/ |
|
||||||
COPY Pipfile.lock /tmp/ |
|
||||||
|
|
||||||
RUN python --version |
|
||||||
RUN cd /tmp && pipenv install --system --deploy |
|
||||||
|
|
||||||
# Install subset of dev dependencies needed for CI |
|
||||||
RUN pip install matplotlib==3.1.1 dictdiffer==0.8.0 fastcluster==1.1.25 aenum==2.2.1 scipy==1.3.1 lru-dict==1.1.6 tenacity==5.1.1 azure-common==1.1.23 azure-nspkg==3.0.2 azure-storage-blob==2.1.0 azure-storage-common==2.1.0 azure-storage-nspkg==3.1.0 pycurl==7.43.0.3 |
|
||||||
|
|
||||||
COPY phonelibs/install_capnp.sh /tmp/install_capnp.sh |
|
||||||
RUN /tmp/install_capnp.sh |
|
||||||
|
|
||||||
RUN git clone --branch v0.7 https://github.com/commaai/openpilot-tools.git /tmp/openpilot/tools |
|
||||||
|
|
||||||
ENV PYTHONPATH /tmp/openpilot:${PYTHONPATH} |
|
||||||
COPY ./.pylintrc /tmp/openpilot/.pylintrc |
|
||||||
COPY ./common /tmp/openpilot/common |
|
||||||
COPY ./cereal /tmp/openpilot/cereal |
|
||||||
COPY ./opendbc /tmp/openpilot/opendbc |
|
||||||
COPY ./selfdrive /tmp/openpilot/selfdrive |
|
||||||
COPY ./phonelibs /tmp/openpilot/phonelibs |
|
||||||
COPY ./pyextra /tmp/openpilot/pyextra |
|
||||||
COPY ./panda /tmp/openpilot/panda |
|
||||||
|
|
||||||
COPY SConstruct /tmp/openpilot/SConstruct |
|
||||||
|
|
||||||
RUN mkdir -p /tmp/openpilot/selfdrive/test/out |
|
||||||
RUN cd /tmp/openpilot && scons -j$(nproc) |
|
@ -1,7 +0,0 @@ |
|||||||
Copyright (c) 2018, Comma.ai, Inc. |
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: |
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. |
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
|
@ -1,9 +0,0 @@ |
|||||||
|
|
||||||
code_dir := $(shell pwd)
|
|
||||||
|
|
||||||
# TODO: Add a global build system
|
|
||||||
|
|
||||||
.PHONY: all |
|
||||||
all: |
|
||||||
cd selfdrive && PYTHONPATH=$(code_dir) PREPAREONLY=1 ./manager.py
|
|
||||||
|
|
@ -1,3 +0,0 @@ |
|||||||
version https://git-lfs.github.com/spec/v1 |
|
||||||
oid sha256:a1fd3e30b499e9e5387495544631cb334b62521a5be6668b98d52e3ef5d6e448 |
|
||||||
size 2308 |
|
@ -1,3 +0,0 @@ |
|||||||
version https://git-lfs.github.com/spec/v1 |
|
||||||
oid sha256:451e503913e7579c3241b5fec46c81f832735ecd9fc0ec6dffdf246852e146d1 |
|
||||||
size 165792 |
|
@ -1,302 +0,0 @@ |
|||||||
[](#) |
|
||||||
|
|
||||||
Table of Contents |
|
||||||
======================= |
|
||||||
|
|
||||||
* [What is openpilot?](#what-is-openpilot) |
|
||||||
* [Integration with Stock Features](#integration-with-stock-features) |
|
||||||
* [Supported Hardware](#supported-hardware) |
|
||||||
* [Supported Cars](#supported-cars) |
|
||||||
* [Community Maintained Cars and Features](#community-maintained-cars-and-features) |
|
||||||
* [Installation Instructions](#installation-instructions) |
|
||||||
* [Limitations of openpilot ALC and LDW](#limitations-of-openpilot-alc-and-ldw) |
|
||||||
* [Limitations of openpilot ACC and FCW](#limitations-of-openpilot-acc-and-fcw) |
|
||||||
* [Limitations of openpilot DM](#limitations-of-openpilot-dm) |
|
||||||
* [User Data and comma Account](#user-data-and-comma-account) |
|
||||||
* [Safety and Testing](#safety-and-testing) |
|
||||||
* [Testing on PC](#testing-on-pc) |
|
||||||
* [Community and Contributing](#community-and-contributing) |
|
||||||
* [Directory Structure](#directory-structure) |
|
||||||
* [Licensing](#licensing) |
|
||||||
|
|
||||||
--- |
|
||||||
|
|
||||||
What is openpilot? |
|
||||||
------ |
|
||||||
|
|
||||||
[openpilot](http://github.com/commaai/openpilot) is an open source driver assistance system. Currently, openpilot performs the functions of Adaptive Cruise Control (ACC), Automated Lane Centering (ALC), Forward Collision Warning (FCW) and Lane Departure Warning (LDW) for a growing variety of supported [car makes, models and model years](#supported-cars). In addition, while openpilot is engaged, a camera based Driver Monitoring (DM) feature alerts distracted and asleep drivers. |
|
||||||
|
|
||||||
<table> |
|
||||||
<tr> |
|
||||||
<td><a href="https://www.youtube.com/watch?v=mgAbfr42oI8" title="YouTube" rel="noopener"><img src="https://i.imgur.com/kAtT6Ei.png"></a></td> |
|
||||||
<td><a href="https://www.youtube.com/watch?v=394rJKeh76k" title="YouTube" rel="noopener"><img src="https://i.imgur.com/lTt8cS2.png"></a></td> |
|
||||||
<td><a href="https://www.youtube.com/watch?v=1iNOc3cq8cs" title="YouTube" rel="noopener"><img src="https://i.imgur.com/ANnuSpe.png"></a></td> |
|
||||||
<td><a href="https://www.youtube.com/watch?v=Vr6NgrB-zHw" title="YouTube" rel="noopener"><img src="https://i.imgur.com/Qypanuq.png"></a></td> |
|
||||||
</tr> |
|
||||||
<tr> |
|
||||||
<td><a href="https://www.youtube.com/watch?v=Ug41KIKF0oo" title="YouTube" rel="noopener"><img src="https://i.imgur.com/3caZ7xM.png"></a></td> |
|
||||||
<td><a href="https://www.youtube.com/watch?v=NVR_CdG1FRg" title="YouTube" rel="noopener"><img src="https://i.imgur.com/bAZOwql.png"></a></td> |
|
||||||
<td><a href="https://www.youtube.com/watch?v=tkEvIdzdfUE" title="YouTube" rel="noopener"><img src="https://i.imgur.com/EFINEzG.png"></a></td> |
|
||||||
<td><a href="https://www.youtube.com/watch?v=_P-N1ewNne4" title="YouTube" rel="noopener"><img src="https://i.imgur.com/gAyAq22.png"></a></td> |
|
||||||
</tr> |
|
||||||
</table> |
|
||||||
|
|
||||||
Integration with Stock Features |
|
||||||
------ |
|
||||||
|
|
||||||
In all supported cars: |
|
||||||
* Stock Lane Keep Assist (LKA) and stock ALC are replaced by openpilot ALC, which only functions when openpilot is engaged by the user. |
|
||||||
* Stock LDW is replaced by openpilot LDW. |
|
||||||
|
|
||||||
Additionally, on specific supported cars (see ACC column in [supported cars](#supported-cars)): |
|
||||||
* Stock ACC is replaced by openpilot ACC. |
|
||||||
* openpilot FCW operates in addition to stock FCW. |
|
||||||
|
|
||||||
openpilot should preserve all of the vehicle's other stock features, including, but not limited to: FCW, Automatic Emergency Braking (AEB), auto high-beam, blind spot warning, and side collision warning. |
|
||||||
|
|
||||||
Supported Hardware |
|
||||||
------ |
|
||||||
|
|
||||||
At the moment, openpilot supports the [EON DevKit](https://comma.ai/shop/products/eon-dashcam-devkit) and the [comma two](https://comma.ai/shop/products/comma-two-devkit). A [car harness](https://comma.ai/shop/products/car-harness) is recommended to connect the EON or comma two to the car. In the future, we'd like to support other platforms as well, like gaming PCs. |
|
||||||
|
|
||||||
Supported Cars |
|
||||||
------ |
|
||||||
|
|
||||||
| Make | Model (US Market Reference) | Supported Package | ACC | No ACC accel below | No ALC below | |
|
||||||
| ----------| ------------------------------| ------------------| -----------------| -------------------| -------------| |
|
||||||
| Acura | ILX 2016-18 | AcuraWatch Plus | openpilot | 25mph<sup>5</sup> | 25mph | |
|
||||||
| Acura | RDX 2016-18 | AcuraWatch Plus | openpilot | 25mph<sup>5</sup> | 12mph | |
|
||||||
| Chrysler | Pacifica 2017-18 | Adaptive Cruise | Stock | 0mph | 9mph | |
|
||||||
| Chrysler | Pacifica Hybrid 2017-18 | Adaptive Cruise | Stock | 0mph | 9mph | |
|
||||||
| Chrysler | Pacifica Hybrid 2019 | Adaptive Cruise | Stock | 0mph | 39mph | |
|
||||||
| Honda | Accord 2018-19 | All | Stock | 0mph | 3mph | |
|
||||||
| Honda | Accord Hybrid 2018-19 | All | Stock | 0mph | 3mph | |
|
||||||
| Honda | Civic Sedan/Coupe 2016-18 | Honda Sensing | openpilot | 0mph | 12mph | |
|
||||||
| Honda | Civic Sedan/Coupe 2019 | Honda Sensing | Stock | 0mph | 2mph | |
|
||||||
| Honda | Civic Hatchback 2017-19 | Honda Sensing | Stock | 0mph | 12mph | |
|
||||||
| Honda | CR-V 2015-16 | Touring | openpilot | 25mph<sup>5</sup> | 12mph | |
|
||||||
| Honda | CR-V 2017-19 | Honda Sensing | Stock | 0mph | 12mph | |
|
||||||
| Honda | CR-V Hybrid 2017-2019 | Honda Sensing | Stock | 0mph | 12mph | |
|
||||||
| Honda | Fit 2018-19 | Honda Sensing | openpilot | 25mph<sup>5</sup> | 12mph | |
|
||||||
| Honda | Odyssey 2018-20 | Honda Sensing | openpilot | 25mph<sup>5</sup> | 0mph | |
|
||||||
| Honda | Passport 2019 | All | openpilot | 25mph<sup>5</sup> | 12mph | |
|
||||||
| Honda | Pilot 2016-18 | Honda Sensing | openpilot | 25mph<sup>5</sup> | 12mph | |
|
||||||
| Honda | Pilot 2019 | All | openpilot | 25mph<sup>5</sup> | 12mph | |
|
||||||
| Honda | Ridgeline 2017-19 | Honda Sensing | openpilot | 25mph<sup>5</sup> | 12mph | |
|
||||||
| Hyundai | Santa Fe 2019<sup>1</sup> | All | Stock | 0mph | 0mph | |
|
||||||
| Hyundai | Elantra 2017-19<sup>1</sup> | SCC + LKAS | Stock | 19mph | 34mph | |
|
||||||
| Hyundai | Genesis 2018<sup>1</sup> | All | Stock | 19mph | 34mph | |
|
||||||
| Jeep | Grand Cherokee 2016-18 | Adaptive Cruise | Stock | 0mph | 9mph | |
|
||||||
| Jeep | Grand Cherokee 2019 | Adaptive Cruise | Stock | 0mph | 39mph | |
|
||||||
| Kia | Optima 2019<sup>1</sup> | SCC + LKAS | Stock | 0mph | 0mph | |
|
||||||
| Kia | Sorento 2018<sup>1</sup> | All | Stock | 0mph | 0mph | |
|
||||||
| Kia | Stinger 2018<sup>1</sup> | SCC + LKAS | Stock | 0mph | 0mph | |
|
||||||
| Lexus | CT Hybrid 2017-18 | All | Stock<sup>4</sup>| 0mph | 0mph | |
|
||||||
| Lexus | ES Hybrid 2019 | All | openpilot | 0mph | 0mph | |
|
||||||
| Lexus | RX Hybrid 2016-19 | All | Stock<sup>4</sup>| 0mph | 0mph | |
|
||||||
| Lexus | IS 2017-2019 | All | Stock | 22mph | 0mph | |
|
||||||
| Lexus | IS Hybrid 2017 | All | Stock | 0mph | 0mph | |
|
||||||
| Subaru | Crosstrek 2018-19 | EyeSight | Stock | 0mph | 0mph | |
|
||||||
| Subaru | Impreza 2019-20 | EyeSight | Stock | 0mph | 0mph | |
|
||||||
| Toyota | Avalon 2016 | TSS-P | Stock<sup>4</sup>| 20mph<sup>5</sup> | 0mph | |
|
||||||
| Toyota | Avalon 2017-18 | All | Stock<sup>4</sup>| 20mph<sup>5</sup> | 0mph | |
|
||||||
| Toyota | Camry 2018-19 | All | Stock | 0mph<sup>2</sup> | 0mph | |
|
||||||
| Toyota | Camry Hybrid 2018-19 | All | Stock | 0mph<sup>2</sup> | 0mph | |
|
||||||
| Toyota | C-HR 2017-19 | All | Stock | 0mph | 0mph | |
|
||||||
| Toyota | C-HR Hybrid 2017-19 | All | Stock | 0mph | 0mph | |
|
||||||
| Toyota | Corolla 2017-19 | All | Stock<sup>4</sup>| 20mph<sup>5</sup> | 0mph | |
|
||||||
| Toyota | Corolla 2020 | All | openpilot | 0mph | 0mph | |
|
||||||
| Toyota | Corolla Hatchback 2019 | All | openpilot | 0mph | 0mph | |
|
||||||
| Toyota | Corolla Hybrid 2020 | All | openpilot | 0mph | 0mph | |
|
||||||
| Toyota | Highlander 2017-19 | All | Stock<sup>4</sup>| 0mph | 0mph | |
|
||||||
| Toyota | Highlander Hybrid 2017-19 | All | Stock<sup>4</sup>| 0mph | 0mph | |
|
||||||
| Toyota | Prius 2016 | TSS-P | Stock<sup>4</sup>| 0mph | 0mph | |
|
||||||
| Toyota | Prius 2017-19 | All | Stock<sup>4</sup>| 0mph | 0mph | |
|
||||||
| Toyota | Prius Prime 2017-20 | All | Stock<sup>4</sup>| 0mph | 0mph | |
|
||||||
| Toyota | Rav4 2016 | TSS-P | Stock<sup>4</sup>| 20mph<sup>5</sup> | 0mph | |
|
||||||
| Toyota | Rav4 2017-18 | All | Stock<sup>4</sup>| 20mph<sup>5</sup> | 0mph | |
|
||||||
| Toyota | Rav4 2019 | All | openpilot | 0mph | 0mph | |
|
||||||
| Toyota | Rav4 Hybrid 2016 | TSS-P | Stock<sup>4</sup>| 0mph | 0mph | |
|
||||||
| Toyota | Rav4 Hybrid 2017-18 | All | Stock<sup>4</sup>| 0mph | 0mph | |
|
||||||
| Toyota | Sienna 2018 | All | Stock<sup>4</sup>| 0mph | 0mph | |
|
||||||
| Volkswagen| Golf 2016-19<sup>3</sup> | Driver Assistance | Stock | 0mph | 0mph | |
|
||||||
|
|
||||||
<sup>1</sup>Requires a [panda](https://comma.ai/shop/products/panda-obd-ii-dongle) and open-source [Hyundai giraffe](https://github.com/commaai/neo/tree/master/giraffe/hyundai), designed for the 2019 Santa Fe; pinout may differ for other Hyundai and Kia models. <br /> |
|
||||||
<sup>2</sup>28mph for Camry 4CYL L, 4CYL LE and 4CYL SE which don't have Full-Speed Range Dynamic Radar Cruise Control. <br /> |
|
||||||
<sup>3</sup>Requires a [custom connector](https://community.comma.ai/wiki/index.php/Volkswagen#Integration_at_R242_Camera) for the [car harness](https://comma.ai/shop/products/car-harness) <br /> |
|
||||||
|
|
||||||
Community Maintained Cars and Features |
|
||||||
------ |
|
||||||
|
|
||||||
| Make | Model (US Market Reference) | Supported Package | ACC | No ACC accel below | No ALC below | |
|
||||||
| ----------| ------------------------------| ------------------| -----------------| -------------------| -------------| |
|
||||||
| Buick | Regal 2018<sup>6</sup> | Adaptive Cruise | openpilot | 0mph | 7mph | |
|
||||||
| Chevrolet | Malibu 2017<sup>6</sup> | Adaptive Cruise | openpilot | 0mph | 7mph | |
|
||||||
| Chevrolet | Volt 2017-18<sup>6</sup> | Adaptive Cruise | openpilot | 0mph | 7mph | |
|
||||||
| Cadillac | ATS 2018<sup>6</sup> | Adaptive Cruise | openpilot | 0mph | 7mph | |
|
||||||
| GMC | Acadia Denali 2018<sup>6</sup>| Adaptive Cruise | openpilot | 0mph | 7mph | |
|
||||||
| Holden | Astra 2017<sup>6</sup> | Adaptive Cruise | openpilot | 0mph | 7mph | |
|
||||||
|
|
||||||
<sup>4</sup>When disconnecting the Driver Support Unit (DSU), openpilot ACC will replace stock ACC. For DSU locations, see [Toyota Wiki page](https://community.comma.ai/wiki/index.php/Toyota). ***NOTE: disconnecting the DSU disables Automatic Emergency Braking (AEB).*** <br /> |
|
||||||
<sup>5</sup>[Comma Pedal](https://community.comma.ai/wiki/index.php/Comma_Pedal) is used to provide stop-and-go capability to some of the openpilot-supported cars that don't currently support stop-and-go. Here is how to [build a Comma Pedal](https://medium.com/@jfrux/comma-pedal-building-with-macrofab-6328bea791e8). ***NOTE: The Comma Pedal is not officially supported by [comma](https://comma.ai).*** <br /> |
|
||||||
<sup>6</sup>Requires a [panda](https://comma.ai/shop/products/panda-obd-ii-dongle) and [community built giraffe](https://zoneos.com/volt/). ***NOTE: disconnecting the ASCM disables Automatic Emergency Braking (AEB).*** <br /> |
|
||||||
|
|
||||||
Community Maintained Cars and Features are not verified by comma to meet our [safety model](SAFETY.md). Be extra cautious using them. They are only available after enabling the toggle in `Settings->Developer->Enable Community Features`. |
|
||||||
|
|
||||||
Installation Instructions |
|
||||||
------ |
|
||||||
|
|
||||||
Install openpilot on an EON by entering ``https://openpilot.comma.ai`` during the installer setup. |
|
||||||
|
|
||||||
Follow these [video instructions](https://youtu.be/3nlkomHathI) to properly mount the EON on the windshield. Note: openpilot features an automatic pose calibration routine, so openpilot performance should not be affected by small pitch and yaw misalignments caused by imprecise EON mounting. |
|
||||||
|
|
||||||
Before placing the device on your windshield, check the state and local laws and ordinances where you drive. Some state laws prohibit or restrict the placement of objects on the windshield of a motor vehicle. |
|
||||||
|
|
||||||
You will be able to engage openpilot after reviewing the onboarding screens and finishing the calibration procedure. |
|
||||||
|
|
||||||
Limitations of openpilot ALC and LDW |
|
||||||
------ |
|
||||||
|
|
||||||
openpilot ALC and openpilot LDW do not automatically drive the vehicle or reduce the amount of attention that must be paid to operate your vehicle. The driver must always keep control of the steering wheel and be ready to correct the openpilot ALC action at all times. |
|
||||||
|
|
||||||
While changing lanes, openpilot is not capable of looking next to you or checking your blind spot. Only nudge the wheel to initiate a lane change after you have confirmed it's safe to do so. |
|
||||||
|
|
||||||
Many factors can impact the performance of openpilot ALC and openpilot LDW, causing them to be unable to function as intended. These include, but are not limited to: |
|
||||||
|
|
||||||
* Poor visibility (heavy rain, snow, fog, etc.) or weather conditions that may interfere with sensor operation. |
|
||||||
* The road facing camera is obstructed, covered or damaged by mud, ice, snow, etc. |
|
||||||
* Obstruction caused by applying excessive paint or adhesive products (such as wraps, stickers, rubber coating, etc.) onto the vehicle. |
|
||||||
* The EON is mounted incorrectly. |
|
||||||
* When in sharp curves, like on-off ramps, intersections etc...; openpilot is designed to be limited in the amount of steering torque it can produce. |
|
||||||
* In the presence of restricted lanes or construction zones. |
|
||||||
* When driving on highly banked roads or in presence of strong cross-wind. |
|
||||||
* Extremely hot or cold temperatures. |
|
||||||
* Bright light (due to oncoming headlights, direct sunlight, etc.). |
|
||||||
* Driving on hills, narrow, or winding roads. |
|
||||||
|
|
||||||
The list above does not represent an exhaustive list of situations that may interfere with proper operation of openpilot components. It is the driver's responsibility to be in control of the vehicle at all times. |
|
||||||
|
|
||||||
Limitations of openpilot ACC and FCW |
|
||||||
------ |
|
||||||
|
|
||||||
openpilot ACC and openpilot FCW are not systems that allow careless or inattentive driving. It is still necessary for the driver to pay close attention to the vehicle’s surroundings and to be ready to re-take control of the gas and the brake at all times. |
|
||||||
|
|
||||||
Many factors can impact the performance of openpilot ACC and openpilot FCW, causing them to be unable to function as intended. These include, but are not limited to: |
|
||||||
|
|
||||||
* Poor visibility (heavy rain, snow, fog, etc.) or weather conditions that may interfere with sensor operation. |
|
||||||
* The road facing camera or radar are obstructed, covered, or damaged by mud, ice, snow, etc. |
|
||||||
* Obstruction caused by applying excessive paint or adhesive products (such as wraps, stickers, rubber coating, etc.) onto the vehicle. |
|
||||||
* The EON is mounted incorrectly. |
|
||||||
* Approaching a toll booth, a bridge or a large metal plate. |
|
||||||
* When driving on roads with pedestrians, cyclists, etc... |
|
||||||
* In presence of traffic signs or stop lights, which are not detected by openpilot at this time. |
|
||||||
* When the posted speed limit is below the user selected set speed. openpilot does not detect speed limits at this time. |
|
||||||
* In presence of vehicles in the same lane that are not moving. |
|
||||||
* When abrupt braking maneuvers are required. openpilot is designed to be limited in the amount of deceleration and acceleration that it can produce. |
|
||||||
* When surrounding vehicles perform close cut-ins from neighbor lanes. |
|
||||||
* Driving on hills, narrow, or winding roads. |
|
||||||
* Extremely hot or cold temperatures. |
|
||||||
* Bright light (due to oncoming headlights, direct sunlight, etc.). |
|
||||||
* Interference from other equipment that generates radar waves. |
|
||||||
|
|
||||||
The list above does not represent an exhaustive list of situations that may interfere with proper operation of openpilot components. It is the driver's responsibility to be in control of the vehicle at all times. |
|
||||||
|
|
||||||
Limitations of openpilot DM |
|
||||||
------ |
|
||||||
|
|
||||||
openpilot DM should not be considered an exact measurement of the driver's level of alertness. |
|
||||||
|
|
||||||
Many factors can impact the performance of openpilot DM, causing it to be unable to function as intended. These include, but are not limited to: |
|
||||||
|
|
||||||
* Low light conditions, such as driving at night or in dark tunnels. |
|
||||||
* Bright light (due to oncoming headlights, direct sunlight, etc.). |
|
||||||
* The driver's face is partially or completely outside the field of view of the driver facing camera. |
|
||||||
* Right hand driving vehicles. |
|
||||||
* The driver facing camera is obstructed, covered, or damaged. |
|
||||||
|
|
||||||
The list above does not represent an exhaustive list of situations that may interfere with proper operation of openpilot components. A driver should not rely on openpilot DM to assess their level of attention. |
|
||||||
|
|
||||||
User Data and comma Account |
|
||||||
------ |
|
||||||
|
|
||||||
By default, openpilot uploads the driving data to our servers. You can also access your data by pairing with the comma connect app ([iOS](https://apps.apple.com/us/app/comma-connect/id1456551889), [Android](https://play.google.com/store/apps/details?id=ai.comma.connect&hl=en_US)). We use your data to train better models and improve openpilot for everyone. |
|
||||||
|
|
||||||
openpilot is open source software: the user is free to disable data collection if they wish to do so. |
|
||||||
|
|
||||||
openpilot logs the road facing camera, CAN, GPS, IMU, magnetometer, thermal sensors, crashes, and operating system logs. |
|
||||||
The driver facing camera is only logged if you explicitly opt-in in settings. The microphone is not recorded. |
|
||||||
|
|
||||||
By using openpilot, you agree to [our Privacy Policy](https://my.comma.ai/privacy). You understand that use of this software or its related services will generate certain types of user data, which may be logged and stored at the sole discretion of comma. By accepting this agreement, you grant an irrevocable, perpetual, worldwide right to comma for the use of this data. |
|
||||||
|
|
||||||
Safety and Testing |
|
||||||
---- |
|
||||||
|
|
||||||
* openpilot observes ISO26262 guidelines, see [SAFETY.md](SAFETY.md) for more detail. |
|
||||||
* openpilot has software in the loop [tests](run_docker_tests.sh) that run on every commit. |
|
||||||
* The safety model code lives in panda and is written in C, see [code rigor](https://github.com/commaai/panda#code-rigor) for more details. |
|
||||||
* panda has software in the loop [safety tests](https://github.com/commaai/panda/tree/master/tests/safety). |
|
||||||
* Internally, we have a hardware in the loop Jenkins test suite that builds and unit tests the various processes. |
|
||||||
* panda has additional hardware in the loop [tests](https://github.com/commaai/panda/blob/master/Jenkinsfile). |
|
||||||
* We run the latest openpilot in a testing closet containing 10 EONs continuously replaying routes. |
|
||||||
|
|
||||||
Testing on PC |
|
||||||
------ |
|
||||||
|
|
||||||
Check out [openpilot-tools](https://github.com/commaai/openpilot-tools): lots of tools you can use to replay driving data, and to test and develop openpilot from your PC. |
|
||||||
|
|
||||||
Community and Contributing |
|
||||||
------ |
|
||||||
|
|
||||||
openpilot is developed by [comma](https://comma.ai/) and by users like you. We welcome both pull requests and issues on [GitHub](http://github.com/commaai/openpilot). Bug fixes and new car ports are encouraged. |
|
||||||
|
|
||||||
You can add support for your car by following guides we have written for [Brand](https://medium.com/@comma_ai/how-to-write-a-car-port-for-openpilot-7ce0785eda84) and [Model](https://medium.com/@comma_ai/openpilot-port-guide-for-toyota-models-e5467f4b5fe6) ports. Generally, a car with adaptive cruise control and lane keep assist is a good candidate. [Join our Discord](https://discord.comma.ai) to discuss car ports: most car makes have a dedicated channel. |
|
||||||
|
|
||||||
Want to get paid to work on openpilot? [comma is hiring](https://comma.ai/jobs/). We also have a [bounty program](https://comma.ai/bounties.html). |
|
||||||
|
|
||||||
And [follow us on Twitter](https://twitter.com/comma_ai). |
|
||||||
|
|
||||||
Directory Structure |
|
||||||
------ |
|
||||||
. |
|
||||||
├── apk # The apk files used for the UI |
|
||||||
├── cereal # The messaging spec and libs used for all logs on EON |
|
||||||
├── common # Library like functionality we've developed here |
|
||||||
├── installer/updater # Manages auto-updates of openpilot |
|
||||||
├── opendbc # Files showing how to interpret data from cars |
|
||||||
├── panda # Code used to communicate on CAN |
|
||||||
├── phonelibs # Libraries used on EON |
|
||||||
├── pyextra # Libraries used on EON |
|
||||||
└── selfdrive # Code needed to drive the car |
|
||||||
├── assets # Fonts and images for UI |
|
||||||
├── athena # Allows communication with the app |
|
||||||
├── boardd # Daemon to talk to the board |
|
||||||
├── camerad # Driver to capture images from the camera sensors |
|
||||||
├── car # Car specific code to read states and control actuators |
|
||||||
├── common # Shared C/C++ code for the daemons |
|
||||||
├── controls # Perception, planning and controls |
|
||||||
├── debug # Tools to help you debug and do car ports |
|
||||||
├── locationd # Soon to be home of precise location |
|
||||||
├── logcatd # Android logcat as a service |
|
||||||
├── loggerd # Logger and uploader of car data |
|
||||||
├── modeld # Driving and monitoring model runners |
|
||||||
├── proclogd # Logs information from proc |
|
||||||
├── sensord # IMU / GPS interface code |
|
||||||
├── tests # Unit tests, system tests and a car simulator |
|
||||||
└── ui # The UI |
|
||||||
|
|
||||||
To understand how the services interact, see `cereal/service_list.yaml`. |
|
||||||
|
|
||||||
Licensing |
|
||||||
------ |
|
||||||
|
|
||||||
openpilot is released under the MIT license. Some parts of the software are released under other licenses as specified. |
|
||||||
|
|
||||||
Any user of this software shall indemnify and hold harmless comma.ai, Inc. and its directors, officers, employees, agents, stockholders, affiliates, subcontractors and customers from and against all allegations, claims, actions, suits, demands, damages, liabilities, obligations, losses, settlements, judgments, costs and expenses (including without limitation attorneys’ fees and costs) which arise out of, relate to or result from any use of this software by user. |
|
||||||
|
|
||||||
**THIS IS ALPHA QUALITY SOFTWARE FOR RESEARCH PURPOSES ONLY. THIS IS NOT A PRODUCT. |
|
||||||
YOU ARE RESPONSIBLE FOR COMPLYING WITH LOCAL LAWS AND REGULATIONS. |
|
||||||
NO WARRANTY EXPRESSED OR IMPLIED.** |
|
||||||
|
|
||||||
--- |
|
||||||
|
|
||||||
<img src="https://d1qb2nb5cznatu.cloudfront.net/startups/i/1061157-bc7e9bf3b246ece7322e6ffe653f6af8-medium_jpg.jpg?buster=1458363130" width="75"></img> <img src="https://cdn-images-1.medium.com/max/1600/1*C87EjxGeMPrkTuVRVWVg4w.png" width="225"></img> |
|
@ -1,500 +0,0 @@ |
|||||||
Version 0.7.1 (2020-01-20) |
|
||||||
======================== |
|
||||||
* comma two support! |
|
||||||
* Lane Change Assist above 45 mph! |
|
||||||
* Replace zmq with custom messaging library, msgq! |
|
||||||
* Supercombo model: calibration and driving models are combined for better lead estimate |
|
||||||
* More robust updater thanks to jyoung8607! Requires NEOS update |
|
||||||
* Improve low speed ACC tuning |
|
||||||
|
|
||||||
Version 0.7 (2019-12-13) |
|
||||||
======================== |
|
||||||
* Move to SCons build system! |
|
||||||
* Add Lane Departure Warning (LDW) for all supported vehicles! |
|
||||||
* NEOS update: increase wifi speed thanks to jyoung8607! |
|
||||||
* Adaptive driver monitoring based on scene |
|
||||||
* New driving model trained end-to-end: improve lane lines and lead detection |
|
||||||
* Smarter torque limit alerts for all cars |
|
||||||
* Improve GM longitudinal control: proper computations for 15Hz radar |
|
||||||
* Move GM port, Toyota with DSU removed, comma pedal in community features; toggle switch required |
|
||||||
* Remove upload over cellular toggle: only upload qlog and qcamera files if not on wifi |
|
||||||
* Refactor Panda code towards ISO26262 and SIL2 compliancy |
|
||||||
* Forward stock FCW for Honda Nidec |
|
||||||
* Volkswagen port now standard: comma Harness intercepts stock camera |
|
||||||
|
|
||||||
Version 0.6.6 (2019-11-05) |
|
||||||
======================== |
|
||||||
* Volkswagen support thanks to jyoung8607! |
|
||||||
* Toyota Corolla Hybrid with TSS 2.0 support thanks to u8511049! |
|
||||||
* Lexus ES with TSS 2.0 support thanks to energee! |
|
||||||
* Fix GM ignition detection and lock safety mode not required anymore |
|
||||||
* Log panda firmware and dongle ID thanks to martinl! |
|
||||||
* New driving model: improve path prediction and lead detection |
|
||||||
* New driver monitoring model, 4x smaller and running on DSP |
|
||||||
* Display an alert and don't start openpilot if panda has wrong firmware |
|
||||||
* Fix bug preventing EON from terminating processes after a drive |
|
||||||
* Remove support for Toyota giraffe without the 120Ohm resistor |
|
||||||
|
|
||||||
Version 0.6.5 (2019-10-07) |
|
||||||
======================== |
|
||||||
* NEOS update: upgrade to Python3 and new installer! |
|
||||||
* comma Harness support! |
|
||||||
* New driving model: improve path prediction |
|
||||||
* New driver monitoring model: more accurate face and eye detection |
|
||||||
* Redesign offroad screen to display updates and alerts |
|
||||||
* Increase maximum allowed acceleration |
|
||||||
* Prevent car 12V battery drain by cutting off EON charge after 3 days of no drive |
|
||||||
* Lexus CT Hybrid support thanks to thomaspich! |
|
||||||
* Louder chime for critical alerts |
|
||||||
* Add toggle to switch to dashcam mode |
|
||||||
* Fix "invalid vehicle params" error on DSU-less Toyota |
|
||||||
|
|
||||||
Version 0.6.4 (2019-09-08) |
|
||||||
======================== |
|
||||||
* Forward stock AEB for Honda Nidec |
|
||||||
* Improve lane centering on banked roads |
|
||||||
* Always-on forward collision warning |
|
||||||
* Always-on driver monitoring, except for right hand drive countries |
|
||||||
* Driver monitoring learns the user's normal driving position |
|
||||||
* Honda Fit support thanks to energee! |
|
||||||
* Lexus IS support |
|
||||||
|
|
||||||
Version 0.6.3 (2019-08-12) |
|
||||||
======================== |
|
||||||
* Alert sounds from EON: requires NEOS update |
|
||||||
* Improve driver monitoring: eye tracking and improved awareness logic |
|
||||||
* Improve path prediction with new driving model |
|
||||||
* Improve lane positioning with wide lanes and exits |
|
||||||
* Improve lateral control on RAV4 |
|
||||||
* Slow down for turns using model |
|
||||||
* Open sourced regression test to verify outputs against reference logs |
|
||||||
* Open sourced regression test to sanity check all car models |
|
||||||
|
|
||||||
Version 0.6.2 (2019-07-29) |
|
||||||
======================== |
|
||||||
* New driving model! |
|
||||||
* Improve lane tracking with double lines |
|
||||||
* Strongly improve stationary vehicle detection |
|
||||||
* Strongly reduce cases of braking due to false leads |
|
||||||
* Better lead tracking around turns |
|
||||||
* Improve cut-in prediction by using neural network |
|
||||||
* Improve lateral control on Toyota Camry and C-HR thanks to zorrobyte! |
|
||||||
* Fix unintended openpilot disengagements on Jeep thanks to adhintz! |
|
||||||
* Fix delayed transition to offroad when car is turned off |
|
||||||
|
|
||||||
Version 0.6.1 (2019-07-21) |
|
||||||
======================== |
|
||||||
* Remote SSH with comma prime and [ssh.comma.ai](https://ssh.comma.ai) |
|
||||||
* Panda code Misra-c2012 compliance, tested against cppcheck coverage |
|
||||||
* Lockout openpilot after 3 terminal alerts for driver distracted or unresponsive |
|
||||||
* Toyota Sienna support thanks to wocsor! |
|
||||||
|
|
||||||
Version 0.6 (2019-07-01) |
|
||||||
======================== |
|
||||||
* New model, with double the pixels and ten times the temporal context! |
|
||||||
* Car should not take exits when in the right lane |
|
||||||
* openpilot uses only ~65% of the CPU (down from 75%) |
|
||||||
* Routes visible in connect/explorer after only 0.2% is uploaded (qlogs) |
|
||||||
* loggerd and sensord are open source, every line of openpilot is now open |
|
||||||
* Panda safety code is MISRA compliant and ships with a signed version on release2 |
|
||||||
* New NEOS is 500MB smaller and has a reproducible usr/pipenv |
|
||||||
* Lexus ES Hybrid support thanks to wocsor! |
|
||||||
* Improve tuning for supported Toyota with TSS 2.0 |
|
||||||
* Various other stability improvements |
|
||||||
|
|
||||||
Version 0.5.13 (2019-05-31) |
|
||||||
========================== |
|
||||||
* Reduce panda power consumption by 70%, down to 80mW, when car is off (not for GM) |
|
||||||
* Reduce EON power consumption by 40%, down to 1100mW, when car is off |
|
||||||
* Reduce CPU utilization by 20% and improve stability |
|
||||||
* Temporarily remove mapd functionalities to improve stability |
|
||||||
* Add openpilot record-only mode for unsupported cars |
|
||||||
* Synchronize controlsd to boardd to reduce latency |
|
||||||
* Remove panda support for Subaru giraffe |
|
||||||
|
|
||||||
Version 0.5.12 (2019-05-16) |
|
||||||
========================== |
|
||||||
* Improve lateral control for the Prius and Prius Prime |
|
||||||
* Compress logs before writing to disk |
|
||||||
* Remove old driving data when storage reaches 90% full |
|
||||||
* Fix small offset in following distance |
|
||||||
* Various small CPU optimizations |
|
||||||
* Improve offroad power consumption: require NEOS Update |
|
||||||
* Add default speed limits for Estonia thanks to martinl! |
|
||||||
* Subaru Crosstrek support thanks to martinl! |
|
||||||
* Toyota Avalon support thanks to njbrown09! |
|
||||||
* Toyota Rav4 with TSS 2.0 support thanks to wocsor! |
|
||||||
* Toyota Corolla with TSS 2.0 support thanks to wocsor! |
|
||||||
|
|
||||||
Version 0.5.11 (2019-04-17) |
|
||||||
======================== |
|
||||||
* Add support for Subaru |
|
||||||
* Reduce panda power consumption by 60% when car is off |
|
||||||
* Fix controlsd lag every 6 minutes. This would sometimes cause disengagements |
|
||||||
* Fix bug in controls with new angle-offset learner in MPC |
|
||||||
* Reduce cpu consumption of ubloxd by rewriting it in C++ |
|
||||||
* Improve driver monitoring model and face detection |
|
||||||
* Improve performance of visiond and ui |
|
||||||
* Honda Passport 2019 support |
|
||||||
* Lexus RX Hybrid 2019 support thanks to schomems! |
|
||||||
* Improve road selection heuristic in mapd |
|
||||||
* Add Lane Departure Warning to dashboard for Toyota thanks to arne182 |
|
||||||
|
|
||||||
Version 0.5.10 (2019-03-19) |
|
||||||
======================== |
|
||||||
* Self-tuning vehicle parameters: steering offset, tire stiffness and steering ratio |
|
||||||
* Improve longitudinal control at low speed when lead vehicle harshly decelerates |
|
||||||
* Fix panda bug going unexpectedly in DCP mode when EON is connected |
|
||||||
* Reduce white panda power consumption by 500mW when EON is disconnected by turning off WIFI |
|
||||||
* New Driver Monitoring Model |
|
||||||
* Support QR codes for login using comma connect |
|
||||||
* Refactor comma pedal FW and use CRC-8 checksum algorithm for safety. Reflashing pedal is required. |
|
||||||
Please see `#hw-pedal` on [discord](discord.comma.ai) for assistance updating comma pedal. |
|
||||||
* Additional speed limit rules for Germany thanks to arne182 |
|
||||||
* Allow negative speed limit offsets |
|
||||||
|
|
||||||
Version 0.5.9 (2019-02-10) |
|
||||||
======================== |
|
||||||
* Improve calibration using a dedicated neural network |
|
||||||
* Abstract planner in its own process to remove lags in controls process |
|
||||||
* Improve speed limits with country/region defaults by road type |
|
||||||
* Reduce mapd data usage with gzip thanks to eFiniLan |
|
||||||
* Zip log files in the background to reduce disk usage |
|
||||||
* Kia Optima support thanks to emmertex! |
|
||||||
* Buick Regal 2018 support thanks to HOYS! |
|
||||||
* Comma pedal support for Toyota thanks to wocsor! Note: tuning needed and not maintained by comma |
|
||||||
* Chrysler Pacifica and Jeep Grand Cherokee support thanks to adhintz! |
|
||||||
|
|
||||||
Version 0.5.8 (2019-01-17) |
|
||||||
======================== |
|
||||||
* Open sourced visiond |
|
||||||
* Auto-slowdown for upcoming turns |
|
||||||
* Chrysler/Jeep/Fiat support thanks to adhintz! |
|
||||||
* Honda Civic 2019 support thanks to csouers! |
|
||||||
* Improve use of car display in Toyota thanks to arne182! |
|
||||||
* No data upload when connected to Android or iOS hotspots and "Enable Upload Over Cellular" setting is off |
|
||||||
* EON stops charging when 12V battery drops below 11.8V |
|
||||||
|
|
||||||
Version 0.5.7 (2018-12-06) |
|
||||||
======================== |
|
||||||
* Speed limit from OpenStreetMap added to UI |
|
||||||
* Highlight speed limit when speed exceeds road speed limit plus a delta |
|
||||||
* Option to limit openpilot max speed to road speed limit plus a delta |
|
||||||
* Cadillac ATS support thanks to vntarasov! |
|
||||||
* GMC Acadia support thanks to CryptoKylan! |
|
||||||
* Decrease GPU power consumption |
|
||||||
* NEOSv8 autoupdate |
|
||||||
|
|
||||||
Version 0.5.6 (2018-11-16) |
|
||||||
======================== |
|
||||||
* Refresh settings layout and add feature descriptions |
|
||||||
* In Honda, keep stock camera on for logging and extra stock features; new openpilot giraffe setting is 0111! |
|
||||||
* In Toyota, option to keep stock camera on for logging and extra stock features (e.g. AHB); 120Ohm resistor required on giraffe. |
|
||||||
* Improve camera calibration stability |
|
||||||
* More tuning to Honda positive accelerations |
|
||||||
* Reduce brake pump use on Hondas |
|
||||||
* Chevrolet Malibu support thanks to tylergets! |
|
||||||
* Holden Astra support thanks to AlexHill! |
|
||||||
|
|
||||||
Version 0.5.5 (2018-10-20) |
|
||||||
======================== |
|
||||||
* Increase allowed Honda positive accelerations |
|
||||||
* Fix sporadic unexpected braking when passing semi-trucks in Toyota |
|
||||||
* Fix gear reading bug in Hyundai Elantra thanks to emmertex! |
|
||||||
|
|
||||||
Version 0.5.4 (2018-09-25) |
|
||||||
======================== |
|
||||||
* New Driving Model |
|
||||||
* New Driver Monitoring Model |
|
||||||
* Improve longitudinal mpc in mid-low speed braking |
|
||||||
* Honda Accord hybrid support thanks to energee! |
|
||||||
* Ship mpc binaries and sensibly reduce build time |
|
||||||
* Calibration more stable |
|
||||||
* More Hyundai and Kia cars supported thanks to emmertex! |
|
||||||
* Various GM Volt improvements thanks to vntarasov! |
|
||||||
|
|
||||||
Version 0.5.3 (2018-09-03) |
|
||||||
======================== |
|
||||||
* Hyundai Santa Fe support! |
|
||||||
* Honda Pilot 2019 support thanks to energee! |
|
||||||
* Toyota Highlander support thanks to daehahn! |
|
||||||
* Improve steering tuning for Honda Odyssey |
|
||||||
|
|
||||||
Version 0.5.2 (2018-08-16) |
|
||||||
======================== |
|
||||||
* New calibration: more accurate, a lot faster, open source! |
|
||||||
* Enable orbd |
|
||||||
* Add little endian support to CAN packer |
|
||||||
* Fix fingerprint for Honda Accord 1.5T |
|
||||||
* Improve driver monitoring model |
|
||||||
|
|
||||||
Version 0.5.1 (2018-08-01) |
|
||||||
======================== |
|
||||||
* Fix radar error on Civic sedan 2018 |
|
||||||
* Improve thermal management logic |
|
||||||
* Alpha Toyota C-HR and Camry support! |
|
||||||
* Auto-switch Driver Monitoring to 3 min counter when inaccurate |
|
||||||
|
|
||||||
Version 0.5 (2018-07-11) |
|
||||||
======================== |
|
||||||
* Driver Monitoring (beta) option in settings! |
|
||||||
* Make visiond, loggerd and UI use less resources |
|
||||||
* 60 FPS UI |
|
||||||
* Better car parameters for most cars |
|
||||||
* New sidebar with stats |
|
||||||
* Remove Waze and Spotify to free up system resources |
|
||||||
* Remove rear view mirror option |
|
||||||
* Calibration 3x faster |
|
||||||
|
|
||||||
Version 0.4.7.2 (2018-06-25) |
|
||||||
========================== |
|
||||||
* Fix loggerd lag issue |
|
||||||
* No longer prompt for updates |
|
||||||
* Mitigate right lane hugging for properly mounted EON (procedure on wiki) |
|
||||||
|
|
||||||
Version 0.4.7.1 (2018-06-18) |
|
||||||
========================== |
|
||||||
* Fix Acura ILX steer faults |
|
||||||
* Fix bug in mock car |
|
||||||
|
|
||||||
Version 0.4.7 (2018-06-15) |
|
||||||
========================== |
|
||||||
* New model! |
|
||||||
* GM Volt (and CT6 lateral) support! |
|
||||||
* Honda Bosch lateral support! |
|
||||||
* Improve actuator modeling to reduce lateral wobble |
|
||||||
* Minor refactor of car abstraction layer |
|
||||||
* Hack around orbd startup issue |
|
||||||
|
|
||||||
Version 0.4.6 (2018-05-18) |
|
||||||
========================== |
|
||||||
* NEOSv6 required! Will autoupdate |
|
||||||
* Stability improvements |
|
||||||
* Fix all memory leaks |
|
||||||
* Update C++ compiler to clang6 |
|
||||||
* Improve front camera exposure |
|
||||||
|
|
||||||
Version 0.4.5 (2018-04-27) |
|
||||||
========================== |
|
||||||
* Release notes added to the update popup |
|
||||||
* Improve auto shut-off logic to disallow empty battery |
|
||||||
* Added onboarding instructions |
|
||||||
* Include orbd, the first piece of new calibration algorithm |
|
||||||
* Show remaining upload data instead of file numbers |
|
||||||
* Fix UI bugs |
|
||||||
* Fix memory leaks |
|
||||||
|
|
||||||
Version 0.4.4 (2018-04-13) |
|
||||||
========================== |
|
||||||
* EON are flipped! Flip your EON's mount! |
|
||||||
* Alpha Honda Ridgeline support thanks to energee! |
|
||||||
* Support optional front camera recording |
|
||||||
* Upload over cellular toggle now applies to all files, not just video |
|
||||||
* Increase acceleration when closing lead gap |
|
||||||
* User now prompted for future updates |
|
||||||
* NEO no longer supported :( |
|
||||||
|
|
||||||
Version 0.4.3.2 (2018-03-29) |
|
||||||
============================ |
|
||||||
* Improve autofocus |
|
||||||
* Improve driving when only one lane line is detected |
|
||||||
* Added fingerprint for Toyota Corolla LE |
|
||||||
* Fixed Toyota Corolla steer error |
|
||||||
* Full-screen driving UI |
|
||||||
* Improved path drawing |
|
||||||
|
|
||||||
Version 0.4.3.1 (2018-03-19) |
|
||||||
============================ |
|
||||||
* Improve autofocus |
|
||||||
* Add check for MPC solution error |
|
||||||
* Make first distracted warning visual only |
|
||||||
|
|
||||||
Version 0.4.3 (2018-03-13) |
|
||||||
========================== |
|
||||||
* Add HDR and autofocus |
|
||||||
* Update UI aesthetic |
|
||||||
* Grey panda works in Waze |
|
||||||
* Add alpha support for 2017 Honda Pilot |
|
||||||
* Slight increase in acceleration response from stop |
|
||||||
* Switch CAN sending to use CANPacker |
|
||||||
* Fix pulsing acceleration regression on Honda |
|
||||||
* Fix openpilot bugs when stock system is in use |
|
||||||
* Change starting logic for chffrplus to use battery voltage |
|
||||||
|
|
||||||
Version 0.4.2 (2018-02-05) |
|
||||||
========================== |
|
||||||
* Add alpha support for 2017 Lexus RX Hybrid |
|
||||||
* Add alpha support for 2018 ACURA RDX |
|
||||||
* Updated fingerprint to include Toyota Rav4 SE and Prius Prime |
|
||||||
* Bugfixes for Acura ILX and Honda Odyssey |
|
||||||
|
|
||||||
Version 0.4.1 (2018-01-30) |
|
||||||
========================== |
|
||||||
* Add alpha support for 2017 Toyota Corolla |
|
||||||
* Add alpha support for 2018 Honda Odyssey with Honda Sensing |
|
||||||
* Add alpha support for Grey Panda |
|
||||||
* Refactored car abstraction layer to make car ports easier |
|
||||||
* Increased steering torque limit on Honda CR-V by 30% |
|
||||||
|
|
||||||
Version 0.4.0.2 (2018-01-18) |
|
||||||
========================== |
|
||||||
* Add focus adjustment slider |
|
||||||
* Minor bugfixes |
|
||||||
|
|
||||||
Version 0.4.0.1 (2017-12-21) |
|
||||||
========================== |
|
||||||
* New UI to match chffrplus |
|
||||||
* Improved lateral control tuning to fix oscillations on Civic |
|
||||||
* Add alpha support for 2017 Toyota Rav4 Hybrid |
|
||||||
* Reduced CPU usage |
|
||||||
* Removed unnecessary utilization of fan at max speed |
|
||||||
* Minor bug fixes |
|
||||||
|
|
||||||
Version 0.3.9 (2017-11-21) |
|
||||||
========================== |
|
||||||
* Add alpha support for 2017 Toyota Prius |
|
||||||
* Improved longitudinal control using model predictive control |
|
||||||
* Enable Forward Collision Warning |
|
||||||
* Acura ILX now maintains openpilot engaged at standstill when brakes are applied |
|
||||||
|
|
||||||
Version 0.3.8.2 (2017-10-30) |
|
||||||
========================== |
|
||||||
* Add alpha support for 2017 Toyota RAV4 |
|
||||||
* Smoother lateral control |
|
||||||
* Stay silent if stock system is connected through giraffe |
|
||||||
* Minor bug fixes |
|
||||||
|
|
||||||
Version 0.3.7 (2017-09-30) |
|
||||||
========================== |
|
||||||
* Improved lateral control using model predictive control |
|
||||||
* Improved lane centering |
|
||||||
* Improved GPS |
|
||||||
* Reduced tendency of path deviation near right side exits |
|
||||||
* Enable engagement while the accelerator pedal is pressed |
|
||||||
* Enable engagement while the brake pedal is pressed, when stationary and with lead vehicle within 5m |
|
||||||
* Disable engagement when park brake or brake hold are active |
|
||||||
* Fixed sporadic longitudinal pulsing in Civic |
|
||||||
* Cleanups to vehicle interface |
|
||||||
|
|
||||||
Version 0.3.6.1 (2017-08-15) |
|
||||||
============================ |
|
||||||
* Mitigate low speed steering oscillations on some vehicles |
|
||||||
* Include board steering check for CR-V |
|
||||||
|
|
||||||
Version 0.3.6 (2017-08-08) |
|
||||||
========================== |
|
||||||
* Fix alpha CR-V support |
|
||||||
* Improved GPS |
|
||||||
* Fix display of target speed not always matching HUD |
|
||||||
* Increased acceleration after stop |
|
||||||
* Mitigated some vehicles driving too close to the right line |
|
||||||
|
|
||||||
Version 0.3.5 (2017-07-30) |
|
||||||
========================== |
|
||||||
* Fix bug where new devices would not begin calibration |
|
||||||
* Minor robustness improvements |
|
||||||
|
|
||||||
Version 0.3.4 (2017-07-28) |
|
||||||
========================== |
|
||||||
* Improved model trained on more data |
|
||||||
* Much improved controls tuning |
|
||||||
* Performance improvements |
|
||||||
* Bugfixes and improvements to calibration |
|
||||||
* Driving log can play back video |
|
||||||
* Acura only: system now stays engaged below 25mph as long as brakes are applied |
|
||||||
|
|
||||||
Version 0.3.3 (2017-06-28) |
|
||||||
=========================== |
|
||||||
* Improved model trained on more data |
|
||||||
* Alpha CR-V support thanks to energee and johnnwvs! |
|
||||||
* Using the opendbc project for DBC files |
|
||||||
* Minor performance improvements |
|
||||||
* UI update thanks to pjlao307 |
|
||||||
* Power off button |
|
||||||
* 6% more torque on the Civic |
|
||||||
|
|
||||||
Version 0.3.2 (2017-05-22) |
|
||||||
=========================== |
|
||||||
* Minor stability bugfixes |
|
||||||
* Added metrics and rear view mirror disable to settings |
|
||||||
* Update model with more crowdsourced data |
|
||||||
|
|
||||||
Version 0.3.1 (2017-05-17) |
|
||||||
=========================== |
|
||||||
* visiond stability bugfix |
|
||||||
* Add logging for angle and flashing |
|
||||||
|
|
||||||
Version 0.3.0 (2017-05-12) |
|
||||||
=========================== |
|
||||||
* Add CarParams struct to improve the abstraction layer |
|
||||||
* Refactor visiond IPC to support multiple clients |
|
||||||
* Add raw GPS and beginning support for navigation |
|
||||||
* Improve model in visiond using crowdsourced data |
|
||||||
* Add improved system logging to diagnose instability |
|
||||||
* Rewrite baseui in React Native |
|
||||||
* Moved calibration to the cloud |
|
||||||
|
|
||||||
Version 0.2.9 (2017-03-01) |
|
||||||
=========================== |
|
||||||
* Retain compatibility with NEOS v1 |
|
||||||
|
|
||||||
Version 0.2.8 (2017-02-27) |
|
||||||
=========================== |
|
||||||
* Fix bug where frames were being dropped in minute 71 |
|
||||||
|
|
||||||
Version 0.2.7 (2017-02-08) |
|
||||||
=========================== |
|
||||||
* Better performance and pictures at night |
|
||||||
* Fix ptr alignment issue in boardd |
|
||||||
* Fix brake error light, fix crash if too cold |
|
||||||
|
|
||||||
Version 0.2.6 (2017-01-31) |
|
||||||
=========================== |
|
||||||
* Fix bug in visiond model execution |
|
||||||
|
|
||||||
Version 0.2.5 (2017-01-30) |
|
||||||
=========================== |
|
||||||
* Fix race condition in manager |
|
||||||
|
|
||||||
Version 0.2.4 (2017-01-27) |
|
||||||
=========================== |
|
||||||
* OnePlus 3T support |
|
||||||
* Enable installation as NEOS app |
|
||||||
* Various minor bugfixes |
|
||||||
|
|
||||||
Version 0.2.3 (2017-01-11) |
|
||||||
=========================== |
|
||||||
* Reduce space usage by 80% |
|
||||||
* Add better logging |
|
||||||
* Add Travis CI |
|
||||||
|
|
||||||
Version 0.2.2 (2017-01-10) |
|
||||||
=========================== |
|
||||||
* Board triggers started signal on CAN messages |
|
||||||
* Improved autoexposure |
|
||||||
* Handle out of space, improve upload status |
|
||||||
|
|
||||||
Version 0.2.1 (2016-12-14) |
|
||||||
=========================== |
|
||||||
* Performance improvements, removal of more numpy |
|
||||||
* Fix boardd process priority |
|
||||||
* Make counter timer reset on use of steering wheel |
|
||||||
|
|
||||||
Version 0.2 (2016-12-12) |
|
||||||
========================= |
|
||||||
* Car/Radar abstraction layers have shipped, see cereal/car.capnp |
|
||||||
* controlsd has been refactored |
|
||||||
* Shipped plant model and testing maneuvers |
|
||||||
* visiond exits more gracefully now |
|
||||||
* Hardware encoder in visiond should always init |
|
||||||
* ui now turns off the screen after 30 seconds |
|
||||||
* Switch to openpilot release branch for future releases |
|
||||||
* Added preliminary Docker container to run tests on PC |
|
||||||
|
|
||||||
Version 0.1 (2016-11-29) |
|
||||||
========================= |
|
||||||
* Initial release of openpilot |
|
||||||
* Adaptive cruise control is working |
|
||||||
* Lane keep assist is working |
|
||||||
* Support for Acura ILX 2016 with AcuraWatch Plus |
|
||||||
* Support for Honda Civic 2016 Touring Edition |
|
@ -1,34 +0,0 @@ |
|||||||
openpilot Safety |
|
||||||
====== |
|
||||||
|
|
||||||
openpilot is an Adaptive Cruise Control (ACC) and Automated Lane Centering (ALC) system. |
|
||||||
Like other ACC and ALC systems, openpilot is a failsafe passive system and it requires the |
|
||||||
driver to be alert and to pay attention at all times. |
|
||||||
|
|
||||||
In order to enforce driver alertness, openpilot includes a driver monitoring feature |
|
||||||
that alerts the driver when distracted. |
|
||||||
|
|
||||||
However, even with an attentive driver, we must make further efforts for the system to be |
|
||||||
safe. We repeat, **driver alertness is necessary, but not sufficient, for openpilot to be |
|
||||||
used safely** and openpilot is provided with no warranty of fitness for any purpose. |
|
||||||
|
|
||||||
openpilot is developed in good faith to be compliant with FMVSS requirements and to follow |
|
||||||
industry standards of safety for Level 2 Driver Assistance Systems. In particular, we observe |
|
||||||
ISO26262 guidelines, including those from [pertinent documents](https://www.nhtsa.gov/sites/nhtsa.dot.gov/files/documents/13498a_812_573_alcsystemreport.pdf) |
|
||||||
released by NHTSA. In addition, we impose strict coding guidelines (like [MISRA C : 2012](https://www.misra.org.uk/MISRAHome/MISRAC2012/tabid/196/Default.aspx)) |
|
||||||
on parts of openpilot that are safety relevant. We also perform software-in-the-loop, |
|
||||||
hardware-in-the-loop and in-vehicle tests before each software release. |
|
||||||
|
|
||||||
Following Hazard and Risk Analysis and FMEA, at a very high level, we have designed openpilot |
|
||||||
ensuring two main safety requirements. |
|
||||||
|
|
||||||
1. The driver must always be capable to immediately retake manual control of the vehicle, |
|
||||||
by stepping on either pedal or by pressing the cancel button. |
|
||||||
2. The vehicle must not alter its trajectory too quickly for the driver to safely |
|
||||||
react. This means that while the system is engaged, the actuators are constrained |
|
||||||
to operate within reasonable limits. |
|
||||||
|
|
||||||
For vehicle specific implementation of the safety concept, refer to `panda/board/safety/`. |
|
||||||
|
|
||||||
**Extra note**: comma.ai strongly discourages the use of openpilot forks with safety code either missing or |
|
||||||
not fully meeting the above requirements. |
|
@ -1,217 +0,0 @@ |
|||||||
import os |
|
||||||
import subprocess |
|
||||||
import sys |
|
||||||
|
|
||||||
AddOption('--test', |
|
||||||
action='store_true', |
|
||||||
help='build test files') |
|
||||||
|
|
||||||
AddOption('--asan', |
|
||||||
action='store_true', |
|
||||||
help='turn on ASAN') |
|
||||||
|
|
||||||
arch = subprocess.check_output(["uname", "-m"], encoding='utf8').rstrip() |
|
||||||
|
|
||||||
if arch == "aarch64": |
|
||||||
lenv = { |
|
||||||
"LD_LIBRARY_PATH": '/data/data/com.termux/files/usr/lib', |
|
||||||
"PATH": os.environ['PATH'], |
|
||||||
"ANDROID_DATA": os.environ['ANDROID_DATA'], |
|
||||||
"ANDROID_ROOT": os.environ['ANDROID_ROOT'], |
|
||||||
} |
|
||||||
|
|
||||||
cpppath = [ |
|
||||||
"#phonelibs/opencl/include", |
|
||||||
] |
|
||||||
libpath = [ |
|
||||||
"#phonelibs/snpe/aarch64-android-clang3.8", |
|
||||||
"/usr/lib", |
|
||||||
"/data/data/com.termux/files/usr/lib", |
|
||||||
"/system/vendor/lib64", |
|
||||||
"/system/comma/usr/lib", |
|
||||||
"#phonelibs/yaml-cpp/lib", |
|
||||||
"#phonelibs/nanovg", |
|
||||||
"#phonelibs/libyuv/lib", |
|
||||||
] |
|
||||||
|
|
||||||
cflags = ["-DQCOM", "-mcpu=cortex-a57"] |
|
||||||
cxxflags = ["-DQCOM", "-mcpu=cortex-a57"] |
|
||||||
|
|
||||||
rpath = ["/system/vendor/lib64"] |
|
||||||
else: |
|
||||||
lenv = { |
|
||||||
"PATH": "#external/bin:" + os.environ['PATH'], |
|
||||||
} |
|
||||||
cpppath = [ |
|
||||||
"#phonelibs/capnp-cpp/include", |
|
||||||
"#phonelibs/capnp-c/include", |
|
||||||
"#phonelibs/zmq/x64/include", |
|
||||||
"#external/tensorflow/include", |
|
||||||
] |
|
||||||
libpath = [ |
|
||||||
"#phonelibs/capnp-cpp/x64/lib", |
|
||||||
"#phonelibs/capnp-c/x64/lib", |
|
||||||
"#phonelibs/yaml-cpp/x64/lib", |
|
||||||
"#phonelibs/snpe/x86_64-linux-clang", |
|
||||||
"#phonelibs/zmq/x64/lib", |
|
||||||
"#phonelibs/libyuv/x64/lib", |
|
||||||
"#external/zmq/lib", |
|
||||||
"#external/tensorflow/lib", |
|
||||||
"#cereal", |
|
||||||
"#selfdrive/common", |
|
||||||
"/usr/lib", |
|
||||||
"/usr/local/lib", |
|
||||||
] |
|
||||||
|
|
||||||
rpath = ["phonelibs/capnp-cpp/x64/lib", |
|
||||||
"external/tensorflow/lib", |
|
||||||
"cereal", |
|
||||||
"selfdrive/common"] |
|
||||||
|
|
||||||
# allows shared libraries to work globally |
|
||||||
rpath = [os.path.join(os.getcwd(), x) for x in rpath] |
|
||||||
|
|
||||||
cflags = [] |
|
||||||
cxxflags = [] |
|
||||||
|
|
||||||
ccflags_asan = ["-fsanitize=address", "-fno-omit-frame-pointer"] if GetOption('asan') else [] |
|
||||||
ldflags_asan = ["-fsanitize=address"] if GetOption('asan') else [] |
|
||||||
|
|
||||||
# change pythonpath to this |
|
||||||
lenv["PYTHONPATH"] = Dir("#").path |
|
||||||
|
|
||||||
env = Environment( |
|
||||||
ENV=lenv, |
|
||||||
CCFLAGS=[ |
|
||||||
"-g", |
|
||||||
"-fPIC", |
|
||||||
"-O2", |
|
||||||
"-Werror=implicit-function-declaration", |
|
||||||
"-Werror=incompatible-pointer-types", |
|
||||||
"-Werror=int-conversion", |
|
||||||
"-Werror=return-type", |
|
||||||
"-Werror=format-extra-args", |
|
||||||
] + cflags + ccflags_asan, |
|
||||||
|
|
||||||
CPPPATH=cpppath + [ |
|
||||||
"#", |
|
||||||
"#selfdrive", |
|
||||||
"#phonelibs/bzip2", |
|
||||||
"#phonelibs/libyuv/include", |
|
||||||
"#phonelibs/yaml-cpp/include", |
|
||||||
"#phonelibs/openmax/include", |
|
||||||
"#phonelibs/json/src", |
|
||||||
"#phonelibs/json11", |
|
||||||
"#phonelibs/eigen", |
|
||||||
"#phonelibs/curl/include", |
|
||||||
"#phonelibs/opencv/include", |
|
||||||
"#phonelibs/libgralloc/include", |
|
||||||
"#phonelibs/android_frameworks_native/include", |
|
||||||
"#phonelibs/android_hardware_libhardware/include", |
|
||||||
"#phonelibs/android_system_core/include", |
|
||||||
"#phonelibs/linux/include", |
|
||||||
"#phonelibs/snpe/include", |
|
||||||
"#phonelibs/nanovg", |
|
||||||
"#selfdrive/common", |
|
||||||
"#selfdrive/camerad", |
|
||||||
"#selfdrive/camerad/include", |
|
||||||
"#selfdrive/loggerd/include", |
|
||||||
"#selfdrive/modeld", |
|
||||||
"#cereal/messaging", |
|
||||||
"#cereal", |
|
||||||
"#opendbc/can", |
|
||||||
], |
|
||||||
|
|
||||||
CC='clang', |
|
||||||
CXX='clang++', |
|
||||||
LINKFLAGS=ldflags_asan, |
|
||||||
|
|
||||||
RPATH=rpath, |
|
||||||
|
|
||||||
CFLAGS=["-std=gnu11"] + cflags, |
|
||||||
CXXFLAGS=["-std=c++14"] + cxxflags, |
|
||||||
LIBPATH=libpath + |
|
||||||
[ |
|
||||||
"#cereal", |
|
||||||
"#selfdrive/common", |
|
||||||
"#phonelibs", |
|
||||||
] |
|
||||||
) |
|
||||||
|
|
||||||
if os.environ.get('SCONS_CACHE'): |
|
||||||
CacheDir('/tmp/scons_cache') |
|
||||||
|
|
||||||
node_interval = 5 |
|
||||||
node_count = 0 |
|
||||||
def progress_function(node): |
|
||||||
global node_count |
|
||||||
node_count += node_interval |
|
||||||
sys.stderr.write("progress: %d\n" % node_count) |
|
||||||
|
|
||||||
if os.environ.get('SCONS_PROGRESS'): |
|
||||||
Progress(progress_function, interval=node_interval) |
|
||||||
|
|
||||||
SHARED = False |
|
||||||
|
|
||||||
def abspath(x): |
|
||||||
if arch == 'aarch64': |
|
||||||
pth = os.path.join("/data/pythonpath", x[0].path) |
|
||||||
env.Depends(pth, x) |
|
||||||
return File(pth) |
|
||||||
else: |
|
||||||
# rpath works elsewhere |
|
||||||
return x[0].path.rsplit("/", 1)[1][:-3] |
|
||||||
|
|
||||||
#zmq = 'zmq' |
|
||||||
# still needed for apks |
|
||||||
zmq = FindFile("libzmq.a", libpath) |
|
||||||
Export('env', 'arch', 'zmq', 'SHARED') |
|
||||||
|
|
||||||
# cereal and messaging are shared with the system |
|
||||||
SConscript(['cereal/SConscript']) |
|
||||||
if SHARED: |
|
||||||
cereal = abspath([File('cereal/libcereal_shared.so')]) |
|
||||||
messaging = abspath([File('cereal/libmessaging_shared.so')]) |
|
||||||
else: |
|
||||||
cereal = [File('#cereal/libcereal.a')] |
|
||||||
messaging = [File('#cereal/libmessaging.a')] |
|
||||||
Export('cereal', 'messaging') |
|
||||||
|
|
||||||
SConscript(['selfdrive/common/SConscript']) |
|
||||||
Import('_common', '_visionipc', '_gpucommon', '_gpu_libs') |
|
||||||
|
|
||||||
if SHARED: |
|
||||||
common, visionipc, gpucommon = abspath(common), abspath(visionipc), abspath(gpucommon) |
|
||||||
else: |
|
||||||
common = [_common, 'json'] |
|
||||||
visionipc = _visionipc |
|
||||||
gpucommon = [_gpucommon] + _gpu_libs |
|
||||||
|
|
||||||
Export('common', 'visionipc', 'gpucommon') |
|
||||||
|
|
||||||
SConscript(['opendbc/can/SConscript']) |
|
||||||
|
|
||||||
SConscript(['common/SConscript']) |
|
||||||
SConscript(['common/kalman/SConscript']) |
|
||||||
SConscript(['phonelibs/SConscript']) |
|
||||||
|
|
||||||
SConscript(['selfdrive/modeld/SConscript']) |
|
||||||
SConscript(['selfdrive/camerad/SConscript']) |
|
||||||
SConscript(['selfdrive/controls/lib/cluster/SConscript']) |
|
||||||
SConscript(['selfdrive/controls/lib/lateral_mpc/SConscript']) |
|
||||||
SConscript(['selfdrive/controls/lib/longitudinal_mpc/SConscript']) |
|
||||||
|
|
||||||
SConscript(['selfdrive/boardd/SConscript']) |
|
||||||
SConscript(['selfdrive/proclogd/SConscript']) |
|
||||||
|
|
||||||
SConscript(['selfdrive/ui/SConscript']) |
|
||||||
SConscript(['selfdrive/loggerd/SConscript']) |
|
||||||
|
|
||||||
if arch == "aarch64": |
|
||||||
SConscript(['selfdrive/logcatd/SConscript']) |
|
||||||
SConscript(['selfdrive/sensord/SConscript']) |
|
||||||
SConscript(['selfdrive/clocksd/SConscript']) |
|
||||||
|
|
||||||
SConscript(['selfdrive/locationd/SConscript']) |
|
||||||
|
|
||||||
# TODO: finish cereal, dbcbuilder, MPC |
|
@ -1,3 +0,0 @@ |
|||||||
version https://git-lfs.github.com/spec/v1 |
|
||||||
oid sha256:309b46b7c38f10da52b18b0340eb3c57b633558a9a27c3ca4116474969ebb456 |
|
||||||
size 84675 |
|
@ -1,3 +0,0 @@ |
|||||||
version https://git-lfs.github.com/spec/v1 |
|
||||||
oid sha256:43243741214262f5229cc5a05fc220000987c9af2e66243ac465ffa405ce043a |
|
||||||
size 2850138 |
|
@ -1,3 +0,0 @@ |
|||||||
version https://git-lfs.github.com/spec/v1 |
|
||||||
oid sha256:7e5868c567d76e080768f4fda323d42f4dd889bb6002f8f84864f5a2e51069b5 |
|
||||||
size 16726995 |
|
@ -1 +0,0 @@ |
|||||||
.sconsign.dblite |
|
@ -1,14 +0,0 @@ |
|||||||
gen |
|
||||||
node_modules |
|
||||||
package-lock.json |
|
||||||
*.pyc |
|
||||||
__pycache__ |
|
||||||
.*.swp |
|
||||||
.*.swo |
|
||||||
libcereal*.a |
|
||||||
libmessaging.* |
|
||||||
libmessaging_shared.* |
|
||||||
services.h |
|
||||||
.sconsign.dblite |
|
||||||
libcereal_shared.* |
|
||||||
|
|
@ -1,19 +0,0 @@ |
|||||||
from ubuntu:16.04 |
|
||||||
|
|
||||||
RUN apt-get update && apt-get install -y libzmq3-dev clang wget git autoconf libtool curl make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python-openssl |
|
||||||
|
|
||||||
RUN curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash |
|
||||||
ENV PATH="/root/.pyenv/bin:/root/.pyenv/shims:${PATH}" |
|
||||||
RUN pyenv install 3.7.3 |
|
||||||
RUN pyenv global 3.7.3 |
|
||||||
RUN pyenv rehash |
|
||||||
RUN pip3 install pyyaml==5.1.2 Cython==0.29.14 scons==3.1.1 pycapnp==0.6.4 |
|
||||||
|
|
||||||
WORKDIR /project/cereal |
|
||||||
COPY install_capnp.sh . |
|
||||||
RUN ./install_capnp.sh |
|
||||||
|
|
||||||
ENV PYTHONPATH=/project |
|
||||||
|
|
||||||
COPY . . |
|
||||||
RUN scons -c && scons -j$(nproc) |
|
@ -1,42 +0,0 @@ |
|||||||
What is cereal? |
|
||||||
---- |
|
||||||
|
|
||||||
cereal is both a messaging spec for robotics systems as well as generic high performance IPC pub sub messaging with a single publisher and multiple subscribers. |
|
||||||
|
|
||||||
Imagine this use case: |
|
||||||
* A sensor process reads gyro measurements directly from an IMU and publishes a sensorEvents packet |
|
||||||
* A calibration process subscribes to the sensorEvents packet to use the IMU |
|
||||||
* A localization process subscribes to the sensorEvents packet to use the IMU also |
|
||||||
|
|
||||||
|
|
||||||
Messaging Spec |
|
||||||
---- |
|
||||||
|
|
||||||
You'll find the message types in [log.capnp](log.capnp). It uses [Cap'n proto](https://capnproto.org/capnp-tool.html) and defines one struct called Event. |
|
||||||
|
|
||||||
All Events have a logMonoTime and a valid. Then a big union defines the packet type. |
|
||||||
|
|
||||||
|
|
||||||
Pub Sub Backends |
|
||||||
---- |
|
||||||
|
|
||||||
cereal supports two backends, one based on [zmq](https://zeromq.org/), the other called msgq, a custom pub sub based on shared memory that doesn't require the bytes to pass through the kernel. |
|
||||||
|
|
||||||
Example |
|
||||||
--- |
|
||||||
```python |
|
||||||
import cereal.messaging as messaging |
|
||||||
|
|
||||||
# in subscriber |
|
||||||
sm = messaging.SubMaster(['sensorEvents']) |
|
||||||
while 1: |
|
||||||
sm.update() |
|
||||||
print(sm['sensorEvents']) |
|
||||||
|
|
||||||
# in publisher |
|
||||||
pm = messaging.PubMaster(['sensorEvents']) |
|
||||||
dat = messaging.new_message() |
|
||||||
dat.init('sensorEvents', 1) |
|
||||||
dat.sensorEvents[0] = {"gyro": {"v": [0.1, -0.1, 0.1]}} |
|
||||||
pm.send('sensorEvents', dat) |
|
||||||
``` |
|
@ -1,68 +0,0 @@ |
|||||||
Import('env', 'arch', 'zmq') |
|
||||||
|
|
||||||
gen_dir = Dir('gen') |
|
||||||
messaging_dir = Dir('messaging') |
|
||||||
|
|
||||||
# TODO: remove src-prefix and cereal from command string. can we set working directory? |
|
||||||
env.Command(["gen/c/include/c++.capnp.h", "gen/c/include/java.capnp.h"], [], "mkdir -p " + gen_dir.path + "/c/include && touch $TARGETS") |
|
||||||
env.Command( |
|
||||||
['gen/c/car.capnp.c', 'gen/c/log.capnp.c', 'gen/c/car.capnp.h', 'gen/c/log.capnp.h'], |
|
||||||
['car.capnp', 'log.capnp'], |
|
||||||
'capnpc $SOURCES --src-prefix=cereal -o c:' + gen_dir.path + '/c/') |
|
||||||
env.Command( |
|
||||||
['gen/cpp/car.capnp.c++', 'gen/cpp/log.capnp.c++', 'gen/cpp/car.capnp.h', 'gen/cpp/log.capnp.h'], |
|
||||||
['car.capnp', 'log.capnp'], |
|
||||||
'capnpc $SOURCES --src-prefix=cereal -o c++:' + gen_dir.path + '/cpp/') |
|
||||||
import shutil |
|
||||||
if shutil.which('capnpc-java'): |
|
||||||
env.Command( |
|
||||||
['gen/java/Car.java', 'gen/java/Log.java'], |
|
||||||
['car.capnp', 'log.capnp'], |
|
||||||
'capnpc $SOURCES --src-prefix=cereal -o java:' + gen_dir.path + '/java/') |
|
||||||
|
|
||||||
# TODO: remove non shared cereal and messaging |
|
||||||
cereal_objects = env.SharedObject([ |
|
||||||
'gen/c/car.capnp.c', |
|
||||||
'gen/c/log.capnp.c', |
|
||||||
'gen/cpp/car.capnp.c++', |
|
||||||
'gen/cpp/log.capnp.c++', |
|
||||||
]) |
|
||||||
|
|
||||||
env.Library('cereal', cereal_objects) |
|
||||||
env.SharedLibrary('cereal_shared', cereal_objects, LIBS=["capnp_c"]) |
|
||||||
|
|
||||||
cereal_dir = Dir('.') |
|
||||||
services_h = env.Command( |
|
||||||
['services.h'], |
|
||||||
['service_list.yaml', 'services.py'], |
|
||||||
'python3 ' + cereal_dir.path + '/services.py > $TARGET') |
|
||||||
|
|
||||||
messaging_objects = env.SharedObject([ |
|
||||||
'messaging/messaging.cc', |
|
||||||
'messaging/impl_zmq.cc', |
|
||||||
'messaging/impl_msgq.cc', |
|
||||||
'messaging/msgq.cc', |
|
||||||
]) |
|
||||||
|
|
||||||
messaging_lib = env.Library('messaging', messaging_objects) |
|
||||||
Depends('messaging/impl_zmq.cc', services_h) |
|
||||||
|
|
||||||
# note, this rebuilds the deps shared, zmq is statically linked to make APK happy |
|
||||||
# TODO: get APK to load system zmq to remove the static link |
|
||||||
shared_lib_shared_lib = [zmq, 'm', 'stdc++'] + ["gnustl_shared"] if arch == "aarch64" else [zmq] |
|
||||||
env.SharedLibrary('messaging_shared', messaging_objects, LIBS=shared_lib_shared_lib) |
|
||||||
|
|
||||||
env.Program('messaging/bridge', ['messaging/bridge.cc'], LIBS=[messaging_lib, 'zmq']) |
|
||||||
Depends('messaging/bridge.cc', services_h) |
|
||||||
|
|
||||||
# different target? |
|
||||||
#env.Program('messaging/demo', ['messaging/demo.cc'], LIBS=[messaging_lib, 'zmq']) |
|
||||||
|
|
||||||
|
|
||||||
env.Command(['messaging/messaging_pyx.so'], |
|
||||||
[messaging_lib, 'messaging/messaging_pyx_setup.py', 'messaging/messaging_pyx.pyx', 'messaging/messaging.pxd'], |
|
||||||
"cd " + messaging_dir.path + " && python3 messaging_pyx_setup.py build_ext --inplace") |
|
||||||
|
|
||||||
|
|
||||||
if GetOption('test'): |
|
||||||
env.Program('messaging/test_runner', ['messaging/test_runner.cc', 'messaging/msgq_tests.cc'], LIBS=[messaging_lib]) |
|
@ -1,49 +0,0 @@ |
|||||||
import os |
|
||||||
import subprocess |
|
||||||
|
|
||||||
zmq = 'zmq' |
|
||||||
arch = subprocess.check_output(["uname", "-m"], encoding='utf8').rstrip() |
|
||||||
|
|
||||||
cereal_dir = Dir('.') |
|
||||||
|
|
||||||
cpppath = [ |
|
||||||
cereal_dir, |
|
||||||
'/usr/lib/include', |
|
||||||
] |
|
||||||
|
|
||||||
AddOption('--test', |
|
||||||
action='store_true', |
|
||||||
help='build test files') |
|
||||||
|
|
||||||
AddOption('--asan', |
|
||||||
action='store_true', |
|
||||||
help='turn on ASAN') |
|
||||||
|
|
||||||
ccflags_asan = ["-fsanitize=address", "-fno-omit-frame-pointer"] if GetOption('asan') else [] |
|
||||||
ldflags_asan = ["-fsanitize=address"] if GetOption('asan') else [] |
|
||||||
|
|
||||||
env = Environment( |
|
||||||
ENV=os.environ, |
|
||||||
CC='clang', |
|
||||||
CXX='clang++', |
|
||||||
CCFLAGS=[ |
|
||||||
"-g", |
|
||||||
"-fPIC", |
|
||||||
"-O2", |
|
||||||
"-Werror=implicit-function-declaration", |
|
||||||
"-Werror=incompatible-pointer-types", |
|
||||||
"-Werror=int-conversion", |
|
||||||
"-Werror=return-type", |
|
||||||
"-Werror=format-extra-args", |
|
||||||
] + ccflags_asan, |
|
||||||
LDFLAGS=ldflags_asan, |
|
||||||
LINKFLAGS=ldflags_asan, |
|
||||||
|
|
||||||
CFLAGS="-std=gnu11", |
|
||||||
CXXFLAGS="-std=c++14", |
|
||||||
CPPPATH=cpppath, |
|
||||||
) |
|
||||||
|
|
||||||
|
|
||||||
Export('env', 'zmq', 'arch') |
|
||||||
SConscript(['SConscript']) |
|
@ -1,8 +0,0 @@ |
|||||||
import os |
|
||||||
import capnp |
|
||||||
|
|
||||||
CEREAL_PATH = os.path.dirname(os.path.abspath(__file__)) |
|
||||||
capnp.remove_import_hook() |
|
||||||
|
|
||||||
log = capnp.load(os.path.join(CEREAL_PATH, "log.capnp")) |
|
||||||
car = capnp.load(os.path.join(CEREAL_PATH, "car.capnp")) |
|
@ -1,14 +0,0 @@ |
|||||||
pr: none |
|
||||||
|
|
||||||
pool: |
|
||||||
vmImage: 'ubuntu-16.04' |
|
||||||
|
|
||||||
steps: |
|
||||||
- script: | |
|
||||||
set -e |
|
||||||
docker build -t cereal . |
|
||||||
docker run cereal bash -c "scons --test --asan -j$(nproc) && messaging/test_runner" |
|
||||||
docker run cereal bash -c "ZMQ=1 python -m unittest discover ." |
|
||||||
docker run cereal bash -c "MSGQ=1 python -m unittest discover ." |
|
||||||
|
|
||||||
displayName: 'Run Tests' |
|
@ -1,468 +0,0 @@ |
|||||||
using Cxx = import "./include/c++.capnp"; |
|
||||||
$Cxx.namespace("cereal"); |
|
||||||
|
|
||||||
using Java = import "./include/java.capnp"; |
|
||||||
$Java.package("ai.comma.openpilot.cereal"); |
|
||||||
$Java.outerClassname("Car"); |
|
||||||
|
|
||||||
@0x8e2af1e708af8b8d; |
|
||||||
|
|
||||||
# ******* events causing controls state machine transition ******* |
|
||||||
|
|
||||||
struct CarEvent @0x9b1657f34caf3ad3 { |
|
||||||
name @0 :EventName; |
|
||||||
enable @1 :Bool; |
|
||||||
noEntry @2 :Bool; |
|
||||||
warning @3 :Bool; |
|
||||||
userDisable @4 :Bool; |
|
||||||
softDisable @5 :Bool; |
|
||||||
immediateDisable @6 :Bool; |
|
||||||
preEnable @7 :Bool; |
|
||||||
permanent @8 :Bool; |
|
||||||
|
|
||||||
enum EventName @0xbaa8c5d505f727de { |
|
||||||
# TODO: copy from error list |
|
||||||
canError @0; |
|
||||||
steerUnavailable @1; |
|
||||||
brakeUnavailable @2; |
|
||||||
gasUnavailable @3; |
|
||||||
wrongGear @4; |
|
||||||
doorOpen @5; |
|
||||||
seatbeltNotLatched @6; |
|
||||||
espDisabled @7; |
|
||||||
wrongCarMode @8; |
|
||||||
steerTempUnavailable @9; |
|
||||||
reverseGear @10; |
|
||||||
buttonCancel @11; |
|
||||||
buttonEnable @12; |
|
||||||
pedalPressed @13; |
|
||||||
cruiseDisabled @14; |
|
||||||
radarCanError @15; |
|
||||||
dataNeeded @16; |
|
||||||
speedTooLow @17; |
|
||||||
outOfSpace @18; |
|
||||||
overheat @19; |
|
||||||
calibrationIncomplete @20; |
|
||||||
calibrationInvalid @21; |
|
||||||
controlsMismatch @22; |
|
||||||
pcmEnable @23; |
|
||||||
pcmDisable @24; |
|
||||||
noTarget @25; |
|
||||||
radarFault @26; |
|
||||||
modelCommIssueDEPRECATED @27; |
|
||||||
brakeHold @28; |
|
||||||
parkBrake @29; |
|
||||||
manualRestart @30; |
|
||||||
lowSpeedLockout @31; |
|
||||||
plannerError @32; |
|
||||||
ipasOverride @33; |
|
||||||
debugAlert @34; |
|
||||||
steerTempUnavailableMute @35; |
|
||||||
resumeRequired @36; |
|
||||||
preDriverDistracted @37; |
|
||||||
promptDriverDistracted @38; |
|
||||||
driverDistracted @39; |
|
||||||
geofence @40; |
|
||||||
driverMonitorOn @41; |
|
||||||
driverMonitorOff @42; |
|
||||||
preDriverUnresponsive @43; |
|
||||||
promptDriverUnresponsive @44; |
|
||||||
driverUnresponsive @45; |
|
||||||
belowSteerSpeed @46; |
|
||||||
calibrationProgress @47; |
|
||||||
lowBattery @48; |
|
||||||
invalidGiraffeHonda @49; |
|
||||||
vehicleModelInvalid @50; |
|
||||||
controlsFailed @51; |
|
||||||
sensorDataInvalid @52; |
|
||||||
commIssue @53; |
|
||||||
tooDistracted @54; |
|
||||||
posenetInvalid @55; |
|
||||||
soundsUnavailable @56; |
|
||||||
preLaneChangeLeft @57; |
|
||||||
preLaneChangeRight @58; |
|
||||||
laneChange @59; |
|
||||||
invalidGiraffeToyota @60; |
|
||||||
internetConnectivityNeeded @61; |
|
||||||
communityFeatureDisallowed @62; |
|
||||||
lowMemory @63; |
|
||||||
stockAeb @64; |
|
||||||
ldw @65; |
|
||||||
carUnrecognized @66; |
|
||||||
radarCommIssue @67; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
# ******* main car state @ 100hz ******* |
|
||||||
# all speeds in m/s |
|
||||||
|
|
||||||
struct CarState { |
|
||||||
errorsDEPRECATED @0 :List(CarEvent.EventName); |
|
||||||
events @13 :List(CarEvent); |
|
||||||
|
|
||||||
# car speed |
|
||||||
vEgo @1 :Float32; # best estimate of speed |
|
||||||
aEgo @16 :Float32; # best estimate of acceleration |
|
||||||
vEgoRaw @17 :Float32; # unfiltered speed from CAN sensors |
|
||||||
yawRate @22 :Float32; # best estimate of yaw rate |
|
||||||
standstill @18 :Bool; |
|
||||||
wheelSpeeds @2 :WheelSpeeds; |
|
||||||
|
|
||||||
# gas pedal, 0.0-1.0 |
|
||||||
gas @3 :Float32; # this is user + computer |
|
||||||
gasPressed @4 :Bool; # this is user pedal only |
|
||||||
|
|
||||||
# brake pedal, 0.0-1.0 |
|
||||||
brake @5 :Float32; # this is user pedal only |
|
||||||
brakePressed @6 :Bool; # this is user pedal only |
|
||||||
brakeLights @19 :Bool; |
|
||||||
|
|
||||||
# steering wheel |
|
||||||
steeringAngle @7 :Float32; # deg |
|
||||||
steeringRate @15 :Float32; # deg/s |
|
||||||
steeringTorque @8 :Float32; # TODO: standardize units |
|
||||||
steeringTorqueEps @27 :Float32; # TODO: standardize units |
|
||||||
steeringPressed @9 :Bool; # if the user is using the steering wheel |
|
||||||
steeringRateLimited @29 :Bool; # if the torque is limited by the rate limiter |
|
||||||
stockAeb @30 :Bool; |
|
||||||
stockFcw @31 :Bool; |
|
||||||
|
|
||||||
# cruise state |
|
||||||
cruiseState @10 :CruiseState; |
|
||||||
|
|
||||||
# gear |
|
||||||
gearShifter @14 :GearShifter; |
|
||||||
|
|
||||||
# button presses |
|
||||||
buttonEvents @11 :List(ButtonEvent); |
|
||||||
leftBlinker @20 :Bool; |
|
||||||
rightBlinker @21 :Bool; |
|
||||||
genericToggle @23 :Bool; |
|
||||||
|
|
||||||
# lock info |
|
||||||
doorOpen @24 :Bool; |
|
||||||
seatbeltUnlatched @25 :Bool; |
|
||||||
canValid @26 :Bool; |
|
||||||
|
|
||||||
# clutch (manual transmission only) |
|
||||||
clutchPressed @28 :Bool; |
|
||||||
|
|
||||||
# which packets this state came from |
|
||||||
canMonoTimes @12: List(UInt64); |
|
||||||
|
|
||||||
struct WheelSpeeds { |
|
||||||
# optional wheel speeds |
|
||||||
fl @0 :Float32; |
|
||||||
fr @1 :Float32; |
|
||||||
rl @2 :Float32; |
|
||||||
rr @3 :Float32; |
|
||||||
} |
|
||||||
|
|
||||||
struct CruiseState { |
|
||||||
enabled @0 :Bool; |
|
||||||
speed @1 :Float32; |
|
||||||
available @2 :Bool; |
|
||||||
speedOffset @3 :Float32; |
|
||||||
standstill @4 :Bool; |
|
||||||
} |
|
||||||
|
|
||||||
enum GearShifter { |
|
||||||
unknown @0; |
|
||||||
park @1; |
|
||||||
drive @2; |
|
||||||
neutral @3; |
|
||||||
reverse @4; |
|
||||||
sport @5; |
|
||||||
low @6; |
|
||||||
brake @7; |
|
||||||
eco @8; |
|
||||||
manumatic @9; |
|
||||||
} |
|
||||||
|
|
||||||
# send on change |
|
||||||
struct ButtonEvent { |
|
||||||
pressed @0 :Bool; |
|
||||||
type @1 :Type; |
|
||||||
|
|
||||||
enum Type { |
|
||||||
unknown @0; |
|
||||||
leftBlinker @1; |
|
||||||
rightBlinker @2; |
|
||||||
accelCruise @3; |
|
||||||
decelCruise @4; |
|
||||||
cancel @5; |
|
||||||
altButton1 @6; |
|
||||||
altButton2 @7; |
|
||||||
altButton3 @8; |
|
||||||
setCruise @9; |
|
||||||
resumeCruise @10; |
|
||||||
gapAdjustCruise @11; |
|
||||||
} |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
# ******* radar state @ 20hz ******* |
|
||||||
|
|
||||||
struct RadarData @0x888ad6581cf0aacb { |
|
||||||
errors @0 :List(Error); |
|
||||||
points @1 :List(RadarPoint); |
|
||||||
|
|
||||||
# which packets this state came from |
|
||||||
canMonoTimes @2 :List(UInt64); |
|
||||||
|
|
||||||
enum Error { |
|
||||||
canError @0; |
|
||||||
fault @1; |
|
||||||
wrongConfig @2; |
|
||||||
} |
|
||||||
|
|
||||||
# similar to LiveTracks |
|
||||||
# is one timestamp valid for all? I think so |
|
||||||
struct RadarPoint { |
|
||||||
trackId @0 :UInt64; # no trackId reuse |
|
||||||
|
|
||||||
# these 3 are the minimum required |
|
||||||
dRel @1 :Float32; # m from the front bumper of the car |
|
||||||
yRel @2 :Float32; # m |
|
||||||
vRel @3 :Float32; # m/s |
|
||||||
|
|
||||||
# these are optional and valid if they are not NaN |
|
||||||
aRel @4 :Float32; # m/s^2 |
|
||||||
yvRel @5 :Float32; # m/s |
|
||||||
|
|
||||||
# some radars flag measurements VS estimates |
|
||||||
measured @6 :Bool; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
# ******* car controls @ 100hz ******* |
|
||||||
|
|
||||||
struct CarControl { |
|
||||||
# must be true for any actuator commands to work |
|
||||||
enabled @0 :Bool; |
|
||||||
active @7 :Bool; |
|
||||||
|
|
||||||
gasDEPRECATED @1 :Float32; |
|
||||||
brakeDEPRECATED @2 :Float32; |
|
||||||
steeringTorqueDEPRECATED @3 :Float32; |
|
||||||
|
|
||||||
actuators @6 :Actuators; |
|
||||||
|
|
||||||
cruiseControl @4 :CruiseControl; |
|
||||||
hudControl @5 :HUDControl; |
|
||||||
|
|
||||||
struct Actuators { |
|
||||||
# range from 0.0 - 1.0 |
|
||||||
gas @0: Float32; |
|
||||||
brake @1: Float32; |
|
||||||
# range from -1.0 - 1.0 |
|
||||||
steer @2: Float32; |
|
||||||
steerAngle @3: Float32; |
|
||||||
} |
|
||||||
|
|
||||||
struct CruiseControl { |
|
||||||
cancel @0: Bool; |
|
||||||
override @1: Bool; |
|
||||||
speedOverride @2: Float32; |
|
||||||
accelOverride @3: Float32; |
|
||||||
} |
|
||||||
|
|
||||||
struct HUDControl { |
|
||||||
speedVisible @0: Bool; |
|
||||||
setSpeed @1: Float32; |
|
||||||
lanesVisible @2: Bool; |
|
||||||
leadVisible @3: Bool; |
|
||||||
visualAlert @4: VisualAlert; |
|
||||||
audibleAlert @5: AudibleAlert; |
|
||||||
rightLaneVisible @6: Bool; |
|
||||||
leftLaneVisible @7: Bool; |
|
||||||
rightLaneDepart @8: Bool; |
|
||||||
leftLaneDepart @9: Bool; |
|
||||||
|
|
||||||
enum VisualAlert { |
|
||||||
# these are the choices from the Honda |
|
||||||
# map as good as you can for your car |
|
||||||
none @0; |
|
||||||
fcw @1; |
|
||||||
steerRequired @2; |
|
||||||
brakePressed @3; |
|
||||||
wrongGear @4; |
|
||||||
seatbeltUnbuckled @5; |
|
||||||
speedTooHigh @6; |
|
||||||
ldw @7; |
|
||||||
} |
|
||||||
|
|
||||||
enum AudibleAlert { |
|
||||||
# these are the choices from the Honda |
|
||||||
# map as good as you can for your car |
|
||||||
none @0; |
|
||||||
chimeEngage @1; |
|
||||||
chimeDisengage @2; |
|
||||||
chimeError @3; |
|
||||||
chimeWarning1 @4; |
|
||||||
chimeWarning2 @5; |
|
||||||
chimeWarningRepeat @6; |
|
||||||
chimePrompt @7; |
|
||||||
} |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
# ****** car param ****** |
|
||||||
|
|
||||||
struct CarParams { |
|
||||||
carName @0 :Text; |
|
||||||
carFingerprint @1 :Text; |
|
||||||
|
|
||||||
enableGasInterceptor @2 :Bool; |
|
||||||
enableCruise @3 :Bool; |
|
||||||
enableCamera @4 :Bool; |
|
||||||
enableDsu @5 :Bool; # driving support unit |
|
||||||
enableApgs @6 :Bool; # advanced parking guidance system |
|
||||||
|
|
||||||
minEnableSpeed @7 :Float32; |
|
||||||
minSteerSpeed @8 :Float32; |
|
||||||
safetyModel @9 :SafetyModel; |
|
||||||
safetyModelPassive @42 :SafetyModel = silent; |
|
||||||
safetyParam @10 :Int16; |
|
||||||
|
|
||||||
steerMaxBP @11 :List(Float32); |
|
||||||
steerMaxV @12 :List(Float32); |
|
||||||
gasMaxBP @13 :List(Float32); |
|
||||||
gasMaxV @14 :List(Float32); |
|
||||||
brakeMaxBP @15 :List(Float32); |
|
||||||
brakeMaxV @16 :List(Float32); |
|
||||||
|
|
||||||
# things about the car in the manual |
|
||||||
mass @17 :Float32; # [kg] running weight |
|
||||||
wheelbase @18 :Float32; # [m] distance from rear to front axle |
|
||||||
centerToFront @19 :Float32; # [m] GC distance to front axle |
|
||||||
steerRatio @20 :Float32; # [] ratio between front wheels and steering wheel angles |
|
||||||
steerRatioRear @21 :Float32; # [] rear steering ratio wrt front steering (usually 0) |
|
||||||
|
|
||||||
# things we can derive |
|
||||||
rotationalInertia @22 :Float32; # [kg*m2] body rotational inertia |
|
||||||
tireStiffnessFront @23 :Float32; # [N/rad] front tire coeff of stiff |
|
||||||
tireStiffnessRear @24 :Float32; # [N/rad] rear tire coeff of stiff |
|
||||||
|
|
||||||
longitudinalTuning @25 :LongitudinalPIDTuning; |
|
||||||
lateralTuning :union { |
|
||||||
pid @26 :LateralPIDTuning; |
|
||||||
indi @27 :LateralINDITuning; |
|
||||||
lqr @40 :LateralLQRTuning; |
|
||||||
} |
|
||||||
|
|
||||||
steerLimitAlert @28 :Bool; |
|
||||||
steerLimitTimer @47 :Float32; # time before steerLimitAlert is issued |
|
||||||
|
|
||||||
vEgoStopping @29 :Float32; # Speed at which the car goes into stopping state |
|
||||||
directAccelControl @30 :Bool; # Does the car have direct accel control or just gas/brake |
|
||||||
stoppingControl @31 :Bool; # Does the car allows full control even at lows speeds when stopping |
|
||||||
startAccel @32 :Float32; # Required acceleraton to overcome creep braking |
|
||||||
steerRateCost @33 :Float32; # Lateral MPC cost on steering rate |
|
||||||
steerControlType @34 :SteerControlType; |
|
||||||
radarOffCan @35 :Bool; # True when radar objects aren't visible on CAN |
|
||||||
|
|
||||||
steerActuatorDelay @36 :Float32; # Steering wheel actuator delay in seconds |
|
||||||
openpilotLongitudinalControl @37 :Bool; # is openpilot doing the longitudinal control? |
|
||||||
carVin @38 :Text; # VIN number queried during fingerprinting |
|
||||||
isPandaBlack @39: Bool; |
|
||||||
dashcamOnly @41: Bool; |
|
||||||
transmissionType @43 :TransmissionType; |
|
||||||
carFw @44 :List(CarFw); |
|
||||||
radarTimeStep @45: Float32 = 0.05; # time delta between radar updates, 20Hz is very standard |
|
||||||
communityFeature @46: Bool; # true if a community maintained feature is detected |
|
||||||
|
|
||||||
struct LateralPIDTuning { |
|
||||||
kpBP @0 :List(Float32); |
|
||||||
kpV @1 :List(Float32); |
|
||||||
kiBP @2 :List(Float32); |
|
||||||
kiV @3 :List(Float32); |
|
||||||
kf @4 :Float32; |
|
||||||
} |
|
||||||
|
|
||||||
struct LongitudinalPIDTuning { |
|
||||||
kpBP @0 :List(Float32); |
|
||||||
kpV @1 :List(Float32); |
|
||||||
kiBP @2 :List(Float32); |
|
||||||
kiV @3 :List(Float32); |
|
||||||
deadzoneBP @4 :List(Float32); |
|
||||||
deadzoneV @5 :List(Float32); |
|
||||||
} |
|
||||||
|
|
||||||
struct LateralINDITuning { |
|
||||||
outerLoopGain @0 :Float32; |
|
||||||
innerLoopGain @1 :Float32; |
|
||||||
timeConstant @2 :Float32; |
|
||||||
actuatorEffectiveness @3 :Float32; |
|
||||||
} |
|
||||||
|
|
||||||
struct LateralLQRTuning { |
|
||||||
scale @0 :Float32; |
|
||||||
ki @1 :Float32; |
|
||||||
dcGain @2 :Float32; |
|
||||||
|
|
||||||
# State space system |
|
||||||
a @3 :List(Float32); |
|
||||||
b @4 :List(Float32); |
|
||||||
c @5 :List(Float32); |
|
||||||
|
|
||||||
k @6 :List(Float32); # LQR gain |
|
||||||
l @7 :List(Float32); # Kalman gain |
|
||||||
} |
|
||||||
|
|
||||||
enum SafetyModel { |
|
||||||
silent @0; |
|
||||||
hondaNidec @1; |
|
||||||
toyota @2; |
|
||||||
elm327 @3; |
|
||||||
gm @4; |
|
||||||
hondaBoschGiraffe @5; |
|
||||||
ford @6; |
|
||||||
cadillac @7; |
|
||||||
hyundai @8; |
|
||||||
chrysler @9; |
|
||||||
tesla @10; |
|
||||||
subaru @11; |
|
||||||
gmPassive @12; |
|
||||||
mazda @13; |
|
||||||
nissan @14; |
|
||||||
volkswagen @15; |
|
||||||
toyotaIpas @16; |
|
||||||
allOutput @17; |
|
||||||
gmAscm @18; |
|
||||||
noOutput @19; # like silent but without silent CAN TXs |
|
||||||
hondaBoschHarness @20; |
|
||||||
volkswagenPq @21; |
|
||||||
} |
|
||||||
|
|
||||||
enum SteerControlType { |
|
||||||
torque @0; |
|
||||||
angle @1; |
|
||||||
} |
|
||||||
|
|
||||||
enum TransmissionType { |
|
||||||
unknown @0; |
|
||||||
automatic @1; |
|
||||||
manual @2; |
|
||||||
} |
|
||||||
|
|
||||||
struct CarFw { |
|
||||||
ecu @0 :Ecu; |
|
||||||
fwVersion @1 :Data; |
|
||||||
address @2: UInt32; |
|
||||||
subAddress @3: UInt8; |
|
||||||
} |
|
||||||
|
|
||||||
enum Ecu { |
|
||||||
eps @0; |
|
||||||
esp @1; |
|
||||||
fwdRadar @2; |
|
||||||
fwdCamera @3; |
|
||||||
engine @4; |
|
||||||
unknown @5; |
|
||||||
|
|
||||||
# Toyota only |
|
||||||
dsu @6; |
|
||||||
apgs @7; |
|
||||||
} |
|
||||||
} |
|
@ -1,26 +0,0 @@ |
|||||||
#!/bin/bash |
|
||||||
|
|
||||||
rm -r gen/ts |
|
||||||
rm -r gen/js |
|
||||||
|
|
||||||
mkdir gen/ts |
|
||||||
mkdir gen/js |
|
||||||
|
|
||||||
echo "Installing needed npm modules" |
|
||||||
npm i capnpc-ts capnp-ts |
|
||||||
|
|
||||||
capnpc -o node_modules/.bin/capnpc-ts:gen/ts log.capnp car.capnp |
|
||||||
capnpc -o node_modules/.bin/capnpc-ts:gen/ts car.capnp |
|
||||||
|
|
||||||
cat log.capnp | egrep '\([a-zA-Z]*\.[^\s]+\.[^s]+\)' | sed 's/^.*([a-zA-Z]*\.\([a-zA-Z.]*\)).*/\1/' | while read line |
|
||||||
do |
|
||||||
TOKEN=`echo $line | sed 's/\./_/g'` |
|
||||||
ROOT=`echo $line | sed 's/\..*$//g'` |
|
||||||
cat gen/ts/log.capnp.ts | grep '^import.*'${TOKEN} |
|
||||||
if [[ "$?" == "1" ]] |
|
||||||
then |
|
||||||
sed -i 's/^\(import {.*\)'${ROOT}'\(,*\) \(.*\)$/\1'${ROOT}', '${TOKEN}'\2 \3/' ./gen/ts/log.capnp.ts |
|
||||||
fi |
|
||||||
done |
|
||||||
|
|
||||||
tsc ./gen/ts/* --lib es2015 --outDir ./gen/js |
|
@ -1,26 +0,0 @@ |
|||||||
# Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors |
|
||||||
# Licensed under the MIT License: |
|
||||||
# |
|
||||||
# Permission is hereby granted, free of charge, to any person obtaining a copy |
|
||||||
# of this software and associated documentation files (the "Software"), to deal |
|
||||||
# in the Software without restriction, including without limitation the rights |
|
||||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|
||||||
# copies of the Software, and to permit persons to whom the Software is |
|
||||||
# furnished to do so, subject to the following conditions: |
|
||||||
# |
|
||||||
# The above copyright notice and this permission notice shall be included in |
|
||||||
# all copies or substantial portions of the Software. |
|
||||||
# |
|
||||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|
||||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|
||||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|
||||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|
||||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|
||||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |
|
||||||
# THE SOFTWARE. |
|
||||||
|
|
||||||
@0xbdf87d7bb8304e81; |
|
||||||
$namespace("capnp::annotations"); |
|
||||||
|
|
||||||
annotation namespace(file): Text; |
|
||||||
annotation name(field, enumerant, struct, enum, interface, method, param, group, union): Text; |
|
@ -1,28 +0,0 @@ |
|||||||
# Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors |
|
||||||
# Licensed under the MIT License: |
|
||||||
# |
|
||||||
# Permission is hereby granted, free of charge, to any person obtaining a copy |
|
||||||
# of this software and associated documentation files (the "Software"), to deal |
|
||||||
# in the Software without restriction, including without limitation the rights |
|
||||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|
||||||
# copies of the Software, and to permit persons to whom the Software is |
|
||||||
# furnished to do so, subject to the following conditions: |
|
||||||
# |
|
||||||
# The above copyright notice and this permission notice shall be included in |
|
||||||
# all copies or substantial portions of the Software. |
|
||||||
# |
|
||||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|
||||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|
||||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|
||||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|
||||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|
||||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |
|
||||||
# THE SOFTWARE. |
|
||||||
|
|
||||||
@0xc5f1af96651f70ea; |
|
||||||
|
|
||||||
annotation package @0x9ee4c8f803b3b596 (file) : Text; |
|
||||||
# Name of the package, such as "org.example.foo", in which the generated code will reside. |
|
||||||
|
|
||||||
annotation outerClassname @0x9b066bb4881f7cd3 (file) : Text; |
|
||||||
# Name of the outer class that will wrap the generated code. |
|
@ -1,40 +0,0 @@ |
|||||||
set -e |
|
||||||
echo "Installing capnp" |
|
||||||
|
|
||||||
cd /tmp |
|
||||||
VERSION=0.6.1 |
|
||||||
wget https://capnproto.org/capnproto-c++-${VERSION}.tar.gz |
|
||||||
tar xvf capnproto-c++-${VERSION}.tar.gz |
|
||||||
cd capnproto-c++-${VERSION} |
|
||||||
CXXFLAGS="-fPIC" ./configure |
|
||||||
|
|
||||||
make -j$(nproc) |
|
||||||
make install |
|
||||||
|
|
||||||
# manually build binaries statically |
|
||||||
g++ -std=gnu++11 -I./src -I./src -DKJ_HEADER_WARNINGS -DCAPNP_HEADER_WARNINGS -DCAPNP_INCLUDE_DIR=\"/usr/local/include\" -pthread -O2 -DNDEBUG -pthread -pthread -o .libs/capnp src/capnp/compiler/module-loader.o src/capnp/compiler/capnp.o ./.libs/libcapnpc.a ./.libs/libcapnp.a ./.libs/libkj.a -lpthread -pthread |
|
||||||
|
|
||||||
g++ -std=gnu++11 -I./src -I./src -DKJ_HEADER_WARNINGS -DCAPNP_HEADER_WARNINGS -DCAPNP_INCLUDE_DIR=\"/usr/local/include\" -pthread -O2 -DNDEBUG -pthread -pthread -o .libs/capnpc-c++ src/capnp/compiler/capnpc-c++.o ./.libs/libcapnp.a ./.libs/libkj.a -lpthread -pthread |
|
||||||
|
|
||||||
g++ -std=gnu++11 -I./src -I./src -DKJ_HEADER_WARNINGS -DCAPNP_HEADER_WARNINGS -DCAPNP_INCLUDE_DIR=\"/usr/local/include\" -pthread -O2 -DNDEBUG -pthread -pthread -o .libs/capnpc-capnp src/capnp/compiler/capnpc-capnp.o ./.libs/libcapnp.a ./.libs/libkj.a -lpthread -pthread |
|
||||||
|
|
||||||
cp .libs/capnp /usr/local/bin/ |
|
||||||
cp .libs/capnpc-c++ /usr/local/bin/ |
|
||||||
cp .libs/capnpc-capnp /usr/local/bin/ |
|
||||||
cp .libs/*.a /usr/local/lib |
|
||||||
|
|
||||||
cd /tmp |
|
||||||
echo "Installing c-capnp" |
|
||||||
git clone https://github.com/commaai/c-capnproto.git |
|
||||||
cd c-capnproto |
|
||||||
git submodule update --init --recursive |
|
||||||
autoreconf -f -i -s |
|
||||||
CXXFLAGS="-fPIC" ./configure |
|
||||||
make -j$(nproc) |
|
||||||
make install |
|
||||||
|
|
||||||
# manually build binaries statically |
|
||||||
gcc -fPIC -o .libs/capnpc-c compiler/capnpc-c.o compiler/schema.capnp.o compiler/str.o ./.libs/libcapnp_c.a |
|
||||||
|
|
||||||
cp .libs/capnpc-c /usr/local/bin/ |
|
||||||
cp .libs/*.a /usr/local/lib |
|
File diff suppressed because it is too large
Load Diff
@ -1,53 +0,0 @@ |
|||||||
using Cxx = import "./include/c++.capnp"; |
|
||||||
$Cxx.namespace("cereal"); |
|
||||||
|
|
||||||
using Java = import "./include/java.capnp"; |
|
||||||
$Java.package("ai.comma.openpilot.cereal"); |
|
||||||
$Java.outerClassname("Map"); |
|
||||||
|
|
||||||
@0xa086df597ef5d7a0; |
|
||||||
|
|
||||||
# Geometry |
|
||||||
struct Point { |
|
||||||
x @0: Float64; |
|
||||||
y @1: Float64; |
|
||||||
z @2: Float64; |
|
||||||
} |
|
||||||
|
|
||||||
struct PolyLine { |
|
||||||
points @0: List(Point); |
|
||||||
} |
|
||||||
|
|
||||||
# Map features |
|
||||||
struct Lane { |
|
||||||
id @0 :Text; |
|
||||||
|
|
||||||
leftBoundary @1 :LaneBoundary; |
|
||||||
rightBoundary @2 :LaneBoundary; |
|
||||||
|
|
||||||
leftAdjacentId @3 :Text; |
|
||||||
rightAdjacentId @4 :Text; |
|
||||||
|
|
||||||
inboundIds @5 :List(Text); |
|
||||||
outboundIds @6 :List(Text); |
|
||||||
|
|
||||||
struct LaneBoundary { |
|
||||||
polyLine @0 :PolyLine; |
|
||||||
startHeading @1 :Float32; # WRT north |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
# Map tiles |
|
||||||
struct TileSummary { |
|
||||||
version @0 :Text; |
|
||||||
updatedAt @1 :UInt64; # Millis since epoch |
|
||||||
|
|
||||||
level @2 :UInt8; |
|
||||||
x @3 :UInt16; |
|
||||||
y @4 :UInt16; |
|
||||||
} |
|
||||||
|
|
||||||
struct MapTile { |
|
||||||
summary @0 :TileSummary; |
|
||||||
lanes @1 :List(Lane); |
|
||||||
} |
|
@ -1,10 +0,0 @@ |
|||||||
demo |
|
||||||
bridge |
|
||||||
test_runner |
|
||||||
*.o |
|
||||||
*.os |
|
||||||
*.d |
|
||||||
*.a |
|
||||||
*.so |
|
||||||
messaging_pyx.cpp |
|
||||||
build/ |
|
@ -1,221 +0,0 @@ |
|||||||
# must be build with scons |
|
||||||
from .messaging_pyx import Context, Poller, SubSocket, PubSocket # pylint: disable=no-name-in-module, import-error |
|
||||||
from .messaging_pyx import MultiplePublishersError, MessagingError # pylint: disable=no-name-in-module, import-error |
|
||||||
import capnp |
|
||||||
|
|
||||||
assert MultiplePublishersError |
|
||||||
assert MessagingError |
|
||||||
|
|
||||||
from cereal import log |
|
||||||
from cereal.services import service_list |
|
||||||
|
|
||||||
# sec_since_boot is faster, but allow to run standalone too |
|
||||||
try: |
|
||||||
from common.realtime import sec_since_boot |
|
||||||
except ImportError: |
|
||||||
import time |
|
||||||
sec_since_boot = time.time |
|
||||||
print("Warning, using python time.time() instead of faster sec_since_boot") |
|
||||||
|
|
||||||
context = Context() |
|
||||||
|
|
||||||
def new_message(): |
|
||||||
dat = log.Event.new_message() |
|
||||||
dat.logMonoTime = int(sec_since_boot() * 1e9) |
|
||||||
dat.valid = True |
|
||||||
return dat |
|
||||||
|
|
||||||
def pub_sock(endpoint): |
|
||||||
sock = PubSocket() |
|
||||||
sock.connect(context, endpoint) |
|
||||||
return sock |
|
||||||
|
|
||||||
def sub_sock(endpoint, poller=None, addr="127.0.0.1", conflate=False, timeout=None): |
|
||||||
sock = SubSocket() |
|
||||||
addr = addr.encode('utf8') |
|
||||||
sock.connect(context, endpoint, addr, conflate) |
|
||||||
|
|
||||||
if timeout is not None: |
|
||||||
sock.setTimeout(timeout) |
|
||||||
|
|
||||||
if poller is not None: |
|
||||||
poller.registerSocket(sock) |
|
||||||
return sock |
|
||||||
|
|
||||||
|
|
||||||
def drain_sock_raw(sock, wait_for_one=False): |
|
||||||
"""Receive all message currently available on the queue""" |
|
||||||
ret = [] |
|
||||||
while 1: |
|
||||||
if wait_for_one and len(ret) == 0: |
|
||||||
dat = sock.receive() |
|
||||||
else: |
|
||||||
dat = sock.receive(non_blocking=True) |
|
||||||
|
|
||||||
if dat is None: |
|
||||||
break |
|
||||||
|
|
||||||
ret.append(dat) |
|
||||||
|
|
||||||
return ret |
|
||||||
|
|
||||||
def drain_sock(sock, wait_for_one=False): |
|
||||||
"""Receive all message currently available on the queue""" |
|
||||||
ret = [] |
|
||||||
while 1: |
|
||||||
if wait_for_one and len(ret) == 0: |
|
||||||
dat = sock.receive() |
|
||||||
else: |
|
||||||
dat = sock.receive(non_blocking=True) |
|
||||||
|
|
||||||
if dat is None: # Timeout hit |
|
||||||
break |
|
||||||
|
|
||||||
dat = log.Event.from_bytes(dat) |
|
||||||
ret.append(dat) |
|
||||||
|
|
||||||
return ret |
|
||||||
|
|
||||||
|
|
||||||
# TODO: print when we drop packets? |
|
||||||
def recv_sock(sock, wait=False): |
|
||||||
"""Same as drain sock, but only returns latest message. Consider using conflate instead.""" |
|
||||||
dat = None |
|
||||||
|
|
||||||
while 1: |
|
||||||
if wait and dat is None: |
|
||||||
rcv = sock.receive() |
|
||||||
else: |
|
||||||
rcv = sock.receive(non_blocking=True) |
|
||||||
|
|
||||||
if rcv is None: # Timeout hit |
|
||||||
break |
|
||||||
|
|
||||||
dat = rcv |
|
||||||
|
|
||||||
if dat is not None: |
|
||||||
dat = log.Event.from_bytes(dat) |
|
||||||
|
|
||||||
return dat |
|
||||||
|
|
||||||
def recv_one(sock): |
|
||||||
dat = sock.receive() |
|
||||||
if dat is not None: |
|
||||||
dat = log.Event.from_bytes(dat) |
|
||||||
return dat |
|
||||||
|
|
||||||
def recv_one_or_none(sock): |
|
||||||
dat = sock.receive(non_blocking=True) |
|
||||||
if dat is not None: |
|
||||||
dat = log.Event.from_bytes(dat) |
|
||||||
return dat |
|
||||||
|
|
||||||
def recv_one_retry(sock): |
|
||||||
"""Keep receiving until we get a message""" |
|
||||||
while True: |
|
||||||
dat = sock.receive() |
|
||||||
if dat is not None: |
|
||||||
return log.Event.from_bytes(dat) |
|
||||||
|
|
||||||
# TODO: This does not belong in messaging |
|
||||||
def get_one_can(logcan): |
|
||||||
while True: |
|
||||||
can = recv_one_retry(logcan) |
|
||||||
if len(can.can) > 0: |
|
||||||
return can |
|
||||||
|
|
||||||
class SubMaster(): |
|
||||||
def __init__(self, services, ignore_alive=None, addr="127.0.0.1"): |
|
||||||
self.poller = Poller() |
|
||||||
self.frame = -1 |
|
||||||
self.updated = {s : False for s in services} |
|
||||||
self.rcv_time = {s : 0. for s in services} |
|
||||||
self.rcv_frame = {s : 0 for s in services} |
|
||||||
self.alive = {s : False for s in services} |
|
||||||
self.sock = {} |
|
||||||
self.freq = {} |
|
||||||
self.data = {} |
|
||||||
self.logMonoTime = {} |
|
||||||
self.valid = {} |
|
||||||
|
|
||||||
if ignore_alive is not None: |
|
||||||
self.ignore_alive = ignore_alive |
|
||||||
else: |
|
||||||
self.ignore_alive = [] |
|
||||||
|
|
||||||
for s in services: |
|
||||||
if addr is not None: |
|
||||||
self.sock[s] = sub_sock(s, poller=self.poller, addr=addr, conflate=True) |
|
||||||
self.freq[s] = service_list[s].frequency |
|
||||||
|
|
||||||
data = new_message() |
|
||||||
try: |
|
||||||
data.init(s) |
|
||||||
except capnp.lib.capnp.KjException: |
|
||||||
# lists |
|
||||||
data.init(s, 0) |
|
||||||
|
|
||||||
self.data[s] = getattr(data, s) |
|
||||||
self.logMonoTime[s] = 0 |
|
||||||
self.valid[s] = data.valid |
|
||||||
|
|
||||||
def __getitem__(self, s): |
|
||||||
return self.data[s] |
|
||||||
|
|
||||||
def update(self, timeout=1000): |
|
||||||
msgs = [] |
|
||||||
for sock in self.poller.poll(timeout): |
|
||||||
msgs.append(recv_one_or_none(sock)) |
|
||||||
self.update_msgs(sec_since_boot(), msgs) |
|
||||||
|
|
||||||
def update_msgs(self, cur_time, msgs): |
|
||||||
# TODO: add optional input that specify the service to wait for |
|
||||||
self.frame += 1 |
|
||||||
self.updated = dict.fromkeys(self.updated, False) |
|
||||||
for msg in msgs: |
|
||||||
if msg is None: |
|
||||||
continue |
|
||||||
|
|
||||||
s = msg.which() |
|
||||||
self.updated[s] = True |
|
||||||
self.rcv_time[s] = cur_time |
|
||||||
self.rcv_frame[s] = self.frame |
|
||||||
self.data[s] = getattr(msg, s) |
|
||||||
self.logMonoTime[s] = msg.logMonoTime |
|
||||||
self.valid[s] = msg.valid |
|
||||||
|
|
||||||
for s in self.data: |
|
||||||
# arbitrary small number to avoid float comparison. If freq is 0, we can skip the check |
|
||||||
if self.freq[s] > 1e-5: |
|
||||||
# alive if delay is within 10x the expected frequency |
|
||||||
self.alive[s] = (cur_time - self.rcv_time[s]) < (10. / self.freq[s]) |
|
||||||
else: |
|
||||||
self.alive[s] = True |
|
||||||
|
|
||||||
def all_alive(self, service_list=None): |
|
||||||
if service_list is None: # check all |
|
||||||
service_list = self.alive.keys() |
|
||||||
return all(self.alive[s] for s in service_list if s not in self.ignore_alive) |
|
||||||
|
|
||||||
def all_valid(self, service_list=None): |
|
||||||
if service_list is None: # check all |
|
||||||
service_list = self.valid.keys() |
|
||||||
return all(self.valid[s] for s in service_list) |
|
||||||
|
|
||||||
def all_alive_and_valid(self, service_list=None): |
|
||||||
if service_list is None: # check all |
|
||||||
service_list = self.alive.keys() |
|
||||||
return self.all_alive(service_list=service_list) and self.all_valid(service_list=service_list) |
|
||||||
|
|
||||||
|
|
||||||
class PubMaster(): |
|
||||||
def __init__(self, services): |
|
||||||
self.sock = {} |
|
||||||
for s in services: |
|
||||||
self.sock[s] = pub_sock(s) |
|
||||||
|
|
||||||
def send(self, s, dat): |
|
||||||
# accept either bytes or capnp builder |
|
||||||
if not isinstance(dat, bytes): |
|
||||||
dat = dat.to_bytes() |
|
||||||
self.sock[s].send(dat) |
|
@ -1,64 +0,0 @@ |
|||||||
#include <iostream> |
|
||||||
#include <string> |
|
||||||
#include <cassert> |
|
||||||
#include <csignal> |
|
||||||
#include <map> |
|
||||||
|
|
||||||
typedef void (*sighandler_t)(int sig); |
|
||||||
|
|
||||||
#include "services.h" |
|
||||||
|
|
||||||
#include "impl_msgq.hpp" |
|
||||||
#include "impl_zmq.hpp" |
|
||||||
|
|
||||||
void sigpipe_handler(int sig) { |
|
||||||
assert(sig == SIGPIPE); |
|
||||||
std::cout << "SIGPIPE received" << std::endl; |
|
||||||
} |
|
||||||
|
|
||||||
static std::vector<std::string> get_services() { |
|
||||||
std::vector<std::string> name_list; |
|
||||||
|
|
||||||
for (const auto& it : services) { |
|
||||||
std::string name = it.name; |
|
||||||
if (name == "plusFrame" || name == "uiLayoutState") continue; |
|
||||||
name_list.push_back(name); |
|
||||||
} |
|
||||||
|
|
||||||
return name_list; |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
int main(void){ |
|
||||||
signal(SIGPIPE, (sighandler_t)sigpipe_handler); |
|
||||||
|
|
||||||
auto endpoints = get_services(); |
|
||||||
|
|
||||||
std::map<SubSocket*, PubSocket*> sub2pub; |
|
||||||
|
|
||||||
Context *zmq_context = new ZMQContext(); |
|
||||||
Context *msgq_context = new MSGQContext(); |
|
||||||
Poller *poller = new MSGQPoller(); |
|
||||||
|
|
||||||
for (auto endpoint: endpoints){ |
|
||||||
SubSocket * msgq_sock = new MSGQSubSocket(); |
|
||||||
msgq_sock->connect(msgq_context, endpoint, "127.0.0.1", false); |
|
||||||
poller->registerSocket(msgq_sock); |
|
||||||
|
|
||||||
PubSocket * zmq_sock = new ZMQPubSocket(); |
|
||||||
zmq_sock->connect(zmq_context, endpoint); |
|
||||||
|
|
||||||
sub2pub[msgq_sock] = zmq_sock; |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
while (true){ |
|
||||||
for (auto sub_sock : poller->poll(100)){ |
|
||||||
Message * msg = sub_sock->receive(); |
|
||||||
if (msg == NULL) continue; |
|
||||||
sub2pub[sub_sock]->sendMessage(msg); |
|
||||||
delete msg; |
|
||||||
} |
|
||||||
} |
|
||||||
return 0; |
|
||||||
} |
|
File diff suppressed because it is too large
Load Diff
@ -1,50 +0,0 @@ |
|||||||
#include <iostream> |
|
||||||
#include <cstddef> |
|
||||||
#include <chrono> |
|
||||||
#include <thread> |
|
||||||
#include <cassert> |
|
||||||
|
|
||||||
#include "messaging.hpp" |
|
||||||
#include "impl_zmq.hpp" |
|
||||||
|
|
||||||
#define MSGS 1e5 |
|
||||||
|
|
||||||
int main() { |
|
||||||
Context * c = Context::create(); |
|
||||||
SubSocket * sub_sock = SubSocket::create(c, "controlsState"); |
|
||||||
PubSocket * pub_sock = PubSocket::create(c, "controlsState"); |
|
||||||
|
|
||||||
char data[8]; |
|
||||||
|
|
||||||
Poller * poller = Poller::create({sub_sock}); |
|
||||||
|
|
||||||
auto start = std::chrono::steady_clock::now(); |
|
||||||
|
|
||||||
for (uint64_t i = 0; i < MSGS; i++){ |
|
||||||
*(uint64_t*)data = i; |
|
||||||
pub_sock->send(data, 8); |
|
||||||
|
|
||||||
auto r = poller->poll(100); |
|
||||||
|
|
||||||
for (auto p : r){ |
|
||||||
Message * m = p->receive(); |
|
||||||
uint64_t ii = *(uint64_t*)m->getData(); |
|
||||||
assert(i == ii); |
|
||||||
delete m; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
auto end = std::chrono::steady_clock::now(); |
|
||||||
double elapsed = std::chrono::duration_cast<std::chrono::nanoseconds>(end - start).count() / 1e9; |
|
||||||
double throughput = ((double) MSGS / (double) elapsed); |
|
||||||
std::cout << throughput << " msg/s" << std::endl; |
|
||||||
|
|
||||||
delete poller; |
|
||||||
delete sub_sock; |
|
||||||
delete pub_sock; |
|
||||||
delete c; |
|
||||||
|
|
||||||
|
|
||||||
return 0; |
|
||||||
} |
|
@ -1,30 +0,0 @@ |
|||||||
import time |
|
||||||
|
|
||||||
from messaging_pyx import Context, Poller, SubSocket, PubSocket # pylint: disable=no-name-in-module, import-error |
|
||||||
|
|
||||||
MSGS = 1e5 |
|
||||||
|
|
||||||
if __name__ == "__main__": |
|
||||||
c = Context() |
|
||||||
sub_sock = SubSocket() |
|
||||||
pub_sock = PubSocket() |
|
||||||
|
|
||||||
sub_sock.connect(c, "controlsState") |
|
||||||
pub_sock.connect(c, "controlsState") |
|
||||||
|
|
||||||
|
|
||||||
poller = Poller() |
|
||||||
poller.registerSocket(sub_sock) |
|
||||||
|
|
||||||
t = time.time() |
|
||||||
for i in range(int(MSGS)): |
|
||||||
bts = i.to_bytes(4, 'little') |
|
||||||
pub_sock.send(bts) |
|
||||||
|
|
||||||
for s in poller.poll(100): |
|
||||||
dat = s.receive() |
|
||||||
ii = int.from_bytes(dat, 'little') |
|
||||||
assert(i == ii) |
|
||||||
|
|
||||||
dt = time.time() - t |
|
||||||
print("%.1f msg/s" % (MSGS / dt)) |
|
@ -1,195 +0,0 @@ |
|||||||
#include <cassert> |
|
||||||
#include <cstring> |
|
||||||
#include <iostream> |
|
||||||
#include <cstdlib> |
|
||||||
#include <csignal> |
|
||||||
#include <cerrno> |
|
||||||
|
|
||||||
|
|
||||||
#include "impl_msgq.hpp" |
|
||||||
|
|
||||||
volatile sig_atomic_t msgq_do_exit = 0; |
|
||||||
|
|
||||||
void sig_handler(int signal) { |
|
||||||
assert(signal == SIGINT || signal == SIGTERM); |
|
||||||
msgq_do_exit = 1; |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
MSGQContext::MSGQContext() { |
|
||||||
} |
|
||||||
|
|
||||||
MSGQContext::~MSGQContext() { |
|
||||||
} |
|
||||||
|
|
||||||
void MSGQMessage::init(size_t sz) { |
|
||||||
size = sz; |
|
||||||
data = new char[size]; |
|
||||||
} |
|
||||||
|
|
||||||
void MSGQMessage::init(char * d, size_t sz) { |
|
||||||
size = sz; |
|
||||||
data = new char[size]; |
|
||||||
memcpy(data, d, size); |
|
||||||
} |
|
||||||
|
|
||||||
void MSGQMessage::takeOwnership(char * d, size_t sz) { |
|
||||||
size = sz; |
|
||||||
data = d; |
|
||||||
} |
|
||||||
|
|
||||||
void MSGQMessage::close() { |
|
||||||
if (size > 0){ |
|
||||||
delete[] data; |
|
||||||
} |
|
||||||
size = 0; |
|
||||||
} |
|
||||||
|
|
||||||
MSGQMessage::~MSGQMessage() { |
|
||||||
this->close(); |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
int MSGQSubSocket::connect(Context *context, std::string endpoint, std::string address, bool conflate){ |
|
||||||
assert(context); |
|
||||||
assert(address == "127.0.0.1"); |
|
||||||
|
|
||||||
q = new msgq_queue_t; |
|
||||||
int r = msgq_new_queue(q, endpoint.c_str(), DEFAULT_SEGMENT_SIZE); |
|
||||||
if (r != 0){ |
|
||||||
return r; |
|
||||||
} |
|
||||||
|
|
||||||
msgq_init_subscriber(q); |
|
||||||
|
|
||||||
if (conflate){ |
|
||||||
q->read_conflate = true; |
|
||||||
} |
|
||||||
|
|
||||||
timeout = -1; |
|
||||||
|
|
||||||
return 0; |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
Message * MSGQSubSocket::receive(bool non_blocking){ |
|
||||||
msgq_do_exit = 0; |
|
||||||
|
|
||||||
void (*prev_handler_sigint)(int); |
|
||||||
void (*prev_handler_sigterm)(int); |
|
||||||
if (!non_blocking){ |
|
||||||
prev_handler_sigint = std::signal(SIGINT, sig_handler); |
|
||||||
prev_handler_sigterm = std::signal(SIGTERM, sig_handler); |
|
||||||
} |
|
||||||
|
|
||||||
msgq_msg_t msg; |
|
||||||
|
|
||||||
MSGQMessage *r = NULL; |
|
||||||
|
|
||||||
int rc = msgq_msg_recv(&msg, q); |
|
||||||
|
|
||||||
// Hack to implement blocking read with a poller. Don't use this
|
|
||||||
while (!non_blocking && rc == 0 && msgq_do_exit == 0){ |
|
||||||
msgq_pollitem_t items[1]; |
|
||||||
items[0].q = q; |
|
||||||
|
|
||||||
int t = (timeout != -1) ? timeout : 100; |
|
||||||
|
|
||||||
int n = msgq_poll(items, 1, t); |
|
||||||
rc = msgq_msg_recv(&msg, q); |
|
||||||
|
|
||||||
// The poll indicated a message was ready, but the receive failed. Try again
|
|
||||||
if (n == 1 && rc == 0){ |
|
||||||
continue; |
|
||||||
} |
|
||||||
|
|
||||||
if (timeout != -1){ |
|
||||||
break; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
if (!non_blocking){ |
|
||||||
std::signal(SIGINT, prev_handler_sigint); |
|
||||||
std::signal(SIGTERM, prev_handler_sigterm); |
|
||||||
} |
|
||||||
|
|
||||||
errno = msgq_do_exit ? EINTR : 0; |
|
||||||
|
|
||||||
if (rc > 0){ |
|
||||||
if (msgq_do_exit){ |
|
||||||
msgq_msg_close(&msg); // Free unused message on exit
|
|
||||||
} else { |
|
||||||
r = new MSGQMessage; |
|
||||||
r->takeOwnership(msg.data, msg.size); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
return (Message*)r; |
|
||||||
} |
|
||||||
|
|
||||||
void MSGQSubSocket::setTimeout(int t){ |
|
||||||
timeout = t; |
|
||||||
} |
|
||||||
|
|
||||||
MSGQSubSocket::~MSGQSubSocket(){ |
|
||||||
if (q != NULL){ |
|
||||||
msgq_close_queue(q); |
|
||||||
delete q; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
int MSGQPubSocket::connect(Context *context, std::string endpoint){ |
|
||||||
assert(context); |
|
||||||
|
|
||||||
q = new msgq_queue_t; |
|
||||||
msgq_new_queue(q, endpoint.c_str(), DEFAULT_SEGMENT_SIZE); |
|
||||||
msgq_init_publisher(q); |
|
||||||
|
|
||||||
return 0; |
|
||||||
} |
|
||||||
|
|
||||||
int MSGQPubSocket::sendMessage(Message *message){ |
|
||||||
msgq_msg_t msg; |
|
||||||
msg.data = message->getData(); |
|
||||||
msg.size = message->getSize(); |
|
||||||
|
|
||||||
return msgq_msg_send(&msg, q); |
|
||||||
} |
|
||||||
|
|
||||||
int MSGQPubSocket::send(char *data, size_t size){ |
|
||||||
msgq_msg_t msg; |
|
||||||
msg.data = data; |
|
||||||
msg.size = size; |
|
||||||
|
|
||||||
return msgq_msg_send(&msg, q); |
|
||||||
} |
|
||||||
|
|
||||||
MSGQPubSocket::~MSGQPubSocket(){ |
|
||||||
if (q != NULL){ |
|
||||||
msgq_close_queue(q); |
|
||||||
delete q; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
void MSGQPoller::registerSocket(SubSocket * socket){ |
|
||||||
assert(num_polls + 1 < MAX_POLLERS); |
|
||||||
polls[num_polls].q = (msgq_queue_t*)socket->getRawSocket(); |
|
||||||
|
|
||||||
sockets.push_back(socket); |
|
||||||
num_polls++; |
|
||||||
} |
|
||||||
|
|
||||||
std::vector<SubSocket*> MSGQPoller::poll(int timeout){ |
|
||||||
std::vector<SubSocket*> r; |
|
||||||
|
|
||||||
msgq_poll(polls, num_polls, timeout); |
|
||||||
for (size_t i = 0; i < num_polls; i++){ |
|
||||||
if (polls[i].revents){ |
|
||||||
r.push_back(sockets[i]); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
return r; |
|
||||||
} |
|
@ -1,64 +0,0 @@ |
|||||||
#pragma once |
|
||||||
#include "messaging.hpp" |
|
||||||
#include "msgq.hpp" |
|
||||||
#include <zmq.h> |
|
||||||
#include <string> |
|
||||||
|
|
||||||
#define MAX_POLLERS 128 |
|
||||||
|
|
||||||
class MSGQContext : public Context { |
|
||||||
private: |
|
||||||
void * context = NULL; |
|
||||||
public: |
|
||||||
MSGQContext(); |
|
||||||
void * getRawContext() {return context;} |
|
||||||
~MSGQContext(); |
|
||||||
}; |
|
||||||
|
|
||||||
class MSGQMessage : public Message { |
|
||||||
private: |
|
||||||
char * data; |
|
||||||
size_t size; |
|
||||||
public: |
|
||||||
void init(size_t size); |
|
||||||
void init(char *data, size_t size); |
|
||||||
void takeOwnership(char *data, size_t size); |
|
||||||
size_t getSize(){return size;} |
|
||||||
char * getData(){return data;} |
|
||||||
void close(); |
|
||||||
~MSGQMessage(); |
|
||||||
}; |
|
||||||
|
|
||||||
class MSGQSubSocket : public SubSocket { |
|
||||||
private: |
|
||||||
msgq_queue_t * q = NULL; |
|
||||||
int timeout; |
|
||||||
public: |
|
||||||
int connect(Context *context, std::string endpoint, std::string address, bool conflate=false); |
|
||||||
void setTimeout(int timeout); |
|
||||||
void * getRawSocket() {return (void*)q;} |
|
||||||
Message *receive(bool non_blocking=false); |
|
||||||
~MSGQSubSocket(); |
|
||||||
}; |
|
||||||
|
|
||||||
class MSGQPubSocket : public PubSocket { |
|
||||||
private: |
|
||||||
msgq_queue_t * q = NULL; |
|
||||||
public: |
|
||||||
int connect(Context *context, std::string endpoint); |
|
||||||
int sendMessage(Message *message); |
|
||||||
int send(char *data, size_t size); |
|
||||||
~MSGQPubSocket(); |
|
||||||
}; |
|
||||||
|
|
||||||
class MSGQPoller : public Poller { |
|
||||||
private: |
|
||||||
std::vector<SubSocket*> sockets; |
|
||||||
msgq_pollitem_t polls[MAX_POLLERS]; |
|
||||||
size_t num_polls = 0; |
|
||||||
|
|
||||||
public: |
|
||||||
void registerSocket(SubSocket *socket); |
|
||||||
std::vector<SubSocket*> poll(int timeout); |
|
||||||
~MSGQPoller(){}; |
|
||||||
}; |
|
@ -1,155 +0,0 @@ |
|||||||
#include <cassert> |
|
||||||
#include <cstring> |
|
||||||
#include <iostream> |
|
||||||
#include <cstdlib> |
|
||||||
#include <cerrno> |
|
||||||
|
|
||||||
#include <zmq.h> |
|
||||||
|
|
||||||
#include "services.h" |
|
||||||
#include "impl_zmq.hpp" |
|
||||||
|
|
||||||
static int get_port(std::string endpoint) { |
|
||||||
int port = -1; |
|
||||||
for (const auto& it : services) { |
|
||||||
std::string name = it.name; |
|
||||||
if (name == endpoint) { |
|
||||||
port = it.port; |
|
||||||
break; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
assert(port >= 0); |
|
||||||
return port; |
|
||||||
} |
|
||||||
|
|
||||||
ZMQContext::ZMQContext() { |
|
||||||
context = zmq_ctx_new(); |
|
||||||
} |
|
||||||
|
|
||||||
ZMQContext::~ZMQContext() { |
|
||||||
zmq_ctx_term(context); |
|
||||||
} |
|
||||||
|
|
||||||
void ZMQMessage::init(size_t sz) { |
|
||||||
size = sz; |
|
||||||
data = new char[size]; |
|
||||||
} |
|
||||||
|
|
||||||
void ZMQMessage::init(char * d, size_t sz) { |
|
||||||
size = sz; |
|
||||||
data = new char[size]; |
|
||||||
memcpy(data, d, size); |
|
||||||
} |
|
||||||
|
|
||||||
void ZMQMessage::close() { |
|
||||||
if (size > 0){ |
|
||||||
delete[] data; |
|
||||||
} |
|
||||||
size = 0; |
|
||||||
} |
|
||||||
|
|
||||||
ZMQMessage::~ZMQMessage() { |
|
||||||
this->close(); |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
int ZMQSubSocket::connect(Context *context, std::string endpoint, std::string address, bool conflate){ |
|
||||||
sock = zmq_socket(context->getRawContext(), ZMQ_SUB); |
|
||||||
if (sock == NULL){ |
|
||||||
return -1; |
|
||||||
} |
|
||||||
|
|
||||||
zmq_setsockopt(sock, ZMQ_SUBSCRIBE, "", 0); |
|
||||||
|
|
||||||
if (conflate){ |
|
||||||
int arg = 1; |
|
||||||
zmq_setsockopt(sock, ZMQ_CONFLATE, &arg, sizeof(int)); |
|
||||||
} |
|
||||||
|
|
||||||
int reconnect_ivl = 500; |
|
||||||
zmq_setsockopt(sock, ZMQ_RECONNECT_IVL_MAX, &reconnect_ivl, sizeof(reconnect_ivl)); |
|
||||||
|
|
||||||
full_endpoint = "tcp://" + address + ":"; |
|
||||||
full_endpoint += std::to_string(get_port(endpoint)); |
|
||||||
|
|
||||||
return zmq_connect(sock, full_endpoint.c_str()); |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
Message * ZMQSubSocket::receive(bool non_blocking){ |
|
||||||
zmq_msg_t msg; |
|
||||||
assert(zmq_msg_init(&msg) == 0); |
|
||||||
|
|
||||||
int flags = non_blocking ? ZMQ_DONTWAIT : 0; |
|
||||||
int rc = zmq_msg_recv(&msg, sock, flags); |
|
||||||
Message *r = NULL; |
|
||||||
|
|
||||||
if (rc >= 0){ |
|
||||||
// Make a copy to ensure the data is aligned
|
|
||||||
r = new ZMQMessage; |
|
||||||
r->init((char*)zmq_msg_data(&msg), zmq_msg_size(&msg)); |
|
||||||
} |
|
||||||
|
|
||||||
zmq_msg_close(&msg); |
|
||||||
return r; |
|
||||||
} |
|
||||||
|
|
||||||
void ZMQSubSocket::setTimeout(int timeout){ |
|
||||||
zmq_setsockopt(sock, ZMQ_RCVTIMEO, &timeout, sizeof(int)); |
|
||||||
} |
|
||||||
|
|
||||||
ZMQSubSocket::~ZMQSubSocket(){ |
|
||||||
zmq_close(sock); |
|
||||||
} |
|
||||||
|
|
||||||
int ZMQPubSocket::connect(Context *context, std::string endpoint){ |
|
||||||
sock = zmq_socket(context->getRawContext(), ZMQ_PUB); |
|
||||||
if (sock == NULL){ |
|
||||||
return -1; |
|
||||||
} |
|
||||||
|
|
||||||
full_endpoint = "tcp://*:"; |
|
||||||
full_endpoint += std::to_string(get_port(endpoint)); |
|
||||||
|
|
||||||
return zmq_bind(sock, full_endpoint.c_str()); |
|
||||||
} |
|
||||||
|
|
||||||
int ZMQPubSocket::sendMessage(Message *message){ |
|
||||||
return zmq_send(sock, message->getData(), message->getSize(), ZMQ_DONTWAIT); |
|
||||||
} |
|
||||||
|
|
||||||
int ZMQPubSocket::send(char *data, size_t size){ |
|
||||||
return zmq_send(sock, data, size, ZMQ_DONTWAIT); |
|
||||||
} |
|
||||||
|
|
||||||
ZMQPubSocket::~ZMQPubSocket(){ |
|
||||||
zmq_close(sock); |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
void ZMQPoller::registerSocket(SubSocket * socket){ |
|
||||||
assert(num_polls + 1 < MAX_POLLERS); |
|
||||||
polls[num_polls].socket = socket->getRawSocket(); |
|
||||||
polls[num_polls].events = ZMQ_POLLIN; |
|
||||||
|
|
||||||
sockets.push_back(socket); |
|
||||||
num_polls++; |
|
||||||
} |
|
||||||
|
|
||||||
std::vector<SubSocket*> ZMQPoller::poll(int timeout){ |
|
||||||
std::vector<SubSocket*> r; |
|
||||||
|
|
||||||
int rc = zmq_poll(polls, num_polls, timeout); |
|
||||||
if (rc < 0){ |
|
||||||
return r; |
|
||||||
} |
|
||||||
|
|
||||||
for (size_t i = 0; i < num_polls; i++){ |
|
||||||
if (polls[i].revents){ |
|
||||||
r.push_back(sockets[i]); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
return r; |
|
||||||
} |
|
@ -1,63 +0,0 @@ |
|||||||
#pragma once |
|
||||||
#include "messaging.hpp" |
|
||||||
#include <zmq.h> |
|
||||||
#include <string> |
|
||||||
|
|
||||||
#define MAX_POLLERS 128 |
|
||||||
|
|
||||||
class ZMQContext : public Context { |
|
||||||
private: |
|
||||||
void * context = NULL; |
|
||||||
public: |
|
||||||
ZMQContext(); |
|
||||||
void * getRawContext() {return context;} |
|
||||||
~ZMQContext(); |
|
||||||
}; |
|
||||||
|
|
||||||
class ZMQMessage : public Message { |
|
||||||
private: |
|
||||||
char * data; |
|
||||||
size_t size; |
|
||||||
public: |
|
||||||
void init(size_t size); |
|
||||||
void init(char *data, size_t size); |
|
||||||
size_t getSize(){return size;} |
|
||||||
char * getData(){return data;} |
|
||||||
void close(); |
|
||||||
~ZMQMessage(); |
|
||||||
}; |
|
||||||
|
|
||||||
class ZMQSubSocket : public SubSocket { |
|
||||||
private: |
|
||||||
void * sock; |
|
||||||
std::string full_endpoint; |
|
||||||
public: |
|
||||||
int connect(Context *context, std::string endpoint, std::string address, bool conflate=false); |
|
||||||
void setTimeout(int timeout); |
|
||||||
void * getRawSocket() {return sock;} |
|
||||||
Message *receive(bool non_blocking=false); |
|
||||||
~ZMQSubSocket(); |
|
||||||
}; |
|
||||||
|
|
||||||
class ZMQPubSocket : public PubSocket { |
|
||||||
private: |
|
||||||
void * sock; |
|
||||||
std::string full_endpoint; |
|
||||||
public: |
|
||||||
int connect(Context *context, std::string endpoint); |
|
||||||
int sendMessage(Message *message); |
|
||||||
int send(char *data, size_t size); |
|
||||||
~ZMQPubSocket(); |
|
||||||
}; |
|
||||||
|
|
||||||
class ZMQPoller : public Poller { |
|
||||||
private: |
|
||||||
std::vector<SubSocket*> sockets; |
|
||||||
zmq_pollitem_t polls[MAX_POLLERS]; |
|
||||||
size_t num_polls = 0; |
|
||||||
|
|
||||||
public: |
|
||||||
void registerSocket(SubSocket *socket); |
|
||||||
std::vector<SubSocket*> poll(int timeout); |
|
||||||
~ZMQPoller(){}; |
|
||||||
}; |
|
@ -1,117 +0,0 @@ |
|||||||
#include "messaging.hpp" |
|
||||||
#include "impl_zmq.hpp" |
|
||||||
#include "impl_msgq.hpp" |
|
||||||
|
|
||||||
Context * Context::create(){ |
|
||||||
Context * c; |
|
||||||
if (std::getenv("ZMQ")){ |
|
||||||
c = new ZMQContext(); |
|
||||||
} else { |
|
||||||
c = new MSGQContext(); |
|
||||||
} |
|
||||||
return c; |
|
||||||
} |
|
||||||
|
|
||||||
SubSocket * SubSocket::create(){ |
|
||||||
SubSocket * s; |
|
||||||
if (std::getenv("ZMQ")){ |
|
||||||
s = new ZMQSubSocket(); |
|
||||||
} else { |
|
||||||
s = new MSGQSubSocket(); |
|
||||||
} |
|
||||||
return s; |
|
||||||
} |
|
||||||
|
|
||||||
SubSocket * SubSocket::create(Context * context, std::string endpoint){ |
|
||||||
SubSocket *s = SubSocket::create(); |
|
||||||
int r = s->connect(context, endpoint, "127.0.0.1"); |
|
||||||
|
|
||||||
if (r == 0) { |
|
||||||
return s; |
|
||||||
} else { |
|
||||||
delete s; |
|
||||||
return NULL; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
SubSocket * SubSocket::create(Context * context, std::string endpoint, std::string address){ |
|
||||||
SubSocket *s = SubSocket::create(); |
|
||||||
int r = s->connect(context, endpoint, address); |
|
||||||
|
|
||||||
if (r == 0) { |
|
||||||
return s; |
|
||||||
} else { |
|
||||||
delete s; |
|
||||||
return NULL; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
SubSocket * SubSocket::create(Context * context, std::string endpoint, std::string address, bool conflate){ |
|
||||||
SubSocket *s = SubSocket::create(); |
|
||||||
int r = s->connect(context, endpoint, address, conflate); |
|
||||||
|
|
||||||
if (r == 0) { |
|
||||||
return s; |
|
||||||
} else { |
|
||||||
delete s; |
|
||||||
return NULL; |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
PubSocket * PubSocket::create(){ |
|
||||||
PubSocket * s; |
|
||||||
if (std::getenv("ZMQ")){ |
|
||||||
s = new ZMQPubSocket(); |
|
||||||
} else { |
|
||||||
s = new MSGQPubSocket(); |
|
||||||
} |
|
||||||
return s; |
|
||||||
} |
|
||||||
|
|
||||||
// Convenience factory: create and bind/connect the publisher to `endpoint`.
// Returns NULL (and frees the socket) when the connect fails.
PubSocket * PubSocket::create(Context * context, std::string endpoint){
  PubSocket *sock = PubSocket::create();
  if (sock->connect(context, endpoint) == 0) {
    return sock;
  }
  delete sock;
  return NULL;
}
|
||||||
|
|
||||||
// Factory for a poller; backend chosen by the ZMQ env var,
// matching Context::create().
Poller * Poller::create(){
  Poller *poller;
  if (std::getenv("ZMQ") != NULL) {
    poller = new ZMQPoller();
  } else {
    poller = new MSGQPoller();
  }
  return poller;
}
|
||||||
|
|
||||||
// Convenience factory: create a poller with `sockets` pre-registered.
Poller * Poller::create(std::vector<SubSocket*> sockets){
  Poller *poller = Poller::create();
  for (SubSocket *sock : sockets) {
    poller->registerSocket(sock);
  }
  return poller;
}
|
||||||
|
|
||||||
extern "C" Context * messaging_context_create() { |
|
||||||
return Context::create(); |
|
||||||
} |
|
||||||
|
|
||||||
extern "C" SubSocket * messaging_subsocket_create(Context* context, const char* endpoint) { |
|
||||||
return SubSocket::create(context, std::string(endpoint)); |
|
||||||
} |
|
||||||
|
|
||||||
extern "C" PubSocket * messaging_pubsocket_create(Context* context, const char* endpoint) { |
|
||||||
return PubSocket::create(context, std::string(endpoint)); |
|
||||||
} |
|
||||||
|
|
||||||
extern "C" Poller * messaging_poller_create(SubSocket** sockets, int size) { |
|
||||||
std::vector<SubSocket*> socketsVec(sockets, sockets + size); |
|
||||||
return Poller::create(socketsVec); |
|
||||||
} |
|
@ -1,56 +0,0 @@ |
|||||||
#pragma once
#include <cstddef>
#include <vector>
#include <string>

// errno-style value reported when a second publisher claims an endpoint
#define MSG_MULTIPLE_PUBLISHERS 100

// Abstract messaging context. Concrete backends: ZMQContext / MSGQContext,
// selected at runtime by the factory.
class Context {
public:
  virtual void * getRawContext() = 0;
  static Context * create();
  virtual ~Context(){};
};

// One received (or to-be-sent) message; owns its data buffer.
class Message {
public:
  virtual void init(size_t size) = 0;
  virtual void init(char * data, size_t size) = 0;
  virtual void close() = 0;
  virtual size_t getSize() = 0;
  virtual char * getData() = 0;
  virtual ~Message(){};
};


// Subscriber end of a PUB/SUB channel.
class SubSocket {
public:
  // conflate=true keeps only the newest message.
  virtual int connect(Context *context, std::string endpoint, std::string address, bool conflate=false) = 0;
  virtual void setTimeout(int timeout) = 0;
  // Returns NULL when no message is available (non-blocking) or on interrupt.
  virtual Message *receive(bool non_blocking=false) = 0;
  virtual void * getRawSocket() = 0;
  static SubSocket * create();
  static SubSocket * create(Context * context, std::string endpoint);
  static SubSocket * create(Context * context, std::string endpoint, std::string address);
  static SubSocket * create(Context * context, std::string endpoint, std::string address, bool conflate);
  virtual ~SubSocket(){};
};

// Publisher end of a PUB/SUB channel.
class PubSocket {
public:
  virtual int connect(Context *context, std::string endpoint) = 0;
  virtual int sendMessage(Message *message) = 0;
  virtual int send(char *data, size_t size) = 0;
  static PubSocket * create();
  static PubSocket * create(Context * context, std::string endpoint);
  virtual ~PubSocket(){};
};

// Waits on multiple subscriber sockets at once.
class Poller {
public:
  virtual void registerSocket(SubSocket *socket) = 0;
  // Returns the sockets that have a message ready within `timeout` ms.
  virtual std::vector<SubSocket*> poll(int timeout) = 0;
  static Poller * create();
  static Poller * create(std::vector<SubSocket*> sockets);
  virtual ~Poller(){};
};
|
@ -1,39 +0,0 @@ |
|||||||
# distutils: language = c++
# cython: language_level=3
# Cython declarations mirroring the C++ messaging API in messaging.hpp.

from libcpp.string cimport string
from libcpp.vector cimport vector
from libcpp cimport bool


cdef extern from "messaging.hpp":
  cdef cppclass Context:
    @staticmethod
    Context * create()

  cdef cppclass Message:
    void init(size_t)
    void init(char *, size_t)
    void close()
    size_t getSize()
    char * getData()

  cdef cppclass SubSocket:
    @staticmethod
    SubSocket * create()
    int connect(Context *, string, string, bool)
    Message * receive(bool)
    void setTimeout(int)

  cdef cppclass PubSocket:
    @staticmethod
    PubSocket * create()
    int connect(Context *, string)
    int sendMessage(Message *)
    int send(char *, size_t)

  cdef cppclass Poller:
    @staticmethod
    Poller * create()
    void registerSocket(SubSocket *)
    vector[SubSocket*] poll(int) nogil
@ -1,151 +0,0 @@ |
|||||||
# distutils: language = c++ |
|
||||||
# cython: c_string_encoding=ascii, language_level=3 |
|
||||||
|
|
||||||
import sys |
|
||||||
from libcpp.string cimport string |
|
||||||
from libcpp cimport bool |
|
||||||
from libc cimport errno |
|
||||||
|
|
||||||
|
|
||||||
from messaging cimport Context as cppContext |
|
||||||
from messaging cimport SubSocket as cppSubSocket |
|
||||||
from messaging cimport PubSocket as cppPubSocket |
|
||||||
from messaging cimport Poller as cppPoller |
|
||||||
from messaging cimport Message as cppMessage |
|
||||||
|
|
||||||
|
|
||||||
class MessagingError(Exception):
  """Base error for all messaging-layer failures."""
  pass


class MultiplePublishersError(MessagingError):
  """Raised when another publisher already owns the endpoint (EADDRINUSE)."""
  pass
|
||||||
|
|
||||||
|
|
||||||
cdef class Context:
  # Owns the underlying C++ context (ZMQ or MSGQ, chosen at runtime).
  cdef cppContext * context

  def __cinit__(self):
    self.context = cppContext.create()

  def term(self):
    # Explicitly destroy the underlying context.
    del self.context
    self.context = NULL

  def __dealloc__(self):
    # Deleting the context will hang if sockets are still active
    # TODO: Figure out a way to make sure the context is closed last
    # del self.context
    pass
|
||||||
|
|
||||||
|
|
||||||
cdef class Poller:
  cdef cppPoller * poller
  # Python-level references keep registered sockets alive for the poller's lifetime.
  cdef list sub_sockets

  def __cinit__(self):
    self.sub_sockets = []
    self.poller = cppPoller.create()

  def __dealloc__(self):
    del self.poller

  def registerSocket(self, SubSocket socket):
    self.sub_sockets.append(socket)
    self.poller.registerSocket(socket.socket)

  def poll(self, timeout):
    cdef int t = timeout

    # The GIL is released while blocked in the C++ poll.
    with nogil:
      result = self.poller.poll(t)

    ready = []
    for s in result:
      wrapper = SubSocket()
      wrapper.setPtr(s)
      ready.append(wrapper)

    return ready
|
||||||
|
|
||||||
cdef class SubSocket:
  # Wraps a C++ SubSocket. `is_owner` records whether this wrapper must
  # delete the underlying socket (pointers adopted via setPtr are not owned).
  cdef cppSubSocket * socket
  cdef bool is_owner

  def __cinit__(self):
    self.socket = cppSubSocket.create()
    self.is_owner = True

    if self.socket == NULL:
      raise MessagingError

  def __dealloc__(self):
    if self.is_owner:
      del self.socket

  cdef setPtr(self, cppSubSocket * ptr):
    # Adopt a borrowed pointer (used by Poller.poll); drop any socket we own.
    if self.is_owner:
      del self.socket

    self.is_owner = False
    self.socket = ptr

  def connect(self, Context context, string endpoint, string address=b"127.0.0.1", bool conflate=False):
    if self.socket.connect(context.context, endpoint, address, conflate) != 0:
      if errno.errno == errno.EADDRINUSE:
        raise MultiplePublishersError
      else:
        raise MessagingError

  def setTimeout(self, int timeout):
    self.socket.setTimeout(timeout)

  def receive(self, bool non_blocking=False):
    msg = self.socket.receive(non_blocking)

    if msg == NULL:
      # If a blocking read returns no message check errno if SIGINT was caught in the C++ code
      if errno.errno == errno.EINTR:
        print("SIGINT received, exiting")
        sys.exit(1)

      return None

    sz = msg.getSize()
    data = msg.getData()[:sz]
    del msg

    return data
|
||||||
|
|
||||||
|
|
||||||
cdef class PubSocket:
  # Wraps a C++ PubSocket; always owns the underlying socket.
  cdef cppPubSocket * socket

  def __cinit__(self):
    self.socket = cppPubSocket.create()
    if self.socket == NULL:
      raise MessagingError

  def __dealloc__(self):
    del self.socket

  def connect(self, Context context, string endpoint):
    if self.socket.connect(context.context, endpoint) != 0:
      if errno.errno == errno.EADDRINUSE:
        raise MultiplePublishersError
      else:
        raise MessagingError

  def send(self, string data):
    length = len(data)
    # A short write signals an error; EADDRINUSE means we lost the endpoint
    # to a newer publisher.
    if self.socket.send(<char*>data.c_str(), length) != length:
      if errno.errno == errno.EADDRINUSE:
        raise MultiplePublishersError
      else:
        raise MessagingError
|
@ -1,56 +0,0 @@ |
|||||||
import os |
|
||||||
import subprocess |
|
||||||
import sysconfig |
|
||||||
from distutils.core import Extension, setup # pylint: disable=import-error,no-name-in-module |
|
||||||
|
|
||||||
from Cython.Build import cythonize |
|
||||||
from Cython.Distutils import build_ext |
|
||||||
|
|
||||||
|
|
||||||
def get_ext_filename_without_platform_suffix(filename):
  """Strip the interpreter/platform tag from an extension filename.

  E.g. turns "mod.cpython-38-x86_64-linux-gnu.so" into "mod.so". If the
  filename does not carry the current interpreter's EXT_SUFFIX it is
  returned unchanged.
  """
  name, ext = os.path.splitext(filename)
  ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')

  # Nothing to strip: the suffix is already just the bare extension.
  if ext_suffix == ext:
    return filename

  # The platform tag is EXT_SUFFIX minus its trailing extension.
  ext_suffix = ext_suffix.replace(ext, '')
  idx = name.find(ext_suffix)

  return filename if idx == -1 else name[:idx] + ext
|
||||||
|
|
||||||
|
|
||||||
class BuildExtWithoutPlatformSuffix(build_ext):
  """build_ext variant that drops the platform tag from output filenames."""

  def get_ext_filename(self, ext_name):
    return get_ext_filename_without_platform_suffix(super().get_ext_filename(ext_name))
|
||||||
|
|
||||||
|
|
||||||
# Build configuration for the messaging Cython extension.
sourcefiles = ['messaging_pyx.pyx']
extra_compile_args = ["-std=c++11"]
libraries = ['zmq']
# aarch64 means we are building on-device (NEOS toolchain quirks below).
ARCH = subprocess.check_output(["uname", "-m"], encoding='utf8').rstrip()  # pylint: disable=unexpected-keyword-arg

if ARCH == "aarch64":
  extra_compile_args += ["-Wno-deprecated-register"]
  libraries += ['gnustl_shared']

# Fix: this package builds the messaging bindings, not a CAN parser; the old
# name was copy-pasted from opendbc's setup.py.
setup(name='messaging',
      cmdclass={'build_ext': BuildExtWithoutPlatformSuffix},
      ext_modules=cythonize(
        Extension(
          "messaging_pyx",
          language="c++",
          sources=sourcefiles,
          extra_compile_args=extra_compile_args,
          libraries=libraries,
          extra_objects=[
            os.path.join(os.path.dirname(os.path.realpath(__file__)), '../', 'libmessaging.a'),
          ]
        )
      ),
      nthreads=4,
)
|
@ -1,449 +0,0 @@ |
|||||||
#include <iostream> |
|
||||||
#include <cassert> |
|
||||||
#include <cerrno> |
|
||||||
#include <cmath> |
|
||||||
#include <cstring> |
|
||||||
#include <cstdint> |
|
||||||
#include <chrono> |
|
||||||
#include <algorithm> |
|
||||||
#include <cstdlib> |
|
||||||
#include <csignal> |
|
||||||
#include <random> |
|
||||||
|
|
||||||
#include <poll.h> |
|
||||||
#include <sys/ioctl.h> |
|
||||||
#include <sys/mman.h> |
|
||||||
#include <sys/stat.h> |
|
||||||
#include <sys/types.h> |
|
||||||
#include <sys/syscall.h> |
|
||||||
#include <fcntl.h> |
|
||||||
#include <unistd.h> |
|
||||||
|
|
||||||
#include <stdio.h> |
|
||||||
|
|
||||||
#include "msgq.hpp" |
|
||||||
|
|
||||||
// No-op SIGUSR2 handler: its only job is to interrupt nanosleep in
// msgq_poll when a writer signals that data is available.
void sigusr2_handler(int signal) {
  assert(signal == SIGUSR2);
}
|
||||||
|
|
||||||
// Build a per-reader/writer uid: random upper 32 bits, thread id in the
// lower 32 bits so the owner thread can later be signalled (thread_signal).
uint64_t msgq_get_uid(void){
  std::random_device rd("/dev/urandom");
  std::uniform_int_distribution<uint64_t> dist(0, std::numeric_limits<uint32_t>::max());

  return (dist(rd) << 32) | syscall(SYS_gettid);
}
|
||||||
|
|
||||||
// Allocate an owned buffer of `size` bytes for `msg`.
// Returns 0 on success, -1 when the allocation fails.
int msgq_msg_init_size(msgq_msg_t * msg, size_t size){
  msg->size = size;
  msg->data = new(std::nothrow) char[size];
  return (msg->data == NULL) ? -1 : 0;
}
|
||||||
|
|
||||||
|
|
||||||
// Allocate an owned buffer and fill it with a copy of `data`.
// Returns 0 on success, -1 when the allocation fails.
int msgq_msg_init_data(msgq_msg_t * msg, char * data, size_t size) {
  int r = msgq_msg_init_size(msg, size);
  if (r == 0) {
    memcpy(msg->data, data, size);
  }
  return r;
}
|
||||||
|
|
||||||
// Release the message's owned buffer.
// Fix: also clear the data pointer so a double close or use-after-close
// fails on a NULL pointer instead of double-freeing / reading freed memory.
int msgq_msg_close(msgq_msg_t * msg){
  if (msg->size > 0) {
    delete[] msg->data;
    msg->data = NULL;
  }

  msg->size = 0;

  return 0;
}
|
||||||
|
|
||||||
// Re-validate this reader and fast-forward its read pointer to the writer's
// current position, discarding anything unread.
void msgq_reset_reader(msgq_queue_t * q){
  int id = q->reader_id;
  q->read_valids[id]->store(true);
  q->read_pointers[id]->store(*q->write_pointer);
}
|
||||||
|
|
||||||
// Busy-wait until at least one reader has registered on the queue.
void msgq_wait_for_subscriber(msgq_queue_t *q){
  while (*q->num_readers == 0) {
    // spin
  }
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
// Create (or open) the shared-memory segment backing a queue and wire up
// the queue struct's atomics to the mapped header.
// Returns 0 on success, -1 on any failure.
// Fixes vs. original: the fd was leaked when ftruncate failed, and mmap
// failure was tested against NULL instead of MAP_FAILED.
int msgq_new_queue(msgq_queue_t * q, const char * path, size_t size){
  assert(size < 0xFFFFFFFF); // Buffer must be smaller than 2^32 bytes

  // Writers wake sleeping readers with SIGUSR2 (see msgq_poll).
  std::signal(SIGUSR2, sigusr2_handler);

  std::string full_path = std::string("/dev/shm/") + path;

  int fd = open(full_path.c_str(), O_RDWR | O_CREAT, 0777);
  if (fd < 0)
    return -1;

  int rc = ftruncate(fd, size + sizeof(msgq_header_t));
  if (rc < 0){
    close(fd);  // fix: don't leak the descriptor on failure
    return -1;
  }

  char * mem = (char*)mmap(NULL, size + sizeof(msgq_header_t), PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
  close(fd);

  // fix: mmap reports failure with MAP_FAILED ((void *)-1), not NULL
  if (mem == MAP_FAILED)
    return -1;

  q->mmap_p = mem;

  msgq_header_t *header = (msgq_header_t *)mem;

  // Point the queue's atomics into the shared header segment.
  q->num_readers = reinterpret_cast<std::atomic<uint64_t>*>(&header->num_readers);
  q->write_pointer = reinterpret_cast<std::atomic<uint64_t>*>(&header->write_pointer);
  q->write_uid = reinterpret_cast<std::atomic<uint64_t>*>(&header->write_uid);

  for (size_t i = 0; i < NUM_READERS; i++){
    q->read_pointers[i] = reinterpret_cast<std::atomic<uint64_t>*>(&header->read_pointers[i]);
    q->read_valids[i] = reinterpret_cast<std::atomic<uint64_t>*>(&header->read_valids[i]);
    q->read_uids[i] = reinterpret_cast<std::atomic<uint64_t>*>(&header->read_uids[i]);
  }

  q->data = mem + sizeof(msgq_header_t);
  q->size = size;
  q->reader_id = -1;

  q->endpoint = path;
  q->read_conflate = false;

  return 0;
}
|
||||||
|
|
||||||
// Unmap the queue's shared-memory segment (header + data) if mapped.
void msgq_close_queue(msgq_queue_t *q){
  if (q->mmap_p != NULL) {
    munmap(q->mmap_p, q->size + sizeof(msgq_header_t));
  }
}
|
||||||
|
|
||||||
|
|
||||||
// Claim the queue as its (single) publisher: record our uid in the shared
// header and evict every currently-registered reader.
void msgq_init_publisher(msgq_queue_t * q) {
  uint64_t uid = msgq_get_uid();

  *q->write_uid = uid;
  *q->num_readers = 0;

  for (size_t i = 0; i < NUM_READERS; i++){
    *q->read_valids[i] = false;
    *q->read_uids[i] = 0;
  }

  q->write_uid_local = uid;
}
|
||||||
|
|
||||||
// Deliver SIGUSR2 to one thread to wake it out of msgq_poll's nanosleep.
static void thread_signal(uint32_t tid) {
#ifndef SYS_tkill
  // TODO: this won't work for multithreaded programs
  kill(tid, SIGUSR2);
#else
  syscall(SYS_tkill, tid, SIGUSR2);
#endif
}
|
||||||
|
|
||||||
// Register this thread as a reader on the queue, claiming a slot with a CAS
// on the shared reader count. If all slots are taken, every subscriber is
// evicted (waking them via signal) so stale readers get kicked out.
void msgq_init_subscriber(msgq_queue_t * q) {
  assert(q != NULL);
  assert(q->num_readers != NULL);

  uint64_t uid = msgq_get_uid();

  while (true){
    uint64_t readers_now = *q->num_readers;
    uint64_t readers_next = readers_now + 1;

    if (readers_next > NUM_READERS){
      // No more slots available. Reset all subscribers to kick out inactive ones
      std::cout << "Warning, evicting all subscribers!" << std::endl;
      *q->num_readers = 0;

      for (size_t i = 0; i < NUM_READERS; i++){
        *q->read_valids[i] = false;

        uint64_t old_uid = *q->read_uids[i];
        *q->read_uids[i] = 0;

        // Wake up reader in case they are in a poll
        thread_signal(old_uid & 0xFFFFFFFF);
      }

      continue;
    }

    // The CAS resolves the race where two subscribers register at once.
    if (std::atomic_compare_exchange_strong(q->num_readers,
                                            &readers_now,
                                            readers_next)){
      q->reader_id = readers_now;
      q->read_uid_local = uid;

      // Start invalid: the first read will sync the read pointer to the
      // write pointer via msgq_reset_reader.
      *q->read_valids[readers_now] = false;
      *q->read_pointers[readers_now] = 0;
      *q->read_uids[readers_now] = uid;
      break;
    }
  }

  msgq_reset_reader(q);
}
|
||||||
|
|
||||||
// Publish one message into the ring buffer. Returns the message size, or -1
// (errno = EADDRINUSE) when a newer publisher has taken over the queue.
int msgq_msg_send(msgq_msg_t * msg, msgq_queue_t *q){
  // Die if we are no longer the active publisher
  if (q->write_uid_local != *q->write_uid){
    std::cout << "Killing old publisher: " << q->endpoint << std::endl;
    errno = EADDRINUSE;
    return -1;
  }

  uint64_t msg_bytes = ALIGN(msg->size + sizeof(int64_t));

  // We need to fit at least three messages in the queue,
  // then we can always safely access the last message
  assert(3 * msg_bytes <= q->size);

  uint64_t num_readers = *q->num_readers;

  uint32_t write_cycles, write_pointer;
  UNPACK64(write_cycles, write_pointer, *q->write_pointer);

  char *dst = q->data + write_pointer;  // add base offset

  // Check remaining space; always leave room for the next message's
  // wraparound tag, including alignment.
  int64_t remaining_space = q->size - write_pointer - msg_bytes - sizeof(int64_t);
  if (remaining_space <= 0){
    // Write -1 size tag indicating wraparound
    *(int64_t*)dst = -1;

    // Invalidate all readers that are beyond the write pointer
    // TODO: should we handle the case where a new reader shows up while this is running?
    for (uint64_t i = 0; i < num_readers; i++){
      uint64_t read_pointer = *q->read_pointers[i];
      uint64_t read_cycles = read_pointer >> 32;
      read_pointer &= 0xFFFFFFFF;

      if ((read_pointer > write_pointer) && (read_cycles != write_cycles)) {
        *q->read_valids[i] = false;
      }
    }

    // Wrap: restart at offset 0 and bump the cycle counter.
    write_pointer = 0;
    write_cycles = write_cycles + 1;
    PACK64(*q->write_pointer, write_cycles, write_pointer);

    // Set actual pointer to the beginning of the data segment
    dst = q->data;
  }

  // Invalidate readers that are in the area that will be written
  uint64_t start = write_pointer;
  uint64_t end = ALIGN(start + sizeof(int64_t) + msg->size);

  for (uint64_t i = 0; i < num_readers; i++){
    uint32_t read_cycles, read_pointer;
    UNPACK64(read_cycles, read_pointer, *q->read_pointers[i]);

    if ((read_pointer >= start) && (read_pointer < end) && (read_cycles != write_cycles)) {
      *q->read_valids[i] = false;
    }
  }

  // Write size tag, then the payload.
  std::atomic<int64_t> *size_p = reinterpret_cast<std::atomic<int64_t>*>(dst);
  *size_p = msg->size;

  memcpy(dst + sizeof(int64_t), msg->data, msg->size);
  __sync_synchronize();

  // Publish the new write pointer.
  uint32_t new_ptr = ALIGN(write_pointer + msg->size + sizeof(int64_t));
  PACK64(*q->write_pointer, write_cycles, new_ptr);

  // Wake any readers sleeping in msgq_poll.
  for (uint64_t i = 0; i < num_readers; i++){
    uint64_t reader_uid = *q->read_uids[i];
    thread_signal(reader_uid & 0xFFFFFFFF);
  }

  return msg->size;
}
|
||||||
|
|
||||||
|
|
||||||
// Non-destructively check whether a new message is available for this
// reader, re-registering/resetting the reader first if it was evicted or
// invalidated (goto-based retry rewritten as a loop).
int msgq_msg_ready(msgq_queue_t * q){
  while (true) {
    int id = q->reader_id;
    assert(id >= 0); // Make sure subscriber is initialized

    if (q->read_uid_local != *q->read_uids[id]){
      std::cout << q->endpoint << ": Reader was evicted, reconnecting" << std::endl;
      msgq_init_subscriber(q);
      continue;
    }

    // Check valid
    if (!*q->read_valids[id]){
      msgq_reset_reader(q);
      continue;
    }

    uint32_t read_cycles, read_pointer;
    UNPACK64(read_cycles, read_pointer, *q->read_pointers[id]);

    uint32_t write_cycles, write_pointer;
    UNPACK64(write_cycles, write_pointer, *q->write_pointer);

    // A message is ready iff the reader hasn't caught up with the writer.
    return (read_pointer != write_pointer);
  }
}
|
||||||
|
|
||||||
// Receive the next message for this reader into `msg` (an owned copy).
// Returns the message size, 0 when the queue is empty, -1 on allocation
// failure. Retries from scratch (original goto rewritten as a loop)
// whenever the reader is evicted or lapped by the writer.
int msgq_msg_recv(msgq_msg_t * msg, msgq_queue_t * q){
  while (true) {
    int id = q->reader_id;
    assert(id >= 0); // Make sure subscriber is initialized

    if (q->read_uid_local != *q->read_uids[id]){
      std::cout << q->endpoint << ": Reader was evicted, reconnecting" << std::endl;
      msgq_init_subscriber(q);
      continue;
    }

    // Check valid
    if (!*q->read_valids[id]){
      msgq_reset_reader(q);
      continue;
    }

    uint32_t read_cycles, read_pointer;
    UNPACK64(read_cycles, read_pointer, *q->read_pointers[id]);

    uint32_t write_cycles, write_pointer;
    UNPACK64(write_cycles, write_pointer, *q->write_pointer);

    char * p = q->data + read_pointer;

    // Caught up with the writer: nothing to read.
    if (read_pointer == write_pointer) {
      msg->size = 0;
      return 0;
    }

    // Read potential message size
    std::atomic<int64_t> *size_p = reinterpret_cast<std::atomic<int64_t>*>(p);
    std::int64_t size = *size_p;

    // The size we just read may be garbage if the writer lapped us.
    if (!*q->read_valids[id]){
      msgq_reset_reader(q);
      continue;
    }

    // If size is -1 the buffer was full, and we need to wrap around
    if (size == -1){
      read_cycles++;
      PACK64(*q->read_pointers[id], read_cycles, 0);
      continue;
    }

    // crashing is better than passing garbage data to the consumer
    // the size will have weird value if it was overwritten by data accidentally
    assert((uint64_t)size < q->size);
    assert(size > 0);

    uint32_t new_read_pointer = ALIGN(read_pointer + sizeof(std::int64_t) + size);

    // Conflate mode: skip forward until only the newest message remains.
    if (q->read_conflate && new_read_pointer != write_pointer){
      PACK64(*q->read_pointers[id], read_cycles, new_read_pointer);
      continue;
    }

    // Copy message
    if (msgq_msg_init_size(msg, size) < 0)
      return -1;

    __sync_synchronize();
    memcpy(msg->data, p + sizeof(int64_t), size);
    __sync_synchronize();

    // Update read pointer
    PACK64(*q->read_pointers[id], read_cycles, new_read_pointer);

    // Re-check validity: the writer may have overwritten the data mid-copy.
    if (!*q->read_valids[id]){
      msgq_msg_close(msg);
      msgq_reset_reader(q);
      continue;
    }

    return msg->size;
  }
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
// Poll a set of queues, returning how many have a message ready and setting
// each item's revents flag. Sleeps in (at most) `ms` chunks; a writer's
// SIGUSR2 interrupts the nanosleep early.
int msgq_poll(msgq_pollitem_t * items, size_t nitems, int timeout){
  assert(timeout >= 0);

  int num_ready = 0;

  // First pass: anything already pending?
  for (size_t i = 0; i < nitems; i++) {
    items[i].revents = msgq_msg_ready(items[i].q);
    if (items[i].revents) num_ready++;
  }

  int ms = (timeout == -1) ? 100 : timeout;
  struct timespec ts;
  ts.tv_sec = ms / 1000;
  ts.tv_nsec = (ms % 1000) * 1000 * 1000;

  while (num_ready == 0) {
    int ret = nanosleep(&ts, &ts);

    // Re-check every queue after waking.
    for (size_t i = 0; i < nitems; i++) {
      if (items[i].revents == 0 && msgq_msg_ready(items[i].q)){
        num_ready += 1;
        items[i].revents = 1;
      }
    }

    // exit if we had a timeout and the sleep finished
    if (timeout != -1 && ret == 0){
      break;
    }
  }

  return num_ready;
}
|
@ -1,66 +0,0 @@ |
|||||||
#pragma once
#include <cstdint>
#include <cstring>
#include <string>
#include <atomic>

#define DEFAULT_SEGMENT_SIZE (10 * 1024 * 1024)
#define NUM_READERS 8
// Round n up to the next multiple of 8.
// Fix: the argument is parenthesized so the macro is safe when expanded
// with low-precedence expressions (e.g. ALIGN(a ? b : c)).
#define ALIGN(n) (((n) + (8 - 1)) & -8)

// A (cycle counter, offset) pair is packed into one 64-bit word so both can
// be read/written with a single atomic operation.
#define UNPACK64(higher, lower, input) do {uint64_t tmp = input; higher = tmp >> 32; lower = tmp & 0xFFFFFFFF;} while (0)
#define PACK64(output, higher, lower) output = ((uint64_t)higher << 32 ) | ((uint64_t)lower & 0xFFFFFFFF)

// Layout of the metadata segment at the start of the shared-memory mapping.
struct msgq_header_t {
  uint64_t num_readers;
  uint64_t write_pointer;   // packed (cycle, offset) of the writer
  uint64_t write_uid;       // uid of the active publisher
  uint64_t read_pointers[NUM_READERS];  // packed (cycle, offset) per reader
  uint64_t read_valids[NUM_READERS];    // validity flag per reader
  uint64_t read_uids[NUM_READERS];      // uid per reader (low 32 bits = tid)
};

// Per-process handle onto a queue; the atomics point into the mapped header.
struct msgq_queue_t {
  std::atomic<uint64_t> *num_readers;
  std::atomic<uint64_t> *write_pointer;
  std::atomic<uint64_t> *write_uid;
  std::atomic<uint64_t> *read_pointers[NUM_READERS];
  std::atomic<uint64_t> *read_valids[NUM_READERS];
  std::atomic<uint64_t> *read_uids[NUM_READERS];
  char * mmap_p;            // base of the shared mapping (header + data)
  char * data;              // start of the ring buffer
  size_t size;              // ring buffer size in bytes
  int reader_id;            // this process's reader slot, -1 if not a reader
  uint64_t read_uid_local;  // our uid, compared against read_uids[reader_id]
  uint64_t write_uid_local; // our uid, compared against write_uid

  bool read_conflate;       // if true, only deliver the newest message
  std::string endpoint;
};

// One message: an owned heap buffer of `size` bytes.
struct msgq_msg_t {
  size_t size;
  char * data;
};

// One entry in a msgq_poll call.
struct msgq_pollitem_t {
  msgq_queue_t *q;
  int revents;  // set to 1 when the queue has a message ready
};

void msgq_wait_for_subscriber(msgq_queue_t *q);
void msgq_reset_reader(msgq_queue_t *q);

int msgq_msg_init_size(msgq_msg_t *msg, size_t size);
int msgq_msg_init_data(msgq_msg_t *msg, char * data, size_t size);
int msgq_msg_close(msgq_msg_t *msg);

int msgq_new_queue(msgq_queue_t * q, const char * path, size_t size);
void msgq_close_queue(msgq_queue_t *q);
void msgq_init_publisher(msgq_queue_t * q);
void msgq_init_subscriber(msgq_queue_t * q);

int msgq_msg_send(msgq_msg_t *msg, msgq_queue_t *q);
int msgq_msg_recv(msgq_msg_t *msg, msgq_queue_t *q);
int msgq_msg_ready(msgq_queue_t * q);
int msgq_poll(msgq_pollitem_t * items, size_t nitems, int timeout);
@ -1,56 +0,0 @@ |
|||||||
# MSGQ: A lock free single producer multi consumer message queue |
|
||||||
|
|
||||||
[](https://dev.azure.com/commaai/default/_build/latest?definitionId=21&branchName=master) |
|
||||||
|
|
||||||
## What is MSGQ? |
|
||||||
MSGQ is a system to pass messages from a single producer to multiple consumers. All the consumers need to be able to receive all the messages. It is designed to be a high performance replacement for ZMQ-like SUB/PUB patterns. It uses a ring buffer in shared memory to efficiently read and write data. Each read requires a copy. Writing can be done without a copy, as long as the size of the data is known in advance. |
|
||||||
|
|
||||||
## Storage |
|
||||||
The storage for the queue consists of an area of metadata, and the actual buffer. The metadata contains: |
|
||||||
|
|
||||||
1. A counter to the number of readers that are active |
|
||||||
2. A pointer to the head of the queue for writing. From now on referred to as *write pointer* |
|
||||||
3. A cycle counter for the writer. This counter is incremented when the writer wraps around |
|
||||||
4. N pointers, pointing to the current read position for all the readers. From now on referred to as *read pointer* |
|
||||||
5. N counters, counting the number of cycles for all the readers |
|
||||||
6. N booleans, indicating validity for all the readers. From now on referred to as *validity flag* |
|
||||||
|
|
||||||
The counter and the pointer are both 32 bit values, packed into 64 bit so they can be read and written atomically. |
|
||||||
|
|
||||||
The data buffer is a ring buffer. All messages are prefixed by an 8 byte size field, followed by the data. A size of -1 indicates a wrap-around, and means the next message is stored at the beginning of the buffer. |
|
||||||
|
|
||||||
|
|
||||||
## Writing |
|
||||||
Writing involves the following steps: |
|
||||||
|
|
||||||
1. Check if the area that is to be written overlaps with any of the read pointers, mark those readers as invalid by clearing the validity flag. |
|
||||||
2. Write the message |
|
||||||
3. Increase the write pointer by the size of the message |
|
||||||
|
|
||||||
In case there is not enough space at the end of the buffer, a special empty message with a prefix of -1 is written. The cycle counter is incremented by one. In this case step 1 will check there are no read pointers pointing to the remainder of the buffer. Then another write cycle will start with the actual message. |
|
||||||
|
|
||||||
There always needs to be 8 bytes of empty space at the end of the buffer. By doing this there is always space to write the -1. |
|
||||||
|
|
||||||
## Reset reader |
|
||||||
When the reader is lagging too far behind, the read pointer becomes invalid and no longer points to the beginning of a valid message. To reset a reader to the current write pointer, the following steps are performed:
|
||||||
|
|
||||||
1. Set valid flag |
|
||||||
2. Set read cycle counter to that of the writer |
|
||||||
3. Set read pointer to write pointer |
|
||||||
|
|
||||||
## Reading |
|
||||||
Reading involves the following steps: |
|
||||||
|
|
||||||
1. Read the size field at the current read pointer |
|
||||||
2. Read the validity flag |
|
||||||
3. Copy the data out of the buffer |
|
||||||
4. Increase the read pointer by the size of the message |
|
||||||
5. Check the validity flag again |
|
||||||
|
|
||||||
Before starting the copy, the valid flag is checked. This is to prevent a race condition where the size prefix was invalid, and the reader could read outside of the buffer. Make sure that steps 1 and 2 are not reordered by your compiler or CPU.
|
||||||
|
|
||||||
If a writer overwrites the data while it's being copied out, the data will be invalid. Therefore the validity flag is also checked after reading it. The order of step 4 and 5 does not matter. |
|
||||||
|
|
||||||
If at steps 2 or 5 the validity flag is not set, the reader is reset. Any data that was already read is discarded. After the reader is reset, the reading starts from the beginning. |
|
||||||
|
|
||||||
If a message with size -1 is encountered, step 3 and 4 are replaced by increasing the cycle counter and setting the read pointer to the beginning of the buffer. After that another read is performed. |
|
@ -1,395 +0,0 @@ |
|||||||
#include "catch2/catch.hpp" |
|
||||||
#include "msgq.hpp" |
|
||||||
|
|
||||||
// ALIGN must round a size up to the next multiple of 8 (the queue's
// alignment granularity); exact multiples of 8 are returned unchanged.
TEST_CASE("ALIGN"){
  REQUIRE(ALIGN(0) == 0);
  REQUIRE(ALIGN(1) == 8);
  REQUIRE(ALIGN(7) == 8);
  REQUIRE(ALIGN(8) == 8);
  REQUIRE(ALIGN(99999) == 100000);
}
|
||||||
|
|
||||||
// msgq_msg_init_size must allocate a message buffer of exactly the
// requested size.
TEST_CASE("msgq_msg_init_size"){
  const size_t msg_size = 30;
  msgq_msg_t msg;

  msgq_msg_init_size(&msg, msg_size);
  REQUIRE(msg.size == msg_size);

  msgq_msg_close(&msg);
}
|
||||||
|
|
||||||
// msgq_msg_init_data must record the size and copy the caller's buffer
// into the message byte-for-byte.
TEST_CASE("msgq_msg_init_data"){
  const size_t msg_size = 30;
  char * buf = new char[msg_size];

  for (size_t byte = 0; byte < msg_size; byte++) buf[byte] = byte;

  msgq_msg_t msg;
  msgq_msg_init_data(&msg, buf, msg_size);

  // Size is recorded and the payload is an exact copy of the input.
  REQUIRE(msg.size == msg_size);
  REQUIRE(memcmp(msg.data, buf, msg_size) == 0);

  delete[] buf;
  msgq_msg_close(&msg);
}
|
||||||
|
|
||||||
|
|
||||||
// Subscribing with a stale reader slot must reset that slot: the valid
// flag is set and the read pointer (cycle counter in the high 32 bits,
// byte offset in the low 32 bits) is synced to the write pointer.
TEST_CASE("msgq_init_subscriber"){
  remove("/dev/shm/test_queue");  // start from a fresh shared-memory segment
  msgq_queue_t q;
  msgq_new_queue(&q, "test_queue", 1024);
  REQUIRE(*q.num_readers == 0);

  // Simulate a stale reader: invalid, and one cycle ahead of the writer.
  q.reader_id = 1;
  *q.read_valids[0] = false;
  *q.read_pointers[0] = ((uint64_t)1 << 32);

  *q.write_pointer = 255;

  msgq_init_subscriber(&q);
  REQUIRE(q.read_conflate == false);
  REQUIRE(*q.read_valids[0] == true);
  REQUIRE((*q.read_pointers[0] >> 32) == 0);           // cycle synced to writer
  REQUIRE((*q.read_pointers[0] & 0xFFFFFFFF) == 255);  // offset synced to writer
}
|
||||||
|
|
||||||
// Sending the first message writes an int64 size tag followed by the
// payload, and advances the write pointer by the aligned payload size
// plus the tag, for both aligned and unaligned message sizes.
TEST_CASE("msgq_msg_send first message"){
  remove("/dev/shm/test_queue");
  msgq_queue_t q;
  msgq_new_queue(&q, "test_queue", 1024);
  msgq_init_publisher(&q);

  REQUIRE(*q.write_pointer == 0);

  size_t msg_size = 128;

  SECTION("Aligned message size"){
  }
  SECTION("Unaligned message size"){
    msg_size--;  // 127 bytes must still leave the write pointer at 128 + tag
  }

  char * data = new char[msg_size];

  for (size_t i = 0; i < msg_size; i++){
    data[i] = i;
  }

  msgq_msg_t msg;
  msgq_msg_init_data(&msg, data, msg_size);

  msgq_msg_send(&msg, &q);
  REQUIRE(*(int64_t*)q.data == msg_size); // Check size tag
  REQUIRE(*q.write_pointer == 128 + sizeof(int64_t));  // aligned payload + tag
  REQUIRE(memcmp(q.data + sizeof(int64_t), data, msg_size) == 0);

  delete[] data;
  msgq_msg_close(&msg);
}
|
||||||
|
|
||||||
// Filling the queue forces a wraparound: a -1 size tag is written where
// the next message no longer fits, the cycle counter (high 32 bits of
// the write pointer) is bumped, and writing restarts at the buffer start.
TEST_CASE("msgq_msg_send test wraparound"){
  remove("/dev/shm/test_queue");
  msgq_queue_t q;
  msgq_new_queue(&q, "test_queue", 1024);
  msgq_init_publisher(&q);

  REQUIRE((*q.write_pointer & 0xFFFFFFFF) == 0);
  REQUIRE((*q.write_pointer >> 32) == 0);

  // Seven 120+8 byte messages fill 896 of 1024 bytes; the 8th wraps.
  const size_t msg_size = 120;
  msgq_msg_t msg;
  msgq_msg_init_size(&msg, msg_size);

  for (int i = 0; i < 8; i++) {
    msgq_msg_send(&msg, &q);
  }
  // Check 8th message was written at the beginning
  REQUIRE((*q.write_pointer & 0xFFFFFFFF) == msg_size + sizeof(int64_t));

  // Check cycle count
  REQUIRE((*q.write_pointer >> 32) == 1);

  // Check wraparound tag
  char * tag_location = q.data;
  tag_location += 7 * (msg_size + sizeof(int64_t));
  REQUIRE(*(int64_t*)tag_location == -1);

  msgq_msg_close(&msg);
}
|
||||||
|
|
||||||
// Reader behaviour across a writer wraparound: a lapped reader is reset
// (recv yields an empty message), while a reader that keeps up follows
// the cycle counter into the next cycle.
TEST_CASE("msgq_msg_recv test wraparound"){
  remove("/dev/shm/test_queue");
  msgq_queue_t q_pub, q_sub;
  msgq_new_queue(&q_pub, "test_queue", 1024);
  msgq_new_queue(&q_sub, "test_queue", 1024);

  msgq_init_publisher(&q_pub);
  msgq_init_subscriber(&q_sub);

  REQUIRE((*q_pub.write_pointer >> 32) == 0);
  REQUIRE((*q_sub.read_pointers[0] >> 32) == 0);

  const size_t msg_size = 120;
  msgq_msg_t msg1;
  msgq_msg_init_size(&msg1, msg_size);

  SECTION("Check cycle counter after reset") {
    // Wrap the writer before the reader ever reads: the reader is lapped.
    for (int i = 0; i < 8; i++) {
      msgq_msg_send(&msg1, &q_pub);
    }

    msgq_msg_t msg2;
    msgq_msg_recv(&msg2, &q_sub);
    REQUIRE(msg2.size == 0); // Reader had to reset
    msgq_msg_close(&msg2);
  }
  SECTION("Check cycle counter while keeping up with writer") {
    // Read after every send: all 8 messages arrive intact.
    for (int i = 0; i < 8; i++) {
      msgq_msg_send(&msg1, &q_pub);

      msgq_msg_t msg2;
      msgq_msg_recv(&msg2, &q_sub);
      REQUIRE(msg2.size > 0);
      msgq_msg_close(&msg2);
    }
  }

  // Either way the reader ends up in cycle 1.
  REQUIRE((*q_sub.read_pointers[0] >> 32) == 1);
  msgq_msg_close(&msg1);
}
|
||||||
|
|
||||||
// A send that overtakes a reader must clear that reader's valid flag,
// regardless of whether the read pointer sits on a size tag, inside
// message data, or in the wraparound region at the end of the buffer.
TEST_CASE("msgq_msg_send test invalidation"){
  remove("/dev/shm/test_queue");
  msgq_queue_t q_pub, q_sub;
  msgq_new_queue(&q_pub, "test_queue", 1024);
  msgq_new_queue(&q_sub, "test_queue", 1024);

  msgq_init_publisher(&q_pub);
  msgq_init_subscriber(&q_sub);
  *q_sub.write_pointer = (uint64_t)1 << 32;  // pretend the writer already wrapped once

  REQUIRE(*q_sub.read_valids[0] == true);

  SECTION("read pointer in tag"){
    *q_sub.read_pointers[0] = 0;
  }
  SECTION("read pointer in data section"){
    *q_sub.read_pointers[0] = 64;
  }
  SECTION("read pointer in wraparound section"){
    *q_pub.write_pointer = ((uint64_t)1 << 32) | 1000; // Writer is one cycle ahead
    *q_sub.read_pointers[0] = 1020;
  }

  msgq_msg_t msg;
  msgq_msg_init_size(&msg, 128);
  msgq_msg_send(&msg, &q_pub);

  REQUIRE(*q_sub.read_valids[0] == false);

  msgq_msg_close(&msg);
}
|
||||||
|
|
||||||
// Registering subscribers increments the shared reader count and hands
// out sequential reader ids, visible through every mapping of the queue.
TEST_CASE("msgq_init_subscriber init 2 subscribers"){
  remove("/dev/shm/test_queue");
  msgq_queue_t q1, q2;
  msgq_new_queue(&q1, "test_queue", 1024);
  msgq_new_queue(&q2, "test_queue", 1024);

  *q1.num_readers = 0;

  REQUIRE(*q1.num_readers == 0);
  REQUIRE(*q2.num_readers == 0);

  msgq_init_subscriber(&q1);
  REQUIRE(*q1.num_readers == 1);
  REQUIRE(*q2.num_readers == 1);  // count is shared memory, seen by both mappings
  REQUIRE(q1.reader_id == 0);

  msgq_init_subscriber(&q2);
  REQUIRE(*q1.num_readers == 2);
  REQUIRE(*q2.num_readers == 2);
  REQUIRE(q2.reader_id == 1);
}
|
||||||
|
|
||||||
|
|
||||||
// Round trip: one published message arrives intact at a subscriber, and
// a second recv returns 0 (queue drained).
TEST_CASE("Write 1 msg, read 1 msg", "[integration]"){
  remove("/dev/shm/test_queue");
  const size_t msg_size = 128;
  msgq_queue_t writer, reader;

  msgq_new_queue(&writer, "test_queue", 1024);
  msgq_new_queue(&reader, "test_queue", 1024);

  msgq_init_publisher(&writer);
  msgq_init_subscriber(&reader);

  // Build 128 byte message
  msgq_msg_t outgoing_msg;
  msgq_msg_init_size(&outgoing_msg, msg_size);

  for (size_t i = 0; i < msg_size; i++){
    outgoing_msg.data[i] = i;
  }

  REQUIRE(msgq_msg_send(&outgoing_msg, &writer) == msg_size);

  msgq_msg_t incoming_msg1;
  REQUIRE(msgq_msg_recv(&incoming_msg1, &reader) == msg_size);
  REQUIRE(memcmp(incoming_msg1.data, outgoing_msg.data, msg_size) == 0);

  // Verify that there are no more messages
  msgq_msg_t incoming_msg2;
  REQUIRE(msgq_msg_recv(&incoming_msg2, &reader) == 0);

  msgq_msg_close(&outgoing_msg);
  msgq_msg_close(&incoming_msg1);
  msgq_msg_close(&incoming_msg2);
}
|
||||||
|
|
||||||
// Without conflation, both queued messages are delivered, in order.
TEST_CASE("Write 2 msg, read 2 msg - conflate = false", "[integration]"){
  remove("/dev/shm/test_queue");
  const size_t msg_size = 128;
  msgq_queue_t writer, reader;

  msgq_new_queue(&writer, "test_queue", 1024);
  msgq_new_queue(&reader, "test_queue", 1024);

  msgq_init_publisher(&writer);
  msgq_init_subscriber(&reader);

  // Build 128 byte message
  msgq_msg_t outgoing_msg;
  msgq_msg_init_size(&outgoing_msg, msg_size);

  for (size_t i = 0; i < msg_size; i++){
    outgoing_msg.data[i] = i;
  }

  REQUIRE(msgq_msg_send(&outgoing_msg, &writer) == msg_size);
  REQUIRE(msgq_msg_send(&outgoing_msg, &writer) == msg_size);

  msgq_msg_t incoming_msg1;
  REQUIRE(msgq_msg_recv(&incoming_msg1, &reader) == msg_size);
  REQUIRE(memcmp(incoming_msg1.data, outgoing_msg.data, msg_size) == 0);

  msgq_msg_t incoming_msg2;
  REQUIRE(msgq_msg_recv(&incoming_msg2, &reader) == msg_size);
  REQUIRE(memcmp(incoming_msg2.data, outgoing_msg.data, msg_size) == 0);

  msgq_msg_close(&outgoing_msg);
  msgq_msg_close(&incoming_msg1);
  msgq_msg_close(&incoming_msg2);
}
|
||||||
|
|
||||||
// With conflation, only the newest pending message is delivered; the
// queue then reads as empty.
TEST_CASE("Write 2 msg, read 2 msg - conflate = true", "[integration]"){
  remove("/dev/shm/test_queue");
  const size_t msg_size = 128;
  msgq_queue_t writer, reader;

  msgq_new_queue(&writer, "test_queue", 1024);
  msgq_new_queue(&reader, "test_queue", 1024);

  msgq_init_publisher(&writer);
  msgq_init_subscriber(&reader);
  reader.read_conflate = true;

  // Build 128 byte message
  msgq_msg_t outgoing_msg;
  msgq_msg_init_size(&outgoing_msg, msg_size);

  for (size_t i = 0; i < msg_size; i++){
    outgoing_msg.data[i] = i;
  }

  REQUIRE(msgq_msg_send(&outgoing_msg, &writer) == msg_size);
  REQUIRE(msgq_msg_send(&outgoing_msg, &writer) == msg_size);

  // Both sends carry identical payloads; conflation collapses them to one.
  msgq_msg_t incoming_msg1;
  REQUIRE(msgq_msg_recv(&incoming_msg1, &reader) == msg_size);
  REQUIRE(memcmp(incoming_msg1.data, outgoing_msg.data, msg_size) == 0);

  // Verify that there are no more messages
  msgq_msg_t incoming_msg2;
  REQUIRE(msgq_msg_recv(&incoming_msg2, &reader) == 0);

  msgq_msg_close(&outgoing_msg);
  msgq_msg_close(&incoming_msg1);
  msgq_msg_close(&incoming_msg2);
}
|
||||||
|
|
||||||
// A subscriber that polls only once per 10 publishes on a small ring
// sees a mix of successful reads and forced resets (size 0). The exact
// totals are pinned to guard against behaviour drift.
TEST_CASE("1 publisher, 1 slow subscriber", "[integration]"){
  remove("/dev/shm/test_queue");
  msgq_queue_t writer, reader;

  msgq_new_queue(&writer, "test_queue", 1024);
  msgq_new_queue(&reader, "test_queue", 1024);

  msgq_init_publisher(&writer);
  msgq_init_subscriber(&reader);

  int n_received = 0;
  int n_skipped = 0;

  for (uint64_t i = 0; i < 1e5; i++) {
    msgq_msg_t outgoing_msg;
    msgq_msg_init_data(&outgoing_msg, (char*)&i, sizeof(uint64_t));
    msgq_msg_send(&outgoing_msg, &writer);
    msgq_msg_close(&outgoing_msg);

    if (i % 10 == 0){
      msgq_msg_t msg1;
      msgq_msg_recv(&msg1, &reader);

      // An empty message means the reader was lapped and had to reset.
      if (msg1.size == 0){
        n_skipped++;
      } else {
        n_received++;
      }
      msgq_msg_close(&msg1);
    }
  }

  // TODO: verify these numbers by hand
  REQUIRE(n_received == 8572);
  REQUIRE(n_skipped == 1428);
}
|
||||||
|
|
||||||
// Two subscribers on the same queue each receive every message with the
// correct payload — reads are independent per reader slot.
TEST_CASE("1 publisher, 2 subscribers", "[integration]"){
  remove("/dev/shm/test_queue");
  msgq_queue_t writer, reader1, reader2;

  msgq_new_queue(&writer, "test_queue", 1024);
  msgq_new_queue(&reader1, "test_queue", 1024);
  msgq_new_queue(&reader2, "test_queue", 1024);

  msgq_init_publisher(&writer);
  msgq_init_subscriber(&reader1);
  msgq_init_subscriber(&reader2);

  for (uint64_t i = 0; i < 1024 * 3; i++) {  // enough iterations to wrap multiple times
    msgq_msg_t outgoing_msg;
    msgq_msg_init_data(&outgoing_msg, (char*)&i, sizeof(uint64_t));
    msgq_msg_send(&outgoing_msg, &writer);

    msgq_msg_t msg1, msg2;
    msgq_msg_recv(&msg1, &reader1);
    msgq_msg_recv(&msg2, &reader2);

    REQUIRE(msg1.size == sizeof(uint64_t));
    REQUIRE(msg2.size == sizeof(uint64_t));
    REQUIRE(*(uint64_t*)msg1.data == i);
    REQUIRE(*(uint64_t*)msg2.data == i);

    msgq_msg_close(&outgoing_msg);
    msgq_msg_close(&msg1);
    msgq_msg_close(&msg2);
  }
}
|
@ -1,14 +0,0 @@ |
|||||||
from messaging_pyx import Context, SubSocket, PubSocket  # pylint: disable=no-name-in-module, import-error


if __name__ == "__main__":
  # Stress demo: repeatedly create a fresh subscriber on the same service
  # and bounce one message through it.
  # NOTE(review): a new SubSocket is created every iteration and never
  # closed explicitly — presumably intended as a leak/stress test; confirm.
  c = Context()
  pub_sock = PubSocket()
  pub_sock.connect(c, "controlsState")

  for i in range(int(1e10)):  # effectively runs forever
    print(i)
    sub_sock = SubSocket()
    sub_sock.connect(c, "controlsState")

    pub_sock.send(b'a')
    print(sub_sock.receive())
|
@ -1,2 +0,0 @@ |
|||||||
#define CATCH_CONFIG_MAIN |
|
||||||
#include "catch2/catch.hpp" |
|
@ -1,142 +0,0 @@ |
|||||||
import unittest |
|
||||||
import time |
|
||||||
import cereal.messaging as messaging |
|
||||||
|
|
||||||
import concurrent.futures |
|
||||||
|
|
||||||
|
|
||||||
def poller():
  """Subscribe to 'controlsState', poll once, and return the received messages.

  Returns the list of results of one non-blocking receive per socket that
  the poll reported ready (may be empty if the poll times out).
  """
  context = messaging.Context()

  p = messaging.Poller()

  sub = messaging.SubSocket()
  sub.connect(context, 'controlsState')
  p.registerSocket(sub)

  socks = p.poll(10000)  # timeout 10000 — presumably milliseconds; confirm
  r = [s.receive(non_blocking=True) for s in socks]

  return r
|
||||||
|
|
||||||
|
|
||||||
class TestPoller(unittest.TestCase):
  """End-to-end tests of the messaging Poller/PubSocket/SubSocket bindings."""

  def test_poll_once(self):
    """A message published from the main thread reaches a poller thread."""
    context = messaging.Context()

    pub = messaging.PubSocket()
    pub.connect(context, 'controlsState')

    with concurrent.futures.ThreadPoolExecutor() as e:
      poll = e.submit(poller)

      time.sleep(0.1)  # Slow joiner syndrome

      # Send message
      pub.send("a")

      # Wait for poll result
      result = poll.result()

    del pub
    context.term()

    self.assertEqual(result, [b"a"])

  def test_poll_and_create_many_subscribers(self):
    """Creating extra subscribers mid-poll must not disturb an active poller."""
    context = messaging.Context()

    pub = messaging.PubSocket()
    pub.connect(context, 'controlsState')

    with concurrent.futures.ThreadPoolExecutor() as e:
      poll = e.submit(poller)

      time.sleep(0.1)  # Slow joiner syndrome
      c = messaging.Context()
      # Churn: register many additional readers on the same service.
      for _ in range(10):
        messaging.SubSocket().connect(c, 'controlsState')

      time.sleep(0.1)

      # Send message
      pub.send("a")

      # Wait for poll result
      result = poll.result()

    del pub
    context.term()

    self.assertEqual(result, [b"a"])

  def test_multiple_publishers_exception(self):
    """Connecting a second publisher to the same service must raise."""
    context = messaging.Context()

    with self.assertRaises(messaging.MultiplePublishersError):
      pub1 = messaging.PubSocket()
      pub1.connect(context, 'controlsState')

      pub2 = messaging.PubSocket()
      pub2.connect(context, 'controlsState')

      pub1.send("a")

    del pub1
    del pub2
    context.term()

  def test_multiple_messages(self):
    """100 queued messages are received in order via non-blocking reads."""
    context = messaging.Context()

    pub = messaging.PubSocket()
    pub.connect(context, 'controlsState')

    sub = messaging.SubSocket()
    sub.connect(context, 'controlsState')

    time.sleep(0.1)  # Slow joiner

    for i in range(100):
      pub.send(str(i))

    msg_seen = False
    i = 0
    while True:
      r = sub.receive(non_blocking=True)

      if r is not None:
        self.assertEqual(str(i), r.decode('utf8'))

        msg_seen = True
        i += 1

      if r is None and msg_seen:  # ZMQ sometimes receives nothing on the first receive
        break

    del pub
    del sub
    context.term()

  def test_conflate(self):
    """With conflate=True only the newest message is delivered."""
    context = messaging.Context()

    pub = messaging.PubSocket()
    pub.connect(context, 'controlsState')

    sub = messaging.SubSocket()
    sub.connect(context, 'controlsState', conflate=True)

    time.sleep(0.1)  # Slow joiner
    pub.send('a')
    pub.send('b')

    self.assertEqual(b'b', sub.receive())

    del pub
    del sub
    context.term()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__": |
|
||||||
unittest.main() |
|
@ -1,163 +0,0 @@ |
|||||||
# TODO: these port numbers are hardcoded in c, fix this |
|
||||||
|
|
||||||
# LogRotate: 8001 is a PUSH PULL socket between loggerd and visiond |
|
||||||
|
|
||||||
# all ZMQ pub sub: port, should_log, frequency, (qlog_decimation) |
|
||||||
|
|
||||||
# frame syncing packet |
|
||||||
frame: [8002, true, 20., 1] |
|
||||||
# accel, gyro, and compass |
|
||||||
sensorEvents: [8003, true, 100., 100] |
|
||||||
# GPS data, also global timestamp |
|
||||||
gpsNMEA: [8004, true, 9.] # 9 msgs each sec |
|
||||||
# CPU+MEM+GPU+BAT temps |
|
||||||
thermal: [8005, true, 2., 1] |
|
||||||
# List(CanData), list of can messages |
|
||||||
can: [8006, true, 100.] |
|
||||||
controlsState: [8007, true, 100., 100] |
|
||||||
#liveEvent: [8008, true, 0.] |
|
||||||
model: [8009, true, 20., 5] |
|
||||||
features: [8010, true, 0.] |
|
||||||
health: [8011, true, 2., 1] |
|
||||||
radarState: [8012, true, 20.] |
|
||||||
#liveUI: [8014, true, 0.] |
|
||||||
encodeIdx: [8015, true, 20.] |
|
||||||
liveTracks: [8016, true, 20.] |
|
||||||
sendcan: [8017, true, 100.] |
|
||||||
logMessage: [8018, true, 0.] |
|
||||||
liveCalibration: [8019, true, 4., 4] |
|
||||||
androidLog: [8020, true, 0.] |
|
||||||
carState: [8021, true, 100., 10] |
|
||||||
# 8022 is reserved for sshd |
|
||||||
carControl: [8023, true, 100., 10] |
|
||||||
plan: [8024, true, 20.] |
|
||||||
liveLocation: [8025, true, 0.] |
|
||||||
gpsLocation: [8026, true, 1., 1] |
|
||||||
ethernetData: [8027, true, 0.] |
|
||||||
navUpdate: [8028, true, 0.] |
|
||||||
qcomGnss: [8029, true, 0.] |
|
||||||
lidarPts: [8030, true, 0.] |
|
||||||
procLog: [8031, true, 0.5] |
|
||||||
gpsLocationExternal: [8032, true, 10., 1] |
|
||||||
ubloxGnss: [8033, true, 10.] |
|
||||||
clocks: [8034, true, 1., 1] |
|
||||||
liveMpc: [8035, false, 20.] |
|
||||||
liveLongitudinalMpc: [8036, false, 20.] |
|
||||||
navStatus: [8038, true, 0.] |
|
||||||
gpsLocationTrimble: [8039, true, 0.] |
|
||||||
trimbleGnss: [8041, true, 0.] |
|
||||||
ubloxRaw: [8042, true, 20.] |
|
||||||
gpsPlannerPoints: [8043, true, 0.] |
|
||||||
gpsPlannerPlan: [8044, true, 0.] |
|
||||||
applanixRaw: [8046, true, 0.] |
|
||||||
orbLocation: [8047, true, 0.] |
|
||||||
trafficEvents: [8048, true, 0.] |
|
||||||
liveLocationTiming: [8049, true, 0.] |
|
||||||
orbslamCorrection: [8050, true, 0.] |
|
||||||
liveLocationCorrected: [8051, true, 0.] |
|
||||||
orbObservation: [8052, true, 0.] |
|
||||||
applanixLocation: [8053, true, 0.] |
|
||||||
liveLocationKalman: [8054, true, 0.] |
|
||||||
uiNavigationEvent: [8055, true, 0.] |
|
||||||
orbOdometry: [8057, true, 0.] |
|
||||||
orbFeatures: [8058, false, 0.] |
|
||||||
orbKeyFrame: [8059, true, 0.] |
|
||||||
uiLayoutState: [8060, true, 0.] |
|
||||||
frontEncodeIdx: [8061, true, 5.] |
|
||||||
orbFeaturesSummary: [8062, true, 0.] |
|
||||||
driverMonitoring: [8063, true, 5., 1] |
|
||||||
liveParameters: [8064, true, 10.] |
|
||||||
liveMapData: [8065, true, 0.] |
|
||||||
cameraOdometry: [8066, true, 20.] |
|
||||||
pathPlan: [8067, true, 20.] |
|
||||||
kalmanOdometry: [8068, true, 0.] |
|
||||||
thumbnail: [8069, true, 0.2, 1] |
|
||||||
carEvents: [8070, true, 1., 1] |
|
||||||
carParams: [8071, true, 0.02, 1] |
|
||||||
frontFrame: [8072, true, 10.] |
|
||||||
|
|
||||||
testModel: [8040, false, 0.] |
|
||||||
testLiveLocation: [8045, false, 0.] |
|
||||||
testJoystick: [8056, false, 0.] |
|
||||||
|
|
||||||
# 8080 is reserved for slave testing daemon |
|
||||||
# 8762 is reserved for logserver |
|
||||||
|
|
||||||
# manager -- base process to manage starting and stopping of all others |
|
||||||
# subscribes: thermal |
|
||||||
|
|
||||||
# **** processes that communicate with the outside world **** |
|
||||||
|
|
||||||
# thermald -- decides when to start and stop onroad |
|
||||||
# subscribes: health, location |
|
||||||
# publishes: thermal |
|
||||||
|
|
||||||
# boardd -- communicates with the car |
|
||||||
# subscribes: sendcan |
|
||||||
# publishes: can, health, ubloxRaw |
|
||||||
|
|
||||||
# sensord -- publishes IMU and Magnetometer |
|
||||||
# publishes: sensorEvents |
|
||||||
|
|
||||||
# gpsd -- publishes EON's gps |
|
||||||
# publishes: gpsNMEA |
|
||||||
|
|
||||||
# visiond -- talks to the cameras, runs the model, saves the videos |
|
||||||
# publishes: frame, model, driverMonitoring, cameraOdometry, thumbnail |
|
||||||
|
|
||||||
# **** stateful data transformers **** |
|
||||||
|
|
||||||
# plannerd -- decides where to drive the car |
|
||||||
# subscribes: carState, model, radarState, controlsState, liveParameters |
|
||||||
# publishes: plan, pathPlan, liveMpc, liveLongitudinalMpc |
|
||||||
|
|
||||||
# controlsd -- drives the car by sending CAN messages to panda |
|
||||||
# subscribes: can, thermal, health, plan, pathPlan, driverMonitoring, liveCalibration |
|
||||||
# publishes: carState, carControl, sendcan, controlsState, carEvents, carParams |
|
||||||
|
|
||||||
# radard -- processes the radar and vision data |
|
||||||
# subscribes: can, controlsState, model, liveParameters |
|
||||||
# publishes: radarState, liveTracks |
|
||||||
|
|
||||||
# params_learner -- learns vehicle params by observing the vehicle dynamics |
|
||||||
# subscribes: controlsState, sensorEvents, cameraOdometry |
|
||||||
# publishes: liveParameters |
|
||||||
|
|
||||||
# calibrationd -- reads posenet and applies a temporal filter on the frame region to look at |
|
||||||
# subscribes: cameraOdometry |
|
||||||
# publishes: liveCalibration |
|
||||||
|
|
||||||
# ubloxd -- read raw ublox data and converts them in readable format |
|
||||||
# subscribes: ubloxRaw |
|
||||||
# publishes: ubloxGnss |
|
||||||
|
|
||||||
# **** LOGGING SERVICE **** |
|
||||||
|
|
||||||
# loggerd |
|
||||||
# subscribes: EVERYTHING |
|
||||||
|
|
||||||
# **** NON VITAL SERVICES **** |
|
||||||
|
|
||||||
# ui |
|
||||||
# subscribes: thermal, model, controlsState, uiLayout, liveCalibration, radarState, liveMpc, plusFrame, liveMapData |
|
||||||
|
|
||||||
# uploader |
|
||||||
# communicates through file system with loggerd |
|
||||||
|
|
||||||
# deleter |
|
||||||
# communicates through file system with loggerd and uploader |
|
||||||
|
|
||||||
# logmessaged -- central logging service, can log to cloud |
|
||||||
# publishes: logMessage |
|
||||||
|
|
||||||
# logcatd -- fetches logcat info from android |
|
||||||
# publishes: androidLog |
|
||||||
|
|
||||||
# proclogd -- fetches process information |
|
||||||
# publishes: procLog |
|
||||||
|
|
||||||
# tombstoned -- reports native crashes |
|
||||||
|
|
||||||
# athenad -- on request, open a sub socket and return the value |
|
||||||
|
|
||||||
# updated -- waits for network access and tries to update every hour |
|
@ -1,33 +0,0 @@ |
|||||||
#!/usr/bin/env python3 |
|
||||||
import os |
|
||||||
import yaml |
|
||||||
|
|
||||||
class Service():
  """Static description of one pub/sub service from service_list.yaml."""

  def __init__(self, port, should_log, frequency, decimation=None):
    # ZMQ port number for this service.
    self.port = port
    # Whether loggerd should record this service.
    self.should_log = should_log
    # Publish rate (Hz) as declared in the yaml; may be fractional.
    self.frequency = frequency
    # qlog decimation factor; None when the service has no qlog entry.
    self.decimation = decimation
|
||||||
|
|
||||||
service_list_path = os.path.join(os.path.dirname(__file__), "service_list.yaml")

# Parse service_list.yaml into {name: Service}. Each yaml value is
# [port, should_log, frequency] with an optional 4th decimation element.
service_list = {}
with open(service_list_path, "r") as f:
  for k, v in yaml.safe_load(f).items():
    decimation = None
    if len(v) == 4:
      decimation = v[3]

    service_list[k] = Service(v[0], v[1], v[2], decimation)
|
||||||
|
|
||||||
if __name__ == "__main__":
  # Emit a C header (services.h) describing every service as a static array.
  print("/* THIS IS AN AUTOGENERATED FILE, PLEASE EDIT service_list.yaml */")
  print("#ifndef __SERVICES_H")
  print("#define __SERVICES_H")
  print("struct service { int port; bool should_log; int frequency; int decimation; char name[0x100]; };")
  print("static struct service services[] = {")
  for k, v in service_list.items():
    # Missing decimation is encoded as -1 for the C side.
    # NOTE(review): %d truncates fractional frequencies (e.g. 0.5 -> 0) —
    # confirm C consumers expect integer Hz.
    print(' { .name = "%s", .port = %d, .should_log = %s, .frequency = %d, .decimation = %d },' % (k, v.port, "true" if v.should_log else "false", v.frequency, -1 if v.decimation is None else v.decimation))
  print("};")
  print("#endif")
|
||||||
|
|
@ -1,12 +0,0 @@ |
|||||||
#!/bin/bash

# Lint all Python files outside pyextra/ and panda/: first fast
# pyflakes-only checks via flake8, then (only if clean) full pylint.
PYFILES=$(find . -iname "*.py" | grep -vi "^\./pyextra.*" | grep -vi "^\./panda")

# Only pyflakes checks (--select=F)
flake8 --select=F $PYFILES
RESULT=$?
if [ $RESULT -eq 0 ]; then
  pylint $PYFILES
  # Bug fix: this line was `RESULT=$? & 3`, which ran the assignment in a
  # backgrounded subshell (so pylint's exit status was discarded) and then
  # tried to execute `3` as a command.
  RESULT=$?
fi

[ $RESULT -ne 0 ] && exit 1
exit 0
|
@ -1 +0,0 @@ |
|||||||
*.cpp |
|
@ -1,6 +0,0 @@ |
|||||||
Import('env')

# parser
# Rebuild the Cython extension common_pyx.so whenever the setup script or
# the clock.pyx source changes.
env.Command(['common_pyx.so'],
            ['common_pyx_setup.py', 'clock.pyx'],
            "cd common && python3 common_pyx_setup.py build_ext --inplace")
|
@ -1,91 +0,0 @@ |
|||||||
import os |
|
||||||
import binascii |
|
||||||
import itertools |
|
||||||
import re |
|
||||||
import struct |
|
||||||
import subprocess |
|
||||||
import random |
|
||||||
|
|
||||||
ANDROID = os.path.isfile('/EON')  # true when running on an EON device


def getprop(key):
  """Return the Android system property `key`, or "" when not on Android."""
  if ANDROID:
    return subprocess.check_output(["getprop", key], encoding='utf8').strip()
  return ""
|
||||||
|
|
||||||
def get_imei(slot):
  """Return the IMEI for SIM slot "0" or "1".

  Falls back to a random 15-digit string when the service call returns
  nothing (e.g. off-Android), so callers always get some identifier.
  Raises ValueError for any other slot.
  """
  slot = str(slot)
  if slot not in ("0", "1"):
    raise ValueError("SIM slot must be 0 or 1")

  ret = parse_service_call_string(service_call(["iphonesubinfo", "3" ,"i32", str(slot)]))
  if not ret:
    # allow non android to be identified differently
    ret = "%015d" % random.randint(0, 1<<32)
  return ret
|
||||||
|
|
||||||
def get_serial():
  """Return the device serial number; "cccccccc" placeholder off-device."""
  serial = getprop("ro.serialno")
  return serial if serial != "" else "cccccccc"
|
||||||
|
|
||||||
def get_subscriber_info():
  """Return the SIM subscriber id string (presumably the ICCID — confirm),
  or "" when unavailable or implausibly short (< 8 chars)."""
  ret = parse_service_call_string(service_call(["iphonesubinfo", "7"]))
  if ret is None or len(ret) < 8:
    return ""
  return ret
|
||||||
|
|
||||||
def reboot(reason=None):
  """Reboot the device through Android's power service.

  `reason` is an optional string passed to IPowerManager.reboot; None
  sends a null reason.
  """
  args = ["null"] if reason is None else ["s16", reason]

  subprocess.check_output([
    "service", "call", "power", "16",  # IPowerManager.reboot
    "i32", "0",                        # no confirmation,
    *args,
    "i32", "1"                         # wait
  ])
|
||||||
|
|
||||||
def service_call(call):
  """Run `service call <call...>` and return the parcel payload bytes.

  Returns None when not on Android or when the command's output does not
  contain a Parcel dump.
  """
  if not ANDROID:
    return None

  ret = subprocess.check_output(["service", "call", *call], encoding='utf8').strip()
  if 'Parcel' not in ret:
    return None

  return parse_service_call_bytes(ret)
|
||||||
|
|
||||||
def parse_service_call_unpack(r, fmt):
  """struct-unpack parcel bytes `r` with `fmt` and return the first field,
  or None on any failure (wrong length, bad format, ...)."""
  try:
    value = struct.unpack(fmt, r)[0]
  except Exception:
    value = None
  return value
|
||||||
|
|
||||||
def parse_service_call_string(r):
  """Decode a parcel string payload: drop the 8-byte length field, decode
  UTF-16-BE, swap each adjacent character pair back into order, and strip
  NUL padding. Returns None on any failure."""
  try:
    txt = r[8:].decode('utf_16_be')  # cut off length field, then decode

    # All pairs of two characters seem to be swapped. Not sure why.
    chunks = []
    for i in range(0, len(txt), 2):
      pair = txt[i:i + 2]
      # A trailing lone character is padded with NUL, matching zip_longest.
      chunks.append(pair[::-1] if len(pair) == 2 else '\x00' + pair)

    return ''.join(chunks).replace('\x00', '')
  except Exception:
    return None
|
||||||
|
|
||||||
def parse_service_call_bytes(ret):
  """Extract the raw payload from a `service call` Parcel dump.

  Pulls every 8-hex-digit group (preceded by a space or '(') out of the
  text and concatenates the decoded bytes. Returns None on any failure.
  """
  try:
    groups = re.findall(r'[ (]([0-9a-f]{8})', ret)
    return b''.join(bytes.fromhex(g) for g in groups)
  except Exception:
    return None
|
@ -1,42 +0,0 @@ |
|||||||
import jwt |
|
||||||
import requests |
|
||||||
from datetime import datetime, timedelta |
|
||||||
|
|
||||||
from selfdrive.version import version |
|
||||||
|
|
||||||
class Api():
  """Client for the comma.ai API, authenticated with a JWT signed by the
  device's persistent RSA key."""

  def __init__(self, dongle_id):
    self.dongle_id = dongle_id
    # Device private key provisioned in the persist partition.
    with open('/persist/comma/id_rsa') as f:
      self.private_key = f.read()

  def get(self, *args, **kwargs):
    """HTTP GET against the API; see request()."""
    return self.request('GET', *args, **kwargs)

  def post(self, *args, **kwargs):
    """HTTP POST against the API; see request()."""
    return self.request('POST', *args, **kwargs)

  def request(self, method, endpoint, timeout=None, access_token=None, **params):
    """Forward to module-level api_get with this instance's arguments."""
    return api_get(endpoint, method=method, timeout=timeout, access_token=access_token, **params)

  def get_token(self):
    """Build a JWT identifying this dongle, valid for one hour."""
    now = datetime.utcnow()
    payload = {
      'identity': self.dongle_id,
      'nbf': now,
      'iat': now,
      'exp': now + timedelta(hours=1)
    }
    # NOTE(review): .decode assumes PyJWT < 2.0 where encode() returns
    # bytes — confirm the pinned dependency version.
    return jwt.encode(payload, self.private_key, algorithm='RS256').decode('utf8')
|
||||||
|
|
||||||
def api_get(endpoint, method='GET', timeout=None, access_token=None, **params):
  """Issue an HTTP request against the comma.ai API.

  `access_token`, when given, is sent as a JWT Authorization header.
  Remaining keyword arguments become query parameters. Returns the
  `requests` Response object.
  """
  headers = {}
  if access_token is not None:
    headers['Authorization'] = "JWT " + access_token

  headers['User-Agent'] = "openpilot-" + version

  url = "https://api.commadotai.com/" + endpoint
  return requests.request(method, url, timeout=timeout, headers=headers, params=params)
|
||||||
|
|
@ -1,99 +0,0 @@ |
|||||||
import os |
|
||||||
import subprocess |
|
||||||
import glob |
|
||||||
import hashlib |
|
||||||
import shutil |
|
||||||
from common.basedir import BASEDIR |
|
||||||
from selfdrive.swaglog import cloudlog |
|
||||||
|
|
||||||
android_packages = ("ai.comma.plus.offroad", "ai.comma.plus.frame") |
|
||||||
|
|
||||||
def get_installed_apks(): |
|
||||||
dat = subprocess.check_output(["pm", "list", "packages", "-f"], encoding='utf8').strip().split("\n") |
|
||||||
ret = {} |
|
||||||
for x in dat: |
|
||||||
if x.startswith("package:"): |
|
||||||
v,k = x.split("package:")[1].split("=") |
|
||||||
ret[k] = v |
|
||||||
return ret |
|
||||||
|
|
||||||
def install_apk(path): |
|
||||||
# can only install from world readable path |
|
||||||
install_path = "/sdcard/%s" % os.path.basename(path) |
|
||||||
shutil.copyfile(path, install_path) |
|
||||||
|
|
||||||
ret = subprocess.call(["pm", "install", "-r", install_path]) |
|
||||||
os.remove(install_path) |
|
||||||
return ret == 0 |
|
||||||
|
|
||||||
def start_frame(): |
|
||||||
set_package_permissions() |
|
||||||
system("am start -n ai.comma.plus.frame/.MainActivity") |
|
||||||
|
|
||||||
def set_package_permissions(): |
|
||||||
pm_grant("ai.comma.plus.offroad", "android.permission.ACCESS_FINE_LOCATION") |
|
||||||
pm_grant("ai.comma.plus.offroad", "android.permission.READ_PHONE_STATE") |
|
||||||
appops_set("ai.comma.plus.offroad", "SU", "allow") |
|
||||||
appops_set("ai.comma.plus.offroad", "WIFI_SCAN", "allow") |
|
||||||
appops_set("ai.comma.plus.offroad", "READ_EXTERNAL_STORAGE", "allow") |
|
||||||
appops_set("ai.comma.plus.offroad", "WRITE_EXTERNAL_STORAGE", "allow") |
|
||||||
|
|
||||||
def appops_set(package, op, mode): |
|
||||||
system(f"LD_LIBRARY_PATH= appops set {package} {op} {mode}") |
|
||||||
|
|
||||||
def pm_grant(package, permission): |
|
||||||
system(f"pm grant {package} {permission}") |
|
||||||
|
|
||||||
def system(cmd): |
|
||||||
try: |
|
||||||
cloudlog.info("running %s" % cmd) |
|
||||||
subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) |
|
||||||
except subprocess.CalledProcessError as e: |
|
||||||
cloudlog.event("running failed", |
|
||||||
cmd=e.cmd, |
|
||||||
output=e.output[-1024:], |
|
||||||
returncode=e.returncode) |
|
||||||
|
|
||||||
# *** external functions *** |
|
||||||
|
|
||||||
def update_apks(): |
|
||||||
# install apks |
|
||||||
installed = get_installed_apks() |
|
||||||
|
|
||||||
install_apks = glob.glob(os.path.join(BASEDIR, "apk/*.apk")) |
|
||||||
for apk in install_apks: |
|
||||||
app = os.path.basename(apk)[:-4] |
|
||||||
if app not in installed: |
|
||||||
installed[app] = None |
|
||||||
|
|
||||||
cloudlog.info("installed apks %s" % (str(installed), )) |
|
||||||
|
|
||||||
for app in installed.keys(): |
|
||||||
apk_path = os.path.join(BASEDIR, "apk/"+app+".apk") |
|
||||||
if not os.path.exists(apk_path): |
|
||||||
continue |
|
||||||
|
|
||||||
h1 = hashlib.sha1(open(apk_path, 'rb').read()).hexdigest() |
|
||||||
h2 = None |
|
||||||
if installed[app] is not None: |
|
||||||
h2 = hashlib.sha1(open(installed[app], 'rb').read()).hexdigest() |
|
||||||
cloudlog.info("comparing version of %s %s vs %s" % (app, h1, h2)) |
|
||||||
|
|
||||||
if h2 is None or h1 != h2: |
|
||||||
cloudlog.info("installing %s" % app) |
|
||||||
|
|
||||||
success = install_apk(apk_path) |
|
||||||
if not success: |
|
||||||
cloudlog.info("needing to uninstall %s" % app) |
|
||||||
system("pm uninstall %s" % app) |
|
||||||
success = install_apk(apk_path) |
|
||||||
|
|
||||||
assert success |
|
||||||
|
|
||||||
def pm_apply_packages(cmd): |
|
||||||
for p in android_packages: |
|
||||||
system("pm %s %s" % (cmd, p)) |
|
||||||
|
|
||||||
if __name__ == "__main__": |
|
||||||
update_apks() |
|
||||||
|
|
@ -1,4 +0,0 @@ |
|||||||
import os |
|
||||||
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../")) |
|
||||||
|
|
||||||
|
|
@ -1,16 +0,0 @@ |
|||||||
from posix.time cimport clock_gettime, timespec, CLOCK_BOOTTIME, CLOCK_MONOTONIC_RAW |
|
||||||
|
|
||||||
cdef double readclock(int clock_id): |
|
||||||
cdef timespec ts |
|
||||||
cdef double current |
|
||||||
|
|
||||||
clock_gettime(clock_id, &ts) |
|
||||||
current = ts.tv_sec + (ts.tv_nsec / 1000000000.) |
|
||||||
return current |
|
||||||
|
|
||||||
|
|
||||||
def monotonic_time(): |
|
||||||
return readclock(CLOCK_MONOTONIC_RAW) |
|
||||||
|
|
||||||
def sec_since_boot(): |
|
||||||
return readclock(CLOCK_BOOTTIME) |
|
@ -1,20 +0,0 @@ |
|||||||
from distutils.core import Extension, setup # pylint: disable=import-error,no-name-in-module |
|
||||||
from Cython.Build import cythonize |
|
||||||
|
|
||||||
from common.cython_hacks import BuildExtWithoutPlatformSuffix |
|
||||||
|
|
||||||
sourcefiles = ['clock.pyx'] |
|
||||||
extra_compile_args = ["-std=c++11"] |
|
||||||
|
|
||||||
setup(name='Common', |
|
||||||
cmdclass={'build_ext': BuildExtWithoutPlatformSuffix}, |
|
||||||
ext_modules=cythonize( |
|
||||||
Extension( |
|
||||||
"common_pyx", |
|
||||||
language="c++", |
|
||||||
sources=sourcefiles, |
|
||||||
extra_compile_args=extra_compile_args, |
|
||||||
) |
|
||||||
), |
|
||||||
nthreads=4, |
|
||||||
) |
|
@ -1,23 +0,0 @@ |
|||||||
import os |
|
||||||
import sysconfig |
|
||||||
from Cython.Distutils import build_ext |
|
||||||
|
|
||||||
def get_ext_filename_without_platform_suffix(filename): |
|
||||||
name, ext = os.path.splitext(filename) |
|
||||||
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX') |
|
||||||
|
|
||||||
if ext_suffix == ext: |
|
||||||
return filename |
|
||||||
|
|
||||||
ext_suffix = ext_suffix.replace(ext, '') |
|
||||||
idx = name.find(ext_suffix) |
|
||||||
|
|
||||||
if idx == -1: |
|
||||||
return filename |
|
||||||
else: |
|
||||||
return name[:idx] + ext |
|
||||||
|
|
||||||
class BuildExtWithoutPlatformSuffix(build_ext): |
|
||||||
def get_ext_filename(self, ext_name): |
|
||||||
filename = super().get_ext_filename(ext_name) |
|
||||||
return get_ext_filename_without_platform_suffix(filename) |
|
@ -1,49 +0,0 @@ |
|||||||
import os |
|
||||||
import sys |
|
||||||
import fcntl |
|
||||||
import hashlib |
|
||||||
from cffi import FFI |
|
||||||
|
|
||||||
|
|
||||||
def ffi_wrap(name, c_code, c_header, tmpdir="/tmp/ccache", cflags="", libraries=None): |
|
||||||
if libraries is None: |
|
||||||
libraries = [] |
|
||||||
|
|
||||||
cache = name + "_" + hashlib.sha1(c_code.encode('utf-8')).hexdigest() |
|
||||||
try: |
|
||||||
os.mkdir(tmpdir) |
|
||||||
except OSError: |
|
||||||
pass |
|
||||||
|
|
||||||
fd = os.open(tmpdir, 0) |
|
||||||
fcntl.flock(fd, fcntl.LOCK_EX) |
|
||||||
try: |
|
||||||
sys.path.append(tmpdir) |
|
||||||
try: |
|
||||||
mod = __import__(cache) |
|
||||||
except Exception: |
|
||||||
print("cache miss {0}".format(cache)) |
|
||||||
compile_code(cache, c_code, c_header, tmpdir, cflags, libraries) |
|
||||||
mod = __import__(cache) |
|
||||||
finally: |
|
||||||
os.close(fd) |
|
||||||
|
|
||||||
return mod.ffi, mod.lib |
|
||||||
|
|
||||||
|
|
||||||
def compile_code(name, c_code, c_header, directory, cflags="", libraries=None): |
|
||||||
if libraries is None: |
|
||||||
libraries = [] |
|
||||||
|
|
||||||
ffibuilder = FFI() |
|
||||||
ffibuilder.set_source(name, c_code, source_extension='.cpp', libraries=libraries) |
|
||||||
ffibuilder.cdef(c_header) |
|
||||||
os.environ['OPT'] = "-fwrapv -O2 -DNDEBUG -std=c++11" |
|
||||||
os.environ['CFLAGS'] = cflags |
|
||||||
ffibuilder.compile(verbose=True, debug=False, tmpdir=directory) |
|
||||||
|
|
||||||
|
|
||||||
def wrap_compiled(name, directory): |
|
||||||
sys.path.append(directory) |
|
||||||
mod = __import__(name) |
|
||||||
return mod.ffi, mod.lib |
|
@ -1,109 +0,0 @@ |
|||||||
import os |
|
||||||
import shutil |
|
||||||
import tempfile |
|
||||||
from atomicwrites import AtomicWriter |
|
||||||
|
|
||||||
def mkdirs_exists_ok(path): |
|
||||||
try: |
|
||||||
os.makedirs(path) |
|
||||||
except OSError: |
|
||||||
if not os.path.isdir(path): |
|
||||||
raise |
|
||||||
|
|
||||||
def rm_not_exists_ok(path): |
|
||||||
try: |
|
||||||
os.remove(path) |
|
||||||
except OSError: |
|
||||||
if os.path.exists(path): |
|
||||||
raise |
|
||||||
|
|
||||||
def rm_tree_or_link(path): |
|
||||||
if os.path.islink(path): |
|
||||||
os.unlink(path) |
|
||||||
elif os.path.isdir(path): |
|
||||||
shutil.rmtree(path) |
|
||||||
|
|
||||||
def get_tmpdir_on_same_filesystem(path): |
|
||||||
normpath = os.path.normpath(path) |
|
||||||
parts = normpath.split("/") |
|
||||||
if len(parts) > 1 and parts[1] == "scratch": |
|
||||||
return "/scratch/tmp" |
|
||||||
elif len(parts) > 2 and parts[2] == "runner": |
|
||||||
return "/{}/runner/tmp".format(parts[1]) |
|
||||||
return "/tmp" |
|
||||||
|
|
||||||
class AutoMoveTempdir(): |
|
||||||
def __init__(self, target_path, temp_dir=None): |
|
||||||
self._target_path = target_path |
|
||||||
self._path = tempfile.mkdtemp(dir=temp_dir) |
|
||||||
|
|
||||||
@property |
|
||||||
def name(self): |
|
||||||
return self._path |
|
||||||
|
|
||||||
def close(self): |
|
||||||
os.rename(self._path, self._target_path) |
|
||||||
|
|
||||||
def __enter__(self): return self |
|
||||||
|
|
||||||
def __exit__(self, type, value, traceback): |
|
||||||
if type is None: |
|
||||||
self.close() |
|
||||||
else: |
|
||||||
shutil.rmtree(self._path) |
|
||||||
|
|
||||||
class NamedTemporaryDir(): |
|
||||||
def __init__(self, temp_dir=None): |
|
||||||
self._path = tempfile.mkdtemp(dir=temp_dir) |
|
||||||
|
|
||||||
@property |
|
||||||
def name(self): |
|
||||||
return self._path |
|
||||||
|
|
||||||
def close(self): |
|
||||||
shutil.rmtree(self._path) |
|
||||||
|
|
||||||
def __enter__(self): return self |
|
||||||
|
|
||||||
def __exit__(self, type, value, traceback): |
|
||||||
self.close() |
|
||||||
|
|
||||||
def _get_fileobject_func(writer, temp_dir): |
|
||||||
def _get_fileobject(): |
|
||||||
file_obj = writer.get_fileobject(dir=temp_dir) |
|
||||||
os.chmod(file_obj.name, 0o644) |
|
||||||
return file_obj |
|
||||||
return _get_fileobject |
|
||||||
|
|
||||||
def atomic_write_on_fs_tmp(path, **kwargs): |
|
||||||
"""Creates an atomic writer using a temporary file in a temporary directory |
|
||||||
on the same filesystem as path. |
|
||||||
""" |
|
||||||
# TODO(mgraczyk): This use of AtomicWriter relies on implementation details to set the temp |
|
||||||
# directory. |
|
||||||
writer = AtomicWriter(path, **kwargs) |
|
||||||
return writer._open(_get_fileobject_func(writer, get_tmpdir_on_same_filesystem(path))) |
|
||||||
|
|
||||||
|
|
||||||
def atomic_write_in_dir(path, **kwargs): |
|
||||||
"""Creates an atomic writer using a temporary file in the same directory |
|
||||||
as the destination file. |
|
||||||
""" |
|
||||||
writer = AtomicWriter(path, **kwargs) |
|
||||||
return writer._open(_get_fileobject_func(writer, os.path.dirname(path))) |
|
||||||
|
|
||||||
def atomic_write_in_dir_neos(path, contents, mode=None): |
|
||||||
""" |
|
||||||
Atomically writes contents to path using a temporary file in the same directory |
|
||||||
as path. Useful on NEOS, where `os.link` (required by atomic_write_in_dir) is missing. |
|
||||||
""" |
|
||||||
|
|
||||||
f = tempfile.NamedTemporaryFile(delete=False, prefix=".tmp", dir=os.path.dirname(path)) |
|
||||||
f.write(contents) |
|
||||||
f.flush() |
|
||||||
if mode is not None: |
|
||||||
os.fchmod(f.fileno(), mode) |
|
||||||
os.fsync(f.fileno()) |
|
||||||
f.close() |
|
||||||
|
|
||||||
os.rename(f.name, path) |
|
@ -1,10 +0,0 @@ |
|||||||
class FirstOrderFilter(): |
|
||||||
# first order filter |
|
||||||
def __init__(self, x0, ts, dt): |
|
||||||
self.k = (dt / ts) / (1. + dt / ts) |
|
||||||
self.x = x0 |
|
||||||
|
|
||||||
def update(self, x): |
|
||||||
self.x = (1. - self.k) * self.x + self.k * x |
|
||||||
|
|
||||||
|
|
@ -1 +0,0 @@ |
|||||||
simple_kalman_impl.c |
|
@ -1,6 +0,0 @@ |
|||||||
Import('env') |
|
||||||
|
|
||||||
env.Command(['simple_kalman_impl.so'], |
|
||||||
['simple_kalman_impl.pyx', 'simple_kalman_impl.pxd', 'simple_kalman_setup.py'], |
|
||||||
"cd common/kalman && python3 simple_kalman_setup.py build_ext --inplace") |
|
||||||
|
|
@ -1,3 +0,0 @@ |
|||||||
# pylint: skip-file |
|
||||||
from common.kalman.simple_kalman_impl import KF1D as KF1D |
|
||||||
assert KF1D |
|
@ -1,16 +0,0 @@ |
|||||||
cdef class KF1D: |
|
||||||
cdef public: |
|
||||||
double x0_0 |
|
||||||
double x1_0 |
|
||||||
double K0_0 |
|
||||||
double K1_0 |
|
||||||
double A0_0 |
|
||||||
double A0_1 |
|
||||||
double A1_0 |
|
||||||
double A1_1 |
|
||||||
double C0_0 |
|
||||||
double C0_1 |
|
||||||
double A_K_0 |
|
||||||
double A_K_1 |
|
||||||
double A_K_2 |
|
||||||
double A_K_3 |
|
@ -1,36 +0,0 @@ |
|||||||
# cython: language_level=3 |
|
||||||
|
|
||||||
cdef class KF1D: |
|
||||||
def __init__(self, x0, A, C, K): |
|
||||||
self.x0_0 = x0[0][0] |
|
||||||
self.x1_0 = x0[1][0] |
|
||||||
self.A0_0 = A[0][0] |
|
||||||
self.A0_1 = A[0][1] |
|
||||||
self.A1_0 = A[1][0] |
|
||||||
self.A1_1 = A[1][1] |
|
||||||
self.C0_0 = C[0] |
|
||||||
self.C0_1 = C[1] |
|
||||||
self.K0_0 = K[0][0] |
|
||||||
self.K1_0 = K[1][0] |
|
||||||
|
|
||||||
self.A_K_0 = self.A0_0 - self.K0_0 * self.C0_0 |
|
||||||
self.A_K_1 = self.A0_1 - self.K0_0 * self.C0_1 |
|
||||||
self.A_K_2 = self.A1_0 - self.K1_0 * self.C0_0 |
|
||||||
self.A_K_3 = self.A1_1 - self.K1_0 * self.C0_1 |
|
||||||
|
|
||||||
def update(self, meas): |
|
||||||
cdef double x0_0 = self.A_K_0 * self.x0_0 + self.A_K_1 * self.x1_0 + self.K0_0 * meas |
|
||||||
cdef double x1_0 = self.A_K_2 * self.x0_0 + self.A_K_3 * self.x1_0 + self.K1_0 * meas |
|
||||||
self.x0_0 = x0_0 |
|
||||||
self.x1_0 = x1_0 |
|
||||||
|
|
||||||
return [self.x0_0, self.x1_0] |
|
||||||
|
|
||||||
@property |
|
||||||
def x(self): |
|
||||||
return [[self.x0_0], [self.x1_0]] |
|
||||||
|
|
||||||
@x.setter |
|
||||||
def x(self, x): |
|
||||||
self.x0_0 = x[0][0] |
|
||||||
self.x1_0 = x[1][0] |
|
@ -1,23 +0,0 @@ |
|||||||
import numpy as np |
|
||||||
|
|
||||||
|
|
||||||
class KF1D: |
|
||||||
# this EKF assumes constant covariance matrix, so calculations are much simpler |
|
||||||
# the Kalman gain also needs to be precomputed using the control module |
|
||||||
|
|
||||||
def __init__(self, x0, A, C, K): |
|
||||||
self.x = x0 |
|
||||||
self.A = A |
|
||||||
self.C = C |
|
||||||
self.K = K |
|
||||||
|
|
||||||
self.A_K = self.A - np.dot(self.K, self.C) |
|
||||||
|
|
||||||
# K matrix needs to be pre-computed as follow: |
|
||||||
# import control |
|
||||||
# (x, l, K) = control.dare(np.transpose(self.A), np.transpose(self.C), Q, R) |
|
||||||
# self.K = np.transpose(K) |
|
||||||
|
|
||||||
def update(self, meas): |
|
||||||
self.x = np.dot(self.A_K, self.x) + np.dot(self.K, meas) |
|
||||||
return self.x |
|
@ -1,9 +0,0 @@ |
|||||||
from distutils.core import Extension, setup |
|
||||||
|
|
||||||
from Cython.Build import cythonize |
|
||||||
|
|
||||||
from common.cython_hacks import BuildExtWithoutPlatformSuffix |
|
||||||
|
|
||||||
setup(name='Simple Kalman Implementation', |
|
||||||
cmdclass={'build_ext': BuildExtWithoutPlatformSuffix}, |
|
||||||
ext_modules=cythonize(Extension("simple_kalman_impl", ["simple_kalman_impl.pyx"]))) |
|
@ -1,85 +0,0 @@ |
|||||||
import unittest |
|
||||||
import random |
|
||||||
import timeit |
|
||||||
import numpy as np |
|
||||||
|
|
||||||
from common.kalman.simple_kalman import KF1D |
|
||||||
from common.kalman.simple_kalman_old import KF1D as KF1D_old |
|
||||||
|
|
||||||
|
|
||||||
class TestSimpleKalman(unittest.TestCase): |
|
||||||
def setUp(self): |
|
||||||
dt = 0.01 |
|
||||||
x0_0 = 0.0 |
|
||||||
x1_0 = 0.0 |
|
||||||
A0_0 = 1.0 |
|
||||||
A0_1 = dt |
|
||||||
A1_0 = 0.0 |
|
||||||
A1_1 = 1.0 |
|
||||||
C0_0 = 1.0 |
|
||||||
C0_1 = 0.0 |
|
||||||
K0_0 = 0.12287673 |
|
||||||
K1_0 = 0.29666309 |
|
||||||
|
|
||||||
self.kf_old = KF1D_old(x0=np.matrix([[x0_0], [x1_0]]), |
|
||||||
A=np.matrix([[A0_0, A0_1], [A1_0, A1_1]]), |
|
||||||
C=np.matrix([C0_0, C0_1]), |
|
||||||
K=np.matrix([[K0_0], [K1_0]])) |
|
||||||
|
|
||||||
self.kf = KF1D(x0=[[x0_0], [x1_0]], |
|
||||||
A=[[A0_0, A0_1], [A1_0, A1_1]], |
|
||||||
C=[C0_0, C0_1], |
|
||||||
K=[[K0_0], [K1_0]]) |
|
||||||
|
|
||||||
def test_getter_setter(self): |
|
||||||
self.kf.x = [[1.0], [1.0]] |
|
||||||
self.assertEqual(self.kf.x, [[1.0], [1.0]]) |
|
||||||
|
|
||||||
def update_returns_state(self): |
|
||||||
x = self.kf.update(100) |
|
||||||
self.assertEqual(x, self.kf.x) |
|
||||||
|
|
||||||
def test_old_equal_new(self): |
|
||||||
for _ in range(1000): |
|
||||||
v_wheel = random.uniform(0, 200) |
|
||||||
|
|
||||||
x_old = self.kf_old.update(v_wheel) |
|
||||||
x = self.kf.update(v_wheel) |
|
||||||
|
|
||||||
# Compare the output x, verify that the error is less than 1e-4 |
|
||||||
self.assertAlmostEqual(x_old[0], x[0]) |
|
||||||
self.assertAlmostEqual(x_old[1], x[1]) |
|
||||||
|
|
||||||
|
|
||||||
def test_new_is_faster(self): |
|
||||||
setup = """ |
|
||||||
import numpy as np |
|
||||||
|
|
||||||
from common.kalman.simple_kalman import KF1D |
|
||||||
from common.kalman.simple_kalman_old import KF1D as KF1D_old |
|
||||||
|
|
||||||
dt = 0.01 |
|
||||||
x0_0 = 0.0 |
|
||||||
x1_0 = 0.0 |
|
||||||
A0_0 = 1.0 |
|
||||||
A0_1 = dt |
|
||||||
A1_0 = 0.0 |
|
||||||
A1_1 = 1.0 |
|
||||||
C0_0 = 1.0 |
|
||||||
C0_1 = 0.0 |
|
||||||
K0_0 = 0.12287673 |
|
||||||
K1_0 = 0.29666309 |
|
||||||
|
|
||||||
kf_old = KF1D_old(x0=np.matrix([[x0_0], [x1_0]]), |
|
||||||
A=np.matrix([[A0_0, A0_1], [A1_0, A1_1]]), |
|
||||||
C=np.matrix([C0_0, C0_1]), |
|
||||||
K=np.matrix([[K0_0], [K1_0]])) |
|
||||||
|
|
||||||
kf = KF1D(x0=[[x0_0], [x1_0]], |
|
||||||
A=[[A0_0, A0_1], [A1_0, A1_1]], |
|
||||||
C=[C0_0, C0_1], |
|
||||||
K=[[K0_0], [K1_0]]) |
|
||||||
""" |
|
||||||
kf_speed = timeit.timeit("kf.update(1234)", setup=setup, number=10000) |
|
||||||
kf_old_speed = timeit.timeit("kf_old.update(1234)", setup=setup, number=10000) |
|
||||||
self.assertTrue(kf_speed < kf_old_speed / 4) |
|
@ -1,152 +0,0 @@ |
|||||||
import os |
|
||||||
import sys |
|
||||||
import copy |
|
||||||
import json |
|
||||||
import socket |
|
||||||
import logging |
|
||||||
from threading import local |
|
||||||
from collections import OrderedDict |
|
||||||
from contextlib import contextmanager |
|
||||||
|
|
||||||
def json_handler(obj): |
|
||||||
# if isinstance(obj, (datetime.date, datetime.time)): |
|
||||||
# return obj.isoformat() |
|
||||||
return repr(obj) |
|
||||||
|
|
||||||
def json_robust_dumps(obj): |
|
||||||
return json.dumps(obj, default=json_handler) |
|
||||||
|
|
||||||
class NiceOrderedDict(OrderedDict): |
|
||||||
def __str__(self): |
|
||||||
return json_robust_dumps(self) |
|
||||||
|
|
||||||
class SwagFormatter(logging.Formatter): |
|
||||||
def __init__(self, swaglogger): |
|
||||||
logging.Formatter.__init__(self, None, '%a %b %d %H:%M:%S %Z %Y') |
|
||||||
|
|
||||||
self.swaglogger = swaglogger |
|
||||||
self.host = socket.gethostname() |
|
||||||
|
|
||||||
def format_dict(self, record): |
|
||||||
record_dict = NiceOrderedDict() |
|
||||||
|
|
||||||
if isinstance(record.msg, dict): |
|
||||||
record_dict['msg'] = record.msg |
|
||||||
else: |
|
||||||
try: |
|
||||||
record_dict['msg'] = record.getMessage() |
|
||||||
except (ValueError, TypeError): |
|
||||||
record_dict['msg'] = [record.msg]+record.args |
|
||||||
|
|
||||||
record_dict['ctx'] = self.swaglogger.get_ctx() |
|
||||||
|
|
||||||
if record.exc_info: |
|
||||||
record_dict['exc_info'] = self.formatException(record.exc_info) |
|
||||||
|
|
||||||
record_dict['level'] = record.levelname |
|
||||||
record_dict['levelnum'] = record.levelno |
|
||||||
record_dict['name'] = record.name |
|
||||||
record_dict['filename'] = record.filename |
|
||||||
record_dict['lineno'] = record.lineno |
|
||||||
record_dict['pathname'] = record.pathname |
|
||||||
record_dict['module'] = record.module |
|
||||||
record_dict['funcName'] = record.funcName |
|
||||||
record_dict['host'] = self.host |
|
||||||
record_dict['process'] = record.process |
|
||||||
record_dict['thread'] = record.thread |
|
||||||
record_dict['threadName'] = record.threadName |
|
||||||
record_dict['created'] = record.created |
|
||||||
|
|
||||||
return record_dict |
|
||||||
|
|
||||||
def format(self, record): |
|
||||||
return json_robust_dumps(self.format_dict(record)) |
|
||||||
|
|
||||||
class SwagErrorFilter(logging.Filter): |
|
||||||
def filter(self, record): |
|
||||||
return record.levelno < logging.ERROR |
|
||||||
|
|
||||||
_tmpfunc = lambda: 0 |
|
||||||
_srcfile = os.path.normcase(_tmpfunc.__code__.co_filename) |
|
||||||
|
|
||||||
class SwagLogger(logging.Logger): |
|
||||||
def __init__(self): |
|
||||||
logging.Logger.__init__(self, "swaglog") |
|
||||||
|
|
||||||
self.global_ctx = {} |
|
||||||
|
|
||||||
self.log_local = local() |
|
||||||
self.log_local.ctx = {} |
|
||||||
|
|
||||||
def local_ctx(self): |
|
||||||
try: |
|
||||||
return self.log_local.ctx |
|
||||||
except AttributeError: |
|
||||||
self.log_local.ctx = {} |
|
||||||
return self.log_local.ctx |
|
||||||
|
|
||||||
def get_ctx(self): |
|
||||||
return dict(self.local_ctx(), **self.global_ctx) |
|
||||||
|
|
||||||
@contextmanager |
|
||||||
def ctx(self, **kwargs): |
|
||||||
old_ctx = self.local_ctx() |
|
||||||
self.log_local.ctx = copy.copy(old_ctx) or {} |
|
||||||
self.log_local.ctx.update(kwargs) |
|
||||||
try: |
|
||||||
yield |
|
||||||
finally: |
|
||||||
self.log_local.ctx = old_ctx |
|
||||||
|
|
||||||
def bind(self, **kwargs): |
|
||||||
self.local_ctx().update(kwargs) |
|
||||||
|
|
||||||
def bind_global(self, **kwargs): |
|
||||||
self.global_ctx.update(kwargs) |
|
||||||
|
|
||||||
def event(self, event_name, *args, **kwargs): |
|
||||||
evt = NiceOrderedDict() |
|
||||||
evt['event'] = event_name |
|
||||||
if args: |
|
||||||
evt['args'] = args |
|
||||||
evt.update(kwargs) |
|
||||||
ctx = self.get_ctx() |
|
||||||
if ctx: |
|
||||||
evt['ctx'] = self.get_ctx() |
|
||||||
if 'error' in kwargs: |
|
||||||
self.error(evt) |
|
||||||
else: |
|
||||||
self.info(evt) |
|
||||||
|
|
||||||
if __name__ == "__main__": |
|
||||||
log = SwagLogger() |
|
||||||
|
|
||||||
stdout_handler = logging.StreamHandler(sys.stdout) |
|
||||||
stdout_handler.setLevel(logging.INFO) |
|
||||||
stdout_handler.addFilter(SwagErrorFilter()) |
|
||||||
log.addHandler(stdout_handler) |
|
||||||
|
|
||||||
stderr_handler = logging.StreamHandler(sys.stderr) |
|
||||||
stderr_handler.setLevel(logging.ERROR) |
|
||||||
log.addHandler(stderr_handler) |
|
||||||
|
|
||||||
log.info("asdasd %s", "a") |
|
||||||
log.info({'wut': 1}) |
|
||||||
log.warning("warning") |
|
||||||
log.error("error") |
|
||||||
log.critical("critical") |
|
||||||
log.event("test", x="y") |
|
||||||
|
|
||||||
with log.ctx(): |
|
||||||
stdout_handler.setFormatter(SwagFormatter(log)) |
|
||||||
stderr_handler.setFormatter(SwagFormatter(log)) |
|
||||||
log.bind(user="some user") |
|
||||||
log.info("in req") |
|
||||||
print("") |
|
||||||
log.warning("warning") |
|
||||||
print("") |
|
||||||
log.error("error") |
|
||||||
print("") |
|
||||||
log.critical("critical") |
|
||||||
print("") |
|
||||||
log.event("do_req", a=1, b="c") |
|
@ -1,21 +0,0 @@ |
|||||||
def int_rnd(x): |
|
||||||
return int(round(x)) |
|
||||||
|
|
||||||
def clip(x, lo, hi): |
|
||||||
return max(lo, min(hi, x)) |
|
||||||
|
|
||||||
def interp(x, xp, fp): |
|
||||||
N = len(xp) |
|
||||||
def get_interp(xv): |
|
||||||
hi = 0 |
|
||||||
while hi < N and xv > xp[hi]: |
|
||||||
hi += 1 |
|
||||||
low = hi - 1 |
|
||||||
return fp[-1] if hi == N and xv > xp[low] else ( |
|
||||||
fp[0] if hi == 0 else |
|
||||||
(xv - xp[low]) * (fp[hi] - fp[low]) / (xp[hi] - xp[low]) + fp[low]) |
|
||||||
return [get_interp(v) for v in x] if hasattr( |
|
||||||
x, '__iter__') else get_interp(x) |
|
||||||
|
|
||||||
def mean(x): |
|
||||||
return sum(x) / len(x) |
|
@ -1,409 +0,0 @@ |
|||||||
#!/usr/bin/env python3 |
|
||||||
"""ROS has a parameter server, we have files. |
|
||||||
|
|
||||||
The parameter store is a persistent key value store, implemented as a directory with a writer lock. |
|
||||||
On Android, we store params under params_dir = /data/params. The writer lock is a file |
|
||||||
"<params_dir>/.lock" taken using flock(), and data is stored in a directory symlinked to by |
|
||||||
"<params_dir>/d". |
|
||||||
|
|
||||||
Each key, value pair is stored as a file with named <key> with contents <value>, located in |
|
||||||
<params_dir>/d/<key> |
|
||||||
|
|
||||||
Readers of a single key can just open("<params_dir>/d/<key>") and read the file contents. |
|
||||||
Readers who want a consistent snapshot of multiple keys should take the lock. |
|
||||||
|
|
||||||
Writers should take the lock before modifying anything. Writers should also leave the DB in a |
|
||||||
consistent state after a crash. The implementation below does this by copying all params to a temp |
|
||||||
directory <params_dir>/<tmp>, then atomically symlinking <params_dir>/<d> to <params_dir>/<tmp> |
|
||||||
before deleting the old <params_dir>/<d> directory. |
|
||||||
|
|
||||||
Writers that only modify a single key can simply take the lock, then swap the corresponding value |
|
||||||
file in place without messing with <params_dir>/d. |
|
||||||
""" |
|
||||||
import time |
|
||||||
import os |
|
||||||
import errno |
|
||||||
import sys |
|
||||||
import shutil |
|
||||||
import fcntl |
|
||||||
import tempfile |
|
||||||
import threading |
|
||||||
from enum import Enum |
|
||||||
|
|
||||||
|
|
||||||
def mkdirs_exists_ok(path): |
|
||||||
try: |
|
||||||
os.makedirs(path) |
|
||||||
except OSError: |
|
||||||
if not os.path.isdir(path): |
|
||||||
raise |
|
||||||
|
|
||||||
|
|
||||||
class TxType(Enum): |
|
||||||
PERSISTENT = 1 |
|
||||||
CLEAR_ON_MANAGER_START = 2 |
|
||||||
CLEAR_ON_PANDA_DISCONNECT = 3 |
|
||||||
|
|
||||||
|
|
||||||
class UnknownKeyName(Exception): |
|
||||||
pass |
|
||||||
|
|
||||||
|
|
||||||
keys = { |
|
||||||
"AccessToken": [TxType.PERSISTENT], |
|
||||||
"AthenadPid": [TxType.PERSISTENT], |
|
||||||
"CalibrationParams": [TxType.PERSISTENT], |
|
||||||
"CarParams": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT], |
|
||||||
"CarVin": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT], |
|
||||||
"CommunityFeaturesToggle": [TxType.PERSISTENT], |
|
||||||
"CompletedTrainingVersion": [TxType.PERSISTENT], |
|
||||||
"ControlsParams": [TxType.PERSISTENT], |
|
||||||
"DoUninstall": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
"DongleId": [TxType.PERSISTENT], |
|
||||||
"GitBranch": [TxType.PERSISTENT], |
|
||||||
"GitCommit": [TxType.PERSISTENT], |
|
||||||
"GitRemote": [TxType.PERSISTENT], |
|
||||||
"GithubSshKeys": [TxType.PERSISTENT], |
|
||||||
"HasAcceptedTerms": [TxType.PERSISTENT], |
|
||||||
"HasCompletedSetup": [TxType.PERSISTENT], |
|
||||||
"IsLdwEnabled": [TxType.PERSISTENT], |
|
||||||
"IsGeofenceEnabled": [TxType.PERSISTENT], |
|
||||||
"IsMetric": [TxType.PERSISTENT], |
|
||||||
"IsOffroad": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
"IsRHD": [TxType.PERSISTENT], |
|
||||||
"IsTakingSnapshot": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
"IsUpdateAvailable": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
"IsUploadRawEnabled": [TxType.PERSISTENT], |
|
||||||
"LastUpdateTime": [TxType.PERSISTENT], |
|
||||||
"LimitSetSpeed": [TxType.PERSISTENT], |
|
||||||
"LimitSetSpeedNeural": [TxType.PERSISTENT], |
|
||||||
"LiveParameters": [TxType.PERSISTENT], |
|
||||||
"LongitudinalControl": [TxType.PERSISTENT], |
|
||||||
"OpenpilotEnabledToggle": [TxType.PERSISTENT], |
|
||||||
"PandaFirmware": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT], |
|
||||||
"PandaFirmwareHex": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT], |
|
||||||
"PandaDongleId": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT], |
|
||||||
"Passive": [TxType.PERSISTENT], |
|
||||||
"RecordFront": [TxType.PERSISTENT], |
|
||||||
"ReleaseNotes": [TxType.PERSISTENT], |
|
||||||
"ShouldDoUpdate": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
"SpeedLimitOffset": [TxType.PERSISTENT], |
|
||||||
"SubscriberInfo": [TxType.PERSISTENT], |
|
||||||
"TermsVersion": [TxType.PERSISTENT], |
|
||||||
"TrainingVersion": [TxType.PERSISTENT], |
|
||||||
"UpdateAvailable": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
"Version": [TxType.PERSISTENT], |
|
||||||
"Offroad_ChargeDisabled": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT], |
|
||||||
"Offroad_ConnectivityNeeded": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
"Offroad_ConnectivityNeededPrompt": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
"Offroad_TemperatureTooHigh": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
"Offroad_PandaFirmwareMismatch": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT], |
|
||||||
"Offroad_InvalidTime": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
"Offroad_IsTakingSnapshot": [TxType.CLEAR_ON_MANAGER_START], |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
def fsync_dir(path): |
|
||||||
fd = os.open(path, os.O_RDONLY) |
|
||||||
try: |
|
||||||
os.fsync(fd) |
|
||||||
finally: |
|
||||||
os.close(fd) |
|
||||||
|
|
||||||
|
|
||||||
class FileLock(): |
|
||||||
def __init__(self, path, create): |
|
||||||
self._path = path |
|
||||||
self._create = create |
|
||||||
self._fd = None |
|
||||||
|
|
||||||
def acquire(self): |
|
||||||
self._fd = os.open(self._path, os.O_CREAT if self._create else 0) |
|
||||||
fcntl.flock(self._fd, fcntl.LOCK_EX) |
|
||||||
|
|
||||||
def release(self): |
|
||||||
if self._fd is not None: |
|
||||||
os.close(self._fd) |
|
||||||
self._fd = None |
|
||||||
|
|
||||||
|
|
||||||
class DBAccessor(): |
|
||||||
def __init__(self, path): |
|
||||||
self._path = path |
|
||||||
self._vals = None |
|
||||||
|
|
||||||
def keys(self): |
|
||||||
self._check_entered() |
|
||||||
return self._vals.keys() |
|
||||||
|
|
||||||
def get(self, key): |
|
||||||
self._check_entered() |
|
||||||
try: |
|
||||||
return self._vals[key] |
|
||||||
except KeyError: |
|
||||||
return None |
|
||||||
|
|
||||||
def _get_lock(self, create): |
|
||||||
lock = FileLock(os.path.join(self._path, ".lock"), create) |
|
||||||
lock.acquire() |
|
||||||
return lock |
|
||||||
|
|
||||||
def _read_values_locked(self): |
|
||||||
"""Callers should hold a lock while calling this method.""" |
|
||||||
vals = {} |
|
||||||
try: |
|
||||||
data_path = self._data_path() |
|
||||||
keys = os.listdir(data_path) |
|
||||||
for key in keys: |
|
||||||
with open(os.path.join(data_path, key), "rb") as f: |
|
||||||
vals[key] = f.read() |
|
||||||
except (OSError, IOError) as e: |
|
||||||
# Either the DB hasn't been created yet, or somebody wrote a bug and left the DB in an |
|
||||||
# inconsistent state. Either way, return empty. |
|
||||||
if e.errno == errno.ENOENT: |
|
||||||
return {} |
|
||||||
|
|
||||||
return vals |
|
||||||
|
|
||||||
def _data_path(self): |
|
||||||
return os.path.join(self._path, "d") |
|
||||||
|
|
||||||
def _check_entered(self): |
|
||||||
if self._vals is None: |
|
||||||
raise Exception("Must call __enter__ before using DB") |
|
||||||
|
|
||||||
|
|
||||||
class DBReader(DBAccessor): |
|
||||||
def __enter__(self): |
|
||||||
try: |
|
||||||
lock = self._get_lock(False) |
|
||||||
except OSError as e: |
|
||||||
# Do not create lock if it does not exist. |
|
||||||
if e.errno == errno.ENOENT: |
|
||||||
self._vals = {} |
|
||||||
return self |
|
||||||
|
|
||||||
try: |
|
||||||
# Read everything. |
|
||||||
self._vals = self._read_values_locked() |
|
||||||
return self |
|
||||||
finally: |
|
||||||
lock.release() |
|
||||||
|
|
||||||
def __exit__(self, type, value, traceback): pass |
|
||||||
|
|
||||||
|
|
||||||
class DBWriter(DBAccessor): |
|
||||||
def __init__(self, path): |
|
||||||
super(DBWriter, self).__init__(path) |
|
||||||
self._lock = None |
|
||||||
self._prev_umask = None |
|
||||||
|
|
||||||
def put(self, key, value): |
|
||||||
self._vals[key] = value |
|
||||||
|
|
||||||
def delete(self, key): |
|
||||||
self._vals.pop(key, None) |
|
||||||
|
|
||||||
def __enter__(self): |
|
||||||
mkdirs_exists_ok(self._path) |
|
||||||
|
|
||||||
# Make sure we can write and that permissions are correct. |
|
||||||
self._prev_umask = os.umask(0) |
|
||||||
|
|
||||||
try: |
|
||||||
os.chmod(self._path, 0o777) |
|
||||||
self._lock = self._get_lock(True) |
|
||||||
self._vals = self._read_values_locked() |
|
||||||
except: |
|
||||||
os.umask(self._prev_umask) |
|
||||||
self._prev_umask = None |
|
||||||
raise |
|
||||||
|
|
||||||
return self |
|
||||||
|
|
||||||
def __exit__(self, type, value, traceback): |
|
||||||
self._check_entered() |
|
||||||
|
|
||||||
try: |
|
||||||
# data_path refers to the externally used path to the params. It is a symlink. |
|
||||||
# old_data_path is the path currently pointed to by data_path. |
|
||||||
# tempdir_path is a path where the new params will go, which the new data path will point to. |
|
||||||
# new_data_path is a temporary symlink that will atomically overwrite data_path. |
|
||||||
# |
|
||||||
# The current situation is: |
|
||||||
# data_path -> old_data_path |
|
||||||
# We're going to write params data to tempdir_path |
|
||||||
# tempdir_path -> params data |
|
||||||
# Then point new_data_path to tempdir_path |
|
||||||
# new_data_path -> tempdir_path |
|
||||||
# Then atomically overwrite data_path with new_data_path |
|
||||||
# data_path -> tempdir_path |
|
||||||
old_data_path = None |
|
||||||
new_data_path = None |
|
||||||
tempdir_path = tempfile.mkdtemp(prefix=".tmp", dir=self._path) |
|
||||||
|
|
||||||
try: |
|
||||||
# Write back all keys. |
|
||||||
os.chmod(tempdir_path, 0o777) |
|
||||||
for k, v in self._vals.items(): |
|
||||||
with open(os.path.join(tempdir_path, k), "wb") as f: |
|
||||||
f.write(v) |
|
||||||
f.flush() |
|
||||||
os.fsync(f.fileno()) |
|
||||||
fsync_dir(tempdir_path) |
|
||||||
|
|
||||||
data_path = self._data_path() |
|
||||||
try: |
|
||||||
old_data_path = os.path.join(self._path, os.readlink(data_path)) |
|
||||||
except (OSError, IOError): |
|
||||||
# NOTE(mgraczyk): If other DB implementations have bugs, this could cause |
|
||||||
# copies to be left behind, but we still want to overwrite. |
|
||||||
pass |
|
||||||
|
|
||||||
new_data_path = "{}.link".format(tempdir_path) |
|
||||||
os.symlink(os.path.basename(tempdir_path), new_data_path) |
|
||||||
os.rename(new_data_path, data_path) |
|
||||||
fsync_dir(self._path) |
|
||||||
finally: |
|
||||||
# If the rename worked, we can delete the old data. Otherwise delete the new one. |
|
||||||
success = new_data_path is not None and os.path.exists(data_path) and ( |
|
||||||
os.readlink(data_path) == os.path.basename(tempdir_path)) |
|
||||||
|
|
||||||
if success: |
|
||||||
if old_data_path is not None: |
|
||||||
shutil.rmtree(old_data_path) |
|
||||||
else: |
|
||||||
shutil.rmtree(tempdir_path) |
|
||||||
|
|
||||||
# Regardless of what happened above, there should be no link at new_data_path. |
|
||||||
if new_data_path is not None and os.path.islink(new_data_path): |
|
||||||
os.remove(new_data_path) |
|
||||||
finally: |
|
||||||
os.umask(self._prev_umask) |
|
||||||
self._prev_umask = None |
|
||||||
|
|
||||||
# Always release the lock. |
|
||||||
self._lock.release() |
|
||||||
self._lock = None |
|
||||||
|
|
||||||
|
|
||||||
def read_db(params_path, key): |
|
||||||
path = "%s/d/%s" % (params_path, key) |
|
||||||
try: |
|
||||||
with open(path, "rb") as f: |
|
||||||
return f.read() |
|
||||||
except IOError: |
|
||||||
return None |
|
||||||
|
|
||||||
def write_db(params_path, key, value): |
|
||||||
if isinstance(value, str): |
|
||||||
value = value.encode('utf8') |
|
||||||
|
|
||||||
prev_umask = os.umask(0) |
|
||||||
lock = FileLock(params_path+"/.lock", True) |
|
||||||
lock.acquire() |
|
||||||
|
|
||||||
try: |
|
||||||
tmp_path = tempfile.mktemp(prefix=".tmp", dir=params_path) |
|
||||||
with open(tmp_path, "wb") as f: |
|
||||||
f.write(value) |
|
||||||
f.flush() |
|
||||||
os.fsync(f.fileno()) |
|
||||||
|
|
||||||
path = "%s/d/%s" % (params_path, key) |
|
||||||
os.rename(tmp_path, path) |
|
||||||
fsync_dir(os.path.dirname(path)) |
|
||||||
finally: |
|
||||||
os.umask(prev_umask) |
|
||||||
lock.release() |
|
||||||
|
|
||||||
class Params(): |
|
||||||
def __init__(self, db='/data/params'): |
|
||||||
self.db = db |
|
||||||
|
|
||||||
# create the database if it doesn't exist... |
|
||||||
if not os.path.exists(self.db+"/d"): |
|
||||||
with self.transaction(write=True): |
|
||||||
pass |
|
||||||
|
|
||||||
def transaction(self, write=False): |
|
||||||
if write: |
|
||||||
return DBWriter(self.db) |
|
||||||
else: |
|
||||||
return DBReader(self.db) |
|
||||||
|
|
||||||
def _clear_keys_with_type(self, tx_type): |
|
||||||
with self.transaction(write=True) as txn: |
|
||||||
for key in keys: |
|
||||||
if tx_type in keys[key]: |
|
||||||
txn.delete(key) |
|
||||||
|
|
||||||
def manager_start(self): |
|
||||||
self._clear_keys_with_type(TxType.CLEAR_ON_MANAGER_START) |
|
||||||
|
|
||||||
def panda_disconnect(self): |
|
||||||
self._clear_keys_with_type(TxType.CLEAR_ON_PANDA_DISCONNECT) |
|
||||||
|
|
||||||
def delete(self, key): |
|
||||||
with self.transaction(write=True) as txn: |
|
||||||
txn.delete(key) |
|
||||||
|
|
||||||
def get(self, key, block=False, encoding=None): |
|
||||||
if key not in keys: |
|
||||||
raise UnknownKeyName(key) |
|
||||||
|
|
||||||
while 1: |
|
||||||
ret = read_db(self.db, key) |
|
||||||
if not block or ret is not None: |
|
||||||
break |
|
||||||
# is polling really the best we can do? |
|
||||||
time.sleep(0.05) |
|
||||||
|
|
||||||
if ret is not None and encoding is not None: |
|
||||||
ret = ret.decode(encoding) |
|
||||||
|
|
||||||
return ret |
|
||||||
|
|
||||||
def put(self, key, dat): |
|
||||||
""" |
|
||||||
Warning: This function blocks until the param is written to disk! |
|
||||||
In very rare cases this can take over a second, and your code will hang. |
|
||||||
|
|
||||||
Use the put_nonblocking helper function in time sensitive code, but |
|
||||||
in general try to avoid writing params as much as possible. |
|
||||||
""" |
|
||||||
|
|
||||||
if key not in keys: |
|
||||||
raise UnknownKeyName(key) |
|
||||||
|
|
||||||
write_db(self.db, key, dat) |
|
||||||
|
|
||||||
|
|
||||||
def put_nonblocking(key, val): |
|
||||||
def f(key, val): |
|
||||||
params = Params() |
|
||||||
params.put(key, val) |
|
||||||
|
|
||||||
t = threading.Thread(target=f, args=(key, val)) |
|
||||||
t.start() |
|
||||||
return t |
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__": |
|
||||||
params = Params() |
|
||||||
if len(sys.argv) > 2: |
|
||||||
params.put(sys.argv[1], sys.argv[2]) |
|
||||||
else: |
|
||||||
for k in keys: |
|
||||||
pp = params.get(k) |
|
||||||
if pp is None: |
|
||||||
print("%s is None" % k) |
|
||||||
elif all(ord(c) < 128 and ord(c) >= 32 for c in pp): |
|
||||||
print("%s = %s" % (k, pp)) |
|
||||||
else: |
|
||||||
print("%s = %s" % (k, pp.encode("hex"))) |
|
||||||
|
|
||||||
# Test multiprocess: |
|
||||||
# seq 0 100000 | xargs -P20 -I{} python common/params.py DongleId {} && sleep 0.05 |
|
||||||
# while python common/params.py DongleId; do sleep 0.05; done |
|
@ -1,46 +0,0 @@ |
|||||||
import time |
|
||||||
|
|
||||||
class Profiler(): |
|
||||||
def __init__(self, enabled=False): |
|
||||||
self.enabled = enabled |
|
||||||
self.cp = {} |
|
||||||
self.cp_ignored = [] |
|
||||||
self.iter = 0 |
|
||||||
self.start_time = time.time() |
|
||||||
self.last_time = self.start_time |
|
||||||
self.tot = 0. |
|
||||||
|
|
||||||
def reset(self, enabled=False): |
|
||||||
self.enabled = enabled |
|
||||||
self.cp = {} |
|
||||||
self.cp_ignored = [] |
|
||||||
self.iter = 0 |
|
||||||
self.start_time = time.time() |
|
||||||
self.last_time = self.start_time |
|
||||||
|
|
||||||
def checkpoint(self, name, ignore=False): |
|
||||||
# ignore flag needed when benchmarking threads with ratekeeper |
|
||||||
if not self.enabled: |
|
||||||
return |
|
||||||
tt = time.time() |
|
||||||
if name not in self.cp: |
|
||||||
self.cp[name] = 0. |
|
||||||
if ignore: |
|
||||||
self.cp_ignored.append(name) |
|
||||||
self.cp[name] += tt - self.last_time |
|
||||||
if not ignore: |
|
||||||
self.tot += tt - self.last_time |
|
||||||
self.last_time = tt |
|
||||||
|
|
||||||
def display(self): |
|
||||||
if not self.enabled: |
|
||||||
return |
|
||||||
self.iter += 1 |
|
||||||
print("******* Profiling *******") |
|
||||||
for n, ms in sorted(self.cp.items(), key=lambda x: -x[1]): |
|
||||||
if n in self.cp_ignored: |
|
||||||
print("%30s: %7.2f percent: %3.0f IGNORED" % (n, ms*1000.0, ms/self.tot*100)) |
|
||||||
else: |
|
||||||
print("%30s: %7.2f percent: %3.0f" % (n, ms*1000.0, ms/self.tot*100)) |
|
||||||
print("Iter clock: %2.6f TOTAL: %2.2f" % (self.tot/self.iter, self.tot)) |
|
||||||
|
|
@ -1,74 +0,0 @@ |
|||||||
"""Utilities for reading real time clocks and keeping soft real time constraints.""" |
|
||||||
import os |
|
||||||
import time |
|
||||||
import platform |
|
||||||
import subprocess |
|
||||||
import multiprocessing |
|
||||||
from cffi import FFI |
|
||||||
|
|
||||||
from common.common_pyx import sec_since_boot # pylint: disable=no-name-in-module, import-error |
|
||||||
|
|
||||||
|
|
||||||
# time step for each process |
|
||||||
DT_CTRL = 0.01 # controlsd |
|
||||||
DT_MDL = 0.05 # model |
|
||||||
DT_DMON = 0.1 # driver monitoring |
|
||||||
DT_TRML = 0.5 # thermald and manager |
|
||||||
|
|
||||||
|
|
||||||
ffi = FFI() |
|
||||||
ffi.cdef("long syscall(long number, ...);") |
|
||||||
libc = ffi.dlopen(None) |
|
||||||
|
|
||||||
|
|
||||||
def set_realtime_priority(level): |
|
||||||
if os.getuid() != 0: |
|
||||||
print("not setting priority, not root") |
|
||||||
return |
|
||||||
if platform.machine() == "x86_64": |
|
||||||
NR_gettid = 186 |
|
||||||
elif platform.machine() == "aarch64": |
|
||||||
NR_gettid = 178 |
|
||||||
else: |
|
||||||
raise NotImplementedError |
|
||||||
|
|
||||||
tid = libc.syscall(NR_gettid) |
|
||||||
return subprocess.call(['chrt', '-f', '-p', str(level), str(tid)]) |
|
||||||
|
|
||||||
|
|
||||||
class Ratekeeper(): |
|
||||||
def __init__(self, rate, print_delay_threshold=0.): |
|
||||||
"""Rate in Hz for ratekeeping. print_delay_threshold must be nonnegative.""" |
|
||||||
self._interval = 1. / rate |
|
||||||
self._next_frame_time = sec_since_boot() + self._interval |
|
||||||
self._print_delay_threshold = print_delay_threshold |
|
||||||
self._frame = 0 |
|
||||||
self._remaining = 0 |
|
||||||
self._process_name = multiprocessing.current_process().name |
|
||||||
|
|
||||||
@property |
|
||||||
def frame(self): |
|
||||||
return self._frame |
|
||||||
|
|
||||||
@property |
|
||||||
def remaining(self): |
|
||||||
return self._remaining |
|
||||||
|
|
||||||
# Maintain loop rate by calling this at the end of each loop |
|
||||||
def keep_time(self): |
|
||||||
lagged = self.monitor_time() |
|
||||||
if self._remaining > 0: |
|
||||||
time.sleep(self._remaining) |
|
||||||
return lagged |
|
||||||
|
|
||||||
# this only monitor the cumulative lag, but does not enforce a rate |
|
||||||
def monitor_time(self): |
|
||||||
lagged = False |
|
||||||
remaining = self._next_frame_time - sec_since_boot() |
|
||||||
self._next_frame_time += self._interval |
|
||||||
if self._print_delay_threshold is not None and remaining < -self._print_delay_threshold: |
|
||||||
print("%s lagging by %.2f ms" % (self._process_name, -remaining * 1000)) |
|
||||||
lagged = True |
|
||||||
self._frame += 1 |
|
||||||
self._remaining = remaining |
|
||||||
return lagged |
|
@ -1,63 +0,0 @@ |
|||||||
import os |
|
||||||
import subprocess |
|
||||||
from common.basedir import BASEDIR |
|
||||||
|
|
||||||
|
|
||||||
class Spinner(): |
|
||||||
def __init__(self): |
|
||||||
try: |
|
||||||
self.spinner_proc = subprocess.Popen(["./spinner"], |
|
||||||
stdin=subprocess.PIPE, |
|
||||||
cwd=os.path.join(BASEDIR, "selfdrive", "ui", "spinner"), |
|
||||||
close_fds=True) |
|
||||||
except OSError: |
|
||||||
self.spinner_proc = None |
|
||||||
|
|
||||||
def __enter__(self): |
|
||||||
return self |
|
||||||
|
|
||||||
def update(self, spinner_text): |
|
||||||
if self.spinner_proc is not None: |
|
||||||
self.spinner_proc.stdin.write(spinner_text.encode('utf8') + b"\n") |
|
||||||
try: |
|
||||||
self.spinner_proc.stdin.flush() |
|
||||||
except BrokenPipeError: |
|
||||||
pass |
|
||||||
|
|
||||||
def close(self): |
|
||||||
if self.spinner_proc is not None: |
|
||||||
try: |
|
||||||
self.spinner_proc.stdin.close() |
|
||||||
except BrokenPipeError: |
|
||||||
pass |
|
||||||
self.spinner_proc.terminate() |
|
||||||
self.spinner_proc = None |
|
||||||
|
|
||||||
def __del__(self): |
|
||||||
self.close() |
|
||||||
|
|
||||||
def __exit__(self, type, value, traceback): |
|
||||||
self.close() |
|
||||||
|
|
||||||
|
|
||||||
class FakeSpinner(): |
|
||||||
def __init__(self): |
|
||||||
pass |
|
||||||
|
|
||||||
def __enter__(self): |
|
||||||
return self |
|
||||||
|
|
||||||
def update(self, _): |
|
||||||
pass |
|
||||||
|
|
||||||
def __exit__(self, type, value, traceback): |
|
||||||
pass |
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__": |
|
||||||
import time |
|
||||||
with Spinner() as s: |
|
||||||
s.update("Spinner text") |
|
||||||
time.sleep(5.0) |
|
||||||
print("gone") |
|
||||||
time.sleep(5.0) |
|
@ -1,73 +0,0 @@ |
|||||||
import numpy as np |
|
||||||
|
|
||||||
class RunningStat(): |
|
||||||
# tracks realtime mean and standard deviation without storing any data |
|
||||||
def __init__(self, priors=None, max_trackable=-1): |
|
||||||
self.max_trackable = max_trackable |
|
||||||
if priors is not None: |
|
||||||
# initialize from history |
|
||||||
self.M = priors[0] |
|
||||||
self.S = priors[1] |
|
||||||
self.n = priors[2] |
|
||||||
self.M_last = self.M |
|
||||||
self.S_last = self.S |
|
||||||
|
|
||||||
else: |
|
||||||
self.reset() |
|
||||||
|
|
||||||
def reset(self): |
|
||||||
self.M = 0. |
|
||||||
self.S = 0. |
|
||||||
self.M_last = 0. |
|
||||||
self.S_last = 0. |
|
||||||
self.n = 0 |
|
||||||
|
|
||||||
def push_data(self, new_data): |
|
||||||
# short term memory hack |
|
||||||
if self.max_trackable < 0 or self.n < self.max_trackable: |
|
||||||
self.n += 1 |
|
||||||
if self.n == 0: |
|
||||||
self.M_last = new_data |
|
||||||
self.M = self.M_last |
|
||||||
self.S_last = 0. |
|
||||||
else: |
|
||||||
self.M = self.M_last + (new_data - self.M_last) / self.n |
|
||||||
self.S = self.S_last + (new_data - self.M_last) * (new_data - self.M); |
|
||||||
self.M_last = self.M |
|
||||||
self.S_last = self.S |
|
||||||
|
|
||||||
def mean(self): |
|
||||||
return self.M |
|
||||||
|
|
||||||
def variance(self): |
|
||||||
if self.n >= 2: |
|
||||||
return self.S / (self.n - 1.) |
|
||||||
else: |
|
||||||
return 0 |
|
||||||
|
|
||||||
def std(self): |
|
||||||
return np.sqrt(self.variance()) |
|
||||||
|
|
||||||
def params_to_save(self): |
|
||||||
return [self.M, self.S, self.n] |
|
||||||
|
|
||||||
class RunningStatFilter(): |
|
||||||
def __init__(self, raw_priors=None, filtered_priors=None, max_trackable=-1): |
|
||||||
self.raw_stat = RunningStat(raw_priors, max_trackable) |
|
||||||
self.filtered_stat = RunningStat(filtered_priors, max_trackable) |
|
||||||
|
|
||||||
def reset(self): |
|
||||||
self.raw_stat.reset() |
|
||||||
self.filtered_stat.reset() |
|
||||||
|
|
||||||
def push_and_update(self, new_data): |
|
||||||
_std_last = self.raw_stat.std() |
|
||||||
self.raw_stat.push_data(new_data) |
|
||||||
_delta_std = self.raw_stat.std() - _std_last |
|
||||||
if _delta_std<=0: |
|
||||||
self.filtered_stat.push_data(new_data) |
|
||||||
else: |
|
||||||
pass |
|
||||||
# self.filtered_stat.push_data(self.filtered_stat.mean()) |
|
||||||
|
|
||||||
# class SequentialBayesian(): |
|
@ -1,81 +0,0 @@ |
|||||||
#!/usr/bin/env python3 |
|
||||||
import sympy as sp |
|
||||||
import numpy as np |
|
||||||
|
|
||||||
def cross(x): |
|
||||||
ret = sp.Matrix(np.zeros((3,3))) |
|
||||||
ret[0,1], ret[0,2] = -x[2], x[1] |
|
||||||
ret[1,0], ret[1,2] = x[2], -x[0] |
|
||||||
ret[2,0], ret[2,1] = -x[1], x[0] |
|
||||||
return ret |
|
||||||
|
|
||||||
def euler_rotate(roll, pitch, yaw): |
|
||||||
# make symbolic rotation matrix from eulers |
|
||||||
matrix_roll = sp.Matrix([[1, 0, 0], |
|
||||||
[0, sp.cos(roll), -sp.sin(roll)], |
|
||||||
[0, sp.sin(roll), sp.cos(roll)]]) |
|
||||||
matrix_pitch = sp.Matrix([[sp.cos(pitch), 0, sp.sin(pitch)], |
|
||||||
[0, 1, 0], |
|
||||||
[-sp.sin(pitch), 0, sp.cos(pitch)]]) |
|
||||||
matrix_yaw = sp.Matrix([[sp.cos(yaw), -sp.sin(yaw), 0], |
|
||||||
[sp.sin(yaw), sp.cos(yaw), 0], |
|
||||||
[0, 0, 1]]) |
|
||||||
return matrix_yaw*matrix_pitch*matrix_roll |
|
||||||
|
|
||||||
def quat_rotate(q0, q1, q2, q3): |
|
||||||
# make symbolic rotation matrix from quat |
|
||||||
return sp.Matrix([[q0**2 + q1**2 - q2**2 - q3**2, 2*(q1*q2 + q0*q3), 2*(q1*q3 - q0*q2)], |
|
||||||
[2*(q1*q2 - q0*q3), q0**2 - q1**2 + q2**2 - q3**2, 2*(q2*q3 + q0*q1)], |
|
||||||
[2*(q1*q3 + q0*q2), 2*(q2*q3 - q0*q1), q0**2 - q1**2 - q2**2 + q3**2]]).T |
|
||||||
|
|
||||||
def quat_matrix_l(p): |
|
||||||
return sp.Matrix([[p[0], -p[1], -p[2], -p[3]], |
|
||||||
[p[1], p[0], -p[3], p[2]], |
|
||||||
[p[2], p[3], p[0], -p[1]], |
|
||||||
[p[3], -p[2], p[1], p[0]]]) |
|
||||||
|
|
||||||
def quat_matrix_r(p): |
|
||||||
return sp.Matrix([[p[0], -p[1], -p[2], -p[3]], |
|
||||||
[p[1], p[0], p[3], -p[2]], |
|
||||||
[p[2], -p[3], p[0], p[1]], |
|
||||||
[p[3], p[2], -p[1], p[0]]]) |
|
||||||
|
|
||||||
|
|
||||||
def sympy_into_c(sympy_functions): |
|
||||||
from sympy.utilities import codegen |
|
||||||
routines = [] |
|
||||||
for name, expr, args in sympy_functions: |
|
||||||
r = codegen.make_routine(name, expr, language="C99") |
|
||||||
|
|
||||||
# argument ordering input to sympy is broken with function with output arguments |
|
||||||
nargs = [] |
|
||||||
# reorder the input arguments |
|
||||||
for aa in args: |
|
||||||
if aa is None: |
|
||||||
nargs.append(codegen.InputArgument(sp.Symbol('unused'), dimensions=[1,1])) |
|
||||||
continue |
|
||||||
found = False |
|
||||||
for a in r.arguments: |
|
||||||
if str(aa.name) == str(a.name): |
|
||||||
nargs.append(a) |
|
||||||
found = True |
|
||||||
break |
|
||||||
if not found: |
|
||||||
# [1,1] is a hack for Matrices |
|
||||||
nargs.append(codegen.InputArgument(aa, dimensions=[1,1])) |
|
||||||
# add the output arguments |
|
||||||
for a in r.arguments: |
|
||||||
if type(a) == codegen.OutputArgument: |
|
||||||
nargs.append(a) |
|
||||||
|
|
||||||
#assert len(r.arguments) == len(args)+1 |
|
||||||
r.arguments = nargs |
|
||||||
|
|
||||||
# add routine to list |
|
||||||
routines.append(r) |
|
||||||
|
|
||||||
[(c_name, c_code), (h_name, c_header)] = codegen.get_code_generator('C', 'ekf', 'C99').write(routines, "ekf") |
|
||||||
c_code = '\n'.join(x for x in c_code.split("\n") if len(x) > 0 and x[0] != '#') |
|
||||||
c_header = '\n'.join(x for x in c_header.split("\n") if len(x) > 0 and x[0] != '#') |
|
||||||
|
|
||||||
return c_header, c_code |
|
@ -1,9 +0,0 @@ |
|||||||
import os |
|
||||||
from nose.tools import nottest |
|
||||||
|
|
||||||
def phone_only(x): |
|
||||||
if os.path.isfile("/init.qcom.rc"): |
|
||||||
return x |
|
||||||
else: |
|
||||||
return nottest(x) |
|
||||||
|
|
@ -1,28 +0,0 @@ |
|||||||
import signal |
|
||||||
|
|
||||||
class TimeoutException(Exception): |
|
||||||
pass |
|
||||||
|
|
||||||
class Timeout: |
|
||||||
""" |
|
||||||
Timeout context manager. |
|
||||||
For example this code will raise a TimeoutException: |
|
||||||
with Timeout(seconds=5, error_msg="Sleep was too long"): |
|
||||||
time.sleep(10) |
|
||||||
""" |
|
||||||
def __init__(self, seconds, error_msg=None): |
|
||||||
if error_msg is None: |
|
||||||
error_msg = 'Timed out after {} seconds'.format(seconds) |
|
||||||
self.seconds = seconds |
|
||||||
self.error_msg = error_msg |
|
||||||
|
|
||||||
def handle_timeout(self, signume, frame): |
|
||||||
raise TimeoutException(self.error_msg) |
|
||||||
|
|
||||||
def __enter__(self): |
|
||||||
signal.signal(signal.SIGALRM, self.handle_timeout) |
|
||||||
signal.alarm(self.seconds) |
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc_val, exc_tb): |
|
||||||
signal.alarm(0) |
|
||||||
|
|
@ -1,258 +0,0 @@ |
|||||||
import numpy as np |
|
||||||
import common.transformations.orientation as orient |
|
||||||
import math |
|
||||||
|
|
||||||
FULL_FRAME_SIZE = (1164, 874) |
|
||||||
W, H = FULL_FRAME_SIZE[0], FULL_FRAME_SIZE[1] |
|
||||||
eon_focal_length = FOCAL = 910.0 |
|
||||||
|
|
||||||
# aka 'K' aka camera_frame_from_view_frame |
|
||||||
eon_intrinsics = np.array([ |
|
||||||
[FOCAL, 0., W/2.], |
|
||||||
[ 0., FOCAL, H/2.], |
|
||||||
[ 0., 0., 1.]]) |
|
||||||
|
|
||||||
|
|
||||||
leon_dcam_intrinsics = np.array([ |
|
||||||
[650, 0, 816//2], |
|
||||||
[ 0, 650, 612//2], |
|
||||||
[ 0, 0, 1]]) |
|
||||||
|
|
||||||
eon_dcam_intrinsics = np.array([ |
|
||||||
[860, 0, 1152//2], |
|
||||||
[ 0, 860, 864//2], |
|
||||||
[ 0, 0, 1]]) |
|
||||||
|
|
||||||
# aka 'K_inv' aka view_frame_from_camera_frame |
|
||||||
eon_intrinsics_inv = np.linalg.inv(eon_intrinsics) |
|
||||||
|
|
||||||
|
|
||||||
# device/mesh : x->forward, y-> right, z->down |
|
||||||
# view : x->right, y->down, z->forward |
|
||||||
device_frame_from_view_frame = np.array([ |
|
||||||
[ 0., 0., 1.], |
|
||||||
[ 1., 0., 0.], |
|
||||||
[ 0., 1., 0.] |
|
||||||
]) |
|
||||||
view_frame_from_device_frame = device_frame_from_view_frame.T |
|
||||||
|
|
||||||
|
|
||||||
def get_calib_from_vp(vp): |
|
||||||
vp_norm = normalize(vp) |
|
||||||
yaw_calib = np.arctan(vp_norm[0]) |
|
||||||
pitch_calib = -np.arctan(vp_norm[1]*np.cos(yaw_calib)) |
|
||||||
roll_calib = 0 |
|
||||||
return roll_calib, pitch_calib, yaw_calib |
|
||||||
|
|
||||||
|
|
||||||
# aka 'extrinsic_matrix' |
|
||||||
# road : x->forward, y -> left, z->up |
|
||||||
def get_view_frame_from_road_frame(roll, pitch, yaw, height): |
|
||||||
device_from_road = orient.rot_from_euler([roll, pitch, yaw]).dot(np.diag([1, -1, -1])) |
|
||||||
view_from_road = view_frame_from_device_frame.dot(device_from_road) |
|
||||||
return np.hstack((view_from_road, [[0], [height], [0]])) |
|
||||||
|
|
||||||
|
|
||||||
def vp_from_ke(m): |
|
||||||
""" |
|
||||||
Computes the vanishing point from the product of the intrinsic and extrinsic |
|
||||||
matrices C = KE. |
|
||||||
|
|
||||||
The vanishing point is defined as lim x->infinity C (x, 0, 0, 1).T |
|
||||||
""" |
|
||||||
return (m[0, 0]/m[2,0], m[1,0]/m[2,0]) |
|
||||||
|
|
||||||
|
|
||||||
def vp_from_rpy(rpy): |
|
||||||
e = get_view_frame_from_road_frame(rpy[0], rpy[1], rpy[2], 1.22) |
|
||||||
ke = np.dot(eon_intrinsics, e) |
|
||||||
return vp_from_ke(ke) |
|
||||||
|
|
||||||
|
|
||||||
def roll_from_ke(m): |
|
||||||
# note: different from calibration.h/RollAnglefromKE: i think that one's just wrong |
|
||||||
return np.arctan2(-(m[1, 0] - m[1, 1] * m[2, 0] / m[2, 1]), |
|
||||||
-(m[0, 0] - m[0, 1] * m[2, 0] / m[2, 1])) |
|
||||||
|
|
||||||
|
|
||||||
def normalize(img_pts, intrinsics=eon_intrinsics): |
|
||||||
# normalizes image coordinates |
|
||||||
# accepts single pt or array of pts |
|
||||||
intrinsics_inv = np.linalg.inv(intrinsics) |
|
||||||
img_pts = np.array(img_pts) |
|
||||||
input_shape = img_pts.shape |
|
||||||
img_pts = np.atleast_2d(img_pts) |
|
||||||
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0],1)))) |
|
||||||
img_pts_normalized = img_pts.dot(intrinsics_inv.T) |
|
||||||
img_pts_normalized[(img_pts < 0).any(axis=1)] = np.nan |
|
||||||
return img_pts_normalized[:,:2].reshape(input_shape) |
|
||||||
|
|
||||||
|
|
||||||
def denormalize(img_pts, intrinsics=eon_intrinsics): |
|
||||||
# denormalizes image coordinates |
|
||||||
# accepts single pt or array of pts |
|
||||||
img_pts = np.array(img_pts) |
|
||||||
input_shape = img_pts.shape |
|
||||||
img_pts = np.atleast_2d(img_pts) |
|
||||||
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0],1)))) |
|
||||||
img_pts_denormalized = img_pts.dot(intrinsics.T) |
|
||||||
img_pts_denormalized[img_pts_denormalized[:,0] > W] = np.nan |
|
||||||
img_pts_denormalized[img_pts_denormalized[:,0] < 0] = np.nan |
|
||||||
img_pts_denormalized[img_pts_denormalized[:,1] > H] = np.nan |
|
||||||
img_pts_denormalized[img_pts_denormalized[:,1] < 0] = np.nan |
|
||||||
return img_pts_denormalized[:,:2].reshape(input_shape) |
|
||||||
|
|
||||||
|
|
||||||
def device_from_ecef(pos_ecef, orientation_ecef, pt_ecef): |
|
||||||
# device from ecef frame |
|
||||||
# device frame is x -> forward, y-> right, z -> down |
|
||||||
# accepts single pt or array of pts |
|
||||||
input_shape = pt_ecef.shape |
|
||||||
pt_ecef = np.atleast_2d(pt_ecef) |
|
||||||
ecef_from_device_rot = orient.rotations_from_quats(orientation_ecef) |
|
||||||
device_from_ecef_rot = ecef_from_device_rot.T |
|
||||||
pt_ecef_rel = pt_ecef - pos_ecef |
|
||||||
pt_device = np.einsum('jk,ik->ij', device_from_ecef_rot, pt_ecef_rel) |
|
||||||
return pt_device.reshape(input_shape) |
|
||||||
|
|
||||||
|
|
||||||
def img_from_device(pt_device): |
|
||||||
# img coordinates from pts in device frame |
|
||||||
# first transforms to view frame, then to img coords |
|
||||||
# accepts single pt or array of pts |
|
||||||
input_shape = pt_device.shape |
|
||||||
pt_device = np.atleast_2d(pt_device) |
|
||||||
pt_view = np.einsum('jk,ik->ij', view_frame_from_device_frame, pt_device) |
|
||||||
|
|
||||||
# This function should never return negative depths |
|
||||||
pt_view[pt_view[:,2] < 0] = np.nan |
|
||||||
|
|
||||||
pt_img = pt_view/pt_view[:,2:3] |
|
||||||
return pt_img.reshape(input_shape)[:,:2] |
|
||||||
|
|
||||||
|
|
||||||
#TODO please use generic img transform below |
|
||||||
def rotate_img(img, eulers, crop=None, intrinsics=eon_intrinsics): |
|
||||||
import cv2 # pylint: disable=import-error |
|
||||||
|
|
||||||
size = img.shape[:2] |
|
||||||
rot = orient.rot_from_euler(eulers) |
|
||||||
quadrangle = np.array([[0, 0], |
|
||||||
[size[1]-1, 0], |
|
||||||
[0, size[0]-1], |
|
||||||
[size[1]-1, size[0]-1]], dtype=np.float32) |
|
||||||
quadrangle_norm = np.hstack((normalize(quadrangle, intrinsics=intrinsics), np.ones((4,1)))) |
|
||||||
warped_quadrangle_full = np.einsum('ij, kj->ki', intrinsics.dot(rot), quadrangle_norm) |
|
||||||
warped_quadrangle = np.column_stack((warped_quadrangle_full[:,0]/warped_quadrangle_full[:,2], |
|
||||||
warped_quadrangle_full[:,1]/warped_quadrangle_full[:,2])).astype(np.float32) |
|
||||||
if crop: |
|
||||||
W_border = (size[1] - crop[0])//2 |
|
||||||
H_border = (size[0] - crop[1])//2 |
|
||||||
outside_crop = (((warped_quadrangle[:,0] < W_border) | |
|
||||||
(warped_quadrangle[:,0] >= size[1] - W_border)) & |
|
||||||
((warped_quadrangle[:,1] < H_border) | |
|
||||||
(warped_quadrangle[:,1] >= size[0] - H_border))) |
|
||||||
if not outside_crop.all(): |
|
||||||
raise ValueError("warped image not contained inside crop") |
|
||||||
else: |
|
||||||
H_border, W_border = 0, 0 |
|
||||||
M = cv2.getPerspectiveTransform(quadrangle, warped_quadrangle) |
|
||||||
img_warped = cv2.warpPerspective(img, M, size[::-1]) |
|
||||||
return img_warped[H_border: size[0] - H_border, |
|
||||||
W_border: size[1] - W_border] |
|
||||||
|
|
||||||
|
|
||||||
def get_camera_frame_from_calib_frame(camera_frame_from_road_frame): |
|
||||||
camera_frame_from_ground = camera_frame_from_road_frame[:, (0, 1, 3)] |
|
||||||
calib_frame_from_ground = np.dot(eon_intrinsics, |
|
||||||
get_view_frame_from_road_frame(0, 0, 0, 1.22))[:, (0, 1, 3)] |
|
||||||
ground_from_calib_frame = np.linalg.inv(calib_frame_from_ground) |
|
||||||
camera_frame_from_calib_frame = np.dot(camera_frame_from_ground, ground_from_calib_frame) |
|
||||||
return camera_frame_from_calib_frame |
|
||||||
|
|
||||||
|
|
||||||
def pretransform_from_calib(calib): |
|
||||||
roll, pitch, yaw, height = calib |
|
||||||
view_frame_from_road_frame = get_view_frame_from_road_frame(roll, pitch, yaw, height) |
|
||||||
camera_frame_from_road_frame = np.dot(eon_intrinsics, view_frame_from_road_frame) |
|
||||||
camera_frame_from_calib_frame = get_camera_frame_from_calib_frame(camera_frame_from_road_frame) |
|
||||||
return np.linalg.inv(camera_frame_from_calib_frame) |
|
||||||
|
|
||||||
|
|
||||||
def transform_img(base_img, |
|
||||||
augment_trans=np.array([0,0,0]), |
|
||||||
augment_eulers=np.array([0,0,0]), |
|
||||||
from_intr=eon_intrinsics, |
|
||||||
to_intr=eon_intrinsics, |
|
||||||
output_size=None, |
|
||||||
pretransform=None, |
|
||||||
top_hacks=False, |
|
||||||
yuv=False, |
|
||||||
alpha=1.0, |
|
||||||
beta=0, |
|
||||||
blur=0): |
|
||||||
import cv2 # pylint: disable=import-error |
|
||||||
cv2.setNumThreads(1) |
|
||||||
|
|
||||||
if yuv: |
|
||||||
base_img = cv2.cvtColor(base_img, cv2.COLOR_YUV2RGB_I420) |
|
||||||
|
|
||||||
size = base_img.shape[:2] |
|
||||||
if not output_size: |
|
||||||
output_size = size[::-1] |
|
||||||
|
|
||||||
cy = from_intr[1,2] |
|
||||||
def get_M(h=1.22): |
|
||||||
quadrangle = np.array([[0, cy + 20], |
|
||||||
[size[1]-1, cy + 20], |
|
||||||
[0, size[0]-1], |
|
||||||
[size[1]-1, size[0]-1]], dtype=np.float32) |
|
||||||
quadrangle_norm = np.hstack((normalize(quadrangle, intrinsics=from_intr), np.ones((4,1)))) |
|
||||||
quadrangle_world = np.column_stack((h*quadrangle_norm[:,0]/quadrangle_norm[:,1], |
|
||||||
h*np.ones(4), |
|
||||||
h/quadrangle_norm[:,1])) |
|
||||||
rot = orient.rot_from_euler(augment_eulers) |
|
||||||
to_extrinsics = np.hstack((rot.T, -augment_trans[:,None])) |
|
||||||
to_KE = to_intr.dot(to_extrinsics) |
|
||||||
warped_quadrangle_full = np.einsum('jk,ik->ij', to_KE, np.hstack((quadrangle_world, np.ones((4,1))))) |
|
||||||
warped_quadrangle = np.column_stack((warped_quadrangle_full[:,0]/warped_quadrangle_full[:,2], |
|
||||||
warped_quadrangle_full[:,1]/warped_quadrangle_full[:,2])).astype(np.float32) |
|
||||||
M = cv2.getPerspectiveTransform(quadrangle, warped_quadrangle.astype(np.float32)) |
|
||||||
return M |
|
||||||
|
|
||||||
M = get_M() |
|
||||||
if pretransform is not None: |
|
||||||
M = M.dot(pretransform) |
|
||||||
augmented_rgb = cv2.warpPerspective(base_img, M, output_size, borderMode=cv2.BORDER_REPLICATE) |
|
||||||
|
|
||||||
if top_hacks: |
|
||||||
cyy = int(math.ceil(to_intr[1,2])) |
|
||||||
M = get_M(1000) |
|
||||||
if pretransform is not None: |
|
||||||
M = M.dot(pretransform) |
|
||||||
augmented_rgb[:cyy] = cv2.warpPerspective(base_img, M, (output_size[0], cyy), borderMode=cv2.BORDER_REPLICATE) |
|
||||||
|
|
||||||
# brightness and contrast augment |
|
||||||
augmented_rgb = np.clip((float(alpha)*augmented_rgb + beta), 0, 255).astype(np.uint8) |
|
||||||
|
|
||||||
# gaussian blur |
|
||||||
if blur > 0: |
|
||||||
augmented_rgb = cv2.GaussianBlur(augmented_rgb,(blur*2+1,blur*2+1),cv2.BORDER_DEFAULT) |
|
||||||
|
|
||||||
if yuv: |
|
||||||
augmented_img = cv2.cvtColor(augmented_rgb, cv2.COLOR_RGB2YUV_I420) |
|
||||||
else: |
|
||||||
augmented_img = augmented_rgb |
|
||||||
return augmented_img |
|
||||||
|
|
||||||
|
|
||||||
def yuv_crop(frame, output_size, center=None): |
|
||||||
# output_size in camera coordinates so u,v |
|
||||||
# center in array coordinates so row, column |
|
||||||
import cv2 # pylint: disable=import-error |
|
||||||
rgb = cv2.cvtColor(frame, cv2.COLOR_YUV2RGB_I420) |
|
||||||
if not center: |
|
||||||
center = (rgb.shape[0]/2, rgb.shape[1]/2) |
|
||||||
rgb_crop = rgb[center[0] - output_size[1]/2: center[0] + output_size[1]/2, |
|
||||||
center[1] - output_size[0]/2: center[1] + output_size[0]/2] |
|
||||||
return cv2.cvtColor(rgb_crop, cv2.COLOR_RGB2YUV_I420) |
|
@ -1,108 +0,0 @@ |
|||||||
import numpy as np |
|
||||||
""" |
|
||||||
Coordinate transformation module. All methods accept arrays as input |
|
||||||
with each row as a position. |
|
||||||
""" |
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
a = 6378137 |
|
||||||
b = 6356752.3142 |
|
||||||
esq = 6.69437999014 * 0.001 |
|
||||||
e1sq = 6.73949674228 * 0.001 |
|
||||||
|
|
||||||
|
|
||||||
def geodetic2ecef(geodetic, radians=False): |
|
||||||
geodetic = np.array(geodetic) |
|
||||||
input_shape = geodetic.shape |
|
||||||
geodetic = np.atleast_2d(geodetic) |
|
||||||
|
|
||||||
ratio = 1.0 if radians else (np.pi / 180.0) |
|
||||||
lat = ratio*geodetic[:,0] |
|
||||||
lon = ratio*geodetic[:,1] |
|
||||||
alt = geodetic[:,2] |
|
||||||
|
|
||||||
xi = np.sqrt(1 - esq * np.sin(lat)**2) |
|
||||||
x = (a / xi + alt) * np.cos(lat) * np.cos(lon) |
|
||||||
y = (a / xi + alt) * np.cos(lat) * np.sin(lon) |
|
||||||
z = (a / xi * (1 - esq) + alt) * np.sin(lat) |
|
||||||
ecef = np.array([x, y, z]).T |
|
||||||
return ecef.reshape(input_shape) |
|
||||||
|
|
||||||
|
|
||||||
def ecef2geodetic(ecef, radians=False): |
|
||||||
""" |
|
||||||
Convert ECEF coordinates to geodetic using ferrari's method |
|
||||||
""" |
|
||||||
# Save shape and export column |
|
||||||
ecef = np.atleast_1d(ecef) |
|
||||||
input_shape = ecef.shape |
|
||||||
ecef = np.atleast_2d(ecef) |
|
||||||
x, y, z = ecef[:, 0], ecef[:, 1], ecef[:, 2] |
|
||||||
|
|
||||||
ratio = 1.0 if radians else (180.0 / np.pi) |
|
||||||
|
|
||||||
# Conver from ECEF to geodetic using Ferrari's methods |
|
||||||
# https://en.wikipedia.org/wiki/Geographic_coordinate_conversion#Ferrari.27s_solution |
|
||||||
r = np.sqrt(x * x + y * y) |
|
||||||
Esq = a * a - b * b |
|
||||||
F = 54 * b * b * z * z |
|
||||||
G = r * r + (1 - esq) * z * z - esq * Esq |
|
||||||
C = (esq * esq * F * r * r) / (pow(G, 3)) |
|
||||||
S = np.cbrt(1 + C + np.sqrt(C * C + 2 * C)) |
|
||||||
P = F / (3 * pow((S + 1 / S + 1), 2) * G * G) |
|
||||||
Q = np.sqrt(1 + 2 * esq * esq * P) |
|
||||||
r_0 = -(P * esq * r) / (1 + Q) + np.sqrt(0.5 * a * a*(1 + 1.0 / Q) - \ |
|
||||||
P * (1 - esq) * z * z / (Q * (1 + Q)) - 0.5 * P * r * r) |
|
||||||
U = np.sqrt(pow((r - esq * r_0), 2) + z * z) |
|
||||||
V = np.sqrt(pow((r - esq * r_0), 2) + (1 - esq) * z * z) |
|
||||||
Z_0 = b * b * z / (a * V) |
|
||||||
h = U * (1 - b * b / (a * V)) |
|
||||||
lat = ratio*np.arctan((z + e1sq * Z_0) / r) |
|
||||||
lon = ratio*np.arctan2(y, x) |
|
||||||
|
|
||||||
# stack the new columns and return to the original shape |
|
||||||
geodetic = np.column_stack((lat, lon, h)) |
|
||||||
return geodetic.reshape(input_shape) |
|
||||||
|
|
||||||
class LocalCoord(): |
|
||||||
""" |
|
||||||
Allows conversions to local frames. In this case NED. |
|
||||||
That is: North East Down from the start position in |
|
||||||
meters. |
|
||||||
""" |
|
||||||
def __init__(self, init_geodetic, init_ecef): |
|
||||||
self.init_ecef = init_ecef |
|
||||||
lat, lon, _ = (np.pi/180)*np.array(init_geodetic) |
|
||||||
self.ned2ecef_matrix = np.array([[-np.sin(lat)*np.cos(lon), -np.sin(lon), -np.cos(lat)*np.cos(lon)], |
|
||||||
[-np.sin(lat)*np.sin(lon), np.cos(lon), -np.cos(lat)*np.sin(lon)], |
|
||||||
[np.cos(lat), 0, -np.sin(lat)]]) |
|
||||||
self.ecef2ned_matrix = self.ned2ecef_matrix.T |
|
||||||
|
|
||||||
@classmethod |
|
||||||
def from_geodetic(cls, init_geodetic): |
|
||||||
init_ecef = geodetic2ecef(init_geodetic) |
|
||||||
return LocalCoord(init_geodetic, init_ecef) |
|
||||||
|
|
||||||
@classmethod |
|
||||||
def from_ecef(cls, init_ecef): |
|
||||||
init_geodetic = ecef2geodetic(init_ecef) |
|
||||||
return LocalCoord(init_geodetic, init_ecef) |
|
||||||
|
|
||||||
|
|
||||||
def ecef2ned(self, ecef): |
|
||||||
ecef = np.array(ecef) |
|
||||||
return np.dot(self.ecef2ned_matrix, (ecef - self.init_ecef).T).T |
|
||||||
|
|
||||||
def ned2ecef(self, ned): |
|
||||||
ned = np.array(ned) |
|
||||||
# Transpose so that init_ecef will broadcast correctly for 1d or 2d ned. |
|
||||||
return (np.dot(self.ned2ecef_matrix, ned.T).T + self.init_ecef) |
|
||||||
|
|
||||||
def geodetic2ned(self, geodetic): |
|
||||||
ecef = geodetic2ecef(geodetic) |
|
||||||
return self.ecef2ned(ecef) |
|
||||||
|
|
||||||
def ned2geodetic(self, ned): |
|
||||||
ecef = self.ned2ecef(ned) |
|
||||||
return ecef2geodetic(ecef) |
|
@ -1,150 +0,0 @@ |
|||||||
import numpy as np |
|
||||||
|
|
||||||
from common.transformations.camera import (FULL_FRAME_SIZE, eon_focal_length, |
|
||||||
get_view_frame_from_road_frame, |
|
||||||
vp_from_ke) |
|
||||||
|
|
||||||
# segnet |
|
||||||
|
|
||||||
SEGNET_SIZE = (512, 384) |
|
||||||
|
|
||||||
segnet_frame_from_camera_frame = np.array([ |
|
||||||
[float(SEGNET_SIZE[0])/FULL_FRAME_SIZE[0], 0., ], |
|
||||||
[ 0., float(SEGNET_SIZE[1])/FULL_FRAME_SIZE[1]]]) |
|
||||||
|
|
||||||
|
|
||||||
# model |
|
||||||
|
|
||||||
MODEL_INPUT_SIZE = (320, 160) |
|
||||||
MODEL_YUV_SIZE = (MODEL_INPUT_SIZE[0], MODEL_INPUT_SIZE[1] * 3 // 2) |
|
||||||
MODEL_CX = MODEL_INPUT_SIZE[0]/2. |
|
||||||
MODEL_CY = 21. |
|
||||||
|
|
||||||
model_zoom = 1.25 |
|
||||||
model_height = 1.22 |
|
||||||
|
|
||||||
# canonical model transform |
|
||||||
model_intrinsics = np.array( |
|
||||||
[[ eon_focal_length / model_zoom, 0. , MODEL_CX], |
|
||||||
[ 0. , eon_focal_length / model_zoom, MODEL_CY], |
|
||||||
[ 0. , 0. , 1.]]) |
|
||||||
|
|
||||||
|
|
||||||
# MED model |
|
||||||
MEDMODEL_INPUT_SIZE = (512, 256) |
|
||||||
MEDMODEL_YUV_SIZE = (MEDMODEL_INPUT_SIZE[0], MEDMODEL_INPUT_SIZE[1] * 3 // 2) |
|
||||||
MEDMODEL_CY = 47.6 |
|
||||||
|
|
||||||
medmodel_zoom = 1. |
|
||||||
medmodel_intrinsics = np.array( |
|
||||||
[[ eon_focal_length / medmodel_zoom, 0. , 0.5 * MEDMODEL_INPUT_SIZE[0]], |
|
||||||
[ 0. , eon_focal_length / medmodel_zoom, MEDMODEL_CY], |
|
||||||
[ 0. , 0. , 1.]]) |
|
||||||
|
|
||||||
|
|
||||||
# BIG model |
|
||||||
|
|
||||||
BIGMODEL_INPUT_SIZE = (864, 288) |
|
||||||
BIGMODEL_YUV_SIZE = (BIGMODEL_INPUT_SIZE[0], BIGMODEL_INPUT_SIZE[1] * 3 // 2) |
|
||||||
|
|
||||||
bigmodel_zoom = 1. |
|
||||||
bigmodel_intrinsics = np.array( |
|
||||||
[[ eon_focal_length / bigmodel_zoom, 0. , 0.5 * BIGMODEL_INPUT_SIZE[0]], |
|
||||||
[ 0. , eon_focal_length / bigmodel_zoom, 0.2 * BIGMODEL_INPUT_SIZE[1]], |
|
||||||
[ 0. , 0. , 1.]]) |
|
||||||
|
|
||||||
|
|
||||||
bigmodel_border = np.array([ |
|
||||||
[0,0,1], |
|
||||||
[BIGMODEL_INPUT_SIZE[0], 0, 1], |
|
||||||
[BIGMODEL_INPUT_SIZE[0], BIGMODEL_INPUT_SIZE[1], 1], |
|
||||||
[0, BIGMODEL_INPUT_SIZE[1], 1], |
|
||||||
]) |
|
||||||
|
|
||||||
|
|
||||||
model_frame_from_road_frame = np.dot(model_intrinsics, |
|
||||||
get_view_frame_from_road_frame(0, 0, 0, model_height)) |
|
||||||
|
|
||||||
bigmodel_frame_from_road_frame = np.dot(bigmodel_intrinsics, |
|
||||||
get_view_frame_from_road_frame(0, 0, 0, model_height)) |
|
||||||
|
|
||||||
medmodel_frame_from_road_frame = np.dot(medmodel_intrinsics, |
|
||||||
get_view_frame_from_road_frame(0, 0, 0, model_height)) |
|
||||||
|
|
||||||
model_frame_from_bigmodel_frame = np.dot(model_intrinsics, np.linalg.inv(bigmodel_intrinsics)) |
|
||||||
|
|
||||||
# 'camera from model camera' |
|
||||||
def get_model_height_transform(camera_frame_from_road_frame, height): |
|
||||||
camera_frame_from_road_ground = np.dot(camera_frame_from_road_frame, np.array([ |
|
||||||
[1, 0, 0], |
|
||||||
[0, 1, 0], |
|
||||||
[0, 0, 0], |
|
||||||
[0, 0, 1], |
|
||||||
])) |
|
||||||
|
|
||||||
camera_frame_from_road_high = np.dot(camera_frame_from_road_frame, np.array([ |
|
||||||
[1, 0, 0], |
|
||||||
[0, 1, 0], |
|
||||||
[0, 0, height - model_height], |
|
||||||
[0, 0, 1], |
|
||||||
])) |
|
||||||
|
|
||||||
road_high_from_camera_frame = np.linalg.inv(camera_frame_from_road_high) |
|
||||||
high_camera_from_low_camera = np.dot(camera_frame_from_road_ground, road_high_from_camera_frame) |
|
||||||
|
|
||||||
return high_camera_from_low_camera |
|
||||||
|
|
||||||
|
|
||||||
# camera_frame_from_model_frame aka 'warp matrix' |
|
||||||
# was: calibration.h/CalibrationTransform |
|
||||||
def get_camera_frame_from_model_frame(camera_frame_from_road_frame, height=model_height): |
|
||||||
vp = vp_from_ke(camera_frame_from_road_frame) |
|
||||||
|
|
||||||
model_camera_from_model_frame = np.array([ |
|
||||||
[model_zoom, 0., vp[0] - MODEL_CX * model_zoom], |
|
||||||
[ 0., model_zoom, vp[1] - MODEL_CY * model_zoom], |
|
||||||
[ 0., 0., 1.], |
|
||||||
]) |
|
||||||
|
|
||||||
# This function is super slow, so skip it if height is very close to canonical |
|
||||||
# TODO: speed it up! |
|
||||||
if abs(height - model_height) > 0.001: # |
|
||||||
camera_from_model_camera = get_model_height_transform(camera_frame_from_road_frame, height) |
|
||||||
else: |
|
||||||
camera_from_model_camera = np.eye(3) |
|
||||||
|
|
||||||
return np.dot(camera_from_model_camera, model_camera_from_model_frame) |
|
||||||
|
|
||||||
|
|
||||||
def get_camera_frame_from_medmodel_frame(camera_frame_from_road_frame): |
|
||||||
camera_frame_from_ground = camera_frame_from_road_frame[:, (0, 1, 3)] |
|
||||||
medmodel_frame_from_ground = medmodel_frame_from_road_frame[:, (0, 1, 3)] |
|
||||||
|
|
||||||
ground_from_medmodel_frame = np.linalg.inv(medmodel_frame_from_ground) |
|
||||||
camera_frame_from_medmodel_frame = np.dot(camera_frame_from_ground, ground_from_medmodel_frame) |
|
||||||
|
|
||||||
return camera_frame_from_medmodel_frame |
|
||||||
|
|
||||||
|
|
||||||
def get_camera_frame_from_bigmodel_frame(camera_frame_from_road_frame): |
|
||||||
camera_frame_from_ground = camera_frame_from_road_frame[:, (0, 1, 3)] |
|
||||||
bigmodel_frame_from_ground = bigmodel_frame_from_road_frame[:, (0, 1, 3)] |
|
||||||
|
|
||||||
ground_from_bigmodel_frame = np.linalg.inv(bigmodel_frame_from_ground) |
|
||||||
camera_frame_from_bigmodel_frame = np.dot(camera_frame_from_ground, ground_from_bigmodel_frame) |
|
||||||
|
|
||||||
return camera_frame_from_bigmodel_frame |
|
||||||
|
|
||||||
|
|
||||||
def get_model_frame(snu_full, camera_frame_from_model_frame, size): |
|
||||||
idxs = camera_frame_from_model_frame.dot(np.column_stack([np.tile(np.arange(size[0]), size[1]), |
|
||||||
np.tile(np.arange(size[1]), (size[0],1)).T.flatten(), |
|
||||||
np.ones(size[0] * size[1])]).T).T.astype(int) |
|
||||||
calib_flat = snu_full[idxs[:,1], idxs[:,0]] |
|
||||||
if len(snu_full.shape) == 3: |
|
||||||
calib = calib_flat.reshape((size[1], size[0], 3)) |
|
||||||
elif len(snu_full.shape) == 2: |
|
||||||
calib = calib_flat.reshape((size[1], size[0])) |
|
||||||
else: |
|
||||||
raise ValueError("shape of input img is weird") |
|
||||||
return calib |
|
@ -1,295 +0,0 @@ |
|||||||
import numpy as np |
|
||||||
from numpy import dot, inner, array, linalg |
|
||||||
from common.transformations.coordinates import LocalCoord |
|
||||||
|
|
||||||
|
|
||||||
''' |
|
||||||
Vectorized functions that transform between |
|
||||||
rotation matrices, euler angles and quaternions. |
|
||||||
All support lists, array or array of arrays as inputs. |
|
||||||
Supports both x2y and y_from_x format (y_from_x preferred!). |
|
||||||
''' |
|
||||||
|
|
||||||
def euler2quat(eulers): |
|
||||||
eulers = array(eulers) |
|
||||||
if len(eulers.shape) > 1: |
|
||||||
output_shape = (-1,4) |
|
||||||
else: |
|
||||||
output_shape = (4,) |
|
||||||
eulers = np.atleast_2d(eulers) |
|
||||||
gamma, theta, psi = eulers[:,0], eulers[:,1], eulers[:,2] |
|
||||||
|
|
||||||
q0 = np.cos(gamma / 2) * np.cos(theta / 2) * np.cos(psi / 2) + \ |
|
||||||
np.sin(gamma / 2) * np.sin(theta / 2) * np.sin(psi / 2) |
|
||||||
q1 = np.sin(gamma / 2) * np.cos(theta / 2) * np.cos(psi / 2) - \ |
|
||||||
np.cos(gamma / 2) * np.sin(theta / 2) * np.sin(psi / 2) |
|
||||||
q2 = np.cos(gamma / 2) * np.sin(theta / 2) * np.cos(psi / 2) + \ |
|
||||||
np.sin(gamma / 2) * np.cos(theta / 2) * np.sin(psi / 2) |
|
||||||
q3 = np.cos(gamma / 2) * np.cos(theta / 2) * np.sin(psi / 2) - \ |
|
||||||
np.sin(gamma / 2) * np.sin(theta / 2) * np.cos(psi / 2) |
|
||||||
|
|
||||||
quats = array([q0, q1, q2, q3]).T |
|
||||||
for i in range(len(quats)): |
|
||||||
if quats[i,0] < 0: |
|
||||||
quats[i] = -quats[i] |
|
||||||
return quats.reshape(output_shape) |
|
||||||
|
|
||||||
|
|
||||||
def quat2euler(quats): |
|
||||||
quats = array(quats) |
|
||||||
if len(quats.shape) > 1: |
|
||||||
output_shape = (-1,3) |
|
||||||
else: |
|
||||||
output_shape = (3,) |
|
||||||
quats = np.atleast_2d(quats) |
|
||||||
q0, q1, q2, q3 = quats[:,0], quats[:,1], quats[:,2], quats[:,3] |
|
||||||
|
|
||||||
gamma = np.arctan2(2 * (q0 * q1 + q2 * q3), 1 - 2 * (q1**2 + q2**2)) |
|
||||||
theta = np.arcsin(2 * (q0 * q2 - q3 * q1)) |
|
||||||
psi = np.arctan2(2 * (q0 * q3 + q1 * q2), 1 - 2 * (q2**2 + q3**2)) |
|
||||||
|
|
||||||
eulers = array([gamma, theta, psi]).T |
|
||||||
return eulers.reshape(output_shape) |
|
||||||
|
|
||||||
|
|
||||||
def quat2rot(quats): |
|
||||||
quats = array(quats) |
|
||||||
input_shape = quats.shape |
|
||||||
quats = np.atleast_2d(quats) |
|
||||||
Rs = np.zeros((quats.shape[0], 3, 3)) |
|
||||||
q0 = quats[:, 0] |
|
||||||
q1 = quats[:, 1] |
|
||||||
q2 = quats[:, 2] |
|
||||||
q3 = quats[:, 3] |
|
||||||
Rs[:, 0, 0] = q0 * q0 + q1 * q1 - q2 * q2 - q3 * q3 |
|
||||||
Rs[:, 0, 1] = 2 * (q1 * q2 - q0 * q3) |
|
||||||
Rs[:, 0, 2] = 2 * (q0 * q2 + q1 * q3) |
|
||||||
Rs[:, 1, 0] = 2 * (q1 * q2 + q0 * q3) |
|
||||||
Rs[:, 1, 1] = q0 * q0 - q1 * q1 + q2 * q2 - q3 * q3 |
|
||||||
Rs[:, 1, 2] = 2 * (q2 * q3 - q0 * q1) |
|
||||||
Rs[:, 2, 0] = 2 * (q1 * q3 - q0 * q2) |
|
||||||
Rs[:, 2, 1] = 2 * (q0 * q1 + q2 * q3) |
|
||||||
Rs[:, 2, 2] = q0 * q0 - q1 * q1 - q2 * q2 + q3 * q3 |
|
||||||
|
|
||||||
if len(input_shape) < 2: |
|
||||||
return Rs[0] |
|
||||||
else: |
|
||||||
return Rs |
|
||||||
|
|
||||||
|
|
||||||
def rot2quat(rots): |
|
||||||
input_shape = rots.shape |
|
||||||
if len(input_shape) < 3: |
|
||||||
rots = array([rots]) |
|
||||||
K3 = np.empty((len(rots), 4, 4)) |
|
||||||
K3[:, 0, 0] = (rots[:, 0, 0] - rots[:, 1, 1] - rots[:, 2, 2]) / 3.0 |
|
||||||
K3[:, 0, 1] = (rots[:, 1, 0] + rots[:, 0, 1]) / 3.0 |
|
||||||
K3[:, 0, 2] = (rots[:, 2, 0] + rots[:, 0, 2]) / 3.0 |
|
||||||
K3[:, 0, 3] = (rots[:, 1, 2] - rots[:, 2, 1]) / 3.0 |
|
||||||
K3[:, 1, 0] = K3[:, 0, 1] |
|
||||||
K3[:, 1, 1] = (rots[:, 1, 1] - rots[:, 0, 0] - rots[:, 2, 2]) / 3.0 |
|
||||||
K3[:, 1, 2] = (rots[:, 2, 1] + rots[:, 1, 2]) / 3.0 |
|
||||||
K3[:, 1, 3] = (rots[:, 2, 0] - rots[:, 0, 2]) / 3.0 |
|
||||||
K3[:, 2, 0] = K3[:, 0, 2] |
|
||||||
K3[:, 2, 1] = K3[:, 1, 2] |
|
||||||
K3[:, 2, 2] = (rots[:, 2, 2] - rots[:, 0, 0] - rots[:, 1, 1]) / 3.0 |
|
||||||
K3[:, 2, 3] = (rots[:, 0, 1] - rots[:, 1, 0]) / 3.0 |
|
||||||
K3[:, 3, 0] = K3[:, 0, 3] |
|
||||||
K3[:, 3, 1] = K3[:, 1, 3] |
|
||||||
K3[:, 3, 2] = K3[:, 2, 3] |
|
||||||
K3[:, 3, 3] = (rots[:, 0, 0] + rots[:, 1, 1] + rots[:, 2, 2]) / 3.0 |
|
||||||
q = np.empty((len(rots), 4)) |
|
||||||
for i in range(len(rots)): |
|
||||||
_, eigvecs = linalg.eigh(K3[i].T) |
|
||||||
eigvecs = eigvecs[:,3:] |
|
||||||
q[i, 0] = eigvecs[-1] |
|
||||||
q[i, 1:] = -eigvecs[:-1].flatten() |
|
||||||
if q[i, 0] < 0: |
|
||||||
q[i] = -q[i] |
|
||||||
|
|
||||||
if len(input_shape) < 3: |
|
||||||
return q[0] |
|
||||||
else: |
|
||||||
return q |
|
||||||
|
|
||||||
|
|
||||||
def euler2rot(eulers): |
|
||||||
return rotations_from_quats(euler2quat(eulers)) |
|
||||||
|
|
||||||
|
|
||||||
def rot2euler(rots): |
|
||||||
return quat2euler(quats_from_rotations(rots)) |
|
||||||
|
|
||||||
|
|
||||||
quats_from_rotations = rot2quat |
|
||||||
quat_from_rot = rot2quat |
|
||||||
rotations_from_quats = quat2rot |
|
||||||
rot_from_quat= quat2rot |
|
||||||
rot_from_quat= quat2rot |
|
||||||
euler_from_rot = rot2euler |
|
||||||
euler_from_quat = quat2euler |
|
||||||
rot_from_euler = euler2rot |
|
||||||
quat_from_euler = euler2quat |
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
''' |
|
||||||
Random helpers below |
|
||||||
''' |
|
||||||
|
|
||||||
|
|
||||||
def quat_product(q, r): |
|
||||||
t = np.zeros(4) |
|
||||||
t[0] = r[0] * q[0] - r[1] * q[1] - r[2] * q[2] - r[3] * q[3] |
|
||||||
t[1] = r[0] * q[1] + r[1] * q[0] - r[2] * q[3] + r[3] * q[2] |
|
||||||
t[2] = r[0] * q[2] + r[1] * q[3] + r[2] * q[0] - r[3] * q[1] |
|
||||||
t[3] = r[0] * q[3] - r[1] * q[2] + r[2] * q[1] + r[3] * q[0] |
|
||||||
return t |
|
||||||
|
|
||||||
|
|
||||||
def rot_matrix(roll, pitch, yaw): |
|
||||||
cr, sr = np.cos(roll), np.sin(roll) |
|
||||||
cp, sp = np.cos(pitch), np.sin(pitch) |
|
||||||
cy, sy = np.cos(yaw), np.sin(yaw) |
|
||||||
rr = array([[1,0,0],[0, cr,-sr],[0, sr, cr]]) |
|
||||||
rp = array([[cp,0,sp],[0, 1,0],[-sp, 0, cp]]) |
|
||||||
ry = array([[cy,-sy,0],[sy, cy,0],[0, 0, 1]]) |
|
||||||
return ry.dot(rp.dot(rr)) |
|
||||||
|
|
||||||
|
|
||||||
def rot(axis, angle): |
|
||||||
# Rotates around an arbitrary axis |
|
||||||
ret_1 = (1 - np.cos(angle)) * array([[axis[0]**2, axis[0] * axis[1], axis[0] * axis[2]], [ |
|
||||||
axis[1] * axis[0], axis[1]**2, axis[1] * axis[2] |
|
||||||
], [axis[2] * axis[0], axis[2] * axis[1], axis[2]**2]]) |
|
||||||
ret_2 = np.cos(angle) * np.eye(3) |
|
||||||
ret_3 = np.sin(angle) * array([[0, -axis[2], axis[1]], [axis[2], 0, -axis[0]], |
|
||||||
[-axis[1], axis[0], 0]]) |
|
||||||
return ret_1 + ret_2 + ret_3 |
|
||||||
|
|
||||||
|
|
||||||
def ecef_euler_from_ned(ned_ecef_init, ned_pose): |
|
||||||
''' |
|
||||||
Got it from here: |
|
||||||
Using Rotations to Build Aerospace Coordinate Systems |
|
||||||
-Don Koks |
|
||||||
''' |
|
||||||
converter = LocalCoord.from_ecef(ned_ecef_init) |
|
||||||
x0 = converter.ned2ecef([1, 0, 0]) - converter.ned2ecef([0, 0, 0]) |
|
||||||
y0 = converter.ned2ecef([0, 1, 0]) - converter.ned2ecef([0, 0, 0]) |
|
||||||
z0 = converter.ned2ecef([0, 0, 1]) - converter.ned2ecef([0, 0, 0]) |
|
||||||
|
|
||||||
x1 = rot(z0, ned_pose[2]).dot(x0) |
|
||||||
y1 = rot(z0, ned_pose[2]).dot(y0) |
|
||||||
z1 = rot(z0, ned_pose[2]).dot(z0) |
|
||||||
|
|
||||||
x2 = rot(y1, ned_pose[1]).dot(x1) |
|
||||||
y2 = rot(y1, ned_pose[1]).dot(y1) |
|
||||||
z2 = rot(y1, ned_pose[1]).dot(z1) |
|
||||||
|
|
||||||
x3 = rot(x2, ned_pose[0]).dot(x2) |
|
||||||
y3 = rot(x2, ned_pose[0]).dot(y2) |
|
||||||
#z3 = rot(x2, ned_pose[0]).dot(z2) |
|
||||||
|
|
||||||
x0 = array([1, 0, 0]) |
|
||||||
y0 = array([0, 1, 0]) |
|
||||||
z0 = array([0, 0, 1]) |
|
||||||
|
|
||||||
psi = np.arctan2(inner(x3, y0), inner(x3, x0)) |
|
||||||
theta = np.arctan2(-inner(x3, z0), np.sqrt(inner(x3, x0)**2 + inner(x3, y0)**2)) |
|
||||||
y2 = rot(z0, psi).dot(y0) |
|
||||||
z2 = rot(y2, theta).dot(z0) |
|
||||||
phi = np.arctan2(inner(y3, z2), inner(y3, y2)) |
|
||||||
|
|
||||||
ret = array([phi, theta, psi]) |
|
||||||
return ret |
|
||||||
|
|
||||||
|
|
||||||
def ned_euler_from_ecef(ned_ecef_init, ecef_poses): |
|
||||||
''' |
|
||||||
Got the math from here: |
|
||||||
Using Rotations to Build Aerospace Coordinate Systems |
|
||||||
-Don Koks |
|
||||||
|
|
||||||
Also accepts array of ecef_poses and array of ned_ecef_inits. |
|
||||||
Where each row is a pose and an ecef_init. |
|
||||||
''' |
|
||||||
ned_ecef_init = array(ned_ecef_init) |
|
||||||
ecef_poses = array(ecef_poses) |
|
||||||
output_shape = ecef_poses.shape |
|
||||||
ned_ecef_init = np.atleast_2d(ned_ecef_init) |
|
||||||
if ned_ecef_init.shape[0] == 1: |
|
||||||
ned_ecef_init = np.tile(ned_ecef_init[0], (output_shape[0], 1)) |
|
||||||
ecef_poses = np.atleast_2d(ecef_poses) |
|
||||||
|
|
||||||
ned_poses = np.zeros(ecef_poses.shape) |
|
||||||
for i, ecef_pose in enumerate(ecef_poses): |
|
||||||
converter = LocalCoord.from_ecef(ned_ecef_init[i]) |
|
||||||
x0 = array([1, 0, 0]) |
|
||||||
y0 = array([0, 1, 0]) |
|
||||||
z0 = array([0, 0, 1]) |
|
||||||
|
|
||||||
x1 = rot(z0, ecef_pose[2]).dot(x0) |
|
||||||
y1 = rot(z0, ecef_pose[2]).dot(y0) |
|
||||||
z1 = rot(z0, ecef_pose[2]).dot(z0) |
|
||||||
|
|
||||||
x2 = rot(y1, ecef_pose[1]).dot(x1) |
|
||||||
y2 = rot(y1, ecef_pose[1]).dot(y1) |
|
||||||
z2 = rot(y1, ecef_pose[1]).dot(z1) |
|
||||||
|
|
||||||
x3 = rot(x2, ecef_pose[0]).dot(x2) |
|
||||||
y3 = rot(x2, ecef_pose[0]).dot(y2) |
|
||||||
#z3 = rot(x2, ecef_pose[0]).dot(z2) |
|
||||||
|
|
||||||
x0 = converter.ned2ecef([1, 0, 0]) - converter.ned2ecef([0, 0, 0]) |
|
||||||
y0 = converter.ned2ecef([0, 1, 0]) - converter.ned2ecef([0, 0, 0]) |
|
||||||
z0 = converter.ned2ecef([0, 0, 1]) - converter.ned2ecef([0, 0, 0]) |
|
||||||
|
|
||||||
psi = np.arctan2(inner(x3, y0), inner(x3, x0)) |
|
||||||
theta = np.arctan2(-inner(x3, z0), np.sqrt(inner(x3, x0)**2 + inner(x3, y0)**2)) |
|
||||||
y2 = rot(z0, psi).dot(y0) |
|
||||||
z2 = rot(y2, theta).dot(z0) |
|
||||||
phi = np.arctan2(inner(y3, z2), inner(y3, y2)) |
|
||||||
ned_poses[i] = array([phi, theta, psi]) |
|
||||||
|
|
||||||
return ned_poses.reshape(output_shape) |
|
||||||
|
|
||||||
|
|
||||||
def ecef2car(car_ecef, psi, theta, points_ecef, ned_converter): |
|
||||||
""" |
|
||||||
TODO: add roll rotation |
|
||||||
Converts an array of points in ecef coordinates into |
|
||||||
x-forward, y-left, z-up coordinates |
|
||||||
Parameters |
|
||||||
---------- |
|
||||||
psi: yaw, radian |
|
||||||
theta: pitch, radian |
|
||||||
Returns |
|
||||||
------- |
|
||||||
[x, y, z] coordinates in car frame |
|
||||||
""" |
|
||||||
|
|
||||||
# input is an array of points in ecef cocrdinates |
|
||||||
# output is an array of points in car's coordinate (x-front, y-left, z-up) |
|
||||||
|
|
||||||
# convert points to NED |
|
||||||
points_ned = [] |
|
||||||
for p in points_ecef: |
|
||||||
points_ned.append(ned_converter.ecef2ned_matrix.dot(array(p) - car_ecef)) |
|
||||||
|
|
||||||
points_ned = np.vstack(points_ned).T |
|
||||||
|
|
||||||
# n, e, d -> x, y, z |
|
||||||
# Calculate relative postions and rotate wrt to heading and pitch of car |
|
||||||
invert_R = array([[1., 0., 0.], [0., -1., 0.], [0., 0., -1.]]) |
|
||||||
|
|
||||||
c, s = np.cos(psi), np.sin(psi) |
|
||||||
yaw_R = array([[c, s, 0.], [-s, c, 0.], [0., 0., 1.]]) |
|
||||||
|
|
||||||
c, s = np.cos(theta), np.sin(theta) |
|
||||||
pitch_R = array([[c, 0., -s], [0., 1., 0.], [s, 0., c]]) |
|
||||||
|
|
||||||
return dot(pitch_R, dot(yaw_R, dot(invert_R, points_ned))) |
|
@ -1,98 +0,0 @@ |
|||||||
CC = clang
|
|
||||||
CXX = clang++
|
|
||||||
|
|
||||||
PHONELIBS = ../../phonelibs
|
|
||||||
|
|
||||||
WARN_FLAGS = -Werror=implicit-function-declaration \
|
|
||||||
-Werror=incompatible-pointer-types \
|
|
||||||
-Werror=int-conversion \
|
|
||||||
-Werror=return-type \
|
|
||||||
-Werror=format-extra-args
|
|
||||||
|
|
||||||
CFLAGS = -std=gnu11 -g -fPIC -O2 $(WARN_FLAGS)
|
|
||||||
CXXFLAGS = -std=c++11 -g -fPIC -O2 $(WARN_FLAGS)
|
|
||||||
|
|
||||||
CURL_FLAGS = -I$(PHONELIBS)/curl/include
|
|
||||||
CURL_LIBS = $(PHONELIBS)/curl/lib/libcurl.a \
|
|
||||||
$(PHONELIBS)/zlib/lib/libz.a
|
|
||||||
|
|
||||||
BORINGSSL_FLAGS = -I$(PHONELIBS)/boringssl/include
|
|
||||||
BORINGSSL_LIBS = $(PHONELIBS)/boringssl/lib/libssl_static.a \
|
|
||||||
$(PHONELIBS)/boringssl/lib/libcrypto_static.a \
|
|
||||||
|
|
||||||
NANOVG_FLAGS = -I$(PHONELIBS)/nanovg
|
|
||||||
|
|
||||||
JSON11_FLAGS = -I$(PHONELIBS)/json11
|
|
||||||
|
|
||||||
OPENGL_LIBS = -lGLESv3
|
|
||||||
|
|
||||||
FRAMEBUFFER_LIBS = -lutils -lgui -lEGL
|
|
||||||
|
|
||||||
.PHONY: all |
|
||||||
all: updater |
|
||||||
|
|
||||||
OBJS = opensans_regular.ttf.o \
|
|
||||||
opensans_semibold.ttf.o \
|
|
||||||
opensans_bold.ttf.o \
|
|
||||||
../../selfdrive/common/touch.o \
|
|
||||||
../../selfdrive/common/framebuffer.o \
|
|
||||||
$(PHONELIBS)/json11/json11.o \
|
|
||||||
$(PHONELIBS)/nanovg/nanovg.o
|
|
||||||
|
|
||||||
DEPS := $(OBJS:.o=.d)
|
|
||||||
|
|
||||||
updater: updater.o $(OBJS) |
|
||||||
@echo "[ LINK ] $@"
|
|
||||||
$(CXX) $(CPPFLAGS) -fPIC -o 'updater' $^ \
|
|
||||||
$(FRAMEBUFFER_LIBS) \
|
|
||||||
$(CURL_LIBS) \
|
|
||||||
$(BORINGSSL_LIBS) \
|
|
||||||
-L/system/vendor/lib64 \
|
|
||||||
$(OPENGL_LIBS) \
|
|
||||||
-lcutils -lm -llog
|
|
||||||
strip updater
|
|
||||||
|
|
||||||
opensans_regular.ttf.o: ../../selfdrive/assets/fonts/opensans_regular.ttf |
|
||||||
@echo "[ bin2o ] $@"
|
|
||||||
cd '$(dir $<)' && ld -r -b binary '$(notdir $<)' -o '$(abspath $@)'
|
|
||||||
|
|
||||||
opensans_bold.ttf.o: ../../selfdrive/assets/fonts/opensans_bold.ttf |
|
||||||
@echo "[ bin2o ] $@"
|
|
||||||
cd '$(dir $<)' && ld -r -b binary '$(notdir $<)' -o '$(abspath $@)'
|
|
||||||
|
|
||||||
opensans_semibold.ttf.o: ../../selfdrive/assets/fonts/opensans_semibold.ttf |
|
||||||
@echo "[ bin2o ] $@"
|
|
||||||
cd '$(dir $<)' && ld -r -b binary '$(notdir $<)' -o '$(abspath $@)'
|
|
||||||
|
|
||||||
%.o: %.c |
|
||||||
mkdir -p $(@D)
|
|
||||||
@echo "[ CC ] $@"
|
|
||||||
$(CC) $(CPPFLAGS) $(CFLAGS) \
|
|
||||||
-I../.. \
|
|
||||||
-I$(PHONELIBS)/android_frameworks_native/include \
|
|
||||||
-I$(PHONELIBS)/android_system_core/include \
|
|
||||||
-I$(PHONELIBS)/android_hardware_libhardware/include \
|
|
||||||
$(NANOVG_FLAGS) \
|
|
||||||
-c -o '$@' '$<'
|
|
||||||
|
|
||||||
%.o: %.cc |
|
||||||
mkdir -p $(@D)
|
|
||||||
@echo "[ CXX ] $@"
|
|
||||||
$(CXX) $(CPPFLAGS) $(CXXFLAGS) \
|
|
||||||
-I../../selfdrive \
|
|
||||||
-I../../ \
|
|
||||||
-I$(PHONELIBS)/android_frameworks_native/include \
|
|
||||||
-I$(PHONELIBS)/android_system_core/include \
|
|
||||||
-I$(PHONELIBS)/android_hardware_libhardware/include \
|
|
||||||
$(NANOVG_FLAGS) \
|
|
||||||
$(JSON11_FLAGS) \
|
|
||||||
$(CURL_FLAGS) \
|
|
||||||
$(BORINGSSL_FLAGS) \
|
|
||||||
-c -o '$@' '$<'
|
|
||||||
|
|
||||||
|
|
||||||
.PHONY: clean |
|
||||||
clean: |
|
||||||
rm -f $(OBJS) $(DEPS)
|
|
||||||
|
|
||||||
-include $(DEPS) |
|
@ -1,7 +0,0 @@ |
|||||||
{ |
|
||||||
"ota_url": "https://commadist.azureedge.net/neosupdate/ota-signed-efdf7de63b1aef63d68301e6175930991bf9a5927d16ec6fcc69287e2ee7ca4a.zip", |
|
||||||
"ota_hash": "efdf7de63b1aef63d68301e6175930991bf9a5927d16ec6fcc69287e2ee7ca4a", |
|
||||||
"recovery_url": "https://commadist.azureedge.net/neosupdate/recovery-97c27e6ed04ed6bb0608b845a2d4100912093f9380c3f2ba6b56bccd608e5f6e.img", |
|
||||||
"recovery_len": 15861036, |
|
||||||
"recovery_hash": "97c27e6ed04ed6bb0608b845a2d4100912093f9380c3f2ba6b56bccd608e5f6e" |
|
||||||
} |
|
@ -1,3 +0,0 @@ |
|||||||
version https://git-lfs.github.com/spec/v1 |
|
||||||
oid sha256:9d5a44fd0dcf94172c1637c951e1da3b52d6a7049c9c12e14a02900a0f9e9aa4 |
|
||||||
size 2468632 |
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue