diff --git a/drivers/sensor/akm09918c/akm09918c.c b/drivers/sensor/akm09918c/akm09918c.c index f9c72ec21b9f46..1225d69a3d24de 100644 --- a/drivers/sensor/akm09918c/akm09918c.c +++ b/drivers/sensor/akm09918c/akm09918c.c @@ -27,7 +27,7 @@ static int akm09918c_sample_fetch(const struct device *dev, enum sensor_channel if (chan != SENSOR_CHAN_ALL && chan != SENSOR_CHAN_MAGN_X && chan != SENSOR_CHAN_MAGN_Y && chan != SENSOR_CHAN_MAGN_Z && chan != SENSOR_CHAN_MAGN_XYZ) { - LOG_WRN("Invalid channel %d", chan); + LOG_DBG("Invalid channel %d", chan); return -EINVAL; } @@ -85,7 +85,7 @@ static int akm09918c_channel_get(const struct device *dev, enum sensor_channel c } else if (chan == SENSOR_CHAN_MAGN_Z) { akm09918c_convert(val, data->z_sample); } else { - LOG_WRN("Invalid channel %d", chan); + LOG_DBG("Invalid channel %d", chan); return -ENOTSUP; } diff --git a/drivers/sensor/default_rtio_sensor.c b/drivers/sensor/default_rtio_sensor.c index 55db893f4d89c0..60534c84d282e8 100644 --- a/drivers/sensor/default_rtio_sensor.c +++ b/drivers/sensor/default_rtio_sensor.c @@ -251,66 +251,205 @@ void sensor_processing_with_callback(struct rtio *ctx, sensor_processing_callbac * Default reader can only ever service a single frame at a time. * * @param[in] buffer The data buffer to parse + * @param[in] channel The channel to get the count for + * @param[in] channel_idx The index of the channel * @param[out] frame_count The number of frames in the buffer (always 1) * @return 0 in all cases */ -static int get_frame_count(const uint8_t *buffer, uint16_t *frame_count) +static int get_frame_count(const uint8_t *buffer, enum sensor_channel channel, size_t channel_idx, + uint16_t *frame_count) { - ARG_UNUSED(buffer); - *frame_count = 1; - return 0; + struct sensor_data_generic_header *header = (struct sensor_data_generic_header *)buffer; + size_t count = 0; + + switch (channel) { + case SENSOR_CHAN_ACCEL_XYZ: + channel = SENSOR_CHAN_ACCEL_X; + break; + case SENSOR_CHAN_GYRO_XYZ: + channel = SENSOR_CHAN_GYRO_X; + break; + case SENSOR_CHAN_MAGN_XYZ: + channel = SENSOR_CHAN_MAGN_X; + break; + default: + break; + } + for (size_t i = 0; i < header->num_channels; ++i) { + if (header->channels[i] == channel) { + if (channel_idx == count) { + *frame_count = 1; + return 0; + } + ++count; + } + } + + return -ENOTSUP; } -/** - * @brief Default decoder get the timestamp of the first frame - * - * @param[in] buffer The data buffer to parse - * @param[out] timestamp_ns The timestamp of the first frame - * @return 0 in all cases - */ -static int get_timestamp(const uint8_t *buffer, uint64_t *timestamp_ns) +int sensor_natively_supported_channel_size_info(enum sensor_channel channel, size_t *base_size, + size_t *frame_size) { - *timestamp_ns = ((struct sensor_data_generic_header *)buffer)->timestamp_ns; - return 0; + __ASSERT_NO_MSG(base_size != NULL); + __ASSERT_NO_MSG(frame_size != NULL); + + switch (channel) { + case SENSOR_CHAN_ACCEL_X: + case SENSOR_CHAN_ACCEL_Y: + case SENSOR_CHAN_ACCEL_Z: + case SENSOR_CHAN_ACCEL_XYZ: + case SENSOR_CHAN_GYRO_X: + case SENSOR_CHAN_GYRO_Y: + case SENSOR_CHAN_GYRO_Z: + case SENSOR_CHAN_GYRO_XYZ: + case SENSOR_CHAN_MAGN_X: + case SENSOR_CHAN_MAGN_Y: + case SENSOR_CHAN_MAGN_Z: + case SENSOR_CHAN_MAGN_XYZ: + case SENSOR_CHAN_POS_DX: + case SENSOR_CHAN_POS_DY: + case SENSOR_CHAN_POS_DZ: + *base_size = sizeof(struct sensor_three_axis_data); + *frame_size = sizeof(struct sensor_three_axis_sample_data); + return 0; + case SENSOR_CHAN_DIE_TEMP: + case SENSOR_CHAN_AMBIENT_TEMP: + case SENSOR_CHAN_PRESS: + 
case SENSOR_CHAN_HUMIDITY: + case SENSOR_CHAN_LIGHT: + case SENSOR_CHAN_IR: + case SENSOR_CHAN_RED: + case SENSOR_CHAN_GREEN: + case SENSOR_CHAN_BLUE: + case SENSOR_CHAN_ALTITUDE: + case SENSOR_CHAN_PM_1_0: + case SENSOR_CHAN_PM_2_5: + case SENSOR_CHAN_PM_10: + case SENSOR_CHAN_DISTANCE: + case SENSOR_CHAN_CO2: + case SENSOR_CHAN_VOC: + case SENSOR_CHAN_GAS_RES: + case SENSOR_CHAN_VOLTAGE: + case SENSOR_CHAN_CURRENT: + case SENSOR_CHAN_POWER: + case SENSOR_CHAN_RESISTANCE: + case SENSOR_CHAN_ROTATION: + case SENSOR_CHAN_RPM: + case SENSOR_CHAN_GAUGE_VOLTAGE: + case SENSOR_CHAN_GAUGE_AVG_CURRENT: + case SENSOR_CHAN_GAUGE_STDBY_CURRENT: + case SENSOR_CHAN_GAUGE_MAX_LOAD_CURRENT: + case SENSOR_CHAN_GAUGE_TEMP: + case SENSOR_CHAN_GAUGE_STATE_OF_CHARGE: + case SENSOR_CHAN_GAUGE_FULL_CHARGE_CAPACITY: + case SENSOR_CHAN_GAUGE_REMAINING_CHARGE_CAPACITY: + case SENSOR_CHAN_GAUGE_NOM_AVAIL_CAPACITY: + case SENSOR_CHAN_GAUGE_FULL_AVAIL_CAPACITY: + case SENSOR_CHAN_GAUGE_AVG_POWER: + case SENSOR_CHAN_GAUGE_STATE_OF_HEALTH: + case SENSOR_CHAN_GAUGE_TIME_TO_EMPTY: + case SENSOR_CHAN_GAUGE_TIME_TO_FULL: + case SENSOR_CHAN_GAUGE_DESIGN_VOLTAGE: + case SENSOR_CHAN_GAUGE_DESIRED_VOLTAGE: + case SENSOR_CHAN_GAUGE_DESIRED_CHARGING_CURRENT: + *base_size = sizeof(struct sensor_q31_data); + *frame_size = sizeof(struct sensor_q31_sample_data); + return 0; + case SENSOR_CHAN_PROX: + *base_size = sizeof(struct sensor_byte_data); + *frame_size = sizeof(struct sensor_byte_sample_data); + return 0; + case SENSOR_CHAN_GAUGE_CYCLE_COUNT: + *base_size = sizeof(struct sensor_uint64_data); + *frame_size = sizeof(struct sensor_uint64_sample_data); + return 0; + default: + return -ENOTSUP; + } } -/** - * @brief Default decoder get the bitshift of the given channel (if possible) - * - * @param[in] buffer The data buffer to parse - * @param[in] channel_type The channel to query - * @param[out] shift The bitshift for the q31 value - * @return 0 on success - * @return -EINVAL if the @p channel_type couldn't be found - */ -static int get_shift(const uint8_t *buffer, enum sensor_channel channel_type, int8_t *shift) +static int get_q31_value(const struct sensor_data_generic_header *header, const q31_t *values, + enum sensor_channel channel, size_t channel_idx, q31_t *out) { - struct sensor_data_generic_header *header = (struct sensor_data_generic_header *)buffer; + size_t count = 0; - ARG_UNUSED(channel_type); - *shift = header->shift; - return 0; + for (size_t i = 0; i < header->num_channels; ++i) { + if (channel != header->channels[i]) { + continue; + } + if (count == channel_idx) { + *out = values[i]; + return 0; + } + ++count; + } + return -EINVAL; +} + +static int decode_three_axis(const struct sensor_data_generic_header *header, const q31_t *values, + struct sensor_three_axis_data *data_out, enum sensor_channel x, + enum sensor_channel y, enum sensor_channel z, size_t channel_idx) +{ + int rc; + + data_out->header.base_timestamp_ns = header->timestamp_ns; + data_out->header.reading_count = 1; + data_out->shift = header->shift; + data_out->readings[0].timestamp_delta = 0; + + rc = get_q31_value(header, values, x, channel_idx, &data_out->readings[0].values[0]); + if (rc < 0) { + return rc; + } + rc = get_q31_value(header, values, y, channel_idx, &data_out->readings[0].values[1]); + if (rc < 0) { + return rc; + } + rc = get_q31_value(header, values, z, channel_idx, &data_out->readings[0].values[2]); + if (rc < 0) { + return rc; + } + return 1; +} + +static int decode_q31(const struct sensor_data_generic_header *header, const 
q31_t *values, + struct sensor_q31_data *data_out, enum sensor_channel channel, + size_t channel_idx) +{ + int rc; + + data_out->header.base_timestamp_ns = header->timestamp_ns; + data_out->header.reading_count = 1; + data_out->shift = header->shift; + data_out->readings[0].timestamp_delta = 0; + + rc = get_q31_value(header, values, channel, channel_idx, &data_out->readings[0].value); + if (rc < 0) { + return rc; + } + return 1; } /** - * @brief Default decoder decode N samples + * @brief Decode up to N samples from the buffer * - * Decode up to N samples starting at the provided @p fit and @p cit. The appropriate channel types - * and q31 values will be placed in @p values and @p channels respectively. + * This function will never wrap frames. If 1 channel is available in the current frame and + * @p max_count is 2, only 1 channel will be decoded and the frame iterator will be modified + * so that the next call to decode will begin at the next frame. * - * @param[in] buffer The data buffer to decode - * @param[in,out] fit The starting frame iterator - * @param[in,out] cit The starting channel iterator - * @param[out] channels The decoded channel types - * @param[out] values The decoded q31 values - * @param[in] max_count The maximum number of values to decode - * @return > 0 The number of decoded values - * @return 0 Nothing else to decode on this @p buffer - * @return < 0 Error + * @param[in] buffer The buffer provided on the :c:struct:`rtio` context + * @param[in] channel The channel to decode + * @param[in] channel_idx The index of the channel + * @param[in,out] fit The current frame iterator + * @param[in] max_count The maximum number of channels to decode. + * @param[out] data_out The decoded data + * @return 0 no more samples to decode + * @return >0 the number of decoded frames + * @return <0 on error */ -static int decode(const uint8_t *buffer, sensor_frame_iterator_t *fit, - sensor_channel_iterator_t *cit, enum sensor_channel *channels, q31_t *values, - uint8_t max_count) +static int decode(const uint8_t *buffer, enum sensor_channel channel, size_t channel_idx, + uint32_t *fit, uint16_t max_count, void *data_out) { const struct sensor_data_generic_header *header = (const struct sensor_data_generic_header *)buffer; @@ -319,32 +458,92 @@ static int decode(const uint8_t *buffer, sensor_frame_iterator_t *fit, header->num_channels * sizeof(enum sensor_channel)); int count = 0; - if (*fit != 0 || *cit >= header->num_channels) { + if (*fit != 0 || max_count < 1) { return -EINVAL; } - /* Skip invalid channels */ - while (*cit < header->num_channels && header->channels[*cit] == SENSOR_CHAN_MAX) { - *cit += 1; + /* Check for 3d channel mappings */ + switch (channel) { + case SENSOR_CHAN_ACCEL_X: + case SENSOR_CHAN_ACCEL_Y: + case SENSOR_CHAN_ACCEL_Z: + case SENSOR_CHAN_ACCEL_XYZ: + count = decode_three_axis(header, q, data_out, SENSOR_CHAN_ACCEL_X, + SENSOR_CHAN_ACCEL_Y, SENSOR_CHAN_ACCEL_Z, channel_idx); + break; + case SENSOR_CHAN_GYRO_X: + case SENSOR_CHAN_GYRO_Y: + case SENSOR_CHAN_GYRO_Z: + case SENSOR_CHAN_GYRO_XYZ: + count = decode_three_axis(header, q, data_out, SENSOR_CHAN_GYRO_X, + SENSOR_CHAN_GYRO_Y, SENSOR_CHAN_GYRO_Z, channel_idx); + break; + case SENSOR_CHAN_MAGN_X: + case SENSOR_CHAN_MAGN_Y: + case SENSOR_CHAN_MAGN_Z: + case SENSOR_CHAN_MAGN_XYZ: + count = decode_three_axis(header, q, data_out, SENSOR_CHAN_MAGN_X, + SENSOR_CHAN_MAGN_Y, SENSOR_CHAN_MAGN_Z, channel_idx); + break; + case SENSOR_CHAN_POS_DX: + case SENSOR_CHAN_POS_DY: + case SENSOR_CHAN_POS_DZ: + count = 
decode_three_axis(header, q, data_out, SENSOR_CHAN_POS_DX, + SENSOR_CHAN_POS_DY, SENSOR_CHAN_POS_DZ, channel_idx); + break; + case SENSOR_CHAN_DIE_TEMP: + case SENSOR_CHAN_AMBIENT_TEMP: + case SENSOR_CHAN_PRESS: + case SENSOR_CHAN_HUMIDITY: + case SENSOR_CHAN_LIGHT: + case SENSOR_CHAN_IR: + case SENSOR_CHAN_RED: + case SENSOR_CHAN_GREEN: + case SENSOR_CHAN_BLUE: + case SENSOR_CHAN_ALTITUDE: + case SENSOR_CHAN_PM_1_0: + case SENSOR_CHAN_PM_2_5: + case SENSOR_CHAN_PM_10: + case SENSOR_CHAN_DISTANCE: + case SENSOR_CHAN_CO2: + case SENSOR_CHAN_VOC: + case SENSOR_CHAN_GAS_RES: + case SENSOR_CHAN_VOLTAGE: + case SENSOR_CHAN_CURRENT: + case SENSOR_CHAN_POWER: + case SENSOR_CHAN_RESISTANCE: + case SENSOR_CHAN_ROTATION: + case SENSOR_CHAN_RPM: + case SENSOR_CHAN_GAUGE_VOLTAGE: + case SENSOR_CHAN_GAUGE_AVG_CURRENT: + case SENSOR_CHAN_GAUGE_STDBY_CURRENT: + case SENSOR_CHAN_GAUGE_MAX_LOAD_CURRENT: + case SENSOR_CHAN_GAUGE_TEMP: + case SENSOR_CHAN_GAUGE_STATE_OF_CHARGE: + case SENSOR_CHAN_GAUGE_FULL_CHARGE_CAPACITY: + case SENSOR_CHAN_GAUGE_REMAINING_CHARGE_CAPACITY: + case SENSOR_CHAN_GAUGE_NOM_AVAIL_CAPACITY: + case SENSOR_CHAN_GAUGE_FULL_AVAIL_CAPACITY: + case SENSOR_CHAN_GAUGE_AVG_POWER: + case SENSOR_CHAN_GAUGE_STATE_OF_HEALTH: + case SENSOR_CHAN_GAUGE_TIME_TO_EMPTY: + case SENSOR_CHAN_GAUGE_TIME_TO_FULL: + case SENSOR_CHAN_GAUGE_DESIGN_VOLTAGE: + case SENSOR_CHAN_GAUGE_DESIRED_VOLTAGE: + case SENSOR_CHAN_GAUGE_DESIRED_CHARGING_CURRENT: + count = decode_q31(header, q, data_out, channel, channel_idx); + break; + default: + break; } - - for (; *cit < header->num_channels && count < max_count; ++count) { - channels[count] = header->channels[*cit]; - values[count] = q[*cit]; - LOG_DBG("Decoding q[%u]@%p=%d", *cit, (void *)&q[*cit], q[*cit]); - *cit += 1; - } - - if (*cit >= header->num_channels) { + if (count > 0) { *fit = 1; - *cit = 0; } return count; } const struct sensor_decoder_api __sensor_default_decoder = { .get_frame_count = get_frame_count, - .get_timestamp = get_timestamp, - .get_shift = get_shift, + .get_size_info = sensor_natively_supported_channel_size_info, .decode = decode, }; diff --git a/drivers/sensor/icm42688/icm42688_decoder.c b/drivers/sensor/icm42688/icm42688_decoder.c index 781e7567ffd53d..91fbfc6f683cb5 100644 --- a/drivers/sensor/icm42688/icm42688_decoder.c +++ b/drivers/sensor/icm42688/icm42688_decoder.c @@ -220,91 +220,183 @@ int icm42688_encode(const struct device *dev, const enum sensor_channel *const c return 0; } -static int icm42688_one_shot_decode(const uint8_t *buffer, sensor_frame_iterator_t *fit, - sensor_channel_iterator_t *cit, enum sensor_channel *channels, - q31_t *values, uint8_t max_count) +static int icm42688_one_shot_decode(const uint8_t *buffer, enum sensor_channel channel, + size_t channel_idx, sensor_frame_iterator_t *fit, + uint16_t max_count, void *data_out) { const struct icm42688_encoded_data *edata = (const struct icm42688_encoded_data *)buffer; - uint8_t channel_pos_read = edata->channels; + const struct icm42688_decoder_header *header = &edata->header; struct icm42688_cfg cfg = { .accel_fs = edata->header.accel_fs, .gyro_fs = edata->header.gyro_fs, }; - enum sensor_channel chan; - int pos; - int count = 0; - int num_samples = __builtin_popcount(channel_pos_read); - - channel_pos_read = edata->channels; + uint8_t channel_request; + int rc; if (*fit != 0) { return 0; } - - /* Skip channels already decoded */ - for (int i = 0; i < *cit && channel_pos_read; i++) { - pos = __builtin_ctz(channel_pos_read); - channel_pos_read &= ~BIT(pos); + if 
(max_count == 0 || channel_idx != 0) { + return -EINVAL; } - /* Decode remaining channels */ - while (channel_pos_read && *cit < num_samples && count < max_count) { - pos = __builtin_ctz(channel_pos_read); - chan = icm42688_get_channel_from_position(pos); + switch (channel) { + case SENSOR_CHAN_ACCEL_X: + case SENSOR_CHAN_ACCEL_Y: + case SENSOR_CHAN_ACCEL_Z: + case SENSOR_CHAN_ACCEL_XYZ: { + channel_request = icm42688_encode_channel(SENSOR_CHAN_ACCEL_XYZ); + if ((channel_request & edata->channels) != channel_request) { + return -ENODATA; + } - channels[count] = chan; + struct sensor_three_axis_data *out = data_out; - icm42688_convert_raw_to_q31(&cfg, chan, edata->readings[pos], &values[count]); + out->header.base_timestamp_ns = edata->header.timestamp; + out->header.reading_count = 1; + rc = icm42688_get_shift(SENSOR_CHAN_ACCEL_XYZ, header->accel_fs, header->gyro_fs, + &out->shift); + if (rc != 0) { + return -EINVAL; + } - count++; - channel_pos_read &= ~BIT(pos); - *cit += 1; + icm42688_convert_raw_to_q31( + &cfg, SENSOR_CHAN_ACCEL_X, + edata->readings[icm42688_get_channel_position(SENSOR_CHAN_ACCEL_X)], + &out->readings[0].x); + icm42688_convert_raw_to_q31( + &cfg, SENSOR_CHAN_ACCEL_Y, + edata->readings[icm42688_get_channel_position(SENSOR_CHAN_ACCEL_Y)], + &out->readings[0].y); + icm42688_convert_raw_to_q31( + &cfg, SENSOR_CHAN_ACCEL_Z, + edata->readings[icm42688_get_channel_position(SENSOR_CHAN_ACCEL_Z)], + &out->readings[0].z); + *fit = 1; + return 1; } + case SENSOR_CHAN_GYRO_X: + case SENSOR_CHAN_GYRO_Y: + case SENSOR_CHAN_GYRO_Z: + case SENSOR_CHAN_GYRO_XYZ: { + channel_request = icm42688_encode_channel(SENSOR_CHAN_GYRO_XYZ); + if ((channel_request & edata->channels) != channel_request) { + return -ENODATA; + } - if (*cit >= __builtin_popcount(edata->channels)) { - *fit += 1; - *cit = 0; + struct sensor_three_axis_data *out = data_out; + + out->header.base_timestamp_ns = edata->header.timestamp; + out->header.reading_count = 1; + rc = icm42688_get_shift(SENSOR_CHAN_GYRO_XYZ, header->accel_fs, header->gyro_fs, + &out->shift); + if (rc != 0) { + return -EINVAL; + } + + out->readings[0].timestamp_delta = 0; + icm42688_convert_raw_to_q31( + &cfg, SENSOR_CHAN_GYRO_X, + edata->readings[icm42688_get_channel_position(SENSOR_CHAN_GYRO_X)], + &out->readings[0].x); + icm42688_convert_raw_to_q31( + &cfg, SENSOR_CHAN_GYRO_Y, + edata->readings[icm42688_get_channel_position(SENSOR_CHAN_GYRO_Y)], + &out->readings[0].y); + icm42688_convert_raw_to_q31( + &cfg, SENSOR_CHAN_GYRO_Z, + edata->readings[icm42688_get_channel_position(SENSOR_CHAN_GYRO_Z)], + &out->readings[0].z); + *fit = 1; + return 1; } + case SENSOR_CHAN_DIE_TEMP: { + channel_request = icm42688_encode_channel(SENSOR_CHAN_DIE_TEMP); + if ((channel_request & edata->channels) != channel_request) { + return -ENODATA; + } - return count; -} + struct sensor_q31_data *out = data_out; -static int icm42688_decoder_decode(const uint8_t *buffer, sensor_frame_iterator_t *fit, - sensor_channel_iterator_t *cit, enum sensor_channel *channels, - q31_t *values, uint8_t max_count) -{ - return icm42688_one_shot_decode(buffer, fit, cit, channels, values, max_count); + out->header.base_timestamp_ns = edata->header.timestamp; + out->header.reading_count = 1; + + rc = icm42688_get_shift(SENSOR_CHAN_DIE_TEMP, header->accel_fs, header->gyro_fs, + &out->shift); + if (rc != 0) { + return -EINVAL; + } + out->readings[0].timestamp_delta = 0; + icm42688_convert_raw_to_q31( + &cfg, SENSOR_CHAN_DIE_TEMP, + 
edata->readings[icm42688_get_channel_position(SENSOR_CHAN_DIE_TEMP)], + &out->readings[0].temperature); + *fit = 1; + return 1; + } + default: + return -EINVAL; + } } -static int icm42688_decoder_get_frame_count(const uint8_t *buffer, uint16_t *frame_count) +static int icm42688_decoder_decode(const uint8_t *buffer, enum sensor_channel channel, + size_t channel_idx, sensor_frame_iterator_t *fit, + uint16_t max_count, void *data_out) { - ARG_UNUSED(buffer); - *frame_count = 1; - return 0; + return icm42688_one_shot_decode(buffer, channel, channel_idx, fit, max_count, data_out); } -static int icm42688_decoder_get_timestamp(const uint8_t *buffer, uint64_t *timestamp_ns) +static int icm42688_decoder_get_frame_count(const uint8_t *buffer, enum sensor_channel channel, + size_t channel_idx, uint16_t *frame_count) { - const struct icm42688_decoder_header *header = - (const struct icm42688_decoder_header *)buffer; - - *timestamp_ns = header->timestamp; - return 0; + ARG_UNUSED(buffer); + if (channel_idx != 0) { + return -ENOTSUP; + } + switch (channel) { + case SENSOR_CHAN_ACCEL_X: + case SENSOR_CHAN_ACCEL_Y: + case SENSOR_CHAN_ACCEL_Z: + case SENSOR_CHAN_ACCEL_XYZ: + case SENSOR_CHAN_GYRO_X: + case SENSOR_CHAN_GYRO_Y: + case SENSOR_CHAN_GYRO_Z: + case SENSOR_CHAN_GYRO_XYZ: + case SENSOR_CHAN_DIE_TEMP: + *frame_count = 1; + return 0; + default: + return -ENOTSUP; + } } -static int icm42688_decoder_get_shift(const uint8_t *buffer, enum sensor_channel channel_type, - int8_t *shift) +static int icm42688_decoder_get_size_info(enum sensor_channel channel, size_t *base_size, + size_t *frame_size) { - const struct icm42688_decoder_header *header = - (const struct icm42688_decoder_header *)buffer; - - return icm42688_get_shift(channel_type, header->accel_fs, header->gyro_fs, shift); + switch (channel) { + case SENSOR_CHAN_ACCEL_X: + case SENSOR_CHAN_ACCEL_Y: + case SENSOR_CHAN_ACCEL_Z: + case SENSOR_CHAN_ACCEL_XYZ: + case SENSOR_CHAN_GYRO_X: + case SENSOR_CHAN_GYRO_Y: + case SENSOR_CHAN_GYRO_Z: + case SENSOR_CHAN_GYRO_XYZ: + *base_size = sizeof(struct sensor_three_axis_data); + *frame_size = sizeof(struct sensor_three_axis_sample_data); + return 0; + case SENSOR_CHAN_DIE_TEMP: + *base_size = sizeof(struct sensor_q31_data); + *frame_size = sizeof(struct sensor_q31_sample_data); + return 0; + default: + return -ENOTSUP; + } } SENSOR_DECODER_API_DT_DEFINE() = { .get_frame_count = icm42688_decoder_get_frame_count, - .get_timestamp = icm42688_decoder_get_timestamp, - .get_shift = icm42688_decoder_get_shift, + .get_size_info = icm42688_decoder_get_size_info, .decode = icm42688_decoder_decode, }; diff --git a/drivers/sensor/sensor_shell.c b/drivers/sensor/sensor_shell.c index 459ea0882f7a8d..a85eab23e43ec3 100644 --- a/drivers/sensor/sensor_shell.c +++ b/drivers/sensor/sensor_shell.c @@ -244,11 +244,7 @@ static void sensor_shell_processing_callback(int result, uint8_t *buf, uint32_t { struct sensor_shell_processing_context *ctx = userdata; const struct sensor_decoder_api *decoder; - sensor_frame_iterator_t fit = {0}; - sensor_channel_iterator_t cit = {0}; - uint64_t timestamp; - enum sensor_channel channel; - q31_t q; + uint8_t decoded_buffer[128]; int rc; ARG_UNUSED(buf_len); @@ -264,45 +260,85 @@ static void sensor_shell_processing_callback(int result, uint8_t *buf, uint32_t return; } - rc = decoder->get_timestamp(buf, ×tamp); - if (rc != 0) { - shell_error(ctx->sh, "Failed to get fetch timestamp for '%s'", ctx->dev->name); - return; - } - shell_print(ctx->sh, "Got samples at %" PRIu64 " ns", timestamp); + for 
(int channel = 0; channel < SENSOR_CHAN_ALL; ++channel) { + uint32_t fit = 0; + size_t base_size; + size_t frame_size; + size_t channel_idx = 0; + uint16_t frame_count; - while (decoder->decode(buf, &fit, &cit, &channel, &q, 1) > 0) { - int8_t shift; + if (channel == SENSOR_CHAN_ACCEL_X || channel == SENSOR_CHAN_ACCEL_Y || + channel == SENSOR_CHAN_ACCEL_Z || channel == SENSOR_CHAN_GYRO_X || + channel == SENSOR_CHAN_GYRO_Y || channel == SENSOR_CHAN_GYRO_Z || + channel == SENSOR_CHAN_MAGN_X || channel == SENSOR_CHAN_MAGN_Y || + channel == SENSOR_CHAN_MAGN_Z || channel == SENSOR_CHAN_POS_DY || + channel == SENSOR_CHAN_POS_DZ) { + continue; + } - rc = decoder->get_shift(buf, channel, &shift); + rc = decoder->get_size_info(channel, &base_size, &frame_size); if (rc != 0) { - shell_error(ctx->sh, "Failed to get bitshift for channel %d", channel); + /* Channel not supported, skipping */ continue; } - int64_t scaled_value = (int64_t)q << shift; - bool is_negative = scaled_value < 0; - int numerator; - int denominator; - - scaled_value = llabs(scaled_value); - numerator = (int)FIELD_GET(GENMASK64(31 + shift, 31), scaled_value); - denominator = (int)DIV_ROUND_CLOSEST( - FIELD_GET(GENMASK64(30, 0), scaled_value) * 1000000, - INT32_MAX); - - if (denominator == 1000000) { - numerator++; - denominator = 0; + if (base_size > ARRAY_SIZE(decoded_buffer)) { + shell_error(ctx->sh, + "Channel (%d) requires %zu bytes to decode, but only %zu are " + "available", + channel, base_size, ARRAY_SIZE(decoded_buffer)); + continue; } - if (channel >= ARRAY_SIZE(sensor_channel_name)) { - shell_print(ctx->sh, "channel idx=%d value=%s%d.%06d", channel, - is_negative ? "-" : "", numerator, denominator); - } else { - shell_print(ctx->sh, "channel idx=%d %s value=%s%d.%06d", channel, - sensor_channel_name[channel], is_negative ? "-" : "", numerator, - denominator); + while (decoder->get_frame_count(buf, channel, channel_idx, &frame_count) == 0) { + fit = 0; + while (decoder->decode(buf, channel, channel_idx, &fit, 1, decoded_buffer) > + 0) { + + switch (channel) { + case SENSOR_CHAN_ACCEL_XYZ: + case SENSOR_CHAN_GYRO_XYZ: + case SENSOR_CHAN_MAGN_XYZ: + case SENSOR_CHAN_POS_DX: { + struct sensor_three_axis_data *data = + (struct sensor_three_axis_data *)decoded_buffer; + + shell_info(ctx->sh, + "channel idx=%d %s shift=%d " + "value=%" PRIsensor_three_axis_data, + channel, sensor_channel_name[channel], + data->shift, + PRIsensor_three_axis_data_arg(*data, 0)); + break; + } + case SENSOR_CHAN_PROX: { + struct sensor_byte_data *data = + (struct sensor_byte_data *)decoded_buffer; + + shell_info(ctx->sh, + "channel idx=%d %s value=%" PRIsensor_byte_data( + is_near), + channel, sensor_channel_name[channel], + PRIsensor_byte_data_arg(*data, 0, is_near)); + break; + } + default: { + struct sensor_q31_data *data = + (struct sensor_q31_data *)decoded_buffer; + + shell_info(ctx->sh, + "channel idx=%d %s shift=%d " + "value=%" PRIsensor_q31_data, + channel, + (channel >= ARRAY_SIZE(sensor_channel_name)) + ? 
"" + : sensor_channel_name[channel], + data->shift, PRIsensor_q31_data_arg(*data, 0)); + break; + } + } + } + ++channel_idx; } } } @@ -621,8 +657,7 @@ static int cmd_get_sensor_info(const struct shell *sh, size_t argc, char **argv) #ifdef CONFIG_SENSOR_INFO const char *null_str = "(null)"; - STRUCT_SECTION_FOREACH(sensor_info, sensor) - { + STRUCT_SECTION_FOREACH(sensor_info, sensor) { shell_print(sh, "device name: %s, vendor: %s, model: %s, " "friendly name: %s", diff --git a/include/zephyr/drivers/sensor.h b/include/zephyr/drivers/sensor.h index 59a84931645818..50e35b89dace94 100644 --- a/include/zephyr/drivers/sensor.h +++ b/include/zephyr/drivers/sensor.h @@ -29,6 +29,7 @@ #include #include +#include #include #include #include @@ -409,46 +410,6 @@ typedef int (*sensor_channel_get_t)(const struct device *dev, enum sensor_channel chan, struct sensor_value *val); -/** - * @typedef sensor_frame_iterator_t - * @brief Used for iterating over the data frames via the sensor_decoder_api. - * - * Example usage: - * - * @code(.c) - * sensor_frame_iterator_t fit = {0}, fit_last; - * sensor_channel_iterator_t cit = {0}, cit_last; - * - * while (true) { - * int num_decoded_channels; - * enum sensor_channel channel; - * q31_t value; - * - * fit_last = fit; - * num_decoded_channels = decoder->decode(buffer, &fit, &cit, &channel, &value, 1); - * - * if (num_decoded_channels <= 0) { - * printk("Done decoding buffer\n"); - * break; - * } - * - * printk("Decoded channel (%d) with value %s0.%06" PRIi64 "\n", q < 0 ? "-" : "", - * abs(q) * INT64_C(1000000) / (INT64_C(1) << 31)); - * - * if (fit_last != fit) { - * printk("Finished decoding frame\n"); - * } - * } - * @endcode - */ -typedef uint32_t sensor_frame_iterator_t; - -/** - * @typedef sensor_channel_iterator_t - * @brief Used for iterating over data channels in the same frame via sensor_decoder_api - */ -typedef uint32_t sensor_channel_iterator_t; - /** * @brief Decodes a single raw data buffer * @@ -460,60 +421,120 @@ struct sensor_decoder_api { * @brief Get the number of frames in the current buffer. * * @param[in] buffer The buffer provided on the @ref rtio context. + * @param[in] channel The channel to get the count for + * @param[in] channel_idx The index of the channel * @param[out] frame_count The number of frames on the buffer (at least 1) * @return 0 on success - * @return <0 on error + * @return -ENOTSUP if the channel/channel_idx aren't found */ - int (*get_frame_count)(const uint8_t *buffer, uint16_t *frame_count); + int (*get_frame_count)(const uint8_t *buffer, enum sensor_channel channel, + size_t channel_idx, uint16_t *frame_count); /** - * @brief Get the timestamp associated with the first frame. + * @brief Get the size required to decode a given channel * - * @param[in] buffer The buffer provided on the @ref rtio context. - * @param[out] timestamp_ns The closest timestamp for when the first frame was generated - * as attained by k_uptime_ticks(). + * When decoding a single frame, use @p base_size. For every additional frame, add another + * @p frame_size. As an example, to decode 3 frames use: 'base_size + 2 * frame_size'. 
+ * + * @param[in] channel The channel to query + * @param[out] base_size The size of decoding the first frame + * @param[out] frame_size The additional size of every additional frame * @return 0 on success - * @return <0 on error + * @return -ENOTSUP if the channel is not supported */ - int (*get_timestamp)(const uint8_t *buffer, uint64_t *timestamp_ns); + int (*get_size_info)(enum sensor_channel channel, size_t *base_size, size_t *frame_size); /** - * @brief Get the shift count of a particular channel (multiplier) + * @brief Decode up to @p max_count samples from the buffer * - * This value can be used by shifting the q31_t value resulting in the SI unit of the - * reading. It is guaranteed that the shift for a channel will not change between frames. + * Decode samples of channel @ref sensor_channel across multiple frames. If there exist + * multiple instances of the same channel, @p channel_index is used to differentiate them. + * As an example, assume a sensor provides 2 distance measurements: * - * @param[in] buffer The buffer provided on the @ref rtio context. - * @param[in] channel_type The c:enum:`sensor_channel` to query - * @param[out] shift The bit shift of the channel for this data buffer. - * @return 0 on success - * @return -EINVAL if the @p channel_type doesn't exist in the buffer - * @return <0 on error - */ - int (*get_shift)(const uint8_t *buffer, enum sensor_channel channel_type, int8_t *shift); - - /** - * @brief Decode up to N samples from the buffer + * @code{.c} + * // Decode the first channel instance of 'distance' + * decoder->decode(buffer, SENSOR_CHAN_DISTANCE, 0, &fit, 5, out); + * ... * - * This function will never wrap frames. If 1 channel is available in the current frame and - * @p max_count is 2, only 1 channel will be decoded and the frame iterator will be modified - * so that the next call to decode will begin at the next frame. + * // Decode the second channel instance of 'distance' + * decoder->decode(buffer, SENSOR_CHAN_DISTANCE, 1, &fit, 5, out); + * @endcode * * @param[in] buffer The buffer provided on the @ref rtio context + * @param[in] channel The channel to decode + * @param[in] channel_idx The index of the channel * @param[in,out] fit The current frame iterator - * @param[in,out] cit The current channel iterator - * @param[out] channels The channels that were decoded - * @param[out] values The scaled data that was decoded * @param[in] max_count The maximum number of channels to decode. - * @retval > 0 The number of decoded values - * @retval 0 Nothing else to decode on the @p buffer - * @retval < 0 Error + * @param[out] data_out The decoded data + * @return 0 no more samples to decode + * @return >0 the number of decoded frames + * @return <0 on error */ - int (*decode)(const uint8_t *buffer, sensor_frame_iterator_t *fit, - sensor_channel_iterator_t *cit, enum sensor_channel *channels, q31_t *values, - uint8_t max_count); + int (*decode)(const uint8_t *buffer, enum sensor_channel channel, size_t channel_idx, + uint32_t *fit, uint16_t max_count, void *data_out); }; +/** + * @brief Used for iterating over the data frames via the sensor_decoder_api. 
+ * + * Example usage: + * + * @code(.c) + * struct sensor_decode_context ctx = SENSOR_DECODE_CONTEXT_INIT( + * decoder, buffer, SENSOR_CHAN_ACCEL_XYZ, 0); + * + * while (true) { + * struct sensor_three_axis_data accel_out_data; + * + * num_decoded_channels = sensor_decode(ctx, &accel_out_data, 1); + * + * if (num_decoded_channels <= 0) { + * printk("Done decoding buffer\n"); + * break; + * } + * + * printk("Decoded (%" PRId32 ", %" PRId32 ", %" PRId32 ")\n", accel_out_data.readings[0].x, + * accel_out_data.readings[0].y, accel_out_data.readings[0].z); + * } + * @endcode + */ +struct sensor_decode_context { + const struct sensor_decoder_api *decoder; + const uint8_t *buffer; + enum sensor_channel channel; + size_t channel_idx; + uint32_t fit; +}; + +/** + * @brief Initialize a sensor_decode_context + */ +#define SENSOR_DECODE_CONTEXT_INIT(decoder_, buffer_, channel_, channel_index_) \ + { \ + .decoder = (decoder_), \ + .buffer = (buffer_), \ + .channel = (channel_), \ + .channel_idx = (channel_index_), \ + .fit = 0, \ + } + +/** + * @brief Decode N frames using a sensor_decode_context + * + * @param[in,out] ctx The context to use for decoding + * @param[out] out The output buffer + * @param[in] max_count Maximum number of frames to decode + * @return The decode result from sensor_decoder_api's decode function + */ +static inline int sensor_decode(struct sensor_decode_context *ctx, void *out, uint16_t max_count) +{ + return ctx->decoder->decode(ctx->buffer, ctx->channel, ctx->channel_idx, &ctx->fit, + max_count, out); +} + +int sensor_natively_supported_channel_size_info(enum sensor_channel channel, size_t *base_size, + size_t *frame_size); + /** * @typedef sensor_get_decoder_t * @brief Get the decoder associate with the given device diff --git a/include/zephyr/drivers/sensor_data_types.h b/include/zephyr/drivers/sensor_data_types.h new file mode 100644 index 00000000000000..dd7f1f39c6d185 --- /dev/null +++ b/include/zephyr/drivers/sensor_data_types.h @@ -0,0 +1,168 @@ +/* + * Copyright (c) 2023 Google LLC + * + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef ZEPHYR_INCLUDE_DRIVERS_SENSOR_DATA_TYPES_H +#define ZEPHYR_INCLUDE_DRIVERS_SENSOR_DATA_TYPES_H + +#include +#include + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +struct sensor_data_header { + /** + * The closest timestamp for when the first frame was generated as attained by + * :c:func:`k_uptime_ticks`. + */ + uint64_t base_timestamp_ns; + /** + * The number of elements in the 'readings' array. + * + * This must be at least 1 + */ + uint16_t reading_count; +}; + +/** + * Data for a sensor channel which reports on three axes. 
This is used by: + * - :c:enum:`SENSOR_CHAN_ACCEL_X` + * - :c:enum:`SENSOR_CHAN_ACCEL_Y` + * - :c:enum:`SENSOR_CHAN_ACCEL_Z` + * - :c:enum:`SENSOR_CHAN_ACCEL_XYZ` + * - :c:enum:`SENSOR_CHAN_GYRO_X` + * - :c:enum:`SENSOR_CHAN_GYRO_Y` + * - :c:enum:`SENSOR_CHAN_GYRO_Z` + * - :c:enum:`SENSOR_CHAN_GYRO_XYZ` + * - :c:enum:`SENSOR_CHAN_MAGN_X` + * - :c:enum:`SENSOR_CHAN_MAGN_Y` + * - :c:enum:`SENSOR_CHAN_MAGN_Z` + * - :c:enum:`SENSOR_CHAN_MAGN_XYZ` + * - :c:enum:`SENSOR_CHAN_POS_DX` + * - :c:enum:`SENSOR_CHAN_POS_DY` + * - :c:enum:`SENSOR_CHAN_POS_DZ` + */ +struct sensor_three_axis_data { + struct sensor_data_header header; + int8_t shift; + struct sensor_three_axis_sample_data { + uint32_t timestamp_delta; + union { + q31_t values[3]; + q31_t v[3]; + struct { + q31_t x; + q31_t y; + q31_t z; + }; + }; + } readings[1]; +}; + +#define PRIsensor_three_axis_data PRIu64 "ns, (%" PRIq(6) ", %" PRIq(6) ", %" PRIq(6) ")" + +#define PRIsensor_three_axis_data_arg(data_, readings_offset_) \ + (data_).header.base_timestamp_ns + (data_).readings[(readings_offset_)].timestamp_delta, \ + PRIq_arg((data_).readings[(readings_offset_)].x, 6, (data_).shift), \ + PRIq_arg((data_).readings[(readings_offset_)].y, 6, (data_).shift), \ + PRIq_arg((data_).readings[(readings_offset_)].z, 6, (data_).shift) + +/** + * Data from a sensor where we only care about an event occurring. This is used to report triggers. + */ +struct sensor_occurrence_data { + struct sensor_data_header header; + struct sensor_occurrence_sample_data { + uint32_t timestamp_delta; + } readings[1]; +}; + +#define PRIsensor_occurrence_data PRIu64 "ns" + +#define PRIsensor_occurrence_data_arg(data_, readings_offset_) \ + (data_).header.base_timestamp_ns + (data_).readings[(readings_offset_)].timestamp_delta + +struct sensor_q31_data { + struct sensor_data_header header; + int8_t shift; + struct sensor_q31_sample_data { + uint32_t timestamp_delta; + union { + q31_t value; + q31_t light; /**< Unit: lux */ + q31_t pressure; /**< Unit: kilopascal */ + q31_t temperature; /**< Unit: degrees Celsius */ + q31_t percent; /**< Unit: percent */ + q31_t distance; /**< Unit: meters */ + q31_t density; /**< Unit: ug/m^3 */ + q31_t density_ppm; /**< Unit: parts per million */ + q31_t density_ppb; /**< Unit: parts per billion */ + q31_t resistance; /**< Unit: ohms */ + q31_t voltage; /**< Unit: volts */ + q31_t current; /**< Unit: amps */ + q31_t power; /**< Unit: watts */ + q31_t angle; /**< Unit: degrees */ + q31_t electric_charge; /**< Unit: mAh */ + }; + } readings[1]; +}; + +#define PRIsensor_q31_data PRIu64 "ns (%" PRIq(6) ")" + +#define PRIsensor_q31_data_arg(data_, readings_offset_) \ + (data_).header.base_timestamp_ns + (data_).readings[(readings_offset_)].timestamp_delta, \ + PRIq_arg((data_).readings[(readings_offset_)].value, 6, (data_).shift) + +/** + * Data from a sensor that produces a byte of data. 
This is used by:
+ * - :c:enum:`SENSOR_CHAN_PROX`
+ */
+struct sensor_byte_data {
+	struct sensor_data_header header;
+	struct sensor_byte_sample_data {
+		uint32_t timestamp_delta;
+		union {
+			uint8_t value;
+			struct {
+				uint8_t is_near: 1;
+				uint8_t padding: 7;
+			};
+		};
+	} readings[1];
+};
+
+#define PRIsensor_byte_data(field_name_) PRIu64 "ns (" STRINGIFY(field_name_) " = %" PRIu8 ")"
+
+#define PRIsensor_byte_data_arg(data_, readings_offset_, field_name_)                             \
+	(data_).header.base_timestamp_ns + (data_).readings[(readings_offset_)].timestamp_delta,  \
+	(data_).readings[(readings_offset_)].field_name_
+
+/**
+ * Data from a sensor that produces a count like value. This is used by:
+ * - :c:enum:`SENSOR_CHAN_GAUGE_CYCLE_COUNT`
+ */
+struct sensor_uint64_data {
+	struct sensor_data_header header;
+	struct sensor_uint64_sample_data {
+		uint32_t timestamp_delta;
+		uint64_t value;
+	} readings[1];
+};
+
+#define PRIsensor_uint64_data PRIu64 "ns (%" PRIu64 ")"
+
+#define PRIsensor_uint64_data_arg(data_, readings_offset_)                                        \
+	(data_).header.base_timestamp_ns + (data_).readings[(readings_offset_)].timestamp_delta,  \
+	(data_).readings[(readings_offset_)].value
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* ZEPHYR_INCLUDE_DRIVERS_SENSOR_DATA_TYPES_H */
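
Usage note: the example in the new sensor_decode_context doc block, expanded into a minimal application-side sketch. The helper name print_accel, the RTIO buffer buf, and the use of printk are illustrative only and not part of this patch; sensor_decode(), SENSOR_DECODE_CONTEXT_INIT(), and the PRIsensor_three_axis_data format helpers are the APIs added here.

#include <zephyr/drivers/sensor.h>
#include <zephyr/drivers/sensor_data_types.h>
#include <zephyr/sys/printk.h>

/* Decode and print every accelerometer frame in an RTIO buffer `buf`,
 * using a decoder previously obtained through sensor_get_decoder().
 */
static void print_accel(const struct sensor_decoder_api *decoder, const uint8_t *buf)
{
	struct sensor_three_axis_data accel_data;
	struct sensor_decode_context ctx =
		SENSOR_DECODE_CONTEXT_INIT(decoder, buf, SENSOR_CHAN_ACCEL_XYZ, 0);

	/* sensor_decode() returns the number of frames decoded (at most 1 per
	 * call here); a return of <= 0 means nothing is left to decode or an
	 * error occurred.
	 */
	while (sensor_decode(&ctx, &accel_data, 1) > 0) {
		printk("accel: %" PRIsensor_three_axis_data "\n",
		       PRIsensor_three_axis_data_arg(accel_data, 0));
	}
}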
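
Sizing note: get_size_info() together with get_frame_count() lets a caller size the decode target up front (the shell change above sidesteps this with a fixed 128-byte scratch buffer plus a base_size check). A short sketch of that calculation, following the "base_size + (n - 1) * frame_size" contract documented on get_size_info; the helper name decoded_size_bytes is hypothetical.

/* Number of bytes needed to decode every frame of `channel` (instance
 * `channel_idx`) contained in `buf`: the first frame costs base_size and
 * each additional frame costs frame_size.
 */
static int decoded_size_bytes(const struct sensor_decoder_api *decoder, const uint8_t *buf,
			      enum sensor_channel channel, size_t channel_idx, size_t *size_out)
{
	size_t base_size;
	size_t frame_size;
	uint16_t frame_count;
	int rc;

	rc = decoder->get_size_info(channel, &base_size, &frame_size);
	if (rc != 0) {
		return rc;
	}

	rc = decoder->get_frame_count(buf, channel, channel_idx, &frame_count);
	if (rc != 0) {
		return rc;
	}

	/* frame_count is documented to be at least 1 */
	*size_out = base_size + (size_t)(frame_count - 1) * frame_size;
	return 0;
}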
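
Scaling note: decoded readings are still q31 values paired with the shift carried in the decoded struct, so the SI-unit value is q * 2^(shift - 31), which is the same scaling the removed shell fixed-point printing implemented by hand. The PRIq()/PRIq_arg() helpers cover printing; a caller that wants a floating-point value can apply the shift directly. A minimal sketch, assuming q31_t comes from the DSP types header used elsewhere in this patch and that floating point and <math.h> are available on the target; the helper name is illustrative.

#include <math.h>
#include <zephyr/dsp/types.h>

/* Convert a decoded q31 reading plus its shift into a double in SI units:
 * value = (q / 2^31) * 2^shift = q * 2^(shift - 31).
 */
static double q31_to_double(q31_t q, int8_t shift)
{
	return ldexp((double)q, shift - 31);
}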