extmod/machine_i2s: Factor stream and ring-buf code.

Signed-off-by: Damien George <damien@micropython.org>
pull/12630/head
Damien George 2023-10-09 12:48:45 +11:00
parent f2f3ef162d
commit a2d7e0da0b
8 changed files with 399 additions and 1089 deletions

View file

@@ -26,11 +26,37 @@
*/
#include "py/runtime.h"
#include "py/stream.h"
#if MICROPY_PY_MACHINE_I2S
#include "extmod/modmachine.h"
#if MICROPY_PY_MACHINE_I2S_RING_BUF
typedef struct _ring_buf_t {
uint8_t *buffer;
size_t head;
size_t tail;
size_t size;
} ring_buf_t;
typedef struct _non_blocking_descriptor_t {
mp_buffer_info_t appbuf;
uint32_t index;
bool copy_in_progress;
} non_blocking_descriptor_t;
STATIC void ringbuf_init(ring_buf_t *rbuf, uint8_t *buffer, size_t size);
STATIC bool ringbuf_push(ring_buf_t *rbuf, uint8_t data);
STATIC bool ringbuf_pop(ring_buf_t *rbuf, uint8_t *data);
STATIC size_t ringbuf_available_data(ring_buf_t *rbuf);
STATIC size_t ringbuf_available_space(ring_buf_t *rbuf);
STATIC void fill_appbuf_from_ringbuf_non_blocking(machine_i2s_obj_t *self);
STATIC void copy_appbuf_to_ringbuf_non_blocking(machine_i2s_obj_t *self);
#endif // MICROPY_PY_MACHINE_I2S_RING_BUF
// The port must provide implementations of these low-level I2S functions.
STATIC void mp_machine_i2s_init_helper(machine_i2s_obj_t *self, size_t n_args, const mp_obj_t *pos_args, mp_map_t *kw_args);
STATIC machine_i2s_obj_t *mp_machine_i2s_make_new_instance(mp_int_t i2s_id);
@@ -39,6 +65,220 @@ STATIC void mp_machine_i2s_deinit(machine_i2s_obj_t *self);
// The port provides implementations of the above in this file.
#include MICROPY_PY_MACHINE_I2S_INCLUDEFILE
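Each port names its implementation in its port config; for example, the stm32 config hunk later in this diff sets:

#define MICROPY_PY_MACHINE_I2S_INCLUDEFILE "ports/stm32/machine_i2s.c"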
#if MICROPY_PY_MACHINE_I2S_RING_BUF
// Ring Buffer
// Thread safe when used with these constraints:
// - Single Producer, Single Consumer
// - Sequential atomic operations
// One byte of capacity is used to detect buffer empty/full
STATIC void ringbuf_init(ring_buf_t *rbuf, uint8_t *buffer, size_t size) {
rbuf->buffer = buffer;
rbuf->size = size;
rbuf->head = 0;
rbuf->tail = 0;
}
STATIC bool ringbuf_push(ring_buf_t *rbuf, uint8_t data) {
size_t next_tail = (rbuf->tail + 1) % rbuf->size;
if (next_tail != rbuf->head) {
rbuf->buffer[rbuf->tail] = data;
rbuf->tail = next_tail;
return true;
}
// full
return false;
}
STATIC bool ringbuf_pop(ring_buf_t *rbuf, uint8_t *data) {
if (rbuf->head == rbuf->tail) {
// empty
return false;
}
*data = rbuf->buffer[rbuf->head];
rbuf->head = (rbuf->head + 1) % rbuf->size;
return true;
}
STATIC bool ringbuf_is_empty(ring_buf_t *rbuf) {
return rbuf->head == rbuf->tail;
}
STATIC bool ringbuf_is_full(ring_buf_t *rbuf) {
return ((rbuf->tail + 1) % rbuf->size) == rbuf->head;
}
STATIC size_t ringbuf_available_data(ring_buf_t *rbuf) {
return (rbuf->tail - rbuf->head + rbuf->size) % rbuf->size;
}
STATIC size_t ringbuf_available_space(ring_buf_t *rbuf) {
return rbuf->size - ringbuf_available_data(rbuf) - 1;
}
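As a minimal sketch of the one-byte-reserved convention, assuming only the helpers above (the demo storage and function names are illustrative):

// Sketch: a 4-byte backing array gives a usable capacity of 3 bytes, because one
// slot is reserved so that head == tail can unambiguously mean "empty".
static uint8_t demo_storage[4];
static ring_buf_t demo_rb;

static void ringbuf_demo(void) {
    ringbuf_init(&demo_rb, demo_storage, sizeof(demo_storage));
    ringbuf_push(&demo_rb, 0x11);                  // returns true
    ringbuf_push(&demo_rb, 0x22);                  // returns true
    ringbuf_push(&demo_rb, 0x33);                  // returns true, buffer is now full
    bool ok = ringbuf_push(&demo_rb, 0x44);        // returns false: only size - 1 bytes fit
    uint8_t b;
    ringbuf_pop(&demo_rb, &b);                     // b == 0x11 (FIFO order)
    size_t n = ringbuf_available_data(&demo_rb);   // n == 2
    (void)ok;
    (void)b;
    (void)n;
}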
STATIC uint32_t fill_appbuf_from_ringbuf(machine_i2s_obj_t *self, mp_buffer_info_t *appbuf) {
// copy audio samples from the ring buffer to the app buffer
// loop, copying samples until the app buffer is filled
// For asyncio mode, the loop will make an early exit if the ring buffer becomes empty
// Example:
// a MicroPython I2S object is configured for 16-bit mono (2 bytes per audio sample).
// For every frame coming from the ring buffer (8 bytes), 2 bytes are "cherry picked" and
// copied to the supplied app buffer.
// Thus, for every 1 byte copied to the app buffer, 4 bytes are read from the ring buffer.
// If an 8kB app buffer is supplied, 32kB of audio samples are read from the ring buffer.
uint32_t num_bytes_copied_to_appbuf = 0;
uint8_t *app_p = (uint8_t *)appbuf->buf;
uint8_t appbuf_sample_size_in_bytes = (self->bits == 16? 2 : 4) * (self->format == STEREO ? 2: 1);
uint32_t num_bytes_needed_from_ringbuf = appbuf->len * (I2S_RX_FRAME_SIZE_IN_BYTES / appbuf_sample_size_in_bytes);
uint8_t discard_byte;
while (num_bytes_needed_from_ringbuf) {
uint8_t f_index = get_frame_mapping_index(self->bits, self->format);
for (uint8_t i = 0; i < I2S_RX_FRAME_SIZE_IN_BYTES; i++) {
int8_t r_to_a_mapping = i2s_frame_map[f_index][i];
if (r_to_a_mapping != -1) {
if (self->io_mode == BLOCKING) {
// poll the ringbuf until a sample becomes available, copy into appbuf using the mapping transform
while (ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping) == false) {
;
}
num_bytes_copied_to_appbuf++;
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping) == false) {
// ring buffer is empty, exit
goto exit;
} else {
num_bytes_copied_to_appbuf++;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
} else { // r_to_a_mapping == -1
// discard unused byte from ring buffer
if (self->io_mode == BLOCKING) {
// poll the ringbuf until a sample becomes available
while (ringbuf_pop(&self->ring_buffer, &discard_byte) == false) {
;
}
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_pop(&self->ring_buffer, &discard_byte) == false) {
// ring buffer is empty, exit
goto exit;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
}
num_bytes_needed_from_ringbuf--;
}
app_p += appbuf_sample_size_in_bytes;
}
exit:
return num_bytes_copied_to_appbuf;
}
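The frame-mapping helpers used above (i2s_frame_map, get_frame_mapping_index and I2S_RX_FRAME_SIZE_IN_BYTES) stay port-provided. As a sketch of their shape, a 16-bit mono row along these lines would keep bytes 0 and 1 of each 8-byte frame and discard the rest, which is the 4:1 ratio described in the comment above (the exact tables are port specific):

// Illustrative frame-map row only, assuming I2S_RX_FRAME_SIZE_IN_BYTES == 8:
// entries give the destination offset within one app-buffer sample, -1 means discard.
static const int8_t i2s_frame_map_mono16_example[8] = { 0, 1, -1, -1, -1, -1, -1, -1 };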
// function is used in IRQ context
STATIC void fill_appbuf_from_ringbuf_non_blocking(machine_i2s_obj_t *self) {
// attempt to copy a block of audio samples from the ring buffer to the supplied app buffer.
// audio samples will be formatted as part of the copy operation
uint32_t num_bytes_copied_to_appbuf = 0;
uint8_t *app_p = &(((uint8_t *)self->non_blocking_descriptor.appbuf.buf)[self->non_blocking_descriptor.index]);
uint8_t appbuf_sample_size_in_bytes = (self->bits == 16? 2 : 4) * (self->format == STEREO ? 2: 1);
uint32_t num_bytes_remaining_to_copy_to_appbuf = self->non_blocking_descriptor.appbuf.len - self->non_blocking_descriptor.index;
uint32_t num_bytes_remaining_to_copy_from_ring_buffer = num_bytes_remaining_to_copy_to_appbuf *
(I2S_RX_FRAME_SIZE_IN_BYTES / appbuf_sample_size_in_bytes);
uint32_t num_bytes_needed_from_ringbuf = MIN(SIZEOF_NON_BLOCKING_COPY_IN_BYTES, num_bytes_remaining_to_copy_from_ring_buffer);
uint8_t discard_byte;
if (ringbuf_available_data(&self->ring_buffer) >= num_bytes_needed_from_ringbuf) {
while (num_bytes_needed_from_ringbuf) {
uint8_t f_index = get_frame_mapping_index(self->bits, self->format);
for (uint8_t i = 0; i < I2S_RX_FRAME_SIZE_IN_BYTES; i++) {
int8_t r_to_a_mapping = i2s_frame_map[f_index][i];
if (r_to_a_mapping != -1) {
ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping);
num_bytes_copied_to_appbuf++;
} else { // r_to_a_mapping == -1
// discard unused byte from ring buffer
ringbuf_pop(&self->ring_buffer, &discard_byte);
}
num_bytes_needed_from_ringbuf--;
}
app_p += appbuf_sample_size_in_bytes;
}
self->non_blocking_descriptor.index += num_bytes_copied_to_appbuf;
if (self->non_blocking_descriptor.index >= self->non_blocking_descriptor.appbuf.len) {
self->non_blocking_descriptor.copy_in_progress = false;
mp_sched_schedule(self->callback_for_non_blocking, MP_OBJ_FROM_PTR(self));
}
}
}
STATIC uint32_t copy_appbuf_to_ringbuf(machine_i2s_obj_t *self, mp_buffer_info_t *appbuf) {
// copy audio samples from the app buffer to the ring buffer
// loop, reading samples until the app buffer is emptied
// for asyncio mode, the loop will make an early exit if the ring buffer becomes full
uint32_t a_index = 0;
while (a_index < appbuf->len) {
if (self->io_mode == BLOCKING) {
// copy a byte to the ringbuf when space becomes available
while (ringbuf_push(&self->ring_buffer, ((uint8_t *)appbuf->buf)[a_index]) == false) {
;
}
a_index++;
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_push(&self->ring_buffer, ((uint8_t *)appbuf->buf)[a_index]) == false) {
// ring buffer is full, exit
break;
} else {
a_index++;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
}
return a_index;
}
// function is used in IRQ context
STATIC void copy_appbuf_to_ringbuf_non_blocking(machine_i2s_obj_t *self) {
// copy audio samples from app buffer into ring buffer
uint32_t num_bytes_remaining_to_copy = self->non_blocking_descriptor.appbuf.len - self->non_blocking_descriptor.index;
uint32_t num_bytes_to_copy = MIN(SIZEOF_NON_BLOCKING_COPY_IN_BYTES, num_bytes_remaining_to_copy);
if (ringbuf_available_space(&self->ring_buffer) >= num_bytes_to_copy) {
for (uint32_t i = 0; i < num_bytes_to_copy; i++) {
ringbuf_push(&self->ring_buffer,
((uint8_t *)self->non_blocking_descriptor.appbuf.buf)[self->non_blocking_descriptor.index + i]);
}
self->non_blocking_descriptor.index += num_bytes_to_copy;
if (self->non_blocking_descriptor.index >= self->non_blocking_descriptor.appbuf.len) {
self->non_blocking_descriptor.copy_in_progress = false;
mp_sched_schedule(self->callback_for_non_blocking, MP_OBJ_FROM_PTR(self));
}
}
}
#endif // MICROPY_PY_MACHINE_I2S_RING_BUF
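Both *_non_blocking helpers are intended to be driven from the port's DMA interrupt handler, which also keeps the ring buffer fed or drained. A rough sketch of that call pattern, with placeholder names for the port-side DMA copy routines (the real ones live in the port's include file):

// Hypothetical port-side IRQ wiring for non-blocking mode; empty_dma()/feed_dma()
// stand in for the port's actual DMA-buffer copy routines and signatures.
static void example_dma_irq_handler(machine_i2s_obj_t *self) {
    if (self->mode == MICROPY_PY_MACHINE_I2S_CONSTANT_RX) {
        empty_dma(self);  // move received samples from the DMA buffer into the ring buffer
        if (self->io_mode == NON_BLOCKING && self->non_blocking_descriptor.copy_in_progress) {
            // keep filling the caller's buffer; the user callback is scheduled on completion
            fill_appbuf_from_ringbuf_non_blocking(self);
        }
    } else {
        feed_dma(self);   // move samples from the ring buffer into the DMA buffer for transmit
        if (self->io_mode == NON_BLOCKING && self->non_blocking_descriptor.copy_in_progress) {
            copy_appbuf_to_ringbuf_non_blocking(self);
        }
    }
}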
STATIC mp_obj_t machine_i2s_make_new(const mp_obj_type_t *type, size_t n_pos_args, size_t n_kw_args, const mp_obj_t *args) {
mp_arg_check_num(n_pos_args, n_kw_args, 1, MP_OBJ_FUN_ARGS_MAX, true);
mp_int_t i2s_id = mp_obj_get_int(args[0]);
@@ -91,6 +331,162 @@ STATIC const mp_rom_map_elem_t machine_i2s_locals_dict_table[] = {
};
MP_DEFINE_CONST_DICT(machine_i2s_locals_dict, machine_i2s_locals_dict_table);
STATIC mp_uint_t machine_i2s_stream_read(mp_obj_t self_in, void *buf_in, mp_uint_t size, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
if (self->mode != MICROPY_PY_MACHINE_I2S_CONSTANT_RX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
uint8_t appbuf_sample_size_in_bytes = (self->bits / 8) * (self->format == STEREO ? 2: 1);
if (size % appbuf_sample_size_in_bytes != 0) {
*errcode = MP_EINVAL;
return MP_STREAM_ERROR;
}
if (size == 0) {
return 0;
}
if (self->io_mode == NON_BLOCKING) {
#if MICROPY_PY_MACHINE_I2S_RING_BUF
self->non_blocking_descriptor.appbuf.buf = (void *)buf_in;
self->non_blocking_descriptor.appbuf.len = size;
self->non_blocking_descriptor.index = 0;
self->non_blocking_descriptor.copy_in_progress = true;
#else
non_blocking_descriptor_t descriptor;
descriptor.appbuf.buf = (void *)buf_in;
descriptor.appbuf.len = size;
descriptor.callback = self->callback_for_non_blocking;
descriptor.direction = I2S_RX_TRANSFER;
// send the descriptor to the task that handles non-blocking mode
xQueueSend(self->non_blocking_mode_queue, &descriptor, 0);
#endif
return size;
} else { // blocking or asyncio mode
mp_buffer_info_t appbuf;
appbuf.buf = (void *)buf_in;
appbuf.len = size;
#if MICROPY_PY_MACHINE_I2S_RING_BUF
uint32_t num_bytes_read = fill_appbuf_from_ringbuf(self, &appbuf);
#else
uint32_t num_bytes_read = fill_appbuf_from_dma(self, &appbuf);
#endif
return num_bytes_read;
}
}
STATIC mp_uint_t machine_i2s_stream_write(mp_obj_t self_in, const void *buf_in, mp_uint_t size, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
if (self->mode != MICROPY_PY_MACHINE_I2S_CONSTANT_TX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (size == 0) {
return 0;
}
if (self->io_mode == NON_BLOCKING) {
#if MICROPY_PY_MACHINE_I2S_RING_BUF
self->non_blocking_descriptor.appbuf.buf = (void *)buf_in;
self->non_blocking_descriptor.appbuf.len = size;
self->non_blocking_descriptor.index = 0;
self->non_blocking_descriptor.copy_in_progress = true;
#else
non_blocking_descriptor_t descriptor;
descriptor.appbuf.buf = (void *)buf_in;
descriptor.appbuf.len = size;
descriptor.callback = self->callback_for_non_blocking;
descriptor.direction = I2S_TX_TRANSFER;
// send the descriptor to the task that handles non-blocking mode
xQueueSend(self->non_blocking_mode_queue, &descriptor, 0);
#endif
return size;
} else { // blocking or asyncio mode
mp_buffer_info_t appbuf;
appbuf.buf = (void *)buf_in;
appbuf.len = size;
#if MICROPY_PY_MACHINE_I2S_RING_BUF
uint32_t num_bytes_written = copy_appbuf_to_ringbuf(self, &appbuf);
#else
uint32_t num_bytes_written = copy_appbuf_to_dma(self, &appbuf);
#endif
return num_bytes_written;
}
}
STATIC mp_uint_t machine_i2s_ioctl(mp_obj_t self_in, mp_uint_t request, uintptr_t arg, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
mp_uint_t ret;
uintptr_t flags = arg;
self->io_mode = ASYNCIO; // a call to ioctl() is an indication that asyncio is being used
if (request == MP_STREAM_POLL) {
ret = 0;
if (flags & MP_STREAM_POLL_RD) {
if (self->mode != MICROPY_PY_MACHINE_I2S_CONSTANT_RX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
#if MICROPY_PY_MACHINE_I2S_RING_BUF
if (!ringbuf_is_empty(&self->ring_buffer)) {
ret |= MP_STREAM_POLL_RD;
}
#else
// check event queue to determine if a DMA buffer has been filled
// (which is an indication that at least one DMA buffer is available to be read)
// note: timeout = 0 so the call is non-blocking
i2s_event_t i2s_event;
if (xQueueReceive(self->i2s_event_queue, &i2s_event, 0)) {
if (i2s_event.type == I2S_EVENT_RX_DONE) {
// getting here means that at least one DMA buffer is now full
// indicating that audio samples can be read from the I2S object
ret |= MP_STREAM_POLL_RD;
}
}
#endif
}
if (flags & MP_STREAM_POLL_WR) {
if (self->mode != MICROPY_PY_MACHINE_I2S_CONSTANT_TX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
#if MICROPY_PY_MACHINE_I2S_RING_BUF
if (!ringbuf_is_full(&self->ring_buffer)) {
ret |= MP_STREAM_POLL_WR;
}
#else
// check event queue to determine if a DMA buffer has been emptied
// (which is an indication that at least one DMA buffer is available to be written)
// note: timeout = 0 so the call is non-blocking
i2s_event_t i2s_event;
if (xQueueReceive(self->i2s_event_queue, &i2s_event, 0)) {
if (i2s_event.type == I2S_EVENT_TX_DONE) {
// getting here means that at least one DMA buffer is now empty
// indicating that audio samples can be written to the I2S object
ret |= MP_STREAM_POLL_WR;
}
}
#endif
}
} else {
*errcode = MP_EINVAL;
ret = MP_STREAM_ERROR;
}
return ret;
}
STATIC const mp_stream_p_t i2s_stream_p = {
.read = machine_i2s_stream_read,
.write = machine_i2s_stream_write,

View file

@@ -28,7 +28,6 @@
// extmod/machine_i2s.c via MICROPY_PY_MACHINE_I2S_INCLUDEFILE.
#include "py/mphal.h"
#include "py/stream.h"
#if MICROPY_PY_MACHINE_I2S
@@ -635,128 +634,6 @@ STATIC mp_obj_t machine_i2s_shift(size_t n_args, const mp_obj_t *pos_args, mp_ma
STATIC MP_DEFINE_CONST_FUN_OBJ_KW(machine_i2s_shift_fun_obj, 0, machine_i2s_shift);
STATIC MP_DEFINE_CONST_STATICMETHOD_OBJ(machine_i2s_shift_obj, MP_ROM_PTR(&machine_i2s_shift_fun_obj));
STATIC mp_uint_t machine_i2s_stream_read(mp_obj_t self_in, void *buf_in, mp_uint_t size, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
if (self->mode != (I2S_MODE_MASTER | I2S_MODE_RX)) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
uint8_t appbuf_sample_size_in_bytes = (self->bits / 8) * (self->format == STEREO ? 2: 1);
if (size % appbuf_sample_size_in_bytes != 0) {
*errcode = MP_EINVAL;
return MP_STREAM_ERROR;
}
if (size == 0) {
return 0;
}
if (self->io_mode == NON_BLOCKING) {
non_blocking_descriptor_t descriptor;
descriptor.appbuf.buf = (void *)buf_in;
descriptor.appbuf.len = size;
descriptor.callback = self->callback_for_non_blocking;
descriptor.direction = I2S_RX_TRANSFER;
// send the descriptor to the task that handles non-blocking mode
xQueueSend(self->non_blocking_mode_queue, &descriptor, 0);
return size;
} else { // blocking or asyncio mode
mp_buffer_info_t appbuf;
appbuf.buf = (void *)buf_in;
appbuf.len = size;
uint32_t num_bytes_read = fill_appbuf_from_dma(self, &appbuf);
return num_bytes_read;
}
}
STATIC mp_uint_t machine_i2s_stream_write(mp_obj_t self_in, const void *buf_in, mp_uint_t size, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
if (self->mode != (I2S_MODE_MASTER | I2S_MODE_TX)) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (size == 0) {
return 0;
}
if (self->io_mode == NON_BLOCKING) {
non_blocking_descriptor_t descriptor;
descriptor.appbuf.buf = (void *)buf_in;
descriptor.appbuf.len = size;
descriptor.callback = self->callback_for_non_blocking;
descriptor.direction = I2S_TX_TRANSFER;
// send the descriptor to the task that handles non-blocking mode
xQueueSend(self->non_blocking_mode_queue, &descriptor, 0);
return size;
} else { // blocking or asyncio mode
mp_buffer_info_t appbuf;
appbuf.buf = (void *)buf_in;
appbuf.len = size;
size_t num_bytes_written = copy_appbuf_to_dma(self, &appbuf);
return num_bytes_written;
}
}
STATIC mp_uint_t machine_i2s_ioctl(mp_obj_t self_in, mp_uint_t request, uintptr_t arg, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
mp_uint_t ret;
mp_uint_t flags = arg;
self->io_mode = ASYNCIO; // a call to ioctl() is an indication that asyncio is being used
if (request == MP_STREAM_POLL) {
ret = 0;
if (flags & MP_STREAM_POLL_RD) {
if (self->mode != (I2S_MODE_MASTER | I2S_MODE_RX)) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
i2s_event_t i2s_event;
// check event queue to determine if a DMA buffer has been filled
// (which is an indication that at least one DMA buffer is available to be read)
// note: timeout = 0 so the call is non-blocking
if (xQueueReceive(self->i2s_event_queue, &i2s_event, 0)) {
if (i2s_event.type == I2S_EVENT_RX_DONE) {
// getting here means that at least one DMA buffer is now full
// indicating that audio samples can be read from the I2S object
ret |= MP_STREAM_POLL_RD;
}
}
}
if (flags & MP_STREAM_POLL_WR) {
if (self->mode != (I2S_MODE_MASTER | I2S_MODE_TX)) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
i2s_event_t i2s_event;
// check event queue to determine if a DMA buffer has been emptied
// (which is an indication that at least one DMA buffer is available to be written)
// note: timeout = 0 so the call is non-blocking
if (xQueueReceive(self->i2s_event_queue, &i2s_event, 0)) {
if (i2s_event.type == I2S_EVENT_TX_DONE) {
// getting here means that at least one DMA buffer is now empty
// indicating that audio samples can be written to the I2S object
ret |= MP_STREAM_POLL_WR;
}
}
}
} else {
*errcode = MP_EINVAL;
ret = MP_STREAM_ERROR;
}
return ret;
}
MP_REGISTER_ROOT_POINTER(struct _machine_i2s_obj_t *machine_i2s_obj[I2S_NUM_AUTO]);
#endif // MICROPY_PY_MACHINE_I2S

View file

@@ -29,7 +29,6 @@
// extmod/machine_i2s.c via MICROPY_PY_MACHINE_I2S_INCLUDEFILE.
#include "py/mphal.h"
#include "py/stream.h"
#include "dma_manager.h"
#include CLOCK_CONFIG_H
@@ -122,19 +121,6 @@ typedef enum {
BOTTOM_HALF
} ping_pong_t;
typedef struct _ring_buf_t {
uint8_t *buffer;
size_t head;
size_t tail;
size_t size;
} ring_buf_t;
typedef struct _non_blocking_descriptor_t {
mp_buffer_info_t appbuf;
uint32_t index;
bool copy_in_progress;
} non_blocking_descriptor_t;
typedef struct _machine_i2s_obj_t {
mp_obj_base_t base;
uint8_t i2s_id;
@@ -285,59 +271,6 @@ void machine_i2s_deinit_all(void) {
}
}
// Ring Buffer
// Thread safe when used with these constraints:
// - Single Producer, Single Consumer
// - Sequential atomic operations
// One byte of capacity is used to detect buffer empty/full
STATIC void ringbuf_init(ring_buf_t *rbuf, uint8_t *buffer, size_t size) {
rbuf->buffer = buffer;
rbuf->size = size;
rbuf->head = 0;
rbuf->tail = 0;
}
STATIC bool ringbuf_push(ring_buf_t *rbuf, uint8_t data) {
size_t next_tail = (rbuf->tail + 1) % rbuf->size;
if (next_tail != rbuf->head) {
rbuf->buffer[rbuf->tail] = data;
rbuf->tail = next_tail;
return true;
}
// full
return false;
}
STATIC bool ringbuf_pop(ring_buf_t *rbuf, uint8_t *data) {
if (rbuf->head == rbuf->tail) {
// empty
return false;
}
*data = rbuf->buffer[rbuf->head];
rbuf->head = (rbuf->head + 1) % rbuf->size;
return true;
}
STATIC bool ringbuf_is_empty(ring_buf_t *rbuf) {
return rbuf->head == rbuf->tail;
}
STATIC bool ringbuf_is_full(ring_buf_t *rbuf) {
return ((rbuf->tail + 1) % rbuf->size) == rbuf->head;
}
STATIC size_t ringbuf_available_data(ring_buf_t *rbuf) {
return (rbuf->tail - rbuf->head + rbuf->size) % rbuf->size;
}
STATIC size_t ringbuf_available_space(ring_buf_t *rbuf) {
return rbuf->size - ringbuf_available_data(rbuf) - 1;
}
STATIC int8_t get_frame_mapping_index(int8_t bits, format_t format) {
if (format == MONO) {
if (bits == 16) {
@@ -429,163 +362,6 @@ STATIC const uint32_t get_clock_divider(int32_t rate) {
return 0;
}
STATIC uint32_t fill_appbuf_from_ringbuf(machine_i2s_obj_t *self, mp_buffer_info_t *appbuf) {
// copy audio samples from the ring buffer to the app buffer
// loop, copying samples until the app buffer is filled
// For asyncio mode, the loop will make an early exit if the ring buffer becomes empty
// Example:
// a MicroPython I2S object is configured for 16-bit mono (2 bytes per audio sample).
// For every frame coming from the ring buffer (8 bytes), 2 bytes are "cherry picked" and
// copied to the supplied app buffer.
// Thus, for every 1 byte copied to the app buffer, 4 bytes are read from the ring buffer.
// If a 8kB app buffer is supplied, 32kB of audio samples is read from the ring buffer.
uint32_t num_bytes_copied_to_appbuf = 0;
uint8_t *app_p = (uint8_t *)appbuf->buf;
uint8_t appbuf_sample_size_in_bytes = (self->bits == 16? 2 : 4) * (self->format == STEREO ? 2: 1);
uint32_t num_bytes_needed_from_ringbuf = appbuf->len * (I2S_RX_FRAME_SIZE_IN_BYTES / appbuf_sample_size_in_bytes);
uint8_t discard_byte;
while (num_bytes_needed_from_ringbuf) {
uint8_t f_index = get_frame_mapping_index(self->bits, self->format);
for (uint8_t i = 0; i < I2S_RX_FRAME_SIZE_IN_BYTES; i++) {
int8_t r_to_a_mapping = i2s_frame_map[f_index][i];
if (r_to_a_mapping != -1) {
if (self->io_mode == BLOCKING) {
// poll the ringbuf until a sample becomes available, copy into appbuf using the mapping transform
while (ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping) == false) {
;
}
num_bytes_copied_to_appbuf++;
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping) == false) {
// ring buffer is empty, exit
goto exit;
} else {
num_bytes_copied_to_appbuf++;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
} else { // r_a_mapping == -1
// discard unused byte from ring buffer
if (self->io_mode == BLOCKING) {
// poll the ringbuf until a sample becomes available
while (ringbuf_pop(&self->ring_buffer, &discard_byte) == false) {
;
}
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_pop(&self->ring_buffer, &discard_byte) == false) {
// ring buffer is empty, exit
goto exit;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
}
num_bytes_needed_from_ringbuf--;
}
app_p += appbuf_sample_size_in_bytes;
}
exit:
return num_bytes_copied_to_appbuf;
}
// function is used in IRQ context
STATIC void fill_appbuf_from_ringbuf_non_blocking(machine_i2s_obj_t *self) {
// attempt to copy a block of audio samples from the ring buffer to the supplied app buffer.
// audio samples will be formatted as part of the copy operation
uint32_t num_bytes_copied_to_appbuf = 0;
uint8_t *app_p = &(((uint8_t *)self->non_blocking_descriptor.appbuf.buf)[self->non_blocking_descriptor.index]);
uint8_t appbuf_sample_size_in_bytes = (self->bits == 16? 2 : 4) * (self->format == STEREO ? 2: 1);
uint32_t num_bytes_remaining_to_copy_to_appbuf = self->non_blocking_descriptor.appbuf.len - self->non_blocking_descriptor.index;
uint32_t num_bytes_remaining_to_copy_from_ring_buffer = num_bytes_remaining_to_copy_to_appbuf *
(I2S_RX_FRAME_SIZE_IN_BYTES / appbuf_sample_size_in_bytes);
uint32_t num_bytes_needed_from_ringbuf = MIN(SIZEOF_NON_BLOCKING_COPY_IN_BYTES, num_bytes_remaining_to_copy_from_ring_buffer);
uint8_t discard_byte;
if (ringbuf_available_data(&self->ring_buffer) >= num_bytes_needed_from_ringbuf) {
while (num_bytes_needed_from_ringbuf) {
uint8_t f_index = get_frame_mapping_index(self->bits, self->format);
for (uint8_t i = 0; i < I2S_RX_FRAME_SIZE_IN_BYTES; i++) {
int8_t r_to_a_mapping = i2s_frame_map[f_index][i];
if (r_to_a_mapping != -1) {
ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping);
num_bytes_copied_to_appbuf++;
} else { // r_a_mapping == -1
// discard unused byte from ring buffer
ringbuf_pop(&self->ring_buffer, &discard_byte);
}
num_bytes_needed_from_ringbuf--;
}
app_p += appbuf_sample_size_in_bytes;
}
self->non_blocking_descriptor.index += num_bytes_copied_to_appbuf;
if (self->non_blocking_descriptor.index >= self->non_blocking_descriptor.appbuf.len) {
self->non_blocking_descriptor.copy_in_progress = false;
mp_sched_schedule(self->callback_for_non_blocking, MP_OBJ_FROM_PTR(self));
}
}
}
STATIC uint32_t copy_appbuf_to_ringbuf(machine_i2s_obj_t *self, mp_buffer_info_t *appbuf) {
// copy audio samples from the app buffer to the ring buffer
// loop, reading samples until the app buffer is emptied
// for asyncio mode, the loop will make an early exit if the ring buffer becomes full
uint32_t a_index = 0;
while (a_index < appbuf->len) {
if (self->io_mode == BLOCKING) {
// copy a byte to the ringbuf when space becomes available
while (ringbuf_push(&self->ring_buffer, ((uint8_t *)appbuf->buf)[a_index]) == false) {
;
}
a_index++;
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_push(&self->ring_buffer, ((uint8_t *)appbuf->buf)[a_index]) == false) {
// ring buffer is full, exit
break;
} else {
a_index++;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
}
return a_index;
}
// function is used in IRQ context
STATIC void copy_appbuf_to_ringbuf_non_blocking(machine_i2s_obj_t *self) {
// copy audio samples from app buffer into ring buffer
uint32_t num_bytes_remaining_to_copy = self->non_blocking_descriptor.appbuf.len - self->non_blocking_descriptor.index;
uint32_t num_bytes_to_copy = MIN(SIZEOF_NON_BLOCKING_COPY_IN_BYTES, num_bytes_remaining_to_copy);
if (ringbuf_available_space(&self->ring_buffer) >= num_bytes_to_copy) {
for (uint32_t i = 0; i < num_bytes_to_copy; i++) {
ringbuf_push(&self->ring_buffer,
((uint8_t *)self->non_blocking_descriptor.appbuf.buf)[self->non_blocking_descriptor.index + i]);
}
self->non_blocking_descriptor.index += num_bytes_to_copy;
if (self->non_blocking_descriptor.index >= self->non_blocking_descriptor.appbuf.len) {
self->non_blocking_descriptor.copy_in_progress = false;
mp_sched_schedule(self->callback_for_non_blocking, MP_OBJ_FROM_PTR(self));
}
}
}
// function is used in IRQ context
STATIC void empty_dma(machine_i2s_obj_t *self, ping_pong_t dma_ping_pong) {
uint16_t dma_buffer_offset = 0;
@@ -1116,104 +892,6 @@ STATIC mp_obj_t machine_i2s_shift(size_t n_args, const mp_obj_t *pos_args, mp_ma
STATIC MP_DEFINE_CONST_FUN_OBJ_KW(machine_i2s_shift_fun_obj, 0, machine_i2s_shift);
STATIC MP_DEFINE_CONST_STATICMETHOD_OBJ(machine_i2s_shift_obj, MP_ROM_PTR(&machine_i2s_shift_fun_obj));
STATIC mp_uint_t machine_i2s_stream_read(mp_obj_t self_in, void *buf_in, mp_uint_t size, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
if (self->mode != RX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
uint8_t appbuf_sample_size_in_bytes = (self->bits / 8) * (self->format == STEREO ? 2: 1);
if (size % appbuf_sample_size_in_bytes != 0) {
*errcode = MP_EINVAL;
return MP_STREAM_ERROR;
}
if (size == 0) {
return 0;
}
if (self->io_mode == NON_BLOCKING) {
self->non_blocking_descriptor.appbuf.buf = (void *)buf_in;
self->non_blocking_descriptor.appbuf.len = size;
self->non_blocking_descriptor.index = 0;
self->non_blocking_descriptor.copy_in_progress = true;
return size;
} else { // blocking or asyncio mode
mp_buffer_info_t appbuf;
appbuf.buf = (void *)buf_in;
appbuf.len = size;
uint32_t num_bytes_read = fill_appbuf_from_ringbuf(self, &appbuf);
return num_bytes_read;
}
}
STATIC mp_uint_t machine_i2s_stream_write(mp_obj_t self_in, const void *buf_in, mp_uint_t size, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
if (self->mode != TX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (size == 0) {
return 0;
}
if (self->io_mode == NON_BLOCKING) {
self->non_blocking_descriptor.appbuf.buf = (void *)buf_in;
self->non_blocking_descriptor.appbuf.len = size;
self->non_blocking_descriptor.index = 0;
self->non_blocking_descriptor.copy_in_progress = true;
return size;
} else { // blocking or asyncio mode
mp_buffer_info_t appbuf;
appbuf.buf = (void *)buf_in;
appbuf.len = size;
uint32_t num_bytes_written = copy_appbuf_to_ringbuf(self, &appbuf);
return num_bytes_written;
}
}
STATIC mp_uint_t machine_i2s_ioctl(mp_obj_t self_in, mp_uint_t request, uintptr_t arg, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
mp_uint_t ret;
uintptr_t flags = arg;
self->io_mode = ASYNCIO; // a call to ioctl() is an indication that asyncio is being used
if (request == MP_STREAM_POLL) {
ret = 0;
if (flags & MP_STREAM_POLL_RD) {
if (self->mode != RX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (!ringbuf_is_empty(&self->ring_buffer)) {
ret |= MP_STREAM_POLL_RD;
}
}
if (flags & MP_STREAM_POLL_WR) {
if (self->mode != TX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (!ringbuf_is_full(&self->ring_buffer)) {
ret |= MP_STREAM_POLL_WR;
}
}
} else {
*errcode = MP_EINVAL;
ret = MP_STREAM_ERROR;
}
return ret;
}
MP_REGISTER_ROOT_POINTER(struct _machine_i2s_obj_t *machine_i2s_obj[MICROPY_HW_I2S_NUM]);
#endif // MICROPY_PY_MACHINE_I2S

View file

@@ -90,6 +90,7 @@ uint32_t trng_random_u32(void);
#define MICROPY_PY_MACHINE_I2S_INCLUDEFILE "ports/mimxrt/machine_i2s.c"
#define MICROPY_PY_MACHINE_I2S_CONSTANT_RX (RX)
#define MICROPY_PY_MACHINE_I2S_CONSTANT_TX (TX)
#define MICROPY_PY_MACHINE_I2S_RING_BUF (1)
#ifndef MICROPY_PY_MACHINE_SDCARD
#define MICROPY_PY_MACHINE_SDCARD (1)
#endif
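The new MICROPY_PY_MACHINE_I2S_RING_BUF flag is how a port opts into the shared ring-buffer code above; this commit sets it for the mimxrt, rp2 and stm32 ports, while a port whose driver already queues DMA buffers (the xQueueSend/i2s_event_queue paths kept under #else in extmod/machine_i2s.c) leaves it unset. Sketched out:

// Port using the shared ring buffer (mimxrt, rp2, stm32 in this commit):
#define MICROPY_PY_MACHINE_I2S_RING_BUF (1)

// Queue/descriptor based port: leave the flag unset so the #else paths in
// extmod/machine_i2s.c are compiled instead (assuming it defaults to 0 there).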

View file

@@ -30,7 +30,6 @@
#include <stdlib.h>
#include <string.h>
#include "py/mphal.h"
#include "py/stream.h"
#include "hardware/pio.h"
#include "hardware/clocks.h"
@@ -114,19 +113,6 @@ typedef enum {
GP_OUTPUT = 1
} gpio_dir_t;
typedef struct _ring_buf_t {
uint8_t *buffer;
size_t head;
size_t tail;
size_t size;
} ring_buf_t;
typedef struct _non_blocking_descriptor_t {
mp_buffer_info_t appbuf;
uint32_t index;
bool copy_in_progress;
} non_blocking_descriptor_t;
typedef struct _machine_i2s_obj_t {
mp_obj_base_t base;
uint8_t i2s_id;
@@ -228,59 +214,6 @@ void machine_i2s_init0(void) {
}
}
// Ring Buffer
// Thread safe when used with these constraints:
// - Single Producer, Single Consumer
// - Sequential atomic operations
// One byte of capacity is used to detect buffer empty/full
STATIC void ringbuf_init(ring_buf_t *rbuf, uint8_t *buffer, size_t size) {
rbuf->buffer = buffer;
rbuf->size = size;
rbuf->head = 0;
rbuf->tail = 0;
}
STATIC bool ringbuf_push(ring_buf_t *rbuf, uint8_t data) {
size_t next_tail = (rbuf->tail + 1) % rbuf->size;
if (next_tail != rbuf->head) {
rbuf->buffer[rbuf->tail] = data;
rbuf->tail = next_tail;
return true;
}
// full
return false;
}
STATIC bool ringbuf_pop(ring_buf_t *rbuf, uint8_t *data) {
if (rbuf->head == rbuf->tail) {
// empty
return false;
}
*data = rbuf->buffer[rbuf->head];
rbuf->head = (rbuf->head + 1) % rbuf->size;
return true;
}
STATIC bool ringbuf_is_empty(ring_buf_t *rbuf) {
return rbuf->head == rbuf->tail;
}
STATIC bool ringbuf_is_full(ring_buf_t *rbuf) {
return ((rbuf->tail + 1) % rbuf->size) == rbuf->head;
}
STATIC size_t ringbuf_available_data(ring_buf_t *rbuf) {
return (rbuf->tail - rbuf->head + rbuf->size) % rbuf->size;
}
STATIC size_t ringbuf_available_space(ring_buf_t *rbuf) {
return rbuf->size - ringbuf_available_data(rbuf) - 1;
}
STATIC int8_t get_frame_mapping_index(int8_t bits, format_t format) {
if (format == MONO) {
if (bits == 16) {
@@ -297,163 +230,6 @@ STATIC int8_t get_frame_mapping_index(int8_t bits, format_t format) {
}
}
STATIC uint32_t fill_appbuf_from_ringbuf(machine_i2s_obj_t *self, mp_buffer_info_t *appbuf) {
// copy audio samples from the ring buffer to the app buffer
// loop, copying samples until the app buffer is filled
// For asyncio mode, the loop will make an early exit if the ring buffer becomes empty
// Example:
// a MicroPython I2S object is configured for 16-bit mono (2 bytes per audio sample).
// For every frame coming from the ring buffer (8 bytes), 2 bytes are "cherry picked" and
// copied to the supplied app buffer.
// Thus, for every 1 byte copied to the app buffer, 4 bytes are read from the ring buffer.
// If a 8kB app buffer is supplied, 32kB of audio samples is read from the ring buffer.
uint32_t num_bytes_copied_to_appbuf = 0;
uint8_t *app_p = (uint8_t *)appbuf->buf;
uint8_t appbuf_sample_size_in_bytes = (self->bits == 16? 2 : 4) * (self->format == STEREO ? 2: 1);
uint32_t num_bytes_needed_from_ringbuf = appbuf->len * (I2S_RX_FRAME_SIZE_IN_BYTES / appbuf_sample_size_in_bytes);
uint8_t discard_byte;
while (num_bytes_needed_from_ringbuf) {
uint8_t f_index = get_frame_mapping_index(self->bits, self->format);
for (uint8_t i = 0; i < I2S_RX_FRAME_SIZE_IN_BYTES; i++) {
int8_t r_to_a_mapping = i2s_frame_map[f_index][i];
if (r_to_a_mapping != -1) {
if (self->io_mode == BLOCKING) {
// poll the ringbuf until a sample becomes available, copy into appbuf using the mapping transform
while (ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping) == false) {
;
}
num_bytes_copied_to_appbuf++;
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping) == false) {
// ring buffer is empty, exit
goto exit;
} else {
num_bytes_copied_to_appbuf++;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
} else { // r_a_mapping == -1
// discard unused byte from ring buffer
if (self->io_mode == BLOCKING) {
// poll the ringbuf until a sample becomes available
while (ringbuf_pop(&self->ring_buffer, &discard_byte) == false) {
;
}
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_pop(&self->ring_buffer, &discard_byte) == false) {
// ring buffer is empty, exit
goto exit;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
}
num_bytes_needed_from_ringbuf--;
}
app_p += appbuf_sample_size_in_bytes;
}
exit:
return num_bytes_copied_to_appbuf;
}
// function is used in IRQ context
STATIC void fill_appbuf_from_ringbuf_non_blocking(machine_i2s_obj_t *self) {
// attempt to copy a block of audio samples from the ring buffer to the supplied app buffer.
// audio samples will be formatted as part of the copy operation
uint32_t num_bytes_copied_to_appbuf = 0;
uint8_t *app_p = &(((uint8_t *)self->non_blocking_descriptor.appbuf.buf)[self->non_blocking_descriptor.index]);
uint8_t appbuf_sample_size_in_bytes = (self->bits == 16? 2 : 4) * (self->format == STEREO ? 2: 1);
uint32_t num_bytes_remaining_to_copy_to_appbuf = self->non_blocking_descriptor.appbuf.len - self->non_blocking_descriptor.index;
uint32_t num_bytes_remaining_to_copy_from_ring_buffer = num_bytes_remaining_to_copy_to_appbuf *
(I2S_RX_FRAME_SIZE_IN_BYTES / appbuf_sample_size_in_bytes);
uint32_t num_bytes_needed_from_ringbuf = MIN(SIZEOF_NON_BLOCKING_COPY_IN_BYTES, num_bytes_remaining_to_copy_from_ring_buffer);
uint8_t discard_byte;
if (ringbuf_available_data(&self->ring_buffer) >= num_bytes_needed_from_ringbuf) {
while (num_bytes_needed_from_ringbuf) {
uint8_t f_index = get_frame_mapping_index(self->bits, self->format);
for (uint8_t i = 0; i < I2S_RX_FRAME_SIZE_IN_BYTES; i++) {
int8_t r_to_a_mapping = i2s_frame_map[f_index][i];
if (r_to_a_mapping != -1) {
ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping);
num_bytes_copied_to_appbuf++;
} else { // r_a_mapping == -1
// discard unused byte from ring buffer
ringbuf_pop(&self->ring_buffer, &discard_byte);
}
num_bytes_needed_from_ringbuf--;
}
app_p += appbuf_sample_size_in_bytes;
}
self->non_blocking_descriptor.index += num_bytes_copied_to_appbuf;
if (self->non_blocking_descriptor.index >= self->non_blocking_descriptor.appbuf.len) {
self->non_blocking_descriptor.copy_in_progress = false;
mp_sched_schedule(self->callback_for_non_blocking, MP_OBJ_FROM_PTR(self));
}
}
}
STATIC uint32_t copy_appbuf_to_ringbuf(machine_i2s_obj_t *self, mp_buffer_info_t *appbuf) {
// copy audio samples from the app buffer to the ring buffer
// loop, reading samples until the app buffer is emptied
// for asyncio mode, the loop will make an early exit if the ring buffer becomes full
uint32_t a_index = 0;
while (a_index < appbuf->len) {
if (self->io_mode == BLOCKING) {
// copy a byte to the ringbuf when space becomes available
while (ringbuf_push(&self->ring_buffer, ((uint8_t *)appbuf->buf)[a_index]) == false) {
;
}
a_index++;
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_push(&self->ring_buffer, ((uint8_t *)appbuf->buf)[a_index]) == false) {
// ring buffer is full, exit
break;
} else {
a_index++;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
}
return a_index;
}
// function is used in IRQ context
STATIC void copy_appbuf_to_ringbuf_non_blocking(machine_i2s_obj_t *self) {
// copy audio samples from app buffer into ring buffer
uint32_t num_bytes_remaining_to_copy = self->non_blocking_descriptor.appbuf.len - self->non_blocking_descriptor.index;
uint32_t num_bytes_to_copy = MIN(SIZEOF_NON_BLOCKING_COPY_IN_BYTES, num_bytes_remaining_to_copy);
if (ringbuf_available_space(&self->ring_buffer) >= num_bytes_to_copy) {
for (uint32_t i = 0; i < num_bytes_to_copy; i++) {
ringbuf_push(&self->ring_buffer,
((uint8_t *)self->non_blocking_descriptor.appbuf.buf)[self->non_blocking_descriptor.index + i]);
}
self->non_blocking_descriptor.index += num_bytes_to_copy;
if (self->non_blocking_descriptor.index >= self->non_blocking_descriptor.appbuf.len) {
self->non_blocking_descriptor.copy_in_progress = false;
mp_sched_schedule(self->callback_for_non_blocking, MP_OBJ_FROM_PTR(self));
}
}
}
// function is used in IRQ context
STATIC void empty_dma(machine_i2s_obj_t *self, uint8_t *dma_buffer_p) {
// when space exists, copy samples into ring buffer
@@ -989,102 +765,4 @@ STATIC mp_obj_t machine_i2s_shift(size_t n_args, const mp_obj_t *pos_args, mp_ma
STATIC MP_DEFINE_CONST_FUN_OBJ_KW(machine_i2s_shift_fun_obj, 0, machine_i2s_shift);
STATIC MP_DEFINE_CONST_STATICMETHOD_OBJ(machine_i2s_shift_obj, MP_ROM_PTR(&machine_i2s_shift_fun_obj));
STATIC mp_uint_t machine_i2s_stream_read(mp_obj_t self_in, void *buf_in, mp_uint_t size, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
if (self->mode != RX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
uint8_t appbuf_sample_size_in_bytes = (self->bits / 8) * (self->format == STEREO ? 2: 1);
if (size % appbuf_sample_size_in_bytes != 0) {
*errcode = MP_EINVAL;
return MP_STREAM_ERROR;
}
if (size == 0) {
return 0;
}
if (self->io_mode == NON_BLOCKING) {
self->non_blocking_descriptor.appbuf.buf = (void *)buf_in;
self->non_blocking_descriptor.appbuf.len = size;
self->non_blocking_descriptor.index = 0;
self->non_blocking_descriptor.copy_in_progress = true;
return size;
} else { // blocking or asyncio mode
mp_buffer_info_t appbuf;
appbuf.buf = (void *)buf_in;
appbuf.len = size;
uint32_t num_bytes_read = fill_appbuf_from_ringbuf(self, &appbuf);
return num_bytes_read;
}
}
STATIC mp_uint_t machine_i2s_stream_write(mp_obj_t self_in, const void *buf_in, mp_uint_t size, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
if (self->mode != TX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (size == 0) {
return 0;
}
if (self->io_mode == NON_BLOCKING) {
self->non_blocking_descriptor.appbuf.buf = (void *)buf_in;
self->non_blocking_descriptor.appbuf.len = size;
self->non_blocking_descriptor.index = 0;
self->non_blocking_descriptor.copy_in_progress = true;
return size;
} else { // blocking or asyncio mode
mp_buffer_info_t appbuf;
appbuf.buf = (void *)buf_in;
appbuf.len = size;
uint32_t num_bytes_written = copy_appbuf_to_ringbuf(self, &appbuf);
return num_bytes_written;
}
}
STATIC mp_uint_t machine_i2s_ioctl(mp_obj_t self_in, mp_uint_t request, uintptr_t arg, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
mp_uint_t ret;
uintptr_t flags = arg;
self->io_mode = ASYNCIO; // a call to ioctl() is an indication that asyncio is being used
if (request == MP_STREAM_POLL) {
ret = 0;
if (flags & MP_STREAM_POLL_RD) {
if (self->mode != RX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (!ringbuf_is_empty(&self->ring_buffer)) {
ret |= MP_STREAM_POLL_RD;
}
}
if (flags & MP_STREAM_POLL_WR) {
if (self->mode != TX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (!ringbuf_is_full(&self->ring_buffer)) {
ret |= MP_STREAM_POLL_WR;
}
}
} else {
*errcode = MP_EINVAL;
ret = MP_STREAM_ERROR;
}
return ret;
}
MP_REGISTER_ROOT_POINTER(void *machine_i2s_obj[2]);

View file

@@ -122,6 +122,7 @@
#define MICROPY_PY_MACHINE_I2S_INCLUDEFILE "ports/rp2/machine_i2s.c"
#define MICROPY_PY_MACHINE_I2S_CONSTANT_RX (RX)
#define MICROPY_PY_MACHINE_I2S_CONSTANT_TX (TX)
#define MICROPY_PY_MACHINE_I2S_RING_BUF (1)
#define MICROPY_PY_MACHINE_SPI (1)
#define MICROPY_PY_MACHINE_SPI_MSB (SPI_MSB_FIRST)
#define MICROPY_PY_MACHINE_SPI_LSB (SPI_LSB_FIRST)

View file

@@ -30,7 +30,6 @@
#include <stdlib.h>
#include "py/mphal.h"
#include "py/stream.h"
#include "pin.h"
#include "dma.h"
@@ -106,19 +105,6 @@ typedef enum {
BOTTOM_HALF
} ping_pong_t;
typedef struct _ring_buf_t {
uint8_t *buffer;
size_t head;
size_t tail;
size_t size;
} ring_buf_t;
typedef struct _non_blocking_descriptor_t {
mp_buffer_info_t appbuf;
uint32_t index;
bool copy_in_progress;
} non_blocking_descriptor_t;
typedef struct _machine_i2s_obj_t {
mp_obj_base_t base;
uint8_t i2s_id;
@@ -162,59 +148,6 @@ void machine_i2s_init0() {
}
}
// Ring Buffer
// Thread safe when used with these constraints:
// - Single Producer, Single Consumer
// - Sequential atomic operations
// One byte of capacity is used to detect buffer empty/full
STATIC void ringbuf_init(ring_buf_t *rbuf, uint8_t *buffer, size_t size) {
rbuf->buffer = buffer;
rbuf->size = size;
rbuf->head = 0;
rbuf->tail = 0;
}
STATIC bool ringbuf_push(ring_buf_t *rbuf, uint8_t data) {
size_t next_tail = (rbuf->tail + 1) % rbuf->size;
if (next_tail != rbuf->head) {
rbuf->buffer[rbuf->tail] = data;
rbuf->tail = next_tail;
return true;
}
// full
return false;
}
STATIC bool ringbuf_pop(ring_buf_t *rbuf, uint8_t *data) {
if (rbuf->head == rbuf->tail) {
// empty
return false;
}
*data = rbuf->buffer[rbuf->head];
rbuf->head = (rbuf->head + 1) % rbuf->size;
return true;
}
STATIC bool ringbuf_is_empty(ring_buf_t *rbuf) {
return rbuf->head == rbuf->tail;
}
STATIC bool ringbuf_is_full(ring_buf_t *rbuf) {
return ((rbuf->tail + 1) % rbuf->size) == rbuf->head;
}
STATIC size_t ringbuf_available_data(ring_buf_t *rbuf) {
return (rbuf->tail - rbuf->head + rbuf->size) % rbuf->size;
}
STATIC size_t ringbuf_available_space(ring_buf_t *rbuf) {
return rbuf->size - ringbuf_available_data(rbuf) - 1;
}
// For 32-bit audio samples, the STM32 HAL API expects each 32-bit sample to be encoded
// in an unusual byte ordering: Byte_2, Byte_3, Byte_0, Byte_1
// where: Byte_0 is the least significant byte of the 32-bit sample
@@ -277,163 +210,6 @@ STATIC int8_t get_dma_bits(uint16_t mode, int8_t bits) {
}
}
STATIC uint32_t fill_appbuf_from_ringbuf(machine_i2s_obj_t *self, mp_buffer_info_t *appbuf) {
// copy audio samples from the ring buffer to the app buffer
// loop, copying samples until the app buffer is filled
// For asyncio mode, the loop will make an early exit if the ring buffer becomes empty
// Example:
// a MicroPython I2S object is configured for 16-bit mono (2 bytes per audio sample).
// For every frame coming from the ring buffer (8 bytes), 2 bytes are "cherry picked" and
// copied to the supplied app buffer.
// Thus, for every 1 byte copied to the app buffer, 4 bytes are read from the ring buffer.
// If a 8kB app buffer is supplied, 32kB of audio samples is read from the ring buffer.
uint32_t num_bytes_copied_to_appbuf = 0;
uint8_t *app_p = (uint8_t *)appbuf->buf;
uint8_t appbuf_sample_size_in_bytes = (self->bits == 16? 2 : 4) * (self->format == STEREO ? 2: 1);
uint32_t num_bytes_needed_from_ringbuf = appbuf->len * (I2S_RX_FRAME_SIZE_IN_BYTES / appbuf_sample_size_in_bytes);
uint8_t discard_byte;
while (num_bytes_needed_from_ringbuf) {
uint8_t f_index = get_frame_mapping_index(self->bits, self->format);
for (uint8_t i = 0; i < I2S_RX_FRAME_SIZE_IN_BYTES; i++) {
int8_t r_to_a_mapping = i2s_frame_map[f_index][i];
if (r_to_a_mapping != -1) {
if (self->io_mode == BLOCKING) {
// poll the ringbuf until a sample becomes available, copy into appbuf using the mapping transform
while (ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping) == false) {
;
}
num_bytes_copied_to_appbuf++;
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping) == false) {
// ring buffer is empty, exit
goto exit;
} else {
num_bytes_copied_to_appbuf++;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
} else { // r_a_mapping == -1
// discard unused byte from ring buffer
if (self->io_mode == BLOCKING) {
// poll the ringbuf until a sample becomes available
while (ringbuf_pop(&self->ring_buffer, &discard_byte) == false) {
;
}
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_pop(&self->ring_buffer, &discard_byte) == false) {
// ring buffer is empty, exit
goto exit;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
}
num_bytes_needed_from_ringbuf--;
}
app_p += appbuf_sample_size_in_bytes;
}
exit:
return num_bytes_copied_to_appbuf;
}
// function is used in IRQ context
STATIC void fill_appbuf_from_ringbuf_non_blocking(machine_i2s_obj_t *self) {
// attempt to copy a block of audio samples from the ring buffer to the supplied app buffer.
// audio samples will be formatted as part of the copy operation
uint32_t num_bytes_copied_to_appbuf = 0;
uint8_t *app_p = &(((uint8_t *)self->non_blocking_descriptor.appbuf.buf)[self->non_blocking_descriptor.index]);
uint8_t appbuf_sample_size_in_bytes = (self->bits == 16? 2 : 4) * (self->format == STEREO ? 2: 1);
uint32_t num_bytes_remaining_to_copy_to_appbuf = self->non_blocking_descriptor.appbuf.len - self->non_blocking_descriptor.index;
uint32_t num_bytes_remaining_to_copy_from_ring_buffer = num_bytes_remaining_to_copy_to_appbuf *
(I2S_RX_FRAME_SIZE_IN_BYTES / appbuf_sample_size_in_bytes);
uint32_t num_bytes_needed_from_ringbuf = MIN(SIZEOF_NON_BLOCKING_COPY_IN_BYTES, num_bytes_remaining_to_copy_from_ring_buffer);
uint8_t discard_byte;
if (ringbuf_available_data(&self->ring_buffer) >= num_bytes_needed_from_ringbuf) {
while (num_bytes_needed_from_ringbuf) {
uint8_t f_index = get_frame_mapping_index(self->bits, self->format);
for (uint8_t i = 0; i < I2S_RX_FRAME_SIZE_IN_BYTES; i++) {
int8_t r_to_a_mapping = i2s_frame_map[f_index][i];
if (r_to_a_mapping != -1) {
ringbuf_pop(&self->ring_buffer, app_p + r_to_a_mapping);
num_bytes_copied_to_appbuf++;
} else { // r_a_mapping == -1
// discard unused byte from ring buffer
ringbuf_pop(&self->ring_buffer, &discard_byte);
}
num_bytes_needed_from_ringbuf--;
}
app_p += appbuf_sample_size_in_bytes;
}
self->non_blocking_descriptor.index += num_bytes_copied_to_appbuf;
if (self->non_blocking_descriptor.index >= self->non_blocking_descriptor.appbuf.len) {
self->non_blocking_descriptor.copy_in_progress = false;
mp_sched_schedule(self->callback_for_non_blocking, MP_OBJ_FROM_PTR(self));
}
}
}
STATIC uint32_t copy_appbuf_to_ringbuf(machine_i2s_obj_t *self, mp_buffer_info_t *appbuf) {
// copy audio samples from the app buffer to the ring buffer
// loop, reading samples until the app buffer is emptied
// for asyncio mode, the loop will make an early exit if the ring buffer becomes full
uint32_t a_index = 0;
while (a_index < appbuf->len) {
if (self->io_mode == BLOCKING) {
// copy a byte to the ringbuf when space becomes available
while (ringbuf_push(&self->ring_buffer, ((uint8_t *)appbuf->buf)[a_index]) == false) {
;
}
a_index++;
} else if (self->io_mode == ASYNCIO) {
if (ringbuf_push(&self->ring_buffer, ((uint8_t *)appbuf->buf)[a_index]) == false) {
// ring buffer is full, exit
break;
} else {
a_index++;
}
} else {
return 0; // should never get here (non-blocking mode does not use this function)
}
}
return a_index;
}
// function is used in IRQ context
STATIC void copy_appbuf_to_ringbuf_non_blocking(machine_i2s_obj_t *self) {
// copy audio samples from app buffer into ring buffer
uint32_t num_bytes_remaining_to_copy = self->non_blocking_descriptor.appbuf.len - self->non_blocking_descriptor.index;
uint32_t num_bytes_to_copy = MIN(SIZEOF_NON_BLOCKING_COPY_IN_BYTES, num_bytes_remaining_to_copy);
if (ringbuf_available_space(&self->ring_buffer) >= num_bytes_to_copy) {
for (uint32_t i = 0; i < num_bytes_to_copy; i++) {
ringbuf_push(&self->ring_buffer,
((uint8_t *)self->non_blocking_descriptor.appbuf.buf)[self->non_blocking_descriptor.index + i]);
}
self->non_blocking_descriptor.index += num_bytes_to_copy;
if (self->non_blocking_descriptor.index >= self->non_blocking_descriptor.appbuf.len) {
self->non_blocking_descriptor.copy_in_progress = false;
mp_sched_schedule(self->callback_for_non_blocking, MP_OBJ_FROM_PTR(self));
}
}
}
// function is used in IRQ context
STATIC void empty_dma(machine_i2s_obj_t *self, ping_pong_t dma_ping_pong) {
uint16_t dma_buffer_offset = 0;
@@ -965,104 +741,6 @@ STATIC mp_obj_t machine_i2s_shift(size_t n_args, const mp_obj_t *pos_args, mp_ma
STATIC MP_DEFINE_CONST_FUN_OBJ_KW(machine_i2s_shift_fun_obj, 0, machine_i2s_shift);
STATIC MP_DEFINE_CONST_STATICMETHOD_OBJ(machine_i2s_shift_obj, MP_ROM_PTR(&machine_i2s_shift_fun_obj));
STATIC mp_uint_t machine_i2s_stream_read(mp_obj_t self_in, void *buf_in, mp_uint_t size, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
if (self->mode != I2S_MODE_MASTER_RX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
uint8_t appbuf_sample_size_in_bytes = (self->bits / 8) * (self->format == STEREO ? 2: 1);
if (size % appbuf_sample_size_in_bytes != 0) {
*errcode = MP_EINVAL;
return MP_STREAM_ERROR;
}
if (size == 0) {
return 0;
}
if (self->io_mode == NON_BLOCKING) {
self->non_blocking_descriptor.appbuf.buf = (void *)buf_in;
self->non_blocking_descriptor.appbuf.len = size;
self->non_blocking_descriptor.index = 0;
self->non_blocking_descriptor.copy_in_progress = true;
return size;
} else { // blocking or asyncio mode
mp_buffer_info_t appbuf;
appbuf.buf = (void *)buf_in;
appbuf.len = size;
uint32_t num_bytes_read = fill_appbuf_from_ringbuf(self, &appbuf);
return num_bytes_read;
}
}
STATIC mp_uint_t machine_i2s_stream_write(mp_obj_t self_in, const void *buf_in, mp_uint_t size, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
if (self->mode != I2S_MODE_MASTER_TX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (size == 0) {
return 0;
}
if (self->io_mode == NON_BLOCKING) {
self->non_blocking_descriptor.appbuf.buf = (void *)buf_in;
self->non_blocking_descriptor.appbuf.len = size;
self->non_blocking_descriptor.index = 0;
self->non_blocking_descriptor.copy_in_progress = true;
return size;
} else { // blocking or asyncio mode
mp_buffer_info_t appbuf;
appbuf.buf = (void *)buf_in;
appbuf.len = size;
uint32_t num_bytes_written = copy_appbuf_to_ringbuf(self, &appbuf);
return num_bytes_written;
}
}
STATIC mp_uint_t machine_i2s_ioctl(mp_obj_t self_in, mp_uint_t request, uintptr_t arg, int *errcode) {
machine_i2s_obj_t *self = MP_OBJ_TO_PTR(self_in);
mp_uint_t ret;
uintptr_t flags = arg;
self->io_mode = ASYNCIO; // a call to ioctl() is an indication that asyncio is being used
if (request == MP_STREAM_POLL) {
ret = 0;
if (flags & MP_STREAM_POLL_RD) {
if (self->mode != I2S_MODE_MASTER_RX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (!ringbuf_is_empty(&self->ring_buffer)) {
ret |= MP_STREAM_POLL_RD;
}
}
if (flags & MP_STREAM_POLL_WR) {
if (self->mode != I2S_MODE_MASTER_TX) {
*errcode = MP_EPERM;
return MP_STREAM_ERROR;
}
if (!ringbuf_is_full(&self->ring_buffer)) {
ret |= MP_STREAM_POLL_WR;
}
}
} else {
*errcode = MP_EINVAL;
ret = MP_STREAM_ERROR;
}
return ret;
}
MP_REGISTER_ROOT_POINTER(struct _machine_i2s_obj_t *machine_i2s_obj[MICROPY_HW_MAX_I2S]);
#endif // MICROPY_PY_MACHINE_I2S

View file

@@ -120,6 +120,7 @@
#define MICROPY_PY_MACHINE_I2S_INCLUDEFILE "ports/stm32/machine_i2s.c"
#define MICROPY_PY_MACHINE_I2S_CONSTANT_RX (I2S_MODE_MASTER_RX)
#define MICROPY_PY_MACHINE_I2S_CONSTANT_TX (I2S_MODE_MASTER_TX)
#define MICROPY_PY_MACHINE_I2S_RING_BUF (1)
#define MICROPY_PY_MACHINE_SPI (1)
#define MICROPY_PY_MACHINE_SPI_MSB (SPI_FIRSTBIT_MSB)
#define MICROPY_PY_MACHINE_SPI_LSB (SPI_FIRSTBIT_LSB)