2022-01-12 03:30:29 +00:00
|
|
|
/*
|
2023-07-05 01:46:21 +00:00
|
|
|
* SPDX-FileCopyrightText: 2015-2023 Espressif Systems (Shanghai) CO LTD
|
2022-01-12 03:30:29 +00:00
|
|
|
*
|
|
|
|
* SPDX-License-Identifier: Apache-2.0
|
|
|
|
*/
|
2019-01-23 09:07:03 +00:00
|
|
|
|
|
|
|
// The HAL layer for SPI (common part, in iram)
|
|
|
|
// These functions are kept in a separate file to make sure all LL functions are in IRAM.
|
|
|
|
|
|
|
|
#include "hal/spi_hal.h"
|
2021-05-19 02:53:21 +00:00
|
|
|
#include "hal/assert.h"
|
2023-08-31 11:17:40 +00:00
|
|
|
#include "soc/ext_mem_defs.h"
|
2020-09-23 13:01:13 +00:00
|
|
|
#include "soc/soc_caps.h"
|
|
|
|
|
2020-09-08 09:05:49 +00:00
|
|
|
//This GDMA related part will be introduced by GDMA dedicated APIs in the future. Here we temporarily use macros.
#if SOC_GDMA_SUPPORTED
#if (SOC_GDMA_TRIG_PERIPH_SPI2_BUS == SOC_GDMA_BUS_AHB) && (SOC_AHB_GDMA_VERSION == 1)
#include "soc/gdma_struct.h"
#include "hal/gdma_ll.h"

// Map the generic spi_dma_ll_* names onto the AHB GDMA low-level driver.
// NOTE: no trailing semicolons in the single-statement macros below — a stray
// ';' would expand to two statements and break unbraced if/else bodies.
#define spi_dma_ll_rx_reset(dev, chan)                  gdma_ll_rx_reset_channel(&GDMA, chan)
#define spi_dma_ll_tx_reset(dev, chan)                  gdma_ll_tx_reset_channel(&GDMA, chan)
#define spi_dma_ll_rx_start(dev, chan, addr) do {\
            gdma_ll_rx_set_desc_addr(&GDMA, chan, (uint32_t)addr);\
            gdma_ll_rx_start(&GDMA, chan);\
        } while (0)
#define spi_dma_ll_tx_start(dev, chan, addr) do {\
            gdma_ll_tx_set_desc_addr(&GDMA, chan, (uint32_t)addr);\
            gdma_ll_tx_start(&GDMA, chan);\
        } while (0)

#elif (SOC_GDMA_TRIG_PERIPH_SPI2_BUS == SOC_GDMA_BUS_AXI) //TODO: IDF-6152, refactor spi hal layer
#include "hal/axi_dma_ll.h"

// Same mapping, but for targets where SPI2 is wired to the AXI DMA.
#define spi_dma_ll_rx_reset(dev, chan)                  axi_dma_ll_rx_reset_channel(&AXI_DMA, chan)
#define spi_dma_ll_tx_reset(dev, chan)                  axi_dma_ll_tx_reset_channel(&AXI_DMA, chan)
#define spi_dma_ll_rx_start(dev, chan, addr) do {\
            axi_dma_ll_rx_set_desc_addr(&AXI_DMA, chan, (uint32_t)addr);\
            axi_dma_ll_rx_start(&AXI_DMA, chan);\
        } while (0)
#define spi_dma_ll_tx_start(dev, chan, addr) do {\
            axi_dma_ll_tx_set_desc_addr(&AXI_DMA, chan, (uint32_t)addr);\
            axi_dma_ll_tx_start(&AXI_DMA, chan);\
        } while (0)
#endif
#endif  //SOC_GDMA_SUPPORTED
|
2019-01-23 09:07:03 +00:00
|
|
|
|
2020-09-09 02:21:49 +00:00
|
|
|
/**
 * Program the per-device (per-CS) configuration into the SPI peripheral.
 *
 * Applies clock, bit order, duplex mode and CS timing settings from `dev`
 * to the hardware instance referenced by `hal->hw`. Call before starting
 * transactions for a device whose configuration differs from the previous one.
 *
 * @param hal  HAL context holding the hardware instance pointer (`hal->hw`).
 * @param dev  Device configuration to apply (clock reg, mode, CS settings, ...).
 */
void spi_hal_setup_device(spi_hal_context_t *hal, const spi_hal_dev_config_t *dev)
{
    //Configure clock settings
    spi_dev_t *hw = hal->hw;
#if SOC_SPI_AS_CS_SUPPORTED
    // Optionally route the clock out on the CS line ("AS-CS" mode) where supported.
    spi_ll_master_set_cksel(hw, dev->cs_pin_id, dev->as_cs);
#endif
    spi_ll_master_set_pos_cs(hw, dev->cs_pin_id, dev->positive_cs);
    // Clock divider was pre-computed elsewhere; just load the raw register value.
    spi_ll_master_set_clock_by_reg(hw, &dev->timing_conf.clock_reg);
    //Configure bit order
    spi_ll_set_rx_lsbfirst(hw, dev->rx_lsbfirst);
    spi_ll_set_tx_lsbfirst(hw, dev->tx_lsbfirst);
    spi_ll_master_set_mode(hw, dev->mode);
    //Configure misc stuff
    spi_ll_set_half_duplex(hw, dev->half_duplex);
    spi_ll_set_sio_mode(hw, dev->sio);
    //Configure CS pin and timing
    spi_ll_master_set_cs_setup(hw, dev->cs_setup);
    spi_ll_master_set_cs_hold(hw, dev->cs_hold);
    spi_ll_master_select_cs(hw, dev->cs_pin_id);
}
|
|
|
|
|
2020-09-09 02:21:49 +00:00
|
|
|
/**
 * Program the per-transaction settings into the SPI peripheral.
 *
 * Configures line mode, dummy-bit compensation, MISO input delay, data
 * lengths, command/address phases and CS-keep-active for one transaction,
 * then caches `trans` into `hal->trans_config` for later use (e.g. by
 * spi_hal_fetch_result). Must only be called when the previous transaction
 * has finished (asserted below).
 *
 * @param hal    HAL context (hardware instance and cached transaction config).
 * @param dev    Device configuration this transaction runs under.
 * @param trans  Transaction attributes (buffers, bit lengths, cmd/addr, ...).
 */
void spi_hal_setup_trans(spi_hal_context_t *hal, const spi_hal_dev_config_t *dev, const spi_hal_trans_config_t *trans)
{
    spi_dev_t *hw = hal->hw;

    //clear int bit
    spi_ll_clear_int_stat(hal->hw);
    //We should be done with the transmission.
    HAL_ASSERT(spi_ll_get_running_cmd(hw) == 0);
    //set transaction line mode
    spi_ll_master_set_line_mode(hw, trans->line_mode);

    int extra_dummy = 0;
    //when no_dummy is not set and in half-duplex mode, sets the dummy bit if RX phase exist
    if (trans->rcv_buffer && !dev->no_compensate && dev->half_duplex) {
        extra_dummy = dev->timing_conf.timing_dummy;
    }

    //SPI iface needs to be configured for a delay in some cases.
    //configure dummy bits
    spi_ll_set_dummy(hw, extra_dummy + trans->dummy_bits);

    uint32_t miso_delay_num = 0;
    uint32_t miso_delay_mode = 0;
    // timing_miso_delay < 0 is used as a flag meaning "sample half a clock later";
    // otherwise it is a count of APB clocks to delay when dummy compensation is active.
    if (dev->timing_conf.timing_miso_delay < 0) {
        //if the data comes too late, delay half a SPI clock to improve reading
        // Delay-mode value depends on clock polarity/phase: modes 0/3 use 2, modes 1/2 use 1.
        switch (dev->mode) {
        case 0:
            miso_delay_mode = 2;
            break;
        case 1:
            miso_delay_mode = 1;
            break;
        case 2:
            miso_delay_mode = 1;
            break;
        case 3:
            miso_delay_mode = 2;
            break;
        }
        miso_delay_num = 0;
    } else {
        //if the data is so fast that dummy_bit is used, delay some apb clocks to meet the timing
        miso_delay_num = extra_dummy ? dev->timing_conf.timing_miso_delay : 0;
        miso_delay_mode = 0;
    }
    spi_ll_set_miso_delay(hw, miso_delay_mode, miso_delay_num);

    spi_ll_set_mosi_bitlen(hw, trans->tx_bitlen);

    if (dev->half_duplex) {
        spi_ll_set_miso_bitlen(hw, trans->rx_bitlen);
    } else {
        //rxlength is not used in full-duplex mode
        spi_ll_set_miso_bitlen(hw, trans->tx_bitlen);
    }

    //Configure bit sizes, load addr and command
    int cmdlen = trans->cmd_bits;
    int addrlen = trans->addr_bits;
    if (!dev->half_duplex && dev->cs_setup != 0) {
        /* The command and address phase is not compatible with cs_ena_pretrans
         * in full duplex mode.
         */
        cmdlen = 0;
        addrlen = 0;
    }

    spi_ll_set_addr_bitlen(hw, addrlen);
    spi_ll_set_command_bitlen(hw, cmdlen);

    spi_ll_set_command(hw, trans->cmd, cmdlen, dev->tx_lsbfirst);
    spi_ll_set_address(hw, trans->addr, addrlen, dev->tx_lsbfirst);

    //Configure keep active CS
    spi_ll_master_keep_cs(hw, trans->cs_keep_active);

    //Save the transaction attributes for internal usage.
    memcpy(&hal->trans_config, trans, sizeof(spi_hal_trans_config_t));
}
|
|
|
|
|
2023-08-31 11:17:40 +00:00
|
|
|
#if SOC_NON_CACHEABLE_OFFSET
|
|
|
|
#define ADDR_DMA_2_CPU(addr) ((typeof(addr))((uint32_t)(addr) + SOC_NON_CACHEABLE_OFFSET))
|
|
|
|
#define ADDR_CPU_2_DMA(addr) ((typeof(addr))((uint32_t)(addr) - SOC_NON_CACHEABLE_OFFSET))
|
|
|
|
#else
|
|
|
|
#define ADDR_DMA_2_CPU(addr) (addr)
|
|
|
|
#define ADDR_CPU_2_DMA(addr) (addr)
|
|
|
|
#endif
|
|
|
|
//TODO: IDF-6152, refactor spi hal layer
|
|
|
|
static void s_spi_hal_dma_desc_setup_link(spi_dma_desc_t *dmadesc, const void *data, int len, bool is_rx)
|
|
|
|
{
|
|
|
|
dmadesc = ADDR_DMA_2_CPU(dmadesc);
|
|
|
|
int n = 0;
|
|
|
|
while (len) {
|
|
|
|
int dmachunklen = len;
|
|
|
|
if (dmachunklen > DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED) {
|
|
|
|
dmachunklen = DMA_DESCRIPTOR_BUFFER_MAX_SIZE_4B_ALIGNED;
|
|
|
|
}
|
|
|
|
if (is_rx) {
|
|
|
|
//Receive needs DMA length rounded to next 32-bit boundary
|
|
|
|
dmadesc[n].dw0.size = (dmachunklen + 3) & (~3);
|
|
|
|
dmadesc[n].dw0.length = (dmachunklen + 3) & (~3);
|
|
|
|
} else {
|
|
|
|
dmadesc[n].dw0.size = dmachunklen;
|
|
|
|
dmadesc[n].dw0.length = dmachunklen;
|
|
|
|
}
|
|
|
|
dmadesc[n].buffer = (uint8_t *)data;
|
|
|
|
dmadesc[n].dw0.suc_eof = 0;
|
|
|
|
dmadesc[n].dw0.owner = 1;
|
|
|
|
dmadesc[n].next = ADDR_CPU_2_DMA(&dmadesc[n + 1]);
|
|
|
|
len -= dmachunklen;
|
|
|
|
data += dmachunklen;
|
|
|
|
n++;
|
|
|
|
}
|
|
|
|
dmadesc[n - 1].dw0.suc_eof = 1; //Mark last DMA desc as end of stream.
|
|
|
|
dmadesc[n - 1].next = NULL;
|
|
|
|
}
|
|
|
|
|
2020-09-09 02:21:49 +00:00
|
|
|
/**
 * Load the transaction data paths: set up DMA descriptor chains (or the CPU
 * work registers) for TX and RX, and enable the MOSI/MISO phases accordingly.
 *
 * Must be called after spi_hal_setup_trans(). The DMA reset / FIFO reset /
 * enable / start ordering below is required by the hardware — do not reorder.
 *
 * @param hal    HAL context (hardware instance, DMA channels and descriptors).
 * @param dev    Device configuration (duplex mode is consulted here).
 * @param trans  Transaction attributes (buffers and bit lengths).
 */
void spi_hal_prepare_data(spi_hal_context_t *hal, const spi_hal_dev_config_t *dev, const spi_hal_trans_config_t *trans)
{
    spi_dev_t *hw = hal->hw;

    //Fill DMA descriptors
    if (trans->rcv_buffer) {
        if (!hal->dma_enabled) {
            //No need to setup anything; we'll copy the result out of the work registers directly later.
        } else {
            // Build the RX descriptor chain; rx_bitlen is rounded up to whole bytes.
            s_spi_hal_dma_desc_setup_link(hal->dmadesc_rx, trans->rcv_buffer, ((trans->rx_bitlen + 7) / 8), true);

            spi_dma_ll_rx_reset(hal->dma_in, hal->rx_dma_chan);
            spi_ll_dma_rx_fifo_reset(hal->hw);
            spi_ll_infifo_full_clr(hal->hw);
            spi_ll_dma_rx_enable(hal->hw, 1);
            spi_dma_ll_rx_start(hal->dma_in, hal->rx_dma_chan, (lldesc_t *)hal->dmadesc_rx);
        }
    }
#if CONFIG_IDF_TARGET_ESP32
    else {
        //DMA temporary workaround: let RX DMA work somehow to avoid the issue in ESP32 v0/v1 silicon
        if (hal->dma_enabled && !dev->half_duplex) {
            spi_ll_dma_rx_enable(hal->hw, 1);
            spi_dma_ll_rx_start(hal->dma_in, hal->rx_dma_chan, 0);
        }
    }
#endif

    if (trans->send_buffer) {
        if (!hal->dma_enabled) {
            //Need to copy data to registers manually
            spi_ll_write_buffer(hw, trans->send_buffer, trans->tx_bitlen);
        } else {
            // Build the TX descriptor chain; tx_bitlen is rounded up to whole bytes.
            s_spi_hal_dma_desc_setup_link(hal->dmadesc_tx, trans->send_buffer, (trans->tx_bitlen + 7) / 8, false);

            spi_dma_ll_tx_reset(hal->dma_out, hal->tx_dma_chan);
            spi_ll_dma_tx_fifo_reset(hal->hw);
            spi_ll_outfifo_empty_clr(hal->hw);
            spi_ll_dma_tx_enable(hal->hw, 1);
            spi_dma_ll_tx_start(hal->dma_out, hal->tx_dma_chan, (lldesc_t *)hal->dmadesc_tx);
        }
    }

    //in ESP32 these registers should be configured after the DMA is set
    // Full-duplex RX also drives MOSI, hence the compound condition.
    if ((!dev->half_duplex && trans->rcv_buffer) || trans->send_buffer) {
        spi_ll_enable_mosi(hw, 1);
    } else {
        spi_ll_enable_mosi(hw, 0);
    }
    spi_ll_enable_miso(hw, (trans->rcv_buffer) ? 1 : 0);
}
|
|
|
|
|
|
|
|
/**
 * Kick off the transaction previously configured via the setup/prepare calls.
 *
 * Latches any pending register configuration into the peripheral first, then
 * triggers the user-defined transaction. Returns immediately; poll completion
 * with spi_hal_usr_is_done().
 *
 * @param hal  HAL context holding the hardware instance.
 */
void spi_hal_user_start(const spi_hal_context_t *hal)
{
    spi_ll_apply_config(hal->hw);
    spi_ll_user_start(hal->hw);
}
|
|
|
|
|
|
|
|
/**
 * Check whether the current transaction has finished.
 *
 * @param hal  HAL context holding the hardware instance.
 * @return true when the transaction started by spi_hal_user_start() is done.
 */
bool spi_hal_usr_is_done(const spi_hal_context_t *hal)
{
    return spi_ll_usr_is_done(hal->hw);
}
|
|
|
|
|
|
|
|
void spi_hal_fetch_result(const spi_hal_context_t *hal)
|
|
|
|
{
|
2020-09-09 02:21:49 +00:00
|
|
|
const spi_hal_trans_config_t *trans = &hal->trans_config;
|
|
|
|
|
|
|
|
if (trans->rcv_buffer && !hal->dma_enabled) {
|
2019-01-23 09:07:03 +00:00
|
|
|
//Need to copy from SPI regs to result buffer.
|
2020-09-09 02:21:49 +00:00
|
|
|
spi_ll_read_buffer(hal->hw, trans->rcv_buffer, trans->rx_bitlen);
|
2019-01-23 09:07:03 +00:00
|
|
|
}
|
|
|
|
}
|