1 /*
2  * SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 
7 #pragma once
8 
9 #ifdef __cplusplus
10 extern "C" {
11 #endif
12 
13 #include <stdint.h>
14 #include <stdbool.h>
15 #include "soc/cp_dma_struct.h"
16 
17 #define CP_DMA_LL_EVENT_RX_DONE       (1 << 0)
18 #define CP_DMA_LL_EVENT_RX_EOF        (1 << 1)
19 #define CP_DMA_LL_EVENT_TX_DONE       (1 << 2)
20 #define CP_DMA_LL_EVENT_TX_EOF        (1 << 3)
21 #define CP_DMA_LL_EVENT_RX_DESC_ERR   (1 << 4)
22 #define CP_DMA_LL_EVENT_TX_DESC_ERR   (1 << 5)
23 #define CP_DMA_LL_EVENT_RX_DESC_EMPTY (1 << 6)
24 #define CP_DMA_LL_EVENT_TX_TOTAL_EOF  (1 << 7)
25 #define CP_DMA_LL_EVENT_ALL           (0xFF)
26 
27 /**
28  * Copy DMA firstly reads data to be transferred from internal RAM,
29  * stores the data into DMA FIFO via an outlink,
30  * and then writes the data to the destination internal RAM via an inlink.
31  */
32 
/**
 * @brief Reset the inlink (RX) descriptor state machine.
 */
static inline void cp_dma_ll_reset_in_link(cp_dma_dev_t *dev)
{
    // Pulse the reset bit: assert then immediately de-assert
    dev->dma_conf.dma_in_rst = 1;
    dev->dma_conf.dma_in_rst = 0;
}
38 
/**
 * @brief Reset the outlink (TX) descriptor state machine.
 */
static inline void cp_dma_ll_reset_out_link(cp_dma_dev_t *dev)
{
    // Pulse the reset bit: assert then immediately de-assert
    dev->dma_conf.dma_out_rst = 1;
    dev->dma_conf.dma_out_rst = 0;
}
44 
/**
 * @brief Reset the DMA data FIFO.
 */
static inline void cp_dma_ll_reset_fifo(cp_dma_dev_t *dev)
{
    // Pulse the reset bit: assert then immediately de-assert
    dev->dma_conf.dma_fifo_rst = 1;
    dev->dma_conf.dma_fifo_rst = 0;
}
50 
/**
 * @brief Reset the DMA command FIFO.
 */
static inline void cp_dma_ll_reset_cmd_fifo(cp_dma_dev_t *dev)
{
    // Pulse the reset bit: assert then immediately de-assert
    dev->dma_conf.dma_cmdfifo_rst = 1;
    dev->dma_conf.dma_cmdfifo_rst = 0;
}
56 
/**
 * @brief Enable or disable hardware checking of the descriptor "owner" bit.
 *
 * @param dev    Peripheral register base
 * @param enable true to make the DMA engine check descriptor ownership
 *
 * NOTE(review): regardless of @p enable, this also turns on automatic
 * write-back of the out-descriptor owner bit and writes 0 to the expected
 * owner fields of both directions — presumably the owner encoding the
 * hardware expects; confirm against the TRM.
 */
static inline void cp_dma_ll_enable_owner_check(cp_dma_dev_t *dev, bool enable)
{
    dev->dma_conf.dma_check_owner = enable;
    dev->dma_conf.dma_out_auto_wrback = 1;
    dev->dma_conf.dma_out_owner = 0;
    dev->dma_conf.dma_in_owner = 0;
}
64 
/**
 * @brief Enable or disable the Copy DMA module clock.
 */
static inline void cp_dma_ll_enable_clock(cp_dma_dev_t *dev, bool enable)
{
    dev->dma_conf.dma_clk_en = enable;
}
69 
/**
 * @brief Enable or disable the interrupt sources selected by @p mask.
 *
 * @param dev    Peripheral register base
 * @param mask   OR of CP_DMA_LL_EVENT_* bits
 * @param enable true to enable the selected interrupts, false to disable them
 */
static inline void cp_dma_ll_enable_intr(cp_dma_dev_t *dev, uint32_t mask, bool enable)
{
    // One read of the enable register, one write back — same
    // read-modify-write access pattern as |= / &= would produce.
    uint32_t ena = dev->dma_int_ena.val;
    dev->dma_int_ena.val = enable ? (ena | mask) : (ena & ~mask);
}
78 
/**
 * @brief Read the masked interrupt status register.
 *
 * @return OR of the currently active CP_DMA_LL_EVENT_* bits
 *
 * always_inline: safe to call from an IRAM interrupt handler context.
 */
static inline __attribute__((always_inline)) uint32_t cp_dma_ll_get_intr_status(cp_dma_dev_t *dev)
{
    return dev->dma_int_st.val;
}
83 
/**
 * @brief Clear the interrupt status bits selected by @p mask.
 *
 * Writes @p mask to the interrupt-clear register (write-1-to-clear).
 *
 * always_inline: safe to call from an IRAM interrupt handler context.
 */
static inline __attribute__((always_inline)) void cp_dma_ll_clear_intr_status(cp_dma_dev_t *dev, uint32_t mask)
{
    dev->dma_int_clr.val = mask;
}
88 
/**
 * @brief Set the base address of the first TX (outlink) descriptor.
 *
 * @param address Physical address of the first descriptor in the out chain
 */
static inline void cp_dma_ll_tx_set_descriptor_base_addr(cp_dma_dev_t *dev, uint32_t address)
{
    dev->dma_out_link.dma_outlink_addr = address;
}
93 
/**
 * @brief Set the base address of the first RX (inlink) descriptor.
 *
 * @param address Physical address of the first descriptor in the in chain
 */
static inline void cp_dma_ll_rx_set_descriptor_base_addr(cp_dma_dev_t *dev, uint32_t address)
{
    dev->dma_in_link.dma_inlink_addr = address;
}
98 
/**
 * @brief Start or stop the TX (outlink) engine.
 *
 * @param enable true to start fetching out descriptors, false to stop
 */
static inline void cp_dma_ll_start_tx(cp_dma_dev_t *dev, bool enable)
{
    if (!enable) {
        dev->dma_out_link.dma_outlink_stop = 1; // self-clearing bit
        return;
    }
    dev->dma_out_link.dma_outlink_start = 1; // self-clearing bit
}
107 
/**
 * @brief Start or stop the RX (inlink) engine.
 *
 * @param enable true to start fetching in descriptors, false to stop
 */
static inline void cp_dma_ll_start_rx(cp_dma_dev_t *dev, bool enable)
{
    if (!enable) {
        dev->dma_in_link.dma_inlink_stop = 1; // self-clearing bit
        return;
    }
    dev->dma_in_link.dma_inlink_start = 1; // self-clearing bit
}
116 
/**
 * @brief Resume the TX (outlink) engine, continuing from where it stopped.
 */
static inline void cp_dma_ll_restart_tx(cp_dma_dev_t *dev)
{
    dev->dma_out_link.dma_outlink_restart = 1; // cleared automatically by HW
}
121 
/**
 * @brief Resume the RX (inlink) engine, continuing from where it stopped.
 */
static inline void cp_dma_ll_restart_rx(cp_dma_dev_t *dev)
{
    dev->dma_in_link.dma_inlink_restart = 1; // cleared automatically by HW
}
126 
/**
 * @brief Get the address of the RX descriptor that raised the last
 *        successful-EOF event.
 */
static inline uint32_t cp_dma_ll_get_rx_eof_descriptor_address(cp_dma_dev_t *dev)
{
    return dev->dma_in_eof_des_addr.dma_in_suc_eof_des_addr;
}
132 
/**
 * @brief Get the address of the TX descriptor that raised the last EOF event.
 */
static inline uint32_t cp_dma_ll_get_tx_eof_descriptor_address(cp_dma_dev_t *dev)
{
    return dev->dma_out_eof_des_addr.dma_out_eof_des_addr;
}
138 
/**
 * @brief Read the raw TX (out) channel status register.
 */
static inline uint32_t cp_dma_ll_get_tx_status(cp_dma_dev_t *dev)
{
    return dev->dma_out_st.val;
}
143 
/**
 * @brief Read the raw RX (in) channel status register.
 */
static inline uint32_t cp_dma_ll_get_rx_status(cp_dma_dev_t *dev)
{
    return dev->dma_in_st.val;
}
148 
149 #ifdef __cplusplus
150 }
151 #endif
152