1 /*
2 * Copyright (c) 2022 Renesas Electronics Corporation
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 /**
8 * @file usb_dc_smartbond.c
9 * @brief SmartBond USB device controller driver
10 *
11 */
12
13 #include <stdio.h>
14 #include <string.h>
15
16 #include <zephyr/drivers/clock_control.h>
17 #include <zephyr/drivers/usb/usb_dc.h>
18 #include <zephyr/init.h>
19 #include <zephyr/kernel.h>
20 #include <zephyr/usb/usb_device.h>
21
22 #include <DA1469xAB.h>
23 #include <soc.h>
24 #include <da1469x_clock.h>
25 #include <da1469x_pd.h>
26
27 #include <zephyr/logging/log.h>
28 #include <zephyr/drivers/gpio.h>
29 #include <zephyr/drivers/clock_control/smartbond_clock_control.h>
30 #include <zephyr/pm/policy.h>
31
32 #include <zephyr/drivers/dma.h>
33
34 LOG_MODULE_REGISTER(usb_dc_smartbond, CONFIG_USB_DRIVER_LOG_LEVEL);
35
36 /* USB device controller access from devicetree */
37 #define DT_DRV_COMPAT renesas_smartbond_usbd
38
39 #define USB_IRQ DT_INST_IRQ_BY_IDX(0, 0, irq)
40 #define USB_IRQ_PRI DT_INST_IRQ_BY_IDX(0, 0, priority)
41 #define VBUS_IRQ DT_INST_IRQ_BY_IDX(0, 1, irq)
42 #define VBUS_IRQ_PRI DT_INST_IRQ_BY_IDX(0, 1, priority)
43
44 /*
45 * Minimal transfer size needed to use DMA. For short transfers
46 * it may be simpler to just fill hardware FIFO with data instead
47 * of programming DMA registers.
48 */
49 #define DMA_MIN_TRANSFER_SIZE DT_INST_PROP(0, dma_min_transfer_size)
50 #define FIFO_READ_THRESHOLD DT_INST_PROP(0, fifo_read_threshold)
51
52 /* Size of hardware RX and TX FIFO. */
53 #define EP0_FIFO_SIZE 8
54 #define EP_FIFO_SIZE 64
55
56 #define EP0_OUT_BUF_SIZE EP0_FIFO_SIZE
57 #define EP1_OUT_BUF_SIZE DT_INST_PROP_BY_IDX(0, ep_out_buf_size, 1)
58 #define EP2_OUT_BUF_SIZE DT_INST_PROP_BY_IDX(0, ep_out_buf_size, 2)
59 #define EP3_OUT_BUF_SIZE DT_INST_PROP_BY_IDX(0, ep_out_buf_size, 3)
60
61 #define EP0_IDX 0
62 #define EP0_IN USB_CONTROL_EP_IN
63 #define EP0_OUT USB_CONTROL_EP_OUT
64 #define EP_MAX 4
65
66 /* EP OUT buffers */
67 static uint8_t ep0_out_buf[EP0_OUT_BUF_SIZE];
68 static uint8_t ep1_out_buf[EP1_OUT_BUF_SIZE];
69 static uint8_t ep2_out_buf[EP2_OUT_BUF_SIZE];
70 static uint8_t ep3_out_buf[EP3_OUT_BUF_SIZE];
71
72 static uint8_t *const ep_out_bufs[4] = {
73 ep0_out_buf, ep1_out_buf, ep2_out_buf, ep3_out_buf,
74 };
75
76 static const uint16_t ep_out_buf_size[4] = {
77 EP0_OUT_BUF_SIZE, EP1_OUT_BUF_SIZE, EP2_OUT_BUF_SIZE, EP3_OUT_BUF_SIZE,
78 };
79
80 /* Node functional states */
81 #define NFSR_NODE_RESET 0
82 #define NFSR_NODE_RESUME 1
83 #define NFSR_NODE_OPERATIONAL 2
84 #define NFSR_NODE_SUSPEND 3
85 /*
86 * Those two following states are added to allow going out of sleep mode
87 * using frame interrupt. On remove wakeup RESUME state must be kept for
88 * at least 1ms. It is accomplished by using FRAME interrupt that goes
89 * through those two fake states before entering OPERATIONAL state.
90 */
91 #define NFSR_NODE_WAKING (0x10 | (NFSR_NODE_RESUME))
92 #define NFSR_NODE_WAKING2 (0x20 | (NFSR_NODE_RESUME))
93
94 struct smartbond_ep_reg_set {
95 volatile uint32_t epc_in;
96 volatile uint32_t txd;
97 volatile uint32_t txs;
98 volatile uint32_t txc;
99 volatile uint32_t epc_out;
100 volatile uint32_t rxd;
101 volatile uint32_t rxs;
102 volatile uint32_t rxc;
103 };
104
105 static struct smartbond_ep_reg_set *const reg_sets[4] = {
106 (struct smartbond_ep_reg_set *)&USB->USB_EPC0_REG,
107 (struct smartbond_ep_reg_set *)&USB->USB_EPC1_REG,
108 (struct smartbond_ep_reg_set *)&USB->USB_EPC3_REG,
109 (struct smartbond_ep_reg_set *)&USB->USB_EPC5_REG,
110 };
111
112 struct smartbond_ep_state {
113 atomic_t busy;
114 uint8_t *buffer;
115 uint16_t total_len; /** Total length of current transfer */
116 uint16_t transferred; /** Bytes transferred so far */
117 uint16_t mps; /** Endpoint max packet size */
118 /** Packet size sent or received so far. It is used to modify transferred field
119 * after ACK is received or when filling ISO endpoint with size larger then
120 * FIFO size.
121 */
122 uint16_t last_packet_size;
123
124 usb_dc_ep_callback cb; /** Endpoint callback function */
125
126 uint8_t data1 : 1; /** DATA0/1 toggle bit 1 DATA1 is expected or transmitted */
127 uint8_t stall : 1; /** Endpoint is stalled */
128 uint8_t iso : 1; /** ISO endpoint */
129 uint8_t enabled : 1; /** Endpoint is enabled */
130 uint8_t ep_addr; /** EP address */
131 struct smartbond_ep_reg_set *regs;
132 };
133
134 static struct usb_smartbond_dma_cfg {
135 int tx_chan;
136 int rx_chan;
137 uint8_t tx_slot_mux;
138 uint8_t rx_slot_mux;
139 const struct device *tx_dev;
140 const struct device *rx_dev;
141 struct dma_config tx_cfg;
142 struct dma_config rx_cfg;
143 struct dma_block_config tx_block_cfg;
144 struct dma_block_config rx_block_cfg;
145 } usbd_dma_cfg = {
146 .tx_chan =
147 DT_DMAS_CELL_BY_NAME(DT_NODELABEL(usbd), tx, channel),
148 .tx_slot_mux =
149 DT_DMAS_CELL_BY_NAME(DT_NODELABEL(usbd), tx, config),
150 .tx_dev =
151 DEVICE_DT_GET(DT_DMAS_CTLR_BY_NAME(DT_NODELABEL(usbd), tx)),
152 .rx_chan =
153 DT_DMAS_CELL_BY_NAME(DT_NODELABEL(usbd), rx, channel),
154 .rx_slot_mux =
155 DT_DMAS_CELL_BY_NAME(DT_NODELABEL(usbd), rx, config),
156 .rx_dev =
157 DEVICE_DT_GET(DT_DMAS_CTLR_BY_NAME(DT_NODELABEL(usbd), rx)),
158 };
159
160 struct usb_dc_state {
161 bool vbus_present;
162 bool attached;
163 atomic_t clk_requested;
164 uint8_t nfsr;
165 usb_dc_status_callback status_cb;
166 struct smartbond_ep_state ep_state[2][4];
167 /** Bitmask of EP OUT endpoints that received data during interrupt */
168 uint8_t ep_out_data;
169 atomic_ptr_t dma_ep[2]; /** DMA used by channel */
170 };
171
172 static struct usb_dc_state dev_state;
173
174 /*
175 * DA146xx register fields and bit mask are very long. Filed masks repeat register names.
176 * Those convenience macros are a way to reduce complexity of register modification lines.
177 */
178 #define GET_BIT(val, field) (val & field ## _Msk) >> field ## _Pos
179 #define REG_GET_BIT(reg, field) (USB->reg & USB_ ## reg ## _ ## field ## _Msk)
180 #define REG_SET_BIT(reg, field) (USB->reg |= USB_ ## reg ## _ ## field ## _Msk)
181 #define REG_CLR_BIT(reg, field) (USB->reg &= ~USB_ ## reg ## _ ## field ## _Msk)
182 #define REG_SET_VAL(reg, field, val) \
183 (USB->reg = (USB->reg & ~USB_##reg##_##field##_Msk) | \
184 (val << USB_##reg##_##field##_Pos))
185
usb_smartbond_dma_validate(void)186 static int usb_smartbond_dma_validate(void)
187 {
188 /*
189 * DMA RX should be assigned an even number and
190 * DMA TX should be assigned the right next
191 * channel (odd number).
192 */
193 if (!(usbd_dma_cfg.tx_chan & 0x1) ||
194 (usbd_dma_cfg.rx_chan & 0x1) ||
195 (usbd_dma_cfg.tx_chan != (usbd_dma_cfg.rx_chan + 1))) {
196 LOG_ERR("Invalid RX/TX channel selection");
197 return -EINVAL;
198 }
199
200 if (usbd_dma_cfg.rx_slot_mux != usbd_dma_cfg.tx_slot_mux) {
201 LOG_ERR("TX/RX DMA slots mismatch");
202 return -EINVAL;
203 }
204
205 if (!device_is_ready(usbd_dma_cfg.tx_dev) ||
206 !device_is_ready(usbd_dma_cfg.rx_dev)) {
207 LOG_ERR("TX/RX DMA device is not ready");
208 return -ENODEV;
209 }
210
211 return 0;
212 }
213
usb_smartbond_dma_config(void)214 static int usb_smartbond_dma_config(void)
215 {
216 struct dma_config *tx = &usbd_dma_cfg.tx_cfg;
217 struct dma_config *rx = &usbd_dma_cfg.rx_cfg;
218 struct dma_block_config *tx_block = &usbd_dma_cfg.tx_block_cfg;
219 struct dma_block_config *rx_block = &usbd_dma_cfg.rx_block_cfg;
220
221 if (dma_request_channel(usbd_dma_cfg.rx_dev,
222 (void *)&usbd_dma_cfg.rx_chan) < 0) {
223 LOG_ERR("RX DMA channel is already occupied");
224 return -EIO;
225 }
226
227 if (dma_request_channel(usbd_dma_cfg.tx_dev,
228 (void *)&usbd_dma_cfg.tx_chan) < 0) {
229 LOG_ERR("TX DMA channel is already occupied");
230 return -EIO;
231 }
232
233 tx->channel_direction = MEMORY_TO_PERIPHERAL;
234 tx->dma_callback = NULL;
235 tx->user_data = NULL;
236 tx->block_count = 1;
237 tx->head_block = tx_block;
238
239 tx->error_callback_dis = 1;
240 /* DMA callback is not used */
241 tx->complete_callback_en = 1;
242
243 tx->dma_slot = usbd_dma_cfg.tx_slot_mux;
244 tx->channel_priority = 7;
245
246 /* Burst mode is not using when DREQ is one */
247 tx->source_burst_length = 1;
248 tx->dest_burst_length = 1;
249 /* USB is byte-oriented protocol */
250 tx->source_data_size = 1;
251 tx->dest_data_size = 1;
252
253 /* Do not change */
254 tx_block->dest_addr_adj = 0x2;
255 /* Incremental */
256 tx_block->source_addr_adj = 0x0;
257
258 /* Should reflect TX buffer */
259 tx_block->source_address = 0;
260 /* Should reflect USB TX FIFO. Temporarily assign an SRAM location. */
261 tx_block->dest_address = MCU_SYSRAM_M_BASE;
262 /* Should reflect total bytes to be transmitted */
263 tx_block->block_size = 0;
264
265 rx->channel_direction = PERIPHERAL_TO_MEMORY;
266 rx->dma_callback = NULL;
267 rx->user_data = NULL;
268 rx->block_count = 1;
269 rx->head_block = rx_block;
270
271 rx->error_callback_dis = 1;
272 /* DMA callback is not used */
273 rx->complete_callback_en = 1;
274
275 rx->dma_slot = usbd_dma_cfg.rx_slot_mux;
276 rx->channel_priority = 2;
277
278 /* Burst mode is not using when DREQ is one */
279 rx->source_burst_length = 1;
280 rx->dest_burst_length = 1;
281 /* USB is byte-oriented protocol */
282 rx->source_data_size = 1;
283 rx->dest_data_size = 1;
284
285 /* Do not change */
286 rx_block->source_addr_adj = 0x2;
287 /* Incremenetal */
288 rx_block->dest_addr_adj = 0x0;
289
290 /* Should reflect USB RX FIFO */
291 rx_block->source_address = 0;
292 /* Should reflect RX buffer. Temporarily assign an SRAM location. */
293 rx_block->dest_address = MCU_SYSRAM_M_BASE;
294 /* Should reflect total bytes to be received */
295 rx_block->block_size = 0;
296
297 if (dma_config(usbd_dma_cfg.rx_dev, usbd_dma_cfg.rx_chan, rx) < 0) {
298 LOG_ERR("RX DMA configuration failed");
299 return -EINVAL;
300 }
301
302 if (dma_config(usbd_dma_cfg.tx_dev, usbd_dma_cfg.tx_chan, tx) < 0) {
303 LOG_ERR("TX DMA configuration failed");
304 return -EINVAL;
305 }
306
307 return 0;
308 }
309
usb_smartbond_dma_deconfig(void)310 static void usb_smartbond_dma_deconfig(void)
311 {
312 dma_stop(usbd_dma_cfg.tx_dev, usbd_dma_cfg.tx_chan);
313 dma_stop(usbd_dma_cfg.rx_dev, usbd_dma_cfg.rx_chan);
314
315 dma_release_channel(usbd_dma_cfg.tx_dev, usbd_dma_cfg.tx_chan);
316 dma_release_channel(usbd_dma_cfg.rx_dev, usbd_dma_cfg.rx_chan);
317 }
318
usb_dc_get_ep_state(uint8_t ep)319 static struct smartbond_ep_state *usb_dc_get_ep_state(uint8_t ep)
320 {
321 uint8_t ep_idx = USB_EP_GET_IDX(ep);
322 uint8_t ep_dir = USB_EP_GET_DIR(ep) ? 1 : 0;
323
324 return (ep_idx < EP_MAX) ? &dev_state.ep_state[ep_dir][ep_idx] : NULL;
325 }
326
usb_dc_get_ep_out_state(uint8_t ep)327 static struct smartbond_ep_state *usb_dc_get_ep_out_state(uint8_t ep)
328 {
329 uint8_t ep_idx = USB_EP_GET_IDX(ep);
330
331 return (ep_idx < EP_MAX && USB_EP_DIR_IS_OUT(ep)) ?
332 &dev_state.ep_state[0][ep_idx] : NULL;
333 }
334
usb_dc_get_ep_in_state(uint8_t ep)335 static struct smartbond_ep_state *usb_dc_get_ep_in_state(uint8_t ep)
336 {
337 uint8_t ep_idx = USB_EP_GET_IDX(ep);
338
339 return ep_idx < EP_MAX || USB_EP_DIR_IS_IN(ep) ?
340 &dev_state.ep_state[1][ep_idx] : NULL;
341 }
342
dev_attached(void)343 static inline bool dev_attached(void)
344 {
345 return dev_state.attached;
346 }
347
dev_ready(void)348 static inline bool dev_ready(void)
349 {
350 return dev_state.vbus_present;
351 }
352
set_nfsr(uint8_t val)353 static void set_nfsr(uint8_t val)
354 {
355 dev_state.nfsr = val;
356 /*
357 * Write only lower 2 bits to register, higher bits are used
358 * to count down till OPERATIONAL state can be entered when
359 * remote wakeup activated.
360 */
361 USB->USB_NFSR_REG = val & 3;
362 }
363
fill_tx_fifo(struct smartbond_ep_state * ep_state)364 static void fill_tx_fifo(struct smartbond_ep_state *ep_state)
365 {
366 int remaining;
367 const uint8_t *src;
368 uint8_t ep_idx = USB_EP_GET_IDX(ep_state->ep_addr);
369 struct smartbond_ep_reg_set *regs = ep_state->regs;
370
371 src = &ep_state->buffer[ep_state->transferred];
372 remaining = ep_state->total_len - ep_state->transferred;
373 if (remaining > ep_state->mps - ep_state->last_packet_size) {
374 remaining = ep_state->mps - ep_state->last_packet_size;
375 }
376
377 /*
378 * Loop checks TCOUNT all the time since this value is saturated to 31
379 * and can't be read just once before.
380 */
381 while ((regs->txs & USB_USB_TXS1_REG_USB_TCOUNT_Msk) > 0 &&
382 remaining > 0) {
383 regs->txd = *src++;
384 ep_state->last_packet_size++;
385 remaining--;
386 }
387
388 if (ep_idx != 0) {
389 if (remaining > 0) {
390 /*
391 * Max packet size is set to value greater then FIFO.
392 * Enable fifo level warning to handle larger packets.
393 */
394 regs->txc |= (3 << USB_USB_TXC1_REG_USB_TFWL_Pos);
395 USB->USB_FWMSK_REG |=
396 BIT(ep_idx - 1 + USB_USB_FWMSK_REG_USB_M_TXWARN31_Pos);
397 } else {
398 regs->txc &= ~USB_USB_TXC1_REG_USB_TFWL_Msk;
399 USB->USB_FWMSK_REG &=
400 ~(BIT(ep_idx - 1 + USB_USB_FWMSK_REG_USB_M_TXWARN31_Pos));
401 /* Whole packet already in fifo, no need to
402 * refill it later. Mark last.
403 */
404 regs->txc |= USB_USB_TXC1_REG_USB_LAST_Msk;
405 }
406 }
407 }
408
try_allocate_dma(struct smartbond_ep_state * ep_state,uint8_t dir)409 static bool try_allocate_dma(struct smartbond_ep_state *ep_state, uint8_t dir)
410 {
411 uint8_t ep_idx = USB_EP_GET_IDX(ep_state->ep_addr);
412 uint8_t dir_ix = dir == USB_EP_DIR_OUT ? 0 : 1;
413
414 if (atomic_ptr_cas(&dev_state.dma_ep[dir_ix], NULL, ep_state)) {
415 if (dir == USB_EP_DIR_OUT) {
416 USB->USB_DMA_CTRL_REG =
417 (USB->USB_DMA_CTRL_REG & ~USB_USB_DMA_CTRL_REG_USB_DMA_RX_Msk) |
418 ((ep_idx - 1) << USB_USB_DMA_CTRL_REG_USB_DMA_RX_Pos);
419 } else {
420 USB->USB_DMA_CTRL_REG =
421 (USB->USB_DMA_CTRL_REG & ~USB_USB_DMA_CTRL_REG_USB_DMA_TX_Msk) |
422 ((ep_idx - 1) << USB_USB_DMA_CTRL_REG_USB_DMA_TX_Pos);
423 }
424 USB->USB_DMA_CTRL_REG |= USB_USB_DMA_CTRL_REG_USB_DMA_EN_Msk;
425 return true;
426 } else {
427 return false;
428 }
429 }
430
start_rx_dma(volatile void * src,void * dst,uint16_t size)431 static void start_rx_dma(volatile void *src, void *dst, uint16_t size)
432 {
433 if (dma_reload(usbd_dma_cfg.rx_dev, usbd_dma_cfg.rx_chan,
434 (uint32_t)src, (uint32_t)dst, size) < 0) {
435 LOG_ERR("Failed to reload RX DMA");
436 } else {
437 dma_start(usbd_dma_cfg.rx_dev, usbd_dma_cfg.rx_chan);
438 }
439 }
440
start_rx_packet(struct smartbond_ep_state * ep_state)441 static void start_rx_packet(struct smartbond_ep_state *ep_state)
442 {
443 uint8_t ep_idx = USB_EP_GET_IDX(ep_state->ep_addr);
444 struct smartbond_ep_reg_set *regs = ep_state->regs;
445
446 LOG_DBG("%02x", ep_state->ep_addr);
447
448 ep_state->last_packet_size = 0;
449 ep_state->transferred = 0;
450 ep_state->total_len = 0;
451
452 if (ep_state->mps > DMA_MIN_TRANSFER_SIZE) {
453 if (try_allocate_dma(ep_state, USB_EP_DIR_OUT)) {
454 start_rx_dma(®s->rxd,
455 ep_state->buffer,
456 ep_state->mps);
457 } else if (ep_state->mps > EP_FIFO_SIZE) {
458 /*
459 * Other endpoint is using DMA in that direction,
460 * fall back to interrupts.
461 * For endpoint size greater than FIFO size,
462 * enable FIFO level warning interrupt when FIFO
463 * has less than 17 bytes free.
464 */
465 regs->rxc |= USB_USB_RXC1_REG_USB_RFWL_Msk;
466 USB->USB_FWMSK_REG |=
467 BIT(ep_idx - 1 + USB_USB_FWMSK_REG_USB_M_RXWARN31_Pos);
468 }
469 } else if (ep_idx != 0) {
470 /* If max_packet_size would fit in FIFO no need
471 * for FIFO level warning interrupt.
472 */
473 regs->rxc &= ~USB_USB_RXC1_REG_USB_RFWL_Msk;
474 USB->USB_FWMSK_REG &= ~(BIT(ep_idx - 1 + USB_USB_FWMSK_REG_USB_M_RXWARN31_Pos));
475 }
476
477 regs->rxc |= USB_USB_RXC1_REG_USB_RX_EN_Msk;
478 }
479
start_tx_dma(void * src,volatile void * dst,uint16_t size)480 static void start_tx_dma(void *src, volatile void *dst, uint16_t size)
481 {
482 if (dma_reload(usbd_dma_cfg.tx_dev, usbd_dma_cfg.tx_chan,
483 (uint32_t)src, (uint32_t)dst, size) < 0) {
484 LOG_ERR("Failed to reload TX DMA");
485 } else {
486 dma_start(usbd_dma_cfg.tx_dev, usbd_dma_cfg.tx_chan);
487 }
488 }
489
start_tx_packet(struct smartbond_ep_state * ep_state)490 static void start_tx_packet(struct smartbond_ep_state *ep_state)
491 {
492 struct smartbond_ep_reg_set *regs = ep_state->regs;
493 uint16_t remaining = ep_state->total_len - ep_state->transferred;
494 uint16_t size = MIN(remaining, ep_state->mps);
495
496 LOG_DBG("%02x %d/%d", ep_state->ep_addr, size, remaining);
497
498 ep_state->last_packet_size = 0;
499
500 regs->txc = USB_USB_TXC1_REG_USB_FLUSH_Msk;
501 regs->txc = USB_USB_TXC1_REG_USB_IGN_ISOMSK_Msk;
502 if (ep_state->data1) {
503 regs->txc |= USB_USB_TXC1_REG_USB_TOGGLE_TX_Msk;
504 }
505
506 if (ep_state->ep_addr != EP0_IN &&
507 remaining > DMA_MIN_TRANSFER_SIZE &&
508 (uint32_t)(ep_state->buffer) >= CONFIG_SRAM_BASE_ADDRESS &&
509 try_allocate_dma(ep_state, USB_EP_DIR_IN)) {
510 /*
511 * Whole packet will be put in FIFO by DMA.
512 * Set LAST bit before start.
513 */
514 start_tx_dma(ep_state->buffer + ep_state->transferred,
515 ®s->txd, size);
516 regs->txc |= USB_USB_TXC1_REG_USB_LAST_Msk;
517 } else {
518 fill_tx_fifo(ep_state);
519 }
520
521 regs->txc |= USB_USB_TXC1_REG_USB_TX_EN_Msk;
522 }
523
read_rx_fifo(struct smartbond_ep_state * ep_state,uint16_t bytes_in_fifo)524 static uint16_t read_rx_fifo(struct smartbond_ep_state *ep_state,
525 uint16_t bytes_in_fifo)
526 {
527 struct smartbond_ep_reg_set *regs = ep_state->regs;
528 uint16_t remaining = ep_state->mps - ep_state->last_packet_size;
529 uint16_t receive_this_time = bytes_in_fifo;
530 uint8_t *buf = ep_state->buffer + ep_state->last_packet_size;
531
532 if (remaining < bytes_in_fifo) {
533 receive_this_time = remaining;
534 }
535
536 for (int i = 0; i < receive_this_time; ++i) {
537 buf[i] = regs->rxd;
538 }
539
540 ep_state->last_packet_size += receive_this_time;
541
542 return bytes_in_fifo - receive_this_time;
543 }
544
handle_ep0_rx(void)545 static void handle_ep0_rx(void)
546 {
547 int fifo_bytes;
548 uint32_t rxs0 = USB->USB_RXS0_REG;
549 struct smartbond_ep_state *ep0_out_state = usb_dc_get_ep_out_state(0);
550 struct smartbond_ep_state *ep0_in_state;
551
552 fifo_bytes = GET_BIT(rxs0, USB_USB_RXS0_REG_USB_RCOUNT);
553
554 if (rxs0 & USB_USB_RXS0_REG_USB_SETUP_Msk) {
555 ep0_in_state = usb_dc_get_ep_in_state(0);
556 read_rx_fifo(ep0_out_state, EP0_FIFO_SIZE);
557
558 ep0_out_state->stall = 0;
559 ep0_out_state->data1 = 1;
560 ep0_in_state->stall = 0;
561 ep0_in_state->data1 = 1;
562 REG_SET_BIT(USB_TXC0_REG, USB_TOGGLE_TX0);
563 REG_CLR_BIT(USB_EPC0_REG, USB_STALL);
564 LOG_DBG("Setup %02x %02x %02x %02x %02x %02x %02x %02x",
565 ep0_out_state->buffer[0],
566 ep0_out_state->buffer[1],
567 ep0_out_state->buffer[2],
568 ep0_out_state->buffer[3],
569 ep0_out_state->buffer[4],
570 ep0_out_state->buffer[5],
571 ep0_out_state->buffer[6],
572 ep0_out_state->buffer[7]);
573 ep0_out_state->cb(EP0_OUT, USB_DC_EP_SETUP);
574 } else {
575 if (GET_BIT(rxs0, USB_USB_RXS0_REG_USB_TOGGLE_RX0) !=
576 ep0_out_state->data1) {
577 /* Toggle bit does not match discard packet */
578 REG_SET_BIT(USB_RXC0_REG, USB_FLUSH);
579 ep0_out_state->last_packet_size = 0;
580 } else {
581 read_rx_fifo(ep0_out_state, fifo_bytes);
582 if (rxs0 & USB_USB_RXS0_REG_USB_RX_LAST_Msk) {
583 ep0_out_state->data1 ^= 1;
584 dev_state.ep_out_data |= 1;
585 }
586 }
587 }
588 }
589
handle_ep0_tx(void)590 static void handle_ep0_tx(void)
591 {
592 uint32_t txs0;
593 struct smartbond_ep_state *ep0_in_state = usb_dc_get_ep_in_state(0);
594 struct smartbond_ep_state *ep0_out_state = usb_dc_get_ep_out_state(0);
595 struct smartbond_ep_reg_set *regs = ep0_in_state->regs;
596
597 txs0 = regs->txs;
598
599 LOG_DBG("%02x %02x", ep0_in_state->ep_addr, txs0);
600
601 if (GET_BIT(txs0, USB_USB_TXS0_REG_USB_TX_DONE)) {
602 /* ACK received */
603 if (GET_BIT(txs0, USB_USB_TXS0_REG_USB_ACK_STAT)) {
604 ep0_in_state->transferred +=
605 ep0_in_state->last_packet_size;
606 ep0_in_state->last_packet_size = 0;
607 ep0_in_state->data1 ^= 1;
608 REG_SET_VAL(USB_TXC0_REG, USB_TOGGLE_TX0,
609 ep0_in_state->data1);
610 if (ep0_in_state->transferred == ep0_in_state->total_len) {
611 /* For control endpoint get ready for ACK stage
612 * from host.
613 */
614 ep0_out_state = usb_dc_get_ep_out_state(EP0_IDX);
615 ep0_out_state->transferred = 0;
616 ep0_out_state->total_len = 0;
617 ep0_out_state->last_packet_size = 0;
618 REG_SET_BIT(USB_RXC0_REG, USB_RX_EN);
619
620 atomic_clear(&ep0_in_state->busy);
621 ep0_in_state->cb(EP0_IN, USB_DC_EP_DATA_IN);
622 return;
623 }
624 } else {
625 /* Start from the beginning */
626 ep0_in_state->last_packet_size = 0;
627 }
628 start_tx_packet(ep0_in_state);
629 }
630 }
631
handle_epx_rx_ev(uint8_t ep_idx)632 static void handle_epx_rx_ev(uint8_t ep_idx)
633 {
634 uint32_t rxs;
635 int fifo_bytes;
636 struct smartbond_ep_state *ep_state = usb_dc_get_ep_out_state(ep_idx);
637 struct smartbond_ep_reg_set *regs = ep_state->regs;
638
639 do {
640 rxs = regs->rxs;
641
642 if (GET_BIT(rxs, USB_USB_RXS1_REG_USB_RX_ERR)) {
643 regs->rxc |= USB_USB_RXC1_REG_USB_FLUSH_Msk;
644 ep_state->last_packet_size = 0;
645 if (dev_state.dma_ep[0] == ep_state) {
646 /* Stop DMA */
647 dma_stop(usbd_dma_cfg.rx_dev, usbd_dma_cfg.rx_chan);
648 /* Restart DMA since packet was dropped,
649 * all parameters should still work.
650 */
651 dma_start(usbd_dma_cfg.rx_dev, usbd_dma_cfg.rx_chan);
652 }
653 break;
654 }
655
656 if (dev_state.dma_ep[0] == ep_state) {
657 struct dma_status rx_dma_status;
658
659 dma_get_status(usbd_dma_cfg.rx_dev, usbd_dma_cfg.rx_chan, &rx_dma_status);
660 /*
661 * Disable DMA and update last_packet_size
662 * with what DMA reported.
663 */
664 dma_stop(usbd_dma_cfg.rx_dev, usbd_dma_cfg.rx_chan);
665 ep_state->last_packet_size = rx_dma_status.total_copied;
666
667 /*
668 * When DMA did not finished (packet was smaller then MPS),
669 * dma_idx holds exact number of bytes transmitted. When DMA
670 * finished value in dma_idx is one less then actual number of
671 * transmitted bytes.
672 */
673 if (ep_state->last_packet_size ==
674 (rx_dma_status.total_copied + rx_dma_status.pending_length)) {
675 ep_state->last_packet_size++;
676 }
677 /* Release DMA to use by other endpoints. */
678 dev_state.dma_ep[0] = NULL;
679 }
680 fifo_bytes = GET_BIT(rxs, USB_USB_RXS1_REG_USB_RXCOUNT);
681 /*
682 * FIFO maybe empty if DMA read it before or
683 * it's final iteration and function already read all
684 * that was to read.
685 */
686 if (fifo_bytes > 0) {
687 fifo_bytes = read_rx_fifo(ep_state, fifo_bytes);
688 }
689
690 if (GET_BIT(rxs, USB_USB_RXS1_REG_USB_RX_LAST)) {
691 if (!ep_state->iso &&
692 GET_BIT(rxs, USB_USB_RXS1_REG_USB_TOGGLE_RX) !=
693 ep_state->data1) {
694 /* Toggle bit does not match discard packet */
695 regs->rxc |= USB_USB_RXC1_REG_USB_FLUSH_Msk;
696 ep_state->last_packet_size = 0;
697 /* Re-enable reception */
698 start_rx_packet(ep_state);
699 } else {
700 ep_state->data1 ^= 1;
701 atomic_clear(&ep_state->busy);
702 dev_state.ep_out_data |= BIT(ep_idx);
703 }
704 }
705 } while (fifo_bytes > FIFO_READ_THRESHOLD);
706 }
707
handle_rx_ev(void)708 static void handle_rx_ev(void)
709 {
710 if (USB->USB_RXEV_REG & BIT(0)) {
711 handle_epx_rx_ev(1);
712 }
713
714 if (USB->USB_RXEV_REG & BIT(1)) {
715 handle_epx_rx_ev(2);
716 }
717
718 if (USB->USB_RXEV_REG & BIT(2)) {
719 handle_epx_rx_ev(3);
720 }
721 }
722
handle_epx_tx_ev(struct smartbond_ep_state * ep_state)723 static void handle_epx_tx_ev(struct smartbond_ep_state *ep_state)
724 {
725 uint32_t txs;
726 struct smartbond_ep_reg_set *regs = ep_state->regs;
727
728 txs = regs->txs;
729
730 if (GET_BIT(txs, USB_USB_TXS1_REG_USB_TX_DONE)) {
731 if (dev_state.dma_ep[1] == ep_state) {
732 struct dma_status tx_dma_status;
733
734 dma_get_status(usbd_dma_cfg.tx_dev, usbd_dma_cfg.tx_chan, &tx_dma_status);
735 /*
736 * Disable DMA and update last_packet_size with what
737 * DMA reported.
738 */
739 dma_stop(usbd_dma_cfg.tx_dev, usbd_dma_cfg.tx_chan);
740 ep_state->last_packet_size = tx_dma_status.total_copied + 1;
741 /* Release DMA to used by other endpoints. */
742 dev_state.dma_ep[1] = NULL;
743 }
744
745 if (GET_BIT(txs, USB_USB_TXS1_REG_USB_ACK_STAT)) {
746 /* ACK received, update transfer state and DATA0/1 bit */
747 ep_state->transferred += ep_state->last_packet_size;
748 ep_state->last_packet_size = 0;
749 ep_state->data1 ^= 1;
750
751 if (ep_state->transferred == ep_state->total_len) {
752 atomic_clear(&ep_state->busy);
753 ep_state->cb(ep_state->ep_addr, USB_DC_EP_DATA_IN);
754 return;
755 }
756 } else if (regs->epc_in & USB_USB_EPC1_REG_USB_STALL_Msk) {
757 /*
758 * TX_DONE also indicates that STALL packet was just sent,
759 * there is no point to put anything into transmit FIFO.
760 * It could result in empty packet being scheduled.
761 */
762 return;
763 }
764 }
765
766 if (txs & USB_USB_TXS1_REG_USB_TX_URUN_Msk) {
767 LOG_DBG("EP 0x%02x FIFO underrun\n", ep_state->ep_addr);
768 }
769 /* Start next or repeated packet. */
770 start_tx_packet(ep_state);
771 }
772
handle_tx_ev(void)773 static void handle_tx_ev(void)
774 {
775 if (USB->USB_TXEV_REG & BIT(0)) {
776 handle_epx_tx_ev(usb_dc_get_ep_in_state(1));
777 }
778 if (USB->USB_TXEV_REG & BIT(1)) {
779 handle_epx_tx_ev(usb_dc_get_ep_in_state(2));
780 }
781 if (USB->USB_TXEV_REG & BIT(2)) {
782 handle_epx_tx_ev(usb_dc_get_ep_in_state(3));
783 }
784 }
785
check_reset_end(uint32_t alt_ev)786 static uint32_t check_reset_end(uint32_t alt_ev)
787 {
788 if (dev_state.nfsr == NFSR_NODE_RESET) {
789 if (GET_BIT(alt_ev, USB_USB_ALTEV_REG_USB_RESET)) {
790 /*
791 * Could be still in reset, but since USB_M_RESET is
792 * disabled it can be also old reset state that was not
793 * cleared yet.
794 * If (after reading USB_ALTEV_REG register again)
795 * bit is cleared reset state just ended.
796 * Keep non-reset bits combined from two previous
797 * ALTEV read and one from the next line.
798 */
799 alt_ev = (alt_ev & ~USB_USB_ALTEV_REG_USB_RESET_Msk) |
800 USB->USB_ALTEV_REG;
801 }
802
803 if (GET_BIT(alt_ev, USB_USB_ALTEV_REG_USB_RESET) == 0) {
804 USB->USB_ALTMSK_REG = USB_USB_ALTMSK_REG_USB_M_RESET_Msk |
805 USB_USB_ALTEV_REG_USB_SD3_Msk;
806 if (dev_state.ep_state[0][0].buffer != NULL) {
807 USB->USB_MAMSK_REG |=
808 USB_USB_MAMSK_REG_USB_M_EP0_RX_Msk;
809 }
810 LOG_INF("Set operational %02x", USB->USB_MAMSK_REG);
811 set_nfsr(NFSR_NODE_OPERATIONAL);
812 dev_state.status_cb(USB_DC_CONNECTED, NULL);
813 }
814 }
815 return alt_ev;
816 }
817
handle_bus_reset(void)818 static void handle_bus_reset(void)
819 {
820 uint32_t alt_ev;
821
822 USB->USB_NFSR_REG = 0;
823 USB->USB_FAR_REG = 0x80;
824 USB->USB_ALTMSK_REG = 0;
825 USB->USB_NFSR_REG = NFSR_NODE_RESET;
826 USB->USB_TXMSK_REG = 0;
827 USB->USB_RXMSK_REG = 0;
828 set_nfsr(NFSR_NODE_RESET);
829
830 for (int i = 0; i < EP_MAX; ++i) {
831 dev_state.ep_state[1][i].buffer = NULL;
832 dev_state.ep_state[1][i].transferred = 0;
833 dev_state.ep_state[1][i].total_len = 0;
834 atomic_clear(&dev_state.ep_state[1][i].busy);
835 }
836
837 LOG_INF("send USB_DC_RESET");
838 dev_state.status_cb(USB_DC_RESET, NULL);
839 USB->USB_DMA_CTRL_REG = 0;
840
841 USB->USB_MAMSK_REG = USB_USB_MAMSK_REG_USB_M_INTR_Msk |
842 USB_USB_MAMSK_REG_USB_M_FRAME_Msk |
843 USB_USB_MAMSK_REG_USB_M_WARN_Msk |
844 USB_USB_MAMSK_REG_USB_M_ALT_Msk |
845 USB_USB_MAMSK_REG_USB_M_EP0_RX_Msk |
846 USB_USB_MAMSK_REG_USB_M_EP0_TX_Msk;
847 USB->USB_ALTMSK_REG = USB_USB_ALTMSK_REG_USB_M_RESUME_Msk;
848 alt_ev = USB->USB_ALTEV_REG;
849 check_reset_end(alt_ev);
850 }
851
usb_clock_on(void)852 static void usb_clock_on(void)
853 {
854 if (atomic_cas(&dev_state.clk_requested, 0, 1)) {
855 clock_control_on(DEVICE_DT_GET(DT_NODELABEL(osc)),
856 (clock_control_subsys_rate_t)SMARTBOND_CLK_USB);
857 }
858 }
859
usb_clock_off(void)860 static void usb_clock_off(void)
861 {
862 if (atomic_cas(&dev_state.clk_requested, 1, 0)) {
863 clock_control_off(DEVICE_DT_GET(DT_NODELABEL(osc)),
864 (clock_control_subsys_rate_t)SMARTBOND_CLK_USB);
865 }
866 }
867
handle_alt_ev(void)868 static void handle_alt_ev(void)
869 {
870 struct smartbond_ep_state *ep_state;
871 uint32_t alt_ev = USB->USB_ALTEV_REG;
872
873 if (USB->USB_NFSR_REG == NFSR_NODE_SUSPEND) {
874 usb_clock_on();
875 }
876 alt_ev = check_reset_end(alt_ev);
877 if (GET_BIT(alt_ev, USB_USB_ALTEV_REG_USB_RESET) &&
878 dev_state.nfsr != NFSR_NODE_RESET) {
879 handle_bus_reset();
880 } else if (GET_BIT(alt_ev, USB_USB_ALTEV_REG_USB_RESUME)) {
881 if (USB->USB_NFSR_REG == NFSR_NODE_SUSPEND) {
882 set_nfsr(NFSR_NODE_OPERATIONAL);
883 if (dev_state.ep_state[0][0].buffer != NULL) {
884 USB->USB_MAMSK_REG |=
885 USB_USB_MAMSK_REG_USB_M_EP0_RX_Msk;
886 }
887 USB->USB_ALTMSK_REG = USB_USB_ALTMSK_REG_USB_M_RESET_Msk |
888 USB_USB_ALTMSK_REG_USB_M_SD3_Msk;
889 /* Re-enable reception of endpoint with pending transfer */
890 for (int ep_num = 1; ep_num < EP_MAX; ++ep_num) {
891 ep_state = usb_dc_get_ep_out_state(ep_num);
892 if (ep_state->enabled) {
893 start_rx_packet(ep_state);
894 }
895 }
896 dev_state.status_cb(USB_DC_RESUME, NULL);
897 }
898 } else if (GET_BIT(alt_ev, USB_USB_ALTEV_REG_USB_SD3)) {
899 set_nfsr(NFSR_NODE_SUSPEND);
900 USB->USB_ALTMSK_REG =
901 USB_USB_ALTMSK_REG_USB_M_RESET_Msk |
902 USB_USB_ALTMSK_REG_USB_M_RESUME_Msk;
903 usb_clock_off();
904 dev_state.status_cb(USB_DC_SUSPEND, NULL);
905 }
906 }
907
handle_epx_tx_warn_ev(uint8_t ep_idx)908 static void handle_epx_tx_warn_ev(uint8_t ep_idx)
909 {
910 fill_tx_fifo(usb_dc_get_ep_in_state(ep_idx));
911 }
912
handle_fifo_warning(void)913 static void handle_fifo_warning(void)
914 {
915 uint32_t fifo_warning = USB->USB_FWEV_REG;
916
917 if (fifo_warning & BIT(0)) {
918 handle_epx_tx_warn_ev(1);
919 }
920
921 if (fifo_warning & BIT(1)) {
922 handle_epx_tx_warn_ev(2);
923 }
924
925 if (fifo_warning & BIT(2)) {
926 handle_epx_tx_warn_ev(3);
927 }
928
929 if (fifo_warning & BIT(4)) {
930 handle_epx_rx_ev(1);
931 }
932
933 if (fifo_warning & BIT(5)) {
934 handle_epx_rx_ev(2);
935 }
936
937 if (fifo_warning & BIT(6)) {
938 handle_epx_rx_ev(3);
939 }
940 }
941
handle_ep0_nak(void)942 static void handle_ep0_nak(void)
943 {
944 uint32_t ep0_nak = USB->USB_EP0_NAK_REG;
945
946 if (REG_GET_BIT(USB_EPC0_REG, USB_STALL)) {
947 if (GET_BIT(ep0_nak, USB_USB_EP0_NAK_REG_USB_EP0_INNAK)) {
948 /*
949 * EP0 is stalled and NAK was sent, it means that
950 * RX is enabled. Disable RX for now.
951 */
952 REG_CLR_BIT(USB_RXC0_REG, USB_RX_EN);
953 REG_SET_BIT(USB_TXC0_REG, USB_TX_EN);
954 }
955
956 if (GET_BIT(ep0_nak, USB_USB_EP0_NAK_REG_USB_EP0_OUTNAK)) {
957 REG_SET_BIT(USB_RXC0_REG, USB_RX_EN);
958 }
959 } else {
960 if (REG_GET_BIT(USB_RXC0_REG, USB_RX_EN) == 0 &&
961 GET_BIT(ep0_nak, USB_USB_EP0_NAK_REG_USB_EP0_OUTNAK)) {
962 /* NAK over EP0 was sent, receive should conclude */
963 USB->USB_TXC0_REG = USB_USB_TXC0_REG_USB_FLUSH_Msk;
964 REG_SET_BIT(USB_RXC0_REG, USB_RX_EN);
965 REG_CLR_BIT(USB_MAMSK_REG, USB_M_EP0_NAK);
966 }
967 }
968 }
969
usb_dc_smartbond_isr(void)970 static void usb_dc_smartbond_isr(void)
971 {
972 uint32_t int_status = USB->USB_MAEV_REG & USB->USB_MAMSK_REG;
973
974 if (GET_BIT(int_status, USB_USB_MAEV_REG_USB_WARN)) {
975 handle_fifo_warning();
976 }
977
978 if (GET_BIT(int_status, USB_USB_MAEV_REG_USB_CH_EV)) {
979 /* For now just clear interrupt */
980 (void)USB->USB_CHARGER_STAT_REG;
981 }
982
983 if (GET_BIT(int_status, USB_USB_MAEV_REG_USB_EP0_TX)) {
984 handle_ep0_tx();
985 }
986
987 if (GET_BIT(int_status, USB_USB_MAEV_REG_USB_EP0_RX)) {
988 handle_ep0_rx();
989 }
990
991 if (GET_BIT(int_status, USB_USB_MAEV_REG_USB_EP0_NAK)) {
992 handle_ep0_nak();
993 }
994
995 if (GET_BIT(int_status, USB_USB_MAEV_REG_USB_RX_EV)) {
996 handle_rx_ev();
997 }
998
999 if (GET_BIT(int_status, USB_USB_MAEV_REG_USB_NAK)) {
1000 (void)USB->USB_NAKEV_REG;
1001 }
1002
1003 if (GET_BIT(int_status, USB_USB_MAEV_REG_USB_FRAME)) {
1004 if (dev_state.nfsr == NFSR_NODE_RESET) {
1005 /*
1006 * During reset FRAME interrupt is enabled to periodically
1007 * check when reset state ends.
1008 * FRAME interrupt is generated every 1ms without host sending
1009 * actual SOF.
1010 */
1011 check_reset_end(USB_USB_ALTEV_REG_USB_RESET_Msk);
1012 } else if (dev_state.nfsr == NFSR_NODE_WAKING) {
1013 /* No need to call set_nfsr, just set state */
1014 dev_state.nfsr = NFSR_NODE_WAKING2;
1015 } else if (dev_state.nfsr == NFSR_NODE_WAKING2) {
1016 /* No need to call set_nfsr, just set state */
1017 dev_state.nfsr = NFSR_NODE_RESUME;
1018 LOG_DBG("dev_state.nfsr = NFSR_NODE_RESUME %02x",
1019 USB->USB_MAMSK_REG);
1020 } else if (dev_state.nfsr == NFSR_NODE_RESUME) {
1021 set_nfsr(NFSR_NODE_OPERATIONAL);
1022 if (dev_state.ep_state[0][0].buffer != NULL) {
1023 USB->USB_MAMSK_REG |= USB_USB_MAMSK_REG_USB_M_EP0_RX_Msk;
1024 }
1025 LOG_DBG("Set operational %02x", USB->USB_MAMSK_REG);
1026 } else {
1027 USB->USB_MAMSK_REG &= ~USB_USB_MAMSK_REG_USB_M_FRAME_Msk;
1028 }
1029 }
1030
1031 if (GET_BIT(int_status, USB_USB_MAEV_REG_USB_TX_EV)) {
1032 handle_tx_ev();
1033 }
1034
1035 if (GET_BIT(int_status, USB_USB_MAEV_REG_USB_ALT)) {
1036 handle_alt_ev();
1037 }
1038
1039 for (int i = 0; dev_state.ep_out_data && i < 4; ++i) {
1040 uint8_t mask = BIT(i);
1041
1042 if (dev_state.ep_out_data & mask) {
1043 dev_state.ep_out_data ^= mask;
1044 dev_state.ep_state[0][i].cb(dev_state.ep_state[0][i].ep_addr,
1045 USB_DC_EP_DATA_OUT);
1046 }
1047 }
1048 }
1049
1050 /**
1051 * USB functionality can be disabled from HOST and DEVICE side.
1052 * Host side is indicated by VBUS line.
1053 * Device side is decided by pair of calls usb_dc_attach()/usb_dc_detach,
1054 * USB will only work when application calls usb_dc_attach() and VBUS is present.
1055 * When both conditions are not met USB clock (PLL) is released, and peripheral
1056 * remain in reset state.
1057 */
usb_change_state(bool attached,bool vbus_present)1058 static void usb_change_state(bool attached, bool vbus_present)
1059 {
1060 if (dev_state.attached == attached && dev_state.vbus_present == vbus_present) {
1061 return;
1062 }
1063
1064 if (attached && vbus_present) {
1065 dev_state.attached = true;
1066 dev_state.vbus_present = true;
1067 /*
1068 * Prevent transition to standby, this greatly reduces
1069 * IRQ response time
1070 */
1071 pm_policy_state_lock_get(PM_STATE_STANDBY, PM_ALL_SUBSTATES);
1072 usb_smartbond_dma_config();
1073 usb_clock_on();
1074 dev_state.status_cb(USB_DC_CONNECTED, NULL);
1075 USB->USB_MCTRL_REG = USB_USB_MCTRL_REG_USBEN_Msk;
1076 USB->USB_NFSR_REG = 0;
1077 USB->USB_FAR_REG = 0x80;
1078 USB->USB_TXMSK_REG = 0;
1079 USB->USB_RXMSK_REG = 0;
1080
1081 USB->USB_MAMSK_REG = USB_USB_MAMSK_REG_USB_M_INTR_Msk |
1082 USB_USB_MAMSK_REG_USB_M_ALT_Msk |
1083 USB_USB_MAMSK_REG_USB_M_WARN_Msk;
1084 USB->USB_ALTMSK_REG = USB_USB_ALTMSK_REG_USB_M_RESET_Msk |
1085 USB_USB_ALTEV_REG_USB_SD3_Msk;
1086
1087 USB->USB_MCTRL_REG = USB_USB_MCTRL_REG_USBEN_Msk |
1088 USB_USB_MCTRL_REG_USB_NAT_Msk;
1089 } else if (dev_state.attached && dev_state.vbus_present) {
1090 /*
1091 * USB was previously in use now either VBUS is gone or application
1092 * requested detach, put it down
1093 */
1094 dev_state.attached = attached;
1095 dev_state.vbus_present = vbus_present;
1096 /*
1097 * It's imperative that USB_NAT bit-field is updated with the
1098 * USBEN bit-field being set. As such, zeroing the control
1099 * register at once will result in leaving the USB tranceivers
1100 * in a floating state. Such an action, will induce incorect
1101 * behavior for subsequent charger detection operations and given
1102 * that the device does not enter the sleep state (thus powering off
1103 * PD_SYS and resetting the controller along with its tranceivers).
1104 */
1105 REG_CLR_BIT(USB_MCTRL_REG, USB_NAT);
1106 USB->USB_MCTRL_REG = 0;
1107 usb_clock_off();
1108 dev_state.status_cb(USB_DC_DISCONNECTED, NULL);
1109 usb_smartbond_dma_deconfig();
1110 /* Allow standby USB not in use or not connected */
1111 pm_policy_state_lock_put(PM_STATE_STANDBY, PM_ALL_SUBSTATES);
1112 } else {
1113 /* USB still not activated, keep track of what's on and off */
1114 dev_state.attached = attached;
1115 dev_state.vbus_present = vbus_present;
1116 }
1117 }
1118
usb_dc_smartbond_vbus_isr(void)1119 static void usb_dc_smartbond_vbus_isr(void)
1120 {
1121 LOG_DBG("VBUS_ISR");
1122
1123 CRG_TOP->VBUS_IRQ_CLEAR_REG = 1;
1124 usb_change_state(dev_state.attached,
1125 (CRG_TOP->ANA_STATUS_REG &
1126 CRG_TOP_ANA_STATUS_REG_VBUS_AVAILABLE_Msk) != 0);
1127 }
1128
usb_init(void)1129 static int usb_init(void)
1130 {
1131 int ret = 0;
1132
1133 BUILD_ASSERT(DT_DMAS_HAS_NAME(DT_NODELABEL(usbd), tx), "Unasigned TX DMA");
1134 BUILD_ASSERT(DT_DMAS_HAS_NAME(DT_NODELABEL(usbd), rx), "Unasigned RX DMA");
1135
1136 ret = usb_smartbond_dma_validate();
1137 if (ret != 0) {
1138 return ret;
1139 }
1140
1141 for (int i = 0; i < EP_MAX; ++i) {
1142 dev_state.ep_state[0][i].regs = reg_sets[i];
1143 dev_state.ep_state[0][i].ep_addr = i | USB_EP_DIR_OUT;
1144 dev_state.ep_state[0][i].buffer = ep_out_bufs[i];
1145 dev_state.ep_state[1][i].regs = reg_sets[i];
1146 dev_state.ep_state[1][i].ep_addr = i | USB_EP_DIR_IN;
1147 }
1148
1149 /* Max packet size for EP0 is hardwired to 8 */
1150 dev_state.ep_state[0][0].mps = EP0_FIFO_SIZE;
1151 dev_state.ep_state[1][0].mps = EP0_FIFO_SIZE;
1152
1153 IRQ_CONNECT(VBUS_IRQ, VBUS_IRQ_PRI, usb_dc_smartbond_vbus_isr, 0, 0);
1154 CRG_TOP->VBUS_IRQ_CLEAR_REG = 1;
1155 NVIC_ClearPendingIRQ(VBUS_IRQ);
1156 /* Both connect and disconnect needs to be handled */
1157 CRG_TOP->VBUS_IRQ_MASK_REG = CRG_TOP_VBUS_IRQ_MASK_REG_VBUS_IRQ_EN_FALL_Msk |
1158 CRG_TOP_VBUS_IRQ_MASK_REG_VBUS_IRQ_EN_RISE_Msk;
1159 irq_enable(VBUS_IRQn);
1160
1161 IRQ_CONNECT(USB_IRQ, USB_IRQ_PRI, usb_dc_smartbond_isr, 0, 0);
1162 irq_enable(USB_IRQ);
1163
1164 return ret;
1165 }
1166
usb_dc_ep_disable(const uint8_t ep)1167 int usb_dc_ep_disable(const uint8_t ep)
1168 {
1169 struct smartbond_ep_state *ep_state = usb_dc_get_ep_state(ep);
1170
1171 LOG_DBG("%02x", ep);
1172
1173 if (ep_state == NULL) {
1174 LOG_ERR("Not valid endpoint: %02x", ep);
1175 return -EINVAL;
1176 }
1177
1178 ep_state->enabled = 0;
1179 if (ep_state->ep_addr == EP0_IN) {
1180 REG_SET_BIT(USB_TXC0_REG, USB_IGN_IN);
1181 } else if (ep_state->ep_addr == EP0_OUT) {
1182 USB->USB_RXC0_REG = USB_USB_RXC0_REG_USB_IGN_SETUP_Msk |
1183 USB_USB_RXC0_REG_USB_IGN_OUT_Msk;
1184 } else if (USB_EP_DIR_IS_OUT(ep)) {
1185 ep_state->regs->epc_out &= ~USB_USB_EPC2_REG_USB_EP_EN_Msk;
1186 } else {
1187 ep_state->regs->epc_in &= ~USB_USB_EPC1_REG_USB_EP_EN_Msk;
1188 }
1189
1190 return 0;
1191 }
1192
usb_dc_ep_mps(const uint8_t ep)1193 int usb_dc_ep_mps(const uint8_t ep)
1194 {
1195 struct smartbond_ep_state *ep_state = usb_dc_get_ep_state(ep);
1196
1197 if (ep_state == NULL) {
1198 LOG_ERR("Not valid endpoint: %02x", ep);
1199 return -EINVAL;
1200 }
1201
1202 return ep_state->mps;
1203 }
1204
usb_dc_ep_read_continue(uint8_t ep)1205 int usb_dc_ep_read_continue(uint8_t ep)
1206 {
1207 struct smartbond_ep_state *ep_state = usb_dc_get_ep_out_state(ep);
1208
1209 if (ep_state == NULL) {
1210 LOG_ERR("Not valid endpoint: %02x", ep);
1211 return -EINVAL;
1212 }
1213
1214 LOG_DBG("ep 0x%02x", ep);
1215
1216 /* If no more data in the buffer, start a new read transaction.
1217 * DataOutStageCallback will called on transaction complete.
1218 */
1219 if (ep_state->transferred >= ep_state->last_packet_size) {
1220 start_rx_packet(ep_state);
1221 }
1222
1223 return 0;
1224 }
1225
usb_dc_ep_read_wait(uint8_t ep,uint8_t * data,uint32_t max_data_len,uint32_t * read_bytes)1226 int usb_dc_ep_read_wait(uint8_t ep, uint8_t *data, uint32_t max_data_len,
1227 uint32_t *read_bytes)
1228 {
1229 struct smartbond_ep_state *ep_state = usb_dc_get_ep_out_state(ep);
1230 uint16_t read_count;
1231
1232 if (ep_state == NULL) {
1233 LOG_ERR("Invalid Endpoint %x", ep);
1234 return -EINVAL;
1235 }
1236
1237 LOG_DBG("ep 0x%02x, %u bytes, %p", ep, max_data_len, (void *)data);
1238
1239 read_count = ep_state->last_packet_size - ep_state->transferred;
1240
1241 /* When both buffer and max data to read are zero, just ignore reading
1242 * and return available data in buffer. Otherwise, return data
1243 * previously stored in the buffer.
1244 */
1245 if (data) {
1246 read_count = MIN(read_count, max_data_len);
1247 memcpy(data, ep_state->buffer + ep_state->transferred, read_count);
1248 ep_state->transferred += read_count;
1249 } else if (max_data_len) {
1250 LOG_ERR("Wrong arguments");
1251 }
1252
1253 if (read_bytes) {
1254 *read_bytes = read_count;
1255 }
1256
1257 return 0;
1258 }
1259
usb_dc_ep_read(const uint8_t ep,uint8_t * const data,const uint32_t max_data_len,uint32_t * const read_bytes)1260 int usb_dc_ep_read(const uint8_t ep, uint8_t *const data,
1261 const uint32_t max_data_len, uint32_t *const read_bytes)
1262 {
1263 if (usb_dc_ep_read_wait(ep, data, max_data_len, read_bytes) != 0) {
1264 return -EINVAL;
1265 }
1266
1267 if (usb_dc_ep_read_continue(ep) != 0) {
1268 return -EINVAL;
1269 }
1270
1271 return 0;
1272 }
1273
usb_dc_ep_check_cap(const struct usb_dc_ep_cfg_data * const cfg)1274 int usb_dc_ep_check_cap(const struct usb_dc_ep_cfg_data *const cfg)
1275 {
1276 uint8_t ep_idx = USB_EP_GET_IDX(cfg->ep_addr);
1277
1278 LOG_DBG("ep %x, mps %d, type %d", cfg->ep_addr, cfg->ep_mps, cfg->ep_type);
1279
1280 if ((cfg->ep_type == USB_DC_EP_CONTROL && ep_idx != 0) ||
1281 (cfg->ep_type != USB_DC_EP_CONTROL && ep_idx == 0)) {
1282 LOG_ERR("invalid endpoint configuration");
1283 return -EINVAL;
1284 }
1285
1286 if (ep_idx > 3) {
1287 LOG_ERR("endpoint address out of range");
1288 return -EINVAL;
1289 }
1290
1291 if (ep_out_buf_size[ep_idx] < cfg->ep_mps) {
1292 LOG_ERR("endpoint size too big");
1293 return -EINVAL;
1294 }
1295
1296 return 0;
1297 }
1298
usb_dc_ep_set_callback(const uint8_t ep,const usb_dc_ep_callback cb)1299 int usb_dc_ep_set_callback(const uint8_t ep, const usb_dc_ep_callback cb)
1300 {
1301 struct smartbond_ep_state *ep_state = usb_dc_get_ep_state(ep);
1302
1303 LOG_DBG("%02x %p", ep, (void *)cb);
1304
1305 if (ep_state == NULL) {
1306 LOG_ERR("Not valid endpoint: %02x", ep);
1307 return -EINVAL;
1308 }
1309
1310 ep_state->cb = cb;
1311
1312 return 0;
1313 }
1314
usb_dc_set_status_callback(const usb_dc_status_callback cb)1315 void usb_dc_set_status_callback(const usb_dc_status_callback cb)
1316 {
1317 dev_state.status_cb = cb;
1318
1319 LOG_DBG("%p", cb);
1320
1321 /* Manually call IRQ handler in case when VBUS is already present */
1322 usb_dc_smartbond_vbus_isr();
1323 }
1324
usb_dc_reset(void)1325 int usb_dc_reset(void)
1326 {
1327 int ret;
1328
1329 LOG_DBG("");
1330
1331 if (!dev_attached() || !dev_ready()) {
1332 return -ENODEV;
1333 }
1334
1335 ret = usb_dc_detach();
1336 if (ret) {
1337 return ret;
1338 }
1339
1340 ret = usb_dc_attach();
1341 if (ret) {
1342 return ret;
1343 }
1344
1345 return 0;
1346 }
1347
usb_dc_set_address(const uint8_t addr)1348 int usb_dc_set_address(const uint8_t addr)
1349 {
1350 LOG_DBG("%d", addr);
1351
1352 /* Set default address for one ZLP */
1353 USB->USB_EPC0_REG = USB_USB_EPC0_REG_USB_DEF_Msk;
1354 USB->USB_FAR_REG = (addr & USB_USB_FAR_REG_USB_AD_Msk) |
1355 USB_USB_FAR_REG_USB_AD_EN_Msk;
1356
1357 return 0;
1358 }
1359
usb_dc_ep_clear_stall(const uint8_t ep)1360 int usb_dc_ep_clear_stall(const uint8_t ep)
1361 {
1362 uint8_t ep_idx = USB_EP_GET_IDX(ep);
1363 uint8_t ep_dir = USB_EP_GET_DIR(ep);
1364 struct smartbond_ep_state *ep_state = usb_dc_get_ep_state(ep);
1365 struct smartbond_ep_reg_set *regs;
1366
1367 LOG_DBG("%02x", ep);
1368
1369 if (ep_state == NULL) {
1370 LOG_ERR("Not valid endpoint: %02x", ep);
1371 return -EINVAL;
1372 }
1373 regs = ep_state->regs;
1374
1375 /* Clear stall is called in response to Clear Feature ENDPOINT_HALT,
1376 * reset toggle
1377 */
1378 ep_state->stall = false;
1379 ep_state->data1 = 0;
1380
1381 if (ep_dir == USB_EP_DIR_OUT) {
1382 regs->epc_out &= ~USB_USB_EPC1_REG_USB_STALL_Msk;
1383 } else {
1384 regs->epc_in &= ~USB_USB_EPC1_REG_USB_STALL_Msk;
1385 }
1386
1387 if (ep_idx == 0) {
1388 REG_CLR_BIT(USB_MAMSK_REG, USB_M_EP0_NAK);
1389 }
1390 return 0;
1391 }
1392
usb_dc_ep_enable(const uint8_t ep)1393 int usb_dc_ep_enable(const uint8_t ep)
1394 {
1395 struct smartbond_ep_state *ep_state = usb_dc_get_ep_state(ep);
1396 uint8_t ep_idx = USB_EP_GET_IDX(ep);
1397 uint8_t ep_dir = USB_EP_GET_DIR(ep);
1398
1399 if (ep_state == NULL) {
1400 LOG_ERR("Not valid endpoint: %02x", ep);
1401 return -EINVAL;
1402 }
1403
1404 LOG_DBG("%02x", ep);
1405
1406 if (ep_state->ep_addr == EP0_IN) {
1407 USB->USB_MAMSK_REG |= USB_USB_MAMSK_REG_USB_M_EP0_TX_Msk;
1408 } else if (ep_state->ep_addr == EP0_OUT) {
1409 USB->USB_MAMSK_REG |= USB_USB_MAMSK_REG_USB_M_EP0_RX_Msk;
1410 /* Clear USB_IGN_SETUP and USB_IGN_OUT */
1411 USB->USB_RXC0_REG = 0;
1412 ep_state->last_packet_size = 0;
1413 ep_state->transferred = 0;
1414 ep_state->total_len = 0;
1415 } else if (ep_dir == USB_EP_DIR_OUT) {
1416 USB->USB_RXMSK_REG |= 0x11 << (ep_idx - 1);
1417 REG_SET_BIT(USB_MAMSK_REG, USB_M_RX_EV);
1418 ep_state->regs->epc_out |= USB_USB_EPC1_REG_USB_EP_EN_Msk;
1419
1420 if (ep_state->busy) {
1421 return 0;
1422 }
1423
1424 start_rx_packet(ep_state);
1425 } else {
1426 USB->USB_TXMSK_REG |= 0x11 << (ep_idx - 1);
1427 REG_SET_BIT(USB_MAMSK_REG, USB_M_TX_EV);
1428 ep_state->regs->epc_in |= USB_USB_EPC2_REG_USB_EP_EN_Msk;
1429 }
1430 ep_state->enabled = 1;
1431
1432 return 0;
1433 }
1434
usb_dc_ep_configure(const struct usb_dc_ep_cfg_data * const ep_cfg)1435 int usb_dc_ep_configure(const struct usb_dc_ep_cfg_data *const ep_cfg)
1436 {
1437 struct smartbond_ep_state *ep_state = usb_dc_get_ep_state(ep_cfg->ep_addr);
1438 uint8_t ep_idx = USB_EP_GET_IDX(ep_cfg->ep_addr);
1439 uint8_t ep_dir = USB_EP_GET_DIR(ep_cfg->ep_addr);
1440 uint8_t iso_mask;
1441
1442 if (ep_state == NULL) {
1443 return -EINVAL;
1444 }
1445
1446 LOG_DBG("%02x", ep_cfg->ep_addr);
1447
1448 ep_state->iso = ep_cfg->ep_type == USB_DC_EP_ISOCHRONOUS;
1449 iso_mask = (ep_state->iso ? USB_USB_EPC2_REG_USB_ISO_Msk : 0);
1450
1451 if (ep_cfg->ep_type == USB_DC_EP_CONTROL) {
1452 ep_state->mps = EP0_FIFO_SIZE;
1453 } else {
1454 ep_state->mps = ep_cfg->ep_mps;
1455 }
1456
1457 ep_state->data1 = 0;
1458
1459 if (ep_dir == USB_EP_DIR_OUT) {
1460 if (ep_cfg->ep_mps > ep_out_buf_size[ep_idx]) {
1461 return -EINVAL;
1462 }
1463
1464 ep_state->regs->epc_out = ep_idx | iso_mask;
1465 } else {
1466 ep_state->regs->epc_in = ep_idx | iso_mask;
1467 }
1468
1469 return 0;
1470 }
1471
usb_dc_detach(void)1472 int usb_dc_detach(void)
1473 {
1474 LOG_DBG("Detach");
1475
1476 usb_change_state(false, dev_state.vbus_present);
1477
1478 return 0;
1479 }
1480
usb_dc_attach(void)1481 int usb_dc_attach(void)
1482 {
1483 LOG_INF("Attach");
1484
1485 usb_change_state(true, dev_state.vbus_present);
1486
1487 return 0;
1488 }
1489
usb_dc_ep_write(const uint8_t ep,const uint8_t * const data,const uint32_t data_len,uint32_t * const ret_bytes)1490 int usb_dc_ep_write(const uint8_t ep, const uint8_t *const data, const uint32_t data_len,
1491 uint32_t *const ret_bytes)
1492 {
1493 struct smartbond_ep_state *ep_state = usb_dc_get_ep_state(ep);
1494
1495 if (ep_state == NULL) {
1496 LOG_ERR("%02x no ep_state", ep);
1497 return -EINVAL;
1498 }
1499
1500 LOG_DBG("%02x %d bytes", ep, (int)data_len);
1501 if (!atomic_cas(&ep_state->busy, 0, 1)) {
1502 LOG_DBG("%02x transfer already in progress", ep);
1503 return -EAGAIN;
1504 }
1505
1506 ep_state->buffer = (uint8_t *)data;
1507 ep_state->transferred = 0;
1508 ep_state->total_len = data_len;
1509 ep_state->last_packet_size = 0;
1510
1511 if (ep == EP0_IN) {
1512 /* RX has priority over TX to send packet RX needs to be off */
1513 REG_CLR_BIT(USB_RXC0_REG, USB_RX_EN);
1514 /* Handle case when device expect to send more data and
1515 * host already send ZLP to confirm reception (that means
1516 * that it will no longer try to read).
1517 * Enable EP0_NAK.
1518 */
1519 (void)USB->USB_EP0_NAK_REG;
1520 REG_SET_BIT(USB_MAMSK_REG, USB_M_EP0_NAK);
1521 }
1522 start_tx_packet(ep_state);
1523
1524 if (ret_bytes) {
1525 *ret_bytes = data_len;
1526 }
1527
1528 return 0;
1529 }
1530
usb_dc_ep_set_stall(const uint8_t ep)1531 int usb_dc_ep_set_stall(const uint8_t ep)
1532 {
1533 uint8_t ep_idx = USB_EP_GET_IDX(ep);
1534 uint8_t ep_dir = USB_EP_GET_DIR(ep);
1535 struct smartbond_ep_state *ep_state = usb_dc_get_ep_state(ep);
1536 struct smartbond_ep_reg_set *regs;
1537
1538 LOG_DBG("%02x", ep);
1539
1540 if (!dev_attached() || !dev_ready()) {
1541 return -ENODEV;
1542 }
1543
1544 if (ep_state == NULL) {
1545 LOG_ERR("Not valid endpoint: %02x", ep);
1546 return -EINVAL;
1547 }
1548
1549 regs = ep_state->regs;
1550 ep_state->stall = 1;
1551
1552 if (ep_idx == 0) {
1553 /* EP0 has just one registers to control stall for IN and OUT */
1554 if (ep_dir == USB_EP_DIR_OUT) {
1555 regs->rxc = USB_USB_RXC0_REG_USB_RX_EN_Msk;
1556 REG_SET_BIT(USB_EPC0_REG, USB_STALL);
1557 } else {
1558 regs->rxc = 0;
1559 regs->txc = USB_USB_TXC0_REG_USB_TX_EN_Msk;
1560 REG_SET_BIT(USB_EPC0_REG, USB_STALL);
1561 }
1562 } else {
1563 if (ep_dir == USB_EP_DIR_OUT) {
1564 regs->epc_out |= USB_USB_EPC1_REG_USB_STALL_Msk;
1565 regs->rxc |= USB_USB_RXC1_REG_USB_RX_EN_Msk;
1566 } else {
1567 regs->epc_in |= USB_USB_EPC1_REG_USB_STALL_Msk;
1568 regs->txc |= USB_USB_TXC1_REG_USB_TX_EN_Msk | USB_USB_TXC1_REG_USB_LAST_Msk;
1569 }
1570 }
1571 return 0;
1572 }
1573
usb_dc_ep_is_stalled(const uint8_t ep,uint8_t * const stalled)1574 int usb_dc_ep_is_stalled(const uint8_t ep, uint8_t *const stalled)
1575 {
1576 struct smartbond_ep_state *ep_state = usb_dc_get_ep_state(ep);
1577
1578 if (!dev_attached() || !dev_ready()) {
1579 return -ENODEV;
1580 }
1581
1582 if (NULL == ep_state || NULL == stalled) {
1583 return -EINVAL;
1584 }
1585
1586 *stalled = ep_state->stall;
1587
1588 return 0;
1589 }
1590
usb_dc_ep_halt(const uint8_t ep)1591 int usb_dc_ep_halt(const uint8_t ep)
1592 {
1593 return usb_dc_ep_set_stall(ep);
1594 }
1595
usb_dc_ep_flush(const uint8_t ep)1596 int usb_dc_ep_flush(const uint8_t ep)
1597 {
1598 struct smartbond_ep_state *ep_state = usb_dc_get_ep_state(ep);
1599
1600 if (ep_state == NULL) {
1601 LOG_ERR("Not valid endpoint: %02x", ep);
1602 return -EINVAL;
1603 }
1604
1605 LOG_ERR("Not implemented");
1606
1607 return 0;
1608 }
1609
1610 SYS_INIT(usb_init, POST_KERNEL, CONFIG_KERNEL_INIT_PRIORITY_DEVICE);
1611