1 /*
2  * Copyright (c) 2022 Nordic Semiconductor ASA
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 
7 #define DT_DRV_COMPAT zephyr_cdc_acm_uart
8 
9 #include <zephyr/init.h>
10 #include <zephyr/kernel.h>
11 #include <zephyr/drivers/uart.h>
12 #include <zephyr/sys/ring_buffer.h>
13 #include <zephyr/sys/byteorder.h>
14 
15 #include <zephyr/usb/usbd.h>
16 #include <zephyr/usb/usb_ch9.h>
17 #include <zephyr/usb/class/usb_cdc.h>
18 
19 #include <zephyr/drivers/usb/udc.h>
20 
21 #include "usbd_msg.h"
22 
23 #include <zephyr/logging/log.h>
24 /* Prevent endless recursive logging loop and warn user about it */
25 #if defined(CONFIG_USBD_CDC_ACM_LOG_LEVEL) && CONFIG_USBD_CDC_ACM_LOG_LEVEL != LOG_LEVEL_NONE
26 #define CHOSEN_CONSOLE DT_NODE_HAS_COMPAT(DT_CHOSEN(zephyr_console), zephyr_cdc_acm_uart)
27 #define CHOSEN_SHELL   DT_NODE_HAS_COMPAT(DT_CHOSEN(zephyr_shell_uart), zephyr_cdc_acm_uart)
28 #if (CHOSEN_CONSOLE && defined(CONFIG_LOG_BACKEND_UART)) || \
29 	(CHOSEN_SHELL && defined(CONFIG_SHELL_LOG_BACKEND))
30 #warning "USBD_CDC_ACM_LOG_LEVEL forced to LOG_LEVEL_NONE"
31 #undef CONFIG_USBD_CDC_ACM_LOG_LEVEL
32 #define CONFIG_USBD_CDC_ACM_LOG_LEVEL LOG_LEVEL_NONE
33 #endif
34 #endif
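/*
 * Illustrative only: the guard above takes effect when the devicetree routes
 * the console or shell to a CDC ACM instance while the matching log backend
 * is enabled, for example (node label is application specific):
 *
 *	chosen {
 *		zephyr,console = &cdc_acm_uart0;
 *	};
 */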
35 LOG_MODULE_REGISTER(usbd_cdc_acm, CONFIG_USBD_CDC_ACM_LOG_LEVEL);
36 
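/*
 * Bulk endpoint buffer pool: two buffers per enabled instance (one for the
 * bulk IN and one for the bulk OUT direction), sized to the 512-byte
 * high-speed bulk MPS. The per-buffer udc_buf_info user data carries the
 * endpoint address, see cdc_acm_buf_alloc().
 */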
37 UDC_BUF_POOL_DEFINE(cdc_acm_ep_pool,
38 		    DT_NUM_INST_STATUS_OKAY(DT_DRV_COMPAT) * 2,
39 		    512, sizeof(struct udc_buf_info), NULL);
40 
41 #define CDC_ACM_DEFAULT_LINECODING	{sys_cpu_to_le32(115200), 0, 0, 8}
42 #define CDC_ACM_DEFAULT_INT_EP_MPS	16
43 #define CDC_ACM_INTERVAL_DEFAULT	10000UL
44 #define CDC_ACM_FS_INT_EP_INTERVAL	USB_FS_INT_EP_INTERVAL(10000U)
45 #define CDC_ACM_HS_INT_EP_INTERVAL	USB_HS_INT_EP_INTERVAL(10000U)
46 
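/* Bit positions used in the cdc_acm_uart_data state atomic bitmap */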
47 #define CDC_ACM_CLASS_ENABLED		0
48 #define CDC_ACM_CLASS_SUSPENDED		1
49 #define CDC_ACM_IRQ_RX_ENABLED		2
50 #define CDC_ACM_IRQ_TX_ENABLED		3
51 #define CDC_ACM_RX_FIFO_BUSY		4
52 #define CDC_ACM_TX_FIFO_BUSY		5
53 
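/*
 * Single work queue shared by all CDC ACM instances; it is started by
 * usbd_cdc_acm_init_wq() through SYS_INIT() at the end of this file.
 */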
54 static struct k_work_q cdc_acm_work_q;
55 static K_KERNEL_STACK_DEFINE(cdc_acm_stack,
56 			     CONFIG_USBD_CDC_ACM_STACK_SIZE);
57 
58 struct cdc_acm_uart_fifo {
59 	struct ring_buf *rb;
60 	bool irq;
61 	bool altered;
62 };
63 
64 struct usbd_cdc_acm_desc {
65 	struct usb_association_descriptor iad;
66 	struct usb_if_descriptor if0;
67 	struct cdc_header_descriptor if0_header;
68 	struct cdc_cm_descriptor if0_cm;
69 	struct cdc_acm_descriptor if0_acm;
70 	struct cdc_union_descriptor if0_union;
71 	struct usb_ep_descriptor if0_int_ep;
72 	struct usb_ep_descriptor if0_hs_int_ep;
73 
74 	struct usb_if_descriptor if1;
75 	struct usb_ep_descriptor if1_in_ep;
76 	struct usb_ep_descriptor if1_out_ep;
77 	struct usb_ep_descriptor if1_hs_in_ep;
78 	struct usb_ep_descriptor if1_hs_out_ep;
79 
80 	struct usb_desc_header nil_desc;
81 };
82 
83 struct cdc_acm_uart_config {
84 	/* Pointer to the associated USBD class node */
85 	struct usbd_class_data *c_data;
86 	/* Pointer to the interface description node or NULL */
87 	struct usbd_desc_node *const if_desc_data;
88 	/* Pointer to the class interface descriptors */
89 	struct usbd_cdc_acm_desc *const desc;
90 	const struct usb_desc_header **const fs_desc;
91 	const struct usb_desc_header **const hs_desc;
92 };
93 
94 struct cdc_acm_uart_data {
95 	const struct device *dev;
96 	/* Line Coding Structure */
97 	struct cdc_acm_line_coding line_coding;
98 	/* SetControlLineState bitmap */
99 	uint16_t line_state;
100 	/* Serial state bitmap */
101 	uint16_t serial_state;
102 	/* UART actual configuration */
103 	struct uart_config uart_cfg;
104 	/* UART actual RTS state */
105 	bool line_state_rts;
106 	/* UART actual DTR state */
107 	bool line_state_dtr;
108 	/* When flow_ctrl is set, poll out is blocked when the buffer is full,
109 	 * roughly emulating flow control.
110 	 */
111 	bool flow_ctrl;
112 	/* Used to enqueue a ZLP transfer when the previous IN transfer length
113 	 * was a multiple of the endpoint MPS and no more data is added to
114 	 * the TX FIFO during the user callback execution.
115 	 */
116 	bool zlp_needed;
117 	/* UART API IRQ callback */
118 	uart_irq_callback_user_data_t cb;
119 	/* UART API user callback data */
120 	void *cb_data;
121 	/* UART API IRQ callback work */
122 	struct k_work irq_cb_work;
123 	struct cdc_acm_uart_fifo rx_fifo;
124 	struct cdc_acm_uart_fifo tx_fifo;
125 	/* USBD CDC ACM TX fifo work */
126 	struct k_work_delayable tx_fifo_work;
127 	/* USBD CDC ACM RX fifo work */
128 	struct k_work rx_fifo_work;
129 	atomic_t state;
130 	struct k_sem notif_sem;
131 };
132 
133 static void cdc_acm_irq_rx_enable(const struct device *dev);
134 
struct net_buf *cdc_acm_buf_alloc(const uint8_t ep)
136 {
137 	struct net_buf *buf = NULL;
138 	struct udc_buf_info *bi;
139 
140 	buf = net_buf_alloc(&cdc_acm_ep_pool, K_NO_WAIT);
141 	if (!buf) {
142 		return NULL;
143 	}
144 
145 	bi = udc_get_buf_info(buf);
146 	bi->ep = ep;
147 
148 	return buf;
149 }
150 
static ALWAYS_INLINE int cdc_acm_work_submit(struct k_work *work)
152 {
153 	return k_work_submit_to_queue(&cdc_acm_work_q, work);
154 }
155 
static ALWAYS_INLINE int cdc_acm_work_schedule(struct k_work_delayable *work,
157 					       k_timeout_t delay)
158 {
159 	return k_work_schedule_for_queue(&cdc_acm_work_q, work, delay);
160 }
161 
static ALWAYS_INLINE bool check_wq_ctx(const struct device *dev)
163 {
164 	return k_current_get() == k_work_queue_thread_get(&cdc_acm_work_q);
165 }
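
/*
 * The interrupt-driven UART API is emulated on the work queue above: the
 * registered uart_irq_callback_user_data_t callback runs from irq_cb_work,
 * and check_wq_ctx() lets the FIFO/IRQ API functions assert that they are
 * only invoked from that context.
 */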
166 
static uint8_t cdc_acm_get_int_in(struct usbd_class_data *const c_data)
168 {
169 	struct usbd_context *uds_ctx = usbd_class_get_ctx(c_data);
170 	const struct device *dev = usbd_class_get_private(c_data);
171 	const struct cdc_acm_uart_config *cfg = dev->config;
172 	struct usbd_cdc_acm_desc *desc = cfg->desc;
173 
174 	if (usbd_bus_speed(uds_ctx) == USBD_SPEED_HS) {
175 		return desc->if0_hs_int_ep.bEndpointAddress;
176 	}
177 
178 	return desc->if0_int_ep.bEndpointAddress;
179 }
180 
static uint8_t cdc_acm_get_bulk_in(struct usbd_class_data *const c_data)
182 {
183 	struct usbd_context *uds_ctx = usbd_class_get_ctx(c_data);
184 	const struct device *dev = usbd_class_get_private(c_data);
185 	const struct cdc_acm_uart_config *cfg = dev->config;
186 	struct usbd_cdc_acm_desc *desc = cfg->desc;
187 
188 	if (usbd_bus_speed(uds_ctx) == USBD_SPEED_HS) {
189 		return desc->if1_hs_in_ep.bEndpointAddress;
190 	}
191 
192 	return desc->if1_in_ep.bEndpointAddress;
193 }
194 
static uint8_t cdc_acm_get_bulk_out(struct usbd_class_data *const c_data)
196 {
197 	struct usbd_context *uds_ctx = usbd_class_get_ctx(c_data);
198 	const struct device *dev = usbd_class_get_private(c_data);
199 	const struct cdc_acm_uart_config *cfg = dev->config;
200 	struct usbd_cdc_acm_desc *desc = cfg->desc;
201 
202 	if (usbd_bus_speed(uds_ctx) == USBD_SPEED_HS) {
203 		return desc->if1_hs_out_ep.bEndpointAddress;
204 	}
205 
206 	return desc->if1_out_ep.bEndpointAddress;
207 }
208 
static size_t cdc_acm_get_bulk_mps(struct usbd_class_data *const c_data)
210 {
211 	struct usbd_context *uds_ctx = usbd_class_get_ctx(c_data);
212 
213 	if (usbd_bus_speed(uds_ctx) == USBD_SPEED_HS) {
214 		return 512U;
215 	}
216 
217 	return 64U;
218 }
219 
static int usbd_cdc_acm_request(struct usbd_class_data *const c_data,
221 				struct net_buf *buf, int err)
222 {
223 	struct usbd_context *uds_ctx = usbd_class_get_ctx(c_data);
224 	const struct device *dev = usbd_class_get_private(c_data);
225 	struct cdc_acm_uart_data *data = dev->data;
226 	struct udc_buf_info *bi;
227 
228 	bi = udc_get_buf_info(buf);
229 	if (err) {
230 		if (err == -ECONNABORTED) {
231 			LOG_WRN("request ep 0x%02x, len %u cancelled",
232 				bi->ep, buf->len);
233 		} else {
234 			LOG_ERR("request ep 0x%02x, len %u failed",
235 				bi->ep, buf->len);
236 		}
237 
238 		if (bi->ep == cdc_acm_get_bulk_out(c_data)) {
239 			atomic_clear_bit(&data->state, CDC_ACM_RX_FIFO_BUSY);
240 		}
241 
242 		if (bi->ep == cdc_acm_get_bulk_in(c_data)) {
243 			atomic_clear_bit(&data->state, CDC_ACM_TX_FIFO_BUSY);
244 		}
245 
246 		if (bi->ep == cdc_acm_get_int_in(c_data)) {
247 			k_sem_reset(&data->notif_sem);
248 		}
249 
250 		goto ep_request_error;
251 	}
252 
253 	if (bi->ep == cdc_acm_get_bulk_out(c_data)) {
254 		/* RX transfer completion */
255 		size_t done;
256 
257 		LOG_HEXDUMP_INF(buf->data, buf->len, "");
258 		done = ring_buf_put(data->rx_fifo.rb, buf->data, buf->len);
259 		if (done && data->cb) {
260 			cdc_acm_work_submit(&data->irq_cb_work);
261 		}
262 
263 		atomic_clear_bit(&data->state, CDC_ACM_RX_FIFO_BUSY);
264 		cdc_acm_work_submit(&data->rx_fifo_work);
265 	}
266 
267 	if (bi->ep == cdc_acm_get_bulk_in(c_data)) {
268 		/* TX transfer completion */
269 		if (data->cb) {
270 			cdc_acm_work_submit(&data->irq_cb_work);
271 		}
272 
273 		atomic_clear_bit(&data->state, CDC_ACM_TX_FIFO_BUSY);
274 
275 		if (!ring_buf_is_empty(data->tx_fifo.rb)) {
276 			/* Queue pending TX data on IN endpoint */
277 			cdc_acm_work_schedule(&data->tx_fifo_work, K_NO_WAIT);
278 		}
279 
280 	}
281 
282 	if (bi->ep == cdc_acm_get_int_in(c_data)) {
283 		k_sem_give(&data->notif_sem);
284 	}
285 
286 ep_request_error:
287 	return usbd_ep_buf_free(uds_ctx, buf);
288 }
289 
static void usbd_cdc_acm_update(struct usbd_class_data *const c_data,
291 				uint8_t iface, uint8_t alternate)
292 {
293 	LOG_DBG("New configuration, interface %u alternate %u",
294 		iface, alternate);
295 }
296 
static void usbd_cdc_acm_enable(struct usbd_class_data *const c_data)
298 {
299 	const struct device *dev = usbd_class_get_private(c_data);
300 	struct cdc_acm_uart_data *data = dev->data;
301 
302 	atomic_set_bit(&data->state, CDC_ACM_CLASS_ENABLED);
303 	LOG_INF("Configuration enabled");
304 
305 	if (atomic_test_bit(&data->state, CDC_ACM_IRQ_RX_ENABLED)) {
306 		cdc_acm_irq_rx_enable(dev);
307 	}
308 
309 	if (atomic_test_bit(&data->state, CDC_ACM_IRQ_TX_ENABLED)) {
310 		if (ring_buf_space_get(data->tx_fifo.rb)) {
311 			/* Raise TX ready interrupt */
312 			cdc_acm_work_submit(&data->irq_cb_work);
313 		} else {
314 			/* Queue pending TX data on IN endpoint */
315 			cdc_acm_work_schedule(&data->tx_fifo_work, K_NO_WAIT);
316 		}
317 	}
318 }
319 
static void usbd_cdc_acm_disable(struct usbd_class_data *const c_data)
321 {
322 	const struct device *dev = usbd_class_get_private(c_data);
323 	struct cdc_acm_uart_data *data = dev->data;
324 
325 	atomic_clear_bit(&data->state, CDC_ACM_CLASS_ENABLED);
326 	atomic_clear_bit(&data->state, CDC_ACM_CLASS_SUSPENDED);
327 	LOG_INF("Configuration disabled");
328 }
329 
static void usbd_cdc_acm_suspended(struct usbd_class_data *const c_data)
331 {
332 	const struct device *dev = usbd_class_get_private(c_data);
333 	struct cdc_acm_uart_data *data = dev->data;
334 
335 	/* FIXME: filter stray suspended events earlier */
336 	atomic_set_bit(&data->state, CDC_ACM_CLASS_SUSPENDED);
337 }
338 
static void usbd_cdc_acm_resumed(struct usbd_class_data *const c_data)
340 {
341 	const struct device *dev = usbd_class_get_private(c_data);
342 	struct cdc_acm_uart_data *data = dev->data;
343 
344 	atomic_clear_bit(&data->state, CDC_ACM_CLASS_SUSPENDED);
345 }
346 
static void *usbd_cdc_acm_get_desc(struct usbd_class_data *const c_data,
348 				   const enum usbd_speed speed)
349 {
350 	const struct device *dev = usbd_class_get_private(c_data);
351 	const struct cdc_acm_uart_config *cfg = dev->config;
352 
353 	if (speed == USBD_SPEED_HS) {
354 		return cfg->hs_desc;
355 	}
356 
357 	return cfg->fs_desc;
358 }
359 
static void cdc_acm_update_uart_cfg(struct cdc_acm_uart_data *const data)
361 {
362 	struct uart_config *const cfg = &data->uart_cfg;
363 
364 	cfg->baudrate = sys_le32_to_cpu(data->line_coding.dwDTERate);
365 
366 	switch (data->line_coding.bCharFormat) {
367 	case USB_CDC_LINE_CODING_STOP_BITS_1:
368 		cfg->stop_bits = UART_CFG_STOP_BITS_1;
369 		break;
370 	case USB_CDC_LINE_CODING_STOP_BITS_1_5:
371 		cfg->stop_bits = UART_CFG_STOP_BITS_1_5;
372 		break;
373 	case USB_CDC_LINE_CODING_STOP_BITS_2:
374 	default:
375 		cfg->stop_bits = UART_CFG_STOP_BITS_2;
376 		break;
377 	};
378 
379 	switch (data->line_coding.bParityType) {
380 	case USB_CDC_LINE_CODING_PARITY_NO:
381 	default:
382 		cfg->parity = UART_CFG_PARITY_NONE;
383 		break;
384 	case USB_CDC_LINE_CODING_PARITY_ODD:
385 		cfg->parity = UART_CFG_PARITY_ODD;
386 		break;
387 	case USB_CDC_LINE_CODING_PARITY_EVEN:
388 		cfg->parity = UART_CFG_PARITY_EVEN;
389 		break;
390 	case USB_CDC_LINE_CODING_PARITY_MARK:
391 		cfg->parity = UART_CFG_PARITY_MARK;
392 		break;
393 	case USB_CDC_LINE_CODING_PARITY_SPACE:
394 		cfg->parity = UART_CFG_PARITY_SPACE;
395 		break;
396 	};
397 
398 	switch (data->line_coding.bDataBits) {
399 	case USB_CDC_LINE_CODING_DATA_BITS_5:
400 		cfg->data_bits = UART_CFG_DATA_BITS_5;
401 		break;
402 	case USB_CDC_LINE_CODING_DATA_BITS_6:
403 		cfg->data_bits = UART_CFG_DATA_BITS_6;
404 		break;
405 	case USB_CDC_LINE_CODING_DATA_BITS_7:
406 		cfg->data_bits = UART_CFG_DATA_BITS_7;
407 		break;
408 	case USB_CDC_LINE_CODING_DATA_BITS_8:
409 	default:
410 		cfg->data_bits = UART_CFG_DATA_BITS_8;
411 		break;
412 	};
413 
414 	cfg->flow_ctrl = data->flow_ctrl ? UART_CFG_FLOW_CTRL_RTS_CTS :
415 					   UART_CFG_FLOW_CTRL_NONE;
416 }
417 
static void cdc_acm_update_linestate(struct cdc_acm_uart_data *const data)
419 {
420 	if (data->line_state & SET_CONTROL_LINE_STATE_RTS) {
421 		data->line_state_rts = true;
422 	} else {
423 		data->line_state_rts = false;
424 	}
425 
426 	if (data->line_state & SET_CONTROL_LINE_STATE_DTR) {
427 		data->line_state_dtr = true;
428 	} else {
429 		data->line_state_dtr = false;
430 	}
431 }
432 
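/*
 * Class-specific control requests: control-to-host handles GET_LINE_CODING,
 * control-to-device handles SET_LINE_CODING and SET_CONTROL_LINE_STATE.
 * Unsupported requests are reported by setting errno to a negative value
 * and returning 0.
 */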
static int usbd_cdc_acm_cth(struct usbd_class_data *const c_data,
434 			    const struct usb_setup_packet *const setup,
435 			    struct net_buf *const buf)
436 {
437 	const struct device *dev = usbd_class_get_private(c_data);
438 	struct cdc_acm_uart_data *data = dev->data;
439 	size_t min_len;
440 
441 	if (setup->bRequest == GET_LINE_CODING) {
442 		if (buf == NULL) {
443 			errno = -ENOMEM;
444 			return 0;
445 		}
446 
447 		min_len = MIN(sizeof(data->line_coding), setup->wLength);
448 		net_buf_add_mem(buf, &data->line_coding, min_len);
449 
450 		return 0;
451 	}
452 
453 	LOG_DBG("bmRequestType 0x%02x bRequest 0x%02x unsupported",
454 		setup->bmRequestType, setup->bRequest);
455 	errno = -ENOTSUP;
456 
457 	return 0;
458 }
459 
static int usbd_cdc_acm_ctd(struct usbd_class_data *const c_data,
461 			    const struct usb_setup_packet *const setup,
462 			    const struct net_buf *const buf)
463 {
464 	struct usbd_context *uds_ctx = usbd_class_get_ctx(c_data);
465 	const struct device *dev = usbd_class_get_private(c_data);
466 	struct cdc_acm_uart_data *data = dev->data;
467 	size_t len;
468 
469 	switch (setup->bRequest) {
470 	case SET_LINE_CODING:
471 		len = sizeof(data->line_coding);
472 		if (setup->wLength != len) {
473 			errno = -ENOTSUP;
474 			return 0;
475 		}
476 
477 		memcpy(&data->line_coding, buf->data, len);
478 		cdc_acm_update_uart_cfg(data);
479 		usbd_msg_pub_device(uds_ctx, USBD_MSG_CDC_ACM_LINE_CODING, dev);
480 		return 0;
481 
482 	case SET_CONTROL_LINE_STATE:
483 		data->line_state = setup->wValue;
484 		cdc_acm_update_linestate(data);
485 		usbd_msg_pub_device(uds_ctx, USBD_MSG_CDC_ACM_CONTROL_LINE_STATE, dev);
486 		return 0;
487 
488 	default:
489 		break;
490 	}
491 
492 	LOG_DBG("bmRequestType 0x%02x bRequest 0x%02x unsupported",
493 		setup->bmRequestType, setup->bRequest);
494 	errno = -ENOTSUP;
495 
496 	return 0;
497 }
498 
static int usbd_cdc_acm_init(struct usbd_class_data *const c_data)
500 {
501 	struct usbd_context *uds_ctx = usbd_class_get_ctx(c_data);
502 	const struct device *dev = usbd_class_get_private(c_data);
503 	const struct cdc_acm_uart_config *cfg = dev->config;
504 	struct usbd_cdc_acm_desc *desc = cfg->desc;
505 
506 	desc->if0_union.bControlInterface = desc->if0.bInterfaceNumber;
507 	desc->if0_union.bSubordinateInterface0 = desc->if1.bInterfaceNumber;
508 
509 	if (cfg->if_desc_data != NULL) {
510 		if (usbd_add_descriptor(uds_ctx, cfg->if_desc_data)) {
511 			LOG_ERR("Failed to add interface string descriptor");
512 		} else {
513 			desc->if0.iInterface = usbd_str_desc_get_idx(cfg->if_desc_data);
514 		}
515 	}
516 
517 	return 0;
518 }
519 
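/*
 * Send a SERIAL_STATE notification on the interrupt IN endpoint.
 * bmRequestType 0xA1 encodes device-to-host, class request, interface
 * recipient. The call blocks on notif_sem until the transfer completes;
 * the semaphore is reset if the transfer fails.
 */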
static inline int cdc_acm_send_notification(const struct device *dev,
521 					    const uint16_t serial_state)
522 {
523 	struct cdc_acm_notification notification = {
524 		.bmRequestType = 0xA1,
525 		.bNotificationType = USB_CDC_SERIAL_STATE,
526 		.wValue = 0,
527 		.wIndex = 0,
528 		.wLength = sys_cpu_to_le16(sizeof(uint16_t)),
529 		.data = sys_cpu_to_le16(serial_state),
530 	};
531 	struct cdc_acm_uart_data *data = dev->data;
532 	const struct cdc_acm_uart_config *cfg = dev->config;
533 	struct usbd_class_data *c_data = cfg->c_data;
534 	struct net_buf *buf;
535 	uint8_t ep;
536 	int ret;
537 
538 	if (!atomic_test_bit(&data->state, CDC_ACM_CLASS_ENABLED)) {
539 		LOG_INF("USB configuration is not enabled");
540 		return -EACCES;
541 	}
542 
543 	if (atomic_test_bit(&data->state, CDC_ACM_CLASS_SUSPENDED)) {
544 		LOG_INF("USB support is suspended (FIXME)");
545 		return -EACCES;
546 	}
547 
548 	ep = cdc_acm_get_int_in(c_data);
549 	buf = usbd_ep_buf_alloc(c_data, ep, sizeof(struct cdc_acm_notification));
550 	if (buf == NULL) {
551 		return -ENOMEM;
552 	}
553 
554 	net_buf_add_mem(buf, &notification, sizeof(struct cdc_acm_notification));
555 	ret = usbd_ep_enqueue(c_data, buf);
556 	if (ret) {
557 		net_buf_unref(buf);
558 		return ret;
559 	}
560 
561 	if (k_sem_take(&data->notif_sem, K_FOREVER) == -EAGAIN) {
562 		return -ECANCELED;
563 	}
564 
565 	return ret;
566 }
567 
568 /*
569  * TX handler is triggered when the state of TX fifo has been altered.
570  */
static void cdc_acm_tx_fifo_handler(struct k_work *work)
572 {
573 	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
574 	struct cdc_acm_uart_data *data;
575 	const struct cdc_acm_uart_config *cfg;
576 	struct usbd_class_data *c_data;
577 	struct net_buf *buf;
578 	size_t len;
579 	int ret;
580 
581 	data = CONTAINER_OF(dwork, struct cdc_acm_uart_data, tx_fifo_work);
582 	cfg = data->dev->config;
583 	c_data = cfg->c_data;
584 
585 	if (!atomic_test_bit(&data->state, CDC_ACM_CLASS_ENABLED)) {
586 		LOG_DBG("USB configuration is not enabled");
587 		return;
588 	}
589 
590 	if (atomic_test_bit(&data->state, CDC_ACM_CLASS_SUSPENDED)) {
591 		LOG_INF("USB support is suspended (FIXME: submit rwup)");
592 		return;
593 	}
594 
595 	if (atomic_test_and_set_bit(&data->state, CDC_ACM_TX_FIFO_BUSY)) {
596 		LOG_DBG("TX transfer already in progress");
597 		return;
598 	}
599 
600 	buf = cdc_acm_buf_alloc(cdc_acm_get_bulk_in(c_data));
601 	if (buf == NULL) {
602 		atomic_clear_bit(&data->state, CDC_ACM_TX_FIFO_BUSY);
603 		cdc_acm_work_schedule(&data->tx_fifo_work, K_MSEC(1));
604 		return;
605 	}
606 
607 	len = ring_buf_get(data->tx_fifo.rb, buf->data, buf->size);
608 	net_buf_add(buf, len);
609 
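	/*
	 * A ZLP is needed when a non-zero transfer length is an exact
	 * multiple of the endpoint MPS. If nothing is added to the TX FIFO
	 * before the next pass, the follow-up transfer is zero length and
	 * terminates the bulk transfer on the host side.
	 */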
610 	data->zlp_needed = len != 0 && len % cdc_acm_get_bulk_mps(c_data) == 0;
611 
612 	ret = usbd_ep_enqueue(c_data, buf);
613 	if (ret) {
614 		LOG_ERR("Failed to enqueue");
615 		net_buf_unref(buf);
616 		atomic_clear_bit(&data->state, CDC_ACM_TX_FIFO_BUSY);
617 	}
618 }
619 
620 /*
621  * RX handler should be conditionally triggered at:
622  *  - (x) cdc_acm_irq_rx_enable()
623  *  - (x) RX transfer completion
624  *  - (x) the end of cdc_acm_irq_cb_handler
625  *  - (x) USBD class API enable call
626  *  - ( ) USBD class API resumed call (TODO)
627  */
static void cdc_acm_rx_fifo_handler(struct k_work *work)
629 {
630 	struct cdc_acm_uart_data *data;
631 	const struct cdc_acm_uart_config *cfg;
632 	struct usbd_class_data *c_data;
633 	struct net_buf *buf;
634 	uint8_t ep;
635 	int ret;
636 
637 	data = CONTAINER_OF(work, struct cdc_acm_uart_data, rx_fifo_work);
638 	cfg = data->dev->config;
639 	c_data = cfg->c_data;
640 
641 	if (!atomic_test_bit(&data->state, CDC_ACM_CLASS_ENABLED) ||
642 	    atomic_test_bit(&data->state, CDC_ACM_CLASS_SUSPENDED)) {
		LOG_INF("USB configuration is not enabled or the device is suspended");
644 		return;
645 	}
646 
647 	if (ring_buf_space_get(data->rx_fifo.rb) < cdc_acm_get_bulk_mps(c_data)) {
		LOG_INF("RX buffer too small, throttle");
649 		return;
650 	}
651 
652 	if (atomic_test_and_set_bit(&data->state, CDC_ACM_RX_FIFO_BUSY)) {
653 		LOG_WRN("RX transfer already in progress");
654 		return;
655 	}
656 
657 	ep = cdc_acm_get_bulk_out(c_data);
658 	buf = cdc_acm_buf_alloc(ep);
659 	if (buf == NULL) {
660 		return;
661 	}
662 
663 	/* Shrink the buffer size if operating on a full speed bus */
664 	buf->size = MIN(cdc_acm_get_bulk_mps(c_data), buf->size);
665 
666 	ret = usbd_ep_enqueue(c_data, buf);
667 	if (ret) {
668 		LOG_ERR("Failed to enqueue net_buf for 0x%02x", ep);
669 		net_buf_unref(buf);
670 	}
671 }
672 
static void cdc_acm_irq_tx_enable(const struct device *dev)
674 {
675 	struct cdc_acm_uart_data *const data = dev->data;
676 
677 	atomic_set_bit(&data->state, CDC_ACM_IRQ_TX_ENABLED);
678 
679 	if (ring_buf_space_get(data->tx_fifo.rb)) {
680 		LOG_INF("tx_en: trigger irq_cb_work");
681 		cdc_acm_work_submit(&data->irq_cb_work);
682 	}
683 }
684 
static void cdc_acm_irq_tx_disable(const struct device *dev)
686 {
687 	struct cdc_acm_uart_data *const data = dev->data;
688 
689 	atomic_clear_bit(&data->state, CDC_ACM_IRQ_TX_ENABLED);
690 }
691 
static void cdc_acm_irq_rx_enable(const struct device *dev)
693 {
694 	struct cdc_acm_uart_data *const data = dev->data;
695 
696 	atomic_set_bit(&data->state, CDC_ACM_IRQ_RX_ENABLED);
697 
698 	/* Permit buffer to be drained regardless of USB state */
699 	if (!ring_buf_is_empty(data->rx_fifo.rb)) {
700 		LOG_INF("rx_en: trigger irq_cb_work");
701 		cdc_acm_work_submit(&data->irq_cb_work);
702 	}
703 
704 	if (!atomic_test_bit(&data->state, CDC_ACM_RX_FIFO_BUSY)) {
705 		LOG_INF("rx_en: trigger rx_fifo_work");
706 		cdc_acm_work_submit(&data->rx_fifo_work);
707 	}
708 }
709 
static void cdc_acm_irq_rx_disable(const struct device *dev)
711 {
712 	struct cdc_acm_uart_data *const data = dev->data;
713 
714 	atomic_clear_bit(&data->state, CDC_ACM_IRQ_RX_ENABLED);
715 }
716 
static int cdc_acm_fifo_fill(const struct device *dev,
718 			     const uint8_t *const tx_data,
719 			     const int len)
720 {
721 	struct cdc_acm_uart_data *const data = dev->data;
722 	unsigned int lock;
723 	uint32_t done;
724 
725 	if (!check_wq_ctx(dev)) {
726 		LOG_WRN("Invoked by inappropriate context");
727 		__ASSERT_NO_MSG(false);
728 		return 0;
729 	}
730 
731 	lock = irq_lock();
732 	done = ring_buf_put(data->tx_fifo.rb, tx_data, len);
733 	irq_unlock(lock);
734 	if (done) {
735 		data->tx_fifo.altered = true;
736 	}
737 
738 	LOG_INF("UART dev %p, len %d, remaining space %u",
739 		dev, len, ring_buf_space_get(data->tx_fifo.rb));
740 
741 	return done;
742 }
743 
static int cdc_acm_fifo_read(const struct device *dev,
745 			     uint8_t *const rx_data,
746 			     const int size)
747 {
748 	struct cdc_acm_uart_data *const data = dev->data;
749 	uint32_t len;
750 
751 	LOG_INF("UART dev %p size %d length %u",
752 		dev, size, ring_buf_size_get(data->rx_fifo.rb));
753 
754 	if (!check_wq_ctx(dev)) {
755 		LOG_WRN("Invoked by inappropriate context");
756 		__ASSERT_NO_MSG(false);
757 		return 0;
758 	}
759 
760 	len = ring_buf_get(data->rx_fifo.rb, rx_data, size);
761 	if (len) {
762 		data->rx_fifo.altered = true;
763 	}
764 
765 	return len;
766 }
767 
static int cdc_acm_irq_tx_ready(const struct device *dev)
769 {
770 	struct cdc_acm_uart_data *const data = dev->data;
771 
772 	if (check_wq_ctx(dev)) {
773 		if (data->tx_fifo.irq) {
774 			return ring_buf_space_get(data->tx_fifo.rb);
775 		}
776 	} else {
777 		LOG_WRN("Invoked by inappropriate context");
778 		__ASSERT_NO_MSG(false);
779 	}
780 
781 	return 0;
782 }
783 
static int cdc_acm_irq_rx_ready(const struct device *dev)
785 {
786 	struct cdc_acm_uart_data *const data = dev->data;
787 
788 	if (check_wq_ctx(dev)) {
789 		if (data->rx_fifo.irq) {
790 			return 1;
791 		}
792 	} else {
793 		LOG_WRN("Invoked by inappropriate context");
794 		__ASSERT_NO_MSG(false);
795 	}
796 
797 
798 	return 0;
799 }
800 
static int cdc_acm_irq_is_pending(const struct device *dev)
802 {
803 	struct cdc_acm_uart_data *const data = dev->data;
804 
805 	if (check_wq_ctx(dev)) {
806 		if (data->tx_fifo.irq || data->rx_fifo.irq) {
807 			return 1;
808 		}
809 	} else {
810 		LOG_WRN("Invoked by inappropriate context");
811 		__ASSERT_NO_MSG(false);
812 	}
813 
814 	return 0;
815 }
816 
static int cdc_acm_irq_update(const struct device *dev)
818 {
819 	struct cdc_acm_uart_data *const data = dev->data;
820 
821 	if (!check_wq_ctx(dev)) {
822 		LOG_WRN("Invoked by inappropriate context");
823 		__ASSERT_NO_MSG(false);
824 		return 0;
825 	}
826 
827 	if (atomic_test_bit(&data->state, CDC_ACM_IRQ_RX_ENABLED) &&
828 	    !ring_buf_is_empty(data->rx_fifo.rb)) {
829 		data->rx_fifo.irq = true;
830 	} else {
831 		data->rx_fifo.irq = false;
832 	}
833 
834 	if (atomic_test_bit(&data->state, CDC_ACM_IRQ_TX_ENABLED) &&
835 	    ring_buf_space_get(data->tx_fifo.rb)) {
836 		data->tx_fifo.irq = true;
837 	} else {
838 		data->tx_fifo.irq = false;
839 	}
840 
841 	return 1;
842 }
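
/*
 * Illustrative application-side sketch of the interrupt-driven API served by
 * the handler below (app_cb and app_handle_byte are hypothetical names, not
 * part of this driver):
 *
 *	static void app_cb(const struct device *dev, void *user_data)
 *	{
 *		uint8_t byte;
 *
 *		if (!uart_irq_update(dev)) {
 *			return;
 *		}
 *
 *		while (uart_irq_rx_ready(dev) &&
 *		       uart_fifo_read(dev, &byte, 1) > 0) {
 *			app_handle_byte(byte);
 *		}
 *	}
 *
 *	uart_irq_callback_set(dev, app_cb);
 *	uart_irq_rx_enable(dev);
 */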
843 
844 /*
845  * IRQ handler should be conditionally triggered for the TX path at:
846  *  - cdc_acm_irq_tx_enable()
847  *  - TX transfer completion
848  *  - TX buffer is empty
849  *  - USBD class API enable and resumed calls
850  *
851  * for RX path, if enabled, at:
852  *  - cdc_acm_irq_rx_enable()
853  *  - RX transfer completion
854  *  - RX buffer is not empty
855  */
static void cdc_acm_irq_cb_handler(struct k_work *work)
857 {
858 	struct cdc_acm_uart_data *data;
859 	const struct cdc_acm_uart_config *cfg;
860 	struct usbd_class_data *c_data;
861 
862 	data = CONTAINER_OF(work, struct cdc_acm_uart_data, irq_cb_work);
863 	cfg = data->dev->config;
864 	c_data = cfg->c_data;
865 
866 	if (data->cb == NULL) {
867 		LOG_ERR("IRQ callback is not set");
868 		return;
869 	}
870 
871 	data->tx_fifo.altered = false;
872 	data->rx_fifo.altered = false;
873 	data->rx_fifo.irq = false;
874 	data->tx_fifo.irq = false;
875 
876 	if (atomic_test_bit(&data->state, CDC_ACM_IRQ_RX_ENABLED) ||
877 	    atomic_test_bit(&data->state, CDC_ACM_IRQ_TX_ENABLED)) {
878 		data->cb(usbd_class_get_private(c_data), data->cb_data);
879 	}
880 
881 	if (data->rx_fifo.altered) {
882 		LOG_DBG("rx fifo altered, submit work");
883 		cdc_acm_work_submit(&data->rx_fifo_work);
884 	}
885 
886 	if (!atomic_test_bit(&data->state, CDC_ACM_TX_FIFO_BUSY)) {
887 		if (data->tx_fifo.altered) {
888 			LOG_DBG("tx fifo altered, submit work");
889 			cdc_acm_work_schedule(&data->tx_fifo_work, K_NO_WAIT);
890 		} else if (data->zlp_needed) {
891 			LOG_DBG("zlp needed, submit work");
892 			cdc_acm_work_schedule(&data->tx_fifo_work, K_NO_WAIT);
893 		}
894 	}
895 
896 	if (atomic_test_bit(&data->state, CDC_ACM_IRQ_RX_ENABLED) &&
897 	    !ring_buf_is_empty(data->rx_fifo.rb)) {
898 		LOG_DBG("rx irq pending, submit irq_cb_work");
899 		cdc_acm_work_submit(&data->irq_cb_work);
900 	}
901 
902 	if (atomic_test_bit(&data->state, CDC_ACM_IRQ_TX_ENABLED) &&
903 	    ring_buf_space_get(data->tx_fifo.rb)) {
904 		LOG_DBG("tx irq pending, submit irq_cb_work");
905 		cdc_acm_work_submit(&data->irq_cb_work);
906 	}
907 }
908 
static void cdc_acm_irq_callback_set(const struct device *dev,
910 				     const uart_irq_callback_user_data_t cb,
911 				     void *const cb_data)
912 {
913 	struct cdc_acm_uart_data *const data = dev->data;
914 
915 	data->cb = cb;
916 	data->cb_data = cb_data;
917 }
918 
static int cdc_acm_poll_in(const struct device *dev, unsigned char *const c)
920 {
921 	struct cdc_acm_uart_data *const data = dev->data;
922 	uint32_t len;
923 	int ret = -1;
924 
925 	if (ring_buf_is_empty(data->rx_fifo.rb)) {
926 		return ret;
927 	}
928 
929 	len = ring_buf_get(data->rx_fifo.rb, c, 1);
930 	if (len) {
931 		cdc_acm_work_submit(&data->rx_fifo_work);
932 		ret = 0;
933 	}
934 
935 	return ret;
936 }
937 
static void cdc_acm_poll_out(const struct device *dev, const unsigned char c)
939 {
940 	struct cdc_acm_uart_data *const data = dev->data;
941 	unsigned int lock;
942 	uint32_t wrote;
943 
944 	while (true) {
945 		lock = irq_lock();
946 		wrote = ring_buf_put(data->tx_fifo.rb, &c, 1);
947 		irq_unlock(lock);
948 
949 		if (wrote == 1) {
950 			break;
951 		}
952 
953 		if (k_is_in_isr() || !data->flow_ctrl) {
954 			LOG_WRN_ONCE("Ring buffer full, discard data");
955 			break;
956 		}
957 
958 		k_msleep(1);
959 	}
960 
961 	/* Schedule with minimal timeout to make it possible to send more than
962 	 * one byte per USB transfer. The latency increase is negligible while
963 	 * the increased throughput and reduced CPU usage is easily observable.
964 	 */
965 	cdc_acm_work_schedule(&data->tx_fifo_work, K_MSEC(1));
966 }
967 
968 #ifdef CONFIG_UART_LINE_CTRL
static int cdc_acm_line_ctrl_set(const struct device *dev,
970 				 const uint32_t ctrl, const uint32_t val)
971 {
972 	struct cdc_acm_uart_data *const data = dev->data;
973 	uint32_t flag = 0;
974 
975 	switch (ctrl) {
976 	case USB_CDC_LINE_CTRL_BAUD_RATE:
		/* Ignore since it cannot be used for notification anyway */
978 		return 0;
979 	case USB_CDC_LINE_CTRL_DCD:
980 		flag = USB_CDC_SERIAL_STATE_RXCARRIER;
981 		break;
982 	case USB_CDC_LINE_CTRL_DSR:
983 		flag = USB_CDC_SERIAL_STATE_TXCARRIER;
984 		break;
985 	case USB_CDC_LINE_CTRL_BREAK:
986 		flag = USB_CDC_SERIAL_STATE_BREAK;
987 		break;
988 	case USB_CDC_LINE_CTRL_RING_SIGNAL:
989 		flag = USB_CDC_SERIAL_STATE_RINGSIGNAL;
990 		break;
991 	case USB_CDC_LINE_CTRL_FRAMING:
992 		flag = USB_CDC_SERIAL_STATE_FRAMING;
993 		break;
994 	case USB_CDC_LINE_CTRL_PARITY:
995 		flag = USB_CDC_SERIAL_STATE_PARITY;
996 		break;
997 	case USB_CDC_LINE_CTRL_OVER_RUN:
998 		flag = USB_CDC_SERIAL_STATE_OVERRUN;
999 		break;
1000 	default:
1001 		return -EINVAL;
1002 	}
1003 
1004 	if (val) {
1005 		data->serial_state |= flag;
1006 	} else {
1007 		data->serial_state &= ~flag;
1008 	}
1009 
1010 	return cdc_acm_send_notification(dev, data->serial_state);
1011 }
1012 
static int cdc_acm_line_ctrl_get(const struct device *dev,
1014 				 const uint32_t ctrl, uint32_t *const val)
1015 {
1016 	struct cdc_acm_uart_data *const data = dev->data;
1017 
1018 	switch (ctrl) {
1019 	case UART_LINE_CTRL_BAUD_RATE:
1020 		*val = data->uart_cfg.baudrate;
1021 		return 0;
1022 	case UART_LINE_CTRL_RTS:
1023 		*val = data->line_state_rts;
1024 		return 0;
1025 	case UART_LINE_CTRL_DTR:
1026 		*val = data->line_state_dtr;
1027 		return 0;
1028 	}
1029 
1030 	return -ENOTSUP;
1031 }
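
/*
 * Illustrative sketch (assumes the chosen console is a CDC ACM instance):
 * an application can wait for the host to open the port by polling DTR,
 * which SET_CONTROL_LINE_STATE updates and cdc_acm_line_ctrl_get() above
 * reports back.
 *
 *	const struct device *const con = DEVICE_DT_GET(DT_CHOSEN(zephyr_console));
 *	uint32_t dtr = 0U;
 *
 *	while (!dtr) {
 *		uart_line_ctrl_get(con, UART_LINE_CTRL_DTR, &dtr);
 *		k_sleep(K_MSEC(100));
 *	}
 */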
1032 #endif
1033 
1034 #ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
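/*
 * Only the flow_ctrl field is honored here; baud rate, parity, stop and
 * data bits are driven by the host through SET_LINE_CODING and reflected
 * back via cdc_acm_config_get(), see cdc_acm_update_uart_cfg().
 */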
static int cdc_acm_configure(const struct device *dev,
1036 			     const struct uart_config *const cfg)
1037 {
1038 	struct cdc_acm_uart_data *const data = dev->data;
1039 
1040 	switch (cfg->flow_ctrl) {
1041 	case UART_CFG_FLOW_CTRL_NONE:
1042 		data->flow_ctrl = false;
1043 		break;
1044 	case UART_CFG_FLOW_CTRL_RTS_CTS:
1045 		data->flow_ctrl = true;
1046 		break;
1047 	default:
1048 		return -ENOTSUP;
1049 	}
1050 
1051 	return 0;
1052 }
1053 
static int cdc_acm_config_get(const struct device *dev,
1055 			      struct uart_config *const cfg)
1056 {
1057 	struct cdc_acm_uart_data *const data = dev->data;
1058 
1059 	memcpy(cfg, &data->uart_cfg, sizeof(struct uart_config));
1060 
1061 	return 0;
1062 }
1063 #endif /* CONFIG_UART_USE_RUNTIME_CONFIGURE */
1064 
static int usbd_cdc_acm_init_wq(void)
1066 {
1067 	k_work_queue_init(&cdc_acm_work_q);
1068 	k_work_queue_start(&cdc_acm_work_q, cdc_acm_stack,
1069 			   K_KERNEL_STACK_SIZEOF(cdc_acm_stack),
1070 			   CONFIG_SYSTEM_WORKQUEUE_PRIORITY, NULL);
1071 	k_thread_name_set(&cdc_acm_work_q.thread, "cdc_acm_work_q");
1072 
1073 	return 0;
1074 }
1075 
static int usbd_cdc_acm_preinit(const struct device *dev)
1077 {
1078 	struct cdc_acm_uart_data *const data = dev->data;
1079 
1080 	ring_buf_reset(data->tx_fifo.rb);
1081 	ring_buf_reset(data->rx_fifo.rb);
1082 
1083 	k_work_init_delayable(&data->tx_fifo_work, cdc_acm_tx_fifo_handler);
1084 	k_work_init(&data->rx_fifo_work, cdc_acm_rx_fifo_handler);
1085 	k_work_init(&data->irq_cb_work, cdc_acm_irq_cb_handler);
1086 
1087 	return 0;
1088 }
1089 
1090 static DEVICE_API(uart, cdc_acm_uart_api) = {
1091 	.irq_tx_enable = cdc_acm_irq_tx_enable,
1092 	.irq_tx_disable = cdc_acm_irq_tx_disable,
1093 	.irq_tx_ready = cdc_acm_irq_tx_ready,
1094 	.irq_rx_enable = cdc_acm_irq_rx_enable,
1095 	.irq_rx_disable = cdc_acm_irq_rx_disable,
1096 	.irq_rx_ready = cdc_acm_irq_rx_ready,
1097 	.irq_is_pending = cdc_acm_irq_is_pending,
1098 	.irq_update = cdc_acm_irq_update,
1099 	.irq_callback_set = cdc_acm_irq_callback_set,
1100 	.poll_in = cdc_acm_poll_in,
1101 	.poll_out = cdc_acm_poll_out,
1102 	.fifo_fill = cdc_acm_fifo_fill,
1103 	.fifo_read = cdc_acm_fifo_read,
1104 #ifdef CONFIG_UART_LINE_CTRL
1105 	.line_ctrl_set = cdc_acm_line_ctrl_set,
1106 	.line_ctrl_get = cdc_acm_line_ctrl_get,
1107 #endif
1108 #ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
1109 	.configure = cdc_acm_configure,
1110 	.config_get = cdc_acm_config_get,
1111 #endif
1112 };
1113 
1114 struct usbd_class_api usbd_cdc_acm_api = {
1115 	.request = usbd_cdc_acm_request,
1116 	.update = usbd_cdc_acm_update,
1117 	.enable = usbd_cdc_acm_enable,
1118 	.disable = usbd_cdc_acm_disable,
1119 	.suspended = usbd_cdc_acm_suspended,
1120 	.resumed = usbd_cdc_acm_resumed,
1121 	.control_to_host = usbd_cdc_acm_cth,
1122 	.control_to_dev = usbd_cdc_acm_ctd,
1123 	.init = usbd_cdc_acm_init,
1124 	.get_desc = usbd_cdc_acm_get_desc,
1125 };
1126 
1127 #define CDC_ACM_DEFINE_DESCRIPTOR(n)						\
1128 static struct usbd_cdc_acm_desc cdc_acm_desc_##n = {				\
1129 	.iad = {								\
1130 		.bLength = sizeof(struct usb_association_descriptor),		\
1131 		.bDescriptorType = USB_DESC_INTERFACE_ASSOC,			\
1132 		.bFirstInterface = 0,						\
1133 		.bInterfaceCount = 0x02,					\
1134 		.bFunctionClass = USB_BCC_CDC_CONTROL,				\
1135 		.bFunctionSubClass = ACM_SUBCLASS,				\
1136 		.bFunctionProtocol = 0,						\
1137 		.iFunction = 0,							\
1138 	},									\
1139 										\
1140 	.if0 = {								\
1141 		.bLength = sizeof(struct usb_if_descriptor),			\
1142 		.bDescriptorType = USB_DESC_INTERFACE,				\
1143 		.bInterfaceNumber = 0,						\
1144 		.bAlternateSetting = 0,						\
1145 		.bNumEndpoints = 1,						\
1146 		.bInterfaceClass = USB_BCC_CDC_CONTROL,				\
1147 		.bInterfaceSubClass = ACM_SUBCLASS,				\
1148 		.bInterfaceProtocol = 0,					\
1149 		.iInterface = 0,						\
1150 	},									\
1151 										\
1152 	.if0_header = {								\
1153 		.bFunctionLength = sizeof(struct cdc_header_descriptor),	\
1154 		.bDescriptorType = USB_DESC_CS_INTERFACE,			\
1155 		.bDescriptorSubtype = HEADER_FUNC_DESC,				\
1156 		.bcdCDC = sys_cpu_to_le16(USB_SRN_1_1),				\
1157 	},									\
1158 										\
1159 	.if0_cm = {								\
1160 		.bFunctionLength = sizeof(struct cdc_cm_descriptor),		\
1161 		.bDescriptorType = USB_DESC_CS_INTERFACE,			\
1162 		.bDescriptorSubtype = CALL_MANAGEMENT_FUNC_DESC,		\
1163 		.bmCapabilities = 0,						\
1164 		.bDataInterface = 1,						\
1165 	},									\
1166 										\
1167 	.if0_acm = {								\
1168 		.bFunctionLength = sizeof(struct cdc_acm_descriptor),		\
1169 		.bDescriptorType = USB_DESC_CS_INTERFACE,			\
1170 		.bDescriptorSubtype = ACM_FUNC_DESC,				\
1171 		/* See CDC PSTN Subclass Chapter 5.3.2 */			\
1172 		.bmCapabilities = BIT(1),					\
1173 	},									\
1174 										\
1175 	.if0_union = {								\
1176 		.bFunctionLength = sizeof(struct cdc_union_descriptor),		\
1177 		.bDescriptorType = USB_DESC_CS_INTERFACE,			\
1178 		.bDescriptorSubtype = UNION_FUNC_DESC,				\
1179 		.bControlInterface = 0,						\
1180 		.bSubordinateInterface0 = 1,					\
1181 	},									\
1182 										\
1183 	.if0_int_ep = {								\
1184 		.bLength = sizeof(struct usb_ep_descriptor),			\
1185 		.bDescriptorType = USB_DESC_ENDPOINT,				\
1186 		.bEndpointAddress = 0x81,					\
1187 		.bmAttributes = USB_EP_TYPE_INTERRUPT,				\
1188 		.wMaxPacketSize = sys_cpu_to_le16(CDC_ACM_DEFAULT_INT_EP_MPS),	\
1189 		.bInterval = CDC_ACM_FS_INT_EP_INTERVAL,			\
1190 	},									\
1191 										\
1192 	.if0_hs_int_ep = {							\
1193 		.bLength = sizeof(struct usb_ep_descriptor),			\
1194 		.bDescriptorType = USB_DESC_ENDPOINT,				\
1195 		.bEndpointAddress = 0x81,					\
1196 		.bmAttributes = USB_EP_TYPE_INTERRUPT,				\
1197 		.wMaxPacketSize = sys_cpu_to_le16(CDC_ACM_DEFAULT_INT_EP_MPS),	\
1198 		.bInterval = CDC_ACM_HS_INT_EP_INTERVAL,			\
1199 	},									\
1200 										\
1201 	.if1 = {								\
1202 		.bLength = sizeof(struct usb_if_descriptor),			\
1203 		.bDescriptorType = USB_DESC_INTERFACE,				\
1204 		.bInterfaceNumber = 1,						\
1205 		.bAlternateSetting = 0,						\
1206 		.bNumEndpoints = 2,						\
1207 		.bInterfaceClass = USB_BCC_CDC_DATA,				\
1208 		.bInterfaceSubClass = 0,					\
1209 		.bInterfaceProtocol = 0,					\
1210 		.iInterface = 0,						\
1211 	},									\
1212 										\
1213 	.if1_in_ep = {								\
1214 		.bLength = sizeof(struct usb_ep_descriptor),			\
1215 		.bDescriptorType = USB_DESC_ENDPOINT,				\
1216 		.bEndpointAddress = 0x82,					\
1217 		.bmAttributes = USB_EP_TYPE_BULK,				\
1218 		.wMaxPacketSize = sys_cpu_to_le16(64U),				\
1219 		.bInterval = 0,							\
1220 	},									\
1221 										\
1222 	.if1_out_ep = {								\
1223 		.bLength = sizeof(struct usb_ep_descriptor),			\
1224 		.bDescriptorType = USB_DESC_ENDPOINT,				\
1225 		.bEndpointAddress = 0x01,					\
1226 		.bmAttributes = USB_EP_TYPE_BULK,				\
1227 		.wMaxPacketSize = sys_cpu_to_le16(64U),				\
1228 		.bInterval = 0,							\
1229 	},									\
1230 										\
1231 	.if1_hs_in_ep = {							\
1232 		.bLength = sizeof(struct usb_ep_descriptor),			\
1233 		.bDescriptorType = USB_DESC_ENDPOINT,				\
1234 		.bEndpointAddress = 0x82,					\
1235 		.bmAttributes = USB_EP_TYPE_BULK,				\
1236 		.wMaxPacketSize = sys_cpu_to_le16(512U),			\
1237 		.bInterval = 0,							\
1238 	},									\
1239 										\
1240 	.if1_hs_out_ep = {							\
1241 		.bLength = sizeof(struct usb_ep_descriptor),			\
1242 		.bDescriptorType = USB_DESC_ENDPOINT,				\
1243 		.bEndpointAddress = 0x01,					\
1244 		.bmAttributes = USB_EP_TYPE_BULK,				\
1245 		.wMaxPacketSize = sys_cpu_to_le16(512U),			\
1246 		.bInterval = 0,							\
1247 	},									\
1248 										\
1249 	.nil_desc = {								\
1250 		.bLength = 0,							\
1251 		.bDescriptorType = 0,						\
1252 	},									\
1253 };										\
1254 										\
1255 const static struct usb_desc_header *cdc_acm_fs_desc_##n[] = {			\
1256 	(struct usb_desc_header *) &cdc_acm_desc_##n.iad,			\
1257 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0,			\
1258 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0_header,		\
1259 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0_cm,			\
1260 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0_acm,			\
1261 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0_union,			\
1262 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0_int_ep,		\
1263 	(struct usb_desc_header *) &cdc_acm_desc_##n.if1,			\
1264 	(struct usb_desc_header *) &cdc_acm_desc_##n.if1_in_ep,			\
1265 	(struct usb_desc_header *) &cdc_acm_desc_##n.if1_out_ep,		\
1266 	(struct usb_desc_header *) &cdc_acm_desc_##n.nil_desc,			\
1267 };										\
1268 										\
1269 const static struct usb_desc_header *cdc_acm_hs_desc_##n[] = {			\
1270 	(struct usb_desc_header *) &cdc_acm_desc_##n.iad,			\
1271 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0,			\
1272 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0_header,		\
1273 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0_cm,			\
1274 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0_acm,			\
1275 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0_union,			\
1276 	(struct usb_desc_header *) &cdc_acm_desc_##n.if0_hs_int_ep,		\
1277 	(struct usb_desc_header *) &cdc_acm_desc_##n.if1,			\
1278 	(struct usb_desc_header *) &cdc_acm_desc_##n.if1_hs_in_ep,		\
1279 	(struct usb_desc_header *) &cdc_acm_desc_##n.if1_hs_out_ep,		\
1280 	(struct usb_desc_header *) &cdc_acm_desc_##n.nil_desc,			\
1281 }
1282 
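/*
 * Per-instance device definition. An illustrative overlay that instantiates
 * this class on a device controller node (the &zephyr_udc0 label and node
 * name are board and application specific):
 *
 *	&zephyr_udc0 {
 *		cdc_acm_uart0: cdc_acm_uart0 {
 *			compatible = "zephyr,cdc-acm-uart";
 *		};
 *	};
 *
 * The BUILD_ASSERT below enforces that each node is a child of a USB device
 * controller.
 */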
1283 #define USBD_CDC_ACM_DT_DEVICE_DEFINE(n)					\
1284 	BUILD_ASSERT(DT_INST_ON_BUS(n, usb),					\
1285 		     "node " DT_NODE_PATH(DT_DRV_INST(n))			\
1286 		     " is not assigned to a USB device controller");		\
1287 										\
1288 	CDC_ACM_DEFINE_DESCRIPTOR(n);						\
1289 										\
1290 	USBD_DEFINE_CLASS(cdc_acm_##n,						\
1291 			  &usbd_cdc_acm_api,					\
1292 			  (void *)DEVICE_DT_GET(DT_DRV_INST(n)), NULL);		\
1293 										\
1294 	IF_ENABLED(DT_INST_NODE_HAS_PROP(n, label), (				\
1295 	USBD_DESC_STRING_DEFINE(cdc_acm_if_desc_data_##n,			\
1296 				DT_INST_PROP(n, label),				\
1297 				USBD_DUT_STRING_INTERFACE);			\
1298 	))									\
1299 										\
1300 	RING_BUF_DECLARE(cdc_acm_rb_rx_##n, DT_INST_PROP(n, rx_fifo_size));	\
1301 	RING_BUF_DECLARE(cdc_acm_rb_tx_##n, DT_INST_PROP(n, tx_fifo_size));	\
1302 										\
1303 	static const struct cdc_acm_uart_config uart_config_##n = {		\
1304 		.c_data = &cdc_acm_##n,						\
1305 		IF_ENABLED(DT_INST_NODE_HAS_PROP(n, label), (			\
1306 		.if_desc_data = &cdc_acm_if_desc_data_##n,			\
1307 		))								\
1308 		.desc = &cdc_acm_desc_##n,					\
1309 		.fs_desc = cdc_acm_fs_desc_##n,					\
1310 		.hs_desc = cdc_acm_hs_desc_##n,					\
1311 	};									\
1312 										\
1313 	static struct cdc_acm_uart_data uart_data_##n = {			\
1314 		.dev = DEVICE_DT_GET(DT_DRV_INST(n)),				\
1315 		.line_coding = CDC_ACM_DEFAULT_LINECODING,			\
1316 		.rx_fifo.rb = &cdc_acm_rb_rx_##n,				\
1317 		.tx_fifo.rb = &cdc_acm_rb_tx_##n,				\
1318 		.flow_ctrl = DT_INST_PROP(n, hw_flow_control),			\
1319 		.notif_sem = Z_SEM_INITIALIZER(uart_data_##n.notif_sem, 0, 1),	\
1320 	};									\
1321 										\
1322 	DEVICE_DT_INST_DEFINE(n, usbd_cdc_acm_preinit, NULL,			\
1323 		&uart_data_##n, &uart_config_##n,				\
1324 		PRE_KERNEL_1, CONFIG_SERIAL_INIT_PRIORITY,			\
1325 		&cdc_acm_uart_api);
1326 
1327 DT_INST_FOREACH_STATUS_OKAY(USBD_CDC_ACM_DT_DEVICE_DEFINE);
1328 
1329 SYS_INIT(usbd_cdc_acm_init_wq, POST_KERNEL, CONFIG_KERNEL_INIT_PRIORITY_DEFAULT);
1330