
Searched refs:ib (Results 1 – 25 of 112) sorted by relevance


/Linux-v4.19/drivers/gpu/drm/radeon/
radeon_ib.c
56 struct radeon_ib *ib, struct radeon_vm *vm, in radeon_ib_get() argument
61 r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, &ib->sa_bo, size, 256); in radeon_ib_get()
67 radeon_sync_create(&ib->sync); in radeon_ib_get()
69 ib->ring = ring; in radeon_ib_get()
70 ib->fence = NULL; in radeon_ib_get()
71 ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo); in radeon_ib_get()
72 ib->vm = vm; in radeon_ib_get()
77 ib->gpu_addr = ib->sa_bo->soffset + RADEON_VA_IB_OFFSET; in radeon_ib_get()
79 ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo); in radeon_ib_get()
81 ib->is_const_ib = false; in radeon_ib_get()
[all …]
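
The radeon_ib_get() hit above records two views of the same suballocated buffer: a CPU pointer (radeon_sa_bo_cpu_addr) for writing command words and a GPU address (radeon_sa_bo_gpu_addr) for the ring to fetch from. Below is a minimal userspace sketch of that bookkeeping only; fake_ib and the identity CPU-to-"GPU" address mapping are invented for illustration and are not the driver's API.

/* Toy stand-in for what radeon_ib_get() records: one allocation viewed both
 * as a CPU pointer (for writing commands) and a "GPU address" (what the ring
 * is told to fetch). Names and the address mapping are illustrative only. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct fake_ib {
        uint32_t *ptr;      /* CPU view, like radeon_sa_bo_cpu_addr() */
        uint64_t  gpu_addr; /* device view, like radeon_sa_bo_gpu_addr() */
        uint32_t  length_dw;
};

int main(void)
{
        struct fake_ib ib;

        ib.ptr = calloc(64, sizeof(*ib.ptr));
        if (!ib.ptr)
                return 1;
        /* Pretend the buffer sits at the same offset in the device's space. */
        ib.gpu_addr = (uint64_t)(uintptr_t)ib.ptr;
        ib.length_dw = 0;

        ib.ptr[ib.length_dw++] = 0xDEADBEEF; /* commands go through the CPU view */
        printf("gpu_addr=0x%llx dwords=%u\n",
               (unsigned long long)ib.gpu_addr, ib.length_dw);
        free(ib.ptr);
        return 0;
}
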
si_dma.c
70 struct radeon_ib *ib, in si_dma_vm_copy_pages() argument
79 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in si_dma_vm_copy_pages()
81 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
82 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pages()
83 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pages()
84 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in si_dma_vm_copy_pages()
106 struct radeon_ib *ib, in si_dma_vm_write_pages() argument
120 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
121 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
122 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_write_pages()
[all …]
ni_dma.c
123 struct radeon_ib *ib) in cayman_dma_ring_ib_execute() argument
125 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_dma_ring_ib_execute()
126 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_dma_ring_ib_execute()
145 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute()
146 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
316 struct radeon_ib *ib, in cayman_dma_vm_copy_pages() argument
327 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in cayman_dma_vm_copy_pages()
329 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
330 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cayman_dma_vm_copy_pages()
331 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_copy_pages()
[all …]
radeon_vce.c
350 struct radeon_ib ib; in radeon_vce_get_create_msg() local
354 r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4); in radeon_vce_get_create_msg()
360 dummy = ib.gpu_addr + 1024; in radeon_vce_get_create_msg()
363 ib.length_dw = 0; in radeon_vce_get_create_msg()
364 ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c); /* len */ in radeon_vce_get_create_msg()
365 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); /* session cmd */ in radeon_vce_get_create_msg()
366 ib.ptr[ib.length_dw++] = cpu_to_le32(handle); in radeon_vce_get_create_msg()
368 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000030); /* len */ in radeon_vce_get_create_msg()
369 ib.ptr[ib.length_dw++] = cpu_to_le32(0x01000001); /* create cmd */ in radeon_vce_get_create_msg()
370 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000000); in radeon_vce_get_create_msg()
[all …]
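
The radeon_vce.c hit above (and the amdgpu UVD/VCE/VCN hits further down) all build their command stream the same way: reset ib.length_dw to 0 and append 32-bit words with ib.ptr[ib.length_dw++]. A small self-contained sketch of that append pattern, with fake_ib and ib_emit invented for illustration and the cpu_to_le32 conversion omitted:

/* Illustrative userspace sketch of the ib.ptr[ib.length_dw++] pattern seen
 * above; fake_ib and ib_emit are invented names, not kernel APIs. */
#include <stdint.h>
#include <stdio.h>

struct fake_ib {
        uint32_t ptr[64];   /* command words (the kernel maps GPU memory here) */
        uint32_t length_dw; /* number of dwords written so far */
};

static void ib_emit(struct fake_ib *ib, uint32_t dw)
{
        ib->ptr[ib->length_dw++] = dw;
}

int main(void)
{
        struct fake_ib ib = { .length_dw = 0 };
        uint32_t handle = 1;

        ib_emit(&ib, 0x0000000c); /* len */
        ib_emit(&ib, 0x00000001); /* session cmd */
        ib_emit(&ib, handle);

        printf("built %u dwords\n", ib.length_dw);
        return 0;
}
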
cik_sdma.c
134 struct radeon_ib *ib) in cik_sdma_ring_ib_execute() argument
136 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cik_sdma_ring_ib_execute()
137 u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf; in cik_sdma_ring_ib_execute()
155 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_ib_execute()
156 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr)); in cik_sdma_ring_ib_execute()
157 radeon_ring_write(ring, ib->length_dw); in cik_sdma_ring_ib_execute()
704 struct radeon_ib ib; in cik_sdma_ib_test() local
721 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in cik_sdma_ib_test()
727 ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0); in cik_sdma_ib_test()
728 ib.ptr[1] = lower_32_bits(gpu_addr); in cik_sdma_ib_test()
[all …]
radeon_vm.c
360 struct radeon_ib *ib, in radeon_vm_set_pages() argument
369 radeon_asic_vm_copy_pages(rdev, ib, pe, src, count); in radeon_vm_set_pages()
372 radeon_asic_vm_write_pages(rdev, ib, pe, addr, in radeon_vm_set_pages()
376 radeon_asic_vm_set_pages(rdev, ib, pe, addr, in radeon_vm_set_pages()
391 struct radeon_ib ib; in radeon_vm_clear_bo() local
407 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256); in radeon_vm_clear_bo()
411 ib.length_dw = 0; in radeon_vm_clear_bo()
413 radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0); in radeon_vm_clear_bo()
414 radeon_asic_vm_pad_ib(rdev, &ib); in radeon_vm_clear_bo()
415 WARN_ON(ib.length_dw > 64); in radeon_vm_clear_bo()
[all …]
evergreen_cs.c
451 uint32_t *ib = p->ib.ptr; in evergreen_cs_track_validate_cb() local
473 ib[track->cb_color_slice_idx[id]] = slice; in evergreen_cs_track_validate_cb()
1098 u32 tmp, *ib; in evergreen_cs_handle_reg() local
1101 ib = p->ib.ptr; in evergreen_cs_handle_reg()
1149 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in evergreen_cs_handle_reg()
1178 ib[idx] &= ~Z_ARRAY_MODE(0xf); in evergreen_cs_handle_reg()
1180 ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags)); in evergreen_cs_handle_reg()
1188 ib[idx] |= DB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks)); in evergreen_cs_handle_reg()
1189 ib[idx] |= DB_TILE_SPLIT(tile_split) | in evergreen_cs_handle_reg()
1221 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in evergreen_cs_handle_reg()
[all …]
r600_cs.c
357 volatile u32 *ib = p->ib.ptr; in r600_cs_track_validate_cb() local
468 ib[track->cb_color_size_idx[i]] = tmp; in r600_cs_track_validate_cb()
527 volatile u32 *ib = p->ib.ptr; in r600_cs_track_validate_db() local
565 ib[track->db_depth_size_idx] = S_028000_SLICE_TILE_MAX(tmp - 1) | (track->db_depth_size & 0x3FF); in r600_cs_track_validate_db()
835 volatile uint32_t *ib; in r600_cs_common_vline_parse() local
837 ib = p->ib.ptr; in r600_cs_common_vline_parse()
900 ib[h_idx + 2] = PACKET2(0); in r600_cs_common_vline_parse()
901 ib[h_idx + 3] = PACKET2(0); in r600_cs_common_vline_parse()
902 ib[h_idx + 4] = PACKET2(0); in r600_cs_common_vline_parse()
903 ib[h_idx + 5] = PACKET2(0); in r600_cs_common_vline_parse()
[all …]
r600_dma.c
339 struct radeon_ib ib; in r600_dma_ib_test() local
353 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in r600_dma_ib_test()
359 ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1); in r600_dma_ib_test()
360 ib.ptr[1] = lower_32_bits(gpu_addr); in r600_dma_ib_test()
361 ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff; in r600_dma_ib_test()
362 ib.ptr[3] = 0xDEADBEEF; in r600_dma_ib_test()
363 ib.length_dw = 4; in r600_dma_ib_test()
365 r = radeon_ib_schedule(rdev, &ib, NULL, false); in r600_dma_ib_test()
367 radeon_ib_free(rdev, &ib); in r600_dma_ib_test()
371 r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies( in r600_dma_ib_test()
[all …]
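
r600_dma_ib_test() above shows the recurring IB smoke test: build a short DMA write packet that stores 0xDEADBEEF at a scratch GPU address, schedule the IB, wait on its fence, then check that the value landed. Below is a simplified analogue with the submit and fence steps emulated in place; the FAKE_DMA_PACKET layout is invented and does not match the real packet encoding.

/* Simplified analogue of the r600_dma_ib_test() flow shown above: build a
 * 4-dword "write one value" packet, emulate its execution, then check the
 * destination. Packet layout and names are illustrative, not the hardware's. */
#include <stdint.h>
#include <stdio.h>

#define FAKE_DMA_PACKET_WRITE 0x2u
#define FAKE_DMA_PACKET(op, count) (((op) << 28) | ((count) & 0xffffu))

int main(void)
{
        uint32_t ib[4];
        uint32_t dest = 0;                       /* stands in for the VRAM scratch dword */
        uint64_t gpu_addr = (uint64_t)(uintptr_t)&dest;

        ib[0] = FAKE_DMA_PACKET(FAKE_DMA_PACKET_WRITE, 1);
        ib[1] = (uint32_t)gpu_addr;              /* lower_32_bits(gpu_addr) */
        ib[2] = (uint32_t)(gpu_addr >> 32) & 0xff;
        ib[3] = 0xDEADBEEF;

        /* In the kernel this is radeon_ib_schedule() plus a fence wait; here
         * we just perform the write the packet describes. */
        dest = ib[3];

        printf("%s\n", dest == 0xDEADBEEF ? "ib test succeeded" : "ib test failed");
        return 0;
}
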
/Linux-v4.19/arch/s390/include/asm/
idals.h
120 struct idal_buffer *ib; in idal_buffer_alloc() local
125 ib = kmalloc(sizeof(struct idal_buffer) + nr_ptrs*sizeof(void *), in idal_buffer_alloc()
127 if (ib == NULL) in idal_buffer_alloc()
129 ib->size = size; in idal_buffer_alloc()
130 ib->page_order = page_order; in idal_buffer_alloc()
133 ib->data[i] = ib->data[i-1] + IDA_BLOCK_SIZE; in idal_buffer_alloc()
136 ib->data[i] = (void *) in idal_buffer_alloc()
138 if (ib->data[i] != NULL) in idal_buffer_alloc()
143 free_pages((unsigned long) ib->data[i], in idal_buffer_alloc()
144 ib->page_order); in idal_buffer_alloc()
[all …]
/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/
amdgpu_vcn.c
292 struct amdgpu_ib *ib; in amdgpu_vcn_dec_send_msg() local
300 ib = &job->ibs[0]; in amdgpu_vcn_dec_send_msg()
302 ib->ptr[0] = PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA0), 0); in amdgpu_vcn_dec_send_msg()
303 ib->ptr[1] = addr; in amdgpu_vcn_dec_send_msg()
304 ib->ptr[2] = PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA1), 0); in amdgpu_vcn_dec_send_msg()
305 ib->ptr[3] = addr >> 32; in amdgpu_vcn_dec_send_msg()
306 ib->ptr[4] = PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_CMD), 0); in amdgpu_vcn_dec_send_msg()
307 ib->ptr[5] = 0; in amdgpu_vcn_dec_send_msg()
309 ib->ptr[i] = PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_NO_OP), 0); in amdgpu_vcn_dec_send_msg()
310 ib->ptr[i+1] = 0; in amdgpu_vcn_dec_send_msg()
[all …]
amdgpu_vce.c
436 struct amdgpu_ib *ib; in amdgpu_vce_get_create_msg() local
445 ib = &job->ibs[0]; in amdgpu_vce_get_create_msg()
447 dummy = ib->gpu_addr + 1024; in amdgpu_vce_get_create_msg()
450 ib->length_dw = 0; in amdgpu_vce_get_create_msg()
451 ib->ptr[ib->length_dw++] = 0x0000000c; /* len */ in amdgpu_vce_get_create_msg()
452 ib->ptr[ib->length_dw++] = 0x00000001; /* session cmd */ in amdgpu_vce_get_create_msg()
453 ib->ptr[ib->length_dw++] = handle; in amdgpu_vce_get_create_msg()
456 ib->ptr[ib->length_dw++] = 0x00000040; /* len */ in amdgpu_vce_get_create_msg()
458 ib->ptr[ib->length_dw++] = 0x00000030; /* len */ in amdgpu_vce_get_create_msg()
459 ib->ptr[ib->length_dw++] = 0x01000001; /* create cmd */ in amdgpu_vce_get_create_msg()
[all …]
si_dma.c
64 struct amdgpu_ib *ib, in si_dma_ring_emit_ib() argument
73 amdgpu_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in si_dma_ring_emit_ib()
74 amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in si_dma_ring_emit_ib()
264 struct amdgpu_ib ib; in si_dma_ring_test_ib() local
280 memset(&ib, 0, sizeof(ib)); in si_dma_ring_test_ib()
281 r = amdgpu_ib_get(adev, NULL, 256, &ib); in si_dma_ring_test_ib()
287 ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, 1); in si_dma_ring_test_ib()
288 ib.ptr[1] = lower_32_bits(gpu_addr); in si_dma_ring_test_ib()
289 ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff; in si_dma_ring_test_ib()
290 ib.ptr[3] = 0xDEADBEEF; in si_dma_ring_test_ib()
[all …]
sdma_v2_4.c
248 struct amdgpu_ib *ib, in sdma_v2_4_ring_emit_ib() argument
257 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v2_4_ring_emit_ib()
258 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v2_4_ring_emit_ib()
259 amdgpu_ring_write(ring, ib->length_dw); in sdma_v2_4_ring_emit_ib()
649 struct amdgpu_ib ib; in sdma_v2_4_ring_test_ib() local
665 memset(&ib, 0, sizeof(ib)); in sdma_v2_4_ring_test_ib()
666 r = amdgpu_ib_get(adev, NULL, 256, &ib); in sdma_v2_4_ring_test_ib()
672 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v2_4_ring_test_ib()
674 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v2_4_ring_test_ib()
675 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v2_4_ring_test_ib()
[all …]
cik_sdma.c
221 struct amdgpu_ib *ib, in cik_sdma_ring_emit_ib() argument
230 amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_emit_ib()
231 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xffffffff); in cik_sdma_ring_emit_ib()
232 amdgpu_ring_write(ring, ib->length_dw); in cik_sdma_ring_emit_ib()
673 struct amdgpu_ib ib; in cik_sdma_ring_test_ib() local
689 memset(&ib, 0, sizeof(ib)); in cik_sdma_ring_test_ib()
690 r = amdgpu_ib_get(adev, NULL, 256, &ib); in cik_sdma_ring_test_ib()
696 ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, in cik_sdma_ring_test_ib()
698 ib.ptr[1] = lower_32_bits(gpu_addr); in cik_sdma_ring_test_ib()
699 ib.ptr[2] = upper_32_bits(gpu_addr); in cik_sdma_ring_test_ib()
[all …]
sdma_v3_0.c
423 struct amdgpu_ib *ib, in sdma_v3_0_ring_emit_ib() argument
432 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v3_0_ring_emit_ib()
433 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v3_0_ring_emit_ib()
434 amdgpu_ring_write(ring, ib->length_dw); in sdma_v3_0_ring_emit_ib()
922 struct amdgpu_ib ib; in sdma_v3_0_ring_test_ib() local
938 memset(&ib, 0, sizeof(ib)); in sdma_v3_0_ring_test_ib()
939 r = amdgpu_ib_get(adev, NULL, 256, &ib); in sdma_v3_0_ring_test_ib()
945 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v3_0_ring_test_ib()
947 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v3_0_ring_test_ib()
948 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v3_0_ring_test_ib()
[all …]
sdma_v4_0.c
381 struct amdgpu_ib *ib, in sdma_v4_0_ring_emit_ib() argument
390 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v4_0_ring_emit_ib()
391 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v4_0_ring_emit_ib()
392 amdgpu_ring_write(ring, ib->length_dw); in sdma_v4_0_ring_emit_ib()
984 struct amdgpu_ib ib; in sdma_v4_0_ring_test_ib() local
1000 memset(&ib, 0, sizeof(ib)); in sdma_v4_0_ring_test_ib()
1001 r = amdgpu_ib_get(adev, NULL, 256, &ib); in sdma_v4_0_ring_test_ib()
1007 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v4_0_ring_test_ib()
1009 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v4_0_ring_test_ib()
1010 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v4_0_ring_test_ib()
[all …]
uvd_v6_0.c
219 struct amdgpu_ib *ib; in uvd_v6_0_enc_get_create_msg() local
228 ib = &job->ibs[0]; in uvd_v6_0_enc_get_create_msg()
229 dummy = ib->gpu_addr + 1024; in uvd_v6_0_enc_get_create_msg()
231 ib->length_dw = 0; in uvd_v6_0_enc_get_create_msg()
232 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v6_0_enc_get_create_msg()
233 ib->ptr[ib->length_dw++] = 0x00000001; /* session info */ in uvd_v6_0_enc_get_create_msg()
234 ib->ptr[ib->length_dw++] = handle; in uvd_v6_0_enc_get_create_msg()
235 ib->ptr[ib->length_dw++] = 0x00010000; in uvd_v6_0_enc_get_create_msg()
236 ib->ptr[ib->length_dw++] = upper_32_bits(dummy); in uvd_v6_0_enc_get_create_msg()
237 ib->ptr[ib->length_dw++] = dummy; in uvd_v6_0_enc_get_create_msg()
[all …]
uvd_v7_0.c
227 struct amdgpu_ib *ib; in uvd_v7_0_enc_get_create_msg() local
236 ib = &job->ibs[0]; in uvd_v7_0_enc_get_create_msg()
237 dummy = ib->gpu_addr + 1024; in uvd_v7_0_enc_get_create_msg()
239 ib->length_dw = 0; in uvd_v7_0_enc_get_create_msg()
240 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v7_0_enc_get_create_msg()
241 ib->ptr[ib->length_dw++] = 0x00000001; /* session info */ in uvd_v7_0_enc_get_create_msg()
242 ib->ptr[ib->length_dw++] = handle; in uvd_v7_0_enc_get_create_msg()
243 ib->ptr[ib->length_dw++] = 0x00000000; in uvd_v7_0_enc_get_create_msg()
244 ib->ptr[ib->length_dw++] = upper_32_bits(dummy); in uvd_v7_0_enc_get_create_msg()
245 ib->ptr[ib->length_dw++] = dummy; in uvd_v7_0_enc_get_create_msg()
[all …]
/Linux-v4.19/drivers/net/ethernet/amd/
7990.c
99 t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
100 ib->brx_ring[t].length, \
101 ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
105 t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
106 ib->btx_ring[t].length, \
107 ib->btx_ring[t].misc, ib->btx_ring[t].tmd1_bits); \
139 volatile struct lance_init_block *ib = lp->init_block; in lance_init_ring() local
149 ib->mode = LE_MO_PROM; /* normal, enable Tx & Rx */ in lance_init_ring()
162 ib->phys_addr[0] = dev->dev_addr[1]; in lance_init_ring()
163 ib->phys_addr[1] = dev->dev_addr[0]; in lance_init_ring()
[all …]
sunlance.c
323 struct lance_init_block *ib = lp->init_block_mem; in lance_init_ring_dvma() local
336 ib->phys_addr [0] = dev->dev_addr [1]; in lance_init_ring_dvma()
337 ib->phys_addr [1] = dev->dev_addr [0]; in lance_init_ring_dvma()
338 ib->phys_addr [2] = dev->dev_addr [3]; in lance_init_ring_dvma()
339 ib->phys_addr [3] = dev->dev_addr [2]; in lance_init_ring_dvma()
340 ib->phys_addr [4] = dev->dev_addr [5]; in lance_init_ring_dvma()
341 ib->phys_addr [5] = dev->dev_addr [4]; in lance_init_ring_dvma()
346 ib->btx_ring [i].tmd0 = leptr; in lance_init_ring_dvma()
347 ib->btx_ring [i].tmd1_hadr = leptr >> 16; in lance_init_ring_dvma()
348 ib->btx_ring [i].tmd1_bits = 0; in lance_init_ring_dvma()
[all …]
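
The lance_init_ring() hits above (7990.c, sunlance.c, and a2065.c below) store the MAC address into the init block with each byte pair swapped (phys_addr[0] = dev_addr[1], phys_addr[1] = dev_addr[0], and so on), presumably so each pair reads back as one 16-bit word in the order the LANCE expects; that expectation is an assumption here. A standalone sketch of just that swap:

/* Sketch of the byte swapping visible in lance_init_ring() above: the 6-byte
 * station address is repacked so each pair lands as one 16-bit word in the
 * byte order the LANCE presumably expects (an assumption, for illustration). */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
        const uint8_t dev_addr[6] = { 0x02, 0x60, 0x8c, 0x12, 0x34, 0x56 };
        uint8_t phys_addr[6];

        for (int i = 0; i < 6; i += 2) {
                phys_addr[i]     = dev_addr[i + 1];
                phys_addr[i + 1] = dev_addr[i];
        }

        for (int i = 0; i < 6; i++)
                printf("%02x%c", phys_addr[i], i == 5 ? '\n' : ' ');
        return 0;
}
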
a2065.c
153 volatile struct lance_init_block *ib = lp->init_block; in lance_init_ring() local
164 ib->mode = 0; in lance_init_ring()
169 ib->phys_addr[0] = dev->dev_addr[1]; in lance_init_ring()
170 ib->phys_addr[1] = dev->dev_addr[0]; in lance_init_ring()
171 ib->phys_addr[2] = dev->dev_addr[3]; in lance_init_ring()
172 ib->phys_addr[3] = dev->dev_addr[2]; in lance_init_ring()
173 ib->phys_addr[4] = dev->dev_addr[5]; in lance_init_ring()
174 ib->phys_addr[5] = dev->dev_addr[4]; in lance_init_ring()
180 ib->btx_ring[i].tmd0 = leptr; in lance_init_ring()
181 ib->btx_ring[i].tmd1_hadr = leptr >> 16; in lance_init_ring()
[all …]
declance.c
234 #define lib_ptr(ib, rt, type) \ argument
235 ((volatile u16 *)((u8 *)(ib) + lib_off(rt, type)))
452 volatile u16 *ib = (volatile u16 *)dev->mem_start; in lance_init_ring() local
464 *lib_ptr(ib, phys_addr[0], lp->type) = (dev->dev_addr[1] << 8) | in lance_init_ring()
466 *lib_ptr(ib, phys_addr[1], lp->type) = (dev->dev_addr[3] << 8) | in lance_init_ring()
468 *lib_ptr(ib, phys_addr[2], lp->type) = (dev->dev_addr[5] << 8) | in lance_init_ring()
474 *lib_ptr(ib, rx_len, lp->type) = (LANCE_LOG_RX_BUFFERS << 13) | in lance_init_ring()
476 *lib_ptr(ib, rx_ptr, lp->type) = leptr; in lance_init_ring()
483 *lib_ptr(ib, tx_len, lp->type) = (LANCE_LOG_TX_BUFFERS << 13) | in lance_init_ring()
485 *lib_ptr(ib, tx_ptr, lp->type) = leptr; in lance_init_ring()
[all …]
/Linux-v4.19/include/rdma/
ib_sa.h
211 struct sa_path_rec_ib ib; member
244 static inline void path_conv_opa_to_ib(struct sa_path_rec *ib, in path_conv_opa_to_ib() argument
252 ib->dgid.global.interface_id in path_conv_opa_to_ib()
254 ib->dgid.global.subnet_prefix in path_conv_opa_to_ib()
256 ib->sgid.global.interface_id in path_conv_opa_to_ib()
258 ib->dgid.global.subnet_prefix in path_conv_opa_to_ib()
260 ib->ib.dlid = 0; in path_conv_opa_to_ib()
262 ib->ib.slid = 0; in path_conv_opa_to_ib()
264 ib->ib.dlid = htons(ntohl(opa->opa.dlid)); in path_conv_opa_to_ib()
265 ib->ib.slid = htons(ntohl(opa->opa.slid)); in path_conv_opa_to_ib()
[all …]
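
path_conv_opa_to_ib() above narrows 32-bit OPA LIDs into 16-bit IB LIDs while staying in network byte order, via htons(ntohl(...)). A standalone sketch of that conversion with a made-up LID value:

/* Standalone sketch of the LID narrowing seen in path_conv_opa_to_ib() above:
 * a 32-bit big-endian OPA LID becomes a 16-bit big-endian IB LID. */
#include <arpa/inet.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint32_t opa_dlid_be = htonl(0x00000c03);          /* 32-bit LID, network order */
        uint16_t ib_dlid_be  = htons((uint16_t)ntohl(opa_dlid_be));

        printf("ib dlid = 0x%04x\n", ntohs(ib_dlid_be));   /* prints 0x0c03 */
        return 0;
}
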
/Linux-v4.19/drivers/infiniband/hw/mlx4/
ah.c
49 ah->av.ib.port_pd = cpu_to_be32(to_mpd(pd)->pdn | in create_ib_ah()
51 ah->av.ib.g_slid = rdma_ah_get_path_bits(ah_attr); in create_ib_ah()
52 ah->av.ib.sl_tclass_flowlabel = in create_ib_ah()
57 ah->av.ib.g_slid |= 0x80; in create_ib_ah()
58 ah->av.ib.gid_index = grh->sgid_index; in create_ib_ah()
59 ah->av.ib.hop_limit = grh->hop_limit; in create_ib_ah()
60 ah->av.ib.sl_tclass_flowlabel |= in create_ib_ah()
63 memcpy(ah->av.ib.dgid, grh->dgid.raw, 16); in create_ib_ah()
66 ah->av.ib.dlid = cpu_to_be16(rdma_ah_get_dlid(ah_attr)); in create_ib_ah()
74 ah->av.ib.stat_rate = static_rate; in create_ib_ah()
[all …]
