Lines Matching refs:reloc

970 	struct radeon_bo_list *reloc;  in r600_cs_check_reg()  local
1015 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r600_cs_check_reg()
1021 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1033 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1042 if (reloc->tiling_flags & RADEON_TILING_MACRO) { in r600_cs_check_reg()
1075 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1083 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1084 track->vgt_strmout_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1085 track->vgt_strmout_bo_mc[tmp] = reloc->gpu_offset; in r600_cs_check_reg()
1098 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1104 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1134 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1141 if (reloc->tiling_flags & RADEON_TILING_MACRO) { in r600_cs_check_reg()
1144 } else if (reloc->tiling_flags & RADEON_TILING_MICRO) { in r600_cs_check_reg()
1206 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1211 track->cb_color_frag_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1213 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1237 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1242 track->cb_color_tile_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1244 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1272 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1280 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1282 track->cb_color_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1283 track->cb_color_bo_mc[tmp] = reloc->gpu_offset; in r600_cs_check_reg()
1287 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1294 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1295 track->db_bo = reloc->robj; in r600_cs_check_reg()
1296 track->db_bo_mc = reloc->gpu_offset; in r600_cs_check_reg()
1300 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1307 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1308 track->htile_bo = reloc->robj; in r600_cs_check_reg()
1370 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1376 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1379 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1385 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
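The references above all follow one recurring pattern in r600_cs_check_reg(): fetch the next relocation with radeon_cs_packet_next_reloc(), then patch the IB dword with (u32)((reloc->gpu_offset >> 8) & 0xffffffff) and record reloc->robj / reloc->gpu_offset in the state tracker. The sketch below is a standalone model of that arithmetic only, not driver code; the >> 8 reflects that these base-address registers appear to take the address in 256-byte units, and all types and names here are stand-ins.

/*
 * Standalone model (not driver code) of the patching arithmetic that repeats
 * through r600_cs_check_reg(): the checker adds
 * (u32)((reloc->gpu_offset >> 8) & 0xffffffff) to the dword userspace
 * already placed in the IB. All names below are stand-ins.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

/* Stand-in for struct radeon_bo_list: only the field this pattern uses. */
struct fake_reloc {
	u64 gpu_offset;   /* GPU address of the relocated buffer object */
};

/* Patch one IB dword the way the checker does for base-address registers. */
static void patch_base_reg(u32 *ib, unsigned idx, const struct fake_reloc *reloc)
{
	/* The register expects the address in 256-byte units, i.e. addr >> 8. */
	ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
}

int main(void)
{
	struct fake_reloc reloc = { .gpu_offset = 0x100000ULL };  /* 1 MiB */
	u32 ib[4] = { 0 };       /* userspace wrote 0 (offset within the BO) */

	patch_base_reg(ib, 0, &reloc);
	printf("patched dword: 0x%08x\n", ib[0]);  /* 0x00001000 = 1 MiB >> 8 */
	return 0;
}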
1627 struct radeon_bo_list *reloc; in r600_packet3_check() local
1665 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1671 offset = reloc->gpu_offset + in r600_packet3_check()
1706 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1712 offset = reloc->gpu_offset + in r600_packet3_check()
1758 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1764 offset = reloc->gpu_offset + in r600_packet3_check()
1795 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1804 offset = reloc->gpu_offset + tmp; in r600_packet3_check()
1806 if ((tmp + size) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
1808 tmp + size, radeon_bo_size(reloc->robj)); in r600_packet3_check()
1825 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1834 offset = reloc->gpu_offset + tmp; in r600_packet3_check()
1836 if ((tmp + size) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
1838 tmp + size, radeon_bo_size(reloc->robj)); in r600_packet3_check()
1855 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1860 ib[idx+2] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_packet3_check()
1871 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1876 offset = reloc->gpu_offset + in r600_packet3_check()
1892 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1898 offset = reloc->gpu_offset + in r600_packet3_check()
1958 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1963 base_offset = (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_packet3_check()
1965 if (reloc->tiling_flags & RADEON_TILING_MACRO) in r600_packet3_check()
1967 else if (reloc->tiling_flags & RADEON_TILING_MICRO) in r600_packet3_check()
1970 texture = reloc->robj; in r600_packet3_check()
1972 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
1977 mip_offset = (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_packet3_check()
1978 mipmap = reloc->robj; in r600_packet3_check()
1983 reloc->tiling_flags); in r600_packet3_check()
1993 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2000 if (p->rdev && (size + offset) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2003 size + offset, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2004 ib[idx+1+(i*7)+1] = radeon_bo_size(reloc->robj) - offset; in r600_packet3_check()
2007 offset64 = reloc->gpu_offset + offset; in r600_packet3_check()
2094 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2100 if (reloc->robj != track->vgt_strmout_bo[idx_value]) { in r600_packet3_check()
2112 if ((offset + 4) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2114 offset + 4, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2117 ib[idx+1] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_packet3_check()
2138 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2145 if ((offset + 4) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2147 offset + 4, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2150 offset += reloc->gpu_offset; in r600_packet3_check()
2157 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2164 if ((offset + 4) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2166 offset + 4, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2169 offset += reloc->gpu_offset; in r600_packet3_check()
2182 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2193 if ((offset + 8) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2195 offset + 8, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2198 offset += reloc->gpu_offset; in r600_packet3_check()
2211 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2218 if ((offset + 4) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2220 offset + 4, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2223 offset += reloc->gpu_offset; in r600_packet3_check()
2235 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_packet3_check()
2242 if ((offset + 4) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
2244 offset + 4, radeon_bo_size(reloc->robj)); in r600_packet3_check()
2247 offset += reloc->gpu_offset; in r600_packet3_check()
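The r600_packet3_check() references show a second pattern: the offset carried in the packet is first bounds-checked against radeon_bo_size(reloc->robj) and only then rebased with offset += reloc->gpu_offset before being written back into the IB. The following is a standalone model of that flow under stated assumptions: fake_bo_size() stands in for radeon_bo_size(), and the low/high dword write-back is illustrative rather than the driver's exact masking.

/*
 * Standalone model (not driver code) of the bounds-check-then-rebase pattern
 * seen in r600_packet3_check(). Sizes, masks, and names are stand-ins.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

struct fake_bo    { u64 size; };
struct fake_reloc { struct fake_bo *robj; u64 gpu_offset; };

/* Stand-in for radeon_bo_size(). */
static u64 fake_bo_size(const struct fake_bo *bo) { return bo->size; }

/* Validate a 4-byte access at the packet's offset, then rebase it. */
static int check_and_rebase(u32 *ib, unsigned idx, const struct fake_reloc *reloc)
{
	u64 offset = ib[idx + 1] | ((u64)ib[idx + 2] << 32);

	if (offset + 4 > fake_bo_size(reloc->robj)) {
		fprintf(stderr, "offset 0x%llx larger than buffer %llu\n",
			(unsigned long long)(offset + 4),
			(unsigned long long)fake_bo_size(reloc->robj));
		return -1;                     /* reject the command stream */
	}
	offset += reloc->gpu_offset;           /* make it a GPU address */
	ib[idx + 1] = (u32)offset;             /* low dword back into the IB */
	ib[idx + 2] = (u32)(offset >> 32);     /* high dword */
	return 0;
}

int main(void)
{
	struct fake_bo bo = { .size = 4096 };
	struct fake_reloc reloc = { .robj = &bo, .gpu_offset = 0x200000ULL };
	u32 ib[3] = { 0 /* header */, 0x100 /* offset lo */, 0 /* offset hi */ };

	if (check_and_rebase(ib, 0, &reloc) == 0)
		printf("rebased: lo=0x%08x hi=0x%08x\n", ib[1], ib[2]);
	return 0;
}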