Lines Matching refs:stream (all hits are in drivers/gpu/drm/i915/i915_perf.c)
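
Every reference below dereferences a member of struct i915_perf_stream, whose authoritative definition lives in i915_perf_types.h. As a reading aid, here is a condensed, non-authoritative sketch showing only the members the listed lines touch; other members are omitted and some types are approximate, so it will not compile outside the kernel tree:

/* Condensed sketch of struct i915_perf_stream; see i915_perf_types.h for
 * the full definition. Only members dereferenced in the listing below are
 * shown, and some types are approximate. */
struct i915_perf_stream {
	struct i915_perf *perf;			/* stream->perf->ops, ->i915, ratelimits */
	struct intel_uncore *uncore;		/* MMIO access for the OA unit */
	struct intel_engine_cs *engine;
	struct i915_gem_context *ctx;		/* non-NULL for single-context streams */
	struct intel_context *pinned_ctx;
	u32 specific_ctx_id;
	u32 specific_ctx_id_mask;
	bool enabled;
	bool hold_preemption;
	bool periodic;
	int period_exponent;
	u32 sample_flags;
	int sample_size;
	const struct i915_perf_stream_ops *ops;
	struct i915_oa_config *oa_config;
	struct llist_head oa_config_bos;
	struct i915_vma *noa_wait;
	struct hrtimer poll_check_timer;
	wait_queue_head_t poll_wq;
	bool pollin;
	u64 poll_oa_period;
	struct {
		struct i915_vma *vma;
		u8 *vaddr;
		u32 format;
		int format_size;
		spinlock_t ptr_lock;
		u32 head;
		u32 tail;
		u32 aging_tail;
		u64 aging_timestamp;
		u32 last_ctx_id;
	} oa_buffer;
};
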
414 static u32 gen12_oa_hw_tail_read(struct i915_perf_stream *stream) in gen12_oa_hw_tail_read() argument
416 struct intel_uncore *uncore = stream->uncore; in gen12_oa_hw_tail_read()
422 static u32 gen8_oa_hw_tail_read(struct i915_perf_stream *stream) in gen8_oa_hw_tail_read() argument
424 struct intel_uncore *uncore = stream->uncore; in gen8_oa_hw_tail_read()
429 static u32 gen7_oa_hw_tail_read(struct i915_perf_stream *stream) in gen7_oa_hw_tail_read() argument
431 struct intel_uncore *uncore = stream->uncore; in gen7_oa_hw_tail_read()
461 static bool oa_buffer_check_unlocked(struct i915_perf_stream *stream) in oa_buffer_check_unlocked() argument
463 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in oa_buffer_check_unlocked()
464 int report_size = stream->oa_buffer.format_size; in oa_buffer_check_unlocked()
474 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in oa_buffer_check_unlocked()
476 hw_tail = stream->perf->ops.oa_hw_tail_read(stream); in oa_buffer_check_unlocked()
485 if (hw_tail == stream->oa_buffer.aging_tail && in oa_buffer_check_unlocked()
486 (now - stream->oa_buffer.aging_timestamp) > OA_TAIL_MARGIN_NSEC) { in oa_buffer_check_unlocked()
491 stream->oa_buffer.tail = stream->oa_buffer.aging_tail; in oa_buffer_check_unlocked()
499 head = stream->oa_buffer.head - gtt_offset; in oa_buffer_check_unlocked()
500 aged_tail = stream->oa_buffer.tail - gtt_offset; in oa_buffer_check_unlocked()
517 u32 *report32 = (void *)(stream->oa_buffer.vaddr + tail); in oa_buffer_check_unlocked()
526 __ratelimit(&stream->perf->tail_pointer_race)) in oa_buffer_check_unlocked()
531 stream->oa_buffer.tail = gtt_offset + tail; in oa_buffer_check_unlocked()
532 stream->oa_buffer.aging_tail = gtt_offset + hw_tail; in oa_buffer_check_unlocked()
533 stream->oa_buffer.aging_timestamp = now; in oa_buffer_check_unlocked()
536 pollin = OA_TAKEN(stream->oa_buffer.tail - gtt_offset, in oa_buffer_check_unlocked()
537 stream->oa_buffer.head - gtt_offset) >= report_size; in oa_buffer_check_unlocked()
539 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in oa_buffer_check_unlocked()
559 static int append_oa_status(struct i915_perf_stream *stream, in append_oa_status() argument
595 static int append_oa_sample(struct i915_perf_stream *stream, in append_oa_sample() argument
601 int report_size = stream->oa_buffer.format_size; in append_oa_sample()
603 u32 sample_flags = stream->sample_flags; in append_oa_sample()
607 header.size = stream->sample_size; in append_oa_sample()
647 static int gen8_append_oa_reports(struct i915_perf_stream *stream, in gen8_append_oa_reports() argument
652 struct intel_uncore *uncore = stream->uncore; in gen8_append_oa_reports()
653 int report_size = stream->oa_buffer.format_size; in gen8_append_oa_reports()
654 u8 *oa_buf_base = stream->oa_buffer.vaddr; in gen8_append_oa_reports()
655 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in gen8_append_oa_reports()
663 if (drm_WARN_ON(&uncore->i915->drm, !stream->enabled)) in gen8_append_oa_reports()
666 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen8_append_oa_reports()
668 head = stream->oa_buffer.head; in gen8_append_oa_reports()
669 tail = stream->oa_buffer.tail; in gen8_append_oa_reports()
671 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen8_append_oa_reports()
729 (IS_GEN(stream->perf->i915, 12) ? in gen8_append_oa_reports()
733 if (__ratelimit(&stream->perf->spurious_report_rs)) in gen8_append_oa_reports()
738 ctx_id = report32[2] & stream->specific_ctx_id_mask; in gen8_append_oa_reports()
748 if (!(report32[0] & stream->perf->gen8_valid_ctx_bit) && in gen8_append_oa_reports()
749 INTEL_GEN(stream->perf->i915) <= 11) in gen8_append_oa_reports()
783 if (!stream->perf->exclusive_stream->ctx || in gen8_append_oa_reports()
784 stream->specific_ctx_id == ctx_id || in gen8_append_oa_reports()
785 stream->oa_buffer.last_ctx_id == stream->specific_ctx_id || in gen8_append_oa_reports()
792 if (stream->perf->exclusive_stream->ctx && in gen8_append_oa_reports()
793 stream->specific_ctx_id != ctx_id) { in gen8_append_oa_reports()
797 ret = append_oa_sample(stream, buf, count, offset, in gen8_append_oa_reports()
802 stream->oa_buffer.last_ctx_id = ctx_id; in gen8_append_oa_reports()
816 oaheadptr = IS_GEN(stream->perf->i915, 12) ? in gen8_append_oa_reports()
819 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen8_append_oa_reports()
828 stream->oa_buffer.head = head; in gen8_append_oa_reports()
830 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen8_append_oa_reports()
856 static int gen8_oa_read(struct i915_perf_stream *stream, in gen8_oa_read() argument
861 struct intel_uncore *uncore = stream->uncore; in gen8_oa_read()
866 if (drm_WARN_ON(&uncore->i915->drm, !stream->oa_buffer.vaddr)) in gen8_oa_read()
869 oastatus_reg = IS_GEN(stream->perf->i915, 12) ? in gen8_oa_read()
889 ret = append_oa_status(stream, buf, count, offset, in gen8_oa_read()
895 stream->period_exponent); in gen8_oa_read()
897 stream->perf->ops.oa_disable(stream); in gen8_oa_read()
898 stream->perf->ops.oa_enable(stream); in gen8_oa_read()
908 ret = append_oa_status(stream, buf, count, offset, in gen8_oa_read()
921 return gen8_append_oa_reports(stream, buf, count, offset); in gen8_oa_read()
944 static int gen7_append_oa_reports(struct i915_perf_stream *stream, in gen7_append_oa_reports() argument
949 struct intel_uncore *uncore = stream->uncore; in gen7_append_oa_reports()
950 int report_size = stream->oa_buffer.format_size; in gen7_append_oa_reports()
951 u8 *oa_buf_base = stream->oa_buffer.vaddr; in gen7_append_oa_reports()
952 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in gen7_append_oa_reports()
960 if (drm_WARN_ON(&uncore->i915->drm, !stream->enabled)) in gen7_append_oa_reports()
963 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen7_append_oa_reports()
965 head = stream->oa_buffer.head; in gen7_append_oa_reports()
966 tail = stream->oa_buffer.tail; in gen7_append_oa_reports()
968 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen7_append_oa_reports()
1018 if (__ratelimit(&stream->perf->spurious_report_rs)) in gen7_append_oa_reports()
1023 ret = append_oa_sample(stream, buf, count, offset, report); in gen7_append_oa_reports()
1035 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen7_append_oa_reports()
1045 stream->oa_buffer.head = head; in gen7_append_oa_reports()
1047 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen7_append_oa_reports()
1069 static int gen7_oa_read(struct i915_perf_stream *stream, in gen7_oa_read() argument
1074 struct intel_uncore *uncore = stream->uncore; in gen7_oa_read()
1078 if (drm_WARN_ON(&uncore->i915->drm, !stream->oa_buffer.vaddr)) in gen7_oa_read()
1088 oastatus1 &= ~stream->perf->gen7_latched_oastatus1; in gen7_oa_read()
1111 ret = append_oa_status(stream, buf, count, offset, in gen7_oa_read()
1117 stream->period_exponent); in gen7_oa_read()
1119 stream->perf->ops.oa_disable(stream); in gen7_oa_read()
1120 stream->perf->ops.oa_enable(stream); in gen7_oa_read()
1126 ret = append_oa_status(stream, buf, count, offset, in gen7_oa_read()
1130 stream->perf->gen7_latched_oastatus1 |= in gen7_oa_read()
1134 return gen7_append_oa_reports(stream, buf, count, offset); in gen7_oa_read()
1151 static int i915_oa_wait_unlocked(struct i915_perf_stream *stream) in i915_oa_wait_unlocked() argument
1154 if (!stream->periodic) in i915_oa_wait_unlocked()
1157 return wait_event_interruptible(stream->poll_wq, in i915_oa_wait_unlocked()
1158 oa_buffer_check_unlocked(stream)); in i915_oa_wait_unlocked()
1171 static void i915_oa_poll_wait(struct i915_perf_stream *stream, in i915_oa_poll_wait() argument
1175 poll_wait(file, &stream->poll_wq, wait); in i915_oa_poll_wait()
1190 static int i915_oa_read(struct i915_perf_stream *stream, in i915_oa_read() argument
1195 return stream->perf->ops.read(stream, buf, count, offset); in i915_oa_read()
1198 static struct intel_context *oa_pin_context(struct i915_perf_stream *stream) in oa_pin_context() argument
1201 struct i915_gem_context *ctx = stream->ctx; in oa_pin_context()
1207 if (ce->engine != stream->engine) /* first match! */ in oa_pin_context()
1235 stream->pinned_ctx = ce; in oa_pin_context()
1236 return stream->pinned_ctx; in oa_pin_context()
1249 static int oa_get_render_ctx_id(struct i915_perf_stream *stream) in oa_get_render_ctx_id() argument
1253 ce = oa_pin_context(stream); in oa_get_render_ctx_id()
1263 stream->specific_ctx_id = i915_ggtt_offset(ce->state); in oa_get_render_ctx_id()
1264 stream->specific_ctx_id_mask = 0; in oa_get_render_ctx_id()
1272 stream->specific_ctx_id_mask = in oa_get_render_ctx_id()
1274 stream->specific_ctx_id = stream->specific_ctx_id_mask; in oa_get_render_ctx_id()
1286 stream->specific_ctx_id = ce->lrc.lrca >> 12; in oa_get_render_ctx_id()
1292 stream->specific_ctx_id_mask = in oa_get_render_ctx_id()
1299 stream->specific_ctx_id_mask = in oa_get_render_ctx_id()
1306 stream->specific_ctx_id = (GEN12_MAX_CONTEXT_HW_ID - 1) << (GEN11_SW_CTX_ID_SHIFT - 32); in oa_get_render_ctx_id()
1314 ce->tag = stream->specific_ctx_id; in oa_get_render_ctx_id()
1316 drm_dbg(&stream->perf->i915->drm, in oa_get_render_ctx_id()
1318 stream->specific_ctx_id, in oa_get_render_ctx_id()
1319 stream->specific_ctx_id_mask); in oa_get_render_ctx_id()
1331 static void oa_put_render_ctx_id(struct i915_perf_stream *stream) in oa_put_render_ctx_id() argument
1335 ce = fetch_and_zero(&stream->pinned_ctx); in oa_put_render_ctx_id()
1341 stream->specific_ctx_id = INVALID_CTX_ID; in oa_put_render_ctx_id()
1342 stream->specific_ctx_id_mask = 0; in oa_put_render_ctx_id()
1346 free_oa_buffer(struct i915_perf_stream *stream) in free_oa_buffer() argument
1348 i915_vma_unpin_and_release(&stream->oa_buffer.vma, in free_oa_buffer()
1351 stream->oa_buffer.vaddr = NULL; in free_oa_buffer()
1355 free_oa_configs(struct i915_perf_stream *stream) in free_oa_configs() argument
1359 i915_oa_config_put(stream->oa_config); in free_oa_configs()
1360 llist_for_each_entry_safe(oa_bo, tmp, stream->oa_config_bos.first, node) in free_oa_configs()
1365 free_noa_wait(struct i915_perf_stream *stream) in free_noa_wait() argument
1367 i915_vma_unpin_and_release(&stream->noa_wait, 0); in free_noa_wait()
1370 static void i915_oa_stream_destroy(struct i915_perf_stream *stream) in i915_oa_stream_destroy() argument
1372 struct i915_perf *perf = stream->perf; in i915_oa_stream_destroy()
1374 BUG_ON(stream != perf->exclusive_stream); in i915_oa_stream_destroy()
1383 perf->ops.disable_metric_set(stream); in i915_oa_stream_destroy()
1385 free_oa_buffer(stream); in i915_oa_stream_destroy()
1387 intel_uncore_forcewake_put(stream->uncore, FORCEWAKE_ALL); in i915_oa_stream_destroy()
1388 intel_engine_pm_put(stream->engine); in i915_oa_stream_destroy()
1390 if (stream->ctx) in i915_oa_stream_destroy()
1391 oa_put_render_ctx_id(stream); in i915_oa_stream_destroy()
1393 free_oa_configs(stream); in i915_oa_stream_destroy()
1394 free_noa_wait(stream); in i915_oa_stream_destroy()
1402 static void gen7_init_oa_buffer(struct i915_perf_stream *stream) in gen7_init_oa_buffer() argument
1404 struct intel_uncore *uncore = stream->uncore; in gen7_init_oa_buffer()
1405 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in gen7_init_oa_buffer()
1408 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen7_init_oa_buffer()
1415 stream->oa_buffer.head = gtt_offset; in gen7_init_oa_buffer()
1423 stream->oa_buffer.aging_tail = INVALID_TAIL_PTR; in gen7_init_oa_buffer()
1424 stream->oa_buffer.tail = gtt_offset; in gen7_init_oa_buffer()
1426 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen7_init_oa_buffer()
1432 stream->perf->gen7_latched_oastatus1 = 0; in gen7_init_oa_buffer()
1445 memset(stream->oa_buffer.vaddr, 0, OA_BUFFER_SIZE); in gen7_init_oa_buffer()
1448 static void gen8_init_oa_buffer(struct i915_perf_stream *stream) in gen8_init_oa_buffer() argument
1450 struct intel_uncore *uncore = stream->uncore; in gen8_init_oa_buffer()
1451 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in gen8_init_oa_buffer()
1454 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen8_init_oa_buffer()
1458 stream->oa_buffer.head = gtt_offset; in gen8_init_oa_buffer()
1475 stream->oa_buffer.aging_tail = INVALID_TAIL_PTR; in gen8_init_oa_buffer()
1476 stream->oa_buffer.tail = gtt_offset; in gen8_init_oa_buffer()
1483 stream->oa_buffer.last_ctx_id = INVALID_CTX_ID; in gen8_init_oa_buffer()
1485 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen8_init_oa_buffer()
1499 memset(stream->oa_buffer.vaddr, 0, OA_BUFFER_SIZE); in gen8_init_oa_buffer()
1502 static void gen12_init_oa_buffer(struct i915_perf_stream *stream) in gen12_init_oa_buffer() argument
1504 struct intel_uncore *uncore = stream->uncore; in gen12_init_oa_buffer()
1505 u32 gtt_offset = i915_ggtt_offset(stream->oa_buffer.vma); in gen12_init_oa_buffer()
1508 spin_lock_irqsave(&stream->oa_buffer.ptr_lock, flags); in gen12_init_oa_buffer()
1513 stream->oa_buffer.head = gtt_offset; in gen12_init_oa_buffer()
1529 stream->oa_buffer.aging_tail = INVALID_TAIL_PTR; in gen12_init_oa_buffer()
1530 stream->oa_buffer.tail = gtt_offset; in gen12_init_oa_buffer()
1537 stream->oa_buffer.last_ctx_id = INVALID_CTX_ID; in gen12_init_oa_buffer()
1539 spin_unlock_irqrestore(&stream->oa_buffer.ptr_lock, flags); in gen12_init_oa_buffer()
1553 memset(stream->oa_buffer.vaddr, 0, in gen12_init_oa_buffer()
1554 stream->oa_buffer.vma->size); in gen12_init_oa_buffer()
1557 static int alloc_oa_buffer(struct i915_perf_stream *stream) in alloc_oa_buffer() argument
1559 struct drm_i915_private *i915 = stream->perf->i915; in alloc_oa_buffer()
1564 if (drm_WARN_ON(&i915->drm, stream->oa_buffer.vma)) in alloc_oa_buffer()
1570 bo = i915_gem_object_create_shmem(stream->perf->i915, OA_BUFFER_SIZE); in alloc_oa_buffer()
1584 stream->oa_buffer.vma = vma; in alloc_oa_buffer()
1586 stream->oa_buffer.vaddr = in alloc_oa_buffer()
1588 if (IS_ERR(stream->oa_buffer.vaddr)) { in alloc_oa_buffer()
1589 ret = PTR_ERR(stream->oa_buffer.vaddr); in alloc_oa_buffer()
1601 stream->oa_buffer.vaddr = NULL; in alloc_oa_buffer()
1602 stream->oa_buffer.vma = NULL; in alloc_oa_buffer()
1607 static u32 *save_restore_register(struct i915_perf_stream *stream, u32 *cs, in save_restore_register() argument
1616 if (INTEL_GEN(stream->perf->i915) >= 8) in save_restore_register()
1622 *cs++ = intel_gt_scratch_offset(stream->engine->gt, in save_restore_register()
1630 static int alloc_noa_wait(struct i915_perf_stream *stream) in alloc_noa_wait() argument
1632 struct drm_i915_private *i915 = stream->perf->i915; in alloc_noa_wait()
1636 i915_cs_timestamp_ns_to_ticks(i915, atomic64_read(&stream->perf->noa_programming_delay)); in alloc_noa_wait()
1637 const u32 base = stream->engine->mmio_base; in alloc_noa_wait()
1677 stream, cs, true /* save */, CS_GPR(i), in alloc_noa_wait()
1680 stream, cs, true /* save */, MI_PREDICATE_RESULT_1, in alloc_noa_wait()
1784 stream, cs, false /* restore */, CS_GPR(i), in alloc_noa_wait()
1787 stream, cs, false /* restore */, MI_PREDICATE_RESULT_1, in alloc_noa_wait()
1798 stream->noa_wait = vma; in alloc_noa_wait()
1842 alloc_oa_config_buffer(struct i915_perf_stream *stream, in alloc_oa_config_buffer() argument
1861 obj = i915_gem_object_create_shmem(stream->perf->i915, config_length); in alloc_oa_config_buffer()
1884 *cs++ = (INTEL_GEN(stream->perf->i915) < 8 ? in alloc_oa_config_buffer()
1887 *cs++ = i915_ggtt_offset(stream->noa_wait); in alloc_oa_config_buffer()
1894 &stream->engine->gt->ggtt->vm, in alloc_oa_config_buffer()
1902 llist_add(&oa_bo->node, &stream->oa_config_bos); in alloc_oa_config_buffer()
1914 get_oa_vma(struct i915_perf_stream *stream, struct i915_oa_config *oa_config) in get_oa_vma() argument
1922 llist_for_each_entry(oa_bo, stream->oa_config_bos.first, node) { in get_oa_vma()
1930 oa_bo = alloc_oa_config_buffer(stream, oa_config); in get_oa_vma()
1939 emit_oa_config(struct i915_perf_stream *stream, in emit_oa_config() argument
1949 vma = get_oa_vma(stream, oa_config); in emit_oa_config()
2011 static struct intel_context *oa_context(struct i915_perf_stream *stream) in oa_context() argument
2013 return stream->pinned_ctx ?: stream->engine->kernel_context; in oa_context()
2017 hsw_enable_metric_set(struct i915_perf_stream *stream, in hsw_enable_metric_set() argument
2020 struct intel_uncore *uncore = stream->uncore; in hsw_enable_metric_set()
2037 return emit_oa_config(stream, in hsw_enable_metric_set()
2038 stream->oa_config, oa_context(stream), in hsw_enable_metric_set()
2042 static void hsw_disable_metric_set(struct i915_perf_stream *stream) in hsw_disable_metric_set() argument
2044 struct intel_uncore *uncore = stream->uncore; in hsw_disable_metric_set()
2084 const struct i915_perf_stream *stream) in gen8_update_reg_state_unlocked() argument
2086 u32 ctx_oactxctrl = stream->perf->ctx_oactxctrl_offset; in gen8_update_reg_state_unlocked()
2087 u32 ctx_flexeu0 = stream->perf->ctx_flexeu0_offset; in gen8_update_reg_state_unlocked()
2102 (stream->period_exponent << GEN8_OA_TIMER_PERIOD_SHIFT) | in gen8_update_reg_state_unlocked()
2103 (stream->periodic ? GEN8_OA_TIMER_ENABLE : 0) | in gen8_update_reg_state_unlocked()
2108 oa_config_flex_reg(stream->oa_config, flex_regs[i]); in gen8_update_reg_state_unlocked()
2244 static int gen12_configure_oar_context(struct i915_perf_stream *stream, in gen12_configure_oar_context() argument
2248 struct intel_context *ce = stream->pinned_ctx; in gen12_configure_oar_context()
2249 u32 format = stream->oa_buffer.format; in gen12_configure_oar_context()
2253 stream->perf->ctx_oactxctrl_offset + 1, in gen12_configure_oar_context()
2318 oa_configure_all_contexts(struct i915_perf_stream *stream, in oa_configure_all_contexts() argument
2323 struct drm_i915_private *i915 = stream->perf->i915; in oa_configure_all_contexts()
2328 lockdep_assert_held(&stream->perf->lock); in oa_configure_all_contexts()
2387 gen12_configure_all_contexts(struct i915_perf_stream *stream, in gen12_configure_all_contexts() argument
2398 return oa_configure_all_contexts(stream, in gen12_configure_all_contexts()
2404 lrc_configure_all_contexts(struct i915_perf_stream *stream, in lrc_configure_all_contexts() argument
2409 const u32 ctx_flexeu0 = stream->perf->ctx_flexeu0_offset; in lrc_configure_all_contexts()
2418 stream->perf->ctx_oactxctrl_offset + 1, in lrc_configure_all_contexts()
2432 (stream->period_exponent << GEN8_OA_TIMER_PERIOD_SHIFT) | in lrc_configure_all_contexts()
2433 (stream->periodic ? GEN8_OA_TIMER_ENABLE : 0) | in lrc_configure_all_contexts()
2439 return oa_configure_all_contexts(stream, in lrc_configure_all_contexts()
2445 gen8_enable_metric_set(struct i915_perf_stream *stream, in gen8_enable_metric_set() argument
2448 struct intel_uncore *uncore = stream->uncore; in gen8_enable_metric_set()
2449 struct i915_oa_config *oa_config = stream->oa_config; in gen8_enable_metric_set()
2475 if (IS_GEN_RANGE(stream->perf->i915, 9, 11)) { in gen8_enable_metric_set()
2486 ret = lrc_configure_all_contexts(stream, oa_config, active); in gen8_enable_metric_set()
2490 return emit_oa_config(stream, in gen8_enable_metric_set()
2491 stream->oa_config, oa_context(stream), in gen8_enable_metric_set()
2495 static u32 oag_report_ctx_switches(const struct i915_perf_stream *stream) in oag_report_ctx_switches() argument
2498 (stream->sample_flags & SAMPLE_OA_REPORT) ? in oag_report_ctx_switches()
2503 gen12_enable_metric_set(struct i915_perf_stream *stream, in gen12_enable_metric_set() argument
2506 struct intel_uncore *uncore = stream->uncore; in gen12_enable_metric_set()
2507 struct i915_oa_config *oa_config = stream->oa_config; in gen12_enable_metric_set()
2508 bool periodic = stream->periodic; in gen12_enable_metric_set()
2509 u32 period_exponent = stream->period_exponent; in gen12_enable_metric_set()
2520 oag_report_ctx_switches(stream)); in gen12_enable_metric_set()
2533 ret = gen12_configure_all_contexts(stream, oa_config, active); in gen12_enable_metric_set()
2542 if (stream->ctx) { in gen12_enable_metric_set()
2543 ret = gen12_configure_oar_context(stream, active); in gen12_enable_metric_set()
2548 return emit_oa_config(stream, in gen12_enable_metric_set()
2549 stream->oa_config, oa_context(stream), in gen12_enable_metric_set()
2553 static void gen8_disable_metric_set(struct i915_perf_stream *stream) in gen8_disable_metric_set() argument
2555 struct intel_uncore *uncore = stream->uncore; in gen8_disable_metric_set()
2558 lrc_configure_all_contexts(stream, NULL, NULL); in gen8_disable_metric_set()
2563 static void gen10_disable_metric_set(struct i915_perf_stream *stream) in gen10_disable_metric_set() argument
2565 struct intel_uncore *uncore = stream->uncore; in gen10_disable_metric_set()
2568 lrc_configure_all_contexts(stream, NULL, NULL); in gen10_disable_metric_set()
2574 static void gen12_disable_metric_set(struct i915_perf_stream *stream) in gen12_disable_metric_set() argument
2576 struct intel_uncore *uncore = stream->uncore; in gen12_disable_metric_set()
2579 gen12_configure_all_contexts(stream, NULL, NULL); in gen12_disable_metric_set()
2582 if (stream->ctx) in gen12_disable_metric_set()
2583 gen12_configure_oar_context(stream, NULL); in gen12_disable_metric_set()
2589 static void gen7_oa_enable(struct i915_perf_stream *stream) in gen7_oa_enable() argument
2591 struct intel_uncore *uncore = stream->uncore; in gen7_oa_enable()
2592 struct i915_gem_context *ctx = stream->ctx; in gen7_oa_enable()
2593 u32 ctx_id = stream->specific_ctx_id; in gen7_oa_enable()
2594 bool periodic = stream->periodic; in gen7_oa_enable()
2595 u32 period_exponent = stream->period_exponent; in gen7_oa_enable()
2596 u32 report_format = stream->oa_buffer.format; in gen7_oa_enable()
2607 gen7_init_oa_buffer(stream); in gen7_oa_enable()
2619 static void gen8_oa_enable(struct i915_perf_stream *stream) in gen8_oa_enable() argument
2621 struct intel_uncore *uncore = stream->uncore; in gen8_oa_enable()
2622 u32 report_format = stream->oa_buffer.format; in gen8_oa_enable()
2633 gen8_init_oa_buffer(stream); in gen8_oa_enable()
2645 static void gen12_oa_enable(struct i915_perf_stream *stream) in gen12_oa_enable() argument
2647 struct intel_uncore *uncore = stream->uncore; in gen12_oa_enable()
2648 u32 report_format = stream->oa_buffer.format; in gen12_oa_enable()
2654 if (!(stream->sample_flags & SAMPLE_OA_REPORT)) in gen12_oa_enable()
2657 gen12_init_oa_buffer(stream); in gen12_oa_enable()
2673 static void i915_oa_stream_enable(struct i915_perf_stream *stream) in i915_oa_stream_enable() argument
2675 stream->pollin = false; in i915_oa_stream_enable()
2677 stream->perf->ops.oa_enable(stream); in i915_oa_stream_enable()
2679 if (stream->periodic) in i915_oa_stream_enable()
2680 hrtimer_start(&stream->poll_check_timer, in i915_oa_stream_enable()
2681 ns_to_ktime(stream->poll_oa_period), in i915_oa_stream_enable()
2685 static void gen7_oa_disable(struct i915_perf_stream *stream) in gen7_oa_disable() argument
2687 struct intel_uncore *uncore = stream->uncore; in gen7_oa_disable()
2693 drm_err(&stream->perf->i915->drm, in gen7_oa_disable()
2697 static void gen8_oa_disable(struct i915_perf_stream *stream) in gen8_oa_disable() argument
2699 struct intel_uncore *uncore = stream->uncore; in gen8_oa_disable()
2705 drm_err(&stream->perf->i915->drm, in gen8_oa_disable()
2709 static void gen12_oa_disable(struct i915_perf_stream *stream) in gen12_oa_disable() argument
2711 struct intel_uncore *uncore = stream->uncore; in gen12_oa_disable()
2718 drm_err(&stream->perf->i915->drm, in gen12_oa_disable()
2726 drm_err(&stream->perf->i915->drm, in gen12_oa_disable()
2738 static void i915_oa_stream_disable(struct i915_perf_stream *stream) in i915_oa_stream_disable() argument
2740 stream->perf->ops.oa_disable(stream); in i915_oa_stream_disable()
2742 if (stream->periodic) in i915_oa_stream_disable()
2743 hrtimer_cancel(&stream->poll_check_timer); in i915_oa_stream_disable()
2755 static int i915_perf_stream_enable_sync(struct i915_perf_stream *stream) in i915_perf_stream_enable_sync() argument
2764 err = stream->perf->ops.enable_metric_set(stream, active); in i915_perf_stream_enable_sync()
2822 static int i915_oa_stream_init(struct i915_perf_stream *stream, in i915_oa_stream_init() argument
2826 struct drm_i915_private *i915 = stream->perf->i915; in i915_oa_stream_init()
2827 struct i915_perf *perf = stream->perf; in i915_oa_stream_init()
2847 (INTEL_GEN(perf->i915) < 12 || !stream->ctx)) { in i915_oa_stream_init()
2872 stream->engine = props->engine; in i915_oa_stream_init()
2873 stream->uncore = stream->engine->gt->uncore; in i915_oa_stream_init()
2875 stream->sample_size = sizeof(struct drm_i915_perf_record_header); in i915_oa_stream_init()
2879 stream->sample_flags = props->sample_flags; in i915_oa_stream_init()
2880 stream->sample_size += format_size; in i915_oa_stream_init()
2882 stream->oa_buffer.format_size = format_size; in i915_oa_stream_init()
2883 if (drm_WARN_ON(&i915->drm, stream->oa_buffer.format_size == 0)) in i915_oa_stream_init()
2886 stream->hold_preemption = props->hold_preemption; in i915_oa_stream_init()
2888 stream->oa_buffer.format = in i915_oa_stream_init()
2891 stream->periodic = props->oa_periodic; in i915_oa_stream_init()
2892 if (stream->periodic) in i915_oa_stream_init()
2893 stream->period_exponent = props->oa_period_exponent; in i915_oa_stream_init()
2895 if (stream->ctx) { in i915_oa_stream_init()
2896 ret = oa_get_render_ctx_id(stream); in i915_oa_stream_init()
2903 ret = alloc_noa_wait(stream); in i915_oa_stream_init()
2909 stream->oa_config = i915_perf_get_oa_config(perf, props->metrics_set); in i915_oa_stream_init()
2910 if (!stream->oa_config) { in i915_oa_stream_init()
2928 intel_engine_pm_get(stream->engine); in i915_oa_stream_init()
2929 intel_uncore_forcewake_get(stream->uncore, FORCEWAKE_ALL); in i915_oa_stream_init()
2931 ret = alloc_oa_buffer(stream); in i915_oa_stream_init()
2935 stream->ops = &i915_oa_stream_ops; in i915_oa_stream_init()
2938 WRITE_ONCE(perf->exclusive_stream, stream); in i915_oa_stream_init()
2940 ret = i915_perf_stream_enable_sync(stream); in i915_oa_stream_init()
2947 stream->oa_config->uuid); in i915_oa_stream_init()
2949 hrtimer_init(&stream->poll_check_timer, in i915_oa_stream_init()
2951 stream->poll_check_timer.function = oa_poll_check_timer_cb; in i915_oa_stream_init()
2952 init_waitqueue_head(&stream->poll_wq); in i915_oa_stream_init()
2953 spin_lock_init(&stream->oa_buffer.ptr_lock); in i915_oa_stream_init()
2959 perf->ops.disable_metric_set(stream); in i915_oa_stream_init()
2961 free_oa_buffer(stream); in i915_oa_stream_init()
2964 free_oa_configs(stream); in i915_oa_stream_init()
2966 intel_uncore_forcewake_put(stream->uncore, FORCEWAKE_ALL); in i915_oa_stream_init()
2967 intel_engine_pm_put(stream->engine); in i915_oa_stream_init()
2970 free_noa_wait(stream); in i915_oa_stream_init()
2973 if (stream->ctx) in i915_oa_stream_init()
2974 oa_put_render_ctx_id(stream); in i915_oa_stream_init()
2982 struct i915_perf_stream *stream; in i915_oa_init_reg_state() local
2988 stream = READ_ONCE(engine->i915->perf.exclusive_stream); in i915_oa_init_reg_state()
2989 if (stream && INTEL_GEN(stream->perf->i915) < 12) in i915_oa_init_reg_state()
2990 gen8_update_reg_state_unlocked(ce, stream); in i915_oa_init_reg_state()
3016 struct i915_perf_stream *stream = file->private_data; in i915_perf_read() local
3017 struct i915_perf *perf = stream->perf; in i915_perf_read()
3025 if (!stream->enabled) in i915_perf_read()
3037 ret = stream->ops->wait_unlocked(stream); in i915_perf_read()
3042 ret = stream->ops->read(stream, buf, count, &offset); in i915_perf_read()
3047 ret = stream->ops->read(stream, buf, count, &offset); in i915_perf_read()
3063 stream->pollin = false; in i915_perf_read()
3071 struct i915_perf_stream *stream = in oa_poll_check_timer_cb() local
3072 container_of(hrtimer, typeof(*stream), poll_check_timer); in oa_poll_check_timer_cb()
3074 if (oa_buffer_check_unlocked(stream)) { in oa_poll_check_timer_cb()
3075 stream->pollin = true; in oa_poll_check_timer_cb()
3076 wake_up(&stream->poll_wq); in oa_poll_check_timer_cb()
3080 ns_to_ktime(stream->poll_oa_period)); in oa_poll_check_timer_cb()
3100 static __poll_t i915_perf_poll_locked(struct i915_perf_stream *stream, in i915_perf_poll_locked() argument
3106 stream->ops->poll_wait(stream, file, wait); in i915_perf_poll_locked()
3114 if (stream->pollin) in i915_perf_poll_locked()
3135 struct i915_perf_stream *stream = file->private_data; in i915_perf_poll() local
3136 struct i915_perf *perf = stream->perf; in i915_perf_poll()
3140 ret = i915_perf_poll_locked(stream, file, wait); in i915_perf_poll()
3156 static void i915_perf_enable_locked(struct i915_perf_stream *stream) in i915_perf_enable_locked() argument
3158 if (stream->enabled) in i915_perf_enable_locked()
3162 stream->enabled = true; in i915_perf_enable_locked()
3164 if (stream->ops->enable) in i915_perf_enable_locked()
3165 stream->ops->enable(stream); in i915_perf_enable_locked()
3167 if (stream->hold_preemption) in i915_perf_enable_locked()
3168 intel_context_set_nopreempt(stream->pinned_ctx); in i915_perf_enable_locked()
3185 static void i915_perf_disable_locked(struct i915_perf_stream *stream) in i915_perf_disable_locked() argument
3187 if (!stream->enabled) in i915_perf_disable_locked()
3191 stream->enabled = false; in i915_perf_disable_locked()
3193 if (stream->hold_preemption) in i915_perf_disable_locked()
3194 intel_context_clear_nopreempt(stream->pinned_ctx); in i915_perf_disable_locked()
3196 if (stream->ops->disable) in i915_perf_disable_locked()
3197 stream->ops->disable(stream); in i915_perf_disable_locked()
3200 static long i915_perf_config_locked(struct i915_perf_stream *stream, in i915_perf_config_locked() argument
3204 long ret = stream->oa_config->id; in i915_perf_config_locked()
3206 config = i915_perf_get_oa_config(stream->perf, metrics_set); in i915_perf_config_locked()
3210 if (config != stream->oa_config) { in i915_perf_config_locked()
3222 err = emit_oa_config(stream, config, oa_context(stream), NULL); in i915_perf_config_locked()
3224 config = xchg(&stream->oa_config, config); in i915_perf_config_locked()
3246 static long i915_perf_ioctl_locked(struct i915_perf_stream *stream, in i915_perf_ioctl_locked() argument
3252 i915_perf_enable_locked(stream); in i915_perf_ioctl_locked()
3255 i915_perf_disable_locked(stream); in i915_perf_ioctl_locked()
3258 return i915_perf_config_locked(stream, arg); in i915_perf_ioctl_locked()
3279 struct i915_perf_stream *stream = file->private_data; in i915_perf_ioctl() local
3280 struct i915_perf *perf = stream->perf; in i915_perf_ioctl()
3284 ret = i915_perf_ioctl_locked(stream, cmd, arg); in i915_perf_ioctl()
3300 static void i915_perf_destroy_locked(struct i915_perf_stream *stream) in i915_perf_destroy_locked() argument
3302 if (stream->enabled) in i915_perf_destroy_locked()
3303 i915_perf_disable_locked(stream); in i915_perf_destroy_locked()
3305 if (stream->ops->destroy) in i915_perf_destroy_locked()
3306 stream->ops->destroy(stream); in i915_perf_destroy_locked()
3308 if (stream->ctx) in i915_perf_destroy_locked()
3309 i915_gem_context_put(stream->ctx); in i915_perf_destroy_locked()
3311 kfree(stream); in i915_perf_destroy_locked()
3327 struct i915_perf_stream *stream = file->private_data; in i915_perf_release() local
3328 struct i915_perf *perf = stream->perf; in i915_perf_release()
3331 i915_perf_destroy_locked(stream); in i915_perf_release()
3386 struct i915_perf_stream *stream = NULL; in i915_perf_open_ioctl_locked() local
3459 stream = kzalloc(sizeof(*stream), GFP_KERNEL); in i915_perf_open_ioctl_locked()
3460 if (!stream) { in i915_perf_open_ioctl_locked()
3465 stream->perf = perf; in i915_perf_open_ioctl_locked()
3466 stream->ctx = specific_ctx; in i915_perf_open_ioctl_locked()
3467 stream->poll_oa_period = props->poll_oa_period; in i915_perf_open_ioctl_locked()
3469 ret = i915_oa_stream_init(stream, param, props); in i915_perf_open_ioctl_locked()
3477 if (WARN_ON(stream->sample_flags != props->sample_flags)) { in i915_perf_open_ioctl_locked()
3487 stream_fd = anon_inode_getfd("[i915_perf]", &fops, stream, f_flags); in i915_perf_open_ioctl_locked()
3494 i915_perf_enable_locked(stream); in i915_perf_open_ioctl_locked()
3504 if (stream->ops->destroy) in i915_perf_open_ioctl_locked()
3505 stream->ops->destroy(stream); in i915_perf_open_ioctl_locked()
3507 kfree(stream); in i915_perf_open_ioctl_locked()
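
The handlers at the end of the listing (i915_perf_open_ioctl_locked, i915_perf_ioctl, i915_perf_read, i915_perf_release) back the i915 perf uAPI. A minimal userspace sketch of that flow follows; it assumes an already-open DRM render-node fd, a metrics-set id read from /sys/class/drm/card0/metrics/<uuid>/id, kernel uapi headers installed as <drm/i915_drm.h>, and an arbitrary OA exponent of 16, with error handling abbreviated:

/* Hedged sketch: open a periodic OA stream, enable it, then read() it.
 * open_oa_stream() is a hypothetical helper name for this example. */
#include <stdint.h>
#include <sys/ioctl.h>
#include <drm/i915_drm.h>

static int open_oa_stream(int drm_fd, uint64_t metrics_set_id)
{
	uint64_t props[] = {
		DRM_I915_PERF_PROP_SAMPLE_OA,      1, /* include raw OA reports in samples */
		DRM_I915_PERF_PROP_OA_METRICS_SET, metrics_set_id,
		DRM_I915_PERF_PROP_OA_FORMAT,      I915_OA_FORMAT_A32u40_A4u32_B8_C8,
		DRM_I915_PERF_PROP_OA_EXPONENT,    16, /* periodic sampling exponent */
	};
	struct drm_i915_perf_open_param param = {
		.flags = I915_PERF_FLAG_FD_CLOEXEC | I915_PERF_FLAG_DISABLED,
		.num_properties = sizeof(props) / (2 * sizeof(props[0])),
		.properties_ptr = (uintptr_t)props,
	};
	int stream_fd = ioctl(drm_fd, DRM_IOCTL_I915_PERF_OPEN, &param);

	if (stream_fd < 0)
		return -1;

	/* Routed to i915_perf_enable_locked() via i915_perf_ioctl(). */
	ioctl(stream_fd, I915_PERF_IOCTL_ENABLE, 0);

	/* read(stream_fd, ...) now lands in i915_perf_read() and returns
	 * drm_i915_perf_record_header-framed OA samples. */
	return stream_fd;
}
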