Lines Matching full:dsm

81 struct resource *dsm) in i915_adjust_stolen() argument
87 if (dsm->start == 0 || dsm->end <= dsm->start) in i915_adjust_stolen()
98 struct resource stolen[2] = {*dsm, *dsm}; in i915_adjust_stolen()
120 *dsm = stolen[0]; in i915_adjust_stolen()
122 *dsm = stolen[1]; in i915_adjust_stolen()
130 dsm); in i915_adjust_stolen()
148 r = devm_request_mem_region(i915->drm.dev, dsm->start, in i915_adjust_stolen()
149 resource_size(dsm), in i915_adjust_stolen()
161 r = devm_request_mem_region(i915->drm.dev, dsm->start + 1, in i915_adjust_stolen()
162 resource_size(dsm) - 2, in i915_adjust_stolen()
171 dsm); in i915_adjust_stolen()
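
The i915_adjust_stolen() hits above (lines 81 to 171) show stolen memory being handled as an ordinary struct resource: an unset or inverted range is rejected, the whole range is claimed with devm_request_mem_region(), and on failure the claim is retried with one byte trimmed from each end (start + 1, size - 2). A minimal sketch of that validate/request/fallback pattern, assuming a placeholder device pointer and region name rather than the driver's real arguments:

```c
/*
 * Illustrative sketch only: mirrors the validate/request/fallback pattern
 * visible in the i915_adjust_stolen() hits.  "dev" and the region name are
 * placeholders, not the driver's actual arguments.
 */
#include <linux/device.h>
#include <linux/errno.h>
#include <linux/ioport.h>

static int claim_stolen_sketch(struct device *dev, struct resource *dsm)
{
	struct resource *r;

	/* Reject an unset or inverted range (line 87). */
	if (dsm->start == 0 || dsm->end <= dsm->start)
		return -EINVAL;

	/* First try to claim the whole range (lines 148-149). */
	r = devm_request_mem_region(dev, dsm->start, resource_size(dsm),
				    "stolen (sketch)");
	if (r)
		return 0;

	/*
	 * Retry with one byte trimmed from each end (lines 161-162), the
	 * same fallback the driver applies when the full range cannot be
	 * reserved as-is.
	 */
	r = devm_request_mem_region(dev, dsm->start + 1,
				    resource_size(dsm) - 2,
				    "stolen (sketch)");
	return r ? 0 : -EBUSY;
}
```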
197 resource_size_t stolen_top = i915->dsm.end + 1; in g4x_get_stolen_reserved()
262 resource_size_t stolen_top = i915->dsm.end + 1; in vlv_get_stolen_reserved()
351 resource_size_t stolen_top = i915->dsm.end + 1; in bdw_get_stolen_reserved()
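
The three *_get_stolen_reserved() hits (lines 197, 262, 351) all derive the top of stolen memory the same way: struct resource ranges are end-inclusive, so the first address past the region is end + 1, which equals start + resource_size(). A tiny sketch of that convention, using made-up addresses:

```c
/* Sketch of the end-inclusive convention behind "stolen_top = dsm.end + 1". */
#include <linux/bug.h>
#include <linux/ioport.h>
#include <linux/sizes.h>

static void stolen_top_example(void)
{
	/* DEFINE_RES_MEM(start, size) covers [start, start + size - 1]. */
	struct resource dsm = DEFINE_RES_MEM(0x80000000, SZ_8M);
	resource_size_t stolen_top = dsm.end + 1;	/* 0x80800000 */

	/* Same value as start + size, because .end is inclusive. */
	WARN_ON(stolen_top != dsm.start + resource_size(&dsm));
}
```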
421 i915->dsm = mem->region; in i915_gem_init_stolen()
423 if (i915_adjust_stolen(i915, &i915->dsm)) in i915_gem_init_stolen()
426 GEM_BUG_ON(i915->dsm.start == 0); in i915_gem_init_stolen()
427 GEM_BUG_ON(i915->dsm.end <= i915->dsm.start); in i915_gem_init_stolen()
429 stolen_top = i915->dsm.end + 1; in i915_gem_init_stolen()
474 if (!resource_contains(&i915->dsm, &i915->dsm_reserved)) { in i915_gem_init_stolen()
477 &i915->dsm_reserved, &i915->dsm); in i915_gem_init_stolen()
490 resource_size(&i915->dsm) - reserved_total; in i915_gem_init_stolen()
494 (u64)resource_size(&i915->dsm) >> 10, in i915_gem_init_stolen()
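
The i915_gem_init_stolen() hits (lines 421 to 494) outline the setup flow: copy the memory region into i915->dsm, let i915_adjust_stolen() trim it, sanity-check it with GEM_BUG_ON(), confirm the firmware-reserved sub-range really sits inside the stolen range with resource_contains(), and report usable space as total minus reserved, printed in KiB (>> 10). A condensed sketch of that bookkeeping, with placeholder parameters standing in for i915->dsm and i915->dsm_reserved:

```c
/*
 * Condensed sketch of the accounting in the i915_gem_init_stolen() hits.
 * "stolen" and "reserved" are placeholders for i915->dsm and
 * i915->dsm_reserved; resource_contains()/resource_size() are the real API.
 */
#include <linux/ioport.h>
#include <linux/types.h>

static u64 usable_stolen_kib(struct resource *stolen, struct resource *reserved)
{
	resource_size_t reserved_total = 0;

	/* The reserved range must lie inside stolen itself (line 474). */
	if (resource_size(reserved) && resource_contains(stolen, reserved))
		reserved_total = resource_size(reserved);

	/* Usable = total - reserved (line 490), reported in KiB (line 494). */
	return (u64)(resource_size(stolen) - reserved_total) >> 10;
}
```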
551 GEM_BUG_ON(range_overflows(offset, size, resource_size(&i915->dsm))); in i915_pages_create_for_stolen()
571 sg_dma_address(sg) = (dma_addr_t)i915->dsm.start + offset; in i915_pages_create_for_stolen()
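
The i915_pages_create_for_stolen() hits (lines 551 and 571) show that stolen objects have no backing struct pages: after a bounds check against the stolen resource, a scatterlist entry's DMA address is pointed directly at dsm.start + offset. A stripped-down sketch of that single-entry table, with the driver's range_overflows() helper replaced by an open-coded check:

```c
/*
 * Sketch of a one-entry sg_table aimed at an offset inside a stolen region,
 * as in the i915_pages_create_for_stolen() hits.  The open-coded bounds
 * check stands in for i915's range_overflows() helper.
 */
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/ioport.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *stolen_pages_sketch(const struct resource *dsm,
					    resource_size_t offset,
					    resource_size_t size)
{
	struct sg_table *st;
	struct scatterlist *sg;

	/* Equivalent of the GEM_BUG_ON(range_overflows(...)) on line 551. */
	if (offset > resource_size(dsm) || size > resource_size(dsm) - offset)
		return ERR_PTR(-EINVAL);

	st = kmalloc(sizeof(*st), GFP_KERNEL);
	if (!st)
		return ERR_PTR(-ENOMEM);

	if (sg_alloc_table(st, 1, GFP_KERNEL)) {
		kfree(st);
		return ERR_PTR(-ENOMEM);
	}

	sg = st->sgl;
	sg->offset = 0;
	sg->length = size;

	/* No struct pages: the DMA address is stolen base + offset (line 571). */
	sg_dma_address(sg) = (dma_addr_t)dsm->start + offset;
	sg_dma_len(sg) = size;

	return st;
}
```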
763 * TODO: For stolen lmem we mostly just care about populating the dsm in init_stolen_lmem()
816 /* Use DSM base address instead for stolen memory */ in i915_gem_stolen_lmem_setup()
858 drm_dbg(&i915->drm, "Stolen Local DSM base: %pa\n", &dsm_base); in i915_gem_stolen_lmem_setup()
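
The last three hits only hint at the local-memory variant: the stolen region there is anchored at a DSM base address rather than carved out of system memory, and the base is logged through drm_dbg() with the %pa specifier, which prints a phys_addr_t/resource_size_t via a pointer to it. A minimal, hypothetical logging snippet in that style (the base value is invented):

```c
/*
 * Hypothetical snippet: logging a DSM base the way the
 * i915_gem_stolen_lmem_setup() hit on line 858 does.  %pa takes a pointer
 * to a phys_addr_t/resource_size_t; the value below is made up.
 */
#include <drm/drm_device.h>
#include <drm/drm_print.h>
#include <linux/ioport.h>

static void log_dsm_base(struct drm_device *drm)
{
	resource_size_t dsm_base = 0xE0000000;	/* invented example */

	drm_dbg(drm, "Stolen Local DSM base: %pa\n", &dsm_base);
}
```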