@@ -217,7 +217,16 @@ i915_gem_object_lmem_io_map_page_atomic(struct drm_i915_gem_object *obj,
 bool i915_gem_object_is_lmem(struct drm_i915_gem_object *obj)
 {
-	return obj->ops == &i915_gem_lmem_obj_ops;
+	struct intel_memory_region *region = obj->mm.region;
+
+	return region && (region->is_devmem || region->type == INTEL_MEMORY_LOCAL);
+}
+
+bool i915_gem_object_is_devmem(struct drm_i915_gem_object *obj)
+{
+	struct intel_memory_region *region = obj->mm.region;
+
+	return region && region->is_devmem;
 }
 
 struct drm_i915_gem_object *
@@ -21,6 +21,7 @@ i915_gem_object_lmem_io_map_page_atomic(struct drm_i915_gem_object *obj,
 					unsigned long n);
 
 bool i915_gem_object_is_lmem(struct drm_i915_gem_object *obj);
+bool i915_gem_object_is_devmem(struct drm_i915_gem_object *obj);
 
 struct drm_i915_gem_object *
 i915_gem_object_create_lmem(struct drm_i915_private *i915,
@@ -456,7 +456,7 @@ static void ggtt_bind_vma(struct i915_address_space *vm,
 	pte_flags = 0;
 	if (vma->vm->has_read_only && i915_gem_object_is_readonly(obj))
 		pte_flags |= PTE_READ_ONLY;
-	if (i915_gem_object_is_lmem(obj))
+	if (i915_gem_object_is_devmem(obj))
 		pte_flags |= PTE_LM;
 
 	vm->insert_entries(vm, vma, cache_level, pte_flags);
@@ -195,7 +195,7 @@ void ppgtt_bind_vma(struct i915_address_space *vm,
 	pte_flags = 0;
 	if (i915_gem_object_is_readonly(vma->obj))
 		pte_flags |= PTE_READ_ONLY;
-	if (i915_gem_object_is_lmem(vma->obj))
+	if (i915_gem_object_is_devmem(vma->obj))
 		pte_flags |= PTE_LM;
 
 	vm->insert_entries(vm, vma, cache_level, pte_flags);
@@ -92,6 +92,7 @@ struct intel_memory_region {
 	enum intel_region_id id;
 	char name[8];
 	struct intel_gt *gt; /* GT closest to this region. */
+	bool is_devmem; /* true for device memory */
 	dma_addr_t remap_addr;
@@ -166,6 +166,9 @@ setup_lmem(struct drm_i915_private *dev_priv)
 			 (u64)mem->io_start);
 		DRM_INFO("Intel graphics LMEM size: %llx\n",
 			 (u64)lmem_size);
+
+		/* this is real device memory */
+		mem->is_devmem = true;
 	}
 
 	return mem;
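
Not part of the patch, purely illustrative: a minimal, self-contained C sketch of the split the two new predicates draw. The struct, enum and the fake_lmem region below are stand-ins invented for the example (the driver uses struct intel_memory_region), and the scenario assumes some setup path other than setup_lmem() can create an INTEL_MEMORY_LOCAL region that is not backed by real device memory and therefore leaves is_devmem false; with this patch the bind paths would not set PTE_LM for objects placed in such a region.

#include <stdbool.h>

enum region_type { INTEL_MEMORY_SYSTEM, INTEL_MEMORY_LOCAL };

/* Stand-in for the relevant bits of struct intel_memory_region. */
struct region {
	enum region_type type;
	bool is_devmem;		/* set only for real device memory */
};

/* Mirrors i915_gem_object_is_lmem(): local *or* real device memory. */
static bool is_lmem(const struct region *r)
{
	return r && (r->is_devmem || r->type == INTEL_MEMORY_LOCAL);
}

/* Mirrors i915_gem_object_is_devmem(): real device memory only. */
static bool is_devmem(const struct region *r)
{
	return r && r->is_devmem;
}

int main(void)
{
	struct region devmem    = { INTEL_MEMORY_LOCAL, true };
	struct region fake_lmem = { INTEL_MEMORY_LOCAL, false };

	/*
	 * Both regions count as lmem, but only the devmem one would get
	 * PTE_LM in ggtt_bind_vma()/ppgtt_bind_vma() after this patch.
	 */
	return (is_lmem(&devmem) && is_lmem(&fake_lmem) &&
		is_devmem(&devmem) && !is_devmem(&fake_lmem)) ? 0 : 1;
}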