@@ -256,8 +256,8 @@ uint64_t msm_gem_mmap_offset(struct drm_gem_object *obj)
* That means when I do eventually need to add support for unpinning
* the refcnt counter needs to be atomic_t.
*/
-int etnaviv_gem_get_iova_locked(struct etnaviv_gpu * gpu, struct drm_gem_object *obj,
- uint32_t *iova)
+int etnaviv_gem_get_iova_locked(struct etnaviv_gpu *gpu,
+ struct drm_gem_object *obj, uint32_t *iova)
{
struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
int ret = 0;
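
The context kept above notes that once unpinning support is added, the iova reference count will have to become atomic_t. Purely as an illustration of that remark (the type and field names below are hypothetical and not part of this patch), the pin/unpin accounting could be expressed with the kernel's atomic helpers:

#include <linux/atomic.h>
#include <linux/types.h>

/* Illustrative stand-in for a per-object iova pin count; not a driver type. */
struct etnaviv_iova_pin_sketch {
	atomic_t refcnt;
};

static void iova_pin_get_sketch(struct etnaviv_iova_pin_sketch *pin)
{
	atomic_inc(&pin->refcnt);		/* no struct_mutex needed once atomic */
}

static bool iova_pin_put_sketch(struct etnaviv_iova_pin_sketch *pin)
{
	/* true once the last pin is dropped and the mapping may be torn down */
	return atomic_dec_and_test(&pin->refcnt);
}
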
@@ -317,12 +317,14 @@ int etnaviv_gem_get_iova(struct etnaviv_gpu *gpu, struct drm_gem_object *obj, in
void etnaviv_gem_put_iova(struct drm_gem_object *obj)
{
- // XXX TODO ..
- // NOTE: probably don't need a _locked() version.. we wouldn't
- // normally unmap here, but instead just mark that it could be
- // unmapped (if the iova refcnt drops to zero), but then later
- // if another _get_iova_locked() fails we can start unmapping
- // things that are no longer needed..
+ /*
+ * XXX TODO ..
+ * NOTE: probably don't need a _locked() version.. we wouldn't
+ * normally unmap here, but instead just mark that it could be
+ * unmapped (if the iova refcnt drops to zero), but then later
+ * if another _get_iova_locked() fails we can start unmapping
+ * things that are no longer needed..
+ */
}
int msm_gem_dumb_create(struct drm_file *file, struct drm_device *dev,
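
The reworded block comment describes a deferred-unmap scheme: a put should only mark the mapping as reclaimable, and the actual unmapping would happen later if a failing etnaviv_gem_get_iova_locked() needs to free up IOMMU space. A minimal sketch of that idea, extending the illustrative pin count above with a list node (none of these names exist in the driver as of this patch):

#include <linux/atomic.h>
#include <linux/list.h>

struct etnaviv_iova_mapping_sketch {
	atomic_t refcnt;
	struct list_head evict_node;	/* assumed field, links onto a reclaim list */
};

/* Caller is assumed to hold struct_mutex for the list manipulation. */
static void put_iova_sketch(struct etnaviv_iova_mapping_sketch *mapping,
		struct list_head *evictable)
{
	/*
	 * No unmap here: only record that the mapping has no users left.
	 * A later get_iova that runs out of IOMMU space could walk
	 * @evictable and tear these mappings down.
	 */
	if (atomic_dec_and_test(&mapping->refcnt))
		list_add_tail(&mapping->evict_node, evictable);
}
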
@@ -21,7 +21,7 @@
struct sg_table *msm_gem_prime_get_sg_table(struct drm_gem_object *obj)
{
- struct etnaviv_gem_object *etnaviv_obj= to_etnaviv_bo(obj);
+ struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
BUG_ON(!etnaviv_obj->sgt); /* should have already pinned! */
return etnaviv_obj->sgt;
}
@@ -68,7 +68,7 @@ struct drm_etnaviv_timespec {
#define ETNAVIV_PARAM_GPU_INSTRUCTION_COUNT 0x18
#define ETNAVIV_PARAM_GPU_NUM_CONSTANTS 0x19
-//#define MSM_PARAM_GMEM_SIZE 0x02
+/* #define MSM_PARAM_GMEM_SIZE 0x02 */
struct drm_etnaviv_param {
uint32_t pipe; /* in, ETNA_PIPE_x */
@@ -217,9 +217,9 @@ struct drm_etnaviv_wait_fence {
#define DRM_IOCTL_ETNAVIV_GET_PARAM DRM_IOWR(DRM_COMMAND_BASE + DRM_ETNAVIV_GET_PARAM, struct drm_etnaviv_param)
#define DRM_IOCTL_ETNAVIV_GEM_NEW DRM_IOWR(DRM_COMMAND_BASE + DRM_ETNAVIV_GEM_NEW, struct drm_etnaviv_gem_new)
#define DRM_IOCTL_ETNAVIV_GEM_INFO DRM_IOWR(DRM_COMMAND_BASE + DRM_ETNAVIV_GEM_INFO, struct drm_etnaviv_gem_info)
-#define DRM_IOCTL_ETNAVIV_GEM_CPU_PREP DRM_IOW (DRM_COMMAND_BASE + DRM_ETNAVIV_GEM_CPU_PREP, struct drm_etnaviv_gem_cpu_prep)
-#define DRM_IOCTL_ETNAVIV_GEM_CPU_FINI DRM_IOW (DRM_COMMAND_BASE + DRM_ETNAVIV_GEM_CPU_FINI, struct drm_etnaviv_gem_cpu_fini)
+#define DRM_IOCTL_ETNAVIV_GEM_CPU_PREP DRM_IOW(DRM_COMMAND_BASE + DRM_ETNAVIV_GEM_CPU_PREP, struct drm_etnaviv_gem_cpu_prep)
+#define DRM_IOCTL_ETNAVIV_GEM_CPU_FINI DRM_IOW(DRM_COMMAND_BASE + DRM_ETNAVIV_GEM_CPU_FINI, struct drm_etnaviv_gem_cpu_fini)
#define DRM_IOCTL_ETNAVIV_GEM_SUBMIT DRM_IOWR(DRM_COMMAND_BASE + DRM_ETNAVIV_GEM_SUBMIT, struct drm_etnaviv_gem_submit)
-#define DRM_IOCTL_ETNAVIV_WAIT_FENCE DRM_IOW (DRM_COMMAND_BASE + DRM_ETNAVIV_WAIT_FENCE, struct drm_etnaviv_wait_fence)
+#define DRM_IOCTL_ETNAVIV_WAIT_FENCE DRM_IOW(DRM_COMMAND_BASE + DRM_ETNAVIV_WAIT_FENCE, struct drm_etnaviv_wait_fence)
#endif /* __ETNAVIV_DRM_H__ */
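
For reference, the DRM_IOWR/DRM_IOW split above is what userspace sees; a read-write ioctl such as GET_PARAM would be issued roughly as below. This is a hedged sketch only: the device node, the header include path, and any field of struct drm_etnaviv_param beyond pipe (the only field shown in this excerpt) are assumptions, not taken from the patch.

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include "etnaviv_drm.h"	/* assumed install path of the UAPI header above */

int main(void)
{
	struct drm_etnaviv_param req;
	int fd = open("/dev/dri/card0", O_RDWR);	/* node name is an assumption */

	if (fd < 0)
		return 1;

	memset(&req, 0, sizeof(req));
	req.pipe = 0;	/* an ETNA_PIPE_x value; fields not shown in this excerpt stay zeroed */

	/* DRM_IOWR: the kernel reads the request and writes the result back. */
	if (ioctl(fd, DRM_IOCTL_ETNAVIV_GET_PARAM, &req) < 0)
		perror("DRM_IOCTL_ETNAVIV_GET_PARAM");

	close(fd);
	return 0;
}
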