linux/drivers/gpu/drm/nouveau/nvkm/subdev/bus/hwsq.h
/* SPDX-License-Identifier: MIT */
#ifndef __NVKM_BUS_HWSQ_H__
#define __NVKM_BUS_HWSQ_H__
#include <subdev/bus.h>

/* State for building a hardware sequencer (HWSQ) script via nvkm_hwsq. */
struct hwsq {
	struct nvkm_subdev *subdev;
	struct nvkm_hwsq *hwsq;
	int sequence;	/* bumped per script, invalidates cached reads */
};

/* Shadow copy of an MMIO register, or of a group of identical registers. */
struct hwsq_reg {
	int sequence;	/* script that last read/wrote this register */
	bool force;	/* write from hwsq_mask() even if value unchanged */
	u32 addr;
	u32 stride; /* in bytes */
	u32 mask;	/* bit N set: a copy exists at addr + N * stride */
	u32 data;	/* cached value, valid while sequence matches */
};

/*
 * Describe a group of registers: one copy per set bit N of "mask",
 * located at "addr" + N * "stride".
 */
static inline struct hwsq_reg
hwsq_stride(u32 addr, u32 stride, u32 mask)
{
	return (struct hwsq_reg) {
		.sequence = 0,
		.force = 0,
		.addr = addr,
		.stride = stride,
		.mask = mask,
		.data = 0xdeadbeef,
	};
}

/* Describe a pair of registers at "addr1" and "addr2". */
static inline struct hwsq_reg
hwsq_reg2(u32 addr1, u32 addr2)
{
	return (struct hwsq_reg) {
		.sequence = 0,
		.force = 0,
		.addr = addr1,
		.stride = addr2 - addr1,
		.mask = 0x3,
		.data = 0xdeadbeef,
	};
}

/* Describe a single register. */
static inline struct hwsq_reg
hwsq_reg(u32 addr)
{
	return (struct hwsq_reg) {
		.sequence = 0,
		.force = 0,
		.addr = addr,
		.stride = 0,
		.mask = 0x1,
		.data = 0xdeadbeef,
	};
}

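/*
 * Construction sketch, for illustration only (the addresses below are
 * made up, not taken from any hardware documentation):
 *
 *	struct hwsq_reg single = hwsq_reg(0x001234);
 *	struct hwsq_reg pair   = hwsq_reg2(0x001234, 0x001238);
 *	struct hwsq_reg bank   = hwsq_stride(0x001000, 0x20, 0x7);
 *
 * "single" names one register, "pair" names 0x001234 and 0x001238
 * (mask 0x3, stride 4), and "bank" names 0x001000, 0x001020 and 0x001040
 * (one copy per set bit of the mask).
 */
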
/* Open a new HWSQ script, invalidating any cached register reads. */
static inline int
hwsq_init(struct hwsq *ram, struct nvkm_subdev *subdev)
{
	int ret;

	ret = nvkm_hwsq_init(subdev, &ram->hwsq);
	if (ret)
		return ret;

	ram->sequence++;
	ram->subdev = subdev;
	return 0;
}

/* Close the current script; if "exec" is true, submit it to the sequencer. */
static inline int
hwsq_exec(struct hwsq *ram, bool exec)
{
	int ret = 0;
	if (ram->subdev) {
		ret = nvkm_hwsq_fini(&ram->hwsq, exec);
		ram->subdev = NULL;
	}
	return ret;
}

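/*
 * Typical calling sequence, as a minimal sketch only ("subdev", "reg" and
 * the values are hypothetical; real callers keep their struct hwsq embedded
 * in subdev-specific state and usually wrap these helpers in macros):
 *
 *	ret = hwsq_init(&hwsq, subdev);
 *	if (ret)
 *		return ret;
 *	hwsq_wr32(&hwsq, &reg, 0x00000001);	queue writes, waits and
 *	hwsq_nsec(&hwsq, 2000);			delays into the script...
 *	return hwsq_exec(&hwsq, true);		...then submit it
 */
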
/* Return the cached value, re-reading the hardware if the cache is stale. */
static inline u32
hwsq_rd32(struct hwsq *ram, struct hwsq_reg *reg)
{
	struct nvkm_device *device = ram->subdev->device;
	if (reg->sequence != ram->sequence)
		reg->data = nvkm_rd32(device, reg->addr);
	return reg->data;
}

/* Queue a write of "data" to every copy of the register into the script. */
static inline void
hwsq_wr32(struct hwsq *ram, struct hwsq_reg *reg, u32 data)
{
	u32 mask, off = 0;

	reg->sequence = ram->sequence;
	reg->data = data;

	/* one write per set bit of the mask, stepping by the stride */
	for (mask = reg->mask; mask > 0; mask = (mask & ~1) >> 1) {
		if (mask & 1)
			nvkm_hwsq_wr32(ram->hwsq, reg->addr+off, reg->data);

		off += reg->stride;
	}
}

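/*
 * Expansion sketch: with the illustrative "pair" register from the
 * construction example above (mask 0x3, stride 4), hwsq_wr32(ram, &pair, v)
 * queues two script writes:
 *
 *	nvkm_hwsq_wr32(ram->hwsq, 0x001234, v);		mask bit 0
 *	nvkm_hwsq_wr32(ram->hwsq, 0x001238, v);		mask bit 1
 */
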
/* Force later hwsq_mask() calls on this register to write even if unchanged. */
static inline void
hwsq_nuke(struct hwsq *ram, struct hwsq_reg *reg)
{
	reg->force = true;
}

/*
 * Read-modify-write: update the bits selected by "mask" with "data",
 * skipping the write when nothing would change (unless forced).
 * Returns the previous value.
 */
static inline u32
hwsq_mask(struct hwsq *ram, struct hwsq_reg *reg, u32 mask, u32 data)
{
	u32 temp = hwsq_rd32(ram, reg);
	if (temp != ((temp & ~mask) | data) || reg->force)
		hwsq_wr32(ram, reg, (temp & ~mask) | data);
	return temp;
}

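/*
 * Read-modify-write sketch (register and field are illustrative only):
 *
 *	hwsq_mask(ram, &single, 0x00000030, 0x00000010);	set bits 5:4 to 01
 *	hwsq_nuke(ram, &single);
 *	hwsq_mask(ram, &single, 0x00000030, 0x00000010);	emitted even if unchanged
 */
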
/* Set HWSQ flag "flag" to "data". */
static inline void
hwsq_setf(struct hwsq *ram, u8 flag, int data)
{
	nvkm_hwsq_setf(ram->hwsq, flag, data);
}

/* Stall the script until flag "flag" reads back as "data". */
static inline void
hwsq_wait(struct hwsq *ram, u8 flag, u8 data)
{
	nvkm_hwsq_wait(ram->hwsq, flag, data);
}

/* Stall the script until the next vertical blanking period. */
static inline void
hwsq_wait_vblank(struct hwsq *ram)
{
	nvkm_hwsq_wait_vblank(ram->hwsq);
}

/* Delay the script for (at least) "nsec" nanoseconds. */
static inline void
hwsq_nsec(struct hwsq *ram, u32 nsec)
{
	nvkm_hwsq_nsec(ram->hwsq, nsec);
}
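
/*
 * Flag/wait sketch (flag numbers and the delay are illustrative only;
 * real assignments are chip-specific):
 *
 *	hwsq_setf(&hwsq, 0x10, 0);	lower a flag around the critical section
 *	hwsq_wait_vblank(&hwsq);	sync to vertical blanking
 *	hwsq_nsec(&hwsq, 2000);		settle for 2us
 *	hwsq_setf(&hwsq, 0x10, 1);	raise it again
 */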
#endif