/* linux/drivers/gpu/drm/nouveau/nvkm/subdev/bus/hwsq.h */
   1#ifndef __NVKM_BUS_HWSQ_H__
   2#define __NVKM_BUS_HWSQ_H__
   3#include <subdev/bus.h>
   4
/* Tracks one in-progress hardware sequencer (HWSQ) script build. */
struct hwsq {
        struct nvkm_subdev *subdev; /* non-NULL while a script is being built */
        struct nvkm_hwsq *hwsq;     /* opcode buffer filled via nvkm_hwsq_*() */
        int sequence;               /* bumped per script; stales cached reg data */
};
  10
/*
 * Shadow of one register, or a strided group of registers, driven via HWSQ.
 * Bit N of 'mask' selects a write to addr + N * stride (see hwsq_wr32()).
 */
struct hwsq_reg {
        int sequence;   /* hwsq.sequence at last write; mismatch => data stale */
        bool force;     /* set by hwsq_nuke(): next hwsq_mask() always writes */
        u32 addr;
        u32 stride; /* in bytes */
        u32 mask;
        u32 data;       /* cached value; 0xdeadbeef until first read/write */
};
  19
  20static inline struct hwsq_reg
  21hwsq_stride(u32 addr, u32 stride, u32 mask)
  22{
  23        return (struct hwsq_reg) {
  24                .sequence = 0,
  25                .force = 0,
  26                .addr = addr,
  27                .stride = stride,
  28                .mask = mask,
  29                .data = 0xdeadbeef,
  30        };
  31}
  32
  33static inline struct hwsq_reg
  34hwsq_reg2(u32 addr1, u32 addr2)
  35{
  36        return (struct hwsq_reg) {
  37                .sequence = 0,
  38                .force = 0,
  39                .addr = addr1,
  40                .stride = addr2 - addr1,
  41                .mask = 0x3,
  42                .data = 0xdeadbeef,
  43        };
  44}
  45
  46static inline struct hwsq_reg
  47hwsq_reg(u32 addr)
  48{
  49        return (struct hwsq_reg) {
  50                .sequence = 0,
  51                .force = 0,
  52                .addr = addr,
  53                .stride = 0,
  54                .mask = 0x1,
  55                .data = 0xdeadbeef,
  56        };
  57}
  58
  59static inline int
  60hwsq_init(struct hwsq *ram, struct nvkm_subdev *subdev)
  61{
  62        int ret;
  63
  64        ret = nvkm_hwsq_init(subdev, &ram->hwsq);
  65        if (ret)
  66                return ret;
  67
  68        ram->sequence++;
  69        ram->subdev = subdev;
  70        return 0;
  71}
  72
  73static inline int
  74hwsq_exec(struct hwsq *ram, bool exec)
  75{
  76        int ret = 0;
  77        if (ram->subdev) {
  78                ret = nvkm_hwsq_fini(&ram->hwsq, exec);
  79                ram->subdev = NULL;
  80        }
  81        return ret;
  82}
  83
  84static inline u32
  85hwsq_rd32(struct hwsq *ram, struct hwsq_reg *reg)
  86{
  87        struct nvkm_device *device = ram->subdev->device;
  88        if (reg->sequence != ram->sequence)
  89                reg->data = nvkm_rd32(device, reg->addr);
  90        return reg->data;
  91}
  92
  93static inline void
  94hwsq_wr32(struct hwsq *ram, struct hwsq_reg *reg, u32 data)
  95{
  96        u32 mask, off = 0;
  97
  98        reg->sequence = ram->sequence;
  99        reg->data = data;
 100
 101        for (mask = reg->mask; mask > 0; mask = (mask & ~1) >> 1) {
 102                if (mask & 1)
 103                        nvkm_hwsq_wr32(ram->hwsq, reg->addr+off, reg->data);
 104
 105                off += reg->stride;
 106        }
 107}
 108
/*
 * Force the next hwsq_mask() on this register to emit a write even when
 * the resulting value matches the cached one.
 */
static inline void
hwsq_nuke(struct hwsq *ram, struct hwsq_reg *reg)
{
        reg->force = true;
}
 114
 115static inline u32
 116hwsq_mask(struct hwsq *ram, struct hwsq_reg *reg, u32 mask, u32 data)
 117{
 118        u32 temp = hwsq_rd32(ram, reg);
 119        if (temp != ((temp & ~mask) | data) || reg->force)
 120                hwsq_wr32(ram, reg, (temp & ~mask) | data);
 121        return temp;
 122}
 123
/* Queue a HWSQ opcode setting flag to data (see nvkm_hwsq_setf()). */
static inline void
hwsq_setf(struct hwsq *ram, u8 flag, int data)
{
        nvkm_hwsq_setf(ram->hwsq, flag, data);
}
 129
/* Queue a HWSQ opcode waiting for flag to reach data (see nvkm_hwsq_wait()). */
static inline void
hwsq_wait(struct hwsq *ram, u8 flag, u8 data)
{
        nvkm_hwsq_wait(ram->hwsq, flag, data);
}
 135
/* Queue a HWSQ wait for the next vertical blanking period. */
static inline void
hwsq_wait_vblank(struct hwsq *ram)
{
        nvkm_hwsq_wait_vblank(ram->hwsq);
}
 141
/* Queue a HWSQ delay of (at least) nsec nanoseconds. */
static inline void
hwsq_nsec(struct hwsq *ram, u32 nsec)
{
        nvkm_hwsq_nsec(ram->hwsq, nsec);
}
 147#endif
 148