/* drivers/clk/qcom/clk-rcg2.c */
   1// SPDX-License-Identifier: GPL-2.0
   2/*
   3 * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
   4 */
   5
   6#include <linux/kernel.h>
   7#include <linux/bitops.h>
   8#include <linux/err.h>
   9#include <linux/bug.h>
  10#include <linux/export.h>
  11#include <linux/clk-provider.h>
  12#include <linux/delay.h>
  13#include <linux/rational.h>
  14#include <linux/regmap.h>
  15#include <linux/math64.h>
  16#include <linux/slab.h>
  17
  18#include <asm/div64.h>
  19
  20#include "clk-rcg.h"
  21#include "common.h"
  22
  23#define CMD_REG                 0x0
  24#define CMD_UPDATE              BIT(0)
  25#define CMD_ROOT_EN             BIT(1)
  26#define CMD_DIRTY_CFG           BIT(4)
  27#define CMD_DIRTY_N             BIT(5)
  28#define CMD_DIRTY_M             BIT(6)
  29#define CMD_DIRTY_D             BIT(7)
  30#define CMD_ROOT_OFF            BIT(31)
  31
  32#define CFG_REG                 0x4
  33#define CFG_SRC_DIV_SHIFT       0
  34#define CFG_SRC_SEL_SHIFT       8
  35#define CFG_SRC_SEL_MASK        (0x7 << CFG_SRC_SEL_SHIFT)
  36#define CFG_MODE_SHIFT          12
  37#define CFG_MODE_MASK           (0x3 << CFG_MODE_SHIFT)
  38#define CFG_MODE_DUAL_EDGE      (0x2 << CFG_MODE_SHIFT)
  39#define CFG_HW_CLK_CTRL_MASK    BIT(20)
  40
  41#define M_REG                   0x8
  42#define N_REG                   0xc
  43#define D_REG                   0x10
  44
  45#define RCG_CFG_OFFSET(rcg)     ((rcg)->cmd_rcgr + (rcg)->cfg_off + CFG_REG)
  46#define RCG_M_OFFSET(rcg)       ((rcg)->cmd_rcgr + (rcg)->cfg_off + M_REG)
  47#define RCG_N_OFFSET(rcg)       ((rcg)->cmd_rcgr + (rcg)->cfg_off + N_REG)
  48#define RCG_D_OFFSET(rcg)       ((rcg)->cmd_rcgr + (rcg)->cfg_off + D_REG)
  49
  50/* Dynamic Frequency Scaling */
  51#define MAX_PERF_LEVEL          8
  52#define SE_CMD_DFSR_OFFSET      0x14
  53#define SE_CMD_DFS_EN           BIT(0)
  54#define SE_PERF_DFSR(level)     (0x1c + 0x4 * (level))
  55#define SE_PERF_M_DFSR(level)   (0x5c + 0x4 * (level))
  56#define SE_PERF_N_DFSR(level)   (0x9c + 0x4 * (level))
  57
  58enum freq_policy {
  59        FLOOR,
  60        CEIL,
  61};
  62
  63static int clk_rcg2_is_enabled(struct clk_hw *hw)
  64{
  65        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  66        u32 cmd;
  67        int ret;
  68
  69        ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
  70        if (ret)
  71                return ret;
  72
  73        return (cmd & CMD_ROOT_OFF) == 0;
  74}
  75
  76static u8 clk_rcg2_get_parent(struct clk_hw *hw)
  77{
  78        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  79        int num_parents = clk_hw_get_num_parents(hw);
  80        u32 cfg;
  81        int i, ret;
  82
  83        ret = regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);
  84        if (ret)
  85                goto err;
  86
  87        cfg &= CFG_SRC_SEL_MASK;
  88        cfg >>= CFG_SRC_SEL_SHIFT;
  89
  90        for (i = 0; i < num_parents; i++)
  91                if (cfg == rcg->parent_map[i].cfg)
  92                        return i;
  93
  94err:
  95        pr_debug("%s: Clock %s has invalid parent, using default.\n",
  96                 __func__, clk_hw_get_name(hw));
  97        return 0;
  98}
  99
/*
 * Latch the pending CFG/M/N/D programming into the running root clock.
 *
 * Sets the CMD_UPDATE bit and busy-waits (up to ~500us) for the
 * hardware to clear it, which signals the new configuration has taken
 * effect.  Returns 0 on success, a regmap error code on I/O failure,
 * or -EBUSY (with a WARN) if the hardware never acknowledges.
 */
static int update_config(struct clk_rcg2 *rcg)
{
        int count, ret;
        u32 cmd;
        struct clk_hw *hw = &rcg->clkr.hw;
        const char *name = clk_hw_get_name(hw);

        ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
                                 CMD_UPDATE, CMD_UPDATE);
        if (ret)
                return ret;

        /* Wait for update to take effect */
        for (count = 500; count > 0; count--) {
                ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
                if (ret)
                        return ret;
                /* Hardware clears CMD_UPDATE once the switch completes */
                if (!(cmd & CMD_UPDATE))
                        return 0;
                udelay(1);
        }

        WARN(1, "%s: rcg didn't update its configuration.", name);
        return -EBUSY;
}
 125
 126static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
 127{
 128        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 129        int ret;
 130        u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
 131
 132        ret = regmap_update_bits(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg),
 133                                 CFG_SRC_SEL_MASK, cfg);
 134        if (ret)
 135                return ret;
 136
 137        return update_config(rcg);
 138}
 139
 140/*
 141 * Calculate m/n:d rate
 142 *
 143 *          parent_rate     m
 144 *   rate = ----------- x  ---
 145 *            hid_div       n
 146 */
 147static unsigned long
 148calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
 149{
 150        if (hid_div) {
 151                rate *= 2;
 152                rate /= hid_div + 1;
 153        }
 154
 155        if (mode) {
 156                u64 tmp = rate;
 157                tmp *= m;
 158                do_div(tmp, n);
 159                rate = tmp;
 160        }
 161
 162        return rate;
 163}
 164
 165static unsigned long
 166clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
 167{
 168        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 169        u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;
 170
 171        regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);
 172
 173        if (rcg->mnd_width) {
 174                mask = BIT(rcg->mnd_width) - 1;
 175                regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);
 176                m &= mask;
 177                regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &n);
 178                n =  ~n;
 179                n &= mask;
 180                n += m;
 181                mode = cfg & CFG_MODE_MASK;
 182                mode >>= CFG_MODE_SHIFT;
 183        }
 184
 185        mask = BIT(rcg->hid_width) - 1;
 186        hid_div = cfg >> CFG_SRC_DIV_SHIFT;
 187        hid_div &= mask;
 188
 189        return calc_rate(parent_rate, m, n, mode, hid_div);
 190}
 191
/*
 * Pick a frequency-table entry for the requested rate (rounding up for
 * CEIL, down for FLOOR) and fill in @req with the chosen parent and the
 * parent rate needed to produce it.
 */
static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
                                    struct clk_rate_request *req,
                                    enum freq_policy policy)
{
        unsigned long clk_flags, rate = req->rate;
        struct clk_hw *p;
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        int index;

        switch (policy) {
        case FLOOR:
                f = qcom_find_freq_floor(f, rate);
                break;
        case CEIL:
                f = qcom_find_freq(f, rate);
                break;
        default:
                return -EINVAL;
        }

        if (!f)
                return -EINVAL;

        index = qcom_find_src_index(hw, rcg->parent_map, f->src);
        if (index < 0)
                return index;

        clk_flags = clk_hw_get_flags(hw);
        p = clk_hw_get_parent_by_index(hw, index);
        if (!p)
                return -EINVAL;

        if (clk_flags & CLK_SET_RATE_PARENT) {
                /*
                 * Invert the divider math: work out what parent rate is
                 * required so that pre_div and M/N reduce it to f->freq.
                 */
                rate = f->freq;
                if (f->pre_div) {
                        /* freq 0 in the table means "track the request" */
                        if (!rate)
                                rate = req->rate;
                        /* pre_div field encodes 2 * divisor - 1 */
                        rate /= 2;
                        rate *= f->pre_div + 1;
                }

                if (f->n) {
                        /* Undo the m/n multiplication: parent = rate * n / m */
                        u64 tmp = rate;
                        tmp = tmp * f->n;
                        do_div(tmp, f->m);
                        rate = tmp;
                }
        } else {
                /* Parent rate is fixed from our point of view */
                rate =  clk_hw_get_rate(p);
        }
        req->best_parent_hw = p;
        req->best_parent_rate = rate;
        req->rate = f->freq;

        return 0;
}
 248
/* determine_rate rounding the request up to the next table frequency */
static int clk_rcg2_determine_rate(struct clk_hw *hw,
                                   struct clk_rate_request *req)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);

        return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
}

/* determine_rate rounding the request down to the previous table frequency */
static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
                                         struct clk_rate_request *req)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);

        return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
}
 264
/*
 * Write the M, N, D and CFG registers for frequency-table entry @f
 * WITHOUT triggering the CMD update handshake — callers that want the
 * new values to take effect must follow up with update_config().
 */
static int __clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
        u32 cfg, mask;
        struct clk_hw *hw = &rcg->clkr.hw;
        int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

        if (index < 0)
                return index;

        if (rcg->mnd_width && f->n) {
                mask = BIT(rcg->mnd_width) - 1;
                ret = regmap_update_bits(rcg->clkr.regmap,
                                RCG_M_OFFSET(rcg), mask, f->m);
                if (ret)
                        return ret;

                /* Hardware expects N as the one's complement of (N - M) */
                ret = regmap_update_bits(rcg->clkr.regmap,
                                RCG_N_OFFSET(rcg), mask, ~(f->n - f->m));
                if (ret)
                        return ret;

                /* D register = ~N, giving a 50% duty cycle */
                ret = regmap_update_bits(rcg->clkr.regmap,
                                RCG_D_OFFSET(rcg), mask, ~f->n);
                if (ret)
                        return ret;
        }

        /* Update divider, source select, mode and HW clock control in one go */
        mask = BIT(rcg->hid_width) - 1;
        mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK | CFG_HW_CLK_CTRL_MASK;
        cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
        cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
        /* Dual-edge mode only when M/N actually divides (m != n) */
        if (rcg->mnd_width && f->n && (f->m != f->n))
                cfg |= CFG_MODE_DUAL_EDGE;
        return regmap_update_bits(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg),
                                        mask, cfg);
}
 301
 302static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
 303{
 304        int ret;
 305
 306        ret = __clk_rcg2_configure(rcg, f);
 307        if (ret)
 308                return ret;
 309
 310        return update_config(rcg);
 311}
 312
 313static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
 314                               enum freq_policy policy)
 315{
 316        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 317        const struct freq_tbl *f;
 318
 319        switch (policy) {
 320        case FLOOR:
 321                f = qcom_find_freq_floor(rcg->freq_tbl, rate);
 322                break;
 323        case CEIL:
 324                f = qcom_find_freq(rcg->freq_tbl, rate);
 325                break;
 326        default:
 327                return -EINVAL;
 328        }
 329
 330        if (!f)
 331                return -EINVAL;
 332
 333        return clk_rcg2_configure(rcg, f);
 334}
 335
/* set_rate rounding up to the next table frequency */
static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
                            unsigned long parent_rate)
{
        return __clk_rcg2_set_rate(hw, rate, CEIL);
}

/* set_rate rounding down to the previous table frequency */
static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
                                   unsigned long parent_rate)
{
        return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

/* Parent comes from the frequency table, so index is ignored here */
static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
                unsigned long rate, unsigned long parent_rate, u8 index)
{
        return __clk_rcg2_set_rate(hw, rate, CEIL);
}

/* FLOOR-rounding variant of the above; index likewise ignored */
static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
                unsigned long rate, unsigned long parent_rate, u8 index)
{
        return __clk_rcg2_set_rate(hw, rate, FLOOR);
}
 359
/* Standard RCG2 ops: rate requests are rounded up to the table entry */
const struct clk_ops clk_rcg2_ops = {
        .is_enabled = clk_rcg2_is_enabled,
        .get_parent = clk_rcg2_get_parent,
        .set_parent = clk_rcg2_set_parent,
        .recalc_rate = clk_rcg2_recalc_rate,
        .determine_rate = clk_rcg2_determine_rate,
        .set_rate = clk_rcg2_set_rate,
        .set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);

/* RCG2 ops variant that rounds rate requests down instead of up */
const struct clk_ops clk_rcg2_floor_ops = {
        .is_enabled = clk_rcg2_is_enabled,
        .get_parent = clk_rcg2_get_parent,
        .set_parent = clk_rcg2_set_parent,
        .recalc_rate = clk_rcg2_recalc_rate,
        .determine_rate = clk_rcg2_determine_floor_rate,
        .set_rate = clk_rcg2_set_floor_rate,
        .set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);
 381
/* An m/n fraction used to derive pixel rates from a fixed parent PLL */
struct frac_entry {
        int num;
        int den;
};

/* Candidate fractions for a 675 MHz parent; zero-terminated */
static const struct frac_entry frac_table_675m[] = {    /* link rate of 270M */
        { 52, 295 },    /* 119 M */
        { 11, 57 },     /* 130.25 M */
        { 63, 307 },    /* 138.50 M */
        { 11, 50 },     /* 148.50 M */
        { 47, 206 },    /* 154 M */
        { 31, 100 },    /* 205.25 M */
        { 107, 269 },   /* 268.50 M */
        { },
};
 397
 398static struct frac_entry frac_table_810m[] = { /* Link rate of 162M */
 399        { 31, 211 },    /* 119 M */
 400        { 32, 199 },    /* 130.25 M */
 401        { 63, 307 },    /* 138.50 M */
 402        { 11, 60 },     /* 148.50 M */
 403        { 50, 263 },    /* 154 M */
 404        { 31, 120 },    /* 205.25 M */
 405        { 119, 359 },   /* 268.50 M */
 406        { },
 407};
 408
/*
 * Set an eDP pixel rate by scanning the fraction table for an m/n pair
 * that maps the fixed parent rate onto @rate within +/- 100 kHz.  The
 * current HID divider is preserved from the CFG register.
 */
static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
                              unsigned long parent_rate)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        struct freq_tbl f = *rcg->freq_tbl;
        const struct frac_entry *frac;
        int delta = 100000;
        s64 src_rate = parent_rate;
        s64 request;
        u32 mask = BIT(rcg->hid_width) - 1;
        u32 hid_div;

        /* Table choice depends on which fixed-rate PLL is the parent */
        if (src_rate == 810000000)
                frac = frac_table_810m;
        else
                frac = frac_table_675m;

        for (; frac->num; frac++) {
                /* Parent rate that this fraction would require */
                request = rate;
                request *= frac->den;
                request = div_s64(request, frac->num);
                if ((src_rate < (request - delta)) ||
                    (src_rate > (request + delta)))
                        continue;

                /* Keep the currently-programmed HID divider */
                regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
                                &hid_div);
                f.pre_div = hid_div;
                f.pre_div >>= CFG_SRC_DIV_SHIFT;
                f.pre_div &= mask;
                f.m = frac->num;
                f.n = frac->den;

                return clk_rcg2_configure(rcg, &f);
        }

        /* No fraction reaches the requested rate from this parent */
        return -EINVAL;
}

static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
                unsigned long rate, unsigned long parent_rate, u8 index)
{
        /* Parent index is set statically in frequency table */
        return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}
 454
 455static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
 456                                        struct clk_rate_request *req)
 457{
 458        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 459        const struct freq_tbl *f = rcg->freq_tbl;
 460        const struct frac_entry *frac;
 461        int delta = 100000;
 462        s64 request;
 463        u32 mask = BIT(rcg->hid_width) - 1;
 464        u32 hid_div;
 465        int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
 466
 467        /* Force the correct parent */
 468        req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
 469        req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);
 470
 471        if (req->best_parent_rate == 810000000)
 472                frac = frac_table_810m;
 473        else
 474                frac = frac_table_675m;
 475
 476        for (; frac->num; frac++) {
 477                request = req->rate;
 478                request *= frac->den;
 479                request = div_s64(request, frac->num);
 480                if ((req->best_parent_rate < (request - delta)) ||
 481                    (req->best_parent_rate > (request + delta)))
 482                        continue;
 483
 484                regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
 485                                &hid_div);
 486                hid_div >>= CFG_SRC_DIV_SHIFT;
 487                hid_div &= mask;
 488
 489                req->rate = calc_rate(req->best_parent_rate,
 490                                      frac->num, frac->den,
 491                                      !!frac->den, hid_div);
 492                return 0;
 493        }
 494
 495        return -EINVAL;
 496}
 497
/* Ops for eDP pixel RCGs driven from a fixed 675/810 MHz PLL */
const struct clk_ops clk_edp_pixel_ops = {
        .is_enabled = clk_rcg2_is_enabled,
        .get_parent = clk_rcg2_get_parent,
        .set_parent = clk_rcg2_set_parent,
        .recalc_rate = clk_rcg2_recalc_rate,
        .set_rate = clk_edp_pixel_set_rate,
        .set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
        .determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);
 508
/*
 * determine_rate for DSI byte clocks: force the table-designated
 * parent, round its rate, and derive the closest achievable output
 * using only the HID divider (no M/N).
 */
static int clk_byte_determine_rate(struct clk_hw *hw,
                                   struct clk_rate_request *req)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        const struct freq_tbl *f = rcg->freq_tbl;
        int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
        unsigned long parent_rate, div;
        u32 mask = BIT(rcg->hid_width) - 1;
        struct clk_hw *p;

        /* A zero rate would divide by zero below */
        if (req->rate == 0)
                return -EINVAL;

        req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
        req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);

        /* HID divider field encodes 2 * divisor - 1; clamp to field width */
        div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
        div = min_t(u32, div, mask);

        req->rate = calc_rate(parent_rate, 0, 0, 0, div);

        return 0;
}
 532
/*
 * Program a DSI byte clock: compute the HID divider that brings
 * @parent_rate down to @rate, keeping the rest of the frequency-table
 * template (source etc.) unchanged.
 */
static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
                         unsigned long parent_rate)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        struct freq_tbl f = *rcg->freq_tbl;
        unsigned long div;
        u32 mask = BIT(rcg->hid_width) - 1;

        /* HID divider field encodes 2 * divisor - 1; clamp to field width */
        div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
        div = min_t(u32, div, mask);

        f.pre_div = div;

        return clk_rcg2_configure(rcg, &f);
}

static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
                unsigned long rate, unsigned long parent_rate, u8 index)
{
        /* Parent index is set statically in frequency table */
        return clk_byte_set_rate(hw, rate, parent_rate);
}

/* Ops for DSI byte clocks with a single, table-fixed parent */
const struct clk_ops clk_byte_ops = {
        .is_enabled = clk_rcg2_is_enabled,
        .get_parent = clk_rcg2_get_parent,
        .set_parent = clk_rcg2_set_parent,
        .recalc_rate = clk_rcg2_recalc_rate,
        .set_rate = clk_byte_set_rate,
        .set_rate_and_parent = clk_byte_set_rate_and_parent,
        .determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);
 566
/*
 * determine_rate for byte2 clocks: unlike clk_byte, the parent is taken
 * from the incoming request rather than the frequency table.
 */
static int clk_byte2_determine_rate(struct clk_hw *hw,
                                    struct clk_rate_request *req)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        unsigned long parent_rate, div;
        u32 mask = BIT(rcg->hid_width) - 1;
        struct clk_hw *p;
        unsigned long rate = req->rate;

        /* A zero rate would divide by zero below */
        if (rate == 0)
                return -EINVAL;

        p = req->best_parent_hw;
        req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);

        /* HID divider field encodes 2 * divisor - 1; clamp to field width */
        div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
        div = min_t(u32, div, mask);

        req->rate = calc_rate(parent_rate, 0, 0, 0, div);

        return 0;
}
 589
/*
 * Program a byte2 clock: derive the HID divider for @rate and reuse
 * whatever source the hardware is currently running from (read back
 * from CFG), since the parent is chosen by the framework, not a table.
 */
static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
                         unsigned long parent_rate)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        struct freq_tbl f = { 0 };
        unsigned long div;
        int i, num_parents = clk_hw_get_num_parents(hw);
        u32 mask = BIT(rcg->hid_width) - 1;
        u32 cfg;

        /* HID divider field encodes 2 * divisor - 1; clamp to field width */
        div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
        div = min_t(u32, div, mask);

        f.pre_div = div;

        /* Read the currently-selected source out of the hardware */
        regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
        cfg &= CFG_SRC_SEL_MASK;
        cfg >>= CFG_SRC_SEL_SHIFT;

        for (i = 0; i < num_parents; i++) {
                if (cfg == rcg->parent_map[i].cfg) {
                        f.src = rcg->parent_map[i].src;
                        return clk_rcg2_configure(rcg, &f);
                }
        }

        /* Hardware selector doesn't match any known parent */
        return -EINVAL;
}

static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
                unsigned long rate, unsigned long parent_rate, u8 index)
{
        /* Read the hardware to determine parent during set_rate */
        return clk_byte2_set_rate(hw, rate, parent_rate);
}

/* Ops for byte2 clocks whose parent is chosen by the framework */
const struct clk_ops clk_byte2_ops = {
        .is_enabled = clk_rcg2_is_enabled,
        .get_parent = clk_rcg2_get_parent,
        .set_parent = clk_rcg2_set_parent,
        .recalc_rate = clk_rcg2_recalc_rate,
        .set_rate = clk_byte2_set_rate,
        .set_rate_and_parent = clk_byte2_set_rate_and_parent,
        .determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);
 636
/* Candidate m/n fractions for display pixel clocks; zero-terminated */
static const struct frac_entry frac_table_pixel[] = {
        { 3, 8 },
        { 2, 9 },
        { 4, 9 },
        { 1, 1 },
        { }
};
 644
/*
 * determine_rate for pixel clocks: for each candidate fraction, ask the
 * parent to round to the required source rate and accept the first one
 * the parent can supply within +/- 100 kHz.
 */
static int clk_pixel_determine_rate(struct clk_hw *hw,
                                    struct clk_rate_request *req)
{
        unsigned long request, src_rate;
        int delta = 100000;
        const struct frac_entry *frac = frac_table_pixel;

        for (; frac->num; frac++) {
                /* Parent rate this fraction would require */
                request = (req->rate * frac->den) / frac->num;

                src_rate = clk_hw_round_rate(req->best_parent_hw, request);
                if ((src_rate < (request - delta)) ||
                        (src_rate > (request + delta)))
                        continue;

                req->best_parent_rate = src_rate;
                req->rate = (src_rate * frac->num) / frac->den;
                return 0;
        }

        return -EINVAL;
}
 667
/*
 * Program a pixel clock: keep the currently-selected source and HID
 * divider, and pick the m/n fraction that maps @parent_rate onto @rate
 * within +/- 100 kHz.
 */
static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
                unsigned long parent_rate)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        struct freq_tbl f = { 0 };
        const struct frac_entry *frac = frac_table_pixel;
        unsigned long request;
        int delta = 100000;
        u32 mask = BIT(rcg->hid_width) - 1;
        u32 hid_div, cfg;
        int i, num_parents = clk_hw_get_num_parents(hw);

        /* Reuse the source the hardware is currently running from */
        regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
        cfg &= CFG_SRC_SEL_MASK;
        cfg >>= CFG_SRC_SEL_SHIFT;

        for (i = 0; i < num_parents; i++)
                if (cfg == rcg->parent_map[i].cfg) {
                        f.src = rcg->parent_map[i].src;
                        break;
                }

        for (; frac->num; frac++) {
                /* Parent rate this fraction would require */
                request = (rate * frac->den) / frac->num;

                if ((parent_rate < (request - delta)) ||
                        (parent_rate > (request + delta)))
                        continue;

                /* Keep the currently-programmed HID divider */
                regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
                                &hid_div);
                f.pre_div = hid_div;
                f.pre_div >>= CFG_SRC_DIV_SHIFT;
                f.pre_div &= mask;
                f.m = frac->num;
                f.n = frac->den;

                return clk_rcg2_configure(rcg, &f);
        }
        return -EINVAL;
}

static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
                unsigned long parent_rate, u8 index)
{
        /* Parent was already muxed by the framework; just set the rate */
        return clk_pixel_set_rate(hw, rate, parent_rate);
}

/* Ops for display pixel clocks using the m/n fraction table */
const struct clk_ops clk_pixel_ops = {
        .is_enabled = clk_rcg2_is_enabled,
        .get_parent = clk_rcg2_get_parent,
        .set_parent = clk_rcg2_set_parent,
        .recalc_rate = clk_rcg2_recalc_rate,
        .set_rate = clk_pixel_set_rate,
        .set_rate_and_parent = clk_pixel_set_rate_and_parent,
        .determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);
 726
/*
 * determine_rate for the GPU (gfx3d) clock.
 *
 * The mux alternates ("ping-pongs") between two reprogrammable PLL
 * copies (indices 3 and 4) so one can be reprogrammed while the other
 * drives the GPU, avoiding glitches.  Index 0 is XO and index 2 is a
 * fixed-rate PLL that caps the achievable frequency.
 */
static int clk_gfx3d_determine_rate(struct clk_hw *hw,
                                    struct clk_rate_request *req)
{
        struct clk_rate_request parent_req = { };
        struct clk_hw *p2, *p8, *p9, *xo;
        unsigned long p9_rate;
        int ret;

        /* Exactly the XO rate: source directly from XO, no PLL needed */
        xo = clk_hw_get_parent_by_index(hw, 0);
        if (req->rate == clk_hw_get_rate(xo)) {
                req->best_parent_hw = xo;
                return 0;
        }

        p9 = clk_hw_get_parent_by_index(hw, 2);
        p2 = clk_hw_get_parent_by_index(hw, 3);
        p8 = clk_hw_get_parent_by_index(hw, 4);

        /* PLL9 is a fixed rate PLL */
        p9_rate = clk_hw_get_rate(p9);

        /* Cap the request at the fixed PLL's rate */
        parent_req.rate = req->rate = min(req->rate, p9_rate);
        if (req->rate == p9_rate) {
                req->rate = req->best_parent_rate = p9_rate;
                req->best_parent_hw = p9;
                return 0;
        }

        if (req->best_parent_hw == p9) {
                /* Are we going back to a previously used rate? */
                if (clk_hw_get_rate(p8) == req->rate)
                        req->best_parent_hw = p8;
                else
                        req->best_parent_hw = p2;
        } else if (req->best_parent_hw == p8) {
                /* Ping-pong: switch to the other reprogrammable PLL */
                req->best_parent_hw = p2;
        } else {
                req->best_parent_hw = p8;
        }

        /* Ask the chosen PLL what it can actually produce */
        ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
        if (ret)
                return ret;

        req->rate = req->best_parent_rate = parent_req.rate;

        return 0;
}
 775
/*
 * Switch the gfx3d mux to @index.  The rate itself comes entirely from
 * the selected parent PLL, so only the source-select field is written.
 */
static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
                unsigned long parent_rate, u8 index)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        u32 cfg;
        int ret;

        /* Just mux it, we don't use the division or m/n hardware */
        cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
        ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
        if (ret)
                return ret;

        return update_config(rcg);
}

static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
                              unsigned long parent_rate)
{
        /*
         * We should never get here; clk_gfx3d_determine_rate() should always
         * make us use a different parent than what we're currently using, so
         * clk_gfx3d_set_rate_and_parent() should always be called.
         */
        return 0;
}

/* Ops for the GPU core clock with PLL ping-pong rate switching */
const struct clk_ops clk_gfx3d_ops = {
        .is_enabled = clk_rcg2_is_enabled,
        .get_parent = clk_rcg2_get_parent,
        .set_parent = clk_rcg2_set_parent,
        .recalc_rate = clk_rcg2_recalc_rate,
        .set_rate = clk_gfx3d_set_rate,
        .set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
        .determine_rate = clk_gfx3d_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_gfx3d_ops);
 813
/*
 * Force the RCG root on via CMD_ROOT_EN, overriding downstream branch
 * votes, and poll (up to ~500us) until the root reports enabled.
 */
static int clk_rcg2_set_force_enable(struct clk_hw *hw)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        const char *name = clk_hw_get_name(hw);
        int ret, count;

        ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
                                 CMD_ROOT_EN, CMD_ROOT_EN);
        if (ret)
                return ret;

        /* wait for RCG to turn ON */
        for (count = 500; count > 0; count--) {
                if (clk_rcg2_is_enabled(hw))
                        return 0;

                udelay(1);
        }

        pr_err("%s: RCG did not turn on\n", name);
        return -ETIMEDOUT;
}

/* Drop the force-enable override; the root follows normal votes again */
static int clk_rcg2_clear_force_enable(struct clk_hw *hw)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);

        return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
                                        CMD_ROOT_EN, 0);
}
 844
/*
 * Reconfigure a shared RCG while it is guaranteed to be running:
 * force-enable the root, program and latch @f, then drop the override.
 */
static int
clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        int ret;

        ret = clk_rcg2_set_force_enable(hw);
        if (ret)
                return ret;

        ret = clk_rcg2_configure(rcg, f);
        if (ret)
                return ret;

        return clk_rcg2_clear_force_enable(hw);
}
 861
/*
 * set_rate for shared RCGs.  If the clock is currently disabled the
 * registers are written without the CMD update (it will be latched by
 * clk_rcg2_shared_enable()); otherwise the change is applied under a
 * force-enable so the update handshake can complete.
 */
static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
                                    unsigned long parent_rate)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        const struct freq_tbl *f;

        f = qcom_find_freq(rcg->freq_tbl, rate);
        if (!f)
                return -EINVAL;

        /*
         * In case clock is disabled, update the CFG, M, N and D registers
         * and don't hit the update bit of CMD register.
         */
        if (!__clk_is_enabled(hw->clk))
                return __clk_rcg2_configure(rcg, f);

        return clk_rcg2_shared_force_enable_clear(hw, f);
}

static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw *hw,
                unsigned long rate, unsigned long parent_rate, u8 index)
{
        /* Parent selection is part of the programmed configuration */
        return clk_rcg2_shared_set_rate(hw, rate, parent_rate);
}
 887
/*
 * Enable a shared RCG: latch the configuration that was staged by
 * clk_rcg2_shared_set_rate() while the clock was off, under a
 * temporary force-enable so the update handshake can complete.
 */
static int clk_rcg2_shared_enable(struct clk_hw *hw)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        int ret;

        /*
         * Set the update bit because required configuration has already
         * been written in clk_rcg2_shared_set_rate()
         */
        ret = clk_rcg2_set_force_enable(hw);
        if (ret)
                return ret;

        ret = update_config(rcg);
        if (ret)
                return ret;

        return clk_rcg2_clear_force_enable(hw);
}
 907
/*
 * Disable a shared RCG by parking it on the always-on safe source,
 * while preserving the programmed CFG so the rate survives a
 * disable/enable cycle.
 */
static void clk_rcg2_shared_disable(struct clk_hw *hw)
{
        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
        u32 cfg;

        /*
         * Store current configuration as switching to safe source would clear
         * the SRC and DIV of CFG register
         */
        regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

        /*
         * Park the RCG at a safe configuration - sourced off of safe source.
         * Force enable and disable the RCG while configuring it to safeguard
         * against any update signal coming from the downstream clock.
         * The current parent is still prepared and enabled at this point, and
         * the safe source is always on while application processor subsystem
         * is online. Therefore, the RCG can safely switch its parent.
         */
        clk_rcg2_set_force_enable(hw);

        regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
                     rcg->safe_src_index << CFG_SRC_SEL_SHIFT);

        update_config(rcg);

        clk_rcg2_clear_force_enable(hw);

        /* Write back the stored configuration corresponding to current rate */
        regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
}
 939
/*
 * Ops for "shared" RCGs: enable/disable latch the pre-written configuration
 * and park the RCG at a safe source rather than switching it off outright
 * (see clk_rcg2_shared_enable()/clk_rcg2_shared_disable()).
 */
const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
	.set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);
 951
 952/* Common APIs to be used for DFS based RCGR */
/*
 * Decode one DFS performance-level register set (level @l) into a freq_tbl
 * entry @f: pre-divider, source, and — in MND mode — M and N values, then
 * compute the resulting frequency from the matching parent's current rate.
 */
static void clk_rcg2_dfs_populate_freq(struct clk_hw *hw, unsigned int l,
				       struct freq_tbl *f)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct clk_hw *p;
	unsigned long prate = 0;	/* stays 0 if no parent_map entry matches */
	u32 val, mask, cfg, mode, src;
	int i, num_parents;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(l), &cfg);

	/* HID divider lives in the low hid_width bits; 0 means divide-by-1 */
	mask = BIT(rcg->hid_width) - 1;
	f->pre_div = 1;
	if (cfg & mask)
		f->pre_div = cfg & mask;

	src = cfg & CFG_SRC_SEL_MASK;
	src >>= CFG_SRC_SEL_SHIFT;

	/* Map the hardware source field back to a parent and its rate */
	num_parents = clk_hw_get_num_parents(hw);
	for (i = 0; i < num_parents; i++) {
		if (src == rcg->parent_map[i].cfg) {
			f->src = rcg->parent_map[i].src;
			p = clk_hw_get_parent_by_index(&rcg->clkr.hw, i);
			prate = clk_hw_get_rate(p);
		}
	}

	mode = cfg & CFG_MODE_MASK;
	mode >>= CFG_MODE_SHIFT;
	if (mode) {
		/* MND mode: read M directly ... */
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_M_DFSR(l),
			    &val);
		val &= mask;
		f->m = val;

		/* ... the N register holds ~(N - M), so invert and add M back */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_N_DFSR(l),
			    &val);
		val = ~val;
		val &= mask;
		val += f->m;
		f->n = val;
	}

	f->freq = calc_rate(prate, f->m, f->n, mode, f->pre_div);
}
1000
1001static int clk_rcg2_dfs_populate_freq_table(struct clk_rcg2 *rcg)
1002{
1003        struct freq_tbl *freq_tbl;
1004        int i;
1005
1006        /* Allocate space for 1 extra since table is NULL terminated */
1007        freq_tbl = kcalloc(MAX_PERF_LEVEL + 1, sizeof(*freq_tbl), GFP_KERNEL);
1008        if (!freq_tbl)
1009                return -ENOMEM;
1010        rcg->freq_tbl = freq_tbl;
1011
1012        for (i = 0; i < MAX_PERF_LEVEL; i++)
1013                clk_rcg2_dfs_populate_freq(&rcg->clkr.hw, i, freq_tbl + i);
1014
1015        return 0;
1016}
1017
1018static int clk_rcg2_dfs_determine_rate(struct clk_hw *hw,
1019                                   struct clk_rate_request *req)
1020{
1021        struct clk_rcg2 *rcg = to_clk_rcg2(hw);
1022        int ret;
1023
1024        if (!rcg->freq_tbl) {
1025                ret = clk_rcg2_dfs_populate_freq_table(rcg);
1026                if (ret) {
1027                        pr_err("Failed to update DFS tables for %s\n",
1028                                        clk_hw_get_name(hw));
1029                        return ret;
1030                }
1031        }
1032
1033        return clk_rcg2_determine_rate(hw, req);
1034}
1035
/*
 * recalc_rate for DFS-controlled RCGs: read the currently selected perf
 * level from hardware and report its frequency, either from the cached
 * table or by decoding that level's registers directly.
 */
static unsigned long
clk_rcg2_dfs_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 level, mask, cfg, m = 0, n = 0, mode, pre_div;

	/* Active perf level sits in bits [4:1] of the DFS control register */
	regmap_read(rcg->clkr.regmap,
		    rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &level);
	level &= GENMASK(4, 1);
	level >>= 1;

	/* Fast path: the table was already decoded in determine_rate */
	if (rcg->freq_tbl)
		return rcg->freq_tbl[level].freq;

	/*
	 * Assume that parent_rate is actually the parent because
	 * we can't do any better at figuring it out when the table
	 * hasn't been populated yet. We only populate the table
	 * in determine_rate because we can't guarantee the parents
	 * will be registered with the framework until then.
	 */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(level),
		    &cfg);

	/* HID divider field: 0 means divide-by-1 */
	mask = BIT(rcg->hid_width) - 1;
	pre_div = 1;
	if (cfg & mask)
		pre_div = cfg & mask;

	mode = cfg & CFG_MODE_MASK;
	mode >>= CFG_MODE_SHIFT;
	if (mode) {
		/* MND mode: N register holds ~(N - M); invert and add M back */
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap,
			    rcg->cmd_rcgr + SE_PERF_M_DFSR(level), &m);
		m &= mask;

		regmap_read(rcg->clkr.regmap,
			    rcg->cmd_rcgr + SE_PERF_N_DFSR(level), &n);
		n = ~n;
		n &= mask;
		n += m;
	}

	return calc_rate(parent_rate, m, n, mode, pre_div);
}
1082
/*
 * Ops installed by clk_rcg2_enable_dfs() when hardware DFS is active:
 * no set_rate — the consumer's own DFS registers drive rate changes.
 */
static const struct clk_ops clk_rcg2_dfs_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.determine_rate = clk_rcg2_dfs_determine_rate,
	.recalc_rate = clk_rcg2_dfs_recalc_rate,
};
1089
1090static int clk_rcg2_enable_dfs(const struct clk_rcg_dfs_data *data,
1091                               struct regmap *regmap)
1092{
1093        struct clk_rcg2 *rcg = data->rcg;
1094        struct clk_init_data *init = data->init;
1095        u32 val;
1096        int ret;
1097
1098        ret = regmap_read(regmap, rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &val);
1099        if (ret)
1100                return -EINVAL;
1101
1102        if (!(val & SE_CMD_DFS_EN))
1103                return 0;
1104
1105        /*
1106         * Rate changes with consumer writing a register in
1107         * their own I/O region
1108         */
1109        init->flags |= CLK_GET_RATE_NOCACHE;
1110        init->ops = &clk_rcg2_dfs_ops;
1111
1112        rcg->freq_tbl = NULL;
1113
1114        return 0;
1115}
1116
1117int qcom_cc_register_rcg_dfs(struct regmap *regmap,
1118                             const struct clk_rcg_dfs_data *rcgs, size_t len)
1119{
1120        int i, ret;
1121
1122        for (i = 0; i < len; i++) {
1123                ret = clk_rcg2_enable_dfs(&rcgs[i], regmap);
1124                if (ret)
1125                        return ret;
1126        }
1127
1128        return 0;
1129}
1130EXPORT_SYMBOL_GPL(qcom_cc_register_rcg_dfs);
1131
/*
 * set_rate for the DisplayPort pixel RCG: keep the current source and
 * pre-divider and pick M/N so that rate ~= parent_rate * M / N within the
 * mnd_width field limits.
 */
static int clk_rcg2_dp_set_rate(struct clk_hw *hw, unsigned long rate,
			unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);
	unsigned long num, den;

	/*
	 * Approximate parent_rate/rate as den/num, i.e. rate ~= parent * num/den;
	 * note the deliberately swapped output pointers.
	 */
	rational_best_approximation(parent_rate, rate,
			GENMASK(rcg->mnd_width - 1, 0),
			GENMASK(rcg->mnd_width - 1, 0), &den, &num);

	if (!num || !den)
		return -EINVAL;

	/* Reuse the source and divider currently programmed in CFG */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	hid_div = cfg;
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}
	}

	f.pre_div = hid_div;
	f.pre_div >>= CFG_SRC_DIV_SHIFT;
	f.pre_div &= mask;

	/* num == den means a 1:1 ratio, i.e. bypass MND (m = n = 0) */
	if (num != den) {
		f.m = num;
		f.n = den;
	} else {
		f.m = 0;
		f.n = 0;
	}

	return clk_rcg2_configure(rcg, &f);
}
1175
/*
 * Combined rate+parent change for the DP RCG. clk_rcg2_dp_set_rate()
 * derives the source from the current CFG contents, so the parent index
 * argument is intentionally unused here.
 */
static int clk_rcg2_dp_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg2_dp_set_rate(hw, rate, parent_rate);
}
1181
1182static int clk_rcg2_dp_determine_rate(struct clk_hw *hw,
1183                                struct clk_rate_request *req)
1184{
1185        struct clk_rate_request parent_req = *req;
1186        int ret;
1187
1188        ret = __clk_determine_rate(clk_hw_get_parent(hw), &parent_req);
1189        if (ret)
1190                return ret;
1191
1192        req->best_parent_rate = parent_req.rate;
1193
1194        return 0;
1195}
1196
/*
 * Ops for the DisplayPort pixel clock RCG: rate selection is delegated to
 * the parent (see clk_rcg2_dp_determine_rate()) and M/N are derived by
 * rational approximation in clk_rcg2_dp_set_rate().
 */
const struct clk_ops clk_dp_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_rcg2_dp_set_rate,
	.set_rate_and_parent = clk_rcg2_dp_set_rate_and_parent,
	.determine_rate = clk_rcg2_dp_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_dp_ops);
1207