uboot/drivers/clk/clk_zynq.c
// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2017 Weidmüller Interface GmbH & Co. KG
 * Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
 *
 * Copyright (C) 2013 Soren Brinkmann <soren.brinkmann@xilinx.com>
 * Copyright (C) 2013 Xilinx, Inc. All rights reserved.
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <log.h>
#include <asm/global_data.h>
#include <dm/device_compat.h>
#include <dm/lists.h>
#include <errno.h>
#include <asm/io.h>
#include <asm/arch/clk.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>

/* Register bitfield defines */
#define PLLCTRL_FBDIV_MASK      0x7f000
#define PLLCTRL_FBDIV_SHIFT     12
#define PLLCTRL_BPFORCE_MASK    (1 << 4)
#define PLLCTRL_PWRDWN_MASK     2
#define PLLCTRL_PWRDWN_SHIFT    1
#define PLLCTRL_RESET_MASK      1
#define PLLCTRL_RESET_SHIFT     0

#define ZYNQ_CLK_MAXDIV         0x3f
#define CLK_CTRL_DIV1_SHIFT     20
#define CLK_CTRL_DIV1_MASK      (ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV1_SHIFT)
#define CLK_CTRL_DIV0_SHIFT     8
#define CLK_CTRL_DIV0_MASK      (ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV0_SHIFT)
#define CLK_CTRL_SRCSEL_SHIFT   4
#define CLK_CTRL_SRCSEL_MASK    (0x3 << CLK_CTRL_SRCSEL_SHIFT)

#define CLK_CTRL_DIV2X_SHIFT    26
#define CLK_CTRL_DIV2X_MASK     (ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV2X_SHIFT)
#define CLK_CTRL_DIV3X_SHIFT    20
#define CLK_CTRL_DIV3X_MASK     (ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV3X_SHIFT)

DECLARE_GLOBAL_DATA_PTR;

#ifndef CONFIG_SPL_BUILD
enum zynq_clk_rclk {mio_clk, emio_clk};
#endif

struct zynq_clk_priv {
	ulong ps_clk_freq;
#ifndef CONFIG_SPL_BUILD
	struct clk gem_emio_clk[2];
#endif
};

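/*
 * Return a pointer to the SLCR control register that configures the given
 * clock. Debug clocks and any unknown ID fall back to the debug clock
 * control register.
 */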
static void *zynq_clk_get_register(enum zynq_clk id)
{
	switch (id) {
	case armpll_clk:
		return &slcr_base->arm_pll_ctrl;
	case ddrpll_clk:
		return &slcr_base->ddr_pll_ctrl;
	case iopll_clk:
		return &slcr_base->io_pll_ctrl;
	case lqspi_clk:
		return &slcr_base->lqspi_clk_ctrl;
	case smc_clk:
		return &slcr_base->smc_clk_ctrl;
	case pcap_clk:
		return &slcr_base->pcap_clk_ctrl;
	case sdio0_clk ... sdio1_clk:
		return &slcr_base->sdio_clk_ctrl;
	case uart0_clk ... uart1_clk:
		return &slcr_base->uart_clk_ctrl;
	case spi0_clk ... spi1_clk:
		return &slcr_base->spi_clk_ctrl;
#ifndef CONFIG_SPL_BUILD
	case dci_clk:
		return &slcr_base->dci_clk_ctrl;
	case gem0_clk:
		return &slcr_base->gem0_clk_ctrl;
	case gem1_clk:
		return &slcr_base->gem1_clk_ctrl;
	case fclk0_clk:
		return &slcr_base->fpga0_clk_ctrl;
	case fclk1_clk:
		return &slcr_base->fpga1_clk_ctrl;
	case fclk2_clk:
		return &slcr_base->fpga2_clk_ctrl;
	case fclk3_clk:
		return &slcr_base->fpga3_clk_ctrl;
	case can0_clk ... can1_clk:
		return &slcr_base->can_clk_ctrl;
	case dbg_trc_clk ... dbg_apb_clk:
		/* fall through */
#endif
	default:
		return &slcr_base->dbg_clk_ctrl;
	}
}

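/* Decode the SRCSEL field of the ARM clock control register into a PLL ID */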
static enum zynq_clk zynq_clk_get_cpu_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return ddrpll_clk;
	case 3:
		return iopll_clk;
	case 0 ... 1:
	default:
		return armpll_clk;
	}
}

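/* Decode the SRCSEL field of a peripheral clock register into a PLL ID */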
static enum zynq_clk zynq_clk_get_peripheral_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return armpll_clk;
	case 3:
		return ddrpll_clk;
	case 0 ... 1:
	default:
		return iopll_clk;
	}
}

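/*
 * Calculate a PLL output rate: the PS reference clock times the feedback
 * divider, 1:1 when the PLL is bypassed, or 0 when the PLL is in reset or
 * powered down.
 */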
static ulong zynq_clk_get_pll_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_ctrl, reset, pwrdwn, mul, bypass;

	clk_ctrl = readl(zynq_clk_get_register(id));

	reset = (clk_ctrl & PLLCTRL_RESET_MASK) >> PLLCTRL_RESET_SHIFT;
	pwrdwn = (clk_ctrl & PLLCTRL_PWRDWN_MASK) >> PLLCTRL_PWRDWN_SHIFT;
	if (reset || pwrdwn)
		return 0;

	bypass = clk_ctrl & PLLCTRL_BPFORCE_MASK;
	if (bypass)
		mul = 1;
	else
		mul = (clk_ctrl & PLLCTRL_FBDIV_MASK) >> PLLCTRL_FBDIV_SHIFT;

	return priv->ps_clk_freq * mul;
}

#ifndef CONFIG_SPL_BUILD
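/* Report whether a GEM RX clock is sourced from MIO or EMIO */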
static enum zynq_clk_rclk zynq_clk_get_gem_rclk(enum zynq_clk id)
{
	u32 clk_ctrl, srcsel;

	if (id == gem0_clk)
		clk_ctrl = readl(&slcr_base->gem0_rclk_ctrl);
	else
		clk_ctrl = readl(&slcr_base->gem1_rclk_ctrl);

	srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;
	if (srcsel)
		return emio_clk;
	else
		return mio_clk;
}
#endif

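/*
 * Calculate a CPU clock rate from the selected PLL, the ARM clock divisor
 * and the 6:2:1 / 4:2:1 ratio mode (clk_621_true).
 */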
static ulong zynq_clk_get_cpu_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_621, clk_ctrl, div;
	enum zynq_clk pll;

	clk_ctrl = readl(&slcr_base->arm_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;

	switch (id) {
	case cpu_1x_clk:
		div *= 2;
		/* fall through */
	case cpu_2x_clk:
		clk_621 = readl(&slcr_base->clk_621_true) & 1;
		div *= 2 + clk_621;
		break;
	case cpu_3or2x_clk:
		div *= 2;
		/* fall through */
	case cpu_6or4x_clk:
		break;
	default:
		return 0;
	}

	pll = zynq_clk_get_cpu_pll(clk_ctrl);

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, pll), div);
}

#ifndef CONFIG_SPL_BUILD
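/* DDR 2x clock: DDR PLL rate divided by the DIV2X field */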
static ulong zynq_clk_get_ddr2x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV2X_MASK) >> CLK_CTRL_DIV2X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}
#endif

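/* DDR 3x clock: DDR PLL rate divided by the DIV3X field */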
static ulong zynq_clk_get_ddr3x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV3X_MASK) >> CLK_CTRL_DIV3X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}

#ifndef CONFIG_SPL_BUILD
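/* DCI clock: DDR PLL rate divided by both DCI clock divisors */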
static ulong zynq_clk_get_dci_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div0, div1;

	clk_ctrl = readl(&slcr_base->dci_clk_ctrl);

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;

	return DIV_ROUND_CLOSEST(DIV_ROUND_CLOSEST(
		zynq_clk_get_pll_rate(priv, ddrpll_clk), div0), div1);
}
#endif

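/*
 * Generic peripheral clock rate: the source PLL rate divided by DIV0 and,
 * for clocks with two divisors, additionally by DIV1.
 */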
static ulong zynq_clk_get_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0;
	u32 div1 = 1;

	clk_ctrl = readl(zynq_clk_get_register(id));

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	if (!div0)
		div0 = 1;

#ifndef CONFIG_SPL_BUILD
	if (two_divs) {
		div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;
		if (!div1)
			div1 = 1;
	}
#endif

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);

	return
		DIV_ROUND_CLOSEST(
			DIV_ROUND_CLOSEST(
				zynq_clk_get_pll_rate(priv, pll), div0),
			div1);
}

#ifndef CONFIG_SPL_BUILD
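/*
 * GEM reference clock rate: generated internally from a PLL when the RX
 * clock is sourced from MIO, otherwise taken from the gem*_emio_clk input
 * described in the device tree.
 */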
static ulong zynq_clk_get_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	struct clk *parent;

	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_get_peripheral_rate(priv, id, true);

	parent = &priv->gem_emio_clk[id - gem0_clk];
	if (parent->dev)
		return clk_get_rate(parent);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id - gem0_clk);

	return -ENOSYS;
}

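/*
 * Brute-force search for the divisor pair that brings the peripheral clock
 * closest to the requested rate; the chosen divisors are returned through
 * @div0 and @div1, the best achievable rate is the return value.
 */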
static unsigned long zynq_clk_calc_peripheral_two_divs(ulong rate,
						       ulong pll_rate,
						       u32 *div0, u32 *div1)
{
	long new_err, best_err = (long)(~0UL >> 1);
	ulong new_rate, best_rate = 0;
	u32 d0, d1;

	for (d0 = 1; d0 <= ZYNQ_CLK_MAXDIV; d0++) {
		for (d1 = 1; d1 <= ZYNQ_CLK_MAXDIV >> 1; d1++) {
			new_rate = DIV_ROUND_CLOSEST(
					DIV_ROUND_CLOSEST(pll_rate, d0), d1);
			new_err = abs(new_rate - rate);

			if (new_err < best_err) {
				*div0 = d0;
				*div1 = d1;
				best_err = new_err;
				best_rate = new_rate;
			}
		}
	}

	return best_rate;
}

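/*
 * Program the divisor field(s) of a peripheral clock control register for
 * the requested rate (the PLL selection is left unchanged) and return the
 * rate that is actually achieved.
 */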
static ulong zynq_clk_set_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, ulong rate,
					  bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0 = 0, div1 = 0;
	ulong pll_rate, new_rate;
	u32 *reg;

	reg = zynq_clk_get_register(id);
	clk_ctrl = readl(reg);

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);
	pll_rate = zynq_clk_get_pll_rate(priv, pll);
	clk_ctrl &= ~CLK_CTRL_DIV0_MASK;
	if (two_divs) {
		clk_ctrl &= ~CLK_CTRL_DIV1_MASK;
		new_rate = zynq_clk_calc_peripheral_two_divs(rate, pll_rate,
				&div0, &div1);
		clk_ctrl |= div1 << CLK_CTRL_DIV1_SHIFT;
	} else {
		div0 = DIV_ROUND_CLOSEST(pll_rate, rate);
		if (div0 > ZYNQ_CLK_MAXDIV)
			div0 = ZYNQ_CLK_MAXDIV;
		/* report the rate actually produced by the chosen divisor */
		new_rate = DIV_ROUND_CLOSEST(pll_rate, div0);
	}
	clk_ctrl |= div0 << CLK_CTRL_DIV0_SHIFT;

	zynq_slcr_unlock();
	writel(clk_ctrl, reg);
	zynq_slcr_lock();

	return new_rate;
}

static ulong zynq_clk_set_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id,
				   ulong rate)
{
	struct clk *parent;

	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_set_peripheral_rate(priv, id, rate, true);

	parent = &priv->gem_emio_clk[id - gem0_clk];
	if (parent->dev)
		return clk_set_rate(parent, rate);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id - gem0_clk);

	return -ENOSYS;
}
#endif

#ifndef CONFIG_SPL_BUILD
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case armpll_clk ... iopll_clk:
		return zynq_clk_get_pll_rate(priv, id);
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr2x_clk:
		return zynq_clk_get_ddr2x_rate(priv);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case dci_clk:
		return zynq_clk_get_dci_rate(priv);
	case gem0_clk ... gem1_clk:
		return zynq_clk_get_gem_rate(priv, id);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case dbg_trc_clk ... dbg_apb_clk:
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, two_divs);
	case dma_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_2x_clk);
	case usb0_aper_clk ... swdt_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
	default:
		return -ENXIO;
	}
}

static ulong zynq_clk_set_rate(struct clk *clk, ulong rate)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case gem0_clk ... gem1_clk:
		return zynq_clk_set_gem_rate(priv, id, rate);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
	case dbg_trc_clk ... dbg_apb_clk:
		return zynq_clk_set_peripheral_rate(priv, id, rate, two_divs);
	default:
		return -ENXIO;
	}
}
#else
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;

	switch (id) {
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, 0);
	case i2c0_aper_clk ... i2c1_aper_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
	default:
		return -ENXIO;
	}
}
#endif

static int dummy_enable(struct clk *clk)
{
	/*
	 * All clocks handled by this driver are already enabled after
	 * power-up, which is the only case supported so far, so there is
	 * nothing to do here. A real implementation can be added once
	 * clock gating is required.
	 */
	return 0;
}

static struct clk_ops zynq_clk_ops = {
	.get_rate = zynq_clk_get_rate,
#ifndef CONFIG_SPL_BUILD
	.set_rate = zynq_clk_set_rate,
#endif
	.enable = dummy_enable,
};

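/*
 * Probe: pick up the optional gem*_emio_clk inputs from the device tree and
 * read the PS reference clock frequency (default 33333333 Hz).
 */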
static int zynq_clk_probe(struct udevice *dev)
{
	struct zynq_clk_priv *priv = dev_get_priv(dev);
#ifndef CONFIG_SPL_BUILD
	unsigned int i;
	char name[16];
	int ret;

	for (i = 0; i < 2; i++) {
		sprintf(name, "gem%d_emio_clk", i);
		ret = clk_get_by_name(dev, name, &priv->gem_emio_clk[i]);
		if (ret < 0 && ret != -ENODATA) {
			dev_err(dev, "failed to get %s clock\n", name);
			return ret;
		}
	}
#endif

	priv->ps_clk_freq = fdtdec_get_uint(gd->fdt_blob, dev_of_offset(dev),
					    "ps-clk-frequency", 33333333UL);

	return 0;
}

static const struct udevice_id zynq_clk_ids[] = {
	{ .compatible = "xlnx,ps7-clkc"},
	{}
};

U_BOOT_DRIVER(zynq_clk) = {
	.name		= "zynq_clk",
	.id		= UCLASS_CLK,
	.of_match	= zynq_clk_ids,
	.ops		= &zynq_clk_ops,
	.priv_auto	= sizeof(struct zynq_clk_priv),
	.probe		= zynq_clk_probe,
};