/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level suspend code for AM43XX SoCs
 *
 * Copyright (C) 2013-2018 Texas Instruments Incorporated - https://www.ti.com/
 *      Dave Gerlach, Vaibhav Bedia
 */

#include <linux/linkage.h>
#include <linux/ti-emif-sram.h>
#include <linux/platform_data/pm33xx.h>
#include <asm/assembler.h>
#include <asm/hardware/cache-l2x0.h>
#include <asm/memory.h>

#include "cm33xx.h"
#include "common.h"
#include "iomap.h"
#include "omap-secure.h"
#include "omap44xx.h"
#include "pm-asm-offsets.h"
#include "prm33xx.h"
#include "prcm43xx.h"

/* replicated define because linux/bitops.h cannot be included in assembly */
#define BIT(nr)                 (1 << (nr))

#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED          0x00030000
#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE            0x0003
#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE             0x0002

#define AM43XX_EMIF_POWEROFF_ENABLE                     0x1
#define AM43XX_EMIF_POWEROFF_DISABLE                    0x0

#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP          0x1
#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO           0x3

#define AM43XX_CM_BASE                                  0x44DF0000

#define AM43XX_CM_REGADDR(inst, reg)                           \
        AM33XX_L4_WK_IO_ADDRESS(AM43XX_CM_BASE + (inst) + (reg))

#define AM43XX_CM_MPU_CLKSTCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
                                        AM43XX_CM_MPU_MPU_CDOFFS)
#define AM43XX_CM_MPU_MPU_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
                                        AM43XX_CM_MPU_MPU_CLKCTRL_OFFSET)
#define AM43XX_CM_PER_EMIF_CLKCTRL  AM43XX_CM_REGADDR(AM43XX_CM_PER_INST, \
                                        AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
#define AM43XX_PRM_EMIF_CTRL_OFFSET                     0x0030

#define RTC_SECONDS_REG                                 0x0
#define RTC_PMIC_REG                                    0x98
#define RTC_PMIC_POWER_EN                               BIT(16)
#define RTC_PMIC_EXT_WAKEUP_STS                         BIT(12)
#define RTC_PMIC_EXT_WAKEUP_POL                         BIT(4)
#define RTC_PMIC_EXT_WAKEUP_EN                          BIT(0)

        .arm
        .arch armv7-a
        .arch_extension sec
        .align 3

ENTRY(am43xx_do_wfi)
        stmfd   sp!, {r4 - r11, lr}     @ save registers on stack

        /* Save wfi_flags arg to data space */
        mov     r4, r0
        adr     r3, am43xx_pm_ro_sram_data
        ldr     r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
        str     r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]
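        /*
         * The flags are stashed in the SRAM data area so they can be
         * reloaded after the cache maintenance below; the flush helpers
         * are free to clobber r4.
         */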

#ifdef CONFIG_CACHE_L2X0
        /* Retrieve l2 cache virt address BEFORE we shut off EMIF */
        ldr     r1, get_l2cache_base
        blx     r1
        mov     r8, r0
#endif

        /* Only flush cache if we know we are losing MPU context */
        tst     r4, #WFI_FLAG_FLUSH_CACHE
        beq     cache_skip_flush

        /*
         * Flush all data from the L1 and L2 data cache before disabling
         * the SCTLR.C bit.
         */
        ldr     r1, kernel_flush
        blx     r1

        /*
         * Clear the SCTLR.C bit to prevent further data cache
         * allocation. Clearing SCTLR.C makes all data accesses
         * strongly ordered, so they no longer hit the cache.
         */
        mrc     p15, 0, r0, c1, c0, 0
        bic     r0, r0, #(1 << 2)       @ Disable the C bit
        mcr     p15, 0, r0, c1, c0, 0
        isb
        dsb

        /*
         * Invalidate L1 and L2 data cache.
         */
        ldr     r1, kernel_flush
        blx     r1

#ifdef CONFIG_CACHE_L2X0
        /*
         * Clean and invalidate the L2 cache.
         */
#ifdef CONFIG_PL310_ERRATA_727915
        mov     r0, #0x03
        mov     r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
        dsb
        smc     #0
        dsb
#endif
        mov     r0, r8
        adr     r4, am43xx_pm_ro_sram_data
        ldr     r3, [r4, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]

        mov     r2, r0
        ldr     r0, [r2, #L2X0_AUX_CTRL]
        str     r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
        ldr     r0, [r2, #L310_PREFETCH_CTRL]
        str     r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

        ldr     r0, l2_val
        str     r0, [r2, #L2X0_CLEAN_INV_WAY]
wait:
        ldr     r0, [r2, #L2X0_CLEAN_INV_WAY]
        ldr     r1, l2_val
        ands    r0, r0, r1
        bne     wait
#ifdef CONFIG_PL310_ERRATA_727915
        mov     r0, #0x00
        mov     r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
        dsb
        smc     #0
        dsb
#endif
l2x_sync:
        mov     r0, r8
        mov     r2, r0
        mov     r0, #0x0
        str     r0, [r2, #L2X0_CACHE_SYNC]
sync:
        ldr     r0, [r2, #L2X0_CACHE_SYNC]
        ands    r0, r0, #0x1
        bne     sync
#endif

        /* Restore wfi_flags */
        adr     r3, am43xx_pm_ro_sram_data
        ldr     r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
        ldr     r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

cache_skip_flush:
        /*
         * If we are trying to enter RTC+DDR mode we must perform
         * a read from the RTC address space to ensure the translation
         * is present in the TLB, avoiding a page table walk after DDR
         * becomes unavailable.
         */
        tst     r4, #WFI_FLAG_RTC_ONLY
        beq     skip_rtc_va_refresh

        adr     r3, am43xx_pm_ro_sram_data
        ldr     r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]
        ldr     r0, [r1]

skip_rtc_va_refresh:
        /* Check if we want self refresh */
        tst     r4, #WFI_FLAG_SELF_REFRESH
        beq     emif_skip_enter_sr

        adr     r9, am43xx_emif_sram_table
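        /*
         * r9 now points at the EMIF PM function table. The ti-emif-sram
         * driver fills this area at runtime with the addresses of its
         * SRAM-resident helpers, indexed by the EMIF_PM_*_OFFSET values.
         */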

        ldr     r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
        blx     r3

emif_skip_enter_sr:
        /* Only necessary if PER is losing context */
        tst     r4, #WFI_FLAG_SAVE_EMIF
        beq     emif_skip_save

        ldr     r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
        blx     r3

emif_skip_save:
        /* We can only disable the EMIF if we have entered self refresh */
        tst     r4, #WFI_FLAG_SELF_REFRESH
        beq     emif_skip_disable

        /* Disable EMIF */
        ldr     r1, am43xx_virt_emif_clkctrl
        ldr     r2, [r1]
        bic     r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
        str     r2, [r1]
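        /*
         * Clearing the two MODULEMODE bits requests the DISABLED mode for
         * the EMIF; the loop below polls the same CLKCTRL register until
         * the IDLEST field reports the module as fully disabled.
         */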

wait_emif_disable:
        ldr     r2, [r1]
        mov     r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
        cmp     r2, r3
        bne     wait_emif_disable

emif_skip_disable:
        tst     r4, #WFI_FLAG_RTC_ONLY
        beq     skip_rtc_only

        adr     r3, am43xx_pm_ro_sram_data
        ldr     r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]

        ldr     r0, [r1, #RTC_PMIC_REG]
        orr     r0, r0, #RTC_PMIC_POWER_EN
        orr     r0, r0, #RTC_PMIC_EXT_WAKEUP_STS
        orr     r0, r0, #RTC_PMIC_EXT_WAKEUP_EN
        orr     r0, r0, #RTC_PMIC_EXT_WAKEUP_POL
        str     r0, [r1, #RTC_PMIC_REG]
        ldr     r0, [r1, #RTC_PMIC_REG]
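        /*
         * The write above enables the RTC's PMIC power-enable control and
         * configures the external wakeup input; the read-back makes sure
         * the write has reached the RTC before timing starts. If power is
         * not cut within the two-second window below, we fall through and
         * re-enable the EMIF as for an aborted suspend.
         */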
        /* Wait for 2 seconds to lose power */
        mov     r3, #2
        ldr     r2, [r1, #RTC_SECONDS_REG]
rtc_loop:
        ldr     r0, [r1, #RTC_SECONDS_REG]
        cmp     r0, r2
        beq     rtc_loop
        mov     r2, r0
        subs    r3, r3, #1
        bne     rtc_loop

        b       re_enable_emif

skip_rtc_only:

        tst     r4, #WFI_FLAG_WAKE_M3
        beq     wkup_m3_skip

        /*
         * For the MPU WFI to be registered as an interrupt
         * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
         * to DISABLED
         */
        ldr     r1, am43xx_virt_mpu_clkctrl
        ldr     r2, [r1]
        bic     r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
        str     r2, [r1]

        /*
         * Put MPU CLKDM to SW_SLEEP
         */
        ldr     r1, am43xx_virt_mpu_clkstctrl
        mov     r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP
        str     r2, [r1]

wkup_m3_skip:
        /*
         * Execute a barrier instruction to ensure that all cache,
         * TLB and branch predictor maintenance operations issued
         * have completed.
         */
        dsb
        dmb

        /*
         * Execute a WFI instruction and wait until the
         * STANDBYWFI output is asserted to indicate that the
         * CPU is in an idle, low-power state. The CPU can speculatively
         * prefetch instructions, so add NOPs after the WFI. Sixteen
         * NOPs as per the Cortex-A9 pipeline.
         */
        wfi

        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop

        /* We come here in case of an abort due to a late interrupt */
        ldr     r1, am43xx_virt_mpu_clkstctrl
        mov     r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
        str     r2, [r1]

        /* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
        ldr     r1, am43xx_virt_mpu_clkctrl
        mov     r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
        str     r2, [r1]

re_enable_emif:
        /* Re-enable EMIF */
        ldr     r1, am43xx_virt_emif_clkctrl
        mov     r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
        str     r2, [r1]
wait_emif_enable:
        ldr     r3, [r1]
        cmp     r2, r3
        bne     wait_emif_enable

        tst     r4, #WFI_FLAG_FLUSH_CACHE
        beq     cache_skip_restore

        /*
         * Set SCTLR.C bit to allow data cache allocation
         */
        mrc     p15, 0, r0, c1, c0, 0
        orr     r0, r0, #(1 << 2)       @ Enable the C bit
        mcr     p15, 0, r0, c1, c0, 0
        isb

cache_skip_restore:
        /* Only necessary if PER is losing context */
        tst     r4, #WFI_FLAG_SELF_REFRESH
        beq     emif_skip_exit_sr_abt

        adr     r9, am43xx_emif_sram_table
        ldr     r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
        blx     r1

emif_skip_exit_sr_abt:
        /* Let the suspend code know about the abort */
        mov     r0, #1
        ldmfd   sp!, {r4 - r11, pc}     @ restore regs and return
ENDPROC(am43xx_do_wfi)

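        /*
         * am43xx_resume_offset records how far the deep-sleep resume
         * entry point lies from am43xx_do_wfi, so the pm33xx core can
         * locate it inside the copy of this code running from SRAM.
         */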
        .align
ENTRY(am43xx_resume_offset)
        .word . - am43xx_do_wfi

ENTRY(am43xx_resume_from_deep_sleep)
        /* Set MPU CLKSTCTRL to HW AUTO so that CPUidle works properly */
        ldr     r1, am43xx_virt_mpu_clkstctrl
        mov     r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
        str     r2, [r1]

        /* For AM43xx, use EMIF power down until context is restored */
        ldr     r2, am43xx_phys_emif_poweroff
        mov     r1, #AM43XX_EMIF_POWEROFF_ENABLE
        str     r1, [r2, #0x0]

        /* Re-enable EMIF */
        ldr     r1, am43xx_phys_emif_clkctrl
        mov     r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
        str     r2, [r1]
wait_emif_enable1:
        ldr     r3, [r1]
        cmp     r2, r3
        bne     wait_emif_enable1

        adr     r9, am43xx_emif_sram_table

        ldr     r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
        blx     r1

        ldr     r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
        blx     r1

        ldr     r2, am43xx_phys_emif_poweroff
        mov     r1, #AM43XX_EMIF_POWEROFF_DISABLE
        str     r1, [r2, #0x0]

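        /*
         * With the controller back up and self refresh exited, have the
         * SRAM EMIF helper re-run hardware leveling so the DDR interface
         * is retrained before normal traffic resumes.
         */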
        ldr     r1, [r9, #EMIF_PM_RUN_HW_LEVELING]
        blx     r1

#ifdef CONFIG_CACHE_L2X0
        ldr     r2, l2_cache_base
        ldr     r0, [r2, #L2X0_CTRL]
        and     r0, #0x0f
        cmp     r0, #1
        beq     skip_l2en                       @ Skip if already enabled

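        /*
         * The PL310 control registers can only be written from the secure
         * world, so the saved prefetch control, aux control and the final
         * cache enable are restored through secure monitor calls (service
         * indices held in l2_smc1/l2_smc2/l2_smc3 below).
         */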
        adr     r4, am43xx_pm_ro_sram_data
        ldr     r3, [r4, #AMX3_PM_RO_SRAM_DATA_PHYS_OFFSET]
        ldr     r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

        ldr     r12, l2_smc1
        dsb
        smc     #0
        dsb
set_aux_ctrl:
        ldr     r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
        ldr     r12, l2_smc2
        dsb
        smc     #0
        dsb

        /* L2 invalidate on resume */
        ldr     r0, l2_val
        ldr     r2, l2_cache_base
        str     r0, [r2, #L2X0_INV_WAY]
wait2:
        ldr     r0, [r2, #L2X0_INV_WAY]
        ldr     r1, l2_val
        ands    r0, r0, r1
        bne     wait2
#ifdef CONFIG_PL310_ERRATA_727915
        mov     r0, #0x00
        mov     r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
        dsb
        smc     #0
        dsb
#endif
l2x_sync2:
        ldr     r2, l2_cache_base
        mov     r0, #0x0
        str     r0, [r2, #L2X0_CACHE_SYNC]
sync2:
        ldr     r0, [r2, #L2X0_CACHE_SYNC]
        ands    r0, r0, #0x1
        bne     sync2

        mov     r0, #0x1
        ldr     r12, l2_smc3
        dsb
        smc     #0
        dsb
#endif
skip_l2en:
        /* We are back. Branch to the common CPU resume routine */
        mov     r0, #0
        ldr     pc, resume_addr
ENDPROC(am43xx_resume_from_deep_sleep)

/*
 * Local variables
 */
        .align
kernel_flush:
        .word   v7_flush_dcache_all
ddr_start:
        .word   PAGE_OFFSET

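/*
 * The virt_* words below hold addresses from the kernel's fixed L4 I/O
 * mapping and are used on the suspend path, which runs with the MMU on;
 * the phys_* words are used by the deep-sleep resume path, which runs
 * before the MMU is re-enabled.
 */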
am43xx_phys_emif_poweroff:
        .word   (AM43XX_CM_BASE + AM43XX_PRM_DEVICE_INST + \
                 AM43XX_PRM_EMIF_CTRL_OFFSET)
am43xx_virt_mpu_clkstctrl:
        .word   (AM43XX_CM_MPU_CLKSTCTRL)
am43xx_virt_mpu_clkctrl:
        .word   (AM43XX_CM_MPU_MPU_CLKCTRL)
am43xx_virt_emif_clkctrl:
        .word   (AM43XX_CM_PER_EMIF_CLKCTRL)
am43xx_phys_emif_clkctrl:
        .word   (AM43XX_CM_BASE + AM43XX_CM_PER_INST + \
                 AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)

#ifdef CONFIG_CACHE_L2X0
/* L2 cache related defines for AM437x */
get_l2cache_base:
        .word   omap4_get_l2cache_base
l2_cache_base:
        .word   OMAP44XX_L2CACHE_BASE
l2_smc1:
        .word   OMAP4_MON_L2X0_PREFETCH_INDEX
l2_smc2:
        .word   OMAP4_MON_L2X0_AUXCTRL_INDEX
l2_smc3:
        .word   OMAP4_MON_L2X0_CTRL_INDEX
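/* Way mask selecting all 16 PL310 ways for the *_WAY maintenance operations */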
l2_val:
        .word   0xffff
#endif

.align 3
/* DDR related defines */
ENTRY(am43xx_emif_sram_table)
        .space EMIF_PM_FUNCTIONS_SIZE

ENTRY(am43xx_pm_sram)
        .word am43xx_do_wfi
        .word am43xx_do_wfi_sz
        .word am43xx_resume_offset
        .word am43xx_emif_sram_table
        .word am43xx_pm_ro_sram_data

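/*
 * Physical address of the generic cpu_resume entry point; the virtual
 * address is translated assuming DDR starts at 0x80000000, since
 * resume_addr is branched to while the MMU is still off.
 */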
resume_addr:
        .word   cpu_resume - PAGE_OFFSET + 0x80000000
.align 3

ENTRY(am43xx_pm_ro_sram_data)
        .space AMX3_PM_RO_SRAM_DATA_SIZE

ENTRY(am43xx_do_wfi_sz)
        .word   . - am43xx_do_wfi
