linux/drivers/staging/spectra/lld_nand.c
   1/*
   2 * NAND Flash Controller Device Driver
   3 * Copyright (c) 2009, Intel Corporation and its suppliers.
   4 *
   5 * This program is free software; you can redistribute it and/or modify it
   6 * under the terms and conditions of the GNU General Public License,
   7 * version 2, as published by the Free Software Foundation.
   8 *
   9 * This program is distributed in the hope it will be useful, but WITHOUT
  10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
  12 * more details.
  13 *
  14 * You should have received a copy of the GNU General Public License along with
  15 * this program; if not, write to the Free Software Foundation, Inc.,
  16 * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
  17 *
  18 */
  19
  20#include "lld.h"
  21#include "lld_nand.h"
  22#include "lld_cdma.h"
  23
  24#include "spectraswconfig.h"
  25#include "flash.h"
  26#include "ffsdefs.h"
  27
  28#include <linux/interrupt.h>
  29#include <linux/delay.h>
  30#include <linux/wait.h>
  31#include <linux/mutex.h>
  32
  33#include "nand_regs.h"
  34
  35#define SPECTRA_NAND_NAME    "nd"
  36
  37#define CEIL_DIV(X, Y) (((X)%(Y)) ? ((X)/(Y)+1) : ((X)/(Y)))
  38#define MAX_PAGES_PER_RW        128
  39
  40#define INT_IDLE_STATE                 0
  41#define INT_READ_PAGE_MAIN    0x01
  42#define INT_WRITE_PAGE_MAIN    0x02
  43#define INT_PIPELINE_READ_AHEAD    0x04
  44#define INT_PIPELINE_WRITE_AHEAD    0x08
  45#define INT_MULTI_PLANE_READ    0x10
  46#define INT_MULTI_PLANE_WRITE    0x11
  47
  48static u32 enable_ecc;
  49
  50struct mrst_nand_info info;
  51
  52int totalUsedBanks;
  53u32 GLOB_valid_banks[LLD_MAX_FLASH_BANKS];
  54
  55void __iomem *FlashReg;
  56void __iomem *FlashMem;
  57
  58u16 conf_parameters[] = {
  59        0x0000,
  60        0x0000,
  61        0x01F4,
  62        0x01F4,
  63        0x01F4,
  64        0x01F4,
  65        0x0000,
  66        0x0000,
  67        0x0001,
  68        0x0000,
  69        0x0000,
  70        0x0000,
  71        0x0000,
  72        0x0040,
  73        0x0001,
  74        0x000A,
  75        0x000A,
  76        0x000A,
  77        0x0000,
  78        0x0000,
  79        0x0005,
  80        0x0012,
  81        0x000C
  82};
  83
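     /*
      * Check the factory bad-block marks of a block: read the spare area of
      * the first two and the last two pages and report the block as defective
      * if any byte in the marker region (the skip bytes following the ECC
      * flag bytes) is not 0xFF.
      */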
  84u16   NAND_Get_Bad_Block(u32 block)
  85{
  86        u32 status = PASS;
  87        u32 flag_bytes  = 0;
  88        u32 skip_bytes  = DeviceInfo.wSpareSkipBytes;
  89        u32 page, i;
  90        u8 *pReadSpareBuf = buf_get_bad_block;
  91
  92        if (enable_ecc)
  93                flag_bytes = DeviceInfo.wNumPageSpareFlag;
  94
  95        for (page = 0; page < 2; page++) {
  96                status = NAND_Read_Page_Spare(pReadSpareBuf, block, page, 1);
  97                if (status != PASS)
  98                        return READ_ERROR;
  99                for (i = flag_bytes; i < (flag_bytes + skip_bytes); i++)
 100                        if (pReadSpareBuf[i] != 0xff)
 101                                return DEFECTIVE_BLOCK;
 102        }
 103
 104        for (page = 1; page < 3; page++) {
 105                status = NAND_Read_Page_Spare(pReadSpareBuf, block,
 106                        DeviceInfo.wPagesPerBlock - page , 1);
 107                if (status != PASS)
 108                        return READ_ERROR;
 109                for (i = flag_bytes; i < (flag_bytes + skip_bytes); i++)
 110                        if (pReadSpareBuf[i] != 0xff)
 111                                return DEFECTIVE_BLOCK;
 112        }
 113
 114        return GOOD_BLOCK;
 115}
 116
 117
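     /*
      * Reset all flash banks: clear any pending reset-complete/timeout status
      * bits, issue DEVICE_RESET for each bank, busy-wait until the bank
      * reports completion or timeout (warning on timeout), then clear the
      * status bits again.
      */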
 118u16 NAND_Flash_Reset(void)
 119{
 120        u32 i;
 121        u32 intr_status_rst_comp[4] = {INTR_STATUS0__RST_COMP,
 122                INTR_STATUS1__RST_COMP,
 123                INTR_STATUS2__RST_COMP,
 124                INTR_STATUS3__RST_COMP};
 125        u32 intr_status_time_out[4] = {INTR_STATUS0__TIME_OUT,
 126                INTR_STATUS1__TIME_OUT,
 127                INTR_STATUS2__TIME_OUT,
 128                INTR_STATUS3__TIME_OUT};
 129        u32 intr_status[4] = {INTR_STATUS0, INTR_STATUS1,
 130                INTR_STATUS2, INTR_STATUS3};
 131        u32 device_reset_banks[4] = {DEVICE_RESET__BANK0,
 132                DEVICE_RESET__BANK1,
 133                DEVICE_RESET__BANK2,
 134                DEVICE_RESET__BANK3};
 135
 136        nand_dbg_print(NAND_DBG_TRACE, "%s, Line %d, Function: %s\n",
 137                       __FILE__, __LINE__, __func__);
 138
 139        for (i = 0 ; i < LLD_MAX_FLASH_BANKS; i++)
 140                iowrite32(intr_status_rst_comp[i] | intr_status_time_out[i],
 141                FlashReg + intr_status[i]);
 142
 143        for (i = 0 ; i < LLD_MAX_FLASH_BANKS; i++) {
 144                iowrite32(device_reset_banks[i], FlashReg + DEVICE_RESET);
 145                while (!(ioread32(FlashReg + intr_status[i]) &
 146                        (intr_status_rst_comp[i] | intr_status_time_out[i])))
 147                        ;
 148                if (ioread32(FlashReg + intr_status[i]) &
 149                        intr_status_time_out[i])
 150                        nand_dbg_print(NAND_DBG_WARN,
 151                        "NAND Reset operation timed out on bank %d\n", i);
 152        }
 153
 154        for (i = 0; i < LLD_MAX_FLASH_BANKS; i++)
 155                iowrite32(intr_status_rst_comp[i] | intr_status_time_out[i],
 156                        FlashReg + intr_status[i]);
 157
 158        return PASS;
 159}
 160
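     /*
      * Program the controller timing registers for the given ONFI timing
      * mode. The tables below hold the ONFI AC timing parameters (tREA, tRP,
      * tREH, ... in ns) per mode; they are converted to clock counts with
      * CEIL_DIV(value, CLK_X) and then widened until the read data-valid
      * window is large enough.
      */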
 161static void NAND_ONFi_Timing_Mode(u16 mode)
 162{
 163        u16 Trea[6] = {40, 30, 25, 20, 20, 16};
 164        u16 Trp[6] = {50, 25, 17, 15, 12, 10};
 165        u16 Treh[6] = {30, 15, 15, 10, 10, 7};
 166        u16 Trc[6] = {100, 50, 35, 30, 25, 20};
 167        u16 Trhoh[6] = {0, 15, 15, 15, 15, 15};
 168        u16 Trloh[6] = {0, 0, 0, 0, 5, 5};
 169        u16 Tcea[6] = {100, 45, 30, 25, 25, 25};
 170        u16 Tadl[6] = {200, 100, 100, 100, 70, 70};
 171        u16 Trhw[6] = {200, 100, 100, 100, 100, 100};
 172        u16 Trhz[6] = {200, 100, 100, 100, 100, 100};
 173        u16 Twhr[6] = {120, 80, 80, 60, 60, 60};
 174        u16 Tcs[6] = {70, 35, 25, 25, 20, 15};
 175
 176        u16 TclsRising = 1;
 177        u16 data_invalid_rhoh, data_invalid_rloh, data_invalid;
 178        u16 dv_window = 0;
 179        u16 en_lo, en_hi;
 180        u16 acc_clks;
 181        u16 addr_2_data, re_2_we, re_2_re, we_2_re, cs_cnt;
 182
 183        nand_dbg_print(NAND_DBG_TRACE, "%s, Line %d, Function: %s\n",
 184                       __FILE__, __LINE__, __func__);
 185
 186        en_lo = CEIL_DIV(Trp[mode], CLK_X);
 187        en_hi = CEIL_DIV(Treh[mode], CLK_X);
 188
 189#if ONFI_BLOOM_TIME
 190        if ((en_hi * CLK_X) < (Treh[mode] + 2))
 191                en_hi++;
 192#endif
 193
 194        if ((en_lo + en_hi) * CLK_X < Trc[mode])
 195                en_lo += CEIL_DIV((Trc[mode] - (en_lo + en_hi) * CLK_X), CLK_X);
 196
 197        if ((en_lo + en_hi) < CLK_MULTI)
 198                en_lo += CLK_MULTI - en_lo - en_hi;
 199
 200        while (dv_window < 8) {
 201                data_invalid_rhoh = en_lo * CLK_X + Trhoh[mode];
 202
 203                data_invalid_rloh = (en_lo + en_hi) * CLK_X + Trloh[mode];
 204
 205                data_invalid =
 206                    data_invalid_rhoh <
 207                    data_invalid_rloh ? data_invalid_rhoh : data_invalid_rloh;
 208
 209                dv_window = data_invalid - Trea[mode];
 210
 211                if (dv_window < 8)
 212                        en_lo++;
 213        }
 214
 215        acc_clks = CEIL_DIV(Trea[mode], CLK_X);
 216
 217        while (((acc_clks * CLK_X) - Trea[mode]) < 3)
 218                acc_clks++;
 219
 220        if ((data_invalid - acc_clks * CLK_X) < 2)
 221                nand_dbg_print(NAND_DBG_WARN, "%s, Line %d: Warning!\n",
 222                        __FILE__, __LINE__);
 223
 224        addr_2_data = CEIL_DIV(Tadl[mode], CLK_X);
 225        re_2_we = CEIL_DIV(Trhw[mode], CLK_X);
 226        re_2_re = CEIL_DIV(Trhz[mode], CLK_X);
 227        we_2_re = CEIL_DIV(Twhr[mode], CLK_X);
 228        cs_cnt = CEIL_DIV((Tcs[mode] - Trp[mode]), CLK_X);
 229        if (!TclsRising)
 230                cs_cnt = CEIL_DIV(Tcs[mode], CLK_X);
 231        if (cs_cnt == 0)
 232                cs_cnt = 1;
 233
 234        if (Tcea[mode]) {
 235                while (((cs_cnt * CLK_X) + Trea[mode]) < Tcea[mode])
 236                        cs_cnt++;
 237        }
 238
 239#if MODE5_WORKAROUND
 240        if (mode == 5)
 241                acc_clks = 5;
 242#endif
 243
 244        /* Sighting 3462430: Temporary hack for MT29F128G08CJABAWP:B */
 245        if ((ioread32(FlashReg + MANUFACTURER_ID) == 0) &&
 246                (ioread32(FlashReg + DEVICE_ID) == 0x88))
 247                acc_clks = 6;
 248
 249        iowrite32(acc_clks, FlashReg + ACC_CLKS);
 250        iowrite32(re_2_we, FlashReg + RE_2_WE);
 251        iowrite32(re_2_re, FlashReg + RE_2_RE);
 252        iowrite32(we_2_re, FlashReg + WE_2_RE);
 253        iowrite32(addr_2_data, FlashReg + ADDR_2_DATA);
 254        iowrite32(en_lo, FlashReg + RDWR_EN_LO_CNT);
 255        iowrite32(en_hi, FlashReg + RDWR_EN_HI_CNT);
 256        iowrite32(cs_cnt, FlashReg + CS_SETUP_CNT);
 257}
 258
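     /*
      * Indexed register access: the controller latches the address written to
      * the base of FlashMem and transfers the associated data through the
      * window at offset 0x10 (write in index_addr, read in
      * index_addr_read_data).
      */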
 259static void index_addr(u32 address, u32 data)
 260{
 261        iowrite32(address, FlashMem);
 262        iowrite32(data, FlashMem + 0x10);
 263}
 264
 265static void index_addr_read_data(u32 address, u32 *pdata)
 266{
 267        iowrite32(address, FlashMem);
 268        *pdata = ioread32(FlashMem + 0x10);
 269}
 270
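     /*
      * Derive the ECC layout from the ECC_CORRECTION register: compute the
      * ECC bytes needed per ECC sector (roughly 13 bits per correctable bit,
      * padded to a whole, even number of bytes) and, from that, how many
      * spare bytes per page remain free for Spectra's own flag data.
      */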
 271static void set_ecc_config(void)
 272{
 273#if SUPPORT_8BITECC
 274        if ((ioread32(FlashReg + DEVICE_MAIN_AREA_SIZE) < 4096) ||
 275                (ioread32(FlashReg + DEVICE_SPARE_AREA_SIZE) <= 128))
 276                iowrite32(8, FlashReg + ECC_CORRECTION);
 277#endif
 278
 279        if ((ioread32(FlashReg + ECC_CORRECTION) & ECC_CORRECTION__VALUE)
 280                == 1) {
 281                DeviceInfo.wECCBytesPerSector = 4;
 282                DeviceInfo.wECCBytesPerSector *= DeviceInfo.wDevicesConnected;
 283                DeviceInfo.wNumPageSpareFlag =
 284                        DeviceInfo.wPageSpareSize -
 285                        DeviceInfo.wPageDataSize /
 286                        (ECC_SECTOR_SIZE * DeviceInfo.wDevicesConnected) *
 287                        DeviceInfo.wECCBytesPerSector
 288                        - DeviceInfo.wSpareSkipBytes;
 289        } else {
 290                DeviceInfo.wECCBytesPerSector =
 291                        (ioread32(FlashReg + ECC_CORRECTION) &
 292                        ECC_CORRECTION__VALUE) * 13 / 8;
 293                if ((DeviceInfo.wECCBytesPerSector) % 2 == 0)
 294                        DeviceInfo.wECCBytesPerSector += 2;
 295                else
 296                        DeviceInfo.wECCBytesPerSector += 1;
 297
 298                DeviceInfo.wECCBytesPerSector *= DeviceInfo.wDevicesConnected;
 299                DeviceInfo.wNumPageSpareFlag = DeviceInfo.wPageSpareSize -
 300                        DeviceInfo.wPageDataSize /
 301                        (ECC_SECTOR_SIZE * DeviceInfo.wDevicesConnected) *
 302                        DeviceInfo.wECCBytesPerSector
 303                        - DeviceInfo.wSpareSkipBytes;
 304        }
 305}
 306
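     /*
      * ONFI device discovery: reset banks 0-3 in turn, read back the ONFI
      * feature/timing registers latched by the controller, compute the total
      * block count (LUNs * blocks per LUN), program the fastest timing mode
      * the device advertises, and read the ID bytes to detect an MLC part.
      */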
 307static u16 get_onfi_nand_para(void)
 308{
 309        int i;
 310        u16 blks_lun_l, blks_lun_h, n_of_luns;
 311        u32 blockperlun, id;
 312
 313        iowrite32(DEVICE_RESET__BANK0, FlashReg + DEVICE_RESET);
 314
 315        while (!((ioread32(FlashReg + INTR_STATUS0) &
 316                INTR_STATUS0__RST_COMP) |
 317                (ioread32(FlashReg + INTR_STATUS0) &
 318                INTR_STATUS0__TIME_OUT)))
 319                ;
 320
 321        if (ioread32(FlashReg + INTR_STATUS0) & INTR_STATUS0__RST_COMP) {
 322                iowrite32(DEVICE_RESET__BANK1, FlashReg + DEVICE_RESET);
 323                while (!((ioread32(FlashReg + INTR_STATUS1) &
 324                        INTR_STATUS1__RST_COMP) |
 325                        (ioread32(FlashReg + INTR_STATUS1) &
 326                        INTR_STATUS1__TIME_OUT)))
 327                        ;
 328
 329                if (ioread32(FlashReg + INTR_STATUS1) &
 330                        INTR_STATUS1__RST_COMP) {
 331                        iowrite32(DEVICE_RESET__BANK2,
 332                                FlashReg + DEVICE_RESET);
 333                        while (!((ioread32(FlashReg + INTR_STATUS2) &
 334                                INTR_STATUS2__RST_COMP) |
 335                                (ioread32(FlashReg + INTR_STATUS2) &
 336                                INTR_STATUS2__TIME_OUT)))
 337                                ;
 338
 339                        if (ioread32(FlashReg + INTR_STATUS2) &
 340                                INTR_STATUS2__RST_COMP) {
 341                                iowrite32(DEVICE_RESET__BANK3,
 342                                        FlashReg + DEVICE_RESET);
 343                                while (!((ioread32(FlashReg + INTR_STATUS3) &
 344                                        INTR_STATUS3__RST_COMP) |
 345                                        (ioread32(FlashReg + INTR_STATUS3) &
 346                                        INTR_STATUS3__TIME_OUT)))
 347                                        ;
 348                        } else {
  349                                printk(KERN_ERR "Reset timed out on bank 2!\n");
 350                        }
 351                } else {
  352                        printk(KERN_ERR "Reset timed out on bank 1!\n");
 353                }
 354        }
 355
 356        iowrite32(INTR_STATUS0__TIME_OUT, FlashReg + INTR_STATUS0);
 357        iowrite32(INTR_STATUS1__TIME_OUT, FlashReg + INTR_STATUS1);
 358        iowrite32(INTR_STATUS2__TIME_OUT, FlashReg + INTR_STATUS2);
 359        iowrite32(INTR_STATUS3__TIME_OUT, FlashReg + INTR_STATUS3);
 360
 361        DeviceInfo.wONFIDevFeatures =
 362                ioread32(FlashReg + ONFI_DEVICE_FEATURES);
 363        DeviceInfo.wONFIOptCommands =
 364                ioread32(FlashReg + ONFI_OPTIONAL_COMMANDS);
 365        DeviceInfo.wONFITimingMode =
 366                ioread32(FlashReg + ONFI_TIMING_MODE);
 367        DeviceInfo.wONFIPgmCacheTimingMode =
 368                ioread32(FlashReg + ONFI_PGM_CACHE_TIMING_MODE);
 369
 370        n_of_luns = ioread32(FlashReg + ONFI_DEVICE_NO_OF_LUNS) &
 371                ONFI_DEVICE_NO_OF_LUNS__NO_OF_LUNS;
 372        blks_lun_l = ioread32(FlashReg + ONFI_DEVICE_NO_OF_BLOCKS_PER_LUN_L);
 373        blks_lun_h = ioread32(FlashReg + ONFI_DEVICE_NO_OF_BLOCKS_PER_LUN_U);
 374
 375        blockperlun = (blks_lun_h << 16) | blks_lun_l;
 376
 377        DeviceInfo.wTotalBlocks = n_of_luns * blockperlun;
 378
 379        if (!(ioread32(FlashReg + ONFI_TIMING_MODE) &
 380                ONFI_TIMING_MODE__VALUE))
 381                return FAIL;
 382
 383        for (i = 5; i > 0; i--) {
 384                if (ioread32(FlashReg + ONFI_TIMING_MODE) & (0x01 << i))
 385                        break;
 386        }
 387
 388        NAND_ONFi_Timing_Mode(i);
 389
 390        index_addr(MODE_11 | 0, 0x90);
 391        index_addr(MODE_11 | 1, 0);
 392
 393        for (i = 0; i < 3; i++)
 394                index_addr_read_data(MODE_11 | 2, &id);
 395
 396        nand_dbg_print(NAND_DBG_DEBUG, "3rd ID: 0x%x\n", id);
 397
 398        DeviceInfo.MLCDevice = id & 0x0C;
 399
  400        /* All the ONFI devices we know of support the page-cache r/w */
  401        /* feature, so pipeline read/write ahead could be enabled here: */
  402        /* iowrite32(1, FlashReg + CACHE_WRITE_ENABLE); */
  403        /* iowrite32(1, FlashReg + CACHE_READ_ENABLE);  */
 404
 405        return PASS;
 406}
 407
 408static void get_samsung_nand_para(void)
 409{
 410        u8 no_of_planes;
 411        u32 blk_size;
 412        u64 plane_size, capacity;
 413        u32 id_bytes[5];
 414        int i;
 415
 416        index_addr((u32)(MODE_11 | 0), 0x90);
 417        index_addr((u32)(MODE_11 | 1), 0);
 418        for (i = 0; i < 5; i++)
 419                index_addr_read_data((u32)(MODE_11 | 2), &id_bytes[i]);
 420
 421        nand_dbg_print(NAND_DBG_DEBUG,
 422                "ID bytes: 0x%x, 0x%x, 0x%x, 0x%x, 0x%x\n",
 423                id_bytes[0], id_bytes[1], id_bytes[2],
 424                id_bytes[3], id_bytes[4]);
 425
 426        if ((id_bytes[1] & 0xff) == 0xd3) { /* Samsung K9WAG08U1A */
 427                /* Set timing register values according to datasheet */
 428                iowrite32(5, FlashReg + ACC_CLKS);
 429                iowrite32(20, FlashReg + RE_2_WE);
 430                iowrite32(12, FlashReg + WE_2_RE);
 431                iowrite32(14, FlashReg + ADDR_2_DATA);
 432                iowrite32(3, FlashReg + RDWR_EN_LO_CNT);
 433                iowrite32(2, FlashReg + RDWR_EN_HI_CNT);
 434                iowrite32(2, FlashReg + CS_SETUP_CNT);
 435        }
 436
 437        no_of_planes = 1 << ((id_bytes[4] & 0x0c) >> 2);
 438        plane_size  = (u64)64 << ((id_bytes[4] & 0x70) >> 4);
 439        blk_size = 64 << ((ioread32(FlashReg + DEVICE_PARAM_1) & 0x30) >> 4);
 440        capacity = (u64)128 * plane_size * no_of_planes;
 441
 442        DeviceInfo.wTotalBlocks = (u32)GLOB_u64_Div(capacity, blk_size);
 443}
 444
 445static void get_toshiba_nand_para(void)
 446{
 447        void __iomem *scratch_reg;
 448        u32 tmp;
 449
  450        /* Workaround for a controller bug that reports a wrong */
  451        /* spare area size for some kinds of Toshiba NAND devices */
 452        if ((ioread32(FlashReg + DEVICE_MAIN_AREA_SIZE) == 4096) &&
 453                (ioread32(FlashReg + DEVICE_SPARE_AREA_SIZE) == 64)) {
 454                iowrite32(216, FlashReg + DEVICE_SPARE_AREA_SIZE);
 455                tmp = ioread32(FlashReg + DEVICES_CONNECTED) *
 456                        ioread32(FlashReg + DEVICE_SPARE_AREA_SIZE);
 457                iowrite32(tmp, FlashReg + LOGICAL_PAGE_SPARE_SIZE);
 458#if SUPPORT_15BITECC
 459                iowrite32(15, FlashReg + ECC_CORRECTION);
 460#elif SUPPORT_8BITECC
 461                iowrite32(8, FlashReg + ECC_CORRECTION);
 462#endif
 463        }
 464
  465        /* Toshiba NAND cannot report its block count, so the user */
  466        /* must provide the correct block count in a scratch */
  467        /* register before the Linux NAND driver is loaded. If no */
  468        /* valid value is found in the scratch register, the */
  469        /* default block count is used. */
 470        scratch_reg = ioremap_nocache(SCRATCH_REG_ADDR, SCRATCH_REG_SIZE);
 471        if (!scratch_reg) {
 472                printk(KERN_ERR "Spectra: ioremap failed in %s, Line %d",
 473                        __FILE__, __LINE__);
 474                DeviceInfo.wTotalBlocks = GLOB_HWCTL_DEFAULT_BLKS;
 475        } else {
 476                nand_dbg_print(NAND_DBG_WARN,
 477                        "Spectra: ioremap reg address: 0x%p\n", scratch_reg);
 478                DeviceInfo.wTotalBlocks = 1 << ioread8(scratch_reg);
 479                if (DeviceInfo.wTotalBlocks < 512)
 480                        DeviceInfo.wTotalBlocks = GLOB_HWCTL_DEFAULT_BLKS;
 481                iounmap(scratch_reg);
 482        }
 483}
 484
 485static void get_hynix_nand_para(void)
 486{
 487        void __iomem *scratch_reg;
 488        u32 main_size, spare_size;
 489
 490        switch (DeviceInfo.wDeviceID) {
 491        case 0xD5: /* Hynix H27UAG8T2A, H27UBG8U5A or H27UCG8VFA */
 492        case 0xD7: /* Hynix H27UDG8VEM, H27UCG8UDM or H27UCG8V5A */
 493                iowrite32(128, FlashReg + PAGES_PER_BLOCK);
 494                iowrite32(4096, FlashReg + DEVICE_MAIN_AREA_SIZE);
 495                iowrite32(224, FlashReg + DEVICE_SPARE_AREA_SIZE);
 496                main_size = 4096 * ioread32(FlashReg + DEVICES_CONNECTED);
 497                spare_size = 224 * ioread32(FlashReg + DEVICES_CONNECTED);
 498                iowrite32(main_size, FlashReg + LOGICAL_PAGE_DATA_SIZE);
 499                iowrite32(spare_size, FlashReg + LOGICAL_PAGE_SPARE_SIZE);
 500                iowrite32(0, FlashReg + DEVICE_WIDTH);
 501#if SUPPORT_15BITECC
 502                iowrite32(15, FlashReg + ECC_CORRECTION);
 503#elif SUPPORT_8BITECC
 504                iowrite32(8, FlashReg + ECC_CORRECTION);
 505#endif
 506                DeviceInfo.MLCDevice  = 1;
 507                break;
 508        default:
 509                nand_dbg_print(NAND_DBG_WARN,
 510                        "Spectra: Unknown Hynix NAND (Device ID: 0x%x)."
 511                        "Will use default parameter values instead.\n",
 512                        DeviceInfo.wDeviceID);
 513        }
 514
 515        scratch_reg = ioremap_nocache(SCRATCH_REG_ADDR, SCRATCH_REG_SIZE);
 516        if (!scratch_reg) {
 517                printk(KERN_ERR "Spectra: ioremap failed in %s, Line %d",
 518                        __FILE__, __LINE__);
 519                DeviceInfo.wTotalBlocks = GLOB_HWCTL_DEFAULT_BLKS;
 520        } else {
 521                nand_dbg_print(NAND_DBG_WARN,
 522                        "Spectra: ioremap reg address: 0x%p\n", scratch_reg);
 523                DeviceInfo.wTotalBlocks = 1 << ioread8(scratch_reg);
 524                if (DeviceInfo.wTotalBlocks < 512)
 525                        DeviceInfo.wTotalBlocks = GLOB_HWCTL_DEFAULT_BLKS;
 526                iounmap(scratch_reg);
 527        }
 528}
 529
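     /*
      * Probe which banks are actually populated: issue a Read ID (0x90)
      * command to each bank and mark the bank valid when its first ID byte
      * is non-zero (bank 0) or matches bank 0 (other banks).
      */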
 530static void find_valid_banks(void)
 531{
 532        u32 id[LLD_MAX_FLASH_BANKS];
 533        int i;
 534
 535        totalUsedBanks = 0;
 536        for (i = 0; i < LLD_MAX_FLASH_BANKS; i++) {
 537                index_addr((u32)(MODE_11 | (i << 24) | 0), 0x90);
 538                index_addr((u32)(MODE_11 | (i << 24) | 1), 0);
 539                index_addr_read_data((u32)(MODE_11 | (i << 24) | 2), &id[i]);
 540
 541                nand_dbg_print(NAND_DBG_DEBUG,
 542                        "Return 1st ID for bank[%d]: %x\n", i, id[i]);
 543
 544                if (i == 0) {
 545                        if (id[i] & 0x0ff)
 546                                GLOB_valid_banks[i] = 1;
 547                } else {
 548                        if ((id[i] & 0x0ff) == (id[0] & 0x0ff))
 549                                GLOB_valid_banks[i] = 1;
 550                }
 551
 552                totalUsedBanks += GLOB_valid_banks[i];
 553        }
 554
 555        nand_dbg_print(NAND_DBG_DEBUG,
 556                "totalUsedBanks: %d\n", totalUsedBanks);
 557}
 558
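     /*
      * If the controller's partition feature is enabled and partition 1 is
      * owned by Spectra, restrict wSpectraStartBlock/wSpectraEndBlock to that
      * partition; otherwise Spectra uses SPECTRA_START_BLOCK through the last
      * block. wTotalBlocks is scaled by the number of used banks either way.
      */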
 559static void detect_partition_feature(void)
 560{
 561        if (ioread32(FlashReg + FEATURES) & FEATURES__PARTITION) {
 562                if ((ioread32(FlashReg + PERM_SRC_ID_1) &
 563                        PERM_SRC_ID_1__SRCID) == SPECTRA_PARTITION_ID) {
 564                        DeviceInfo.wSpectraStartBlock =
 565                            ((ioread32(FlashReg + MIN_MAX_BANK_1) &
 566                              MIN_MAX_BANK_1__MIN_VALUE) *
 567                             DeviceInfo.wTotalBlocks)
 568                            +
 569                            (ioread32(FlashReg + MIN_BLK_ADDR_1) &
 570                            MIN_BLK_ADDR_1__VALUE);
 571
 572                        DeviceInfo.wSpectraEndBlock =
 573                            (((ioread32(FlashReg + MIN_MAX_BANK_1) &
 574                               MIN_MAX_BANK_1__MAX_VALUE) >> 2) *
 575                             DeviceInfo.wTotalBlocks)
 576                            +
 577                            (ioread32(FlashReg + MAX_BLK_ADDR_1) &
 578                            MAX_BLK_ADDR_1__VALUE);
 579
 580                        DeviceInfo.wTotalBlocks *= totalUsedBanks;
 581
 582                        if (DeviceInfo.wSpectraEndBlock >=
 583                            DeviceInfo.wTotalBlocks) {
 584                                DeviceInfo.wSpectraEndBlock =
 585                                    DeviceInfo.wTotalBlocks - 1;
 586                        }
 587
 588                        DeviceInfo.wDataBlockNum =
 589                                DeviceInfo.wSpectraEndBlock -
 590                                DeviceInfo.wSpectraStartBlock + 1;
 591                } else {
 592                        DeviceInfo.wTotalBlocks *= totalUsedBanks;
 593                        DeviceInfo.wSpectraStartBlock = SPECTRA_START_BLOCK;
 594                        DeviceInfo.wSpectraEndBlock =
 595                                DeviceInfo.wTotalBlocks - 1;
 596                        DeviceInfo.wDataBlockNum =
 597                                DeviceInfo.wSpectraEndBlock -
 598                                DeviceInfo.wSpectraStartBlock + 1;
 599                }
 600        } else {
 601                DeviceInfo.wTotalBlocks *= totalUsedBanks;
 602                DeviceInfo.wSpectraStartBlock = SPECTRA_START_BLOCK;
 603                DeviceInfo.wSpectraEndBlock = DeviceInfo.wTotalBlocks - 1;
 604                DeviceInfo.wDataBlockNum =
 605                        DeviceInfo.wSpectraEndBlock -
 606                        DeviceInfo.wSpectraStartBlock + 1;
 607        }
 608}
 609
 610static void dump_device_info(void)
 611{
 612        nand_dbg_print(NAND_DBG_DEBUG, "DeviceInfo:\n");
 613        nand_dbg_print(NAND_DBG_DEBUG, "DeviceMaker: 0x%x\n",
 614                DeviceInfo.wDeviceMaker);
 615        nand_dbg_print(NAND_DBG_DEBUG, "DeviceID: 0x%x\n",
 616                DeviceInfo.wDeviceID);
 617        nand_dbg_print(NAND_DBG_DEBUG, "DeviceType: 0x%x\n",
 618                DeviceInfo.wDeviceType);
 619        nand_dbg_print(NAND_DBG_DEBUG, "SpectraStartBlock: %d\n",
 620                DeviceInfo.wSpectraStartBlock);
 621        nand_dbg_print(NAND_DBG_DEBUG, "SpectraEndBlock: %d\n",
 622                DeviceInfo.wSpectraEndBlock);
 623        nand_dbg_print(NAND_DBG_DEBUG, "TotalBlocks: %d\n",
 624                DeviceInfo.wTotalBlocks);
 625        nand_dbg_print(NAND_DBG_DEBUG, "PagesPerBlock: %d\n",
 626                DeviceInfo.wPagesPerBlock);
 627        nand_dbg_print(NAND_DBG_DEBUG, "PageSize: %d\n",
 628                DeviceInfo.wPageSize);
 629        nand_dbg_print(NAND_DBG_DEBUG, "PageDataSize: %d\n",
 630                DeviceInfo.wPageDataSize);
 631        nand_dbg_print(NAND_DBG_DEBUG, "PageSpareSize: %d\n",
 632                DeviceInfo.wPageSpareSize);
 633        nand_dbg_print(NAND_DBG_DEBUG, "NumPageSpareFlag: %d\n",
 634                DeviceInfo.wNumPageSpareFlag);
 635        nand_dbg_print(NAND_DBG_DEBUG, "ECCBytesPerSector: %d\n",
 636                DeviceInfo.wECCBytesPerSector);
 637        nand_dbg_print(NAND_DBG_DEBUG, "BlockSize: %d\n",
 638                DeviceInfo.wBlockSize);
 639        nand_dbg_print(NAND_DBG_DEBUG, "BlockDataSize: %d\n",
 640                DeviceInfo.wBlockDataSize);
 641        nand_dbg_print(NAND_DBG_DEBUG, "DataBlockNum: %d\n",
 642                DeviceInfo.wDataBlockNum);
 643        nand_dbg_print(NAND_DBG_DEBUG, "PlaneNum: %d\n",
 644                DeviceInfo.bPlaneNum);
 645        nand_dbg_print(NAND_DBG_DEBUG, "DeviceMainAreaSize: %d\n",
 646                DeviceInfo.wDeviceMainAreaSize);
 647        nand_dbg_print(NAND_DBG_DEBUG, "DeviceSpareAreaSize: %d\n",
 648                DeviceInfo.wDeviceSpareAreaSize);
 649        nand_dbg_print(NAND_DBG_DEBUG, "DevicesConnected: %d\n",
 650                DeviceInfo.wDevicesConnected);
 651        nand_dbg_print(NAND_DBG_DEBUG, "DeviceWidth: %d\n",
 652                DeviceInfo.wDeviceWidth);
 653        nand_dbg_print(NAND_DBG_DEBUG, "HWRevision: 0x%x\n",
 654                DeviceInfo.wHWRevision);
 655        nand_dbg_print(NAND_DBG_DEBUG, "HWFeatures: 0x%x\n",
 656                DeviceInfo.wHWFeatures);
 657        nand_dbg_print(NAND_DBG_DEBUG, "ONFIDevFeatures: 0x%x\n",
 658                DeviceInfo.wONFIDevFeatures);
 659        nand_dbg_print(NAND_DBG_DEBUG, "ONFIOptCommands: 0x%x\n",
 660                DeviceInfo.wONFIOptCommands);
 661        nand_dbg_print(NAND_DBG_DEBUG, "ONFITimingMode: 0x%x\n",
 662                DeviceInfo.wONFITimingMode);
 663        nand_dbg_print(NAND_DBG_DEBUG, "ONFIPgmCacheTimingMode: 0x%x\n",
 664                DeviceInfo.wONFIPgmCacheTimingMode);
 665        nand_dbg_print(NAND_DBG_DEBUG, "MLCDevice: %s\n",
 666                DeviceInfo.MLCDevice ? "Yes" : "No");
 667        nand_dbg_print(NAND_DBG_DEBUG, "SpareSkipBytes: %d\n",
 668                DeviceInfo.wSpareSkipBytes);
 669        nand_dbg_print(NAND_DBG_DEBUG, "BitsInPageNumber: %d\n",
 670                DeviceInfo.nBitsInPageNumber);
 671        nand_dbg_print(NAND_DBG_DEBUG, "BitsInPageDataSize: %d\n",
 672                DeviceInfo.nBitsInPageDataSize);
 673        nand_dbg_print(NAND_DBG_DEBUG, "BitsInBlockDataSize: %d\n",
 674                DeviceInfo.nBitsInBlockDataSize);
 675}
 676
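     /*
      * Top-level device identification: read the manufacturer and device IDs,
      * take the ONFI path or a vendor-specific path (Samsung/Toshiba/Hynix),
      * then fill DeviceInfo from the controller geometry registers and derive
      * the ECC layout, valid banks and partition limits.
      */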
 677u16 NAND_Read_Device_ID(void)
 678{
 679        u16 status = PASS;
 680        u8 no_of_planes;
 681
 682        nand_dbg_print(NAND_DBG_TRACE, "%s, Line %d, Function: %s\n",
 683                       __FILE__, __LINE__, __func__);
 684
 685        iowrite32(0x02, FlashReg + SPARE_AREA_SKIP_BYTES);
 686        iowrite32(0xffff, FlashReg + SPARE_AREA_MARKER);
 687        DeviceInfo.wDeviceMaker = ioread32(FlashReg + MANUFACTURER_ID);
 688        DeviceInfo.wDeviceID = ioread32(FlashReg + DEVICE_ID);
 689        DeviceInfo.MLCDevice = ioread32(FlashReg + DEVICE_PARAM_0) & 0x0c;
 690
 691        if (ioread32(FlashReg + ONFI_DEVICE_NO_OF_LUNS) &
 692                ONFI_DEVICE_NO_OF_LUNS__ONFI_DEVICE) { /* ONFI 1.0 NAND */
 693                if (FAIL == get_onfi_nand_para())
 694                        return FAIL;
 695        } else if (DeviceInfo.wDeviceMaker == 0xEC) { /* Samsung NAND */
 696                get_samsung_nand_para();
 697        } else if (DeviceInfo.wDeviceMaker == 0x98) { /* Toshiba NAND */
 698                get_toshiba_nand_para();
 699        } else if (DeviceInfo.wDeviceMaker == 0xAD) { /* Hynix NAND */
 700                get_hynix_nand_para();
 701        } else {
 702                DeviceInfo.wTotalBlocks = GLOB_HWCTL_DEFAULT_BLKS;
 703        }
 704
  705        nand_dbg_print(NAND_DBG_DEBUG, "Dump timing register values: "
  706                        "acc_clks: %d, re_2_we: %d, we_2_re: %d, "
  707                        "addr_2_data: %d, rdwr_en_lo_cnt: %d, "
  708                        "rdwr_en_hi_cnt: %d, cs_setup_cnt: %d\n",
 709                        ioread32(FlashReg + ACC_CLKS),
 710                        ioread32(FlashReg + RE_2_WE),
 711                        ioread32(FlashReg + WE_2_RE),
 712                        ioread32(FlashReg + ADDR_2_DATA),
 713                        ioread32(FlashReg + RDWR_EN_LO_CNT),
 714                        ioread32(FlashReg + RDWR_EN_HI_CNT),
 715                        ioread32(FlashReg + CS_SETUP_CNT));
 716
 717        DeviceInfo.wHWRevision = ioread32(FlashReg + REVISION);
 718        DeviceInfo.wHWFeatures = ioread32(FlashReg + FEATURES);
 719
 720        DeviceInfo.wDeviceMainAreaSize =
 721                ioread32(FlashReg + DEVICE_MAIN_AREA_SIZE);
 722        DeviceInfo.wDeviceSpareAreaSize =
 723                ioread32(FlashReg + DEVICE_SPARE_AREA_SIZE);
 724
 725        DeviceInfo.wPageDataSize =
 726                ioread32(FlashReg + LOGICAL_PAGE_DATA_SIZE);
 727
  728        /* Note: With the Micron 4K NAND device, the controller reports the
  729         * page spare size as 216 bytes, but Micron's spec says 218 bytes.
  730         * Forcing it to 218 bytes makes the controller stop working
  731         * correctly, so leave the reported value alone. Keep in mind that
  732         * this discrepancy may cause other problems in the future.
  733         *                                 - Yunpeng  2008-10-10
  734         */
 735        DeviceInfo.wPageSpareSize =
 736                ioread32(FlashReg + LOGICAL_PAGE_SPARE_SIZE);
 737
 738        DeviceInfo.wPagesPerBlock = ioread32(FlashReg + PAGES_PER_BLOCK);
 739
 740        DeviceInfo.wPageSize =
 741            DeviceInfo.wPageDataSize + DeviceInfo.wPageSpareSize;
 742        DeviceInfo.wBlockSize =
 743            DeviceInfo.wPageSize * DeviceInfo.wPagesPerBlock;
 744        DeviceInfo.wBlockDataSize =
 745            DeviceInfo.wPagesPerBlock * DeviceInfo.wPageDataSize;
 746
 747        DeviceInfo.wDeviceWidth = ioread32(FlashReg + DEVICE_WIDTH);
 748        DeviceInfo.wDeviceType =
 749                ((ioread32(FlashReg + DEVICE_WIDTH) > 0) ? 16 : 8);
 750
 751        DeviceInfo.wDevicesConnected = ioread32(FlashReg + DEVICES_CONNECTED);
 752
 753        DeviceInfo.wSpareSkipBytes =
 754                ioread32(FlashReg + SPARE_AREA_SKIP_BYTES) *
 755                DeviceInfo.wDevicesConnected;
 756
 757        DeviceInfo.nBitsInPageNumber =
 758                (u8)GLOB_Calc_Used_Bits(DeviceInfo.wPagesPerBlock);
 759        DeviceInfo.nBitsInPageDataSize =
 760                (u8)GLOB_Calc_Used_Bits(DeviceInfo.wPageDataSize);
 761        DeviceInfo.nBitsInBlockDataSize =
 762                (u8)GLOB_Calc_Used_Bits(DeviceInfo.wBlockDataSize);
 763
 764        set_ecc_config();
 765
 766        no_of_planes = ioread32(FlashReg + NUMBER_OF_PLANES) &
 767                NUMBER_OF_PLANES__VALUE;
 768
 769        switch (no_of_planes) {
 770        case 0:
 771        case 1:
 772        case 3:
 773        case 7:
 774                DeviceInfo.bPlaneNum = no_of_planes + 1;
 775                break;
 776        default:
 777                status = FAIL;
 778                break;
 779        }
 780
 781        find_valid_banks();
 782
 783        detect_partition_feature();
 784
 785        dump_device_info();
 786
 787        return status;
 788}
 789
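     /*
      * Unlock the whole array by sending the controller's unlock-range
      * commands (0x10 for the lower bound, 0x11 for the upper bound) covering
      * block 0 through the last block.
      */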
 790u16 NAND_UnlockArrayAll(void)
 791{
 792        u64 start_addr, end_addr;
 793
 794        nand_dbg_print(NAND_DBG_TRACE, "%s, Line %d, Function: %s\n",
 795                       __FILE__, __LINE__, __func__);
 796
 797        start_addr = 0;
 798        end_addr = ((u64)DeviceInfo.wBlockSize *
 799                (DeviceInfo.wTotalBlocks - 1)) >>
 800                DeviceInfo.nBitsInPageDataSize;
 801
 802        index_addr((u32)(MODE_10 | (u32)start_addr), 0x10);
 803        index_addr((u32)(MODE_10 | (u32)end_addr), 0x11);
 804
 805        return PASS;
 806}
 807
 808void NAND_LLD_Enable_Disable_Interrupts(u16 INT_ENABLE)
 809{
 810        nand_dbg_print(NAND_DBG_TRACE, "%s, Line %d, Function: %s\n",
 811                       __FILE__, __LINE__, __func__);
 812
 813        if (INT_ENABLE)
 814                iowrite32(1, FlashReg + GLOBAL_INT_ENABLE);
 815        else
 816                iowrite32(0, FlashReg + GLOBAL_INT_ENABLE);
 817}
 818
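     /*
      * Erase one block: map the global block number to a bank and a per-bank
      * flash address, issue the erase command (MODE_10, sub-command 1) and
      * poll the bank's interrupt status for ERASE_COMP or ERASE_FAIL.
      */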
 819u16 NAND_Erase_Block(u32 block)
 820{
 821        u16 status = PASS;
 822        u64 flash_add;
 823        u16 flash_bank;
 824        u32 intr_status = 0;
 825        u32 intr_status_addresses[4] = {INTR_STATUS0,
 826                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
 827
 828        nand_dbg_print(NAND_DBG_TRACE, "%s, Line %d, Function: %s\n",
 829                       __FILE__, __LINE__, __func__);
 830
 831        flash_add = (u64)(block % (DeviceInfo.wTotalBlocks / totalUsedBanks))
 832                * DeviceInfo.wBlockDataSize;
 833
 834        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
 835
 836        if (block >= DeviceInfo.wTotalBlocks)
 837                status = FAIL;
 838
 839        if (status == PASS) {
 840                intr_status = intr_status_addresses[flash_bank];
 841
 842                iowrite32(INTR_STATUS0__ERASE_COMP | INTR_STATUS0__ERASE_FAIL,
 843                        FlashReg + intr_status);
 844
 845                index_addr((u32)(MODE_10 | (flash_bank << 24) |
 846                        (flash_add >> DeviceInfo.nBitsInPageDataSize)), 1);
 847
 848                while (!(ioread32(FlashReg + intr_status) &
 849                        (INTR_STATUS0__ERASE_COMP | INTR_STATUS0__ERASE_FAIL)))
 850                        ;
 851
 852                if (ioread32(FlashReg + intr_status) &
 853                        INTR_STATUS0__ERASE_FAIL)
 854                        status = FAIL;
 855
 856                iowrite32(INTR_STATUS0__ERASE_COMP | INTR_STATUS0__ERASE_FAIL,
 857                        FlashReg + intr_status);
 858        }
 859
 860        return status;
 861}
 862
 863static u32 Boundary_Check_Block_Page(u32 block, u16 page,
 864                                                u16 page_count)
 865{
 866        u32 status = PASS;
 867
 868        if (block >= DeviceInfo.wTotalBlocks)
 869                status = FAIL;
 870
 871        if (page + page_count > DeviceInfo.wPagesPerBlock)
 872                status = FAIL;
 873
 874        return status;
 875}
 876
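     /*
      * Read the spare area of a single page: MODE_10 sub-command 0x41 sets up
      * the spare access, the data is then read through a MODE_01 window, and
      * 0x42 ends the access. When ECC is enabled the controller keeps the
      * flag bytes at the end of the spare area, so they are moved to the
      * front of the caller's buffer here.
      */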
 877u16 NAND_Read_Page_Spare(u8 *read_data, u32 block, u16 page,
 878                            u16 page_count)
 879{
 880        u32 status = PASS;
 881        u32 i;
 882        u64 flash_add;
 883        u32 PageSpareSize = DeviceInfo.wPageSpareSize;
 884        u32 spareFlagBytes = DeviceInfo.wNumPageSpareFlag;
 885        u32 flash_bank;
 886        u32 intr_status = 0;
 887        u32 intr_status_addresses[4] = {INTR_STATUS0,
 888                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
 889        u8 *page_spare = buf_read_page_spare;
 890
 891        if (block >= DeviceInfo.wTotalBlocks) {
 892                printk(KERN_ERR "block too big: %d\n", (int)block);
 893                status = FAIL;
 894        }
 895
 896        if (page >= DeviceInfo.wPagesPerBlock) {
 897                printk(KERN_ERR "page too big: %d\n", page);
 898                status = FAIL;
 899        }
 900
 901        if (page_count > 1) {
 902                printk(KERN_ERR "page count too big: %d\n", page_count);
 903                status = FAIL;
 904        }
 905
 906        flash_add = (u64)(block % (DeviceInfo.wTotalBlocks / totalUsedBanks))
 907                * DeviceInfo.wBlockDataSize +
 908                (u64)page * DeviceInfo.wPageDataSize;
 909
 910        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
 911
 912        if (status == PASS) {
 913                intr_status = intr_status_addresses[flash_bank];
 914                iowrite32(ioread32(FlashReg + intr_status),
 915                        FlashReg + intr_status);
 916
 917                index_addr((u32)(MODE_10 | (flash_bank << 24) |
 918                        (flash_add >> DeviceInfo.nBitsInPageDataSize)),
 919                        0x41);
 920                index_addr((u32)(MODE_10 | (flash_bank << 24) |
 921                        (flash_add >> DeviceInfo.nBitsInPageDataSize)),
 922                        0x2000 | page_count);
 923                while (!(ioread32(FlashReg + intr_status) &
 924                        INTR_STATUS0__LOAD_COMP))
 925                        ;
 926
 927                iowrite32((u32)(MODE_01 | (flash_bank << 24) |
 928                        (flash_add >> DeviceInfo.nBitsInPageDataSize)),
 929                        FlashMem);
 930
 931                for (i = 0; i < (PageSpareSize / 4); i++)
 932                        *((u32 *)page_spare + i) =
 933                                        ioread32(FlashMem + 0x10);
 934
 935                if (enable_ecc) {
 936                        for (i = 0; i < spareFlagBytes; i++)
 937                                read_data[i] =
 938                                        page_spare[PageSpareSize -
 939                                                spareFlagBytes + i];
 940                        for (i = 0; i < (PageSpareSize - spareFlagBytes); i++)
 941                                read_data[spareFlagBytes + i] =
 942                                                        page_spare[i];
 943                } else {
 944                        for (i = 0; i < PageSpareSize; i++)
 945                                read_data[i] = page_spare[i];
 946                }
 947
 948                index_addr((u32)(MODE_10 | (flash_bank << 24) |
 949                        (flash_add >> DeviceInfo.nBitsInPageDataSize)), 0x42);
 950        }
 951
 952        return status;
 953}
 954
  955/* Unused function. Should be removed later. */
 956u16 NAND_Write_Page_Spare(u8 *write_data, u32 block, u16 page,
 957                             u16 page_count)
 958{
 959        printk(KERN_ERR
 960               "Error! This function (NAND_Write_Page_Spare) should never"
 961                " be called!\n");
 962        return ERR;
 963}
 964
 965/* op value:  0 - DDMA read;  1 - DDMA write */
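     /* The transfer is described to the controller with four MODE_10 indexed
      * writes: the flash address with the op and page count, then the upper
      * and lower 16 bits of the buffer's bus address (hence virt_to_bus()),
      * and a final control word.
      */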
 966static void ddma_trans(u8 *data, u64 flash_add,
 967                        u32 flash_bank, int op, u32 numPages)
 968{
 969        u32 data_addr;
 970
 971        /* Map virtual address to bus address for DDMA */
 972        data_addr = virt_to_bus(data);
 973
 974        index_addr((u32)(MODE_10 | (flash_bank << 24) |
 975                (flash_add >> DeviceInfo.nBitsInPageDataSize)),
 976                (u16)(2 << 12) | (op << 8) | numPages);
 977
 978        index_addr((u32)(MODE_10 | (flash_bank << 24) |
 979                ((u16)(0x0FFFF & (data_addr >> 16)) << 8)),
 980                (u16)(2 << 12) | (2 << 8) | 0);
 981
 982        index_addr((u32)(MODE_10 | (flash_bank << 24) |
 983                ((u16)(0x0FFFF & data_addr) << 8)),
 984                (u16)(2 << 12) | (3 << 8) | 0);
 985
 986        index_addr((u32)(MODE_10 | (flash_bank << 24) |
 987                (1 << 16) | (0x40 << 8)),
 988                (u16)(2 << 12) | (4 << 8) | 0);
 989}
 990
 991/* If data in buf are all 0xff, then return 1; otherwise return 0 */
 992static int check_all_1(u8 *buf)
 993{
 994        int i, j, cnt;
 995
 996        for (i = 0; i < DeviceInfo.wPageDataSize; i++) {
 997                if (buf[i] != 0xff) {
 998                        cnt = 0;
 999                        nand_dbg_print(NAND_DBG_WARN,
 1000                                "the first non-0xff byte is at offset %d\n", i);
1001                        for (j = i; j < DeviceInfo.wPageDataSize; j++) {
1002                                nand_dbg_print(NAND_DBG_WARN, "0x%x ", buf[j]);
1003                                cnt++;
1004                                if (cnt > 8)
1005                                        break;
1006                        }
1007                        nand_dbg_print(NAND_DBG_WARN, "\n");
1008                        return 0;
1009                }
1010        }
1011
1012        return 1;
1013}
1014
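     /*
      * Walk the controller's ECC error information for the given bank.
      * Correctable errors are fixed in place by XOR-ing the reported buffer
      * byte with the correction mask; an uncorrectable error fails the read
      * (the code also logs whether the page still reads as all 0xFF, i.e.
      * erased).
      */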
1015static int do_ecc_new(unsigned long bank, u8 *buf,
1016                                u32 block, u16 page)
1017{
1018        int status = PASS;
1019        u16 err_page = 0;
1020        u16 err_byte;
1021        u8 err_sect;
1022        u8 err_dev;
1023        u16 err_fix_info;
1024        u16 err_addr;
1025        u32 ecc_sect_size;
1026        u8 *err_pos;
1027        u32 err_page_addr[4] = {ERR_PAGE_ADDR0,
1028                ERR_PAGE_ADDR1, ERR_PAGE_ADDR2, ERR_PAGE_ADDR3};
1029
1030        ecc_sect_size = ECC_SECTOR_SIZE * (DeviceInfo.wDevicesConnected);
1031
1032        do {
1033                err_page = ioread32(FlashReg + err_page_addr[bank]);
1034                err_addr = ioread32(FlashReg + ECC_ERROR_ADDRESS);
1035                err_byte = err_addr & ECC_ERROR_ADDRESS__OFFSET;
1036                err_sect = ((err_addr & ECC_ERROR_ADDRESS__SECTOR_NR) >> 12);
1037                err_fix_info = ioread32(FlashReg + ERR_CORRECTION_INFO);
1038                err_dev = ((err_fix_info & ERR_CORRECTION_INFO__DEVICE_NR)
1039                        >> 8);
1040                if (err_fix_info & ERR_CORRECTION_INFO__ERROR_TYPE) {
1041                        nand_dbg_print(NAND_DBG_WARN,
 1042                                "%s, Line %d: Uncorrectable ECC error "
 1043                                "when reading block %d page %d. "
 1044                                "PTN_INTR register: 0x%x "
1045                                "err_page: %d, err_sect: %d, err_byte: %d, "
1046                                "err_dev: %d, ecc_sect_size: %d, "
1047                                "err_fix_info: 0x%x\n",
1048                                __FILE__, __LINE__, block, page,
1049                                ioread32(FlashReg + PTN_INTR),
1050                                err_page, err_sect, err_byte, err_dev,
1051                                ecc_sect_size, (u32)err_fix_info);
1052
1053                        if (check_all_1(buf))
 1054                                nand_dbg_print(NAND_DBG_WARN, "%s, Line %d: "
1055                                               "All 0xff!\n",
1056                                               __FILE__, __LINE__);
1057                        else
 1058                                nand_dbg_print(NAND_DBG_WARN, "%s, Line %d: "
1059                                               "Not all 0xff!\n",
1060                                               __FILE__, __LINE__);
1061                        status = FAIL;
1062                } else {
1063                        nand_dbg_print(NAND_DBG_WARN,
 1064                                "%s, Line %d: Found ECC error "
 1065                                "when reading block %d page %d. "
1066                                "err_page: %d, err_sect: %d, err_byte: %d, "
1067                                "err_dev: %d, ecc_sect_size: %d, "
1068                                "err_fix_info: 0x%x\n",
1069                                __FILE__, __LINE__, block, page,
1070                                err_page, err_sect, err_byte, err_dev,
1071                                ecc_sect_size, (u32)err_fix_info);
1072                        if (err_byte < ECC_SECTOR_SIZE) {
1073                                err_pos = buf +
1074                                        (err_page - page) *
1075                                        DeviceInfo.wPageDataSize +
1076                                        err_sect * ecc_sect_size +
1077                                        err_byte *
1078                                        DeviceInfo.wDevicesConnected +
1079                                        err_dev;
1080
1081                                *err_pos ^= err_fix_info &
1082                                        ERR_CORRECTION_INFO__BYTEMASK;
1083                        }
1084                }
1085        } while (!(err_fix_info & ERR_CORRECTION_INFO__LAST_ERR_INFO));
1086
1087        return status;
1088}
1089
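     /*
      * Polling-mode page read. Multi-page requests are routed to the
      * multiplane or pipeline read-ahead helpers in chunks of at most
      * MAX_PAGES_PER_RW; a single page is transferred by DDMA with the
      * completion and ECC status polled inline.
      */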
1090u16 NAND_Read_Page_Main_Polling(u8 *read_data,
1091                u32 block, u16 page, u16 page_count)
1092{
1093        u32 status = PASS;
1094        u64 flash_add;
1095        u32 intr_status = 0;
1096        u32 flash_bank;
1097        u32 intr_status_addresses[4] = {INTR_STATUS0,
1098                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
1099        u8 *read_data_l;
1100
1101        nand_dbg_print(NAND_DBG_WARN, "%s, Line %d, Function: %s\n",
1102                       __FILE__, __LINE__, __func__);
1103
1104        status = Boundary_Check_Block_Page(block, page, page_count);
1105        if (status != PASS)
1106                return status;
1107
1108        flash_add = (u64)(block % (DeviceInfo.wTotalBlocks / totalUsedBanks))
1109                * DeviceInfo.wBlockDataSize +
1110                (u64)page * DeviceInfo.wPageDataSize;
1111        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
1112
1113        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
1114
1115        intr_status = intr_status_addresses[flash_bank];
1116        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
1117
1118        if (page_count > 1) {
1119                read_data_l = read_data;
1120                while (page_count > MAX_PAGES_PER_RW) {
1121                        if (ioread32(FlashReg + MULTIPLANE_OPERATION))
1122                                status = NAND_Multiplane_Read(read_data_l,
1123                                        block, page, MAX_PAGES_PER_RW);
1124                        else
1125                                status = NAND_Pipeline_Read_Ahead_Polling(
1126                                        read_data_l, block, page,
1127                                        MAX_PAGES_PER_RW);
1128
1129                        if (status == FAIL)
1130                                return status;
1131
1132                        read_data_l += DeviceInfo.wPageDataSize *
1133                                        MAX_PAGES_PER_RW;
1134                        page_count -= MAX_PAGES_PER_RW;
1135                        page += MAX_PAGES_PER_RW;
1136                }
1137                if (ioread32(FlashReg + MULTIPLANE_OPERATION))
1138                        status = NAND_Multiplane_Read(read_data_l,
1139                                        block, page, page_count);
1140                else
1141                        status = NAND_Pipeline_Read_Ahead_Polling(
1142                                        read_data_l, block, page, page_count);
1143
1144                return status;
1145        }
1146
1147        iowrite32(1, FlashReg + DMA_ENABLE);
1148        while (!(ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1149                ;
1150
1151        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
1152        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
1153
1154        ddma_trans(read_data, flash_add, flash_bank, 0, 1);
1155
1156        if (enable_ecc) {
1157                while (!(ioread32(FlashReg + intr_status) &
1158                        (INTR_STATUS0__ECC_TRANSACTION_DONE |
1159                        INTR_STATUS0__ECC_ERR)))
1160                        ;
1161
1162                if (ioread32(FlashReg + intr_status) &
1163                        INTR_STATUS0__ECC_ERR) {
1164                        iowrite32(INTR_STATUS0__ECC_ERR,
1165                                FlashReg + intr_status);
1166                        status = do_ecc_new(flash_bank, read_data,
1167                                        block, page);
1168                }
1169
 1170                if ((ioread32(FlashReg + intr_status) & INTR_STATUS0__ECC_ERR) &&
 1171                        (ioread32(FlashReg + intr_status) &
 1172                        INTR_STATUS0__ECC_TRANSACTION_DONE))
1173                        iowrite32(INTR_STATUS0__ECC_TRANSACTION_DONE |
1174                                INTR_STATUS0__ECC_ERR,
1175                                FlashReg + intr_status);
1176                else if (ioread32(FlashReg + intr_status) &
1177                        INTR_STATUS0__ECC_TRANSACTION_DONE)
1178                        iowrite32(INTR_STATUS0__ECC_TRANSACTION_DONE,
1179                                FlashReg + intr_status);
1180                else if (ioread32(FlashReg + intr_status) &
1181                        INTR_STATUS0__ECC_ERR)
1182                        iowrite32(INTR_STATUS0__ECC_ERR,
1183                                FlashReg + intr_status);
1184        } else {
1185                while (!(ioread32(FlashReg + intr_status) &
1186                        INTR_STATUS0__DMA_CMD_COMP))
1187                        ;
1188                iowrite32(INTR_STATUS0__DMA_CMD_COMP, FlashReg + intr_status);
1189        }
1190
1191        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
1192
1193        iowrite32(0, FlashReg + DMA_ENABLE);
1194        while ((ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1195                ;
1196
1197        return status;
1198}
1199
1200u16 NAND_Pipeline_Read_Ahead_Polling(u8 *read_data,
1201                        u32 block, u16 page, u16 page_count)
1202{
1203        u32 status = PASS;
1204        u32 NumPages = page_count;
1205        u64 flash_add;
1206        u32 flash_bank;
1207        u32 intr_status = 0;
1208        u32 intr_status_addresses[4] = {INTR_STATUS0,
1209                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
1210        u32 ecc_done_OR_dma_comp;
1211
1212        nand_dbg_print(NAND_DBG_WARN, "%s, Line %d, Function: %s\n",
1213                       __FILE__, __LINE__, __func__);
1214
1215        status = Boundary_Check_Block_Page(block, page, page_count);
1216
1217        if (page_count < 2)
1218                status = FAIL;
1219
1220        flash_add = (u64)(block % (DeviceInfo.wTotalBlocks / totalUsedBanks))
1221                *DeviceInfo.wBlockDataSize +
1222                (u64)page * DeviceInfo.wPageDataSize;
1223
1224        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
1225
1226        if (status == PASS) {
1227                intr_status = intr_status_addresses[flash_bank];
1228                iowrite32(ioread32(FlashReg + intr_status),
1229                        FlashReg + intr_status);
1230
1231                iowrite32(1, FlashReg + DMA_ENABLE);
1232                while (!(ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1233                        ;
1234
1235                iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
1236
1237                index_addr((u32)(MODE_10 | (flash_bank << 24) |
1238                        (flash_add >> DeviceInfo.nBitsInPageDataSize)), 0x42);
1239                ddma_trans(read_data, flash_add, flash_bank, 0, NumPages);
1240
1241                ecc_done_OR_dma_comp = 0;
1242                while (1) {
1243                        if (enable_ecc) {
1244                                while (!ioread32(FlashReg + intr_status))
1245                                        ;
1246
1247                                if (ioread32(FlashReg + intr_status) &
1248                                        INTR_STATUS0__ECC_ERR) {
1249                                        iowrite32(INTR_STATUS0__ECC_ERR,
1250                                                FlashReg + intr_status);
1251                                        status = do_ecc_new(flash_bank,
1252                                                read_data, block, page);
1253                                } else if (ioread32(FlashReg + intr_status) &
1254                                        INTR_STATUS0__DMA_CMD_COMP) {
1255                                        iowrite32(INTR_STATUS0__DMA_CMD_COMP,
1256                                                FlashReg + intr_status);
1257
1258                                        if (1 == ecc_done_OR_dma_comp)
1259                                                break;
1260
1261                                        ecc_done_OR_dma_comp = 1;
1262                                } else if (ioread32(FlashReg + intr_status) &
1263                                        INTR_STATUS0__ECC_TRANSACTION_DONE) {
1264                                        iowrite32(
1265                                        INTR_STATUS0__ECC_TRANSACTION_DONE,
1266                                        FlashReg + intr_status);
1267
1268                                        if (1 == ecc_done_OR_dma_comp)
1269                                                break;
1270
1271                                        ecc_done_OR_dma_comp = 1;
1272                                }
1273                        } else {
1274                                while (!(ioread32(FlashReg + intr_status) &
1275                                        INTR_STATUS0__DMA_CMD_COMP))
1276                                        ;
1277
1278                                iowrite32(INTR_STATUS0__DMA_CMD_COMP,
1279                                        FlashReg + intr_status);
1280                                break;
1281                        }
1282
1283                        iowrite32((~INTR_STATUS0__ECC_ERR) &
1284                                (~INTR_STATUS0__ECC_TRANSACTION_DONE) &
1285                                (~INTR_STATUS0__DMA_CMD_COMP),
1286                                FlashReg + intr_status);
1287
1288                }
1289
1290                iowrite32(ioread32(FlashReg + intr_status),
1291                        FlashReg + intr_status);
1292
1293                iowrite32(0, FlashReg + DMA_ENABLE);
1294
1295                while ((ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1296                        ;
1297        }
1298        return status;
1299}
1300
1301u16 NAND_Read_Page_Main(u8 *read_data, u32 block, u16 page,
1302                           u16 page_count)
1303{
1304        u32 status = PASS;
1305        u64 flash_add;
1306        u32 intr_status = 0;
1307        u32 flash_bank;
1308        u32 intr_status_addresses[4] = {INTR_STATUS0,
1309                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
1310        int ret;
1311        u8 *read_data_l;
1312
1313        nand_dbg_print(NAND_DBG_DEBUG, "%s, Line %d, Function: %s\n",
1314                       __FILE__, __LINE__, __func__);
1315
1316        status = Boundary_Check_Block_Page(block, page, page_count);
1317        if (status != PASS)
1318                return status;
1319
1320        flash_add = (u64)(block % (DeviceInfo.wTotalBlocks / totalUsedBanks))
1321                * DeviceInfo.wBlockDataSize +
1322                (u64)page * DeviceInfo.wPageDataSize;
1323        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
1324
1325        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
1326
1327        intr_status = intr_status_addresses[flash_bank];
1328        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
1329
1330        if (page_count > 1) {
1331                read_data_l = read_data;
1332                while (page_count > MAX_PAGES_PER_RW) {
1333                        if (ioread32(FlashReg + MULTIPLANE_OPERATION))
1334                                status = NAND_Multiplane_Read(read_data_l,
1335                                        block, page, MAX_PAGES_PER_RW);
1336                        else
1337                                status = NAND_Pipeline_Read_Ahead(
1338                                        read_data_l, block, page,
1339                                        MAX_PAGES_PER_RW);
1340
1341                        if (status == FAIL)
1342                                return status;
1343
1344                        read_data_l += DeviceInfo.wPageDataSize *
1345                                        MAX_PAGES_PER_RW;
1346                        page_count -= MAX_PAGES_PER_RW;
1347                        page += MAX_PAGES_PER_RW;
1348                }
1349                if (ioread32(FlashReg + MULTIPLANE_OPERATION))
1350                        status = NAND_Multiplane_Read(read_data_l,
1351                                        block, page, page_count);
1352                else
1353                        status = NAND_Pipeline_Read_Ahead(
1354                                        read_data_l, block, page, page_count);
1355
1356                return status;
1357        }
1358
1359        iowrite32(1, FlashReg + DMA_ENABLE);
1360        while (!(ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1361                ;
1362
1363        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
1364        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
1365
1366        /* Fill the mrst_nand_info structure */
1367        info.state = INT_READ_PAGE_MAIN;
1368        info.read_data = read_data;
1369        info.flash_bank = flash_bank;
1370        info.block = block;
1371        info.page = page;
1372        info.ret = PASS;
1373
1374        ddma_trans(read_data, flash_add, flash_bank, 0, 1);
1375
1376        iowrite32(1, FlashReg + GLOBAL_INT_ENABLE); /* Enable Interrupt */
1377
1378        ret = wait_for_completion_timeout(&info.complete, 10 * HZ);
1379        if (!ret) {
1380                printk(KERN_ERR "Wait for completion timeout in %s, Line %d\n",
1381                        __FILE__, __LINE__);
1382                status = ERR;
1383        } else {
1384                status = info.ret;
1385        }
1386
1387        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
1388
1389        iowrite32(0, FlashReg + DMA_ENABLE);
1390        while ((ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1391                ;
1392
1393        return status;
1394}
1395
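    /*
     * Conv_Spare_Data_Log2Phy_Format - convert a spare buffer from the
     * logical layout used by the upper layers to the physical on-flash
     * layout.  With ECC enabled the flag bytes sit at the start of the
     * logical buffer and are copied to the end of the spare area; with
     * ECC disabled the buffer is left untouched.
     */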
1396void Conv_Spare_Data_Log2Phy_Format(u8 *data)
1397{
1398        int i;
1399        const u32 spareFlagBytes = DeviceInfo.wNumPageSpareFlag;
1400        const u32 PageSpareSize  = DeviceInfo.wPageSpareSize;
1401
1402        if (enable_ecc) {
1403                for (i = spareFlagBytes - 1; i >= 0; i--)
1404                        data[PageSpareSize - spareFlagBytes + i] = data[i];
1405        }
1406}
1407
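    /*
     * Conv_Spare_Data_Phy2Log_Format - inverse of the conversion above:
     * with ECC enabled, copy the flag bytes from the end of the physical
     * spare area back to the start of the logical buffer.
     */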
1408void Conv_Spare_Data_Phy2Log_Format(u8 *data)
1409{
1410        int i;
1411        const u32 spareFlagBytes = DeviceInfo.wNumPageSpareFlag;
1412        const u32 PageSpareSize = DeviceInfo.wPageSpareSize;
1413
1414        if (enable_ecc) {
1415                for (i = 0; i < spareFlagBytes; i++)
1416                        data[i] = data[PageSpareSize - spareFlagBytes + i];
1417        }
1418}
1419
1420
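    /*
     * Conv_Main_Spare_Data_Log2Phy_Format - convert page_count pages of
     * combined main+spare data from logical to physical layout, in place.
     * With ECC enabled each page's main data is spread into ECC sectors
     * with room for the ECC bytes after each sector, the spare flag bytes
     * are moved behind the last sector, and the remaining spare data is
     * shifted by spareSkipBytes.  Pages are converted last page first.
     */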
1421void Conv_Main_Spare_Data_Log2Phy_Format(u8 *data, u16 page_count)
1422{
1423        const u32 PageSize = DeviceInfo.wPageSize;
1424        const u32 PageDataSize = DeviceInfo.wPageDataSize;
1425        const u32 eccBytes = DeviceInfo.wECCBytesPerSector;
1426        const u32 spareSkipBytes = DeviceInfo.wSpareSkipBytes;
1427        const u32 spareFlagBytes = DeviceInfo.wNumPageSpareFlag;
1428        u32 eccSectorSize;
1429        u32 page_offset;
1430        int i, j;
1431
1432        eccSectorSize = ECC_SECTOR_SIZE * (DeviceInfo.wDevicesConnected);
1433        if (enable_ecc) {
1434                while (page_count > 0) {
1435                        page_offset = (page_count - 1) * PageSize;
1436                        j = (DeviceInfo.wPageDataSize / eccSectorSize);
1437                        for (i = spareFlagBytes - 1; i >= 0; i--)
1438                                data[page_offset +
1439                                        (eccSectorSize + eccBytes) * j + i] =
1440                                        data[page_offset + PageDataSize + i];
1441                        for (j--; j >= 1; j--) {
1442                                for (i = eccSectorSize - 1; i >= 0; i--)
1443                                        data[page_offset +
1444                                        (eccSectorSize + eccBytes) * j + i] =
1445                                                data[page_offset +
1446                                                eccSectorSize * j + i];
1447                        }
1448                        for (i = (PageSize - spareSkipBytes) - 1;
1449                                i >= PageDataSize; i--)
1450                                data[page_offset + i + spareSkipBytes] =
1451                                        data[page_offset + i];
1452                        page_count--;
1453                }
1454        }
1455}
1456
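    /*
     * Conv_Main_Spare_Data_Phy2Log_Format - inverse of the conversion
     * above: with ECC enabled, drop the spare skip bytes, collapse the
     * ECC sectors back into contiguous main data and move the flag bytes
     * back behind the main data.  Pages are converted in place.
     */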
1457void Conv_Main_Spare_Data_Phy2Log_Format(u8 *data, u16 page_count)
1458{
1459        const u32 PageSize = DeviceInfo.wPageSize;
1460        const u32 PageDataSize = DeviceInfo.wPageDataSize;
1461        const u32 eccBytes = DeviceInfo.wECCBytesPerSector;
1462        const u32 spareSkipBytes = DeviceInfo.wSpareSkipBytes;
1463        const u32 spareFlagBytes = DeviceInfo.wNumPageSpareFlag;
1464        u32 eccSectorSize;
1465        u32 page_offset;
1466        int i, j;
1467
1468        eccSectorSize = ECC_SECTOR_SIZE * (DeviceInfo.wDevicesConnected);
1469        if (enable_ecc) {
1470                while (page_count > 0) {
1471                        page_offset = (page_count - 1) * PageSize;
1472                        for (i = PageDataSize;
1473                                i < PageSize - spareSkipBytes;
1474                                i++)
1475                                data[page_offset + i] =
1476                                        data[page_offset + i +
1477                                        spareSkipBytes];
1478                        for (j = 1;
1479                        j < DeviceInfo.wPageDataSize / eccSectorSize;
1480                        j++) {
1481                                for (i = 0; i < eccSectorSize; i++)
1482                                        data[page_offset +
1483                                        eccSectorSize * j + i] =
1484                                                data[page_offset +
1485                                                (eccSectorSize + eccBytes) * j
1486                                                + i];
1487                        }
1488                        for (i = 0; i < spareFlagBytes; i++)
1489                                data[page_offset + PageDataSize + i] =
1490                                        data[page_offset +
1491                                        (eccSectorSize + eccBytes) * j + i];
1492                        page_count--;
1493                }
1494        }
1495}
1496
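    /*
     * NAND_Multiplane_Read - read page_count pages with the controller in
     * multi-plane mode.  Sets MULTIPLANE_OPERATION, programs the target
     * address via index_addr(), starts the transfer with ddma_trans() and
     * then polls the bank's interrupt status until both the ECC
     * transaction and the DMA command have completed (DMA completion only
     * when ECC is disabled).  Runs entirely by polling; no interrupt.
     */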
1497/* Un-tested function */
1498u16 NAND_Multiplane_Read(u8 *read_data, u32 block, u16 page,
1499                            u16 page_count)
1500{
1501        u32 status = PASS;
1502        u32 NumPages = page_count;
1503        u64 flash_add;
1504        u32 flash_bank;
1505        u32 intr_status = 0;
1506        u32 intr_status_addresses[4] = {INTR_STATUS0,
1507                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
1508        u32 ecc_done_OR_dma_comp;
1509
1510        nand_dbg_print(NAND_DBG_WARN, "%s, Line %d, Function: %s\n",
1511                       __FILE__, __LINE__, __func__);
1512
1513        status = Boundary_Check_Block_Page(block, page, page_count);
1514
1515        flash_add = (u64)(block % (DeviceInfo.wTotalBlocks / totalUsedBanks))
1516                * DeviceInfo.wBlockDataSize +
1517                (u64)page * DeviceInfo.wPageDataSize;
1518
1519        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
1520
1521        if (status == PASS) {
1522                intr_status = intr_status_addresses[flash_bank];
1523                iowrite32(ioread32(FlashReg + intr_status),
1524                        FlashReg + intr_status);
1525
1526                iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
1527                iowrite32(0x01, FlashReg + MULTIPLANE_OPERATION);
1528
1529                iowrite32(1, FlashReg + DMA_ENABLE);
1530                while (!(ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1531                        ;
1532                index_addr((u32)(MODE_10 | (flash_bank << 24) |
1533                        (flash_add >> DeviceInfo.nBitsInPageDataSize)), 0x42);
1534                ddma_trans(read_data, flash_add, flash_bank, 0, NumPages);
1535
1536                ecc_done_OR_dma_comp = 0;
1537                while (1) {
1538                        if (enable_ecc) {
1539                                while (!ioread32(FlashReg + intr_status))
1540                                        ;
1541
1542                                if (ioread32(FlashReg + intr_status) &
1543                                        INTR_STATUS0__ECC_ERR) {
1544                                        iowrite32(INTR_STATUS0__ECC_ERR,
1545                                                FlashReg + intr_status);
1546                                        status = do_ecc_new(flash_bank,
1547                                                read_data, block, page);
1548                                } else if (ioread32(FlashReg + intr_status) &
1549                                        INTR_STATUS0__DMA_CMD_COMP) {
1550                                        iowrite32(INTR_STATUS0__DMA_CMD_COMP,
1551                                                FlashReg + intr_status);
1552
1553                                        if (1 == ecc_done_OR_dma_comp)
1554                                                break;
1555
1556                                        ecc_done_OR_dma_comp = 1;
1557                                } else if (ioread32(FlashReg + intr_status) &
1558                                        INTR_STATUS0__ECC_TRANSACTION_DONE) {
1559                                        iowrite32(
1560                                        INTR_STATUS0__ECC_TRANSACTION_DONE,
1561                                        FlashReg + intr_status);
1562
1563                                        if (1 == ecc_done_OR_dma_comp)
1564                                                break;
1565
1566                                        ecc_done_OR_dma_comp = 1;
1567                                }
1568                        } else {
1569                                while (!(ioread32(FlashReg + intr_status) &
1570                                        INTR_STATUS0__DMA_CMD_COMP))
1571                                        ;
1572                                iowrite32(INTR_STATUS0__DMA_CMD_COMP,
1573                                        FlashReg + intr_status);
1574                                break;
1575                        }
1576
1577                        iowrite32((~INTR_STATUS0__ECC_ERR) &
1578                                (~INTR_STATUS0__ECC_TRANSACTION_DONE) &
1579                                (~INTR_STATUS0__DMA_CMD_COMP),
1580                                FlashReg + intr_status);
1581
1582                }
1583
1584                iowrite32(ioread32(FlashReg + intr_status),
1585                        FlashReg + intr_status);
1586
1587                iowrite32(0, FlashReg + DMA_ENABLE);
1588
1589                while ((ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1590                        ;
1591
1592                iowrite32(0, FlashReg + MULTIPLANE_OPERATION);
1593        }
1594
1595        return status;
1596}
1597
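    /*
     * NAND_Pipeline_Read_Ahead - read page_count (>= 2) pages using the
     * controller's pipeline read-ahead mode.  Programs the address via
     * index_addr(), starts one DMA transfer for all pages and sleeps on
     * info.complete until the ISR reports completion, with a 10 second
     * timeout as a safety net.
     */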
1598u16 NAND_Pipeline_Read_Ahead(u8 *read_data, u32 block,
1599                                u16 page, u16 page_count)
1600{
1601        u32 status = PASS;
1602        u32 NumPages = page_count;
1603        u64 flash_add;
1604        u32 flash_bank;
1605        u32 intr_status = 0;
1606        u32 intr_status_addresses[4] = {INTR_STATUS0,
1607                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
1608        int ret;
1609
1610        nand_dbg_print(NAND_DBG_DEBUG, "%s, Line %d, Function: %s\n",
1611                       __FILE__, __LINE__, __func__);
1612
1613        status = Boundary_Check_Block_Page(block, page, page_count);
1614
1615        if (page_count < 2)
1616                status = FAIL;
1617
1618        if (status != PASS)
1619                return status;
1620
1621        flash_add = (u64)(block % (DeviceInfo.wTotalBlocks / totalUsedBanks))
1622                * DeviceInfo.wBlockDataSize +
1623                (u64)page * DeviceInfo.wPageDataSize;
1624
1625        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
1626
1627        intr_status = intr_status_addresses[flash_bank];
1628        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
1629
1630        iowrite32(1, FlashReg + DMA_ENABLE);
1631        while (!(ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1632                ;
1633
1634        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
1635
1636        /* Fill the mrst_nand_info structure */
1637        info.state = INT_PIPELINE_READ_AHEAD;
1638        info.read_data = read_data;
1639        info.flash_bank = flash_bank;
1640        info.block = block;
1641        info.page = page;
1642        info.ret = PASS;
1643
1644        index_addr((u32)(MODE_10 | (flash_bank << 24) |
1645                (flash_add >> DeviceInfo.nBitsInPageDataSize)), 0x42);
1646
1647        ddma_trans(read_data, flash_add, flash_bank, 0, NumPages);
1648
1649        iowrite32(1, FlashReg + GLOBAL_INT_ENABLE); /* Enable Interrupt */
1650
1651        ret = wait_for_completion_timeout(&info.complete, 10 * HZ);
1652        if (!ret) {
1653                printk(KERN_ERR "Wait for completion timeout in %s, Line %d\n",
1654                        __FILE__, __LINE__);
1655                status = ERR;
1656        } else {
1657                status = info.ret;
1658        }
1659
1660        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
1661
1662        iowrite32(0, FlashReg + DMA_ENABLE);
1663
1664        while ((ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1665                ;
1666
1667        return status;
1668}
1669
1670
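    /*
     * NAND_Write_Page_Main - program the main (data) area of one or more
     * pages.  Multi-page requests are split into chunks of at most
     * MAX_PAGES_PER_RW and handed to NAND_Multiplane_Write() or
     * NAND_Pipeline_Write_Ahead(); a single page is written with one DMA
     * transfer and completed from the ISR, again with a 10 second timeout.
     */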
1671u16 NAND_Write_Page_Main(u8 *write_data, u32 block, u16 page,
1672                            u16 page_count)
1673{
1674        u32 status = PASS;
1675        u64 flash_add;
1676        u32 intr_status = 0;
1677        u32 flash_bank;
1678        u32 intr_status_addresses[4] = {INTR_STATUS0,
1679                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
1680        int ret;
1681        u8 *write_data_l;
1682
1683        nand_dbg_print(NAND_DBG_DEBUG, "%s, Line %d, Function: %s\n",
1684                       __FILE__, __LINE__, __func__);
1685
1686        status = Boundary_Check_Block_Page(block, page, page_count);
1687        if (status != PASS)
1688                return status;
1689
1690        flash_add = (u64)(block % (DeviceInfo.wTotalBlocks / totalUsedBanks))
1691                * DeviceInfo.wBlockDataSize +
1692                (u64)page * DeviceInfo.wPageDataSize;
1693
1694        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
1695
1696        intr_status = intr_status_addresses[flash_bank];
1697
1698        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
1699
1700        iowrite32(INTR_STATUS0__PROGRAM_COMP |
1701                INTR_STATUS0__PROGRAM_FAIL, FlashReg + intr_status);
1702
1703        if (page_count > 1) {
1704                write_data_l = write_data;
1705                while (page_count > MAX_PAGES_PER_RW) {
1706                        if (ioread32(FlashReg + MULTIPLANE_OPERATION))
1707                                status = NAND_Multiplane_Write(write_data_l,
1708                                        block, page, MAX_PAGES_PER_RW);
1709                        else
1710                                status = NAND_Pipeline_Write_Ahead(
1711                                        write_data_l, block, page,
1712                                        MAX_PAGES_PER_RW);
1713                        if (status == FAIL)
1714                                return status;
1715
1716                        write_data_l += DeviceInfo.wPageDataSize *
1717                                        MAX_PAGES_PER_RW;
1718                        page_count -= MAX_PAGES_PER_RW;
1719                        page += MAX_PAGES_PER_RW;
1720                }
1721                if (ioread32(FlashReg + MULTIPLANE_OPERATION))
1722                        status = NAND_Multiplane_Write(write_data_l,
1723                                block, page, page_count);
1724                else
1725                        status = NAND_Pipeline_Write_Ahead(write_data_l,
1726                                block, page, page_count);
1727
1728                return status;
1729        }
1730
1731        iowrite32(1, FlashReg + DMA_ENABLE);
1732        while (!(ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
1733                ;
1734
1735        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
1736
1737        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
1738
1739        /* Fill the mrst_nand_info structure */
1740        info.state = INT_WRITE_PAGE_MAIN;
1741        info.write_data = write_data;
1742        info.flash_bank = flash_bank;
1743        info.block = block;
1744        info.page = page;
1745        info.ret = PASS;
1746
1747        ddma_trans(write_data, flash_add, flash_bank, 1, 1);
1748
1749        iowrite32(1, FlashReg + GLOBAL_INT_ENABLE); /* Enable interrupt */
1750
1751        ret = wait_for_completion_timeout(&info.complete, 10 * HZ);
1752        if (!ret) {
1753                printk(KERN_ERR "Wait for completion timeout in %s, Line %d\n",
1754                        __FILE__, __LINE__);
1755                status = ERR;
1756        } else {
1757                status = info.ret;
1758        }
1759
1760        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
1761
1762        iowrite32(0, FlashReg + DMA_ENABLE);
1763        while (ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG)
1764                ;
1765
1766        return status;
1767}
1768
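    /*
     * NAND_ECC_Ctrl - turn the controller's hardware ECC on or off and
     * keep the driver-local enable_ecc flag in sync with the ECC_ENABLE
     * register.  For example, NAND_ECC_Ctrl(0) could be used to read raw
     * pages without correction (illustrative use only).
     */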
1769void NAND_ECC_Ctrl(int enable)
1770{
1771        if (enable) {
1772                nand_dbg_print(NAND_DBG_WARN,
1773                        "Will enable ECC in %s, Line %d, Function: %s\n",
1774                        __FILE__, __LINE__, __func__);
1775                iowrite32(1, FlashReg + ECC_ENABLE);
1776                enable_ecc = 1;
1777        } else {
1778                nand_dbg_print(NAND_DBG_WARN,
1779                        "Will disable ECC in %s, Line %d, Function: %s\n",
1780                        __FILE__, __LINE__, __func__);
1781                iowrite32(0, FlashReg + ECC_ENABLE);
1782                enable_ecc = 0;
1783        }
1784}
1785
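    /*
     * NAND_Write_Page_Main_Spare - program main and spare area together.
     * With ECC enabled each page is first converted from the logical to
     * the physical layout in buf_write_page_main_spare, then written word
     * by word through the MODE_01 data port; completion is detected by
     * polling PROGRAM_COMP / PROGRAM_FAIL.  No DMA is used here.
     */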
1786u16 NAND_Write_Page_Main_Spare(u8 *write_data, u32 block,
1787                                        u16 page, u16 page_count)
1788{
1789        u32 status = PASS;
1790        u32 i, j, page_num = 0;
1791        u32 PageSize = DeviceInfo.wPageSize;
1792        u32 PageDataSize = DeviceInfo.wPageDataSize;
1793        u32 eccBytes = DeviceInfo.wECCBytesPerSector;
1794        u32 spareFlagBytes = DeviceInfo.wNumPageSpareFlag;
1795        u32 spareSkipBytes  = DeviceInfo.wSpareSkipBytes;
1796        u64 flash_add;
1797        u32 eccSectorSize;
1798        u32 flash_bank;
1799        u32 intr_status = 0;
1800        u32 intr_status_addresses[4] = {INTR_STATUS0,
1801                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
1802        u8 *page_main_spare = buf_write_page_main_spare;
1803
1804        nand_dbg_print(NAND_DBG_WARN, "%s, Line %d, Function: %s\n",
1805                       __FILE__, __LINE__, __func__);
1806
1807        eccSectorSize = ECC_SECTOR_SIZE * (DeviceInfo.wDevicesConnected);
1808
1809        status = Boundary_Check_Block_Page(block, page, page_count);
1810
1811        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
1812
1813        if (status == PASS) {
1814                intr_status = intr_status_addresses[flash_bank];
1815
1816                iowrite32(1, FlashReg + TRANSFER_SPARE_REG);
1817
1818                while ((status != FAIL) && (page_count > 0)) {
1819                        flash_add = (u64)(block %
1820                        (DeviceInfo.wTotalBlocks / totalUsedBanks)) *
1821                        DeviceInfo.wBlockDataSize +
1822                        (u64)page * DeviceInfo.wPageDataSize;
1823
1824                        iowrite32(ioread32(FlashReg + intr_status),
1825                                FlashReg + intr_status);
1826
1827                        iowrite32((u32)(MODE_01 | (flash_bank << 24) |
1828                                (flash_add >>
1829                                DeviceInfo.nBitsInPageDataSize)),
1830                                FlashMem);
1831
1832                        if (enable_ecc) {
1833                                for (j = 0;
1834                                     j <
1835                                     DeviceInfo.wPageDataSize / eccSectorSize;
1836                                     j++) {
1837                                        for (i = 0; i < eccSectorSize; i++)
1838                                                page_main_spare[(eccSectorSize +
1839                                                                 eccBytes) * j +
1840                                                                i] =
1841                                                    write_data[eccSectorSize *
1842                                                               j + i];
1843
1844                                        for (i = 0; i < eccBytes; i++)
1845                                                page_main_spare[(eccSectorSize +
1846                                                                 eccBytes) * j +
1847                                                                eccSectorSize +
1848                                                                i] =
1849                                                    write_data[PageDataSize +
1850                                                               spareFlagBytes +
1851                                                               eccBytes * j +
1852                                                               i];
1853                                }
1854
1855                                for (i = 0; i < spareFlagBytes; i++)
1856                                        page_main_spare[(eccSectorSize +
1857                                                         eccBytes) * j + i] =
1858                                            write_data[PageDataSize + i];
1859
1860                                for (i = PageSize - 1; i >= PageDataSize +
1861                                                        spareSkipBytes; i--)
1862                                        page_main_spare[i] = page_main_spare[i -
1863                                                                spareSkipBytes];
1864
1865                                for (i = PageDataSize; i < PageDataSize +
1866                                                        spareSkipBytes; i++)
1867                                        page_main_spare[i] = 0xff;
1868
1869                                for (i = 0; i < PageSize / 4; i++)
1870                                        iowrite32(
1871                                        *((u32 *)page_main_spare + i),
1872                                        FlashMem + 0x10);
1873                        } else {
1874
1875                                for (i = 0; i < PageSize / 4; i++)
1876                                        iowrite32(*((u32 *)write_data + i),
1877                                                FlashMem + 0x10);
1878                        }
1879
1880                        while (!(ioread32(FlashReg + intr_status) &
1881                                (INTR_STATUS0__PROGRAM_COMP |
1882                                INTR_STATUS0__PROGRAM_FAIL)))
1883                                ;
1884
1885                        if (ioread32(FlashReg + intr_status) &
1886                                INTR_STATUS0__PROGRAM_FAIL)
1887                                status = FAIL;
1888
1889                        iowrite32(ioread32(FlashReg + intr_status),
1890                                        FlashReg + intr_status);
1891
1892                        page_num++;
1893                        page_count--;
1894                        write_data += PageSize;
1895                }
1896
1897                iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
1898        }
1899
1900        return status;
1901}
1902
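    /*
     * NAND_Read_Page_Main_Spare - read main and spare area together.
     * Issues a MODE_10 load command per page, reads the data word by word
     * through the MODE_01 data port into buf_read_page_main_spare,
     * converts it back to the logical layout and, with ECC enabled, runs
     * do_ecc_new() if the controller flagged an ECC error.  Polling only.
     */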
1903u16 NAND_Read_Page_Main_Spare(u8 *read_data, u32 block, u16 page,
1904                                 u16 page_count)
1905{
1906        u32 status = PASS;
1907        u32 i, j;
1908        u64 flash_add = 0;
1909        u32 PageSize = DeviceInfo.wPageSize;
1910        u32 PageDataSize = DeviceInfo.wPageDataSize;
1911        u32 PageSpareSize = DeviceInfo.wPageSpareSize;
1912        u32 eccBytes = DeviceInfo.wECCBytesPerSector;
1913        u32 spareFlagBytes = DeviceInfo.wNumPageSpareFlag;
1914        u32 spareSkipBytes  = DeviceInfo.wSpareSkipBytes;
1915        u32 eccSectorSize;
1916        u32 flash_bank;
1917        u32 intr_status = 0;
1918        u8 *read_data_l = read_data;
1919        u32 intr_status_addresses[4] = {INTR_STATUS0,
1920                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
1921        u8 *page_main_spare = buf_read_page_main_spare;
1922
1923        nand_dbg_print(NAND_DBG_WARN, "%s, Line %d, Function: %s\n",
1924                       __FILE__, __LINE__, __func__);
1925
1926        eccSectorSize = ECC_SECTOR_SIZE * (DeviceInfo.wDevicesConnected);
1927
1928        status = Boundary_Check_Block_Page(block, page, page_count);
1929
1930        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
1931
1932        if (status == PASS) {
1933                intr_status = intr_status_addresses[flash_bank];
1934
1935                iowrite32(1, FlashReg + TRANSFER_SPARE_REG);
1936
1937                iowrite32(ioread32(FlashReg + intr_status),
1938                                FlashReg + intr_status);
1939
1940                while ((status != FAIL) && (page_count > 0)) {
1941                        flash_add = (u64)(block %
1942                                (DeviceInfo.wTotalBlocks / totalUsedBanks))
1943                                * DeviceInfo.wBlockDataSize +
1944                                (u64)page * DeviceInfo.wPageDataSize;
1945
1946                        index_addr((u32)(MODE_10 | (flash_bank << 24) |
1947                                (flash_add >> DeviceInfo.nBitsInPageDataSize)),
1948                                0x43);
1949                        index_addr((u32)(MODE_10 | (flash_bank << 24) |
1950                                (flash_add >> DeviceInfo.nBitsInPageDataSize)),
1951                                0x2000 | page_count);
1952
1953                        while (!(ioread32(FlashReg + intr_status) &
1954                                INTR_STATUS0__LOAD_COMP))
1955                                ;
1956
1957                        iowrite32((u32)(MODE_01 | (flash_bank << 24) |
1958                                (flash_add >>
1959                                DeviceInfo.nBitsInPageDataSize)),
1960                                FlashMem);
1961
1962                        for (i = 0; i < PageSize / 4; i++)
1963                                *(((u32 *)page_main_spare) + i) =
1964                                        ioread32(FlashMem + 0x10);
1965
1966                        if (enable_ecc) {
1967                                for (i = PageDataSize;  i < PageSize -
1968                                                        spareSkipBytes; i++)
1969                                        page_main_spare[i] = page_main_spare[i +
1970                                                                spareSkipBytes];
1971
1972                                for (j = 0;
1973                                j < DeviceInfo.wPageDataSize / eccSectorSize;
1974                                j++) {
1975
1976                                        for (i = 0; i < eccSectorSize; i++)
1977                                                read_data_l[eccSectorSize * j +
1978                                                            i] =
1979                                                    page_main_spare[
1980                                                        (eccSectorSize +
1981                                                        eccBytes) * j + i];
1982
1983                                        for (i = 0; i < eccBytes; i++)
1984                                                read_data_l[PageDataSize +
1985                                                            spareFlagBytes +
1986                                                            eccBytes * j + i] =
1987                                                    page_main_spare[
1988                                                        (eccSectorSize +
1989                                                        eccBytes) * j +
1990                                                        eccSectorSize + i];
1991                                }
1992
1993                                for (i = 0; i < spareFlagBytes; i++)
1994                                        read_data_l[PageDataSize + i] =
1995                                            page_main_spare[(eccSectorSize +
1996                                                             eccBytes) * j + i];
1997                        } else {
1998                                for (i = 0; i < (PageDataSize + PageSpareSize);
1999                                     i++)
2000                                        read_data_l[i] = page_main_spare[i];
2001
2002                        }
2003
2004                        if (enable_ecc) {
2005                                while (!(ioread32(FlashReg + intr_status) &
2006                                        (INTR_STATUS0__ECC_TRANSACTION_DONE |
2007                                        INTR_STATUS0__ECC_ERR)))
2008                                        ;
2009
2010                                if (ioread32(FlashReg + intr_status) &
2011                                        INTR_STATUS0__ECC_ERR) {
2012                                        iowrite32(INTR_STATUS0__ECC_ERR,
2013                                                FlashReg + intr_status);
2014                                        status = do_ecc_new(flash_bank,
2015                                                read_data, block, page);
2016                                }
2017
2018                                if ((ioread32(FlashReg + intr_status) &
2019                                        INTR_STATUS0__ECC_TRANSACTION_DONE) &&
2020                                        (ioread32(FlashReg + intr_status) &
                                            INTR_STATUS0__ECC_ERR)) {
2021                                        iowrite32(INTR_STATUS0__ECC_ERR |
2022                                        INTR_STATUS0__ECC_TRANSACTION_DONE,
2023                                        FlashReg + intr_status);
2024                                } else if (ioread32(FlashReg + intr_status) &
2025                                        INTR_STATUS0__ECC_TRANSACTION_DONE) {
2026                                        iowrite32(
2027                                        INTR_STATUS0__ECC_TRANSACTION_DONE,
2028                                        FlashReg + intr_status);
2029                                } else if (ioread32(FlashReg + intr_status) &
2030                                        INTR_STATUS0__ECC_ERR) {
2031                                        iowrite32(INTR_STATUS0__ECC_ERR,
2032                                                FlashReg + intr_status);
2033                                }
2034                        }
2035
2036                        page++;
2037                        page_count--;
2038                        read_data_l += PageSize;
2039                }
2040        }
2041
2042        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
2043
2044        index_addr((u32)(MODE_10 | (flash_bank << 24) |
2045                (flash_add >> DeviceInfo.nBitsInPageDataSize)), 0x42);
2046
2047        return status;
2048}
2049
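    /*
     * NAND_Pipeline_Write_Ahead - program page_count (>= 2) pages using
     * the controller's pipeline write-ahead mode.  Mirrors
     * NAND_Pipeline_Read_Ahead(): one DMA transfer covers all pages and
     * completion is reported by the ISR through info.complete.
     */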
2050u16 NAND_Pipeline_Write_Ahead(u8 *write_data, u32 block,
2051                        u16 page, u16 page_count)
2052{
2053        u16 status = PASS;
2054        u32 NumPages = page_count;
2055        u64 flash_add;
2056        u32 flash_bank;
2057        u32 intr_status = 0;
2058        u32 intr_status_addresses[4] = {INTR_STATUS0,
2059                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
2060        int ret;
2061
2062        nand_dbg_print(NAND_DBG_DEBUG, "%s, Line %d, Function: %s\n",
2063                       __FILE__, __LINE__, __func__);
2064
2065        status = Boundary_Check_Block_Page(block, page, page_count);
2066
2067        if (page_count < 2)
2068                status = FAIL;
2069
2070        if (status != PASS)
2071                return status;
2072
2073        flash_add = (u64)(block % (DeviceInfo.wTotalBlocks / totalUsedBanks))
2074                * DeviceInfo.wBlockDataSize +
2075                (u64)page * DeviceInfo.wPageDataSize;
2076
2077        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
2078
2079        intr_status = intr_status_addresses[flash_bank];
2080        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
2081
2082        iowrite32(1, FlashReg + DMA_ENABLE);
2083        while (!(ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
2084                ;
2085
2086        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
2087
2088        /* Fill the mrst_nand_info structure */
2089        info.state = INT_PIPELINE_WRITE_AHEAD;
2090        info.write_data = write_data;
2091        info.flash_bank = flash_bank;
2092        info.block = block;
2093        info.page = page;
2094        info.ret = PASS;
2095
2096        index_addr((u32)(MODE_10 | (flash_bank << 24) |
2097                (flash_add >> DeviceInfo.nBitsInPageDataSize)), 0x42);
2098
2099        ddma_trans(write_data, flash_add, flash_bank, 1, NumPages);
2100
2101        iowrite32(1, FlashReg + GLOBAL_INT_ENABLE); /* Enable interrupt */
2102
2103        ret = wait_for_completion_timeout(&info.complete, 10 * HZ);
2104        if (!ret) {
2105                printk(KERN_ERR "Wait for completion timeout in %s, Line %d\n",
2106                        __FILE__, __LINE__);
2107                status = ERR;
2108        } else {
2109                status = info.ret;
2110        }
2111
2112        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
2113
2114        iowrite32(0, FlashReg + DMA_ENABLE);
2115        while ((ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
2116                ;
2117
2118        return status;
2119}
2120
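    /*
     * NAND_Multiplane_Write - program page_count pages with the
     * controller in multi-plane mode.  Polls the bank's interrupt status
     * for DMA completion and records any PROGRAM_FAIL events in the
     * returned status; no interrupt is used here.
     */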
2121/* Un-tested function */
2122u16 NAND_Multiplane_Write(u8 *write_data, u32 block, u16 page,
2123                             u16 page_count)
2124{
2125        u16 status = PASS;
2126        u32 NumPages = page_count;
2127        u64 flash_add;
2128        u32 flash_bank;
2129        u32 intr_status = 0;
2130        u32 intr_status_addresses[4] = {INTR_STATUS0,
2131                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
2132        u16 status2 = PASS;
2133        u32 t;
2134
2135        nand_dbg_print(NAND_DBG_WARN, "%s, Line %d, Function: %s\n",
2136                       __FILE__, __LINE__, __func__);
2137
2138        status = Boundary_Check_Block_Page(block, page, page_count);
2139        if (status != PASS)
2140                return status;
2141
2142        flash_add = (u64)(block % (DeviceInfo.wTotalBlocks / totalUsedBanks))
2143                * DeviceInfo.wBlockDataSize +
2144                (u64)page * DeviceInfo.wPageDataSize;
2145
2146        flash_bank = block / (DeviceInfo.wTotalBlocks / totalUsedBanks);
2147
2148        intr_status = intr_status_addresses[flash_bank];
2149        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
2150
2151        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
2152        iowrite32(0x01, FlashReg + MULTIPLANE_OPERATION);
2153
2154        iowrite32(1, FlashReg + DMA_ENABLE);
2155        while (!(ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
2156                ;
2157
2158        iowrite32(0, FlashReg + TRANSFER_SPARE_REG);
2159
2160        index_addr((u32)(MODE_10 | (flash_bank << 24) |
2161                (flash_add >> DeviceInfo.nBitsInPageDataSize)), 0x42);
2162
2163        ddma_trans(write_data, flash_add, flash_bank, 1, NumPages);
2164
2165        while (1) {
2166                while (!ioread32(FlashReg + intr_status))
2167                        ;
2168
2169                if (ioread32(FlashReg + intr_status) &
2170                        INTR_STATUS0__DMA_CMD_COMP) {
2171                        iowrite32(INTR_STATUS0__DMA_CMD_COMP,
2172                                FlashReg + intr_status);
2173                        status = PASS;
2174                        if (status2 == FAIL)
2175                                status = FAIL;
2176                        break;
2177                } else if (ioread32(FlashReg + intr_status) &
2178                                INTR_STATUS0__PROGRAM_FAIL) {
2179                        status2 = FAIL;
2180                        status = FAIL;
2181                        t = ioread32(FlashReg + intr_status) &
2182                                INTR_STATUS0__PROGRAM_FAIL;
2183                        iowrite32(t, FlashReg + intr_status);
2184                } else {
2185                        iowrite32((~INTR_STATUS0__PROGRAM_FAIL) &
2186                                (~INTR_STATUS0__DMA_CMD_COMP),
2187                                FlashReg + intr_status);
2188                }
2189        }
2190
2191        iowrite32(ioread32(FlashReg + intr_status), FlashReg + intr_status);
2192
2193        iowrite32(0, FlashReg + DMA_ENABLE);
2194
2195        while ((ioread32(FlashReg + DMA_ENABLE) & DMA_ENABLE__FLAG))
2196                ;
2197
2198        iowrite32(0, FlashReg + MULTIPLANE_OPERATION);
2199
2200        return status;
2201}
2202
2203
2204#if CMD_DMA
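    /*
     * cdma_isr - interrupt handler used in command-DMA mode.  Checks with
     * is_cdma_interrupt() that the event really came from the CDMA engine,
     * disables further controller interrupts, lets the FTL collect the
     * per-command status and then wakes the waiter on dev->complete.
     */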
2205static irqreturn_t cdma_isr(int irq, void *dev_id)
2206{
2207        struct mrst_nand_info *dev = dev_id;
2208        int first_failed_cmd;
2209
2210        nand_dbg_print(NAND_DBG_DEBUG, "%s, Line %d, Function: %s\n",
2211                       __FILE__, __LINE__, __func__);
2212
2213        if (!is_cdma_interrupt())
2214                return IRQ_NONE;
2215
2216        /* Disable controller interrupts */
2217        iowrite32(0, FlashReg + GLOBAL_INT_ENABLE);
2218        GLOB_FTL_Event_Status(&first_failed_cmd);
2219        complete(&dev->complete);
2220
2221        return IRQ_HANDLED;
2222}
2223#else
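    /*
     * handle_nand_int_read - called from ddma_isr() to finish a read.
     * With ECC enabled it consumes both the ECC transaction and the DMA
     * completion events, running do_ecc_new() if an ECC error was
     * flagged; with ECC disabled only DMA completion is expected.  The
     * result is left in dev->ret.
     */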
2224static void handle_nand_int_read(struct mrst_nand_info *dev)
2225{
2226        u32 intr_status_addresses[4] = {INTR_STATUS0,
2227                INTR_STATUS1, INTR_STATUS2, INTR_STATUS3};
2228        u32 intr_status;
2229        u32 ecc_done_OR_dma_comp = 0;
2230
2231        nand_dbg_print(NAND_DBG_DEBUG, "%s, Line %d, Function: %s\n",
2232                       __FILE__, __LINE__, __func__);
2233
2234        dev->ret = PASS;
2235        intr_status = intr_status_addresses[dev->flash_bank];
2236
2237        while (1) {
2238                if (enable_ecc) {
2239                        if (ioread32(FlashReg + intr_status) &
2240                                INTR_STATUS0__ECC_ERR) {
2241                                iowrite32(INTR_STATUS0__ECC_ERR,
2242                                        FlashReg + intr_status);
2243                                dev->ret = do_ecc_new(dev->flash_bank,
2244                                                dev->read_data,
2245                                                dev->block, dev->page);
2246                        } else if (ioread32(FlashReg + intr_status) &
2247                                INTR_STATUS0__DMA_CMD_COMP) {
2248                                iowrite32(INTR_STATUS0__DMA_CMD_COMP,
2249                                        FlashReg + intr_status);
2250                                if (1 == ecc_done_OR_dma_comp)
2251                                        break;
2252                                ecc_done_OR_dma_comp = 1;
2253                        } else if (ioread32(FlashReg + intr_status) &
2254                                INTR_STATUS0__ECC_TRANSACTION_DONE) {
2255                                iowrite32(INTR_STATUS0__ECC_TRANSACTION_DONE,
2256                                        FlashReg + intr_status);
2257                                if (1 == ecc_done_OR_dma_comp)
2258                                        break;
2259                                ecc_done_OR_dma_comp = 1;
2260                        }
2261                } else {
2262                        if (ioread32(FlashReg + intr_status) &
2263                                INTR_STATUS0__DMA_CMD_COMP) {
2264                                iowrite32(INTR_STATUS0__DMA_CMD_COMP,
2265                                        FlashReg + intr_status);
2266                                break;
2267                        } else {
2268                                printk(KERN_ERR "Illegal INTS (offset addr 0x%x) value: 0x%x\n",
2269                                        intr_status,
2270                                        ioread32(FlashReg + intr_status));
2272                        }
2273                }
2274
2275                iowrite32((~INTR_STATUS0__ECC_ERR) &
2276                (~INTR_STATUS0__ECC_TRANSACTION_DONE) &
2277                (~INTR_STATUS0__DMA_CMD_COMP),
2278                FlashReg + intr_status);
2279        }
2280}
2281
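    /*
     * handle_nand_int_write - called from ddma_isr() to finish a write.
     * Waits for DMA completion and turns any PROGRAM_FAIL seen on the
     * way into dev->ret = FAIL.
     */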
2282static void handle_nand_int_write(struct mrst_nand_info *dev)
2283{
2284        u32 intr_status;
2285        u32 intr[4] = {INTR_STATUS0, INTR_STATUS1,
2286                INTR_STATUS2, INTR_STATUS3};
2287        int status = PASS;
2288
2289        nand_dbg_print(NAND_DBG_DEBUG, "%s, Line %d, Function: %s\n",
2290                       __FILE__, __LINE__, __func__);
2291
2292        dev->ret = PASS;
2293        intr_status = intr[dev->flash_bank];
2294
2295        while (1) {
2296                while (!ioread32(FlashReg + intr_status))
2297                        ;
2298
2299                if (ioread32(FlashReg + intr_status) &
2300                        INTR_STATUS0__DMA_CMD_COMP) {
2301                        iowrite32(INTR_STATUS0__DMA_CMD_COMP,
2302                                FlashReg + intr_status);
2303                        if (FAIL == status)
2304                                dev->ret = FAIL;
2305                        break;
2306                } else if (ioread32(FlashReg + intr_status) &
2307                        INTR_STATUS0__PROGRAM_FAIL) {
2308                        status = FAIL;
2309                        iowrite32(INTR_STATUS0__PROGRAM_FAIL,
2310                                FlashReg + intr_status);
2311                } else {
2312                        iowrite32((~INTR_STATUS0__PROGRAM_FAIL) &
2313                                (~INTR_STATUS0__DMA_CMD_COMP),
2314                                FlashReg + intr_status);
2315                }
2316        }
2317}
2318
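    /*
     * ddma_isr - interrupt handler used in direct DMA mode.  Interrupts
     * that carry none of the expected status bits for the active bank are
     * acknowledged and ignored.  Otherwise controller interrupts are
     * disabled, the read or write helper for the current operation
     * (dev->state) consumes the status, and dev->complete is signalled.
     */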
2319static irqreturn_t ddma_isr(int irq, void *dev_id)
2320{
2321        struct mrst_nand_info *dev = dev_id;
2322        u32 int_mask, ints0, ints1, ints2, ints3, ints_offset;
2323        u32 intr[4] = {INTR_STATUS0, INTR_STATUS1,
2324                INTR_STATUS2, INTR_STATUS3};
2325
2326        int_mask = INTR_STATUS0__DMA_CMD_COMP |
2327                INTR_STATUS0__ECC_TRANSACTION_DONE |
2328                INTR_STATUS0__ECC_ERR |
2329                INTR_STATUS0__PROGRAM_FAIL |
2330                INTR_STATUS0__ERASE_FAIL;
2331
2332        ints0 = ioread32(FlashReg + INTR_STATUS0);
2333        ints1 = ioread32(FlashReg + INTR_STATUS1);
2334        ints2 = ioread32(FlashReg + INTR_STATUS2);
2335        ints3 = ioread32(FlashReg + INTR_STATUS3);
2336
2337        ints_offset = intr[dev->flash_bank];
2338
2339        nand_dbg_print(NAND_DBG_DEBUG,
2340                "INTR0: 0x%x, INTR1: 0x%x, INTR2: 0x%x, INTR3: 0x%x, "
2341                "DMA_INTR: 0x%x, "
2342                "dev->state: 0x%x, dev->flash_bank: %d\n",
2343                ints0, ints1, ints2, ints3,
2344                ioread32(FlashReg + DMA_INTR),
2345                dev->state, dev->flash_bank);
2346
2347        if (!(ioread32(FlashReg + ints_offset) & int_mask)) {
2348                iowrite32(ints0, FlashReg + INTR_STATUS0);
2349                iowrite32(ints1, FlashReg + INTR_STATUS1);
2350                iowrite32(ints2, FlashReg + INTR_STATUS2);
2351                iowrite32(ints3, FlashReg + INTR_STATUS3);
2352                nand_dbg_print(NAND_DBG_WARN,
2353                        "ddma_isr: Invalid interrupt for NAND controller. "
2354                        "Ignore it\n");
2355                return IRQ_NONE;
2356        }
2357
2358        switch (dev->state) {
2359        case INT_READ_PAGE_MAIN:
2360        case INT_PIPELINE_READ_AHEAD:
2361                /* Disable controller interrupts */
2362                iowrite32(0, FlashReg + GLOBAL_INT_ENABLE);
2363                handle_nand_int_read(dev);
2364                break;
2365        case INT_WRITE_PAGE_MAIN:
2366        case INT_PIPELINE_WRITE_AHEAD:
2367                iowrite32(0, FlashReg + GLOBAL_INT_ENABLE);
2368                handle_nand_int_write(dev);
2369                break;
2370        default:
2371                printk(KERN_ERR "ddma_isr - Illegal state: 0x%x\n",
2372                        dev->state);
2373                return IRQ_NONE;
2374        }
2375
2376        dev->state = INT_IDLE_STATE;
2377        complete(&dev->complete);
2378        return IRQ_HANDLED;
2379}
2380#endif
2381
2382static const struct pci_device_id nand_pci_ids[] = {
2383        {
2384         .vendor = 0x8086,
2385         .device = 0x0809,
2386         .subvendor = PCI_ANY_ID,
2387         .subdevice = PCI_ANY_ID,
2388         },
2389        { /* end: all zeroes */ }
2390};
2391
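    /*
     * nand_pci_probe - bring up the NAND controller: map the register and
     * data windows, reset the flash, program the interrupt masks, enable
     * ECC, map the PCI BAR, hook up the ISR (cdma_isr or ddma_isr
     * depending on CMD_DMA), read the device ID and register the FTL.
     */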
2392static int nand_pci_probe(struct pci_dev *dev, const struct pci_device_id *id)
2393{
2394        int ret = -ENODEV;
2395        unsigned long csr_base;
2396        unsigned long csr_len;
2397        struct mrst_nand_info *pndev = &info;
2398        u32 int_mask;
2399
2400        ret = pci_enable_device(dev);
2401        if (ret) {
2402                printk(KERN_ERR "Spectra: pci_enable_device failed.\n");
2403                return ret;
2404        }
2405
2406        nand_dbg_print(NAND_DBG_WARN, "%s, Line %d, Function: %s\n",
2407                       __FILE__, __LINE__, __func__);
2408
2409        FlashReg = ioremap_nocache(GLOB_HWCTL_REG_BASE,
2410                        GLOB_HWCTL_REG_SIZE);
2411        if (!FlashReg) {
2412                printk(KERN_ERR "Spectra: ioremap_nocache failed!\n");
2413                goto failed_disable;
2414        }
2415        nand_dbg_print(NAND_DBG_WARN,
2416                "Spectra: Remapped reg base address: "
2417                "0x%p, len: %d\n",
2418                FlashReg, GLOB_HWCTL_REG_SIZE);
2419
2420        FlashMem = ioremap_nocache(GLOB_HWCTL_MEM_BASE,
2421                        GLOB_HWCTL_MEM_SIZE);
2422        if (!FlashMem) {
2423                printk(KERN_ERR "Spectra: ioremap_nocache failed!\n");
2424                iounmap(FlashReg);
2425                goto failed_disable;
2426        }
2427        nand_dbg_print(NAND_DBG_WARN,
2428                "Spectra: Remapped flash base address: "
2429                "0x%p, len: %d\n",
2430                (void *)FlashMem, GLOB_HWCTL_MEM_SIZE);
2431
2432        nand_dbg_print(NAND_DBG_DEBUG, "Dump timing register values: "
2433                        "acc_clks: %d, re_2_we: %d, we_2_re: %d, "
2434                        "addr_2_data: %d, rdwr_en_lo_cnt: %d, "
2435                        "rdwr_en_hi_cnt: %d, cs_setup_cnt: %d\n",
2436                        ioread32(FlashReg + ACC_CLKS),
2437                        ioread32(FlashReg + RE_2_WE),
2438                        ioread32(FlashReg + WE_2_RE),
2439                        ioread32(FlashReg + ADDR_2_DATA),
2440                        ioread32(FlashReg + RDWR_EN_LO_CNT),
2441                        ioread32(FlashReg + RDWR_EN_HI_CNT),
2442                        ioread32(FlashReg + CS_SETUP_CNT));
2443
2444        NAND_Flash_Reset();
2445
2446        iowrite32(0, FlashReg + GLOBAL_INT_ENABLE);
2447
2448#if CMD_DMA
2449        info.pcmds_num = 0;
2450        info.flash_bank = 0;
2451        info.cdma_num = 0;
2452        int_mask = (DMA_INTR__DESC_COMP_CHANNEL0 |
2453                DMA_INTR__DESC_COMP_CHANNEL1 |
2454                DMA_INTR__DESC_COMP_CHANNEL2 |
2455                DMA_INTR__DESC_COMP_CHANNEL3 |
2456                DMA_INTR__MEMCOPY_DESC_COMP);
2457        iowrite32(int_mask, FlashReg + DMA_INTR_EN);
2458        iowrite32(0xFFFF, FlashReg + DMA_INTR);
2459
2460        int_mask = (INTR_STATUS0__ECC_ERR |
2461                INTR_STATUS0__PROGRAM_FAIL |
2462                INTR_STATUS0__ERASE_FAIL);
2463#else
2464        int_mask = INTR_STATUS0__DMA_CMD_COMP |
2465                INTR_STATUS0__ECC_TRANSACTION_DONE |
2466                INTR_STATUS0__ECC_ERR |
2467                INTR_STATUS0__PROGRAM_FAIL |
2468                INTR_STATUS0__ERASE_FAIL;
2469#endif
2470        iowrite32(int_mask, FlashReg + INTR_EN0);
2471        iowrite32(int_mask, FlashReg + INTR_EN1);
2472        iowrite32(int_mask, FlashReg + INTR_EN2);
2473        iowrite32(int_mask, FlashReg + INTR_EN3);
2474
2475        /* Clear all status bits */
2476        iowrite32(0xFFFF, FlashReg + INTR_STATUS0);
2477        iowrite32(0xFFFF, FlashReg + INTR_STATUS1);
2478        iowrite32(0xFFFF, FlashReg + INTR_STATUS2);
2479        iowrite32(0xFFFF, FlashReg + INTR_STATUS3);
2480
2481        iowrite32(0x0F, FlashReg + RB_PIN_ENABLED);
2482        iowrite32(CHIP_EN_DONT_CARE__FLAG, FlashReg + CHIP_ENABLE_DONT_CARE);
2483
2484        /* Set up the initial values for these registers at init time */
2485        iowrite32(0, FlashReg + TWO_ROW_ADDR_CYCLES);
2486        iowrite32(1, FlashReg + ECC_ENABLE);
2487        enable_ecc = 1;
2488
2489        pci_set_master(dev);
2490        pndev->dev = dev;
2491
2492        csr_base = pci_resource_start(dev, 0);
2493        if (!csr_base) {
2494                printk(KERN_ERR "Spectra: pci_resource_start failed!\n");
2495                ret = -ENODEV;
2496                goto failed_req_csr;
2497        }
2498
2499        csr_len = pci_resource_len(dev, 0);
2500        if (!csr_len) {
2501                printk(KERN_ERR "Spectra: pci_resource_len failed!\n");
2502                ret = -ENODEV;
2503                goto failed_req_csr;
2504        }
2505
2506        ret = pci_request_regions(dev, SPECTRA_NAND_NAME);
2507        if (ret) {
2508                printk(KERN_ERR "Spectra: Unable to request "
2509                       "memory region\n");
2510                goto failed_req_csr;
2511        }
2512
2513        pndev->ioaddr = ioremap_nocache(csr_base, csr_len);
2514        if (!pndev->ioaddr) {
2515                printk(KERN_ERR "Spectra: Unable to remap memory region\n");
2516                ret = -ENOMEM;
2517                goto failed_remap_csr;
2518        }
2519        nand_dbg_print(NAND_DBG_DEBUG, "Spectra: CSR 0x%08lx -> 0x%p (0x%lx)\n",
2520                       csr_base, pndev->ioaddr, csr_len);
2521
2522        init_completion(&pndev->complete);
2523        nand_dbg_print(NAND_DBG_DEBUG, "Spectra: IRQ %d\n", dev->irq);
2524
2525#if CMD_DMA
2526        if (request_irq(dev->irq, cdma_isr, IRQF_SHARED,
2527                        SPECTRA_NAND_NAME, &info)) {
2528                printk(KERN_ERR "Spectra: Unable to allocate IRQ\n");
2529                ret = -ENODEV;
2530                iounmap(pndev->ioaddr);
2531                goto failed_remap_csr;
2532        }
2533#else
2534        if (request_irq(dev->irq, ddma_isr, IRQF_SHARED,
2535                        SPECTRA_NAND_NAME, &info)) {
2536                printk(KERN_ERR "Spectra: Unable to allocate IRQ\n");
2537                ret = -ENODEV;
2538                iounmap(pndev->ioaddr);
2539                goto failed_remap_csr;
2540        }
2541#endif
2542
2543        pci_set_drvdata(dev, pndev);
2544
2545        ret = GLOB_LLD_Read_Device_ID();
2546        if (ret) {
2547                free_irq(dev->irq, &info);
                    iounmap(pndev->ioaddr);
2548                goto failed_remap_csr;
2549        }
2550
2551        ret = register_spectra_ftl();
2552        if (ret) {
2553                free_irq(dev->irq, &info);
                    iounmap(pndev->ioaddr);
2554                goto failed_remap_csr;
2555        }
2556
2557        return 0;
2558
2559failed_remap_csr:
2560        pci_release_regions(dev);
2561failed_req_csr:
2562        iounmap(FlashMem);
2563        iounmap(FlashReg);
2564failed_disable:
2565        pci_disable_device(dev);
2566
2567        return ret;
2568}
2569
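    /*
     * nand_pci_remove - undo nand_pci_probe() for one device: free the
     * IRQ, unmap the PCI BAR and release/disable the PCI device.
     */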
2570static void nand_pci_remove(struct pci_dev *dev)
2571{
2572        struct mrst_nand_info *pndev = pci_get_drvdata(dev);
2573
2574        nand_dbg_print(NAND_DBG_WARN, "%s, Line %d, Function: %s\n",
2575                       __FILE__, __LINE__, __func__);
2576
2577        /* The IRQ is requested in both CMD_DMA and direct DMA probe paths */
2578        free_irq(dev->irq, pndev);
2580        iounmap(pndev->ioaddr);
2581        pci_release_regions(dev);
2582        pci_disable_device(dev);
2583}
2584
2585MODULE_DEVICE_TABLE(pci, nand_pci_ids);
2586
2587static struct pci_driver nand_pci_driver = {
2588        .name = SPECTRA_NAND_NAME,
2589        .id_table = nand_pci_ids,
2590        .probe = nand_pci_probe,
2591        .remove = nand_pci_remove,
2592};
2593
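    /*
     * NAND_Flash_Init - register the PCI driver.  Returns PASS on success
     * and a non-zero error code if driver registration fails.
     */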
2594int NAND_Flash_Init(void)
2595{
2596        int retval;
2597
2598        nand_dbg_print(NAND_DBG_TRACE, "%s, Line %d, Function: %s\n",
2599                       __FILE__, __LINE__, __func__);
2600
2601        retval = pci_register_driver(&nand_pci_driver);
2602        if (retval)
2603                return retval;
2604
2605        return PASS;
2606}
2607
2608/* Free memory */
2609int nand_release_spectra(void)
2610{
2611        pci_unregister_driver(&nand_pci_driver);
2612        iounmap(FlashMem);
2613        iounmap(FlashReg);
2614
2615        return 0;
2616}
2617