Commit 9719e19a authored by Joanna Farley's avatar Joanna Farley Committed by TrustedFirmware Code Review
Browse files

Merge changes I500ddbe9,I9c10dac9,I53bfff85,I06f7594d,I24bff8d4, ... into integration

* changes:
  nxp lx2160a-aqds: new plat based on soc lx2160a
  NXP lx2160a-rdb: new plat based on SoC lx2160a
  nxp lx2162aqds: new plat based on soc lx2160a
  nxp: errata handling at soc level for lx2160a
  nxp: make file for loading additional ddr image
  nxp: adding support of soc lx2160a
  nxp: deflt hdr files for soc & their platforms
  nxp: platform files for bl2 and bl31 setup
  nxp: warm reset support to retain ddr content
  nxp: nv storage api on platforms
  nxp: supports two mode of trusted board boot
  nxp: fip-handler for additional fip_fuse.bin
  nxp: fip-handler for additional ddr-fip.bin
  nxp: image loader for loading fip image
  nxp: svp & sip smc handling
  nxp: psci platform functions used by lib/psci
  nxp: helper function used by plat & common code
  nxp: add data handler used by bl31
  nxp: adding the driver.mk file
  nxp-tool: for creating pbl file from bl2
  nxp: adding the smmu driver
  nxp: cot using nxp internal and mbedtls
  nxp:driver for crypto h/w accelerator caam
  nxp:add driver support for sd and emmc
  nxp:add qspi driver
  nxp: add flexspi driver support
  nxp: adding gic apis for nxp soc
  nxp: gpio driver support
  nxp: added csu driver
  nxp: driver pmu for nxp soc
  nxp: ddr driver enablement for nxp layerscape soc
  nxp: i2c driver support.
  NXP: Driver for NXP Security Monitor
  NXP: SFP driver support for NXP SoC
  NXP: Interconnect API based on ARM CCN-CCI driver
  NXP: TZC API to configure ddr region
  NXP: Timer API added to enable ARM generic timer
  nxp: add dcfg driver
  nxp:add console driver for nxp platform
  tools: add mechanism to allow platform specific image UUID
  tbbr-cot: conditional definition for the macro
  tbbr-cot: fix the issue of compiling time define
  cert_create: updated tool for platform defined certs, keys & extensions
  tbbr-tools: enable override TRUSTED_KEY_CERT
parents b59444ea f359a382
/*
* Copyright 2020 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#include <common/debug.h>
#include "dcfg.h"
#include <lib/mmio.h>
#ifdef NXP_SFP_ENABLED
#include <sfp.h>
#endif
/* Cached SVR decode and DEVDISR5 decode; populated lazily on first query. */
static soc_info_t soc_info = {0};
static devdisr5_info_t devdisr5_info = {0};
/* Platform-supplied DCFG configuration; set once by dcfg_init(). */
static dcfg_init_info_t *dcfg_init_info;
/*
 * Read the PORSR1 register.
 * The value is read from hardware once and cached in dcfg_init_info;
 * later calls return the cached copy (0 means "not read yet").
 */
uint32_t read_reg_porsr1(void)
{
	if (dcfg_init_info->porsr1 == 0U) {
		unsigned int *reg = (void *)
			(dcfg_init_info->g_nxp_dcfg_addr + DCFG_PORSR1_OFFSET);

		dcfg_init_info->porsr1 = gur_in32(reg);
	}

	return dcfg_init_info->porsr1;
}
/*
 * Decode the SVR (System Version Register) into soc_info.
 * Decoding happens once; subsequent calls return the cached structure.
 */
const soc_info_t *get_soc_info(void)
{
	uint32_t svr;

	if (!soc_info.is_populated) {
		svr = gur_in32(dcfg_init_info->g_nxp_dcfg_addr +
			       DCFG_SVR_OFFSET);

		soc_info.mfr_id = (svr & SVR_MFR_ID_MASK) >> SVR_MFR_ID_SHIFT;
#if defined(CONFIG_CHASSIS_3_2)
		soc_info.family = (svr & SVR_FAMILY_MASK) >> SVR_FAMILY_SHIFT;
		soc_info.dev_id = (svr & SVR_DEV_ID_MASK) >> SVR_DEV_ID_SHIFT;
#endif
		/* A zero SEC field means the SEC block is enabled. */
		soc_info.sec_enabled =
			(((svr & SVR_SEC_MASK) >> SVR_SEC_SHIFT) == 0) ?
				true : false;
		soc_info.personality = (svr & SVR_PERSONALITY_MASK)
					>> SVR_PERSONALITY_SHIFT;
		soc_info.maj_ver = (svr & SVR_MAJ_VER_MASK) >> SVR_MAJ_VER_SHIFT;
		soc_info.min_ver = svr & SVR_MIN_VER_MASK;
		soc_info.is_populated = true;
	}

	return (const soc_info_t *)&soc_info;
}
/*
 * Record the platform's DCFG configuration and warm the PORSR1 and
 * SoC-info caches so later queries never touch hardware lazily.
 */
void dcfg_init(dcfg_init_info_t *dcfg_init_data)
{
	dcfg_init_info = dcfg_init_data;

	(void)read_reg_porsr1();
	(void)get_soc_info();
}
/* Report whether the SEC block was decoded as enabled from the SVR.
 * Valid only after dcfg_init()/get_soc_info() has populated soc_info. */
bool is_sec_enabled(void)
{
	return soc_info.sec_enabled;
}
/*
 * Decode DEVDISR5 to report which memory blocks (DDR controller(s),
 * OCRAM) are present: a set disable bit means the block is absent.
 * The decode is cached after the first hardware read.
 *
 * Fixes: the cache-hit early return was an unbraced if-body, unlike
 * every other function in this file; masks are now compared explicitly
 * instead of relying on implicit int-to-bool conversion.
 */
const devdisr5_info_t *get_devdisr5_info(void)
{
	uint32_t reg;

	if (devdisr5_info.is_populated == true) {
		return (const devdisr5_info_t *)&devdisr5_info;
	}

	reg = gur_in32(dcfg_init_info->g_nxp_dcfg_addr + DCFG_DEVDISR5_OFFSET);

#if defined(CONFIG_CHASSIS_3_2)
	devdisr5_info.ddrc1_present = ((reg & DISR5_DDRC1_MASK) != 0U) ? 0 : 1;
	devdisr5_info.ddrc2_present = ((reg & DISR5_DDRC2_MASK) != 0U) ? 0 : 1;
	devdisr5_info.ocram_present = ((reg & DISR5_OCRAM_MASK) != 0U) ? 0 : 1;
#elif defined(CONFIG_CHASSIS_2)
	devdisr5_info.ddrc1_present = ((reg & DISR5_DDRC1_MASK) != 0U) ? 0 : 1;
	devdisr5_info.ocram_present = ((reg & DISR5_OCRAM_MASK) != 0U) ? 0 : 1;
#endif
	devdisr5_info.is_populated = true;

	return (const devdisr5_info_t *)&devdisr5_info;
}
/*
 * Fill @sys with platform and DDR PLL frequencies derived from the
 * RCW-reported PLL ratios.  Returns 1 when the platform frequency
 * computes to zero (invalid RCW/clock data), 0 otherwise.
 */
int get_clocks(struct sysinfo *sys)
{
	unsigned int *rcwsr0 =
		(void *)(dcfg_init_info->g_nxp_dcfg_addr + RCWSR0_OFFSET);
	const unsigned long sysclk = dcfg_init_info->nxp_sysclk_freq;
	const unsigned long ddrclk = dcfg_init_info->nxp_ddrclk_freq;
	unsigned long plat_ratio, mem_ratio, mem2_ratio;

	/* Each ratio field comes from a separate RCWSR0 read, as before. */
	plat_ratio = (gur_in32(rcwsr0) >> RCWSR0_SYS_PLL_RAT_SHIFT) &
			RCWSR0_SYS_PLL_RAT_MASK;
	mem_ratio = (gur_in32(rcwsr0) >> RCWSR0_MEM_PLL_RAT_SHIFT) &
			RCWSR0_MEM_PLL_RAT_MASK;
	mem2_ratio = (gur_in32(rcwsr0) >> RCWSR0_MEM2_PLL_RAT_SHIFT) &
			RCWSR0_MEM2_PLL_RAT_MASK;

	sys->freq_platform = (sysclk * plat_ratio) /
				dcfg_init_info->nxp_plat_clk_divider;
	sys->freq_ddr_pll0 = ddrclk * mem_ratio;
	sys->freq_ddr_pll1 = ddrclk * mem2_ratio;

	return (sys->freq_platform == 0) ? 1 : 0;
}
#ifdef NXP_SFP_ENABLED
/*******************************************************************************
 * Returns true if secure boot is enabled on the board.
 * mode = 0 (development mode - sb_en = 1)
 * mode = 1 (production mode - ITS = 1)
 ******************************************************************************/
bool check_boot_mode_secure(uint32_t *mode)
{
	uint32_t *rcwsr;
	uint32_t sb_en;

	*mode = 0U;

	/* ITS fuse blown: production secure boot, no RCW check needed. */
	if (sfp_check_its() == 1) {
		*mode = 1U;
		return true;
	}

	rcwsr = (void *)(dcfg_init_info->g_nxp_dcfg_addr + RCWSR_SB_EN_OFFSET);
	sb_en = (gur_in32(rcwsr) >> RCWSR_SBEN_SHIFT) & RCWSR_SBEN_MASK;

	/* SB_EN set in the RCW: development-mode secure boot (*mode stays 0). */
	return (sb_en == RCWSR_SBEN_MASK);
}
#endif
/*
 * Log a fuse-provisioning error and stash the code in the DCFG
 * SCRATCH4 register so it survives for post-mortem inspection.
 */
void error_handler(int error_code)
{
	INFO("Error in Fuse Provisioning: %x\n", error_code);

	gur_out32((void *)(dcfg_init_info->g_nxp_dcfg_addr +
			   DCFG_SCRATCH4_OFFSET),
		  error_code);
}
/*
* Copyright 2018-2020 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DCFG_H
#define DCFG_H

/* Make the header self-contained: it uses bool, uint8_t and uintptr_t. */
#include <stdbool.h>
#include <stdint.h>

#include <endian.h>

#if defined(CONFIG_CHASSIS_2)
#include <dcfg_lsch2.h>
#elif defined(CONFIG_CHASSIS_3_2)
#include <dcfg_lsch3.h>
#endif

/* CCSR GUR register accessors honouring the block's endianness. */
#ifdef NXP_GUR_BE
#define gur_in32(a)		bswap32(mmio_read_32((uintptr_t)(a)))
#define gur_out32(a, v)		mmio_write_32((uintptr_t)(a), bswap32(v))
#elif defined(NXP_GUR_LE)
#define gur_in32(a)		mmio_read_32((uintptr_t)(a))
#define gur_out32(a, v)		mmio_write_32((uintptr_t)(a), v)
#else
#error Please define CCSR GUR register endianness
#endif

/* Decoded fields of the SVR (System Version Register). */
typedef struct {
	bool is_populated;	/* true once the SVR has been read & decoded */
	uint8_t mfr_id;
#if defined(CONFIG_CHASSIS_3_2)
	uint8_t family;
	uint8_t dev_id;
#endif
	uint8_t personality;
	bool sec_enabled;	/* true when the SEC (crypto) block is enabled */
	uint8_t maj_ver;
	uint8_t min_ver;
} soc_info_t;

/* Presence of memory blocks as reported by DEVDISR5 (1 = present). */
typedef struct {
	bool is_populated;
	uint8_t ocram_present;
	uint8_t ddrc1_present;
#if defined(CONFIG_CHASSIS_3_2)
	uint8_t ddrc2_present;
#endif
} devdisr5_info_t;

/* Platform-supplied configuration consumed by the DCFG driver. */
typedef struct {
	uint32_t porsr1;		/* cached PORSR1; 0 = not read yet */
	uintptr_t g_nxp_dcfg_addr;	/* base address of the DCFG block */
	unsigned long nxp_sysclk_freq;
	unsigned long nxp_ddrclk_freq;
	unsigned int nxp_plat_clk_divider;
} dcfg_init_info_t;

struct sysinfo {
	unsigned long freq_platform;
	unsigned long freq_ddr_pll0;
	unsigned long freq_ddr_pll1;
};

int get_clocks(struct sysinfo *sys);

/* Read the PORSR1 register */
uint32_t read_reg_porsr1(void);

/*******************************************************************************
 * Returns true if secure boot is enabled on board
 * mode = 0 (development mode - sb_en = 1)
 * mode = 1 (production mode - ITS = 1)
 ******************************************************************************/
bool check_boot_mode_secure(uint32_t *mode);

/*
 * Prototypes now use (void): the old empty parentheses declared
 * unchecked (K&R) parameter lists, inconsistent with the definitions.
 */
const soc_info_t *get_soc_info(void);
const devdisr5_info_t *get_devdisr5_info(void);
void dcfg_init(dcfg_init_info_t *dcfg_init_data);
bool is_sec_enabled(void);
void error_handler(int error_code);

#endif /* DCFG_H */
#
# Copyright 2020 NXP
#
# SPDX-License-Identifier: BSD-3-Clause
#

# ADD_DCFG doubles as an include guard so this fragment is processed once.
ifeq (${ADD_DCFG},)
ADD_DCFG := 1
DCFG_DRIVERS_PATH := ${PLAT_DRIVERS_PATH}/dcfg
PLAT_INCLUDES += -I$(DCFG_DRIVERS_PATH)
DCFG_SOURCES += $(DCFG_DRIVERS_PATH)/dcfg.c
# Attach the driver sources to the requested BL stage(s): either the
# common source list, or per-stage BL2/BL31 lists.
ifeq (${BL_COMM_DCFG_NEEDED},yes)
BL_COMMON_SOURCES += ${DCFG_SOURCES}
else
ifeq (${BL2_DCFG_NEEDED},yes)
BL2_SOURCES += ${DCFG_SOURCES}
endif
ifeq (${BL31_DCFG_NEEDED},yes)
BL31_SOURCES += ${DCFG_SOURCES}
endif
endif
endif
/*
* Copyright 2020 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DCFG_LSCH2_H
#define DCFG_LSCH2_H

/* dcfg block register offsets and bitfields (Layerscape chassis 2) */
#define DCFG_PORSR1_OFFSET 0x00
#define DCFG_DEVDISR1_OFFSET 0x070
#define DCFG_DEVDISR4_OFFSET 0x07C
#define DCFG_DEVDISR5_OFFSET 0x080
#define DCFG_COREDISR_OFFSET 0x094
#define RCWSR0_OFFSET 0x100
#define RCWSR5_OFFSET 0x118
#define DCFG_BOOTLOCPTRL_OFFSET 0x400
#define DCFG_BOOTLOCPTRH_OFFSET 0x404
#define DCFG_COREDISABLEDSR_OFFSET 0x990
#define DCFG_SCRATCH4_OFFSET 0x20C
#define DCFG_SVR_OFFSET 0x0A4
#define DCFG_BRR_OFFSET 0x0E4
#define DCFG_RSTCR_OFFSET 0x0B0
#define RSTCR_RESET_REQ 0x2
#define DCFG_RSTRQSR1_OFFSET 0x0C8
#define DCFG_RSTRQMR1_OFFSET 0x0C0

/* DCFG DCSR Macros */
#define DCFG_DCSR_PORCR1_OFFSET 0x0

/* SVR (System Version Register) field masks and shifts */
#define SVR_MFR_ID_MASK 0xF0000000
#define SVR_MFR_ID_SHIFT 28
#define SVR_FAMILY_MASK 0xF000000
#define SVR_FAMILY_SHIFT 24
#define SVR_DEV_ID_MASK 0x3F0000
#define SVR_DEV_ID_SHIFT 16
#define SVR_PERSONALITY_MASK 0x3E00
#define SVR_PERSONALITY_SHIFT 9
#define SVR_SEC_MASK 0x100
#define SVR_SEC_SHIFT 8
#define SVR_MAJ_VER_MASK 0xF0
#define SVR_MAJ_VER_SHIFT 4
#define SVR_MIN_VER_MASK 0xF

/* DEVDISR5 disable bits: set bit means block disabled/absent */
#define DISR5_DDRC1_MASK 0x1
#define DISR5_OCRAM_MASK 0x40

/* DCFG registers bit masks */
#define RCWSR0_SYS_PLL_RAT_SHIFT 25
#define RCWSR0_SYS_PLL_RAT_MASK 0x1f
#define RCWSR0_MEM_PLL_RAT_SHIFT 16
#define RCWSR0_MEM_PLL_RAT_MASK 0x3f
#define RCWSR0_MEM2_PLL_RAT_SHIFT 18
#define RCWSR0_MEM2_PLL_RAT_MASK 0x3f

/* Secure-boot enable bit lives in RCWSR5 on chassis 2 */
#define RCWSR_SB_EN_OFFSET RCWSR5_OFFSET
#define RCWSR_SBEN_MASK 0x1
#define RCWSR_SBEN_SHIFT 21

/* RCW SRC NAND */
#define RCW_SRC_NAND_MASK (0x100)
#define RCW_SRC_NAND_VAL (0x100)
#define NAND_RESERVED_MASK (0xFC)
#define NAND_RESERVED_1 (0x0)
#define NAND_RESERVED_2 (0x80)

/* RCW SRC NOR */
#define RCW_SRC_NOR_MASK (0x1F0)
#define NOR_8B_VAL (0x10)
#define NOR_16B_VAL (0x20)
#define SD_VAL (0x40)
#define QSPI_VAL1 (0x44)
#define QSPI_VAL2 (0x45)

#endif /* DCFG_LSCH2_H */
/*
* Copyright 2020 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DCFG_LSCH3_H
#define DCFG_LSCH3_H

/* dcfg block register offsets and bitfields (Layerscape chassis 3.2) */
#define DCFG_PORSR1_OFFSET 0x00
#define DCFG_DEVDISR1_OFFSET 0x70
#define DCFG_DEVDISR1_SEC (1 << 22)
#define DCFG_DEVDISR2_OFFSET 0x74
#define DCFG_DEVDISR3_OFFSET 0x78
#define DCFG_DEVDISR3_QBMAIN (1 << 12)
#define DCFG_DEVDISR4_OFFSET 0x7C
#define DCFG_DEVDISR4_SPI_QSPI (1 << 4 | 1 << 5)
#define DCFG_DEVDISR5_OFFSET 0x80

/* DEVDISR5 disable bits: set bit means block disabled/absent */
#define DISR5_DDRC1_MASK 0x1
#define DISR5_DDRC2_MASK 0x2
#define DISR5_OCRAM_MASK 0x1000
#define DEVDISR5_MASK_ALL_MEM 0x00001003
#define DEVDISR5_MASK_DDR 0x00000003
#define DEVDISR5_MASK_DBG 0x00000400
#define DCFG_DEVDISR6_OFFSET 0x84
//#define DEVDISR6_MASK 0x00000001
#define DCFG_COREDISR_OFFSET 0x94

/* SVR (System Version Register) field masks and shifts */
#define DCFG_SVR_OFFSET 0x0A4
#define SVR_MFR_ID_MASK 0xF0000000
#define SVR_MFR_ID_SHIFT 28
#define SVR_FAMILY_MASK 0xF000000
#define SVR_FAMILY_SHIFT 24
#define SVR_DEV_ID_MASK 0x3F0000
#define SVR_DEV_ID_SHIFT 16
#define SVR_PERSONALITY_MASK 0x3E00
#define SVR_PERSONALITY_SHIFT 9
#define SVR_SEC_MASK 0x100
#define SVR_SEC_SHIFT 8
#define SVR_MAJ_VER_MASK 0xF0
#define SVR_MAJ_VER_SHIFT 4
#define SVR_MIN_VER_MASK 0xF

/* RCW status registers: PLL ratio fields */
#define RCWSR0_OFFSET 0x100
#define RCWSR0_SYS_PLL_RAT_SHIFT 2
#define RCWSR0_SYS_PLL_RAT_MASK 0x1f
#define RCWSR0_MEM_PLL_RAT_SHIFT 10
#define RCWSR0_MEM_PLL_RAT_MASK 0x3f
#define RCWSR0_MEM2_PLL_RAT_SHIFT 18
#define RCWSR0_MEM2_PLL_RAT_MASK 0x3f
#define RCWSR5_OFFSET 0x110
#define RCWSR9_OFFSET 0x120

/* Secure-boot enable bit lives in RCWSR9 on chassis 3.2 */
#define RCWSR_SB_EN_OFFSET RCWSR9_OFFSET
#define RCWSR_SBEN_MASK 0x1
#define RCWSR_SBEN_SHIFT 10
#define RCW_SR27_OFFSET 0x168

/* DCFG register to dump error code */
#define DCFG_SCRATCH4_OFFSET 0x20C
#define DCFG_SCRATCHRW5_OFFSET 0x210
#define DCFG_SCRATCHRW6_OFFSET 0x214
#define DCFG_SCRATCHRW7_OFFSET 0x218
#define DCFG_BOOTLOCPTRL_OFFSET 0x400
#define DCFG_BOOTLOCPTRH_OFFSET 0x404
#define DCFG_COREDISABLEDSR_OFFSET 0x990

#endif /* DCFG_LSCH3_H */
/*
* Copyright 2020 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef SCFG_H
#define SCFG_H

#ifdef CONFIG_CHASSIS_2
/* SCFG register offsets */
#define SCFG_CORE0_SFT_RST_OFFSET 0x0130
#define SCFG_SNPCNFGCR_OFFSET 0x01A4
#define SCFG_CORESRENCR_OFFSET 0x0204
#define SCFG_RVBAR0_0_OFFSET 0x0220
#define SCFG_RVBAR0_1_OFFSET 0x0224
#define SCFG_COREBCR_OFFSET 0x0680
#define SCFG_RETREQCR_OFFSET 0x0424

#define SCFG_COREPMCR_OFFSET 0x042C
#define COREPMCR_WFIL2 0x1

#define SCFG_GIC400_ADDR_ALIGN_OFFSET 0x0188
#define SCFG_BOOTLOCPTRH_OFFSET 0x0600
#define SCFG_BOOTLOCPTRL_OFFSET 0x0604
#define SCFG_SCRATCHRW2_OFFSET 0x0608
#define SCFG_SCRATCHRW3_OFFSET 0x060C

/* SCFG bit fields */
#define SCFG_SNPCNFGCR_SECRDSNP 0x80000000
#define SCFG_SNPCNFGCR_SECWRSNP 0x40000000
#endif /* CONFIG_CHASSIS_2 */

#ifndef __ASSEMBLER__
#include <endian.h>
#include <lib/mmio.h>

/* CCSR SCFG register accessors honouring the block's endianness. */
#ifdef NXP_SCFG_BE
#define scfg_in32(a) bswap32(mmio_read_32((uintptr_t)(a)))
#define scfg_out32(a, v) mmio_write_32((uintptr_t)(a), bswap32(v))
#define scfg_setbits32(a, v) mmio_setbits_32((uintptr_t)(a), v)
#define scfg_clrbits32(a, v) mmio_clrbits_32((uintptr_t)(a), v)
#define scfg_clrsetbits32(a, clear, set) \
	mmio_clrsetbits_32((uintptr_t)(a), clear, set)
/*
 * NOTE(review): the BE branch above tests NXP_SCFG_BE but this LE branch
 * tests NXP_GUR_LE (the GUR macro, not an SCFG one).  Looks asymmetric —
 * confirm against the platform makefiles whether NXP_SCFG_LE was intended
 * before changing it, as builds may rely on NXP_GUR_LE here.
 */
#elif defined(NXP_GUR_LE)
#define scfg_in32(a) mmio_read_32((uintptr_t)(a))
#define scfg_out32(a, v) mmio_write_32((uintptr_t)(a), v)
#define scfg_setbits32(a, v) mmio_setbits_32((uintptr_t)(a), v)
#define scfg_clrbits32(a, v) mmio_clrbits_32((uintptr_t)(a), v)
#define scfg_clrsetbits32(a, clear, set) \
	mmio_clrsetbits_32((uintptr_t)(a), clear, set)
#else
#error Please define CCSR SCFG register endianness
#endif
#endif /* __ASSEMBLER__ */

#endif /* SCFG_H */
#
# Copyright 2021 NXP
#
# SPDX-License-Identifier: BSD-3-Clause
#
#-----------------------------------------------------------------------------
# MMDC ddr cntlr driver files
# Builds the Freescale MMDC controller driver together with the shared
# nxp-ddr utility/ddr/ddrc sources, and exposes their headers.
DDR_DRIVERS_PATH := drivers/nxp/ddr

DDR_CNTLR_SOURCES := ${DDR_DRIVERS_PATH}/fsl-mmdc/fsl_mmdc.c \
		${DDR_DRIVERS_PATH}/nxp-ddr/utility.c \
		${DDR_DRIVERS_PATH}/nxp-ddr/ddr.c \
		${DDR_DRIVERS_PATH}/nxp-ddr/ddrc.c

PLAT_INCLUDES += -I$(DDR_DRIVERS_PATH)/include \
		-I$(DDR_DRIVERS_PATH)/fsl-mmdc
#------------------------------------------------
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
/*
* Generic driver for Freescale MMDC(Multi Mode DDR Controller).
*/
#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <common/debug.h>
#include "ddr_io.h"
#include <drivers/delay_timer.h>
#include <fsl_mmdc.h>
/*
 * Write @value to the register at @ptr, then poll until @bits clear.
 *
 * Fix: the original loop condition tested only the bits, so although
 * `timeout` was decremented it never bounded the loop — a stuck
 * register spun forever.  The loop now also stops when the budget
 * (1000 polls x 100us = ~100ms) is exhausted, and the existing
 * timeout message reports the failure.
 */
static void set_wait_for_bits_clear(void *ptr, unsigned int value,
				    unsigned int bits)
{
	int timeout = 1000;

	ddr_out32(ptr, value);

	while (((ddr_in32(ptr) & bits) != 0) && (timeout > 0)) {
		udelay(100);
		timeout--;
	}

	if (timeout <= 0) {
		INFO("Error: %llx", (unsigned long long)ptr);
		INFO(" wait for clear timeout.\n");
	}
}
/*
 * Bring up the MMDC (Multi Mode DDR Controller) using the board-specific
 * register values in @priv, following the controller's documented init
 * sequence: configuration request, timing/misc programming, chip-select
 * enable, MR writes + ZQ calibration, write-levelling, DQS-gating and
 * read calibrations, then power-down/refresh setup.  The exact statement
 * order is part of the hardware protocol — do not reorder.
 *
 * priv:         pre-computed MMDC register values for this board.
 * nxp_ddr_addr: base address of the MMDC register block.
 */
void mmdc_init(const struct fsl_mmdc_info *priv, uintptr_t nxp_ddr_addr)
{
	struct mmdc_regs *mmdc = (struct mmdc_regs *)nxp_ddr_addr;
	unsigned int tmp;

	/* 1. set configuration request */
	ddr_out32(&mmdc->mdscr, MDSCR_ENABLE_CON_REQ);

	/* 2. configure the desired timing parameters */
	ddr_out32(&mmdc->mdotc, priv->mdotc);
	ddr_out32(&mmdc->mdcfg0, priv->mdcfg0);
	ddr_out32(&mmdc->mdcfg1, priv->mdcfg1);
	ddr_out32(&mmdc->mdcfg2, priv->mdcfg2);

	/* 3. configure DDR type and other miscellaneous parameters */
	ddr_out32(&mmdc->mdmisc, priv->mdmisc);
	ddr_out32(&mmdc->mpmur0, MMDC_MPMUR0_FRC_MSR);
	ddr_out32(&mmdc->mdrwd, priv->mdrwd);
	ddr_out32(&mmdc->mpodtctrl, priv->mpodtctrl);

	/* 4. configure the required delay while leaving reset */
	ddr_out32(&mmdc->mdor, priv->mdor);

	/* 5. configure DDR physical parameters */
	/* set row/column address width, burst length, data bus width;
	 * keep both chip selects disabled until step 7 */
	tmp = priv->mdctl & ~(MDCTL_SDE0 | MDCTL_SDE1);
	ddr_out32(&mmdc->mdctl, tmp);
	/* configure address space partition */
	ddr_out32(&mmdc->mdasp, priv->mdasp);

	/* 6. perform a ZQ calibration - not needed here, doing in #8b */

	/* 7. enable MMDC with the desired chip select */
#if (DDRC_NUM_CS == 1)
	ddr_out32(&mmdc->mdctl, tmp | MDCTL_SDE0);
#elif (DDRC_NUM_CS == 2)
	ddr_out32(&mmdc->mdctl, tmp | MDCTL_SDE0 | MDCTL_SDE1);
#else
#error "Unsupported DDRC_NUM_CS"
#endif

	/* 8a. dram init sequence: update MRs for ZQ, ODT, PRE, etc */
	ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(8) |
				MDSCR_ENABLE_CON_REQ |
				CMD_LOAD_MODE_REG |
				CMD_BANK_ADDR_2);
	ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(0) |
				MDSCR_ENABLE_CON_REQ |
				CMD_LOAD_MODE_REG |
				CMD_BANK_ADDR_3);
	ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(4) |
				MDSCR_ENABLE_CON_REQ |
				CMD_LOAD_MODE_REG |
				CMD_BANK_ADDR_1);
	ddr_out32(&mmdc->mdscr, CMD_ADDR_MSB_MR_OP(0x19) |
				CMD_ADDR_LSB_MR_ADDR(0x30) |
				MDSCR_ENABLE_CON_REQ |
				CMD_LOAD_MODE_REG | CMD_BANK_ADDR_0);

	/* 8b. ZQ calibration */
	ddr_out32(&mmdc->mdscr, CMD_ADDR_MSB_MR_OP(0x4) |
				MDSCR_ENABLE_CON_REQ |
				CMD_ZQ_CALIBRATION | CMD_BANK_ADDR_0);
	set_wait_for_bits_clear(&mmdc->mpzqhwctrl, priv->mpzqhwctrl,
				MPZQHWCTRL_ZQ_HW_FORCE);

	/* 9a. calibrations now, wr lvl */
	ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(0x84) | MDSCR_WL_EN |
				MDSCR_ENABLE_CON_REQ |
				CMD_LOAD_MODE_REG | CMD_BANK_ADDR_1);
	set_wait_for_bits_clear(&mmdc->mpwlgcr, MPWLGCR_HW_WL_EN,
				MPWLGCR_HW_WL_EN);

	mdelay(1);

	ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(4) |
				MDSCR_ENABLE_CON_REQ |
				CMD_LOAD_MODE_REG | CMD_BANK_ADDR_1);
	ddr_out32(&mmdc->mdscr, MDSCR_ENABLE_CON_REQ);

	mdelay(1);

	/* 9b. read DQS gating calibration */
	ddr_out32(&mmdc->mdscr, CMD_ADDR_MSB_MR_OP(4) | MDSCR_ENABLE_CON_REQ |
				CMD_PRECHARGE_BANK_OPEN | CMD_BANK_ADDR_0);
	ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(4) | MDSCR_ENABLE_CON_REQ |
				CMD_LOAD_MODE_REG | CMD_BANK_ADDR_3);
	ddr_out32(&mmdc->mppdcmpr2, MPPDCMPR2_MPR_COMPARE_EN);

	/* set absolute read delay offset */
	if (priv->mprddlctl != 0) {
		ddr_out32(&mmdc->mprddlctl, priv->mprddlctl);
	} else {
		ddr_out32(&mmdc->mprddlctl, MMDC_MPRDDLCTL_DEFAULT_DELAY);
	}

	set_wait_for_bits_clear(&mmdc->mpdgctrl0,
				AUTO_RD_DQS_GATING_CALIBRATION_EN,
				AUTO_RD_DQS_GATING_CALIBRATION_EN);

	ddr_out32(&mmdc->mdscr, MDSCR_ENABLE_CON_REQ | CMD_LOAD_MODE_REG |
				CMD_BANK_ADDR_3);

	/* 9c. read calibration */
	ddr_out32(&mmdc->mdscr, CMD_ADDR_MSB_MR_OP(4) | MDSCR_ENABLE_CON_REQ |
				CMD_PRECHARGE_BANK_OPEN | CMD_BANK_ADDR_0);
	ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(4) | MDSCR_ENABLE_CON_REQ |
				CMD_LOAD_MODE_REG | CMD_BANK_ADDR_3);
	ddr_out32(&mmdc->mppdcmpr2, MPPDCMPR2_MPR_COMPARE_EN);
	set_wait_for_bits_clear(&mmdc->mprddlhwctl,
				MPRDDLHWCTL_AUTO_RD_CALIBRATION_EN,
				MPRDDLHWCTL_AUTO_RD_CALIBRATION_EN);

	ddr_out32(&mmdc->mdscr, MDSCR_ENABLE_CON_REQ | CMD_LOAD_MODE_REG |
				CMD_BANK_ADDR_3);

	/* 10. configure power-down, self-refresh entry, exit parameters */
	ddr_out32(&mmdc->mdpdc, priv->mdpdc);
	ddr_out32(&mmdc->mapsr, MMDC_MAPSR_PWR_SAV_CTRL_STAT);

	/* 11. ZQ config again? do nothing here */

	/* 12. refresh scheme */
	set_wait_for_bits_clear(&mmdc->mdref, priv->mdref,
				MDREF_START_REFRESH);

	/* 13. disable CON_REQ */
	ddr_out32(&mmdc->mdscr, MDSCR_DISABLE_CFG_REQ);
}
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef FSL_MMDC_H
#define FSL_MMDC_H

/* PHY Write Leveling Configuration and Error Status Register (MPWLGCR) */
#define MPWLGCR_HW_WL_EN (1 << 0)

/* PHY Pre-defined Compare and CA delay-line Configuration (MPPDCMPR2) */
#define MPPDCMPR2_MPR_COMPARE_EN (1 << 0)

/* MMDC PHY Read DQS gating control register 0 (MPDGCTRL0) */
#define AUTO_RD_DQS_GATING_CALIBRATION_EN (1 << 28)

/* MMDC PHY Read Delay HW Calibration Control Register (MPRDDLHWCTL) */
#define MPRDDLHWCTL_AUTO_RD_CALIBRATION_EN (1 << 4)

/* MMDC Core Power Saving Control and Status Register (MMDC_MAPSR) */
#define MMDC_MAPSR_PWR_SAV_CTRL_STAT 0x00001067

/* MMDC Core Refresh Control Register (MMDC_MDREF) */
#define MDREF_START_REFRESH (1 << 0)

/* MMDC Core Special Command Register (MDSCR) */
/*
 * Fix: the two address-field macros previously expanded to `x << 24` /
 * `x << 16` without parenthesizing the argument, miscomputing for any
 * expression argument (e.g. `a | b`).  Arguments are now parenthesized.
 */
#define CMD_ADDR_MSB_MR_OP(x) ((x) << 24)
#define CMD_ADDR_LSB_MR_ADDR(x) ((x) << 16)
#define MDSCR_DISABLE_CFG_REQ (0 << 15)
#define MDSCR_ENABLE_CON_REQ (1 << 15)
#define MDSCR_CON_ACK (1 << 14)
#define MDSCR_WL_EN (1 << 9)
#define CMD_NORMAL (0 << 4)
#define CMD_PRECHARGE (1 << 4)
#define CMD_AUTO_REFRESH (2 << 4)
#define CMD_LOAD_MODE_REG (3 << 4)
#define CMD_ZQ_CALIBRATION (4 << 4)
#define CMD_PRECHARGE_BANK_OPEN (5 << 4)
#define CMD_MRR (6 << 4)
#define CMD_BANK_ADDR_0 0x0
#define CMD_BANK_ADDR_1 0x1
#define CMD_BANK_ADDR_2 0x2
#define CMD_BANK_ADDR_3 0x3
#define CMD_BANK_ADDR_4 0x4
#define CMD_BANK_ADDR_5 0x5
#define CMD_BANK_ADDR_6 0x6
#define CMD_BANK_ADDR_7 0x7

/* MMDC Core Control Register (MDCTL) */
#define MDCTL_SDE0 (U(1) << 31)
#define MDCTL_SDE1 (1 << 30)

/* MMDC PHY ZQ HW control register (MMDC_MPZQHWCTRL) */
#define MPZQHWCTRL_ZQ_HW_FORCE (1 << 16)

/* MMDC PHY Measure Unit Register (MMDC_MPMUR0) */
#define MMDC_MPMUR0_FRC_MSR (1 << 11)

/* MMDC PHY Read delay-lines Configuration Register (MMDC_MPRDDLCTL) */
/* default 64 for a quarter cycle delay */
#define MMDC_MPRDDLCTL_DEFAULT_DELAY 0x40404040

/* MMDC register map; layout mirrors the controller's MMIO block. */
struct mmdc_regs {
	unsigned int mdctl;
	unsigned int mdpdc;
	unsigned int mdotc;
	unsigned int mdcfg0;
	unsigned int mdcfg1;
	unsigned int mdcfg2;
	unsigned int mdmisc;
	unsigned int mdscr;
	unsigned int mdref;
	unsigned int res1[2];
	unsigned int mdrwd;
	unsigned int mdor;
	unsigned int mdmrr;
	unsigned int mdcfg3lp;
	unsigned int mdmr4;
	unsigned int mdasp;
	unsigned int res2[239];
	unsigned int maarcr;
	unsigned int mapsr;
	unsigned int maexidr0;
	unsigned int maexidr1;
	unsigned int madpcr0;
	unsigned int madpcr1;
	unsigned int madpsr0;
	unsigned int madpsr1;
	unsigned int madpsr2;
	unsigned int madpsr3;
	unsigned int madpsr4;
	unsigned int madpsr5;
	unsigned int masbs0;
	unsigned int masbs1;
	unsigned int res3[2];
	unsigned int magenp;
	unsigned int res4[239];
	unsigned int mpzqhwctrl;
	unsigned int mpzqswctrl;
	unsigned int mpwlgcr;
	unsigned int mpwldectrl0;
	unsigned int mpwldectrl1;
	unsigned int mpwldlst;
	unsigned int mpodtctrl;
	unsigned int mprddqby0dl;
	unsigned int mprddqby1dl;
	unsigned int mprddqby2dl;
	unsigned int mprddqby3dl;
	unsigned int mpwrdqby0dl;
	unsigned int mpwrdqby1dl;
	unsigned int mpwrdqby2dl;
	unsigned int mpwrdqby3dl;
	unsigned int mpdgctrl0;
	unsigned int mpdgctrl1;
	unsigned int mpdgdlst0;
	unsigned int mprddlctl;
	unsigned int mprddlst;
	unsigned int mpwrdlctl;
	unsigned int mpwrdlst;
	unsigned int mpsdctrl;
	unsigned int mpzqlp2ctl;
	unsigned int mprddlhwctl;
	unsigned int mpwrdlhwctl;
	unsigned int mprddlhwst0;
	unsigned int mprddlhwst1;
	unsigned int mpwrdlhwst0;
	unsigned int mpwrdlhwst1;
	unsigned int mpwlhwerr;
	unsigned int mpdghwst0;
	unsigned int mpdghwst1;
	unsigned int mpdghwst2;
	unsigned int mpdghwst3;
	unsigned int mppdcmpr1;
	unsigned int mppdcmpr2;
	unsigned int mpswdar0;
	unsigned int mpswdrdr0;
	unsigned int mpswdrdr1;
	unsigned int mpswdrdr2;
	unsigned int mpswdrdr3;
	unsigned int mpswdrdr4;
	unsigned int mpswdrdr5;
	unsigned int mpswdrdr6;
	unsigned int mpswdrdr7;
	unsigned int mpmur0;
	unsigned int mpwrcadl;
	unsigned int mpdccr;
};

/* Board-specific register values consumed by mmdc_init(). */
struct fsl_mmdc_info {
	unsigned int mdctl;
	unsigned int mdpdc;
	unsigned int mdotc;
	unsigned int mdcfg0;
	unsigned int mdcfg1;
	unsigned int mdcfg2;
	unsigned int mdmisc;
	unsigned int mdref;
	unsigned int mdrwd;
	unsigned int mdor;
	unsigned int mdasp;
	unsigned int mpodtctrl;
	unsigned int mpzqhwctrl;
	unsigned int mprddlctl;
};

void mmdc_init(const struct fsl_mmdc_info *priv, uintptr_t nxp_ddr_addr);

#endif /* FSL_MMDC_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DDR_H
#define DDR_H

#include "ddr_io.h"
#include "dimm.h"
#include "immap.h"

/* Chip selects per controller; platforms may override before including. */
#ifndef DDRC_NUM_CS
#define DDRC_NUM_CS 4
#endif

/*
 * This is irrespective of what is the number of DDR controller,
 * number of DIMM used. This is set to maximum
 * Max controllers = 2
 * Max num of DIMM per controller = 2
 * MAX NUM CS = 4
 * Not to be changed.
 */
#define MAX_DDRC_NUM 2
#define MAX_DIMM_NUM 2
#define MAX_CS_NUM 4

#include "opts.h"
#include "regs.h"
#include "utility.h"

/* Promote DDR debug output to INFO level only when DDR_DEBUG is set. */
#ifdef DDR_DEBUG
#define debug(...) INFO(__VA_ARGS__)
#else
#define debug(...) VERBOSE(__VA_ARGS__)
#endif

#ifndef DDRC_NUM_DIMM
#define DDRC_NUM_DIMM 1
#endif

#define CONFIG_CS_PER_SLOT \
	(DDRC_NUM_CS / DDRC_NUM_DIMM)

/* Record of register values computed */
struct ddr_cfg_regs {
	struct {
		unsigned int bnds;
		unsigned int config;
		unsigned int config_2;
	} cs[MAX_CS_NUM];
	unsigned int dec[10];
	unsigned int timing_cfg[10];
	unsigned int sdram_cfg[3];
	unsigned int sdram_mode[16];
	unsigned int md_cntl;
	unsigned int interval;
	unsigned int data_init;
	unsigned int clk_cntl;
	unsigned int init_addr;
	unsigned int init_ext_addr;
	unsigned int zq_cntl;
	unsigned int wrlvl_cntl[3];
	unsigned int ddr_sr_cntr;
	unsigned int sdram_rcw[6];
	unsigned int dq_map[4];
	unsigned int eor;
	unsigned int cdr[2];
	unsigned int err_disable;
	unsigned int err_int_en;
	unsigned int tx_cfg[4];
	unsigned int debug[64];
};

/* Memory layout derived from the DIMM(s) detected. */
struct ddr_conf {
	int dimm_in_use[MAX_DIMM_NUM];
	int cs_in_use; /* bitmask, bit 0 for cs0, bit 1 for cs1, etc. */
	int cs_on_dimm[MAX_DIMM_NUM]; /* bitmask */
	unsigned long long cs_base_addr[MAX_CS_NUM];
	unsigned long long cs_size[MAX_CS_NUM];
	unsigned long long base_addr;
	unsigned long long total_mem;
};

/* Top-level state carried through the DDR initialisation flow. */
struct ddr_info {
	unsigned long clk;
	unsigned long long mem_base;
	unsigned int num_ctlrs;
	unsigned int dimm_on_ctlr;
	struct dimm_params dimm;
	struct memctl_opt opt;
	struct ddr_conf conf;
	struct ddr_cfg_regs ddr_reg;
	struct ccsr_ddr *ddr[MAX_DDRC_NUM];
	uint16_t *phy[MAX_DDRC_NUM];
	int *spd_addr;
	unsigned int ip_rev;
	uintptr_t phy_gen2_fw_img_buf;
	void *img_loadr;
	int warm_boot_flag;
};

/* Per-speed-bin clock adjust / write-level values for a raw card. */
struct rc_timing {
	unsigned int speed_bin;
	unsigned int clk_adj;
	unsigned int wrlvl;
};

struct board_timing {
	unsigned int rc;
	struct rc_timing const *p;
	unsigned int add1;
	unsigned int add2;
};

enum warm_boot {
	DDR_COLD_BOOT = 0,
	DDR_WARM_BOOT = 1,
	DDR_WRM_BOOT_NT_SUPPORTED = -1,
};

int disable_unused_ddrc(struct ddr_info *priv, int mask,
			uintptr_t nxp_ccn_hn_f0_addr);
int ddr_board_options(struct ddr_info *priv);
int compute_ddrc(const unsigned long clk,
		 const struct memctl_opt *popts,
		 const struct ddr_conf *conf,
		 struct ddr_cfg_regs *ddr,
		 const struct dimm_params *dimm_params,
		 const unsigned int ip_rev);
int compute_ddr_phy(struct ddr_info *priv);
int ddrc_set_regs(const unsigned long clk,
		  const struct ddr_cfg_regs *regs,
		  const struct ccsr_ddr *ddr,
		  int twopass);
int cal_board_params(struct ddr_info *priv,
		     const struct board_timing *dimm,
		     int len);
/* return bit mask of used DIMM(s) */
int ddr_get_ddr_params(struct dimm_params *pdimm, struct ddr_conf *conf);
long long dram_init(struct ddr_info *priv
#if defined(NXP_HAS_CCN504) || defined(NXP_HAS_CCN508)
		    , uintptr_t nxp_ccn_hn_f0_addr
#endif
		    );
long long board_static_ddr(struct ddr_info *info);

#endif /* DDR_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DDR_IO_H
#define DDR_IO_H

#include <endian.h>

#include <lib/mmio.h>

/* NOTE: classic double-evaluation macros — arguments must be free of
 * side effects (each operand may be evaluated twice). */
#define min(a, b)  (((a) > (b)) ? (b) : (a))
#define max(a, b)  (((a) > (b)) ? (a) : (b))

/* macro for memory barrier */
#define mb()		asm volatile("dsb sy" : : : "memory")

/* CCSR DDR register accessors honouring the controller's endianness. */
#ifdef NXP_DDR_BE
#define ddr_in32(a)			bswap32(mmio_read_32((uintptr_t)(a)))
#define ddr_out32(a, v)			mmio_write_32((uintptr_t)(a),\
							bswap32(v))
#elif defined(NXP_DDR_LE)
#define ddr_in32(a)			mmio_read_32((uintptr_t)(a))
#define ddr_out32(a, v)			mmio_write_32((uintptr_t)(a), v)
#else
#error Please define CCSR DDR register endianness
#endif

/* Read-modify-write helpers built on the accessors above. */
#define ddr_setbits32(a, v)		ddr_out32((a), ddr_in32(a) | (v))
#define ddr_clrbits32(a, v)		ddr_out32((a), ddr_in32(a) & ~(v))
#define ddr_clrsetbits32(a, c, s)	ddr_out32((a), (ddr_in32(a) & ~(c)) \
						| (s))

#endif /* DDR_IO_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DIMM_H
#define DIMM_H

/* SPD byte 2 memory-type code for DDR4 */
#define SPD_MEMTYPE_DDR4        0x0C

/* SPD byte 3 module-type codes (DDR4) */
#define DDR4_SPD_MODULETYPE_MASK        0x0f
#define DDR4_SPD_MODULETYPE_EXT         0x00
#define DDR4_SPD_RDIMM                  0x01
#define DDR4_SPD_UDIMM                  0x02
#define DDR4_SPD_SO_DIMM                0x03
#define DDR4_SPD_LRDIMM                 0x04
#define DDR4_SPD_MINI_RDIMM             0x05
#define DDR4_SPD_MINI_UDIMM             0x06
#define DDR4_SPD_72B_SO_RDIMM           0x08
#define DDR4_SPD_72B_SO_UDIMM           0x09
#define DDR4_SPD_16B_SO_DIMM            0x0c
#define DDR4_SPD_32B_SO_DIMM            0x0d

/* I2C addresses used to select the SPD EEPROM page */
#define SPD_SPA0_ADDRESS        0x36
#define SPD_SPA1_ADDRESS        0x37

/* Convert (medium timebase, fine timebase) counts to picoseconds.
 * NOTE(review): implicitly captures a `pdimm` variable from the caller's
 * scope for mtb_ps/ftb_10th_ps — callers must have `pdimm` in scope. */
#define spd_to_ps(mtb, ftb)	\
	((mtb) * pdimm->mtb_ps + ((ftb) * pdimm->ftb_10th_ps) / 10)

/* Hex-dump an SPD buffer when DDR_DEBUG is set; no-op otherwise.
 * Relies on print_uint/print_hex/puts helpers provided elsewhere. */
#ifdef DDR_DEBUG
#define dump_spd(spd, len) { \
	register int i; \
	register unsigned char *buf = (void *)(spd); \
	\
	for (i = 0; i < (len); i++) { \
		print_uint(i); \
		puts("\t: 0x"); \
		print_hex(buf[i]); \
		puts("\n"); \
	} \
}
#else
#define dump_spd(spd, len) {}
#endif
/* From JEEC Standard No. 21-C release 23A */
struct ddr4_spd {
/* General Section: Bytes 0-127 */
unsigned char info_size_crc; /* 0 # bytes */
unsigned char spd_rev; /* 1 Total # bytes of SPD */
unsigned char mem_type; /* 2 Key Byte / mem type */
unsigned char module_type; /* 3 Key Byte / Module Type */
unsigned char density_banks; /* 4 Density and Banks */
unsigned char addressing; /* 5 Addressing */
unsigned char package_type; /* 6 Package type */
unsigned char opt_feature; /* 7 Optional features */
unsigned char thermal_ref; /* 8 Thermal and refresh */
unsigned char oth_opt_features; /* 9 Other optional features */
unsigned char res_10; /* 10 Reserved */
unsigned char module_vdd; /* 11 Module nominal voltage */
unsigned char organization; /* 12 Module Organization */
unsigned char bus_width; /* 13 Module Memory Bus Width */
unsigned char therm_sensor; /* 14 Module Thermal Sensor */
unsigned char ext_type; /* 15 Extended module type */
unsigned char res_16;
unsigned char timebases; /* 17 MTb and FTB */
unsigned char tck_min; /* 18 tCKAVGmin */
unsigned char tck_max; /* 19 TCKAVGmax */
unsigned char caslat_b1; /* 20 CAS latencies, 1st byte */
unsigned char caslat_b2; /* 21 CAS latencies, 2nd byte */
unsigned char caslat_b3; /* 22 CAS latencies, 3rd byte */
unsigned char caslat_b4; /* 23 CAS latencies, 4th byte */
unsigned char taa_min; /* 24 Min CAS Latency Time */
unsigned char trcd_min; /* 25 Min RAS# to CAS# Delay Time */
unsigned char trp_min; /* 26 Min Row Precharge Delay Time */
unsigned char tras_trc_ext; /* 27 Upper Nibbles for tRAS and tRC */
unsigned char tras_min_lsb; /* 28 tRASmin, lsb */
unsigned char trc_min_lsb; /* 29 tRCmin, lsb */
unsigned char trfc1_min_lsb; /* 30 Min Refresh Recovery Delay Time */
unsigned char trfc1_min_msb; /* 31 Min Refresh Recovery Delay Time */
unsigned char trfc2_min_lsb; /* 32 Min Refresh Recovery Delay Time */
unsigned char trfc2_min_msb; /* 33 Min Refresh Recovery Delay Time */
unsigned char trfc4_min_lsb; /* 34 Min Refresh Recovery Delay Time */
unsigned char trfc4_min_msb; /* 35 Min Refresh Recovery Delay Time */
unsigned char tfaw_msb; /* 36 Upper Nibble for tFAW */
unsigned char tfaw_min; /* 37 tFAW, lsb */
unsigned char trrds_min; /* 38 tRRD_Smin, MTB */
unsigned char trrdl_min; /* 39 tRRD_Lmin, MTB */
unsigned char tccdl_min; /* 40 tCCS_Lmin, MTB */
unsigned char res_41[60-41]; /* 41 Rserved */
unsigned char mapping[78-60]; /* 60~77 Connector to SDRAM bit map */
unsigned char res_78[117-78]; /* 78~116, Reserved */
signed char fine_tccdl_min; /* 117 Fine offset for tCCD_Lmin */
signed char fine_trrdl_min; /* 118 Fine offset for tRRD_Lmin */
signed char fine_trrds_min; /* 119 Fine offset for tRRD_Smin */
signed char fine_trc_min; /* 120 Fine offset for tRCmin */
signed char fine_trp_min; /* 121 Fine offset for tRPmin */
signed char fine_trcd_min; /* 122 Fine offset for tRCDmin */
signed char fine_taa_min; /* 123 Fine offset for tAAmin */
signed char fine_tck_max; /* 124 Fine offset for tCKAVGmax */
signed char fine_tck_min; /* 125 Fine offset for tCKAVGmin */
/* CRC: Bytes 126-127 */
unsigned char crc[2]; /* 126-127 SPD CRC */
/* Module-Specific Section: Bytes 128-255 */
union {
struct {
/* 128 (Unbuffered) Module Nominal Height */
unsigned char mod_height;
/* 129 (Unbuffered) Module Maximum Thickness */
unsigned char mod_thickness;
/* 130 (Unbuffered) Reference Raw Card Used */
unsigned char ref_raw_card;
/* 131 (Unbuffered) Address Mapping from
* Edge Connector to DRAM
*/
unsigned char addr_mapping;
/* 132~253 (Unbuffered) Reserved */
unsigned char res_132[254-132];
/* 254~255 CRC */
unsigned char crc[2];
} unbuffered;
struct {
/* 128 (Registered) Module Nominal Height */
unsigned char mod_height;
/* 129 (Registered) Module Maximum Thickness */
unsigned char mod_thickness;
/* 130 (Registered) Reference Raw Card Used */
unsigned char ref_raw_card;
/* 131 DIMM Module Attributes */
unsigned char modu_attr;
/* 132 RDIMM Thermal Heat Spreader Solution */
unsigned char thermal;
/* 133 Register Manufacturer ID Code, LSB */
unsigned char reg_id_lo;
/* 134 Register Manufacturer ID Code, MSB */
unsigned char reg_id_hi;
/* 135 Register Revision Number */
unsigned char reg_rev;
/* 136 Address mapping from register to DRAM */
unsigned char reg_map;
unsigned char ca_stren;
unsigned char clk_stren;
/* 139~253 Reserved */
unsigned char res_139[254-139];
/* 254~255 CRC */
unsigned char crc[2];
} registered;
struct {
/* 128 (Loadreduced) Module Nominal Height */
unsigned char mod_height;
/* 129 (Loadreduced) Module Maximum Thickness */
unsigned char mod_thickness;
/* 130 (Loadreduced) Reference Raw Card Used */
unsigned char ref_raw_card;
/* 131 DIMM Module Attributes */
unsigned char modu_attr;
/* 132 RDIMM Thermal Heat Spreader Solution */
unsigned char thermal;
/* 133 Register Manufacturer ID Code, LSB */
unsigned char reg_id_lo;
/* 134 Register Manufacturer ID Code, MSB */
unsigned char reg_id_hi;
/* 135 Register Revision Number */
unsigned char reg_rev;
/* 136 Address mapping from register to DRAM */
unsigned char reg_map;
/* 137 Register Output Drive Strength for CMD/Add*/
unsigned char reg_drv;
/* 138 Register Output Drive Strength for CK */
unsigned char reg_drv_ck;
/* 139 Data Buffer Revision Number */
unsigned char data_buf_rev;
/* 140 DRAM VrefDQ for Package Rank 0 */
unsigned char vrefqe_r0;
/* 141 DRAM VrefDQ for Package Rank 1 */
unsigned char vrefqe_r1;
/* 142 DRAM VrefDQ for Package Rank 2 */
unsigned char vrefqe_r2;
/* 143 DRAM VrefDQ for Package Rank 3 */
unsigned char vrefqe_r3;
/* 144 Data Buffer VrefDQ for DRAM Interface */
unsigned char data_intf;
/*
* 145 Data Buffer MDQ Drive Strength and RTT
* for data rate <= 1866
*/
unsigned char data_drv_1866;
/*
* 146 Data Buffer MDQ Drive Strength and RTT
* for 1866 < data rate <= 2400
*/
unsigned char data_drv_2400;
/*
* 147 Data Buffer MDQ Drive Strength and RTT
* for 2400 < data rate <= 3200
*/
unsigned char data_drv_3200;
/* 148 DRAM Drive Strength */
unsigned char dram_drv;
/*
* 149 DRAM ODT (RTT_WR, RTT_NOM)
* for data rate <= 1866
*/
unsigned char dram_odt_1866;
/*
* 150 DRAM ODT (RTT_WR, RTT_NOM)
* for 1866 < data rate <= 2400
*/
unsigned char dram_odt_2400;
/*
* 151 DRAM ODT (RTT_WR, RTT_NOM)
* for 2400 < data rate <= 3200
*/
unsigned char dram_odt_3200;
/*
* 152 DRAM ODT (RTT_PARK)
* for data rate <= 1866
*/
unsigned char dram_odt_park_1866;
/*
* 153 DRAM ODT (RTT_PARK)
* for 1866 < data rate <= 2400
*/
unsigned char dram_odt_park_2400;
/*
* 154 DRAM ODT (RTT_PARK)
* for 2400 < data rate <= 3200
*/
unsigned char dram_odt_park_3200;
unsigned char res_155[254-155]; /* Reserved */
/* 254~255 CRC */
unsigned char crc[2];
} loadreduced;
unsigned char uc[128]; /* 128-255 Module-Specific Section */
} mod_section;
unsigned char res_256[320-256]; /* 256~319 Reserved */
/* Module supplier's data: Byte 320~383 */
unsigned char mmid_lsb; /* 320 Module MfgID Code LSB */
unsigned char mmid_msb; /* 321 Module MfgID Code MSB */
unsigned char mloc; /* 322 Mfg Location */
unsigned char mdate[2]; /* 323~324 Mfg Date */
unsigned char sernum[4]; /* 325~328 Module Serial Number */
unsigned char mpart[20]; /* 329~348 Mfg's Module Part Number */
unsigned char mrev; /* 349 Module Revision Code */
unsigned char dmid_lsb; /* 350 DRAM MfgID Code LSB */
unsigned char dmid_msb; /* 351 DRAM MfgID Code MSB */
unsigned char stepping; /* 352 DRAM stepping */
unsigned char msd[29]; /* 353~381 Mfg's Specific Data */
unsigned char res_382[2]; /* 382~383 Reserved */
};
/* Parameters for a DDR dimm computed from the SPD */
/*
 * Parameters for one DDR DIMM, computed from its SPD contents by
 * cal_dimm_params(). Timing fields with a _ps suffix are in picoseconds.
 */
struct dimm_params {
	/* DIMM organization parameters */
	/*
	 * NOTE(review): the SPD part number is 20 bytes (mpart[20] in
	 * struct ddr4_spd) but only 18 chars + NUL fit here — confirm
	 * the truncation is intentional.
	 */
	char mpart[19]; /* guaranteed null terminated */
	unsigned int n_ranks;		/* package ranks on this DIMM */
	unsigned int die_density;
	unsigned long long rank_density; /* bytes per rank */
	unsigned long long capacity;	/* total bytes on the DIMM */
	unsigned int primary_sdram_width; /* data bus width in bits */
	unsigned int ec_sdram_width;	/* ECC extension width in bits */
	unsigned int rdimm;		/* nonzero for registered DIMM */
	unsigned int package_3ds; /* number of dies in 3DS */
	unsigned int device_width; /* x4, x8, x16 components */
	unsigned int rc;	/* raw card id, matched by cal_board_params() */
	/* SDRAM device parameters */
	unsigned int n_row_addr;
	unsigned int n_col_addr;
	unsigned int edc_config; /* 0 = none, 1 = parity, 2 = ECC */
	unsigned int bank_addr_bits;
	unsigned int bank_group_bits;
	unsigned int burst_lengths_bitmask; /* BL=4 bit 2, BL=8 = bit 3 */
	/* mirrored DIMMs */
	unsigned int mirrored_dimm; /* only for ddr3 */
	/* DIMM timing parameters */
	int mtb_ps; /* medium timebase ps */
	int ftb_10th_ps; /* fine timebase, in 1/10 ps */
	int taa_ps; /* minimum CAS latency time */
	int tfaw_ps; /* four active window delay */
	/*
	 * SDRAM clock periods
	 * The range for these are 1000-10000 so a short should be sufficient
	 */
	int tckmin_x_ps;
	int tckmax_ps;
	/* SPD-defined CAS latencies */
	unsigned int caslat_x;
	/* basic timing parameters */
	int trcd_ps;
	int trp_ps;
	int tras_ps;
	int trfc1_ps;
	int trfc2_ps;
	int trfc4_ps;
	int trrds_ps;
	int trrdl_ps;
	int tccdl_ps;
	int trfc_slr_ps;
	int trc_ps; /* maximum = 254 ns + .75 ns = 254750 ps */
	int twr_ps; /* 15ns for all speed bins */
	unsigned int refresh_rate_ps;
	unsigned int extended_op_srt;
	/* RDIMM */
	unsigned char rcw[16]; /* Register Control Word 0-15 */
	unsigned int dq_mapping[18];
	unsigned int dq_mapping_ors;
};
/* Read 'len' bytes of SPD data from the EEPROM at address 'chip' into buf. */
int read_spd(unsigned char chip, void *buf, int len);
/* CRC-16 over 'count' bytes at 'ptr', used to validate SPD contents. */
int crc16(unsigned char *ptr, int count);
/* Fill *pdimm from raw SPD data; returns 0 on success, negative on error. */
int cal_dimm_params(const struct ddr4_spd *spd, struct dimm_params *pdimm);
#endif /* DIMM_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DDR_IMMAP_H
#define DDR_IMMAP_H
#define DDR_DBUS_64 0
#define DDR_DBUS_32 1
#define DDR_DBUS_16 2
/*
* DDRC register file for DDRC 5.0 and above
*/
struct ccsr_ddr {
struct {
unsigned int a; /* 0x0, 0x8, 0x10, 0x18 */
unsigned int res; /* 0x4, 0xc, 0x14, 0x1c */
} bnds[4];
unsigned char res_20[0x40 - 0x20];
unsigned int dec[10]; /* 0x40 */
unsigned char res_68[0x80 - 0x68];
unsigned int csn_cfg[4]; /* 0x80, 0x84, 0x88, 0x8c */
unsigned char res_90[48];
unsigned int csn_cfg_2[4]; /* 0xc0, 0xc4, 0xc8, 0xcc */
unsigned char res_d0[48];
unsigned int timing_cfg_3; /* SDRAM Timing Configuration 3 */
unsigned int timing_cfg_0; /* SDRAM Timing Configuration 0 */
unsigned int timing_cfg_1; /* SDRAM Timing Configuration 1 */
unsigned int timing_cfg_2; /* SDRAM Timing Configuration 2 */
unsigned int sdram_cfg; /* SDRAM Control Configuration */
unsigned int sdram_cfg_2; /* SDRAM Control Configuration 2 */
unsigned int sdram_mode; /* SDRAM Mode Configuration */
unsigned int sdram_mode_2; /* SDRAM Mode Configuration 2 */
unsigned int sdram_md_cntl; /* SDRAM Mode Control */
unsigned int sdram_interval; /* SDRAM Interval Configuration */
unsigned int sdram_data_init; /* SDRAM Data initialization */
unsigned char res_12c[4];
unsigned int sdram_clk_cntl; /* SDRAM Clock Control */
unsigned char res_134[20];
unsigned int init_addr; /* training init addr */
unsigned int init_ext_addr; /* training init extended addr */
unsigned char res_150[16];
unsigned int timing_cfg_4; /* SDRAM Timing Configuration 4 */
unsigned int timing_cfg_5; /* SDRAM Timing Configuration 5 */
unsigned int timing_cfg_6; /* SDRAM Timing Configuration 6 */
unsigned int timing_cfg_7; /* SDRAM Timing Configuration 7 */
unsigned int zq_cntl; /* ZQ calibration control*/
unsigned int wrlvl_cntl; /* write leveling control*/
unsigned char reg_178[4];
unsigned int ddr_sr_cntr; /* self refresh counter */
unsigned int ddr_sdram_rcw_1; /* Control Words 1 */
unsigned int ddr_sdram_rcw_2; /* Control Words 2 */
unsigned char reg_188[8];
unsigned int ddr_wrlvl_cntl_2; /* write leveling control 2 */
unsigned int ddr_wrlvl_cntl_3; /* write leveling control 3 */
unsigned char res_198[0x1a0-0x198];
unsigned int ddr_sdram_rcw_3;
unsigned int ddr_sdram_rcw_4;
unsigned int ddr_sdram_rcw_5;
unsigned int ddr_sdram_rcw_6;
unsigned char res_1b0[0x200-0x1b0];
unsigned int sdram_mode_3; /* SDRAM Mode Configuration 3 */
unsigned int sdram_mode_4; /* SDRAM Mode Configuration 4 */
unsigned int sdram_mode_5; /* SDRAM Mode Configuration 5 */
unsigned int sdram_mode_6; /* SDRAM Mode Configuration 6 */
unsigned int sdram_mode_7; /* SDRAM Mode Configuration 7 */
unsigned int sdram_mode_8; /* SDRAM Mode Configuration 8 */
unsigned char res_218[0x220-0x218];
unsigned int sdram_mode_9; /* SDRAM Mode Configuration 9 */
unsigned int sdram_mode_10; /* SDRAM Mode Configuration 10 */
unsigned int sdram_mode_11; /* SDRAM Mode Configuration 11 */
unsigned int sdram_mode_12; /* SDRAM Mode Configuration 12 */
unsigned int sdram_mode_13; /* SDRAM Mode Configuration 13 */
unsigned int sdram_mode_14; /* SDRAM Mode Configuration 14 */
unsigned int sdram_mode_15; /* SDRAM Mode Configuration 15 */
unsigned int sdram_mode_16; /* SDRAM Mode Configuration 16 */
unsigned char res_240[0x250-0x240];
unsigned int timing_cfg_8; /* SDRAM Timing Configuration 8 */
unsigned int timing_cfg_9; /* SDRAM Timing Configuration 9 */
unsigned int timing_cfg_10; /* SDRAM Timing COnfigurtion 10 */
unsigned char res_258[0x260-0x25c];
unsigned int sdram_cfg_3;
unsigned char res_264[0x270-0x264];
unsigned int sdram_md_cntl_2;
unsigned char res_274[0x400-0x274];
unsigned int dq_map[4];
unsigned char res_410[0x800-0x410];
unsigned int tx_cfg[4];
unsigned char res_810[0xb20-0x810];
unsigned int ddr_dsr1; /* Debug Status 1 */
unsigned int ddr_dsr2; /* Debug Status 2 */
unsigned int ddr_cdr1; /* Control Driver 1 */
unsigned int ddr_cdr2; /* Control Driver 2 */
unsigned char res_b30[200];
unsigned int ip_rev1; /* IP Block Revision 1 */
unsigned int ip_rev2; /* IP Block Revision 2 */
unsigned int eor; /* Enhanced Optimization Register */
unsigned char res_c04[252];
unsigned int mtcr; /* Memory Test Control Register */
unsigned char res_d04[28];
unsigned int mtp[10]; /* Memory Test Patterns */
unsigned char res_d48[184];
unsigned int data_err_inject_hi; /* Data Path Err Injection Mask Hi*/
unsigned int data_err_inject_lo;/* Data Path Err Injection Mask Lo*/
unsigned int ecc_err_inject; /* Data Path Err Injection Mask ECC */
unsigned char res_e0c[20];
unsigned int capture_data_hi; /* Data Path Read Capture High */
unsigned int capture_data_lo; /* Data Path Read Capture Low */
unsigned int capture_ecc; /* Data Path Read Capture ECC */
unsigned char res_e2c[20];
unsigned int err_detect; /* Error Detect */
unsigned int err_disable; /* Error Disable */
unsigned int err_int_en;
unsigned int capture_attributes; /* Error Attrs Capture */
unsigned int capture_address; /* Error Addr Capture */
unsigned int capture_ext_address; /* Error Extended Addr Capture */
unsigned int err_sbe; /* Single-Bit ECC Error Management */
unsigned char res_e5c[164];
unsigned int debug[64]; /* debug_1 to debug_64 */
};
#endif /* DDR_IMMAP_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DDR_OPTS_H
#define DDR_OPTS_H
#define SDRAM_TYPE_DDR4		5	/* sdram_cfg register */
/* Burst length encodings */
#define DDR_BC4			4	/* burst chop */
#define DDR_OTF			6	/* on-the-fly BC4 and BL8 */
#define DDR_BL8			8	/* burst length 8 */
/* DDR4 RTT_NOM termination values (fractions of RZQ = 240 ohm) */
#define DDR4_RTT_OFF		0
#define DDR4_RTT_60_OHM		1	/* RZQ/4 */
#define DDR4_RTT_120_OHM	2	/* RZQ/2 */
#define DDR4_RTT_40_OHM		3	/* RZQ/6 */
#define DDR4_RTT_240_OHM	4	/* RZQ/1 */
#define DDR4_RTT_48_OHM		5	/* RZQ/5 */
#define DDR4_RTT_80_OHM		6	/* RZQ/3 */
#define DDR4_RTT_34_OHM		7	/* RZQ/7 */
/* DDR4 dynamic (write) termination RTT_WR values */
#define DDR4_RTT_WR_OFF		0
#define DDR4_RTT_WR_120_OHM	1
#define DDR4_RTT_WR_240_OHM	2
#define DDR4_RTT_WR_HZ		3
#define DDR4_RTT_WR_80_OHM	4
/* ODT assertion policies (odt_rd_cfg/odt_wr_cfg) */
#define DDR_ODT_NEVER		0x0
#define DDR_ODT_CS		0x1
#define DDR_ODT_ALL_OTHER_CS	0x2
#define DDR_ODT_OTHER_DIMM	0x3
#define DDR_ODT_ALL		0x4
#define DDR_ODT_SAME_DIMM	0x5
#define DDR_ODT_CS_AND_OTHER_DIMM 0x6
#define DDR_ODT_OTHER_CS_ONSAMEDIMM 0x7
/* Bank/controller interleaving modes */
#define DDR_BA_INTLV_CS01	0x40
#define DDR_BA_INTLV_CS0123	0x64
#define DDR_BA_NONE		0
#define DDR_256B_INTLV		0x8
/*
 * Options controlling how the memory controller is programmed;
 * filled in by cal_odt()/cal_opts()/cal_intlv() and board code.
 */
struct memctl_opt {
	int rdimm;			/* nonzero for registered DIMMs */
	unsigned int dbw_cap_shift;	/* capacity shift when bus narrower than DIMM */
	/* Per-chip-select ODT and precharge options */
	struct local_opts_s {
		unsigned int auto_precharge;
		unsigned int odt_rd_cfg;	/* DDR_ODT_* read policy */
		unsigned int odt_wr_cfg;	/* DDR_ODT_* write policy */
		unsigned int odt_rtt_norm;	/* DDR4_RTT_* value */
		unsigned int odt_rtt_wr;	/* DDR4_RTT_WR_* value */
	} cs_odt[DDRC_NUM_CS];
	int ctlr_intlv;			/* controller interleaving enabled */
	unsigned int ctlr_intlv_mode;	/* e.g. DDR_256B_INTLV */
	unsigned int ba_intlv;		/* bank interleaving mode (DDR_BA_*) */
	int addr_hash;
	int ecc_mode;			/* nonzero when ECC is enabled */
	int ctlr_init_ecc;		/* initialize memory via controller for ECC */
	int self_refresh_in_sleep;
	int self_refresh_irq_en;
	int dynamic_power;
	/* memory data width 0 = 64-bit, 1 = 32-bit, 2 = 16-bit */
	unsigned int data_bus_dimm;
	unsigned int data_bus_used; /* on individual board */
	unsigned int burst_length; /* BC4, OTF and BL8 */
	int otf_burst_chop_en;		/* on-the-fly burst chop allowed */
	int mirrored_dimm;
	int quad_rank_present;
	int output_driver_impedance;
	int ap_en;			/* address parity enable */
	int x4_en;			/* x4 DRAM devices present */
	/* Overrides: when the *_override flag is set, use the *_value field. */
	int caslat_override;
	unsigned int caslat_override_value;
	int addt_lat_override;
	unsigned int addt_lat_override_value;
	unsigned int clk_adj;		/* clock adjust from board timing */
	unsigned int cpo_sample;
	unsigned int wr_data_delay;
	unsigned int cswl_override;
	unsigned int wrlvl_override;
	unsigned int wrlvl_sample;
	unsigned int wrlvl_start;	/* write leveling start value */
	unsigned int wrlvl_ctl_2;
	unsigned int wrlvl_ctl_3;
	int half_strength_drive_en;
	int twot_en;			/* 2T timing */
	int threet_en;			/* 3T timing */
	unsigned int bstopre;		/* precharge interval */
	unsigned int tfaw_ps;
	int rtt_override;
	unsigned int rtt_override_value;
	unsigned int rtt_wr_override_value;
	unsigned int rtt_park;
	int auto_self_refresh_en;
	unsigned int sr_it;
	unsigned int ddr_cdr1;		/* control driver register 1 value */
	unsigned int ddr_cdr2;		/* control driver register 2 value */
	unsigned int trwt_override;
	unsigned int trwt;
	unsigned int twrt;
	unsigned int trrt;
	unsigned int twwt;
	unsigned int vref_phy;
	unsigned int vref_dimm;
	unsigned int odt;
	unsigned int phy_tx_impedance;
	unsigned int phy_atx_impedance;
	unsigned int skip2d;		/* skip PHY 2D training */
};
#endif /* DDR_OPTS_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DDR_REG_H
#define DDR_REG_H
#define SDRAM_CS_CONFIG_EN		0x80000000
/* DDR_SDRAM_CFG - DDR SDRAM Control Configuration
 */
#define SDRAM_CFG_MEM_EN		0x80000000
#define SDRAM_CFG_SREN			0x40000000
#define SDRAM_CFG_ECC_EN		0x20000000
#define SDRAM_CFG_RD_EN			0x10000000
#define SDRAM_CFG_SDRAM_TYPE_MASK	0x07000000
#define SDRAM_CFG_SDRAM_TYPE_SHIFT	24
#define SDRAM_CFG_DYN_PWR		0x00200000
#define SDRAM_CFG_DBW_MASK		0x00180000
#define SDRAM_CFG_DBW_SHIFT		19
#define SDRAM_CFG_32_BW			0x00080000
#define SDRAM_CFG_16_BW			0x00100000
#define SDRAM_CFG_8_BW			0x00180000
#define SDRAM_CFG_8_BE			0x00040000
#define SDRAM_CFG_2T_EN			0x00008000
#define SDRAM_CFG_MEM_HLT		0x00000002
#define SDRAM_CFG_BI			0x00000001
#define SDRAM_CFG2_FRC_SR		0x80000000
#define SDRAM_CFG2_FRC_SR_CLEAR		~(SDRAM_CFG2_FRC_SR)
#define SDRAM_CFG2_D_INIT		0x00000010
#define SDRAM_CFG2_AP_EN		0x00000020
#define SDRAM_CFG2_ODT_ONLY_READ	2
#define SDRAM_CFG3_DDRC_RST		0x80000000
#define SDRAM_INTERVAL_REFINT		0xFFFF0000
#define SDRAM_INTERVAL_REFINT_CLEAR	~(SDRAM_INTERVAL_REFINT)
#define SDRAM_INTERVAL_BSTOPRE		0x3FFF
/* DDR_MD_CNTL */
#define MD_CNTL_MD_EN		0x80000000
#define MD_CNTL_CS_SEL(x)	(((x) & 0x7) << 28)
#define MD_CNTL_MD_SEL(x)	(((x) & 0xf) << 24)
#define MD_CNTL_CKE(x)		(((x) & 0x3) << 20)
/* DDR_CDR1 */
#define DDR_CDR1_DHC_EN	0x80000000
#define DDR_CDR1_ODT_SHIFT	17
#define DDR_CDR1_ODT_MASK	0x6
#define DDR_CDR2_ODT_MASK	0x1
#define DDR_CDR1_ODT(x) ((x & DDR_CDR1_ODT_MASK) << DDR_CDR1_ODT_SHIFT)
#define DDR_CDR2_ODT(x) (x & DDR_CDR2_ODT_MASK)
#define DDR_CDR2_VREF_OVRD(x)	(0x00008080 | ((((x) - 37) & 0x3F) << 8))
#define DDR_CDR2_VREF_TRAIN_EN	0x00000080
#define DDR_CDR2_VREF_RANGE_2	0x00000040
#define DDR_CDR_ODT_OFF		0x0
#define DDR_CDR_ODT_100ohm	0x1
#define DDR_CDR_ODT_120OHM	0x2
#define DDR_CDR_ODT_80ohm	0x3
#define DDR_CDR_ODT_60ohm	0x4
#define DDR_CDR_ODT_40ohm	0x5
#define DDR_CDR_ODT_50ohm	0x6
#define DDR_CDR_ODT_30ohm	0x7
/* DDR ERR_DISABLE */
#define DDR_ERR_DISABLE_APED	(1 << 8)	/* Address parity error disable */
#define DDR_ERR_DISABLE_SBED	(1 << 2)	/* Single-bit ECC error disable */
#define DDR_ERR_DISABLE_MBED	(1 << 3)	/* Multiple-bit ECC error disable */
/* Mode Registers */
#define DDR_MR5_CA_PARITY_LAT_4_CLK	0x1 /* for DDR4-1600/1866/2133 */
#define DDR_MR5_CA_PARITY_LAT_5_CLK	0x2 /* for DDR4-2400 */
/* DDR DSR2 register */
#define DDR_DSR_2_PHY_INIT_CMPLT	0x4
/* SDRAM TIMING_CFG_10 register */
#define DDR_TIMING_CFG_10_T_STAB	0x7FFF
/* DEBUG 2 register */
#define DDR_DBG_2_MEM_IDLE		0x00000002
/* DEBUG 26 register */
#define DDR_DEBUG_26_BIT_6		(0x1 << 6)
#define DDR_DEBUG_26_BIT_7		(0x1 << 7)
#define DDR_DEBUG_26_BIT_12		(0x1 << 12)
#define DDR_DEBUG_26_BIT_13		(0x1 << 13)
#define DDR_DEBUG_26_BIT_14		(0x1 << 14)
#define DDR_DEBUG_26_BIT_15		(0x1 << 15)
#define DDR_DEBUG_26_BIT_16		(0x1 << 16)
#define DDR_DEBUG_26_BIT_17		(0x1 << 17)
#define DDR_DEBUG_26_BIT_18		(0x1 << 18)
#define DDR_DEBUG_26_BIT_19		(0x1 << 19)
#define DDR_DEBUG_26_BIT_24		(0x1 << 24)
#define DDR_DEBUG_26_BIT_25		(0x1 << 25)
#define DDR_DEBUG_26_BIT_24_CLEAR	~(DDR_DEBUG_26_BIT_24)
/* DEBUG_29 register */
#define DDR_TX_BD_DIS	(1 << 10)	/* Transmit Bit Deskew Disable */
#define DDR_INIT_ADDR_EXT_UIA	(1 << 31)
#endif /* DDR_REG_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef UTILITY_H
#define UTILITY_H
#include <dcfg.h>
#if defined(NXP_HAS_CCN504) || defined(NXP_HAS_CCN508)
#define CCN_HN_F_SAM_CTL 0x8
#define CCN_HN_F_REGION_SIZE 0x10000
#endif
/* Query the DDR clock rate for controller 'ctrl_num' from system info. */
unsigned long get_ddr_freq(struct sysinfo *sys, int ctrl_num);
/* Convert a memory clock rate to its period (name suggests picoseconds). */
unsigned int get_memory_clk_ps(unsigned long clk);
/* Convert 'picos' to memory clock cycles at 'data_rate'. */
unsigned int picos_to_mclk(unsigned long data_rate, unsigned int picos);
/* Read the controller IP revision from the register file. */
unsigned int get_ddrc_version(const struct ccsr_ddr *ddr);
/* Print controller/DIMM configuration for diagnostics. */
void print_ddr_info(struct ccsr_ddr *ddr);
#endif
Table for dynamic ODT for DDR4 with PHY generation 2
====================================================
Two-slot system
Only symmetric configurations are supported for interleaving. Non-symmetric
configurations are possible but not covered here. First slot empty is possible
but prohibited for simplicity.
+-----------------------+-------------+---------------+-----------------------------+-----------------------------+
| Configuration | |DRAM controller| Slot 1 | Slot 2 |
+-----------+-----------+-------------+-------+-------+--------------+--------------+--------------+--------------+
| | | | | | Rank 1 | Rank 2 | Rank 1 | Rank 2 |
| Slot 1 | Slot 2 | Write/Read | Write | Read |-------+------+-------+------+-------+------+-------+------+
| | | | | | Write | Read | Write | Read | Write | Read | Write | Read |
+-----------+-----------+------+------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | |Rank 1| off | 60 | 240 | off | 60 | 240 | 60 | 60 | 60 | 60 |
| | |Slot 1|------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | |Rank 2| off | 60 | 60 | 240 | 240 | off | 60 | 60 | 60 | 60 |
| Dual Rank | Dual Rank |------+------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | |Rank 1| off | 60 | 60 | 60 | 60 | 60 | 240 | off | 60 | 240 |
| | |Slot 2|------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | |Rank 2| off | 60 | 60 | 60 | 60 | 60 | 60 | 240 | 240 | off |
+-----------+-----------+------+------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | Slot 1 | off | 60 | 80 | off | | | | | | |
|Single Rank|Single Rank|-------------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | Slot 2 | off | 60 | | | | | 80 | off |
+-----------+-----------+------+------+-------+-------+-------+------+-------+------+-------+------+
| | | |Rank 1| off | 80 | 80 | off | off | off |
| Dual Rank | |Slot 1|------+-------+-------+-------+------+-------+------+
| | | |Rank 2| off | 80 | 80 | off | off | off |
+-----------+-----------+-------------+-------+-------+-------+------+-------+------+
|Single Rank| | Slot 1 | off | 80 | 80 | off |
+-----------+-----------+-------------+-------+-------+-------+------+
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <common/debug.h>
#include <ddr.h>
#ifndef CONFIG_DDR_NODIMM
#include <i2c.h>
#endif
#include <nxp_timer.h>
/*
 * ODT (on-die termination) settings for one chip select: when to
 * assert ODT on reads/writes, plus the RTT_NOM and RTT_WR values.
 */
struct dynamic_odt {
	unsigned int odt_rd_cfg;	/* DDR_ODT_* policy for reads */
	unsigned int odt_wr_cfg;	/* DDR_ODT_* policy for writes */
	unsigned int odt_rtt_norm;	/* DDR4_RTT_* nominal termination */
	unsigned int odt_rtt_wr;	/* DDR4_RTT_WR_* dynamic termination */
};
#ifndef CONFIG_STATIC_DDR
#if defined(PHY_GEN2_FW_IMAGE_BUFFER) && !defined(NXP_DDR_PHY_GEN2)
#error Missing NXP_DDR_PHY_GEN2
#endif
#ifdef NXP_DDR_PHY_GEN2
/*
 * ODT tables, one entry per chip select. Naming: single/dual = DIMM
 * slots on the controller; D = dual-rank DIMM, S = single-rank DIMM,
 * 0 = second slot empty. This set applies to PHY generation 2.
 */
static const struct dynamic_odt single_D[4] = {
	{ /* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{ /* cs1 */
		DDR_ODT_NEVER,
		DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_WR_OFF
	},
	{},
	{}
};
static const struct dynamic_odt single_S[4] = {
	{ /* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{},
	{},
};
static const struct dynamic_odt dual_DD[4] = {
	{ /* cs0 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_60_OHM,
		DDR4_RTT_WR_240_OHM
	},
	{ /* cs1 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_60_OHM,
		DDR4_RTT_WR_240_OHM
	},
	{ /* cs2 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_60_OHM,
		DDR4_RTT_WR_240_OHM
	},
	{ /* cs3 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_60_OHM,
		DDR4_RTT_WR_240_OHM
	}
};
static const struct dynamic_odt dual_SS[4] = {
	{ /* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{ /* cs2 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{}
};
static const struct dynamic_odt dual_D0[4] = {
	{ /* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_SAME_DIMM,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{ /* cs1 */
		DDR_ODT_NEVER,
		DDR_ODT_NEVER,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{}
};
static const struct dynamic_odt dual_S0[4] = {
	{ /* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_CS,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{},
	{}
};
#else
/*
 * ODT tables for the non-GEN2 PHY. Naming: single/dual = DIMM slots
 * on the controller; D = dual-rank DIMM, S = single-rank DIMM,
 * 0 = second slot empty.
 */
static const struct dynamic_odt single_D[4] = {
	{ /* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_40_OHM,
		DDR4_RTT_WR_OFF
	},
	{ /* cs1 */
		DDR_ODT_NEVER,
		DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_WR_OFF
	},
	{},
	{}
};
static const struct dynamic_odt single_S[4] = {
	{ /* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_40_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{},
	{},
};
static const struct dynamic_odt dual_DD[4] = {
	{ /* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_WR_OFF
	},
	{ /* cs1 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_WR_OFF
	},
	{ /* cs2 */
		DDR_ODT_NEVER,
		DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_WR_OFF
	},
	{ /* cs3 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_WR_OFF
	}
};
static const struct dynamic_odt dual_SS[4] = {
	{ /* cs0 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_WR_120_OHM
	},
	{},
	{ /* cs2 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_WR_120_OHM
	},
	{}
};
static const struct dynamic_odt dual_D0[4] = {
	{ /* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_SAME_DIMM,
		DDR4_RTT_40_OHM,
		DDR4_RTT_WR_OFF
	},
	{ /* cs1 */
		DDR_ODT_NEVER,
		DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_WR_OFF
	},
	{},
	{}
};
static const struct dynamic_odt dual_S0[4] = {
	{ /* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_CS,
		DDR4_RTT_40_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{},
	{}
};
#endif /* NXP_DDR_PHY_GEN2 */
/*
* Automatically select bank interleaving mode based on DIMMs
* in this order: cs0_cs1_cs2_cs3, cs0_cs1, null.
* This function only deal with one or two slots per controller.
*/
static inline unsigned int auto_bank_intlv(const int cs_in_use,
					   const struct dimm_params *pdimm)
{
	unsigned int intlv = 0U;

	/* Map the populated chip-select bitmask to an interleaving mode. */
	if (cs_in_use == 0xf) {
		intlv = DDR_BA_INTLV_CS0123;	/* all four CS populated */
	} else if (cs_in_use == 0x3) {
		intlv = DDR_BA_INTLV_CS01;	/* CS0 and CS1 only */
	} else if ((cs_in_use == 0x1) || (cs_in_use == 0x5)) {
		intlv = DDR_BA_NONE;		/* no bank interleaving */
	}

	return intlv;
}
/*
 * Select per-chip-select ODT settings from the static tables above,
 * based on DIMM slots per controller, ranks per DIMM and which chip
 * selects are actually populated. Writes popts->cs_odt[].
 *
 * Returns 0 on success, -EINVAL for unsupported configurations.
 *
 * Fixes: typo in the "Error determining ODT." message; `const static`
 * reordered to the idiomatic `static const`.
 */
static int cal_odt(const unsigned int clk,
		   struct memctl_opt *popts,
		   struct ddr_conf *conf,
		   struct dimm_params *pdimm,
		   const int dimm_slot_per_ctrl)
{
	unsigned int i;
	const struct dynamic_odt *pdodt = NULL;
	/* Indexed by [slots per controller - 1][ranks per DIMM - 1]. */
	static const struct dynamic_odt *table[2][5] = {
		{single_S, single_D, NULL, NULL},
		{dual_SS, dual_DD, NULL, NULL},
	};

	if (dimm_slot_per_ctrl != 1 && dimm_slot_per_ctrl != 2) {
		ERROR("Unsupported number of DIMMs\n");
		return -EINVAL;
	}

	pdodt = table[dimm_slot_per_ctrl - 1][pdimm->n_ranks - 1];
	/*
	 * Two-slot tables assume both slots populated; fall back to the
	 * "second slot empty" variants (dual_S0/dual_D0) when only the
	 * first slot's chip selects are in use.
	 */
	if (pdodt == dual_SS) {
		pdodt = (conf->cs_in_use == 0x5) ? dual_SS :
			((conf->cs_in_use == 0x1) ? dual_S0 : NULL);
	} else if (pdodt == dual_DD) {
		pdodt = (conf->cs_in_use == 0xf) ? dual_DD :
			((conf->cs_in_use == 0x3) ? dual_D0 : NULL);
	}
	if (pdodt == dual_DD && pdimm->package_3ds) {
		ERROR("Too many 3DS DIMMs.\n");
		return -EINVAL;
	}
	if (pdodt == NULL) {
		ERROR("Error determining ODT.\n");
		return -EINVAL;
	}

	/* Pick chip-select local options. */
	for (i = 0U; i < DDRC_NUM_CS; i++) {
		debug("cs %d\n", i);
		popts->cs_odt[i].odt_rd_cfg = pdodt[i].odt_rd_cfg;
		debug(" odt_rd_cfg 0x%x\n",
		      popts->cs_odt[i].odt_rd_cfg);
		popts->cs_odt[i].odt_wr_cfg = pdodt[i].odt_wr_cfg;
		debug(" odt_wr_cfg 0x%x\n",
		      popts->cs_odt[i].odt_wr_cfg);
		popts->cs_odt[i].odt_rtt_norm = pdodt[i].odt_rtt_norm;
		debug(" odt_rtt_norm 0x%x\n",
		      popts->cs_odt[i].odt_rtt_norm);
		popts->cs_odt[i].odt_rtt_wr = pdodt[i].odt_rtt_wr;
		debug(" odt_rtt_wr 0x%x\n",
		      popts->cs_odt[i].odt_rtt_wr);
		popts->cs_odt[i].auto_precharge = 0;
		debug(" auto_precharge %d\n",
		      popts->cs_odt[i].auto_precharge);
	}

	return 0;
}
/*
 * Derive general controller options (ECC, bus width, burst chop,
 * address parity, precharge interval) from the DIMM parameters.
 * Returns 0 on success, -EINVAL on an unsupported SDRAM width.
 */
static int cal_opts(const unsigned int clk,
		    struct memctl_opt *popts,
		    struct ddr_conf *conf,
		    struct dimm_params *pdimm,
		    const int dimm_slot_per_ctrl,
		    const unsigned int ip_rev)
{
	popts->rdimm = pdimm->rdimm;
	popts->mirrored_dimm = pdimm->mirrored_dimm;
#ifdef CONFIG_DDR_ECC_EN
	/* edc_config == 2 means the DIMM provides ECC (see dimm_params). */
	popts->ecc_mode = pdimm->edc_config == 0x02 ? 1 : 0;
#endif
	/*
	 * NOTE(review): without CONFIG_DDR_ECC_EN this reads ecc_mode
	 * before anything here assigns it — assumes popts was
	 * zero-initialized by the caller; confirm.
	 */
	popts->ctlr_init_ecc = popts->ecc_mode;
	debug("ctlr_init_ecc %d\n", popts->ctlr_init_ecc);
	popts->self_refresh_in_sleep = 1;
	popts->dynamic_power = 0;
	/*
	 * check sdram width, allow platform override
	 * 0 = 64-bit, 1 = 32-bit, 2 = 16-bit
	 */
	if (pdimm->primary_sdram_width == 64) {
		popts->data_bus_dimm = DDR_DBUS_64;
		popts->otf_burst_chop_en = 1;
	} else if (pdimm->primary_sdram_width == 32) {
		popts->data_bus_dimm = DDR_DBUS_32;
		popts->otf_burst_chop_en = 0;
	} else if (pdimm->primary_sdram_width == 16) {
		popts->data_bus_dimm = DDR_DBUS_16;
		popts->otf_burst_chop_en = 0;
	} else {
		ERROR("primary sdram width invalid!\n");
		return -EINVAL;
	}
	popts->data_bus_used = popts->data_bus_dimm;
	popts->x4_en = (pdimm->device_width == 4) ? 1 : 0;
	debug("x4_en %d\n", popts->x4_en);
	/* for RDIMM and DDR4 UDIMM/discrete memory, address parity enable */
	if (popts->rdimm != 0) {
		popts->ap_en = 1; /* 0 = disable, 1 = enable */
	} else {
		popts->ap_en = 0; /* disabled for DDR4 UDIMM/discrete default */
	}
	/*
	 * NOTE(review): IP rev 0x50500 forces address parity off —
	 * presumably a controller limitation or erratum; confirm.
	 */
	if (ip_rev == 0x50500) {
		popts->ap_en = 0;
	}
	debug("ap_en %d\n", popts->ap_en);
	/* BSTTOPRE precharge interval uses 1/4 of refint value. */
	popts->bstopre = picos_to_mclk(clk, pdimm->refresh_rate_ps) >> 2;
	popts->tfaw_ps = pdimm->tfaw_ps;
	return 0;
}
/* Choose controller- and bank-interleaving modes for this configuration. */
static void cal_intlv(const int num_ctlrs,
		      struct memctl_opt *popts,
		      struct ddr_conf *conf,
		      struct dimm_params *pdimm)
{
#ifdef NXP_DDR_INTLV_256B
	/* 256-byte interleaving across two controllers, when enabled. */
	if (num_ctlrs == 2) {
		popts->ctlr_intlv = 1;
		popts->ctlr_intlv_mode = DDR_256B_INTLV;
	}
#endif
	debug("ctlr_intlv %d\n", popts->ctlr_intlv);
	debug("ctlr_intlv_mode %d\n", popts->ctlr_intlv_mode);
	/* Bank interleaving follows the populated chip-select mask. */
	popts->ba_intlv = auto_bank_intlv(conf->cs_in_use, pdimm);
	debug("ba_intlv 0x%x\n", popts->ba_intlv);
}
/*
 * Pick the burst length for the configured bus width and derive the
 * capacity shift for reduced-width operation.
 * Returns 0 on success, -EINVAL on an inconsistent bus configuration.
 */
static int update_burst_length(struct memctl_opt *popts)
{
	const unsigned int bus = popts->data_bus_used;

	/*
	 * Narrow (32/16-bit) buses must run full BL8 and cannot use
	 * on-the-fly burst chop; a 64-bit bus uses OTF chop when it
	 * was enabled, plain BL8 otherwise.
	 */
	if ((bus == DDR_DBUS_32) || (bus == DDR_DBUS_16)) {
		popts->otf_burst_chop_en = 0;
		popts->burst_length = DDR_BL8;
	} else {
		popts->burst_length = (popts->otf_burst_chop_en != 0) ?
					DDR_OTF : DDR_BL8;
	}
	debug("data_bus_used %d\n", popts->data_bus_used);
	debug("otf_burst_chop_en %d\n", popts->otf_burst_chop_en);
	debug("burst_length 0x%x\n", popts->burst_length);
	/*
	 * Encoding: 0 = 64-bit, 1 = 32-bit, 2 = 16-bit, so a larger
	 * value means a narrower bus. Running the bus narrower than
	 * the DIMM shifts the usable capacity (dbw_cap_shift); a bus
	 * wider than the DIMM provides is a configuration error.
	 */
	if (popts->data_bus_dimm > popts->data_bus_used) {
		ERROR("Data bus configuration error\n");
		return -EINVAL;
	}
	popts->dbw_cap_shift = popts->data_bus_used - popts->data_bus_dimm;
	debug("dbw_cap_shift %d\n", popts->dbw_cap_shift);
	return 0;
}
/*
 * Apply board-specific timing (clock adjust, write-leveling start and
 * per-lane control words) matched by DIMM raw card id and DDR speed.
 * Returns 0 on success, -EINVAL when no table entry matches.
 */
int cal_board_params(struct ddr_info *priv,
		     const struct board_timing *dimm,
		     int len)
{
	const unsigned long speed = priv->clk / 1000000;
	const struct dimm_params *pdimm = &priv->dimm;
	struct memctl_opt *popts = &priv->opt;
	struct rc_timing const *prt;
	struct rc_timing const *chosen = NULL;
	int i;

	/* Locate the timing table for this DIMM's raw card. */
	for (i = 0; i < len; i++) {
		if (pdimm->rc == dimm[i].rc) {
			break;
		}
	}
	if (i == len) {
		ERROR("Board parameters no match.\n");
		return -EINVAL;
	}

	/* Pick the first entry whose speed bin covers the DDR clock. */
	for (prt = dimm[i].p; prt->speed_bin != 0; prt++) {
		if (speed <= prt->speed_bin) {
			chosen = prt;
			break;
		}
	}
	if (chosen == NULL) {
		ERROR("timing no match for speed %lu\n", speed);
		return -EINVAL;
	}

	popts->clk_adj = chosen->clk_adj;
	popts->wrlvl_start = chosen->wrlvl;
	/* Replicate the start value across four byte lanes, plus offsets. */
	popts->wrlvl_ctl_2 = (chosen->wrlvl * 0x01010101 + dimm[i].add1) &
			0xFFFFFFFF;
	popts->wrlvl_ctl_3 = (chosen->wrlvl * 0x01010101 + dimm[i].add2) &
			0xFFFFFFFF;

	return 0;
}
/*
 * Derive the full controller option set in dependency order:
 * ODT, general options, interleaving, board timing, burst length.
 */
static int synthesize_ctlr(struct ddr_info *priv)
{
	int ret;
	ret = cal_odt(priv->clk,
		      &priv->opt,
		      &priv->conf,
		      &priv->dimm,
		      priv->dimm_on_ctlr);
	if (ret != 0) {
		return ret;
	}
	ret = cal_opts(priv->clk,
		       &priv->opt,
		       &priv->conf,
		       &priv->dimm,
		       priv->dimm_on_ctlr,
		       priv->ip_rev);
	if (ret != 0) {
		return ret;
	}
	cal_intlv(priv->num_ctlrs, &priv->opt, &priv->conf, &priv->dimm);
	/*
	 * NOTE(review): a board-timing mismatch is only logged — 'ret'
	 * is overwritten by update_burst_length() below, so this error
	 * is non-fatal. Confirm that is intended.
	 */
	ret = ddr_board_options(priv);
	if (ret != 0) {
		ERROR("Failed matching board timing.\n");
	}
	ret = update_burst_length(&priv->opt);
	return ret;
}
/* Return the bit mask of valid DIMMs found */
/*
 * Discover and validate the DIMMs on all controllers, either from
 * platform data (CONFIG_DDR_NODIMM) or by reading each slot's SPD.
 * All populated DIMMs must be identical. Fills priv->dimm and
 * priv->conf (dimm_in_use, cs_on_dimm, cs_in_use).
 * Returns the bit mask of valid DIMMs found, or -EINVAL on error.
 */
static int parse_spd(struct ddr_info *priv)
{
	struct ddr_conf *conf = &priv->conf;
	struct dimm_params *dimm = &priv->dimm;
	int j, valid_mask = 0;
#ifdef CONFIG_DDR_NODIMM
	/* Soldered-down memory: parameters come from the platform. */
	valid_mask = ddr_get_ddr_params(dimm, conf);
	if (valid_mask < 0) {
		ERROR("DDR params error\n");
		return valid_mask;
	}
#else
	const int *spd_addr = priv->spd_addr;
	const int num_ctlrs = priv->num_ctlrs;
	const int num_dimm = priv->dimm_on_ctlr;
	struct ddr4_spd spd[2];
	unsigned int spd_checksum[2];
	int addr_idx = 0;
	int spd_idx = 0;
	int ret, addr, i;
	/* Scan all DIMMs */
	for (i = 0; i < num_ctlrs; i++) {
		debug("Controller %d\n", i);
		for (j = 0; j < num_dimm; j++, addr_idx++) {
			debug("DIMM %d\n", j);
			addr = spd_addr[addr_idx];
			if (addr == 0) {
				/* Address 0 marks an absent slot; only legal past slot 0. */
				if (j == 0) {
					ERROR("First SPD addr wrong.\n");
					return -EINVAL;
				}
				continue;
			}
			debug("addr 0x%x\n", addr);
			ret = read_spd(addr, &spd[spd_idx],
				       sizeof(struct ddr4_spd));
			if (ret != 0) { /* invalid */
				debug("Invalid SPD at address 0x%x\n", addr);
				continue;
			}
			/*
			 * Fold both SPD CRC pairs (bytes 126-127 and, via
			 * mod_section.uc[126..127], bytes 254-255) into one
			 * word used as a validity check and as an identity
			 * fingerprint for the DIMM-match test below.
			 */
			spd_checksum[spd_idx] =
				(spd[spd_idx].crc[1] << 24) |
				(spd[spd_idx].crc[0] << 16) |
				(spd[spd_idx].mod_section.uc[127] << 8) |
				(spd[spd_idx].mod_section.uc[126] << 0);
			debug("checksum 0x%x\n", spd_checksum[spd_idx]);
			if (spd_checksum[spd_idx] == 0) {
				debug("Bad checksum, ignored.\n");
				continue;
			}
			if (spd_idx == 0) {
				/* first valid SPD */
				ret = cal_dimm_params(&spd[0], dimm);
				if (ret != 0) {
					ERROR("SPD calculation error\n");
					return -EINVAL;
				}
			}
			/* Every further DIMM must match the first exactly. */
			if (spd_idx != 0 && spd_checksum[0] !=
			    spd_checksum[spd_idx]) {
				ERROR("Not identical DIMMs.\n");
				return -EINVAL;
			}
			conf->dimm_in_use[j] = 1;
			valid_mask |= 1 << addr_idx;
			spd_idx = 1;
		}
		debug("done with controller %d\n", i);
	}
	/* Reject unsupported slot-population combinations. */
	switch (num_ctlrs) {
	case 1:
		if ((valid_mask & 0x1) == 0) {
			ERROR("First slot cannot be empty.\n");
			return -EINVAL;
		}
		break;
	case 2:
		switch (num_dimm) {
		case 1:
			if (valid_mask == 0) {
				ERROR("Both slot empty\n");
				return -EINVAL;
			}
			break;
		case 2:
			if (valid_mask != 0x5 &&
			    valid_mask != 0xf &&
			    (valid_mask & 0x7) != 0x4 &&
			    (valid_mask & 0xd) != 0x1) {
				ERROR("Invalid DIMM combination.\n");
				return -EINVAL;
			}
			break;
		default:
			ERROR("Invalid number of DIMMs.\n");
			return -EINVAL;
		}
		break;
	default:
		ERROR("Invalid number of controllers.\n");
		return -EINVAL;
	}
	/* now we have valid and identical DIMMs on controllers */
#endif	/* CONFIG_DDR_NODIMM */
	debug("cal cs\n");
	conf->cs_in_use = 0;
	/* Build the chip-select bitmask from the ranks on each DIMM. */
	for (j = 0; j < DDRC_NUM_DIMM; j++) {
		if (conf->dimm_in_use[j] == 0) {
			continue;
		}
		switch (dimm->n_ranks) {
		case 4:
			ERROR("Quad-rank DIMM not supported\n");
			return -EINVAL;
		case 2:
			conf->cs_on_dimm[j] = 0x3 << (j * CONFIG_CS_PER_SLOT);
			conf->cs_in_use |= conf->cs_on_dimm[j];
			break;
		case 1:
			conf->cs_on_dimm[j] = 0x1 << (j * CONFIG_CS_PER_SLOT);
			conf->cs_in_use |= conf->cs_on_dimm[j];
			break;
		default:
			ERROR("SPD error with n_ranks\n");
			return -EINVAL;
		}
		debug("cs_in_use = %x\n", conf->cs_in_use);
		debug("cs_on_dimm[%d] = %x\n", j, conf->cs_on_dimm[j]);
	}
#ifndef CONFIG_DDR_NODIMM
	if (priv->dimm.rdimm != 0) {
		NOTICE("RDIMM %s\n", priv->dimm.mpart);
	} else {
		NOTICE("UDIMM %s\n", priv->dimm.mpart);
	}
#else
	NOTICE("%s\n", priv->dimm.mpart);
#endif
	return valid_mask;
}
static unsigned long long assign_intlv_addr(
const struct dimm_params *pdimm,
const struct memctl_opt *opt,
struct ddr_conf *conf,
const unsigned long long current_mem_base)
{
int i;
int ctlr_density_mul = 0;
const unsigned long long rank_density = pdimm->rank_density >>
opt->dbw_cap_shift;
unsigned long long total_ctlr_mem;
debug("rank density 0x%llx\n", rank_density);
switch (opt->ba_intlv & DDR_BA_INTLV_CS0123) {
case DDR_BA_INTLV_CS0123:
ctlr_density_mul = 4;
break;
case DDR_BA_INTLV_CS01:
ctlr_density_mul = 2;
break;
default:
ctlr_density_mul = 1;
break;
}
debug("ctlr density mul %d\n", ctlr_density_mul);
switch (opt->ctlr_intlv_mode) {
case DDR_256B_INTLV:
total_ctlr_mem = 2 * ctlr_density_mul * rank_density;
break;
default:
ERROR("Unknown interleaving mode");
return 0;
}
conf->base_addr = current_mem_base;
conf->total_mem = total_ctlr_mem;
/* overwrite cs_in_use bitmask with controller interleaving */
conf->cs_in_use = (1 << ctlr_density_mul) - 1;
debug("Overwrite cs_in_use as %x\n", conf->cs_in_use);
/* Fill addr with each cs in use */
for (i = 0; i < ctlr_density_mul; i++) {
conf->cs_base_addr[i] = current_mem_base;
conf->cs_size[i] = total_ctlr_mem;
debug("CS %d\n", i);
debug(" base_addr 0x%llx\n", conf->cs_base_addr[i]);
debug(" size 0x%llx\n", conf->cs_size[i]);
}
return total_ctlr_mem;
}
/*
 * Assign base addresses and sizes for each chip select when memory
 * controllers are NOT interleaved.  The layout depends on the CS (bank)
 * interleaving option:
 *   - CS0123: all four CS share one region of 4x rank density.
 *   - CS01:   CS0/CS1 share a doubled region, remaining CS are linear.
 *   - none:   each CS in use gets its own linear region of rank density.
 *
 * Returns the total memory mapped on this controller, or 0 if the bank
 * interleaving option is unsupported.
 */
static unsigned long long assign_non_intlv_addr(
	const struct dimm_params *pdimm,
	const struct memctl_opt *opt,
	struct ddr_conf *conf,
	unsigned long long current_mem_base)
{
	int i;
	/* Per-rank density after capping for a reduced data-bus width */
	const unsigned long long rank_density = pdimm->rank_density >>
						opt->dbw_cap_shift;
	unsigned long long total_ctlr_mem = 0ULL;

	debug("rank density 0x%llx\n", rank_density);
	conf->base_addr = current_mem_base;

	/* assign each cs */
	switch (opt->ba_intlv & DDR_BA_INTLV_CS0123) {
	case DDR_BA_INTLV_CS0123:
		/* All CS interleaved: same base, each covers 4x density */
		for (i = 0; i < DDRC_NUM_CS; i++) {
			conf->cs_base_addr[i] = current_mem_base;
			conf->cs_size[i] = rank_density << 2;
			total_ctlr_mem += rank_density;
		}
		break;
	case DDR_BA_INTLV_CS01:
		/* CS0/CS1 interleaved pair at the same base, 2x density */
		for (i = 0; ((conf->cs_in_use & (1 << i)) != 0) && i < 2; i++) {
			conf->cs_base_addr[i] = current_mem_base;
			conf->cs_size[i] = rank_density << 1;
			total_ctlr_mem += rank_density;
		}
		/* Remaining CS follow linearly after the interleaved pair */
		current_mem_base += total_ctlr_mem;
		for (; ((conf->cs_in_use & (1 << i)) != 0) && i < DDRC_NUM_CS;
		     i++) {
			conf->cs_base_addr[i] = current_mem_base;
			conf->cs_size[i] = rank_density;
			total_ctlr_mem += rank_density;
			current_mem_base += rank_density;
		}
		break;
	case DDR_BA_NONE:
		/* Plain linear mapping, one region per CS in use */
		for (i = 0; ((conf->cs_in_use & (1 << i)) != 0) &&
			    (i < DDRC_NUM_CS); i++) {
			conf->cs_base_addr[i] = current_mem_base;
			conf->cs_size[i] = rank_density;
			current_mem_base += rank_density;
			total_ctlr_mem += rank_density;
		}
		break;
	default:
		ERROR("Unsupported bank interleaving\n");
		return 0;
	}
	/* Report the final layout for each CS that is in use */
	for (i = 0; ((conf->cs_in_use & (1 << i)) != 0) &&
		    (i < DDRC_NUM_CS); i++) {
		debug("CS %d\n", i);
		debug("    base_addr 0x%llx\n", conf->cs_base_addr[i]);
		debug("    size 0x%llx\n", conf->cs_size[i]);
	}

	return total_ctlr_mem;
}
/*
 * Map the configured DDR into the physical address space, starting at
 * priv->mem_base.  Declared weak so a platform may supply its own
 * address-assignment policy.  Returns the total memory assigned.
 */
unsigned long long assign_addresses(struct ddr_info *priv)
	__attribute__ ((weak));

unsigned long long assign_addresses(struct ddr_info *priv)
{
	struct memctl_opt *opt = &priv->opt;
	const struct dimm_params *dimm = &priv->dimm;
	struct ddr_conf *conf = &priv->conf;
	const unsigned long long base = priv->mem_base;
	unsigned long long assigned;

	debug("ctlr_intlv %d\n", opt->ctlr_intlv);

	if (opt->ctlr_intlv != 0) {
		assigned = assign_intlv_addr(dimm, opt, conf, base);
	} else {
		/*
		 * Simple linear assignment if memory controllers are not
		 * interleaved. This is only valid for SoCs with single DDRC.
		 */
		assigned = assign_non_intlv_addr(dimm, opt, conf, base);
	}

	conf->total_mem = assigned;
	debug("base 0x%llx\n", base);
	debug("Total mem by assignment is 0x%llx\n", assigned);

	return assigned;
}
/*
 * Derive the DDR controller register file from the synthesized options,
 * configuration and DIMM parameters.  Returns 0 on success, or the
 * non-zero error from compute_ddrc().
 */
static int cal_ddrc_regs(struct ddr_info *priv)
{
	const int ret = compute_ddrc(priv->clk,
				     &priv->opt,
				     &priv->conf,
				     &priv->ddr_reg,
				     &priv->dimm,
				     priv->ip_rev);

	if (ret != 0) {
		ERROR("Calculating DDR registers failed\n");
	}

	return ret;
}
#endif /* CONFIG_STATIC_DDR */
/*
 * Program every DDR controller instance with the computed register file.
 * Stops at the first controller that fails; returns 0 on success or the
 * error code from ddrc_set_regs().
 */
static int write_ddrc_regs(struct ddr_info *priv)
{
	int ctlr;

	for (ctlr = 0; ctlr < priv->num_ctlrs; ctlr++) {
		const int ret = ddrc_set_regs(priv->clk, &priv->ddr_reg,
					      priv->ddr[ctlr], 0);

		if (ret != 0) {
			ERROR("Writing DDR register(s) failed\n");
			return ret;
		}
	}

	return 0;
}
/*
 * Top-level DDR initialization entry point.
 *
 * Either runs the full dynamic flow (parse SPD, synthesize controller
 * options, assign addresses, compute and program registers, compute PHY
 * registers) or, with CONFIG_STATIC_DDR, takes a board-provided static
 * register set instead.
 *
 * priv:                driver state; priv->ddr[0] must be valid.
 * nxp_ccn_hn_f0_addr:  (CCN-504/508 parts only) interconnect address used
 *                      when disabling an unused DDR controller.
 *
 * Returns the detected DRAM size in bytes, or a negative error code.
 */
long long dram_init(struct ddr_info *priv
#if defined(NXP_HAS_CCN504) || defined(NXP_HAS_CCN508)
		     , uintptr_t nxp_ccn_hn_f0_addr
#endif
)
{
	uint64_t time __unused;
	long long dram_size;
	int ret;
	/* Reference point for the elapsed-time reports below */
	const uint64_t time_base = get_timer_val(0);
	unsigned int ip_rev = get_ddrc_version(priv->ddr[0]);
	int valid_spd_mask __unused;
	int scratch = 0x0;

	priv->ip_rev = ip_rev;

#ifndef CONFIG_STATIC_DDR
	INFO("time base %llu ms\n", time_base);
	debug("Parse DIMM SPD(s)\n");
	/* Bitmask of slots with valid SPD, or negative on error */
	valid_spd_mask = parse_spd(priv);
	if (valid_spd_mask < 0) {
		ERROR("Parsing DIMM Error\n");
		return valid_spd_mask;
	}
#if defined(NXP_HAS_CCN504) || defined(NXP_HAS_CCN508)
	/* Power off any controller whose slots are all empty */
	if (priv->num_ctlrs == 2 || priv->num_ctlrs == 1) {
		ret = disable_unused_ddrc(priv, valid_spd_mask,
					  nxp_ccn_hn_f0_addr);
		if (ret != 0) {
			return ret;
		}
	}
#endif
	time = get_timer_val(time_base);
	INFO("Time after parsing SPD %llu ms\n", time);
	debug("Synthesize configurations\n");
	ret = synthesize_ctlr(priv);
	if (ret != 0) {
		ERROR("Synthesize config error\n");
		return ret;
	}
	debug("Assign binding addresses\n");
	dram_size = assign_addresses(priv);
	if (dram_size == 0) {
		ERROR("Assigning address error\n");
		return -EINVAL;
	}
	debug("Calculate controller registers\n");
	ret = cal_ddrc_regs(priv);
	if (ret != 0) {
		ERROR("Calculate register error\n");
		return ret;
	}
	/* PHY failure is reported but not fatal here */
	ret = compute_ddr_phy(priv);
	if (ret != 0)
		ERROR("Calculating DDR PHY registers failed.\n");
#else
	/* Static flow: the board supplies the full register set */
	dram_size = board_static_ddr(priv);
	if (dram_size == 0) {
		ERROR("Error getting static DDR settings.\n");
		return -EINVAL;
	}
#endif
	/*
	 * On warm boot DDR content must be preserved, so suppress the
	 * controller's data initialization (clear SDRAM_CFG2_D_INIT).
	 */
	if (priv->warm_boot_flag == DDR_WARM_BOOT) {
		scratch = (priv->ddr_reg).sdram_cfg[1];
		scratch = scratch & ~(SDRAM_CFG2_D_INIT);
		priv->ddr_reg.sdram_cfg[1] = scratch;
	}
	time = get_timer_val(time_base);
	INFO("Time before programming controller %llu ms\n", time);
	debug("Program controller registers\n");
	ret = write_ddrc_regs(priv);
	if (ret != 0) {
		ERROR("Programing DDRC error\n");
		return ret;
	}
	puts("");
	NOTICE("%lld GB ", dram_size >> 30);
	print_ddr_info(priv->ddr[0]);
	time = get_timer_val(time_base);
	INFO("Time used by DDR driver %llu ms\n", time);

	return dram_size;
}
#
# Copyright 2021 NXP
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Root of the NXP DDR driver sources inside the platform drivers tree.
DDR_DRIVERS_PATH	:=	${PLAT_DRIVERS_PATH}/ddr

# Select the DDR PHY flavour and its errata/feature defines.
ifeq ($(PLAT_DDR_PHY), PHY_GEN2)
$(eval $(call add_define, PHY_GEN2))
PLAT_DDR_PHY_DIR		:= phy-gen2
ifeq (${APPLY_MAX_CDD},yes)
$(eval $(call add_define,NXP_APPLY_MAX_CDD))
endif
ifeq (${ERRATA_DDR_A011396}, 1)
$(eval $(call add_define,ERRATA_DDR_A011396))
endif
ifeq (${ERRATA_DDR_A050450}, 1)
$(eval $(call add_define,ERRATA_DDR_A050450))
endif
endif

ifeq ($(PLAT_DDR_PHY), PHY_GEN1)
PLAT_DDR_PHY_DIR		:= phy-gen1
ifeq (${ERRATA_DDR_A008511},1)
$(eval $(call add_define,ERRATA_DDR_A008511))
endif
ifeq (${ERRATA_DDR_A009803},1)
$(eval $(call add_define,ERRATA_DDR_A009803))
endif
ifeq (${ERRATA_DDR_A009942},1)
$(eval $(call add_define,ERRATA_DDR_A009942))
endif
ifeq (${ERRATA_DDR_A010165},1)
$(eval $(call add_define,ERRATA_DDR_A010165))
endif
endif

# Optional built-in self test after DDR initialization.
ifeq ($(DDR_BIST), yes)
$(eval $(call add_define, BIST_EN))
endif
# Debug/trace knobs for the DDR controller and PHY code.
ifeq ($(DDR_DEBUG), yes)
$(eval $(call add_define, DDR_DEBUG))
endif
ifeq ($(DDR_PHY_DEBUG), yes)
$(eval $(call add_define, DDR_PHY_DEBUG))
endif
ifeq ($(DEBUG_PHY_IO), yes)
$(eval $(call add_define, DEBUG_PHY_IO))
endif
ifeq ($(DEBUG_WARM_RESET), yes)
$(eval $(call add_define, DEBUG_WARM_RESET))
endif
ifeq ($(DEBUG_DDR_INPUT_CONFIG), yes)
$(eval $(call add_define, DEBUG_DDR_INPUT_CONFIG))
endif

# Controller sources plus the selected PHY backend.
DDR_CNTLR_SOURCES	:=	$(DDR_DRIVERS_PATH)/nxp-ddr/ddr.c \
				$(DDR_DRIVERS_PATH)/nxp-ddr/ddrc.c \
				$(DDR_DRIVERS_PATH)/nxp-ddr/dimm.c \
				$(DDR_DRIVERS_PATH)/nxp-ddr/regs.c \
				$(DDR_DRIVERS_PATH)/nxp-ddr/utility.c \
				$(DDR_DRIVERS_PATH)/$(PLAT_DDR_PHY_DIR)/phy.c

PLAT_INCLUDES		+=	-I$(DDR_DRIVERS_PATH)/nxp-ddr \
				-I$(DDR_DRIVERS_PATH)/include
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#include <errno.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <common/debug.h>
#include <ddr.h>
#include <drivers/delay_timer.h>
#include <immap.h>
/*
 * Memory-test control register (MTCR) values.  BIST_CR is the value
 * written to start the test; it includes the enable bit (BIST_CR_EN)
 * plus mode bits 0x60000 — presumably test-mode selection, TODO confirm
 * against the DDRC reference manual.
 */
#define BIST_CR		0x80060000
#define BIST_CR_EN	0x80000000
#define BIST_CR_STAT	0x00000001
/* CS0_CONFIG bit indicating controller interleaving is enabled */
#define CTLR_INTLV_MASK	0x20000000
/*
 * Whether to run the memory built-in self test after programming the
 * controller.  Defaults to the compile-time BIST_EN switch; weak so a
 * platform can override the decision at runtime.
 */
#pragma weak run_bist
bool run_bist(void)
{
#ifdef BIST_EN
	return true;
#else
	return false;
#endif
}
/*
 * Perform built-in test on memory using the controller's memory-test
 * engine (MTCR/MTP registers).
 *
 * ddr:     controller register block.
 * timeout: wait budget in units of 10ms.
 *
 * If controller interleaving is active, the CS bounds (and, with
 * CONFIG_DDR_ADDR_DEC, the highest address-decode mapping) are temporarily
 * rewritten so the test covers local memory only, then restored.
 *
 * Returns 0 on pass, -EIO on ECC error, timeout or test failure.
 */
int bist(const struct ccsr_ddr *ddr, int timeout)
{
	const unsigned int test_pattern[10] = {
		0xffffffff,
		0x00000000,
		0xaaaaaaaa,
		0x55555555,
		0xcccccccc,
		0x33333333,
		0x12345678,
		0xabcdef01,
		0xaa55aa55,
		0x55aa55aa
	};
	unsigned int mtcr, err_detect, err_sbe;
	unsigned int cs0_config;
	unsigned int csn_bnds[4];
	int ret = 0;
	uint32_t i;
#ifdef CONFIG_DDR_ADDR_DEC
	uint32_t dec_9 = ddr_in32(&ddr->dec[9]);
	uint32_t pos = 0U;
	uint32_t map_save = 0U;
	uint32_t temp32 = 0U;
	uint32_t map, shift, highest;
#endif

	cs0_config = ddr_in32(&ddr->csn_cfg[0]);
	if ((cs0_config & CTLR_INTLV_MASK) != 0U) {
		/* set bnds to non-interleaving */
		for (i = 0U; i < 4U; i++) {
			csn_bnds[i] = ddr_in32(&ddr->bnds[i].a);
			ddr_out32(&ddr->bnds[i].a,
				  (csn_bnds[i] & U(0xfffefffe)) >> 1U);
		}
		ddr_out32(&ddr->csn_cfg[0], cs0_config & ~CTLR_INTLV_MASK);
#ifdef CONFIG_DDR_ADDR_DEC
		if ((dec_9 & 0x1U) != 0U) {
			/* Find the highest-mapped decode bit to remap */
			highest = (dec_9 >> 26U) == U(0x3F) ? 0U : dec_9 >> 26U;
			pos = 37U;
			for (i = 0U; i < 36U; i++) { /* Go through all 37 */
				if ((i % 4U) == 0U) {
					temp32 = ddr_in32(&ddr->dec[i >> 2U]);
				}
				shift = (3U - i % 4U) * 8U + 2U;
				map = (temp32 >> shift) & U(0x3F);
				if (map > highest && map != U(0x3F)) {
					highest = map;
					pos = i;
				}
			}
			debug("\nFound highest position %d, mapping to %d, ",
			      pos, highest);
			map_save = ddr_in32(&ddr->dec[pos >> 2]);
			shift = (3U - pos % 4U) * 8U + 2U;
			debug("in dec[%d], bit %d (0x%x)\n",
			      pos >> 2U, shift, map_save);
			temp32 = map_save & ~(U(0x3F) << shift);
			temp32 |= 8U << shift;
			ddr_out32(&ddr->dec[pos >> 2U], temp32);
			/* Remapping quadruples the area to test */
			timeout <<= 2U;
			debug("Increase wait time to %d ms\n", timeout * 10);
		}
#endif
	}
	/* Load the test patterns and kick off the engine */
	for (i = 0U; i < 10U; i++) {
		ddr_out32(&ddr->mtp[i], test_pattern[i]);
	}
	mtcr = BIST_CR;
	ddr_out32(&ddr->mtcr, mtcr);
	/* Poll until the enable bit self-clears or the budget expires */
	do {
		mdelay(10);
		mtcr = ddr_in32(&ddr->mtcr);
	} while (timeout-- > 0 && ((mtcr & BIST_CR_EN) != 0));
	if (timeout <= 0) {
		ERROR("Timeout\n");
	} else {
		debug("Timer remains %d\n", timeout);
	}

	/* Any ECC event during the test is a failure */
	err_detect = ddr_in32(&ddr->err_detect);
	err_sbe = ddr_in32(&ddr->err_sbe);
	if (err_detect != 0U || ((err_sbe & U(0xffff)) != 0U)) {
		ERROR("ECC error detected\n");
		ret = -EIO;
	}

	/* Restore the interleaved configuration if it was modified */
	if ((cs0_config & CTLR_INTLV_MASK) != 0) {
		for (i = 0U; i < 4U; i++) {
			ddr_out32(&ddr->bnds[i].a, csn_bnds[i]);
		}
		ddr_out32(&ddr->csn_cfg[0], cs0_config);
#ifdef CONFIG_DDR_ADDR_DEC
		if ((dec_9 & U(0x1)) != 0U) {
			ddr_out32(&ddr->dec[pos >> 2], map_save);
		}
#endif
	}
	if ((mtcr & BIST_CR_STAT) != 0) {
		ERROR("Built-in self test failed\n");
		ret = -EIO;
	} else {
		/* Fixed typo: was "Build-in self test passed" */
		NOTICE("Built-in self test passed\n");
	}

	return ret;
}
/*
 * Dump every non-zero 32-bit word in the first 0x400 words of the DDR
 * controller register block.  Compiled out unless DDR_DEBUG is defined.
 */
void dump_ddrc(unsigned int *ddr)
{
#ifdef DDR_DEBUG
	unsigned int *reg = ddr;
	unsigned int * const end = ddr + U(0x400);
	unsigned long val;

	for (; reg < end; reg++) {
		val = ddr_in32(reg);
		if (val == 0U) { /* skip zeros */
			continue;
		}
		debug("*0x%lx = 0x%lx\n", (unsigned long)reg, val);
	}
#endif
}
#ifdef ERRATA_DDR_A009803
/*
 * Write 'value' to the register at 'ptr' and poll (100us steps, up to
 * ~100ms) until all bits in 'bits' read back clear.  A timeout is logged
 * but not returned to the caller.
 */
static void set_wait_for_bits_clear(const void *ptr,
				    unsigned int value,
				    unsigned int bits)
{
	int timeout = 1000;

	ddr_out32(ptr, value);
	do {
		udelay(100);
	} while (timeout-- > 0 && ((ddr_in32(ptr) & bits) != 0));

	if (timeout <= 0) {
		ERROR("wait for clear timeout.\n");
	}
}
#endif
#if (DDRC_NUM_CS > 4)
#error Invalid setting for DDRC_NUM_CS
#endif
/*
* If supported by the platform, writing to DDR controller takes two
* passes to deassert DDR reset to comply with JEDEC specs for RDIMMs.
*/
/*
 * Program one DDR controller with the precomputed register file and bring
 * the memory up.
 *
 * clk:     DDR clock rate in Hz (used for delay/timeout scaling).
 * regs:    register values computed by the configuration code.
 * ddr:     controller register block to program.
 * twopass: 0 = single-pass init; 1 = first pass only (stop after starting
 *          the clocks, before reset deassert); 2 = resume after reset has
 *          been deasserted (jumps straight to memory enable).
 *
 * Returns 0 on success or a negative error code.  Interspersed blocks
 * apply hardware errata workarounds selected at build time.
 */
int ddrc_set_regs(const unsigned long clk,
		  const struct ddr_cfg_regs *regs,
		  const struct ccsr_ddr *ddr,
		  int twopass)
{
	unsigned int i, bus_width;
	unsigned int temp_sdram_cfg;
	unsigned int total_mem_per_ctrl, total_mem_per_ctrl_adj;
	/* Non-zero when CS0 enables controller interleaving */
	const int mod_bnds = regs->cs[0].config & CTLR_INTLV_MASK;
	int timeout;
	int ret = 0;
#if defined(ERRATA_DDR_A009942) || defined(ERRATA_DDR_A010165)
	unsigned long ddr_freq;
	unsigned int tmp;
#ifdef ERRATA_DDR_A009942
	unsigned int check;
	unsigned int cpo_min = U(0xff);
	unsigned int cpo_max = 0U;
#endif
#endif

	if (twopass == 2U) {
		goto after_reset;
	}

	/* Set cdr1 first in case 0.9v VDD is enabled for some SoCs*/
	ddr_out32(&ddr->ddr_cdr1, regs->cdr[0]);

	ddr_out32(&ddr->sdram_clk_cntl, regs->clk_cntl);

	/*
	 * With controller interleaving, program halved bounds for now;
	 * the real bounds are restored at the end of initialization.
	 */
	for (i = 0U; i < DDRC_NUM_CS; i++) {
		if (mod_bnds != 0U) {
			ddr_out32(&ddr->bnds[i].a,
				  (regs->cs[i].bnds & U(0xfffefffe)) >> 1U);
		} else {
			ddr_out32(&ddr->bnds[i].a, regs->cs[i].bnds);
		}
		ddr_out32(&ddr->csn_cfg_2[i], regs->cs[i].config_2);
	}

	/* Timing parameters */
	ddr_out32(&ddr->timing_cfg_0, regs->timing_cfg[0]);
	ddr_out32(&ddr->timing_cfg_1, regs->timing_cfg[1]);
	ddr_out32(&ddr->timing_cfg_2, regs->timing_cfg[2]);
	ddr_out32(&ddr->timing_cfg_3, regs->timing_cfg[3]);
	ddr_out32(&ddr->timing_cfg_4, regs->timing_cfg[4]);
	ddr_out32(&ddr->timing_cfg_5, regs->timing_cfg[5]);
	ddr_out32(&ddr->timing_cfg_6, regs->timing_cfg[6]);
	ddr_out32(&ddr->timing_cfg_7, regs->timing_cfg[7]);
	ddr_out32(&ddr->timing_cfg_8, regs->timing_cfg[8]);
	ddr_out32(&ddr->timing_cfg_9, regs->timing_cfg[9]);
	ddr_out32(&ddr->zq_cntl, regs->zq_cntl);
	for (i = 0U; i < 4U; i++) {
		ddr_out32(&ddr->dq_map[i], regs->dq_map[i]);
	}
	ddr_out32(&ddr->sdram_cfg_3, regs->sdram_cfg[2]);
	/* SDRAM mode registers 1..16 */
	ddr_out32(&ddr->sdram_mode, regs->sdram_mode[0]);
	ddr_out32(&ddr->sdram_mode_2, regs->sdram_mode[1]);
	ddr_out32(&ddr->sdram_mode_3, regs->sdram_mode[2]);
	ddr_out32(&ddr->sdram_mode_4, regs->sdram_mode[3]);
	ddr_out32(&ddr->sdram_mode_5, regs->sdram_mode[4]);
	ddr_out32(&ddr->sdram_mode_6, regs->sdram_mode[5]);
	ddr_out32(&ddr->sdram_mode_7, regs->sdram_mode[6]);
	ddr_out32(&ddr->sdram_mode_8, regs->sdram_mode[7]);
	ddr_out32(&ddr->sdram_mode_9, regs->sdram_mode[8]);
	ddr_out32(&ddr->sdram_mode_10, regs->sdram_mode[9]);
	ddr_out32(&ddr->sdram_mode_11, regs->sdram_mode[10]);
	ddr_out32(&ddr->sdram_mode_12, regs->sdram_mode[11]);
	ddr_out32(&ddr->sdram_mode_13, regs->sdram_mode[12]);
	ddr_out32(&ddr->sdram_mode_14, regs->sdram_mode[13]);
	ddr_out32(&ddr->sdram_mode_15, regs->sdram_mode[14]);
	ddr_out32(&ddr->sdram_mode_16, regs->sdram_mode[15]);
	ddr_out32(&ddr->sdram_md_cntl, regs->md_cntl);
#ifdef ERRATA_DDR_A009663
	/* A009663: BSTOPRE must be programmed after init (see below) */
	ddr_out32(&ddr->sdram_interval,
		  regs->interval & ~SDRAM_INTERVAL_BSTOPRE);
#else
	ddr_out32(&ddr->sdram_interval, regs->interval);
#endif
	ddr_out32(&ddr->sdram_data_init, regs->data_init);
	if (regs->eor != 0) {
		ddr_out32(&ddr->eor, regs->eor);
	}

	ddr_out32(&ddr->wrlvl_cntl, regs->wrlvl_cntl[0]);
#ifndef NXP_DDR_EMU
	/*
	 * Skip these two registers if running on emulator
	 * because emulator doesn't have skew between bytes.
	 */
	if (regs->wrlvl_cntl[1] != 0) {
		ddr_out32(&ddr->ddr_wrlvl_cntl_2, regs->wrlvl_cntl[1]);
	}
	if (regs->wrlvl_cntl[2] != 0) {
		ddr_out32(&ddr->ddr_wrlvl_cntl_3, regs->wrlvl_cntl[2]);
	}
#endif

	ddr_out32(&ddr->ddr_sr_cntr, regs->ddr_sr_cntr);
	/* RDIMM control words */
	ddr_out32(&ddr->ddr_sdram_rcw_1, regs->sdram_rcw[0]);
	ddr_out32(&ddr->ddr_sdram_rcw_2, regs->sdram_rcw[1]);
	ddr_out32(&ddr->ddr_sdram_rcw_3, regs->sdram_rcw[2]);
	ddr_out32(&ddr->ddr_sdram_rcw_4, regs->sdram_rcw[3]);
	ddr_out32(&ddr->ddr_sdram_rcw_5, regs->sdram_rcw[4]);
	ddr_out32(&ddr->ddr_sdram_rcw_6, regs->sdram_rcw[5]);
	ddr_out32(&ddr->ddr_cdr2, regs->cdr[1]);
	ddr_out32(&ddr->sdram_cfg_2, regs->sdram_cfg[1]);
	ddr_out32(&ddr->init_addr, regs->init_addr);
	ddr_out32(&ddr->init_ext_addr, regs->init_ext_addr);

#ifdef ERRATA_DDR_A009803
	/* part 1 of 2 */
	if ((regs->sdram_cfg[1] & SDRAM_CFG2_AP_EN) != 0) {
		if ((regs->sdram_cfg[0] & SDRAM_CFG_RD_EN) != 0) {
			ddr_out32(&ddr->ddr_sdram_rcw_2,
				  regs->sdram_rcw[1] & ~0xf0);
		}
		ddr_out32(&ddr->err_disable,
			  regs->err_disable | DDR_ERR_DISABLE_APED);
	}
#else
	ddr_out32(&ddr->err_disable, regs->err_disable);
#endif
	ddr_out32(&ddr->err_int_en, regs->err_int_en);

	/* For DDRC 5.05 only */
	if (get_ddrc_version(ddr) == 0x50500) {
		ddr_out32(&ddr->tx_cfg[1], 0x1f1f1f1f);
		ddr_out32(&ddr->debug[3], 0x124a02c0);
	}

	for (i = 0U; i < 4U; i++) {
		if (regs->tx_cfg[i] != 0) {
			ddr_out32(&ddr->tx_cfg[i], regs->tx_cfg[i]);
		}
	}
	for (i = 0U; i < 64U; i++) {
		if (regs->debug[i] != 0) {
#ifdef ERRATA_DDR_A009942
			/* debug[28] is handled by the A009942 block below */
			if (i == 28U) {
				continue;
			}
#endif
			ddr_out32(&ddr->debug[i], regs->debug[i]);
		}
	}
#ifdef CONFIG_DDR_ADDR_DEC
	if ((regs->dec[9] & 1) != 0U) {
		for (i = 0U; i < 10U; i++) {
			ddr_out32(&ddr->dec[i], regs->dec[i]);
		}
		/* With interleaving, keep decoding off until restore */
		if (mod_bnds != 0) {
			debug("Disable address decoding\n");
			ddr_out32(&ddr->dec[9], 0);
		}
	}
#endif

#ifdef ERRATA_DDR_A008511
	/* Part 1 of 2 */
	/* This erraum only applies to verion 5.2.1 */
	if (get_ddrc_version(ddr) == 0x50200) {
		ERROR("Unsupported SoC.\n");
	} else if (get_ddrc_version(ddr) == 0x50201) {
		ddr_out32(&ddr->debug[37], (U(1) << 31));
		ddr_out32(&ddr->ddr_cdr2,
			  regs->cdr[1] | DDR_CDR2_VREF_TRAIN_EN);
	} else {
		debug("Erratum A008511 doesn't apply.\n");
	}
#endif

#ifdef ERRATA_DDR_A009942
	/* Frequency-dependent CPO override in debug[28] */
	ddr_freq = clk / 1000000U;
	tmp = ddr_in32(&ddr->debug[28]);
	tmp &= U(0xff0fff00);
	tmp |= ddr_freq <= 1333U ? U(0x0080006a) :
	       (ddr_freq <= 1600U ? U(0x0070006f) :
		(ddr_freq <= 1867U ? U(0x00700076) : U(0x0060007b)));
	if (regs->debug[28] != 0) {
		tmp &= ~0xff;
		tmp |= regs->debug[28] & 0xff;
	} else {
		WARN("Warning: Optimal CPO value not set.\n");
	}
	ddr_out32(&ddr->debug[28], tmp);
#endif

#ifdef ERRATA_DDR_A010165
	ddr_freq = clk / 1000000U;
	if ((ddr_freq > 1900) && (ddr_freq < 2300)) {
		tmp = ddr_in32(&ddr->debug[28]);
		ddr_out32(&ddr->debug[28], tmp | 0x000a0000);
	}
#endif

	/*
	 * For RDIMMs, JEDEC spec requires clocks to be stable before reset is
	 * deasserted. Clocks start when any chip select is enabled and clock
	 * control register is set. Because all DDR components are connected to
	 * one reset signal, this needs to be done in two steps. Step 1 is to
	 * get the clocks started. Step 2 resumes after reset signal is
	 * deasserted.
	 */
	if (twopass == 1) {
		udelay(200);
		return 0;
	}

	/* As per new sequence flow shall be write CSn_CONFIG registers needs to
	 * be set after all the other DDR controller registers are set, then poll
	 * for PHY_INIT_CMPLT = 1 , then wait at least 100us (micro seconds),
	 * then set the MEM_EN = 1
	 */
	for (i = 0U; i < DDRC_NUM_CS; i++) {
		if (mod_bnds != 0U && i == 0U) {
			ddr_out32(&ddr->csn_cfg[i],
				  (regs->cs[i].config & ~CTLR_INTLV_MASK));
		} else {
			ddr_out32(&ddr->csn_cfg[i], regs->cs[i].config);
		}
	}

after_reset:
	/* Set, but do not enable the memory */
	temp_sdram_cfg = regs->sdram_cfg[0];
	temp_sdram_cfg &= ~(SDRAM_CFG_MEM_EN);
	ddr_out32(&ddr->sdram_cfg, temp_sdram_cfg);

	if (get_ddrc_version(ddr) < U(0x50500)) {
		/*
		 * 500 painful micro-seconds must elapse between
		 * the DDR clock setup and the DDR config enable.
		 * DDR2 need 200 us, and DDR3 need 500 us from spec,
		 * we choose the max, that is 500 us for all of case.
		 */
		udelay(500);
		/* applied memory barrier */
		mb();
		isb();
	} else {
		/* wait for PHY complete */
		timeout = 40;
		while (((ddr_in32(&ddr->ddr_dsr2) & 0x4) != 0) &&
		       (timeout > 0)) {
			udelay(500);
			timeout--;
		}
		if (timeout <= 0) {
			printf("PHY handshake timeout, ddr_dsr2 = %x\n",
			       ddr_in32(&ddr->ddr_dsr2));
		} else {
			debug("PHY handshake completed, timer remains %d\n",
			      timeout);
		}
	}

	temp_sdram_cfg = ddr_in32(&ddr->sdram_cfg);
	/* Let the controller go */
	udelay(100);
	ddr_out32(&ddr->sdram_cfg, temp_sdram_cfg | SDRAM_CFG_MEM_EN);
	/* applied memory barrier */
	mb();
	isb();

	/*
	 * Sum up the memory size behind this controller, decoding the
	 * CS config fields (row/column/bank bits), in units of 64MB.
	 */
	total_mem_per_ctrl = 0;
	for (i = 0; i < DDRC_NUM_CS; i++) {
		if ((regs->cs[i].config & 0x80000000) == 0) {
			continue;
		}
		total_mem_per_ctrl += 1 << (
			((regs->cs[i].config >> 14) & 0x3) + 2 +
			((regs->cs[i].config >> 8) & 0x7) + 12 +
			((regs->cs[i].config >> 4) & 0x3) + 0 +
			((regs->cs[i].config >> 0) & 0x7) + 8 +
			((regs->sdram_cfg[2] >> 4) & 0x3) +
			3 - ((regs->sdram_cfg[0] >> 19) & 0x3) -
			26);	/* minus 26 (count of 64M) */
	}
	total_mem_per_ctrl_adj = total_mem_per_ctrl;
	/*
	 * total memory / bus width = transactions needed
	 * transactions needed / data rate = seconds
	 * to add plenty of buffer, double the time
	 * For example, 2GB on 666MT/s 64-bit bus takes about 402ms
	 * Let's wait for 800ms
	 */
	bus_width = 3 - ((ddr_in32(&ddr->sdram_cfg) & SDRAM_CFG_DBW_MASK)
			 >> SDRAM_CFG_DBW_SHIFT);
	timeout = ((total_mem_per_ctrl_adj << (6 - bus_width)) * 100 /
		   (clk >> 20)) << 2;
	total_mem_per_ctrl_adj >>= 4;	/* shift down to gb size */
	/* Wait for hardware data initialization (D_INIT) to finish */
	if ((ddr_in32(&ddr->sdram_cfg_2) & SDRAM_CFG2_D_INIT) != 0) {
		debug("total size %d GB\n", total_mem_per_ctrl_adj);
		debug("Need to wait up to %d ms\n", timeout * 10);

		do {
			mdelay(10);
		} while (timeout-- > 0 &&
			 ((ddr_in32(&ddr->sdram_cfg_2) & SDRAM_CFG2_D_INIT)) != 0);

		if (timeout <= 0) {
			if (ddr_in32(&ddr->debug[1]) & 0x3d00) {
				ERROR("Found training error(s): 0x%x\n",
				      ddr_in32(&ddr->debug[1]));
			}
			ERROR("Error: Waiting for D_INIT timeout.\n");
			return -EIO;
		}
	}

	/* Undo the temporary halved-bounds programming done above */
	if (mod_bnds != 0U) {
		debug("Restore original bnds\n");
		for (i = 0U; i < DDRC_NUM_CS; i++) {
			ddr_out32(&ddr->bnds[i].a, regs->cs[i].bnds);
		}
		ddr_out32(&ddr->csn_cfg[0], regs->cs[0].config);
#ifdef CONFIG_DDR_ADDR_DEC
		if ((regs->dec[9] & U(0x1)) != 0U) {
			debug("Restore address decoding\n");
			ddr_out32(&ddr->dec[9], regs->dec[9]);
		}
#endif
	}

#ifdef ERRATA_DDR_A009803
	/* Part 2 of 2 */
	if ((regs->sdram_cfg[1] & SDRAM_CFG2_AP_EN) != 0) {
		timeout = 400;
		do {
			mdelay(1);
		} while (timeout-- > 0 && ((ddr_in32(&ddr->debug[1]) & 0x2) == 0));

		if ((regs->sdram_cfg[0] & SDRAM_CFG_RD_EN) != 0) {
			for (i = 0U; i < DDRC_NUM_CS; i++) {
				if ((regs->cs[i].config & SDRAM_CS_CONFIG_EN) == 0) {
					continue;
				}
				set_wait_for_bits_clear(&ddr->sdram_md_cntl,
							MD_CNTL_MD_EN |
							MD_CNTL_CS_SEL(i) |
							0x070000ed,
							MD_CNTL_MD_EN);
				udelay(1);
			}
		}
		ddr_out32(&ddr->err_disable,
			  regs->err_disable & ~DDR_ERR_DISABLE_APED);
	}
#endif

#ifdef ERRATA_DDR_A009663
	/* Now the full interval (including BSTOPRE) can be programmed */
	ddr_out32(&ddr->sdram_interval, regs->interval);
#endif

#ifdef ERRATA_DDR_A009942
	timeout = 400;
	do {
		mdelay(1);
	} while (timeout-- > 0 && ((ddr_in32(&ddr->debug[1]) & 0x2) == 0));
	/* Read back the trained CPO values and check the sample point */
	tmp = (regs->sdram_cfg[0] >> 19) & 0x3;
	check = (tmp == DDR_DBUS_64) ? 4 : ((tmp == DDR_DBUS_32) ? 2 : 1);
	for (i = 0; i < check; i++) {
		tmp = ddr_in32(&ddr->debug[9 + i]);
		debug("Reading debug[%d] as 0x%x\n", i + 9, tmp);
		cpo_min = min(cpo_min,
			      min((tmp >> 24) & 0xff, (tmp >> 8) & 0xff));
		cpo_max = max(cpo_max,
			      max((tmp >> 24) & 0xff, (tmp >> 8) & 0xff));
	}
	if ((regs->sdram_cfg[0] & SDRAM_CFG_ECC_EN) != 0) {
		tmp = ddr_in32(&ddr->debug[13]);
		cpo_min = min(cpo_min, (tmp >> 24) & 0xff);
		cpo_max = max(cpo_max, (tmp >> 24) & 0xff);
	}
	debug("cpo_min 0x%x\n", cpo_min);
	debug("cpo_max 0x%x\n", cpo_max);
	tmp = ddr_in32(&ddr->debug[28]);
	debug("debug[28] 0x%x\n", tmp);
	if ((cpo_min + 0x3B) < (tmp & 0xff)) {
		WARN("Warning: A009942 requires setting cpo_sample to 0x%x\n",
		     (cpo_min + cpo_max) / 2 + 0x27);
	} else {
		debug("Optimal cpo_sample 0x%x\n",
		      (cpo_min + cpo_max) / 2 + 0x27);
	}
#endif
	/* Optionally run the memory BIST, unless training errors exist */
	if (run_bist() != 0) {
		if ((ddr_in32(&ddr->debug[1]) &
		     ((get_ddrc_version(ddr) == 0x50500) ? 0x3c00 : 0x3d00)) != 0) {
			ERROR("Found training error(s): 0x%x\n",
			      ddr_in32(&ddr->debug[1]));
			return -EIO;
		}
		INFO("Running built-in self test ...\n");
		/* give it 10x time to cover whole memory */
		timeout = ((total_mem_per_ctrl << (6 - bus_width)) *
			   100 / (clk >> 20)) * 10;
		INFO("\tWait up to %d ms\n", timeout * 10);
		ret = bist(ddr, timeout);
	}
	dump_ddrc((void *)ddr);

	return ret;
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment