Commit b35ce0c4 authored by Pankaj Gupta

nxp: ddr driver enablement for nxp layerscape soc



DDR driver for NXP layerscape SoC(s):
 - lx2160aqds
 - lx2162aqds
 - lx2160ardb
 - Other boards with SoCs such as ls1046a, ls1043a, etc.
	-- These other boards are not verified yet.
Signed-off-by: Rajesh Bhagat <rajesh.bhagat@nxp.com>
Signed-off-by: York Sun <york.sun@nxp.com>
Signed-off-by: Udit Agarwal <udit.agarwal@nxp.com>
Signed-off-by: Pankaj Gupta <pankaj.gupta@nxp.com>
Change-Id: Ic84a63cb30eba054f432d479862cd4d1097cbbaf
parent c6d9fdbc
#
# Copyright 2021 NXP
#
# SPDX-License-Identifier: BSD-3-Clause
#
#-----------------------------------------------------------------------------
# MMDC ddr cntlr driver files
DDR_DRIVERS_PATH := drivers/nxp/ddr
DDR_CNTLR_SOURCES := ${DDR_DRIVERS_PATH}/fsl-mmdc/fsl_mmdc.c \
${DDR_DRIVERS_PATH}/nxp-ddr/utility.c \
${DDR_DRIVERS_PATH}/nxp-ddr/ddr.c \
${DDR_DRIVERS_PATH}/nxp-ddr/ddrc.c
PLAT_INCLUDES += -I$(DDR_DRIVERS_PATH)/include \
-I$(DDR_DRIVERS_PATH)/fsl-mmdc
#------------------------------------------------
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
/*
* Generic driver for the Freescale MMDC (Multi Mode DDR Controller).
*/
#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <common/debug.h>
#include "ddr_io.h"
#include <drivers/delay_timer.h>
#include <fsl_mmdc.h>
static void set_wait_for_bits_clear(void *ptr, unsigned int value,
unsigned int bits)
{
int timeout = 1000;
ddr_out32(ptr, value);
while (((ddr_in32(ptr) & bits) != 0) && (timeout > 0)) {
udelay(100);
timeout--;
}
if (timeout <= 0) {
INFO("Error: %llx", (unsigned long long)ptr);
INFO(" wait for clear timeout.\n");
}
}
void mmdc_init(const struct fsl_mmdc_info *priv, uintptr_t nxp_ddr_addr)
{
struct mmdc_regs *mmdc = (struct mmdc_regs *)nxp_ddr_addr;
unsigned int tmp;
/* 1. set configuration request */
ddr_out32(&mmdc->mdscr, MDSCR_ENABLE_CON_REQ);
/* 2. configure the desired timing parameters */
ddr_out32(&mmdc->mdotc, priv->mdotc);
ddr_out32(&mmdc->mdcfg0, priv->mdcfg0);
ddr_out32(&mmdc->mdcfg1, priv->mdcfg1);
ddr_out32(&mmdc->mdcfg2, priv->mdcfg2);
/* 3. configure DDR type and other miscellaneous parameters */
ddr_out32(&mmdc->mdmisc, priv->mdmisc);
ddr_out32(&mmdc->mpmur0, MMDC_MPMUR0_FRC_MSR);
ddr_out32(&mmdc->mdrwd, priv->mdrwd);
ddr_out32(&mmdc->mpodtctrl, priv->mpodtctrl);
/* 4. configure the required delay while leaving reset */
ddr_out32(&mmdc->mdor, priv->mdor);
/* 5. configure DDR physical parameters */
/* set row/column address width, burst length, data bus width */
tmp = priv->mdctl & ~(MDCTL_SDE0 | MDCTL_SDE1);
ddr_out32(&mmdc->mdctl, tmp);
/* configure address space partition */
ddr_out32(&mmdc->mdasp, priv->mdasp);
/* 6. perform a ZQ calibration - not needed here, done in step 8b */
/* 7. enable MMDC with the desired chip select */
#if (DDRC_NUM_CS == 1)
ddr_out32(&mmdc->mdctl, tmp | MDCTL_SDE0);
#elif (DDRC_NUM_CS == 2)
ddr_out32(&mmdc->mdctl, tmp | MDCTL_SDE0 | MDCTL_SDE1);
#else
#error "Unsupported DDRC_NUM_CS"
#endif
/* 8a. dram init sequence: update MRs for ZQ, ODT, PRE, etc */
ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(8) |
MDSCR_ENABLE_CON_REQ |
CMD_LOAD_MODE_REG |
CMD_BANK_ADDR_2);
ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(0) |
MDSCR_ENABLE_CON_REQ |
CMD_LOAD_MODE_REG |
CMD_BANK_ADDR_3);
ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(4) |
MDSCR_ENABLE_CON_REQ |
CMD_LOAD_MODE_REG |
CMD_BANK_ADDR_1);
ddr_out32(&mmdc->mdscr, CMD_ADDR_MSB_MR_OP(0x19) |
CMD_ADDR_LSB_MR_ADDR(0x30) |
MDSCR_ENABLE_CON_REQ |
CMD_LOAD_MODE_REG | CMD_BANK_ADDR_0);
/* 8b. ZQ calibration */
ddr_out32(&mmdc->mdscr, CMD_ADDR_MSB_MR_OP(0x4) |
MDSCR_ENABLE_CON_REQ |
CMD_ZQ_CALIBRATION | CMD_BANK_ADDR_0);
set_wait_for_bits_clear(&mmdc->mpzqhwctrl, priv->mpzqhwctrl,
MPZQHWCTRL_ZQ_HW_FORCE);
/* 9a. calibrations now, wr lvl */
ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(0x84) | MDSCR_WL_EN |
MDSCR_ENABLE_CON_REQ |
CMD_LOAD_MODE_REG | CMD_BANK_ADDR_1);
set_wait_for_bits_clear(&mmdc->mpwlgcr, MPWLGCR_HW_WL_EN,
MPWLGCR_HW_WL_EN);
mdelay(1);
ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(4) |
MDSCR_ENABLE_CON_REQ |
CMD_LOAD_MODE_REG | CMD_BANK_ADDR_1);
ddr_out32(&mmdc->mdscr, MDSCR_ENABLE_CON_REQ);
mdelay(1);
/* 9b. read DQS gating calibration */
ddr_out32(&mmdc->mdscr, CMD_ADDR_MSB_MR_OP(4) | MDSCR_ENABLE_CON_REQ |
CMD_PRECHARGE_BANK_OPEN | CMD_BANK_ADDR_0);
ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(4) | MDSCR_ENABLE_CON_REQ |
CMD_LOAD_MODE_REG | CMD_BANK_ADDR_3);
ddr_out32(&mmdc->mppdcmpr2, MPPDCMPR2_MPR_COMPARE_EN);
/* set absolute read delay offset */
if (priv->mprddlctl != 0) {
ddr_out32(&mmdc->mprddlctl, priv->mprddlctl);
} else {
ddr_out32(&mmdc->mprddlctl, MMDC_MPRDDLCTL_DEFAULT_DELAY);
}
set_wait_for_bits_clear(&mmdc->mpdgctrl0,
AUTO_RD_DQS_GATING_CALIBRATION_EN,
AUTO_RD_DQS_GATING_CALIBRATION_EN);
ddr_out32(&mmdc->mdscr, MDSCR_ENABLE_CON_REQ | CMD_LOAD_MODE_REG |
CMD_BANK_ADDR_3);
/* 9c. read calibration */
ddr_out32(&mmdc->mdscr, CMD_ADDR_MSB_MR_OP(4) | MDSCR_ENABLE_CON_REQ |
CMD_PRECHARGE_BANK_OPEN | CMD_BANK_ADDR_0);
ddr_out32(&mmdc->mdscr, CMD_ADDR_LSB_MR_ADDR(4) | MDSCR_ENABLE_CON_REQ |
CMD_LOAD_MODE_REG | CMD_BANK_ADDR_3);
ddr_out32(&mmdc->mppdcmpr2, MPPDCMPR2_MPR_COMPARE_EN);
set_wait_for_bits_clear(&mmdc->mprddlhwctl,
MPRDDLHWCTL_AUTO_RD_CALIBRATION_EN,
MPRDDLHWCTL_AUTO_RD_CALIBRATION_EN);
ddr_out32(&mmdc->mdscr, MDSCR_ENABLE_CON_REQ | CMD_LOAD_MODE_REG |
CMD_BANK_ADDR_3);
/* 10. configure power-down, self-refresh entry, exit parameters */
ddr_out32(&mmdc->mdpdc, priv->mdpdc);
ddr_out32(&mmdc->mapsr, MMDC_MAPSR_PWR_SAV_CTRL_STAT);
/* 11. ZQ config again? do nothing here */
/* 12. refresh scheme */
set_wait_for_bits_clear(&mmdc->mdref, priv->mdref,
MDREF_START_REFRESH);
/* 13. disable CON_REQ */
ddr_out32(&mmdc->mdscr, MDSCR_DISABLE_CFG_REQ);
}
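/*
 * Usage sketch (illustrative, not part of this file): a board supplies its
 * own fsl_mmdc_info with values derived from the SDRAM datasheet and board
 * layout, then calls mmdc_init() once during boot. The zero values below
 * are placeholders, and NXP_DDR_ADDR is assumed to be the MMDC register
 * base defined by the platform.
 */
static const struct fsl_mmdc_info board_mmdc_cfg = {
	.mdctl = 0U,	/* placeholder: row/column width, bus width */
	.mdcfg0 = 0U,	/* placeholder: timing parameters from the datasheet */
	/* ... remaining fields filled in the same way ... */
};

void board_ddr_init(void)
{
	mmdc_init(&board_mmdc_cfg, NXP_DDR_ADDR);
}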
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef FSL_MMDC_H
#define FSL_MMDC_H
/* PHY Write Leveling Configuration and Error Status Register (MPWLGCR) */
#define MPWLGCR_HW_WL_EN (1 << 0)
/* PHY Pre-defined Compare and CA delay-line Configuration (MPPDCMPR2) */
#define MPPDCMPR2_MPR_COMPARE_EN (1 << 0)
/* MMDC PHY Read DQS gating control register 0 (MPDGCTRL0) */
#define AUTO_RD_DQS_GATING_CALIBRATION_EN (1 << 28)
/* MMDC PHY Read Delay HW Calibration Control Register (MPRDDLHWCTL) */
#define MPRDDLHWCTL_AUTO_RD_CALIBRATION_EN (1 << 4)
/* MMDC Core Power Saving Control and Status Register (MMDC_MAPSR) */
#define MMDC_MAPSR_PWR_SAV_CTRL_STAT 0x00001067
/* MMDC Core Refresh Control Register (MMDC_MDREF) */
#define MDREF_START_REFRESH (1 << 0)
/* MMDC Core Special Command Register (MDSCR) */
#define CMD_ADDR_MSB_MR_OP(x) (x << 24)
#define CMD_ADDR_LSB_MR_ADDR(x) (x << 16)
#define MDSCR_DISABLE_CFG_REQ (0 << 15)
#define MDSCR_ENABLE_CON_REQ (1 << 15)
#define MDSCR_CON_ACK (1 << 14)
#define MDSCR_WL_EN (1 << 9)
#define CMD_NORMAL (0 << 4)
#define CMD_PRECHARGE (1 << 4)
#define CMD_AUTO_REFRESH (2 << 4)
#define CMD_LOAD_MODE_REG (3 << 4)
#define CMD_ZQ_CALIBRATION (4 << 4)
#define CMD_PRECHARGE_BANK_OPEN (5 << 4)
#define CMD_MRR (6 << 4)
#define CMD_BANK_ADDR_0 0x0
#define CMD_BANK_ADDR_1 0x1
#define CMD_BANK_ADDR_2 0x2
#define CMD_BANK_ADDR_3 0x3
#define CMD_BANK_ADDR_4 0x4
#define CMD_BANK_ADDR_5 0x5
#define CMD_BANK_ADDR_6 0x6
#define CMD_BANK_ADDR_7 0x7
/* MMDC Core Control Register (MDCTL) */
#define MDCTL_SDE0 (U(1) << 31)
#define MDCTL_SDE1 (1 << 30)
/* MMDC PHY ZQ HW control register (MMDC_MPZQHWCTRL) */
#define MPZQHWCTRL_ZQ_HW_FORCE (1 << 16)
/* MMDC PHY Measure Unit Register (MMDC_MPMUR0) */
#define MMDC_MPMUR0_FRC_MSR (1 << 11)
/* MMDC PHY Read delay-lines Configuration Register (MMDC_MPRDDLCTL) */
/* default 64 for a quarter cycle delay */
#define MMDC_MPRDDLCTL_DEFAULT_DELAY 0x40404040
/* MMDC Registers */
struct mmdc_regs {
unsigned int mdctl;
unsigned int mdpdc;
unsigned int mdotc;
unsigned int mdcfg0;
unsigned int mdcfg1;
unsigned int mdcfg2;
unsigned int mdmisc;
unsigned int mdscr;
unsigned int mdref;
unsigned int res1[2];
unsigned int mdrwd;
unsigned int mdor;
unsigned int mdmrr;
unsigned int mdcfg3lp;
unsigned int mdmr4;
unsigned int mdasp;
unsigned int res2[239];
unsigned int maarcr;
unsigned int mapsr;
unsigned int maexidr0;
unsigned int maexidr1;
unsigned int madpcr0;
unsigned int madpcr1;
unsigned int madpsr0;
unsigned int madpsr1;
unsigned int madpsr2;
unsigned int madpsr3;
unsigned int madpsr4;
unsigned int madpsr5;
unsigned int masbs0;
unsigned int masbs1;
unsigned int res3[2];
unsigned int magenp;
unsigned int res4[239];
unsigned int mpzqhwctrl;
unsigned int mpzqswctrl;
unsigned int mpwlgcr;
unsigned int mpwldectrl0;
unsigned int mpwldectrl1;
unsigned int mpwldlst;
unsigned int mpodtctrl;
unsigned int mprddqby0dl;
unsigned int mprddqby1dl;
unsigned int mprddqby2dl;
unsigned int mprddqby3dl;
unsigned int mpwrdqby0dl;
unsigned int mpwrdqby1dl;
unsigned int mpwrdqby2dl;
unsigned int mpwrdqby3dl;
unsigned int mpdgctrl0;
unsigned int mpdgctrl1;
unsigned int mpdgdlst0;
unsigned int mprddlctl;
unsigned int mprddlst;
unsigned int mpwrdlctl;
unsigned int mpwrdlst;
unsigned int mpsdctrl;
unsigned int mpzqlp2ctl;
unsigned int mprddlhwctl;
unsigned int mpwrdlhwctl;
unsigned int mprddlhwst0;
unsigned int mprddlhwst1;
unsigned int mpwrdlhwst0;
unsigned int mpwrdlhwst1;
unsigned int mpwlhwerr;
unsigned int mpdghwst0;
unsigned int mpdghwst1;
unsigned int mpdghwst2;
unsigned int mpdghwst3;
unsigned int mppdcmpr1;
unsigned int mppdcmpr2;
unsigned int mpswdar0;
unsigned int mpswdrdr0;
unsigned int mpswdrdr1;
unsigned int mpswdrdr2;
unsigned int mpswdrdr3;
unsigned int mpswdrdr4;
unsigned int mpswdrdr5;
unsigned int mpswdrdr6;
unsigned int mpswdrdr7;
unsigned int mpmur0;
unsigned int mpwrcadl;
unsigned int mpdccr;
};
struct fsl_mmdc_info {
unsigned int mdctl;
unsigned int mdpdc;
unsigned int mdotc;
unsigned int mdcfg0;
unsigned int mdcfg1;
unsigned int mdcfg2;
unsigned int mdmisc;
unsigned int mdref;
unsigned int mdrwd;
unsigned int mdor;
unsigned int mdasp;
unsigned int mpodtctrl;
unsigned int mpzqhwctrl;
unsigned int mprddlctl;
};
void mmdc_init(const struct fsl_mmdc_info *priv, uintptr_t nxp_ddr_addr);
#endif /* FSL_MMDC_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DDR_H
#define DDR_H
#include "ddr_io.h"
#include "dimm.h"
#include "immap.h"
#ifndef DDRC_NUM_CS
#define DDRC_NUM_CS 4
#endif
/*
* These are fixed maximums, irrespective of the number of DDR controllers
* or DIMMs actually used:
* Max controllers = 2
* Max number of DIMMs per controller = 2
* Max number of CS = 4
* Not to be changed.
*/
#define MAX_DDRC_NUM 2
#define MAX_DIMM_NUM 2
#define MAX_CS_NUM 4
#include "opts.h"
#include "regs.h"
#include "utility.h"
#ifdef DDR_DEBUG
#define debug(...) INFO(__VA_ARGS__)
#else
#define debug(...) VERBOSE(__VA_ARGS__)
#endif
#ifndef DDRC_NUM_DIMM
#define DDRC_NUM_DIMM 1
#endif
#define CONFIG_CS_PER_SLOT \
(DDRC_NUM_CS / DDRC_NUM_DIMM)
/* Record of register values computed */
struct ddr_cfg_regs {
struct {
unsigned int bnds;
unsigned int config;
unsigned int config_2;
} cs[MAX_CS_NUM];
unsigned int dec[10];
unsigned int timing_cfg[10];
unsigned int sdram_cfg[3];
unsigned int sdram_mode[16];
unsigned int md_cntl;
unsigned int interval;
unsigned int data_init;
unsigned int clk_cntl;
unsigned int init_addr;
unsigned int init_ext_addr;
unsigned int zq_cntl;
unsigned int wrlvl_cntl[3];
unsigned int ddr_sr_cntr;
unsigned int sdram_rcw[6];
unsigned int dq_map[4];
unsigned int eor;
unsigned int cdr[2];
unsigned int err_disable;
unsigned int err_int_en;
unsigned int tx_cfg[4];
unsigned int debug[64];
};
struct ddr_conf {
int dimm_in_use[MAX_DIMM_NUM];
int cs_in_use; /* bitmask, bit 0 for cs0, bit 1 for cs1, etc. */
int cs_on_dimm[MAX_DIMM_NUM]; /* bitmask */
unsigned long long cs_base_addr[MAX_CS_NUM];
unsigned long long cs_size[MAX_CS_NUM];
unsigned long long base_addr;
unsigned long long total_mem;
};
struct ddr_info {
unsigned long clk;
unsigned long long mem_base;
unsigned int num_ctlrs;
unsigned int dimm_on_ctlr;
struct dimm_params dimm;
struct memctl_opt opt;
struct ddr_conf conf;
struct ddr_cfg_regs ddr_reg;
struct ccsr_ddr *ddr[MAX_DDRC_NUM];
uint16_t *phy[MAX_DDRC_NUM];
int *spd_addr;
unsigned int ip_rev;
uintptr_t phy_gen2_fw_img_buf;
void *img_loadr;
int warm_boot_flag;
};
struct rc_timing {
unsigned int speed_bin;
unsigned int clk_adj;
unsigned int wrlvl;
};
struct board_timing {
unsigned int rc;
struct rc_timing const *p;
unsigned int add1;
unsigned int add2;
};
enum warm_boot {
DDR_COLD_BOOT = 0,
DDR_WARM_BOOT = 1,
DDR_WRM_BOOT_NT_SUPPORTED = -1,
};
int disable_unused_ddrc(struct ddr_info *priv, int mask,
uintptr_t nxp_ccn_hn_f0_addr);
int ddr_board_options(struct ddr_info *priv);
int compute_ddrc(const unsigned long clk,
const struct memctl_opt *popts,
const struct ddr_conf *conf,
struct ddr_cfg_regs *ddr,
const struct dimm_params *dimm_params,
const unsigned int ip_rev);
int compute_ddr_phy(struct ddr_info *priv);
int ddrc_set_regs(const unsigned long clk,
const struct ddr_cfg_regs *regs,
const struct ccsr_ddr *ddr,
int twopass);
int cal_board_params(struct ddr_info *priv,
const struct board_timing *dimm,
int len);
/* return bit mask of used DIMM(s) */
int ddr_get_ddr_params(struct dimm_params *pdimm, struct ddr_conf *conf);
long long dram_init(struct ddr_info *priv
#if defined(NXP_HAS_CCN504) || defined(NXP_HAS_CCN508)
, uintptr_t nxp_ccn_hn_f0_addr
#endif
);
long long board_static_ddr(struct ddr_info *info);
#endif /* DDR_H */
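/*
 * Usage sketch for the non-CCN build (NXP_HAS_CCN504/508 undefined): a
 * platform populates a ddr_info instance and calls dram_init(), which
 * returns the total memory size or a negative value on error. The field
 * values and the NXP_DDR_ADDR base below are illustrative assumptions
 * only; real platforms also fill mem_base, spd_addr, phy, and so on.
 */
static struct ddr_info info;

long long board_dram_init(void)
{
	info.num_ctlrs = 1U;		/* assumption: one controller */
	info.dimm_on_ctlr = 1U;		/* assumption: one DIMM per controller */
	info.clk = 1600000000UL;	/* placeholder DDR data rate */
	info.ddr[0] = (struct ccsr_ddr *)NXP_DDR_ADDR;

	return dram_init(&info);
}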
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DDR_IO_H
#define DDR_IO_H
#include <endian.h>
#include <lib/mmio.h>
#define min(a, b) (((a) > (b)) ? (b) : (a))
#define max(a, b) (((a) > (b)) ? (a) : (b))
/* macro for memory barrier */
#define mb() asm volatile("dsb sy" : : : "memory")
#ifdef NXP_DDR_BE
#define ddr_in32(a) bswap32(mmio_read_32((uintptr_t)(a)))
#define ddr_out32(a, v) mmio_write_32((uintptr_t)(a),\
bswap32(v))
#elif defined(NXP_DDR_LE)
#define ddr_in32(a) mmio_read_32((uintptr_t)(a))
#define ddr_out32(a, v) mmio_write_32((uintptr_t)(a), v)
#else
#error Please define CCSR DDR register endianness
#endif
#define ddr_setbits32(a, v) ddr_out32((a), ddr_in32(a) | (v))
#define ddr_clrbits32(a, v) ddr_out32((a), ddr_in32(a) & ~(v))
#define ddr_clrsetbits32(a, c, s) ddr_out32((a), (ddr_in32(a) & ~(c)) \
| (s))
#endif /* DDR_IO_H */
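/*
 * Usage sketch: ddr_in32()/ddr_out32() hide the CCSR DDR endianness
 * selected at build time, and the bit helpers build on them for
 * read-modify-write sequences. ccsr_ddr and SDRAM_CFG2_FRC_SR come from
 * immap.h/regs.h in this driver; the function itself is illustrative.
 */
static inline void ddr_force_self_refresh(struct ccsr_ddr *ddr, int enter)
{
	if (enter != 0) {
		ddr_setbits32(&ddr->sdram_cfg_2, SDRAM_CFG2_FRC_SR);
	} else {
		ddr_clrbits32(&ddr->sdram_cfg_2, SDRAM_CFG2_FRC_SR);
	}
}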
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DIMM_H
#define DIMM_H
#define SPD_MEMTYPE_DDR4 0x0C
#define DDR4_SPD_MODULETYPE_MASK 0x0f
#define DDR4_SPD_MODULETYPE_EXT 0x00
#define DDR4_SPD_RDIMM 0x01
#define DDR4_SPD_UDIMM 0x02
#define DDR4_SPD_SO_DIMM 0x03
#define DDR4_SPD_LRDIMM 0x04
#define DDR4_SPD_MINI_RDIMM 0x05
#define DDR4_SPD_MINI_UDIMM 0x06
#define DDR4_SPD_72B_SO_RDIMM 0x08
#define DDR4_SPD_72B_SO_UDIMM 0x09
#define DDR4_SPD_16B_SO_DIMM 0x0c
#define DDR4_SPD_32B_SO_DIMM 0x0d
#define SPD_SPA0_ADDRESS 0x36
#define SPD_SPA1_ADDRESS 0x37
#define spd_to_ps(mtb, ftb) \
((mtb) * pdimm->mtb_ps + ((ftb) * pdimm->ftb_10th_ps) / 10)
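/*
 * Worked example: with the DDR4 medium timebase of 125 ps (pdimm->mtb_ps
 * == 125) and the 1 ps fine timebase (pdimm->ftb_10th_ps == 10),
 * spd_to_ps(100, -20) evaluates to 100 * 125 + (-20 * 10) / 10
 * = 12500 - 20 = 12480 ps. pdimm must be in scope at the call site.
 */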
#ifdef DDR_DEBUG
#define dump_spd(spd, len) { \
register int i; \
register unsigned char *buf = (void *)(spd); \
\
for (i = 0; i < (len); i++) { \
print_uint(i); \
puts("\t: 0x"); \
print_hex(buf[i]); \
puts("\n"); \
} \
}
#else
#define dump_spd(spd, len) {}
#endif
/* From JEDEC Standard No. 21-C release 23A */
struct ddr4_spd {
/* General Section: Bytes 0-127 */
unsigned char info_size_crc; /* 0 # bytes */
unsigned char spd_rev; /* 1 SPD Revision */
unsigned char mem_type; /* 2 Key Byte / mem type */
unsigned char module_type; /* 3 Key Byte / Module Type */
unsigned char density_banks; /* 4 Density and Banks */
unsigned char addressing; /* 5 Addressing */
unsigned char package_type; /* 6 Package type */
unsigned char opt_feature; /* 7 Optional features */
unsigned char thermal_ref; /* 8 Thermal and refresh */
unsigned char oth_opt_features; /* 9 Other optional features */
unsigned char res_10; /* 10 Reserved */
unsigned char module_vdd; /* 11 Module nominal voltage */
unsigned char organization; /* 12 Module Organization */
unsigned char bus_width; /* 13 Module Memory Bus Width */
unsigned char therm_sensor; /* 14 Module Thermal Sensor */
unsigned char ext_type; /* 15 Extended module type */
unsigned char res_16;
unsigned char timebases; /* 17 MTb and FTB */
unsigned char tck_min; /* 18 tCKAVGmin */
unsigned char tck_max; /* 19 TCKAVGmax */
unsigned char caslat_b1; /* 20 CAS latencies, 1st byte */
unsigned char caslat_b2; /* 21 CAS latencies, 2nd byte */
unsigned char caslat_b3; /* 22 CAS latencies, 3rd byte */
unsigned char caslat_b4; /* 23 CAS latencies, 4th byte */
unsigned char taa_min; /* 24 Min CAS Latency Time */
unsigned char trcd_min; /* 25 Min RAS# to CAS# Delay Time */
unsigned char trp_min; /* 26 Min Row Precharge Delay Time */
unsigned char tras_trc_ext; /* 27 Upper Nibbles for tRAS and tRC */
unsigned char tras_min_lsb; /* 28 tRASmin, lsb */
unsigned char trc_min_lsb; /* 29 tRCmin, lsb */
unsigned char trfc1_min_lsb; /* 30 Min Refresh Recovery Delay Time */
unsigned char trfc1_min_msb; /* 31 Min Refresh Recovery Delay Time */
unsigned char trfc2_min_lsb; /* 32 Min Refresh Recovery Delay Time */
unsigned char trfc2_min_msb; /* 33 Min Refresh Recovery Delay Time */
unsigned char trfc4_min_lsb; /* 34 Min Refresh Recovery Delay Time */
unsigned char trfc4_min_msb; /* 35 Min Refresh Recovery Delay Time */
unsigned char tfaw_msb; /* 36 Upper Nibble for tFAW */
unsigned char tfaw_min; /* 37 tFAW, lsb */
unsigned char trrds_min; /* 38 tRRD_Smin, MTB */
unsigned char trrdl_min; /* 39 tRRD_Lmin, MTB */
unsigned char tccdl_min; /* 40 tCCD_Lmin, MTB */
unsigned char res_41[60-41]; /* 41 Reserved */
unsigned char mapping[78-60]; /* 60~77 Connector to SDRAM bit map */
unsigned char res_78[117-78]; /* 78~116, Reserved */
signed char fine_tccdl_min; /* 117 Fine offset for tCCD_Lmin */
signed char fine_trrdl_min; /* 118 Fine offset for tRRD_Lmin */
signed char fine_trrds_min; /* 119 Fine offset for tRRD_Smin */
signed char fine_trc_min; /* 120 Fine offset for tRCmin */
signed char fine_trp_min; /* 121 Fine offset for tRPmin */
signed char fine_trcd_min; /* 122 Fine offset for tRCDmin */
signed char fine_taa_min; /* 123 Fine offset for tAAmin */
signed char fine_tck_max; /* 124 Fine offset for tCKAVGmax */
signed char fine_tck_min; /* 125 Fine offset for tCKAVGmin */
/* CRC: Bytes 126-127 */
unsigned char crc[2]; /* 126-127 SPD CRC */
/* Module-Specific Section: Bytes 128-255 */
union {
struct {
/* 128 (Unbuffered) Module Nominal Height */
unsigned char mod_height;
/* 129 (Unbuffered) Module Maximum Thickness */
unsigned char mod_thickness;
/* 130 (Unbuffered) Reference Raw Card Used */
unsigned char ref_raw_card;
/* 131 (Unbuffered) Address Mapping from
* Edge Connector to DRAM
*/
unsigned char addr_mapping;
/* 132~253 (Unbuffered) Reserved */
unsigned char res_132[254-132];
/* 254~255 CRC */
unsigned char crc[2];
} unbuffered;
struct {
/* 128 (Registered) Module Nominal Height */
unsigned char mod_height;
/* 129 (Registered) Module Maximum Thickness */
unsigned char mod_thickness;
/* 130 (Registered) Reference Raw Card Used */
unsigned char ref_raw_card;
/* 131 DIMM Module Attributes */
unsigned char modu_attr;
/* 132 RDIMM Thermal Heat Spreader Solution */
unsigned char thermal;
/* 133 Register Manufacturer ID Code, LSB */
unsigned char reg_id_lo;
/* 134 Register Manufacturer ID Code, MSB */
unsigned char reg_id_hi;
/* 135 Register Revision Number */
unsigned char reg_rev;
/* 136 Address mapping from register to DRAM */
unsigned char reg_map;
unsigned char ca_stren;
unsigned char clk_stren;
/* 139~253 Reserved */
unsigned char res_139[254-139];
/* 254~255 CRC */
unsigned char crc[2];
} registered;
struct {
/* 128 (Loadreduced) Module Nominal Height */
unsigned char mod_height;
/* 129 (Loadreduced) Module Maximum Thickness */
unsigned char mod_thickness;
/* 130 (Loadreduced) Reference Raw Card Used */
unsigned char ref_raw_card;
/* 131 DIMM Module Attributes */
unsigned char modu_attr;
/* 132 RDIMM Thermal Heat Spreader Solution */
unsigned char thermal;
/* 133 Register Manufacturer ID Code, LSB */
unsigned char reg_id_lo;
/* 134 Register Manufacturer ID Code, MSB */
unsigned char reg_id_hi;
/* 135 Register Revision Number */
unsigned char reg_rev;
/* 136 Address mapping from register to DRAM */
unsigned char reg_map;
/* 137 Register Output Drive Strength for CMD/Add*/
unsigned char reg_drv;
/* 138 Register Output Drive Strength for CK */
unsigned char reg_drv_ck;
/* 139 Data Buffer Revision Number */
unsigned char data_buf_rev;
/* 140 DRAM VrefDQ for Package Rank 0 */
unsigned char vrefqe_r0;
/* 141 DRAM VrefDQ for Package Rank 1 */
unsigned char vrefqe_r1;
/* 142 DRAM VrefDQ for Package Rank 2 */
unsigned char vrefqe_r2;
/* 143 DRAM VrefDQ for Package Rank 3 */
unsigned char vrefqe_r3;
/* 144 Data Buffer VrefDQ for DRAM Interface */
unsigned char data_intf;
/*
* 145 Data Buffer MDQ Drive Strength and RTT
* for data rate <= 1866
*/
unsigned char data_drv_1866;
/*
* 146 Data Buffer MDQ Drive Strength and RTT
* for 1866 < data rate <= 2400
*/
unsigned char data_drv_2400;
/*
* 147 Data Buffer MDQ Drive Strength and RTT
* for 2400 < data rate <= 3200
*/
unsigned char data_drv_3200;
/* 148 DRAM Drive Strength */
unsigned char dram_drv;
/*
* 149 DRAM ODT (RTT_WR, RTT_NOM)
* for data rate <= 1866
*/
unsigned char dram_odt_1866;
/*
* 150 DRAM ODT (RTT_WR, RTT_NOM)
* for 1866 < data rate <= 2400
*/
unsigned char dram_odt_2400;
/*
* 151 DRAM ODT (RTT_WR, RTT_NOM)
* for 2400 < data rate <= 3200
*/
unsigned char dram_odt_3200;
/*
* 152 DRAM ODT (RTT_PARK)
* for data rate <= 1866
*/
unsigned char dram_odt_park_1866;
/*
* 153 DRAM ODT (RTT_PARK)
* for 1866 < data rate <= 2400
*/
unsigned char dram_odt_park_2400;
/*
* 154 DRAM ODT (RTT_PARK)
* for 2400 < data rate <= 3200
*/
unsigned char dram_odt_park_3200;
unsigned char res_155[254-155]; /* Reserved */
/* 254~255 CRC */
unsigned char crc[2];
} loadreduced;
unsigned char uc[128]; /* 128-255 Module-Specific Section */
} mod_section;
unsigned char res_256[320-256]; /* 256~319 Reserved */
/* Module supplier's data: Byte 320~383 */
unsigned char mmid_lsb; /* 320 Module MfgID Code LSB */
unsigned char mmid_msb; /* 321 Module MfgID Code MSB */
unsigned char mloc; /* 322 Mfg Location */
unsigned char mdate[2]; /* 323~324 Mfg Date */
unsigned char sernum[4]; /* 325~328 Module Serial Number */
unsigned char mpart[20]; /* 329~348 Mfg's Module Part Number */
unsigned char mrev; /* 349 Module Revision Code */
unsigned char dmid_lsb; /* 350 DRAM MfgID Code LSB */
unsigned char dmid_msb; /* 351 DRAM MfgID Code MSB */
unsigned char stepping; /* 352 DRAM stepping */
unsigned char msd[29]; /* 353~381 Mfg's Specific Data */
unsigned char res_382[2]; /* 382~383 Reserved */
};
/* Parameters for a DDR dimm computed from the SPD */
struct dimm_params {
/* DIMM organization parameters */
char mpart[19]; /* guaranteed null terminated */
unsigned int n_ranks;
unsigned int die_density;
unsigned long long rank_density;
unsigned long long capacity;
unsigned int primary_sdram_width;
unsigned int ec_sdram_width;
unsigned int rdimm;
unsigned int package_3ds; /* number of dies in 3DS */
unsigned int device_width; /* x4, x8, x16 components */
unsigned int rc;
/* SDRAM device parameters */
unsigned int n_row_addr;
unsigned int n_col_addr;
unsigned int edc_config; /* 0 = none, 1 = parity, 2 = ECC */
unsigned int bank_addr_bits;
unsigned int bank_group_bits;
unsigned int burst_lengths_bitmask; /* BL=4 bit 2, BL=8 = bit 3 */
/* mirrored DIMMs */
unsigned int mirrored_dimm; /* only for ddr3 */
/* DIMM timing parameters */
int mtb_ps; /* medium timebase ps */
int ftb_10th_ps; /* fine timebase, in 1/10 ps */
int taa_ps; /* minimum CAS latency time */
int tfaw_ps; /* four active window delay */
/*
* SDRAM clock periods
* The range for these is 1000-10000, so a short should be sufficient
*/
int tckmin_x_ps;
int tckmax_ps;
/* SPD-defined CAS latencies */
unsigned int caslat_x;
/* basic timing parameters */
int trcd_ps;
int trp_ps;
int tras_ps;
int trfc1_ps;
int trfc2_ps;
int trfc4_ps;
int trrds_ps;
int trrdl_ps;
int tccdl_ps;
int trfc_slr_ps;
int trc_ps; /* maximum = 254 ns + .75 ns = 254750 ps */
int twr_ps; /* 15ns for all speed bins */
unsigned int refresh_rate_ps;
unsigned int extended_op_srt;
/* RDIMM */
unsigned char rcw[16]; /* Register Control Word 0-15 */
unsigned int dq_mapping[18];
unsigned int dq_mapping_ors;
};
int read_spd(unsigned char chip, void *buf, int len);
int crc16(unsigned char *ptr, int count);
int cal_dimm_params(const struct ddr4_spd *spd, struct dimm_params *pdimm);
#endif /* DIMM_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DDR_IMMAP_H
#define DDR_IMMAP_H
#define DDR_DBUS_64 0
#define DDR_DBUS_32 1
#define DDR_DBUS_16 2
/*
* DDRC register file for DDRC 5.0 and above
*/
struct ccsr_ddr {
struct {
unsigned int a; /* 0x0, 0x8, 0x10, 0x18 */
unsigned int res; /* 0x4, 0xc, 0x14, 0x1c */
} bnds[4];
unsigned char res_20[0x40 - 0x20];
unsigned int dec[10]; /* 0x40 */
unsigned char res_68[0x80 - 0x68];
unsigned int csn_cfg[4]; /* 0x80, 0x84, 0x88, 0x8c */
unsigned char res_90[48];
unsigned int csn_cfg_2[4]; /* 0xc0, 0xc4, 0xc8, 0xcc */
unsigned char res_d0[48];
unsigned int timing_cfg_3; /* SDRAM Timing Configuration 3 */
unsigned int timing_cfg_0; /* SDRAM Timing Configuration 0 */
unsigned int timing_cfg_1; /* SDRAM Timing Configuration 1 */
unsigned int timing_cfg_2; /* SDRAM Timing Configuration 2 */
unsigned int sdram_cfg; /* SDRAM Control Configuration */
unsigned int sdram_cfg_2; /* SDRAM Control Configuration 2 */
unsigned int sdram_mode; /* SDRAM Mode Configuration */
unsigned int sdram_mode_2; /* SDRAM Mode Configuration 2 */
unsigned int sdram_md_cntl; /* SDRAM Mode Control */
unsigned int sdram_interval; /* SDRAM Interval Configuration */
unsigned int sdram_data_init; /* SDRAM Data initialization */
unsigned char res_12c[4];
unsigned int sdram_clk_cntl; /* SDRAM Clock Control */
unsigned char res_134[20];
unsigned int init_addr; /* training init addr */
unsigned int init_ext_addr; /* training init extended addr */
unsigned char res_150[16];
unsigned int timing_cfg_4; /* SDRAM Timing Configuration 4 */
unsigned int timing_cfg_5; /* SDRAM Timing Configuration 5 */
unsigned int timing_cfg_6; /* SDRAM Timing Configuration 6 */
unsigned int timing_cfg_7; /* SDRAM Timing Configuration 7 */
unsigned int zq_cntl; /* ZQ calibration control*/
unsigned int wrlvl_cntl; /* write leveling control*/
unsigned char reg_178[4];
unsigned int ddr_sr_cntr; /* self refresh counter */
unsigned int ddr_sdram_rcw_1; /* Control Words 1 */
unsigned int ddr_sdram_rcw_2; /* Control Words 2 */
unsigned char reg_188[8];
unsigned int ddr_wrlvl_cntl_2; /* write leveling control 2 */
unsigned int ddr_wrlvl_cntl_3; /* write leveling control 3 */
unsigned char res_198[0x1a0-0x198];
unsigned int ddr_sdram_rcw_3;
unsigned int ddr_sdram_rcw_4;
unsigned int ddr_sdram_rcw_5;
unsigned int ddr_sdram_rcw_6;
unsigned char res_1b0[0x200-0x1b0];
unsigned int sdram_mode_3; /* SDRAM Mode Configuration 3 */
unsigned int sdram_mode_4; /* SDRAM Mode Configuration 4 */
unsigned int sdram_mode_5; /* SDRAM Mode Configuration 5 */
unsigned int sdram_mode_6; /* SDRAM Mode Configuration 6 */
unsigned int sdram_mode_7; /* SDRAM Mode Configuration 7 */
unsigned int sdram_mode_8; /* SDRAM Mode Configuration 8 */
unsigned char res_218[0x220-0x218];
unsigned int sdram_mode_9; /* SDRAM Mode Configuration 9 */
unsigned int sdram_mode_10; /* SDRAM Mode Configuration 10 */
unsigned int sdram_mode_11; /* SDRAM Mode Configuration 11 */
unsigned int sdram_mode_12; /* SDRAM Mode Configuration 12 */
unsigned int sdram_mode_13; /* SDRAM Mode Configuration 13 */
unsigned int sdram_mode_14; /* SDRAM Mode Configuration 14 */
unsigned int sdram_mode_15; /* SDRAM Mode Configuration 15 */
unsigned int sdram_mode_16; /* SDRAM Mode Configuration 16 */
unsigned char res_240[0x250-0x240];
unsigned int timing_cfg_8; /* SDRAM Timing Configuration 8 */
unsigned int timing_cfg_9; /* SDRAM Timing Configuration 9 */
unsigned int timing_cfg_10; /* SDRAM Timing Configuration 10 */
unsigned char res_258[0x260-0x25c];
unsigned int sdram_cfg_3;
unsigned char res_264[0x270-0x264];
unsigned int sdram_md_cntl_2;
unsigned char res_274[0x400-0x274];
unsigned int dq_map[4];
unsigned char res_410[0x800-0x410];
unsigned int tx_cfg[4];
unsigned char res_810[0xb20-0x810];
unsigned int ddr_dsr1; /* Debug Status 1 */
unsigned int ddr_dsr2; /* Debug Status 2 */
unsigned int ddr_cdr1; /* Control Driver 1 */
unsigned int ddr_cdr2; /* Control Driver 2 */
unsigned char res_b30[200];
unsigned int ip_rev1; /* IP Block Revision 1 */
unsigned int ip_rev2; /* IP Block Revision 2 */
unsigned int eor; /* Enhanced Optimization Register */
unsigned char res_c04[252];
unsigned int mtcr; /* Memory Test Control Register */
unsigned char res_d04[28];
unsigned int mtp[10]; /* Memory Test Patterns */
unsigned char res_d48[184];
unsigned int data_err_inject_hi; /* Data Path Err Injection Mask Hi*/
unsigned int data_err_inject_lo;/* Data Path Err Injection Mask Lo*/
unsigned int ecc_err_inject; /* Data Path Err Injection Mask ECC */
unsigned char res_e0c[20];
unsigned int capture_data_hi; /* Data Path Read Capture High */
unsigned int capture_data_lo; /* Data Path Read Capture Low */
unsigned int capture_ecc; /* Data Path Read Capture ECC */
unsigned char res_e2c[20];
unsigned int err_detect; /* Error Detect */
unsigned int err_disable; /* Error Disable */
unsigned int err_int_en;
unsigned int capture_attributes; /* Error Attrs Capture */
unsigned int capture_address; /* Error Addr Capture */
unsigned int capture_ext_address; /* Error Extended Addr Capture */
unsigned int err_sbe; /* Single-Bit ECC Error Management */
unsigned char res_e5c[164];
unsigned int debug[64]; /* debug_1 to debug_64 */
};
#endif /* DDR_IMMAP_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DDR_OPTS_H
#define DDR_OPTS_H
#define SDRAM_TYPE_DDR4 5 /* sdram_cfg register */
#define DDR_BC4 4 /* burst chop */
#define DDR_OTF 6 /* on-the-fly BC4 and BL8 */
#define DDR_BL8 8 /* burst length 8 */
#define DDR4_RTT_OFF 0
#define DDR4_RTT_60_OHM 1 /* RZQ/4 */
#define DDR4_RTT_120_OHM 2 /* RZQ/2 */
#define DDR4_RTT_40_OHM 3 /* RZQ/6 */
#define DDR4_RTT_240_OHM 4 /* RZQ/1 */
#define DDR4_RTT_48_OHM 5 /* RZQ/5 */
#define DDR4_RTT_80_OHM 6 /* RZQ/3 */
#define DDR4_RTT_34_OHM 7 /* RZQ/7 */
#define DDR4_RTT_WR_OFF 0
#define DDR4_RTT_WR_120_OHM 1
#define DDR4_RTT_WR_240_OHM 2
#define DDR4_RTT_WR_HZ 3
#define DDR4_RTT_WR_80_OHM 4
#define DDR_ODT_NEVER 0x0
#define DDR_ODT_CS 0x1
#define DDR_ODT_ALL_OTHER_CS 0x2
#define DDR_ODT_OTHER_DIMM 0x3
#define DDR_ODT_ALL 0x4
#define DDR_ODT_SAME_DIMM 0x5
#define DDR_ODT_CS_AND_OTHER_DIMM 0x6
#define DDR_ODT_OTHER_CS_ONSAMEDIMM 0x7
#define DDR_BA_INTLV_CS01 0x40
#define DDR_BA_INTLV_CS0123 0x64
#define DDR_BA_NONE 0
#define DDR_256B_INTLV 0x8
struct memctl_opt {
int rdimm;
unsigned int dbw_cap_shift;
struct local_opts_s {
unsigned int auto_precharge;
unsigned int odt_rd_cfg;
unsigned int odt_wr_cfg;
unsigned int odt_rtt_norm;
unsigned int odt_rtt_wr;
} cs_odt[DDRC_NUM_CS];
int ctlr_intlv;
unsigned int ctlr_intlv_mode;
unsigned int ba_intlv;
int addr_hash;
int ecc_mode;
int ctlr_init_ecc;
int self_refresh_in_sleep;
int self_refresh_irq_en;
int dynamic_power;
/* memory data width 0 = 64-bit, 1 = 32-bit, 2 = 16-bit */
unsigned int data_bus_dimm;
unsigned int data_bus_used; /* on individual board */
unsigned int burst_length; /* BC4, OTF and BL8 */
int otf_burst_chop_en;
int mirrored_dimm;
int quad_rank_present;
int output_driver_impedance;
int ap_en;
int x4_en;
int caslat_override;
unsigned int caslat_override_value;
int addt_lat_override;
unsigned int addt_lat_override_value;
unsigned int clk_adj;
unsigned int cpo_sample;
unsigned int wr_data_delay;
unsigned int cswl_override;
unsigned int wrlvl_override;
unsigned int wrlvl_sample;
unsigned int wrlvl_start;
unsigned int wrlvl_ctl_2;
unsigned int wrlvl_ctl_3;
int half_strength_drive_en;
int twot_en;
int threet_en;
unsigned int bstopre;
unsigned int tfaw_ps;
int rtt_override;
unsigned int rtt_override_value;
unsigned int rtt_wr_override_value;
unsigned int rtt_park;
int auto_self_refresh_en;
unsigned int sr_it;
unsigned int ddr_cdr1;
unsigned int ddr_cdr2;
unsigned int trwt_override;
unsigned int trwt;
unsigned int twrt;
unsigned int trrt;
unsigned int twwt;
unsigned int vref_phy;
unsigned int vref_dimm;
unsigned int odt;
unsigned int phy_tx_impedance;
unsigned int phy_atx_impedance;
unsigned int skip2d;
};
#endif /* DDR_OPTS_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef DDR_REG_H
#define DDR_REG_H
#define SDRAM_CS_CONFIG_EN 0x80000000
/* DDR_SDRAM_CFG - DDR SDRAM Control Configuration
*/
#define SDRAM_CFG_MEM_EN 0x80000000
#define SDRAM_CFG_SREN 0x40000000
#define SDRAM_CFG_ECC_EN 0x20000000
#define SDRAM_CFG_RD_EN 0x10000000
#define SDRAM_CFG_SDRAM_TYPE_MASK 0x07000000
#define SDRAM_CFG_SDRAM_TYPE_SHIFT 24
#define SDRAM_CFG_DYN_PWR 0x00200000
#define SDRAM_CFG_DBW_MASK 0x00180000
#define SDRAM_CFG_DBW_SHIFT 19
#define SDRAM_CFG_32_BW 0x00080000
#define SDRAM_CFG_16_BW 0x00100000
#define SDRAM_CFG_8_BW 0x00180000
#define SDRAM_CFG_8_BE 0x00040000
#define SDRAM_CFG_2T_EN 0x00008000
#define SDRAM_CFG_MEM_HLT 0x00000002
#define SDRAM_CFG_BI 0x00000001
#define SDRAM_CFG2_FRC_SR 0x80000000
#define SDRAM_CFG2_FRC_SR_CLEAR ~(SDRAM_CFG2_FRC_SR)
#define SDRAM_CFG2_D_INIT 0x00000010
#define SDRAM_CFG2_AP_EN 0x00000020
#define SDRAM_CFG2_ODT_ONLY_READ 2
#define SDRAM_CFG3_DDRC_RST 0x80000000
#define SDRAM_INTERVAL_REFINT 0xFFFF0000
#define SDRAM_INTERVAL_REFINT_CLEAR ~(SDRAM_INTERVAL_REFINT)
#define SDRAM_INTERVAL_BSTOPRE 0x3FFF
/* DDR_MD_CNTL */
#define MD_CNTL_MD_EN 0x80000000
#define MD_CNTL_CS_SEL(x) (((x) & 0x7) << 28)
#define MD_CNTL_MD_SEL(x) (((x) & 0xf) << 24)
#define MD_CNTL_CKE(x) (((x) & 0x3) << 20)
/* DDR_CDR1 */
#define DDR_CDR1_DHC_EN 0x80000000
#define DDR_CDR1_ODT_SHIFT 17
#define DDR_CDR1_ODT_MASK 0x6
#define DDR_CDR2_ODT_MASK 0x1
#define DDR_CDR1_ODT(x) ((x & DDR_CDR1_ODT_MASK) << DDR_CDR1_ODT_SHIFT)
#define DDR_CDR2_ODT(x) (x & DDR_CDR2_ODT_MASK)
#define DDR_CDR2_VREF_OVRD(x) (0x00008080 | ((((x) - 37) & 0x3F) << 8))
#define DDR_CDR2_VREF_TRAIN_EN 0x00000080
#define DDR_CDR2_VREF_RANGE_2 0x00000040
#define DDR_CDR_ODT_OFF 0x0
#define DDR_CDR_ODT_100ohm 0x1
#define DDR_CDR_ODT_120OHM 0x2
#define DDR_CDR_ODT_80ohm 0x3
#define DDR_CDR_ODT_60ohm 0x4
#define DDR_CDR_ODT_40ohm 0x5
#define DDR_CDR_ODT_50ohm 0x6
#define DDR_CDR_ODT_30ohm 0x7
/* DDR ERR_DISABLE */
#define DDR_ERR_DISABLE_APED (1 << 8) /* Address parity error disable */
#define DDR_ERR_DISABLE_SBED (1 << 2) /* Single-bit ECC error disable */
#define DDR_ERR_DISABLE_MBED (1 << 3) /* Multi-bit ECC error disable */
/* Mode Registers */
#define DDR_MR5_CA_PARITY_LAT_4_CLK 0x1 /* for DDR4-1600/1866/2133 */
#define DDR_MR5_CA_PARITY_LAT_5_CLK 0x2 /* for DDR4-2400 */
/* DDR DSR2 register */
#define DDR_DSR_2_PHY_INIT_CMPLT 0x4
/* SDRAM TIMING_CFG_10 register */
#define DDR_TIMING_CFG_10_T_STAB 0x7FFF
/* DEBUG 2 register */
#define DDR_DBG_2_MEM_IDLE 0x00000002
/* DEBUG 26 register */
#define DDR_DEBUG_26_BIT_6 (0x1 << 6)
#define DDR_DEBUG_26_BIT_7 (0x1 << 7)
#define DDR_DEBUG_26_BIT_12 (0x1 << 12)
#define DDR_DEBUG_26_BIT_13 (0x1 << 13)
#define DDR_DEBUG_26_BIT_14 (0x1 << 14)
#define DDR_DEBUG_26_BIT_15 (0x1 << 15)
#define DDR_DEBUG_26_BIT_16 (0x1 << 16)
#define DDR_DEBUG_26_BIT_17 (0x1 << 17)
#define DDR_DEBUG_26_BIT_18 (0x1 << 18)
#define DDR_DEBUG_26_BIT_19 (0x1 << 19)
#define DDR_DEBUG_26_BIT_24 (0x1 << 24)
#define DDR_DEBUG_26_BIT_25 (0x1 << 25)
#define DDR_DEBUG_26_BIT_24_CLEAR ~(DDR_DEBUG_26_BIT_24)
/* DEBUG_29 register */
#define DDR_TX_BD_DIS (1 << 10) /* Transmit Bit Deskew Disable */
#define DDR_INIT_ADDR_EXT_UIA (1 << 31)
#endif /* DDR_REG_H */
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef UTILITY_H
#define UTILITY_H
#include <dcfg.h>
#if defined(NXP_HAS_CCN504) || defined(NXP_HAS_CCN508)
#define CCN_HN_F_SAM_CTL 0x8
#define CCN_HN_F_REGION_SIZE 0x10000
#endif
unsigned long get_ddr_freq(struct sysinfo *sys, int ctrl_num);
unsigned int get_memory_clk_ps(unsigned long clk);
unsigned int picos_to_mclk(unsigned long data_rate, unsigned int picos);
unsigned int get_ddrc_version(const struct ccsr_ddr *ddr);
void print_ddr_info(struct ccsr_ddr *ddr);
#endif
Table for dynamic ODT for DDR4 with PHY generation 2
====================================================
Two-slot system
Only symmetric configurations are supported for interleaving. Non-symmetric
configurations are possible but not covered here. Leaving the first slot empty is
possible but prohibited for simplicity.
+-----------------------+-------------+---------------+-----------------------------+-----------------------------+
| Configuration | |DRAM controller| Slot 1 | Slot 2 |
+-----------+-----------+-------------+-------+-------+--------------+--------------+--------------+--------------+
| | | | | | Rank 1 | Rank 2 | Rank 1 | Rank 2 |
| Slot 1 | Slot 2 | Write/Read | Write | Read |-------+------+-------+------+-------+------+-------+------+
| | | | | | Write | Read | Write | Read | Write | Read | Write | Read |
+-----------+-----------+------+------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | |Rank 1| off | 60 | 240 | off | 60 | 240 | 60 | 60 | 60 | 60 |
| | |Slot 1|------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | |Rank 2| off | 60 | 60 | 240 | 240 | off | 60 | 60 | 60 | 60 |
| Dual Rank | Dual Rank |------+------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | |Rank 1| off | 60 | 60 | 60 | 60 | 60 | 240 | off | 60 | 240 |
| | |Slot 2|------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | |Rank 2| off | 60 | 60 | 60 | 60 | 60 | 60 | 240 | 240 | off |
+-----------+-----------+------+------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | Slot 1 | off | 60 | 80 | off | | | | | | |
|Single Rank|Single Rank|-------------+-------+-------+-------+------+-------+------+-------+------+-------+------+
| | | Slot 2 | off | 60 | | | | | 80 | off |
+-----------+-----------+------+------+-------+-------+-------+------+-------+------+-------+------+
| | | |Rank 1| off | 80 | 80 | off | off | off |
| Dual Rank | |Slot 1|------+-------+-------+-------+------+-------+------+
| | | |Rank 2| off | 80 | 80 | off | off | off |
+-----------+-----------+-------------+-------+-------+-------+------+-------+------+
|Single Rank| | Slot 1 | off | 80 | 80 | off |
+-----------+-----------+-------------+-------+-------+-------+------+
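In the driver these choices surface as the per-chip-select ODT fields of
struct memctl_opt (see opts.h above). A hedged sketch of a board options
hook filling them; the values shown are placeholders, to be replaced by the
row of the table that matches the populated slots:

static void board_set_odt(struct memctl_opt *popts)
{
	unsigned int i;

	for (i = 0U; i < DDRC_NUM_CS; i++) {
		popts->cs_odt[i].odt_rd_cfg = DDR_ODT_NEVER;		/* placeholder */
		popts->cs_odt[i].odt_wr_cfg = DDR_ODT_CS;		/* placeholder */
		popts->cs_odt[i].odt_rtt_norm = DDR4_RTT_60_OHM;	/* placeholder */
		popts->cs_odt[i].odt_rtt_wr = DDR4_RTT_WR_OFF;		/* placeholder */
	}
}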
#
# Copyright 2021 NXP
#
# SPDX-License-Identifier: BSD-3-Clause
#
DDR_DRIVERS_PATH := ${PLAT_DRIVERS_PATH}/ddr
ifeq ($(PLAT_DDR_PHY), PHY_GEN2)
$(eval $(call add_define, PHY_GEN2))
PLAT_DDR_PHY_DIR := phy-gen2
ifeq (${APPLY_MAX_CDD},yes)
$(eval $(call add_define,NXP_APPLY_MAX_CDD))
endif
ifeq (${ERRATA_DDR_A011396}, 1)
$(eval $(call add_define,ERRATA_DDR_A011396))
endif
ifeq (${ERRATA_DDR_A050450}, 1)
$(eval $(call add_define,ERRATA_DDR_A050450))
endif
endif
ifeq ($(PLAT_DDR_PHY), PHY_GEN1)
PLAT_DDR_PHY_DIR := phy-gen1
ifeq (${ERRATA_DDR_A008511},1)
$(eval $(call add_define,ERRATA_DDR_A008511))
endif
ifeq (${ERRATA_DDR_A009803},1)
$(eval $(call add_define,ERRATA_DDR_A009803))
endif
ifeq (${ERRATA_DDR_A009942},1)
$(eval $(call add_define,ERRATA_DDR_A009942))
endif
ifeq (${ERRATA_DDR_A010165},1)
$(eval $(call add_define,ERRATA_DDR_A010165))
endif
endif
ifeq ($(DDR_BIST), yes)
$(eval $(call add_define, BIST_EN))
endif
ifeq ($(DDR_DEBUG), yes)
$(eval $(call add_define, DDR_DEBUG))
endif
ifeq ($(DDR_PHY_DEBUG), yes)
$(eval $(call add_define, DDR_PHY_DEBUG))
endif
ifeq ($(DEBUG_PHY_IO), yes)
$(eval $(call add_define, DEBUG_PHY_IO))
endif
ifeq ($(DEBUG_WARM_RESET), yes)
$(eval $(call add_define, DEBUG_WARM_RESET))
endif
ifeq ($(DEBUG_DDR_INPUT_CONFIG), yes)
$(eval $(call add_define, DEBUG_DDR_INPUT_CONFIG))
endif
DDR_CNTLR_SOURCES := $(DDR_DRIVERS_PATH)/nxp-ddr/ddr.c \
$(DDR_DRIVERS_PATH)/nxp-ddr/ddrc.c \
$(DDR_DRIVERS_PATH)/nxp-ddr/dimm.c \
$(DDR_DRIVERS_PATH)/nxp-ddr/regs.c \
$(DDR_DRIVERS_PATH)/nxp-ddr/utility.c \
$(DDR_DRIVERS_PATH)/$(PLAT_DDR_PHY_DIR)/phy.c
PLAT_INCLUDES += -I$(DDR_DRIVERS_PATH)/nxp-ddr \
-I$(DDR_DRIVERS_PATH)/include
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#include <errno.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <common/debug.h>
#include <ddr.h>
#include <drivers/delay_timer.h>
#include <immap.h>
#define BIST_CR 0x80060000
#define BIST_CR_EN 0x80000000
#define BIST_CR_STAT 0x00000001
#define CTLR_INTLV_MASK 0x20000000
#pragma weak run_bist
bool run_bist(void)
{
#ifdef BIST_EN
return true;
#else
return false;
#endif
}
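/*
 * Illustrative only: a board may provide a strong definition of run_bist()
 * (the default above is weak) to gate the memory test on something other
 * than the BIST_EN build flag. board_wants_memory_test() is a hypothetical
 * helper, not part of this driver, and the override belongs in board code.
 */
bool board_wants_memory_test(void);

bool run_bist(void)
{
	return board_wants_memory_test();
}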
/*
* Perform built-in test on memory
* timeout value is in units of 10 ms
*/
int bist(const struct ccsr_ddr *ddr, int timeout)
{
const unsigned int test_pattern[10] = {
0xffffffff,
0x00000000,
0xaaaaaaaa,
0x55555555,
0xcccccccc,
0x33333333,
0x12345678,
0xabcdef01,
0xaa55aa55,
0x55aa55aa
};
unsigned int mtcr, err_detect, err_sbe;
unsigned int cs0_config;
unsigned int csn_bnds[4];
int ret = 0;
uint32_t i;
#ifdef CONFIG_DDR_ADDR_DEC
uint32_t dec_9 = ddr_in32(&ddr->dec[9]);
uint32_t pos = 0U;
uint32_t map_save = 0U;
uint32_t temp32 = 0U;
uint32_t map, shift, highest;
#endif
cs0_config = ddr_in32(&ddr->csn_cfg[0]);
if ((cs0_config & CTLR_INTLV_MASK) != 0U) {
/* set bnds to non-interleaving */
for (i = 0U; i < 4U; i++) {
csn_bnds[i] = ddr_in32(&ddr->bnds[i].a);
ddr_out32(&ddr->bnds[i].a,
(csn_bnds[i] & U(0xfffefffe)) >> 1U);
}
ddr_out32(&ddr->csn_cfg[0], cs0_config & ~CTLR_INTLV_MASK);
#ifdef CONFIG_DDR_ADDR_DEC
if ((dec_9 & 0x1U) != 0U) {
highest = (dec_9 >> 26U) == U(0x3F) ? 0U : dec_9 >> 26U;
pos = 37U;
for (i = 0U; i < 36U; i++) { /* Go through all 37 */
if ((i % 4U) == 0U) {
temp32 = ddr_in32(&ddr->dec[i >> 2U]);
}
shift = (3U - i % 4U) * 8U + 2U;
map = (temp32 >> shift) & U(0x3F);
if (map > highest && map != U(0x3F)) {
highest = map;
pos = i;
}
}
debug("\nFound highest position %d, mapping to %d, ",
pos, highest);
map_save = ddr_in32(&ddr->dec[pos >> 2]);
shift = (3U - pos % 4U) * 8U + 2U;
debug("in dec[%d], bit %d (0x%x)\n",
pos >> 2U, shift, map_save);
temp32 = map_save & ~(U(0x3F) << shift);
temp32 |= 8U << shift;
ddr_out32(&ddr->dec[pos >> 2U], temp32);
timeout <<= 2U;
debug("Increase wait time to %d ms\n", timeout * 10);
}
#endif
}
for (i = 0U; i < 10U; i++) {
ddr_out32(&ddr->mtp[i], test_pattern[i]);
}
mtcr = BIST_CR;
ddr_out32(&ddr->mtcr, mtcr);
do {
mdelay(10);
mtcr = ddr_in32(&ddr->mtcr);
} while (timeout-- > 0 && ((mtcr & BIST_CR_EN) != 0));
if (timeout <= 0) {
ERROR("Timeout\n");
} else {
debug("Timer remains %d\n", timeout);
}
err_detect = ddr_in32(&ddr->err_detect);
err_sbe = ddr_in32(&ddr->err_sbe);
if (err_detect != 0U || ((err_sbe & U(0xffff)) != 0U)) {
ERROR("ECC error detected\n");
ret = -EIO;
}
if ((cs0_config & CTLR_INTLV_MASK) != 0) {
for (i = 0U; i < 4U; i++) {
ddr_out32(&ddr->bnds[i].a, csn_bnds[i]);
}
ddr_out32(&ddr->csn_cfg[0], cs0_config);
#ifdef CONFIG_DDR_ADDR_DEC
if ((dec_9 & U(0x1)) != 0U) {
ddr_out32(&ddr->dec[pos >> 2], map_save);
}
#endif
}
if ((mtcr & BIST_CR_STAT) != 0) {
ERROR("Built-in self test failed\n");
ret = -EIO;
} else {
NOTICE("Build-in self test passed\n");
}
return ret;
}
void dump_ddrc(unsigned int *ddr)
{
#ifdef DDR_DEBUG
uint32_t i;
unsigned long val;
for (i = 0U; i < U(0x400); i++, ddr++) {
val = ddr_in32(ddr);
if (val != 0U) { /* skip zeros */
debug("*0x%lx = 0x%lx\n", (unsigned long)ddr, val);
}
}
#endif
}
#ifdef ERRATA_DDR_A009803
static void set_wait_for_bits_clear(const void *ptr,
unsigned int value,
unsigned int bits)
{
int timeout = 1000;
ddr_out32(ptr, value);
do {
udelay(100);
} while (timeout-- > 0 && ((ddr_in32(ptr) & bits) != 0));
if (timeout <= 0) {
ERROR("wait for clear timeout.\n");
}
}
#endif
#if (DDRC_NUM_CS > 4)
#error Invalid setting for DDRC_NUM_CS
#endif
/*
* If supported by the platform, writing to the DDR controller takes two
* passes to deassert DDR reset to comply with JEDEC specs for RDIMMs.
*/
int ddrc_set_regs(const unsigned long clk,
const struct ddr_cfg_regs *regs,
const struct ccsr_ddr *ddr,
int twopass)
{
unsigned int i, bus_width;
unsigned int temp_sdram_cfg;
unsigned int total_mem_per_ctrl, total_mem_per_ctrl_adj;
const int mod_bnds = regs->cs[0].config & CTLR_INTLV_MASK;
int timeout;
int ret = 0;
#if defined(ERRATA_DDR_A009942) || defined(ERRATA_DDR_A010165)
unsigned long ddr_freq;
unsigned int tmp;
#ifdef ERRATA_DDR_A009942
unsigned int check;
unsigned int cpo_min = U(0xff);
unsigned int cpo_max = 0U;
#endif
#endif
if (twopass == 2U) {
goto after_reset;
}
/* Set cdr1 first in case 0.9V VDD is enabled for some SoCs */
ddr_out32(&ddr->ddr_cdr1, regs->cdr[0]);
ddr_out32(&ddr->sdram_clk_cntl, regs->clk_cntl);
for (i = 0U; i < DDRC_NUM_CS; i++) {
if (mod_bnds != 0U) {
ddr_out32(&ddr->bnds[i].a,
(regs->cs[i].bnds & U(0xfffefffe)) >> 1U);
} else {
ddr_out32(&ddr->bnds[i].a, regs->cs[i].bnds);
}
ddr_out32(&ddr->csn_cfg_2[i], regs->cs[i].config_2);
}
ddr_out32(&ddr->timing_cfg_0, regs->timing_cfg[0]);
ddr_out32(&ddr->timing_cfg_1, regs->timing_cfg[1]);
ddr_out32(&ddr->timing_cfg_2, regs->timing_cfg[2]);
ddr_out32(&ddr->timing_cfg_3, regs->timing_cfg[3]);
ddr_out32(&ddr->timing_cfg_4, regs->timing_cfg[4]);
ddr_out32(&ddr->timing_cfg_5, regs->timing_cfg[5]);
ddr_out32(&ddr->timing_cfg_6, regs->timing_cfg[6]);
ddr_out32(&ddr->timing_cfg_7, regs->timing_cfg[7]);
ddr_out32(&ddr->timing_cfg_8, regs->timing_cfg[8]);
ddr_out32(&ddr->timing_cfg_9, regs->timing_cfg[9]);
ddr_out32(&ddr->zq_cntl, regs->zq_cntl);
for (i = 0U; i < 4U; i++) {
ddr_out32(&ddr->dq_map[i], regs->dq_map[i]);
}
ddr_out32(&ddr->sdram_cfg_3, regs->sdram_cfg[2]);
ddr_out32(&ddr->sdram_mode, regs->sdram_mode[0]);
ddr_out32(&ddr->sdram_mode_2, regs->sdram_mode[1]);
ddr_out32(&ddr->sdram_mode_3, regs->sdram_mode[2]);
ddr_out32(&ddr->sdram_mode_4, regs->sdram_mode[3]);
ddr_out32(&ddr->sdram_mode_5, regs->sdram_mode[4]);
ddr_out32(&ddr->sdram_mode_6, regs->sdram_mode[5]);
ddr_out32(&ddr->sdram_mode_7, regs->sdram_mode[6]);
ddr_out32(&ddr->sdram_mode_8, regs->sdram_mode[7]);
ddr_out32(&ddr->sdram_mode_9, regs->sdram_mode[8]);
ddr_out32(&ddr->sdram_mode_10, regs->sdram_mode[9]);
ddr_out32(&ddr->sdram_mode_11, regs->sdram_mode[10]);
ddr_out32(&ddr->sdram_mode_12, regs->sdram_mode[11]);
ddr_out32(&ddr->sdram_mode_13, regs->sdram_mode[12]);
ddr_out32(&ddr->sdram_mode_14, regs->sdram_mode[13]);
ddr_out32(&ddr->sdram_mode_15, regs->sdram_mode[14]);
ddr_out32(&ddr->sdram_mode_16, regs->sdram_mode[15]);
ddr_out32(&ddr->sdram_md_cntl, regs->md_cntl);
#ifdef ERRATA_DDR_A009663
ddr_out32(&ddr->sdram_interval,
regs->interval & ~SDRAM_INTERVAL_BSTOPRE);
#else
ddr_out32(&ddr->sdram_interval, regs->interval);
#endif
ddr_out32(&ddr->sdram_data_init, regs->data_init);
if (regs->eor != 0) {
ddr_out32(&ddr->eor, regs->eor);
}
ddr_out32(&ddr->wrlvl_cntl, regs->wrlvl_cntl[0]);
#ifndef NXP_DDR_EMU
/*
* Skip these two registers if running on emulator
* because emulator doesn't have skew between bytes.
*/
if (regs->wrlvl_cntl[1] != 0) {
ddr_out32(&ddr->ddr_wrlvl_cntl_2, regs->wrlvl_cntl[1]);
}
if (regs->wrlvl_cntl[2] != 0) {
ddr_out32(&ddr->ddr_wrlvl_cntl_3, regs->wrlvl_cntl[2]);
}
#endif
ddr_out32(&ddr->ddr_sr_cntr, regs->ddr_sr_cntr);
ddr_out32(&ddr->ddr_sdram_rcw_1, regs->sdram_rcw[0]);
ddr_out32(&ddr->ddr_sdram_rcw_2, regs->sdram_rcw[1]);
ddr_out32(&ddr->ddr_sdram_rcw_3, regs->sdram_rcw[2]);
ddr_out32(&ddr->ddr_sdram_rcw_4, regs->sdram_rcw[3]);
ddr_out32(&ddr->ddr_sdram_rcw_5, regs->sdram_rcw[4]);
ddr_out32(&ddr->ddr_sdram_rcw_6, regs->sdram_rcw[5]);
ddr_out32(&ddr->ddr_cdr2, regs->cdr[1]);
ddr_out32(&ddr->sdram_cfg_2, regs->sdram_cfg[1]);
ddr_out32(&ddr->init_addr, regs->init_addr);
ddr_out32(&ddr->init_ext_addr, regs->init_ext_addr);
#ifdef ERRATA_DDR_A009803
/* part 1 of 2 */
if ((regs->sdram_cfg[1] & SDRAM_CFG2_AP_EN) != 0) {
if ((regs->sdram_cfg[0] & SDRAM_CFG_RD_EN) != 0) {
ddr_out32(&ddr->ddr_sdram_rcw_2,
regs->sdram_rcw[1] & ~0xf0);
}
ddr_out32(&ddr->err_disable,
regs->err_disable | DDR_ERR_DISABLE_APED);
}
#else
ddr_out32(&ddr->err_disable, regs->err_disable);
#endif
ddr_out32(&ddr->err_int_en, regs->err_int_en);
/* For DDRC 5.05 only */
if (get_ddrc_version(ddr) == 0x50500) {
ddr_out32(&ddr->tx_cfg[1], 0x1f1f1f1f);
ddr_out32(&ddr->debug[3], 0x124a02c0);
}
for (i = 0U; i < 4U; i++) {
if (regs->tx_cfg[i] != 0) {
ddr_out32(&ddr->tx_cfg[i], regs->tx_cfg[i]);
}
}
for (i = 0U; i < 64U; i++) {
if (regs->debug[i] != 0) {
#ifdef ERRATA_DDR_A009942
if (i == 28U) {
continue;
}
#endif
ddr_out32(&ddr->debug[i], regs->debug[i]);
}
}
#ifdef CONFIG_DDR_ADDR_DEC
if ((regs->dec[9] & 1) != 0U) {
for (i = 0U; i < 10U; i++) {
ddr_out32(&ddr->dec[i], regs->dec[i]);
}
if (mod_bnds != 0) {
debug("Disable address decoding\n");
ddr_out32(&ddr->dec[9], 0);
}
}
#endif
#ifdef ERRATA_DDR_A008511
/* Part 1 of 2 */
/* This erratum only applies to version 5.2.1 */
if (get_ddrc_version(ddr) == 0x50200) {
ERROR("Unsupported SoC.\n");
} else if (get_ddrc_version(ddr) == 0x50201) {
ddr_out32(&ddr->debug[37], (U(1) << 31));
ddr_out32(&ddr->ddr_cdr2,
regs->cdr[1] | DDR_CDR2_VREF_TRAIN_EN);
} else {
debug("Erratum A008511 doesn't apply.\n");
}
#endif
#ifdef ERRATA_DDR_A009942
ddr_freq = clk / 1000000U;
tmp = ddr_in32(&ddr->debug[28]);
tmp &= U(0xff0fff00);
tmp |= ddr_freq <= 1333U ? U(0x0080006a) :
(ddr_freq <= 1600U ? U(0x0070006f) :
(ddr_freq <= 1867U ? U(0x00700076) : U(0x0060007b)));
if (regs->debug[28] != 0) {
tmp &= ~0xff;
tmp |= regs->debug[28] & 0xff;
} else {
WARN("Warning: Optimal CPO value not set.\n");
}
ddr_out32(&ddr->debug[28], tmp);
#endif
#ifdef ERRATA_DDR_A010165
ddr_freq = clk / 1000000U;
if ((ddr_freq > 1900) && (ddr_freq < 2300)) {
tmp = ddr_in32(&ddr->debug[28]);
ddr_out32(&ddr->debug[28], tmp | 0x000a0000);
}
#endif
/*
* For RDIMMs, JEDEC spec requires clocks to be stable before reset is
* deasserted. Clocks start when any chip select is enabled and clock
* control register is set. Because all DDR components are connected to
* one reset signal, this needs to be done in two steps. Step 1 is to
* get the clocks started. Step 2 resumes after reset signal is
* deasserted.
*/
if (twopass == 1) {
udelay(200);
return 0;
}
/* As per the new sequence flow, the CSn_CONFIG registers need to be set
 * after all the other DDR controller registers are set; then poll for
 * PHY_INIT_CMPLT = 1, wait at least 100 us (microseconds),
 * and finally set MEM_EN = 1.
*/
for (i = 0U; i < DDRC_NUM_CS; i++) {
if (mod_bnds != 0U && i == 0U) {
ddr_out32(&ddr->csn_cfg[i],
(regs->cs[i].config & ~CTLR_INTLV_MASK));
} else {
ddr_out32(&ddr->csn_cfg[i], regs->cs[i].config);
}
}
after_reset:
/* Set, but do not enable the memory */
temp_sdram_cfg = regs->sdram_cfg[0];
temp_sdram_cfg &= ~(SDRAM_CFG_MEM_EN);
ddr_out32(&ddr->sdram_cfg, temp_sdram_cfg);
if (get_ddrc_version(ddr) < U(0x50500)) {
/*
* 500 painful micro-seconds must elapse between
* the DDR clock setup and the DDR config enable.
* DDR2 needs 200 us and DDR3 needs 500 us per the spec;
* we choose the maximum, 500 us, for all cases.
*/
udelay(500);
/* apply memory barrier */
mb();
isb();
} else {
/* wait for PHY complete */
timeout = 40;
while (((ddr_in32(&ddr->ddr_dsr2) & 0x4) != 0) &&
(timeout > 0)) {
udelay(500);
timeout--;
}
if (timeout <= 0) {
printf("PHY handshake timeout, ddr_dsr2 = %x\n",
ddr_in32(&ddr->ddr_dsr2));
} else {
debug("PHY handshake completed, timer remains %d\n",
timeout);
}
}
temp_sdram_cfg = ddr_in32(&ddr->sdram_cfg);
/* Let the controller go */
udelay(100);
ddr_out32(&ddr->sdram_cfg, temp_sdram_cfg | SDRAM_CFG_MEM_EN);
/* apply memory barrier */
mb();
isb();
total_mem_per_ctrl = 0;
for (i = 0; i < DDRC_NUM_CS; i++) {
if ((regs->cs[i].config & 0x80000000) == 0) {
continue;
}
total_mem_per_ctrl += 1 << (
((regs->cs[i].config >> 14) & 0x3) + 2 +
((regs->cs[i].config >> 8) & 0x7) + 12 +
((regs->cs[i].config >> 4) & 0x3) + 0 +
((regs->cs[i].config >> 0) & 0x7) + 8 +
((regs->sdram_cfg[2] >> 4) & 0x3) +
3 - ((regs->sdram_cfg[0] >> 19) & 0x3) -
26); /* minus 26 (count of 64M) */
}
total_mem_per_ctrl_adj = total_mem_per_ctrl;
/*
* total memory / bus width = transactions needed
* transactions needed / data rate = seconds
* to add plenty of buffer, double the time
* For example, 2GB on 666MT/s 64-bit bus takes about 402ms
* Let's wait for 800ms
*/
bus_width = 3 - ((ddr_in32(&ddr->sdram_cfg) & SDRAM_CFG_DBW_MASK)
>> SDRAM_CFG_DBW_SHIFT);
timeout = ((total_mem_per_ctrl_adj << (6 - bus_width)) * 100 /
(clk >> 20)) << 2;
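/*
 * Unit sketch for the formula above: total_mem_per_ctrl_adj is in 64 MB
 * (2^26 byte) units and 2^bus_width is the bus width in bytes, so the
 * shift by (6 - bus_width) gives the transaction count in units of 2^20
 * beats; dividing by (clk >> 20) then yields seconds, the factor of 100
 * converts that to the 10 ms ticks used by the mdelay(10) poll loop
 * below, and the final << 2 adds margin.
 */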
total_mem_per_ctrl_adj >>= 4; /* shift down to gb size */
if ((ddr_in32(&ddr->sdram_cfg_2) & SDRAM_CFG2_D_INIT) != 0) {
debug("total size %d GB\n", total_mem_per_ctrl_adj);
debug("Need to wait up to %d ms\n", timeout * 10);
do {
mdelay(10);
} while (timeout-- > 0 &&
((ddr_in32(&ddr->sdram_cfg_2) & SDRAM_CFG2_D_INIT)) != 0);
if (timeout <= 0) {
if (ddr_in32(&ddr->debug[1]) & 0x3d00) {
ERROR("Found training error(s): 0x%x\n",
ddr_in32(&ddr->debug[1]));
}
ERROR("Error: Waiting for D_INIT timeout.\n");
return -EIO;
}
}
if (mod_bnds != 0U) {
debug("Restore original bnds\n");
for (i = 0U; i < DDRC_NUM_CS; i++) {
ddr_out32(&ddr->bnds[i].a, regs->cs[i].bnds);
}
ddr_out32(&ddr->csn_cfg[0], regs->cs[0].config);
#ifdef CONFIG_DDR_ADDR_DEC
if ((regs->dec[9] & U(0x1)) != 0U) {
debug("Restore address decoding\n");
ddr_out32(&ddr->dec[9], regs->dec[9]);
}
#endif
}
#ifdef ERRATA_DDR_A009803
/* Part 2 of 2 */
if ((regs->sdram_cfg[1] & SDRAM_CFG2_AP_EN) != 0) {
timeout = 400;
do {
mdelay(1);
} while (timeout-- > 0 && ((ddr_in32(&ddr->debug[1]) & 0x2) == 0));
if ((regs->sdram_cfg[0] & SDRAM_CFG_RD_EN) != 0) {
for (i = 0U; i < DDRC_NUM_CS; i++) {
if ((regs->cs[i].config & SDRAM_CS_CONFIG_EN) == 0) {
continue;
}
set_wait_for_bits_clear(&ddr->sdram_md_cntl,
MD_CNTL_MD_EN |
MD_CNTL_CS_SEL(i) |
0x070000ed,
MD_CNTL_MD_EN);
udelay(1);
}
}
ddr_out32(&ddr->err_disable,
regs->err_disable & ~DDR_ERR_DISABLE_APED);
}
#endif
#ifdef ERRATA_DDR_A009663
ddr_out32(&ddr->sdram_interval, regs->interval);
#endif
#ifdef ERRATA_DDR_A009942
timeout = 400;
do {
mdelay(1);
} while (timeout-- > 0 && ((ddr_in32(&ddr->debug[1]) & 0x2) == 0));
tmp = (regs->sdram_cfg[0] >> 19) & 0x3;
check = (tmp == DDR_DBUS_64) ? 4 : ((tmp == DDR_DBUS_32) ? 2 : 1);
for (i = 0; i < check; i++) {
tmp = ddr_in32(&ddr->debug[9 + i]);
debug("Reading debug[%d] as 0x%x\n", i + 9, tmp);
cpo_min = min(cpo_min,
min((tmp >> 24) & 0xff, (tmp >> 8) & 0xff));
cpo_max = max(cpo_max,
max((tmp >> 24) & 0xff, (tmp >> 8) & 0xff));
}
if ((regs->sdram_cfg[0] & SDRAM_CFG_ECC_EN) != 0) {
tmp = ddr_in32(&ddr->debug[13]);
cpo_min = min(cpo_min, (tmp >> 24) & 0xff);
cpo_max = max(cpo_max, (tmp >> 24) & 0xff);
}
debug("cpo_min 0x%x\n", cpo_min);
debug("cpo_max 0x%x\n", cpo_max);
tmp = ddr_in32(&ddr->debug[28]);
debug("debug[28] 0x%x\n", tmp);
if ((cpo_min + 0x3B) < (tmp & 0xff)) {
WARN("Warning: A009942 requires setting cpo_sample to 0x%x\n",
(cpo_min + cpo_max) / 2 + 0x27);
} else {
debug("Optimal cpo_sample 0x%x\n",
(cpo_min + cpo_max) / 2 + 0x27);
}
#endif
if (run_bist() != 0) {
if ((ddr_in32(&ddr->debug[1]) &
((get_ddrc_version(ddr) == 0x50500) ? 0x3c00 : 0x3d00)) != 0) {
ERROR("Found training error(s): 0x%x\n",
ddr_in32(&ddr->debug[1]));
return -EIO;
}
INFO("Running built-in self test ...\n");
/* give it 10x time to cover whole memory */
timeout = ((total_mem_per_ctrl << (6 - bus_width)) *
100 / (clk >> 20)) * 10;
INFO("\tWait up to %d ms\n", timeout * 10);
ret = bist(ddr, timeout);
}
dump_ddrc((void *)ddr);
return ret;
}
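/*
 * Call-sequence sketch for the two-pass RDIMM flow described above: pass 1
 * starts the clocks, the board then deasserts DDR reset, and pass 2
 * resumes programming. board_deassert_ddr_reset() is a hypothetical board
 * hook, not part of this driver.
 */
void board_deassert_ddr_reset(void);

static int program_ddrc_rdimm(unsigned long clk,
			      const struct ddr_cfg_regs *regs,
			      const struct ccsr_ddr *ddr)
{
	int ret;

	ret = ddrc_set_regs(clk, regs, ddr, 1);
	if (ret != 0) {
		return ret;
	}
	board_deassert_ddr_reset();
	return ddrc_set_regs(clk, regs, ddr, 2);
}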
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#include <errno.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <common/debug.h>
#include <ddr.h>
#include <dimm.h>
#include <i2c.h>
#include <lib/utils.h>
int read_spd(unsigned char chip, void *buf, int len)
{
unsigned char dummy = 0U;
int ret;
if (len < 256) {
ERROR("Invalid SPD length\n");
return -EINVAL;
}
i2c_write(SPD_SPA0_ADDRESS, 0, 1, &dummy, 1);
ret = i2c_read(chip, 0, 1, buf, 256);
if (ret == 0) {
i2c_write(SPD_SPA1_ADDRESS, 0, 1, &dummy, 1);
ret = i2c_read(chip, 0, 1, buf + 256, min(256, len - 256));
}
if (ret != 0) {
zeromem(buf, len);
}
return ret;
}
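/*
 * Usage sketch: read the SPD bytes covered by struct ddr4_spd from a DIMM
 * EEPROM and derive the DIMM parameters from them. The I2C address 0x51 is
 * an illustrative assumption; boards normally take it from their spd_addr
 * table.
 */
static int probe_dimm(struct dimm_params *pdimm)
{
	struct ddr4_spd spd;
	int ret;

	ret = read_spd(0x51, &spd, sizeof(spd));
	if (ret != 0) {
		return ret;
	}

	return cal_dimm_params(&spd, pdimm);
}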
int crc16(unsigned char *ptr, int count)
{
int i;
int crc = 0;
while (--count >= 0) {
crc = crc ^ (int)*ptr++ << 8;
for (i = 0; i < 8; ++i) {
if ((crc & 0x8000) != 0) {
crc = crc << 1 ^ 0x1021;
} else {
crc = crc << 1;
}
}
}
return crc & 0xffff;
}
static int ddr4_spd_check(const struct ddr4_spd *spd)
{
void *p = (void *)spd;
int csum16;
int len;
char crc_lsb; /* byte 126 */
char crc_msb; /* byte 127 */
len = 126;
csum16 = crc16(p, len);
crc_lsb = (char) (csum16 & 0xff);
crc_msb = (char) (csum16 >> 8);
if (spd->crc[0] != crc_lsb || spd->crc[1] != crc_msb) {
ERROR("SPD CRC = 0x%x%x, computed CRC = 0x%x%x\n",
spd->crc[1], spd->crc[0], crc_msb, crc_lsb);
return -EINVAL;
}
p = (void *)spd + 128;
len = 126;
csum16 = crc16(p, len);
crc_lsb = (char) (csum16 & 0xff);
crc_msb = (char) (csum16 >> 8);
if (spd->mod_section.uc[126] != crc_lsb ||
spd->mod_section.uc[127] != crc_msb) {
ERROR("SPD CRC = 0x%x%x, computed CRC = 0x%x%x\n",
spd->mod_section.uc[127], spd->mod_section.uc[126],
crc_msb, crc_lsb);
return -EINVAL;
}
return 0;
}
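/*
 * Rank size in bytes, derived from SPD fields:
 *   capacity per die (bits)  = 1 << ((density_banks & 0xf) + 28)
 *   primary bus width (bits) = 1 << ((bus_width & 0x7) + 3)
 *   SDRAM device width (bits) = 1 << ((organization & 0x7) + 2)
 *   rank size = capacity_per_die / 8 * primary_bus_width / sdram_width
 * For 3DS packages the die-count field is added to the exponent as well.
 * Worked example (non-3DS): 8Gb dies (0x5), 64-bit bus (0x3) and x8 devices
 * (0x1) give 1ULL << (33 - 3 + 6 - 3) = 8 GiB per rank.
 */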
static unsigned long long
compute_ranksize(const struct ddr4_spd *spd)
{
unsigned long long bsize;
int nbit_sdram_cap_bsize = 0;
int nbit_primary_bus_width = 0;
int nbit_sdram_width = 0;
int die_count = 0;
bool package_3ds;
if ((spd->density_banks & 0xf) <= 7) {
nbit_sdram_cap_bsize = (spd->density_banks & 0xf) + 28;
}
if ((spd->bus_width & 0x7) < 4) {
nbit_primary_bus_width = (spd->bus_width & 0x7) + 3;
}
if ((spd->organization & 0x7) < 4) {
nbit_sdram_width = (spd->organization & 0x7) + 2;
}
package_3ds = (spd->package_type & 0x3) == 0x2;
if (package_3ds) {
die_count = (spd->package_type >> 4) & 0x7;
}
bsize = 1ULL << (nbit_sdram_cap_bsize - 3 +
nbit_primary_bus_width - nbit_sdram_width +
die_count);
return bsize;
}
int cal_dimm_params(const struct ddr4_spd *spd, struct dimm_params *pdimm)
{
int ret;
int i;
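/*
 * Reference DQ map for raw card E unbuffered DIMMs, used below to patch
 * SPDs known to carry a wrong DQ mapping.
 */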
static const unsigned char udimm_rc_e_dq[18] = {
0x0c, 0x2c, 0x15, 0x35, 0x15, 0x35, 0x0b, 0x2c, 0x15,
0x35, 0x0b, 0x35, 0x0b, 0x2c, 0x0b, 0x35, 0x15, 0x36
};
int spd_error = 0;
unsigned char *ptr;
unsigned char val;
if (spd->mem_type != SPD_MEMTYPE_DDR4) {
ERROR("Not a DDR4 DIMM.\n");
return -EINVAL;
}
ret = ddr4_spd_check(spd);
if (ret != 0) {
ERROR("DIMM SPD checksum mismatch\n");
return -EINVAL;
}
/*
 * The part name in ASCII in the SPD EEPROM is not null terminated.
 * Null termination is guaranteed because mpart is pre-zeroed and at most
 * sizeof(pdimm->mpart) - 1 bytes of the ASCII part name are copied onto it.
 */
if ((spd->info_size_crc & 0xF) > 2) {
memcpy(pdimm->mpart, spd->mpart, sizeof(pdimm->mpart) - 1);
}
/* DIMM organization parameters */
pdimm->n_ranks = ((spd->organization >> 3) & 0x7) + 1;
debug("n_ranks %d\n", pdimm->n_ranks);
pdimm->rank_density = compute_ranksize(spd);
if (pdimm->rank_density == 0) {
return -EINVAL;
}
debug("rank_density 0x%llx\n", pdimm->rank_density);
pdimm->capacity = pdimm->n_ranks * pdimm->rank_density;
debug("capacity 0x%llx\n", pdimm->capacity);
pdimm->die_density = spd->density_banks & 0xf;
debug("die density 0x%x\n", pdimm->die_density);
pdimm->primary_sdram_width = 1 << (3 + (spd->bus_width & 0x7));
debug("primary_sdram_width %d\n", pdimm->primary_sdram_width);
if (((spd->bus_width >> 3) & 0x3) != 0) {
pdimm->ec_sdram_width = 8;
} else {
pdimm->ec_sdram_width = 0;
}
debug("ec_sdram_width %d\n", pdimm->ec_sdram_width);
pdimm->device_width = 1 << ((spd->organization & 0x7) + 2);
debug("device_width %d\n", pdimm->device_width);
pdimm->package_3ds = (spd->package_type & 0x3) == 0x2 ?
(spd->package_type >> 4) & 0x7 : 0;
debug("package_3ds %d\n", pdimm->package_3ds);
switch (spd->module_type & DDR4_SPD_MODULETYPE_MASK) {
case DDR4_SPD_RDIMM:
case DDR4_SPD_MINI_RDIMM:
case DDR4_SPD_72B_SO_RDIMM:
pdimm->rdimm = 1;
pdimm->rc = spd->mod_section.registered.ref_raw_card & 0x8f;
if ((spd->mod_section.registered.reg_map & 0x1) != 0) {
pdimm->mirrored_dimm = 1;
}
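/*
 * Map the SPD register drive-strength bytes onto RCW nibbles RC03-RC05;
 * the ((val & 0x3) << 2) | ((val & 0xc) >> 2) form swaps the two bit pairs
 * within the nibble to match the controller's RCW layout (an interpretation
 * of the code below, not a spec quote).
 */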
val = spd->mod_section.registered.ca_stren;
pdimm->rcw[3] = val >> 4;
pdimm->rcw[4] = ((val & 0x3) << 2) | ((val & 0xc) >> 2);
val = spd->mod_section.registered.clk_stren;
pdimm->rcw[5] = ((val & 0x3) << 2) | ((val & 0xc) >> 2);
pdimm->rcw[6] = 0xf;
/* A17 used for 16Gb+, C[2:0] used for 3DS */
pdimm->rcw[8] = pdimm->die_density >= 0x6 ? 0x0 : 0x8 |
(pdimm->package_3ds > 0x3 ? 0x0 :
(pdimm->package_3ds > 0x1 ? 0x1 :
(pdimm->package_3ds > 0 ? 0x2 : 0x3)));
if (pdimm->package_3ds != 0 || pdimm->n_ranks != 4) {
pdimm->rcw[13] = 0x4;
} else {
pdimm->rcw[13] = 0x5;
}
pdimm->rcw[13] |= pdimm->mirrored_dimm ? 0x8 : 0;
break;
case DDR4_SPD_UDIMM:
case DDR4_SPD_SO_DIMM:
case DDR4_SPD_MINI_UDIMM:
case DDR4_SPD_72B_SO_UDIMM:
case DDR4_SPD_16B_SO_DIMM:
case DDR4_SPD_32B_SO_DIMM:
pdimm->rc = spd->mod_section.unbuffered.ref_raw_card & 0x8f;
if ((spd->mod_section.unbuffered.addr_mapping & 0x1) != 0) {
pdimm->mirrored_dimm = 1;
}
if ((spd->mod_section.unbuffered.mod_height & 0xe0) == 0 &&
(spd->mod_section.unbuffered.ref_raw_card == 0x04)) {
/* Fix SPD error found on DIMMs with raw card E0 */
for (i = 0; i < 18; i++) {
if (spd->mapping[i] == udimm_rc_e_dq[i]) {
continue;
}
spd_error = 1;
ptr = (unsigned char *)&spd->mapping[i];
*ptr = udimm_rc_e_dq[i];
}
if (spd_error != 0) {
INFO("SPD DQ mapping error fixed\n");
}
}
break;
default:
ERROR("Unknown module_type 0x%x\n", spd->module_type);
return -EINVAL;
}
debug("rdimm %d\n", pdimm->rdimm);
debug("mirrored_dimm %d\n", pdimm->mirrored_dimm);
debug("rc 0x%x\n", pdimm->rc);
/* SDRAM device parameters */
pdimm->n_row_addr = ((spd->addressing >> 3) & 0x7) + 12;
debug("n_row_addr %d\n", pdimm->n_row_addr);
pdimm->n_col_addr = (spd->addressing & 0x7) + 9;
debug("n_col_addr %d\n", pdimm->n_col_addr);
pdimm->bank_addr_bits = (spd->density_banks >> 4) & 0x3;
debug("bank_addr_bits %d\n", pdimm->bank_addr_bits);
pdimm->bank_group_bits = (spd->density_banks >> 6) & 0x3;
debug("bank_group_bits %d\n", pdimm->bank_group_bits);
if (pdimm->ec_sdram_width != 0) {
pdimm->edc_config = 0x02;
} else {
pdimm->edc_config = 0x00;
}
debug("edc_config %d\n", pdimm->edc_config);
/* Per the DDR4 spec: BL8 is bit 3, BC4 is bit 2 */
pdimm->burst_lengths_bitmask = 0x0c;
debug("burst_lengths_bitmask 0x%x\n", pdimm->burst_lengths_bitmask);
/*
 * MTB - medium timebase
 * The MTB in the SPD spec is 125ps.
 *
 * FTB - fine timebase
 * Use 1/10th of a ps as the unit to avoid floating point,
 * e.g. 10 for 1ps, 25 for 2.5ps, 50 for 5ps.
 */
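/*
 * Worked example, assuming spd_to_ps() applies the MTB value plus the signed
 * FTB correction: a DDR4-2400 DIMM typically encodes tCKmin as 7 MTB
 * (7 * 125ps = 875ps) with a fine offset of -42ps, giving 875 - 42 = 833ps.
 */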
if ((spd->timebases & 0xf) == 0x0) {
pdimm->mtb_ps = 125;
pdimm->ftb_10th_ps = 10;
} else {
ERROR("Unknown Timebases\n");
return -EINVAL;
}
/* sdram minimum cycle time */
pdimm->tckmin_x_ps = spd_to_ps(spd->tck_min, spd->fine_tck_min);
debug("tckmin_x_ps %d\n", pdimm->tckmin_x_ps);
/* sdram max cycle time */
pdimm->tckmax_ps = spd_to_ps(spd->tck_max, spd->fine_tck_max);
debug("tckmax_ps %d\n", pdimm->tckmax_ps);
/*
* CAS latency supported
* bit0 - CL7
* bit4 - CL11
* bit8 - CL15
* bit12- CL19
* bit16- CL23
*/
pdimm->caslat_x = (spd->caslat_b1 << 7) |
(spd->caslat_b2 << 15) |
(spd->caslat_b3 << 23);
debug("caslat_x 0x%x\n", pdimm->caslat_x);
if (spd->caslat_b4 != 0) {
WARN("Unhandled caslat_b4 value\n");
}
/*
* min CAS latency time
*/
pdimm->taa_ps = spd_to_ps(spd->taa_min, spd->fine_taa_min);
debug("taa_ps %d\n", pdimm->taa_ps);
/*
* min RAS to CAS delay time
*/
pdimm->trcd_ps = spd_to_ps(spd->trcd_min, spd->fine_trcd_min);
debug("trcd_ps %d\n", pdimm->trcd_ps);
/*
* Min Row Precharge Delay Time
*/
pdimm->trp_ps = spd_to_ps(spd->trp_min, spd->fine_trp_min);
debug("trp_ps %d\n", pdimm->trp_ps);
/* min active to precharge delay time */
pdimm->tras_ps = (((spd->tras_trc_ext & 0xf) << 8) +
spd->tras_min_lsb) * pdimm->mtb_ps;
debug("tras_ps %d\n", pdimm->tras_ps);
/* min active to active/refresh delay time */
pdimm->trc_ps = spd_to_ps((((spd->tras_trc_ext & 0xf0) << 4) +
spd->trc_min_lsb), spd->fine_trc_min);
debug("trc_ps %d\n", pdimm->trc_ps);
/* Min Refresh Recovery Delay Time */
pdimm->trfc1_ps = ((spd->trfc1_min_msb << 8) | (spd->trfc1_min_lsb)) *
pdimm->mtb_ps;
debug("trfc1_ps %d\n", pdimm->trfc1_ps);
pdimm->trfc2_ps = ((spd->trfc2_min_msb << 8) | (spd->trfc2_min_lsb)) *
pdimm->mtb_ps;
debug("trfc2_ps %d\n", pdimm->trfc2_ps);
pdimm->trfc4_ps = ((spd->trfc4_min_msb << 8) | (spd->trfc4_min_lsb)) *
pdimm->mtb_ps;
debug("trfc4_ps %d\n", pdimm->trfc4_ps);
/* min four active window delay time */
pdimm->tfaw_ps = (((spd->tfaw_msb & 0xf) << 8) | spd->tfaw_min) *
pdimm->mtb_ps;
debug("tfaw_ps %d\n", pdimm->tfaw_ps);
/* min row active to row active delay time, different bank group */
pdimm->trrds_ps = spd_to_ps(spd->trrds_min, spd->fine_trrds_min);
debug("trrds_ps %d\n", pdimm->trrds_ps);
/* min row active to row active delay time, same bank group */
pdimm->trrdl_ps = spd_to_ps(spd->trrdl_min, spd->fine_trrdl_min);
debug("trrdl_ps %d\n", pdimm->trrdl_ps);
/* min CAS to CAS Delay Time (tCCD_Lmin), same bank group */
pdimm->tccdl_ps = spd_to_ps(spd->tccdl_min, spd->fine_tccdl_min);
debug("tccdl_ps %d\n", pdimm->tccdl_ps);
if (pdimm->package_3ds != 0) {
if (pdimm->die_density > 5) {
debug("Unsupported logical rank density 0x%x\n",
pdimm->die_density);
return -EINVAL;
}
pdimm->trfc_slr_ps = (pdimm->die_density <= 4) ?
260000 : 350000;
}
debug("trfc_slr_ps %d\n", pdimm->trfc_slr_ps);
/* 15ns for all speed bins */
pdimm->twr_ps = 15000;
debug("twr_ps %d\n", pdimm->twr_ps);
/*
* Average periodic refresh interval
* tREFI = 7.8 us at normal temperature range
*/
pdimm->refresh_rate_ps = 7800000;
debug("refresh_rate_ps %d\n", pdimm->refresh_rate_ps);
for (i = 0; i < 18; i++) {
pdimm->dq_mapping[i] = spd->mapping[i];
debug("dq_mapping 0x%x\n", pdimm->dq_mapping[i]);
}
pdimm->dq_mapping_ors = ((spd->mapping[0] >> 6) & 0x3) == 0 ? 1 : 0;
debug("dq_mapping_ors %d\n", pdimm->dq_mapping_ors);
return 0;
}
/*
* Copyright 2021 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <common/debug.h>
#include <ddr.h>
#include <immap.h>
#include <lib/mmio.h>
#define UL_5POW12 244140625UL
#define ULL_2E12 2000000000000ULL
#define UL_2POW13 (1UL << 13)
#define ULL_8FS 0xFFFFFFFFULL
#define do_div(n, base) ({ \
unsigned int __base = (base); \
unsigned int __rem; \
__rem = ((unsigned long long)(n)) % __base; \
(n) = ((unsigned long long)(n)) / __base; \
__rem; \
})
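/*
 * do_div() divides the 64-bit value 'n' in place by the 32-bit 'base' and
 * evaluates to the remainder. Illustrative use (hypothetical values):
 *
 *	unsigned long long n = 2000000000000ULL;
 *	unsigned int rem = do_div(n, 2400000000U);
 *	now n == 833 and rem holds the remainder of the division
 */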
#define CCN_HN_F_SAM_NODEID_MASK 0x7f
#ifdef NXP_HAS_CCN504
#define CCN_HN_F_SAM_NODEID_DDR0 0x4
#define CCN_HN_F_SAM_NODEID_DDR1 0xe
#elif defined(NXP_HAS_CCN508)
#define CCN_HN_F_SAM_NODEID_DDR0 0x8
#define CCN_HN_F_SAM_NODEID_DDR1 0x18
#endif
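/*
 * As encoded in the switch below, DDR controllers 0 and 1 are clocked from
 * freq_ddr_pll0 while controller 2 uses freq_ddr_pll1.
 */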
unsigned long get_ddr_freq(struct sysinfo *sys, int ctrl_num)
{
if (sys->freq_ddr_pll0 == 0) {
get_clocks(sys);
}
switch (ctrl_num) {
case 0:
return sys->freq_ddr_pll0;
case 1:
return sys->freq_ddr_pll0;
case 2:
return sys->freq_ddr_pll1;
}
return 0;
}
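/*
 * get_memory_clk_ps() converts the data rate (in transfers per second) into
 * the memory clock period in ps: period = 2e12 / data_rate, since one clock
 * carries two transfers. E.g. 2400 MT/s gives 2e12 / 2.4e9 ~= 833 ps.
 */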
unsigned int get_memory_clk_ps(const unsigned long data_rate)
{
unsigned int result;
/* Round to the nearest ps, being careful about 64-bit multiply/divide */
unsigned long long rem, mclk_ps = ULL_2E12;
/* Now perform the big divide, the result fits in 32-bits */
rem = do_div(mclk_ps, data_rate);
result = (rem >= (data_rate >> 1)) ? mclk_ps + 1 : mclk_ps;
return result;
}
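/*
 * picos_to_mclk() computes cycles = picos * data_rate / 2e12 (two transfers
 * per clock) without a 64-by-64 division: 2e12 factors as 5^12 * 2^13, so
 * the quotient is formed by do_div() with 5^12 followed by a 13-bit shift,
 * rounding up when the remainder exceeds the 1ps error bound. Rough example:
 * tRCD = 13750 ps at 2133 MT/s gives 13750 * 2.133e9 / 2e12 ~= 14.7,
 * i.e. 15 clocks.
 */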
unsigned int picos_to_mclk(unsigned long data_rate, unsigned int picos)
{
unsigned long long clks, clks_rem;
/* Short circuit for zero picos */
if ((picos == 0U) || (data_rate == 0UL)) {
return 0U;
}
/* First multiply the time by the data rate (32x32 => 64) */
clks = picos * (unsigned long long)data_rate;
/*
* Now divide by 5^12 and track the 32-bit remainder, then divide
* by 2*(2^12) using shifts (and updating the remainder).
*/
clks_rem = do_div(clks, UL_5POW12);
clks_rem += (clks & (UL_2POW13-1)) * UL_5POW12;
clks >>= 13U;
/* If we had a remainder greater than the 1ps error, then round up */
if (clks_rem > data_rate) {
clks++;
}
/* Clamp to the maximum representable value */
if (clks > ULL_8FS) {
clks = ULL_8FS;
}
return (unsigned int) clks;
}
/* valid_spd_mask has been checked by parse_spd */
int disable_unused_ddrc(struct ddr_info *priv,
int valid_spd_mask, uintptr_t nxp_ccn_hn_f0_addr)
{
#if defined(NXP_HAS_CCN504) || defined(NXP_HAS_CCN508)
void *hnf_sam_ctrl = (void *)(nxp_ccn_hn_f0_addr + CCN_HN_F_SAM_CTL);
uint64_t val;
uint32_t nodeid;
#ifdef NXP_HAS_CCN504
uint32_t num_hnf_nodes = 4U;
#else
uint32_t num_hnf_nodes = 8U;
#endif
int disable_ddrc = 0;
int i;
if (priv->num_ctlrs < 2) {
debug("%s: nothing to do.\n", __func__);
}
switch (priv->dimm_on_ctlr) {
case 1:
disable_ddrc = ((valid_spd_mask & 0x2) == 0) ? 2 : 0;
disable_ddrc = ((valid_spd_mask & 0x1) == 0) ? 1 : disable_ddrc;
break;
case 2:
disable_ddrc = ((valid_spd_mask & 0x4) == 0) ? 2 : 0;
disable_ddrc = ((valid_spd_mask & 0x1) == 0) ? 1 : disable_ddrc;
break;
default:
ERROR("Invalid number of DIMMs %d\n", priv->dimm_on_ctlr);
return -EINVAL;
}
if (disable_ddrc != 0) {
debug("valid_spd_mask = 0x%x\n", valid_spd_mask);
}
switch (disable_ddrc) {
case 1:
priv->num_ctlrs = 1;
priv->spd_addr = &priv->spd_addr[priv->dimm_on_ctlr];
priv->ddr[0] = priv->ddr[1];
priv->ddr[1] = NULL;
priv->phy[0] = priv->phy[1];
priv->phy[1] = NULL;
debug("Disable first DDR controller\n");
break;
case 2:
priv->num_ctlrs = 1;
priv->ddr[1] = NULL;
priv->phy[1] = NULL;
debug("Disable second DDR controller\n");
/* fallthrough */
case 0:
break;
default:
ERROR("Program error.\n");
return -EINVAL;
}
if (disable_ddrc == 0) {
debug("Both controllers in use.\n");
return 0;
}
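/*
 * Re-target the CCN HN-F SAM node IDs: with one controller disabled, every
 * HN-F node is pointed at the remaining controller; with both in use (not
 * reached here, since the function returns above), half of the nodes would
 * target each controller.
 */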
for (i = 0; i < num_hnf_nodes; i++) {
val = mmio_read_64((uintptr_t)hnf_sam_ctrl);
nodeid = disable_ddrc == 1 ? CCN_HN_F_SAM_NODEID_DDR1 :
(disable_ddrc == 2 ? CCN_HN_F_SAM_NODEID_DDR0 :
(i < 4 ? CCN_HN_F_SAM_NODEID_DDR0
: CCN_HN_F_SAM_NODEID_DDR1));
if (nodeid != (val & CCN_HN_F_SAM_NODEID_MASK)) {
debug("Setting HN-F node %d\n", i);
debug("nodeid = 0x%x\n", nodeid);
val &= ~CCN_HN_F_SAM_NODEID_MASK;
val |= nodeid;
mmio_write_64((uintptr_t)hnf_sam_ctrl, val);
}
hnf_sam_ctrl += CCN_HN_F_REGION_SIZE;
}
#endif
return 0;
}
unsigned int get_ddrc_version(const struct ccsr_ddr *ddr)
{
unsigned int ver;
ver = (ddr_in32(&ddr->ip_rev1) & 0xFFFF) << 8U;
ver |= (ddr_in32(&ddr->ip_rev2) & 0xFF00) >> 8U;
return ver;
}
void print_ddr_info(struct ccsr_ddr *ddr)
{
unsigned int cs0_config = ddr_in32(&ddr->csn_cfg[0]);
unsigned int sdram_cfg = ddr_in32(&ddr->sdram_cfg);
int cas_lat;
if ((sdram_cfg & SDRAM_CFG_MEM_EN) == 0U) {
printf(" (DDR not enabled)\n");
return;
}
printf("DDR");
switch ((sdram_cfg & SDRAM_CFG_SDRAM_TYPE_MASK) >>
SDRAM_CFG_SDRAM_TYPE_SHIFT) {
case SDRAM_TYPE_DDR4:
printf("4");
break;
default:
printf("?");
break;
}
switch (sdram_cfg & SDRAM_CFG_DBW_MASK) {
case SDRAM_CFG_32_BW:
printf(", 32-bit");
break;
case SDRAM_CFG_16_BW:
printf(", 16-bit");
break;
case SDRAM_CFG_8_BW:
printf(", 8-bit");
break;
default:
printf(", 64-bit");
break;
}
/* Calculate CAS latency based on timing cfg values */
cas_lat = ((ddr_in32(&ddr->timing_cfg_1) >> 16) & 0xf);
cas_lat += 2; /* for DDRC newer than 4.4 */
cas_lat += ((ddr_in32(&ddr->timing_cfg_3) >> 12) & 3) << 4;
printf(", CL=%d", cas_lat >> 1);
if ((cas_lat & 0x1) != 0) {
printf(".5");
}
if ((sdram_cfg & SDRAM_CFG_ECC_EN) != 0) {
printf(", ECC on");
} else {
printf(", ECC off");
}
if ((cs0_config & 0x20000000) != 0) {
printf(", ");
switch ((cs0_config >> 24) & 0xf) {
case DDR_256B_INTLV:
printf("256B");
break;
default:
printf("invalid");
break;
}
}
if (((sdram_cfg >> 8) & 0x7f) != 0) {
printf(", ");
switch ((sdram_cfg >> 8) & 0x7f) {
case DDR_BA_INTLV_CS0123:
printf("CS0+CS1+CS2+CS3");
break;
case DDR_BA_INTLV_CS01:
printf("CS0+CS1");
break;
default:
printf("invalid");
break;
}
}
printf("\n");
}