// SPDX-License-Identifier: GPL-2.0
/*
 * Author:
 *	Chuanhong Guo <gch981213@gmail.com>
 */

#include <linux/device.h>
#include <linux/kernel.h>
#include <linux/mtd/spinand.h>

#define SPINAND_MFR_GIGADEVICE			0xC8

#define GD5FXGQ4XA_STATUS_ECC_1_7_BITFLIPS	(1 << 4)
#define GD5FXGQ4XA_STATUS_ECC_8_BITFLIPS	(3 << 4)

#define GD5FXGQ5XE_STATUS_ECC_1_4_BITFLIPS	(1 << 4)
#define GD5FXGQ5XE_STATUS_ECC_4_BITFLIPS	(3 << 4)

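/*
 * Extra status register: the ECC status handlers for the Q4UE/Q5E (and
 * GM7/GM8) parts below read it to refine the corrected-bitflip count.
 */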
#define GD5FXGQXXEXXG_REG_STATUS2		0xf0

#define GD5FXGQ4UXFXXG_STATUS_ECC_MASK		(7 << 4)
#define GD5FXGQ4UXFXXG_STATUS_ECC_NO_BITFLIPS	(0 << 4)
#define GD5FXGQ4UXFXXG_STATUS_ECC_1_3_BITFLIPS	(1 << 4)
#define GD5FXGQ4UXFXXG_STATUS_ECC_UNCOR_ERROR	(7 << 4)

static SPINAND_OP_VARIANTS(read_cache_variants,
		SPINAND_PAGE_READ_FROM_CACHE_QUADIO_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_X4_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_DUALIO_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_X2_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_OP(true, 0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_OP(false, 0, 1, NULL, 0));

static SPINAND_OP_VARIANTS(read_cache_variants_f,
		SPINAND_PAGE_READ_FROM_CACHE_QUADIO_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_X4_OP_3A(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_DUALIO_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_X2_OP_3A(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_OP_3A(true, 0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_OP_3A(false, 0, 0, NULL, 0));

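/*
 * The two read tables below differ from read_cache_variants only in the
 * number of dummy bytes (second numeric argument) used for the dual and
 * quad I/O page reads.
 */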
static SPINAND_OP_VARIANTS(read_cache_variants_1gq5,
		SPINAND_PAGE_READ_FROM_CACHE_QUADIO_OP(0, 2, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_X4_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_DUALIO_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_X2_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_OP(true, 0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_OP(false, 0, 1, NULL, 0));

static SPINAND_OP_VARIANTS(read_cache_variants_2gq5,
		SPINAND_PAGE_READ_FROM_CACHE_QUADIO_OP(0, 4, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_X4_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_DUALIO_OP(0, 2, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_X2_OP(0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_OP(true, 0, 1, NULL, 0),
		SPINAND_PAGE_READ_FROM_CACHE_OP(false, 0, 1, NULL, 0));

static SPINAND_OP_VARIANTS(write_cache_variants,
		SPINAND_PROG_LOAD_X4(true, 0, NULL, 0),
		SPINAND_PROG_LOAD(true, 0, NULL, 0));

static SPINAND_OP_VARIANTS(update_cache_variants,
		SPINAND_PROG_LOAD_X4(false, 0, NULL, 0),
		SPINAND_PROG_LOAD(false, 0, NULL, 0));

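/*
 * 64-byte OOB layout used by the GQ4xA parts: four 16-byte sections, each
 * with 8 free bytes followed by 8 ECC bytes; byte 0 of section 0 is
 * reserved for the bad block marker.
 */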
static int gd5fxgq4xa_ooblayout_ecc(struct mtd_info *mtd, int section,
				    struct mtd_oob_region *region)
{
	if (section > 3)
		return -ERANGE;

	region->offset = (16 * section) + 8;
	region->length = 8;

	return 0;
}

static int gd5fxgq4xa_ooblayout_free(struct mtd_info *mtd, int section,
				     struct mtd_oob_region *region)
{
	if (section > 3)
		return -ERANGE;

	if (section) {
		region->offset = 16 * section;
		region->length = 8;
	} else {
		/* section 0 has one byte reserved for bad block mark */
		region->offset = 1;
		region->length = 7;
	}
	return 0;
}

static const struct mtd_ooblayout_ops gd5fxgq4xa_ooblayout = {
	.ecc = gd5fxgq4xa_ooblayout_ecc,
	.free = gd5fxgq4xa_ooblayout_free,
};

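/*
 * GQ4xA ECC status (bits 5:4 of the status register): 00 = no bitflips,
 * 01 = 1-7 bitflips corrected, 11 = 8 bitflips corrected,
 * 10 = uncorrectable error.
 */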
static int gd5fxgq4xa_ecc_get_status(struct spinand_device *spinand,
				     u8 status)
{
	switch (status & STATUS_ECC_MASK) {
	case STATUS_ECC_NO_BITFLIPS:
		return 0;

	case GD5FXGQ4XA_STATUS_ECC_1_7_BITFLIPS:
		/* 1-7 bits are flipped. Return the maximum. */
		return 7;

	case GD5FXGQ4XA_STATUS_ECC_8_BITFLIPS:
		return 8;

	case STATUS_ECC_UNCOR_ERROR:
		return -EBADMSG;

	default:
		break;
	}

	return -EINVAL;
}

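/*
 * Shared 128-byte OOB layout: the lower 64 bytes are free (byte 0 is
 * reserved for the bad block marker) and the upper 64 bytes hold the
 * on-die ECC.
 */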
static int gd5fxgqx_variant2_ooblayout_ecc(struct mtd_info *mtd, int section,
					   struct mtd_oob_region *region)
{
	if (section)
		return -ERANGE;

	region->offset = 64;
	region->length = 64;

	return 0;
}

static int gd5fxgqx_variant2_ooblayout_free(struct mtd_info *mtd, int section,
					    struct mtd_oob_region *region)
{
	if (section)
		return -ERANGE;

	/* Reserve 1 byte for the BBM. */
	region->offset = 1;
	region->length = 63;

	return 0;
}

/* Valid for Q4/Q5 and Q6 (untested) devices */
static const struct mtd_ooblayout_ops gd5fxgqx_variant2_ooblayout = {
	.ecc = gd5fxgqx_variant2_ooblayout_ecc,
	.free = gd5fxgqx_variant2_ooblayout_free,
};

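/*
 * 256-byte OOB layout used by the 4Gb GQ4 C parts: the lower half is free
 * (byte 0 reserved for the bad block marker), the upper half holds the ECC.
 */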
static int gd5fxgq4xc_ooblayout_256_ecc(struct mtd_info *mtd, int section,
					struct mtd_oob_region *oobregion)
{
	if (section)
		return -ERANGE;

	oobregion->offset = 128;
	oobregion->length = 128;

	return 0;
}

static int gd5fxgq4xc_ooblayout_256_free(struct mtd_info *mtd, int section,
					 struct mtd_oob_region *oobregion)
{
	if (section)
		return -ERANGE;

	oobregion->offset = 1;
	oobregion->length = 127;

	return 0;
}

static const struct mtd_ooblayout_ops gd5fxgq4xc_oob_256_ops = {
	.ecc = gd5fxgq4xc_ooblayout_256_ecc,
	.free = gd5fxgq4xc_ooblayout_256_free,
};

static int gd5fxgq4uexxg_ecc_get_status(struct spinand_device *spinand,
					u8 status)
{
	u8 status2;
	struct spi_mem_op op = SPINAND_GET_FEATURE_OP(GD5FXGQXXEXXG_REG_STATUS2,
						      &status2);
	int ret;

	switch (status & STATUS_ECC_MASK) {
	case STATUS_ECC_NO_BITFLIPS:
		return 0;

	case GD5FXGQ4XA_STATUS_ECC_1_7_BITFLIPS:
		/*
		 * Read the status2 register to determine a more fine-grained
		 * bit error status.
		 */
		ret = spi_mem_exec_op(spinand->spimem, &op);
		if (ret)
			return ret;

		/*
		 * 4 ... 7 bits are flipped (1..4 can't be distinguished, so
		 * the maximum of 4 is reported in that case).
		 */
		/* bits sorted this way (3...0): ECCS1,ECCS0,ECCSE1,ECCSE0 */
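		/*
		 * e.g. ECCS = 01 and ECCSE = 10 compose to 0b0110, i.e. 6
		 * corrected bitflips; ECCSE = 00 gives the minimum of 4.
		 */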
		return ((status & STATUS_ECC_MASK) >> 2) |
			((status2 & STATUS_ECC_MASK) >> 4);

	case GD5FXGQ4XA_STATUS_ECC_8_BITFLIPS:
		return 8;

	case STATUS_ECC_UNCOR_ERROR:
		return -EBADMSG;

	default:
		break;
	}

	return -EINVAL;
}

static int gd5fxgq5xexxg_ecc_get_status(struct spinand_device *spinand,
					u8 status)
{
	u8 status2;
	struct spi_mem_op op = SPINAND_GET_FEATURE_OP(GD5FXGQXXEXXG_REG_STATUS2,
						      &status2);
	int ret;

	switch (status & STATUS_ECC_MASK) {
	case STATUS_ECC_NO_BITFLIPS:
		return 0;

	case GD5FXGQ5XE_STATUS_ECC_1_4_BITFLIPS:
		/*
		 * Read the status2 register to determine a more fine-grained
		 * bit error status.
		 */
		ret = spi_mem_exec_op(spinand->spimem, &op);
		if (ret)
			return ret;

		/*
		 * 1 ... 4 bits are flipped (and corrected).
		 */
		/* bits sorted this way (1...0): ECCSE1, ECCSE0 */
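		/* ECCSE = 00..11 maps to 1..4 corrected bitflips. */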
		return ((status2 & STATUS_ECC_MASK) >> 4) + 1;

	case STATUS_ECC_UNCOR_ERROR:
		return -EBADMSG;

	default:
		break;
	}

	return -EINVAL;
}

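/*
 * The Q4UF parts report ECC status in a 3-bit field (bits 6:4):
 * 0 = no bitflips, 1 = 1-3 bitflips, 2..6 = 4-8 bitflips,
 * 7 = uncorrectable error.
 */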
static int gd5fxgq4ufxxg_ecc_get_status(struct spinand_device *spinand,
					u8 status)
{
	switch (status & GD5FXGQ4UXFXXG_STATUS_ECC_MASK) {
	case GD5FXGQ4UXFXXG_STATUS_ECC_NO_BITFLIPS:
		return 0;

	case GD5FXGQ4UXFXXG_STATUS_ECC_1_3_BITFLIPS:
		return 3;

	case GD5FXGQ4UXFXXG_STATUS_ECC_UNCOR_ERROR:
		return -EBADMSG;

	default: /* (2 << 4) through (6 << 4) are 4-8 corrected errors */
		return ((status & GD5FXGQ4UXFXXG_STATUS_ECC_MASK) >> 4) + 2;
	}

	return -EINVAL;
}

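/*
 * NAND_MEMORG() arguments below are: bits per cell, page size, OOB size,
 * pages per eraseblock, eraseblocks per LUN, max bad eraseblocks per LUN,
 * planes per LUN, LUNs per target and number of targets (see
 * <linux/mtd/nand.h>); NAND_ECCREQ() is (strength, step size).
 * SPINAND_ID() gives the Read ID method followed by the device ID byte(s).
 */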
static const struct spinand_info gigadevice_spinand_table[] = {
	SPINAND_INFO("GD5F1GQ4xA",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xf1),
		     NAND_MEMORG(1, 2048, 64, 64, 1024, 20, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgq4xa_ooblayout,
				     gd5fxgq4xa_ecc_get_status)),
	SPINAND_INFO("GD5F2GQ4xA",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xf2),
		     NAND_MEMORG(1, 2048, 64, 64, 2048, 40, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgq4xa_ooblayout,
				     gd5fxgq4xa_ecc_get_status)),
	SPINAND_INFO("GD5F4GQ4xA",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xf4),
		     NAND_MEMORG(1, 2048, 64, 64, 4096, 80, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgq4xa_ooblayout,
				     gd5fxgq4xa_ecc_get_status)),
	SPINAND_INFO("GD5F4GQ4RC",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE, 0xa4, 0x68),
		     NAND_MEMORG(1, 4096, 256, 64, 2048, 40, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_f,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgq4xc_oob_256_ops,
				     gd5fxgq4ufxxg_ecc_get_status)),
	SPINAND_INFO("GD5F4GQ4UC",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE, 0xb4, 0x68),
		     NAND_MEMORG(1, 4096, 256, 64, 2048, 40, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_f,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgq4xc_oob_256_ops,
				     gd5fxgq4ufxxg_ecc_get_status)),
	SPINAND_INFO("GD5F1GQ4UExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xd1),
		     NAND_MEMORG(1, 2048, 128, 64, 1024, 20, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4uexxg_ecc_get_status)),
	SPINAND_INFO("GD5F1GQ4RExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xc1),
		     NAND_MEMORG(1, 2048, 128, 64, 1024, 20, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4uexxg_ecc_get_status)),
	SPINAND_INFO("GD5F2GQ4UExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xd2),
		     NAND_MEMORG(1, 2048, 128, 64, 2048, 40, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4uexxg_ecc_get_status)),
	SPINAND_INFO("GD5F2GQ4RExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xc2),
		     NAND_MEMORG(1, 2048, 128, 64, 2048, 40, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4uexxg_ecc_get_status)),
	SPINAND_INFO("GD5F1GQ4UFxxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE, 0xb1, 0x48),
		     NAND_MEMORG(1, 2048, 128, 64, 1024, 20, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_f,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4ufxxg_ecc_get_status)),
	SPINAND_INFO("GD5F1GQ5UExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x51),
		     NAND_MEMORG(1, 2048, 128, 64, 1024, 20, 1, 1, 1),
		     NAND_ECCREQ(4, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_1gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq5xexxg_ecc_get_status)),
	SPINAND_INFO("GD5F1GQ5RExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x41),
		     NAND_MEMORG(1, 2048, 128, 64, 1024, 20, 1, 1, 1),
		     NAND_ECCREQ(4, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_1gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq5xexxg_ecc_get_status)),
	SPINAND_INFO("GD5F2GQ5UExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x52),
		     NAND_MEMORG(1, 2048, 128, 64, 2048, 40, 1, 1, 1),
		     NAND_ECCREQ(4, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_2gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq5xexxg_ecc_get_status)),
	SPINAND_INFO("GD5F2GQ5RExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x42),
		     NAND_MEMORG(1, 2048, 128, 64, 2048, 40, 1, 1, 1),
		     NAND_ECCREQ(4, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_2gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq5xexxg_ecc_get_status)),
	SPINAND_INFO("GD5F4GQ6UExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x55),
		     NAND_MEMORG(1, 2048, 128, 64, 2048, 40, 1, 2, 1),
		     NAND_ECCREQ(4, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_2gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq5xexxg_ecc_get_status)),
	SPINAND_INFO("GD5F4GQ6RExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x45),
		     NAND_MEMORG(1, 2048, 128, 64, 2048, 40, 1, 2, 1),
		     NAND_ECCREQ(4, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_2gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq5xexxg_ecc_get_status)),
	SPINAND_INFO("GD5F1GM7UExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x91),
		     NAND_MEMORG(1, 2048, 128, 64, 1024, 20, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_1gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4uexxg_ecc_get_status)),
	SPINAND_INFO("GD5F1GM7RExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x81),
		     NAND_MEMORG(1, 2048, 128, 64, 1024, 20, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_1gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4uexxg_ecc_get_status)),
	SPINAND_INFO("GD5F2GM7UExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x92),
		     NAND_MEMORG(1, 2048, 128, 64, 2048, 40, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_1gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4uexxg_ecc_get_status)),
	SPINAND_INFO("GD5F2GM7RExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x82),
		     NAND_MEMORG(1, 2048, 128, 64, 2048, 40, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_1gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4uexxg_ecc_get_status)),
	SPINAND_INFO("GD5F4GM8UExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x95),
		     NAND_MEMORG(1, 2048, 128, 64, 4096, 80, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_1gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4uexxg_ecc_get_status)),
	SPINAND_INFO("GD5F4GM8RExxG",
		     SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x85),
		     NAND_MEMORG(1, 2048, 128, 64, 4096, 80, 1, 1, 1),
		     NAND_ECCREQ(8, 512),
		     SPINAND_INFO_OP_VARIANTS(&read_cache_variants_1gq5,
					      &write_cache_variants,
					      &update_cache_variants),
		     SPINAND_HAS_QE_BIT,
		     SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
				     gd5fxgq4uexxg_ecc_get_status)),
};

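/* No manufacturer-specific hooks are required for these parts. */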
static const struct spinand_manufacturer_ops gigadevice_spinand_manuf_ops = {
};

const struct spinand_manufacturer gigadevice_spinand_manufacturer = {
	.id = SPINAND_MFR_GIGADEVICE,
	.name = "GigaDevice",
	.chips = gigadevice_spinand_table,
	.nchips = ARRAY_SIZE(gigadevice_spinand_table),
	.ops = &gigadevice_spinand_manuf_ops,
};