1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
/* Load a 32-bit value from @addr, honouring the device endianness
 * requested by @endian.  The transaction result is stored in *@result
 * if @result is non-NULL.  Called with the RCU read lock not held;
 * takes and releases it internally.
 */
static inline uint32_t glue(address_space_ldl_internal, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result,
    enum device_endian endian)
{
    uint8_t *ptr;
    uint64_t val;
    MemoryRegion *mr;
    hwaddr l = 4;
    hwaddr addr1;
    MemTxResult r;
    bool release_lock = false;  /* true if prepare_mmio_access took the BQL */

    RCU_READ_LOCK();
    mr = TRANSLATE(addr, &addr1, &l, false, attrs);
    /* Slow path if the access is not direct RAM or would cross the
     * section boundary (TRANSLATE shrank l below 4).
     */
    if (l < 4 || !memory_access_is_direct(mr, false)) {
        release_lock |= prepare_mmio_access(mr);

        /* I/O case: the dispatch returns the value in target endianness,
         * so swap if the caller asked for the opposite one.
         */
        r = memory_region_dispatch_read(mr, addr1, &val, 4, attrs);
#if defined(TARGET_WORDS_BIGENDIAN)
        if (endian == DEVICE_LITTLE_ENDIAN) {
            val = bswap32(val);
        }
#else
        if (endian == DEVICE_BIG_ENDIAN) {
            val = bswap32(val);
        }
#endif
    } else {
        /* RAM case: read straight from the host mapping */
        ptr = qemu_map_ram_ptr(mr->ram_block, addr1);
        switch (endian) {
        case DEVICE_LITTLE_ENDIAN:
            val = ldl_le_p(ptr);
            break;
        case DEVICE_BIG_ENDIAN:
            val = ldl_be_p(ptr);
            break;
        default:
            val = ldl_p(ptr);
            break;
        }
        r = MEMTX_OK;
    }
    if (result) {
        *result = r;
    }
    if (release_lock) {
        qemu_mutex_unlock_iothread();
    }
    RCU_READ_UNLOCK();
    return val;
}
76
/* Load a 32-bit value from @addr in target-native endianness.  */
uint32_t glue(address_space_ldl, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
{
    return glue(address_space_ldl_internal, SUFFIX)(ARG1, addr, attrs, result,
                                                    DEVICE_NATIVE_ENDIAN);
}
83
/* Load a little-endian 32-bit value from @addr.  */
uint32_t glue(address_space_ldl_le, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
{
    return glue(address_space_ldl_internal, SUFFIX)(ARG1, addr, attrs, result,
                                                    DEVICE_LITTLE_ENDIAN);
}
90
/* Load a big-endian 32-bit value from @addr.  */
uint32_t glue(address_space_ldl_be, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
{
    return glue(address_space_ldl_internal, SUFFIX)(ARG1, addr, attrs, result,
                                                    DEVICE_BIG_ENDIAN);
}
97
98
/* Load a 64-bit value from @addr, honouring the device endianness
 * requested by @endian.  The transaction result is stored in *@result
 * if @result is non-NULL.  Called with the RCU read lock not held;
 * takes and releases it internally.
 */
static inline uint64_t glue(address_space_ldq_internal, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result,
    enum device_endian endian)
{
    uint8_t *ptr;
    uint64_t val;
    MemoryRegion *mr;
    hwaddr l = 8;
    hwaddr addr1;
    MemTxResult r;
    bool release_lock = false;  /* true if prepare_mmio_access took the BQL */

    RCU_READ_LOCK();
    mr = TRANSLATE(addr, &addr1, &l, false, attrs);
    /* Slow path if the access is not direct RAM or would cross the
     * section boundary (TRANSLATE shrank l below 8).
     */
    if (l < 8 || !memory_access_is_direct(mr, false)) {
        release_lock |= prepare_mmio_access(mr);

        /* I/O case: the dispatch returns the value in target endianness,
         * so swap if the caller asked for the opposite one.
         */
        r = memory_region_dispatch_read(mr, addr1, &val, 8, attrs);
#if defined(TARGET_WORDS_BIGENDIAN)
        if (endian == DEVICE_LITTLE_ENDIAN) {
            val = bswap64(val);
        }
#else
        if (endian == DEVICE_BIG_ENDIAN) {
            val = bswap64(val);
        }
#endif
    } else {
        /* RAM case: read straight from the host mapping */
        ptr = qemu_map_ram_ptr(mr->ram_block, addr1);
        switch (endian) {
        case DEVICE_LITTLE_ENDIAN:
            val = ldq_le_p(ptr);
            break;
        case DEVICE_BIG_ENDIAN:
            val = ldq_be_p(ptr);
            break;
        default:
            val = ldq_p(ptr);
            break;
        }
        r = MEMTX_OK;
    }
    if (result) {
        *result = r;
    }
    if (release_lock) {
        qemu_mutex_unlock_iothread();
    }
    RCU_READ_UNLOCK();
    return val;
}
152
/* Load a 64-bit value from @addr in target-native endianness.  */
uint64_t glue(address_space_ldq, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
{
    return glue(address_space_ldq_internal, SUFFIX)(ARG1, addr, attrs, result,
                                                    DEVICE_NATIVE_ENDIAN);
}
159
/* Load a little-endian 64-bit value from @addr.  */
uint64_t glue(address_space_ldq_le, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
{
    return glue(address_space_ldq_internal, SUFFIX)(ARG1, addr, attrs, result,
                                                    DEVICE_LITTLE_ENDIAN);
}
166
/* Load a big-endian 64-bit value from @addr.  */
uint64_t glue(address_space_ldq_be, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
{
    return glue(address_space_ldq_internal, SUFFIX)(ARG1, addr, attrs, result,
                                                    DEVICE_BIG_ENDIAN);
}
173
/* Load a single byte from @addr.  A byte has no endianness, so no
 * swap variants exist.  The transaction result is stored in *@result
 * if @result is non-NULL.
 */
uint32_t glue(address_space_ldub, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
{
    uint8_t *ptr;
    uint64_t val;
    MemoryRegion *mr;
    hwaddr l = 1;
    hwaddr addr1;
    MemTxResult r;
    bool release_lock = false;  /* true if prepare_mmio_access took the BQL */

    RCU_READ_LOCK();
    mr = TRANSLATE(addr, &addr1, &l, false, attrs);
    /* A one-byte access can never cross a section boundary, so only
     * the directness of the region needs checking.
     */
    if (!memory_access_is_direct(mr, false)) {
        release_lock |= prepare_mmio_access(mr);

        /* I/O case */
        r = memory_region_dispatch_read(mr, addr1, &val, 1, attrs);
    } else {
        /* RAM case */
        ptr = qemu_map_ram_ptr(mr->ram_block, addr1);
        val = ldub_p(ptr);
        r = MEMTX_OK;
    }
    if (result) {
        *result = r;
    }
    if (release_lock) {
        qemu_mutex_unlock_iothread();
    }
    RCU_READ_UNLOCK();
    return val;
}
207
208
/* Load a 16-bit value from @addr, honouring the device endianness
 * requested by @endian.  The transaction result is stored in *@result
 * if @result is non-NULL.  Called with the RCU read lock not held;
 * takes and releases it internally.
 */
static inline uint32_t glue(address_space_lduw_internal, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result,
    enum device_endian endian)
{
    uint8_t *ptr;
    uint64_t val;
    MemoryRegion *mr;
    hwaddr l = 2;
    hwaddr addr1;
    MemTxResult r;
    bool release_lock = false;  /* true if prepare_mmio_access took the BQL */

    RCU_READ_LOCK();
    mr = TRANSLATE(addr, &addr1, &l, false, attrs);
    /* Slow path if the access is not direct RAM or would cross the
     * section boundary (TRANSLATE shrank l below 2).
     */
    if (l < 2 || !memory_access_is_direct(mr, false)) {
        release_lock |= prepare_mmio_access(mr);

        /* I/O case: the dispatch returns the value in target endianness,
         * so swap if the caller asked for the opposite one.
         */
        r = memory_region_dispatch_read(mr, addr1, &val, 2, attrs);
#if defined(TARGET_WORDS_BIGENDIAN)
        if (endian == DEVICE_LITTLE_ENDIAN) {
            val = bswap16(val);
        }
#else
        if (endian == DEVICE_BIG_ENDIAN) {
            val = bswap16(val);
        }
#endif
    } else {
        /* RAM case: read straight from the host mapping */
        ptr = qemu_map_ram_ptr(mr->ram_block, addr1);
        switch (endian) {
        case DEVICE_LITTLE_ENDIAN:
            val = lduw_le_p(ptr);
            break;
        case DEVICE_BIG_ENDIAN:
            val = lduw_be_p(ptr);
            break;
        default:
            val = lduw_p(ptr);
            break;
        }
        r = MEMTX_OK;
    }
    if (result) {
        *result = r;
    }
    if (release_lock) {
        qemu_mutex_unlock_iothread();
    }
    RCU_READ_UNLOCK();
    return val;
}
262
/* Load a 16-bit value from @addr in target-native endianness.  */
uint32_t glue(address_space_lduw, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
{
    return glue(address_space_lduw_internal, SUFFIX)(ARG1, addr, attrs, result,
                                                     DEVICE_NATIVE_ENDIAN);
}
269
/* Load a little-endian 16-bit value from @addr.  */
uint32_t glue(address_space_lduw_le, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
{
    return glue(address_space_lduw_internal, SUFFIX)(ARG1, addr, attrs, result,
                                                     DEVICE_LITTLE_ENDIAN);
}
276
/* Load a big-endian 16-bit value from @addr.  */
uint32_t glue(address_space_lduw_be, SUFFIX)(ARG1_DECL,
    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
{
    return glue(address_space_lduw_internal, SUFFIX)(ARG1, addr, attrs, result,
                                                     DEVICE_BIG_ENDIAN);
}
283
284
285
286
287void glue(address_space_stl_notdirty, SUFFIX)(ARG1_DECL,
288 hwaddr addr, uint32_t val, MemTxAttrs attrs, MemTxResult *result)
289{
290 uint8_t *ptr;
291 MemoryRegion *mr;
292 hwaddr l = 4;
293 hwaddr addr1;
294 MemTxResult r;
295 uint8_t dirty_log_mask;
296 bool release_lock = false;
297
298 RCU_READ_LOCK();
299 mr = TRANSLATE(addr, &addr1, &l, true, attrs);
300 if (l < 4 || !memory_access_is_direct(mr, true)) {
301 release_lock |= prepare_mmio_access(mr);
302
303 r = memory_region_dispatch_write(mr, addr1, val, 4, attrs);
304 } else {
305 ptr = qemu_map_ram_ptr(mr->ram_block, addr1);
306 stl_p(ptr, val);
307
308 dirty_log_mask = memory_region_get_dirty_log_mask(mr);
309 dirty_log_mask &= ~(1 << DIRTY_MEMORY_CODE);
310 cpu_physical_memory_set_dirty_range(memory_region_get_ram_addr(mr) + addr,
311 4, dirty_log_mask);
312 r = MEMTX_OK;
313 }
314 if (result) {
315 *result = r;
316 }
317 if (release_lock) {
318 qemu_mutex_unlock_iothread();
319 }
320 RCU_READ_UNLOCK();
321}
322
323
/* Store a 32-bit value to @addr, honouring the device endianness
 * requested by @endian.  The transaction result is stored in *@result
 * if @result is non-NULL.  Called with the RCU read lock not held;
 * takes and releases it internally.
 */
static inline void glue(address_space_stl_internal, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint32_t val, MemTxAttrs attrs,
    MemTxResult *result, enum device_endian endian)
{
    uint8_t *ptr;
    MemoryRegion *mr;
    hwaddr l = 4;
    hwaddr addr1;
    MemTxResult r;
    bool release_lock = false;  /* true if prepare_mmio_access took the BQL */

    RCU_READ_LOCK();
    mr = TRANSLATE(addr, &addr1, &l, true, attrs);
    /* Slow path if the access is not direct RAM or would cross the
     * section boundary (TRANSLATE shrank l below 4).
     */
    if (l < 4 || !memory_access_is_direct(mr, true)) {
        release_lock |= prepare_mmio_access(mr);

        /* I/O case: the dispatch expects the value in target endianness,
         * so swap beforehand if the caller supplied the opposite one.
         */
#if defined(TARGET_WORDS_BIGENDIAN)
        if (endian == DEVICE_LITTLE_ENDIAN) {
            val = bswap32(val);
        }
#else
        if (endian == DEVICE_BIG_ENDIAN) {
            val = bswap32(val);
        }
#endif
        r = memory_region_dispatch_write(mr, addr1, val, 4, attrs);
    } else {
        /* RAM case: write straight to the host mapping, then update the
         * dirty bitmap via invalidate_and_set_dirty().
         */
        ptr = qemu_map_ram_ptr(mr->ram_block, addr1);
        switch (endian) {
        case DEVICE_LITTLE_ENDIAN:
            stl_le_p(ptr, val);
            break;
        case DEVICE_BIG_ENDIAN:
            stl_be_p(ptr, val);
            break;
        default:
            stl_p(ptr, val);
            break;
        }
        invalidate_and_set_dirty(mr, addr1, 4);
        r = MEMTX_OK;
    }
    if (result) {
        *result = r;
    }
    if (release_lock) {
        qemu_mutex_unlock_iothread();
    }
    RCU_READ_UNLOCK();
}
375
/* Store a 32-bit value to @addr in target-native endianness.  */
void glue(address_space_stl, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint32_t val, MemTxAttrs attrs, MemTxResult *result)
{
    glue(address_space_stl_internal, SUFFIX)(ARG1, addr, val, attrs,
                                             result, DEVICE_NATIVE_ENDIAN);
}
382
/* Store a little-endian 32-bit value to @addr.  */
void glue(address_space_stl_le, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint32_t val, MemTxAttrs attrs, MemTxResult *result)
{
    glue(address_space_stl_internal, SUFFIX)(ARG1, addr, val, attrs,
                                             result, DEVICE_LITTLE_ENDIAN);
}
389
/* Store a big-endian 32-bit value to @addr.  */
void glue(address_space_stl_be, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint32_t val, MemTxAttrs attrs, MemTxResult *result)
{
    glue(address_space_stl_internal, SUFFIX)(ARG1, addr, val, attrs,
                                             result, DEVICE_BIG_ENDIAN);
}
396
/* Store a single byte to @addr.  A byte has no endianness, so no swap
 * variants exist.  The transaction result is stored in *@result if
 * @result is non-NULL.
 */
void glue(address_space_stb, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint32_t val, MemTxAttrs attrs, MemTxResult *result)
{
    uint8_t *ptr;
    MemoryRegion *mr;
    hwaddr l = 1;
    hwaddr addr1;
    MemTxResult r;
    bool release_lock = false;  /* true if prepare_mmio_access took the BQL */

    RCU_READ_LOCK();
    mr = TRANSLATE(addr, &addr1, &l, true, attrs);
    /* A one-byte access can never cross a section boundary, so only
     * the directness of the region needs checking.
     */
    if (!memory_access_is_direct(mr, true)) {
        release_lock |= prepare_mmio_access(mr);
        /* I/O case */
        r = memory_region_dispatch_write(mr, addr1, val, 1, attrs);
    } else {
        /* RAM case */
        ptr = qemu_map_ram_ptr(mr->ram_block, addr1);
        stb_p(ptr, val);
        invalidate_and_set_dirty(mr, addr1, 1);
        r = MEMTX_OK;
    }
    if (result) {
        *result = r;
    }
    if (release_lock) {
        qemu_mutex_unlock_iothread();
    }
    RCU_READ_UNLOCK();
}
427
428
/* Store a 16-bit value to @addr, honouring the device endianness
 * requested by @endian.  The transaction result is stored in *@result
 * if @result is non-NULL.  Called with the RCU read lock not held;
 * takes and releases it internally.
 */
static inline void glue(address_space_stw_internal, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint32_t val, MemTxAttrs attrs,
    MemTxResult *result, enum device_endian endian)
{
    uint8_t *ptr;
    MemoryRegion *mr;
    hwaddr l = 2;
    hwaddr addr1;
    MemTxResult r;
    bool release_lock = false;  /* true if prepare_mmio_access took the BQL */

    RCU_READ_LOCK();
    mr = TRANSLATE(addr, &addr1, &l, true, attrs);
    /* Slow path if the access is not direct RAM or would cross the
     * section boundary (TRANSLATE shrank l below 2).
     */
    if (l < 2 || !memory_access_is_direct(mr, true)) {
        release_lock |= prepare_mmio_access(mr);

        /* I/O case: the dispatch expects the value in target endianness,
         * so swap beforehand if the caller supplied the opposite one.
         */
#if defined(TARGET_WORDS_BIGENDIAN)
        if (endian == DEVICE_LITTLE_ENDIAN) {
            val = bswap16(val);
        }
#else
        if (endian == DEVICE_BIG_ENDIAN) {
            val = bswap16(val);
        }
#endif
        r = memory_region_dispatch_write(mr, addr1, val, 2, attrs);
    } else {
        /* RAM case: write straight to the host mapping, then update the
         * dirty bitmap via invalidate_and_set_dirty().
         */
        ptr = qemu_map_ram_ptr(mr->ram_block, addr1);
        switch (endian) {
        case DEVICE_LITTLE_ENDIAN:
            stw_le_p(ptr, val);
            break;
        case DEVICE_BIG_ENDIAN:
            stw_be_p(ptr, val);
            break;
        default:
            stw_p(ptr, val);
            break;
        }
        invalidate_and_set_dirty(mr, addr1, 2);
        r = MEMTX_OK;
    }
    if (result) {
        *result = r;
    }
    if (release_lock) {
        qemu_mutex_unlock_iothread();
    }
    RCU_READ_UNLOCK();
}
480
/* Store a 16-bit value to @addr in target-native endianness.  */
void glue(address_space_stw, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint32_t val, MemTxAttrs attrs, MemTxResult *result)
{
    glue(address_space_stw_internal, SUFFIX)(ARG1, addr, val, attrs, result,
                                             DEVICE_NATIVE_ENDIAN);
}
487
/* Store a little-endian 16-bit value to @addr.  */
void glue(address_space_stw_le, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint32_t val, MemTxAttrs attrs, MemTxResult *result)
{
    glue(address_space_stw_internal, SUFFIX)(ARG1, addr, val, attrs, result,
                                             DEVICE_LITTLE_ENDIAN);
}
494
/* Store a big-endian 16-bit value to @addr.  */
void glue(address_space_stw_be, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint32_t val, MemTxAttrs attrs, MemTxResult *result)
{
    glue(address_space_stw_internal, SUFFIX)(ARG1, addr, val, attrs, result,
                                             DEVICE_BIG_ENDIAN);
}
501
/* Store a 64-bit value to @addr, honouring the device endianness
 * requested by @endian.  The transaction result is stored in *@result
 * if @result is non-NULL.  Called with the RCU read lock not held;
 * takes and releases it internally.
 */
static void glue(address_space_stq_internal, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint64_t val, MemTxAttrs attrs,
    MemTxResult *result, enum device_endian endian)
{
    uint8_t *ptr;
    MemoryRegion *mr;
    hwaddr l = 8;
    hwaddr addr1;
    MemTxResult r;
    bool release_lock = false;  /* true if prepare_mmio_access took the BQL */

    RCU_READ_LOCK();
    mr = TRANSLATE(addr, &addr1, &l, true, attrs);
    /* Slow path if the access is not direct RAM or would cross the
     * section boundary (TRANSLATE shrank l below 8).
     */
    if (l < 8 || !memory_access_is_direct(mr, true)) {
        release_lock |= prepare_mmio_access(mr);

        /* I/O case: the dispatch expects the value in target endianness,
         * so swap beforehand if the caller supplied the opposite one.
         */
#if defined(TARGET_WORDS_BIGENDIAN)
        if (endian == DEVICE_LITTLE_ENDIAN) {
            val = bswap64(val);
        }
#else
        if (endian == DEVICE_BIG_ENDIAN) {
            val = bswap64(val);
        }
#endif
        r = memory_region_dispatch_write(mr, addr1, val, 8, attrs);
    } else {
        /* RAM case: write straight to the host mapping, then update the
         * dirty bitmap via invalidate_and_set_dirty().
         */
        ptr = qemu_map_ram_ptr(mr->ram_block, addr1);
        switch (endian) {
        case DEVICE_LITTLE_ENDIAN:
            stq_le_p(ptr, val);
            break;
        case DEVICE_BIG_ENDIAN:
            stq_be_p(ptr, val);
            break;
        default:
            stq_p(ptr, val);
            break;
        }
        invalidate_and_set_dirty(mr, addr1, 8);
        r = MEMTX_OK;
    }
    if (result) {
        *result = r;
    }
    if (release_lock) {
        qemu_mutex_unlock_iothread();
    }
    RCU_READ_UNLOCK();
}
553
/* Store a 64-bit value to @addr in target-native endianness.  */
void glue(address_space_stq, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint64_t val, MemTxAttrs attrs, MemTxResult *result)
{
    glue(address_space_stq_internal, SUFFIX)(ARG1, addr, val, attrs, result,
                                             DEVICE_NATIVE_ENDIAN);
}
560
/* Store a little-endian 64-bit value to @addr.  */
void glue(address_space_stq_le, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint64_t val, MemTxAttrs attrs, MemTxResult *result)
{
    glue(address_space_stq_internal, SUFFIX)(ARG1, addr, val, attrs, result,
                                             DEVICE_LITTLE_ENDIAN);
}
567
/* Store a big-endian 64-bit value to @addr.  */
void glue(address_space_stq_be, SUFFIX)(ARG1_DECL,
    hwaddr addr, uint64_t val, MemTxAttrs attrs, MemTxResult *result)
{
    glue(address_space_stq_internal, SUFFIX)(ARG1, addr, val, attrs, result,
                                             DEVICE_BIG_ENDIAN);
}
574
575#undef ARG1_DECL
576#undef ARG1
577#undef SUFFIX
578#undef TRANSLATE
579#undef RCU_READ_LOCK
580#undef RCU_READ_UNLOCK
581