/* This file is part of the program psim.

   Copyright (C) 1994-1996, Andrew Cagney <cagney@highland.com.au>
   Copyright (C) 1997, Free Software Foundation, Inc.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.

   */


#ifndef _SIM_BITS_H_
#define _SIM_BITS_H_

/* bit manipulation routines:

   Bit numbering: The bits are numbered according to the target ISA's
   convention, which is controlled by WITH_TARGET_WORD_MSB.  For
   the PowerPC (WITH_TARGET_WORD_MSB == 0) the numbering is 0..31
   while for the MIPS (WITH_TARGET_WORD_MSB == 31) it is 31..0.

   Size convention: Each macro comes in three forms - <MACRO>32 which
   operates on a 32bit quantity (bits are numbered 0..31); <MACRO>64
   which operates on a 64bit quantity (bits are numbered 0..63); and
   <MACRO> which operates using the bit size of the target
   architecture (bits are still numbered 0..63), with 32bit
   architectures ignoring the first 32bits, leaving bit 32 as the most
   significant.

   BIT*(POS): Constant with just 1 bit set.

   LSBIT*(OFFSET): Constant with just 1 bit set - the LS bit is numbered zero.

   MSBIT*(OFFSET): Constant with just 1 bit set - the MS bit is numbered zero.

   MASK*(FIRST, LAST): Constant with bits [FIRST .. LAST] set.  The
   <MACRO> (no size) version permits FIRST >= LAST and generates a
   wrapped bit mask, viz. ([0..LAST] | [FIRST..LSB]).

   LSMASK*(NR_BITS): Like MASK, only the NR least significant bits are set.

   MSMASK*(NR_BITS): Like MASK, only the NR most significant bits are set.

   MASKED*(VALUE, FIRST, LAST): Masks out all but bits [FIRST
   .. LAST].

   LSMASKED*(VALUE, NR_BITS): Masks out all but the least significant
   NR_BITS of the value.

   MSMASKED*(VALUE, NR_BITS): Masks out all but the most significant
   NR_BITS of the value.

   EXTRACTED*(VALUE, FIRST, LAST): Masks out bits [FIRST .. LAST] but
   also right shifts the masked value so that bit LAST becomes the
   least significant (right most).

   SHUFFLED*(VALUE, OLD, NEW): Masks then moves a single bit from
   position OLD to position NEW.

   MOVED*(VALUE, OLD_FIRST, OLD_LAST, NEW_FIRST, NEW_LAST): Moves
   things around so that bits OLD_FIRST..OLD_LAST are masked then
   moved to NEW_FIRST..NEW_LAST.

   INSERTED*(VALUE, FIRST, LAST): Takes VALUE and `inserts' the (LAST
   - FIRST + 1) least significant bits into bit positions [ FIRST
   .. LAST ].  This is almost the complement to EXTRACTED.

   IEA_MASKED(SHOULD_MASK, ADDR): Convert the address to the target's
   natural size.  If in 32bit mode, discard the high 32bits.

   EXTENDED(VALUE): Convert VALUE (32bits of it) to the target's
   natural size.  If in 64bit mode, sign extend the value.

   ALIGN_*(VALUE): Round the value upwards so that it is aligned.

   FLOOR_*(VALUE): Truncate the value so that it is aligned.

   ROTL*(VALUE, NR_BITS): Return the value rotated left by NR_BITS.

   ROTR*(VALUE, NR_BITS): Return the value rotated right by NR_BITS.

   SEXT*(VAL, SIGN_BIT): Treat SIGN_BIT as the sign bit and sign extend VAL.

   Note: Only the BIT* and MASK* macros return a constant that can be
   used in variable declarations.

   */
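
/* Illustrative examples (added for clarity, not part of the original
   documentation).  With a PowerPC-style numbering (WITH_TARGET_WORD_MSB == 0)
   bit 0 is the most significant, so BIT32 (0) == 0x80000000 and
   MASK32 (0, 7) == 0xff000000.  With a MIPS-style numbering
   (WITH_TARGET_WORD_MSB == 31) bit 0 is the least significant, so
   BIT32 (0) == 0x1 and MASK32 (7, 0) == 0xff.  */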


/* compute the number of bits between START and STOP */

#if (WITH_TARGET_WORD_MSB == 0)
#define _MAKE_WIDTH(START, STOP) (STOP - START + 1)
#else
#define _MAKE_WIDTH(START, STOP) (START - STOP + 1)
#endif
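
/* Illustrative example (added; assuming WITH_TARGET_WORD_MSB == 0):
   _MAKE_WIDTH (8, 15) == 8, i.e. the field [8..15] is eight bits wide.
   With WITH_TARGET_WORD_MSB == 31 the arguments reverse:
   _MAKE_WIDTH (15, 8) == 8.  */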


/* compute the number of shifts required to move a bit between the LSB
   (MSB) and POS */

#if (WITH_TARGET_WORD_MSB == 0)
#define _LSB_SHIFT(WIDTH, POS) (WIDTH - 1 - POS)
#else
#define _LSB_SHIFT(WIDTH, POS) (POS)
#endif

#if (WITH_TARGET_WORD_MSB == 0)
#define _MSB_SHIFT(WIDTH, POS) (POS)
#else
#define _MSB_SHIFT(WIDTH, POS) (WIDTH - 1 - POS)
#endif


/* compute the absolute bit position given the OFFSET from the MSB (LSB)
   NB: _xxx_POS (WIDTH, _xxx_SHIFT (WIDTH, POS)) == POS */

#if (WITH_TARGET_WORD_MSB == 0)
#define _MSB_POS(WIDTH, SHIFT) (SHIFT)
#else
#define _MSB_POS(WIDTH, SHIFT) (WIDTH - 1 - SHIFT)
#endif

#if (WITH_TARGET_WORD_MSB == 0)
#define _LSB_POS(WIDTH, SHIFT) (WIDTH - 1 - SHIFT)
#else
#define _LSB_POS(WIDTH, SHIFT) (SHIFT)
#endif


/* convert a 64 bit position into a corresponding 32bit position.  MSB
   pos handles the possibility that the bit lies beyond the 32bit
   boundary */

#if (WITH_TARGET_WORD_MSB == 0)
#define _MSB_32(START, STOP) (START <= STOP \
			      ? (START < 32 ? 0 : START - 32) \
			      : (STOP < 32 ? 0 : STOP - 32))
#else
#define _MSB_32(START, STOP) (START >= STOP \
			      ? (START >= 32 ? 31 : START) \
			      : (STOP >= 32 ? 31 : STOP))
#endif

#if (WITH_TARGET_WORD_MSB == 0)
#define _LSB_32(START, STOP) (START <= STOP \
			      ? (STOP < 32 ? 0 : STOP - 32) \
			      : (START < 32 ? 0 : START - 32))
#else
#define _LSB_32(START, STOP) (START >= STOP \
			      ? (STOP >= 32 ? 31 : STOP) \
			      : (START >= 32 ? 31 : START))
#endif

#if (WITH_TARGET_WORD_MSB == 0)
#define _MSB(START, STOP) (START <= STOP ? START : STOP)
#else
#define _MSB(START, STOP) (START >= STOP ? START : STOP)
#endif

#if (WITH_TARGET_WORD_MSB == 0)
#define _LSB(START, STOP) (START <= STOP ? STOP : START)
#else
#define _LSB(START, STOP) (START >= STOP ? STOP : START)
#endif


/* Bit operations */

#define _BITn(WIDTH, POS) ((natural##WIDTH)1 \
			   << _LSB_SHIFT (WIDTH, POS))

#define BIT4(POS)  (1 << _LSB_SHIFT (4, (POS)))
#define BIT5(POS)  (1 << _LSB_SHIFT (5, (POS)))
#define BIT8(POS)  (1 << _LSB_SHIFT (8, (POS)))
#define BIT10(POS) (1 << _LSB_SHIFT (10, (POS)))
#define BIT16(POS) _BITn (16, (POS))
#define BIT32(POS) _BITn (32, (POS))
#define BIT64(POS) _BITn (64, (POS))

#if (WITH_TARGET_WORD_BITSIZE == 64)
#define BIT(POS) BIT64(POS)
#endif
#if (WITH_TARGET_WORD_BITSIZE == 32)
#if (WITH_TARGET_WORD_MSB == 0)
#define BIT(POS) ((POS) < 32 \
		  ? 0 \
		  : (1 << ((POS) < 32 ? 0 : _LSB_SHIFT (64, (POS)))))
#else
#define BIT(POS) ((POS) >= 32 \
		  ? 0 \
		  : (1 << ((POS) >= 32 ? 0 : (POS))))
#endif
#endif
#if !defined (BIT)
#error "BIT never defined"
#endif
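
/* Illustrative example (added; assuming WITH_TARGET_WORD_MSB == 0):
   BIT32 (0) == 0x80000000 and BIT32 (31) == 0x1.  On a 32bit target the
   word-sized BIT macro still uses the 64bit numbering, so BIT (32) is the
   most significant bit of the 32bit word while BIT (0) evaluates to 0.  */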


/* LS/MS Bit operations */

#define LSBIT8(POS)  ((unsigned8)1 << (POS))
#define LSBIT16(POS) ((unsigned16)1 << (POS))
#define LSBIT32(POS) ((unsigned32)1 << (POS))
#define LSBIT64(POS) ((unsigned64)1 << (POS))
#define LSBIT(POS)   ((unsigned_word)1 << (POS))

#define MSBIT8(POS)  ((unsigned8)1 << (8 - 1 - (POS)))
#define MSBIT16(POS) ((unsigned16)1 << (16 - 1 - (POS)))
#define MSBIT32(POS) ((unsigned32)1 << (32 - 1 - (POS)))
#define MSBIT64(POS) ((unsigned64)1 << (64 - 1 - (POS)))
#define MSBIT(POS)   ((unsigned_word)1 << (WITH_TARGET_WORD_BITSIZE - 1 - (POS)))
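
/* Illustrative example (added): these macros ignore the target's bit
   numbering convention.  LSBIT32 (0) == 0x1 counts from the least
   significant end, while MSBIT32 (0) == 0x80000000 counts from the most
   significant end.  */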



/* multi bit mask */

/* 111111 -> mmll11 -> mm11ll */
#define _MASKn(WIDTH, START, STOP) (((unsigned##WIDTH)(-1) \
				     >> (_MSB_SHIFT (WIDTH, START) \
					 + _LSB_SHIFT (WIDTH, STOP))) \
				    << _LSB_SHIFT (WIDTH, STOP))

#define MASK16(START, STOP) _MASKn(16, (START), (STOP))
#define MASK32(START, STOP) _MASKn(32, (START), (STOP))
#define MASK64(START, STOP) _MASKn(64, (START), (STOP))

#if (WITH_TARGET_WORD_MSB == 0)
#define _POS_LE(START, STOP) (START <= STOP)
#else
#define _POS_LE(START, STOP) (STOP <= START)
#endif

#if (WITH_TARGET_WORD_BITSIZE == 64)
#define MASK(START, STOP) \
     (_POS_LE ((START), (STOP)) \
      ? _MASKn(64, \
	       _MSB ((START), (STOP)), \
	       _LSB ((START), (STOP)) ) \
      : (_MASKn(64, _MSB_POS (64, 0), (STOP)) \
	 | _MASKn(64, (START), _LSB_POS (64, 0))))
#endif
#if (WITH_TARGET_WORD_BITSIZE == 32)
#define MASK(START, STOP) \
     (_POS_LE ((START), (STOP)) \
      ? (_POS_LE ((STOP), _MSB_POS (64, 31)) \
	 ? 0 \
	 : _MASKn (32, \
		   _MSB_32 ((START), (STOP)), \
		   _LSB_32 ((START), (STOP)))) \
      : (_MASKn (32, \
		 _LSB_32 ((START), (STOP)), \
		 _LSB_POS (32, 0)) \
	 | (_POS_LE ((STOP), _MSB_POS (64, 31)) \
	    ? 0 \
	    : _MASKn (32, \
		      _MSB_POS (32, 0), \
		      _MSB_32 ((START), (STOP))))))
#endif
#if !defined (MASK)
#error "MASK never defined"
#endif
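
/* Illustrative example (added; assuming WITH_TARGET_WORD_MSB == 0):
   MASK32 (0, 7) == 0xff000000 and MASK32 (24, 31) == 0x000000ff.  The
   word-sized MASK also accepts a wrapped range, e.g. on a 64bit target
   MASK (60, 3) == 0xf00000000000000f (bits 60..63 and 0..3).  */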



/* Multi-bit mask on least significant bits */

#if (WITH_TARGET_WORD_MSB == 0)
#define _LSMASKn(WIDTH, NR_BITS) _MASKn(WIDTH, (WIDTH - NR_BITS), (WIDTH - 1))
#else
#define _LSMASKn(WIDTH, NR_BITS) _MASKn(WIDTH, (NR_BITS - 1), 0)
#endif

#define LSMASK16(NR_BITS) _LSMASKn (16, (NR_BITS))
#define LSMASK32(NR_BITS) _LSMASKn (32, (NR_BITS))
#define LSMASK64(NR_BITS) _LSMASKn (64, (NR_BITS))

#if (WITH_TARGET_WORD_BITSIZE == 64)
#define LSMASK(NR_BITS) ((NR_BITS) < 1 \
			 ? 0 \
			 : _MASKn (64, \
				   _LSB_POS (64, \
					     ((NR_BITS) < 1 ? 0 \
					      : (NR_BITS) - 1)), \
				   _LSB_POS (64, 0)))
#endif
#if (WITH_TARGET_WORD_BITSIZE == 32)
#define LSMASK(NR_BITS) ((NR_BITS) < 1 \
			 ? 0 \
			 : _MASKn (32, \
				   _LSB_POS (32, \
					     ((NR_BITS) > 32 ? 31 \
					      : (NR_BITS) < 1 ? 0 \
					      : ((NR_BITS) - 1))), \
				   _LSB_POS (32, 0)))
#endif
#if !defined (LSMASK)
#error "LSMASK never defined"
#endif
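
/* Illustrative example (added): regardless of the bit numbering
   convention, LSMASK32 (8) == 0x000000ff - the 8 least significant
   bits.  */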


/* Multi-bit mask on most significant bits */

#if (WITH_TARGET_WORD_MSB == 0)
#define _MSMASKn(WIDTH, NR_BITS) _MASKn (WIDTH, 0, (NR_BITS - 1))
#else
#define _MSMASKn(WIDTH, NR_BITS) _MASKn (WIDTH, (WIDTH - 1), (WIDTH - NR_BITS))
#endif

#define MSMASK16(NR_BITS) _MSMASKn (16, (NR_BITS))
#define MSMASK32(NR_BITS) _MSMASKn (32, (NR_BITS))
#define MSMASK64(NR_BITS) _MSMASKn (64, (NR_BITS))

#if (WITH_TARGET_WORD_BITSIZE == 64)
#define MSMASK(NR_BITS) ((NR_BITS) < 1 \
			 ? 0 \
			 : _MASKn (64, \
				   _MSB_POS (64, 0), \
				   _MSB_POS (64, \
					     ((NR_BITS) < 1 ? 0 \
					      : (NR_BITS) - 1))))
#endif
#if (WITH_TARGET_WORD_BITSIZE == 32)
#define MSMASK(NR_BITS) ((NR_BITS) <= 32 \
			 ? 0 \
			 : _MASKn (32, \
				   _MSB_POS (32, 0), \
				   _MSB_POS (32, \
					     ((NR_BITS) <= 32 ? 0 \
					      : (NR_BITS) - 33))))
#endif
#if !defined (MSMASK)
#error "MSMASK never defined"
#endif
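
/* Illustrative example (added): MSMASK32 (8) == 0xff000000 - the 8 most
   significant bits.  On a 32bit target with WITH_TARGET_WORD_MSB == 0 the
   word-sized MSMASK counts NR_BITS against the 64bit numbering, so
   MSMASK (33) sets only the most significant bit of the 32bit word.  */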


/* mask the required bits, leaving them in place */

INLINE_SIM_BITS(unsigned16) MASKED16 (unsigned16 word, unsigned start, unsigned stop);
INLINE_SIM_BITS(unsigned32) MASKED32 (unsigned32 word, unsigned start, unsigned stop);
INLINE_SIM_BITS(unsigned64) MASKED64 (unsigned64 word, unsigned start, unsigned stop);

INLINE_SIM_BITS(unsigned_word) MASKED (unsigned_word word, unsigned start, unsigned stop);
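
/* Illustrative example (added; assuming WITH_TARGET_WORD_MSB == 0):
   MASKED32 (0x12345678, 0, 7) == 0x12000000 - the field [0..7] is kept
   in place and everything else is cleared.  */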


/* Ditto but nr of ls-bits specified */

INLINE_SIM_BITS(unsigned16) LSMASKED16 (unsigned16 word, unsigned nr_bits);
INLINE_SIM_BITS(unsigned32) LSMASKED32 (unsigned32 word, unsigned nr_bits);
INLINE_SIM_BITS(unsigned64) LSMASKED64 (unsigned64 word, unsigned nr_bits);

INLINE_SIM_BITS(unsigned_word) LSMASKED (unsigned_word word, unsigned nr_bits);


/* Ditto but nr of ms-bits specified */

INLINE_SIM_BITS(unsigned16) MSMASKED16 (unsigned16 word, unsigned nr_bits);
INLINE_SIM_BITS(unsigned32) MSMASKED32 (unsigned32 word, unsigned nr_bits);
INLINE_SIM_BITS(unsigned64) MSMASKED64 (unsigned64 word, unsigned nr_bits);

INLINE_SIM_BITS(unsigned_word) MSMASKED (unsigned_word word, unsigned nr_bits);



/* extract the required bits aligning them with the lsb */

INLINE_SIM_BITS(unsigned16) EXTRACTED16 (unsigned16 val, unsigned start, unsigned stop);
INLINE_SIM_BITS(unsigned32) EXTRACTED32 (unsigned32 val, unsigned start, unsigned stop);
INLINE_SIM_BITS(unsigned64) EXTRACTED64 (unsigned64 val, unsigned start, unsigned stop);

INLINE_SIM_BITS(unsigned_word) EXTRACTED (unsigned_word val, unsigned start, unsigned stop);
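
/* Illustrative example (added; assuming WITH_TARGET_WORD_MSB == 0):
   EXTRACTED32 (0x12345678, 0, 7) == 0x12 - the field [0..7] is masked
   and then right aligned.  */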



/* move a single bit around */
/* NB: the weirdness (N>O?N-O:0) is to stop a warning from GCC */
#define _SHUFFLEDn(N, WORD, OLD, NEW) \
((OLD) < (NEW) \
 ? (((unsigned##N)(WORD) \
     >> (((NEW) > (OLD)) ? ((NEW) - (OLD)) : 0)) \
    & MASK32((NEW), (NEW))) \
 : (((unsigned##N)(WORD) \
     << (((OLD) > (NEW)) ? ((OLD) - (NEW)) : 0)) \
    & MASK32((NEW), (NEW))))

#define SHUFFLED32(WORD, OLD, NEW) _SHUFFLEDn (32, WORD, OLD, NEW)
#define SHUFFLED64(WORD, OLD, NEW) _SHUFFLEDn (64, WORD, OLD, NEW)

#define SHUFFLED(WORD, OLD, NEW) _SHUFFLEDn (_word, WORD, OLD, NEW)
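
/* Illustrative example (added; assuming WITH_TARGET_WORD_MSB == 0):
   SHUFFLED32 (0x80000000, 0, 31) == 0x1 - the single bit at position 0
   is masked and moved to position 31.  */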


/* move a group of bits around */

INLINE_SIM_BITS(unsigned16) INSERTED16 (unsigned16 val, unsigned start, unsigned stop);
INLINE_SIM_BITS(unsigned32) INSERTED32 (unsigned32 val, unsigned start, unsigned stop);
INLINE_SIM_BITS(unsigned64) INSERTED64 (unsigned64 val, unsigned start, unsigned stop);

INLINE_SIM_BITS(unsigned_word) INSERTED (unsigned_word val, unsigned start, unsigned stop);
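
/* Illustrative example (added; assuming WITH_TARGET_WORD_MSB == 0):
   INSERTED32 (0xab, 0, 7) == 0xab000000 - the 8 least significant bits of
   the value are placed into bit positions [0..7].  */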



/* depending on MODE return a 64bit or 32bit (sign extended) value */
#if (WITH_TARGET_WORD_BITSIZE == 64)
#define EXTENDED(X)     ((signed64)(signed32)(X))
#endif
#if (WITH_TARGET_WORD_BITSIZE == 32)
#define EXTENDED(X)     (X)
#endif
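
/* Illustrative example (added): on a 64bit target
   EXTENDED (0x80000000) == 0xffffffff80000000 - the 32bit value is sign
   extended to the full word; on a 32bit target it is returned
   unchanged.  */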


/* memory alignment macros */
#define _ALIGNa(A,X)  (((X) + ((A) - 1)) & ~((A) - 1))
#define _FLOORa(A,X)  ((X) & ~((A) - 1))

#define ALIGN_8(X)  _ALIGNa (8, X)
#define ALIGN_16(X) _ALIGNa (16, X)

#define ALIGN_PAGE(X) _ALIGNa (0x1000, X)
#define FLOOR_PAGE(X) ((X) & ~(0x1000 - 1))
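
/* Illustrative example (added): ALIGN_8 (13) == 16 and ALIGN_8 (16) == 16,
   while ALIGN_PAGE (0x12345) == 0x13000 and FLOOR_PAGE (0x12345) == 0x12000
   (the page size here is the fixed 0x1000 used above).  */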


/* bit blitting macros */
#define BLIT32(V, POS, BIT) \
do { \
  if (BIT) \
    V |= BIT32 (POS); \
  else \
    V &= ~BIT32 (POS); \
} while (0)
#define MBLIT32(V, LO, HI, VAL) \
do { \
  (V) = (((V) & ~MASK32 ((LO), (HI))) \
	 | INSERTED32 (VAL, LO, HI)); \
} while (0)
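
/* Illustrative example (added; assuming WITH_TARGET_WORD_MSB == 0):
   given unsigned32 v = 0, BLIT32 (v, 0, 1) sets the most significant bit
   (v becomes 0x80000000), and MBLIT32 (v, 24, 31, 0xab) then replaces the
   field [24..31], leaving v == 0x800000ab.  */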



/* some rotate functions.  The generic macros ROT, ROTL, ROTR are
   intentionally omitted. */


INLINE_SIM_BITS(unsigned16) ROT16 (unsigned16 val, int shift);
INLINE_SIM_BITS(unsigned32) ROT32 (unsigned32 val, int shift);
INLINE_SIM_BITS(unsigned64) ROT64 (unsigned64 val, int shift);


INLINE_SIM_BITS(unsigned16) ROTL16 (unsigned16 val, unsigned shift);
INLINE_SIM_BITS(unsigned32) ROTL32 (unsigned32 val, unsigned shift);
INLINE_SIM_BITS(unsigned64) ROTL64 (unsigned64 val, unsigned shift);


INLINE_SIM_BITS(unsigned16) ROTR16 (unsigned16 val, unsigned shift);
INLINE_SIM_BITS(unsigned32) ROTR32 (unsigned32 val, unsigned shift);
INLINE_SIM_BITS(unsigned64) ROTR64 (unsigned64 val, unsigned shift);
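
/* Illustrative example (added): ROTL32 (0x80000001, 1) == 0x00000003 and
   ROTR32 (0x00000003, 1) == 0x80000001 - bits shifted out of one end
   re-enter at the other.  */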



/* Sign extension operations */

INLINE_SIM_BITS(unsigned16) SEXT16 (signed16 val, unsigned sign_bit);
INLINE_SIM_BITS(unsigned32) SEXT32 (signed32 val, unsigned sign_bit);
INLINE_SIM_BITS(unsigned64) SEXT64 (signed64 val, unsigned sign_bit);

INLINE_SIM_BITS(unsigned_word) SEXT (signed_word val, unsigned sign_bit);
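
/* Illustrative example (added; assuming WITH_TARGET_WORD_MSB == 0):
   SEXT32 (0x00000080, 24) == 0xffffff80 - bit 24 (value 0x80) is treated
   as the sign bit and copied into all more significant positions.  */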



#if ((SIM_BITS_INLINE & INCLUDE_MODULE) && (SIM_BITS_INLINE & INCLUDED_BY_MODULE))
#include "sim-bits.c"
#endif

#endif /* _SIM_BITS_H_ */