vnc: palette: use a pool to reduce memory allocations
[qemu/ar7.git] / host-utils.c
blobdc9612387b3ebe46ce52be2e5bc9104b2999da4f
1 /*
2 * Utility compute operations used by translated code.
4 * Copyright (c) 2003 Fabrice Bellard
5 * Copyright (c) 2007 Aurelien Jarno
7 * Permission is hereby granted, free of charge, to any person obtaining a copy
8 * of this software and associated documentation files (the "Software"), to deal
9 * in the Software without restriction, including without limitation the rights
10 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 * copies of the Software, and to permit persons to whom the Software is
12 * furnished to do so, subject to the following conditions:
14 * The above copyright notice and this permission notice shall be included in
15 * all copies or substantial portions of the Software.
17 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
20 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 * THE SOFTWARE.
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include "host-utils.h"
30 //#define DEBUG_MULDIV
32 /* Long integer helpers */
33 #if !defined(__x86_64__)
/* 128-bit add: (*phigh:*plow) += (b:a), propagating the carry out of
   the low 64 bits into the high word. */
static void add128 (uint64_t *plow, uint64_t *phigh, uint64_t a, uint64_t b)
{
    uint64_t lo = *plow + a;

    /* Unsigned wrap-around (lo < a) signals a carry out of the low word. */
    *phigh += b + (lo < a);
    *plow = lo;
}
/* Two's-complement negate of the 128-bit value (*phigh:*plow). */
static void neg128 (uint64_t *plow, uint64_t *phigh)
{
    /* -x == ~x + 1; the +1 carries into the high word only when the
       negated low word wrapped to zero. */
    *plow = -*plow;
    *phigh = ~*phigh + (*plow == 0);
}
/* Schoolbook unsigned 64x64 -> 128 multiply built from four
   32x32 -> 64 partial products.  Result in (*phigh:*plow). */
static void mul64 (uint64_t *plow, uint64_t *phigh, uint64_t a, uint64_t b)
{
    uint32_t alo = (uint32_t)a;
    uint32_t ahi = (uint32_t)(a >> 32);
    uint32_t blo = (uint32_t)b;
    uint32_t bhi = (uint32_t)(b >> 32);
    uint64_t cross;

    /* Low*low lands entirely in the low word, high*high entirely in
       the high word. */
    *plow = (uint64_t)alo * blo;
    *phigh = (uint64_t)ahi * bhi;

    /* Each cross product straddles the 64-bit boundary: its low half
       is shifted into the low word, its high half added to the high
       word, with carry propagation via add128. */
    cross = (uint64_t)alo * bhi;
    add128(plow, phigh, cross << 32, cross >> 32);

    cross = (uint64_t)ahi * blo;
    add128(plow, phigh, cross << 32, cross >> 32);
}
/* Unsigned 64x64 -> 128 multiplication; result in (*phigh:*plow). */
void mulu64 (uint64_t *plow, uint64_t *phigh, uint64_t a, uint64_t b)
{
    mul64(plow, phigh, a, b);
#if defined(DEBUG_MULDIV)
    /* %llx is a format mismatch for uint64_t on hosts where it is
       "unsigned long" (LP64); PRIx64 is correct everywhere. */
    printf("mulu64: 0x%016" PRIx64 " * 0x%016" PRIx64
           " = 0x%016" PRIx64 "%016" PRIx64 "\n",
           a, b, *phigh, *plow);
#endif
}
/* Signed 64x64 -> 128 multiplication; result in (*phigh:*plow). */
void muls64 (uint64_t *plow, uint64_t *phigh, int64_t a, int64_t b)
{
    int sa, sb;
    uint64_t ua, ub;

    /* Multiply magnitudes, then negate the 128-bit result if exactly
       one operand was negative.  Negating INT64_MIN as a signed value
       is undefined behavior (signed overflow), so take the magnitude
       in unsigned arithmetic, where -(uint64_t)INT64_MIN is well
       defined and yields the correct 2^63. */
    sa = (a < 0);
    ua = sa ? -(uint64_t)a : (uint64_t)a;
    sb = (b < 0);
    ub = sb ? -(uint64_t)b : (uint64_t)b;

    mul64(plow, phigh, ua, ub);
    if (sa ^ sb) {
        neg128(plow, phigh);
    }
#if defined(DEBUG_MULDIV)
    /* PRIx64 instead of %llx: %llx is wrong for 64-bit types on LP64
       hosts; also cast the int64_t inputs to the unsigned type the
       conversion expects. */
    printf("muls64: 0x%016" PRIx64 " * 0x%016" PRIx64
           " = 0x%016" PRIx64 "%016" PRIx64 "\n",
           (uint64_t)a, (uint64_t)b, *phigh, *plow);
#endif
}
105 #endif /* !defined(__x86_64__) */