xref: /freebsd/sys/compat/linuxkpi/common/include/linux/math64.h (revision cf16d65c2e3a7099319447f7ac464b9839af868b)
/*-
 * Copyright (c) 2007 Cisco Systems, Inc.  All rights reserved.
 * Copyright (c) 2014-2015 Mellanox Technologies, Ltd. All rights reserved.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
2770bb2cdbSMark Johnston 
28307f78f3SVladimir Kondratyev #ifndef _LINUXKPI_LINUX_MATH64_H
29307f78f3SVladimir Kondratyev #define	_LINUXKPI_LINUX_MATH64_H
308d59ecb2SHans Petter Selasky 
318d59ecb2SHans Petter Selasky #include <sys/stdint.h>
32b6a61d68SDoug Moore #include <sys/systm.h>
338d59ecb2SHans Petter Selasky 
/*
 * Linux-compatible do_div(): divide the 64-bit lvalue 'n' by the 32-bit
 * 'base' in place (n becomes the quotient) and yield the 32-bit remainder
 * as the value of the GNU statement expression.
 *
 * NOTE: 'n' is evaluated more than once, so it must be a plain lvalue
 * with no side effects — same contract as the Linux macro.
 */
#define	do_div(n, base) ({			\
	uint32_t __base = (base);		\
	uint32_t __rem;				\
	__rem = ((uint64_t)(n)) % __base;	\
	(n) = ((uint64_t)(n)) / __base;		\
	__rem;					\
})
418d59ecb2SHans Petter Selasky 
/*
 * Unsigned 64-bit division returning the quotient and storing the
 * remainder through 'remainder'.  Divisor must be non-zero.
 */
static inline uint64_t
div64_u64_rem(uint64_t dividend, uint64_t divisor, uint64_t *remainder)
{
	uint64_t quot;

	quot = dividend / divisor;
	*remainder = dividend - quot * divisor;
	return (quot);
}
4970bb2cdbSMark Johnston 
/*
 * Signed 64-bit division: dividend / divisor, truncated toward zero
 * (plain C semantics).  Divisor must be non-zero.
 */
static inline int64_t
div64_s64(int64_t dividend, int64_t divisor)
{
	int64_t quotient;

	quotient = dividend / divisor;
	return (quotient);
}
5670bb2cdbSMark Johnston 
/*
 * Unsigned 64-bit division: dividend / divisor (quotient only).
 * Divisor must be non-zero.
 */
static inline uint64_t
div64_u64(uint64_t dividend, uint64_t divisor)
{
	uint64_t quotient;

	quotient = dividend / divisor;
	return (quotient);
}
6370bb2cdbSMark Johnston 
/* Linux div64_ul(): unsigned long division, implemented via div64_u64(). */
#define	div64_ul(x, y)	div64_u64((x), (y))
65*cf16d65cSJean-Sébastien Pédron 
/*
 * Divide a 64-bit dividend by a 32-bit divisor, returning the 64-bit
 * quotient and storing the 32-bit remainder through 'remainder'.
 * Divisor must be non-zero.
 */
static inline uint64_t
div_u64_rem(uint64_t dividend, uint32_t divisor, uint32_t *remainder)
{
	uint64_t quot;

	quot = dividend / divisor;
	/* Remainder is < divisor, so it always fits in 32 bits. */
	*remainder = (uint32_t)(dividend - quot * divisor);
	return (quot);
}
738d59ecb2SHans Petter Selasky 
/*
 * Divide a signed 64-bit dividend by a signed 32-bit divisor, truncating
 * toward zero.  Divisor must be non-zero.
 */
static inline int64_t
div_s64(int64_t dividend, int32_t divisor)
{
	int64_t quotient;

	quotient = dividend / divisor;
	return (quotient);
}
8070bb2cdbSMark Johnston 
/*
 * Divide a 64-bit dividend by a 32-bit divisor (quotient only).
 * Divisor must be non-zero.
 */
static inline uint64_t
div_u64(uint64_t dividend, uint32_t divisor)
{
	uint64_t result;

	result = dividend / divisor;
	return (result);
}
878d59ecb2SHans Petter Selasky 
/*
 * Full-width 32x32 -> 64-bit unsigned multiply; widening one operand
 * before the multiply avoids 32-bit truncation of the product.
 */
static inline uint64_t
mul_u32_u32(uint32_t a, uint32_t b)
{
	uint64_t wide_a = a;

	return (wide_a * b);
}
940d2dce0bSHans Petter Selasky 
/*
 * Unsigned 64-bit division rounding the quotient up.  Divisor must be
 * non-zero.
 *
 * Implemented as quotient + (remainder != 0) rather than
 * (dividend + divisor - 1) / divisor: the latter wraps around when
 * dividend is close to UINT64_MAX, yielding a far-too-small result.
 */
static inline uint64_t
div64_u64_round_up(uint64_t dividend, uint64_t divisor)
{
	return (dividend / divisor + (dividend % divisor != 0));
}
1003ff7ec1cSHans Petter Selasky 
/* Upper-case Linux spelling; forwards all arguments to div64_u64_round_up(). */
#define	DIV64_U64_ROUND_UP(...) \
	div64_u64_round_up(__VA_ARGS__)
1033ff7ec1cSHans Petter Selasky 
/*
 * Compute (x * y) / div exactly, avoiding overflow of the intermediate
 * product when the final result fits in 64 bits: split x into its
 * quotient and remainder by div, scale each part by y, and recombine.
 * (rem < div <= UINT32_MAX and y <= UINT32_MAX, so rem * y cannot
 * overflow 64 bits.)  Divisor must be non-zero.
 */
static inline uint64_t
mul_u64_u32_div(uint64_t x, uint32_t y, uint32_t div)
{
	uint64_t quot = x / div;
	uint64_t rem = x % div;

	return (quot * y + (rem * y) / div);
}
111885ab0dbSEmmanuel Vadot 
/*
 * Compute (x * y) / z with a full 128-bit intermediate product, using
 * only 64-bit arithmetic: walk the bits of x from least significant,
 * accumulating the quotient in 'res' and a running remainder in 'rem'
 * while doubling the (quotient, remainder) decomposition of y modulo z.
 * The result is truncated toward zero.  Behavior is undefined for
 * z == 0, and the result is only meaningful when the true quotient fits
 * in 64 bits; the KASSERTs below sanity-check the identity in debug
 * kernels (note the first one compares modulo 2^64).
 */
static inline uint64_t
mul_u64_u64_div_u64(uint64_t x, uint64_t y, uint64_t z)
{
	uint64_t res, rem;
	uint64_t x1, y1, y1z;

	res = rem = 0;
	x1 = x;
	/* Decompose y as y1z * z + y1 with y1 < z. */
	y1z = y / z;
	y1 = y - y1z * z;

	/*
	 * INVARIANT: x * y = res * z + rem + (y1 + y1z * z) * x1
	 * INVARIANT: y1 < z
	 * INVARIANT: rem < z
	 */
	while (x1 > 0) {
		/* Handle low bit. */
		if (x1 & 1) {
			x1 &= ~1;
			res += y1z;
			rem += y1;
			/* 'rem < y1' detects unsigned wrap of the add. */
			if ((rem < y1) || (rem >= z)) {
				res += 1;
				rem -= z;
			}
		}

		/* Shift x1 right and (y1 + y1z * z) left */
		x1 >>= 1;
		/* 'y1 * 2 < y1' detects overflow of the doubling. */
		if ((y1 * 2 < y1) || (y1 * 2 >= z)) {
			y1z = y1z * 2 + 1;
			y1 = y1 * 2 - z;
		} else {
			y1z *= 2;
			y1 *= 2;
		}
	}

	KASSERT(res * z + rem == x * y, ("%s: res %ju * z %ju + rem %ju != "
	    "x %ju * y %ju", __func__, (uintmax_t)res, (uintmax_t)z,
	    (uintmax_t)rem, (uintmax_t)x, (uintmax_t)y));
	KASSERT(rem < z, ("%s: rem %ju >= z %ju\n", __func__,
	    (uintmax_t)rem, (uintmax_t)z));

	return (res);
}
159b80ea452SBjoern A. Zeeb 
/*
 * Compute (x * y) >> shift by splitting x into 32-bit halves and
 * combining the two shifted partial products.  Matches the Linux
 * helper; callers are expected to pass 0 < shift <= 32 and inputs whose
 * shifted product fits in 64 bits (higher bits are silently dropped).
 */
static inline uint64_t
mul_u64_u32_shr(uint64_t x, uint32_t y, unsigned int shift)
{
	uint64_t lo_part = (uint32_t)x;
	uint64_t hi_part = x >> 32;

	return (((lo_part * y) >> shift) +
	    ((hi_part * y) << (32 - shift)));
}
170885ab0dbSEmmanuel Vadot 
171307f78f3SVladimir Kondratyev #endif /* _LINUXKPI_LINUX_MATH64_H */
172