[PATCH 06/20] Use "gmp-renamei.h" for renaming the internal routines
Richard Henderson
rth at twiddle.net
Mon Mar 4 19:41:36 CET 2013
Similar to how we handle the public routines, but all in
one go this time.
---
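The mechanics, in brief: gen-renamei.c overrides __GMP_INTERN_FULL so that
preprocessing gmp-impl.h emits one `__GMP_XYZZY api_name abi_name` line per
internal routine, and gen-rename.awk (which presumably emits plain #define
lines, as in the public-symbol case from the earlier patch) turns those into
gmp-renamei.h.  The fragment below is a self-contained sketch of the same
preprocessor-level rename scheme; GLUE, INTERN and ABI_PREFIX are simplified
stand-ins for __GMP_GLUE, __GMP_INTERN and __GMP_ABI_PREFIX, not GMP's own
macros.

```c
/* Self-contained sketch of the preprocessor-level renaming scheme;
   the macro names are simplified stand-ins, not GMP's.  */
#include <stdio.h>

#define GLUE2(a, b) a##b
#define GLUE(a, b)  GLUE2 (a, b)

#define ABI_PREFIX __g		/* plays the role of __GMP_ABI_PREFIX */

/* Library-build flavour of the declaration macro: an ordinary
   prototype (or definition) written against the API name.  */
#define INTERN(type, name, args) type name args

/* One line of the sort a generated rename header would carry:
   map the API name onto the prefixed ABI name.  */
#define demo_mul GLUE (ABI_PREFIX, demo_mul)	/* demo_mul -> __gdemo_mul */

/* The code is written against the API name, but with the rename
   define in effect the compiler only ever sees __gdemo_mul.  */
INTERN (int, demo_mul, (int a, int b))
{
  return a * b;
}

int
main (void)
{
  printf ("%d\n", demo_mul (6, 7));	/* calls __gdemo_mul */
  return 0;
}
```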
Makefile.am | 5 +-
gen-renamei.c | 29 ++
gmp-impl.h | 1219 ++++++++++++++++++++++++++++++---------------------------
3 files changed, 686 insertions(+), 567 deletions(-)
create mode 100644 gen-renamei.c
diff --git a/Makefile.am b/Makefile.am
index c68c62a..5b2df66 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -117,7 +117,7 @@ include_HEADERS = $(GMPXX_HEADERS_OPTION)
nodist_includeexec_HEADERS = gmp.h gmp-rename.h
lib_LTLIBRARIES = libgmp.la $(GMPXX_LTLIBRARIES_OPTION)
-BUILT_SOURCES = gmp.h gmp-rename.h
+BUILT_SOURCES = gmp.h gmp-rename.h gmp-renamei.h
DISTCLEANFILES = $(BUILT_SOURCES) config.m4 @gmp_srclinks@
@@ -320,6 +320,9 @@ EXTRA_DIST += bootstrap.c
gmp-rename.h: gen-rename.c gen-rename.awk gmp.h
$(COMPILE) -E $< | $(AWK) -f $(srcdir)/gen-rename.awk > $@ || (rm -f $@; exit 1)
+gmp-renamei.h: gen-renamei.c gen-rename.awk gmp.h gmp-impl.h
+ $(COMPILE) -E $< | $(AWK) -f $(srcdir)/gen-rename.awk > $@ || (rm -f $@; exit 1)
+
fac_table.h: gen-fac$(EXEEXT_FOR_BUILD)
./gen-fac $(GMP_LIMB_BITS) $(GMP_NAIL_BITS) >fac_table.h || (rm -f fac_table.h; exit 1)
BUILT_SOURCES += fac_table.h
diff --git a/gen-renamei.c b/gen-renamei.c
new file mode 100644
index 0000000..12a831f
--- /dev/null
+++ b/gen-renamei.c
@@ -0,0 +1,29 @@
+/* Generate preprocessor defines for renaming symbols.
+ Copyright 2013 Free Software Foundation, Inc.
+
+This file is part of the GNU MP Library.
+
+The GNU MP Library is free software; you can redistribute it and/or modify
+it under the terms of the GNU Lesser General Public License as published by
+the Free Software Foundation; either version 3 of the License, or (at your
+option) any later version.
+
+The GNU MP Library is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+License for more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with the GNU MP Library. If not, see http://www.gnu.org/licenses/. */
+
+#include "config.h"
+#include <stdio.h>
+#include <stdarg.h>
+#include <obstack.h>
+
+#define __GMP_WITHIN_CONFIGURE
+#define __GMP_INTERN_FULL(TYPE, API_NAME, ARGS, NT, ATTRS, ABI_NAME) \
+ __GMP_XYZZY API_NAME ABI_NAME
+
+#include "gmp.h"
+#include "gmp-impl.h"
diff --git a/gmp-impl.h b/gmp-impl.h
index 21462e0..fda8bca 100644
--- a/gmp-impl.h
+++ b/gmp-impl.h
@@ -56,6 +56,42 @@ along with the GNU MP Library. If not, see http://www.gnu.org/licenses/. */
#define __MPN(x) __gmpn_##x
#endif
+/* __GMP_INTERN handles symbols private to the GMP implementation.
+ It comes in several variants to make the usage less arduous:
+ __GMP_INTERN - for normal functions
+ __GMP_INTERN_A - for functions with attributes
+ __GMP_INTERN_NT - for "no-throw" functions
+ __GMP_INTERN_NT_A - for "no-throw" functions with attributes
+ __GMP_INTERN_DATA - for declaring data variables
+*/
+#if !defined(__GMP_WITHIN_CONFIGURE) || !defined(__GMP_INTERN_FULL)
+#define __GMP_INTERN_FULL(TYPE, API_NAME, ARGS, NT, ATTRS, ABI_NAME) \
+ __GMP_DECLSPEC TYPE API_NAME ARGS NT ATTRS
+#endif
+
+#define __GMP_INTERN_NT_A(TYPE, API_NAME, ARGS, ATTRS) \
+ __GMP_INTERN_FULL(TYPE, API_NAME, ARGS, __GMP_NOTHROW, ATTRS, \
+ __GMP_GLUE(__GMP_ABI_PREFIX, API_NAME))
+#define __GMP_INTERN_A(TYPE, API_NAME, ARGS, ATTRS) \
+ __GMP_INTERN_FULL(TYPE, API_NAME, ARGS, , ATTRS, \
+ __GMP_GLUE(__GMP_ABI_PREFIX, API_NAME))
+#define __GMP_INTERN_NT(TYPE, API_NAME, ARGS) \
+ __GMP_INTERN_NT_A(TYPE, API_NAME, ARGS, )
+#define __GMP_INTERN(TYPE, API_NAME, ARGS) \
+ __GMP_INTERN_A(TYPE, API_NAME, ARGS, )
+
+#define __GMP_INTERN_DATA(TYPE, API_NAME) \
+ __GMP_INTERN_FULL(extern TYPE, API_NAME, , , , \
+ __GMP_GLUE(__GMP_ABI_PREFIX, API_NAME))
+
+/* When using GCC, we'll adjust the API symbol to point to the ABI symbol
+ automatically. Otherwise, we'll have generated and installed a set of
+ defines that will perform the remapping at the preprocessor level. */
+#if !defined(__GMP_WITHIN_CONFIGURE)
+# include "gmp-renamei.h"
+#endif
+
+
/* For fat.h and other fat binary stuff.
No need for __GMP_ATTRIBUTE_PURE or __GMP_NOTHROW, since functions
declared this way are only used to set function pointers in __gmpn_cpuvec,
@@ -357,8 +393,14 @@ struct tmp_reentrant_t {
struct tmp_reentrant_t *next;
size_t size; /* bytes, including header */
};
-__GMP_DECLSPEC void *__gmp_tmp_reentrant_alloc (struct tmp_reentrant_t **, size_t) ATTRIBUTE_MALLOC;
-__GMP_DECLSPEC void __gmp_tmp_reentrant_free (struct tmp_reentrant_t *);
+
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX
+
+__GMP_INTERN_A (void *, __gmp_tmp_reentrant_alloc,
+ (struct tmp_reentrant_t **, size_t),
+ ATTRIBUTE_MALLOC);
+__GMP_INTERN (void, __gmp_tmp_reentrant_free, (struct tmp_reentrant_t *));
#endif
#if WANT_TMP_ALLOCA
@@ -396,9 +438,11 @@ struct tmp_marker
struct tmp_stack *which_chunk;
void *alloc_point;
};
-__GMP_DECLSPEC void *__gmp_tmp_alloc (unsigned long) ATTRIBUTE_MALLOC;
-__GMP_DECLSPEC void __gmp_tmp_mark (struct tmp_marker *);
-__GMP_DECLSPEC void __gmp_tmp_free (struct tmp_marker *);
+
+__GMP_INTERN_A (void *, __gmp_tmp_alloc, (unsigned long), ATTRIBUTE_MALLOC);
+__GMP_INTERN (void, __gmp_tmp_mark, (struct tmp_marker *));
+__GMP_INTERN (void, __gmp_tmp_free, (struct tmp_marker *));
+
#define TMP_SDECL TMP_DECL
#define TMP_DECL struct tmp_marker __tmp_marker
#define TMP_SMARK TMP_MARK
@@ -423,15 +467,16 @@ struct tmp_debug_entry_t {
char *block;
size_t size;
};
-__GMP_DECLSPEC void __gmp_tmp_debug_mark (const char *, int, struct tmp_debug_t **,
- struct tmp_debug_t *,
- const char *, const char *);
-__GMP_DECLSPEC void *__gmp_tmp_debug_alloc (const char *, int, int,
- struct tmp_debug_t **, const char *,
- size_t) ATTRIBUTE_MALLOC;
-__GMP_DECLSPEC void __gmp_tmp_debug_free (const char *, int, int,
- struct tmp_debug_t **,
- const char *, const char *);
+
+__GMP_INTERN (void, __gmp_tmp_debug_mark,
+ (const char *, int, struct tmp_debug_t **,
+ struct tmp_debug_t *, const char *, const char *));
+__GMP_INTERN_A (void *, __gmp_tmp_debug_alloc,
+ (const char *, int, int, struct tmp_debug_t **,
+ const char *, size_t), ATTRIBUTE_MALLOC);
+__GMP_INTERN (void, __gmp_tmp_debug_free,
+ (const char *, int, int, struct tmp_debug_t **,
+ const char *, const char *));
#define TMP_SDECL TMP_DECL_NAME(__tmp_xmarker, "__tmp_marker")
#define TMP_DECL TMP_DECL_NAME(__tmp_xmarker, "__tmp_marker")
#define TMP_SMARK TMP_MARK_NAME(__tmp_xmarker, "__tmp_marker")
@@ -696,13 +741,18 @@ __GMP_DECLSPEC void __gmp_tmp_debug_free (const char *, int, int,
/* Enhancement: __gmp_allocate_func could have "__attribute__ ((malloc))",
but current gcc (3.0) doesn't seem to support that. */
-__GMP_DECLSPEC extern void * (*__gmp_allocate_func) (size_t);
-__GMP_DECLSPEC extern void * (*__gmp_reallocate_func) (void *, size_t, size_t);
-__GMP_DECLSPEC extern void (*__gmp_free_func) (void *, size_t);
+typedef void * (*gmp_allocate_type) (size_t);
+typedef void * (*gmp_reallocate_type) (void *, size_t, size_t);
+typedef void (*gmp_free_type) (void *, size_t);
+
+/* ??? Test programs reference these directly, and break if hidden. */
+__GMP_DECLSPEC extern gmp_allocate_type __gmp_allocate_func;
+__GMP_DECLSPEC extern gmp_reallocate_type __gmp_reallocate_func;
+__GMP_DECLSPEC extern gmp_free_type __gmp_free_func;
-__GMP_DECLSPEC void *__gmp_default_allocate (size_t);
-__GMP_DECLSPEC void *__gmp_default_reallocate (void *, size_t, size_t);
-__GMP_DECLSPEC void __gmp_default_free (void *, size_t);
+__GMP_INTERN (void *, __gmp_default_allocate, (size_t));
+__GMP_INTERN (void *, __gmp_default_reallocate, (void *, size_t, size_t));
+__GMP_INTERN (void, __gmp_default_free, (void *, size_t));
#define __GMP_ALLOCATE_FUNC_TYPE(n,type) \
((type *) (*__gmp_allocate_func) ((n) * sizeof (type)))
@@ -802,391 +852,399 @@ __GMP_DECLSPEC void __gmp_default_free (void *, size_t);
#endif
#endif
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX __g
-__GMP_DECLSPEC void __gmpz_aorsmul_1 (REGPARM_3_1 (mpz_ptr, mpz_srcptr, mp_limb_t, mp_size_t)) REGPARM_ATTR(1);
+__GMP_INTERN_FULL (void, __gmpz_aorsmul_1,
+ (REGPARM_3_1 (mpz_ptr, mpz_srcptr, mp_limb_t, mp_size_t)),
+ /* !nothrow */, REGPARM_ATTR(1), __gmpz_aorsmul_1);
#define mpz_aorsmul_1(w,u,v,sub) __gmpz_aorsmul_1 (REGPARM_3_1 (w, u, v, sub))
-#define mpz_n_pow_ui __gmpz_n_pow_ui
-__GMP_DECLSPEC void mpz_n_pow_ui (mpz_ptr, mp_srcptr, mp_size_t, unsigned long);
-
+__GMP_INTERN (void, mpz_n_pow_ui,
+ (mpz_ptr, mp_srcptr, mp_size_t, unsigned long));
-#define mpn_addmul_1c __MPN(addmul_1c)
-__GMP_DECLSPEC mp_limb_t mpn_addmul_1c (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_addmul_1c,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t));
#ifndef mpn_addmul_2 /* if not done with cpuvec in a fat binary */
-#define mpn_addmul_2 __MPN(addmul_2)
-__GMP_DECLSPEC mp_limb_t mpn_addmul_2 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
-#endif
-
-#define mpn_addmul_3 __MPN(addmul_3)
-__GMP_DECLSPEC mp_limb_t mpn_addmul_3 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
-
-#define mpn_addmul_4 __MPN(addmul_4)
-__GMP_DECLSPEC mp_limb_t mpn_addmul_4 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
-
-#define mpn_addmul_5 __MPN(addmul_5)
-__GMP_DECLSPEC mp_limb_t mpn_addmul_5 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
-
-#define mpn_addmul_6 __MPN(addmul_6)
-__GMP_DECLSPEC mp_limb_t mpn_addmul_6 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
-
-#define mpn_addmul_7 __MPN(addmul_7)
-__GMP_DECLSPEC mp_limb_t mpn_addmul_7 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
-
-#define mpn_addmul_8 __MPN(addmul_8)
-__GMP_DECLSPEC mp_limb_t mpn_addmul_8 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
+__GMP_INTERN (mp_limb_t, mpn_addmul_2,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
+#endif
+
+__GMP_INTERN (mp_limb_t, mpn_addmul_3,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
+__GMP_INTERN (mp_limb_t, mpn_addmul_4,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
+__GMP_INTERN (mp_limb_t, mpn_addmul_5,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
+__GMP_INTERN (mp_limb_t, mpn_addmul_6,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
+__GMP_INTERN (mp_limb_t, mpn_addmul_7,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
+__GMP_INTERN (mp_limb_t, mpn_addmul_8,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
/* Alternative entry point in mpn_addmul_2 for the benefit of mpn_sqr_basecase. */
-#define mpn_addmul_2s __MPN(addmul_2s)
-__GMP_DECLSPEC mp_limb_t mpn_addmul_2s (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
+__GMP_INTERN (mp_limb_t, mpn_addmul_2s,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
/* mpn_addlsh1_n(c,a,b,n), when it exists, sets {c,n} to {a,n}+2*{b,n}, and
returns the carry out (0, 1 or 2). Use _ip1 when a=c. */
#ifndef mpn_addlsh1_n /* if not done with cpuvec in a fat binary */
-#define mpn_addlsh1_n __MPN(addlsh1_n)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh1_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_addlsh1_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
#endif
-#define mpn_addlsh1_nc __MPN(addlsh1_nc)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh1_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+
+__GMP_INTERN (mp_limb_t, mpn_addlsh1_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
+
#if HAVE_NATIVE_mpn_addlsh1_n && ! HAVE_NATIVE_mpn_addlsh1_n_ip1
#define mpn_addlsh1_n_ip1(dst,src,n) mpn_addlsh1_n(dst,dst,src,n)
#define HAVE_NATIVE_mpn_addlsh1_n_ip1 1
#else
-#define mpn_addlsh1_n_ip1 __MPN(addlsh1_n_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh1_n_ip1 (mp_ptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_addlsh1_n_ip1, (mp_ptr, mp_srcptr, mp_size_t));
#endif
+
#if HAVE_NATIVE_mpn_addlsh1_nc && ! HAVE_NATIVE_mpn_addlsh1_nc_ip1
#define mpn_addlsh1_nc_ip1(dst,src,n,c) mpn_addlsh1_nc(dst,dst,src,n,c)
#define HAVE_NATIVE_mpn_addlsh1_nc_ip1 1
#else
-#define mpn_addlsh1_nc_ip1 __MPN(addlsh1_nc_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh1_nc_ip1 (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_addlsh1_nc_ip1,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
#endif
#ifndef mpn_addlsh2_n /* if not done with cpuvec in a fat binary */
/* mpn_addlsh2_n(c,a,b,n), when it exists, sets {c,n} to {a,n}+4*{b,n}, and
returns the carry out (0, ..., 4). Use _ip1 when a=c. */
-#define mpn_addlsh2_n __MPN(addlsh2_n)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh2_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_addlsh2_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
#endif
-#define mpn_addlsh2_nc __MPN(addlsh2_nc)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh2_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+
+__GMP_INTERN (mp_limb_t, mpn_addlsh2_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
+
#if HAVE_NATIVE_mpn_addlsh2_n && ! HAVE_NATIVE_mpn_addlsh2_n_ip1
#define mpn_addlsh2_n_ip1(dst,src,n) mpn_addlsh2_n(dst,dst,src,n)
#define HAVE_NATIVE_mpn_addlsh2_n_ip1 1
#else
-#define mpn_addlsh2_n_ip1 __MPN(addlsh2_n_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh2_n_ip1 (mp_ptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_addlsh2_n_ip1, (mp_ptr, mp_srcptr, mp_size_t));
#endif
+
#if HAVE_NATIVE_mpn_addlsh2_nc && ! HAVE_NATIVE_mpn_addlsh2_nc_ip1
#define mpn_addlsh2_nc_ip1(dst,src,n,c) mpn_addlsh2_nc(dst,dst,src,n,c)
#define HAVE_NATIVE_mpn_addlsh2_nc_ip1 1
#else
-#define mpn_addlsh2_nc_ip1 __MPN(addlsh2_nc_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh2_nc_ip1 (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_addlsh2_nc_ip1,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
#endif
-/* mpn_addlsh_n(c,a,b,n,k), when it exists, sets {c,n} to {a,n}+2^k*{b,n}, and
- returns the carry out (0, ..., 2^k). Use _ip1 when a=c. */
-#define mpn_addlsh_n __MPN(addlsh_n)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, unsigned int);
-#define mpn_addlsh_nc __MPN(addlsh_nc)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, unsigned int, mp_limb_t);
+/* mpn_addlsh_n(c,a,b,n,k), when it exists, sets {c,n} to {a,n}+2^k*{b,n},
+ and returns the carry out (0, ..., 2^k). Use _ip1 when a=c. */
+__GMP_INTERN (mp_limb_t, mpn_addlsh_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, unsigned int));
+__GMP_INTERN (mp_limb_t, mpn_addlsh_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t,
+ unsigned int, mp_limb_t));
+
#if HAVE_NATIVE_mpn_addlsh_n && ! HAVE_NATIVE_mpn_addlsh_n_ip1
#define mpn_addlsh_n_ip1(dst,src,n,s) mpn_addlsh_n(dst,dst,src,n,s)
#define HAVE_NATIVE_mpn_addlsh_n_ip1 1
#else
-#define mpn_addlsh_n_ip1 __MPN(addlsh_n_ip1)
- __GMP_DECLSPEC mp_limb_t mpn_addlsh_n_ip1 (mp_ptr, mp_srcptr, mp_size_t, unsigned int);
+__GMP_INTERN (mp_limb_t, mpn_addlsh_n_ip1,
+ (mp_ptr, mp_srcptr, mp_size_t, unsigned int));
#endif
+
#if HAVE_NATIVE_mpn_addlsh_nc && ! HAVE_NATIVE_mpn_addlsh_nc_ip1
#define mpn_addlsh_nc_ip1(dst,src,n,s,c) mpn_addlsh_nc(dst,dst,src,n,s,c)
#define HAVE_NATIVE_mpn_addlsh_nc_ip1 1
#else
-#define mpn_addlsh_nc_ip1 __MPN(addlsh_nc_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_addlsh_nc_ip1 (mp_ptr, mp_srcptr, mp_size_t, unsigned int, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_addlsh_nc_ip1,
+ (mp_ptr, mp_srcptr, mp_size_t, unsigned int, mp_limb_t));
#endif
#ifndef mpn_sublsh1_n /* if not done with cpuvec in a fat binary */
/* mpn_sublsh1_n(c,a,b,n), when it exists, sets {c,n} to {a,n}-2*{b,n}, and
returns the borrow out (0, 1 or 2). Use _ip1 when a=c. */
-#define mpn_sublsh1_n __MPN(sublsh1_n)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh1_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_sublsh1_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
#endif
-#define mpn_sublsh1_nc __MPN(sublsh1_nc)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh1_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+
+__GMP_INTERN (mp_limb_t, mpn_sublsh1_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
+
#if HAVE_NATIVE_mpn_sublsh1_n && ! HAVE_NATIVE_mpn_sublsh1_n_ip1
#define mpn_sublsh1_n_ip1(dst,src,n) mpn_sublsh1_n(dst,dst,src,n)
#define HAVE_NATIVE_mpn_sublsh1_n_ip1 1
#else
-#define mpn_sublsh1_n_ip1 __MPN(sublsh1_n_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh1_n_ip1 (mp_ptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_sublsh1_n_ip1, (mp_ptr, mp_srcptr, mp_size_t));
#endif
+
#if HAVE_NATIVE_mpn_sublsh1_nc && ! HAVE_NATIVE_mpn_sublsh1_nc_ip1
#define mpn_sublsh1_nc_ip1(dst,src,n,c) mpn_sublsh1_nc(dst,dst,src,n,c)
#define HAVE_NATIVE_mpn_sublsh1_nc_ip1 1
#else
-#define mpn_sublsh1_nc_ip1 __MPN(sublsh1_nc_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh1_nc_ip1 (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sublsh1_nc_ip1,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
#endif
/* mpn_rsblsh1_n(c,a,b,n), when it exists, sets {c,n} to 2*{b,n}-{a,n}, and
returns the carry out (-1, 0, 1). */
-#define mpn_rsblsh1_n __MPN(rsblsh1_n)
-__GMP_DECLSPEC mp_limb_signed_t mpn_rsblsh1_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
-#define mpn_rsblsh1_nc __MPN(rsblsh1_nc)
-__GMP_DECLSPEC mp_limb_signed_t mpn_rsblsh1_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_signed_t, mpn_rsblsh1_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
+__GMP_INTERN (mp_limb_signed_t, mpn_rsblsh1_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
/* mpn_sublsh2_n(c,a,b,n), when it exists, sets {c,n} to {a,n}-4*{b,n}, and
returns the borrow out (0, ..., 4). Use _ip1 when a=c. */
-#define mpn_sublsh2_n __MPN(sublsh2_n)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh2_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
-#define mpn_sublsh2_nc __MPN(sublsh2_nc)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh2_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sublsh2_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
+__GMP_INTERN (mp_limb_t, mpn_sublsh2_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
+
#if HAVE_NATIVE_mpn_sublsh2_n && ! HAVE_NATIVE_mpn_sublsh2_n_ip1
#define mpn_sublsh2_n_ip1(dst,src,n) mpn_sublsh2_n(dst,dst,src,n)
#define HAVE_NATIVE_mpn_sublsh2_n_ip1 1
#else
-#define mpn_sublsh2_n_ip1 __MPN(sublsh2_n_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh2_n_ip1 (mp_ptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_sublsh2_n_ip1, (mp_ptr, mp_srcptr, mp_size_t));
#endif
+
#if HAVE_NATIVE_mpn_sublsh2_nc && ! HAVE_NATIVE_mpn_sublsh2_nc_ip1
#define mpn_sublsh2_nc_ip1(dst,src,n,c) mpn_sublsh2_nc(dst,dst,src,n,c)
#define HAVE_NATIVE_mpn_sublsh2_nc_ip1 1
#else
-#define mpn_sublsh2_nc_ip1 __MPN(sublsh2_nc_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh2_nc_ip1 (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sublsh2_nc_ip1,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
#endif
/* mpn_sublsh_n(c,a,b,n,k), when it exists, sets {c,n} to {a,n}-2^k*{b,n}, and
returns the carry out (0, ..., 2^k). Use _ip1 when a=c. */
-#define mpn_sublsh_n __MPN(sublsh_n)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, unsigned int);
+__GMP_INTERN (mp_limb_t, mpn_sublsh_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, unsigned int));
+
#if HAVE_NATIVE_mpn_sublsh_n && ! HAVE_NATIVE_mpn_sublsh_n_ip1
#define mpn_sublsh_n_ip1(dst,src,n,s) mpn_sublsh_n(dst,dst,src,n,s)
#define HAVE_NATIVE_mpn_sublsh_n_ip1 1
#else
-#define mpn_sublsh_n_ip1 __MPN(sublsh_n_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh_n_ip1 (mp_ptr, mp_srcptr, mp_size_t, unsigned int);
+__GMP_INTERN (mp_limb_t, mpn_sublsh_n_ip1,
+ (mp_ptr, mp_srcptr, mp_size_t, unsigned int));
#endif
+
#if HAVE_NATIVE_mpn_sublsh_nc && ! HAVE_NATIVE_mpn_sublsh_nc_ip1
#define mpn_sublsh_nc_ip1(dst,src,n,s,c) mpn_sublsh_nc(dst,dst,src,n,s,c)
#define HAVE_NATIVE_mpn_sublsh_nc_ip1 1
#else
-#define mpn_sublsh_nc_ip1 __MPN(sublsh_nc_ip1)
-__GMP_DECLSPEC mp_limb_t mpn_sublsh_nc_ip1 (mp_ptr, mp_srcptr, mp_size_t, unsigned int, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sublsh_nc_ip1,
+ (mp_ptr, mp_srcptr, mp_size_t, unsigned int, mp_limb_t));
#endif
/* mpn_rsblsh2_n(c,a,b,n), when it exists, sets {c,n} to 4*{b,n}-{a,n}, and
returns the carry out (-1, ..., 3). */
-#define mpn_rsblsh2_n __MPN(rsblsh2_n)
-__GMP_DECLSPEC mp_limb_signed_t mpn_rsblsh2_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
-#define mpn_rsblsh2_nc __MPN(rsblsh2_nc)
-__GMP_DECLSPEC mp_limb_signed_t mpn_rsblsh2_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_signed_t, mpn_rsblsh2_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
+__GMP_INTERN (mp_limb_signed_t, mpn_rsblsh2_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
/* mpn_rsblsh_n(c,a,b,n,k), when it exists, sets {c,n} to 2^k*{b,n}-{a,n}, and
returns the carry out (-1, 0, ..., 2^k-1). */
-#define mpn_rsblsh_n __MPN(rsblsh_n)
-__GMP_DECLSPEC mp_limb_signed_t mpn_rsblsh_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, unsigned int);
-#define mpn_rsblsh_nc __MPN(rsblsh_nc)
-__GMP_DECLSPEC mp_limb_signed_t mpn_rsblsh_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, unsigned int, mp_limb_t);
+__GMP_INTERN (mp_limb_signed_t, mpn_rsblsh_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, unsigned int));
+__GMP_INTERN (mp_limb_signed_t, mpn_rsblsh_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t,
+ unsigned int, mp_limb_t));
/* mpn_rsh1add_n(c,a,b,n), when it exists, sets {c,n} to ({a,n} + {b,n}) >> 1,
and returns the bit rshifted out (0 or 1). */
-#define mpn_rsh1add_n __MPN(rsh1add_n)
-__GMP_DECLSPEC mp_limb_t mpn_rsh1add_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
-#define mpn_rsh1add_nc __MPN(rsh1add_nc)
-__GMP_DECLSPEC mp_limb_t mpn_rsh1add_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_rsh1add_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
+__GMP_INTERN (mp_limb_t, mpn_rsh1add_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
/* mpn_rsh1sub_n(c,a,b,n), when it exists, sets {c,n} to ({a,n} - {b,n}) >> 1,
and returns the bit rshifted out (0 or 1). If there's a borrow from the
subtract, it's stored as a 1 in the high bit of c[n-1], like a twos
complement negative. */
-#define mpn_rsh1sub_n __MPN(rsh1sub_n)
-__GMP_DECLSPEC mp_limb_t mpn_rsh1sub_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
-#define mpn_rsh1sub_nc __MPN(rsh1sub_nc)
-__GMP_DECLSPEC mp_limb_t mpn_rsh1sub_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_rsh1sub_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
+__GMP_INTERN (mp_limb_t, mpn_rsh1sub_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
#ifndef mpn_lshiftc /* if not done with cpuvec in a fat binary */
-#define mpn_lshiftc __MPN(lshiftc)
-__GMP_DECLSPEC mp_limb_t mpn_lshiftc (mp_ptr, mp_srcptr, mp_size_t, unsigned int);
+__GMP_INTERN (mp_limb_t, mpn_lshiftc,
+ (mp_ptr, mp_srcptr, mp_size_t, unsigned int));
#endif
-#define mpn_add_err1_n __MPN(add_err1_n)
-__GMP_DECLSPEC mp_limb_t mpn_add_err1_n (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_add_err1_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr,
+ mp_size_t, mp_limb_t));
-#define mpn_add_err2_n __MPN(add_err2_n)
-__GMP_DECLSPEC mp_limb_t mpn_add_err2_n (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_add_err2_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr,
+ mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_add_err3_n __MPN(add_err3_n)
-__GMP_DECLSPEC mp_limb_t mpn_add_err3_n (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_add_err3_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr,
+ mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_sub_err1_n __MPN(sub_err1_n)
-__GMP_DECLSPEC mp_limb_t mpn_sub_err1_n (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sub_err1_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr,
+ mp_size_t, mp_limb_t));
-#define mpn_sub_err2_n __MPN(sub_err2_n)
-__GMP_DECLSPEC mp_limb_t mpn_sub_err2_n (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sub_err2_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr,
+ mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_sub_err3_n __MPN(sub_err3_n)
-__GMP_DECLSPEC mp_limb_t mpn_sub_err3_n (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sub_err3_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_ptr, mp_srcptr,
+ mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_add_n_sub_n __MPN(add_n_sub_n)
-__GMP_DECLSPEC mp_limb_t mpn_add_n_sub_n (mp_ptr, mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_add_n_sub_n,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
-#define mpn_add_n_sub_nc __MPN(add_n_sub_nc)
-__GMP_DECLSPEC mp_limb_t mpn_add_n_sub_nc (mp_ptr, mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_add_n_sub_nc,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_addaddmul_1msb0 __MPN(addaddmul_1msb0)
-__GMP_DECLSPEC mp_limb_t mpn_addaddmul_1msb0 (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_addaddmul_1msb0,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t));
-#define mpn_divrem_1c __MPN(divrem_1c)
-__GMP_DECLSPEC mp_limb_t mpn_divrem_1c (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_divrem_1c,
+ (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t));
-#define mpn_dump __MPN(dump)
-__GMP_DECLSPEC void mpn_dump (mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_dump, (mp_srcptr, mp_size_t));
-#define mpn_fib2_ui __MPN(fib2_ui)
-__GMP_DECLSPEC mp_size_t mpn_fib2_ui (mp_ptr, mp_ptr, unsigned long);
+__GMP_INTERN (mp_size_t, mpn_fib2_ui, (mp_ptr, mp_ptr, unsigned long));
/* Remap names of internal mpn functions. */
#define __clz_tab __MPN(clz_tab)
#define mpn_udiv_w_sdiv __MPN(udiv_w_sdiv)
-#define mpn_jacobi_base __MPN(jacobi_base)
-__GMP_DECLSPEC int mpn_jacobi_base (mp_limb_t, mp_limb_t, int) ATTRIBUTE_CONST;
+__GMP_INTERN_A (int, mpn_jacobi_base, (mp_limb_t, mp_limb_t, int),
+ ATTRIBUTE_CONST);
-#define mpn_jacobi_2 __MPN(jacobi_2)
-__GMP_DECLSPEC int mpn_jacobi_2 (mp_srcptr, mp_srcptr, unsigned);
+__GMP_INTERN (int, mpn_jacobi_2, (mp_srcptr, mp_srcptr, unsigned));
-#define mpn_jacobi_n __MPN(jacobi_n)
-__GMP_DECLSPEC int mpn_jacobi_n (mp_ptr, mp_ptr, mp_size_t, unsigned);
+__GMP_INTERN (int, mpn_jacobi_n, (mp_ptr, mp_ptr, mp_size_t, unsigned));
-#define mpn_mod_1c __MPN(mod_1c)
-__GMP_DECLSPEC mp_limb_t mpn_mod_1c (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t) __GMP_ATTRIBUTE_PURE;
+__GMP_INTERN_A (mp_limb_t, mpn_mod_1c,
+ (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t),
+ __GMP_ATTRIBUTE_PURE);
-#define mpn_mul_1c __MPN(mul_1c)
-__GMP_DECLSPEC mp_limb_t mpn_mul_1c (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_mul_1c,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t));
-#define mpn_mul_2 __MPN(mul_2)
-__GMP_DECLSPEC mp_limb_t mpn_mul_2 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
+__GMP_INTERN (mp_limb_t, mpn_mul_2, (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
-#define mpn_mul_3 __MPN(mul_3)
-__GMP_DECLSPEC mp_limb_t mpn_mul_3 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
+__GMP_INTERN (mp_limb_t, mpn_mul_3, (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
-#define mpn_mul_4 __MPN(mul_4)
-__GMP_DECLSPEC mp_limb_t mpn_mul_4 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
+__GMP_INTERN (mp_limb_t, mpn_mul_4, (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
-#define mpn_mul_5 __MPN(mul_5)
-__GMP_DECLSPEC mp_limb_t mpn_mul_5 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
+__GMP_INTERN (mp_limb_t, mpn_mul_5, (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
-#define mpn_mul_6 __MPN(mul_6)
-__GMP_DECLSPEC mp_limb_t mpn_mul_6 (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
+__GMP_INTERN (mp_limb_t, mpn_mul_6, (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
#ifndef mpn_mul_basecase /* if not done with cpuvec in a fat binary */
-#define mpn_mul_basecase __MPN(mul_basecase)
-__GMP_DECLSPEC void mpn_mul_basecase (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_mul_basecase,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t));
#endif
-#define mpn_mullo_n __MPN(mullo_n)
-__GMP_DECLSPEC void mpn_mullo_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_mullo_n, (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
#ifndef mpn_mullo_basecase /* if not done with cpuvec in a fat binary */
-#define mpn_mullo_basecase __MPN(mullo_basecase)
-__GMP_DECLSPEC void mpn_mullo_basecase (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_mullo_basecase,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
#endif
#ifndef mpn_sqr_basecase /* if not done with cpuvec in a fat binary */
-#define mpn_sqr_basecase __MPN(sqr_basecase)
-__GMP_DECLSPEC void mpn_sqr_basecase (mp_ptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_sqr_basecase, (mp_ptr, mp_srcptr, mp_size_t));
#endif
-#define mpn_mulmid_basecase __MPN(mulmid_basecase)
-__GMP_DECLSPEC void mpn_mulmid_basecase (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_mulmid_basecase,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t));
-#define mpn_mulmid_n __MPN(mulmid_n)
-__GMP_DECLSPEC void mpn_mulmid_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_mulmid_n, (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
-#define mpn_mulmid __MPN(mulmid)
-__GMP_DECLSPEC void mpn_mulmid (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_mulmid,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t));
-#define mpn_submul_1c __MPN(submul_1c)
-__GMP_DECLSPEC mp_limb_t mpn_submul_1c (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_submul_1c,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t));
#ifndef mpn_redc_1 /* if not done with cpuvec in a fat binary */
-#define mpn_redc_1 __MPN(redc_1)
-__GMP_DECLSPEC mp_limb_t mpn_redc_1 (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_redc_1,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
#endif
#ifndef mpn_redc_2 /* if not done with cpuvec in a fat binary */
-#define mpn_redc_2 __MPN(redc_2)
-__GMP_DECLSPEC mp_limb_t mpn_redc_2 (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
+__GMP_INTERN (mp_limb_t, mpn_redc_2,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
#endif
-#define mpn_redc_n __MPN(redc_n)
-__GMP_DECLSPEC void mpn_redc_n (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr);
-
+__GMP_INTERN (void, mpn_redc_n,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr));
#ifndef mpn_mod_1_1p_cps /* if not done with cpuvec in a fat binary */
-#define mpn_mod_1_1p_cps __MPN(mod_1_1p_cps)
-__GMP_DECLSPEC void mpn_mod_1_1p_cps (mp_limb_t [4], mp_limb_t);
+__GMP_INTERN (void, mpn_mod_1_1p_cps, (mp_limb_t [4], mp_limb_t));
#endif
#ifndef mpn_mod_1_1p /* if not done with cpuvec in a fat binary */
-#define mpn_mod_1_1p __MPN(mod_1_1p)
-__GMP_DECLSPEC mp_limb_t mpn_mod_1_1p (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t [4]) __GMP_ATTRIBUTE_PURE;
+__GMP_INTERN_A (mp_limb_t, mpn_mod_1_1p,
+ (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t [4]),
+ __GMP_ATTRIBUTE_PURE);
#endif
#ifndef mpn_mod_1s_2p_cps /* if not done with cpuvec in a fat binary */
-#define mpn_mod_1s_2p_cps __MPN(mod_1s_2p_cps)
-__GMP_DECLSPEC void mpn_mod_1s_2p_cps (mp_limb_t [5], mp_limb_t);
+__GMP_INTERN (void, mpn_mod_1s_2p_cps, (mp_limb_t [5], mp_limb_t));
#endif
#ifndef mpn_mod_1s_2p /* if not done with cpuvec in a fat binary */
-#define mpn_mod_1s_2p __MPN(mod_1s_2p)
-__GMP_DECLSPEC mp_limb_t mpn_mod_1s_2p (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t [5]) __GMP_ATTRIBUTE_PURE;
+__GMP_INTERN_A (mp_limb_t, mpn_mod_1s_2p,
+ (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t [5]),
+ __GMP_ATTRIBUTE_PURE);
#endif
#ifndef mpn_mod_1s_3p_cps /* if not done with cpuvec in a fat binary */
-#define mpn_mod_1s_3p_cps __MPN(mod_1s_3p_cps)
-__GMP_DECLSPEC void mpn_mod_1s_3p_cps (mp_limb_t [6], mp_limb_t);
+__GMP_INTERN (void, mpn_mod_1s_3p_cps, (mp_limb_t [6], mp_limb_t));
#endif
#ifndef mpn_mod_1s_3p /* if not done with cpuvec in a fat binary */
-#define mpn_mod_1s_3p __MPN(mod_1s_3p)
-__GMP_DECLSPEC mp_limb_t mpn_mod_1s_3p (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t [6]) __GMP_ATTRIBUTE_PURE;
+__GMP_INTERN_A (mp_limb_t, mpn_mod_1s_3p,
+ (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t [6]),
+ __GMP_ATTRIBUTE_PURE);
#endif
#ifndef mpn_mod_1s_4p_cps /* if not done with cpuvec in a fat binary */
-#define mpn_mod_1s_4p_cps __MPN(mod_1s_4p_cps)
-__GMP_DECLSPEC void mpn_mod_1s_4p_cps (mp_limb_t [7], mp_limb_t);
+__GMP_INTERN (void, mpn_mod_1s_4p_cps, (mp_limb_t [7], mp_limb_t));
#endif
#ifndef mpn_mod_1s_4p /* if not done with cpuvec in a fat binary */
-#define mpn_mod_1s_4p __MPN(mod_1s_4p)
-__GMP_DECLSPEC mp_limb_t mpn_mod_1s_4p (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t [7]) __GMP_ATTRIBUTE_PURE;
+__GMP_INTERN_A (mp_limb_t, mpn_mod_1s_4p,
+ (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t [7]),
+ __GMP_ATTRIBUTE_PURE);
#endif
-#define mpn_bc_mulmod_bnm1 __MPN(bc_mulmod_bnm1)
-__GMP_DECLSPEC void mpn_bc_mulmod_bnm1 (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_mulmod_bnm1 __MPN(mulmod_bnm1)
-__GMP_DECLSPEC void mpn_mulmod_bnm1 (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_mulmod_bnm1_next_size __MPN(mulmod_bnm1_next_size)
-__GMP_DECLSPEC mp_size_t mpn_mulmod_bnm1_next_size (mp_size_t) ATTRIBUTE_CONST;
+__GMP_INTERN (void, mpn_bc_mulmod_bnm1,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (void, mpn_mulmod_bnm1,
+ (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_srcptr,
+ mp_size_t, mp_ptr));
+__GMP_INTERN_A (mp_size_t, mpn_mulmod_bnm1_next_size, (mp_size_t),
+ ATTRIBUTE_CONST);
+
static inline mp_size_t
-mpn_mulmod_bnm1_itch (mp_size_t rn, mp_size_t an, mp_size_t bn) {
+mpn_mulmod_bnm1_itch (mp_size_t rn, mp_size_t an, mp_size_t bn)
+{
mp_size_t n, itch;
n = rn >> 1;
- itch = rn + 4 +
- (an > n ? (bn > n ? rn : n) : 0);
+ itch = rn + 4 + (an > n ? (bn > n ? rn : n) : 0);
return itch;
}
-#define mpn_sqrmod_bnm1 __MPN(sqrmod_bnm1)
-__GMP_DECLSPEC void mpn_sqrmod_bnm1 (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_sqrmod_bnm1_next_size __MPN(sqrmod_bnm1_next_size)
-__GMP_DECLSPEC mp_size_t mpn_sqrmod_bnm1_next_size (mp_size_t) ATTRIBUTE_CONST;
+__GMP_INTERN (void, mpn_sqrmod_bnm1,
+ (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN_A (mp_size_t, mpn_sqrmod_bnm1_next_size, (mp_size_t),
+ ATTRIBUTE_CONST);
+
static inline mp_size_t
-mpn_sqrmod_bnm1_itch (mp_size_t rn, mp_size_t an) {
+mpn_sqrmod_bnm1_itch (mp_size_t rn, mp_size_t an)
+{
mp_size_t n, itch;
n = rn >> 1;
- itch = rn + 3 +
- (an > n ? an : 0);
+ itch = rn + 3 + (an > n ? an : 0);
return itch;
}
@@ -1216,7 +1274,10 @@ typedef struct {
(__rstate, rp, bits); \
} while (0)
-__GMP_DECLSPEC void __gmp_randinit_mt_noseed (gmp_randstate_t);
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX
+
+__GMP_INTERN (void, __gmp_randinit_mt_noseed, (gmp_randstate_t));
/* __gmp_rands is the global state for the old-style random functions, and
@@ -1231,8 +1292,9 @@ __GMP_DECLSPEC void __gmp_randinit_mt_noseed (gmp_randstate_t);
functions are recommended to applications which care about randomness, so
the old functions aren't too important. */
-__GMP_DECLSPEC extern char __gmp_rands_initialized;
-__GMP_DECLSPEC extern gmp_randstate_t __gmp_rands;
+/* ??? Test programs reference these directly, and break if hidden. */
+__GMP_DECLSPEC extern char __gmp_rands_initialized;
+__GMP_DECLSPEC extern gmp_randstate_t __gmp_rands;
#define RANDS \
((__gmp_rands_initialized ? 0 \
@@ -1294,281 +1356,284 @@ __GMP_DECLSPEC extern gmp_randstate_t __gmp_rands;
#define MPN_TOOM42_MULMID_MINSIZE 4
-#define mpn_sqr_diagonal __MPN(sqr_diagonal)
-__GMP_DECLSPEC void mpn_sqr_diagonal (mp_ptr, mp_srcptr, mp_size_t);
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX __g
+
+__GMP_INTERN (void, mpn_sqr_diagonal, (mp_ptr, mp_srcptr, mp_size_t));
-#define mpn_sqr_diag_addlsh1 __MPN(sqr_diag_addlsh1)
-__GMP_DECLSPEC void mpn_sqr_diag_addlsh1 (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_sqr_diag_addlsh1,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
-#define mpn_toom_interpolate_5pts __MPN(toom_interpolate_5pts)
-__GMP_DECLSPEC void mpn_toom_interpolate_5pts (mp_ptr, mp_ptr, mp_ptr, mp_size_t, mp_size_t, int, mp_limb_t);
+__GMP_INTERN (void, mpn_toom_interpolate_5pts,
+ (mp_ptr, mp_ptr, mp_ptr, mp_size_t, mp_size_t, int, mp_limb_t));
enum toom6_flags {toom6_all_pos = 0, toom6_vm1_neg = 1, toom6_vm2_neg = 2};
-#define mpn_toom_interpolate_6pts __MPN(toom_interpolate_6pts)
-__GMP_DECLSPEC void mpn_toom_interpolate_6pts (mp_ptr, mp_size_t, enum toom6_flags, mp_ptr, mp_ptr, mp_ptr, mp_size_t);
+__GMP_INTERN (void, mpn_toom_interpolate_6pts,
+ (mp_ptr, mp_size_t, enum toom6_flags, mp_ptr, mp_ptr,
+ mp_ptr, mp_size_t));
enum toom7_flags { toom7_w1_neg = 1, toom7_w3_neg = 2 };
-#define mpn_toom_interpolate_7pts __MPN(toom_interpolate_7pts)
-__GMP_DECLSPEC void mpn_toom_interpolate_7pts (mp_ptr, mp_size_t, enum toom7_flags, mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom_interpolate_7pts,
+ (mp_ptr, mp_size_t, enum toom7_flags, mp_ptr, mp_ptr,
+ mp_ptr, mp_ptr, mp_size_t, mp_ptr));
-#define mpn_toom_interpolate_8pts __MPN(toom_interpolate_8pts)
-__GMP_DECLSPEC void mpn_toom_interpolate_8pts (mp_ptr, mp_size_t, mp_ptr, mp_ptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom_interpolate_8pts,
+ (mp_ptr, mp_size_t, mp_ptr, mp_ptr, mp_size_t, mp_ptr));
-#define mpn_toom_interpolate_12pts __MPN(toom_interpolate_12pts)
-__GMP_DECLSPEC void mpn_toom_interpolate_12pts (mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_size_t, mp_size_t, int, mp_ptr);
+__GMP_INTERN (void, mpn_toom_interpolate_12pts,
+ (mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_size_t, mp_size_t,
+ int, mp_ptr));
-#define mpn_toom_interpolate_16pts __MPN(toom_interpolate_16pts)
-__GMP_DECLSPEC void mpn_toom_interpolate_16pts (mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_size_t, mp_size_t, int, mp_ptr);
+__GMP_INTERN (void, mpn_toom_interpolate_16pts,
+ (mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_size_t,
+ mp_size_t, int, mp_ptr));
-#define mpn_toom_couple_handling __MPN(toom_couple_handling)
-__GMP_DECLSPEC void mpn_toom_couple_handling (mp_ptr, mp_size_t, mp_ptr, int, mp_size_t, int, int);
+__GMP_INTERN (void, mpn_toom_couple_handling,
+ (mp_ptr, mp_size_t, mp_ptr, int, mp_size_t, int, int));
-#define mpn_toom_eval_dgr3_pm1 __MPN(toom_eval_dgr3_pm1)
-__GMP_DECLSPEC int mpn_toom_eval_dgr3_pm1 (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_size_t, mp_ptr);
+__GMP_INTERN (int, mpn_toom_eval_dgr3_pm1,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_size_t, mp_ptr));
-#define mpn_toom_eval_dgr3_pm2 __MPN(toom_eval_dgr3_pm2)
-__GMP_DECLSPEC int mpn_toom_eval_dgr3_pm2 (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_size_t, mp_ptr);
+__GMP_INTERN (int, mpn_toom_eval_dgr3_pm2,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_size_t, mp_ptr));
-#define mpn_toom_eval_pm1 __MPN(toom_eval_pm1)
-__GMP_DECLSPEC int mpn_toom_eval_pm1 (mp_ptr, mp_ptr, unsigned, mp_srcptr, mp_size_t, mp_size_t, mp_ptr);
+__GMP_INTERN (int, mpn_toom_eval_pm1,
+ (mp_ptr, mp_ptr, unsigned, mp_srcptr, mp_size_t,
+ mp_size_t, mp_ptr));
-#define mpn_toom_eval_pm2 __MPN(toom_eval_pm2)
-__GMP_DECLSPEC int mpn_toom_eval_pm2 (mp_ptr, mp_ptr, unsigned, mp_srcptr, mp_size_t, mp_size_t, mp_ptr);
+__GMP_INTERN (int, mpn_toom_eval_pm2,
+ (mp_ptr, mp_ptr, unsigned, mp_srcptr, mp_size_t,
+ mp_size_t, mp_ptr));
-#define mpn_toom_eval_pm2exp __MPN(toom_eval_pm2exp)
-__GMP_DECLSPEC int mpn_toom_eval_pm2exp (mp_ptr, mp_ptr, unsigned, mp_srcptr, mp_size_t, mp_size_t, unsigned, mp_ptr);
+__GMP_INTERN (int, mpn_toom_eval_pm2exp,
+ (mp_ptr, mp_ptr, unsigned, mp_srcptr, mp_size_t,
+ mp_size_t, unsigned, mp_ptr));
-#define mpn_toom_eval_pm2rexp __MPN(toom_eval_pm2rexp)
-__GMP_DECLSPEC int mpn_toom_eval_pm2rexp (mp_ptr, mp_ptr, unsigned, mp_srcptr, mp_size_t, mp_size_t, unsigned, mp_ptr);
+__GMP_INTERN (int, mpn_toom_eval_pm2rexp,
+ (mp_ptr, mp_ptr, unsigned, mp_srcptr, mp_size_t,
+ mp_size_t, unsigned, mp_ptr));
-#define mpn_toom22_mul __MPN(toom22_mul)
-__GMP_DECLSPEC void mpn_toom22_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom22_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom32_mul __MPN(toom32_mul)
-__GMP_DECLSPEC void mpn_toom32_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom32_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom42_mul __MPN(toom42_mul)
-__GMP_DECLSPEC void mpn_toom42_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom42_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom52_mul __MPN(toom52_mul)
-__GMP_DECLSPEC void mpn_toom52_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom52_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom62_mul __MPN(toom62_mul)
-__GMP_DECLSPEC void mpn_toom62_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom62_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom2_sqr __MPN(toom2_sqr)
-__GMP_DECLSPEC void mpn_toom2_sqr (mp_ptr, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom2_sqr, (mp_ptr, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom33_mul __MPN(toom33_mul)
-__GMP_DECLSPEC void mpn_toom33_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom33_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom43_mul __MPN(toom43_mul)
-__GMP_DECLSPEC void mpn_toom43_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom43_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom53_mul __MPN(toom53_mul)
-__GMP_DECLSPEC void mpn_toom53_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom53_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom54_mul __MPN(toom54_mul)
-__GMP_DECLSPEC void mpn_toom54_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom54_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom63_mul __MPN(toom63_mul)
-__GMP_DECLSPEC void mpn_toom63_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom63_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom3_sqr __MPN(toom3_sqr)
-__GMP_DECLSPEC void mpn_toom3_sqr (mp_ptr, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom3_sqr, (mp_ptr, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom44_mul __MPN(toom44_mul)
-__GMP_DECLSPEC void mpn_toom44_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom44_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom4_sqr __MPN(toom4_sqr)
-__GMP_DECLSPEC void mpn_toom4_sqr (mp_ptr, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom4_sqr, (mp_ptr, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom6h_mul __MPN(toom6h_mul)
-__GMP_DECLSPEC void mpn_toom6h_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom6h_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom6_sqr __MPN(toom6_sqr)
-__GMP_DECLSPEC void mpn_toom6_sqr (mp_ptr, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom6_sqr, (mp_ptr, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom8h_mul __MPN(toom8h_mul)
-__GMP_DECLSPEC void mpn_toom8h_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom8h_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom8_sqr __MPN(toom8_sqr)
-__GMP_DECLSPEC void mpn_toom8_sqr (mp_ptr, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom8_sqr, (mp_ptr, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_toom42_mulmid __MPN(toom42_mulmid)
-__GMP_DECLSPEC void mpn_toom42_mulmid (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_toom42_mulmid,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_fft_best_k __MPN(fft_best_k)
-__GMP_DECLSPEC int mpn_fft_best_k (mp_size_t, int) ATTRIBUTE_CONST;
+__GMP_INTERN_A (int, mpn_fft_best_k, (mp_size_t, int), ATTRIBUTE_CONST);
-#define mpn_mul_fft __MPN(mul_fft)
-__GMP_DECLSPEC mp_limb_t mpn_mul_fft (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, int);
+__GMP_INTERN (mp_limb_t, mpn_mul_fft,
+ (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_srcptr,
+ mp_size_t, int));
-#define mpn_mul_fft_full __MPN(mul_fft_full)
-__GMP_DECLSPEC void mpn_mul_fft_full (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_mul_fft_full,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t));
-#define mpn_nussbaumer_mul __MPN(nussbaumer_mul)
-__GMP_DECLSPEC void mpn_nussbaumer_mul (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t);
+__GMP_INTERN (void, mpn_nussbaumer_mul,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t));
-#define mpn_fft_next_size __MPN(fft_next_size)
-__GMP_DECLSPEC mp_size_t mpn_fft_next_size (mp_size_t, int) ATTRIBUTE_CONST;
+__GMP_INTERN_A (mp_size_t, mpn_fft_next_size, (mp_size_t, int),
+ ATTRIBUTE_CONST);
-#define mpn_div_qr_2n_pi1 __MPN(div_qr_2n_pi1)
- __GMP_DECLSPEC mp_limb_t mpn_div_qr_2n_pi1 (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_div_qr_2n_pi1,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t,
+ mp_limb_t, mp_limb_t));
-#define mpn_div_qr_2u_pi1 __MPN(div_qr_2u_pi1)
- __GMP_DECLSPEC mp_limb_t mpn_div_qr_2u_pi1 (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t, int, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_div_qr_2u_pi1,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t,
+ mp_limb_t, int, mp_limb_t));
-#define mpn_sbpi1_div_qr __MPN(sbpi1_div_qr)
-__GMP_DECLSPEC mp_limb_t mpn_sbpi1_div_qr (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sbpi1_div_qr,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_sbpi1_div_q __MPN(sbpi1_div_q)
-__GMP_DECLSPEC mp_limb_t mpn_sbpi1_div_q (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sbpi1_div_q,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_sbpi1_divappr_q __MPN(sbpi1_divappr_q)
-__GMP_DECLSPEC mp_limb_t mpn_sbpi1_divappr_q (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sbpi1_divappr_q,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_dcpi1_div_qr __MPN(dcpi1_div_qr)
-__GMP_DECLSPEC mp_limb_t mpn_dcpi1_div_qr (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, gmp_pi1_t *);
-#define mpn_dcpi1_div_qr_n __MPN(dcpi1_div_qr_n)
-__GMP_DECLSPEC mp_limb_t mpn_dcpi1_div_qr_n (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, gmp_pi1_t *, mp_ptr);
+__GMP_INTERN (mp_limb_t, mpn_dcpi1_div_qr,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, gmp_pi1_t *));
-#define mpn_dcpi1_div_q __MPN(dcpi1_div_q)
-__GMP_DECLSPEC mp_limb_t mpn_dcpi1_div_q (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, gmp_pi1_t *);
+__GMP_INTERN (mp_limb_t, mpn_dcpi1_div_qr_n,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, gmp_pi1_t *, mp_ptr));
-#define mpn_dcpi1_divappr_q __MPN(dcpi1_divappr_q)
-__GMP_DECLSPEC mp_limb_t mpn_dcpi1_divappr_q (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, gmp_pi1_t *);
-#define mpn_dcpi1_divappr_q_n __MPN(dcpi1_divappr_q_n)
-__GMP_DECLSPEC mp_limb_t mpn_dcpi1_divappr_q_n (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, gmp_pi1_t *, mp_ptr);
+__GMP_INTERN (mp_limb_t, mpn_dcpi1_div_q,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, gmp_pi1_t *));
-#define mpn_mu_div_qr __MPN(mu_div_qr)
-__GMP_DECLSPEC mp_limb_t mpn_mu_div_qr (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_mu_div_qr_itch __MPN(mu_div_qr_itch)
-__GMP_DECLSPEC mp_size_t mpn_mu_div_qr_itch (mp_size_t, mp_size_t, int);
-#define mpn_mu_div_qr_choose_in __MPN(mu_div_qr_choose_in)
-__GMP_DECLSPEC mp_size_t mpn_mu_div_qr_choose_in (mp_size_t, mp_size_t, int);
+__GMP_INTERN (mp_limb_t, mpn_dcpi1_divappr_q,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, gmp_pi1_t *));
+__GMP_INTERN (mp_limb_t, mpn_dcpi1_divappr_q_n,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, gmp_pi1_t *, mp_ptr));
-#define mpn_preinv_mu_div_qr __MPN(preinv_mu_div_qr)
-__GMP_DECLSPEC mp_limb_t mpn_preinv_mu_div_qr (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_preinv_mu_div_qr_itch __MPN(preinv_mu_div_qr_itch)
-__GMP_DECLSPEC mp_size_t mpn_preinv_mu_div_qr_itch (mp_size_t, mp_size_t, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_mu_div_qr,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr,
+ mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_mu_div_qr_itch, (mp_size_t, mp_size_t, int));
+__GMP_INTERN (mp_size_t, mpn_mu_div_qr_choose_in, (mp_size_t, mp_size_t, int));
-#define mpn_mu_divappr_q __MPN(mu_divappr_q)
-__GMP_DECLSPEC mp_limb_t mpn_mu_divappr_q (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_mu_divappr_q_itch __MPN(mu_divappr_q_itch)
-__GMP_DECLSPEC mp_size_t mpn_mu_divappr_q_itch (mp_size_t, mp_size_t, int);
-#define mpn_mu_divappr_q_choose_in __MPN(mu_divappr_q_choose_in)
-__GMP_DECLSPEC mp_size_t mpn_mu_divappr_q_choose_in (mp_size_t, mp_size_t, int);
+__GMP_INTERN (mp_limb_t, mpn_preinv_mu_div_qr,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr,
+ mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_preinv_mu_div_qr_itch,
+ (mp_size_t, mp_size_t, mp_size_t));
-#define mpn_preinv_mu_divappr_q __MPN(preinv_mu_divappr_q)
-__GMP_DECLSPEC mp_limb_t mpn_preinv_mu_divappr_q (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (mp_limb_t, mpn_mu_divappr_q,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_mu_divappr_q_itch, (mp_size_t, mp_size_t, int));
+__GMP_INTERN (mp_size_t, mpn_mu_divappr_q_choose_in,
+ (mp_size_t, mp_size_t, int));
-#define mpn_mu_div_q __MPN(mu_div_q)
-__GMP_DECLSPEC mp_limb_t mpn_mu_div_q (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_mu_div_q_itch __MPN(mu_div_q_itch)
-__GMP_DECLSPEC mp_size_t mpn_mu_div_q_itch (mp_size_t, mp_size_t, int);
+__GMP_INTERN (mp_limb_t, mpn_preinv_mu_divappr_q,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t,
+ mp_srcptr, mp_size_t, mp_ptr));
-#define mpn_div_q __MPN(div_q)
-__GMP_DECLSPEC void mpn_div_q (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (mp_limb_t, mpn_mu_div_q,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_mu_div_q_itch, (mp_size_t, mp_size_t, int));
-#define mpn_invert __MPN(invert)
-__GMP_DECLSPEC void mpn_invert (mp_ptr, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_div_q,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
+
+__GMP_INTERN (void, mpn_invert, (mp_ptr, mp_srcptr, mp_size_t, mp_ptr));
#define mpn_invert_itch(n) mpn_invertappr_itch(n)
-#define mpn_ni_invertappr __MPN(ni_invertappr)
-__GMP_DECLSPEC mp_limb_t mpn_ni_invertappr (mp_ptr, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_invertappr __MPN(invertappr)
-__GMP_DECLSPEC mp_limb_t mpn_invertappr (mp_ptr, mp_srcptr, mp_size_t, mp_ptr);
+__GMP_INTERN (mp_limb_t, mpn_ni_invertappr,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (mp_limb_t, mpn_invertappr,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_ptr));
#define mpn_invertappr_itch(n) (3 * (n) + 2)
-#define mpn_binvert __MPN(binvert)
-__GMP_DECLSPEC void mpn_binvert (mp_ptr, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_binvert_itch __MPN(binvert_itch)
-__GMP_DECLSPEC mp_size_t mpn_binvert_itch (mp_size_t);
+__GMP_INTERN (void, mpn_binvert, (mp_ptr, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_binvert_itch, (mp_size_t));
-#define mpn_bdiv_q_1 __MPN(bdiv_q_1)
-__GMP_DECLSPEC mp_limb_t mpn_bdiv_q_1 (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_bdiv_q_1,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_pi1_bdiv_q_1 __MPN(pi1_bdiv_q_1)
-__GMP_DECLSPEC mp_limb_t mpn_pi1_bdiv_q_1 (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t, int);
+__GMP_INTERN (mp_limb_t, mpn_pi1_bdiv_q_1,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t, int));
-#define mpn_sbpi1_bdiv_qr __MPN(sbpi1_bdiv_qr)
-__GMP_DECLSPEC mp_limb_t mpn_sbpi1_bdiv_qr (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sbpi1_bdiv_qr,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_sbpi1_bdiv_q __MPN(sbpi1_bdiv_q)
-__GMP_DECLSPEC void mpn_sbpi1_bdiv_q (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (void, mpn_sbpi1_bdiv_q,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_dcpi1_bdiv_qr __MPN(dcpi1_bdiv_qr)
-__GMP_DECLSPEC mp_limb_t mpn_dcpi1_bdiv_qr (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t);
-#define mpn_dcpi1_bdiv_qr_n_itch __MPN(dcpi1_bdiv_qr_n_itch)
-__GMP_DECLSPEC mp_size_t mpn_dcpi1_bdiv_qr_n_itch (mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_dcpi1_bdiv_qr,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t));
+__GMP_INTERN (mp_size_t, mpn_dcpi1_bdiv_qr_n_itch, (mp_size_t));
-#define mpn_dcpi1_bdiv_qr_n __MPN(dcpi1_bdiv_qr_n)
-__GMP_DECLSPEC mp_limb_t mpn_dcpi1_bdiv_qr_n (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_ptr);
-#define mpn_dcpi1_bdiv_q __MPN(dcpi1_bdiv_q)
-__GMP_DECLSPEC void mpn_dcpi1_bdiv_q (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_dcpi1_bdiv_qr_n,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_ptr));
+__GMP_INTERN (void, mpn_dcpi1_bdiv_q,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_dcpi1_bdiv_q_n_itch __MPN(dcpi1_bdiv_q_n_itch)
-__GMP_DECLSPEC mp_size_t mpn_dcpi1_bdiv_q_n_itch (mp_size_t);
-#define mpn_dcpi1_bdiv_q_n __MPN(dcpi1_bdiv_q_n)
-__GMP_DECLSPEC void mpn_dcpi1_bdiv_q_n (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_ptr);
+__GMP_INTERN (mp_size_t, mpn_dcpi1_bdiv_q_n_itch, (mp_size_t));
+__GMP_INTERN (void, mpn_dcpi1_bdiv_q_n,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_ptr));
-#define mpn_mu_bdiv_qr __MPN(mu_bdiv_qr)
-__GMP_DECLSPEC mp_limb_t mpn_mu_bdiv_qr (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_mu_bdiv_qr_itch __MPN(mu_bdiv_qr_itch)
-__GMP_DECLSPEC mp_size_t mpn_mu_bdiv_qr_itch (mp_size_t, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_mu_bdiv_qr,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr,
+ mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_mu_bdiv_qr_itch, (mp_size_t, mp_size_t));
-#define mpn_mu_bdiv_q __MPN(mu_bdiv_q)
-__GMP_DECLSPEC void mpn_mu_bdiv_q (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_mu_bdiv_q_itch __MPN(mu_bdiv_q_itch)
-__GMP_DECLSPEC mp_size_t mpn_mu_bdiv_q_itch (mp_size_t, mp_size_t);
+__GMP_INTERN (void, mpn_mu_bdiv_q,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_mu_bdiv_q_itch, (mp_size_t, mp_size_t));
-#define mpn_bdiv_qr __MPN(bdiv_qr)
-__GMP_DECLSPEC mp_limb_t mpn_bdiv_qr (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_bdiv_qr_itch __MPN(bdiv_qr_itch)
-__GMP_DECLSPEC mp_size_t mpn_bdiv_qr_itch (mp_size_t, mp_size_t);
+__GMP_INTERN (mp_limb_t, mpn_bdiv_qr,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_srcptr,
+ mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_bdiv_qr_itch, (mp_size_t, mp_size_t));
-#define mpn_bdiv_q __MPN(bdiv_q)
-__GMP_DECLSPEC void mpn_bdiv_q (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_bdiv_q_itch __MPN(bdiv_q_itch)
-__GMP_DECLSPEC mp_size_t mpn_bdiv_q_itch (mp_size_t, mp_size_t);
+__GMP_INTERN (void, mpn_bdiv_q,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_bdiv_q_itch, (mp_size_t, mp_size_t));
-#define mpn_divexact __MPN(divexact)
-__GMP_DECLSPEC void mpn_divexact (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t);
-#define mpn_divexact_itch __MPN(divexact_itch)
-__GMP_DECLSPEC mp_size_t mpn_divexact_itch (mp_size_t, mp_size_t);
+__GMP_INTERN (void, mpn_divexact,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t));
+__GMP_INTERN (mp_size_t, mpn_divexact_itch, (mp_size_t, mp_size_t));
#ifndef mpn_bdiv_dbm1c /* if not done with cpuvec in a fat binary */
-#define mpn_bdiv_dbm1c __MPN(bdiv_dbm1c)
-__GMP_DECLSPEC mp_limb_t mpn_bdiv_dbm1c (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_bdiv_dbm1c,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t));
#endif
#define mpn_bdiv_dbm1(dst, src, size, divisor) \
mpn_bdiv_dbm1c (dst, src, size, divisor, __GMP_CAST (mp_limb_t, 0))
-#define mpn_powm __MPN(powm)
-__GMP_DECLSPEC void mpn_powm (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_powlo __MPN(powlo)
-__GMP_DECLSPEC void mpn_powlo (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_size_t, mp_ptr);
-#define mpn_powm_sec __MPN(powm_sec)
-__GMP_DECLSPEC void mpn_powm_sec (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_powm_sec_itch __MPN(powm_sec_itch)
-__GMP_DECLSPEC mp_size_t mpn_powm_sec_itch (mp_size_t, mp_size_t, mp_size_t);
-#define mpn_tabselect __MPN(tabselect)
-__GMP_DECLSPEC void mpn_tabselect (volatile mp_limb_t *, volatile mp_limb_t *, mp_size_t, mp_size_t, mp_size_t);
-#define mpn_addcnd_n __MPN(addcnd_n)
-__GMP_DECLSPEC mp_limb_t mpn_addcnd_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
-#define mpn_subcnd_n __MPN(subcnd_n)
-__GMP_DECLSPEC mp_limb_t mpn_subcnd_n (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
-
-#define mpn_sb_div_qr_sec __MPN(sb_div_qr_sec)
-__GMP_DECLSPEC void mpn_sb_div_qr_sec (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_sbpi1_div_qr_sec __MPN(sbpi1_div_qr_sec)
-__GMP_DECLSPEC mp_limb_t mpn_sbpi1_div_qr_sec (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t, mp_ptr);
-#define mpn_sb_div_r_sec __MPN(sb_div_r_sec)
-__GMP_DECLSPEC void mpn_sb_div_r_sec (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_sbpi1_div_r_sec __MPN(sbpi1_div_r_sec)
-__GMP_DECLSPEC void mpn_sbpi1_div_r_sec (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t, mp_ptr);
+__GMP_INTERN (void, mpn_powm,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t,
+ mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (void, mpn_powlo,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_size_t, mp_ptr));
+__GMP_INTERN (void, mpn_powm_sec,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t,
+ mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_powm_sec_itch,
+ (mp_size_t, mp_size_t, mp_size_t));
+__GMP_INTERN (void, mpn_tabselect,
+ (volatile mp_limb_t *, volatile mp_limb_t *, mp_size_t,
+ mp_size_t, mp_size_t));
+__GMP_INTERN (mp_limb_t, mpn_addcnd_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
+__GMP_INTERN (mp_limb_t, mpn_subcnd_n,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
+
+__GMP_INTERN (void, mpn_sb_div_qr_sec,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (mp_limb_t, mpn_sbpi1_div_qr_sec,
+ (mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t,
+ mp_limb_t, mp_ptr));
+__GMP_INTERN (void, mpn_sb_div_r_sec,
+ (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (void, mpn_sbpi1_div_r_sec,
+ (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t, mp_ptr));
#ifndef DIVEXACT_BY3_METHOD
@@ -1621,40 +1686,33 @@ __GMP_DECLSPEC void mpn_sbpi1_div_r_sec (mp_ptr, mp_size_t, mp_srcptr, mp_size_t
(15 & 1 * mpn_bdiv_dbm1 (dst, src, size, __GMP_CAST (mp_limb_t, GMP_NUMB_MASK / 15)))
#endif
-#define mpz_divexact_gcd __gmpz_divexact_gcd
-__GMP_DECLSPEC void mpz_divexact_gcd (mpz_ptr, mpz_srcptr, mpz_srcptr);
+__GMP_INTERN (void, mpz_divexact_gcd, (mpz_ptr, mpz_srcptr, mpz_srcptr));
-#define mpz_prodlimbs __gmpz_prodlimbs
-__GMP_DECLSPEC mp_size_t mpz_prodlimbs (mpz_ptr, mp_ptr, mp_size_t);
+__GMP_INTERN (mp_size_t, mpz_prodlimbs, (mpz_ptr, mp_ptr, mp_size_t));
-#define mpz_oddfac_1 __gmpz_oddfac_1
-__GMP_DECLSPEC void mpz_oddfac_1 (mpz_ptr, mp_limb_t, unsigned);
+__GMP_INTERN (void, mpz_oddfac_1, (mpz_ptr, mp_limb_t, unsigned));
-#define mpz_inp_str_nowhite __gmpz_inp_str_nowhite
#ifdef _GMP_H_HAVE_FILE
-__GMP_DECLSPEC size_t mpz_inp_str_nowhite (mpz_ptr, FILE *, int, int, size_t);
+__GMP_INTERN (size_t, mpz_inp_str_nowhite, (mpz_ptr, FILE *, int, int, size_t));
#endif
-#define mpn_divisible_p __MPN(divisible_p)
-__GMP_DECLSPEC int mpn_divisible_p (mp_srcptr, mp_size_t, mp_srcptr, mp_size_t) __GMP_ATTRIBUTE_PURE;
+__GMP_INTERN_A (int, mpn_divisible_p,
+ (mp_srcptr, mp_size_t, mp_srcptr, mp_size_t),
+ __GMP_ATTRIBUTE_PURE);
-#define mpn_rootrem __MPN(rootrem)
-__GMP_DECLSPEC mp_size_t mpn_rootrem (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_size_t, mpn_rootrem,
+ (mp_ptr, mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_broot __MPN(broot)
-__GMP_DECLSPEC void mpn_broot (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (void, mpn_broot, (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_broot_invm1 __MPN(broot_invm1)
-__GMP_DECLSPEC void mpn_broot_invm1 (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (void, mpn_broot_invm1, (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
-#define mpn_brootinv __MPN(brootinv)
-__GMP_DECLSPEC void mpn_brootinv (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_ptr);
+__GMP_INTERN (void, mpn_brootinv,
+ (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_ptr));
-#define mpn_bsqrt __MPN(bsqrt)
-__GMP_DECLSPEC void mpn_bsqrt (mp_ptr, mp_srcptr, mp_bitcnt_t, mp_ptr);
+__GMP_INTERN (void, mpn_bsqrt, (mp_ptr, mp_srcptr, mp_bitcnt_t, mp_ptr));
-#define mpn_bsqrtinv __MPN(bsqrtinv)
-__GMP_DECLSPEC int mpn_bsqrtinv (mp_ptr, mp_srcptr, mp_bitcnt_t, mp_ptr);
+__GMP_INTERN (int, mpn_bsqrtinv, (mp_ptr, mp_srcptr, mp_bitcnt_t, mp_ptr));
#if defined (_CRAY)
#define MPN_COPY_INCR(dst, src, n) \
@@ -1666,12 +1724,6 @@ __GMP_DECLSPEC int mpn_bsqrtinv (mp_ptr, mp_srcptr, mp_bitcnt_t, mp_ptr);
} while (0)
#endif
-/* used by test programs, hence __GMP_DECLSPEC */
-#ifndef mpn_copyi /* if not done with cpuvec in a fat binary */
-#define mpn_copyi __MPN(copyi)
-__GMP_DECLSPEC void mpn_copyi (mp_ptr, mp_srcptr, mp_size_t);
-#endif
-
#if ! defined (MPN_COPY_INCR) && HAVE_NATIVE_mpn_copyi
#define MPN_COPY_INCR(dst, src, size) \
do { \
@@ -1719,12 +1771,6 @@ __GMP_DECLSPEC void mpn_copyi (mp_ptr, mp_srcptr, mp_size_t);
} while (0)
#endif
-/* used by test programs, hence __GMP_DECLSPEC */
-#ifndef mpn_copyd /* if not done with cpuvec in a fat binary */
-#define mpn_copyd __MPN(copyd)
-__GMP_DECLSPEC void mpn_copyd (mp_ptr, mp_srcptr, mp_size_t);
-#endif
-
#if ! defined (MPN_COPY_DECR) && HAVE_NATIVE_mpn_copyd
#define MPN_COPY_DECR(dst, src, size) \
do { \
@@ -1959,13 +2005,16 @@ _mpz_newalloc (mpz_ptr z, mp_size_t n)
FIB_TABLE_LUCNUM_LIMIT (in fib_table.h) is the largest n for which L[n] =
F[n] + 2*F[n-1] fits in a limb. */
-__GMP_DECLSPEC extern const mp_limb_t __gmp_fib_table[];
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX
+
+__GMP_INTERN (extern const mp_limb_t, __gmp_fib_table, []);
#define FIB_TABLE(n) (__gmp_fib_table[(n)+1])
-extern const mp_limb_t __gmp_oddfac_table[];
-extern const mp_limb_t __gmp_odd2fac_table[];
-extern const unsigned char __gmp_fac2cnt_table[];
-extern const mp_limb_t __gmp_limbroots_table[];
+__GMP_INTERN (extern const mp_limb_t, __gmp_oddfac_table, []);
+__GMP_INTERN (extern const mp_limb_t, __gmp_odd2fac_table, []);
+__GMP_INTERN (extern const unsigned char, __gmp_fac2cnt_table, []);
+__GMP_INTERN (extern const mp_limb_t, __gmp_limbroots_table, []);
/* n^log <= GMP_NUMB_MAX, a limb can store log factors less than n */
static inline unsigned
@@ -1985,14 +2034,14 @@ typedef struct
unsigned char s[SIEVESIZE + 1]; /* sieve table */
} gmp_primesieve_t;
-#define gmp_init_primesieve __gmp_init_primesieve
-__GMP_DECLSPEC void gmp_init_primesieve (gmp_primesieve_t *);
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX __
+
+__GMP_INTERN (void, gmp_init_primesieve, (gmp_primesieve_t *));
-#define gmp_nextprime __gmp_nextprime
-__GMP_DECLSPEC unsigned long int gmp_nextprime (gmp_primesieve_t *);
+__GMP_INTERN (unsigned long int, gmp_nextprime, (gmp_primesieve_t *));
-#define gmp_primesieve __gmp_primesieve
-__GMP_DECLSPEC mp_limb_t gmp_primesieve (mp_ptr, mp_limb_t);
+__GMP_INTERN (mp_limb_t, gmp_primesieve, (mp_ptr, mp_limb_t));
#ifndef MUL_TOOM22_THRESHOLD
@@ -2318,8 +2367,12 @@ struct fft_table_nk
#define ASSERT_FILE ""
#endif
-__GMP_DECLSPEC void __gmp_assert_header (const char *, int);
-__GMP_DECLSPEC void __gmp_assert_fail (const char *, int, const char *) ATTRIBUTE_NORETURN;
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX
+
+__GMP_INTERN (void, __gmp_assert_header, (const char *, int));
+__GMP_INTERN_A (void, __gmp_assert_fail,
+ (const char *, int, const char *), ATTRIBUTE_NORETURN);
#if HAVE_STRINGIZE
#define ASSERT_FAIL(expr) __gmp_assert_fail (ASSERT_FILE, ASSERT_LINE, #expr)
@@ -2529,11 +2582,15 @@ __GMP_DECLSPEC void __gmp_assert_fail (const char *, int, const char *) ATTRIBUT
MPN_LOGOPS_N_INLINE (rp, up, vp, n, ~(__a ^ __b) & GMP_NUMB_MASK)
#endif
-#define mpn_trialdiv __MPN(trialdiv)
-__GMP_DECLSPEC mp_limb_t mpn_trialdiv (mp_srcptr, mp_size_t, mp_size_t, int *);
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX __g
+
+__GMP_INTERN (mp_limb_t, mpn_trialdiv,
+ (mp_srcptr, mp_size_t, mp_size_t, int *));
-#define mpn_remove __MPN(remove)
-__GMP_DECLSPEC mp_bitcnt_t mpn_remove (mp_ptr, mp_size_t *, mp_ptr, mp_size_t, mp_ptr, mp_size_t, mp_bitcnt_t);
+__GMP_INTERN (mp_bitcnt_t, mpn_remove,
+ (mp_ptr, mp_size_t *, mp_ptr, mp_size_t, mp_ptr,
+ mp_size_t, mp_bitcnt_t));
/* ADDC_LIMB sets w=x+y and cout to 0 or 1 for a carry from that addition. */
@@ -2802,8 +2859,8 @@ struct bases
mp_limb_t big_base_inverted;
};
-#define mp_bases __MPN(bases)
-__GMP_DECLSPEC extern const struct bases mp_bases[257];
+__GMP_INTERN_FULL (extern const struct bases, mp_bases, [257],
+ , , __gmpn_bases);
/* Compute the number of digits in base for nbits bits, making sure the result
@@ -2938,8 +2995,8 @@ __GMP_DECLSPEC extern const struct bases mp_bases[257];
/* Use a library function for invert_limb, if available. */
-#define mpn_invert_limb __MPN(invert_limb)
-__GMP_DECLSPEC mp_limb_t mpn_invert_limb (mp_limb_t) ATTRIBUTE_CONST;
+__GMP_INTERN_A (mp_limb_t, mpn_invert_limb, (mp_limb_t), ATTRIBUTE_CONST);
+
#if ! defined (invert_limb) && HAVE_NATIVE_mpn_invert_limb
#define invert_limb(invxl,xl) \
do { \
@@ -3096,8 +3153,9 @@ __GMP_DECLSPEC mp_limb_t mpn_invert_limb (mp_limb_t) ATTRIBUTE_CONST;
} while (0)
#ifndef mpn_preinv_divrem_1 /* if not done with cpuvec in a fat binary */
-#define mpn_preinv_divrem_1 __MPN(preinv_divrem_1)
-__GMP_DECLSPEC mp_limb_t mpn_preinv_divrem_1 (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t, int);
+__GMP_INTERN (mp_limb_t, mpn_preinv_divrem_1,
+ (mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t,
+ mp_limb_t, int));
#endif
@@ -3129,8 +3187,8 @@ __GMP_DECLSPEC mp_limb_t mpn_preinv_divrem_1 (mp_ptr, mp_size_t, mp_srcptr, mp_s
#ifndef mpn_mod_34lsub1 /* if not done with cpuvec in a fat binary */
-#define mpn_mod_34lsub1 __MPN(mod_34lsub1)
-__GMP_DECLSPEC mp_limb_t mpn_mod_34lsub1 (mp_srcptr, mp_size_t) __GMP_ATTRIBUTE_PURE;
+__GMP_INTERN_A (mp_limb_t, mpn_mod_34lsub1, (mp_srcptr, mp_size_t),
+ __GMP_ATTRIBUTE_PURE);
#endif
@@ -3147,8 +3205,7 @@ __GMP_DECLSPEC mp_limb_t mpn_mod_34lsub1 (mp_srcptr, mp_size_t) __GMP_ATTRIBUTE_
#endif
#ifndef mpn_divexact_1 /* if not done with cpuvec in a fat binary */
-#define mpn_divexact_1 __MPN(divexact_1)
-__GMP_DECLSPEC void mpn_divexact_1 (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (void, mpn_divexact_1, (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
#endif
#define MPN_DIVREM_OR_DIVEXACT_1(rp, up, n, d) \
@@ -3163,13 +3220,15 @@ __GMP_DECLSPEC void mpn_divexact_1 (mp_ptr, mp_srcptr, mp_size_t, mp_limb_t);
} while (0)
#ifndef mpn_modexact_1c_odd /* if not done with cpuvec in a fat binary */
-#define mpn_modexact_1c_odd __MPN(modexact_1c_odd)
-__GMP_DECLSPEC mp_limb_t mpn_modexact_1c_odd (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t) __GMP_ATTRIBUTE_PURE;
+__GMP_INTERN_A (mp_limb_t, mpn_modexact_1c_odd,
+ (mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t),
+ __GMP_ATTRIBUTE_PURE);
#endif
#if HAVE_NATIVE_mpn_modexact_1_odd
-#define mpn_modexact_1_odd __MPN(modexact_1_odd)
-__GMP_DECLSPEC mp_limb_t mpn_modexact_1_odd (mp_srcptr, mp_size_t, mp_limb_t) __GMP_ATTRIBUTE_PURE;
+__GMP_INTERN_A (mp_limb_t, mpn_modexact_1_odd,
+ (mp_srcptr, mp_size_t, mp_limb_t),
+ __GMP_ATTRIBUTE_PURE);
#else
#define mpn_modexact_1_odd(src,size,divisor) \
mpn_modexact_1c_odd (src, size, divisor, CNST_LIMB(0))
@@ -3200,8 +3259,8 @@ __GMP_DECLSPEC mp_limb_t mpn_modexact_1_odd (mp_srcptr, mp_size_t, mp_limb_t) __
A 4-bit inverse can be obtained effectively from xoring bits 1 and 2 into
bit 3, for instance with (((n + 2) & 4) << 1) ^ n. */
-#define binvert_limb_table __gmp_binvert_limb_table
-__GMP_DECLSPEC extern const unsigned char binvert_limb_table[128];
+__GMP_INTERN_FULL (extern const unsigned char, binvert_limb_table, [128],
+ , , __gmp_binvert_limb_table);
#define binvert_limb(inv,n) \
do { \
@@ -3747,10 +3806,11 @@ union double_extract
We assume doubles have 53 mantissa bits. */
#define LIMBS_PER_DOUBLE ((53 + GMP_NUMB_BITS - 2) / GMP_NUMB_BITS + 1)
-__GMP_DECLSPEC int __gmp_extract_double (mp_ptr, double);
+__GMP_INTERN_FULL (int, __gmp_extract_double, (mp_ptr, double),
+ , , __gmp_extract_double);
-#define mpn_get_d __gmpn_get_d
-__GMP_DECLSPEC double mpn_get_d (mp_srcptr, mp_size_t, mp_size_t, long) __GMP_ATTRIBUTE_PURE;
+__GMP_INTERN_A (double, mpn_get_d, (mp_srcptr, mp_size_t, mp_size_t, long),
+ __GMP_ATTRIBUTE_PURE);
/* DOUBLE_NAN_INF_ACTION executes code a_nan if x is a NaN, or executes
@@ -3818,15 +3878,17 @@ __GMP_DECLSPEC double mpn_get_d (mp_srcptr, mp_size_t, mp_size_t, long) __GMP_AT
#define FORCE_DOUBLE(d) do { } while (0)
#endif
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX
-__GMP_DECLSPEC extern const unsigned char __gmp_digit_value_tab[];
+__GMP_INTERN (extern const unsigned char, __gmp_digit_value_tab, []);
-__GMP_DECLSPEC extern int __gmp_junk;
-__GMP_DECLSPEC extern const int __gmp_0;
-__GMP_DECLSPEC void __gmp_exception (int) ATTRIBUTE_NORETURN;
-__GMP_DECLSPEC void __gmp_divide_by_zero (void) ATTRIBUTE_NORETURN;
-__GMP_DECLSPEC void __gmp_sqrt_of_negative (void) ATTRIBUTE_NORETURN;
-__GMP_DECLSPEC void __gmp_invalid_operation (void) ATTRIBUTE_NORETURN;
+__GMP_INTERN_DATA (int, __gmp_junk);
+__GMP_INTERN_DATA (const int, __gmp_0);
+__GMP_INTERN_A (void, __gmp_exception, (int), ATTRIBUTE_NORETURN);
+__GMP_INTERN_A (void, __gmp_divide_by_zero, (void), ATTRIBUTE_NORETURN);
+__GMP_INTERN_A (void, __gmp_sqrt_of_negative, (void), ATTRIBUTE_NORETURN);
+__GMP_INTERN_A (void, __gmp_invalid_operation, (void), ATTRIBUTE_NORETURN);
#define GMP_ERROR(code) __gmp_exception (code)
#define DIVIDE_BY_ZERO __gmp_divide_by_zero ()
#define SQRT_OF_NEGATIVE __gmp_sqrt_of_negative ()
@@ -4016,8 +4078,8 @@ __GMP_DECLSPEC void __gmp_invalid_operation (void) ATTRIBUTE_NORETURN;
} while (0)
/* State for the Jacobi computation using Lehmer. */
-#define jacobi_table __gmp_jacobi_table
-__GMP_DECLSPEC extern const unsigned char jacobi_table[208];
+__GMP_INTERN_FULL (extern const unsigned char, jacobi_table, [208],
+ , , __gmp_jacobi_table);
/* Bit layout for the initial state. b must be odd.
@@ -4079,13 +4141,17 @@ mpn_jacobi_update (unsigned bits, unsigned denominator, unsigned q)
return bits = jacobi_table[(bits << 3) + (denominator << 2) + q];
}
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX __g
+
/* Matrix multiplication */
-#define mpn_matrix22_mul __MPN(matrix22_mul)
-__GMP_DECLSPEC void mpn_matrix22_mul (mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_srcptr, mp_srcptr, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_matrix22_mul_strassen __MPN(matrix22_mul_strassen)
-__GMP_DECLSPEC void mpn_matrix22_mul_strassen (mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_srcptr, mp_srcptr, mp_srcptr, mp_size_t, mp_ptr);
-#define mpn_matrix22_mul_itch __MPN(matrix22_mul_itch)
-__GMP_DECLSPEC mp_size_t mpn_matrix22_mul_itch (mp_size_t, mp_size_t);
+__GMP_INTERN (void, mpn_matrix22_mul,
+ (mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_size_t, mp_srcptr,
+ mp_srcptr, mp_srcptr, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (void, mpn_matrix22_mul_strassen,
+ (mp_ptr, mp_ptr, mp_ptr, mp_ptr, mp_size_t, mp_srcptr,
+ mp_srcptr, mp_srcptr, mp_srcptr, mp_size_t, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_matrix22_mul_itch, (mp_size_t, mp_size_t));
#ifndef MATRIX22_STRASSEN_THRESHOLD
#define MATRIX22_STRASSEN_THRESHOLD 30
@@ -4122,17 +4188,21 @@ struct hgcd_matrix1
mp_limb_t u[2][2];
};
-#define mpn_hgcd2 __MPN (hgcd2)
-__GMP_DECLSPEC int mpn_hgcd2 (mp_limb_t, mp_limb_t, mp_limb_t, mp_limb_t, struct hgcd_matrix1 *);
+__GMP_INTERN (int, mpn_hgcd2,
+ (mp_limb_t, mp_limb_t, mp_limb_t, mp_limb_t,
+ struct hgcd_matrix1 *));
-#define mpn_hgcd_mul_matrix1_vector __MPN (hgcd_mul_matrix1_vector)
-__GMP_DECLSPEC mp_size_t mpn_hgcd_mul_matrix1_vector (const struct hgcd_matrix1 *, mp_ptr, mp_srcptr, mp_ptr, mp_size_t);
+__GMP_INTERN (mp_size_t, mpn_hgcd_mul_matrix1_vector,
+ (const struct hgcd_matrix1 *, mp_ptr, mp_srcptr,
+ mp_ptr, mp_size_t));
-#define mpn_matrix22_mul1_inverse_vector __MPN (matrix22_mul1_inverse_vector)
-__GMP_DECLSPEC mp_size_t mpn_matrix22_mul1_inverse_vector (const struct hgcd_matrix1 *, mp_ptr, mp_srcptr, mp_ptr, mp_size_t);
+__GMP_INTERN (mp_size_t, mpn_matrix22_mul1_inverse_vector,
+ (const struct hgcd_matrix1 *, mp_ptr, mp_srcptr,
+ mp_ptr, mp_size_t));
-#define mpn_hgcd2_jacobi __MPN (hgcd2_jacobi)
-__GMP_DECLSPEC int mpn_hgcd2_jacobi (mp_limb_t, mp_limb_t, mp_limb_t, mp_limb_t, struct hgcd_matrix1 *, unsigned *);
+__GMP_INTERN (int, mpn_hgcd2_jacobi,
+ (mp_limb_t, mp_limb_t, mp_limb_t, mp_limb_t,
+ struct hgcd_matrix1 *, unsigned *));
struct hgcd_matrix
{
@@ -4143,52 +4213,55 @@ struct hgcd_matrix
#define MPN_HGCD_MATRIX_INIT_ITCH(n) (4 * ((n+1)/2 + 1))
-#define mpn_hgcd_matrix_init __MPN (hgcd_matrix_init)
-__GMP_DECLSPEC void mpn_hgcd_matrix_init (struct hgcd_matrix *, mp_size_t, mp_ptr);
+__GMP_INTERN (void, mpn_hgcd_matrix_init,
+ (struct hgcd_matrix *, mp_size_t, mp_ptr));
-#define mpn_hgcd_matrix_update_q __MPN (hgcd_matrix_update_q)
-__GMP_DECLSPEC void mpn_hgcd_matrix_update_q (struct hgcd_matrix *, mp_srcptr, mp_size_t, unsigned, mp_ptr);
+__GMP_INTERN (void, mpn_hgcd_matrix_update_q,
+ (struct hgcd_matrix *, mp_srcptr, mp_size_t, unsigned, mp_ptr));
-#define mpn_hgcd_matrix_mul_1 __MPN (hgcd_matrix_mul_1)
-__GMP_DECLSPEC void mpn_hgcd_matrix_mul_1 (struct hgcd_matrix *, const struct hgcd_matrix1 *, mp_ptr);
+__GMP_INTERN (void, mpn_hgcd_matrix_mul_1,
+ (struct hgcd_matrix *, const struct hgcd_matrix1 *, mp_ptr));
-#define mpn_hgcd_matrix_mul __MPN (hgcd_matrix_mul)
-__GMP_DECLSPEC void mpn_hgcd_matrix_mul (struct hgcd_matrix *, const struct hgcd_matrix *, mp_ptr);
+__GMP_INTERN (void, mpn_hgcd_matrix_mul,
+ (struct hgcd_matrix *, const struct hgcd_matrix *, mp_ptr));
-#define mpn_hgcd_matrix_adjust __MPN (hgcd_matrix_adjust)
-__GMP_DECLSPEC mp_size_t mpn_hgcd_matrix_adjust (const struct hgcd_matrix *, mp_size_t, mp_ptr, mp_ptr, mp_size_t, mp_ptr);
+__GMP_INTERN (mp_size_t, mpn_hgcd_matrix_adjust,
+ (const struct hgcd_matrix *, mp_size_t, mp_ptr, mp_ptr,
+ mp_size_t, mp_ptr));
-#define mpn_hgcd_step __MPN(hgcd_step)
-__GMP_DECLSPEC mp_size_t mpn_hgcd_step (mp_size_t, mp_ptr, mp_ptr, mp_size_t, struct hgcd_matrix *, mp_ptr);
+__GMP_INTERN (mp_size_t, mpn_hgcd_step,
+ (mp_size_t, mp_ptr, mp_ptr, mp_size_t,
+ struct hgcd_matrix *, mp_ptr));
-#define mpn_hgcd_reduce __MPN(hgcd_reduce)
-__GMP_DECLSPEC mp_size_t mpn_hgcd_reduce (struct hgcd_matrix *, mp_ptr, mp_ptr, mp_size_t, mp_size_t, mp_ptr);
+__GMP_INTERN (mp_size_t, mpn_hgcd_reduce,
+ (struct hgcd_matrix *, mp_ptr, mp_ptr, mp_size_t,
+ mp_size_t, mp_ptr));
-#define mpn_hgcd_reduce_itch __MPN(hgcd_reduce_itch)
-__GMP_DECLSPEC mp_size_t mpn_hgcd_reduce_itch (mp_size_t, mp_size_t);
+__GMP_INTERN (mp_size_t, mpn_hgcd_reduce_itch, (mp_size_t, mp_size_t));
-#define mpn_hgcd_itch __MPN (hgcd_itch)
-__GMP_DECLSPEC mp_size_t mpn_hgcd_itch (mp_size_t);
+__GMP_INTERN (mp_size_t, mpn_hgcd_itch, (mp_size_t));
-#define mpn_hgcd __MPN (hgcd)
-__GMP_DECLSPEC mp_size_t mpn_hgcd (mp_ptr, mp_ptr, mp_size_t, struct hgcd_matrix *, mp_ptr);
+__GMP_INTERN (mp_size_t, mpn_hgcd,
+ (mp_ptr, mp_ptr, mp_size_t, struct hgcd_matrix *, mp_ptr));
-#define mpn_hgcd_appr_itch __MPN (hgcd_appr_itch)
-__GMP_DECLSPEC mp_size_t mpn_hgcd_appr_itch (mp_size_t);
+__GMP_INTERN (mp_size_t, mpn_hgcd_appr_itch, (mp_size_t));
-#define mpn_hgcd_appr __MPN (hgcd_appr)
-__GMP_DECLSPEC int mpn_hgcd_appr (mp_ptr, mp_ptr, mp_size_t, struct hgcd_matrix *, mp_ptr);
+__GMP_INTERN (int, mpn_hgcd_appr,
+ (mp_ptr, mp_ptr, mp_size_t, struct hgcd_matrix *, mp_ptr));
-#define mpn_hgcd_jacobi __MPN (hgcd_jacobi)
-__GMP_DECLSPEC mp_size_t mpn_hgcd_jacobi (mp_ptr, mp_ptr, mp_size_t, struct hgcd_matrix *, unsigned *, mp_ptr);
+__GMP_INTERN (mp_size_t, mpn_hgcd_jacobi,
+ (mp_ptr, mp_ptr, mp_size_t, struct hgcd_matrix *,
+ unsigned *, mp_ptr));
-typedef void gcd_subdiv_step_hook(void *, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t, int);
+typedef void gcd_subdiv_step_hook (void *, mp_srcptr, mp_size_t,
+ mp_srcptr, mp_size_t, int);
/* Needs storage for the quotient */
#define MPN_GCD_SUBDIV_STEP_ITCH(n) (n)
-#define mpn_gcd_subdiv_step __MPN(gcd_subdiv_step)
-__GMP_DECLSPEC mp_size_t mpn_gcd_subdiv_step (mp_ptr, mp_ptr, mp_size_t, mp_size_t, gcd_subdiv_step_hook *, void *, mp_ptr);
+__GMP_INTERN (mp_size_t, mpn_gcd_subdiv_step,
+ (mp_ptr, mp_ptr, mp_size_t, mp_size_t,
+ gcd_subdiv_step_hook *, void *, mp_ptr));
struct gcdext_ctx
{
@@ -4203,13 +4276,12 @@ struct gcdext_ctx
mp_ptr u0, u1, tp;
};
-#define mpn_gcdext_hook __MPN (gcdext_hook)
-gcd_subdiv_step_hook mpn_gcdext_hook;
+__GMP_INTERN_DATA (gcd_subdiv_step_hook, mpn_gcdext_hook);
#define MPN_GCDEXT_LEHMER_N_ITCH(n) (4*(n) + 3)
-#define mpn_gcdext_lehmer_n __MPN(gcdext_lehmer_n)
-__GMP_DECLSPEC mp_size_t mpn_gcdext_lehmer_n (mp_ptr, mp_ptr, mp_size_t *, mp_ptr, mp_ptr, mp_size_t, mp_ptr);
+__GMP_INTERN (mp_size_t, mpn_gcdext_lehmer_n,
+ (mp_ptr, mp_ptr, mp_size_t *, mp_ptr, mp_ptr, mp_size_t, mp_ptr));
/* 4*(an + 1) + 4*(bn + 1) + an */
#define MPN_GCDEXT_LEHMER_ITCH(an, bn) (5*(an) + 4*(bn) + 8)
@@ -4249,12 +4321,13 @@ typedef struct powers powers_t;
#define mpn_dc_get_str_powtab_alloc(n) ((n) + 2 * GMP_LIMB_BITS)
#define mpn_dc_get_str_itch(n) ((n) + GMP_LIMB_BITS)
-#define mpn_dc_set_str __MPN(dc_set_str)
-__GMP_DECLSPEC mp_size_t mpn_dc_set_str (mp_ptr, const unsigned char *, size_t, const powers_t *, mp_ptr);
-#define mpn_bc_set_str __MPN(bc_set_str)
-__GMP_DECLSPEC mp_size_t mpn_bc_set_str (mp_ptr, const unsigned char *, size_t, int);
-#define mpn_set_str_compute_powtab __MPN(set_str_compute_powtab)
-__GMP_DECLSPEC void mpn_set_str_compute_powtab (powers_t *, mp_ptr, mp_size_t, int);
+__GMP_INTERN (mp_size_t, mpn_dc_set_str,
+ (mp_ptr, const unsigned char *, size_t,
+ const powers_t *, mp_ptr));
+__GMP_INTERN (mp_size_t, mpn_bc_set_str,
+ (mp_ptr, const unsigned char *, size_t, int));
+__GMP_INTERN (void, mpn_set_str_compute_powtab,
+ (powers_t *, mp_ptr, mp_size_t, int));
/* __GMPF_BITS_TO_PREC applies a minimum 53 bits, rounds upwards to a whole
@@ -4266,7 +4339,10 @@ __GMP_DECLSPEC void mpn_set_str_compute_powtab (powers_t *, mp_ptr, mp_size
#define __GMPF_PREC_TO_BITS(n) \
((mp_bitcnt_t) (n) * GMP_NUMB_BITS - GMP_NUMB_BITS)
-__GMP_DECLSPEC extern mp_size_t __gmp_default_fp_limb_precision;
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX
+
+__GMP_INTERN_DATA (mp_size_t, __gmp_default_fp_limb_precision);
/* Compute the number of base-b digits corresponding to nlimbs limbs, rounding
down. */
@@ -4419,9 +4495,10 @@ struct gmp_asprintf_t {
} \
} while (0)
-__GMP_DECLSPEC int __gmp_asprintf_memory (struct gmp_asprintf_t *, const char *, size_t);
-__GMP_DECLSPEC int __gmp_asprintf_reps (struct gmp_asprintf_t *, int, int);
-__GMP_DECLSPEC int __gmp_asprintf_final (struct gmp_asprintf_t *);
+__GMP_INTERN (int, __gmp_asprintf_memory,
+ (struct gmp_asprintf_t *, const char *, size_t));
+__GMP_INTERN (int, __gmp_asprintf_reps, (struct gmp_asprintf_t *, int, int));
+__GMP_INTERN (int, __gmp_asprintf_final, (struct gmp_asprintf_t *));
/* buf is where to write the next output, and size is how much space is left
there. If the application passed size==0 then that's what we'll have
@@ -4467,13 +4544,19 @@ struct gmp_snprintf_t {
DOPRNT_MEMORY (ptr, len); \
} while (0)
-__GMP_DECLSPEC int __gmp_doprnt (const struct doprnt_funs_t *, void *, const char *, va_list);
-__GMP_DECLSPEC int __gmp_doprnt_integer (const struct doprnt_funs_t *, void *, const struct doprnt_params_t *, const char *);
+__GMP_INTERN (int, __gmp_doprnt,
+ (const struct doprnt_funs_t *, void *, const char *, va_list));
+__GMP_INTERN (int, __gmp_doprnt_integer,
+ (const struct doprnt_funs_t *, void *,
+ const struct doprnt_params_t *, const char *));
-#define __gmp_doprnt_mpf __gmp_doprnt_mpf2
-__GMP_DECLSPEC int __gmp_doprnt_mpf (const struct doprnt_funs_t *, void *, const struct doprnt_params_t *, const char *, mpf_srcptr);
+__GMP_INTERN_FULL (int, __gmp_doprnt_mpf,
+ (const struct doprnt_funs_t *, void *,
+ const struct doprnt_params_t *, const char *, mpf_srcptr),
+ , , __gmp_doprnt_mpf2);
-__GMP_DECLSPEC int __gmp_replacement_vsnprintf (char *, size_t, const char *, va_list);
+__GMP_INTERN (int, __gmp_replacement_vsnprintf,
+ (char *, size_t, const char *, va_list));
#endif /* _GMP_H_HAVE_VA_LIST */
@@ -4488,11 +4571,13 @@ struct gmp_doscan_funs_t {
gmp_doscan_get_t get;
gmp_doscan_unget_t unget;
};
-extern const struct gmp_doscan_funs_t __gmp_fscanf_funs;
-extern const struct gmp_doscan_funs_t __gmp_sscanf_funs;
+__GMP_INTERN_DATA (const struct gmp_doscan_funs_t, __gmp_fscanf_funs);
+__GMP_INTERN_DATA (const struct gmp_doscan_funs_t, __gmp_sscanf_funs);
#if _GMP_H_HAVE_VA_LIST
-__GMP_DECLSPEC int __gmp_doscan (const struct gmp_doscan_funs_t *, void *, const char *, va_list);
+__GMP_INTERN (int, __gmp_doscan,
+ (const struct gmp_doscan_funs_t *, void *,
+ const char *, va_list));
#endif
@@ -4601,10 +4686,12 @@ __GMP_DECLSPEC void __gmpn_cpuvec_init (void);
((LIKELY (__gmpn_cpuvec_initialized) ? 0 : (__gmpn_cpuvec_init (), 0)), \
__gmpn_cpuvec.field)
+#undef __GMP_ABI_PREFIX
+#define __GMP_ABI_PREFIX __g
#if HAVE_NATIVE_mpn_add_nc
-#define mpn_add_nc __MPN(add_nc)
-__GMP_DECLSPEC mp_limb_t mpn_add_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_add_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
#else
static inline
mp_limb_t
@@ -4618,8 +4705,8 @@ mpn_add_nc (mp_ptr rp, mp_srcptr up, mp_srcptr vp, mp_size_t n, mp_limb_t ci)
#endif
#if HAVE_NATIVE_mpn_sub_nc
-#define mpn_sub_nc __MPN(sub_nc)
-__GMP_DECLSPEC mp_limb_t mpn_sub_nc (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t);
+__GMP_INTERN (mp_limb_t, mpn_sub_nc,
+ (mp_ptr, mp_srcptr, mp_srcptr, mp_size_t, mp_limb_t));
#else
static inline mp_limb_t
mpn_sub_nc (mp_ptr rp, mp_srcptr up, mp_srcptr vp, mp_size_t n, mp_limb_t ci)
--
1.8.1.2