Fix gcc(1) warning 'always_inline function might not be inlinable'.
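For context, here is a minimal standalone sketch of the warning and of why adding the plain "inline" keyword next to __always_inline silences it. This sketch is not part of the patch; the function names are illustrative, and the local macro assumes __always_inline expands to only the GCC attribute (which appears to be how these source trees define it, since adding "inline" is what the fix needs):

/* Standalone sketch; compile with: gcc -c sketch.c */
#define __always_inline __attribute__((__always_inline__))

/*
 * The attribute alone, without the inline keyword, is what makes GCC
 * emit "always_inline function might not be inlinable [-Wattributes]".
 */
static __always_inline int demo_warns(int x)
{
	return x * 2;
}

/* Declaring the function inline as well, as the patch does, avoids the warning. */
static inline __always_inline int demo_ok(int x)
{
	return x * 2;
}

int main(void)
{
	return demo_warns(1) + demo_ok(2);
}

The patch below applies the same change to every __always_inline function that GCC complains about in xz_dec_lzma2.c and sched_ule.c.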
diff -ru --exclude-from freebsd-src-diff-exclude-names /var/archive3/public/freebsd-releng-10.4-src/sys/contrib/xz-embedded/linux/lib/xz/xz_dec_lzma2.c freebsd-10.4/sys/contrib/xz-embedded/linux/lib/xz/xz_dec_lzma2.c
--- /var/archive3/public/freebsd-releng-10.4-src/sys/contrib/xz-embedded/linux/lib/xz/xz_dec_lzma2.c 2017-09-29 08:19:59.000000000 +0800
+++ freebsd-10.4/sys/contrib/xz-embedded/linux/lib/xz/xz_dec_lzma2.c 2019-08-17 23:17:02.867406078 +0800
@@ -475,7 +475,7 @@
}
/* Read the next input byte if needed. */
-static __always_inline void rc_normalize(struct rc_dec *rc)
+static inline __always_inline void rc_normalize(struct rc_dec *rc)
{
if (rc->range < RC_TOP_VALUE) {
rc->range <<= RC_SHIFT_BITS;
@@ -494,7 +494,7 @@
* of the code generated by GCC 3.x decreases 10-15 %. (GCC 4.3 doesn't care,
* and it generates 10-20 % faster code than GCC 3.x from this file anyway.)
*/
-static __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob)
+static inline __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob)
{
uint32_t bound;
int bit;
@@ -516,7 +516,7 @@
}
/* Decode a bittree starting from the most significant bit. */
-static __always_inline uint32_t rc_bittree(struct rc_dec *rc,
+static inline __always_inline uint32_t rc_bittree(struct rc_dec *rc,
uint16_t *probs, uint32_t limit)
{
uint32_t symbol = 1;
@@ -532,7 +532,7 @@
}
/* Decode a bittree starting from the least significant bit. */
-static __always_inline void rc_bittree_reverse(struct rc_dec *rc,
+static inline __always_inline void rc_bittree_reverse(struct rc_dec *rc,
uint16_t *probs,
uint32_t *dest, uint32_t limit)
{
diff -ru --exclude-from freebsd-src-diff-exclude-names /var/archive3/public/freebsd-releng-10.4-src/sys/kern/sched_ule.c freebsd-10.4/sys/kern/sched_ule.c
--- /var/archive3/public/freebsd-releng-10.4-src/sys/kern/sched_ule.c 2017-09-29 08:19:46.000000000 +0800
+++ freebsd-10.4/sys/kern/sched_ule.c 2019-08-17 23:17:02.868410531 +0800
@@ -622,7 +622,7 @@
for ((cpu) = 0; (cpu) <= mp_maxid; (cpu)++) \
if (CPU_ISSET(cpu, &mask))
-static __always_inline int cpu_search(const struct cpu_group *cg,
+static inline __always_inline int cpu_search(const struct cpu_group *cg,
struct cpu_search *low, struct cpu_search *high, const int match);
int __noinline cpu_search_lowest(const struct cpu_group *cg,
struct cpu_search *low);
@@ -642,7 +642,7 @@
* match argument. It is reduced to the minimum set for each case. It is
* also recursive to the depth of the tree.
*/
-static __always_inline int
+static inline __always_inline int
cpu_search(const struct cpu_group *cg, struct cpu_search *low,
struct cpu_search *high, const int match)
{