Fix gcc(1) warning 'always_inline function might not be inlinable'.
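gcc emits this warning when __attribute__((always_inline)) is applied to a
function that is not also declared inline; pairing the attribute with the
inline keyword silences it, which is all the hunks below do. A minimal
standalone sketch of the before/after (illustrative names, not part of this
patch; build with 'gcc -O2 -c demo.c'):

/* Before: gcc warns 'always_inline function might not be inlinable'
 * because the attribute appears without the inline keyword. */
static __attribute__((always_inline)) int bump(int x)
{
	return x + 1;
}

/* After: 'inline' sits next to the attribute, mirroring the
 * 'static inline __always_inline' spelling used in the hunks below. */
static inline __attribute__((always_inline)) int bump_fixed(int x)
{
	return x + 1;
}

int use(int x)
{
	return bump(x) + bump_fixed(x);
}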
diff -ru --exclude-from freebsd-src-diff-exclude-names /var/tmp/freebsd-10.3-src/usr/src/sys/contrib/xz-embedded/linux/lib/xz/xz_dec_lzma2.c freebsd-10.3/usr/src/sys/contrib/xz-embedded/linux/lib/xz/xz_dec_lzma2.c
--- /var/tmp/freebsd-10.3-src/usr/src/sys/contrib/xz-embedded/linux/lib/xz/xz_dec_lzma2.c 2016-03-25 09:09:50.000000000 +0800
+++ freebsd-10.3/usr/src/sys/contrib/xz-embedded/linux/lib/xz/xz_dec_lzma2.c 2019-03-23 15:15:26.433584781 +0800
@@ -475,7 +475,7 @@
}

/* Read the next input byte if needed. */
-static __always_inline void rc_normalize(struct rc_dec *rc)
+static inline __always_inline void rc_normalize(struct rc_dec *rc)
{
if (rc->range < RC_TOP_VALUE) {
rc->range <<= RC_SHIFT_BITS;
@@ -494,7 +494,7 @@
* of the code generated by GCC 3.x decreases 10-15 %. (GCC 4.3 doesn't care,
* and it generates 10-20 % faster code than GCC 3.x from this file anyway.)
*/
-static __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob)
+static inline __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob)
{
uint32_t bound;
int bit;
@@ -516,7 +516,7 @@
}

/* Decode a bittree starting from the most significant bit. */
-static __always_inline uint32_t rc_bittree(struct rc_dec *rc,
+static inline __always_inline uint32_t rc_bittree(struct rc_dec *rc,
uint16_t *probs, uint32_t limit)
{
uint32_t symbol = 1;
@@ -532,7 +532,7 @@
}

/* Decode a bittree starting from the least significant bit. */
-static __always_inline void rc_bittree_reverse(struct rc_dec *rc,
+static inline __always_inline void rc_bittree_reverse(struct rc_dec *rc,
uint16_t *probs,
uint32_t *dest, uint32_t limit)
{
diff -ru --exclude-from freebsd-src-diff-exclude-names /var/tmp/freebsd-10.3-src/usr/src/sys/kern/sched_ule.c freebsd-10.3/usr/src/sys/kern/sched_ule.c
--- /var/tmp/freebsd-10.3-src/usr/src/sys/kern/sched_ule.c 2016-03-25 09:09:26.000000000 +0800
+++ freebsd-10.3/usr/src/sys/kern/sched_ule.c 2019-03-11 20:06:15.381789355 +0800
@@ -622,7 +622,7 @@
for ((cpu) = 0; (cpu) <= mp_maxid; (cpu)++) \
if (CPU_ISSET(cpu, &mask))

-static __always_inline int cpu_search(const struct cpu_group *cg,
+static inline __always_inline int cpu_search(const struct cpu_group *cg,
struct cpu_search *low, struct cpu_search *high, const int match);
int __noinline cpu_search_lowest(const struct cpu_group *cg,
struct cpu_search *low);
@@ -642,7 +642,7 @@
* match argument. It is reduced to the minimum set for each case. It is
* also recursive to the depth of the tree.
*/
-static __always_inline int
+static inline __always_inline int
cpu_search(const struct cpu_group *cg, struct cpu_search *low,
struct cpu_search *high, const int match)
{
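Note: the warning arises because __always_inline in this tree expands to the
bare gcc attribute rather than to 'inline' plus the attribute (see
sys/cdefs.h for the exact definition); roughly:

#define __always_inline __attribute__((__always_inline__))

so the inline keyword has to be supplied at each declaration, as the hunks
above do.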