Displaying 6 results from an estimated 6 matches for "_mm_srai_epi16".
2020 May 18 · 6 · [PATCH] SSE2/SSSE3 optimized version of get_checksum1() for x86-64
...28i sse_interleave_even_epi16(__m128i a, __m128i b) {
+    return sse_interleave_odd_epi16(
+        _mm_slli_si128(a, 2),
+        _mm_slli_si128(b, 2)
+    );
+}
+
+static inline __m128i sse_mulu_odd_epi8(__m128i a, __m128i b) {
+    return _mm_mullo_epi16(
+        _mm_srli_epi16(a, 8),
+        _mm_srai_epi16(b, 8)
+    );
+}
+
+static inline __m128i sse_mulu_even_epi8(__m128i a, __m128i b) {
+    return _mm_mullo_epi16(
+        _mm_and_si128(a, _mm_set1_epi16(0xFF)),
+        _mm_srai_epi16(_mm_slli_si128(b, 1), 8)
+    );
+}
+#endif
+
+static inline __m128i sse_hadds_epi16(__m128i a, __m128i b) {
+#i...
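For anyone skimming the snippet above: sse_mulu_odd_epi8() and sse_mulu_even_epi8() multiply the odd- and even-indexed bytes of two vectors into 16-bit lanes, treating the bytes of a as unsigned and the bytes of b as signed (the same operand signedness as the SSSE3 _mm_maddubs_epi16 they appear meant to stand in for). Below is a minimal, self-contained sanity check of that reading; it is a sketch, not part of the patch, and the test harness, input values and scalar reference are invented here purely for illustration.

/* Sanity-check sketch (not from the patch): compare the quoted SSE2 helpers
 * against a scalar reference.  Everything outside the two helpers is made up
 * for illustration. */
#include <emmintrin.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static inline __m128i sse_mulu_odd_epi8(__m128i a, __m128i b) {
    return _mm_mullo_epi16(
        _mm_srli_epi16(a, 8),                    /* zero-extend odd bytes of a */
        _mm_srai_epi16(b, 8)                     /* sign-extend odd bytes of b */
    );
}

static inline __m128i sse_mulu_even_epi8(__m128i a, __m128i b) {
    return _mm_mullo_epi16(
        _mm_and_si128(a, _mm_set1_epi16(0xFF)),  /* zero-extend even bytes of a */
        _mm_srai_epi16(_mm_slli_si128(b, 1), 8)  /* sign-extend even bytes of b */
    );
}

int main(void) {
    uint8_t a[16], b[16];
    for (int i = 0; i < 16; i++) {
        a[i] = (uint8_t)(i * 17 + 3);
        b[i] = (uint8_t)(250 - i * 13);          /* some values > 127, so sign matters */
    }

    __m128i va, vb;
    memcpy(&va, a, 16);
    memcpy(&vb, b, 16);

    int16_t odd[8], even[8];
    __m128i vo = sse_mulu_odd_epi8(va, vb);
    __m128i ve = sse_mulu_even_epi8(va, vb);
    memcpy(odd, &vo, 16);
    memcpy(even, &ve, 16);

    for (int i = 0; i < 8; i++) {
        int16_t ref_odd  = (int16_t)(a[2 * i + 1] * (int8_t)b[2 * i + 1]);
        int16_t ref_even = (int16_t)(a[2 * i]     * (int8_t)b[2 * i]);
        if (odd[i] != ref_odd || even[i] != ref_even) {
            printf("mismatch at lane %d\n", i);
            return 1;
        }
    }
    printf("helpers match the scalar reference\n");
    return 0;
}

Because an unsigned byte times a signed byte always fits in 16 bits, the low half returned by _mm_mullo_epi16() is the exact signed product, so no widening multiply is needed.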
2020 May 18 · 0 · [PATCH] SSE2/SSSE3 optimized version of get_checksum1() for x86-64
...) {
> +    return sse_interleave_odd_epi16(
> +        _mm_slli_si128(a, 2),
> +        _mm_slli_si128(b, 2)
> +    );
> +}
> +
> +static inline __m128i sse_mulu_odd_epi8(__m128i a, __m128i b) {
> +    return _mm_mullo_epi16(
> +        _mm_srli_epi16(a, 8),
> +        _mm_srai_epi16(b, 8)
> +    );
> +}
> +
> +static inline __m128i sse_mulu_even_epi8(__m128i a, __m128i b) {
> +    return _mm_mullo_epi16(
> +        _mm_and_si128(a, _mm_set1_epi16(0xFF)),
> +        _mm_srai_epi16(_mm_slli_si128(b, 1), 8)
> +    );
> +}
> +#endif
> +
> +stati...
2020 May 19 · 5 · [PATCHv2] SSE2/SSSE3 optimized version of get_checksum1() for x86-64
...128i b) {
+    return sse_interleave_odd_epi16(
+        _mm_slli_si128(a, 2),
+        _mm_slli_si128(b, 2)
+    );
+}
+
+__attribute__ ((target ("sse2"))) static inline __m128i sse_mulu_odd_epi8(__m128i a, __m128i b) {
+    return _mm_mullo_epi16(
+        _mm_srli_epi16(a, 8),
+        _mm_srai_epi16(b, 8)
+    );
+}
+
+__attribute__ ((target ("sse2"))) static inline __m128i sse_mulu_even_epi8(__m128i a, __m128i b) {
+    return _mm_mullo_epi16(
+        _mm_and_si128(a, _mm_set1_epi16(0xFF)),
+        _mm_srai_epi16(_mm_slli_si128(b, 1), 8)
+    );
+}
+
+__attribute__ ((target ("...
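Compared with the first version of the patch, the v2 helpers carry __attribute__ ((target ("sse2"))), which asks GCC/clang to generate SSE2 code for those specific functions regardless of the -m flags used for the rest of the translation unit. A rough sketch of the mechanism follows; the function name and body are placeholders, not taken from the patch.

/* Sketch only: placeholder name and body, not code from the patch.
 * The target attribute enables SSE2 code generation for this one function,
 * independent of the options the rest of the file is compiled with. */
#include <emmintrin.h>

__attribute__ ((target ("sse2")))
__m128i add_pairs_sse2(__m128i a, __m128i b)
{
    return _mm_add_epi16(a, b);    /* compiles to paddw thanks to target("sse2") */
}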
2020 May 18 · 2 · [PATCH] SSE2/SSSE3 optimized version of get_checksum1() for x86-64
...odd_epi16(
>> +        _mm_slli_si128(a, 2),
>> +        _mm_slli_si128(b, 2)
>> +    );
>> +}
>> +
>> +static inline __m128i sse_mulu_odd_epi8(__m128i a, __m128i b) {
>> +    return _mm_mullo_epi16(
>> +        _mm_srli_epi16(a, 8),
>> +        _mm_srai_epi16(b, 8)
>> +    );
>> +}
>> +
>> +static inline __m128i sse_mulu_even_epi8(__m128i a, __m128i b) {
>> +    return _mm_mullo_epi16(
>> +        _mm_and_si128(a, _mm_set1_epi16(0xFF)),
>> +        _mm_srai_epi16(_mm_slli_si128(b, 1), 8)
>> +    );
>>...
2020 May 20 · 0 · [PATCHv2] SSE2/SSSE3 optimized version of get_checksum1() for x86-64
...> +        _mm_slli_si128(a, 2),
> +        _mm_slli_si128(b, 2)
> +    );
> +}
> +
> +__attribute__ ((target ("sse2"))) static inline __m128i sse_mulu_odd_epi8(__m128i a, __m128i b) {
> +    return _mm_mullo_epi16(
> +        _mm_srli_epi16(a, 8),
> +        _mm_srai_epi16(b, 8)
> +    );
> +}
> +
> +__attribute__ ((target ("sse2"))) static inline __m128i sse_mulu_even_epi8(__m128i a, __m128i b) {
> +    return _mm_mullo_epi16(
> +        _mm_and_si128(a, _mm_set1_epi16(0xFF)),
> +        _mm_srai_epi16(_mm_slli_si128(b, 1), 8)
>...
2020 May 18 · 3 · [PATCH] SSE2/SSSE3 optimized version of get_checksum1() for x86-64
What do you base this on?
Per https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html:
"For the x86-32 compiler, you must use -march=cpu-type, -msse or
-msse2 switches to enable SSE extensions and make this option
effective. For the x86-64 compiler, these extensions are enabled by
default."
That reads to me like we're fine for SSE2. As stated in my comments,
SSSE3 support must be
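On x86-64, then, SSE2 can be assumed at compile time, but SSSE3 cannot, so an SSSE3 code path normally needs both a compile-time guard (the target attribute, or building one file with -mssse3) and a run-time CPU check before it is called. Below is a hedged sketch of the usual dispatch shape using the GCC/clang builtin __builtin_cpu_supports(); the function names and the trivial byte-sum payload are placeholders, not code from the patch.

/* Sketch only: placeholder names and a trivial payload, not code from the
 * patch.  One copy of the routine is compiled with SSSE3 enabled via the
 * target attribute, a portable fallback exists, and a run-time CPUID check
 * decides which one to call. */
#include <stddef.h>
#include <stdint.h>

__attribute__ ((target ("ssse3")))
static uint32_t sum_ssse3(const uint8_t *buf, size_t len)
{
    /* a real SSSE3 path would use e.g. _mm_maddubs_epi16(); kept scalar here */
    uint32_t s = 0;
    for (size_t i = 0; i < len; i++)
        s += buf[i];
    return s;
}

static uint32_t sum_portable(const uint8_t *buf, size_t len)
{
    uint32_t s = 0;
    for (size_t i = 0; i < len; i++)
        s += buf[i];
    return s;
}

uint32_t sum_dispatch(const uint8_t *buf, size_t len)
{
    if (__builtin_cpu_supports("ssse3"))   /* CPUID check at run time */
        return sum_ssse3(buf, len);
    return sum_portable(buf, len);
}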