videodsp_init.c
/*
 * Copyright (C) 2002-2012 Michael Niedermayer
 * Copyright (C) 2012 Ronald S. Bultje
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"
#include "libavutil/attributes.h"
#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/cpu.h"
#include "libavutil/mem.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavcodec/videodsp.h"

#if HAVE_YASM
typedef void emu_edge_vfix_func(uint8_t *dst, x86_reg dst_stride,
                                const uint8_t *src, x86_reg src_stride,
                                x86_reg start_y, x86_reg end_y, x86_reg bh);
typedef void emu_edge_vvar_func(uint8_t *dst, x86_reg dst_stride,
                                const uint8_t *src, x86_reg src_stride,
                                x86_reg start_y, x86_reg end_y, x86_reg bh,
                                x86_reg w);

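/*
 * Vertical edge-extension helpers, implemented in external x86 assembly:
 * each "vfix" variant handles one fixed block width (1..22 bytes), while
 * the "vvar" variant takes the width as a runtime argument and covers
 * everything wider.
 */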
extern emu_edge_vfix_func ff_emu_edge_vfix1_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix2_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix3_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix4_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix5_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix6_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix7_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix8_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix9_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix10_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix11_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix12_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix13_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix14_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix15_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix16_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix17_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix18_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix19_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix20_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix21_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix22_mmx;
#if ARCH_X86_32
static emu_edge_vfix_func *vfixtbl_mmx[22] = {
    &ff_emu_edge_vfix1_mmx,  &ff_emu_edge_vfix2_mmx,  &ff_emu_edge_vfix3_mmx,
    &ff_emu_edge_vfix4_mmx,  &ff_emu_edge_vfix5_mmx,  &ff_emu_edge_vfix6_mmx,
    &ff_emu_edge_vfix7_mmx,  &ff_emu_edge_vfix8_mmx,  &ff_emu_edge_vfix9_mmx,
    &ff_emu_edge_vfix10_mmx, &ff_emu_edge_vfix11_mmx, &ff_emu_edge_vfix12_mmx,
    &ff_emu_edge_vfix13_mmx, &ff_emu_edge_vfix14_mmx, &ff_emu_edge_vfix15_mmx,
    &ff_emu_edge_vfix16_mmx, &ff_emu_edge_vfix17_mmx, &ff_emu_edge_vfix18_mmx,
    &ff_emu_edge_vfix19_mmx, &ff_emu_edge_vfix20_mmx, &ff_emu_edge_vfix21_mmx,
    &ff_emu_edge_vfix22_mmx
};
#endif
extern emu_edge_vvar_func ff_emu_edge_vvar_mmx;
extern emu_edge_vfix_func ff_emu_edge_vfix16_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix17_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix18_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix19_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix20_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix21_sse;
extern emu_edge_vfix_func ff_emu_edge_vfix22_sse;
static emu_edge_vfix_func *vfixtbl_sse[22] = {
    ff_emu_edge_vfix1_mmx,  ff_emu_edge_vfix2_mmx,  ff_emu_edge_vfix3_mmx,
    ff_emu_edge_vfix4_mmx,  ff_emu_edge_vfix5_mmx,  ff_emu_edge_vfix6_mmx,
    ff_emu_edge_vfix7_mmx,  ff_emu_edge_vfix8_mmx,  ff_emu_edge_vfix9_mmx,
    ff_emu_edge_vfix10_mmx, ff_emu_edge_vfix11_mmx, ff_emu_edge_vfix12_mmx,
    ff_emu_edge_vfix13_mmx, ff_emu_edge_vfix14_mmx, ff_emu_edge_vfix15_mmx,
    ff_emu_edge_vfix16_sse, ff_emu_edge_vfix17_sse, ff_emu_edge_vfix18_sse,
    ff_emu_edge_vfix19_sse, ff_emu_edge_vfix20_sse, ff_emu_edge_vfix21_sse,
    ff_emu_edge_vfix22_sse
};
extern emu_edge_vvar_func ff_emu_edge_vvar_sse;

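/* Note that the SSE table reuses the MMX routines for widths of 15 bytes or
 * less; only the 16..22 byte cases have dedicated SSE implementations. */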
typedef void emu_edge_hfix_func(uint8_t *dst, x86_reg dst_stride,
                                x86_reg start_x, x86_reg bh);
typedef void emu_edge_hvar_func(uint8_t *dst, x86_reg dst_stride,
                                x86_reg start_x, x86_reg n_words, x86_reg bh);

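/*
 * Horizontal edge-extension helpers: each "hfix" variant fills a fixed, even
 * number of columns (2..22 bytes), while the "hvar" variants take the width
 * as a word count at runtime. The tables below are indexed with
 * (width - 1) >> 1, matching the lookups in emulated_edge_mc() further down.
 */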
extern emu_edge_hfix_func ff_emu_edge_hfix2_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix4_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix6_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix8_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix10_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix12_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix14_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix16_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix18_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix20_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix22_mmx;
#if ARCH_X86_32
static emu_edge_hfix_func *hfixtbl_mmx[11] = {
    ff_emu_edge_hfix2_mmx,  ff_emu_edge_hfix4_mmx,  ff_emu_edge_hfix6_mmx,
    ff_emu_edge_hfix8_mmx,  ff_emu_edge_hfix10_mmx, ff_emu_edge_hfix12_mmx,
    ff_emu_edge_hfix14_mmx, ff_emu_edge_hfix16_mmx, ff_emu_edge_hfix18_mmx,
    ff_emu_edge_hfix20_mmx, ff_emu_edge_hfix22_mmx
};
#endif
extern emu_edge_hvar_func ff_emu_edge_hvar_mmx;
extern emu_edge_hfix_func ff_emu_edge_hfix16_sse2;
extern emu_edge_hfix_func ff_emu_edge_hfix18_sse2;
extern emu_edge_hfix_func ff_emu_edge_hfix20_sse2;
extern emu_edge_hfix_func ff_emu_edge_hfix22_sse2;
static emu_edge_hfix_func *hfixtbl_sse2[11] = {
    ff_emu_edge_hfix2_mmx,  ff_emu_edge_hfix4_mmx,   ff_emu_edge_hfix6_mmx,
    ff_emu_edge_hfix8_mmx,  ff_emu_edge_hfix10_mmx,  ff_emu_edge_hfix12_mmx,
    ff_emu_edge_hfix14_mmx, ff_emu_edge_hfix16_sse2, ff_emu_edge_hfix18_sse2,
    ff_emu_edge_hfix20_sse2, ff_emu_edge_hfix22_sse2
};
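/* As with the vertical tables, widths of 14 bytes or less fall back to the
 * MMX routines; only the 16..22 byte cases have SSE2-specific versions. */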
extern emu_edge_hvar_func ff_emu_edge_hvar_sse2;

static av_always_inline void emulated_edge_mc(uint8_t *dst, ptrdiff_t dst_stride,
                                              const uint8_t *src, ptrdiff_t src_stride,
                                              x86_reg block_w, x86_reg block_h,
                                              x86_reg src_x, x86_reg src_y,
                                              x86_reg w, x86_reg h,
                                              emu_edge_vfix_func **vfix_tbl,
                                              emu_edge_vvar_func *v_extend_var,
                                              emu_edge_hfix_func **hfix_tbl,
                                              emu_edge_hvar_func *h_extend_var)
{
    x86_reg start_y, start_x, end_y, end_x, src_y_add = 0, p;

    if (!w || !h)
        return;

    if (src_y >= h) {
        src      -= src_y * src_stride;
        src_y_add = h - 1;
        src_y     = h - 1;
    } else if (src_y <= -block_h) {
        src      -= src_y * src_stride;
        src_y_add = 1 - block_h;
        src_y     = 1 - block_h;
    }
    if (src_x >= w) {
        src  += w - 1 - src_x;
        src_x = w - 1;
    } else if (src_x <= -block_w) {
        src  += 1 - block_w - src_x;
        src_x = 1 - block_w;
    }

    start_y = FFMAX(0, -src_y);
    start_x = FFMAX(0, -src_x);
    end_y   = FFMIN(block_h, h - src_y);
    end_x   = FFMIN(block_w, w - src_x);
    av_assert2(start_x < end_x && block_w > 0);
    av_assert2(start_y < end_y && block_h > 0);

    // fill in the to-be-copied part plus all above/below
    src += (src_y_add + start_y) * src_stride + start_x;
    w = end_x - start_x;
    if (w <= 22) {
        vfix_tbl[w - 1](dst + start_x, dst_stride, src, src_stride,
                        start_y, end_y, block_h);
    } else {
        v_extend_var(dst + start_x, dst_stride, src, src_stride,
                     start_y, end_y, block_h, w);
    }

    // fill left
    if (start_x) {
        if (start_x <= 22) {
            hfix_tbl[(start_x - 1) >> 1](dst, dst_stride, start_x, block_h);
        } else {
            h_extend_var(dst, dst_stride,
                         start_x, (start_x + 1) >> 1, block_h);
        }
    }

    // fill right
    p = block_w - end_x;
    if (p) {
        if (p <= 22) {
            hfix_tbl[(p - 1) >> 1](dst + end_x - (p & 1), dst_stride,
                                   -!(p & 1), block_h);
        } else {
            h_extend_var(dst + end_x - (p & 1), dst_stride,
                         -!(p & 1), (p + 1) >> 1, block_h);
        }
    }
}

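/*
 * Illustrative (hypothetical) caller, not part of this file: a decoder that
 * needs a block whose source coordinates may fall outside the w x h picture
 * would go through the VideoDSPContext function pointer set up below, e.g.
 *
 *     // src already points at (src_x, src_y) in the source picture, even if
 *     // that position is out of bounds; the call clamps all reads to the
 *     // picture and replicates the border pixels into edge_buf.
 *     vdsp.emulated_edge_mc(edge_buf, edge_buf_stride,
 *                           src, src_stride,
 *                           block_w, block_h, src_x, src_y, w, h);
 */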
#if ARCH_X86_32
static av_noinline void emulated_edge_mc_mmx(uint8_t *buf, ptrdiff_t buf_stride,
                                             const uint8_t *src, ptrdiff_t src_stride,
                                             int block_w, int block_h,
                                             int src_x, int src_y, int w, int h)
{
    emulated_edge_mc(buf, buf_stride, src, src_stride, block_w, block_h,
                     src_x, src_y, w, h, vfixtbl_mmx, &ff_emu_edge_vvar_mmx,
                     hfixtbl_mmx, &ff_emu_edge_hvar_mmx);
}

static av_noinline void emulated_edge_mc_sse(uint8_t *buf, ptrdiff_t buf_stride,
                                             const uint8_t *src, ptrdiff_t src_stride,
                                             int block_w, int block_h,
                                             int src_x, int src_y, int w, int h)
{
    emulated_edge_mc(buf, buf_stride, src, src_stride, block_w, block_h,
                     src_x, src_y, w, h, vfixtbl_sse, &ff_emu_edge_vvar_sse,
                     hfixtbl_mmx, &ff_emu_edge_hvar_mmx);
}
#endif

static av_noinline void emulated_edge_mc_sse2(uint8_t *buf, ptrdiff_t buf_stride,
                                              const uint8_t *src, ptrdiff_t src_stride,
                                              int block_w, int block_h,
                                              int src_x, int src_y, int w, int h)
{
    emulated_edge_mc(buf, buf_stride, src, src_stride, block_w, block_h, src_x,
                     src_y, w, h, vfixtbl_sse, &ff_emu_edge_vvar_sse,
                     hfixtbl_sse2, &ff_emu_edge_hvar_sse2);
}
#endif /* HAVE_YASM */

void ff_prefetch_mmxext(uint8_t *buf, ptrdiff_t stride, int h);
void ff_prefetch_3dnow(uint8_t *buf, ptrdiff_t stride, int h);

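/*
 * Runtime dispatch: the checks run from the weakest to the strongest
 * instruction set, so a later assignment overrides an earlier one whenever
 * the CPU supports it. The emulated_edge_mc implementations are only
 * installed for formats with at most 8 bits per component (bpc <= 8).
 */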
av_cold void ff_videodsp_init_x86(VideoDSPContext *ctx, int bpc)
{
#if HAVE_YASM
    int cpu_flags = av_get_cpu_flags();

#if ARCH_X86_32
    if (EXTERNAL_MMX(cpu_flags) && bpc <= 8) {
        ctx->emulated_edge_mc = emulated_edge_mc_mmx;
    }
    if (EXTERNAL_AMD3DNOW(cpu_flags)) {
        ctx->prefetch = ff_prefetch_3dnow;
    }
#endif /* ARCH_X86_32 */
    if (EXTERNAL_MMXEXT(cpu_flags)) {
        ctx->prefetch = ff_prefetch_mmxext;
    }
#if ARCH_X86_32
    if (EXTERNAL_SSE(cpu_flags) && bpc <= 8) {
        ctx->emulated_edge_mc = emulated_edge_mc_sse;
    }
#endif /* ARCH_X86_32 */
    if (EXTERNAL_SSE2(cpu_flags) && bpc <= 8) {
        ctx->emulated_edge_mc = emulated_edge_mc_sse2;
    }
#endif /* HAVE_YASM */
}