Patch attached to Gentoo bug 259009: VDPAU hardware-decode support for MythTV (diff against mythtv.ori).
(-)mythtv.ori/configure (-1 / +15 lines)
Lines 170-175
   echo "  --disable-xvmcw          disable XvMC Wrapper, use --xvmc-lib if autodetection fails"
   echo "  --enable-xvmc-pro        enable XvMC for the unichrome pro (NOT unichrome) chipset"
   echo "  --enable-xvmc-opengl     enable nVidia XvMC OpenGL texture method"
+  echo "  --enable-vdpau           enable NVidia VDPAU hardware acceleration."
   echo "  --xvmc-lib=LIB           XvMC library override (for crosscompiling)"
   #echo "  --disable-xvmc-vld       disable XvMC-VLD (VIA Epia MPEG accel.)"
   echo "  --enable-opengl-video    enable OpenGL based video display "
Lines 878-883
     termios_h
     threads
     winsock2_h
+    vdpau
 "

 MYTHTV_CONFIG_LIST='
Lines 929-934
     darwin_da
     dvdv
     opengl
+    vdpau
 '

 CMDLINE_SELECT="
Lines 942-947
     optimizations
     shared
     static
+    vdpau
 "

 # code dependency declarations
Lines 980-985
 wmv1_decoder_deps="h263_decoder"
 wmv2_decoder_deps="h263_decoder"
 wmv3_decoder_deps="h263_decoder"
+vc1_vdpau_decoder_deps="vdpau"
+wmv3_vdpau_decoder_deps="vdpau"
 zmbv_decoder_deps="zlib"
 zmbv_encoder_deps="zlib"

Lines 1054-1059
 opengl_video_deps="opengl xv"
 opengl_vsync_deps="opengl"
 v4l_deps="backend"
+vdpau_deps="x11 vdpau_vdpau_h"
 xrandr_deps="x11"
 xv_deps="x11"
 xvmc_deps="xv X11_extensions_XvMClib_h"
Lines 1188-1193
 x11="yes"
 x11_include_path="/usr/X11R6/include"
 xrandr="yes"
+vdpau_deps="x11 vdpau_vdpau_h"
 xv="yes"
 xvmc="yes"
 xvmc_lib=""
Lines 1210-1215
 vhook="default"

 # build settings
+add_cflags -D_ISOC99_SOURCE -D_POSIX_C_SOURCE=200112
 SHFLAGS='-shared -Wl,-soname,$@'
 VHOOKSHFLAGS='$(SHFLAGS)'
 LDLATEFLAGS='-Wl,-rpath-link,\$(BUILD_ROOT)/libavcodec -Wl,-rpath-link,\$(BUILD_ROOT)/libavformat -Wl,-rpath-link,\$(BUILD_ROOT)/libavutil'
Lines 2757-2767
     disable xvmc_vld xvmc_pro
 elif enabled xvmc_opengl ; then
     disable xvmc_opengl
-    echo "Disabling XvMC-opengl. It is only available when linking against libXvMCNIVIDIA"
+    echo "Disabling XvMC-opengl. It is only available when linking against libXvMCNVIDIA"
 fi

 enabled xvmc_pro && enable xvmc_vld

+check_header vdpau/vdpau.h
+enabled vdpau && has_library libvdpau || disable vdpau

 # Can only do Mac accel on Mac platform
 enabled dvdv && test $targetos = darwin || disable dvdv
Lines 3015-3020
   echo
   echo "# Video Output Support"
   echo "x11 support               ${x11-no}"
+if enabled x11 ; then
   echo "xrandr support            ${xrandr-no}"
   echo "xv support                ${xv-no}"
   echo "XvMC support              ${xvmc-no}"
Lines 3024-3029
 if test "$VENDOR_XVMC_LIBS" != "" ; then
   echo "XvMC libs                 $VENDOR_XVMC_LIBS"
 fi
+  echo "VDPAU support             ${vdpau-no}"
+fi
   echo "OpenGL video              ${opengl_video-no}"
   if test x"$targetos" = x"darwin" ; then
     echo "Mac acceleration          ${dvdv-no}"
(-)mythtv.ori/libs/libavcodec/allcodecs.c (+12 lines)
Lines 91-96
     REGISTER_DECODER (H263I, h263i);
     REGISTER_ENCODER (H263P, h263p);
     REGISTER_DECODER (H264, h264);
+#ifdef HAVE_VDPAU
+    REGISTER_DECODER (H264_VDPAU, h264_vdpau);
+#endif
     REGISTER_ENCDEC  (HUFFYUV, huffyuv);
     REGISTER_DECODER (IDCIN, idcin);
     REGISTER_DECODER (INDEO2, indeo2);
Lines 105-110
     REGISTER_DECODER (MJPEGB, mjpegb);
     REGISTER_DECODER (MMVIDEO, mmvideo);
     REGISTER_DECODER (MPEG_DVDV, mpeg_dvdv);
+#ifdef HAVE_VDPAU
+    REGISTER_DECODER (MPEG_VDPAU, mpeg_vdpau);
+#endif
     REGISTER_DECODER (MPEG_XVMC, mpeg_xvmc);
     REGISTER_DECODER (MPEG_XVMC_VLD, mpeg_xvmc_vld);
     REGISTER_ENCDEC  (MPEG1VIDEO, mpeg1video);
Lines 152-157
     REGISTER_DECODER (ULTI, ulti);
     REGISTER_DECODER (VB, vb);
     REGISTER_DECODER (VC1, vc1);
+#ifdef HAVE_VDPAU
+    REGISTER_DECODER (VC1_VDPAU, vc1_vdpau);
+#endif
     REGISTER_DECODER (VCR1, vcr1);
     REGISTER_DECODER (VMDVIDEO, vmdvideo);
     REGISTER_DECODER (VMNC, vmnc);
Lines 164-169
     REGISTER_ENCDEC  (WMV1, wmv1);
     REGISTER_ENCDEC  (WMV2, wmv2);
     REGISTER_DECODER (WMV3, wmv3);
+#ifdef HAVE_VDPAU
+    REGISTER_DECODER (WMV3_VDPAU, wmv3_vdpau);
+#endif
     REGISTER_DECODER (WNV1, wnv1);
     REGISTER_DECODER (XAN_WC3, xan_wc3);
     REGISTER_DECODER (XL, xl);
(-)mythtv.ori/libs/libavcodec/avcodec.h (-7 / +21 lines)
Lines 171-176
     CODEC_ID_VP6A,
     CODEC_ID_AMV,
     CODEC_ID_VB,
+    CODEC_ID_MPEGVIDEO_VDPAU,
+    CODEC_ID_H264_VDPAU,
+    CODEC_ID_VC1_VDPAU,
+    CODEC_ID_WMV3_VDPAU,

     /* various PCM "codecs" */
     CODEC_ID_PCM_S16LE= 0x10000,
Lines 461-466
  * This can be used to prevent truncation of the last audio samples.
  */
 #define CODEC_CAP_SMALL_LAST_FRAME 0x0040
+/* Codec can export data for HW decoding (VDPAU). */
+#define CODEC_CAP_HWACCEL_VDPAU    0x0080

 //The following defines may change, don't expect compatibility if you use them.
 #define MB_TYPE_INTRA4x4   0x0001
Lines 747-758
 #define FF_BUFFER_TYPE_COPY     8 ///< Just a (modified) copy of some other buffer, don't deallocate anything.


-#define FF_I_TYPE  1 // Intra
-#define FF_P_TYPE  2 // Predicted
-#define FF_B_TYPE  3 // Bi-dir predicted
-#define FF_S_TYPE  4 // S(GMC)-VOP MPEG4
-#define FF_SI_TYPE 5
-#define FF_SP_TYPE 6
+#define FF_I_TYPE  1 ///< Intra
+#define FF_P_TYPE  2 ///< Predicted
+#define FF_B_TYPE  3 ///< Bi-dir predicted
+#define FF_S_TYPE  4 ///< S(GMC)-VOP MPEG4
+#define FF_SI_TYPE 5 ///< Switching Intra
+#define FF_SP_TYPE 6 ///< Switching Predicted
+#define FF_BI_TYPE 7

 #define FF_BUFFER_HINTS_VALID    0x01 // Buffer hints value is meaningful (if 0 ignore).
 #define FF_BUFFER_HINTS_READABLE 0x02 // Codec will read from buffer.
Lines 2199-2204
      * - decoding: set by decoder
      */
     void *dvdv;  /* This is actually a pointer to a DVDV_CurPtrs */
+
+    /**
+     * VDPAU Acceleration
+     * - encoding: forbidden
+     * - decoding: set by decoder
+     */
+    int vdpau_acceleration;
 } AVCodecContext;

 /**
Lines 2823-2829
 } AVCodecParserContext;

 typedef struct AVCodecParser {
-    int codec_ids[5]; /* several codec IDs are permitted */
+    int codec_ids[6]; /* several codec IDs are permitted */
     int priv_data_size;
     int (*parser_init)(AVCodecParserContext *s);
     int (*parser_parse)(AVCodecParserContext *s,
(-)mythtv.ori/libs/libavcodec/h263dec.c (+2 lines)
Lines 92-97
         break;
     case CODEC_ID_VC1:
     case CODEC_ID_WMV3:
+    case CODEC_ID_VC1_VDPAU:
+    case CODEC_ID_WMV3_VDPAU:
         s->h263_msmpeg4 = 1;
         s->h263_pred = 1;
         s->msmpeg4_version=6;
(-)mythtv.ori/libs/libavcodec/h264.c (-13 / +118 lines)
Lines 72-77
 static VLC_TYPE run7_vlc_table[96][2];
 static const int run7_vlc_table_size = 96;

+extern int VDPAU_h264_add_data_chunk(H264Context *h, const uint8_t *buf, int buf_size);
+extern int VDPAU_h264_picture_complete(H264Context *h);
+
 static void svq3_luma_dc_dequant_idct_c(DCTELEM *block, int qp);
 static void svq3_add_idct_c(uint8_t *dst, DCTELEM *block, int stride, int qp, int dc);
 static void filter_mb( H264Context *h, int mb_x, int mb_y, uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr, unsigned int linesize, unsigned int uvlinesize);
Lines 101-106
     {0,2,0,2,7,10,7,10}
 };

+static const enum PixelFormat pixfmt_vdpau_h264_baseline_420[] = {
+                                           PIX_FMT_VDPAU_H264_BASELINE,
+                                           PIX_FMT_NONE};
+static const enum PixelFormat pixfmt_vdpau_h264_main_420[] = {
+                                           PIX_FMT_VDPAU_H264_MAIN,
+                                           PIX_FMT_NONE};
+static const enum PixelFormat pixfmt_vdpau_h264_high_420[] = {
+                                           PIX_FMT_VDPAU_H264_HIGH,
+                                           PIX_FMT_NONE};
+
 static void fill_caches(H264Context *h, int mb_type, int for_deblock){
     MpegEncContext * const s = &h->s;
     const int mb_xy= h->mb_xy;
Lines 116-122
     if(for_deblock && (h->slice_num == 1 || h->slice_table[mb_xy] == h->slice_table[top_xy]) && !FRAME_MBAFF)
         return;

-    //wow what a mess, why didn't they simplify the interlacing&intra stuff, i can't imagine that these complex rules are worth it
+    /* Wow, what a mess, why didn't they simplify the interlacing & intra
+     * stuff, I can't imagine that these complex rules are worth it. */

     topleft_xy = top_xy - 1;
     topright_xy= top_xy + 1;
Lines 2226-2235
     s->quarter_sample = 1;
     s->low_delay= 1;

-    if(avctx->codec_id == CODEC_ID_SVQ3)
-        avctx->pix_fmt= PIX_FMT_YUVJ420P;
-    else
-        avctx->pix_fmt= PIX_FMT_YUV420P;
+    // Set in decode_postinit() once initial parsing is complete
+    avctx->pix_fmt = PIX_FMT_NONE;

     decode_init_vlc();

Lines 2247-2252
     return 0;
 }

+static int decode_postinit(H264Context *h, SPS *sps){
+    AVCodecContext * const avctx= h->s.avctx;
+
+    if (avctx->pix_fmt != PIX_FMT_NONE){
+        return 0;
+    }
+
+    if (avctx->vdpau_acceleration) {
+        if(h->s.chroma_format >= 2) {
+            return -2;
+        }
+        if (sps->profile_idc == 66) {
+            avctx->pix_fmt = avctx->get_format(avctx, pixfmt_vdpau_h264_baseline_420);
+        } else if (sps->profile_idc == 77) {
+            avctx->pix_fmt = avctx->get_format(avctx, pixfmt_vdpau_h264_main_420);
+        } else if (sps->profile_idc == 100) {
+            avctx->pix_fmt = avctx->get_format(avctx, pixfmt_vdpau_h264_high_420);
+        } else {
+            return -2;
+        }
+    } else if (avctx->codec_id == CODEC_ID_SVQ3) {
+        avctx->pix_fmt= PIX_FMT_YUVJ420P;
+    } else {
+        avctx->pix_fmt= PIX_FMT_YUV420P;
+    }
+
+    return 0;
+}
+
 static int frame_start(H264Context *h){
     MpegEncContext * const s = &h->s;
     int i;
Lines 7145-7150
                ((const char*[]){"Gray","420","422","444"})[sps->chroma_format_idc]
                );
     }
+
+    if (decode_postinit(h, sps) < 0)
+        return -1;
+
     return 0;
 }

Lines 7277-7283
     H264Context *hx;
     int i;

-    if(context_count == 1) {
+    if(avctx->vdpau_acceleration) {
+        return;
+    } else if(context_count == 1) {
         decode_slice(avctx, h);
     } else {
         for(i = 1; i < context_count; i++) {
Lines 7404-7411
                && (avctx->skip_frame < AVDISCARD_NONREF || hx->nal_ref_idc)
                && (avctx->skip_frame < AVDISCARD_BIDIR  || hx->slice_type_nos!=B_TYPE)
                && (avctx->skip_frame < AVDISCARD_NONKEY || hx->slice_type_nos==I_TYPE)
-               && avctx->skip_frame < AVDISCARD_ALL)
-                context_count++;
+               && avctx->skip_frame < AVDISCARD_ALL) {
+#ifdef HAVE_VDPAU
+                if (avctx->vdpau_acceleration) {
+                    if(h->is_avc) {
+                        static const uint8_t start_code[] = {0x00, 0x00, 0x01};
+                        VDPAU_h264_add_data_chunk(h, start_code, sizeof(start_code));
+                        VDPAU_h264_add_data_chunk(h, &buf[buf_index - consumed], consumed );
+                    }
+                    else
+                    {
+                        // +/-3: Add back 00 00 01 to start of data
+                        VDPAU_h264_add_data_chunk(h, &buf[buf_index - consumed - 3], consumed + 3);
+                    }
+                }
+                else
+#endif
+                {
+                    context_count++;
+                }
+            }
             break;
         case NAL_DPA:
             init_get_bits(&hx->s.gb, ptr, bit_length);
Lines 7600-7612
         s->current_picture_ptr->qscale_type= FF_QSCALE_TYPE_H264;
         s->current_picture_ptr->pict_type= s->pict_type;

+        h->prev_frame_num_offset= h->frame_num_offset;
+        h->prev_frame_num= h->frame_num;
+
         if(!s->dropable) {
-            execute_ref_pic_marking(h, h->mmco, h->mmco_index);
             h->prev_poc_msb= h->poc_msb;
             h->prev_poc_lsb= h->poc_lsb;
+            execute_ref_pic_marking(h, h->mmco, h->mmco_index);
         }
-        h->prev_frame_num_offset= h->frame_num_offset;
-        h->prev_frame_num= h->frame_num;
+
+#ifdef HAVE_VDPAU
+        if (avctx->vdpau_acceleration) {
+            VDPAU_h264_picture_complete(h);
+        }
+#endif

         /*
          * FIXME: Error handling code does not seem to support interlaced
Lines 7620-7627
          * past end by one (callers fault) and resync_mb_y != 0
          * causes problems for the first MB line, too.
          */
-        if (!FIELD_PICTURE)
-            ff_er_frame_end(s);
+#ifdef HAVE_VDPAU
+        if (!avctx->vdpau_acceleration)
+#endif
+            if (!FIELD_PICTURE)
+                ff_er_frame_end(s);

         MPV_frame_end(s);

Lines 7933-7936
     .flush= flush_dpb,
 };

+#ifdef HAVE_VDPAU
+static int h264_vdpau_decode_init(AVCodecContext *avctx){
+    if( avctx->thread_count > 1)
+        return -1;
+    if( !(avctx->slice_flags & SLICE_FLAG_CODED_ORDER) )
+        return -1;
+    if( !(avctx->slice_flags & SLICE_FLAG_ALLOW_FIELD) ){
+        dprintf(avctx, "h264.c: VDPAU decoder does not set SLICE_FLAG_ALLOW_FIELD\n");
+    }
+    decode_init(avctx);
+
+    avctx->vdpau_acceleration = 1;
+
+    return 0;
+}
+
+AVCodec h264_vdpau_decoder = {
+    "h264_vdpau",
+    CODEC_TYPE_VIDEO,
+    CODEC_ID_H264_VDPAU,
+    sizeof(H264Context),
+    h264_vdpau_decode_init,
+    NULL,
+    decode_end,
+    decode_frame,
+    CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_HWACCEL_VDPAU,
+    .flush= flush_dpb,
+};
+#endif
+
 #include "svq3.c"
(-)mythtv.ori/libs/libavcodec/h264_parser.c (-1 / +3 lines)
Lines 139-145


 AVCodecParser h264_parser = {
-    { CODEC_ID_H264 },
+    { CODEC_ID_H264,
+      CODEC_ID_H264_VDPAU
+     },
     sizeof(H264Context),
     NULL,
     h264_parse,
(-)mythtv.ori/libs/libavcodec/imgconvert.c (+27 lines)
Lines 266-271
     [PIX_FMT_XVMC_MPEG2_IDCT] = {
         .name = "xvmcidct",
     },
+    [PIX_FMT_VDPAU_MPEG1] = {
+        .name = "vdpau_mpeg1",
+    },
+    [PIX_FMT_VDPAU_MPEG2_SIMPLE] = {
+        .name = "vdpau_mpeg2_simple",
+    },
+    [PIX_FMT_VDPAU_MPEG2_MAIN] = {
+        .name = "vdpau_mpeg2_main",
+    },
+    [PIX_FMT_VDPAU_H264_BASELINE] = {
+        .name = "vdpau_h264_baseline",
+    },
+    [PIX_FMT_VDPAU_H264_MAIN] = {
+        .name = "vdpau_h264_main",
+    },
+    [PIX_FMT_VDPAU_H264_HIGH] = {
+        .name = "vdpau_h264_high",
+    },
+    [PIX_FMT_VDPAU_VC1_SIMPLE] = {
+        .name = "vdpau_vc1_simple",
+    },
+    [PIX_FMT_VDPAU_VC1_MAIN] = {
+        .name = "vdpau_vc1_main",
+    },
+    [PIX_FMT_VDPAU_VC1_ADVANCED] = {
+        .name = "vdpau_vc1_advanced",
+    },
     [PIX_FMT_UYYVYY411] = {
         .name = "uyyvyy411",
         .nb_channels = 1,
(-)mythtv.ori/libs/libavcodec/libavcodec.pro (+2 lines)
Lines 372-377
     DEFINES += HAVE_DVDV
 }

+contains( HAVE_VDPAU, yes )                     { SOURCES *= vdpauvideo.c }
+
 !contains( CONFIG_SWSCALER, yes )               { SOURCES *= imgresample.c }

 contains( HAVE_GPROF, yes ) {
(-)mythtv.ori/libs/libavcodec/mpeg12.c (-25 / +114 lines)
Lines 88-97
 static const enum PixelFormat pixfmt_yuv_420[]= {PIX_FMT_YUV420P,-1};
 static const enum PixelFormat pixfmt_yuv_422[]= {PIX_FMT_YUV422P,-1};
 static const enum PixelFormat pixfmt_yuv_444[]= {PIX_FMT_YUV444P,-1};
+
+extern int VDPAU_mpeg_field_start(MpegEncContext *s);
+extern void VDPAU_mpeg_picture_complete(MpegEncContext *s, const uint8_t *buf, int buf_size, int slice_count);
+
 static const enum PixelFormat pixfmt_xvmc_mpg2_420[] = {
                                            PIX_FMT_XVMC_MPEG2_IDCT,
                                            PIX_FMT_XVMC_MPEG2_MC,
-                                           -1};
+                                           PIX_FMT_NONE};
+static const enum PixelFormat pixfmt_vdpau_mpg1_420[] = {
+                                           PIX_FMT_VDPAU_MPEG1,
+                                           PIX_FMT_NONE};
+static const enum PixelFormat pixfmt_vdpau_mpg2simple_420[] = {
+                                           PIX_FMT_VDPAU_MPEG2_SIMPLE,
+                                           PIX_FMT_NONE};
+static const enum PixelFormat pixfmt_vdpau_mpg2main_420[] = {
+                                           PIX_FMT_VDPAU_MPEG2_MAIN,
+                                           PIX_FMT_NONE};

 uint8_t ff_mpeg12_static_rl_table_store[2][2][2*MAX_RUN + MAX_LEVEL + 3];

Lines 1380-1387
     }
 }

-//Call this function when we know all parameters
-//it may be called in different places for mpeg1 and mpeg2
+static void mpeg_set_pixelformat(AVCodecContext *avctx){
+    Mpeg1Context *s1 = avctx->priv_data;
+    MpegEncContext *s = &s1->mpeg_enc_ctx;
+
+    if(avctx->vdpau_acceleration){
+        if(s->chroma_format >= 2){
+            return -2;
+        }
+        if(avctx->sub_id == 1){
+            avctx->pix_fmt = avctx->get_format(avctx,pixfmt_vdpau_mpg1_420);
+        }else{
+            if(avctx->profile == 5){
+                avctx->pix_fmt = avctx->get_format(avctx,pixfmt_vdpau_mpg2simple_420);
+            }else if(avctx->profile == 4){
+                avctx->pix_fmt = avctx->get_format(avctx,pixfmt_vdpau_mpg2main_420);
+            }else{
+                return -2;
+            }
+        }
+    }else if(avctx->xvmc_acceleration){
+        avctx->pix_fmt = avctx->get_format(avctx,pixfmt_xvmc_mpg2_420);
+    }else{
+        if(s->chroma_format <  2){
+            avctx->pix_fmt = avctx->get_format(avctx,pixfmt_yuv_420);
+        }else if(s->chroma_format == 2){
+            avctx->pix_fmt = avctx->get_format(avctx,pixfmt_yuv_422);
+        }else if(s->chroma_format >  2){
+            avctx->pix_fmt = avctx->get_format(avctx,pixfmt_yuv_444);
+        }
+    }
+}
+
+/* Call this function when we know all parameters.
+ * It may be called in different places for MPEG-1 and MPEG-2. */
 static int mpeg_decode_postinit(AVCodecContext *avctx){
     Mpeg1Context *s1 = avctx->priv_data;
     MpegEncContext *s = &s1->mpeg_enc_ctx;
Lines 1452-1482
                 s->avctx->sample_aspect_ratio=
                     ff_mpeg2_aspect[s->aspect_ratio_info];
             }
-        }//mpeg2
+        }//MPEG-2
+
+        mpeg_set_pixelformat(avctx);

-        if(avctx->xvmc_acceleration){
-            avctx->pix_fmt = avctx->get_format(avctx,pixfmt_xvmc_mpg2_420);
-        }else{
-            if(s->chroma_format <  2){
-                avctx->pix_fmt = avctx->get_format(avctx,pixfmt_yuv_420);
-            }else
-            if(s->chroma_format == 2){
-                avctx->pix_fmt = avctx->get_format(avctx,pixfmt_yuv_422);
-            }else
-            if(s->chroma_format >  2){
-                avctx->pix_fmt = avctx->get_format(avctx,pixfmt_yuv_444);
-            }
-        }
         //until then pix_fmt may be changed right after codec init
         if( avctx->pix_fmt == PIX_FMT_XVMC_MPEG2_IDCT )
             if( avctx->idct_algo == FF_IDCT_AUTO )
                 avctx->idct_algo = FF_IDCT_SIMPLE;

+        if( avctx->vdpau_acceleration)
+            avctx->idct_algo = FF_IDCT_SIMPLE;
+
         if( avctx->xvmc_vld_hwslice == 1)
             avctx->idct_algo = FF_IDCT_LIBMPEG2MMX;

-        //quantization matrixes may need reordering
-        //if dct permutation is changed
+        /* Quantization matrices may need reordering
+         * if DCT permutation is changed. */
         memcpy(old_permutation,s->dsp.idct_permutation,64*sizeof(uint8_t));

         if (MPV_common_init(s) < 0)
Lines 1811-1816
          XVMC_VLD_field_start(s, avctx);
 #endif

+#ifdef HAVE_VDPAU
+    if(s->avctx->vdpau_acceleration)
+         VDPAU_mpeg_field_start(s);
+#endif
+
     return 0;
 }

Lines 2113-2119

         s->current_picture_ptr->qscale_type= FF_QSCALE_TYPE_MPEG2;

-        ff_er_frame_end(s);
+#ifdef HAVE_VDPAU
+        if(!s->avctx->vdpau_acceleration)
+#endif
+            ff_er_frame_end(s);

         MPV_frame_end(s);

Lines 2258-2273
     avctx->has_b_frames= 0; //true?
     s->low_delay= 1;

-    if(avctx->xvmc_acceleration){
-        avctx->pix_fmt = avctx->get_format(avctx,pixfmt_xvmc_mpg2_420);
-    }else{
-        avctx->pix_fmt = avctx->get_format(avctx,pixfmt_yuv_420);
-    }
+    mpeg_set_pixelformat(avctx);

     if( avctx->pix_fmt == PIX_FMT_XVMC_MPEG2_IDCT )
         if( avctx->idct_algo == FF_IDCT_AUTO )
             avctx->idct_algo = FF_IDCT_SIMPLE;

+    if( avctx->vdpau_acceleration == 1)
+        avctx->idct_algo = FF_IDCT_SIMPLE;
+
     if( avctx->xvmc_vld_hwslice == 1)
         avctx->idct_algo = FF_IDCT_LIBMPEG2MMX;

Lines 2555-2560
                     for(i=0; i<s->slice_count; i++)
                         s2->error_count += s2->thread_context[i]->error_count;
                 }
+
+#ifdef HAVE_VDPAU
+                if (avctx->vdpau_acceleration) {
+                    /* Fills mpeg12 picture informations before returing from libavcodec. */
+                    VDPAU_mpeg_picture_complete(s2, buf, buf_size, s->slice_count);
+                }
+#endif
+
                 if (slice_end(avctx, picture)) {
                     if(s2->last_picture_ptr || s2->low_delay) //FIXME merge with the stuff in mpeg_decode_slice
                         *data_size = sizeof(AVPicture);
Lines 2631-2636
                     return -1;
                 }

+                if (avctx->vdpau_acceleration) {
+                    s->slice_count++;
+                    break;
+                }
+
                 if(avctx->thread_count > 1){
                     int threshold= (s2->mb_height*s->slice_count + avctx->thread_count/2) / avctx->thread_count;
                     if(threshold <= mb_y){
Lines 2818-2823
 #endif


+#ifdef HAVE_VDPAU
+static int mpeg_vdpau_decode_init(AVCodecContext *avctx){
+    if( avctx->thread_count > 1)
+        return -1;
+    if( !(avctx->slice_flags & SLICE_FLAG_CODED_ORDER) )
+        return -1;
+    if( !(avctx->slice_flags & SLICE_FLAG_ALLOW_FIELD) ){
+        dprintf(avctx, "mpeg12.c: VDPAU decoder does not set SLICE_FLAG_ALLOW_FIELD\n");
+    }
+    mpeg_decode_init(avctx);
+
+    // Set in mpeg_decode_postinit() once initial parsing is complete
+    avctx->pix_fmt = PIX_FMT_NONE;
+    avctx->vdpau_acceleration = 1;
+
+    return 0;
+}
+
+AVCodec mpeg_vdpau_decoder = {
+    "mpegvideo_vdpau",
+    CODEC_TYPE_VIDEO,
+    CODEC_ID_MPEGVIDEO_VDPAU,
+    sizeof(Mpeg1Context),
+    mpeg_vdpau_decode_init,
+    NULL,
+    mpeg_decode_end,
+    mpeg_decode_frame,
+    CODEC_CAP_DR1 | CODEC_CAP_TRUNCATED | CODEC_CAP_HWACCEL_VDPAU | CODEC_CAP_DELAY,
+    .flush= ff_mpeg_flush,
+};
+#endif
+
 /* this is ugly i know, but the alternative is too make
    hundreds of vars global and prefix them with ff_mpeg1_
    which is far uglier. */
(-)mythtv.ori/libs/libavcodec/mpegvideo.c (+5 lines)
Lines 63-68

 void (*draw_edges)(uint8_t *buf, int wrap, int width, int height, int w)= draw_edges_c;

+extern int VDPAU_mpeg_field_start(MpegEncContext *s);

 /* enable all paranoid tests for rounding, overflows, etc... */
 //#define PARANOID
Lines 1031-1036
         XVMC_field_end(s);
     }else
 #endif
+#ifdef HAVE_VDPAU
+    if(s->avctx->vdpau_acceleration){
+    }else
+#endif
     if(s->unrestricted_mv && s->current_picture.reference && !s->intra_only && !(s->flags&CODEC_FLAG_EMU_EDGE)) {
             draw_edges(s->current_picture.data[0], s->linesize  , s->h_edge_pos   , s->v_edge_pos   , EDGE_WIDTH  );
             draw_edges(s->current_picture.data[1], s->uvlinesize, s->h_edge_pos>>1, s->v_edge_pos>>1, EDGE_WIDTH/2);
(-)mythtv.ori/libs/libavcodec/mpegvideo_parser.c (+1 lines)
Lines 181-186
       CODEC_ID_MPEG2VIDEO_DVDV,
       CODEC_ID_MPEG2VIDEO_XVMC,
       CODEC_ID_MPEG2VIDEO_XVMC_VLD,
+      CODEC_ID_MPEGVIDEO_VDPAU
     },
     sizeof(ParseContext1),
     NULL,
(-)mythtv.ori/libs/libavcodec/myth_utils.c (+4 lines)
Lines 41-46
             return "MPEG2VIDEO_XVMC";
         case CODEC_ID_MPEG2VIDEO_XVMC_VLD:
             return "MPEG2VIDEO_XVMC_VLD";
+        case CODEC_ID_MPEGVIDEO_VDPAU:  return "MPEGVIDEO_VDPAU";
         case CODEC_ID_H261:             return "H261";
         case CODEC_ID_H263:             return "H263";
         case CODEC_ID_RV10:             return "RV10";
Lines 65-70
         case CODEC_ID_HUFFYUV:          return "HUFFYUV";
         case CODEC_ID_CYUV:             return "CYUV";
         case CODEC_ID_H264:             return "H264";
+        case CODEC_ID_H264_VDPAU:       return "H264_VDPAU";
         case CODEC_ID_INDEO3:           return "INDEO3";
         case CODEC_ID_VP3:              return "VP3";
         case CODEC_ID_THEORA:           return "THEORA";
Lines 111-117
         case CODEC_ID_RV30:             return "RV30";
         case CODEC_ID_RV40:             return "RV40";
         case CODEC_ID_VC1:              return "VC1";
+        case CODEC_ID_VC1_VDPAU:        return "VC1_VDPAU";
         case CODEC_ID_WMV3:             return "WMV3";
+        case CODEC_ID_WMV3_VDPAU:       return "WMV3_VDPAU";
         case CODEC_ID_LOCO:             return "LOCO";
         case CODEC_ID_WNV1:             return "WNV1";
         case CODEC_ID_AASC:             return "AASC";
(-)mythtv.ori/libs/libavcodec/parser.c (-1 / +2 lines)
Lines 44-50
             parser->codec_ids[1] == codec_id ||
             parser->codec_ids[2] == codec_id ||
             parser->codec_ids[3] == codec_id ||
-            parser->codec_ids[4] == codec_id)
+            parser->codec_ids[4] == codec_id ||
+            parser->codec_ids[5] == codec_id)
             goto found;
     }
     return NULL;
(-)mythtv.ori/libs/libavcodec/utils.c (+1 lines)
Lines 673-678
 {"context", "context model", OFFSET(context_model), FF_OPT_TYPE_INT, DEFAULT, INT_MIN, INT_MAX, V|E},
 {"slice_flags", NULL, OFFSET(slice_flags), FF_OPT_TYPE_INT, DEFAULT, INT_MIN, INT_MAX},
 {"xvmc_acceleration", NULL, OFFSET(xvmc_acceleration), FF_OPT_TYPE_INT, DEFAULT, INT_MIN, INT_MAX},
+{"vdpau_acceleration", NULL, OFFSET(vdpau_acceleration), FF_OPT_TYPE_INT, DEFAULT, INT_MIN, INT_MAX},
 {"mbd", "macroblock decision algorithm (high quality mode)", OFFSET(mb_decision), FF_OPT_TYPE_INT, DEFAULT, INT_MIN, INT_MAX, V|E, "mbd"},
 {"simple", "use mbcmp (default)", 0, FF_OPT_TYPE_CONST, FF_MB_DECISION_SIMPLE, INT_MIN, INT_MAX, V|E, "mbd"},
 {"bits", "use fewest bits", 0, FF_OPT_TYPE_CONST, FF_MB_DECISION_BITS, INT_MIN, INT_MAX, V|E, "mbd"},
(-)mythtv.ori/libs/libavcodec/vc1.c (-19 / +148 lines)
Lines 41-48 Link Here
41
#define MB_INTRA_VLC_BITS 9
41
#define MB_INTRA_VLC_BITS 9
42
#define DC_VLC_BITS 9
42
#define DC_VLC_BITS 9
43
#define AC_VLC_BITS 9
43
#define AC_VLC_BITS 9
44
45
extern int VDPAU_vc1_decode_picture(MpegEncContext *s, AVCodecContext *avctx, VC1Context *v, const uint8_t *buf, int buf_size);
46
44
static const uint16_t table_mb_intra[64][2];
47
static const uint16_t table_mb_intra[64][2];
45
48
49
#ifdef HAVE_VDPAU
50
static const enum PixelFormat pixfmt_vdpau_vc1_simple_420[] = {
51
                                           PIX_FMT_VDPAU_VC1_SIMPLE,
52
                                           PIX_FMT_NONE};
53
static const enum PixelFormat pixfmt_vdpau_vc1_main_420[] = {
54
                                           PIX_FMT_VDPAU_VC1_MAIN,
55
                                           PIX_FMT_NONE};
56
static const enum PixelFormat pixfmt_vdpau_vc1_advanced_420[] = {
57
                                           PIX_FMT_VDPAU_VC1_ADVANCED,
58
                                           PIX_FMT_NONE};
59
#endif
46
60
47
static inline int decode210(GetBitContext *gb){
61
static inline int decode210(GetBitContext *gb){
48
    if (get_bits1(gb))
62
    if (get_bits1(gb))
Lines 762-767 Link Here
762
    }
776
    }
763
}
777
}
764
778
779
#ifdef HAVE_VDPAU
780
static int decode_postinit(VC1Context *v, AVCodecContext *avctx)
781
{
782
    if (avctx->pix_fmt != PIX_FMT_NONE){
783
        return 0;
784
    }
785
786
    if (avctx->vdpau_acceleration) { // VC1
787
        if (v->profile == 0) {
788
            avctx->pix_fmt = avctx->get_format(avctx, pixfmt_vdpau_vc1_simple_420);
789
        } else if (v->profile == 1) {
790
            avctx->pix_fmt = avctx->get_format(avctx, pixfmt_vdpau_vc1_main_420);
791
        } else if (v->profile == 3) {
792
            avctx->pix_fmt = avctx->get_format(avctx, pixfmt_vdpau_vc1_advanced_420);
793
        } else {
794
            return -2;
795
        }
796
    }
797
798
    return 0;
799
}
800
#endif
801
765
static int decode_sequence_header_adv(VC1Context *v, GetBitContext *gb);
802
static int decode_sequence_header_adv(VC1Context *v, GetBitContext *gb);
766
803
767
/**
804
/**
Lines 932-939 Link Here
932
    if(get_bits1(gb)) { //Display Info - decoding is not affected by it
969
    if(get_bits1(gb)) { //Display Info - decoding is not affected by it
933
        int w, h, ar = 0;
970
        int w, h, ar = 0;
934
        av_log(v->s.avctx, AV_LOG_DEBUG, "Display extended info:\n");
971
        av_log(v->s.avctx, AV_LOG_DEBUG, "Display extended info:\n");
935
        v->s.avctx->width  = v->s.width  = w = get_bits(gb, 14) + 1;
972
        // FIXME: The w/h parsed here are the *display* width/height, not the
936
        v->s.avctx->height = v->s.height = h = get_bits(gb, 14) + 1;
973
        // coded width/height. Ideally, we should make the commented
974
        // assignments below, but that causes problems:
975
        // * The SW decoder in this file experiences errors, because it
976
        //   assumes these assigned values are the coded size:
977
        //   [vc1 @ 0x86f2130]concealing 150 DC, 150 AC, 150 MV errors
978
        // * VDPAU also assumes these are the coded size, since this is the
979
        //   only size passed to vo_vdpau.c:config(). This causes errors
980
        //   during the decode process.
981
        // However, simply removing these assignments is not the complete fix,
982
        // because without them, the stream is displayed at its coded size,
983
        // not this requested display size. Ideally, setting:
984
        // sample_aspect_ratio = (AVRational){w, h}
985
        // in the case when ar is not present/set would persuade other modules
986
        // to scale to this requested size. However, sample_aspect_ratio
987
        // appears to be completely ignored elsewhere.
988
        /*v->s.avctx->width  = v->s.width  =*/ w = get_bits(gb, 14) + 1;
989
        /*v->s.avctx->height = v->s.height =*/ h = get_bits(gb, 14) + 1;
937
        av_log(v->s.avctx, AV_LOG_DEBUG, "Display dimensions: %ix%i\n", w, h);
990
        av_log(v->s.avctx, AV_LOG_DEBUG, "Display dimensions: %ix%i\n", w, h);
938
        if(get_bits1(gb))
991
        if(get_bits1(gb))
939
            ar = get_bits(gb, 4);
992
            ar = get_bits(gb, 4);
Lines 984-996 Link Here
984
static int decode_entry_point(AVCodecContext *avctx, GetBitContext *gb)
1037
static int decode_entry_point(AVCodecContext *avctx, GetBitContext *gb)
985
{
1038
{
986
    VC1Context *v = avctx->priv_data;
1039
    VC1Context *v = avctx->priv_data;
987
    int i, blink, clentry, refdist;
1040
    int i, blink, clentry;
988
1041
989
    av_log(avctx, AV_LOG_DEBUG, "Entry point: %08X\n", show_bits_long(gb, 32));
1042
    av_log(avctx, AV_LOG_DEBUG, "Entry point: %08X\n", show_bits_long(gb, 32));
990
    blink = get_bits1(gb); // broken link
1043
    blink = get_bits1(gb); // broken link
991
    clentry = get_bits1(gb); // closed entry
1044
    clentry = get_bits1(gb); // closed entry
992
    v->panscanflag = get_bits1(gb);
1045
    v->panscanflag = get_bits1(gb);
993
    refdist = get_bits1(gb); // refdist flag
1046
    v->refdist_flag = get_bits1(gb);
994
    v->s.loop_filter = get_bits1(gb);
1047
    v->s.loop_filter = get_bits1(gb);
995
    v->fastuvmc = get_bits1(gb);
1048
    v->fastuvmc = get_bits1(gb);
996
    v->extended_mv = get_bits1(gb);
1049
    v->extended_mv = get_bits1(gb);
Lines 1011-1030 Link Here
1011
    }
1064
    }
1012
    if(v->extended_mv)
1065
    if(v->extended_mv)
1013
        v->extended_dmv = get_bits1(gb);
1066
        v->extended_dmv = get_bits1(gb);
1014
    if(get_bits1(gb)) {
1067
    v->range_mapy_flag = get_bits1(gb);
1068
    if(v->range_mapy_flag) {
1015
        av_log(avctx, AV_LOG_ERROR, "Luma scaling is not supported, expect wrong picture\n");
1069
        av_log(avctx, AV_LOG_ERROR, "Luma scaling is not supported, expect wrong picture\n");
1016
        skip_bits(gb, 3); // Y range, ignored for now
1070
        v->range_mapy = get_bits(gb, 3);
1017
    }
1071
    }
1018
    if(get_bits1(gb)) {
1072
    v->range_mapuv_flag = get_bits1(gb);
1073
    if(v->range_mapuv_flag) {
1019
        av_log(avctx, AV_LOG_ERROR, "Chroma scaling is not supported, expect wrong picture\n");
1074
        av_log(avctx, AV_LOG_ERROR, "Chroma scaling is not supported, expect wrong picture\n");
1020
        skip_bits(gb, 3); // UV range, ignored for now
1075
        v->range_mapuv = get_bits(gb, 3);
1021
    }
1076
    }
1022
1077
1023
    av_log(avctx, AV_LOG_DEBUG, "Entry point info:\n"
1078
    av_log(avctx, AV_LOG_DEBUG, "Entry point info:\n"
1024
        "BrokenLink=%i, ClosedEntry=%i, PanscanFlag=%i\n"
1079
        "BrokenLink=%i, ClosedEntry=%i, PanscanFlag=%i\n"
1025
        "RefDist=%i, Postproc=%i, FastUVMC=%i, ExtMV=%i\n"
1080
        "RefDist=%i, Postproc=%i, FastUVMC=%i, ExtMV=%i\n"
1026
        "DQuant=%i, VSTransform=%i, Overlap=%i, Qmode=%i\n",
1081
        "DQuant=%i, VSTransform=%i, Overlap=%i, Qmode=%i\n",
1027
        blink, clentry, v->panscanflag, refdist, v->s.loop_filter,
1082
        blink, clentry, v->panscanflag, v->refdist_flag, v->s.loop_filter,
1028
        v->fastuvmc, v->extended_mv, v->dquant, v->vstransform, v->overlap, v->quantizer_mode);
1083
        v->fastuvmc, v->extended_mv, v->dquant, v->vstransform, v->overlap, v->quantizer_mode);
1029
1084
1030
    return 0;
1085
    return 0;
Lines 1320-1325 Link Here
1320
1375
1321
    if(v->s.pict_type == I_TYPE || v->s.pict_type == P_TYPE) v->use_ic = 0;
1376
    if(v->s.pict_type == I_TYPE || v->s.pict_type == P_TYPE) v->use_ic = 0;
1322
1377
1378
    if(v->postprocflag)
1379
        v->postproc = get_bits(gb, 2);
1380
1323
    switch(v->s.pict_type) {
1381
    switch(v->s.pict_type) {
1324
    case I_TYPE:
1382
    case I_TYPE:
1325
    case BI_TYPE:
1383
    case BI_TYPE:
Lines 1339-1346 Link Here
1339
        }
1397
        }
1340
        break;
1398
        break;
1341
    case P_TYPE:
1399
    case P_TYPE:
1342
        if(v->postprocflag)
1343
            v->postproc = get_bits1(gb);
1344
        if (v->extended_mv) v->mvrange = get_unary(gb, 0, 3);
1400
        if (v->extended_mv) v->mvrange = get_unary(gb, 0, 3);
1345
        else v->mvrange = 0;
1401
        else v->mvrange = 0;
1346
        v->k_x = v->mvrange + 9 + (v->mvrange >> 1); //k_x can be 9 10 12 13
1402
        v->k_x = v->mvrange + 9 + (v->mvrange >> 1); //k_x can be 9 10 12 13
Lines 1430-1437 Link Here
1430
        }
1486
        }
1431
        break;
1487
        break;
1432
    case B_TYPE:
1488
    case B_TYPE:
1433
        if(v->postprocflag)
1434
            v->postproc = get_bits1(gb);
1435
        if (v->extended_mv) v->mvrange = get_unary(gb, 0, 3);
1489
        if (v->extended_mv) v->mvrange = get_unary(gb, 0, 3);
1436
        else v->mvrange = 0;
1490
        else v->mvrange = 0;
1437
        v->k_x = v->mvrange + 9 + (v->mvrange >> 1); //k_x can be 9 10 12 13
1491
        v->k_x = v->mvrange + 9 + (v->mvrange >> 1); //k_x can be 9 10 12 13
Lines 3843-3849 Link Here
3843
3897
3844
    avctx->coded_width = avctx->width;
3898
    avctx->coded_width = avctx->width;
3845
    avctx->coded_height = avctx->height;
3899
    avctx->coded_height = avctx->height;
3846
    if (avctx->codec_id == CODEC_ID_WMV3)
3900
    if ((avctx->codec_id == CODEC_ID_WMV3) || (avctx->codec_id == CODEC_ID_WMV3_VDPAU))
3847
    {
3901
    {
3848
        int count = 0;
3902
        int count = 0;
3849
3903
Lines 3953-3958 Link Here
3953
    MpegEncContext *s = &v->s;
4007
    MpegEncContext *s = &v->s;
3954
    AVFrame *pict = data;
4008
    AVFrame *pict = data;
3955
    uint8_t *buf2 = NULL;
4009
    uint8_t *buf2 = NULL;
4010
#ifdef HAVE_VDPAU
4011
    uint8_t *buf_vdpau = buf;
4012
#endif
3956
4013
3957
    /* no supplementary picture */
4014
    /* no supplementary picture */
3958
    if (buf_size == 0) {
4015
    if (buf_size == 0) {
Lines 3974-3981 Link Here
3974
        s->current_picture_ptr= &s->picture[i];
4031
        s->current_picture_ptr= &s->picture[i];
3975
    }
4032
    }
3976
4033
4034
#ifdef HAVE_VDPAU
4035
    // pxt_fmt calculation for VDPAU.
4036
    if (decode_postinit(v, avctx) < 0)
4037
        return -1;
4038
#endif
4039
3977
    //for advanced profile we may need to parse and unescape data
4040
    //for advanced profile we may need to parse and unescape data
3978
    if (avctx->codec_id == CODEC_ID_VC1) {
4041
    if ((avctx->codec_id == CODEC_ID_VC1) || (avctx->codec_id == CODEC_ID_VC1_VDPAU)) {
3979
        int buf_size2 = 0;
4042
        int buf_size2 = 0;
3980
        buf2 = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
4043
        buf2 = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
3981
4044
Lines 3990-3995 Link Here
3990
                if(size <= 0) continue;
4053
                if(size <= 0) continue;
3991
                switch(AV_RB32(start)){
4054
                switch(AV_RB32(start)){
3992
                case VC1_CODE_FRAME:
4055
                case VC1_CODE_FRAME:
4056
#ifdef HAVE_VDPAU
4057
                    buf_vdpau = start;
4058
#endif
3993
                    buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
4059
                    buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
3994
                    break;
4060
                    break;
3995
                case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
4061
                case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
Lines 4009-4014 Link Here
4009
            divider = find_next_marker(buf, buf + buf_size);
4075
            divider = find_next_marker(buf, buf + buf_size);
4010
            if((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD){
4076
            if((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD){
4011
                av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
4077
                av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
4078
                av_free(buf2);
4012
                return -1;
4079
                return -1;
4013
            }
4080
            }
4014
4081
Lines 4074-4090 Link Here
4074
        return -1;
4141
        return -1;
4075
    }
4142
    }
4076
4143
4144
#ifdef HAVE_VDPAU
4145
    // MPV_frame_start() calls to  get_buffer/videoSurfaces. Now we call
4146
    // VDPAU_vc1_field_start where picture-parameters are filled.
4147
    // VDPAU_vc1_picture_complete calls to vdpau_decoder_render.
4148
4149
    if (avctx->vdpau_acceleration) {
4150
        if (VDPAU_vc1_decode_picture(s, avctx, v, buf_vdpau, (buf + buf_size) - buf_vdpau) < 0) {
4151
            av_free(buf2);
4152
            return -1;
4153
        }
4154
    }
4155
#endif
4156
4077
    s->me.qpel_put= s->dsp.put_qpel_pixels_tab;
4157
    s->me.qpel_put= s->dsp.put_qpel_pixels_tab;
4078
    s->me.qpel_avg= s->dsp.avg_qpel_pixels_tab;
4158
    s->me.qpel_avg= s->dsp.avg_qpel_pixels_tab;
4079
4159
4080
    ff_er_frame_start(s);
4160
#ifdef HAVE_VDPAU
4161
    if (!avctx->vdpau_acceleration) {
4162
#endif
4163
        ff_er_frame_start(s);
4081
4164
4082
    v->bits = buf_size * 8;
4165
        v->bits = buf_size * 8;
4083
    vc1_decode_blocks(v);
4166
        vc1_decode_blocks(v);
4084
//av_log(s->avctx, AV_LOG_INFO, "Consumed %i/%i bits\n", get_bits_count(&s->gb), buf_size*8);
4167
//av_log(s->avctx, AV_LOG_INFO, "Consumed %i/%i bits\n", get_bits_count(&s->gb), buf_size*8);
4085
//  if(get_bits_count(&s->gb) > buf_size * 8)
4168
//  if(get_bits_count(&s->gb) > buf_size * 8)
4086
//      return -1;
4169
//      return -1;
4087
    ff_er_frame_end(s);
4170
        ff_er_frame_end(s);
4171
#ifdef HAVE_VDPAU
4172
    }
4173
#endif
4088
4174
4089
    MPV_frame_end(s);
4175
    MPV_frame_end(s);
4090
4176
Lines 4154-4156 Link Here
4154
    CODEC_CAP_DELAY,
4240
    CODEC_CAP_DELAY,
4155
    NULL
4241
    NULL
4156
};
4242
};
4243
4244
#ifdef HAVE_VDPAU
4245
static int vc1_vdpau_decode_init(AVCodecContext *avctx){
4246
    if( avctx->thread_count > 1)
4247
        return -1;
4248
    if( !(avctx->slice_flags & SLICE_FLAG_CODED_ORDER) )
4249
        return -1;
4250
    if( !(avctx->slice_flags & SLICE_FLAG_ALLOW_FIELD) ){
4251
        dprintf(avctx, "vc1.c: VDPAU decoder does not set SLICE_FLAG_ALLOW_FIELD\n");
4252
    }
4253
    avctx->vdpau_acceleration = 1;
4254
    vc1_decode_init(avctx);
4255
    avctx->pix_fmt = PIX_FMT_NONE;
4256
4257
    return 0;
4258
}
4259
4260
AVCodec wmv3_vdpau_decoder = {
4261
    "wmv3_vdpau",
4262
    CODEC_TYPE_VIDEO,
4263
    CODEC_ID_WMV3_VDPAU,
4264
    sizeof(VC1Context),
4265
    vc1_vdpau_decode_init,
4266
    NULL,
4267
    vc1_decode_end,
4268
    vc1_decode_frame,
4269
    CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_HWACCEL_VDPAU,
4270
    NULL,
4271
};
4272
4273
AVCodec vc1_vdpau_decoder = {
4274
    "vc1_vdpau",
4275
    CODEC_TYPE_VIDEO,
4276
    CODEC_ID_VC1_VDPAU,
4277
    sizeof(VC1Context),
4278
    vc1_vdpau_decode_init,
4279
    NULL,
4280
    vc1_decode_end,
4281
    vc1_decode_frame,
4282
    CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_HWACCEL_VDPAU,
4283
    NULL,
4284
};
4285
#endif
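Note on the two decoder entries above: vc1_vdpau_decode_init() refuses to open unless the caller has prepared the context first. The snippet below is only an illustrative sketch of that contract; the open_vc1_vdpau() helper is hypothetical, and the real setup is done in avformatdecoder.cpp further down in this patch.

    extern "C" {
    #include "libavcodec/avcodec.h"
    }

    // Hypothetical caller: open a VC-1 stream with the vc1_vdpau decoder added above.
    static int open_vc1_vdpau(AVCodecContext *avctx)
    {
        avcodec_register_all();                    // make "vc1_vdpau" findable by name
        avctx->slice_flags  = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD;
        avctx->thread_count = 1;                   // vc1_vdpau_decode_init() rejects threading
        // The caller must also install get_buffer()/draw_horiz_band() hooks that
        // attach a vdpau_render_state_t to each frame (see avformatdecoder.cpp below).
        AVCodec *codec = avcodec_find_decoder_by_name("vc1_vdpau");
        return codec ? avcodec_open(avctx, codec) : -1;
    }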
(-)mythtv.ori/libs/libavcodec/vc1.h (+1 lines)
Lines 183-188 Link Here
183
    int interlace;        ///< Progressive/interlaced (RPTFTM syntax element)
183
    int interlace;        ///< Progressive/interlaced (RPTFTM syntax element)
184
    int tfcntrflag;       ///< TFCNTR present
184
    int tfcntrflag;       ///< TFCNTR present
185
    int panscanflag;      ///< NUMPANSCANWIN, TOPLEFT{X,Y}, BOTRIGHT{X,Y} present
185
    int panscanflag;      ///< NUMPANSCANWIN, TOPLEFT{X,Y}, BOTRIGHT{X,Y} present
186
    int refdist_flag;     ///< REFDIST syntax element present in II, IP, PI or PP field picture headers
186
    int extended_dmv;     ///< Additional extended dmv range at P/B frame-level
187
    int extended_dmv;     ///< Additional extended dmv range at P/B frame-level
187
    int color_prim;       ///< 8bits, chroma coordinates of the color primaries
188
    int color_prim;       ///< 8bits, chroma coordinates of the color primaries
188
    int transfer_char;    ///< 8bits, Opto-electronic transfer characteristics
189
    int transfer_char;    ///< 8bits, Opto-electronic transfer characteristics
(-)mythtv.ori/libs/libavcodec/vc1_parser.c (-1 / +3 lines)
Lines 109-115 Link Here
109
}
109
}
110
110
111
AVCodecParser vc1_parser = {
111
AVCodecParser vc1_parser = {
112
    { CODEC_ID_VC1 },
112
    { CODEC_ID_VC1,
113
      CODEC_ID_VC1_VDPAU
114
    },
113
    sizeof(ParseContext1),
115
    sizeof(ParseContext1),
114
    NULL,
116
    NULL,
115
    vc1_parse,
117
    vc1_parse,
(-)mythtv.ori/libs/libavcodec/vdpau_render.h (+61 lines)
Line 0 Link Here
1
/*
2
 * Video Decode and Presentation API for UNIX (VDPAU) is used for
3
 * HW decode acceleration for MPEG-1/2, H.264 and VC-1.
4
 *
5
 * Copyright (C) 2008 NVIDIA.
6
 *
7
 * This file is part of FFmpeg.
8
 *
9
 * FFmpeg is free software; you can redistribute it and/or
10
 * modify it under the terms of the GNU Lesser General Public
11
 * License as published by the Free Software Foundation; either
12
 * version 2.1 of the License, or (at your option) any later version.
13
 *
14
 * FFmpeg is distributed in the hope that it will be useful,
15
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
17
 * Lesser General Public License for more details.
18
 *
19
 * You should have received a copy of the GNU Lesser General Public
20
 * License along with FFmpeg; if not, write to the Free Software
21
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22
 */
23
24
#ifndef FFMPEG_VDPAU_RENDER_H
25
#define FFMPEG_VDPAU_RENDER_H
26
27
#include "vdpau/vdpau.h"
28
#include "vdpau/vdpau_x11.h"
29
30
/**
31
 * \brief The videoSurface is used for render.
32
 */
33
#define MP_VDPAU_STATE_USED_FOR_RENDER 1
34
35
/**
36
 * \brief The videoSurface is needed for reference/prediction,
37
 * codec manipulates this.
38
 */
39
#define MP_VDPAU_STATE_USED_FOR_REFERENCE 2
40
41
#define MP_VDPAU_RENDER_MAGIC 0x1DC8E14B
42
43
typedef struct {
44
    int  magic;
45
46
    VdpVideoSurface surface; //used as rendered surface, never changed.
47
48
    int state; // Holds MP_VDPAU_STATE_* values
49
50
    union _VdpPictureInfo {
51
        VdpPictureInfoMPEG1Or2 mpeg;
52
        VdpPictureInfoH264     h264;
53
        VdpPictureInfoVC1       vc1;
54
    } info;
55
56
    int bitstreamBuffersAlloced;
57
    int bitstreamBuffersUsed;
58
    VdpBitstreamBuffer *bitstreamBuffers;
59
} vdpau_render_state_t;
60
61
#endif /* FFMPEG_VDPAU_RENDER_H */
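The header above only carries state between the decoder and the renderer; the consumer side is not part of this excerpt. Purely as a hypothetical sketch (submit_frame() and its render_proc argument are not from the patch), a video-output consumer could validate the state and hand it to VDPAU roughly like this:

    #include <vdpau/vdpau.h>
    extern "C" {
    #include "libavcodec/vdpau_render.h"
    }

    // Sketch only: check the magic on the render state attached to a decoded
    // frame and pass its picture info plus bitstream buffers to VdpDecoderRender.
    static bool submit_frame(VdpDecoder decoder, unsigned char *frame_buf,
                             VdpDecoderRender *render_proc)
    {
        vdpau_render_state_t *render = (vdpau_render_state_t *)frame_buf;
        if (!render || render->magic != MP_VDPAU_RENDER_MAGIC)
            return false;                          // not a VDPAU render packet

        VdpStatus st = render_proc(decoder, render->surface,
                                   (VdpPictureInfo const *)&render->info,
                                   render->bitstreamBuffersUsed,
                                   render->bitstreamBuffers);
        render->bitstreamBuffersUsed = 0;          // buffers have been consumed
        return st == VDP_STATUS_OK;
    }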
(-)mythtv.ori/libs/libavcodec/vdpauvideo.c (+428 lines)
Line 0 Link Here
1
/*
2
 * Video Decode and Presentation API for UNIX (VDPAU) is used for
3
 * HW decode acceleration for MPEG-1/2, H.264 and VC-1.
4
 *
5
 * Copyright (c) 2008 NVIDIA.
6
 *
7
 * This file is part of FFmpeg.
8
 *  
9
 * FFmpeg is free software; you can redistribute it and/or
10
 * modify it under the terms of the GNU Lesser General Public
11
 * License as published by the Free Software Foundation; either
12
 * version 2.1 of the License, or (at your option) any later version.
13
 *
14
 * FFmpeg is distributed in the hope that it will be useful,
15
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
17
 * Lesser General Public License for more details.
18
 *
19
 * You should have received a copy of the GNU Lesser General Public
20
 * License along with FFmpeg; if not, write to the Free Software
21
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22
 */ 
23
24
#include <limits.h>
25
#include "avcodec.h"
26
#include "dsputil.h"
27
#include "mpegvideo.h"
28
#include "h264.h"
29
#include "vc1.h"
30
31
#undef NDEBUG
32
#include <assert.h>
33
34
#include "vdpau_render.h"
35
36
#define ARSIZE(_x_) (sizeof(_x_) / sizeof((_x_)[0]))
37
38
static void VDPAU_ensure_has_buffers(vdpau_render_state_t * render, int need_entries)
39
{
40
    int new_alloced;
41
42
    if (render->bitstreamBuffersAlloced >= need_entries) {
43
        return;
44
    }
45
46
    if (!render->bitstreamBuffersAlloced || !render->bitstreamBuffers) {
47
        new_alloced = 4;
48
    }
49
    else {
50
        new_alloced = render->bitstreamBuffersAlloced * 2;
51
    }
52
53
    render->bitstreamBuffers = av_realloc(
54
        render->bitstreamBuffers,
55
        new_alloced * sizeof(render->bitstreamBuffers[0])
56
    );
57
    render->bitstreamBuffersAlloced = new_alloced;
58
}
59
60
int VDPAU_mpeg_field_start(MpegEncContext *s)
61
{
62
    vdpau_render_state_t * render,* last, * next;
63
    int i;
64
    
65
    render = (vdpau_render_state_t*)s->current_picture.data[2];
66
    assert(render != NULL);
67
    assert(render->magic == MP_VDPAU_RENDER_MAGIC);
68
    if ((render == NULL) || (render->magic != MP_VDPAU_RENDER_MAGIC)) {
69
        return -1; // make sure that this is render packet
70
    }
71
72
    /* fill VdpPictureInfoMPEG1Or2 struct */
73
    render->info.mpeg.picture_structure          = s->picture_structure;
74
    render->info.mpeg.picture_coding_type        = s->pict_type;
75
    render->info.mpeg.intra_dc_precision         = s->intra_dc_precision;
76
    render->info.mpeg.frame_pred_frame_dct       = s->frame_pred_frame_dct;
77
    render->info.mpeg.concealment_motion_vectors = s->concealment_motion_vectors;
78
    render->info.mpeg.intra_vlc_format           = s->intra_vlc_format;
79
    render->info.mpeg.alternate_scan             = s->alternate_scan;
80
    render->info.mpeg.q_scale_type               = s->q_scale_type;
81
    render->info.mpeg.top_field_first            = s->top_field_first;
82
    render->info.mpeg.full_pel_forward_vector    = s->full_pel[0]; // MPEG-1 only.  Set 0 for MPEG-2
83
    render->info.mpeg.full_pel_backward_vector   = s->full_pel[1]; // MPEG-1 only.  Set 0 for MPEG-2
84
    render->info.mpeg.f_code[0][0]               = s->mpeg_f_code[0][0]; // For MPEG-1 fill both horiz. & vert.
85
    render->info.mpeg.f_code[0][1]               = s->mpeg_f_code[0][1];
86
    render->info.mpeg.f_code[1][0]               = s->mpeg_f_code[1][0];
87
    render->info.mpeg.f_code[1][1]               = s->mpeg_f_code[1][1];
88
    for (i = 0; i < 64; ++i) {
89
        render->info.mpeg.intra_quantizer_matrix[i]     = s->intra_matrix[i];
90
        render->info.mpeg.non_intra_quantizer_matrix[i] = s->inter_matrix[i];
91
    }
92
93
    render->info.mpeg.forward_reference  = VDP_INVALID_HANDLE;
94
    render->info.mpeg.backward_reference = VDP_INVALID_HANDLE;
95
96
    switch(s->pict_type){
97
    case  FF_I_TYPE:
98
        return 0; // no prediction from other frames
99
    case  FF_B_TYPE:
100
        next = (vdpau_render_state_t*)s->next_picture.data[2];
101
        assert(next != NULL);
102
        assert(next->magic == MP_VDPAU_RENDER_MAGIC);
103
        if ((next == NULL) || (next->magic != MP_VDPAU_RENDER_MAGIC)) {
104
            return -1;
105
        }
106
        render->info.mpeg.backward_reference = next->surface;
107
        // no return here, going to set forward prediction
108
    case  FF_P_TYPE:
109
        last = (vdpau_render_state_t*)s->last_picture.data[2];
110
        if (last == NULL) { // no reference available: predict second field from the first
111
            last = render;
112
        }
113
        assert(last->magic == MP_VDPAU_RENDER_MAGIC);
114
        if (last->magic != MP_VDPAU_RENDER_MAGIC) {
115
            return -1;
116
        }
117
        render->info.mpeg.forward_reference = last->surface;
118
        return 0;
119
    }
120
121
    return -1;
122
}
123
124
int VDPAU_mpeg_picture_complete(MpegEncContext *s, const uint8_t *buf, int buf_size, int slice_count)
125
{
126
    vdpau_render_state_t * render;
127
128
    if (!(s->current_picture_ptr))
129
        return -1;
130
131
    render = (vdpau_render_state_t*)s->current_picture_ptr->data[2];
132
    assert(render != NULL);
133
    assert(render->magic == MP_VDPAU_RENDER_MAGIC);
134
    if ((render == NULL) || (render->magic != MP_VDPAU_RENDER_MAGIC)) {
135
        return -1; // make sure that this is render packet
136
    }
137
138
    VDPAU_ensure_has_buffers(render, 1);
139
140
    render->bitstreamBuffers[0].struct_version  = VDP_BITSTREAM_BUFFER_VERSION;
141
    render->bitstreamBuffers[0].bitstream_bytes = buf_size;
142
    render->bitstreamBuffers[0].bitstream       = buf;
143
    render->bitstreamBuffersUsed                = 1;
144
145
    render->info.mpeg.slice_count               = slice_count;
146
147
    if (slice_count > 0) {
148
        ff_draw_horiz_band(s, 0, s->avctx->height);
149
    }
150
    render->bitstreamBuffersUsed = 0;
151
152
    return 0;
153
}
154
155
int VDPAU_h264_set_reference_frames(H264Context *h)
156
{
157
    MpegEncContext * s = &h->s;
158
    vdpau_render_state_t * render, * render_ref;
159
    VdpReferenceFrameH264 * rf, * rf2;
160
    Picture * pic;
161
    int i, list;
162
163
    render = (vdpau_render_state_t*)s->current_picture_ptr->data[2];
164
    assert(render != NULL);
165
    assert(render->magic == MP_VDPAU_RENDER_MAGIC);
166
    if ((render == NULL) || (render->magic != MP_VDPAU_RENDER_MAGIC))
167
        return -1; // make sure that this is render packet
168
169
    rf = &render->info.h264.referenceFrames[0];
170
#define H264_RF_COUNT ARSIZE(render->info.h264.referenceFrames)
171
172
    for (list = 0; list < 2; ++list) {
173
        Picture **lp = list ? h->long_ref : h->short_ref;
174
        int ls = list ? h->long_ref_count : h->short_ref_count;
175
176
        for (i = 0; i < ls; ++i) {
177
            pic = lp[i];
178
            if (!pic || !pic->reference) {
179
                continue;
180
            }
181
182
            render_ref = (vdpau_render_state_t*)pic->data[2];
183
            assert(render_ref != NULL);
184
            if (render_ref == NULL)
185
                return -1; // make sure that this is render packet
186
187
            rf2 = &render->info.h264.referenceFrames[0];
188
            while (rf2 != rf) {
189
                if (
190
                    (rf2->surface == render_ref->surface)
191
                    && (rf2->is_long_term == pic->long_ref)
192
                    && (rf2->frame_idx == pic->frame_num)
193
                ) {
194
                    break;
195
                }
196
                ++rf2;
197
            }
198
            if (rf2 != rf) {
199
                rf2->top_is_reference |= (pic->reference & PICT_TOP_FIELD) ? VDP_TRUE : VDP_FALSE;
200
                rf2->bottom_is_reference |= (pic->reference & PICT_BOTTOM_FIELD) ? VDP_TRUE : VDP_FALSE;
201
                continue;
202
            }
203
204
            if (rf >= &render->info.h264.referenceFrames[H264_RF_COUNT]) {
205
                continue;
206
            }
207
208
            rf->surface             = render_ref->surface;
209
            rf->is_long_term        = pic->long_ref;
210
            rf->top_is_reference    = (pic->reference & PICT_TOP_FIELD) ? VDP_TRUE : VDP_FALSE;
211
            rf->bottom_is_reference = (pic->reference & PICT_BOTTOM_FIELD) ? VDP_TRUE : VDP_FALSE;
212
            rf->field_order_cnt[0]  = pic->field_poc[0];
213
            rf->field_order_cnt[1]  = pic->field_poc[1];
214
            rf->frame_idx           = pic->frame_num;
215
216
            ++rf;
217
        }
218
    }
219
220
    for (; rf < &render->info.h264.referenceFrames[H264_RF_COUNT]; ++rf) {
221
        rf->surface             = VDP_INVALID_HANDLE;
222
        rf->is_long_term        = 0;
223
        rf->top_is_reference    = 0;
224
        rf->bottom_is_reference = 0;
225
        rf->field_order_cnt[0]  = 0;
226
        rf->field_order_cnt[1]  = 0;
227
        rf->frame_idx           = 0;
228
    }
229
230
    return 0;
231
}
232
233
extern int VDPAU_h264_add_data_chunk(H264Context *h, const uint8_t *buf, int buf_size)
234
{
235
    MpegEncContext * s = &h->s;
236
    vdpau_render_state_t * render;
237
238
    render = (vdpau_render_state_t*)s->current_picture_ptr->data[2];
239
    assert(render != NULL);
240
    assert(render->magic == MP_VDPAU_RENDER_MAGIC);
241
    if ((render == NULL) || (render->magic != MP_VDPAU_RENDER_MAGIC))
242
        return -1; // make sure that this is render packet
243
244
    if (!render->bitstreamBuffersUsed) {
245
        VDPAU_h264_set_reference_frames(h);
246
    }
247
248
    VDPAU_ensure_has_buffers(render, render->bitstreamBuffersUsed + 1);
249
250
    render->bitstreamBuffers[render->bitstreamBuffersUsed].struct_version  = VDP_BITSTREAM_BUFFER_VERSION;
251
    render->bitstreamBuffers[render->bitstreamBuffersUsed].bitstream       = buf;
252
    render->bitstreamBuffers[render->bitstreamBuffersUsed].bitstream_bytes = buf_size;
253
    render->bitstreamBuffersUsed++;
254
255
    return 0;
256
}
257
258
int VDPAU_h264_picture_complete(H264Context *h)
259
{
260
    MpegEncContext * s = &h->s;
261
    vdpau_render_state_t * render;
262
263
    render = (vdpau_render_state_t*)s->current_picture_ptr->data[2];
264
    assert(render != NULL);
265
    assert(render->magic == MP_VDPAU_RENDER_MAGIC);
266
    if ((render == NULL) || (render->magic != MP_VDPAU_RENDER_MAGIC))
267
        return -1; // make sure that this is render packet
268
269
    render->info.h264.slice_count = h->slice_num;
270
    if (render->info.h264.slice_count < 1)
271
        return 0;
272
273
    int i;
274
    for (i = 0; i < 2; ++i) {
275
        int foc = s->current_picture_ptr->field_poc[i];
276
        if (foc == INT_MAX) {
277
            foc = 0;
278
        }
279
        render->info.h264.field_order_cnt[i] = foc;
280
    }
281
282
    render->info.h264.is_reference                           = s->current_picture_ptr->reference ? VDP_TRUE : VDP_FALSE;
283
    render->info.h264.frame_num                              = h->frame_num;
284
    render->info.h264.field_pic_flag                         = (s->picture_structure != PICT_FRAME) ? 1 : 0;
285
    render->info.h264.bottom_field_flag                      = (s->picture_structure == PICT_BOTTOM_FIELD) ? 1 : 0;
286
    render->info.h264.num_ref_frames                         = h->sps.ref_frame_count;
287
    render->info.h264.mb_adaptive_frame_field_flag           = h->sps.mb_aff;
288
    render->info.h264.constrained_intra_pred_flag            = h->pps.constrained_intra_pred;
289
    render->info.h264.weighted_pred_flag                     = h->pps.weighted_pred;
290
    render->info.h264.weighted_bipred_idc                    = h->pps.weighted_bipred_idc;
291
    render->info.h264.frame_mbs_only_flag                    = h->sps.frame_mbs_only_flag;
292
    render->info.h264.transform_8x8_mode_flag                = h->pps.transform_8x8_mode;
293
    render->info.h264.chroma_qp_index_offset                 = h->pps.chroma_qp_index_offset[0];
294
    render->info.h264.second_chroma_qp_index_offset          = h->pps.chroma_qp_index_offset[1];
295
    render->info.h264.pic_init_qp_minus26                    = h->pps.init_qp - 26;
296
    render->info.h264.num_ref_idx_l0_active_minus1           = h->pps.ref_count[0] - 1;
297
    render->info.h264.num_ref_idx_l1_active_minus1           = h->pps.ref_count[1] - 1;
298
    render->info.h264.log2_max_frame_num_minus4              = h->sps.log2_max_frame_num - 4;
299
    render->info.h264.pic_order_cnt_type                     = h->sps.poc_type;
300
    render->info.h264.log2_max_pic_order_cnt_lsb_minus4      = h->sps.log2_max_poc_lsb - 4;
301
    render->info.h264.delta_pic_order_always_zero_flag       = h->sps.delta_pic_order_always_zero_flag;
302
    render->info.h264.direct_8x8_inference_flag              = h->sps.direct_8x8_inference_flag;
303
    render->info.h264.entropy_coding_mode_flag               = h->pps.cabac;
304
    render->info.h264.pic_order_present_flag                 = h->pps.pic_order_present;
305
    render->info.h264.deblocking_filter_control_present_flag = h->pps.deblocking_filter_parameters_present;
306
    render->info.h264.redundant_pic_cnt_present_flag = h->pps.redundant_pic_cnt_present;
307
    memcpy(render->info.h264.scaling_lists_4x4, h->pps.scaling_matrix4, sizeof(render->info.h264.scaling_lists_4x4));
308
    memcpy(render->info.h264.scaling_lists_8x8, h->pps.scaling_matrix8, sizeof(render->info.h264.scaling_lists_8x8));
309
310
    ff_draw_horiz_band(s, 0, s->avctx->height);
311
    render->bitstreamBuffersUsed = 0;
312
313
    return 0;
314
}
315
316
int VDPAU_vc1_decode_picture(MpegEncContext *s, AVCodecContext *avctx, VC1Context *v, const uint8_t *buf, int buf_size)
317
{
318
   // VC1Context *v = avctx->priv_data;
319
    vdpau_render_state_t * render,* last, * next;
320
321
    render = (vdpau_render_state_t*)s->current_picture.data[2];
322
    assert(render != NULL);
323
    assert(render->magic == MP_VDPAU_RENDER_MAGIC);
324
    if ((render == NULL) || (render->magic != MP_VDPAU_RENDER_MAGIC)) {
325
        return -1; // make sure that this is render packet
326
    }
327
    memset(&(render->info), 0 , sizeof(VdpPictureInfoVC1));
328
329
    /* fill VdpPictureInfoVC1 struct */
330
    render->info.vc1.frame_coding_mode  = v->fcm;
331
    render->info.vc1.postprocflag       = v->postprocflag;
332
    render->info.vc1.pulldown           = v->broadcast;
333
    render->info.vc1.interlace          = v->interlace;
334
    render->info.vc1.tfcntrflag         = v->tfcntrflag;
335
    render->info.vc1.finterpflag        = v->finterpflag;
336
    render->info.vc1.psf                = v->psf;
337
    render->info.vc1.dquant             = v->dquant;
338
    render->info.vc1.panscan_flag       = v->panscanflag;
339
    render->info.vc1.refdist_flag       = v->refdist_flag;
340
    render->info.vc1.quantizer          = v->quantizer_mode;
341
    render->info.vc1.extended_mv        = v->extended_mv;
342
    render->info.vc1.extended_dmv       = v->extended_dmv;
343
    render->info.vc1.overlap            = v->overlap;
344
    render->info.vc1.vstransform        = v->vstransform;
345
    render->info.vc1.loopfilter         = v->s.loop_filter;
346
    render->info.vc1.fastuvmc           = v->fastuvmc;
347
    render->info.vc1.range_mapy_flag    = v->range_mapy_flag;
348
    render->info.vc1.range_mapy         = v->range_mapy;
349
    render->info.vc1.range_mapuv_flag   = v->range_mapuv_flag;
350
    render->info.vc1.range_mapuv        = v->range_mapuv;
351
    /* Specific to simple/main profile only */
352
    render->info.vc1.multires           = v->multires;
353
    render->info.vc1.syncmarker         = v->s.resync_marker;
354
    render->info.vc1.rangered           = v->rangered;
355
    render->info.vc1.maxbframes         = v->s.max_b_frames;
356
    /* Presently, making these as 0 */
357
    render->info.vc1.deblockEnable      = 0;
358
    render->info.vc1.pquant             = 0;
359
360
    render->info.vc1.forward_reference  = VDP_INVALID_HANDLE;
361
    render->info.vc1.backward_reference = VDP_INVALID_HANDLE;
362
363
    switch(s->pict_type){
364
    case  FF_I_TYPE:
365
        render->info.vc1.picture_type = 0;
366
        break;
367
    case  FF_B_TYPE:
368
        if (v->bi_type) {
369
            render->info.vc1.picture_type = 4;
370
        }
371
        else {
372
            render->info.vc1.picture_type = 3;
373
        }
374
        break;
375
    case  FF_P_TYPE:
376
        render->info.vc1.picture_type = 1;
377
        break;
378
    case  FF_BI_TYPE:
379
        render->info.vc1.picture_type = 4;
380
        break;
381
    default:
382
        return -1;
383
    }
384
385
    switch(s->pict_type){
386
    case  FF_I_TYPE:
387
    case  FF_BI_TYPE:
388
        break;
389
    case  FF_B_TYPE:
390
        next = (vdpau_render_state_t*)s->next_picture.data[2];
391
        assert(next != NULL);
392
        assert(next->magic == MP_VDPAU_RENDER_MAGIC);
393
        if ((next == NULL) || (next->magic != MP_VDPAU_RENDER_MAGIC)) {
394
            return -1;
395
        }
396
        render->info.vc1.backward_reference = next->surface;
397
        // no break here, going to set forward prediction
398
    case  FF_P_TYPE:
399
        last = (vdpau_render_state_t*)s->last_picture.data[2];
400
        if (last == NULL) { // no reference available: predict second field from the first
401
            last = render;
402
        }
403
        assert(last->magic == MP_VDPAU_RENDER_MAGIC);
404
        if (last->magic != MP_VDPAU_RENDER_MAGIC) {
405
            return -1;
406
        }
407
        render->info.vc1.forward_reference = last->surface;
408
        break;
409
    default:
410
        return -1;
411
    }
412
413
    VDPAU_ensure_has_buffers(render, 1);
414
415
    render->bitstreamBuffers[0].struct_version  = VDP_BITSTREAM_BUFFER_VERSION;
416
    render->bitstreamBuffers[0].bitstream_bytes = buf_size;
417
    render->bitstreamBuffers[0].bitstream       = buf;
418
    render->bitstreamBuffersUsed                = 1;
419
420
    // FIXME: I am not sure about how MPlayer calculates slice number.
421
    render->info.vc1.slice_count                = 1;
422
423
    ff_draw_horiz_band(s, 0, s->avctx->height);  
424
    render->bitstreamBuffersUsed = 0;
425
426
    return 0;
427
}
428
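The hunks shown here add only the helpers; the MPEG-1/2 decoder hook that drives VDPAU_mpeg_field_start()/VDPAU_mpeg_picture_complete() lives in mpeg12.c and is not part of this excerpt. A rough sketch of the expected calling order (the wrapper name is hypothetical):

    #include <stdint.h>
    extern "C" {
    #include "libavcodec/mpegvideo.h"

    int VDPAU_mpeg_field_start(MpegEncContext *s);
    int VDPAU_mpeg_picture_complete(MpegEncContext *s, const uint8_t *buf,
                                    int buf_size, int slice_count);
    }

    // Hypothetical per-picture hook, called after MPV_frame_start() has run and
    // get_avf_buffer_vdpau() has attached the render state to the current picture.
    static int vdpau_mpeg_decode_picture(MpegEncContext *s, const uint8_t *buf,
                                         int buf_size, int slice_count)
    {
        if (VDPAU_mpeg_field_start(s) < 0)         // fills VdpPictureInfoMPEG1Or2
            return -1;
        // Attaches the slice data and fires draw_horiz_band -> render_slice_vdpau().
        return VDPAU_mpeg_picture_complete(s, buf, buf_size, slice_count);
    }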
(-)mythtv.ori/libs/libavutil/avutil.h (+9 lines)
Lines 106-111 Link Here
106
    PIX_FMT_YUV440P,   ///< Planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
106
    PIX_FMT_YUV440P,   ///< Planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
107
    PIX_FMT_YUVJ440P,  ///< Planar YUV 4:4:0 full scale (jpeg)
107
    PIX_FMT_YUVJ440P,  ///< Planar YUV 4:4:0 full scale (jpeg)
108
    PIX_FMT_YUVA420P,  ///< Planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
108
    PIX_FMT_YUVA420P,  ///< Planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
109
    PIX_FMT_VDPAU_MPEG1,
110
    PIX_FMT_VDPAU_MPEG2_SIMPLE,
111
    PIX_FMT_VDPAU_MPEG2_MAIN,
112
    PIX_FMT_VDPAU_H264_BASELINE,
113
    PIX_FMT_VDPAU_H264_MAIN,
114
    PIX_FMT_VDPAU_H264_HIGH,
115
    PIX_FMT_VDPAU_VC1_SIMPLE,
116
    PIX_FMT_VDPAU_VC1_MAIN,
117
    PIX_FMT_VDPAU_VC1_ADVANCED,
109
    PIX_FMT_NB,        ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
118
    PIX_FMT_NB,        ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
110
};
119
};
111
120
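The nine PIX_FMT_VDPAU_* values added above are only markers; the translation to an actual VDPAU decoder profile happens in the video-output code (util-vdpau.cpp, added by this patch but not shown in this excerpt). A hypothetical one-to-one mapping, for illustration only:

    #include <vdpau/vdpau.h>
    extern "C" {
    #include "libavutil/avutil.h"
    }

    // Illustrative helper (not in the patch): pick the VDPAU decoder profile that
    // corresponds to one of the new hardware pixel formats.  Returns 0 on success.
    static int vdpau_profile_for_pix_fmt(enum PixelFormat fmt, VdpDecoderProfile *profile)
    {
        switch (fmt)
        {
            case PIX_FMT_VDPAU_MPEG1:         *profile = VDP_DECODER_PROFILE_MPEG1;         return 0;
            case PIX_FMT_VDPAU_MPEG2_SIMPLE:  *profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE;  return 0;
            case PIX_FMT_VDPAU_MPEG2_MAIN:    *profile = VDP_DECODER_PROFILE_MPEG2_MAIN;    return 0;
            case PIX_FMT_VDPAU_H264_BASELINE: *profile = VDP_DECODER_PROFILE_H264_BASELINE; return 0;
            case PIX_FMT_VDPAU_H264_MAIN:     *profile = VDP_DECODER_PROFILE_H264_MAIN;     return 0;
            case PIX_FMT_VDPAU_H264_HIGH:     *profile = VDP_DECODER_PROFILE_H264_HIGH;     return 0;
            case PIX_FMT_VDPAU_VC1_SIMPLE:    *profile = VDP_DECODER_PROFILE_VC1_SIMPLE;    return 0;
            case PIX_FMT_VDPAU_VC1_MAIN:      *profile = VDP_DECODER_PROFILE_VC1_MAIN;      return 0;
            case PIX_FMT_VDPAU_VC1_ADVANCED:  *profile = VDP_DECODER_PROFILE_VC1_ADVANCED;  return 0;
            default:                          return -1;  // not a VDPAU pixel format
        }
    }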
(-)mythtv.ori/libs/libmythtv/NuppelVideoPlayer.cpp (+5 lines)
Lines 5641-5651 Link Here
5641
        case kCodec_MPEG1_IDCT:
5641
        case kCodec_MPEG1_IDCT:
5642
        case kCodec_MPEG1_VLD:
5642
        case kCodec_MPEG1_VLD:
5643
        case kCodec_MPEG1_DVDV:
5643
        case kCodec_MPEG1_DVDV:
5644
        case kCodec_MPEG1_VDPAU:
5644
        case kCodec_MPEG2:
5645
        case kCodec_MPEG2:
5645
        case kCodec_MPEG2_XVMC:
5646
        case kCodec_MPEG2_XVMC:
5646
        case kCodec_MPEG2_IDCT:
5647
        case kCodec_MPEG2_IDCT:
5647
        case kCodec_MPEG2_VLD:
5648
        case kCodec_MPEG2_VLD:
5648
        case kCodec_MPEG2_DVDV:
5649
        case kCodec_MPEG2_DVDV:
5650
        case kCodec_MPEG2_VDPAU:
5649
            return "MPEG-2";
5651
            return "MPEG-2";
5650
5652
5651
        case kCodec_H263:
5653
        case kCodec_H263:
Lines 5653-5658 Link Here
5653
        case kCodec_H263_IDCT:
5655
        case kCodec_H263_IDCT:
5654
        case kCodec_H263_VLD:
5656
        case kCodec_H263_VLD:
5655
        case kCodec_H263_DVDV:
5657
        case kCodec_H263_DVDV:
5658
        case kCodec_H263_VDPAU:
5656
            return "H.263";
5659
            return "H.263";
5657
5660
5658
        case kCodec_NUV_MPEG4:
5661
        case kCodec_NUV_MPEG4:
Lines 5661-5666 Link Here
5661
        case kCodec_MPEG4_XVMC:
5664
        case kCodec_MPEG4_XVMC:
5662
        case kCodec_MPEG4_VLD:
5665
        case kCodec_MPEG4_VLD:
5663
        case kCodec_MPEG4_DVDV:
5666
        case kCodec_MPEG4_DVDV:
5667
        case kCodec_MPEG4_VDPAU:
5664
            return "MPEG-4";
5668
            return "MPEG-4";
5665
5669
5666
        case kCodec_H264:
5670
        case kCodec_H264:
Lines 5668-5673 Link Here
5668
        case kCodec_H264_IDCT:
5672
        case kCodec_H264_IDCT:
5669
        case kCodec_H264_VLD:
5673
        case kCodec_H264_VLD:
5670
        case kCodec_H264_DVDV:
5674
        case kCodec_H264_DVDV:
5675
        case kCodec_H264_VDPAU:
5671
            return "H.264";
5676
            return "H.264";
5672
5677
5673
        case kCodec_NONE:
5678
        case kCodec_NONE:
(-)mythtv.ori/libs/libmythtv/avformatdecoder.cpp (-24 / +145 lines)
Lines 37-49 Link Here
37
}
37
}
38
#endif // USING_XVMC
38
#endif // USING_XVMC
39
39
40
#ifdef USING_VDPAU
41
#include "videoout_xv.h"
42
extern "C" {
43
#include "libavcodec/vdpau_render.h"
44
}
45
#endif // USING_VDPAU
46
40
extern "C" {
47
extern "C" {
41
#include "../libavutil/avutil.h"
48
#include "../libavutil/avutil.h"
42
#include "../libavcodec/ac3_parser.h"
49
#include "../libavcodec/ac3_parser.h"
50
#include "../libavcodec/mpegvideo.h"
51
#include "../libavformat/avio.h"
43
#include "../libmythmpeg2/mpeg2.h"
52
#include "../libmythmpeg2/mpeg2.h"
44
#include "ivtv_myth.h"
53
#include "ivtv_myth.h"
45
// from libavcodec
54
//// from libavcodec
46
extern const uint8_t *ff_find_start_code(const uint8_t * restrict p, const uint8_t *end, uint32_t * restrict state);
55
//extern const uint8_t *ff_find_start_code(const uint8_t * restrict p, const uint8_t *end, uint32_t * restrict state);
47
}
56
}
48
57
49
#define LOC QString("AFD: ")
58
#define LOC QString("AFD: ")
Lines 73-78 Link Here
73
                       int offset[4], int y, int type, int height);
82
                       int offset[4], int y, int type, int height);
74
void decode_cc_dvd(struct AVCodecContext *c, const uint8_t *buf, int buf_size);
83
void decode_cc_dvd(struct AVCodecContext *c, const uint8_t *buf, int buf_size);
75
84
85
int get_avf_buffer_vdpau(struct AVCodecContext *c, AVFrame *pic);
86
void release_avf_buffer_vdpau(struct AVCodecContext *c, AVFrame *pic);
87
void render_slice_vdpau(struct AVCodecContext *s, const AVFrame *src,
88
                        int offset[4], int y, int type, int height);
89
76
static void myth_av_log(void *ptr, int level, const char* fmt, va_list vl)
90
static void myth_av_log(void *ptr, int level, const char* fmt, va_list vl)
77
{
91
{
78
    static QString full_line("");
92
    static QString full_line("");
Lines 1117-1122 Link Here
1117
        enc->draw_horiz_band  = NULL;
1131
        enc->draw_horiz_band  = NULL;
1118
        directrendering      |= selectedStream;
1132
        directrendering      |= selectedStream;
1119
    }
1133
    }
1134
    else if (codec && (codec->id == CODEC_ID_MPEGVIDEO_VDPAU ||
1135
                       codec->id == CODEC_ID_H264_VDPAU ||
1136
                       codec->id == CODEC_ID_VC1_VDPAU ||
1137
                       codec->id == CODEC_ID_WMV3_VDPAU))
1138
    {
1139
        enc->get_buffer      = get_avf_buffer_vdpau;
1140
        enc->release_buffer  = release_avf_buffer_vdpau;
1141
        enc->draw_horiz_band = render_slice_vdpau;
1142
        enc->slice_flags     = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD;
1143
        directrendering     |= selectedStream;
1144
    }
1120
    else if (codec && codec->capabilities & CODEC_CAP_DR1)
1145
    else if (codec && codec->capabilities & CODEC_CAP_DR1)
1121
    {
1146
    {
1122
        enc->flags          |= CODEC_FLAG_EMU_EDGE;
1147
        enc->flags          |= CODEC_FLAG_EMU_EDGE;
Lines 1148-1154 Link Here
1148
    }
1173
    }
1149
}
1174
}
1150
1175
1151
#if defined(USING_XVMC) || defined(USING_DVDV)
1176
#if defined(USING_XVMC) || defined(USING_DVDV) || defined(USING_VDPAU)
1152
static int mpeg_version(int codec_id)
1177
static int mpeg_version(int codec_id)
1153
{
1178
{
1154
    switch (codec_id)
1179
    switch (codec_id)
Lines 1159-1181 Link Here
1159
        case CODEC_ID_MPEG2VIDEO_XVMC:
1184
        case CODEC_ID_MPEG2VIDEO_XVMC:
1160
        case CODEC_ID_MPEG2VIDEO_XVMC_VLD:
1185
        case CODEC_ID_MPEG2VIDEO_XVMC_VLD:
1161
        case CODEC_ID_MPEG2VIDEO_DVDV:
1186
        case CODEC_ID_MPEG2VIDEO_DVDV:
1187
        case CODEC_ID_MPEGVIDEO_VDPAU:
1162
            return 2;
1188
            return 2;
1163
        case CODEC_ID_H263:
1189
        case CODEC_ID_H263:
1164
            return 3;
1190
            return 3;
1165
        case CODEC_ID_MPEG4:
1191
        case CODEC_ID_MPEG4:
1166
            return 4;
1192
            return 4;
1167
        case CODEC_ID_H264:
1193
        case CODEC_ID_H264:
1194
        case CODEC_ID_H264_VDPAU:
1168
            return 5;
1195
            return 5;
1196
        case CODEC_ID_VC1:
1197
        case CODEC_ID_VC1_VDPAU:
1198
            return 6;
1199
        case CODEC_ID_WMV3:
1200
        case CODEC_ID_WMV3_VDPAU:
1201
            return 7;
1202
        default:
1203
            break;
1169
    }
1204
    }
1170
    return 0;
1205
    return 0;
1171
}
1206
}
1172
#endif // defined(USING_XVMC) || defined(USING_DVDV)
1173
1207
1174
#ifdef USING_XVMC
1175
static int xvmc_pixel_format(enum PixelFormat pix_fmt)
1208
static int xvmc_pixel_format(enum PixelFormat pix_fmt)
1176
{
1209
{
1177
    (void) pix_fmt;
1210
    (void) pix_fmt;
1211
#ifdef USING_XVMC
1178
    int xvmc_chroma = XVMC_CHROMA_FORMAT_420;
1212
    int xvmc_chroma = XVMC_CHROMA_FORMAT_420;
1213
#else
1214
    int xvmc_chroma = 0;
1215
#endif
1216
1179
#if 0
1217
#if 0
1180
// We don't support other chromas yet
1218
// We don't support other chromas yet
1181
    if (PIX_FMT_YUV420P == pix_fmt)
1219
    if (PIX_FMT_YUV420P == pix_fmt)
Lines 1187-1193 Link Here
1187
#endif
1225
#endif
1188
    return xvmc_chroma;
1226
    return xvmc_chroma;
1189
}
1227
}
1190
#endif // USING_XVMC
1228
#endif
1191
1229
1192
void default_captions(sinfo_vec_t *tracks, int av_index)
1230
void default_captions(sinfo_vec_t *tracks, int av_index)
1193
{
1231
{
Lines 1377-1383 Link Here
1377
 */
1415
 */
1378
void AvFormatDecoder::ScanDSMCCStreams(void)
1416
void AvFormatDecoder::ScanDSMCCStreams(void)
1379
{
1417
{
1380
    if (!ic->cur_pmt_sect)
1418
    if (!ic || !ic->cur_pmt_sect)
1381
        return;
1419
        return;
1382
1420
1383
    if (!itv && ! (itv = GetNVP()->GetInteractiveTV()))
1421
    if (!itv && ! (itv = GetNVP()->GetInteractiveTV()))
Lines 1490-1510 Link Here
1490
1528
1491
                uint width  = max(enc->width, 16);
1529
                uint width  = max(enc->width, 16);
1492
                uint height = max(enc->height, 16);
1530
                uint height = max(enc->height, 16);
1493
                VideoDisplayProfile vdp;
1531
                QString dec = "ffmpeg";
1494
                vdp.SetInput(QSize(width, height));
1532
                uint thread_count = 1;
1495
                QString dec = vdp.GetDecoder();
1496
                uint thread_count = vdp.GetMaxCPUs();
1497
                VERBOSE(VB_PLAYBACK, QString("Using %1 CPUs for decoding")
1498
                        .arg(ENABLE_THREADS ? thread_count : 1));
1499
1500
                if (ENABLE_THREADS && thread_count > 1)
1501
                {
1502
                    avcodec_thread_init(enc, thread_count);
1503
                    enc->thread_count = thread_count;
1504
                }
1505
1533
1506
                bool handled = false;
1534
                bool handled = false;
1507
#ifdef USING_XVMC
1535
#if defined(USING_VDPAU) || defined(USING_XVMC)
1508
                if (!using_null_videoout && mpeg_version(enc->codec_id))
1536
                if (!using_null_videoout && mpeg_version(enc->codec_id))
1509
                {
1537
                {
1510
                    // HACK -- begin
1538
                    // HACK -- begin
Lines 1538-1546 Link Here
1538
                        /* xvmc pix fmt */ xvmc_pixel_format(enc->pix_fmt),
1566
                        /* xvmc pix fmt */ xvmc_pixel_format(enc->pix_fmt),
1539
                        /* test surface */ kCodec_NORMAL_END > video_codec_id,
1567
                        /* test surface */ kCodec_NORMAL_END > video_codec_id,
1540
                        /* force_xv     */ force_xv);
1568
                        /* force_xv     */ force_xv);
1541
                    bool vcd, idct, mc;
1569
                    bool vcd, idct, mc, vdpau;
1542
                    enc->codec_id = (CodecID)
1570
                    enc->codec_id = (CodecID)
1543
                        myth2av_codecid(mcid, vcd, idct, mc);
1571
                        myth2av_codecid(mcid, vcd, idct, mc, vdpau);
1544
1572
1545
                    if (ringBuffer && ringBuffer->isDVD() && 
1573
                    if (ringBuffer && ringBuffer->isDVD() && 
1546
                        (mcid == video_codec_id) &&
1574
                        (mcid == video_codec_id) &&
Lines 1576-1581 Link Here
1576
                }
1604
                }
1577
#endif // USING_XVMC || USING_DVDV
1605
#endif // USING_XVMC || USING_DVDV
1578
1606
1607
                VideoDisplayProfile vdp;
1608
                vdp.SetInput(QSize(width, height));
1609
                dec = vdp.GetDecoder();
1610
                thread_count = vdp.GetMaxCPUs();
1611
1612
                if (video_codec_id > kCodec_NORMAL_END)
1613
                    thread_count = 1;
1614
1615
                VERBOSE(VB_PLAYBACK, QString("Using %1 CPUs for decoding")
1616
                        .arg(ENABLE_THREADS ? thread_count : 1));
1617
1618
                if (ENABLE_THREADS && thread_count > 1)
1619
                {
1620
                    avcodec_thread_init(enc, thread_count);
1621
                    enc->thread_count = thread_count;
1622
                }
1623
1579
                if (!handled)
1624
                if (!handled)
1580
                {
1625
                {
1581
                    if (CODEC_ID_H264 == enc->codec_id)
1626
                    if (CODEC_ID_H264 == enc->codec_id)
Lines 2102-2107 Link Here
2102
    }
2147
    }
2103
}
2148
}
2104
2149
2150
int get_avf_buffer_vdpau(struct AVCodecContext *c, AVFrame *pic)
2151
{
2152
    AvFormatDecoder *nd = (AvFormatDecoder *)(c->opaque);
2153
    VideoFrame *frame = nd->GetNVP()->GetNextVideoFrame(false);
2154
2155
    pic->data[0] = frame->priv[0];
2156
    pic->data[1] = frame->priv[1];
2157
    pic->data[2] = frame->buf;
2158
2159
    pic->linesize[0] = 0;
2160
    pic->linesize[1] = 0;
2161
    pic->linesize[2] = 0;
2162
2163
    pic->opaque = frame;
2164
    pic->type = FF_BUFFER_TYPE_USER;
2165
2166
    pic->age = 256 * 256 * 256 * 64;
2167
2168
    frame->pix_fmt = c->pix_fmt;
2169
2170
#ifdef USING_VDPAU
2171
    vdpau_render_state_t *render = (vdpau_render_state_t *)frame->buf;
2172
    render->state |= MP_VDPAU_STATE_USED_FOR_REFERENCE;
2173
#endif
2174
2175
    return 0;
2176
}
2177
2178
void release_avf_buffer_vdpau(struct AVCodecContext *c, AVFrame *pic)
2179
{
2180
    assert(pic->type == FF_BUFFER_TYPE_USER);
2181
2182
#ifdef USING_VDPAU
2183
    vdpau_render_state_t *render = (vdpau_render_state_t *)pic->data[2];
2184
    render->state &= ~MP_VDPAU_STATE_USED_FOR_REFERENCE;
2185
#endif
2186
2187
    AvFormatDecoder *nd = (AvFormatDecoder *)(c->opaque);
2188
    if (nd && nd->GetNVP() && nd->GetNVP()->getVideoOutput())
2189
        nd->GetNVP()->getVideoOutput()->DeLimboFrame((VideoFrame*)pic->opaque);
2190
2191
    for (uint i = 0; i < 4; i++)
2192
        pic->data[i] = NULL;
2193
}
2194
2195
void render_slice_vdpau(struct AVCodecContext *s, const AVFrame *src,
2196
                        int offset[4], int y, int type, int height)
2197
{
2198
    if (!src)
2199
        return;
2200
2201
    (void)offset;
2202
    (void)type;
2203
2204
    if (s && src && s->opaque && src->opaque)
2205
    {
2206
        AvFormatDecoder *nd = (AvFormatDecoder *)(s->opaque);
2207
2208
        int width = s->width;
2209
2210
        VideoFrame *frame = (VideoFrame *)src->opaque;
2211
        nd->GetNVP()->DrawSlice(frame, 0, y, width, height);
2212
    }
2213
    else
2214
    {
2215
        VERBOSE(VB_IMPORTANT, LOC +
2216
                "render_slice_xvmc called with bad avctx or src");
2217
    }
2218
}
2219
2105
void decode_cc_dvd(struct AVCodecContext *s, const uint8_t *buf, int buf_size)
2220
void decode_cc_dvd(struct AVCodecContext *s, const uint8_t *buf, int buf_size)
2106
{
2221
{
2107
    // taken from xine-lib libspucc by Christian Vogler
2222
    // taken from xine-lib libspucc by Christian Vogler
Lines 3247-3258 Link Here
3247
            if (context->codec_id == CODEC_ID_MPEG1VIDEO ||
3362
            if (context->codec_id == CODEC_ID_MPEG1VIDEO ||
3248
                context->codec_id == CODEC_ID_MPEG2VIDEO ||
3363
                context->codec_id == CODEC_ID_MPEG2VIDEO ||
3249
                context->codec_id == CODEC_ID_MPEG2VIDEO_XVMC ||
3364
                context->codec_id == CODEC_ID_MPEG2VIDEO_XVMC ||
3250
                context->codec_id == CODEC_ID_MPEG2VIDEO_XVMC_VLD)
3365
                context->codec_id == CODEC_ID_MPEG2VIDEO_XVMC_VLD ||
3366
                context->codec_id == CODEC_ID_MPEGVIDEO_VDPAU)
3251
            {
3367
            {
3252
                if (!ringBuffer->isDVD())
3368
                if (!ringBuffer->isDVD())
3253
                    MpegPreProcessPkt(curstream, pkt);
3369
                    MpegPreProcessPkt(curstream, pkt);
3254
            }
3370
            }
3255
            else if (context->codec_id == CODEC_ID_H264)
3371
            else if (context->codec_id == CODEC_ID_H264 ||
3372
                     context->codec_id == CODEC_ID_H264_VDPAU)
3256
            {
3373
            {
3257
                H264PreProcessPkt(curstream, pkt);
3374
                H264PreProcessPkt(curstream, pkt);
3258
            }
3375
            }
Lines 3926-3931 Link Here
3926
        (video_codec_id < kCodec_VLD_END))
4043
        (video_codec_id < kCodec_VLD_END))
3927
        return "xvmc-vld";
4044
        return "xvmc-vld";
3928
4045
4046
    if ((video_codec_id > kCodec_DVDV_END) &&
4047
        (video_codec_id < kCodec_VDPAU_END))
4048
        return "vdpau";
4049
3929
    return "ffmpeg";
4050
    return "ffmpeg";
3930
}
4051
}
3931
4052
(-)mythtv.ori/libs/libmythtv/avformatdecoder.h (+2 lines)
Lines 21-30 Link Here
21
#define CODEC_IS_MPEG(c)     (c == CODEC_ID_MPEG1VIDEO      || \
21
#define CODEC_IS_MPEG(c)     (c == CODEC_ID_MPEG1VIDEO      || \
22
                              c == CODEC_ID_MPEG2VIDEO      || \
22
                              c == CODEC_ID_MPEG2VIDEO      || \
23
                              c == CODEC_ID_MPEG2VIDEO_DVDV || \
23
                              c == CODEC_ID_MPEG2VIDEO_DVDV || \
24
                              c == CODEC_ID_MPEGVIDEO_VDPAU || \
24
                              c == CODEC_ID_MPEG2VIDEO_XVMC || \
25
                              c == CODEC_ID_MPEG2VIDEO_XVMC || \
25
                              c == CODEC_ID_MPEG2VIDEO_XVMC_VLD)
26
                              c == CODEC_ID_MPEG2VIDEO_XVMC_VLD)
26
27
27
#define CODEC_IS_HW_ACCEL(c) (c == CODEC_ID_MPEG2VIDEO_DVDV || \
28
#define CODEC_IS_HW_ACCEL(c) (c == CODEC_ID_MPEG2VIDEO_DVDV || \
29
                              c == CODEC_ID_MPEGVIDEO_VDPAU || \
28
                              c == CODEC_ID_MPEG2VIDEO_XVMC || \
30
                              c == CODEC_ID_MPEG2VIDEO_XVMC || \
29
                              c == CODEC_ID_MPEG2VIDEO_XVMC_VLD)
31
                              c == CODEC_ID_MPEG2VIDEO_XVMC_VLD)
30
32
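With these two additions, CODEC_ID_MPEGVIDEO_VDPAU is classified both as an MPEG codec (so MpegPreProcessPkt() keeps running on it, as the avformatdecoder.cpp hunk above shows) and as a hardware-accelerated one whose frames carry an opaque surface rather than planar YUV. A small, hypothetical call-site sketch:

    extern "C" {
    #include "libavcodec/avcodec.h"
    }
    #include "avformatdecoder.h"   // CODEC_IS_MPEG / CODEC_IS_HW_ACCEL

    // Hypothetical helpers: bitstream-level preprocessing still applies to the
    // VDPAU MPEG id, while software-only frame handling must be skipped for it.
    static bool wants_mpeg_preprocessing(CodecID id) { return CODEC_IS_MPEG(id); }
    static bool frame_is_opaque_surface(CodecID id)  { return CODEC_IS_HW_ACCEL(id); }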
(-)mythtv.ori/libs/libmythtv/dtvrecorder.cpp (-2 / +1 lines)
Lines 13-20 Link Here
13
#include "tv_rec.h"
13
#include "tv_rec.h"
14
14
15
extern "C" {
15
extern "C" {
16
// from libavcodec
16
#include "../libavcodec/mpegvideo.h"
17
extern const uint8_t *ff_find_start_code(const uint8_t * restrict p, const uint8_t *end, uint32_t * restrict state);
18
}
17
}
19
18
20
#define LOC QString("DTVRec(%1): ").arg(tvrec->GetCaptureCardNum())
19
#define LOC QString("DTVRec(%1): ").arg(tvrec->GetCaptureCardNum())
(-)mythtv.ori/libs/libmythtv/filtermanager.cpp (+4 lines)
Lines 174-179 Link Here
174
    {
174
    {
175
        QString FiltName = (*i).section('=', 0, 0);
175
        QString FiltName = (*i).section('=', 0, 0);
176
        QString FiltOpts = (*i).section('=', 1);
176
        QString FiltOpts = (*i).section('=', 1);
177
178
        if (FiltName.contains("opengl"))
179
            continue;
180
177
        FI = GetFilterInfoByName(FiltName);
181
        FI = GetFilterInfoByName(FiltName);
178
182
179
        if (FI)
183
        if (FI)
(-)mythtv.ori/libs/libmythtv/frame.h (-5 / +9 lines)
Lines 21-27 Link Here
21
    FMT_ARGB32,
21
    FMT_ARGB32,
22
    FMT_RGBA32,
22
    FMT_RGBA32,
23
    FMT_YUV422P,
23
    FMT_YUV422P,
24
    FMT_ALPHA,
24
    FMT_BGRA,
25
    FMT_VDPAU
25
} VideoFrameType;
26
} VideoFrameType;
26
27
27
typedef struct VideoFrame_
28
typedef struct VideoFrame_
Lines 49-54 Link Here
49
50
50
    int pitches[3]; // Y, U, & V pitches
51
    int pitches[3]; // Y, U, & V pitches
51
    int offsets[3]; // Y, U, & V offsets
52
    int offsets[3]; // Y, U, & V offsets
53
54
    int pix_fmt;
52
} VideoFrame;
55
} VideoFrame;
53
56
54
#ifdef __cplusplus
57
#ifdef __cplusplus
Lines 88-95 Link Here
88
    vf->top_field_first  = 1;
91
    vf->top_field_first  = 1;
89
    vf->repeat_pict      = 0;
92
    vf->repeat_pict      = 0;
90
    vf->forcekey         = 0;
93
    vf->forcekey         = 0;
94
    vf->pix_fmt          = 0;
91
95
92
    // MS Windows doesn't like bzero()..
93
    memset(vf->priv, 0, 4 * sizeof(unsigned char *));
96
    memset(vf->priv, 0, 4 * sizeof(unsigned char *));
94
97
95
    if (p)
98
    if (p)
Lines 145-153 Link Here
145
    {
148
    {
146
        int uv_height = vf->height >> 1;
149
        int uv_height = vf->height >> 1;
147
        // MS Windows doesn't like bzero()..
150
        // MS Windows doesn't like bzero()..
148
        memset(vf->buf + vf->offsets[0],   0, vf->pitches[0] * vf->height);
151
        // JYA: Patch to prevent some crashes with VDPAU, don't fix cause of problems
149
        memset(vf->buf + vf->offsets[1], 127, vf->pitches[1] * uv_height);
152
        memset(vf->buf + vf->offsets[0],   0, vf->pitches[0] * vf->height > 0 ? vf->pitches[0] * vf->height : 0);
150
        memset(vf->buf + vf->offsets[2], 127, vf->pitches[2] * uv_height);
153
        memset(vf->buf + vf->offsets[1], 127, vf->pitches[1] * uv_height > 0 ? vf->pitches[1] * uv_height : 0);
154
        memset(vf->buf + vf->offsets[2], 127, vf->pitches[2] * uv_height > 0 ? vf->pitches[2] * uv_height : 0);
151
    }
155
    }
152
}
156
}
153
157
(-)mythtv.ori/libs/libmythtv/libmythtv.pro (+20 lines)
Lines 7-12 Link Here
7
target.path = $${LIBDIR}
7
target.path = $${LIBDIR}
8
INSTALLS = target
8
INSTALLS = target
9
9
10
POSTINC = 
11
12
contains(INCLUDEPATH, /usr/include) {
13
  POSTINC += /usr/include
14
  INCLUDEPATH -= /usr/include
15
}
16
contains(INCLUDEPATH, /usr/local/include) {
17
  POSTINC += /usr/local/include
18
  INCLUDEPATH -= /usr/local/include
19
}
20
10
INCLUDEPATH += ../.. .. .
21
INCLUDEPATH += ../.. .. .
11
INCLUDEPATH += ../libmyth ../libavcodec ../libavutil ../libmythmpeg2
22
INCLUDEPATH += ../libmyth ../libavcodec ../libavutil ../libmythmpeg2
12
INCLUDEPATH += ./dvbdev ./mpeg ./iptv
23
INCLUDEPATH += ./dvbdev ./mpeg ./iptv
Lines 14-19 Link Here
14
INCLUDEPATH += ../libmythlivemedia/groupsock/include
25
INCLUDEPATH += ../libmythlivemedia/groupsock/include
15
INCLUDEPATH += ../libmythlivemedia/liveMedia/include
26
INCLUDEPATH += ../libmythlivemedia/liveMedia/include
16
INCLUDEPATH += ../libmythlivemedia/UsageEnvironment/include
27
INCLUDEPATH += ../libmythlivemedia/UsageEnvironment/include
28
INCLUDEPATH += ../libmythui
29
INCLUDEPATH += $$POSTINC
17
30
18
DEPENDPATH  += ../libmyth ../libavcodec ../libavformat ../libavutil
31
DEPENDPATH  += ../libmyth ../libavcodec ../libavformat ../libavutil
19
DEPENDPATH  += ../libmythmpeg2 ../libmythdvdnav
32
DEPENDPATH  += ../libmythmpeg2 ../libmythdvdnav
Lines 301-306 Link Here
301
    using_xvmc_vld:DEFINES += USING_XVMC_VLD
314
    using_xvmc_vld:DEFINES += USING_XVMC_VLD
302
    using_xvmc_pbuffer:DEFINES += USING_XVMC_PBUFFER
315
    using_xvmc_pbuffer:DEFINES += USING_XVMC_PBUFFER
303
316
317
    using_vdpau {
318
        DEFINES += USING_VDPAU
319
        HEADERS += util-vdpau.h
320
        SOURCES += util-vdpau.cpp
321
        LIBS += -lvdpau
322
    }
323
304
    using_opengl {
324
    using_opengl {
305
        CONFIG += opengl
325
        CONFIG += opengl
306
        DEFINES += USING_OPENGL
326
        DEFINES += USING_OPENGL
(-)mythtv.ori/libs/libmythtv/mythcodecid.cpp (-3 / +54 lines)
Lines 28-33 Link Here
28
            return "MPEG4";
28
            return "MPEG4";
29
        case kCodec_H264:
29
        case kCodec_H264:
30
            return "H.264";
30
            return "H.264";
31
        case kCodec_VC1:
32
            return "VC-1";
33
        case kCodec_WMV3:
34
            return "WMV3";
31
    
35
    
32
        case kCodec_MPEG1_XVMC:
36
        case kCodec_MPEG1_XVMC:
33
            return "MPEG1 XvMC";
37
            return "MPEG1 XvMC";
Lines 73-78 Link Here
73
        case kCodec_H264_DVDV:
77
        case kCodec_H264_DVDV:
74
            return "H.264 DVDV";
78
            return "H.264 DVDV";
75
79
80
        case kCodec_MPEG1_VDPAU:
81
            return "MPEG1 VDPAU";
82
        case kCodec_MPEG2_VDPAU:
83
            return "MPEG2 VDPAU";
84
        case kCodec_H263_VDPAU:
85
            return "H.263 VDPAU";
86
        case kCodec_MPEG4_VDPAU:
87
            return "MPEG4 VDPAU";
88
        case kCodec_H264_VDPAU:
89
            return "H.264 VDPAU";
90
        case kCodec_VC1_VDPAU:
91
            return "VC1 VDPAU";
92
        case kCodec_WMV3_VDPAU:
93
            return "WMV3 VDPAU";
94
76
        default:
95
        default:
77
            break;
96
            break;
78
    }
97
    }
Lines 81-89 Link Here
81
}
100
}
82
101
83
int myth2av_codecid(MythCodecID codec_id,
102
int myth2av_codecid(MythCodecID codec_id,
84
                    bool &vld, bool &idct, bool &mc)
103
                    bool &vld, bool &idct, bool &mc, bool &vdpau)
85
{
104
{
86
    vld = idct = mc = false;
105
    vld = idct = mc = vdpau = false;
87
    CodecID ret = CODEC_ID_NONE;
106
    CodecID ret = CODEC_ID_NONE;
88
    switch (codec_id)
107
    switch (codec_id)
89
    {
108
    {
Lines 109-114 Link Here
109
            ret = CODEC_ID_H264;
128
            ret = CODEC_ID_H264;
110
            break;
129
            break;
111
130
131
        case kCodec_VC1:
132
            ret = CODEC_ID_VC1;
133
            break;
134
        case kCodec_WMV3:
135
            ret = CODEC_ID_WMV3;
136
            break;
137
112
        case kCodec_MPEG1_XVMC:
138
        case kCodec_MPEG1_XVMC:
113
        case kCodec_MPEG2_XVMC:
139
        case kCodec_MPEG2_XVMC:
114
            mc = true;
140
            mc = true;
Lines 168-179 Link Here
168
            VERBOSE(VB_IMPORTANT, "Error: DVDV H.263 not supported by ffmpeg");
194
            VERBOSE(VB_IMPORTANT, "Error: DVDV H.263 not supported by ffmpeg");
169
            break;
195
            break;
170
        case kCodec_MPEG4_DVDV:
196
        case kCodec_MPEG4_DVDV:
171
            VERBOSE(VB_IMPORTANT, "Error: DVDV MPEG not supported by ffmpeg");
197
            VERBOSE(VB_IMPORTANT, "Error: DVDV MPEG4 not supported by ffmpeg");
172
            break;
198
            break;
173
        case kCodec_H264_DVDV:
199
        case kCodec_H264_DVDV:
174
            VERBOSE(VB_IMPORTANT, "Error: DVDV H.265 not supported by ffmpeg");
200
            VERBOSE(VB_IMPORTANT, "Error: DVDV H.265 not supported by ffmpeg");
175
            break;
201
            break;
176
202
203
        case kCodec_MPEG1_VDPAU:
204
        case kCodec_MPEG2_VDPAU:
205
            ret = CODEC_ID_MPEGVIDEO_VDPAU;
206
            vdpau = true;
207
            break;
208
        case kCodec_H263_VDPAU:
209
            VERBOSE(VB_IMPORTANT, "Error: VDPAU H.263 not supported by ffmpeg");
210
            break;
211
        case kCodec_MPEG4_VDPAU:
212
            VERBOSE(VB_IMPORTANT, "Error: VDPAU MPEG4 not supported by ffmpeg");
213
            break;
214
215
        case kCodec_H264_VDPAU:
216
            ret = CODEC_ID_H264_VDPAU;
217
            vdpau = true;
218
            break;
219
        case kCodec_VC1_VDPAU:
220
            ret = CODEC_ID_VC1_VDPAU;
221
            vdpau = true;
222
            break;
223
        case kCodec_WMV3_VDPAU:
224
            ret = CODEC_ID_WMV3_VDPAU;
225
            vdpau = true;
226
            break;
227
177
        default:
228
        default:
178
            VERBOSE(VB_IMPORTANT,
229
            VERBOSE(VB_IMPORTANT,
179
                    QString("Error: MythCodecID %1 has not been "
230
                    QString("Error: MythCodecID %1 has not been "
(-)mythtv.ori/libs/libmythtv/mythcodecid.h (-4 / +27 lines)
Lines 9-14 Link Here
9
// myth2av_codecid, and NuppelVideoPlayer::GetEncodingType()
9
// myth2av_codecid, and NuppelVideoPlayer::GetEncodingType()
10
    kCodec_NONE = 0,
10
    kCodec_NONE = 0,
11
11
12
    kCodec_NORMAL_BEGIN = kCodec_NONE,
13
12
    kCodec_NUV_RTjpeg,
14
    kCodec_NUV_RTjpeg,
13
    kCodec_NUV_MPEG4,
15
    kCodec_NUV_MPEG4,
14
16
Lines 17-25 Link Here
17
    kCodec_H263,
19
    kCodec_H263,
18
    kCodec_MPEG4,
20
    kCodec_MPEG4,
19
    kCodec_H264,
21
    kCodec_H264,
22
    kCodec_VC1,
23
    kCodec_WMV3,
20
    
24
    
21
    kCodec_NORMAL_END,
25
    kCodec_NORMAL_END,
22
26
27
    kCodec_STD_XVMC_BEGIN = kCodec_NORMAL_END,
28
23
    kCodec_MPEG1_XVMC,
29
    kCodec_MPEG1_XVMC,
24
    kCodec_MPEG2_XVMC,
30
    kCodec_MPEG2_XVMC,
25
    kCodec_H263_XVMC,
31
    kCodec_H263_XVMC,
Lines 34-39 Link Here
34
40
35
    kCodec_STD_XVMC_END,
41
    kCodec_STD_XVMC_END,
36
42
43
    kCodec_VLD_BEGIN = kCodec_STD_XVMC_END,
44
37
    kCodec_MPEG1_VLD,
45
    kCodec_MPEG1_VLD,
38
    kCodec_MPEG2_VLD,
46
    kCodec_MPEG2_VLD,
39
    kCodec_H263_VLD,
47
    kCodec_H263_VLD,
Lines 42-63 Link Here
42
50
43
    kCodec_VLD_END,
51
    kCodec_VLD_END,
44
52
53
    kCodec_DVDV_BEGIN = kCodec_VLD_END,
54
45
    kCodec_MPEG1_DVDV,
55
    kCodec_MPEG1_DVDV,
46
    kCodec_MPEG2_DVDV,
56
    kCodec_MPEG2_DVDV,
47
    kCodec_H263_DVDV,
57
    kCodec_H263_DVDV,
48
    kCodec_MPEG4_DVDV,
58
    kCodec_MPEG4_DVDV,
49
    kCodec_H264_DVDV,
59
    kCodec_H264_DVDV,
50
60
51
    kCodec_DVDV_END
61
    kCodec_DVDV_END,
62
63
    kCodec_VDPAU_BEGIN = kCodec_DVDV_END,
64
65
    kCodec_MPEG1_VDPAU,
66
    kCodec_MPEG2_VDPAU,
67
    kCodec_H263_VDPAU,
68
    kCodec_MPEG4_VDPAU,
69
    kCodec_H264_VDPAU,
70
    kCodec_VC1_VDPAU,
71
    kCodec_WMV3_VDPAU,
72
73
    kCodec_VDPAU_END,
52
74
53
} MythCodecID;
75
} MythCodecID;
54
76
55
QString toString(MythCodecID codecid);
77
QString toString(MythCodecID codecid);
56
int myth2av_codecid(MythCodecID codec_id, bool &vld, bool &idct, bool &mc);
78
int myth2av_codecid(MythCodecID codec_id, bool &vld, bool &idct, bool &mc,
79
                    bool &vdpau);
57
inline int myth2av_codecid(MythCodecID codec_id)
80
inline int myth2av_codecid(MythCodecID codec_id)
58
{
81
{
59
    bool vld, idct, mc;
82
    bool vld, idct, mc, vdpau;
60
    return myth2av_codecid(codec_id, vld, idct, mc);
83
    return myth2av_codecid(codec_id, vld, idct, mc, vdpau);
61
}
84
}
62
85
63
#endif // _MYTH_CODEC_ID_H_
86
#endif // _MYTH_CODEC_ID_H_
(-)mythtv.ori/libs/libmythtv/openglcontext.cpp (-101 / +631 lines)
Lines 4-19 Link Here
4
4
5
#include "util-opengl.h"
5
#include "util-opengl.h"
6
6
7
7
#define LOC QString("GLCtx: ")
8
#define LOC QString("GLCtx: ")
8
#define LOC_ERR QString("GLCtx, Error: ")
9
#define LOC_ERR QString("GLCtx, Error: ")
9
10
11
OpenGLContextLocker::OpenGLContextLocker(OpenGLContext *ctx)
12
              : m_ctx(ctx)
13
{
14
    if (m_ctx)
15
        m_ctx->MakeCurrent(true);
16
}
17
OpenGLContextLocker::~OpenGLContextLocker()
18
{
19
    if (m_ctx)
20
        m_ctx->MakeCurrent(false);
21
}
22
23
class MythGLTexture
24
{
25
  public:
26
    MythGLTexture() :
27
        m_type(GL_TEXTURE_2D), m_data(NULL), m_data_size(0),
28
        m_data_type(GL_UNSIGNED_BYTE), m_data_fmt(GL_BGRA),
29
        m_internal_fmt(GL_RGBA8), m_pbo(0),
30
        m_filter(GL_LINEAR), m_wrap(GL_CLAMP_TO_EDGE),
31
        m_size(0,0), m_vid_size(0,0)
32
    {
33
    }
34
35
    ~MythGLTexture()
36
    {
37
    }
38
39
    GLuint  m_type;
40
    unsigned char *m_data;
41
    uint    m_data_size;
42
    GLuint  m_data_type;
43
    GLuint  m_data_fmt;
44
    GLuint  m_internal_fmt;
45
    GLuint  m_pbo;
46
    GLuint  m_filter;
47
    GLuint  m_wrap;
48
    QSize   m_size;
49
    QSize   m_vid_size;
50
};
51
10
class PrivateContext
52
class PrivateContext
11
{
53
{
12
  public:
54
  public:
13
    PrivateContext() :
55
    PrivateContext() :
14
        m_glx_fbconfig(0), m_gl_window(0), m_glx_window(0),
56
        m_glx_fbconfig(0), m_gl_window(0), m_glx_window(0),
15
        m_glx_context(NULL),
57
        m_glx_context(NULL), m_texture_type(0),
16
        m_texture_type(GL_TEXTURE_2D), m_textures_enabled(false),
17
        m_vis_info(NULL), m_attr_list(NULL)
58
        m_vis_info(NULL), m_attr_list(NULL)
18
    {
59
    {
19
    }
60
    }
Lines 27-47 Link Here
27
    GLXWindow    m_glx_window;
68
    GLXWindow    m_glx_window;
28
    GLXContext   m_glx_context;
69
    GLXContext   m_glx_context;
29
    int          m_texture_type;
70
    int          m_texture_type;
30
    bool         m_textures_enabled;
31
    XVisualInfo *m_vis_info;
71
    XVisualInfo *m_vis_info;
32
    int const   *m_attr_list;
72
    int const   *m_attr_list;
33
73
34
    vector<GLuint> m_textures;
74
    map<GLuint, MythGLTexture> m_textures;
35
    vector<GLuint> m_programs;
75
    vector<GLuint> m_programs;
36
    vector<GLuint> m_framebuffers;
76
    vector<GLuint> m_framebuffers;
77
    GLuint         m_fence;
37
};
78
};
38
79
39
OpenGLContext::OpenGLContext() :
80
OpenGLContext::OpenGLContext(QMutex *lock) :
40
    m_priv(new PrivateContext()),
81
    m_priv(new PrivateContext()),
41
    m_display(NULL), m_screen_num(0),
82
    m_display(NULL), m_screen_num(0),
42
    m_major_ver(1), m_minor_ver(2),
83
    m_major_ver(1), m_minor_ver(2),
43
    m_extensions(QString::null), m_ext_supported(0),
84
    m_extensions(QString::null), m_ext_supported(0),
44
    m_visible(true), m_max_tex_size(0)
85
    m_ext_used(0),
86
    m_max_tex_size(0), m_viewport(0,0),
87
    m_lock(lock), m_lock_level(0),
88
    m_colour_control(false)
45
{
89
{
46
    if (!init_opengl())
90
    if (!init_opengl())
47
        VERBOSE(VB_PLAYBACK, LOC_ERR + "Failed to initialize OpenGL support.");
91
        VERBOSE(VB_PLAYBACK, LOC_ERR + "Failed to initialize OpenGL support.");
Lines 56-66 Link Here
         DeletePrograms();
         DeleteTextures();
         DeleteFrameBuffers();
-    }
 
-    glFlush();
+        Flush(true);
 
-    MakeCurrent(false);
+        if (m_priv->m_fence &&
+            (m_ext_supported & kGLNVFence))
+        {
+            gMythGLDeleteFencesNV(1, &(m_priv->m_fence));
+        }
+    }
+
+    Flush(false);
 
     if (m_priv->m_glx_window)
     {
Lines 74-79 Link Here
         m_priv->m_gl_window = 0;
     }
 
+    MakeCurrent(false);
+
     if (m_priv->m_glx_context)
     {
         X11S(glXDestroyContext(m_display, m_priv->m_glx_context));
Lines 89-110 Link Here
 
 void OpenGLContext::Hide(void)
 {
+    MakeCurrent(true);
     X11S(XUnmapWindow(m_display, m_priv->m_gl_window));
+    MakeCurrent(false);
 }
 
 void OpenGLContext::Show(void)
 {
+    MakeCurrent(true);
     X11S(XMapWindow(m_display, m_priv->m_gl_window));
+    MakeCurrent(false);
 }
 
 // locking ok
 bool OpenGLContext::Create(
     Display *XJ_disp, Window XJ_curwin, uint screen_num,
-    const QSize &display_visible_size, bool visible)
+    const QRect &display_visible, bool colour_control)
 {
     static bool debugged = false;
 
-    m_visible = visible;
+    m_colour_control = colour_control;
     m_display = XJ_disp;
     m_screen_num = screen_num;
     uint major, minor;
Lines 180-186 Link Here
     }
 
     m_priv->m_gl_window = get_gl_window(
-        XJ_disp, XJ_curwin, m_priv->m_vis_info, display_visible_size, visible);
+        XJ_disp, XJ_curwin, m_priv->m_vis_info, display_visible);
 
     if (!m_priv->m_gl_window)
     {
Lines 202-209 Link Here
         }
     }
 
-    VERBOSE(VB_PLAYBACK, LOC + QString("Created window%1 and context.")
-            .arg(m_visible ? "" : " (Offscreen)"));
+    VERBOSE(VB_PLAYBACK, LOC + QString("Created window and context."));
 
     {
         MakeCurrent(true);
Lines 237-251 Link Here
         MakeCurrent(false);
     }
 
-    int tex_type = get_gl_texture_rect_type(m_extensions);
-    m_priv->m_texture_type = (tex_type) ? tex_type : m_priv->m_texture_type;
-
     m_ext_supported =
-        ((tex_type) ? kGLExtRect : 0) |
+        ((get_gl_texture_rect_type(m_extensions)) ? kGLExtRect : 0) |
         ((has_gl_fragment_program_support(m_extensions)) ?
          kGLExtFragProg : 0) |
+        ((has_gl_pixelbuffer_object_support(m_extensions)) ?
+         kGLExtPBufObj : 0) |
         ((has_gl_fbuffer_object_support(m_extensions)) ? kGLExtFBufObj : 0) |
-        ((minor >= 3) ? kGLXPBuffer : 0);
+        ((has_gl_nvfence_support(m_extensions)) ? kGLNVFence : 0) |
+        ((minor >= 3) ? kGLXPBuffer : 0) | kGLFinish;
+
+    m_ext_used = m_ext_supported;
+
+    MakeCurrent(true);
+
+    if (m_ext_used & kGLNVFence)
+    {
+        gMythGLGenFencesNV(1, &(m_priv->m_fence));
+        if (m_priv->m_fence)
+            VERBOSE(VB_PLAYBACK, LOC + "Using GL_NV_fence");
+    }
+
+    Init2DState();
+    MakeCurrent(false);
 
     return true;
 }
Lines 253-282 Link Here
 // locking ok
 bool OpenGLContext::MakeCurrent(bool current)
 {
-    bool ok;
+    bool ok = true;
 
     if (current)
     {
-        if (IsGLXSupported(1,3))
-        {
-            X11S(ok = glXMakeCurrent(m_display,
-                                     m_priv->m_glx_window,
-                                     m_priv->m_glx_context));
-        }
-        else
-        {
-            X11S(ok = glXMakeCurrent(m_display,
-                                     m_priv->m_gl_window,
-                                     m_priv->m_glx_context));
-        }
+        m_lock->lock();
+        if (m_lock_level == 0)
+        {
+            if (IsGLXSupported(1,3))
+            {
+                X11S(ok = glXMakeContextCurrent(m_display,
+                                         m_priv->m_glx_window,
+                                         m_priv->m_glx_window,
+                                         m_priv->m_glx_context));
+            }
+            else
+            {
+                X11S(ok = glXMakeCurrent(m_display,
+                                         m_priv->m_gl_window,
+                                         m_priv->m_glx_context));
+            }
+        }
+        m_lock_level++;
     }
     else
     {
-        X11S(ok = glXMakeCurrent(m_display, None, NULL));
+        m_lock_level--;
+        if (m_lock_level == 0)
+        {
+            if (IsGLXSupported(1,3))
+            {
+                X11S(ok = glXMakeContextCurrent(m_display, None, None, NULL));
+            }
+            else
+            {
+                X11S(ok = glXMakeCurrent(m_display, None, NULL));
+            }
+        }
+        else if (m_lock_level < 0)
+        {
+            VERBOSE(VB_PLAYBACK, LOC_ERR + "Mis-matched calls to MakeCurrent");
+        }
+        m_lock->unlock();
     }
 
     if (!ok)
-        VERBOSE(VB_PLAYBACK, LOC + "Could not make context current.");
+        VERBOSE(VB_PLAYBACK, LOC_ERR + "Could not make context current.");
 
     return ok;
 }
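
The reworked MakeCurrent() above pairs the supplied QMutex with a nesting counter (m_lock_level), so calls may be nested as long as every MakeCurrent(true) is matched by a MakeCurrent(false); only the outermost pair actually binds and releases the GLX context. This sketch of the intended calling pattern is illustrative only and assumes the QMutex passed to the OpenGLContext constructor is recursive (otherwise the nested lock() would block):

    ctx->MakeCurrent(true);   // lock taken, context bound      (level 0 -> 1)
    ctx->MakeCurrent(true);   // nested call, context kept      (level 1 -> 2)
    // ... GL work ...
    ctx->MakeCurrent(false);  // context still bound            (level 2 -> 1)
    ctx->MakeCurrent(false);  // context released, lock dropped (level 1 -> 0)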
Lines 284-331 Link Here
284
// locking ok
375
// locking ok
285
void OpenGLContext::SwapBuffers(void)
376
void OpenGLContext::SwapBuffers(void)
286
{
377
{
287
    if (m_visible)
378
    MakeCurrent(true);
288
    {
289
        MakeCurrent(true);
290
379
380
    if (m_ext_used & kGLFinish)
291
        glFinish();
381
        glFinish();
292
        if (IsGLXSupported(1,3))
293
            X11S(glXSwapBuffers(m_display, m_priv->m_glx_window));
294
        else
295
            X11S(glXSwapBuffers(m_display, m_priv->m_gl_window));
296
382
297
        MakeCurrent(false);
383
    if (IsGLXSupported(1,3))
298
    }
384
        X11S(glXSwapBuffers(m_display, m_priv->m_glx_window));
385
    else
386
        X11S(glXSwapBuffers(m_display, m_priv->m_gl_window));
387
388
    MakeCurrent(false);
299
}
389
}
300
390
301
// locking ok
391
// locking ok
302
void OpenGLContext::Flush(void)
392
void OpenGLContext::Flush(bool use_fence)
303
{
393
{
304
    glFlush();
394
    MakeCurrent(true);
395
396
    if ((m_ext_used & kGLNVFence) &&
397
        m_priv->m_fence && use_fence)
398
    {
399
        gMythGLSetFenceNV(m_priv->m_fence, GL_ALL_COMPLETED_NV);
400
        gMythGLFinishFenceNV(m_priv->m_fence);
401
    }
402
    else
403
    {
404
        glFlush();
405
    }
406
    
407
    MakeCurrent(false);
305
}
408
}
306
409
307
// locking ok
410
// locking ok
308
void OpenGLContext::EnableTextures(void)
411
void OpenGLContext::EnableTextures(uint tex, uint tex_type)
309
{
412
{
310
    if (!m_priv->m_textures_enabled)
413
    MakeCurrent(true);
414
415
    int type = tex ? m_priv->m_textures[tex].m_type : tex_type;
416
417
    if (type != m_priv->m_texture_type)
311
    {
418
    {
312
        m_priv->m_textures_enabled = true;
419
        if (m_priv->m_texture_type)
420
        {
421
            glDisable(m_priv->m_texture_type);
422
        }
423
        glEnable(type);
424
        m_priv->m_texture_type = type;
425
    }
313
426
314
        MakeCurrent(true);
427
    MakeCurrent(false);
315
        glEnable(GetTextureType());
428
}
316
        MakeCurrent(false);
429
430
void OpenGLContext::DisableTextures(void)
431
{
432
    MakeCurrent(true);
433
434
    glDisable(m_priv->m_texture_type);
435
    m_priv->m_texture_type = 0;
436
437
    MakeCurrent(false);
438
}
439
440
void OpenGLContext::UpdateTexture(uint tex,
441
                       const unsigned char *buf,
442
                       const int *offsets,
443
                       const int *pitches,
444
                       VideoFrameType fmt,
445
                       bool interlaced,
446
                       const unsigned char* alpha)
447
{
448
    MakeCurrent(true);
449
450
    MythGLTexture *tmp_tex = &m_priv->m_textures[tex];
451
    QSize size = tmp_tex->m_vid_size;
452
453
    EnableTextures(tex);
454
    glBindTexture(tmp_tex->m_type, tex);
455
456
    if (tmp_tex->m_pbo)
457
    {
458
        void *pboMemory;
459
460
        gMythGLBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, tmp_tex->m_pbo);
461
        gMythGLBufferDataARB(GL_PIXEL_UNPACK_BUFFER_ARB,
462
                             tmp_tex->m_data_size, NULL, GL_STREAM_DRAW);
463
464
        pboMemory = gMythGLMapBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB,
465
                                        GL_WRITE_ONLY);
466
467
        if (FMT_BGRA == fmt)
468
        {
469
            memcpy(pboMemory, buf, tmp_tex->m_data_size);
470
        }
471
        else if (interlaced)
472
        {
473
            pack_yv12interlaced(buf, (unsigned char *)pboMemory,
474
                                offsets, pitches, size);
475
        }
476
        else
477
        {
478
            pack_yv12alpha(buf, (unsigned char *)pboMemory,
479
                           offsets, pitches, size, alpha);
480
        }
481
482
        gMythGLUnmapBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB);
483
484
        glTexSubImage2D(tmp_tex->m_type, 0, 0, 0, size.width(), size.height(),
485
                        tmp_tex->m_data_fmt, tmp_tex->m_data_type, 0);
486
487
        gMythGLBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
317
    }
488
    }
489
    else
490
    {
491
        if (!tmp_tex->m_data)
492
        {
493
            unsigned char *scratch = new unsigned char[tmp_tex->m_data_size];
494
            if (scratch)
495
            {
496
                bzero(scratch, tmp_tex->m_data_size);
497
                tmp_tex->m_data = scratch;
498
            }
499
        }
500
501
        if (tmp_tex->m_data)
502
        {
503
            const unsigned char *tmp = tmp_tex->m_data;
504
505
            if (FMT_BGRA == fmt)
506
            {
507
                tmp = buf;
508
            }
509
            else if (interlaced)
510
            {
511
                pack_yv12interlaced(buf, tmp,
512
                                    offsets, pitches, size);
513
            }
514
            else
515
            {
516
                pack_yv12alpha(buf, tmp, offsets,
517
                               pitches, size, alpha);
518
            }
519
520
            glTexSubImage2D(tmp_tex->m_type, 0, 0, 0,
521
                            size.width(), size.height(),
522
                            tmp_tex->m_data_fmt, tmp_tex->m_data_type,
523
                            tmp);
524
        }
525
    }
526
527
    MakeCurrent(false);
318
}
528
}
319
529
320
// locking ok
530
// locking ok
321
uint OpenGLContext::CreateTexture(void)
531
uint OpenGLContext::CreateTexture(QSize tot_size, QSize vid_size,
322
{
532
                                  bool use_pbo,
533
                                  uint type, uint data_type,
534
                                  uint data_fmt, uint internal_fmt,
535
                                  uint filter, uint wrap)
536
{
537
    if ((uint)tot_size.width() > m_max_tex_size ||
538
        (uint)tot_size.height() > m_max_tex_size)
539
        return 0;
540
323
    MakeCurrent(true);
541
    MakeCurrent(true);
324
542
543
    EnableTextures(0, type);
544
325
    GLuint tex;
545
    GLuint tex;
326
    glGenTextures(1, &tex);
546
    glGenTextures(1, &tex);
327
    SetupTextureFilters(tex, GL_LINEAR);
547
    glBindTexture(type, tex);
328
    m_priv->m_textures.push_back(tex);
548
549
    if (tex)
550
    {
551
        MythGLTexture *texture = new MythGLTexture();
552
        texture->m_type = type;
553
        texture->m_data_type = data_type;
554
        texture->m_data_fmt = data_fmt;
555
        texture->m_internal_fmt = internal_fmt;
556
        texture->m_size = tot_size;
557
        texture->m_vid_size = vid_size;
558
        texture->m_data_size = GetBufferSize(vid_size, data_fmt, data_type);
559
        m_priv->m_textures[tex] = *texture;
560
561
        if (ClearTexture(tex) && m_priv->m_textures[tex].m_data_size)
562
        {
563
            SetTextureFilters(tex, filter, wrap);
564
            if (use_pbo)
565
                m_priv->m_textures[tex].m_pbo = CreatePBO(tex);
566
        }
567
        else
568
        {
569
            DeleteTexture(tex);
570
            tex = 0;
571
        }
572
573
        delete texture;
574
    }
575
576
    Flush(true);
329
577
330
    MakeCurrent(false);
578
    MakeCurrent(false);
331
579
Lines 333-415 Link Here
333
}
581
}
334
582
335
// locking ok
583
// locking ok
336
bool OpenGLContext::SetupTexture(const QSize &size, uint tex, int filt)
584
uint OpenGLContext::GetBufferSize(QSize size, uint fmt, uint type)
337
{
585
{
338
    unsigned char *scratch =
586
    uint bytes;
339
        new unsigned char[(size.width() * size.height() * 4) + 128];
587
    uint bpp;
340
588
341
    bzero(scratch, size.width() * size.height() * 4);
589
    switch (fmt)
590
    {
591
        case GL_BGRA:
592
        case GL_RGBA:
593
            bpp = 4;
594
            break;
595
        default:
596
            bpp =0;
597
    }
342
598
343
    GLint check;
599
    switch (type)
600
    {
601
        case GL_UNSIGNED_BYTE:
602
            bytes = sizeof(GLubyte);
603
            break;
604
        case GL_FLOAT:
605
            bytes = sizeof(GLfloat);
606
            break;
607
        default:
608
            bytes = 0;
609
    }
344
610
345
    MakeCurrent(true);
611
    if (!bpp || !bytes || size.width() < 1 || size.height() < 1)
346
    SetupTextureFilters(tex, filt);
612
        return 0;
347
    glTexImage2D(GetTextureType(), 0, GL_RGBA8, size.width(), size.height(),
613
348
                 0, GL_RGB , GL_UNSIGNED_BYTE, scratch);
614
    return size.width() * size.height() * bpp * bytes;
349
    glGetTexLevelParameteriv(GetTextureType(), 0, GL_TEXTURE_WIDTH, &check);
615
}
350
    MakeCurrent(false);
351
616
352
    if (scratch)
617
// locking ok
618
bool OpenGLContext::ClearTexture(uint tex)
619
{
620
    MythGLTexture *tmp = &m_priv->m_textures[tex];
621
    QSize size = tmp->m_size;
622
623
    uint tmp_size = GetBufferSize(size, tmp->m_data_fmt, tmp->m_data_type);
624
625
    if (!tmp_size)
626
        return false;
627
628
    unsigned char *scratch = new unsigned char[tmp_size];
629
630
    if (!scratch)
631
        return false;
632
633
    bzero(scratch, tmp_size);
634
635
    GLint check;
636
    if (tmp->m_type == GL_TEXTURE_1D)
353
    {
637
    {
354
        delete scratch;
638
        glTexImage1D(tmp->m_type, 0, tmp->m_internal_fmt,
355
        scratch = NULL;
639
                     size.width(), 0,
640
                     tmp->m_data_fmt , tmp->m_data_type, scratch);
356
    }
641
    }
642
    else
643
    {
644
        glTexImage2D(tmp->m_type, 0, tmp->m_internal_fmt,
645
                     size.width(), size.height(), 0,
646
                     tmp->m_data_fmt , tmp->m_data_type, scratch);
647
    }
648
    glGetTexLevelParameteriv(tmp->m_type, 0, GL_TEXTURE_WIDTH, &check);
649
650
    delete [] scratch;
357
651
358
    return (check == size.width());
652
    return (check == size.width());
359
}
653
}
360
654
361
// locking ok
655
// locking ok
362
void OpenGLContext::SetupTextureFilters(uint tex, int filt)
656
void OpenGLContext::SetTextureFilters(uint tex, uint filt, uint wrap)
363
{
657
{
364
    glBindTexture(GetTextureType(), tex);
658
    if (!m_priv->m_textures.count(tex))
365
    glTexParameteri(GetTextureType(), GL_TEXTURE_MIN_FILTER, filt);
659
        return;
366
    glTexParameteri(GetTextureType(), GL_TEXTURE_MAG_FILTER, filt);
660
367
    glTexParameteri(GetTextureType(), GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
661
    MakeCurrent(true);
368
    glTexParameteri(GetTextureType(), GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
662
663
    EnableTextures(tex);
664
665
    m_priv->m_textures[tex].m_filter = filt;
666
    m_priv->m_textures[tex].m_wrap = wrap;
667
668
    uint type = m_priv->m_textures[tex].m_type;
669
670
    glBindTexture(type, tex);
671
    glTexParameteri(type, GL_TEXTURE_MIN_FILTER, filt);
672
    glTexParameteri(type, GL_TEXTURE_MAG_FILTER, filt);
673
    glTexParameteri(type, GL_TEXTURE_WRAP_S, wrap);
674
    if (type != GL_TEXTURE_1D)
675
        glTexParameteri(type, GL_TEXTURE_WRAP_T, wrap);
676
677
    MakeCurrent(false);
369
}
678
}
370
679
371
// locking ok
680
// locking ok
372
void OpenGLContext::DeleteTexture(uint tex)
681
void OpenGLContext::DeleteTexture(uint tex)
373
{
682
{
683
    if (!m_priv->m_textures.count(tex))
684
        return;
685
374
    MakeCurrent(true);
686
    MakeCurrent(true);
375
687
376
    vector<GLuint>::iterator it;
688
    GLuint gltex = tex;
377
    for (it = m_priv->m_textures.begin(); it !=m_priv->m_textures.end(); it++)
689
    glDeleteTextures(1, &gltex);
690
691
    if (m_priv->m_textures[tex].m_data)
378
    {
692
    {
379
        if (*(it) == tex)
693
        delete m_priv->m_textures[tex].m_data;
380
        {
694
    }
381
            GLuint gltex = tex;
695
382
            glDeleteTextures(1, &gltex);
696
    if (m_priv->m_textures[tex].m_pbo)
383
            m_priv->m_textures.erase(it);
697
    {
384
            break;
698
        gMythGLDeleteBuffersARB(1, &(m_priv->m_textures[tex].m_pbo));
385
        }
386
    }
699
    }
387
700
701
    m_priv->m_textures.erase(tex);
702
703
    Flush(true);
704
388
    MakeCurrent(false);
705
    MakeCurrent(false);
389
}
706
}
390
707
391
// locking ok
708
// locking ok
392
void OpenGLContext::DeleteTextures(void)
709
void OpenGLContext::DeleteTextures(void)
393
{
710
{
394
    MakeCurrent(true);
711
    map<GLuint, MythGLTexture>::iterator it;
395
396
    vector<GLuint>::iterator it;
397
    for (it = m_priv->m_textures.begin(); it !=m_priv->m_textures.end(); it++)
712
    for (it = m_priv->m_textures.begin(); it !=m_priv->m_textures.end(); it++)
398
        glDeleteTextures(1, &(*(it)));
713
    {
714
        GLuint gltex = it->first;
715
        glDeleteTextures(1, &gltex);
716
717
        if (it->second.m_data)
718
        {
719
            delete it->second.m_data;
720
        }
721
722
        if (it->second.m_pbo)
723
        {
724
            gltex = it->second.m_pbo;
725
            gMythGLDeleteBuffersARB(1, &gltex);
726
        }
727
    }
399
    m_priv->m_textures.clear();
728
    m_priv->m_textures.clear();
400
729
401
    MakeCurrent(false);
730
    Flush(true);
402
}
731
}
403
732
404
int OpenGLContext::GetTextureType(void) const
733
void OpenGLContext::GetTextureType(uint &current, bool &rect)
405
{
734
{
406
    return m_priv->m_texture_type;
735
    uint type = get_gl_texture_rect_type(m_extensions);
736
    if (type)
737
    {
738
        rect = true;
739
        current = type;
740
        return;
741
    }
742
743
    rect = false;
744
    return;
407
}
745
}
408
746
409
// locking ok
747
// locking ok
410
bool OpenGLContext::CreateFragmentProgram(const QString &program, uint &fp)
748
bool OpenGLContext::CreateFragmentProgram(const QString &program, uint &fp)
411
{
749
{
412
    bool success = true;
750
    bool success = true;
751
752
    if (!(m_ext_used & kGLExtFragProg))
753
        return false;
754
413
    GLint error;
755
    GLint error;
414
756
415
    MakeCurrent(true);
757
    MakeCurrent(true);
Lines 449-454 Link Here
         gMythGLDeleteProgramsARB(1, &glfp);
     }
 
+    Flush(true);
+
     MakeCurrent(false);
 
     fp = glfp;
Lines 473-525 Link Here
         }
     }
 
+    Flush(true);
+
     MakeCurrent(false);
 }
 
 void OpenGLContext::BindFragmentProgram(uint fp)
 {
+    MakeCurrent(true);
     gMythGLBindProgramARB(GL_FRAGMENT_PROGRAM_ARB, fp);
+    MakeCurrent(false);
 }
 
 void OpenGLContext::InitFragmentParams(
     uint fp, float a, float b, float c, float d)
 {
+    MakeCurrent(true);
     gMythGLProgramEnvParameter4fARB(
         GL_FRAGMENT_PROGRAM_ARB, fp, a, b, c, d);
+    MakeCurrent(false);
 }
 
 void OpenGLContext::DeletePrograms(void)
 {
-    MakeCurrent(true);
-
     vector<GLuint>::iterator it;
     for (it = m_priv->m_programs.begin(); it != m_priv->m_programs.end(); it++)
         gMythGLDeleteProgramsARB(1, &(*(it)));
     m_priv->m_programs.clear();
 
-    MakeCurrent(false);
+    Flush(true);
 }
 
 // locking ok
-bool OpenGLContext::CreateFrameBuffer(uint &fb, uint tex, const QSize &size)
+bool OpenGLContext::CreateFrameBuffer(uint &fb, uint tex)
 {
+    if (!(m_ext_used & kGLExtFBufObj))
+        return false;
+
+    if (!m_priv->m_textures.count(tex))
+        return false;
+
+    MythGLTexture *tmp = &m_priv->m_textures[tex];
+    QSize size = tmp->m_size;
     GLuint glfb;
 
     MakeCurrent(true);
+    glCheck();
 
-    SetupTextureFilters(tex, GL_LINEAR);
+    EnableTextures(tex);
 
     glPushAttrib(GL_VIEWPORT_BIT);
     glViewport(0, 0, size.width(), size.height());
     gMythGLGenFramebuffersEXT(1, &glfb);
     gMythGLBindFramebufferEXT(GL_FRAMEBUFFER_EXT, glfb);
-    glBindTexture(GetTextureType(), tex);
-    glTexImage2D(GetTextureType(), 0, GL_RGBA8,
+    glBindTexture(tmp->m_type, tex);
+    glTexImage2D(tmp->m_type, 0, tmp->m_internal_fmt,
                  (GLint) size.width(), (GLint) size.height(), 0,
-                 GL_RGB, GL_UNSIGNED_BYTE, NULL);
+                 tmp->m_data_fmt, tmp->m_data_type, NULL);
     gMythGLFramebufferTexture2DEXT(
         GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT,
-        GetTextureType(), tex, 0);
+        tmp->m_type, tex, 0);
 
     GLenum status;
     status = gMythGLCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT);
Lines 575-580 Link Here
     else
         gMythGLDeleteFramebuffersEXT(1, &glfb);
 
+    Flush(true);
+
+    glCheck();
     MakeCurrent(false);
 
     fb = glfb;
Lines 600-612 Link Here
         }
     }
 
+    Flush(true);
+
     MakeCurrent(false);
 }
 
 void OpenGLContext::DeleteFrameBuffers(void)
 {
-    MakeCurrent(true);
-
     vector<GLuint>::iterator it;
     for (it = m_priv->m_framebuffers.begin();
          it != m_priv->m_framebuffers.end(); it++)
Lines 615-627 Link Here
     }
     m_priv->m_framebuffers.clear();
 
-    MakeCurrent(false);
+    Flush(true);
 }
 
 // locking ok
 void OpenGLContext::BindFramebuffer(uint fb)
 {
+    MakeCurrent(true);
     gMythGLBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fb);
+    MakeCurrent(false);
 }
 
 bool OpenGLContext::IsGLXSupported(
Lines 636-638 Link Here
 
     return false;
 }
+
+void OpenGLContext::Init2DState(void)
+{
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+    glDisable(GL_BLEND);
+    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); // for gl osd
+    glDisable(GL_DEPTH_TEST);
+    glDepthMask(GL_FALSE);
+    glDisable(GL_CULL_FACE);
+    glShadeModel(GL_FLAT);
+    glDisable(GL_POLYGON_SMOOTH);
+    glDisable(GL_LINE_SMOOTH);
+    glDisable(GL_POINT_SMOOTH);
+    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+    Flush(true);
+}
+
+void OpenGLContext::SetViewPort(const QSize &size)
+{
+    if (size.width() == m_viewport.width() &&
+        size.height() == m_viewport.height())
+        return;
+
+    MakeCurrent(true);
+
+    m_viewport = size;
+
+    glViewport(0, 0, size.width(), size.height());
+    glMatrixMode(GL_PROJECTION);
+    glLoadIdentity();
+    glOrtho(0, size.width() - 1,
+            0, size.height() - 1, 1, -1); // aargh...
+    glMatrixMode(GL_MODELVIEW);
+    glLoadIdentity();
+
+    MakeCurrent(false);
+}
+
+uint OpenGLContext::CreatePBO(uint tex)
+{
+    if (!(m_ext_used & kGLExtPBufObj))
+        return 0;
+
+    if (!m_priv->m_textures.count(tex))
+        return 0;
+
+    MythGLTexture *tmp = &m_priv->m_textures[tex];
+
+    gMythGLBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
+    glTexImage2D(tmp->m_type, 0, tmp->m_internal_fmt,
+                 tmp->m_size.width(), tmp->m_size.height(), 0,
+                 tmp->m_data_fmt, tmp->m_data_type, NULL);
+
+    GLuint tmp_pbo;
+    gMythGLGenBuffersARB(1, &tmp_pbo);
+
+    gMythGLBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
+
+    Flush(true);
+
+    return tmp_pbo;
+}
+
+uint OpenGLContext::CreateHelperTexture(void)
+{
+    MakeCurrent(true);
+
+    uint width = m_max_tex_size;
+
+    uint tmp_tex = CreateTexture(QSize(width, 1), QSize(width, 1),
+                                false,
+                                GL_TEXTURE_1D, GL_FLOAT,
+                                GL_RGBA, GL_RGBA16,
+                                GL_NEAREST, GL_REPEAT);
+
+    if (!tmp_tex)
+    {
+        DeleteTexture(tmp_tex);
+        return 0;
+    }
+
+    float *buf = NULL;
+    buf = new float[m_priv->m_textures[tmp_tex].m_data_size];
+    float *ref = buf;
+
+    for (uint i = 0; i < width; i++)
+    {
+        float x = (((float)i) + 0.5f) / (float)width;
+        store_bicubic_weights(x, ref);
+        ref += 4;
+    }
+    store_bicubic_weights(0, buf);
+    store_bicubic_weights(1, &buf[(width - 1) << 2]);
+
+    EnableTextures(tmp_tex);
+    glBindTexture(m_priv->m_textures[tmp_tex].m_type, tmp_tex);
+    glTexImage1D(GL_TEXTURE_1D, 0, GL_RGBA16, width, 0, GL_RGBA, GL_FLOAT, buf);
+
+    VERBOSE(VB_PLAYBACK, LOC +
+            QString("Created bicubic helper texture (%1 samples)")
+            .arg(width));
+
+    delete [] buf;
+
+    MakeCurrent(false);
+
+    return tmp_tex;
+}
+
+int OpenGLContext::SetPictureAttribute(
+    PictureAttribute attribute, int newValue)
+{
+    if (!m_colour_control)
+        return -1;
+
+    MakeCurrent(true);
+
+    int ret = -1;
+    switch (attribute)
+    {
+        case kPictureAttribute_Brightness:
+            ret = newValue;
+            pictureAttribs[attribute] = (newValue * 0.02f) - 0.5f;
+            break;
+        case kPictureAttribute_Contrast:
+        case kPictureAttribute_Colour:
+            ret = newValue;
+            pictureAttribs[attribute] = (newValue * 0.02f);
+            break;
+        case kPictureAttribute_Hue: // not supported yet...
+            break;
+        default:
+            break;
+    }
+
+    MakeCurrent(false);
+
+    return ret;
+}
+
+PictureAttributeSupported
+OpenGLContext::GetSupportedPictureAttributes(void) const
+{
+    return (!m_colour_control) ?
+        kPictureAttributeSupported_None :
+        (PictureAttributeSupported)
+        (kPictureAttributeSupported_Brightness |
+         kPictureAttributeSupported_Contrast |
+         kPictureAttributeSupported_Colour);
+}
+
+void OpenGLContext::SetColourParams(void)
+{
+    if (!m_colour_control)
+        return;
+
+    MakeCurrent(true);
+
+    InitFragmentParams(0,
+        pictureAttribs[kPictureAttribute_Brightness],
+        pictureAttribs[kPictureAttribute_Contrast],
+        pictureAttribs[kPictureAttribute_Colour],
+        0.5f);
+
+    MakeCurrent(false);
+}
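
SetPictureAttribute() maps MythTV's 0-100 picture-control range onto the constants fed to the colour-correction fragment program, and SetColourParams() pushes them via InitFragmentParams(). A worked example of the scaling (values only, not part of the patch):

    // newValue = 50 (the UI midpoint):
    //   Brightness: 50 * 0.02f - 0.5f = 0.5f
    //   Contrast:   50 * 0.02f        = 1.0f
    //   Colour:     50 * 0.02f        = 1.0f
    // SetColourParams() then passes these three values, plus a fixed 0.5f,
    // as environment parameter 0 of the bound fragment program.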
(-)mythtv.ori/libs/libmythtv/openglcontext.h (-26 / +101 lines)
Lines 11-16 Link Here
 
 // MythTV headers
 #include "util-x11.h"
+#include "frame.h"
+#include "videooutbase.h"
+
+#ifndef GL_BGRA
+#define GL_BGRA                           0x80E1
+#endif
+#ifndef GL_UNSIGNED_BYTE
+#define GL_UNSIGNED_BYTE                  0x1401
+#endif
+#ifndef GL_RGBA8
+#define GL_RGBA8                          0x8058
+#endif
+#ifndef GL_LINEAR
+#define GL_LINEAR                         0x2601
+#endif
+#ifndef GL_CLAMP_TO_EDGE
+#define GL_CLAMP_TO_EDGE                  0x812F
+#endif
 
 class OpenGLVideo;
 class PrivateContext;
Lines 21-79 Link Here
21
    kGLExtFragProg = 0x02,
39
    kGLExtFragProg = 0x02,
22
    kGLExtFBufObj  = 0x04,
40
    kGLExtFBufObj  = 0x04,
23
    kGLXPBuffer    = 0x08,
41
    kGLXPBuffer    = 0x08,
42
    kGLExtPBufObj  = 0x10,
43
    kGLNVFence     = 0x20,
44
    kGLFinish      = 0x40,
45
    kGLMaxFeat     = 0x80,
24
} GLFeatures;
46
} GLFeatures;
25
47
48
class OpenGLContext;
49
50
class OpenGLContextLocker
51
{
52
    public:
53
        OpenGLContextLocker(OpenGLContext *ctx);
54
        ~OpenGLContextLocker();
55
56
    private:
57
        OpenGLContext *m_ctx;
58
};
59
26
#ifdef USING_OPENGL
60
#ifdef USING_OPENGL
27
61
28
class OpenGLContext
62
class OpenGLContext
29
{
63
{
30
  public:
64
  public:
31
    OpenGLContext();
65
    OpenGLContext(QMutex *lock);
32
    ~OpenGLContext();
66
    ~OpenGLContext();
33
67
34
    bool Create(Display *display, Window window, uint screen_num,
68
    bool Create(Display *display, Window window, uint screen_num,
35
                const QSize &display_visible_size, bool visible);
69
                const QRect &display_visible, bool colour_control = false);
70
    void SetViewPort(const QSize &size);
36
    void Hide(void);
71
    void Hide(void);
37
    void Show(void);
72
    void Show(void);
38
73
39
    bool MakeCurrent(bool current);
74
    bool MakeCurrent(bool current);
40
    void SwapBuffers(void);
75
    void SwapBuffers(void);
41
    void Flush(void);
76
    void Flush(bool use_fence);
42
77
43
    uint GetMaxTexSize(void) const { return m_max_tex_size; }
44
    uint GetScreenNum(void)  const { return m_screen_num;   }
78
    uint GetScreenNum(void)  const { return m_screen_num;   }
45
79
46
    uint CreateTexture(void);
80
    void UpdateTexture(uint tex, const unsigned char *buf,
47
    bool SetupTexture(const QSize &size, uint tex, int filt);
81
                       const int *offsets,
48
    void SetupTextureFilters(uint tex, int filt);
82
                       const int *pitches,
83
                       VideoFrameType fmt,
84
                       bool interlaced = FALSE,
85
                       const unsigned char* alpha = NULL);
86
    uint CreateTexture(QSize tot_size, QSize vid_size,
87
                       bool use_pbo, uint type,
88
                       uint data_type = GL_UNSIGNED_BYTE,
89
                       uint data_fmt = GL_BGRA,
90
                       uint internal_fmt = GL_RGBA8,
91
                       uint filter = GL_LINEAR,
92
                       uint wrap = GL_CLAMP_TO_EDGE);
93
    void SetTextureFilters(uint tex, uint filt, uint wrap);
49
    void DeleteTexture(uint tex);
94
    void DeleteTexture(uint tex);
50
    int  GetTextureType(void) const;
95
    void GetTextureType(uint &current, bool &rect);
51
    void EnableTextures(void);
96
    void EnableTextures(uint type, uint tex_type = 0);
97
    void DisableTextures(void);
52
98
53
    bool CreateFragmentProgram(const QString &program, uint &prog);
99
    bool CreateFragmentProgram(const QString &program, uint &prog);
54
    void DeleteFragmentProgram(uint prog);
100
    void DeleteFragmentProgram(uint prog);
55
    void BindFragmentProgram(uint fp);
101
    void BindFragmentProgram(uint fp);
56
    void InitFragmentParams(uint fp, float a, float b, float c, float d);
102
    void InitFragmentParams(uint fp, float a, float b, float c, float d);
57
103
58
    bool CreateFrameBuffer(uint &fb, uint tex, const QSize &size);
104
    bool CreateFrameBuffer(uint &fb, uint tex);
59
    void DeleteFrameBuffer(uint fb);
105
    void DeleteFrameBuffer(uint fb);
60
    void BindFramebuffer(uint fb);
106
    void BindFramebuffer(uint fb);
61
107
    uint GetFeatures(void) { return m_ext_supported; }
62
    bool IsFeatureSupported(GLFeatures feature) const
108
    void SetFeatures(uint features) { m_ext_used = features; }
63
        { return m_ext_supported & feature; }
64
109
65
    static bool IsGLXSupported(Display *display, uint major, uint minor);
110
    static bool IsGLXSupported(Display *display, uint major, uint minor);
66
111
112
    int SetPictureAttribute(PictureAttribute attributeType, int newValue);
113
    PictureAttributeSupported GetSupportedPictureAttributes(void) const;
114
    void SetColourParams(void);
115
    uint CreateHelperTexture(void);
116
67
  private:
117
  private:
118
    void Init2DState(void);
68
    bool IsGLXSupported(uint major, uint minor) const
119
    bool IsGLXSupported(uint major, uint minor) const
69
    {
120
    {
70
        return (m_major_ver > major) ||
121
        return (m_major_ver > major) ||
71
            ((m_major_ver == major) && (m_minor_ver >= minor));
122
            ((m_major_ver == major) && (m_minor_ver >= minor));
72
    }
123
    }
73
124
125
    uint CreatePBO(uint tex);
126
74
    void DeleteTextures(void);
127
    void DeleteTextures(void);
75
    void DeletePrograms(void);
128
    void DeletePrograms(void);
76
    void DeleteFrameBuffers(void);
129
    void DeleteFrameBuffers(void);
130
    uint GetBufferSize(QSize size, uint fmt, uint type);
131
    bool ClearTexture(uint tex);
77
132
78
    PrivateContext *m_priv;
133
    PrivateContext *m_priv;
79
134
Lines 83-90 Link Here
     uint            m_minor_ver;
     QString         m_extensions;
     uint            m_ext_supported;
-    bool            m_visible;
+    uint            m_ext_used;
     uint            m_max_tex_size;
+    QSize           m_viewport;
+    QMutex         *m_lock;
+    int             m_lock_level;
+    bool            m_colour_control;
+
+    float pictureAttribs[kPictureAttribute_MAX];
 };
 
 #else // if !USING_OPENGL
Lines 92-128 Link Here
92
class OpenGLContext
153
class OpenGLContext
93
{
154
{
94
  public:
155
  public:
95
    OpenGLContext() { }
156
    OpenGLContext(QMutex*) { }
96
    ~OpenGLContext() { }
157
    ~OpenGLContext() { }
97
158
98
    bool Create(Display*, Window, uint, const QSize&, bool) { return false; }
159
    bool Create(Display*, Window, uint, const Rect&, bool = false) 
160
        { return false; }
161
    void SetViewPort(const QSize&) { }
162
    void Hide(void) { }
163
    void Show(void) { }
99
164
100
    bool MakeCurrent(bool) { return false; }
165
    bool MakeCurrent(bool) { return false; }
101
    void SwapBuffers(void) { }
166
    void SwapBuffers(void) { }
102
    void Flush(void) { }
167
    void Flush(bool) { }
103
168
104
    uint GetMaxTexSize(void) const { return 0; }
105
    uint GetScreenNum(void)  const { return 0; }
169
    uint GetScreenNum(void)  const { return 0; }
106
170
107
    uint CreateTexture(void) { return 0; }
171
    void UpdateTexture(uint, const unsigned char*,
108
    bool SetupTexture(const QSize&, uint, int) { return false; }
172
                       const int *, const int *,
109
    void SetupTextureFilters(uint, int) { }
173
                       VideoFrameType, bool = FALSE,
174
                       const unsigned char* = NULL) { }
175
    uint CreateTexture(QSize, QSize, bool, uint,
176
                       uint = 0, uint = 0, uint = 0,
177
                       uint = 0, uint = 0) { return 0; }
178
    void SetTextureFilters(uint, uint, uint) { }
110
    void DeleteTexture(uint) { }
179
    void DeleteTexture(uint) { }
111
    int  GetTextureType(void) const { return 0; }
180
    void GetTextureType(uint&, bool&) { }
112
    void EnableTextures(void) { }
181
    void EnableTextures(uint, uint = 0) { }
113
182
114
    bool CreateFragmentProgram(const QString&, uint&) { return false; }
183
    bool CreateFragmentProgram(const QString&, uint&) { return false; }
115
    void DeleteFragmentProgram(uint) { }
184
    void DeleteFragmentProgram(uint) { }
116
    void BindFragmentProgram(uint) { }
185
    void BindFragmentProgram(uint) { }
117
    void InitFragmentParams(uint, float, float, float, float) { }
186
    void InitFragmentParams(uint, float, float, float, float) { }
118
187
119
    bool CreateFrameBuffer(uint&, uint, const QSize&) { return false; }
188
    bool CreateFrameBuffer(uint&, uint) { return false; }
120
    void DeleteFrameBuffer(uint);
189
    void DeleteFrameBuffer(uint);
121
    void BindFramebuffer(uint);
190
    void BindFramebuffer(uint);
122
191
123
    bool IsFeatureSupported(GLFeatures) const { return false; }
192
    uint GetFeatures(void) { return 0; }
124
193
    void SetFeatures(uint) { }
125
    static bool IsGLXSupported(Display*, uint, uint) { return false; }
194
    static bool IsGLXSupported(Display*, uint, uint) { return false; }
195
196
    int SetPictureAttribute(PictureAttribute, int) { return -1; }
197
    PictureAttributeSupported GetSupportedPictureAttributes(void) const
198
        { return kPictureAttributeSupported_None; }
199
    void SetColourParams(void);
200
    uint CreateHelperTexture(void);
126
};
201
};
127
202
128
#endif //!USING_OPENGL
203
#endif //!USING_OPENGL
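
The GLFeatures values declared in this header are plain bit flags; the context reports what it detected through GetFeatures() and can be restricted with SetFeatures(). A minimal sketch of that flow (illustrative only, not part of the patch):

    uint features = ctx->GetFeatures();   // bits detected in Create()
    if (features & kGLExtPBufObj)
        VERBOSE(VB_PLAYBACK, "PBOs supported");
    features &= ~kGLFinish;               // e.g. skip glFinish() before swap
    ctx->SetFeatures(features);           // limit what the context will use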
(-)mythtv.ori/libs/libmythtv/openglvideo.cpp (-811 / +940 lines)
Lines 26-60 Link Here
26
class OpenGLFilter
26
class OpenGLFilter
27
{
27
{
28
    public:
28
    public:
29
        GLuint         fragmentProgram;
29
        vector<GLuint> fragmentPrograms;
30
        uint           numInputs;
30
        uint           numInputs;
31
        bool           rotateFrameBuffers;
32
        vector<GLuint> frameBuffers;
31
        vector<GLuint> frameBuffers;
33
        vector<GLuint> frameBufferTextures;
32
        vector<GLuint> frameBufferTextures;
34
        DisplayBuffer  outputBuffer;
33
        DisplayBuffer  outputBuffer;
35
};
34
};
36
35
37
OpenGLVideo::OpenGLVideo() :
36
OpenGLVideo::OpenGLVideo() :
38
    gl_context(NULL),         videoSize(0,0),
37
    gl_context(NULL),         video_dim(0,0),
39
    viewportSize(0,0),        masterViewportSize(0,0),
38
    actual_video_dim(0,0),    viewportSize(0,0),
40
    visibleRect(0,0,0,0),     videoRect(0,0,0,0),
39
    masterViewportSize(0,0),  display_visible_rect(0,0,0,0),
41
    frameRect(0,0,0,0),
40
    display_video_rect(0,0,0,0), video_rect(0,0,0,0),
42
    frameBufferRect(0,0,0,0), invertVideo(false),
41
    frameBufferRect(0,0,0,0), softwareDeinterlacer(QString::null),
43
    softwareDeinterlacer(QString::null),
42
    hardwareDeinterlacer(QString::null), hardwareDeinterlacing(false),
44
    hardwareDeinterlacing(false),
45
    useColourControl(false),  viewportControl(false),
43
    useColourControl(false),  viewportControl(false),
46
    frameBuffer(0),           frameBufferTexture(0),
47
    inputTextureSize(0,0),    currentFrameNum(0),
44
    inputTextureSize(0,0),    currentFrameNum(0),
48
    inputUpdated(false),
45
    inputUpdated(false),      
49
46
    textureRects(false),      textureType(GL_TEXTURE_2D),
47
    helperTexture(0),         defaultUpsize(kGLFilterResize),
50
    convertSize(0,0),         convertBuf(NULL),
48
    convertSize(0,0),         convertBuf(NULL),
51
49
    videoResize(false),       videoResizeRect(0,0,0,0),
52
    videoResize(false),       videoResizeRect(0,0,0,0)
50
    gl_features(0)
53
{
51
{
54
}
52
}
55
53
56
OpenGLVideo::~OpenGLVideo()
54
OpenGLVideo::~OpenGLVideo()
57
{
55
{
56
    OpenGLContextLocker ctx_lock(gl_context);
58
    Teardown();
57
    Teardown();
59
}
58
}
60
59
Lines 63-117 Link Here
63
{
62
{
64
    ShutDownYUV2RGB();
63
    ShutDownYUV2RGB();
65
64
66
    gl_context->MakeCurrent(true);
65
    if (helperTexture)
67
66
        gl_context->DeleteTexture(helperTexture);
68
    if (frameBuffer)
67
    helperTexture = 0;
69
        gl_context->DeleteFrameBuffer(frameBuffer);
70
68
71
    if (frameBufferTexture)
69
    DeleteTextures(&inputTextures);
72
        gl_context->DeleteTexture(frameBufferTexture);
70
    DeleteTextures(&referenceTextures);
73
74
    for (uint i = 0; i < inputTextures.size(); i++)
75
        gl_context->DeleteTexture(inputTextures[i]);
76
    inputTextures.clear();
77
71
78
    if (!filters.empty())
72
    if (!filters.empty())
79
    {
73
    {
80
        glfilt_map_t::iterator it;
74
        glfilt_map_t::iterator it;
81
        for (it = filters.begin(); it != filters.end(); ++it)
75
        for (it = filters.begin(); it != filters.end(); it++)
82
        {
76
        {
83
            if (it->second->fragmentProgram)
77
            RemoveFilter(it->first);
84
                gl_context->DeleteFragmentProgram(it->second->fragmentProgram);
85
            vector<GLuint> temp = it->second->frameBuffers;
86
            for (uint i = 0; i < temp.size(); i++)
87
                gl_context->DeleteFrameBuffer(temp[i]);
88
            temp = it->second->frameBufferTextures;
89
            for (uint i = 0; i < temp.size(); i++)
90
                gl_context->DeleteTexture((temp[i]));
91
        }
78
        }
92
    }
79
    }
93
    filters.clear();
80
    filters.clear();
94
95
    gl_context->MakeCurrent(false);
96
}
81
}
97
82
98
// locking ok
83
// locking ok
99
bool OpenGLVideo::Init(OpenGLContext *glcontext, bool colour_control,
84
bool OpenGLVideo::Init(OpenGLContext *glcontext, bool colour_control,
100
                       bool onscreen, QSize video_size, QRect visible_rect,
85
                       QSize videoDim, QRect displayVisibleRect,
101
                       QRect video_rect, QRect frame_rect,
86
                       QRect displayVideoRect, QRect videoRect,
102
                       bool viewport_control, bool osd)
87
                       bool viewport_control, QString options, bool osd)
103
{
88
{
104
    gl_context            = glcontext;
89
    gl_context            = glcontext;
105
    videoSize             = video_size;
90
    if (!gl_context)
106
    visibleRect           = visible_rect;
91
        return false;
107
    videoRect             = video_rect;
108
    frameRect             = frame_rect;
109
    masterViewportSize    = QSize(1920, 1080);
110
92
111
    QSize rect            = GetTextureSize(videoSize);
93
    OpenGLContextLocker ctx_lock(gl_context);
112
94
113
    frameBufferRect       = QRect(QPoint(0,0), rect);
95
    actual_video_dim      = videoDim;
114
    invertVideo           = true;
96
    video_dim             = videoDim;
97
    if (video_dim.height() == 1088)
98
        video_dim.setHeight(1080);
99
    display_visible_rect  = displayVisibleRect;
100
    display_video_rect    = displayVideoRect;
101
    video_rect            = videoRect;
102
    masterViewportSize    = QSize(1920, 1080);
103
    frameBufferRect       = QRect(QPoint(0,0), video_dim);
115
    softwareDeinterlacer  = "";
104
    softwareDeinterlacer  = "";
116
    hardwareDeinterlacing = false;
105
    hardwareDeinterlacing = false;
117
    useColourControl      = colour_control;
106
    useColourControl      = colour_control;
Lines 120-173 Link Here
120
    convertSize           = QSize(0,0);
109
    convertSize           = QSize(0,0);
121
    videoResize           = false;
110
    videoResize           = false;
122
    videoResizeRect       = QRect(0,0,0,0);
111
    videoResizeRect       = QRect(0,0,0,0);
123
    frameBuffer           = 0;
124
    currentFrameNum       = -1;
112
    currentFrameNum       = -1;
125
    inputUpdated          = false;
113
    inputUpdated          = false;
126
114
127
    if (!onscreen)
115
    gl_features = ParseOptions(options) &
128
    {
116
                  gl_context->GetFeatures();
129
        QSize fb_size = GetTextureSize(visibleRect.size());
117
130
        if (!AddFrameBuffer(frameBuffer, frameBufferTexture, fb_size))
118
    if (viewportControl)
131
            return false;
119
        gl_context->SetFeatures(gl_features);
132
    }
120
121
    if (options.contains("openglbicubic"))
122
        defaultUpsize = kGLFilterBicubic;
123
124
    if ((defaultUpsize != kGLFilterBicubic) && (gl_features & kGLExtRect))
125
        gl_context->GetTextureType(textureType, textureRects);
126
127
    SetViewPort(display_visible_rect.size());
133
128
134
    SetViewPort(visibleRect.size());
129
    bool use_pbo = gl_features & kGLExtPBufObj;
135
    InitOpenGL();
136
130
137
    if (osd)
131
    if (osd)
138
    {
132
    {
139
        QSize osdsize = visibleRect.size();
133
        QSize osdsize = display_visible_rect.size();
140
        QSize half_size(osdsize.width() >> 1, osdsize.height() >>1);
134
        GLuint tex = CreateVideoTexture(osdsize, inputTextureSize, use_pbo);
141
        GLuint alphatex = CreateVideoTexture(osdsize, inputTextureSize);
135
142
        GLuint utex = CreateVideoTexture(half_size, inputTextureSize);
136
        if (tex &&
143
        GLuint vtex = CreateVideoTexture(half_size, inputTextureSize);
137
            AddFilter(kGLFilterYUV2RGBA) &&
144
        GLuint ytex = CreateVideoTexture(osdsize, inputTextureSize);
138
            AddFilter(kGLFilterResize))
145
139
        {
146
        if ((alphatex && ytex && utex && vtex) && AddFilter(kGLFilterYUV2RGBA))
140
            inputTextures.push_back(tex);
147
        {
141
        }
148
            inputTextures.push_back(ytex);
142
        else
149
            inputTextures.push_back(utex);
143
        {
150
            inputTextures.push_back(vtex);
144
            Teardown();
151
            inputTextures.push_back(alphatex);
152
            if (!AddFilter(kGLFilterResize))
153
            {
154
                Teardown();
155
                return false;
156
            }
157
        }
145
        }
158
    }
146
    }
159
    else
147
    else
160
    {
148
    {
161
        QSize half_size(videoSize.width() >> 1, videoSize.height() >>1);
149
        GLuint tex = CreateVideoTexture(actual_video_dim,
162
        GLuint utex = CreateVideoTexture(half_size, inputTextureSize);
150
                                        inputTextureSize, use_pbo);
163
        GLuint vtex = CreateVideoTexture(half_size, inputTextureSize);
151
164
        GLuint ytex = CreateVideoTexture(videoSize, inputTextureSize);;
152
        if (tex && AddFilter(kGLFilterYUV2RGB))
165
153
        {
166
        if ((ytex && utex && vtex) && AddFilter(kGLFilterYUV2RGB))
154
            inputTextures.push_back(tex);
167
        {
155
        }
168
            inputTextures.push_back(ytex);
156
        else
169
            inputTextures.push_back(utex);
157
        {
170
            inputTextures.push_back(vtex);
158
            Teardown();
171
        }
159
        }
172
    }
160
    }
173
161
Lines 184-194 Link Here
184
                "Falling back to software conversion.\n\t\t\t"
172
                "Falling back to software conversion.\n\t\t\t"
185
                "Any opengl filters will also be disabled.");
173
                "Any opengl filters will also be disabled.");
186
174
187
        GLuint rgb24tex = CreateVideoTexture(videoSize, inputTextureSize);
175
        GLuint bgra32tex = CreateVideoTexture(actual_video_dim,
176
                                             inputTextureSize, use_pbo);
188
177
189
        if (rgb24tex && AddFilter(kGLFilterResize))
178
        if (bgra32tex && AddFilter(kGLFilterResize))
190
        {
179
        {
191
            inputTextures.push_back(rgb24tex);
180
            inputTextures.push_back(bgra32tex);
192
        }
181
        }
193
        else
182
        else
194
        {
183
        {
Lines 198-265 Link Here
198
        }
187
        }
199
    }
188
    }
200
189
190
#ifdef MMX
191
    bool mmx = true;
192
#else
193
    bool mmx = false;
194
#endif
195
196
    CheckResize(false);
197
198
    VERBOSE(VB_PLAYBACK, LOC + 
199
            QString("Using packed textures with%1 mmx and with%2 PBOs")
200
            .arg(mmx ? "" : "out").arg(use_pbo ? "" : "out"));
201
201
    return true;
202
    return true;
202
}
203
}
203
204
204
OpenGLFilterType OpenGLVideo::GetDeintFilter(void) const
205
void OpenGLVideo::CheckResize(bool deinterlacing)
205
{
206
{
206
    if (filters.count(kGLFilterKernelDeint))
207
    // to improve performance on slower cards
207
        return kGLFilterKernelDeint;
208
    bool resize_up = (video_dim.height() < display_video_rect.height()) ||
208
    if (filters.count(kGLFilterLinearBlendDeint))
209
                     (video_dim.width()  < display_video_rect.width());
209
        return kGLFilterLinearBlendDeint;
210
    if (filters.count(kGLFilterOneFieldDeint))
211
        return kGLFilterOneFieldDeint;
212
    if (filters.count(kGLFilterBobDeintDFR))
213
        return kGLFilterBobDeintDFR;
214
    if (filters.count(kGLFilterOneFieldDeintDFR))
215
        return kGLFilterOneFieldDeintDFR;
216
    if (filters.count(kGLFilterLinearBlendDeintDFR))
217
        return kGLFilterLinearBlendDeintDFR;
218
    if (filters.count(kGLFilterKernelDeintDFR))
219
        return kGLFilterKernelDeintDFR;
220
    if (filters.count(kGLFilterFieldOrderDFR))
221
        return kGLFilterFieldOrderDFR;
222
210
223
    return kGLFilterNone;
211
    // to ensure deinterlacing works correctly
224
}
212
    bool resize_down = (video_dim.height() > display_video_rect.height()) &&
213
                        deinterlacing;
225
214
226
bool OpenGLVideo::OptimiseFilters(void)
215
    if (resize_up && (defaultUpsize == kGLFilterBicubic))
227
{
228
    // if video height does not match display rect height, add resize stage
229
    // to preserve field information N.B. assumes interlaced
230
    // if video rectangle is smaller than display rectangle, add resize stage
231
    // to improve performance
232
233
    bool needResize =  ((videoSize.height() != videoRect.height()) ||
234
                        (videoSize.width()  <  videoRect.width()));
235
    if (needResize && !filters.count(kGLFilterResize) &&
236
        !(AddFilter(kGLFilterResize)))
237
    {
216
    {
238
        return false;
217
        RemoveFilter(kGLFilterResize);
218
        filters.erase(kGLFilterResize);
219
        AddFilter(kGLFilterBicubic);
220
        return;
221
    }
222
223
    if ((resize_up && (defaultUpsize == kGLFilterResize)) || resize_down)
224
    {
225
        RemoveFilter(kGLFilterBicubic);
226
        filters.erase(kGLFilterBicubic);
227
        AddFilter(kGLFilterResize);
228
        return;
239
    }
229
    }
240
230
231
    if (!filters.count(kGLFilterYUV2RGBA))
232
    {
233
        RemoveFilter(kGLFilterResize);
234
        filters.erase(kGLFilterResize);
235
    }
236
237
    RemoveFilter(kGLFilterBicubic);
238
    filters.erase(kGLFilterBicubic);
239
240
    OptimiseFilters();
241
}
242
243
bool OpenGLVideo::OptimiseFilters(void)
244
{
241
    glfilt_map_t::reverse_iterator it;
245
    glfilt_map_t::reverse_iterator it;
242
246
243
    // add/remove required frame buffer objects
247
    // add/remove required frame buffer objects
244
    // and link filters
248
    // and link filters
245
    uint buffers_needed = 1;
249
    uint buffers_needed = 1;
246
    bool last_filter    = true;
250
    bool last_filter    = true;
247
    bool needtorotate   = false;
248
    for (it = filters.rbegin(); it != filters.rend(); it++)
251
    for (it = filters.rbegin(); it != filters.rend(); it++)
249
    {
252
    {
250
        it->second->outputBuffer = kFrameBufferObject;
251
        it->second->rotateFrameBuffers = needtorotate;
252
        if (!last_filter)
253
        if (!last_filter)
253
        {
254
        {
255
            it->second->outputBuffer = kFrameBufferObject;
254
            uint buffers_have = it->second->frameBuffers.size();
256
            uint buffers_have = it->second->frameBuffers.size();
255
            int buffers_diff = buffers_needed - buffers_have;
257
            int buffers_diff = buffers_needed - buffers_have;
256
            if (buffers_diff > 0)
258
            if (buffers_diff > 0)
257
            {
259
            {
258
                uint tmp_buf, tmp_tex;
260
                uint tmp_buf, tmp_tex;
259
                QSize fb_size = GetTextureSize(videoSize);
261
                QSize fb_size = GetTextureSize(video_dim);
260
                for (int i = 0; i < buffers_diff; i++)
262
                for (int i = 0; i < buffers_diff; i++)
261
                {
263
                {
262
                    if (!AddFrameBuffer(tmp_buf, tmp_tex, fb_size))
264
                    if (!AddFrameBuffer(tmp_buf, fb_size, tmp_tex, video_dim))
263
                        return false;
265
                        return false;
264
                    else
266
                    else
265
                    {
267
                    {
Lines 286-310
         }
         else
         {
+            it->second->outputBuffer = kDefaultBuffer;
             last_filter = false;
         }
-
         buffers_needed = it->second->numInputs;
-        needtorotate = (it->first == kGLFilterKernelDeint ||
-                        it->first == kGLFilterLinearBlendDeint ||
-                        it->first == kGLFilterOneFieldDeintDFR ||
-                        it->first == kGLFilterLinearBlendDeintDFR ||
-                        it->first == kGLFilterKernelDeintDFR ||
-                        it->first == kGLFilterFieldOrderDFR);
-
     }

-    bool deinterlacing = hardwareDeinterlacing;
-    hardwareDeinterlacing = true;
-
-    SetDeinterlacing(false);
-    if (deinterlacing)
-        SetDeinterlacing(deinterlacing);
+    SetFiltering();

     return true;
 }
Lines 314-388
 {
     // filter settings included for performance only
     // no (obvious) quality improvement over GL_LINEAR throughout
-    if (filters.empty())
-        return;
-
-    if (filters.size() == 1)
+    if (filters.empty() || filters.size() == 1)
     {
-        SetTextureFilters(&inputTextures, GL_LINEAR);
+        SetTextureFilters(&inputTextures, GL_LINEAR, GL_CLAMP_TO_EDGE);
         return;
     }

-    SetTextureFilters(&inputTextures, GL_NEAREST);
-    vector<GLuint> textures;
-    glfilt_map_t::iterator it;
-    for (it = filters.begin(); it != filters.end(); it++)
-        SetTextureFilters(&(it->second->frameBufferTextures), GL_NEAREST);
-
-    // resize or last active (ie don't need resize) need GL_LINEAR
+    SetTextureFilters(&inputTextures, GL_NEAREST, GL_CLAMP_TO_EDGE);

     glfilt_map_t::reverse_iterator rit;
-    bool next = false;
-    bool resize = filters.count(kGLFilterResize);
+    int last_filter = 0;

     for (rit = filters.rbegin(); rit != filters.rend(); rit++)
     {
-        if (next && (rit->second->outputBuffer != kNoBuffer))
-        {
-            SetTextureFilters(&(rit->second->frameBufferTextures), GL_LINEAR);
-            return;
-        }
-
-        if (resize)
-        {
-            next |= ((rit->first == kGLFilterResize) ||
-                     (rit->second->outputBuffer == kDefaultBuffer));
-        }
+        if (last_filter == 1)
+        {
+            SetTextureFilters(&(rit->second->frameBufferTextures),
+                              GL_LINEAR, GL_CLAMP_TO_EDGE);
+        }
+        else if (last_filter > 1)
+        {
+            SetTextureFilters(&(rit->second->frameBufferTextures),
+                              GL_NEAREST, GL_CLAMP_TO_EDGE);
+        }
     }
-
-    SetTextureFilters(&inputTextures, GL_LINEAR);
-}
-
-// locking ok
-bool OpenGLVideo::ReInit(OpenGLContext *glcontext, bool colour_control,
-                         bool onscreen, QSize video_size, QRect visible_rect,
-                         QRect video_rect, QRect frame_rect,
-                         bool viewport_control, bool osd)
-{
-    VERBOSE(VB_PLAYBACK, LOC + "Reinit");
-
-    gl_context->MakeCurrent(true);
-
-    QString harddeint   = GetDeinterlacer(); // only adds back deinterlacer
-    QString softdeint   = softwareDeinterlacer;
-    bool    interlacing = hardwareDeinterlacing;
-    bool    resize      = videoResize;
-    QRect   resize_rect = videoResizeRect;
-
-    Teardown();
-
-    bool success = Init(glcontext, colour_control, onscreen, video_size,
-                        visible_rect, video_rect, frame_rect,
-                        viewport_control, osd);
-
-    if (harddeint != "")
-        success &= AddDeinterlacer(harddeint);
-
-    softwareDeinterlacer = softdeint;
-    SetDeinterlacing(interlacing);
-
-    if (resize)
-        SetVideoResize(resize_rect);
-
-    return success;
-}
+}

 // locking ok
Lines 391-444
     if (filters.count(filter))
         return true;

+    bool success = true;
+
     VERBOSE(VB_PLAYBACK, LOC + QString("Creating %1 filter.")
             .arg(FilterToString(filter)));

-    gl_context->MakeCurrent(true);
-
     OpenGLFilter *temp = new OpenGLFilter();

     temp->numInputs = 1;
+    GLuint program = 0;

-    if ((filter == kGLFilterLinearBlendDeint) ||
-        (filter == kGLFilterKernelDeint) ||
-        (filter == kGLFilterFieldOrderDFR))
-    {
-        temp->numInputs = 2;
-    }
-    else if ((filter == kGLFilterYUV2RGB) ||
-             (filter == kGLFilterOneFieldDeintDFR) ||
-             (filter == kGLFilterKernelDeintDFR) ||
-             (filter == kGLFilterLinearBlendDeintDFR))
-    {
-        temp->numInputs = 3;
-    }
-    else if ((filter == kGLFilterYUV2RGBA))
-    {
-        temp->numInputs = 4;
-    }
+    if (filter == kGLFilterBicubic)
+    {
+        if (helperTexture)
+            gl_context->DeleteTexture(helperTexture);
+
+        helperTexture = gl_context->CreateHelperTexture();
+        if (!helperTexture)
+            success = false;
+    }

-    GLuint program = 0;
     if (filter != kGLFilterNone && filter != kGLFilterResize)
     {
         program = AddFragmentProgram(filter);
         if (!program)
-            return false;
+            success = false;
+        else
+            temp->fragmentPrograms.push_back(program);
     }

-    temp->fragmentProgram    = program;
     temp->outputBuffer       = kDefaultBuffer;
-    temp->rotateFrameBuffers = false;

     temp->frameBuffers.clear();
     temp->frameBufferTextures.clear();

     filters[filter] = temp;

-    if (OptimiseFilters())
+    success &= OptimiseFilters();
+
+    if (success)
         return true;

     RemoveFilter(filter);
+    filters.erase(filter);

     return false;
 }
Lines 449-531
     if (!filters.count(filter))
         return true;

-    VERBOSE(VB_PLAYBACK, QString("Removing %1 filter")
+    VERBOSE(VB_PLAYBACK, LOC + QString("Removing %1 filter")
             .arg(FilterToString(filter)));

-    gl_context->MakeCurrent(true);
-
-    gl_context->DeleteFragmentProgram(filters[filter]->fragmentProgram);
-
     vector<GLuint> temp;
     vector<GLuint>::iterator it;

-    temp = filters[filter]->frameBuffers;
+    temp = filters[filter]->fragmentPrograms;
     for (it = temp.begin(); it != temp.end(); it++)
-        gl_context->DeleteFrameBuffer(*it);
+        gl_context->DeleteFragmentProgram(*it);
+    filters[filter]->fragmentPrograms.clear();

-    temp = filters[filter]->frameBufferTextures;
+    temp = filters[filter]->frameBuffers;
     for (it = temp.begin(); it != temp.end(); it++)
-        gl_context->DeleteTexture((*(it)));
+        gl_context->DeleteFrameBuffer(*it);
+    filters[filter]->frameBuffers.clear();

-    filters.erase(filter);
+    DeleteTextures(&(filters[filter]->frameBufferTextures));

-    gl_context->MakeCurrent(false);
+    delete filters[filter];

     return true;
 }

 // locking ok
-bool OpenGLVideo::AddDeinterlacer(const QString &filter)
+void OpenGLVideo::TearDownDeinterlacer(void)
 {
-    QString current_deinterlacer = GetDeinterlacer();
+    if (!filters.count(kGLFilterYUV2RGB))
+        return;

-    if (current_deinterlacer == filter)
+    OpenGLFilter *tmp = filters[kGLFilterYUV2RGB];
+
+    if (tmp->fragmentPrograms.size() == 3)
+    {
+        gl_context->DeleteFragmentProgram(tmp->fragmentPrograms[2]);
+        tmp->fragmentPrograms.pop_back();
+    }
+
+    if (tmp->fragmentPrograms.size() == 2)
+    {
+        gl_context->DeleteFragmentProgram(tmp->fragmentPrograms[1]);
+        tmp->fragmentPrograms.pop_back();
+    }
+
+    DeleteTextures(&referenceTextures);
+}
+
+bool OpenGLVideo::AddDeinterlacer(const QString &deinterlacer)
+{
+    OpenGLContextLocker ctx_lock(gl_context);
+
+    if (!filters.count(kGLFilterYUV2RGB))
+        return false;
+
+    if (hardwareDeinterlacer == deinterlacer)
         return true;

-    if (!current_deinterlacer.isEmpty())
-        RemoveFilter(current_deinterlacer);
+    TearDownDeinterlacer();
+
+    bool success = true;

-    return AddFilter(filter);
+    uint ref_size = 2;
+
+    if (deinterlacer == "openglbobdeint" ||
+        deinterlacer == "openglonefield" ||
+        deinterlacer == "opengldoubleratefieldorder")
+    {
+        ref_size = 0;
+    }
+
+    if (ref_size > 0)
+    {
+        bool use_pbo = gl_features & kGLExtPBufObj;
+
+        for (; ref_size > 0; ref_size--)
+        {
+            GLuint tex = CreateVideoTexture(actual_video_dim, inputTextureSize, use_pbo);
+            if (tex)
+            {
+                referenceTextures.push_back(tex);
+            }
+            else
+            {
+                success = false;
+            }
+        }
+    }
+
+    uint prog1 = AddFragmentProgram(kGLFilterYUV2RGB,
+                                    deinterlacer, kScan_Interlaced);
+    uint prog2 = AddFragmentProgram(kGLFilterYUV2RGB,
+                                    deinterlacer, kScan_Intr2ndField);
+
+    if (prog1 && prog2)
+    {
+        filters[kGLFilterYUV2RGB]->fragmentPrograms.push_back(prog1);
+        filters[kGLFilterYUV2RGB]->fragmentPrograms.push_back(prog2);
+    }
+    else
+    {
+        success = false;
+    }
+
+    if (success)
+    {
+        CheckResize(hardwareDeinterlacing);
+        hardwareDeinterlacer = deinterlacer;
+        return true;
+    }
+
+    hardwareDeinterlacer = "";
+    TearDownDeinterlacer();
+
+    return false;
 }

 // locking ok
-uint OpenGLVideo::AddFragmentProgram(OpenGLFilterType name)
+uint OpenGLVideo::AddFragmentProgram(OpenGLFilterType name,
+                                     QString deint, FrameScanType field)
 {
-    if (!gl_context->IsFeatureSupported(kGLExtFragProg))
+    if (!(gl_features & kGLExtFragProg))
     {
         VERBOSE(VB_PLAYBACK, LOC_ERR + "Fragment programs not supported");
         return 0;
     }

-    QString program = GetProgramString(name);
-    QString texType = (gl_context->IsFeatureSupported(kGLExtRect)) ? "RECT" : "2D";
-    program.replace("%1", texType);
+    QString program = GetProgramString(name, deint, field);

     uint ret;
     if (gl_context->CreateFragmentProgram(program, ret))
-    {
-        VERBOSE(VB_PLAYBACK, LOC + QString("Created fragment program %1.")
-                .arg(FilterToString(name)));
-
         return ret;
-    }

     return 0;
 }

 // locking ok
-bool OpenGLVideo::AddFrameBuffer(uint &framebuffer,
-                                 uint &texture, QSize size)
+bool OpenGLVideo::AddFrameBuffer(uint &framebuffer, QSize fb_size,
+                                 uint &texture, QSize vid_size)
 {
-    if (!gl_context->IsFeatureSupported(kGLExtFBufObj))
+    if (!(gl_features & kGLExtFBufObj))
     {
         VERBOSE(VB_PLAYBACK, LOC_ERR + "Framebuffer binding not supported.");
         return false;
     }

-    texture = gl_context->CreateTexture();
+    texture = gl_context->CreateTexture(fb_size, vid_size, false, textureType);

-    bool ok = gl_context->CreateFrameBuffer(framebuffer, texture, size);
+    bool ok = gl_context->CreateFrameBuffer(framebuffer, texture);

     if (!ok)
         gl_context->DeleteTexture(texture);
Lines 536-543
 // locking ok
 void OpenGLVideo::SetViewPort(const QSize &viewPortSize)
 {
-    uint w = max(viewPortSize.width(),  videoSize.width());
-    uint h = max(viewPortSize.height(), videoSize.height());
+    uint w = max(viewPortSize.width(),  video_dim.width());
+    uint h = max(viewPortSize.height(), video_dim.height());

     viewportSize = QSize(w, h);

Lines 546-605

     VERBOSE(VB_PLAYBACK, LOC + QString("Viewport: %1x%2")
             .arg(w).arg(h));
-
-    SetViewPortPrivate(viewportSize);
-}
-
-void OpenGLVideo::SetViewPortPrivate(const QSize &viewPortSize)
-{
-    glViewport(0, 0, viewPortSize.width(), viewPortSize.height());
-    glMatrixMode(GL_PROJECTION);
-    glLoadIdentity();
-    glOrtho(0, viewPortSize.width() - 1,
-            0, viewPortSize.height() - 1, 1, -1); // aargh...
-    glMatrixMode(GL_MODELVIEW);
-    glLoadIdentity();
-}
-
-// locking ok
-void OpenGLVideo::InitOpenGL(void)
-{
-    gl_context->MakeCurrent(true);
-    glDisable(GL_BLEND);
-    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); // for gl osd
-    glDisable(GL_DEPTH_TEST);
-    glDepthMask(GL_FALSE);
-    glDisable(GL_CULL_FACE);
-    gl_context->EnableTextures();;
-    glShadeModel(GL_FLAT);
-    glDisable(GL_POLYGON_SMOOTH);
-    glDisable(GL_LINE_SMOOTH);
-    glDisable(GL_POINT_SMOOTH);
-    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
-    glClear(GL_COLOR_BUFFER_BIT);
-    glFlush();
-    gl_context->MakeCurrent(false);
-}
+    gl_context->SetViewPort(viewportSize);
+}

 // locking ok
-uint OpenGLVideo::CreateVideoTexture(QSize size, QSize &tex_size)
+uint OpenGLVideo::CreateVideoTexture(QSize size, QSize &tex_size,
+                                     bool use_pbo)
 {
-    uint tmp_tex = gl_context->CreateTexture();
-
     QSize temp = GetTextureSize(size);
+    uint tmp_tex = gl_context->CreateTexture(temp, size, use_pbo,
+                                             textureType);

-    if ((temp.width()  > (int)gl_context->GetMaxTexSize()) ||
-        (temp.height() > (int)gl_context->GetMaxTexSize()) ||
-        !gl_context->SetupTexture(temp, tmp_tex, GL_LINEAR))
+    if (!tmp_tex)
     {
         VERBOSE(VB_PLAYBACK, LOC_ERR + "Could not create texture.");
-        gl_context->DeleteTexture(tmp_tex);
         return 0;
     }

     tex_size = temp;

-    VERBOSE(VB_PLAYBACK, LOC + QString("Created main input texture %1x%2")
+    VERBOSE(VB_PLAYBACK, LOC + QString("Created texture (%1x%2)")
             .arg(temp.width()).arg(temp.height()));

     return tmp_tex;
Lines 608-614
 // locking ok
 QSize OpenGLVideo::GetTextureSize(const QSize &size)
 {
-    if (gl_context->IsFeatureSupported(kGLExtRect))
+    if (textureRects)
         return size;

     int w = 64;
628
}
604
}
629
605
630
// locking ok
606
// locking ok
631
void OpenGLVideo::UpdateInputFrame(const VideoFrame *frame)
607
void OpenGLVideo::UpdateInputFrame(const VideoFrame *frame, bool soft_bob)
632
{
608
{
633
    if (frame->width  != videoSize.width()  ||
609
    OpenGLContextLocker ctx_lock(gl_context);
634
        frame->height != videoSize.height() ||
610
611
    if (frame->width  != actual_video_dim.width()  ||
612
        frame->height != actual_video_dim.height() ||
635
        frame->width  < 1 ||
613
        frame->width  < 1 ||
636
        frame->height < 1)
614
        frame->height < 1)
637
    {
615
    {
Lines 641-704 Link Here
641
619
642
    if (filters.count(kGLFilterYUV2RGB) && (frame->codec == FMT_YV12))
620
    if (filters.count(kGLFilterYUV2RGB) && (frame->codec == FMT_YV12))
643
    {
621
    {
644
        UpdateInput(frame->buf, frame->offsets, 0, FMT_YV12, videoSize);
622
        if (hardwareDeinterlacing)
623
            RotateTextures();
624
625
        gl_context->UpdateTexture(inputTextures[0], frame->buf,
626
                                  frame->offsets, frame->pitches, FMT_YV12,
627
                                  frame->interlaced_frame && !soft_bob);
628
        inputUpdated = true;
645
        return;
629
        return;
646
    }
630
    }
647
631
648
    // software yuv2rgb
632
    // software yuv2rgb
649
    if (convertSize != videoSize)
633
    if (convertSize != actual_video_dim)
650
    {
634
    {
651
        ShutDownYUV2RGB();
635
        ShutDownYUV2RGB();
652
636
653
        VERBOSE(VB_PLAYBACK, LOC + "Init software conversion.");
637
        VERBOSE(VB_PLAYBACK, LOC + "Init software conversion.");
654
638
655
        convertSize = videoSize;
639
        convertSize = actual_video_dim;
656
        convertBuf = new unsigned char[
640
        convertBuf = new unsigned char[
657
            (videoSize.width() * videoSize.height() * 3) + 128];
641
            (actual_video_dim.width() * actual_video_dim.height() * 4) + 128];
658
    }
642
    }
659
643
660
    if (convertBuf)
644
    if (convertBuf)
661
    {
645
    {
662
        AVPicture img_in, img_out;
646
        AVPicture img_in, img_out;
663
647
664
        avpicture_fill(&img_out, (uint8_t *)convertBuf, PIX_FMT_RGB24,
648
        avpicture_fill(&img_out, (uint8_t *)convertBuf, PIX_FMT_BGRA,
665
                       convertSize.width(), convertSize.height());
649
                       convertSize.width(), convertSize.height());
666
        avpicture_fill(&img_in, (uint8_t *)frame->buf, PIX_FMT_YUV420P,
650
        avpicture_fill(&img_in, (uint8_t *)frame->buf, PIX_FMT_YUV420P,
667
                       convertSize.width(), convertSize.height());
651
                       convertSize.width(), convertSize.height());
668
        img_convert(&img_out, PIX_FMT_RGB24,
652
        img_convert(&img_out, PIX_FMT_BGRA,
669
                    &img_in,  PIX_FMT_YUV420P,
653
                    &img_in,  PIX_FMT_YUV420P,
670
                    convertSize.width(), convertSize.height());
654
                    convertSize.width(), convertSize.height());
671
655
672
        int offset = 0;
656
        int offset = 0;
673
        UpdateInput(convertBuf, &offset, 0, FMT_RGB24, convertSize);
657
        gl_context->UpdateTexture(inputTextures[0], convertBuf,
658
                                  &offset, &offset, FMT_BGRA);
674
    }
659
    }
660
661
    inputUpdated = true;
675
}
662
}
676
663
677
// locking ok
664
// locking ok
678
void OpenGLVideo::UpdateInput(const unsigned char *buf, const int *offsets,
665
void OpenGLVideo::UpdateInput(const unsigned char *buf, const int *offsets,
679
                              uint texture_index, int format, QSize size)
666
                              int format, QSize size,
667
                              const unsigned char *alpha)
680
{
668
{
681
    inputUpdated = false;
669
    OpenGLContextLocker ctx_lock(gl_context);
682
670
683
    if (texture_index >= inputTextures.size())
671
    if (size.width()  != actual_video_dim.width()  ||
672
        size.height() != actual_video_dim.height() ||
673
        format != FMT_YV12 || !alpha)
684
        return;
674
        return;
685
675
686
    copy_pixels_to_texture(
676
    int pitches[3] = {size.width(), size.width() >> 1, size.width() >> 1};
687
        buf + offsets[0], format, size,
677
688
        inputTextures[texture_index], gl_context->GetTextureType());
678
    gl_context->UpdateTexture(inputTextures[0], buf,
689
679
                              offsets, pitches, FMT_YV12,
690
    if (FMT_YV12 == format)
680
                              false, alpha);
691
    {
692
        QSize chroma_size(size.width() >> 1, size.height() >> 1);
693
        copy_pixels_to_texture(
694
            buf + offsets[1], format, chroma_size,
695
            inputTextures[texture_index + 1],
696
            gl_context->GetTextureType());
697
        copy_pixels_to_texture(
698
            buf + offsets[2], format, chroma_size,
699
            inputTextures[texture_index + 2],
700
            gl_context->GetTextureType());
701
    }
702
681
703
    inputUpdated = true;
682
    inputUpdated = true;
704
}
683
}
Lines 718-731 Link Here
718
// TODO shouldn't this take a QSize, not QRect?
697
// TODO shouldn't this take a QSize, not QRect?
719
void OpenGLVideo::SetVideoResize(const QRect &rect)
698
void OpenGLVideo::SetVideoResize(const QRect &rect)
720
{
699
{
721
    bool abort = ((rect.right()  > videoSize.width())  ||
700
    OpenGLContextLocker ctx_lock(gl_context);
722
                  (rect.bottom() > videoSize.height()) ||
701
723
                  (rect.width()  > videoSize.width())  ||
702
    bool abort = ((rect.right()  > video_dim.width())  ||
724
                  (rect.height() > videoSize.height()));
703
                  (rect.bottom() > video_dim.height()) ||
704
                  (rect.width()  > video_dim.width())  ||
705
                  (rect.height() > video_dim.height()));
725
706
726
    // if resize == existing frame, no need to carry on
707
    // if resize == existing frame, no need to carry on
727
708
728
    abort |= !rect.left() && !rect.top() && (rect.size() == videoSize);
709
    abort |= !rect.left() && !rect.top() && (rect.size() == video_dim);
729
710
730
    if (!abort)
711
    if (!abort)
731
    {
712
    {
Lines 740-745 Link Here
740
// locking ok
721
// locking ok
741
void OpenGLVideo::DisableVideoResize(void)
722
void OpenGLVideo::DisableVideoResize(void)
742
{
723
{
724
    OpenGLContextLocker ctx_lock(gl_context);
725
743
    videoResize     = false;
726
    videoResize     = false;
744
    videoResizeRect = QRect(0, 0, 0, 0);
727
    videoResizeRect = QRect(0, 0, 0, 0);
745
}
728
}
Lines 749-767 Link Here
749
{
732
{
750
    // FIXME video aspect == display aspect
733
    // FIXME video aspect == display aspect
751
734
752
    if ((videoSize.height() <= 0) || (videoSize.width() <= 0))
735
    if ((video_dim.height() <= 0) || (video_dim.width() <= 0))
753
        return;
736
        return;
754
737
755
    float height     = visibleRect.height();
738
    float height     = display_visible_rect.height();
756
    float new_top    = height - ((float)videoResizeRect.bottom() /
739
    float new_top    = height - ((float)videoResizeRect.bottom() /
757
                                 (float)videoSize.height()) * height;
740
                                 (float)video_dim.height()) * height;
758
    float new_bottom = height - ((float)videoResizeRect.top() /
741
    float new_bottom = height - ((float)videoResizeRect.top() /
759
                                 (float)videoSize.height()) * height;
742
                                 (float)video_dim.height()) * height;
760
743
761
    left   = (((float) videoResizeRect.left() / (float) videoSize.width()) *
744
    left   = (((float) videoResizeRect.left() / (float) video_dim.width()) *
762
              visibleRect.width());
745
              display_visible_rect.width());
763
    right  = (((float) videoResizeRect.right() / (float) videoSize.width()) *
746
    right  = (((float) videoResizeRect.right() / (float) video_dim.width()) *
764
              visibleRect.width());
747
              display_visible_rect.width());
765
748
766
    top    = new_top;
749
    top    = new_top;
767
    bottom = new_bottom;
750
    bottom = new_bottom;
Lines 773-809 Link Here
773
    if (deinterlacing == hardwareDeinterlacing)
756
    if (deinterlacing == hardwareDeinterlacing)
774
        return;
757
        return;
775
758
776
    VERBOSE(VB_PLAYBACK, LOC + QString("Turning %1 deinterlacing.")
777
            .arg(deinterlacing ? "on" : "off"));
778
779
    hardwareDeinterlacing = deinterlacing;
759
    hardwareDeinterlacing = deinterlacing;
780
760
781
    glfilt_map_t::iterator it = filters.begin();
761
    OpenGLContextLocker ctx_lock(gl_context);
782
    for (; it != filters.end(); it++)
762
    CheckResize(hardwareDeinterlacing);
783
    {
784
        it->second->outputBuffer = kFrameBufferObject;
785
786
        if ((it->first >= kGLFilterLinearBlendDeint) &&
787
            (it->first <= kGLFilterOneFieldDeintDFR) &&
788
            !deinterlacing)
789
        {
790
            it->second->outputBuffer = kNoBuffer;
791
        }
792
    }
793
794
    glfilt_map_t::reverse_iterator rit = filters.rbegin();
795
    for (; rit != filters.rend(); rit++)
796
    {
797
        if (rit->second->outputBuffer == kFrameBufferObject)
798
        {
799
            rit->second->outputBuffer = kDefaultBuffer;
800
            break;
801
        }
802
    }
803
804
    gl_context->MakeCurrent(true);
805
    SetFiltering();
806
    gl_context->MakeCurrent(false);
807
}
763
}
808
764
809
// locking ok
765
// locking ok
Lines 813-836 Link Here
813
    if (inputTextures.empty() || filters.empty())
769
    if (inputTextures.empty() || filters.empty())
814
        return;
770
        return;
815
771
772
    OpenGLContextLocker ctx_lock(gl_context);
773
774
    // enable correct texture type
775
    gl_context->EnableTextures(inputTextures[0]);
776
816
    vector<GLuint> inputs = inputTextures;
777
    vector<GLuint> inputs = inputTextures;
817
    QSize inputsize = inputTextureSize;
778
    QSize inputsize = inputTextureSize;
779
    QSize realsize  = GetTextureSize(video_dim);
818
    uint  numfilters = filters.size();
780
    uint  numfilters = filters.size();
819
781
820
    glfilt_map_t::iterator it;
782
    glfilt_map_t::iterator it;
821
    for (it = filters.begin(); it != filters.end(); it++)
783
    for (it = filters.begin(); it != filters.end(); it++)
822
    {
784
    {
823
        if (it->second->rotateFrameBuffers &&
824
            !(it->first == kGLFilterYUV2RGB && scan == kScan_Intr2ndField))
825
        {
826
            Rotate(&(it->second->frameBufferTextures));
827
            Rotate(&(it->second->frameBuffers));
828
        }
829
830
        // skip disabled filters
831
        if (it->second->outputBuffer == kNoBuffer)
832
            continue;
833
834
        OpenGLFilterType type = it->first;
785
        OpenGLFilterType type = it->first;
835
        OpenGLFilter *filter = it->second;
786
        OpenGLFilter *filter = it->second;
836
787
Lines 838-874 Link Here
838
        if (!inputUpdated && type == kGLFilterYUV2RGBA)
789
        if (!inputUpdated && type == kGLFilterYUV2RGBA)
839
        {
790
        {
840
            inputs = filter->frameBufferTextures;
791
            inputs = filter->frameBufferTextures;
841
            inputsize = videoSize;
792
            inputsize = realsize;
842
            continue;
843
        }
844
845
        // skip colour conversion for frames already in frame buffer
846
        if (!inputUpdated && (frame == currentFrameNum) &&
847
            (type == kGLFilterYUV2RGB) && (frame != 0) &&
848
            (!(softwareDeinterlacing && softwareDeinterlacer == "bobdeint")))
849
        {
850
            inputs = filter->frameBufferTextures;
851
            inputsize = videoSize;
852
            continue;
793
            continue;
853
        }
794
        }
854
795
855
        // texture coordinates
796
        // texture coordinates
856
        float t_right = (float)videoSize.width();
797
        float t_right = (float)video_dim.width();
857
        float t_bottom  = (float)videoSize.height();
798
        float t_bottom  = (float)video_dim.height();
858
        float t_top = 0.0f;
799
        float t_top = 0.0f;
859
        float t_left = 0.0f;
800
        float t_left = 0.0f;
860
        float trueheight = (float)videoSize.height();
801
        float trueheight = (float)video_dim.height();
861
802
862
        // only apply overscan on last filter
803
        // only apply overscan on last filter
863
        if (filter->outputBuffer == kDefaultBuffer)
804
        if (filter->outputBuffer == kDefaultBuffer)
864
        {
805
        {
865
            t_left   = (float)frameRect.left();
806
            t_left   = (float)video_rect.left();
866
            t_right  = (float)frameRect.width() + t_left;
807
            t_right  = (float)video_rect.width() + t_left;
867
            t_top    = (float)frameRect.top();
808
            t_top    = (float)video_rect.top();
868
            t_bottom = (float)frameRect.height() + t_top;
809
            t_bottom = (float)video_rect.height() + t_top;
869
        }
810
        }
870
811
871
        if (!gl_context->IsFeatureSupported(kGLExtRect) &&
812
        if (!textureRects &&
872
            (inputsize.width() > 0) && (inputsize.height() > 0))
813
            (inputsize.width() > 0) && (inputsize.height() > 0))
873
        {
814
        {
874
            t_right  /= inputsize.width();
815
            t_right  /= inputsize.width();
Lines 878-913 Link Here
878
            trueheight /= inputsize.height();
819
            trueheight /= inputsize.height();
879
        }
820
        }
880
821
881
        float line_height = (trueheight / (float)videoSize.height());
822
        // software bobdeint
882
        float bob = line_height / 2.0f;
823
        if ((softwareDeinterlacer == "bobdeint") &&
883
824
            softwareDeinterlacing &&
884
        if (type == kGLFilterBobDeintDFR)
825
            (filter->outputBuffer == kDefaultBuffer))
885
        {
826
        {
886
            if (scan == kScan_Interlaced)
827
            float bob = (trueheight / (float)video_dim.height()) / 4.0f;
887
            {
888
                t_bottom += bob;
889
                t_top += bob;
890
            }
891
            if (scan == kScan_Intr2ndField)
828
            if (scan == kScan_Intr2ndField)
892
            {
829
            {
893
                t_bottom -= bob;
894
                t_top -= bob;
895
            }
896
        }
897
898
        if (softwareDeinterlacer == "bobdeint" &&
899
            softwareDeinterlacing && (type == kGLFilterYUV2RGB ||
900
            (type == kGLFilterResize && numfilters == 1)))
901
        {
902
            bob = line_height / 4.0f;
903
            if (scan == kScan_Interlaced)
904
            {
905
                t_top /= 2;
830
                t_top /= 2;
906
                t_bottom /= 2;
831
                t_bottom /= 2;
907
                t_bottom += bob;
832
                t_bottom += bob;
908
                t_top    += bob;
833
                t_top    += bob;
909
            }
834
            }
910
            if (scan == kScan_Intr2ndField)
835
            if (scan == kScan_Interlaced)
911
            {
836
            {
912
                t_top = (trueheight / 2) + (t_top / 2);
837
                t_top = (trueheight / 2) + (t_top / 2);
913
                t_bottom = (trueheight / 2) + (t_bottom / 2);
838
                t_bottom = (trueheight / 2) + (t_bottom / 2);
Lines 916-951 Link Here
916
            }
841
            }
917
        }
842
        }
918
843
919
        float t_right_uv = t_right;
920
        float t_top_uv   = t_top;
921
        float t_bottom_uv = t_bottom;
922
        float t_left_uv  = t_left;
923
924
        if (gl_context->IsFeatureSupported(kGLExtRect))
925
        {
926
            t_right_uv  /= 2;
927
            t_top_uv    /= 2;
928
            t_bottom_uv /= 2;
929
            t_left_uv   /= 2;
930
        }
931
932
        // vertex coordinates
844
        // vertex coordinates
933
        QRect display = (filter->frameBuffers.empty() || 
845
        QRect display = (filter->frameBuffers.empty() ||
934
                        filter->outputBuffer == kDefaultBuffer) ?
846
                         filter->outputBuffer == kDefaultBuffer) ?
935
            videoRect : frameBufferRect;
847
                         display_video_rect : frameBufferRect;
936
848
937
        float vleft  = display.left();
849
        float vleft  = display.left();
938
        float vright = display.right();
850
        float vright = display.right();
939
        float vtop   = display.top();
851
        float vtop   = display.top();
940
        float vbot   = display.bottom();
852
        float vbot   = display.bottom();
941
853
854
        // hardware bobdeint
855
        if (filter->outputBuffer == kDefaultBuffer &&
856
            hardwareDeinterlacing &&
857
            hardwareDeinterlacer == "openglbobdeint")
858
        {
859
            float bob = ((float)display.height() / (float)video_dim.height())
860
                        / 2.0f;
861
            if (scan == kScan_Interlaced)
862
            {
863
                vbot -= bob;
864
                vtop -= bob;
865
            }
866
            if (scan == kScan_Intr2ndField)
867
            {
868
                vbot += bob;
869
                vtop += bob;
870
            }
871
        }
872
942
        // resize for interactive tv
873
        // resize for interactive tv
943
        if (videoResize && filter->outputBuffer == kDefaultBuffer)
874
        if (videoResize && filter->outputBuffer == kDefaultBuffer)
944
            CalculateResize(vleft, vtop, vright, vbot);
875
            CalculateResize(vleft, vtop, vright, vbot);
945
876
946
        if (invertVideo &&
877
        // invert horizontally 
947
            ((type == kGLFilterYUV2RGB) || (type == kGLFilterYUV2RGBA)) ||
878
        if (((type == kGLFilterYUV2RGB) || (type == kGLFilterYUV2RGBA)) ||
948
            ((type == kGLFilterResize) && (numfilters == 1)))
879
            (filter->outputBuffer == kDefaultBuffer && numfilters == 1))
949
        {
880
        {
950
            float temp = vtop;
881
            float temp = vtop;
951
            vtop = vbot;
882
            vtop = vbot;
Lines 956-973 Link Here
956
        switch (filter->outputBuffer)
887
        switch (filter->outputBuffer)
957
        {
888
        {
958
            case kDefaultBuffer:
889
            case kDefaultBuffer:
959
                if (frameBuffer)
960
                    gl_context->BindFramebuffer(frameBuffer);
961
962
                // clear the buffer
890
                // clear the buffer
963
                if (viewportControl)
891
                if (viewportControl)
964
                {
892
                {
965
                    glClear(GL_COLOR_BUFFER_BIT);
893
                    glClear(GL_COLOR_BUFFER_BIT);
966
                    SetViewPortPrivate(visibleRect.size());
894
                    gl_context->SetViewPort(display_visible_rect.size());
967
                }
895
                }
968
                else
896
                else
969
                {
897
                {
970
                    SetViewPortPrivate(masterViewportSize);
898
                    gl_context->SetViewPort(masterViewportSize);
971
                }
899
                }
972
900
973
                break;
901
                break;
Lines 976-1031 Link Here
976
                if (!filter->frameBuffers.empty())
904
                if (!filter->frameBuffers.empty())
977
                {
905
                {
978
                    gl_context->BindFramebuffer(filter->frameBuffers[0]);
906
                    gl_context->BindFramebuffer(filter->frameBuffers[0]);
979
                    SetViewPortPrivate(frameBufferRect.size());
907
                    gl_context->SetViewPort(frameBufferRect.size());
980
                }
908
                }
981
                break;
909
                break;
982
910
983
            case kNoBuffer:
911
            default:
984
                continue;
912
                continue;
985
        }
913
        }
986
914
987
        // bind correct textures
915
        // bind correct textures
988
        for (uint i = 0; i < inputs.size(); i++)
916
        uint active_tex = 0;
917
        for (; active_tex < inputs.size(); active_tex++)
989
        {
918
        {
990
            glActiveTexture(GL_TEXTURE0 + i);
919
            glActiveTexture(GL_TEXTURE0 + active_tex);
991
            glBindTexture(gl_context->GetTextureType(), inputs[i]);
920
            glBindTexture(textureType, inputs[active_tex]);
921
        }
922
923
        if (!referenceTextures.empty() &&
924
            hardwareDeinterlacing &&
925
            type == kGLFilterYUV2RGB)
926
        {
927
            uint max = inputs.size() + referenceTextures.size();
928
            uint ref = 0;
929
            for (; active_tex < max; active_tex++, ref++)
930
            {
931
                glActiveTexture(GL_TEXTURE0 + active_tex);
932
                glBindTexture(textureType, referenceTextures[ref]);
933
            }
934
        }
935
936
        if (helperTexture && type == kGLFilterBicubic)
937
        {
938
            glActiveTexture(GL_TEXTURE0 + active_tex);
939
            glBindTexture(GL_TEXTURE_1D/*N.B.*/, helperTexture);
992
        }
940
        }
993
941
994
        // enable fragment program and set any environment variables
942
        // enable fragment program and set any environment variables
995
        if ((type != kGLFilterNone) && (type != kGLFilterResize))
943
        if ((type != kGLFilterNone) && (type != kGLFilterResize))
996
        {
944
        {
997
            glEnable(GL_FRAGMENT_PROGRAM_ARB);
945
            glEnable(GL_FRAGMENT_PROGRAM_ARB);
998
            gl_context->BindFragmentProgram(filter->fragmentProgram);
946
            GLuint program = 0;
999
            float field = -line_height;
947
948
            if (type == kGLFilterYUV2RGB)
949
            {
950
                if (hardwareDeinterlacing &&
951
                    filter->fragmentPrograms.size() == 3)
952
                {
953
                    if (scan == kScan_Interlaced)
954
                        program = 1;
955
                    else if (scan == kScan_Intr2ndField)
956
                        program = 2;
957
                }
958
            }
959
960
            gl_context->BindFragmentProgram(filter->fragmentPrograms[program]);
1000
961
1001
            switch (type)
962
            switch (type)
1002
            {
963
            {
1003
                case kGLFilterYUV2RGB:
964
                case kGLFilterYUV2RGB:
1004
                case kGLFilterYUV2RGBA:
965
                case kGLFilterYUV2RGBA:
1005
                    if (useColourControl)
966
                    if (useColourControl)
1006
                    {
967
                        gl_context->SetColourParams();
1007
                        gl_context->InitFragmentParams(
1008
                            0,
1009
                            pictureAttribs[kPictureAttribute_Brightness],
1010
                            pictureAttribs[kPictureAttribute_Contrast],
1011
                            pictureAttribs[kPictureAttribute_Colour],
1012
                            0.0f);
1013
                    }
1014
                    break;
1015
1016
                case kGLFilterBobDeintDFR:
1017
                case kGLFilterOneFieldDeintDFR:
1018
                case kGLFilterKernelDeintDFR:
1019
                case kGLFilterFieldOrderDFR:
1020
                case kGLFilterLinearBlendDeintDFR:
1021
                    if (scan == kScan_Intr2ndField)
1022
                        field *= -1;
1023
1024
                case kGLFilterOneFieldDeint:
1025
                case kGLFilterKernelDeint:
1026
                case kGLFilterLinearBlendDeint:
1027
                    gl_context->InitFragmentParams(
1028
                        0, line_height * 2.0f, field, 0.0f, 0.0f);
1029
                    break;
968
                    break;
1030
969
1031
                case kGLFilterNone:
970
                case kGLFilterNone:
Lines 1041-1083 Link Here
1041
        // draw quad
980
        // draw quad
1042
        glBegin(GL_QUADS);
981
        glBegin(GL_QUADS);
1043
        glTexCoord2f(t_left, t_top);
982
        glTexCoord2f(t_left, t_top);
1044
        if (type == kGLFilterYUV2RGB || type == kGLFilterYUV2RGBA)
1045
        {
1046
            glMultiTexCoord2f(GL_TEXTURE1, t_left_uv, t_top_uv);
1047
            glMultiTexCoord2f(GL_TEXTURE2, t_left_uv, t_top_uv);
1048
            if (type == kGLFilterYUV2RGBA)
1049
                glMultiTexCoord2f(GL_TEXTURE3, t_left_uv, t_top_uv);
1050
        }
1051
        glVertex2f(vleft,  vtop);
983
        glVertex2f(vleft,  vtop);
1052
984
1053
        glTexCoord2f(t_right, t_top);
985
        glTexCoord2f(t_right, t_top);
1054
        if (type == kGLFilterYUV2RGB || type == kGLFilterYUV2RGBA)
1055
        {
1056
            glMultiTexCoord2f(GL_TEXTURE1, t_right_uv, t_top_uv);
1057
            glMultiTexCoord2f(GL_TEXTURE2, t_right_uv, t_top_uv);
1058
            if (type == kGLFilterYUV2RGBA)
1059
                glMultiTexCoord2f(GL_TEXTURE3, t_right, t_top);
1060
        }
1061
        glVertex2f(vright, vtop);
986
        glVertex2f(vright, vtop);
1062
987
1063
        glTexCoord2f(t_right, t_bottom);
988
        glTexCoord2f(t_right, t_bottom);
1064
        if (type == kGLFilterYUV2RGB || type == kGLFilterYUV2RGBA)
1065
        {
1066
            glMultiTexCoord2f(GL_TEXTURE1, t_right_uv, t_bottom_uv);
1067
            glMultiTexCoord2f(GL_TEXTURE2, t_right_uv, t_bottom_uv);
1068
            if (type == kGLFilterYUV2RGBA)
1069
                glMultiTexCoord2f(GL_TEXTURE3, t_right, t_bottom);
1070
        }
1071
        glVertex2f(vright, vbot);
989
        glVertex2f(vright, vbot);
1072
990
1073
        glTexCoord2f(t_left, t_bottom);
991
        glTexCoord2f(t_left, t_bottom);
1074
        if (type == kGLFilterYUV2RGB || type == kGLFilterYUV2RGBA)
1075
        {
1076
            glMultiTexCoord2f(GL_TEXTURE1, t_left_uv, t_bottom_uv);
1077
            glMultiTexCoord2f(GL_TEXTURE2, t_left_uv, t_bottom_uv);
1078
            if (type == kGLFilterYUV2RGBA)
1079
                glMultiTexCoord2f(GL_TEXTURE3, t_left_uv, t_bottom);
1080
        }
1081
        glVertex2f(vleft,  vbot);
992
        glVertex2f(vleft,  vbot);
1082
        glEnd();
993
        glEnd();
1083
994
Lines 1093-1168 Link Here
1093
        }
1004
        }
1094
1005
1095
        // switch back to default framebuffer
1006
        // switch back to default framebuffer
1096
        if (filter->outputBuffer != kDefaultBuffer || frameBuffer)
1007
        if (filter->outputBuffer != kDefaultBuffer)
1097
            gl_context->BindFramebuffer(0);
1008
            gl_context->BindFramebuffer(0);
1098
1009
1099
        inputs = filter->frameBufferTextures;
1010
        inputs = filter->frameBufferTextures;
1100
        inputsize = videoSize;
1011
        inputsize = realsize;
1101
    }
1012
    }
1102
1013
1103
    currentFrameNum = frame;
1014
    currentFrameNum = frame;
1104
    inputUpdated = false;
1015
    inputUpdated = false;
1105
}
1016
}
1106
1017
1107
void OpenGLVideo::Rotate(vector<GLuint> *target)
1018
void OpenGLVideo::RotateTextures(void)
1108
{
1019
{
1109
    if (target->size() < 2)
1020
   if (referenceTextures.size() < 2)
1110
        return;
1021
        return;
1111
1022
1112
    GLuint tmp = (*target)[target->size() - 1];
1023
    GLuint tmp = referenceTextures[referenceTextures.size() - 1];
1113
    for (uint i = target->size() - 1; i > 0;  i--)
1114
        (*target)[i] = (*target)[i - 1];
1115
1024
1116
    (*target)[0] = tmp;
1025
    for (uint i = referenceTextures.size() - 1; i > 0;  i--)
1026
        referenceTextures[i] = referenceTextures[i - 1];
1027
 
1028
    referenceTextures[0] = inputTextures[0];
1029
    inputTextures[0] = tmp;
1117
}
1030
}
1118
1031
1119
// locking ok
1032
void OpenGLVideo::DeleteTextures(vector<uint> *textures)
1120
int OpenGLVideo::SetPictureAttribute(
1121
    PictureAttribute attribute, int newValue)
1122
{
1033
{
1123
    if (!useColourControl)
1034
    if ((*textures).empty())
1124
        return -1;
1035
        return;
1125
1126
    int ret = -1;
1127
    switch (attribute)
1128
    {
1129
        case kPictureAttribute_Brightness:
1130
            ret = newValue;
1131
            pictureAttribs[attribute] = (newValue * 0.02f) - 0.5f;
1132
            break;
1133
        case kPictureAttribute_Contrast:
1134
        case kPictureAttribute_Colour:
1135
            ret = newValue;
1136
            pictureAttribs[attribute] = (newValue * 0.02f);
1137
            break;
1138
        case kPictureAttribute_Hue: // not supported yet...
1139
            break;
1140
        default:
1141
            break;
1142
    }
1143
1144
    return ret;
1145
}
1146
1036
1147
PictureAttributeSupported 
1037
    for (uint i = 0; i < (*textures).size(); i++)
1148
OpenGLVideo::GetSupportedPictureAttributes(void) const
1038
        gl_context->DeleteTexture((*textures)[i]);
1149
{
1039
    (*textures).clear();
1150
    return (!useColourControl) ?
1151
        kPictureAttributeSupported_None :
1152
        (PictureAttributeSupported) 
1153
        (kPictureAttributeSupported_Brightness |
1154
         kPictureAttributeSupported_Contrast |
1155
         kPictureAttributeSupported_Colour);
1156
}
1040
}
1157
1041
1158
// locking ok
1042
// locking ok
1159
void OpenGLVideo::SetTextureFilters(vector<GLuint> *textures, int filt)
1043
void OpenGLVideo::SetTextureFilters(vector<GLuint> *textures,
1044
                                    int filt, int wrap)
1160
{
1045
{
1161
    if (textures->empty())
1046
    if (textures->empty())
1162
        return;
1047
        return;
1163
1048
1164
    for (uint i = 0; i < textures->size(); i++)
1049
    for (uint i = 0; i < textures->size(); i++)
1165
        gl_context->SetupTextureFilters((*textures)[i], filt);
1050
        gl_context->SetTextureFilters((*textures)[i], filt, wrap);
1166
}
1051
}
1167
1052
1168
// locking ok
1053
// locking ok
Lines 1174-1197 Link Here
1174
        ret = kGLFilterYUV2RGB;
1059
        ret = kGLFilterYUV2RGB;
1175
    else if (filter.contains("osd"))
1060
    else if (filter.contains("osd"))
1176
        ret = kGLFilterYUV2RGBA;
1061
        ret = kGLFilterYUV2RGBA;
1177
    else if (filter.contains("openglkerneldeint"))
1178
        ret = kGLFilterKernelDeint;
1179
    else if (filter.contains("opengllinearblend"))
1180
        ret = kGLFilterLinearBlendDeint;
1181
    else if (filter.contains("openglonefield"))
1182
        ret = kGLFilterOneFieldDeint;
1183
    else if (filter.contains("openglbobdeint"))
1184
        ret = kGLFilterBobDeintDFR;
1185
    else if (filter.contains("opengldoubleratelinearblend"))
1186
        ret = kGLFilterLinearBlendDeintDFR;
1187
    else if (filter.contains("opengldoubleratekerneldeint"))
1188
        ret = kGLFilterKernelDeintDFR;
1189
    else if (filter.contains("opengldoublerateonefield"))
1190
        ret = kGLFilterOneFieldDeintDFR;
1191
    else if (filter.contains("opengldoubleratefieldorder"))
1192
        ret = kGLFilterFieldOrderDFR;
1193
    else if (filter.contains("resize"))
1062
    else if (filter.contains("resize"))
1194
        ret = kGLFilterResize;
1063
        ret = kGLFilterResize;
1064
    else if (filter.contains("bicubic"))
1065
        ret = kGLFilterBicubic;
1195
1066
1196
    return ret;
1067
    return ret;
1197
}
1068
}
Lines 1207-1500 Link Here
1207
            return "master";
1078
            return "master";
1208
        case kGLFilterYUV2RGBA:
1079
        case kGLFilterYUV2RGBA:
1209
            return "osd";
1080
            return "osd";
1210
        case kGLFilterKernelDeint:
1211
            return "openglkerneldeint";
1212
        case kGLFilterLinearBlendDeint:
1213
            return "opengllinearblend";
1214
        case kGLFilterOneFieldDeint:
1215
            return "openglonefield";
1216
        case kGLFilterBobDeintDFR:
1217
            return "openglbobdeint";
1218
        case kGLFilterLinearBlendDeintDFR:
1219
            return "opengldoubleratelinearblend";
1220
        case kGLFilterKernelDeintDFR:
1221
            return "opengldoubleratekerneldeint";
1222
        case kGLFilterOneFieldDeintDFR:
1223
            return "opengldoublerateonefield";
1224
        case kGLFilterFieldOrderDFR:
1225
            return "opengldoubleratefieldorder";
1226
        case kGLFilterResize:
1081
        case kGLFilterResize:
1227
            return "resize";
1082
            return "resize";
1083
        case kGLFilterBicubic:
1084
            return "bicubic";
1228
    }
1085
    }
1229
1086
1230
    return "";
1087
    return "";
1231
}
1088
}
1232
1089
1233
static const QString yuv2rgb1a =
1090
static const QString attrib_fast = 
1234
"ATTRIB ytex  = fragment.texcoord[0];"
1091
"ATTRIB tex  = fragment.texcoord[0];\n";
1235
"ATTRIB uvtex = fragment.texcoord[1];"
1236
"TEMP res, tmp;";
1237
1238
static const QString yuv2rgb1b =
1239
"TEMP alpha;"
1240
"TEX alpha, ytex, texture[3], %1;";
1241
1242
static const QString yuv2rgb1c =
1243
"TEX res,   ytex,  texture[0], %1;"
1244
"TEX tmp.x, uvtex, texture[1], %1;"
1245
"TEX tmp.y, uvtex, texture[2], %1;";
1246
1247
static const QString yuv2rgb2 =
1248
"PARAM  adj  = program.env[0];"
1249
"SUB res, res, 0.5;"
1250
"MAD res, res, adj.yyyy, adj.xxxx;"
1251
"SUB tmp, tmp, { 0.5, 0.5 };"
1252
"MAD tmp, adj.zzzz, tmp, 0.5;";
1253
1254
static const QString yuv2rgb3 =
1255
"MAD res, res, 1.164, -0.063;"
1256
"SUB tmp, tmp, { 0.5, 0.5 };"
1257
"MAD res, { 0, -.392, 2.017 }, tmp.xxxw, res;";
1258
1259
static const QString yuv2rgb4 =
1260
"MAD result.color, { 1.596, -.813, 0, 0 }, tmp.yyyw, res;";
1261
1092
1262
static const QString yuv2rgb5 =
1093
static const QString var_alpha =
1263
"MAD result.color, { 0, -.813, 1.596, 0 }, tmp.yyyw, res.bgra;";
1094
"TEMP alpha;\n";
1264
1095
1265
static const QString yuv2rgb6 =
1096
static const QString tex_alpha = 
1266
"MOV result.color.a, alpha.a;";
1097
"TEX alpha, tex, texture[3], %1;\n";
1267
1098
1268
// locking ok
1099
static const QString tex_fast =
1269
QString OpenGLVideo::GetProgramString(OpenGLFilterType name)
1100
"TEX res, tex, texture[0], %1;\n";
1101
1102
static const QString param_colour =
1103
"PARAM  adj  = program.env[0];\n";
1104
1105
static const QString calc_colour_fast =
1106
"SUB res, res, 0.5;\n"
1107
"MAD res, res, adj.zzzy, adj.wwwx;\n";
1108
1109
static const QString end_alpha =
1110
"MOV result.color.a, alpha.a;\n";
1111
1112
static const QString var_fast =
1113
"TEMP tmp, res;\n";
1114
1115
static const QString calc_fast_alpha = 
1116
"MOV result.color.a, res.g;\n";
1117
1118
static const QString end_fast =
1119
"SUB tmp, res.rbgg, { 0.5, 0.5 };\n"
1120
"MAD res, res.a, 1.164, -0.063;\n"
1121
"MAD res, { 0, -.392, 2.017 }, tmp.xxxw, res;\n"
1122
"MAD result.color, { 1.596, -.813, 0, 0 }, tmp.yyyw, res;\n";
1123
1124
static const QString end_fast_alpha =
1125
"SUB tmp, res.rbgg, { 0.5, 0.5 };\n"
1126
"MAD res, res.a, 1.164, -0.063;\n"
1127
"MAD res, { 0, -.392, 2.017 }, tmp.xxxw, res;\n"
1128
"MAD result.color.rgb, { 1.596, -.813, 0, 0 }, tmp.yyyw, res;\n";
1129
1130
static const QString var_deint =
1131
"TEMP other, current, mov, prev;\n";
1132
1133
static const QString field_calc =
1134
"MUL prev, tex.yyyy, %2;\n"
1135
"FRC prev, prev;\n"
1136
"SUB prev, prev, 0.5;\n";
1137
1138
static const QString bobdeint[2] = {
1139
field_calc +
1140
"ADD other, tex, {0.0, %3, 0.0, 0.0};\n"
1141
"TEX other, other, texture[0], %1;\n"
1142
"CMP res, prev, res, other;\n",
1143
field_calc +
1144
"SUB other, tex, {0.0, %3, 0.0, 0.0};\n"
1145
"TEX other, other, texture[0], %1;\n"
1146
"CMP res, prev, other, res;\n"
1147
};
1148
1149
static const QString deint_end_top =
1150
"CMP other, mov, current, other;\n"
1151
"CMP res, prev, current, other;\n";
1152
1153
static const QString deint_end_bot =
1154
"CMP other, mov, current, other;\n"
1155
"CMP res, prev, other, current;\n";
1156
1157
static const QString motion_calc =
1158
"ABS mov, mov;\n"
1159
"SUB mov, mov, 0.07;\n";
1160
1161
static const QString motion_top =
1162
"SUB mov, prev, current;\n" + motion_calc;
1163
1164
static const QString motion_bot =
1165
"SUB mov, res, current;\n" + motion_calc;
1166
1167
static const QString doublerateonefield[2] = {
1168
"TEX current, tex, texture[1], %1;\n"
1169
"TEX prev, tex, texture[2], %1;\n"
1170
"ADD other, tex, {0.0, %3, 0.0, 0.0};\n"
1171
"TEX other, other, texture[1], %1;\n"
1172
+ motion_top + field_calc + deint_end_top,
1173
1174
"TEX current, tex, texture[1], %1;\n"
1175
"SUB other, tex, {0.0, %3, 0.0, 0.0};\n"
1176
"TEX other, other, texture[1], %1;\n"
1177
+ motion_bot + field_calc + deint_end_bot
1178
};
1179
1180
static const QString linearblend[2] = {
1181
"TEX current, tex, texture[1], %1;\n"
1182
"TEX prev, tex, texture[2], %1;\n"
1183
"ADD other, tex, {0.0, %3, 0.0, 0.0};\n"
1184
"TEX other, other, texture[1], %1;\n"
1185
"SUB mov, tex, {0.0, %3, 0.0, 0.0};\n"
1186
"TEX mov, mov, texture[1], %1;\n"
1187
"LRP other, 0.5, other, mov;\n"
1188
+ motion_top + field_calc + deint_end_top,
1189
1190
"TEX current, tex, texture[1], %1;\n"
1191
"SUB other, tex, {0.0, %3, 0.0, 0.0};\n"
1192
"TEX other, other, texture[1], %1;\n"
1193
"ADD mov, tex, {0.0, %3, 0.0, 0.0};\n"
1194
"TEX mov, mov, texture[1], %1;\n"
1195
"LRP other, 0.5, other, mov;\n"
1196
+ motion_bot + field_calc + deint_end_bot
1197
};
1198
1199
static const QString kerneldeint[2] = {
1200
"TEX current, tex, texture[1], %1;\n"
1201
"TEX prev, tex, texture[2], %1;\n"
1202
+ motion_top +
1203
"MUL other, 0.125, prev;\n"
1204
"MAD other, 0.125, current, other;\n"
1205
"ADD prev, tex, {0.0, %3, 0.0, 0.0};\n"
1206
"TEX prev, prev, texture[1], %1;\n"
1207
"MAD other, 0.5, prev, other;\n"
1208
"SUB prev, tex, {0.0, %3, 0.0, 0.0};\n"
1209
"TEX prev, prev, texture[1], %1;\n"
1210
"MAD other, 0.5, prev, other;\n"
1211
"ADD prev, tex, {0.0, %4, 0.0, 0.0};\n"
1212
"TEX mov, prev, texture[1], %1;\n"
1213
"MAD other, -0.0625, mov, other;\n"
1214
"TEX mov, prev, texture[2], %1;\n"
1215
"MAD other, -0.0625, mov, other;\n"
1216
"SUB prev, tex, {0.0, %4, 0.0, 0.0};\n"
1217
"TEX mov, prev, texture[1], %1;\n"
1218
"MAD other, -0.0625, mov, other;\n"
1219
"TEX mov, prev, texture[2], %1;\n"
1220
"MAD other, -0.0625, mov, other;\n"
1221
+ field_calc + deint_end_top,
1222
1223
"TEX current, tex, texture[1], %1;\n"
1224
+ motion_bot +
1225
"MUL other, 0.125, res;\n"
1226
"MAD other, 0.125, current, other;\n"
1227
"ADD prev, tex, {0.0, %3, 0.0, 0.0};\n"
1228
"TEX prev, prev, texture[1], %1;\n"
1229
"MAD other, 0.5, prev, other;\n"
1230
"SUB prev, tex, {0.0, %3, 0.0, 0.0};\n"
1231
"TEX prev, prev, texture[1], %1;\n"
1232
"MAD other, 0.5, prev, other;\n"
1233
"ADD prev, tex, {0.0, %4, 0.0, 0.0};\n"
1234
"TEX mov, prev, texture[1], %1;\n"
1235
"MAD other, -0.0625, mov, other;\n"
1236
"TEX mov, prev, texture[0], %1;\n"
1237
"MAD other, -0.0625, mov, other;\n"
1238
"SUB prev, tex, {0.0, %4, 0.0, 0.0};\n"
1239
"TEX mov, prev, texture[1], %1;\n"
1240
"MAD other, -0.0625, mov, other;\n"
1241
"TEX mov, prev, texture[0], %1;\n"
1242
"MAD other, -0.0625, mov, other;\n"
1243
+ field_calc + deint_end_bot
1244
};
1245
1246
static const QString yadif_setup =
1247
"TEMP a,b,c,e,f,g,h,j,k,l;\n"
1248
"TEMP a1,b1,f1,g1,h1,i1,j1,l1,m1,n1;\n"
1249
"ALIAS d1 = f;\n"
1250
"ALIAS k1 = g;\n"
1251
"ALIAS c1 = prev;\n"
1252
"ALIAS e1 = mov;\n"
1253
"ALIAS p0 = res;\n"
1254
"ALIAS p1 = c;\n"
1255
"ALIAS p3 = h;\n"
1256
"ALIAS spred1 = a;\n"
1257
"ALIAS spred2 = b;\n"
1258
"ALIAS spred3 = c;\n"
1259
"ALIAS spred4 = e;\n"
1260
"ALIAS spred5 = f;\n"
1261
"ALIAS sscore = g;\n"
1262
"ALIAS score1 = h;\n"
1263
"ALIAS score2 = j;\n"
1264
"ALIAS score3 = k;\n"
1265
"ALIAS score4 = l;\n"
1266
"ALIAS if1 = a1;\n"
1267
"ALIAS if2 = b1;\n"
1268
"TEMP p2, p4;\n"
1269
"ALIAS diff1 = a;\n"
1270
"ALIAS diff2 = b;\n"
1271
"TEMP diff0;\n";
1272
1273
static const QString yadif_spatial_sample =
1274
"ADD tmp, tex, {%5, %3, 0.0, 0.0};\n"
1275
"TEX e1, tmp, texture[1], %1;\n"
1276
"ADD tmp, tmp, {%5, 0.0, 0.0, 0.0};\n"
1277
"TEX f1, tmp, texture[1], %1;\n"
1278
"ADD tmp, tmp, {%5, 0.0, 0.0, 0.0};\n"
1279
"TEX g1, tmp, texture[1], %1;\n"
1280
"SUB tmp, tmp, {0.0, %4, 0.0, 0.0};\n"
1281
"TEX n1, tmp, texture[1], %1;\n"
1282
"SUB tmp, tmp, {%5, 0.0, 0.0, 0.0};\n"
1283
"TEX m1, tmp, texture[1], %1;\n"
1284
"SUB tmp, tmp, {%5, 0.0, 0.0, 0.0};\n"
1285
"TEX l1, tmp, texture[1], %1;\n"
1286
1287
"SUB tmp, tex, {%5, %3, 0.0, 0.0};\n"
1288
"TEX j1, tmp, texture[1], %1;\n"
1289
"SUB tmp, tmp, {%5, 0.0, 0.0, 0.0};\n"
1290
"TEX i1, tmp, texture[1], %1;\n"
1291
"SUB tmp, tmp, {%5, 0.0, 0.0, 0.0};\n"
1292
"TEX h1, tmp, texture[1], %1;\n"
1293
"ADD tmp, tmp, {0.0, %4, 0.0, 0.0};\n"
1294
"TEX a1, tmp, texture[1], %1;\n"
1295
"ADD tmp, tmp, {%5, 0.0, 0.0, 0.0};\n"
1296
"TEX b1, tmp, texture[1], %1;\n"
1297
"ADD tmp, tmp, {%5, 0.0, 0.0, 0.0};\n"
1298
"TEX c1, tmp, texture[1], %1;\n";
1299
1300
static const QString yadif_calc =
1301
"LRP p0, 0.5, c, h;\n"
1302
"MOV p1, f;\n"
1303
"LRP p2, 0.5, d, i;\n"
1304
"MOV p3, g;\n"
1305
"LRP p4, 0.5, e, j;\n"
1306
1307
"SUB diff0, d, i;\n"
1308
"ABS diff0, diff0;\n"
1309
"SUB tmp, a, f;\n"
1310
"ABS tmp, tmp;\n"
1311
"SUB diff1, b, g;\n"
1312
"ABS diff1, diff1;\n"
1313
"LRP diff1, 0.5, diff1, tmp;\n"
1314
"SUB tmp, k, f;\n"
1315
"ABS tmp, tmp;\n"
1316
"SUB diff2, g, l;\n"
1317
"ABS diff2, diff2;\n"
1318
"LRP diff2, 0.5, diff2, tmp;\n"
1319
"MAX diff0, diff0, diff1;\n"
1320
"MAX diff0, diff0, diff2;\n"
1321
1322
// mode < 2
1323
"SUB tmp, p0, p1;\n"
1324
"SUB other, p4, p3;\n"
1325
"MIN spred1, tmp, other;\n"
1326
"MAX spred2, tmp, other;\n"
1327
"SUB tmp, p2, p1;\n"
1328
"SUB other, p2, p3;\n"
1329
"MAX spred1, spred1, tmp;\n"
1330
"MAX spred1, spred1, other;\n"
1331
"MIN spred2, spred2, tmp;\n"
1332
"MIN spred2, spred2, other;\n"
1333
"MAX spred1, spred2, -spred1;\n"
1334
"MAX diff0, diff0, spred1;\n"
1335
1336
// spatial prediction
1337
"LRP spred1, 0.5, d1, k1;\n"
1338
"LRP spred2, 0.5, c1, l1;\n"
1339
"LRP spred3, 0.5, b1, m1;\n"
1340
"LRP spred4, 0.5, e1, j1;\n"
1341
"LRP spred5, 0.5, f1, i1;\n"
1342
1343
"SUB sscore, c1, j1;\n"
1344
"ABS sscore, sscore;\n"
1345
"SUB tmp, d1, k1;\n"
1346
"ABS tmp, tmp;\n"
1347
"ADD sscore, sscore, tmp;\n"
1348
"SUB tmp, e1, l1;\n"
1349
"ABS tmp, tmp;\n"
1350
"ADD sscore, sscore, tmp;\n"
1351
"SUB sscore, sscore, 1.0;\n"
1352
1353
"SUB score1, b1, k1;\n"
1354
"ABS score1, score1;\n"
1355
"SUB tmp, c1, l1;\n"
1356
"ABS tmp, tmp;\n"
1357
"ADD score1, score1, tmp;\n"
1358
"SUB tmp, d1, m1;\n"
1359
"ABS tmp, tmp;\n"
1360
"ADD score1, score1, tmp;\n"
1361
1362
"SUB score2, a1, l1;\n"
1363
"ABS score2, score2;\n"
1364
"SUB tmp, b1, m1;\n"
1365
"ABS tmp, tmp;\n"
1366
"ADD score2, score2, tmp;\n"
1367
"SUB tmp, c1, n1;\n"
1368
"ABS tmp, tmp;\n"
1369
"ADD score2, score2, tmp;\n"
1370
1371
"SUB score3, d1, i1;\n"
1372
"ABS score3, score3;\n"
1373
"SUB tmp, e1, j1;\n"
1374
"ABS tmp, tmp;\n"
1375
"ADD score3, score3, tmp;\n"
1376
"SUB tmp, f1, k1;\n"
1377
"ABS tmp, tmp;\n"
1378
"ADD score3, score3, tmp;\n"
1379
1380
"SUB score4, e1, h1;\n"
1381
"ABS score4, score4;\n"
1382
"SUB tmp, f1, i1;\n"
1383
"ABS tmp, tmp;\n"
1384
"ADD score4, score4, tmp;\n"
1385
"SUB tmp, g1, j1;\n"
1386
"ABS tmp, tmp;\n"
1387
"ADD score4, score4, tmp;\n"
1388
"SUB if1, sscore, score1;\n"
1389
"SUB if2, score1, score2;\n"
1390
"CMP if2, if1, -1.0, if2;\n"
1391
"CMP spred1, if1, spred1, spred2;\n"
1392
"CMP spred1, if2, spred1, spred3;\n"
1393
"CMP sscore, if1, sscore, score1;\n"
1394
"CMP sscore, if2, sscore, score2;\n"
1395
"SUB if1, sscore, score3;\n"
1396
"SUB if2, score3, score4;\n"
1397
"CMP if2, if1, -1.0, if2;\n"
1398
"CMP spred1, if1, spred1, spred4;\n"
1399
"CMP spred1, if2, spred1, spred5;\n"
1400
"ADD spred4, p2, diff0;\n"
1401
"SUB spred5, p2, diff0;\n"
1402
"SUB if1, spred4, spred1;\n"
1403
"SUB if2, spred1, spred5;\n"
1404
"CMP spred1, if1, spred4, spred1;\n"
1405
"CMP spred1, if2, spred5, spred1;\n";
1406
1407
static const QString yadif[2] = {
1408
yadif_setup +
1409
"TEMP d;\n"
1410
"ALIAS i = current;\n"
1411
"TEX current, tex, texture[1], %1;\n"
1412
"TEX d, tex, texture[2], %1;\n"
1413
"ADD tmp, tex, {0.0, %3, 0.0, 0.0};\n"
1414
"TEX a, tmp, texture[2], %1;\n"
1415
"TEX f, tmp, texture[1], %1;\n"
1416
"TEX k, tmp, texture[0], %1;\n"
1417
"ADD tmp, tex, {0.0, %4, 0.0, 0.0};\n"
1418
"TEX c, tmp, texture[2], %1;\n"
1419
"TEX h, tmp, texture[1], %1;\n"
1420
"SUB tmp, tex, {0.0, %3, 0.0, 0.0};\n"
1421
"TEX b, tmp, texture[2], %1;\n"
1422
"TEX g, tmp, texture[1], %1;\n"
1423
"TEX l, tmp, texture[0], %1;\n"
1424
"SUB tmp, tex, {0.0, %4, 0.0, 0.0};\n"
1425
"TEX e, tmp, texture[2], %1;\n"
1426
"TEX j, tmp, texture[1], %1;\n"
1427
+ yadif_spatial_sample
1428
+ yadif_calc
1429
+ field_calc +
1430
"CMP res, prev, current, spred1;\n"
1431
,
1432
yadif_setup +
1433
"TEMP i;\n"
1434
"ALIAS d = current;\n"
1435
"TEX current, tex, texture[1], %1;\n"
1436
"TEX i, tex, texture[0], %1;\n"
1437
"ADD tmp, tex, {0.0, %3, 0.0, 0.0};\n"
1438
"TEX a, tmp, texture[2], %1;\n"
1439
"TEX f, tmp, texture[1], %1;\n"
1440
"TEX k, tmp, texture[0], %1;\n"
1441
"ADD tmp, tex, {0.0, %4, 0.0, 0.0};\n"
1442
"TEX c, tmp, texture[1], %1;\n"
1443
"TEX h, tmp, texture[0], %1;\n"
1444
"SUB tmp, tex, {0.0, %3, 0.0, 0.0};\n"
1445
"TEX b, tmp, texture[2], %1;\n"
1446
"TEX g, tmp, texture[1], %1;\n"
1447
"TEX l, tmp, texture[0], %1;\n"
1448
"SUB tmp, tex, {0.0, %4, 0.0, 0.0};\n"
1449
"TEX e, tmp, texture[1], %1;\n"
1450
"TEX j, tmp, texture[0], %1;\n"
1451
+ yadif_spatial_sample
1452
+ yadif_calc
1453
+ field_calc +
1454
"CMP res, prev, spred1, current;\n"
1455
};
1456
1457
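The last six instructions of yadif_calc clamp the spatial prediction into the temporally derived range [p2 - diff0, p2 + diff0] before deint_end writes it out; a scalar sketch of that clamp (same CMP semantics as above):

    #include <algorithm>

    // Sketch of the final clamp in yadif_calc: the spatial prediction spred1
    // is limited by the temporal difference estimate diff0 around p2.
    static float yadif_clamp(float spred, float p2, float diff0)
    {
        return std::min(std::max(spred, p2 - diff0), p2 + diff0);
    }
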
static const QString bicubic =
1458
"TEMP coord, coord2, cdelta, parmx, parmy, a, b, c, d;\n"
1459
"MAD coord.xy, fragment.texcoord[0], {%6, %7}, {0.5, 0.5};\n"
1460
"TEX parmx, coord.x, texture[1], 1D;\n"
1461
"TEX parmy, coord.y, texture[1], 1D;\n"
1462
"MUL cdelta.xz, parmx.rrgg, {-%5, 0, %5, 0};\n"
1463
"MUL cdelta.yw, parmy.rrgg, {0, -%3, 0, %3};\n"
1464
"ADD coord, fragment.texcoord[0].xyxy, cdelta.xyxw;\n"
1465
"ADD coord2, fragment.texcoord[0].xyxy, cdelta.zyzw;\n"
1466
"TEX a, coord.xyxy, texture[0], 2D;\n"
1467
"TEX b, coord.zwzw, texture[0], 2D;\n"
1468
"TEX c, coord2.xyxy, texture[0], 2D;\n"
1469
"TEX d, coord2.zwzw, texture[0], 2D;\n"
1470
"LRP a, parmy.b, a, b;\n"
1471
"LRP c, parmy.b, c, d;\n"
1472
"LRP result.color, parmx.b, a, c;\n";
1473
1474
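For orientation, the three LRP instructions at the end of the bicubic program blend the four bilinear taps with weights read from the 1D helper texture bound to texture[1]; since ARB LRP computes s*x + (1-s)*y, the scalar equivalent is (sketch only):

    // Scalar sketch of the final blend in the bicubic program above.
    static float lrp(float s, float x, float y) { return s * x + (1.0f - s) * y; }

    static float bicubic_combine(float a, float b, float c, float d,
                                 float wy, float wx)  // wy = parmy.b, wx = parmx.b
    {
        float ab = lrp(wy, a, b);  // LRP a, parmy.b, a, b
        float cd = lrp(wy, c, d);  // LRP c, parmy.b, c, d
        return lrp(wx, ab, cd);    // LRP result.color, parmx.b, a, c
    }
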
QString OpenGLVideo::GetProgramString(OpenGLFilterType name,
1475
                                      QString deint, FrameScanType field)
1270
{
1476
{
1271
    QString ret =
1477
    QString ret =
1272
        "!!ARBfp1.0\n"
1478
        "!!ARBfp1.0\n"
1273
        "OPTION ARB_precision_hint_fastest;";
1479
        "OPTION ARB_precision_hint_fastest;\n";
1274
1480
1275
    switch (name)
1481
    switch (name)
1276
    {
1482
    {
1277
        case kGLFilterYUV2RGB:
1483
        case kGLFilterYUV2RGB:
1278
            ret = ret + yuv2rgb1a + yuv2rgb1c;
1484
        {
1279
            if (useColourControl)
1485
            bool need_tex = true;
1280
                ret += yuv2rgb2;
1486
            QString deint_bit = "";
1281
            ret += yuv2rgb3;
1487
            if (deint != "")
1282
            ret += frameBuffer ? yuv2rgb5 : yuv2rgb4;
1488
            {
1283
            break;
1489
                uint tmp_field = 0;
1490
                if (field == kScan_Intr2ndField)
1491
                    tmp_field = 1;
1492
                if (deint == "openglbobdeint" ||
1493
                    deint == "openglonefield" ||
1494
                    deint == "opengldoubleratefieldorder")
1495
                {
1496
                    deint_bit = bobdeint[tmp_field];
1497
                }
1498
                else if (deint == "opengldoublerateonefield")
1499
                {
1500
                    deint_bit = doublerateonefield[tmp_field];
1501
                    if (!tmp_field) { need_tex = false; }
1502
                }
1503
                else if (deint == "opengllinearblend" ||
1504
                         deint == "opengldoubleratelinearblend")
1505
                {
1506
                    deint_bit = linearblend[tmp_field];
1507
                    if (!tmp_field) { need_tex = false; }
1508
                }
1509
                else if (deint == "openglkerneldeint" ||
1510
                         deint == "opengldoubleratekerneldeint")
1511
                {
1512
                    deint_bit = kerneldeint[tmp_field];
1513
                    if (!tmp_field) { need_tex = false; }
1514
                }
1515
                else if (deint == "openglyadif" ||
1516
                         deint == "opengldoublerateyadif")
1517
                {
1518
                    deint_bit = yadif[tmp_field];
1519
                    need_tex = false;
1520
                }
1521
                else
1522
                {
1523
                    VERBOSE(VB_PLAYBACK, LOC +
1524
                        "Unrecognised OpenGL deinterlacer");
1525
                }
1526
            }
1284
1527
1285
        case kGLFilterYUV2RGBA:
1528
            ret += attrib_fast;
1286
            ret = ret + yuv2rgb1a + yuv2rgb1b + yuv2rgb1c;
1529
            ret += useColourControl ? param_colour : "";
1287
            if (useColourControl)
1530
            ret += (deint != "") ? var_deint : "";
1288
                ret += yuv2rgb2;
1531
            ret += var_fast + (need_tex ? tex_fast : "");
1289
            ret = ret + yuv2rgb3 + yuv2rgb4 + yuv2rgb6;
1532
            ret += deint_bit;
1533
            ret += useColourControl ? calc_colour_fast : "";
1534
            ret += end_fast;
1535
        }
1290
            break;
1536
            break;
1537
        case kGLFilterYUV2RGBA:
1291
1538
1292
        case kGLFilterKernelDeint:
1539
            ret += attrib_fast;
1293
            ret +=
1540
            ret += useColourControl ? param_colour : "";
1294
                "ATTRIB tex = fragment.texcoord[0];"
1541
            ret += var_fast + tex_fast + calc_fast_alpha;
1295
                "PARAM  off = program.env[0];"
1542
            ret += useColourControl ? calc_colour_fast : "";
1296
                "TEMP sam, pos, cum, cur, field, mov;"
1543
            ret += end_fast_alpha;
1297
                "RCP field, off.x;"
1298
                "MUL field, tex.yyyy, field;"
1299
                "FRC field, field;"
1300
                "SUB field, field, 0.5;"
1301
                "TEX sam, tex, texture[1], %1;"
1302
                "TEX cur, tex, texture[0], %1;"
1303
                "SUB mov, cur, sam;"
1304
                "MUL cum, sam, 0.125;"
1305
                "MAD cum, cur, 0.125, cum;"
1306
                "ABS mov, mov;"
1307
                "SUB mov, mov, 0.12;"
1308
                "ADD pos, tex, off.wyww;"
1309
                "TEX sam, pos, texture[0], %1;"
1310
                "MAD cum, sam, 0.5, cum;"
1311
                "SUB pos, tex, off.wyww;"
1312
                "TEX sam, pos, texture[0], %1;"
1313
                "MAD cum, sam, 0.5, cum;"
1314
                "MAD pos, off.wyww, 2.0, tex;"
1315
                "TEX sam, pos, texture[0], %1;"
1316
                "MAD cum, sam, -0.0625, cum;"
1317
                "TEX sam, pos, texture[1], %1;"
1318
                "MAD cum, sam, -0.0625, cum;"
1319
                "MAD pos, off.wyww, -2.0, tex;"
1320
                "TEX sam, pos, texture[0], %1;"
1321
                "MAD cum, sam, -0.0625, cum;"
1322
                "TEX sam, pos, texture[1], %1;"
1323
                "MAD cum, sam, -0.0625, cum;"
1324
                "CMP cum, mov, cur, cum;"
1325
                "CMP result.color, field, cum, cur;";
1326
            break;
1327
1544
1328
        case kGLFilterLinearBlendDeintDFR:
1329
            ret +=
1330
                "ATTRIB tex = fragment.texcoord[0];"
1331
                "PARAM  off  = program.env[0];"
1332
                "TEMP field, top, bot, current, previous, next, other, mov;"
1333
                "TEX next, tex, texture[0], %1;"
1334
                "TEX current, tex, texture[1], %1;"
1335
                "TEX previous, tex, texture[2], %1;"
1336
                "ADD top, tex, off.wyww;"
1337
                "TEX other, top, texture[1], %1;"
1338
                "SUB top, tex, off.wyww;"
1339
                "TEX bot, top, texture[1], %1;"
1340
                "LRP other, 0.5, other, bot;"
1341
                "RCP field, off.x;"
1342
                "MUL field, tex.yyyy, field;"
1343
                "FRC field, field;"
1344
                "SUB field, field, 0.5;"
1345
                "SUB top, current, next;"
1346
                "SUB bot, current, previous;"
1347
                "CMP mov, field, bot, top;"
1348
                "ABS mov, mov;"
1349
                "SUB mov, mov, 0.12;"
1350
                "CMP other, mov, current, other;"
1351
                "CMP top, field, other, current;"
1352
                "CMP bot, field, current, other;"
1353
                "CMP result.color, off.y, top, bot;";
1354
            break;
1545
            break;
1355
1546
1356
        case kGLFilterOneFieldDeintDFR:
1547
        case kGLFilterNone:
1357
            ret +=
1548
        case kGLFilterResize:
1358
                "ATTRIB tex = fragment.texcoord[0];"
1359
                "PARAM  off  = program.env[0];"
1360
                "TEMP field, top, bot, current, previous, next, other, mov;"
1361
                "TEX next, tex, texture[0], %1;"
1362
                "TEX current, tex, texture[1], %1;"
1363
                "TEX previous, tex, texture[2], %1;"
1364
                "ADD top, tex, off.wyww;"
1365
                "TEX other, top, texture[1], %1;"
1366
                "RCP field, off.x;"
1367
                "MUL field, tex.yyyy, field;"
1368
                "FRC field, field;"
1369
                "SUB field, field, 0.5;"
1370
                "SUB top, current, next;"
1371
                "SUB bot, current, previous;"
1372
                "CMP mov, field, bot, top;"
1373
                "ABS mov, mov;"
1374
                "SUB mov, mov, 0.12;"
1375
                "CMP other, mov, current, other;"
1376
                "CMP top, field, other, current;"
1377
                "CMP bot, field, current, other;"
1378
                "CMP result.color, off.y, top, bot;";
1379
            break;
1549
            break;
1380
1550
1381
        case kGLFilterKernelDeintDFR:
1551
        case kGLFilterBicubic:
1382
            ret +=
1552
 
1383
                "ATTRIB tex = fragment.texcoord[0];"
1553
            ret += bicubic;
1384
                "PARAM  off = program.env[0];"
1385
                "TEMP sam, pos, bot, top, cur, pre, nex, field, mov;"
1386
                "RCP field, off.x;"
1387
                "MUL field, tex.yyyy, field;"
1388
                "FRC field, field;"
1389
                "SUB field, field, 0.5;"
1390
                "TEX pre, tex, texture[2], %1;" // -1,0
1391
                "TEX cur, tex, texture[1], %1;" //  0,0
1392
                "TEX nex, tex, texture[0], %1;" // +1,0
1393
                "SUB top, nex, cur;"
1394
                "SUB bot, pre, cur;"
1395
                "CMP mov, field, bot, top;"
1396
                "ABS mov, mov;"
1397
                "SUB mov, mov, 0.12;"
1398
                "MUL bot, pre, 0.125;"          // BOT -1,0
1399
                "MAD bot, cur, 0.125, bot;"     // BOT +1,0
1400
                "MUL top, cur, 0.125;"          // TOP -1,0
1401
                "MAD top, nex, 0.125, top;"     // TOP +1,0
1402
                "ADD pos, tex, off.wyww;"
1403
                "TEX sam, pos, texture[1], %1;" // 0,+1
1404
                "MAD bot, sam, 0.5, bot;"       // BOT 0,+1
1405
                "MAD top, sam, 0.5, top;"       // TOP 0,+1
1406
                "SUB pos, tex, off.wyww;"
1407
                "TEX sam, pos, texture[1], %1;" // 0,-1
1408
                "MAD bot, sam, 0.5, bot;"       // BOT 0,-1
1409
                "MAD top, sam, 0.5, top;"       // TOP 0,-1
1410
                "MAD pos, off.wyww, 2.0, tex;"
1411
                "TEX sam, pos, texture[1], %1;" // 0,+2
1412
                "MAD bot, sam, -0.0625, bot;"   // BOT +1,+2
1413
                "MAD top, sam, -0.0625, top;"   // TOP -1,+2
1414
                "TEX sam, pos, texture[2], %1;" // -1,+2
1415
                "MAD bot, sam, -0.0625, bot;"   // BOT -1,+2
1416
                "TEX sam, pos, texture[0], %1;" // +1,+2
1417
                "MAD top, sam, -0.0625, top;"   // TOP +1,+2
1418
                "MAD pos, off.wyww, -2.0, tex;"
1419
                "TEX sam, pos, texture[1], %1;" // +1,-2
1420
                "MAD bot, sam, -0.0625, bot;"   // BOT +1,-2
1421
                "MAD top, sam, -0.0625, top;"   // TOP -1,-2
1422
                "TEX sam, pos, texture[2], %1;" // -1, -2 row
1423
                "MAD bot, sam, -0.0625, bot;"   // BOT -1,-2
1424
                "TEX sam, pos, texture[0], %1;" // +1,-2
1425
                "MAD top, sam, -0.0625, top;"   // TOP +1,-2
1426
                "CMP top, mov, cur, top;"
1427
                "CMP bot, mov, cur, bot;"
1428
                "CMP top, field, top, cur;"
1429
                "CMP bot, field, cur, bot;"
1430
                "CMP result.color, off.y, top, bot;";
1431
            break;
1554
            break;
1432
1555
1433
        case kGLFilterBobDeintDFR:
1556
        default:
1434
        case kGLFilterOneFieldDeint:
1557
            VERBOSE(VB_PLAYBACK, LOC_ERR + "Unknown fragment program.");
1435
            ret +=
1436
                "ATTRIB tex = fragment.texcoord[0];"
1437
                "PARAM  off = program.env[0];"
1438
                "TEMP field, top, bottom, current, other;"
1439
                "TEX current, tex, texture[0], %1;"
1440
                "RCP field, off.x;"
1441
                "MUL field, tex.yyyy, field;"
1442
                "FRC field, field;"
1443
                "SUB field, field, 0.5;"
1444
                "ADD top, tex, off.wyww;"
1445
                "TEX other, top, texture[0], %1;"
1446
                "CMP top, field, other, current;"
1447
                "CMP bottom, field, current, other;"
1448
                "CMP result.color, off.y, top, bottom;";
1449
            break;
1558
            break;
1559
    }
1450
1560
1451
        case kGLFilterLinearBlendDeint:
1561
    QString temp = textureRects ? "RECT" : "2D";
1452
            ret +=
1562
    ret.replace("%1", temp);
1453
                "ATTRIB tex = fragment.texcoord[0];"
1454
                "PARAM  off  = program.env[0];"
1455
                "TEMP mov, field, cur, pre, pos;"
1456
                "RCP field, off.x;"
1457
                "MUL field, tex.yyyy, field;"
1458
                "FRC field, field;"
1459
                "SUB field, field, 0.5;"
1460
                "TEX cur, tex, texture[0], %1;"
1461
                "TEX pre, tex, texture[1], %1;"
1462
                "SUB mov, cur, pre;"
1463
                "ABS mov, mov;"
1464
                "SUB mov, mov, 0.12;"
1465
                "ADD pos, tex, off.wyww;"
1466
                "TEX pre, pos, texture[0], %1;"
1467
                "SUB pos, tex, off.wyww;"
1468
                "TEX pos, pos, texture[0], %1;"
1469
                "LRP pre, 0.5, pos, pre;"
1470
                "CMP pre, field, pre, cur;"
1471
                "CMP result.color, mov, cur, pre;";
1472
            break;
1473
1563
1474
        case kGLFilterFieldOrderDFR:
1564
    float lineHeight = 1.0f;
1475
            ret +=
1565
    float colWidth   = 1.0f;
1476
                "ATTRIB tex = fragment.texcoord[0];"
1566
    QSize fb_size = GetTextureSize(video_dim);
1477
                "PARAM  off  = program.env[0];"
1478
                "TEMP field, cur, pre, bot;"
1479
                "TEX cur, tex, texture[0], %1;"
1480
                "TEX pre, tex, texture[1], %1;"
1481
                "RCP field, off.x;"
1482
                "MUL field, tex.yyyy, field;"
1483
                "FRC field, field;"
1484
                "SUB field, field, 0.5;"
1485
                "CMP bot, off.y, pre, cur;"
1486
                "CMP result.color, field, bot, cur;";
1487
1567
1488
            break;
1568
    if (!textureRects &&
1569
       (inputTextureSize.height() > 0))
1570
    {
1571
        lineHeight /= inputTextureSize.height();
1572
        colWidth   /= inputTextureSize.width();
1573
    }
1489
1574
1490
        case kGLFilterNone:
1575
    float fieldSize = 1.0f / (lineHeight * 2.0);
1491
        case kGLFilterResize:
1492
            break;
1493
1576
1494
        default:
1577
    ret.replace("%2", temp.setNum(fieldSize, 'f', 8));
1495
            VERBOSE(VB_PLAYBACK, LOC_ERR + "Unknown fragment program.");
1578
    ret.replace("%3", temp.setNum(lineHeight, 'f', 8));
1496
            break;
1579
    ret.replace("%4", temp.setNum(lineHeight * 2.0, 'f', 8));
1580
    ret.replace("%5", temp.setNum(colWidth, 'f', 8));
1581
    ret.replace("%6", temp.setNum((float)fb_size.width(), 'f', 1));
1582
    ret.replace("%7", temp.setNum((float)fb_size.height(), 'f', 1));
1583
1584
    ret += "END";
1585
1586
    VERBOSE(VB_PLAYBACK, LOC + QString("Created %1 fragment program %2")
1587
                .arg(FilterToString(name)).arg(deint));
1588
1589
    return ret;
1590
}
1591
1592
uint OpenGLVideo::ParseOptions(QString options)
1593
{
1594
    uint ret = kGLMaxFeat - 1;
1595
1596
    QStringList list = QStringList::split(",", options);
1597
1598
    if (list.empty())
1599
        return ret;
1600
1601
    for (QStringList::Iterator i = list.begin();
1602
         i != list.end(); ++i)
1603
    {
1604
        QString name = (*i).section('=', 0, 0);
1605
        QString opts = (*i).section('=', 1);
1606
1607
        if (name == "opengloptions")
1608
        {
1609
            if (opts.contains("nofinish"))
1610
                ret -= kGLFinish;
1611
            if (opts.contains("nofence"))
1612
                ret -= kGLNVFence;
1613
            if (opts.contains("nopbo"))
1614
                ret -= kGLExtPBufObj;
1615
            if (opts.contains("nopbuf"))
1616
                ret -= kGLXPBuffer;
1617
            if (opts.contains("nofbo"))
1618
                ret -= kGLExtFBufObj;
1619
            if (opts.contains("nofrag"))
1620
                ret -= kGLExtFragProg;
1621
            if (opts.contains("norect"))
1622
                ret -= kGLExtRect;
1623
            return ret;
1624
        }
1497
    }
1625
    }
1498
1626
1499
    return ret + "END";
1627
    return ret;
1500
}
1628
}
1629
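The %N placeholders used throughout the programs above are expanded at the end of GetProgramString(); as a purely illustrative sketch (a 1024x1024 2D source texture is assumed here, not a value taken from the patch), the substitution amounts to:

    #include <qstring.h>

    // Sketch of the %N expansion done by GetProgramString(): %1 is the texture
    // target and %3 one source line in texture coordinates.
    static QString expand_example(void)
    {
        QString prog =
            "TEX current, tex, texture[1], %1;\n"
            "ADD other, tex, {0.0, %3, 0.0, 0.0};\n";
        QString temp;
        prog.replace("%1", "2D");                               // textureRects ? "RECT" : "2D"
        prog.replace("%3", temp.setNum(1.0 / 1024.0, 'f', 8));  // lineHeight
        return prog;
    }
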
(-)mythtv.ori/libs/libmythtv/openglvideo.h (-62 / +50 lines)
Lines 9-14 Link Here
9
#include <qmap.h>
9
#include <qmap.h>
10
10
11
#include "videooutbase.h"
11
#include "videooutbase.h"
12
#include "videoouttypes.h"
12
13
13
enum OpenGLFilterType
14
enum OpenGLFilterType
14
{
15
{
Lines 18-42 Link Here
18
    kGLFilterYUV2RGB,
19
    kGLFilterYUV2RGB,
19
    kGLFilterYUV2RGBA,
20
    kGLFilterYUV2RGBA,
20
21
21
    // Frame rate preserving deinterlacers
22
    kGLFilterLinearBlendDeint,
23
    kGLFilterKernelDeint,
24
    kGLFilterOneFieldDeint,
25
26
    // Frame rate doubling deinterlacers
27
    kGLFilterBobDeintDFR,
28
    kGLFilterLinearBlendDeintDFR,
29
    kGLFilterKernelDeintDFR,
30
    kGLFilterFieldOrderDFR,
31
    kGLFilterOneFieldDeintDFR,
32
33
    // Frame scaling/resizing filters
22
    // Frame scaling/resizing filters
34
    kGLFilterResize,
23
    kGLFilterResize,
24
    kGLFilterBicubic,
35
};
25
};
36
26
37
enum DisplayBuffer
27
enum DisplayBuffer
38
{
28
{
39
    kNoBuffer = 0,    // disable filter
40
    kDefaultBuffer,
29
    kDefaultBuffer,
41
    kFrameBufferObject
30
    kFrameBufferObject
42
};
31
};
Lines 54-81 Link Here
54
    OpenGLVideo();
43
    OpenGLVideo();
55
   ~OpenGLVideo();
44
   ~OpenGLVideo();
56
45
57
    bool Init(OpenGLContext *glcontext, bool colour_control, bool onscreen,
46
    bool Init(OpenGLContext *glcontext, bool colour_control,
58
              QSize video_size, QRect visible_rect,
47
              QSize videoDim, QRect displayVisibleRect,
59
              QRect video_rect, QRect frame_rect,
48
              QRect displayVideoRect, QRect videoRect,
60
              bool viewport_control, bool osd = FALSE);
49
              bool viewport_control, QString options, bool osd = FALSE);
61
    bool ReInit(OpenGLContext *gl, bool colour_control, bool onscreen,
62
              QSize video_size, QRect visible_rect,
63
              QRect video_rect, QRect frame_rect,
64
              bool viewport_control, bool osd = FALSE);
65
50
66
    void UpdateInputFrame(const VideoFrame *frame);
51
    void UpdateInputFrame(const VideoFrame *frame, bool soft_bob = FALSE);
67
    void UpdateInput(const unsigned char *buf, const int *offsets,
52
    void UpdateInput(const unsigned char *buf, const int *offsets,
68
                     uint texture_index, int format, QSize size);
53
                     int format, QSize size,
54
                     const unsigned char *alpha);
69
55
70
    bool AddFilter(const QString &filter)
56
    bool AddFilter(const QString &filter)
71
         { return AddFilter(StringToFilter(filter)); }
57
         { return AddFilter(StringToFilter(filter)); }
72
    bool RemoveFilter(const QString &filter)
58
    bool RemoveFilter(const QString &filter)
73
         { return RemoveFilter(StringToFilter(filter)); }
59
         { return RemoveFilter(StringToFilter(filter)); }
74
60
75
    bool AddDeinterlacer(const QString &filter);
61
    bool AddDeinterlacer(const QString &deinterlacer);
76
    void SetDeinterlacing(bool deinterlacing);
62
    void SetDeinterlacing(bool deinterlacing);
77
    QString GetDeinterlacer(void) const
63
    QString GetDeinterlacer(void) const
78
         { return FilterToString(GetDeintFilter()); };
64
         { return hardwareDeinterlacer; }
79
    void SetSoftwareDeinterlacer(const QString &filter)
65
    void SetSoftwareDeinterlacer(const QString &filter)
80
         { softwareDeinterlacer = QDeepCopy<QString>(filter); };
66
         { softwareDeinterlacer = QDeepCopy<QString>(filter); };
81
67
Lines 84-150 Link Here
84
70
85
    void  SetMasterViewport(QSize size)   { masterViewportSize = size; }
71
    void  SetMasterViewport(QSize size)   { masterViewportSize = size; }
86
    QSize GetViewPort(void)         const { return viewportSize; }
72
    QSize GetViewPort(void)         const { return viewportSize; }
87
    void  SetVideoRect(const QRect &vidrect, const QRect &framerect)
73
    void  SetVideoRect(const QRect &dispvidrect, const QRect &vidrect)
88
        { videoRect = vidrect; frameRect = framerect;}
74
                      { display_video_rect = dispvidrect; video_rect = vidrect;}
89
    QSize GetVideoSize(void)        const { return videoSize; }
75
    QSize GetVideoSize(void)        const { return actual_video_dim;}
90
    void SetVideoResize(const QRect &rect);
76
    void SetVideoResize(const QRect &rect);
91
    void DisableVideoResize(void);
77
    void DisableVideoResize(void);
92
    int SetPictureAttribute(PictureAttribute attributeType, int newValue);
93
    PictureAttributeSupported GetSupportedPictureAttributes(void) const;
94
78
95
  private:
79
  private:
96
    void Teardown(void);
80
    void Teardown(void);
97
    void SetViewPort(const QSize &new_viewport_size);
81
    void SetViewPort(const QSize &new_viewport_size);
98
    void SetViewPortPrivate(const QSize &new_viewport_size);
99
    bool AddFilter(OpenGLFilterType filter);
82
    bool AddFilter(OpenGLFilterType filter);
100
    bool RemoveFilter(OpenGLFilterType filter);
83
    bool RemoveFilter(OpenGLFilterType filter);
84
    void CheckResize(bool deinterlacing);
101
    bool OptimiseFilters(void);
85
    bool OptimiseFilters(void);
102
    OpenGLFilterType GetDeintFilter(void) const;
86
    bool AddFrameBuffer(uint &framebuffer, QSize fb_size,
103
    bool AddFrameBuffer(uint &framebuffer, uint &texture, QSize size);
87
                        uint &texture, QSize vid_size);
104
    uint AddFragmentProgram(OpenGLFilterType name);
88
    uint AddFragmentProgram(OpenGLFilterType name,
105
    uint CreateVideoTexture(QSize size, QSize &tex_size);
89
                            QString deint = QString::null,
106
    QString GetProgramString(OpenGLFilterType filter);
90
                            FrameScanType field = kScan_Progressive);
91
    uint CreateVideoTexture(QSize size, QSize &tex_size,
92
                            bool use_pbo = false);
93
    QString GetProgramString(OpenGLFilterType filter,
94
                             QString deint = QString::null,
95
                             FrameScanType field = kScan_Progressive);
107
    void CalculateResize(float &left,  float &top,
96
    void CalculateResize(float &left,  float &top,
108
                         float &right, float &bottom);
97
                         float &right, float &bottom);
109
    static QString FilterToString(OpenGLFilterType filter);
98
    static QString FilterToString(OpenGLFilterType filter);
110
    static OpenGLFilterType StringToFilter(const QString &filter);
99
    static OpenGLFilterType StringToFilter(const QString &filter);
111
    void ShutDownYUV2RGB(void);
100
    void ShutDownYUV2RGB(void);
112
    void SetViewPort(bool last_stage);
113
    void InitOpenGL(void);
114
    QSize GetTextureSize(const QSize &size);
101
    QSize GetTextureSize(const QSize &size);
115
    void SetFiltering(void);
102
    void SetFiltering(void);
116
103
117
    void Rotate(vector<uint> *target);
104
    void RotateTextures(void);
118
    void SetTextureFilters(vector<uint> *textures, int filt);
105
    void SetTextureFilters(vector<uint> *textures, int filt, int clamp);
106
    void DeleteTextures(vector<uint> *textures);
107
    void TearDownDeinterlacer(void);
108
    uint ParseOptions(QString options);
119
109
120
    OpenGLContext *gl_context;
110
    OpenGLContext *gl_context;
121
    QSize          videoSize;
111
    QSize          video_dim;
112
    QSize          actual_video_dim;
122
    QSize          viewportSize;
113
    QSize          viewportSize;
123
    QSize          masterViewportSize;
114
    QSize          masterViewportSize;
124
    QRect          visibleRect;
115
    QRect          display_visible_rect;
125
    QRect          videoRect;
116
    QRect          display_video_rect;
126
    QRect          frameRect;
117
    QRect          video_rect;
127
    QRect          frameBufferRect;
118
    QRect          frameBufferRect;
128
    bool           invertVideo;
129
    QString        softwareDeinterlacer;
119
    QString        softwareDeinterlacer;
120
    QString        hardwareDeinterlacer;
130
    bool           hardwareDeinterlacing;
121
    bool           hardwareDeinterlacing;
131
    bool           useColourControl;
122
    bool           useColourControl;
132
    bool           viewportControl;
123
    bool           viewportControl;
133
    uint           frameBuffer;
124
    vector<uint>   referenceTextures;
134
    uint           frameBufferTexture;
135
    vector<uint>   inputTextures;
125
    vector<uint>   inputTextures;
136
    QSize          inputTextureSize;
126
    QSize          inputTextureSize;
137
    glfilt_map_t   filters;
127
    glfilt_map_t   filters;
138
    long long      currentFrameNum;
128
    long long      currentFrameNum;
139
    bool           inputUpdated;
129
    bool           inputUpdated;
130
    bool           textureRects;
131
    uint           textureType;
132
    uint           helperTexture;
133
    OpenGLFilterType defaultUpsize;
140
134
141
    QSize            convertSize;
135
    QSize          convertSize;
142
    unsigned char   *convertBuf;
136
    unsigned char *convertBuf;
143
137
144
    bool             videoResize;
138
    bool           videoResize;
145
    QRect            videoResizeRect;
139
    QRect          videoResizeRect;
146
140
147
    float pictureAttribs[kPictureAttribute_MAX];
141
    uint           gl_features;
148
};
142
};
149
143
150
#else // if !USING_OPENGL_VIDEO
144
#else // if !USING_OPENGL_VIDEO
Lines 155-170 Link Here
155
    OpenGLVideo() { }
149
    OpenGLVideo() { }
156
    ~OpenGLVideo() { }
150
    ~OpenGLVideo() { }
157
151
158
    bool Init(OpenGLContext*, bool, bool, QSize, QRect,
152
    bool Init(OpenGLContext*, bool, QSize, QRect,
159
              QRect, QRect, bool, bool osd = false)
153
              QRect, QRect, bool, QString, bool osd = false)
160
        { (void) osd; return false; }
161
162
    bool ReInit(OpenGLContext*, bool, bool, QSize, QRect,
163
                QRect, QRect, bool, bool osd = false)
164
        { (void) osd; return false; }
154
        { (void) osd; return false; }
165
155
166
    void UpdateInputFrame(const VideoFrame*) { }
156
    void UpdateInputFrame(const VideoFrame*) { }
167
    void UpdateInput(const unsigned char*, const int*, uint, int, QSize) { }
157
    void UpdateInput(const unsigned char*, const int*,
158
                     int, QSize, unsigned char* = NULL) { }
168
159
169
    bool AddFilter(const QString&) { return false; }
160
    bool AddFilter(const QString&) { return false; }
170
    bool RemoveFilter(const QString&) { return false; }
161
    bool RemoveFilter(const QString&) { return false; }
Lines 182-190 Link Here
182
    QSize GetVideoSize(void) const { return QSize(0,0); }
173
    QSize GetVideoSize(void) const { return QSize(0,0); }
183
    void SetVideoResize(const QRect&) { }
174
    void SetVideoResize(const QRect&) { }
184
    void DisableVideoResize(void) { }
175
    void DisableVideoResize(void) { }
185
    int SetPictureAttribute(PictureAttribute, int) { return -1; }
186
    PictureAttributeSupported GetSupportedPictureAttributes(void) const
187
        { return kPictureAttributeSupported_None; }
188
};
176
};
189
177
190
#endif // !USING_OPENGL_VIDEO
178
#endif // !USING_OPENGL_VIDEO
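The reworked Init() above takes the raw filter options string that ParseOptions() later scans for an "opengloptions=" entry; a hypothetical call site (gl_ctx and the geometry values are placeholders, not code from the patch) would look like:

    // Hypothetical usage of the new Init() signature; the options string
    // disables the PBO and NV fence paths via ParseOptions().
    OpenGLVideo video;
    bool ok = video.Init(gl_ctx, /*colour_control*/ true,
                         QSize(720, 576),            // videoDim
                         QRect(0, 0, 1280, 720),     // displayVisibleRect
                         QRect(0, 0, 1280, 720),     // displayVideoRect
                         QRect(0, 0, 720, 576),      // videoRect
                         /*viewport_control*/ true,
                         "opengloptions=nopbo,nofence");
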
(-)mythtv.ori/libs/libmythtv/tv_play.cpp (+1 lines)
Lines 2462-2467 Link Here
2462
            return true;
2462
            return true;
2463
        }
2463
        }
2464
        case QEvent::Paint:
2464
        case QEvent::Paint:
2465
        case QEvent::Show:
2465
        {
2466
        {
2466
            if (nvp)
2467
            if (nvp)
2467
                nvp->ExposeEvent();
2468
                nvp->ExposeEvent();
(-)mythtv.ori/libs/libmythtv/util-opengl.cpp (-41 / +638 lines)
Lines 3-8 Link Here
3
#include "util-opengl.h"
3
#include "util-opengl.h"
4
#include "frame.h"
4
#include "frame.h"
5
5
6
#ifdef MMX
7
extern "C" {
8
#include "libavcodec/i386/mmx.h"
9
}
10
#endif
11
12
PFNGLMAPBUFFERPROC                  gMythGLMapBufferARB      = NULL;
13
PFNGLBINDBUFFERARBPROC              gMythGLBindBufferARB     = NULL;
14
PFNGLGENBUFFERSARBPROC              gMythGLGenBuffersARB     = NULL;
15
PFNGLBUFFERDATAARBPROC              gMythGLBufferDataARB     = NULL;
16
PFNGLUNMAPBUFFERARBPROC             gMythGLUnmapBufferARB    = NULL;
17
PFNGLDELETEBUFFERSARBPROC           gMythGLDeleteBuffersARB  = NULL;
18
6
PFNGLGENPROGRAMSARBPROC             gMythGLGenProgramsARB            = NULL;
19
PFNGLGENPROGRAMSARBPROC             gMythGLGenProgramsARB            = NULL;
7
PFNGLBINDPROGRAMARBPROC             gMythGLBindProgramARB            = NULL;
20
PFNGLBINDPROGRAMARBPROC             gMythGLBindProgramARB            = NULL;
8
PFNGLPROGRAMSTRINGARBPROC           gMythGLProgramStringARB          = NULL;
21
PFNGLPROGRAMSTRINGARBPROC           gMythGLProgramStringARB          = NULL;
Lines 19-24 Link Here
19
PFNGLXGETVIDEOSYNCSGIPROC           gMythGLXGetVideoSyncSGI          = NULL;
32
PFNGLXGETVIDEOSYNCSGIPROC           gMythGLXGetVideoSyncSGI          = NULL;
20
PFNGLXWAITVIDEOSYNCSGIPROC          gMythGLXWaitVideoSyncSGI         = NULL;
33
PFNGLXWAITVIDEOSYNCSGIPROC          gMythGLXWaitVideoSyncSGI         = NULL;
21
34
35
PFNGLGENFENCESNVPROC                gMythGLGenFencesNV      = NULL;
36
PFNGLDELETEFENCESNVPROC             gMythGLDeleteFencesNV   = NULL;
37
PFNGLSETFENCENVPROC                 gMythGLSetFenceNV       = NULL;
38
PFNGLFINISHFENCENVPROC              gMythGLFinishFenceNV    = NULL;
39
22
bool init_opengl(void)
40
bool init_opengl(void)
23
{
41
{
24
    static bool is_initialized = false;
42
    static bool is_initialized = false;
Lines 30-35 Link Here
30
48
31
    is_initialized = true;
49
    is_initialized = true;
32
50
51
    gMythGLMapBufferARB = (PFNGLMAPBUFFERPROC)
52
        get_gl_proc_address("glMapBufferARB");
53
    gMythGLBindBufferARB = (PFNGLBINDBUFFERARBPROC)
54
        get_gl_proc_address("glBindBufferARB");
55
    gMythGLGenBuffersARB = (PFNGLGENBUFFERSARBPROC)
56
        get_gl_proc_address("glGenBuffersARB");
57
    gMythGLBufferDataARB = (PFNGLBUFFERDATAARBPROC)
58
        get_gl_proc_address("glBufferDataARB");
59
    gMythGLUnmapBufferARB = (PFNGLUNMAPBUFFERARBPROC)
60
        get_gl_proc_address("glUnmapBufferARB");
61
    gMythGLDeleteBuffersARB = (PFNGLDELETEBUFFERSARBPROC)
62
        get_gl_proc_address("glDeleteBuffersARB");
63
33
    gMythGLGenProgramsARB = (PFNGLGENPROGRAMSARBPROC)
64
    gMythGLGenProgramsARB = (PFNGLGENPROGRAMSARBPROC)
34
        get_gl_proc_address("glGenProgramsARB");
65
        get_gl_proc_address("glGenProgramsARB");
35
    gMythGLBindProgramARB = (PFNGLBINDPROGRAMARBPROC)
66
    gMythGLBindProgramARB = (PFNGLBINDPROGRAMARBPROC)
Lines 60-65 Link Here
60
    gMythGLXWaitVideoSyncSGI = (PFNGLXWAITVIDEOSYNCSGIPROC)
91
    gMythGLXWaitVideoSyncSGI = (PFNGLXWAITVIDEOSYNCSGIPROC)
61
        get_gl_proc_address("glXWaitVideoSyncSGI");
92
        get_gl_proc_address("glXWaitVideoSyncSGI");
62
93
94
    gMythGLGenFencesNV = (PFNGLGENFENCESNVPROC)
95
        get_gl_proc_address("glGenFencesNV");
96
    gMythGLDeleteFencesNV = (PFNGLDELETEFENCESNVPROC)
97
        get_gl_proc_address("glDeleteFencesNV");
98
    gMythGLSetFenceNV = (PFNGLSETFENCENVPROC)
99
        get_gl_proc_address("glSetFenceNV");
100
    gMythGLFinishFenceNV = (PFNGLFINISHFENCENVPROC)
101
        get_gl_proc_address("glFinishFenceNV");
102
63
    return true;
103
    return true;
64
}
104
}
65
105
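The newly resolved GL_NV_fence entry points are typically used to throttle frame uploads without a full glFinish(); a minimal sketch of that pattern using the wrapper pointers declared above (the surrounding code is assumed, not taken from the patch):

    // Sketch: create one fence, set it after issuing the frame's GL commands,
    // then wait on it before the frame's buffers are reused.
    GLuint fence = 0;
    gMythGLGenFencesNV(1, &fence);

    // ... texture uploads / draw calls for this frame ...
    gMythGLSetFenceNV(fence, GL_ALL_COMPLETED_NV);

    // Later, before touching this frame's buffers again:
    gMythGLFinishFenceNV(fence);
    gMythGLDeleteFencesNV(1, &fence);
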
Lines 101-107 Link Here
101
141
102
    if (!ret)
142
    if (!ret)
103
        return false;
143
        return false;
104
144
 gl_minor=2;
105
    static_major = major = gl_major;
145
    static_major = major = gl_major;
106
    static_minor = minor = gl_minor;
146
    static_minor = minor = gl_minor;
107
    static_ret = true;
147
    static_ret = true;
Lines 210-217 Link Here
210
Window get_gl_window(Display     *XJ_disp,
250
Window get_gl_window(Display     *XJ_disp,
211
                     Window       XJ_curwin,
251
                     Window       XJ_curwin,
212
                     XVisualInfo  *visInfo,
252
                     XVisualInfo  *visInfo,
213
                     const QSize &window_size,
253
                     const QRect &window_rect)
214
                     bool         map_window)
215
{
254
{
216
    X11L;
255
    X11L;
217
256
Lines 220-230 Link Here
220
        XJ_disp, XJ_curwin, visInfo->visual, AllocNone);
259
        XJ_disp, XJ_curwin, visInfo->visual, AllocNone);
221
260
222
    Window gl_window = XCreateWindow(
261
    Window gl_window = XCreateWindow(
223
        XJ_disp, XJ_curwin, 0, 0, window_size.width(), window_size.height(), 0,
262
        XJ_disp, XJ_curwin, window_rect.x(), window_rect.y(), 
263
        window_rect.width(), window_rect.height(), 0,
224
        visInfo->depth, InputOutput, visInfo->visual, CWColormap, &attributes);
264
        visInfo->depth, InputOutput, visInfo->visual, CWColormap, &attributes);
225
265
226
    if (map_window)
266
    XMapWindow(XJ_disp, gl_window);
227
        XMapWindow(XJ_disp, gl_window);
228
267
229
    XFree(visInfo);
268
    XFree(visInfo);
230
269
Lines 267-307 Link Here
267
    return glx_window;
306
    return glx_window;
268
}                       
307
}                       
269
308
270
void copy_pixels_to_texture(const unsigned char *buf,
271
                            int                  buffer_format,
272
                            const QSize         &buffer_size,
273
                            int                  texture,
274
                            int                  texture_type)
275
{
276
    glBindTexture(texture_type, texture);
277
278
    uint format;
279
    switch (buffer_format)
280
    {
281
        case FMT_YV12:
282
            format = GL_LUMINANCE;
283
            break;
284
        case FMT_RGB24:
285
            format = GL_RGB;
286
            break;
287
        case FMT_RGBA32:
288
            format = GL_RGBA;
289
            break;
290
        case FMT_ALPHA:
291
            format = GL_ALPHA;
292
            break;
293
        default:
294
            return;
295
    }
296
297
    glTexSubImage2D(
298
        texture_type,
299
        0, 0, 0,
300
        buffer_size.width(), buffer_size.height(),
301
        format, GL_UNSIGNED_BYTE,
302
        buf);
303
}
304
305
__GLXextFuncPtr get_gl_proc_address(const QString &procName)
309
__GLXextFuncPtr get_gl_proc_address(const QString &procName)
306
{
310
{
307
    __GLXextFuncPtr ret = NULL;
311
    __GLXextFuncPtr ret = NULL;
Lines 371-373 Link Here
371
375
372
    return gMythGLXGetVideoSyncSGI && gMythGLXWaitVideoSyncSGI;
376
    return gMythGLXGetVideoSyncSGI && gMythGLXWaitVideoSyncSGI;
373
}
377
}
378
379
bool has_gl_pixelbuffer_object_support(const QString &ext)
380
{
381
    init_opengl();
382
383
    if (!ext.contains("GL_ARB_pixel_buffer_object"))
384
        return false;
385
386
    return (gMythGLMapBufferARB     &&
387
            gMythGLBindBufferARB    &&
388
            gMythGLGenBuffersARB    &&
389
            gMythGLDeleteBuffersARB &&
390
            gMythGLBufferDataARB    &&
391
            gMythGLUnmapBufferARB);
392
}
393
394
bool has_gl_nvfence_support(const QString &ext)
395
{
396
    init_opengl();
397
398
    if (!ext.contains("GL_NV_fence"))
399
        return false;
400
401
    return (gMythGLGenFencesNV    &&
402
            gMythGLDeleteFencesNV &&
403
            gMythGLSetFenceNV     &&
404
            gMythGLFinishFenceNV);
405
}
406
407
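When has_gl_pixelbuffer_object_support() succeeds, texture data can be staged through an ARB pixel buffer object so the copy into driver memory overlaps other work; a minimal sketch of that upload path using the wrapper pointers from this file (buffer size, texture id and GL_BGRA format are illustrative assumptions):

    #include <cstring>  // memcpy

    // Sketch: stream one packed frame into 'texture' via a pixel buffer object.
    static void upload_via_pbo(GLuint pbo, GLuint texture,
                               const unsigned char *frame, int width, int height)
    {
        size_t size = (size_t)width * height * 4;  // assumed 4 bytes per pixel

        gMythGLBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, pbo);
        gMythGLBufferDataARB(GL_PIXEL_UNPACK_BUFFER_ARB, size, NULL, GL_STREAM_DRAW_ARB);

        void *dst = gMythGLMapBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, GL_WRITE_ONLY_ARB);
        if (!dst)
            return;
        memcpy(dst, frame, size);
        gMythGLUnmapBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB);

        // With a PBO bound, the data argument is an offset into the buffer.
        glBindTexture(GL_TEXTURE_2D, texture);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height,
                        GL_BGRA, GL_UNSIGNED_BYTE, (const void *) 0);

        gMythGLBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
    }
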
#ifdef MMX
408
static inline void mmx_pack_alpha_high(uint8_t *a1, uint8_t *a2,
409
                                       uint8_t *y1, uint8_t *y2)
410
{
411
    movq_m2r (*a1, mm4);
412
    punpckhbw_m2r (*y1, mm4);
413
    movq_m2r (*a2, mm7);
414
    punpckhbw_m2r (*y2, mm7);
415
}
416
417
static inline void mmx_pack_alpha_low(uint8_t *a1, uint8_t *a2,
418
                                      uint8_t *y1, uint8_t *y2)
419
{
420
    movq_m2r (*a1, mm4);
421
    punpcklbw_m2r (*y1, mm4);
422
    movq_m2r (*a2, mm7);
423
    punpcklbw_m2r (*y2, mm7);
424
}
425
426
static mmx_t mmx_1s = {0xffffffffffffffffLL};
427
428
static inline void mmx_pack_alpha1s_high(uint8_t *y1, uint8_t *y2)
429
{
430
    movq_m2r (mmx_1s, mm4);
431
    punpckhbw_m2r (*y1, mm4);
432
    movq_m2r (mmx_1s, mm7);
433
    punpckhbw_m2r (*y2, mm7);
434
}
435
436
static inline void mmx_pack_alpha1s_low(uint8_t *y1, uint8_t *y2)
437
{
438
    movq_m2r (mmx_1s, mm4);
439
    punpcklbw_m2r (*y1, mm4);
440
    movq_m2r (mmx_1s, mm7);
441
    punpcklbw_m2r (*y2, mm7);
442
}
443
444
static inline void mmx_pack_middle(uint8_t *dest1, uint8_t *dest2)
445
{
446
    movq_r2r (mm3, mm5);
447
    punpcklbw_r2r (mm2, mm5);
448
449
    movq_r2r (mm5, mm6);
450
    punpcklbw_r2r (mm4, mm6);
451
    movq_r2m (mm6, *(dest1));
452
453
    movq_r2r (mm5, mm6);
454
    punpckhbw_r2r (mm4, mm6);
455
    movq_r2m (mm6, *(dest1 + 8));
456
457
    movq_r2r (mm5, mm6);
458
    punpcklbw_r2r (mm7, mm6);
459
    movq_r2m (mm6, *(dest2));
460
461
    movq_r2r (mm5, mm6);
462
    punpckhbw_r2r (mm7, mm6);
463
    movq_r2m (mm6, *(dest2 + 8));
464
}
465
466
static inline void mmx_pack_end(uint8_t *dest1, uint8_t *dest2)
467
{
468
    punpckhbw_r2r (mm2, mm3);
469
470
    movq_r2r (mm3, mm6);
471
    punpcklbw_r2r (mm4, mm6);
472
    movq_r2m (mm6, *(dest1 + 16));
473
474
    movq_r2r (mm3, mm6);
475
    punpckhbw_r2r (mm4, mm6);
476
    movq_r2m (mm6, *(dest1 + 24));
477
478
    movq_r2r (mm3, mm6);
479
    punpcklbw_r2r (mm7, mm6);
480
    movq_r2m (mm6, *(dest2 + 16));
481
482
    punpckhbw_r2r (mm7, mm3);
483
    movq_r2m (mm3, *(dest2 + 24));
484
}
485
486
static inline void mmx_pack_easy(uint8_t *dest, uint8_t *y)
487
{
488
    movq_m2r (mmx_1s, mm4);
489
    punpcklbw_m2r (*y, mm4);
490
491
    movq_r2r (mm3, mm5);
492
    punpcklbw_r2r (mm2, mm5);
493
494
    movq_r2r (mm5, mm6);
495
    punpcklbw_r2r (mm4, mm6);
496
    movq_r2m (mm6, *(dest));
497
498
    movq_r2r (mm5, mm6);
499
    punpckhbw_r2r (mm4, mm6);
500
    movq_r2m (mm6, *(dest + 8));
501
502
    movq_m2r (mmx_1s, mm4);
503
    punpckhbw_m2r (*y, mm4);
504
505
    punpckhbw_r2r (mm2, mm3);
506
507
    movq_r2r (mm3, mm6);
508
    punpcklbw_r2r (mm4, mm6);
509
    movq_r2m (mm6, *(dest + 16));
510
511
    punpckhbw_r2r (mm4, mm3);
512
    movq_r2m (mm3, *(dest + 24));
513
}
514
515
static mmx_t mmx_0s = {0x0000000000000000LL};
516
static mmx_t round  = {0x0002000200020002LL};
517
518
static inline void mmx_interp_start(uint8_t *left, uint8_t *right)
519
{
520
    movd_m2r  (*left, mm5);
521
    punpcklbw_m2r (mmx_0s, mm5);
522
523
    movq_r2r  (mm5, mm4);
524
    paddw_r2r (mm4, mm4);
525
    paddw_r2r (mm5, mm4);
526
    paddw_m2r (round, mm4);
527
528
    movd_m2r  (*right, mm5);
529
    punpcklbw_m2r (mmx_0s, mm5);
530
    paddw_r2r (mm5, mm4);
531
532
    psrlw_i2r (2, mm4);
533
}
534
535
static inline void mmx_interp_endu(void)
536
{
537
    movq_r2r  (mm4, mm2);
538
    psllw_i2r (8, mm2);
539
    paddb_r2r (mm4, mm2);
540
}
541
    
542
static inline void mmx_interp_endv(void)
543
{
544
    movq_r2r  (mm4, mm3);
545
    psllw_i2r (8, mm3);
546
    paddb_r2r (mm4, mm3);
547
}
548
549
static inline void mmx_pack_chroma(uint8_t *u, uint8_t *v)
550
{
551
    movd_m2r (*u,  mm2);
552
    movd_m2r (*v,  mm3);
553
    punpcklbw_r2r (mm2, mm2);
554
    punpcklbw_r2r (mm3, mm3);
555
}
556
#endif // MMX
557
558
static inline void c_interp(uint8_t *dest, uint8_t *a, uint8_t *b,
559
                            uint8_t *c, uint8_t *d)
560
{
561
    unsigned int tmp = (unsigned int) *a;
562
    tmp *= 3;
563
    tmp += 2;
564
    tmp += (unsigned int) *c;
565
    dest[0] = (uint8_t) (tmp >> 2);
566
567
    tmp = (unsigned int) *b;
568
    tmp *= 3;
569
    tmp += 2;
570
    tmp += (unsigned int) *d;
571
    dest[1] = (uint8_t) (tmp >> 2);
572
573
    tmp = (unsigned int) *c;
574
    tmp *= 3;
575
    tmp += 2;
576
    tmp += (unsigned int) *a;
577
    dest[2] = (uint8_t) (tmp >> 2);
578
579
    tmp = (unsigned int) *d;
580
    tmp *= 3;
581
    tmp += 2;
582
    tmp += (unsigned int) *b;
583
    dest[3] = (uint8_t) (tmp >> 2);
584
}
585
586
void pack_yv12alpha(const unsigned char *source,
587
                    const unsigned char *dest,
588
                    const int *offsets,
589
                    const int *pitches,
590
                    const QSize size,
591
                    const unsigned char *alpha)
592
{
593
    const int width = size.width();
594
    const int height = size.height();
595
596
    if (height % 2 || width % 2)
597
        return;
598
599
#ifdef MMX
600
    int residual  = width % 8;
601
    int mmx_width = width - residual;
602
    int c_start_w = mmx_width;
603
#else
604
    int residual  = 0;
605
    int mmx_width = width;
606
    int c_start_w = 0;
607
#endif
608
609
    uint bgra_width  = width << 2;
610
    uint chroma_width = width >> 1;
611
612
    uint y_extra     = (pitches[0] << 1) - width + residual;
613
    uint u_extra     = pitches[1] - chroma_width + (residual >> 1);
614
    uint v_extra     = pitches[2] - chroma_width + (residual >> 1);
615
    uint d_extra     = bgra_width + (residual << 2);
616
617
    uint8_t *ypt_1   = (uint8_t *)source + offsets[0];
618
    uint8_t *ypt_2   = ypt_1 + pitches[0];
619
    uint8_t *upt     = (uint8_t *)source + offsets[1];
620
    uint8_t *vpt     = (uint8_t *)source + offsets[2];
621
    uint8_t *dst_1   = (uint8_t *) dest;
622
    uint8_t *dst_2   = dst_1 + bgra_width;
623
624
    if (alpha)
625
    {
626
        uint8_t *alpha_1 = (uint8_t *) alpha;
627
        uint8_t *alpha_2 = alpha_1 + width;
628
        uint a_extra  = width + residual;
629
630
#ifdef MMX
631
        for (int row = 0; row < height; row += 2)
632
        {
633
            for (int col = 0; col < mmx_width; col += 8)
634
            {
635
                mmx_pack_chroma(upt,  vpt);
636
                mmx_pack_alpha_low(alpha_1, alpha_2, ypt_1, ypt_2);
637
                mmx_pack_middle(dst_1, dst_2);
638
                mmx_pack_alpha_high(alpha_1, alpha_2, ypt_1, ypt_2);
639
                mmx_pack_end(dst_1, dst_2);
640
641
                dst_1 += 32; dst_2 += 32;
642
                alpha_1 += 8; alpha_2 += 8;
643
                ypt_1 += 8; ypt_2 += 8;
644
                upt   += 4; vpt   += 4;
645
            }
646
647
            ypt_1   += y_extra; ypt_2   += y_extra;
648
            upt     += u_extra; vpt     += v_extra;
649
            dst_1   += d_extra; dst_2   += d_extra;
650
            alpha_1 += a_extra; alpha_2 += a_extra;
651
        }
652
653
        emms();
654
655
        if (residual)
656
        {
657
            y_extra     = (pitches[0] << 1) - width + mmx_width;
658
            u_extra     = pitches[1] - chroma_width + (mmx_width >> 1);
659
            v_extra     = pitches[2] - chroma_width + (mmx_width >> 1);
660
            d_extra     = bgra_width + (mmx_width << 2);
661
662
            ypt_1   = (uint8_t *)source + offsets[0] + mmx_width;
663
            ypt_2   = ypt_1 + pitches[0];
664
            upt     = (uint8_t *)source + offsets[1] + (mmx_width>>1);
665
            vpt     = (uint8_t *)source + offsets[2] + (mmx_width>>1);
666
            dst_1   = (uint8_t *) dest + (mmx_width << 2);
667
            dst_2   = dst_1 + bgra_width;
668
669
            alpha_1 = (uint8_t *) alpha + mmx_width;
670
            alpha_2 = alpha_1 + width;
671
            a_extra  = width + mmx_width;
672
        }
673
        else
674
        {
675
            return;
676
        }
677
#endif //MMX
678
679
        for (int row = 0; row < height; row += 2)
680
        {
681
            for (int col = c_start_w; col < width; col += 2)
682
            {
683
                *(dst_1++) = *vpt; *(dst_2++) = *vpt;
684
                *(dst_1++) = *(alpha_1++);
685
                *(dst_2++) = *(alpha_2++);
686
                *(dst_1++) = *upt; *(dst_2++) = *upt;
687
                *(dst_1++) = *(ypt_1++);
688
                *(dst_2++) = *(ypt_2++);
689
690
                *(dst_1++) = *vpt; *(dst_2++) = *(vpt++);
691
                *(dst_1++) = *(alpha_1++);
692
                *(dst_2++) = *(alpha_2++);
693
                *(dst_1++) = *upt; *(dst_2++) = *(upt++);
694
                *(dst_1++) = *(ypt_1++);
695
                *(dst_2++) = *(ypt_2++);
696
            }
697
698
            ypt_1   += y_extra; ypt_2   += y_extra;
699
            upt     += u_extra; vpt     += v_extra;
700
            alpha_1 += a_extra; alpha_2 += a_extra;
701
            dst_1   += d_extra; dst_2   += d_extra;
702
        }
703
    }
704
    else
705
    {
706
707
#ifdef MMX
708
        for (int row = 0; row < height; row += 2)
709
        {
710
            for (int col = 0; col < mmx_width; col += 8)
711
            {
712
                mmx_pack_chroma(upt,  vpt);
713
                mmx_pack_alpha1s_low(ypt_1, ypt_2);
714
                mmx_pack_middle(dst_1, dst_2);
715
                mmx_pack_alpha1s_high(ypt_1, ypt_2);
716
                mmx_pack_end(dst_1, dst_2);
717
718
                dst_1 += 32; dst_2 += 32;
719
                ypt_1 += 8;  ypt_2 += 8;
720
                upt   += 4;  vpt   += 4;
721
722
            }
723
            ypt_1 += y_extra; ypt_2 += y_extra;
724
            upt   += u_extra; vpt   += v_extra;
725
            dst_1 += d_extra; dst_2 += d_extra;
726
        }
727
728
        emms();
729
730
        if (residual)
731
        {
732
            y_extra     = (pitches[0] << 1) - width + mmx_width;
733
            u_extra     = pitches[1] - chroma_width + (mmx_width >> 1);
734
            v_extra     = pitches[2] - chroma_width + (mmx_width >> 1);
735
            d_extra     = bgra_width + (mmx_width << 2);
736
737
            ypt_1   = (uint8_t *)source + offsets[0] + mmx_width;
738
            ypt_2   = ypt_1 + pitches[0];
739
            upt     = (uint8_t *)source + offsets[1] + (mmx_width>>1);
740
            vpt     = (uint8_t *)source + offsets[2] + (mmx_width>>1);
741
            dst_1   = (uint8_t *) dest + (mmx_width << 2);
742
            dst_2   = dst_1 + bgra_width;
743
        }
744
        else
745
        {
746
            return;
747
        }
748
#endif //MMX
749
750
        for (int row = 0; row < height; row += 2)
751
        {
752
            for (int col = c_start_w; col < width; col += 2)
753
            {
754
                *(dst_1++) = *vpt; *(dst_2++) = *vpt;
755
                *(dst_1++) = 255;  *(dst_2++) = 255;
756
                *(dst_1++) = *upt; *(dst_2++) = *upt;
757
                *(dst_1++) = *(ypt_1++);
758
                *(dst_2++) = *(ypt_2++);
759
760
                *(dst_1++) = *vpt; *(dst_2++) = *(vpt++);
761
                *(dst_1++) = 255;  *(dst_2++) = 255;
762
                *(dst_1++) = *upt; *(dst_2++) = *(upt++);
763
                *(dst_1++) = *(ypt_1++);
764
                *(dst_2++) = *(ypt_2++);
765
            }
766
            ypt_1   += y_extra; ypt_2   += y_extra;
767
            upt     += u_extra; vpt     += v_extra;
768
            dst_1   += d_extra; dst_2   += d_extra;
769
        }
770
    }
771
}
772
773
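For reference, one iteration of the scalar loop in pack_yv12alpha() expands a 2x2 luma block sharing a single U/V sample into four packed output pixels, written byte-wise as V, alpha, U, Y (alpha is 255 when no alpha plane is supplied); the MMX path produces the same layout eight pixels at a time. A compact sketch of that inner step:

    // Sketch: pack one 2x2 luma block sharing one U/V pair, mirroring a single
    // iteration of the scalar loop in pack_yv12alpha() with alpha fixed at 255.
    static void pack_block(unsigned char *dst_1, unsigned char *dst_2,
                           const unsigned char y[2][2],
                           unsigned char u, unsigned char v)
    {
        for (int i = 0; i < 2; i++)
        {
            dst_1[4*i + 0] = v;        dst_2[4*i + 0] = v;
            dst_1[4*i + 1] = 255;      dst_2[4*i + 1] = 255;
            dst_1[4*i + 2] = u;        dst_2[4*i + 2] = u;
            dst_1[4*i + 3] = y[0][i];  dst_2[4*i + 3] = y[1][i];
        }
    }
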
void pack_yv12interlaced(const unsigned char *source,
774
                         const unsigned char *dest,
775
                         const int *offsets,
776
                         const int *pitches,
777
                         const QSize size)
778
{
779
    int width = size.width();
780
    int height = size.height();
781
782
    if (height % 4 || width % 2)
783
        return;
784
785
    uint bgra_width  = width << 2;
786
    uint dwrap  = (bgra_width << 2) - bgra_width;
787
    uint chroma_width = width >> 1;
788
    uint ywrap     = (pitches[0] << 1) - width;
789
    uint uwrap     = (pitches[1] << 1) - chroma_width;
790
    uint vwrap     = (pitches[2] << 1) - chroma_width;
791
792
    uint8_t *ypt_1   = (uint8_t *)source + offsets[0];
793
    uint8_t *ypt_2   = ypt_1 + pitches[0];
794
    uint8_t *ypt_3   = ypt_1 + (pitches[0] * (height - 2));
795
    uint8_t *ypt_4   = ypt_3 + pitches[0];
796
797
    uint8_t *u1     = (uint8_t *)source + offsets[1];
798
    uint8_t *v1     = (uint8_t *)source + offsets[2];
799
    uint8_t *u2     = u1 + pitches[1]; uint8_t *v2     = v1 + pitches[2];
800
    uint8_t *u3     = u1 + (pitches[1] * ((height - 4) >> 1));
801
    uint8_t *v3     = v1 + (pitches[2] * ((height - 4) >> 1));
802
    uint8_t *u4     = u3 + pitches[1]; uint8_t *v4     = v3 + pitches[2];
803
804
    uint8_t *dst_1   = (uint8_t *) dest;
805
    uint8_t *dst_2   = dst_1 + bgra_width;
806
    uint8_t *dst_3   = dst_1 + (bgra_width * (height - 2));
807
    uint8_t *dst_4   = dst_3 + bgra_width;
808
809
#ifdef MMX
810
811
    if (!(width % 8))
812
    {
813
        // pack first 2 and last 2 rows
814
        for (int col = 0; col < width; col += 8)
815
        {
816
            mmx_pack_chroma(u1, v1);
817
            mmx_pack_easy(dst_1, ypt_1);
818
            mmx_pack_chroma(u2, v2);
819
            mmx_pack_easy(dst_2, ypt_2);
820
            mmx_pack_chroma(u3, v3);
821
            mmx_pack_easy(dst_3, ypt_3);
822
            mmx_pack_chroma(u4, v4);
823
            mmx_pack_easy(dst_4, ypt_4);
824
825
            dst_1 += 32; dst_2 += 32; dst_3 += 32; dst_4 += 32;
826
            ypt_1 += 8; ypt_2 += 8; ypt_3 += 8; ypt_4 += 8;
827
            u1   += 4; v1   += 4; u2   += 4; v2   += 4;
828
            u3   += 4; v3   += 4; u4   += 4; v4   += 4;
829
        }
830
831
        ypt_1 += ywrap; ypt_2 += ywrap;
832
        dst_1 += bgra_width; dst_2 += bgra_width;
833
834
        ypt_3 = ypt_2 + pitches[0];
835
        ypt_4 = ypt_3 + pitches[0];
836
        dst_3 = dst_2 + bgra_width;
837
        dst_4 = dst_3 + bgra_width;
838
839
        ywrap = (pitches[0] << 2) - width;
840
841
        u1 = (uint8_t *)source + offsets[1];
842
        v1 = (uint8_t *)source + offsets[2];
843
        u2 = u1 + pitches[1]; v2 = v1 + pitches[2];
844
        u3 = u2 + pitches[1]; v3 = v2 + pitches[2];
845
        u4 = u3 + pitches[1]; v4 = v3 + pitches[2];
846
847
        height -= 4;
848
849
        // pack main body
850
        for (int row = 0 ; row < height; row += 4)
851
        {
852
            for (int col = 0; col < width; col += 8)
853
            {
854
                mmx_interp_start(u1, u3); mmx_interp_endu();
855
                mmx_interp_start(v1, v3); mmx_interp_endv();
856
                mmx_pack_easy(dst_1, ypt_1);
857
858
                mmx_interp_start(u2, u4); mmx_interp_endu();
859
                mmx_interp_start(v2, v4); mmx_interp_endv();
860
                mmx_pack_easy(dst_2, ypt_2);
861
862
                mmx_interp_start(u3, u1); mmx_interp_endu();
863
                mmx_interp_start(v3, v1); mmx_interp_endv();
864
                mmx_pack_easy(dst_3, ypt_3);
865
866
                mmx_interp_start(u4, u2); mmx_interp_endu();
867
                mmx_interp_start(v4, v2); mmx_interp_endv();
868
                mmx_pack_easy(dst_4, ypt_4);
869
870
                dst_1 += 32; dst_2 += 32; dst_3 += 32; dst_4 += 32;
871
                ypt_1 += 8; ypt_2 += 8; ypt_3 += 8; ypt_4 += 8;
872
                u1   += 4; u2   += 4; u3   += 4; u4   += 4;
873
                v1   += 4; v2   += 4; v3   += 4; v4   += 4;
874
            }
875
876
            ypt_1 += ywrap; ypt_2 += ywrap; ypt_3 += ywrap; ypt_4 += ywrap;
877
            dst_1 += dwrap; dst_2 += dwrap; dst_3 += dwrap; dst_4 += dwrap;
878
            u1 += uwrap; v1 += vwrap; u2 += uwrap; v2 += vwrap;
879
            u3 += uwrap; v3 += vwrap; u4 += uwrap;v4 += vwrap;
880
        }
881
882
        emms();
883
        
884
        return;
885
    }
886
#endif //MMX
887
888
    // pack first 2 and last 2 rows
889
    for (int col = 0; col < width; col += 2)
890
    {
891
        *(dst_1++) = *v1; *(dst_2++) = *v2; *(dst_3++) = *v3; *(dst_4++) = *v4;
892
        *(dst_1++) = 255; *(dst_2++) = 255; *(dst_3++) = 255; *(dst_4++) = 255;
893
        *(dst_1++) = *u1; *(dst_2++) = *u2; *(dst_3++) = *u3; *(dst_4++) = *u4;
894
        *(dst_1++) = *(ypt_1++); *(dst_2++) = *(ypt_2++);
895
        *(dst_3++) = *(ypt_3++); *(dst_4++) = *(ypt_4++);
896
897
        *(dst_1++) = *(v1++); *(dst_2++) = *(v2++);
898
        *(dst_3++) = *(v3++); *(dst_4++) = *(v4++);
899
        *(dst_1++) = 255; *(dst_2++) = 255; *(dst_3++) = 255; *(dst_4++) = 255;
900
        *(dst_1++) = *(u1++); *(dst_2++) = *(u2++);
901
        *(dst_3++) = *(u3++); *(dst_4++) = *(u4++);
902
        *(dst_1++) = *(ypt_1++); *(dst_2++) = *(ypt_2++);
903
        *(dst_3++) = *(ypt_3++); *(dst_4++) = *(ypt_4++);
904
    }
905
906
    ypt_1 += ywrap; ypt_2 += ywrap;
907
    dst_1 += bgra_width; dst_2 += bgra_width;
908
909
    ypt_3 = ypt_2 + pitches[0];
910
    ypt_4 = ypt_3 + pitches[0];
911
    dst_3 = dst_2 + bgra_width;
912
    dst_4 = dst_3 + bgra_width;
913
914
    ywrap = (pitches[0] << 2) - width;
915
916
    u1 = (uint8_t *)source + offsets[1];
917
    v1 = (uint8_t *)source + offsets[2];
918
    u2 = u1 + pitches[1]; v2 = v1 + pitches[2];
919
    u3 = u2 + pitches[1]; v3 = v2 + pitches[2];
920
    u4 = u3 + pitches[1]; v4 = v3 + pitches[2];
921
922
    height -= 4;
923
924
    uint8_t v[4], u[4];
925
926
    // pack main body
927
    for (int row = 0; row < height; row += 4)
928
    {
929
        for (int col = 0; col < width; col += 2)
930
        {
931
            c_interp(v, v1, v2, v3, v4);
932
            c_interp(u, u1, u2, u3, u4);
933
934
            *(dst_1++) = v[0]; *(dst_2++) = v[1];
935
            *(dst_3++) = v[2]; *(dst_4++) = v[3];
936
            *(dst_1++) = 255; *(dst_2++) = 255; *(dst_3++) = 255; *(dst_4++) = 255;
937
            *(dst_1++) = u[0]; *(dst_2++) = u[1];
938
            *(dst_3++) = u[2]; *(dst_4++) = u[3];
939
            *(dst_1++) = *(ypt_1++); *(dst_2++) = *(ypt_2++);
940
            *(dst_3++) = *(ypt_3++); *(dst_4++) = *(ypt_4++);
941
942
            *(dst_1++) = v[0]; *(dst_2++) = v[1];
943
            *(dst_3++) = v[2]; *(dst_4++) = v[3];
944
            *(dst_1++) = 255; *(dst_2++) = 255; *(dst_3++) = 255; *(dst_4++) = 255;
945
            *(dst_1++) = u[0]; *(dst_2++) = u[1];
946
            *(dst_3++) = u[2]; *(dst_4++) = u[3];
947
            *(dst_1++) = *(ypt_1++); *(dst_2++) = *(ypt_2++);
948
            *(dst_3++) = *(ypt_3++); *(dst_4++) = *(ypt_4++);
949
950
            v1++; v2++; v3++; v4++;
951
            u1++; u2++; u3++; u4++;
952
        }
953
        ypt_1 += ywrap; ypt_2 += ywrap; ypt_3 += ywrap; ypt_4 += ywrap;
954
        u1 += uwrap; u2 += uwrap; u3 += uwrap; u4 += uwrap;
955
        v1 += vwrap; v2 += vwrap; v3 += vwrap; v4 += vwrap;
956
        dst_1 += dwrap; dst_2 += dwrap; dst_3 += dwrap; dst_4 += dwrap;
957
    }
958
}
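// Note on the layout produced by the pack_* routines above: each output pixel
// is written as four bytes in V, alpha, U, Y order (alpha is 255 here;
// pack_yv12alpha can also take a per-pixel alpha plane), i.e. chroma is
// expanded to 4:4:4 so the OpenGL fragment program only has to do the
// colourspace conversion. The interlaced variant walks four luma rows at a
// time and, in both the MMX and C paths, interpolates the chroma lines
// between same-field neighbours instead of simply repeating them.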
959
960
void store_bicubic_weights(float x, float *dst)
961
{
962
    float w0 = (((-1 * x + 3) * x - 3) * x + 1) / 6;
963
    float w1 = ((( 3 * x - 6) * x + 0) * x + 4) / 6;
964
    float w2 = (((-3 * x + 3) * x + 3) * x + 1) / 6;
965
    float w3 = ((( 1 * x + 0) * x + 0) * x + 0) / 6;
966
    *dst++ = 1 + x - w1 / (w0 + w1);
967
    *dst++ = 1 - x + w3 / (w2 + w3);
968
    *dst++ = w0 + w1;
969
    *dst++ = 0;
970
}
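// The w0..w3 above are the four cubic B-spline basis weights at fractional
// position x (they sum to 1). What is stored is the usual two-tap
// decomposition used for GPU bicubic filtering: the first two values are the
// texture-coordinate offsets of the two hardware bilinear taps,
//     off0 = 1 + x - w1/(w0 + w1)   and   off1 = 1 - x + w3/(w2 + w3),
// the third is the weight of the first tap, g0 = w0 + w1 (the second tap's
// weight is 1 - g0), and the fourth component is padding.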
(-)mythtv.ori/libs/libmythtv/util-opengl.h (-18 / +32 lines)
Lines 8-13 Link Here
8
// MythTV headers
8
// MythTV headers
9
#include "mythcontext.h"
9
#include "mythcontext.h"
10
#include "util-x11.h"
10
#include "util-x11.h"
11
#include "frame.h"
11
12
12
// GLX headers
13
// GLX headers
13
#define GLX_GLXEXT_PROTOTYPES
14
#define GLX_GLXEXT_PROTOTYPES
Lines 17-29 Link Here
17
// Qt headers
18
// Qt headers
18
#include <qstring.h>
19
#include <qstring.h>
19
20
20
#ifndef APIENTRY
21
#define APIENTRY
22
#endif
23
#ifndef APIENTRYP
24
#define APIENTRYP APIENTRY *
25
#endif
26
27
#ifndef GL_TEXTURE_RECTANGLE_ARB
21
#ifndef GL_TEXTURE_RECTANGLE_ARB
28
#define GL_TEXTURE_RECTANGLE_ARB 0x84F5
22
#define GL_TEXTURE_RECTANGLE_ARB 0x84F5
29
#endif
23
#endif
Lines 40-49 Link Here
40
#define GL_FRAMEBUFFER_INCOMPLETE_DUPLICATE_ATTACHMENT_EXT 0x8CD8
34
#define GL_FRAMEBUFFER_INCOMPLETE_DUPLICATE_ATTACHMENT_EXT 0x8CD8
41
#endif
35
#endif
42
36
43
#ifndef GL_FRAGMENT_PROGRAM_ARB
44
#define GL_FRAGMENT_PROGRAM_ARB           0x8804
45
#endif
46
47
// Not all platforms with OpenGL that MythTV supports have the
37
// Not all platforms with OpenGL that MythTV supports have the
48
// GL_EXT_framebuffer_object extension so we need to define these..
38
// GL_EXT_framebuffer_object extension so we need to define these..
49
#ifndef GL_FRAMEBUFFER_EXT
39
#ifndef GL_FRAMEBUFFER_EXT
Lines 77-82 Link Here
77
#define GL_FRAMEBUFFER_UNSUPPORTED_EXT    0x8CDD
67
#define GL_FRAMEBUFFER_UNSUPPORTED_EXT    0x8CDD
78
#endif
68
#endif
79
69
70
#ifndef GL_NV_fence
71
#define GL_ALL_COMPLETED_NV               0x84F2
72
#endif
80
73
81
#ifndef GLX_VERSION_1_3
74
#ifndef GLX_VERSION_1_3
82
typedef XID GLXPbuffer;
75
typedef XID GLXPbuffer;
Lines 113-120 Link Here
113
Window get_gl_window(Display     *XJ_disp,
106
Window get_gl_window(Display     *XJ_disp,
114
                     Window       XJ_curwin,
107
                     Window       XJ_curwin,
115
                     XVisualInfo  *visinfo,
108
                     XVisualInfo  *visinfo,
116
                     const QSize &window_size,
109
                     const QRect &window_rect);
117
                     bool         map_window);
118
110
119
GLXWindow get_glx_window(Display     *XJ_disp,
111
GLXWindow get_glx_window(Display     *XJ_disp,
120
                         GLXFBConfig  glx_fbconfig,
112
                         GLXFBConfig  glx_fbconfig,
Lines 123-133 Link Here
123
                         GLXPbuffer   glx_pbuffer,
115
                         GLXPbuffer   glx_pbuffer,
124
                         const QSize &window_size);
116
                         const QSize &window_size);
125
117
126
void copy_pixels_to_texture(const unsigned char *buf,
118
void pack_yv12alpha(const unsigned char *source,
127
                            int          buffer_format,
119
                 const unsigned char *dest,
128
                            const QSize &buffer_size,
120
                 const int *offsets,
129
                            int          texture,
121
                 const int *pitches,
130
                            int          texture_type);
122
                 const QSize size,
123
                 const unsigned char *alpha = NULL);
124
125
void pack_yv12interlaced(const unsigned char *source,
126
                 const unsigned char *dest,
127
                 const int *offsets,
128
                 const int *pitches,
129
                 const QSize size);
130
131
void store_bicubic_weights(float x, float *dst);
131
132
132
__GLXextFuncPtr get_gl_proc_address(const QString &procName);
133
__GLXextFuncPtr get_gl_proc_address(const QString &procName);
133
134
Lines 135-140 Link Here
135
bool has_gl_fbuffer_object_support(const QString &extensions);
136
bool has_gl_fbuffer_object_support(const QString &extensions);
136
bool has_gl_fragment_program_support(const QString &extensions);
137
bool has_gl_fragment_program_support(const QString &extensions);
137
bool has_glx_video_sync_support(const QString &glx_extensions);
138
bool has_glx_video_sync_support(const QString &glx_extensions);
139
bool has_gl_pixelbuffer_object_support(const QString &extensions);
140
bool has_gl_nvfence_support(const QString &extensions);
138
141
139
extern QString                             gMythGLExtensions;
142
extern QString                             gMythGLExtensions;
140
extern uint                                gMythGLExtSupported;
143
extern uint                                gMythGLExtSupported;
Lines 146-151 Link Here
146
extern PFNGLDELETEPROGRAMSARBPROC          gMythGLDeleteProgramsARB;
149
extern PFNGLDELETEPROGRAMSARBPROC          gMythGLDeleteProgramsARB;
147
extern PFNGLGETPROGRAMIVARBPROC            gMythGLGetProgramivARB;
150
extern PFNGLGETPROGRAMIVARBPROC            gMythGLGetProgramivARB;
148
151
152
extern PFNGLMAPBUFFERPROC                  gMythGLMapBufferARB;
153
extern PFNGLBINDBUFFERARBPROC              gMythGLBindBufferARB;
154
extern PFNGLGENBUFFERSARBPROC              gMythGLGenBuffersARB;
155
extern PFNGLBUFFERDATAARBPROC              gMythGLBufferDataARB;
156
extern PFNGLUNMAPBUFFERARBPROC             gMythGLUnmapBufferARB;
157
extern PFNGLDELETEBUFFERSARBPROC           gMythGLDeleteBuffersARB;
158
149
// Not all platforms with OpenGL that MythTV supports have the
159
// Not all platforms with OpenGL that MythTV supports have the
150
// GL_EXT_framebuffer_object extension so we need to define these..
160
// GL_EXT_framebuffer_object extension so we need to define these..
151
typedef void (APIENTRYP MYTH_GLGENFRAMEBUFFERSEXTPROC)
161
typedef void (APIENTRYP MYTH_GLGENFRAMEBUFFERSEXTPROC)
Lines 169-174 Link Here
169
extern PFNGLXGETVIDEOSYNCSGIPROC           gMythGLXGetVideoSyncSGI;
179
extern PFNGLXGETVIDEOSYNCSGIPROC           gMythGLXGetVideoSyncSGI;
170
extern PFNGLXWAITVIDEOSYNCSGIPROC          gMythGLXWaitVideoSyncSGI;
180
extern PFNGLXWAITVIDEOSYNCSGIPROC          gMythGLXWaitVideoSyncSGI;
171
181
182
extern PFNGLGENFENCESNVPROC                gMythGLGenFencesNV;
183
extern PFNGLDELETEFENCESNVPROC             gMythGLDeleteFencesNV;
184
extern PFNGLSETFENCENVPROC                 gMythGLSetFenceNV;
185
extern PFNGLFINISHFENCENVPROC              gMythGLFinishFenceNV;
172
#endif // USING_OPENGL
186
#endif // USING_OPENGL
173
187
174
#endif // _UTIL_OPENGL_H_
188
#endif // _UTIL_OPENGL_H_
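For readers unfamiliar with the GL_ARB_pixel_buffer_object path that the new gMythGL*Buffer* pointers enable, a minimal sketch of the usual upload sequence follows. The identifiers pbo, tex, frame_data, data_size, width and height are placeholders for illustration, not names taken from MythTV.

    GLuint pbo = 0, tex = 0;                  // placeholder ids
    gMythGLGenBuffersARB(1, &pbo);
    gMythGLBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, pbo);
    // allocate (or orphan) the buffer, map it, and let the CPU fill it
    gMythGLBufferDataARB(GL_PIXEL_UNPACK_BUFFER_ARB, data_size, NULL, GL_STREAM_DRAW_ARB);
    void *ptr = gMythGLMapBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, GL_WRITE_ONLY_ARB);
    memcpy(ptr, frame_data, data_size);       // pixel data prepared on the CPU
    gMythGLUnmapBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB);
    // with the PBO still bound, the texture upload is sourced from it (offset 0)
    glBindTexture(GL_TEXTURE_RECTANGLE_ARB, tex);
    glTexSubImage2D(GL_TEXTURE_RECTANGLE_ARB, 0, 0, 0, width, height,
                    GL_BGRA, GL_UNSIGNED_BYTE, NULL);
    gMythGLBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, 0);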
(-)mythtv.ori/libs/libmythtv/util-vdpau.cpp (+2159 lines)
Line 0 Link Here
1
#include <cstdio>
2
#include <cstdlib>
3
#include <cassert>
4
5
#include "mythcontext.h"
6
extern "C" {
7
#include "frame.h"
8
#include "avutil.h"
9
#include "vdpau_render.h"
10
}
11
12
#include "videoouttypes.h"
13
#include "mythcodecid.h"
14
#include "util-x11.h"
15
#include "util-vdpau.h"
16
17
#define LOC QString("VDPAU: ")
18
#define LOC_ERR QString("VDPAU Error: ")
19
20
#define MIN_OUTPUT_SURFACES 2
21
#define MAX_OUTPUT_SURFACES 4
22
#define NUM_REFERENCE_FRAMES 3
23
24
#define ARSIZE(x) (sizeof(x) / sizeof((x)[0]))
25
26
/* MACRO for error check */
27
#define CHECK_ST \
28
  ok &= (vdp_st == VDP_STATUS_OK); \
29
  if (!ok) { \
30
      VERBOSE(VB_PLAYBACK, LOC_ERR + QString("Error at %1:%2 (#%3, %4)") \
31
              .arg(__FILE__).arg( __LINE__).arg(vdp_st) \
32
              .arg(vdp_get_error_string(vdp_st))); \
33
  }
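// Typical usage of CHECK_ST in this file: the calling scope declares
//     VdpStatus vdp_st;
//     bool ok = true;
// makes a VDPAU call that returns into vdp_st, then invokes CHECK_ST, which
// folds the status into 'ok' and logs file/line plus vdp_get_error_string()
// when something failed.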
34
35
static const VdpChromaType vdp_chroma_type = VDP_CHROMA_TYPE_420;
36
static const VdpOutputSurfaceRenderBlendState osd_blend =
37
    {
38
        VDP_OUTPUT_SURFACE_RENDER_BLEND_STATE_VERSION,
39
        VDP_OUTPUT_SURFACE_RENDER_BLEND_FACTOR_ZERO,
40
        VDP_OUTPUT_SURFACE_RENDER_BLEND_FACTOR_ONE,
41
        VDP_OUTPUT_SURFACE_RENDER_BLEND_FACTOR_ONE,
42
        VDP_OUTPUT_SURFACE_RENDER_BLEND_FACTOR_ZERO,
43
        VDP_OUTPUT_SURFACE_RENDER_BLEND_EQUATION_ADD,
44
        VDP_OUTPUT_SURFACE_RENDER_BLEND_EQUATION_ADD
45
    };        
46
47
static const VdpOutputSurfaceRenderBlendState pip_blend =
48
    {
49
        VDP_OUTPUT_SURFACE_RENDER_BLEND_STATE_VERSION,
50
        VDP_OUTPUT_SURFACE_RENDER_BLEND_FACTOR_ONE,
51
        VDP_OUTPUT_SURFACE_RENDER_BLEND_FACTOR_ZERO,
52
        VDP_OUTPUT_SURFACE_RENDER_BLEND_FACTOR_ONE,
53
        VDP_OUTPUT_SURFACE_RENDER_BLEND_FACTOR_ZERO,
54
        VDP_OUTPUT_SURFACE_RENDER_BLEND_EQUATION_ADD,
55
        VDP_OUTPUT_SURFACE_RENDER_BLEND_EQUATION_ADD
56
    };
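// Reading the two blend states above (field order: source colour factor,
// destination colour factor, source alpha factor, destination alpha factor,
// colour equation, alpha equation): osd_blend keeps the destination colour
// but takes the source alpha, which is how UpdateOSD() merges the A8 alpha
// bitmap into the already colour-converted OSD surface; pip_blend takes
// source colour and alpha, i.e. a straight copy.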
57
58
static void vdpau_preemption_callback(VdpDevice device, void *vdpau_ctx)
59
{
60
    (void)device;
61
    VERBOSE(VB_IMPORTANT, LOC_ERR + QString("DISPLAY PRE-EMPTED. Aborting playback."));
62
    VDPAUContext *ctx = (VDPAUContext*)vdpau_ctx;
63
    // TODO this should really kick off re-initialisation
64
    if (ctx)
65
        ctx->SetErrored();
66
}
67
68
VDPAUContext::VDPAUContext()
69
  : nextframedelay(0),      lastframetime(0),
70
    pix_fmt(-1),            maxVideoWidth(0),  maxVideoHeight(0),
71
    videoSurfaces(0),       surface_render(0), checkVideoSurfaces(8),
72
    numSurfaces(0),
73
    videoSurface(0),        outputSurface(0),  checkOutputSurfaces(false),
74
    outputSize(QSize(0,0)), decoder(0),        maxReferences(2),
75
    videoMixer(0),          surfaceNum(0),     osdVideoSurface(0),
76
    osdOutputSurface(0),    osdVideoMixer(0),  osdAlpha(0),
77
    osdReady(false),        osdSize(QSize(0,0)) ,deintAvail(false),
78
    deinterlacer("notset"), deinterlacing(false), currentFrameNum(-1),
79
    needDeintRefs(false),   useColorControl(false),
80
    pipFrameSize(QSize(0,0)), pipVideoSurface(0),
81
    pipOutputSurface(0),    pipAlpha(0),
82
    pipVideoMixer(0),       pipReady(0),
83
    vdp_flip_target(NULL),  vdp_flip_queue(NULL),
84
    vdpauDecode(false),     vdp_device(NULL),  errored(false),
85
    vdp_get_proc_address(NULL),       vdp_device_destroy(NULL),
86
    vdp_get_error_string(NULL),       vdp_get_api_version(NULL),
87
    vdp_get_information_string(NULL), vdp_video_surface_create(NULL),
88
    vdp_video_surface_destroy(NULL),  vdp_video_surface_put_bits_y_cb_cr(NULL),
89
    vdp_video_surface_get_bits_y_cb_cr(NULL),
90
    vdp_video_surface_query_get_put_bits_y_cb_cr_capabilities(NULL),
91
    vdp_video_surface_query_capabilities(NULL),
92
    vdp_output_surface_put_bits_y_cb_cr(NULL),
93
    vdp_output_surface_put_bits_native(NULL), vdp_output_surface_create(NULL),
94
    vdp_output_surface_destroy(NULL),
95
    vdp_output_surface_render_bitmap_surface(NULL),
96
    vdp_output_surface_query_capabilities(NULL), vdp_video_mixer_create(NULL),
97
    vdp_video_mixer_set_feature_enables(NULL), vdp_video_mixer_destroy(NULL),
98
    vdp_video_mixer_render(NULL), vdp_video_mixer_set_attribute_values(NULL),
99
    vdp_video_mixer_query_feature_support(NULL),
100
    vdp_video_mixer_query_attribute_support(NULL),
101
    vdp_video_mixer_query_parameter_support(NULL),
102
    vdp_generate_csc_matrix(NULL),
103
    vdp_presentation_queue_target_destroy(NULL),
104
    vdp_presentation_queue_create(NULL),
105
    vdp_presentation_queue_destroy(NULL), vdp_presentation_queue_display(NULL),
106
    vdp_presentation_queue_block_until_surface_idle(NULL),
107
    vdp_presentation_queue_target_create_x11(NULL),
108
    vdp_presentation_queue_query_surface_status(NULL),
109
    vdp_presentation_queue_get_time(NULL),
110
    vdp_presentation_queue_set_background_color(NULL),
111
    vdp_decoder_create(NULL), vdp_decoder_destroy(NULL),
112
    vdp_decoder_render(NULL), vdp_bitmap_surface_create(NULL),
113
    vdp_bitmap_surface_destroy(NULL), vdp_bitmap_surface_put_bits_native(NULL),
114
    vdp_bitmap_surface_query_capabilities(NULL),
115
    vdp_preemption_callback_register(NULL)
116
{
117
}
118
119
VDPAUContext::~VDPAUContext()
120
{
121
}
122
123
bool VDPAUContext::Init(Display *disp, int screen,
124
                        Window win, QSize screen_size,
125
                        bool color_control, MythCodecID mcodecid)
126
{
127
    outputSize = screen_size;
128
129
    if ((kCodec_VDPAU_BEGIN < mcodecid) && (mcodecid < kCodec_VDPAU_END))
130
        vdpauDecode = true;
131
132
    bool ok;
133
134
    ok = InitProcs(disp, screen);
135
    if (!ok)
136
        return ok;
137
138
    ok = InitFlipQueue(win);
139
    if (!ok)
140
        return ok;
141
142
    ok = InitOutput(screen_size);
143
    if (!ok)
144
        return ok;
145
146
    if (color_control)
147
        useColorControl = InitColorControl();
148
149
    return ok;
150
}
151
152
void VDPAUContext::Deinit(void)
153
{
154
    if (decoder)
155
    {
156
        vdp_decoder_destroy(decoder);
157
        decoder = NULL;
158
        pix_fmt = -1;
159
    }
160
    ClearReferenceFrames();
161
    DeinitOSD();
162
    FreeOutput();
163
    DeinitFlipQueue();
164
    DeinitPip();
165
    DeinitProcs();
166
    outputSize =  QSize(0,0);
167
}
168
169
static const char* dummy_get_error_string(VdpStatus status)
170
{
171
    static const char dummy[] = "Unknown";
172
    return &dummy[0];
173
}
174
175
bool VDPAUContext::InitProcs(Display *disp, int screen)
176
{
177
    VdpStatus vdp_st;
178
    bool ok = true;
179
    vdp_get_error_string = &dummy_get_error_string;
180
181
    vdp_st = vdp_device_create_x11(
182
        disp,
183
        screen,
184
        &vdp_device,
185
        &vdp_get_proc_address
186
    );
187
    CHECK_ST
188
    if (!ok)
189
    {
190
        VERBOSE(VB_PLAYBACK, LOC_ERR +
191
            QString("Failed to create VDP Device."));
192
        return false;
193
    }
194
195
    vdp_st = vdp_get_proc_address(
196
        vdp_device,
197
        VDP_FUNC_ID_GET_ERROR_STRING,
198
        (void **)&vdp_get_error_string
199
    );
200
    ok &= (vdp_st == VDP_STATUS_OK);
201
    if (!ok)
202
        vdp_get_error_string = &dummy_get_error_string;
203
204
    // non-fatal debugging info
205
    vdp_st = vdp_get_proc_address(
206
        vdp_device,
207
        VDP_FUNC_ID_GET_API_VERSION,
208
        (void **)&vdp_get_api_version
209
    );
210
211
    vdp_st = vdp_get_proc_address(
212
        vdp_device,
213
        VDP_FUNC_ID_GET_INFORMATION_STRING,
214
        (void **)&vdp_get_information_string
215
    );
216
217
    static bool debugged = false;
218
219
    if (!debugged)
220
    {
221
        debugged = true;
222
        if (vdp_get_api_version)
223
        {
224
            uint version;
225
            vdp_get_api_version(&version);
226
            VERBOSE(VB_PLAYBACK, LOC + QString("Version %1").arg(version));
227
        }
228
        if (vdp_get_information_string)
229
        {
230
            const char * info;
231
            vdp_get_information_string(&info);
232
            VERBOSE(VB_PLAYBACK, LOC + QString("Information %2").arg(info));
233
        }
234
    }
235
236
    // non-fatal callback registration
237
    vdp_get_proc_address(
238
        vdp_device,
239
        VDP_FUNC_ID_PREEMPTION_CALLBACK_REGISTER,
240
        (void **)&vdp_preemption_callback_register
241
    );
242
243
    if (vdp_preemption_callback_register)
244
    {
245
        vdp_preemption_callback_register(
246
            vdp_device,
247
            &vdpau_preemption_callback,
248
            (void*)this
249
        );
250
    }
251
252
    vdp_st = vdp_get_proc_address(
253
        vdp_device,
254
        VDP_FUNC_ID_DEVICE_DESTROY,
255
        (void **)&vdp_device_destroy
256
    );
257
    CHECK_ST
258
259
    vdp_st = vdp_get_proc_address(
260
        vdp_device,
261
        VDP_FUNC_ID_VIDEO_SURFACE_CREATE,
262
        (void **)&vdp_video_surface_create
263
    );
264
    CHECK_ST
265
266
    vdp_st = vdp_get_proc_address(
267
        vdp_device,
268
        VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,
269
        (void **)&vdp_video_surface_destroy
270
    );
271
    CHECK_ST
272
273
    vdp_st = vdp_get_proc_address(
274
        vdp_device,
275
        VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR,
276
        (void **)&vdp_video_surface_put_bits_y_cb_cr
277
    );
278
    CHECK_ST
279
280
    vdp_st = vdp_get_proc_address(
281
        vdp_device,
282
        VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR,
283
        (void **)&vdp_video_surface_get_bits_y_cb_cr
284
    );
285
    CHECK_ST
286
287
    vdp_st = vdp_get_proc_address(
288
        vdp_device,
289
        VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
290
        (void **)&vdp_video_surface_query_capabilities
291
    );
292
    CHECK_ST
293
294
    vdp_st = vdp_get_proc_address(
295
        vdp_device,
296
        VDP_FUNC_ID_OUTPUT_SURFACE_PUT_BITS_Y_CB_CR,
297
        (void **)&vdp_output_surface_put_bits_y_cb_cr
298
    );
299
    CHECK_ST
300
301
    vdp_st = vdp_get_proc_address(
302
        vdp_device,
303
        VDP_FUNC_ID_OUTPUT_SURFACE_PUT_BITS_NATIVE,
304
        (void **)&vdp_output_surface_put_bits_native
305
    );
306
    CHECK_ST
307
308
    vdp_st = vdp_get_proc_address(
309
        vdp_device,
310
        VDP_FUNC_ID_OUTPUT_SURFACE_CREATE,
311
        (void **)&vdp_output_surface_create
312
    );
313
    CHECK_ST
314
315
    vdp_st = vdp_get_proc_address(
316
        vdp_device,
317
        VDP_FUNC_ID_OUTPUT_SURFACE_DESTROY,
318
        (void **)&vdp_output_surface_destroy
319
    );
320
    CHECK_ST
321
322
    vdp_st = vdp_get_proc_address(
323
        vdp_device,
324
        VDP_FUNC_ID_OUTPUT_SURFACE_RENDER_BITMAP_SURFACE,
325
        (void **)&vdp_output_surface_render_bitmap_surface
326
    );
327
    CHECK_ST
328
329
    vdp_st = vdp_get_proc_address(
330
        vdp_device,
331
        VDP_FUNC_ID_OUTPUT_SURFACE_QUERY_CAPABILITIES,
332
        (void **)&vdp_output_surface_query_capabilities
333
    );
334
    CHECK_ST
335
336
    vdp_st = vdp_get_proc_address(
337
        vdp_device,
338
        VDP_FUNC_ID_VIDEO_MIXER_CREATE,
339
        (void **)&vdp_video_mixer_create
340
    );
341
    CHECK_ST
342
343
    vdp_st = vdp_get_proc_address(
344
        vdp_device,
345
        VDP_FUNC_ID_VIDEO_MIXER_SET_FEATURE_ENABLES,
346
        (void **)&vdp_video_mixer_set_feature_enables
347
    );
348
    CHECK_ST
349
350
    vdp_st = vdp_get_proc_address(
351
        vdp_device,
352
        VDP_FUNC_ID_VIDEO_MIXER_DESTROY,
353
        (void **)&vdp_video_mixer_destroy
354
    );
355
    CHECK_ST
356
357
    vdp_st = vdp_get_proc_address(
358
        vdp_device,
359
        VDP_FUNC_ID_VIDEO_MIXER_RENDER,
360
        (void **)&vdp_video_mixer_render
361
    );
362
    CHECK_ST
363
364
    vdp_st = vdp_get_proc_address(
365
        vdp_device,
366
        VDP_FUNC_ID_VIDEO_MIXER_SET_ATTRIBUTE_VALUES,
367
        (void **)&vdp_video_mixer_set_attribute_values
368
    );
369
    CHECK_ST
370
371
    vdp_st = vdp_get_proc_address(
372
        vdp_device,
373
        VDP_FUNC_ID_VIDEO_MIXER_QUERY_FEATURE_SUPPORT,
374
        (void **)&vdp_video_mixer_query_feature_support
375
    );
376
    CHECK_ST
377
378
    vdp_st = vdp_get_proc_address(
379
        vdp_device,
380
        VDP_FUNC_ID_VIDEO_MIXER_QUERY_PARAMETER_SUPPORT,
381
        (void **)&vdp_video_mixer_query_parameter_support
382
    );
383
    CHECK_ST
384
385
    vdp_st = vdp_get_proc_address(
386
        vdp_device,
387
        VDP_FUNC_ID_VIDEO_MIXER_QUERY_ATTRIBUTE_SUPPORT,
388
        (void **)&vdp_video_mixer_query_attribute_support
389
    );
390
    CHECK_ST
391
392
    vdp_st = vdp_get_proc_address(
393
        vdp_device,
394
        VDP_FUNC_ID_GENERATE_CSC_MATRIX,
395
        (void **)&vdp_generate_csc_matrix
396
    );
397
    CHECK_ST
398
399
    vdp_st = vdp_get_proc_address(
400
        vdp_device,
401
        VDP_FUNC_ID_PRESENTATION_QUEUE_TARGET_DESTROY,
402
        (void **)&vdp_presentation_queue_target_destroy
403
    );
404
    CHECK_ST
405
406
    vdp_st = vdp_get_proc_address(
407
        vdp_device,
408
        VDP_FUNC_ID_PRESENTATION_QUEUE_CREATE,
409
        (void **)&vdp_presentation_queue_create
410
    );
411
    CHECK_ST
412
413
    vdp_st = vdp_get_proc_address(
414
        vdp_device,
415
        VDP_FUNC_ID_PRESENTATION_QUEUE_DESTROY,
416
        (void **)&vdp_presentation_queue_destroy
417
    );
418
    CHECK_ST
419
420
    vdp_st = vdp_get_proc_address(
421
        vdp_device,
422
        VDP_FUNC_ID_PRESENTATION_QUEUE_DISPLAY,
423
        (void **)&vdp_presentation_queue_display
424
    );
425
    CHECK_ST
426
427
    vdp_st = vdp_get_proc_address(
428
        vdp_device, 
429
        VDP_FUNC_ID_PRESENTATION_QUEUE_GET_TIME,
430
        (void **)&vdp_presentation_queue_get_time
431
    );
432
    CHECK_ST
433
434
    vdp_st = vdp_get_proc_address(
435
        vdp_device,
436
        VDP_FUNC_ID_PRESENTATION_QUEUE_BLOCK_UNTIL_SURFACE_IDLE,
437
        (void **)&vdp_presentation_queue_block_until_surface_idle
438
    );
439
    CHECK_ST
440
441
    vdp_st = vdp_get_proc_address(
442
        vdp_device,
443
        VDP_FUNC_ID_PRESENTATION_QUEUE_TARGET_CREATE_X11,
444
        (void **)&vdp_presentation_queue_target_create_x11
445
    );
446
    CHECK_ST
447
448
    vdp_st = vdp_get_proc_address(
449
        vdp_device,
450
        VDP_FUNC_ID_PRESENTATION_QUEUE_SET_BACKGROUND_COLOR,
451
        (void **)&vdp_presentation_queue_set_background_color
452
    );
453
    CHECK_ST
454
455
    vdp_st = vdp_get_proc_address(
456
        vdp_device,
457
        VDP_FUNC_ID_DECODER_CREATE,
458
        (void **)&vdp_decoder_create
459
    );
460
    CHECK_ST
461
462
    vdp_st = vdp_get_proc_address(
463
        vdp_device,
464
        VDP_FUNC_ID_DECODER_DESTROY,
465
        (void **)&vdp_decoder_destroy
466
    );
467
    CHECK_ST
468
469
    vdp_st = vdp_get_proc_address(
470
        vdp_device,
471
        VDP_FUNC_ID_DECODER_RENDER,
472
        (void **)&vdp_decoder_render
473
    );
474
    CHECK_ST
475
476
    vdp_st = vdp_get_proc_address(
477
        vdp_device,
478
        VDP_FUNC_ID_PRESENTATION_QUEUE_QUERY_SURFACE_STATUS,
479
        (void **)&vdp_presentation_queue_query_surface_status
480
    );
481
    CHECK_ST
482
483
    vdp_st = vdp_get_proc_address(
484
        vdp_device,
485
        VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
486
        (void **)&vdp_video_surface_query_get_put_bits_y_cb_cr_capabilities);
487
    CHECK_ST
488
489
    vdp_st = vdp_get_proc_address(
490
        vdp_device,
491
        VDP_FUNC_ID_BITMAP_SURFACE_CREATE,
492
        (void **)&vdp_bitmap_surface_create);
493
    CHECK_ST
494
495
    vdp_st = vdp_get_proc_address(
496
        vdp_device,
497
        VDP_FUNC_ID_BITMAP_SURFACE_PUT_BITS_NATIVE,
498
        (void **)&vdp_bitmap_surface_put_bits_native);
499
    CHECK_ST
500
501
    vdp_st = vdp_get_proc_address(
502
        vdp_device,
503
        VDP_FUNC_ID_BITMAP_SURFACE_QUERY_CAPABILITIES,
504
        (void **)&vdp_bitmap_surface_query_capabilities);
505
    CHECK_ST
506
507
    vdp_st = vdp_get_proc_address(
508
        vdp_device,
509
        VDP_FUNC_ID_BITMAP_SURFACE_DESTROY,
510
        (void **)&vdp_bitmap_surface_destroy);
511
    CHECK_ST
512
513
    return ok;
514
}
515
516
void VDPAUContext::DeinitProcs(void)
517
{
518
    if (vdp_device && vdp_device_destroy)
519
    {
520
        vdp_device_destroy(vdp_device);
521
        vdp_device = 0;
522
    }
523
}
524
525
bool VDPAUContext::InitFlipQueue(Window win)
526
{
527
    VdpStatus vdp_st;
528
    bool ok = true;
529
530
    vdp_st = vdp_presentation_queue_target_create_x11(
531
        vdp_device,
532
        win,
533
        &vdp_flip_target
534
    );
535
    CHECK_ST
536
537
    vdp_st = vdp_presentation_queue_create(
538
        vdp_device,
539
        vdp_flip_target,
540
        &vdp_flip_queue
541
    );
542
    CHECK_ST
543
544
    float tmp = 2.0 / 255.0;
545
    VdpColor background;
546
    background.red = tmp;
547
    background.green = tmp;
548
    background.blue = tmp;
549
    background.alpha = 1.0f;
550
551
    if (ok)
552
    {
553
        vdp_st = vdp_presentation_queue_set_background_color(
554
            vdp_flip_queue,
555
            &background
556
        );
557
        CHECK_ST
558
    }
559
560
    return ok;
561
}
562
563
void VDPAUContext::DeinitFlipQueue(void)
564
{
565
    VdpStatus vdp_st;
566
    bool ok = true;
567
568
    if (vdp_flip_queue)
569
    {
570
        vdp_st = vdp_presentation_queue_destroy(
571
            vdp_flip_queue);
572
        vdp_flip_queue = 0;
573
        CHECK_ST
574
    }
575
576
    if (vdp_flip_target)
577
    {
578
        vdp_st = vdp_presentation_queue_target_destroy(
579
            vdp_flip_target);
580
        vdp_flip_target = 0;
581
        CHECK_ST
582
    }
583
}
584
585
bool VDPAUContext::InitBuffers(int width, int height, int numbufs,
586
                               LetterBoxColour letterbox_colour)
587
{
588
    int num_bufs = numbufs;
589
590
    // for software decode, create enough surfaces for deinterlacing
591
    // TODO only create when actually deinterlacing
592
    if (!vdpauDecode)
593
        num_bufs = NUM_REFERENCE_FRAMES;
594
595
    VdpStatus vdp_st;
596
    bool ok = true;
597
598
    int i;
599
600
    VdpBool supported;
601
    vdp_st = vdp_video_surface_query_capabilities(
602
        vdp_device,
603
        vdp_chroma_type,
604
        &supported,
605
        &maxVideoWidth,
606
        &maxVideoHeight
607
        );
608
    CHECK_ST
609
610
    if (!supported || !ok)
611
    {
612
        VERBOSE(VB_PLAYBACK, LOC_ERR +
613
            QString("Video surface -chroma type not supported."));
614
        return false;
615
    }
616
    else if (maxVideoWidth  < (uint)width ||
617
             maxVideoHeight < (uint)height)
618
    {
619
        VERBOSE(VB_PLAYBACK, LOC_ERR +
620
            QString("Video surface - too large (%1x%2 > %3x%4).")
621
            .arg(width).arg(height)
622
            .arg(maxVideoWidth).arg(maxVideoHeight));
623
        return false;
624
    }
625
626
    videoSurfaces = (VdpVideoSurface *)malloc(sizeof(VdpVideoSurface) * num_bufs);
627
    if (vdpauDecode)
628
    {
629
        surface_render = (vdpau_render_state_t*)malloc(sizeof(vdpau_render_state_t) * num_bufs);
630
        memset(surface_render, 0, sizeof(vdpau_render_state_t) * num_bufs);
631
    }
632
633
    numSurfaces = num_bufs;
634
635
    for (i = 0; i < num_bufs; i++)
636
    {
637
        vdp_st = vdp_video_surface_create(
638
            vdp_device,
639
            vdp_chroma_type,
640
            width,
641
            height,
642
            &(videoSurfaces[i])
643
        );
644
        CHECK_ST
645
646
        if (!ok)
647
        {
648
            VERBOSE(VB_PLAYBACK, LOC_ERR +
649
                QString("Failed to create video surface."));
650
            return false;
651
        }
652
653
        if (vdpauDecode)
654
        {
655
            surface_render[i].magic = MP_VDPAU_RENDER_MAGIC;
656
            surface_render[i].state = 0;
657
            surface_render[i].surface = videoSurfaces[i];
658
        }
659
    }
660
661
    // clear video surfaces to black
662
    vdp_st = vdp_video_surface_query_get_put_bits_y_cb_cr_capabilities(
663
                vdp_device,
664
                vdp_chroma_type,
665
                VDP_YCBCR_FORMAT_YV12,
666
                &supported);
667
668
    if (supported && (vdp_st == VDP_STATUS_OK))
669
    {
670
        unsigned char *tmp =
671
            new unsigned char[(width * height * 3)>>1];
672
        if (tmp)
673
        {
674
            bzero(tmp, width * height);
675
            memset(tmp + (width * height), 127, (width * height)>>1);
676
            uint32_t pitches[3] = {width, width, width>>1};
677
            void* const planes[3] = 
678
                        {tmp, tmp + (width * height), tmp + (width * height)};
679
            for (i = 0; i < num_bufs; i++)
680
            {
681
                vdp_video_surface_put_bits_y_cb_cr(
682
                    videoSurfaces[i],
683
                    VDP_YCBCR_FORMAT_YV12,
684
                    planes,
685
                    pitches
686
                );
687
            }
688
            delete [] tmp;
689
        }
690
691
    }
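    // Y=0 with Cb/Cr near mid-scale is black in YCbCr; pre-filling the
    // surfaces avoids the green of zeroed video memory showing up once the
    // first frames are colour-converted.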
692
693
    // TODO video capability/parameter check 
694
    // but should just fail gracefully anyway
695
696
    uint32_t num_layers = 2; // PiP and OSD
697
    VdpVideoMixerParameter parameters[] = {
698
        VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
699
        VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
700
        VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE,
701
        VDP_VIDEO_MIXER_PARAMETER_LAYERS,
702
    };
703
704
    void const * parameter_values[] = {
705
        &width,
706
        &height,
707
        &vdp_chroma_type,
708
        &num_layers
709
    };
710
711
    // check deinterlacers available
712
    vdp_st = vdp_video_mixer_query_feature_support(
713
        vdp_device,
714
        VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL,
715
        &supported
716
    );
717
    CHECK_ST
718
    deintAvail = (ok && supported);
719
    vdp_st = vdp_video_mixer_query_feature_support(
720
        vdp_device,
721
        VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL,
722
        &supported
723
    );
724
    CHECK_ST
725
    deintAvail &= (ok && supported);
726
727
    VdpVideoMixerFeature features[] = {
728
        VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL,
729
        VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL,
730
    };
731
732
    vdp_st = vdp_video_mixer_create(
733
        vdp_device,
734
        deintAvail ? ARSIZE(features) : 0,
735
        deintAvail ? features : NULL,
736
        ARSIZE(parameters),
737
        parameters,
738
        parameter_values,
739
        &videoMixer
740
    );
741
    CHECK_ST
742
743
    if (!ok && videoMixer)
744
    {
745
        VERBOSE(VB_IMPORTANT, LOC_ERR +
746
            QString("Create video mixer - errored but returned handle."));
747
    }
748
749
    // minimise green screen
750
    if (ok)
751
        ClearScreen();
752
753
    // set letterbox colour
754
    if (ok && (letterbox_colour == kLetterBoxColour_Gray25))
755
    {
756
        VdpColor gray;
757
        gray.red = 0.5f;
758
        gray.green = 0.5f;
759
        gray.blue = 0.5f;
760
        gray.alpha = 1.0f;
761
762
        VdpVideoMixerAttribute attributes[] = {
763
            VDP_VIDEO_MIXER_ATTRIBUTE_BACKGROUND_COLOR,
764
        };
765
        void const * attribute_values[] = { &gray };
766
767
        vdp_st = vdp_video_mixer_set_attribute_values(
768
           videoMixer,
769
           ARSIZE(attributes),
770
           attributes,
771
           attribute_values
772
        );
773
        CHECK_ST
774
    }
775
776
    return ok;
777
}
778
779
void VDPAUContext::FreeBuffers(void)
780
{
781
    VdpStatus vdp_st;
782
    bool ok = true;
783
784
    int i;
785
786
    if (videoMixer)
787
    {
788
        vdp_st = vdp_video_mixer_destroy(
789
            videoMixer
790
        );
791
        videoMixer = 0;
792
        CHECK_ST
793
    }
794
795
    if (videoSurfaces)
796
    {
797
        for (i = 0; i < numSurfaces; i++)
798
        {
799
            if (videoSurfaces[i])
800
            {
801
                vdp_st = vdp_video_surface_destroy(
802
                    videoSurfaces[i]);
803
                CHECK_ST
804
            }
805
        }
806
        free(videoSurfaces);
807
        videoSurfaces = NULL;
808
    }
809
810
    if (surface_render)
811
        free(surface_render);
812
    surface_render = NULL;
813
}
814
815
bool VDPAUContext::InitOutput(QSize size)
816
{
817
    VdpStatus vdp_st;
818
    bool ok = true;
819
    int i;
820
821
    VdpBool supported;
822
    uint max_width, max_height;
823
    vdp_st = vdp_output_surface_query_capabilities(
824
        vdp_device,
825
        VDP_RGBA_FORMAT_B8G8R8A8,
826
        &supported,
827
        &max_width,
828
        &max_height
829
    );
830
    CHECK_ST
831
832
    if (!supported || !ok)
833
    {
834
        VERBOSE(VB_PLAYBACK, LOC_ERR +
835
            QString("Output surface chroma format not supported."));
836
        return false;
837
    }
838
    else if (max_width  < (uint)size.width() ||
839
             max_height < (uint)size.height())
840
    {
841
        VERBOSE(VB_PLAYBACK, LOC_ERR +
842
            QString("Output surface - too large (%1x%2 > %3x%4).")
843
            .arg(size.width()).arg(size.height())
844
            .arg(max_width).arg(max_height));
845
        return false;
846
    }
847
    
848
    for (i = 0; i < MIN_OUTPUT_SURFACES; i++)
849
    {
850
        VdpOutputSurface tmp;
851
        vdp_st = vdp_output_surface_create(
852
            vdp_device,
853
            VDP_RGBA_FORMAT_B8G8R8A8,
854
            size.width(),
855
            size.height(),
856
            &tmp
857
        );
858
        CHECK_ST
859
860
        if (!ok)
861
        {
862
            VERBOSE(VB_PLAYBACK, LOC_ERR +
863
                QString("Failed to create output surface."));
864
            return false;
865
        }
866
        outputSurfaces.push_back(tmp);
867
    }
868
869
    outRect.x0 = 0;
870
    outRect.y0 = 0;
871
    outRect.x1 = size.width();
872
    outRect.y1 = size.height();
873
    surfaceNum = 0;
874
    return ok;
875
}
876
877
void VDPAUContext::FreeOutput(void)
878
{
879
    if (!vdp_output_surface_destroy)
880
        return;
881
882
    VdpStatus vdp_st;
883
    bool ok = true;
884
    uint i;
885
886
    for (i = 0; i < outputSurfaces.size(); i++)
887
    {
888
        if (outputSurfaces[i])
889
        {
890
            vdp_st = vdp_output_surface_destroy(
891
                outputSurfaces[i]);
892
            CHECK_ST
893
        }
894
    }
895
    outputSurfaces.clear();
896
    checkOutputSurfaces = false;
897
}
898
899
void VDPAUContext::Decode(VideoFrame *frame)
900
{
901
    if (!vdpauDecode)
902
    {
903
        VERBOSE(VB_IMPORTANT, LOC_ERR +
904
            QString("VDPAUContext::Decode called for cpu decode."));
905
        return;
906
    }
907
908
    VdpStatus vdp_st;
909
    bool ok = true;
910
    vdpau_render_state_t *render = (vdpau_render_state_t *)frame->buf;
911
912
    if (frame->pix_fmt != pix_fmt)
913
    {
914
        if (frame->pix_fmt == PIX_FMT_VDPAU_H264_MAIN ||
915
            frame->pix_fmt == PIX_FMT_VDPAU_H264_HIGH)
916
        {
917
            if (render)
918
                maxReferences = render->info.h264.num_ref_frames;
919
920
            if (maxReferences < 1 || maxReferences > 16)
921
            {
922
                uint32_t round_width = (frame->width + 15) & ~15;
923
                uint32_t round_height = (frame->height + 15) & ~15;
924
                uint32_t surf_size = (round_width * round_height * 3) / 2;
925
                maxReferences = (12 * 1024 * 1024) / surf_size;
926
            }
927
            if (maxReferences > 16)
928
                maxReferences = 16;
929
        }
930
931
        VdpDecoderProfile vdp_decoder_profile;
932
        switch (frame->pix_fmt)
933
        {
934
            case PIX_FMT_VDPAU_MPEG1: vdp_decoder_profile = VDP_DECODER_PROFILE_MPEG1; break;
935
            case PIX_FMT_VDPAU_MPEG2_SIMPLE: vdp_decoder_profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE; break;
936
            case PIX_FMT_VDPAU_MPEG2_MAIN: vdp_decoder_profile = VDP_DECODER_PROFILE_MPEG2_MAIN; break;
937
            case PIX_FMT_VDPAU_H264_BASELINE: vdp_decoder_profile = VDP_DECODER_PROFILE_H264_BASELINE; break;
938
            case PIX_FMT_VDPAU_H264_MAIN: vdp_decoder_profile = VDP_DECODER_PROFILE_H264_MAIN; break;
939
            case PIX_FMT_VDPAU_H264_HIGH: vdp_decoder_profile = VDP_DECODER_PROFILE_H264_HIGH; break;
940
            case PIX_FMT_VDPAU_VC1_SIMPLE: vdp_decoder_profile = VDP_DECODER_PROFILE_VC1_SIMPLE; break;
941
            case PIX_FMT_VDPAU_VC1_MAIN: vdp_decoder_profile = VDP_DECODER_PROFILE_VC1_MAIN; break;
942
            case PIX_FMT_VDPAU_VC1_ADVANCED: vdp_decoder_profile = VDP_DECODER_PROFILE_VC1_ADVANCED; break;
943
            default:
944
                assert(0);
945
                return;
946
        }
947
948
        // generic capability pre-checked but specific profile may now fail
949
        vdp_st = vdp_decoder_create(
950
            vdp_device,
951
            vdp_decoder_profile,
952
            frame->width,
953
            frame->height,
954
            maxReferences,
955
            &decoder
956
        );
957
        CHECK_ST
958
959
        if (ok)
960
        {
961
            pix_fmt = frame->pix_fmt;
962
            VERBOSE(VB_PLAYBACK, LOC +
963
                QString("Created VDPAU decoder (%1 ref frames)")
964
                .arg(maxReferences));
965
        }
966
        else
967
        {
968
            VERBOSE(VB_PLAYBACK, LOC_ERR + QString("Failed to create decoder."));
969
            errored = true;
970
        }
971
    }
972
973
    render = (vdpau_render_state_t *)frame->buf;
974
    if (!render || !decoder)
975
        return;
976
977
    vdp_st = vdp_decoder_render(
978
        decoder,
979
        render->surface,
980
        (VdpPictureInfo const *)&(render->info),
981
        render->bitstreamBuffersUsed,
982
        render->bitstreamBuffers
983
    );
984
    CHECK_ST
985
}
986
987
void VDPAUContext::PrepareVideo(VideoFrame *frame, QRect video_rect,
988
                                QRect display_video_rect,
989
                                QSize screen_size, FrameScanType scan,
990
                                bool pause_frame)
991
{
992
    if (checkVideoSurfaces == 1)
993
        checkOutputSurfaces = true;
994
995
    if (checkVideoSurfaces > 0)
996
        checkVideoSurfaces--;
997
998
    VdpStatus vdp_st;
999
    bool ok = true;
1000
    VdpTime dummy;
1001
    vdpau_render_state_t *render;
1002
1003
    bool new_frame = true;
1004
    bool deint = (deinterlacing && needDeintRefs && !pause_frame);
1005
    if (deint && frame)
1006
    {
1007
        new_frame = UpdateReferenceFrames(frame);
1008
        if (vdpauDecode && (referenceFrames.size() != NUM_REFERENCE_FRAMES))
1009
            deint = false;
1010
    }
1011
1012
    if (vdpauDecode && frame)
1013
    {
1014
        render = (vdpau_render_state_t *)frame->buf;
1015
        if (!render)
1016
            return;
1017
1018
        videoSurface = render->surface;
1019
    }
1020
    else if (new_frame && frame)
1021
    {
1022
        int surf = 0;
1023
        if (deint)
1024
            surf = (currentFrameNum + 1) % NUM_REFERENCE_FRAMES;
1025
1026
        videoSurface = videoSurfaces[surf];
1027
1028
        uint32_t pitches[3] = {
1029
            frame->pitches[0],
1030
            frame->pitches[2],
1031
            frame->pitches[1]
1032
        };
1033
        void* const planes[3] = {
1034
            frame->buf,
1035
            frame->buf + frame->offsets[2],
1036
            frame->buf + frame->offsets[1]
1037
        };
1038
        vdp_st = vdp_video_surface_put_bits_y_cb_cr(
1039
            videoSurface,
1040
            VDP_YCBCR_FORMAT_YV12,
1041
            planes,
1042
            pitches);
1043
        CHECK_ST;
1044
        if (!ok)
1045
            return;
1046
    }
1047
    else if (!frame)
1048
    {
1049
        deint = false;
1050
        if (!videoSurface)
1051
            videoSurface = videoSurfaces[0];
1052
    }
1053
1054
    if (outRect.x1 != (uint)screen_size.width() ||
1055
        outRect.y1 != (uint)screen_size.height())
1056
    {
1057
        FreeOutput();
1058
        InitOutput(screen_size);
1059
    }
1060
1061
    // fix broken/missing negative rect clipping in vdpau
1062
    if (display_video_rect.top() < 0 && display_video_rect.height() > 0)
1063
    {
1064
        float yscale = (float)video_rect.height() /
1065
                       (float)display_video_rect.height();
1066
        int tmp = video_rect.top() -
1067
                  (int)((float)display_video_rect.top() * yscale);
1068
        video_rect.setTop(max(0, tmp));
1069
        display_video_rect.setTop(0);
1070
    }
1071
1072
    if (display_video_rect.left() < 0 && display_video_rect.width() > 0)
1073
    {
1074
        float xscale = (float)video_rect.width() /
1075
                       (float)display_video_rect.width();
1076
        int tmp = video_rect.left() -
1077
                  (int)((float)display_video_rect.left() * xscale);
1078
        video_rect.setLeft(max(0, tmp));
1079
        display_video_rect.setLeft(0);
1080
    }
1081
1082
    outRect.x0 = 0;
1083
    outRect.y0 = 0;
1084
    outRect.x1 = screen_size.width();
1085
    outRect.y1 = screen_size.height();
1086
1087
    VdpRect srcRect;
1088
    srcRect.x0 = video_rect.left();
1089
    srcRect.y0 = video_rect.top();
1090
    srcRect.x1 = video_rect.left() + video_rect.width();
1091
    srcRect.y1 = video_rect.top() + video_rect.height();
1092
1093
    outRectVid.x0 = display_video_rect.left();
1094
    outRectVid.y0 = display_video_rect.top();
1095
    outRectVid.x1 = display_video_rect.left() + display_video_rect.width();
1096
    outRectVid.y1 = display_video_rect.top() + display_video_rect.height();
1097
1098
    VdpVideoMixerPictureStructure field =
1099
        VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME;
1100
1101
    if (scan == kScan_Interlaced && deinterlacing)
1102
        field = VDP_VIDEO_MIXER_PICTURE_STRUCTURE_TOP_FIELD;
1103
    else if (scan == kScan_Intr2ndField && deinterlacing)
1104
        field = VDP_VIDEO_MIXER_PICTURE_STRUCTURE_BOTTOM_FIELD;
1105
1106
    outputSurface = outputSurfaces[surfaceNum];
1107
    usleep(2000);
1108
    vdp_st = vdp_presentation_queue_block_until_surface_idle(
1109
        vdp_flip_queue,
1110
        outputSurface, 
1111
        &dummy
1112
    );
1113
    CHECK_ST
1114
1115
    VdpVideoSurface past_surfaces[2] = { VDP_INVALID_HANDLE,
1116
                                         VDP_INVALID_HANDLE };
1117
    VdpVideoSurface future_surfaces[1] = { VDP_INVALID_HANDLE };
1118
1119
    if (deint)
1120
    {
1121
        VdpVideoSurface refs[NUM_REFERENCE_FRAMES];
1122
        for (int i = 0; i < NUM_REFERENCE_FRAMES; i++)
1123
        {
1124
            if (vdpauDecode)
1125
            {
1126
                vdpau_render_state_t *render;
1127
                render = (vdpau_render_state_t *)referenceFrames[i]->buf;
1128
                refs[i] = render ? render->surface : VDP_INVALID_HANDLE;
1129
            }
1130
            else
1131
            {
1132
                int ref = (currentFrameNum + i - 1) % NUM_REFERENCE_FRAMES;
1133
                if (ref < 0)
1134
                    ref = 0;
1135
                refs[i] = videoSurfaces[ref];
1136
            }
1137
        }
1138
1139
        videoSurface = refs[1];
1140
 
1141
        if (scan == kScan_Interlaced)
1142
        {
1143
            // next field is in the current frame
1144
            future_surfaces[0] = refs[1];
1145
            // previous two fields are in the previous frame
1146
            past_surfaces[0] = refs[0];
1147
            past_surfaces[1] = refs[0];
1148
        }
1149
        else
1150
        {
1151
            // next field is in the next frame
1152
            future_surfaces[0] = refs[2];
1153
            // previous field is in the current frame
1154
            past_surfaces[0] = refs[1];
1155
            // field before that is in the previous frame
1156
            past_surfaces[1] = refs[0];
1157
        }
1158
    }
1159
1160
    uint num_layers  = 0;
1161
1162
    if (osdReady) { num_layers++; }
1163
    if (pipReady) { num_layers++; }
1164
1165
    VdpLayer layers[2];
1166
    
1167
    if (num_layers == 1)
1168
    {
1169
        if (osdReady)
1170
            memcpy(&(layers[0]), &osdLayer, sizeof(osdLayer));
1171
        if (pipReady)
1172
            memcpy(&(layers[0]), &pipLayer, sizeof(pipLayer));
1173
    }
1174
    else if (num_layers == 2)
1175
    {
1176
        memcpy(&(layers[0]), &pipLayer, sizeof(pipLayer));
1177
        memcpy(&(layers[1]), &osdLayer, sizeof(osdLayer));
1178
    }
1179
1180
    vdp_st = vdp_video_mixer_render(
1181
        videoMixer,
1182
        VDP_INVALID_HANDLE,
1183
        NULL,
1184
        field,
1185
        deint ? ARSIZE(past_surfaces) : 0,
1186
        deint ? past_surfaces : NULL,
1187
        videoSurface,
1188
        deint ? ARSIZE(future_surfaces) : 0,
1189
        deint ? future_surfaces : NULL,
1190
        &srcRect,
1191
        outputSurface,
1192
        &outRect,
1193
        &outRectVid,
1194
        num_layers,
1195
        num_layers ? layers : NULL
1196
    );
1197
    CHECK_ST
1198
1199
    if (pipReady)
1200
        pipReady--;
1201
}
1202
1203
void VDPAUContext::DisplayNextFrame(void)
1204
{
1205
    if (!outputSurface)
1206
        return;
1207
1208
    VdpStatus vdp_st;
1209
    bool ok = true;
1210
    VdpTime now = 0;
1211
1212
    if (nextframedelay > 0)
1213
    {
1214
        vdp_st = vdp_presentation_queue_get_time(
1215
            vdp_flip_queue,
1216
            &now
1217
        );
1218
        CHECK_ST
1219
1220
        if (lastframetime == 0)
1221
            lastframetime = now;
1222
1223
        now += nextframedelay * 1000;
1224
    }
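    // VdpTime from the presentation queue is in nanoseconds, while
    // nextframedelay is the microsecond value handed to
    // SetNextFrameDisplayTimeOffset(), hence the factor of 1000 above.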
1225
1226
    vdp_st = vdp_presentation_queue_display(
1227
        vdp_flip_queue,
1228
        outputSurface,
1229
        outRect.x1,
1230
        outRect.y1,
1231
        now
1232
    );
1233
    CHECK_ST
1234
1235
    surfaceNum++;
1236
    if (surfaceNum >= (int)(outputSurfaces.size()))
1237
        surfaceNum = 0;
1238
1239
    if (checkOutputSurfaces)
1240
        AddOutputSurfaces();
1241
}
1242
1243
void VDPAUContext::AddOutputSurfaces(void)
1244
{
1245
    checkOutputSurfaces = false;
1246
    VdpStatus vdp_st;
1247
    bool ok = true;
1248
1249
    int cnt = 0;
1250
    int extra = MAX_OUTPUT_SURFACES - outputSurfaces.size();
1251
    if (extra <= 0)
1252
        return;
1253
1254
    for (int i = 0; i < extra; i++)
1255
    {
1256
        VdpOutputSurface tmp;
1257
        vdp_st = vdp_output_surface_create(
1258
            vdp_device,
1259
            VDP_RGBA_FORMAT_B8G8R8A8,
1260
            outputSize.width(),
1261
            outputSize.height(),
1262
            &tmp
1263
        );
1264
        // suppress non-fatal error messages
1265
        ok &= (vdp_st == VDP_STATUS_OK);
1266
1267
        if (!ok)
1268
            break;
1269
1270
        outputSurfaces.push_back(tmp);
1271
        cnt++;
1272
    }
1273
    VERBOSE(VB_PLAYBACK, LOC + QString("Using %1 output surfaces (max %2)")
1274
        .arg(outputSurfaces.size()).arg(MAX_OUTPUT_SURFACES));
1275
}
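// Output surfaces are grown lazily: InitOutput() allocates MIN_OUTPUT_SURFACES,
// PrepareVideo() counts checkVideoSurfaces down over the first few frames, and
// only once frames are actually flowing does DisplayNextFrame() call the
// routine above to top the pool up towards MAX_OUTPUT_SURFACES.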
1276
1277
void VDPAUContext::SetNextFrameDisplayTimeOffset(int delayus)
1278
{
1279
    nextframedelay = delayus;
1280
}
1281
1282
bool VDPAUContext::InitOSD(QSize size)
1283
{
1284
    if (!vdp_device)
1285
        return false;
1286
1287
    VdpStatus vdp_st;
1288
    bool ok = true;
1289
1290
    uint width = size.width();
1291
    uint height = size.height();
1292
    VdpBool supported = false;
1293
1294
    vdp_st = vdp_video_surface_query_get_put_bits_y_cb_cr_capabilities(
1295
        vdp_device,
1296
        vdp_chroma_type,
1297
        VDP_YCBCR_FORMAT_YV12,
1298
        &supported
1299
    );
1300
    CHECK_ST
1301
    if (!supported || !ok)
1302
    {
1303
        VERBOSE(VB_PLAYBACK, LOC_ERR +
1304
                    QString("YV12 upload to video surface not supported."));
1305
        return false;
1306
    }
1307
1308
    uint32_t max_width, max_height;
1309
    vdp_st = vdp_bitmap_surface_query_capabilities(
1310
        vdp_device,
1311
        VDP_RGBA_FORMAT_A8,
1312
        &supported,
1313
        &max_width,
1314
        &max_height
1315
    );
1316
    CHECK_ST
1317
    if (!supported || !ok)
1318
    {
1319
        VERBOSE(VB_PLAYBACK, LOC_ERR +
1320
                    QString("Alpha transparency bitmaps not supported."));
1321
        return false;
1322
    }
1323
    else if (max_width  < width ||
1324
             max_height < height)
1325
    {
1326
        VERBOSE(VB_PLAYBACK, LOC_ERR +
1327
                    QString("Alpha bitmap too large (%1x%2 > %3x%4).")
1328
                    .arg(width).arg(height).arg(max_width).arg(max_height));
1329
        return false;
1330
    }
1331
1332
    if (maxVideoWidth  < width ||
1333
        maxVideoHeight < height)
1334
    {
1335
        VERBOSE(VB_PLAYBACK, LOC_ERR +
1336
            QString("OSD size too large for video surface."));
1337
        return false;
1338
    }
1339
1340
    // capability already checked in InitOutput
1341
    vdp_st = vdp_output_surface_create(
1342
        vdp_device,
1343
        VDP_RGBA_FORMAT_B8G8R8A8,
1344
        width,
1345
        height,
1346
        &osdOutputSurface
1347
    );
1348
    CHECK_ST;
1349
1350
    if (!ok)
1351
    {
1352
        VERBOSE(VB_PLAYBACK, LOC_ERR +
1353
            QString("Failed to create output surface."));
1354
    }
1355
    else
1356
    {
1357
        vdp_st = vdp_video_surface_create(
1358
            vdp_device,
1359
            vdp_chroma_type,
1360
            width,
1361
            height,
1362
            &osdVideoSurface
1363
        );
1364
        CHECK_ST
1365
    }
1366
1367
    if (!ok)
1368
    {
1369
        VERBOSE(VB_PLAYBACK, LOC_ERR +
1370
            QString("Failed to create video surface."));
1371
    }
1372
    else
1373
    {
1374
        vdp_st = vdp_bitmap_surface_create(
1375
            vdp_device,
1376
            VDP_RGBA_FORMAT_A8,
1377
            width,
1378
            height,
1379
            false,
1380
            &osdAlpha
1381
        );
1382
        CHECK_ST
1383
    }
1384
1385
    VdpVideoMixerParameter parameters[] = {
1386
        VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
1387
        VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
1388
        VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE
1389
    };
1390
1391
    void const * parameter_values[] = {
1392
        &width,
1393
        &height,
1394
        &vdp_chroma_type
1395
    };
1396
1397
    if (!ok)
1398
    {
1399
        VERBOSE(VB_PLAYBACK, LOC_ERR +
1400
            QString("Failed to create bitmap surface."));
1401
    }
1402
    else
1403
    {
1404
        vdp_st = vdp_video_mixer_create(
1405
            vdp_device,
1406
            0,
1407
            0,
1408
            ARSIZE(parameters),
1409
            parameters,
1410
            parameter_values,
1411
            &osdVideoMixer
1412
        );
1413
        CHECK_ST
1414
    }
1415
1416
    if (!ok)
1417
    {
1418
        VERBOSE(VB_PLAYBACK, LOC_ERR +
1419
            QString("Failed to create video mixer."));
1420
    }
1421
    else
1422
    {
1423
        osdSize = size;
1424
        osdRect.x0 = 0;
1425
        osdRect.y0 = 0;
1426
        osdRect.x1 = width;
1427
        osdRect.y1 = height;
1428
        osdLayer.struct_version = VDP_LAYER_VERSION;
1429
        osdLayer.source_surface = osdOutputSurface;
1430
        osdLayer.source_rect    = &osdRect;
1431
        osdLayer.destination_rect = &osdRect;
1432
        VERBOSE(VB_PLAYBACK, LOC + QString("Created OSD (%1x%2)")
1433
                    .arg(width).arg(height));
1434
        return ok;
1435
    }
1436
1437
    osdSize = QSize(0,0);
1438
    return ok;
1439
}
1440
1441
void VDPAUContext::UpdateOSD(void* const planes[3],
1442
                             QSize size,
1443
                             void* const alpha[1])
1444
{
1445
    if (size != osdSize)
1446
    {
1447
        DeinitOSD();
1448
        if (!InitOSD(size))
1449
            return;
1450
    }
1451
1452
    VdpStatus vdp_st;
1453
    bool ok = true;
1454
1455
    // upload OSD YV12 data
1456
    uint32_t pitches[3] = {osdSize.width(),
1457
                           osdSize.width()>>1,
1458
                           osdSize.width()>>1};
1459
    void * const realplanes[3] = { planes[0], planes[2], planes[1] };
1460
1461
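    // 4:2:0 planar upload: full-width pitch for luma, half-width for each
    // chroma plane; the chroma planes are swapped above to match the plane
    // order the YV12 upload expects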
    vdp_st = vdp_video_surface_put_bits_y_cb_cr(osdVideoSurface,
1462
                                                VDP_YCBCR_FORMAT_YV12,
1463
                                                realplanes,
1464
                                                pitches);
1465
    CHECK_ST;
1466
1467
    // osd YV12 colourspace conversion
1468
    if (ok)
1469
    {
1470
        vdp_st = vdp_video_mixer_render(
1471
            osdVideoMixer,
1472
            VDP_INVALID_HANDLE,
1473
            NULL,
1474
            VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME,
1475
            0,
1476
            NULL,
1477
            osdVideoSurface,
1478
            0,
1479
            NULL,
1480
            NULL,
1481
            osdOutputSurface,
1482
            &osdRect,
1483
            &osdRect,
1484
            0,
1485
            NULL
1486
        );
1487
        CHECK_ST
1488
    }
1489
1490
    // upload OSD alpha data
1491
    if (ok)
1492
    {
1493
        uint32_t pitch[1] = {osdSize.width()};
1494
        vdp_st = vdp_bitmap_surface_put_bits_native(
1495
            osdAlpha,
1496
            alpha,
1497
            pitch,
1498
            NULL
1499
        );
1500
        CHECK_ST
1501
    }
1502
1503
    // blend alpha into osd
1504
    if (ok)
1505
    {
1506
        vdp_st = vdp_output_surface_render_bitmap_surface(
1507
            osdOutputSurface,
1508
            NULL,
1509
            osdAlpha,
1510
            NULL,
1511
            NULL,
1512
            &osd_blend,
1513
            0
1514
        );
1515
        CHECK_ST
1516
    }
1517
        
1518
    osdReady = ok;
1519
}
1520
1521
void VDPAUContext::DeinitOSD(void)
1522
{
1523
    if (osdOutputSurface)
1524
    {
1525
        vdp_output_surface_destroy(osdOutputSurface);
1526
        osdOutputSurface = 0;
1527
    }
1528
1529
    if (osdVideoSurface)
1530
    {
1531
        vdp_video_surface_destroy(osdVideoSurface);
1532
        osdVideoSurface = 0;
1533
    }
1534
1535
    if (osdVideoMixer)
1536
    {
1537
        vdp_video_mixer_destroy(osdVideoMixer);
1538
        osdVideoMixer = 0;
1539
    }
1540
1541
    if (osdAlpha)
1542
    {
1543
        vdp_bitmap_surface_destroy(osdAlpha);
1544
        osdAlpha = 0;
1545
    }
1546
    osdSize = QSize(0,0);
1547
}
1548
1549
bool VDPAUContext::SetDeinterlacer(const QString &deint)
1550
{
1551
    deinterlacer = QDeepCopy<QString>(deint);
1552
    return true;
1553
}
1554
1555
bool VDPAUContext::SetDeinterlacing(bool interlaced)
1556
{
1557
    if (!deintAvail)
1558
        return false;
1559
1560
    if (!deinterlacer.contains("vdpau"))
1561
        interlaced = false;
1562
1563
    VdpStatus vdp_st;
1564
    bool ok = interlaced;
1565
1566
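    // "basic" enables temporal deinterlacing only; "advanced" also enables
    // the temporal-spatial variant; both stay disabled when not interlaced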
    VdpVideoMixerFeature features[] = {
1567
        VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL,
1568
        VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL,
1569
    };
1570
1571
    VdpBool temporal = false;
1572
    VdpBool spatial  = false;
1573
    if (deinterlacer.contains("basic"))
1574
    {
1575
        temporal = interlaced;
1576
    }
1577
    else if (deinterlacer.contains("advanced"))
1578
    {
1579
        temporal = interlaced;
1580
        spatial  = interlaced;
1581
    }
1582
    
1583
    const VdpBool feature_values[] = {
1584
        temporal,
1585
        spatial,
1586
    };
1587
1588
    // the following call generates a VDPAU error when both temporal
1589
    // and spatial are false (i.e. when disabling deinterlacing)
1590
    vdp_st = vdp_video_mixer_set_feature_enables(
1591
        videoMixer,
1592
        ARSIZE(features),
1593
        features,
1594
        feature_values
1595
    );
1596
    CHECK_ST
1597
1598
    deinterlacing = (interlaced && ok);
1599
    needDeintRefs = false;
1600
    if (!deinterlacing)
1601
    {
1602
        ClearReferenceFrames();
1603
    }
1604
    else
1605
    {
1606
        if (deinterlacer.contains("advanced") ||
1607
            deinterlacer.contains("basic"))
1608
            needDeintRefs = true;
1609
    }
1610
    return deinterlacing;
1611
}
1612
1613
bool VDPAUContext::UpdateReferenceFrames(VideoFrame *frame)
1614
{
1615
    if (frame->frameNumber == currentFrameNum)
1616
        return false;
1617
1618
    currentFrameNum = frame->frameNumber;
1619
1620
    if (vdpauDecode)
1621
    {
1622
        while (referenceFrames.size() > (NUM_REFERENCE_FRAMES - 1))
1623
            referenceFrames.pop_front();
1624
        referenceFrames.push_back(frame);
1625
    }
1626
1627
    return true;
1628
}
1629
1630
bool VDPAUContext::IsBeingUsed(VideoFrame *frame)
1631
{
1632
    if (!frame || !vdpauDecode)
1633
        return false;
1634
1635
    return referenceFrames.contains(frame);
1636
}
1637
1638
bool VDPAUContext::CheckCodecSupported(MythCodecID myth_codec_id)
1639
{
1640
    bool ok = true;
1641
1642
    Display *disp = MythXOpenDisplay();
1643
    if (!disp)
1644
        return false;
1645
1646
    int screen;
1647
    X11S(screen = DefaultScreen(disp));
1648
1649
    VdpDevice device = 0;
1650
    VdpGetProcAddress * vdp_proc_address;
1651
    VdpStatus vdp_st;
1652
    VdpGetErrorString * vdp_get_error_string;
1653
    vdp_get_error_string = &dummy_get_error_string;
1654
1655
    if (ok)
1656
    {
1657
        vdp_st = vdp_device_create_x11(
1658
            disp,
1659
            screen,
1660
            &device,
1661
            &vdp_proc_address
1662
        );
1663
        CHECK_ST
1664
    }
1665
1666
    VdpDecoderQueryCapabilities * decoder_query = NULL;
1667
    VdpDeviceDestroy * device_destroy = NULL;
1668
1669
    if (ok)
1670
    {
1671
        vdp_st = vdp_proc_address(
1672
            device,
1673
            VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
1674
            (void **)&decoder_query
1675
        );
1676
        CHECK_ST
1677
    }
1678
1679
    if (ok)
1680
    {
1681
        vdp_st = vdp_proc_address(
1682
            device,
1683
            VDP_FUNC_ID_DEVICE_DESTROY,
1684
            (void **)&device_destroy
1685
        );
1686
        CHECK_ST
1687
    }
1688
1689
    if (ok)
1690
    {
1691
        int support = 0;
1692
        VdpBool supported;
1693
        // not checked yet
1694
        uint level, refs, width, height;
1695
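        // three decoder profiles are queried per codec; 'support' counts
        // how many the hardware reports, so partial support can be flagged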
        switch (myth_codec_id)
1696
        {
1697
            case kCodec_MPEG1_VDPAU:
1698
            case kCodec_MPEG2_VDPAU:
1699
                vdp_st = decoder_query(
1700
                    device,
1701
                    VDP_DECODER_PROFILE_MPEG1,
1702
                    &supported,
1703
                    &level, &refs, &width, &height);
1704
                CHECK_ST
1705
                support += supported;
1706
                vdp_st = decoder_query(
1707
                    device,
1708
                    VDP_DECODER_PROFILE_MPEG2_SIMPLE,
1709
                    &supported,
1710
                    &level, &refs, &width, &height);
1711
                CHECK_ST
1712
                support += supported;
1713
                vdp_st = decoder_query(
1714
                    device,
1715
                    VDP_DECODER_PROFILE_MPEG2_MAIN,
1716
                    &supported,
1717
                    &level, &refs, &width, &height);
1718
                CHECK_ST
1719
                support += supported;
1720
                break;
1721
1722
            case kCodec_H264_VDPAU:
1723
                vdp_st = decoder_query(
1724
                    device,
1725
                    VDP_DECODER_PROFILE_H264_BASELINE,
1726
                    &supported,
1727
                    &level, &refs, &width, &height);
1728
                CHECK_ST
1729
                support += supported;
1730
                vdp_st = decoder_query(
1731
                    device,
1732
                    VDP_DECODER_PROFILE_H264_MAIN,
1733
                    &supported,
1734
                    &level, &refs, &width, &height);
1735
                CHECK_ST
1736
                support += supported;
1737
                vdp_st = decoder_query(
1738
                    device,
1739
                    VDP_DECODER_PROFILE_H264_HIGH,
1740
                    &supported,
1741
                    &level, &refs, &width, &height);
1742
                CHECK_ST
1743
                support += supported;
1744
                break;
1745
1746
            case kCodec_VC1_VDPAU:
1747
            // WMV3 is the VC-1 Simple/Main profile bitstream, so the
            // VC-1 profile checks below also cover it
1748
            case kCodec_WMV3_VDPAU:
1749
                vdp_st = decoder_query(
1750
                    device,
1751
                    VDP_DECODER_PROFILE_VC1_SIMPLE,
1752
                    &supported,
1753
                    &level, &refs, &width, &height);
1754
                CHECK_ST
1755
                support += supported;
1756
                vdp_st = decoder_query(
1757
                    device,
1758
                    VDP_DECODER_PROFILE_VC1_MAIN,
1759
                    &supported,
1760
                    &level, &refs, &width, &height);
1761
                CHECK_ST
1762
                support += supported;
1763
                vdp_st = decoder_query(
1764
                    device,
1765
                    VDP_DECODER_PROFILE_VC1_ADVANCED,
1766
                    &supported,
1767
                    &level, &refs, &width, &height);
1768
                CHECK_ST
1769
                support += supported;
1770
                break;
1771
1772
            default:
1773
                ok = false;
1774
        }
1775
        ok = (ok && (support > 0));
1776
        if (ok && support != 3)
1777
        {
1778
            VERBOSE(VB_IMPORTANT,
1779
                QString("VDPAU WARNING: %1 GPU decode not fully supported"
1780
                        " - playback may fail.")
1781
                        .arg(toString(myth_codec_id)));
1782
        }
1783
        else if (!support)
1784
        {
1785
            VERBOSE(VB_PLAYBACK, LOC +
1786
                QString("%1 GPU decode not supported")
1787
                .arg(toString(myth_codec_id)));
1788
        }
1789
    }
1790
1791
    // tidy up
1792
    if (device_destroy && device)
1793
        device_destroy(device);
1794
1795
    if (disp)
1796
        X11S(XCloseDisplay(disp));
1797
1798
    return ok;
1799
}
1800
1801
PictureAttributeSupported 
1802
VDPAUContext::GetSupportedPictureAttributes(void) const
1803
{
1804
    return (!useColorControl) ?
1805
        kPictureAttributeSupported_None :
1806
        (PictureAttributeSupported) 
1807
        (kPictureAttributeSupported_Brightness |
1808
         kPictureAttributeSupported_Contrast |
1809
         kPictureAttributeSupported_Colour |
1810
         kPictureAttributeSupported_Hue);
1811
}
1812
1813
int VDPAUContext::SetPictureAttribute(
1814
        PictureAttribute attribute, int newValue)
1815
{
1816
    if (!useColorControl)
1817
        return -1;
1818
1819
    int ret = -1;
1820
    float new_val;
1821
    switch (attribute)
1822
    {
1823
        case kPictureAttribute_Brightness:
1824
            ret = newValue;
1825
            proCamp.brightness = (newValue * 0.02f) - 1.0f;
1826
            break;
1827
        case kPictureAttribute_Contrast:
1828
            ret = newValue;
1829
            proCamp.contrast = (newValue * 0.02f);
1830
            break;
1831
        case kPictureAttribute_Colour:
1832
            ret = newValue;
1833
            proCamp.saturation = (newValue * 0.02f);
1834
            break;
1835
        case kPictureAttribute_Hue:
1836
            ret = newValue;
1837
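            // map the 0-100 setting onto 0..2*pi radians, wrapping values
            // above pi into the negative half of the range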
            new_val = (newValue * 0.062831853f);
1838
            if (new_val > 3.14159265f)
1839
                new_val -= 6.2831853f;
1840
            proCamp.hue = new_val;
1841
            break;
1842
        default:
1843
            break;
1844
    }
1845
1846
    if (ret != -1)
1847
        SetPictureAttributes();
1848
1849
    return ret;
1850
}
1851
bool VDPAUContext::InitColorControl(void)
1852
{
1853
    bool ok = true;
1854
    VdpStatus vdp_st;
1855
1856
    proCamp.struct_version = VDP_PROCAMP_VERSION;
1857
    proCamp.brightness     = 0.0;
1858
    proCamp.contrast       = 1.0;
1859
    proCamp.saturation     = 1.0;
1860
    proCamp.hue            = 0.0;
1861
1862
    VdpBool supported;
1863
    vdp_st = vdp_video_mixer_query_attribute_support(
1864
        vdp_device,
1865
        VDP_VIDEO_MIXER_ATTRIBUTE_CSC_MATRIX,
1866
        &supported
1867
    );
1868
    CHECK_ST
1869
    ok &= supported;
1870
    return ok;
1871
}
1872
1873
bool VDPAUContext::SetPictureAttributes(void)
1874
{
1875
    bool ok = true;
1876
    VdpStatus vdp_st;
1877
1878
    if (!videoMixer || !useColorControl)
1879
        return false;
1880
1881
    vdp_st = vdp_generate_csc_matrix(
1882
        &proCamp,
1883
        VDP_COLOR_STANDARD_ITUR_BT_601, // detect?
1884
        &cscMatrix
1885
    );
1886
    CHECK_ST
1887
1888
    VdpVideoMixerAttribute attributes[] = {
1889
        VDP_VIDEO_MIXER_ATTRIBUTE_CSC_MATRIX
1890
    };
1891
    void const * attribute_values[] = { &cscMatrix };
1892
1893
    if (ok)
1894
    {
1895
        vdp_st = vdp_video_mixer_set_attribute_values(
1896
           videoMixer,
1897
           ARSIZE(attributes),
1898
           attributes,
1899
           attribute_values
1900
        );
1901
        CHECK_ST
1902
    }
1903
1904
    return ok;
1905
}
1906
1907
void VDPAUContext::ClearScreen(void)
1908
{
1909
    VdpStatus vdp_st;
1910
    bool ok = true;
1911
1912
    VdpRect srcRect;
1913
    srcRect.x0 = 0;
1914
    srcRect.y0 = 0;
1915
    srcRect.x1 = 1;
1916
    srcRect.y1 = 1;
1917
1918
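    // scale a single pixel of the first video surface across the whole
    // output rect and flip it, effectively blanking the display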
    outputSurface = outputSurfaces[surfaceNum];
1919
    vdp_st = vdp_video_mixer_render(
1920
        videoMixer,
1921
        VDP_INVALID_HANDLE,
1922
        NULL,
1923
        VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME,
1924
        0,
1925
        NULL,
1926
        videoSurfaces[0],
1927
        0,
1928
        NULL,
1929
        &srcRect,
1930
        outputSurface,
1931
        &outRect,
1932
        &outRect,
1933
        0, 
1934
        NULL);
1935
    CHECK_ST
1936
1937
    DisplayNextFrame();
1938
}
1939
1940
void VDPAUContext::DeinitPip(void)
1941
{
1942
    pipFrameSize = QSize(0,0);
1943
    pipReady     = 0;
1944
1945
    if (pipVideoSurface)
1946
    {
1947
        vdp_video_surface_destroy(pipVideoSurface);
1948
        pipVideoSurface = 0;
1949
    }
1950
1951
    if (pipOutputSurface)
1952
    {
1953
        vdp_output_surface_destroy(pipOutputSurface);
1954
        pipOutputSurface = 0;
1955
    }
1956
1957
    if (pipVideoMixer)
1958
    {
1959
        vdp_video_mixer_destroy(pipVideoMixer);
1960
        pipVideoMixer = 0;
1961
    }
1962
1963
    if (pipAlpha)
1964
    {
1965
        vdp_bitmap_surface_destroy(pipAlpha);
1966
        pipAlpha = 0;
1967
    }
1968
}
1969
1970
bool VDPAUContext::InitPiP(QSize vid_size)
1971
{
1972
    // TODO capability check 
1973
    // but should just fail gracefully anyway
1974
    bool ok = true;
1975
    VdpStatus vdp_st;
1976
1977
    pipFrameSize = vid_size;
1978
1979
    vdp_st = vdp_video_surface_create(
1980
        vdp_device,
1981
        vdp_chroma_type,
1982
        vid_size.width(),
1983
        vid_size.height(),
1984
        &pipVideoSurface
1985
    );
1986
    CHECK_ST
1987
1988
    if (ok)
1989
    {
1990
        vdp_st = vdp_output_surface_create(
1991
            vdp_device,
1992
            VDP_RGBA_FORMAT_B8G8R8A8,
1993
            vid_size.width(),
1994
            vid_size.height(),
1995
            &pipOutputSurface
1996
        );
1997
        CHECK_ST
1998
    }
1999
2000
    if (ok)
2001
    {
2002
        vdp_st = vdp_bitmap_surface_create(
2003
            vdp_device,
2004
            VDP_RGBA_FORMAT_A8,
2005
            vid_size.width(),
2006
            vid_size.height(),
2007
            false,
2008
            &pipAlpha
2009
        );
2010
        CHECK_ST
2011
    }
2012
2013
    if (ok)
2014
    {
2015
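        // fill the A8 alpha surface with 255 once so the PiP is blended
        // fully opaque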
        unsigned char *alpha = new unsigned char[vid_size.width() * vid_size.height()];
2016
        void const * alpha_ptr[] = {alpha};
2017
        if (alpha)
2018
        {
2019
            memset(alpha, 255, vid_size.width() * vid_size.height());
2020
            uint32_t pitch[1] = {vid_size.width()};
2021
            vdp_st = vdp_bitmap_surface_put_bits_native(
2022
                pipAlpha,
2023
                alpha_ptr,
2024
                pitch,
2025
                NULL
2026
            );
2027
            CHECK_ST
2028
            delete [] alpha;
2029
        }
2030
        else
2031
            ok = false;
2032
    }
2033
2034
    if (ok)
2035
    {
2036
        int width = vid_size.width();
2037
        int height = vid_size.height();
2038
        VdpVideoMixerParameter parameters[] = {
2039
            VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
2040
            VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
2041
            VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE
2042
        };
2043
2044
        void const * parameter_values[] = {
2045
            &width,
2046
            &height,
2047
            &vdp_chroma_type
2048
        };
2049
2050
        vdp_st = vdp_video_mixer_create(
2051
            vdp_device,
2052
            0,
2053
            0,
2054
            ARSIZE(parameters),
2055
            parameters,
2056
            parameter_values,
2057
            &pipVideoMixer
2058
        );
2059
        CHECK_ST
2060
        VERBOSE(VB_PLAYBACK, LOC + QString("Created VDPAU PiP (%1x%2)")
2061
                .arg(width).arg(height));
2062
    }
2063
2064
    pipLayer.struct_version = VDP_LAYER_VERSION;
2065
    pipLayer.source_surface = pipOutputSurface;
2066
    pipLayer.source_rect    = NULL;
2067
    pipLayer.destination_rect = &pipPosition;
2068
2069
    return ok;
2070
}
2071
2072
bool VDPAUContext::ShowPiP(VideoFrame * frame, QRect position)
2073
{
2074
    if (!frame)
2075
        return false;
2076
2077
    bool ok = true;
2078
    VdpStatus vdp_st;
2079
2080
    if (frame->width  != pipFrameSize.width() ||
2081
        frame->height != pipFrameSize.height())
2082
    {
2083
        DeinitPip();
2084
        ok = InitPiP(QSize(frame->width, frame->height));
2085
    }
2086
2087
    if (!ok)
2088
        return ok;
2089
2090
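    // as with the OSD, the frame's second and third planes are swapped to
    // match the plane order expected for the YV12 upload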
    uint32_t pitches[] = {
2091
        frame->pitches[0],
2092
        frame->pitches[2],
2093
        frame->pitches[1]
2094
    };
2095
    void* const planes[] = {
2096
        frame->buf,
2097
        frame->buf + frame->offsets[2],
2098
        frame->buf + frame->offsets[1]
2099
    };
2100
    vdp_st = vdp_video_surface_put_bits_y_cb_cr(
2101
        pipVideoSurface,
2102
        VDP_YCBCR_FORMAT_YV12,
2103
        planes,
2104
        pitches);
2105
    CHECK_ST;
2106
2107
    VdpRect pip_rect;
2108
    pip_rect.x0 = 0;
2109
    pip_rect.y0 = 0;
2110
    pip_rect.x1 = pipFrameSize.width();
2111
    pip_rect.y1 = pipFrameSize.height();
2112
    if (ok)
2113
    {
2114
        vdp_st = vdp_video_mixer_render(
2115
            pipVideoMixer,
2116
            VDP_INVALID_HANDLE,
2117
            NULL,
2118
            VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME,
2119
            0,
2120
            NULL,
2121
            pipVideoSurface,
2122
            0,
2123
            NULL,
2124
            NULL,
2125
            pipOutputSurface,
2126
            NULL,
2127
            NULL,
2128
            0,
2129
            NULL
2130
        );
2131
        CHECK_ST
2132
    }
2133
2134
    if (ok)
2135
    {
2136
        vdp_st = vdp_output_surface_render_bitmap_surface(
2137
            pipOutputSurface,
2138
            NULL,
2139
            pipAlpha,
2140
            NULL,
2141
            NULL,
2142
            &osd_blend,
2143
            0
2144
        );
2145
        CHECK_ST
2146
    }
2147
2148
    if (ok)
2149
    {
2150
        pipReady = 2; // for double rate deint
2151
        pipPosition.x0 = position.left();
2152
        pipPosition.y0 = position.top();
2153
        pipPosition.x1 = position.left() + position.width();
2154
        pipPosition.y1 = position.top() + position.height();
2155
    }
2156
2157
    return ok;
2158
}
2159
(-)mythtv.ori/libs/libmythtv/util-vdpau.h (+197 lines)
Line 0 Link Here
1
#ifndef UTIL_VDPAU_H_
2
#define UTIL_VDPAU_H_
3
4
extern "C" {
5
#include "../libavcodec/vdpau_render.h"
6
}
7
8
#include "videobuffers.h"
9
10
class VDPAUContext
11
{
12
  public:
13
    VDPAUContext();
14
   ~VDPAUContext();
15
16
    bool Init(Display *disp, int screen, Window win,
17
              QSize screen_size, bool color_control,
18
              MythCodecID mcodecid);
19
    void Deinit(void);
20
    bool IsErrored(void) { return errored; }
21
    void SetErrored(void) { errored = true; }
22
23
    bool InitBuffers(int width, int height, int numbufs,
24
                     LetterBoxColour letterbox_colour);
25
    void FreeBuffers(void);
26
    void *GetRenderData(int i) 
27
    { if (i < numSurfaces && i >= 0) return (void*)&(surface_render[i]); 
28
      return NULL;
29
    }
30
    int GetNumBufs(void) { return numSurfaces; }
31
32
    bool InitOutput(QSize size);
33
    void FreeOutput(void);
34
35
    void Decode(VideoFrame *frame);
36
37
    void PrepareVideo(VideoFrame *frame, QRect video_rect,
38
                      QRect display_video_rect,
39
                      QSize screen_size, FrameScanType scan,
40
                      bool pause_frame);
41
    void DisplayNextFrame(void);
42
    void SetNextFrameDisplayTimeOffset(int delayus);
43
    bool InitOSD(QSize size);
44
    void UpdateOSD(void* const planes[3], QSize size,
45
                   void* const alpha[1]);
46
    void DisableOSD(void) { osdReady = false; }
47
    void DeinitOSD(void);
48
49
    bool SetDeinterlacer(const QString &deint);
50
    bool SetDeinterlacing(bool interlaced);
51
    QString GetDeinterlacer(void) const
52
            { return deinterlacer; }
53
    bool IsBeingUsed(VideoFrame * frame);
54
    void ClearReferenceFrames(void) { referenceFrames.clear(); }
55
56
    static bool CheckCodecSupported(MythCodecID myth_codec_id);
57
    PictureAttributeSupported  GetSupportedPictureAttributes(void) const;
58
    int SetPictureAttribute(PictureAttribute attributeType, int newValue);
59
60
    bool ShowPiP(VideoFrame * frame, QRect position);
61
62
  private:
63
    bool InitProcs(Display *disp, int screen);
64
    void DeinitProcs(void);
65
    void ClearScreen(void);
66
67
    bool InitFlipQueue(Window win);
68
    void DeinitFlipQueue(void);
69
70
    void AddOutputSurfaces(void);
71
    bool UpdateReferenceFrames(VideoFrame *frame);
72
    bool InitColorControl(void);
73
    bool SetPictureAttributes(void);
74
75
    bool InitPiP(QSize vid_size);
76
    void DeinitPip(void);
77
78
    int nextframedelay;
79
    VdpTime lastframetime;
80
81
    int pix_fmt;
82
83
    uint maxVideoWidth;
84
    uint maxVideoHeight;
85
    VdpVideoSurface *videoSurfaces;
86
    vdpau_render_state_t *surface_render;
87
    int checkVideoSurfaces;
88
    int numSurfaces;
89
90
    vector<VdpOutputSurface> outputSurfaces;
91
    VdpVideoSurface  videoSurface;
92
    VdpOutputSurface outputSurface;
93
    bool             checkOutputSurfaces;
94
    QSize            outputSize;
95
96
    VdpDecoder decoder;
97
    uint32_t   maxReferences;
98
    VdpVideoMixer videoMixer;
99
100
    VdpRect outRect;
101
    VdpRect outRectVid;
102
103
    int surfaceNum;
104
105
    VdpVideoSurface   osdVideoSurface;
106
    VdpOutputSurface  osdOutputSurface;
107
    VdpVideoMixer     osdVideoMixer;
108
    VdpBitmapSurface  osdAlpha;
109
    VdpLayer          osdLayer;
110
    VdpRect           osdRect;
111
    bool              osdReady;
112
    QSize             osdSize;
113
114
    bool              deintAvail;
115
    QString           deinterlacer;
116
    bool              deinterlacing;
117
    long long         currentFrameNum;
118
    frame_queue_t     referenceFrames;
119
    bool              needDeintRefs;
120
121
    bool              useColorControl;
122
    VdpCSCMatrix      cscMatrix;
123
    VdpProcamp        proCamp;
124
125
    QSize             pipFrameSize;
126
    VdpLayer          pipLayer;
127
    VdpVideoSurface   pipVideoSurface;
128
    VdpOutputSurface  pipOutputSurface;
129
    VdpVideoMixer     pipVideoMixer;
130
    int               pipReady;
131
    VdpRect           pipPosition;
132
    VdpBitmapSurface  pipAlpha;
133
134
    VdpPresentationQueueTarget vdp_flip_target;
135
    VdpPresentationQueue       vdp_flip_queue;
136
137
    bool              vdpauDecode;
138
139
    VdpDevice vdp_device;
140
    bool      errored;
141
142
    VdpGetProcAddress * vdp_get_proc_address;
143
    VdpDeviceDestroy * vdp_device_destroy;
144
    VdpGetErrorString * vdp_get_error_string;
145
    VdpGetApiVersion * vdp_get_api_version;
146
    VdpGetInformationString * vdp_get_information_string;
147
148
    VdpVideoSurfaceCreate * vdp_video_surface_create;
149
    VdpVideoSurfaceDestroy * vdp_video_surface_destroy;
150
    VdpVideoSurfacePutBitsYCbCr * vdp_video_surface_put_bits_y_cb_cr;
151
    VdpVideoSurfaceGetBitsYCbCr * vdp_video_surface_get_bits_y_cb_cr;
152
    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *
153
                vdp_video_surface_query_get_put_bits_y_cb_cr_capabilities;
154
    VdpVideoSurfaceQueryCapabilities * vdp_video_surface_query_capabilities;
155
156
    VdpOutputSurfacePutBitsYCbCr * vdp_output_surface_put_bits_y_cb_cr;
157
    VdpOutputSurfacePutBitsNative * vdp_output_surface_put_bits_native;
158
    VdpOutputSurfaceCreate * vdp_output_surface_create;
159
    VdpOutputSurfaceDestroy * vdp_output_surface_destroy;
160
    VdpOutputSurfaceRenderBitmapSurface * vdp_output_surface_render_bitmap_surface;
161
    VdpOutputSurfaceQueryCapabilities * vdp_output_surface_query_capabilities;
162
163
    /* videoMixer puts videoSurface data to a displayable outputSurface. */
164
    VdpVideoMixerCreate * vdp_video_mixer_create;
165
    VdpVideoMixerSetFeatureEnables * vdp_video_mixer_set_feature_enables;
166
    VdpVideoMixerDestroy * vdp_video_mixer_destroy;
167
    VdpVideoMixerRender * vdp_video_mixer_render;
168
    VdpVideoMixerSetAttributeValues * vdp_video_mixer_set_attribute_values;
169
    VdpVideoMixerQueryFeatureSupport * vdp_video_mixer_query_feature_support;
170
    VdpVideoMixerQueryAttributeSupport * vdp_video_mixer_query_attribute_support;
171
    VdpVideoMixerQueryParameterSupport * vdp_video_mixer_query_parameter_support;
172
    VdpGenerateCSCMatrix * vdp_generate_csc_matrix;
173
174
    VdpPresentationQueueTargetDestroy * vdp_presentation_queue_target_destroy;
175
    VdpPresentationQueueCreate * vdp_presentation_queue_create;
176
    VdpPresentationQueueDestroy * vdp_presentation_queue_destroy;
177
    VdpPresentationQueueDisplay * vdp_presentation_queue_display;
178
    VdpPresentationQueueBlockUntilSurfaceIdle * vdp_presentation_queue_block_until_surface_idle;
179
    VdpPresentationQueueTargetCreateX11 * vdp_presentation_queue_target_create_x11;
180
    VdpPresentationQueueQuerySurfaceStatus * vdp_presentation_queue_query_surface_status;
181
    VdpPresentationQueueGetTime * vdp_presentation_queue_get_time;
182
    VdpPresentationQueueSetBackgroundColor * vdp_presentation_queue_set_background_color;
183
184
    VdpDecoderCreate * vdp_decoder_create;
185
    VdpDecoderDestroy * vdp_decoder_destroy;
186
    VdpDecoderRender * vdp_decoder_render;
187
188
    VdpBitmapSurfaceCreate * vdp_bitmap_surface_create;
189
    VdpBitmapSurfaceDestroy * vdp_bitmap_surface_destroy;
190
    VdpBitmapSurfacePutBitsNative * vdp_bitmap_surface_put_bits_native;
191
    VdpBitmapSurfaceQueryCapabilities * vdp_bitmap_surface_query_capabilities;
192
193
    VdpPreemptionCallbackRegister * vdp_preemption_callback_register;
194
};
195
196
#endif
197
(-)mythtv.ori/libs/libmythtv/videobuffers.cpp (-1 / +35 lines)
Lines 10-19 Link Here
10
#include "fourcc.h"
10
#include "fourcc.h"
11
#include "compat.h"
11
#include "compat.h"
12
12
13
#ifdef USING_XVMC
13
#if defined(USING_XVMC) || defined(USING_VDPAU)
14
#include "videoout_xv.h" // for xvmc stuff
14
#include "videoout_xv.h" // for xvmc stuff
15
#endif
15
#endif
16
16
17
#ifdef USING_VDPAU
18
#include "util-vdpau.h"
19
#endif
20
17
#define DEBUG_FRAME_LOCKS 0
21
#define DEBUG_FRAME_LOCKS 0
18
22
19
#define TRY_LOCK_SPINS                 100
23
#define TRY_LOCK_SPINS                 100
Lines 1157-1162 Link Here
1157
    return ok;
1161
    return ok;
1158
}
1162
}
1159
1163
1164
#ifdef USING_VDPAU
1165
bool VideoBuffers::CreateBuffers(int width, int height, VDPAUContext *ctx)
1166
{
1167
    static unsigned char *ffmpeg_vdpau_hack = (unsigned char*)
1168
        "avlib should not use this private data in VDPAU mode.";
1169
1170
    if (!ctx)
1171
        return false;
1172
1173
    if ((uint)ctx->GetNumBufs() != allocSize())
1174
    {
1175
        VERBOSE(VB_IMPORTANT, QString("VideoBuffers::CreateBuffers") +
1176
                QString("VDPAUContext buffer count %1 does not agree "
1177
                        "with the VideoBuffers buffer countr %2")
1178
                .arg(ctx->GetNumBufs()).arg(allocSize()));
1179
        return false;
1180
    }
1181
1182
    for (uint i = 0; i < allocSize(); i++)
1183
    {
1184
        init(&buffers[i],
1185
             FMT_VDPAU, (unsigned char*)ctx->GetRenderData(i),
1186
             width, height, -1, 0);
1187
        buffers[i].priv[0]      = ffmpeg_vdpau_hack;
1188
        buffers[i].priv[1]      = ffmpeg_vdpau_hack;
1189
    }
1190
    return true;
1191
}
1192
#endif
1193
1160
#ifdef USING_XVMC
1194
#ifdef USING_XVMC
1161
bool VideoBuffers::CreateBuffers(int width, int height,
1195
bool VideoBuffers::CreateBuffers(int width, int height,
1162
                                 Display *disp, 
1196
                                 Display *disp, 
(-)mythtv.ori/libs/libmythtv/videobuffers.h (+8 lines)
Lines 17-22 Link Here
17
#include <qwindowdefs.h>
17
#include <qwindowdefs.h>
18
#endif // USING_XVMC
18
#endif // USING_XVMC
19
19
20
#ifdef USING_VDPAU
21
class VDPAUContext;
22
#endif
23
20
typedef MythDeque<VideoFrame*>                frame_queue_t;
24
typedef MythDeque<VideoFrame*>                frame_queue_t;
21
typedef vector<VideoFrame>                    frame_vector_t;
25
typedef vector<VideoFrame>                    frame_vector_t;
22
typedef map<const VideoFrame*, frame_queue_t> frame_map_t;
26
typedef map<const VideoFrame*, frame_queue_t> frame_map_t;
Lines 151-156 Link Here
151
                       vector<void*> surfs);
155
                       vector<void*> surfs);
152
#endif
156
#endif
153
157
158
#ifdef USING_VDPAU
159
    bool CreateBuffers(int width, int height, VDPAUContext *ctx);
160
#endif
161
154
    QString GetStatus(int n=-1) const; // debugging method
162
    QString GetStatus(int n=-1) const; // debugging method
155
  private:
163
  private:
156
    frame_queue_t         *queue(BufferType type);
164
    frame_queue_t         *queue(BufferType type);
(-)mythtv.ori/libs/libmythtv/videodisplayprofile.cpp (-1 / +63 lines)
Lines 570-577 Link Here
570
    list += "xvmc-vld";
570
    list += "xvmc-vld";
571
    list += "macaccel";
571
    list += "macaccel";
572
    list += "ivtv";
572
    list += "ivtv";
573
    list += "vdpau";
573
574
574
    return list;
575
        return list;
575
}
576
}
576
577
577
QStringList VideoDisplayProfile::GetDecoderNames(void)
578
QStringList VideoDisplayProfile::GetDecoderNames(void)
Lines 600-605 Link Here
600
        dec_name["xvmc-vld"] = QObject::tr("VIA XvMC");
601
        dec_name["xvmc-vld"] = QObject::tr("VIA XvMC");
601
        dec_name["macaccel"] = QObject::tr("Mac hardware acceleration");
602
        dec_name["macaccel"] = QObject::tr("Mac hardware acceleration");
602
        dec_name["ivtv"]     = QObject::tr("PVR-350 decoder");
603
        dec_name["ivtv"]     = QObject::tr("PVR-350 decoder");
604
        dec_name["vdpau"]    = QObject::tr("NVidia VDPAU acceleration");
603
    }
605
    }
604
606
605
    pref_map_t::const_iterator it = dec_name.find(decoder);
607
    pref_map_t::const_iterator it = dec_name.find(decoder);
Lines 650-655 Link Here
650
            "high quality playback.  This requires that the ivtv-fb "
652
            "high quality playback.  This requires that the ivtv-fb "
651
            "kernel module is also loaded and configured properly.");
653
            "kernel module is also loaded and configured properly.");
652
654
655
    if (decoder == "vdpau")
656
        msg += QObject::tr(
657
            "VDPAU will attempt to use the graphics hardware to "
658
            "accelerate video decoding and playback.");
659
653
    return msg;
660
    return msg;
654
}
661
}
655
662
Lines 689-694 Link Here
689
        return QObject::tr("Linear blend (2x, HW)");
696
        return QObject::tr("Linear blend (2x, HW)");
690
    else if ("opengldoubleratefieldorder" == short_name)
697
    else if ("opengldoubleratefieldorder" == short_name)
691
        return QObject::tr("Interlaced (2x, Hw)");
698
        return QObject::tr("Interlaced (2x, Hw)");
699
    else if ("opengldoublerateyadif" == short_name)
700
        return QObject::tr("Yadif (2x, Hw)");
701
    else if ("openglyadif" == short_name)
702
        return QObject::tr("Yadif (Hw)");
703
704
    else if ("vdpauonefield" == short_name)
705
        return QObject::tr("One Field (1x, Hw)");
706
    else if ("vdpaubobdeint" == short_name)
707
        return QObject::tr("Bob (2x, Hw)");
708
    else if ("vdpaubasic" == short_name)
709
        return QObject::tr("Temporal (1x, Hw)");
710
    else if ("vdpaubasicdoublerate" == short_name)
711
        return QObject::tr("Temporal (2x, Hw)");
712
    else if ("vdpauadvanced" == short_name)
713
        return QObject::tr("Advanced (1x, Hw)");
714
    else if ("vdpauadvanceddoublerate" == short_name)
715
        return QObject::tr("Advanced (2x, Hw)");
716
692
    return "";
717
    return "";
693
}
718
}
694
719
Lines 1156-1161 Link Here
1156
            "additional resources.");
1181
            "additional resources.");
1157
    }
1182
    }
1158
1183
1184
    if (renderer == "vdpau")
1185
    {
1186
        msg = QObject::tr(
1187
            "This is the only video renderer for NVidia VDPAU decoding.");
1188
    }
1189
1159
    return msg;
1190
    return msg;
1160
}
1191
}
1161
1192
Lines 1245-1250 Link Here
1245
        msg = kLinearBlendMsg + " " + kUsingOpenGL;
1276
        msg = kLinearBlendMsg + " " + kUsingOpenGL;
1246
    else if (deint == "openglkerneldeint")
1277
    else if (deint == "openglkerneldeint")
1247
        msg = kKernelMsg + " " + kUsingOpenGL;
1278
        msg = kKernelMsg + " " + kUsingOpenGL;
1279
    else if (deint == "openglyadif")
1280
        msg = kYadifMsg + " " + kUsingOpenGL;
1248
    else if (deint == "opengldoubleratelinearblend")
1281
    else if (deint == "opengldoubleratelinearblend")
1249
        msg = kLinearBlendMsg + " " + kUsingOpenGLWorkaround;
1282
        msg = kLinearBlendMsg + " " + kUsingOpenGLWorkaround;
1250
    else if (deint == "opengldoublerateonefield")
1283
    else if (deint == "opengldoublerateonefield")
Lines 1261-1266 Link Here
1261
        msg = kYadifMsg;
1294
        msg = kYadifMsg;
1262
    else if (deint == "yadifdoubleprocessdeint")
1295
    else if (deint == "yadifdoubleprocessdeint")
1263
        msg = kYadifMsg + " " +  kDoubleRateMsg;
1296
        msg = kYadifMsg + " " +  kDoubleRateMsg;
1297
    else if (deint == "opengldoublerateyadif")
1298
        msg = kYadifMsg + " " +  kUsingOpenGLWorkaround;
1264
    else
1299
    else
1265
        msg = QObject::tr("'%1' has not been documented yet.").arg(deint);
1300
        msg = QObject::tr("'%1' has not been documented yet.").arg(deint);
1266
1301
Lines 1397-1402 Link Here
1397
"xvmc-vld"
1432
"xvmc-vld"
1398
"macaccel"
1433
"macaccel"
1399
"ivtv"
1434
"ivtv"
1435
"vdpau"
1400
1436
1401
// Video Renderers
1437
// Video Renderers
1402
"null"
1438
"null"
Lines 1411-1416 Link Here
1411
"quartz-accel"
1447
"quartz-accel"
1412
"ivtv"
1448
"ivtv"
1413
"opengl"
1449
"opengl"
1450
"vdpau"
1414
1451
1415
// OSD Renderers
1452
// OSD Renderers
1416
"chromakey"
1453
"chromakey"
Lines 1420-1425 Link Here
1420
"opengl"
1457
"opengl"
1421
"opengl2"
1458
"opengl2"
1422
"opengl3"
1459
"opengl3"
1460
"vdpau"
1423
1461
1424
// deinterlacers
1462
// deinterlacers
1425
"none"
1463
"none"
Lines 1435-1444 Link Here
1435
"openglkerneldeint"
1473
"openglkerneldeint"
1436
"openglonefield"
1474
"openglonefield"
1437
"openglbobdeint"
1475
"openglbobdeint"
1476
"openglyadif"
1438
"opengldoubleratelinearblend"
1477
"opengldoubleratelinearblend"
1439
"opengldoublerateonefield"
1478
"opengldoublerateonefield"
1440
"opengldoubleratekerneldeint"
1479
"opengldoubleratekerneldeint"
1441
"opengldoubleratefieldorder"
1480
"opengldoubleratefieldorder"
1481
"opengldoublerateyadif"
1482
"vdpauonefield"
1483
"vdpaubobdeint"
1484
"vdpaubasic"
1485
"vdpauadvanced"
1486
"vdpaubasicdoublerate"
1487
"vdpauadvanceddoublerate"
1442
*/
1488
*/
1443
1489
1444
void VideoDisplayProfile::init_statics(void)
1490
void VideoDisplayProfile::init_statics(void)
Lines 1495-1500 Link Here
1495
    safe_deint["opengl"] += "opengldoublerateonefield";
1541
    safe_deint["opengl"] += "opengldoublerateonefield";
1496
    safe_deint["opengl"] += "opengldoubleratekerneldeint";
1542
    safe_deint["opengl"] += "opengldoubleratekerneldeint";
1497
    safe_deint["opengl"] += "opengldoubleratefieldorder";
1543
    safe_deint["opengl"] += "opengldoubleratefieldorder";
1544
    safe_deint["opengl"] += "opengldoublerateyadif";
1545
    safe_deint["opengl"] += "openglyadif";
1546
1547
1548
    safe_deint["vdpau"] += "none";
1549
    safe_deint["vdpau"] += "vdpauonefield";
1550
    safe_deint["vdpau"] += "vdpaubobdeint";
1551
    safe_deint["vdpau"] += "vdpaubasic";
1552
    safe_deint["vdpau"] += "vdpauadvanced";
1553
    safe_deint["vdpau"] += "vdpaubasicdoublerate";
1554
    safe_deint["vdpau"] += "vdpauadvanceddoublerate";
1498
1555
1499
    safe_osd["xv-blit"]     += "softblend";
1556
    safe_osd["xv-blit"]     += "softblend";
1500
    safe_osd["xvmc-blit"]   += "chromakey";
1557
    safe_osd["xvmc-blit"]   += "chromakey";
Lines 1503-1508 Link Here
1503
    safe_osd["ivtv"]        += "ivtv";
1560
    safe_osd["ivtv"]        += "ivtv";
1504
    safe_osd["opengl"]      += "opengl2";
1561
    safe_osd["opengl"]      += "opengl2";
1505
    safe_osd["quartz-accel"]+= "opengl3";
1562
    safe_osd["quartz-accel"]+= "opengl3";
1563
    safe_osd["vdpau"]       += "vdpau";
1506
1564
1507
    // These video renderers do not support deinterlacing in MythTV
1565
    // These video renderers do not support deinterlacing in MythTV
1508
    safe_deint["quartz-accel"] += "none";
1566
    safe_deint["quartz-accel"] += "none";
Lines 1531-1540 Link Here
1531
    safe_renderer["xvmc-vld"] += "xvmc-blit";
1589
    safe_renderer["xvmc-vld"] += "xvmc-blit";
1532
    safe_renderer["dummy"]    += "xvmc-opengl";
1590
    safe_renderer["dummy"]    += "xvmc-opengl";
1533
    safe_renderer["xvmc"]     += "xvmc-opengl";
1591
    safe_renderer["xvmc"]     += "xvmc-opengl";
1592
    safe_renderer["ffmpeg"]   += "vdpau";
1534
1593
1535
    safe_renderer["dummy"]    += "quartz-accel";
1594
    safe_renderer["dummy"]    += "quartz-accel";
1536
    safe_renderer["macaccel"] += "quartz-accel";
1595
    safe_renderer["macaccel"] += "quartz-accel";
1537
    safe_renderer["ivtv"]     += "ivtv";
1596
    safe_renderer["ivtv"]     += "ivtv";
1597
    safe_renderer["vdpau"]    += "vdpau";
1538
1598
1539
    safe_renderer_priority["null"]         =  10;
1599
    safe_renderer_priority["null"]         =  10;
1540
    safe_renderer_priority["xlib"]         =  20;
1600
    safe_renderer_priority["xlib"]         =  20;
Lines 1542-1547 Link Here
1542
    safe_renderer_priority["xv-blit"]      =  90;
1602
    safe_renderer_priority["xv-blit"]      =  90;
1543
    safe_renderer_priority["xvmc-blit"]    = 110;
1603
    safe_renderer_priority["xvmc-blit"]    = 110;
1544
    safe_renderer_priority["xvmc-opengl"]  = 100;
1604
    safe_renderer_priority["xvmc-opengl"]  = 100;
1605
    safe_renderer_priority["vdpau"]        = 120;
1545
    safe_renderer_priority["directfb"]     =  60;
1606
    safe_renderer_priority["directfb"]     =  60;
1546
    safe_renderer_priority["directx"]      =  50;
1607
    safe_renderer_priority["directx"]      =  50;
1547
    safe_renderer_priority["direct3d"]     =  55;
1608
    safe_renderer_priority["direct3d"]     =  55;
Lines 1559-1562 Link Here
1559
    safe_equiv_dec["xvmc-vld"] += "dummy";
1620
    safe_equiv_dec["xvmc-vld"] += "dummy";
1560
    safe_equiv_dec["macaccel"] += "dummy";
1621
    safe_equiv_dec["macaccel"] += "dummy";
1561
    safe_equiv_dec["ivtv"]     += "dummy";
1622
    safe_equiv_dec["ivtv"]     += "dummy";
1623
    safe_equiv_dec["vdpau"]    += "dummy";
1562
}
1624
}
(-)mythtv.ori/libs/libmythtv/videoout_xv.cpp (-163 / +659 lines)
Lines 38-50 Link Here
38
#include "xvmctextures.h"
38
#include "xvmctextures.h"
39
39
40
// MythTV General headers
40
// MythTV General headers
41
#include "mythconfig.h"
42
#include "mythcontext.h"
41
#include "mythcontext.h"
42
#include "mythverbose.h"
43
#include "filtermanager.h"
43
#include "filtermanager.h"
44
#include "videodisplayprofile.h"
44
#include "videodisplayprofile.h"
45
#define IGNORE_TV_PLAY_REC
45
#define IGNORE_TV_PLAY_REC
46
#include "tv.h"
46
#include "tv.h"
47
#include "fourcc.h"
47
#include "fourcc.h"
48
#include "mythmainwindow.h"
49
50
#ifdef USING_VDPAU
51
#include "util-vdpau.h"
52
#endif
48
53
49
// MythTV OpenGL headers
54
// MythTV OpenGL headers
50
#include "openglcontext.h"
55
#include "openglcontext.h"
Lines 71-88 Link Here
71
#define XVMC_CHROMA_FORMAT_420 0x00000001
76
#define XVMC_CHROMA_FORMAT_420 0x00000001
72
#endif
77
#endif
73
78
74
static QStringList allowed_video_renderers(MythCodecID codec_id,
79
#define NUM_VDPAU_BUFFERS 17
75
                                           Display *XJ_disp);
80
81
static QStringList allowed_video_renderers(
82
    MythCodecID codec_id, Display *display, int screen, Window curwin);
76
83
77
static void SetFromEnv(bool &useXvVLD, bool &useXvIDCT, bool &useXvMC,
84
static void SetFromEnv(bool &useXvVLD, bool &useXvIDCT, bool &useXvMC,
78
                       bool &useXV, bool &useShm, bool &useOpenGL);
85
                       bool &useXV, bool &useShm, bool &useOpenGL,
79
static void SetFromHW(Display *d, bool &useXvMC, bool &useXV,
86
                       bool &useVDPAU);
80
                      bool &useShm, bool &useXvMCOpenGL, bool &useOpenGL);
87
static void SetFromHW(Display *d, int screen, Window curwin,
88
                      bool &useXvMC, bool &useXV,
89
                      bool &useShm, bool &useXvMCOpenGL,
90
                      bool &useOpenGL, bool &useVDPAU,
91
                      MythCodecID myth_codec_id);
81
static int calc_hue_base(const QString &adaptor_name);
92
static int calc_hue_base(const QString &adaptor_name);
82
93
83
const char *vr_str[] =
94
const char *vr_str[] =
84
{
95
{
85
    "unknown", "xlib", "xshm", "opengl", "xv-blit", "xvmc", "xvmc", "xvmc",
96
    "unknown", "xlib", "xshm", "opengl", "xv-blit", "vdpau", "xvmc", "xvmc",
97
    "xvmc",
86
};
98
};
87
99
88
/** \class  VideoOutputXv
100
/** \class  VideoOutputXv
Lines 116-126 Link Here
116
      xvmc_osd_lock(false),
128
      xvmc_osd_lock(false),
117
      xvmc_tex(NULL),
129
      xvmc_tex(NULL),
118
130
131
#ifdef USING_VDPAU
132
      vdpau(NULL),
133
#endif
134
      vdpau_use_osd(false), vdpau_use_pip(true),
135
119
      xv_port(-1),      xv_hue_base(0),
136
      xv_port(-1),      xv_hue_base(0),
120
      xv_colorkey(0),   xv_draw_colorkey(false),
137
      xv_colorkey(0),   xv_draw_colorkey(false),
121
      xv_chroma(0),
138
      xv_chroma(0),
122
139
123
      gl_context_lock(false), gl_context(NULL),
140
      gl_context_lock(true), gl_context(NULL),
124
      gl_videochain(NULL), gl_pipchain(NULL),
141
      gl_videochain(NULL), gl_pipchain(NULL),
125
      gl_osdchain(NULL),
142
      gl_osdchain(NULL),
126
143
Lines 215-223 Link Here
215
        needrepaint = true;
232
        needrepaint = true;
216
    }
233
    }
217
234
218
    if (gl_videochain)
235
    if (gl_videochain && gl_context)
219
    {
236
    {
220
        QMutexLocker locker(&gl_context_lock);
237
        OpenGLContextLocker ctx_lock(gl_context);
221
        gl_videochain->SetVideoRect(display_video_rect, video_rect);
238
        gl_videochain->SetVideoRect(display_video_rect, video_rect);
222
    }
239
    }
223
}
240
}
Lines 237-242 Link Here
237
    bool cid_changed = (myth_codec_id != av_codec_id);
254
    bool cid_changed = (myth_codec_id != av_codec_id);
238
    bool res_changed = input_size != video_disp_dim;
255
    bool res_changed = input_size != video_disp_dim;
239
    bool asp_changed = aspect != video_aspect;
256
    bool asp_changed = aspect != video_aspect;
257
    bool gpu_deint   = (VideoOutputSubType() == OpenGL ||
258
                        VideoOutputSubType() == XVideoVDPAU);
240
259
241
    VideoOutput::InputChanged(input_size, aspect, av_codec_id, codec_private);
260
    VideoOutput::InputChanged(input_size, aspect, av_codec_id, codec_private);
242
261
Lines 251-262 Link Here
251
270
252
    bool ok = true;
271
    bool ok = true;
253
272
254
    DeleteBuffers(VideoOutputSubType(),
273
    bool delete_pause_frame = cid_changed || (OpenGL == VideoOutputSubType());
255
                  cid_changed || (OpenGL == VideoOutputSubType()));
274
    DeleteBuffers(VideoOutputSubType(), delete_pause_frame);
275
256
    ResizeForVideo((uint) video_disp_dim.width(),
276
    ResizeForVideo((uint) video_disp_dim.width(),
257
                   (uint) video_disp_dim.height());
277
                   (uint) video_disp_dim.height());
258
278
259
    if (cid_changed && (OpenGL != VideoOutputSubType()))
279
    if (cid_changed ||
280
        XVideoVDPAU == VideoOutputSubType() || OpenGL == VideoOutputSubType())
260
    {
281
    {
261
        myth_codec_id = av_codec_id;
282
        myth_codec_id = av_codec_id;
262
283
Lines 273-285 Link Here
273
294
274
        ok = InitSetupBuffers();
295
        ok = InitSetupBuffers();
275
    }
296
    }
276
    else if (OpenGL != VideoOutputSubType())
297
    else
277
        ok = CreateBuffers(VideoOutputSubType());
278
279
    if (OpenGL == VideoOutputSubType())
280
    {
298
    {
281
        myth_codec_id = av_codec_id;
299
        ok = CreateBuffers(VideoOutputSubType());
282
        ok = InitSetupBuffers();
283
    }
300
    }
284
301
285
    MoveResize();
302
    MoveResize();
Lines 290-295 Link Here
290
                "Failed to recreate buffers");
307
                "Failed to recreate buffers");
291
        errored = true;
308
        errored = true;
292
    }
309
    }
310
    else
311
    {
312
        if ((VideoOutputSubType() == OpenGL ||
313
             VideoOutputSubType() == XVideoVDPAU) ||
314
             gpu_deint)
315
        {
316
            BestDeint();
317
        }
318
    }
293
319
294
    return ok;
320
    return ok;
295
}
321
}
Lines 302-309 Link Here
302
    QSize dvr2 = QSize(display_visible_rect.width()  & ~0x3,
328
    QSize dvr2 = QSize(display_visible_rect.width()  & ~0x3,
303
                       display_visible_rect.height() & ~0x1);
329
                       display_visible_rect.height() & ~0x1);
304
330
305
    if (!chroma_osd && !gl_use_osd_opengl2)
331
    if (!chroma_osd && !gl_use_osd_opengl2 && !vdpau_use_osd)
306
        return VideoOutput::GetVisibleOSDBounds(visible_aspect, font_scaling, themeaspect);
332
    {
333
        return VideoOutput::GetVisibleOSDBounds(
334
            visible_aspect, font_scaling, themeaspect);
335
    }
307
336
308
    float dispPixelAdj = 1.0f;
337
    float dispPixelAdj = 1.0f;
309
    if (dvr2.height() && dvr2.width())
338
    if (dvr2.height() && dvr2.width())
Lines 319-325 Link Here
319
    QSize dvr2 = QSize(display_visible_rect.width()  & ~0x3,
348
    QSize dvr2 = QSize(display_visible_rect.width()  & ~0x3,
320
                       display_visible_rect.height() & ~0x1);
349
                       display_visible_rect.height() & ~0x1);
321
350
322
    QSize sz = (chroma_osd || gl_use_osd_opengl2) ? dvr2 : video_disp_dim;
351
    QSize sz = (chroma_osd || gl_use_osd_opengl2 || vdpau_use_osd) ?
352
                dvr2 : video_disp_dim;
323
    return QRect(QPoint(0,0), sz);
353
    return QRect(QPoint(0,0), sz);
324
}
354
}
325
355
Lines 854-871 Link Here
854
 */
884
 */
855
bool VideoOutputXv::InitVideoBuffers(MythCodecID mcodecid,
885
bool VideoOutputXv::InitVideoBuffers(MythCodecID mcodecid,
856
                                     bool use_xv, bool use_shm,
886
                                     bool use_xv, bool use_shm,
857
                                     bool use_opengl)
887
                                     bool use_opengl, bool use_vdpau)
858
{
888
{
859
    (void)mcodecid;
889
    (void)mcodecid;
860
890
861
    bool done = false;
891
    bool done = false;
892
893
#ifdef USING_VDPAU
894
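    // VDPAU is used both for GPU-decoded codecs and for software-decoded
    // frames that are only rendered through VDPAU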
    if (((kCodec_VDPAU_BEGIN < mcodecid) && (mcodecid < kCodec_VDPAU_END)) ||
895
         mcodecid < kCodec_NORMAL_END)
896
    {
897
        if (use_vdpau)
898
        {
899
            vbuffers.Init(NUM_VDPAU_BUFFERS, false, 1, 4, 4, 1, false);
900
            done = InitVDPAU(mcodecid);
901
            if (!done)
902
                vbuffers.Reset();
903
        }
904
    }
905
#endif
906
862
    // If use_xvmc try to create XvMC buffers
907
    // If use_xvmc try to create XvMC buffers
863
#ifdef USING_XVMC
908
#ifdef USING_XVMC
864
    if (mcodecid > kCodec_NORMAL_END)
909
    if (!done && (kCodec_STD_XVMC_BEGIN < mcodecid) &&
910
        (mcodecid < kCodec_VLD_END))
865
    {
911
    {
866
        // Create ffmpeg VideoFrames
912
        // Create ffmpeg VideoFrames
867
        bool vld, idct, mc;
913
        bool vld, idct, mc, vdpau;
868
        myth2av_codecid(myth_codec_id, vld, idct, mc);
914
        myth2av_codecid(myth_codec_id, vld, idct, mc, vdpau);
869
915
870
        vbuffers.Init(xvmc_buf_attr->GetNumSurf(),
916
        vbuffers.Init(xvmc_buf_attr->GetNumSurf(),
871
                      false /* create an extra frame for pause? */,
917
                      false /* create an extra frame for pause? */,
Lines 883-888 Link Here
883
    }
929
    }
884
#endif // USING_XVMC
930
#endif // USING_XVMC
885
931
932
    if (!done && mcodecid >= kCodec_NORMAL_END)
933
    {
934
        VERBOSE(VB_IMPORTANT, LOC_ERR +
935
                QString("Failed to initialize buffers for codec %1")
936
                .arg(toString(mcodecid)));
937
        return false;
938
    }
939
886
    // Create ffmpeg VideoFrames
940
    // Create ffmpeg VideoFrames
887
    if (!done)
941
    if (!done)
888
        vbuffers.Init(31, true, 1, 12, 4, 2, false);
942
        vbuffers.Init(31, true, 1, 12, 4, 2, false);
Lines 918-950 Link Here
918
#ifdef USING_OPENGL_VIDEO
972
#ifdef USING_OPENGL_VIDEO
919
    ok = gl_context;
973
    ok = gl_context;
920
974
921
    gl_context_lock.lock();    
975
    gl_context_lock.lock();
922
976
923
    if (!ok)
977
    if (!ok)
924
    {
978
    {
925
        gl_context = new OpenGLContext();
979
        gl_context = new OpenGLContext(&gl_context_lock);
926
980
927
        ok = gl_context->Create(
981
        ok = gl_context->Create(
928
            XJ_disp, XJ_win, XJ_screen_num,
982
            XJ_disp, XJ_win, XJ_screen_num,
929
            display_visible_rect.size(), true);
983
            display_visible_rect,
984
            db_use_picture_controls);
930
    }
985
    }
931
986
987
    gl_context_lock.unlock();
988
932
    if (ok)
989
    if (ok)
933
    {
990
    {
991
        OpenGLContextLocker ctx_lock(gl_context);
934
        gl_context->Show();
992
        gl_context->Show();
935
        gl_context->MakeCurrent(true);
936
        gl_videochain = new OpenGLVideo();
993
        gl_videochain = new OpenGLVideo();
937
        ok = gl_videochain->Init(gl_context, db_use_picture_controls,
994
        ok = gl_videochain->Init(gl_context, db_use_picture_controls,
938
                                 true, video_dim,
995
                                 video_dim, display_visible_rect,
939
                                 display_visible_rect,
996
                                 display_video_rect, video_rect, true,
940
                                 display_video_rect, video_rect, true);
997
                                 GetFilters());
941
        gl_context->MakeCurrent(false);
942
    }
998
    }
943
999
944
    gl_context_lock.unlock();
945
946
    if (ok)
1000
    if (ok)
947
    {
1001
    {
1002
        OpenGLContextLocker ctx_lock(gl_context);
948
        InstallXErrorHandler(XJ_disp);
1003
        InstallXErrorHandler(XJ_disp);
949
1004
950
        ok = CreateBuffers(OpenGL);
1005
        ok = CreateBuffers(OpenGL);
Lines 963-985 Link Here
963
        {
1018
        {
964
            video_output_subtype = OpenGL;
1019
            video_output_subtype = OpenGL;
965
            allowpreviewepg = false;
1020
            allowpreviewepg = false;
966
967
            // ensure deinterlacing is re-enabled after input change
968
            bool temp_deinterlacing = m_deinterlacing;
969
970
            if (!m_deintfiltername.isEmpty() &&
971
                !m_deintfiltername.contains("opengl"))
972
            {
973
                QMutexLocker locker(&gl_context_lock);
974
                gl_videochain->SetSoftwareDeinterlacer(m_deintfiltername);
975
            }
976
977
            SetDeinterlacingEnabled(true);
978
979
            if (!temp_deinterlacing)
980
            {
981
                SetDeinterlacingEnabled(false);
982
            }
983
        }
1021
        }
984
    }
1022
    }
985
1023
Lines 1075-1080 Link Here
1075
#endif // USING_XVMC
1113
#endif // USING_XVMC
1076
}
1114
}
1077
1115
1116
/**
1117
 * \fn VideoOutputXv::InitVDPAU(MythCodecID)
1118
 *  Creates and initializes video buffers.
1119
 *
1120
 * \sideeffect sets video_output_subtype if it succeeds.
1121
 *
1122
 * \return success or failure at creating any buffers.
1123
 */
1124
bool VideoOutputXv::InitVDPAU(MythCodecID mcodecid)
1125
{
1126
    (void)mcodecid;
1127
#ifdef USING_VDPAU
1128
    vdpau = new VDPAUContext();
1129
1130
    bool ok = vdpau->Init(XJ_disp, XJ_screen_num, XJ_curwin,
1131
                          display_visible_rect.size(),
1132
                          db_use_picture_controls, mcodecid);
1133
    if (!ok)
1134
    {
1135
        VERBOSE(VB_IMPORTANT, "Unable to init VDPAU");
1136
        vdpau->Deinit();
1137
        delete vdpau;
1138
        vdpau = NULL;
1139
        return ok;
1140
    }
1141
1142
    ok = CreateVDPAUBuffers();
1143
    if (!ok)
1144
    {
1145
        VERBOSE(VB_IMPORTANT, "Unable to create VDPAU buffers");
1146
        DeleteBuffers(XVideoVDPAU, false);
1147
        return ok;
1148
    }
1149
    else
1150
    {
1151
        VERBOSE(VB_PLAYBACK, LOC +
1152
            QString("Created VDPAU context (%1 decode)")
1153
            .arg((mcodecid < kCodec_NORMAL_END) ? "software" : "GPU"));
1154
    }
1155
1156
    video_output_subtype = XVideoVDPAU;            
1157
    allowpreviewepg = false;
1158
    return ok;
1159
#else // USING_VDPAU
1160
    return false;
1161
#endif // USING_VDPAU
1162
}
1163
1078
static bool has_format(XvImageFormatValues *formats, int format_cnt, int id)
1164
static bool has_format(XvImageFormatValues *formats, int format_cnt, int id)
1079
{
1165
{
1080
    for (int i = 0; i < format_cnt; i++)
1166
    for (int i = 0; i < format_cnt; i++)
Lines 1271-1302 Link Here
1271
1357
1272
    if (force_xv)
1358
    if (force_xv)
1273
        return (MythCodecID)(kCodec_MPEG1 + (stream_type-1));
1359
        return (MythCodecID)(kCodec_MPEG1 + (stream_type-1));
1274
#ifdef USING_XVMC
1360
1361
#if defined(USING_XVMC) || defined(USING_VDPAU)
1275
    VideoDisplayProfile vdp;
1362
    VideoDisplayProfile vdp;
1276
    vdp.SetInput(QSize(width, height));
1363
    vdp.SetInput(QSize(width, height));
1277
    QString dec = vdp.GetDecoder();
1364
    QString dec = vdp.GetDecoder();
1278
    if ((dec == "libmpeg2") || (dec == "ffmpeg"))
1365
    if ((dec == "libmpeg2") || (dec == "ffmpeg"))
1279
        return (MythCodecID)(kCodec_MPEG1 + (stream_type-1));
1366
        return (MythCodecID)(kCodec_MPEG1 + (stream_type-1));
1280
1367
1281
    Display *disp = MythXOpenDisplay();
1282
1283
    // Disable features based on environment and DB values.
1368
    // Disable features based on environment and DB values.
1284
    bool use_xvmc_vld = false, use_xvmc_idct = false, use_xvmc = false;
1369
    bool use_xvmc_vld = false, use_xvmc_idct = false, use_xvmc = false;
1285
    bool use_xv = true, use_shm = true, use_opengl = true;
1370
    bool use_xv = true, use_shm = true, use_opengl = true;
1371
    bool use_vdpau = false;
1286
1372
1287
    if (dec == "xvmc")
1373
    if (dec == "xvmc")
1288
        use_xvmc_idct = use_xvmc = true;
1374
        use_xvmc_idct = use_xvmc = true;
1289
    else if (dec == "xvmc-vld")
1375
    else if (dec == "xvmc-vld")
1290
        use_xvmc_vld = use_xvmc = true;
1376
        use_xvmc_vld = use_xvmc = true;
1377
    else if (dec == "vdpau")
1378
        use_vdpau = true;
1291
1379
1292
    SetFromEnv(use_xvmc_vld, use_xvmc_idct, use_xvmc, use_xv,
1380
    SetFromEnv(use_xvmc_vld, use_xvmc_idct, use_xvmc, use_xv,
1293
               use_shm, use_opengl);
1381
               use_shm, use_opengl, use_vdpau);
1294
1382
1295
    // Disable features based on hardware capabilities.
1383
    // Disable features based on hardware capabilities.
1296
    bool use_xvmc_opengl = use_xvmc;
1384
    bool use_xvmc_opengl = use_xvmc;
1297
    SetFromHW(disp, use_xvmc, use_xv, use_shm, use_xvmc_opengl, use_opengl);
1385
    Display *disp = MythXOpenDisplay();
1386
    X11L;
1387
    int screen  = DefaultScreen(disp);
1388
    Window root = DefaultRootWindow(disp);
1389
    X11U;
1390
    SetFromHW(disp, screen, root, use_xvmc, use_xv, use_shm,
1391
              use_xvmc_opengl, use_opengl, use_vdpau,
1392
              (MythCodecID)(kCodec_MPEG1_VDPAU + (stream_type-1)));
1298
1393
1299
    MythCodecID ret = (MythCodecID)(kCodec_MPEG1 + (stream_type-1));
1394
    MythCodecID ret = (MythCodecID)(kCodec_MPEG1 + (stream_type-1));
1395
#ifdef USING_XVMC
1300
    if (use_xvmc_vld &&
1396
    if (use_xvmc_vld &&
1301
        XvMCSurfaceTypes::has(disp, XvVLD, stream_type, xvmc_chroma,
1397
        XvMCSurfaceTypes::has(disp, XvVLD, stream_type, xvmc_chroma,
1302
                              width, height, osd_width, osd_height))
1398
                              width, height, osd_width, osd_height))
Lines 1319-1329 Link Here
1319
    bool ok = true;
1415
    bool ok = true;
1320
    if (test_surface && ret > kCodec_NORMAL_END)
1416
    if (test_surface && ret > kCodec_NORMAL_END)
1321
    {
1417
    {
1322
        Window root;
1323
        XvMCSurfaceInfo info;
1418
        XvMCSurfaceInfo info;
1324
1419
1325
        ok = false;
1420
        ok = false;
1326
        X11S(root = DefaultRootWindow(disp));
1327
        int port = GrabSuitableXvPort(disp, root, ret, width, height,
1421
        int port = GrabSuitableXvPort(disp, root, ret, width, height,
1328
                                      xvmc_chroma, &info);
1422
                                      xvmc_chroma, &info);
1329
        if (port >= 0)
1423
        if (port >= 0)
Lines 1340-1346 Link Here
1340
            X11U;
1434
            X11U;
1341
        }
1435
        }
1342
    }
1436
    }
1343
    X11S(XCloseDisplay(disp));
1344
    X11S(ok |= cnt_open_xv_port() > 0); // also ok if we already opened port..
1437
    X11S(ok |= cnt_open_xv_port() > 0); // also ok if we already opened port..
1345
1438
1346
    if (!ok)
1439
    if (!ok)
Lines 1357-1373 Link Here
1357
                "\t\t\tvendor's XvMC library.\n";
1450
                "\t\t\tvendor's XvMC library.\n";
1358
#endif // USING_XVMCW
1451
#endif // USING_XVMCW
1359
        VERBOSE(VB_IMPORTANT, msg);
1452
        VERBOSE(VB_IMPORTANT, msg);
1360
        ret = (MythCodecID)(kCodec_MPEG1 + (stream_type-1));
1361
    }
1453
    }
1454
#endif // USING_XVMC
1455
1456
    X11S(XCloseDisplay(disp));
1457
1458
#ifdef USING_VDPAU
1459
    if (use_vdpau)
1460
        ret = (MythCodecID)(kCodec_MPEG1_VDPAU + (stream_type-1));
1461
#endif // USING_VDPAU
1362
1462
1363
    return ret;
1463
    return ret;
1364
#else // if !USING_XVMC
1464
#endif // defined(USING_XVMC) || defined(USING_VDPAU)
1365
    return (MythCodecID)(kCodec_MPEG1 + (stream_type-1));
1366
#endif // !USING_XVMC
1367
}
1465
}
1368
1466
1369
bool VideoOutputXv::InitOSD(const QString &osd_renderer)
1467
bool VideoOutputXv::InitOSD(const QString &osd_renderer)
1370
{
1468
{
1469
#ifdef USING_VDPAU
1470
    if (osd_renderer == "vdpau" && vdpau)
1471
    {
1472
        vdpau_use_osd = true;
1473
        if (!vdpau->InitOSD(GetTotalOSDBounds().size()))
1474
        {
1475
            vdpau_use_osd = false;
1476
            VERBOSE(VB_IMPORTANT, LOC + "Init VDPAU osd failed.");
1477
        }
1478
        return vdpau_use_osd;
1479
    }
1480
#endif
1371
    if (osd_renderer == "opengl")
1481
    if (osd_renderer == "opengl")
1372
    {
1482
    {
1373
        xvmc_tex = XvMCTextures::Create(
1483
        xvmc_tex = XvMCTextures::Create(
Lines 1387-1405 Link Here
1387
        return xvmc_tex;
1497
        return xvmc_tex;
1388
    }
1498
    }
1389
1499
1390
    if (osd_renderer == "opengl2")
1500
    if (osd_renderer == "opengl2" && gl_context)
1391
    {
1501
    {
1392
        QMutexLocker locker(&gl_context_lock);
1502
        OpenGLContextLocker ctx_lock(gl_context);
1393
        gl_use_osd_opengl2 = true;
1503
        gl_use_osd_opengl2 = true;
1394
1504
1395
        gl_context->MakeCurrent(true);
1396
1397
        gl_osdchain = new OpenGLVideo();
1505
        gl_osdchain = new OpenGLVideo();
1398
        if (!gl_osdchain->Init(
1506
        if (!gl_osdchain->Init(
1399
                gl_context, false, true,
1507
                gl_context, db_use_picture_controls,
1400
                GetTotalOSDBounds().size(),
1508
                GetTotalOSDBounds().size(),
1401
                GetTotalOSDBounds(), display_visible_rect, 
1509
                GetTotalOSDBounds(), display_visible_rect, 
1402
                QRect(QPoint(0, 0), GetTotalOSDBounds().size()), false, true))
1510
                QRect(QPoint(0, 0), GetTotalOSDBounds().size()), false,
1511
                GetFilters(), true))
1403
        {
1512
        {
1404
            VERBOSE(VB_PLAYBACK, LOC_ERR + 
1513
            VERBOSE(VB_PLAYBACK, LOC_ERR + 
1405
                    "InitOSD(): Failed to create OpenGL2 OSD");
1514
                    "InitOSD(): Failed to create OpenGL2 OSD");
Lines 1411-1418 Link Here
1411
        {
1520
        {
1412
            gl_osdchain->SetMasterViewport(gl_videochain->GetViewPort());
1521
            gl_osdchain->SetMasterViewport(gl_videochain->GetViewPort());
1413
        }
1522
        }
1414
1415
        gl_context->MakeCurrent(false);
1416
    }
1523
    }
1417
1524
1418
    if (osd_renderer == "chromakey")
1525
    if (osd_renderer == "chromakey")
Lines 1502-1508 Link Here
1502
{
1609
{
1503
    // Figure out what video renderer to use
1610
    // Figure out what video renderer to use
1504
    db_vdisp_profile->SetInput(video_dim);
1611
    db_vdisp_profile->SetInput(video_dim);
1505
    QStringList renderers = allowed_video_renderers(myth_codec_id, XJ_disp);
1612
    QStringList renderers = allowed_video_renderers(
1613
        myth_codec_id, XJ_disp, XJ_screen_num, XJ_curwin);
1506
    QString     renderer  = QString::null;
1614
    QString     renderer  = QString::null;
1507
1615
1508
    QString tmp = db_vdisp_profile->GetVideoRenderer();
1616
    QString tmp = db_vdisp_profile->GetVideoRenderer();
Lines 1534-1546 Link Here
1534
    bool use_xv     = (renderer.left(2) == "xv");
1642
    bool use_xv     = (renderer.left(2) == "xv");
1535
    bool use_shm    = (renderer == "xshm");
1643
    bool use_shm    = (renderer == "xshm");
1536
    bool use_opengl = (renderer == "opengl");
1644
    bool use_opengl = (renderer == "opengl");
1537
    bool ok = InitVideoBuffers(myth_codec_id, use_xv, use_shm, use_opengl);
1645
    bool use_vdpau  = (renderer == "vdpau");
1646
    bool ok = InitVideoBuffers(myth_codec_id, use_xv, use_shm,
1647
                               use_opengl, use_vdpau);
1538
    if (!ok)
1648
    if (!ok)
1539
    {
1649
    {
1540
        use_xv     = renderers.contains("xv-blit");
1650
        use_xv     = renderers.contains("xv-blit");
1541
        use_shm    = renderers.contains("xshm");
1651
        use_shm    = renderers.contains("xshm");
1542
        use_opengl = renderers.contains("opengl");
1652
        use_opengl = renderers.contains("opengl");
1543
        ok = InitVideoBuffers(myth_codec_id, use_xv, use_shm, use_opengl);
1653
        use_vdpau  = renderers.contains("vdpau");
1654
        ok = InitVideoBuffers(myth_codec_id, use_xv, use_shm,
1655
                              use_opengl, use_vdpau);
1544
    }
1656
    }
1545
    XV_INIT_FATAL_ERROR_TEST(!ok, "Failed to get any video output");
1657
    XV_INIT_FATAL_ERROR_TEST(!ok, "Failed to get any video output");
1546
1658
Lines 1550-1562 Link Here
1550
    InitOSD(osdrenderer);
1662
    InitOSD(osdrenderer);
1551
1663
1552
    // Initialize chromakeying, if we need to
1664
    // Initialize chromakeying, if we need to
1553
    if (!xvmc_tex && video_output_subtype >= XVideo)
1665
    if (!xvmc_tex && video_output_subtype >= XVideo &&
1666
        video_output_subtype != XVideoVDPAU)
1554
        InitColorKey(true);
1667
        InitColorKey(true);
1555
1668
1556
    // Check if we can actually use the OSD we want to use...
1669
    // Check if we can actually use the OSD we want to use...
1557
    if (!CheckOSDInit())
1670
    if (!CheckOSDInit())
1558
    {
1671
    {
1559
        ok = InitVideoBuffers(myth_codec_id, use_xv, use_shm, use_opengl);
1672
        ok = InitVideoBuffers(myth_codec_id, use_xv, use_shm,
1673
                              use_opengl, use_vdpau);
1560
        XV_INIT_FATAL_ERROR_TEST(!ok, "Failed to get any video output (nCK)");
1674
        XV_INIT_FATAL_ERROR_TEST(!ok, "Failed to get any video output (nCK)");
1561
    }
1675
    }
1562
1676
Lines 1703-1708 Link Here
1703
// documented in videooutbase.cpp
1817
// documented in videooutbase.cpp
1704
bool VideoOutputXv::SetDeinterlacingEnabled(bool enable)
1818
bool VideoOutputXv::SetDeinterlacingEnabled(bool enable)
1705
{
1819
{
1820
    if (VideoOutputSubType() == XVideoVDPAU)
1821
        return SetDeinterlacingEnabledVDPAU(enable);
1822
1706
    if (VideoOutputSubType() == OpenGL)
1823
    if (VideoOutputSubType() == OpenGL)
1707
        return SetDeinterlacingEnabledOpenGL(enable);
1824
        return SetDeinterlacingEnabledOpenGL(enable);
1708
1825
Lines 1714-1719 Link Here
1714
bool VideoOutputXv::SetupDeinterlace(bool interlaced,
1831
bool VideoOutputXv::SetupDeinterlace(bool interlaced,
1715
                                     const QString& overridefilter)
1832
                                     const QString& overridefilter)
1716
{
1833
{
1834
    if (VideoOutputSubType() == XVideoVDPAU)
1835
        return SetupDeinterlaceVDPAU(interlaced, overridefilter);
1836
1717
    if (VideoOutputSubType() == OpenGL)
1837
    if (VideoOutputSubType() == OpenGL)
1718
        return SetupDeinterlaceOpenGL(interlaced, overridefilter);
1838
        return SetupDeinterlaceOpenGL(interlaced, overridefilter);
1719
1839
Lines 1722-1737 Link Here
1722
    return deint;
1842
    return deint;
1723
}
1843
}
1724
1844
1845
bool VideoOutputXv::SetDeinterlacingEnabledVDPAU(bool enable)
1846
{
1847
    (void)enable;
1848
#ifdef USING_VDPAU
1849
    if (!vdpau)
1850
        return false;
1851
1852
    if (vdpau->GetDeinterlacer() != m_deintfiltername)
1853
        return SetupDeinterlace(enable);
1854
1855
    m_deinterlacing = vdpau->SetDeinterlacing(enable);
1856
#endif // USING_VDPAU
1857
    return m_deinterlacing;
1858
}
1859
1860
bool VideoOutputXv::SetupDeinterlaceVDPAU(
1861
    bool interlaced, const QString &overridefilter)
1862
{
1863
    (void)interlaced;
1864
    (void)overridefilter;
1865
#ifdef USING_VDPAU
1866
    // clear any software filters
1867
    if (m_deintFiltMan)
1868
    {
1869
        delete m_deintFiltMan;
1870
        m_deintFiltMan = NULL;
1871
    }
1872
    if (m_deintFilter)
1873
    {
1874
        delete m_deintFilter;
1875
        m_deintFilter = NULL;
1876
    }
1877
1878
    if (!vdpau)
1879
        return false;
1880
1881
    m_deintfiltername = db_vdisp_profile->GetFilteredDeint(overridefilter);
1882
    if (!m_deintfiltername.contains("vdpau"))
1883
        return false;
1884
1885
    vdpau->SetDeinterlacer(m_deintfiltername);
1886
    m_deinterlacing = vdpau->SetDeinterlacing(interlaced);
1887
#endif // USING_VDPAU
1888
    return m_deinterlacing;
1889
}
1890
1725
bool VideoOutputXv::SetDeinterlacingEnabledOpenGL(bool enable)
1891
bool VideoOutputXv::SetDeinterlacingEnabledOpenGL(bool enable)
1726
{
1892
{
1727
    (void) enable;
1893
    (void) enable;
1728
1894
1729
    if (!gl_videochain)
1895
    if (!gl_videochain || !gl_context)
1730
        return false;
1896
        return false;
1731
1897
1732
    if (enable && m_deinterlacing && (OpenGL != VideoOutputSubType()))
1898
    if (enable && m_deinterlacing && (OpenGL != VideoOutputSubType()))
1733
        return m_deinterlacing;
1899
        return m_deinterlacing;
1734
1900
1901
    OpenGLContextLocker ctx_lock(gl_context);
1902
1735
    if (enable)
1903
    if (enable)
1736
    {
1904
    {
1737
        if (m_deintfiltername == "")
1905
        if (m_deintfiltername == "")
Lines 1744-1752 Link Here
1744
        else if (!m_deintfiltername.contains("opengl"))
1912
        else if (!m_deintfiltername.contains("opengl"))
1745
        {
1913
        {
1746
            // make sure opengl deinterlacing is disabled
1914
            // make sure opengl deinterlacing is disabled
1747
            gl_context_lock.lock();
1748
            gl_videochain->SetDeinterlacing(false);
1915
            gl_videochain->SetDeinterlacing(false);
1749
            gl_context_lock.unlock();
1750
1916
1751
            if (!m_deintFiltMan || !m_deintFilter)
1917
            if (!m_deintFiltMan || !m_deintFilter)
1752
                return VideoOutput::SetupDeinterlace(enable);
1918
                return VideoOutput::SetupDeinterlace(enable);
Lines 1754-1763 Link Here
1754
    }
1920
    }
1755
1921
1756
    if (gl_videochain)
1922
    if (gl_videochain)
1757
    {
1758
        QMutexLocker locker(&gl_context_lock);
1759
        gl_videochain->SetDeinterlacing(enable);
1923
        gl_videochain->SetDeinterlacing(enable);
1760
    }
1761
1924
1762
    m_deinterlacing = enable;
1925
    m_deinterlacing = enable;
1763
1926
Lines 1770-1782 Link Here
1770
    (void) interlaced;
1933
    (void) interlaced;
1771
    (void) overridefilter;
1934
    (void) overridefilter;
1772
1935
1936
    if (!gl_videochain || !gl_context)
1937
        return false;
1938
1939
    OpenGLContextLocker ctx_lock(gl_context);
1940
1773
    m_deintfiltername = db_vdisp_profile->GetFilteredDeint(overridefilter);
1941
    m_deintfiltername = db_vdisp_profile->GetFilteredDeint(overridefilter);
1774
1942
1775
    if (!m_deintfiltername.contains("opengl"))
1943
    if (!m_deintfiltername.contains("opengl"))
1776
    {
1944
    {
1777
        gl_context_lock.lock();
1778
        gl_videochain->SetDeinterlacing(false);
1945
        gl_videochain->SetDeinterlacing(false);
1779
        gl_context_lock.unlock();
1780
1946
1781
        gl_videochain->SetSoftwareDeinterlacer(QString::null);
1947
        gl_videochain->SetSoftwareDeinterlacer(QString::null);
1782
1948
Lines 1806-1813 Link Here
1806
    if (!gl_videochain)
1972
    if (!gl_videochain)
1807
        return false;
1973
        return false;
1808
1974
1809
    QMutexLocker locker(&gl_context_lock);
1810
1811
    if (m_deinterlacing && !m_deintfiltername.isEmpty()) 
1975
    if (m_deinterlacing && !m_deintfiltername.isEmpty()) 
1812
    {
1976
    {
1813
        if (gl_videochain->GetDeinterlacer() != m_deintfiltername)
1977
        if (gl_videochain->GetDeinterlacer() != m_deintfiltername)
Lines 1835-1841 Link Here
1835
}
1999
}
1836
2000
1837
/**
2001
/**
1838
 * \fn VideoOutput::NeedsDoubleFramerate() const
2002
 * \fn VideoOutput::ApproveDeintFilter(const QString&) const
1839
 * Approves bobdeint filter for XVideo and XvMC surfaces,
2003
 * Approves bobdeint filter for XVideo and XvMC surfaces,
1840
 * rejects other filters for XvMC, and defers to
2004
 * rejects other filters for XvMC, and defers to
1841
 * VideoOutput::ApproveDeintFilter(const QString&)
2005
 * VideoOutput::ApproveDeintFilter(const QString&)
Lines 1843-1858 Link Here
1843
 *
2007
 *
1844
 * \return whether current video output supports a specific filter.
2008
 * \return whether current video output supports a specific filter.
1845
 */
2009
 */
1846
bool VideoOutputXv::ApproveDeintFilter(const QString& filtername) const
2010
bool VideoOutputXv::ApproveDeintFilter(const QString &filtername) const
1847
{
2011
{
1848
    // TODO implement bobdeint for non-Xv[MC]
2012
    // TODO implement bobdeint for non-Xv[MC]
1849
    VOSType vos = VideoOutputSubType();
2013
    VOSType vos = VideoOutputSubType();
1850
    if (filtername == "bobdeint" && (vos >= XVideo || vos == OpenGL))
2014
2015
    if (XVideoVDPAU == vos)
2016
        return filtername.contains("vdpau");
2017
2018
    if ((OpenGL == vos) && filtername.contains("opengl"))
1851
        return true;
2019
        return true;
1852
    else if (vos > XVideo)
2020
1853
        return false;
2021
    if (filtername == "bobdeint" && (vos >= OpenGL) && (XVideoVDPAU != vos))
1854
    else
2022
        return true;
1855
        return VideoOutput::ApproveDeintFilter(filtername);
2023
2024
    return VideoOutput::ApproveDeintFilter(filtername);
1856
}
2025
}
1857
2026
1858
XvMCContext* VideoOutputXv::CreateXvMCContext(
2027
XvMCContext* VideoOutputXv::CreateXvMCContext(
Lines 1892-1897 Link Here
1892
#endif // !USING_XVMC
2061
#endif // !USING_XVMC
1893
}
2062
}
1894
2063
2064
bool VideoOutputXv::CreateVDPAUBuffers(void)
2065
{
2066
#ifdef USING_VDPAU
2067
    if (!vdpau)
2068
        return false;
2069
2070
    if (!vdpau->InitBuffers(video_dim.width(), video_dim.height(),
2071
                            NUM_VDPAU_BUFFERS, db_letterbox_colour))
2072
    {
2073
        vdpau->FreeBuffers();
2074
        return false;
2075
    }
2076
2077
    bool ok = false;
2078
2079
    if (myth_codec_id > kCodec_VDPAU_BEGIN &&
2080
        myth_codec_id < kCodec_VDPAU_END)
2081
    {
2082
        ok = vbuffers.CreateBuffers(video_dim.width(), 
2083
                               video_dim.height(), vdpau);
2084
    }
2085
    else if (myth_codec_id < kCodec_NORMAL_END)
2086
    {
2087
        ok = vbuffers.CreateBuffers(video_dim.width(), video_dim.height());
2088
    }
2089
2090
    if (!ok)
2091
    {
2092
        DeleteBuffers(XVideoVDPAU, false);
2093
        return ok;
2094
    }
2095
2096
    return true;
2097
#else
2098
    return false;
2099
#endif
2100
}
2101
1895
bool VideoOutputXv::CreateXvMCBuffers(void)
2102
bool VideoOutputXv::CreateXvMCBuffers(void)
1896
{
2103
{
1897
#ifdef USING_XVMC
2104
#ifdef USING_XVMC
Lines 2172-2178 Link Here
2172
{
2379
{
2173
    bool ok = false;
2380
    bool ok = false;
2174
2381
2175
    if (subtype > XVideo && xv_port >= 0)
2382
    if (subtype == XVideoVDPAU)
2383
        ok = CreateVDPAUBuffers();
2384
    else if (subtype > XVideo && xv_port >= 0)
2176
        ok = CreateXvMCBuffers();
2385
        ok = CreateXvMCBuffers();
2177
    else if (subtype == XVideo && xv_port >= 0)
2386
    else if (subtype == XVideo && xv_port >= 0)
2178
    {
2387
    {
Lines 2262-2267 Link Here
2262
    (void) subtype;
2471
    (void) subtype;
2263
    DiscardFrames(true);
2472
    DiscardFrames(true);
2264
2473
2474
#ifdef USING_VDPAU
2475
    if (vdpau)
2476
    {
2477
        vdpau->FreeBuffers();
2478
        vdpau->Deinit();
2479
        delete vdpau;
2480
        vdpau = NULL;
2481
        vdpau_use_osd = false;
2482
    }
2483
#endif
2484
2265
#ifdef USING_XVMC
2485
#ifdef USING_XVMC
2266
    // XvMC buffers
2486
    // XvMC buffers
2267
    for (uint i=0; i<xvmc_surfs.size(); i++)
2487
    for (uint i=0; i<xvmc_surfs.size(); i++)
Lines 2302-2308 Link Here
2302
#endif // USING_XVMC
2522
#endif // USING_XVMC
2303
2523
2304
    // OpenGL stuff
2524
    // OpenGL stuff
2305
    gl_context_lock.lock();
2525
    if (gl_context)
2526
        gl_context->MakeCurrent(true);
2306
2527
2307
    if (gl_videochain)
2528
    if (gl_videochain)
2308
    {
2529
    {
Lines 2319-2334 Link Here
2319
        delete gl_osdchain;
2540
        delete gl_osdchain;
2320
        gl_osdchain = NULL;
2541
        gl_osdchain = NULL;
2321
    }
2542
    }
2322
#ifdef USING_OPENGL
2543
2323
    if (gl_context)
2544
    if (gl_context)
2545
    {
2324
        gl_context->Hide();
2546
        gl_context->Hide();
2325
#endif
2547
        gl_context->MakeCurrent(false);
2548
    }
2549
2326
    gl_use_osd_opengl2 = false;
2550
    gl_use_osd_opengl2 = false;
2327
    gl_pip_ready = false;
2551
    gl_pip_ready = false;
2328
    gl_osd_ready = false;
2552
    gl_osd_ready = false;
2329
    allowpreviewepg = true;
2553
    allowpreviewepg = true;
2330
2331
    gl_context_lock.unlock();
2332
    // end OpenGL stuff
2554
    // end OpenGL stuff
2333
2555
2334
    vbuffers.DeleteBuffers();
2556
    vbuffers.DeleteBuffers();
Lines 2418-2424 Link Here
2418
    bool displaying = false;
2640
    bool displaying = false;
2419
    if (!frame)
2641
    if (!frame)
2420
        return;
2642
        return;
2421
2643
#ifdef USING_VDPAU
2644
    if (vdpau && VideoOutputSubType() == XVideoVDPAU)
2645
    {
2646
        displaying = vdpau->IsBeingUsed(frame);
2647
    }
2648
#endif // USING_VDPAU
2422
#ifdef USING_XVMC
2649
#ifdef USING_XVMC
2423
    vbuffers.LockFrame(frame, "DiscardFrame -- XvMC display check");
2650
    vbuffers.LockFrame(frame, "DiscardFrame -- XvMC display check");
2424
    if (frame && VideoOutputSubType() >= XVideoMC)
2651
    if (frame && VideoOutputSubType() >= XVideoMC)
Lines 2471-2477 Link Here
2471
    VERBOSE(VB_PLAYBACK, LOC + "ClearAfterSeek()");
2698
    VERBOSE(VB_PLAYBACK, LOC + "ClearAfterSeek()");
2472
    DiscardFrames(false);
2699
    DiscardFrames(false);
2473
#ifdef USING_XVMC
2700
#ifdef USING_XVMC
2474
    if (VideoOutputSubType() > XVideo)
2701
    if (VideoOutputSubType() >= XVideoMC)
2475
    {
2702
    {
2476
        for (uint i=0; i<xvmc_surfs.size(); i++)
2703
        for (uint i=0; i<xvmc_surfs.size(); i++)
2477
        {
2704
        {
Lines 2492-2499 Link Here
2492
void VideoOutputXv::DiscardFrames(bool next_frame_keyframe)
2719
void VideoOutputXv::DiscardFrames(bool next_frame_keyframe)
2493
{
2720
{
2494
    VERBOSE(VB_PLAYBACK, LOC + "DiscardFrames("<<next_frame_keyframe<<")");
2721
    VERBOSE(VB_PLAYBACK, LOC + "DiscardFrames("<<next_frame_keyframe<<")");
2495
    if (VideoOutputSubType() <= XVideo)
2722
    if (VideoOutputSubType() <= XVideoVDPAU)
2496
    {
2723
    {
2724
#ifdef USING_VDPAU
2725
        if (vdpau && VideoOutputSubType() == XVideoVDPAU)
2726
        {
2727
            CheckFrameStates();
2728
            vdpau->ClearReferenceFrames();
2729
        }
2730
#endif // USING_VDPAU
2497
        vbuffers.DiscardFrames(next_frame_keyframe);
2731
        vbuffers.DiscardFrames(next_frame_keyframe);
2498
        VERBOSE(VB_PLAYBACK, LOC + QString("DiscardFrames() 3: %1 -- done()")
2732
        VERBOSE(VB_PLAYBACK, LOC + QString("DiscardFrames() 3: %1 -- done()")
2499
                .arg(vbuffers.GetStatus()));
2733
                .arg(vbuffers.GetStatus()));
Lines 2586-2591 Link Here
2586
        vbuffers.DoneDisplayingFrame();
2820
        vbuffers.DoneDisplayingFrame();
2587
        return;
2821
        return;
2588
    }
2822
    }
2823
2824
#ifdef USING_VDPAU
2825
    if (vdpau && VideoOutputSubType() == XVideoVDPAU)
2826
    {
2827
        if (vbuffers.size(kVideoBuffer_used))
2828
        {
2829
            VideoFrame *frame = vbuffers.head(kVideoBuffer_used);
2830
            DiscardFrame(frame);
2831
        }
2832
        CheckFrameStates();
2833
        return;
2834
    }
2835
#endif // USING_VDPAU
2589
#ifdef USING_XVMC
2836
#ifdef USING_XVMC
2590
    if (vbuffers.size(kVideoBuffer_used))
2837
    if (vbuffers.size(kVideoBuffer_used))
2591
    {
2838
    {
Lines 2603-2608 Link Here
2603
#endif
2850
#endif
2604
}
2851
}
2605
2852
2853
void VideoOutputXv::PrepareFrameVDPAU(VideoFrame *frame, FrameScanType scan)
2854
{
2855
    (void)frame;
2856
    (void)scan;
2857
2858
    bool pause = (frame == NULL);
2859
    // select the correct still frame on certain dvds
2860
    if (pause && vbuffers.size(kVideoBuffer_used))
2861
        frame = vbuffers.head(kVideoBuffer_used);
2862
2863
    if (frame)
2864
        framesPlayed = frame->frameNumber + 1;
2865
2866
#ifdef USING_VDPAU
2867
    if (!vdpau)
2868
        return;
2869
2870
    vdpau->PrepareVideo(
2871
        frame, video_rect, display_video_rect, 
2872
        display_visible_rect.size(), scan, pause);
2873
2874
#endif
2875
2876
    if (pause)
2877
        vbuffers.SetLastShownFrameToScratch();
2878
}
2879
2606
/**
2880
/**
2607
 * \fn VideoOutputXv::PrepareFrameXvMC(VideoFrame*,FrameScanType)
2881
 * \fn VideoOutputXv::PrepareFrameXvMC(VideoFrame*,FrameScanType)
2608
 *
2882
 *
Lines 2671-2677 Link Here
2671
{
2945
{
2672
    (void) t;
2946
    (void) t;
2673
2947
2674
    QMutexLocker locker(&gl_context_lock);
2948
    if (!gl_videochain || !gl_context)
2949
        return;
2950
2951
    OpenGLContextLocker ctx_lock(gl_context);
2675
2952
2676
    if (!buffer)
2953
    if (!buffer)
2677
        buffer = vbuffers.GetScratchFrame();
2954
        buffer = vbuffers.GetScratchFrame();
Lines 2682-2688 Link Here
2682
    if (buffer->codec != FMT_YV12)
2959
    if (buffer->codec != FMT_YV12)
2683
        return;
2960
        return;
2684
2961
2685
    gl_context->MakeCurrent(true);
2686
    gl_videochain->PrepareFrame(t, m_deinterlacing, framesPlayed);
2962
    gl_videochain->PrepareFrame(t, m_deinterlacing, framesPlayed);
2687
2963
2688
    if (gl_pip_ready && gl_pipchain)
2964
    if (gl_pip_ready && gl_pipchain)
Lines 2691-2698 Link Here
2691
    if (gl_osd_ready && gl_osdchain)
2967
    if (gl_osd_ready && gl_osdchain)
2692
        gl_osdchain->PrepareFrame(t, m_deinterlacing, framesPlayed);
2968
        gl_osdchain->PrepareFrame(t, m_deinterlacing, framesPlayed);
2693
2969
2694
    gl_context->Flush();
2970
    gl_context->Flush(false);
2695
    gl_context->MakeCurrent(false);
2696
2971
2697
    if (vbuffers.GetScratchFrame() == buffer)
2972
    if (vbuffers.GetScratchFrame() == buffer)
2698
        vbuffers.SetLastShownFrameToScratch();
2973
        vbuffers.SetLastShownFrameToScratch();
Lines 2811-2817 Link Here
2811
        return;
3086
        return;
2812
    }
3087
    }
2813
3088
2814
    if (VideoOutputSubType() > XVideo)
3089
    if (VideoOutputSubType() == XVideoVDPAU)
3090
        PrepareFrameVDPAU(buffer, scan);
3091
    else if (VideoOutputSubType() > XVideo)
2815
        PrepareFrameXvMC(buffer, scan);
3092
        PrepareFrameXvMC(buffer, scan);
2816
    else if (VideoOutputSubType() == XVideo)
3093
    else if (VideoOutputSubType() == XVideo)
2817
        PrepareFrameXv(buffer);
3094
        PrepareFrameXv(buffer);
Lines 2900-2905 Link Here
2900
#endif
3177
#endif
2901
}
3178
}
2902
3179
3180
void VideoOutputXv::SetNextFrameDisplayTimeOffset(int delayus)
3181
{
3182
    (void)delayus;
3183
#ifdef USING_VDPAU
3184
    if (!vdpau)
3185
        return;
3186
3187
    vdpau->SetNextFrameDisplayTimeOffset(delayus);
3188
#endif
3189
}
3190
3191
void VideoOutputXv::ShowVDPAU(FrameScanType scan)
3192
{
3193
    (void)scan;
3194
#ifdef USING_VDPAU
3195
    if (!vdpau)
3196
        return;
3197
3198
    if (vdpau->IsErrored())
3199
    {
3200
        errored = true;
3201
        return;
3202
    }
3203
3204
    vdpau->DisplayNextFrame();
3205
    CheckFrameStates();
3206
#endif
3207
}
3208
2903
void VideoOutputXv::ShowXvMC(FrameScanType scan)
3209
void VideoOutputXv::ShowXvMC(FrameScanType scan)
2904
{
3210
{
2905
    (void)scan;
3211
    (void)scan;
Lines 3056-3077 Link Here
3056
        DrawUnusedRects(/* don't do a sync*/false);
3362
        DrawUnusedRects(/* don't do a sync*/false);
3057
    }
3363
    }
3058
3364
3059
    if (VideoOutputSubType() > XVideo)
3365
    if (VideoOutputSubType() == XVideoVDPAU)
3366
        ShowVDPAU(scan);
3367
    else if (VideoOutputSubType() > XVideo)
3060
        ShowXvMC(scan);
3368
        ShowXvMC(scan);
3061
    else if (VideoOutputSubType() == XVideo)
3369
    else if (VideoOutputSubType() == XVideo)
3062
        ShowXVideo(scan);
3370
        ShowXVideo(scan);
3063
    else if (VideoOutputSubType() == OpenGL)
3371
    else if (VideoOutputSubType() == OpenGL && gl_context)
3064
    {
3065
        QMutexLocker locker(&gl_context_lock);
3066
        gl_context->SwapBuffers();
3372
        gl_context->SwapBuffers();
3067
    }
3068
3373
3069
    X11S(XSync(XJ_disp, False));
3374
    X11S(XSync(XJ_disp, False));
3070
}
3375
}
3071
3376
3072
void VideoOutputXv::ShowPip(VideoFrame *frame, NuppelVideoPlayer *pipplayer)
3377
void VideoOutputXv::ShowPip(VideoFrame *frame, NuppelVideoPlayer *pipplayer)
3073
{
3378
{
3074
    if (VideoOutputSubType() != OpenGL)
3379
    if (VideoOutputSubType() != OpenGL &&
3380
        VideoOutputSubType() != XVideoVDPAU)
3075
    {
3381
    {
3076
        VideoOutput::ShowPip(frame, pipplayer);
3382
        VideoOutput::ShowPip(frame, pipplayer);
3077
        return;
3383
        return;
Lines 3101-3114 Link Here
3101
3407
3102
    QRect position = GetPIPRect(db_pip_location, pipplayer);
3408
    QRect position = GetPIPRect(db_pip_location, pipplayer);
3103
3409
3410
#ifdef USING_VDPAU
3411
    if (vdpau && VideoOutputSubType() == XVideoVDPAU)
3412
    {
3413
        if (vdpau_use_pip)
3414
            vdpau_use_pip = vdpau->ShowPiP(pipimage, position);
3415
3416
        pipplayer->ReleaseCurrentFrame(pipimage);
3417
        return;
3418
    }
3419
#endif // USING_VDPAU
3420
3104
    if (!gl_pipchain)
3421
    if (!gl_pipchain)
3105
    {
3422
    {
3106
        VERBOSE(VB_PLAYBACK, LOC + "Initialise PiP.");
3423
        VERBOSE(VB_PLAYBACK, LOC + "Initialise PiP.");
3107
        gl_pipchain = new OpenGLVideo();
3424
        gl_pipchain = new OpenGLVideo();
3108
        bool success = gl_pipchain->Init(gl_context, db_use_picture_controls,
3425
        bool success = gl_pipchain->Init(gl_context, db_use_picture_controls,
3109
                     true, QSize(pipVideoWidth, pipVideoHeight),
3426
                     QSize(pipVideoWidth, pipVideoHeight),
3110
                     position, position,
3427
                     position, position,
3111
                     QRect(0, 0, pipVideoWidth, pipVideoHeight), false);
3428
                     QRect(0, 0, pipVideoWidth, pipVideoHeight), false,
3429
                     GetFilters());
3112
        success &= gl_pipchain->AddDeinterlacer("openglonefield");
3430
        success &= gl_pipchain->AddDeinterlacer("openglonefield");
3113
        gl_pipchain->SetMasterViewport(gl_videochain->GetViewPort());
3431
        gl_pipchain->SetMasterViewport(gl_videochain->GetViewPort());
3114
        if (!success)
3432
        if (!success)
Lines 3124-3134 Link Here
3124
    {
3442
    {
3125
        VERBOSE(VB_PLAYBACK, LOC + "Re-initialise PiP.");
3443
        VERBOSE(VB_PLAYBACK, LOC + "Re-initialise PiP.");
3126
3444
3127
        bool success = gl_pipchain->ReInit(
3445
        delete gl_pipchain;
3128
            gl_context, db_use_picture_controls,
3446
        gl_pipchain = new OpenGLVideo();
3129
            true, QSize(pipVideoWidth, pipVideoHeight),
3447
        bool success = gl_pipchain->Init(
3130
            position, position,
3448
             gl_context, db_use_picture_controls,
3131
            QRect(0, 0, pipVideoWidth, pipVideoHeight), false);
3449
             QSize(pipVideoWidth, pipVideoHeight),
3450
             position, position,
3451
             QRect(0, 0, pipVideoWidth, pipVideoHeight), false,
3452
             GetFilters());
3132
3453
3133
        gl_pipchain->SetMasterViewport(gl_videochain->GetViewPort());
3454
        gl_pipchain->SetMasterViewport(gl_videochain->GetViewPort());
3134
        if (!success)
3455
        if (!success)
Lines 3158-3163 Link Here
3158
                                456 - 0.00001);
3479
                                456 - 0.00001);
3159
    int boboff     = use_bob ? boboff_raw : 0;
3480
    int boboff     = use_bob ? boboff_raw : 0;
3160
3481
3482
    if (XVideoVDPAU == VideoOutputSubType())
3483
    {
3484
        if (needrepaint)
3485
        {
3486
            X11L;
3487
            XSetForeground(XJ_disp, XJ_gc, 0x020202);
3488
            XFillRectangle(XJ_disp, XJ_curwin, XJ_gc,
3489
                           display_visible_rect.left(),
3490
                           display_visible_rect.top(),
3491
                           display_visible_rect.width(),
3492
                           display_visible_rect.height());
3493
            X11U;
3494
            needrepaint = false;
3495
        }
3496
        return;
3497
    }
3498
3161
    xv_need_bobdeint_repaint |= needrepaint;
3499
    xv_need_bobdeint_repaint |= needrepaint;
3162
3500
3163
    if (chroma_osd && chroma_osd->GetImage() && xv_need_bobdeint_repaint)
3501
    if (chroma_osd && chroma_osd->GetImage() && xv_need_bobdeint_repaint)
Lines 3279-3284 Link Here
3279
    if (VideoOutputSubType() <= XVideo)
3617
    if (VideoOutputSubType() <= XVideo)
3280
        return;
3618
        return;
3281
3619
3620
#ifdef USING_VDPAU
3621
    if (VideoOutputSubType() == XVideoVDPAU)
3622
    {
3623
        if (!vdpau)
3624
            return;
3625
        vdpau->Decode(frame);
3626
        return;
3627
    }
3628
#endif
3629
3282
#ifdef USING_XVMC
3630
#ifdef USING_XVMC
3283
    xvmc_render_state_t *render = GetRender(frame);
3631
    xvmc_render_state_t *render = GetRender(frame);
3284
    // disable questionable ffmpeg surface munging
3632
    // disable questionable ffmpeg surface munging
Lines 3406-3411 Link Here
3406
        }
3754
        }
3407
        vbuffers.UnlockFrame(&av_pause_frame, "UpdatePauseFrame - used");
3755
        vbuffers.UnlockFrame(&av_pause_frame, "UpdatePauseFrame - used");
3408
    }
3756
    }
3757
#ifdef USING_VDPAU
3758
    else if (VideoOutputSubType() == XVideoVDPAU)
3759
    {
3760
        return;
3761
    }
3762
#endif
3409
#ifdef USING_XVMC
3763
#ifdef USING_XVMC
3410
    else
3764
    else
3411
    {
3765
    {
Lines 3459-3464 Link Here
3459
#endif
3813
#endif
3460
}
3814
}
3461
3815
3816
void VideoOutputXv::ProcessFrameVDPAU(VideoFrame *frame, OSD *osd,
3817
                                      NuppelVideoPlayer *pipPlayer)
3818
{
3819
    if (vdpau_use_osd && osd)
3820
        DisplayOSD(frame, osd);
3821
    ShowPip(frame, pipPlayer);
3822
}
3823
3462
void VideoOutputXv::ProcessFrameXvMC(VideoFrame *frame, OSD *osd)
3824
void VideoOutputXv::ProcessFrameXvMC(VideoFrame *frame, OSD *osd)
3463
{
3825
{
3464
    (void)frame;
3826
    (void)frame;
Lines 3667-3673 Link Here
3667
    (void) filterList;
4029
    (void) filterList;
3668
    (void) pipPlayer;
4030
    (void) pipPlayer;
3669
4031
3670
    QMutexLocker locker(&gl_context_lock);
4032
    if (!gl_videochain || !gl_context)
4033
        return;
4034
4035
    OpenGLContextLocker ctx_lock(gl_context);
3671
4036
3672
    bool pauseframe = false;
4037
    bool pauseframe = false;
3673
    if (!frame)
4038
    if (!frame)
Lines 3677-3685 Link Here
3677
        pauseframe = true;
4042
        pauseframe = true;
3678
    }
4043
    }
3679
4044
3680
    // disable image processing for offscreen rendering
3681
    gl_context->MakeCurrent(true);
3682
3683
    if (filterList)
4045
    if (filterList)
3684
        filterList->ProcessFrame(frame);
4046
        filterList->ProcessFrame(frame);
3685
4047
Lines 3700-3709 Link Here
3700
        m_deintFilter->ProcessFrame(frame);
4062
        m_deintFilter->ProcessFrame(frame);
3701
    }
4063
    }
3702
4064
3703
    if (gl_videochain)
4065
    bool soft_bob = m_deinterlacing && (m_deintfiltername == "bobdeint");
3704
        gl_videochain->UpdateInputFrame(frame);
3705
4066
3706
    gl_context->MakeCurrent(false);
4067
    if (gl_videochain)
4068
        gl_videochain->UpdateInputFrame(frame, soft_bob);
3707
}
4069
}
3708
4070
3709
void VideoOutputXv::ProcessFrameMem(VideoFrame *frame, OSD *osd,
4071
void VideoOutputXv::ProcessFrameMem(VideoFrame *frame, OSD *osd,
Lines 3769-3775 Link Here
3769
        return;
4131
        return;
3770
    }
4132
    }
3771
4133
3772
    if (VideoOutputSubType() == OpenGL)
4134
    if (VideoOutputSubType() == XVideoVDPAU)
4135
        ProcessFrameVDPAU(frame, osd, pipPlayer);
4136
    else if (VideoOutputSubType() == OpenGL)
3773
        ProcessFrameOpenGL(frame, osd, filterList, pipPlayer);
4137
        ProcessFrameOpenGL(frame, osd, filterList, pipPlayer);
3774
    else if (VideoOutputSubType() <= XVideo)
4138
    else if (VideoOutputSubType() <= XVideo)
3775
        ProcessFrameMem(frame, osd, filterList, pipPlayer);
4139
        ProcessFrameMem(frame, osd, filterList, pipPlayer);
Lines 3784-3793 Link Here
3784
    if (!supported_attributes)
4148
    if (!supported_attributes)
3785
        return -1;
4149
        return -1;
3786
4150
3787
    if (VideoOutputSubType() == OpenGL)
4151
    if (VideoOutputSubType() == XVideoVDPAU)
4152
    {
4153
#ifdef USING_VDPAU
4154
        if (vdpau)
4155
        {
4156
            newValue = min(max(newValue, 0), 100);
4157
            newValue = vdpau->SetPictureAttribute(attribute, newValue);
4158
            if (newValue >= 0)
4159
                SetPictureAttributeDBValue(attribute, newValue);
4160
            return newValue;
4161
        }
4162
#endif // USING_VDPAU
4163
    }
4164
4165
    if (VideoOutputSubType() == OpenGL && gl_context)
3788
    {
4166
    {
3789
        newValue = min(max(newValue, 0), 100);
4167
        newValue = min(max(newValue, 0), 100);
3790
        newValue = gl_videochain->SetPictureAttribute(attribute, newValue);
4168
        newValue = gl_context->SetPictureAttribute(attribute, newValue);
3791
        if (newValue >= 0)
4169
        if (newValue >= 0)
3792
            SetPictureAttributeDBValue(attribute, newValue);
4170
            SetPictureAttributeDBValue(attribute, newValue);
3793
        return newValue;
4171
        return newValue;
Lines 3846-3854 Link Here
3846
{
4224
{
3847
    supported_attributes = kPictureAttributeSupported_None;
4225
    supported_attributes = kPictureAttributeSupported_None;
3848
4226
3849
    if (VideoOutputSubType() == OpenGL)
4227
    if (VideoOutputSubType() == XVideoVDPAU)
3850
    {
4228
    {
3851
        supported_attributes = gl_videochain->GetSupportedPictureAttributes();
4229
#ifdef USING_VDPAU
4230
        if (vdpau)
4231
            supported_attributes = vdpau->GetSupportedPictureAttributes();
4232
#endif //USING_VDPAU
4233
    }
4234
    else if (VideoOutputSubType() == OpenGL && gl_context)
4235
    {
4236
        supported_attributes = gl_context->GetSupportedPictureAttributes();
3852
    }
4237
    }
3853
    else if (VideoOutputSubType() >= XVideo)
4238
    else if (VideoOutputSubType() >= XVideo)
3854
    {
4239
    {
Lines 3882-3887 Link Here
3882
4267
3883
void VideoOutputXv::CheckFrameStates(void)
4268
void VideoOutputXv::CheckFrameStates(void)
3884
{
4269
{
4270
#ifdef USING_VDPAU
4271
    if (vdpau && VideoOutputSubType() == XVideoVDPAU)
4272
    {
4273
        frame_queue_t::iterator it;
4274
        it = vbuffers.begin_lock(kVideoBuffer_displayed);
4275
        while (it != vbuffers.end(kVideoBuffer_displayed))
4276
        {
4277
            VideoFrame* frame = *it;
4278
            if (!vdpau->IsBeingUsed(frame))
4279
            {
4280
                if (vbuffers.contains(kVideoBuffer_decode, frame))
4281
                {
4282
                    VERBOSE(VB_PLAYBACK, LOC + QString(
4283
                                "Frame %1 is in use by avlib and so is "
4284
                                "being held for later discarding.")
4285
                            .arg(DebugString(frame, true)));
4286
                }
4287
                else
4288
                {
4289
                    vbuffers.RemoveInheritence(frame);
4290
                    vbuffers.safeEnqueue(kVideoBuffer_avail, frame);
4291
                    vbuffers.end_lock();
4292
                    it = vbuffers.begin_lock(kVideoBuffer_displayed);
4293
                    continue;
4294
                }
4295
            }
4296
            ++it;
4297
        }
4298
        vbuffers.end_lock();
4299
        return;
4300
    }
4301
#endif // USING_VDPAU
3885
#ifdef USING_XVMC
4302
#ifdef USING_XVMC
3886
    frame_queue_t::iterator it;
4303
    frame_queue_t::iterator it;
3887
4304
Lines 3977-3982 Link Here
3977
bool VideoOutputXv::IsDisplaying(VideoFrame* frame)
4394
bool VideoOutputXv::IsDisplaying(VideoFrame* frame)
3978
{
4395
{
3979
    (void)frame;
4396
    (void)frame;
4397
    if (!frame)
4398
        return false;
4399
3980
#ifdef USING_XVMC
4400
#ifdef USING_XVMC
3981
    xvmc_render_state_t *render = GetRender(frame);
4401
    xvmc_render_state_t *render = GetRender(frame);
3982
    if (render)
4402
    if (render)
Lines 4152-4191 Link Here
4152
int VideoOutputXv::DisplayOSD(VideoFrame *frame, OSD *osd,
4572
int VideoOutputXv::DisplayOSD(VideoFrame *frame, OSD *osd,
4153
                              int stride, int revision)
4573
                              int stride, int revision)
4154
{
4574
{
4155
    if (!gl_use_osd_opengl2)
4575
    if (!gl_use_osd_opengl2 && !vdpau_use_osd)
4156
        return VideoOutput::DisplayOSD(frame, osd, stride, revision);
4576
        return VideoOutput::DisplayOSD(frame, osd, stride, revision);
4157
4577
4158
    gl_osd_ready = false;
4578
    gl_osd_ready = false;
4159
4579
4160
    if (!osd || !gl_osdchain)
4580
    if (!osd)
4161
        return -1;
4581
        return -1;
4162
4582
4163
    if (vsz_enabled && gl_videochain)
4164
        gl_videochain->SetVideoResize(vsz_desired_display_rect);
4165
4166
    OSDSurface *surface = osd->Display();
4583
    OSDSurface *surface = osd->Display();
4167
    if (!surface)
4584
    if (!surface)
4585
    {
4586
#ifdef USING_VDPAU
4587
        if (vdpau)
4588
            vdpau->DisableOSD();
4589
#endif
4168
        return -1;
4590
        return -1;
4169
4591
    }
4170
    gl_osd_ready = true;
4171
4592
4172
    bool changed = (-1 == revision) ?
4593
    bool changed = (-1 == revision) ?
4173
        surface->Changed() : (surface->GetRevision()!=revision);
4594
        surface->Changed() : (surface->GetRevision()!=revision);
4174
4595
4596
    if (gl_use_osd_opengl2)
4597
        gl_osd_ready = true;
4598
4175
    if (changed)
4599
    if (changed)
4176
    {
4600
    {
4177
        QSize visible = GetTotalOSDBounds().size();
4601
        QSize visible = GetTotalOSDBounds().size();
4178
4602
4179
        int offsets[3] =
4603
        if (vdpau_use_osd)
4180
        {
4604
        {
4181
            surface->y - surface->yuvbuffer,
4605
#ifdef USING_VDPAU
4182
            surface->u - surface->yuvbuffer,
4606
            if (!vdpau)
4183
            surface->v - surface->yuvbuffer,
4607
                return -1;
4184
        };
4608
4185
        gl_osdchain->UpdateInput(surface->yuvbuffer, offsets,
4609
            void *offsets[3], *alpha[1];
4186
                                 0, FMT_YV12, visible);
4610
            offsets[0] = surface->y;
4187
        gl_osdchain->UpdateInput(surface->alpha, offsets,
4611
            offsets[1] = surface->u;
4188
                                 3, FMT_ALPHA, visible);
4612
            offsets[2] = surface->v;
4613
            alpha[0] = surface->alpha;
4614
            vdpau->UpdateOSD(offsets, visible, alpha);
4615
#endif // USING_VDPAU
4616
        }
4617
        else if (gl_use_osd_opengl2)
4618
        {
4619
            if (!gl_osdchain)
4620
                return -1;
4621
4622
            if (vsz_enabled && gl_videochain)
4623
                gl_videochain->SetVideoResize(vsz_desired_display_rect);
4624
4625
            int offsets[3];
4626
            offsets[0] = surface->y - surface->yuvbuffer;
4627
            offsets[1] = surface->u - surface->yuvbuffer;
4628
            offsets[2] = surface->v - surface->yuvbuffer;
4629
4630
            gl_osdchain->UpdateInput(surface->yuvbuffer, offsets,
4631
                                 FMT_YV12, visible, surface->alpha);
4632
        }
4189
    }
4633
    }
4190
    return changed;
4634
    return changed;
4191
}
4635
}
Lines 4202-4208 Link Here
4202
    if (!disp)
4646
    if (!disp)
4203
        return list;
4647
        return list;
4204
4648
4205
    list = allowed_video_renderers(myth_codec_id, disp);
4649
    X11L;
4650
    int screen    = DefaultScreen(disp);
4651
    Window window = DefaultRootWindow(disp);
4652
    X11U;
4653
4654
    list = allowed_video_renderers(myth_codec_id, disp, screen, window);
4206
4655
4207
    XCloseDisplay(disp);
4656
    XCloseDisplay(disp);
4208
4657
Lines 4210-4216 Link Here
4210
}
4659
}
4211
4660
4212
static void SetFromEnv(bool &useXvVLD, bool &useXvIDCT, bool &useXvMC,
4661
static void SetFromEnv(bool &useXvVLD, bool &useXvIDCT, bool &useXvMC,
4213
                       bool &useXVideo, bool &useShm, bool &useOpenGL)
4662
                       bool &useXVideo, bool &useShm, bool &useOpenGL,
4663
                       bool &useVDPAU)
4214
{
4664
{
4215
    // can be used to force non-Xv mode as well as non-Xv/non-Shm mode
4665
    // can be used to force non-Xv mode as well as non-Xv/non-Shm mode
4216
    if (getenv("NO_XVMC_VLD"))
4666
    if (getenv("NO_XVMC_VLD"))
Lines 4225-4237 Link Here
4225
        useXVideo = useShm = false;
4675
        useXVideo = useShm = false;
4226
    if (getenv("NO_OPENGL"))
4676
    if (getenv("NO_OPENGL"))
4227
        useOpenGL = false;
4677
        useOpenGL = false;
4678
    if (getenv("NO_VDPAU"))
4679
        useVDPAU = false;
4228
}
4680
}
4229
4681
4230
static void SetFromHW(Display *d,
4682
static void SetFromHW(Display *d,
4231
                      bool &useXvMC, bool &useXVideo,
4683
                      int     screen,     Window  curwin,
4232
                      bool &useShm,  bool &useXvMCOpenGL,
4684
                      bool    &useXvMC,   bool   &useXVideo,
4233
                      bool &useOpenGL)
4685
                      bool    &useShm,    bool   &useXvMCOpenGL,
4234
{
4686
                      bool    &useOpenGL, bool   &useVDPAU,
4687
                      MythCodecID vdpau_codec_id)
4688
{
4689
    (void)screen;
4690
    (void)d;
4691
    (void)curwin;
4692
    (void) vdpau_codec_id;
4235
    // find out about XvMC support
4693
    // find out about XvMC support
4236
    if (useXvMC)
4694
    if (useXvMC)
4237
    {
4695
    {
Lines 4294-4314 Link Here
4294
        useOpenGL = OpenGLContext::IsGLXSupported(d, 1, 2);
4752
        useOpenGL = OpenGLContext::IsGLXSupported(d, 1, 2);
4295
#endif // USING_OPENGL_VIDEO
4753
#endif // USING_OPENGL_VIDEO
4296
    }
4754
    }
4755
4756
    if (useVDPAU)
4757
    {
4758
        useVDPAU = false;
4759
#ifdef USING_VDPAU
4760
        if (vdpau_codec_id < kCodec_NORMAL_END)
4761
        {
4762
            useVDPAU = true;
4763
        }
4764
        else
4765
        {
4766
            useVDPAU = VDPAUContext::CheckCodecSupported(vdpau_codec_id);
4767
        }
4768
        if (useVDPAU)
4769
        {
4770
            // N.B. This only confirms another VDPAU context can be created.
4771
            // Creating a second hardware decoder will still fail (180.25)
4772
            // e.g. when attempting to use PBP.
4773
            VDPAUContext *c = new VDPAUContext();
4774
            useVDPAU = c->Init(d, screen, curwin, QSize(1920,1200),
4775
                               false, vdpau_codec_id);
4776
            c->Deinit();
4777
            delete c;
4778
        }
4779
4780
#endif // USING_VDPAU
4781
    }
4297
}
4782
}
4298
4783
4299
static QStringList allowed_video_renderers(MythCodecID myth_codec_id,
4784
static QStringList allowed_video_renderers(
4300
                                           Display *XJ_disp)
4785
    MythCodecID myth_codec_id, Display *display, int screen, Window curwin)
4301
{
4786
{
4302
    bool vld, idct, mc, xv, shm, xvmc_opengl, opengl;
4787
    bool vld, idct, mc, xv, shm, xvmc_opengl, opengl, vdpau;
4303
4788
4304
    myth2av_codecid(myth_codec_id, vld, idct, mc);
4789
    myth2av_codecid(myth_codec_id, vld, idct, mc, vdpau);
4790
4791
    // allow vdpau rendering for software decode
4792
    if (myth_codec_id < kCodec_NORMAL_END)
4793
        vdpau = true;
4305
4794
4306
    opengl = xv = shm = !vld && !idct;
4795
    opengl = xv = shm = !vld && !idct;
4307
    xvmc_opengl = vld || idct || mc;
4796
    xvmc_opengl = vld || idct || mc;
4308
4797
4309
    SetFromEnv(vld, idct, mc, xv, shm, opengl);
4798
    SetFromEnv(vld, idct, mc, xv, shm, opengl, vdpau);
4310
    SetFromHW(XJ_disp, mc, xv, shm, xvmc_opengl, opengl);
4799
    SetFromHW(display, screen, curwin, mc, xv, shm, xvmc_opengl,
4311
4800
              opengl, vdpau, myth_codec_id);
4312
    idct &= mc;
4801
    idct &= mc;
4313
4802
4314
    QStringList list;
4803
    QStringList list;
Lines 4320-4327 Link Here
4320
            list += "xv-blit";
4809
            list += "xv-blit";
4321
        if (shm)
4810
        if (shm)
4322
            list += "xshm";
4811
            list += "xshm";
4812
        if (vdpau)
4813
            list += "vdpau";
4323
        list += "xlib";
4814
        list += "xlib";
4324
    }
4815
    }
4816
    else if ((kCodec_VDPAU_BEGIN < myth_codec_id) && 
4817
             (myth_codec_id < kCodec_VDPAU_END) && vdpau)
4818
    {
4819
        list += "vdpau";
4820
    }
4325
    else
4821
    else
4326
    {
4822
    {
4327
        if (vld || idct || mc)
4823
        if (vld || idct || mc)
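That closes the videoout_xv.cpp changes. One point worth noting from the renderer-selection hunks above: the new NO_VDPAU check in SetFromEnv() is a veto only, the same pattern the existing NO_XVMC_VLD and NO_OPENGL overrides use, and the flags that survive it are still narrowed by the hardware probes in SetFromHW(). A small stand-alone sketch of that pattern, not part of the patch (the helper and variable names here are ours):

#include <cstdio>
#include <cstdlib>

// Sketch of the SetFromEnv() veto pattern: an environment variable can only
// switch a renderer off, never force it on; hardware capability checks
// (SetFromHW in the patch) still run afterwards.
static void apply_env_vetoes(bool &useOpenGL, bool &useVDPAU)
{
    if (std::getenv("NO_OPENGL"))   // existing override, kept by the patch
        useOpenGL = false;
    if (std::getenv("NO_VDPAU"))    // new override added by this patch
        useVDPAU = false;
}

int main(void)
{
    bool useOpenGL = true, useVDPAU = true;   // optimistic defaults
    apply_env_vetoes(useOpenGL, useVDPAU);
    std::printf("opengl=%d vdpau=%d\n", (int)useOpenGL, (int)useVDPAU);
    return 0;
}

In practice a tester should be able to start the frontend with NO_VDPAU=1 to fall back to the older renderers without rebuilding, which is handy when bisecting driver problems.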
(-)mythtv.ori/libs/libmythtv/videoout_xv.h (-3 / +25 lines)
Lines 47-55 Link Here
47
47
48
typedef enum VideoOutputSubType
48
typedef enum VideoOutputSubType
49
{
49
{
50
    XVUnknown = 0, Xlib, XShm, OpenGL, XVideo, XVideoMC, XVideoIDCT, XVideoVLD,
50
    XVUnknown = 0, Xlib, XShm, OpenGL, XVideo, XVideoVDPAU, 
51
    XVideoMC, XVideoIDCT, XVideoVLD, 
51
} VOSType;
52
} VOSType;
52
53
54
class VDPAUContext;
53
class VideoOutputXv : public VideoOutput
55
class VideoOutputXv : public VideoOutput
54
{
56
{
55
    friend class ChromaKeyOSD;
57
    friend class ChromaKeyOSD;
Lines 127-134 Link Here
127
    static QStringList GetAllowedRenderers(MythCodecID myth_codec_id,
129
    static QStringList GetAllowedRenderers(MythCodecID myth_codec_id,
128
                                           const QSize &video_dim);
130
                                           const QSize &video_dim);
129
131
130
  private:
131
    VOSType VideoOutputSubType() const { return video_output_subtype; }
132
    VOSType VideoOutputSubType() const { return video_output_subtype; }
133
    void SetNextFrameDisplayTimeOffset(int delayus);
134
135
  private:
132
    virtual QRect GetVisibleOSDBounds(float&, float&, float) const;
136
    virtual QRect GetVisibleOSDBounds(float&, float&, float) const;
133
    virtual QRect GetTotalOSDBounds(void) const;
137
    virtual QRect GetTotalOSDBounds(void) const;
134
138
Lines 137-142 Link Here
137
    void DiscardFrames(bool next_frame_keyframe);
141
    void DiscardFrames(bool next_frame_keyframe);
138
    void DoneDisplayingFrame(void);
142
    void DoneDisplayingFrame(void);
139
143
144
    void ProcessFrameVDPAU(VideoFrame *frame, OSD *osd,
145
                           NuppelVideoPlayer *pipPlayer);
140
    void ProcessFrameXvMC(VideoFrame *frame, OSD *osd);
146
    void ProcessFrameXvMC(VideoFrame *frame, OSD *osd);
141
    void ProcessFrameOpenGL(VideoFrame *frame, OSD *osd,
147
    void ProcessFrameOpenGL(VideoFrame *frame, OSD *osd,
142
                            FilterChain *filterList,
148
                            FilterChain *filterList,
Lines 145-155 Link Here
145
                         FilterChain *filterList,
151
                         FilterChain *filterList,
146
                         NuppelVideoPlayer *pipPlayer);
152
                         NuppelVideoPlayer *pipPlayer);
147
153
154
    void PrepareFrameVDPAU(VideoFrame *, FrameScanType);
148
    void PrepareFrameXvMC(VideoFrame *, FrameScanType);
155
    void PrepareFrameXvMC(VideoFrame *, FrameScanType);
149
    void PrepareFrameXv(VideoFrame *);
156
    void PrepareFrameXv(VideoFrame *);
150
    void PrepareFrameOpenGL(VideoFrame *, FrameScanType);
157
    void PrepareFrameOpenGL(VideoFrame *, FrameScanType);
151
    void PrepareFrameMem(VideoFrame *, FrameScanType);
158
    void PrepareFrameMem(VideoFrame *, FrameScanType);
152
159
160
    void ShowVDPAU(FrameScanType scan);
153
    void ShowXvMC(FrameScanType scan);
161
    void ShowXvMC(FrameScanType scan);
154
    void ShowXVideo(FrameScanType scan);
162
    void ShowXVideo(FrameScanType scan);
155
163
Lines 162-177 Link Here
162
    void InitColorKey(bool turnoffautopaint);
170
    void InitColorKey(bool turnoffautopaint);
163
171
164
    bool InitVideoBuffers(MythCodecID, bool use_xv,
172
    bool InitVideoBuffers(MythCodecID, bool use_xv,
165
                          bool use_shm, bool use_opengl);
173
                          bool use_shm, bool use_opengl,
174
                          bool use_vdpau);
166
175
167
    bool InitXvMC(MythCodecID);
176
    bool InitXvMC(MythCodecID);
168
    bool InitXVideo(void);
177
    bool InitXVideo(void);
169
    bool InitOpenGL(void);
178
    bool InitOpenGL(void);
170
    bool InitXShm(void);
179
    bool InitXShm(void);
171
    bool InitXlib(void);
180
    bool InitXlib(void);
181
    bool InitVDPAU(MythCodecID);
172
    bool InitOSD(const QString&);
182
    bool InitOSD(const QString&);
173
    bool CheckOSDInit(void);
183
    bool CheckOSDInit(void);
174
184
185
    bool CreateVDPAUBuffers(void);
175
    bool CreateXvMCBuffers(void);
186
    bool CreateXvMCBuffers(void);
176
    bool CreateBuffers(VOSType subtype);
187
    bool CreateBuffers(VOSType subtype);
177
    vector<void*> CreateXvMCSurfaces(uint num, bool surface_has_vld);
188
    vector<void*> CreateXvMCSurfaces(uint num, bool surface_has_vld);
Lines 199-204 Link Here
199
    bool SetupDeinterlaceOpenGL(
210
    bool SetupDeinterlaceOpenGL(
200
        bool interlaced, const QString &overridefilter);
211
        bool interlaced, const QString &overridefilter);
201
212
213
    // VDPAU specific helper functions
214
    bool SetDeinterlacingEnabledVDPAU(bool enable);
215
    bool SetupDeinterlaceVDPAU(
216
        bool interlaced, const QString &overridefilter);
202
217
203
    // Misc.
218
    // Misc.
204
    MythCodecID          myth_codec_id;
219
    MythCodecID          myth_codec_id;
Lines 251-256 Link Here
251
    // Support for nVidia XvMC copy to texture feature
266
    // Support for nVidia XvMC copy to texture feature
252
    XvMCTextures        *xvmc_tex;
267
    XvMCTextures        *xvmc_tex;
253
268
269
#ifdef USING_VDPAU
270
    VDPAUContext        *vdpau;
271
#endif
272
    bool                 vdpau_use_osd;
273
    bool                 vdpau_use_pip;
274
    bool                 vdpau_use_colorkey;
275
254
    // Basic Xv drawing info
276
    // Basic Xv drawing info
255
    int                  xv_port;
277
    int                  xv_port;
256
    int                  xv_hue_base;
278
    int                  xv_hue_base;
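A note on the reordered VOSType enum at the top of this header hunk: XVideoVDPAU now sits between XVideo and the XvMC values, so ordered comparisons such as VideoOutputSubType() > XVideo match VDPAU output as well. That is why the videoout_xv.cpp hunks add explicit == XVideoVDPAU (or != XVideoVDPAU) branches ahead of the XvMC paths. A minimal illustration, with the enumerator order copied from the patched header and a dispatch helper that is ours, not MythTV's:

#include <cstdio>

// Enumerator order as declared in the patched videoout_xv.h.
enum VOSType
{
    XVUnknown = 0, Xlib, XShm, OpenGL, XVideo, XVideoVDPAU,
    XVideoMC, XVideoIDCT, XVideoVLD,
};

static const char *describe_output(VOSType vos)
{
    if (vos == XVideoVDPAU)   // must be tested first: XVideoVDPAU > XVideo too
        return "vdpau";
    if (vos > XVideo)         // with VDPAU excluded, this now means XvMC only
        return "xvmc";
    if (vos == XVideo)
        return "xv-blit";
    return "software or opengl";
}

int main(void)
{
    std::printf("%s\n", describe_output(XVideoVDPAU));   // prints "vdpau"
    return 0;
}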
(-)mythtv.ori/libs/libmythtv/videooutbase.cpp (-2 / +3 lines)
Lines 110-116 Link Here
110
            to_comma_list(renderers));
110
            to_comma_list(renderers));
111
111
112
    QString renderer = QString::null;
112
    QString renderer = QString::null;
113
    if (renderers.size() > 1)
113
    if (renderers.size() > 0)
114
    {
114
    {
115
        VideoDisplayProfile vprof;
115
        VideoDisplayProfile vprof;
116
        vprof.SetInput(video_dim);
116
        vprof.SetInput(video_dim);
Lines 562-568 Link Here
562
    // Default to not supporting bob deinterlace
562
    // Default to not supporting bob deinterlace
563
    return (!filtername.contains("bobdeint") &&
563
    return (!filtername.contains("bobdeint") &&
564
            !filtername.contains("doublerate") &&
564
            !filtername.contains("doublerate") &&
565
            !filtername.contains("opengl"));
565
            !filtername.contains("opengl") &&
566
            !filtername.contains("vdpau"));
566
}
567
}
567
568
568
/**
569
/**
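The videooutbase.cpp hunk above does two things: it lets a display profile be applied even when only a single renderer is allowed (size() > 0 instead of > 1), and it teaches the generic VideoOutput::ApproveDeintFilter() to reject VDPAU deinterlacers, leaving them to the VDPAU-aware override in videoout_xv.cpp. A simplified sketch of that split, using free functions and example filter names rather than the real class methods:

#include <cstdio>
#include <QString>

// Generic outputs now also turn down VDPAU-specific deinterlacers.
static bool approve_generic(const QString &name)
{
    return !name.contains("bobdeint") && !name.contains("doublerate") &&
           !name.contains("opengl")   && !name.contains("vdpau");
}

// The VDPAU output path accepts only its own deinterlacers.
static bool approve_vdpau_output(const QString &name)
{
    return name.contains("vdpau");
}

int main(void)
{
    std::printf("%d %d\n",
                (int)approve_generic("vdpaubobdeint"),        // 0: rejected
                (int)approve_vdpau_output("vdpaubobdeint"));  // 1: accepted
    return 0;
}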
(-)mythtv.ori/libs/libmythtv/vsync.cpp (-2 / +49 lines)
Lines 89-95
     skip = m_forceskip;
     m_forceskip = 0;
     }
-    
+
+#ifdef USING_VDPAU
+//    TESTVIDEOSYNC(VDPAUVideoSync);
+#endif
 #ifndef _WIN32
     TESTVIDEOSYNC(nVidiaVideoSync);
     TESTVIDEOSYNC(DRMVideoSync);
Lines 205-210
         m_nexttrigger.tv_usec = now.tv_usec;
         OffsetTimeval(m_nexttrigger, ret_val);
     }
+
     return ret_val;
 }
 
Lines 757-763
     unsigned long rtcdata;
     while (m_delay > 0)
     {
-        read(m_rtcfd, &rtcdata, sizeof(rtcdata));
+        (void)read(m_rtcfd, &rtcdata, sizeof(rtcdata));
         m_delay = CalcDelay();
     }
 }
Lines 768-773
 }
 #endif /* __linux__ */
 
+#ifdef USING_VDPAU
+VDPAUVideoSync::VDPAUVideoSync(VideoOutput *vo,
+                              int fr, int ri, bool intl) :
+    VideoSync(vo, fr, ri, intl)
+{
+}
+
+VDPAUVideoSync::~VDPAUVideoSync()
+{
+}
+
+bool VDPAUVideoSync::TryInit(void)
+{
+    VideoOutputXv *vo = dynamic_cast<VideoOutputXv*>(m_video_output);
+    if (!vo)
+        return false;
+
+    if (vo->VideoOutputSubType() != XVideoVDPAU)
+        return false;
+
+    return true;
+}
+
+void VDPAUVideoSync::WaitForFrame(int sync_delay)
+{
+    // Offset for externally-provided A/V sync delay
+    OffsetTimeval(m_nexttrigger, sync_delay);
+    m_delay = CalcDelay();
+
+    if (m_delay < 0)
+        m_delay = 0;
+
+    VideoOutputXv *vo = (VideoOutputXv *)(m_video_output);
+    vo->SetNextFrameDisplayTimeOffset(m_delay);
+}
+
+void VDPAUVideoSync::AdvanceTrigger(void)
+{
+    UpdateNexttrigger();
+}
+
+#endif
+
 BusyWaitVideoSync::BusyWaitVideoSync(VideoOutput *vo,
                                      int fr, int ri, bool intl) :
     VideoSync(vo, fr, ri, intl)
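The new VDPAUVideoSync above is notable in that WaitForFrame() never blocks: it clamps the computed delay to a minimum of zero and hands it to the video output via SetNextFrameDisplayTimeOffset(), leaving the actual frame pacing to the VDPAU output path, and its TESTVIDEOSYNC probe is still commented out in the first hunk. The sketch below illustrates only the surrounding selection pattern, where candidates are tried in order and the first whose TryInit() succeeds is kept; the Sync/FakeVDPAUSync/FakeRTCSync classes and this BestMethod() body are simplified stand-ins, not the actual TESTVIDEOSYNC macro.

// Minimal sketch (not MythTV code): try sync implementations in priority
// order and keep the first one that initializes successfully.
#include <iostream>
#include <string>

struct Sync
{
    virtual ~Sync() {}
    virtual bool TryInit(void) = 0;
    virtual std::string getName(void) const = 0;
};

struct FakeVDPAUSync : Sync
{
    bool TryInit(void) { return false; }   // pretend the output is not VDPAU
    std::string getName(void) const { return "VDPAU"; }
};

struct FakeRTCSync : Sync
{
    bool TryInit(void) { return true; }    // pretend /dev/rtc is usable
    std::string getName(void) const { return "RTC"; }
};

// Keep the first candidate whose TryInit() succeeds, delete the rest.
static Sync *BestMethod(void)
{
    Sync *candidates[] = { new FakeVDPAUSync, new FakeRTCSync };
    Sync *chosen = 0;
    for (unsigned i = 0; i < sizeof(candidates)/sizeof(candidates[0]); i++)
    {
        if (!chosen && candidates[i]->TryInit())
            chosen = candidates[i];
        else
            delete candidates[i];
    }
    return chosen;
}

int main()
{
    Sync *s = BestMethod();
    std::cout << (s ? s->getName() : std::string("none")) << std::endl;
    delete s;
    return 0;
}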
(-)mythtv.ori/libs/libmythtv/vsync.h (+21 lines)
Lines 252-257
 };
 #endif
 
+#ifdef USING_VDPAU
+/** \brief Video synchronization class employing VDPAU
+ */
+class VDPAUVideoSync : public VideoSync
+{
+  public:
+    VDPAUVideoSync(VideoOutput*,
+                   int frame_interval, int refresh_interval,
+                   bool interlaced);
+    ~VDPAUVideoSync();
+
+    QString getName(void) const { return QString("VDPAU"); }
+    bool TryInit(void);
+    void WaitForFrame(int sync_delay);
+    void AdvanceTrigger(void);
+
+  private:
+};
+
+#endif
+
 /** \brief Video synchronization classes employing usleep() and busy-waits.
  *
  *  Non-phase-maintaining. There may occasionally be short periods
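For context on how a VideoSync subclass such as the one declared above is driven: roughly, the player waits via WaitForFrame() with the current A/V-sync delay before showing each frame, then calls AdvanceTrigger() to schedule the next one. A toy, self-contained illustration of that calling pattern follows; ToySync is hypothetical and simply sleeps, whereas the real VDPAUVideoSync forwards the delay to the video output instead of blocking.

// Minimal sketch (not MythTV code) of the per-frame WaitForFrame /
// AdvanceTrigger calling pattern.
#include <unistd.h>
#include <iostream>

class ToySync
{
  public:
    ToySync(int frame_interval_us) :
        m_frame_interval(frame_interval_us), m_delay(0) {}

    void WaitForFrame(int sync_delay_us)
    {
        m_delay = m_frame_interval + sync_delay_us;
        if (m_delay > 0)
            usleep(m_delay);           // a blocking sync method sleeps here
    }

    void AdvanceTrigger(void) { /* schedule the next frame time */ }

  private:
    int m_frame_interval;
    int m_delay;
};

int main()
{
    ToySync sync(16667);               // ~60 fps frame interval
    for (int frame = 0; frame < 3; frame++)
    {
        sync.WaitForFrame(0);
        std::cout << "display frame " << frame << std::endl;
        sync.AdvanceTrigger();
    }
    return 0;
}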
(-)mythtv.ori/libs/libmythtv/xvmctextures.cpp (-1 / +1 lines)
Lines 88-94
     XVisualInfo *vis_info;
     vis_info = glXGetVisualFromFBConfig(XJ_disp, glx_fbconfig);
     gl_window = get_gl_window(XJ_disp, XJ_curwin, vis_info,
-                              window_size, true);
+                              QRect(QPoint(0,0), window_size));
 
     glx_window = get_glx_window(XJ_disp, glx_fbconfig, gl_window, glx_context,
                                 glx_pbuffer, window_size);
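The xvmctextures.cpp change above only adapts this call site to a revised get_gl_window() signature: the display area is now passed as a single QRect anchored at the origin rather than as a size plus a boolean flag. A QtCore-only sketch of constructing such a rectangle follows; the 1280x720 size is just an example, and get_gl_window() itself is MythTV code that is not reproduced here.

// Minimal sketch (QtCore only): build the full-window rectangle that the
// patched call site now passes.
#include <QRect>
#include <QPoint>
#include <QSize>
#include <iostream>

int main()
{
    QSize window_size(1280, 720);                   // illustrative size
    QRect display_rect(QPoint(0, 0), window_size);  // what the patch now passes

    std::cout << display_rect.x() << "," << display_rect.y() << " "
              << display_rect.width() << "x" << display_rect.height()
              << std::endl;
    return 0;
}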
