android.media.MediaCodec#VIDEO_SCALING_MODE_SCALE_TO_FIT source code examples

Listed below are code examples that use android.media.MediaCodec#VIDEO_SCALING_MODE_SCALE_TO_FIT, collected from open-source projects on GitHub.
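
All of the snippets pass this constant into ExoPlayer v1's MediaCodecVideoTrackRenderer, whose videoScalingMode constructor argument is applied to the underlying decoder via MediaCodec.setVideoScalingMode(int). For reference, here is a minimal sketch of that direct MediaCodec call; the class name and the caller-supplied MediaFormat and Surface are illustrative assumptions, not taken from any of the projects listed below.

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

import java.io.IOException;

public final class ScalingModeSketch {

  // Illustrative helper: create a video decoder for the given format, render to
  // the supplied Surface, and ask the platform to scale the decoded frames to
  // the surface dimensions (the effect of VIDEO_SCALING_MODE_SCALE_TO_FIT).
  public static MediaCodec createDecoder(MediaFormat format, Surface surface)
      throws IOException {
    String mimeType = format.getString(MediaFormat.KEY_MIME);
    MediaCodec codec = MediaCodec.createDecoderByType(mimeType);
    codec.configure(format, surface, /* crypto= */ null, /* flags= */ 0);
    // The scaling mode only takes effect when the codec outputs to a Surface.
    codec.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
    codec.start();
    return codec;
  }
}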


@Override
public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
  // Build the video and audio renderers.
  DefaultSampleSource sampleSource =
      new DefaultSampleSource(new FrameworkSampleExtractor(context, uri, null), 2);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
      null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, player.getMainHandler(),
      player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      null, true, player.getMainHandler(), player);

  // Build the debug renderer.
  TrackRenderer debugRenderer = debugTextView != null
      ? new DebugTrackRenderer(debugTextView, videoRenderer)
      : null;

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
  callback.onRenderers(null, null, renderers);
}
 

@Override
public void onManifest(String contentId, HlsPlaylist manifest) {
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();

  DataSource dataSource = new UriDataSource(userAgent, bandwidthMeter);
  HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter, null,
      HlsChunkSource.ADAPTIVE_MODE_SPLICE);
  HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, true, 3);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, player.getMainHandler(), player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);

  MetadataTrackRenderer<Map<String, Object>> id3Renderer =
      new MetadataTrackRenderer<Map<String, Object>>(sampleSource, new Id3Parser(),
          player.getId3MetadataRenderer(), player.getMainHandler().getLooper());

  Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,
      player.getMainHandler().getLooper());

  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TIMED_METADATA] = id3Renderer;
  renderers[DemoPlayer.TYPE_TEXT] = closedCaptionRenderer;
  callback.onRenderers(null, null, renderers);
}
 

@Override
public void buildRenderers(DemoPlayer player) {
  Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
  Handler mainHandler = player.getMainHandler();

  // Build the video and audio renderers.
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, null);
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator,
      BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE, mainHandler, player, 0);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      mainHandler, player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      MediaCodecSelector.DEFAULT, null, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 

@Override
public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
  // Build the video and audio renderers.
  FrameworkSampleSource sampleSource = new FrameworkSampleSource(context, uri, null, 2);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
      null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      player.getMainHandler(), player, 1);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      null, true, player.getMainHandler(), player);

  // Build the debug renderer.
  TrackRenderer debugRenderer = debugTextView != null
      ? new DebugTrackRenderer(debugTextView, videoRenderer)
      : null;

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
  callback.onRenderers(null, null, renderers);
}
 

@Override
public void buildRenderers(DemoPlayer player) {
  Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);

  // Build the video and audio renderers.
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(player.getMainHandler(),
      null);
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator,
      BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, player.getMainHandler(),
      player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
  TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, player,
      player.getMainHandler().getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 

@Override
public void buildRenderers(DemoPlayer player) {
  Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);

  // Build the video and audio renderers.
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(player.getMainHandler(),
      null);
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator,
      BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, player.getMainHandler(),
      player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
  TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, player,
      player.getMainHandler().getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 

private void preparePlayer() {

        SampleSource sampleSource =
                new FrameworkSampleSource(this, Uri.parse(mVideo.getContentUrl()), /* headers */ null, RENDERER_COUNT);

        // Build the track renderers
        videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
        TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);


        // Setup the player
        player = ExoPlayer.Factory.newInstance(RENDERER_COUNT, 1000, 5000);
        player.addListener(this);
        // Build the player controls
        mediaController.setMediaPlayer(new PlayerControl(player));
        mediaController.setEnabled(true);
        player.prepare(videoRenderer, audioRenderer);
    }
 

private void preparePlayer() {

        SampleSource sampleSource =
                new FrameworkSampleSource(this, Uri.parse(mVideo.getContentUrl()), /* headers */ null, RENDERER_COUNT);

        // Build the track renderers
        videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
        TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);

        // Setup the player
        player = ExoPlayer.Factory.newInstance(RENDERER_COUNT, 1000, 5000);
        player.addListener(this);
        // Build the player controls
        mediaController.setMediaPlayer(new PlayerControl(player));
        mediaController.setEnabled(true);
        player.prepare(videoRenderer, audioRenderer);
    }
 

@Override
public void buildRenderers(ExoplayerWrapper player) {
    Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);

    // Build the video and audio renderers.
    DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(player.getMainHandler(),
            null);
    DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator,
            BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE);
    MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
            sampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
            5000, player.getMainHandler(), player, 50);
    MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
            MediaCodecSelector.DEFAULT, null, true, player.getMainHandler(), player,
            AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
    TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, player,
            player.getMainHandler().getLooper());

    // Invoke the callback.
    TrackRenderer[] renderers = new TrackRenderer[ExoplayerWrapper.RENDERER_COUNT];
    renderers[ExoplayerWrapper.TYPE_VIDEO] = videoRenderer;
    renderers[ExoplayerWrapper.TYPE_AUDIO] = audioRenderer;
    renderers[ExoplayerWrapper.TYPE_TEXT] = textRenderer;
    player.onRenderers(renderers, bandwidthMeter);
}
 

@Override
protected void buildRenderers(RendererBuilderCallback callback) {
    Allocator allocator = new DefaultAllocator(bufferSegmentSize);
    DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource,
            allocator, bufferSegmentSize * bufferSegmentCount);
    MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
            sampleSource, MediaCodecSelector.DEFAULT,
            MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
            eventHandler, eventProxy, 50);
    MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
            MediaCodecSelector.DEFAULT, null, true, eventHandler, eventProxy,
            AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

    TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, eventProxy,
            eventHandler.getLooper());

    // Invoke the callback.
    TrackRenderer[] renderers = new TrackRenderer[Player.RENDERER_COUNT];
    renderers[Player.TYPE_VIDEO] = videoRenderer;
    renderers[Player.TYPE_AUDIO] = audioRenderer;
    renderers[Player.TYPE_TEXT] = textRenderer;
    callback.onRenderers(renderers, bandwidthMeter);
}
 

@Override
public void buildRenderers(DemoPlayer player, DemoPlayer.RendererBuilderCallback callback) {

    // Build the video and audio renderers.
    Log.d(TAG, "buildRenderers(): uri=" + uri.toString());

    Handler mainHandler = player.getMainHandler();

    RawExtractor extractor = null;
    if (playerType == DemoUtil.TYPE_RAW_HTTP_TS) {
        BufferPool bufferPool = new BufferPool(this.BUFFER_POOL_LENGTH);
        extractor = new TsExtractor(false, 0, bufferPool);
    }

    DataSource videoDataSource = new RawHttpDataSource(userAgent, RawHttpDataSource.REJECT_PAYWALL_TYPES);
    DataSource rawSource = new RawBufferedSource(videoDataSource);
    SampleSource sampleSource = new RawSampleSource(rawSource, this.uri, this.context, extractor);
    MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
            null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler,
            player, 50);
    MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);

    // Build the debug renderer.
    TrackRenderer debugRenderer = debugTextView != null
            ? new DebugTrackRenderer(debugTextView, videoRenderer)
            : null;

    // Invoke the callback.
    TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
    renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
    renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
    renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
    callback.onRenderers(null, null, renderers);
}
 

private void preparePlayer() {
    Log.d(TAG, "preparePlayer()");
    SampleSource sampleSource =
            new FrameworkSampleSource(this, Uri.parse(mVideo.getContentUrl()), /* headers */ null, RENDERER_COUNT);

    // Build the track renderers
    videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
    TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);

    // Setup the player
    player = ExoPlayer.Factory.newInstance(RENDERER_COUNT, 1000, 5000);
    player.addListener(this);
    player.prepare(videoRenderer, audioRenderer);
    if (mIsOnTv) {
        // This PlayerControl must stay in sync with PlaybackOverlayFragment.
        // We created methods such as PlaybackOverlayFragment.pressPlay() to request
        // that the fragment change the playback state. When the fragment receives a playback
        // request, it updates the UI and then calls a method in this activity according to
        // PlaybackOverlayFragment.OnPlayPauseClickedListener.
        playerControl = new PlayerControl(player);
    } else {
        // Build the player controls
        mediaController.setMediaPlayer(new PlayerControl(player));
        mediaController.setEnabled(true);
    }
    maybeStartPlayback();
}
 
Example 13 (Project: meiShi, File: VideoView.java)

public void start() {
    SampleSource sampleSource = createSource();
    videoRenderer = new MediaCodecVideoTrackRenderer(getContext(),
            sampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
            handler, this, 50);
    audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
            MediaCodecSelector.DEFAULT, null, true, handler, null,
            AudioCapabilities.getCapabilities(getContext()), AudioManager.STREAM_MUSIC);
    player.prepare(videoRenderer, audioRenderer);
    player.setPlayWhenReady(true);
    if (isAvailable()) {
        player.sendMessage(videoRenderer, MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, new Surface(getSurfaceTexture()));
    }
}
 
Example 14 (Project: ShareBox, File: DashRendererBuilder.java)

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 

@Override
public void onSingleManifest(SmoothStreamingManifest manifest) {
  if (canceled) {
    return;
  }

  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  // Check drm support if necessary.
  DrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (manifest.protectionElement != null) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newFrameworkInstance(
          manifest.protectionElement.uuid, player.getPlaybackLooper(), drmCallback, null,
          player.getMainHandler(), player);
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newAudioInstance(),
      audioDataSource, null, LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newTextInstance(),
      textDataSource, null, LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, false),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      null, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, null, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 
Example 17 (Project: WliveTV, File: DashRendererBuilder.java)

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true,
      mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
              new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
              player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
          DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
          videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
          elapsedRealtimeOffset, mainHandler, player, ExoplayerWrapper.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
          VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
          ExoplayerWrapper.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
          MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true,
          mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
          DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
          elapsedRealtimeOffset, mainHandler, player, ExoplayerWrapper.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
          AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
          ExoplayerWrapper.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
          MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
          AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
          DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
          elapsedRealtimeOffset, mainHandler, player, ExoplayerWrapper.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
          TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
          ExoplayerWrapper.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
          mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[ExoplayerWrapper.RENDERER_COUNT];
  renderers[ExoplayerWrapper.TYPE_VIDEO] = videoRenderer;
  renderers[ExoplayerWrapper.TYPE_AUDIO] = audioRenderer;
  renderers[ExoplayerWrapper.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 

private void buildRenderers() {
    Period period = manifest.getPeriod(0);
    Handler mainHandler = player.getMainHandler();
    LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(
            BUFFER_SEGMENT_SIZE));
    DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

    boolean hasContentProtection = false;
    for (int i = 0; i < period.adaptationSets.size(); i++) {
        AdaptationSet adaptationSet = period.adaptationSets.get(i);
        if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
            hasContentProtection |= adaptationSet.hasContentProtection();
        }
    }

    // Check drm support if necessary.
    boolean filterHdContent = false;
    StreamingDrmSessionManager drmSessionManager = null;
    if (hasContentProtection) {
        if (Util.SDK_INT < 18) {
            player.onRenderersError(
                    new UnsupportedDrmException(
                            UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
            return;
        }
        try {
            drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
                    player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(),
                    player);
            filterHdContent =
                    getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
        } catch (UnsupportedDrmException e) {
            player.onRenderersError(e);
            return;
        }
    }

    // Build the video renderer.
    DataSource videoDataSource =
            new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
            DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
            videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
            elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
    ChunkSampleSource videoSampleSource =
            new ChunkSampleSource(videoChunkSource, loadControl,
                    VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
                    DemoPlayer.TYPE_VIDEO);
    TrackRenderer videoRenderer =
            new MediaCodecVideoTrackRenderer(context, videoSampleSource,
                    MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
                    5000,
                    drmSessionManager, true, mainHandler, player, 50);

    // Build the audio renderer.
    DataSource audioDataSource =
            new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
            DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null,
            LIVE_EDGE_LATENCY_MS,
            elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
    ChunkSampleSource audioSampleSource =
            new ChunkSampleSource(audioChunkSource, loadControl,
                    AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
                    DemoPlayer.TYPE_AUDIO);
    TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
            MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
            AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

    // Build the text renderer.
    DataSource textDataSource =
            new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
            DefaultDashTrackSelector.newTextInstance(), textDataSource, null,
            LIVE_EDGE_LATENCY_MS,
            elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
    ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
            TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
            DemoPlayer.TYPE_TEXT);
    TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
            mainHandler.getLooper());

    // Invoke the callback.
    TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
    renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
    renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
    renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
    player.onRenderers(renderers, bandwidthMeter);
}
 

@Override
public void onSingleManifest(HlsPlaylist manifest) {
    if (canceled) {
        return;
    }

    Handler mainHandler = player.getMainHandler();
    LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
    DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();

    int[] variantIndices = null;
    if (manifest instanceof HlsMasterPlaylist) {
        HlsMasterPlaylist masterPlaylist = (HlsMasterPlaylist) manifest;
        try {
            variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
                    context, masterPlaylist.variants, null, false);
        } catch (DecoderQueryException e) {
            player.onRenderersError(e);
            return;
        }
        if (variantIndices.length == 0) {
            player.onRenderersError(new IllegalStateException("No variants selected."));
            return;
        }
    }

    DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
            variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE, audioCapabilities);
    HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
            BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, VideoPlayer.TYPE_VIDEO);
    MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
            MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
    MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
    MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>(
            sampleSource, new Id3Parser(), player, mainHandler.getLooper());
    Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,
            mainHandler.getLooper());

    TrackRenderer[] renderers = new TrackRenderer[VideoPlayer.RENDERER_COUNT];
    renderers[VideoPlayer.TYPE_VIDEO] = videoRenderer;
    renderers[VideoPlayer.TYPE_AUDIO] = audioRenderer;
    renderers[VideoPlayer.TYPE_METADATA] = id3Renderer;
    renderers[VideoPlayer.TYPE_TEXT] = closedCaptionRenderer;
    player.onRenderers(null, null, renderers, bandwidthMeter);
}