VisClient/org/hfbk/vid/AVStreamingThread.java

package org.hfbk.vid;

import java.nio.ByteBuffer;

import net.sf.ffmpeg_java.AVCodecLibrary;
import net.sf.ffmpeg_java.AVCodecLibrary.AVCodecContext;
import net.sf.ffmpeg_java.AVFormatLibrary.AVFormatContext;
import net.sf.ffmpeg_java.AVFormatLibrary.AVPacket;
import net.sf.ffmpeg_java.AVFormatLibrary.AVStream;

import org.hfbk.util.HTTPUtils;
import org.hfbk.util.Sleeper;
import org.hfbk.vis.Prefs;
import org.lwjgl.openal.AL;

import com.sun.jna.Pointer;
import com.sun.jna.ptr.PointerByReference;

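/**
 * Streams a media source via libavformat/libavcodec (through the
 * ffmpeg_java JNA bindings): opens the URL, picks one video and one
 * audio track, and demultiplexes packets into an AVVideoThread and
 * an AVAudioThread for decoding.
 */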
public class AVStreamingThread extends Thread {

    public boolean running = true;

    // threads decoding the stream's audio and video tracks
    AVAudioThread audioThread;
    AVVideoThread videoThread;

    // currently set media time
    //long time;

    // libavcodec structures
    AVFormatContext formatCtx; // to retrieve format information and fetch data

    // indices of the audio / video tracks
    int audioTrack, videoTrack;

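    /**
     * Opens the given URL and sets up the decoder threads.
     * maxPixels is passed on to the video decoder thread
     * (presumably to limit the decoded frame size).
     */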
    public AVStreamingThread(String url, int maxPixels) {
        super("Video: " + url);
        setDaemon(true);

        if (!url.matches("http.*")) url = HTTPUtils.decode(url);

        open(url, maxPixels);
        setPriority(Thread.MIN_PRIORITY + 2);

        // a nifty small thread to monitor the
        // amount of AVPackets waiting for decoding.
        if (Prefs.current.debug && audioThread != null && videoThread != null) new Thread("AVProfiler") {
            public void run() {
                while (running) {
                    Sleeper.sleep(100);
                    System.out.println("bytes enqueued a: " + audioThread.bytes + " v: " + videoThread.bytes);
                    System.out.println("         time    " + audioThread.time + "   " + videoThread.time);
                }
            }
        }.start();

    }

    // opens the media source, checking our capabilities to decode it,
    // and prepares for streaming.
    void open(final String url, int maxPixels) {
        // pointer to let avlib fill in the format context
        final PointerByReference ppFormatCtx = new PointerByReference();

        // Open video file
        if (AV.FORMAT.av_open_input_file(ppFormatCtx, url, null, 0, null) != 0)
            throw new RuntimeException("Couldn't open " + url);

        if (ppFormatCtx.getValue() == Pointer.NULL)
            throw new RuntimeException("Couldn't open (null) " + url);

        synchronized (AV.FORMAT) { // better only try one opening at a time
                                   // not sure about this!

            // build java class from the given context pointer
            formatCtx = new AVFormatContext(ppFormatCtx.getValue());

            synchronized (AV.CODEC) {
                // Retrieve stream information
                if (AV.FORMAT.av_find_stream_info(formatCtx) < 0)
                    throw new RuntimeException("No stream info");

                if (Prefs.current.verbose) // print nice media file format info
                    AV.FORMAT.dump_format(formatCtx, 0, url, 0);
            }

            // search for a usable video and audio track
            audioTrack = videoTrack = -1;
            AVCodecContext audioCtx = null, videoCtx = null;
            for (int i = 0; i < formatCtx.nb_streams; i++) {
                final AVStream stream = new AVStream(formatCtx.getStreams()[i]);
                AVCodecContext ctx = new AVCodecContext(stream.codec);

                if (ctx.codec_type == AVCodecLibrary.CODEC_TYPE_VIDEO && videoTrack < 0) {
                    videoTrack = i;
                    videoCtx = ctx;
                } else if (ctx.codec_type == AVCodecLibrary.CODEC_TYPE_AUDIO && audioTrack < 0) {
                    audioTrack = i;
                    audioCtx = ctx;
                }
            }

            // check whether we got audio/video and should use it
            if (!Prefs.current.sound || !AL.isCreated()) audioTrack = -1;

            if (audioTrack == -1 && videoTrack == -1)
                throw new RuntimeException("No audio/video stream @" + url);

            if (videoTrack > -1) try {
                videoThread = new AVVideoThread(videoCtx, maxPixels);
                if (Prefs.current.verbose) System.out.print(" got video. ");
            } catch (Exception e) {
                System.out.println(e);
            }

            if (audioTrack > -1) try {
                audioThread = new AVAudioThread(audioCtx) {
                    void tick(int time) {
                        if (videoThread != null) {
                            videoThread.time = time;
                        }
                    }
                };
                if (Prefs.current.verbose) System.out.print(" got audio. ");
            } catch (Exception e) {
                System.out.println(e);
            }
        }
    }
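    /**
     * Demultiplexer loop: reads AVPackets from the format context and
     * hands them to the matching audio or video decoder thread until
     * the stream ends or running is set to false.
     */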
    public void run() {
        AVPacket packet = new AVPacket();
        while (running && AV.FORMAT.av_read_frame(formatCtx, packet) >= 0) {

            if (packet.stream_index == videoTrack) {
                videoThread.add(packet);
                packet = videoThread.getFree();
            } else if (packet.stream_index == audioTrack) {
                audioThread.add(packet);
                packet = audioThread.getFree();
            }
        }
        if (Prefs.current.debug) System.out.println("Stream finished.");

        if (running) { // signal the stream threads that no further data is to be expected
            if (videoThread != null) videoThread.finished = true;
            if (audioThread != null) audioThread.finished = true;
            try { // and wait for them to finish
                if (videoThread != null) videoThread.join();
                if (audioThread != null) audioThread.join();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        running = false;

        close();
    }

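    /** Pauses or resumes playback on both decoder threads. */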
    public void setPlaying(boolean playing) {
        if (videoThread != null) videoThread.setPlaying(playing);
        if (audioThread != null) audioThread.setPlaying(playing);
    }

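    /** Starts this demultiplexer thread along with the audio and video decoder threads. */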
    public synchronized void start() {
        super.start();
        if (audioThread != null) audioThread.start();
        if (videoThread != null) videoThread.start();
    }

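    /**
     * Returns the most recently decoded video frame, or null if none is
     * ready. dt advances the video clock (scaled by 1000, presumably
     * seconds to milliseconds) when there is no audio track to sync
     * against. Implicitly starts the threads on the first call.
     */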
    public ByteBuffer getFrame(float dt) {
        if (getState() == Thread.State.NEW) start();

        // use the frame time if no exact audio sync is needed.
        if (audioThread == null) videoThread.time += (dt * 1000);

        //lastLifeSign=videoThread.time;

        ByteBuffer out = null;

        if (videoThread.out != null) {
            out = videoThread.out;
            videoThread.out = null;
        }

        return out;
    }

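    /** Returns the OpenAL source id of the audio playback thread, or 0 if there is no audio. */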
    public int getAudioSource() {
        if (audioThread != null) return audioThread.alThread.source;
        return 0;
    }

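    /** Stops the decoder threads, waits for them, and closes the libavformat input. */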
    protected void close() {
        try {
            if (videoThread != null) {
                videoThread.setRunning(false);
                videoThread.join();
            }
            if (audioThread != null) {
                audioThread.setRunning(false);
                audioThread.join();
            }

            if (formatCtx != null) synchronized (AV.FORMAT) {
                AV.FORMAT.av_close_input_file(formatCtx);
                formatCtx = null;
            }
            if (Prefs.current.debug) System.out.println("Stream down!");

        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

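    /** Safety net: if this object gets garbage collected, stop the streaming loop. */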
    protected void finalize() throws Throwable {
        // if disposed, we need to stop video streaming
        running = false;
    }
}

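A minimal usage sketch (not part of the generated listing), assuming the rest of the VisClient/ffmpeg_java setup is in place; the source URL, the pixel budget and the fixed frame delta are made-up example values:

import java.nio.ByteBuffer;

import org.hfbk.util.Sleeper;
import org.hfbk.vid.AVStreamingThread;

class StreamingExample {
    public static void main(String[] args) {
        // made-up source URL and pixel budget
        AVStreamingThread stream = new AVStreamingThread("http://example.org/clip.ogv", 512 * 512);

        float dt = 0.04f; // pretend we render at 25 fps
        while (stream.running) {
            // starts the demuxer and decoder threads on the first call
            ByteBuffer frame = stream.getFrame(dt);
            if (frame != null) {
                // hand the decoded pixel data to the renderer here
            }
            int alSource = stream.getAudioSource(); // OpenAL source id, 0 if no audio track
            Sleeper.sleep(40);
        }
    }
}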
