"Out of memory" exception when trying to write captured data to a file

Posted on 2024-10-15 09:34:22

>     **java -Xms64m -Xms64m VideoRecorder**
>     - number of capture devices: 3
>     Supported format : rgb, 24-bit, masks=3:2:1, pixelstride=-1, linestride=-1, flipped
>     Track 0 is set to transmit as:
>       RGB, 24-bit, Masks=3:2:1, PixelStride=-1, LineStride=-1, Flipped
>     Start datasource handler
>     Prefetch the processor
>     processor started
>     Exception in thread "VFW TransferDataThread" java.lang.OutOfMemoryError: Java heap space

Here is the code:

/*
 * VideoRecorder.java
 * 
 * Created on Mar 16, 2004
 *
 */
//package gov.nist.applet.phone.media.messaging;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Vector;

import javax.media.CaptureDeviceInfo;
import javax.media.CaptureDeviceManager;
import javax.media.ConfigureCompleteEvent;
import javax.media.Controller;
import javax.media.ControllerEvent;
import javax.media.ControllerListener;
import javax.media.EndOfMediaEvent;
import javax.media.Format;
import javax.media.IncompatibleSourceException;
import javax.media.Manager;
import javax.media.MediaLocator;
import javax.media.MediaTimeSetEvent;
import javax.media.PrefetchCompleteEvent;
import javax.media.Processor;
import javax.media.RealizeCompleteEvent;
import javax.media.ResourceUnavailableEvent;
import javax.media.SizeChangeEvent;
import javax.media.StopAtTimeEvent;
import javax.media.StopByRequestEvent;
import javax.media.control.TrackControl;
import javax.media.datasink.DataSinkErrorEvent;
import javax.media.datasink.DataSinkEvent;
import javax.media.datasink.DataSinkListener;
import javax.media.datasink.EndOfStreamEvent;
import javax.media.format.VideoFormat;
import javax.media.protocol.ContentDescriptor;
import javax.media.protocol.DataSource;
import javax.media.protocol.FileTypeDescriptor;

/**
 * Class allowing one to record some audio in a buffer
 * Play only MPEG_AUDIO and GSM audio data
 * With some minor modifications can play RAW data also
 * 
 * @author Jean Deruelle <[email protected]>
 *
 * <a href="{@docRoot}/uncopyright.html">This code is in the public domain.</a>
 */
public class VideoRecorder implements ControllerListener, DataSinkListener, Runnable{
    Processor p;
    Object waitSync = new Object();
    boolean stateTransitionOK = true;
    static boolean monitorOn = false;
    private MediaLocator videoLocator=null; 
    boolean bufferingDone = false;
    RawDataSourceHandler handler =null;
    Thread recorderThread=null;     
    DataSource ds = null;
    /**
     * get the devices for the audio capture and print their formats
     */
    protected void initialize() {       
        CaptureDeviceInfo videoCDI=null;
        Vector captureDevices=null;
        captureDevices= CaptureDeviceManager.getDeviceList(null);
        System.out.println("- number of capture devices: "+captureDevices.size() );
        CaptureDeviceInfo cdi=null;
        for (int i = 0; i < captureDevices.size(); i++) {
            cdi = (CaptureDeviceInfo) captureDevices.elementAt(i);      
            Format[] formatArray=cdi.getFormats();
            for (int j = 0; j < formatArray.length; j++) {
                Format format=formatArray[j];               
               if (format instanceof VideoFormat) {
                    if (videoCDI == null) {
                        videoCDI=cdi;
                    }
               }               
            }
        }
        if(videoCDI!=null)
            videoLocator=videoCDI.getLocator();
    }

    /**
     * Set the format of the tracks
     * either to MPEG_AUDIO or GSM
     */
    protected void setTrackFormat(){
        //Get the tracks from the processor
        TrackControl[] tracks = p.getTrackControls();

        // Do we have at least one track?
        if (tracks == null || tracks.length < 1)
            System.out.println("Couldn't find tracks in processor");

        // Set the output content descriptor to GSM
        // This will limit the supported formats reported from
        // Track.getSupportedFormats to only valid AVI formats.
        //p.setContentDescriptor(new FileTypeDescriptor(FileTypeDescriptor.MPEG_AUDIO));
        p.setContentDescriptor(new ContentDescriptor(ContentDescriptor.RAW));

        Format supported[];
        Format chosen=null;
        boolean atLeastOneTrack = false;

        // Program the tracks.
        for (int i = 0; i < tracks.length; i++) {
            Format format = tracks[i].getFormat();
            if (tracks[i].isEnabled()) {
                supported = tracks[i].getSupportedFormats();
                /*System.out.println("track : "+ i);
                for(int j=0;j<supported.length;j++)
                System.out.println("Supported format : "+supported[j].getEncoding());*/
                // We've set the output content to the GSM.            
                if (supported.length > 0) {
                    for(int j=0;j<supported.length;j++){
                        System.out.println("Supported format : "+supported[j].toString().toLowerCase());
                        if (supported[j] instanceof VideoFormat) {
                            if(supported[j].toString().toLowerCase().indexOf("rgb")!=-1){
                                chosen = supported[j];  
                                break;
                            }
                        }
                    }
                    if(chosen!=null){
                        tracks[i].setFormat(chosen);                
                        System.err.println("Track " + i + " is set to transmit as:");
                        System.err.println("  " + chosen);
                        atLeastOneTrack = true;
                    }
                    else{
                        System.err.println("Track " + i + " is set to transmit as nothing");
                    }
                } else
                    tracks[i].setEnabled(false);
            } else
                tracks[i].setEnabled(false);
        }
    }

    /**
     * Given a DataSource, create a processor and hook up the output
     * DataSource from the processor to a custom DataSink.
     * @return false if something wrong happened
     */
    protected boolean record() {        
        // Create a DataSource given the media locator.
        try {
            ds = Manager.createDataSource(videoLocator);
        } catch (Exception e) {
            System.err.println("Cannot create DataSource from: " + videoLocator);
            return false;
        }       

        try {
            p = Manager.createProcessor(ds);
        } catch (Exception e) {
            System.err.println("Failed to create a processor from the given DataSource: " + e);
            return false;
        }

        p.addControllerListener(this);

        // Put the Processor into configured state.
        p.configure();
        if (!waitForState(Processor.Configured)) {
            System.err.println("Failed to configure the processor.");
            return false;
        }
        setTrackFormat();
        /*ContentDescriptor[] descriptors = p.getSupportedContentDescriptors();
        for (int n = 0; n < descriptors.length; n++) {
            System.out.println("Desc: " + descriptors[n].toString());
        }*/
        // Get the raw output from the processor.
        //p.setContentDescriptor(new ContentDescriptor(ContentDescriptor.RAW));
        //p.setContentDescriptor(new FileTypeDescriptor(FileTypeDescriptor.MPEG_AUDIO));
        p.realize();
        if (!waitForState(Controller.Realized)) {
            System.err.println("Failed to realize the processor.");
            return false;
        }

        // Get the output DataSource from the processor and
        // hook it up to the RawDataSourceHandler.
        DataSource ods = p.getDataOutput();
        handler = new RawDataSourceHandler();

        try {
            handler.setSource(ods);
        } catch (IncompatibleSourceException e) {
            System.err.println("Cannot handle the output DataSource from the processor: " + ods);
            //return false;
        }
        System.err.println("Start datasource handler ");
        handler.addDataSinkListener(this);
        try{
            handler.setSource(ds);
            handler.start();
        }
        catch(IncompatibleSourceException ioe){
            ioe.printStackTrace();
        }
        System.err.println("Prefetch the processor ");
        // Prefetch the processor.
        p.prefetch();
        if (!waitForState(Controller.Prefetched)) {
            System.err.println("Failed to prefetch the processor.");
            return false;
        }       
        // Start the processor.
        p.start();          
        System.err.println("processor started");                

        return true;
    }

    /**
     * Block until file writing is done. 
     */
    /*private boolean waitForFileDone(double duration) {        
        synchronized (waitFileSync) {
            try {
                while (!bufferingDone) {
                    if(p.getMediaTime().getSeconds() > duration)
                        p.close();
                    waitFileSync.wait(500);
                    System.err.print(".");
                }
            } catch (Exception e) {}
        }
        bufferingDone=false;
        return true;
    }*/     

    /**
     * Block until the processor has transitioned to the given state.
     * @param state - the state to wait for
     * @return false if the transition failed.
     */
    protected boolean waitForState(int state) {
        synchronized (waitSync) {
            try {
            while (p.getState() < state && stateTransitionOK)
                waitSync.wait();
            } catch (Exception e) {}
        }
        return stateTransitionOK;
    }

    /**
     * Stop the voice recording
     */
    public void stop(){
        p.stop();
        bufferingDone=true;     
    }

    /**
     * Start the voice recording
     */
    public void start(){
        initialize();
        if(recorderThread==null){
            recorderThread=new Thread(this);
            recorderThread.setName("Voice Recorder Thread");
        }

        recorderThread.start();         
    }

    /**
     * the process of recording the voice
     */
    public void run(){
        boolean succeeded=record();
        if(!succeeded)
            return;
        while(!bufferingDone){
            try{
                recorderThread.sleep(1);
            }
            catch(InterruptedException ie){
                ie.printStackTrace();
            }
        }   
        try{
            Thread.sleep(100);
        }
        catch(InterruptedException ie){
            ie.printStackTrace();
        }   
        //Clean up
        System.err.println("closing datasource" );
        try{
            ds.stop();
        }
        catch(IOException ioe){
            ioe.printStackTrace();
        }
        ds.disconnect();                        
        System.err.println("closing processor" );
        p.close();
        p.removeControllerListener(this);
        recorderThread=null;
        System.err.println("closing handler" );
        handler.close();        
        System.err.println("...done Buffering.");
        bufferingDone=false;
    }

    /**
     * Controller Listener Method.
     * Allow one to know what happen on the recorder and the voice
     * @param evt - event received 
     */
    public void controllerUpdate(ControllerEvent evt) {
        //System.out.println("new Event received"+evt.getClass().getName());
        if (evt instanceof ConfigureCompleteEvent ||
            evt instanceof RealizeCompleteEvent ||
            evt instanceof PrefetchCompleteEvent) {
            synchronized (waitSync) {
                stateTransitionOK = true;
                waitSync.notifyAll();
            }
        } else if (evt instanceof ResourceUnavailableEvent) {
            synchronized (waitSync) {
                stateTransitionOK = false;
                waitSync.notifyAll();
            }
        } else if (evt instanceof EndOfMediaEvent) {
            System.err.println("closing datasource" );
            try{
                ds.stop();
            }
            catch(IOException ioe){
                ioe.printStackTrace();
            }
            ds.disconnect();                        
            System.err.println("closing controller");
            evt.getSourceController().close();
            //Clean up
            System.err.println("closing processor" );
            p.close();
            p.removeControllerListener(this);
            recorderThread=null;
            System.err.println("closing handler" );
            handler.close();        
            System.err.println("...done Buffering.");
            bufferingDone=true;
        } else if (evt instanceof SizeChangeEvent) {
        }
        else if (evt instanceof MediaTimeSetEvent) {
            System.err.println("- mediaTime set: " + 
            ((MediaTimeSetEvent)evt).getMediaTime().getSeconds());
        } else if (evt instanceof StopAtTimeEvent) {
            System.err.println("- stop at time: " +
            ((StopAtTimeEvent)evt).getMediaTime().getSeconds());
            //Clean up
            System.err.println("closing datasource" );
            try{
                ds.stop();
            }
            catch(IOException ioe){
                ioe.printStackTrace();
            }
            ds.disconnect();                        
            System.err.println("closing controller");
            evt.getSourceController().close();
            System.err.println("closing processor" );
            p.close();
            p.removeControllerListener(this);
            recorderThread=null;
            System.err.println("closing handler" );
            handler.close();        
            System.err.println("...done Buffering.");
            bufferingDone=true;
        }
        else if (evt instanceof StopByRequestEvent) {               
            //          Clean up
          System.err.println("closing datasource" );
          try{
              ds.stop();
          }
          catch(IOException ioe){
              ioe.printStackTrace();
          }
          ds.disconnect();
            System.err.println("closing controller");
            evt.getSourceController().close();                      
            System.err.println("closing processor" );
            p.close();
            p.removeControllerListener(this);
            recorderThread=null;
            System.err.println("closing handler" );
            handler.close();        
            System.err.println("...done Buffering.");
        }
    }

    /**
     * Get the recorded voice buffer 
     * @return the voice recorded in an array of bytes
     */
    public byte[] getRecord(){
        return handler.getRecordBuffer();
    }

    /**
     * DataSink Listener
     * @param evt - event received  
     */
    public void dataSinkUpdate(DataSinkEvent evt) {

        if (evt instanceof EndOfStreamEvent) {
            bufferingDone = true;   
            //waitFileSync.notifyAll();
            System.err.println("All done!");
            evt.getSourceDataSink().close();
            //System.exit(0);
        }
        else if (evt instanceof DataSinkErrorEvent) {
            //synchronized (waitFileSync) {
            bufferingDone = true;   
            evt.getSourceDataSink().close();            
                //waitFileSync.notifyAll();
            //}
        }
    }

    /**
     * Utility method to write a recorded voice buffer to a file
     * @param data -  the recorded voice
     */
    private static void writeBufferToFile(byte[] data){
        File f=new File("D://test.mov");
        FileOutputStream fos=null;
        try{
            fos=new FileOutputStream(f);
        }
        catch(FileNotFoundException fnfe){
            fnfe.printStackTrace();
        }
        try{
            fos.write(data);
        }
        catch(IOException ioe){
            ioe.printStackTrace();
        }
    }

    /**
     * Main program
     * @param args - 
     */
    public static void main(String [] args) {
        VideoRecorder videoRecorder = new VideoRecorder();

        //for(int i=0;i<2;i++){
            videoRecorder.start();      
            try{
                Thread.sleep(5000);
            }
            catch(InterruptedException ie){
                ie.printStackTrace();
            }
            videoRecorder.handler = new RawDataSourceHandler();
            //MyCam videoPlayer=new MyCam();    
            //videoRecorder.stop();             
            //videoPlayer.initialize();
            //videoPlayer.play();   
            //videoRecorder.initialize();
            //videoRecorder.play();

            try{
                Thread.sleep(5000);
            }
            catch(InterruptedException ie){
                ie.printStackTrace();
            }               
        //}
        writeBufferToFile(videoRecorder.getRecord());
    }
}


Comments (1)

一曲琵琶半遮面シ 2024-10-22 09:34:22


I would personally avoid using RawDataSourceHandler. It essentially just writes the captured data into an in-memory array, which would explain why you're running out of memory, especially when recording uncompressed 24-bit RGB.

Instead, I would recommend creating a temporary file, recording to a DataSink, then renaming the file into place later.
Here's my suggested sequence:

File tempFile = File.createTempFile("something",".someFormat");
MediaLocator dest = new MediaLocator(tempFile.toURI().toURL());

DataSink sink = Manager.createDataSink(p.getDataOutput(),dest);
sink.open();
sink.start();

p.start();

// Later:
p.stop();
tempFile.renameTo(new File("SomeNewName.someFormat"));
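
One caveat when adapting this to your record() method: a file-writing DataSink needs a multiplexed output, so the processor's content descriptor would have to be a file type rather than ContentDescriptor.RAW. Something along these lines (QUICKTIME is just an example; use whatever container your JMF install can actually mux):

// Assumption: replace the ContentDescriptor.RAW call in setTrackFormat()
// with a file-type descriptor so the processor's output can be written
// by a DataSink.
p.setContentDescriptor(new FileTypeDescriptor(FileTypeDescriptor.QUICKTIME));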

If you cannot do this due to insufficient permissions to write files from JMF (which is a different permission from the one required to write files with a stream), I would at least recommend using a smaller format than uncompressed RGB. (Might I suggest H263 instead?)
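
For example, here is a minimal sketch (reusing the p field and imports already in your VideoRecorder class) of a track-selection loop that prefers H263 over RGB. Treat it as a starting point only; the formats your capture device and installed codecs actually support will vary:

// Hypothetical variant of setTrackFormat(): pick an H263 format for each
// enabled video track instead of uncompressed RGB. Tracks with no H263
// support are simply left at their current format.
protected void setTrackFormatH263() {
    TrackControl[] tracks = p.getTrackControls();
    for (int i = 0; i < tracks.length; i++) {
        if (!tracks[i].isEnabled())
            continue;
        Format[] supported = tracks[i].getSupportedFormats();
        for (int j = 0; j < supported.length; j++) {
            if (supported[j] instanceof VideoFormat
                    && VideoFormat.H263.equalsIgnoreCase(supported[j].getEncoding())) {
                tracks[i].setFormat(supported[j]);  // far smaller than raw RGB
                break;
            }
        }
    }
}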

Also, you have some code oddities regarding the source of the handler:

    try {
        handler.setSource(ods);    // <-- You set the source once here
    } catch (IncompatibleSourceException e) {
        System.err.println("Cannot handle the output DataSource from the processor: " + ods);
        //return false;
    }
    System.err.println("Start datasource handler ");
    handler.addDataSinkListener(this);
    try{
        handler.setSource(ds);     // <-- Then change it again quickly here to the raw output from the capture device!
        handler.start();
    }
    catch(IncompatibleSourceException ioe){
        ioe.printStackTrace();
    }
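
If you do keep RawDataSourceHandler, here is a sketch of that section with the source set exactly once, to the processor's output. I've kept your exception handling as-is, since RawDataSourceHandler is your own class and I'm only inferring its API from the code you posted:

// Sketch: wire the handler to the processor's output DataSource only once.
DataSource ods = p.getDataOutput();
handler = new RawDataSourceHandler();
handler.addDataSinkListener(this);
try {
    handler.setSource(ods);   // the processor's output, not the raw capture DataSource
    handler.start();
} catch (IncompatibleSourceException e) {
    System.err.println("Cannot handle the output DataSource from the processor: " + ods);
    return false;
}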

Lastly, a non-obvious note about closing processors/players.
The sequence of closing is:
stop()
deallocate()
close()

The state proceeds as follows:

State: Controller.Running
  stop()
State: Controller.Prefetched
  deallocate()
State: Controller.Realized
  close()
State: Controller.Unrealized

Yes, you should wait for the processor/player to reach each state mentioned before proceeding. You will be sorry if you ignore this tip, as JMF leaks memory like you would not believe if you don't close things down properly, especially under Windows.

Here's my code to take care of this automagically:

import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.media.ControllerEvent;
import javax.media.ControllerListener;
import javax.media.Player;
import javax.media.bean.playerbean.MediaPlayer;   // JMF MediaPlayer bean

public class PlayerUtils {

    static public void cleanupPlayer(Player player) {
        if (player != null) {

            if (player.getState() == Player.Started) {
                player.stop();
                waitForState(player, Player.Prefetched);
            }

            if (player.getState() == Player.Prefetched) {
                player.deallocate();
                waitForState(player, Player.Realized);
            }

            player.close();
        }
    }

    static public void cleanupPlayer(MediaPlayer player) {
        if (player != null) {
            cleanupPlayer(player.getPlayer());
        }
    }

    static private void waitForState(Player player, int state) {
        // Fast abort
        if (player.getState() == state) {
            return;
        }

        long startTime = new Date().getTime();

        long timeout = 5 * 1000;

        final Object waitListener = new Object();

        ControllerListener cl = new ControllerListener() {

            @Override
            public void controllerUpdate(ControllerEvent ce) {
                synchronized (waitListener) {
                    waitListener.notifyAll();
                }
            }
        };
        try {
            player.addControllerListener(cl);

            // Make sure we wake up every 500ms to check for timeouts and in case we miss a signal
            synchronized (waitListener) {
                while (player.getState() != state && new Date().getTime() - startTime < timeout) {
                    try {
                        waitListener.wait(500);
                    }
                    catch (InterruptedException ex) {
                        Logger.getLogger(PlayerUtils.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            if (new Date().getTime() - startTime > timeout) {
                Logger.getLogger(PlayerUtils.class.getName()).log(Level.SEVERE, "Timed out waiting for state change from {0} to {1}", new Object[]{player.getState(), state});
            }
        }
        finally {
            // No matter what else happens, we want to remove this
            player.removeControllerListener(cl);
        }
    }   
}
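
A hypothetical call site: the cleanup code you repeat in VideoRecorder.controllerUpdate() and run() could then shrink to something like this (the Processor p is a Player, so it can be passed straight in):

// Hypothetical usage inside VideoRecorder's cleanup path:
try {
    ds.stop();
} catch (IOException ioe) {
    ioe.printStackTrace();
}
ds.disconnect();
PlayerUtils.cleanupPlayer(p);   // stop -> deallocate -> close, waiting for each state
handler.close();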