// ---------------------------------------------------------------------------
// RecordActivity state: recording pipeline configuration and capture buffers.
// ---------------------------------------------------------------------------
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;

// Destination of the recorded stream (an .flv file path, set in onCreate).
private String ffmpeg_link;

long startTime = 0;          // wall-clock ms when recording began; base for frame timestamps
boolean recording = false;   // toggled by the record button; polled by audio/video callbacks

private FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;

/* Filter information, change boolean to true if adding a filter */
private boolean addFilter = false;
private String filterString = "";
FFmpegFrameFilter filter;

// Capture parameters. imageWidth/imageHeight may be replaced in
// surfaceChanged() by the closest supported camera preview size.
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;

/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
// volatile: written from the UI thread, read in the audio thread's loop.
volatile boolean runAudioThread = true;

/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;

private Frame yuvImage = null;

/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;

/* The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
// Circular buffers used when RECORD_LENGTH > 0: pre-allocated video frames,
// their timestamps (microseconds), and captured audio chunks.
Frame[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);

    PermissionListener permissionlistener = new PermissionListener() {
        @Override
        public void onPermissionGranted() {
            //Toast.makeText(MainActivity.this, "Permission Granted", Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onPermissionDenied(ArrayList<String> deniedPermissions) {
            //Toast.makeText(MainActivity.this, "Permission Denied\n" + deniedPermissions.toString(), Toast.LENGTH_SHORT).show();
            // Re-prompt with this same listener so the storage/camera/mic
            // permissions the recorder needs are requested again on denial.
            new TedPermission(MainActivity.this)
                    .setPermissionListener(this)
                    .setDeniedMessage("If you reject permission,you can not use this service\n\nPlease turn on permissions at [Setting] > [Permission]")
                    .setPermissions(Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO)
                    .check();
        }
    };

    // Initial permission request.
    new TedPermission(this)
            .setPermissionListener(permissionlistener)
            .setDeniedMessage("If you reject permission,you can not use this service\n\nPlease turn on permissions at [Setting] > [Permission]")
            .setPermissions(Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO)
            .check();

    // Record into the public Movies directory as an FLV stream.
    ffmpeg_link = Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_MOVIES
    ).getAbsolutePath() + "/stream.flv";

    setContentView(R.layout.activity_main);

    initLayout();
}
// NOTE(review): the enclosing method's declaration (presumably something
// like `initRecorder()`) is not visible in this chunk; the statements
// below are its body, ending at the closing brace on the last line.
Log.w(LOG_TAG, "init recorder");

if (RECORD_LENGTH > 0) {
    // Looping mode: pre-allocate one Frame per expected video frame so the
    // preview callback can reuse them without per-frame allocation.
    imagesIndex = 0;
    images = new Frame[RECORD_LENGTH * frameRate];
    timestamps = new long[images.length];
    for (int i = 0; i < images.length; i++) {
        images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
        timestamps[i] = -1;   // -1 marks a slot that has not been filled yet
    }
} else if (yuvImage == null) {
    // Non-looping mode: a single reusable frame buffer is enough.
    yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
    Log.i(LOG_TAG, "create yuvImage");
}

Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
// 1 audio channel (mono), matching the AudioRecord configuration.
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("flv");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);

// The filterString is any ffmpeg filter.
// Here is the link for a list: https://ffmpeg.org/ffmpeg-filters.html
filterString = "transpose=0";
filter = new FFmpegFrameFilter(filterString, imageWidth, imageHeight);
//default format on android
filter.setPixelFormat(avutil.AV_PIX_FMT_NV21);

Log.i(LOG_TAG, "recorder initialize success");

audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
/**
 * Intercepts the BACK key: stops any in-progress recording, then finishes
 * the activity. Every other key is delegated to the superclass handler.
 */
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
    if (keyCode != KeyEvent.KEYCODE_BACK) {
        return super.onKeyDown(keyCode, event);
    }
    if (recording) {
        stopRecording();
    }
    finish();
    return true;
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {

    /**
     * Captures PCM audio from the microphone and, while {@code recording}
     * is true (and not in looping mode), feeds it to the recorder.
     * The loop runs until {@code runAudioThread} is cleared, then stops
     * and releases the AudioRecord.
     */
    @Override
    public void run() {
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

        // Audio
        int bufferSize;
        ShortBuffer audioData;
        int bufferReadResult;

        bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

        if (RECORD_LENGTH > 0) {
            // Looping mode: pre-allocate enough chunk buffers to hold
            // RECORD_LENGTH seconds of mono 16-bit audio (+1 for slack).
            samplesIndex = 0;
            samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
            for (int i = 0; i < samples.length; i++) {
                samples[i] = ShortBuffer.allocate(bufferSize);
            }
        } else {
            // Non-looping mode: one reusable buffer.
            audioData = ShortBuffer.allocate(bufferSize);
        }

        Log.d(LOG_TAG, "audioRecord.startRecording()");
        audioRecord.startRecording();

        /* ffmpeg_audio encoding loop */
        while (runAudioThread) {
            if (RECORD_LENGTH > 0) {
                // Rotate through the circular buffer of sample chunks.
                audioData = samples[samplesIndex++ % samples.length];
                audioData.position(0).limit(0);
            }
            //Log.v(LOG_TAG,"recording? " + recording);
            bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
            audioData.limit(bufferReadResult);
            if (bufferReadResult > 0) {
                Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);

                // If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
                // Why? Good question...
                if (recording) {
                    // In looping mode the chunks are kept in `samples` and
                    // written out later; only non-looping mode records now.
                    if (RECORD_LENGTH <= 0) try {
                        recorder.recordSamples(audioData);
                        //Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
                    } catch (FFmpegFrameRecorder.Exception e) {
                        Log.v(LOG_TAG,e.getMessage());
                        e.printStackTrace();
                    }
                }
            }
        }
        Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");

        /* encoding finish, release recorder */
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
            Log.v(LOG_TAG,"audioRecord released");
        }
    }
}
//--------------------------------------------- // camera thread, gets and encodes video data //--------------------------------------------- class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;

/**
 * Builds the preview surface and registers this view as both the
 * surface-lifecycle callback and the camera's per-frame preview callback.
 */
public CameraView(Context context, Camera camera) {
    super(context);
    Log.w("camera", "camera view");

    mCamera = camera;
    mHolder = getHolder();
    mHolder.addCallback(this);
    // Deprecated on modern Android, but needed on old API levels for
    // camera preview surfaces.
    mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    mCamera.setPreviewCallback(this);
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { stopPreview(); Camera.Parameters camParams = mCamera.getParameters(); List<Camera.Size> sizes = camParams.getSupportedPreviewSizes(); // Sort the list in ascending order Collections.sort(sizes, new Comparator<Camera.Size>() {
public int compare(final Camera.Size a, final Camera.Size b) { return a.width * a.height - b.width * b.height; } }); // Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot // reach the initial settings of imageWidth/imageHeight. for (int i = 0; i < sizes.size(); i++) { if ((sizes.get(i).width >= imageWidth && sizes.get(i).height >= imageHeight) || i == sizes.size() - 1) { imageWidth = sizes.get(i).width; imageHeight = sizes.get(i).height; Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth + "x" + imageHeight); break; } } camParams.setPreviewSize(imageWidth, imageHeight); Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate); camParams.setPreviewFrameRate(frameRate); Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate()); mCamera.setParameters(camParams); // Set the holder (which might have changed) again try { mCamera.setPreviewDisplay(holder); mCamera.setPreviewCallback(CameraView.this); startPreview(); } catch (Exception e) { Log.e(LOG_TAG, "Could not set preview display in surfaceChanged"); } }
@Override public void surfaceDestroyed(SurfaceHolder holder) { try { mHolder.addCallback(null); mCamera.setPreviewCallback(null); } catch (RuntimeException e) { // The camera has probably just been released, ignore. } }
/** Starts the camera preview when a camera is attached and it is not already running. */
public void startPreview() {
    if (isPreviewOn || mCamera == null) {
        return;
    }
    isPreviewOn = true;
    mCamera.startPreview();
}
/** Stops the camera preview when it is currently running. */
public void stopPreview() {
    if (!isPreviewOn || mCamera == null) {
        return;
    }
    isPreviewOn = false;
    mCamera.stopPreview();
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    // Until the audio thread is actually recording, keep resetting the
    // timestamp base so audio and video share the same clock origin.
    if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        startTime = System.currentTimeMillis();
        return;
    }
    if (RECORD_LENGTH > 0) {
        // Looping mode: write into the next slot of the circular frame
        // buffer and remember its timestamp in microseconds.
        int i = imagesIndex++ % images.length;
        yuvImage = images[i];
        timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
    }
    /* get video data */
    if (yuvImage != null && recording) {
        // Copy the NV21 preview bytes into the frame's backing buffer.
        ((ByteBuffer)yuvImage.image[0].position(0)).put(data);

        if (RECORD_LENGTH <= 0) try {
            Log.v(LOG_TAG,"Writing Frame");

            // Timestamp in microseconds; never move the recorder clock backwards.
            long t = 1000 * (System.currentTimeMillis() - startTime);
            if (t > recorder.getTimestamp()) {
                recorder.setTimestamp(t);
            }
            // NOTE(review): this method appears truncated in this chunk —
            // the try block above is never closed here; the remainder
            // (presumably the record call and catch clause) is not visible.
@Override public void onClick(View v) { if (!recording) { startRecording(); Log.w(LOG_TAG, "Start Button Pushed"); btnRecorderControl.setText("Stop"); } else { // This will trigger the audio recording loop to stop and then set isRecorderStart = false; stopRecording(); Log.w(LOG_TAG, "Stop Button Pushed"); btnRecorderControl.setText("Start"); } } }