Android: record multiple audio streams at a time?
AFAIK, this is not possible unless you have multiple microphones. The reason is that audio recording is a synchronized operation: only one client can hold the microphone input at a time.
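A minimal sketch that illustrates the limitation (the exact failure mode of the second client varies by device and Android version):
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

// Requires the RECORD_AUDIO permission; both instances target the same microphone.
int sampleRate = 44100;
int minBuf = AudioRecord.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

AudioRecord first = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf);
first.startRecording();

AudioRecord second = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf);
if (second.getState() == AudioRecord.STATE_INITIALIZED) {
    second.startRecording();
    // Even when startRecording() does not throw, the second client usually
    // delivers only silence while the first one holds the microphone.
}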
Trouble saving a video file with WebRTC in Android
Video-only recording
I had a similar case in my project. At first I tried WebRTC's default VideoFileRenderer, but the output file was huge because no compression is applied.
I found this repository, and it really helped in my case:
https://github.com/cloudwebrtc/flutter-webrtc
Here is a step-by-step guide; I've also made some adjustments.
Add this class to your project. It has lots of options to configure the final video format.
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import org.webrtc.EglBase;
import org.webrtc.GlRectDrawer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoFrameDrawer;
import org.webrtc.VideoSink;
import java.io.IOException;
import java.nio.ByteBuffer;
class FileEncoder implements VideoSink {
    private static final String TAG = "FileRenderer";
    private final HandlerThread renderThread;
    private final Handler renderThreadHandler;
    private int outputFileWidth = -1;
    private int outputFileHeight = -1;
    private ByteBuffer[] encoderOutputBuffers;
    private EglBase eglBase;
    private EglBase.Context sharedContext;
    private VideoFrameDrawer frameDrawer;
    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 30; // 30fps
    private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
    private MediaMuxer mediaMuxer;
    private MediaCodec encoder;
    private MediaCodec.BufferInfo bufferInfo;
    private int trackIndex = -1;
    private boolean isRunning = true;
    private GlRectDrawer drawer;
    private Surface surface;

    FileEncoder(String outputFile, final EglBase.Context sharedContext) throws IOException {
        renderThread = new HandlerThread(TAG + "RenderThread");
        renderThread.start();
        renderThreadHandler = new Handler(renderThread.getLooper());
        bufferInfo = new MediaCodec.BufferInfo();
        this.sharedContext = sharedContext;
        mediaMuxer = new MediaMuxer(outputFile,
                MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    }
    private void initVideoEncoder() {
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, 1280, 720);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        try {
            encoder = MediaCodec.createEncoderByType(MIME_TYPE);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            renderThreadHandler.post(() -> {
                eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE);
                surface = encoder.createInputSurface();
                eglBase.createSurface(surface);
                eglBase.makeCurrent();
                drawer = new GlRectDrawer();
            });
        } catch (Exception e) {
            Log.wtf(TAG, e);
        }
    }
    @Override
    public void onFrame(VideoFrame frame) {
        frame.retain();
        if (outputFileWidth == -1) {
            outputFileWidth = frame.getRotatedWidth();
            outputFileHeight = frame.getRotatedHeight();
            initVideoEncoder();
        }
        renderThreadHandler.post(() -> renderFrameOnRenderThread(frame));
    }

    private void renderFrameOnRenderThread(VideoFrame frame) {
        if (frameDrawer == null) {
            frameDrawer = new VideoFrameDrawer();
        }
        frameDrawer.drawFrame(frame, drawer, null, 0, 0, outputFileWidth, outputFileHeight);
        frame.release();
        drainEncoder();
        eglBase.swapBuffers();
    }
    /**
     * Release all resources. All already posted frames will be rendered first.
     */
    void release() {
        isRunning = false;
        renderThreadHandler.post(() -> {
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            eglBase.release();
            mediaMuxer.stop();
            mediaMuxer.release();
            renderThread.quit();
        });
    }
    private boolean encoderStarted = false;
    private volatile boolean muxerStarted = false;
    private long videoFrameStart = 0;

    private void drainEncoder() {
        if (!encoderStarted) {
            encoder.start();
            encoderOutputBuffers = encoder.getOutputBuffers();
            encoderStarted = true;
            return;
        }
        while (true) {
            int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                break;
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = encoder.getOutputBuffers();
                Log.e(TAG, "encoder output buffers changed");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // happens exactly once, before any encoded data arrives;
                // this is where the muxer learns the actual track format
                MediaFormat newFormat = encoder.getOutputFormat();
                Log.e(TAG, "encoder output format changed: " + newFormat);
                trackIndex = mediaMuxer.addTrack(newFormat);
                if (!muxerStarted) {
                    mediaMuxer.start();
                    muxerStarted = true;
                }
            } else if (encoderStatus < 0) {
                Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else { // encoderStatus >= 0
                try {
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
                        break;
                    }
                    // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                    encodedData.position(bufferInfo.offset);
                    encodedData.limit(bufferInfo.offset + bufferInfo.size);
                    // Rebase presentation timestamps so the file starts at zero.
                    if (videoFrameStart == 0 && bufferInfo.presentationTimeUs != 0) {
                        videoFrameStart = bufferInfo.presentationTimeUs;
                    }
                    bufferInfo.presentationTimeUs -= videoFrameStart;
                    if (muxerStarted)
                        mediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
                    isRunning = isRunning && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
                    encoder.releaseOutputBuffer(encoderStatus, false);
                    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        break;
                    }
                } catch (Exception e) {
                    Log.wtf(TAG, e);
                    break;
                }
            }
        }
    }
}
Now, in your Activity/Fragment class:
Declare a field of the above type.
FileEncoder recording;
When you receive the stream you want to record (remote or local), you can initialize the recording. Note that the FileEncoder constructor throws IOException:
try {
    recording = new FileEncoder("path/to/video", rootEglBase.getEglBaseContext());
    remoteVideoTrack.addSink(recording);
} catch (IOException e) {
    e.printStackTrace();
}
When the call session ends, you need to stop and release the recording.
remoteVideoTrack.removeSink(recording);
recording.release();
This is enough to record the video but without audio.
Video & Audio recording
To record the local peer's audio, you need to use this class: https://webrtc.googlesource.com/src/+/master/examples/androidapp/src/org/appspot/apprtc/RecordedAudioToFileController.java. But first you need to set up an AudioDeviceModule object:
AudioDeviceModule adm = createJavaAudioDevice();
peerConnectionFactory = PeerConnectionFactory.builder()
        .setOptions(options)
        .setAudioDeviceModule(adm)
        .setVideoEncoderFactory(defaultVideoEncoderFactory)
        .setVideoDecoderFactory(defaultVideoDecoderFactory)
        .createPeerConnectionFactory();
adm.release();
private AudioDeviceModule createJavaAudioDevice() {
    // Implement AudioRecordErrorCallback
    // Implement AudioTrackErrorCallback
    return JavaAudioDeviceModule.builder(this)
            .setSamplesReadyCallback(audioRecorder)
            // The default audio source is VOICE_COMMUNICATION, which is good for
            // VoIP sessions; change it if you need a different source.
            .setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION)
            .setAudioRecordErrorCallback(audioRecordErrorCallback)
            .setAudioTrackErrorCallback(audioTrackErrorCallback)
            .createAudioDeviceModule();
}
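For reference, the audioRecorder passed to setSamplesReadyCallback above is an instance of RecordedAudioToFileController (or something like it). A minimal, hypothetical sketch of such a callback, stripped of the start/stop and file-size handling the real class has:
import org.webrtc.audio.JavaAudioDeviceModule;
import java.io.FileOutputStream;
import java.io.IOException;

// Simplified stand-in for RecordedAudioToFileController: dumps the raw PCM
// that WebRTC captures from the microphone into a file.
class PcmFileRecorder implements JavaAudioDeviceModule.SamplesReadyCallback {
    private final FileOutputStream out;

    PcmFileRecorder(String path) throws IOException {
        out = new FileOutputStream(path);
    }

    @Override
    public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples samples) {
        try {
            // Keep samples.getSampleRate() and samples.getChannelCount() around:
            // you need them to wrap the raw PCM in a playable container later.
            out.write(samples.getData());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    void stop() throws IOException {
        out.close();
    }
}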
Merge audio and video
Add this dependency
implementation 'com.googlecode.mp4parser:isoparser:1.1.22'
Then add this piece to your code when the call finishes. Make sure that both the video and the audio recording have been stopped and released properly. One caveat: MovieCreator can only parse container formats it understands, so the raw PCM written by the audio recorder must be converted first (see the sketch after this snippet).
import com.coremedia.iso.boxes.Container;
import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.Track;
import com.googlecode.mp4parser.authoring.builder.DefaultMp4Builder;
import com.googlecode.mp4parser.authoring.container.mp4.MovieCreator;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;

try {
    Movie video = MovieCreator.build("path/to/recorded/video");
    Movie audio = MovieCreator.build("path/to/recorded/audio");
    Track audioTrack = audio.getTracks().get(0);
    video.addTrack(audioTrack);
    Container out = new DefaultMp4Builder().build(video);
    FileChannel fc = new FileOutputStream(new File("path/to/final/output")).getChannel();
    out.writeContainer(fc);
    fc.close();
} catch (IOException e) {
    e.printStackTrace();
}
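As noted above, MovieCreator cannot read the raw PCM dump directly. Here is a minimal sketch that wraps it into an AAC/MP4 file first, using MediaCodec and MediaMuxer; the class name is hypothetical, and sampleRate/channelCount must match what the samples-ready callback reported:
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

class PcmToAac {
    // Encodes raw 16-bit PCM into an AAC track inside an MP4 container.
    static void convert(String pcmPath, String mp4Path,
                        int sampleRate, int channelCount) throws IOException {
        MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", sampleRate, channelCount);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 128000);

        MediaCodec codec = MediaCodec.createEncoderByType("audio/mp4a-latm");
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();

        MediaMuxer muxer = new MediaMuxer(mp4Path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        FileInputStream pcm = new FileInputStream(pcmPath);
        byte[] chunk = new byte[4096];
        int track = -1;
        long presentationUs = 0;
        boolean inputDone = false, outputDone = false;

        while (!outputDone) {
            if (!inputDone) {
                int inIndex = codec.dequeueInputBuffer(10000);
                if (inIndex >= 0) {
                    int read = pcm.read(chunk);
                    if (read < 0) {
                        codec.queueInputBuffer(inIndex, 0, 0, presentationUs,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                    } else {
                        ByteBuffer in = codec.getInputBuffer(inIndex);
                        in.clear();
                        in.put(chunk, 0, read);
                        codec.queueInputBuffer(inIndex, 0, read, presentationUs, 0);
                        // 2 bytes per sample per channel for 16-bit PCM.
                        presentationUs += 1000000L * (read / (2 * channelCount)) / sampleRate;
                    }
                }
            }
            int outIndex = codec.dequeueOutputBuffer(info, 10000);
            if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                track = muxer.addTrack(codec.getOutputFormat());
                muxer.start();
            } else if (outIndex >= 0) {
                // Skip the codec-config buffer; the muxer got it via the format.
                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0 && info.size > 0) {
                    muxer.writeSampleData(track, codec.getOutputBuffer(outIndex), info);
                }
                codec.releaseOutputBuffer(outIndex, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    outputDone = true;
                }
            }
        }
        pcm.close();
        codec.stop();
        codec.release();
        muxer.stop();
        muxer.release();
    }
}
After conversion, use the resulting .mp4 path as "path/to/recorded/audio" in the merge snippet above.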
I know this isn't the best solution for recording audio and video in an Android WebRTC video call. If someone knows how to extract audio using WebRTC, please add a comment.