I need to record audio and video in 3gp/mp4 format into the same file at the same time. When I run the program, my application creates videofile.3gp, but no video is actually recorded to the emulator's SD card. Does video recording work on the emulator at all? And if I run this code on a real Android-powered device, will I still see these errors?
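I am assuming the emulator's SD card image is mounted and writable. As a sanity check (only a sketch, this is not in the code below), I could verify that before creating the output file:

// Sketch: check that external storage is mounted read/write before recording to it.
// Environment, Log and TAG are the same ones used in the class below.
private boolean isExternalStorageWritable() {
    String state = Environment.getExternalStorageState();
    if (!Environment.MEDIA_MOUNTED.equals(state)) {
        Log.e(TAG, "External storage not mounted read/write, state = " + state);
        return false;
    }
    return true;
}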
Code and errors below:
package com.video;
/*
*
* @copy Rights
* audio.java
* sample code for Eminosoft Developerworks Article
* Android development Team
* www.eminosoft.cm
*
*/
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.provider.Settings;
import android.util.Log;
import android.view.View;
import android.widget.Button;
public class video extends Activity {
    public MediaRecorder mrec = null;
    private Button startRecording = null;
    private Button stopRecording = null;
    private static final String TAG = "SoundRecordingDemo";
    File audiofile;
    File video;
    private MediaPlayer mMediaPlayer;
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        mrec = new MediaRecorder();
        mMediaPlayer = new MediaPlayer();
        Log.i(TAG, "Video starting");
        startRecording = (Button) findViewById(R.id.startrecording);
        stopRecording = (Button) findViewById(R.id.stoprecording);
        startRecording.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                try {
                    mMediaPlayer = new MediaPlayer();
                    Context appContext = getApplicationContext();
                    startRecording.setEnabled(false);
                    stopRecording.setEnabled(true);
                    stopRecording.requestFocus();
                    startRecording();
                } catch (Exception ee) {
                    Log.e(TAG, "Caught io exception " + ee.getMessage());
                }
            }
        });
        stopRecording.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                startRecording.setEnabled(true);
                stopRecording.setEnabled(false);
                startRecording.requestFocus();
                stopRecording();
                processaudiofile();
            }
        });
        stopRecording.setEnabled(false);
        startRecording.setEnabled(true);
    }
    protected void processaudiofile() {
        ContentValues values = new ContentValues(4);
        long current = System.currentTimeMillis();
        values.put(MediaStore.Video.Media.TITLE, "video" + video.getName());
        values.put(MediaStore.Video.Media.DATE_ADDED, (int) (current / 1000));
        values.put(MediaStore.Video.Media.MIME_TYPE, "video/3gpp");
        values.put(MediaStore.Video.Media.DATA, video.getAbsolutePath());
        ContentResolver contentResolver = getContentResolver();
        Uri base = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
        Uri newUri = contentResolver.insert(base, values);
        // this does not always seem to work cleanly (see the note after the listing)
        //sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, newUri));
    }
    protected void startRecording() throws IOException {
        // Configure the input sources
        mrec.setAudioSource(MediaRecorder.AudioSource.MIC);
        mrec.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        // Set the output format
        mrec.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mrec.setVideoFrameRate(15);
        // Specify the audio and video encoding
        mrec.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        mrec.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
        //mrec.setMaxDuration(20000); // length of video in ms
        //mrec.setVideoSize(320, 240);
        // Earlier attempts at specifying the output file:
        //mrec.setOutputFile("/sdcard/myoutputfile.mp4");
        //mrec.setOutputFile(video.getPath());
        //mrec.setOutputFile("/sdcard/yousuck2.3gp");
        // Specify the output file
        if (video == null) {
            File sampleDir = Environment.getExternalStorageDirectory();
            try {
                video = File.createTempFile("videofile", ".3gp", sampleDir);
            } catch (IOException e) {
                Log.e(TAG, "sdcard access error");
                return;
            }
        }
        mrec.setOutputFile(video.getAbsolutePath());
        // Prepare to record
        mrec.prepare();
        mrec.start();
    }
    protected void stopRecording() {
        mrec.stop();
        mrec.release();
    }
}
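Side note on the comment in processaudiofile() above ("this does not always seem to work cleanly"): an alternative I have seen suggested is MediaScannerConnection.scanFile(), though it needs API level 8+ and an extra import (android.media.MediaScannerConnection), so this is only a sketch:

// Sketch: ask the media scanner to index the finished file directly,
// instead of broadcasting ACTION_MEDIA_SCANNER_SCAN_FILE.
MediaScannerConnection.scanFile(
        getApplicationContext(),
        new String[] { video.getAbsolutePath() },
        new String[] { "video/3gpp" },
        null); // no completion callback needed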
I am getting the following errors:
ERROR/AndroidRuntime(16055): ERROR: thread attach failed
ERROR/audio_input(31): unsupported parameter: x-pvmf/media-input-node/cap-config-interface;valtype=key_specific_value
ERROR/audio_input(31): VerifyAndSetParameter failed
ERROR/CameraInput(31): Unsupported parameter(x-pvmf/media-input-node/cap-config-interface;valtype=key_specific_value)
ERROR/CameraInput(31): VerifiyAndSetParameter failed on parameter #0
ERROR/PVOMXEncNode(31): PVMFOMXEncNode-Audio_AMRNB::DoPrepare(): Got Component OMX.PV.amrencnb handle
ERROR/PVOMXEncNode(31): PVMFOMXEncNode-Video_AVC::DoPrepare(): Cannot get component OMX.PV.avcenc handle, try another component if available
ERROR/MediaPlayerService(31): error: -2
ERROR/MediaPlayer(31): Unable to to create media player
ERROR/CameraService(31): Failed to load CameraService sounds.
ERROR/MediaPlayerService(31): error: -2
ERROR/MediaPlayer(31): Unable to to create media player
ERROR/CameraService(31): Failed to load CameraService sounds.
ERROR/CameraInput(31): No surface is available for display.
ERROR/AuthorDriver(31): Command 13 completed with error -1
ERROR/SoundRecordingDemo(15961): Caught io exception prepare failed.
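Given the "No surface is available for display." line, I suspect the main thing my code is missing is a camera preview surface before prepare(). My understanding of the call order MediaRecorder expects is roughly the following (only a sketch; the SurfaceView/SurfaceHolder used here does not exist in my code above and would need android.view.SurfaceHolder imported):

// Sketch of the expected MediaRecorder call order.
// 'previewSurfaceView' is a hypothetical SurfaceView from the layout, not part of my code.
SurfaceHolder holder = previewSurfaceView.getHolder();

mrec.setAudioSource(MediaRecorder.AudioSource.MIC);
mrec.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mrec.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mrec.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mrec.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mrec.setVideoFrameRate(15);
mrec.setOutputFile(video.getAbsolutePath());
mrec.setPreviewDisplay(holder.getSurface()); // the step I am not doing
mrec.prepare();
mrec.start();

Is that the missing piece, or does the emulator simply not support camera/video capture?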