This article shares the code for a custom Android Camera that implements video recording and photo capture, for your reference. The details are as follows.
Source code:
package com.example.camerarecorder; // The original package name was lost; replace with your own application package

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.List;

import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.Size;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;

/**
 * Video recording
 */
@SuppressWarnings("deprecation")
public class MainActivity extends Activity implements OnClickListener,
        SensorEventListener, Callback {

    private SurfaceView surfaceView;  // Used to draw the preview frames
    private Button luXiang_bt;        // Button that starts recording
    private Button tingZhi_bt;        // Button that stops recording
    private Button auto_focus;        // Focus button
    private Button screenshot;        // Photo button
    private TextView time_tv;         // Text view showing the elapsed time
    private MediaRecorder mRecorder;
    private boolean recording;        // Whether a video is being recorded: false = not recording, true = recording
    private File videoFolder;         // Folder where the videos are stored
    private File videFile;            // Current video file
    private Handler handler;
    private int time;                 // Elapsed time in seconds
    private Camera myCamera;          // Camera instance
    private SurfaceHolder holder;     // Interface used to access the SurfaceView
    private SensorManager sManager;   // Sensor manager
    private Sensor sensor;            // Sensor object
    private int mX, mY, mZ;           // Last x, y, z acceleration values
    private Calendar calendar;        // Calendar
    private long lasttimestamp = 0;   // Timestamp of the last refocus

    /**
     * Updates the elapsed time once per second while recording
     */
    private Runnable timeRun = new Runnable() {
        @Override
        public void run() {
            time++;
            time_tv.setText(time + " seconds");
            handler.postDelayed(timeRun, 1000);
        }
    };

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); // Force landscape
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_main);
        initView();
        initSensor();
        initCreateFile();
    }

    /**
     * Initialize the sensor
     */
    private void initSensor() {
        sManager = (SensorManager) getSystemService(SENSOR_SERVICE);
        sensor = sManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
        if (sManager == null) {
            // throw new IllegalArgumentException("SensorManager is null");
        }
        sManager.registerListener(this, sensor, SensorManager.SENSOR_DELAY_NORMAL);
    }

    /**
     * Create the output folder
     */
    private void initCreateFile() {
        // Check whether the SD card is mounted
        boolean sdCardExist = Environment.getExternalStorageState().equals(
                Environment.MEDIA_MOUNTED);
        if (sdCardExist) {
            // Path of the folder where the videos are stored
            String path = Environment.getExternalStorageDirectory()
                    .getAbsolutePath() + File.separator + "VideoFolder" + File.separator;
            // File object of the video folder
            videoFolder = new File(path);
            // Create the folder if it does not exist
            if (!videoFolder.exists()) {
                videoFolder.mkdirs();
            }
            // Tell the SurfaceView not to manage its own buffers
            surfaceView.getHolder().setType(
                    SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
            // Set the SurfaceView resolution
            // surfaceView.getHolder().setFixedSize(1000, 500);
            luXiang_bt.setOnClickListener(this);
        } else {
            Toast.makeText(this, "SdCard not found!", Toast.LENGTH_LONG).show();
        }
    }

    /**
     * Initialize the views
     */
    private void initView() {
        // Find the views
        surfaceView = (SurfaceView) findViewById(R.id.surfaceView);
        luXiang_bt = (Button) findViewById(R.id.luXiang_bt);
        tingZhi_bt = (Button) findViewById(R.id.tingZhi_bt);
        time_tv = (TextView) findViewById(R.id.time_tv);
        auto_focus = (Button) findViewById(R.id.auto_focus);
        screenshot = (Button) findViewById(R.id.screenshot);
        handler = new Handler();
        holder = surfaceView.getHolder();
        tingZhi_bt.setOnClickListener(this);
        auto_focus.setOnClickListener(this);
        screenshot.setOnClickListener(this);
        // Add the SurfaceHolder callback
        holder.addCallback(this);
    }

    /**
     * Release the Camera and MediaRecorder
     */
    @Override
    protected void onDestroy() {
        handler.removeCallbacks(timeRun);
        if (mRecorder != null) {
            mRecorder.release();
        }
        if (myCamera != null) {
            myCamera.stopPreview();
            myCamera.release();
        }
        super.onDestroy();
    }

    /**
     * Click listener for the buttons
     */
    @Override
    public void onClick(View v) {
        switch (v.getId()) {
        case R.id.luXiang_bt: // Start-recording click event
            if (!recording) {
                try {
                    // Use the current time as the video file name
                    String nowTime = MessageFormat.format(
                            "{0,date,yyyyMMdd_HHmmss}",
                            new Object[] { new Date(System.currentTimeMillis()) });
                    // Video file object
                    videFile = new File(videoFolder.getAbsolutePath()
                            + File.separator + "video" + nowTime + ".mp4");
                    // Stop the preview and unlock the camera so MediaRecorder can use it
                    myCamera.unlock();
                    mRecorder = new MediaRecorder();
                    mRecorder.setCamera(myCamera);
                    // Create the video file
                    videFile.createNewFile();
                    mRecorder.setPreviewDisplay(holder.getSurface()); // Preview
                    mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA); // Video source
                    mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); // The recording source is the microphone
                    mRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4); // The output format is mp4
                    /**
                     * Get the screen resolution
                     */
                    // DisplayMetrics dm = new DisplayMetrics();
                    // getWindowManager().getDefaultDisplay().getMetrics(dm);
                    mRecorder.setVideoSize(800, 480); // Video size
                    mRecorder.setVideoEncodingBitRate(2 * 1280 * 720); // Video encoding bit rate
                    mRecorder.setVideoFrameRate(30); // Video frame rate
                    mRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.MPEG_4_SP); // Video encoder
                    mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); // Audio encoder
                    mRecorder.setMaxDuration(1800000); // Maximum recording duration
                    mRecorder.setOutputFile(videFile.getAbsolutePath()); // Output file
                    mRecorder.prepare(); // Prepare to record
                    mRecorder.start(); // Start recording
                    time_tv.setVisibility(View.VISIBLE); // Show the time text view
                    handler.post(timeRun); // Start the timer Runnable
                    recording = true; // Mark the state as recording
                    setAutofocus();
                } catch (IOException e1) {
                    e1.printStackTrace();
                } catch (IllegalStateException e) {
                    e.printStackTrace();
                }
            } else {
                Toast.makeText(this, "Video is being recorded...",
                        Toast.LENGTH_LONG).show();
            }
            break;
        case R.id.tingZhi_bt: // Stop click event
            if (recording) {
                mRecorder.stop();
                mRecorder.release();
                handler.removeCallbacks(timeRun);
                time_tv.setVisibility(View.GONE);
                int videoTimeLength = time;
                time = 0;
                recording = false;
                Toast.makeText(this,
                        videFile.getAbsolutePath() + " " + videoTimeLength + " seconds",
                        Toast.LENGTH_LONG).show();
            }
            // Reopen the camera if necessary
            if (myCamera == null) {
                myCamera = Camera.open();
                try {
                    myCamera.setPreviewDisplay(holder);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            myCamera.startPreview(); // Restart the preview
            break;
        case R.id.auto_focus:
            setAutofocus();
            break;
        case R.id.screenshot:
            myCamera.autoFocus(new AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean success, Camera camera) {
                    if (success) {
                        camera.takePicture(null, null, jpegCallBack);
                    }
                }
            });
            break;
        }
    }

    /**
     * Trigger autofocus
     */
    private void setAutofocus() {
        if (myCamera != null) {
            myCamera.autoFocus(new AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean success, Camera camera) {
                    if (success) {
                    }
                }
            });
        }
    }

    /**
     * Called when the sensor values change
     */
    @Override
    public void onSensorChanged(SensorEvent event) {
        if (event.sensor == null) {
            return;
        }
        if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
            int x = (int) event.values[0];
            int y = (int) event.values[1];
            int z = (int) event.values[2];
            calendar = Calendar.getInstance();
            long stamp = calendar.getTimeInMillis();
            int px = Math.abs(mX - x);
            int py = Math.abs(mY - y);
            int pz = Math.abs(mZ - z);
            int maxValue = getMaxValue(px, py, pz);
            if (maxValue > 2 && (stamp - lasttimestamp) > 30) {
                lasttimestamp = stamp;
                setAutofocus();
            }
            mX = x;
            mY = y;
            mZ = z;
        }
    }

    /**
     * Get the largest change among the three axes
     */
    private int getMaxValue(int px, int py, int pz) {
        int max = 0;
        if (px > py && px > pz) {
            max = px;
        } else if (py > px && py > pz) {
            max = py;
        } else if (pz > px && pz > py) {
            max = pz;
        }
        return max;
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
    }

    /**
     * Called when the SurfaceView is created
     */
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // Open the camera
        if (myCamera == null) {
            myCamera = Camera.open();
            try {
                myCamera.setPreviewDisplay(holder);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Called when the SurfaceView changes
     */
    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        // Start the preview
        myCamera.startPreview();
        Parameters parameters = myCamera.getParameters(); // Get the Camera parameters object
        Size largestSize = getBestSupportedSize(parameters.getSupportedPreviewSizes());
        parameters.setPreviewSize(largestSize.width, largestSize.height); // Set the preview size
        largestSize = getBestSupportedSize(parameters.getSupportedPictureSizes()); // Set the capture size
        parameters.setPictureSize(largestSize.width, largestSize.height);
        myCamera.setParameters(parameters);
    }

    private Size getBestSupportedSize(List<Size> sizes) {
        // Find the largest supported size
        Size largestSize = sizes.get(0);
        int largestArea = sizes.get(0).height * sizes.get(0).width;
        for (Size s : sizes) {
            int area = s.width * s.height;
            if (area > largestArea) {
                largestArea = area;
                largestSize = s;
            }
        }
        return largestSize;
    }

    /**
     * Called when the SurfaceView is destroyed
     */
    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // Stop the preview and release the camera
        if (myCamera != null) {
            myCamera.stopPreview();
            myCamera.release();
            myCamera = null;
        }
    }

    /**
     * Callback that receives the JPEG picture data
     */
    private String filepath = "";
    private PictureCallback jpegCallBack = new PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            Bitmap oldBitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
            Matrix matrix = new Matrix();
            matrix.setRotate(90);
            Bitmap newBitmap = Bitmap.createBitmap(oldBitmap, 0, 0,
                    oldBitmap.getWidth(), oldBitmap.getHeight(), matrix, true);
            filepath = videoFolder.getAbsolutePath() + File.separator
                    + new SimpleDateFormat("yyyyMMddHHmmss").format(new Date()) + ".jpg";
            File file = new File(filepath);
            try {
                BufferedOutputStream bos = new BufferedOutputStream(
                        new FileOutputStream(file));
                newBitmap.compress(Bitmap.CompressFormat.JPEG, 85, bos);
                bos.flush();
                bos.close();
                oldBitmap.recycle();
                newBitmap.recycle();
                camera.startPreview();
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    };
}
XML layout:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent"
    android:background="#ffffff" >

    <SurfaceView
        android:id="@+id/surfaceView"
        android:layout_width="fill_parent"
        android:layout_height="fill_parent"/>

    <Button
        android:id="@+id/tingZhi_bt"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentRight="true"
        android:layout_alignParentBottom="true"
        android:text="stop"/>

    <Button
        android:id="@+id/luXiang_bt"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentBottom="true"
        android:layout_toLeftOf="@id/tingZhi_bt"
        android:text="Video"/>

    <Button
        android:id="@+id/auto_focus"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentBottom="true"
        android:layout_toLeftOf="@id/luXiang_bt"
        android:text="focusing"/>

    <Button
        android:id="@+id/screenshot"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentBottom="true"
        android:layout_toLeftOf="@id/auto_focus"
        android:text="Photograph"/>

    <TextView
        android:id="@+id/time_tv"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:textColor="#FF0000"
        android:text="1 second"
        android:visibility="gone"
        android:layout_alignParentBottom="true"
        android:layout_centerHorizontal="true"
        android:layout_marginBottom="10dp"/>

</RelativeLayout>
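The original article does not include the AndroidManifest.xml, but since this Activity opens the camera, records audio from the microphone, and writes files to the SD card, the corresponding permissions have to be declared there. A minimal sketch of the relevant entries (only the permission declarations are shown; the package and activity declarations are omitted):

<!-- Declare inside the <manifest> element; required by the camera, microphone and SD-card code above -->
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-feature android:name="android.hardware.camera" />

On Android 6.0 (API 23) and later, CAMERA, RECORD_AUDIO, and WRITE_EXTERNAL_STORAGE are runtime permissions, so they must also be requested from the user before the camera is opened.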
That is all the content of this article. I hope it is helpful to your study, and I hope you will continue to support this site.