Android Studio端负责手机App的开发,要有获取视频的功能,并可以建立socket连接,发送数据流到主机。
而Eclipse端则运行java代码,来启动监听socket的线程,并实现实时接收数据流并保存为jpg格式的功能。
首先是布局文件activity_socket_video.xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".Socket_video_MainActivity">

    <!-- Camera preview surface.
         FIX: removed android:scaleType — it is an ImageView attribute and
         has no effect on a SurfaceView; moved it to the ImageView below. -->
    <SurfaceView
        android:id="@+id/SurFAceView"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content" />

    <!-- Displays the latest captured frame (refreshed continuously). -->
    <ImageView
        android:id="@+id/imageView1"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:scaleType="fitCenter" />

    <!-- start / stop / return controls, centered below the preview. -->
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:orientation="horizontal"
        android:layout_centerVertical="true"
        android:layout_below="@id/SurFAceView"
        android:layout_centerHorizontal="true">

        <Button
            android:id="@+id/Start_"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="start" />

        <Button
            android:id="@+id/Stop_"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="stop" />

        <Button
            android:id="@+id/Return_"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="return" />
    </LinearLayout>
</RelativeLayout>
SurfaceView和ImageView用于显示实时刷新的图片;
三个按钮分别用于开始拍摄、暂停拍摄和返回前一个页面。
下面是与上面界面配套的Socket_video_MainActivity代码
package com.mapscanner.mapscanner;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import com.mapscanner.mapscanner.ClientThread;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.app.Activity;
import android.support.v7.app.AppCompatActivity;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
/**
 * Captures camera preview frames and hands them to {@link ClientThread},
 * which streams each frame as a JPEG over a socket to the host.
 *
 * Uses the deprecated android.hardware.Camera API, matching the rest of
 * this project.
 */
public class Socket_video_MainActivity extends AppCompatActivity {
    /** Shows the most recently captured frame (updated via MyHandler). */
    public static ImageView image;
    private static Bitmap bitmap;
    private static final int COMPLETED = 0x222;
    /** Minimum interval between frames forwarded to the sender thread. */
    private static final long FRAME_INTERVAL_MS = 300;

    MyHandler handler;
    ClientThread clientThread;
    SurfaceView surfaceView;
    SurfaceHolder sfh;
    Camera camera;
    int isPreview = 0;                 // 1 while the preview is running, 0 otherwise
    int screenWidth = 300, screenHeight = 400;
    Button Start, Stop, Return;
    private long lastFrameTime = 0;    // wall-clock time of the last forwarded frame

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_socket_video__main);

        surfaceView = (SurfaceView) findViewById(R.id.SurFAceView);
        image = (ImageView) findViewById(R.id.imageView1);
        Start = (Button) findViewById(R.id.Start_);
        Stop = (Button) findViewById(R.id.Stop_);
        Return = (Button) findViewById(R.id.Return_);

        handler = new MyHandler();
        // Start the background thread that owns the sending Handler.
        clientThread = new ClientThread();
        new Thread(clientThread).start();

        DisplayMetrics dm = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(dm);
        screenWidth = dm.widthPixels;
        screenHeight = dm.heightPixels;
        image.setMaxHeight(screenHeight);

        sfh = surfaceView.getHolder();
        sfh.setFixedSize(screenWidth / 4 * 3, screenHeight / 4 * 3);
        sfh.addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
                // No-op: preview size is fixed via setFixedSize above.
            }

            @Override
            public void surfaceCreated(SurfaceHolder arg0) {
                camera = Camera.open();
                // Let the sender thread know the frame dimensions.
                ClientThread.size = camera.getParameters().getPreviewSize();
                try {
                    camera.setPreviewDisplay(arg0);
                } catch (IOException e) {
                    Toast.makeText(Socket_video_MainActivity.this, "error",
                            Toast.LENGTH_SHORT).show();
                }
            }

            @Override
            public void surfaceDestroyed(SurfaceHolder arg0) {
                if (camera != null) {
                    // FIX: the original tested isPreview==0, i.e. it only
                    // stopped the preview when it was NOT running. Stop it
                    // when it IS running, then release the camera.
                    if (isPreview != 0)
                        camera.stopPreview();
                    camera.release();
                    camera = null;
                }
            }
        });

        Start.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                isPreview = 1;
                initCamera(); // configure and start the preview
            }
        });

        Stop.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                isPreview = 0;
                // FIX: guard against a null camera (e.g. Stop pressed
                // before the surface was created).
                if (camera != null)
                    camera.stopPreview();
            }
        });

        Return.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent();
                intent.setClass(Socket_video_MainActivity.this, MainActivity.class);
                startActivity(intent);
            }
        });
        // NOTE: the original called initCamera() here too, but at this point
        // camera is still null and isPreview is 0, so it was a no-op; removed.
    }

    /**
     * Configures the camera and starts the preview. Each preview frame is
     * wrapped in a YuvImage and posted to the ClientThread handler, at most
     * one frame per FRAME_INTERVAL_MS.
     */
    @SuppressWarnings("deprecation")
    private void initCamera() {
        if (camera != null && isPreview != 0) {
            try {
                camera.setPreviewDisplay(sfh); // render the viewfinder into the SurfaceView
                Camera.Parameters parameters = camera.getParameters();
                parameters.setPreviewSize(screenWidth / 2, screenHeight / 2);
                parameters.setPreviewFrameRate(2); // capture ~2 preview frames per second
                // FIX: NV21 is a *preview* format; still-picture format must
                // be JPEG. Passing NV21 to setPictureFormat is invalid.
                parameters.setPictureFormat(ImageFormat.JPEG);
                parameters.setPictureSize(screenWidth / 2, screenHeight / 2);
                // FIX: the original configured `parameters` but never applied
                // them — setParameters was missing entirely.
                camera.setParameters(parameters);
                camera.setDisplayOrientation(90);
                camera.setPreviewCallback(new PreviewCallback() {
                    @Override
                    public void onPreviewFrame(byte[] data, Camera c) {
                        // Throttle without blocking: the original slept 300ms
                        // inside this callback, freezing the callback thread.
                        long now = System.currentTimeMillis();
                        if (now - lastFrameTime < FRAME_INTERVAL_MS)
                            return;
                        lastFrameTime = now;

                        Size size = camera.getParameters().getPreviewSize();
                        camera.autoFocus(mAutoFocusCallback);
                        try {
                            // Preview data arrives as NV21 — wrap it so the
                            // sender thread can JPEG-compress it.
                            YuvImage frame = new YuvImage(data, ImageFormat.NV21,
                                    size.width, size.height, null);
                            // FIX: revHandler is created on the client thread's
                            // looper and may not exist yet; guard against NPE.
                            if (clientThread.revHandler != null) {
                                Message msg = clientThread.revHandler.obtainMessage();
                                msg.what = 0x111;
                                msg.obj = frame;
                                clientThread.revHandler.sendMessage(msg);
                            }
                        } catch (Exception ex) {
                            Log.e("Sys", "Error:" + ex.getMessage());
                        }
                    }
                });
                camera.startPreview();
            } catch (Exception e) {
                e.printStackTrace();
            }
            isPreview = 1;
        }
    }

    /** Delivers decoded bitmaps back to the UI ImageView. */
    static class MyHandler extends Handler {
        @Override
        public void handleMessage(Message msg) {
            if (msg.what == COMPLETED) {
                bitmap = (Bitmap) msg.obj;
                image.setImageBitmap(bitmap);
                super.handleMessage(msg);
            }
        }
    }

    /** After a focus pass, cancel one-shot callbacks and turn the flash off. */
    private Camera.AutoFocusCallback mAutoFocusCallback = new Camera.AutoFocusCallback() {
        public void onAutoFocus(boolean success, Camera camera) {
            camera.setOneShotPreviewCallback(null);
            Camera.Parameters parameters = camera.getParameters();
            parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); // disable flash
            camera.setParameters(parameters);
        }
    };
}
另外需要新建一个class,命名为ClientThread,用于创建发送线程以及建立socket连接。
package com.mapscanner.mapscanner;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.Socket;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera.Size;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
/**
 * Background thread that owns a Handler (revHandler) and, for every
 * 0x111 message it receives, JPEG-compresses the attached YuvImage and
 * sends it to the host over a fresh socket connection.
 *
 * One connection per frame is intentional: the receiving server saves
 * exactly one jpg per accepted socket.
 */
public class ClientThread implements Runnable {
    /** Host address/port the frames are streamed to. TODO: make configurable. */
    private static final String SERVER_HOST = "192.168.1.100";
    private static final int SERVER_PORT = 9393;

    /** Camera preview size; set by the activity before any frame arrives. */
    public static Size size;

    /** Created on this thread's looper in run(); null until the loop starts. */
    public MyHandler revHandler;

    @Override
    public void run() {
        // Turn this worker thread into a message-loop thread so other
        // threads can post frames to revHandler.
        Looper.prepare();
        revHandler = new MyHandler();
        Looper.loop();
    }

    public static class MyHandler extends Handler {
        @Override
        public void handleMessage(Message msg) {
            if (msg.what != 0x111)
                return;
            YuvImage image = (YuvImage) msg.obj;
            // FIX: try-with-resources closes socket/stream even when an
            // exception is thrown mid-send; the original leaked both, and
            // also called getOutputStream() twice.
            try (Socket socket = new Socket(SERVER_HOST, SERVER_PORT);
                 OutputStream os = socket.getOutputStream()) {
                ByteArrayOutputStream jpeg = new ByteArrayOutputStream();
                // Quality 100: size.width/height must match the NV21 buffer.
                image.compressToJpeg(new Rect(0, 0, size.width, size.height), 100, jpeg);
                // FIX: write the buffer directly instead of round-tripping
                // through a ByteArrayInputStream + manual copy loop. The
                // trailing "\n" delimiter was dropped too — the connection
                // close already delimits the image for the server.
                jpeg.writeTo(os);
                os.flush();
            } catch (IOException e) {
                Log.e("ClientThread", "failed to send frame: " + e.getMessage());
            }
        }
    }
}
这样Android Studio端就布置好了!
下面看一下Eclipse端的代码:
首先新建一个java project 再新建一个package 在里面新建两个类
第一个类是SocketServer.java,功能是建立一个ServerSocket,并指定端口号,然后启动新的线程并调用TheadServer类(也就是我们建的第二个类)
package draw;
import java.io.IOException;
import java.net.ServerSocket;
import javax.swing.JFrame;
@SuppressWarnings("serial")
public class SocketServer extends JFrame{
    /**
     * Listens on port 9393 forever and hands each incoming connection to a
     * TheadServer worker.
     *
     * NOTE(review): TheadServer's constructor blocks on ss.accept(), so the
     * loop below only spins once per accepted client — verify against
     * TheadServer when changing either side.
     *
     * @throws IOException if the listening socket itself cannot be opened
     */
    public static void main(String[] args) throws IOException {
        ServerSocket ss = new ServerSocket(9393);
        System.out.println("begin");
        while (true) {
            try {
                new Thread(new TheadServer(ss)).start();
            } catch (IOException e) {
                // FIX: a single failing client used to propagate its
                // IOException out of main() and kill the whole server.
                // Log it and keep accepting.
                e.printStackTrace();
            }
        }
    }
}
第二个类是TheadServer.java,主要实现读取数据流,并保存为jpg格式的图片。
package draw;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Date;
import javax.imageio.ImageIO;
/**
 * Worker that accepts one client connection, reads one image from it in
 * run(), and saves it as a timestamped jpg.
 *
 * FIX: the original did accept + read + save all inside the constructor,
 * so the Thread started by SocketServer did no actual work and clients
 * were handled strictly one at a time. Only accept() stays in the
 * constructor now; decoding and saving happen on the worker thread.
 */
public class TheadServer implements Runnable {
    /** Directory the received frames are written to. */
    private static final String SAVE_DIR = "E:/images/";

    private Socket s = null;
    public InputStream ins;

    /** Blocks until a client connects; the image is read later in run(). */
    public TheadServer(ServerSocket ss) throws IOException {
        System.out.println("start thread");
        this.s = ss.accept();
    }

    @Override
    public void run() {
        try {
            System.out.println("线程运行,图片传输中!!!");
            ins = s.getInputStream();
            BufferedImage bi = ImageIO.read(ins);
            // FIX: ImageIO.read returns null on undecodable/truncated data;
            // the original then threw an NPE inside ImageIO.write.
            if (bi == null) {
                System.out.println("could not decode incoming stream");
                return;
            }
            new File(SAVE_DIR).mkdirs(); // make sure the target directory exists
            String file = SAVE_DIR + System.currentTimeMillis() + ".jpg";
            if (ImageIO.write(bi, "jpg", new File(file))) {
                System.out.printf("save picture success!\n");
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // FIX: the socket is now always closed, even when reading or
            // decoding fails (the original only closed it in run(), which
            // never touched the stream).
            try {
                if (s != null && !s.isClosed())
                    s.close();
            } catch (IOException ignored) {
                // nothing useful to do if close itself fails
            }
        }
    }
}
在手机端获取视频时主要用到的是setPreviewCallback回调的响应,所以我加入了限流,使相邻两帧至少间隔300ms,以此减少视频拍摄中产生的图片数量,保证传输结果的质量。值得注意的是如果保存每张图片仍会有延迟,但如果是对每张图片做后续的图像处理,那已经可以做到实时。
最后要感谢:https://www.cnblogs.com/lijiongquan/p/4729445.html,本次任务代码来源于此网址,在理解后加以使用,十分感谢。