I want to access the live stream from an IP camera in an Android app. I am using a D-Link DCS-5020L camera. I have searched the internet for a solution but found nothing that works. I would like to know whether I should set up RTSP, HTTP, or some other protocol, and how to build the URL for the stream. I am currently using a VideoView in the Android app to display the video. I have also tried playing some public IP camera feeds, but they give the same error:
W/MediaPlayer: Couldn't open http://64.122.208.241:8000/axis-cgi/mjpg/video.cgi?resolution=320x240: java.io.FileNotFoundException: No content provider: http://64.122.208.241:8000/axis-cgi/mjpg/video.cgi?resolution=320x240
I did a project like this a while ago. VideoView is backed by MediaPlayer, which cannot play an MJPEG-over-HTTP stream such as the Axis video.cgi URL in your log, so no URL will make it work directly. Instead of the default VideoView I used a custom SurfaceView that reads the stream and decodes each JPEG frame itself. Here are the classes; give them a try.
MjpegView.java
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
    private static final String TAG = "MjpegView";

    // Overlay position flags: bit 1 selects the top edge, bit 8 selects the left edge.
    public final static int POSITION_UPPER_LEFT = 9;
    public final static int POSITION_UPPER_RIGHT = 3;
    public final static int POSITION_LOWER_LEFT = 12;
    public final static int POSITION_LOWER_RIGHT = 6;

    public final static int SIZE_STANDARD = 1;
    public final static int SIZE_BEST_FIT = 4;
    public final static int SIZE_FULLSCREEN = 8;

    private MjpegViewThread thread;
    private MjpegInputStream mIn = null;
    private boolean showFps = false;
    private boolean mRun = false;
    private boolean surfaceDone = false;
    private Paint overlayPaint;
    private int overlayTextColor;
    private int overlayBackgroundColor;
    private int ovlPos;
    private int dispWidth;
    private int dispHeight;
    private int displayMode;

    public class MjpegViewThread extends Thread {
        private SurfaceHolder mSurfaceHolder;
        private int frameCounter = 0;
        private long start;
        private Bitmap ovl;

        public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) {
            mSurfaceHolder = surfaceHolder;
        }

        // Computes where the decoded frame should be drawn for the current display mode.
        private Rect destRect(int bmw, int bmh) {
            int tempx;
            int tempy;
            if (displayMode == MjpegView.SIZE_STANDARD) {
                tempx = (dispWidth / 2) - (bmw / 2);
                tempy = (dispHeight / 2) - (bmh / 2);
                return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
            }
            if (displayMode == MjpegView.SIZE_BEST_FIT) {
                float bmasp = (float) bmw / (float) bmh;
                bmw = dispWidth;
                bmh = (int) (dispWidth / bmasp);
                if (bmh > dispHeight) {
                    bmh = dispHeight;
                    bmw = (int) (dispHeight * bmasp);
                }
                tempx = (dispWidth / 2) - (bmw / 2);
                tempy = (dispHeight / 2) - (bmh / 2);
                return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
            }
            if (displayMode == MjpegView.SIZE_FULLSCREEN) {
                return new Rect(0, 0, dispWidth, dispHeight);
            }
            return null;
        }

        public void setSurfaceSize(int width, int height) {
            synchronized (mSurfaceHolder) {
                dispWidth = width;
                dispHeight = height;
            }
        }

        // Renders the current fps counter into a small bitmap that is drawn over the video.
        private Bitmap makeFpsOverlay(Paint p, String text) {
            Rect b = new Rect();
            p.getTextBounds(text, 0, text.length(), b);
            int bwidth = b.width() + 2;
            int bheight = b.height() + 2;
            Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
            Canvas c = new Canvas(bm);
            p.setColor(overlayBackgroundColor);
            c.drawRect(0, 0, bwidth, bheight, p);
            p.setColor(overlayTextColor);
            c.drawText(text, -b.left + 1, (bheight / 2) - ((p.ascent() + p.descent()) / 2) + 1, p);
            return bm;
        }

        public void run() {
            start = System.currentTimeMillis();
            PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
            Bitmap bm;
            int width;
            int height;
            Rect destRect;
            Canvas c = null;
            Paint p = new Paint();
            String fps;
            while (mRun) {
                if (surfaceDone) {
                    try {
                        c = mSurfaceHolder.lockCanvas();
                        synchronized (mSurfaceHolder) {
                            try {
                                // Read one JPEG frame from the stream and draw it scaled to the surface.
                                bm = mIn.readMjpegFrame();
                                destRect = destRect(bm.getWidth(), bm.getHeight());
                                c.drawColor(Color.BLACK);
                                c.drawBitmap(bm, null, destRect, p);
                                if (showFps) {
                                    p.setXfermode(mode);
                                    if (ovl != null) {
                                        height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom - ovl.getHeight();
                                        width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right - ovl.getWidth();
                                        c.drawBitmap(ovl, width, height, null);
                                    }
                                    p.setXfermode(null);
                                    frameCounter++;
                                    if ((System.currentTimeMillis() - start) >= 1000) {
                                        fps = String.valueOf(frameCounter) + " fps";
                                        frameCounter = 0;
                                        start = System.currentTimeMillis();
                                        ovl = makeFpsOverlay(overlayPaint, fps);
                                    }
                                }
                            } catch (IOException e) {
                                Log.d(TAG, "catch IOException hit in run", e);
                            }
                        }
                    } finally {
                        if (c != null) {
                            mSurfaceHolder.unlockCanvasAndPost(c);
                        }
                    }
                }
            }
        }
    }

    private void init(Context context) {
        SurfaceHolder holder = getHolder();
        holder.addCallback(this);
        thread = new MjpegViewThread(holder, context);
        setFocusable(true);
        overlayPaint = new Paint();
        overlayPaint.setTextAlign(Paint.Align.LEFT);
        overlayPaint.setTextSize(12);
        overlayPaint.setTypeface(Typeface.DEFAULT);
        overlayTextColor = Color.WHITE;
        overlayBackgroundColor = Color.BLACK;
        ovlPos = MjpegView.POSITION_LOWER_RIGHT;
        displayMode = MjpegView.SIZE_STANDARD;
        dispWidth = getWidth();
        dispHeight = getHeight();
    }

    public void startPlayback() {
        if (mIn != null) {
            mRun = true;
            thread.start();
        }
    }

    public void stopPlayback() {
        mRun = false;
        boolean retry = true;
        while (retry) {
            try {
                thread.join();
                retry = false;
            } catch (InterruptedException e) {
                Log.d(TAG, "catch InterruptedException hit in stopPlayback", e);
            }
        }
    }

    public MjpegView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) {
        thread.setSurfaceSize(w, h);
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        surfaceDone = false;
        stopPlayback();
    }

    public MjpegView(Context context) {
        super(context);
        init(context);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        surfaceDone = true;
    }

    public void showFps(boolean b) {
        showFps = b;
    }

    public void setSource(MjpegInputStream source) {
        mIn = source;
        startPlayback();
    }

    public void setOverlayPaint(Paint p) {
        overlayPaint = p;
    }

    public void setOverlayTextColor(int c) {
        overlayTextColor = c;
    }

    public void setOverlayBackgroundColor(int c) {
        overlayBackgroundColor = c;
    }

    public void setOverlayPosition(int p) {
        ovlPos = p;
    }

    public void setDisplayMode(int s) {
        displayMode = s;
    }
}
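The fragment below inflates a layout named video_view and looks up an MjpegView with the id surfaceView, so the project also needs a matching layout file. The layout name, view id, and package name are taken from the fragment code; the rest of this file is only a minimal sketch:
video_view.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">
    <!-- Custom MJPEG view; use the fully qualified class name of MjpegView in your project -->
    <aronz.lab.streamvideo.MjpegView
        android:id="@+id/surfaceView"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />
</FrameLayout>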
VideoViewFragment.java
import android.content.Context;
import android.os.Bundle;
import android.app.Fragment;
import android.view.Display;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import aronz.lab.streamvideo.MjpegInputStream;
import aronz.lab.streamvideo.MjpegView;
import java.net.HttpURLConnection;
import java.net.URL;
import android.os.AsyncTask;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
public class VideoViewFragment extends Fragment {
    // Declare variables
    // VideoView videoview;
    // MainActivity main = new MainActivity();
    private static final String TAG = "MjpegActivity";
    private MjpegView mv;

    // Physical display width and height.
    private static int displayWidth = 0;
    private static int displayHeight = 0;

    // Video URL
    // public String path = main.Path;
    // String VideoURL = path + "Video1.mp4";
    // String VideoURL = "http://192.168.43.1:8080";
    // Sample stream URL (here an MJPEG source on the local network).
    String URL = "http://192.168.43.72:8080/video";
    // String URL = "http://192.168.43.134:5432/XMLParser/Video1.mp4";

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        ViewGroup viewGroup = (ViewGroup) inflater.inflate(R.layout.video_view, container, false);
        // View view = inflater.inflate(R.layout.video_view, container, false);
        // return view;
        mv = (MjpegView) viewGroup.findViewById(R.id.surfaceView);
        // Open the connection and start reading the stream off the main thread.
        new DoRead().execute(URL);
        return viewGroup;
        // mv = new MjpegView(this.getContext());
    }

    public void onPause() {
        super.onPause();
        mv.stopPlayback();
    }

    public void onResume() {
        super.onResume();
    }

    public class DoRead extends AsyncTask<String, Void, MjpegInputStream> {
        protected MjpegInputStream doInBackground(String... Url) {
            // TODO: if camera has authentication deal with it and don't just not work
            // HttpResponse res = null;
            // DefaultHttpClient httpclient = new DefaultHttpClient();
            Log.d(TAG, "1. Sending http request");
            try {
                java.net.URL url = new URL(Url[0]); // here is your URL path
                HttpURLConnection conn = (HttpURLConnection) url.openConnection();
                conn.setReadTimeout(15000 /* milliseconds */);
                conn.setConnectTimeout(15000 /* milliseconds */);
                int responseCode = conn.getResponseCode();
                // res = httpclient.execute(new HttpGet(URI.create(url[0])));
                // Log.d(TAG, "2. Request finished, status = " + res.getStatusLine().getStatusCode());
                Log.d(TAG, "2. Request finished, status = " + responseCode);
                if (responseCode == 401) {
                    // You must turn off camera User Access Control before this will work
                    return null;
                }
                // Wrap the raw HTTP body in the MJPEG parser defined below.
                return new MjpegInputStream(conn.getInputStream());
            } catch (Exception e) {
                e.printStackTrace();
                Log.d(TAG, "Request failed", e);
                // Error connecting to camera
            }
            return null;
        }

        protected void onPostExecute(MjpegInputStream result) {
            if (result != null) {
                mv.setSource(result);
                mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
                mv.showFps(true);
            }
        }
    }
}
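If the camera requires credentials (the DCS-5020L asks for a user name and password by default), the 401 branch above simply returns null. Assuming the camera uses HTTP Basic authentication, one way to handle it is to add an Authorization header inside the existing try block, right after opening the connection; the credentials below are placeholders and this sketch has not been tested against this particular camera:
// Inside doInBackground(), after openConnection() and before getResponseCode():
String user = "admin";                      // placeholder credentials
String password = "your-camera-password";
String credentials = user + ":" + password;
String basicAuth = "Basic " + android.util.Base64.encodeToString(
        credentials.getBytes("UTF-8"), android.util.Base64.NO_WRAP);
conn.setRequestProperty("Authorization", basicAuth);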
MjpegInputStream.java
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;
public class MjpegInputStream extends DataInputStream {
    private static final String TAG = "MjpegInputStream";

    // JPEG start-of-image and end-of-image markers used to find frame boundaries.
    private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
    private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
    private final String CONTENT_LENGTH = "Content-Length";
    private final static int HEADER_MAX_LENGTH = 100;
    private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
    private int mContentLength = -1;

    public MjpegInputStream(InputStream in) {
        super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
    }

    // Returns the index just past the given byte sequence, or -1 if it is not found.
    private int getEndOfSequence(DataInputStream in, byte[] sequence) throws IOException {
        int seqIndex = 0;
        byte c;
        for (int i = 0; i < FRAME_MAX_LENGTH; i++) {
            c = (byte) in.readUnsignedByte();
            if (c == sequence[seqIndex]) {
                seqIndex++;
                if (seqIndex == sequence.length) {
                    return i + 1;
                }
            } else {
                seqIndex = 0;
            }
        }
        return -1;
    }

    private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
        int end = getEndOfSequence(in, sequence);
        return (end < 0) ? (-1) : (end - sequence.length);
    }

    // Reads the part headers of the multipart stream and extracts the Content-Length value.
    private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
        ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
        Properties props = new Properties();
        props.load(headerIn);
        return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
    }

    public Bitmap readMjpegFrame() throws IOException {
        mark(FRAME_MAX_LENGTH);
        int headerLen = getStartOfSequence(this, SOI_MARKER);
        reset();
        byte[] header = new byte[headerLen];
        readFully(header);
        try {
            mContentLength = parseContentLength(header);
        } catch (NumberFormatException nfe) {
            // No usable Content-Length header; fall back to scanning for the end-of-image marker.
            Log.d(TAG, "catch NumberFormatException hit", nfe);
            mContentLength = getEndOfSequence(this, EOF_MARKER);
        }
        reset();
        byte[] frameData = new byte[mContentLength];
        skipBytes(headerLen);
        readFully(frameData);
        return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
    }
}
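That leaves the URL itself. An MJPEG-over-HTTP source is a multipart HTTP response in which each part carries a short header block (including a Content-Length field) followed by one JPEG image delimited by the 0xFFD8/0xFFD9 markers; that is exactly what readMjpegFrame() above parses, which is why this approach needs the camera's MJPEG endpoint rather than an RTSP address. On D-Link DCS-series cameras the MJPEG endpoint is usually a CGI path on the camera's built-in web server (paths such as /video.cgi or /mjpeg.cgi are common), but the exact path varies by model and firmware, so check the camera's manual or its web interface. The value below is only a placeholder to show where the URL plugs in:
// Hypothetical example -- replace host, port and path with the values for your camera.
String URL = "http://192.168.0.20:80/video.cgi";
new DoRead().execute(URL);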