I modified the MJPEG viewer code from Android and MJPEG to use an AsyncTask (and therefore to work on Ice Cream Sandwich (ICS), 4.0.4). Here is my code.
If anyone has suggestions on how to optimize, clean up, or otherwise write the code more properly, please let me know. I would also appreciate help with two issues:
1. If the device is viewing the stream and you lock and then unlock the screen, playback does not resume until you exit and restart the app or rotate the screen. Trying to handle this in onResume() crashed the app.
2. In particular, I would like to get an AsyncTask into MjpegInputStream.java, but I could not make it work.
MjpegActivity.java:
package com.demo.mjpeg;

import java.io.IOException;
import java.net.URI;

import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;

import com.demo.mjpeg.MjpegView.MjpegInputStream;
import com.demo.mjpeg.MjpegView.MjpegView;

import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;

public class MjpegActivity extends Activity {
    private static final String TAG = "MjpegActivity";

    private MjpegView mv;

    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        //sample public cam
        String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&%3bdummy=1333689998337";

        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);

        mv = new MjpegView(this);
        setContentView(mv);

        new DoRead().execute(URL);
    }

    public void onPause() {
        super.onPause();
        mv.stopPlayback();
    }

    public class DoRead extends AsyncTask<String, Void, MjpegInputStream> {
        protected MjpegInputStream doInBackground(String... url) {
            //TODO: if camera has authentication deal with it and don't just not work
            HttpResponse res = null;
            DefaultHttpClient httpclient = new DefaultHttpClient();
            Log.d(TAG, "1. Sending http request");
            try {
                res = httpclient.execute(new HttpGet(URI.create(url[0])));
                Log.d(TAG, "2. Request finished, status = " + res.getStatusLine().getStatusCode());
                if (res.getStatusLine().getStatusCode() == 401) {
                    //You must turn off camera User Access Control before this will work
                    return null;
                }
                return new MjpegInputStream(res.getEntity().getContent());
            } catch (ClientProtocolException e) {
                e.printStackTrace();
                Log.d(TAG, "Request failed-ClientProtocolException", e);
                //Error connecting to camera
            } catch (IOException e) {
                e.printStackTrace();
                Log.d(TAG, "Request failed-IOException", e);
                //Error connecting to camera
            }
            return null;
        }

        protected void onPostExecute(MjpegInputStream result) {
            mv.setSource(result);
            mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
            mv.showFps(true);
        }
    }
}
MjpegInputStream.java:
package com.demo.mjpeg.MjpegView;

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;

public class MjpegInputStream extends DataInputStream {
    private static final String TAG = "MjpegInputStream";

    private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
    private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
    private final String CONTENT_LENGTH = "Content-Length";
    private final static int HEADER_MAX_LENGTH = 100;
    private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
    private int mContentLength = -1;

    public MjpegInputStream(InputStream in) {
        super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
    }

    private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
        int seqIndex = 0;
        byte c;
        for (int i = 0; i < FRAME_MAX_LENGTH; i++) {
            c = (byte) in.readUnsignedByte();
            if (c == sequence[seqIndex]) {
                seqIndex++;
                if (seqIndex == sequence.length) {
                    return i + 1;
                }
            } else {
                seqIndex = 0;
            }
        }
        return -1;
    }

    private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
        int end = getEndOfSeqeunce(in, sequence);
        return (end < 0) ? (-1) : (end - sequence.length);
    }

    private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
        ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
        Properties props = new Properties();
        props.load(headerIn);
        return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
    }

    public Bitmap readMjpegFrame() throws IOException {
        mark(FRAME_MAX_LENGTH);
        int headerLen = getStartOfSequence(this, SOI_MARKER);
        reset();
        byte[] header = new byte[headerLen];
        readFully(header);
        try {
            mContentLength = parseContentLength(header);
        } catch (NumberFormatException nfe) {
            nfe.getStackTrace();
            Log.d(TAG, "catch NumberFormatException hit", nfe);
            mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
        }
        reset();
        byte[] frameData = new byte[mContentLength];
        skipBytes(headerLen);
        readFully(frameData);
        return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
    }
}
MjpegView.java:
package com.demo.mjpeg.MjpegView;

import java.io.IOException;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
    private static final String TAG = "MjpegView";

    public final static int POSITION_UPPER_LEFT = 9;
    public final static int POSITION_UPPER_RIGHT = 3;
    public final static int POSITION_LOWER_LEFT = 12;
    public final static int POSITION_LOWER_RIGHT = 6;

    public final static int SIZE_STANDARD = 1;
    public final static int SIZE_BEST_FIT = 4;
    public final static int SIZE_FULLSCREEN = 8;

    private MjpegViewThread thread;
    private MjpegInputStream mIn = null;
    private boolean showFps = false;
    private boolean mRun = false;
    private boolean surfaceDone = false;
    private Paint overlayPaint;
    private int overlayTextColor;
    private int overlayBackgroundColor;
    private int ovlPos;
    private int dispWidth;
    private int dispHeight;
    private int displayMode;

    public class MjpegViewThread extends Thread {
        private SurfaceHolder mSurfaceHolder;
        private int frameCounter = 0;
        private long start;
        private Bitmap ovl;

        public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) {
            mSurfaceHolder = surfaceHolder;
        }

        private Rect destRect(int bmw, int bmh) {
            int tempx;
            int tempy;
            if (displayMode == MjpegView.SIZE_STANDARD) {
                tempx = (dispWidth / 2) - (bmw / 2);
                tempy = (dispHeight / 2) - (bmh / 2);
                return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
            }
            if (displayMode == MjpegView.SIZE_BEST_FIT) {
                float bmasp = (float) bmw / (float) bmh;
                bmw = dispWidth;
                bmh = (int) (dispWidth / bmasp);
                if (bmh > dispHeight) {
                    bmh = dispHeight;
                    bmw = (int) (dispHeight * bmasp);
                }
                tempx = (dispWidth / 2) - (bmw / 2);
                tempy = (dispHeight / 2) - (bmh / 2);
                return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
            }
            if (displayMode == MjpegView.SIZE_FULLSCREEN) {
                return new Rect(0, 0, dispWidth, dispHeight);
            }
            return null;
        }

        public void setSurfaceSize(int width, int height) {
            synchronized (mSurfaceHolder) {
                dispWidth = width;
                dispHeight = height;
            }
        }

        private Bitmap makeFpsOverlay(Paint p, String text) {
            Rect b = new Rect();
            p.getTextBounds(text, 0, text.length(), b);
            int bwidth = b.width() + 2;
            int bheight = b.height() + 2;
            Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
            Canvas c = new Canvas(bm);
            p.setColor(overlayBackgroundColor);
            c.drawRect(0, 0, bwidth, bheight, p);
            p.setColor(overlayTextColor);
            c.drawText(text, -b.left + 1, (bheight / 2) - ((p.ascent() + p.descent()) / 2) + 1, p);
            return bm;
        }

        public void run() {
            start = System.currentTimeMillis();
            PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
            Bitmap bm;
            int width;
            int height;
            Rect destRect;
            Canvas c = null;
            Paint p = new Paint();
            String fps;
            while (mRun) {
                if (surfaceDone) {
                    try {
                        c = mSurfaceHolder.lockCanvas();
                        synchronized (mSurfaceHolder) {
                            try {
                                bm = mIn.readMjpegFrame();
                                destRect = destRect(bm.getWidth(), bm.getHeight());
                                c.drawColor(Color.BLACK);
                                c.drawBitmap(bm, null, destRect, p);
                                if (showFps) {
                                    p.setXfermode(mode);
                                    if (ovl != null) {
                                        height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom - ovl.getHeight();
                                        width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right - ovl.getWidth();
                                        c.drawBitmap(ovl, width, height, null);
                                    }
                                    p.setXfermode(null);
                                    frameCounter++;
                                    if ((System.currentTimeMillis() - start) >= 1000) {
                                        fps = String.valueOf(frameCounter) + " fps";
                                        frameCounter = 0;
                                        start = System.currentTimeMillis();
                                        ovl = makeFpsOverlay(overlayPaint, fps);
                                    }
                                }
                            } catch (IOException e) {
                                e.getStackTrace();
                                Log.d(TAG, "catch IOException hit in run", e);
                            }
                        }
                    } finally {
                        if (c != null) {
                            mSurfaceHolder.unlockCanvasAndPost(c);
                        }
                    }
                }
            }
        }
    }

    private void init(Context context) {
        SurfaceHolder holder = getHolder();
        holder.addCallback(this);
        thread = new MjpegViewThread(holder, context);
        setFocusable(true);
        overlayPaint = new Paint();
        overlayPaint.setTextAlign(Paint.Align.LEFT);
        overlayPaint.setTextSize(12);
        overlayPaint.setTypeface(Typeface.DEFAULT);
        overlayTextColor = Color.WHITE;
        overlayBackgroundColor = Color.BLACK;
        ovlPos = MjpegView.POSITION_LOWER_RIGHT;
        displayMode = MjpegView.SIZE_STANDARD;
        dispWidth = getWidth();
        dispHeight = getHeight();
    }

    public void startPlayback() {
        if (mIn != null) {
            mRun = true;
            thread.start();
        }
    }

    public void stopPlayback() {
        mRun = false;
        boolean retry = true;
        while (retry) {
            try {
                thread.join();
                retry = false;
            } catch (InterruptedException e) {
                e.getStackTrace();
                Log.d(TAG, "catch InterruptedException hit in stopPlayback", e);
            }
        }
    }

    public MjpegView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) {
        thread.setSurfaceSize(w, h);
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        surfaceDone = false;
        stopPlayback();
    }

    public MjpegView(Context context) {
        super(context);
        init(context);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        surfaceDone = true;
    }

    public void showFps(boolean b) {
        showFps = b;
    }

    public void setSource(MjpegInputStream source) {
        mIn = source;
        startPlayback();
    }

    public void setOverlayPaint(Paint p) {
        overlayPaint = p;
    }

    public void setOverlayTextColor(int c) {
        overlayTextColor = c;
    }

    public void setOverlayBackgroundColor(int c) {
        overlayBackgroundColor = c;
    }

    public void setOverlayPosition(int p) {
        ovlPos = p;
    }

    public void setDisplayMode(int s) {
        displayMode = s;
    }
}
For anyone just starting out: if you want to access an IP camera that has a username and password, I suggest adding the following to the DefaultHttpClient. With this in place, the code above works with cameras that require authentication:
CredentialsProvider provider = new BasicCredentialsProvider();
UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("yourusername", "yourpassword");
provider.setCredentials(AuthScope.ANY, credentials);
DefaultHttpClient httpclient = new DefaultHttpClient();
httpclient.setCredentialsProvider(provider);
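For context, here is a minimal sketch of how those lines could slot into DoRead.doInBackground() from the question. The placement (after creating the client, before executing the request) is my assumption; the credential calls are standard Apache HttpClient 4 APIs (they also need the org.apache.http.auth.* and org.apache.http.client.CredentialsProvider imports), and the username/password strings are placeholders.
protected MjpegInputStream doInBackground(String... url) {
    // Build the credentials as above; "yourusername"/"yourpassword" are placeholders.
    CredentialsProvider provider = new BasicCredentialsProvider();
    provider.setCredentials(AuthScope.ANY,
            new UsernamePasswordCredentials("yourusername", "yourpassword"));

    DefaultHttpClient httpclient = new DefaultHttpClient();
    httpclient.setCredentialsProvider(provider); // client now answers Basic/Digest challenges

    try {
        HttpResponse res = httpclient.execute(new HttpGet(URI.create(url[0])));
        if (res.getStatusLine().getStatusCode() == 401) {
            return null; // credentials were rejected
        }
        return new MjpegInputStream(res.getEntity().getContent());
    } catch (IOException e) {
        Log.d(TAG, "Request failed", e);
    }
    return null;
}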
Nice work! As for the onResume() problem, wouldn't it be enough to move the following code from onCreate() to onResume()?
//sample public cam
String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&%3bdummy=1333689998337";
mv = new MjpegView(this);
setContentView(mv);
new DoRead().execute(URL);
That way the view and a new instance of the AsyncTask are simply recreated... I tried it and it works for me...
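As a rough sketch of one possible wiring (not the only one), the activity could end up looking like this, with onPause() unchanged from the question:
public class MjpegActivity extends Activity {
    private static final String TAG = "MjpegActivity";
    private MjpegView mv;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
    }

    @Override
    public void onResume() {
        super.onResume();
        //sample public cam
        String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&%3bdummy=1333689998337";
        // Recreate the view and start a fresh AsyncTask every time we return to the
        // foreground, so playback restarts after the screen is unlocked.
        mv = new MjpegView(this);
        setContentView(mv);
        new DoRead().execute(URL);
    }

    @Override
    public void onPause() {
        super.onPause();
        mv.stopPlayback();
    }
}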
Thanks for the code, it is very helpful.
I would like to suggest a few optimization tips that I already use in my own code; overall performance can easily be improved severalfold.
Where possible, I removed memory allocations during frame reading:
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 200000 + HEADER_MAX_LENGTH;
private final String CONTENT_LENGTH = "Content-Length:";
private final String CONTENT_END = "\r\n";
private final static byte[] gFrameData = new byte[FRAME_MAX_LENGTH];
private final static byte[] gHeader = new byte[HEADER_MAX_LENGTH];
BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();

public Bitmap readMjpegFrame() throws IOException {
    mark(FRAME_MAX_LENGTH);
    int headerLen = getStartOfSequence(SOI_MARKER);
    if (headerLen < 0)
        return null;
    reset();
    readFully(gHeader, 0, headerLen);
    int contentLen;
    try
    {
        contentLen = parseContentLength(gHeader, headerLen);
    } catch (NumberFormatException nfe)
    {
        nfe.getStackTrace();
        Log.d(TAG, "catch NumberFormatException hit", nfe);
        contentLen = getEndOfSequence(EOF_MARKER);
    }
    readFully(gFrameData, 0, contentLen);
    Bitmap bm = BitmapFactory.decodeByteArray(gFrameData, 0, contentLen, bitmapOptions);
    bitmapOptions.inBitmap = bm;
    return bm;
}
Optimize parseContentLength and remove string operations as far as possible:
byte[] CONTENT_LENGTH_BYTES;
byte[] CONTENT_END_BYTES;

public MjpegInputStream(InputStream in)
{
    super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
    bitmapOptions.inSampleSize = 1;
    bitmapOptions.inPreferredConfig = Bitmap.Config.RGB_565;
    bitmapOptions.inPreferQualityOverSpeed = false;
    bitmapOptions.inPurgeable = true;
    try
    {
        CONTENT_LENGTH_BYTES = CONTENT_LENGTH.getBytes("UTF-8");
        CONTENT_END_BYTES = CONTENT_END.getBytes("UTF-8");
    } catch (UnsupportedEncodingException e)
    {
        e.printStackTrace();
    }
}

private int findPattern(byte[] buffer, int bufferLen, byte[] pattern, int offset)
{
    int seqIndex = 0;
    for (int i = offset; i < bufferLen; ++i)
    {
        if (buffer[i] == pattern[seqIndex])
        {
            ++seqIndex;
            if (seqIndex == pattern.length)
            {
                return i + 1;
            }
        } else
        {
            seqIndex = 0;
        }
    }
    return -1;
}

private int parseContentLength(byte[] headerBytes, int length) throws IOException, NumberFormatException
{
    int begin = findPattern(headerBytes, length, CONTENT_LENGTH_BYTES, 0);
    int end = findPattern(headerBytes, length, CONTENT_END_BYTES, begin) - CONTENT_END_BYTES.length;
    // converting string to int
    int number = 0;
    int radix = 1;
    for (int i = end - 1; i >= begin; --i)
    {
        if (headerBytes[i] > 47 && headerBytes[i] < 58)
        {
            number += (headerBytes[i] - 48) * radix;
            radix *= 10;
        }
    }
    return number;
}
I was rewriting this code for Stack Overflow, so there may be mistakes in it. In my original version I use two threads: one reads the frames and the other renders them.
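That two-thread split is not shown above; as a purely illustrative sketch (the queue, its size, and all names here are my own, not from the answer), the reader thread could push decoded frames into a small bounded queue that the render thread drains. It assumes access to the fields already used in MjpegViewThread (mRun, mIn, mSurfaceHolder, destRect) and needs the java.util.concurrent imports (ArrayBlockingQueue, BlockingQueue, TimeUnit).
// Small bounded hand-off between the reader and the renderer.
private final BlockingQueue<Bitmap> frameQueue = new ArrayBlockingQueue<Bitmap>(2);

// Reader thread: blocks on the network stream, never touches the SurfaceView.
Thread reader = new Thread(new Runnable() {
    public void run() {
        while (mRun) {
            try {
                Bitmap bm = mIn.readMjpegFrame();
                // Drop the frame if the renderer is still busy, so reading never stalls.
                frameQueue.offer(bm, 50, TimeUnit.MILLISECONDS);
            } catch (IOException e) {
                Log.d(TAG, "reader thread stopping", e);
                return;
            } catch (InterruptedException e) {
                return;
            }
        }
    }
});

// Render thread: only draws whatever the reader has already produced.
Thread renderer = new Thread(new Runnable() {
    public void run() {
        while (mRun) {
            try {
                Bitmap bm = frameQueue.take();
                Canvas c = mSurfaceHolder.lockCanvas();
                if (c == null) continue;
                try {
                    c.drawColor(Color.BLACK);
                    c.drawBitmap(bm, null, destRect(bm.getWidth(), bm.getHeight()), null);
                } finally {
                    mSurfaceHolder.unlockCanvasAndPost(c);
                }
            } catch (InterruptedException e) {
                return;
            }
        }
    }
});
// Both threads would then be started together, e.g. from startPlayback().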
I hope this helps someone.