1.客户端采用juv-rtmp-client-1.5.10.jar+android摄像头采集视频数据发布视频直播流。
2.服务端是采用开源的Red5流媒体服务器。
目前状况:
1.客户端和服务端连接正常,并且在发布视频流的时候如果采用 NetStream.RECORD方式发布到服务器端,保存下来的 .flv文件是可以成功播放和支持点播的(测试用的是Red5自带的oflaDemo)
2.当我采用 NetStream.LIVE的方式发布视频直播流到服务器的时候,用Red5自带的Demo却无法显示视频.
3.这个问题我纠结了几个星期了,各位大牛们请帮小弟一把,小弟在此谢过了.
(以上这些只是目前做的,如果小弟的思路有问题还请大牛指出来,别让小弟走太多的弯路,流媒体这块我才是接触不到2个星期)
101 个解决方案
#1
小弟第一次在这上面问问题,不太懂规矩,可能上面光说没代码大家不太明白!下面我就把代码贴出来供大家参考下!录制视频的类:
package com.cn.rtmp;
import java.io.IOException;
import java.util.Map;
import com.cn.rtmp.R;
import com.smaxe.io.ByteArray;
import com.smaxe.uv.client.INetStream;
import com.smaxe.uv.client.NetStream;
import com.smaxe.uv.client.camera.AbstractCamera;
import com.smaxe.uv.stream.support.MediaDataByteArray;
import android.app.Activity;
import android.content.Context;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.TextView;
/**
 * Camera-preview activity that captures frames via Camera.PreviewCallback
 * (one raw byte[] per frame), converts them from NV21 to RGB, encodes them
 * with the ScreenVideo codec (see RemoteUtil) and publishes the result to a
 * Red5 server over RTMP via the juv-rtmp client library.
 *
 * @author jay-hmw
 */
public class CameraVideoActivity extends Activity {
private SurfaceView surfaceView;// camera preview surface
private SurfaceHolder surfaceHolder;// holder for the preview surface
private Camera camera;// hardware camera
private int width;// preview width in pixels
private int height;// preview height in pixels
private boolean init;// true once per-stream encoder state is initialised
private int blockWidth;// ScreenVideo block width in pixels
private int blockHeight;// ScreenVideo block height in pixels
private int timeBetweenFrames; // delay between frames in ms (1000 / frameRate)
private int frameCounter;// frames sent since the last forced key-frame
private byte[] previous;// previous RGB frame, diff base for inter-frames
private TextView hour; // elapsed-time display: hours
private TextView minute; // elapsed-time display: minutes
private TextView second; // elapsed-time display: seconds
private Button mStart; // start button
private Button mStop; // stop button
private Button mReturn; // return button
public static AndroidCamera mCamera;
private boolean isStreaming = false;
private boolean isTiming=true;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Make the activity full-screen with no title bar.
getWindow().setFormat(PixelFormat.TRANSLUCENT);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.video);
initMode();
}
/**
 * Looks up the UI widgets, creates the camera wrapper and wires up the
 * start/stop/return button handlers.
 */
private void initMode() {
// mSurfaceView = (SurfaceView)
// findViewById(R.id.mediarecorder_Surfaceview);
hour = (TextView) findViewById(R.id.mediarecorder_TextView01);
minute = (TextView) findViewById(R.id.mediarecorder_TextView03);
second = (TextView) findViewById(R.id.mediarecorder_TextView05);
mStart = (Button) findViewById(R.id.mediarecorder_VideoStartBtn);
mStop = (Button) findViewById(R.id.mediarecorder_VideoStopBtn);
mReturn = (Button) findViewById(R.id.mediarecorder_VideoReturnBtn);
// RTMPConnectionUtil.ConnectRed5(CameraVideoActivity.this);
mCamera = new AndroidCamera(CameraVideoActivity.this);
// Start: begin publishing and kick off the one-second timer tick.
mStart.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mCamera.startVideo();
handler.postDelayed(task, 1000);
mStart.setEnabled(false);
mReturn.setEnabled(true);
mStop.setEnabled(true);
isTiming=true;
// Resume the preview if frames were already flowing before a stop.
if(isStreaming){
camera.startPreview();
}
}
});
// Return: stop capture, close the publish stream, leave the activity.
mReturn.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mCamera.stopVideo();
if (RTMPConnectionUtil.netStream != null) {
RTMPConnectionUtil.netStream.close();
}
finish();
}
});
// Stop: pause the preview and the timer, re-enable Start.
mStop.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
camera.stopPreview();
isTiming=false;
mStart.setEnabled(true);
mReturn.setEnabled(true);
mStop.setEnabled(false);
}
});
}
@Override
public void onStop() {
super.onStop();
// Drop the camera wrapper and close the publish stream when the
// activity leaves the foreground.
mCamera = null;
if (RTMPConnectionUtil.netStream != null) {
RTMPConnectionUtil.netStream.close();
}
}
@Override
public void onDestroy() {
super.onDestroy();
// NOTE(review): killing the whole process is discouraged on Android;
// normal activity teardown should be sufficient — confirm before keeping.
System.exit(0);
}
/**
 * Camera wrapper bridging Android preview callbacks to the RTMP client
 * library (frames are delivered via AbstractCamera#fireOnVideoData).
 *
 * @author jay-hmw
 */
public class AndroidCamera extends AbstractCamera implements
SurfaceHolder.Callback, Camera.PreviewCallback {
public AndroidCamera(Context context) {
surfaceView = (SurfaceView) findViewById(R.id.mediarecorder_Surfaceview);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(AndroidCamera.this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
// CIF preview resolution.
width = 352;
height = 288;
init = false;
System.out.println("*****相机初始化完成*****");
}
// NOTE(review): this listener is never registered anywhere in this class;
// it duplicates the anonymous listener built in startVideo() and appears
// to be dead code.
private NetStream.ListenerAdapter a = new NetStream.ListenerAdapter() {
@Override
public void onNetStatus(final INetStream source,
final Map<String, Object> info) {
final Object code = info.get("code");
if (NetStream.PUBLISH_START.equals(code)) {
if (CameraVideoActivity.mCamera != null) {
RTMPConnectionUtil.netStream.attachCamera(mCamera, -1);
mCamera.start();
System.out.println("*****开始录像*****");
} else {
System.out.println("*****录像失败*****");
}
}
}
};
/**
 * Creates a fresh NetStream on the shared connection and publishes it
 * under the name "me". The camera is attached and the preview started
 * only after the server reports PUBLISH_START.
 */
private void startVideo() {
RTMPConnectionUtil.netStream = new UltraNetStream(
RTMPConnectionUtil.connection);
RTMPConnectionUtil.netStream
.addEventListener(new NetStream.ListenerAdapter() {
@Override
public void onNetStatus(final INetStream source,
final Map<String, Object> info) {
final Object code = info.get("code");
// Attach only once the server confirms publishing started.
if (NetStream.PUBLISH_START.equals(code)) {
if (CameraVideoActivity.mCamera != null) {
RTMPConnectionUtil.netStream.attachCamera(
mCamera, -1);
mCamera.start();
System.out.println("*****开始录像*****");
} else {
System.out.println("*****录像失败*****");
}
}
}
});
// RECORD mode: the server persists the stream as an .flv file.
// Switch to the LIVE line below for live-only broadcasting.
RTMPConnectionUtil.netStream.publish("me", NetStream.RECORD);
//RTMPConnectionUtil.netStream.publish("me", NetStream.LIVE);
}
/**
 * Stops the preview and releases the hardware camera.
 */
private void stopVideo() {
if (camera != null) {
camera.setPreviewCallback(null);
camera.stopPreview();
camera.release();
camera = null;
}
}
/**
 * Starts the camera preview; frames begin arriving in onPreviewFrame.
 */
public void start() {
camera.startPreview();
}
@Override
public void onPreviewFrame(byte[] arg0, Camera arg1) {
isStreaming = true;
// One-time encoder initialisation for this stream.
if (!init) {
blockWidth = 32;
blockHeight = 32;
timeBetweenFrames = 100; // 1000 / frameRate
frameCounter = 0;
previous = null;
init = true;
}
final long ctime = System.currentTimeMillis();
System.out.println("*****相机采集到的数组长度" + arg0.length + "*****");
// Preview data arrives as NV21 (YUV420SP); convert to packed RGB.
byte[] current = RemoteUtil.decodeYUV420SP2RGB(arg0, width, height);
// byte[] current = RemoteUtil.decodeYUV420SP2YUV420(arg0, arg0.length);
try {
// Encode a ScreenVideo frame (key-frame whenever previous == null)
// and hand it to the RTMP library.
final byte[] packet = RemoteUtil.encode(current, previous,
blockWidth, blockHeight, width, height);
fireOnVideoData(new MediaDataByteArray(timeBetweenFrames,
new ByteArray(packet)));
previous = current;
// Force a key-frame every 10 frames by clearing the diff base.
if (++frameCounter % 10 == 0)
previous = null;
} catch (Exception e) {
e.printStackTrace();
}
// Throttle to the target frame rate.
// NOTE(review): sleeping here blocks the camera callback thread —
// consider moving encoding off this thread.
final int spent = (int) (System.currentTimeMillis() - ctime);
try {
Thread.sleep(Math.max(0, timeBetweenFrames - spent));
} catch (InterruptedException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
System.out.println("*****系统执行surfaceChanged*****");
// Connect to the Red5 server once the preview surface is ready.
RTMPConnectionUtil.ConnectRed5(CameraVideoActivity.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// Open the camera and route preview frames to this callback.
camera = Camera.open();
try {
camera.setPreviewDisplay(surfaceHolder);
camera.setPreviewCallback(this);
Camera.Parameters params = camera.getParameters();
params.setPreviewSize(width, height);
camera.setParameters(params);
} catch (IOException e) {
e.printStackTrace();
camera.release();
camera = null;
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
isStreaming = false;
if (camera != null) {
camera.stopPreview();
camera.release();
camera = null;
}
}
}
/* Timer plumbing for the on-screen elapsed-time display. */
private Handler handler = new Handler();
int s, h, m;
private Runnable task = new Runnable() {
public void run() {
if (isTiming) {
// Re-arm one second ahead, then refresh the h/m/s text views.
handler.postDelayed(this, 1000);
s++;
if (s < 60) {
second.setText(format(s));
} else if (s < 3600) {
m = s / 60;
s = s % 60;
minute.setText(format(m));
second.setText(format(s));
} else {
h = s / 3600;
m = (s % 3600) / 60;
s = (s % 3600) % 60;
hour.setText(format(h));
minute.setText(format(m));
second.setText(format(s));
}
}
}
};
/* Zero-pads a time component to two digits, e.g. 7 -> "07". */
public String format(int i) {
String s = i + "";
if (s.length() == 1) {
s = "0" + s;
}
return s;
}
}
#2
连接服务器的类:
package com.cn.rtmp;
import java.util.Date;
import java.util.Map;
import android.content.Context;
import android.content.Intent;
import android.os.Message;
import android.util.Log;
import com.smaxe.uv.Responder;
import com.smaxe.uv.client.INetConnection;
import com.smaxe.uv.client.INetStream;
/**
 * Owns the single RTMP connection and publish stream to the Red5 server,
 * plus (largely commented-out) RPC plumbing for server-side methods.
 *
 * NOTE(review): connection/netStream/message are public static mutable
 * fields, so only one connection can exist at a time and any caller can
 * replace them.
 */
public class RTMPConnectionUtil {
// RTMP endpoint of the Red5 application.
// NOTE(review): hard-coded LAN address — should come from configuration.
private static final String red5_url = "rtmp://192.168.1.103/fitcDemo";
public static UltraNetConnection connection;
public static UltraNetStream netStream;
public static String message;
/**
 * Opens a new connection to {@link #red5_url} with 256 KiB send/receive
 * buffers and the inactivity timeout disabled. Connection status is
 * reported asynchronously to a {@link NetConnectionListener}.
 */
public static void ConnectRed5(Context context) {
// License.setKey("63140-D023C-D7420-00B15-91FC7");
connection = new UltraNetConnection();
connection.configuration().put(UltraNetConnection.Configuration.INACTIVITY_TIMEOUT, -1);
connection.configuration().put(UltraNetConnection.Configuration.RECEIVE_BUFFER_SIZE, 256*1024);
connection.configuration().put(UltraNetConnection.Configuration.SEND_BUFFER_SIZE, 256*1024);
//connection.client(new ClientHandler(context));
connection.addEventListener(new NetConnectionListener());
//Log.d("DEBUG", User.id + " - " + User.phone);
connection.connect(red5_url);
}
// private static class ClientHandler extends Object {
//
// private Context context;
//
// ClientHandler(Context context) {
// this.context = context;
// };
//
// //Server invoke this method
// public void getVideoInfo(String fromUserId, String fromUserName,String message) {
//// System.out.println(fromUserId + " ++ " + fromUserName + " ++ " + message);
//// RTMPConnectionUtil.message = message;
//// Intent intent = new Intent(context, ChatActivity.class);
//// intent.putExtra("state", "callyou");
//// intent.putExtra("who", fromUserName);
//// context.startActivity(intent);
// }
//
// //Server invoke this method when receiver reject
// public void rejected(String userid, String username) {
//// System.out.println(userid + " ++ " + username);
//// Message msg = ChatActivity.handler.obtainMessage();
//// msg.arg1 = 0; //receiver reject
//// msg.sendToTarget();
// }
//
// //Server invoke this method when receiver receive call
// public void addMember(String userId, String userName) {
//// System.out.println(userId + " ++ " + userName);
////
//// Message msg = ChatActivity.handler.obtainMessage();
//// msg.arg1 = 1;
//// msg.sendToTarget();
////
//// Log.d("DEBUG", "addMember()");
//
// }
//
// //Server invoke this method when receiver is not login
// public void Info(String information) {
//// System.out.println("Info" + information);
////
//// Message msg = ChatActivity.handler.obtainMessage();
//// if (information.equals("client is not login the Red5 Server")) {
//// msg.arg1 = 2;
//// msg.sendToTarget();
//// } else if (information.equals("the client is calling, please try again")) {
//// msg.arg1 = 3;
//// msg.sendToTarget();
//// }
// }
//
// public void onBWDone()
// {
//
// }
//
// public void onBWDone(Object[] paramArrayOfObject)
// {
//
// }
// }
/**
 * Logs connection-level events: async errors, I/O errors and net-status
 * changes (e.g. CONNECT_SUCCESS).
 */
private static class NetConnectionListener extends UltraNetConnection.ListenerAdapter {
public NetConnectionListener() {}
@Override
public void onAsyncError(final INetConnection source, final String message, final Exception e) {
System.out.println("NetConnection#onAsyncError: " + message + " "+ e);
}
@Override
public void onIOError(final INetConnection source, final String message) {
System.out.println("NetConnection#onIOError: " + message);
}
@Override
public void onNetStatus(final INetConnection source, final Map<String, Object> info) {
System.out.println("NetConnection#onNetStatus: " + info);
final Object code = info.get("code");
if (UltraNetConnection.CONNECT_SUCCESS.equals(code)) {
// source.call("testConnection", new Responder() {
// public void onResult(final Object result) {
// System.out.println("Method testConnection result: " + result);
// }
//
// public void onStatus(final Map<String, Object> status) {
// System.out.println("Method testConnection status: " + status);
// }
// });
}
}
}// NetConnectionListener
// Invokes the server-side createMeeting method (body currently disabled).
public static void invokeMethodFormRed5(String toUserId) {
// Date nowDate = new Date();
// String time = nowDate.getTime() + "" + (int)((Math.random()*100)%100);
// message = time;
// connection.call("createMeeting", responder, User.id + "", toUserId, message);
// Log.d("DEBUG", "call createMeeting");
}
// Result/status callback for the createMeeting RPC.
private static Responder responder = new Responder() {
@Override
public void onResult(Object arg0) {
System.out.println("Method createMeeting result: " + arg0);
callback_createMeeting();
}
@Override
public void onStatus(Map<String, Object> arg0) {
System.out.println("Method createMeetiong status: " + arg0);
}
};
// Invokes the server-side reject method (body currently disabled).
public static void invokeRejectMethod() {
// connection.call("reject", null, message, User.id);
}
private static void callback_createMeeting() {
//startVideo();
}
// Creates a publish stream on the shared connection; the actual publish
// call and camera attachment are commented out.
private static void startVideo() {
Log.d("DEBUG", "startVideo()");
netStream = new UltraNetStream(connection);
netStream.addEventListener(new UltraNetStream.ListenerAdapter() {
@Override
public void onNetStatus(final INetStream source, final Map<String, Object> info){
System.out.println("Publisher#NetStream#onNetStatus: " + info);
Log.d("DEBUG", "Publisher#NetStream#onNetStatus: " + info);
final Object code = info.get("code");
// if (UltraNetStream.PUBLISH_START.equals(code)) {
// if (VideoActivity.aCamera != null) {
// netStream.attachCamera(VideoActivity.aCamera, -1 /*snapshotMilliseconds*/);
// Log.d("DEBUG", "aCamera.start()");
// VideoActivity.aCamera.start();
// } else {
// Log.d("DEBUG", "camera == null");
// }
// }
}
});
// Log.i("DEBUG", "User.id:"+User.id+" message"+message);
// netStream.publish(User.id + message, UltraNetStream.RECORD);//"mp4:"+User.id + message+".mp4"
}
// Invokes the server-side enterMeeting method (body currently disabled).
public static void invokeEnterMeetingMethod() {
// connection.call("enterMeeting", enterResp, message, User.id);
}
// Result/status callback for the enterMeeting RPC.
private static Responder enterResp = new Responder() {
@Override
public void onResult(Object arg0) {
System.out.println("Method enterMeeting result: " + arg0);
callback_enterMeeting();
}
@Override
public void onStatus(Map<String, Object> arg0) {
System.out.println("Method enterMeetiong status: " + arg0);
}
};
private static void callback_enterMeeting() {
// Message msg = ChatActivity.handler.obtainMessage();
// msg.arg1 = 1;
// msg.sendToTarget();
//startVideo();
}
}
#3
负责编码的类:
package com.cn.rtmp;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import android.util.Log;
public class RemoteUtil {
private static Deflater deflater = new Deflater();
public static byte[] decodeYUV420SP2RGB(byte[] yuv420sp, int width, int height) {
final int frameSize = width * height;
byte[] rgbBuf = new byte[frameSize*3];
// if (rgbBuf == null) throw new NullPointerException("buffer 'rgbBuf' is null");
if (rgbBuf.length < frameSize * 3) throw new IllegalArgumentException("buffer 'rgbBuf' size " + rgbBuf.length + " < minimum " + frameSize * 3);
if (yuv420sp == null) throw new NullPointerException("buffer 'yuv420sp' is null");
if (yuv420sp.length < frameSize * 3 / 2) throw new IllegalArgumentException("buffer 'yuv420sp' size " + yuv420sp.length + " < minimum " + frameSize * 3 / 2);
int i = 0, y = 0;
int uvp = 0, u = 0, v = 0;
int y1192 = 0, r = 0, g = 0, b = 0;
for (int j = 0, yp = 0; j < height; j++) {
uvp = frameSize + (j >> 1) * width;
u = 0;
v = 0;
for (i = 0; i < width; i++, yp++) {
y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0) y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
y1192 = 1192 * y;
r = (y1192 + 1634 * v);
g = (y1192 - 833 * v - 400 * u);
b = (y1192 + 2066 * u);
if (r < 0) r = 0; else if (r > 262143) r = 262143;
if (g < 0) g = 0; else if (g > 262143) g = 262143;
if (b < 0) b = 0; else if (b > 262143) b = 262143;
rgbBuf[yp * 3] = (byte)(r >> 10);
rgbBuf[yp * 3 + 1] = (byte)(g >> 10);
rgbBuf[yp * 3 + 2] = (byte)(b >> 10);
}
}//for
return rgbBuf;
}// decodeYUV420Sp2RGB
public static byte[] decodeYUV420SP2YUV420(byte[]data,int length) {
int width = 176;
int height = 144;
byte[] str = new byte[length];
System.arraycopy(data, 0, str, 0,width*height);
int strIndex = width*height;
for(int i = width*height+1; i < length ;i+=2) {
str[strIndex++] = data[i];
}
for(int i = width*height;i<length;i+=2) {
str[strIndex++] = data[i];
}
return str;
} //YUV420SP2YUV420
public static byte[] encode(final byte[] current, final byte[] previous, final int blockWidth, final int blockHeight, final int width, final int height) throws Exception {
//这里限制了,最大上传视频容量为1G
ByteArrayOutputStream baos = new ByteArrayOutputStream(16 * 1024);
if (previous == null) {
baos.write(getTag(0x01 /* key-frame */, 0x03 /* ScreenVideo codec */));
} else {
baos.write(getTag(0x02 /* inter-frame */, 0x03 /* ScreenVideo codec */));
}
// write header
final int wh = width + ((blockWidth / 16 - 1) << 12);
final int hh = height + ((blockHeight / 16 - 1) << 12);
writeShort(baos, wh);
writeShort(baos, hh);
// write content
int y0 = height;
int x0 = 0;
int bwidth = blockWidth;
int bheight = blockHeight;
while (y0 > 0) {
bheight = Math.min(y0, blockHeight);
y0 -= bheight;
bwidth = blockWidth;
x0 = 0;
while (x0 < width) {
bwidth = (x0 + blockWidth > width) ? width - x0 : blockWidth;
final boolean changed = isChanged(current, previous, x0, y0, bwidth, bheight, width, height);
if (changed) {
ByteArrayOutputStream blaos = new ByteArrayOutputStream(4 * 1024);
DeflaterOutputStream dos = new DeflaterOutputStream(blaos, deflater);
for (int y = 0; y < bheight; y++) {
//Log.i("DEBUG", "current的长度:"+current.length+" 起始点:"+3 * ((y0 + bheight - y - 1) * width + x0)+" 终点:"+3 * bwidth);
dos.write(current, 3 * ((y0 + bheight - y - 1) * width + x0), 3 * bwidth);
}
// dos.write(current, 0, current.length);
dos.finish();
deflater.reset();
final byte[] bbuf = blaos.toByteArray();
final int written = bbuf.length;
// write DataSize
writeShort(baos, written);
// write Data
baos.write(bbuf, 0, written);
} else {
// write DataSize
writeShort(baos, 0);
}
x0 += bwidth;
}
}
return baos.toByteArray();
}
/**
* Writes short value to the {@link OutputStream <tt>os</tt>}.
*
* @param os
* @param n
* @throws Exception if an exception occurred
*/
private static void writeShort(OutputStream os, final int n) throws Exception {
os.write((n >> 8) & 0xFF);
os.write((n >> 0) & 0xFF);
}
/**
* Checks if image block is changed.
*
* @param current
* @param previous
* @param x0
* @param y0
* @param blockWidth
* @param blockHeight
* @param width
* @param height
* @return <code>true</code> if changed, otherwise <code>false</code>
*/
public static boolean isChanged(final byte[] current, final byte[] previous, final int x0, final int y0, final int blockWidth, final int blockHeight, final int width, final int height) {
if (previous == null)
return true;
for (int y = y0, ny = y0 + blockHeight; y < ny; y++) {
final int foff = 3 * (x0 + width * y);
final int poff = 3 * (x0 + width * y);
for (int i = 0, ni = 3 * blockWidth; i < ni; i++) {
if (current[foff + i] != previous[poff + i])
return true;
}
}
return false;
}
/**
* @param frame
* @param codec
* @return tag
*/
public static int getTag(final int frame, final int codec) {
return ((frame & 0x0F) << 4) + ((codec & 0x0F) << 0);
}
}
#4
兄台 我现在也在做这方面的东西,初学者菜鸟,您能把这个Demo给我发一份吗 不胜感激 希望能和您探讨一下 邮箱hubo_8911@163.com
#5
Demo已经到你的邮箱请查收,你是第一个看我提问的人..谢谢
#6
小弟我導入了 juv-rtmp-client-1.5.10.jar
也大概整了一個 android 包,表面上可以運行。
不過一點 Start 鈕就 force close 了
也請兄台給我個Demo吧。
萬分感謝!
tkuchris@hotmail.com
也大概整了一個 android 包,表面上可以運行。
不過一點 Start 鈕就 force close 了
也請兄台給我個Demo吧。
萬分感謝!
tkuchris@hotmail.com
#7
demo发到你邮箱了,请查收
#8
楼主 求源码 我最近在做关于视频流的项目 想看看你的源码找找思路 谢谢 邮箱258559020@qq.com
#9
請問樓主用的是哪款手機做測試?
我的 Qcom CPU 測試機 似乎攝像頭有問題
E/QualcommCamera(28801): Qvoid android::disable_msg_type(camera_device*, int32_t): E
Red5 Server一點反應也沒有
我目前要找找別的手機來測測看
我的 Qcom CPU 測試機 似乎攝像頭有問題
E/QualcommCamera(28801): Qvoid android::disable_msg_type(camera_device*, int32_t): E
Red5 Server一點反應也沒有
我目前要找找別的手機來測測看
#10
順便也問一下樓主的 Red5 版本是多少吧。
#11
我用的Red5-0.9.1,手机我用的是小米,如果你没用改Red5服务器地址的话,估计摄像机就会有问题,因为当你的点了开始的时候就开始连接服务器了.
#12
我用的是HTC手機,有刷過機 Android 4.0.4
Red5 版本是 1.0.0
服務器地址有改過了,手機的錄影計時數字有在跑,但Red5就是沒有反應。
看來我得先試 Red5-0.9.1 然候Android的版本拿2.3版來測。
Red5 版本是 1.0.0
服務器地址有改過了,手機的錄影計時數字有在跑,但Red5就是沒有反應。
看來我得先試 Red5-0.9.1 然候Android的版本拿2.3版來測。
#13
前輩看在同樣用屌絲手機的份上也發一份demo給我學習學習吧,vjnjc#qq.com
#14
測試出來了!
Phone: HTC Desire - Android 2.3.3
Server: Red5 - 0.9.1
RTMPConnectionUtil.netStream.publish("aaa", NetStream.RECORD);
會向Red5服務器發佈視頻,檔案會存在Red5(aaa是檔名)。
RTMPConnectionUtil.netStream.publish("aaa", NetStream.LIVE);
Red5服務器有反應,但檔案並不會儲存,所以自帶的oflaDemo應該也沒有檔案可以播放。
或許要往Live即時播放的Demo去做測試。
Phone: HTC Desire - Android 2.3.3
Server: Red5 - 0.9.1
RTMPConnectionUtil.netStream.publish("aaa", NetStream.RECORD);
會向Red5服務器發佈視頻,檔案會存在Red5(aaa是檔名)。
RTMPConnectionUtil.netStream.publish("aaa", NetStream.LIVE);
Red5服務器有反應,但檔案並不會儲存,所以自帶的oflaDemo應該也沒有檔案可以播放。
或許要往Live即時播放的Demo去做測試。
#15
我测试过了,这个是可以通过的,现在我弄的可以现场直播了..边录边播~~
#16
大哥,我也在研究直播这块!搞了好久都没有搞出来,能发个demo学习一下吗?邮箱:236056760@qq.com
#17
請問樓主是如何實現的? 一樣使用oflaDemo的範例?
#18
你进入RED5中叫做:Publisher的demo中,这个demo其实就是RED5的调试器,手机端我们用LIVE模式,
如下面代码RTMPConnectionUtil.netStream.publish("aaa", NetStream.LIVE);
先点击connect,然后输入你直播流的名称"aaa",点击play就能顺利播放了,这个是现场直播.
对应的如果你用Record模式,就是点播模式了.
你可以去试试
如下面代码RTMPConnectionUtil.netStream.publish("aaa", NetStream.LIVE);
先点击connect,然后输入你直播流的名称"aaa",点击play就能顺利播放了,这个是现场直播.
对应的如果你用Record模式,就是点播模式了.
你可以去试试
#19
楼主,,请问你播放的时候会不会有一卡一卡的情况。。。是怎么解决的呢?
#20
回應harry163:
你是用vlc播放的嗎?我用vlc看的時候影片會是有點快轉的狀態。
以下是我的問題:
Red5收到的FLV檔案還滿大的,10秒大約就6M了,不曉得有什麼方法可以讓檔案變小?
你是用vlc播放的嗎?我用vlc看的時候影片會是有點快轉的狀態。
以下是我的問題:
Red5收到的FLV檔案還滿大的,10秒大約就6M了,不曉得有什麼方法可以讓檔案變小?
#21
也正搞这个,LZ,可以发一个DEMO么,346072628@qq.com
#22
我是用red5的Publisher demo来实时播放上传的视频的,感觉是设置的帧频的关系,设置成100会有快进的感觉,设置的大了,又会产生一卡一卡的情况...头痛中
#23
播放视频的在哪段是啊?
#24
影片檔案容量的改進可以搜尋一下ScreenCodec2演算法。
一個叫SVC2的flash影像編碼,可替換掉原來的encode function,
原本java的BufferedImage在Android內並不適用,
可考慮轉換成Bitmap的思路,當然function內容需要小部份改寫,
目前我錄5秒的大約是1MB。
一個叫SVC2的flash影像編碼,可替換掉原來的encode function,
原本java的BufferedImage在Android內並不適用,
可考慮轉換成Bitmap的思路,當然function內容需要小部份改寫,
目前我錄5秒的大約是1MB。
#25
最近我也在研究这个,不知道楼主的研究成果怎么样了?希望楼主把demo给我发一份,让我研究研究,先谢谢楼主了。
#26
邮箱为1826559560@qq.com
#27
不知道楼主现在可否发分代码我,正在研究(357140343@qq.com)
#28
不知道楼主现在可否发分代码我,正在研究(357140343@qq.com)
#29
楼主发我一份demo吧 谢谢~
#30
楼主是个好人,小弟现在也在研究,不知道楼主可以发一份demo给我,邮箱:667laobao@163.com
#31
楼主,可以发一份DEMO给我吗?最近我研究了好久都没弄通,想学习一下。邮箱 517526390@qq.com
#32
楼主好人,我现在也在研究,不知道楼主可以发一份demo给我,邮箱:554069948@qq.com
#33
1850889163@qq.com
发一份demo给我吧
发一份demo给我吧
#34
上面的写错了
185089163@qq.com
发一份demo给我吧
185089163@qq.com
发一份demo给我吧
#35
楼主可否也发我一个demo学习一下,不胜感激。363326947@qq.com
#36
最近也刚接触,希望能给Demo学习,楼主赏个!!!
邮箱是:
huscarter@163.com
邮箱是:
huscarter@163.com
#37
大哥,能否提供代码给我,最近在研究这个,2221699802@qq.com
#38
我也想要一份,测试看看,zsineng@163.com 谢谢了
#39
写的很好,,我最近也在研究这个,,可否把dmeo 发我一份 谢谢了:
snx1000@126.com
snx1000@126.com
#40
我正在做新闻直播,找了半天了,能给我发一份吗? wangbojun2008@126.com
#41
小弟最近也在弄android和red5实时视频的东西,希望也能给我分demo已做参考,谢谢!
393473617@qq.com
393473617@qq.com
#42
小弟最近也在弄android和red5实时视频的东西,希望也能给我分demo已做参考,谢谢!
79898485@qq.com
79898485@qq.com
#43
求demo,邮箱935770676@qq.com.
谢谢分享。
谢谢分享。
#44
这两天在学习这个 有些云里雾里的
好心人发个Demo给我研究研究吧
好心人发个Demo给我研究研究吧
#45
邮箱为:yimisunrise@126.com
#46
你好,我也想研究一下实时点播是如何实现的,能发个demo到我邮箱吗?邮箱:yangjiaxinghuman@163.com
#47
337287798@qq.com 求demo
#48
你好,我也想研究一下实时点播是如何实现的,能发个demo到我邮箱吗?邮箱:alex_xb@126.com
多谢了!
多谢了!
#49
大神,求demo,1193467478@qq.com,谢谢
#50
楼主,我最近在做这个,借鉴了下你的源码,发布到red5了,用red5的demo也能直播,但是播放的效果很差,基本就是花花绿绿的一片,不知道楼主有这种情况没
#1
小弟第一次在这上面问问题,不太懂规矩,可能上面光说没代码大家不太明白!下面我就把代码贴出来供大家参考下!录制视频的类:
package com.cn.rtmp;
import java.io.IOException;
import java.util.Map;
import com.cn.rtmp.R;
import com.smaxe.io.ByteArray;
import com.smaxe.uv.client.INetStream;
import com.smaxe.uv.client.NetStream;
import com.smaxe.uv.client.camera.AbstractCamera;
import com.smaxe.uv.stream.support.MediaDataByteArray;
import android.app.Activity;
import android.content.Context;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.TextView;
/**
 * Camera-preview activity that captures frames via Camera.PreviewCallback
 * (one raw byte[] per frame), converts them from NV21 to RGB, encodes them
 * with the ScreenVideo codec (see RemoteUtil) and publishes the result to a
 * Red5 server over RTMP via the juv-rtmp client library.
 *
 * @author jay-hmw
 */
public class CameraVideoActivity extends Activity {
private SurfaceView surfaceView;// camera preview surface
private SurfaceHolder surfaceHolder;// holder for the preview surface
private Camera camera;// hardware camera
private int width;// preview width in pixels
private int height;// preview height in pixels
private boolean init;// true once per-stream encoder state is initialised
private int blockWidth;// ScreenVideo block width in pixels
private int blockHeight;// ScreenVideo block height in pixels
private int timeBetweenFrames; // delay between frames in ms (1000 / frameRate)
private int frameCounter;// frames sent since the last forced key-frame
private byte[] previous;// previous RGB frame, diff base for inter-frames
private TextView hour; // elapsed-time display: hours
private TextView minute; // elapsed-time display: minutes
private TextView second; // elapsed-time display: seconds
private Button mStart; // start button
private Button mStop; // stop button
private Button mReturn; // return button
public static AndroidCamera mCamera;
private boolean isStreaming = false;
private boolean isTiming=true;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Make the activity full-screen with no title bar.
getWindow().setFormat(PixelFormat.TRANSLUCENT);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.video);
initMode();
}
/**
 * Looks up the UI widgets, creates the camera wrapper and wires up the
 * start/stop/return button handlers.
 */
private void initMode() {
// mSurfaceView = (SurfaceView)
// findViewById(R.id.mediarecorder_Surfaceview);
hour = (TextView) findViewById(R.id.mediarecorder_TextView01);
minute = (TextView) findViewById(R.id.mediarecorder_TextView03);
second = (TextView) findViewById(R.id.mediarecorder_TextView05);
mStart = (Button) findViewById(R.id.mediarecorder_VideoStartBtn);
mStop = (Button) findViewById(R.id.mediarecorder_VideoStopBtn);
mReturn = (Button) findViewById(R.id.mediarecorder_VideoReturnBtn);
// RTMPConnectionUtil.ConnectRed5(CameraVideoActivity.this);
mCamera = new AndroidCamera(CameraVideoActivity.this);
// Start: begin publishing and kick off the one-second timer tick.
mStart.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mCamera.startVideo();
handler.postDelayed(task, 1000);
mStart.setEnabled(false);
mReturn.setEnabled(true);
mStop.setEnabled(true);
isTiming=true;
// Resume the preview if frames were already flowing before a stop.
if(isStreaming){
camera.startPreview();
}
}
});
// Return: stop capture, close the publish stream, leave the activity.
mReturn.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mCamera.stopVideo();
if (RTMPConnectionUtil.netStream != null) {
RTMPConnectionUtil.netStream.close();
}
finish();
}
});
// Stop: pause the preview and the timer, re-enable Start.
mStop.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
camera.stopPreview();
isTiming=false;
mStart.setEnabled(true);
mReturn.setEnabled(true);
mStop.setEnabled(false);
}
});
}
@Override
public void onStop() {
super.onStop();
// Drop the camera wrapper and close the publish stream when the
// activity leaves the foreground.
mCamera = null;
if (RTMPConnectionUtil.netStream != null) {
RTMPConnectionUtil.netStream.close();
}
}
@Override
public void onDestroy() {
super.onDestroy();
// NOTE(review): killing the whole process is discouraged on Android;
// normal activity teardown should be sufficient — confirm before keeping.
System.exit(0);
}
/**
 * Camera wrapper bridging Android preview callbacks to the RTMP client
 * library (frames are delivered via AbstractCamera#fireOnVideoData).
 *
 * @author jay-hmw
 */
public class AndroidCamera extends AbstractCamera implements
SurfaceHolder.Callback, Camera.PreviewCallback {
public AndroidCamera(Context context) {
surfaceView = (SurfaceView) findViewById(R.id.mediarecorder_Surfaceview);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(AndroidCamera.this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
// CIF preview resolution.
width = 352;
height = 288;
init = false;
System.out.println("*****相机初始化完成*****");
}
// NOTE(review): this listener is never registered anywhere in this class;
// it duplicates the anonymous listener built in startVideo() and appears
// to be dead code.
private NetStream.ListenerAdapter a = new NetStream.ListenerAdapter() {
@Override
public void onNetStatus(final INetStream source,
final Map<String, Object> info) {
final Object code = info.get("code");
if (NetStream.PUBLISH_START.equals(code)) {
if (CameraVideoActivity.mCamera != null) {
RTMPConnectionUtil.netStream.attachCamera(mCamera, -1);
mCamera.start();
System.out.println("*****开始录像*****");
} else {
System.out.println("*****录像失败*****");
}
}
}
};
/**
 * Creates a fresh NetStream on the shared connection and publishes it
 * under the name "me". The camera is attached and the preview started
 * only after the server reports PUBLISH_START.
 */
private void startVideo() {
RTMPConnectionUtil.netStream = new UltraNetStream(
RTMPConnectionUtil.connection);
RTMPConnectionUtil.netStream
.addEventListener(new NetStream.ListenerAdapter() {
@Override
public void onNetStatus(final INetStream source,
final Map<String, Object> info) {
final Object code = info.get("code");
// Attach only once the server confirms publishing started.
if (NetStream.PUBLISH_START.equals(code)) {
if (CameraVideoActivity.mCamera != null) {
RTMPConnectionUtil.netStream.attachCamera(
mCamera, -1);
mCamera.start();
System.out.println("*****开始录像*****");
} else {
System.out.println("*****录像失败*****");
}
}
}
});
// RECORD mode: the server persists the stream as an .flv file.
// Switch to the LIVE line below for live-only broadcasting.
RTMPConnectionUtil.netStream.publish("me", NetStream.RECORD);
//RTMPConnectionUtil.netStream.publish("me", NetStream.LIVE);
}
/**
 * Stops the preview and releases the hardware camera.
 */
private void stopVideo() {
if (camera != null) {
camera.setPreviewCallback(null);
camera.stopPreview();
camera.release();
camera = null;
}
}
/**
 * Starts the camera preview; frames begin arriving in onPreviewFrame.
 */
public void start() {
camera.startPreview();
}
@Override
public void onPreviewFrame(byte[] arg0, Camera arg1) {
isStreaming = true;
// One-time encoder initialisation for this stream.
if (!init) {
blockWidth = 32;
blockHeight = 32;
timeBetweenFrames = 100; // 1000 / frameRate
frameCounter = 0;
previous = null;
init = true;
}
final long ctime = System.currentTimeMillis();
System.out.println("*****相机采集到的数组长度" + arg0.length + "*****");
// Preview data arrives as NV21 (YUV420SP); convert to packed RGB.
byte[] current = RemoteUtil.decodeYUV420SP2RGB(arg0, width, height);
// byte[] current = RemoteUtil.decodeYUV420SP2YUV420(arg0, arg0.length);
try {
// Encode a ScreenVideo frame (key-frame whenever previous == null)
// and hand it to the RTMP library.
final byte[] packet = RemoteUtil.encode(current, previous,
blockWidth, blockHeight, width, height);
fireOnVideoData(new MediaDataByteArray(timeBetweenFrames,
new ByteArray(packet)));
previous = current;
// Force a key-frame every 10 frames by clearing the diff base.
if (++frameCounter % 10 == 0)
previous = null;
} catch (Exception e) {
e.printStackTrace();
}
// Throttle to the target frame rate.
// NOTE(review): sleeping here blocks the camera callback thread —
// consider moving encoding off this thread.
final int spent = (int) (System.currentTimeMillis() - ctime);
try {
Thread.sleep(Math.max(0, timeBetweenFrames - spent));
} catch (InterruptedException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
System.out.println("*****系统执行surfaceChanged*****");
// Connect to the Red5 server once the preview surface is ready.
RTMPConnectionUtil.ConnectRed5(CameraVideoActivity.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// Open the camera and route preview frames to this callback.
camera = Camera.open();
try {
camera.setPreviewDisplay(surfaceHolder);
camera.setPreviewCallback(this);
Camera.Parameters params = camera.getParameters();
params.setPreviewSize(width, height);
camera.setParameters(params);
} catch (IOException e) {
e.printStackTrace();
camera.release();
camera = null;
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
isStreaming = false;
if (camera != null) {
camera.stopPreview();
camera.release();
camera = null;
}
}
}
/* Timer plumbing for the on-screen elapsed-time display. */
private Handler handler = new Handler();
int s, h, m;
private Runnable task = new Runnable() {
public void run() {
if (isTiming) {
// Re-arm one second ahead, then refresh the h/m/s text views.
handler.postDelayed(this, 1000);
s++;
if (s < 60) {
second.setText(format(s));
} else if (s < 3600) {
m = s / 60;
s = s % 60;
minute.setText(format(m));
second.setText(format(s));
} else {
h = s / 3600;
m = (s % 3600) / 60;
s = (s % 3600) % 60;
hour.setText(format(h));
minute.setText(format(m));
second.setText(format(s));
}
}
}
};
/* Zero-pads a time component to two digits, e.g. 7 -> "07". */
public String format(int i) {
String s = i + "";
if (s.length() == 1) {
s = "0" + s;
}
return s;
}
}
#2
连接服务器的类:
package com.cn.rtmp;
import java.util.Date;
import java.util.Map;
import android.content.Context;
import android.content.Intent;
import android.os.Message;
import android.util.Log;
import com.smaxe.uv.Responder;
import com.smaxe.uv.client.INetConnection;
import com.smaxe.uv.client.INetStream;
/**
 * Owns the RTMP connection to the Red5 server and the outgoing video
 * {@code UltraNetStream} (juv-rtmp-client API). Connection status and
 * errors arrive asynchronously on the listener below.
 * NOTE(review): all state is static and mutated from async callbacks,
 * so this class is not thread-safe — confirm single-threaded use.
 */
public class RTMPConnectionUtil {
// Red5 application URL; hard-coded LAN address of the test server.
private static final String red5_url = "rtmp://192.168.1.103/fitcDemo";
public static UltraNetConnection connection;
public static UltraNetStream netStream;
public static String message;
/**
 * Opens the RTMP connection to the Red5 application. Success/failure is
 * reported asynchronously to {@link NetConnectionListener#onNetStatus}.
 *
 * @param context caller context (currently unused; kept for the
 *                commented-out ClientHandler wiring below)
 */
public static void ConnectRed5(Context context) {
// License.setKey("63140-D023C-D7420-00B15-91FC7");
connection = new UltraNetConnection();
// -1 disables the inactivity timeout; send/receive buffers raised to 256 KB.
connection.configuration().put(UltraNetConnection.Configuration.INACTIVITY_TIMEOUT, -1);
connection.configuration().put(UltraNetConnection.Configuration.RECEIVE_BUFFER_SIZE, 256*1024);
connection.configuration().put(UltraNetConnection.Configuration.SEND_BUFFER_SIZE, 256*1024);
//connection.client(new ClientHandler(context));
connection.addEventListener(new NetConnectionListener());
//Log.d("DEBUG", User.id + " - " + User.phone);
connection.connect(red5_url);
}
// private static class ClientHandler extends Object {
//
// private Context context;
//
// ClientHandler(Context context) {
// this.context = context;
// };
//
// //Server invoke this method
// public void getVideoInfo(String fromUserId, String fromUserName,String message) {
//// System.out.println(fromUserId + " ++ " + fromUserName + " ++ " + message);
//// RTMPConnectionUtil.message = message;
//// Intent intent = new Intent(context, ChatActivity.class);
//// intent.putExtra("state", "callyou");
//// intent.putExtra("who", fromUserName);
//// context.startActivity(intent);
// }
//
// //Server invoke this method when receiver reject
// public void rejected(String userid, String username) {
//// System.out.println(userid + " ++ " + username);
//// Message msg = ChatActivity.handler.obtainMessage();
//// msg.arg1 = 0; //receiver reject
//// msg.sendToTarget();
// }
//
// //Server invoke this method when receiver receive call
// public void addMember(String userId, String userName) {
//// System.out.println(userId + " ++ " + userName);
////
//// Message msg = ChatActivity.handler.obtainMessage();
//// msg.arg1 = 1;
//// msg.sendToTarget();
////
//// Log.d("DEBUG", "addMember()");
//
// }
//
// //Server invoke this method when receiver is not login
// public void Info(String information) {
//// System.out.println("Info" + information);
////
//// Message msg = ChatActivity.handler.obtainMessage();
//// if (information.equals("client is not login the Red5 Server")) {
//// msg.arg1 = 2;
//// msg.sendToTarget();
//// } else if (information.equals("the client is calling, please try again")) {
//// msg.arg1 = 3;
//// msg.sendToTarget();
//// }
// }
//
// public void onBWDone()
// {
//
// }
//
// public void onBWDone(Object[] paramArrayOfObject)
// {
//
// }
// }
/** Logs connection events; reacts to NetConnection.Connect.Success. */
private static class NetConnectionListener extends UltraNetConnection.ListenerAdapter {
public NetConnectionListener() {}
@Override
public void onAsyncError(final INetConnection source, final String message, final Exception e) {
System.out.println("NetConnection#onAsyncError: " + message + " "+ e);
}
@Override
public void onIOError(final INetConnection source, final String message) {
System.out.println("NetConnection#onIOError: " + message);
}
@Override
public void onNetStatus(final INetConnection source, final Map<String, Object> info) {
System.out.println("NetConnection#onNetStatus: " + info);
final Object code = info.get("code");
if (UltraNetConnection.CONNECT_SUCCESS.equals(code)) {
// source.call("testConnection", new Responder() {
// public void onResult(final Object result) {
// System.out.println("Method testConnection result: " + result);
// }
//
// public void onStatus(final Map<String, Object> status) {
// System.out.println("Method testConnection status: " + status);
// }
// });
}
}
}// NetConnectionListener
//invoke server method createMeeting
public static void invokeMethodFormRed5(String toUserId) {
// Date nowDate = new Date();
// String time = nowDate.getTime() + "" + (int)((Math.random()*100)%100);
// message = time;
// connection.call("createMeeting", responder, User.id + "", toUserId, message);
// Log.d("DEBUG", "call createMeeting");
}
// Responder for the server-side "createMeeting" call.
private static Responder responder = new Responder() {
@Override
public void onResult(Object arg0) {
// TODO Auto-generated method stub
System.out.println("Method createMeeting result: " + arg0);
callback_createMeeting();
}
@Override
public void onStatus(Map<String, Object> arg0) {
// TODO Auto-generated method stub
System.out.println("Method createMeetiong status: " + arg0);
}
};
//invoke server method reject
public static void invokeRejectMethod() {
// connection.call("reject", null, message, User.id);
}
private static void callback_createMeeting() {
//startVideo();
}
/**
 * Creates the publish stream on the open connection and registers a
 * status listener; camera attachment and publish() are commented out
 * and happen elsewhere in this sample.
 */
private static void startVideo() {
Log.d("DEBUG", "startVideo()");
netStream = new UltraNetStream(connection);
netStream.addEventListener(new UltraNetStream.ListenerAdapter() {
@Override
public void onNetStatus(final INetStream source, final Map<String, Object> info){
System.out.println("Publisher#NetStream#onNetStatus: " + info);
Log.d("DEBUG", "Publisher#NetStream#onNetStatus: " + info);
final Object code = info.get("code");
// if (UltraNetStream.PUBLISH_START.equals(code)) {
// if (VideoActivity.aCamera != null) {
// netStream.attachCamera(VideoActivity.aCamera, -1 /*snapshotMilliseconds*/);
// Log.d("DEBUG", "aCamera.start()");
// VideoActivity.aCamera.start();
// } else {
// Log.d("DEBUG", "camera == null");
// }
// }
}
});
// Log.i("DEBUG", "User.id:"+User.id+" message"+message);
// netStream.publish(User.id + message, UltraNetStream.RECORD);//"mp4:"+User.id + message+".mp4"
}
//invoke server method enterMeeting
public static void invokeEnterMeetingMethod() {
// connection.call("enterMeeting", enterResp, message, User.id);
}
// Responder for the server-side "enterMeeting" call.
private static Responder enterResp = new Responder() {
@Override
public void onResult(Object arg0) {
// TODO Auto-generated method stub
System.out.println("Method enterMeeting result: " + arg0);
callback_enterMeeting();
}
@Override
public void onStatus(Map<String, Object> arg0) {
// TODO Auto-generated method stub
System.out.println("Method enterMeetiong status: " + arg0);
}
};
private static void callback_enterMeeting() {
// Message msg = ChatActivity.handler.obtainMessage();
// msg.arg1 = 1;
// msg.sendToTarget();
//startVideo();
}
}
#3
负责编码的类:
package com.cn.rtmp;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import android.util.Log;
/**
 * Video utilities for RTMP publishing: converts NV21 camera preview
 * frames to RGB (or planar YUV) and encodes each frame as an FLV
 * "ScreenVideo" (codec id 3) packet, compressing changed blocks with zlib.
 * NOTE(review): the shared static Deflater makes encode() non-reentrant;
 * confirm that only one thread encodes at a time.
 */
public class RemoteUtil {
// Shared zlib compressor; reset after every frame in encode().
private static Deflater deflater = new Deflater();
/**
 * Converts an NV21 (YUV420SP) frame to packed 24-bit RGB using a
 * fixed-point BT.601-style transform.
 *
 * @param yuv420sp source frame, at least width*height*3/2 bytes
 * @param width    frame width in pixels
 * @param height   frame height in pixels
 * @return a new buffer of width*height*3 bytes (R, G, B per pixel)
 */
public static byte[] decodeYUV420SP2RGB(byte[] yuv420sp, int width, int height) {
final int frameSize = width * height;
byte[] rgbBuf = new byte[frameSize*3];
// if (rgbBuf == null) throw new NullPointerException("buffer 'rgbBuf' is null");
if (rgbBuf.length < frameSize * 3) throw new IllegalArgumentException("buffer 'rgbBuf' size " + rgbBuf.length + " < minimum " + frameSize * 3);
if (yuv420sp == null) throw new NullPointerException("buffer 'yuv420sp' is null");
if (yuv420sp.length < frameSize * 3 / 2) throw new IllegalArgumentException("buffer 'yuv420sp' size " + yuv420sp.length + " < minimum " + frameSize * 3 / 2);
int i = 0, y = 0;
int uvp = 0, u = 0, v = 0;
int y1192 = 0, r = 0, g = 0, b = 0;
for (int j = 0, yp = 0; j < height; j++) {
// NV21 keeps interleaved V/U pairs after the Y plane; one pair
// covers a 2x2 pixel block, hence the (j >> 1) row and the re-read
// on every even column below.
uvp = frameSize + (j >> 1) * width;
u = 0;
v = 0;
for (i = 0; i < width; i++, yp++) {
y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0) y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
y1192 = 1192 * y;
r = (y1192 + 1634 * v);
g = (y1192 - 833 * v - 400 * u);
b = (y1192 + 2066 * u);
// Clamp the 18-bit fixed-point intermediates before shifting down.
if (r < 0) r = 0; else if (r > 262143) r = 262143;
if (g < 0) g = 0; else if (g > 262143) g = 262143;
if (b < 0) b = 0; else if (b > 262143) b = 262143;
rgbBuf[yp * 3] = (byte)(r >> 10);
rgbBuf[yp * 3 + 1] = (byte)(g >> 10);
rgbBuf[yp * 3 + 2] = (byte)(b >> 10);
}
}//for
return rgbBuf;
}// decodeYUV420Sp2RGB
/**
 * Rearranges an NV21 frame into planar YUV420: the Y plane is copied,
 * then the two chroma planes are de-interleaved from the V/U pairs.
 * NOTE(review): width/height are hard-coded to 176x144, so this only
 * works when length == 176*144*3/2 — confirm before reuse (the caller
 * currently keeps this path commented out).
 */
public static byte[] decodeYUV420SP2YUV420(byte[]data,int length) {
int width = 176;
int height = 144;
byte[] str = new byte[length];
System.arraycopy(data, 0, str, 0,width*height);
int strIndex = width*height;
// First pass: every second byte starting at offset wh+1 (one chroma plane).
for(int i = width*height+1; i < length ;i+=2) {
str[strIndex++] = data[i];
}
// Second pass: the other chroma plane, starting at offset wh.
for(int i = width*height;i<length;i+=2) {
str[strIndex++] = data[i];
}
return str;
} //YUV420SP2YUV420
/**
 * Encodes one RGB frame as an FLV ScreenVideo packet: a tag byte
 * (key-frame when previous == null, inter-frame otherwise), a header
 * packing block size and frame dimensions into 12+4 bits each, then for
 * each block — scanned bottom-up, left-to-right — either a
 * zlib-compressed block (DataSize + data) or DataSize 0 when the block
 * is unchanged since the previous frame.
 *
 * @param current     current frame, packed RGB (3 bytes per pixel)
 * @param previous    previous frame, or null to force a key-frame
 * @param blockWidth  block width; the header encoding implies a multiple of 16
 * @param blockHeight block height; the header encoding implies a multiple of 16
 * @param width       frame width in pixels
 * @param height      frame height in pixels
 * @return the complete ScreenVideo packet
 * @throws Exception if an exception occurred while writing/compressing
 */
public static byte[] encode(final byte[] current, final byte[] previous, final int blockWidth, final int blockHeight, final int width, final int height) throws Exception {
// Initial 16 KB buffer; ByteArrayOutputStream grows as needed.
ByteArrayOutputStream baos = new ByteArrayOutputStream(16 * 1024);
if (previous == null) {
baos.write(getTag(0x01 /* key-frame */, 0x03 /* ScreenVideo codec */));
} else {
baos.write(getTag(0x02 /* inter-frame */, 0x03 /* ScreenVideo codec */));
}
// write header
final int wh = width + ((blockWidth / 16 - 1) << 12);
final int hh = height + ((blockHeight / 16 - 1) << 12);
writeShort(baos, wh);
writeShort(baos, hh);
// write content
int y0 = height;
int x0 = 0;
int bwidth = blockWidth;
int bheight = blockHeight;
while (y0 > 0) {
bheight = Math.min(y0, blockHeight);
y0 -= bheight;
bwidth = blockWidth;
x0 = 0;
while (x0 < width) {
bwidth = (x0 + blockWidth > width) ? width - x0 : blockWidth;
final boolean changed = isChanged(current, previous, x0, y0, bwidth, bheight, width, height);
if (changed) {
ByteArrayOutputStream blaos = new ByteArrayOutputStream(4 * 1024);
DeflaterOutputStream dos = new DeflaterOutputStream(blaos, deflater);
// Block rows are written bottom-up, as the codec expects.
for (int y = 0; y < bheight; y++) {
//Log.i("DEBUG", "current的长度:"+current.length+" 起始点:"+3 * ((y0 + bheight - y - 1) * width + x0)+" 终点:"+3 * bwidth);
dos.write(current, 3 * ((y0 + bheight - y - 1) * width + x0), 3 * bwidth);
}
// dos.write(current, 0, current.length);
dos.finish();
deflater.reset();
final byte[] bbuf = blaos.toByteArray();
final int written = bbuf.length;
// write DataSize
writeShort(baos, written);
// write Data
baos.write(bbuf, 0, written);
} else {
// write DataSize
writeShort(baos, 0);
}
x0 += bwidth;
}
}
return baos.toByteArray();
}
/**
 * Writes short value (big-endian) to the {@link OutputStream <tt>os</tt>}.
 *
 * @param os
 * @param n
 * @throws Exception if an exception occurred
 */
private static void writeShort(OutputStream os, final int n) throws Exception {
os.write((n >> 8) & 0xFF);
os.write((n >> 0) & 0xFF);
}
/**
 * Checks if image block is changed (byte-wise compare of the block's
 * RGB data against the previous frame; null previous means key-frame,
 * so every block counts as changed).
 *
 * @param current
 * @param previous
 * @param x0
 * @param y0
 * @param blockWidth
 * @param blockHeight
 * @param width
 * @param height
 * @return <code>true</code> if changed, otherwise <code>false</code>
 */
public static boolean isChanged(final byte[] current, final byte[] previous, final int x0, final int y0, final int blockWidth, final int blockHeight, final int width, final int height) {
if (previous == null)
return true;
for (int y = y0, ny = y0 + blockHeight; y < ny; y++) {
final int foff = 3 * (x0 + width * y);
final int poff = 3 * (x0 + width * y);
for (int i = 0, ni = 3 * blockWidth; i < ni; i++) {
if (current[foff + i] != previous[poff + i])
return true;
}
}
return false;
}
/**
 * Builds the ScreenVideo tag byte: frame type in the high nibble,
 * codec id in the low nibble.
 *
 * @param frame
 * @param codec
 * @return tag
 */
public static int getTag(final int frame, final int codec) {
return ((frame & 0x0F) << 4) + ((codec & 0x0F) << 0);
}
}
#4
兄台 我现在也在做这方面的东西,初学者菜鸟,您能把这个Demo给我发一份吗 不胜感激 希望能和您探讨一下 邮箱hubo_8911@163.com
#5
Demo已经到你的邮箱请查收,你是第一个看我提问的人..谢谢
#6
小弟我導入了 juv-rtmp-client-1.5.10.jar
也大概整了一個 android 包,表面上可以運行。
不過一點 Start 鈕就 force close 了
也請兄台給我個Demo吧。
萬分感謝!
tkuchris@hotmail.com
也大概整了一個 android 包,表面上可以運行。
不過一點 Start 鈕就 force close 了
也請兄台給我個Demo吧。
萬分感謝!
tkuchris@hotmail.com
#7
demo发到你邮箱了
#8
楼主 求源码 我最近在做关于视频流的项目 想看看你的源码找找思路 谢谢 邮箱258559020@qq.com
#9
請問樓主用的是哪款手機做測試?
我的 Qcom CPU 測試機 似乎攝像頭有問題
E/QualcommCamera(28801): Qvoid android::disable_msg_type(camera_device*, int32_t): E
Red5 Server一點反應也沒有
我目前要找找別的手機來測測看
我的 Qcom CPU 測試機 似乎攝像頭有問題
E/QualcommCamera(28801): Qvoid android::disable_msg_type(camera_device*, int32_t): E
Red5 Server一點反應也沒有
我目前要找找別的手機來測測看
#10
順便也問一下樓主的 Red5 版本是多少吧。
#11
我用的Red5-0.9.1,手机我用的是小米,如果你没用改Red5服务器地址的话,估计摄像机就会有问题,因为当你的点了开始的时候就开始连接服务器了.
#12
我用的是HTC手機,有刷過機 Android 4.0.4
Red5 版本是 1.0.0
服務器地址有改過了,手機的錄影計時數字有在跑,但Red5就是沒有反應。
看來我得先試 Red5-0.9.1 然後Android的版本拿2.3版來測。
Red5 版本是 1.0.0
服務器地址有改過了,手機的錄影計時數字有在跑,但Red5就是沒有反應。
看來我得先試 Red5-0.9.1 然後Android的版本拿2.3版來測。
#13
前輩看在同樣用屌絲手機的份上也發一份demo給我學習學習吧,vjnjc#qq.com
#14
測試出來了!
Phone: HTC Desire - Android 2.3.3
Server: Red5 - 0.9.1
RTMPConnectionUtil.netStream.publish("aaa", NetStream.RECORD);
會向Red5服務器發佈視頻,檔案會存在Red5(aaa是檔名)。
RTMPConnectionUtil.netStream.publish("aaa", NetStream.LIVE);
Red5服務器有反應,但檔案並不會儲存,所以自帶的oflaDemo應該也沒有檔案可以播放。
或許要往Live即時播放的Demo去做測試。
Phone: HTC Desire - Android 2.3.3
Server: Red5 - 0.9.1
RTMPConnectionUtil.netStream.publish("aaa", NetStream.RECORD);
會向Red5服務器發佈視頻,檔案會存在Red5(aaa是檔名)。
RTMPConnectionUtil.netStream.publish("aaa", NetStream.LIVE);
Red5服務器有反應,但檔案並不會儲存,所以自帶的oflaDemo應該也沒有檔案可以播放。
或許要往Live即時播放的Demo去做測試。
#15
我测试过了,这个是可以通过的,现在我弄的可以现场直播了..边录边播~~
#16
大哥,我也在研究直播这块!搞了好久都没有搞出来,能发个demo学习一下吗?邮箱:236056760@qq.com
#17
請問樓主是如何實現的? 一樣使用oflaDemo的範例?
#18
你进入RED5中叫做:Publisher的demo中,这个demo其实就是RED5的调试器,手机端我们用LIVE模式,
如下面代码RTMPConnectionUtil.netStream.publish("aaa", NetStream.LIVE);
先点击connect,然后输入你直播流的名称"aaa",点击play就能顺利播放了,这个是现场直播.
对应的如果你用Record模式,就是点播模式了.
你可以去试试
如下面代码RTMPConnectionUtil.netStream.publish("aaa", NetStream.LIVE);
先点击connect,然后输入你直播流的名称"aaa",点击play就能顺利播放了,这个是现场直播.
对应的如果你用Record模式,就是点播模式了.
你可以去试试
#19
楼主,,请问你播放的时候会不会有一卡一卡的情况。。。是怎么解决的呢?
#20
回應harry163:
你是用vlc播放的嗎?我用vlc看的時候影片會是有點快轉的狀態。
以下是我的問題:
Red5收到的FLV檔案還滿大的,10秒大約就6M了,不曉得有什麼方法可以讓檔案變小?
你是用vlc播放的嗎?我用vlc看的時候影片會是有點快轉的狀態。
以下是我的問題:
Red5收到的FLV檔案還滿大的,10秒大約就6M了,不曉得有什麼方法可以讓檔案變小?
#21
也正搞这个,LZ,可以发一个DEMO么,346072628@qq.com
#22
我是用red5的Publisher demo来实时播放上传的视频的,感觉是设置的帧频的关系,设置成100会有快进的感觉,设置的大了,又会产生一卡一卡的情况...头痛中
#23
播放视频的在哪段是啊?
#24
影片檔案容量的改進可以搜尋一下ScreenCodec2演算法。
一個叫SVC2的flash影像編碼,可替換掉原來的encode function,
原本java的BufferedImage在Android內並不適用,
可考慮轉換成Bitmap的思路,當然function內容需要小部份改寫,
目前我錄5秒的大約是1MB。
一個叫SVC2的flash影像編碼,可替換掉原來的encode function,
原本java的BufferedImage在Android內並不適用,
可考慮轉換成Bitmap的思路,當然function內容需要小部份改寫,
目前我錄5秒的大約是1MB。
#25
最近我也在研究这个,不知道楼主的研究成果怎么样了?希望楼主把demo给我发一份,让我研究研究,先谢谢楼主了。
#26
邮箱为1826559560@qq.com
#27
不知道楼主现在可否发分代码我,正在研究(357140343@qq.com)
#28
不知道楼主现在可否发分代码我,正在研究(357140343@qq.com)
#29
楼主发我一份demo吧 谢谢~
#30
楼主是个好人,小弟现在也在研究,不知道楼主可以发一份demo给我,邮箱:667laobao@163.com
#31
楼主,可以发一份DEMO给我吗?最近我研究了好久都没弄通,想学习一下。邮箱 517526390@qq.com
#32
楼主好人,我现在也在研究,不知道楼主可以发一份demo给我,邮箱:554069948@qq.com
#33
1850889163@qq.com
发一份demo给我吧
发一份demo给我吧
#34
上面的写错了
185089163@qq.com
发一份demo给我吧
185089163@qq.com
发一份demo给我吧
#35
楼主可否也发我一个demo学习一下,不胜感激。363326947@qq.com
#36
最近也刚接触,希望能给Demo学习,楼主赏个!!!
邮箱是:
huscarter@163.com
邮箱是:
huscarter@163.com
#37
大哥,能否提供代码给我,最近在研究这个,2221699802@qq.com
#38
我也想要一份,测试看看,zsineng@163.com 谢谢了
#39
写的很好,,我最近也在研究这个,,可否把dmeo 发我一份 谢谢了:
snx1000@126.com
snx1000@126.com
#40
我正在做新闻直播,找了半天了,能给我发一份吗? wangbojun2008@126.com
#41
小弟最近也在弄android和red5实时视频的东西,希望也能给我份demo以做参考,谢谢!
393473617@qq.com
393473617@qq.com
#42
小弟最近也在弄android和red5实时视频的东西,希望也能给我份demo以做参考,谢谢!
79898485@qq.com
79898485@qq.com
#43
求demo,邮箱935770676@qq.com.
谢谢分享。
谢谢分享。
#44
这两天在学习这个 有些云里雾里的
好心人发个Demo给我研究研究吧
好心人发个Demo给我研究研究吧
#45
邮箱为:yimisunrise@126.com
#46
你好,我也想研究一下实时点播是如何实现的,能发个demo到我邮箱吗?邮箱:yangjiaxinghuman@163.com
#47
337287798@qq.com 求demo
#48
你好,我也想研究一下实时点播是如何实现的,能发个demo到我邮箱吗?邮箱:alex_xb@126.com
多谢了!
多谢了!
#49
大神,求demo,1193467478@qq.com,谢谢
#50
楼主,我最近在做这个,借鉴了下你的源码,发布到red5了,用red5的demo也能直播,但是播放的效果很差,基本就是花花绿绿的一片,不知道楼主有这种情况没