# http://archive.apache.org/dist/flume/1.8.0/
# ==================================================================Install Flume
tar -zxvf ~/apache-flume-1.8.0-bin.tar.gz -C /usr/local
mv /usr/local/apache-flume-1.8.0-bin /usr/local/flume-1.8.0
rm -r ~/apache-flume-1.8.0-bin.tar.gz
# Environment variables
# ==================================================================node1 node2 node3
vi /etc/profile # add the following below the line "export PATH USER LOGNAME MAIL HOSTNAME HISTSIZE HISTCONTROL"
export JAVA_HOME=/usr/java/jdk1.8.0_111
export ZOOKEEPER_HOME=/usr/local/zookeeper-3.4.12
export HADOOP_HOME=/usr/local/hadoop/hadoop-2.7.6
export MYSQL_HOME=/usr/local/mysql
export HBASE_HOME=/usr/local/hbase-1.2.4
export HIVE_HOME=/usr/local/hive-2.1.1
export SCALA_HOME=/usr/local/scala-2.12.4
export KAFKA_HOME=/usr/local/kafka_2.12-0.10.2.1
export FLUME_HOME=/usr/local/flume-1.8.0
export PATH=$PATH:$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$ZOOKEEPER_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$MYSQL_HOME/bin:$HBASE_HOME/bin:$HIVE_HOME/bin:$SCALA_HOME/bin:$KAFKA_HOME/bin:$FLUME_HOME/bin
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export HADOOP_INSTALL=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
# ==================================================================node1
# Apply the environment variables
source /etc/profile
# Check the result
echo $FLUME_HOME
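# Optionally confirm that flume-ng is on the PATH (prints the Flume version, expected 1.8.0)
flume-ng version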
# ==================================================================node1
cp $FLUME_HOME/conf/flume-env.sh.template $FLUME_HOME/conf/flume-env.sh
vi $FLUME_HOME/conf/flume-env.sh # set the JDK path inside flume-env.sh
export JAVA_HOME=/usr/java/jdk1.8.0_111
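# flume-env.sh also accepts JVM options for the agent; the template carries a
# commented example along these lines (heap sizes are illustrative):
# export JAVA_OPTS="-Xms100m -Xmx2000m -Dcom.sun.management.jmxremote"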
# Check whether telnet/xinetd are already installed
# rpm -qa | grep telnet-server
# rpm -qa | grep telnet
# rpm -qa | grep xinetd
# rpm -qa | grep net-tools
yum -y install xinetd telnet telnet-server net-tools
systemctl enable telnet.socket
systemctl start telnet.socket
systemctl enable xinetd
systemctl start xinetd
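# telnet itself is only needed for testing later: once an agent with a NetCat
# Source is listening, events can be typed in by hand, e.g. (host and port are
# illustrative):
# telnet node1 44444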
# These older SLF4J jars conflict with the ones Hadoop provides; remove them
rm -r $FLUME_HOME/lib/slf4j-api-1.6.1.jar
rm -r $FLUME_HOME/lib/slf4j-log4j12-1.6.1.jar
rm -r $HBASE_HOME/lib/slf4j-log4j12-1.7.5.jar
# Copy the Hadoop jars that the HDFS sink (hdfs.path) needs onto Flume's classpath
cp $HADOOP_HOME/share/hadoop/common/lib/commons-configuration-1.6.jar $FLUME_HOME/lib/
cp $HADOOP_HOME/share/hadoop/common/lib/hadoop-auth-2.7.6.jar $FLUME_HOME/lib/
cp $HADOOP_HOME/share/hadoop/common/lib/htrace-core-3.1.0-incubating.jar $FLUME_HOME/lib/
cp $HADOOP_HOME/share/hadoop/common/hadoop-common-2.7.6.jar $FLUME_HOME/lib/
cp $HADOOP_HOME/share/hadoop/common/hadoop-nfs-2.7.6.jar $FLUME_HOME/lib/
cp $HADOOP_HOME/share/hadoop/hdfs/hadoop-hdfs-2.7.6.jar $FLUME_HOME/lib/
cp $HADOOP_HOME/share/hadoop/hdfs/hadoop-hdfs-nfs-2.7.6.jar $FLUME_HOME/lib/
cp $HADOOP_HOME/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar $FLUME_HOME/lib/

# Distribute the configured Flume directory to the other nodes
scp -r $FLUME_HOME node2:/usr/local/
scp -r $FLUME_HOME node3:/usr/local/
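# With these jars in place an HDFS Sink can be configured; a minimal sketch
# (names a1/k1/c1 and the namenode URI are assumptions, not from this setup):
# a1.sinks.k1.type = hdfs
# a1.sinks.k1.channel = c1
# a1.sinks.k1.hdfs.path = hdfs://node1:9000/flume/events/%Y-%m-%d
# a1.sinks.k1.hdfs.fileType = DataStream
# a1.sinks.k1.hdfs.useLocalTimeStamp = true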
# ==================================================================node2 node3
# Apply the environment variables
source /etc/profile
# Check the result
echo $FLUME_HOME
shutdown -h now
# Take a snapshot: flume
# ==================================================================Reference
Source: where log data enters the agent. Built-in types include Avro Source, Thrift Source, Exec Source, JMS Source, Spooling Directory Source, Kafka Source, NetCat Source, Sequence Generator Source, Syslog Source, HTTP Source, Stress Source, Legacy Source, Custom Source, Scribe Source, and Twitter 1% firehose Source.
Channel: the pipeline between source and sink; all events arriving from a Source are queued here. Types include Memory Channel, JDBC Channel, Kafka Channel, File Channel, Spillable Memory Channel, Pseudo Transaction Channel, and Custom Channel.
Sink: where log data leaves the agent. Types include HDFS Sink, Hive Sink, Logger Sink, Avro Sink, Thrift Sink, IRC Sink, File Roll Sink, Null Sink, HBase Sink, Async HBase Sink, Morphline Solr Sink, Elastic Search Sink, Kite Dataset Sink, Kafka Sink, and Custom Sink.
# ==================================================================Example
To be updated later.
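In the meantime, a minimal sketch of a single-node agent wiring a NetCat Source, a Memory Channel, and a Logger Sink together (the agent name a1, component names r1/c1/k1, and port 44444 are illustrative, not from this document):

# example.conf: one source, one channel, one sink
a1.sources = r1
a1.channels = c1
a1.sinks = k1
a1.sources.r1.type = netcat
a1.sources.r1.bind = node1
a1.sources.r1.port = 44444
a1.sources.r1.channels = c1
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
a1.sinks.k1.type = logger
a1.sinks.k1.channel = c1

# Start the agent, then feed it test events with: telnet node1 44444
flume-ng agent --conf $FLUME_HOME/conf --conf-file example.conf --name a1 -Dflume.root.logger=INFO,console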