【Quick Deployment】002_Flink (1.17.2)
【Abstract】Quick-deployment series. This installment covers Flink, for a fast hands-on trial and learning.
One-click Flink installation, local mode.
Contents of install-flink.sh:
#!/bin/bash
####Variables
###Directory this script is run from
mydir=$(cd "$(dirname "$0")"; pwd)
echo "$mydir"
# Flink installation directory
flink=/flink
# Checkpoint directory
cp=$flink/checkpoints/
# Savepoint directory
sp=$flink/savepoints/
# Number of task slots per TaskManager
tasknumber=5
# Checkpoint interval (seconds)
eci=5
# Maximum number of concurrent checkpoints
ecm=2
# Savepoint interval (seconds)
esi=11
# Maximum number of restart attempts (fixed-delay restart strategy)
rfa=10000
# Delay between restart attempts (seconds)
rfd=1
########################################################################################
# Create the Flink installation directory
mkdir -p $flink
# Create the checkpoint directory
mkdir -p $cp
# Create the savepoint directory
mkdir -p $sp
# Install OpenJDK 1.8
yum install -y java-1.8.0-openjdk-1.8.0.392.b08-2.el7_9.x86_64
## Download and extract the Flink distribution
wget https://repo.huaweicloud.com/apache/flink/flink-1.17.2/flink-1.17.2-bin-scala_2.12.tgz -P $flink
tar -xvf $flink/flink-1.17.2-bin-scala_2.12.tgz -C $flink
# Edit the Flink configuration file
sed -i -e 's|taskmanager.numberOfTaskSlots:.*|taskmanager.numberOfTaskSlots: '$tasknumber'|g' $flink/flink-1.17.2/conf/flink-conf.yaml
sed -i -e 's|rest.bind-address:.*|rest.bind-address: 0.0.0.0|g' $flink/flink-1.17.2/conf/flink-conf.yaml
# Append checkpoint, savepoint, and restart-strategy settings to flink-conf.yaml
cat >> $flink/flink-1.17.2/conf/flink-conf.yaml <<EOF
# Checkpoint directory
state.checkpoints.dir: file://$cp
# Checkpoint interval
execution.checkpointing.interval: $eci s
# Maximum number of concurrent checkpoints
execution.checkpointing.max-concurrent-checkpoints: $ecm
# Retain checkpoint data when a job is cancelled
execution.checkpointing.externalized-checkpoint-retention: RETAIN_ON_CANCELLATION
# Savepoint directory
state.savepoints.dir: file://$sp
execution.savepoints.interval: $esi s
# Custom restart strategy and failure-recovery settings
restart-strategy.fixed-delay.attempts: $rfa
restart-strategy.fixed-delay.delay: $rfd s
EOF
# Download dependency jars
# Change into Flink's lib directory and download connector jars into it
cd $flink/flink-1.17.2/lib
wget https://maven.aliyun.com/repository/public/com/ververica/flink-sql-connector-mysql-cdc/2.4.0/flink-sql-connector-mysql-cdc-2.4.0.jar
wget https://maven.aliyun.com/repository/public/org/apache/flink/flink-shaded-zookeeper-3/3.4.14-11.0/flink-shaded-zookeeper-3-3.4.14-11.0.jar
wget https://maven.aliyun.com/repository/public/org/apache/flink/flink-sql-connector-hbase-2.2/1.17.2/flink-sql-connector-hbase-2.2-1.17.2.jar
wget https://maven.aliyun.com/repository/public/org/apache/flink/flink-shaded-hadoop-2-uber/2.7.5-10.0/flink-shaded-hadoop-2-uber-2.7.5-10.0.jar
wget https://maven.aliyun.com/repository/public/com/huaweicloud/dws/dws-connector-flink_2.12_1.15/1.0.10/dws-connector-flink_2.12_1.15-1.0.10-jar-with-dependencies.jar
wget https://maven.aliyun.com/repository/public/com/ververica/flink-sql-connector-mongodb-cdc/3.0.0/flink-sql-connector-mongodb-cdc-3.0.0.jar
wget https://maven.aliyun.com/repository/public/org/apache/doris/flink-doris-connector-1.17/1.4.0/flink-doris-connector-1.17-1.4.0.jar
# Point config.sh at the JRE's java binary
sed -i -e 's|JAVA_RUN="$JAVA_HOME"/bin/java|JAVA_RUN="$JAVA_HOME"/jre/bin/java|g' $flink/flink-1.17.2/bin/config.sh
# Start the local Flink cluster
$flink/flink-1.17.2/bin/start-cluster.sh
###### Log in to the Web UI: http://eip:port (default port 8081)
$ vim install-flink.sh
# In vim, press i to enter insert mode, paste the script above, then press Esc and type ":wq!" to save and quit
$ chmod +x install-flink.sh
$ ./install-flink.sh
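A minimal post-run sanity check, as a sketch assuming the default paths used by the script (/flink and /flink/flink-1.17.2): it confirms the appended settings landed in flink-conf.yaml and that the standalone JobManager and TaskManager processes are running.
$ grep -E 'numberOfTaskSlots|checkpointing|savepoints' /flink/flink-1.17.2/conf/flink-conf.yaml
$ ps -ef | grep -E 'StandaloneSessionClusterEntrypoint|TaskManagerRunner' | grep -v grep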
Verification:
After installation succeeds, open: http://eip:8081
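If no browser is available yet, the cluster can also be probed from the shell via Flink's REST port; eip is a placeholder for the host's public/elastic IP. The /overview endpoint should return a JSON cluster overview (TaskManager and slot counts).
$ curl http://eip:8081/overview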
# Run a test job and check the result
$ cd /flink/flink-1.17.2
$ ./bin/flink run examples/streaming/WordCount.jar
# After it runs, check the result in the Web UI
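A few optional follow-up commands, as a sketch: <jobId> is a placeholder to copy from ./bin/flink list or the Web UI, and the savepoint path reuses the /flink/savepoints directory created by the script. Note the bundled WordCount job is bounded and usually finishes within seconds.
# List all jobs, including finished ones
$ ./bin/flink list -a
# Trigger a savepoint for a running job
$ ./bin/flink savepoint <jobId> /flink/savepoints
# Cancel a running job
$ ./bin/flink cancel <jobId>
# Stop the local cluster when finished
$ ./bin/stop-cluster.sh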
# Thanks to Taoke Technology for providing the experiment environment
Official documentation (Chinese): https://nightlies.apache.org/flink/flink-docs-release-1.17/zh/docs/try-flink/local_installation/