Setting Savepoints and Restoring from Savepoints in Flink


The job below enables checkpointing and stores the checkpoints on HDFS:

package com.qyt;

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

/**
 * DataStream API usage: a socket word count with checkpoints stored on HDFS
 */
public class StreamWordCount {

    public static void main(String[] args) throws Exception {


        //TODO 1. Obtain the stream execution environment
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Access HDFS as the root user
        System.setProperty("HADOOP_USER_NAME", "root");
        // Take a checkpoint every 3 seconds
        env.enableCheckpointing(3000);
        // Store checkpoints on a file system (here: HDFS)
        env.getCheckpointConfig().setCheckpointStorage(new FileSystemCheckpointStorage("hdfs://hadoop01:9000/flink"));
        // Fail a checkpoint if it takes longer than 2 seconds
        env.getCheckpointConfig().setCheckpointTimeout(2000L);
        // Retain the latest checkpoint when the job is cancelled, so it can be restored later
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        //TODO 2. Create the unbounded source stream from a socket
        DataStreamSource<String> stringDataStreamSource = env.socketTextStream("192.168.1.10", 9000, "\n");

        //TODO 3. ETL
        //TODO 3.1 Split each line into (word, 1) tuples, a simple ETL step
        SingleOutputStreamOperator<Tuple2<String, Integer>> process = stringDataStreamSource.process(new ProcessFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void processElement(String value, ProcessFunction<String, Tuple2<String, Integer>>.Context ctx, Collector<Tuple2<String, Integer>> out) throws Exception {
                String[] words = value.split(" ");
                for (String word : words) {
                    Tuple2<String, Integer> tuple2 = Tuple2.of(word, 1);
                    out.collect(tuple2);
                }
            }
        }).uid("etl");

        //TODO 3.2 Key the stream by word
        KeyedStream<Tuple2<String, Integer>, String> tuple2StringKeyedStream = process.keyBy(new KeySelector<Tuple2<String, Integer>, String>() {

            @Override
            public String getKey(Tuple2<String, Integer> value) throws Exception {
                return value.f0;
            }
        });

        //TODO 3.3 Aggregate: sum the counts per word
        SingleOutputStreamOperator<Tuple2<String, Integer>> sum = tuple2StringKeyedStream.sum(1);

        //TODO 4. Print the results
        sum.print();

        //TODO 5. execute() keeps the unbounded job running
        env.execute();
    }
}
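The `.uid("etl")` call above is what makes savepoint recovery reliable: Flink maps saved state back to operators by their UIDs, so every stateful operator should carry a stable one. A minimal sketch of extending this to the aggregation step, plus an optional default savepoint directory; the uid name "sum" and the savepoint path are assumptions for illustration, not part of the original job:

        //TODO Sketch: give the stateful aggregation a stable uid as well,
        // so its state can be matched on restore even if the job graph changes
        SingleOutputStreamOperator<Tuple2<String, Integer>> sum =
                tuple2StringKeyedStream.sum(1).uid("sum");

        // Optional: a default target directory for savepoints
        // (hypothetical path; any HDFS directory works)
        env.setDefaultSavepointDirectory("hdfs://hadoop01:9000/flink/savepoints");

This replaces the TODO 3.3 line in the class above; the rest of the pipeline is unchanged.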

The hadoop-client dependency must be added so Flink can write checkpoints to HDFS:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xmlns="http://maven.apache.org/POM/4.0.0"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>flink-demo</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <flink.version>1.17.0</flink.version>
    </properties>


    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.3.4</version>
        </dependency>
    </dependencies>


    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>3.2.4</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <artifactSet>
                                <excludes>
                                    <exclude>com.google.code.findbugs:jsr305</exclude>
                                    <exclude>org.slf4j:*</exclude>
                                    <exclude>log4j:*</exclude>
                                </excludes>
                            </artifactSet>
                            <filters>
                                <filter>
                                    <!-- Do not copy the signatures in the META-INF folder.
                                    Otherwise, this might cause SecurityExceptions when using the JAR. -->
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                            <transformers combine.children="append">
                                <transformer
                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
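The shade plugin runs in the package phase and bundles hadoop-client into a fat jar (target/flink-demo-1.0-SNAPSHOT.jar). Build it with the standard Maven command before submitting:

# Build the fat jar
mvn clean package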

Submitting to the Flink cluster

# Submit the job to the cluster
./flink run -m 192.168.1.161:8081 -c com.qyt.StreamWordCount /root/soft/flink-demo-1.0-SNAPSHOT.jar
# Resume from the last retained checkpoint; bc5fae2e282247486003ed259f2f37a7 is the job ID
./flink run -s hdfs://hadoop01:9000/flink/bc5fae2e282247486003ed259f2f37a7/chk-33 -m 192.168.1.161:8081 -c com.qyt.StreamWordCount /root/soft/flink-demo-1.0-SNAPSHOT.jar

Look up the corresponding job ID in the Flink web UI (screenshot omitted).
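
Note that chk-33 in the restore command above is a retained checkpoint; the -s flag accepts savepoint paths the same way. To take a savepoint explicitly, the standard CLI commands look like the sketch below, reusing the job ID from above and an assumed HDFS target directory (adjust both for your cluster):

# Trigger a savepoint for the running job, written under the given directory
./flink savepoint -m 192.168.1.161:8081 bc5fae2e282247486003ed259f2f37a7 hdfs://hadoop01:9000/flink/savepoints
# Stop the job gracefully, taking a final savepoint first
./flink stop -m 192.168.1.161:8081 --savepointPath hdfs://hadoop01:9000/flink/savepoints bc5fae2e282247486003ed259f2f37a7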

