标签:回写 object JSONObject flink kafka import apache org
package Consumer;

import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

/**
 * Flink streaming job: reads CDC-style JSON records from the Kafka topic
 * "example", drops records whose "type" field is "ALTER" (schema-change
 * events), projects each remaining record's "data" array to its JSON string,
 * and writes the result to the Kafka topic "sink".
 */
public class KafkaConsumer {

    public static void main(String[] args) throws Exception {
        // Flink streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "192.168.186.174:9092");
        properties.setProperty("group.id", "test");

        // Source: Kafka consumer on topic "example", replayed from the
        // earliest available offset.
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("example", new SimpleStringSchema(), properties);
        consumer.setStartFromEarliest();

        DataStream<String> stream = env.addSource(consumer);
        stream.print();

        // Transform: filter out ALTER (DDL) events, then map each record to
        // the JSON string of its "data" array.
        // FIX: the original declared the raw type SingleOutputStreamOperator
        // and MapFunction<String, Object>, losing the element type and making
        // addSink(producer) an unchecked call. The map emits a String
        // (toJSONString()), so the stream is typed as String throughout.
        SingleOutputStreamOperator<String> mapStream = stream
                .filter((FilterFunction<String>) s -> {
                    JSONObject object = JSONObject.parseObject(s);
                    System.out.println(object.getString("type"));
                    // Forward only data rows; skip schema-change events.
                    return !"ALTER".equalsIgnoreCase(object.getString("type"));
                })
                .map((MapFunction<String, String>) s -> {
                    JSONObject object = JSONObject.parseObject(s);
                    return object.getJSONArray("data").toJSONString();
                });

        // Sink: Kafka producer writing the projected records to topic "sink".
        FlinkKafkaProducer<String> producer =
                new FlinkKafkaProducer<>("sink", new SimpleStringSchema(), properties);
        mapStream.addSink(producer);
        mapStream.print();

        env.execute();
    }
}
<dependencies>
    <!-- Flink Kafka connector (Scala 2.11 build).
         NOTE(review): with Flink 1.14 the FlinkKafkaConsumer/FlinkKafkaProducer
         API used by this job is deprecated in favor of KafkaSource/KafkaSink —
         consider migrating; verify against the Flink 1.14 docs. -->
    <dependency> <groupId>org.apache.flink</groupId> <artifactId>flink-connector-kafka_2.11</artifactId> <version>1.14.3</version> </dependency>
    <!-- Flink core Java API -->
    <dependency> <groupId>org.apache.flink</groupId> <artifactId>flink-java</artifactId> <version>1.14.3</version> </dependency>
    <dependency> <groupId>org.apache.flink</groupId> <artifactId>flink-streaming-java_2.11</artifactId> <version>1.14.3</version> </dependency>
    <dependency> <groupId>org.apache.flink</groupId> <artifactId>flink-clients_2.11</artifactId> <version>1.14.3</version> </dependency>
    <!-- Avro serialization support -->
    <dependency> <groupId>org.apache.flink</groupId> <artifactId>flink-avro</artifactId> <version>1.14.3</version> </dependency>
    <!-- fastjson — used to parse the CDC JSON records.
         NOTE(review): fastjson 1.2.59 is affected by known deserialization
         vulnerabilities (e.g. CVE-2022-25845, fixed in 1.2.83); upgrading is
         strongly recommended if any input is untrusted. -->
    <dependency> <groupId>com.alibaba</groupId> <artifactId>fastjson</artifactId> <version>1.2.59</version> </dependency>
</dependencies>
标签:回写,object,JSONObject,flink,kafka,import,apache,org 来源: https://www.cnblogs.com/i-tao/p/15932914.html
本站声明: 1. iCode9 技术分享网(下文简称本站)提供的所有内容,仅供技术学习、探讨和分享; 2. 关于本站的所有留言、评论、转载及引用,纯属内容发起人的个人观点,与本站观点和立场无关; 3. 关于本站的所有言论和文字,纯属内容发起人的个人观点,与本站观点和立场无关; 4. 本站文章均是网友提供,不完全保证技术分享内容的完整性、准确性、时效性、风险性和版权归属;如您发现该文章侵犯了您的权益,可联系我们第一时间进行删除; 5. 本站为非盈利性的个人网站,所有内容不会用来进行牟利,也不会利用任何形式的广告来间接获益,纯粹是为了广大技术爱好者提供技术内容和技术思想的分享性交流网站。