Tags: Flink, org, flink, api, sink, file, new, apache, import
package test; import bean.Stu; import org.apache.flink.api.common.serialization.SimpleStringEncoder; import org.apache.flink.core.fs.Path; import org.apache.flink.streaming.api.datastream.DataStreamSource; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink; import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy; import org.apache.flink.streaming.api.functions.source.SourceFunction;import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import java.util.concurrent.TimeUnit; public class SinkToFile { public static void main(String[] args) { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); DataStreamSource<Stu> source = env.addSource(new SourceFunction<Stu>() { private boolean running = true; @Override public void run(SourceContext<Stu> sourceContext) throws Exception { while (running) { for (int i = 0; i < 10; i++) { ArrayList<String> subs = new ArrayList<String>(Arrays.asList("语文", "数学", "英语", "化学", "物理", "生物")); List<String> names = Arrays.asList("张三", "李四", "王五", "赵六","田七"); int next = new Random().nextInt(15); int random = new Random().nextInt(101); sourceContext.collect(new Stu(names.get(next * random % 5), subs.get(next * random % 6), random)); Thread.sleep(1000); } Thread.sleep(20000); } } @Override public void cancel() { running = false; } }); //fileSink final StreamingFileSink<String> fileSink = StreamingFileSink .forRowFormat(new Path("./note"), new SimpleStringEncoder<String>("UTF-8")) .withRollingPolicy( DefaultRollingPolicy.builder() //每 15min 产生新日志文件 .withRolloverInterval(TimeUnit.MINUTES.toMillis(15)) //每断开 5min 产生新日志文件 .withInactivityInterval(TimeUnit.MINUTES.toMillis(5)) //每 1G 产生新日志文件 .withMaxPartSize(1024 * 1024 * 1024) .build()) .build(); //存入 source.map(d->d.toString()).addSink(fileSink); 
try { env.execute(); } catch (Exception e) { e.printStackTrace(); } } }
Tags: Flink, org, flink, api, sink, file, new, apache, import — Source: https://www.cnblogs.com/chang09/p/16435685.html
本站声明: 1. iCode9 技术分享网(下文简称本站)提供的所有内容,仅供技术学习、探讨和分享; 2. 关于本站的所有留言、评论、转载及引用,纯属内容发起人的个人观点,与本站观点和立场无关; 3. 关于本站的所有言论和文字,纯属内容发起人的个人观点,与本站观点和立场无关; 4. 本站文章均是网友提供,不完全保证技术分享内容的完整性、准确性、时效性、风险性和版权归属;如您发现该文章侵犯了您的权益,可联系我们第一时间进行删除; 5. 本站为非盈利性的个人网站,所有内容不会用来进行牟利,也不会利用任何形式的广告来间接获益,纯粹是为了广大技术爱好者提供技术内容和技术思想的分享性交流网站。