Flink: reading data from MySQL and writing it into Kafka for a full search index

Continuing from the previous article, this post imports the MySQL data into Kafka.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.jdbc.JDBCInputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.BatchTableEnvironment;
import org.apache.flink.types.Row;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MysqlToKafkaJob {

    private static final Logger logger = LoggerFactory.getLogger(MysqlToKafkaJob.class);

    public static void main(String[] args) throws Exception {
        // The query selects a single VARCHAR column, so the row has one STRING field.
        TypeInformation[] fieldTypes = new TypeInformation[] { BasicTypeInfo.STRING_TYPE_INFO };
        RowTypeInfo rowTypeInfo = new RowTypeInfo(fieldTypes);

        // JDBC source; * marks redacted values. Note the URL path is the database name.
        JDBCInputFormat jdbcInputFormat = JDBCInputFormat.buildJDBCInputFormat()
                .setDrivername("com.mysql.jdbc.Driver")
                .setDBUrl("jdbc:mysql://*:3306/tablename?characterEncoding=utf8")
                .setUsername("*")
                .setPassword("*")
                .setQuery("select LOGIC_CODE from *")
                .setRowTypeInfo(rowTypeInfo)
                .finish();

        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSource<Row> source = env.createInput(jdbcInputFormat);

        // Register the DataSet as a table so it can be queried with SQL.
        BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env);
        tableEnv.registerDataSet("t2", source);
        Table query = tableEnv.sqlQuery("select * from t2");

        // Convert each Row to its String form before handing it to Kafka.
        DataSet<String> result = tableEnv.toDataSet(query, Row.class)
                .map(new MapFunction<Row, String>() {
                    @Override
                    public String map(Row value) throws Exception {
                        return value.toString();
                    }
                });
        logger.info("read db end");

        // Custom OutputFormat (built in the previous article) that wraps a Kafka producer.
        KafkaOutputFormat kafkaOutput = KafkaOutputFormat.buildKafkaOutputFormat()
                .setBootstrapServers("*:9092")
                .setTopic("search_test_whk")
                .setAcks("all")
                .setBatchSize("1000")
                .setBufferMemory("100000")
                .setLingerMs("1")
                .setRetries("2")
                .finish();
        result.output(kafkaOutput);
        logger.info("write kafka end");

        // With the DataSet API, nothing runs until execute() is called.
        env.execute("Flink add data source");
    }
}
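The job depends on a custom KafkaOutputFormat from the previous article; Flink does not ship an OutputFormat by that name. For readers without the previous post at hand, here is a minimal sketch of what such a class can look like. The class name and builder methods mirror the calls in the job above, but the field handling and producer wiring are assumptions, not the exact code from that post.

import java.io.IOException;
import java.util.Properties;

import org.apache.flink.api.common.io.RichOutputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

// Sketch of a custom OutputFormat that publishes each record to a Kafka topic.
public class KafkaOutputFormat extends RichOutputFormat<String> {

    private String bootstrapServers;
    private String topic;
    private String acks;
    private String batchSize;
    private String bufferMemory;
    private String lingerMs;
    private String retries;

    // The producer is not serializable, so it is created per task in open().
    private transient KafkaProducer<String, String> producer;

    @Override
    public void configure(Configuration parameters) {
        // Nothing to do: all settings come from the builder.
    }

    @Override
    public void open(int taskNumber, int numTasks) {
        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("acks", acks);
        props.put("batch.size", batchSize);
        props.put("buffer.memory", bufferMemory);
        props.put("linger.ms", lingerMs);
        props.put("retries", retries);
        props.put("key.serializer", StringSerializer.class.getName());
        props.put("value.serializer", StringSerializer.class.getName());
        producer = new KafkaProducer<>(props);
    }

    @Override
    public void writeRecord(String record) {
        producer.send(new ProducerRecord<>(topic, record));
    }

    @Override
    public void close() throws IOException {
        if (producer != null) {
            producer.flush();
            producer.close();
        }
    }

    public static KafkaOutputFormatBuilder buildKafkaOutputFormat() {
        return new KafkaOutputFormatBuilder();
    }

    public static class KafkaOutputFormatBuilder {
        private final KafkaOutputFormat format = new KafkaOutputFormat();

        public KafkaOutputFormatBuilder setBootstrapServers(String v) { format.bootstrapServers = v; return this; }
        public KafkaOutputFormatBuilder setTopic(String v) { format.topic = v; return this; }
        public KafkaOutputFormatBuilder setAcks(String v) { format.acks = v; return this; }
        public KafkaOutputFormatBuilder setBatchSize(String v) { format.batchSize = v; return this; }
        public KafkaOutputFormatBuilder setBufferMemory(String v) { format.bufferMemory = v; return this; }
        public KafkaOutputFormatBuilder setLingerMs(String v) { format.lingerMs = v; return this; }
        public KafkaOutputFormatBuilder setRetries(String v) { format.retries = v; return this; }

        public KafkaOutputFormat finish() { return format; }
    }
}

The OutputFormat instance is serialized and shipped to each task, which is why the configuration lives in plain String fields while the KafkaProducer itself is transient and only created in open() on the worker.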
