package auto;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
public class CreateTable {
    public static void main(String[] args) {
        String tables = "t_abnormal";
        // task-owner suffix
        String suf = "_zyy"; //_zyy, _lqy
        String[] strings = tables.split(",");
        StringBuilder stringBuilder = new StringBuilder();
        for (String table : strings) {
            table = table.trim().toLowerCase();
            findResult(table, suf);
            stringBuilder.append(table).append(",");
        }
        // Print the processed table list without the trailing comma
        System.out.println(stringBuilder.delete(stringBuilder.length() - 1, stringBuilder.length()).toString());
    }
    /**
     * Generates the data-extraction config files for a table.
     * @param tableName source MySQL table name
     * @param suf       task-owner suffix appended to the output paths
     */
    @SuppressWarnings("all")
    public static void findResult(String tableName, String suf) {
        // Source MySQL connection settings
        String driver = "com.mysql.cj.jdbc.Driver";
        String url = "jdbc:mysql://143.146.5.115:3306/db_DF199001";
        String userName = "data_user";
        String password = "zhjc@123";
        String executeSql = "desc db_DF199001.".concat(tableName);
        // Prefix for the generated Hive (ODS) table names
        String hiveTablePre = "jck_qxb";
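        // `desc <db>.<table>` returns one row per column with the fields
        // Field, Type, Null, Key, Default and Extra; only Field and Type
        // are read below.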
        try {
            Class.forName(driver);
            Connection connection = DriverManager.getConnection(url, userName, password);
            Statement statement = connection.createStatement();
            System.out.println("executeSql:\t" + executeSql);
            ResultSet resultSet = statement.executeQuery(executeSql);
            // Hive table name
            String odsTableName = hiveTablePre.concat(tableName);
            String outputDirectory = "output_directory=/user/hive/warehouse/ods.db/".concat(odsTableName).concat("_delta");
            // Hive DDL for the target table and its _delta staging table
            StringBuilder hiveSqlStr = new StringBuilder();
            StringBuilder hiveDeltaSqlStr = new StringBuilder();
            StringBuilder columnStr = new StringBuilder();
            StringBuilder hiveColumnStr = new StringBuilder();
            hiveSqlStr.append("create table if not exists ods.").append(odsTableName).append("(");
            hiveDeltaSqlStr.append("create table if not exists ods.").append(odsTableName).append("_delta(");
            // Column count
            int columnNum = 0;
            // Iterate over the columns and their types
            while (resultSet.next()) {
                String columnName = resultSet.getString("Field").toLowerCase();
                String typeName = findColumnType(resultSet.getString("Type"));
                String columnName_export = columnName;
                String columnName_hive = columnName;
                if (typeName.equalsIgnoreCase("string")) {
                    // Replace embedded newlines/carriage returns with placeholder
                    // markers so they survive the tab-delimited text extract
                    columnName_export = "replace(replace(" + columnName + ",char(10),'_zxsxli_'),char(13),'__zxsxli__') as " + columnName;
                }
                if (columnName_hive.equalsIgnoreCase("date") || columnName_hive.equalsIgnoreCase("time") ||
                        columnName_hive.equalsIgnoreCase("number") || columnName_hive.startsWith("_")) {
                    // Backtick-quote column names that collide with reserved words
                    // or start with an underscore
                    columnName_hive = "`".concat(columnName_hive).concat("`");
                }
                columnStr.append(columnName_export).append(", ");
                hiveColumnStr.append(columnName_hive).append(", ");
                hiveSqlStr.append(" ").append(columnName_hive).append(" ").append(typeName).append(", ");
                hiveDeltaSqlStr.append(" ").append(columnName_hive).append(" ").append(typeName).append(", ");
                columnNum++;
                System.out.println(columnNum + " columnName: " + columnName + " columnName_export: " + columnName_export + " columnName_hive: " + columnName_hive + " typeName: " + typeName);
            }
            // Trim the trailing ", " from each accumulated list
            columnStr.delete(columnStr.length() - 2, columnStr.length());
            hiveColumnStr.delete(hiveColumnStr.length() - 2, hiveColumnStr.length());
            hiveSqlStr.delete(hiveSqlStr.length() - 2, hiveSqlStr.length());
            hiveDeltaSqlStr.delete(hiveDeltaSqlStr.length() - 2, hiveDeltaSqlStr.length());
            hiveSqlStr.append(")").append(" ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t' STORED AS ORCFILE;");
            hiveDeltaSqlStr.append(")").append(" ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t' STORED AS TEXTFILE;");
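            // For t_abnormal the generated DDL comes out roughly as (column
            // names here are illustrative, not from the real table):
            //   create table if not exists ods.jck_qxbt_abnormal( id bigint,
            //   `date` string, remark string ) ROW FORMAT DELIMITED FIELDS
            //   TERMINATED BY '\t' STORED AS ORCFILE;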
            String filePath = "E:\\tmp\\qxb\\" + tableName.concat(suf);
            // Config files
            File fileBase = new File(filePath);
            File file = new File(filePath + "\\create_sql_mysql.sql");
            File file2 = new File(filePath + "\\mysql_test.ini");
            File file3 = new File(filePath + "\\insert_truncate_sql_mysql.sql");
            String create_sql_path = "/opt/pa_zd_test/shell/global_template/mysql/qxb/" + tableName.concat(suf) + "/create_sql_mysql.sql";
            String insert_truncate_sql_path = "/opt/pa_zd_test/shell/global_template/mysql/qxb/" + tableName.concat(suf) + "/insert_truncate_sql_mysql.sql";
            String source_type = "mysql_qxb_".concat(tableName).concat(suf);
            String insert_overwrite_sql = "insert into table ods.".concat(odsTableName).concat(" select ").concat(hiveColumnStr.toString()).concat(" from ods.").concat(odsTableName).concat("_delta;");
            String truncate_sql = "truncate table ods.".concat(odsTableName).concat("_delta;");
            // key_list renders the [Class] section plus the per-table section of
            // mysql_test.ini; export_sql selects the escaped column list built above
            String key_list = "[Class]\ntables=" + tableName + "\ncreate_sql_path=" + create_sql_path + "\ninsert_truncate_sql_path=" + insert_truncate_sql_path + "\nfiles=profile\n\n"
                    .concat("[".concat(tableName).concat("]\n")).concat("key_list=source_type###export_sql###output_directory\nsource_type=".concat(source_type).concat("\nexport_sql=select ").concat(columnStr.toString()).concat(" from ").concat(tableName).concat(" where ${CONDITIONS}"));
System.out.println("[".concat(tableName).concat("]"));
System.out.println(key_list);
System.out.println(outputDirectory);
System.out.println(hiveSqlStr.toString().replaceAll("\n", " "));
System.out.println(hiveDeltaSqlStr.toString().replaceAll("\n", " "));
System.out.println(insert_overwrite_sql);
System.out.println(truncate_sql);
            try {
                // Create the per-table output directory and files if missing
                if (!fileBase.exists()) {
                    fileBase.mkdirs();
                }
                if (!file.exists()) {
                    file.createNewFile();
                }
                if (!file2.exists()) {
                    file2.createNewFile();
                }
                if (!file3.exists()) {
                    file3.createNewFile();
                }
                // create_sql_mysql.sql: Hive DDL for the target and _delta tables
                BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file, true)));
                bw.write(hiveSqlStr.toString().replaceAll("\n", " "));
                bw.write("\n");
                bw.write(hiveDeltaSqlStr.toString().replaceAll("\n", " "));
                bw.write("\n\n\n");
                bw.close();
                // mysql_test.ini: extraction config
                BufferedWriter bw2 = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file2, true)));
                bw2.write(key_list);
                bw2.write("\n");
                bw2.write(outputDirectory);
                bw2.write("\n\n\n");
                bw2.close();
                // insert_truncate_sql_mysql.sql: load from _delta, then truncate it
                BufferedWriter bw3 = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file3, true)));
                bw3.write(insert_overwrite_sql);
                bw3.write("\n");
                bw3.write(truncate_sql);
                bw3.write("\n\n\n");
                bw3.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
            // Release JDBC resources
            resultSet.close();
            statement.close();
            connection.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}