
update

main
wangshaoping, 12 months ago
parent
commit f7e7c3efb1
41 changed files (lines changed per file in parentheses):
  1. app.platform/x.txt (0 lines changed)
  2. io.sc.engine.mv.frontend/src/views/result/Executor.vue (14 lines changed)
  3. io.sc.platform.core.frontend/src/platform/i18n/messages.json (2 lines changed)
  4. io.sc.platform.core.frontend/src/platform/i18n/messages_tw_CN.json (2 lines changed)
  5. io.sc.platform.core.frontend/src/platform/i18n/messages_zh_CN.json (2 lines changed)
  6. io.sc.platform.core/src/main/java/io/sc/platform/core/support/ProgressInfo.java (1 line changed)
  7. io.sc.platform.core/src/main/java/io/sc/platform/core/util/ClassUtil.java (8 lines changed)
  8. io.sc.platform.core/src/main/java/io/sc/platform/core/util/FileUtil.java (14 lines changed)
  9. io.sc.platform.core/src/main/java/io/sc/platform/core/util/ZipUtil.java (58 lines changed)
  10. io.sc.platform.csv/src/main/java/io/sc/platform/csv/exporter/CsvExporter.java (2 lines changed)
  11. io.sc.platform.developer.frontend/src/components/index.ts (2 lines changed)
  12. io.sc.platform.developer.frontend/src/i18n/messages.json (6 lines changed)
  13. io.sc.platform.developer.frontend/src/i18n/messages_tw_CN.json (5 lines changed)
  14. io.sc.platform.developer.frontend/src/i18n/messages_zh_CN.json (5 lines changed)
  15. io.sc.platform.developer.frontend/src/menus/menus.json (4 lines changed)
  16. io.sc.platform.developer.frontend/src/routes/routes.json (13 lines changed)
  17. io.sc.platform.developer.frontend/src/views/backend/ExportLiquibase.vue (6 lines changed)
  18. io.sc.platform.developer.frontend/src/views/backend/ImportLiquibase.vue (106 lines changed)
  19. io.sc.platform.developer.frontend/src/views/backend/WProgressBtn.vue (5 lines changed)
  20. io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/exporter/LiquibaseDataCsvExporter.java (118 lines changed)
  21. io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/exporter/LiquibaseDataExporter.java (242 lines changed)
  22. io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/importer/LiquibaseDataCsvImporter.java (55 lines changed)
  23. io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/task/CsvImportTaskChange.java (74 lines changed)
  24. io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/task/new.cfg.json (13 lines changed)
  25. io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/task/new.csv (4 lines changed)
  26. io.sc.platform.jdbc.liquibase/src/main/resources/META-INF/services/io.sc.platform.jdbc.importer.DataImporter (1 line changed)
  27. io.sc.platform.jdbc.liquibase/src/test/java/io/sc/platform/jdbc/liquibase/task/CsvImportTaskChangeTest.java (23 lines changed)
  28. io.sc.platform.jdbc.liquibase/src/test/resources/mv_score_record.csv (30005 lines changed)
  29. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/controller/JdbcDataWebController.java (90 lines changed)
  30. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/controller/JdbcExportWebController.java (43 lines changed)
  31. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/controller/support/ExporterThread.java (18 lines changed)
  32. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/controller/support/ImporterThread.java (44 lines changed)
  33. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/exporter/DataExporter.java (2 lines changed)
  34. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/exporter/support/DataImportConfigure.java (51 lines changed)
  35. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/importer/DataImporter.java (31 lines changed)
  36. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/service/JdbcDataService.java (14 lines changed)
  37. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/service/JdbcExportService.java (10 lines changed)
  38. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/service/impl/JdbcDataServiceImpl.java (54 lines changed)
  39. io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/service/impl/JdbcExportServiceImpl.java (43 lines changed)
  40. io.sc.platform.mvc/src/main/java/io/sc/platform/mvc/service/impl/FrontendExportServiceImpl.java (22 lines changed)
  41. io.sc.platform.mvc/src/main/resources/META-INF/platform/plugins/application-properties.json (13 lines changed)

0
app.platform/x.txt

14
io.sc.engine.mv.frontend/src/views/result/Executor.vue

@@ -14,14 +14,14 @@
label: $t('io.sc.engine.mv.executorDialog.form.entity.binomialSignificanceLevel'),
type: 'select',
options: binomialSignificanceLevelOptionsRef,
defaultValue: binomialSignificanceLevelOptionsRef[0],
defaultValue: 0.01,
},
{
name: 'chiSquareSignificanceLevel',
label: $t('io.sc.engine.mv.executorDialog.form.entity.chiSquareSignificanceLevel'),
type: 'select',
options: chiSquareSignificanceLevelOptionsRef,
defaultValue: chiSquareSignificanceLevelOptionsRef[0],
defaultValue: 0.01,
},
]"
>
@@ -48,10 +48,16 @@ const open = () => {
dialogRef.value.show();
nextTick(() => {
axios.get(Environment.apiContextPath('/api/mv/configure/binomial/significanceLevels')).then((response) => {
binomialSignificanceLevelOptionsRef.value = response.data;
binomialSignificanceLevelOptionsRef.value.splice(0, binomialSignificanceLevelOptionsRef.value.length);
for (let item of response.data) {
binomialSignificanceLevelOptionsRef.value.push({ label: item, value: item });
}
});
axios.get(Environment.apiContextPath('/api/mv/configure/chiSquare/significanceLevels')).then((response) => {
chiSquareSignificanceLevelOptionsRef.value = response.data;
chiSquareSignificanceLevelOptionsRef.value.splice(0, chiSquareSignificanceLevelOptionsRef.value.length);
for (let item of response.data) {
chiSquareSignificanceLevelOptionsRef.value.push({ label: item, value: item });
}
});
});
};

2
io.sc.platform.core.frontend/src/platform/i18n/messages.json

@@ -48,6 +48,8 @@
"upToTop": "Up to Top",
"passwordAndConfirmPasswordMustEqual": "Confrim Password and Password must equals",
"progress.tip": "Running: ",
"file.single.tip": "Please Select a File",
"file.multiple.tip": "Please Select Files",
"cron.second.per": "Per Second",
"cron.second.period.1": "From",

2
io.sc.platform.core.frontend/src/platform/i18n/messages_tw_CN.json

@@ -48,6 +48,8 @@
"upToTop": "回到頂部",
"passwordAndConfirmPasswordMustEqual": "確認密碼和密碼必須一致",
"progress.tip": "正在執行: ",
"file.single.tip": "請選擇一個文件",
"file.multiple.tip": "請選擇一個或多個文件",
"cron.second.per": "每秒",
"cron.second.period.1": "從",

2
io.sc.platform.core.frontend/src/platform/i18n/messages_zh_CN.json

@@ -48,6 +48,8 @@
"upToTop": "回到顶部",
"passwordAndConfirmPasswordMustEqual": "确认密码和密码必须一致",
"progress.tip": "正在执行: ",
"file.single.tip": "请选择一个文件",
"file.multiple.tip": "请选择一个或多个文件",
"cron.second.per": "每秒",
"cron.second.period.1": "从",

1
io.sc.platform.core/src/main/java/io/sc/platform/core/support/ProgressInfo.java

@@ -31,7 +31,6 @@ public class ProgressInfo {
public ProgressInfo(){}
public ProgressInfo(int totalWeight,int currentWeight){
System.out.println(">>>>>>>>>>>>>>>>>");
this.totalWeight =new AtomicInteger(totalWeight);
this.currentWeight =new AtomicInteger(currentWeight);
}

8
io.sc.platform.core/src/main/java/io/sc/platform/core/util/ClassUtil.java

@@ -74,8 +74,14 @@ public class ClassUtil {
return Date.class;
}else if(Date[].class.getName().equals(className)){ //Date
return Date[].class;
}else if(Timestamp.class.getName().equals(className)){ // Timestamp
}else if(Timestamp.class.getName().equals(className)){ // Timestamp
return Timestamp.class;
}else if(Timestamp[].class.getName().equals(className)){ // Timestamp[]
return Timestamp[].class;
}else if(java.sql.Date.class.getName().equals(className)){ // java.sql.Date
return java.sql.Date.class;
}else if(java.sql.Date[].class.getName().equals(className)){ // java.sql.Date[]
return java.sql.Date[].class;
}else{
throw new RuntimeException("can NOT transform " + className + " to Class");
}

14
io.sc.platform.core/src/main/java/io/sc/platform/core/util/FileUtil.java

@@ -427,15 +427,17 @@
*/
public static void deldirs(File dir) throws IOException {
ParameterChecker.notNull(dir);
if(dir.isDirectory()){
File[] files =dir.listFiles();
if(files!=null && files.length>0){
for(File file : files){
deldirs(file);
if(dir.exists()) {
if (dir.isDirectory()) {
File[] files = dir.listFiles();
if (files != null && files.length > 0) {
for (File file : files) {
deldirs(file);
}
}
}
Files.delete(dir.toPath());
}
Files.delete(dir.toPath());
}
/**

58
io.sc.platform.core/src/main/java/io/sc/platform/core/util/ZipUtil.java

@@ -0,0 +1,58 @@
package io.sc.platform.core.util;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import java.io.File;
import java.io.IOException;
public class ZipUtil {
public static void zip(String targetDirPath) throws IOException {
zip(targetDirPath,targetDirPath + ".zip",false);
}
public static void zip(String targetDirPath,boolean delete) throws IOException {
zip(targetDirPath,targetDirPath + ".zip",delete);
}
public static void zip(String targetDirPath,String targetZipFilePath) throws IOException {
zip(targetDirPath,targetZipFilePath,false);
}
public static void zip(String targetDirPath,String targetZipFilePath,boolean delete) throws IOException {
File targetDirFile =new File(targetDirPath);
if(targetDirFile.exists() && targetDirFile.isDirectory()) {
File targetZipFile =new File(targetZipFilePath);
if(targetZipFile.exists() && targetZipFile.isFile()){
targetZipFile.delete();
}
File targetZipFileParent =targetZipFile.getParentFile();
if(!targetZipFileParent.exists() || !targetZipFileParent.isDirectory()){
targetZipFileParent.mkdirs();
}
ZipFile zipFile = new ZipFile(targetZipFilePath);
if (targetDirFile.exists() && targetDirFile.isDirectory()) {
File[] fs = targetDirFile.listFiles();
if (fs != null && fs.length > 0) {
for (File f : fs) {
if (f.isDirectory()) {
zipFile.addFolder(f);
} else {
zipFile.addFile(f);
}
}
}
}
zipFile.close();
if (delete) {
FileUtil.deldirs(targetDirFile);
}
}else{
throw new IOException(targetDirPath + " NOT exists or is NOT a directory");
}
}
public static void main(String[] args) throws IOException {
ZipUtil.zip("/Users/wangshaoping/wspsc/workspace/wangshaoping/v8/platform/app.platform/work/web/export/liquibase/platform","/Users/wangshaoping/wspsc/workspace/wangshaoping/v8/platform/app.platform/work/web/export/liquibase2/platform.zip");
}
}
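For context, the new ZipUtil is a thin convenience wrapper over zip4j's ZipFile. A minimal usage sketch of its overloads (the paths are hypothetical, and the source directory is assumed to exist):

import io.sc.platform.core.util.ZipUtil;

public class ZipUtilUsageSketch {
    public static void main(String[] args) throws Exception {
        // zips /tmp/export into /tmp/export.zip next to it and keeps the source directory
        ZipUtil.zip("/tmp/export");
        // zips into an explicit target file, then deletes the source directory (delete = true)
        ZipUtil.zip("/tmp/export", "/tmp/archives/export.zip", true);
    }
}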

2
io.sc.platform.csv/src/main/java/io/sc/platform/csv/exporter/CsvExporter.java

@@ -9,7 +9,7 @@ import java.util.Locale;
public class CsvExporter implements DataExporter {
@Override
public void export(DataSource dataSource, DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) {
public void exportData(DataSource dataSource, DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) {
}
}

2
io.sc.platform.developer.frontend/src/components/index.ts

@@ -24,6 +24,7 @@ import Security from '@/views/plugin/Security.vue';
import Ws from '@/views/plugin/Ws.vue';
import Swagger from '@/views/plugin/Swagger.vue';
import SystemProperties from '@/views/plugin/SystemProperties.vue';
import importLiquibase from '@/views/backend/ImportLiquibase.vue';
import exportLiquibase from '@/views/backend/ExportLiquibase.vue';
import Icons from '@/views/frontend/Icons.vue';
@@ -50,6 +51,7 @@ const localComponents = {
'component.developer.plugin.Ws': Ws,
'component.developer.plugin.Swagger': Swagger,
'component.developer.plugin.SystemProperties': SystemProperties,
'component.developer.backend.importLiquibase': importLiquibase,
'component.developer.backend.exportLiquibase': exportLiquibase,
'component.developer.frontend.Icons': Icons,
};

6
io.sc.platform.developer.frontend/src/i18n/messages.json

@@ -27,14 +27,16 @@
"menu.developer.plugin.ws" : "WebService",
"menu.developer.backend" : "Back End Tools",
"menu.developer.backend.export.liquibase" : "Data Export(liquibase)",
"menu.developer.backend.import.liquibase" : "Data Import",
"menu.developer.backend.export.liquibase" : "Data Export",
"menu.developer.frontend" : "Front End Tools",
"menu.developer.frontend.icons" : "Icons",
"developer.backend.import.liquibase.deleteFirst" : "Delete data before import?",
"developer.backend.export.liquibase.datasource" : "Datasource",
"developer.backend.export.liquibase.schema" : "Schema",
"developer.backend.export.liquibase.tables" : "Tables",
"developer.backend.export.liquibase.export.tip" : "Are you sure to export?"
}

5
io.sc.platform.developer.frontend/src/i18n/messages_tw_CN.json

@@ -27,11 +27,14 @@
"menu.developer.plugin.ws" : "Web 服務",
"menu.developer.backend" : "後端工具",
"menu.developer.backend.export.liquibase" : "數據導出(liquibase)",
"menu.developer.backend.import.liquibase" : "數據導入",
"menu.developer.backend.export.liquibase" : "數據導出",
"menu.developer.frontend" : "前端工具",
"menu.developer.frontend.icons" : "圖標庫",
"developer.backend.import.liquibase.deleteFirst" : "導入數據前首先刪除原數據?",
"developer.backend.export.liquibase.datasource" : "數據源",
"developer.backend.export.liquibase.schema" : "方案",
"developer.backend.export.liquibase.tables" : "表",

5
io.sc.platform.developer.frontend/src/i18n/messages_zh_CN.json

@@ -27,11 +27,14 @@
"menu.developer.plugin.ws" : "Web 服务",
"menu.developer.backend" : "后端工具",
"menu.developer.backend.export.liquibase" : "数据导出(liquibase)",
"menu.developer.backend.import.liquibase" : "数据导入",
"menu.developer.backend.export.liquibase" : "数据导出",
"menu.developer.frontend" : "前端工具",
"menu.developer.frontend.icons" : "图标库",
"developer.backend.import.liquibase.deleteFirst" : "导入数据前首先删除原数据?",
"developer.backend.export.liquibase.datasource" : "数据源",
"developer.backend.export.liquibase.schema" : "方案",
"developer.backend.export.liquibase.tables" : "表",

4
io.sc.platform.developer.frontend/src/menus/menus.json

@@ -71,8 +71,10 @@
/*/*/
{"type":"GROUP", "order":500, "parentId":"menu.developer", "id":"menu.developer.backend", "titleI18nKey":"menu.developer.backend", "icon":"bi-server"},
/*//*/
{"type":"ROUTE", "order":100, "parentId":"menu.developer.backend", "id":"menu.developer.backend.import.liquibase", "titleI18nKey":"menu.developer.backend.import.liquibase", "icon":"bi-database-up", "routeName":"route.developer.backend.import.liquibase"},
/*//*/
{"type":"ROUTE", "order":100, "parentId":"menu.developer.backend", "id":"menu.developer.backend.export.liquibase", "titleI18nKey":"menu.developer.backend.export.liquibase", "icon":"bi-database-down", "routeName":"route.developer.backend.export.liquibase"},
{"type":"ROUTE", "order":200, "parentId":"menu.developer.backend", "id":"menu.developer.backend.export.liquibase", "titleI18nKey":"menu.developer.backend.export.liquibase", "icon":"bi-database-down", "routeName":"route.developer.backend.export.liquibase"},
/*/*/
{"type":"GROUP", "order":600, "parentId":"menu.developer", "id":"menu.developer.frontend", "titleI18nKey":"menu.developer.frontend", "icon":"bi-layout-text-window"},

13
io.sc.platform.developer.frontend/src/routes/routes.json

@@ -329,7 +329,18 @@
]
}
},
{
"name": "route.developer.backend.import.liquibase",
"path": "developer/backend/import/liquibase",
"parent": "/",
"priority": 0,
"module": "io.sc.platform.developer.frontend",
"component": "component.developer.backend.importLiquibase",
"componentPath": "@/views/backend/ImportLiquibase.vue",
"redirect": null,
"meta": {
}
},
{
"name": "route.developer.backend.export.liquibase",
"path": "developer/backend/export/liquibase",

6
io.sc.platform.developer.frontend/src/views/backend/ExportLiquibase.vue

@@ -28,6 +28,7 @@
label: $t('developer.backend.export.liquibase.tables'),
type: 'select',
multiple: true,
clearable: true,
options: tablesOptionsRef,
'onUpdate:modelValue': (value) => {
if (value) {
@@ -58,7 +59,7 @@
ref="progressBtnRef"
icon="bi-database-down"
:label="$t('export')"
data-url="/api/jdbc/export/traceExecuteProgress"
data-url="/api/jdbc/data/traceExporterExecuteProgress"
@click="exportData"
></WProgressBtn>
</div>
@@ -118,7 +119,6 @@ const schemaChanged = (datasource: string, schema: string) => {
};
const exportData = (e) => {
console.log(e);
formRef.value.validate().then((value) => {
if (value) {
DialogManager.confirm(t('developer.backend.export.liquibase.export.tip'), () => {
@@ -133,7 +133,7 @@ const exportData = (e) => {
for (let i = 0; i < length; i++) {
config.tables[i] = { name: data.tables[i], sql: sql ? sql : 'select * from ' + data.tables[i] };
}
config.datasource = axios.post(Environment.apiContextPath('/api/jdbc/export/export'), config).then((response) => {
axios.post(Environment.apiContextPath('/api/jdbc/data/exportData'), config).then((response) => {
progressBtnRef.value.start();
});
});

106
io.sc.platform.developer.frontend/src/views/backend/ImportLiquibase.vue

@@ -0,0 +1,106 @@
<template>
<q-form action="post">
<div class="row py-1">
<div class="col-3"></div>
<div class="col-6">
<w-select
v-model="formDataRef.datasource"
:label="$t('developer.backend.export.liquibase.datasource')"
:options="datasourceOptionsRef"
style="width: 100%"
></w-select>
</div>
<div class="col-3"></div>
</div>
<div class="row py-1">
<div class="col-3"></div>
<div class="col-6">
<q-file ref="fileRef" v-model="formDataRef.file" :label="$t('file.multiple.tip')" dense outlined clearable counter accept=".csv">
<template #prepend>
<q-icon name="cloud_upload" />
</template>
</q-file>
</div>
<div class="col-3"></div>
</div>
<div class="row py-1">
<div class="col-3"></div>
<div class="col-6">
<w-checkbox v-model="formDataRef.deleteFirst" :label="$t('developer.backend.import.liquibase.deleteFirst')" style="width: 100%"></w-checkbox>
</div>
<div class="col-3"></div>
</div>
<div class="row py-1">
<div class="col-3"></div>
<div class="col-6 row justify-center q-gutter-md py-2">
<WProgressBtn
ref="progressBtnRef"
icon="bi-database-up"
:label="$t('import')"
data-url="/api/jdbc/data/traceImporterExecuteProgress"
@click="importData"
></WProgressBtn>
</div>
<div class="col-3"></div>
</div>
</q-form>
</template>
<script setup lang="ts">
import { ref, reactive, onMounted, onUpdated } from 'vue';
import { useI18n } from 'vue-i18n';
import { axios, Environment, DialogManager } from 'platform-core';
import WProgressBtn from './WProgressBtn.vue';
const { t } = useI18n();
const progressBtnRef = ref();
const datasourceOptionsRef = ref([]);
const formDataRef = reactive({
datasource: undefined,
file: undefined,
deleteFirst: false,
});
const fileRef = ref();
const importData = () => {
DialogManager.confirm(t('developer.backend.import.liquibase.import.tip'), () => {
axios
.post(
Environment.apiContextPath('/api/jdbc/data/importData'),
{
datasource: formDataRef.datasource,
deleteFirst: formDataRef.deleteFirst,
files: fileRef.value.nativeEl.files[0],
},
{
headers: {
'Content-Type': 'multipart/form-data',
},
},
)
.then(() => {
progressBtnRef.value.start();
});
});
};
const loadDatasource = () => {
axios.get(Environment.apiContextPath('/api/system/datasource?pageable=false&sortBy=name')).then((response) => {
const data = response?.data.content;
if (data && data.length > 0) {
datasourceOptionsRef.value.splice(0, datasourceOptionsRef.value.length);
for (let item of data) {
datasourceOptionsRef.value.push({ label: item.name, value: item.name });
}
}
});
};
onMounted(() => {
loadDatasource();
});
onUpdated(() => {
loadDatasource();
});
</script>

5
io.sc.platform.developer.frontend/src/views/backend/WProgressBtn.vue

@@ -33,13 +33,14 @@ const refreshProgress = () => {
percentageRef.value = (progressInfo.currentWeight / progressInfo.totalWeight) * 100;
messageRef.value = progressInfo.messageKey;
if (percentageRef.value >= 100) {
stop();
setTimeout(stop, 1000);
}
} else {
stop();
}
})
.catch(() => {
.catch((error) => {
console.log(error);
stop();
});
};

118
io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/exporter/LiquibaseDataCsvExporter.java

@@ -2,7 +2,10 @@ package io.sc.platform.jdbc.liquibase.exporter;
import io.sc.platform.core.DirectoryManager;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.core.util.DateUtil;
import io.sc.platform.core.util.FileUtil;
import io.sc.platform.core.util.WriterUtil;
import io.sc.platform.core.util.ZipUtil;
import io.sc.platform.jdbc.exporter.DataExporter;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.exporter.support.ExportTable;
@@ -17,64 +17,79 @@ import liquibase.repackaged.com.opencsv.ICSVWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.jdbc.support.MetaDataAccessException;
import javax.sql.DataSource;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.sql.*;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import java.util.Locale;
import java.util.Date;
import java.util.*;
import java.util.stream.Collectors;
public class LiquibaseDataCsvExporter implements DataExporter {
private static final Logger log = LoggerFactory.getLogger(LiquibaseDataCsvExporter.class);
private static final String OUTPUT_PATH = DirectoryManager.getInstance().getByName("dir.work.web.export");
private static final String OUTPUT_PATH = DirectoryManager.getInstance().getByName("dir.work.web.export") + "/liquibase";
@Override
public void export(DataSource dataSource, DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception{
progressInfo.setTotalWeight(configure.getTables().length);
public void exportData(DataSource dataSource, DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception{
progressInfo.setRunning(true);
progressInfo.setStartDatetime(new Date());
String[] tableNames =ExportTable.getTableNames(configure.getTables());
progressInfo.setTotalWeight(tableNames.length);
List<Table> tables =MetaDataLoader.newInstance().getTables(dataSource,configure.getSchema(),tableNames);
String outPutDir =OUTPUT_PATH + "/" + configure.getSchema();
FileUtil.deldirs(outPutDir);
String dataDir =outPutDir + "/data";
new File(dataDir).mkdirs();
StringBuilder changeSetSb =new StringBuilder();
for(Table table : tables){
progressInfo.setMessageKey(table.getName());
new File(OUTPUT_PATH + "/" + configure.getSchema()).mkdirs();
BufferedWriter writer = WriterUtil.bufferedWriter(OUTPUT_PATH + "/" + configure.getSchema() + "/" + table.getName() + ".csv");
//ICSVWriter csvWriter = new CSVWriterBuilder(writer).withQuoteChar(CSVWriter.NO_QUOTE_CHARACTER).build();
ICSVWriter csvWriter = new CSVWriterBuilder(writer).build();
writeTable(dataSource,table,csvWriter,configure,progressInfo,locale);
csvWriter.flush();
csvWriter.close();
writeChangeSet(changeSetSb,table);
writeTable(dataSource,table,configure,progressInfo,locale);
progressInfo.addWeight(1);
Thread.sleep(1000);
}
FileUtil.writeString(outPutDir + "/liquibase.xml",createLiquibaseFileContent(getLiquibaseChangeLogXmlFileName(configure),changeSetSb));
ZipUtil.zip(outPutDir);
// execution finished
progressInfo.setCompletedDatetime(new Date());
progressInfo.setCurrentWeight(progressInfo.getTotalWeight());
}
private void writeTable(DataSource dataSource,Table table,ICSVWriter writer,DataExportConfigure configure,ProgressInfo progressInfo, Locale locale) {
private void writeTable(DataSource dataSource,Table table,DataExportConfigure configure,ProgressInfo progressInfo, Locale locale) throws Exception{
BufferedWriter writer = WriterUtil.bufferedWriter(OUTPUT_PATH + "/" + configure.getSchema() + "/data/" + table.getName() + ".csv");
ICSVWriter csvWriter = new CSVWriterBuilder(writer).build();
// write column definition rows ===============================================================================
List<Column> columns =table.getColumns();
if(columns==null || columns.size()==0){
throw new RuntimeException("No column found");
}
String[] names =new String[columns.size()];
String[] tableNames =new String[columns.size()];
tableNames[0] =table.getName().toUpperCase();
String[] columnNames =new String[columns.size()];
String[] javaTypes =new String[columns.size()];
String[] sqlTypes =new String[columns.size()];
String[] remarks =new String[columns.size()];
for(int i=0;i<columns.size();i++){
Column column =columns.get(i);
names[i] =column.getName();
columnNames[i] =column.getName();
javaTypes[i] =column.getJavaType().getName();
sqlTypes[i] =column.getSqlType();
remarks[i] =column.getRemarks();
}
writer.writeNext(names);
writer.writeNext(remarks);
writer.writeNext(sqlTypes);
writer.writeNext(javaTypes);
csvWriter.writeNext(tableNames);
csvWriter.writeNext(columnNames);
csvWriter.writeNext(remarks);
csvWriter.writeNext(sqlTypes);
csvWriter.writeNext(javaTypes);
// write result set rows ==================================================================================
writeResultSet(dataSource,table,writer,configure,progressInfo,locale);
writeResultSet(dataSource,table,csvWriter,configure,progressInfo,locale);
csvWriter.flush();
csvWriter.close();
}
private void writeResultSet(DataSource dataSource,Table table,ICSVWriter writer,DataExportConfigure configure,ProgressInfo progressInfo, Locale locale) {
@@ -107,7 +125,7 @@ public class LiquibaseDataCsvExporter implements DataExporter {
if(records!=null && records.size()>0){
List<ParentChildRecord> list =ParentChildRecord.buildTree(records, pkFieldName, fkFieldName);
if(list!=null && list.size()>0){
long count =1;
long count =0;
for(ParentChildRecord record : list){
String[] csv =new String[columns.size()];
for(int i=0;i<columns.size();i++){
@@ -116,10 +134,12 @@ public class LiquibaseDataCsvExporter implements DataExporter {
csv[i] =value;
}
writer.writeNext(csv);
if(count%100==0){
if((++count)%1000==0){
writer.flush();
log.info("[" + table.getName() + "] : " + count);
}
}
log.info("[" + table.getName() + "] : " + count + " completed");
}
}
}
@@ -156,7 +176,7 @@
}
private void exportResultSet(ResultSet rs,Table table,ICSVWriter writer,DataExportConfigure configure,ProgressInfo progressInfo, Locale locale) throws SQLException, IOException {
long count =1;
long count =0;
List<Column> columns =table.getColumns();
while(rs.next()){
String[] csv =new String[columns.size()];
@@ -167,9 +187,51 @@
csv[i] =value;
}
writer.writeNext(csv);
if(count%1000==0){
if((++count)%10000==0){
writer.flush();
log.info("[" + table.getName() + "] : " + count);
}
}
log.info("[" + table.getName() + "] : " + count + " completed");
}
private String createLiquibaseFileContent(String id,StringBuilder content){
StringBuilder sb =new StringBuilder();
sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>").append("\n");
sb.append("<databaseChangeLog").append("\n");
sb.append("\t").append("xmlns=\"http://www.liquibase.org/xml/ns/dbchangelog\"").append("\n");
sb.append("\t").append("xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"").append("\n");
sb.append("\t").append("xmlns:ext=\"http://www.liquibase.org/xml/ns/dbchangelog-ext\"").append("\n");
sb.append("\t").append("xsi:schemaLocation=\"").append("\n");
sb.append("\t\t").append("http://www.liquibase.org/xml/ns/dbchangelog").append("\n");
sb.append("\t\t").append("http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.1.xsd").append("\n");
sb.append("\t\t").append("http://www.liquibase.org/xml/ns/dbchangelog-ext").append("\n");
sb.append("\t\t").append("http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-ext.xsd").append("\n");
sb.append("\">").append("\n");
sb.append("\t").append("<changeSet id=\"").append(id).append("\" author=\"platform\">").append("\n");
sb.append(content);
sb.append("\t").append("</changeSet>").append("\n");
sb.append("</databaseChangeLog>");
return sb.toString();
}
private void writeChangeSet(StringBuilder sb,Table table){
sb.append("\t\t").append("<customChange class=\"io.sc.platform.jdbc.liquibase.task.CsvImportTaskChange\">").append("\n");
sb.append("\t\t\t").append("<param name=\"dataFile\" value=\"classpath:/liquibase/data/").append(table.getName()).append(".csv").append("\"/>").append("\n");
sb.append("\t\t").append("</customChange>").append("\n");
sb.append("\n");
}
public String getLiquibaseChangeLogXmlFileName(DataExportConfigure configure) {
String[] tableNames = Arrays.stream(configure.getTables()).map(ExportTable::getName).collect(Collectors.toList()).toArray(new String[]{});
String description =null;
if(tableNames.length==1) {
description =tableNames[0].toUpperCase();
}else {
description =tableNames[0].toUpperCase() + " And More " + (tableNames.length-1) + " Tables";
}
return "LIQUIBASE_" + DateUtil.formatDate(new Date(),"yyyy.MM.dd_HH.mm.ss") + "__" + description;
}
}

242
io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/exporter/LiquibaseDataExporter.java

@@ -1,242 +0,0 @@
package io.sc.platform.jdbc.liquibase.exporter;
import io.sc.platform.core.DirectoryManager;
import io.sc.platform.core.Environment;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.core.util.StringUtil;
import io.sc.platform.core.util.WriterUtil;
import io.sc.platform.jdbc.exporter.DataExporter;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.exporter.support.ExportTable;
import io.sc.platform.jdbc.liquibase.exporter.support.ParentChildRecord;
import io.sc.platform.jdbc.meta.MetaDataLoader;
import io.sc.platform.jdbc.meta.support.Column;
import io.sc.platform.jdbc.meta.support.Table;
import io.sc.platform.jdbc.util.SqlTypeUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.jdbc.support.MetaDataAccessException;
import javax.sql.DataSource;
import javax.xml.bind.DatatypeConverter;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.UnsupportedEncodingException;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
public class LiquibaseDataExporter implements DataExporter {
private static final Logger log = LoggerFactory.getLogger(LiquibaseDataExporter.class);
private static final String OUTPUT_PATH = DirectoryManager.getInstance().getByName("");
@Override
public void export(DataSource dataSource, DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) {
try {
exportSingleFile(dataSource, configure);
// exportMultiFile(dataSource, configure);
}catch (Exception e){
throw new RuntimeException(e);
}
}
private void exportSingleFile(DataSource dataSource, DataExportConfigure configure) throws XMLStreamException, FileNotFoundException, UnsupportedEncodingException, MetaDataAccessException {
XMLStreamWriter writer = WriterUtil.xmlStreamWriter(OUTPUT_PATH + File.separator + "data." + configure.getSchema() + ".xml");
writeHeader(writer);
String[] tableNames =ExportTable.getTableNames(configure.getTables());
List<Table> tables =MetaDataLoader.newInstance().getTables(dataSource,configure.getSchema(),tableNames);
for(Table table : tables){
writeTable(dataSource,table,writer);
}
writeTail(writer);
writer.flush();
writer.close();
}
private void exportMultiFile(DataSource dataSource, DataExportConfigure configure) throws MetaDataAccessException, XMLStreamException, FileNotFoundException, UnsupportedEncodingException {
String[] tableNames =ExportTable.getTableNames(configure.getTables());
List<Table> tables =MetaDataLoader.newInstance().getTables(dataSource,configure.getSchema(),tableNames);
for(Table table : tables){
XMLStreamWriter writer = WriterUtil.xmlStreamWriter(OUTPUT_PATH + File.separator + "data." + configure.getSchema() + "." + table.getName() + ".xml");
writeHeader(writer);
writeTable(dataSource,table,writer);
writeTail(writer);
writer.flush();
writer.close();
}
}
private void writeHeader(XMLStreamWriter writer) throws XMLStreamException {
writer.writeStartDocument(Environment.DEFAULT_CHARSET_NAME,"1.0");
writer.writeStartDocument();
writer.writeStartElement("export");
}
private void writeTail(XMLStreamWriter writer) throws XMLStreamException {
writer.writeEndElement();
}
private void writeTable(DataSource dataSource,Table table,XMLStreamWriter writer) throws XMLStreamException {
// write column definition rows ===============================================================================
List<Column> columns =table.getColumns();
if(columns==null || columns.size()==0){
throw new RuntimeException("No column found");
}
writer.writeStartElement("table"); //start table
writer.writeAttribute("name", table.getName());
writer.writeAttribute("remarks", table.getRemarks());
writer.writeStartElement("sql"); //start sql
writer.writeCharacters("select * from " + table.getName());
writer.writeEndElement(); //end sql
writer.writeStartElement("fields"); //start fields
for(int i=0;i<columns.size();i++){
Column column =columns.get(i);
writer.writeStartElement("field"); //start field
writer.writeAttribute("name", column.getName());
writer.writeAttribute("type", "" + column.getJavaType());
writer.writeAttribute("typeName", column.getSqlType());
writer.writeAttribute("remarks", column.getRemarks());
writer.writeEndElement(); //end field
}
writer.writeEndElement(); //end fields
// write result set rows ==================================================================================
writeResultSet(dataSource,table,writer);
writer.writeEndElement(); //end table
}
private void writeResultSet(DataSource dataSource,Table table,XMLStreamWriter writer) throws XMLStreamException {
try(
Connection connection = DataSourceUtils.getConnection(dataSource);
Statement statement =connection.createStatement(ResultSet.TYPE_FORWARD_ONLY,ResultSet.CONCUR_READ_ONLY);
){
statement.setFetchSize(1000);
try(ResultSet rs =statement.executeQuery("select * from " + table.getName());){
if(table.isSelfReference()){
log.info("export table [" + table.getName() + "] with parent-and-children relationship: " + table.getSelfReferenceForeignKeyColumnName() + " --> " + table.getSelfReferencePrimaryKeyColumnName());
exportResultSetWithSelfReference(rs,table,writer);
}else{
log.info("export table [" + table.getName() + "]");
exportResultSet(rs,table,writer);
}
}catch (Exception e){
throw new RuntimeException(e);
}
}catch (Exception e){
throw new RuntimeException(e);
}
}
private void exportResultSetWithSelfReference(ResultSet rs,Table table,XMLStreamWriter writer) throws XMLStreamException, SQLException, UnsupportedEncodingException {
String pkFieldName =table.getSelfReferencePrimaryKeyColumnName();
String fkFieldName =table.getSelfReferenceForeignKeyColumnName();
List<Column> columns =table.getColumns();
List<ParentChildRecord> records =listResultSet(rs,table.getColumns());
if(records!=null && records.size()>0){
List<ParentChildRecord> list =ParentChildRecord.buildTree(records, pkFieldName, fkFieldName);
if(list!=null && list.size()>0){
writer.writeStartElement("resultset"); //start resultset
long count =1;
for(ParentChildRecord record : list){
writer.writeStartElement("row"); //start row
for(int i=0;i<columns.size();i++){
Column column =columns.get(i);
String value =record.getField(column.getName());
if(value!=null) {
if(StringUtil.validateXml10(value)) {
writer.writeStartElement("f"); //start f
writer.writeCharacters(value);
writer.writeEndElement(); //end f
}else {
writer.writeStartElement("f"); //start f
writer.writeAttribute("base64", "true");
writer.writeCharacters(DatatypeConverter.printBase64Binary(value.getBytes(Environment.DEFAULT_CHARSET_NAME)));
writer.writeEndElement(); //end f
}
}else {
writer.writeStartElement("f"); //start f
writer.writeEndElement(); //end f
}
}
writer.writeEndElement(); //end row
if(count%100==0){
writer.flush();
}
}
writer.writeEndElement(); //end resultset
}
}
}
private List<ParentChildRecord> listResultSet(ResultSet rs,List<Column> columns) throws SQLException{
if(rs!=null){
List<ParentChildRecord> result =new ArrayList<ParentChildRecord>();
while(rs.next()){
ParentChildRecord record =new ParentChildRecord();
for(int i=0;i<columns.size();i++){
Column column =columns.get(i);
String fieldName =column.getName();
String fieldValue =null;
if(Types.BLOB==column.getVendorTypeNumber()){
Blob blob =rs.getBlob(column.getName());
if(blob!=null && blob.length()>0){
if(blob.length()<Integer.MAX_VALUE){
byte[] bytes =blob.getBytes(1, (int)blob.length());
fieldValue =DatatypeConverter.printBase64Binary(bytes);
}else{
throw new RuntimeException(column.getName() + " field is a BLOB, and it's length great than " + Integer.MAX_VALUE + ", can not exported");
}
}
}else{
fieldValue =rs.getString(fieldName);
}
record.addField(fieldName, fieldValue);
}
result.add(record);
}
return result;
}
return null;
}
private void exportResultSet(ResultSet rs,Table table,XMLStreamWriter writer) throws XMLStreamException, SQLException, UnsupportedEncodingException {
writer.writeStartElement("resultset"); //start resultset
long count =1;
List<Column> columns =table.getColumns();
while(rs.next()){
writer.writeStartElement("row"); //start row
for(int i=0;i<columns.size();i++){
Column column =columns.get(i);
String fieldName =column.getName();
String value = SqlTypeUtil.getString(rs,fieldName,column.getVendorTypeNumber());
if(value!=null) {
if(StringUtil.validateXml10(value)) {
writer.writeStartElement("f"); //start f
writer.writeCharacters(value);
writer.writeEndElement(); //end f
}else {
writer.writeStartElement("f"); //start f
writer.writeAttribute("base64", "true");
writer.writeCharacters(DatatypeConverter.printBase64Binary(value.getBytes(Environment.DEFAULT_CHARSET_NAME)));
writer.writeEndElement(); //end f
}
}else {
writer.writeStartElement("f"); //start f
writer.writeEndElement(); //end f
}
}
writer.writeEndElement(); //end row
if(count%1000==0){
writer.flush();
}
}
writer.writeEndElement(); //end resultset
}
}

55
io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/importer/LiquibaseDataCsvImporter.java

@@ -0,0 +1,55 @@
package io.sc.platform.jdbc.liquibase.importer;
import io.sc.platform.core.DirectoryManager;
import io.sc.platform.core.Environment;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.core.util.DateUtil;
import io.sc.platform.core.util.FileUtil;
import io.sc.platform.core.util.WriterUtil;
import io.sc.platform.core.util.ZipUtil;
import io.sc.platform.jdbc.exporter.DataExporter;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.exporter.support.DataImportConfigure;
import io.sc.platform.jdbc.exporter.support.ExportTable;
import io.sc.platform.jdbc.importer.DataImporter;
import io.sc.platform.jdbc.liquibase.exporter.support.ParentChildRecord;
import io.sc.platform.jdbc.liquibase.task.CsvImportTaskChange;
import io.sc.platform.jdbc.meta.MetaDataLoader;
import io.sc.platform.jdbc.meta.support.Column;
import io.sc.platform.jdbc.meta.support.Table;
import io.sc.platform.jdbc.util.SqlTypeUtil;
import liquibase.database.jvm.JdbcConnection;
import liquibase.repackaged.com.opencsv.CSVWriterBuilder;
import liquibase.repackaged.com.opencsv.ICSVWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.web.multipart.MultipartFile;
import javax.sql.DataSource;
import java.io.*;
import java.sql.*;
import java.util.Date;
import java.util.*;
import java.util.stream.Collectors;
public class LiquibaseDataCsvImporter implements DataImporter {
@Override
public void importData(DataSource dataSource, DataImportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception {
progressInfo.setRunning(true);
progressInfo.setStartDatetime(new Date());
progressInfo.setTotalWeight(configure.getFiles().size()+1);
progressInfo.addWeight(1);
List<MultipartFile> multipartFiles =configure.getFiles();
if(multipartFiles!=null && !multipartFiles.isEmpty()){
JdbcConnection jdbcConnection =new JdbcConnection(dataSource.getConnection());
for(MultipartFile multipartFile : multipartFiles){
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(multipartFile.getInputStream(), Environment.DEFAULT_CHARSET_NAME));
CsvImportTaskChange change =new CsvImportTaskChange();
change.execute(jdbcConnection,bufferedReader,configure.isDeleteFirst());
}
jdbcConnection.close();
}
}
}

74
io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/task/CsvImportTaskChange.java

@@ -5,6 +5,7 @@ import io.sc.platform.core.util.ClassUtil;
import io.sc.platform.core.util.StringUtil;
import io.sc.platform.core.util.TypeConvertor;
import io.sc.platform.jdbc.meta.support.Column;
import io.sc.platform.jdbc.meta.support.Table;
import liquibase.change.custom.CustomTaskChange;
import liquibase.database.Database;
import liquibase.database.DatabaseConnection;
@@ -19,11 +20,12 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.Resource;
import org.springframework.util.StringUtils;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.io.Reader;
import java.sql.PreparedStatement;
import java.util.Arrays;
import java.util.Collection;
@@ -32,17 +34,8 @@ import java.util.stream.Collectors;
public class CsvImportTaskChange implements CustomTaskChange {
private static final Logger log = LoggerFactory.getLogger(CsvImportTaskChange.class);
private String table;
private String dataFile;
public String getTable() {
return table;
}
public void setTable(String table) {
this.table = table;
}
public String getDataFile() {
return dataFile;
}
@@ -57,7 +50,7 @@ public class CsvImportTaskChange implements CustomTaskChange {
if(connection instanceof JdbcConnection){
JdbcConnection conn =(JdbcConnection)connection;
try {
execute(conn);
execute(conn,false);
} catch (Exception e) {
log.error("",e);
throw new CustomChangeException(e);
@@ -86,36 +79,58 @@ public class CsvImportTaskChange implements CustomTaskChange {
}
public void execute(JdbcConnection connection) throws Exception {
execute(connection,false);
}
public void execute(JdbcConnection connection,boolean deleteFirst) throws Exception {
Resource resource =new DefaultResourceLoader().getResource(dataFile);
if(resource==null || !resource.exists()){
throw new RuntimeException(dataFile + " NOT found");
}
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(resource.getInputStream(), Environment.DEFAULT_CHARSET_NAME));
CSVReader reader = new CSVReaderBuilder(bufferedReader).build();
Iterator<String[]> iterator = reader.iterator();
execute(connection,bufferedReader,deleteFirst);
}
public void execute(JdbcConnection connection, Reader reader,boolean deleteFirst) throws Exception {
CSVReader csvReader = new CSVReaderBuilder(reader).build();
Iterator<String[]> iterator = csvReader.iterator();
if(iterator.hasNext()) {
Column[] columns = buildColumns(iterator);
PreparedStatement ps =connection.prepareStatement(getSql(columns));
long index =0;
Table table =readTable(iterator);
Column[] columns = readColumns(iterator);
if(deleteFirst) {
PreparedStatement ps =connection.prepareStatement("delete from " + table.getName());
ps.execute();
ps.close();
}
log.info("import data to table [" + table.getName() + "]");
PreparedStatement ps =connection.prepareStatement(getInsertSql(table,columns));
long count =0;
while(iterator.hasNext()){
String[] data =iterator.next();
for(int i=0;i<columns.length;i++) {
ps.setObject(i+1, TypeConvertor.getValue(data[i],columns[i].getJavaType(),null));
}
ps.addBatch();
if((index++)%1000==0){
if((++count)%1000==0){
ps.executeBatch();
log.info("[" + table.getName() + "] : " + count);
}
}
ps.executeBatch();
ps.close();
}
reader.close();
csvReader.close();
}
private Table readTable(Iterator<String[]> iterator){
String[] names =iterator.next();
Table table =new Table();
table.setName(names[0]);
return table;
}
private Column[] buildColumns(Iterator<String[]> iterator){
private Column[] readColumns(Iterator<String[]> iterator){
// row 1: column names
String[] names =iterator.next();
// row 2: remarks
@@ -137,10 +152,10 @@ public class CsvImportTaskChange implements CustomTaskChange {
return columns;
}
private String getSql(Column[] columns){
private String getInsertSql(Table table,Column[] columns){
if(columns!=null && columns.length>0){
StringBuilder sb =new StringBuilder();
sb.append("insert into ").append(table).append("(");
sb.append("insert into ").append(table.getName()).append("(");
Collection<String> names =Arrays.stream(columns).map(Column::getName).collect(Collectors.toList());
sb.append(StringUtil.combine(",",names));
sb.append(") values (");
@@ -150,17 +165,4 @@ public class CsvImportTaskChange implements CustomTaskChange {
}
return null;
}
public static void main(String[] args) throws Exception {
String url ="jdbc:p6spy:mysql://localhost:3306/platform?autoReconnect=true&allowPublicKeyRetrieval=true&useSSL=false&useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai";
String username ="platform";
String password ="platform";
Connection connection = DriverManager.getConnection(url,username,password);
JdbcConnection jdbcConnection =new JdbcConnection(connection);
CsvImportTaskChange change =new CsvImportTaskChange();
change.setTable("SYS_MENU");
change.setDataFile("file:/Users/wangshaoping/wspsc/workspace/wangshaoping/v8/platform/app.platform/work/web/export/platform/sys_menu.csv");
change.execute(jdbcConnection);
}
}
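Read together with writeTable in LiquibaseDataCsvExporter above, the data file layout that readTable/readColumns now expect is five header rows followed by data rows. A hypothetical example (table, columns, and values are invented for illustration only):

public class CsvLayoutSketch {
    // each inner array is one row of the exported CSV file
    static final String[][] ROWS = {
        {"SYS_EXAMPLE"},                           // row 1: table name (consumed by readTable)
        {"ID_", "NAME_"},                          // row 2: column names
        {"primary key", "display name"},           // row 3: column remarks
        {"VARCHAR", "VARCHAR"},                    // row 4: SQL type names
        {"java.lang.String", "java.lang.String"},  // row 5: Java types, resolved via ClassUtil/TypeConvertor
        {"1", "first record"},                     // rows 6 onward: data, batch-inserted via getInsertSql
    };
}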

13
io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/task/new.cfg.json

@@ -1,13 +0,0 @@
{
"table" : "",
"sql" : "",
"cols": [
{
"name": "name",
"remark": "remark",
"javaType": "java.lang.String",
"sqlType": "CBLOB",
"skip": false
}
]
}

4
io.sc.platform.jdbc.liquibase/src/main/java/io/sc/platform/jdbc/liquibase/task/new.csv

@@ -1,4 +0,0 @@
"col1","col2,name
ok,""ok"""
"col1","col2,name
ok,""ok"""

1
io.sc.platform.jdbc.liquibase/src/main/resources/META-INF/services/io.sc.platform.jdbc.importer.DataImporter

@@ -0,0 +1 @@
io.sc.platform.jdbc.liquibase.importer.LiquibaseDataCsvImporter

23
io.sc.platform.jdbc.liquibase/src/test/java/io/sc/platform/jdbc/liquibase/task/CsvImportTaskChangeTest.java

@@ -0,0 +1,23 @@
package io.sc.platform.jdbc.liquibase.task;
import liquibase.database.jvm.JdbcConnection;
import java.sql.Connection;
import java.sql.DriverManager;
public class CsvImportTaskChangeTest {
public static void main(String[] args) throws Exception {
String url ="jdbc:p6spy:mysql://localhost:3306/platform?autoReconnect=true&allowPublicKeyRetrieval=true&useSSL=false&useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai";
String username ="platform";
String password ="platform";
Connection connection = DriverManager.getConnection(url,username,password);
JdbcConnection jdbcConnection =new JdbcConnection(connection);
CsvImportTaskChange change =new CsvImportTaskChange();
change.setDataFile("classpath:/mv_score_record.csv");
change.execute(jdbcConnection);
jdbcConnection.close();
connection.close();
System.out.println("completed");
}
}

30005
io.sc.platform.jdbc.liquibase/src/test/resources/mv_score_record.csv

File diff suppressed because it is too large

90
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/controller/JdbcDataWebController.java

@@ -0,0 +1,90 @@
package io.sc.platform.jdbc.controller;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.jdbc.controller.support.ExporterThread;
import io.sc.platform.jdbc.controller.support.ImporterThread;
import io.sc.platform.jdbc.exporter.exception.ExporterRunningException;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.exporter.support.DataImportConfigure;
import io.sc.platform.jdbc.service.JdbcDataService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.InputStream;
import java.util.Locale;
@RestController
@RequestMapping("/api/jdbc/data")
public class JdbcDataWebController {
@Autowired private JdbcDataService jdbcDataService;
private ImporterThread importerThread =null;
private ExporterThread exporterThread =null;
@PostMapping("importData")
public void importData(
@RequestParam(name="datasource",required = false) String datasource,
@RequestParam(name="deleteFirst", required = false) boolean deleteFirst,
@RequestPart(name="files",required = false) MultipartFile multipartFile,
Locale locale) throws Exception
{
// check whether an execution thread started by the current user is still running
if(importerThread!=null && importerThread.isAlive()){
throw new ExporterRunningException();
}
DataImportConfigure configure =new DataImportConfigure();
configure.setDatasource(datasource);
configure.setDeleteFirst(deleteFirst);
configure.addFile(multipartFile);
// start a new thread
importerThread =new ImporterThread(jdbcDataService,configure,locale);
importerThread.start();
}
@PostMapping("exportData")
public void exportData(@RequestBody DataExportConfigure configure, Locale locale) throws Exception {
// check whether an execution thread started by the current user is still running
if(exporterThread!=null && exporterThread.isAlive()){
throw new ExporterRunningException();
}
// start a new thread
exporterThread =new ExporterThread(jdbcDataService,configure,locale);
exporterThread.start();
}
@GetMapping("traceImporterExecuteProgress")
public ProgressInfo traceImporterExecuteProgress(Locale locale) throws Exception{
if(importerThread!=null){
Exception exception =importerThread.getException();
if(exception!=null){
importerThread.setException(null);
throw exception;
}
if(importerThread.isAlive()){
if(importerThread.getException()!=null){
throw importerThread.getException();
}
return importerThread.getProgressInfo();
}
}
return ProgressInfo.notRunning();
}
@GetMapping("traceExporterExecuteProgress")
public ProgressInfo traceExporterExecuteProgress(Locale locale) throws Exception{
if(exporterThread!=null){
Exception exception =exporterThread.getException();
if(exception!=null){
exporterThread.setException(null);
throw exception;
}
if(exporterThread.isAlive()){
if(exporterThread.getException()!=null){
throw exporterThread.getException();
}
return exporterThread.getProgressInfo();
}
}
return ProgressInfo.notRunning();
}
}

43
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/controller/JdbcExportWebController.java

@@ -1,43 +0,0 @@
package io.sc.platform.jdbc.controller;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.jdbc.controller.support.ExporterThread;
import io.sc.platform.jdbc.exporter.exception.ExporterNotRunningException;
import io.sc.platform.jdbc.exporter.exception.ExporterRunningException;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.service.JdbcExportService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.Date;
import java.util.Locale;
@RestController
@RequestMapping("/api/jdbc/export")
public class JdbcExportWebController {
@Autowired private JdbcExportService jdbcExportService;
private ExporterThread thread =null;
@PostMapping("export")
public void export(@RequestBody DataExportConfigure configure, Locale locale) throws Exception {
// check whether an execution thread started by the current user is still running
if(thread!=null && thread.isAlive()){
throw new ExporterRunningException();
}
// start a new thread
thread =new ExporterThread(jdbcExportService,configure,locale);
thread.start();
}
@GetMapping("traceExecuteProgress")
public ProgressInfo traceExecuteProgress(Locale locale) throws Exception{
if(thread!=null && thread.isAlive()){
if(thread.getException()!=null){
throw thread.getException();
}
return thread.getProgressInfo();
}else{
return ProgressInfo.notRunning();
}
}
}

18
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/controller/support/ExporterThread.java

@@ -2,7 +2,7 @@ package io.sc.platform.jdbc.controller.support;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.service.JdbcExportService;
import io.sc.platform.jdbc.service.JdbcDataService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -10,21 +10,21 @@ import java.util.Locale;
public class ExporterThread extends Thread {
private Logger log = LoggerFactory.getLogger(ExporterThread.class);
private JdbcExportService jdbcExportService;
private DataExportConfigure exportConfigure;
private JdbcDataService jdbcDataService;
private DataExportConfigure configure;
private Locale locale;
private volatile ProgressInfo progressInfo =new ProgressInfo(100,0);
private Exception exception;
public ExporterThread(JdbcExportService jdbcExportService,DataExportConfigure exportConfigure, Locale locale){
this.jdbcExportService =jdbcExportService;
this.exportConfigure =exportConfigure;
public ExporterThread(JdbcDataService jdbcDataService, DataExportConfigure configure, Locale locale){
this.jdbcDataService =jdbcDataService;
this.configure =configure;
this.locale =locale;
}
@Override
public void run() {
try {
jdbcExportService.export(exportConfigure,progressInfo,locale);
jdbcDataService.exportData(configure,progressInfo,locale);
} catch (Exception e) {
this.exception =e;
log.error("",e);
@@ -36,4 +36,8 @@ public class ExporterThread extends Thread {
public Exception getException() {
return exception;
}
public void setException(Exception exception) {
this.exception = exception;
}
}

44
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/controller/support/ImporterThread.java

@@ -0,0 +1,44 @@
package io.sc.platform.jdbc.controller.support;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.exporter.support.DataImportConfigure;
import io.sc.platform.jdbc.service.JdbcDataService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Locale;
public class ImporterThread extends Thread {
private Logger log = LoggerFactory.getLogger(ImporterThread.class);
private JdbcDataService jdbcDataService;
private DataImportConfigure configure;
private Locale locale;
private volatile ProgressInfo progressInfo =new ProgressInfo(100,0);
private Exception exception;
public ImporterThread(JdbcDataService jdbcDataService, DataImportConfigure configure, Locale locale){
this.jdbcDataService =jdbcDataService;
this.configure =configure;
this.locale =locale;
}
@Override
public void run() {
try {
jdbcDataService.importData(configure,progressInfo,locale);
} catch (Exception e) {
this.exception =e;
log.error("",e);
}
}
public ProgressInfo getProgressInfo(){
return progressInfo;
}
public Exception getException() {
return exception;
}
public void setException(Exception exception) {
this.exception = exception;
}
}

2
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/exporter/DataExporter.java

@@ -9,7 +9,7 @@ import java.util.Locale;
import java.util.ServiceLoader;
public interface DataExporter {
public void export(DataSource dataSource, DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception;
public void exportData(DataSource dataSource, DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception;
public static DataExporter newInstance(){
ServiceLoader<DataExporter> serviceLoader =ServiceLoader.load(DataExporter.class);

51
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/exporter/support/DataImportConfigure.java

@@ -0,0 +1,51 @@
package io.sc.platform.jdbc.exporter.support;
import org.springframework.web.multipart.MultipartFile;
import java.util.ArrayList;
import java.util.List;
public class DataImportConfigure {
private String datasource;
private boolean deleteFirst;
private List<MultipartFile> files =new ArrayList<>();
public DataImportConfigure addFile(MultipartFile file){
this.files.add(file);
return this;
}
public String getDatasource() {
return datasource;
}
public void setDatasource(String datasource) {
this.datasource = datasource;
}
public boolean isDeleteFirst() {
return deleteFirst;
}
public void setDeleteFirst(boolean deleteFirst) {
this.deleteFirst = deleteFirst;
}
public List<MultipartFile> getFiles() {
return files;
}
public void setFiles(List<MultipartFile> files) {
this.files = files;
}
@Override
public String toString() {
return "DataImportConfigure{" +
"datasource='" + datasource + '\'' +
", deleteFirst=" + deleteFirst +
", files=" + files +
'}';
}
}

31
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/importer/DataImporter.java

@@ -1,8 +1,37 @@
package io.sc.platform.jdbc.importer;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.jdbc.exporter.DataExporter;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.exporter.support.DataImportConfigure;
import javax.sql.DataSource;
import java.io.Reader;
import java.util.Iterator;
import java.util.Locale;
import java.util.ServiceLoader;
public interface DataImporter {
    public void importFrom(DataSource dataSource, String schemaName, Reader reader);
    public void importData(DataSource dataSource, DataImportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception;

    // Returns the first implementation registered via the JDK ServiceLoader, or null if none is registered.
    public static DataImporter newInstance() {
        ServiceLoader<DataImporter> serviceLoader = ServiceLoader.load(DataImporter.class);
        Iterator<DataImporter> services = serviceLoader.iterator();
        if (services.hasNext()) {
            return services.next();
        }
        return null;
    }

    // Returns the registered implementation whose class name matches implementsClass, or null if not found.
    public static DataImporter newInstance(String implementsClass) {
        ServiceLoader<DataImporter> serviceLoader = ServiceLoader.load(DataImporter.class);
        Iterator<DataImporter> services = serviceLoader.iterator();
        while (services.hasNext()) {
            DataImporter importer = services.next();
            if (importer.getClass().getName().equals(implementsClass)) {
                return importer;
            }
        }
        return null;
    }
}
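
Both newInstance() variants resolve implementations through the JDK ServiceLoader, which reads provider class names from a META-INF/services/io.sc.platform.jdbc.importer.DataImporter file on the classpath. A hedged skeleton of such a provider follows; the class is hypothetical and is not the importer shipped in this commit.

package com.example.jdbc; // hypothetical package, for illustration only

import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.jdbc.exporter.support.DataImportConfigure;
import io.sc.platform.jdbc.importer.DataImporter;

import javax.sql.DataSource;
import java.io.Reader;
import java.util.Locale;

public class NoOpDataImporter implements DataImporter {
    @Override
    public void importFrom(DataSource dataSource, String schemaName, Reader reader) {
        // a real importer would stream rows from the reader into the given schema
    }

    @Override
    public void importData(DataSource dataSource, DataImportConfigure configure,
                           ProgressInfo progressInfo, Locale locale) throws Exception {
        // a real importer would honour configure.isDeleteFirst(), iterate configure.getFiles(),
        // and report progress through progressInfo
    }
}

Registering it takes a single line containing the fully qualified class name in META-INF/services/io.sc.platform.jdbc.importer.DataImporter.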

14
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/service/JdbcDataService.java

@@ -0,0 +1,14 @@
package io.sc.platform.jdbc.service;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.exporter.support.DataImportConfigure;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.multipart.MultipartFile;
import java.util.Locale;
public interface JdbcDataService {
    public void importData(DataImportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception;
    public void exportData(DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception;
}

10
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/service/JdbcExportService.java

@@ -1,10 +0,0 @@
package io.sc.platform.jdbc.service;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import java.util.Locale;
public interface JdbcExportService {
    public void export(DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception;
}

54
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/service/impl/JdbcDataServiceImpl.java

@@ -0,0 +1,54 @@
package io.sc.platform.jdbc.service.impl;
import io.sc.platform.core.Environment;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.jdbc.exporter.DataExporter;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.exporter.support.DataImportConfigure;
import io.sc.platform.jdbc.importer.DataImporter;
import io.sc.platform.jdbc.service.DatasourceService;
import io.sc.platform.jdbc.service.JdbcDataService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;
import javax.sql.DataSource;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Date;
import java.util.List;
import java.util.Locale;
@Service
public class JdbcDataServiceImpl implements JdbcDataService {
    @Autowired private DatasourceService datasourceService;

    @Override
    public void importData(DataImportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception {
        if (configure == null) {
            return;
        }
        // resolve the target datasource, falling back to the default one when none is named
        DataSource dataSource = null;
        if (StringUtils.hasText(configure.getDatasource())) {
            dataSource = datasourceService.getDatasource(configure.getDatasource());
        } else {
            dataSource = datasourceService.getDefaultDatasource();
        }
        DataImporter.newInstance().importData(dataSource, configure, progressInfo, locale);
    }

    @Override
    public void exportData(DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception {
        if (configure == null) {
            return;
        }
        // same datasource resolution as importData
        DataSource dataSource = null;
        if (StringUtils.hasText(configure.getDatasource())) {
            dataSource = datasourceService.getDatasource(configure.getDatasource());
        } else {
            dataSource = datasourceService.getDefaultDatasource();
        }
        DataExporter.newInstance().exportData(dataSource, configure, progressInfo, locale);
    }
}
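
Both methods above resolve the datasource in the same way; if that logic grows, it could be pulled into a small helper. A sketch of that refactoring, purely as a suggestion (the class name and placement are not part of this commit):

package io.sc.platform.jdbc.service.impl; // same package as the service above (sketch only)

import io.sc.platform.jdbc.service.DatasourceService;
import org.springframework.util.StringUtils;

import javax.sql.DataSource;

// Shared lookup: a named datasource when one is given, otherwise the default one.
final class DatasourceResolver {
    private DatasourceResolver() {}

    static DataSource resolve(DatasourceService datasourceService, String name) {
        if (StringUtils.hasText(name)) {
            return datasourceService.getDatasource(name);
        }
        return datasourceService.getDefaultDatasource();
    }
}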

43
io.sc.platform.jdbc/src/main/java/io/sc/platform/jdbc/service/impl/JdbcExportServiceImpl.java

@@ -1,43 +0,0 @@
package io.sc.platform.jdbc.service.impl;
import io.sc.platform.core.support.ProgressInfo;
import io.sc.platform.jdbc.exporter.DataExporter;
import io.sc.platform.jdbc.exporter.support.DataExportConfigure;
import io.sc.platform.jdbc.service.DatasourceService;
import io.sc.platform.jdbc.service.JdbcExportService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import javax.sql.DataSource;
import java.util.Date;
import java.util.Locale;
@Service
public class JdbcExportServiceImpl implements JdbcExportService {
    @Autowired private DatasourceService datasourceService;

    @Override
    public void export(DataExportConfigure configure, ProgressInfo progressInfo, Locale locale) throws Exception {
        if (configure == null) {
            return;
        }
        progressInfo.setRunning(true);
        DataSource dataSource = null;
        if (StringUtils.hasText(configure.getDatasource())) {
            dataSource = datasourceService.getDatasource(configure.getDatasource());
        } else {
            dataSource = datasourceService.getDefaultDatasource();
        }
        progressInfo.setStartDatetime(new Date());
        progressInfo.setTotalWeight(100);
        DataExporter dataExporter = DataExporter.newInstance();
        dataExporter.export(dataSource, configure, progressInfo, locale);
        // finished: record completion and fill the progress bar
        progressInfo.setCompletedDatetime(new Date());
        progressInfo.setCurrentWeight(progressInfo.getTotalWeight());
    }
}
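
With this service removed, the start/complete bookkeeping it performed on ProgressInfo has to happen elsewhere, presumably inside the importer/exporter implementations. A hedged sketch of that pattern, reusing only the ProgressInfo setters visible in the deleted code above:

package com.example.progress; // hypothetical location, for illustration only

import io.sc.platform.core.support.ProgressInfo;

import java.util.Date;

// Wraps a unit of work with the same progress bookkeeping the deleted service used to do.
public final class ProgressTemplate {
    private ProgressTemplate() {}

    @FunctionalInterface
    public interface ThrowingRunnable {
        void run() throws Exception;
    }

    public static void run(ProgressInfo progressInfo, ThrowingRunnable work) throws Exception {
        progressInfo.setRunning(true);
        progressInfo.setStartDatetime(new Date());
        progressInfo.setTotalWeight(100);
        work.run();
        // finished: record completion and fill the progress bar
        progressInfo.setCompletedDatetime(new Date());
        progressInfo.setCurrentWeight(progressInfo.getTotalWeight());
    }
}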

22
io.sc.platform.mvc/src/main/java/io/sc/platform/mvc/service/impl/FrontendExportServiceImpl.java

@@ -4,12 +4,12 @@ import io.sc.platform.core.DirectoryManager;
import io.sc.platform.core.Environment;
import io.sc.platform.core.util.FileUtil;
import io.sc.platform.core.util.UrlUtil;
import io.sc.platform.core.util.ZipUtil;
import io.sc.platform.mvc.plugins.PluginManager;
import io.sc.platform.mvc.plugins.item.FrontEndModule;
import io.sc.platform.mvc.service.FrontendExportService;
import io.sc.platform.mvc.support.ExportFileInfo;
import io.sc.platform.mvc.support.FrontendExportParam;
import net.lingala.zip4j.ZipFile;
import org.apache.commons.io.IOUtils;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.Resource;
@@ -83,25 +83,7 @@ public class FrontendExportServiceImpl implements FrontendExportService {
        // generate component.js
        generateConfigureJs(zipDirPath, frontendWebContextPath, backendApiWebContextPath);
        Resource rs = resourceLoader.getResource("file:" + zipDirPath);
        if (rs.exists()) {
            ZipFile zipFile = new ZipFile(fileName);
            File zipDir = new File(zipDirPath);
            if (zipDir.exists() && zipDir.isDirectory()) {
                File[] fs = zipDir.listFiles();
                if (fs != null && fs.length > 0) {
                    for (File f : fs) {
                        if (f.isDirectory()) {
                            zipFile.addFolder(f);
                        } else {
                            zipFile.addFile(f);
                        }
                    }
                }
            }
            zipFile.close();
        }
        FileUtil.deldirs(zipDirPath);
        ZipUtil.zip(zipDirPath, fileName, true);
    }

    private void copyResource2Dir(InputStream inputStream, String targetPath, String resource) throws IOException {
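
The inline zip4j block above is replaced by a single ZipUtil.zip(zipDirPath, fileName, true) call. ZipUtil itself is not shown in this section, so the following is only a guess at what such a helper could look like, built on java.util.zip; in particular, the boolean flag is assumed here to mean "delete the source directory after zipping".

package com.example.zip; // hypothetical location; the real helper lives in io.sc.platform.core.util.ZipUtil

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Comparator;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public final class ZipSketch {
    private ZipSketch() {}

    public static void zip(String sourceDir, String targetFile, boolean deleteSourceAfterZip) throws IOException {
        Path source = Paths.get(sourceDir);
        try (ZipOutputStream zos = new ZipOutputStream(Files.newOutputStream(Paths.get(targetFile)));
             Stream<Path> walk = Files.walk(source)) {
            // add every regular file under sourceDir, keeping entry names relative to it
            for (Path path : walk.filter(Files::isRegularFile).collect(Collectors.toList())) {
                zos.putNextEntry(new ZipEntry(source.relativize(path).toString().replace('\\', '/')));
                Files.copy(path, zos);
                zos.closeEntry();
            }
        }
        if (deleteSourceAfterZip) {
            // delete children before parents
            try (Stream<Path> walk = Files.walk(source)) {
                walk.sorted(Comparator.reverseOrder()).forEach(p -> p.toFile().delete());
            }
        }
    }
}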

13
io.sc.platform.mvc/src/main/resources/META-INF/platform/plugins/application-properties.json

@@ -80,5 +80,18 @@
"spring.messages.fallbackToSystemLocale = false",
"spring.messages.useCodeAsDefaultMessage = false"
]
},
{
"module" : "io.sc.platform.mvc",
"order": 2700,
"description": "servlet multipart configuration",
"properties": [
"spring.servlet.multipart.enabled = true",
"spring.servlet.multipart.file-size-threshold = 0",
"#spring.servlet.multipart.location = ${java.io.tmpdir}",
"spring.servlet.multipart.max-file-size = -1",
"spring.servlet.multipart.max-request-size = -1",
"spring.servlet.multipart.resolve-lazily = false"
]
}
]
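
These settings lift the default multipart upload limits (a negative max size is conventionally treated as "no limit" by the servlet container), which the new data-import upload depends on. A hypothetical endpoint sketch showing the kind of multi-file upload this enables; the path and parameter names are illustrative and do not appear in this changeset:

package com.example.web; // hypothetical location, for illustration only

import io.sc.platform.jdbc.exporter.support.DataImportConfigure;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

import java.util.List;

@RestController
public class UploadSketchController {

    @PostMapping("/api/jdbc/data/import") // illustrative path
    public String upload(@RequestParam("files") List<MultipartFile> files,
                         @RequestParam(value = "datasource", required = false) String datasource,
                         @RequestParam(value = "deleteFirst", defaultValue = "false") boolean deleteFirst) {
        DataImportConfigure configure = new DataImportConfigure();
        configure.setDatasource(datasource);
        configure.setDeleteFirst(deleteFirst);
        for (MultipartFile file : files) {
            configure.addFile(file);
        }
        // a real controller would hand the configure off to JdbcDataService / ImporterThread
        return configure.toString();
    }
}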