Răsfoiți Sursa

fix: 提交测试类,用于获取资源SQL以及升级版本时的数据集迁移

提交测试类,用于获取资源SQL以及升级版本时的数据集迁移
hong.yang 2 ani în urmă
părinte
comite
6556e22dc7

+ 0 - 2
.gitignore

@@ -277,8 +277,6 @@ dist
 # TODO: where does this rule come from?
 docs/_book
 
-# TODO: where does this rule come from?
-test/
 
 ### Windows template
 # Windows thumbnail cache files

+ 0 - 41
DataRoom/.gitignore

@@ -1,41 +0,0 @@
-HELP.md
-target/
-!.mvn/wrapper/maven-wrapper.jar
-!**/src/main/**/target/
-!**/src/test/**/target/
-
-### STS ###
-.apt_generated
-.classpath
-.factorypath
-.project
-.settings
-.springBeans
-.sts4-cache
-
-### IntelliJ IDEA ###
-.idea
-*.iws
-*.iml
-*.ipr
-
-### NetBeans ###
-/nbproject/private/
-/nbbuild/
-/dist/
-/nbdist/
-/.nb-gradle/
-build/
-!**/src/main/**/build/
-!**/src/test/**/build/
-
-### VS Code ###
-.vscode/
-
-# 忽略配置文件提交
-application-*.yml
-!application-demo.yml
-
-logs
-upload.sh
-

+ 353 - 0
DataRoom/dataroom-server/src/test/java/DataMigrationTest.java

@@ -0,0 +1,353 @@
+import com.alibaba.druid.pool.DruidDataSource;
+import com.gccloud.common.utils.JSON;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * @author hongyang
+ * @version 1.0
+ * @date 2023/6/20 15:29
+ */
+@Slf4j
+@RunWith(SpringRunner.class)
+public class DataMigrationTest {
+
+    /**
+     * Migrates dataset-related data when upgrading versions.
+     * Before running:
+     * 1. execute doc/update.sql
+     * 2. update the database connection info below
+     */
+    @Test
+    public void dataMigration() {
+        // Configure the data source.
+        // NOTE(review): credentials are hard-coded; keep real ones out of version control.
+        DruidDataSource dataSource = new DruidDataSource();
+        // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class name;
+        // Connector/J 8+ uses "com.mysql.cj.jdbc.Driver" — confirm which driver is on the classpath.
+        dataSource.setDriverClassName("com.mysql.jdbc.Driver");
+        dataSource.setUrl("jdbc:mysql://127.0.0.1:3306/dataroom?rewriteBatchedStatements=true&useUnicode=true&characterEncoding=UTF-8");
+        dataSource.setUsername("root");
+        dataSource.setPassword("pwd");
+        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
+        // Migrate data source records
+        log.info("开始处理数据源");
+        handleDataSource(jdbcTemplate);
+        // Migrate category tree records
+        log.info("开始处理分类");
+        handleCategory(jdbcTemplate);
+        // Migrate dataset records
+        log.info("开始处理数据集");
+        handleDataset(jdbcTemplate);
+
+    }
+
+
+    /**
+     * Migrates data source rows from big_screen_datasource_config into ds_datasource,
+     * then renames the legacy source types (TelePG -> PostgreSQL, TeleDB -> Mysql).
+     * @param jdbcTemplate JDBC template bound to the target database
+     */
+    private static void handleDataSource(JdbcTemplate jdbcTemplate) {
+        String migrateSql = "INSERT INTO ds_datasource (id, source_name, source_type, driver_class_name, url, host, port, username, password, module_code, editable, remark, update_date, create_date, del_flag)\n" +
+                "SELECT id, source_name, source_type, driver_class_name, url, host, port, username, password, module_code, editable, remark, update_date, create_date, del_flag\n" +
+                "FROM big_screen_datasource_config where del_flag = 0";
+        jdbcTemplate.execute(migrateSql);
+        String updateSql = "UPDATE ds_datasource SET source_type = 'PostgreSQL' where  source_type = 'TelePG'";
+        jdbcTemplate.execute(updateSql);
+        String updateSql2 = "UPDATE ds_datasource SET source_type = 'Mysql' where  source_type = 'TeleDB'";
+        jdbcTemplate.execute(updateSql2);
+        log.info("数据源数据迁移完成");
+    }
+
+    /**
+     * Template for updating the ids path of a category tree node.
+     */
+    public static final String updateSql = "update ds_category_tree set ids = '%s' where id = '%s'";
+
+    /**
+     * Migrates category tree rows from big_screen_category_tree into ds_category_tree
+     * and rebuilds each node's ids path (comma-separated ancestor id chain).
+     * @param jdbcTemplate JDBC template bound to the target database
+     */
+    private static void handleCategory(JdbcTemplate jdbcTemplate) {
+        String migrateSql = "INSERT INTO ds_category_tree (id, name, parent_id, type, module_code, update_date, create_date, del_flag)\n" +
+                "SELECT id, name, parent_id, table_name, module_code, update_date, create_date, del_flag\n" +
+                "FROM big_screen_category_tree where del_flag = 0";
+        jdbcTemplate.execute(migrateSql);
+        String sql = "select * from ds_category_tree where del_flag = 0";
+        List<Map<String, Object>> categoryList = jdbcTemplate.queryForList(sql);
+        // Assemble a tree by parent_id: attach each child to its parent's "children" list, then build the ids paths.
+        Map<String, Map<String, Object>> categoryMap = Maps.newHashMap();
+        categoryList.forEach(category -> categoryMap.put(category.get("id").toString(), category));
+        // NOTE(review): assumes parent_id is never NULL in the migrated rows — toString() would NPE otherwise.
+        categoryList.forEach(category -> {
+            String parentId = category.get("parent_id").toString();
+            if (StringUtils.isBlank(parentId) || "0".equals(parentId)) {
+                return;
+            }
+            Map<String, Object> parentCategory = categoryMap.get(parentId);
+            if (parentCategory == null) {
+                return;
+            }
+            List<Map<String, Object>> children = (List<Map<String, Object>>) parentCategory.get("children");
+            if (children == null) {
+                children = Lists.newArrayList();
+                parentCategory.put("children", children);
+            }
+            children.add(category);
+        });
+        // Collect the root nodes (no parent, or parent id "0")
+        List<Map<String, Object>> rootCategoryList = categoryList.stream().filter(category -> {
+            String parentId = category.get("parent_id").toString();
+            return StringUtils.isBlank(parentId) || "0".equals(parentId);
+        }).collect(Collectors.toList());
+        // Populate the ids path on every node
+        handleIds(rootCategoryList, "");
+        List<String> updateSqlList = Lists.newArrayList();
+        // Rename the legacy type value
+        updateSqlList.add("update ds_category_tree set type = 'dataset' where type = 'r_dataset'");
+        // Build the per-node update statements
+        getUpdateSql(rootCategoryList, updateSqlList);
+        // Execute the updates in one batch
+        jdbcTemplate.batchUpdate(updateSqlList.toArray(new String[0]));
+        log.info("分类树数据迁移完成");
+
+    }
+
+    /**
+     * Recursively fills in the ids path ("rootId,childId,...") on each node
+     * under the transient "ids" key.
+     * @param categoryList nodes at the current depth (may be null/empty)
+     * @param parentIds ids path of the parent node ("" for roots)
+     */
+    private static void handleIds(List<Map<String, Object>> categoryList, String parentIds) {
+        if (categoryList == null || categoryList.isEmpty()) {
+            return;
+        }
+        categoryList.forEach(category -> {
+            String id = category.get("id").toString();
+            String ids = parentIds + "," + id;
+            // Roots get a bare id with no leading comma
+            if (StringUtils.isBlank(parentIds)) {
+                ids = id;
+            }
+            category.put("ids", ids);
+            List<Map<String, Object>> children = (List<Map<String, Object>>) category.get("children");
+            handleIds(children, ids);
+        });
+
+    }
+
+    /**
+     * Recursively builds the ids-path update statements for the category tree.
+     * @param categoryList nodes at the current depth (may be null/empty)
+     * @param updateSqlList accumulator the statements are appended to
+     */
+    private static void getUpdateSql(List<Map<String, Object>> categoryList, List<String> updateSqlList) {
+        if (categoryList == null || categoryList.isEmpty()) {
+            return;
+        }
+        categoryList.forEach(category -> {
+            String id = category.get("id").toString();
+            String ids = category.get("ids").toString();
+            updateSqlList.add(String.format(updateSql, ids, id));
+            List<Map<String, Object>> children = (List<Map<String, Object>>) category.get("children");
+            getUpdateSql(children, updateSqlList);
+        });
+    }
+
+    /**
+     * Dataset insert statement template.
+     * NOTE(review): values are spliced in via String.format rather than bound parameters;
+     * see getInsertSql for fields that are not escaped.
+     */
+    public static final String insertSql = "INSERT INTO ds_dataset (id, name, code, type_id, remark, dataset_type, module_code, editable, source_id, cache, config) VALUES ('%s', '%s', '%s', %s, '%s', '%s', '%s', %s, %s, %s, '%s');";
+
+    /**
+     * Migrates dataset rows from big_screen_dataset into ds_dataset, converting the
+     * legacy per-type payloads (json / script / original / custom / storedProcedure)
+     * into the new "config" JSON column with the matching config className.
+     * @param jdbcTemplate JDBC template bound to the target database
+     */
+    private static void handleDataset(JdbcTemplate jdbcTemplate) {
+        // Accumulates the generated insert statements
+        List<String> insertSqlList = Lists.newArrayList();
+        // Handle json-type datasets
+        String sql = "select * from big_screen_dataset where dataset_type = 'json' and del_flag = 0";
+        List<Map<String, Object>> jsonDatasetList = jdbcTemplate.queryForList(sql);
+        for (Map<String, Object> dataset : jsonDatasetList) {
+            // NOTE(review): assumes "data" is never NULL for these rows — toString() would NPE otherwise.
+            String data = dataset.get("data").toString();
+            // Parse the legacy payload
+            JSONObject dataJson = JSON.parseObject(data);
+            Object json = dataJson.get("json");
+            JSONObject fieldDesc = dataJson.getJSONObject("fieldDesc");
+            // Copy each fieldDesc entry into a fieldName/fieldDesc map
+            Set<String> keySet = fieldDesc.keySet();
+            List<Map<String, Object>> fieldList = Lists.newArrayList();
+            for (String key : keySet) {
+                Object value = fieldDesc.get(key);
+                Map<String, Object> fieldMap = Maps.newHashMap();
+                fieldMap.put("fieldName", key);
+                fieldMap.put("fieldDesc", value);
+                fieldList.add(fieldMap);
+            }
+            JSONObject jsonConfig = new JSONObject();
+            jsonConfig.put("fieldList", fieldList);
+            String jsonStr = JSON.toJSONString(json);
+            // Escape so the JSON survives embedding in a single-quoted SQL literal
+            jsonConfig.put("json", escape(jsonStr));
+            jsonConfig.put("fieldDesc", fieldDesc);
+            jsonConfig.put("className", "com.gccloud.dataset.entity.config.JsonDataSetConfig");
+            String config = JSON.toJSONString(jsonConfig);
+            String insertSql = getInsertSql(dataset, config);
+            insertSqlList.add(insertSql);
+        }
+        // Handle script-type datasets (migrated to Groovy config)
+        sql = "select * from big_screen_dataset where dataset_type = 'script' and del_flag = 0";
+        List<Map<String, Object>> scriptDatasetList = jdbcTemplate.queryForList(sql);
+        for (Map<String, Object> dataset : scriptDatasetList) {
+            String data = dataset.get("data").toString();
+            // Parse the legacy payload
+            JSONObject dataJson = JSON.parseObject(data);
+            Object script = dataJson.get("script");
+            Object paramsList = dataJson.get("paramsList");
+            JSONObject fieldDesc = dataJson.getJSONObject("fieldDesc");
+            // Copy each fieldDesc entry into a fieldName/fieldDesc map
+            Set<String> keySet = fieldDesc.keySet();
+            List<Map<String, Object>> fieldList = Lists.newArrayList();
+            for (String key : keySet) {
+                Object value = fieldDesc.get(key);
+                Map<String, Object> fieldMap = Maps.newHashMap();
+                fieldMap.put("fieldName", key);
+                fieldMap.put("fieldDesc", value);
+                fieldList.add(fieldMap);
+            }
+            JSONObject jsonConfig = new JSONObject();
+            jsonConfig.put("fieldList", fieldList);
+            jsonConfig.put("script", escape(script.toString()));
+            jsonConfig.put("paramsList", paramsList);
+            jsonConfig.put("fieldDesc", fieldDesc);
+            jsonConfig.put("className", "com.gccloud.dataset.entity.config.GroovyDataSetConfig");
+            String config = JSON.toJSONString(jsonConfig);
+            String insertSql = getInsertSql(dataset, config);
+            insertSqlList.add(insertSql);
+        }
+        // Handle original-type datasets (joined with their big_screen_datasets_original row)
+        sql = "select a.*,b.* from big_screen_dataset a left join big_screen_datasets_original b on a.dataset_rel_id = b.id where a.dataset_rel_id is not null and a.dataset_type = 'original' and a.del_flag = 0 and b.del_flag =0";
+        List<Map<String, Object>> originalDatasetList = jdbcTemplate.queryForList(sql);
+        for (Map<String, Object> dataset : originalDatasetList) {
+            String sourceId = dataset.get("source_id").toString();
+            String tableName = dataset.get("table_name").toString();
+            Object repeatStatus = dataset.get("repeat_status");
+            // NOTE(review): field_desc/field_json are assumed non-NULL here — confirm against the source data.
+            Object fieldDesc = dataset.get("field_desc");
+            JSONObject fieldDescObj = JSON.parseObject(fieldDesc.toString());
+            String fieldInfo = dataset.get("field_info").toString();
+            JSONArray fieldJson = JSON.parseArray(dataset.get("field_json").toString());
+            List<Map<String, Object>> fieldList = Lists.newArrayList();
+            // Rename legacy column keys (columnName/columnType) to the new field keys
+            fieldJson.toList().forEach(field -> {
+                Map<String, Object> fieldMap = Maps.newHashMap();
+                fieldMap.put("fieldName", ((Map) field).get("columnName"));
+                fieldMap.put("fieldType", ((Map) field).get("columnType"));
+                fieldMap.put("orderNum", ((Map) field).get("orderNum"));
+                fieldMap.put("sourceTable", ((Map) field).get("sourceTable"));
+                fieldMap.put("fieldDesc", ((Map) field).get("fieldDesc"));
+                fieldList.add(fieldMap);
+            });
+            JSONObject jsonConfig = new JSONObject();
+            jsonConfig.put("sourceId", sourceId);
+            jsonConfig.put("tableName", tableName);
+            jsonConfig.put("repeatStatus", repeatStatus);
+            jsonConfig.put("fieldDesc", fieldDescObj);
+            jsonConfig.put("fieldInfo", fieldInfo);
+            jsonConfig.put("fieldList", fieldList);
+            jsonConfig.put("className", "com.gccloud.dataset.entity.config.OriginalDataSetConfig");
+            String config = JSON.toJSONString(jsonConfig);
+            String insertSql = getInsertSql(dataset, config);
+            insertSqlList.add(insertSql);
+        }
+        // Handle custom and storedProcedure datasets (joined with big_screen_datasets_custom)
+        sql = "select a.*, b.*\n" +
+                "from big_screen_dataset a\n" +
+                "         left join big_screen_datasets_custom b on a.dataset_rel_id = b.id\n" +
+                "where a.dataset_rel_id is not null\n" +
+                "  and ( a.dataset_type = 'storedProcedure' or a.dataset_type = 'custom')\n" +
+                "  and a.del_flag = 0\n" +
+                "  and b.del_flag = 0\n";
+        List<Map<String, Object>> customDatasetList = jdbcTemplate.queryForList(sql);
+        for (Map<String, Object> dataset : customDatasetList) {
+            String sourceId = dataset.get("source_id").toString();
+            String sqlProcess = dataset.get("sql_process").toString();
+            Object fieldDesc = dataset.get("field_desc");
+            JSONObject fieldDescObj = JSON.parseObject(fieldDesc.toString());
+            Object paramList = dataset.get("param_config");
+            JSONArray fieldJson = JSON.parseArray(dataset.get("field_json").toString());
+            List<Map<String, Object>> fieldList = Lists.newArrayList();
+            // Rename legacy column keys (columnName/columnType) to the new field keys
+            fieldJson.toList().forEach(field -> {
+                Map<String, Object> fieldMap = Maps.newHashMap();
+                fieldMap.put("fieldName", ((Map) field).get("columnName"));
+                fieldMap.put("fieldType", ((Map) field).get("columnType"));
+                fieldMap.put("orderNum", ((Map) field).get("orderNum"));
+                fieldMap.put("sourceTable", ((Map) field).get("sourceTable"));
+                fieldMap.put("fieldDesc", ((Map) field).get("fieldDesc"));
+                fieldList.add(fieldMap);
+            });
+            JSONObject jsonConfig = new JSONObject();
+            jsonConfig.put("sourceId", sourceId);
+            jsonConfig.put("sqlProcess", escape(sqlProcess));
+            jsonConfig.put("fieldDesc", fieldDescObj);
+            jsonConfig.put("fieldList", fieldList);
+            JSONArray paramsList = new JSONArray();
+            // NOTE(review): paramList.toString() NPEs when param_config is NULL — confirm the column is NOT NULL.
+            if (StringUtils.isNotBlank(paramList.toString())) {
+                paramsList = JSON.parseArray(paramList.toString());
+            }
+            jsonConfig.put("paramsList", paramsList);
+            if (dataset.get("dataset_type").toString().equals("storedProcedure")) {
+                jsonConfig.put("className", "com.gccloud.dataset.entity.config.StoredProcedureDataSetConfig");
+            } else {
+                jsonConfig.put("className", "com.gccloud.dataset.entity.config.CustomDataSetConfig");
+            }
+            String config = JSON.toJSONString(jsonConfig);
+            String insertSql = getInsertSql(dataset, config);
+            insertSqlList.add(insertSql);
+        }
+        // Execute the inserts (one statement at a time, not batched)
+        insertSqlList.forEach(jdbcTemplate::execute);
+        log.info("数据集配置迁移完成");
+    }
+
+    /**
+     * Builds the INSERT statement for one migrated dataset row.
+     * @param dataset joined source row from big_screen_dataset
+     * @param config serialized config JSON (already escaped by the caller)
+     * @return formatted INSERT statement for ds_dataset
+     */
+    private static String getInsertSql(Map<String, Object> dataset, String config) {
+        String id = dataset.get("id").toString();
+        // NOTE(review): name and remark are NOT passed through escape(); a single quote in
+        // either field will break the generated SQL — consider escaping them as well.
+        String name = dataset.get("name").toString();
+        String type_id = dataset.get("type_id") == null ? "null" : dataset.get("type_id").toString();
+        // NOTE(review): remark.toString() NPEs when remark is NULL in the source row — confirm the data.
+        String remark = dataset.get("remark").toString();
+        String dataset_type = dataset.get("dataset_type").toString();
+        String module_code = "";
+        String editable = dataset.get("editable").toString();
+        String source_id = dataset.get("source_id") == null ? "null" : dataset.get("source_id").toString();
+        String code = "";
+        String cache = "0";
+        return String.format(insertSql, id, name, code, type_id, remark, dataset_type, module_code, editable, source_id, cache, config);
+    }
+
+    /**
+     * Escapes backslashes, quotes and control characters so the string can be
+     * embedded inside a single-quoted MySQL string literal.
+     * @param str raw string (must not be null)
+     * @return escaped string
+     */
+    private static String escape(String str) {
+        // Backslash first, so the escapes added below are not double-escaped
+        str = str.replace("\\", "\\\\");
+        str = str.replace("'", "\\'");
+        str = str.replace("\"", "\\\"");
+        str = str.replace("\n", "\\n");
+        str = str.replace("\r", "\\r");
+        str = str.replace("\t", "\\t");
+        return str;
+    }
+}

+ 175 - 0
DataRoom/dataroom-server/src/test/java/GetResourceSqlTest.java

@@ -0,0 +1,175 @@
+import com.baomidou.mybatisplus.core.toolkit.IdWorker;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * 用于批量生成资源文件的sql
+ * @author hongyang
+ * @version 1.0
+ * @date 2023/5/30 13:55
+ */
+@Slf4j
+@RunWith(SpringRunner.class)
+public class GetResourceSqlTest {
+
+    /**
+     * Folder whose contents will be processed.
+     */
+    public static final String FOLDER_PATH = "/Users/liuchengbiao/Desktop/大屏资源";
+
+    /**
+     * Where the server stores files, i.e. gc.file.basePath in the config file.
+     */
+    public static final String FILE_BASE_PATH = "/root/bigscreen/file";
+
+    /**
+     * Static resource URL prefix, i.e. gc.file.urlPrefix in the config file.
+     */
+    public static final String FILE_URL_PREFIX = "http://gcpaas.gccloud.com/bigScreenServer/static";
+
+    /**
+     * Group code the files belong to by default.
+     */
+    public static final String FILE_GROUP_CODE = "other";
+
+    /**
+     * Walks FOLDER_PATH, copies every file into a renamed sibling folder, and
+     * generates INSERT statements for big_screen_type (one per top-level directory)
+     * and big_screen_file (one per file) into FOLDER_PATH/big_screen_file.sql.
+     */
+    @Test
+    public void getResourceSql() {
+        List<String> sqlList = new ArrayList<>();
+
+        // Folder to process
+        File folder = new File(FOLDER_PATH);
+
+        if (!folder.exists() || !folder.isDirectory()) {
+            log.error("文件夹不存在");
+            return;
+        }
+        File[] subFiles = folder.listFiles();
+        if (subFiles == null) {
+            log.error("文件夹为空");
+            return;
+        }
+        for (File subFile : subFiles) {
+            String typeCode = FILE_GROUP_CODE;
+            if (subFile.isDirectory()) {
+                // Each top-level directory becomes a resource group named after it
+                String folderName = subFile.getName();
+                // Random lower-case group code
+                typeCode = RandomStringUtils.randomAlphanumeric(10).toLowerCase();
+                // Creation timestamp
+                String currentDate = getCurrentDateTime();
+                String insertTypeSql = "INSERT INTO big_screen_type (name, code, type, order_num, update_date, create_date, del_flag) VALUES ('%s', '%s', '%s', %s, '%s', '%s', %s);";
+                String insertTypeSqlFormat = String.format(insertTypeSql, folderName, typeCode, "resourceCatalog", 0, currentDate, currentDate, 0);
+                sqlList.add("# 分组");
+                sqlList.add(insertTypeSqlFormat);
+                sqlList.add("# 资源");
+            }
+            handleFile(subFile, "", sqlList, typeCode);
+        }
+        // Write the generated statements to a file
+        String sql = String.join("\n", sqlList);
+        String fileName = "big_screen_file.sql";
+        String filePath = FOLDER_PATH + "/" + fileName;
+        // Write file
+        try {
+            FileUtils.write(new File(filePath), sql, "UTF-8");
+        } catch (Exception e) {
+            log.error("写入sql文件失败");
+            log.error(ExceptionUtils.getStackTrace(e));
+        }
+        log.info("sql生成到文件:{}", FOLDER_PATH + "/big_screen_file.sql");
+        log.info("重命名后的文件路径:{}", FOLDER_PATH + "_重命名");
+    }
+
+
+    /**
+     * Processes one file or directory recursively: copies each regular file to the
+     * renamed output folder under a snowflake-id name and appends a big_screen_file
+     * INSERT statement to sqlList.
+     * @param file file or directory
+     * @param relativePath path relative to FOLDER_PATH ("" at the top level)
+     * @param sqlList accumulator for the generated statements
+     * @param typeCode group code the generated rows belong to
+     */
+    private static void handleFile(File file, String relativePath, List<String> sqlList, String typeCode) {
+        if (file.isDirectory()) {
+            File[] files = file.listFiles();
+            if (files == null) {
+                return;
+            }
+            for (File subFile : files) {
+                // Extend the relative path with this directory's name before recursing
+                String subRelativePath = relativePath + "/" + file.getName();
+                handleFile(subFile, subRelativePath, sqlList, typeCode);
+            }
+            return;
+        }
+        // Original file name
+        String originalName = file.getName();
+        // File extension (without the dot)
+        String extension = getFileExtension(originalName);
+        // New snowflake-id based file name
+        String newFileName = IdWorker.getIdStr()+ "." + extension;
+        // Destination path under the "<FOLDER_PATH>_重命名" output folder
+        String newPath = FOLDER_PATH + "_重命名" + relativePath + "/" + newFileName;
+        // Copy the file
+        Path sourcePath = file.toPath();
+        Path targetPath = new File(newPath).toPath();
+        try {
+            // Ensure parent directories exist before copying
+            Files.createDirectories(targetPath.getParent());
+            Files.copy(sourcePath, targetPath, StandardCopyOption.REPLACE_EXISTING);
+        } catch (Exception e) {
+            log.error(ExceptionUtils.getStackTrace(e));
+        }
+        // Path on the server.
+        // NOTE(review): this omits newFileName while url includes it — confirm whether
+        // big_screen_file.path should be the containing directory or the full file path.
+        String path = FILE_BASE_PATH + relativePath;
+        // Public URL of the file
+        String url = FILE_URL_PREFIX + relativePath + "/" + newFileName;
+        // Normalize any backslashes (Windows paths)
+        url = url.replace("\\", "/");
+        // File size in bytes
+        long size = file.length();
+        // Creation timestamp
+        String currentDate = getCurrentDateTime();
+        // Build the insert statement
+        String sql = String.format("INSERT INTO big_screen_file (module, original_name, new_name, extension, path, url, size, download_count, create_date, update_date, del_flag) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', %d, %d, '%s', '%s', %d);",
+                typeCode, originalName, newFileName, extension, path, url, size, 0, currentDate, currentDate, 0);
+        sqlList.add(sql);
+    }
+
+
+    /**
+     * Returns the extension of a file name (text after the last dot), or ""
+     * when there is no dot, the name starts with a dot, or it ends with a dot.
+     * @param fileName file name to inspect
+     * @return extension without the leading dot, possibly ""
+     */
+    private static String getFileExtension(String fileName) {
+        int dotIndex = fileName.lastIndexOf('.');
+        if (dotIndex > 0 && dotIndex < fileName.length() - 1) {
+            return fileName.substring(dotIndex + 1);
+        }
+        return "";
+    }
+
+
+    /**
+     * Returns the current time formatted as "yyyy-MM-dd HH:mm:ss".
+     * A fresh SimpleDateFormat is created per call, so this is thread-safe.
+     * @return formatted current timestamp
+     */
+    private static String getCurrentDateTime() {
+        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+        Date currentDate = new Date();
+        return dateFormat.format(currentDate);
+    }
+}
+

+ 60 - 2
DataRoom/doc/update.sql

@@ -1,2 +1,60 @@
-UPDATE big_screen_page SET config = REPLACE(config, '"className":"com.gccloud.bigscreen', '"className":"com.gccloud.dataroom') WHERE type = 'bigScreen';
-UPDATE big_screen_page SET config = REPLACE(config, '"className":"com.gccloud.dataroom.core.module.manage.dto.BigScreenPageDTO', '"className":"com.gccloud.dataroom.core.module.manage.dto.DataRoomPageDTO') WHERE type = 'bigScreen';
+
+-- Update class names in big-screen page configs (bigscreen -> dataroom packages)
+UPDATE big_screen_page SET config = REPLACE(config, '"className":"com.gccloud.bigscreen', '"className":"com.gccloud.dataroom');
+UPDATE big_screen_page SET config = REPLACE(config, '"className":"com.gccloud.dataroom.core.module.manage.dto.BigScreenPageDTO', '"className":"com.gccloud.dataroom.core.module.manage.dto.DataRoomPageDTO');
+
+-- Create the new dataset-related tables (DROP first: re-runnable, but destructive on existing data)
+
+DROP TABLE IF EXISTS `ds_category_tree`;
+CREATE TABLE `ds_category_tree` (
+  `id` bigint(64) NOT NULL AUTO_INCREMENT COMMENT '主键',
+  `ids` text COMMENT 'id序列',
+  `name` varchar(255) DEFAULT NULL COMMENT '名称',
+  `parent_id` bigint(64) DEFAULT NULL COMMENT '父级ID',
+  `type` varchar(255) NOT NULL,
+  `module_code` varchar(255) DEFAULT NULL,
+  `update_date` timestamp                        NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
+  `create_date` timestamp                        NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
+  `del_flag` tinyint(2) NOT NULL DEFAULT '0' COMMENT '删除标识',
+  PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='数据集种类树';
+
+
+DROP TABLE IF EXISTS `ds_datasource`;
+CREATE TABLE `ds_datasource` (
+  `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
+  `source_name` varchar(255) DEFAULT NULL COMMENT '数据源名称',
+  `source_type` varchar(255) DEFAULT NULL COMMENT '数据源类型',
+  `driver_class_name` varchar(255) DEFAULT NULL COMMENT '连接驱动',
+  `url` varchar(255) DEFAULT NULL COMMENT '连接url',
+  `host` varchar(255) DEFAULT NULL COMMENT '主机',
+  `port` int(16) DEFAULT NULL COMMENT '端口',
+  `username` varchar(255) DEFAULT NULL COMMENT '用户名',
+  `password` text COMMENT '密码',
+  `module_code` varchar(255) DEFAULT NULL COMMENT '模块编码',
+  `editable` tinyint(2) DEFAULT '0' COMMENT '是否可编辑,0 不可编辑 1 可编辑',
+  `remark` varchar(255) DEFAULT NULL,
+  `update_date` timestamp                        NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
+  `create_date` timestamp                        NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
+  `del_flag` tinyint(2) NOT NULL DEFAULT '0' COMMENT '删除标识',
+  PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB  DEFAULT CHARSET=utf8 COMMENT='数据源配置表';
+
+DROP TABLE IF EXISTS `ds_dataset`;
+CREATE TABLE `ds_dataset` (
+  `id` bigint(32) NOT NULL AUTO_INCREMENT COMMENT '主键',
+  `name` varchar(255) CHARACTER SET utf8 DEFAULT NULL COMMENT '名称',
+  `code` varchar(255) CHARACTER SET utf8 DEFAULT NULL COMMENT '编码',
+  `type_id` varchar(255) DEFAULT NULL COMMENT '种类ID',
+  `remark` text CHARACTER SET utf8 COMMENT '描述',
+  `dataset_type` varchar(64) CHARACTER SET utf8 NOT NULL COMMENT '数据集类型(自定义数据集 custom、模型数据集model、原始数据集original、API数据集api、JSON数据集 json)',
+  `module_code` varchar(255) COLLATE utf8_general_mysql500_ci DEFAULT NULL COMMENT '模块编码',
+  `editable` tinyint(2) NOT NULL DEFAULT '0' COMMENT '是否可编辑,0 不可编辑 1 可编辑',
+  `source_id` bigint(32) DEFAULT NULL COMMENT '数据源ID',
+  `cache` tinyint(1) DEFAULT 0 NOT NULL COMMENT '是否对执行结构缓存 0 不缓存 1 缓存',
+  `config` longtext COMMENT '数据集配置',
+  `update_date` timestamp                        NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
+  `create_date` timestamp                        NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
+  `del_flag` tinyint(2) NOT NULL DEFAULT '0' COMMENT '删除标识',
+  PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB   DEFAULT CHARSET=utf8 COLLATE=utf8_general_mysql500_ci COMMENT='数据集表';