|
@@ -3,10 +3,11 @@ const router = express.Router()
|
|
|
const utils = require('../../utils/index.js')
|
|
|
const pools = require('../../utils/pools.js')
|
|
|
const xlsx = require('node-xlsx')
|
|
|
+const fs = require('fs')
|
|
|
const path = require('path')
|
|
|
const fileEvent = require('../../utils/file')
|
|
|
-const execTrans = require('../../utils/dbHelper.js')
|
|
|
-const getNewSqlParamEntity = require('../../utils/tool.js')
|
|
|
+const execTransection = require('../../utils/dbHelper.js')
|
|
|
+const datetimeToTime = require('../../utils/tool.js')
|
|
|
const uuid = require('uuid')
|
|
|
// 增加数据集
|
|
|
router.post('/addDataset', async (req, res) => {
|
|
@@ -66,14 +67,22 @@ router.post('/delDataset', async (req, res) => {
|
|
|
|
|
|
// 查询导入记录
|
|
|
router.post('/getImportRecord', async (req, res) => {
|
|
|
- let user = await utils.getUserInfo({ req, res }),
|
|
|
- obj = req.body
|
|
|
- let sql = `SELECT id,name,importer_id,time,data_volume FROM file_import_t WHERE 1=1` // WHERE 1=1是一个始终为真的条件,后续动态添加查询条件的起点
|
|
|
- sql = utils.setLike(sql, 'name', obj.name) // 添加一个基于name字段的模糊匹配条件,匹配的值来自obj.name
|
|
|
+ let user = await utils.getUserInfo({ req, res })
|
|
|
+ obj = req.body
|
|
|
+ console.log('req.body', req.body)
|
|
|
+ let sql = `SELECT
|
|
|
+ f.id,
|
|
|
+ f.name,
|
|
|
+ (SELECT name from user u where f.importer_id = u.id) as importerName,
|
|
|
+ f.time,
|
|
|
+ f.data_volume
|
|
|
+ FROM
|
|
|
+ file_import_t f WHERE f.dataset_id = ?`
|
|
|
+ sql = utils.setLike(sql, 'name', obj.name)
|
|
|
sql = utils.setMoreId(sql, user)
|
|
|
- let { total } = await utils.getSum({ sql, name: 'file_import_t', res, req }) // 执行SQL查询并获取结果中的总数
|
|
|
- sql = utils.pageSize(sql, obj.page, obj.size) // 函数来修改SQL语句,添加分页功能。它使用obj.page和obj.size来确定返回结果的范围。
|
|
|
- let { result } = await pools({ sql, res, req })
|
|
|
+ let { total } = await utils.getSumWhere({ sql: '', val: [obj.dataset_id], name: 'file_import_t f', res, req })
|
|
|
+ sql = utils.pageSize(sql, obj.page, obj.size)
|
|
|
+ let { result } = await pools({ sql, val: [obj.dataset_id], req })
|
|
|
res.send(utils.returnData({ data: result, total }))
|
|
|
})
|
|
|
|
|
@@ -81,46 +90,69 @@ router.post('/getImportRecord', async (req, res) => {
|
|
|
router.post('/getDataList', async (req, res) => {
|
|
|
let user = await utils.getUserInfo({ req, res }),
|
|
|
obj = req.body
|
|
|
- let sql = `SELECT id,text,is_mark,state FROM mark_data_t WHERE 1=1` // WHERE 1=1是一个始终为真的条件,后续动态添加查询条件的起点
|
|
|
- sql = utils.setLike(sql, 'text', obj.text) // 添加一个基于name字段的模糊匹配条件,匹配的值来自obj.name
|
|
|
+ let sql = `SELECT id,text,is_mark,state FROM mark_data_t WHERE 1=1`
|
|
|
+ sql = utils.setLike(sql, 'text', obj.text)
|
|
|
sql = utils.setMoreId(sql, user)
|
|
|
- let { total } = await utils.getSum({ sql, name: 'mark_data_t', res, req }) // 执行SQL查询并获取结果中的总数
|
|
|
- sql = utils.pageSize(sql, obj.page, obj.size) // 函数来修改SQL语句,添加分页功能。它使用obj.page和obj.size来确定返回结果的范围。
|
|
|
+ let { total } = await utils.getSum({ sql, name: 'mark_data_t', res, req })
|
|
|
+ sql = utils.pageSize(sql, obj.page, obj.size)
|
|
|
let { result } = await pools({ sql, res, req })
|
|
|
res.send(utils.returnData({ data: result, total }))
|
|
|
})
|
|
|
|
|
|
// 导入数据
|
|
|
router.post('/importData', async (req, res) => {
|
|
|
- var sqlParamsEntity = []
|
|
|
- let fileImportSql = 'INSERT INTO file_import_t(`id`,`name`,`importer_id`,`data_volume`,`dataset_id`) VALUES ?'
|
|
|
- // let sql1 = 'INSERT INTO mark_data_t(`text`,`file_id`,`time`,`data_volume`,`dataset_id`) VALUES ?'
|
|
|
let fileArr = await fileEvent(req, res)
|
|
|
+ let filename = fileArr[0].filename
|
|
|
+ const fileType = req.files[0].mimetype
|
|
|
+ let list
|
|
|
+ //配置获取文件路径
|
|
|
+ if (fileType.includes('text')) {
|
|
|
+ const path = `public/uploadFile/${filename}`
|
|
|
+ fs.readFile(path, 'utf-8', (err, data) => {
|
|
|
+ if (err) {
|
|
|
+ return res.send(utils.returnData({ code: -1, msg: '文件解析失败' }))
|
|
|
+ }
|
|
|
+ const data1 = data.split(/\r?\n/)
|
|
|
+ list = data1.map((line) => [line])
|
|
|
+ writeToDatabase(fileArr, list, req, res)
|
|
|
+ })
|
|
|
+ } else {
|
|
|
+ let xlsxRes = xlsx.parse(`${path.join(__dirname, '../../', 'public/uploadFile/')}${filename}`, { cellDates: true })
|
|
|
+ list = xlsxRes[0].data
|
|
|
+ list.splice(0, 1)
|
|
|
+ writeToDatabase(fileArr, list, req, res)
|
|
|
+ }
|
|
|
+})
|
|
|
+const writeToDatabase = (fileArr, list, req, res) => {
|
|
|
+ let sqlParamsEntity = []
|
|
|
+ let fileImportSql = 'INSERT INTO file_import_t(`id`,`name`,`importer_id`,`time`,`data_volume`,`dataset_id`) VALUES (?,?,?,?,?,?)'
|
|
|
let params = JSON.parse(fileArr[0].params.listType)
|
|
|
const uniqueId = uuid.v4()
|
|
|
- console.log('fileArr', fileArr)
|
|
|
- let filename = fileArr[0].filename
|
|
|
let name = fileArr[0].originalname
|
|
|
- //配置获取文件路径
|
|
|
- let xlsxRes = xlsx.parse(`${path.join(__dirname, '../../', 'public/uploadFile/')}${filename}`, { cellDates: true })
|
|
|
- let list = xlsxRes[0].data
|
|
|
- list.splice(0, 1)
|
|
|
- var fileImportParam = { id: uniqueId, name, importer_id: params.importer_id, data_volume: list.length, dataset_id: params.dataset_id }
|
|
|
- sqlParamsEntity.push(getNewSqlParamEntity({ sql: fileImportSql, fileImportParam }))
|
|
|
- let markDataSql = 'INSERT INTO file_import_t(text,`file_id`,`dataset_id`,is_mark,state) VALUES ?'
|
|
|
+ let { importer_id: importerId, dataset_id: datasetId } = params
|
|
|
+ let currentTime = datetimeToTime()
|
|
|
+ let dataVolume = list.length
|
|
|
+ var fileImportParam = [uniqueId, name, importerId, currentTime, dataVolume, datasetId]
|
|
|
+ sqlParamsEntity.push({
|
|
|
+ sql: fileImportSql,
|
|
|
+ values: fileImportParam
|
|
|
+ })
|
|
|
+ let markDataSql = 'INSERT INTO mark_data_t(text,`file_id`,`dataset_id`,is_mark,state) VALUES (?,?,?,?,?)'
|
|
|
+ console.log('list.length', list.length)
|
|
|
for (let i = 0; i < list.length; i++) {
|
|
|
let item = list[i]
|
|
|
- console.log('item', item)
|
|
|
- // var markDataParams = { id: uniqueId, name, importer_id: params.importer_id, data_volume: list.length, dataset_id: params.dataset_id }
|
|
|
- // sqlParamsEntity.push(getNewSqlParamEntity({ sql: fileImportSql, fileImportParam }))
|
|
|
+ var markDataParams = [item[0], uniqueId, datasetId, true, false]
|
|
|
+ sqlParamsEntity.push({
|
|
|
+ sql: markDataSql,
|
|
|
+ values: markDataParams
|
|
|
+ })
|
|
|
}
|
|
|
- // await pools({sql,val:[list],run:false,res,req,msg:"请确认文档导入值没有问题!!!"});
|
|
|
- execTrans(sqlParamsEntity, function (err, info) {
|
|
|
- if (err) {
|
|
|
- console.error('事务执行失败', err)
|
|
|
- } else {
|
|
|
- console.log('done.')
|
|
|
- }
|
|
|
- })
|
|
|
-})
|
|
|
+ execTransection(sqlParamsEntity)
|
|
|
+ .then((resp) => {
|
|
|
+ res.send(utils.returnData({ code: 1, msg: '导入成功', data: [] }))
|
|
|
+ })
|
|
|
+ .catch((err) => {
|
|
|
+ res.send(utils.returnData({ code: -1, msg: '导入失败', err, req }))
|
|
|
+ })
|
|
|
+}
|
|
|
module.exports = router
|