const path = require('path')

// Load environment variables before requiring any module that may read
// process.env at require time (e.g. S3Uploader or the database pool).
const envFile = process.argv[3] === 'test' ? './.env.test' : './.env'
require('dotenv').config({ path: path.resolve(__dirname, envFile) })

const fs = require('fs')
const util = require('util')
const exec = util.promisify(require('child_process').exec)
const S3Uploader = require('./classes/S3Uploader')
const database = require('./models/database')
const {
  CURRENT_YEAR,
  MONTH
} = require('./utils/constants')
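// Parse a seed CSV into { [tableName]: rows }, where the table name is the
// file name without its .csv extension. This is a deliberately naive parser:
// it assumes the first line is a header row and that no value contains a
// comma or a quoted field.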
const getDataFromCSV = (csvFilePath) => {
const content = fs.readFileSync(
csvFilePath,
{ encoding: 'utf-8', flag: 'r' }
)
  // Skip blank lines (e.g. a trailing newline at end of file) and handle
  // both LF and CRLF line endings.
  const lines = content.split(/\r?\n/).filter((line) => line.trim() !== '')
  const columns = lines[0].split(',')
  const dataArr = []
  for (let i = 1; i < lines.length; i++) {
    const values = lines[i].split(',')
    if (columns.length !== values.length) {
      console.error(`column count does not match value count in ${path.basename(csvFilePath)}`)
      return []
    }
    const dataObj = {}
    for (let j = 0; j < columns.length; j++) {
      dataObj[columns[j].trim()] = values[j].trim()
    }
    dataArr.push(dataObj)
  }
const tableName = path.basename(csvFilePath).replace('.csv', '')
const data = {}
data[tableName] = dataArr
return data
}
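// Insert every row of every table in `data` and collect the generated ids.
// Values from the CSV are interpolated verbatim, so string values must
// already be quoted as SQL literals in the CSV. That is acceptable for
// trusted seed data only; never build queries this way from user input.
// Assumes database.query resolves with a mysql2-style result object
// (exposing insertId and affectedRows).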
const insertData = async (database, data) => {
const insertIds = []
for (const [tableName, rows] of Object.entries(data)) {
for (let i = 0; i < rows.length; i++) {
const columns = Object.keys(rows[i])
const values = Object.values(rows[i])
const rawQuery = `
INSERT INTO ${tableName}
(${columns.join(',')})
VALUES
(${values.join(',')});`
const { insertId } = await database.query(rawQuery)
insertIds.push(insertId)
}
    console.log(`data inserted into ${tableName}`)
}
return insertIds
}
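// Resolve a seed CSV file name to its absolute path under db/data.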
const getCSVFilePath = (csvFileName) => {
const dirPath = path.resolve(__dirname, 'db', 'data')
const filePath = path.resolve(dirPath, csvFileName)
return filePath
}
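// Recreate the schema from scratch: `dbmate drop` removes the database and
// `dbmate up` re-applies every migration. Assumes the dbmate CLI is
// installed and configured (by default it reads DATABASE_URL from the
// environment).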
const initDBmate = async () => {
await exec('dbmate drop')
await exec('dbmate up')
  console.log('dbmate: schema dropped and migrations re-applied.')
}
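// Parse process.argv[2] of the form key=value (e.g. `dbmate=true`) into an
// object; returns undefined when no argument is given.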
const getSecondArgv = () => {
  const secondArgv = process.argv[2]
  if (secondArgv === undefined) return
  const [key, value] = secondArgv.split('=')
  return { [key]: value }
}
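// Thin wrapper around S3Uploader for uploading a local image file.
// Returns whatever object URL the uploader reports back.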
const uploadImage = async (directory, hotelId, fileName, tableName) => {
const s3Uploader = new S3Uploader()
const s3ObjectUrl = await s3Uploader.uploadLocalFile(directory, hotelId, fileName, tableName)
return s3ObjectUrl
}
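// Seed room availability: one room_status row per calendar day of
// CURRENT_YEAR for each newly inserted room id. Assumes MONTH (from
// ./utils/constants) lists 1-indexed month numbers.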
const insertRoomStatus = async (insertIds) => {
  console.log('insertRoomStatus')
  for (const roomId of insertIds) {
    const year = CURRENT_YEAR
    for (const m of MONTH) {
      // Day 0 of the next month is the last day of month m (1-indexed),
      // so getDate() yields the number of days in that month.
      const days = new Date(year, m, 0).getDate()
      for (let day = 1; day <= days; day++) {
        // Zero-pad the date parts without mutating the loop counter.
        const date = `${year}-${String(m).padStart(2, '0')}-${String(day).padStart(2, '0')}`
        const rawQuery = (
          `INSERT INTO
          room_status (room_id, date)
          VALUES (${roomId}, '${date}');`
        )
        const { affectedRows } = await database.query(rawQuery)
        if (!affectedRows) throw new Error('FAIL_TO_INSERT_ROOM_STATUS')
      }
    }
  }
}
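// Entry point. Usage: node <this file> [dbmate=true] [test]
//   - argv[2] `dbmate=true` drops and rebuilds the schema first
//   - argv[3] `test` loads .env.test instead of .env (see top of file)
// Seeds each CSV in dependency order (parent tables before children),
// optionally uploads hotel images to S3, and creates room_status rows for
// every seeded room.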
const initDB = async (imageUpload) => {
  const secondArgv = getSecondArgv()
  if (secondArgv !== undefined && secondArgv.dbmate === 'true') {
    await initDBmate()
  }
  try {
    await database.initialize()
    const files = [
      'users.csv',
      'hotel_areas.csv',
      'hotels.csv',
      'hotel_images.csv',
      'rooms.csv',
      'convenients.csv',
      'hotel_convenient.csv'
    ]
    for (const file of files) {
      const data = getDataFromCSV(getCSVFilePath(file))
      switch (file) {
        case 'hotel_images.csv': {
          // Only hit S3 when the caller asks for it.
          if (imageUpload) {
            for (const hotelImage of data.hotel_images) {
              const hotelId = hotelImage.hotel_id
              // URLs are stored in the CSV as quoted SQL literals; strip the
              // quotes before taking the file name from the path.
              const parts = hotelImage.url.replaceAll('\'', '').split('/')
              const fileName = parts[parts.length - 1]
              await uploadImage('public/images/hotels', hotelId, fileName)
            }
          }
          await insertData(database, data)
          break
        }
        case 'rooms.csv': {
          const insertIds = await insertData(database, data)
          await insertRoomStatus(insertIds)
          break
        }
        default:
          await insertData(database, data)
      }
    }
  } catch (err) {
    console.error(err)
  } finally {
    await database.destroy()
  }
}
// Run the seeder only when this file is executed directly, not when it is
// required as a module. (`initDB(imageUpload = false)` would have assigned
// to an implicit global; pass the flag positionally instead.)
if (require.main === module) {
  initDB(false)
}
module.exports = {
initDB
}