import * as XLSX from 'xlsx'; console.log(XLSX.version); const bookType: string = "xlsb"; const fn: string = "sheetjsfbox." + bookType const sn: string = "SheetJSFBox"; const aoa: any[][] = [ ["Sheet", "JS"], ["Fuse", "Box"], [72, 62] ]; var wb: XLSX.WorkBook = XLSX.utils.book_new(); var ws: XLSX.WorkSheet = XLSX.utils.aoa_to_sheet(aoa); XLSX.utils.book_append_sheet(wb, ws, sn); var payload: string = ""; var w2: XLSX.WorkBook; if(typeof process != 'undefined' && process.versions && process.versions.node) { /* server */ XLSX.writeFile(wb, fn); w2 = XLSX.readFile(fn) } else { /* client */ payload = XLSX.write(wb, {bookType: "xlsb", type:"binary"}); w2 = XLSX.read(payload, {type:"binary"}); } var s2: XLSX.WorkSheet = w2.Sheets[sn]; console.log(XLSX.utils.sheet_to_csv(s2));
}; // NOTE(review): closes an options object (presumably read_opts) declared before this chunk — confirm upstream.
// Writing options shared by every writeFile/write call below.
// `type` is deliberately mutated before each call to demonstrate the different output forms.
const write_opts: XLSX.WritingOptions = {
  type: "buffer",
  cellDates: false,     // keep dates as serial numbers rather than Date objects
  bookSST: false,       // do not build a shared string table
  bookType: "xlsx",
  sheet: "Sheet1",      // sheet name used by single-sheet formats
  compression: false,
  Props: { Author: "Someone", Company: "SheetJS LLC" }
};
// Round trip: read a legacy .xls file, rewrite it as .xlsx.
const wb1 = XLSX.readFile("sheetjs.xls", read_opts);
XLSX.writeFile(wb1, "sheetjs.new.xlsx", write_opts);
// Parse CSV text supplied as a binary string, then serialize back to a binary string.
read_opts.type = "binary";
const wb2 = XLSX.read("1,2,3\n4,5,6", read_opts);
write_opts.type = "binary";
const out2 = XLSX.write(wb2, write_opts);
// Read a Node Buffer from disk, then emit base64 and array views of the same workbook.
read_opts.type = "buffer";
const wb3 = XLSX.read(fs.readFileSync("sheetjs.xlsx"), read_opts);
write_opts.type = "base64";
const out3 = XLSX.write(wb3, write_opts);
write_opts.type = "array";
const out4 = XLSX.write(wb3, write_opts);
const ws1 = XLSX.utils.aoa_to_sheet([ // NOTE(review): call continues past this chunk — argument list truncated here.
/** * Read excel file and save it into memory. * @param {string} register - Path to the register file. */ function importRegister(register: string) { const workbook = xlxs.readFile(register) const firstSheet = workbook.SheetNames[0] const worksheet = workbook.Sheets[firstSheet] return worksheet }
/**
 * Read the workbook from disk and cache it on the instance.
 * @param file - Path to an xlsx file, passed directly to XLSX.readFile.
 */
constructor(file: string) {
  this._book = XLSX.readFile(file);
}
/**
 * Import the V-Dem codebook spreadsheet into the database.
 *
 * Reads the first sheet of CODEBOOK_FILE, converts it to CSV, parses it into
 * variable metadata rows, disambiguates duplicate indicator codes, then — in a
 * single transaction — drops any existing 'vdem' dataset and re-inserts the
 * dataset, one source per variable, and one variable row per codebook entry.
 *
 * NOTE(review): relies on module-level `XLSX`, `CODEBOOK_FILE`, `parseCSV`,
 * `_` (lodash-style keyBy), `db`, and `findUrlsInText` — none visible in this
 * chunk; confirm their contracts against the rest of the file.
 */
async function importCodebook() {
  // Extract the first sheet of the codebook workbook as CSV text.
  const codebookXLS = XLSX.readFile(CODEBOOK_FILE)
  const sheet = codebookXLS.Sheets[codebookXLS.SheetNames[0]]
  const codebookCSV = XLSX.utils.sheet_to_csv(sheet)
  const now = new Date()
  const codebookRows = await parseCSV(codebookCSV)
  // Skip the header row; map each CSV row into a named metadata record.
  // NOTE(review): assumes fixed column order 0..7 — confirm against the codebook layout.
  const vdemVariables = codebookRows.slice(1).map(row => ({
    indicatorCode: row[0],
    indicatorName: row[1],
    shortDefinition: row[2],
    longDefinition: row[3],
    responses: row[4],
    dataRelease: row[5],
    aggregationMethod: row[6],
    variableSource: row[7].trim()
  }))
  // Need to handle these fussy subset codes separately
  // Index the "canonical" entries (those with a shortDefinition) by code; later
  // rows sharing a code are duplicates that must be renamed and backfilled.
  const variablesByCode = _.keyBy(vdemVariables.filter(v => v.shortDefinition), v => v.indicatorCode)
  for (const v of vdemVariables) {
    const orig = variablesByCode[v.indicatorCode]
    if (orig !== v) {
      // Duplicate code: suffix it based on whether the name mentions the
      // executive or legislative branch; anything else is unexpected.
      if (v.indicatorName.toLowerCase().indexOf("executive") !== -1) {
        v.indicatorCode += "_ex"
      } else if (v.indicatorName.toLowerCase().indexOf("legislative") !== -1) {
        v.indicatorCode += "_leg"
      } else {
        throw new Error("Unknown duplicate indicator: " + v.indicatorName)
      }
      // Copy the descriptive metadata from the canonical entry onto the duplicate.
      v.shortDefinition = orig.shortDefinition
      v.responses = orig.responses
      v.dataRelease = orig.dataRelease
      v.aggregationMethod = orig.aggregationMethod
      v.variableSource = orig.variableSource
    }
  }
  // User responsible for uploading this data
  const userId = (await db.get(`SELECT * FROM users WHERE fullName=?`, ["Jaiden Mispy"])).id
  await db.transaction(async t => {
    // If a 'vdem' dataset already exists, delete it and all dependent rows
    // (values -> variables -> sources -> dataset, in FK-safe order).
    const existingDataset = (await t.query("SELECT id FROM datasets WHERE namespace='vdem'"))[0]
    if (existingDataset) {
      await t.execute(`DELETE d FROM data_values AS d JOIN variables AS v ON d.variableId=v.id WHERE v.datasetId=?`, [existingDataset.id])
      await t.execute(`DELETE FROM variables WHERE datasetId=?`, [existingDataset.id])
      await t.execute(`DELETE FROM sources WHERE datasetId=?`, [existingDataset.id])
      await t.execute(`DELETE FROM datasets WHERE id=?`, [existingDataset.id])
    }
    // Insert the dataset shell; column order must match the VALUES placeholder expansion.
    const datasetRow = ['vdem', "V-Dem Dataset Version 8 - V-Dem Institute", "", false, now, now, now, userId, now, userId, userId]
    const result = await t.query("INSERT INTO datasets (namespace, name, description, isPrivate, createdAt, updatedAt, metadataEditedAt, metadataEditedByUserId, dataEditedAt, dataEditedByUserId, createdByUserId) VALUES (?)", [datasetRow])
    const datasetId = result.insertId
    const sourceName = "V-Dem Dataset Version 8 (2018)"
    // One source row and one variable row per codebook entry; loop index `i`
    // doubles as the variable's columnOrder.
    for (let i = 0; i < vdemVariables.length; i++) {
      const v = vdemVariables[i]
      // Assemble the human-readable provenance blob from the codebook fields.
      let additionalInfo = "This variable was imported into the OWID database from Version 8 of the V-Dem Dataset. Here is the original metadata given by the V-Dem Codebook:\n\n"
      additionalInfo += `Indicator Name: ${v.indicatorName}\n\n`
      additionalInfo += `Indicator Code: ${v.indicatorCode}\n\n`
      if (v.shortDefinition) additionalInfo += `Short definition: ${v.shortDefinition}\n\n`
      if (v.longDefinition) additionalInfo += `Long definition: ${v.longDefinition}\n\n`
      if (v.responses) additionalInfo += `Responses: ${v.responses}\n\n`
      if (v.dataRelease) additionalInfo += `Data release: ${v.dataRelease}\n\n`
      if (v.aggregationMethod) additionalInfo += `Aggregation method: ${v.aggregationMethod}`
      // Special-case note for one recoded indicator; also tags its display name.
      if (v.indicatorCode === "v2exdfcbhs_rec") {
        additionalInfo += "\n| Notes: v2exdfcbhs_rec is a version of v2exdfcbhs, for v2exdfcbhs_rec the answer categories 1 and 2, 3 and 4 has been merged."
        v.indicatorName += " (rec)"
      }
      const sourceDescription = { dataPublishedBy: "V-Dem Institute", dataPublisherSource: v.variableSource, link: findUrlsInText(v.variableSource).join(","), additionalInfo: additionalInfo }
      const sourceRow = [datasetId, sourceName, now, now, JSON.stringify(sourceDescription)]
      const sourceResult = await t.query("INSERT INTO sources (datasetId, name, createdAt, updatedAt, description) VALUES (?)", [sourceRow])
      const sourceId = sourceResult.insertId
      // The full metadata record is also stored verbatim in originalMetadata.
      const variableRow = [datasetId, sourceId, i, v.indicatorName, v.indicatorCode, v.shortDefinition, now, now, JSON.stringify(v), "", "", "", "{}"]
      await t.query("INSERT INTO variables (datasetId, sourceId, columnOrder, name, code, description, createdAt, updatedAt, originalMetadata, unit, coverage, timespan, display) VALUES (?)", [variableRow])
    }
  })
}