// Zips the generated GeoServer output files into a single timestamped archive.
// Resolves with the zip's file name once the output stream has closed; rejects
// on any archiver error.
//
// NOTE: source files are removed only AFTER the archive stream closes. The
// original code unlinked each file right after archive.append(), but archiver
// reads its input streams lazily during finalize(), so deleting immediately
// races the read and can corrupt/empty the archive.
return new Promise<string>((resolve, reject) => {
    let zipFileName = "geoserver_" + requestTimestamp.toString() + ".zip";
    let zipFilePath = config.get("save_path") + zipFileName;
    let zipStream = fs.createWriteStream(zipFilePath);
    let archive = archiver.create("zip", {});
    let sourcePaths: string[] = [];
    zipStream.on("close", function() {
        // Archive fully written — now it is safe to delete the inputs.
        for (let sourcePath of sourcePaths) {
            fs.unlink(sourcePath, (err: any) => {
                // Don't throw from an async callback (it would crash the
                // process); a leftover temp file is not fatal.
                if (err) {
                    console.error("could not remove " + sourcePath, err);
                }
            });
        }
        resolve(zipFileName);
    });
    archive.on("error", (err: any) => {
        reject(err);
    });
    archive.pipe(zipStream);
    for (let fileName of filesToZip) {
        // Strip the request-timestamp digits so archive entries have stable names.
        let fileNameWithoutTimestamp = fileName.replace(/[0-9]/g, "");
        let fn = config.get("save_path") + fileName;
        archive.append(fs.createReadStream(fn), { name: fileNameWithoutTimestamp });
        sourcePaths.push(fn);
    }
    archive.finalize();
});
app.get("/pop/fgdc", (req, res) => { console.log("get /dot/fgdc"); let filePath = config.get("metadata_path") + "USA-NPN_Phenology_observation_data.xml"; let file = fs.createWriteStream(filePath); let gitUrl = "https://raw.githubusercontent.com/usa-npn/metadata/master/USA-NPN_Phenology_observation_data.xml"; https.get(gitUrl, (gitResponse: any) => { gitResponse.pipe(file); file.on("finish", () => { res.download(filePath, "USA-NPN_Phenology_observation_data.xml"); // Set disposition and send it. }); }); });
// Downloads `url` into save_path as "<prefix><timestamp>.<extension>".
// Resolves with the saved file name only after the bytes are flushed to disk;
// rejects if the HTTP request fails.
return new Promise<string>((resolve, reject) => {
    let fileName = prefix + requestTimestamp + "." + extension;
    let filePath = config.get("save_path") + fileName;
    let file = fs.createWriteStream(filePath);
    https.get(url, (metaResponse: any) => {
        metaResponse.pipe(file);
        // Resolve on the WRITE stream's 'finish', not the response's 'end':
        // 'end' fires when the last chunk arrives over the network, which can
        // be before the data is written out, so callers could read a
        // truncated file.
        file.on("finish", () => {
            resolve(fileName);
        });
    }).on("error", (err: any) => {
        // Previously a request failure left the promise pending forever.
        reject(err);
    });
});
// Zips the datasheet files plus the matching datafield-description workbook(s)
// into a single timestamped archive. Resolves with the zip file name once the
// output stream closes; rejects on archiver errors.
return new Promise<string>((resolve, reject) => {
    let zipFileName = "datasheet_" + requestTimestamp.toString() + ".zip";
    let zipFilePath = config.get("save_path") + zipFileName;
    let zipStream = fs.createWriteStream(zipFilePath);
    let archive = archiver.create("zip", {});
    zipStream.on("close", function() {
        console.log("All files are zipped!");
        resolve(zipFileName);
    });
    archive.on("error", (err: any) => {
        reject(err);
    });
    archive.pipe(zipStream);
    for (let fileName of filesToZip) {
        // Strip the request-timestamp digits so archive entries have stable names.
        let fileNameWithoutTimestamp = fileName.replace(/[0-9]/g, "");
        archive.append(fs.createReadStream(config.get("save_path") + fileName), { name: fileNameWithoutTimestamp });
    }
    // Adds a metadata workbook to the archive, if it exists on disk.
    let appendMetadataFile = (metadataFileName: string) => {
        let metadataPath = config.get("metadata_path") + metadataFileName;
        if (fs.existsSync(metadataPath)) {
            archive.append(fs.createReadStream(metadataPath), { name: metadataFileName });
        }
    };
    // Map each download type to its field-description workbook. Replaces the
    // original copy-pasted if/else-if chain; behavior is unchanged (a type
    // whose workbook is missing simply adds nothing, as before).
    let descriptionsByType: { [key: string]: string } = {
        "Status and Intensity": "status_intensity_datafield_descriptions.xlsx",
        "Site Phenometrics": "site_phenometrics_datafield_descriptions.xlsx",
        "Individual Phenometrics": "individual_phenometrics_datafield_descriptions.xlsx",
        "Magnitude Phenometrics": "magnitude_phenometrics_datafield_descriptions.xlsx"
    };
    let descriptionFileName = descriptionsByType[downloadType];
    if (descriptionFileName) {
        appendMetadataFile(descriptionFileName);
    }
    // With more than two files in the request, also include the ancillary
    // field descriptions (if present) — same threshold as the original code.
    if (filesToZip.length > 2) {
        appendMetadataFile("ancillary_datafield_descriptions.xlsx");
    }
    archive.finalize();
});
let pool = mysql.createPool({ connectionLimit : 20, host : config.get("mysql_host") as string, user : config.get("mysql_user") as string, password : config.get("mysql_password") as string, database : config.get("mysql_database") as string, debug : false }); let app = express(); // allows us to consume json from post requests app.use(bodyParser.json()); // create a write stream (in append mode) and set up a log to record requests let accessLogStream = fs.createWriteStream(path.join(config.get("logs_path").toString(), "access.log"), {flags: "a"}); app.use(morgan("combined", {stream: accessLogStream})); let log = bunyan.createLogger({ name: "dot_service", streams: [ { level: "info", path: path.join(config.get("logs_path").toString(), "info.log") }, { level: "error", path: path.join(config.get("logs_path").toString(), "error.log") } ] });