// agmission/Development/server/controllers/export.js
// Export controller: builds AgNav (.no1/.prj/.agn/.dsp/.xcl/.vfr/.xyz),
// shapefile and geo-referenced map downloads for jobs.
'use strict';
module.exports = function (app) {
const
debug = require('debug')('agm:export'),
ObjectId = require('mongodb').ObjectId,
moment = require('moment'),
Job = require('../model/job'),
JobLog = require('../model/job_log'),
JobAssign = require('../model/job_assign'),
jobUtil = require('../helpers/job_util'),
Obstacle = require('../model/obstacles'),
async = require('async'),
path = require('path'),
fs = require('fs-extra'),
shpWrite = require('shp-write'),
util = require('util'),
archiver = require('archiver'),
xml2js = require('xml2js'),
uniqid = require('uniqid'),
turf = require('@turf/turf'),
fileHelper = require('../helpers/file_helper'),
urlHelper = require('../helpers/url_helper'),
utils = require('../helpers/utils'),
bufUtil = require('../helpers/line_buffer'),
polyUtil = require('../helpers/poly_util'),
geoUtil = require('../helpers/geo_util'),
webUtil = require('../helpers/web_util'),
{ Errors, UserTypes, AssignStatus } = require('../helpers/constants'),
{ AppParamError, AppError } = require('../helpers/app_error'),
lineUtil = require('../helpers/gridline_util'),
{ version } = require('../package.json'),
env = require('../helpers/env'),
cloneDeep = require('clone-deep'),
_ = require('lodash');
require('string-format-js');
const parserXMLAsync = util.promisify(xml2js.Parser().parseString);
const writeWorldFileAync = util.promisify(writeWorldFile);
const makeDownloadItemsAsync = util.promisify(makeDownloadItems);
const sendArchiveAsync = util.promisify(sendArchive);
const removeTempFilesAsync = util.promisify(removeTempFiles);
// ---------------------------------------------------------------------------
// AgNav file line-identification numbers.
// Every line written to a .no1/.prj file begins with one of these numeric
// IDs; the console unit uses the ID to interpret the rest of the line.
// NOTE(review): the misspelled names (SYSETEM, OFFFSET) are kept as-is
// because they are referenced throughout this module.
// ---------------------------------------------------------------------------
// Line identification number for NO1 file
const NO1_TITLE = 0; // title
const NO1_CENTRAL_MERIDIAN = 1; // central meridian
const NO1_CORNER = 2; // area corner points
const NO1_WAYPOINT = 3; // way points line
const NO1_NUMBER_OF_LINES = 4; // number of lines
const NO1_MAX_CROSS_TRACK = 8; // max cross track
const NO1_DELTA_X_Y_Z = 9; // delta x, y, z (LL to UTM conversion)
const NO1_K0_XY_SHIFT = 11; // K0, x/y shift (LL to UTM conversion)
// const NO1_1ST_LINE = 16; // 1st line number
const NO1_MASTER_POINT_HEADING = 17; // master point, heading
const NO1_ELLIPSOID = 20; // ellipsoid
const NO1_EQUATORIAL_CROSSING = 21; // equatorial crossing
// const NO1_DIR_RAMDISK_SPRAYSIDE = 35; // project directory, ramdisk, and spray side
const NO1_SWATHWIDTH_AC_INSIDE_AREA = 36; // swath width, number of A/C, inside area
const NO1_HALF_SWATH_OFFFSET_1ST_TIME = 37; // half swath offset on 1st line(1=YES, 0=NO)
const NO1_DISPLAY_UNIT = 38; // display units (1=US, 2=metric)
// const NO1_RACE_SKIP = 39; // race and skip track
const NO1_MAGNETIC_VARIATION = 40; // magnetic variation (degrees)
const NO1_SYSETEM_LAG = 41; // system lag (seconds)
const NO1_RELAY_ON_OFF = 42; // Relay ON/OFF (seconds), turn Relay ON/OFF early
const NO1_PATTERN = 43; // flight pattern
const NO1_EXPAND_LEFT_RIGHT = 44; // for expand spray pattern, indicates expand direction (1:left, 0: right)
const PRJ_ZONE_NAME = 62; // Zone name
const AGN_APPLICATION_RATE = 63; // Application rate for flow controller
const NO1_SWATHWIDTH_SEGMENTS = 202; // 4 swath widths for each segment
const NO1_AREATYPE = 203; // Area Type 1: ABLINE, 65536: SPLIT BDY
// Maximum size (in pixels) allowed on the largest side of a rendered map image.
const MAX_MAP_DIMEN = 4 * 1024; // Maximum map dimention (in pixels)
/** POST handler: respond with the count of pending (status 0) job assignments for the caller. */
async function anyJob_post(req, res) {
  const requesterId = req.uid;
  if (!utils.isObjectId(requesterId)) {
    return res.json({ total: 0 });
  }
  const pendingCount = await JobAssign.countDocuments({ user: requesterId, status: 0 });
  res.json({ total: pendingCount || 0 });
}
/** POST handler: list the caller's new (status 0) job assignments with basic job info populated. */
async function newJobs_post(req, res) {
  const requesterId = req.uid;
  if (!utils.isObjectId(requesterId)) return res.json([]);
  const assignments = await JobAssign.find({ user: ObjectId(requesterId), status: 0 })
    .populate('job', 'name startDate endDate')
    .select('-_id date')
    .lean();
  res.json(assignments || []);
}
/**
 * POST handler for job downloads. If the job stores map download options,
 * dispatch to the map flow with those options; otherwise download nav files only.
 */
async function downloadJob_post(req, res) {
  const { jobId } = req.body;
  if (!jobId) AppParamError.throw();
  // Fetch only the stored download options for this job.
  const job = await Job.findById(jobId, 'dlOp', { lean: true });
  if (!job) AppError.throw(Errors.JOB_NOT_FOUND);
  const storedMapOp = job.dlOp && job.dlOp.mapOp;
  if (storedMapOp) {
    // Forward a cloned request carrying the job's saved map options.
    const mapReq = cloneDeep(req);
    mapReq.body['mapOps'] = storedMapOp;
    await downloadJobwMap(mapReq, res);
  } else {
    await downloadJob(req, res);
  }
}
/** Verify access, build the job's archive entries, stream the zip, then log the download. */
async function downloadJob(req, res) {
  const { jobId } = req.body;
  await jobUtil.isJobAssignedToVehicle(req.uid, req.ut, jobId);
  const archiveEntries = await makeDownloadItemsAsync(req);
  await sendArchiveAsync(archiveEntries, res);
  await writeJobLog(jobId, 2, req.uid, req.ut);
}
/**
 * Stream the given items as a zip archive into the HTTP response.
 *
 * @param {Array} items entries of shape { content, meta } (in-memory) or { file, meta } (on disk)
 * @param {*} res HTTP response the archive is piped into
 * @param {Function} [cb] node-style callback invoked once, on completion or error
 */
function sendArchive(items, res, cb) {
  // Add items to archive then pipe to response, end response when done
  const archive = archiver('zip', { zlib: { level: 9 } });
  // Fix: 'error' and 'end' can both fire on the same archive, which previously
  // invoked the callback twice. Guard so cb runs at most once.
  let finished = false;
  const done = (err) => {
    if (finished) return;
    finished = true;
    cb && cb(err);
  };
  archive.on('end', () => {
    res.end();
    done();
  });
  archive.on('error', (err) => {
    debug(err.stack);
    done(err);
  });
  archive.pipe(res);
  if (!utils.isEmptyArray(items)) {
    for (const item of items) {
      // In-memory content (string/Buffer) entries
      if (item.content)
        archive.append(item.content, item.meta);
      // On-disk file entries
      if (item.file)
        archive.file(item.file, item.meta);
    }
  }
  archive.finalize();
}
/**
 * Build the list of archive entries (contents + names) for a job download
 * request, according to the requested type.
 *
 * Depending on the type it emits AgNav .no1/.prj/.agn navigation files,
 * .dsp/.xcl polygon files, .vfr variable-rate files, .xyz flight-line files,
 * shapefile components and a job.json summary.
 *
 * @param {*} req HTTP request; req.body carries { jobId, type }
 * @param {Function} cb node-style callback (err, archiveItems)
 */
function makeDownloadItems(req, cb) {
  const params = req.body;
  // params = { jobId: 15, type: 1 }; // 1:'AGNAV', 2: AgNav Prj, 3:'SHP' (,4:'KML', 5:'KMZ' not yet)
  // debug('Request params: ', util.inspect(job, false, null));
  let dlType = params.type || 1; // Default is AGNAV type
  let outFileName = '', sprayAreas = [], xclAreas = [], xclItems = [], _job, xyzFile, xclContent, archiveItems = [], numOfLines = -1;
  async.series([
    // Step 1: load the job with client/operator/vehicle/product names populated.
    function (callback) {
      Job.findById(params.jobId)
        .populate({ path: 'client', model: UserTypes.CLIENT, select: { '_id': 0, 'name': 1 } })
        .populate({ path: 'operator', model: UserTypes.PILOT, select: { '_id': 0, 'name': 1 } })
        .populate({ path: 'vehicle', model: UserTypes.DEVICE, select: { '_id': 0, 'name': 1 } })
        .populate('products.product', 'name')
        .then(job => {
          if (!job) {
            callback(AppError.create(Errors.JOB_NOT_FOUND));
            return;
          } else {
            return Job.populate(job, { path: 'crop', select: 'name', skipInvalidIds: true });
          }
        })
        .then(job => {
          _job = job.toObject();
          // Override with selected download option
          if (req.ut === UserTypes.DEVICE) {
            dlType = _job.dlOp.type || 1;
          }
          outFileName = _job.name;
          callback();
        })
        .catch(err => {
          callback(err);
        });
    },
    // Step 2: merge grid-line info (master point, heading, lines) from the
    // area_lines collection into each spray area.
    function (callback) {
      // NOTE(review): Mongoose aggregate() does not auto-cast; if params.jobId
      // arrives as a string this $match may never match an ObjectId _id —
      // verify what callers pass.
      const pipeline = [
        {
          "$match": { _id: params.jobId }
        },
        { $unwind: "$sprayAreas" },
        {
          $lookup: {
            from: "area_lines",
            localField: "sprayAreas._id",
            foreignField: "areaId",
            as: "area_lines"
          }
        },
        { $unwind: { "path": "$area_lines", "preserveNullAndEmptyArrays": true } },
        {
          $project: {
            "_id": "$_id",
            "sprayAreas": {
              "_id": "$sprayAreas._id", "geometry": "$sprayAreas.geometry", "properties": "$sprayAreas.properties",
              'masterPoint': '$area_lines.masterPoint',
              "latlngHeading": '$area_lines.latlngHeading', "heading": '$area_lines.heading',
              "lines": {
                $cond: { if: '$area_lines.lines', then: "$area_lines.lines", else: [] }
              },
              "mems": {
                $cond: { if: '$area_lines.mems', then: "$area_lines.mems", else: [] }
              },
            }
          }
        },
        {
          "$group": {
            "_id": "$_id", "sprayAreas": { $push: "$sprayAreas" }
          }
        }
      ];
      Job.aggregate(pipeline, (err, result) => {
        if (err) return callback(err);
        if (result && result.length)
          _job.sprayAreas = result[0].sprayAreas; // Replace the sprayAreas with the ones combines with grid lines related info
        callback();
      });
    },
    // Step 3: turn line buffers into exclusion (XCL) areas and normalize
    // spray polygons to clockwise vertex order.
    function (callback) {
      sprayAreas = _job.sprayAreas;
      if (utils.isEmptyArray(sprayAreas)) return callback(AppError.create(Errors.NO_SPRAY_AREA));
      xclAreas = _job.excludedAreas;
      let bufs = _job.bufs;
      // Process the buffers as XCL areas
      if (!utils.isEmptyArray(bufs)) {
        let polygon;
        let xclPolygons = [];
        for (let j = 0; j < sprayAreas.length; j++) {
          // NOTE(review): splicing `bufs` while iterating it with entries()
          // skips the element following each removal — confirm whether that
          // is acceptable here.
          for (const [index, buf] of bufs.entries()) {
            // Remove the invalid buffer
            if (utils.isEmptyArray(buf.geometry.coordinates) || buf.geometry.coordinates.length < 2) {
              bufs.splice(index, 1);
            }
            // if a line buffer within a spray polygon => create as an xcl with the same spray one's name
            else if (polyUtil.lineInPolygon(buf.geometry.coordinates, sprayAreas[j].geometry.coordinates)) {
              bufs.splice(index, 1);
              polygon = bufUtil.lineBuffer(buf.geometry.coordinates, utils.toMeter(buf.properties.width, _job.measureUnit));
              if (!utils.isEmptyArray(polygon))
                xclPolygons.push({ name: sprayAreas[j].properties.name, coors: polygon });
            }
          }
        }
        // The rest of the line buffers will be created as outside xcls
        for (let k = 0; k < bufs.length; k++) {
          polygon = bufUtil.lineBuffer(bufs[k].geometry.coordinates, utils.toMeter(bufs[k].properties.width, _job.measureUnit));
          if (!utils.isEmptyArray(polygon))
            xclPolygons.push({ name: `XCL_${k + 1}`, coors: polygon });
        }
        for (let l = 0; l < xclPolygons.length; l++) {
          xclAreas.push(jobUtil.createXclArea(xclPolygons[l]));
        }
      }
      // Make sure spray areas always in clockwise order
      for (const area of sprayAreas) {
        let coors = area.geometry.coordinates[0];
        if (!polyUtil.isClockwise(coors))
          area.geometry.coordinates[0] = coors.reverse();
      }
      callback();
    },
    // Step 4: for PRJ/SHP outputs, regenerate flight lines over all combined
    // areas and emit a single .xyz entry.
    function (callback) {
      if (dlType > 1) { // Generate xyz file for all combined areas options: PRJ or SHAPE
        numOfLines = 0;
        const rootArea = findRootArea(sprayAreas, true);
        if (!rootArea) return callback();
        // Heading to generate is in UTM, write to file in LL
        const heading = utils.isNumber(rootArea.heading) ? rootArea.heading : 0.0;
        lineUtil.getLinesLatLng(_job, null, true, 0, 0, heading, null, { useGroup: false, regenerate: true })
          .then(linesRes => {
            if (!utils.isEmptyArray(linesRes.lines) && !utils.isEmptyArray(linesRes.lines[0].lines)) {
              numOfLines = linesRes.lines[0].lines.length;
              const xyzContent = linesToXYZ(linesRes.lines[0].lines);
              if (xyzContent)
                archiveItems.push({ content: xyzContent, meta: { name: outFileName + '.xyz' } });
            }
            callback();
          })
          .catch(err => callback(err));
      }
      else callback();
    },
    // Step 5: emit the main navigation files per download type.
    function (callback) {
      if (dlType === 1) { // AgNav
        if (sprayAreas.length === 1) {
          // Single area: one .no1 (+ optional .xcl) and its .xyz.
          if (sprayAreas[0].properties && sprayAreas[0].properties.name)
            outFileName = sprayAreas[0].properties.name;
          const no1Content = writeAGNorN01orPRJ(_job, outFileName, sprayAreas, numOfLines, 1);
          archiveItems.push({ content: no1Content, meta: { name: outFileName + '.no1' } });
          xclContent = writeDSPorXCL(xclAreas);
          if (xclContent)
            archiveItems.push({ content: xclContent, meta: { name: outFileName + '.xcl' } });
          xyzFile = writeAreasXYZ(sprayAreas, outFileName + '.xyz');
          if (!utils.isEmptyArray(xyzFile))
            archiveItems = archiveItems.concat(xyzFile);
        }
        else {
          // Export No1 files for each spray zone, xcl not releated to any spray zone will be added to the first no1's xcl
          // split spray area into seperated items a long with its xcls, then write these as no1+dsp+xcl files
          const processXclAreas = xclAreas.slice(0);
          const entries = [];
          let sprArea, xclArea;
          for (let i = 0; i < sprayAreas.length; i++) {
            const xcls = [];
            sprArea = sprayAreas[i];
            let idx = processXclAreas.length - 1;
            while (processXclAreas.length && idx >= 0) {
              xclArea = processXclAreas[idx];
              if (polyUtil.polygonWithin(xclArea.geometry.coordinates, sprArea.geometry.coordinates)
                || polyUtil.polygonIntersect(sprArea.geometry.coordinates, xclArea.geometry.coordinates)) {
                xcls.push(xclArea);
                processXclAreas.splice(idx, 1);
              }
              idx--;
            }
            entries.push({ area: sprArea, xcls: xcls });
          }
          // The rest of the unrelated xcls (if any) assign to the first spray zone
          if (entries.length && processXclAreas && processXclAreas.length) {
            if (entries[0].xcls)
              entries[0].xcls = entries[0].xcls.concat(processXclAreas);
            else
              entries[0].xcls = processXclAreas;
          }
          // Now, create no1 file archive entries
          const entriesByName = _.groupBy(entries, it => (it.area.properties && it.area.properties.name) ? it.area.properties.name.toLowerCase() : '');
          let nameIdx = 1;
          for (let entryName in entriesByName) {
            xclItems.length = 0;
            const groupEntry = entriesByName[entryName];
            outFileName = groupEntry[0].area.properties.name;
            if (!outFileName)
              outFileName = `Spray_${nameIdx++}`;
            if (groupEntry.length === 1) { // No1
              const no1Content = writeAGNorN01orPRJ(_job, outFileName, [groupEntry[0].area], numOfLines, 1);
              if (no1Content) {
                archiveItems.push({ content: no1Content, meta: { name: outFileName + '.no1' } });
                if (!utils.isEmptyArray(groupEntry[0].xcls))
                  xclItems = xclItems.concat(groupEntry[0].xcls);
                xyzFile = writeAreasXYZ([groupEntry[0].area], outFileName + '.xyz');
                if (!utils.isEmptyArray(xyzFile))
                  archiveItems = archiveItems.concat(xyzFile);
              }
            } else { // Prj
              // Several same-named areas: bundle them into one .prj/.dsp/.vfr set.
              let sprayItems = []; xclItems = [];
              for (const it of groupEntry) {
                sprayItems.push(it.area);
                if (!utils.isEmptyArray(it.xcls))
                  xclItems = xclItems.concat(it.xcls);
              }
              const prjContent = writeAGNorN01orPRJ(_job, outFileName, sprayItems, numOfLines, 2, true);
              const dspContent = writeDSPorXCL(sprayItems);
              const vfrContent = writeVFR(sprayItems, _job.appRate);
              archiveItems = archiveItems.concat([
                { content: prjContent, meta: { name: outFileName + '.prj' } },
                { content: dspContent, meta: { name: outFileName + '.dsp' } },
                { content: vfrContent, meta: { name: outFileName + '.vfr' } }]);
              xyzFile = writeAreasXYZ(sprayItems, outFileName + '.xyz');
              if (!utils.isEmptyArray(xyzFile))
                archiveItems = archiveItems.concat(xyzFile);
            }
            const xclContent = writeDSPorXCL(xclItems);
            if (xclContent)
              archiveItems.push({ content: xclContent, meta: { name: outFileName + '.xcl' } });
          }
        }
        callback();
      }
      else if (dlType === 2) { // AgNav Prj
        const prjContent = writeAGNorN01orPRJ(_job, outFileName, sprayAreas, numOfLines, 2, true, true);
        const dspContent = writeDSPorXCL(sprayAreas);
        const vfrContent = writeVFR(sprayAreas, _job.appRate);
        archiveItems = archiveItems.concat([
          { content: prjContent, meta: { name: outFileName + '.prj' } },
          { content: dspContent, meta: { name: outFileName + '.dsp' } },
          { content: vfrContent, meta: { name: outFileName + '.vfr' } }]);
        // XCL for the case of not combined, No1 or Prj
        xclContent = writeDSPorXCL(xclAreas);
        if (xclContent)
          archiveItems.push({ content: xclContent, meta: { name: outFileName + '.xcl' } });
        callback();
      }
      else if (dlType === 3) {
        // Shapefile: spray + xcl polygons, plus a companion .agn file.
        const items = [];
        items.push(sprayAreas);
        if (xclAreas.length > 0)
          items.push(xclAreas);
        const agnContent = writeAGNorN01orPRJ(_job, outFileName, sprayAreas, numOfLines, 3);
        if (agnContent)
          archiveItems.push({ content: agnContent, meta: { name: outFileName + '.agn' } });
        writeShapeFile(_job, items, 1, outFileName, (err, items) => {
          if (err) return callback(err);
          if (!utils.isEmptyArray(items))
            archiveItems = archiveItems.concat(items);
          callback();
        });
      }
      else callback();
    },
    // Step 6: for shapefile downloads, also export waypoints as a point shapefile.
    function (callback) {
      if (dlType === 3 && _job.waypoints.length > 0) {
        writeShapeFile(_job, _job.waypoints, 2, outFileName + 'wpt', (err, items) => {
          if (err) return callback(err);
          if (!utils.isEmptyArray(items))
            archiveItems = archiveItems.concat(items);
          callback();
        });
      }
      else
        callback();
    },
    // Step 7: add the job.json summary entry.
    function (callback) {
      const info = writeJobInfo(_job);
      if (info)
        archiveItems.push({ content: info, meta: { name: 'job.json' } });
      callback();
    }
  ], function (err) {
    if (err) {
      debug(params);
      return cb(err);
    }
    return cb(null, archiveItems);
  });
}
/**
 * Write Job items to a shape file and return a list of archive entries contain the shp file items
 * (.shp, .shx, .dbf and .prj components).
 *
 * @param {*} job the job object for additional info only (must not be modified)
 * @param {*} items for type 1: [sprayAreas, xclAreas?]; for type 2: waypoint features.
 *   NOTE(review): the xcl list (items[1]) is consumed in place via splice() and
 *   rings may be reversed in place — callers must not reuse these arrays
 *   afterwards; confirm this is intended.
 * @param {*} type 1: polygons, 2: waypoints
 * @param {*} outFileName The base file name
 * @param {*} cb Callback function (err, archiveItems)
 */
function writeShapeFile(job, items, type, outFileName, cb) {
  if (utils.isEmptyArray(items)) return cb();
  const points = [];
  const featureData = []; // one DBF record per shape
  let archiveItems = [];
  if (type === 1) { // Area shape file
    let sprayAreas = [], xclAreas = [], coors = [], name;
    if (items.length > 1)
      xclAreas = items[1] || [];
    sprayAreas = items[0] || [];
    for (let i = 0; i < sprayAreas.length; i++) {
      const sprArea = sprayAreas[i];
      const sprayWithchildCoors = [];
      // Per-area rate, falling back to the job-level rate when unset or 0.
      let rate = Number(sprArea.properties.appRate) || 0;
      if (!rate) rate = job.appRate;
      name = sprArea.properties.name || `${i + 1}`;
      coors = sprArea.geometry.coordinates[0];
      sprayWithchildCoors.push(coors);
      // NOTE(review): this record uses the key "Version" while the XCL records
      // below use "VERSION" — mixed-case keys create two separate DBF columns;
      // confirm which spelling consumers expect.
      featureData.push({ BLOCKID: name, XCL: 'N', RATE: rate.toFixed(2), Version: version }); // Dbf record
      if (!utils.isEmptyArray(xclAreas)) {
        // An XCL contained in or overlapping this spray area becomes a hole of it.
        let j = xclAreas.length - 1;
        while (xclAreas.length && j >= 0) {
          const xclArea = xclAreas[j];
          if (polyUtil.polygonWithin(xclArea.geometry.coordinates, sprArea.geometry.coordinates)
            || polyUtil.polygonIntersect(sprArea.geometry.coordinates, xclArea.geometry.coordinates)) {
            coors = xclArea.geometry.coordinates[0];
            // Hole rings are flipped to counter-clockwise order.
            if (polyUtil.isClockwise(coors))
              coors = coors.reverse();
            sprayWithchildCoors.push(coors);
            xclAreas.splice(j, 1);
          }
          j--;
        }
      }
      // Same-name Spray areas and related xcls (holes be written to the same shape record (Multipolygon))
      points.push([sprayWithchildCoors]);
    }
    // Process the rest of standalone xcl items as poly with XCL = 'Y'
    if (!utils.isEmptyArray(xclAreas)) {
      for (let m = 0; m < xclAreas.length; m++) {
        const xclArea = xclAreas[m];
        featureData.push({ BLOCKID: xclArea.properties.name || '', XCL: 'Y', RATE: 0, VERSION: version, });
        coors = xclArea.geometry.coordinates[0];
        if (!polyUtil.isClockwise(coors)) {
          coors = coors.reverse();
        }
        points.push([coors]);
      }
    }
  }
  else if (type === 2) { // WayPoint shape file
    // Process points for waypoint items
    for (const item of items) {
      featureData.push({ NAME: item.properties.name || '', VERSION: version });
      if (item.geometry && item.geometry.coordinates)
        points.push(item.geometry.coordinates);
    }
  }
  if (utils.isEmptyArray(points)) return cb();
  shpWrite.write(
    // feature data
    featureData.length ? featureData : [{ Version: version, Name: '' }],
    // geometry type
    type === 1 ? 'POLYGON' : 'POINT',
    // geometries
    points,
    function (err, files) {
      if (err)
        return cb(err);
      archiveItems = [
        { content: utils.ArrayBuffertoBuffer(files.shp.buffer), meta: { name: outFileName + '.shp' } },
        { content: utils.ArrayBuffertoBuffer(files.shx.buffer), meta: { name: outFileName + '.shx' } },
        { content: utils.ArrayBuffertoBuffer(files.dbf.buffer), meta: { name: outFileName + '.dbf' } },
        { content: writeShapePRJ(), meta: { name: outFileName + '.prj' } }];
      return cb(null, archiveItems);
    });
}
/**
 * Return the WKT definition of the WGS-84 geographic coordinate system,
 * used as the .prj companion of the exported shapefile.
 */
function writeShapePRJ() {
  const wgs84Wkt = 'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]';
  return wgs84Wkt;
}
/**
 * Serialize a trimmed summary of the job as pretty-printed JSON
 * (the "job.json" archive entry). Returns null for a missing job or
 * when serialization fails.
 */
function writeJobInfo(job) {
  if (!job) return null;
  const summary = {
    _id: job._id,
    name: job.name,
    orderNumber: job.orderNumber,
    measureUnit: job.measureUnit,
    swathWidth: job.swathWidth,
    appRate: job.appRate,
    appRateUnit: job.appRateUnit,
    startDate: job.startDate,
    endDate: job.endDate,
    flightNumber: job.flightNumber,
    // A populated crop document is reduced to its name; otherwise keep the raw value.
    crop: (job.crop && job.crop['_id']) ? job.crop['name'] : job.crop,
    farm: job.farm,
    remark: job.remark
  };
  if (!utils.isEmptyArray(job.products)) {
    summary.product = { name: job.products.map(p => p.product.name).join("/") };
  }
  if (job.operator) summary.operator = { name: job.operator.name };
  if (job.client) summary.client = { name: job.client.name, address: job.client.address };
  if (job.vehicle) summary.vehicle = { name: job.vehicle.name };
  try {
    return JSON.stringify(summary, null, 4);
  } catch (error) {
    debug(error);
    return null;
  }
}
/**
 * Build archive entries containing the .xyz flight-line file for the given
 * spray areas.
 *
 * @param {Array} sprayAreas areas carrying a `lines` array each
 * @param {string} fileName archive entry name for the .xyz file
 * @returns {Array|null} archive entries ({ content, meta }), or null when
 *   there are no areas
 */
function writeAreasXYZ(sprayAreas, fileName) {
  if (utils.isEmptyArray(sprayAreas)) return null;
  const items = [];
  if (sprayAreas.length === 1) {
    // Fix: guard against an area without a `lines` array (previously crashed
    // on `.length` of undefined).
    const lines = sprayAreas[0].lines || [];
    if (lines.length)
      items.push({ content: linesToXYZ(lines), meta: { name: fileName } });
  }
  else {
    // Concatenate every area's lines into one .xyz file, skipping areas
    // that carry no lines.
    const selLines = sprayAreas.map(a => a.lines || []);
    const allLines = [].concat.apply([], selLines);
    items.push({ content: linesToXYZ(allLines), meta: { name: fileName } });
  }
  return items;
}
/**
 * Convert flight lines (arrays of [lat, lng] points) into AgNav .xyz file
 * content, projecting each point to UTM.
 *
 * @param {Array} lines list of lines; each line is a flat list of points
 *   consumed as (start, end) pairs
 * @returns {string} CRLF-joined .xyz content
 */
function linesToXYZ(lines) {
  const content = [];
  // Reused lat/lng -> UTM converter instance (requires app.locals.LatLonUTM).
  let llUtm = new app.locals.LatLonUTM(0, 0), line, utm;
  for (let i = 0; i < lines.length; i++) {
    line = lines[i];
    if (!line.length) continue;
    // Line numbers are emitted as multiples of 10 (10, 20, 30, ...).
    content.push("LINE " + ((i + 1) * 10).toString());
    // Points are consumed in pairs: trailing flag 1 marks the segment start,
    // flag 2 the segment end.
    // NOTE(review): with an odd point count the last point is silently
    // dropped — confirm lines always carry an even number of points.
    for (let m = 1; m < line.length; m += 2) {
      llUtm.lat = line[m - 1][0]; llUtm.lon = line[m - 1][1];
      utm = llUtm.toUtm();
      content.push('%-.3f %-.3f 1 0'.format(utils.fixedTo(utm.easting, 3), utils.fixedTo(utm.northing, 3)));
      llUtm.lat = line[m][0]; llUtm.lon = line[m][1];
      utm = llUtm.toUtm();
      content.push('%-.3f %-.3f 2 0'.format(utils.fixedTo(utm.easting, 3), utils.fixedTo(utm.northing, 3)));
    }
  }
  return content.join('\r\n');
}
/**
 * Render areas as DSP/XCL file content: one "<areaNum> <lat> <lon>" row per
 * polygon vertex, with rings forced to clockwise order. Returns null when
 * there are no areas.
 */
function writeDSPorXCL(areas) {
  if (utils.isEmptyArray(areas))
    return null;
  const rows = [];
  let areaNum = 1;
  for (const area of areas) {
    // Skip degenerate polygons (fewer than 3 vertices).
    if (!(area.geometry.coordinates && area.geometry.coordinates[0].length >= 3))
      continue;
    let ring = area.geometry.coordinates[0];
    // The format expects clockwise rings; reverse in place when needed.
    if (!polyUtil.isClockwise(ring))
      ring = ring.reverse();
    for (const vertex of ring) {
      rows.push('%-3d %.6f %.6f'.format(areaNum, vertex[1], vertex[0]));
    }
    areaNum += 1;
  }
  return rows.join('\r\n');
}
/**
 * Render areas as .vfr (variable flow rate) file content: one
 * "<areaNum> <lat> <lon> <rate>" row per polygon vertex.
 *
 * @param {Array} areas polygon features; properties.appRate overrides the job rate
 * @param {*} jobAppRate job-level application rate fallback
 * @returns {string|null} CRLF-joined content, or null when there are no areas
 */
function writeVFR(areas, jobAppRate) {
  let content = [];
  if (utils.isEmptyArray(areas))
    return null;
  let areaNum = 1;
  for (const area of areas) {
    if (area.geometry.coordinates && area.geometry.coordinates[0].length >= 3) {
      // Fix: the rate is constant per area — it was recomputed for every
      // vertex. Also tolerate a missing `properties` object.
      const props = area.properties || {};
      const appRate = Number(props.appRate || jobAppRate) || 0;
      for (const vertex of area.geometry.coordinates[0]) {
        if (vertex && vertex.length >= 2) {
          content.push('%-3d %.6f %.6f %5.2f'.format(areaNum, vertex[1], vertex[0], utils.fixedTo(appRate, 2)));
        }
      }
      areaNum = areaNum + 1;
    }
  }
  return content.join('\r\n');
}
/**
 * Write job items to an AGNAV file (No1, Prj, AGN).
 *
 * @param {*} job the job object
 * @param {*} name base name used for the title line (NO1_TITLE)
 * @param {*} sprayAreas area list to write to the file
 * @param {*} numOfLines precomputed number of flight lines; pass a negative
 *   value to count the lines carried by the areas themselves
 * @param {*} type output file format. 1: No1, 2: Prj, 3: AGN
 * @param {boolean} findRoot when true and several areas are given, locate the
 *   root area to take the master point/heading from
 * @param {boolean} firstRoot forwarded to findRootArea (pick the first root)
 * @returns {string|null} CRLF-joined file content, or null for missing input
 */
function writeAGNorN01orPRJ(job, name, sprayAreas, numOfLines, type, findRoot = false, firstRoot = false) {
  let content = [];
  if (!job || utils.isEmptyArray(sprayAreas))
    return null;
  let itemNum = 0;
  // First vertex of the first area [long, lat]
  const firstVertex = sprayAreas[0].geometry.coordinates[0][0];
  const areaCM = geoUtil.computeCMfromLon(firstVertex[0]);
  const title = name ? name : 'Untitled Area';
  content.push('%-2d NEW AREA FILE %-61s'.format(NO1_TITLE, title));
  content.push('%-2d %-2s %d'.format(NO1_CENTRAL_MERIDIAN, 'L', areaCM));
  let zoneNames = [];
  if (type === 1) {
    // No1: corner lines come from the single area's polygon vertices.
    const no1Area = sprayAreas[0];
    zoneNames.push(no1Area.properties.name);
    itemNum = 1;
    for (const vertex of no1Area.geometry.coordinates[0]) {
      content.push('%-2d %-11.6f %-11.6f AREA CORNER %d'.format(NO1_CORNER, vertex[1], vertex[0], itemNum));
      itemNum = itemNum + 1;
    }
  }
  else if (type === 2) {
    // Prj: corner lines are the 4 corners of the bounding box of all areas.
    let features = [];
    for (const area of sprayAreas) {
      area['type'] = 'Feature'; // Add the required property for GeoJSON object
      // area = turf.truncate(area, { precision: 6});
      features.push(area);
    }
    const featureCollection = {
      'type': 'FeatureCollection',
      'features': features
    };
    let areasBbox;
    try {
      areasBbox = turf.bbox(featureCollection); // bbox extent in [ minX, minY, maxX, maxY ] order
    } catch (error) { }
    if (areasBbox) {
      // Corners of the bounds
      content.push('%-2d %-11.6f %-11.6f AREA CORNER %d'.format(NO1_CORNER, areasBbox[1], areasBbox[0], 1));
      content.push('%-2d %-11.6f %-11.6f AREA CORNER %d'.format(NO1_CORNER, areasBbox[3], areasBbox[0], 2));
      content.push('%-2d %-11.6f %-11.6f AREA CORNER %d'.format(NO1_CORNER, areasBbox[3], areasBbox[2], 3));
      content.push('%-2d %-11.6f %-11.6f AREA CORNER %d'.format(NO1_CORNER, areasBbox[1], areasBbox[2], 4));
    }
  }
  if (type <= 2 && job.waypoints) {
    itemNum = 1;
    for (const wpt of job.waypoints) {
      // NOTE(review): replace(' ', '_') only replaces the FIRST space of the name.
      content.push('%-2d %-11.6f %-11.6f%-5.5s WAYPOINT %d'.format(NO1_WAYPOINT, wpt.geometry.coordinates[1], wpt.geometry.coordinates[0],
        (!utils.isBlank(wpt.properties.name) ? wpt.properties.name.replace(' ', '_') : ''), itemNum));
      itemNum = itemNum + 1;
    }
  }
  // Negative numOfLines means "count the lines carried by the areas".
  let _numOfLines = 0;
  if (numOfLines < 0) {
    for (let i = 0; i < sprayAreas.length; i++) {
      if (!utils.isEmptyArray(sprayAreas[i].lines))
        _numOfLines += sprayAreas[i].lines.length;
    }
  }
  else {
    _numOfLines = numOfLines;
  }
  if (_numOfLines)
    content.push('%-2d %11d NUMBER OF LINES'.format(NO1_NUMBER_OF_LINES, _numOfLines));
  content.push('%-2d %11d MAX CROSS TRACK'.format(NO1_MAX_CROSS_TRACK, 200));
  // WGS-84 has no datum shift; k0 is the fixed UTM central-meridian scale.
  let wgs84Default = {
    delx: 0,
    dely: 0,
    delz: 0,
    k0: 0.9996000000, // UTM scale on the central meridian, fixed constant
    xshift: 0.0,
    yShift: 0.0
  };
  content.push('%-2d %5d %5d %5d DELTA X/Y/Z'.format(NO1_DELTA_X_Y_Z, wgs84Default.delx, wgs84Default.dely, wgs84Default.delz));
  content.push('%-2d %12f %9.1f %9.1f K0, X/Y SHIFT'.format(NO1_K0_XY_SHIFT, wgs84Default.k0, wgs84Default.xshift, wgs84Default.yShift));
  // Master point / heading come from the root area (or the first area),
  // falling back to the first polygon vertex.
  let masterPoint, heading = 0, rootArea = sprayAreas[0];
  if (findRoot && sprayAreas.length > 1) {
    rootArea = findRootArea(sprayAreas, firstRoot);
  }
  if (!utils.isEmptyArray(rootArea.masterPoint) && rootArea.masterPoint.length >= 2)
    masterPoint = rootArea.masterPoint;
  else
    masterPoint = [firstVertex[1], firstVertex[0]];
  heading = geoUtil.to180Range(rootArea.latlngHeading);
  content.push('%-2d %-11.6f %11.6f %6.2f MASTER POINT HEADING'.format(NO1_MASTER_POINT_HEADING, masterPoint[0], masterPoint[1], utils.fixedTo(heading), 1));
  // NOTE(review): 298.257223123 differs from the WGS-84 inverse flattening
  // (298.257223563) used by writeShapePRJ — confirm the console unit really
  // expects this value.
  content.push('%-2d %-11s %11.1f %13f %8d ELLIPSOID'.format(NO1_ELLIPSOID, 'WGS-84', 6378137.0, 298.257223123, 22));
  let equatorialCrossing = firstVertex[1] > 0 ? 1 : 2;
  content.push('%-2d %10d NO EQUATORIAL CROSSING, %s HEMISPHERE'.format(NO1_EQUATORIAL_CROSSING, equatorialCrossing,
    equatorialCrossing === 1 ? 'N' : 'S'));
  content.push('%-2d %10.1f %9d %2d SWATH WIDTH, NO A/C, SPRAY ON CLOSURE OF CONTACT'.format(NO1_SWATHWIDTH_AC_INSIDE_AREA, job.swathWidth, 1, 1));
  content.push('%-2d %10d HALF-SWATH OFFSET OF MASTER LINE'.format(NO1_HALF_SWATH_OFFFSET_1ST_TIME, 1));
  content.push('%-2d %10d %s'.format(NO1_DISPLAY_UNIT, job.measureUnit ? 1 : 2, job.measureUnit ? 'U.S. SYSTEM' : 'MET SYSTEM'));
  // content.push('%-2d %5d %7d RACE TRACK, SKIP TRACK'.format(NO1_RACE_SKIP, 5, 1));
  content.push('%-2d %10d MAGNETIC VARIATION, Deg.'.format(NO1_MAGNETIC_VARIATION, 0));
  // System params, default for now
  let systemLag = 0.80;
  let relayOn = 0.65, relayOff = 0.5;
  content.push('%-2d %-10.2f SYSTEM LAG, sec.'.format(NO1_SYSETEM_LAG, systemLag));
  content.push('%-2d %-5.2f %7.2f RELAY ON/OFF, sec.'.format(NO1_RELAY_ON_OFF, relayOn, relayOff));
  // Nov.08, 2021: Task#298 - Not writing these 2 => Let the console unit decide later on.
  // content.push('%-2d %-27d SPRAY PATTERN 0-B&F, 1-RT 2-SQZ 3-SKP 4-Split 5-Expand'.format(NO1_PATTERN, 0));
  // content.push('%-2d %-27d EXPAND PATTERN SIDE 0-Right, 1-Left'.format(NO1_EXPAND_LEFT_RIGHT, 1));
  if (type === 2) { // only PRJ write area names
    for (let i = 0; i < sprayAreas.length; i++) {
      const area = sprayAreas[i];
      // NOTE(review): the `|| ''` guards the replace() RESULT, so a missing
      // name still throws before the fallback applies — verify names are
      // always set here.
      content.push('%-2d %s'.format(PRJ_ZONE_NAME, area.properties.name.replace(' ', '_') || ''));
    }
  }
  // One flow-rate line per area, falling back to the job-level rate.
  for (let i = 0; i < sprayAreas.length; i++) {
    const area = sprayAreas[i];
    const appRate = area.properties.appRate ? area.properties.appRate : job.appRate;
    content.push('%-2d %-27.2f AGNAV FLOW RATE %s'.format(AGN_APPLICATION_RATE, appRate, codeToAppRateUnit(job.appRateUnit, job.measureUnit)));
  }
  content.push('%-3d %-6.1f %-6.1f %-6.1f %-6.1f SEGMENT SWATH'.format(NO1_SWATHWIDTH_SEGMENTS, job.swathWidth, 0.0, 0.0, 0.0));
  content.push('%-3d %-26d AREA TYPE'.format(NO1_AREATYPE, 0));
  // POLYGON FILL COLOR
  // content.push('%-3d %-25s POLYGON FILL COLOR'.format(NO1_POLYGON_FILL_COLOR, 'colorforwhich??');
  return content.join('\r\n');
}
/**
 * Map an application-rate unit code to its AgNav unit mnemonic.
 *
 * @param {number} unitCode 0: OZPA, 1: GPA, 2: LBPA, 3: LPH, 4: KGPH
 * @param {*} isUS truthy when the job uses the U.S. measurement system
 * @returns {string} the unit mnemonic; unknown codes fall back to the
 *   system default (GPA for U.S., LPH for metric)
 */
function codeToAppRateUnit(unitCode, isUS) {
  const UNIT_BY_CODE = { 0: 'OZPA', 1: 'GPA', 2: 'LBPA', 3: 'LPH', 4: 'KGPH' };
  // Fix: the fallback was inverted — U.S. got "LPH" (liters/hectare, metric)
  // and metric got "GPA" (gallons/acre, U.S.).
  const fallback = isUS ? 'GPA' : 'LPH';
  return UNIT_BY_CODE[unitCode] || fallback;
}
/** POST handler: delegate to the map download flow. */
async function downloadMap_post(req, res) {
  return downloadJobwMap(req, res);
}
/**
 * Render the job map to a geo-referenced JPEG (plus .jgw world file) and
 * stream it to the client as a zip, optionally bundled with the job's
 * navigation files.
 *
 * Pipeline: screenshot the map page, tag it with Web Mercator bounds,
 * rescale if oversized, warp to the UTM zone of the map center, crop the
 * edges, then derive the world file from GDAL's aux metadata.
 *
 * @param {*} req HTTP request; body carries { jobId, type, mapOps }
 * @param {*} res HTTP response (receives the zip stream)
 * @throws rethrows any processing error after logging it; temp files are
 *   always cleaned up
 */
async function downloadJobwMap(req, res) {
  const dlReq = req.body;
  // debug('Request params: ', util.inspect(JSON.stringify(_dlReq), false, null));
  const _mapOps = dlReq.mapOps;
  let targetFileName, outFileName;
  if (!dlReq.jobId || !_mapOps.width || !_mapOps.height || !_mapOps.center || !_mapOps.zoom)
    AppParamError.throw();
  try {
    const job = await Job.findById(dlReq.jobId, 'name');
    if (!job) AppError.throw(Errors.JOB_NOT_FOUND);
    const dlType = dlReq.type || 0, userInfo = req.userInfo;
    targetFileName = path.join(env.TEMP_DIR, job._id + '_' + uniqid());
    let imgFilePath = targetFileName + '.jpg', imgTifFileName, imgUTMFileName, realName = job.name,
      pageWidth = Math.trunc(_mapOps.width), pageHeight = Math.trunc(_mapOps.height);
    const renderParams = urlHelper.encodeQueryData({
      base: _mapOps.base,
      zoom: _mapOps.zoom,
      clat: _mapOps.center.lat,
      clon: _mapOps.center.lng,
      width: pageWidth,
      height: pageHeight,
      premium: (userInfo ? userInfo.premium : 0)
    });
    const reportUrl = `${req.protocol}://${req.hostname}/public/downloadMap.html?${renderParams}`;
    await fs.ensureDir(env.TEMP_DIR);
    // Screenshot the map render page at full quality.
    await webUtil.webShot({
      url: reportUrl,
      type: 'jpeg', quality: 100, width: pageWidth, height: pageHeight,
      path: imgFilePath
    });
    // 1. Re-project the image from Web Mercater to UTM at the zone based on the center of the bounds
    let northWest = _mapOps.bounds.NW,
      southEast = _mapOps.bounds.SE,
      center_UTM = new app.locals.LatLonUTM(_mapOps.center.lat, _mapOps.center.lng).toUtm();
    outFileName = `${targetFileName}-out`;
    imgTifFileName = outFileName + '.tiff';
    imgUTMFileName = `${outFileName}-utm.tiff`;
    const ullr = ` -a_ullr ${northWest.x} ${northWest.y} ${southEast.x} ${southEast.y}`;
    const cmd1 = `gdal_translate ${ullr} -a_srs EPSG:3857 ${imgFilePath} ${imgTifFileName} -co COMPRESS=JPEG`;
    await utils.execAsync(cmd1);
    // 1.1. Check and rescale if the image size constraints over MAX_MAP_DIMEN pixels on the largest dimension
    let maxDimen = Math.max(pageWidth, pageHeight);
    if (maxDimen >= MAX_MAP_DIMEN) {
      // Scale down preserving aspect ratio so the largest side is MAX_MAP_DIMEN.
      let newWidth, newHeight;
      if (pageWidth > pageHeight) {
        newWidth = MAX_MAP_DIMEN;
        newHeight = (pageHeight / pageWidth) * MAX_MAP_DIMEN;
      }
      else {
        newHeight = MAX_MAP_DIMEN;
        newWidth = (pageWidth / pageHeight) * MAX_MAP_DIMEN;
      }
      pageWidth = Math.trunc(newWidth);
      pageHeight = Math.trunc(newHeight);
      const outSize = `-outsize ${pageWidth} ${pageHeight}`, croppedTif = outFileName + '_crop.tiff';
      // Fix: build the rescale command BEFORE redirecting imgTifFileName to the
      // cropped file. Previously the input name was reassigned first, so
      // gdal_translate was asked to read and write the very same file.
      const cmd11 = `gdal_translate ${imgTifFileName} ${croppedTif} ${outSize} -co COMPRESS=JPEG`;
      await utils.execAsync(cmd11);
      imgTifFileName = croppedTif;
    }
    // 2. Reproject Image from Web Mecarter to UTM projection
    let zoneDef = `+zone=${center_UTM.zone}`;
    if (center_UTM.hemisphere === 'S')
      zoneDef = zoneDef + ' +south';
    const cmd2 = `gdalwarp ${imgTifFileName} ${imgUTMFileName} -s_srs EPSG:3857 -t_srs "+proj=utm ${zoneDef} +ellps=WGS84 +datum=WGS84 +units=m +no_defs" -overwrite -co COMPRESS=JPEG`;
    await utils.execAsync(cmd2);
    // 3. Crop 4% on all edges toward center to remove black edges if any
    let cropX = Math.trunc(pageWidth * 0.04), cropY = Math.trunc(pageHeight * 0.04);
    const srcWin = `-srcwin ${cropX} ${cropY} ${pageWidth - (2 * cropX)} ${pageHeight - (2 * cropY)}`;
    const cmd3 = `gdal_translate -of jpeg ${srcWin} ${imgUTMFileName} ${outFileName}.jpg`;
    await utils.execAsync(cmd3);
    // 4. Build and response a .zip file contains the image + geo-referenced meta-data file
    // Read <filename>.aux.xml for aux meta-data info
    const meta = await fs.readFile(outFileName + '.jpg.aux.xml');
    const parseResult = await parserXMLAsync(meta);
    const geotf = parseResult.PAMDataset.GeoTransform[0].split(',');
    const origin = { x: parseFloat(geotf[0]), y: parseFloat(geotf[3]) };
    await writeWorldFileAync(outFileName,
      {
        origin: origin,
        scaleX: parseFloat(geotf[1]),
        scaleY: parseFloat(geotf[5]),
        rx: 0.0,
        ry: 0.0
      });
    // 5. Create zip file to response
    let archiveItems = [];
    if (dlType > 0) // dlType with 0: 'Map Only', 1:AgNav, 2:Agnav Prj, 3:Shape
      archiveItems = await makeDownloadItemsAsync(req);
    archiveItems.push({ file: outFileName + '.jpg', meta: { name: realName + '.jpg' } }, { file: outFileName + '.jgw', meta: { name: realName + '.jgw' } });
    await sendArchiveAsync(archiveItems, res);
    if (dlType) await writeJobLog(job._id, 2, req.uid, req.ut);
  } catch (err) {
    if (targetFileName) debug("input:", req.body);
    debug(err);
    throw err;
  }
  finally {
    // Clean up temp files if any
    await removeTempFilesAsync(targetFileName, outFileName);
  }
}
/**
 * Write an ESRI-style world file (<fileName>.jgw) describing the geo-transform
 * of the exported JPEG map image.
 * @param {string} fileName output base name (".jgw" is appended)
 * @param {*} meta geo-transform: { origin: {x, y}, scaleX, scaleY, rx, ry }
 * @param {Function} cb node-style callback (err, ok)
 */
function writeWorldFile(fileName, meta, cb) {
    try {
        const lineEnd = '\r\n';
        // A .jgw world file is exactly six lines, in this fixed order.
        const lines = [
            utils.truncR(meta.scaleX, 6),   // Line 1: X pixel scale
            meta.rx,                        // Line 2: X rotation angle
            meta.ry,                        // Line 3: Y rotation angle
            utils.truncR(meta.scaleY, 6),   // Line 4: Y pixel scale (typically negative)
            utils.truncR(meta.origin.x, 6), // Line 5: X origin (Easting)
            utils.truncR(meta.origin.y, 6)  // Line 6: Y origin (Northing)
        ];
        const content = lines.join(lineEnd) + lineEnd;
        fs.writeFile(fileName + '.jgw', content, { encoding: 'utf8', flag: 'w' }, (err) => {
            if (err)
                return cb(err, false);
            return cb(null, true);
        });
    } catch (err) {
        return cb(err, false);
    }
}
/**
 * Record a Job log entry and update the Job status to the same event type.
 * For DEVICE users, also upsert the matching JobAssign record as DOWNLOADED.
 * @param {*} jobId Job Id
 * @param {*} type 0: New, 1: Ready, 2: Download, 3: Sprayed (default: Download)
 * @param {*} userId the userId that performed the event
 * @param {*} userType the user type (see UserTypes)
 */
async function writeJobLog(jobId, type = 2, userId, userType) {
    if (!jobId || !userId) return;
    // Log the actual event type; previously this hardcoded 2 (Download),
    // which disagreed with the `type` parameter and the status update below.
    await JobLog.create({ job: jobId, type: type, user: userId });
    if (userType === UserTypes.DEVICE) {
        const dlAssign = { user: userId, job: jobId, status: AssignStatus.DOWNLOADED, date: new Date() };
        // Only pending (status 0) assignments are promoted; upsert creates one if missing.
        await JobAssign.updateOne({ user: ObjectId(userId), job: jobId, status: 0 }, dlAssign, { upsert: true });
    }
    await Job.updateOne({ _id: jobId }, { $set: { status: type } }); // Job status mirrors the logged event
}
/**
 * Find the root area in a list of spray areas.
 * When `firstIsRoot` is set, the root is the area whose `mems` list contains
 * the first area's id; otherwise it is the first area carrying lines info
 * (`latlngHeading`). Falls back to the first area when nothing matches.
 * @param {*} sprayAreas list of spray-area documents
 * @param {boolean} firstIsRoot selection strategy (see above)
 * @returns the root area, or null for an empty/missing list
 */
function findRootArea(sprayAreas, firstIsRoot = false) {
    if (utils.isEmptyArray(sprayAreas)) return null;
    const firstId = sprayAreas[0]._id.toHexString();
    const isRoot = firstIsRoot
        ? (area) => !utils.isEmptyArray(area.mems) && area.mems.includes(firstId)
        : (area) => !!utils.getProp(area, "latlngHeading", null);
    const match = sprayAreas.find(isRoot);
    return match || sprayAreas[0];
}
/**
 * Delete the intermediate artifacts produced by the map-export pipeline.
 * @param {string} targetFileName base name of the downloaded source image (may be falsy)
 * @param {string} outFileName base name of the generated outputs (may be falsy)
 * @param {Function} cb node-style completion callback
 */
function removeTempFiles(targetFileName, outFileName, cb) {
    const files = [];
    if (outFileName) {
        for (const ext of ['.jpg', '.jgw', '.jpg.aux.xml', '.tiff', '-utm.tiff', '.zip'])
            files.push(outFileName + ext);
    }
    if (targetFileName)
        files.push(targetFileName + '.jpg');
    fileHelper.removeFiles(files, cb);
}
/**
 * Stream an uploaded application file back to the client as an attachment.
 * Stored files are named "<original-name>_<unique-suffix>"; the suffix is
 * stripped for the download file name.
 * @param {*} req expects `req.query.file` (URI-encoded file name)
 * @param {*} res response; 404 on missing/invalid/unreadable file
 */
function downloadAppfile_get(req, res) {
    // Check the raw param first: decodeURIComponent(undefined) returns the
    // truthy string "undefined", which defeated the old emptiness check.
    if (!req.query.file) return writeNotFound(res);
    let fileName;
    try {
        fileName = decodeURIComponent(req.query.file);
    } catch (err) {
        // Malformed percent-encoding throws URIError; treat as not found.
        return writeNotFound(res);
    }
    // Prevent path traversal: the resolved path must stay inside UPLOAD_DIR.
    const baseDir = path.resolve(env.UPLOAD_DIR);
    const fileLoc = path.resolve(baseDir, fileName);
    if (!fileLoc.startsWith(baseDir + path.sep))
        return writeNotFound(res);
    const stream = fs.createReadStream(fileLoc);
    // Wait until the readable stream is actually valid before piping.
    stream.on('open', () => {
        let cut = fileName.lastIndexOf('_');
        cut = cut !== -1 ? cut : fileName.length;
        // File exists; send it to the user with the original name restored.
        res.writeHead(200, {
            "Content-Disposition": "attachment;filename=" + `${fileName.substring(0, cut)}`,
        });
        stream.pipe(res);
    });
    stream.on('error', () => {
        writeNotFound(res);
    });
}
/**
 * Finish a response with a plain-text 404 page.
 * @param {*} res HTTP response object
 */
function writeNotFound(res) {
    const status = 404;
    const reason = 'Not Found';
    res.writeHead(status, reason);
    res.write(`${status}: File Not Found!`);
    res.end();
}
/**
 * Download the caller's USER-type obstacles as an FAA-formatted text file.
 * Looks up obstacles owned by the requesting user or their parent account.
 * @param {*} req expects `req.userInfo` (with `puid`) and `req.uid`
 * @param {*} res response; always sends obstacles.dat (possibly empty list)
 */
async function downloadObs_post(req, res) {
    const { userInfo } = req;
    let obstacles = [];
    if (userInfo) {
        const owners = [{ byUser: ObjectId(req.uid) }, { byUser: ObjectId(userInfo.puid) }];
        obstacles = await Obstacle.find({ $or: owners, "properties.type": "USER" }).lean();
    }
    res.setHeader('Content-disposition', 'attachment; filename=obstacles.dat');
    res.setHeader('Content-type', 'text/plain');
    res.charset = 'UTF-8';
    res.write(writeFAAUserObs(obstacles));
    res.end();
}
/**
 * Build FAA DOF-style text content for a collection of user obstacles.
 * Emits a currency-date header, column headers, then one fixed-width record
 * line per obstacle.
 * @param {*} obs obstacle documents (GeoJSON point features); may be null/empty
 * @returns {string} the full file content, lines joined with '\n'
 */
function writeFAAUserObs(obs) {
    if (!obs || !obs.length)
        obs = [];
    const Dms = app.locals.Dms;
    Dms.separator = '';
    let content = [];
    content.push(' CURRENCY DATE = %s'.format(moment.utc().format('MM/DD/Y')));
    content = content.concat([
        ' LATITUDE LONGITUDE OBSTACLE AGL AMSL LT ACC MAR FAA ACTION',
        'OAS# V CO ST CITY DEG MIN SEC DEG MIN SEC TYPE HT HT H V IND STUDY JDATE',
        '-------------------------------------------------------------------------------------------------------------------------------'
    ]);
    for (let i = 0; i < obs.length; i++) {
        const ob = obs[i];
        const prop = ob.properties, coor = ob.geometry.coordinates;
        const agl = (Number(prop.agl) || 0).toFixed(0);
        const dataObj = {
            // Fallback name must be a template literal; a single-quoted
            // 'TOWER_${i + 1}' was emitted verbatim without interpolation.
            osa: '%-9s'.format(utils.normalizeName(prop.name || `TOWER_${i + 1}`).substring(0, 9)),
            ver: 'U',
            country2: '--',
            state2: '--',
            city16: '%-16s'.format('UNKNOWN'),
            // Replace DMS degree (°) and minute (′) marks with spaces and drop
            // the second mark (″). NOTE(review): the minute-prime was garbled
            // to an empty string in the source — reconstructed here; confirm.
            latDMS: Dms.toLat(coor[1], 'dms', 2).replace('°', ' ').replace('′', ' ').replace('″', ''),
            lonDM: Dms.toLon(coor[0], 'dms', 2).replace('°', ' ').replace('′', ' ').replace('″', ''),
            type: '%-18s'.format('TOWER'),
            qty: '1',
            agl: '%s'.format(utils.padZero(agl, 5)), // pad left 0 - feet
            amsl: '%s'.format(utils.padZero(utils.isNumber(Number(prop.amsl)) ? Number(prop.amsl).toFixed(0) : agl, 5)) // pad left 0 - feet
        };
        let line = Object.values(dataObj).join(' ');
        // Fixed trailing columns (accuracy/marking/study/JDATE), padded to 128 chars.
        line = '%-128s'.format(line + ' U 9 I U %-14s A %-7s'.format('', utils.julianDate(new Date().toUTCString(), true).dateStr));
        content.push(line);
    }
    return content.join('\n');
}
return ({
anyJob_post, newJobs_post, downloadJob_post, downloadMap_post, downloadAppfile_get, downloadObs_post
});
}