// agm job-util — job/area helper utilities
'use strict';
|
|
|
|
const
|
|
debug = require('debug')('agm:job-util'),
|
|
App = require('../model/application'),
|
|
AppDetail = require('../model/application_detail'),
|
|
JobAssign = require('../model/job_assign'),
|
|
AreaLine = require('../model/areas_lines'),
|
|
ObjectId = require('mongodb').ObjectId,
|
|
Area = require('../model/area'),
|
|
util = require('util'),
|
|
utils = require('./utils'),
|
|
turf = require('@turf/turf'),
|
|
simplify = require('simplify-path'),
|
|
GeojsonRbush = require('@mickeyjohn/geojson-rbush').default,
|
|
polyUtil = require('./poly_util'),
|
|
mongoUtil = require('./mongo'),
|
|
{ Errors, UserTypes } = require('./constants'),
|
|
{ AppError, AppInputError, AppParamError } = require('./app_error'),
|
|
_ = require('lodash');
|
|
|
|
/**
 * Check that a job is assigned to the requesting vehicle (device).
 * Non-device user types are always allowed.
 * @param {*} userId requesting user's id (hex string or ObjectId)
 * @param {*} userType one of UserTypes; only DEVICE is checked
 * @param {*} jobId the job to check
 * @returns true when allowed
 * @throws AppInputError on missing/invalid input, AppError(JOB_NOT_ASSIGNED) when no active assignment exists
 */
async function isJobAssignedToVehicle(userId, userType, jobId) {
  if (userType != UserTypes.DEVICE) return true;
  // BUG FIX: previously `return cb(AppInputError.create())` — but there is no
  // `cb` in scope in this async function, so invalid input raised a
  // ReferenceError instead of the intended input error.
  if (!userId || !userType || !ObjectId.isValid(userId) || !jobId) throw AppInputError.create();

  // status 0 = active assignment
  const job = await JobAssign.findOne({ user: ObjectId(userId), job: jobId, status: 0 }, '_id', { lean: true });
  if (!job) AppError.throw(Errors.JOB_NOT_ASSIGNED);

  return true;
}
|
|
|
|
/**
 * Clean a list of GeoJson polygon areas in place:
 *  - remove entries with missing geometry/coordinates or degenerate rings (< 4 distinct points)
 *  - simplify rings carrying many points and compute a `center` when absent
 *  - remove duplicated areas
 * @param {*} areas GeoJson polygon features
 * @param {*} cb optional node-style callback; when omitted the cleaned list is returned
 */
function cleanAreas(areas, cb) {
  if (utils.isEmptyArray(areas)) return cb ? cb(null, []) : [];

  let i = areas.length - 1;
  while (i >= 0) {
    if (!areas[i].geometry || !areas[i].geometry.coordinates) {
      // BUG FIX: entries without geometry were previously only skipped
      // (`i--; continue;`), leaving invalid items that made the
      // simplification loop below throw on `.geometry.coordinates[0]`.
      areas.splice(i, 1);
      i--;
      continue;
    }
    const aCoors = polyUtil.removeSameLatLns(areas[i].geometry.coordinates[0]);
    // A closed polygon ring needs at least 4 points (first === last).
    if (aCoors.length < 4) {
      areas.splice(i, 1);
    }
    i--;
  }

  // Remove too-close points
  for (let j = 0; j < areas.length; j++) {
    if (!areas[j]['type']) areas[j]['type'] = 'Feature';
    if (areas[j].geometry.coordinates[0].length > 10) {
      try {
        // TODO: check before and after simplification whether the area's coors still valid !!!
        const simplifiedCoors = simplify(areas[j].geometry.coordinates[0], 0.00001);
        areas[j].geometry.coordinates[0] = simplifiedCoors;

        if (!utils.hasProperty(areas[j], 'center'))
          areas[j]['center'] = turf.center(areas[j]);
      } catch (error) {
        // Best-effort: a failed simplification keeps the original ring.
        debug(error);
      }
    }
  }

  // Remove duplicated areas
  areas = cleanDupAreas(areas);

  return cb ? cb(null, areas) : areas;
}
|
|
|
|
/**
 * Remove duplicate GeoJson points: two points are considered the same when
 * both coordinates match to 6 decimal places.
 * @param {*} points GeoJson point features
 * @param {*} cb optional node-style callback; when omitted the deduped list is returned
 */
function cleanGeoPoints(points, cb) {
  if (utils.isEmptyArray(points)) return cb ? cb(null, []) : [];

  // Two points are equal iff both have geometry and lng/lat agree at 6 dp.
  const samePoint = (a, b) => {
    if (!a.geometry || !b.geometry) return false;
    return utils.fixedTo(a.geometry.coordinates[0], 6) === utils.fixedTo(b.geometry.coordinates[0], 6)
      && utils.fixedTo(a.geometry.coordinates[1], 6) === utils.fixedTo(b.geometry.coordinates[1], 6);
  };

  const unique = _.uniqWith(points, samePoint);
  return cb ? cb(null, unique) : unique;
}
|
|
|
|
/**
 * Remove duplicated areas, where "duplicate" means equal rounded surface area
 * and a center matching to 6 decimal places. Caches `orgArea` and `center`
 * on each feature as a side effect; features that turf cannot measure are dropped.
 * @param {*} areas GeoJson polygon features
 * @param {*} cb optional node-style callback; when omitted the deduped list is returned
 */
function cleanDupAreas(areas, cb) {
  let newAreas = [];
  if (areas && areas.length) {
    const seen = new Set();
    newAreas = areas.filter(function (it) {
      try {
        // Cache area/center on the feature so later passes don't recompute.
        if (!utils.hasProperty(it, 'orgArea')) it['orgArea'] = turf.area(it);
        if (!utils.hasProperty(it, 'center')) it['center'] = turf.center(it);
      } catch (error) {
        debug(error);
        return false; // unmeasurable feature — treat as invalid and drop
      }
      const key = `${utils.fixedTo(it['orgArea'], 0)}_${utils.fixedTo(it['center'].geometry.coordinates[0], 6)}_${utils.fixedTo(it['center'].geometry.coordinates[1], 6)}`;
      if (seen.has(key)) return false;
      seen.add(key);
      return true;
    });
  }
  return cb ? cb(null, newAreas) : newAreas;
}
|
|
|
|
|
|
/**
 * Ensure a GeoJson object carries `type: "Feature"` (mutates in place).
 * @param {*} geoJson candidate GeoJson object; null/undefined is returned unchanged
 */
function setFeatureProp(geoJson) {
  // Nothing to normalize on a null/undefined input — echo it back.
  if (!geoJson) return geoJson;

  if (geoJson.type !== "Feature") geoJson.type = "Feature";
}
|
|
|
|
/**
 * Calculate the total sprayable area given spray areas and exclusion areas.
 * Each spray area contributes its surface minus any overlap with nearby
 * exclusion areas (located via an R-tree built lazily on first use).
 * @param {*} sprAreas GeoJson spray polygons
 * @param {*} xclAreas GeoJson exclusion polygons (optional)
 * @returns The total sprayable area in square meters
 */
function calcTTSprayAreas(sprAreas, xclAreas) {
  if (utils.isEmptyArray(sprAreas)) return 0;

  // Normalize exclusion features up front so the R-tree accepts them.
  if (!utils.isEmptyArray(xclAreas)) {
    for (let k = 0; k < xclAreas.length; k++) setFeatureProp(xclAreas[k]);
  }

  let tree = null;
  let total = 0;

  for (const sprFeature of sprAreas) {
    setFeatureProp(sprFeature);

    let netArea = turf.area(sprFeature);

    if (xclAreas && xclAreas.length) {
      // Build the exclusion index once, on first spray area that needs it.
      if (!tree) {
        tree = new GeojsonRbush();
        tree.load(xclAreas);
      }

      const candidates = tree.search(sprFeature);
      for (const xclFeature of candidates.features) {
        const overlap = turf.intersect(sprFeature, xclFeature);
        if (overlap) {
          const overlapArea = turf.area(overlap);
          if (overlapArea) netArea -= overlapArea;
        }
      }
    }

    total += netArea;
  }

  return total;
}
|
|
|
|
// function addGeoType(item) {
|
|
// if (!item) return item;
|
|
// if (!item['type'] || item['type'] !== 'Feature') item['type'] = 'Feature';
|
|
// return item;
|
|
// }
|
|
|
|
/**
 * Convert a spray area into an exclusion (XCL) area: a deep copy with
 * appRate 0, red color, and type 1.
 * @param {*} sprayArea GeoJson spray polygon; falsy input is returned as-is
 * @returns the new exclusion feature (the source is never mutated)
 */
function sprayToXCL(sprayArea) {
  if (!sprayArea) return sprayArea;
  let xcl;
  try {
    // Deep clone via JSON round-trip so the caller's feature stays untouched.
    xcl = JSON.parse(JSON.stringify(sprayArea));
    // BUG FIX: a spray area without `properties` used to throw here, get
    // logged, and return a half-initialized clone — create the bag instead.
    if (!xcl['properties']) xcl['properties'] = {};
    xcl['properties']['appRate'] = 0;
    xcl['properties']['color'] = 'red';
    xcl['properties']['type'] = 1;  // 1 = exclusion area
  } catch (error) {
    debug(error);
  }
  return xcl;
}
|
|
|
|
/**
 * Delete an application by id, optionally soft-deleting (mark only).
 * @param {*} appId application id (hex string or ObjectId)
 * @param {boolean} markDeleted when true, mark as deleted instead of removing
 *        (typo `markeDeleted` fixed; JS callers pass positionally, so this is safe)
 * @throws AppParamError when appId is missing or not a valid ObjectId
 */
async function deleteAppById(appId, markDeleted = false) {
  if (!appId || !ObjectId.isValid(appId)) AppParamError.throw();

  const app = await App.findById(ObjectId(appId));
  if (app)
    await app.removeFull(null, markDeleted);
}
|
|
|
|
/**
 * Delete the grid lines belonging to the given areas.
 * @param {*} areaIds area Id list in ObjectId type
 * @param {*} ses optional mongoose session for transactional deletes
 * @returns a Promise resolving to the bulkWrite result, or "" when there is nothing to delete
 */
function deleteAreaLines(areaIds, ses) {
  if (utils.isEmptyArray(areaIds)) return Promise.resolve("");

  // One deleteOne op per area id, executed as a single bulk write.
  const bulkDelOps = areaIds.map((areaId) => ({
    deleteOne: { filter: { areaId } },
  }));

  return bulkDelOps.length
    ? AreaLine.bulkWrite(bulkDelOps, { session: ses })
    : Promise.resolve("");
}
|
|
|
|
/**
 * Check newAreas against srcAreas for duplication.
 * The input lists must be in GeoJson polygon format.
 * Notes: uses an R-Tree for best performance in quickly locating near-by areas of each newAreas
 * @param {*} srcAreas the source areas list to check against
 * @param {*} newAreas areas to check (duplicates are spliced out in place)
 * @param {*} cb optional node-style callback receiving { areas, dup: number of duplication }
 */
function checkDupAreas(srcAreas, newAreas, cb) {
  if (utils.isEmptyArray(srcAreas) || utils.isEmptyArray(newAreas)) return cb ? cb(null, { areas: newAreas, dup: 0 }) : { areas: newAreas, dup: 0 };

  // BUG FIX: was `GeojsonRbush()` without `new` — the package's default
  // export is constructed with `new` everywhere else in this module
  // (see calcTTSprayAreas); calling an ES class without `new` throws.
  let tree = new GeojsonRbush(), sameNum = 0;

  // Ensure GeoJson with type = "Feature"
  if (!srcAreas[0].type || srcAreas[0].type != "Feature")
    srcAreas = srcAreas.map(it => { it.type = "Feature"; return it; });
  try {
    tree.load(srcAreas);
  } catch (error) {
    debug(error);
    throw error;
  }

  // Ensure GeoJson with type = "Feature"
  if (!newAreas[0].type || newAreas[0].type != "Feature")
    newAreas = newAreas.map(it => { it.type = "Feature"; return it; });

  // Walk backwards so splicing removed duplicates doesn't skip elements.
  let i = newAreas.length - 1, nearItems;
  while (newAreas.length && i >= 0) {
    nearItems = tree.search(newAreas[i]);
    if (isSameArea(nearItems.features, newAreas[i])) {
      newAreas.splice(i, 1);
      sameNum++;
    }
    i--;
  }

  tree = null; // release the index for GC

  return cb ? cb(null, { areas: newAreas, dup: sameNum }) : { areas: newAreas, dup: sameNum };
}
|
|
|
|
/**
 * Check whether `area` duplicates any entry of `areas`: same surface area
 * and centers within 0.01 km of each other.
 * @param areas GeoJson areas
 * @param area GeoJson area
 * @returns true when a duplicate is found; false otherwise (including on turf errors)
 */
function isSameArea(areas, area) {
  if (utils.isEmptyArray(areas) || !area) return false;
  try {
    const itemArea = turf.area(area), itemCenter = turf.center(area);
    for (let i = 0; i < areas.length; i++) {
      const _area = areas[i];
      // BUG FIX: previously compared `itemArea === turf.area(area)` — the
      // candidate against itself, which is always true — so the surface-area
      // check was a no-op and only the center distance mattered.
      if (itemArea === turf.area(_area) && turf.distance(turf.center(_area), itemCenter) <= 0.01)
        return true;
    }
  } catch (error) {
    debug(error);
    return false;
  }
  return false;
}
|
|
|
|
/**
 * Add areas to the Areas Library for a Client (customer/farmer). The processing
 * includes duplication checking against the client's existing areas to avoid duplicates.
 * @param {*} areas the Areas list (GeoJson polygons)
 * @param {*} ops { returnAreas: boolean, clientId: ObjectId or string hex clientId, debug: boolean }
 * @returns { areas: <imported areas>, dup: <number of duplicates skipped> }
 *          NOTE(review): ops.returnAreas is accepted but currently unused —
 *          the imported areas are always returned; confirm intended contract.
 */
async function addAreasToLib(areas, ops = {}) {
  const _debug = ops.debug || false;
  let dup = 0, resAreas = [];

  if (_debug) debug('Cleaning Areas ...');

  let _areas = await cleanAreasAsync(areas);

  if (!utils.isEmptyArray(_areas) && ObjectId.isValid(ops.clientId)) {
    if (_debug) debug('Checking Duplication ...');

    const clientAreas = await Area.find({ client: ObjectId(ops.clientId) }, { __v: 0, _id: 0 }, { lean: true });
    const checkRes = await checkDupAreasAsync(clientAreas, _areas);
    dup = checkRes.dup;
    _areas = checkRes.areas;
  }

  if (!utils.isEmptyArray(_areas)) {
    if (_debug) debug('Importing Data ...');

    for (let i = 0; i < _areas.length; i++) {
      delete _areas[i]['_id'];
      _areas[i]['client'] = ObjectId(ops.clientId);
      // Promote hex-string crop ids to ObjectId for the model.
      if (_areas[i].properties.crop && typeof _areas[i].properties.crop == 'string' && ObjectId.isValid(_areas[i].properties.crop))
        _areas[i].properties.crop = ObjectId(_areas[i].properties.crop);
    }

    // Insert in bounded chunks inside one transaction.
    const chunks = utils.chunkArray(_areas, 800);
    await mongoUtil.runInTransaction(async (session) => {
      for (const chunk of chunks) {
        // map chunk to Area model shape (drops extraneous props like orgArea/center)
        const _chunk = chunk.map(it => ({
          client: it.client,
          properties: it.properties,
          geometry: it.geometry
        }));
        // BUG FIX: previously inserted `chunk` instead of the mapped `_chunk`,
        // so the model mapping above had no effect.
        const addedAreas = await Area.insertMany(_chunk, { session, ordered: true, lean: true });
        if (!utils.isEmptyArray(addedAreas)) resAreas = resAreas.concat(addedAreas);
      }
    });
  }

  return ({ areas: resAreas, dup: dup });
}
|
|
|
|
/**
 * Ensure loadOp has the default structure when null or partially filled.
 * Falsy field values (0, null, undefined) fall back to the defaults.
 */
function defLoadOp(loadOp) {
  const src = loadOp || {};
  return {
    date: src.date || Date.now(),
    area: src.area || 0,
    capacity: src.capacity || 0,
    loadType: src.loadType || 0,
    loads: src.loads || 0,
  };
}
|
|
|
|
/**
 * Aggregate weather statistics (wind, temperature, humidity) over the
 * application-detail rows belonging to the given files. Rows with readings
 * outside the plausible sensor ranges are excluded from the averages.
 * @param {*} fileIds file ids to aggregate over
 * @returns single-element array with avg/max/min stats, or [] when nothing matches
 */
async function getDataWeatherInfo(fileIds) {
  const matchStage = {
    $match: {
      fileId: { $in: fileIds },
      windSpd: { $gt: 0.0 },
      windDir: { $gte: 0.0, $lte: 360 },
      temp: { $gte: 5.0, $lte: 60.0 },
      humid: { $gte: 9.0, $lte: 90.0 }
    }
  };

  const groupStage = {
    $group: {
      _id: null,
      avgWindSpd: { $avg: "$windSpd" },
      maxWindSpd: { $max: "$windSpd" },
      minWindSpd: { $min: "$windSpd" },
      avgWindDir: { $avg: "$windDir" },
      maxWindDir: { $max: "$windDir" },
      minWindDir: { $min: "$windDir" },
      avgTemp: { $avg: "$temp" },
      avgHumid: { $avg: "$humid" }
    }
  };

  return await AppDetail.aggregate([matchStage, groupStage]);
}
|
|
|
|
/* Export asynchronous functions as Promise Async functions to avoid blocking the Node Event Loop when dealing with large data */
// These wrap the callback-style helpers above (each takes an optional
// node-style `cb` as its last argument).
// NOTE(review): `cleanDupAreasAync` is missing an 's' (should be
// `cleanDupAreasAsync`) but is exported under this name, so renaming it
// would break external callers.
const cleanAreasAsync = util.promisify(cleanAreas),
    cleanDupAreasAync = util.promisify(cleanDupAreas),
    cleanGeoPointsAsync = util.promisify(cleanGeoPoints),
    checkDupAreasAsync = util.promisify(checkDupAreas);
|
|
|
|
|
|
/**
 * Build a GeoJson exclusion (XCL) polygon from a named coordinate list.
 * @param {*} nameWithLnLats { name: string, coors: [[lng, lat], ...] }
 * @returns a GeoJson-like polygon with exclusion properties (type 1, red, appRate 0)
 */
function createXclArea(nameWithLnLats) {
  let _coors = nameWithLnLats.coors;
  // Exclusion rings must wind counter-clockwise.
  // BUG FIX: Array.prototype.reverse mutates in place — copy first so the
  // caller's coordinate array is not silently reversed.
  if (polyUtil.isClockwise(_coors))
    _coors = _coors.slice().reverse();

  const geoXcl = {
    properties: {
      name: utils.normalizeName(nameWithLnLats.name),
      type: 1,      // 1 = exclusion area
      appRate: 0,   // nothing is applied inside an exclusion
      color: 'red',
    },
    geometry: {
      type: 'Polygon',
      coordinates: [_coors]
    }
  };
  return geoXcl;
}
|
|
|
|
|
|
// Public API of the job-util module (callback helpers plus their
// promisified variants).
module.exports = {
  isJobAssignedToVehicle,
  cleanAreas, cleanAreasAsync, cleanDupAreasAync, cleanGeoPoints, cleanGeoPointsAsync, sprayToXCL, deleteAppById, deleteAreaLines, checkDupAreas, checkDupAreasAsync, addAreasToLib, defLoadOp, getDataWeatherInfo, createXclArea, calcTTSprayAreas
}
|