// agmission/Development/server/helpers/job_util.js (570 lines, 20 KiB, JavaScript)

'use strict';
const { JobStatus } = require('./job_constants');
const
debug = require('debug')('agm:job-util'),
Job = require('../model/job'),
JobLog = require('../model/job_log'),
JobAssign = require('../model/job_assign'),
App = require('../model/application'),
AppDetail = require('../model/application_detail'),
AreaLines = require('../model/areas_lines'),
Areas = require('../model/area'),
ObjectId = require('mongodb').ObjectId,
util = require('util'),
utils = require('./utils'),
turf = require('@turf/turf'),
simplify = require('simplify-path'),
GeojsonRbush = require('@mickeyjohn/geojson-rbush').default,
polyUtil = require('./poly_util'),
mongoUtil = require('./mongo'),
{ runWithSessionOrTransaction } = require('./mongo_enhanced'),
{ Errors, UserTypes, AssignStatus } = require('./constants'),
{ AppError, AppInputError, AppParamError } = require('./app_error'),
_ = require('lodash');
/**
 * Verify that a job is assigned to the given vehicle (device) user.
 * Non-device user types always pass the check.
 * @param {string} userId - User/vehicle ID
 * @param {number} userType - User type (see UserTypes)
 * @param {*} jobId - Job ID
 * @returns {Promise<boolean>} true when assigned (or check not applicable)
 * @throws {AppInputError} when a parameter is missing or userId is invalid
 * @throws {AppError} Errors.JOB_NOT_ASSIGNED when no active (status 0) assignment exists
 */
async function isJobAssignedToVehicle(userId, userType, jobId) {
  if (userType != UserTypes.DEVICE) return true;
  // BUG FIX: the original called an undefined callback `cb` here, which would
  // crash with a ReferenceError; throw the input error instead.
  if (!userId || !userType || !ObjectId.isValid(userId) || !jobId) AppInputError.throw();
  const job = await JobAssign.findOne({ user: ObjectId(userId), job: jobId, status: 0 }, '_id', { lean: true });
  if (!job) AppError.throw(Errors.JOB_NOT_ASSIGNED);
  return true;
}
/**
 * Clean a list of GeoJson polygon areas in place:
 *  - drop entries with missing geometry or degenerate rings (< 4 points after
 *    removing repeated lat/lng pairs)
 *  - simplify rings with more than 10 points and ensure Feature type + center
 *  - remove duplicated areas (same surface area and center)
 * @param {Array} areas GeoJson polygon features
 * @param {Function} [cb] optional node-style callback (err, cleanedAreas)
 * @returns cleaned areas list when no callback is given
 */
function cleanAreas(areas, cb) {
  if (utils.isEmptyArray(areas)) return cb ? cb(null, []) : [];
  for (let i = areas.length - 1; i >= 0; i--) {
    // BUG FIX: entries without geometry/coordinates were only skipped before,
    // which made the simplification loop below throw a TypeError on them;
    // remove them from the list instead.
    if (!areas[i].geometry || !areas[i].geometry.coordinates) {
      areas.splice(i, 1);
      continue;
    }
    const aCoors = polyUtil.removeSameLatLns(areas[i].geometry.coordinates[0]);
    if (aCoors.length < 4) areas.splice(i, 1);
  }
  // Simplify rings that carry too many points
  for (let j = 0; j < areas.length; j++) {
    if (!areas[j]['type']) areas[j]['type'] = 'Feature';
    if (areas[j].geometry.coordinates[0].length > 10) {
      try {
        // TODO: check before and after simplification whether the area's coors are still valid
        areas[j].geometry.coordinates[0] = simplify(areas[j].geometry.coordinates[0], 0.00001);
        if (!utils.hasProperty(areas[j], 'center'))
          areas[j]['center'] = turf.center(areas[j]);
      } catch (error) {
        debug(error);
      }
    }
  }
  // Remove duplicated areas
  areas = cleanDupAreas(areas);
  return cb ? cb(null, areas) : areas;
}
/**
 * Remove duplicated GeoJson points: two points are considered equal when
 * both coordinates match to 6 decimal places.
 * @param {Array} points GeoJson point features
 * @param {Function} [cb] optional node-style callback (err, points)
 * @returns de-duplicated points list when no callback is given
 */
function cleanGeoPoints(points, cb) {
  if (utils.isEmptyArray(points)) return cb ? cb(null, []) : [];
  const samePoint = (a, b) => {
    if (!a.geometry || !b.geometry) return false;
    return utils.fixedTo(a.geometry.coordinates[0], 6) === utils.fixedTo(b.geometry.coordinates[0], 6)
      && utils.fixedTo(a.geometry.coordinates[1], 6) === utils.fixedTo(b.geometry.coordinates[1], 6);
  };
  const deduped = _.uniqWith(points, samePoint);
  return cb ? cb(null, deduped) : deduped;
}
/**
 * Remove duplicated areas: two areas are the same when they share the same
 * rounded surface area and the same center (to 6 decimal places).
 * Side effect: caches `orgArea` and `center` on each area. Areas that fail
 * the turf computations are dropped.
 * @param {Array} areas GeoJson polygon features
 * @param {Function} [cb] optional node-style callback (err, areas)
 */
function cleanDupAreas(areas, cb) {
  let newAreas = [];
  if (areas && areas.length) {
    const seen = new Set();
    newAreas = areas.filter((it) => {
      try {
        if (!utils.hasProperty(it, 'orgArea')) it['orgArea'] = turf.area(it);
        if (!utils.hasProperty(it, 'center')) it['center'] = turf.center(it);
      } catch (error) {
        debug(error);
        return false;
      }
      const key = `${utils.fixedTo(it['orgArea'], 0)}_${utils.fixedTo(it['center'].geometry.coordinates[0], 6)}_${utils.fixedTo(it['center'].geometry.coordinates[1], 6)}`;
      if (seen.has(key)) return false;
      seen.add(key);
      return true;
    });
  }
  return cb ? cb(null, newAreas) : newAreas;
}
/**
 * Ensure a GeoJson object is tagged as a "Feature" (mutates in place).
 * @param {*} geoJson the GeoJson object; falsy values are returned untouched
 */
function setFeatureProp(geoJson) {
  if (!geoJson) return geoJson;
  if (geoJson['type'] != "Feature") geoJson['type'] = "Feature";
}
/**
 * Calculate the total sprayable surface given the spray and excluded (XCL)
 * areas: sum of each spray area minus its overlaps with nearby XCL areas.
 * An R-tree over the XCLs is built lazily on first use.
 * @param {Array} sprAreas GeoJson spray polygons
 * @param {Array} xclAreas GeoJson excluded polygons (optional)
 * @returns The total sprayable area in square meters
 */
function calcTTSprayAreas(sprAreas, xclAreas) {
  if (utils.isEmptyArray(sprAreas)) return 0;
  if (!utils.isEmptyArray(xclAreas)) {
    for (const xcl of xclAreas) setFeatureProp(xcl);
  }
  let tree = null;
  let total = 0;
  for (const sprA of sprAreas) {
    setFeatureProp(sprA);
    let realArea = turf.area(sprA);
    if (xclAreas && xclAreas.length) {
      if (!tree) {
        tree = new GeojsonRbush();
        tree.load(xclAreas);
      }
      // Only XCLs whose bounding boxes are near this spray area matter
      const nearXcls = tree.search(sprA);
      for (const xcl of nearXcls.features) {
        const overlap = turf.intersect(sprA, xcl);
        if (overlap) {
          const overlapArea = turf.area(overlap);
          if (overlapArea) realArea -= overlapArea;
        }
      }
    }
    total += realArea;
  }
  return total;
}
/**
 * Create an excluded (XCL) area from a spray area: deep-clone it and override
 * its properties to mark it as an exclusion (zero application rate, red, type 1).
 * @param {*} sprayArea GeoJson spray area
 * @returns the XCL clone; the input itself when falsy; undefined when cloning fails
 */
function sprayToXCL(sprayArea) {
  if (!sprayArea) return sprayArea;
  let xcl;
  try {
    xcl = JSON.parse(JSON.stringify(sprayArea));
    // ROBUSTNESS FIX: the original threw (and was swallowed) when the clone
    // had no `properties` object; create one before writing the XCL markers.
    if (!xcl['properties']) xcl['properties'] = {};
    xcl['properties']['appRate'] = 0;
    xcl['properties']['color'] = 'red';
    xcl['properties']['type'] = 1;
  } catch (error) {
    debug(error);
  }
  return xcl;
}
/**
 * Delete an application by id via its removeFull method.
 * @param {*} appId application id (hex string or ObjectId)
 * @param {boolean} markeDeleted forwarded to App.removeFull — presumably
 *        marks the app deleted instead of hard-deleting; confirm against model
 * @throws {AppParamError} when appId is missing or not a valid ObjectId
 */
async function deleteAppById(appId, markeDeleted = false) {
  if (!appId || !ObjectId.isValid(appId)) AppParamError.throw();
  const app = await App.findById(ObjectId(appId));
  if (!app) return;
  await app.removeFull(null, markeDeleted);
}
/**
 * Delete area grid lines for the given areas in one bulk operation.
 * @param {*} areaIds area Id list in ObjectId type
 * @param {*} ses optional MongoDB session
 * @returns {Promise} bulkWrite result, or "" when there is nothing to delete
 */
function deleteAreaLines(areaIds, ses) {
  if (utils.isEmptyArray(areaIds)) return Promise.resolve("");
  const bulkDelOps = areaIds.map((id) => ({ deleteOne: { filter: { areaId: id } } }));
  if (!bulkDelOps.length) return Promise.resolve("");
  return AreaLines.bulkWrite(bulkDelOps, { session: ses });
}
/**
 * Check newAreas against srcAreas for duplication and remove the duplicates.
 * The input lists must be in GeoJson polygon format.
 * Uses an R-tree over srcAreas to quickly locate nearby candidates for each
 * new area before the precise same-area comparison.
 * @param {*} srcAreas the source areas list to check against
 * @param {*} newAreas areas to check (duplicates are spliced out)
 * @param {*} cb optional node-style callback receiving { areas, dup }
 * @returns { areas: remaining areas, dup: number of duplicates } when no callback
 */
function checkDupAreas(srcAreas, newAreas, cb) {
  if (utils.isEmptyArray(srcAreas) || utils.isEmptyArray(newAreas)) {
    const empty = { areas: newAreas, dup: 0 };
    return cb ? cb(null, empty) : empty;
  }
  // Ensure GeoJson with type = "Feature"
  if (!srcAreas[0].type || srcAreas[0].type != "Feature")
    srcAreas = srcAreas.map((it) => { it.type = "Feature"; return it; });
  let tree = GeojsonRbush();
  try {
    tree.load(srcAreas);
  } catch (error) {
    debug(error);
    throw error;
  }
  // Ensure GeoJson with type = "Feature"
  if (!newAreas[0].type || newAreas[0].type != "Feature")
    newAreas = newAreas.map((it) => { it.type = "Feature"; return it; });
  let dupCount = 0;
  // Iterate backwards so splicing does not skip elements
  for (let i = newAreas.length - 1; newAreas.length && i >= 0; i--) {
    const nearItems = tree.search(newAreas[i]);
    if (isSameArea(nearItems.features, newAreas[i])) {
      newAreas.splice(i, 1);
      dupCount++;
    }
  }
  tree = null;
  const result = { areas: newAreas, dup: dupCount };
  return cb ? cb(null, result) : result;
}
/**
 * Check whether 'area' duplicates any entry of 'areas': same surface area and
 * centers within 0.01 of each other (turf.distance default units, kilometers).
 * @param areas GeoJson areas
 * @param area GeoJson area
 * @returns {boolean} true when a duplicate is found; false otherwise or on turf errors
 */
function isSameArea(areas, area) {
  if (utils.isEmptyArray(areas) || !area) return false;
  try {
    const itemArea = turf.area(area), itemCenter = turf.center(area);
    for (let i = 0; i < areas.length; i++) {
      const _area = areas[i];
      // BUG FIX: the original compared `itemArea === turf.area(area)` — the
      // item's area with itself, which is always true — so only the centers
      // were actually compared. Compare against the candidate's area instead.
      if (itemArea === turf.area(_area) && turf.distance(turf.center(_area), itemCenter) <= 0.01)
        return true;
    }
  } catch (error) {
    debug(error);
    return false;
  }
  return false;
}
/**
 * Add areas to the Areas Library for a client (customer/farmer). The areas are
 * cleaned first, then checked against the client's existing areas to avoid
 * duplication, then inserted in chunks inside a transaction.
 * NOTE(review): ops.returnAreas is documented upstream but was never honored by
 * the implementation — inserted areas are always returned; confirm with callers.
 * @param {*} areas the Areas list (GeoJson polygons)
 * @param {*} ops { returnAreas: boolean, clientId: ObjectId or string hex clientId, debug: boolean }
 * @returns {Promise<{areas: Array, dup: number}>} inserted areas and duplicate count
 */
async function addAreasToLib(areas, ops) {
  const _debug = ops.debug || false;
  let dup = 0, resAreas = [];
  if (_debug) debug('Cleaning Areas ...');
  let _areas = await cleanAreasAsync(areas);
  if (!utils.isEmptyArray(_areas) && ObjectId.isValid(ops.clientId)) {
    if (_debug) debug('Checking Duplication ...');
    const clientAreas = await Areas.find({ client: ObjectId(ops.clientId) }, { __v: 0, _id: 0 }, { lean: true });
    const checkRes = await checkDupAreasAsync(clientAreas, _areas);
    dup = checkRes.dup;
    _areas = checkRes.areas;
  }
  if (!utils.isEmptyArray(_areas)) {
    if (_debug) debug('Importing Data ...');
    for (let i = 0; i < _areas.length; i++) {
      delete _areas[i]['_id'];
      _areas[i]['client'] = ObjectId(ops.clientId);
      // Convert string crop ids to ObjectId
      if (_areas[i].properties.crop && typeof _areas[i].properties.crop == 'string' && ObjectId.isValid(_areas[i].properties.crop))
        _areas[i].properties.crop = ObjectId(_areas[i].properties.crop);
    }
    const chunks = utils.chunkArray(_areas, 800);
    await mongoUtil.runInTransaction(async (session) => {
      for (const chunk of chunks) {
        // Map chunk to the Area model shape (strips transient fields such as
        // type/center/orgArea added during cleaning)
        const _chunk = chunk.map(it => ({
          client: it.client,
          properties: it.properties,
          geometry: it.geometry
        }));
        // BUG FIX: insert the mapped chunk — the original inserted the raw
        // `chunk` and left `_chunk` unused.
        const addedAreas = await Areas.insertMany(_chunk, { session, ordered: true, lean: true });
        if (!utils.isEmptyArray(addedAreas)) resAreas = resAreas.concat(addedAreas);
      }
    });
  }
  return ({ areas: resAreas, dup: dup });
}
/**
 * Ensure loadOp has the default structure when null/undefined; any missing or
 * falsy field falls back to 0 (date falls back to Date.now()).
 */
function defLoadOp(loadOp) {
  const { date, area, capacity, loadType, loads } = loadOp || {};
  return {
    date: date || Date.now(),
    area: area || 0,
    capacity: capacity || 0,
    loadType: loadType || 0,
    loads: loads || 0
  };
}
/**
 * Aggregate weather statistics (wind speed/direction, temperature, humidity)
 * over application detail records of the given files. Records with
 * out-of-range sensor readings are excluded before aggregating.
 * @param {Array} fileIds file ids to aggregate over
 * @returns {Promise<Array>} single-element aggregate result (avg/min/max values)
 */
async function getDataWeatherInfo(fileIds) {
  // Keep only plausible sensor readings
  const matchStage = {
    $match: {
      fileId: { $in: fileIds },
      windSpd: { $gt: 0.0 },
      windDir: { $gte: 0.0, $lte: 360 },
      temp: { $gte: 5.0, $lte: 60.0 },
      humid: { $gte: 9.0, $lte: 90.0 }
    }
  };
  const groupStage = {
    $group: {
      _id: null,
      avgWindSpd: { $avg: "$windSpd" }, maxWindSpd: { $max: "$windSpd" }, minWindSpd: { $min: "$windSpd" },
      avgWindDir: { $avg: "$windDir" }, maxWindDir: { $max: "$windDir" }, minWindDir: { $min: "$windDir" },
      avgTemp: { $avg: "$temp" }, avgHumid: { $avg: "$humid" }
    }
  };
  return await AppDetail.aggregate([matchStage, groupStage]);
}
/* Export asynchronous functions as Promise Async functions to avoid blocking the Node Event Loop when dealing with large data */
const cleanAreasAsync = util.promisify(cleanAreas),
cleanDupAreasAync = util.promisify(cleanDupAreas), // NOTE(review): name has a typo ("Aync"); it is exported as-is, renaming would break callers
cleanGeoPointsAsync = util.promisify(cleanGeoPoints),
checkDupAreasAsync = util.promisify(checkDupAreas);
/**
 * Create an excluded (XCL) GeoJson polygon from a named coordinate ring,
 * ensuring counter-clockwise winding for the outer ring.
 * @param {*} nameWithLnLats { name: string, coors: [[lng, lat], ...] }
 * @returns GeoJson polygon marked as an exclusion (type 1, zero rate, red)
 */
function createXclArea(nameWithLnLats) {
  let _coors = nameWithLnLats.coors;
  // BUG FIX: Array.prototype.reverse mutates in place; the original reversed
  // the caller's coordinate array. Reverse a copy instead.
  if (polyUtil.isClockwise(_coors))
    _coors = _coors.slice().reverse();
  const geoXcl = {
    properties: {
      name: utils.normalizeName(nameWithLnLats.name),
      type: 1,
      appRate: 0,
      color: 'red',
    },
    geometry: {
      type: 'Polygon',
      coordinates: [_coors]
    }
  };
  return geoXcl;
}
/**
 * Update job assignment status with optional additional fields.
 * Upserts the assignment document keyed by (user, job).
 * @param {string} userId - User/vehicle ID
 * @param {string} jobId - Job ID
 * @param {number} status - New assignment status (from AssignStatus constants)
 * @param {Object} additionalFields - Additional fields to update (optional)
 * @param {Object} session - MongoDB session for transactions (optional)
 * @returns {Promise<void>}
 */
async function updateAssignStatus(userId, jobId, status, additionalFields = {}, session = null) {
  const missingParams = !userId || !jobId || status === undefined;
  if (missingParams) {
    throw new Error('userId, jobId, and status are required parameters');
  }
  const updateFields = { status, date: new Date(), ...additionalFields };
  // Run inside the caller's session when provided, otherwise in a new transaction
  await runWithSessionOrTransaction(async (ses) => {
    await JobAssign.updateOne(
      { user: ObjectId(userId), job: jobId },
      { $set: updateFields },
      { upsert: true, session: ses }
    );
  }, session);
}
/**
 * Update job assignment status by assignment ID with optional additional fields.
 * @param {string} assignId - Assignment ID
 * @param {number} status - New assignment status (from AssignStatus constants)
 * @param {Object} additionalFields - Additional fields to update (optional)
 * @param {Object} session - MongoDB session for transactions (optional)
 * @returns {Promise<void>}
 */
async function updateAssignStatusById(assignId, status, additionalFields = {}, session = null) {
  const missingParams = !assignId || status === undefined;
  if (missingParams) {
    throw new Error('assignId and status are required parameters');
  }
  const updateFields = { status, date: new Date(), ...additionalFields };
  // Run inside the caller's session when provided, otherwise in a new transaction
  await runWithSessionOrTransaction(async (ses) => {
    await JobAssign.updateOne({ _id: ObjectId(assignId) }, { $set: updateFields }, { session: ses });
  }, session);
}
/**
 * Write a job log entry with an optional job status update.
 * @param {string} jobId - Job ID
 * @param {number} logType - Log type from AssignStatus constants
 * @param {string} userId - User ID
 * @param {Object} options - Additional options
 * @param {boolean} options.updateJobStatus - Whether to update job status (default: true for download/upload types)
 * @param {JobStatus} options.jobStatusValue - Custom job status value (defaults to JobStatus.DOWNLOADED)
 * @param {Object} options.session - MongoDB session for transactions (optional)
 * @returns {Promise<void>}
 * @throws {AppInputError} when a required parameter is missing
 */
async function writeJobLog(jobId, logType, userId, options = {}) {
  if (!jobId || logType === undefined || !userId) {
    debug('jobId, logType, and userId are required parameters');
    // CONSISTENCY FIX: AppInputError.throw() throws by itself (see how
    // AppError.throw / AppParamError.throw are used elsewhere in this file);
    // the original's `throw AppInputError.throw()` was redundant.
    AppInputError.throw();
  }
  // Determine if job status should be updated (default: only download/upload log types)
  const shouldUpdateJobStatus = options.updateJobStatus !== undefined
    ? options.updateJobStatus
    : (logType === AssignStatus.DOWNLOADED || logType === AssignStatus.UPLOADED);
  const jobStatusValue = options.jobStatusValue !== undefined ? options.jobStatusValue : JobStatus.DOWNLOADED;
  // Run inside the caller's session when provided, otherwise in a new transaction
  await runWithSessionOrTransaction(async (session) => {
    // Create job log entry
    await JobLog.create([{ job: jobId, type: logType, user: ObjectId(userId) }], { session });
    // Update job status if specified
    if (shouldUpdateJobStatus) {
      await Job.updateOne({ _id: jobId }, { $set: { status: jobStatusValue } }, { session });
    }
  }, options.session);
}
/**
 * Process job buffers and convert them to excluded areas (XCL).
 * Buffers lying within a spray area become XCLs named after that spray area;
 * the remaining buffers become standalone XCL areas. Existing excluded areas
 * are preserved and the converted buffers are appended to them.
 * @param {Object} job - Job object containing sprayAreas, excludedAreas, bufs, and measureUnit
 * @returns {Array} Combined excluded areas including converted buffers
 */
function processBuffersToXclAreas(job) {
  // line_buffer is not imported at module level, so require it locally.
  const bufUtil = require('./line_buffer');
  // CONSISTENCY FIX: use the module-level utils/polyUtil/debug bindings instead
  // of shadowing them with redundant local require() calls (the original also
  // created a second debug namespace 'agm:job_util' differing from the module's).
  // Start with existing excluded areas, or empty array
  let xclAreas = Array.isArray(job.excludedAreas) ? [...job.excludedAreas] : [];
  const bufs = job.bufs;
  const sprayAreas = job.sprayAreas;
  debug(`Processing buffers to XCL areas: bufs=${bufs ? bufs.length : 'null'}, sprayAreas=${sprayAreas ? sprayAreas.length : 'null'}, excludedAreas=${job.excludedAreas ? job.excludedAreas.length : 'null'}`);
  // Early return if no buffers or spray areas
  if (utils.isEmptyArray(bufs) || utils.isEmptyArray(sprayAreas)) {
    debug(`Skipping buffer processing: bufs empty=${utils.isEmptyArray(bufs)}, sprayAreas empty=${utils.isEmptyArray(sprayAreas)}`);
    return xclAreas;
  }
  debug(`Found ${bufs.length} buffers and ${sprayAreas.length} spray areas to process`);
  let polygon;
  let xclPolygons = [];
  // Work on a copy so the job's buffer list is not modified
  let workingBufs = [...bufs];
  try {
    // Pass 1: buffers inside a spray area become XCLs named after that area
    for (let j = 0; j < sprayAreas.length; j++) {
      // Iterate backwards to avoid index issues when splicing
      for (let index = workingBufs.length - 1; index >= 0; index--) {
        const buf = workingBufs[index];
        // Validate buffer structure
        if (!buf || !buf.geometry || !buf.geometry.coordinates || !buf.properties) {
          debug(`Removing invalid buffer at index ${index} - missing structure`);
          workingBufs.splice(index, 1);
          continue;
        }
        // Remove buffers with invalid coordinates
        if (utils.isEmptyArray(buf.geometry.coordinates) || buf.geometry.coordinates.length < 2) {
          debug(`Removing invalid buffer at index ${index} - insufficient coordinates`);
          workingBufs.splice(index, 1);
          continue;
        }
        // Check if line buffer is within this spray polygon
        if (polyUtil.lineInPolygon(buf.geometry.coordinates, sprayAreas[j].geometry.coordinates)) {
          debug(`Buffer ${index} is within spray area ${j}, converting to XCL`);
          workingBufs.splice(index, 1);
          // Convert buffer line to a polygon, width normalized to meters
          const bufferWidth = utils.toMeter(buf.properties.width || 0, job.measureUnit);
          debug(`Converting buffer with width: ${bufferWidth} meters from ${buf.properties.width} ${job.measureUnit ? 'US (feet)' : 'metric (meters)'}`);
          polygon = bufUtil.lineBuffer(buf.geometry.coordinates, bufferWidth);
          debug(`lineBuffer result: ${polygon ? polygon.length : 'null'} coordinates`);
          if (!utils.isEmptyArray(polygon)) {
            const sprayAreaName = sprayAreas[j].properties && sprayAreas[j].properties.name
              ? sprayAreas[j].properties.name
              : `Spray_${j + 1}`;
            xclPolygons.push({ name: sprayAreaName, coors: polygon });
            debug(`Added XCL polygon for spray area: ${sprayAreaName}`);
          } else {
            debug(`lineBuffer returned empty result for buffer ${index}`);
          }
        }
      }
    }
    // Pass 2: remaining buffers become standalone XCL areas
    debug(`Processing ${workingBufs.length} remaining buffers as external XCLs`);
    for (let k = 0; k < workingBufs.length; k++) {
      const buf = workingBufs[k];
      if (buf && buf.geometry && buf.geometry.coordinates && buf.properties) {
        const bufferWidth = utils.toMeter(buf.properties.width || 0, job.measureUnit);
        debug(`Converting external buffer ${k} with width: ${bufferWidth} meters`);
        polygon = bufUtil.lineBuffer(buf.geometry.coordinates, bufferWidth);
        debug(`External buffer lineBuffer result: ${polygon ? polygon.length : 'null'} coordinates`);
        if (!utils.isEmptyArray(polygon)) {
          xclPolygons.push({ name: `XCL_${k + 1}`, coors: polygon });
          debug(`Added external XCL polygon: XCL_${k + 1}`);
        } else {
          debug(`lineBuffer returned empty result for external buffer ${k}`);
        }
      }
    }
    debug(`Created ${xclPolygons.length} XCL polygons from buffers`);
    // Convert named polygons to proper XCL GeoJson areas
    for (let l = 0; l < xclPolygons.length; l++) {
      try {
        xclAreas.push(createXclArea(xclPolygons[l]));
      } catch (error) {
        debug(`Error creating XCL area ${l}: ${error.message}`);
      }
    }
  } catch (error) {
    debug(`Error processing buffers: ${error.message}`);
  }
  debug(`Returning ${xclAreas.length} total XCL areas`);
  return xclAreas;
}
module.exports = {
isJobAssignedToVehicle,
cleanAreas, cleanAreasAsync, cleanDupAreasAync, cleanGeoPoints, cleanGeoPointsAsync, sprayToXCL, deleteAppById, deleteAreaLines, checkDupAreas, checkDupAreasAsync, addAreasToLib, defLoadOp, getDataWeatherInfo, createXclArea, calcTTSprayAreas, updateAssignStatus, updateAssignStatusById, writeJobLog, processBuffersToXclAreas
}